hexsha
stringlengths 40
40
| size
int64 3
1.05M
| ext
stringclasses 163
values | lang
stringclasses 53
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
112
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
float64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
113
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
float64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
113
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
float64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 3
1.05M
| avg_line_length
float64 1
966k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
73e0eeb0e8dbf7cc2e6bb9bb515832b20a889de2 | 2,404 | t | Perl | t/ppi_statement.t | guillaumeaubert/PPI | 6e2583b92847744aadc35438dcda80e3bd44013a | [
"Artistic-1.0"
]
| null | null | null | t/ppi_statement.t | guillaumeaubert/PPI | 6e2583b92847744aadc35438dcda80e3bd44013a | [
"Artistic-1.0"
]
| null | null | null | t/ppi_statement.t | guillaumeaubert/PPI | 6e2583b92847744aadc35438dcda80e3bd44013a | [
"Artistic-1.0"
]
| null | null | null | #!/usr/bin/perl
# Unit testing for PPI::Statement
use t::lib::PPI::Test::pragmas;
use Test::More tests => 23;
use PPI;
SPECIALIZED: {
my $Document = PPI::Document->new(\<<'END_PERL');
package Foo;
use strict;
;
while (1) { last; }
BEGIN { }
sub foo { }
state $x;
$x = 5;
END_PERL
isa_ok( $Document, 'PPI::Document' );
my $statements = $Document->find('Statement');
is( scalar @{$statements}, 10, 'Found the 10 test statements' );
isa_ok( $statements->[0], 'PPI::Statement::Package', 'Statement 1: isa Package' );
ok( $statements->[0]->specialized, 'Statement 1: is specialized' );
isa_ok( $statements->[1], 'PPI::Statement::Include', 'Statement 2: isa Include' );
ok( $statements->[1]->specialized, 'Statement 2: is specialized' );
isa_ok( $statements->[2], 'PPI::Statement::Null', 'Statement 3: isa Null' );
ok( $statements->[2]->specialized, 'Statement 3: is specialized' );
isa_ok( $statements->[3], 'PPI::Statement::Compound', 'Statement 4: isa Compound' );
ok( $statements->[3]->specialized, 'Statement 4: is specialized' );
isa_ok( $statements->[4], 'PPI::Statement::Expression', 'Statement 5: isa Expression' );
ok( $statements->[4]->specialized, 'Statement 5: is specialized' );
isa_ok( $statements->[5], 'PPI::Statement::Break', 'Statement 6: isa Break' );
ok( $statements->[5]->specialized, 'Statement 6: is specialized' );
isa_ok( $statements->[6], 'PPI::Statement::Scheduled', 'Statement 7: isa Scheduled' );
ok( $statements->[6]->specialized, 'Statement 7: is specialized' );
isa_ok( $statements->[7], 'PPI::Statement::Sub', 'Statement 8: isa Sub' );
ok( $statements->[7]->specialized, 'Statement 8: is specialized' );
isa_ok( $statements->[8], 'PPI::Statement::Variable', 'Statement 9: isa Variable' );
ok( $statements->[8]->specialized, 'Statement 9: is specialized' );
is( ref $statements->[9], 'PPI::Statement', 'Statement 10: is a simple Statement' );
ok( ! $statements->[9]->specialized, 'Statement 10: is not specialized' );
}
| 49.061224 | 97 | 0.546173 |
ed5b7ee90abd66c4361ebe580b82a32e39215a64 | 4,567 | al | Perl | benchmark/benchmarks/FASP-benchmarks/data/random-oriented/randomoriented-1110-320-928.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/random-oriented/randomoriented-1110-320-928.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | benchmark/benchmarks/FASP-benchmarks/data/random-oriented/randomoriented-1110-320-928.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
]
| null | null | null | 1 154 185 214
2 111 207 262 298 307
3 17 133
4 30
5 8 41
6 5 111
7 64 72 237 253 261 296
8 55 57
9 8 92 114
10 148 175 269
11 4 90 191 245
12 13 117 246 268
13 64 181
14 37 146 190
15 29 288 298
16 149 239
17 16 53 183
18 31 121 146 232
19 105 118
20 22 80 119 173 209 229
21 60 175 188 250 298 300 319
22 170 223
23 1 86 98 241 299
24 122
25 42
26 114 215 259 286
27 17 35 222 260 266 297
28 70 172 227 232 294 296
29 19 188 234
30 139 224
31 9
32 35 82 120 197
33 60 73 231
34 11 125 285
35 16 36
36 56 137 184 205
37 110 262
38 285
39 67 233 295 301
40 111 183 251
41 173 228
42 164
43 58 182 239 244 254
44 147 245
45 95 131 186
46 14 22 123
47 8 36 83
48
49 21 33 239 252 279
50 10 20 38 48 55 69 105 285 300
51 11 30 68 82 145 223 290 295
52 71 105
53 194 195 295 317
54 19 104 141 226 261 311
55
56 155
57 36 48 67 106
58 86 163 318
59 33 93 101 154
60 30 257
61 23 28 86 143 202
62 85 263 267 294
63 123 214
64 5 75 115 220
65 84
66 39 167
67 73 109 164 191 208 250 252
68 13 116 235
69 245 286
70 161 163 240
71 24 172 311
72 32 234
73 210 262 266 297
74 129 137
75 9 143
76 126 134 180 186 235 299
77 308
78 191
79 28 47 131 160 315
80 6 144 219 281 309
81 56 57 68 145 237 252 306
82 14 75 87
83 128 201
84 39
85 42 55 140 295
86 84 149 218 220 261 282
87 137 146
88 96 171 189
89 62 65 181
90 130 181 195 264
91 124
92 182 236 297
93 218
94
95 27 56
96 57 166 179 252
97 99 122 178 299
98 83 106 134 185
99 286
100 5 22 193
101 85
102 97 178 305
103 14 283
104 37 59 67
105 212 250
106 37
107 305
108 28 147
109 103 217
110 153 273
111 171 196
112 123 170 186
113 16 142 195 229
114 73 215
115 168 318
116 202 313 318
117 96 189
118 274 279 316
119 74
120 108 289
121 62 172
122
123 40 73 90 92 125 150 214 255
124 274
125
126 17 29 113 141 261
127 24 162 172 278 311
128 108 118 169 188 299
129 45 275 288
130 38 217
131 14 42 278
132 148 223
133 144
134 38 71 232
135 209 286 302
136 67 143 189 254 320
137
138 149 224
139 45 50 57
140 237 291
141 58 60
142 22 24 282 309
143 88 193
144 15 25 47 141 224
145 272 297
146 64 269
147 8 120
148 79 135 193 201 241
149 98 165 184 188 196 245
150 45 235 293
151 200 235 260 262 269
152 227 314
153 4 26 194 223 268
154 21 27 57 80 168 202
155 199 287 300
156
157 3 64
158 168
159 20 97 175
160 96
161 139 178
162 1 80 295
163 199 209 234
164 114
165 5 51 196 265
166 150 261 305
167 207 282
168 15 24 78 141 183 184
169 66 111
170 40 107 152 185 216 315
171 163 275
172 261 283
173 119 141 212 237 293 300
174 23 239 277
175 127
176 54
177 7 225 316
178 207
179
180 138 142 158 184
181 136 161 271
182 45
183 154 166
184 69 254 279
185 166 308
186 81 211 278
187 18 37 97
188 78 110 141 154 299
189 34 212 228
190 66 304
191 65 96 225
192 60
193
194 7 224 231 309
195 122 160 193
196 184 220
197 11 42 89 141 257 304
198 153 223 235 313
199 7 39 98 276 284
200 10 11 51 66
201 57
202 272
203 131 172 210
204 31 198 253
205 1 127 210 248
206 166 221 282
207 32 42 214 218 316
208 169 202 289 290
209 137 153 290
210 249 272
211 206 237 306
212 9 108
213 17 57 109 143 269 282 300
214 130 232 269 277
215 69 97 268
216 34 137 202 219
217 69 148
218 26 53 217 249
219 41 43 92
220 276
221 46 159
222 247 283
223 52 86 128 168 288 302
224
225 206 265 267
226 168 251
227 131 161 167
228 276 289 300
229 122 163 198
230 167 190 295
231 20
232 65 99 122 158 197 210
233 187 231 277
234 25 126 151
235 123
236 274
237 156
238 55 74 93 153 178 200 203
239 28 77 267 297
240 107 206 300
241 118 165
242 281
243 309
244 110 118 167 239
245 92 111 113 121 260 308
246 23 217
247 98
248 207 319
249 240 309
250 60 127 134
251 59 78 238 248 272
252 131 169
253 5 86 158 181 220 236 258 286
254 74 221 251 259
255 127 303
256 156 198 213 237
257 10 131 186 285
258 45
259 10 93 142 197 290
260 142 215
261 278
262 98 152
263 179
264
265 19 77 153 226
266 45
267 47 94 134
268 44 165
269 204
270 20 92 121
271 29 44 236 308
272 53 271 298 312
273 10 88 125
274 105 160 212
275 150 224 261
276 116 167
277 76 182
278 4 27 28 108 146 289
279 204
280 81 262 264
281 44 269
282 95 274
283 189 314
284 17 102 148 161 182 208 245 255 257
285
286 156 283
287 212
288 9 16 72 133 169
289 67 98 103 134 147 253 271
290 94 257
291 9 21 81 162 190 203 208
292 33 194
293 27 53 87 237 296
294 9 136 148
295 5 79 98 186 256
296 224
297 185
298 72 105 225 262
299 127 231 296
300 142
301 108 220 261
302 115 120
303 15 52 206
304 177 188 231
305 2 17 77
306 34 301
307 36 222
308 123 171 256
309 203
310 58
311 142 237
312 53 122 165 195 275
313 158 223 267
314 234 312
315 137 270
316 114 147
317 81 148
318 137
319 6 240
320 145 166 175 | 14.271875 | 38 | 0.726954 |
ed6b9785effb8244ec8ae1effa12832ecef97b4f | 172 | pl | Perl | t/example/test01.pl | mschout/MooseX-App | 305e17899fc023b357f5cbb71a361fbcf8db07e8 | [
"Artistic-1.0-cl8"
]
| 7 | 2015-10-18T20:45:16.000Z | 2019-05-13T08:31:52.000Z | t/example/test01.pl | mschout/MooseX-App | 305e17899fc023b357f5cbb71a361fbcf8db07e8 | [
"Artistic-1.0-cl8"
]
| 39 | 2015-02-06T13:48:33.000Z | 2021-08-15T15:39:00.000Z | t/example/test01.pl | mschout/MooseX-App | 305e17899fc023b357f5cbb71a361fbcf8db07e8 | [
"Artistic-1.0-cl8"
]
| 19 | 2015-01-01T20:41:09.000Z | 2021-01-31T10:15:21.000Z | #!/usr/bin/env perl
use strict;
use warnings;
use 5.010;
use FindBin qw();
use lib $FindBin::Bin.'/../testlib';
use Test01;
Test01->new_with_command( global => 1 )->run; | 15.636364 | 45 | 0.674419 |
ed0d7ae343c86807b3a25d9b4694ad8210fec4a6 | 427 | pm | Perl | auto-lib/Paws/NimbleStudio/GetEulaResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/NimbleStudio/GetEulaResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/NimbleStudio/GetEulaResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::NimbleStudio::GetEulaResponse;
use Moose;
has Eula => (is => 'ro', isa => 'Paws::NimbleStudio::Eula', traits => ['NameInRequest'], request_name => 'eula');
has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::NimbleStudio::GetEulaResponse
=head1 ATTRIBUTES
=head2 Eula => L<Paws::NimbleStudio::Eula>
The EULA.
=head2 _request_id => Str
=cut
| 15.25 | 115 | 0.653396 |
ed7b56da65d1fa24b4bc827b58b9f188b21a6837 | 1,679 | pm | Perl | inst/perl/map3c/TGPipeLib/FastqCountRegexp.pm | tanaylab/umi4cpackage | 88b07d896a137418ba6c31c2474b9dbe1d86fc20 | [
"MIT"
]
| 1 | 2020-07-01T07:06:02.000Z | 2020-07-01T07:06:02.000Z | inst/perl/map3c/TGPipeLib/FastqCountRegexp.pm | tanaylab/umi4cpackage | 88b07d896a137418ba6c31c2474b9dbe1d86fc20 | [
"MIT"
]
| 6 | 2020-09-15T07:34:23.000Z | 2021-06-16T09:33:45.000Z | inst/perl/map3c/TGPipeLib/FastqCountRegexp.pm | tanaylab/umi4cpackage | 88b07d896a137418ba6c31c2474b9dbe1d86fc20 | [
"MIT"
]
| 2 | 2020-07-01T07:06:04.000Z | 2020-10-07T15:01:06.000Z | use strict;
package TGPipeLib::FastqSplicer;
1;
sub required() {
return("re_code");
}
sub proc_line
{
my($split_id, $params, $fastq, $fastq2) = @_;
my($n) = 0;
$fastq->first();
my($re1) = $params->{regexp1};
my(%count);
if($fastq2 eq "NA") {
while($fastq->valid()) {
$fastq->next();
if($re1 eq "" || $fastq->cur_seq()=~/$re1/) {
my($res1) = ($re1 eq "") ? "" : $fastq->cur_seq()=~/$re1/;
$count{$res1}++;
}
$n++;
}
} else {
$fastq2->first();
my($re2) = $params->{regexp2};
while($fastq->valid()) {
$fastq->next();
$fastq2->next();
if(($re1 eq "" || $fastq->cur_seq()=~/$re1/)
&& ($re2 eq "" || $fastq2->cur_seq()=~/$re2/)) {
my($res1) = ($re1 eq "") ? "" : $fastq->cur_seq()=~/$re1/;
my($res2) = ($re2 eq "") ? "" : $fastq2->cur_seq()=~/$re2/;
$count{$res1.$res2}++;
}
$n++;
}
}
my($k);
my($out_fn) = $params->{work_dir}."/stat.$split_id";
if(open(RPT, ">$out_fn") ) {
foreach $k (keys %count) {
print RPT "$k\t$count{$k}\n";
}
close RPT;
return($out_fn);
} else {
return("ERR:cannot write $split_id");
}
}
sub reduce($) {
my($params) = @_;
my($wd) = $params->{work_dir};
my(@out_fns) = <$wd/stat.*>;
print STDERR "merge $#out_fns files\n";
my(%all);
my($i) = 0;
my($fn);
foreach $fn (@out_fns) {
if(!open(CNT, $fn) ) {
print STDERR "cannot open count report for split $i. $fn\n";
next;
}
print STDERR "reduxing $i fn $fn\n";
while(<CNT>) {
chop;
my($k, $v) = split("\t", $_);
$all{$k} += $v;
}
$i++;
}
my($k);
open(OUT, ">".($params->{out_fn})) || die "cannot write output!\n";
foreach $k (keys %all) {
print OUT "$k\t$all{$k}\n";
}
}
| 19.298851 | 68 | 0.503276 |
ed2b33c4c0ef6493dee7d61c25631486525fd2e8 | 44,725 | pm | Perl | webapp/perl/local/lib/perl5/DateTime/TimeZone/Europe/Brussels.pm | tomoyanp/isucon9-qualify-20210912 | f84b5d1c82f9d41bbba02422c1a6acd358d9c41a | [
"MIT"
]
| null | null | null | webapp/perl/local/lib/perl5/DateTime/TimeZone/Europe/Brussels.pm | tomoyanp/isucon9-qualify-20210912 | f84b5d1c82f9d41bbba02422c1a6acd358d9c41a | [
"MIT"
]
| 5 | 2021-05-20T04:16:14.000Z | 2022-02-12T01:40:02.000Z | webapp/perl/local/lib/perl5/DateTime/TimeZone/Europe/Brussels.pm | matsubara0507/isucon9-kansousen | 77b19085d76add98a3ce7370063a8636cde62499 | [
"MIT"
]
| null | null | null | # This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.08) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/tRZSIOcmOW/europe. Olson data version 2019b
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Europe::Brussels;
use strict;
use warnings;
use namespace::autoclean;
our $VERSION = '2.36';
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Europe::Brussels::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
59295541350, # utc_end 1879-12-31 23:42:30 (Wed)
DateTime::TimeZone::NEG_INFINITY, # local_start
59295542400, # local_end 1880-01-01 00:00:00 (Thu)
1050,
0,
'LMT',
],
[
59295541350, # utc_start 1879-12-31 23:42:30 (Wed)
59684730150, # utc_end 1892-05-01 11:42:30 (Sun)
59295542400, # local_start 1880-01-01 00:00:00 (Thu)
59684731200, # local_end 1892-05-01 12:00:00 (Sun)
1050,
0,
'BMT',
],
[
59684730150, # utc_start 1892-05-01 11:42:30 (Sun)
60395328000, # utc_end 1914-11-08 00:00:00 (Sun)
59684730150, # local_start 1892-05-01 11:42:30 (Sun)
60395328000, # local_end 1914-11-08 00:00:00 (Sun)
0,
0,
'WET',
],
[
60395328000, # utc_start 1914-11-08 00:00:00 (Sun)
60441980400, # utc_end 1916-04-30 23:00:00 (Sun)
60395331600, # local_start 1914-11-08 01:00:00 (Sun)
60441984000, # local_end 1916-05-01 00:00:00 (Mon)
3600,
0,
'CET',
],
[
60441980400, # utc_start 1916-04-30 23:00:00 (Sun)
60455199600, # utc_end 1916-09-30 23:00:00 (Sat)
60441987600, # local_start 1916-05-01 01:00:00 (Mon)
60455206800, # local_end 1916-10-01 01:00:00 (Sun)
7200,
1,
'CEST',
],
[
60455199600, # utc_start 1916-09-30 23:00:00 (Sat)
60472227600, # utc_end 1917-04-16 01:00:00 (Mon)
60455203200, # local_start 1916-10-01 00:00:00 (Sun)
60472231200, # local_end 1917-04-16 02:00:00 (Mon)
3600,
0,
'CET',
],
[
60472227600, # utc_start 1917-04-16 01:00:00 (Mon)
60485533200, # utc_end 1917-09-17 01:00:00 (Mon)
60472234800, # local_start 1917-04-16 03:00:00 (Mon)
60485540400, # local_end 1917-09-17 03:00:00 (Mon)
7200,
1,
'CEST',
],
[
60485533200, # utc_start 1917-09-17 01:00:00 (Mon)
60503677200, # utc_end 1918-04-15 01:00:00 (Mon)
60485536800, # local_start 1917-09-17 02:00:00 (Mon)
60503680800, # local_end 1918-04-15 02:00:00 (Mon)
3600,
0,
'CET',
],
[
60503677200, # utc_start 1918-04-15 01:00:00 (Mon)
60516982800, # utc_end 1918-09-16 01:00:00 (Mon)
60503684400, # local_start 1918-04-15 03:00:00 (Mon)
60516990000, # local_end 1918-09-16 03:00:00 (Mon)
7200,
1,
'CEST',
],
[
60516982800, # utc_start 1918-09-16 01:00:00 (Mon)
60521857200, # utc_end 1918-11-11 11:00:00 (Mon)
60516986400, # local_start 1918-09-16 02:00:00 (Mon)
60521860800, # local_end 1918-11-11 12:00:00 (Mon)
3600,
0,
'CET',
],
[
60521857200, # utc_start 1918-11-11 11:00:00 (Mon)
60531404400, # utc_end 1919-03-01 23:00:00 (Sat)
60521857200, # local_start 1918-11-11 11:00:00 (Mon)
60531404400, # local_end 1919-03-01 23:00:00 (Sat)
0,
0,
'WET',
],
[
60531404400, # utc_start 1919-03-01 23:00:00 (Sat)
60550153200, # utc_end 1919-10-04 23:00:00 (Sat)
60531408000, # local_start 1919-03-02 00:00:00 (Sun)
60550156800, # local_end 1919-10-05 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60550153200, # utc_start 1919-10-04 23:00:00 (Sat)
60561644400, # utc_end 1920-02-14 23:00:00 (Sat)
60550153200, # local_start 1919-10-04 23:00:00 (Sat)
60561644400, # local_end 1920-02-14 23:00:00 (Sat)
0,
0,
'WET',
],
[
60561644400, # utc_start 1920-02-14 23:00:00 (Sat)
60583417200, # utc_end 1920-10-23 23:00:00 (Sat)
60561648000, # local_start 1920-02-15 00:00:00 (Sun)
60583420800, # local_end 1920-10-24 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60583417200, # utc_start 1920-10-23 23:00:00 (Sat)
60595686000, # utc_end 1921-03-14 23:00:00 (Mon)
60583417200, # local_start 1920-10-23 23:00:00 (Sat)
60595686000, # local_end 1921-03-14 23:00:00 (Mon)
0,
0,
'WET',
],
[
60595686000, # utc_start 1921-03-14 23:00:00 (Mon)
60615126000, # utc_end 1921-10-25 23:00:00 (Tue)
60595689600, # local_start 1921-03-15 00:00:00 (Tue)
60615129600, # local_end 1921-10-26 00:00:00 (Wed)
3600,
1,
'WEST',
],
[
60615126000, # utc_start 1921-10-25 23:00:00 (Tue)
60628172400, # utc_end 1922-03-25 23:00:00 (Sat)
60615126000, # local_start 1921-10-25 23:00:00 (Tue)
60628172400, # local_end 1922-03-25 23:00:00 (Sat)
0,
0,
'WET',
],
[
60628172400, # utc_start 1922-03-25 23:00:00 (Sat)
60645106800, # utc_end 1922-10-07 23:00:00 (Sat)
60628176000, # local_start 1922-03-26 00:00:00 (Sun)
60645110400, # local_end 1922-10-08 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60645106800, # utc_start 1922-10-07 23:00:00 (Sat)
60662041200, # utc_end 1923-04-21 23:00:00 (Sat)
60645106800, # local_start 1922-10-07 23:00:00 (Sat)
60662041200, # local_end 1923-04-21 23:00:00 (Sat)
0,
0,
'WET',
],
[
60662041200, # utc_start 1923-04-21 23:00:00 (Sat)
60676556400, # utc_end 1923-10-06 23:00:00 (Sat)
60662044800, # local_start 1923-04-22 00:00:00 (Sun)
60676560000, # local_end 1923-10-07 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60676556400, # utc_start 1923-10-06 23:00:00 (Sat)
60691676400, # utc_end 1924-03-29 23:00:00 (Sat)
60676556400, # local_start 1923-10-06 23:00:00 (Sat)
60691676400, # local_end 1924-03-29 23:00:00 (Sat)
0,
0,
'WET',
],
[
60691676400, # utc_start 1924-03-29 23:00:00 (Sat)
60708006000, # utc_end 1924-10-04 23:00:00 (Sat)
60691680000, # local_start 1924-03-30 00:00:00 (Sun)
60708009600, # local_end 1924-10-05 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60708006000, # utc_start 1924-10-04 23:00:00 (Sat)
60723730800, # utc_end 1925-04-04 23:00:00 (Sat)
60708006000, # local_start 1924-10-04 23:00:00 (Sat)
60723730800, # local_end 1925-04-04 23:00:00 (Sat)
0,
0,
'WET',
],
[
60723730800, # utc_start 1925-04-04 23:00:00 (Sat)
60739455600, # utc_end 1925-10-03 23:00:00 (Sat)
60723734400, # local_start 1925-04-05 00:00:00 (Sun)
60739459200, # local_end 1925-10-04 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60739455600, # utc_start 1925-10-03 23:00:00 (Sat)
60756390000, # utc_end 1926-04-17 23:00:00 (Sat)
60739455600, # local_start 1925-10-03 23:00:00 (Sat)
60756390000, # local_end 1926-04-17 23:00:00 (Sat)
0,
0,
'WET',
],
[
60756390000, # utc_start 1926-04-17 23:00:00 (Sat)
60770905200, # utc_end 1926-10-02 23:00:00 (Sat)
60756393600, # local_start 1926-04-18 00:00:00 (Sun)
60770908800, # local_end 1926-10-03 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60770905200, # utc_start 1926-10-02 23:00:00 (Sat)
60787234800, # utc_end 1927-04-09 23:00:00 (Sat)
60770905200, # local_start 1926-10-02 23:00:00 (Sat)
60787234800, # local_end 1927-04-09 23:00:00 (Sat)
0,
0,
'WET',
],
[
60787234800, # utc_start 1927-04-09 23:00:00 (Sat)
60802354800, # utc_end 1927-10-01 23:00:00 (Sat)
60787238400, # local_start 1927-04-10 00:00:00 (Sun)
60802358400, # local_end 1927-10-02 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
60802354800, # utc_start 1927-10-01 23:00:00 (Sat)
60819289200, # utc_end 1928-04-14 23:00:00 (Sat)
60802354800, # local_start 1927-10-01 23:00:00 (Sat)
60819289200, # local_end 1928-04-14 23:00:00 (Sat)
0,
0,
'WET',
],
[
60819289200, # utc_start 1928-04-14 23:00:00 (Sat)
60834420000, # utc_end 1928-10-07 02:00:00 (Sun)
60819292800, # local_start 1928-04-15 00:00:00 (Sun)
60834423600, # local_end 1928-10-07 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
60834420000, # utc_start 1928-10-07 02:00:00 (Sun)
60851354400, # utc_end 1929-04-21 02:00:00 (Sun)
60834420000, # local_start 1928-10-07 02:00:00 (Sun)
60851354400, # local_end 1929-04-21 02:00:00 (Sun)
0,
0,
'WET',
],
[
60851354400, # utc_start 1929-04-21 02:00:00 (Sun)
60865869600, # utc_end 1929-10-06 02:00:00 (Sun)
60851358000, # local_start 1929-04-21 03:00:00 (Sun)
60865873200, # local_end 1929-10-06 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
60865869600, # utc_start 1929-10-06 02:00:00 (Sun)
60882199200, # utc_end 1930-04-13 02:00:00 (Sun)
60865869600, # local_start 1929-10-06 02:00:00 (Sun)
60882199200, # local_end 1930-04-13 02:00:00 (Sun)
0,
0,
'WET',
],
[
60882199200, # utc_start 1930-04-13 02:00:00 (Sun)
60897319200, # utc_end 1930-10-05 02:00:00 (Sun)
60882202800, # local_start 1930-04-13 03:00:00 (Sun)
60897322800, # local_end 1930-10-05 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
60897319200, # utc_start 1930-10-05 02:00:00 (Sun)
60914253600, # utc_end 1931-04-19 02:00:00 (Sun)
60897319200, # local_start 1930-10-05 02:00:00 (Sun)
60914253600, # local_end 1931-04-19 02:00:00 (Sun)
0,
0,
'WET',
],
[
60914253600, # utc_start 1931-04-19 02:00:00 (Sun)
60928768800, # utc_end 1931-10-04 02:00:00 (Sun)
60914257200, # local_start 1931-04-19 03:00:00 (Sun)
60928772400, # local_end 1931-10-04 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
60928768800, # utc_start 1931-10-04 02:00:00 (Sun)
60944493600, # utc_end 1932-04-03 02:00:00 (Sun)
60928768800, # local_start 1931-10-04 02:00:00 (Sun)
60944493600, # local_end 1932-04-03 02:00:00 (Sun)
0,
0,
'WET',
],
[
60944493600, # utc_start 1932-04-03 02:00:00 (Sun)
60960218400, # utc_end 1932-10-02 02:00:00 (Sun)
60944497200, # local_start 1932-04-03 03:00:00 (Sun)
60960222000, # local_end 1932-10-02 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
60960218400, # utc_start 1932-10-02 02:00:00 (Sun)
60975338400, # utc_end 1933-03-26 02:00:00 (Sun)
60960218400, # local_start 1932-10-02 02:00:00 (Sun)
60975338400, # local_end 1933-03-26 02:00:00 (Sun)
0,
0,
'WET',
],
[
60975338400, # utc_start 1933-03-26 02:00:00 (Sun)
60992272800, # utc_end 1933-10-08 02:00:00 (Sun)
60975342000, # local_start 1933-03-26 03:00:00 (Sun)
60992276400, # local_end 1933-10-08 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
60992272800, # utc_start 1933-10-08 02:00:00 (Sun)
61007997600, # utc_end 1934-04-08 02:00:00 (Sun)
60992272800, # local_start 1933-10-08 02:00:00 (Sun)
61007997600, # local_end 1934-04-08 02:00:00 (Sun)
0,
0,
'WET',
],
[
61007997600, # utc_start 1934-04-08 02:00:00 (Sun)
61023722400, # utc_end 1934-10-07 02:00:00 (Sun)
61008001200, # local_start 1934-04-08 03:00:00 (Sun)
61023726000, # local_end 1934-10-07 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
61023722400, # utc_start 1934-10-07 02:00:00 (Sun)
61038842400, # utc_end 1935-03-31 02:00:00 (Sun)
61023722400, # local_start 1934-10-07 02:00:00 (Sun)
61038842400, # local_end 1935-03-31 02:00:00 (Sun)
0,
0,
'WET',
],
[
61038842400, # utc_start 1935-03-31 02:00:00 (Sun)
61055172000, # utc_end 1935-10-06 02:00:00 (Sun)
61038846000, # local_start 1935-03-31 03:00:00 (Sun)
61055175600, # local_end 1935-10-06 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
61055172000, # utc_start 1935-10-06 02:00:00 (Sun)
61072106400, # utc_end 1936-04-19 02:00:00 (Sun)
61055172000, # local_start 1935-10-06 02:00:00 (Sun)
61072106400, # local_end 1936-04-19 02:00:00 (Sun)
0,
0,
'WET',
],
[
61072106400, # utc_start 1936-04-19 02:00:00 (Sun)
61086621600, # utc_end 1936-10-04 02:00:00 (Sun)
61072110000, # local_start 1936-04-19 03:00:00 (Sun)
61086625200, # local_end 1936-10-04 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
61086621600, # utc_start 1936-10-04 02:00:00 (Sun)
61102346400, # utc_end 1937-04-04 02:00:00 (Sun)
61086621600, # local_start 1936-10-04 02:00:00 (Sun)
61102346400, # local_end 1937-04-04 02:00:00 (Sun)
0,
0,
'WET',
],
[
61102346400, # utc_start 1937-04-04 02:00:00 (Sun)
61118071200, # utc_end 1937-10-03 02:00:00 (Sun)
61102350000, # local_start 1937-04-04 03:00:00 (Sun)
61118074800, # local_end 1937-10-03 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
61118071200, # utc_start 1937-10-03 02:00:00 (Sun)
61133191200, # utc_end 1938-03-27 02:00:00 (Sun)
61118071200, # local_start 1937-10-03 02:00:00 (Sun)
61133191200, # local_end 1938-03-27 02:00:00 (Sun)
0,
0,
'WET',
],
[
61133191200, # utc_start 1938-03-27 02:00:00 (Sun)
61149520800, # utc_end 1938-10-02 02:00:00 (Sun)
61133194800, # local_start 1938-03-27 03:00:00 (Sun)
61149524400, # local_end 1938-10-02 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
61149520800, # utc_start 1938-10-02 02:00:00 (Sun)
61166455200, # utc_end 1939-04-16 02:00:00 (Sun)
61149520800, # local_start 1938-10-02 02:00:00 (Sun)
61166455200, # local_end 1939-04-16 02:00:00 (Sun)
0,
0,
'WET',
],
[
61166455200, # utc_start 1939-04-16 02:00:00 (Sun)
61185204000, # utc_end 1939-11-19 02:00:00 (Sun)
61166458800, # local_start 1939-04-16 03:00:00 (Sun)
61185207600, # local_end 1939-11-19 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
61185204000, # utc_start 1939-11-19 02:00:00 (Sun)
61193671200, # utc_end 1940-02-25 02:00:00 (Sun)
61185204000, # local_start 1939-11-19 02:00:00 (Sun)
61193671200, # local_end 1940-02-25 02:00:00 (Sun)
0,
0,
'WET',
],
[
61193671200, # utc_start 1940-02-25 02:00:00 (Sun)
61201015200, # utc_end 1940-05-20 02:00:00 (Mon)
61193674800, # local_start 1940-02-25 03:00:00 (Sun)
61201018800, # local_end 1940-05-20 03:00:00 (Mon)
3600,
1,
'WEST',
],
[
61201015200, # utc_start 1940-05-20 02:00:00 (Mon)
61278426000, # utc_end 1942-11-02 01:00:00 (Mon)
61201022400, # local_start 1940-05-20 04:00:00 (Mon)
61278433200, # local_end 1942-11-02 03:00:00 (Mon)
7200,
1,
'CEST',
],
[
61278426000, # utc_start 1942-11-02 01:00:00 (Mon)
61291126800, # utc_end 1943-03-29 01:00:00 (Mon)
61278429600, # local_start 1942-11-02 02:00:00 (Mon)
61291130400, # local_end 1943-03-29 02:00:00 (Mon)
3600,
0,
'CET',
],
[
61291126800, # utc_start 1943-03-29 01:00:00 (Mon)
61307456400, # utc_end 1943-10-04 01:00:00 (Mon)
61291134000, # local_start 1943-03-29 03:00:00 (Mon)
61307463600, # local_end 1943-10-04 03:00:00 (Mon)
7200,
1,
'CEST',
],
[
61307456400, # utc_start 1943-10-04 01:00:00 (Mon)
61323181200, # utc_end 1944-04-03 01:00:00 (Mon)
61307460000, # local_start 1943-10-04 02:00:00 (Mon)
61323184800, # local_end 1944-04-03 02:00:00 (Mon)
3600,
0,
'CET',
],
[
61323181200, # utc_start 1944-04-03 01:00:00 (Mon)
61336389600, # utc_end 1944-09-02 22:00:00 (Sat)
61323188400, # local_start 1944-04-03 03:00:00 (Mon)
61336396800, # local_end 1944-09-03 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
61336389600, # utc_start 1944-09-02 22:00:00 (Sat)
61337610000, # utc_end 1944-09-17 01:00:00 (Sun)
61336396800, # local_start 1944-09-03 00:00:00 (Sun)
61337617200, # local_end 1944-09-17 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
61337610000, # utc_start 1944-09-17 01:00:00 (Sun)
61354630800, # utc_end 1945-04-02 01:00:00 (Mon)
61337613600, # local_start 1944-09-17 02:00:00 (Sun)
61354634400, # local_end 1945-04-02 02:00:00 (Mon)
3600,
0,
'CET',
],
[
61354630800, # utc_start 1945-04-02 01:00:00 (Mon)
61369059600, # utc_end 1945-09-16 01:00:00 (Sun)
61354638000, # local_start 1945-04-02 03:00:00 (Mon)
61369066800, # local_end 1945-09-16 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
61369059600, # utc_start 1945-09-16 01:00:00 (Sun)
61390227600, # utc_end 1946-05-19 01:00:00 (Sun)
61369063200, # local_start 1945-09-16 02:00:00 (Sun)
61390231200, # local_end 1946-05-19 02:00:00 (Sun)
3600,
0,
'CET',
],
[
61390227600, # utc_start 1946-05-19 01:00:00 (Sun)
61402410000, # utc_end 1946-10-07 01:00:00 (Mon)
61390234800, # local_start 1946-05-19 03:00:00 (Sun)
61402417200, # local_end 1946-10-07 03:00:00 (Mon)
7200,
1,
'CEST',
],
[
61402410000, # utc_start 1946-10-07 01:00:00 (Mon)
62356604400, # utc_end 1976-12-31 23:00:00 (Fri)
61402413600, # local_start 1946-10-07 02:00:00 (Mon)
62356608000, # local_end 1977-01-01 00:00:00 (Sat)
3600,
0,
'CET',
],
[
62356604400, # utc_start 1976-12-31 23:00:00 (Fri)
62364560400, # utc_end 1977-04-03 01:00:00 (Sun)
62356608000, # local_start 1977-01-01 00:00:00 (Sat)
62364564000, # local_end 1977-04-03 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62364560400, # utc_start 1977-04-03 01:00:00 (Sun)
62379680400, # utc_end 1977-09-25 01:00:00 (Sun)
62364567600, # local_start 1977-04-03 03:00:00 (Sun)
62379687600, # local_end 1977-09-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62379680400, # utc_start 1977-09-25 01:00:00 (Sun)
62396010000, # utc_end 1978-04-02 01:00:00 (Sun)
62379684000, # local_start 1977-09-25 02:00:00 (Sun)
62396013600, # local_end 1978-04-02 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62396010000, # utc_start 1978-04-02 01:00:00 (Sun)
62411734800, # utc_end 1978-10-01 01:00:00 (Sun)
62396017200, # local_start 1978-04-02 03:00:00 (Sun)
62411742000, # local_end 1978-10-01 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62411734800, # utc_start 1978-10-01 01:00:00 (Sun)
62427459600, # utc_end 1979-04-01 01:00:00 (Sun)
62411738400, # local_start 1978-10-01 02:00:00 (Sun)
62427463200, # local_end 1979-04-01 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62427459600, # utc_start 1979-04-01 01:00:00 (Sun)
62443184400, # utc_end 1979-09-30 01:00:00 (Sun)
62427466800, # local_start 1979-04-01 03:00:00 (Sun)
62443191600, # local_end 1979-09-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62443184400, # utc_start 1979-09-30 01:00:00 (Sun)
62459514000, # utc_end 1980-04-06 01:00:00 (Sun)
62443188000, # local_start 1979-09-30 02:00:00 (Sun)
62459517600, # local_end 1980-04-06 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62459514000, # utc_start 1980-04-06 01:00:00 (Sun)
62474634000, # utc_end 1980-09-28 01:00:00 (Sun)
62459521200, # local_start 1980-04-06 03:00:00 (Sun)
62474641200, # local_end 1980-09-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62474634000, # utc_start 1980-09-28 01:00:00 (Sun)
62490358800, # utc_end 1981-03-29 01:00:00 (Sun)
62474637600, # local_start 1980-09-28 02:00:00 (Sun)
62490362400, # local_end 1981-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62490358800, # utc_start 1981-03-29 01:00:00 (Sun)
62506083600, # utc_end 1981-09-27 01:00:00 (Sun)
62490366000, # local_start 1981-03-29 03:00:00 (Sun)
62506090800, # local_end 1981-09-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62506083600, # utc_start 1981-09-27 01:00:00 (Sun)
62521808400, # utc_end 1982-03-28 01:00:00 (Sun)
62506087200, # local_start 1981-09-27 02:00:00 (Sun)
62521812000, # local_end 1982-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62521808400, # utc_start 1982-03-28 01:00:00 (Sun)
62537533200, # utc_end 1982-09-26 01:00:00 (Sun)
62521815600, # local_start 1982-03-28 03:00:00 (Sun)
62537540400, # local_end 1982-09-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62537533200, # utc_start 1982-09-26 01:00:00 (Sun)
62553258000, # utc_end 1983-03-27 01:00:00 (Sun)
62537536800, # local_start 1982-09-26 02:00:00 (Sun)
62553261600, # local_end 1983-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62553258000, # utc_start 1983-03-27 01:00:00 (Sun)
62568982800, # utc_end 1983-09-25 01:00:00 (Sun)
62553265200, # local_start 1983-03-27 03:00:00 (Sun)
62568990000, # local_end 1983-09-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62568982800, # utc_start 1983-09-25 01:00:00 (Sun)
62584707600, # utc_end 1984-03-25 01:00:00 (Sun)
62568986400, # local_start 1983-09-25 02:00:00 (Sun)
62584711200, # local_end 1984-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62584707600, # utc_start 1984-03-25 01:00:00 (Sun)
62601037200, # utc_end 1984-09-30 01:00:00 (Sun)
62584714800, # local_start 1984-03-25 03:00:00 (Sun)
62601044400, # local_end 1984-09-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62601037200, # utc_start 1984-09-30 01:00:00 (Sun)
62616762000, # utc_end 1985-03-31 01:00:00 (Sun)
62601040800, # local_start 1984-09-30 02:00:00 (Sun)
62616765600, # local_end 1985-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62616762000, # utc_start 1985-03-31 01:00:00 (Sun)
62632486800, # utc_end 1985-09-29 01:00:00 (Sun)
62616769200, # local_start 1985-03-31 03:00:00 (Sun)
62632494000, # local_end 1985-09-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62632486800, # utc_start 1985-09-29 01:00:00 (Sun)
62648211600, # utc_end 1986-03-30 01:00:00 (Sun)
62632490400, # local_start 1985-09-29 02:00:00 (Sun)
62648215200, # local_end 1986-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62648211600, # utc_start 1986-03-30 01:00:00 (Sun)
62663936400, # utc_end 1986-09-28 01:00:00 (Sun)
62648218800, # local_start 1986-03-30 03:00:00 (Sun)
62663943600, # local_end 1986-09-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62663936400, # utc_start 1986-09-28 01:00:00 (Sun)
62679661200, # utc_end 1987-03-29 01:00:00 (Sun)
62663940000, # local_start 1986-09-28 02:00:00 (Sun)
62679664800, # local_end 1987-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62679661200, # utc_start 1987-03-29 01:00:00 (Sun)
62695386000, # utc_end 1987-09-27 01:00:00 (Sun)
62679668400, # local_start 1987-03-29 03:00:00 (Sun)
62695393200, # local_end 1987-09-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62695386000, # utc_start 1987-09-27 01:00:00 (Sun)
62711110800, # utc_end 1988-03-27 01:00:00 (Sun)
62695389600, # local_start 1987-09-27 02:00:00 (Sun)
62711114400, # local_end 1988-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62711110800, # utc_start 1988-03-27 01:00:00 (Sun)
62726835600, # utc_end 1988-09-25 01:00:00 (Sun)
62711118000, # local_start 1988-03-27 03:00:00 (Sun)
62726842800, # local_end 1988-09-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62726835600, # utc_start 1988-09-25 01:00:00 (Sun)
62742560400, # utc_end 1989-03-26 01:00:00 (Sun)
62726839200, # local_start 1988-09-25 02:00:00 (Sun)
62742564000, # local_end 1989-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62742560400, # utc_start 1989-03-26 01:00:00 (Sun)
62758285200, # utc_end 1989-09-24 01:00:00 (Sun)
62742567600, # local_start 1989-03-26 03:00:00 (Sun)
62758292400, # local_end 1989-09-24 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62758285200, # utc_start 1989-09-24 01:00:00 (Sun)
62774010000, # utc_end 1990-03-25 01:00:00 (Sun)
62758288800, # local_start 1989-09-24 02:00:00 (Sun)
62774013600, # local_end 1990-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62774010000, # utc_start 1990-03-25 01:00:00 (Sun)
62790339600, # utc_end 1990-09-30 01:00:00 (Sun)
62774017200, # local_start 1990-03-25 03:00:00 (Sun)
62790346800, # local_end 1990-09-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62790339600, # utc_start 1990-09-30 01:00:00 (Sun)
62806064400, # utc_end 1991-03-31 01:00:00 (Sun)
62790343200, # local_start 1990-09-30 02:00:00 (Sun)
62806068000, # local_end 1991-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62806064400, # utc_start 1991-03-31 01:00:00 (Sun)
62821789200, # utc_end 1991-09-29 01:00:00 (Sun)
62806071600, # local_start 1991-03-31 03:00:00 (Sun)
62821796400, # local_end 1991-09-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62821789200, # utc_start 1991-09-29 01:00:00 (Sun)
62837514000, # utc_end 1992-03-29 01:00:00 (Sun)
62821792800, # local_start 1991-09-29 02:00:00 (Sun)
62837517600, # local_end 1992-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62837514000, # utc_start 1992-03-29 01:00:00 (Sun)
62853238800, # utc_end 1992-09-27 01:00:00 (Sun)
62837521200, # local_start 1992-03-29 03:00:00 (Sun)
62853246000, # local_end 1992-09-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62853238800, # utc_start 1992-09-27 01:00:00 (Sun)
62868963600, # utc_end 1993-03-28 01:00:00 (Sun)
62853242400, # local_start 1992-09-27 02:00:00 (Sun)
62868967200, # local_end 1993-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62868963600, # utc_start 1993-03-28 01:00:00 (Sun)
62884688400, # utc_end 1993-09-26 01:00:00 (Sun)
62868970800, # local_start 1993-03-28 03:00:00 (Sun)
62884695600, # local_end 1993-09-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62884688400, # utc_start 1993-09-26 01:00:00 (Sun)
62900413200, # utc_end 1994-03-27 01:00:00 (Sun)
62884692000, # local_start 1993-09-26 02:00:00 (Sun)
62900416800, # local_end 1994-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62900413200, # utc_start 1994-03-27 01:00:00 (Sun)
62916138000, # utc_end 1994-09-25 01:00:00 (Sun)
62900420400, # local_start 1994-03-27 03:00:00 (Sun)
62916145200, # local_end 1994-09-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62916138000, # utc_start 1994-09-25 01:00:00 (Sun)
62931862800, # utc_end 1995-03-26 01:00:00 (Sun)
62916141600, # local_start 1994-09-25 02:00:00 (Sun)
62931866400, # local_end 1995-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62931862800, # utc_start 1995-03-26 01:00:00 (Sun)
62947587600, # utc_end 1995-09-24 01:00:00 (Sun)
62931870000, # local_start 1995-03-26 03:00:00 (Sun)
62947594800, # local_end 1995-09-24 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62947587600, # utc_start 1995-09-24 01:00:00 (Sun)
62963917200, # utc_end 1996-03-31 01:00:00 (Sun)
62947591200, # local_start 1995-09-24 02:00:00 (Sun)
62963920800, # local_end 1996-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62963917200, # utc_start 1996-03-31 01:00:00 (Sun)
62982061200, # utc_end 1996-10-27 01:00:00 (Sun)
62963924400, # local_start 1996-03-31 03:00:00 (Sun)
62982068400, # local_end 1996-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62982061200, # utc_start 1996-10-27 01:00:00 (Sun)
62995366800, # utc_end 1997-03-30 01:00:00 (Sun)
62982064800, # local_start 1996-10-27 02:00:00 (Sun)
62995370400, # local_end 1997-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62995366800, # utc_start 1997-03-30 01:00:00 (Sun)
63013510800, # utc_end 1997-10-26 01:00:00 (Sun)
62995374000, # local_start 1997-03-30 03:00:00 (Sun)
63013518000, # local_end 1997-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63013510800, # utc_start 1997-10-26 01:00:00 (Sun)
63026816400, # utc_end 1998-03-29 01:00:00 (Sun)
63013514400, # local_start 1997-10-26 02:00:00 (Sun)
63026820000, # local_end 1998-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63026816400, # utc_start 1998-03-29 01:00:00 (Sun)
63044960400, # utc_end 1998-10-25 01:00:00 (Sun)
63026823600, # local_start 1998-03-29 03:00:00 (Sun)
63044967600, # local_end 1998-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63044960400, # utc_start 1998-10-25 01:00:00 (Sun)
63058266000, # utc_end 1999-03-28 01:00:00 (Sun)
63044964000, # local_start 1998-10-25 02:00:00 (Sun)
63058269600, # local_end 1999-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63058266000, # utc_start 1999-03-28 01:00:00 (Sun)
63077014800, # utc_end 1999-10-31 01:00:00 (Sun)
63058273200, # local_start 1999-03-28 03:00:00 (Sun)
63077022000, # local_end 1999-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63077014800, # utc_start 1999-10-31 01:00:00 (Sun)
63089715600, # utc_end 2000-03-26 01:00:00 (Sun)
63077018400, # local_start 1999-10-31 02:00:00 (Sun)
63089719200, # local_end 2000-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63089715600, # utc_start 2000-03-26 01:00:00 (Sun)
63108464400, # utc_end 2000-10-29 01:00:00 (Sun)
63089722800, # local_start 2000-03-26 03:00:00 (Sun)
63108471600, # local_end 2000-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63108464400, # utc_start 2000-10-29 01:00:00 (Sun)
63121165200, # utc_end 2001-03-25 01:00:00 (Sun)
63108468000, # local_start 2000-10-29 02:00:00 (Sun)
63121168800, # local_end 2001-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63121165200, # utc_start 2001-03-25 01:00:00 (Sun)
63139914000, # utc_end 2001-10-28 01:00:00 (Sun)
63121172400, # local_start 2001-03-25 03:00:00 (Sun)
63139921200, # local_end 2001-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63139914000, # utc_start 2001-10-28 01:00:00 (Sun)
63153219600, # utc_end 2002-03-31 01:00:00 (Sun)
63139917600, # local_start 2001-10-28 02:00:00 (Sun)
63153223200, # local_end 2002-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63153219600, # utc_start 2002-03-31 01:00:00 (Sun)
63171363600, # utc_end 2002-10-27 01:00:00 (Sun)
63153226800, # local_start 2002-03-31 03:00:00 (Sun)
63171370800, # local_end 2002-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63171363600, # utc_start 2002-10-27 01:00:00 (Sun)
63184669200, # utc_end 2003-03-30 01:00:00 (Sun)
63171367200, # local_start 2002-10-27 02:00:00 (Sun)
63184672800, # local_end 2003-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63184669200, # utc_start 2003-03-30 01:00:00 (Sun)
63202813200, # utc_end 2003-10-26 01:00:00 (Sun)
63184676400, # local_start 2003-03-30 03:00:00 (Sun)
63202820400, # local_end 2003-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63202813200, # utc_start 2003-10-26 01:00:00 (Sun)
63216118800, # utc_end 2004-03-28 01:00:00 (Sun)
63202816800, # local_start 2003-10-26 02:00:00 (Sun)
63216122400, # local_end 2004-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63216118800, # utc_start 2004-03-28 01:00:00 (Sun)
63234867600, # utc_end 2004-10-31 01:00:00 (Sun)
63216126000, # local_start 2004-03-28 03:00:00 (Sun)
63234874800, # local_end 2004-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63234867600, # utc_start 2004-10-31 01:00:00 (Sun)
63247568400, # utc_end 2005-03-27 01:00:00 (Sun)
63234871200, # local_start 2004-10-31 02:00:00 (Sun)
63247572000, # local_end 2005-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63247568400, # utc_start 2005-03-27 01:00:00 (Sun)
63266317200, # utc_end 2005-10-30 01:00:00 (Sun)
63247575600, # local_start 2005-03-27 03:00:00 (Sun)
63266324400, # local_end 2005-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63266317200, # utc_start 2005-10-30 01:00:00 (Sun)
63279018000, # utc_end 2006-03-26 01:00:00 (Sun)
63266320800, # local_start 2005-10-30 02:00:00 (Sun)
63279021600, # local_end 2006-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63279018000, # utc_start 2006-03-26 01:00:00 (Sun)
63297766800, # utc_end 2006-10-29 01:00:00 (Sun)
63279025200, # local_start 2006-03-26 03:00:00 (Sun)
63297774000, # local_end 2006-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63297766800, # utc_start 2006-10-29 01:00:00 (Sun)
63310467600, # utc_end 2007-03-25 01:00:00 (Sun)
63297770400, # local_start 2006-10-29 02:00:00 (Sun)
63310471200, # local_end 2007-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63310467600, # utc_start 2007-03-25 01:00:00 (Sun)
63329216400, # utc_end 2007-10-28 01:00:00 (Sun)
63310474800, # local_start 2007-03-25 03:00:00 (Sun)
63329223600, # local_end 2007-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63329216400, # utc_start 2007-10-28 01:00:00 (Sun)
63342522000, # utc_end 2008-03-30 01:00:00 (Sun)
63329220000, # local_start 2007-10-28 02:00:00 (Sun)
63342525600, # local_end 2008-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63342522000, # utc_start 2008-03-30 01:00:00 (Sun)
63360666000, # utc_end 2008-10-26 01:00:00 (Sun)
63342529200, # local_start 2008-03-30 03:00:00 (Sun)
63360673200, # local_end 2008-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63360666000, # utc_start 2008-10-26 01:00:00 (Sun)
63373971600, # utc_end 2009-03-29 01:00:00 (Sun)
63360669600, # local_start 2008-10-26 02:00:00 (Sun)
63373975200, # local_end 2009-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63373971600, # utc_start 2009-03-29 01:00:00 (Sun)
63392115600, # utc_end 2009-10-25 01:00:00 (Sun)
63373978800, # local_start 2009-03-29 03:00:00 (Sun)
63392122800, # local_end 2009-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63392115600, # utc_start 2009-10-25 01:00:00 (Sun)
63405421200, # utc_end 2010-03-28 01:00:00 (Sun)
63392119200, # local_start 2009-10-25 02:00:00 (Sun)
63405424800, # local_end 2010-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63405421200, # utc_start 2010-03-28 01:00:00 (Sun)
63424170000, # utc_end 2010-10-31 01:00:00 (Sun)
63405428400, # local_start 2010-03-28 03:00:00 (Sun)
63424177200, # local_end 2010-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63424170000, # utc_start 2010-10-31 01:00:00 (Sun)
63436870800, # utc_end 2011-03-27 01:00:00 (Sun)
63424173600, # local_start 2010-10-31 02:00:00 (Sun)
63436874400, # local_end 2011-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63436870800, # utc_start 2011-03-27 01:00:00 (Sun)
63455619600, # utc_end 2011-10-30 01:00:00 (Sun)
63436878000, # local_start 2011-03-27 03:00:00 (Sun)
63455626800, # local_end 2011-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63455619600, # utc_start 2011-10-30 01:00:00 (Sun)
63468320400, # utc_end 2012-03-25 01:00:00 (Sun)
63455623200, # local_start 2011-10-30 02:00:00 (Sun)
63468324000, # local_end 2012-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63468320400, # utc_start 2012-03-25 01:00:00 (Sun)
63487069200, # utc_end 2012-10-28 01:00:00 (Sun)
63468327600, # local_start 2012-03-25 03:00:00 (Sun)
63487076400, # local_end 2012-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63487069200, # utc_start 2012-10-28 01:00:00 (Sun)
63500374800, # utc_end 2013-03-31 01:00:00 (Sun)
63487072800, # local_start 2012-10-28 02:00:00 (Sun)
63500378400, # local_end 2013-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63500374800, # utc_start 2013-03-31 01:00:00 (Sun)
63518518800, # utc_end 2013-10-27 01:00:00 (Sun)
63500382000, # local_start 2013-03-31 03:00:00 (Sun)
63518526000, # local_end 2013-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63518518800, # utc_start 2013-10-27 01:00:00 (Sun)
63531824400, # utc_end 2014-03-30 01:00:00 (Sun)
63518522400, # local_start 2013-10-27 02:00:00 (Sun)
63531828000, # local_end 2014-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63531824400, # utc_start 2014-03-30 01:00:00 (Sun)
63549968400, # utc_end 2014-10-26 01:00:00 (Sun)
63531831600, # local_start 2014-03-30 03:00:00 (Sun)
63549975600, # local_end 2014-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63549968400, # utc_start 2014-10-26 01:00:00 (Sun)
63563274000, # utc_end 2015-03-29 01:00:00 (Sun)
63549972000, # local_start 2014-10-26 02:00:00 (Sun)
63563277600, # local_end 2015-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63563274000, # utc_start 2015-03-29 01:00:00 (Sun)
63581418000, # utc_end 2015-10-25 01:00:00 (Sun)
63563281200, # local_start 2015-03-29 03:00:00 (Sun)
63581425200, # local_end 2015-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63581418000, # utc_start 2015-10-25 01:00:00 (Sun)
63594723600, # utc_end 2016-03-27 01:00:00 (Sun)
63581421600, # local_start 2015-10-25 02:00:00 (Sun)
63594727200, # local_end 2016-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63594723600, # utc_start 2016-03-27 01:00:00 (Sun)
63613472400, # utc_end 2016-10-30 01:00:00 (Sun)
63594730800, # local_start 2016-03-27 03:00:00 (Sun)
63613479600, # local_end 2016-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63613472400, # utc_start 2016-10-30 01:00:00 (Sun)
63626173200, # utc_end 2017-03-26 01:00:00 (Sun)
63613476000, # local_start 2016-10-30 02:00:00 (Sun)
63626176800, # local_end 2017-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63626173200, # utc_start 2017-03-26 01:00:00 (Sun)
63644922000, # utc_end 2017-10-29 01:00:00 (Sun)
63626180400, # local_start 2017-03-26 03:00:00 (Sun)
63644929200, # local_end 2017-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63644922000, # utc_start 2017-10-29 01:00:00 (Sun)
63657622800, # utc_end 2018-03-25 01:00:00 (Sun)
63644925600, # local_start 2017-10-29 02:00:00 (Sun)
63657626400, # local_end 2018-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63657622800, # utc_start 2018-03-25 01:00:00 (Sun)
63676371600, # utc_end 2018-10-28 01:00:00 (Sun)
63657630000, # local_start 2018-03-25 03:00:00 (Sun)
63676378800, # local_end 2018-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63676371600, # utc_start 2018-10-28 01:00:00 (Sun)
63689677200, # utc_end 2019-03-31 01:00:00 (Sun)
63676375200, # local_start 2018-10-28 02:00:00 (Sun)
63689680800, # local_end 2019-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63689677200, # utc_start 2019-03-31 01:00:00 (Sun)
63707821200, # utc_end 2019-10-27 01:00:00 (Sun)
63689684400, # local_start 2019-03-31 03:00:00 (Sun)
63707828400, # local_end 2019-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63707821200, # utc_start 2019-10-27 01:00:00 (Sun)
63721126800, # utc_end 2020-03-29 01:00:00 (Sun)
63707824800, # local_start 2019-10-27 02:00:00 (Sun)
63721130400, # local_end 2020-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63721126800, # utc_start 2020-03-29 01:00:00 (Sun)
63739270800, # utc_end 2020-10-25 01:00:00 (Sun)
63721134000, # local_start 2020-03-29 03:00:00 (Sun)
63739278000, # local_end 2020-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63739270800, # utc_start 2020-10-25 01:00:00 (Sun)
63752576400, # utc_end 2021-03-28 01:00:00 (Sun)
63739274400, # local_start 2020-10-25 02:00:00 (Sun)
63752580000, # local_end 2021-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63752576400, # utc_start 2021-03-28 01:00:00 (Sun)
63771325200, # utc_end 2021-10-31 01:00:00 (Sun)
63752583600, # local_start 2021-03-28 03:00:00 (Sun)
63771332400, # local_end 2021-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63771325200, # utc_start 2021-10-31 01:00:00 (Sun)
63784026000, # utc_end 2022-03-27 01:00:00 (Sun)
63771328800, # local_start 2021-10-31 02:00:00 (Sun)
63784029600, # local_end 2022-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63784026000, # utc_start 2022-03-27 01:00:00 (Sun)
63802774800, # utc_end 2022-10-30 01:00:00 (Sun)
63784033200, # local_start 2022-03-27 03:00:00 (Sun)
63802782000, # local_end 2022-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63802774800, # utc_start 2022-10-30 01:00:00 (Sun)
63815475600, # utc_end 2023-03-26 01:00:00 (Sun)
63802778400, # local_start 2022-10-30 02:00:00 (Sun)
63815479200, # local_end 2023-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63815475600, # utc_start 2023-03-26 01:00:00 (Sun)
63834224400, # utc_end 2023-10-29 01:00:00 (Sun)
63815482800, # local_start 2023-03-26 03:00:00 (Sun)
63834231600, # local_end 2023-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63834224400, # utc_start 2023-10-29 01:00:00 (Sun)
63847530000, # utc_end 2024-03-31 01:00:00 (Sun)
63834228000, # local_start 2023-10-29 02:00:00 (Sun)
63847533600, # local_end 2024-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63847530000, # utc_start 2024-03-31 01:00:00 (Sun)
63865674000, # utc_end 2024-10-27 01:00:00 (Sun)
63847537200, # local_start 2024-03-31 03:00:00 (Sun)
63865681200, # local_end 2024-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63865674000, # utc_start 2024-10-27 01:00:00 (Sun)
63878979600, # utc_end 2025-03-30 01:00:00 (Sun)
63865677600, # local_start 2024-10-27 02:00:00 (Sun)
63878983200, # local_end 2025-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63878979600, # utc_start 2025-03-30 01:00:00 (Sun)
63897123600, # utc_end 2025-10-26 01:00:00 (Sun)
63878986800, # local_start 2025-03-30 03:00:00 (Sun)
63897130800, # local_end 2025-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63897123600, # utc_start 2025-10-26 01:00:00 (Sun)
63910429200, # utc_end 2026-03-29 01:00:00 (Sun)
63897127200, # local_start 2025-10-26 02:00:00 (Sun)
63910432800, # local_end 2026-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63910429200, # utc_start 2026-03-29 01:00:00 (Sun)
63928573200, # utc_end 2026-10-25 01:00:00 (Sun)
63910436400, # local_start 2026-03-29 03:00:00 (Sun)
63928580400, # local_end 2026-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63928573200, # utc_start 2026-10-25 01:00:00 (Sun)
63941878800, # utc_end 2027-03-28 01:00:00 (Sun)
63928576800, # local_start 2026-10-25 02:00:00 (Sun)
63941882400, # local_end 2027-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63941878800, # utc_start 2027-03-28 01:00:00 (Sun)
63960627600, # utc_end 2027-10-31 01:00:00 (Sun)
63941886000, # local_start 2027-03-28 03:00:00 (Sun)
63960634800, # local_end 2027-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63960627600, # utc_start 2027-10-31 01:00:00 (Sun)
63973328400, # utc_end 2028-03-26 01:00:00 (Sun)
63960631200, # local_start 2027-10-31 02:00:00 (Sun)
63973332000, # local_end 2028-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63973328400, # utc_start 2028-03-26 01:00:00 (Sun)
63992077200, # utc_end 2028-10-29 01:00:00 (Sun)
63973335600, # local_start 2028-03-26 03:00:00 (Sun)
63992084400, # local_end 2028-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63992077200, # utc_start 2028-10-29 01:00:00 (Sun)
64004778000, # utc_end 2029-03-25 01:00:00 (Sun)
63992080800, # local_start 2028-10-29 02:00:00 (Sun)
64004781600, # local_end 2029-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
64004778000, # utc_start 2029-03-25 01:00:00 (Sun)
64023526800, # utc_end 2029-10-28 01:00:00 (Sun)
64004785200, # local_start 2029-03-25 03:00:00 (Sun)
64023534000, # local_end 2029-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
64023526800, # utc_start 2029-10-28 01:00:00 (Sun)
64036832400, # utc_end 2030-03-31 01:00:00 (Sun)
64023530400, # local_start 2029-10-28 02:00:00 (Sun)
64036836000, # local_end 2030-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
64036832400, # utc_start 2030-03-31 01:00:00 (Sun)
64054976400, # utc_end 2030-10-27 01:00:00 (Sun)
64036839600, # local_start 2030-03-31 03:00:00 (Sun)
64054983600, # local_end 2030-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
];
# Release of the IANA/Olson tz database this module was generated from.
sub olson_version { return '2019b' }

# Number of DST changes recorded for this zone (generator-supplied count).
sub has_dst_changes { return 85 }

# Horizon year of the pre-computed span table (generator-supplied).
sub _max_year { return 2029 }
# Constructor hook: delegates to the base-class initialiser, injecting
# this module's pre-computed transition spans.
sub _new_instance {
    my $class = shift;
    return $class->_init( @_, spans => $spans );
}
sub _last_offset { 3600 }
# Final observance for this zone: standard time at UTC+1
# ('gmtoff' => '1:00', 'offset_from_utc' => 3600), abbreviation
# template 'CE%sT', with no additional offset from standard time.
# The nested hashes are pre-serialised state blessed directly into
# their classes (DateTime, DateTime::TimeZone::Floating) rather than
# built through constructors, so this data must not be reshaped.
my $last_observance = bless( {
'format' => 'CE%sT',
'gmtoff' => '1:00',
# Observance start expressed in local (wall-clock) time.
'local_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 721720,
'local_rd_secs' => 0,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 721720,
'utc_rd_secs' => 0,
'utc_year' => 1978
}, 'DateTime' ),
'offset_from_std' => 0,
'offset_from_utc' => 3600,
'until' => [],
# Observance start expressed in UTC.
'utc_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 721719,
'local_rd_secs' => 82800,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 721719,
'utc_rd_secs' => 82800,
'utc_year' => 1977
}, 'DateTime' )
}, 'DateTime::TimeZone::OlsonDB::Observance' )
;
sub _last_observance { $last_observance }
# DST rule objects for this zone (named rule set 'EU'), blessed
# directly into DateTime::TimeZone::OlsonDB::Rule:
#  - from 1996 onward: revert to standard time ('save' => '0') on the
#    last Sunday of October at 01:00 UT ('1:00u' - the 'u' suffix is
#    Olson notation for universal time);
#  - from 1981 onward: enter summer time ('save' => '1:00',
#    'letter' => 'S') on the last Sunday of March at 01:00 UT.
# Both rules run 'to' => 'max', i.e. they have no end year.
my $rules = [
bless( {
'at' => '1:00u',
'from' => '1996',
'in' => 'Oct',
'letter' => '',
'name' => 'EU',
'offset_from_std' => 0,
'on' => 'lastSun',
'save' => '0',
'to' => 'max'
}, 'DateTime::TimeZone::OlsonDB::Rule' ),
bless( {
'at' => '1:00u',
'from' => '1981',
'in' => 'Mar',
'letter' => 'S',
'name' => 'EU',
'offset_from_std' => 3600,
'on' => 'lastSun',
'save' => '1:00',
'to' => 'max'
}, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
sub _rules { $rules }
1;
| 26.861862 | 90 | 0.633963 |
73e8eaa8b04046a4198a32c2d7faebc69249e8e3 | 1,221 | pm | Perl | lib/Net/Async/Spotify/Object/Generated/SavedAlbum.pm | vnealv/Net-Async-Spotify | 8d14a0cc592ee1b4a18b6541387f1d6c74c86a8d | [
"Artistic-1.0"
]
| 2 | 2021-06-08T02:43:56.000Z | 2021-07-23T06:22:00.000Z | lib/Net/Async/Spotify/Object/Generated/SavedAlbum.pm | vnealv/Net-Async-Spotify | 8d14a0cc592ee1b4a18b6541387f1d6c74c86a8d | [
"Artistic-1.0"
]
| 1 | 2021-06-08T02:46:57.000Z | 2021-06-27T16:09:04.000Z | lib/Net/Async/Spotify/Object/Generated/SavedAlbum.pm | vnealv/Net-Async-Spotify | 8d14a0cc592ee1b4a18b6541387f1d6c74c86a8d | [
"Artistic-1.0"
]
| null | null | null | package Net::Async::Spotify::Object::Generated::SavedAlbum;
use strict;
use warnings;
# VERSION
# AUTHORITY
use mro;
use parent qw(Net::Async::Spotify::Object::Base);
=encoding utf8
=head1 NAME
Net::Async::Spotify::Object::Generated::SavedAlbum - Package representing Spotify SavedAlbum Object
=head1 DESCRIPTION
Autogenerated module.
Based on https://developer.spotify.com/documentation/web-api/reference/#objects-index
Check C<crawl-api-doc.pl> for more information.
=head1 PARAMETERS
These are the Spotify SavedAlbum Object attributes:
=over 4
=item added_at
Type:Timestamp
Description:The date and time the album was saved
Timestamps are returned in ISO 8601 format as Coordinated Universal Time (UTC) with a zero offset: YYYY-MM-DDTHH:MM:SSZ.
If the time is imprecise (for example, the date/time of an album release), an additional field indicates the precision; see for example, release_date in an album object.
=item album
Type:AlbumObject
Description:Information about the album.
=back
=cut
sub new {
    my ($class, %args) = @_;
    # Attribute-name => Spotify-type map consumed by the base-class
    # constructor, reached here via next::method (see 'use mro' above).
    my $type_map = {
        added_at => 'Timestamp',
        album    => 'AlbumObject',
    };
    return $class->next::method($type_map, %args);
}
1;
| 20.35 | 169 | 0.732187 |
73fb25f76e9b72e3ac47ce49dadab95e60b3c37d | 2,273 | t | Perl | test/blackbox-tests/test-cases/jsoo/simple.t/run.t | marsam/dune | 8a3d7f2f2015b71384caa07226d1a89dba9d6c25 | [
"MIT"
]
| 1 | 2020-09-18T13:10:03.000Z | 2020-09-18T13:10:03.000Z | test/blackbox-tests/test-cases/jsoo/simple.t/run.t | marsam/dune | 8a3d7f2f2015b71384caa07226d1a89dba9d6c25 | [
"MIT"
]
| 1 | 2020-11-12T13:27:57.000Z | 2020-11-12T13:27:57.000Z | test/blackbox-tests/test-cases/jsoo/simple.t/run.t | marsam/dune | 8a3d7f2f2015b71384caa07226d1a89dba9d6c25 | [
"MIT"
]
| null | null | null | Compilation using jsoo
$ dune build --display short bin/technologic.bc.js @install 2>&1 | \
> sed s,^\ *$(ocamlc -config-var c_compiler),\ \ C_COMPILER,g
C_COMPILER lib/stubs.o
ocamlopt .ppx/7b799aed44581cc79b02033532c5f775/ppx.exe
ocamlc lib/.x.objs/byte/x__.{cmi,cmo,cmt}
js_of_ocaml .js/stdlib/std_exit.cmo.js
js_of_ocaml bin/technologic.bc.runtime.js
ocamlmklib lib/dllx_stubs.so,lib/libx_stubs.a
ppx lib/x.pp.ml
ppx lib/y.pp.ml
ppx bin/technologic.pp.ml
ppx bin/z.pp.ml
ocamlopt lib/.x.objs/native/x__.{cmx,o}
ocamldep lib/.x.objs/x.pp.ml.d
ocamldep lib/.x.objs/y.pp.ml.d
ocamldep bin/.technologic.eobjs/technologic.pp.ml.d
ocamldep bin/.technologic.eobjs/z.pp.ml.d
ocamlc lib/.x.objs/byte/x__Y.{cmi,cmo,cmt}
js_of_ocaml .js/js_of_ocaml/js_of_ocaml.cma.js
js_of_ocaml .js/stdlib/stdlib.cma.js
ocamlopt lib/.x.objs/native/x__Y.{cmx,o}
ocamlc lib/.x.objs/byte/x.{cmi,cmo,cmt}
ocamlopt lib/.x.objs/native/x.{cmx,o}
ocamlc bin/.technologic.eobjs/byte/z.{cmi,cmo,cmt}
ocamlc lib/x.cma
ocamlopt lib/x.{a,cmxa}
js_of_ocaml bin/.technologic.eobjs/byte/z.cmo.js
ocamlc bin/.technologic.eobjs/byte/technologic.{cmi,cmo,cmt}
js_of_ocaml lib/.x.objs/x.cma.js
ocamlopt lib/x.cmxs
js_of_ocaml bin/.technologic.eobjs/byte/technologic.cmo.js
js_of_ocaml bin/technologic.bc.js
$ node ./_build/default/bin/technologic.bc.js
buy it
use it
break it
fix it
$ dune build --display short bin/technologic.bc.js @install --profile release
ocamlc lib/.x.objs/byte/x__.{cmi,cmo,cmt}
ocamlc lib/.x.objs/byte/x__Y.{cmi,cmo,cmt}
ocamlopt lib/.x.objs/native/x__.{cmx,o}
ocamlc lib/.x.objs/byte/x.{cmi,cmo,cmt}
ocamlopt lib/.x.objs/native/x__Y.{cmx,o}
ocamlc lib/x.cma
ocamlopt lib/.x.objs/native/x.{cmx,o}
ocamlc bin/.technologic.eobjs/byte/z.{cmi,cmo,cmt}
ocamlopt lib/x.{a,cmxa}
ocamlc bin/.technologic.eobjs/byte/technologic.{cmi,cmo,cmt}
ocamlopt lib/x.cmxs
ocamlc bin/technologic.bc
js_of_ocaml bin/technologic.bc.js
$ node ./_build/default/bin/technologic.bc.js
buy it
use it
break it
fix it
| 38.525424 | 79 | 0.66608 |
73e35a1de4b7742539507a469608d5cb40b1b95f | 876 | pm | Perl | lib/DDG/Spice/Kwixer.pm | tejasmanohar/zeroclickinfo-spice | 64292051824ddf4d297281722ec5e396e2d7cfda | [
"Apache-2.0"
]
| 1 | 2020-03-03T05:38:12.000Z | 2020-03-03T05:38:12.000Z | lib/DDG/Spice/Kwixer.pm | tejasmanohar/zeroclickinfo-spice | 64292051824ddf4d297281722ec5e396e2d7cfda | [
"Apache-2.0"
]
| null | null | null | lib/DDG/Spice/Kwixer.pm | tejasmanohar/zeroclickinfo-spice | 64292051824ddf4d297281722ec5e396e2d7cfda | [
"Apache-2.0"
]
| null | null | null | package DDG::Spice::Kwixer;
use strict;
use warnings;
use DDG::Spice;

#metadata
primary_example_queries "movies with Keira Knightley";
# Fixed duplicated word: was "films with with Tom Cruise and Emily".
secondary_example_queries "films with Tom Cruise and Emily";
description "Advanced movie queries with Kwixer";
name "Kwixer";
code_url "https://github.com/duckduckgo/zeroclickinfo-spice/blob/master/lib/DDG/Spice/Kwixer.pm";
icon_url "https://kwixer.com/favicon.ico";
topics "entertainment", "special_interest";
category "entertainment";
attribution twitter => ['kwixerapp','Kwixer'],
            web => ['https://www.kwixer.com','Kwixer'];

# Upstream endpoint; '$1' is the placeholder the Spice framework fills
# with the handler's return value below.
spice to => 'https://www.kwixer.com/api/search?filter=movie&take=40&source=ddg&lang=en&query=$1';
spice wrap_jsonp_callback => 1;

# Trigger phrases are maintained in the shared triggers.txt file.
my @triggers = share("triggers.txt")->slurp;
triggers start => @triggers;
#triggers end => ('actor','actress', 'director');

# Pass the remaining query text through to the API; returning an empty
# list when the remainder is empty suppresses the Spice entirely.
handle query => sub {
    return $_ if $_;
    return;
};

1;
| 26.545455 | 97 | 0.728311 |
ed7e78e40d10d0bedf59be3b44f21baf168c244a | 1,622 | pm | Perl | auto-lib/Paws/CloudDirectory/BatchGetObjectInformation.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/CloudDirectory/BatchGetObjectInformation.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/CloudDirectory/BatchGetObjectInformation.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z | # Generated by default/object.tt
package Paws::CloudDirectory::BatchGetObjectInformation;
  use Moose;

  # The directory object whose metadata this batch operation retrieves.
  has ObjectReference => (
    is       => 'ro',
    isa      => 'Paws::CloudDirectory::ObjectReference',
    required => 1,
  );
1;
### main pod documentation begin ###
=head1 NAME
Paws::CloudDirectory::BatchGetObjectInformation
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::CloudDirectory::BatchGetObjectInformation object:
$service_obj->Method(Att1 => { ObjectReference => $value, ..., ObjectReference => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::CloudDirectory::BatchGetObjectInformation object:
$result = $service_obj->Method(...);
$result->Att1->ObjectReference
=head1 DESCRIPTION
Retrieves metadata about an object inside a BatchRead operation. For
more information, see GetObjectInformation and
BatchReadRequest$Operations.
=head1 ATTRIBUTES
=head2 B<REQUIRED> ObjectReference => L<Paws::CloudDirectory::ObjectReference>
A reference to the object.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::CloudDirectory>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 26.590164 | 118 | 0.764488 |
ed75a5a129a60f2b89e779e161231684928427a3 | 11,747 | pm | Perl | modules/Bio/EnsEMBL/DBSQL/DataFileAdaptor.pm | dbolser-ebi/ensembl | d60bb4562d2c82637a7befdee5a4ebe6b9795a3d | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/DBSQL/DataFileAdaptor.pm | dbolser-ebi/ensembl | d60bb4562d2c82637a7befdee5a4ebe6b9795a3d | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/DBSQL/DataFileAdaptor.pm | dbolser-ebi/ensembl | d60bb4562d2c82637a7befdee5a4ebe6b9795a3d | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package Bio::EnsEMBL::DBSQL::DataFileAdaptor;
=pod
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
Bio::EnsEMBL::DBSQL::DataFileAdaptor
=head1 SYNOPSIS
my $dfa = $dba->get_DataFileAdaptor();
my $file = $dfa->fetch_by_dbID(1);
my $files = $dfa->fetch_all();
my $logic_name_files = $dfa->fetch_all_by_logic_name('bam_alignments');
=head1 DESCRIPTION
Provides a database wrapper to store the locations of files and to pull these
records back out. DataFile objects can only provide basic information but they
can return an intended external database adaptor which can be used to
parse the information. This system assumes nothing about the file just that
your parser can access it.
Files are supported over any protocol your parser supports and locations can be
made absolute, built on the fly or versioned.
=head1 METHODS
=cut
use strict;
use warnings;
use base qw/Bio::EnsEMBL::DBSQL::BaseAdaptor/;
use Bio::EnsEMBL::DataFile;
use Bio::EnsEMBL::DBSQL::BaseAdaptor;
use Bio::EnsEMBL::Utils::Exception qw/throw warning deprecate/;
use Bio::EnsEMBL::Utils::Scalar qw/:assert/;
my $GLOBAL_BASE_PATH;
=head2 global_base_path
Arg[1] : String; base path
Example : Bio::EnsEMBL::DBSQL::DataFileAdaptor->global_base_path('/base/path');
Description : Stores a global value to be used when building data file paths
Returntype : String
Exceptions : None
=cut
sub global_base_path {
  my ($class, $base_path) = @_;
  # Remember a new base path when one is supplied; in all cases report the
  # currently stored class-wide value.
  if ($base_path) {
    $GLOBAL_BASE_PATH = $base_path;
  }
  return $GLOBAL_BASE_PATH;
}
=head2 get_base_path
Arg[1] : String; (optional) base path
Example : $dfa->get_base_path();
Description : If given the path it will return that path; if not it consults
$self->global_base_path() for a value. As a last resort
it will look at the meta table for an entry keyed by
B<data_file.base_path>
Returntype : String
Exceptions : Thrown if nothing is found after consulting all three locations
=cut
sub get_base_path {
  my ($self, $path) = @_;
  # Resolution order: explicit argument, then the class-wide global set via
  # global_base_path(), then the 'data_file.base_path' meta table entry.
  return $path if defined $path;
  my $global = $self->global_base_path();
  return $global if defined $global;
  my $from_meta = $self->db()->get_MetaContainer()->single_value_by_key('data_file.base_path', 1);
  return $from_meta if defined $from_meta;
  # All three sources came back undefined; we cannot build file paths.
  throw "No base path discovered. Either provide a path, set a global using global_base_path() or specify 'data_file.base_path' in meta";
}
=head2 DataFile_to_extension
Deprecated
Arg[1] : Bio::EnsEMBL::DataFile
Example : my $ext = $dfa->DataFile_to_extension($bam_df);
Description : Returns an expected extension for the given DataFile type
Returntype : Scalar of the expected file extension
Exceptions : Raised if the given file type is not understood
=cut
sub DataFile_to_extension {
  my ($self, $df) = @_;
  # Kept for backwards compatibility only: delegates to the plural form and
  # hands back just the default (first) extension.
  deprecate("Use DataFile_to_extensions() instead");
  return $self->DataFile_to_extensions($df)->[0];
}
=head2 DataFile_to_extensions
Arg[1] : Bio::EnsEMBL::DataFile
Example : my $exts = $dfa->DataFile_to_extensions($bam_df);
Description : Returns all expected extensions for the given DataFile type. The
first returned is the default extension
Returntype : ArrayRef
Exceptions : Raised if the given file type is not understood
=cut
sub DataFile_to_extensions {
  my ($self, $df) = @_;
  my $type = $df->file_type();
  # Known file types mapped to their extensions; element zero is the default
  # and index/companion files (.bai, .tbi) ride along after it.
  # NOTE(review): BIGBED ('bb') is absent here even though DataFile_to_adaptor
  # supports BIGBED — confirm whether that gap is still intended.
  my %extensions_for = (
    BAM    => ['bam', 'bam.bai'],
    BIGWIG => ['bw'],
    VCF    => ['vcf.gz', 'vcf.gz.tbi'],
  );
  my $extensions = $extensions_for{$type};
  throw sprintf(q{No extensions found for the type '%s'}, $type ) if ! $extensions;
  return $extensions;
}
=head2 DataFile_to_adaptor
Arg[1] : Bio::EnsEMBL::DataFile
Arg[2] : (optional) base path
Example : my $bam = $dfa->DataFile_to_adaptor($bam_df);
Description : Returns an adaptor instance which will access the given DataFile
Returntype : Scalar actual return depends upon the given file type
Exceptions : Raised if the given file type is not understood
=cut
sub DataFile_to_adaptor {
  my ($self, $df, $base) = @_;
  my $type = $df->file_type();

  # Each supported file type maps to the external adaptor class that can read
  # it.  The class is loaded on demand so that a missing optional dependency
  # only matters for the types actually requested.
  my %adaptor_class_for = (
    BAM    => 'Bio::EnsEMBL::ExternalData::BAM::BAMAdaptor',
    BIGBED => 'Bio::EnsEMBL::ExternalData::BigFile::BigBedAdaptor',
    BIGWIG => 'Bio::EnsEMBL::ExternalData::BigFile::BigWigAdaptor',
    VCF    => 'Bio::EnsEMBL::ExternalData::VCF::VCFAdaptor',
  );

  my $class = $adaptor_class_for{$type};
  throw sprintf(q{No handler found for the type '%s'}, $type ) if ! $class;

  # Runtime require of the chosen class (string form of "require Foo::Bar").
  (my $module_file = "$class.pm") =~ s{::}{/}g;
  require $module_file;
  return $class->new($df->path($base));
}
=head2 fetch_all_by_logic_name
Args [1] : String $logic_name for the linked analysis
Example : my $dfs = $dfa->fetch_all_by_logic_name('bam_alignments');
Description : Returns all DataFile entries linked to the given analysis
logic name
Returntype : ArrayRef contains Bio::EnsEMBL::DataFile instances
Exceptions : Thrown if logic name does not exist
=cut
sub fetch_all_by_logic_name {
  my ($self, $logic_name) = @_;
  # Resolve the logic name to an Analysis object, then delegate the real
  # lookup to fetch_all_by_Analysis().
  my $analysis = $self->db()->get_AnalysisAdaptor()->fetch_by_logic_name($logic_name);
  if (! $analysis) {
    throw "No analysis found for logic_name '${logic_name}'";
  }
  return $self->fetch_all_by_Analysis($analysis);
}
=head2 fetch_all_by_Analysis
Args [1] : Bio::EnsEMBL::Analysis $analysis to look up by
Example : my $dfs = $dfa->fetch_all_by_Analysis($analysis);
Description : Returns all DataFile entries linked to the given analysis
Returntype : ArrayRef contains Bio::EnsEMBL::DataFile instances
Exceptions : None
=cut
# fetch_all_by_Analysis: see POD above.  Queues the analysis dbID as a bind
# value and delegates to generic_fetch() with an analysis_id constraint.
sub fetch_all_by_Analysis {
my ($self, $analysis) = @_;
assert_ref($analysis, 'Bio::EnsEMBL::Analysis', 'analysis');
# The queued bind value is consumed by generic_fetch() for the placeholder.
$self->bind_param_generic_fetch($analysis->dbID(), SQL_INTEGER);
return $self->generic_fetch('df.analysis_id =?');
}
=head2 fetch_all_by_CoordSystem
Args [1] : Bio::EnsEMBL::CoordSystem $coord_system to look up by
Example : my $dfs = $dfa->fetch_all_by_CoordSystem($cs);
Description : Returns all DataFile entries linked to the given coordinate
system. Does B<not> support I<toplevel>
Returntype : ArrayRef contains Bio::EnsEMBL::DataFile instances
Exceptions : None
=cut
# fetch_all_by_CoordSystem: see POD above.  Note this matches a concrete
# coord_system_id only; the 'toplevel' pseudo coord system is not supported.
sub fetch_all_by_CoordSystem {
my ($self, $cs) = @_;
assert_ref($cs, 'Bio::EnsEMBL::CoordSystem', 'coord_system');
# The queued bind value is consumed by generic_fetch() for the placeholder.
$self->bind_param_generic_fetch($cs->dbID(), SQL_INTEGER);
return $self->generic_fetch('df.coord_system_id =?');
}
# fetch_by_name_and_type
#
# Returns the first DataFile whose name AND file_type both match, or an
# empty list / undef when nothing matches.  (No POD block exists for this
# method, unlike its siblings.)
sub fetch_by_name_and_type {
my ($self, $name, $type) = @_;
$self->bind_param_generic_fetch($name, SQL_VARCHAR);
$self->bind_param_generic_fetch($type, SQL_VARCHAR);
my $results = $self->generic_fetch('df.name =? and df.file_type =?');
# Only the first row is returned even if several qualify.
return $results->[0] if @{$results};
return;
}
# generic_fetch
#
# Core fetch used by all fetch_* methods.  Builds the base SELECT over
# data_file (restricted to the current species via coord_system), appends
# the caller's optional WHERE fragment, consumes any bind values queued via
# bind_param_generic_fetch(), and inflates each row into a
# Bio::EnsEMBL::DataFile object through the row callback.
sub generic_fetch {
my ($self, $constraint) = @_;
$constraint ||= q{};
my $sql = <<'SQL';
select df.data_file_id, df.coord_system_id, df.analysis_id, df.name, df.version_lock, df.absolute, df.url, df.file_type
from data_file df
join coord_system cs using (coord_system_id)
where cs.species_id =?
SQL
# Caller's constraint is ANDed onto the species filter above.
$sql .= 'AND '.$constraint if $constraint;
# Take ownership of any queued bind values and clear the queue so they are
# not accidentally reused by a later fetch.
my $params = $self->bind_param_generic_fetch();
if(defined $params) {
$self->{'_bind_param_generic_fetch'} = ();
}
else {
$params = [];
}
# species_id binds to the placeholder in the base SQL, ahead of any
# constraint placeholders.
unshift(@{$params}, $self->db()->species_id());
my $csa = $self->db()->get_CoordSystemAdaptor();
my $aa = $self->db()->get_AnalysisAdaptor();
return $self->dbc()->sql_helper()->execute(-SQL => $sql, -PARAMS => $params, -CALLBACK => sub {
my ($row) = @_;
my ($data_file_id, $coord_system_id, $analysis_id, $name, $version_lock, $absolute, $url, $file_type) = @{$row};
my $hash = {
dbID => $data_file_id,
adaptor => $self,
coord_system => $csa->fetch_by_dbID($coord_system_id),
analysis => $aa->fetch_by_dbID($analysis_id),
name => $name,
version_lock => $version_lock,
absolute => $absolute,
file_type => $file_type,
};
# url stays absent (rather than undef) when the column was empty.
$hash->{url} = $url if $url;
return Bio::EnsEMBL::DataFile->new_fast($hash);
});
}
# store
#
# Persists a new DataFile row.  No-op (returns the existing dbID) when the
# object is already stored against this adaptor's database.  On success the
# object is updated in place with its generated dbID and this adaptor.
sub store {
my ($self, $df) = @_;
assert_ref($df, 'Bio::EnsEMBL::DataFile', 'datafile');
if ($df->is_stored($self->db())) {
return $df->dbID();
}
# Both linked objects are mandatory: their dbIDs are written below.
throw 'Analysis is not defined for this data file' if ! defined $df->analysis();
throw 'Coord system is not defined for this data file' if ! defined $df->coord_system();
my $sql = <<'SQL';
INSERT INTO data_file (coord_system_id, analysis_id, name, version_lock, absolute, url, file_type)
VALUES (?,?,?,?,?,?,?)
SQL
# Placeholder order matches the column list in the INSERT above.
my $params = [
[$df->coord_system()->dbID(), SQL_INTEGER],
[$df->analysis()->dbID(), SQL_INTEGER],
[$df->name(), SQL_VARCHAR],
[$df->version_lock(), SQL_INTEGER],
[$df->absolute(), SQL_INTEGER],
[$df->url(), SQL_VARCHAR],
[$df->file_type(), SQL_VARCHAR],
];
# The callback runs right after the INSERT so the auto-generated primary
# key can be captured onto the object.
$self->dbc()->sql_helper()->execute_update(-SQL => $sql, -PARAMS => $params, -CALLBACK => sub {
my ( $sth, $dbh ) = @_;
$df->dbID($self->last_insert_id());
return;
});
$df->adaptor($self);
return $df->dbID();
}
# update
#
# Writes the current state of a stored DataFile back to its row.  Objects
# that were never persisted are handed to store() instead.
sub update {
my ($self, $df) = @_;
assert_ref($df, 'Bio::EnsEMBL::DataFile', 'datafile');
if (! $df->is_stored($self->db())) {
$self->store($df);
return;
}
my $sql = <<'SQL';
UPDATE data_file SET coord_system_id =?, analysis_id=?, name=?, version_lock=?, absolute=?, url=?, file_type=?
WHERE data_file_id =?
SQL
# Placeholder order matches the SET list above, with dbID last for WHERE.
my $params = [
[$df->coord_system()->dbID(), SQL_INTEGER],
[$df->analysis()->dbID(), SQL_INTEGER],
[$df->name(), SQL_VARCHAR],
[$df->version_lock(), SQL_INTEGER],
[$df->absolute(), SQL_INTEGER],
[$df->url(), SQL_VARCHAR],
[$df->file_type(), SQL_VARCHAR],
[$df->dbID(), SQL_INTEGER],
];
$self->dbc()->sql_helper()->execute_update(-SQL => $sql, -PARAMS => $params);
return;
}
sub delete {
  my ($self, $df) = @_;
  assert_ref($df, 'Bio::EnsEMBL::DataFile', 'datafile');

  # Refuse to delete anything this database does not actually own.
  unless ($df->is_stored($self->db())) {
    throw "Cannot delete the data file if it has not already been stored in this database";
  }

  $self->dbc()->sql_helper()->execute_update(
    -SQL => 'DELETE from data_file where data_file_id =?',
    -PARAMS => [[$df->dbID(), SQL_INTEGER]],
  );
  return;
}
sub _tables {
  my ($self) = @_;
  # Single table plus its SQL alias, in the shape BaseAdaptor expects.
  my @tables = ( [ 'data_file', 'df' ] );
  return @tables;
}
1;
| 30.751309 | 137 | 0.669022 |
ed459a9e3fb50a94575d1ce9fdca40226da3378d | 6,983 | pm | Perl | modules/EnsEMBL/Web/Factory/Gene.pm | nerdstrike/ensembl-webcode | ab69513124329e1b2d686c7d2c9f1d7689996a0b | [
"Apache-2.0",
"MIT"
]
| null | null | null | modules/EnsEMBL/Web/Factory/Gene.pm | nerdstrike/ensembl-webcode | ab69513124329e1b2d686c7d2c9f1d7689996a0b | [
"Apache-2.0",
"MIT"
]
| null | null | null | modules/EnsEMBL/Web/Factory/Gene.pm | nerdstrike/ensembl-webcode | ab69513124329e1b2d686c7d2c9f1d7689996a0b | [
"Apache-2.0",
"MIT"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2013] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Factory::Gene;
use strict;
use warnings;
no warnings 'uninitialized';
use HTML::Entities qw(encode_entities);
use base qw(EnsEMBL::Web::Factory);
# createObjects
#
# Factory entry point: locates a Gene (or a compara Family, or an
# ArchiveStableId for retired identifiers) from the URL parameters,
# registers it as the page's data object, and derives Location/Transcript
# objects plus canonical g/t parameters for the top tabs.
sub createObjects {
my $self = shift;
my $gene = shift;
my ($identifier, $id, $param);
# Which database to search: defaults to core; 'est' is an alias for the
# otherfeatures database.
my $db = $self->param('db') || 'core';
$db = 'otherfeatures' if $db eq 'est';
my $db_adaptor = $self->database($db);
return $self->problem('fatal', 'Database Error', $self->_help("Could not connect to the $db database.")) unless $db_adaptor;
# Mapping of supported URL parameters to function calls on GeneAdaptor which should get a Gene for those parameters
# Ordered by most likely parameter to appear in the URL
my @params = (
[ [qw(g gene )], [qw(fetch_by_stable_id fetch_by_transcript_stable_id fetch_by_translation_stable_id)] ],
[ [qw(t transcript )], [qw(fetch_by_transcript_stable_id fetch_by_translation_stable_id )] ],
[ [qw(p peptide protein)], [qw(fetch_by_translation_stable_id fetch_by_transcript_stable_id )] ],
[ [qw(exon )], [qw(fetch_by_exon_stable_id )] ],
[ [qw(anchor1 )], [qw(fetch_by_stable_id fetch_by_transcript_stable_id fetch_by_translation_stable_id)] ],
);
if (!$gene) {
my $adaptor = $db_adaptor->get_GeneAdaptor;
# Loop through the parameters and the function calls, trying to find a Gene
foreach my $p (@params) {
foreach (@{$p->[0]}) {
if ($id = $self->param($_)) {
(my $t = $id) =~ s/^(\S+)\.\d*/$1/g; # Strip versions
(my $t2 = $id) =~ s/^(\S+?)(\d+)(\.\d*)?/$1 . sprintf('%011d', $2)/eg; # Make sure we've got eleven digits
$param = $_;
$identifier = $id;
# Try each fetch call with the raw id, the zero-padded form, and the
# version-stripped form, in that order; eval so bad ids fail quietly.
foreach my $fetch_call (@{$p->[1]}) {
eval { $gene = $adaptor->$fetch_call($id); };
last if $gene;
eval { $gene = $adaptor->$fetch_call($t2); };
last if $gene;
eval { $gene = $adaptor->$fetch_call($t); };
last if $gene;
}
last;
}
}
last if $gene;
}
# Check if there is a family parameter
if (!$gene && ($id = $self->param('family'))) {
my $compara_db = $self->database('compara');
if ($compara_db) {
my $fa = $compara_db->get_FamilyAdaptor;
$gene = $fa->fetch_by_stable_id($id) if $fa;
if ($gene) {
$param = 'family';
$identifier = $id;
}
}
}
$gene ||= $self->_archive($param); # Check if this is an ArchiveStableId
$gene ||= $self->_known_feature('Gene', $param, 'g'); # Last check to see if a feature can be found for the parameters supplied
}
if ($gene) {
$self->DataObjects($self->new_object('Gene', $gene, $self->__data));
$self->generate_object('Location', $gene->feature_Slice) if $gene->can('feature_Slice'); # Generate a location from the gene. Won't be called if $gene is an ArchiveStableId object
my $transcript;
if ($gene->can('get_all_Transcripts')) { # will be false for families
my @transcripts = @{$gene->get_all_Transcripts};
# Mapping of supported URL parameters to functions used to find the relevant transcript
my %get_transcript = (
t => sub { return [ grep $_->stable_id eq $_[1] || $_->external_name eq $_[1], @{$_[0]} ]->[0]; },
p => sub { return [ grep $_->translation && $_->translation->stable_id eq $_[1], @{$_[0]} ]->[0]; },
exon => sub { for (@{$_[0]}) { return $_ if grep $_->stable_id eq $_[1], @{$_->get_all_Exons}; } }
);
$get_transcript{'protein'} = $get_transcript{'peptide'} = $get_transcript{'p'};
$get_transcript{'transcript'} = $get_transcript{'t'};
# If the gene has a single transcript, or a transcript can be found based on the URL parameter (see functions in %get_transcript above),
# we need to generate a transcript object for the top tabs
$transcript = scalar @transcripts == 1 ? $transcripts[0] : exists $get_transcript{$param} ? $get_transcript{$param}(\@transcripts, $identifier) : undef;
# If we haven't got a transcript yet, loop through the @params mapping, trying to find a transcript.
# We can get to this point if $param is g or gene
if (!$transcript && !$get_transcript{$param}) {
shift @params; # $param is g or gene, so we don't care about this element in the @params array
foreach (map @{$_->[0]}, @params) {
if (exists $get_transcript{$_} && ($id = $self->param($_))) {
$transcript = $get_transcript{$_}(\@transcripts, $id);
last if $transcript;
}
}
}
# Drop any numbered t1/t2/... parameters that name transcripts which do
# not belong to this gene.
my @transcript_params = grep s/^t(\d+)$/$1/, $self->param;
if (scalar @transcript_params) {
my %transcript_ids = map { $_->stable_id => 1 } @transcripts;
$self->delete_param("t$_") for grep !$transcript_ids{$self->param("t$_")}, @transcript_params;
}
}
# Generate the transcript object for the top tabs, and set the t parameter for the URL
# If there's no transcript, delete any existing t parameter, because it does not map to this gene
if ($transcript) {
$self->generate_object('Transcript', $transcript);
$self->param('t', $transcript->stable_id);
} else {
$self->delete_param('t');
}
$self->param('g', $gene->stable_id) unless $param eq 'family';
$self->delete_param('gene');
}
}
# _help
#
# Builds the HTML help fragment shown when no usable identifier was given:
# the caller's message (if any) followed by an example Summary URL built
# from this species' sample gene.  All dynamic text is HTML-escaped.
sub _help {
my ($self, $string) = @_;
my %sample = %{$self->species_defs->SAMPLE_DATA || {}};
# Optional caller-supplied message first.
my $help_text = $string ? sprintf '<p>%s</p>', encode_entities($string) : '';
my $url = $self->hub->url({ __clear => 1, action => 'Summary', g => $sample{'GENE_PARAM'} });
$help_text .= sprintf('
<p>
This view requires a gene, transcript or protein identifier in the URL. For example:
</p>
<blockquote class="space-below"><a href="%s">%s</a></blockquote>',
encode_entities($url),
encode_entities($self->species_defs->ENSEMBL_BASE_URL . $url)
);
return $help_text;
}
1;
| 39.451977 | 183 | 0.59115 |
ed87125da4c78168494860ce206e2984b097e2a0 | 1,993 | pm | Perl | Web/lib/MeshMage/Web/Controller/Deploy.pm | symkat/MeshMage | 95f4158f60b2c21214aaa4e92779ee5968492789 | [
"MIT",
"Unlicense"
]
| 26 | 2021-09-18T03:53:49.000Z | 2022-03-12T17:55:42.000Z | Web/lib/MeshMage/Web/Controller/Deploy.pm | silversword411/MeshMage | 95f4158f60b2c21214aaa4e92779ee5968492789 | [
"MIT",
"Unlicense"
]
| 2 | 2021-06-09T09:15:02.000Z | 2021-09-20T10:55:02.000Z | Web/lib/MeshMage/Web/Controller/Deploy.pm | silversword411/MeshMage | 95f4158f60b2c21214aaa4e92779ee5968492789 | [
"MIT",
"Unlicense"
]
| 1 | 2021-09-26T17:06:47.000Z | 2021-09-26T17:06:47.000Z | package MeshMage::Web::Controller::Deploy;
use Mojo::Base 'Mojolicious::Controller', -signatures;
sub manual ($c) {
    my $node       = $c->db->resultset('Node')->find( $c->param('node_id') );
    my $network_id = $node->network->id;
    my $hostname   = $node->hostname;

    # Slurp the network CA plus this node's certificate and key so the page
    # can present them for manual (copy/paste) deployment.
    my @text = map {
        Mojo::File->new( $c->filepath_for( nebula => $network_id, $_ ) )->slurp
    } ( 'ca.crt', $hostname . '.crt', $hostname . '.key' );

    $c->stash(
        node      => $node,
        ca        => $text[0],
        cert      => $text[1],
        key       => $text[2],
        conf      => $c->templated_file( 'nebula_config.yml', node => $node ),
        platforms => $c->nebula_platforms,
    );
}
sub create_macos ($c) {
    my $node     = $c->db->resultset('Node')->find( $c->param('node_id') );
    my $platform = $c->param('platform');

    # Queue the bundle build in Minion; the hostname note lets the UI find
    # jobs belonging to this node.
    $c->minion->enqueue(
        create_macos_bundle => [ $node->id, $platform ],
        { notes => { $node->hostname => 1 } },
    );

    # Back to the node page, telling it which artifact to poll for.
    my $pending = sprintf( "%s_macos.tgz", $node->hostname );
    $c->redirect_to(
        $c->url_for( 'view_node', node_id => $node->id )->query( pending => $pending )
    );
}
# Automatic Deployment
sub automatic ($c) {
    # Gather the node plus every stored SSH key so the form can offer a
    # key/platform choice for automated deployment.
    my $node    = $c->db->resultset('Node')->find( $c->param('node_id') );
    my @sshkeys = $c->db->resultset('Sshkey')->all();

    $c->stash(
        node      => $node,
        sshkeys   => \@sshkeys,
        platforms => $c->nebula_platforms,
    );
}
sub create_automatic ($c) {
    my $node = $c->db->resultset('Node')->find( $c->param('node_id') );

    # Hand the whole deployment off to a background Minion worker.
    my @job_args = (
        $node->id,
        $c->param('sshkey_id'),
        $c->param('deploy_ip'),
        $c->param('platform'),
    );
    $c->minion->enqueue(
        deploy_node => \@job_args,
        { notes => { $node->hostname => 1 } },
    );

    $c->redirect_to( $c->url_for( 'view_node', node_id => $node->id ) );
}
1;
| 28.070423 | 92 | 0.502258 |
73f868781e448132cbdba6ffc4bf1db665623c60 | 3,959 | pm | Perl | lib/DisDB/SQL/Search.pm | MattOates/d2p2.pro | dd4fc7500344d173fa4ae2599632ab852d720bfc | [
"BSD-3-Clause"
]
| 4 | 2015-11-24T21:18:05.000Z | 2020-12-01T03:18:00.000Z | lib/DisDB/SQL/Search.pm | MattOates/d2p2.pro | dd4fc7500344d173fa4ae2599632ab852d720bfc | [
"BSD-3-Clause"
]
| null | null | null | lib/DisDB/SQL/Search.pm | MattOates/d2p2.pro | dd4fc7500344d173fa4ae2599632ab852d720bfc | [
"BSD-3-Clause"
]
| 1 | 2020-07-03T01:39:29.000Z | 2020-07-03T01:39:29.000Z | #!/usr/bin/env perl
package DisDB::SQL::Search;
use strict;
use warnings;
our $VERSION = '1.00';
use base 'Exporter';
use DisDB::SQL::Connect qw'dbConnect dbDisconnect';
our %EXPORT_TAGS = (
'all' => [ qw/
getProteinBySeq
getProteinBySeqID
getProteinConsensus
/ ],
'arch' => [ qw/
getProteinBySeq
getProteinBySeqID
/ ]
);
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
our @EXPORT = qw//;
=item getProteinBySeq - Retrieve proteins by sequence
params:
returns:
=cut
# getProteinBySeq - Retrieve proteins matching one sequence (arrayref of
# rows) or a list of sequences (hashref keyed by sequence; only sequences
# with hits get a key).  Returns undef when nothing matches.
sub getProteinBySeq {
    my ($sequence, $dbh) = @_;

    # Only disconnect a handle this function opened itself.  The previous
    # test, (@_ > 1) ? 1 : 0, was inverted: it closed caller-supplied
    # handles and leaked our own connection.
    my $close_dbh = (defined $dbh) ? 0 : 1;
    $dbh = dbConnect('superfamily') unless defined $dbh;

    my $proteins;
    my $query = $dbh->prepare(
        "SELECT protein.protein, protein.genome, protein.seqid
        FROM protein, genome_sequence, disorder.genome
        WHERE protein.protein = genome_sequence.protein
        AND protein.genome = disorder.genome.genome
        AND genome_sequence.sequence = ?
        ORDER BY protein.genome"
    );

    if (ref $sequence eq "ARRAY") {
        # Batch mode: one query per sequence, results keyed by sequence.
        foreach my $seq (@$sequence) {
            $query->execute($seq);
            my $result = $query->fetchall_arrayref();
            if (scalar @{$result} >= 1) {
                $proteins //= {};
                $proteins->{$seq} = $result;
            }
        }
    }
    else {
        # Single-sequence mode: hand back the rows directly.
        $query->execute($sequence);
        my $result = $query->fetchall_arrayref;
        if (scalar @{$result} >= 1) {
            $proteins = $result;
        }
    }

    dbDisconnect($dbh) if $close_dbh;
    return $proteins;
}
=item getProteinBySeqID - Retrieve proteins by sequence ID without genome
params:
returns:
=cut
# getProteinBySeqID - Retrieve proteins by sequence ID (genome-agnostic).
# Arrayref input returns a hashref keyed by id (undef value = no hits);
# scalar input returns an arrayref of rows, or undef when nothing matches.
sub getProteinBySeqID {
    my ($seqid, $dbh) = @_;

    # Only disconnect a handle this function opened itself (the old
    # (@_ > 1) test was inverted; see getProteinBySeq).
    my $close_dbh = (defined $dbh) ? 0 : 1;
    $dbh = dbConnect('superfamily') unless defined $dbh;

    my $proteins;
    my $query = $dbh->prepare("
        SELECT DISTINCT(protein.protein), count(protein.genome), genome_sequence.sequence
        FROM protein, genome, genome_sequence
        WHERE protein.genome = genome.genome
        AND protein.protein = genome_sequence.protein
        AND protein.seqid=?
        GROUP BY protein.protein
    ");

    if (ref $seqid eq "ARRAY") {
        foreach my $id (@$seqid) {
            $query->execute($id);
            my $result = $query->fetchall_arrayref();
            if ( scalar @$result >= 1 ) {
                $proteins //= {};
                $proteins->{$id} = $result;
            }
            else {
                # Record the miss explicitly so callers can tell "queried,
                # nothing found" apart from "never queried".
                $proteins->{$id} = undef;
            }
        }
    }
    else {
        $query->execute($seqid);
        my $result = $query->fetchall_arrayref();
        if ( scalar @$result >= 1 ) {
            $proteins = $result;
        }
    }

    dbDisconnect($dbh) if $close_dbh;
    return $proteins;
}
# getProteinConsensus - Retrieve disorder consensus (start, end) regions for
# one protein (arrayref of rows) or several (hashref keyed by protein id).
# Returns [] when nothing matches.
sub getProteinConsensus {
    my ($protein, $cutoff, $dbh) = @_;

    # Only disconnect a handle this function opened itself.  The old test,
    # (@_ > 1) ? 1 : 0, was always true here (two mandatory arguments
    # precede $dbh), so caller-supplied handles were always closed.
    my $close_dbh = (defined $dbh) ? 0 : 1;
    $dbh = dbConnect('disorder') unless defined $dbh;

    my $consensus = [];
    my $query = $dbh->prepare("
        SELECT start, end
        FROM dis_consensus_assignment
        WHERE protein = ?
        AND cutoff = ?
        ORDER BY start
    ");

    if (ref $protein eq "ARRAY") {
        foreach my $pid (@$protein) {
            $query->execute($pid, $cutoff);
            my $result = $query->fetchall_arrayref();
            if (scalar @$result >= 1) {
                # Switch to a hash keyed by protein id on the first hit.
                # The previous "$consensus //= {}" never fired because
                # $consensus starts life as an (empty) array ref, so this
                # branch used to die with "Not a HASH reference".
                $consensus = {} unless ref $consensus eq 'HASH';
                $consensus->{$pid} = $result;
            }
        }
    }
    else {
        $query->execute($protein, $cutoff);
        my $result = $query->fetchall_arrayref();
        if (scalar @$result >= 1) {
            $consensus = $result;
        }
    }

    dbDisconnect($dbh) if $close_dbh;
    return $consensus;
}
1;
| 27.303448 | 89 | 0.506694 |
ed7856be2a9d2ecac51cb20da9d74ca78884bc7b | 3,497 | t | Perl | t/core/response.t | hwy1782/apisix | 79601b820f15ddb98c85237e8a7b45f3263fb5a1 | [
"Apache-2.0"
]
| 2,071 | 2019-10-29T01:52:46.000Z | 2020-07-31T15:04:10.000Z | t/core/response.t | hwy1782/apisix | 79601b820f15ddb98c85237e8a7b45f3263fb5a1 | [
"Apache-2.0"
]
| 973 | 2019-10-29T04:14:11.000Z | 2020-07-31T16:28:32.000Z | t/core/response.t | zhangbao0325/apisix | b4fd8ad943e9b30f3641a823fe53ad9a836986ca | [
"Apache-2.0"
]
| 465 | 2019-10-29T02:01:15.000Z | 2020-07-31T08:05:54.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Test scaffolding: t::APISIX wires up the Test::Nginx harness; the actual
# cases are the DSL blocks under __DATA__ below.
use t::APISIX 'no_plan';
# Run each test block a single time.
repeat_each(1);
# Show full bodies in diffs instead of truncating long strings.
no_long_string();
# Blocks supply their own "location /t"; no implicit root location.
no_root_location();
log_level("info");
run_tests;
__DATA__
=== TEST 1: exit with string
--- config
location = /t {
access_by_lua_block {
local core = require("apisix.core")
core.response.exit(201, "done\n")
}
}
--- request
GET /t
--- error_code: 201
--- response_body
done
--- no_error_log
[error]
=== TEST 2: exit with table
--- config
location = /t {
access_by_lua_block {
local core = require("apisix.core")
core.response.exit(201, {a = "a"})
}
}
--- request
GET /t
--- error_code: 201
--- response_body
{"a":"a"}
--- no_error_log
[error]
=== TEST 3: multiple response headers
--- config
location = /t {
access_by_lua_block {
local core = require("apisix.core")
core.response.set_header("aaa", "bbb", "ccc", "ddd")
core.response.exit(200, "done\n")
}
}
--- request
GET /t
--- response_body
done
--- response_headers
aaa: bbb
ccc: ddd
--- no_error_log
[error]
=== TEST 4: multiple response headers by table
--- config
location = /t {
access_by_lua_block {
local core = require("apisix.core")
core.response.set_header({aaa = "bbb", ccc = "ddd"})
core.response.exit(200, "done\n")
}
}
--- request
GET /t
--- response_body
done
--- response_headers
aaa: bbb
ccc: ddd
--- no_error_log
[error]
=== TEST 5: multiple response headers (add)
--- config
location = /t {
access_by_lua_block {
local core = require("apisix.core")
core.response.add_header("aaa", "bbb", "aaa", "bbb")
core.response.exit(200, "done\n")
}
}
--- request
GET /t
--- response_body
done
--- response_headers
aaa: bbb, bbb
--- no_error_log
[error]
=== TEST 6: multiple response headers by table (add)
--- config
location = /t {
access_by_lua_block {
local core = require("apisix.core")
core.response.set_header({aaa = "bbb"})
core.response.add_header({aaa = "bbb", ccc = "ddd"})
core.response.exit(200, "done\n")
}
}
--- request
GET /t
--- response_body
done
--- response_headers
aaa: bbb, bbb
ccc: ddd
--- no_error_log
[error]
=== TEST 7: delete header
--- config
location = /t {
access_by_lua_block {
local core = require("apisix.core")
core.response.set_header("aaa", "bbb")
core.response.set_header("aaa", nil)
core.response.exit(200, "done\n")
}
}
--- request
GET /t
--- response_body
done
--- response_headers
aaa:
--- no_error_log
[error]
| 21.066265 | 74 | 0.616814 |
ed65a25e5fe1839556ff06346ee52fbf9d29746c | 18,634 | pl | Perl | testresults/scons230_trace/batchbuild/reference/l/genscons.pl | SCons/scons-performance | 2df4558a1132b62a36f20c1c0b37da8fafa00114 | [
"MIT"
]
| null | null | null | testresults/scons230_trace/batchbuild/reference/l/genscons.pl | SCons/scons-performance | 2df4558a1132b62a36f20c1c0b37da8fafa00114 | [
"MIT"
]
| 1 | 2020-09-24T16:09:23.000Z | 2020-09-27T17:30:13.000Z | testresults/scons230_trace/batchbuild/reference/l/genscons.pl | SCons/scons-performance | 2df4558a1132b62a36f20c1c0b37da8fafa00114 | [
"MIT"
]
| 2 | 2020-09-27T21:18:11.000Z | 2022-03-23T17:32:03.000Z | #!/usr/bin/perl
#
# genscons.pl
#
# This script generates a build tree with $ndirs + 1 directories, containing
# $nfils source files each, and both SConstruct files and non-recursive
# Makefiles to build the tree.
#
# Copyright (c) 2010 Electric Cloud, Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Electric Cloud nor the names of its employees may
# be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# example: /genscons.pl -d N -l 2 -f 500 (with N from 3-99)
# These settings will generate 2,000 total C files, grouped in blocks of 20,
# each of which does a couple of #includes.
# Default tree-shape parameters (overridable on the command line via init()):
# directory nesting depth, directories per level, C files per directory,
# include-lookup dirs per source file, and files per compile group.
$nlvls = 2; $ndirs = 144; $nfils = 500; $nlups = 2; $group = 20;
# Shell delete command and object-file suffix used by the generated builds.
$rmdel = "rm -f";
$OBJ = ".o";
# Check Variables settings
# Each group of $group files is chained into one program, so the file count
# must divide evenly into whole groups.
if ( ($nfils % $group) != 0) {
die "ERROR: The number of files ($nfils) must be a multiple of the group size ($group)";
}
# init
#
# Parse command-line options via Getopt::Std and override the global
# tree-shape parameters ($ndirs, $nfils, $group, $nlvls, $nlups).
# Prints usage and exits on -h or on a malformed option list.  Returns 0.
sub init {
    use Getopt::Std;
    # d/f/g/l/u each take a value; h is a bare flag.
    my $opt_string = 'd:f:g:l:u:h';
    getopts ($opt_string, \%opt) or usage ();
    usage () if $opt{h};    # plain call; the old &usage() sigil form is discouraged
    $ndirs = $opt{d} if $opt{d};
    $nfils = $opt{f} if $opt{f};
    $group = $opt{g} if $opt{g};
    $nlvls = $opt{l} if $opt{l};
    $nlups = $opt{u} if $opt{u};
    return 0;
}
# usage
#
# Print the help text (with the current defaults interpolated) to STDERR
# and terminate the program.
sub usage {
    my $help = <<"END_USAGE";
usage: $0 [-l Levels] [-d Dirs] [-f Files] [-g Grouping] [-u Lookups]
[-h]
-l Levels : number of levels of directories (default $nlvls)
-d Dirs : number of directories at each level (default $ndirs)
-f Files : number of source files per directory (default $nfils)
-g Grouping : compile in groups of Grouping files (default $group)
-u Lookups : number of lookups per source file (default $nlups)
-h : this help message
You can edit the default values in genscons.pl
END_USAGE
    print STDERR $help;
    exit;
}
# fmt
#
# Adds commas to long numbers to make them more readable,
# e.g. fmt(1234567) returns "1,234,567".
sub fmt {
    my ($value) = @_;
    # Insert a comma before each trailing group of three digits, repeating
    # until no substitution is made.  Testing the s/// return value is more
    # robust than the old check of $1, which relied on capture variables
    # persisting across a failed match (and used the deprecated \1 form in
    # the replacement).
    1 while $value =~ s/([0-9])([0-9]{3})(?![0-9])/$1,$2/;
    return $value;
}
# gen_incfile
#
# Generate a generic include file to keep the compiler busy.
#
#   $basedir  - tree location recorded in the generated #define
#   $filename - path of the header file to write
#   $idx      - index into the global @filname array; $filname[$idx]
#               supplies the include-guard macro name
sub gen_incfile {
    my ($basedir, $filename, $idx) = @_;
    # Three-arg open with a lexical handle; the old two-arg, bareword-handle
    # form ("> $filename" on INC) leaked a global filehandle and was open to
    # mode injection via the filename.
    open (my $inc, '>', $filename)
        || die "Cannot open $filename for output: $!";
    print $inc "#ifndef $filname[$idx]\n"
        . "#define $filname[$idx] \"$basedir\"\n\n"
        . "#include \"stdio.h\"\n\n";
    print $inc "#endif\n";
    # Buffered write errors only surface at close, so check it too.
    close ($inc) || die "Cannot close $filename: $!";
}
# gen_cfile
#
# Generate a distinct C file to keep the compiler busy.
#
#   $basedir  - directory the file is written into (also echoed by the
#               generated printf calls)
#   $filename - name of the C file to create under $basedir
#   $idx      - index into the global @filname/@fil/@file arrays
#
# Globals read: $group, @filname, @fil, @file; scratch global: $buff.
# With $group > 1 the files of a group form a call chain: the group's
# first file holds main() and calls printr_<next file>, each middle file
# forwards its printr_ to the next file, and the group's last file's
# printr_ finally does the printf.  With $group == 1 every file is a
# stand-alone program.
sub gen_cfile {
my ($basedir, $filename, $idx) = @_;
open (CFILE, "> $basedir/$filename")
|| die "Cannot open $basedir/$filename for output";
# Every generated file pulls in its own per-file header plus one shared
# header, giving the build some include-path lookup work.
$buff = "#include <$filname[$idx].h>\n";
$buff .= "#include <omega.h>\n";
print CFILE $buff;
if ($group == 1) {
# Ungrouped: emit a complete stand-alone main().
print CFILE "main (int argc, char * argv[]) {\n"
. "\tint i, mb_out;\n"
. "\tprintf (\"I am $basedir/%s\\n\", \"$filname[$idx]\""
. ");\n"
. "\treturn (0);\n}\n";
}   # end of $group == 1
elsif ( ($group - ($fil[$idx] % $group)) == 1) {
# Last file of a group: end of the printr_ chain; does the real printf.
print CFILE "printr_$filname[$idx] (char * fname) {\n"
. "    printf (\"I am $basedir/%s\\n\", fname);\n"
. "    return (0);\n}\n";
}
elsif ( ($fil[$idx] % $group) == 0) {
# First file of a group: holds main(), kicks off the chain, and can emit
# extra ballast output scaled by argv[1].
$idx2 = $fil[$idx] + 1;
print CFILE "extern int printr_$file[$idx2] (char * fname);\n"
. "main (int argc, char * argv[]) {\n"
. "\tint i, mb_out;\n";
print CFILE "\tprintr_$file[$idx2] (\"$filname[$idx]\");\n"
. "\n"
. "\tmb_out = 0;\n"
. "\tif (argc > 1) {\n"
. "\t\tmb_out = atoi (argv[1]);\n"
. "\t}\n"
. "\tfor (i = 0; i < (mb_out * 16000); i++) {\n"
. "\t\tprintf (\"%07d 9a123456789b123456789c12345"
. "6789d123456789e123456789f12345678\\n\", i);\n"
. "\t}\n"
. "\texit (0);\n}\n";
}
else {
# Middle of a group: forward the printr_ call one link down the chain.
$idx2 = $fil[$idx] + 1;
print CFILE "extern int printr_$file[$idx2] (char * fname);\n"
. "printr_$filname[$idx] (char * fname) {\n"
. "    printr_$file[$idx2] (fname);\n"
. "    return (0);\n}\n";
}
close (CFILE);
}
# mkdirs
#
# Recursive function for generating directories full of files to build, and
# the makefiles that go with them.
#
sub mkdirs {
my ($idx, $basedir, $thisLvl) = @_;
if ( ! -d $basedir ) {
mkdir $basedir || die "Cannot create folder $basedir: $!";
}
$relpath[$idx] = substr ($basedir, 8); # assumed top dir is "sconsbld"
if ("$relpath[$idx]" eq "") {
$relpath[$idx] = ".";
$basestr = $basedir;
$foo = "";
$libdir = ".";
}
else {
$basestr = $relpath[$idx];
$basestr =~ s|/|_|g;
$foo = substr($basedir,9) . "/";
$libdir = substr($basedir,9);
}
$bstr[$idx] = $basestr;
$dirstr = $basedir;
$dirstr =~ s|/|_|g;
# $basedir is $relpath[$idx] with "sconsbld/" prepended so
# $dirstr is $basestr with "sconsbld_" prepended.
$cdstr = ".";
for ($cdidx = 1; $cdidx < $thisLvl; $cdidx++) { $cdstr .= "/.."; }
$thissc[$idx] = "$basedir/SConstruct";
$thismk[$idx] = "$basedir/Makefile";
$fzero[$idx] = "$basedir/file0";
open (SC, "> $thissc[$idx]")
|| die "Cannot open $thissc[$idx] for output: $!";
open (MK, "> $thismk[$idx]")
|| die "Cannot open $thismk[$idx] for output: $!";
print SC "import os\n"
. "env = Environment(ENV = {'PATH' : os.environ['PATH']}, tools=['default'])\n\n";
my $cfgpath = "";
for (my $junk = 1; $junk < $thisLvl; $junk++) { $cfgpath .= "../"; }
my $arkive = "archive";
if ($thisLvl == 1) { $mysubdir = "."; }
else { $mysubdir = substr ($basedir, 8); }
if (index ($basedir, "/") > 0) {
@pieces = split (/\//, $basedir);
$fileSuffix = "";
for (my $ii =0; $ii <= $#pieces; $ii++) {
$fileSuffix .= "_" . $pieces[$ii];
}
} else {
$fileSuffix = "_" . $basedir;
}
for (my $lupsIdx = 0; $lupsIdx < $nlups; $lupsIdx++) {
printf SC ("env.Append (CPPPATH = ['./lup%03d$fileSuffix'])\n",
$lupsIdx);
if ($lupsIdx == 0) {
$eq = "=";
} else {
$eq = "+=";
}
printf MK ("${foo}%%.o: CPPFLAGS $eq -I${foo}lup%03d$fileSuffix\n",
$lupsIdx);
}
print SC "env.Append (LIBPATH = ['.'])\n\n";
print MK "${foo}%: LDFLAGS = -L$libdir\n\n";
if ($thisLvl == 1) {
print SC "\n"
. "env.Help(\"\"\"\n"
. "This build has parameters:\n"
. " number of levels = $nlvls\n"
. " directories/level = $ndirs\n"
. " cfiles/directory = $nfils\n"
. " lookups/source = $nlups\n"
. " compiles grouped = $group\n"
. "\"\"\")\n";
print MK "${foo}%.a:\n";
print MK "\tar rc \$@ \$^\n";
print MK "\tranlib \$@\n\n";
print MK "%.o: %.c\n";
print MK "\t\$(CC) -MMD -o \$@ -c \$(CPPFLAGS) \$<\n\n";
print MK "%: %.o\n";
print MK "\t\$(CC) -o \$@ \$< \$(LDFLAGS) -l\$(notdir \$@)\n\n";
print MK "CC=gcc\n\n";
print MK "all:\n\t\@ps -eo vsz,rss,comm | fgrep make\n\n";
}
print SC "\n";
print MK "\n";
# Create include directories for doing additional lookups.
for (my $ii = 0; $ii < $nlups; $ii++) {
$lupDir = sprintf ("lup%03d$fileSuffix", $ii);
mkdir "$basedir/$lupDir" || die "Couldn't create $basedir/$lupDir: $!";
$totald++;
}
$scfcc = "";
$scfar = "";
$mkfcc = "";
$mkfar = "";
###
### generate the .c files and the .h files they include.
### Also generate the corresponding Makefile commands.
###
for (my $filidx = 0; $filidx < $nfils; $filidx++) {
$file[$filidx] = sprintf ("f%05d$fileSuffix", $filidx);
}
for ($fil[$idx] = 0; $fil[$idx] < $nfils; $fil[$idx]++) {
$filname[$idx] = "$file[$fil[$idx]]";
$nextnum = substr ($filname[$idx], 1, 5);
if ($group == 1) {
#
# Even when there are no groups, pre-compiled headers
# still apply.
#
print SC "env.Program ('$filname[$idx].c')\n\n";
} # end of $group == 1
#
# Compile source files in groups.
# This removes unique lookups but adds some :: rules.
#
else {
if ( ($fil[$idx] % $group) == 0) {
if ("$scfcc" ne "") {
print SC "$scfcc\n$scfar\n\n";
$scfcc = "";
$scfar = "";
}
if ("$mkfcc" ne "") {
print MK "$mkfcc\n$mkfar\n\n";
$mkfcc = "";
$mkfar = "";
}
$groupFilename = "$filname[$idx]";
$nextnum = substr ($filname[$idx], 1, 5);
$scfcc = "env.Program('$filname[$idx]',\n"
. "\tLIBS=['$filname[$idx]'])\n";
$scfar = "env.Library ('$filname[$idx]',\n"
. "\t['$filname[$idx].c'";
$mkfcc = "TARGETS += ${foo}$filname[$idx]\n${foo}$filname[$idx]: ${foo}$filname[$idx].o ${foo}lib$filname[$idx].a\n";
$mkfar = "${foo}lib$filname[$idx].a: ${foo}$filname[$idx].o";
print MK "SRCS += ${foo}$filname[$idx].c\n";
$tmpfnam = $filname[$idx];
for ($filei = 1; $filei < $group; $filei++) {
$tmpfnum = sprintf ("%05d", $nextnum + $filei);
substr ($tmpfnam, 1, 5) = $tmpfnum;
$scfar .= ",\n\t '$tmpfnam.c'";
$mkfar .= " ${foo}$tmpfnam.o";
print MK "SRCS += ${foo}$tmpfnam.c\n";
}
$scfar .= "])\n\n";
$mkfar .= "\n";
}
} # end of handling of compiles for $group > 1
gen_incfile($basedir, "$basedir/$lupDir/$filname[$idx].h", $idx);
if ($fil[$idx] == 0) {
open (INC, "> $basedir/$lupDir/omega.h")
|| die "Cannot open $basedir/$lupDir/omega.h for output.";
print INC "// comment in dummy file.\n";
close (INC);
}
gen_cfile($basedir, "$filname[$idx].c", $idx);
} # end of generation of source files and header files
if ($group > 1 && "$scfcc" ne "") {
#
# create makefile commands for the leftover files
#
print SC "$scfcc\n$scfar\n\n";
print MK "$mkfcc\n$mkfar\n\n";
}
close (SC);
close (MK);
# Recurse and create more subdirectories and their contents.
if ($thisLvl < $nlvls) {
$allsubs[$idx] = "";
for ($dir[$idx] = 0; $dir[$idx] < $ndirs; $dir[$idx]++) {
$dirname[$idx] = sprintf ("d${thisLvl}_%d", $dir[$idx]);
$allsubs[$idx] .= "$dirname[$idx].subdir ";
#
# divide the subdirectories into 2 lists
# The two lists are/can be treated differently in Makefile.util
#
if ($dir[$idx] < ($ndirs / 2)) {
if ("$dirs1[$idx]" eq "") { $dirs1[$idx] = "$dirname[$idx]"; }
elsif (index ($dirs1[$idx], $dirname[$idx]) < 0) {
$dirs1[$idx] .= " $dirname[$idx]";
}
}
else {
if ("$dirs2[$idx]" eq "") { $dirs2[$idx] = "$dirname[$idx]"; }
elsif (index ($dirs2[$idx], $dirname[$idx]) < 0) {
$dirs2[$idx] .= " $dirname[$idx]";
#
# The preceding elsif should really just be
# "else" but when nlvls > 2, you start getting repetition
# of directory names in $dirs1[$idx] and $dirs2[$idx].
# Rather than figure out where the duplication is coming
# from, just prevent it.
#
}
}
if ( ! -d "$basedir/$dirname[$idx]") {
mkdir "$basedir/$dirname[$idx]" ||
die "Couldn't create $basedir/$dirname[$idx]: $!";
$totald++;
}
&mkdirs ($idx + 1, "$basedir/$dirname[$idx]", $thisLvl + 1);
if ($thisLvl == 1) {
print "Finished folder $dirname[$idx] in $basedir at "
. `date`;
}
}
}
if ($thisLvl < $nlvls) {
open (SC, ">> $thissc[$idx]")
|| die "Cannot open $thissc[$idx] for append: $!";
open (MK, ">> $thismk[$idx]")
|| die "Cannot open $thismk[$idx] for append: $!";
print SC "SConscript([";
if (index ($dirs1[$idx], " ") > 0) {
@subdirs = split (/ /, $dirs1[$idx]);
for ($i = 0; $i <= $#subdirs; $i++) {
print SC "'$subdirs[$i]/SConstruct',\n\t";
print MK "include $subdirs[$i]/Makefile\n";
}
}
else {
print SC "'$dirs1[$idx]/SConstruct',\n\t";
print MK "include $dirs1[$idx]/Makefile\n";
}
if (index ($dirs2[$idx], " ") > 0) {
@subdirs = split (/ /, $dirs2[$idx]);
for ($i = 0; $i < $#subdirs; $i++) {
print SC "'$subdirs[$i]/SConstruct',\n\t";
print MK "include $subdirs[$i]/Makefile\n";
}
print SC "'$subdirs[$#subdirs]/SConstruct'";
print MK "include $subdirs[$#subdirs]/Makefile\n";
}
else {
print SC "'$dirs2[$idx]/SConstruct'";
print MK "include $dirs2[$idx]/Makefile\n";
}
print SC "])\n\n";
print SC "\n";
print MK "NUL=\n";
print MK "SPACE=\$(NUL) \$(NUL)\n";
print MK "define nl\n\$(SPACE)\n\$(SPACE)\nendef\n\n";
print MK "all: \$(TARGETS)\n\n";
print MK "clean:\n";
print MK "\t\$(foreach tgt,\$(TARGETS),rm -f \$(tgt)\$(nl))\n";
print MK "\tfor n in d1*; do rm -f \$\$n/*.o ; rm -f \$\$n/*.a;done\n";
print MK "\trm -f *.o ; rm -f *.a\n\n";
print MK "-include \$(SRCS:.c=.d)";
close (SC);
close (MK);
}
return 0;
}
$basedir = "sconsbld";
if ( ! -d $basedir) { mkdir $basedir || die "Couldn't create $basedir: $!"; }
&init ();
$numdirs = 0; # dirs other than include dirs
for ($i = 0; $i < $nlvls; $i++) { $numdirs += ($ndirs ** $i); }
$totldirs = $numdirs * ($nlups + 1); # dirs including include dirs
# total = ( .c ) + ( .h ) + mkfiles + Makefile.util
# + Makefile.cfg
# + readme
# + omega.h
# + Makefile.clean
$totlfils = ($nfils * $numdirs) + ($nfils * $numdirs)
+ $numdirs + 3 + $numdirs;
$totlobjs = $nfils * $numdirs;
$totlexes = $numdirs * ($nfils / $group);
$totllups = $nfils * $numdirs * $nlups / $group;
$allfiles = $totlfils + $totlobjs + $totlexes;
# one rule for each group plus overhead of 10/makefile
$nrules = ($numdirs * $nfils / $group) + ($numdirs * 10);
$txt1 = "Number of levels = $nlvls\n"
. "Number of dirs / level = $ndirs\n"
. "Number of source files / dir = $nfils\n"
. "Number of lookups / source file = $nlups\n"
. "Number of compiles grouped = $group\n";
print $txt1;
print $vartxt;
$numMakefiles = 1;
$txt2 = "Expecting:\n"
. "\tdirectories: " . fmt($totldirs) . "\n"
. "\tsource files: " . fmt($numdirs * $nfils) . "\n"
. "\tinclude files: " . fmt($numdirs * ($nfils + 1)) . "\n"
. "\tmakefiles: " . fmt($numdirs * $numMakefiles)
. " ($numMakefiles per directory)\n"
. "\ttotal files: " . fmt($totlfils) . "\n"
. "\tlook-ups: >= " . fmt($totllups) . "\n"
. "\trules: >= " . fmt($nrules) . "\n";
print $txt2;
$txt3 = "When the build runs, " . fmt($totlobjs) . " object files & "
. fmt($totlexes) . " executable(s)"
. "\nwill be created, for a total of " . fmt($allfiles) . " files.\n";
print $txt3;
# Using local archives the number of conflicts is about the number of compiles
# which equals the number of archive writes.
#
if (-d $basedir) {
print "Cleaning up from a previous run of this perl script.\n\n";
system ("rm -rf $basedir/*");
}
###
### Generate README.txt
###
$readme = "$basedir/README.txt";
open (README, "> $readme") || die "Cannot open $readme for output.";
$txt = "\nStarted at " . `date` . "\n";
print $txt;
print README $txt
. $vartxt
. $txt1
. $txt2
. $txt3 || die "Cannot write txt, vartxt, txt1 etc to README";
###
### Do the heavy lifting
###
print "Start writing new files at " . `date` . "......\n";
$basedir0 = $basedir;
&mkdirs (0, $basedir, 1);
###
### Summarize the results to the README and the console
###
$txt = "\nFile creation ended at " . `date` . "\n";
print $txt; print README $txt || die "Cannot print txt to README";
close (README);
| 33.215686 | 133 | 0.496243 |
ed42546465d2f1346e9ef1e56c8be2e7d3af9e42 | 1,143 | pm | Perl | lib/Google/Ads/AdWords/v201809/ServiceType.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | [
"Apache-2.0"
]
| 4 | 2015-04-23T01:59:40.000Z | 2021-10-12T23:14:36.000Z | lib/Google/Ads/AdWords/v201809/ServiceType.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | [
"Apache-2.0"
]
| 23 | 2015-02-19T17:03:58.000Z | 2019-07-01T10:15:46.000Z | lib/Google/Ads/AdWords/v201809/ServiceType.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | [
"Apache-2.0"
]
| 10 | 2015-08-03T07:51:58.000Z | 2020-09-26T16:17:46.000Z | package Google::Ads::AdWords::v201809::ServiceType;
use strict;
use warnings;
sub get_xmlns { 'https://adwords.google.com/api/adwords/mcm/v201809'};
# derivation by restriction
use base qw(
SOAP::WSDL::XSD::Typelib::Builtin::string);
1;
__END__
=pod
=head1 NAME
=head1 DESCRIPTION
Perl data type class for the XML Schema defined simpleType
ServiceType from the namespace https://adwords.google.com/api/adwords/mcm/v201809.
Services whose links to AdWords accounts are visible in {@link CustomerServicee}
This clase is derived from
SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly
like it's base type.
# Description of restrictions not implemented yet.
=head1 METHODS
=head2 new
Constructor.
=head2 get_value / set_value
Getter and setter for the simpleType's value.
=head1 OVERLOADING
Depending on the simple type's base type, the following operations are overloaded
Stringification
Numerification
Boolification
Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| 17.318182 | 86 | 0.764654 |
ed6a26dd5144dc63ea294ef665f1be5ab4524edd | 1,477 | pm | Perl | lib/Data/Sah/Compiler/perl/TH/code.pm | gitpan/Data-Sah | 39b82b0344a3f1a92ff73917bb887cc4a6d8eee0 | [
"Artistic-1.0"
]
| null | null | null | lib/Data/Sah/Compiler/perl/TH/code.pm | gitpan/Data-Sah | 39b82b0344a3f1a92ff73917bb887cc4a6d8eee0 | [
"Artistic-1.0"
]
| null | null | null | lib/Data/Sah/Compiler/perl/TH/code.pm | gitpan/Data-Sah | 39b82b0344a3f1a92ff73917bb887cc4a6d8eee0 | [
"Artistic-1.0"
]
| null | null | null | package Data::Sah::Compiler::perl::TH::code;
use 5.010;
use Log::Any '$log';
use Moo;
extends 'Data::Sah::Compiler::perl::TH';
with 'Data::Sah::Type::code';
our $VERSION = '0.41'; # VERSION
sub handle_type {
my ($self, $cd) = @_;
my $c = $self->compiler;
my $dt = $cd->{data_term};
$cd->{_ccl_check_type} = "ref($dt) eq 'CODE'";
}
1;
# ABSTRACT: perl's type handler for type "code"
__END__
=pod
=encoding UTF-8
=head1 NAME
Data::Sah::Compiler::perl::TH::code - perl's type handler for type "code"
=head1 VERSION
This document describes version 0.41 of Data::Sah::Compiler::perl::TH::code (from Perl distribution Data-Sah), released on 2015-01-06.
=for Pod::Coverage ^(clause_.+|superclause_.+)$
=head1 HOMEPAGE
Please visit the project's homepage at L<https://metacpan.org/release/Data-Sah>.
=head1 SOURCE
Source repository is at L<https://github.com/perlancar/perl-Data-Sah>.
=head1 BUGS
Please report any bugs or feature requests on the bugtracker website L<https://rt.cpan.org/Public/Dist/Display.html?Name=Data-Sah>
When submitting a bug or request, please include a test-file or a
patch to an existing test-file that illustrates the bug or desired
feature.
=head1 AUTHOR
perlancar <perlancar@cpan.org>
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2015 by perlancar@cpan.org.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
| 22.378788 | 134 | 0.717671 |
ed0a3313ac30bc2f1ec45614c8b43f1bb9ad2334 | 2,049 | t | Perl | t/unit/bin/Agua/Common/App/App.t | aguadev/aguadev | db22858faa425b7af2743d98c31dabef644e519c | [
"MIT"
]
| 1 | 2022-01-26T14:09:30.000Z | 2022-01-26T14:09:30.000Z | t/unit/bin/Agua/Common/App/App.t | aguadev/aguadev | db22858faa425b7af2743d98c31dabef644e519c | [
"MIT"
]
| null | null | null | t/unit/bin/Agua/Common/App/App.t | aguadev/aguadev | db22858faa425b7af2743d98c31dabef644e519c | [
"MIT"
]
| null | null | null | #!/usr/bin/perl -w
=head2
APPLICATION Common::App.t
PURPOSE
Test Agua::Common::App module
NOTES
1. RUN AS ROOT
2. BEFORE RUNNING, SET ENVIRONMENT VARIABLES, E.G.:
export installdir=/aguadev
=cut
use Test::More tests => 11;
use Getopt::Long;
use FindBin qw($Bin);
use lib "$Bin/../../../../lib";
BEGIN
{
my $installdir = $ENV{'installdir'} || "/agua";
unshift(@INC, "$installdir/lib");
unshift(@INC, "$installdir/lib/external/lib/perl5");
}
#### CREATE OUTPUTS DIR
my $outputsdir = "$Bin/outputs";
`mkdir -p $outputsdir` if not -d $outputsdir;
use_ok('Conf::Yaml');
use_ok('Test::Agua::Common::App');
#### SET CONF FILE
my $installdir = $ENV{'installdir'} || "/agua";
my $configfile = "$installdir/conf/config.yaml";
#### SET $Bin
$Bin =~ s/^.+\/bin/$installdir\/t\/bin/;
#### GET OPTIONS
my $logfile = "/tmp/testuser.login.log";
my $log = 2;
my $printlog = 5;
my $help;
GetOptions (
'log=i' => \$log,
'printlog=i' => \$printlog,
'logfile=s' => \$logfile,
'help' => \$help
) or die "No options specified. Try '--help'\n";
usage() if defined $help;
my $conf = Conf::Yaml->new(
inputfile => $configfile,
backup => 1,
separator => "\t",
spacer => "\\s\+",
logfile => $logfile,
log => 2,
printlog => 5
);
isa_ok($conf, "Conf::Yaml", "conf");
#### SET DUMPFILE
my $dumpfile = "$Bin/../../../../dump/create.dump";
my $object = new Test::Agua::Common::App(
conf => $conf,
logfile => $logfile,
log => $log,
printlog => $printlog
);
isa_ok($object, "Test::Agua::Common::App", "object");
#### TESTS
$object->testSaveApp();
#### SATISFY Agua::Common::Logger::logError CALL TO EXITLABEL
no warnings;
EXITLABEL : {};
use warnings;
#:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# SUBROUTINES
#:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
sub usage {
print `perldoc $0`;
}
| 20.69697 | 80 | 0.518302 |
ed23289c92246f1717dfebb830e5f79544d1d25d | 5,625 | pm | Perl | auto-lib/Paws/Pinpoint/CreatePushTemplate.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/Pinpoint/CreatePushTemplate.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/Pinpoint/CreatePushTemplate.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::Pinpoint::CreatePushTemplate;
use Moose;
has PushNotificationTemplateRequest => (is => 'ro', isa => 'Paws::Pinpoint::PushNotificationTemplateRequest', required => 1);
has TemplateName => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'template-name', required => 1);
use MooseX::ClassAttribute;
class_has _stream_param => (is => 'ro', default => 'PushNotificationTemplateRequest');
class_has _api_call => (isa => 'Str', is => 'ro', default => 'CreatePushTemplate');
class_has _api_uri => (isa => 'Str', is => 'ro', default => '/v1/templates/{template-name}/push');
class_has _api_method => (isa => 'Str', is => 'ro', default => 'POST');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::Pinpoint::CreatePushTemplateResponse');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Pinpoint::CreatePushTemplate - Arguments for method CreatePushTemplate on L<Paws::Pinpoint>
=head1 DESCRIPTION
This class represents the parameters used for calling the method CreatePushTemplate on the
L<Amazon Pinpoint|Paws::Pinpoint> service. Use the attributes of this class
as arguments to method CreatePushTemplate.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to CreatePushTemplate.
=head1 SYNOPSIS
my $pinpoint = Paws->service('Pinpoint');
my $CreatePushTemplateResponse = $pinpoint->CreatePushTemplate(
PushNotificationTemplateRequest => {
ADM => {
Action => 'OPEN_APP', # values: OPEN_APP, DEEP_LINK, URL; OPTIONAL
Body => 'My__string', # OPTIONAL
ImageIconUrl => 'My__string', # OPTIONAL
ImageUrl => 'My__string', # OPTIONAL
RawContent => 'My__string', # OPTIONAL
SmallImageIconUrl => 'My__string', # OPTIONAL
Sound => 'My__string', # OPTIONAL
Title => 'My__string', # OPTIONAL
Url => 'My__string', # OPTIONAL
}, # OPTIONAL
APNS => {
Action => 'OPEN_APP', # values: OPEN_APP, DEEP_LINK, URL; OPTIONAL
Body => 'My__string', # OPTIONAL
MediaUrl => 'My__string', # OPTIONAL
RawContent => 'My__string', # OPTIONAL
Sound => 'My__string', # OPTIONAL
Title => 'My__string', # OPTIONAL
Url => 'My__string', # OPTIONAL
}, # OPTIONAL
Baidu => {
Action => 'OPEN_APP', # values: OPEN_APP, DEEP_LINK, URL; OPTIONAL
Body => 'My__string', # OPTIONAL
ImageIconUrl => 'My__string', # OPTIONAL
ImageUrl => 'My__string', # OPTIONAL
RawContent => 'My__string', # OPTIONAL
SmallImageIconUrl => 'My__string', # OPTIONAL
Sound => 'My__string', # OPTIONAL
Title => 'My__string', # OPTIONAL
Url => 'My__string', # OPTIONAL
}, # OPTIONAL
Default => {
Action => 'OPEN_APP', # values: OPEN_APP, DEEP_LINK, URL; OPTIONAL
Body => 'My__string', # OPTIONAL
Sound => 'My__string', # OPTIONAL
Title => 'My__string', # OPTIONAL
Url => 'My__string', # OPTIONAL
}, # OPTIONAL
DefaultSubstitutions => 'My__string', # OPTIONAL
GCM => {
Action => 'OPEN_APP', # values: OPEN_APP, DEEP_LINK, URL; OPTIONAL
Body => 'My__string', # OPTIONAL
ImageIconUrl => 'My__string', # OPTIONAL
ImageUrl => 'My__string', # OPTIONAL
RawContent => 'My__string', # OPTIONAL
SmallImageIconUrl => 'My__string', # OPTIONAL
Sound => 'My__string', # OPTIONAL
Title => 'My__string', # OPTIONAL
Url => 'My__string', # OPTIONAL
}, # OPTIONAL
RecommenderId => 'My__string', # OPTIONAL
Tags => {
'My__string' => 'My__string', # key: OPTIONAL, value: OPTIONAL
}, # OPTIONAL
TemplateDescription => 'My__string', # OPTIONAL
},
TemplateName => 'My__string',
);
# Results:
my $CreateTemplateMessageBody =
$CreatePushTemplateResponse->CreateTemplateMessageBody;
# Returns a L<Paws::Pinpoint::CreatePushTemplateResponse> object.
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/pinpoint/CreatePushTemplate>
=head1 ATTRIBUTES
=head2 B<REQUIRED> PushNotificationTemplateRequest => L<Paws::Pinpoint::PushNotificationTemplateRequest>
=head2 B<REQUIRED> TemplateName => Str
The name of the message template. A template name must start with an
alphanumeric character and can contain a maximum of 128 characters. The
characters can be alphanumeric characters, underscores (_), or hyphens
(-). Template names are case sensitive.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method CreatePushTemplate in L<Paws::Pinpoint>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 41.977612 | 249 | 0.6032 |
ed5d774dbc49afefd46c0450acc33d3f08a268ea | 5,369 | t | Perl | S10-packages/precompilation.t | b2gills/roast | 4b689b3c9cc2642fdeb8176a24415ec1540f013f | [
"Artistic-2.0"
]
| null | null | null | S10-packages/precompilation.t | b2gills/roast | 4b689b3c9cc2642fdeb8176a24415ec1540f013f | [
"Artistic-2.0"
]
| null | null | null | S10-packages/precompilation.t | b2gills/roast | 4b689b3c9cc2642fdeb8176a24415ec1540f013f | [
"Artistic-2.0"
]
| null | null | null | use lib 't/spec/packages';
use Test;
use Test::Util;
plan 39;
my @*MODULES; # needed for calling CompUnit::Repository::need directly
my $precomp-ext := $*VM.precomp-ext;
my $precomp-target := $*VM.precomp-target;
my @precomp-paths;
my @precompiled = Test::Util::run( q:to"--END--").lines;
use lib 't/spec/packages';
for <C A B> {
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name("Example::$_")));
say $comp-unit.precompiled;
}
--END--
is @precompiled.elems, 3;
is $_, 'True' for @precompiled;
# RT #122773
my @keys = Test::Util::run( q:to"--END--").lines;
use lib 't/spec/packages';
use Example::A;
use Example::B;
.say for Example::.keys.sort;
--END--
is-deeply @keys, [<A B C>], 'Diamond relationship';
my @precompiled2 = Test::Util::run( q:to"--END--").lines;
use lib 't/spec/packages';
for <T P D N S B G K C E F H R A U> {
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name("Example2::$_")));
say $comp-unit.precompiled;
}
--END--
is @precompiled2.elems, 15;
is $_, 'True' for @precompiled2;
# RT #123272
my @keys2 = Test::Util::run( q:to"--END--").lines;
use v6;
use lib 't/spec/packages';
use Example2::T;
use Example2::G;
use Example2::F;
use Example2::A;
use Example2::U;
.say for Example2::.keys.sort;
--END--
is-deeply @keys2, [<C D E F H K N P R S>], 'Twisty maze of dependencies, all different';
#?rakudo.jvm skip 'RT #122896'
#?rakudo.moar skip 'RT #122896'
{
is_run
'use lib "t/spec/packages";
use Example::C;
f();',
{ err => '',
out => '',
status => 0,
},
'precompile exported cached sub';
}
# RT #76456
{
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name<RT76456>));
ok $comp-unit.precompiled, 'precompiled a parameterized role';
}
#RT #122447
{
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name<RT122447>));
ok $comp-unit.precompiled, 'precompiled a sub with params returning a proxy';
}
#RT #115240
{
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name<RT115240>));
ok $comp-unit.precompiled, 'precomp curried role compose';
}
#RT #123276
{
my @precompiled = Test::Util::run( q:to"--END--").lines;
use lib 't/spec/packages';
my $name = 'RT123276';
for "{$name}", "{$name}::B::C1", "{$name}::B::C2" -> $module-name {
my $comp-unit = $*REPO.need(
CompUnit::DependencySpecification.new(:short-name($module-name))
);
say $comp-unit.precompiled;
}
--END--
is @precompiled.elems, 3, "tried to precompile all 3 modules";
is $_, 'True' for @precompiled;
my @keys = Test::Util::run( q:to"--END--").lines;
use lib 't/spec/packages';
use RT123276::B::C1;
use RT123276::B::C2;
say RT123276::B::C1.^methods
--END--
#?rakudo.jvm todo 'RT #123276'
#?rakudo.moar todo 'RT #123276'
is-deeply @keys, [<foo>], 'RT123276';
}
#RT #124162
{
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name<RT124162>));
ok $comp-unit.precompiled, 'precomp of native array parameterization';
}
{
my $module-name-a = 'InternArrayA';
my $output-path-a = "t/spec/packages/" ~ $module-name-a ~ '.pm.' ~ $precomp-ext;
unlink $output-path-a; # don't care if failed
is_run
'my constant VALUE = array[uint32].new;
sub a() is export { VALUE }',
{ err => '',
out => '',
status => 0,
},
:compiler-args[
'--target', $precomp-target,
'--output', $output-path-a,
],
"precomp of native array parameterization intern test (a)";
ok $output-path-a.IO.e, "did we create a $output-path-a";
my $module-name-b = 'InternArrayB';
my $output-path-b = "t/spec/packages/" ~ $module-name-b ~ '.pm.' ~ $precomp-ext;
unlink $output-path-b; # don't care if failed
is_run
'my constant VALUE = array[uint32].new;
sub b() is export { VALUE }',
{ err => '',
out => '',
status => 0,
},
:compiler-args[
'--target', $precomp-target,
'--output', $output-path-b,
],
"precomp of native array parameterization intern test (b)";
ok $output-path-b.IO.e, "did we create a $output-path-b";
#?rakudo.jvm todo 'no 6model parametrics interning yet'
#?rakudo.moar todo 'no 6model parametrics interning yet'
is_run
"use $module-name-a;
use $module-name-b;
print a().WHAT =:= b().WHAT",
{ err => '',
out => "True",
status => 0,
},
:compiler-args['-I', 't/spec/packages'],
'precompile load of both and identity check passed';
unlink $_ for $output-path-a, $output-path-b; # don't care if failed
}
# RT #125090
{
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name<RT125090>));
ok $comp-unit.precompiled, 'precomp of BEGIN using $*KERNEL and $*DISTRO';
}
# RT #125245
{
my $comp-unit = $*REPO.need(CompUnit::DependencySpecification.new(:short-name<RT125245>));
ok $comp-unit.precompiled, 'precomp of assignment to variable using subset type';
}
| 27.818653 | 104 | 0.585584 |
ed12468aea854a76006492432fc2b54eb3ed6a03 | 602 | pm | Perl | pollen/scribblings/third-tutorial-files/chess.html.pm | shhyou/pollen | 58e02d4eee22a064cc69374a6c9fd01d4db53af7 | [
"MIT"
]
| 1,031 | 2015-01-01T19:01:47.000Z | 2022-03-30T00:16:55.000Z | pollen/scribblings/third-tutorial-files/chess.html.pm | shhyou/pollen | 58e02d4eee22a064cc69374a6c9fd01d4db53af7 | [
"MIT"
]
| 218 | 2015-02-18T05:33:29.000Z | 2022-03-14T23:34:46.000Z | pollen/scribblings/third-tutorial-files/chess.html.pm | shhyou/pollen | 58e02d4eee22a064cc69374a6c9fd01d4db53af7 | [
"MIT"
]
| 88 | 2015-02-04T19:06:28.000Z | 2022-02-14T14:06:46.000Z | #lang pollen
◊h1{II. A Game of Chess}
And still she cried, and still the world pursues,
"Jug Jug" to dirty ears.
And other withered stumps of time
Were told upon the walls; staring forms
Leaned out, leaning, hushing the room enclosed.
Footsteps shuffled on the stair,
Under the firelight, under the brush, her hair
Spread out in fiery points
Glowed into words, then would be savagely still.
"My nerves are bad to-night. Yes, bad. Stay with me.
Speak to me. Why do you never speak? Speak.
What are you thinking of? What thinking? What?
I never know what you are thinking. Think." | 33.444444 | 54 | 0.73588 |
ed293a5ead536d7247142edaa6b74f2556016a07 | 2,414 | pm | Perl | auto-lib/Paws/Glue/DynamoDBTarget.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/Glue/DynamoDBTarget.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/Glue/DynamoDBTarget.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z | # Generated by default/object.tt
package Paws::Glue::DynamoDBTarget;
use Moose;
has Path => (is => 'ro', isa => 'Str');
has ScanAll => (is => 'ro', isa => 'Bool', request_name => 'scanAll', traits => ['NameInRequest']);
has ScanRate => (is => 'ro', isa => 'Num', request_name => 'scanRate', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::Glue::DynamoDBTarget
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::Glue::DynamoDBTarget object:
$service_obj->Method(Att1 => { Path => $value, ..., ScanRate => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::Glue::DynamoDBTarget object:
$result = $service_obj->Method(...);
$result->Att1->Path
=head1 DESCRIPTION
Specifies an Amazon DynamoDB table to crawl.
=head1 ATTRIBUTES
=head2 Path => Str
The name of the DynamoDB table to crawl.
=head2 ScanAll => Bool
Indicates whether to scan all the records, or to sample rows from the
table. Scanning all the records can take a long time when the table is
not a high throughput table.
A value of C<true> means to scan all records, while a value of C<false>
means to sample the records. If no value is specified, the value
defaults to C<true>.
=head2 ScanRate => Num
The percentage of the configured read capacity units to use by the Glue
crawler. Read capacity units is a term defined by DynamoDB, and is a
numeric value that acts as rate limiter for the number of reads that
can be performed on that table per second.
The valid values are null or a value between 0.1 to 1.5. A null value
is used when user does not provide a value, and defaults to 0.5 of the
configured Read Capacity Unit (for provisioned tables), or 0.25 of the
max configured Read Capacity Unit (for tables using on-demand mode).
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::Glue>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 28.4 | 102 | 0.730737 |
ed64969152cf8f960ac886670f9a9f347ccacec4 | 770 | pm | Perl | compiled/perl/MetaTags.pm | dgelessus/ci_targets | bb1a0d76b7673920c832e5332a7b257614fa0e1b | [
"MIT"
]
| 4 | 2017-04-08T12:55:11.000Z | 2020-12-05T21:09:31.000Z | compiled/perl/MetaTags.pm | dgelessus/ci_targets | bb1a0d76b7673920c832e5332a7b257614fa0e1b | [
"MIT"
]
| 7 | 2018-04-23T01:30:33.000Z | 2020-10-30T23:56:14.000Z | compiled/perl/MetaTags.pm | dgelessus/ci_targets | bb1a0d76b7673920c832e5332a7b257614fa0e1b | [
"MIT"
]
| 6 | 2017-04-08T11:41:14.000Z | 2020-10-30T22:47:31.000Z | # This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
use strict;
use warnings;
use IO::KaitaiStruct 0.009_000;
########################################################################
package MetaTags;
our @ISA = 'IO::KaitaiStruct::Struct';
sub from_file {
my ($class, $filename) = @_;
my $fd;
open($fd, '<', $filename) or return undef;
binmode($fd);
return new($class, IO::KaitaiStruct::Stream->new($fd));
}
sub new {
my ($class, $_io, $_parent, $_root) = @_;
my $self = IO::KaitaiStruct::Struct->new($_io);
bless $self, $class;
$self->{_parent} = $_parent;
$self->{_root} = $_root || $self;;
$self->_read();
return $self;
}
sub _read {
my ($self) = @_;
}
1;
| 19.25 | 98 | 0.541558 |
ed3b1c4aacb306c3c4eaef6fcda45a8af5d1e5d8 | 35,722 | pm | Perl | modules/EnsEMBL/Web/Component/VariationTable.pm | pblins/ensembl-webcode | 1b70534380de5e46f3778b03296ffad6eaf739db | [
"Apache-2.0"
]
| null | null | null | modules/EnsEMBL/Web/Component/VariationTable.pm | pblins/ensembl-webcode | 1b70534380de5e46f3778b03296ffad6eaf739db | [
"Apache-2.0"
]
| null | null | null | modules/EnsEMBL/Web/Component/VariationTable.pm | pblins/ensembl-webcode | 1b70534380de5e46f3778b03296ffad6eaf739db | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2022] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Component::VariationTable;
use strict;
use List::Util qw(max min);
use Bio::EnsEMBL::Variation::Utils::Config qw(%ATTRIBS);
use Bio::EnsEMBL::Variation::Utils::Constants qw(%VARIATION_CLASSES);
use Bio::EnsEMBL::Variation::Utils::VariationEffect qw($UPSTREAM_DISTANCE $DOWNSTREAM_DISTANCE);
use EnsEMBL::Web::NewTable::NewTable;
use EnsEMBL::Web::Constants;
use Bio::EnsEMBL::Variation::Utils::VariationEffect;
use Bio::EnsEMBL::Utils::Sequence qw(reverse_comp);
use Scalar::Util qw(looks_like_number);
use base qw(EnsEMBL::Web::Component::Variation);
our $TV_MAX = 100000;
sub _init {
my $self = shift;
$self->cacheable(0);
$self->ajaxable(1);
}
sub new_consequence_type {
my $self = shift;
my $tva = shift;
my $only_coding = shift;
my $overlap_consequences = $tva->get_all_OverlapConsequences || [];
# Sort by rank, with only one copy per consequence type
my @consequences = sort {$a->rank <=> $b->rank} (values %{{map {$_->label => $_} @{$overlap_consequences}}});
if ($only_coding) {
@consequences = grep { $_->rank < 18 } @consequences;
}
my @type;
foreach my $c (@consequences) {
push @type,$c->label;
}
return join('~',@type);
}
sub table_content {
my ($self,$callback) = @_;
my $hub = $self->hub;
my $icontext = $hub->param('context') || 100;
my $gene_object = $self->configure($icontext,'ALL');
my $object_type = $hub->type;
my $transcript;
$transcript = $hub->param('t') if $object_type eq 'Transcript';
my $phase = $callback->phase;
$transcript = $phase if $phase =~ s/^full-//;
my @transcripts;
if(defined $transcript) {
@transcripts = ($gene_object->get_transcript_by_stable_id($transcript));
} else {
@transcripts = sort { $a->stable_id cmp $b->stable_id } @{$gene_object->get_all_transcripts};
}
# get appropriate slice
my $slice = $self->object->Obj->feature_Slice->expand(
$Bio::EnsEMBL::Variation::Utils::VariationEffect::UPSTREAM_DISTANCE,
$Bio::EnsEMBL::Variation::Utils::VariationEffect::DOWNSTREAM_DISTANCE
);
my $exonic_types = $self->get_exonic_type_classes;
# Get the number of TranscriptVariations
my $tv_count = 0;
foreach my $transcript (@transcripts) {
$tv_count += $self->_count_transcript_variations($transcript->Obj);
}
my $vfs = $self->_get_variation_features($slice, $tv_count, $exonic_types);
return $self->variation_table($callback,'ALL',\@transcripts, $tv_count, $vfs);
}
sub content {
my $self = shift;
my $hub = $self->hub;
my $object_type = $hub->type;
my $consequence_type = $hub->param('sub_table');
my $icontext = $hub->param('context') || 100;
my $gene_object = $self->configure($icontext, $consequence_type);
my @transcripts = sort { $a->stable_id cmp $b->stable_id } @{$gene_object->get_all_transcripts};
my $html;
if ($object_type eq 'Transcript') {
my $t = $hub->param('t');
@transcripts = grep $_->stable_id eq $t, @transcripts;
}
my $thing = 'gene';
$thing = 'transcript' if $object_type eq 'Transcript';
my $slice = $self->object->Obj->feature_Slice->expand(
$Bio::EnsEMBL::Variation::Utils::VariationEffect::UPSTREAM_DISTANCE,
$Bio::EnsEMBL::Variation::Utils::VariationEffect::DOWNSTREAM_DISTANCE
);
# Get the number of TranscriptVariations
my $tv_count = 0;
foreach my $transcript (@transcripts) {
$tv_count += $self->_count_transcript_variations($transcript->Obj);
}
my $only_exonic = 0;
if ($tv_count > $TV_MAX ) {
my $bm_prefix = 'hsapiens_snp.default.snp';
my $bm_prefix2 = 'hsapiens_snp.default.filters';
my $biomart_link = $self->hub->species_defs->ENSEMBL_MART_ENABLED ? '/biomart/martview?VIRTUALSCHEMANAME=default'.
"&ATTRIBUTES=$bm_prefix.refsnp_id|$bm_prefix.refsnp_source|$bm_prefix.chr_name|$bm_prefix.chrom_start|$bm_prefix.chrom_end|".
"$bm_prefix.minor_allele_freq|$bm_prefix.minor_allele|$bm_prefix.clinical_significance|$bm_prefix.allele|".
"$bm_prefix.consequence_type_tv|$bm_prefix.consequence_allele_string|$bm_prefix.ensembl_peptide_allele|$bm_prefix.translation_start|".
"$bm_prefix.translation_end|$bm_prefix.polyphen_prediction|$bm_prefix.polyphen_score|".
"$bm_prefix.sift_prediction|$bm_prefix.sift_score|$bm_prefix.ensembl_transcript_stable_id|$bm_prefix.validated".
"&FILTERS=$bm_prefix2.chromosomal_region."".$slice->seq_region_name.":".$slice->start.":".$slice->end.""".
'&VISIBLEPANEL=resultspanel' : '';
my $vf_count = $self->_count_variation_features($slice);
my $warning_content = "There are ".$self->thousandify($vf_count)." variants for this $object_type, which is too many to display in this page, so <b>only exonic variants</b> are displayed.";
$warning_content .= " Please use <a href=\"$biomart_link\">BioMart</a> to extract all data." if ($biomart_link ne '');
$html .= $self->_warning( "Too much data to display", $warning_content);
$only_exonic = 1;
}
else {
$html .= $self->_hint('snp_table', 'Variant table', "This table shows known variants for this $thing. Use the 'Consequence Type' filter to view a subset of these.");
}
my $table = $self->make_table(\@transcripts, $only_exonic);
$html .= $table->render($self->hub,$self);
return $html;
}
sub sift_poly_classes {
my ($self,$table) = @_;
my $sp_classes = EnsEMBL::Web::Constants::PREDICTIONS_CLASSES;
foreach my $column_name (qw(sift polyphen cadd revel meta_lr mutation_assessor)) {
my $value_column = $table->column("${column_name}_value");
my $class_column = $table->column("${column_name}_class");
next unless $value_column and $class_column;
$value_column->editorial_type('lozenge');
$value_column->editorial_source("${column_name}_class");
foreach my $pred (keys %$sp_classes) {
$value_column->editorial_cssclass($pred,"score_$sp_classes->{$pred}");
$value_column->editorial_helptip($pred,$pred);
}
# TODO: make decorators accessible to filters. Complexity is that
# many decorators (including these) are multi-column.
my $lozenge = qq(<div class="score score_%s score_example">%s</div>);
my $left = { sift => 'bad', polyphen => 'good', cadd => 'good', revel => 'good', meta_lr => 'good', mutation_assessor => 'good'}->{$column_name};
my $right = { sift => 'good', polyphen => 'bad', cadd => 'bad', revel => 'bad', meta_lr => 'bad', 'mutation_assessor' => 'bad'}->{$column_name};
$value_column->filter_endpoint_markup(0,sprintf($lozenge,$left,"0"));
$value_column->filter_endpoint_markup(1,sprintf($lozenge,$right,"1"));
my $slider_class =
{ sift => 'redgreen', polyphen => 'greenred', cadd => 'greenred', revel => 'greenred', meta_lr => 'greenred', mutation_assessor => 'greenred'}->{$column_name};
$value_column->filter_slider_class("newtable_slider_$slider_class");
}
}
sub evidence_classes {
my ($self,$table) = @_;
my @evidence_order = reverse @{$ATTRIBS{'evidence'}};
my %evidence_key;
$evidence_key{$_} = "B".lc $_ for(@evidence_order);
$evidence_key{'1000Genomes'} = "A0001";
$evidence_key{'HapMap'} = "A0002";
@evidence_order =
sort { $evidence_key{$a} cmp $evidence_key{$b} } @evidence_order;
my %evidence_order;
$evidence_order{$evidence_order[$_]} = $_ for(0..$#evidence_order);
my $evidence_col = $table->column('status');
foreach my $ev (keys %evidence_order) {
my $evidence_label = $ev;
$evidence_label =~ s/_/ /g;
$evidence_col->icon_url($ev,sprintf("%s/val/evidence_%s.png",$self->img_url,$ev));
$evidence_col->icon_helptip($ev,$evidence_label);
$evidence_col->icon_export($ev,$evidence_label);
$evidence_col->icon_order($ev,$evidence_order{$ev});
}
}
sub class_classes {
my ($self,$table) = @_;
my $classes_col = $table->column('class');
$classes_col->filter_add_baked('somatic','Only Somatic','Only somatic variant classes');
$classes_col->filter_add_baked('not_somatic','Not Somatic','Exclude somatic variant classes');
my $i = 0;
foreach my $term (qw(display_term somatic_display_term)) {
foreach my $class (sort { ($a->{$term} !~ /SNP|SNV/ cmp $b->{$term} !~ /SNP|SNV/) || $a->{$term} cmp $b->{$term} } values %VARIATION_CLASSES) {
next if ($class->{'type'} eq 'sv');
$classes_col->icon_order($class->{$term},$i++);
if($term eq 'somatic_display_term') {
$classes_col->filter_bake_into($class->{$term},'somatic');
} else {
$classes_col->filter_bake_into($class->{$term},'not_somatic');
}
}
}
}
sub clinsig_classes {
my ($self,$table) = @_;
# This order is a guess at the most useful and isn't strongly motivated.
# Feel free to rearrange.
my @clinsig_order = reverse qw(
pathogenic protective likely-pathogenic risk-factor drug-response
confers-sensitivity histocompatibility association likely-benign
benign other not-provided uncertain-significance
);
my %clinsig_order;
$clinsig_order{$clinsig_order[$_]} = $_ for(0..$#clinsig_order);
my $clinsig_col = $table->column('clinsig');
foreach my $cs_img (keys %clinsig_order) {
my $cs = $cs_img;
$cs =~ s/-/ /g;
$clinsig_col->icon_url($cs,sprintf("%s/val/clinsig_%s.png",$self->img_url,$cs_img));
$clinsig_col->icon_helptip($cs,$cs);
$clinsig_col->icon_export($cs,$cs);
$clinsig_col->icon_order($cs,$clinsig_order{$cs_img});
}
$clinsig_col->filter_maybe_blank(1);
}
sub snptype_classes {
my ($self,$table,$hub,$only_exonic) = @_;
my $species_defs = $hub->species_defs;
my $var_styles = $species_defs->colour('variation');
my @all_cons = grep $_->feature_class =~ /transcript/i, values %Bio::EnsEMBL::Variation::Utils::Constants::OVERLAP_CONSEQUENCES;
my $column = $table->column('snptype');
$column->filter_add_baked('lof','PTV','Select all protein truncating variant types');
$column->filter_add_baked('lof_missense','PTV & Missense','Select all protein truncating and missense variant types');
$column->filter_add_baked('exon','Only Exonic','Select exon and splice region variant types') if (!$only_exonic);
$column->filter_add_bakefoot('PTV = Protein Truncating Variant');
my @lof = qw(stop_gained frameshift_variant splice_donor_variant
splice_acceptor_variant);
foreach my $con (@all_cons) {
next if $con->SO_accession =~ /x/i;
next if ($only_exonic and $con->rank >= 18);
my $so_term = lc $con->SO_term;
my $colour = $var_styles->{$so_term||'default'}->{'default'};
$column->icon_export($con->label,$con->label);
$column->icon_order($con->label,$con->rank);
$column->icon_helptip($con->label,$con->description);
$column->icon_coltab($con->label,$colour);
if(grep { $_ eq $so_term } @lof) {
$column->filter_bake_into($con->label,'lof');
$column->filter_bake_into($con->label,'lof_missense');
}
if($so_term eq 'missense_variant') {
$column->filter_bake_into($con->label,'lof_missense');
}
if(!$only_exonic and $con->rank < 18) { # TODO: specify this properly
$column->filter_bake_into($con->label,'exon');
}
}
}
sub get_exonic_type_classes {
my $self = shift;
my @all_cons = grep $_->feature_class =~ /transcript/i, values %Bio::EnsEMBL::Variation::Utils::Constants::OVERLAP_CONSEQUENCES;
my @exonic_types = map { $_->SO_term } grep { $_->rank < 18 } @all_cons;
return \@exonic_types;
}
sub make_table {
my ($self,$transcripts,$only_exonic) = @_;
my $hub = $self->hub;
my $glossary = $hub->glossary_lookup;
my $table = EnsEMBL::Web::NewTable::NewTable->new($self);
my $sd = $hub->species_defs->get_config($hub->species, 'databases')->{'DATABASE_VARIATION'};
my $is_lrg = $self->isa('EnsEMBL::Web::Component::LRG::VariationTable');
my @exclude;
push @exclude,'gmaf','gmaf_freq','gmaf_allele' unless $hub->species eq 'Homo_sapiens';
push @exclude,'HGVS' unless $self->param('hgvs') eq 'on';
if($is_lrg) {
push @exclude,'Transcript';
} else {
push @exclude,'Submitters','LRGTranscript','LRG';
}
push @exclude,'sift_sort','sift_class','sift_value' unless $sd->{'SIFT'};
unless($hub->species eq 'Homo_sapiens') {
push @exclude,'polyphen_sort','polyphen_class','polyphen_value', 'cadd_sort', 'cadd_class', 'cadd_value', 'revel_sort', 'revel_class', 'revel_value', 'meta_lr_sort', 'meta_lr_class', 'meta_lr_value', 'mutation_assessor_sort', 'mutation_assessor_class', 'mutation_assessor_value';
}
push @exclude,'Transcript' if $hub->type eq 'Transcript';
my @columns = ({
_key => 'ID', _type => 'string no_filter',
label => "Variant ID",
width => 2,
helptip => 'Variant identifier',
link_url => {
type => 'Variation',
action => 'Summary',
vf => ["vf"],
v => undef # remove the 'v' param from the links if already present
}
},{
_key => 'vf', _type => 'string unshowable no_filter'
},{
_key => 'location', _type => 'position unshowable',
label => 'Location', sort_for => 'chr',
state_filter_ephemeral => 1,
},{
_key => 'chr', _type => 'string no_filter',
label => $is_lrg?'bp':'Chr: bp',
width => 1.75,
helptip => $glossary->{'Chr:bp'},
},{
_key => 'vf_allele', _type => 'string no_filter unshowable',
},{
_key => 'Alleles', _type => 'string no_filter no_sort',
label => "Alle\fles",
helptip => 'Alternative nucleotides',
toggle_separator => '/',
toggle_maxlen => 20,
toggle_highlight_column => 'vf_allele',
toggle_highlight_over => 2
},{
_key => 'gmaf_allele', _type => 'string no_filter unshowable',
},{
_key => 'gmaf_freq', _type => 'numeric unshowable',
sort_for => 'gmaf',
filter_label => 'Global MAF',
filter_range => [0,0.5],
filter_fixed => 1,
filter_logarithmic => 1,
primary => 1,
},{
_key => 'gmaf', _type => 'string no_filter', label => "Glo\fbal MAF",
helptip => $glossary->{'Global MAF'},
also_cols => 'gmaf_allele',
},{
_key => 'HGVS', _type => 'string no_filter', label => 'HGVS name(s)',
width => 1.75
},{
_key => 'class', _type => 'iconic', label => 'Class',
width => 2,
helptip => $glossary->{'Class'},
filter_keymeta_enum => 1,
filter_maybe_blank => 1,
filter_sorted => 1,
},{
_key => 'Source', _type => 'iconic', label => "Sour\fce",
width => 1.25,
helptip => $glossary->{'Source'},
filter_maybe_blank => 1,
},{
_key => 'Submitters', _type => 'string no_filter',
label => 'Submitters',
width => 1.75
# export_options => { split_newline => 2 },
},{
_key => 'status', _type => 'iconic', label => "Evid\fence",
width => 1.5,
helptip => $glossary->{'Evidence status (variant)'},
filter_keymeta_enum => 1,
filter_maybe_blank => 1,
filter_sorted => 1,
},{
_key => 'clinsig', _type => 'iconic', label => "Clin. Sig.",
helptip => 'Clinical significance',
filter_label => 'Clinical Significance',
filter_keymeta_enum => 1,
filter_sorted => 1,
},{
_key => 'snptype', _type => 'iconic', label => "Conseq. Type",
filter_label => 'Consequences',
filter_sorted => 1,
width => 1.5,
helptip => 'Consequence type',
sort_down_first => 1,
filter_keymeta_enum => 1,
primary => 4,
},{
_key => 'aachange', _type => 'string no_filter no_sort', label => "AA",
helptip => "Resulting amino acid(s)"
},{
_key => 'aacoord_sort', _type => 'integer unshowable',
label => 'AA coord', sort_for => 'aacoord',
filter_blank_button => 1,
state_filter_ephemeral => 1,
},{
_key => 'aacoord', _type => 'string no_filter', label => "AA co\ford",
helptip => 'Amino Acid Coordinates',
},{
_key => 'sift_sort', _type => 'numeric no_filter unshowable',
sort_for => 'sift_value',
sort_down_first => 1,
},{
_key => 'sift_class', _type => 'iconic no_filter unshowable',
},{
_key => 'sift_value', _type => 'numeric',
label => "SI\aFT",
helptip => $glossary->{'SIFT'},
filter_range => [0,1],
filter_fixed => 1,
filter_blank_button => 1,
primary => 2,
},{
_key => 'polyphen_sort', _type => 'numeric no_filter unshowable',
sort_for => 'polyphen_value',
},{
_key => 'polyphen_class', _type => 'iconic no_filter unshowable',
},{
_key => 'polyphen_value', _type => 'numeric',
label => "Poly\fPhen",
helptip => $glossary->{'PolyPhen'},
filter_range => [0,1],
filter_fixed => 1,
filter_blank_button => 1,
primary => 3,
},{
_key => 'cadd_sort', _type => 'numeric no_filter unshowable',
sort_for => 'cadd_value',
},{
_key => 'cadd_class', _type => 'iconic no_filter unshowable',
},{
_key => 'cadd_value', _type => 'numeric',
label => "CADD",
helptip => $glossary->{'CADD'},
filter_range => [0,100],
filter_fixed => 1,
filter_blank_button => 1,
},{
_key => 'revel_sort', _type => 'numeric no_filter unshowable',
sort_for => 'revel_value',
},{
_key => 'revel_class', _type => 'iconic no_filter unshowable',
},{
_key => 'revel_value', _type => 'numeric',
label => "REVEL",
helptip => $glossary->{'REVEL'},
filter_range => [0,1],
filter_fixed => 1,
filter_blank_button => 1,
},{
_key => 'meta_lr_sort', _type => 'numeric no_filter unshowable',
sort_for => 'meta_lr_value',
},{
_key => 'meta_lr_class', _type => 'iconic no_filter unshowable',
},{
_key => 'meta_lr_value', _type => 'numeric',
label => "MetaLR",
helptip => $glossary->{'MetaLR'},
filter_range => [0,1],
filter_fixed => 1,
filter_blank_button => 1,
},{
_key => 'mutation_assessor_sort', _type => 'numeric no_filter unshowable',
sort_for => 'mutation_assessor_value',
},{
_key => 'mutation_assessor_class', _type => 'iconic no_filter unshowable',
},{
_key => 'mutation_assessor_value', _type => 'numeric',
label => "Mutation Assessor",
helptip => $glossary->{'MutationAssessor'},
filter_range => [0,1],
filter_fixed => 1,
filter_blank_button => 1,
},{
_key => 'LRG', _type => 'string unshowable',
label => "LRG",
},{
_key => 'Transcript', _type => 'iconic',
width => 2,
helptip => $glossary->{'Transcript'},
link_url => {
type => 'Transcript',
action => 'Summary',
t => ["Transcript"]
},
state_filter_ephemeral => 1,
},{
_key => 'LRGTranscript', _type => 'string',
width => 2,
helptip => $glossary->{'Transcript'},
link_url => {
type => 'LRG',
action => 'Summary',
lrgt => ["LRGTranscript"],
lrg => ["LRG"],
__clear => 1
}
});
$table->add_columns(\@columns,\@exclude);
$self->evidence_classes($table);
$self->clinsig_classes($table);
$self->class_classes($table);
$self->snptype_classes($table,$self->hub,$only_exonic);
$self->sift_poly_classes($table);
my (@lens,@starts,@ends,@seq);
foreach my $t (@$transcripts) {
my $p = $t->translation_object;
push @lens,$p->length if $p;
push @starts,$t->seq_region_start;
push @ends,$t->seq_region_end;
push @seq,$t->seq_region_name;
}
if(@lens) {
my $aa_col = $table->column('aacoord_sort');
$aa_col->filter_range([1,max(@lens)]);
$aa_col->filter_fixed(1);
}
if(@starts && @ends) {
my $loc_col = $table->column('location');
$loc_col->filter_seq_range($seq[0],[min(@starts)-$UPSTREAM_DISTANCE,
max(@ends)+$DOWNSTREAM_DISTANCE]);
$loc_col->filter_fixed(1);
}
# Separate phase for each transcript speeds up gene variation table
my $icontext = $self->hub->param('context') || 100;
my $gene_object = $self->configure($icontext,'ALL');
my $object_type = $self->hub->type;
my @transcripts = sort { $a->stable_id cmp $b->stable_id } @{$gene_object->get_all_transcripts};
if ($object_type eq 'Transcript') {
my $t = $hub->param('t');
@transcripts = grep $_->stable_id eq $t, @transcripts;
}
$table->add_phase("taster",'taster',[0,50]);
$table->add_phase("full-$_",'full') for(map { $_->stable_id } @transcripts);
return $table;
}
sub variation_table {
my ($self,$callback,$consequence_type, $transcripts, $tv_count, $vfs) = @_;
my $hub = $self->hub;
my $show_scores = $hub->param('show_scores');
my ($base_trans_url, $url_transcript_prefix, %handles);
my $num = 0;
# create some URLs - quicker than calling the url method for every variant
my $base_url = $hub->url({
type => 'Variation',
action => 'Summary',
vf => undef,
v => undef,
});
# get appropriate slice
my $slice = $self->object->Obj->feature_Slice->expand(
$Bio::EnsEMBL::Variation::Utils::VariationEffect::UPSTREAM_DISTANCE,
$Bio::EnsEMBL::Variation::Utils::VariationEffect::DOWNSTREAM_DISTANCE
);
my $var_styles = $hub->species_defs->colour('variation');
my $exonic_types = $self->get_exonic_type_classes;
my $tva = $hub->get_adaptor('get_TranscriptVariationAdaptor', 'variation');
if ($self->isa('EnsEMBL::Web::Component::LRG::VariationTable')) {
my $gene_stable_id = $transcripts->[0] && $transcripts->[0]->gene ? $transcripts->[0]->gene->stable_id : undef;
$url_transcript_prefix = 'lrgt';
my $vfa = $hub->get_adaptor('get_VariationFeatureAdaptor', 'variation');
my @var_ids;
foreach my $transcript (@$transcripts) {
# get TVs
my $tvs = $self->_get_transcript_variations($transcript->Obj, $tv_count, $exonic_types);
foreach my $tv (@$tvs) {
my $raw_id = $tv->{_variation_feature_id};
my $vf = $vfs->{$raw_id};
next unless $vf;
push @var_ids,$vf->get_Variation_dbID();
}
}
%handles = %{$vfa->_get_all_subsnp_handles_from_variation_ids(\@var_ids)};
} else {
$url_transcript_prefix = 't';
}
ROWS: foreach my $transcript (@$transcripts) {
my $tr_id = $transcript ? $transcript->Obj->dbID : 0;
my $tvs = $self->_get_transcript_variations($transcript->Obj, $tv_count, $exonic_types);
my $transcript_stable_id = $transcript->stable_id;
my $gene = $transcript->gene;
my $lrg_correction = 0;
my $lrg_strand = 0;
if($self->isa('EnsEMBL::Web::Component::LRG::VariationTable')) {
my $gs = $gene->slice->project("chromosome");
foreach my $ps(@{$gs}) {
$lrg_strand = $ps->to_Slice->strand;
if($lrg_strand>0) {
$lrg_correction = 1-$ps->to_Slice->start;
} else {
$lrg_correction = $ps->to_Slice->end+1;
}
}
}
my $chr = $transcript->seq_region_name;
my @tv_sorted;
foreach my $tv (@$tvs) {
my $vf = $self->_get_vf_from_tv($tv, $vfs, $slice, $tv_count, $exonic_types);
next unless $vf;
push @tv_sorted,[$tv,$vf->seq_region_start];
}
@tv_sorted = map { $_->[0] } sort { $a->[1] <=> $b->[1] } @tv_sorted;
foreach my $tv (@tv_sorted) {
my $vf = $self->_get_vf_from_tv($tv, $vfs, $slice, $tv_count, $exonic_types);
next unless $vf;
my ($start, $end) = ($vf->seq_region_start,$vf->seq_region_end);
if($lrg_strand) {
$start = $start*$lrg_strand + $lrg_correction;
$end = $end*$lrg_strand + $lrg_correction;
($start,$end) = ($end,$start) if $lrg_strand < 0;
}
my $tvas = $tv->get_all_alternate_TranscriptVariationAlleles;
foreach my $tva (@$tvas) {
next if $callback->free_wheel();
# this isn't needed anymore, I don't think!!!
# thought I'd leave this indented though to keep the diff neater
if (1) {#$tva && $end >= $tr_start - $extent && $start <= $tr_end + $extent) {
my $row;
my $variation_name = $vf->variation_name;
my $vf_dbID = $vf->dbID;
$row->{'ID'} = $variation_name;
my $source = $vf->source_name;
$row->{'Source'} = $source;
unless($callback->phase eq 'outline') {
my $evidences = $vf->get_all_evidence_values || [];
my $clin_sigs = $vf->get_all_clinical_significance_states || [];
my $var_class = $vf->var_class;
my $translation_start = $tv->translation_start;
my $translation_end = $tv->translation_end;
my $aachange = $translation_start ? $tva->pep_allele_string : '';
my $aacoord = $translation_start ? ($translation_start eq $translation_end ? $translation_start : "$translation_start-$translation_end") : '';
my $aacoord_sort = $translation_start ? $translation_start : '';
my $trans_url = ";$url_transcript_prefix=$transcript_stable_id";
my $vf_allele = $tva->variation_feature_seq;
my $allele_string = $vf->allele_string;
# Reverse complement if it's a LRG table with a LRG mapping to the reverse strand
if ($self->isa('EnsEMBL::Web::Component::LRG::VariationTable') && $lrg_strand == -1) {
my @alleles = split('/',$allele_string);
foreach my $l_allele (@alleles) {
next if ($l_allele !~ /^[ATGCN]+$/);
reverse_comp(\$l_allele);
}
$allele_string = join('/',@alleles);
}
# Sort out consequence type string
my $only_coding = $tv_count > $TV_MAX ? 1 : 0;
my $type = $self->new_consequence_type($tva, $only_coding);
my $sifts = $self->classify_sift_polyphen($tva->sift_prediction, $tva->sift_score);
my $polys = $self->classify_sift_polyphen($tva->polyphen_prediction, $tva->polyphen_score);
my $cadds = $self->classify_score_prediction($tva->cadd_prediction, $tva->cadd_score);
my $revels = $self->classify_score_prediction($tva->dbnsfp_revel_prediction, $tva->dbnsfp_revel_score);
my $meta_lrs = $self->classify_score_prediction($tva->dbnsfp_meta_lr_prediction, $tva->dbnsfp_meta_lr_score);
my $mutation_assessors = $self->classify_score_prediction($tva->dbnsfp_mutation_assessor_prediction, $tva->dbnsfp_mutation_assessor_score);
# Adds LSDB/LRG sources
if ($self->isa('EnsEMBL::Web::Component::LRG::VariationTable')) {
my $var = $vf->variation;
my $syn_sources = $var->get_all_synonym_sources;
foreach my $s_source (@$syn_sources) {
next if $s_source !~ /LSDB|LRG/;
my ($synonym) = $var->get_all_synonyms($s_source);
$source .= ', ' . $hub->get_ExtURL_link($s_source, $s_source, $synonym);
}
}
my $gmaf = $vf->minor_allele_frequency; # global maf
my $gmaf_freq;
my $gmaf_allele;
if (defined $gmaf) {
$gmaf_freq = $gmaf;
$gmaf = ($gmaf < 0.001) ? '< 0.001' : sprintf("%.3f",$gmaf);
$gmaf_allele = $vf->minor_allele;
}
my $status = join('~',@$evidences);
my $clin_sig = join("~",@$clin_sigs);
my $transcript_name = ($url_transcript_prefix eq 'lrgt') ? $transcript->Obj->external_name : $transcript->version ? $transcript_stable_id.".".$transcript->version : $transcript_stable_id;
my $more_row = {
vf => $vf_dbID,
class => $var_class,
Alleles => $allele_string,
vf_allele => $vf_allele,
Ambiguity => $vf->ambig_code,
gmaf => $gmaf || '-',
gmaf_freq => $gmaf_freq || '',
gmaf_allele => $gmaf_allele,
status => $status,
clinsig => $clin_sig,
chr => "$chr:" . ($start > $end ? " between $end & $start" : "$start".($start == $end ? '' : "-$end")),
location => "$chr:".($start>$end?$end:$start),
Submitters => %handles && defined($handles{$vf->{_variation_id}}) ? join(", ", @{$handles{$vf->{_variation_id}}}) : undef,
snptype => $type,
Transcript => $transcript_name,
LRGTranscript => $transcript_name,
LRG => $gene->stable_id,
aachange => $aachange,
aacoord => $aacoord,
aacoord_sort => $aacoord_sort,
sift_sort => $sifts->[0],
sift_class => $sifts->[1],
sift_value => $sifts->[2],
polyphen_sort => $polys->[0],
polyphen_class => $polys->[1],
polyphen_value => $polys->[2],
cadd_sort => $cadds->[0],
cadd_class => $cadds->[1],
cadd_value => $cadds->[2],
revel_sort => $revels->[0],
revel_class => $revels->[1],
revel_value => $revels->[2],
meta_lr_sort => $meta_lrs->[0],
meta_lr_class => $meta_lrs->[1],
meta_lr_value => $meta_lrs->[2],
mutation_assessor_sort => $mutation_assessors->[0],
mutation_assessor_class => $mutation_assessors->[1],
mutation_assessor_value => $mutation_assessors->[2],
HGVS => $self->param('hgvs') eq 'on' ? ($self->get_hgvs($tva) || '-') : undef,
};
$row = { %$row, %$more_row };
}
$num++;
$callback->add_row($row);
last ROWS if $callback->stand_down;
}
}
}
}
}
sub _get_transcript_variations {
my $self = shift;
my $tr = shift;
my $tv_count = shift;
my $exonic_types = shift;
my $tr_id = $tr ? $tr->dbID : 0;
my $cache = $self->{_transcript_variations} ||= {};
if(!exists($cache->{$tr_id})) {
my $slice = $tr->feature_Slice;
if ($tv_count <= $TV_MAX ) {
$slice = $slice->expand(
$Bio::EnsEMBL::Variation::Utils::VariationEffect::UPSTREAM_DISTANCE,
$Bio::EnsEMBL::Variation::Utils::VariationEffect::DOWNSTREAM_DISTANCE
);
}
my $vfs = $self->_get_variation_features($slice, $tv_count, $exonic_types);
my $tva = $self->hub->get_adaptor('get_TranscriptVariationAdaptor', 'variation');
my @tvs = ();
# deal with vfs with (from database) and without dbid (from vcf)
my $have_vfs_with_id = 0;
foreach my $vf(values %$vfs) {
if(looks_like_number($vf->dbID)) {
$have_vfs_with_id = 1;
}
else {
push @tvs, @{$vf->get_all_TranscriptVariations([$tr])};
}
}
if($have_vfs_with_id) {
if ($tv_count > $TV_MAX ) {
push @tvs, @{$tva->fetch_all_by_Transcripts_SO_terms([$tr],$exonic_types)};
push @tvs, @{$tva->fetch_all_somatic_by_Transcripts_SO_terms([$tr],$exonic_types)};
}
else {
push @tvs, @{$tva->fetch_all_by_Transcripts([$tr])};
push @tvs, @{$tva->fetch_all_somatic_by_Transcripts([$tr])};
}
}
$cache->{$tr_id} = \@tvs;
}
return $cache->{$tr_id};
}
sub _get_variation_features {
my $self = shift;
my $slice = shift;
my $tv_count = shift;
my $exonic_types = shift;
if(!exists($self->{_variation_features})) {
my $vfa = $self->hub->get_adaptor('get_VariationFeatureAdaptor', 'variation');
if ($tv_count > $TV_MAX) {
# No need to have the slice expanded to upstream/downstream
$slice = $self->object->Obj->feature_Slice;
$self->{_variation_features} = { map {$_->dbID => $_} (@{ $vfa->fetch_all_by_Slice_SO_terms($slice,$exonic_types) }, @{ $vfa->fetch_all_somatic_by_Slice_SO_terms($slice,$exonic_types) })};
}
else {
$self->{_variation_features} = { map {$_->dbID => $_} (@{ $vfa->fetch_all_by_Slice($slice) }, @{ $vfa->fetch_all_somatic_by_Slice($slice) })};
}
}
return $self->{_variation_features};
}
sub _count_variation_features {
my $self = shift;
my $slice = shift;
my $vfa = $self->hub->get_adaptor('get_VariationFeatureAdaptor', 'variation');
return $vfa->count_by_Slice_constraint($slice);
}
sub _count_transcript_variations {
my $self = shift;
my $tr = shift;
my $tva = $self->hub->get_adaptor('get_TranscriptVariationAdaptor', 'variation');
return $tva->count_all_by_Transcript($tr);
}
sub _get_vf_from_tv {
my ($self, $tv, $vfs, $slice, $tv_count, $exonic_types) = @_;
my $vf;
if(my $raw_id = $tv->{_variation_feature_id}) {
$vfs ||= $self->_get_variation_features($slice, $tv_count, $exonic_types);
$vf = $vfs->{$raw_id};
}
else {
$vf = $tv->variation_feature;
}
return $vf;
}
sub configure {
my ($self, $context, $consequence) = @_;
my $object = $self->object;
my $object_type = $self->hub->type;
my $extent = $context eq 'FULL' ? 5000 : $context;
my %cons = %Bio::EnsEMBL::Variation::Utils::Constants::OVERLAP_CONSEQUENCES;
my %selected_so = map { $_ => 1 } defined $consequence && $consequence ne 'ALL' ? split /\,/, $consequence : (); # map the selected consequence type to SO terms
my @so_terms = keys %selected_so;
my ($gene_object, $transcript_object);
if ($object->isa('EnsEMBL::Web::Object::Gene')){ #|| $object->isa('EnsEMBL::Web::Object::LRG')){
$gene_object = $object;
} elsif ($object->isa('EnsEMBL::Web::Object::LRG')){
my @genes = @{$object->Obj->get_all_Genes('LRG_import')||[]};
my $gene = $genes[0];
my $factory = $self->builder->create_factory('Gene');
$factory->createObjects($gene);
$gene_object = $factory->object;
} else {
$transcript_object = $object;
$gene_object = $self->hub->core_object('gene');
}
$gene_object->get_gene_slices(
undef,
[ 'context', 'normal', '100%' ],
[ 'gene', 'normal', '33%' ],
[ 'transcripts', 'munged', $extent ]
);
return $gene_object;
}
sub get_hgvs {
my ($self, $tva) = @_;
my $hgvs_c = $tva->hgvs_transcript;
my $hgvs_p = $tva->hgvs_protein;
my $hgvs;
if ($hgvs_c) {
if (length $hgvs_c > 35) {
my $display_hgvs_c = substr($hgvs_c, 0, 35) . '...';
$display_hgvs_c .= $self->trim_large_string($hgvs_c, 'hgvs_c_' . $tva->dbID);
$hgvs_c = $display_hgvs_c;
}
$hgvs .= $hgvs_c;
}
if ($hgvs_p) {
if (length $hgvs_p > 35) {
my $display_hgvs_p = substr($hgvs_p, 0, 35) . '...';
$display_hgvs_p .= $self->trim_large_string($hgvs_p, 'hgvs_p_'. $tva->dbID);
$hgvs_p = $display_hgvs_p;
}
$hgvs .= "<br />$hgvs_p";
}
return $hgvs;
}
sub memo_argument {
my ($self) = @_;
return {
url => $self->hub->url
};
}
1;
| 36.302846 | 283 | 0.605845 |
ed6042e04960aeff9d0b3225232d066c7d385327 | 5,883 | pm | Perl | lib/Rex/Commands/Partition.pm | ironcamel/Rex | 896a38aa0546388cc3eb288b077f0303b2452161 | [
"Apache-2.0"
]
| null | null | null | lib/Rex/Commands/Partition.pm | ironcamel/Rex | 896a38aa0546388cc3eb288b077f0303b2452161 | [
"Apache-2.0"
]
| null | null | null | lib/Rex/Commands/Partition.pm | ironcamel/Rex | 896a38aa0546388cc3eb288b077f0303b2452161 | [
"Apache-2.0"
]
| 1 | 2018-10-17T20:58:08.000Z | 2018-10-17T20:58:08.000Z | #
# (c) Jan Gehring <jan.gehring@gmail.com>
#
# vim: set ts=3 sw=3 tw=0:
# vim: set expandtab:
=head1 NAME
Rex::Commands::Partition - Partition module
=head1 DESCRIPTION
With this Module you can partition your harddrive.
=head1 SYNOPSIS
use Rex::Commands::Partition;
=head1 EXPORTED FUNCTIONS
=over 4
=cut
package Rex::Commands::Partition;
use strict;
use warnings;
require Rex::Exporter;
use base qw(Rex::Exporter);
use vars qw(@EXPORT);
use Data::Dumper;
use Rex::Logger;
use Rex::Commands::Run;
use Rex::Commands::File;
use Rex::Commands::LVM;
use Rex::Commands::Fs;
@EXPORT = qw(clearpart partition);
=item clearpart($drive)
Clear partitions on $drive.
clearpart "sda";
clearpart "sda",
initialize => "gpt";
=cut
sub clearpart {
my ($disk, %option) = @_;
if($option{initialize}) {
# will destroy partition table
run "parted -s /dev/$disk mklabel " . $option{initialize};
if($? != 0) {
die("Error setting disklabel from $disk to $option{initialize}");
}
if($option{initialize} eq "gpt") {
Rex::Logger::info("Creating bios boot partition");
partition("none",
fstype => "non-fs",
ondisk => $disk,
size => "1");
run "parted /dev/$disk set 1 bios_grub on";
}
}
else {
my @partitions = grep { /$disk\d+$/ } split /\n/, cat "/proc/partitions";
for my $part_line (@partitions) {
my ($num, $part) = ($part_line =~ m/\d+\s+(\d+)\s+\d+\s(.*)$/);
Rex::Logger::info("Removing $part");
run "parted -s /dev/$disk rm $num";
}
}
}
=item partition($mountpoint, %option)
Create a partition with mountpoint $mountpoint.
partition "/",
fstype => "ext3",
size => 15000,
ondisk => "sda",
type => "primary";
partition "none",
type => "extended",
ondisk => "sda",
grow => 1,
mount => TRUE,
partition "swap",
fstype => "swap",
type => "logical",
ondisk => "sda",
size => 8000;
partition "none",
lvm => 1,
type => "primary",
size => 15000,
ondisk => "vda";
partition "/",
fstype => "ext3",
size => 10000,
onvg => "vg0";
=cut
sub partition {
# Create one partition on a disk (and optionally format / mount it, or add
# it to an LVM volume group).  See the POD above for the supported options:
#   ondisk (required), type (default "primary"), size (MB), grow, boot,
#   vg, fstype, label (and the historical misspelling "lable"),
#   mount, mount_persistent.
# Returns the created partition's device name without /dev/ (e.g. "sda3").
my ($mountpoint, %option) = @_;
$option{type} ||= "primary"; # primary is default
# info:
# disk size, partition start, partition end is in MB
unless($option{ondisk}) {
die("You have to specify ,,ondisk''.");
}
my $disk = $option{ondisk};
# Ask parted for the current partition list; data rows start with an index.
my @output_lines = grep { /^\s+\d+/ } run "parted /dev/$disk print";
my $last_partition_end = 1;
my $unit;
if(@output_lines) {
# Take the "End" column of the last partition and normalize it to MB.
($last_partition_end, $unit) = ($output_lines[-1] =~ m/\s+[\d\.]+[a-z]+\s+[\d\.]+[a-z]+\s+([\d\.]+)(kB|MB|GB)/i);
if($unit eq "GB") { $last_partition_end = sprintf("%i", (($last_partition_end * 1000)+1)); } # * 1000 because of parted, +1 to round up
if($unit eq "kB") { $last_partition_end = sprintf("%i", (($last_partition_end / 1000)+1)); } # / 1000 because of parted, +1 to round up
}
Rex::Logger::info("Last parition ending at $last_partition_end");
# New partition starts where the previous one ended.
my $next_partition_start = $last_partition_end;
my $next_partition_end = $option{size} + $last_partition_end;
if($option{grow}) {
# "-- -1" tells parted to extend the partition to the end of the disk.
$next_partition_end = "-- -1";
}
run "parted -s /dev/$disk mkpart $option{type} $next_partition_start $next_partition_end";
if($? != 0) {
die("Error creating partition.");
}
# Make the kernel re-read the partition table.
run "partprobe";
# get the partition id
my @partitions = grep { /$disk\d+$/ } split /\n/, cat "/proc/partitions";
my ($part_num) = ($partitions[-1] =~ m/$disk(\d+)/);
if(! $part_num) {
die("Error getting partition number.");
}
if($option{boot}) {
run "parted /dev/$disk set $part_num boot on";
}
if($option{vg}) {
# Register the partition as an LVM physical volume and attach it to the
# requested volume group, creating the VG if it does not exist yet.
run "parted /dev/$disk set $part_num lvm on";
pvcreate "/dev/$disk$part_num";
my @vgs = vgs();
if( grep { $_->{volume_group} eq $option{vg} } @vgs ) {
# vg exists, so extend it
vgextend $option{vg}, "/dev/$disk$part_num";
}
else {
# vg doesnt exist, create a new one
vgcreate $option{vg} => "/dev/$disk$part_num";
}
}
# Wait until udev has created the device node before touching it.
my $found_part=0;
while($found_part == 0) {
Rex::Logger::debug("Waiting on /dev/$disk$part_num to appear...");
run "ls -l /dev/$disk$part_num";
if($? == 0) { $found_part = 1; last; }
sleep 1;
}
if(! exists $option{fstype} || $option{fstype} eq "non-fs" || $option{fstype} eq "none" || $option{fstype} eq "") {
# nix
}
elsif(can_run("mkfs.$option{fstype}")) {
Rex::Logger::info("Creating filesystem $option{fstype} on /dev/$disk$part_num");
my $add_opts = "";
# "lable" is accepted for backward compatibility (historical typo).
if(exists $option{label} || exists $option{lable}) {
my $label = $option{label} || $option{lable};
$add_opts .= " -L $label ";
}
run "mkfs.$option{fstype} $add_opts /dev/$disk$part_num";
}
elsif($option{fstype} eq "swap") {
Rex::Logger::info("Creating swap space on /dev/$disk$part_num");
my $add_opts = "";
if(exists $option{label} || exists $option{lable}) {
my $label = $option{label} || $option{lable};
$add_opts .= " -L $label ";
}
run "mkswap $add_opts /dev/$disk$part_num";
}
else {
die("Can't format partition with $option{fstype}");
}
if(exists $option{mount} && $option{mount}) {
mount "/dev/$disk$part_num", $mountpoint,
fs => $option{fstype};
}
if(exists $option{mount_persistent} && $option{mount_persistent}) {
# Also record the mount in /etc/fstab (handled by Rex's mount helper).
mount "/dev/$disk$part_num", $mountpoint,
fs => $option{fstype},
label => $option{label} || "",
persistent => 1;
}
return "$disk$part_num";
}
=back
=cut
1;
| 23.532 | 141 | 0.557539 |
73dabe7002a7a67a0dd7814194c316821eb792b2 | 3,466 | pm | Perl | lib/DeviewSched/Controller/UserAttendance.pm | GDG-SSU/deviewsched-backend | bc315e75e275275e9465f50883875c14570d1f64 | [
"MIT"
]
| 4 | 2015-06-29T16:10:37.000Z | 2015-09-06T07:04:39.000Z | lib/DeviewSched/Controller/UserAttendance.pm | GDG-SSU/deviewsched-backend | bc315e75e275275e9465f50883875c14570d1f64 | [
"MIT"
]
| 12 | 2015-07-24T14:16:01.000Z | 2015-09-06T12:24:57.000Z | lib/DeviewSched/Controller/UserAttendance.pm | GDG-SSU/deviewsched-backend | bc315e75e275275e9465f50883875c14570d1f64 | [
"MIT"
]
| null | null | null | package DeviewSched::Controller::UserAttendance;
use utf8;
use DeviewSched::FacebookAPI;
use Mojo::Base 'DeviewSched::Controller';
# FIXME: could not use Exporter with a class that inherits via "use base ..." -- hence the constant shim below.
# Constant-style shim (empty prototype makes it usable as a bareword):
# forwards to the field list declared in DeviewSched::FacebookAPI.
sub FB_USER_REQUIRED_FIELDS () { DeviewSched::FacebookAPI->FB_USER_REQUIRED_FIELDS }
# GET handler: report attendance for the session year/day in the route
# parameters.  Behavior depends on stash flags set by the router:
#   - fetch_friend: report the Facebook friends' attendance instead of the
#     current user's (friend ids are fetched live from the Graph API);
#   - fetch_all (only with fetch_friend): ignore the day parameter and
#     return all days, grouped per day.
# Renders a 200 JSON response via render_wrap in all cases.
sub get_status {
my $self = shift;
my ($user, $fetch_all, $fetch_friend) = map { $self->stash($_) } qw/user fetch_all fetch_friend/;
my ($year, $day) = map { $self->param($_) } qw/year day/;
# FIXME: clumsy variable declarations (translated from Korean)
my $facebook = $self->fb_graph($user->fb_token);
my @friends_id;
@friends_id = map { $_->{id} } $facebook->friends if $fetch_friend;
my $attendance_rs = $self->db_schema->resultset('UserAttendance');
# The search condition is assembled from nested ternaries:
#   friend mode: filter by the friend id list (and by day unless fetch_all);
#   self mode:   filter by the current user's id only.
my @attendances = $attendance_rs->search({
session_year => $year,
( $fetch_friend ) ?
(
( !defined $fetch_all ) ?
( session_day => $day ) : (),
user_id => \@friends_id
) : (
user_id => $user->id
)
}, {
# In friend mode, prefetch the related user rows so serialization below
# does not issue one query per attendance.
( $fetch_friend ) ?
(
join => 'user',
prefetch => 'user'
) : ()
})->all;
if ($fetch_friend) {
return $self->render_wrap(200,
( $fetch_all ) ?
$self->_classify_by_days(\@attendances) :
$self->_classify_by_user(\@attendances)
);
} else {
return $self->render_wrap(200,
{ days => $self->_classify_single(\@attendances) }
);
}
}
# PUT/DELETE handler: mark or unmark the current user's attendance for the
# given session year and day.
#   PUT    -> upsert an attendance row; rejected with FAIL_BAD_PARAMETERS
#             when the requested day is beyond the event's last day.
#   other  -> (DELETE route) remove the matching attendance row, if any.
# Always renders an empty 200 response on success.
sub set_status {
my $self = shift;
my $user = $self->stash('user');
my ($year, $day) = map { $self->param($_) } qw/year day/;
my $attendance_rs = $self->db_schema->resultset('UserAttendance');
if ($self->req->method eq $self->METHOD_PUT) {
# Validate the day against the schedule before writing.
return $self->fail($self->FAIL_BAD_PARAMETERS) if $self->_get_last_day($year) < $day;
$attendance_rs->update_or_create({
user_id => $user->id,
session_year => $year,
session_day => $day,
});
} else {
$attendance_rs->search({
user_id => $user->id,
session_year => $year,
session_day => $day,
})->delete;
}
return $self->render_wrap(200, {});
}
# Return the highest session day number scheduled for $year (i.e. how many
# days the event runs).  Returns undef when no sessions exist for that year.
sub _get_last_day {
    my ($self, $year) = @_;

    return $self->db_schema
                ->resultset('Session')
                ->search({ year => $year })
                ->get_column('day')
                ->max;
}
# Flatten a list of attendance rows into a plain arrayref of integer day
# numbers, e.g. [1, 2, 3] -- the "days" payload for a single user.
# A missing/undef argument is treated as an empty list.
sub _classify_single {
    my ($self, $attendances) = @_;

    my @days;
    for my $attendance (@{ $attendances || [] }) {
        push @days, int $attendance->session_day;
    }
    return \@days;
}
# Build { users => [...] }, one element per attendance row, each element
# being the attending user's columns serialized down to the Facebook
# required-field set.  A missing/undef argument yields { users => [] }.
sub _classify_by_user {
    my ($self, $attendances) = @_;

    my @users;
    for my $attendance (@{ $attendances || [] }) {
        push @users,
            $attendance->user->serialize_columns([ FB_USER_REQUIRED_FIELDS ]);
    }

    return { users => \@users };
}
# Group attendance rows by conference day:
#   { days => [ { users => [...] }, { users => [...] }, ... ] }
# where index 0 holds day 1.  Days nobody attends remain undef slots in the
# array (unchanged from the original behavior; callers must tolerate that).
# A missing/undef argument yields { days => [] }.
sub _classify_by_days {
my ($self, $attendances) = @_;
my $result = [];
# Fixed: the original used map in void context purely for its side
# effects; a plain for-loop expresses the intent without building and
# discarding a return list.
for my $attendance (@{ $attendances || [] }) {
push @{ $result->[$attendance->session_day - 1]->{users} },
$attendance->user->serialize_columns([
FB_USER_REQUIRED_FIELDS
]);
}
return {days => $result};
}
1;
| 24.58156 | 101 | 0.521639 |
ed5c556aa0ca849b3e9d3e5974de32755b6164a6 | 1,067 | pm | Perl | lib/Google/Ads/GoogleAds/V5/Services/AdGroupCriterionSimulationService.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
]
| null | null | null | lib/Google/Ads/GoogleAds/V5/Services/AdGroupCriterionSimulationService.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
]
| null | null | null | lib/Google/Ads/GoogleAds/V5/Services/AdGroupCriterionSimulationService.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
]
| null | null | null | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V5::Services::AdGroupCriterionSimulationService;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseService);
# Retrieve an AdGroupCriterionSimulation resource.  Thin wrapper that
# delegates to the base service's call() with the v5 REST path template
# and the resource class the JSON response should be inflated into.
sub get {
  my ($self, $request_body) = @_;

  return $self->SUPER::call(
    'GET',
    'v5/{+resourceName}',
    $request_body,
    'Google::Ads::GoogleAds::V5::Resources::AdGroupCriterionSimulation'
  );
}
1;
| 31.382353 | 80 | 0.732896 |
ed58b7a3a7b702df775c90900fd24ffa7a4f33e5 | 4,042 | al | Perl | Modules/System/Email/src/Message/EmailAttachments.Page.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [
"MIT"
]
| 1 | 2021-08-16T18:14:49.000Z | 2021-08-16T18:14:49.000Z | Modules/System/Email/src/Message/EmailAttachments.Page.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [
"MIT"
]
| null | null | null | Modules/System/Email/src/Message/EmailAttachments.Page.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [
"MIT"
]
| 1 | 2021-02-09T10:23:09.000Z | 2021-02-09T10:23:09.000Z | // ------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// ------------------------------------------------------------------------------------------------
// List part showing the attachments of one email message.  The hosting page
// calls UpdateValues() with the message id; rows are added/removed only via
// the Upload and Delete actions (direct editing is disabled).
page 8889 "Email Attachments"
{
    PageType = ListPart;
    SourceTable = "Email Message Attachment";
    InsertAllowed = false;
    ModifyAllowed = false;
    DeleteAllowed = false;
    ShowFilter = false;
    Permissions = tabledata "Email Message Attachment" = rmd;
    layout
    {
        area(Content)
        {
            repeater(GroupName)
            {
                field(FileName; Rec."Attachment Name")
                {
                    ApplicationArea = All;
                    Caption = 'Filename';
                    ToolTip = 'Specifies the name of the attachment';
                    // Clicking the file name downloads the attachment content.
                    trigger OnDrillDown()
                    var
                        Instream: Instream;
                        Filename: Text;
                    begin
                        // The BLOB is not loaded by default; fetch it first.
                        Rec.CalcFields(Attachment);
                        Rec.Attachment.CreateInStream(Instream);
                        Filename := Rec."Attachment Name";
                        DownloadFromStream(Instream, '', '', '', Filename);
                        CurrPage.Update(false);
                    end;
                }
            }
        }
    }
    actions
    {
        area(Processing)
        {
            // Let the user pick a file and attach it to the current message.
            // Hidden when the message has already been sent (read-only).
            action(Upload)
            {
                ApplicationArea = All;
                Promoted = true;
                PromotedCategory = Process;
                PromotedOnly = true;
                Image = Attach;
                Caption = 'Attach File';
                ToolTip = 'Attach files, such as documents or images, to the email.';
                Scope = Page;
                Visible = not IsMessageReadOnly;
                trigger OnAction()
                var
                    EmailEditor: Codeunit "Email Editor";
                begin
                    EmailEditor.UploadAttachment(EmailMessage);
                    UpdateDeleteEnablement();
                    CurrPage.Update();
                end;
            }
            // Delete the selected attachment rows after user confirmation.
            action(Delete)
            {
                ApplicationArea = All;
                Promoted = true;
                PromotedCategory = Process;
                PromotedOnly = true;
                Enabled = DeleteActionEnabled;
                Image = Delete;
                Caption = 'Delete';
                ToolTip = 'Delete the selected row.';
                Scope = Repeater;
                Visible = not IsMessageReadOnly;
                trigger OnAction()
                var
                    EmailMessageAttachment: Record "Email Message Attachment";
                begin
                    if Confirm(DeleteQst) then begin
                        CurrPage.SetSelectionFilter(EmailMessageAttachment);
                        EmailMessageAttachment.DeleteAll();
                        UpdateDeleteEnablement();
                        CurrPage.Update();
                    end;
                end;
            }
        }
    }
    // Entry point for the hosting page: bind this part to a message id and
    // refresh the action-enablement state.
    internal procedure UpdateValues(MessageId: Guid)
    begin
        EmailMessageId := MessageId;
        EmailMessage.Get(EmailMessageId);
        UpdateDeleteEnablement();
        IsMessageReadOnly := EmailMessage.IsReadOnly();
    end;
    // Enable Delete only when the message has at least one attachment.
    // NOTE(review): SetFilter is used with a Guid value; SetRange would avoid
    // filter-expression parsing of the GUID text -- confirm intent.
    internal procedure UpdateDeleteEnablement()
    var
        EmailMessageAttachment: Record "Email Message Attachment";
    begin
        EmailMessageAttachment.SetFilter("Email Message Id", EmailMessageId);
        DeleteActionEnabled := not EmailMessageAttachment.IsEmpty();
    end;
    var
        EmailMessage: Codeunit "Email Message Impl.";
        [InDataSet]
        DeleteActionEnabled: Boolean;
        IsMessageReadOnly: Boolean;
        EmailMessageId: Guid;
        DeleteQst: Label 'Go ahead and delete?';
}
ed4b98f23b453728502446424e1eb016ca2ef1c4 | 530 | pm | Perl | auto-lib/Paws/Lightsail/UpdateLoadBalancerAttributeResult.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/Lightsail/UpdateLoadBalancerAttributeResult.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
]
| 1 | 2021-05-26T19:13:58.000Z | 2021-05-26T19:13:58.000Z | auto-lib/Paws/Lightsail/UpdateLoadBalancerAttributeResult.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
]
| null | null | null |
# Result shape for the Lightsail UpdateLoadBalancerAttribute API call.
# Callers read ->Operations for the list of operations the request triggered.
package Paws::Lightsail::UpdateLoadBalancerAttributeResult;
use Moose;
# Inflated from the "operations" array of the JSON response.
has Operations => (is => 'ro', isa => 'ArrayRef[Paws::Lightsail::Operation]', traits => ['NameInRequest'], request_name => 'operations' );
# AWS request id; internal, useful when contacting support / debugging.
has _request_id => (is => 'ro', isa => 'Str');
### main pod documentation begin ###
=head1 NAME
Paws::Lightsail::UpdateLoadBalancerAttributeResult
=head1 ATTRIBUTES
=head2 Operations => ArrayRef[L<Paws::Lightsail::Operation>]
An object describing the API operations.
=head2 _request_id => Str
=cut
1;
ed7cbd6c958d76179d7f1cd2f1b0ff3e2fb95491 | 2,578 | pl | Perl | build/ImageMagick-7.1.0-2/IMDelegates/ghostscript-9.54.0/toolbin/apitest.pl | roMummy/imagemagick_lib_ios | 0e0e6fa77e06b471f5019d5b1b28caabd08d5e6a | [
"ImageMagick"
]
| null | null | null | build/ImageMagick-7.1.0-2/IMDelegates/ghostscript-9.54.0/toolbin/apitest.pl | roMummy/imagemagick_lib_ios | 0e0e6fa77e06b471f5019d5b1b28caabd08d5e6a | [
"ImageMagick"
]
| null | null | null | build/ImageMagick-7.1.0-2/IMDelegates/ghostscript-9.54.0/toolbin/apitest.pl | roMummy/imagemagick_lib_ios | 0e0e6fa77e06b471f5019d5b1b28caabd08d5e6a | [
"ImageMagick"
]
| null | null | null | #!/usr/bin/perl
use strict;
use warnings;
use File::Find;
my @testdirs=("/home/marcos/cluster/tests","/home/marcos/cluster/tests_private");
my @devices = ("pdfwrite", "ps2write", "ppmraw");
my @testfiles = ();
# File::Find "wanted" callback (no_chdir mode, so $_ holds the full path):
# collect every regular file whose name ends in .pdf, .ps or .PS into the
# file-scoped @testfiles list.  The match is case-sensitive, exactly like
# the original three separate checks (.PDF / .Ps / .pS do not match).
sub process_file {
    my $path = $_;
    return unless -f $path;
    push @testfiles, $path if $path =~ m/\.(?:pdf|ps|PS)$/;
}
find({ wanted=>\&process_file, no_chdir => 1}, @testdirs);
# Run a command string through "bash -c" (so process substitution etc. work)
# and return the raw status from system() -- i.e. the $? encoding, where the
# low 7 bits are the terminating signal and the high byte is the exit code.
# NOTE(review): the ($) prototype forces scalar context on the argument;
# prototypes are generally discouraged, but changing it would alter parsing.
sub system_bash($) {
my $cmd = shift;
my @args = ( "bash", "-c", $cmd);
my $rc = system(@args);
if ($rc == -1) {
# system() itself failed (e.g. bash not found); $! has the reason.
print "Failed to execute: $!\n";
}
elsif ($rc & 127) {
# Child was killed by a signal.  A raw status of exactly 2 means
# SIGINT with no core dump -- the user hit Ctrl-C, so abort the run.
if ($rc == 2) {
die "You keel me!";
}
printf "child died with signal %d, %s coredump\n", ($? & 127), ($? & 128) ? 'with' : 'without';
}
return $rc;
}
# Main loop: render every test file with every device via the multi-threaded
# ./bin/apitest harness, then diff the per-thread outputs against thread 0 to
# detect thread-dependent (non-deterministic) rendering.  Output files are
# named outfile.<page>.<thread>; stdout.<n>/stderr.<n> capture the logs.
foreach my $testfile (@testfiles) {
foreach my $dev (@devices) {
printf("$testfile to $dev\n");
# Clean out results of the previous run.
`rm -f outfile.* stdout.* stderr.*`;
my $rc = system_bash("./bin/apitest -sDEVICE=$dev -o outfile.%d. -r72 $testfile");
if ($rc) {
printf("Failed with return code $rc\n");
next;
}
# Pick a grep pattern that strips legitimately-varying lines (ids,
# timestamps) before diffing.  The default pattern matches nothing.
# NOTE(review): the '.' in m/.pdf$/ etc. is unescaped, so e.g. "Xpdf"
# would also match; presumably "\.pdf" was intended -- confirm.
my $grep = "-av BOGOSITY10000";
if ($testfile =~ m/.pdf$/) {
$grep = '-av "\(/ID\|uuid\|CreationDate\|ModDate\|CreateDate\)"';
}
if ($testfile =~ m/.ps$/) {
$grep = "-av CreationDate";
}
if ($testfile =~ m/.PS$/) {
$grep = "-av CreationDate";
}
my $fail = 0;
# Compare each page: thread 0's output is the reference.
for (my $page=1; -e "outfile.$page.0"; $page++) {
my $diffcount=0;
for (my $thrd=1; -e "outfile.$page.$thrd"; $thrd++) {
my $cmd = "diff -q <( grep $grep outfile.$page.0 ) <( grep $grep outfile.$page.$thrd )";
my $diff1=system_bash($cmd);
if ($diff1) {
$diffcount++;
}
}
if ($diffcount) {
printf("Page $page differs $diffcount times\n");
$fail = 1;
}
}
if ($fail) {
next;
}
# Compare captured stdout across threads ("Loading ..." lines excluded).
# NOTE(review): $thrd is not interpolated into the command below -- every
# iteration compares stdout.0 with stdout.1 only.  Confirm whether more
# than two threads were ever expected here.
my $diffcount=0;
for (my $thrd=1; -e "stdout.$thrd"; $thrd++) {
my $cmd = "diff -q <( grep -av Loading stdout.0) <( grep -av Loading stdout.1)";
my $diff1=system_bash($cmd);
if ($diff1) {
$diffcount++;
}
}
if ($diffcount) {
printf("Stdout differs $diffcount times\n");
next;
}
# Same thread-0-vs-thread-1 comparison for stderr (see note above).
$diffcount=0;
for (my $thrd=1; -e "stderr.$thrd"; $thrd++) {
my $cmd = "diff -q stderr.0 stderr.1";
my $diff1=system_bash($cmd);
if ($diff1) {
$diffcount++;
}
}
if ($diffcount) {
printf("Stderr differs $diffcount times\n");
next;
}
}
}
| 22.614035 | 96 | 0.503879 |
73f6bd265bb6fdbee579d2035d5ee3eaf563109a | 1,663 | pl | Perl | libwc/map/mk_sjis_ucs_map.pl | dafyddcrosby/w3m-openbsd | 9372551a1289aecfed35ae9cd702cd3956240064 | [
"MIT"
]
| 3 | 2020-09-03T10:37:24.000Z | 2022-02-17T11:04:00.000Z | libwc/map/mk_sjis_ucs_map.pl | dafyddcrosby/w3m-openbsd | 9372551a1289aecfed35ae9cd702cd3956240064 | [
"MIT"
]
| 1 | 2021-05-02T04:57:02.000Z | 2021-05-27T04:28:07.000Z | libwc/map/mk_sjis_ucs_map.pl | devinsmith/w3m | b0a52ca76dcc0e38fce4dadc5857fd20f68c4b8d | [
"MTLL"
]
| null | null | null |
# Generate C conversion tables between Shift_JIS vendor-extension code
# points and Unicode, from the mapping files published at
# ftp://ftp.unicode.org/Public/MAPPINGS/ (see the __END__ section below:
# each line is "<name> <mapping file> <code description>").
# For every entry a "<name>_ucs.map" file is written containing:
#   <name>_ucs_map  - SJIS extension code -> UCS, as an indexed table
#   ucs_<name>_map  - UCS -> SJIS extension code, as a sorted pair list
#
# Modernized: strict/warnings, lexical filehandles, checked 3-arg open
# (the original silently produced empty tables when a mapping file was
# missing), chomp instead of chop.  Output format is unchanged.
use strict;
use warnings;
my @NAME;
my (%MAP, %CODE);
while (my $line = <DATA>) {
chomp $line;
$line =~ s/\s*$//;
my ($n, $m, $c) = split " ", $line, 3;
defined $c or next;    # require all three fields
push @NAME, $n;
$MAP{$n} = $m;
$CODE{$n} = $c;
}
foreach my $name (@NAME) {
my $code = $CODE{$name};
my $map = $MAP{$name};
print "$name\t$map\t$code\n";
my %to_ucs;
my %from_ucs;
open my $map_fh, '<', $map or die "cannot open $map: $!";
open my $out_fh, '>', "${name}_ucs.map" or die "cannot create ${name}_ucs.map: $!";
while (my $entry = <$map_fh>) {
next if $entry =~ /^#/;
$entry =~ s/#.*//;
my ($i, $u) = split " ", $entry;
defined $u or next;
$i = hex($i);
$u = hex($u);
# Keep only the vendor-extension rows: NEC special characters (0x87xx),
# NEC-selected IBM characters (0xED40-0xEEFC) and IBM extension
# characters (0xFA40-0xFCFC).
($i >= 0x8740 && $i <= 0x87FC) ||
($i >= 0xED40 && $i <= 0xEEFC) ||
($i >= 0xFA40 && $i <= 0xFCFC) || next;
$to_ucs{$i} = $u;
# For the reverse table, prefer a later (IBM-area) code point over one
# from the NEC-selected IBM block when both map to the same UCS value.
if ($u > 0 && (! $from_ucs{$u} || ($from_ucs{$u} >= 0xED40 && $from_ucs{$u} <= 0xEEFC))) {
$from_ucs{$u} = $i;
}
}
close $map_fh;
print {$out_fh} <<EOF;
/* $code */
static wc_uint16 ${name}_ucs_map[ 0x5E * 10 ] = {
EOF
for my $ub (0x87, 0xed, 0xee, 0xfa, 0xfb, 0xfc) {
# Low half of the row: trail bytes 0x40-0x7E and 0x80-0x9E (0x5E cells).
for my $lb (0x40 .. 0x7E, 0x80 .. 0x9E) {
my $sjis = ($ub << 8) + $lb;
printf {$out_fh} " 0x%.4X,\t/* 0x%.4X */\n", $to_ucs{$sjis} || 0, $sjis;
}
# Rows 0x87 and 0xFC only occupy the first 0x5E cells.
next if $ub == 0x87 || $ub == 0xfc;
for my $lb (0x9F .. 0xFC) {
my $sjis = ($ub << 8) + $lb;
printf {$out_fh} " 0x%.4X,\t/* 0x%.4X */\n", $to_ucs{$sjis} || 0, $sjis;
}
}
my @ucs = sort { $a <=> $b } keys %from_ucs;
my $nucs = scalar @ucs;
print {$out_fh} <<EOF;
};
#define N_ucs_${name}_map $nucs
static wc_map ucs_${name}_map[ N_ucs_${name}_map ] = {
EOF
for my $u (@ucs) {
printf {$out_fh} " { 0x%.4X, 0x%.4X },\n", $u, $from_ucs{$u};
}
print {$out_fh} <<EOF;
};
EOF
close $out_fh or die "close ${name}_ucs.map: $!";
}
__END__
sjis_ext VENDORS/MICSFT/WINDOWS/CP932.TXT Shift_JIS/CP932 (Japanese)
ed861b716c172b19a6e704297ca14059baae19a3 | 603 | al | Perl | Modules/System/Time Zone Selection/Permissions/TimeZoneSelectionRead.PermissionSet.al | waldo1001/ALAppExtensions | 935155845bf45b631d1c34b6bcd5aec54308d50f | [
"MIT"
]
| 127 | 2018-04-17T18:03:03.000Z | 2019-05-06T18:54:17.000Z | Modules/System/Time Zone Selection/Permissions/TimeZoneSelectionRead.PermissionSet.al | waldo1001/ALAppExtensions | 935155845bf45b631d1c34b6bcd5aec54308d50f | [
"MIT"
]
| 2,279 | 2018-09-12T12:01:49.000Z | 2019-05-06T13:59:35.000Z | Modules/System/Time Zone Selection/Permissions/TimeZoneSelectionRead.PermissionSet.al | waldo1001/ALAppExtensions | 935155845bf45b631d1c34b6bcd5aec54308d50f | [
"MIT"
]
| 41 | 2018-05-17T11:19:52.000Z | 2019-04-30T17:30:38.000Z | // ------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// ------------------------------------------------------------------------------------------------
// Read-only permission set for the Time Zone Selection module.
// Not directly assignable to users; intended to be composed into
// higher-level permission sets on top of the module's object permissions.
permissionset 9216 "Time Zone Selection - Read"
{
    Assignable = false;
    IncludedPermissionSets = "Time Zone Selection - Objects";
    Permissions = tabledata "Page Data Personalization" = r,
                  tabledata "Time Zone" = r;
}
ed64e157baddc6949303bd1288868988d2f7d4a9 | 44,472 | pm | Perl | bin/lib/Image/ExifTool/Qualcomm.pm | mceachen/exiftool_vendored.rb | bab2705f32f3b8fc47486ec9ceb6f012972419c2 | [
"MIT"
]
| 5 | 2017-02-18T11:03:32.000Z | 2019-01-29T16:04:41.000Z | bin/lib/Image/ExifTool/Qualcomm.pm | mceachen/exiftool_vendored | c154ac88071e480d595fa47140eb65487d4f2b07 | [
"MIT"
]
| null | null | null | bin/lib/Image/ExifTool/Qualcomm.pm | mceachen/exiftool_vendored | c154ac88071e480d595fa47140eb65487d4f2b07 | [
"MIT"
]
| null | null | null | #------------------------------------------------------------------------------
# File: Qualcomm.pm
#
# Description: Read Qualcomm APP7 meta information
#
# Revisions: 2012/02/14 - P. Harvey Created
#------------------------------------------------------------------------------
package Image::ExifTool::Qualcomm;
use strict;
use vars qw($VERSION);
use Image::ExifTool qw(:DataAccess :Utils);
$VERSION = '1.01';
sub ProcessQualcomm($$$);
sub MakeNameAndDesc($$);
# Qualcomm format codes (ref PH (NC))
# ExifTool format names indexed by the Qualcomm format code 0-7; presumably
# the code stored with each record in the APP7 segment selects the value
# type -- marked "(NC)" (not confirmed) by the original author above.
my @qualcommFormat = (
'int8u', 'int8s', 'int16u', 'int16s',
'int32u', 'int32s', 'float', 'double',
);
# information found in JPEG APP7 Qualcomm Camera Attributes segment
%Image::ExifTool::Qualcomm::Main = (
PROCESS_PROC => \&ProcessQualcomm,
GROUPS => { 0 => 'MakerNotes', 2 => 'Camera' },
VARS => { NO_ID => 1, NO_LOOKUP => 1 }, # too long, too many, and too obscure
NOTES => q{
The tags below have been observed in the JPEG APP7 "Qualcomm Camera
Attributes" segment written by some cameras such as the HP iPAQ Voice
Messenger. ExifTool will extract any information found from this segment,
even if it is not listed in this table.
},
'aec_current_sensor_luma' => { },
'af_position' => { },
'aec_current_exp_index' => { },
'awb_sample_decision' => { },
'asf5_enable' => { },
'asf5_filter_mode' => { },
'asf5_exposure_index_1' => { },
'asf5_exposure_index_2' => { },
'asf5_max_exposure_index' => { },
'asf5_luma_filter[0]' => { },
'asf5_luma_filter[1]' => { },
'asf5_luma_filter[2]' => { },
'asf5_luma_filter[3]' => { },
'asf5_luma_filter[4]' => { },
'asf5_luma_filter[5]' => { },
'asf5_luma_filter[6]' => { },
'asf5_luma_filter[7]' => { },
'asf5_luma_filter[8]' => { },
'asf5_filter1_a11' => { },
'asf5_filter1_a12' => { },
'asf5_filter1_a13' => { },
'asf5_filter1_a14' => { },
'asf5_filter1_a15' => { },
'asf5_filter1_a21' => { },
'asf5_filter1_a22' => { },
'asf5_filter1_a23' => { },
'asf5_filter1_a24' => { },
'asf5_filter1_a25' => { },
'asf5_filter1_a31' => { },
'asf5_filter1_a32' => { },
'asf5_filter1_a33' => { },
'asf5_filter1_a34' => { },
'asf5_filter1_a35' => { },
'asf5_filter1_a41' => { },
'asf5_filter1_a42' => { },
'asf5_filter1_a43' => { },
'asf5_filter1_a44' => { },
'asf5_filter1_a45' => { },
'asf5_filter1_a51' => { },
'asf5_filter1_a52' => { },
'asf5_filter1_a53' => { },
'asf5_filter1_a54' => { },
'asf5_filter1_a55' => { },
'asf5_filter2_a11' => { },
'asf5_filter2_a12' => { },
'asf5_filter2_a13' => { },
'asf5_filter2_a14' => { },
'asf5_filter2_a15' => { },
'asf5_filter2_a21' => { },
'asf5_filter2_a22' => { },
'asf5_filter2_a23' => { },
'asf5_filter2_a24' => { },
'asf5_filter2_a25' => { },
'asf5_filter2_a31' => { },
'asf5_filter2_a32' => { },
'asf5_filter2_a33' => { },
'asf5_filter2_a34' => { },
'asf5_filter2_a35' => { },
'asf5_filter2_a41' => { },
'asf5_filter2_a42' => { },
'asf5_filter2_a43' => { },
'asf5_filter2_a44' => { },
'asf5_filter2_a45' => { },
'asf5_filter2_a51' => { },
'asf5_filter2_a52' => { },
'asf5_filter2_a53' => { },
'asf5_filter2_a54' => { },
'asf5_filter2_a55' => { },
'asf5_nrmize_factor1' => { },
'asf5_nrmize_factor2' => { },
'asf5_low_lo_thres' => { },
'asf5_low_up_thres' => { },
'asf5_low_shrp_deg_f1' => { },
'asf5_low_shrp_deg_f2' => { },
'asf5_low_smth_prcnt' => { },
'asf5_nrm_lo_thres' => { },
'asf5_nrm_up_thres' => { },
'asf5_nrm_shrp_deg_f1' => { },
'asf5_nrm_shrp_deg_f2' => { },
'asf5_nrm_smth_prcnt' => { },
'asf5_brt_lo_thres' => { },
'asf5_brt_up_thres' => { },
'asf5_brt_shrp_deg_f1' => { },
'asf5_brt_shrp_deg_f2' => { },
'asf5_brt_smth_percent' => { },
'asf3_enable' => { },
'asf3_edge_filter_a11' => { },
'asf3_edge_filter_a12' => { },
'asf3_edge_filter_a13' => { },
'asf3_edge_filter_a21' => { },
'asf3_edge_filter_a22' => { },
'asf3_edge_filter_a23' => { },
'asf3_edge_filter_a31' => { },
'asf3_edge_filter_a32' => { },
'asf3_edge_filter_a33' => { },
'asf3_noise_filter_a11' => { },
'asf3_noise_filter_a12' => { },
'asf3_noise_filter_a13' => { },
'asf3_noise_filter_a21' => { },
'asf3_noise_filter_a22' => { },
'asf3_noise_filter_a23' => { },
'asf3_noise_filter_a31' => { },
'asf3_noise_filter_a32' => { },
'asf3_noise_filter_a33' => { },
'asf3_lower_threshold' => { },
'asf3_upper_threshold' => { },
'asf3_edge_detect' => { },
'aec_enable' => { },
'aec_mode' => { },
'aec_aggressiveness' => { },
'aec_luma_target' => { },
'aec_luma_tolerance' => { },
'aec_indoor_idx' => { },
'aec_odoor_idx' => { },
'aec_exposure_index_adj_step' => { },
'aec_outdoor_gamma_index' => { },
'aec_vfe_luma' => { },
'aec_high_luma_region_threshold' => { },
'aec_snapshot_sensor_gain' => { },
'aec_snapshot_digital_gain' => { },
'aec_snapshot_line_count' => { },
'aec_snapshot_exposure_time_ms' => { },
'aec_outdoor_bright_enable' => { },
'aec_outdoor_bright_reduction' => { },
'aec_outdoor_bright_threshold_LO' => { },
'aec_outdoor_bright_threshold_HI' => { },
'aec_outdoor_bright_discarded' => { },
'aec_high_luma_region_count' => { },
'antibanding_enable' => { },
'anti_bading_pixel_clk' => { },
'anti_bading_pixel_clk_per_line' => { },
'afr_enable' => { },
'afr_faster_0_trigger' => { },
'afr_slower_0_trigger' => { },
'afr_faster_0_exp_mod' => { },
'afr_slower_0_exp_mod' => { },
'afr_faster_1_trigger' => { },
'afr_slower_1_trigger' => { },
'afr_faster_1_exp_mod' => { },
'afr_slower_1_exp_mod' => { },
'afr_faster_2_trigger' => { },
'afr_slower_2_trigger' => { },
'afr_faster_2_exp_mod' => { },
'afr_slower_2_exp_mod' => { },
'afr_faster_3_trigger' => { },
'afr_slower_3_trigger' => { },
'afr_faster_3_exp_mod' => { },
'afr_slower_3_exp_mod' => { },
'afr_faster_4_trigger' => { },
'afr_slower_4_trigger' => { },
'afr_faster_4_exp_mod' => { },
'afr_slower_4_exp_mod' => { },
'afr_possible_frame_cnt' => { },
'af_enable' => { },
'af_steps_near_far' => { },
'af_steps_near_infinity' => { },
'af_gross_step' => { },
'af_fine_step' => { },
'af_fine_srch_points' => { },
'af_process' => { },
'af_mode' => { },
'af_near_end' => { },
'af_boundary' => { },
'af_far_end' => { },
'af_collect_end_stat' => { },
'af_test_mode' => { },
'af_undershoot_protect' => { },
'af_reset_lens_after_snap' => { },
'clip_to_af_rato' => { },
'af_pos_def_macro' => { },
'af_pos_def_norm' => { },
'af_vfe_vert_offset' => { },
'af_vfe_horz_offset' => { },
'af_vfe_vert_height' => { },
'af_vfe_horz_width' => { },
'af_vfe_metric_max' => { },
'af_trace_positions[0]' => { },
'af_trace_positions[1]' => { },
'af_trace_positions[2]' => { },
'af_trace_positions[3]' => { },
'af_trace_positions[4]' => { },
'af_trace_positions[5]' => { },
'af_trace_positions[6]' => { },
'af_trace_positions[7]' => { },
'af_trace_positions[8]' => { },
'af_trace_positions[9]' => { },
'af_trace_positions[10]' => { },
'af_trace_positions[11]' => { },
'af_trace_positions[12]' => { },
'af_trace_positions[13]' => { },
'af_trace_positions[14]' => { },
'af_trace_positions[15]' => { },
'af_trace_positions[16]' => { },
'af_trace_positions[17]' => { },
'af_trace_positions[18]' => { },
'af_trace_positions[19]' => { },
'af_trace_positions[20]' => { },
'af_trace_positions[21]' => { },
'af_trace_positions[22]' => { },
'af_trace_positions[23]' => { },
'af_trace_positions[24]' => { },
'af_trace_positions[25]' => { },
'af_trace_positions[26]' => { },
'af_trace_positions[27]' => { },
'af_trace_positions[28]' => { },
'af_trace_positions[29]' => { },
'af_trace_positions[30]' => { },
'af_trace_positions[31]' => { },
'af_trace_positions[32]' => { },
'af_trace_positions[33]' => { },
'af_trace_positions[34]' => { },
'af_trace_positions[35]' => { },
'af_trace_positions[36]' => { },
'af_trace_positions[37]' => { },
'af_trace_positions[38]' => { },
'af_trace_positions[39]' => { },
'af_trace_positions[40]' => { },
'af_trace_positions[41]' => { },
'af_trace_positions[42]' => { },
'af_trace_positions[43]' => { },
'af_trace_positions[44]' => { },
'af_trace_positions[45]' => { },
'af_trace_positions[46]' => { },
'af_trace_positions[47]' => { },
'af_trace_positions[48]' => { },
'af_trace_positions[49]' => { },
'af_trace_stats[0]' => { },
'af_trace_stats[1]' => { },
'af_trace_stats[2]' => { },
'af_trace_stats[3]' => { },
'af_trace_stats[4]' => { },
'af_trace_stats[5]' => { },
'af_trace_stats[6]' => { },
'af_trace_stats[7]' => { },
'af_trace_stats[8]' => { },
'af_trace_stats[9]' => { },
'af_trace_stats[10]' => { },
'af_trace_stats[11]' => { },
'af_trace_stats[12]' => { },
'af_trace_stats[13]' => { },
'af_trace_stats[14]' => { },
'af_trace_stats[15]' => { },
'af_trace_stats[16]' => { },
'af_trace_stats[17]' => { },
'af_trace_stats[18]' => { },
'af_trace_stats[19]' => { },
'af_trace_stats[20]' => { },
'af_trace_stats[21]' => { },
'af_trace_stats[22]' => { },
'af_trace_stats[23]' => { },
'af_trace_stats[24]' => { },
'af_trace_stats[25]' => { },
'af_trace_stats[26]' => { },
'af_trace_stats[27]' => { },
'af_trace_stats[28]' => { },
'af_trace_stats[29]' => { },
'af_trace_stats[30]' => { },
'af_trace_stats[31]' => { },
'af_trace_stats[32]' => { },
'af_trace_stats[33]' => { },
'af_trace_stats[34]' => { },
'af_trace_stats[35]' => { },
'af_trace_stats[36]' => { },
'af_trace_stats[37]' => { },
'af_trace_stats[38]' => { },
'af_trace_stats[39]' => { },
'af_trace_stats[40]' => { },
'af_trace_stats[41]' => { },
'af_trace_stats[42]' => { },
'af_trace_stats[43]' => { },
'af_trace_stats[44]' => { },
'af_trace_stats[45]' => { },
'af_trace_stats[46]' => { },
'af_trace_stats[47]' => { },
'af_trace_stats[48]' => { },
'af_trace_stats[49]' => { },
'af_focus_time' => { },
'awb_enable' => { },
'awb_algorithm' => { },
'awb_aggressiveness' => { },
'awb_red_gain_ref1' => { },
'awb_blue_gain_ref1' => { },
'awb_red_gain_adj_ref1' => { },
'awb_blue_gain_adj_ref1' => { },
'awb_red_gain_ref2' => { },
'awb_blue_gain_ref2' => { },
'awb_red_gain_adj_ref2' => { },
'awb_blue_gain_adj_ref2' => { },
'awb_red_gain_ref3' => { },
'awb_blue_gain_ref3' => { },
'awb_red_gain_adj_ref3' => { },
'awb_blue_gain_adj_ref3' => { },
'awb_red_gain_ref4' => { },
'awb_blue_gain_ref4' => { },
'awb_red_gain_adj_ref4' => { },
'awb_blue_gain_adj_ref4' => { },
'awb_red_gain_ref5' => { },
'awb_blue_gain_ref5' => { },
'awb_red_gain_adj_ref5' => { },
'awb_blue_gain_adj_ref5' => { },
'awb_red_gain_ref6' => { },
'awb_blue_gain_ref6' => { },
'awb_red_gain_adj_ref6' => { },
'awb_blue_gain_adj_ref6' => { },
'awb_red_gain_ref7' => { },
'awb_blue_gain_ref7' => { },
'awb_red_gain_adj_ref7' => { },
'awb_blue_gain_adj_ref7' => { },
'awb_red_gain_ref8' => { },
'awb_blue_gain_ref8' => { },
'awb_red_gain_adj_ref8' => { },
'awb_blue_gain_adj_ref8' => { },
'awb_lo_vfe_max_y' => { },
'awb_lo_vfe_min_y' => { },
'awb_lo_vfe_m1' => { },
'awb_lo_vfe_m2' => { },
'awb_lo_vfe_m3' => { },
'awb_lo_vfe_m4' => { },
'awb_lo_vfe_c1' => { },
'awb_lo_vfe_c2' => { },
'awb_lo_vfe_c3' => { },
'awb_lo_vfe_c4' => { },
'awb_norm_vfe_max_y' => { },
'awb_norm_vfe_min_y' => { },
'awb_norm_vfe_m1' => { },
'awb_norm_vfe_m2' => { },
'awb_norm_vfe_m3' => { },
'awb_norm_vfe_m4' => { },
'awb_norm_vfe_c1' => { },
'awb_norm_vfe_c2' => { },
'awb_norm_vfe_c3' => { },
'awb_norm_vfe_c4' => { },
'awb_oudor_vfe_max_y' => { },
'awb_oudor_vfe_min_y' => { },
'awb_oudor_vfe_m1' => { },
'awb_oudor_vfe_m2' => { },
'awb_oudor_vfe_m3' => { },
'awb_oudor_vfe_m4' => { },
'awb_oudor_vfe_c1' => { },
'awb_oudor_vfe_c2' => { },
'awb_oudor_vfe_c3' => { },
'awb_oudor_vfe_c4' => { },
'awb_cc_bias' => { },
'awb_min_r_gain' => { },
'awb_min_g_gain' => { },
'awb_min_b_gain' => { },
'awb_max_r_gain' => { },
'awb_max_g_gain' => { },
'awb_max_b_gain' => { },
'awb_outdoor_sample_influence' => { },
'awb_indoor_sample_influence' => { },
'awb_low_lig_col_cor_ena' => { },
'awb_agw_grid_dist_2_thresh' => { },
'awb_ave_rg_ratio' => { },
'awb_ave_bg_ratio' => { },
'awb_compact_cluster_R2' => { },
'outlier_distance' => { },
'awb_green_offset_rg' => { },
'awb_green_offset_bg' => { },
'awb_prev_wb_rgain' => { },
'awb_prev_wb_ggain' => { },
'awb_prev_wb_bgain' => { },
'awb_snapshot_r_gain' => { },
'awb_snapshot_b_gain' => { },
'rolloff_enable' => { },
'r2_tl84_cx' => { },
'r2_tl84_cy' => { },
'r2_tl84_width' => { },
'r2_tl84_height' => { },
'r2_tl84_intervals' => { },
'r2_tl84_tbl[0]' => { },
'r2_tl84_tbl[1]' => { },
'r2_tl84_tbl[2]' => { },
'r2_tl84_tbl[3]' => { },
'r2_tl84_tbl[4]' => { },
'r2_tl84_tbl[5]' => { },
'r2_tl84_tbl[6]' => { },
'r2_tl84_tbl[7]' => { },
'r2_tl84_tbl[8]' => { },
'r2_tl84_tbl[9]' => { },
'r2_tl84_tbl[10]' => { },
'r2_tl84_tbl[11]' => { },
'r2_tl84_tbl[12]' => { },
'r2_tl84_tbl[13]' => { },
'r2_tl84_tbl[14]' => { },
'r2_tl84_tbl[15]' => { },
'r2_tl84_tbl[16]' => { },
'r2_tl84_tbl[17]' => { },
'r2_tl84_tbl[18]' => { },
'r2_tl84_tbl[19]' => { },
'r2_tl84_tbl[20]' => { },
'r2_tl84_tbl[21]' => { },
'r2_tl84_tbl[22]' => { },
'r2_tl84_tbl[23]' => { },
'r2_tl84_tbl[24]' => { },
'r2_tl84_tbl[25]' => { },
'r2_tl84_tbl[26]' => { },
'r2_tl84_tbl[27]' => { },
'r2_tl84_tbl[28]' => { },
'r2_tl84_tbl[29]' => { },
'r2_tl84_tbl[30]' => { },
'r2_tl84_tbl[31]' => { },
'r2_tl84_red_ctbl[0]' => { },
'r2_tl84_red_ctbl[1]' => { },
'r2_tl84_red_ctbl[2]' => { },
'r2_tl84_red_ctbl[3]' => { },
'r2_tl84_red_ctbl[4]' => { },
'r2_tl84_red_ctbl[5]' => { },
'r2_tl84_red_ctbl[6]' => { },
'r2_tl84_red_ctbl[7]' => { },
'r2_tl84_red_ctbl[8]' => { },
'r2_tl84_red_ctbl[9]' => { },
'r2_tl84_red_ctbl[10]' => { },
'r2_tl84_red_ctbl[11]' => { },
'r2_tl84_red_ctbl[12]' => { },
'r2_tl84_red_ctbl[13]' => { },
'r2_tl84_red_ctbl[14]' => { },
'r2_tl84_red_ctbl[15]' => { },
'r2_tl84_red_ctbl[16]' => { },
'r2_tl84_red_ctbl[17]' => { },
'r2_tl84_red_ctbl[18]' => { },
'r2_tl84_red_ctbl[19]' => { },
'r2_tl84_red_ctbl[20]' => { },
'r2_tl84_red_ctbl[21]' => { },
'r2_tl84_red_ctbl[22]' => { },
'r2_tl84_red_ctbl[23]' => { },
'r2_tl84_red_ctbl[24]' => { },
'r2_tl84_red_ctbl[25]' => { },
'r2_tl84_red_ctbl[26]' => { },
'r2_tl84_red_ctbl[27]' => { },
'r2_tl84_red_ctbl[28]' => { },
'r2_tl84_red_ctbl[29]' => { },
'r2_tl84_red_ctbl[30]' => { },
'r2_tl84_red_ctbl[31]' => { },
'r2_tl84_green_ctbl[0]' => { },
'r2_tl84_green_ctbl[1]' => { },
'r2_tl84_green_ctbl[2]' => { },
'r2_tl84_green_ctbl[3]' => { },
'r2_tl84_green_ctbl[4]' => { },
'r2_tl84_green_ctbl[5]' => { },
'r2_tl84_green_ctbl[6]' => { },
'r2_tl84_green_ctbl[7]' => { },
'r2_tl84_green_ctbl[8]' => { },
'r2_tl84_green_ctbl[9]' => { },
'r2_tl84_green_ctbl[10]' => { },
'r2_tl84_green_ctbl[11]' => { },
'r2_tl84_green_ctbl[12]' => { },
'r2_tl84_green_ctbl[13]' => { },
'r2_tl84_green_ctbl[14]' => { },
'r2_tl84_green_ctbl[15]' => { },
'r2_tl84_green_ctbl[16]' => { },
'r2_tl84_green_ctbl[17]' => { },
'r2_tl84_green_ctbl[18]' => { },
'r2_tl84_green_ctbl[19]' => { },
'r2_tl84_green_ctbl[20]' => { },
'r2_tl84_green_ctbl[21]' => { },
'r2_tl84_green_ctbl[22]' => { },
'r2_tl84_green_ctbl[23]' => { },
'r2_tl84_green_ctbl[24]' => { },
'r2_tl84_green_ctbl[25]' => { },
'r2_tl84_green_ctbl[26]' => { },
'r2_tl84_green_ctbl[27]' => { },
'r2_tl84_green_ctbl[28]' => { },
'r2_tl84_green_ctbl[29]' => { },
'r2_tl84_green_ctbl[30]' => { },
'r2_tl84_green_ctbl[31]' => { },
'r2_tl84_blue_ctbl[0]' => { },
'r2_tl84_blue_ctbl[1]' => { },
'r2_tl84_blue_ctbl[2]' => { },
'r2_tl84_blue_ctbl[3]' => { },
'r2_tl84_blue_ctbl[4]' => { },
'r2_tl84_blue_ctbl[5]' => { },
'r2_tl84_blue_ctbl[6]' => { },
'r2_tl84_blue_ctbl[7]' => { },
'r2_tl84_blue_ctbl[8]' => { },
'r2_tl84_blue_ctbl[9]' => { },
'r2_tl84_blue_ctbl[10]' => { },
'r2_tl84_blue_ctbl[11]' => { },
'r2_tl84_blue_ctbl[12]' => { },
'r2_tl84_blue_ctbl[13]' => { },
'r2_tl84_blue_ctbl[14]' => { },
'r2_tl84_blue_ctbl[15]' => { },
'r2_tl84_blue_ctbl[16]' => { },
'r2_tl84_blue_ctbl[17]' => { },
'r2_tl84_blue_ctbl[18]' => { },
'r2_tl84_blue_ctbl[19]' => { },
'r2_tl84_blue_ctbl[20]' => { },
'r2_tl84_blue_ctbl[21]' => { },
'r2_tl84_blue_ctbl[22]' => { },
'r2_tl84_blue_ctbl[23]' => { },
'r2_tl84_blue_ctbl[24]' => { },
'r2_tl84_blue_ctbl[25]' => { },
'r2_tl84_blue_ctbl[26]' => { },
'r2_tl84_blue_ctbl[27]' => { },
'r2_tl84_blue_ctbl[28]' => { },
'r2_tl84_blue_ctbl[29]' => { },
'r2_tl84_blue_ctbl[30]' => { },
'r2_tl84_blue_ctbl[31]' => { },
'r2_tl84_red_stbl[0]' => { },
'r2_tl84_red_stbl[1]' => { },
'r2_tl84_red_stbl[2]' => { },
'r2_tl84_red_stbl[3]' => { },
'r2_tl84_red_stbl[4]' => { },
'r2_tl84_red_stbl[5]' => { },
'r2_tl84_red_stbl[6]' => { },
'r2_tl84_red_stbl[7]' => { },
'r2_tl84_red_stbl[8]' => { },
'r2_tl84_red_stbl[9]' => { },
'r2_tl84_red_stbl[10]' => { },
'r2_tl84_red_stbl[11]' => { },
'r2_tl84_red_stbl[12]' => { },
'r2_tl84_red_stbl[13]' => { },
'r2_tl84_red_stbl[14]' => { },
'r2_tl84_red_stbl[15]' => { },
'r2_tl84_red_stbl[16]' => { },
'r2_tl84_red_stbl[17]' => { },
'r2_tl84_red_stbl[18]' => { },
'r2_tl84_red_stbl[19]' => { },
'r2_tl84_red_stbl[20]' => { },
'r2_tl84_red_stbl[21]' => { },
'r2_tl84_red_stbl[22]' => { },
'r2_tl84_red_stbl[23]' => { },
'r2_tl84_red_stbl[24]' => { },
'r2_tl84_red_stbl[25]' => { },
'r2_tl84_red_stbl[26]' => { },
'r2_tl84_red_stbl[27]' => { },
'r2_tl84_red_stbl[28]' => { },
'r2_tl84_red_stbl[29]' => { },
'r2_tl84_red_stbl[30]' => { },
'r2_tl84_red_stbl[31]' => { },
'r2_tl84_blue_stbl[0]' => { },
'r2_tl84_blue_stbl[1]' => { },
'r2_tl84_blue_stbl[2]' => { },
'r2_tl84_blue_stbl[3]' => { },
'r2_tl84_blue_stbl[4]' => { },
'r2_tl84_blue_stbl[5]' => { },
'r2_tl84_blue_stbl[6]' => { },
'r2_tl84_blue_stbl[7]' => { },
'r2_tl84_blue_stbl[8]' => { },
'r2_tl84_blue_stbl[9]' => { },
'r2_tl84_blue_stbl[10]' => { },
'r2_tl84_blue_stbl[11]' => { },
'r2_tl84_blue_stbl[12]' => { },
'r2_tl84_blue_stbl[13]' => { },
'r2_tl84_blue_stbl[14]' => { },
'r2_tl84_blue_stbl[15]' => { },
'r2_tl84_blue_stbl[16]' => { },
'r2_tl84_blue_stbl[17]' => { },
'r2_tl84_blue_stbl[18]' => { },
'r2_tl84_blue_stbl[19]' => { },
'r2_tl84_blue_stbl[20]' => { },
'r2_tl84_blue_stbl[21]' => { },
'r2_tl84_blue_stbl[22]' => { },
'r2_tl84_blue_stbl[23]' => { },
'r2_tl84_blue_stbl[24]' => { },
'r2_tl84_blue_stbl[25]' => { },
'r2_tl84_blue_stbl[26]' => { },
'r2_tl84_blue_stbl[27]' => { },
'r2_tl84_blue_stbl[28]' => { },
'r2_tl84_blue_stbl[29]' => { },
'r2_tl84_blue_stbl[30]' => { },
'r2_tl84_blue_stbl[31]' => { },
'r2_tl84_green_stbl[0]' => { },
'r2_tl84_green_stbl[1]' => { },
'r2_tl84_green_stbl[2]' => { },
'r2_tl84_green_stbl[3]' => { },
'r2_tl84_green_stbl[4]' => { },
'r2_tl84_green_stbl[5]' => { },
'r2_tl84_green_stbl[6]' => { },
'r2_tl84_green_stbl[7]' => { },
'r2_tl84_green_stbl[8]' => { },
'r2_tl84_green_stbl[9]' => { },
'r2_tl84_green_stbl[10]' => { },
'r2_tl84_green_stbl[11]' => { },
'r2_tl84_green_stbl[12]' => { },
'r2_tl84_green_stbl[13]' => { },
'r2_tl84_green_stbl[14]' => { },
'r2_tl84_green_stbl[15]' => { },
'r2_tl84_green_stbl[16]' => { },
'r2_tl84_green_stbl[17]' => { },
'r2_tl84_green_stbl[18]' => { },
'r2_tl84_green_stbl[19]' => { },
'r2_tl84_green_stbl[20]' => { },
'r2_tl84_green_stbl[21]' => { },
'r2_tl84_green_stbl[22]' => { },
'r2_tl84_green_stbl[23]' => { },
'r2_tl84_green_stbl[24]' => { },
'r2_tl84_green_stbl[25]' => { },
'r2_tl84_green_stbl[26]' => { },
'r2_tl84_green_stbl[27]' => { },
'r2_tl84_green_stbl[28]' => { },
'r2_tl84_green_stbl[29]' => { },
'r2_tl84_green_stbl[30]' => { },
'r2_tl84_green_stbl[31]' => { },
'r2_d65_cx' => { },
'r2_d65_cy' => { },
'r2_d65_width' => { },
'r2_d65_height' => { },
'r2_d65_intervals' => { },
'r2_d65_tbl[0]' => { },
'r2_d65_tbl[1]' => { },
'r2_d65_tbl[2]' => { },
'r2_d65_tbl[3]' => { },
'r2_d65_tbl[4]' => { },
'r2_d65_tbl[5]' => { },
'r2_d65_tbl[6]' => { },
'r2_d65_tbl[7]' => { },
'r2_d65_tbl[8]' => { },
'r2_d65_tbl[9]' => { },
'r2_d65_tbl[10]' => { },
'r2_d65_tbl[11]' => { },
'r2_d65_tbl[12]' => { },
'r2_d65_tbl[13]' => { },
'r2_d65_tbl[14]' => { },
'r2_d65_tbl[15]' => { },
'r2_d65_tbl[16]' => { },
'r2_d65_tbl[17]' => { },
'r2_d65_tbl[18]' => { },
'r2_d65_tbl[19]' => { },
'r2_d65_tbl[20]' => { },
'r2_d65_tbl[21]' => { },
'r2_d65_tbl[22]' => { },
'r2_d65_tbl[23]' => { },
'r2_d65_tbl[24]' => { },
'r2_d65_tbl[25]' => { },
'r2_d65_tbl[26]' => { },
'r2_d65_tbl[27]' => { },
'r2_d65_tbl[28]' => { },
'r2_d65_tbl[29]' => { },
'r2_d65_tbl[30]' => { },
'r2_d65_tbl[31]' => { },
'r2_d65_red_ctbl[0]' => { },
'r2_d65_red_ctbl[1]' => { },
'r2_d65_red_ctbl[2]' => { },
'r2_d65_red_ctbl[3]' => { },
'r2_d65_red_ctbl[4]' => { },
'r2_d65_red_ctbl[5]' => { },
'r2_d65_red_ctbl[6]' => { },
'r2_d65_red_ctbl[7]' => { },
'r2_d65_red_ctbl[8]' => { },
'r2_d65_red_ctbl[9]' => { },
'r2_d65_red_ctbl[10]' => { },
'r2_d65_red_ctbl[11]' => { },
'r2_d65_red_ctbl[12]' => { },
'r2_d65_red_ctbl[13]' => { },
'r2_d65_red_ctbl[14]' => { },
'r2_d65_red_ctbl[15]' => { },
'r2_d65_red_ctbl[16]' => { },
'r2_d65_red_ctbl[17]' => { },
'r2_d65_red_ctbl[18]' => { },
'r2_d65_red_ctbl[19]' => { },
'r2_d65_red_ctbl[20]' => { },
'r2_d65_red_ctbl[21]' => { },
'r2_d65_red_ctbl[22]' => { },
'r2_d65_red_ctbl[23]' => { },
'r2_d65_red_ctbl[24]' => { },
'r2_d65_red_ctbl[25]' => { },
'r2_d65_red_ctbl[26]' => { },
'r2_d65_red_ctbl[27]' => { },
'r2_d65_red_ctbl[28]' => { },
'r2_d65_red_ctbl[29]' => { },
'r2_d65_red_ctbl[30]' => { },
'r2_d65_red_ctbl[31]' => { },
'r2_d65_green_ctbl[0]' => { },
'r2_d65_green_ctbl[1]' => { },
'r2_d65_green_ctbl[2]' => { },
'r2_d65_green_ctbl[3]' => { },
'r2_d65_green_ctbl[4]' => { },
'r2_d65_green_ctbl[5]' => { },
'r2_d65_green_ctbl[6]' => { },
'r2_d65_green_ctbl[7]' => { },
'r2_d65_green_ctbl[8]' => { },
'r2_d65_green_ctbl[9]' => { },
'r2_d65_green_ctbl[10]' => { },
'r2_d65_green_ctbl[11]' => { },
'r2_d65_green_ctbl[12]' => { },
'r2_d65_green_ctbl[13]' => { },
'r2_d65_green_ctbl[14]' => { },
'r2_d65_green_ctbl[15]' => { },
'r2_d65_green_ctbl[16]' => { },
'r2_d65_green_ctbl[17]' => { },
'r2_d65_green_ctbl[18]' => { },
'r2_d65_green_ctbl[19]' => { },
'r2_d65_green_ctbl[20]' => { },
'r2_d65_green_ctbl[21]' => { },
'r2_d65_green_ctbl[22]' => { },
'r2_d65_green_ctbl[23]' => { },
'r2_d65_green_ctbl[24]' => { },
'r2_d65_green_ctbl[25]' => { },
'r2_d65_green_ctbl[26]' => { },
'r2_d65_green_ctbl[27]' => { },
'r2_d65_green_ctbl[28]' => { },
'r2_d65_green_ctbl[29]' => { },
'r2_d65_green_ctbl[30]' => { },
'r2_d65_green_ctbl[31]' => { },
'r2_d65_blue_ctbl[0]' => { },
'r2_d65_blue_ctbl[1]' => { },
'r2_d65_blue_ctbl[2]' => { },
'r2_d65_blue_ctbl[3]' => { },
'r2_d65_blue_ctbl[4]' => { },
'r2_d65_blue_ctbl[5]' => { },
'r2_d65_blue_ctbl[6]' => { },
'r2_d65_blue_ctbl[7]' => { },
'r2_d65_blue_ctbl[8]' => { },
'r2_d65_blue_ctbl[9]' => { },
'r2_d65_blue_ctbl[10]' => { },
'r2_d65_blue_ctbl[11]' => { },
'r2_d65_blue_ctbl[12]' => { },
'r2_d65_blue_ctbl[13]' => { },
'r2_d65_blue_ctbl[14]' => { },
'r2_d65_blue_ctbl[15]' => { },
'r2_d65_blue_ctbl[16]' => { },
'r2_d65_blue_ctbl[17]' => { },
'r2_d65_blue_ctbl[18]' => { },
'r2_d65_blue_ctbl[19]' => { },
'r2_d65_blue_ctbl[20]' => { },
'r2_d65_blue_ctbl[21]' => { },
'r2_d65_blue_ctbl[22]' => { },
'r2_d65_blue_ctbl[23]' => { },
'r2_d65_blue_ctbl[24]' => { },
'r2_d65_blue_ctbl[25]' => { },
'r2_d65_blue_ctbl[26]' => { },
'r2_d65_blue_ctbl[27]' => { },
'r2_d65_blue_ctbl[28]' => { },
'r2_d65_blue_ctbl[29]' => { },
'r2_d65_blue_ctbl[30]' => { },
'r2_d65_blue_ctbl[31]' => { },
'r2_d65_red_stbl[0]' => { },
'r2_d65_red_stbl[1]' => { },
'r2_d65_red_stbl[2]' => { },
'r2_d65_red_stbl[3]' => { },
'r2_d65_red_stbl[4]' => { },
'r2_d65_red_stbl[5]' => { },
'r2_d65_red_stbl[6]' => { },
'r2_d65_red_stbl[7]' => { },
'r2_d65_red_stbl[8]' => { },
'r2_d65_red_stbl[9]' => { },
'r2_d65_red_stbl[10]' => { },
'r2_d65_red_stbl[11]' => { },
'r2_d65_red_stbl[12]' => { },
'r2_d65_red_stbl[13]' => { },
'r2_d65_red_stbl[14]' => { },
'r2_d65_red_stbl[15]' => { },
'r2_d65_red_stbl[16]' => { },
'r2_d65_red_stbl[17]' => { },
'r2_d65_red_stbl[18]' => { },
'r2_d65_red_stbl[19]' => { },
'r2_d65_red_stbl[20]' => { },
'r2_d65_red_stbl[21]' => { },
'r2_d65_red_stbl[22]' => { },
'r2_d65_red_stbl[23]' => { },
'r2_d65_red_stbl[24]' => { },
'r2_d65_red_stbl[25]' => { },
'r2_d65_red_stbl[26]' => { },
'r2_d65_red_stbl[27]' => { },
'r2_d65_red_stbl[28]' => { },
'r2_d65_red_stbl[29]' => { },
'r2_d65_red_stbl[30]' => { },
'r2_d65_red_stbl[31]' => { },
'r2_d65_blue_stbl[0]' => { },
'r2_d65_blue_stbl[1]' => { },
'r2_d65_blue_stbl[2]' => { },
'r2_d65_blue_stbl[3]' => { },
'r2_d65_blue_stbl[4]' => { },
'r2_d65_blue_stbl[5]' => { },
'r2_d65_blue_stbl[6]' => { },
'r2_d65_blue_stbl[7]' => { },
'r2_d65_blue_stbl[8]' => { },
'r2_d65_blue_stbl[9]' => { },
'r2_d65_blue_stbl[10]' => { },
'r2_d65_blue_stbl[11]' => { },
'r2_d65_blue_stbl[12]' => { },
'r2_d65_blue_stbl[13]' => { },
'r2_d65_blue_stbl[14]' => { },
'r2_d65_blue_stbl[15]' => { },
'r2_d65_blue_stbl[16]' => { },
'r2_d65_blue_stbl[17]' => { },
'r2_d65_blue_stbl[18]' => { },
'r2_d65_blue_stbl[19]' => { },
'r2_d65_blue_stbl[20]' => { },
'r2_d65_blue_stbl[21]' => { },
'r2_d65_blue_stbl[22]' => { },
'r2_d65_blue_stbl[23]' => { },
'r2_d65_blue_stbl[24]' => { },
'r2_d65_blue_stbl[25]' => { },
'r2_d65_blue_stbl[26]' => { },
'r2_d65_blue_stbl[27]' => { },
'r2_d65_blue_stbl[28]' => { },
'r2_d65_blue_stbl[29]' => { },
'r2_d65_blue_stbl[30]' => { },
'r2_d65_blue_stbl[31]' => { },
'r2_d65_green_stbl[0]' => { },
'r2_d65_green_stbl[1]' => { },
'r2_d65_green_stbl[2]' => { },
'r2_d65_green_stbl[3]' => { },
'r2_d65_green_stbl[4]' => { },
'r2_d65_green_stbl[5]' => { },
'r2_d65_green_stbl[6]' => { },
'r2_d65_green_stbl[7]' => { },
'r2_d65_green_stbl[8]' => { },
'r2_d65_green_stbl[9]' => { },
'r2_d65_green_stbl[10]' => { },
'r2_d65_green_stbl[11]' => { },
'r2_d65_green_stbl[12]' => { },
'r2_d65_green_stbl[13]' => { },
'r2_d65_green_stbl[14]' => { },
'r2_d65_green_stbl[15]' => { },
'r2_d65_green_stbl[16]' => { },
'r2_d65_green_stbl[17]' => { },
'r2_d65_green_stbl[18]' => { },
'r2_d65_green_stbl[19]' => { },
'r2_d65_green_stbl[20]' => { },
'r2_d65_green_stbl[21]' => { },
'r2_d65_green_stbl[22]' => { },
'r2_d65_green_stbl[23]' => { },
'r2_d65_green_stbl[24]' => { },
'r2_d65_green_stbl[25]' => { },
'r2_d65_green_stbl[26]' => { },
'r2_d65_green_stbl[27]' => { },
'r2_d65_green_stbl[28]' => { },
'r2_d65_green_stbl[29]' => { },
'r2_d65_green_stbl[30]' => { },
'r2_d65_green_stbl[31]' => { },
'r2_a_cx' => { },
'r2_a_cy' => { },
'r2_a_width' => { },
'r2_a_height' => { },
'r2_a_intervals' => { },
'r2_a_tbl[0]' => { },
'r2_a_tbl[1]' => { },
'r2_a_tbl[2]' => { },
'r2_a_tbl[3]' => { },
'r2_a_tbl[4]' => { },
'r2_a_tbl[5]' => { },
'r2_a_tbl[6]' => { },
'r2_a_tbl[7]' => { },
'r2_a_tbl[8]' => { },
'r2_a_tbl[9]' => { },
'r2_a_tbl[10]' => { },
'r2_a_tbl[11]' => { },
'r2_a_tbl[12]' => { },
'r2_a_tbl[13]' => { },
'r2_a_tbl[14]' => { },
'r2_a_tbl[15]' => { },
'r2_a_tbl[16]' => { },
'r2_a_tbl[17]' => { },
'r2_a_tbl[18]' => { },
'r2_a_tbl[19]' => { },
'r2_a_tbl[20]' => { },
'r2_a_tbl[21]' => { },
'r2_a_tbl[22]' => { },
'r2_a_tbl[23]' => { },
'r2_a_tbl[24]' => { },
'r2_a_tbl[25]' => { },
'r2_a_tbl[26]' => { },
'r2_a_tbl[27]' => { },
'r2_a_tbl[28]' => { },
'r2_a_tbl[29]' => { },
'r2_a_tbl[30]' => { },
'r2_a_tbl[31]' => { },
'r2_a_red_ctbl[0]' => { },
'r2_a_red_ctbl[1]' => { },
'r2_a_red_ctbl[2]' => { },
'r2_a_red_ctbl[3]' => { },
'r2_a_red_ctbl[4]' => { },
'r2_a_red_ctbl[5]' => { },
'r2_a_red_ctbl[6]' => { },
'r2_a_red_ctbl[7]' => { },
'r2_a_red_ctbl[8]' => { },
'r2_a_red_ctbl[9]' => { },
'r2_a_red_ctbl[10]' => { },
'r2_a_red_ctbl[11]' => { },
'r2_a_red_ctbl[12]' => { },
'r2_a_red_ctbl[13]' => { },
'r2_a_red_ctbl[14]' => { },
'r2_a_red_ctbl[15]' => { },
'r2_a_red_ctbl[16]' => { },
'r2_a_red_ctbl[17]' => { },
'r2_a_red_ctbl[18]' => { },
'r2_a_red_ctbl[19]' => { },
'r2_a_red_ctbl[20]' => { },
'r2_a_red_ctbl[21]' => { },
'r2_a_red_ctbl[22]' => { },
'r2_a_red_ctbl[23]' => { },
'r2_a_red_ctbl[24]' => { },
'r2_a_red_ctbl[25]' => { },
'r2_a_red_ctbl[26]' => { },
'r2_a_red_ctbl[27]' => { },
'r2_a_red_ctbl[28]' => { },
'r2_a_red_ctbl[29]' => { },
'r2_a_red_ctbl[30]' => { },
'r2_a_red_ctbl[31]' => { },
'r2_a_green_ctbl[0]' => { },
'r2_a_green_ctbl[1]' => { },
'r2_a_green_ctbl[2]' => { },
'r2_a_green_ctbl[3]' => { },
'r2_a_green_ctbl[4]' => { },
'r2_a_green_ctbl[5]' => { },
'r2_a_green_ctbl[6]' => { },
'r2_a_green_ctbl[7]' => { },
'r2_a_green_ctbl[8]' => { },
'r2_a_green_ctbl[9]' => { },
'r2_a_green_ctbl[10]' => { },
'r2_a_green_ctbl[11]' => { },
'r2_a_green_ctbl[12]' => { },
'r2_a_green_ctbl[13]' => { },
'r2_a_green_ctbl[14]' => { },
'r2_a_green_ctbl[15]' => { },
'r2_a_green_ctbl[16]' => { },
'r2_a_green_ctbl[17]' => { },
'r2_a_green_ctbl[18]' => { },
'r2_a_green_ctbl[19]' => { },
'r2_a_green_ctbl[20]' => { },
'r2_a_green_ctbl[21]' => { },
'r2_a_green_ctbl[22]' => { },
'r2_a_green_ctbl[23]' => { },
'r2_a_green_ctbl[24]' => { },
'r2_a_green_ctbl[25]' => { },
'r2_a_green_ctbl[26]' => { },
'r2_a_green_ctbl[27]' => { },
'r2_a_green_ctbl[28]' => { },
'r2_a_green_ctbl[29]' => { },
'r2_a_green_ctbl[30]' => { },
'r2_a_green_ctbl[31]' => { },
'r2_a_blue_ctbl[0]' => { },
'r2_a_blue_ctbl[1]' => { },
'r2_a_blue_ctbl[2]' => { },
'r2_a_blue_ctbl[3]' => { },
'r2_a_blue_ctbl[4]' => { },
'r2_a_blue_ctbl[5]' => { },
'r2_a_blue_ctbl[6]' => { },
'r2_a_blue_ctbl[7]' => { },
'r2_a_blue_ctbl[8]' => { },
'r2_a_blue_ctbl[9]' => { },
'r2_a_blue_ctbl[10]' => { },
'r2_a_blue_ctbl[11]' => { },
'r2_a_blue_ctbl[12]' => { },
'r2_a_blue_ctbl[13]' => { },
'r2_a_blue_ctbl[14]' => { },
'r2_a_blue_ctbl[15]' => { },
'r2_a_blue_ctbl[16]' => { },
'r2_a_blue_ctbl[17]' => { },
'r2_a_blue_ctbl[18]' => { },
'r2_a_blue_ctbl[19]' => { },
'r2_a_blue_ctbl[20]' => { },
'r2_a_blue_ctbl[21]' => { },
'r2_a_blue_ctbl[22]' => { },
'r2_a_blue_ctbl[23]' => { },
'r2_a_blue_ctbl[24]' => { },
'r2_a_blue_ctbl[25]' => { },
'r2_a_blue_ctbl[26]' => { },
'r2_a_blue_ctbl[27]' => { },
'r2_a_blue_ctbl[28]' => { },
'r2_a_blue_ctbl[29]' => { },
'r2_a_blue_ctbl[30]' => { },
'r2_a_blue_ctbl[31]' => { },
'r2_a_red_stbl[0]' => { },
'r2_a_red_stbl[1]' => { },
'r2_a_red_stbl[2]' => { },
'r2_a_red_stbl[3]' => { },
'r2_a_red_stbl[4]' => { },
'r2_a_red_stbl[5]' => { },
'r2_a_red_stbl[6]' => { },
'r2_a_red_stbl[7]' => { },
'r2_a_red_stbl[8]' => { },
'r2_a_red_stbl[9]' => { },
'r2_a_red_stbl[10]' => { },
'r2_a_red_stbl[11]' => { },
'r2_a_red_stbl[12]' => { },
'r2_a_red_stbl[13]' => { },
'r2_a_red_stbl[14]' => { },
'r2_a_red_stbl[15]' => { },
'r2_a_red_stbl[16]' => { },
'r2_a_red_stbl[17]' => { },
'r2_a_red_stbl[18]' => { },
'r2_a_red_stbl[19]' => { },
'r2_a_red_stbl[20]' => { },
'r2_a_red_stbl[21]' => { },
'r2_a_red_stbl[22]' => { },
'r2_a_red_stbl[23]' => { },
'r2_a_red_stbl[24]' => { },
'r2_a_red_stbl[25]' => { },
'r2_a_red_stbl[26]' => { },
'r2_a_red_stbl[27]' => { },
'r2_a_red_stbl[28]' => { },
'r2_a_red_stbl[29]' => { },
'r2_a_red_stbl[30]' => { },
'r2_a_red_stbl[31]' => { },
'r2_a_blue_stbl[0]' => { },
'r2_a_blue_stbl[1]' => { },
'r2_a_blue_stbl[2]' => { },
'r2_a_blue_stbl[3]' => { },
'r2_a_blue_stbl[4]' => { },
'r2_a_blue_stbl[5]' => { },
'r2_a_blue_stbl[6]' => { },
'r2_a_blue_stbl[7]' => { },
'r2_a_blue_stbl[8]' => { },
'r2_a_blue_stbl[9]' => { },
'r2_a_blue_stbl[10]' => { },
'r2_a_blue_stbl[11]' => { },
'r2_a_blue_stbl[12]' => { },
'r2_a_blue_stbl[13]' => { },
'r2_a_blue_stbl[14]' => { },
'r2_a_blue_stbl[15]' => { },
'r2_a_blue_stbl[16]' => { },
'r2_a_blue_stbl[17]' => { },
'r2_a_blue_stbl[18]' => { },
'r2_a_blue_stbl[19]' => { },
'r2_a_blue_stbl[20]' => { },
'r2_a_blue_stbl[21]' => { },
'r2_a_blue_stbl[22]' => { },
'r2_a_blue_stbl[23]' => { },
'r2_a_blue_stbl[24]' => { },
'r2_a_blue_stbl[25]' => { },
'r2_a_blue_stbl[26]' => { },
'r2_a_blue_stbl[27]' => { },
'r2_a_blue_stbl[28]' => { },
'r2_a_blue_stbl[29]' => { },
'r2_a_blue_stbl[30]' => { },
'r2_a_blue_stbl[31]' => { },
'r2_a_green_stbl[0]' => { },
'r2_a_green_stbl[1]' => { },
'r2_a_green_stbl[2]' => { },
'r2_a_green_stbl[3]' => { },
'r2_a_green_stbl[4]' => { },
'r2_a_green_stbl[5]' => { },
'r2_a_green_stbl[6]' => { },
'r2_a_green_stbl[7]' => { },
'r2_a_green_stbl[8]' => { },
'r2_a_green_stbl[9]' => { },
'r2_a_green_stbl[10]' => { },
'r2_a_green_stbl[11]' => { },
'r2_a_green_stbl[12]' => { },
'r2_a_green_stbl[13]' => { },
'r2_a_green_stbl[14]' => { },
'r2_a_green_stbl[15]' => { },
'r2_a_green_stbl[16]' => { },
'r2_a_green_stbl[17]' => { },
'r2_a_green_stbl[18]' => { },
'r2_a_green_stbl[19]' => { },
'r2_a_green_stbl[20]' => { },
'r2_a_green_stbl[21]' => { },
'r2_a_green_stbl[22]' => { },
'r2_a_green_stbl[23]' => { },
'r2_a_green_stbl[24]' => { },
'r2_a_green_stbl[25]' => { },
'r2_a_green_stbl[26]' => { },
'r2_a_green_stbl[27]' => { },
'r2_a_green_stbl[28]' => { },
'r2_a_green_stbl[29]' => { },
'r2_a_green_stbl[30]' => { },
'r2_a_green_stbl[31]' => { },
'def_cor_c0' => { },
'def_cor_c1' => { },
'def_cor_c2' => { },
'def_cor_c3' => { },
'def_cor_c4' => { },
'def_cor_c5' => { },
'def_cor_c6' => { },
'def_cor_c7' => { },
'def_cor_c8' => { },
'def_cor_k0' => { },
'def_cor_k1' => { },
'def_cor_k2' => { },
'yhi_ylo_cor_c0' => { },
'yhi_ylo_cor_c1' => { },
'yhi_ylo_cor_c2' => { },
'yhi_ylo_cor_c3' => { },
'yhi_ylo_cor_c4' => { },
'yhi_ylo_cor_c5' => { },
'yhi_ylo_cor_c6' => { },
'yhi_ylo_cor_c7' => { },
'yhi_ylo_cor_c8' => { },
'yhi_ylo_cor_k0' => { },
'yhi_ylo_cor_k1' => { },
'yhi_ylo_cor_k2' => { },
'def_conv_chrm_a_m' => { },
'def_conv_chrm_a_p' => { },
'def_conv_chrm_b_m' => { },
'def_conv_chrm_b_p' => { },
'def_conv_chrm_c_m' => { },
'def_conv_chrm_c_p' => { },
'def_conv_chrm_d_m' => { },
'def_conv_chrm_d_p' => { },
'def_conv_chrm_k_cb' => { },
'def_conv_chrm_k_cr' => { },
'def_conv_luma_v0' => { },
'def_conv_luma_v1' => { },
'def_conv_luma_v2' => { },
'def_conv_luma_k' => { },
'tl84_conv_chrm_a_m' => { },
'tl84_conv_chrm_a_p' => { },
'tl84_conv_chrm_b_m' => { },
'tl84_conv_chrm_b_p' => { },
'tl84_conv_chrm_c_m' => { },
'tl84_conv_chrm_c_p' => { },
'tl84_conv_chrm_d_m' => { },
'tl84_conv_chrm_d_p' => { },
'tl84_conv_chrm_k_cb' => { },
'tl84_conv_chrm_k_cr' => { },
'tl84_conv_luma_v0' => { },
'tl84_conv_luma_v1' => { },
'tl84_conv_luma_v2' => { },
'tl84_conv_luma_k' => { },
'incand_conv_chrm_a_m' => { },
'incand_conv_chrm_a_p' => { },
'incand_conv_chrm_b_m' => { },
'incand_conv_chrm_b_p' => { },
'incand_conv_chrm_c_m' => { },
'incand_conv_chrm_c_p' => { },
'incand_conv_chrm_d_m' => { },
'incand_conv_chrm_d_p' => { },
'incand_conv_chrm_k_cb' => { },
'incand_conv_chrm_k_cr' => { },
'incand_conv_luma_v0' => { },
'incand_conv_luma_v1' => { },
'incand_conv_luma_v2' => { },
'incand_conv_luma_k' => { },
'daylt_conv_chrm_a_m' => { },
'daylt_conv_chrm_a_p' => { },
'daylt_conv_chrm_b_m' => { },
'daylt_conv_chrm_b_p' => { },
'daylt_conv_chrm_c_m' => { },
'daylt_conv_chrm_c_p' => { },
'daylt_conv_chrm_d_m' => { },
'daylt_conv_chrm_d_p' => { },
'daylt_conv_chrm_k_cb' => { },
'daylt_conv_chrm_k_cr' => { },
'daylt_conv_luma_v0' => { },
'daylt_conv_luma_v1' => { },
'daylt_conv_luma_v2' => { },
'daylt_conv_luma_k' => { },
'yhi_ylo_conv_chrm_a_m' => { },
'yhi_ylo_conv_chrm_a_p' => { },
'yhi_ylo_conv_chrm_b_m' => { },
'yhi_ylo_conv_chrm_b_p' => { },
'yhi_ylo_conv_chrm_c_m' => { },
'yhi_ylo_conv_chrm_c_p' => { },
'yhi_ylo_conv_chrm_d_m' => { },
'yhi_ylo_conv_chrm_d_p' => { },
'yhi_ylo_conv_chrm_k_cb' => { },
'yhi_ylo_conv_chrm_k_cr' => { },
'yhi_ylo_conv_luma_v0' => { },
'yhi_ylo_conv_luma_v1' => { },
'yhi_ylo_conv_luma_v2' => { },
'yhi_ylo_conv_luma_k' => { },
'gamma_enable' => { },
'def_luma_gamma_mode' => { },
'def_rgb_gamma_mode' => { },
'blck_lvl_even_cols' => { },
'blck_lvl_odd_cols' => { },
'defect_pix_min_thresh' => { },
'defect_pix_max_thresh' => { },
'defect_pix_cor_enable' => { },
'prview_resol' => { },
'snapshot_resol' => { },
'curr_resol' => { },
'sensor_fmt' => { },
'discard_frst_frm' => { },
'frm_skip_pttrn' => { },
'sensor_type' => { },
'max_video_fps' => { },
'video_fps' => { },
'max_prview_fps' => { },
'prview_fps' => { },
'nghtsht_fps' => { },
'sensr_ful_wdth' => { },
'sensr_ful_hght' => { },
'sensr_qtr_wdth' => { },
'sensr_qtr_hght' => { },
'nightshot_mode' => { },
'pclk_invert' => { },
'cam_mclk_hz' => { },
'chrom_supress' => { },
'chro_sup_luma_thres_1' => { },
'chro_sup_luma_thres_2' => { },
'chro_sup_luma_thres_3' => { },
'chro_sup_luma_thres_4' => { },
'chro_sup_chro_thres_1' => { },
'chro_sup_chro_thres_2' => { },
'la_detect' => { },
'la_enable' => { },
'HJR_enable' => { },
'HJR_max_num_frames' => { },
'HJR_one_to_two_offset' => { },
'HJR_n_reduction_flat' => { },
'HJR_n_reduction_texture' => { },
'HJR_texture_threshold' => { },
);
# generate tag names and descriptions
# (top-level code: runs once when the module is loaded, filling in the Name
#  and Description entries for every tag in the Main table by deriving them
#  from the raw Qualcomm tag ID strings via MakeNameAndDesc() below;
#  TagTableKeys() is an ExifTool helper imported elsewhere in this file)
{
    local $_; # keep any caller's $_ safe from the foreach aliasing below
    my $table = \%Image::ExifTool::Qualcomm::Main;
    MakeNameAndDesc($_, $$table{$_}) foreach TagTableKeys($table);
}
#------------------------------------------------------------------------------
# Generate tag Name and Description from a Qualcomm tag ID
# Inputs: 0) tag ID string, 1) tagInfo hash ref
# Returns: true on success (and fills in Name/Description of the tagInfo ref);
#          0 if no valid characters remain after cleanup
sub MakeNameAndDesc($$)
{
    my ($name, $tagInfo) = @_;
    # upper-case a recognized leading acronym, otherwise just the first letter
    unless ($name =~ s/^(asf|awb|aec|afr|af_|la_|r2_tl|tl)/\U$1/) {
        $name = ucfirst $name;
    }
    $name =~ s/_([a-z])/_\u$1/g;                  # capitalize first letter of each word
    $name =~ s/\[(\d+)\]$/sprintf("_%.2d",$1)/e;  # 2-digit subscripts, brackets removed
    $name =~ tr/-_a-zA-Z0-9//dc;                  # drop characters invalid in a tag name
    my $desc = $name;
    # the description gets spaces where the name had underlines
    if ($desc =~ tr/_/ /) {
        # underlines existed, so strip the unnecessary ones from the tag name
        $name =~ s/_([A-Z][a-z])/$1/g;
        $name =~ s/([a-z0-9])_([A-Z])/$1$2/g;
        $name =~ s/([A-Za-z])_(\d)/$1$2/g;
    }
    return 0 unless length $name;
    $$tagInfo{Name} = $name;
    $$tagInfo{Description} = $desc;
    return 1;
}
#------------------------------------------------------------------------------
# Process Qualcomm APP7 metadata (ref PH)
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success
# Notes: entries are little-endian and laid out as:
#   int16u value length, int8u tag-name length, tag name string,
#   int8u format code, 4 unknown bytes (two int16u counts?), value data
sub ProcessQualcomm($$$)
{
    my ($et, $dirInfo, $tagTablePtr) = @_;
    my $dataPt = $$dirInfo{DataPt};
    my $dataPos = $$dirInfo{DataPos};
    my $pos = $$dirInfo{DirStart};
    my $dirEnd = $pos + $$dirInfo{DirLen};
    $et->VerboseDir('Qualcomm', undef, $$dirInfo{DirLen});
    SetByteOrder('II');
    # loop while at least an entry header (3 bytes + 1) could remain
    while ($pos + 3 < $dirEnd) {
        my $valLen = Get16u($dataPt, $pos);
        my $tagLen = Get8u($dataPt, $pos + 2);
        # stop if the full entry (3-byte header + tag + 5 + value) overruns the directory
        last if $pos + 8 + $tagLen + $valLen > $dirEnd;
        my $tag = substr($$dataPt, $pos + 3, $tagLen);
        $pos += 3 + $tagLen; # point to format byte
        my $fmt = Get8u($dataPt, $pos);
        # (not sure what these counts are for -- both are always 1 in my samples)
        #my $cnt1 = Get16u($dataPt, $pos + 1);
        #my $cnt2 = Get16u($dataPt, $pos + 3);
        $pos += 5; # point to start of value data
        my ($val, $format);
        if ($fmt <= 7) {
            # known format code: decode via the lookup table defined elsewhere in this file
            $format = $qualcommFormat[$fmt];
            $val = ReadValue($dataPt, $pos, $format, undef, $valLen);
        } else {
            # unknown format: keep the raw bytes as a scalar reference
            $format = "format $fmt";
            my $value = substr($$dataPt, $pos, $valLen);
            $val = \$value;
        }
        # auto-generate a tag table entry for tags we haven't seen before
        unless (defined $$tagTablePtr{$tag} or $Image::ExifTool::specialTags{$tag}) {
            my %tagInfo;
            if (MakeNameAndDesc($tag, \%tagInfo)) {
                $et->VPrint(0, $$et{INDENT}, "[adding Qualcomm:$tagInfo{Name}]\n");
                AddTagToTable($tagTablePtr, $tag, \%tagInfo);
            }
        }
        $et->HandleTag($tagTablePtr, $tag, $val,
            DataPt => $dataPt,
            DataPos => $dataPos,
            Start => $pos,
            Size => $valLen,
            Format => $format,
        );
        $pos += $valLen; # point to start of next entry
    }
    return 1;
}
1; # end
__END__
=head1 NAME
Image::ExifTool::Qualcomm - Read Qualcomm APP7 meta information
=head1 SYNOPSIS
This module is loaded automatically by Image::ExifTool when required.
=head1 DESCRIPTION
This module contains definitions required by Image::ExifTool to read
information from the APP7 Qualcomm segment in JPEG images.
=head1 AUTHOR
Copyright 2003-2022, Phil Harvey (philharvey66 at gmail.com)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 SEE ALSO
L<Image::ExifTool::TagNames/Qualcomm Tags>,
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| 33.064684 | 86 | 0.513559 |
ed76e4bcda87b289436da3a3fe0aec4d374d2e1d | 4,724 | pm | Perl | tools_hive/modules/EnsEMBL/Web/Parsers/NCBIBLAST.pm | sarahhunt/public-plugins | e5998b5b1791befdd67cf4e8f210c23cec7527d1 | [
"Apache-2.0"
]
| null | null | null | tools_hive/modules/EnsEMBL/Web/Parsers/NCBIBLAST.pm | sarahhunt/public-plugins | e5998b5b1791befdd67cf4e8f210c23cec7527d1 | [
"Apache-2.0"
]
| null | null | null | tools_hive/modules/EnsEMBL/Web/Parsers/NCBIBLAST.pm | sarahhunt/public-plugins | e5998b5b1791befdd67cf4e8f210c23cec7527d1 | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Parsers::NCBIBLAST;
use strict;
use warnings;
use Bio::EnsEMBL::DBSQL::DBAdaptor;
use EnsEMBL::Web::Utils::FileHandler qw(file_get_contents);
use parent qw(EnsEMBL::Web::Parsers);
# Constructor: builds the parser via the parent class, then attaches an
# Ensembl database adaptor created from the runnable's 'dba' parameter hash.
sub new {
    my ($class, @args) = @_;
    my $self = $class->SUPER::new(@args);
    $self->{'dba'} = Bio::EnsEMBL::DBSQL::DBAdaptor->new(%{ $self->runnable->param('dba') });
    return $self;
}
# Release the database connection held by our adaptor if it is idle.
sub disconnect_dbc {
    my ($self) = @_;
    my $connection = $self->{'dba'}->dbc;
    $connection->disconnect_if_idle;
}
# Return (and memoize on $self) the database adaptor for the given feature
# type, e.g. 'Transcript' or 'Translation'.
sub get_adapter {
    my ($self, $feature_type) = @_;
    my $cache_key = "_adaptor_$feature_type";
    $self->{$cache_key} ||= $self->{'dba'}->get_adaptor($feature_type);
    return $self->{$cache_key};
}
# Parse a tab-separated BLAST report file into an arrayref of hit hashrefs,
# sorted by e-value, truncated to the configured maximum number of hits, and
# mapped to genomic coordinates.
# Inputs: 0) self, 1) path of the report file
# Returns: arrayref of hit hashrefs
sub parse {
    my ($self, $file) = @_;

    my $runnable    = $self->runnable;
    my $species     = $runnable->param('species');
    my $source_type = $runnable->param('source');
    my $max_hits    = $runnable->param('__max_hits');

    # one hashref per tab-separated report line ($_ holds the current line)
    my @hits = file_get_contents($file, sub {
        chomp;
        my @col = split /\t/, $_;
        my $query_fwd  = $col[1] < $col[2];
        my $target_fwd = $col[4] < $col[5];
        my ($t_lo, $t_hi) = $target_fwd ? ($col[4], $col[5]) : ($col[5], $col[4]);
        return {
            qid    => $col[0],
            qstart => $col[1],
            qend   => $col[2],
            qori   => $query_fwd ? 1 : -1,
            qframe => $col[11],
            tid    => $col[3] =~ s/\..+//r, # target id without any version info
            v_tid  => $col[3],              # possibly versioned target id
            tstart => $t_lo,
            tend   => $t_hi,
            tori   => $target_fwd ? 1 : -1,
            tframe => $col[12],
            score  => $col[6],
            evalue => $col[7],
            pident => $col[8],
            len    => $col[9],
            aln    => $col[10],
        };
    });

    # best (lowest) e-value first
    @hits = sort { $a->{'evalue'} <=> $b->{'evalue'} } @hits;

    # keep only the requested number of hits
    splice @hits, $max_hits if $max_hits && @hits > $max_hits;

    @hits = map { $self->map_to_genome($_, $species, $source_type) } @hits;

    $self->disconnect_dbc;

    return \@hits;
}
# Annotate a hit hashref with genomic coordinates (gid/gstart/gend/gori etc.).
# Inputs: 0) self, 1) hit hashref from parse(), 2) species name, 3) source type
# Returns: the same hit hashref, modified in place
# Notes: for genomic sources (LATESTGP) the target coordinates already are
#   genomic; otherwise the target feature is fetched and its coordinates are
#   projected onto the genome, or marked 'Unmapped' when the id is unknown.
sub map_to_genome {
    my ($self, $hit, $species, $source_type) = @_;
    my ($g_id, $g_start, $g_end, $g_ori, $g_coords, $feature_type);
    if ($source_type =~/LATESTGP/) {
        # genomic source: the target coordinates can be used directly
        $g_id = $hit->{'tid'};
        $g_start = $hit->{'tstart'};
        $g_end = $hit->{'tend'};
        $g_ori = $hit->{'tori'};
    } else {
        # choose the Ensembl object type and coordinate mapper for this source
        $feature_type = $source_type =~ /abinitio/i ? 'PredictionTranscript' : $source_type =~ /pep/i ? 'Translation' : 'Transcript';
        my $mapper = $source_type =~ /pep/i ? 'pep2genomic' : 'cdna2genomic';
        my $adaptor = $self->get_adapter($feature_type);
        # if object is not found against un-versioned id, try the one which looked like versioned and retain it as tid if object is found
        my $object;
        $object = $adaptor->fetch_by_stable_id($_) and $hit->{'tid'} = $_ and last for $hit->{'tid'}, $hit->{'v_tid'};
        if ($object) {
            # pep2genomic lives on the transcript, so step up from the translation
            $object = $object->transcript if $feature_type eq 'Translation';
            # project to genomic coordinates, dropping gaps and ordering by start
            my @coords = sort { $a->start <=> $b->start } grep { !$_->isa('Bio::EnsEMBL::Mapper::Gap') } $object->$mapper($hit->{'tstart'}, $hit->{'tend'});
            $g_id = $object->seq_region_name;
            $g_start = $coords[0]->start;
            $g_end = $coords[-1]->end;
            $g_ori = $object->strand;
            $g_coords = \@coords;
        } else {
            # stable id not found in the database
            $g_id = 'Unmapped';
            $g_start = 'N/A';
            $g_end = 'N/A';
            $g_ori = 'N/A'
        }
    }
    delete $hit->{'v_tid'} if $feature_type && $feature_type ne 'Transcript'; # we need versioning for transcript only
    $hit->{'gid'} = $g_id;
    $hit->{'gstart'} = $g_start;
    $hit->{'gend'} = $g_end;
    $hit->{'gori'} = $g_ori;
    $hit->{'species'} = $species;
    $hit->{'source'} = $source_type;
    $hit->{'g_coords'} = $g_coords if $g_coords;
    return $hit;
}
1;
| 31.704698 | 151 | 0.578323 |
ed874d7ef741dba8fdb5ce63ee3ce43ab24e5ae8 | 7,655 | pm | Perl | lib/Pithub/Issues/Labels.pm | gitpan/Pithub | fac58631cf3fe488dbb65ddb5bfdf10dfad01e80 | [
"Artistic-1.0"
]
| null | null | null | lib/Pithub/Issues/Labels.pm | gitpan/Pithub | fac58631cf3fe488dbb65ddb5bfdf10dfad01e80 | [
"Artistic-1.0"
]
| null | null | null | lib/Pithub/Issues/Labels.pm | gitpan/Pithub | fac58631cf3fe488dbb65ddb5bfdf10dfad01e80 | [
"Artistic-1.0"
]
| null | null | null | package Pithub::Issues::Labels;
$Pithub::Issues::Labels::VERSION = '0.01028';
our $AUTHORITY = 'cpan:PLU';
# ABSTRACT: Github v3 Issue Labels API
use Moo;
use Carp qw(croak);
# The request() and _validate_user_repo_args() methods used by every sub
# below are expected to come from the shared Pithub::Base class.
extends 'Pithub::Base';
# Attach one or more labels to an existing issue.
#
#     POST /repos/:user/:repo/issues/:id/labels
#
# Required: user, repo, issue_id, data (arrayref of label names).
# Returns the Pithub result object for the request.
sub add {
    my ($self, %args) = @_;
    croak 'Missing key in parameters: issue_id' unless $args{issue_id};
    croak 'Missing key in parameters: data (arrayref)'
        unless ref $args{data} eq 'ARRAY';
    $self->_validate_user_repo_args(\%args);
    my $path = sprintf '/repos/%s/%s/issues/%s/labels',
        delete $args{user}, delete $args{repo}, delete $args{issue_id};
    return $self->request(method => 'POST', path => $path, %args);
}
# Create a new label in the repository.
#
#     POST /repos/:user/:repo/labels
#
# Required: user, repo, data (hashref with e.g. name and color).
sub create {
    my ($self, %args) = @_;
    croak 'Missing key in parameters: data (hashref)'
        unless ref $args{data} eq 'HASH';
    $self->_validate_user_repo_args(\%args);
    my $path = sprintf '/repos/%s/%s/labels',
        delete $args{user}, delete $args{repo};
    return $self->request(method => 'POST', path => $path, %args);
}
# Delete a label from the repository.
#
#     DELETE /repos/:user/:repo/labels/:id
#
# Required: user, repo, label.
sub delete {
    my ($self, %args) = @_;
    croak 'Missing key in parameters: label' unless $args{label};
    $self->_validate_user_repo_args(\%args);
    my $path = sprintf '/repos/%s/%s/labels/%s',
        delete $args{user}, delete $args{repo}, delete $args{label};
    return $self->request(method => 'DELETE', path => $path, %args);
}
# Fetch a single label.
#
#     GET /repos/:user/:repo/labels/:id
#
# Required: user, repo, label.
sub get {
    my ($self, %args) = @_;
    croak 'Missing key in parameters: label' unless $args{label};
    $self->_validate_user_repo_args(\%args);
    my $path = sprintf '/repos/%s/%s/labels/%s',
        delete $args{user}, delete $args{repo}, delete $args{label};
    return $self->request(method => 'GET', path => $path, %args);
}
# List labels. Depending on the supplied keys this lists:
#   * labels of every issue in a milestone (milestone_id given),
#   * labels on a single issue (issue_id given),
#   * or all labels of the repository (neither given).
#
#     GET /repos/:user/:repo/milestones/:id/labels
#     GET /repos/:user/:repo/issues/:id/labels
#     GET /repos/:user/:repo/labels
sub list {
    my ($self, %args) = @_;
    $self->_validate_user_repo_args(\%args);
    my $user = delete $args{user};
    my $repo = delete $args{repo};
    my $path;
    if (my $milestone_id = delete $args{milestone_id}) {
        $path = sprintf '/repos/%s/%s/milestones/%s/labels', $user, $repo, $milestone_id;
    }
    elsif (my $issue_id = delete $args{issue_id}) {
        $path = sprintf '/repos/%s/%s/issues/%s/labels', $user, $repo, $issue_id;
    }
    else {
        $path = sprintf '/repos/%s/%s/labels', $user, $repo;
    }
    return $self->request(method => 'GET', path => $path, %args);
}
# Remove labels from an issue: a single label when 'label' is given,
# otherwise all labels of that issue.
#
#     DELETE /repos/:user/:repo/issues/:id/labels/:id
#     DELETE /repos/:user/:repo/issues/:id/labels
#
# Required: user, repo, issue_id. Optional: label.
sub remove {
    my ($self, %args) = @_;
    $self->_validate_user_repo_args(\%args);
    croak 'Missing key in parameters: issue_id' unless $args{issue_id};
    my $user     = delete $args{user};
    my $repo     = delete $args{repo};
    my $issue_id = delete $args{issue_id};
    if (my $label = delete $args{label}) {
        # Remove exactly one label from the issue.
        my $path = sprintf '/repos/%s/%s/issues/%s/labels/%s',
            $user, $repo, $issue_id, $label;
        return $self->request(method => 'DELETE', path => $path, %args);
    }
    # No label given: strip every label from the issue.
    my $path = sprintf '/repos/%s/%s/issues/%s/labels', $user, $repo, $issue_id;
    return $self->request(method => 'DELETE', path => $path, %args);
}
# Replace all labels of an issue with the given list.
#
#     PUT /repos/:user/:repo/issues/:id/labels
#
# Required: user, repo, issue_id, data (arrayref of label names).
sub replace {
    my ($self, %args) = @_;
    croak 'Missing key in parameters: issue_id' unless $args{issue_id};
    croak 'Missing key in parameters: data (arrayref)'
        unless ref $args{data} eq 'ARRAY';
    $self->_validate_user_repo_args(\%args);
    my $path = sprintf '/repos/%s/%s/issues/%s/labels',
        delete $args{user}, delete $args{repo}, delete $args{issue_id};
    return $self->request(method => 'PUT', path => $path, %args);
}
# Update an existing label (e.g. its name or color).
#
#     PATCH /repos/:user/:repo/labels/:id
#
# Required: user, repo, label, data (hashref of fields to change).
sub update {
    my ($self, %args) = @_;
    croak 'Missing key in parameters: label' unless $args{label};
    croak 'Missing key in parameters: data (hashref)'
        unless ref $args{data} eq 'HASH';
    $self->_validate_user_repo_args(\%args);
    my $path = sprintf '/repos/%s/%s/labels/%s',
        delete $args{user}, delete $args{repo}, delete $args{label};
    return $self->request(method => 'PATCH', path => $path, %args);
}
1;
__END__
=pod
=encoding UTF-8
=head1 NAME
Pithub::Issues::Labels - Github v3 Issue Labels API
=head1 VERSION
version 0.01028
=head1 METHODS
=head2 add
=over
=item *
Add labels to an issue
POST /repos/:user/:repo/issues/:id/labels
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->add(
repo => 'Pithub',
user => 'plu',
issue_id => 1,
data => ['Label1', 'Label2'],
);
=back
=head2 create
=over
=item *
Create a label
POST /repos/:user/:repo/labels
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->create(
repo => 'Pithub',
user => 'plu',
data => {
color => 'FFFFFF',
name => 'some label',
}
);
=back
=head2 delete
=over
=item *
Delete a label
DELETE /repos/:user/:repo/labels/:id
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->delete(
repo => 'Pithub',
user => 'plu',
label => 1,
);
=back
=head2 get
=over
=item *
Get a single label
GET /repos/:user/:repo/labels/:id
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->get(
repo => 'Pithub',
user => 'plu',
label => 1,
);
=back
=head2 list
=over
=item *
List all labels for this repository
GET /repos/:user/:repo/labels
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->list(
repo => 'Pithub',
user => 'plu'
);
=item *
List labels on an issue
GET /repos/:user/:repo/issues/:id/labels
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->list(
repo => 'Pithub',
user => 'plu',
issue_id => 1,
);
=item *
Get labels for every issue in a milestone
GET /repos/:user/:repo/milestones/:id/labels
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->list(
repo => 'Pithub',
user => 'plu',
milestone_id => 1
);
=back
=head2 remove
=over
=item *
Remove a label from an issue
DELETE /repos/:user/:repo/issues/:id/labels/:id
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->remove(
repo => 'Pithub',
user => 'plu',
issue_id => 1,
label => 1,
);
=item *
Remove all labels from an issue
DELETE /repos/:user/:repo/issues/:id/labels
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->remove(
repo => 'Pithub',
user => 'plu',
issue_id => 1,
);
=back
=head2 replace
=over
=item *
Replace all labels for an issue
PUT /repos/:user/:repo/issues/:id/labels
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->replace(
repo => 'Pithub',
user => 'plu',
issue_id => 1,
data => [qw(label3 label4)],
);
=back
=head2 update
=over
=item *
Update a label
PATCH /repos/:user/:repo/labels/:id
Examples:
my $l = Pithub::Issues::Labels->new;
my $result = $l->update(
repo => 'Pithub',
user => 'plu',
label => 1,
data => {
color => 'FFFFFF',
name => 'API',
}
);
=back
=head1 AUTHOR
Johannes Plunien <plu@cpan.org>
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2011 by Johannes Plunien.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
| 20.039267 | 140 | 0.550751 |
73e501337adffa8ebf5ebdb0057bde37d4fd39ff | 560 | t | Perl | t/00-load.t | franckporcher/perl5-Franckys-Trace | db3740470a5d3f6f8b481f8b3278a1c82a1d9f41 | [
"Artistic-2.0",
"Unlicense"
]
| 1 | 2015-04-13T07:25:18.000Z | 2015-04-13T07:25:18.000Z | t/00-load.t | franckporcher/perl5-Franckys-Trace | db3740470a5d3f6f8b481f8b3278a1c82a1d9f41 | [
"Artistic-2.0",
"Unlicense"
]
| null | null | null | t/00-load.t | franckporcher/perl5-Franckys-Trace | db3740470a5d3f6f8b481f8b3278a1c82a1d9f41 | [
"Artistic-2.0",
"Unlicense"
]
| null | null | null | #!perl -T
use 5.006;
use strict;
use warnings;
use Test::More;

plan tests => 10;

# The module must load cleanly...
use_ok( 'Franckys::Trace' );

# ...and provide each of its nine public entry points (one test apiece,
# which together with use_ok matches the plan of 10).
my @public_api = qw(
    trace_on trace_off trace_mode
    tracein traceout
    pp_tracein pp_traceout
    rp_tracein rp_traceout
);
can_ok( 'Franckys::Trace', $_ ) for @public_api;

diag( "Testing Franckys::Trace $Franckys::Trace::VERSION, Perl $], $^X" );
| 26.666667 | 74 | 0.673214 |
ed7767cb9476681bb07b2585d48ac445e781462d | 4,396 | pl | Perl | scripts/ciconfig.pl | inPhraZ/curl | 5b9770e5fc95bbaa2ebfe360c6df48ad50632e6d | [
"curl"
]
| 1 | 2022-02-01T18:43:23.000Z | 2022-02-01T18:43:23.000Z | scripts/ciconfig.pl | inPhraZ/curl | 5b9770e5fc95bbaa2ebfe360c6df48ad50632e6d | [
"curl"
]
| null | null | null | scripts/ciconfig.pl | inPhraZ/curl | 5b9770e5fc95bbaa2ebfe360c6df48ad50632e6d | [
"curl"
]
| null | null | null | #!/usr/bin/perl
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 2022, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
###########################################################################
# these options are enabled by default in the sense that they will attempt to
# check for and use this feature without the configure flag
# (every entry maps to the value 1 = "on by default")
my %defaulton = map { $_ => 1 } (
    # --enable-
    qw(
        shared static fast-install silent-rules optimize
        http ftp file ldap ldaps rtsp proxy dict telnet tftp
        pop3 imap smb smtp gopher mqtt
        manual libcurl-option libgcc ipv6
        openssl-auto-load-config versioned-symbols symbol-hiding
        threaded-resolver pthreads verbose crypto-auth ntlm ntlm-wb
        tls-srp unix-sockets cookies socketpair http-auth doh mime
        dateparse netrc progress-meter dnsshuffle get-easy-options
        alt-svc hsts
    ),
    # --with-
    qw(
        aix-soname pic zlib zstd brotli random egd-socket
        ca-bundle ca-path libssh2 nghttp2 librtmp libidn2 sysroot
        lber-lib ldap-lib
    ),
);
# Parse one job's configure command line. Every --with-/--enable- option is
# tallied into the global %with, every --without-/--disable- option into the
# global %without; all of them land in %used. Returns a human-readable
# summary "enabled... / disabled..." of this particular command line.
sub configureopts {
    my ($cmdline) = @_;
    my (%enabled, %disabled);
    # Note: '--without-'/'--disable-' never match the positive prefixes
    # because the pattern requires a hyphen right after the prefix word.
    for my $prefix ('with', 'enable') {
        while ($cmdline =~ s/--$prefix-([^ =]*)//) {
            $with{$1}++;
            $used{$1}++;
            $enabled{$1}++;
        }
    }
    for my $prefix ('without', 'disable') {
        while ($cmdline =~ s/--$prefix-([^ =]*)//) {
            $without{$1}++;
            $used{$1}++;
            $disabled{$1}++;
        }
    }
    return join(" ", sort(keys %enabled), "/", sort(keys %disabled));
}
# run configure --help and check what available WITH/ENABLE options that exist
# (populates the global %avail hash keyed by option name)
sub configurehelp {
    # List-form pipe open: no shell involved, lexical handle, and a failure
    # is reported instead of silently leaving %avail empty (the original
    # two-arg `open(C, "./configure --help|")` ignored errors entirely).
    open(my $help, '-|', './configure', '--help')
        or die "cannot run ./configure --help: $!";
    while (<$help>) {
        if ($_ =~ /^ --(with|enable)-([a-z0-9-]+)/) {
            $avail{$2}++;
        }
    }
    close($help);
}
# Scan the CI job dump produced by scripts/cijobs.pl: count job headers
# ("###" lines) and feed every "configure: ..." line through configureopts()
# so the global %with/%without/%used tallies get populated.
sub scanjobs {
    my $jobs = 0;
    # List-form pipe open with an explicit error check (the original
    # two-arg `open(CI, "./scripts/cijobs.pl|")` silently ignored failure).
    open(my $ci, '-|', './scripts/cijobs.pl')
        or die "cannot run ./scripts/cijobs.pl: $!";
    while (<$ci>) {
        if ($_ =~ /^\#\#\#/) {
            $jobs++;
        }
        if ($_ =~ /^configure: (.*)/) {
            my $c = configureopts($1);
            #print "C: $c\n";
        }
    }
    close($ci);
}
configurehelp();
scanjobs();

# Report 1: every option some CI job used, with its with/without counts;
# options configure probes for automatically are tagged "(auto)".
print "Used configure options (with / without)\n";
for my $opt (sort keys %used) {
    printf " %s: %d %d%s\n", $opt, $with{$opt}, $without{$opt},
        $defaulton{$opt} ? " (auto)" : "";
}

# Report 2: options configure offers that no CI job ever mentions.
print "Never used configure options\n";
for my $opt (sort keys %avail) {
    next if $used{$opt};
    printf " %s%s\n", $opt, $defaulton{$opt} ? " (auto)" : "";
}

# Report 3: non-default options that no job ever switches on.
print "Never ENABLED configure options that aren't on by default\n";
for my $opt (sort keys %avail) {
    next if $with{$opt} || $defaulton{$opt};
    printf " %s\n", $opt;
}

# Report 4: options jobs enable although configure --help doesn't list them.
print "ENABLED configure options that aren't available\n";
for my $opt (sort keys %with) {
    next if $avail{$opt};
    printf " %s\n", $opt;
}
| 23.762162 | 78 | 0.445177 |
ed7ca2a1308710479c0da5c64cd0f613e57aa682 | 2,026 | pm | Perl | auto-lib/Paws/WAFRegional/DeleteSizeConstraintSet.pm | agimenez/aws-sdk-perl | 9c4dff7d1af2ff0210c28ca44fb9e92bc625712b | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/WAFRegional/DeleteSizeConstraintSet.pm | agimenez/aws-sdk-perl | 9c4dff7d1af2ff0210c28ca44fb9e92bc625712b | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/WAFRegional/DeleteSizeConstraintSet.pm | agimenez/aws-sdk-perl | 9c4dff7d1af2ff0210c28ca44fb9e92bc625712b | [
"Apache-2.0"
]
| null | null | null |
# Auto-generated Paws input shape: the named arguments for the WAF Regional
# DeleteSizeConstraintSet call (see the POD below for usage).
package Paws::WAFRegional::DeleteSizeConstraintSet;
  use Moose;
  # Change token obtained from GetChangeToken (required, see POD).
  has ChangeToken => (is => 'ro', isa => 'Str', required => 1);
  # Id of the SizeConstraintSet to delete (required, see POD).
  has SizeConstraintSetId => (is => 'ro', isa => 'Str', required => 1);
  use MooseX::ClassAttribute;
  # Class-level wire metadata consumed by the Paws call machinery.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'DeleteSizeConstraintSet');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::WAFRegional::DeleteSizeConstraintSetResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::WAFRegional::DeleteSizeConstraintSet - Arguments for method DeleteSizeConstraintSet on Paws::WAFRegional
=head1 DESCRIPTION
This class represents the parameters used for calling the method DeleteSizeConstraintSet on the
AWS WAF Regional service. Use the attributes of this class
as arguments to method DeleteSizeConstraintSet.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DeleteSizeConstraintSet.
As an example:
$service_obj->DeleteSizeConstraintSet(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> ChangeToken => Str
The value returned by the most recent call to GetChangeToken.
=head2 B<REQUIRED> SizeConstraintSetId => Str
The C<SizeConstraintSetId> of the SizeConstraintSet that you want to
delete. C<SizeConstraintSetId> is returned by CreateSizeConstraintSet
and by ListSizeConstraintSets.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DeleteSizeConstraintSet in L<Paws::WAFRegional>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| 31.65625 | 249 | 0.752715 |
ed875852e21f27043f4c4f3e4e62e499a38622dc | 903 | t | Perl | t/02_custom_settings.t | bor/Dancer2-Plugin-Argon2 | cac42354cb61575474bf63eefb8563edaaf83725 | [
"Artistic-1.0"
]
| null | null | null | t/02_custom_settings.t | bor/Dancer2-Plugin-Argon2 | cac42354cb61575474bf63eefb8563edaaf83725 | [
"Artistic-1.0"
]
| null | null | null | t/02_custom_settings.t | bor/Dancer2-Plugin-Argon2 | cac42354cb61575474bf63eefb8563edaaf83725 | [
"Artistic-1.0"
]
| null | null | null | #!perl
use lib './lib';
use strict;
use warnings;
use Test::More tests => 1;
use HTTP::Request::Common qw( GET );
use Plack::Test;
# Exercise Dancer2::Plugin::Argon2 with non-default plugin settings and
# verify that the produced Argon2 hash encodes exactly those settings.
my $password = 'some-secret-password';
{
# Minimal inline Dancer2 app exposing one route that hashes $password.
# Keep `set plugins` before the route so the plugin sees the settings.
package t::lib::TestApp;
use Dancer2;
use Dancer2::Plugin::Argon2;
set plugins => {
Argon2 => {
cost => 4,
factor => '32M',
parallelism => 2,
size => 24,
} };
get '/passphrase' => sub {
return passphrase($password)->encoded;
};
}
subtest 'test app with custom settings' => sub {
my $app = t::lib::TestApp->to_app;
is( ref $app, "CODE", "Got a code ref" );
test_psgi $app => sub {
my $cb = shift;
{
my $res = $cb->( GET '/passphrase' );
# The m=32768,t=4,p=2 fields must mirror the custom settings above.
# NOTE(review): the test description string says "default settings"
# although custom settings are exercised — likely a copy-paste slip.
like $res->content, qr/^\$argon2id\$v=19\$m=32768,t=4,p=2\$[\w\+\$\/]+\z/, 'with default settings';
}
};
};
| 21 | 111 | 0.48948 |
73f28ee4ba657e67aeb6c90af78d844f719153b7 | 1,413 | pm | Perl | tests/console/parsec.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
]
| 84 | 2015-02-10T16:01:52.000Z | 2022-03-10T21:20:14.000Z | tests/console/parsec.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
]
| 8,065 | 2015-01-07T07:44:02.000Z | 2022-03-31T12:02:06.000Z | tests/console/parsec.pm | acerv/os-autoinst-distri-opensuse | 0e0cfca02f3a86323682c511a1efa926c7f0df3a | [
"FSFAP"
]
| 404 | 2015-01-14T14:42:44.000Z | 2022-03-30T07:38:08.000Z | # SUSE's openQA tests
#
# Copyright 2020 SUSE LLC
# SPDX-License-Identifier: FSFAP
# Package: parsec parsec-tool
# Summary: Test parsec service with parsec-tool
# Maintainer: Guillaume Gardet <guillaume@opensuse.org>
use base "consoletest";
use strict;
use warnings;
use testapi;
use utils;
# openQA test body: install parsec + parsec-tool, grant the test user
# parsec-clients membership, exercise the basic parsec-tool subcommands,
# then stop the service and remove the packages again.
sub run {
    my ($self) = @_;

    # Install requirements
    select_console 'root-console';
    my $packages = "parsec parsec-tool";
    zypper_call("in $packages");
    systemctl 'start parsec';

    # Add user to 'parsec-clients' group
    assert_script_run("usermod -a -G parsec-clients $testapi::username");

    select_console('user-console');
    # Use newgrp to get 'parsec-clients' group membership
    enter_cmd("newgrp parsec-clients");

    # Run tests as user with 'parsec-clients' permissions, with default config
    my @subcommands = (
        [ 'ping',           'parsec-tool ping' ],
        [ 'list-opcodes',   'parsec-tool list-opcodes --provider 1' ],
        [ 'list-providers', 'parsec-tool list-providers' ],
        [ 'list-keys',      'parsec-tool list-keys' ],
    );
    for my $entry (@subcommands) {
        my ($label, $command) = @$entry;
        record_info($label);
        assert_script_run $command;
        save_screenshot;
    }

    # exit from newgrp session
    enter_cmd("exit");

    # Clean-up
    select_console 'root-console';
    systemctl 'stop parsec';
    zypper_call("rm -u $packages");
}
1;
| 24.789474 | 78 | 0.699222 |
ed845bb2efa9a7086948c8f344916cd569a47576 | 2,888 | pm | Perl | lib/Chart/Plotly/Trace/Sankey/Domain.pm | hstejas/p5-Chart-Plotly | c1666613369ff44d0ec984935b02b4024638aacf | [
"MIT"
]
| 11 | 2017-01-08T12:08:24.000Z | 2021-12-14T15:27:56.000Z | lib/Chart/Plotly/Trace/Sankey/Domain.pm | hstejas/p5-Chart-Plotly | c1666613369ff44d0ec984935b02b4024638aacf | [
"MIT"
]
| 28 | 2016-11-21T20:25:21.000Z | 2021-04-20T16:58:38.000Z | lib/Chart/Plotly/Trace/Sankey/Domain.pm | hstejas/p5-Chart-Plotly | c1666613369ff44d0ec984935b02b4024638aacf | [
"MIT"
]
| 3 | 2016-12-14T15:59:41.000Z | 2020-07-30T04:39:43.000Z | package Chart::Plotly::Trace::Sankey::Domain;
use Moose;
use MooseX::ExtraArgs;
use Moose::Util::TypeConstraints qw(enum union);
# Register a bare, permissive 'PDL' type constraint when none exists yet, so
# the "ArrayRef|PDL" attribute unions below resolve without loading PDL.
if (!defined Moose::Util::TypeConstraints::find_type_constraint('PDL')) {
    Moose::Util::TypeConstraints::type('PDL');
}
# VERSION
# ABSTRACT: This attribute is one of the possible options for the trace sankey.
=encoding utf-8
=head1 SYNOPSIS
# EXAMPLE: examples/traces/sankey.pl
=head1 DESCRIPTION
This attribute is part of the possible options for the trace sankey.
This file has been autogenerated from the official plotly.js source.
If you like Plotly, please support them: L<https://plot.ly/>
Open source announcement: L<https://plot.ly/javascript/open-source-announcement/>
Full reference: L<https://plot.ly/javascript/reference/#sankey>
=head1 DISCLAIMER
This is an unofficial Plotly Perl module. Currently I'm not affiliated in any way with Plotly.
But I think plotly.js is a great library and I want to use it with perl.
=head1 METHODS
=cut
=head2 TO_JSON
Serialize the trace to JSON. This method should be called only by L<JSON> serializer.
=cut
# Serializer hook invoked by JSON encoders. Returns a plain hashref copy of
# the object's attributes, with:
#   * Bool-typed attributes converted to \1 / \0 (JSON true/false),
#   * extra (unknown) constructor args merged over the declared ones,
#   * the 'type' key filled in from type() when the object provides one.
sub TO_JSON {
    my $self = shift;
    my $extra_args = $self->extra_args // {};
    my $meta = $self->meta;
    my %hash = %$self;
    for my $name (sort keys %hash) {
        my $attr = $meta->get_attribute($name);
        if (defined $attr) {
            my $value = $hash{$name};
            my $type = $attr->type_constraint;
            if ($type && $type->equals('Bool')) {
                # Scalar refs \1/\0 are rendered as JSON true/false.
                $hash{$name} = $value ? \1 : \ 0;
            }
        }
    }
    # Extra args win over declared attributes with the same name.
    %hash = (%hash, %$extra_args);
    delete $hash{'extra_args'};
    if ($self->can('type') && (!defined $hash{'type'})) {
        $hash{type} = $self->type();
    }
    return \%hash;
}
=head1 ATTRIBUTES
=over
=cut
=item * column
If there is a layout grid, use the domain for this column in the grid for this sankey trace .
=cut
# Grid column this trace's domain occupies (only used with a layout grid).
has column => (
    is => "rw",
    isa => "Int",
    documentation => "If there is a layout grid, use the domain for this column in the grid for this sankey trace .",
);
=item * row
If there is a layout grid, use the domain for this row in the grid for this sankey trace .
=cut
# Grid row this trace's domain occupies (only used with a layout grid).
has row => (
    is => "rw",
    isa => "Int",
    documentation => "If there is a layout grid, use the domain for this row in the grid for this sankey trace .",
);
=item * x
Sets the horizontal domain of this sankey trace (in plot fraction).
=cut
# Horizontal extent of the trace, as [start, end] in plot fraction.
has x => (
    is => "rw",
    isa => "ArrayRef|PDL",
    documentation => "Sets the horizontal domain of this sankey trace (in plot fraction).",
);
=item * y
Sets the vertical domain of this sankey trace (in plot fraction).
=cut
# Vertical extent of the trace, as [start, end] in plot fraction.
has y => (
    is => "rw",
    isa => "ArrayRef|PDL",
    documentation => "Sets the vertical domain of this sankey trace (in plot fraction).",
);
=pod
=back
=cut
# Freeze the Moose metaclass for faster method dispatch; module returns true.
__PACKAGE__->meta->make_immutable();
1;
| 21.714286 | 117 | 0.646814 |
ed78a2f238e25b72f2b37bc43a781b95e42d8498 | 571 | pm | Perl | lib/SemanticWeb/Schema/NLNonprofitType.pm | robrwo/LDF-JSON-LD | 2745fa73562625ab217b7094a812bfc1f4be8cbc | [
"ClArtistic"
]
| null | null | null | lib/SemanticWeb/Schema/NLNonprofitType.pm | robrwo/LDF-JSON-LD | 2745fa73562625ab217b7094a812bfc1f4be8cbc | [
"ClArtistic"
]
| null | null | null | lib/SemanticWeb/Schema/NLNonprofitType.pm | robrwo/LDF-JSON-LD | 2745fa73562625ab217b7094a812bfc1f4be8cbc | [
"ClArtistic"
]
| null | null | null | use utf8;
package SemanticWeb::Schema::NLNonprofitType;
# ABSTRACT: NLNonprofitType: Non-profit organization type originating from the Netherlands.
use Moo;
# Inherits all behaviour from the generic NonprofitType class.
extends qw/ SemanticWeb::Schema::NonprofitType /;
use MooX::JSON_LD 'NLNonprofitType';
use Ref::Util qw/ is_plain_hashref /;
# RECOMMEND PREREQ: Ref::Util::XS
use namespace::autoclean;
# Module version.
our $VERSION = 'v14.0.1';
=encoding utf8
=head1 DESCRIPTION
NLNonprofitType: Non-profit organization type originating from the
Netherlands.
=cut
=head1 SEE ALSO
L<SemanticWeb::Schema::NonprofitType>
=cut
1;
| 13.595238 | 91 | 0.756567 |
ed2851eee9175e78539577ea03ae392cc2e60f35 | 8,005 | pl | Perl | src/processReads-median.pl | AlexSaraz1/paramut_bot | 08f61f231d599f2451b5ab09274a0ffbcf3cb495 | [
"MIT"
]
| 2 | 2021-01-18T15:29:28.000Z | 2021-04-25T02:59:11.000Z | src/processReads-median.pl | AlexSaraz1/paramut_bot | 08f61f231d599f2451b5ab09274a0ffbcf3cb495 | [
"MIT"
]
| null | null | null | src/processReads-median.pl | AlexSaraz1/paramut_bot | 08f61f231d599f2451b5ab09274a0ffbcf3cb495 | [
"MIT"
]
| null | null | null | ###!/usr/bin/perl -w
#created by Chongjian CHEN- 16/09/2011
#The script is used to check the input format, calcualte read length distribution, compute median quality score of each base
#if -g is specified with 1, group reads which have the same sequence in raw sequencing data. For fastq reads, the median quality score is computed in each read group.
#or else, just output original sequence with extended read name with "_1" suffix in order to have the same name format as group read.
use Getopt::Std;
use utf8;
use strict;
## get options from command line
# Every option defaults to the empty string; -h is a boolean flag.
my %opts= ('i'=>'','f'=>'','g'=>'','D'=>'','d'=>'');
# Print the command-line help text to STDERR and exit.
# The heredoc body below is user-facing output — do not reformat it.
sub usage{
    print STDERR <<EOF;
usage: $0 -i read_file -f format -g 1|0 -D seq_dir -d data_dir [-h]
-h : help message;
-i : input read file;
-f : format of read file, could be specified as "fa","csfast", "solexa", "solexa1.3", and "phred33";
-g : group read or not, 1: yes, 0: no;
-D : the raw sequence directory;
-d : the data directory;
example1: $0 -i file -f "phred33" -g 1 -D "rawdata" -d "data"
example2: $0 -i file -f "fa" -g 1 -D "rawdata" -d "data"
example3: $0 -i file -f "csfast" -g 1 -D "rawdata" -d "data"
EOF
    exit;
}
getopts('i:f:g:D:d:h', \%opts) || usage();
usage() if ($opts{h});
my ($read_file,$format,$do_group,$seq_dir,$data_dir)=($opts{i},$opts{f},$opts{g},$opts{D},$opts{d});
##specify start identifier of read (> or @) in sequence file
# fasta/csfasta records begin with '>', fastq records with '@'.
my $stid=">";
if($format!~/fa/){
    $stid="@";
}
# Running state: longest read seen so far plus the record being parsed.
my ($max_len,$cur_seq,$cur_qc)=(0,"","");
my %seq_len=(); # hash of sequence length
my %seq_group=(); # hash of read group
my %seq_qc=(); # hash of quality score
my %seq_base=(); # hash of read base
# NOTE(review): a failed open on SEQ only warns, so an unreadable input
# yields empty outputs instead of aborting — confirm this is intended.
open (SEQ,"$seq_dir/$read_file") || warn "can't open $read_file";
open (GROUPOUT,">$seq_dir/$read_file.pmod"); #modified read file after processing
# Single pass over the input: for every record, tally per-position base
# composition, read-length counts, duplicate-sequence groups and (for fastq)
# per-position quality scores, validating the declared format as we go.
# NOTE(review): usage() documents -f values "fa"/"csfast" but the branches
# below test for "fasta"/"csfasta", so those validations (and the ungrouped
# fa/csfast output) may never trigger — confirm the expected -f values.
# NOTE(review): the cleanup backticks remove "$read_file.group" although the
# file opened above is "$read_file.pmod" — looks like a stale file name.
while(<SEQ>){
    if(/^$stid(\S+)/){
        my $cur_header=$1;
        # The sequence line follows the header line.
        $cur_seq=<SEQ>;
        chomp($cur_seq);
        # Accumulate per-position base counts over all reads.
        my $nuc_pos=1;
        foreach my $c (split(//,$cur_seq)){
            $seq_base{$nuc_pos}{$c}++;
            $nuc_pos++;
        }
        my $cur_seq_len=length($cur_seq);
        ##validate the fasta/csfast format
        if($format eq "fasta"){
            if($cur_seq!~/[ATGCNatgcN]{$cur_seq_len,}/){
                `rm -f $seq_dir/$read_file.group`;
                die("$seq_dir/$read_file is not a fasta file\n");
            }
            #output read without grouping
            if(!$do_group){
                print GROUPOUT ">",$cur_header,"_1\n",$cur_seq,"\n";
            }
        }
        if($format eq "csfasta"){
            # Color-space reads: one leading nucleotide, then color digits.
            my $len_nonuc=$cur_seq_len-1;
            ##Remove the first tag from the sequence length
            $cur_seq_len=$cur_seq_len-1;
            if($cur_seq!~/[ATGCatgc][0123\.]{$len_nonuc,}/){
                `rm -f $seq_dir/$read_file.group`;
                die("$seq_dir/$read_file is not a csfasta file\n");
            }
            #output read without grouping
            if(!$do_group){
                print GROUPOUT ">",$cur_header,"_1\n",$cur_seq,"\n";
            }
        }
        #get length info
        $max_len=$cur_seq_len if ($cur_seq_len>$max_len);
        $seq_len{$cur_seq_len}++;
        #get read group (distinct read) information
        $seq_group{$cur_seq}{"ct"}++;
        #process fastq reads
        if($stid eq "@"){
            # The third record line must be the '+' separator.
            my $mid_line=<SEQ>;
            #validate the fastq format
            if($mid_line!~/^\+/){
                `rm -f $seq_dir/$read_file.group`;
                die("$seq_dir/$read_file is not a fastq file\n");
            }
            $cur_qc=<SEQ>;
            #output read without grouping
            if(!$do_group){
                print GROUPOUT "@",$cur_header,"_1\n",$cur_seq,"\n","+",$cur_header,"_1\n",$cur_qc;
            }
            chomp($cur_qc);
            my $pos=1;
            foreach my $c (split(//, $cur_qc)){
                #get unicode value from characters
                my $cur_ord=ord($c);
                #validate the fastq format
                # Each fastq variant accepts a different character range.
                if(($format eq "phred33") && ($cur_ord<33 || $cur_ord>80)){
                    `rm -f $seq_dir/$read_file.group`;
                    die("$seq_dir/$read_file is not a sanger fastq file\n");
                }
                if(($format eq "solexa") && ($cur_ord<59 || $cur_ord>126)){
                    `rm -f $seq_dir/$read_file.group`;
                    die("$seq_dir/$read_file is not a solexa 1.0 fastq file\n");
                }
                if(($format eq "solexa1.3") && ($cur_ord<64 || $cur_ord>104)){
                    # warn $cur_ord,"\n";
                    `rm -f $seq_dir/$read_file.group`;
                    die("$seq_dir/$read_file is not a solexa 1.3 fastq file\n");
                }
                #store unicode value of positional quality score
                # Scores for a position accumulate as a tab-joined string.
                if (!$seq_qc{$pos}){
                    $seq_qc{$pos}=$cur_ord;
                }
                else{
                    $seq_qc{$pos}.="\t" . $cur_ord;
                }
                #store base quality score info for each read group, if $do_group is active
                if($do_group==1){
                    if(!$seq_group{$cur_seq}{$pos}){
                        $seq_group{$cur_seq}{$pos}=$cur_ord;
                    }
                    else{
                        $seq_group{$cur_seq}{$pos}.="\t" . $cur_ord;
                    }
                }
                $pos++;
            }
        }
    }
}
close(SEQ);
##get sample name
# Sample name = input file name with its trailing extension stripped.
my $sample="";
if($read_file=~/(\S+)\./){
    $sample=$1;
}
##output length distribution of abundant reads
open (LENOUT,">$data_dir/$sample\_readlen.data");
print LENOUT "idx\t$sample\n";
foreach my $len (sort {$a <=> $b} keys %seq_len){
    print LENOUT $len,"\t",$seq_len{$len},"\n";
}
close(LENOUT);
##output read base composition
# Percentages of A/T/G/C and of G+C for every read position.
open (BASE,">$data_dir/$sample\_basestat.data");
open (GC,">$data_dir/$sample\_baseGCstat.data");
print BASE "idx\tA\tT\tG\tC\n";
print GC "idx\t$sample\n";
foreach my $pos (1..$max_len){
    # Default missing counts to 0 so the sums below are well defined.
    $seq_base{$pos}{"A"}=0 if (!$seq_base{$pos}{"A"});
    $seq_base{$pos}{"T"}=0 if (!$seq_base{$pos}{"T"});
    $seq_base{$pos}{"G"}=0 if (!$seq_base{$pos}{"G"});
    $seq_base{$pos}{"C"}=0 if (!$seq_base{$pos}{"C"});
    my $cur_basect=$seq_base{$pos}{"A"}+$seq_base{$pos}{"T"}+$seq_base{$pos}{"G"}+$seq_base{$pos}{"C"};
    my $cur_gcct=$seq_base{$pos}{"G"}+$seq_base{$pos}{"C"};
    print BASE $pos,"\t",100*$seq_base{$pos}{"A"}/$cur_basect,"\t",100*$seq_base{$pos}{"T"}/$cur_basect,"\t",100*$seq_base{$pos}{"G"}/$cur_basect,"\t",100*$seq_base{$pos}{"C"}/$cur_basect,"\n";
    print GC $pos,"\t",100*$cur_gcct/$cur_basect,"\n";
}
close(BASE);
close(GC);
##output length distribution of distinct reads
open (DISTLENOUT,">$data_dir/$sample\_distinct_readlen.data");
print DISTLENOUT "idx\t$sample\n";
my %distinct_len;
foreach my $cur_read (keys %seq_group){
    my $cur_len=length($cur_read);
    $distinct_len{$cur_len}++;
}
foreach my $len (sort {$a <=> $b} keys %distinct_len){
    print DISTLENOUT $len,"\t",$distinct_len{$len},"\n";
}
close(DISTLENOUT);
##output read groups if $do_group is active
# Each distinct sequence becomes one record named "<group-id>_<count>".
if($do_group==1){
    my $gp_id=1;
    foreach my $sg (keys %seq_group){
        #for fastq format read
        if($stid eq "@"){
            #use group id and the number of read in the group to uniquely define a read group
            print GROUPOUT "\@$gp_id\_",$seq_group{$sg}{"ct"},"\n";
            print GROUPOUT $sg,"\n";
            print GROUPOUT "\+$gp_id\_",$seq_group{$sg}{"ct"},"\n";
            my $cur_seq_len=length($sg);
            foreach my $id (1..$cur_seq_len){
                # Per-position median quality over the group's reads.
                # NOTE(review): chr() truncates the fractional medians that
                # even-sized groups produce — confirm rounding is acceptable.
                my $cur_median_qc=median_qc($seq_group{$sg}{$id});
                print GROUPOUT chr($cur_median_qc);
            }
            print GROUPOUT "\n";
            $gp_id++;
        }
        #for fa and csfast reads
        else{
            #use group id and the number of read in the group to uniquely define a read group
            print GROUPOUT ">$gp_id\_",$seq_group{$sg}{"ct"},"\n";
            print GROUPOUT $sg,"\n";
            $gp_id++;
        }
    }
}
close(GROUPOUT);
#output median quality score for each position in all fastq reads
if($stid eq "@"){
    open(QCOUT,">$data_dir/$sample\_medquality.data");
    print QCOUT "idx\t$sample\n";
    foreach my $pos (1..$max_len){
        my $cur_qcstr=$seq_qc{$pos};
        my $pos_mediansc=median_qc($cur_qcstr);
        # Convert the raw character code into a real quality score.
        $pos_mediansc=qc_real_sc($pos_mediansc);
        print QCOUT $pos,"\t",$pos_mediansc,"\n";
    }
    close(QCOUT);
}
#########
# Convert a raw quality character code (unicode value, as stored by the main
# loop) into a real quality score according to the fastq variant in the
# file-level $format:
#   * solexa1.3 : Phred score, ASCII offset 64
#   * phred33   : Phred score, ASCII offset 33
#   * otherwise : Solexa score, ASCII offset 64, converted to the Phred
#                 scale with  Q_phred = 10 * log10(10**(Q_solexa/10) + 1)
# BUGFIX: the original used '^' (integer XOR) instead of '**'
# (exponentiation) and dropped the factor of 10 in the conversion.
sub qc_real_sc{
    my ($sc)=@_;
    if($format eq "solexa1.3"){
        $sc-=64;
    }
    elsif($format eq "phred33"){
        $sc-=33;
    }
    else{
        $sc-=64;
        $sc=10*log(1+10**($sc/10))/log(10);
    }
    return $sc;    # explicit return (original relied on the last expression)
}
#########
# Median of a tab-separated list of numeric scores. For an even number of
# values the mean of the two middle values is returned.
sub median_qc{
    my ($tab_string)=@_;
    my @scores = sort { $a <=> $b } split(/\t/, $tab_string);
    my $n = scalar @scores;
    return $n % 2
        ? $scores[ int($n / 2) ]
        : ( $scores[ $n / 2 ] + $scores[ $n / 2 - 1 ] ) / 2;
}
| 29.981273 | 193 | 0.615365 |
ed6e0abd233af29d1de65766a8bffcb7156bb9c4 | 1,715 | t | Perl | t/regression/hyperlink16.t | f20/excel-writer-xlsx | b08a865c6972f935b7d72e64e5580cca8e6cc299 | [
"Artistic-1.0-Perl"
]
| 61 | 2015-02-03T02:49:53.000Z | 2022-02-13T09:17:53.000Z | t/regression/hyperlink16.t | f20/excel-writer-xlsx | b08a865c6972f935b7d72e64e5580cca8e6cc299 | [
"Artistic-1.0-Perl"
]
| 167 | 2015-01-02T09:25:11.000Z | 2022-02-16T22:04:20.000Z | t/regression/hyperlink16.t | f20/excel-writer-xlsx | b08a865c6972f935b7d72e64e5580cca8e6cc299 | [
"Artistic-1.0-Perl"
]
| 31 | 2015-02-16T12:06:45.000Z | 2021-10-14T13:03:22.000Z | ###############################################################################
#
# Tests the output of Excel::Writer::XLSX against Excel generated files.
#
# Copyright 2000-2021, John McNamara, jmcnamara@cpan.org
#
use lib 't/lib';
use TestFunctions qw(_compare_xlsx_files _is_deep_diff);
use strict;
use warnings;
use Test::More tests => 1;
###############################################################################
#
# Tests setup.
#
my $filename = 'hyperlink16.xlsx';
my $dir = 't/regression/';
my $got_filename = $dir . "ewx_$filename";
my $exp_filename = $dir . 'xlsx_files/' . $filename;
my $ignore_members = [];
my $ignore_elements = { 'xl/workbook.xml' => ['<workbookView'] };
###############################################################################
#
# Test the creation of a simple Excel::Writer::XLSX file with hyperlinks.
# This example doesn't have any link formatting and tests the relationship
# linkage code.
#
use Excel::Writer::XLSX;
my $workbook = Excel::Writer::XLSX->new( $got_filename );
my $worksheet = $workbook->add_worksheet();
# Turn off default URL format for testing.
$worksheet->{_default_url_format} = undef;
$worksheet->write_url( 'B2', 'external:./subdir/blank.xlsx' );
$workbook->close();
###############################################################################
#
# Compare the generated and existing Excel files.
#
my ( $got, $expected, $caption ) = _compare_xlsx_files(
$got_filename,
$exp_filename,
$ignore_members,
$ignore_elements,
);
_is_deep_diff( $got, $expected, $caption );
###############################################################################
#
# Cleanup.
#
unlink $got_filename;
__END__
| 23.493151 | 79 | 0.529446 |
ed2df20a4b7c3a2b5cfa9d3313ff46530feaa640 | 1,108 | pm | Perl | lib/Installation/Partitioner/LibstorageNG/v4_3/RaidOptionsPage.pm | Dawei-Pang/os-autoinst-distri-opensuse | 013cccfcdc84edb8faff13eab1ab2e6288aacd26 | [
"FSFAP"
]
| null | null | null | lib/Installation/Partitioner/LibstorageNG/v4_3/RaidOptionsPage.pm | Dawei-Pang/os-autoinst-distri-opensuse | 013cccfcdc84edb8faff13eab1ab2e6288aacd26 | [
"FSFAP"
]
| 5 | 2019-01-17T03:09:17.000Z | 2019-08-20T06:34:48.000Z | lib/Installation/Partitioner/LibstorageNG/v4_3/RaidOptionsPage.pm | Dawei-Pang/os-autoinst-distri-opensuse | 013cccfcdc84edb8faff13eab1ab2e6288aacd26 | [
"FSFAP"
]
| null | null | null | # SUSE's openQA tests
#
# Copyright © 2021 SUSE LLC
#
# Copying and distribution of this file, with or without modification,
# are permitted in any medium without royalty provided the copyright
# notice and this notice are preserved. This file is offered as-is,
# without any warranty.
# Summary: Handle page to set RAID options, like chunk size in the Expert Partitioner
# Maintainer: QE YaST <qa-sle-yast@suse.de>
package Installation::Partitioner::LibstorageNG::v4_3::RaidOptionsPage;
use strict;
use warnings;
use testapi;
use YuiRestClient;

# Page object for the RAID options step of the Expert Partitioner
# (chunk size selection and navigation to the next dialog).

# Constructor: expects a hash ref with an `app` entry (the YuiRestClient
# application handle used to resolve widgets).
sub new {
    my ($class, $args) = @_;
    my $self = {app => $args->{app}};
    bless $self, $class;
    return $self->init($args);
}

# Resolve the widgets this page interacts with.
sub init {
    my ($self) = @_;
    my $app = $self->{app};
    $self->{btn_next}      = $app->button({id => 'next'});
    $self->{cb_chunk_size} = $app->combobox({id => '"Y2Partitioner::Dialogs::MdOptions::ChunkSize"'});
    return $self;
}

# Pick the given chunk size in the combo box.
sub select_chunk_size {
    my ($self, $size) = @_;
    $self->{cb_chunk_size}->select($size);
}

# Proceed to the next dialog.
sub press_next {
    my ($self) = @_;
    $self->{btn_next}->click();
}

1;
| 23.574468 | 110 | 0.656137 |
ed6074542b649208e1c076ffb4132aa038497dc4 | 2,509 | pm | Perl | lib/Google/Ads/AdWords/v201809/MediaUploadError.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | [
"Apache-2.0"
]
| 4 | 2015-04-23T01:59:40.000Z | 2021-10-12T23:14:36.000Z | lib/Google/Ads/AdWords/v201809/MediaUploadError.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | [
"Apache-2.0"
]
| 23 | 2015-02-19T17:03:58.000Z | 2019-07-01T10:15:46.000Z | lib/Google/Ads/AdWords/v201809/MediaUploadError.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | [
"Apache-2.0"
]
| 10 | 2015-08-03T07:51:58.000Z | 2020-09-26T16:17:46.000Z | package Google::Ads::AdWords::v201809::MediaUploadError;
use strict;
use warnings;
__PACKAGE__->_set_element_form_qualified(1);
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' };
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
use base qw(Google::Ads::AdWords::v201809::ApiError);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
my %fieldPath_of :ATTR(:get<fieldPath>);
my %fieldPathElements_of :ATTR(:get<fieldPathElements>);
my %trigger_of :ATTR(:get<trigger>);
my %errorString_of :ATTR(:get<errorString>);
my %ApiError__Type_of :ATTR(:get<ApiError__Type>);
my %reason_of :ATTR(:get<reason>);
__PACKAGE__->_factory(
[ qw( fieldPath
fieldPathElements
trigger
errorString
ApiError__Type
reason
) ],
{
'fieldPath' => \%fieldPath_of,
'fieldPathElements' => \%fieldPathElements_of,
'trigger' => \%trigger_of,
'errorString' => \%errorString_of,
'ApiError__Type' => \%ApiError__Type_of,
'reason' => \%reason_of,
},
{
'fieldPath' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'fieldPathElements' => 'Google::Ads::AdWords::v201809::FieldPathElement',
'trigger' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'errorString' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'ApiError__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'reason' => 'Google::Ads::AdWords::v201809::MediaUploadError::Reason',
},
{
'fieldPath' => 'fieldPath',
'fieldPathElements' => 'fieldPathElements',
'trigger' => 'trigger',
'errorString' => 'errorString',
'ApiError__Type' => 'ApiError.Type',
'reason' => 'reason',
}
);
} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::MediaUploadError
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
MediaUploadError from the namespace https://adwords.google.com/api/adwords/cm/v201809.
Error when uploading a media.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * reason
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| 19.755906 | 86 | 0.662017 |
ed83d5d420b02345aa93d973485a06c5c3bf2f40 | 3,046 | pm | Perl | lib/Mail/ListDetector/Detector/ListSTAR.pm | mstevens/Mail--ListDetector | dd89c42d89189f11e1d815436e35f2830ba902f1 | [
"Artistic-1.0-Perl"
]
| null | null | null | lib/Mail/ListDetector/Detector/ListSTAR.pm | mstevens/Mail--ListDetector | dd89c42d89189f11e1d815436e35f2830ba902f1 | [
"Artistic-1.0-Perl"
]
| null | null | null | lib/Mail/ListDetector/Detector/ListSTAR.pm | mstevens/Mail--ListDetector | dd89c42d89189f11e1d815436e35f2830ba902f1 | [
"Artistic-1.0-Perl"
]
| 1 | 2019-10-25T10:04:35.000Z | 2019-10-25T10:04:35.000Z | package Mail::ListDetector::Detector::ListSTAR;
use strict;
use warnings;
use vars qw($VERSION);
$VERSION = '0.01';
use base qw(Mail::ListDetector::Detector::Base);
use Mail::ListDetector::List;
use Mail::ListDetector::Detector::RFC2369;
use Carp;
sub DEBUG { 0 }
sub match {
my $self = shift;
my $message = shift;
print "Got message $message\n" if DEBUG;
carp ("Mail::ListDetector::Detector::ListSTAR - no message supplied") unless defined($message);
use Email::Abstract;
my $x_listserver = Email::Abstract->get_header($message, 'X-Listserver');
my $x_list_software = Email::Abstract->get_header($message, 'X-List-Software');
my $list_software = Email::Abstract->get_header($message, 'List-Software');
my $listsoftware;
if (defined($x_listserver) && ($x_listserver =~ m/(ListSTAR v[\w\.]+)/)) {
$listsoftware = $1;
} elsif (defined($list_software) && ($list_software =~ m/(ListSTAR v[\w\.]+)/)) {
$listsoftware = $1;
} elsif (defined($x_list_software) && ($x_list_software =~ m/(ListSTAR v[\w\.]+)/)) {
$listsoftware = $1;
} else {
return undef;
}
my $listname;
my $sender = Email::Abstract->get_header($message, 'Sender');
if (defined($sender) && ($sender =~ m/<(.*)@.*>/)) {
$listname = $1;
}
my $rfc2369 = new Mail::ListDetector::Detector::RFC2369
my $list;
unless ($list = $rfc2369->match($message)) {
my $x_list_subscribe = Email::Abstract->get_header($message, 'X-List-Subscribe');
return undef unless defined($x_list_subscribe);
chomp $x_list_subscribe;
return undef unless $x_list_subscribe =~ m/(<.*>)/;
my $list_uri = new URI($1);
return undef unless defined $list_uri;
if ($list_uri->scheme ne 'mailto') {
return undef;
}
my $posting_address = $list_uri->to;
my $listname;
if($posting_address =~ m/^(.*)@.*$/) {
$listname = $1;
}
$list = new Mail::ListDetector::List;
$list->listname($listname);
$list->posting_address($posting_address);
}
if (defined($listname)) {
$list->listname($listname);
}
$list->listsoftware($listsoftware);
return $list;
}
1;
__END__
=pod
=head1 NAME
Mail::ListDetector::Detector::ListSTAR - ListSTAR message detector
=head1 SYNOPSIS
use Mail::ListDetector::Detector::ListSTAR;
=head1 DESCRIPTION
An implementation of a mailing list detector for ListSTAR mailing lists.
ListSTAR (not to be confused with Listar) is a MacOS mailing list publishing
tool by MCF Software; see http://www.liststar.com/ for details about ListSTAR.
=head1 METHODS
=head2 new()
Inherited from Mail::ListDetector::Detector::Base.
=head2 match()
Accepts a Mail::Internet object and returns either a
Mail::ListDetector::List object if it is a post to a ListSTAR
mailing list, or C<undef>.
=head1 BUGS
No known bugs.
=head1 AUTHOR
Matthew Walker - matthew@walker.wattle.id.au,
Michael Stevens - michael@etla.org,
Peter Oliver - p.d.oliver@mavit.freeserve.co.uk.
Tatsuhiko Miyagawa E<lt>miyagawa@bulknews.netE<gt>
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
| 24.764228 | 96 | 0.697636 |
ed4ed876623a6f5983df5bf10c685530f17974f4 | 3,969 | pm | Perl | lib/UV/Stream.pm | genio/p5-UV | a7aa6aa159cd411f621cf75c7a638996f9b5a65e | [
"Artistic-1.0"
]
| 4 | 2017-08-19T17:36:04.000Z | 2020-03-06T11:41:10.000Z | lib/UV/Stream.pm | genio/p5-UV | a7aa6aa159cd411f621cf75c7a638996f9b5a65e | [
"Artistic-1.0"
]
| 15 | 2017-07-11T01:03:41.000Z | 2019-02-07T14:33:11.000Z | lib/UV/Stream.pm | genio/p5-UV | a7aa6aa159cd411f621cf75c7a638996f9b5a65e | [
"Artistic-1.0"
]
| 8 | 2017-07-04T02:11:07.000Z | 2020-05-18T15:00:47.000Z | package UV::Stream;
our $VERSION = '2.000';
use strict;
use warnings;
use Carp ();
use parent 'UV::Handle';
sub listen
{
my $self = shift;
my ($backlog, $cb) = @_;
$self->on(connection => $cb) if $cb;
$self->_listen($backlog);
}
sub accept
{
my $self = shift;
my $client = (ref $self)->_new($self->loop);
$self->_accept($client);
return $client;
}
1;
__END__
=encoding utf8
=head1 NAME
UV::Stream - Stream handles in libuv
=head1 SYNOPSIS
#!/usr/bin/env perl
use strict;
use warnings;
use UV;
# Stream is the superclass of Pipe, TTY and TCP handles
# TODO
=head1 DESCRIPTION
This module provides an interface to
L<libuv's stream|http://docs.libuv.org/en/v1.x/stream.html>. We will try to
document things here as best as we can, but we also suggest you look at the
L<libuv docs|http://docs.libuv.org> directly for more details on how things
work.
You will likely never use this class directly. You will use the different
stream sub-classes directly. Some of these methods or events will be called
or fired from those sub-classes.
=head1 EVENTS
L<UV::Stream> makes the following extra events available.
=head2 connection
$stream->on("connection", sub {
my ($self) = @_;
my $client = $self->accept;
...
});
The L<connection|http://docs.libuv.org/en/v1.x/stream.html#c.uv_connection_cb>
callback fires when a new connection is received on a listening stream server.
Within the callback you should use L</accept> to obtain the new client stream.
=head2 read
$stream->on("read", sub {
my ($self, $status, $buf) = @_;
say "Received more data: <$buf>";
});
The L<read|http://docs.libuv.org/en/v1.x/stream.html#c.uv_read_cb> callback
fires whenever there is more incoming data on the stream to be passed to the
application.
=head1 METHODS
L<UV::Stream> makes the following methods available.
=head2 listen
# start listening with the callback we supplied with ->on()
$stream->listen($backlog);
# pass a callback for the "connection" event
$stream->listen($backlog, sub {
my $client = $stream->accept;
say "Received a new connection";
});
The L<listen|http://docs.libuv.org/en/v1.x/stream.html#c.uv_listen> method
starts a stream server listening for incoming client client connections. The
C<connection> event will be fired each time a new one arrives.
=head2 accept
my $client = $stream->accept;
The L<accept|http://docs.libuv.org/en/v1.x/stream.html#c.uv_accept> method
prepares a new stream connection to represent the next incoming client
connection that has been received.
=head2 shutdown
$stream->shutdown(sub {
say "Stream is now shut down";
});
The L<shutdown|http://docs.libuv.org/en/v1.x/stream.html#c.uv_shutdown> method
stops the writing half of the socket once all of the currently-pending writes
have been flushed.
=head2 read_start
# start reading with the callback we supplied with ->on()
$stream->read_start;
The L<read_start|http://docs.libuv.org/en/v1.x/stream.html#c.uv_read_start>
starts the reading side of the stream handle. The C<read> event callback will
be invoked whenever there is new data to be given to the application.
Returns the C<$stream> instance itself.
=head2 read_stop
$stream->read_stop;
The L<read_stop|http://docs.libuv.org/en/v1.x/stream.html#c.uv_read_stop>
method stops the reading side of the stream handle.
=head2 write
$stream->write($s, sub {
say "Data has now been written to the stream";
});
The L<write|http://docs.libuv.org/en/v1.x/stream.html#c.uv_write> method
sends more data through the writing side of the stream. The callback argument
will be invoked when the data has been flushed to the filehandle.
=head1 AUTHOR
Paul Evans <leonerd@leonerd.org.uk>
=head1 LICENSE
This library is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.
=cut
| 23.766467 | 78 | 0.705719 |
ed14def2bc49beac3ffcbbc64db91e08df5f13bd | 21,017 | pm | Perl | lib/Bio/DB/GFF/Adaptor/memory.pm | Helmholtz-HIPS/prosnap | 5286cda39276d5eda85d2ddb23b8ab83c5d4960c | [
"MIT"
]
| 5 | 2017-10-27T15:03:19.000Z | 2020-04-25T17:44:49.000Z | lib/Bio/DB/GFF/Adaptor/memory.pm | Helmholtz-HIPS/prosnap | 5286cda39276d5eda85d2ddb23b8ab83c5d4960c | [
"MIT"
]
| 4 | 2021-01-28T20:49:55.000Z | 2022-03-25T19:02:54.000Z | lib/Bio/DB/GFF/Adaptor/memory.pm | Helmholtz-HIPS/prosnap | 5286cda39276d5eda85d2ddb23b8ab83c5d4960c | [
"MIT"
]
| 2 | 2019-02-22T10:51:15.000Z | 2019-02-22T12:35:35.000Z | package Bio::DB::GFF::Adaptor::memory;
=head1 NAME
Bio::DB::GFF::Adaptor::memory -- Bio::DB::GFF database adaptor for in-memory databases
=head1 SYNOPSIS
use Bio::DB::GFF;
my $db = Bio::DB::GFF->new(-adaptor=> 'memory',
-gff => 'my_features.gff',
-fasta => 'my_dna.fa'
);
or
my $db = Bio::DB::GFF->new(-adaptor=>'memory');
$db->load_gff_file('my_features.gff');
$db->load_fasta_file('my_dna.fa');
See L<Bio::DB::GFF> for other methods.
=head1 DESCRIPTION
This adaptor implements an in-memory version of Bio::DB::GFF. It can be used to
store and retrieve SHORT GFF files. It inherits from Bio::DB::GFF.
=head1 CONSTRUCTOR
Use Bio::DB::GFF-E<gt>new() to construct new instances of this class.
Three named arguments are recommended:
Argument Description
-adaptor Set to "memory" to create an instance of this class.
-gff Read the indicated file or directory of .gff file.
-fasta Read the indicated file or directory of fasta files.
-dir Indicates a directory containing .gff and .fa files
If you use the -dir option and the indicated directory is writable by
the current process, then this library will create a FASTA file index
that greatly diminishes the memory usage of this module.
Alternatively you may create an empty in-memory object using just the
-adaptor=E<gt>'memory' argument and then call the load_gff_file() and
load_fasta_file() methods to load GFF and/or sequence
information. This is recommended in CGI/mod_perl/fastCGI environments
because these methods do not modify STDIN, unlike the constructor.
=head1 METHODS
See L<Bio::DB::GFF> for inherited methods.
=head1 BUGS
none ;-)
=head1 SEE ALSO
L<Bio::DB::GFF>, L<bioperl>
=head1 AUTHOR
Shuly Avraham E<lt>avraham@cshl.orgE<gt>.
Copyright (c) 2002 Cold Spring Harbor Laboratory.
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
use strict;
# AUTHOR: Shulamit Avraham
# This module needs to be cleaned up and documented
# Bio::DB::GFF::Adaptor::memory -- in-memory db adaptor
# implements the low level handling of data which stored in memory.
# This adaptor implements a specific in memory schema that is compatible with Bio::DB::GFF.
# Inherits from Bio::DB::GFF.
use Bio::DB::GFF::Util::Rearrange; # for rearrange()
use Bio::DB::GFF::Adaptor::memory::iterator;
use File::Basename 'dirname';
use Bio::DB::GFF::Adaptor::memory::feature_serializer qw(@hash2array_map);
use constant MAX_SEGMENT => 1_000_000_000; # the largest a segment can get
use base qw(Bio::DB::GFF);
# Construct an in-memory adaptor.  Accepts -GFF/-FILE, -FASTA,
# -DSN/-DB/-DIR/-DIRECTORY and -PREFERRED_GROUPS arguments; a bare
# directory argument doubles as the default source for both the GFF
# features and the FASTA sequence.
sub new {
  my ($class, @args) = @_;
  my ($file, $fasta, $dbdir, $preferred_groups) = rearrange(
      [ [ qw(GFF FILE) ], 'FASTA', [ qw(DSN DB DIR DIRECTORY) ], 'PREFERRED_GROUPS' ],
      @args
  );
  my $self = bless { data => [] }, $class;
  $self->preferred_groups($preferred_groups) if defined $preferred_groups;
  # the directory serves as a fallback for both sources
  $file  = $dbdir unless $file;
  $fasta = $dbdir unless $fasta;
  $self->load_gff($file)             if $file;
  $self->load_or_store_fasta($fasta) if $fasta;
  return $self;
}
# Attach FASTA sequence.  When the target file/directory is writable we
# build (or reuse) an on-disk Bio::DB::Fasta index, which avoids holding
# the DNA in memory; otherwise the sequence is loaded into memory.
sub load_or_store_fasta {
  my ($self, $fasta) = @_;
  my $indexable = (-f $fasta && -w dirname($fasta))
               || (-d $fasta && -w $fasta);
  if ($indexable) {
    require Bio::DB::Fasta;
    my $dna_db = eval { Bio::DB::Fasta->new($fasta) };
    warn "$@\nCan't open sequence file(s). Use -gff instead of -dir if you wish to load features without sequence.\n"
        unless $dna_db;
    if ($dna_db) {
      $self->dna_db($dna_db);
    }
  } else {
    $self->load_fasta($fasta);
  }
}
# Accessor/mutator for the external Bio::DB::Fasta handle.  When a new
# value is supplied it is stored, and the *previous* value is returned.
sub dna_db {
  my ($self, @new) = @_;
  my $previous = $self->{dna_db};
  if (@new) {
    $self->{dna_db} = $new[0];
  }
  return $previous;
}
# Append a chunk of DNA to the named sequence.  Chunks are assumed to
# arrive in coordinate order, so $offset is accepted for interface
# compatibility but not used.
sub insert_sequence {
  my ($self, $id, $offset, $seq) = @_;
  $self->{dna}{$id} .= $seq;
}
# low-level fetch of a DNA substring given its
# name, class and the desired range.
# Fetch a DNA substring for sequence $id between $start and $stop
# (1-based, inclusive).  Coordinates given in reverse order yield the
# reverse complement.  Delegates to an attached Bio::DB::Fasta database
# when one is configured; $class is accepted but unused here.
sub get_dna {
  my ($self, $id, $start, $stop, $class) = @_;

  # prefer the disk-backed index when available
  my $dna_db = $self->dna_db;
  return $dna_db->seq($id, $start => $stop) if $dna_db;

  return '' unless $self->{dna};
  # no coordinates at all means "the whole sequence"
  return $self->{dna}{$id} if !defined $start && !defined $stop;

  $start = 1 unless defined $start;
  # start > stop means the caller wants the minus strand
  my $want_revcomp = 0;
  if ($start > $stop) {
    $want_revcomp = 1;
    ($start, $stop) = ($stop, $start);
  }
  my $fragment = substr($self->{dna}{$id}, $start - 1, $stop - $start + 1);
  if ($want_revcomp) {
    $fragment =~ tr/gatcGATC/ctagCTAG/;
    $fragment = reverse $fragment;
  }
  return $fragment;
}
# Reset the transient load buffers before a bulk GFF load begins.
# Features are staged in {tmp} (bucketed by group) and moved to {data}
# by finish_load().  Always returns true.
sub setup_load {
  my ($self) = @_;
  @{$self}{qw(tmp data)} = ({}, []);
  return 1;
}
# Flush staged features from {tmp} into the main {data} array, giving
# each a sequential feature_id.  Features from the same group stay
# adjacent because they were staged in the same bucket.  Returns true.
sub finish_load {
  my ($self) = @_;
  my $next_id = 0;
  for my $group (values %{ $self->{tmp} }) {
    $_->{feature_id} = $next_id++ for @$group;
    push @{ $self->{data} }, @$group;
  }
  return 1;
}
# Stage one parsed GFF line (a hash ref of columns) for loading.
# Normalizes '.' placeholders in strand/phase to empty strings,
# defaults the group class to 'Sequence', and buckets the feature by
# group class+name so finish_load() can assign ids group-by-group.
sub load_gff_line {
  my ($self, $feature) = @_;
  for my $field (qw(strand phase)) {
    $feature->{$field} = '' if $feature->{$field} && $feature->{$field} eq '.';
  }
  $feature->{gclass} = 'Sequence' unless length $feature->{gclass} > 0;
  my $bucket_key = join $;, $feature->{gclass}, $feature->{gname};
  push @{ $self->{tmp}{$bucket_key} }, $feature;
}
# given sequence name, return (reference,start,stop,strand)
# Resolve a feature name (and group class, with an optional reference-
# sequence filter) to absolute coordinates.  Returns an array ref of
# [ref, class, start, stop, strand, name] segments, one per reference
# sequence carrying matches; returns empty (and sets error()) when
# nothing matches.
sub get_abscoords {
  my $self = shift;
  my ($name,$class,$refseq) = @_;
  my %refs;
  my $regexp;
  # translate shell-style '*' / '?' wildcards into a regexp
  if ($name =~ /[*?]/) { # uh oh regexp time
    $name = quotemeta($name);
    $name =~ s/\\\*/.*/g;
    $name =~ s/\\\?/.?/g;
    $regexp++;
  }
  # Find all features that have the requested name and class.
  # Sort them by reference point.
  for my $feature (@{$self->{data}}) {
    my $no_match_class_name;
    my $empty_class_name;
    # a feature with no group class at all matches any requested class
    my $class_matches = !defined($feature->{gclass}) ||
      length($feature->{gclass}) == 0 ||
        $feature->{gclass} eq $class;
    if (defined $feature->{gname}) {
      my $matches = $class_matches
        && ($regexp ? $feature->{gname} =~ /$name/i : lc($feature->{gname}) eq lc($name));
      $no_match_class_name = !$matches; # to accommodate Shuly's interesting logic
    }
    else{
      $empty_class_name = 1;
    }
    # a feature that failed the name test can still match via an
    # Alias attribute equal to the requested name
    if ($no_match_class_name){
      my $feature_attributes = $feature->{attributes};
      my $attributes = {Alias => $name};
      if (!$self->_matching_attributes($feature_attributes,$attributes)){
        next;
      }
    }
    push @{$refs{$feature->{ref}}},$feature;
  }
  # find out how many reference points we recovered
  if (! %refs) {
    $self->error("$name not found in database");
    return;
  }
  # compute min and max
  my ($ref) = keys %refs;
  my @found = @{$refs{$ref}};
  my ($strand,$start,$stop);
  my @found_segments;
  # merge each reference sequence's matches into one min-start /
  # max-stop segment; '.' strands are promoted to '+'
  foreach my $ref (keys %refs) {
    next if defined($refseq) and lc($ref) ne lc($refseq);
    my @found = @{$refs{$ref}};
    my ($strand,$start,$stop,$name);
    foreach (@found) {
      $strand ||= $_->{strand};
      $strand = '+' if $strand && $strand eq '.';
      $start = $_->{start} if !defined($start) || $start > $_->{start};
      $stop = $_->{stop} if !defined($stop) || $stop < $_->{stop};
      $name ||= $_->{gname};
    }
    push @found_segments,[$ref,$class,$start,$stop,$strand,$name];
  }
  return \@found_segments;
}
# Full-text search over feature attribute values.  Returns a list of
# [$featname, $note, $relevance, $type] tuples for features whose
# attribute text contains ALL of the (whitespace-separated) terms of
# $search_string.  Relevance is 10x the number of word hits; $limit
# caps the number of candidates collected before the final filter.
sub search_notes {
  my $self = shift;
  my ($search_string,$limit) = @_;

  $search_string =~ tr/*?//d;   # wildcards are not supported here

  my @results;

  my @words = map {quotemeta($_)} $search_string =~ /(\w+)/g;
  my $search = join '|',@words;

  for my $feature (@{$self->{data}}) {
    next unless defined $feature->{gclass} && defined $feature->{gname}; # ignore NULL objects
    next unless $feature->{attributes};
    my @attributes = @{$feature->{attributes}};
    my @values = map {$_->[1]} @attributes;
    my $value = "@values";
    my $matches = 0;
    for my $w (@words) {
      my @hits = $value =~ /($w)/ig;
      $matches += @hits;
    }
    next unless $matches;

    my $relevance = 10 * $matches;
    my $featname = Bio::DB::GFF::Featname->new($feature->{gclass}=>$feature->{gname});
    # the note is the Note attribute text plus any other attribute
    # values that contain one of the search words
    my $note;
    $note = join ' ',map {$_->[1]} grep {$_->[0] eq 'Note'} @{$feature->{attributes}};
    $note .= join ' ',grep /$search/,map {$_->[1]} grep {$_->[0] ne 'Note'} @{$feature->{attributes}};
    my $type = Bio::DB::GFF::Typename->new($feature->{method},$feature->{source});
    push @results,[$featname,$note,$relevance,$type];
    last if defined $limit && @results >= $limit;
  }

  # Keep only candidates whose note text contains every search term.
  # (This filter used to build Perl source and string-eval it into a
  # matcher sub; a plain closure does the same job without eval.)
  my @terms = split /\s+/, $search_string;
  my $matches_all = sub {
    my ($text) = @_;
    for my $term (@terms) {
      return unless $text =~ /\Q$term\E/i;
    }
    return 1;
  };
  my @matches = grep { $matches_all->($_->[1]) } @results;
  return @matches;
}
# Remove features by id (their index in {data}).  Ids are processed in
# descending order so earlier splices cannot shift later targets;
# out-of-range ids are ignored.  Returns the number of features removed.
sub _delete_features {
  my ($self, @ids) = @_;
  my $data    = $self->{data};
  my $removed = 0;
  for my $id (sort { $b <=> $a } @ids) {
    next if $id < 0 || $id >= @$data;
    my ($gone) = splice(@$data, $id, 1);
    $removed++ if defined $gone;
  }
  return $removed;
}
# Delete features according to a specification hash holding segments
# (ranges to search), types, range_type and force.  With neither
# segments nor types, all feature data is wiped -- but only when force
# is set, otherwise we throw.  Returns the number of features deleted.
sub _delete {
  my ($self, $spec) = @_;
  my $ranges  = $spec->{segments} || [];
  my $types   = $spec->{types}    || [];
  my $deleted = 0;

  if (@$ranges) {
    # collect the distinct ids of every matching feature in the ranges
    my @args = @$types ? (-type => $types) : ();
    push @args, (-range_type => $spec->{range_type});
    my %ids = map { $_->id => 1 } map { $_->features(@args) } @$ranges;
    $deleted = $self->delete_features(keys %ids);
  }
  elsif (@$types) {
    my %ids = map { $_->id => 1 } $self->features(-type => $types);
    $deleted = $self->delete_features(keys %ids);
  }
  else {
    $self->throw("This operation would delete all feature data and -force not specified")
      unless $spec->{force};
    $deleted = scalar @{ $self->{data} };
    @{ $self->{data} } = ();
  }
  return $deleted;
}
# attributes -
# Some GFF version 2 files use the groups column to store a series of
# attribute/value pairs. In this interpretation of GFF, the first such
# pair is treated as the primary group for the feature; subsequent pairs
# are treated as attributes. Two attributes have special meaning:
# "Note" is for backward compatibility and is used for unstructured text
# remarks. "Alias" is considered as a synonym for the feature name.
# If no name is provided, then attributes() returns a flattened hash, of
# attribute=>value pairs.
# Return attributes of the feature with the given id.  With a $tag,
# returns just the values of case-insensitively matching attributes;
# without one, returns a flattened list of (name => value) pairs.
sub do_attributes {
  my ($self, $feature_id, $tag) = @_;
  my $feature = $self->_basic_features_by_id($feature_id);
  my @out;
  for my $pair (@{ $feature->{attributes} }) {
    my ($name, $value) = @$pair;
    if (!defined $tag) {
      push @out, $name, $value;
    }
    elsif (lc($name) eq lc($tag)) {
      push @out, $value;
    }
  }
  return @out;
}
#sub get_feature_by_attribute{
# Invoke $callback (with a GFF field list) for every attribute pair of
# every feature that case-insensitively equals a name => value pair in
# %$attributes.  NOTE(review): $count is incremented but never
# returned, and a feature matching several pairs fires the callback
# more than once -- presumably intentional; confirm against callers.
sub _feature_by_attribute{
  my $self = shift;
  my ($attributes,$callback) = @_;
  $callback || $self->throw('must provide a callback argument');
  my $count = 0;
  my $feature_id = -1;
  my $feature_group_id = undef;
  for my $feature (@{$self->{data}}) {
    $feature_id++;
    for my $attr (@{$feature->{attributes}}) {
      my ($attr_name,$attr_value) = @$attr ;
      #there could be more than one set of attributes......
      foreach (keys %$attributes) {
        if (lc($_) eq lc($attr_name) && lc($attributes->{$_}) eq lc($attr_value)) {
          $callback->($self->_hash_to_array($feature));
          $count++;
        }
      }
    }
  }
}
# This is the low-level method that is called to retrieve GFF lines from
# the database. It is responsible for retrieving features that satisfy
# range and feature type criteria, and passing the GFF fields to a
# callback subroutine.
# Retrieve features matching the range/type criteria in $search and
# hand each one (as a GFF field list) to $callback.  With the
# sort_by_group option the features are first ordered by their group
# "class:name" string.  Returns the number of features delivered.
sub get_features {
  my ($self, $search, $options, $callback) = @_;
  my $found = $self->_get_features_by_search_options($search, $options);
  # only true if the sort by group option was specified
  if ($options->{sort_by_group}) {
    @$found = sort {
      lc("$a->{gclass}:$a->{gname}") cmp lc("$b->{gclass}:$b->{gname}")
    } @$found;
  }
  my $count = 0;
  for my $feature (@$found) {
    $count++;
    $callback->($self->_hash_to_array($feature));
  }
  return $count;
}
# Low level implementation of fetching a named feature.
# GFF annotations are named using the group class and name fields.
# May return zero, one, or several Bio::DB::GFF::Feature objects.
=head2 _feature_by_name
Title : _feature_by_name
Usage : $db->get_features_by_name($name,$class,$callback)
Function: get a list of features by name and class
Returns : count of number of features retrieved
Args : name of feature, class of feature, and a callback
Status : protected
This method is used internally. The callback arguments are those used
by make_feature().
=cut
# Find features by group name (and class), invoking $callback with the
# GFF field list for each match.  '*' and '?' wildcards in $name are
# honoured; an optional $location ([ref, start, stop]) restricts the
# matches to overlapping features.  Returns the number of matches.
sub _feature_by_name {
  my ($self, $class, $name, $location, $callback) = @_;
  $callback or $self->throw('must provide a callback argument');

  # translate shell-style wildcards into a regexp
  my $use_regexp = 0;
  if ($name =~ /[*?]/) {
    $name = quotemeta($name);
    $name =~ s/\\\*/.*/g;
    $name =~ s/\\\?/.?/g;
    $use_regexp = 1;
  }

  my $hits = 0;
  FEATURE: for my $feature (@{ $self->{data} }) {
    next FEATURE unless ($use_regexp && $feature->{gname} =~ /$name/i)
                     || lc($feature->{gname}) eq lc($name);
    # class must agree when the feature actually carries one
    next FEATURE if defined($feature->{gclass})
                 && length($feature->{gclass}) > 0
                 && $feature->{gclass} ne $class;
    if ($location) {
      next FEATURE if $location->[0] ne $feature->{ref};
      next FEATURE if $location->[1] && $location->[1] > $feature->{stop};
      next FEATURE if $location->[2] && $location->[2] < $feature->{start};
    }
    $hits++;
    $callback->($self->_hash_to_array($feature), 0);
  }
  return $hits;
}
# Low level implementation of fetching a feature by it's id.
# The id of the feature as implemented in the in-memory db, is the location of the
# feature in the features hash array.
# Fetch features by internal id (their index in {data}) and hand each
# one to $callback as a GFF field list.  Only $type eq 'feature' is
# supported; group lookup is not implemented for the memory adaptor.
sub _feature_by_id {
  my ($self, $ids, $type, $callback) = @_;
  $callback or $self->throw('must provide a callback argument');
  return unless $type eq 'feature';
  for my $id (@$ids) {
    my $feature = $self->_basic_features_by_id($id);
    $callback->($self->_hash_to_array($feature));
  }
}
# Look up raw feature hashes by internal id (index into {data}).
# Accepts a single id or an array ref of ids; returns the list of
# features in list context, or just the first one in scalar context.
sub _basic_features_by_id{
  my ($self, $ids) = @_;
  # BUG FIX: this used to read `ref $ids =~ /ARRAY/`, which Perl parses
  # as `ref($ids =~ /ARRAY/)` because =~ binds tighter than a named
  # unary operator -- so an array ref argument got wrapped a second time
  # and its elements were never looked up correctly.
  $ids = [$ids] unless ref($ids) =~ /ARRAY/;
  my @result;
  for my $feature_id (@$ids){
    push @result, ${$self->{data}}[$feature_id];
  }
  return wantarray() ? @result : $result[0];
}
# This method is similar to get_features(), except that it returns an
# iterator across the query.
# See Bio::DB::GFF::Adaptor::memory::iterator.
# Like get_features(), but wraps the matching features in an iterator
# object instead of invoking the callback for each one up front.
sub get_features_iterator {
  my ($self, $search, $options, $callback) = @_;
  $callback or $self->throw('must provide a callback argument');
  my $matches = $self->_get_features_by_search_options($search, $options);
  return Bio::DB::GFF::Adaptor::memory::iterator->new(
    $self->_convert_feature_hash_to_array($matches),
    $callback
  );
}
# This method is responsible for fetching the list of feature type names.
# The query may be limited to a particular range, in
# which case the range is indicated by a landmark sequence name and
# class and its subrange, if any. These arguments may be undef if it is
# desired to retrieve all feature types.
# If the count flag is false, the method returns a simple list of
# Bio::DB::GFF::Typename objects. If $count is true, the method returns
# a list of $name=>$count pairs, where $count indicates the number of
# times this feature occurs in the range.
# Collect the feature types present in the database, optionally
# restricted to a reference sequence, a class, an overlapping range,
# and a list of acceptable method/source pairs.  Returns Typename
# objects, or (type => count) pairs when $want_count is true.
sub get_types {
  my ($self, $srcseq, $class, $start, $stop, $want_count, $typelist) = @_;
  my (%count_of, %typename_of);

  FEATURE: for my $feature (@{ $self->{data} }) {
    if (defined $srcseq) {
      next FEATURE unless lc($feature->{ref}) eq lc($srcseq);
    }
    if (defined $class) {
      next FEATURE unless defined $feature->{class} && $feature->{class} eq $class;
    }
    # the requested range should OVERLAP the retrieved features
    if (defined $start or defined $stop) {
      $start = 1           unless defined $start;
      $stop  = MAX_SEGMENT unless defined $stop;
      next FEATURE unless $feature->{stop} >= $start && $feature->{start} <= $stop;
    }
    if (defined $typelist && @$typelist) {
      next FEATURE
        unless $self->_matching_typelist($feature->{method}, $feature->{source}, $typelist);
    }
    my $type = Bio::DB::GFF::Typename->new($feature->{method}, $feature->{source});
    $count_of{$type}++;
    $typename_of{$type} = $type;
  }
  return $want_count ? %count_of : values %typename_of;
}
# Return the sorted list of distinct group classes present in the data.
sub classes {
  my ($self) = @_;
  my %seen;
  $seen{ $_->{gclass} }++ for @{ $self->{data} };
  return sort keys %seen;
}
# Internal method that performs a search on the features array,
# sequentialy retrieves the features, and performs a check on each feature
# according to the search options.
# Core search routine: walk every stored feature and keep those that
# satisfy the range (rangetype/refseq/start/stop), type and attribute
# criteria.  Matching features are annotated in place with their
# feature_id (index in {data}) and returned as an array ref.
sub _get_features_by_search_options{
  my $count = 0;
  my ($self, $search,$options) = @_;
  my ($rangetype,$refseq,$class,$start,$stop,$types,$sparse,$order_by_group,$attributes) =
    (@{$search}{qw(rangetype refseq refclass start stop types)},
     @{$options}{qw(sparse sort_by_group ATTRIBUTES)}) ;
  my @found_features;
  my $data = $self->{data};
  my $feature_id = -1 ;
  my $feature_group_id = undef;
  for my $feature (@{$data}) {
    $feature_id++;
    my $feature_start = $feature->{start};
    my $feature_stop = $feature->{stop};
    my $feature_ref = $feature->{ref};
    if (defined $refseq){
      next unless lc($feature_ref) eq lc($refseq);
    }
    if (defined $start or defined $stop) {
      $start = 0 unless defined($start);
      $stop = MAX_SEGMENT unless defined($stop);
      # rangetype selects the geometric relation between the query
      # range and the feature; the default branch is an exact match
      if ($rangetype eq 'overlaps') {
        next unless $feature_stop >= $start && $feature_start <= $stop;
      } elsif ($rangetype eq 'contains') {
        next unless $feature_start >= $start && $feature_stop <= $stop;
      } elsif ($rangetype eq 'contained_in') {
        next unless $feature_start <= $start && $feature_stop >= $stop;
      } else {
        next unless $feature_start == $start && $feature_stop == $stop;
      }
    }
    my $feature_source = $feature->{source};
    my $feature_method = $feature->{method};
    if (defined $types && @$types){
      next unless $self->_matching_typelist($feature_method,$feature_source,$types);
    }
    my $feature_attributes = $feature->{attributes};
    if (defined $attributes){
      next unless $self->_matching_attributes($feature_attributes,$attributes);
    }
    # if we get here, then we have a feature that meets the criteria.
    # Then we just push onto an array
    # of found features and continue.
    my $found_feature = $feature ;
    $found_feature->{feature_id} = $feature_id;
    $found_feature->{group_id} = $feature_group_id;
    push @found_features,$found_feature;
  }
  return \@found_features;
}
# Flatten a feature hash into the canonical GFF field order defined by
# @hash2array_map (imported from the feature_serializer).  Returns a
# list of fields, or an array ref in scalar context.
sub _hash_to_array {
  my ($self, $feature) = @_;
  my @fields = @{$feature}{@hash2array_map};
  return wantarray ? @fields : \@fields;
}
# Convert a list of feature hashes into a list of GFF field array refs,
# which is the form the iterator hands to its callback.
sub _convert_feature_hash_to_array {
  my ($self, $features) = @_;
  my @rows;
  push @rows, scalar $self->_hash_to_array($_) for @$features;
  return \@rows;
}
# True (1) if the (method, source) pair matches any entry of $typelist.
# Comparison is case-insensitive; an entry with an undefined source
# matches any source.  Returns 0 otherwise.
sub _matching_typelist {
  my ($self, $method, $source, $typelist) = @_;
  for my $entry (@$typelist) {
    my ($want_method, $want_source) = @$entry;
    my $method_ok = lc($want_method) eq lc($method);
    my $source_ok = !defined($want_source) || lc($want_source) eq lc($source);
    return 1 if $method_ok && $source_ok;
  }
  return 0;
}
# True (1) if the feature's attribute list satisfies every
# name => value pair in the %$attributes query; 0 otherwise.
sub _matching_attributes {
  my ($self, $feature_attributes, $attributes) = @_;
  for my $name (keys %$attributes) {
    return 0
      unless _match_all_attr_in_feature($name, $attributes->{$name}, $feature_attributes);
  }
  return 1;
}

# True (1) if ($attr_name, $attr_value) appears -- exact, case-sensitive
# match -- among the feature's [name, value] attribute pairs; 0 otherwise.
sub _match_all_attr_in_feature {
  my ($attr_name, $attr_value, $feature_attributes) = @_;
  for my $pair (@$feature_attributes) {
    my ($pair_name, $pair_value) = @$pair;
    return 1 if $pair_name eq $attr_name && $pair_value eq $attr_value;
  }
  return 0;
}
sub do_initialize { 1; } # no schema to create for an in-memory store
sub get_feature_by_group_id{ 1; } # group ids are not tracked by this adaptor
1;
| 29.230876 | 123 | 0.632107 |
ed24cb18287610097ab18b2866765364ffc499d3 | 39,785 | pm | Perl | modules/Bio/EnsEMBL/Production/DBSQL/BulkFetcher.pm | luca-drf/ensembl-production | 8251026004d786f5a160584f3550227adc395cc1 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Production/DBSQL/BulkFetcher.pm | luca-drf/ensembl-production | 8251026004d786f5a160584f3550227adc395cc1 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Production/DBSQL/BulkFetcher.pm | luca-drf/ensembl-production | 8251026004d786f5a160584f3550227adc395cc1 | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 NAME
Bio::EnsEMBL::Production::DBSQL::BulkFetcher - library of functions for grabbing big chunks of database
=head1 SYNOPSIS
=head1 DESCRIPTION
Data-fetching methods for grabbing big chunks of Ensembl for dumping.
More time-efficient than the normal API. The output is never Ensembl objects.
=cut
package Bio::EnsEMBL::Production::DBSQL::BulkFetcher;
use strict;
use warnings;
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Exception qw(throw);
use Log::Log4perl qw/get_logger/;
my $log = get_logger();
# Constructor. Named arguments (via rearrange): -BIOTYPES (arrayref or undef),
# -LEVEL ('gene'|'transcript'|'translation'|'protein_feature', default 'gene')
# and -LOAD_XREFS (boolean, default 0).
sub new {
  my ($class, @args) = @_;
  my $self = bless {}, ref($class) || $class;
  my ($biotypes, $level, $load_xrefs) =
      rearrange([ 'BIOTYPES', 'LEVEL', 'LOAD_XREFS' ], @args);
  $self->{biotypes}   = $biotypes;
  $self->{level}      = $level || 'gene';
  $self->{load_xrefs} = $load_xrefs || 0;
  return $self;
}
# Export all genes of a core DBAdaptor as an arrayref of plain hashes.
# Any of $biotypes/$level/$load_xrefs left undef falls back to the value
# supplied at construction time.
sub export_genes {
  my ($self, $dba, $biotypes, $level, $load_xrefs) = @_;
  $biotypes   = $self->{biotypes}   if !defined $biotypes;
  $level      = $self->{level}      if !defined $level;
  $load_xrefs = $self->{load_xrefs} if !defined $load_xrefs;
  $log->info("Exporting genes for " . $dba->species());
  # get_genes returns a hash keyed by stable ID; callers want a flat list.
  my $gene_hash = $self->get_genes($dba, $biotypes, $level, $load_xrefs);
  my @gene_list = values %{$gene_hash};
  $log->info("Completed exporting " . scalar(@gene_list) . " genes for " . $dba->species());
  return \@gene_list;
}
# Append an "AND <alias>.biotype IN (...)" restriction to $sql when a
# non-empty arrayref of biotypes is supplied. The optional $table argument
# names the table alias carrying the biotype column (default 'f').
sub _append_biotype_sql {
  my ($self, $sql, $biotypes, $table) = @_;
  $table = $table || 'f';
  return $sql unless defined $biotypes && scalar @{$biotypes};
  my $in_list = join(',', map { "'$_'" } @{$biotypes});
  return "$sql AND $table.biotype IN ($in_list)";
}
# Append a clause excluding rows whose analysis is flagged in
# analysis_description.web_data as {"gene":{"do_not_display":"1"}}.
# $table is the alias of the table carrying the analysis_id column.
sub _append_analysis_sql {
  my ($self, $dba, $sql, $table) = @_;
  my $hidden_analyses = $dba->dbc()->sql_helper()->execute_simple(
      -SQL => q/select analysis_id from analysis_description where web_data rlike '"gene" *: *{"do_not_display" *: *"1"}'/
  );
  return $sql unless scalar @{$hidden_analyses};
  return $sql . ' and ' . $table . '.analysis_id not in (' . join(',', @{$hidden_analyses}) . ')';
}
# Fetch all genes for the species as a hashref keyed by stable ID (falling
# back to the internal gene_id when no stable ID is set). Each value is a
# plain hash of gene attributes. Synonyms, seq_region synonyms, haplotype
# flags, coord_system info and retired stable IDs are always attached; xrefs
# are attached when $load_xrefs is true, and transcripts (and below) when
# $level is 'transcript', 'translation' or 'protein_feature'.
sub get_genes {
  my ($self, $dba, $biotypes, $level, $load_xrefs) = @_;
  my $genes_hash = {};
  {
    my $sql = qq/
    select ifnull(f.stable_id,f.gene_id) as id, f.version as version, x.display_label as name, f.description, f.biotype, f.source,
    f.seq_region_start as start, f.seq_region_end as end, f.seq_region_strand as strand,
    s.name as seq_region_name,
    'gene' as ensembl_object_type,
    a.logic_name as analysis,
    ad.display_label as analysis_display,
    b.so_term as so_term
    from gene f
    left join xref x on (f.display_xref_id = x.xref_id)
    join seq_region s using (seq_region_id)
    join coord_system c using (coord_system_id)
    join analysis a using (analysis_id)
    left join analysis_description ad using (analysis_id)
    left join biotype b on b.name = f.biotype and b.object_type='gene'
    where c.species_id = ?
    /;
    # restrict to requested biotypes ('f' is the gene alias above) and drop
    # analyses flagged do_not_display
    $sql = $self->_append_biotype_sql($sql, $biotypes);
    $sql = $self->_append_analysis_sql($dba, $sql, 'f');
    $log->debug("Retrieving genes");
    $log->trace($sql);
    my @genes =
        @{$dba->dbc()->sql_helper()->execute(-SQL => $sql,
                                             -PARAMS => [ $dba->species_id() ],
                                             -USE_HASHREFS => 1,)};
    $log->debug("Retrieved " . scalar(@genes) . " genes");
    # turn into hash
    $genes_hash = { map {$_->{id} => $_} @genes };
  }
  $log->debug("Found " . scalar(values %{$genes_hash}) . " gene IDs");
  if (scalar(values %{$genes_hash}) > 0) {
    $log->debug("Adding synonyms");
    # query for all synonyms, hash by gene ID
    my $synonyms = $self->get_synonyms($dba, $biotypes);
    while (my ($gene_id, $synonym) = each %$synonyms) {
      $genes_hash->{$gene_id}->{synonyms} = $synonym;
    }
    $log->debug("Adding seq_region synonyms");
    # add seq_region synonyms
    my $seq_region_synonyms = $self->get_seq_region_synonyms($dba, 'gene', $biotypes);
    while (my ($gene_id, $synonym) = each %$seq_region_synonyms) {
      $genes_hash->{$gene_id}->{seq_region_synonyms} = $synonym;
    }
    $log->debug("Adding haplotypes");
    # add haplotypes (only genes on HAP assembly exceptions get the flag)
    my $haplotypes = $self->get_haplotypes($dba, 'gene', $biotypes);
    while (my ($gene_id, $synonym) = each %$haplotypes) {
      $genes_hash->{$gene_id}->{is_haplotype} = 1;
    }
    $log->debug("Adding coord_systems");
    # add coord_system info
    my $coord_systems = $self->get_coord_systems($dba, 'gene', $biotypes);
    while (my ($gene_id, $coord_system) = each %$coord_systems) {
      $genes_hash->{$gene_id}->{coord_system} = $coord_system;
    }
    $log->debug("Adding stable IDs");
    # add stable_ids (retired IDs from stable_id_event)
    my $ids = $self->get_stable_ids($dba, 'gene');
    while (my ($gene_id, $old_ids) = each %{$ids}) {
      $genes_hash->{$gene_id}->{previous_ids} = $old_ids;
    }
    if ($load_xrefs == 1) {
      $log->debug("Adding xrefs");
      # query for all xrefs, hash by gene ID
      my $xrefs = $self->get_xrefs($dba, 'gene', $biotypes);
      while (my ($gene_id, $xref) = each %$xrefs) {
        $genes_hash->{$gene_id}->{xrefs} = $xref;
      }
    }
    if ($level eq 'transcript' ||
        $level eq 'translation' ||
        $level eq 'protein_feature') {
      $log->debug("Adding transcripts");
      # query for transcripts, hash by gene ID; genes filtered out above are
      # skipped via the defined-check below
      my $transcripts = $self->get_transcripts($dba, $biotypes, $level, $load_xrefs);
      $log->debug("Found transcripts for " . scalar(values %{$transcripts}) . " gene IDs");
      while (my ($gene_id, $transcript) = each %$transcripts) {
        my $g = $genes_hash->{$gene_id};
        if (defined $g) {
          $g->{transcripts} = $transcript;
        }
      }
    }
  }
  return $genes_hash;
} ## end sub get_genes
# Fetch all transcripts for the species, grouped by parent gene: returns a
# hashref mapping gene stable ID -> arrayref of transcript hashes. Depending
# on $level/$load_xrefs, each transcript is decorated with xrefs,
# translations, seq_region synonyms, coord_system info, exons, a crude CDS
# span, DNA/protein supporting features and retired stable IDs.
sub get_transcripts {
  my ($self, $dba, $biotypes, $level, $load_xrefs) = @_;
  my $sql = q/
    select ifnull(g.stable_id, g.gene_id) as gene_id,
    ifnull(t.stable_id,t.transcript_id) as id,
    t.version as version,
    x.display_label as name,
    t.description,
    t.biotype,
    t.seq_region_start as start,
    t.seq_region_end as end,
    t.seq_region_strand as strand,
    s.name as seq_region_name,
    'transcript' as ensembl_object_type,
    a.logic_name as analysis,
    ad.display_label as analysis_display,
    b.so_term
    FROM
    gene g
    join transcript t using (gene_id)
    left join xref x on (t.display_xref_id = x.xref_id)
    join seq_region s on (s.seq_region_id = g.seq_region_id)
    join coord_system c using (coord_system_id)
    join analysis a on (t.analysis_id=a.analysis_id)
    left join analysis_description ad on (a.analysis_id=ad.analysis_id)
    left join biotype b on b.name = t.biotype and b.object_type='transcript'
    where c.species_id = ?
    /;
  $sql = $self->_append_biotype_sql($sql, $biotypes, 't');
  $sql = $self->_append_analysis_sql($dba, $sql, 't');
  # working hash keyed by transcript stable ID; regrouped by gene at the end
  my $transcripts = {};
  $log->debug("Retrieving transcripts");
  $log->trace($sql);
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $sql,
    -PARAMS => [ $dba->species_id() ],
    -USE_HASHREFS => 1,
    -CALLBACK => sub {
      my ($row) = @_;
      $transcripts->{$row->{id}} = $row;
      $transcripts->{$row->{id}}{translations} = [];
      return;
    });
  if ($load_xrefs == 1) {
    $log->debug("Getting xrefs for transcripts");
    my $xrefs = $self->get_xrefs($dba, 'transcript', $biotypes);
    while (my ($id, $xref) = each %{$xrefs}) {
      $transcripts->{$id}->{xrefs} = $xref;
    }
  }
  if ($level eq 'translation' || $level eq 'protein_feature') {
    $log->debug("Getting translations for transcripts");
    my $t = $self->get_translations($dba, $biotypes, $level, $load_xrefs);
    while (my ($id, $translation) = each %{$t}) {
      $transcripts->{$id}{translations} = $translation;
    }
  }
  {
    $log->debug("Getting seq_region synonyms for transcripts");
    my $srss = $self->get_seq_region_synonyms($dba, 'transcript', $biotypes);
    while (my ($id, $srs) = each %{$srss}) {
      $transcripts->{$id}->{seq_region_synonyms} = $srs;
    }
  }
  {
    $log->debug("Getting coord_systems for transcripts");
    my $css = $self->get_coord_systems($dba, 'transcript', $biotypes);
    while (my ($id, $cs) = each %{$css}) {
      $transcripts->{$id}->{coord_system} = $cs;
    }
  }
  {
    # Stream exons, attaching each to its transcript, while accumulating a
    # per-transcript exon list used by set_cds. The flush-on-change logic
    # below assumes rows for one transcript arrive contiguously.
    # NOTE(review): ORDER BY `id` sorts by *exon* stable ID, not transcript;
    # if a transcript's exons are not adjacent in that ordering, set_cds is
    # invoked more than once per transcript. Confirm intended ordering.
    my $exons_list = {};
    my $current_transcript_id = '';
    my $exon_sql = q/
      SELECT
      ifnull(t.stable_id, t.transcript_id) AS trans_id,
      ifnull(e.stable_id, e.exon_id) AS id,
      e.version AS version,
      s.name as seq_region_name,
      e.seq_region_start as start,
      e.seq_region_end as end,
      e.seq_region_strand as strand,
      et.rank as rank,
      'exon' as ensembl_object_type
      FROM transcript t
      JOIN exon_transcript et ON t.transcript_id = et.`transcript_id`
      JOIN exon e ON et.exon_id = e.`exon_id`
      JOIN seq_region s ON e.seq_region_id = s.seq_region_id
      JOIN coord_system c ON c.coord_system_id = s.coord_system_id
      WHERE c.species_id = ?
      ORDER BY `id`
      /;
    $exon_sql = $self->_append_analysis_sql($dba, $exon_sql, 't');
    $log->debug("Getting exons for transcripts");
    # NOTE(review): traces the transcript SQL, not $exon_sql - looks like an
    # oversight but is logging-only.
    $log->trace($sql);
    $dba->dbc->sql_helper->execute_no_return(
      -SQL => $exon_sql,
      -PARAMS => [ $dba->species_id ],
      -USE_HASHREFS => 1,
      -CALLBACK => sub {
        my ($row) = @_;
        # $transcript may be undef if the transcript was filtered out by
        # biotype/analysis above; the exon is then silently dropped.
        my $transcript = $transcripts->{$row->{trans_id}};
        $row->{coord_system} = $transcript->{coord_system};
        if($current_transcript_id ne $row->{trans_id}) {
          if ( $current_transcript_id ne '') {
            $self->set_cds($exons_list, $transcripts->{$current_transcript_id}, $current_transcript_id);
            $exons_list = {};
          }
          push @{ $exons_list->{ $row->{trans_id} } } , { 'start' => $row->{start}, 'end' => $row->{end} } ;
          $current_transcript_id = $row->{trans_id};
        } else {
          push @{ $exons_list->{ $row->{trans_id} } } , { 'start' => $row->{start}, 'end' => $row->{end} } ;
        }
        delete $row->{trans_id};
        push @{$transcript->{exons}}, $row;
        return;
      });
    # flush the CDS for the final transcript processed in the loop above
    $self->set_cds($exons_list, $transcripts->{$current_transcript_id}, $current_transcript_id);
  }
  {
    # DNA alignment supporting evidence for each transcript
    $sql = q/
      select ifnull(t.stable_id, t.transcript_id) as trans_id,
      f.hit_name as id,
      f.hit_start as start,
      f.hit_end as end,
      f.evalue as evalue,
      d.db_name as db_name,
      d.db_display_name as db_display,
      a.logic_name as analysis,
      ad.display_label as analysis_display
      from coord_system c
      join seq_region s using (coord_system_id)
      join transcript t using (seq_region_id)
      join transcript_supporting_feature sf using (transcript_id)
      join dna_align_feature f on (f.dna_align_feature_id=sf.feature_id)
      join external_db d using (external_db_id)
      join analysis a on (a.analysis_id=f.analysis_id)
      left join analysis_description ad on (a.analysis_id=ad.analysis_id)
      where sf.feature_type='dna_align_feature'
      and c.species_id=?
      /;
    $sql = $self->_append_analysis_sql($dba, $sql, 't');
    $log->debug("Getting DNA supporting features for transcripts");
    $log->trace($sql);
    $dba->dbc->sql_helper->execute_no_return(
      -SQL => $sql,
      -PARAMS => [ $dba->species_id ],
      -USE_HASHREFS => 1,
      -CALLBACK => sub {
        my ($row) = @_;
        my $transcript = $transcripts->{$row->{trans_id}};
        delete $row->{trans_id};
        push @{$transcript->{supporting_features}}, $row;
        return;
      });
  }
  {
    # protein alignment supporting evidence, merged into the same
    # supporting_features list
    $sql = q/
      select ifnull(t.stable_id,t.transcript_id) as trans_id,
      f.hit_name as id,
      f.hit_start as start,
      f.hit_end as end,
      f.evalue as evalue,
      d.db_name as db_name,
      d.db_display_name as db_display,
      a.logic_name as analysis,
      ad.display_label as analysis_display
      from
      coord_system c
      join seq_region s using (coord_system_id)
      join transcript t using (seq_region_id)
      join transcript_supporting_feature sf using (transcript_id)
      join protein_align_feature f on (f.protein_align_feature_id=sf.feature_id)
      join external_db d using (external_db_id)
      join analysis a on (a.analysis_id=f.analysis_id)
      left join analysis_description ad on (a.analysis_id=ad.analysis_id)
      where sf.feature_type='protein_align_feature'
      and c.species_id=?
      /;
    $sql = $self->_append_analysis_sql($dba, $sql, 't');
    $log->debug("Getting protein supporting features for transcripts");
    $log->trace($sql);
    $dba->dbc->sql_helper->execute_no_return(
      -SQL => $sql,
      -PARAMS => [ $dba->species_id ],
      -USE_HASHREFS => 1,
      -CALLBACK => sub {
        my ($row) = @_;
        my $transcript = $transcripts->{$row->{trans_id}};
        delete $row->{trans_id};
        push @{$transcript->{supporting_features}}, $row;
        return;
      });
  }
  {
    # retired transcript stable IDs
    my $stable_ids = $self->get_stable_ids($dba, 'transcript');
    while (my ($id, $prev_ids) = each(%$stable_ids)) {
      $transcripts->{$id}->{previous_ids} = $prev_ids;
    }
  }
  # regroup by parent gene stable ID
  my $transcript_hash = {};
  for my $transcript (values %$transcripts) {
    push @{$transcript_hash->{ $transcript->{gene_id} }}, $transcript;
    # NOTE(review): no-op - 'gene_id' is never a key of $transcript_hash
    # itself; presumably this was meant to strip gene_id from $transcript.
    # Left unchanged to preserve the emitted structure.
    delete $transcript_hash->{gene_id};
  }
  return $transcript_hash;
} ## end sub get_transcripts
# Record a crude CDS span on $transcript: from the start of the first exon
# collected for $transcript_id to the end of the last one (a single-exon
# transcript uses that exon's own start/end). The span is pushed as a
# one-element entry on $transcript->{cds}.
sub set_cds {
  my ($self, $exons_list, $transcript, $transcript_id) = @_;
  my @exons = @{ $exons_list->{$transcript_id} };
  push @{ $transcript->{cds} },
      { 'start' => $exons[0]->{start}, 'end' => $exons[-1]->{end} };
}
# Fetch translations for all transcripts as a hashref keyed by transcript
# stable ID; each value is an arrayref of translation hashes. Xrefs are
# attached when $load_xrefs is true, protein features when $level is
# 'protein_feature', and retired stable IDs always.
sub get_translations {
  my ($self, $dba, $biotypes, $level, $load_xrefs) = @_;
  my $sql = q/
  select ifnull(t.stable_id,t.transcript_id) as transcript_id,
  ifnull(tl.stable_id,tl.translation_id) as id,
  tl.version as version,
  'translation' as ensembl_object_type
  from transcript t
  join translation tl using (transcript_id)
  join seq_region s using (seq_region_id)
  join coord_system c using (coord_system_id)
  where c.species_id = ?
  /;
  $sql = $self->_append_biotype_sql($sql, $biotypes, 't');
  $sql = $self->_append_analysis_sql($dba, $sql, 't');
  my $xrefs = {};
  if ($load_xrefs == 1) {
    $log->debug("Retrieving xrefs for translations");
    $xrefs = $self->get_xrefs($dba, 'translation', $biotypes);
  }
  # add protein features
  my $protein_features = {};
  if ($level eq 'protein_feature') {
    $log->debug("Retrieving protein_features for translations");
    $protein_features = $self->get_protein_features($dba, $biotypes);
  }
  my $stable_ids = $self->get_stable_ids($dba, 'translation');
  $log->debug("Retrieving translations");
  my @translations = @{
    $dba->dbc()->sql_helper()->execute(
      -SQL => $sql,
      -PARAMS => [ $dba->species_id() ],
      -USE_HASHREFS => 1,
      -CALLBACK => sub {
        my ($row) = @_;
        # xrefs default to an empty list; protein_features may be undef
        $row->{xrefs} = ( $xrefs->{ $row->{id} } ) ? $xrefs->{ $row->{id} } : [];
        $row->{protein_features} = $protein_features->{ $row->{id} };
        my $ids = $stable_ids->{$row->{id}};
        $row->{previous_ids} = $ids if defined $ids && scalar(@$ids) > 0;
        return $row;
      })};
  # regroup by parent transcript stable ID
  my $translation_hash = {};
  for my $translation (@translations) {
    push @{$translation_hash->{ $translation->{transcript_id} }}, $translation;
    # NOTE(review): no-op - 'transcript_id' is never a key of
    # $translation_hash itself; presumably this was meant to strip
    # transcript_id from $translation. Left unchanged to preserve output.
    delete $translation_hash->{transcript_id};
  }
  return $translation_hash;
} ## end sub get_translations
# Fetch protein features (domains etc.) for all translations, including any
# InterPro cross-reference details, as a hashref keyed by translation stable
# ID (or internal translation_id when no stable ID exists). Each value is an
# arrayref of feature hashes; undefined optional fields are stripped.
# $biotypes optionally restricts results to transcripts of those biotypes.
sub get_protein_features {
  my ($self, $dba, $biotypes) = @_;
  my $sql = q/
    select
    ifnull(tl.stable_id, tl.translation_id) as translation_id,
    pf.hit_name as name,
    pf.hit_description as description,
    pf.seq_start as start,
    pf.seq_end as end,
    a.db as dbname,
    i.interpro_ac,
    ix.display_label as interpro_name,
    ix.description as interpro_description,
    'protein_feature' as ensembl_object_type
    from transcript t
    join translation tl using (transcript_id)
    join protein_feature pf using (translation_id)
    join analysis a on (a.analysis_id = pf.analysis_id)
    left join interpro i on (pf.hit_name = i.id)
    left join xref ix on (i.interpro_ac = ix.dbprimary_acc)
    left join external_db idx on (ix.external_db_id=idx.external_db_id and idx.db_name='Interpro')
    join seq_region s using (seq_region_id)
    join coord_system c using (coord_system_id)
    where c.species_id = ?
  /;
  # The biotype filter targets the transcript alias 't'. The return value
  # must be captured: _append_biotype_sql does not modify $sql in place
  # (previously the result was discarded, silently dropping the filter).
  $sql = $self->_append_biotype_sql($sql, $biotypes, 't');
  $log->debug("Retrieving protein_features");
  $log->trace($sql);
  my @protein_features = @{
    $dba->dbc()->sql_helper()->execute(
      -SQL => $sql,
      -PARAMS => [ $dba->species_id() ],
      -USE_HASHREFS => 1)};
  my $pf_hash = {};
  for my $protein_feature (@protein_features) {
    # drop optional fields that are absent, and an interpro_name that merely
    # repeats the accession
    delete $protein_feature->{description} unless defined $protein_feature->{description};
    delete $protein_feature->{interpro_ac} unless defined $protein_feature->{interpro_ac};
    delete $protein_feature->{interpro_name} unless defined $protein_feature->{interpro_name} && $protein_feature->{interpro_name} ne $protein_feature->{interpro_ac};
    delete $protein_feature->{interpro_description} unless defined $protein_feature->{interpro_description};
    push @{$pf_hash->{ $protein_feature->{translation_id} }}, $protein_feature;
    # strip the grouping key from the feature itself (previously this delete
    # mistakenly targeted $pf_hash, where the key never exists)
    delete $protein_feature->{translation_id};
  }
  return $pf_hash;
} ## end sub get_protein_features
# Build the SQL retrieving plain (non-ontology) xrefs for objects of the
# given type. Translations must be reached via transcript, hence the special
# case; other types interpolate the table name directly. Rows that have an
# ontology_xref entry are excluded here (oox.object_xref_id is null) and
# fetched separately via _generate_object_xref_sql.
sub _generate_xref_sql {
  my ($self, $table_name) = @_;
  if ($table_name eq 'translation') {
    return qq/
    SELECT ifnull(tl.stable_id, tl.translation_id) AS id, x.xref_id, x.dbprimary_acc, x.display_label, e.db_name, e.db_display_name, x.description, x.info_type, x.info_text
    FROM transcript t
    JOIN translation tl USING (transcript_id)
    JOIN object_xref ox ON (tl.translation_id = ox.ensembl_id AND ox.ensembl_object_type = 'Translation')
    JOIN xref x USING (xref_id)
    JOIN external_db e USING (external_db_id)
    JOIN seq_region s USING (seq_region_id)
    JOIN coord_system c USING (coord_system_id)
    LEFT JOIN ontology_xref oox USING (object_xref_id)
    WHERE c.species_id = ? AND oox.object_xref_id is null
    /;
  }
  else {
    # capitalised form matches object_xref.ensembl_object_type enum values
    my $Table_name = ucfirst($table_name);
    return qq/SELECT ifnull(f.stable_id, f.${table_name}_id) AS id, x.xref_id, x.dbprimary_acc, x.display_label, e.db_name, e.db_display_name, x.description, x.info_type, x.info_text
    FROM ${table_name} f
    JOIN object_xref ox ON (f.${table_name}_id = ox.ensembl_id AND ox.ensembl_object_type = '${Table_name}')
    JOIN xref x USING (xref_id)
    JOIN external_db e USING (external_db_id)
    JOIN seq_region s USING (seq_region_id)
    JOIN coord_system c USING (coord_system_id)
    LEFT JOIN ontology_xref oox USING (object_xref_id)
    WHERE c.species_id = ? AND oox.object_xref_id is null/;
  }
} ## end sub _generate_xref_sql
# Build the SQL retrieving ontology xrefs (object_xref rows that have an
# ontology_xref entry) for objects of the given type, including the linkage
# type and its optional source xref. Selects 13 columns; the consuming
# callback in get_xrefs must unpack them in exactly this order.
sub _generate_object_xref_sql {
  my ($self, $table_name) = @_;
  if ($table_name eq 'translation') {
    return qq/SELECT ox.object_xref_id, ifnull(tl.stable_id, tl.translation_id) AS id, x.dbprimary_acc, x.display_label, e.db_name, e.db_display_name, x.description,
    oox.linkage_type, sx.dbprimary_acc, sx.display_label, sx.description, se.db_name, se.db_display_name
    FROM transcript t
    JOIN translation tl USING (transcript_id)
    JOIN object_xref ox ON (tl.translation_id = ox.ensembl_id AND ox.ensembl_object_type = 'Translation')
    JOIN xref x USING (xref_id)
    JOIN external_db e USING (external_db_id)
    JOIN seq_region s USING (seq_region_id)
    JOIN coord_system c USING (coord_system_id)
    JOIN ontology_xref oox USING (object_xref_id)
    LEFT JOIN xref sx ON (oox.source_xref_id = sx.xref_id)
    LEFT JOIN external_db se ON (se.external_db_id = sx.external_db_id)
    WHERE c.species_id = ?
    /;
  }
  else {
    # capitalised form matches object_xref.ensembl_object_type enum values
    my $Table_name = ucfirst($table_name);
    return qq/SELECT ox.object_xref_id, ifnull(f.stable_id, f.${table_name}_id) AS id, x.dbprimary_acc, x.display_label, e.db_name, e.db_display_name, x.description,
    oox.linkage_type, sx.dbprimary_acc, sx.display_label, sx.description, se.db_name, se.db_display_name
    FROM ${table_name} f
    JOIN object_xref ox ON (f.${table_name}_id = ox.ensembl_id AND ox.ensembl_object_type = '${Table_name}')
    JOIN xref x USING (xref_id)
    JOIN external_db e USING (external_db_id)
    JOIN seq_region s USING (seq_region_id)
    JOIN coord_system c USING (coord_system_id)
    JOIN ontology_xref oox USING (object_xref_id)
    LEFT JOIN xref sx ON (oox.source_xref_id = sx.xref_id)
    LEFT JOIN external_db se ON (se.external_db_id = sx.external_db_id)
    WHERE c.species_id = ?/;
  }
} ## end sub _generate_object_xref_sql
# Build the SQL retrieving associated xrefs (associated_xref rows attached
# to ontology object_xrefs) for objects of the given type, including each
# association's rank, condition, group and source xref.
sub _generate_associated_xref_sql {
  my ($self, $table_name) = @_;
  if ($table_name eq 'translation') {
    return qq/
    SELECT ax.object_xref_id, ax.rank, ax.condition_type, x.dbprimary_acc, x.display_label, xe.db_name, xe.db_display_name, x.description,
    sx.dbprimary_acc, sx.display_label, se.db_name, sx.description, ax.associated_group_id
    FROM transcript t
    JOIN translation tl USING (transcript_id)
    JOIN object_xref ox ON (tl.translation_id = ox.ensembl_id AND ox.ensembl_object_type = 'Translation')
    JOIN associated_xref ax USING (object_xref_id)
    JOIN xref x ON (x.xref_id = ax.xref_id)
    JOIN external_db xe ON (x.external_db_id = xe.external_db_id)
    JOIN xref sx ON (sx.xref_id = ax.source_xref_id)
    JOIN external_db se ON (se.external_db_id = sx.external_db_id)
    JOIN seq_region s USING (seq_region_id)
    JOIN coord_system c USING (coord_system_id)
    WHERE c.species_id=?
    /;
  }
  else {
    # capitalised form matches object_xref.ensembl_object_type enum values
    my $Table_name = ucfirst($table_name);
    return qq/
    SELECT ax.object_xref_id, ax.rank, ax.condition_type, x.dbprimary_acc, x.display_label, xe.db_name, xe.db_display_name, x.description,
    sx.dbprimary_acc, sx.display_label, se.db_name, sx.description, ax.associated_group_id
    FROM ${table_name} f
    JOIN object_xref ox ON (f.${table_name}_id = ox.ensembl_id AND ox.ensembl_object_type = '${Table_name}')
    JOIN associated_xref ax USING (object_xref_id)
    JOIN xref x ON (x.xref_id = ax.xref_id)
    JOIN external_db xe ON (x.external_db_id = xe.external_db_id)
    JOIN xref sx ON (sx.xref_id = ax.source_xref_id)
    JOIN external_db se ON (se.external_db_id = sx.external_db_id)
    JOIN seq_region s USING (seq_region_id)
    JOIN coord_system c USING (coord_system_id)
    WHERE c.species_id = ?/;
  }
} ## end sub _generate_associated_xref_sql
# Fetch xrefs for all objects of $type ('gene', 'transcript' or
# 'translation'), returned as a hashref keyed by object stable ID. Plain
# xrefs carry their synonyms; ontology xrefs additionally carry linkage
# types (with their source xrefs) and any associated xrefs grouped by
# associated_group_id.
sub get_xrefs {
  my ($self, $dba, $type, $biotypes) = @_;
  # preload all xref synonyms, keyed by xref_id
  my $synonyms = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => q/select xref_id,synonym from external_synonym/,
    -CALLBACK => sub {
      my ($id, $syn) = @{$_[0]};
      push @{$synonyms->{$id}}, $syn;
      return;
    });
  my $sql = $self->_generate_xref_sql($type);
  $sql = $self->_append_biotype_sql($sql, $biotypes, $type);
  my $oox_sql = $self->_generate_object_xref_sql($type);
  $oox_sql = $self->_append_biotype_sql($oox_sql, $biotypes, $type);
  my $ax_sql = $self->_generate_associated_xref_sql($type);
  $ax_sql = $self->_append_biotype_sql($ax_sql, $biotypes, $type);
  # plain (non-ontology) xrefs, keyed by object stable ID
  my $xrefs = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $sql,
    -PARAMS => [ $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      my ($stable_id, $xref_id, $dbprimary_acc,
          $display_label, $db_name, $db_display_name,
          $description, $info_type, $info_text) = @$row;
      my $x = { primary_id => $dbprimary_acc,
                display_id => $display_label,
                dbname => $db_name,
                db_display => $db_display_name,
                description => $description,
                info_type => $info_type,
                info_text => $info_text };
      my $syn = $synonyms->{$xref_id};
      $x->{synonyms} = $syn if defined $syn;
      push @{$xrefs->{$stable_id}}, $x;
      return;
    }
  );
  # ontology xrefs, keyed by object_xref_id so associated xrefs can be
  # attached below
  my $oox_xrefs = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $oox_sql,
    -PARAMS => [ $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      # This list must stay in sync with the 13 columns selected by
      # _generate_object_xref_sql. A variable for e.db_display_name was
      # previously missing here, shifting every subsequent field by one
      # (description received db_display_name, linkage_type received
      # description, etc.).
      my ($ox_id, $stable_id,
          $dbprimary_acc, $display_label,
          $db_name, $db_display_name,
          $description,
          $linkage_type, $other_dbprimary_acc,
          $other_display_label, $other_description,
          $other_dbname, $other_db_display_name) = @$row;
      my $xref = $oox_xrefs->{ $ox_id };
      if (!defined $xref) {
        $xref = { obj_id => $stable_id,
                  primary_id => $dbprimary_acc,
                  display_id => $display_label,
                  dbname => $db_name,
                  db_display => $db_display_name,
                  description => $description,
        };
        $oox_xrefs->{ $ox_id } = $xref;
      }
      # add linkage type to $xref
      push @{$xref->{linkage_types}}, {
        evidence => $linkage_type,
        source => {
          primary_id => $other_dbprimary_acc,
          display_id => $other_display_label,
          dbname => $other_dbname,
          db_display_name => $other_db_display_name,
          description => $other_description,
        }
      };
      return;
    }
  );
  # add associated_xrefs to $oox_xrefs
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $ax_sql,
    -PARAMS => [ $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      my ($associated_ox_id, $associated_rank,
          $associated_condition, $dbprimary_acc,
          $display_label, $db_name,
          $db_display_name,
          $description, $other_dbprimary_acc,
          $other_display_label, $other_db_name,
          $other_description, $associated_group_id) = @$row;
      my $xref = $oox_xrefs->{ $associated_ox_id };
      # attach the association, grouped by group id then condition
      if (defined $associated_group_id && defined $associated_condition) {
        $xref->{associated_xrefs}->{ $associated_group_id }->{ $associated_condition } = {
          rank => $associated_rank,
          primary_id => $dbprimary_acc,
          display_id => $display_label,
          db_display_name => $db_display_name,
          dbname => $db_name,
          description => $description,
          source => {
            primary_id => $other_dbprimary_acc,
            display_id => $other_display_label,
            dbname => $other_db_name,
            description => $other_description,
          } };
      }
      return;
    }
  );
  # collate everything: flatten association groups and merge ontology xrefs
  # into the per-object lists, dropping the internal grouping key
  for my $xref (values %{$oox_xrefs}) {
    $xref->{associated_xrefs} = [ values %{$xref->{associated_xrefs}} ];
    push @{$xrefs->{ $xref->{obj_id} }}, $xref;
    delete $xref->{obj_id};
  }
  return $xrefs;
} ## end sub get_xrefs
# Map object stable ID -> { name, version } of the coord_system its
# seq_region belongs to, for all objects of $type (e.g. 'gene',
# 'transcript'), optionally restricted by biotype.
sub get_coord_systems {
  my ($self, $dba, $type, $biotypes) = @_;
  my $sql = qq/
  select ifnull(g.stable_id,g.${type}_id) as id, c.name, c.version
  from $type g
  join seq_region s using (seq_region_id)
  join coord_system c using (coord_system_id)
  where c.species_id = ?
  /;
  # The feature table is aliased 'g', so the biotype clause must target 'g';
  # the default alias 'f' does not exist in this query and produced invalid
  # SQL whenever biotypes were supplied.
  $sql = $self->_append_biotype_sql($sql, $biotypes, 'g');
  $sql = $self->_append_analysis_sql($dba, $sql, 'g');
  my $coord_systems = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $sql,
    -PARAMS => [ $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      $coord_systems->{ $row->[0] } = { name => $row->[1], version => $row->[2] };
      return;
    });
  return $coord_systems;
}
# Map gene stable ID -> arrayref of synonyms of the gene's display xref,
# optionally restricted by biotype.
sub get_synonyms {
  my ($self, $dba, $biotypes) = @_;
  my $sql = q/
  select ifnull(g.stable_id,g.gene_id) as id, e.synonym
  from gene g
  join external_synonym e on (g.display_xref_id = e.xref_id)
  join seq_region s using (seq_region_id)
  join coord_system c using (coord_system_id)
  where c.species_id = ?
  /;
  # The gene table is aliased 'g', so the biotype clause must target 'g';
  # the default alias 'f' does not exist in this query and produced invalid
  # SQL whenever biotypes were supplied.
  $sql = $self->_append_biotype_sql($sql, $biotypes, 'g');
  my $synonyms = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $sql,
    -PARAMS => [ $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      push @{$synonyms->{ $row->[0] }}, $row->[1];
      return;
    });
  return $synonyms;
}
# Map object stable ID -> arrayref of { id, db } seq_region synonyms for the
# region the object lies on; db is the originating external_db name and may
# be undef (left join).
sub get_seq_region_synonyms {
  my ($self, $dba, $type, $biotypes) = @_;
  my $sql = qq/
  select ifnull(g.stable_id,${type}_id) as id, sr.synonym as synonym, e.db_name as db
  from $type g
  join seq_region_synonym sr using (seq_region_id)
  join seq_region s using (seq_region_id)
  join coord_system c using (coord_system_id)
  left join external_db e using (external_db_id)
  where c.species_id = ?
  /;
  $sql = $self->_append_biotype_sql($sql, $biotypes, 'g');
  $sql = $self->_append_analysis_sql($dba, $sql, 'g');
  my $synonyms = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $sql,
    -PARAMS => [ $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      push @{$synonyms->{ $row->[0] }}, { id => $row->[1], db => $row->[2] };
      return;
    });
  return $synonyms;
}
# Map stable ID -> 1 for objects of $type lying on haplotype regions
# (assembly_exception entries of exc_type 'HAP'); objects not on haplotypes
# are simply absent from the result.
sub get_haplotypes {
  my ($self, $dba, $type, $biotypes) = @_;
  my $sql = qq/
  select ifnull(g.stable_id,${type}_id) as id
  from $type g
  join assembly_exception ae using (seq_region_id)
  join seq_region s using (seq_region_id)
  join coord_system c using (coord_system_id)
  where c.species_id = ? and ae.exc_type='HAP'
  /;
  # The feature table is aliased 'g', so the biotype clause must target 'g';
  # the default alias 'f' does not exist in this query and produced invalid
  # SQL whenever biotypes were supplied.
  $sql = $self->_append_biotype_sql($sql, $biotypes, 'g');
  $sql = $self->_append_analysis_sql($dba, $sql, 'g');
  my $haplotypes = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $sql,
    -PARAMS => [ $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      $haplotypes->{ $row->[0] } = 1;
      return;
    });
  return $haplotypes;
}
# Template SQL for retrieving retired stable IDs mapped to current ones via
# stable_id_event. The first %s is the feature table name; the second is an
# optional extra join (translations need to go via transcript to reach
# seq_region). Placeholders at run time: stable_id_event.type, species_id.
my $base_id_sql = q/
 SELECT f.stable_id as id, sie.old_stable_id as old_id
 FROM stable_id_event as sie
 JOIN %s f on (f.stable_id=sie.new_stable_id)
 %sJOIN seq_region s USING (seq_region_id)
 JOIN coord_system c USING (coord_system_id)
 WHERE sie.type=?
 AND old_stable_id != new_stable_id
 AND c.species_id=?
/;
# Concrete per-object-type SQL, consumed by get_stable_ids.
my $stable_id_sql = {
  gene => sprintf($base_id_sql, 'gene', ''),
  transcript => sprintf($base_id_sql, 'transcript', ''),
  translation => sprintf($base_id_sql, 'translation', 'JOIN transcript USING (transcript_id) ')
};
# Map current stable ID -> arrayref of retired stable IDs for objects of
# $type ('gene', 'transcript' or 'translation'), using the precomputed
# per-type SQL in $stable_id_sql.
sub get_stable_ids {
  my ($self, $dba, $type) = @_;
  my $stable_ids = {};
  $dba->dbc()->sql_helper()->execute_no_return(
    -SQL => $stable_id_sql->{$type},
    -PARAMS => [ $type, $dba->species_id() ],
    -CALLBACK => sub {
      my ($row) = @_;
      push @{$stable_ids->{ $row->[0] }}, $row->[1];
      return;
    });
  return $stable_ids;
}
# Decorate $genes in place with homologues and family membership drawn from
# the given compara database for the named production species.
sub add_compara {
  my ($self, $species, $genes, $compara_dba) = @_;
  warn "Adding compara...\n";
  $self->$_($species, $genes, $compara_dba) for qw(add_homologues add_family);
  warn "Finished adding compara...\n";
  return;
}
# Decorate $genes in place with homologues from the pan-taxonomic compara
# database. Unlike add_compara, no family data is attached here.
sub add_pan_compara {
  my ($self, $species, $genes, $compara_dba) = @_;
  warn "Adding pan taxonomic compara...\n";
  $self->add_homologues($species, $genes, $compara_dba);
  warn "Finished adding pan taxonomic compara...\n";
  return;
}
# Decorate each gene in $genes (in place, via its 'homologues' list) with
# homology partners from the given compara database. Each entry records the
# partner's stable ID, genome name, the homology description (stored under
# 'orthology_type') and the gene tree stable ID. Throws if any gene in
# $genes lacks an 'id'.
sub add_homologues {
  my ($self, $species, $genes, $compara_dba) = @_;
  # hash all homology pairs by this genome's gene stable ID
  my $homologues = {};
  $compara_dba->dbc()->sql_helper()->execute_no_return(
    -SQL => q/
    SELECT gm1.stable_id, gm2.stable_id, g2.name, h.description, r.stable_id
    FROM homology_member hm1
    INNER JOIN homology_member hm2 ON (hm1.homology_id = hm2.homology_id)
    INNER JOIN homology h ON (hm1.homology_id = h.homology_id)
    INNER JOIN gene_member gm1 ON (hm1.gene_member_id = gm1.gene_member_id)
    INNER JOIN gene_member gm2 ON (hm2.gene_member_id = gm2.gene_member_id)
    INNER JOIN genome_db g ON (gm1.genome_db_id = g.genome_db_id)
    INNER JOIN genome_db g2 ON (gm2.genome_db_id = g2.genome_db_id)
    INNER JOIN gene_tree_root r ON (h.gene_tree_root_id=r.root_id)
    WHERE (hm1.gene_member_id <> hm2.gene_member_id)
    AND (gm1.stable_id <> gm2.stable_id)
    AND (g.name = ?)
    AND (gm1.source_name = 'ENSEMBLGENE')/,
    -CALLBACK => sub {
      my ($row) = @_;
      push @{$homologues->{ $row->[0] }}, {
        stable_id => $row->[1],
        genome => $row->[2],
        orthology_type => $row->[3],
        gene_tree_id => $row->[4] };
      return;
    },
    -PARAMS => [ $species ]);
  my $n = 0;
  for my $gene (@{$genes}) {
    if (!defined $gene->{id}) {
      throw("No stable ID for gene");
    }
    my $homo = $homologues->{ $gene->{id} };
    if (defined $homo) {
      $n++;
      # append rather than replace, in case homologues were added already
      $gene->{homologues} ||= [];
      push @{$gene->{homologues}}, @$homo;
    }
  }
  print "Added homologues to $n genes\n";
  return;
} ## end sub add_homologues
# Decorate transcripts and translations in $genes (in place, via their
# 'families' lists) with Ensembl family membership from the given compara
# database. Family members are seq_members, so matching is on transcript
# and translation stable IDs, not gene IDs.
sub add_family {
  my ($self, $species, $genes, $compara_dba) = @_;
  my $families = {};
  # hash all families for this genome by sequence stable_id
  $compara_dba->dbc()->sql_helper()->execute_no_return(
    -SQL => q/
    SELECT s.stable_id, f.stable_id, f.version, f.description
    FROM family f
    JOIN family_member fm USING (family_id)
    JOIN seq_member s USING (seq_member_id)
    JOIN genome_db g USING (genome_db_id)
    WHERE g.name = ?/,
    -CALLBACK => sub {
      my ($row) = @_;
      # version/description are optional and omitted when NULL
      my $f = { stable_id => $row->[1] };
      $f->{version} = $row->[2] if (defined $row->[2]);
      $f->{description} = $row->[3] if (defined $row->[3]);
      push @{$families->{ $row->[0] }}, $f;
      return;
    },
    -PARAMS => [ $species ]);
  my $n = 0;
  # add families for each member
  for my $gene (@{$genes}) {
    for my $transcript (@{$gene->{transcripts}}) {
      my $family = $families->{ $transcript->{id} };
      if (defined $family) {
        $n++;
        $transcript->{families} = $family;
      }
      for my $translation (@{$transcript->{translations}}) {
        $family = $families->{ $translation->{id} };
        if (defined $family) {
          $n++;
          $translation->{families} = $family;
        }
      }
    }
  }
  print "Added families to $n objects\n";
  return;
} ## end sub add_family
# Transcript-to-probe-set mappings for arrays organised into probe sets
# (array.is_probeset_array = 1). Columns: transcript stable ID, array name,
# probe set name, and "<vendor>_<array name with - and . replaced by _>".
my $probe_set_sql = q/select distinct
  probe_set_transcript.stable_id AS transcript_stable_id,
  array.name AS array_name,
  probe_set.name AS display_label,
  CONCAT(array.vendor, '_', REPLACE(REPLACE(array.name, '-', '_'), '.', '_'))
    AS array_vendor_and_name
from array
join array_chip using (array_id)
join probe using (array_chip_id)
join probe_set using (probe_set_id)
join probe_set_transcript using (probe_set_id)
where
  array.is_probeset_array=1/;
# Transcript-to-probe mappings for arrays without probe sets
# (array.is_probeset_array = 0); same column layout, with the probe name in
# place of the probe set name.
my $probe_sql = q/select distinct
  probe_transcript.stable_id AS transcript_stable_id,
  array.name AS array_name,
  probe.name AS display_label,
  CONCAT(array.vendor, '_', REPLACE(REPLACE(array.name, '-', '_'), '.', '_'))
    AS array_vendor_and_name
from array
join array_chip using (array_id)
join probe using (array_chip_id)
join probe_transcript using (probe_id)
where
  array.is_probeset_array=0/;
sub add_funcgen {
my ($self, $genes, $funcgen_dba) = @_;
my $probes = {};
for my $sql ($probe_set_sql) {
$funcgen_dba->dbc()->sql_helper()->execute_no_return(
-SQL => $sql,
-CALLBACK => sub {
my ($transcript_id, $array, $probe, $vendor) = @{shift @_};
push @{$probes->{$transcript_id}},
{ array => $array, probe => $probe, vendor => $vendor };
return;
});
}
for my $gene (@{$genes}) {
for my $transcript (@{$gene->{transcripts}}) {
my $probes_for_transcript = $probes->{ $transcript->{id} };
$transcript->{probes} = $probes_for_transcript
if defined $probes_for_transcript;
}
}
return;
}
1;
| 37.639546 | 186 | 0.580922 |
ed19bcc9674578eba70f7581a3f308cac337cb86 | 442 | pl | Perl | benchmarks/spec2k6bin/specint/perl_depends/lib/unicore/lib/gc_sc/Hebr.pl | YangZhou1997/DynamicCache_v2 | 60bc1e01e0eaf88f6c8e959cb6316e20ac910ed2 | [
"BSD-3-Clause"
]
| 430 | 2015-01-05T19:21:10.000Z | 2022-03-29T07:19:18.000Z | benchmarks/spec2k6bin/specint/perl_depends/lib/unicore/lib/gc_sc/Hebr.pl | YangZhou1997/DynamicCache_v2 | 60bc1e01e0eaf88f6c8e959cb6316e20ac910ed2 | [
"BSD-3-Clause"
]
| 9 | 2015-01-20T17:42:30.000Z | 2022-03-04T22:05:43.000Z | benchmarks/spec2k6bin/specint/perl_depends/lib/unicore/lib/gc_sc/Hebr.pl | YangZhou1997/DynamicCache_v2 | 60bc1e01e0eaf88f6c8e959cb6316e20ac910ed2 | [
"BSD-3-Clause"
]
| 41 | 2015-05-10T17:08:50.000Z | 2022-01-19T01:15:19.000Z | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is built by mktables from e.g. UnicodeData.txt.
# Any changes made here will be lost!
#
# This file supports:
# \p{Hebrew} (and fuzzy permutations)
#
# Meaning: Script 'Hebrew'
#
return <<'END';
0591 05A1 Hebrew
05A3 05B9 Hebrew
05BB 05C4 Hebrew
05D0 05EA Hebrew
05F0 05F4 Hebrew
FB1D FB36 Hebrew
FB38 FB3C Hebrew
FB3E Hebrew
FB40 FB41 Hebrew
FB43 FB44 Hebrew
FB46 FB4F Hebrew
END
| 18.416667 | 59 | 0.710407 |
ed6a9bbb5d416a3fbbc8a7774cb09e103a58b1b9 | 4,604 | pm | Perl | admin/modules/EnsEMBL/Web/Document/HTML/AdminIndex.pm | sarahhunt/public-plugins | e5998b5b1791befdd67cf4e8f210c23cec7527d1 | [
"Apache-2.0"
]
| null | null | null | admin/modules/EnsEMBL/Web/Document/HTML/AdminIndex.pm | sarahhunt/public-plugins | e5998b5b1791befdd67cf4e8f210c23cec7527d1 | [
"Apache-2.0"
]
| null | null | null | admin/modules/EnsEMBL/Web/Document/HTML/AdminIndex.pm | sarahhunt/public-plugins | e5998b5b1791befdd67cf4e8f210c23cec7527d1 | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Document::HTML::AdminIndex;
use strict;
use parent qw(EnsEMBL::Web::Document::HTML);
sub new {
my $self = shift->SUPER::new;
$self->{'hub'} = shift;
return $self;
}
sub render {
my $self = shift;
my $hub = $self->{'hub'};
my $user = $hub->user;
return '<div class="plain-box"><p>Note that for access to the database frontends, you will need
to <a href="/Account/Login" class="modal_link">log in</a> (using the same account as www.ensembl.org) and be
a member of the appropriate user group.</p></div>' unless $user;
return '<div class="plain-box"><p>Your user account seems to have limited rights that excludes
access to the database frontends and healthcheck interface. If you need access to these pages,
please ask you team leader.</p></div>' unless $user->is_member_of($hub->species_defs->ENSEMBL_WEBADMIN_ID);
return q(
<div class="admin-left-box"><div class="plain-box">
<h1>Declarations of Intentions</h1>
<ul>
<li><a href="/Changelog/Summary">View all declarations</a></li>
<li><a href="/Changelog/Add">Add a declaration</a></li>
<li><a href="/Changelog/ListReleases?pull=1">Copy a declaration from a previous release</a></li>
<li><a href="/Changelog/Select/Edit">Update a declaration</a></li>
<li><a href="/Changelog/List">Declarations - quick lookup table</a></li>
</ul>
<h1>Ensembl Production Database</h1>
<ul>
<li><a href="/Production/AnalysisWebData">Analysis WebData</a></li>
<li><a href="/AnalysisDesc/List">Analysis Description</a></li>
<li><a href="/Species/List">Species</a></li>
<li><a href="/SpeciesAlias/List">Species alias</a></li>
<li><a href="/Metakey/List">Meta keys</a></li>
<li><a href="/Biotype/Display">Biotypes</a></li>
<li><a href="/Webdata/Display">Web Data</a></li>
<li><a href="/AttribType/Display">AttribType</a></li>
<li><a href="/Attrib/Display">Attrib</a></li>
<li><a href="/AttribSet/Display">AttribSet</a></li>
<li><a href="/ExternalDb/Display">ExternalDb</a></li>
</ul>
<h1>Help Database</h1>
<ul>
<li><a href="/HelpRecord/List/View">Page View</a></li>
<li><a href="/HelpRecord/List/FAQ">FAQ</a></li>
<li><a href="/HelpRecord/List/Glossary">Glossary</a></li>
<li><a href="/HelpRecord/List/Movie">Movies</a></li>
</ul>
<h1>Documents</h1>
<ul>
<li><a href="/Documents/View/Testcases">Healthcheck Testcases</a></li>
</ul>
<h1>User Directory</h1>
<ul>
<li><a href="/UserDirectory">User Directory</a></li>
</ul>
<h1>Performance</h1>
<ul>
<li><a href="/tools_graphs/index.html">Tools LSF queues load</a>
<ul>
<li><a href="/tools_graphs/index.html?type=Blast">Blast</a></li>
<li><a href="/tools_graphs/index.html?type=Blat">Blat</a></li>
<li><a href="/tools_graphs/index.html?type=VEP">VEP</a></li>
<li><a href="/tools_graphs/index.html?type=FileChameleon">FileChameleon</a></li>
<li><a href="/tools_graphs/index.html?type=AlleleFrequency">AlleleFrequency</a></li>
<li><a href="/tools_graphs/index.html?type=VcftoPed">VcftoPed</a></li>
<li><a href="/tools_graphs/index.html?type=DataSlicer">DataSlicer</a></li>
<li><a href="/tools_graphs/index.html?type=VariationPattern">VariationPattern</a></li>
</ul>
</li>
<li><a href="/perf">Nightly local full page-load times</a></li>
<li><a href="/arewestable">Live server error monitoring (BETA)</a></li>
</ul>
<h1>Disk usage</h1>
<ul>
<li><a href="/disk_usage/compara.html">Compara</a></li>
<li><a href="/disk_usage/genebuild.html">Genebuild</a></li>
<li><a href="/disk_usage/production.html">Production</a></li>
<li><a href="/disk_usage/regulation.html">Regulation</a></li>
<li><a href="/disk_usage/variation.html">Variation</a></li>
<li><a href="/disk_usage/web.html">Web</a></li>
</ul>
</div></div>);
}
1;
| 40.034783 | 111 | 0.666594 |
ed61d79b8c103c5fb5c0ac2b85d1dbf77063f6fc | 9,253 | t | Perl | S09-typed-arrays/native-shape1-int.t | rafaelschipiura/roast | dfc4cf7e529687609e296add3ee76e8107467136 | [
"Artistic-2.0"
]
| 99 | 2015-03-03T13:01:44.000Z | 2020-03-05T15:21:43.000Z | S09-typed-arrays/native-shape1-int.t | rafaelschipiura/roast | dfc4cf7e529687609e296add3ee76e8107467136 | [
"Artistic-2.0"
]
| 331 | 2015-02-17T15:26:22.000Z | 2020-03-16T18:29:49.000Z | S09-typed-arrays/native-shape1-int.t | rafaelschipiura/roast | dfc4cf7e529687609e296add3ee76e8107467136 | [
"Artistic-2.0"
]
| 136 | 2015-02-02T13:34:10.000Z | 2020-02-18T02:26:59.000Z | use v6;
use Test;
my @int = int, int8, int16, int32;
my @uint = uint,uint8,uint16,uint32;
if $*KERNEL.bits == 64 {
@int.push: int64;
@uint.push: uint64;
}
plan (@int + @uint) * 111;
# Basic native int array tests.
for flat @int,@uint -> $T {
my $t = $T.^name;
diag "Testing $t array";
ok array[$T].new(:shape(5)) ~~ Positional, "$t array is Positional";
#?rakudo todo 'apparently it is not a typed Positional'
ok array[$T].new(:shape(5)) ~~ Positional[$T], "$t array is Positional[$t]";
ok array[$T].new(:shape(5)).of === $T, "$t array .of is $t";
my @a := EVAL "my $t @[5]";
ok @a ~~ Positional, "$t array is Positional";
#?rakudo todo 'apparently it is not a typed Positional'
ok @a ~~ Positional[$T], "$t array is Positional[$t]";
ok @a.of === $T, "$t array .of is $t";
ok @a.new ~~ Positional, ".new from $t array is Positional";
#?rakudo todo 'apparently it is not a typed Positional'
ok @a.new ~~ Positional[$T], ".new from $t array Positional[$t]";
ok @a.new.of === $T, ".new from $t array .of is $t";
my @arr := array[$T].new(:shape(5));
is @arr.elems, 5, "New $t array has 5 elems";
is @arr.end, 4, "New $t array has end of -1";
is @arr.Int, 5, "New $t array Int-ifies to 5";
is +@arr, 5, "New $t array numifies to 5";
nok @arr.is-lazy , "Empty $t array is not lazy";
dies-ok { @arr[5] }, "Accessing non-existing on $t array dies";
is @arr.elems, 5, "Elems do not grow just from an access on $t array";
is (@arr[0] = 42), 42, "Can store integer in an $t array with Int index";
is @arr[0], 42, "Can get value from $t array with Int index";
my int $i;
is (@arr[$i] = 66), 66, 'can store integer in an $t array with int index';
is @arr[$i], 66, "Can get value from $t array with int index";
is (@arr[1, 2] = 69, 70), (69,70), "Can slice-assign to an $t array";
is @arr[1], 69, "Can get slice-assigned value from $t array (1)";
is @arr[2], 70, "Can get slice-assigned value from $t array (2)";
ok @arr[$i]:exists, ":exists works on $t array with int index";
ok @arr[4]:exists, ":exists works on $t array with Int index";
nok @arr[5]:exists, ":exists works on $t array when out of range";
nok @arr[$i]:!exists, ":!exists works on $t array with int index";
nok @arr[4]:!exists, ":!exists works on $t array with Int index";
ok @arr[5]:!exists, ":!exists works on $t array when out of range";
dies-ok { @arr[$i]:delete }, ":delete dies on $t array with int index";
dies-ok { @arr[0]:delete }, ":delete dies on $t array with Int index";
is @arr[$i]:!delete, 66, ":!delete works on $t array with int index";
is @arr[0]:!delete, 66, ":!delete works on $t array with Int index";
is (@arr := array[$T].new(:shape(1), 42)), 42,
"Can call $t array constructor with a single value";
is @arr.elems, 1, "Correct number of elems set in constructor of $t array";
is @arr[0], 42, "Correct element value set by constructor of $t array";
is (@arr := array[$T].new(:shape(4),10, 15, 12,16)), (10,15,12,16),
"Can call $t array constructor with values";
is @arr.elems, 4, "Correct number of elems set in constructor of $t array";
is @arr[0], 10, "Correct elem value set by constructor of $t array (1)";
is @arr[1], 15, "Correct elem value set by constructor of $t array (2)";
is @arr[2], 12, "Correct elem value set by constructor of $t array (3)";
is @arr[3], 16, "Correct elem value set by constructor of $t array (4)";
is @arr[*-1,*-2], (16,12), "Can also get last 2 elements on $t array";
ok @arr.flat ~~ Seq, "$t array .flat returns a Seq";
ok @arr.eager === @arr, "$t array .eager returns identity";
$_++ for @arr;
is @arr[0], 11, "Mutating for loop on $t array works (1)";
is @arr[1], 16, "Mutating for loop on $t array works (2)";
is @arr[2], 13, "Mutating for loop on $t array works (3)";
is @arr[3], 17, "Mutating for loop on $t array works (4)";
is (@arr.map(* *= 2)), (22,32,26,34), "Can map over $t array";
is @arr[0], 22, "Mutating map on $t array works (1)";
is @arr[1], 32, "Mutating map on $t array works (2)";
is @arr[2], 26, "Mutating map on $t array works (3)";
is @arr[3], 34, "Mutating map on $t array works (4)";
is @arr.grep(* < 30).elems, 2, "grep a $t array";
is-deeply @arr.grep(34), (34,), "$t array.grep(Int)";
is-deeply @arr.grep(34, :k), (3,), "$t array.grep(Int, :k)";
is-deeply @arr.grep(34, :kv), (3,34), "$t array.grep(Int, :kv)";
is-deeply @arr.grep(34, :p), (Pair.new(3,34),), "$t array.grep(Int, :p)";
is-deeply @arr.grep(34, :v), (34,), "$t array.grep(Int, :v)";
is-deeply @arr.first(34), 34, "$t array.grep(Int)";
is-deeply @arr.first(34, :k), 3, "$t array.grep(Int, :k)";
is-deeply @arr.first(34, :kv), (3,34), "$t array.grep(Int, :kv)";
is-deeply @arr.first(34, :p), Pair.new(3,34), "$t array.grep(Int, :p)";
is-deeply @arr.first(34, :v), 34, "$t array.grep(Int, :v)";
is ([+] @arr), 114, "Can use reduce meta-op on a $t array";
is @arr.values, (22,32,26,34), ".values from a $t array";
is @arr.pairup, (22=>32,26=>34), ".pairup from a $t array";
is @arr.keys, ( 0, 1, 2, 3), ".keys from a $t array";
is @arr.pairs, (0=>22,1=>32,2=>26,3=>34), ".pairs from a $t array";
is @arr.antipairs, (22=>0,32=>1,26=>2,34=>3), ".antipairs from a $t array";
is @arr.kv, (0,22,1,32,2,26,3,34), ".kv from a $t array";
is @arr.pick, 22|32|26|34, ".pick from a $t array";
is @arr.roll, 22|32|26|34, ".roll from a $t array";
@arr[1] = @arr[0];
is-deeply @arr.unique.List, (22,26,34), "$t array.unique";
is-deeply @arr.repeated.List, (22,), "$t array.repeated";
is-deeply @arr.squish.List, (22,26,34), "$t array.squish";
dies-ok { @arr.pop }, "Trying to pop a shaped $t array dies";
dies-ok { @arr.shift }, "Trying to shift a shaped $t array dies";
dies-ok { @arr.push(42) }, "Trying to push a shaped $t array dies";
dies-ok { @arr.unshift(42) }, "Trying to unshift a shaped $t array dies";
dies-ok { @arr[0] := my $a }, "Cannot bind to a $t array";
dies-ok { @arr[0]:delete }, "Cannot delete from a $t array";
dies-ok { @arr.append(66) }, "Cannot append to a $t array";
dies-ok { @arr.prepend(66) }, "Cannot prepend to a $t array";
dies-ok { @arr.splice }, "Cannot splice to a $t array";
@arr = 1..4;
is @arr.Str, '1 2 3 4', ".Str space-separates on $t array";
is @arr.gist, '[1 2 3 4]', ".gist space-separates on $t array";
is @arr.raku, "array[$t].new(:shape(4,), [1, 2, 3, 4])",
".raku includes type and int values on $t array";
#?rakudo skip 'STORE not working correctly yet)'
is-deeply @arr[^2], (1,2), 'does slice return correctly';
is-deeply @arr[my $ = ^2], 3, 'does slice handle containerized range';
is @arr.join(":"), "1:2:3:4", "does join a $t array";
is (@arr = ()), "0 0 0 0", "Can clear $t array by assigning empty list";
is @arr.join(":"), "0:0:0:0", "does emptying a $t array reset";
@arr = 42,66;
is @arr.join(":"), "42:66:0:0", "does re-initializing a $t array work";
# Interaction of native shaped int arrays and untyped arrays.
my @native := array[$T].new(:shape(10),1..10);
my @untyped = @native;
is @untyped.elems, 10, "List-assigning $t array to untyped works (1)";
is @untyped[0], 1, "List-assigning $t array to untyped works (2)";
is @untyped[9], 10, "List-assigning $t array to untyped works (3)";
@untyped = flat 0, @native, 11;
is @untyped.elems, 12, "List-assign $t array surrounded by literals (1)";
is @untyped[ 0], 0, "List-assign $t array surrounded by literals (2)";
is @untyped[ 5], 5, "List-assign $t array surrounded by literals (3)";
is @untyped[10], 10, "List-assign $t array surrounded by literals (4)";
is @untyped[11], 11, "List-assign $t array surrounded by literals (5)";
my @untyped2 = 21..30;
my @native2 := array[$T].new(:shape(10));
@native2 = @untyped2;
is @native2.elems, 10, "List-assign untyped array of Int to $t array (1)";
is @native2[0], 21, "List-assign untyped array of Int to $t array (2)";
is @native2[9], 30, "List-assign untyped array of Int to $t array (3)";
@untyped2[9] = 'C-C-C-C-Combo Breaker!';
throws-like { @native2 = @untyped2 }, Exception,
"List-assigning incompatible untyped array to $t array dies";
my @ssa := array[$T].new(:shape(10),1..10);
my @ssb := array[$T].new(:shape(10),1..10);
is @ssa ~~ @ssb, True, "Smartmatching same $t arrays works";
is array[$T].new(:shape(5),4,5,1,2,3).sort, "1 2 3 4 5",
"Can we sort $t array";
is array[$T].new(:shape(2),2,1).sort, "1 2",
"Can we sort 2-element sorted $t array";
is array[$T].new(:shape(1),1).sort, "1",
"Can we sort 1-element sorted $t array";
}
# vim: expandtab shiftwidth=4
| 48.192708 | 80 | 0.568464 |
ed4191ba591222f9374b7b5c7b175fd46035d011 | 11,181 | pm | Perl | tools/test_modules/m23700.pm | realSnoopy/hashcat | ea7f7909f4205edf2f289f1a2f31add76a21c433 | [
"MIT"
]
| null | null | null | tools/test_modules/m23700.pm | realSnoopy/hashcat | ea7f7909f4205edf2f289f1a2f31add76a21c433 | [
"MIT"
]
| null | null | null | tools/test_modules/m23700.pm | realSnoopy/hashcat | ea7f7909f4205edf2f289f1a2f31add76a21c433 | [
"MIT"
]
| null | null | null | #!/usr/bin/env perl
##
## Author......: See docs/credits.txt
## License.....: MIT
##
use strict;
use warnings;
use Digest::SHA;
use Crypt::CBC;
use Encode;
use Digest::CRC qw (crc32);
sub module_constraints { [[0, 127], [8, 8], [0, 20], [8, 8], [-1, -1]] }
my $ITERATIONS = 0x40000;
my $FIXED_RAW_STRING = pack ("H*", "c43d7b00400700000000000000000000");
my $SHA1C00 = 0x5a827999;
my $SHA1C01 = 0x6ed9eba1;
my $SHA1C02 = 0x8f1bbcdc;
my $SHA1C03 = 0xca62c1d6;
my $SHA1M_A = 0x67452301;
my $SHA1M_B = 0xefcdab89;
my $SHA1M_C = 0x98badcfe;
my $SHA1M_D = 0x10325476;
my $SHA1M_E = 0xc3d2e1f0;
sub rotl32
{
my $x = shift;
my $n = shift;
return (($x << $n) | ($x >> (32 - $n))) & 0xffffffff;
}
sub blk
{
my $b = shift;
my $i = shift;
$$b[$i & 15] = rotl32 ($$b[($i + 13) & 15] ^
$$b[($i + 8) & 15] ^
$$b[($i + 2) & 15] ^
$$b[($i + 0) & 15], 1);
return $$b[$i & 15];
}
sub R0
{
my ($b, $v, $w, $x, $y, $z, $i) = @_;
$$b[$i] = unpack ("L<", pack ("L>", $$b[$i])); # blk0 or just swap_byte32 ()
$z += (($w & ($x ^ $y)) ^ $y) + $$b[$i] + $SHA1C00 + rotl32 ($v, 5);
$z &= 0xffffffff;
$w = rotl32 ($w, 30);
return ($z, $w);
}
sub R1
{
my ($b, $v, $w, $x, $y, $z, $i) = @_;
$z += (($w & ($x ^ $y)) ^ $y) + blk ($b, $i) + $SHA1C00 + rotl32 ($v, 5);
$z &= 0xffffffff;
$w = rotl32 ($w, 30);
return ($z, $w);
}
sub R2
{
my ($b, $v, $w, $x, $y, $z, $i) = @_;
$z += ($w ^ $x ^ $y) + blk ($b, $i) + $SHA1C01 + rotl32 ($v, 5);
$z &= 0xffffffff;
$w = rotl32 ($w, 30);
return ($z, $w);
}
sub R3
{
my ($b, $v, $w, $x, $y, $z, $i) = @_;
$z += ((($w | $x) & $y) | ($w & $x)) + blk ($b, $i) + $SHA1C02 + rotl32 ($v, 5);
$z &= 0xffffffff;
$w = rotl32 ($w, 30);
return ($z, $w);
}
sub R4
{
my ($b, $v, $w, $x, $y, $z, $i) = @_;
$z += ($w ^ $x ^ $y) + blk ($b, $i) + $SHA1C03 + rotl32 ($v, 5);
$z &= 0xffffffff;
$w = rotl32 ($w, 30);
return ($z, $w);
}
sub sha1_transform
{
my ($state, $buffer) = @_;
my @block = unpack ("L<*", $$buffer);
my $a = $$state[0];
my $b = $$state[1];
my $c = $$state[2];
my $d = $$state[3];
my $e = $$state[4];
($e, $b) = R0 (\@block, $a, $b, $c, $d, $e, 0);
($d, $a) = R0 (\@block, $e, $a, $b, $c, $d, 1);
($c, $e) = R0 (\@block, $d, $e, $a, $b, $c, 2);
($b, $d) = R0 (\@block, $c, $d, $e, $a, $b, 3);
($a, $c) = R0 (\@block, $b, $c, $d, $e, $a, 4);
($e, $b) = R0 (\@block, $a, $b, $c, $d, $e, 5);
($d, $a) = R0 (\@block, $e, $a, $b, $c, $d, 6);
($c, $e) = R0 (\@block, $d, $e, $a, $b, $c, 7);
($b, $d) = R0 (\@block, $c, $d, $e, $a, $b, 8);
($a, $c) = R0 (\@block, $b, $c, $d, $e, $a, 9);
($e, $b) = R0 (\@block, $a, $b, $c, $d, $e, 10);
($d, $a) = R0 (\@block, $e, $a, $b, $c, $d, 11);
($c, $e) = R0 (\@block, $d, $e, $a, $b, $c, 12);
($b, $d) = R0 (\@block, $c, $d, $e, $a, $b, 13);
($a, $c) = R0 (\@block, $b, $c, $d, $e, $a, 14);
($e, $b) = R0 (\@block, $a, $b, $c, $d, $e, 15);
($d, $a) = R1 (\@block, $e, $a, $b, $c, $d, 16);
($c, $e) = R1 (\@block, $d, $e, $a, $b, $c, 17);
($b, $d) = R1 (\@block, $c, $d, $e, $a, $b, 18);
($a, $c) = R1 (\@block, $b, $c, $d, $e, $a, 19);
($e, $b) = R2 (\@block, $a, $b, $c, $d, $e, 20);
($d, $a) = R2 (\@block, $e, $a, $b, $c, $d, 21);
($c, $e) = R2 (\@block, $d, $e, $a, $b, $c, 22);
($b, $d) = R2 (\@block, $c, $d, $e, $a, $b, 23);
($a, $c) = R2 (\@block, $b, $c, $d, $e, $a, 24);
($e, $b) = R2 (\@block, $a, $b, $c, $d, $e, 25);
($d, $a) = R2 (\@block, $e, $a, $b, $c, $d, 26);
($c, $e) = R2 (\@block, $d, $e, $a, $b, $c, 27);
($b, $d) = R2 (\@block, $c, $d, $e, $a, $b, 28);
($a, $c) = R2 (\@block, $b, $c, $d, $e, $a, 29);
($e, $b) = R2 (\@block, $a, $b, $c, $d, $e, 30);
($d, $a) = R2 (\@block, $e, $a, $b, $c, $d, 31);
($c, $e) = R2 (\@block, $d, $e, $a, $b, $c, 32);
($b, $d) = R2 (\@block, $c, $d, $e, $a, $b, 33);
($a, $c) = R2 (\@block, $b, $c, $d, $e, $a, 34);
($e, $b) = R2 (\@block, $a, $b, $c, $d, $e, 35);
($d, $a) = R2 (\@block, $e, $a, $b, $c, $d, 36);
($c, $e) = R2 (\@block, $d, $e, $a, $b, $c, 37);
($b, $d) = R2 (\@block, $c, $d, $e, $a, $b, 38);
($a, $c) = R2 (\@block, $b, $c, $d, $e, $a, 39);
($e, $b) = R3 (\@block, $a, $b, $c, $d, $e, 40);
($d, $a) = R3 (\@block, $e, $a, $b, $c, $d, 41);
($c, $e) = R3 (\@block, $d, $e, $a, $b, $c, 42);
($b, $d) = R3 (\@block, $c, $d, $e, $a, $b, 43);
($a, $c) = R3 (\@block, $b, $c, $d, $e, $a, 44);
($e, $b) = R3 (\@block, $a, $b, $c, $d, $e, 45);
($d, $a) = R3 (\@block, $e, $a, $b, $c, $d, 46);
($c, $e) = R3 (\@block, $d, $e, $a, $b, $c, 47);
($b, $d) = R3 (\@block, $c, $d, $e, $a, $b, 48);
($a, $c) = R3 (\@block, $b, $c, $d, $e, $a, 49);
($e, $b) = R3 (\@block, $a, $b, $c, $d, $e, 50);
($d, $a) = R3 (\@block, $e, $a, $b, $c, $d, 51);
($c, $e) = R3 (\@block, $d, $e, $a, $b, $c, 52);
($b, $d) = R3 (\@block, $c, $d, $e, $a, $b, 53);
($a, $c) = R3 (\@block, $b, $c, $d, $e, $a, 54);
($e, $b) = R3 (\@block, $a, $b, $c, $d, $e, 55);
($d, $a) = R3 (\@block, $e, $a, $b, $c, $d, 56);
($c, $e) = R3 (\@block, $d, $e, $a, $b, $c, 57);
($b, $d) = R3 (\@block, $c, $d, $e, $a, $b, 58);
($a, $c) = R3 (\@block, $b, $c, $d, $e, $a, 59);
($e, $b) = R4 (\@block, $a, $b, $c, $d, $e, 60);
($d, $a) = R4 (\@block, $e, $a, $b, $c, $d, 61);
($c, $e) = R4 (\@block, $d, $e, $a, $b, $c, 62);
($b, $d) = R4 (\@block, $c, $d, $e, $a, $b, 63);
($a, $c) = R4 (\@block, $b, $c, $d, $e, $a, 64);
($e, $b) = R4 (\@block, $a, $b, $c, $d, $e, 65);
($d, $a) = R4 (\@block, $e, $a, $b, $c, $d, 66);
($c, $e) = R4 (\@block, $d, $e, $a, $b, $c, 67);
($b, $d) = R4 (\@block, $c, $d, $e, $a, $b, 68);
($a, $c) = R4 (\@block, $b, $c, $d, $e, $a, 69);
($e, $b) = R4 (\@block, $a, $b, $c, $d, $e, 70);
($d, $a) = R4 (\@block, $e, $a, $b, $c, $d, 71);
($c, $e) = R4 (\@block, $d, $e, $a, $b, $c, 72);
($b, $d) = R4 (\@block, $c, $d, $e, $a, $b, 73);
($a, $c) = R4 (\@block, $b, $c, $d, $e, $a, 74);
($e, $b) = R4 (\@block, $a, $b, $c, $d, $e, 75);
($d, $a) = R4 (\@block, $e, $a, $b, $c, $d, 76);
($c, $e) = R4 (\@block, $d, $e, $a, $b, $c, 77);
($b, $d) = R4 (\@block, $c, $d, $e, $a, $b, 78);
($a, $c) = R4 (\@block, $b, $c, $d, $e, $a, 79);
$$state[0] = ($$state[0] + $a) & 0xffffffff;
$$state[1] = ($$state[1] + $b) & 0xffffffff;
$$state[2] = ($$state[2] + $c) & 0xffffffff;
$$state[3] = ($$state[3] + $d) & 0xffffffff;
$$state[4] = ($$state[4] + $e) & 0xffffffff;
$$buffer = pack ("L<*", @block);
}
sub sha1_getstate
{
my $ctx = shift;
my $info = $ctx->getstate;
# state:
my $idx = index ($info, "H:");
my $state = substr ($info, $idx + 2, 44);
$state =~ s/://g;
my @state_arr = unpack ("L>*", pack ("H*", $state));
# block:
$idx = index ($info, "block:");
my $block = substr ($info, $idx + 6, 191);
$block =~ s/://g;
$block = pack ("H*", $block);
return (\@state_arr, $block);
}
sub sha1_update_rar29
{
my $ctx = shift;
my $data = shift;
my $len = shift;
my $count = shift;
my $ctx_orig = $ctx->clone;
$ctx->add ($$data);
# two early exits from this function, if (strange data) manipulation is not needed:
my $j = $count & 63;
return if (($j + $len) <= 63);
my $i = 64 - $j;
return if (($i + 63) >= $len);
# proceed with updating $data:
my ($state, $block) = sha1_getstate ($ctx_orig);
substr ($block, $j, $i) = substr ($$data, 0, $i);
sha1_transform ($state, \$block);
while (($i + 63) < $len)
{
my $workspace = substr ($$data, $i, 64);
sha1_transform ($state, \$workspace);
substr ($$data, $i, 64) = $workspace;
$i += 64;
}
}
sub module_generate_hash
{
my $word = shift;
my $salt = shift;
my $crc32_sum = shift;
my $pack_size = shift;
my $unpack_size = shift;
my $data = shift;
# convert to utf16le:
my $buf = encode ("UTF-16LE", $word);
# add the salt to the password buffer:
$buf .= $salt;
my $len = length ($buf);
my $count = 0;
my $ctx = Digest::SHA->new ('SHA1');
my $iv = "";
# main loop:
for (my $i = 0; $i < $ITERATIONS; $i++)
{
sha1_update_rar29 ($ctx, \$buf, $len, $count);
$count += $len;
my $pos = substr (pack ("L<", $i), 0, 3);
$ctx->add ($pos);
$count += 3;
if (($i & 0x3fff) == 0)
{
my $dgst = $ctx->clone->digest;
$iv .= substr ($dgst, 19, 1);
}
}
my $k = $ctx->digest;
$k = pack ("L<*", unpack ("L>4", $k)); # byte swap the first 4 * 4 = 16 bytes
# AES-128 encrypt:
my $aes = Crypt::CBC->new (
-cipher => "Crypt::Rijndael",
-key => $k,
-iv => $iv,
-keysize => 16,
-literal_key => 1,
-header => 'none',
-padding => 'none'
);
if (defined ($data))
{
my $data_orig = $data;
my $data_encrypted = substr ($data, 0, $pack_size);
my $data_decrypted = $aes->decrypt ($data_encrypted);
# CRC32 checksum of the decrypted data:
my $data_crc = substr ($data_decrypted, 0, $unpack_size);
my $crc32_computed = crc32 ($data_crc);
$data = "WRONG";
# verify:
if ($crc32_computed eq $crc32_sum)
{
$data = $data_crc;
}
}
else
{
my $data_len = random_number (1, 4096);
$data = random_bytes ($data_len);
}
my $crc32_computed = crc32 ($data);
# byte-swap CRC32 checksum:
$crc32_computed = unpack ("L<", pack ("L>", $crc32_computed));
my $data_encrypted = $aes->encrypt ($data);
$pack_size = length ($data_encrypted);
$unpack_size = length ($data);
return sprintf ("\$RAR3\$*1*%s*%08x*%i*%i*1*%s*30", unpack ("H*", $salt), $crc32_computed, $pack_size, $unpack_size, unpack ("H*", $data_encrypted));
}
sub module_verify_hash
{
my $line = shift;
my $idx1 = index ($line, ':');
return if ($idx1 < 1);
my $hash = substr ($line, 0, $idx1);
my $word = substr ($line, $idx1 + 1);
return if (substr ($hash, 0, 9) ne "\$RAR3\$*1*");
$idx1 = index ($hash, '*', 9);
return if ($idx1 < 1);
# salt
my $salt = substr ($hash, 9, $idx1 - 9);
# crc32
my $idx2 = index ($hash, '*', $idx1 + 1);
return if ($idx2 < 1);
my $crc32_sum = substr ($hash, $idx1 + 1, $idx2 - $idx1 - 1);
# pack size
$idx1 = index ($hash, '*', $idx2 + 1);
return if ($idx1 < 1);
my $pack_size = substr ($hash, $idx2 + 1, $idx1 - $idx2 - 1);
# unpack size
$idx2 = index ($hash, '*', $idx1 + 1);
return if ($idx2 < 1);
my $unpack_size = substr ($hash, $idx1 + 1, $idx2 - $idx1 - 1);
return unless (substr ($hash, $idx2, 3) eq "*1*");
# data
$idx1 = index ($hash, '*', $idx2 + 3);
return if ($idx1 < 1);
my $data = substr ($hash, $idx2 + 3, $idx1 - $idx2 - 3);
return unless (substr ($hash, $idx1, 3) eq "*30");
# convert to hex:
$salt = pack ("H*", $salt);
$data = pack ("H*", $data);
$crc32_sum = unpack ("L<", pack ("H*", $crc32_sum));
my $word_packed = pack_if_HEX_notation ($word);
my $new_hash = module_generate_hash ($word_packed, $salt, $crc32_sum, $pack_size, $unpack_size, $data);
return ($new_hash, $word);
}
1;
| 22.496982 | 151 | 0.43082 |
ed57896bcaed966f1ef537608d71fdb96964d314 | 41,184 | pl | Perl | crypto/sha/asm/keccak1600-armv4.pl | qzmfranklin/openssl | 45dd7d1ac30a506e5186ef12a0fb3133abffe905 | [
"OpenSSL"
]
| 1 | 2017-12-04T12:08:18.000Z | 2017-12-04T12:08:18.000Z | crypto/sha/asm/keccak1600-armv4.pl | qzmfranklin/openssl | 45dd7d1ac30a506e5186ef12a0fb3133abffe905 | [
"OpenSSL"
]
| null | null | null | crypto/sha/asm/keccak1600-armv4.pl | qzmfranklin/openssl | 45dd7d1ac30a506e5186ef12a0fb3133abffe905 | [
"OpenSSL"
]
| 1 | 2020-08-07T09:33:37.000Z | 2020-08-07T09:33:37.000Z | #!/usr/bin/env perl
# Copyright 2017 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
#
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# Keccak-1600 for ARMv4.
#
# June 2017.
#
# Non-NEON code is KECCAK_1X variant (see sha/keccak1600.c) with bit
# interleaving. How does it compare to Keccak Code Package? It's as
# fast, but several times smaller, and is endian- and ISA-neutral. ISA
# neutrality means that minimum ISA requirement is ARMv4, yet it can
# be assembled even as Thumb-2. NEON code path is KECCAK_1X_ALT with
# register layout taken from Keccak Code Package. It's also as fast,
# in fact faster by 10-15% on some processors, and endian-neutral.
#
# August 2017.
#
# Switch to KECCAK_2X variant for non-NEON code and merge almost 1/2
# of rotate instructions with logical ones. This resulted in ~10%
# improvement on most processors. Switch to KECCAK_2X effectively
# minimizes re-loads from temporary storage, and merged rotates just
# eliminate corresponding instructions. As for latter. When examining
# code you'll notice commented ror instructions. These are eliminated
# ones, and you should trace destination register below to see what's
# going on. Just in case, why not all rotates are eliminated. Trouble
# is that you have operations that require both inputs to be rotated,
# e.g. 'eor a,b>>>x,c>>>y'. This conundrum is resolved by using
# 'eor a,b,c>>>(x-y)' and then merge-rotating 'a' in next operation
# that takes 'a' as input. And thing is that this next operation can
# be in next round. It's totally possible to "carry" rotate "factors"
# to the next round, but it makes code more complex. And the last word
# is the keyword, i.e. "almost 1/2" is kind of complexity cap [for the
# time being]...
#
########################################################################
# Numbers are cycles per processed byte. Non-NEON results account even
# for input bit interleaving.
#
# r=1088(*), NEON
#
# ARM11xx 82/+150%
# Cortex-A5 88/+160%, 36
# Cortex-A7 78/+160%, 34
# Cortex-A8 51/+230%, 30
# Cortex-A9 53/+210%, 26
# Cortex-A15 42/+160%, 18
# Snapdragon S4 43/+210%, 24
#
# (*) Corresponds to SHA3-256. Percentage after slash is improvement
# over compiler-generated KECCAK_2X reference code.
# General-purpose register assignment for the scalar code path:
# @C holds the ten "working" registers r0-r9; @E holds the four spare
# registers r10-r12 plus r14 (lr).  r13 (sp) is never handed out here.
my @C = map { "r$_" } 0 .. 9;
my @E = map { "r$_" } 10 .. 12, 14;
########################################################################
# Stack frame layout, byte offsets from sp:
#
#   +0    uint64_t A[5][5]   Keccak state lanes (25 * 8 = 200 bytes)
#   +200  uint64_t D[5]      theta-step D values
#   +240  uint64_t T[5][5]   scratch copy of the state
#   +440  saved lr
#   +444  loop (round) counter
#   +448  ...
#
# Each table entry is the byte offset of one 64-bit lane, i.e. 8 times
# the lane's linear index; rows start at linear indices 0,5,10,15,20
# for A/T and the D vector occupies indices 25..29.
my @A = map { my $base = $_; [ map { 8 * $_ } $base .. $base + 4 ] } (0, 5, 10, 15, 20);
my @D = map { 8 * $_ } 25 .. 29;
my @T = map { my $base = $_; [ map { 8 * $_ } $base .. $base + 4 ] } (30, 35, 40, 45, 50);
# Emit the constant table and the KeccakF1600_int entry sequence.
#
#  - iotas32: 24 round constants, each stored as a pair of 32-bit words.
#    Presumably these are the even/odd halves of the bit-interleaved
#    64-bit iota values (consistent with the "bit interleaving" scheme
#    described in the file header) -- confirm against the canonical
#    Keccak round-constant list before editing.
#  - KeccakF1600_int: points @C[9] at A[4][2] on the stack and preloads
#    lanes A[4][2..4] into @C[4]-@C[9], then falls through to
#    KeccakF1600_enter, which saves lr at sp+440 and zeroes the round
#    counter at sp+444 (see the stack-layout comment above) before
#    entering the round loop at .Lround.
#
# NOTE: this heredoc interpolates the Perl-level @C/@E register names
# and $A[...] byte offsets declared earlier, so the assembly text below
# is partly dynamic; do not edit it without tracing the interpolations.
$code.=<<___;
.text

#if defined(__thumb2__)
.syntax	unified
.thumb
#else
.code	32
#endif

.type	iotas32, %object
.align	5
iotas32:
	.long	0x00000001, 0x00000000
	.long	0x00000000, 0x00000089
	.long	0x00000000, 0x8000008b
	.long	0x00000000, 0x80008080
	.long	0x00000001, 0x0000008b
	.long	0x00000001, 0x00008000
	.long	0x00000001, 0x80008088
	.long	0x00000001, 0x80000082
	.long	0x00000000, 0x0000000b
	.long	0x00000000, 0x0000000a
	.long	0x00000001, 0x00008082
	.long	0x00000000, 0x00008003
	.long	0x00000001, 0x0000808b
	.long	0x00000001, 0x8000000b
	.long	0x00000001, 0x8000008a
	.long	0x00000001, 0x80000081
	.long	0x00000000, 0x80000081
	.long	0x00000000, 0x80000008
	.long	0x00000000, 0x00000083
	.long	0x00000000, 0x80008003
	.long	0x00000001, 0x80008088
	.long	0x00000000, 0x80000088
	.long	0x00000001, 0x00008000
	.long	0x00000000, 0x80008082
.size	iotas32,.-iotas32

.type	KeccakF1600_int, %function
.align	5
KeccakF1600_int:
	add	@C[9],sp,#$A[4][2]
	add	@E[2],sp,#$A[0][0]
	add	@E[0],sp,#$A[1][0]
	ldmia	@C[9],{@C[4]-@C[9]}		@ A[4][2..4]
KeccakF1600_enter:
	str	lr,[sp,#440]
	eor	@E[1],@E[1],@E[1]
	str	@E[1],[sp,#444]
	b	.Lround

.align	4
.Lround:
___
# Round(@src_rows, @dst_rows) — emit one Keccak-f[1600] round for the
# integer-only path.  The first five arguments are the row-offset tables
# of the source state (unpacked into the local @A, shadowing the global),
# the remaining five (@R) are the destination rows, so two calls A->T and
# T->A keep the state ping-ponging on the stack without copies.  The
# representation is bit-interleaved: each 64-bit lane is an (even-bits,
# odd-bits) pair of 32-bit words, so ROL64(x,2k) and ROL64(x,2k+1) become
# 32-bit rotations of the halves — hence the pervasive ror#32-n operands
# and the merge-rotation trick described in the header comment.
sub Round {
my (@A,@R); (@A[0..4],@R) = @_;

# Theta: accumulate the five column parities C[0..4] over all 25 lanes,
# then derive D[] = C[x-1] ^ ROL64(C[x+1],1) and fold it into the state
# while lanes are loaded for Rho+Pi below.
$code.=<<___;
	ldmia	@E[2],{@C[0]-@C[3]}	@ A[0][0..1]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[1][0..1]
	eor	@C[0],@C[0],@E[0]
	add	@E[0],sp,#$A[1][2]
	eor	@C[1],@C[1],@E[1]
	eor	@C[2],@C[2],@E[2]
	eor	@C[3],@C[3],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[1][2..3]
	eor	@C[4],@C[4],@E[0]
	add	@E[0],sp,#$A[1][4]
	eor	@C[5],@C[5],@E[1]
	eor	@C[6],@C[6],@E[2]
	eor	@C[7],@C[7],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[1][4]..A[2][0]
	eor	@C[8],@C[8],@E[0]
	add	@E[0],sp,#$A[2][1]
	eor	@C[9],@C[9],@E[1]
	eor	@C[0],@C[0],@E[2]
	eor	@C[1],@C[1],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[2][1..2]
	eor	@C[2],@C[2],@E[0]
	add	@E[0],sp,#$A[2][3]
	eor	@C[3],@C[3],@E[1]
	eor	@C[4],@C[4],@E[2]
	eor	@C[5],@C[5],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[2][3..4]
	eor	@C[6],@C[6],@E[0]
	add	@E[0],sp,#$A[3][0]
	eor	@C[7],@C[7],@E[1]
	eor	@C[8],@C[8],@E[2]
	eor	@C[9],@C[9],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[3][0..1]
	eor	@C[0],@C[0],@E[0]
	add	@E[0],sp,#$A[3][2]
	eor	@C[1],@C[1],@E[1]
	eor	@C[2],@C[2],@E[2]
	eor	@C[3],@C[3],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[3][2..3]
	eor	@C[4],@C[4],@E[0]
	add	@E[0],sp,#$A[3][4]
	eor	@C[5],@C[5],@E[1]
	eor	@C[6],@C[6],@E[2]
	eor	@C[7],@C[7],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[3][4]..A[4][0]
	eor	@C[8],@C[8],@E[0]
	ldr	@E[0],[sp,#$A[4][1]]	@ A[4][1]
	eor	@C[9],@C[9],@E[1]
	ldr	@E[1],[sp,#$A[4][1]+4]
	eor	@C[0],@C[0],@E[2]
	ldr	@E[2],[sp,#$A[0][2]]	@ A[0][2]
	eor	@C[1],@C[1],@E[3]
	ldr	@E[3],[sp,#$A[0][2]+4]
	eor	@C[2],@C[2],@E[0]
	add	@E[0],sp,#$A[0][3]
	eor	@C[3],@C[3],@E[1]
	eor	@C[4],@C[4],@E[2]
	eor	@C[5],@C[5],@E[3]
	ldmia	@E[0],{@E[0]-@E[2],@E[3]}	@ A[0][3..4]
	eor	@C[6],@C[6],@E[0]
	eor	@C[7],@C[7],@E[1]
	eor	@C[8],@C[8],@E[2]
	eor	@C[9],@C[9],@E[3]

	eor	@E[0],@C[0],@C[5],ror#32-1	@ E[0] = ROL64(C[2], 1) ^ C[0];
	eor	@E[1],@C[1],@C[4]
	str	@E[0],[sp,#$D[1]]	@ D[1] = E[0]
	eor	@E[2],@C[6],@C[1],ror#32-1	@ E[1] = ROL64(C[0], 1) ^ C[3];
	str	@E[1],[sp,#$D[1]+4]
	eor	@E[3],@C[7],@C[0]
	str	@E[2],[sp,#$D[4]]	@ D[4] = E[1]
	eor	@C[0],@C[8],@C[3],ror#32-1	@ C[0] = ROL64(C[1], 1) ^ C[4];
	str	@E[3],[sp,#$D[4]+4]
	eor	@C[1],@C[9],@C[2]
	str	@C[0],[sp,#$D[0]]	@ D[0] = C[0]
	eor	@C[2],@C[2],@C[7],ror#32-1	@ C[1] = ROL64(C[3], 1) ^ C[1];
	str	@C[1],[sp,#$D[0]+4]
	eor	@C[3],@C[3],@C[6]
	ldr	@C[7],[sp,#$A[3][3]]
	str	@C[2],[sp,#$D[2]]	@ D[2] = C[1]
	eor	@C[4],@C[4],@C[9],ror#32-1	@ C[2] = ROL64(C[4], 1) ^ C[2];
	ldr	@C[6],[sp,#$A[3][3]+4]
	str	@C[3],[sp,#$D[2]+4]

	eor	@C[5],@C[5],@C[8]
	ldr	@C[8],[sp,#$A[4][4]]
	ldr	@C[9],[sp,#$A[4][4]+4]
	str	@C[4],[sp,#$D[3]]	@ D[3] = C[2]
	eor	@C[7],@C[7],@C[4]
	str	@C[5],[sp,#$D[3]+4]
	eor	@C[6],@C[6],@C[5]
	ldr	@C[4],[sp,#$A[0][0]]
	@ ror	@C[7],@C[7],#32-10	@ C[3] = ROL64(A[3][3] ^ C[2], rhotates[3][3]);   /* D[3] */
	@ ror	@C[6],@C[6],#32-11
	eor	@C[8],@C[8],@E[2]
	ldr	@C[5],[sp,#$A[0][0]+4]
	eor	@C[9],@C[9],@E[3]
	ldr	@E[2],[sp,#$A[2][2]]
	eor	@C[0],@C[0],@C[4]
	ldr	@E[3],[sp,#$A[2][2]+4]
	@ ror	@C[8],@C[8],#32-7	@ C[4] = ROL64(A[4][4] ^ E[1], rhotates[4][4]);   /* D[4] */
	@ ror	@C[9],@C[9],#32-7
	eor	@C[1],@C[1],@C[5]	@ C[0] = A[0][0] ^ C[0]; /* rotate by 0 */  /* D[0] */
	eor	@E[2],@E[2],@C[2]
	ldr	@C[2],[sp,#$A[1][1]]
	eor	@E[3],@E[3],@C[3]
	ldr	@C[3],[sp,#$A[1][1]+4]
	ror	@C[5],@E[2],#32-21	@ C[2] = ROL64(A[2][2] ^ C[1], rhotates[2][2]);   /* D[2] */
	ldr	@E[2],[sp,#444]		@ load counter
	eor	@C[2],@C[2],@E[0]
	adr	@E[0],iotas32
	ror	@C[4],@E[3],#32-22
	add	@E[3],@E[0],@E[2]
	eor	@C[3],@C[3],@E[1]
___
# iotas[i] addressing differs between the two unrolled half-rounds: the
# A->T call ($A[0][0] != $T[0][0]) only reads the pair, while the T->A
# call also advances the counter by 16 (two 8-byte iotas) and performs
# the cmp whose flags the caller's "blo .Lround" consumes.
$code.=<<___	if ($A[0][0] != $T[0][0]);
	ldmia	@E[3],{@E[0],@E[1]}	@ iotas[i]
___
$code.=<<___	if ($A[0][0] == $T[0][0]);
	ldr	@E[0],[@E[3],#8]	@ iotas[i].lo
	add	@E[2],@E[2],#16
	ldr	@E[1],[@E[3],#12]	@ iotas[i].hi
	cmp	@E[2],#192
	str	@E[2],[sp,#444]		@ store counter
___
# Chi (+ Iota on R[0][0]) for row 0, then the remaining Rho+Pi+Chi rows,
# storing every result lane straight into the destination R[][] offsets.
# Rotations annotated with a leading "@" were eliminated by merge-rotating
# into a later consumer (see the header comment about "almost 1/2").
$code.=<<___;
	bic	@E[2],@C[4],@C[2],ror#32-22
	bic	@E[3],@C[5],@C[3],ror#32-22
	ror	@C[2],@C[2],#32-22	@ C[1] = ROL64(A[1][1] ^ E[0], rhotates[1][1]);   /* D[1] */
	ror	@C[3],@C[3],#32-22
	eor	@E[2],@E[2],@C[0]
	eor	@E[3],@E[3],@C[1]
	eor	@E[0],@E[0],@E[2]
	eor	@E[1],@E[1],@E[3]
	str	@E[0],[sp,#$R[0][0]]	@ R[0][0] = C[0] ^ (~C[1] & C[2]) ^ iotas[i];
	bic	@E[2],@C[6],@C[4],ror#11
	str	@E[1],[sp,#$R[0][0]+4]
	bic	@E[3],@C[7],@C[5],ror#10
	bic	@E[0],@C[8],@C[6],ror#32-(11-7)
	bic	@E[1],@C[9],@C[7],ror#32-(10-7)
	eor	@E[2],@C[2],@E[2],ror#32-11
	eor	@E[3],@C[3],@E[3],ror#32-10
	str	@E[2],[sp,#$R[0][1]]	@ R[0][1] = C[1] ^ (~C[2] & C[3]);
	eor	@E[0],@C[4],@E[0],ror#32-7
	str	@E[3],[sp,#$R[0][1]+4]
	eor	@E[1],@C[5],@E[1],ror#32-7
	str	@E[0],[sp,#$R[0][2]]	@ R[0][2] = C[2] ^ (~C[3] & C[4]);
	bic	@E[2],@C[0],@C[8],ror#32-7
	str	@E[1],[sp,#$R[0][2]+4]
	bic	@E[3],@C[1],@C[9],ror#32-7
	eor	@E[2],@E[2],@C[6],ror#32-11
	eor	@E[3],@E[3],@C[7],ror#32-10
	str	@E[2],[sp,#$R[0][3]]	@ R[0][3] = C[3] ^ (~C[4] & C[0]);
	bic	@E[0],@C[2],@C[0]
	str	@E[3],[sp,#$R[0][3]+4]
	add	@E[3],sp,#$D[3]
	bic	@E[1],@C[3],@C[1]
	ldr	@C[0],[sp,#$A[0][3]]	@ A[0][3]
	eor	@E[0],@E[0],@C[8],ror#32-7
	ldr	@C[1],[sp,#$A[0][3]+4]
	eor	@E[1],@E[1],@C[9],ror#32-7
	str	@E[0],[sp,#$R[0][4]]	@ R[0][4] = C[4] ^ (~C[0] & C[1]);
	add	@C[9],sp,#$D[0]
	str	@E[1],[sp,#$R[0][4]+4]

	ldmia	@E[3],{@E[0]-@E[2],@E[3]}	@ D[3..4]
	ldmia	@C[9],{@C[6]-@C[9]}		@ D[0..1]

	ldr	@C[2],[sp,#$A[1][4]]	@ A[1][4]
	eor	@C[0],@C[0],@E[0]
	ldr	@C[3],[sp,#$A[1][4]+4]
	eor	@C[1],@C[1],@E[1]
	@ ror	@C[0],@C[0],#32-14	@ C[0] = ROL64(A[0][3] ^ D[3], rhotates[0][3]);
	ldr	@E[0],[sp,#$A[3][1]]	@ A[3][1]
	@ ror	@C[1],@C[1],#32-14
	ldr	@E[1],[sp,#$A[3][1]+4]

	eor	@C[2],@C[2],@E[2]
	ldr	@C[4],[sp,#$A[2][0]]	@ A[2][0]
	eor	@C[3],@C[3],@E[3]
	ldr	@C[5],[sp,#$A[2][0]+4]
	@ ror	@C[2],@C[2],#32-10	@ C[1] = ROL64(A[1][4] ^ D[4], rhotates[1][4]);
	@ ror	@C[3],@C[3],#32-10

	eor	@C[6],@C[6],@C[4]
	ldr	@E[2],[sp,#$D[2]]	@ D[2]
	eor	@C[7],@C[7],@C[5]
	ldr	@E[3],[sp,#$D[2]+4]
	ror	@C[5],@C[6],#32-1	@ C[2] = ROL64(A[2][0] ^ D[0], rhotates[2][0]);
	ror	@C[4],@C[7],#32-2

	eor	@E[0],@E[0],@C[8]
	ldr	@C[8],[sp,#$A[4][2]]	@ A[4][2]
	eor	@E[1],@E[1],@C[9]
	ldr	@C[9],[sp,#$A[4][2]+4]
	ror	@C[7],@E[0],#32-22	@ C[3] = ROL64(A[3][1] ^ D[1], rhotates[3][1]);
	ror	@C[6],@E[1],#32-23

	bic	@E[0],@C[4],@C[2],ror#32-10
	bic	@E[1],@C[5],@C[3],ror#32-10
	eor	@E[2],@E[2],@C[8]
	eor	@E[3],@E[3],@C[9]
	ror	@C[9],@E[2],#32-30	@ C[4] = ROL64(A[4][2] ^ D[2], rhotates[4][2]);
	ror	@C[8],@E[3],#32-31
	eor	@E[0],@E[0],@C[0],ror#32-14
	eor	@E[1],@E[1],@C[1],ror#32-14
	str	@E[0],[sp,#$R[1][0]]	@ R[1][0] = C[0] ^ (~C[1] & C[2])
	bic	@E[2],@C[6],@C[4]
	str	@E[1],[sp,#$R[1][0]+4]
	bic	@E[3],@C[7],@C[5]
	eor	@E[2],@E[2],@C[2],ror#32-10
	eor	@E[3],@E[3],@C[3],ror#32-10
	str	@E[2],[sp,#$R[1][1]]	@ R[1][1] = C[1] ^ (~C[2] & C[3]);
	bic	@E[0],@C[8],@C[6]
	str	@E[3],[sp,#$R[1][1]+4]
	bic	@E[1],@C[9],@C[7]
	bic	@E[2],@C[0],@C[8],ror#14
	bic	@E[3],@C[1],@C[9],ror#14
	eor	@E[0],@E[0],@C[4]
	eor	@E[1],@E[1],@C[5]
	str	@E[0],[sp,#$R[1][2]]	@ R[1][2] = C[2] ^ (~C[3] & C[4]);
	bic	@E[0],@C[2],@C[0],ror#32-(14-10)
	eor	@E[2],@C[6],@E[2],ror#32-14
	str	@E[1],[sp,#$R[1][2]+4]
	bic	@E[1],@C[3],@C[1],ror#32-(14-10)
	eor	@E[3],@C[7],@E[3],ror#32-14
	str	@E[2],[sp,#$R[1][3]]	@ R[1][3] = C[3] ^ (~C[4] & C[0]);
	add	@E[2],sp,#$D[1]
	str	@E[3],[sp,#$R[1][3]+4]
	ldr	@C[1],[sp,#$A[0][1]]	@ A[0][1]
	eor	@E[0],@C[8],@E[0],ror#32-10
	ldr	@C[0],[sp,#$A[0][1]+4]
	eor	@E[1],@C[9],@E[1],ror#32-10
	str	@E[0],[sp,#$R[1][4]]	@ R[1][4] = C[4] ^ (~C[0] & C[1]);
	add	@C[9],sp,#$D[3]
	str	@E[1],[sp,#$R[1][4]+4]

	ldmia	@E[2],{@E[0]-@E[2],@E[3]}	@ D[1..2]
	ldr	@C[2],[sp,#$A[1][2]]	@ A[1][2]
	ldr	@C[3],[sp,#$A[1][2]+4]
	ldmia	@C[9],{@C[6]-@C[9]}		@ D[3..4]

	eor	@C[1],@C[1],@E[0]
	ldr	@C[4],[sp,#$A[2][3]]	@ A[2][3]
	eor	@C[0],@C[0],@E[1]
	ldr	@C[5],[sp,#$A[2][3]+4]
	ror	@C[0],@C[0],#32-1	@ C[0] = ROL64(A[0][1] ^ D[1], rhotates[0][1]);

	eor	@C[2],@C[2],@E[2]
	ldr	@E[0],[sp,#$A[3][4]]	@ A[3][4]
	eor	@C[3],@C[3],@E[3]
	ldr	@E[1],[sp,#$A[3][4]+4]
	@ ror	@C[2],@C[2],#32-3	@ C[1] = ROL64(A[1][2] ^ D[2], rhotates[1][2]);
	ldr	@E[2],[sp,#$D[0]]	@ D[0]
	@ ror	@C[3],@C[3],#32-3
	ldr	@E[3],[sp,#$D[0]+4]

	eor	@C[4],@C[4],@C[6]
	eor	@C[5],@C[5],@C[7]
	@ ror	@C[5],@C[6],#32-12	@ C[2] = ROL64(A[2][3] ^ D[3], rhotates[2][3]);
	@ ror	@C[4],@C[7],#32-13	@ [track reverse order below]

	eor	@E[0],@E[0],@C[8]
	ldr	@C[8],[sp,#$A[4][0]]	@ A[4][0]
	eor	@E[1],@E[1],@C[9]
	ldr	@C[9],[sp,#$A[4][0]+4]
	ror	@C[6],@E[0],#32-4	@ C[3] = ROL64(A[3][4] ^ D[4], rhotates[3][4]);
	ror	@C[7],@E[1],#32-4

	eor	@E[2],@E[2],@C[8]
	eor	@E[3],@E[3],@C[9]
	ror	@C[8],@E[2],#32-9	@ C[4] = ROL64(A[4][0] ^ D[0], rhotates[4][0]);
	ror	@C[9],@E[3],#32-9

	bic	@E[0],@C[5],@C[2],ror#13-3
	bic	@E[1],@C[4],@C[3],ror#12-3
	bic	@E[2],@C[6],@C[5],ror#32-13
	bic	@E[3],@C[7],@C[4],ror#32-12
	eor	@E[0],@C[0],@E[0],ror#32-13
	eor	@E[1],@C[1],@E[1],ror#32-12
	str	@E[0],[sp,#$R[2][0]]	@ R[2][0] = C[0] ^ (~C[1] & C[2])
	eor	@E[2],@E[2],@C[2],ror#32-3
	str	@E[1],[sp,#$R[2][0]+4]
	eor	@E[3],@E[3],@C[3],ror#32-3
	str	@E[2],[sp,#$R[2][1]]	@ R[2][1] = C[1] ^ (~C[2] & C[3]);
	bic	@E[0],@C[8],@C[6]
	str	@E[3],[sp,#$R[2][1]+4]
	bic	@E[1],@C[9],@C[7]
	eor	@E[0],@E[0],@C[5],ror#32-13
	eor	@E[1],@E[1],@C[4],ror#32-12
	str	@E[0],[sp,#$R[2][2]]	@ R[2][2] = C[2] ^ (~C[3] & C[4]);
	bic	@E[2],@C[0],@C[8]
	str	@E[1],[sp,#$R[2][2]+4]
	bic	@E[3],@C[1],@C[9]
	eor	@E[2],@E[2],@C[6]
	eor	@E[3],@E[3],@C[7]
	str	@E[2],[sp,#$R[2][3]]	@ R[2][3] = C[3] ^ (~C[4] & C[0]);
	bic	@E[0],@C[2],@C[0],ror#3
	str	@E[3],[sp,#$R[2][3]+4]
	bic	@E[1],@C[3],@C[1],ror#3
	ldr	@C[1],[sp,#$A[0][4]]	@ A[0][4] [in reverse order]
	eor	@E[0],@C[8],@E[0],ror#32-3
	ldr	@C[0],[sp,#$A[0][4]+4]
	eor	@E[1],@C[9],@E[1],ror#32-3
	str	@E[0],[sp,#$R[2][4]]	@ R[2][4] = C[4] ^ (~C[0] & C[1]);
	add	@C[9],sp,#$D[1]
	str	@E[1],[sp,#$R[2][4]+4]

	ldr	@E[0],[sp,#$D[4]]	@ D[4]
	ldr	@E[1],[sp,#$D[4]+4]
	ldr	@E[2],[sp,#$D[0]]	@ D[0]
	ldr	@E[3],[sp,#$D[0]+4]

	ldmia	@C[9],{@C[6]-@C[9]}	@ D[1..2]

	eor	@C[1],@C[1],@E[0]
	ldr	@C[2],[sp,#$A[1][0]]	@ A[1][0]
	eor	@C[0],@C[0],@E[1]
	ldr	@C[3],[sp,#$A[1][0]+4]
	@ ror	@C[1],@E[0],#32-13	@ C[0] = ROL64(A[0][4] ^ D[4], rhotates[0][4]);
	ldr	@C[4],[sp,#$A[2][1]]	@ A[2][1]
	@ ror	@C[0],@E[1],#32-14	@ [was loaded in reverse order]
	ldr	@C[5],[sp,#$A[2][1]+4]

	eor	@C[2],@C[2],@E[2]
	ldr	@E[0],[sp,#$A[3][2]]	@ A[3][2]
	eor	@C[3],@C[3],@E[3]
	ldr	@E[1],[sp,#$A[3][2]+4]
	@ ror	@C[2],@C[2],#32-18	@ C[1] = ROL64(A[1][0] ^ D[0], rhotates[1][0]);
	ldr	@E[2],[sp,#$D[3]]	@ D[3]
	@ ror	@C[3],@C[3],#32-18
	ldr	@E[3],[sp,#$D[3]+4]

	eor	@C[6],@C[6],@C[4]
	eor	@C[7],@C[7],@C[5]
	ror	@C[4],@C[6],#32-5	@ C[2] = ROL64(A[2][1] ^ D[1], rhotates[2][1]);
	ror	@C[5],@C[7],#32-5

	eor	@E[0],@E[0],@C[8]
	ldr	@C[8],[sp,#$A[4][3]]	@ A[4][3]
	eor	@E[1],@E[1],@C[9]
	ldr	@C[9],[sp,#$A[4][3]+4]
	ror	@C[7],@E[0],#32-7	@ C[3] = ROL64(A[3][2] ^ D[2], rhotates[3][2]);
	ror	@C[6],@E[1],#32-8

	eor	@E[2],@E[2],@C[8]
	eor	@E[3],@E[3],@C[9]
	ror	@C[8],@E[2],#32-28	@ C[4] = ROL64(A[4][3] ^ D[3], rhotates[4][3]);
	ror	@C[9],@E[3],#32-28

	bic	@E[0],@C[4],@C[2],ror#32-18
	bic	@E[1],@C[5],@C[3],ror#32-18
	eor	@E[0],@E[0],@C[0],ror#32-14
	eor	@E[1],@E[1],@C[1],ror#32-13
	str	@E[0],[sp,#$R[3][0]]	@ R[3][0] = C[0] ^ (~C[1] & C[2])
	bic	@E[2],@C[6],@C[4]
	str	@E[1],[sp,#$R[3][0]+4]
	bic	@E[3],@C[7],@C[5]
	eor	@E[2],@E[2],@C[2],ror#32-18
	eor	@E[3],@E[3],@C[3],ror#32-18
	str	@E[2],[sp,#$R[3][1]]	@ R[3][1] = C[1] ^ (~C[2] & C[3]);
	bic	@E[0],@C[8],@C[6]
	str	@E[3],[sp,#$R[3][1]+4]
	bic	@E[1],@C[9],@C[7]
	bic	@E[2],@C[0],@C[8],ror#14
	bic	@E[3],@C[1],@C[9],ror#13
	eor	@E[0],@E[0],@C[4]
	eor	@E[1],@E[1],@C[5]
	str	@E[0],[sp,#$R[3][2]]	@ R[3][2] = C[2] ^ (~C[3] & C[4]);
	bic	@E[0],@C[2],@C[0],ror#18-14
	eor	@E[2],@C[6],@E[2],ror#32-14
	str	@E[1],[sp,#$R[3][2]+4]
	bic	@E[1],@C[3],@C[1],ror#18-13
	eor	@E[3],@C[7],@E[3],ror#32-13
	str	@E[2],[sp,#$R[3][3]]	@ R[3][3] = C[3] ^ (~C[4] & C[0]);
	str	@E[3],[sp,#$R[3][3]+4]
	add	@E[3],sp,#$D[2]
	ldr	@C[0],[sp,#$A[0][2]]	@ A[0][2]
	eor	@E[0],@C[8],@E[0],ror#32-18
	ldr	@C[1],[sp,#$A[0][2]+4]
	eor	@E[1],@C[9],@E[1],ror#32-18
	str	@E[0],[sp,#$R[3][4]]	@ R[3][4] = C[4] ^ (~C[0] & C[1]);
	str	@E[1],[sp,#$R[3][4]+4]

	ldmia	@E[3],{@E[0]-@E[2],@E[3]}	@ D[2..3]
	ldr	@C[2],[sp,#$A[1][3]]	@ A[1][3]
	ldr	@C[3],[sp,#$A[1][3]+4]
	ldr	@C[6],[sp,#$D[4]]	@ D[4]
	ldr	@C[7],[sp,#$D[4]+4]

	eor	@C[0],@C[0],@E[0]
	ldr	@C[4],[sp,#$A[2][4]]	@ A[2][4]
	eor	@C[1],@C[1],@E[1]
	ldr	@C[5],[sp,#$A[2][4]+4]
	@ ror	@C[0],@C[0],#32-31	@ C[0] = ROL64(A[0][2] ^ D[2], rhotates[0][2]);
	ldr	@C[8],[sp,#$D[0]]	@ D[0]
	@ ror	@C[1],@C[1],#32-31
	ldr	@C[9],[sp,#$D[0]+4]

	eor	@E[2],@E[2],@C[2]
	ldr	@E[0],[sp,#$A[3][0]]	@ A[3][0]
	eor	@E[3],@E[3],@C[3]
	ldr	@E[1],[sp,#$A[3][0]+4]
	ror	@C[3],@E[2],#32-27	@ C[1] = ROL64(A[1][3] ^ D[3], rhotates[1][3]);
	ldr	@E[2],[sp,#$D[1]]	@ D[1]
	ror	@C[2],@E[3],#32-28
	ldr	@E[3],[sp,#$D[1]+4]

	eor	@C[6],@C[6],@C[4]
	eor	@C[7],@C[7],@C[5]
	ror	@C[5],@C[6],#32-19	@ C[2] = ROL64(A[2][4] ^ D[4], rhotates[2][4]);
	ror	@C[4],@C[7],#32-20

	eor	@E[0],@E[0],@C[8]
	ldr	@C[8],[sp,#$A[4][1]]	@ A[4][1]
	eor	@E[1],@E[1],@C[9]
	ldr	@C[9],[sp,#$A[4][1]+4]
	ror	@C[7],@E[0],#32-20	@ C[3] = ROL64(A[3][0] ^ D[0], rhotates[3][0]);
	ror	@C[6],@E[1],#32-21

	eor	@C[8],@C[8],@E[2]
	eor	@C[9],@C[9],@E[3]
	@ ror	@C[8],@C[2],#32-1	@ C[4] = ROL64(A[4][1] ^ D[1], rhotates[4][1]);
	@ ror	@C[9],@C[3],#32-1

	bic	@E[0],@C[4],@C[2]
	bic	@E[1],@C[5],@C[3]
	eor	@E[0],@E[0],@C[0],ror#32-31
	eor	@E[1],@E[1],@C[1],ror#32-31
	str	@E[0],[sp,#$R[4][0]]	@ R[4][0] = C[0] ^ (~C[1] & C[2])
	bic	@E[2],@C[6],@C[4]
	str	@E[1],[sp,#$R[4][0]+4]
	bic	@E[3],@C[7],@C[5]
	eor	@E[2],@E[2],@C[2]
	eor	@E[3],@E[3],@C[3]
	str	@E[2],[sp,#$R[4][1]]	@ R[4][1] = C[1] ^ (~C[2] & C[3]);
	bic	@E[0],@C[8],@C[6],ror#1
	str	@E[3],[sp,#$R[4][1]+4]
	bic	@E[1],@C[9],@C[7],ror#1
	bic	@E[2],@C[0],@C[8],ror#31-1
	bic	@E[3],@C[1],@C[9],ror#31-1
	eor	@C[4],@C[4],@E[0],ror#32-1
	eor	@C[5],@C[5],@E[1],ror#32-1
	str	@C[4],[sp,#$R[4][2]]	@ R[4][2] = C[2] ^= (~C[3] & C[4]);
	eor	@C[6],@C[6],@E[2],ror#32-31
	str	@C[5],[sp,#$R[4][2]+4]
	eor	@C[7],@C[7],@E[3],ror#32-31
	str	@C[6],[sp,#$R[4][3]]	@ R[4][3] = C[3] ^= (~C[4] & C[0]);
	bic	@E[0],@C[2],@C[0],ror#32-31
	str	@C[7],[sp,#$R[4][3]+4]
	bic	@E[1],@C[3],@C[1],ror#32-31
	add	@E[2],sp,#$R[0][0]
	eor	@C[8],@E[0],@C[8],ror#32-1
	add	@E[0],sp,#$R[1][0]
	eor	@C[9],@E[1],@C[9],ror#32-1
	str	@C[8],[sp,#$R[4][4]]	@ R[4][4] = C[4] ^= (~C[0] & C[1]);
	str	@C[9],[sp,#$R[4][4]+4]
___
}
# Two unrolled rounds per .Lround iteration: A->T then T->A, so the state
# lands back in A[][] without copying; the second Round leaves the flags
# from "cmp counter,#192" for the "blo .Lround" below (24 rounds total).
Round(@A,@T);
Round(@T,@A);
# Public KeccakF1600 wrapper: builds the stack frame, copies the caller's
# A[5][5] in, runs the permutation via KeccakF1600_enter, copies it back.
$code.=<<___;
	blo	.Lround

	ldr	pc,[sp,#440]
.size	KeccakF1600_int,.-KeccakF1600_int

.type	KeccakF1600, %function
.align	5
KeccakF1600:
	stmdb	sp!,{r0,r4-r11,lr}
	sub	sp,sp,#440+16			@ space for A[5][5],D[5],T[5][5],...

	add	@E[0],r0,#$A[1][0]
	add	@E[1],sp,#$A[1][0]
	ldmia	r0,    {@C[0]-@C[9]}		@ copy A[5][5] to stack
	stmia	sp,    {@C[0]-@C[9]}
	ldmia	@E[0]!,{@C[0]-@C[9]}
	stmia	@E[1]!,{@C[0]-@C[9]}
	ldmia	@E[0]!,{@C[0]-@C[9]}
	stmia	@E[1]!,{@C[0]-@C[9]}
	ldmia	@E[0]!,{@C[0]-@C[9]}
	stmia	@E[1]!,{@C[0]-@C[9]}
	ldmia	@E[0], {@C[0]-@C[9]}
	add	@E[2],sp,#$A[0][0]
	add	@E[0],sp,#$A[1][0]
	stmia	@E[1], {@C[0]-@C[9]}

	bl	KeccakF1600_enter

	ldr	@E[1], [sp,#440+16]		@ restore pointer to A
	ldmia	sp,    {@C[0]-@C[9]}
	stmia	@E[1]!,{@C[0]-@C[9]}		@ return A[5][5]
	ldmia	@E[0]!,{@C[0]-@C[9]}
	stmia	@E[1]!,{@C[0]-@C[9]}
	ldmia	@E[0]!,{@C[0]-@C[9]}
	stmia	@E[1]!,{@C[0]-@C[9]}
	ldmia	@E[0]!,{@C[0]-@C[9]}
	stmia	@E[1]!,{@C[0]-@C[9]}
	ldmia	@E[0], {@C[0]-@C[9]}
	stmia	@E[1], {@C[0]-@C[9]}

	add	sp,sp,#440+20
	ldmia	sp!,{r4-r11,pc}
.size	KeccakF1600,.-KeccakF1600
___
# SHA3_absorb(uint64_t A[5][5], const void *inp, size_t len, size_t bsz):
# copies the state to the stack, then for each bsz-sized block XORs the
# bit-interleaved input into the state and runs KeccakF1600_int; returns
# the number of unprocessed tail bytes (len % bsz) in r0.  .Loop_block
# performs the bit interleaving with the masked-shift ladder using the
# constants 0x55555555/0x33333333/0x0f0f0f0f/0x00ff00ff cached on stack.
{ my ($A_flat,$inp,$len,$bsz) = map("r$_",(10..12,14));

########################################################################
# Stack layout
# ----->+-----------------------+
#       | uint64_t A[5][5]      |
#       | ...                   |
#       | ...                   |
# +456->+-----------------------+
#       | 0x55555555            |
# +460->+-----------------------+
#       | 0x33333333            |
# +464->+-----------------------+
#       | 0x0f0f0f0f            |
# +468->+-----------------------+
#       | 0x00ff00ff            |
# +472->+-----------------------+
#       | uint64_t *A           |
# +476->+-----------------------+
#       | const void *inp       |
# +480->+-----------------------+
#       | size_t len            |
# +484->+-----------------------+
#       | size_t bs             |
# +488->+-----------------------+
#       | ....

$code.=<<___;
.global	SHA3_absorb
.type	SHA3_absorb,%function
.align	5
SHA3_absorb:
	stmdb	sp!,{r0-r12,lr}
	sub	sp,sp,#456+16

	add	$A_flat,r0,#$A[1][0]
	@ mov	$inp,r1
	mov	$len,r2
	mov	$bsz,r3
	cmp	r2,r3
	blo	.Labsorb_abort

	add	$inp,sp,#0
	ldmia	r0,      {@C[0]-@C[9]}	@ copy A[5][5] to stack
	stmia	$inp!,   {@C[0]-@C[9]}
	ldmia	$A_flat!,{@C[0]-@C[9]}
	stmia	$inp!,   {@C[0]-@C[9]}
	ldmia	$A_flat!,{@C[0]-@C[9]}
	stmia	$inp!,   {@C[0]-@C[9]}
	ldmia	$A_flat!,{@C[0]-@C[9]}
	stmia	$inp!,   {@C[0]-@C[9]}
	ldmia	$A_flat!,{@C[0]-@C[9]}
	stmia	$inp,    {@C[0]-@C[9]}

	ldr	$inp,[sp,#476]		@ restore $inp
#ifdef	__thumb2__
	mov	r9,#0x00ff00ff
	mov	r8,#0x0f0f0f0f
	mov	r7,#0x33333333
	mov	r6,#0x55555555
#else
	mov	r6,#0x11		@ compose constants
	mov	r8,#0x0f
	mov	r9,#0xff
	orr	r6,r6,r6,lsl#8
	orr	r8,r8,r8,lsl#8
	orr	r6,r6,r6,lsl#16		@ 0x11111111
	orr	r9,r9,r9,lsl#16		@ 0x00ff00ff
	orr	r8,r8,r8,lsl#16		@ 0x0f0f0f0f
	orr	r7,r6,r6,lsl#1		@ 0x33333333
	orr	r6,r6,r6,lsl#2		@ 0x55555555
#endif
	str	r9,[sp,#468]
	str	r8,[sp,#464]
	str	r7,[sp,#460]
	str	r6,[sp,#456]
	b	.Loop_absorb

.align	4
.Loop_absorb:
	subs	r0,$len,$bsz
	blo	.Labsorbed
	add	$A_flat,sp,#0
	str	r0,[sp,#480]		@ save len - bsz

.align	4
.Loop_block:
	ldrb	r0,[$inp],#1
	ldrb	r1,[$inp],#1
	ldrb	r2,[$inp],#1
	ldrb	r3,[$inp],#1
	ldrb	r4,[$inp],#1
	orr	r0,r0,r1,lsl#8
	ldrb	r1,[$inp],#1
	orr	r0,r0,r2,lsl#16
	ldrb	r2,[$inp],#1
	orr	r0,r0,r3,lsl#24		@ lo
	ldrb	r3,[$inp],#1
	orr	r1,r4,r1,lsl#8
	orr	r1,r1,r2,lsl#16
	orr	r1,r1,r3,lsl#24		@ hi

	and	r2,r0,r6		@ &=0x55555555
	and	r0,r0,r6,lsl#1		@ &=0xaaaaaaaa
	and	r3,r1,r6		@ &=0x55555555
	and	r1,r1,r6,lsl#1		@ &=0xaaaaaaaa
	orr	r2,r2,r2,lsr#1
	orr	r0,r0,r0,lsl#1
	orr	r3,r3,r3,lsr#1
	orr	r1,r1,r1,lsl#1
	and	r2,r2,r7		@ &=0x33333333
	and	r0,r0,r7,lsl#2		@ &=0xcccccccc
	and	r3,r3,r7		@ &=0x33333333
	and	r1,r1,r7,lsl#2		@ &=0xcccccccc
	orr	r2,r2,r2,lsr#2
	orr	r0,r0,r0,lsl#2
	orr	r3,r3,r3,lsr#2
	orr	r1,r1,r1,lsl#2
	and	r2,r2,r8		@ &=0x0f0f0f0f
	and	r0,r0,r8,lsl#4		@ &=0xf0f0f0f0
	and	r3,r3,r8		@ &=0x0f0f0f0f
	and	r1,r1,r8,lsl#4		@ &=0xf0f0f0f0
	ldmia	$A_flat,{r4-r5}		@ A_flat[i]
	orr	r2,r2,r2,lsr#4
	orr	r0,r0,r0,lsl#4
	orr	r3,r3,r3,lsr#4
	orr	r1,r1,r1,lsl#4
	and	r2,r2,r9		@ &=0x00ff00ff
	and	r0,r0,r9,lsl#8		@ &=0xff00ff00
	and	r3,r3,r9		@ &=0x00ff00ff
	and	r1,r1,r9,lsl#8		@ &=0xff00ff00
	orr	r2,r2,r2,lsr#8
	orr	r0,r0,r0,lsl#8
	orr	r3,r3,r3,lsr#8
	orr	r1,r1,r1,lsl#8

	lsl	r2,r2,#16
	lsr	r1,r1,#16
	eor	r4,r4,r3,lsl#16
	eor	r5,r5,r0,lsr#16
	eor	r4,r4,r2,lsr#16
	eor	r5,r5,r1,lsl#16
	stmia	$A_flat!,{r4-r5}	@ A_flat[i++] ^= BitInterleave(inp[0..7])

	subs	$bsz,$bsz,#8
	bhi	.Loop_block

	str	$inp,[sp,#476]

	bl	KeccakF1600_int

	add	r14,sp,#456
	ldmia	r14,{r6-r12,r14}	@ restore constants and variables
	b	.Loop_absorb

.align	4
.Labsorbed:
	add	$inp,sp,#$A[1][0]
	ldmia	sp,      {@C[0]-@C[9]}
	stmia	$A_flat!,{@C[0]-@C[9]}	@ return A[5][5]
	ldmia	$inp!,   {@C[0]-@C[9]}
	stmia	$A_flat!,{@C[0]-@C[9]}
	ldmia	$inp!,   {@C[0]-@C[9]}
	stmia	$A_flat!,{@C[0]-@C[9]}
	ldmia	$inp!,   {@C[0]-@C[9]}
	stmia	$A_flat!,{@C[0]-@C[9]}
	ldmia	$inp,    {@C[0]-@C[9]}
	stmia	$A_flat, {@C[0]-@C[9]}

.Labsorb_abort:
	add	sp,sp,#456+32
	mov	r0,$len			@ return value
	ldmia	sp!,{r4-r12,pc}
.size	SHA3_absorb,.-SHA3_absorb
___
}
# SHA3_squeeze(uint64_t A[5][5], unsigned char *out, size_t len, size_t bsz):
# de-interleaves state lanes back to byte order and emits up to bsz bytes,
# re-permuting with KeccakF1600 as needed; .Lsqueeze_tail handles the last
# 1..7 bytes one at a time.
{ my ($out,$len,$A_flat,$bsz) = map("r$_", (4,5,10,12));

$code.=<<___;
.global	SHA3_squeeze
.type	SHA3_squeeze,%function
.align	5
SHA3_squeeze:
	stmdb	sp!,{r0,r3-r10,lr}

	mov	$A_flat,r0
	mov	$out,r1
	mov	$len,r2
	mov	$bsz,r3

#ifdef	__thumb2__
	mov	r9,#0x00ff00ff
	mov	r8,#0x0f0f0f0f
	mov	r7,#0x33333333
	mov	r6,#0x55555555
#else
	mov	r6,#0x11		@ compose constants
	mov	r8,#0x0f
	mov	r9,#0xff
	orr	r6,r6,r6,lsl#8
	orr	r8,r8,r8,lsl#8
	orr	r6,r6,r6,lsl#16		@ 0x11111111
	orr	r9,r9,r9,lsl#16		@ 0x00ff00ff
	orr	r8,r8,r8,lsl#16		@ 0x0f0f0f0f
	orr	r7,r6,r6,lsl#1		@ 0x33333333
	orr	r6,r6,r6,lsl#2		@ 0x55555555
#endif
	stmdb	sp!,{r6-r9}

	mov	r14,$A_flat
	b	.Loop_squeeze

.align	4
.Loop_squeeze:
	ldmia	$A_flat!,{r0,r1}	@ A_flat[i++]

	lsl	r2,r0,#16
	lsl	r3,r1,#16		@ r3 = r1 << 16
	lsr	r2,r2,#16		@ r2 = r0 & 0x0000ffff
	lsr	r1,r1,#16
	lsr	r0,r0,#16		@ r0 = r0 >> 16
	lsl	r1,r1,#16		@ r1 = r1 & 0xffff0000

	orr	r2,r2,r2,lsl#8
	orr	r3,r3,r3,lsr#8
	orr	r0,r0,r0,lsl#8
	orr	r1,r1,r1,lsr#8
	and	r2,r2,r9		@ &=0x00ff00ff
	and	r3,r3,r9,lsl#8		@ &=0xff00ff00
	and	r0,r0,r9		@ &=0x00ff00ff
	and	r1,r1,r9,lsl#8		@ &=0xff00ff00
	orr	r2,r2,r2,lsl#4
	orr	r3,r3,r3,lsr#4
	orr	r0,r0,r0,lsl#4
	orr	r1,r1,r1,lsr#4
	and	r2,r2,r8		@ &=0x0f0f0f0f
	and	r3,r3,r8,lsl#4		@ &=0xf0f0f0f0
	and	r0,r0,r8		@ &=0x0f0f0f0f
	and	r1,r1,r8,lsl#4		@ &=0xf0f0f0f0
	orr	r2,r2,r2,lsl#2
	orr	r3,r3,r3,lsr#2
	orr	r0,r0,r0,lsl#2
	orr	r1,r1,r1,lsr#2
	and	r2,r2,r7		@ &=0x33333333
	and	r3,r3,r7,lsl#2		@ &=0xcccccccc
	and	r0,r0,r7		@ &=0x33333333
	and	r1,r1,r7,lsl#2		@ &=0xcccccccc
	orr	r2,r2,r2,lsl#1
	orr	r3,r3,r3,lsr#1
	orr	r0,r0,r0,lsl#1
	orr	r1,r1,r1,lsr#1
	and	r2,r2,r6		@ &=0x55555555
	and	r3,r3,r6,lsl#1		@ &=0xaaaaaaaa
	and	r0,r0,r6		@ &=0x55555555
	and	r1,r1,r6,lsl#1		@ &=0xaaaaaaaa

	orr	r2,r2,r3
	orr	r0,r0,r1

	cmp	$len,#8
	blo	.Lsqueeze_tail
	lsr	r1,r2,#8
	strb	r2,[$out],#1
	lsr	r3,r2,#16
	strb	r1,[$out],#1
	lsr	r2,r2,#24
	strb	r3,[$out],#1
	strb	r2,[$out],#1

	lsr	r1,r0,#8
	strb	r0,[$out],#1
	lsr	r3,r0,#16
	strb	r1,[$out],#1
	lsr	r0,r0,#24
	strb	r3,[$out],#1
	strb	r0,[$out],#1
	subs	$len,$len,#8
	beq	.Lsqueeze_done

	subs	$bsz,$bsz,#8		@ bsz -= 8
	bhi	.Loop_squeeze

	mov	r0,r14			@ original $A_flat

	bl	KeccakF1600

	ldmia	sp,{r6-r10,r12}		@ restore constants and variables
	mov	r14,$A_flat
	b	.Loop_squeeze

.align	4
.Lsqueeze_tail:
	strb	r2,[$out],#1
	lsr	r2,r2,#8
	subs	$len,$len,#1
	beq	.Lsqueeze_done
	strb	r2,[$out],#1
	lsr	r2,r2,#8
	subs	$len,$len,#1
	beq	.Lsqueeze_done
	strb	r2,[$out],#1
	lsr	r2,r2,#8
	subs	$len,$len,#1
	beq	.Lsqueeze_done
	strb	r2,[$out],#1
	subs	$len,$len,#1
	beq	.Lsqueeze_done

	strb	r0,[$out],#1
	lsr	r0,r0,#8
	subs	$len,$len,#1
	beq	.Lsqueeze_done
	strb	r0,[$out],#1
	lsr	r0,r0,#8
	subs	$len,$len,#1
	beq	.Lsqueeze_done
	strb	r0,[$out]
	b	.Lsqueeze_done

.align	4
.Lsqueeze_done:
	add	sp,sp,#24
	ldmia	sp!,{r4-r10,pc}
.size	SHA3_squeeze,.-SHA3_squeeze
___
}
$code.=<<___;
.fpu neon
.type iotas64, %object
.align 5
iotas64:
.quad 0x0000000000000001
.quad 0x0000000000008082
.quad 0x800000000000808a
.quad 0x8000000080008000
.quad 0x000000000000808b
.quad 0x0000000080000001
.quad 0x8000000080008081
.quad 0x8000000000008009
.quad 0x000000000000008a
.quad 0x0000000000000088
.quad 0x0000000080008009
.quad 0x000000008000000a
.quad 0x000000008000808b
.quad 0x800000000000008b
.quad 0x8000000000008089
.quad 0x8000000000008003
.quad 0x8000000000008002
.quad 0x8000000000000080
.quad 0x000000000000800a
.quad 0x800000008000000a
.quad 0x8000000080008081
.quad 0x8000000000008080
.quad 0x0000000080000001
.quad 0x8000000080008008
.size iotas64,.-iotas64
.type KeccakF1600_neon, %function
.align 5
KeccakF1600_neon:
add r1, r0, #16
adr r2, iotas64
mov r3, #24 @ loop counter
b .Loop_neon
.align 4
.Loop_neon:
@ Theta
vst1.64 {q4}, [r0:64] @ offload A[0..1][4]
veor q13, q0, q5 @ A[0..1][0]^A[2..3][0]
vst1.64 {d18}, [r1:64] @ offload A[2][4]
veor q14, q1, q6 @ A[0..1][1]^A[2..3][1]
veor q15, q2, q7 @ A[0..1][2]^A[2..3][2]
veor d26, d26, d27 @ C[0]=A[0][0]^A[1][0]^A[2][0]^A[3][0]
veor d27, d28, d29 @ C[1]=A[0][1]^A[1][1]^A[2][1]^A[3][1]
veor q14, q3, q8 @ A[0..1][3]^A[2..3][3]
veor q4, q4, q9 @ A[0..1][4]^A[2..3][4]
veor d30, d30, d31 @ C[2]=A[0][2]^A[1][2]^A[2][2]^A[3][2]
veor d31, d28, d29 @ C[3]=A[0][3]^A[1][3]^A[2][3]^A[3][3]
veor d25, d8, d9 @ C[4]=A[0][4]^A[1][4]^A[2][4]^A[3][4]
veor q13, q13, q10 @ C[0..1]^=A[4][0..1]
veor q14, q15, q11 @ C[2..3]^=A[4][2..3]
veor d25, d25, d24 @ C[4]^=A[4][4]
vadd.u64 q4, q13, q13 @ C[0..1]<<1
vadd.u64 q15, q14, q14 @ C[2..3]<<1
vadd.u64 d18, d25, d25 @ C[4]<<1
vsri.u64 q4, q13, #63 @ ROL64(C[0..1],1)
vsri.u64 q15, q14, #63 @ ROL64(C[2..3],1)
vsri.u64 d18, d25, #63 @ ROL64(C[4],1)
veor d25, d25, d9 @ D[0] = C[4] ^= ROL64(C[1],1)
veor q13, q13, q15 @ D[1..2] = C[0..1] ^ ROL64(C[2..3],1)
veor d28, d28, d18 @ D[3] = C[2] ^= ROL64(C[4],1)
veor d29, d29, d8 @ D[4] = C[3] ^= ROL64(C[0],1)
veor d0, d0, d25 @ A[0][0] ^= C[4]
veor d1, d1, d25 @ A[1][0] ^= C[4]
veor d10, d10, d25 @ A[2][0] ^= C[4]
veor d11, d11, d25 @ A[3][0] ^= C[4]
veor d20, d20, d25 @ A[4][0] ^= C[4]
veor d2, d2, d26 @ A[0][1] ^= D[1]
veor d3, d3, d26 @ A[1][1] ^= D[1]
veor d12, d12, d26 @ A[2][1] ^= D[1]
veor d13, d13, d26 @ A[3][1] ^= D[1]
veor d21, d21, d26 @ A[4][1] ^= D[1]
vmov d26, d27
veor d6, d6, d28 @ A[0][3] ^= C[2]
veor d7, d7, d28 @ A[1][3] ^= C[2]
veor d16, d16, d28 @ A[2][3] ^= C[2]
veor d17, d17, d28 @ A[3][3] ^= C[2]
veor d23, d23, d28 @ A[4][3] ^= C[2]
vld1.64 {q4}, [r0:64] @ restore A[0..1][4]
vmov d28, d29
vld1.64 {d18}, [r1:64] @ restore A[2][4]
veor q2, q2, q13 @ A[0..1][2] ^= D[2]
veor q7, q7, q13 @ A[2..3][2] ^= D[2]
veor d22, d22, d27 @ A[4][2] ^= D[2]
veor q4, q4, q14 @ A[0..1][4] ^= C[3]
veor q9, q9, q14 @ A[2..3][4] ^= C[3]
veor d24, d24, d29 @ A[4][4] ^= C[3]
@ Rho + Pi
vmov d26, d2 @ C[1] = A[0][1]
vshl.u64 d2, d3, #44
vmov d27, d4 @ C[2] = A[0][2]
vshl.u64 d4, d14, #43
vmov d28, d6 @ C[3] = A[0][3]
vshl.u64 d6, d17, #21
vmov d29, d8 @ C[4] = A[0][4]
vshl.u64 d8, d24, #14
vsri.u64 d2, d3, #64-44 @ A[0][1] = ROL64(A[1][1], rhotates[1][1])
vsri.u64 d4, d14, #64-43 @ A[0][2] = ROL64(A[2][2], rhotates[2][2])
vsri.u64 d6, d17, #64-21 @ A[0][3] = ROL64(A[3][3], rhotates[3][3])
vsri.u64 d8, d24, #64-14 @ A[0][4] = ROL64(A[4][4], rhotates[4][4])
vshl.u64 d3, d9, #20
vshl.u64 d14, d16, #25
vshl.u64 d17, d15, #15
vshl.u64 d24, d21, #2
vsri.u64 d3, d9, #64-20 @ A[1][1] = ROL64(A[1][4], rhotates[1][4])
vsri.u64 d14, d16, #64-25 @ A[2][2] = ROL64(A[2][3], rhotates[2][3])
vsri.u64 d17, d15, #64-15 @ A[3][3] = ROL64(A[3][2], rhotates[3][2])
vsri.u64 d24, d21, #64-2 @ A[4][4] = ROL64(A[4][1], rhotates[4][1])
vshl.u64 d9, d22, #61
@ vshl.u64 d16, d19, #8
vshl.u64 d15, d12, #10
vshl.u64 d21, d7, #55
vsri.u64 d9, d22, #64-61 @ A[1][4] = ROL64(A[4][2], rhotates[4][2])
vext.8 d16, d19, d19, #8-1 @ A[2][3] = ROL64(A[3][4], rhotates[3][4])
vsri.u64 d15, d12, #64-10 @ A[3][2] = ROL64(A[2][1], rhotates[2][1])
vsri.u64 d21, d7, #64-55 @ A[4][1] = ROL64(A[1][3], rhotates[1][3])
vshl.u64 d22, d18, #39
@ vshl.u64 d19, d23, #56
vshl.u64 d12, d5, #6
vshl.u64 d7, d13, #45
vsri.u64 d22, d18, #64-39 @ A[4][2] = ROL64(A[2][4], rhotates[2][4])
vext.8 d19, d23, d23, #8-7 @ A[3][4] = ROL64(A[4][3], rhotates[4][3])
vsri.u64 d12, d5, #64-6 @ A[2][1] = ROL64(A[1][2], rhotates[1][2])
vsri.u64 d7, d13, #64-45 @ A[1][3] = ROL64(A[3][1], rhotates[3][1])
vshl.u64 d18, d20, #18
vshl.u64 d23, d11, #41
vshl.u64 d5, d10, #3
vshl.u64 d13, d1, #36
vsri.u64 d18, d20, #64-18 @ A[2][4] = ROL64(A[4][0], rhotates[4][0])
vsri.u64 d23, d11, #64-41 @ A[4][3] = ROL64(A[3][0], rhotates[3][0])
vsri.u64 d5, d10, #64-3 @ A[1][2] = ROL64(A[2][0], rhotates[2][0])
vsri.u64 d13, d1, #64-36 @ A[3][1] = ROL64(A[1][0], rhotates[1][0])
vshl.u64 d1, d28, #28
vshl.u64 d10, d26, #1
vshl.u64 d11, d29, #27
vshl.u64 d20, d27, #62
vsri.u64 d1, d28, #64-28 @ A[1][0] = ROL64(C[3], rhotates[0][3])
vsri.u64 d10, d26, #64-1 @ A[2][0] = ROL64(C[1], rhotates[0][1])
vsri.u64 d11, d29, #64-27 @ A[3][0] = ROL64(C[4], rhotates[0][4])
vsri.u64 d20, d27, #64-62 @ A[4][0] = ROL64(C[2], rhotates[0][2])
@ Chi + Iota
vbic q13, q2, q1
vbic q14, q3, q2
vbic q15, q4, q3
veor q13, q13, q0 @ A[0..1][0] ^ (~A[0..1][1] & A[0..1][2])
veor q14, q14, q1 @ A[0..1][1] ^ (~A[0..1][2] & A[0..1][3])
veor q2, q2, q15 @ A[0..1][2] ^= (~A[0..1][3] & A[0..1][4])
vst1.64 {q13}, [r0:64] @ offload A[0..1][0]
vbic q13, q0, q4
vbic q15, q1, q0
vmov q1, q14 @ A[0..1][1]
veor q3, q3, q13 @ A[0..1][3] ^= (~A[0..1][4] & A[0..1][0])
veor q4, q4, q15 @ A[0..1][4] ^= (~A[0..1][0] & A[0..1][1])
vbic q13, q7, q6
vmov q0, q5 @ A[2..3][0]
vbic q14, q8, q7
vmov q15, q6 @ A[2..3][1]
veor q5, q5, q13 @ A[2..3][0] ^= (~A[2..3][1] & A[2..3][2])
vbic q13, q9, q8
veor q6, q6, q14 @ A[2..3][1] ^= (~A[2..3][2] & A[2..3][3])
vbic q14, q0, q9
veor q7, q7, q13 @ A[2..3][2] ^= (~A[2..3][3] & A[2..3][4])
vbic q13, q15, q0
veor q8, q8, q14 @ A[2..3][3] ^= (~A[2..3][4] & A[2..3][0])
vmov q14, q10 @ A[4][0..1]
veor q9, q9, q13 @ A[2..3][4] ^= (~A[2..3][0] & A[2..3][1])
vld1.64 d25, [r2:64]! @ Iota[i++]
vbic d26, d22, d21
vbic d27, d23, d22
vld1.64 {q0}, [r0:64] @ restore A[0..1][0]
veor d20, d20, d26 @ A[4][0] ^= (~A[4][1] & A[4][2])
vbic d26, d24, d23
veor d21, d21, d27 @ A[4][1] ^= (~A[4][2] & A[4][3])
vbic d27, d28, d24
veor d22, d22, d26 @ A[4][2] ^= (~A[4][3] & A[4][4])
vbic d26, d29, d28
veor d23, d23, d27 @ A[4][3] ^= (~A[4][4] & A[4][0])
veor d0, d0, d25 @ A[0][0] ^= Iota[i]
veor d24, d24, d26 @ A[4][4] ^= (~A[4][0] & A[4][1])
subs r3, r3, #1
bne .Loop_neon
bx lr
.size KeccakF1600_neon,.-KeccakF1600_neon
.global SHA3_absorb_neon
.type SHA3_absorb_neon, %function
.align 5
SHA3_absorb_neon:
stmdb sp!, {r4-r6,lr}
vstmdb sp!, {d8-d15}
mov r4, r1 @ inp
mov r5, r2 @ len
mov r6, r3 @ bsz
vld1.32 {d0}, [r0:64]! @ A[0][0]
vld1.32 {d2}, [r0:64]! @ A[0][1]
vld1.32 {d4}, [r0:64]! @ A[0][2]
vld1.32 {d6}, [r0:64]! @ A[0][3]
vld1.32 {d8}, [r0:64]! @ A[0][4]
vld1.32 {d1}, [r0:64]! @ A[1][0]
vld1.32 {d3}, [r0:64]! @ A[1][1]
vld1.32 {d5}, [r0:64]! @ A[1][2]
vld1.32 {d7}, [r0:64]! @ A[1][3]
vld1.32 {d9}, [r0:64]! @ A[1][4]
vld1.32 {d10}, [r0:64]! @ A[2][0]
vld1.32 {d12}, [r0:64]! @ A[2][1]
vld1.32 {d14}, [r0:64]! @ A[2][2]
vld1.32 {d16}, [r0:64]! @ A[2][3]
vld1.32 {d18}, [r0:64]! @ A[2][4]
vld1.32 {d11}, [r0:64]! @ A[3][0]
vld1.32 {d13}, [r0:64]! @ A[3][1]
vld1.32 {d15}, [r0:64]! @ A[3][2]
vld1.32 {d17}, [r0:64]! @ A[3][3]
vld1.32 {d19}, [r0:64]! @ A[3][4]
vld1.32 {d20-d23}, [r0:64]! @ A[4][0..3]
vld1.32 {d24}, [r0:64] @ A[4][4]
sub r0, r0, #24*8 @ rewind
b .Loop_absorb_neon
.align 4
.Loop_absorb_neon:
subs r12, r5, r6 @ len - bsz
blo .Labsorbed_neon
mov r5, r12
vld1.8 {d31}, [r4]! @ endian-neutral loads...
cmp r6, #8*2
veor d0, d0, d31 @ A[0][0] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d2, d2, d31 @ A[0][1] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*4
veor d4, d4, d31 @ A[0][2] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d6, d6, d31 @ A[0][3] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31},[r4]!
cmp r6, #8*6
veor d8, d8, d31 @ A[0][4] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d1, d1, d31 @ A[1][0] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*8
veor d3, d3, d31 @ A[1][1] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d5, d5, d31 @ A[1][2] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*10
veor d7, d7, d31 @ A[1][3] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d9, d9, d31 @ A[1][4] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*12
veor d10, d10, d31 @ A[2][0] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d12, d12, d31 @ A[2][1] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*14
veor d14, d14, d31 @ A[2][2] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d16, d16, d31 @ A[2][3] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*16
veor d18, d18, d31 @ A[2][4] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d11, d11, d31 @ A[3][0] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*18
veor d13, d13, d31 @ A[3][1] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d15, d15, d31 @ A[3][2] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*20
veor d17, d17, d31 @ A[3][3] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d19, d19, d31 @ A[3][4] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*22
veor d20, d20, d31 @ A[4][0] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d21, d21, d31 @ A[4][1] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
cmp r6, #8*24
veor d22, d22, d31 @ A[4][2] ^= *inp++
blo .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d23, d23, d31 @ A[4][3] ^= *inp++
beq .Lprocess_neon
vld1.8 {d31}, [r4]!
veor d24, d24, d31 @ A[4][4] ^= *inp++
.Lprocess_neon:
bl KeccakF1600_neon
b .Loop_absorb_neon
.align 4
.Labsorbed_neon:
vst1.32 {d0}, [r0:64]! @ A[0][0..4]
vst1.32 {d2}, [r0:64]!
vst1.32 {d4}, [r0:64]!
vst1.32 {d6}, [r0:64]!
vst1.32 {d8}, [r0:64]!
vst1.32 {d1}, [r0:64]! @ A[1][0..4]
vst1.32 {d3}, [r0:64]!
vst1.32 {d5}, [r0:64]!
vst1.32 {d7}, [r0:64]!
vst1.32 {d9}, [r0:64]!
vst1.32 {d10}, [r0:64]! @ A[2][0..4]
vst1.32 {d12}, [r0:64]!
vst1.32 {d14}, [r0:64]!
vst1.32 {d16}, [r0:64]!
vst1.32 {d18}, [r0:64]!
vst1.32 {d11}, [r0:64]! @ A[3][0..4]
vst1.32 {d13}, [r0:64]!
vst1.32 {d15}, [r0:64]!
vst1.32 {d17}, [r0:64]!
vst1.32 {d19}, [r0:64]!
vst1.32 {d20-d23}, [r0:64]! @ A[4][0..4]
vst1.32 {d24}, [r0:64]
mov r0, r5 @ return value
vldmia sp!, {d8-d15}
ldmia sp!, {r4-r6,pc}
.size SHA3_absorb_neon,.-SHA3_absorb_neon
.global SHA3_squeeze_neon
.type SHA3_squeeze_neon, %function
.align 5
SHA3_squeeze_neon:
stmdb sp!, {r4-r6,lr}
mov r4, r1 @ out
mov r5, r2 @ len
mov r6, r3 @ bsz
mov r12, r0 @ A_flat
mov r14, r3 @ bsz
b .Loop_squeeze_neon
.align 4
.Loop_squeeze_neon:
cmp r5, #8
blo .Lsqueeze_neon_tail
vld1.32 {d0}, [r12]!
vst1.8 {d0}, [r4]! @ endian-neutral store
subs r5, r5, #8 @ len -= 8
beq .Lsqueeze_neon_done
subs r14, r14, #8 @ bsz -= 8
bhi .Loop_squeeze_neon
vstmdb sp!, {d8-d15}
vld1.32 {d0}, [r0:64]! @ A[0][0..4]
vld1.32 {d2}, [r0:64]!
vld1.32 {d4}, [r0:64]!
vld1.32 {d6}, [r0:64]!
vld1.32 {d8}, [r0:64]!
vld1.32 {d1}, [r0:64]! @ A[1][0..4]
vld1.32 {d3}, [r0:64]!
vld1.32 {d5}, [r0:64]!
vld1.32 {d7}, [r0:64]!
vld1.32 {d9}, [r0:64]!
vld1.32 {d10}, [r0:64]! @ A[2][0..4]
vld1.32 {d12}, [r0:64]!
vld1.32 {d14}, [r0:64]!
vld1.32 {d16}, [r0:64]!
vld1.32 {d18}, [r0:64]!
vld1.32 {d11}, [r0:64]! @ A[3][0..4]
vld1.32 {d13}, [r0:64]!
vld1.32 {d15}, [r0:64]!
vld1.32 {d17}, [r0:64]!
vld1.32 {d19}, [r0:64]!
vld1.32 {d20-d23}, [r0:64]! @ A[4][0..4]
vld1.32 {d24}, [r0:64]
sub r0, r0, #24*8 @ rewind
bl KeccakF1600_neon
mov r12, r0 @ A_flat
vst1.32 {d0}, [r0:64]! @ A[0][0..4]
vst1.32 {d2}, [r0:64]!
vst1.32 {d4}, [r0:64]!
vst1.32 {d6}, [r0:64]!
vst1.32 {d8}, [r0:64]!
vst1.32 {d1}, [r0:64]! @ A[1][0..4]
vst1.32 {d3}, [r0:64]!
vst1.32 {d5}, [r0:64]!
vst1.32 {d7}, [r0:64]!
vst1.32 {d9}, [r0:64]!
vst1.32 {d10}, [r0:64]! @ A[2][0..4]
vst1.32 {d12}, [r0:64]!
vst1.32 {d14}, [r0:64]!
vst1.32 {d16}, [r0:64]!
vst1.32 {d18}, [r0:64]!
vst1.32 {d11}, [r0:64]! @ A[3][0..4]
vst1.32 {d13}, [r0:64]!
vst1.32 {d15}, [r0:64]!
vst1.32 {d17}, [r0:64]!
vst1.32 {d19}, [r0:64]!
vst1.32 {d20-d23}, [r0:64]! @ A[4][0..4]
mov r14, r6 @ bsz
vst1.32 {d24}, [r0:64]
mov r0, r12 @ rewind
vldmia sp!, {d8-d15}
b .Loop_squeeze_neon
.align 4
.Lsqueeze_neon_tail:
ldmia r12, {r2,r3}
cmp r5, #2
strb r2, [r4],#1 @ endian-neutral store
lsr r2, r2, #8
blo .Lsqueeze_neon_done
strb r2, [r4], #1
lsr r2, r2, #8
beq .Lsqueeze_neon_done
strb r2, [r4], #1
lsr r2, r2, #8
cmp r5, #4
blo .Lsqueeze_neon_done
strb r2, [r4], #1
beq .Lsqueeze_neon_done
strb r3, [r4], #1
lsr r3, r3, #8
cmp r5, #6
blo .Lsqueeze_neon_done
strb r3, [r4], #1
lsr r3, r3, #8
beq .Lsqueeze_neon_done
strb r3, [r4], #1
.Lsqueeze_neon_done:
ldmia sp!, {r4-r6,pc}
.size SHA3_squeeze_neon,.-SHA3_squeeze_neon
.asciz "Keccak-1600 absorb and squeeze for ARMv4/NEON, CRYPTOGAMS by <appro\@openssl.org>"
.align 2
___
print $code;
close STDOUT; # enforce flush
| 27.658831 | 90 | 0.502112 |
ed68148b6e7cc66888117d576f6553cb2f03169d | 1,639 | pl | Perl | project-euler/233/233-v1-brute.pl | zoffixznet/project-euler | 39921379385ae2521354c7266a541c46785e85a2 | [
"MIT"
]
| null | null | null | project-euler/233/233-v1-brute.pl | zoffixznet/project-euler | 39921379385ae2521354c7266a541c46785e85a2 | [
"MIT"
]
| null | null | null | project-euler/233/233-v1-brute.pl | zoffixznet/project-euler | 39921379385ae2521354c7266a541c46785e85a2 | [
"MIT"
]
| null | null | null | #! /usr/bin/env perl
#
# Short description for 233-v1-brute.pl
#
# Author shlomif <shlomif@cpan.org>
# Version 0.1
# Copyright (C) 2018 shlomif <shlomif@cpan.org>
# Modified On 2018-04-12 20:24
# Created 2018-04-12 20:24
#
use strict;
use warnings;
my $n = shift;
my $l = int sqrt( $n / 2 );
my $r = 0;
foreach my $i ( 0 .. $l )
{
my $sub = $n - $i * $i;
my $j = int sqrt($sub);
++$r if $j * $j == $sub;
}
print "$r\n";
=head1 COPYRIGHT & LICENSE
Copyright 2018 by Shlomi Fish
This program is distributed under the MIT / Expat License:
L<http://www.opensource.org/licenses/mit-license.php>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
=cut
| 30.351852 | 65 | 0.739475 |
ed5611b7a44647ea8ff86ab088cc87aeff12b7f5 | 1,601 | pm | Perl | auto-lib/Paws/CloudDirectory/Facet.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/CloudDirectory/Facet.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
]
| null | null | null | auto-lib/Paws/CloudDirectory/Facet.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
]
| null | null | null | package Paws::CloudDirectory::Facet;
use Moose;
has Name => (is => 'ro', isa => 'Str');
has ObjectType => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::CloudDirectory::Facet
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::CloudDirectory::Facet object:
$service_obj->Method(Att1 => { Name => $value, ..., ObjectType => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::CloudDirectory::Facet object:
$result = $service_obj->Method(...);
$result->Att1->Name
=head1 DESCRIPTION
A structure that contains C<Name>, C<ARN>, C<Attributes>, C< Rules>,
and C<ObjectTypes>. See Facets
(http://docs.aws.amazon.com/directoryservice/latest/admin-guide/whatarefacets.html)
for more information.
=head1 ATTRIBUTES
=head2 Name => Str
The name of the Facet.
=head2 ObjectType => Str
The object type that is associated with the facet. See
CreateFacetRequest$ObjectType for more details.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::CloudDirectory>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 23.895522 | 102 | 0.726421 |
ed41d3b6486dee5b83b1db6923e90ce395d4f730 | 376 | pl | Perl | perl/client1.pl | Mobink980/Perl-Programming | 0c4a09a747f9be3a7c55aa754c88ecf5f05c7809 | [
"MIT"
]
| 1 | 2019-12-05T21:12:30.000Z | 2019-12-05T21:12:30.000Z | perl/client1.pl | Mobink980/Perl-Programming | 0c4a09a747f9be3a7c55aa754c88ecf5f05c7809 | [
"MIT"
]
| null | null | null | perl/client1.pl | Mobink980/Perl-Programming | 0c4a09a747f9be3a7c55aa754c88ecf5f05c7809 | [
"MIT"
]
| null | null | null | use strict;
use warnings;
use IO::Socket;
my $socket = new IO::Socket::INET (
PeerAddr => 'localhost',
PeerPort => '1234',
Proto => 'tcp',
);
die "Could not create socket: $!n" unless $socket;
print "Enter the data:\n";
my $data = <STDIN>;
chomp $data;
print $socket "This is the data entered by user '$data'\n";
close($socket); | 26.857143 | 62 | 0.579787 |
ed798f5434953a5a251350de736eecc7e13c9782 | 2,851 | t | Perl | t/controller/search.t | haarg/metacpan-web | 582d63ed8f832dc5853de705137345675084f8c5 | [
"Artistic-1.0"
]
| null | null | null | t/controller/search.t | haarg/metacpan-web | 582d63ed8f832dc5853de705137345675084f8c5 | [
"Artistic-1.0"
]
| null | null | null | t/controller/search.t | haarg/metacpan-web | 582d63ed8f832dc5853de705137345675084f8c5 | [
"Artistic-1.0"
]
| null | null | null | use strict;
use warnings;
use utf8;
use Test::More;
use MetaCPAN::Web::Test;
use Encode qw(encode is_utf8);
test_psgi app, sub {
my $cb = shift;
ok( my $res = $cb->( GET "/search" ),
'GET /search' );
is( $res->code, 302, 'code 302' );
# Empty search query results in redirect.
ok( $res = $cb->( GET "/search?q=" ), 'GET /search?q=' );
is( $res->code, 302, 'code 302' );
# Empty search query for lucky searches also redirects.
ok( $res = $cb->( GET "/search?q=&lucky=1" ), 'GET /search?q=&lucky=1' );
is( $res->code, 302, 'code 302' );
ok( $res = $cb->( GET "/search?q=moose\">" ), 'GET /search?q=moose">' );
is( $res->code, 200, 'code 200' );
ok( $res->content =~ /0\s+results/, '0 results for an invalid search term' );
ok( $res = $cb->( GET "/search?q=moose" ), 'GET /search?q=moose' );
is( $res->code, 200, 'code 200' );
my $tx = tx($res);
$tx->like( '/html/head/title', qr/moose/, 'title includes search term' );
my $release = $tx->find_value(
'//div[@class="search-results"]//div[1]/big[1]/strong/a/@href');
ok( $release, "found release $release" );
# Moose has ratings (other things on this search page likely do as well)
$tx->like(
'//div[@class="search-results"]//a[starts-with(@class, "rating-")]/following-sibling::a',
qr/\d+ reviews?/i,
'current rating and number of reviews listed'
);
ok( $res = $cb->( GET $release), "GET $release" );
is( $res->code, 200, 'code 200' );
# test search operators
my $author = 'rjbs';
$res = $cb->( GET "/search?q=author%3Arjbs+app" );
is( $res->code, 200, 'search restricted by author OK' )
or diag explain $res;
$tx = tx($res);
$tx->ok('//div[@class="search-results"]//div[@class="module-result"]/a[@class="author"]', sub {
my $node = shift;
$node->is('.', uc($author), 'dist owned by queried author')
or diag explain $node;
}, 'all dists owned by queried author');
# as of 2013-01-20 there was only one page of results
search_and_find_module($cb,
"ねんねこ", # no idea what this means - rwstauner 2013-01-20
'Lingua::JA::WordNet',
'search for UTF-8 characters',
);
};
done_testing;
sub req_200_ok {
my ($cb, $req, $desc) = @_;
ok( my $res = $cb->($req), $desc );
is $res->code, 200, "200 OK";
return $res;
}
sub search_and_find_module {
my ($cb, $query, $exp_mod, $desc) = @_;
$query = encode("UTF-8" => $query) if is_utf8($query);
my $res = req_200_ok( $cb, GET("/search?q=$query"), $desc);
my $tx = tx($res);
# make sure there is a link tag whose content is the module name
$tx->ok(
qq!grep(//div[\@class="search-results"]//div[\@class="module-result"]//a[1], "^\Q$exp_mod\E\$")!,
"$desc: found expected module",
);
}
| 33.541176 | 105 | 0.566468 |
ed8829c380efb686b601d0dc75b9e5b513bac23c | 3,769 | t | Perl | t/resource-iterator.t | Mattlk13/ack2 | c811a57bc124a8d49053bc98de3ca39fe4e3b907 | [
"Artistic-2.0"
]
| 719 | 2015-01-03T06:11:27.000Z | 2017-07-29T14:04:07.000Z | t/resource-iterator.t | sahwar/ack2 | 3f13238c80f2052db07dc0ac7b41a8a9fded1242 | [
"Artistic-2.0"
]
| 254 | 2015-01-06T19:10:35.000Z | 2017-07-26T00:51:56.000Z | t/resource-iterator.t | sahwar/ack2 | 3f13238c80f2052db07dc0ac7b41a8a9fded1242 | [
"Artistic-2.0"
]
| 85 | 2015-01-06T17:22:32.000Z | 2017-07-21T05:34:35.000Z | #!perl -T
use warnings;
use strict;
use Test::More tests => 1;
use File::Next 0.22;
use lib 't';
use Util;
prep_environment();
sub slurp {
my $iter = shift;
my @files;
while ( defined ( my $file = $iter->() ) ) {
push( @files, $file );
}
return @files;
}
UNFILTERED: {
my $iter =
File::Next::files( {
file_filter => undef,
descend_filter => undef,
}, 't/swamp' );
my @files = slurp( $iter );
sets_match( \@files, [qw(
t/swamp/0
t/swamp/blib/ignore.pir
t/swamp/blib/ignore.pm
t/swamp/blib/ignore.pod
t/swamp/c-header.h
t/swamp/c-source.c
t/swamp/constitution-100k.pl
t/swamp/crystallography-weenies.f
t/swamp/example.R
t/swamp/file.bar
t/swamp/file.foo
t/swamp/fresh.css
t/swamp/fresh.css.min
t/swamp/fresh.min.css
t/swamp/groceries/another_subdir/CVS/fruit
t/swamp/groceries/another_subdir/CVS/junk
t/swamp/groceries/another_subdir/CVS/meat
t/swamp/groceries/another_subdir/fruit
t/swamp/groceries/another_subdir/junk
t/swamp/groceries/another_subdir/meat
t/swamp/groceries/another_subdir/RCS/fruit
t/swamp/groceries/another_subdir/RCS/junk
t/swamp/groceries/another_subdir/RCS/meat
t/swamp/groceries/dir.d/CVS/fruit
t/swamp/groceries/dir.d/CVS/junk
t/swamp/groceries/dir.d/CVS/meat
t/swamp/groceries/dir.d/fruit
t/swamp/groceries/dir.d/junk
t/swamp/groceries/dir.d/meat
t/swamp/groceries/dir.d/RCS/fruit
t/swamp/groceries/dir.d/RCS/junk
t/swamp/groceries/dir.d/RCS/meat
t/swamp/groceries/CVS/fruit
t/swamp/groceries/CVS/junk
t/swamp/groceries/CVS/meat
t/swamp/groceries/fruit
t/swamp/groceries/junk
t/swamp/groceries/meat
t/swamp/groceries/RCS/fruit
t/swamp/groceries/RCS/junk
t/swamp/groceries/RCS/meat
t/swamp/groceries/subdir/fruit
t/swamp/groceries/subdir/junk
t/swamp/groceries/subdir/meat
t/swamp/html.htm
t/swamp/html.html
t/swamp/incomplete-last-line.txt
t/swamp/javascript.js
t/swamp/lua-shebang-test
t/swamp/Makefile
t/swamp/Makefile.PL
t/swamp/MasterPage.master
t/swamp/minified.js.min
t/swamp/minified.min.js
t/swamp/moose-andy.jpg
t/swamp/notaMakefile
t/swamp/notaRakefile
t/swamp/notes.md
t/swamp/options-crlf.pl
t/swamp/options.pl
t/swamp/options.pl.bak
t/swamp/parrot.pir
t/swamp/perl-test.t
t/swamp/perl-without-extension
t/swamp/perl.cgi
t/swamp/perl.pl
t/swamp/perl.handler.pod
t/swamp/perl.pm
t/swamp/perl.pod
t/swamp/perl.tar.gz
t/swamp/perltoot.jpg
t/swamp/pipe-stress-freaks.F
t/swamp/Rakefile
t/swamp/Sample.ascx
t/swamp/Sample.asmx
t/swamp/sample.asp
t/swamp/sample.aspx
t/swamp/sample.rake
t/swamp/service.svc
t/swamp/solution8.tar
t/swamp/stuff.cmake
t/swamp/CMakeLists.txt
t/swamp/swamp/ignoreme.txt
),
't/swamp/#emacs-workfile.pl#',
't/swamp/not-an-#emacs-workfile#',
], 'UNFILTERED' );
}
done_testing();
| 30.152 | 54 | 0.541258 |
ed8dae3e0d3b0bac624113732942906400e14862 | 2,217 | t | Perl | xt/10_cosine_compare.t | wollmers/Set-Similarity | c5a0827046d91913a3803777a7d5d162038431a6 | [
"Artistic-1.0"
]
| 3 | 2016-02-24T21:29:12.000Z | 2020-11-15T20:28:41.000Z | xt/10_cosine_compare.t | wollmers/Set-Similarity | c5a0827046d91913a3803777a7d5d162038431a6 | [
"Artistic-1.0"
]
| null | null | null | xt/10_cosine_compare.t | wollmers/Set-Similarity | c5a0827046d91913a3803777a7d5d162038431a6 | [
"Artistic-1.0"
]
| null | null | null | #!perl
use strict;
use warnings;
use lib qw(../lib/ ./lib/);
use Test::More;
use Data::Dumper;
my $c = 'Set::Similarity::Cosine';
use_ok($c);
my $pp = new_ok($c);
my $cpdl = 'Set::Similarity::CosinePDL';
use_ok($cpdl);
my $pdl = new_ok($cpdl);
#my $object = $class;
sub d3 { sprintf('%.3f',shift) }
is($pp->similarity('ab','ab'),1,'equal ab strings');
is($pp->similarity('ab','cd'),0,'ab unequal cd strings');
is($pp->similarity('abaa','bccc'),0.5,'abaa 0.5 bccc strings');
is($pp->similarity('abab','bccc'),0.5,'abab 0.5 bccc strings');
is(d3($pp->similarity('ab','abcd')),0.707,'ab 0.707 abcd strings');
is($pp->similarity('ab','ab',2),1,'equal ab bigrams');
is($pp->similarity('ab','cd',2),0,'ab unequal cd bigrams');
is($pp->similarity('abaa','bccc',2),0,'abaa 0 bccc bigrams');
is($pp->similarity('abcabcf','bcccah',2),0.5,'abcabcf 0.5 bcccah bigrams');
is(d3($pp->similarity('abc','abcdef',2)),0.632,'abc 0.632 abcdef bigrams');
is(d3($pp->similarity('Photographer','Fotograf')),'0.630','Photographer 0.630 Fotograf strings');
is(d3($pp->similarity('Photographer','Fotograf',2)),'0.570','Photographer 0.570 Fotograf bigrams');
is(d3($pp->similarity('Photographer','Fotograf',3)),0.516,'Photographer 0.516 Fotograf trigrams');
is($pdl->similarity('ab','ab'),1,'equal ab strings');
is($pdl->similarity('ab','cd'),0,'ab unequal cd strings');
is($pdl->similarity('abaa','bccc'),0.5,'abaa 0.5 bccc strings');
is($pdl->similarity('abab','bccc'),0.5,'abab 0.5 bccc strings');
is(d3($pdl->similarity('ab','abcd')),0.707,'ab 0.707 abcd strings');
is($pdl->similarity('ab','ab',2),1,'equal ab bigrams');
is($pdl->similarity('ab','cd',2),0,'ab unequal cd bigrams');
is($pdl->similarity('abaa','bccc',2),0,'abaa 0 bccc bigrams');
is($pdl->similarity('abcabcf','bcccah',2),0.5,'abcabcf 0.5 bcccah bigrams');
is(d3($pdl->similarity('abc','abcdef',2)),0.632,'abc 0.632 abcdef bigrams');
is(d3($pdl->similarity('Photographer','Fotograf')),'0.630','Photographer 0.630 Fotograf strings');
is(d3($pdl->similarity('Photographer','Fotograf',2)),'0.570','Photographer 0.570 Fotograf bigrams');
is(d3($pdl->similarity('Photographer','Fotograf',3)),0.516,'Photographer 0.516 Fotograf trigrams');
done_testing;
| 35.190476 | 100 | 0.65945 |
ed8069b74324af8db45fbc9f0f66e5fe87ac3929 | 472 | pl | Perl | bin/r2.pl | swainechen/closet | 88f097ac0acbdcee44be694b4e5783f6f703cae8 | [
"MIT"
]
| null | null | null | bin/r2.pl | swainechen/closet | 88f097ac0acbdcee44be694b4e5783f6f703cae8 | [
"MIT"
]
| null | null | null | bin/r2.pl | swainechen/closet | 88f097ac0acbdcee44be694b4e5783f6f703cae8 | [
"MIT"
]
| 2 | 2019-06-09T22:05:30.000Z | 2019-08-21T00:42:18.000Z | #!/usr/bin/perl -w
use PDL;
($x, $y) = rcols (*STDIN);
$w = ones (nelem $x);
print join "\t", r2($x, $y, $w);
print "\n";
sub r2 {
my ($x, $y, $w) = @_;
my ($mx, $sx) = (stats ($x, $w))[0,1];
my ($my, $sy) = (stats ($y, $w))[0,1];
$x = ($x - $mx) / $sx;
$y = ($y - $my) / $sy;
my $r = abs(sum($x * $y * $w)/sum ($w));
my $z = 0.5*(log(1+$r) - log(1-$r));
my $zpdl = pdl ($z*sqrt(sum($w)-3)/sqrt(2));
my ($p) = list (erfc $zpdl);
return ($r**2, $p);
}
| 23.6 | 46 | 0.408898 |
ed6ffdb76eb7a92246f7a461651501dc7815f338 | 2,854 | t | Perl | t/download_vcf2cytosure.t | Clinical-Genomics/MIP | db2e89fec2674f5c12dbf6ec89eba181433fc742 | [
"MIT"
]
| 22 | 2017-09-04T07:50:54.000Z | 2022-01-01T20:41:45.000Z | t/download_vcf2cytosure.t | Clinical-Genomics/MIP | db2e89fec2674f5c12dbf6ec89eba181433fc742 | [
"MIT"
]
| 834 | 2017-09-05T07:18:38.000Z | 2022-03-31T15:27:49.000Z | t/download_vcf2cytosure.t | Clinical-Genomics/MIP | db2e89fec2674f5c12dbf6ec89eba181433fc742 | [
"MIT"
]
| 11 | 2017-09-12T10:53:30.000Z | 2021-11-30T01:40:49.000Z | #!/usr/bin/env perl
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use File::Basename qw{ dirname };
use File::Spec::Functions qw{ catdir catfile };
use File::Temp;
use FindBin qw{ $Bin };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use Test::More;
use utf8;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw { :all };
use Modern::Perl qw{ 2018 };
use Readonly;
## MIPs lib/
use lib catdir( dirname($Bin), q{lib} );
use MIP::Constants qw{ $COMMA $SPACE };
use MIP::Test::Fixtures qw{ test_log test_mip_hashes };
BEGIN {
use MIP::Test::Fixtures qw{ test_import };
### Check all internal dependency modules and imports
## Modules with import
my %perl_module = (
q{MIP::Recipes::Download::Vcf2cytosure_blacklist_regions} =>
[qw{ download_vcf2cytosure_blacklist_regions }],
q{MIP::Test::Fixtures} => [qw{ test_log test_mip_hashes }],
);
test_import( { perl_module_href => \%perl_module, } );
}
use MIP::Recipes::Download::Vcf2cytosure_blacklist_regions
qw{ download_vcf2cytosure_blacklist_regions };
diag( q{Test download_vcf2cytosure_blacklist_regions from Vcf2cytosure.pm}
. $COMMA
. $SPACE . q{Perl}
. $SPACE
. $PERL_VERSION
. $SPACE
. $EXECUTABLE_NAME );
my $test_dir = File::Temp->newdir();
my $file_path = catfile( $test_dir, q{recipe_script.sh} );
my $log = test_log( { log_name => uc q{mip_download}, no_screen => 1, } );
## Given download parameters for recipe
my $genome_version = q{grch37};
my $recipe_name = q{vcf2cytosure_blacklist_regions};
my $reference_version = q{1.0};
my $slurm_mock_cmd = catfile( $Bin, qw{ data modules slurm-mock.pl } );
my %active_parameter = test_mip_hashes(
{
mip_hash_name => q{download_active_parameter},
}
);
$active_parameter{$recipe_name} = 1;
$active_parameter{project_id} = q{test};
$active_parameter{reference_dir} = catfile($test_dir);
$active_parameter{recipe_core_number}{$recipe_name} = 1;
$active_parameter{recipe_time}{$recipe_name} = 1;
my $reference_href =
$active_parameter{reference_feature}{$recipe_name}{$genome_version}{$reference_version};
my %job_id;
my $is_ok = download_vcf2cytosure_blacklist_regions(
{
active_parameter_href => \%active_parameter,
genome_version => $genome_version,
job_id_href => \%job_id,
profile_base_command => $slurm_mock_cmd,
recipe_name => $recipe_name,
reference_href => $reference_href,
reference_version => $reference_version,
temp_directory => catfile($test_dir),
}
);
## Then
ok( $is_ok, q{ Executed download recipe } . $recipe_name );
done_testing();
| 30.042105 | 90 | 0.656973 |
73fefe0b5d2ab4211f231d074f26648011a14de2 | 4,027 | pl | Perl | external/win_perl/lib/unicore/To/NFKCQC.pl | phixion/l0phtcrack | 48ee2f711134e178dbedbd925640f6b3b663fbb5 | [
"Apache-2.0",
"MIT"
]
| 2 | 2021-10-20T00:25:39.000Z | 2021-11-08T12:52:42.000Z | external/win_perl/lib/unicore/To/NFKCQC.pl | Brute-f0rce/l0phtcrack | 25f681c07828e5e68e0dd788d84cc13c154aed3d | [
"Apache-2.0",
"MIT"
]
| null | null | null | external/win_perl/lib/unicore/To/NFKCQC.pl | Brute-f0rce/l0phtcrack | 25f681c07828e5e68e0dd788d84cc13c154aed3d | [
"Apache-2.0",
"MIT"
]
| 1 | 2022-03-14T06:41:16.000Z | 2022-03-14T06:41:16.000Z | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by ..\lib\unicore\mktables from the Unicode
# database, Version 9.0.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
# The name this swash is to be known by, with the format of the mappings in
# the main body of the table, and what all code points missing from this file
# map to.
$utf8::SwashInfo{'ToNFKCQC'}{'format'} = 's'; # string
$utf8::SwashInfo{'ToNFKCQC'}{'missing'} = 'Yes';
return <<'END';
A0 N
A8 N
AA N
AF N
B2 B5 N
B8 BA N
BC BE N
132 133 N
13F 140 N
149 N
17F N
1C4 1CC N
1F1 1F3 N
2B0 2B8 N
2D8 2DD N
2E0 2E4 N
300 304 M
306 30C M
30F M
311 M
313 314 M
31B M
323 328 M
32D 32E M
330 331 M
338 M
340 341 N
342 M
343 344 N
345 M
374 N
37A N
37E N
384 385 N
387 N
3D0 3D6 N
3F0 3F2 N
3F4 3F5 N
3F9 N
587 N
653 655 M
675 678 N
93C M
958 95F N
9BE M
9D7 M
9DC 9DD N
9DF N
A33 N
A36 N
A59 A5B N
A5E N
B3E M
B56 B57 M
B5C B5D N
BBE M
BD7 M
C56 M
CC2 M
CD5 CD6 M
D3E M
D57 M
DCA M
DCF M
DDF M
E33 N
EB3 N
EDC EDD N
F0C N
F43 N
F4D N
F52 N
F57 N
F5C N
F69 N
F73 N
F75 F79 N
F81 N
F93 N
F9D N
FA2 N
FA7 N
FAC N
FB9 N
102E M
10FC N
1161 1175 M
11A8 11C2 M
1B35 M
1D2C 1D2E N
1D30 1D3A N
1D3C 1D4D N
1D4F 1D6A N
1D78 N
1D9B 1DBF N
1E9A 1E9B N
1F71 N
1F73 N
1F75 N
1F77 N
1F79 N
1F7B N
1F7D N
1FBB N
1FBD 1FC1 N
1FC9 N
1FCB N
1FCD 1FCF N
1FD3 N
1FDB N
1FDD 1FDF N
1FE3 N
1FEB N
1FED 1FEF N
1FF9 N
1FFB N
1FFD 1FFE N
2000 200A N
2011 N
2017 N
2024 2026 N
202F N
2033 2034 N
2036 2037 N
203C N
203E N
2047 2049 N
2057 N
205F N
2070 2071 N
2074 208E N
2090 209C N
20A8 N
2100 2103 N
2105 2107 N
2109 2113 N
2115 2116 N
2119 211D N
2120 2122 N
2124 N
2126 N
2128 N
212A 212D N
212F 2131 N
2133 2139 N
213B 2140 N
2145 2149 N
2150 217F N
2189 N
222C 222D N
222F 2230 N
2329 232A N
2460 24EA N
2A0C N
2A74 2A76 N
2ADC N
2C7C 2C7D N
2D6F N
2E9F N
2EF3 N
2F00 2FD5 N
3000 N
3036 N
3038 303A N
3099 309A M
309B 309C N
309F N
30FF N
3131 318E N
3192 319F N
3200 321E N
3220 3247 N
3250 327E N
3280 32FE N
3300 33FF N
A69C A69D N
A770 N
A7F8 A7F9 N
AB5C AB5F N
F900 FA0D N
FA10 N
FA12 N
FA15 FA1E N
FA20 N
FA22 N
FA25 FA26 N
FA2A FA6D N
FA70 FAD9 N
FB00 FB06 N
FB13 FB17 N
FB1D N
FB1F FB36 N
FB38 FB3C N
FB3E N
FB40 FB41 N
FB43 FB44 N
FB46 FBB1 N
FBD3 FD3D N
FD50 FD8F N
FD92 FDC7 N
FDF0 FDFC N
FE10 FE19 N
FE30 FE44 N
FE47 FE52 N
FE54 FE66 N
FE68 FE6B N
FE70 FE72 N
FE74 N
FE76 FEFC N
FF01 FFBE N
FFC2 FFC7 N
FFCA FFCF N
FFD2 FFD7 N
FFDA FFDC N
FFE0 FFE6 N
FFE8 FFEE N
110BA M
11127 M
1133E M
11357 M
114B0 M
114BA M
114BD M
115AF M
1D15E 1D164 N
1D1BB 1D1C0 N
1D400 1D454 N
1D456 1D49C N
1D49E 1D49F N
1D4A2 N
1D4A5 1D4A6 N
1D4A9 1D4AC N
1D4AE 1D4B9 N
1D4BB N
1D4BD 1D4C3 N
1D4C5 1D505 N
1D507 1D50A N
1D50D 1D514 N
1D516 1D51C N
1D51E 1D539 N
1D53B 1D53E N
1D540 1D544 N
1D546 N
1D54A 1D550 N
1D552 1D6A5 N
1D6A8 1D7CB N
1D7CE 1D7FF N
1EE00 1EE03 N
1EE05 1EE1F N
1EE21 1EE22 N
1EE24 N
1EE27 N
1EE29 1EE32 N
1EE34 1EE37 N
1EE39 N
1EE3B N
1EE42 N
1EE47 N
1EE49 N
1EE4B N
1EE4D 1EE4F N
1EE51 1EE52 N
1EE54 N
1EE57 N
1EE59 N
1EE5B N
1EE5D N
1EE5F N
1EE61 1EE62 N
1EE64 N
1EE67 1EE6A N
1EE6C 1EE72 N
1EE74 1EE77 N
1EE79 1EE7C N
1EE7E N
1EE80 1EE89 N
1EE8B 1EE9B N
1EEA1 1EEA3 N
1EEA5 1EEA9 N
1EEAB 1EEBB N
1F100 1F10A N
1F110 1F12E N
1F130 1F14F N
1F16A 1F16B N
1F190 N
1F200 1F202 N
1F210 1F23B N
1F240 1F248 N
1F250 1F251 N
2F800 2FA1D N
END
| 12.948553 | 78 | 0.64738 |
ed730a0ab35ba438b54b944472f0839498e6cec0 | 69,618 | pm | Perl | perl/vendor/lib/Class/MOP/Class.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
]
| null | null | null | perl/vendor/lib/Class/MOP/Class.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
]
| 3 | 2021-01-27T10:09:28.000Z | 2021-05-11T21:20:12.000Z | perl/vendor/lib/Class/MOP/Class.pm | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
]
| null | null | null | package Class::MOP::Class;
our $VERSION = '2.2012';
use strict;
use warnings;
use Class::MOP::Instance;
use Class::MOP::Method::Wrapped;
use Class::MOP::Method::Accessor;
use Class::MOP::Method::Constructor;
use Class::MOP::MiniTrait;
use Carp 'confess';
use Module::Runtime 'use_package_optimistically';
use Scalar::Util 'blessed';
use Sub::Name 'subname';
use Try::Tiny;
use List::Util 1.33 'all';
use parent 'Class::MOP::Module',
'Class::MOP::Mixin::HasAttributes',
'Class::MOP::Mixin::HasMethods',
'Class::MOP::Mixin::HasOverloads';
# Creation
# Return the singleton metaclass for a package, creating and registering it
# on first use. Accepts either initialize($package_name) or
# initialize(package => $package_name, %options). The package name must be
# a true, unblessed string.
sub initialize {
    my $class        = shift;
    my $package_name;
    # An odd number of arguments means the first one is the bare package
    # name; otherwise the name arrives as the 'package' option.
    if ( @_ % 2 ) {
        $package_name = shift;
    } else {
        my %options = @_;
        $package_name = $options{package};
    }
    ($package_name && !ref($package_name))
        || ($class||__PACKAGE__)->_throw_exception( InitializeTakesUnBlessedPackageName => package_name => $package_name );
    # Reuse the globally registered metaclass when one already exists;
    # only construct (and cache) a fresh instance the first time.
    return Class::MOP::get_metaclass_by_name($package_name)
        || $class->_construct_class_instance(package => $package_name, @_);
}
sub reinitialize {
my ( $class, @args ) = @_;
unshift @args, "package" if @args % 2;
my %options = @args;
my $old_metaclass = blessed($options{package})
? $options{package}
: Class::MOP::get_metaclass_by_name($options{package});
$options{weaken} = Class::MOP::metaclass_is_weak($old_metaclass->name)
if !exists $options{weaken}
&& blessed($old_metaclass)
&& $old_metaclass->isa('Class::MOP::Class');
$old_metaclass->_remove_generated_metaobjects
if $old_metaclass && $old_metaclass->isa('Class::MOP::Class');
my $new_metaclass = $class->SUPER::reinitialize(%options);
$new_metaclass->_restore_metaobjects_from($old_metaclass)
if $old_metaclass && $old_metaclass->isa('Class::MOP::Class');
return $new_metaclass;
}
# NOTE: (meta-circularity)
# this is a special form of _construct_instance
# (see below), which is used to construct class
# meta-object instances for any Class::MOP::*
# class. All other classes will use the more
# normal &construct_instance.
# Build and register the metaclass object for a package. This is the
# meta-circular bootstrap path: Class::MOP::Class instances are created
# directly via _new, while subclasses go through their own (meta)metaclass.
sub _construct_class_instance {
    my $class        = shift;
    my $options      = @_ == 1 ? $_[0] : {@_};
    my $package_name = $options->{package};
    (defined $package_name && $package_name)
        || $class->_throw_exception("ConstructClassInstanceTakesPackageName");
    # NOTE:
    # return the metaclass if we have it cached,
    # and it is still defined (it has not been
    # reaped by DESTROY yet, which can happen
    # annoyingly enough during global destruction)
    if (defined(my $meta = Class::MOP::get_metaclass_by_name($package_name))) {
        return $meta;
    }
    # When called on a metaclass *instance*, resolve to its real (mutable)
    # package name so we construct a sibling of the right class.
    $class
        = ref $class
        ? $class->_real_ref_name
        : $class;
    # now create the metaclass
    my $meta;
    if ($class eq 'Class::MOP::Class') {
        $meta = $class->_new($options);
    }
    else {
        # NOTE:
        # it is safe to use meta here because
        # class will always be a subclass of
        # Class::MOP::Class, which defines meta
        $meta = $class->meta->_construct_instance($options)
    }
    # and check the metaclass compatibility
    $meta->_check_metaclass_compatibility();
    Class::MOP::store_metaclass_by_name($package_name, $meta);
    # NOTE:
    # we need to weaken any anon classes
    # so that they can call DESTROY properly
    Class::MOP::weaken_metaclass($package_name) if $options->{weaken};
    $meta;
}
sub _real_ref_name {
    my $meta = shift;

    # An immutable metaclass is blessed into a generated immutable
    # package; report the underlying mutable metaclass name in that case,
    # otherwise simply the package this instance is blessed into.
    if ( $meta->is_immutable ) {
        return $meta->_get_mutable_metaclass_name();
    }

    return ref $meta;
}
sub _new {
my $class = shift;
return Class::MOP::Class->initialize($class)->new_object(@_)
if $class ne __PACKAGE__;
my $options = @_ == 1 ? $_[0] : {@_};
return bless {
# inherited from Class::MOP::Package
'package' => $options->{package},
# NOTE:
# since the following attributes will
# actually be loaded from the symbol
# table, and actually bypass the instance
# entirely, we can just leave these things
# listed here for reference, because they
# should not actually have a value associated
# with the slot.
'namespace' => \undef,
'methods' => {},
# inherited from Class::MOP::Module
'version' => \undef,
'authority' => \undef,
# defined in Class::MOP::Class
'superclasses' => \undef,
'attributes' => {},
'attribute_metaclass' =>
( $options->{'attribute_metaclass'} || 'Class::MOP::Attribute' ),
'method_metaclass' =>
( $options->{'method_metaclass'} || 'Class::MOP::Method' ),
'wrapped_method_metaclass' => (
$options->{'wrapped_method_metaclass'}
|| 'Class::MOP::Method::Wrapped'
),
'instance_metaclass' =>
( $options->{'instance_metaclass'} || 'Class::MOP::Instance' ),
'immutable_trait' => (
$options->{'immutable_trait'}
|| 'Class::MOP::Class::Immutable::Trait'
),
'constructor_name' => ( $options->{constructor_name} || 'new' ),
'constructor_class' => (
$options->{constructor_class} || 'Class::MOP::Method::Constructor'
),
'destructor_class' => $options->{destructor_class},
}, $class;
}
## Metaclass compatibility
{
my %base_metaclass = (
attribute_metaclass => 'Class::MOP::Attribute',
method_metaclass => 'Class::MOP::Method',
wrapped_method_metaclass => 'Class::MOP::Method::Wrapped',
instance_metaclass => 'Class::MOP::Instance',
constructor_class => 'Class::MOP::Method::Constructor',
destructor_class => 'Class::MOP::Method::Destructor',
);
sub _base_metaclasses { %base_metaclass }
}
sub _check_metaclass_compatibility {
my $self = shift;
my @superclasses = $self->superclasses
or return;
$self->_fix_metaclass_incompatibility(@superclasses);
my %base_metaclass = $self->_base_metaclasses;
# this is always okay ...
return
if ref($self) eq 'Class::MOP::Class'
&& all {
my $meta = $self->$_;
!defined($meta) || $meta eq $base_metaclass{$_};
}
keys %base_metaclass;
for my $superclass (@superclasses) {
$self->_check_class_metaclass_compatibility($superclass);
}
for my $metaclass_type ( keys %base_metaclass ) {
next unless defined $self->$metaclass_type;
for my $superclass (@superclasses) {
$self->_check_single_metaclass_compatibility( $metaclass_type,
$superclass );
}
}
}
sub _check_class_metaclass_compatibility {
my $self = shift;
my ( $superclass_name ) = @_;
if (!$self->_class_metaclass_is_compatible($superclass_name)) {
my $super_meta = Class::MOP::get_metaclass_by_name($superclass_name);
my $super_meta_type = $super_meta->_real_ref_name;
$self->_throw_exception( IncompatibleMetaclassOfSuperclass => class_name => $self->name,
class_meta_type => ref( $self ),
superclass_name => $superclass_name,
superclass_meta_type => $super_meta_type
);
}
}
sub _class_metaclass_is_compatible {
    my ( $self, $superclass_name ) = @_;

    # A superclass without a registered metaclass imposes no constraints.
    my $super_meta = Class::MOP::get_metaclass_by_name($superclass_name)
        or return 1;

    # Compatible when our metaclass is (or can act as) the superclass's
    # real metaclass package.
    return $self->_is_compatible_with( $super_meta->_real_ref_name );
}
# Throw a MetaclassTypeIncompatible exception when one metaclass slot
# (attribute_metaclass, method_metaclass, ...) of this class is not
# compatible with the corresponding slot of the named superclass.
sub _check_single_metaclass_compatibility {
    my $self = shift;
    my ( $metaclass_type, $superclass_name ) = @_;
    if (!$self->_single_metaclass_is_compatible($metaclass_type, $superclass_name)) {
        # NOTE: the original code fetched the superclass metaclass into an
        # unused lexical here; that dead lookup has been removed.
        $self->_throw_exception( MetaclassTypeIncompatible => class_name => $self->name,
                                 superclass_name => $superclass_name,
                                 metaclass_type  => $metaclass_type
                               );
    }
}
sub _single_metaclass_is_compatible {
my $self = shift;
my ( $metaclass_type, $superclass_name ) = @_;
my $super_meta = Class::MOP::get_metaclass_by_name($superclass_name)
|| return 1;
# for instance, Moose::Meta::Class has a error_class attribute, but
# Class::MOP::Class doesn't - this shouldn't be an error
return 1 unless $super_meta->can($metaclass_type);
# for instance, Moose::Meta::Class has a destructor_class, but
# Class::MOP::Class doesn't - this shouldn't be an error
return 1 unless defined $super_meta->$metaclass_type;
# if metaclass is defined in superclass but not here, it's not compatible
# this is a really odd case
return 0 unless defined $self->$metaclass_type;
return $self->$metaclass_type->_is_compatible_with($super_meta->$metaclass_type);
}
sub _fix_metaclass_incompatibility {
my $self = shift;
my @supers = map { Class::MOP::Class->initialize($_) } @_;
my $necessary = 0;
for my $super (@supers) {
$necessary = 1
if $self->_can_fix_metaclass_incompatibility($super);
}
return unless $necessary;
for my $super (@supers) {
if (!$self->_class_metaclass_is_compatible($super->name)) {
$self->_fix_class_metaclass_incompatibility($super);
}
}
my %base_metaclass = $self->_base_metaclasses;
for my $metaclass_type (keys %base_metaclass) {
for my $super (@supers) {
if (!$self->_single_metaclass_is_compatible($metaclass_type, $super->name)) {
$self->_fix_single_metaclass_incompatibility(
$metaclass_type, $super
);
}
}
}
}
sub _can_fix_metaclass_incompatibility {
    my ( $self, $super_meta ) = @_;

    # Fixable when the class metaclass itself can be made compatible...
    return 1 if $self->_class_metaclass_can_be_made_compatible($super_meta);

    # ...or when any single metaclass type (attribute, method, ...) can.
    my %types = $self->_base_metaclasses;
    foreach my $type ( keys %types ) {
        return 1
            if $self->_single_metaclass_can_be_made_compatible( $super_meta, $type );
    }

    return;
}
sub _class_metaclass_can_be_made_compatible {
    my ( $self, $super_meta ) = @_;

    # Delegate to the generic check, comparing against the superclass
    # metaclass's real (mutable) package name.
    my $super_meta_name = $super_meta->_real_ref_name;
    return $self->_can_be_made_compatible_with($super_meta_name);
}
sub _single_metaclass_can_be_made_compatible {
my $self = shift;
my ($super_meta, $metaclass_type) = @_;
my $specific_meta = $self->$metaclass_type;
return unless $super_meta->can($metaclass_type);
my $super_specific_meta = $super_meta->$metaclass_type;
# for instance, Moose::Meta::Class has a destructor_class, but
# Class::MOP::Class doesn't - this shouldn't be an error
return unless defined $super_specific_meta;
# if metaclass is defined in superclass but not here, it's fixable
# this is a really odd case
return 1 unless defined $specific_meta;
return 1 if $specific_meta->_can_be_made_compatible_with($super_specific_meta);
}
sub _fix_class_metaclass_incompatibility {
my $self = shift;
my ( $super_meta ) = @_;
if ($self->_class_metaclass_can_be_made_compatible($super_meta)) {
($self->is_pristine)
|| $self->_throw_exception( CannotFixMetaclassCompatibility => class_name => $self->name,
superclass => $super_meta
);
my $super_meta_name = $super_meta->_real_ref_name;
$self->_make_compatible_with($super_meta_name);
}
}
sub _fix_single_metaclass_incompatibility {
my $self = shift;
my ( $metaclass_type, $super_meta ) = @_;
if ($self->_single_metaclass_can_be_made_compatible($super_meta, $metaclass_type)) {
($self->is_pristine)
|| $self->_throw_exception( CannotFixMetaclassCompatibility => class_name => $self->name,
superclass => $super_meta,
metaclass_type => $metaclass_type
);
my $new_metaclass = $self->$metaclass_type
? $self->$metaclass_type->_get_compatible_metaclass($super_meta->$metaclass_type)
: $super_meta->$metaclass_type;
$self->{$metaclass_type} = $new_metaclass;
}
}
sub _restore_metaobjects_from {
my $self = shift;
my ($old_meta) = @_;
$self->_restore_metamethods_from($old_meta);
$self->_restore_metaattributes_from($old_meta);
}
sub _remove_generated_metaobjects {
my $self = shift;
for my $attr (map { $self->get_attribute($_) } $self->get_attribute_list) {
$attr->remove_accessors;
}
}
# creating classes with MOP ...
sub create {
my $class = shift;
my @args = @_;
unshift @args, 'package' if @args % 2 == 1;
my %options = @args;
(ref $options{superclasses} eq 'ARRAY')
|| __PACKAGE__->_throw_exception( CreateMOPClassTakesArrayRefOfSuperclasses => class => $class,
params => \%options
)
if exists $options{superclasses};
(ref $options{attributes} eq 'ARRAY')
|| __PACKAGE__->_throw_exception( CreateMOPClassTakesArrayRefOfAttributes => class => $class,
params => \%options
)
if exists $options{attributes};
(ref $options{methods} eq 'HASH')
|| __PACKAGE__->_throw_exception( CreateMOPClassTakesHashRefOfMethods => class => $class,
params => \%options
)
if exists $options{methods};
my $package = delete $options{package};
my $superclasses = delete $options{superclasses};
my $attributes = delete $options{attributes};
my $methods = delete $options{methods};
my $meta_name = exists $options{meta_name}
? delete $options{meta_name}
: 'meta';
my $meta = $class->SUPER::create($package => %options);
$meta->_add_meta_method($meta_name)
if defined $meta_name;
$meta->superclasses(@{$superclasses})
if defined $superclasses;
# NOTE:
# process attributes first, so that they can
# install accessors, but locally defined methods
# can then overwrite them. It is maybe a little odd, but
# I think this should be the order of things.
if (defined $attributes) {
foreach my $attr (@{$attributes}) {
$meta->add_attribute($attr);
}
}
if (defined $methods) {
foreach my $method_name (keys %{$methods}) {
$meta->add_method($method_name, $methods->{$method_name});
}
}
return $meta;
}
# XXX: something more intelligent here?
sub _anon_package_prefix { 'Class::MOP::Class::__ANON__::SERIAL::' }
sub create_anon_class { shift->create_anon(@_) }
sub is_anon_class { shift->is_anon(@_) }
sub _anon_cache_key {
    my ( $class, %options ) = @_;

    # Cache key for anonymous classes: the sorted superclass names joined
    # with '|' (e.g. "Super::Class|Super::Class::2"). The outer '='-join
    # has a single component here; subclasses may contribute more.
    my @supers = sort @{ $options{superclasses} || [] };
    return join '=', join( '|', @supers );
}
# Instance Construction & Cloning
sub new_object {
my $class = shift;
# NOTE:
# we need to protect the integrity of the
# Class::MOP::Class singletons here, so we
# delegate this to &construct_class_instance
# which will deal with the singletons
return $class->_construct_class_instance(@_)
if $class->name->isa('Class::MOP::Class');
return $class->_construct_instance(@_);
}
# Construct an instance of this metaclass's package: either adopt a
# pre-blessed object passed as __INSTANCE__ (foreign inheritance support)
# or create a fresh structure via the meta-instance, then initialize every
# attribute slot from %params.
sub _construct_instance {
    my $class = shift;
    my $params = @_ == 1 ? $_[0] : {@_};
    my $meta_instance = $class->get_meta_instance();
    # FIXME:
    # the code below is almost certainly incorrect
    # but this is foreign inheritance, so we might
    # have to kludge it in the end.
    my $instance;
    if (my $instance_class = blessed($params->{__INSTANCE__})) {
        # __INSTANCE__ must already be blessed into exactly this class.
        ($instance_class eq $class->name)
            || $class->_throw_exception( InstanceBlessedIntoWrongClass => class_name => $class->name,
                                         params => $params,
                                         instance => $params->{__INSTANCE__}
                                       );
        $instance = $params->{__INSTANCE__};
    }
    elsif (exists $params->{__INSTANCE__}) {
        # __INSTANCE__ was supplied but is not a blessed reference.
        $class->_throw_exception( InstanceMustBeABlessedReference => class_name => $class->name,
                                  params => $params,
                                  instance => $params->{__INSTANCE__}
                                );
    }
    else {
        $instance = $meta_instance->create_instance();
    }
    foreach my $attr ($class->get_all_attributes()) {
        $attr->initialize_instance_slot($meta_instance, $instance, $params);
    }
    # Weakly-registered (anon) metaclasses are kept alive by a slot in
    # each of their instances.
    if (Class::MOP::metaclass_is_weak($class->name)) {
        $meta_instance->_set_mop_slot($instance, $class);
    }
    return $instance;
}
sub _inline_new_object {
my $self = shift;
return (
'my $class = shift;',
'$class = Scalar::Util::blessed($class) || $class;',
$self->_inline_fallback_constructor('$class'),
$self->_inline_params('$params', '$class'),
$self->_inline_generate_instance('$instance', '$class'),
$self->_inline_slot_initializers,
$self->_inline_preserve_weak_metaclasses,
$self->_inline_extra_init,
'return $instance',
);
}
sub _inline_fallback_constructor {
my $self = shift;
my ($class) = @_;
return (
'return ' . $self->_generate_fallback_constructor($class),
'if ' . $class . ' ne \'' . $self->name . '\';',
);
}
# Return (as a string of Perl code) a constructor call that falls back to
# the non-inlined code path; used by inlined constructors when new() is
# invoked on a package other than the one the constructor was inlined for.
# $class is the name of a variable in the generated code, e.g. '$class'.
sub _generate_fallback_constructor {
    my $self = shift;
    my ($class) = @_;
    # NOTE: the original terminated this statement with a comma operator
    # ("...)', ") rather than a semicolon; fixed, behavior unchanged.
    return 'Class::MOP::Class->initialize(' . $class . ')->new_object(@_)';
}
sub _inline_params {
    my ( $self, $params, $class ) = @_;

    # Emit code that gathers constructor arguments into $params: either a
    # single hashref argument or a flat key/value list. ($class is unused
    # here but kept for signature parity with the other _inline_* helpers.)
    return "my $params = \@_ == 1 ? \$_[0] : {\@_};";
}
sub _inline_generate_instance {
my $self = shift;
my ($inst, $class) = @_;
return (
'my ' . $inst . ' = ' . $self->_inline_create_instance($class) . ';',
);
}
sub _inline_create_instance {
my $self = shift;
return $self->get_meta_instance->inline_create_instance(@_);
}
sub _inline_slot_initializers {
my $self = shift;
my $idx = 0;
return map { $self->_inline_slot_initializer($_, $idx++) }
sort { $a->name cmp $b->name } $self->get_all_attributes;
}
sub _inline_slot_initializer {
my $self = shift;
my ($attr, $idx) = @_;
if (defined(my $init_arg = $attr->init_arg)) {
my @source = (
'if (exists $params->{\'' . $init_arg . '\'}) {',
$self->_inline_init_attr_from_constructor($attr, $idx),
'}',
);
if (my @default = $self->_inline_init_attr_from_default($attr, $idx)) {
push @source, (
'else {',
@default,
'}',
);
}
return @source;
}
elsif (my @default = $self->_inline_init_attr_from_default($attr, $idx)) {
return (
'{',
@default,
'}',
);
}
else {
return ();
}
}
sub _inline_init_attr_from_constructor {
my $self = shift;
my ($attr, $idx) = @_;
my @initial_value = $attr->_inline_set_value(
'$instance', '$params->{\'' . $attr->init_arg . '\'}',
);
push @initial_value, (
'$attrs->[' . $idx . ']->set_initial_value(',
'$instance,',
$attr->_inline_instance_get('$instance'),
');',
) if $attr->has_initializer;
return @initial_value;
}
sub _inline_init_attr_from_default {
my $self = shift;
my ($attr, $idx) = @_;
my $default = $self->_inline_default_value($attr, $idx);
return unless $default;
my @initial_value = $attr->_inline_set_value('$instance', $default);
push @initial_value, (
'$attrs->[' . $idx . ']->set_initial_value(',
'$instance,',
$attr->_inline_instance_get('$instance'),
');',
) if $attr->has_initializer;
return @initial_value;
}
sub _inline_default_value {
my $self = shift;
my ($attr, $index) = @_;
if ($attr->has_default) {
# NOTE:
# default values can either be CODE refs
# in which case we need to call them. Or
# they can be scalars (strings/numbers)
# in which case we can just deal with them
# in the code we eval.
if ($attr->is_default_a_coderef) {
return '$defaults->[' . $index . ']->($instance)';
}
else {
return '$defaults->[' . $index . ']';
}
}
elsif ($attr->has_builder) {
return '$instance->' . $attr->builder;
}
else {
return;
}
}
sub _inline_preserve_weak_metaclasses {
my $self = shift;
if (Class::MOP::metaclass_is_weak($self->name)) {
return (
$self->_inline_set_mop_slot(
'$instance', 'Class::MOP::class_of($class)'
) . ';'
);
}
else {
return ();
}
}
sub _inline_extra_init { }
sub _eval_environment {
    my $self = shift;

    # Capture each attribute's default, ordered by attribute name to match
    # the index order used by _inline_slot_initializers, so inlined code
    # can refer to $defaults->[$idx].
    my @sorted_attrs
        = sort { $a->name cmp $b->name } $self->get_all_attributes;

    my $defaults = [ map { $_->default } @sorted_attrs ];

    return { '$defaults' => \$defaults };
}
sub get_meta_instance {
my $self = shift;
$self->{'_meta_instance'} ||= $self->_create_meta_instance();
}
sub _create_meta_instance {
my $self = shift;
my $instance = $self->instance_metaclass->new(
associated_metaclass => $self,
attributes => [ $self->get_all_attributes() ],
);
$self->add_meta_instance_dependencies()
if $instance->is_dependent_on_superclasses();
return $instance;
}
# TODO: this is actually not being used!
sub _inline_rebless_instance {
my $self = shift;
return $self->get_meta_instance->inline_rebless_instance_structure(@_);
}
sub _inline_get_mop_slot {
my $self = shift;
return $self->get_meta_instance->_inline_get_mop_slot(@_);
}
sub _inline_set_mop_slot {
my $self = shift;
return $self->get_meta_instance->_inline_set_mop_slot(@_);
}
sub _inline_clear_mop_slot {
my $self = shift;
return $self->get_meta_instance->_inline_clear_mop_slot(@_);
}
sub clone_object {
my $class = shift;
my $instance = shift;
(blessed($instance) && $instance->isa($class->name))
|| $class->_throw_exception( CloneObjectExpectsAnInstanceOfMetaclass => class_name => $class->name,
instance => $instance,
);
# NOTE:
# we need to protect the integrity of the
# Class::MOP::Class singletons here, they
# should not be cloned.
return $instance if $instance->isa('Class::MOP::Class');
$class->_clone_instance($instance, @_);
}
sub _clone_instance {
my ($class, $instance, %params) = @_;
(blessed($instance))
|| $class->_throw_exception( OnlyInstancesCanBeCloned => class_name => $class->name,
instance => $instance,
params => \%params
);
my $meta_instance = $class->get_meta_instance();
my $clone = $meta_instance->clone_instance($instance);
foreach my $attr ($class->get_all_attributes()) {
if ( defined( my $init_arg = $attr->init_arg ) ) {
if (exists $params{$init_arg}) {
$attr->set_value($clone, $params{$init_arg});
}
}
}
return $clone;
}
# Rebless $instance into this metaclass's package without any hierarchy
# checks, giving the old metaclass a chance to react and fixing up
# attribute slots afterwards. Callers (rebless_instance*) perform the
# isa() validation before delegating here.
sub _force_rebless_instance {
    my ($self, $instance, %params) = @_;
    my $old_metaclass = Class::MOP::class_of($instance);
    # Hook: let the old metaclass observe/react to the instance leaving.
    $old_metaclass->rebless_instance_away($instance, $self, %params)
        if $old_metaclass;
    my $meta_instance = $self->get_meta_instance;
    # NOTE(review): this assumes $old_metaclass is defined — the guard two
    # lines up implies it may not be; confirm callers guarantee it.
    if (Class::MOP::metaclass_is_weak($old_metaclass->name)) {
        $meta_instance->_clear_mop_slot($instance);
    }
    # rebless!
    # we use $_[1] here because of t/cmop/rebless_overload.t regressions
    # on 5.8.8
    $meta_instance->rebless_instance_structure($_[1], $self);
    $self->_fixup_attributes_after_rebless($instance, $old_metaclass, %params);
    # Keep weak (anon) metaclasses alive via a slot on the instance.
    if (Class::MOP::metaclass_is_weak($self->name)) {
        $meta_instance->_set_mop_slot($instance, $self);
    }
}
sub rebless_instance {
my ($self, $instance, %params) = @_;
my $old_metaclass = Class::MOP::class_of($instance);
my $old_class = $old_metaclass ? $old_metaclass->name : blessed($instance);
$self->name->isa($old_class)
|| $self->_throw_exception( CanReblessOnlyIntoASubclass => class_name => $self->name,
instance => $instance,
instance_class => blessed( $instance ),
params => \%params,
);
$self->_force_rebless_instance($_[1], %params);
return $instance;
}
sub rebless_instance_back {
my ( $self, $instance ) = @_;
my $old_metaclass = Class::MOP::class_of($instance);
my $old_class
= $old_metaclass ? $old_metaclass->name : blessed($instance);
$old_class->isa( $self->name )
|| $self->_throw_exception( CanReblessOnlyIntoASuperclass => class_name => $self->name,
instance => $instance,
instance_class => blessed( $instance ),
);
$self->_force_rebless_instance($_[1]);
return $instance;
}
sub rebless_instance_away {
# this intentionally does nothing, it is just a hook
}
# After reblessing, reconcile attribute storage: drop slots belonging only
# to the old class, preserve values shared by both classes, and (re-)run
# slot initialization for every attribute of the new class.
sub _fixup_attributes_after_rebless {
    my $self = shift;
    my ($instance, $rebless_from, %params) = @_;
    my $meta_instance = $self->get_meta_instance;
    # Remove slots for attributes the new class does not know about.
    for my $attr ( $rebless_from->get_all_attributes ) {
        next if $self->find_attribute_by_name( $attr->name );
        $meta_instance->deinitialize_slot( $instance, $_ ) for $attr->slots;
    }
    # Carry existing values forward: via %params when the attribute has an
    # init_arg (unless the caller already supplied one), or by re-setting
    # the value directly when it does not.
    foreach my $attr ( $self->get_all_attributes ) {
        if ( $attr->has_value($instance) ) {
            if ( defined( my $init_arg = $attr->init_arg ) ) {
                $params{$init_arg} = $attr->get_value($instance)
                    unless exists $params{$init_arg};
            }
            else {
                $attr->set_value($instance, $attr->get_value($instance));
            }
        }
    }
    # Now initialize every slot as a normal construction would.
    foreach my $attr ($self->get_all_attributes) {
        $attr->initialize_instance_slot($meta_instance, $instance, \%params);
    }
}
sub _attach_attribute {
my ($self, $attribute) = @_;
$attribute->attach_to_class($self);
}
sub _post_add_attribute {
my ( $self, $attribute ) = @_;
$self->invalidate_meta_instances;
# invalidate package flag here
try {
local $SIG{__DIE__};
$attribute->install_accessors;
}
catch {
$self->remove_attribute( $attribute->name );
die $_;
};
}
# Remove a (local) attribute from this class. Returns the removed
# attribute object, or nothing when no such attribute existed.
sub remove_attribute {
    my $self = shift;
    # The actual removal happens in Class::MOP::Mixin::HasAttributes.
    my $removed_attribute = $self->SUPER::remove_attribute(@_)
        or return;
    # Cached meta-instances embed the attribute layout, so they must be
    # invalidated, and the attribute's generated accessors torn down
    # before it is detached from this class.
    $self->invalidate_meta_instances;
    $removed_attribute->remove_accessors;
    $removed_attribute->detach_from_class;
    # NOTE: fixed garbled "return$removed_attribute;" (missing space).
    return $removed_attribute;
}
sub find_attribute_by_name {
    my ( $self, $attr_name ) = @_;

    # Walk the MRO and return the first attribute with this name, so the
    # most-derived definition wins; returns nothing if no class in the
    # hierarchy defines it.
    for my $class_name ( $self->linearized_isa ) {
        my $class_meta = Class::MOP::Class->initialize($class_name);
        if ( $class_meta->has_attribute($attr_name) ) {
            return $class_meta->get_attribute($attr_name);
        }
    }

    return;
}
sub get_all_attributes {
    my $self = shift;

    # Merge attribute maps from least- to most-derived class (hence the
    # reversed MRO) so subclass definitions shadow superclass ones.
    # Returns the attribute objects in no particular order.
    my %merged;
    for my $class_name ( reverse $self->linearized_isa ) {
        my $map = Class::MOP::Class->initialize($class_name)->_attribute_map;
        %merged = ( %merged, %{$map} );
    }

    return values %merged;
}
# Inheritance
# Accessor for this class's @ISA. With arguments, replaces the superclass
# list (triggering cycle detection, metaclass-compatibility checks and
# meta-instance invalidation); always returns the current superclasses.
sub superclasses {
    my $self     = shift;
    my $isa = $self->get_or_add_package_symbol('@ISA');
    if (@_) {
        my @supers = @_;
        @{$isa} = @supers;
        # NOTE:
        # on 5.8 and below, we need to call
        # a method to get Perl to detect
        # a cycle in the class hierarchy
        my $class = $self->name;
        $class->isa($class);
        # NOTE:
        # we need to check the metaclass
        # compatibility here so that we can
        # be sure that the superclass is
        # not potentially creating an issues
        # we don't know about
        $self->_check_metaclass_compatibility();
        $self->_superclasses_updated();
    }
    return @{$isa};
}
sub _superclasses_updated {
my $self = shift;
$self->update_meta_instance_dependencies();
# keep strong references to all our parents, so they don't disappear if
# they are anon classes and don't have any direct instances
$self->_superclass_metas(
map { Class::MOP::class_of($_) } $self->superclasses
);
}
sub _superclass_metas {
my $self = shift;
$self->{_superclass_metas} = [@_];
}
sub subclasses {
my $self = shift;
my $super_class = $self->name;
return @{ $super_class->mro::get_isarev() };
}
sub direct_subclasses {
    my $self = shift;
    my $name = $self->name;

    # Of all (transitive) subclasses, keep only those that list this class
    # directly among their own superclasses.
    my @direct;
    for my $subclass ( $self->subclasses ) {
        my @parents = Class::MOP::Class->initialize($subclass)->superclasses;
        push @direct, $subclass if grep { $_ eq $name } @parents;
    }

    return @direct;
}
sub linearized_isa {
return @{ mro::get_linear_isa( (shift)->name ) };
}
# Return the full class precedence list for this class. Unlike
# linearized_isa, under dfs mro this preserves duplicate entries, hence
# the manual recursion in the else-branch.
sub class_precedence_list {
    my $self = shift;
    my $name = $self->name;
    unless (Class::MOP::IS_RUNNING_ON_5_10()) {
        # NOTE:
        # We need to check for circular inheritance here
        # if we are not on 5.10, cause 5.8 detects it late.
        # This will do nothing if all is well, and blow up
        # otherwise. Yes, it's an ugly hack, better
        # suggestions are welcome.
        # - SL
        ($name || return)->isa('This is a test for circular inheritance')
    }
    # if our mro is c3, we can
    # just grab the linear_isa
    if (mro::get_mro($name) eq 'c3') {
        return @{ mro::get_linear_isa($name) }
    }
    else {
        # NOTE:
        # we can't grab the linear_isa for dfs
        # since it has all the duplicates
        # already removed.
        return (
            $name,
            map {
                Class::MOP::Class->initialize($_)->class_precedence_list()
            } $self->superclasses()
        );
    }
}
sub _method_lookup_order {
return (shift->linearized_isa, 'UNIVERSAL');
}
## Methods
{
my $fetch_and_prepare_method = sub {
my ($self, $method_name) = @_;
my $wrapped_metaclass = $self->wrapped_method_metaclass;
# fetch it locally
my $method = $self->get_method($method_name);
# if we don't have local ...
unless ($method) {
# try to find the next method
$method = $self->find_next_method_by_name($method_name);
# die if it does not exist
(defined $method)
|| $self->_throw_exception( MethodNameNotFoundInInheritanceHierarchy => class_name => $self->name,
method_name => $method_name
);
# and now make sure to wrap it
# even if it is already wrapped
# because we need a new sub ref
$method = $wrapped_metaclass->wrap($method,
package_name => $self->name,
name => $method_name,
);
}
else {
# now make sure we wrap it properly
$method = $wrapped_metaclass->wrap($method,
package_name => $self->name,
name => $method_name,
) unless $method->isa($wrapped_metaclass);
}
$self->add_method($method_name => $method);
return $method;
};
sub add_before_method_modifier {
my ($self, $method_name, $method_modifier) = @_;
(defined $method_name && length $method_name)
|| $self->_throw_exception( MethodModifierNeedsMethodName => class_name => $self->name );
my $method = $fetch_and_prepare_method->($self, $method_name);
$method->add_before_modifier(
subname(':before' => $method_modifier)
);
}
sub add_after_method_modifier {
my ($self, $method_name, $method_modifier) = @_;
(defined $method_name && length $method_name)
|| $self->_throw_exception( MethodModifierNeedsMethodName => class_name => $self->name );
my $method = $fetch_and_prepare_method->($self, $method_name);
$method->add_after_modifier(
subname(':after' => $method_modifier)
);
}
sub add_around_method_modifier {
my ($self, $method_name, $method_modifier) = @_;
(defined $method_name && length $method_name)
|| $self->_throw_exception( MethodModifierNeedsMethodName => class_name => $self->name );
my $method = $fetch_and_prepare_method->($self, $method_name);
$method->add_around_modifier(
subname(':around' => $method_modifier)
);
}
# NOTE:
# the methods above used to be named like this:
# ${pkg}::${method}:(before|after|around)
# but this proved problematic when using one modifier
# to wrap multiple methods (something which is likely
# to happen pretty regularly IMO). So instead of naming
# it like this, I have chosen to just name them purely
# with their modifier names, like so:
# :(before|after|around)
# The fact is that in a stack trace, it will be fairly
# evident from the context what method they are attached
# to, and so don't need the fully qualified name.
}
sub find_method_by_name {
my ($self, $method_name) = @_;
(defined $method_name && length $method_name)
|| $self->_throw_exception( MethodNameNotGiven => class_name => $self->name );
foreach my $class ($self->_method_lookup_order) {
my $method = Class::MOP::Class->initialize($class)->get_method($method_name);
return $method if defined $method;
}
return;
}
sub get_all_methods {
my $self = shift;
my %methods;
for my $class ( reverse $self->_method_lookup_order ) {
my $meta = Class::MOP::Class->initialize($class);
$methods{ $_->name } = $_ for $meta->_get_local_methods;
}
return values %methods;
}
sub get_all_method_names {
    my $self = shift;

    # The name of every method visible on this class, local or inherited.
    my @names;
    push @names, $_->name for $self->get_all_methods;
    return @names;
}
sub find_all_methods_by_name {
my ($self, $method_name) = @_;
(defined $method_name && length $method_name)
|| $self->_throw_exception( MethodNameNotGiven => class_name => $self->name );
my @methods;
foreach my $class ($self->_method_lookup_order) {
# fetch the meta-class ...
my $meta = Class::MOP::Class->initialize($class);
push @methods => {
name => $method_name,
class => $class,
code => $meta->get_method($method_name)
} if $meta->has_method($method_name);
}
return @methods;
}
sub find_next_method_by_name {
    my ( $self, $method_name ) = @_;

    # A usable method name is required.
    $self->_throw_exception( MethodNameNotGiven => class_name => $self->name )
        unless defined $method_name && length $method_name;

    # Search the lookup order *after* this class itself and return the
    # first match -- i.e. the method a SUPER::-style call would reach.
    my ( undef, @ancestors ) = $self->_method_lookup_order;
    for my $class_name (@ancestors) {
        my $found = Class::MOP::Class->initialize($class_name)
                                     ->get_method($method_name);
        return $found if defined $found;
    }

    return;
}
sub update_meta_instance_dependencies {
my $self = shift;
if ( $self->{meta_instance_dependencies} ) {
return $self->add_meta_instance_dependencies;
}
}
sub add_meta_instance_dependencies {
my $self = shift;
$self->remove_meta_instance_dependencies;
my @attrs = $self->get_all_attributes();
my %seen;
my @classes = grep { not $seen{ $_->name }++ }
map { $_->associated_class } @attrs;
foreach my $class (@classes) {
$class->add_dependent_meta_instance($self);
}
$self->{meta_instance_dependencies} = \@classes;
}
sub remove_meta_instance_dependencies {
    my $self = shift;

    # Detach this metaclass from every class it was registered with as a
    # dependent. Returns the arrayref of former dependencies, or nothing
    # when none were recorded.
    my $classes = delete $self->{meta_instance_dependencies}
        or return;

    $_->remove_dependent_meta_instance($self) for @{$classes};

    return $classes;
}
sub add_dependent_meta_instance {
my ( $self, $metaclass ) = @_;
push @{ $self->{dependent_meta_instances} }, $metaclass;
}
# Drop $metaclass (matched by name) from this class's list of dependent
# meta-instances.
sub remove_dependent_meta_instance {
    my ( $self, $metaclass ) = @_;
    my $name = $metaclass->name;
    # The 'for' aliases $_ to the arrayref itself so @$_ can be rewritten
    # in place; the inner grep's $_ shadows it with each element.
    @$_ = grep { $_->name ne $name } @$_
        for $self->{dependent_meta_instances};
}
sub invalidate_meta_instances {
my $self = shift;
$_->invalidate_meta_instance()
for $self, @{ $self->{dependent_meta_instances} };
}
sub invalidate_meta_instance {
my $self = shift;
undef $self->{_meta_instance};
}
# check if we can reinitialize
sub is_pristine {
my $self = shift;
# if any local attr is defined
return if $self->get_attribute_list;
# or any non-declared methods
for my $method ( map { $self->get_method($_) } $self->get_method_list ) {
return if $method->isa("Class::MOP::Method::Generated");
# FIXME do we need to enforce this too? return unless $method->isa( $self->method_metaclass );
}
return 1;
}
## Class closing
sub is_mutable { 1 }
sub is_immutable { 0 }
sub immutable_options { %{ $_[0]{__immutable}{options} || {} } }
sub _immutable_options {
    my ( $self, @overrides ) = @_;

    # Built-in defaults for make_immutable; caller-supplied pairs are
    # appended last so they take precedence when flattened into a hash.
    my @defaults = (
        inline_accessors   => 1,
        inline_constructor => 1,
        inline_destructor  => 0,
        debug              => 0,
        immutable_trait    => $self->immutable_trait,
        constructor_name   => $self->constructor_name,
        constructor_class  => $self->constructor_class,
        destructor_class   => $self->destructor_class,
    );

    return ( @defaults, @overrides );
}
sub make_immutable {
    # Freeze the class: record the immutabilization options, install any
    # inlined code, and rebless $self into its immutable metaclass.
    # Returns $self (a true value); a no-op on an already-immutable class.
    my ( $self, @args ) = @_;

    return $self unless $self->is_mutable;

    # Record where the immutabilization was requested from; caller()
    # must be invoked here (not in a helper) to see our caller's frame.
    my ( undef, $file, $line ) = caller;

    $self->_initialize_immutable(
        file => $file,
        line => $line,
        $self->_immutable_options(@args),
    );
    $self->_rebless_as_immutable(@args);

    return $self;
}
sub make_mutable {
    # Reverse make_immutable: rebless back to the mutable metaclass,
    # remove any inlined methods, and discard the immutability record.
    # Returns $self on success, or an empty list/undef when the class
    # was not immutable to begin with.
    my $self = shift;

    return unless $self->is_immutable;

    # Capture the options used at immutabilization time before tearing
    # the immutable state down; they describe what needs undoing.
    my @immutable_args = $self->immutable_options;

    $self->_rebless_as_mutable();
    $self->_remove_inlined_code(@immutable_args);
    delete $self->{__immutable};

    return $self;
}
sub _rebless_as_immutable {
    # Swap $self into its immutable metaclass, first remembering the
    # original package so it can be restored by _rebless_as_mutable.
    my ( $self, @args ) = @_;

    my $mutable_class = ref $self;
    $self->{__immutable}{original_class} = $mutable_class;

    my $immutable_class = $self->_immutable_metaclass(@args);
    return bless $self, $immutable_class;
}
# Determine (and, if necessary, create) the package that $self should be
# blessed into when made immutable. Honors an explicit
# 'immutable_metaclass' option, otherwise derives a class name from the
# immutable trait and builds it on demand.
sub _immutable_metaclass {
    my ( $self, %args ) = @_;
    # An explicitly supplied metaclass short-circuits everything else.
    if ( my $class = $args{immutable_metaclass} ) {
        return $class;
    }
    # A trait is mandatory; note this also stores the resolved trait
    # back into %args for the exception's params.
    my $trait = $args{immutable_trait} = $self->immutable_trait
        || $self->_throw_exception( NoImmutableTraitSpecifiedForClass => class_name => $self->name,
                                       params     => \%args
                          );
    my $meta = $self->meta;
    my $meta_attr = $meta->find_attribute_by_name("immutable_trait");
    my $class_name;
    if ( $meta_attr and $trait eq $meta_attr->default ) {
        # if the trait is the same as the default we try and pick a
        # predictable name for the immutable metaclass
        $class_name = 'Class::MOP::Class::Immutable::' . ref($self);
    }
    else {
        # Custom trait: encode both the trait and the mutable metaclass
        # into the generated package name to keep it unique.
        $class_name = join '::', 'Class::MOP::Class::Immutable::CustomTrait',
            $trait, 'ForMetaClass', ref($self);
    }
    # Reuse the package if a previous call already built it.
    return $class_name
        if Class::MOP::does_metaclass_exist($class_name);
    # If the metaclass is a subclass of CMOP::Class which has had
    # metaclass roles applied (via Moose), then we want to make sure
    # that we preserve that anonymous class (see Fey::ORM for an
    # example of where this matters).
    my $meta_name = $meta->_real_ref_name;
    my $immutable_meta = $meta_name->create(
        $class_name,
        superclasses => [ ref $self ],
    );
    Class::MOP::MiniTrait::apply( $immutable_meta, $trait );
    # The generated metaclass is itself frozen, but without inlining
    # (it only exists to carry the trait's method overrides).
    $immutable_meta->make_immutable(
        inline_constructor => 0,
        inline_accessors   => 0,
    );
    return $class_name;
}
sub _remove_inlined_code {
    # Strip every method that was installed by the immutabilization
    # process, then forget the bookkeeping list itself.
    my $self = shift;

    foreach my $inlined ( $self->_inlined_methods ) {
        $self->remove_method( $inlined->name );
    }

    delete $self->{__immutable}{inlined_methods};
}
# List the method objects that were installed during immutabilization
# (empty when the class has none recorded).
sub _inlined_methods {
    my $self = shift;
    my $methods = $self->{__immutable}{inlined_methods} || [];
    return @{$methods};
}
sub _add_inlined_method {
    # Remember a method installed by immutabilization so that
    # _remove_inlined_code can take it out again later.
    my ( $self, $method ) = @_;
    my $inlined = $self->{__immutable}{inlined_methods} ||= [];
    push @{$inlined}, $method;
}
sub _initialize_immutable {
    # Record the immutabilization options (so immutable_options can
    # report them later) and install whatever inlined code they request.
    my ( $self, %options ) = @_;
    $self->{__immutable}{options} = \%options;
    return $self->_install_inlined_code(%options);
}
sub _install_inlined_code {
    # Dispatch to the individual inlining steps according to the boolean
    # options gathered by _immutable_options.
    my ( $self, %opts ) = @_;

    # FIXME
    if ( $opts{inline_accessors} ) {
        $self->_inline_accessors(%opts);
    }
    if ( $opts{inline_constructor} ) {
        $self->_inline_constructor(%opts);
    }
    if ( $opts{inline_destructor} ) {
        $self->_inline_destructor(%opts);
    }
}
sub _rebless_as_mutable {
    # Restore the metaclass object to its original (mutable) package.
    my ($self) = @_;
    my $mutable_class = $self->_get_mutable_metaclass_name;
    bless $self, $mutable_class;
    return $self;
}
sub _inline_accessors {
    # Reinstall every local attribute's accessor methods in inlined form
    # (the true argument to install_accessors requests inlining).
    my $self = shift;
    $self->get_attribute($_)->install_accessors(1)
        for $self->get_attribute_list;
}
# Build and (conditionally) install an inlined constructor during
# immutabilization, using the configured constructor_class.
sub _inline_constructor {
    my ( $self, %args ) = @_;
    my $name = $args{constructor_name};
    # A class may not even have a constructor, and that's okay.
    return unless defined $name;
    # Don't clobber a hand-written constructor unless explicitly told to.
    if ( $self->has_method($name) && !$args{replace_constructor} ) {
        my $class = $self->name;
        warn "Not inlining a constructor for $class since it defines"
            . " its own constructor.\n"
            . "If you are certain you don't need to inline your"
            . " constructor, specify inline_constructor => 0 in your"
            . " call to $class->meta->make_immutable\n";
        return;
    }
    my $constructor_class = $args{constructor_class};
    {
        # Localize $@ so a failed optimistic load can't leak into the
        # caller's error state.
        local $@;
        use_package_optimistically($constructor_class);
    }
    # Construct the inline constructor meta-method, tagging it with the
    # file/line of the make_immutable call for diagnostics.
    my $constructor = $constructor_class->new(
        options      => \%args,
        metaclass    => $self,
        is_inline    => 1,
        package_name => $self->name,
        name         => $name,
        definition_context => {
            description => "constructor " . $self->name . "::" . $name,
            file        => $args{file},
            line        => $args{line},
        },
    );
    # Only install it when replacement was requested or the constructor
    # object itself says inlining is safe.
    if ( $args{replace_constructor} or $constructor->can_be_inlined ) {
        $self->add_method( $name => $constructor );
        $self->_add_inlined_method($constructor);
    }
}
# Build and (conditionally) install an inlined DESTROY during
# immutabilization, using the configured destructor_class.
sub _inline_destructor {
    my ( $self, %args ) = @_;
    # A destructor class must be supplied (and defined) by the options.
    ( exists $args{destructor_class} && defined $args{destructor_class} )
        || $self->_throw_exception( NoDestructorClassSpecified => class_name => $self->name,
                                       params     => \%args,
                          );
    # Don't clobber a hand-written DESTROY unless explicitly told to.
    if ( $self->has_method('DESTROY') && ! $args{replace_destructor} ) {
        my $class = $self->name;
        warn "Not inlining a destructor for $class since it defines"
            . " its own destructor.\n";
        return;
    }
    my $destructor_class = $args{destructor_class};
    {
        # Localize $@ so a failed optimistic load can't leak into the
        # caller's error state.
        local $@;
        use_package_optimistically($destructor_class);
    }
    # Skip entirely when the destructor class decides this metaclass
    # doesn't need an inlined DESTROY.
    return unless $destructor_class->is_needed($self);
    # Construct the inline destructor meta-method, tagging it with the
    # file/line of the make_immutable call for diagnostics.
    my $destructor = $destructor_class->new(
        options      => \%args,
        metaclass    => $self,
        package_name => $self->name,
        name         => 'DESTROY',
        definition_context => {
            description => "destructor " . $self->name . "::DESTROY",
            file        => $args{file},
            line        => $args{line},
        },
    );
    # Only install it when replacement was requested or the destructor
    # object itself says inlining is safe.
    if ( $args{replace_destructor} or $destructor->can_be_inlined ) {
        $self->add_method( 'DESTROY' => $destructor );
        $self->_add_inlined_method($destructor);
    }
}
# A module must end with a true value so require() succeeds.
1;
# ABSTRACT: Class Meta Object
__END__
=pod
=encoding UTF-8
=head1 NAME
Class::MOP::Class - Class Meta Object
=head1 VERSION
version 2.2012
=head1 SYNOPSIS
# assuming that class Foo
# has been defined, you can
# use this for introspection ...
# add a method to Foo ...
Foo->meta->add_method( 'bar' => sub {...} )
# get a list of all the classes searched
# the method dispatcher in the correct order
Foo->meta->class_precedence_list()
# remove a method from Foo
Foo->meta->remove_method('bar');
# or use this to actually create classes ...
Class::MOP::Class->create(
'Bar' => (
version => '0.01',
superclasses => ['Foo'],
attributes => [
Class::MOP::Attribute->new('$bar'),
Class::MOP::Attribute->new('$baz'),
],
methods => {
calculate_bar => sub {...},
construct_baz => sub {...}
}
)
);
=head1 DESCRIPTION
The Class Protocol is the largest and most complex part of the
Class::MOP meta-object protocol. It controls the introspection and
manipulation of Perl 5 classes, and it can create them as well. The
best way to understand what this module can do is to read the
documentation for each of its methods.
=head1 INHERITANCE
C<Class::MOP::Class> is a subclass of L<Class::MOP::Module>.
=head1 METHODS
=head2 Class construction
These methods all create new C<Class::MOP::Class> objects. These
objects can represent existing classes or they can be used to create
new classes from scratch.
The metaclass object for a given class is a singleton. If you attempt
to create a metaclass for the same class twice, you will just get the
existing object.
=over 4
=item B<< Class::MOP::Class->create($package_name, %options) >>
This method creates a new C<Class::MOP::Class> object with the given
package name. It accepts a number of options:
=over 8
=item * version
An optional version number for the newly created package.
=item * authority
An optional authority for the newly created package.
See L<Class::MOP::Module/authority> for more details.
=item * superclasses
An optional array reference of superclass names.
=item * methods
An optional hash reference of methods for the class. The keys of the
hash reference are method names and values are subroutine references.
=item * attributes
An optional array reference of L<Class::MOP::Attribute> objects.
=item * meta_name
Specifies the name to install the C<meta> method for this class under.
If it is not passed, C<meta> is assumed, and if C<undef> is explicitly
given, no meta method will be installed.
=item * weaken
If true, the metaclass that is stored in the global cache will be a
weak reference.
Classes created in this way are destroyed once the metaclass they are
attached to goes out of scope, and will be removed from Perl's internal
symbol table.
All instances of a class with a weakened metaclass keep a special
reference to the metaclass object, which prevents the metaclass from
going out of scope while any instances exist.
This only works if the instance is based on a hash reference, however.
=back
=item B<< Class::MOP::Class->create_anon_class(%options) >>
This method works just like C<< Class::MOP::Class->create >> but it
creates an "anonymous" class. In fact, the class does have a name, but
that name is a unique name generated internally by this module.
It accepts the same C<superclasses>, C<methods>, and C<attributes>
parameters that C<create> accepts.
It also accepts a C<cache> option. If this is C<true>, then the anonymous class
will be cached based on its superclasses and roles. If an existing anonymous
class in the cache has the same superclasses and roles, it will be reused.
Anonymous classes default to C<< weaken => 1 >> if cache is C<false>, although
this can be overridden.
=item B<< Class::MOP::Class->initialize($package_name, %options) >>
This method will initialize a C<Class::MOP::Class> object for the
named package. Unlike C<create>, this method I<will not> create a new
class.
The purpose of this method is to retrieve a C<Class::MOP::Class>
object for introspecting an existing class.
If an existing C<Class::MOP::Class> object exists for the named
package, it will be returned, and any options provided will be
ignored!
If the object does not yet exist, it will be created.
The valid options that can be passed to this method are
C<attribute_metaclass>, C<method_metaclass>,
C<wrapped_method_metaclass>, and C<instance_metaclass>. These are all
optional, and default to the appropriate class in the C<Class::MOP>
distribution.
=back
=head2 Object instance construction and cloning
These methods are all related to creating and/or cloning object
instances.
=over 4
=item B<< $metaclass->clone_object($instance, %params) >>
This method clones an existing object instance. Any parameters you
provide will override existing attribute values in the object.
This is a convenience method for cloning an object instance, then
blessing it into the appropriate package.
You could implement a clone method in your class, using this method:
sub clone {
my ($self, %params) = @_;
$self->meta->clone_object($self, %params);
}
=item B<< $metaclass->rebless_instance($instance, %params) >>
This method changes the class of C<$instance> to the metaclass's class.
You can only rebless an instance into a subclass of its current
class. If you pass any additional parameters, these will be treated
like constructor parameters and used to initialize the object's
attributes. Any existing attributes that are already set will be
overwritten.
Before reblessing the instance, this method will call
C<rebless_instance_away> on the instance's current metaclass. This method
will be passed the instance, the new metaclass, and any parameters
specified to C<rebless_instance>. By default, C<rebless_instance_away>
does nothing; it is merely a hook.
=item B<< $metaclass->rebless_instance_back($instance) >>
Does the same thing as C<rebless_instance>, except that you can only
rebless an instance into one of its superclasses. Any attributes that
do not exist in the superclass will be deinitialized.
This is a much more dangerous operation than C<rebless_instance>,
especially when multiple inheritance is involved, so use this carefully!
=item B<< $metaclass->new_object(%params) >>
This method is used to create a new object of the metaclass's
class. Any parameters you provide are used to initialize the
instance's attributes. A special C<__INSTANCE__> key can be passed to
provide an already generated instance, rather than having Class::MOP
generate it for you. This is mostly useful for using Class::MOP with
foreign classes which generate instances using their own constructors.
=item B<< $metaclass->instance_metaclass >>
Returns the class name of the instance metaclass. See
L<Class::MOP::Instance> for more information on the instance
metaclass.
=item B<< $metaclass->get_meta_instance >>
Returns an instance of the C<instance_metaclass> to be used in the
construction of a new instance of the class.
=back
=head2 Informational predicates
These are a few predicate methods for asking information about the
class itself.
=over 4
=item B<< $metaclass->is_anon_class >>
This returns true if the class was created by calling C<<
Class::MOP::Class->create_anon_class >>.
=item B<< $metaclass->is_mutable >>
This returns true if the class is still mutable.
=item B<< $metaclass->is_immutable >>
This returns true if the class has been made immutable.
=item B<< $metaclass->is_pristine >>
A class is I<not> pristine if it has non-inherited attributes or if it
has any generated methods.
=back
=head2 Inheritance Relationships
=over 4
=item B<< $metaclass->superclasses(@superclasses) >>
This is a read-write accessor which represents the superclass
relationships of the metaclass's class.
This is basically sugar around getting and setting C<@ISA>.
=item B<< $metaclass->class_precedence_list >>
This returns a list of all of the class's ancestor classes. The
classes are returned in method dispatch order.
=item B<< $metaclass->linearized_isa >>
This returns a list based on C<class_precedence_list> but with all
duplicates removed.
=item B<< $metaclass->subclasses >>
This returns a list of all subclasses for this class, even indirect
subclasses.
=item B<< $metaclass->direct_subclasses >>
This returns a list of immediate subclasses for this class, which does not
include indirect subclasses.
=back
=head2 Method introspection and creation
These methods allow you to introspect a class's methods, as well as
add, remove, or change methods.
Determining what is truly a method in a Perl 5 class requires some
heuristics (aka guessing).
Methods defined outside the package with a fully qualified name (C<sub
Package::name { ... }>) will be included. Similarly, methods named
with a fully qualified name using L<Sub::Name> are also included.
However, we attempt to ignore imported functions.
Ultimately, we are using heuristics to determine what truly is a
method in a class, and these heuristics may get the wrong answer in
some edge cases. However, for most "normal" cases the heuristics work
correctly.
=over 4
=item B<< $metaclass->get_method($method_name) >>
This will return a L<Class::MOP::Method> for the specified
C<$method_name>. If the class does not have the specified method, it
returns C<undef>.
=item B<< $metaclass->has_method($method_name) >>
Returns a boolean indicating whether or not the class defines the
named method. It does not include methods inherited from parent
classes.
=item B<< $metaclass->get_method_list >>
This will return a list of method I<names> for all methods defined in
this class.
=item B<< $metaclass->add_method($method_name, $method) >>
This method takes a method name and a subroutine reference, and adds
the method to the class.
The subroutine reference can be a L<Class::MOP::Method>, and you are
strongly encouraged to pass a meta method object instead of a code
reference. If you do so, that object gets stored as part of the
class's method map directly. If not, the meta information will have to
be recreated later, and may be incorrect.
If you provide a method object, this method will clone that object if
the object's package name does not match the class name. This lets us
track the original source of any methods added from other classes
(notably Moose roles).
=item B<< $metaclass->remove_method($method_name) >>
Remove the named method from the class. This method returns the
L<Class::MOP::Method> object for the method.
=item B<< $metaclass->method_metaclass >>
Returns the class name of the method metaclass, see
L<Class::MOP::Method> for more information on the method metaclass.
=item B<< $metaclass->wrapped_method_metaclass >>
Returns the class name of the wrapped method metaclass, see
L<Class::MOP::Method::Wrapped> for more information on the wrapped
method metaclass.
=item B<< $metaclass->get_all_methods >>
This will traverse the inheritance hierarchy and return a list of all
the L<Class::MOP::Method> objects for this class and its parents.
=item B<< $metaclass->find_method_by_name($method_name) >>
This will return a L<Class::MOP::Method> for the specified
C<$method_name>. If the class does not have the specified method, it
returns C<undef>.
Unlike C<get_method>, this method I<will> look for the named method in
superclasses.
=item B<< $metaclass->get_all_method_names >>
This will return a list of method I<names> for all of this class's
methods, including inherited methods.
=item B<< $metaclass->find_all_methods_by_name($method_name) >>
This method looks for the named method in the class and all of its
parents. It returns every matching method it finds in the inheritance
tree, so it returns a list of methods.
Each method is returned as a hash reference with three keys. The keys
are C<name>, C<class>, and C<code>. The C<code> key has a
L<Class::MOP::Method> object as its value.
The list of methods is distinct.
=item B<< $metaclass->find_next_method_by_name($method_name) >>
This method returns the first method in any superclass matching the
given name. It is effectively the method that C<SUPER::$method_name>
would dispatch to.
=back
=head2 Attribute introspection and creation
Because Perl 5 does not have a core concept of attributes in classes,
we can only return information about attributes which have been added
via this class's methods. We cannot discover information about
attributes which are defined in terms of "regular" Perl 5 methods.
=over 4
=item B<< $metaclass->get_attribute($attribute_name) >>
This will return a L<Class::MOP::Attribute> for the specified
C<$attribute_name>. If the class does not have the specified
attribute, it returns C<undef>.
NOTE that get_attribute does not search superclasses, for that you
need to use C<find_attribute_by_name>.
=item B<< $metaclass->has_attribute($attribute_name) >>
Returns a boolean indicating whether or not the class defines the
named attribute. It does not include attributes inherited from parent
classes.
=item B<< $metaclass->get_attribute_list >>
This will return a list of attributes I<names> for all attributes
defined in this class. Note that this operates on the current class
only, it does not traverse the inheritance hierarchy.
=item B<< $metaclass->get_all_attributes >>
This will traverse the inheritance hierarchy and return a list of all
the L<Class::MOP::Attribute> objects for this class and its parents.
=item B<< $metaclass->find_attribute_by_name($attribute_name) >>
This will return a L<Class::MOP::Attribute> for the specified
C<$attribute_name>. If the class does not have the specified
attribute, it returns C<undef>.
Unlike C<get_attribute>, this attribute I<will> look for the named
attribute in superclasses.
=item B<< $metaclass->add_attribute(...) >>
This method accepts either an existing L<Class::MOP::Attribute>
object or parameters suitable for passing to that class's C<new>
method.
The attribute provided will be added to the class.
Any accessor methods defined by the attribute will be added to the
class when the attribute is added.
If an attribute of the same name already exists, the old attribute
will be removed first.
=item B<< $metaclass->remove_attribute($attribute_name) >>
This will remove the named attribute from the class, and
L<Class::MOP::Attribute> object.
Removing an attribute also removes any accessor methods defined by the
attribute.
However, note that removing an attribute will only affect I<future>
object instances created for this class, not existing instances.
=item B<< $metaclass->attribute_metaclass >>
Returns the class name of the attribute metaclass for this class. By
default, this is L<Class::MOP::Attribute>.
=back
=head2 Overload introspection and creation
These methods provide an API to the core L<overload> functionality.
=over 4
=item B<< $metaclass->is_overloaded >>
Returns true if overloading is enabled for this class. Corresponds to
L<Devel::OverloadInfo/is_overloaded>.
=item B<< $metaclass->get_overloaded_operator($op) >>
Returns the L<Class::MOP::Overload> object corresponding to the operator named
C<$op>, if one exists for this class.
=item B<< $metaclass->has_overloaded_operator($op) >>
Returns whether or not the operator C<$op> is overloaded for this class.
=item B<< $metaclass->get_overload_list >>
Returns a list of operator names which have been overloaded (see
L<overload/Overloadable Operations> for the list of valid operator names).
=item B<< $metaclass->get_all_overloaded_operators >>
Returns a list of L<Class::MOP::Overload> objects corresponding to the
operators that have been overloaded.
=item B<< $metaclass->add_overloaded_operator($op, $impl) >>
Overloads the operator C<$op> for this class. The C<$impl> can be a coderef, a
method name, or a L<Class::MOP::Overload> object. Corresponds to
C<< use overload $op => $impl; >>
=item B<< $metaclass->remove_overloaded_operator($op) >>
Remove overloading for operator C<$op>. Corresponds to C<< no overload $op; >>
=item B<< $metaclass->get_overload_fallback_value >>
Returns the overload C<fallback> setting for the package.
=item B<< $metaclass->set_overload_fallback_value($fallback) >>
Sets the overload C<fallback> setting for the package.
=back
=head2 Class Immutability
Making a class immutable "freezes" the class definition. You can no
longer call methods which alter the class, such as adding or removing
methods or attributes.
Making a class immutable lets us optimize the class by inlining some
methods, and also allows us to optimize some methods on the metaclass
object itself.
After immutabilization, the metaclass object will cache most informational
methods that return information about methods or attributes. Methods which
would alter the class, such as C<add_attribute> and C<add_method>, will
throw an error on an immutable metaclass object.
The immutabilization system in L<Moose> takes much greater advantage
of the inlining features than Class::MOP itself does.
=over 4
=item B<< $metaclass->make_immutable(%options) >>
This method will create an immutable transformer and use it to make
the class and its metaclass object immutable, and returns true
(you should not rely on the details of this value apart from its truth).
This method accepts the following options:
=over 8
=item * inline_accessors
=item * inline_constructor
=item * inline_destructor
These are all booleans indicating whether the specified method(s)
should be inlined.
By default, accessors and the constructor are inlined, but not the
destructor.
=item * immutable_trait
The name of a class which will be used as a parent class for the
metaclass object being made immutable. This "trait" implements the
post-immutability functionality of the metaclass (but not the
transformation itself).
This defaults to L<Class::MOP::Class::Immutable::Trait>.
=item * constructor_name
This is the constructor method name. This defaults to "new".
=item * constructor_class
The name of the method metaclass for constructors. It will be used to
generate the inlined constructor. This defaults to
"Class::MOP::Method::Constructor".
=item * replace_constructor
This is a boolean indicating whether an existing constructor should be
replaced when inlining a constructor. This defaults to false.
=item * destructor_class
The name of the method metaclass for destructors. It will be used to
generate the inlined destructor. This defaults to
"Class::MOP::Method::Destructor".
=item * replace_destructor
This is a boolean indicating whether an existing destructor should be
replaced when inlining a destructor. This defaults to false.
=back
=item B<< $metaclass->immutable_options >>
Returns a hash of the options used when making the class immutable, including
both defaults and anything supplied by the user in the call to C<<
$metaclass->make_immutable >>. This is useful if you need to temporarily make
a class mutable and then restore immutability as it was before.
=item B<< $metaclass->make_mutable >>
Calling this method reverses the immutabilization transformation.
=back
=head2 Method Modifiers
Method modifiers are hooks which allow a method to be wrapped with
I<before>, I<after> and I<around> method modifiers. Every time a
method is called, its modifiers are also called.
A class can modify its own methods, as well as methods defined in
parent classes.
=head3 How do method modifiers work?
Method modifiers work by wrapping the original method and then
replacing it in the class's symbol table. The wrappers will handle
calling all the modifiers in the appropriate order and preserving the
calling context for the original method.
The return values of C<before> and C<after> modifiers are
ignored. This is because their purpose is B<not> to filter the input
and output of the primary method (this is done with an I<around>
modifier).
This may seem like an odd restriction to some, but doing this allows
for simple code to be added at the beginning or end of a method call
without altering the function of the wrapped method or placing any
extra responsibility on the code of the modifier.
Of course if you have more complex needs, you can use the C<around>
modifier which allows you to change both the parameters passed to the
wrapped method, as well as its return value.
Before and around modifiers are called in last-defined-first-called
order, while after modifiers are called in first-defined-first-called
order. So the call tree might look something like this:
before 2
before 1
around 2
around 1
primary
around 1
around 2
after 1
after 2
=head3 What is the performance impact?
Of course there is a performance cost associated with method
modifiers, but we have made every effort to make that cost directly
proportional to the number of modifier features you use.
The wrapping method does its best to B<only> do as much work as it
absolutely needs to. In order to do this we have moved some of the
performance costs to set-up time, where they are easier to amortize.
All this said, our benchmarks have indicated the following:
simple wrapper with no modifiers 100% slower
simple wrapper with simple before modifier 400% slower
simple wrapper with simple after modifier 450% slower
simple wrapper with simple around modifier 500-550% slower
simple wrapper with all 3 modifiers 1100% slower
These numbers may seem daunting, but you must remember, every feature
comes with some cost. To put things in perspective, just doing a
simple C<AUTOLOAD> which does nothing but extract the name of the
method called and return it costs about 400% over a normal method
call.
=over 4
=item B<< $metaclass->add_before_method_modifier($method_name, $code) >>
This wraps the specified method with the supplied subroutine
reference. The modifier will be called as a method itself, and will
receive the same arguments as are passed to the method.
When the modifier exits, the wrapped method will be called.
The return value of the modifier will be ignored.
=item B<< $metaclass->add_after_method_modifier($method_name, $code) >>
This wraps the specified method with the supplied subroutine
reference. The modifier will be called as a method itself, and will
receive the same arguments as are passed to the method.
When the wrapped methods exits, the modifier will be called.
The return value of the modifier will be ignored.
=item B<< $metaclass->add_around_method_modifier($method_name, $code) >>
This wraps the specified method with the supplied subroutine
reference.
The first argument passed to the modifier will be a subroutine
reference to the wrapped method. The second argument is the object,
and after that come any arguments passed when the method is called.
The around modifier can choose to call the original method, as well as
what arguments to pass if it does so.
The return value of the modifier is what will be seen by the caller.
=back
=head2 Introspection
=over 4
=item B<< Class::MOP::Class->meta >>
This will return a L<Class::MOP::Class> instance for this class.
It should also be noted that L<Class::MOP> will actually bootstrap
this module by installing a number of attribute meta-objects into its
metaclass.
=back
=head1 AUTHORS
=over 4
=item *
Stevan Little <stevan.little@iinteractive.com>
=item *
Dave Rolsky <autarch@urth.org>
=item *
Jesse Luehrs <doy@tozt.net>
=item *
Shawn M Moore <code@sartak.org>
=item *
יובל קוג'מן (Yuval Kogman) <nothingmuch@woobling.org>
=item *
Karen Etheridge <ether@cpan.org>
=item *
Florian Ragwitz <rafl@debian.org>
=item *
Hans Dieter Pearcey <hdp@weftsoar.net>
=item *
Chris Prather <chris@prather.org>
=item *
Matt S Trout <mst@shadowcat.co.uk>
=back
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2006 by Infinity Interactive, Inc.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
| 30.098573 | 123 | 0.644345 |
ed60517be56fe8f269aee4bc7ace390decb1d9a0 | 2,661 | pm | Perl | modules/Bio/EnsEMBL/Analysis/Hive/RunnableDB/HivecDNAManyHits.pm | jmgonzmart/ensembl-analysis | 41c1d362bc0abce91a81a6615b3d61a6b82b7da5 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Analysis/Hive/RunnableDB/HivecDNAManyHits.pm | jmgonzmart/ensembl-analysis | 41c1d362bc0abce91a81a6615b3d61a6b82b7da5 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Analysis/Hive/RunnableDB/HivecDNAManyHits.pm | jmgonzmart/ensembl-analysis | 41c1d362bc0abce91a81a6615b3d61a6b82b7da5 | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#Copyright [2016-2022] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Bio::EnsEMBL::Analysis::Hive::RunnableDB::HivecDNAManyHits;
use strict;
use warnings;
use feature 'say';
use Bio::EnsEMBL::Analysis::Tools::Utilities qw(hrdb_get_dba);
use parent ('Bio::EnsEMBL::Hive::RunnableDB::JobFactory');
sub param_defaults {
  # Defaults merged on top of the generic JobFactory defaults:
  #   threshold    - a cDNA accession is reported once it supports more than
  #                  this many transcripts in the target database
  #   column_names - name of the job parameter carrying each dataflown accession
  #   many_hits_process_threshod - (sic: misspelled, but read back under the
  #                  same misspelling in fetch_input, so it must not be renamed
  #                  here alone) fraction of 'threshold' used when comparing
  #                  against the counts already present in 'old_db'
  my ($self) = @_;
  return {
    %{$self->SUPER::param_defaults},
    threshold => 20,
    column_names => ['iid'],
    many_hits_process_threshod => .90,
  }
}
sub fetch_input {
  # Count, per evidence accession (hseqname), how many transcripts in the
  # target database are supported by it, then select the accessions whose
  # count exceeds 'threshold'. When 'old_db' is supplied, accessions whose
  # transcript count in that earlier database already reached ~90% of the
  # threshold are assumed to have been processed before and are dropped.
  # The surviving accessions are stored in 'inputlist' for dataflow; if
  # nothing qualifies the job completes early with an explanatory message.
  my $self = shift;
  my $db = hrdb_get_dba($self->param_required('target_db'));
  my $slice_adaptor = $db->get_SliceAdaptor;
  my %hit_names;
  # Walk every transcript on every top-level slice and tally the accession
  # of its first supporting feature.
  foreach my $slice (@{$slice_adaptor->fetch_all('toplevel', undef, 1)}) {
    foreach my $transcript (@{$slice->get_all_Transcripts}) {
      # NOTE(review): assumes every transcript has at least one supporting
      # feature; a transcript without one would die here - confirm upstream
      # guarantees this for cDNA-aligned transcripts.
      ++$hit_names{$transcript->get_all_supporting_features->[0]->hseqname};
    }
  }
  my @many_hits;
  my $threshold = $self->param('threshold');
  foreach my $key (keys %hit_names) {
    push(@many_hits, $key) if ($hit_names{$key} > $threshold);
  }
  if (@many_hits) {
    if ($self->param_is_defined('old_db')) {
      # Filter out accessions that were already (nearly) as frequent in the
      # previous database: those have presumably been handled in an earlier run.
      my $old_db = hrdb_get_dba($self->param_required('old_db'));
      my $transcript_adaptor = $old_db->get_TranscriptAdaptor;
      my @to_process;
      # Misspelled key intentional; it matches param_defaults above.
      $threshold *= $self->param('many_hits_process_threshod');
      foreach my $hitname (@many_hits) {
        my $transcripts = $transcript_adaptor->fetch_all_by_transcript_supporting_evidence($hitname, 'dna_align_feature');
        push(@to_process, $hitname) unless (scalar(@$transcripts) > $threshold);
      }
      if (@to_process) {
        $self->param('inputlist', \@to_process);
      }
      else {
        $self->complete_early(scalar(@many_hits).' cDNAs had more than '.$self->param('threshold').' hits but were already in the previous database');
      }
    }
    else {
      $self->param('inputlist', \@many_hits);
    }
  }
  else {
    $self->complete_early("No cDNAs had more than $threshold hits");
  }
}
1;
| 31.678571 | 150 | 0.682074 |
ed8a562e18b28b90b1591ba700a679f08eccc9f0 | 1,974 | pl | Perl | probs/SYJ/SYJ211+1.012.pl | ptarau/TypesAndProofs | ca9e4347d3dabb3531d38bdef6aafaa41a30555d | [
"Apache-2.0"
]
| 25 | 2018-07-19T00:08:18.000Z | 2022-02-08T23:07:12.000Z | probs/SYJ/SYJ211+1.012.pl | ptarau/TypesAndProofs | ca9e4347d3dabb3531d38bdef6aafaa41a30555d | [
"Apache-2.0"
]
| null | null | null | probs/SYJ/SYJ211+1.012.pl | ptarau/TypesAndProofs | ca9e4347d3dabb3531d38bdef6aafaa41a30555d | [
"Apache-2.0"
]
| 4 | 2019-01-12T17:02:14.000Z | 2021-11-23T08:16:09.000Z | %--------------------------------------------------------------------------
% File : SYJ211+1.012 : ILTP v1.1.2
% Domain : Intuitionistic Syntactic
% Problem : Formulae of Korn & Kreitz
% Version : Especial.
% Problem formulation : Inuit. Invalid. Size 12
% English : (A & B(N) & C(N)) -> f with
% A - (a(0) -> f), B(N) - (~~b(N) -> b(0) -> a(N)),
% C(N) - (&&_{i-1..n} ((~~b(i-1) -> a(i)) -> a(i-1))),
% Refs : [Dyc97] Roy Dyckhoff. Some benchmark formulas for
% intuitionistic propositional logic. At
% http://www.dcs.st-and.ac.uk/~rd/logic/marks.html
% : [KK97] D. Korn & C. Kreitz, A constructively adequate
% refutation system for intuitionistic logic,
% position paper at Tableaux'97, available at
% http://www.cs.uni-potsdam.de/ti/kreitz/PDF/
% Source : [Dyc97]
% Names : kk_n12 : Dyckhoff's benchmark formulas (1997)
%
% Status (intuit.) : Non-Theorem
% Rating (intuit.) : 0.75 v1.0.0
%
% Comments :
%--------------------------------------------------------------------------
fof(axiom1,axiom,(
( a0 -> f) )).
fof(axiom2,axiom,(
( ( ~(~(b12)) -> b0) -> a12) )).
fof(axiom3,axiom,(
( ( ~(~(b0)) -> a1) -> a0) )).
fof(axiom4,axiom,(
( ( ~(~(b1)) -> a2) -> a1) )).
fof(axiom5,axiom,(
( ( ~(~(b2)) -> a3) -> a2) )).
fof(axiom6,axiom,(
( ( ~(~(b3)) -> a4) -> a3) )).
fof(axiom7,axiom,(
( ( ~(~(b4)) -> a5) -> a4) )).
fof(axiom8,axiom,(
( ( ~(~(b5)) -> a6) -> a5) )).
fof(axiom9,axiom,(
( ( ~(~(b6)) -> a7) -> a6) )).
fof(axiom10,axiom,(
( ( ~(~(b7)) -> a8) -> a7) )).
fof(axiom11,axiom,(
( ( ~(~(b8)) -> a9) -> a8) )).
fof(axiom12,axiom,(
( ( ~(~(b9)) -> a10) -> a9) )).
fof(axiom13,axiom,(
( ( ~(~(b10)) -> a11) -> a10) )).
fof(axiom14,axiom,(
( ( ~(~(b11)) -> a12) -> a11) )).
fof(con,conjecture,(
f
)).
%--------------------------------------------------------------------------
| 26.675676 | 75 | 0.425025 |
ed5be5c257ba45b8588fbc77e86cf4b03c5ab75a | 3,186 | plx | Perl | perl/sfgrep/annotations.plx | jtraver/dev | c7cd2181594510a8fa27e7325566ed2d79371624 | [
"MIT"
]
| null | null | null | perl/sfgrep/annotations.plx | jtraver/dev | c7cd2181594510a8fa27e7325566ed2d79371624 | [
"MIT"
]
| null | null | null | perl/sfgrep/annotations.plx | jtraver/dev | c7cd2181594510a8fa27e7325566ed2d79371624 | [
"MIT"
]
| null | null | null | #!/usr/bin/perl -w
use diagnostics;
use warnings;
use strict;
# File-scoped state shared with load_files():
my $status = 1;    # print a progress dot every $status files (recomputed by load_files)
my @files;         # list of text files to scan (filled by load_files)
my $count = 0;     # running count of files examined
$| = 1;            # unbuffered STDOUT so the progress dots appear immediately
mkdir "FGREP";
my $output = "FGREP/_annotations_.txt";
if (-e $output)
{
    # Report was generated by a previous run; fall through and just view it.
}
else
{
    # Three-argument open with a lexical handle; the old two-argument
    # bareword form (open(OUTPUT, ">$output")) allowed mode characters
    # embedded in the path to change the open mode.
    open(my $out_fh, '>', $output) or die "Can't write $output: $!";
    load_files();
    my $save_rs = $/;
    foreach my $file (sort @files)
    {
        # Skip vendored package trees.
        if ($file =~ /FreeBSD.4.11.package/ || $file =~ /rhel.4.3.package/)
        {
            next;
        }
        $count++;
        if ($count % $status == 0)
        {
            print ".";    # progress indicator
        }
        $/ = undef;    # whole file mode
        if (open(my $in_fh, '<', $file))
        {
            my $line = <$in_fh>;    # whole file
            close($in_fh);
            # defined-guard: an empty file yields undef here, which used to
            # trigger an uninitialized-value warning in the match below.
            if (defined $line && $line =~ /\@/i)
            {
                print {$out_fh}
                  "---------------------------------------------------------------------------\n";
                print {$out_fh}
                  "---------------------------------------------------------------------------\n";
                print {$out_fh} "| $file\n";
                print {$out_fh}
                  "---------------------------------------------------------------------------\n";
                # Re-read the file line by line and emit only matching lines.
                $/ = $save_rs;
                if (open($in_fh, '<', $file))
                {
                    my @lines = <$in_fh>;
                    close($in_fh);
                    chomp(@lines);
                    foreach my $text_line (@lines)
                    {
                        if ($text_line =~ /\@/i)
                        {
                            print {$out_fh} "\t$text_line\n";
                        }
                    }
                }
                else
                {
                    print "could not read $file in line mode: $!";
                    print "\n";
                }
                print {$out_fh} "\n";
            }
        }
        else
        {
            print "Can't read file $file: $!";
            print "\n";
        }
    }
    # Buffered write errors surface at close, so check it on the write handle.
    close($out_fh) or die "Can't close $output: $!";
    print "\n";
}
# Open the finished (or pre-existing) report in the user's editor.
my $cmd = "vi \"$output\"";
system($cmd);
# Populate the file-scoped @files list from TextFiles.txt (one path per
# line, produced by setTextFiles.plx) and derive $status, the interval at
# which the main loop prints a progress dot (~79 dots across a full run).
sub load_files
{
    my $file = "TextFiles.txt";
    # Three-argument open with a lexical handle instead of the old
    # two-argument bareword form.
    my $file_open = open(my $fh, '<', $file);
    if (!$file_open)
    {
        print STDERR "please run setTextFiles.plx\n";
        exit(1);
    }
    @files = <$fh>;
    close($fh);
    chomp(@files);
    $status = @files / 79;
    if ($status <= 1)
    {
        $status = 1;
    }
}
| 32.845361 | 85 | 0.25204 |
ed6a77038ca81f9987459fe13bad99dec4785483 | 2,292 | pm | Perl | auto-lib/Paws/SimpleWorkflow/ExternalWorkflowExecutionCancelRequestedEventAttributes.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/SimpleWorkflow/ExternalWorkflowExecutionCancelRequestedEventAttributes.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/SimpleWorkflow/ExternalWorkflowExecutionCancelRequestedEventAttributes.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
]
| 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z | # Generated by default/object.tt
package Paws::SimpleWorkflow::ExternalWorkflowExecutionCancelRequestedEventAttributes;
use Moose;
has InitiatedEventId => (is => 'ro', isa => 'Int', request_name => 'initiatedEventId', traits => ['NameInRequest'], required => 1);
has WorkflowExecution => (is => 'ro', isa => 'Paws::SimpleWorkflow::WorkflowExecution', request_name => 'workflowExecution', traits => ['NameInRequest'], required => 1);
1;
### main pod documentation begin ###
=head1 NAME
Paws::SimpleWorkflow::ExternalWorkflowExecutionCancelRequestedEventAttributes
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::SimpleWorkflow::ExternalWorkflowExecutionCancelRequestedEventAttributes object:
$service_obj->Method(Att1 => { InitiatedEventId => $value, ..., WorkflowExecution => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::SimpleWorkflow::ExternalWorkflowExecutionCancelRequestedEventAttributes object:
$result = $service_obj->Method(...);
$result->Att1->InitiatedEventId
=head1 DESCRIPTION
Provides the details of the C<ExternalWorkflowExecutionCancelRequested>
event.
=head1 ATTRIBUTES
=head2 B<REQUIRED> InitiatedEventId => Int
The ID of the C<RequestCancelExternalWorkflowExecutionInitiated> event
corresponding to the C<RequestCancelExternalWorkflowExecution> decision
to cancel this external workflow execution. This information can be
useful for diagnosing problems by tracing back the chain of events
leading up to this event.
=head2 B<REQUIRED> WorkflowExecution => L<Paws::SimpleWorkflow::WorkflowExecution>
The external workflow execution to which the cancellation request was
delivered.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::SimpleWorkflow>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 32.28169 | 171 | 0.781414 |
ed380ee1d4f6d426c0a1059f2566563099281992 | 1,427 | pm | Perl | hardware/telephony/avaya/cm/snmp/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
]
| 1 | 2021-03-16T22:20:32.000Z | 2021-03-16T22:20:32.000Z | hardware/telephony/avaya/cm/snmp/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
]
| null | null | null | hardware/telephony/avaya/cm/snmp/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
]
| null | null | null | #
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::telephony::avaya::cm::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
sub new {
    # Constructor: delegate the common plugin plumbing to the generic SNMP
    # script base class, then register this plugin's version and its
    # mode-name -> mode-class table used by the centreon framework.
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $self->{version} = '1.0';
    # Every mode class lives under this plugin's ::mode namespace.
    my $mode_prefix = 'hardware::telephony::avaya::cm::snmp::mode';
    %{$self->{modes}} = map { $_ => "${mode_prefix}::$_" } qw(calls licenses trunks);
    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check avaya communication manager in SNMP.
=cut
| 27.980392 | 77 | 0.694464 |
ed8c3af8a028745664551a4d1b4443bd9cd447c6 | 421 | pl | Perl | perl/lib/unicore/lib/Blk/Katakan2.pl | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | [
"Apache-2.0"
]
| 4 | 2018-04-20T07:27:13.000Z | 2021-12-21T05:19:24.000Z | perl/lib/unicore/lib/Blk/Katakan2.pl | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | [
"Apache-2.0"
]
| 4 | 2021-03-10T19:10:00.000Z | 2021-05-11T14:58:19.000Z | perl/lib/unicore/lib/Blk/Katakan2.pl | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | [
"Apache-2.0"
]
| 1 | 2019-11-12T02:29:26.000Z | 2019-11-12T02:29:26.000Z | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
31F0 31FF
END
| 30.071429 | 77 | 0.669834 |
ed3ef767547391ac728a4f3deeb886d80323e47c | 23,364 | al | Perl | Apps/W1/APIV2/test/src/APIV2PurchaseInvoicesE2E.Codeunit.al | manjulchauhan/ALAppExtensions | 3f2f1d6e5337188b1af9c0275420f1c1de036a7f | [
"MIT"
]
| 127 | 2018-04-17T18:03:03.000Z | 2019-05-06T18:54:17.000Z | Apps/W1/APIV2/test/src/APIV2PurchaseInvoicesE2E.Codeunit.al | manjulchauhan/ALAppExtensions | 3f2f1d6e5337188b1af9c0275420f1c1de036a7f | [
"MIT"
]
| 2,279 | 2018-09-12T12:01:49.000Z | 2019-05-06T13:59:35.000Z | Apps/W1/APIV2/test/src/APIV2PurchaseInvoicesE2E.Codeunit.al | Enavate-EPS-Product/ALAppExtensions | 20136cf2ef6589ff3ce7b16776aed5e8823b76f2 | [
"MIT"
]
| 41 | 2018-05-17T11:19:52.000Z | 2019-04-30T17:30:38.000Z | codeunit 139829 "APIV2 - Purchase Invoices E2E"
{
// version Test,ERM,W1,All
Subtype = Test;
TestPermissions = Disabled;
trigger OnRun()
begin
// [FEATURE] [Graph] [Purchase] [Invoice]
end;
var
TempIgnoredFieldsForComparison: Record 2000000041 temporary;
Assert: Codeunit "Assert";
LibraryGraphMgt: Codeunit "Library - Graph Mgt";
LibraryGraphDocumentTools: Codeunit "Library - Graph Document Tools";
LibraryRandom: Codeunit "Library - Random";
LibraryInventory: Codeunit "Library - Inventory";
LibraryUtility: Codeunit "Library - Utility";
LibraryPurchase: Codeunit "Library - Purchase";
InvoiceServiceNameTxt: Label 'purchaseInvoices';
ActionPostTxt: Label 'Microsoft.NAV.post', Locked = true;
NotEmptyResponseErr: Label 'Response body should be empty.';
CannotFindDraftInvoiceErr: Label 'Cannot find the draft invoice.';
CannotFindPostedInvoiceErr: Label 'Cannot find the posted invoice.';
InvoiceStatusErr: Label 'The invoice status is incorrect.';
local procedure Initialize()
begin
end;
[Test]
procedure TestGetInvoices()
var
InvoiceID1: Text;
InvoiceID2: Text;
InvoiceJSON1: Text;
InvoiceJSON2: Text;
ResponseText: Text;
TargetURL: Text;
begin
// [SCENARIO 184721] Create posted and unposted Purchase invoices and use a GET method to retrieve them
// [GIVEN] 2 invoices, one posted and one unposted
Initialize();
CreatePurchaseInvoices(InvoiceID1, InvoiceID2);
Commit();
// [WHEN] we GET all the invoices from the web service
TargetURL := LibraryGraphMgt.CreateTargetURL('', Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
LibraryGraphMgt.GetFromWebService(ResponseText, TargetURL);
// [THEN] the 2 invoices should exist in the response
Assert.IsTrue(
LibraryGraphMgt.GetObjectsFromJSONResponse(
ResponseText, 'number', InvoiceID1, InvoiceID2, InvoiceJSON1, InvoiceJSON2),
'Could not find the invoices in JSON');
LibraryGraphMgt.VerifyIDInJson(InvoiceJSON1);
LibraryGraphMgt.VerifyIDInJson(InvoiceJSON2);
end;
[Test]
procedure TestGetInvoiceFromPostedOrderCorrectOrderIdAndNo()
var
PurchaseHeader: Record "Purchase Header";
PurchInvHeader: Record "Purch. Inv. Header";
OrderId: Guid;
OrderNo: Code[20];
InvoiceId: Guid;
InvoiceNo: Code[20];
TargetURL: Text;
ResponseText: Text;
OrderIdValue: Text;
OrderNoValue: Text;
begin
// [SCENARIO] Create a Purchase Invoice from a Purchase Order and use GET method to retrieve them and check the orderId and orderNumber
// [GIVEN] A purchase invoice created by posting a purchase order
LibraryPurchase.CreatePurchaseOrder(PurchaseHeader);
OrderId := PurchaseHeader.SystemId;
OrderNo := PurchaseHeader."No.";
InvoiceNo := LibraryPurchase.PostPurchaseDocument(PurchaseHeader, true, true);
Commit();
PurchInvHeader.SetRange("No.", InvoiceNo);
PurchInvHeader.FindFirst();
InvoiceId := PurchInvHeader.SystemId;
// [WHEN] we get the invoice from the web service
TargetURL := LibraryGraphMgt.CreateTargetURL(InvoiceId, Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
LibraryGraphMgt.GetFromWebService(ResponseText, TargetURL);
// [THEN] the orderId field exists in the response
Assert.AreNotEqual('', ResponseText, 'Response JSON should not be blank');
LibraryGraphMgt.VerifyIDFieldInJson(ResponseText, 'orderId');
// [THEN] The orderId and orderNumber fields correspond to the id and number of the sales order
LibraryGraphMgt.GetPropertyValueFromJSON(ResponseText, 'orderId', OrderIdValue);
Assert.AreEqual(OrderIdValue, Format(Lowercase(LibraryGraphMgt.StripBrackets(OrderId))), 'The order id value is wrong.');
LibraryGraphMgt.GetPropertyValueFromJSON(ResponseText, 'orderNumber', OrderNoValue);
Assert.AreEqual(OrderNoValue, Format(OrderNo), 'The order number value is wrong.');
end;
[Test]
procedure TestPostInvoices()
var
PurchaseHeader: Record "Purchase Header";
BuyFromVendor: Record "Vendor";
ShipToVendor: Record "Vendor";
VendorNo: Text;
InvoiceDate: Date;
ResponseText: Text;
InvoiceNumber: Text;
TargetURL: Text;
InvoiceJSON: Text;
begin
// [SCENARIO 184721] Create unposted Purchase invoices
// [GIVEN] A vendor
Initialize();
LibraryPurchase.CreateVendorWithAddress(BuyFromVendor);
LibraryPurchase.CreateVendorWithAddress(ShipToVendor);
VendorNo := BuyFromVendor."No.";
InvoiceDate := WorkDate();
InvoiceJSON := CreateInvoiceJSONWithAddress(BuyFromVendor, ShipToVendor, InvoiceDate);
Commit();
// [WHEN] we POST the JSON to the web service
TargetURL := LibraryGraphMgt.CreateTargetURL('', Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
LibraryGraphMgt.PostToWebService(TargetURL, InvoiceJSON, ResponseText);
// [THEN] the response text should have the correct Id, invoice address and the invoice should exist in the table with currency code set by default
Assert.AreNotEqual('', ResponseText, 'response JSON should not be blank');
Assert.IsTrue(
LibraryGraphMgt.GetObjectIDFromJSON(ResponseText, 'number', InvoiceNumber),
'Could not find purchase invoice number');
LibraryGraphMgt.VerifyIDInJson(ResponseText);
PurchaseHeader.Reset();
PurchaseHeader.SetRange("Document Type", PurchaseHeader."Document Type"::Invoice);
PurchaseHeader.SetRange("No.", InvoiceNumber);
PurchaseHeader.SetRange("Buy-from Vendor No.", VendorNo);
PurchaseHeader.SetRange("Document Date", InvoiceDate);
PurchaseHeader.SetRange("Posting Date", InvoiceDate);
Assert.IsTrue(PurchaseHeader.FindFirst(), 'The unposted invoice should exist');
LibraryGraphDocumentTools.CheckPurchaseDocumentBuyFromAddress(BuyFromVendor, PurchaseHeader, false, false);
LibraryGraphDocumentTools.CheckPurchaseDocumentShipToAddress(ShipToVendor, PurchaseHeader, false, false);
Assert.AreEqual('', PurchaseHeader."Currency Code", 'The invoice should have the LCY currency code set by default');
end;
[Test]
procedure TestPostPurchaseInvoiceWithCurrency()
var
PurchaseHeader: Record "Purchase Header";
Vendor: Record "Vendor";
VendorNo: Text;
ResponseText: Text;
InvoiceNumber: Text;
TargetURL: Text;
InvoiceJSON: Text;
CurrencyCode: Code[10];
begin
// [SCENARIO 184721] Create posted and unposted with specific currency set and use HTTP POST to create them
Initialize();
// [GIVEN] an Invoice with a non-LCY currencyCode set
LibraryPurchase.CreateVendor(Vendor);
VendorNo := Vendor."No.";
CurrencyCode := GetCurrencyCode();
InvoiceJSON := CreateInvoiceJSON('vendorNumber', VendorNo, 'currencyCode', CurrencyCode);
Commit();
// [WHEN] we POST the JSON to the web service
TargetURL := LibraryGraphMgt.CreateTargetURL('', Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
LibraryGraphMgt.PostToWebService(TargetURL, InvoiceJSON, ResponseText);
// [THEN] the response text should contain the invoice ID and the integration record table should map the PurchaseInvoiceId with the ID
Assert.AreNotEqual('', ResponseText, 'response JSON should not be blank');
Assert.IsTrue(
LibraryGraphMgt.GetObjectIDFromJSON(ResponseText, 'number', InvoiceNumber),
'Could not find Purchase invoice number');
LibraryGraphMgt.VerifyIDInJson(ResponseText);
// [THEN] the invoice should exist in the tables
PurchaseHeader.Reset();
PurchaseHeader.SetRange("Document Type", PurchaseHeader."Document Type"::Invoice);
PurchaseHeader.SetRange("No.", InvoiceNumber);
PurchaseHeader.SetRange("Buy-from Vendor No.", VendorNo);
Assert.IsTrue(PurchaseHeader.FindFirst(), 'The unposted invoice should exist');
Assert.AreEqual(CurrencyCode, PurchaseHeader."Currency Code", 'The invoice should have the correct currency code');
end;
[Test]
procedure TestModifyInvoices()
begin
TestMultipleModifyInvoices(false, false);
end;
[Test]
procedure TestEmptyModifyInvoices()
begin
TestMultipleModifyInvoices(true, false);
end;
[Test]
procedure TestPartialModifyInvoices()
begin
TestMultipleModifyInvoices(false, true);
end;
local procedure TestMultipleModifyInvoices(EmptyData: Boolean; PartiallyEmptyData: Boolean)
var
BuyFromVendor: Record "Vendor";
ShipToVendor: Record "Vendor";
PurchaseLine: Record "Purchase Line";
Item: Record "Item";
PurchaseHeader: Record "Purchase Header";
InvoiceIntegrationID: Text;
InvoiceID: Text;
ResponseText: Text;
TargetURL: Text;
InvoiceJSON: Text;
begin
// [SCENARIO 184721] Create Purchase Invoice, use a PATCH method to change it and then verify the changes
Initialize();
LibraryPurchase.CreateVendorWithAddress(BuyFromVendor);
LibraryPurchase.CreateVendorWithAddress(ShipToVendor);
// [GIVEN] an order with the previously created vendor
LibraryPurchase.CreatePurchHeader(PurchaseHeader, PurchaseHeader."Document Type"::Invoice, BuyFromVendor."No.");
// [GIVEN] an item with unit price and unit cost
LibraryInventory.CreateItemWithUnitPriceAndUnitCost(
Item, LibraryRandom.RandDecInRange(1, 100, 2), LibraryRandom.RandDecInRange(1, 100, 2));
// [GIVEN] an line in the previously created order
LibraryPurchase.CreatePurchaseLine(PurchaseLine, PurchaseHeader, PurchaseLine.Type::Item, Item."No.", LibraryRandom.RandInt(100));
InvoiceID := PurchaseHeader."No.";
// [GIVEN] the invoice's unique ID
PurchaseHeader.Reset();
PurchaseHeader.SetRange("No.", InvoiceID);
PurchaseHeader.SetRange("Document Type", PurchaseHeader."Document Type"::Invoice);
PurchaseHeader.FindFirst();
InvoiceIntegrationID := PurchaseHeader.SystemId;
Assert.AreNotEqual('', InvoiceIntegrationID, 'ID should not be empty');
if EmptyData then
InvoiceJSON := '{}'
else
InvoiceJSON := LibraryGraphMgt.AddPropertytoJSON(InvoiceJSON, 'vendorNumber', BuyFromVendor."No.");
// [GIVEN] a JSON text with an Item that has the PostalAddress
LibraryGraphDocumentTools.GetVendorAddressJSON(InvoiceJSON, BuyFromVendor, 'buyFrom', EmptyData, PartiallyEmptyData);
LibraryGraphDocumentTools.GetVendorAddressJSON(InvoiceJSON, ShipToVendor, 'shipTo', EmptyData, PartiallyEmptyData);
Commit();
// [WHEN] we PATCH the JSON to the web service, with the unique Item ID
TargetURL := LibraryGraphMgt.CreateTargetURL(InvoiceIntegrationID, Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
LibraryGraphMgt.PatchToWebService(TargetURL, InvoiceJSON, ResponseText);
// [THEN] the item should have the Unit of Measure as a value in the table
PurchaseHeader.Reset();
PurchaseHeader.SetRange("Document Type", PurchaseHeader."Document Type"::Invoice);
PurchaseHeader.SetRange("No.", InvoiceID);
Assert.IsTrue(PurchaseHeader.FindFirst(), 'The unposted invoice should exist');
// [THEN] the response text should contain the invoice address
LibraryGraphDocumentTools.CheckPurchaseDocumentBuyFromAddress(BuyFromVendor, PurchaseHeader, EmptyData, PartiallyEmptyData);
LibraryGraphDocumentTools.CheckPurchaseDocumentShipToAddress(ShipToVendor, PurchaseHeader, EmptyData, PartiallyEmptyData);
end;
[Test]
procedure TestDeleteInvoice()
var
PurchaseHeader: Record "Purchase Header";
InvoiceID: Text;
ID: Text;
ResponseText: Text;
TargetURL: Text;
begin
// [SCENARIO 184721] Createunposted Purchase invoice and use HTTP DELETE to delete it
// [GIVEN] An unposted invoice
Initialize();
LibraryPurchase.CreatePurchaseInvoice(PurchaseHeader);
ModifyPurchaseHeaderPostingDate(PurchaseHeader, WorkDate());
InvoiceID := PurchaseHeader."No.";
Commit();
PurchaseHeader.Reset();
PurchaseHeader.Get(PurchaseHeader."Document Type"::Invoice, InvoiceID);
ID := PurchaseHeader.SystemId;
Assert.AreNotEqual('', ID, 'ID should not be empty');
// [WHEN] we DELETE the item from the web service, with the item's unique ID
TargetURL := LibraryGraphMgt.CreateTargetURL(ID, Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
LibraryGraphMgt.DeleteFromWebService(TargetURL, '', ResponseText);
// [THEN] the invoice shouldn't exist in the tables
if PurchaseHeader.Get(PurchaseHeader."Document Type"::Invoice, InvoiceID) then
Assert.ExpectedError('The unposted invoice should not exist');
end;
[Test]
procedure TestCreateInvoiceThroughPageAndAPI()
var
PagePurchaseHeader: Record "Purchase Header";
ApiPurchaseHeader: Record "Purchase Header";
Vendor: Record "Vendor";
RecordField: Record Field;
ApiRecordRef: RecordRef;
PageRecordRef: RecordRef;
PurchaseInvoice: TestPage "Purchase Invoice";
VendorNo: Text;
InvoiceDate: Date;
ResponseText: Text;
TargetURL: Text;
InvoiceJSON: Text;
begin
// [SCENARIO 184721] Create an invoice both through the client UI and through the API
// [SCENARIO] and compare them. They should be the same and have the same fields autocompleted wherever needed.
// [GIVEN] An unposted invoice
Initialize();
LibraryGraphDocumentTools.InitializeUIPage();
LibraryPurchase.CreateVendorWithAddress(Vendor);
VendorNo := Vendor."No.";
InvoiceDate := WorkDate();
// [GIVEN] a json describing our new invoice
InvoiceJSON := CreateInvoiceJSONWithAddress(Vendor, Vendor, InvoiceDate);
Commit();
// [WHEN] we POST the JSON to the web service and create another invoice through the test page
TargetURL := LibraryGraphMgt.CreateTargetURL('', Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
LibraryGraphMgt.PostToWebService(TargetURL, InvoiceJSON, ResponseText);
CreateInvoiceThroughTestPage(PurchaseInvoice, Vendor, InvoiceDate, InvoiceDate);
// [THEN] the invoice should exist in the table and match the invoice created from the page
ApiPurchaseHeader.Reset();
ApiPurchaseHeader.SetRange("Buy-from Vendor No.", VendorNo);
ApiPurchaseHeader.SetRange("Document Type", ApiPurchaseHeader."Document Type"::Invoice);
ApiPurchaseHeader.SetRange("Document Date", InvoiceDate);
ApiPurchaseHeader.SetRange("Posting Date", InvoiceDate);
Assert.IsTrue(ApiPurchaseHeader.FindFirst(), 'The unposted invoice should exist');
// Ignore these fields when comparing Page and API Invoices
LibraryUtility.AddTempField(TempIgnoredFieldsForComparison, ApiPurchaseHeader.FieldNo("No."), Database::"Purchase Header");
LibraryUtility.AddTempField(
TempIgnoredFieldsForComparison, ApiPurchaseHeader.FieldNo("Posting Description"), Database::"Purchase Header");
LibraryUtility.AddTempField(TempIgnoredFieldsForComparison, ApiPurchaseHeader.FieldNo(Id), Database::"Purchase Header");
// Special ignore case for ES
RecordField.SetRange(TableNo, Database::"Purchase Header");
RecordField.SetRange(FieldName, 'Due Date Modified');
if RecordField.FindFirst() then
LibraryUtility.AddTempField(TempIgnoredFieldsForComparison, RecordField."No.", Database::"Purchase Header");
PagePurchaseHeader.Get(PagePurchaseHeader."Document Type"::Invoice, PurchaseInvoice."No.".Value());
ApiRecordRef.GetTable(ApiPurchaseHeader);
PageRecordRef.GetTable(PagePurchaseHeader);
Assert.RecordsAreEqualExceptCertainFields(ApiRecordRef, PageRecordRef, TempIgnoredFieldsForComparison,
'Page and API Invoice do not match');
end;
[Test]
procedure TestPostInvoiceFailsWithoutVendorNoOrId()
var
Currency: Record "Currency";
ResponseText: Text;
TargetURL: Text;
InvoiceJSON: Text;
CurrencyCode: Code[10];
begin
// [SCENARIO 184721] Create an invoice wihtout Vendor throws an error
Initialize();
// [GIVEN] a purchase invoice JSON with currency only
Currency.SetFilter(Code, '<>%1', '');
Currency.FindFirst();
CurrencyCode := Currency.Code;
InvoiceJSON := LibraryGraphMgt.AddPropertytoJSON('', 'currencyCode', CurrencyCode);
Commit();
// [WHEN] we POST the JSON to the web service
// [THEN] an error is received
TargetURL := LibraryGraphMgt.CreateTargetURL('', Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt);
asserterror LibraryGraphMgt.PostToWebService(TargetURL, InvoiceJSON, ResponseText);
end;
[Test]
[Scope('OnPrem')]
procedure TestActionPostPurchaseInvoice()
var
PurchaseHeader: Record "Purchase Header";
PurchInvHeader: Record "Purch. Inv. Header";
TempPurchInvEntityAggregate: Record "Purch. Inv. Entity Aggregate" temporary;
DraftInvoiceRecordRef: RecordRef;
DocumentId: Guid;
DocumentNo: Code[20];
ResponseText: Text;
TargetURL: Text;
begin
// [SCENARIO] User can post a purchase invoice through the API.
Initialize();
// [GIVEN] Draft purchase invoice exists
LibraryPurchase.CreatePurchaseInvoice(PurchaseHeader);
DraftInvoiceRecordRef.GetTable(PurchaseHeader);
DocumentId := PurchaseHeader.SystemId;
DocumentNo := PurchaseHeader."No.";
Commit();
VerifyDraftPurchaseInvoice(DocumentId, TempPurchInvEntityAggregate.Status::Draft.AsInteger());
// [WHEN] A POST request is made to the API.
TargetURL :=
LibraryGraphMgt.CreateTargetURLWithSubpage(DocumentId, Page::"APIV2 - Purchase Invoices", InvoiceServiceNameTxt, ActionPostTxt);
LibraryGraphMgt.PostToWebServiceAndCheckResponseCode(TargetURL, '', ResponseText, 204);
// [THEN] Response should be empty
Assert.AreEqual('', ResponseText, NotEmptyResponseErr);
// [THEN] Invoice is posted
FindPostedPurchaseInvoiceByPreAssignedNo(DocumentNo, PurchInvHeader);
VerifyPostedPurchaseInvoice(PurchInvHeader."Draft Invoice SystemId", TempPurchInvEntityAggregate.Status::Open.AsInteger());
end;
local procedure CreatePurchaseInvoices(var InvoiceID1: Text; var InvoiceID2: Text)
var
PurchaseHeader: Record "Purchase Header";
begin
LibraryPurchase.SetAllowDocumentDeletionBeforeDate(WorkDate() + 1);
LibraryPurchase.CreatePurchaseInvoice(PurchaseHeader);
ModifyPurchaseHeaderPostingDate(PurchaseHeader, WorkDate());
InvoiceID1 := LibraryPurchase.PostPurchaseDocument(PurchaseHeader, false, true);
LibraryPurchase.CreatePurchaseInvoice(PurchaseHeader);
ModifyPurchaseHeaderPostingDate(PurchaseHeader, WorkDate());
InvoiceID2 := PurchaseHeader."No.";
Commit();
end;
local procedure CreateInvoiceJSONWithAddress(BuyFromVendor: Record "Vendor"; ShipToVendor: Record "Vendor"; InvoiceDate: Date): Text
var
InvoiceJSON: Text;
begin
InvoiceJSON := CreateInvoiceJSON('vendorId', BuyFromVendor.SystemId, 'invoiceDate', InvoiceDate);
LibraryGraphDocumentTools.GetVendorAddressJSON(InvoiceJSON, BuyFromVendor, 'buyFrom', false, false);
if BuyFromVendor."No." <> ShipToVendor."No." then
LibraryGraphDocumentTools.GetVendorAddressJSON(InvoiceJSON, ShipToVendor, 'shipTo', false, false);
exit(InvoiceJSON);
end;
local procedure CreateInvoiceThroughTestPage(var PurchaseInvoice: TestPage "Purchase Invoice"; Vendor: Record "Vendor"; DocumentDate: Date; PostingDate: Date)
begin
PurchaseInvoice.OpenNew();
PurchaseInvoice."Buy-from Vendor No.".SetValue(Vendor."No.");
PurchaseInvoice."Document Date".SetValue(DocumentDate);
PurchaseInvoice."Posting Date".SetValue(PostingDate);
end;
local procedure ModifyPurchaseHeaderPostingDate(var PurchaseHeader: Record "Purchase Header"; PostingDate: Date)
begin
PurchaseHeader.Validate("Posting Date", PostingDate);
PurchaseHeader.Modify(true);
end;
local procedure GetCurrencyCode(): Code[10]
var
Currency: Record "Currency";
begin
Currency.SetFilter(Code, '<>%1', '');
if Currency.FindFirst() then
exit(Currency.Code);
end;
local procedure CreateInvoiceJSON(PropertyName1: Text; PropertyValue1Variant: Variant; PropertyName2: Text; PropertyValue2Variant: Variant): Text
var
InvoiceJSON: Text;
begin
InvoiceJSON := LibraryGraphMgt.AddPropertytoJSON('', PropertyName1, PropertyValue1Variant);
InvoiceJSON := LibraryGraphMgt.AddPropertytoJSON(InvoiceJSON, PropertyName2, PropertyValue2Variant);
exit(InvoiceJSON);
end;
local procedure FindPostedPurchaseInvoiceByPreAssignedNo(PreAssignedNo: Code[20]; var PurchInvHeader: Record "Purch. Inv. Header")
begin
PurchInvHeader.SetCurrentKey("Pre-Assigned No.");
PurchInvHeader.SetRange("Pre-Assigned No.", PreAssignedNo);
Assert.IsTrue(PurchInvHeader.FindFirst(), CannotFindPostedInvoiceErr);
end;
local procedure VerifyDraftPurchaseInvoice(DocumentId: Guid; Status: Integer)
var
PurchaseHeader: Record "Purchase Header";
PurchInvEntityAggregate: Record "Purch. Inv. Entity Aggregate";
begin
Assert.IsTrue(PurchaseHeader.GetBySystemId(DocumentId), CannotFindDraftInvoiceErr);
PurchInvEntityAggregate.SetRange(Id, DocumentId);
Assert.IsTrue(PurchInvEntityAggregate.FindFirst(), CannotFindDraftInvoiceErr);
Assert.AreEqual(Status, PurchInvEntityAggregate.Status, InvoiceStatusErr);
end;
local procedure VerifyPostedPurchaseInvoice(DocumentId: Guid; Status: Integer)
var
PurchInvHeader: Record "Purch. Inv. Header";
PurchInvEntityAggregate: Record "Purch. Inv. Entity Aggregate";
begin
PurchInvHeader.SetRange("Draft Invoice SystemId", DocumentId);
Assert.IsFalse(PurchInvHeader.IsEmpty(), CannotFindPostedInvoiceErr);
PurchInvEntityAggregate.SetRange(Id, DocumentId);
Assert.IsTrue(PurchInvEntityAggregate.FindFirst(), CannotFindPostedInvoiceErr);
Assert.AreEqual(Status, PurchInvEntityAggregate.Status, InvoiceStatusErr);
end;
}
| 44.333966 | 162 | 0.695771 |
ed731511b0e34dca2b599164c086dde3f1390027 | 1,902 | pm | Perl | ftemplate/lib/Chj/FP2/IOStream.pm | pflanze/ml2json | 13067c5bc4d4c9e53e1a4b1da20158803ebb8149 | [
"MIT"
]
| 1 | 2021-06-28T04:11:24.000Z | 2021-06-28T04:11:24.000Z | ftemplate/lib/Chj/FP2/IOStream.pm | pflanze/ml2json | 13067c5bc4d4c9e53e1a4b1da20158803ebb8149 | [
"MIT"
]
| null | null | null | ftemplate/lib/Chj/FP2/IOStream.pm | pflanze/ml2json | 13067c5bc4d4c9e53e1a4b1da20158803ebb8149 | [
"MIT"
]
| null | null | null | #
# Copyright 2014 by Christian Jaeger, ch at christianjaeger ch
# Published under the same terms as perl itself
#
=head1 NAME
Chj::FP2::IOStream
=head1 SYNOPSIS
use Chj::FP2::IOStream ':all'; # xopendir_stream, xopendir_pathstream
use Chj::FP2::Stream; # stream_map
use Chj::FP2::Lazy; # Force
use Chj::FP2::List ':all'; # car
my $paths= stream_map sub { my ($item)= @_; "$base/$item" }, xopendir_stream $base;
# which is the same as: my $paths= xopendir_pathstream $base;
my $firstpath= car Force $paths;
# etc.
=head1 DESCRIPTION
Lazy IO (well, input), by reading items lazily as stream items.
(It's arguable whether that is a good idea; Haskell uses different
approaches nowadays. But it's still a nice way to do things if you're
careful.)
=cut
package Chj::FP2::IOStream;
@ISA="Exporter"; require Exporter;
@EXPORT=qw();
@EXPORT_OK=qw(xopendir_stream
xopendir_pathstream);
%EXPORT_TAGS=(all=>[@EXPORT,@EXPORT_OK]);
use strict; use warnings FATAL => 'uninitialized';
use Chj::FP2::Lazy;
use Chj::xopendir;
use Chj::FP2::List ':all';
use Chj::FP2::Stream 'stream_map', 'array2stream';
use Chj::FP::Array_sort;
# Lazily stream the entries of directory $path: each forced step reads one
# name via ->xnread and conses it onto a promise for the rest; when xnread
# returns undef the handle is closed and the stream ends (undef).
sub _xopendir_stream ($) {
    my ($path)=@_;
    my $d= xopendir $path;    # Chj::xopendir handle (x* API dies on failure)
    # $next refers to itself, making the closure cyclic; the "undef $next"
    # below breaks the cycle once the directory is exhausted.
    # NOTE(review): a stream that is never fully forced keeps the dirhandle
    # and the cycle alive — confirm this is acceptable to callers.
    my $next; $next= sub {
	Delay {
	    if (defined (my $item= $d->xnread)) {
		cons $item, &$next
	    } else {
		$d->xclose;
		undef $next;
		undef    # end-of-stream marker
	    }
	}
    };
    &$next
}
# Read the whole directory $dirpath eagerly, sort the entry names with the
# comparison function $cmp, and hand them back as an already-realized stream.
sub _xopendir_stream_sorted ($$) {
    my ($dirpath, $cmp)= @_;
    my $dh= xopendir $dirpath;
    my $entries= array_sort [$dh->xnread], $cmp;
    $dh->xclose;
    return array2stream $entries;
}
# Stream the entries of $path.  With a comparison function $maybe_cmp the
# entries are read eagerly and sorted; without one they are produced lazily
# in directory order.
sub xopendir_stream ($;$) {
    my ($path, $maybe_cmp)= @_;
    return $maybe_cmp
      ? _xopendir_stream_sorted($path, $maybe_cmp)
      : _xopendir_stream($path);
}
# Like xopendir_stream, but yields "$base/<entry>" path strings instead of
# the bare entry names.
sub xopendir_pathstream ($;$) {
    my ($base, $maybe_cmp)= @_;
    my $make_path= sub {
	my ($entry)= @_;
	return "$base/$entry";
    };
    stream_map $make_path, xopendir_stream($base, $maybe_cmp)
}
1
| 20.901099 | 84 | 0.650894 |
ed1c53000ff0c016b198557c9851c95d6d0e2848 | 43,511 | pm | Perl | src/tools/msvc/Solution.pm | mjolka/postgres | 38bfae36526636ef55daf7cf2a3282403587cb5b | [
"PostgreSQL"
]
| null | null | null | src/tools/msvc/Solution.pm | mjolka/postgres | 38bfae36526636ef55daf7cf2a3282403587cb5b | [
"PostgreSQL"
]
| 6 | 2016-05-09T03:01:09.000Z | 2020-08-05T16:16:14.000Z | src/tools/msvc/Solution.pm | mjolka/postgres | 38bfae36526636ef55daf7cf2a3282403587cb5b | [
"PostgreSQL"
]
| null | null | null |
# Copyright (c) 2021-2022, PostgreSQL Global Development Group
package Solution;
#
# Package that encapsulates a Visual C++ solution file generation
#
# src/tools/msvc/Solution.pm
#
use Carp;
use strict;
use warnings;
use VSObjectFactory;
no warnings qw(redefine); ## no critic
# Shared constructor for all Solution flavors.  Blesses the bookkeeping
# hash, detects the build platform, and validates/defaults the page- and
# WAL-size options.  The VS version fields stay undef here; the subclass
# constructors fill them in.
sub _new
{
	my ($classname, $options) = @_;

	my $self = bless(
		{
			projects => {},
			options => $options,
			VisualStudioVersion => undef,
			MinimumVisualStudioVersion => undef,
			vcver => undef,
			platform => undef,
		},
		$classname);

	$self->DeterminePlatform();

	# XSLT support is only usable on top of libxml2.
	die "XSLT requires XML\n" if $options->{xslt} && !$options->{xml};

	# undef or 0 means default
	$options->{blocksize} ||= 8;
	die "Bad blocksize $options->{blocksize}"
	  unless grep { $_ == $options->{blocksize} } (1, 2, 4, 8, 16, 32);

	# undef or 0 means default
	$options->{segsize} ||= 1;

	# only allow segsize 1 for now, as we can't do large files yet in windows
	die "Bad segsize $options->{segsize}"
	  unless $options->{segsize} == 1;

	# undef or 0 means default
	$options->{wal_blocksize} ||= 8;
	die "Bad wal_blocksize $options->{wal_blocksize}"
	  unless grep { $_ == $options->{wal_blocksize} }
	  (1, 2, 4, 8, 16, 32, 64);

	return $self;
}
# Subclass hook: extra lines to inject near the top of the generated .sln
# file.  The base implementation contributes nothing.
sub GetAdditionalHeaders
{
	return q{};
}
# Decide the target platform and store it in $self->{platform}: 'x64' or
# 'Win32' on a real Windows host, the placeholder 'FAKE' everywhere else
# (used when generating files without building).
sub DeterminePlatform
{
	my $self = shift;
	if ($^O eq "MSWin32")
	{
		# Examine CL help output to determine if we are in 32 or 64-bit mode.
		my $output = `cl /help 2>&1`;
		$? >> 8 == 0 or die "cl command not found";
		# NOTE(review): the match assumes only the 64-bit compiler advertises
		# a /favor:<...AMD64...> option — confirm against current cl output.
		$self->{platform} =
		  ($output =~ /^\/favor:<.+AMD64/m) ? 'x64' : 'Win32';
	}
	else
	{
		$self->{platform} = 'FAKE';
	}
	print "Detected hardware platform: $self->{platform}\n";
	return;
}
# Return 1 if $oldfile is newer than $newfile, or if $newfile doesn't exist.
# Special case - if config.pl has changed, always return 1
# Return 1 if $oldfile is newer than $newfile, or if $newfile doesn't exist.
# Special case: a change to config.pl / config_default.pl makes everything
# out of date, so those are checked recursively first.
sub IsNewer
{
	my ($newfile, $oldfile) = @_;
	warn "source file \"$oldfile\" does not exist" unless -e $oldfile;

	my @config_files =
	  ('src/tools/msvc/config.pl', 'src/tools/msvc/config_default.pl');
	unless (grep { $oldfile eq $_ } @config_files)
	{
		foreach my $cfg (@config_files)
		{
			return 1 if -f $cfg && IsNewer($newfile, $cfg);
		}
	}

	# A missing target is always out of date.
	return 1 unless -e $newfile;

	# Compare modification times (stat field 9).
	my $new_mtime = (stat($newfile))[9];
	my $old_mtime = (stat($oldfile))[9];
	return ($new_mtime < $old_mtime) ? 1 : 0;
}
# Copy a file, *not* preserving date. Only works for text files.
# Copy a file, *not* preserving date.  Only works for text files (the
# line-oriented read/write performs CRLF translation on Windows).
# Croaks if either file cannot be opened, or if the copy cannot be
# written out completely.
sub copyFile
{
	my ($src, $dest) = @_;
	open(my $i, '<', $src)  || croak "Could not open $src";
	open(my $o, '>', $dest) || croak "Could not open $dest";
	while (<$i>)
	{
		print $o $_;
	}
	close($i);

	# Buffered write errors (e.g. disk full) only surface at close time;
	# check it so we never silently leave a truncated copy behind.
	close($o) || croak "Could not write $dest";
	return;
}
# Fetch version of OpenSSL based on a parsing of the command shipped with
# the installer this build is linking to. This returns as result an array
# made of the three first digits of the OpenSSL version, which is enough
# to decide which options to apply depending on the version of OpenSSL
# linking with.
# Run "openssl version" from the configured OpenSSL installation and return
# the first three numeric components of the reported version as a list
# (major, minor, patch).  Croaks when the binary cannot be run or its
# output does not look like a version string.
sub GetOpenSSLVersion
{
	my $self = shift;
	# Attempt to get OpenSSL version and location. This assumes that
	# openssl.exe is in the specified directory.
	# Quote the .exe name in case it has spaces
	my $opensslcmd =
	  qq("$self->{options}->{openssl}\\bin\\openssl.exe" version 2>&1);
	my $sslout = `$opensslcmd`;
	$? >> 8 == 0
	  or croak
	  "Unable to determine OpenSSL version: The openssl.exe command wasn't found.";
	# e.g. "OpenSSL 1.1.1k  25 Mar 2021" -> (1, 1, 1); the trailing \D
	# requires a non-digit (such as a letter suffix or space) after the
	# third component.
	if ($sslout =~ /(\d+)\.(\d+)\.(\d+)(\D)/m)
	{
		return ($1, $2, $3);
	}
	croak
	  "Unable to determine OpenSSL version: The openssl.exe version could not be determined.";
}
sub GenerateFiles
{
my $self = shift;
my $bits = $self->{platform} eq 'Win32' ? 32 : 64;
my $ac_init_found = 0;
my $package_name;
my $package_version;
my $package_bugreport;
my $package_url;
my ($majorver, $minorver);
my $ac_define_openssl_api_compat_found = 0;
my $openssl_api_compat;
# Parse configure.ac to get version numbers
open(my $c, '<', "configure.ac")
|| confess("Could not open configure.ac for reading\n");
while (<$c>)
{
if (/^AC_INIT\(\[([^\]]+)\], \[([^\]]+)\], \[([^\]]+)\], \[([^\]]*)\], \[([^\]]+)\]/
)
{
$ac_init_found = 1;
$package_name = $1;
$package_version = $2;
$package_bugreport = $3;
#$package_tarname = $4;
$package_url = $5;
if ($package_version !~ /^(\d+)(?:\.(\d+))?/)
{
confess "Bad format of version: $package_version\n";
}
$majorver = sprintf("%d", $1);
$minorver = sprintf("%d", $2 ? $2 : 0);
}
elsif (/\bAC_DEFINE\(OPENSSL_API_COMPAT, \[([0-9xL]+)\]/)
{
$ac_define_openssl_api_compat_found = 1;
$openssl_api_compat = $1;
}
}
close($c);
confess "Unable to parse configure.ac for all variables!"
unless $ac_init_found && $ac_define_openssl_api_compat_found;
if (IsNewer("src/include/pg_config_os.h", "src/include/port/win32.h"))
{
print "Copying pg_config_os.h...\n";
copyFile("src/include/port/win32.h", "src/include/pg_config_os.h");
}
print "Generating configuration headers...\n";
my $extraver = $self->{options}->{extraver};
$extraver = '' unless defined $extraver;
my $port = $self->{options}->{"--with-pgport"} || 5432;
# Every symbol in pg_config.h.in must be accounted for here. Set
# to undef if the symbol should not be defined.
my %define = (
ALIGNOF_DOUBLE => 8,
ALIGNOF_INT => 4,
ALIGNOF_LONG => 4,
ALIGNOF_LONG_LONG_INT => 8,
ALIGNOF_PG_INT128_TYPE => undef,
ALIGNOF_SHORT => 2,
AC_APPLE_UNIVERSAL_BUILD => undef,
BLCKSZ => 1024 * $self->{options}->{blocksize},
CONFIGURE_ARGS => '"' . $self->GetFakeConfigure() . '"',
DEF_PGPORT => $port,
DEF_PGPORT_STR => qq{"$port"},
ENABLE_GSS => $self->{options}->{gss} ? 1 : undef,
ENABLE_NLS => $self->{options}->{nls} ? 1 : undef,
ENABLE_THREAD_SAFETY => 1,
GETTIMEOFDAY_1ARG => undef,
HAVE_APPEND_HISTORY => undef,
HAVE_ASN1_STRING_GET0_DATA => undef,
HAVE_ATOMICS => 1,
HAVE_ATOMIC_H => undef,
HAVE_BACKTRACE_SYMBOLS => undef,
HAVE_BIO_GET_DATA => undef,
HAVE_BIO_METH_NEW => undef,
HAVE_CLOCK_GETTIME => undef,
HAVE_COMPUTED_GOTO => undef,
HAVE_COPYFILE => undef,
HAVE_COPYFILE_H => undef,
HAVE_CRTDEFS_H => undef,
HAVE_CRYPTO_LOCK => undef,
HAVE_DECL_FDATASYNC => 0,
HAVE_DECL_F_FULLFSYNC => 0,
HAVE_DECL_LLVMCREATEGDBREGISTRATIONLISTENER => 0,
HAVE_DECL_LLVMCREATEPERFJITEVENTLISTENER => 0,
HAVE_DECL_LLVMGETHOSTCPUNAME => 0,
HAVE_DECL_LLVMGETHOSTCPUFEATURES => 0,
HAVE_DECL_LLVMORCGETSYMBOLADDRESSIN => 0,
HAVE_DECL_POSIX_FADVISE => 0,
HAVE_DECL_PREADV => 0,
HAVE_DECL_PWRITEV => 0,
HAVE_DECL_RTLD_GLOBAL => 0,
HAVE_DECL_RTLD_NOW => 0,
HAVE_DECL_SIGWAIT => 0,
HAVE_DECL_STRLCAT => 0,
HAVE_DECL_STRLCPY => 0,
HAVE_DECL_STRNLEN => 1,
HAVE_DECL_STRTOLL => 1,
HAVE_DECL_STRTOULL => 1,
HAVE_DLOPEN => undef,
HAVE_EDITLINE_HISTORY_H => undef,
HAVE_EDITLINE_READLINE_H => undef,
HAVE_EXECINFO_H => undef,
HAVE_EXPLICIT_BZERO => undef,
HAVE_FDATASYNC => undef,
HAVE_FLS => undef,
HAVE_FSEEKO => 1,
HAVE_FUNCNAME__FUNC => undef,
HAVE_FUNCNAME__FUNCTION => 1,
HAVE_GCC__ATOMIC_INT32_CAS => undef,
HAVE_GCC__ATOMIC_INT64_CAS => undef,
HAVE_GCC__SYNC_CHAR_TAS => undef,
HAVE_GCC__SYNC_INT32_CAS => undef,
HAVE_GCC__SYNC_INT32_TAS => undef,
HAVE_GCC__SYNC_INT64_CAS => undef,
HAVE_GETADDRINFO => undef,
HAVE_GETHOSTBYNAME_R => undef,
HAVE_GETIFADDRS => undef,
HAVE_GETOPT => undef,
HAVE_GETOPT_H => undef,
HAVE_GETOPT_LONG => undef,
HAVE_GETPEEREID => undef,
HAVE_GETPEERUCRED => undef,
HAVE_GETPWUID_R => undef,
HAVE_GETRLIMIT => undef,
HAVE_GETRUSAGE => undef,
HAVE_GETTIMEOFDAY => undef,
HAVE_GSSAPI_GSSAPI_H => undef,
HAVE_GSSAPI_H => undef,
HAVE_HMAC_CTX_FREE => undef,
HAVE_HMAC_CTX_NEW => undef,
HAVE_HISTORY_H => undef,
HAVE_HISTORY_TRUNCATE_FILE => undef,
HAVE_IFADDRS_H => undef,
HAVE_INET_ATON => undef,
HAVE_INT_TIMEZONE => 1,
HAVE_INT64 => undef,
HAVE_INT8 => undef,
HAVE_INTTYPES_H => undef,
HAVE_INT_OPTERR => undef,
HAVE_INT_OPTRESET => undef,
HAVE_IPV6 => 1,
HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P => undef,
HAVE_KQUEUE => undef,
HAVE_LANGINFO_H => undef,
HAVE_LDAP_H => undef,
HAVE_LDAP_INITIALIZE => undef,
HAVE_LIBCRYPTO => undef,
HAVE_LIBLDAP => undef,
HAVE_LIBLZ4 => undef,
HAVE_LIBM => undef,
HAVE_LIBPAM => undef,
HAVE_LIBREADLINE => undef,
HAVE_LIBSELINUX => undef,
HAVE_LIBSSL => undef,
HAVE_LIBWLDAP32 => undef,
HAVE_LIBXML2 => undef,
HAVE_LIBXSLT => undef,
HAVE_LIBZ => $self->{options}->{zlib} ? 1 : undef,
HAVE_LINK => undef,
HAVE_LOCALE_T => 1,
HAVE_LONG_INT_64 => undef,
HAVE_LONG_LONG_INT_64 => 1,
HAVE_LZ4_H => undef,
HAVE_MBARRIER_H => undef,
HAVE_MBSTOWCS_L => 1,
HAVE_MEMORY_H => 1,
HAVE_MEMSET_S => undef,
HAVE_MINIDUMP_TYPE => 1,
HAVE_MKDTEMP => undef,
HAVE_NETINET_TCP_H => undef,
HAVE_NET_IF_H => undef,
HAVE_OPENSSL_INIT_SSL => undef,
HAVE_OSSP_UUID_H => undef,
HAVE_PAM_PAM_APPL_H => undef,
HAVE_POLL => undef,
HAVE_POLL_H => undef,
HAVE_POSIX_DECL_SIGWAIT => undef,
HAVE_POSIX_FADVISE => undef,
HAVE_POSIX_FALLOCATE => undef,
HAVE_PPC_LWARX_MUTEX_HINT => undef,
HAVE_PPOLL => undef,
HAVE_PREAD => undef,
HAVE_PSTAT => undef,
HAVE_PS_STRINGS => undef,
HAVE_PTHREAD => undef,
HAVE_PTHREAD_BARRIER_WAIT => undef,
HAVE_PTHREAD_IS_THREADED_NP => undef,
HAVE_PTHREAD_PRIO_INHERIT => undef,
HAVE_PWRITE => undef,
HAVE_READLINE_H => undef,
HAVE_READLINE_HISTORY_H => undef,
HAVE_READLINE_READLINE_H => undef,
HAVE_READLINK => undef,
HAVE_READV => undef,
HAVE_RL_COMPLETION_MATCHES => undef,
HAVE_RL_COMPLETION_SUPPRESS_QUOTE => undef,
HAVE_RL_FILENAME_COMPLETION_FUNCTION => undef,
HAVE_RL_FILENAME_QUOTE_CHARACTERS => undef,
HAVE_RL_FILENAME_QUOTING_FUNCTION => undef,
HAVE_RL_RESET_SCREEN_SIZE => undef,
HAVE_RL_VARIABLE_BIND => undef,
HAVE_SECURITY_PAM_APPL_H => undef,
HAVE_SETENV => undef,
HAVE_SETPROCTITLE => undef,
HAVE_SETPROCTITLE_FAST => undef,
HAVE_SETSID => undef,
HAVE_SHM_OPEN => undef,
HAVE_SOCKLEN_T => 1,
HAVE_SPINLOCKS => 1,
HAVE_STDBOOL_H => 1,
HAVE_STDINT_H => 1,
HAVE_STDLIB_H => 1,
HAVE_STRCHRNUL => undef,
HAVE_STRERROR_R => undef,
HAVE_STRINGS_H => undef,
HAVE_STRING_H => 1,
HAVE_STRLCAT => undef,
HAVE_STRLCPY => undef,
HAVE_STRNLEN => 1,
HAVE_STRSIGNAL => undef,
HAVE_STRTOF => 1,
HAVE_STRTOLL => 1,
HAVE_STRTOQ => undef,
HAVE_STRTOULL => 1,
HAVE_STRTOUQ => undef,
HAVE_STRUCT_ADDRINFO => 1,
HAVE_STRUCT_CMSGCRED => undef,
HAVE_STRUCT_OPTION => undef,
HAVE_STRUCT_SOCKADDR_SA_LEN => undef,
HAVE_STRUCT_SOCKADDR_STORAGE => 1,
HAVE_STRUCT_SOCKADDR_STORAGE_SS_FAMILY => 1,
HAVE_STRUCT_SOCKADDR_STORAGE_SS_LEN => undef,
HAVE_STRUCT_SOCKADDR_STORAGE___SS_FAMILY => undef,
HAVE_STRUCT_SOCKADDR_STORAGE___SS_LEN => undef,
HAVE_STRUCT_SOCKADDR_UN => undef,
HAVE_STRUCT_TM_TM_ZONE => undef,
HAVE_SYNC_FILE_RANGE => undef,
HAVE_SYMLINK => 1,
HAVE_SYNCFS => undef,
HAVE_SYSLOG => undef,
HAVE_SYS_EPOLL_H => undef,
HAVE_SYS_EVENT_H => undef,
HAVE_SYS_IPC_H => undef,
HAVE_SYS_PERSONALITY_H => undef,
HAVE_SYS_PRCTL_H => undef,
HAVE_SYS_PROCCTL_H => undef,
HAVE_SYS_PSTAT_H => undef,
HAVE_SYS_RESOURCE_H => undef,
HAVE_SYS_SELECT_H => undef,
HAVE_SYS_SEM_H => undef,
HAVE_SYS_SHM_H => undef,
HAVE_SYS_SOCKIO_H => undef,
HAVE_SYS_STAT_H => 1,
HAVE_SYS_TAS_H => undef,
HAVE_SYS_TYPES_H => 1,
HAVE_SYS_UCRED_H => undef,
HAVE_SYS_UIO_H => undef,
HAVE_SYS_UN_H => undef,
HAVE_TERMIOS_H => undef,
HAVE_TYPEOF => undef,
HAVE_UCRED_H => undef,
HAVE_UINT64 => undef,
HAVE_UINT8 => undef,
HAVE_UNION_SEMUN => undef,
HAVE_UNISTD_H => 1,
HAVE_UNSETENV => undef,
HAVE_USELOCALE => undef,
HAVE_UUID_BSD => undef,
HAVE_UUID_E2FS => undef,
HAVE_UUID_OSSP => undef,
HAVE_UUID_H => undef,
HAVE_UUID_UUID_H => undef,
HAVE_WINLDAP_H => undef,
HAVE_WCSTOMBS_L => 1,
HAVE_WCTYPE_H => 1,
HAVE_WRITEV => undef,
HAVE_X509_GET_SIGNATURE_NID => 1,
HAVE_X86_64_POPCNTQ => undef,
HAVE__BOOL => undef,
HAVE__BUILTIN_BSWAP16 => undef,
HAVE__BUILTIN_BSWAP32 => undef,
HAVE__BUILTIN_BSWAP64 => undef,
HAVE__BUILTIN_CLZ => undef,
HAVE__BUILTIN_CONSTANT_P => undef,
HAVE__BUILTIN_CTZ => undef,
HAVE__BUILTIN_OP_OVERFLOW => undef,
HAVE__BUILTIN_POPCOUNT => undef,
HAVE__BUILTIN_TYPES_COMPATIBLE_P => undef,
HAVE__BUILTIN_UNREACHABLE => undef,
HAVE__CONFIGTHREADLOCALE => 1,
HAVE__CPUID => 1,
HAVE__GET_CPUID => undef,
HAVE__STATIC_ASSERT => undef,
HAVE___STRTOLL => undef,
HAVE___STRTOULL => undef,
INT64_MODIFIER => qq{"ll"},
LOCALE_T_IN_XLOCALE => undef,
MAXIMUM_ALIGNOF => 8,
MEMSET_LOOP_LIMIT => 1024,
OPENSSL_API_COMPAT => $openssl_api_compat,
PACKAGE_BUGREPORT => qq{"$package_bugreport"},
PACKAGE_NAME => qq{"$package_name"},
PACKAGE_STRING => qq{"$package_name $package_version"},
PACKAGE_TARNAME => lc qq{"$package_name"},
PACKAGE_URL => qq{"$package_url"},
PACKAGE_VERSION => qq{"$package_version"},
PG_INT128_TYPE => undef,
PG_INT64_TYPE => 'long long int',
PG_KRB_SRVNAM => qq{"postgres"},
PG_MAJORVERSION => qq{"$majorver"},
PG_MAJORVERSION_NUM => $majorver,
PG_MINORVERSION_NUM => $minorver,
PG_PRINTF_ATTRIBUTE => undef,
PG_USE_STDBOOL => 1,
PG_VERSION => qq{"$package_version$extraver"},
PG_VERSION_NUM => sprintf("%d%04d", $majorver, $minorver),
PG_VERSION_STR =>
qq{"PostgreSQL $package_version$extraver, compiled by Visual C++ build " CppAsString2(_MSC_VER) ", $bits-bit"},
PROFILE_PID_DIR => undef,
PTHREAD_CREATE_JOINABLE => undef,
RELSEG_SIZE => (1024 / $self->{options}->{blocksize}) *
$self->{options}->{segsize} * 1024,
SIZEOF_BOOL => 1,
SIZEOF_LONG => 4,
SIZEOF_OFF_T => undef,
SIZEOF_SIZE_T => $bits / 8,
SIZEOF_VOID_P => $bits / 8,
STDC_HEADERS => 1,
STRERROR_R_INT => undef,
USE_ARMV8_CRC32C => undef,
USE_ARMV8_CRC32C_WITH_RUNTIME_CHECK => undef,
USE_ASSERT_CHECKING => $self->{options}->{asserts} ? 1 : undef,
USE_BONJOUR => undef,
USE_BSD_AUTH => undef,
USE_ICU => $self->{options}->{icu} ? 1 : undef,
USE_LIBXML => undef,
USE_LIBXSLT => undef,
USE_LZ4 => undef,
USE_LDAP => $self->{options}->{ldap} ? 1 : undef,
USE_LLVM => undef,
USE_NAMED_POSIX_SEMAPHORES => undef,
USE_OPENSSL => undef,
USE_PAM => undef,
USE_SLICING_BY_8_CRC32C => undef,
USE_SSE42_CRC32C => undef,
USE_SSE42_CRC32C_WITH_RUNTIME_CHECK => 1,
USE_SYSTEMD => undef,
USE_SYSV_SEMAPHORES => undef,
USE_SYSV_SHARED_MEMORY => undef,
USE_UNNAMED_POSIX_SEMAPHORES => undef,
USE_WIN32_SEMAPHORES => 1,
USE_WIN32_SHARED_MEMORY => 1,
WCSTOMBS_L_IN_XLOCALE => undef,
WORDS_BIGENDIAN => undef,
XLOG_BLCKSZ => 1024 * $self->{options}->{wal_blocksize},
_FILE_OFFSET_BITS => undef,
_LARGEFILE_SOURCE => undef,
_LARGE_FILES => undef,
inline => '__inline',
pg_restrict => '__restrict',
# not defined, because it'd conflict with __declspec(restrict)
restrict => undef,
typeof => undef,);
if ($self->{options}->{uuid})
{
$define{HAVE_UUID_OSSP} = 1;
$define{HAVE_UUID_H} = 1;
}
if ($self->{options}->{xml})
{
$define{HAVE_LIBXML2} = 1;
$define{USE_LIBXML} = 1;
}
if ($self->{options}->{xslt})
{
$define{HAVE_LIBXSLT} = 1;
$define{USE_LIBXSLT} = 1;
}
if ($self->{options}->{lz4})
{
$define{HAVE_LIBLZ4} = 1;
$define{HAVE_LZ4_H} = 1;
$define{USE_LZ4} = 1;
}
if ($self->{options}->{openssl})
{
$define{USE_OPENSSL} = 1;
my ($digit1, $digit2, $digit3) = $self->GetOpenSSLVersion();
# More symbols are needed with OpenSSL 1.1.0 and above.
if ( ($digit1 >= '3' && $digit2 >= '0' && $digit3 >= '0')
|| ($digit1 >= '1' && $digit2 >= '1' && $digit3 >= '0'))
{
$define{HAVE_ASN1_STRING_GET0_DATA} = 1;
$define{HAVE_BIO_GET_DATA} = 1;
$define{HAVE_BIO_METH_NEW} = 1;
$define{HAVE_HMAC_CTX_FREE} = 1;
$define{HAVE_HMAC_CTX_NEW} = 1;
$define{HAVE_OPENSSL_INIT_SSL} = 1;
}
}
$self->GenerateConfigHeader('src/include/pg_config.h', \%define, 1);
$self->GenerateConfigHeader('src/include/pg_config_ext.h', \%define, 0);
$self->GenerateConfigHeader('src/interfaces/ecpg/include/ecpg_config.h',
\%define, 0);
$self->GenerateDefFile(
"src/interfaces/libpq/libpqdll.def",
"src/interfaces/libpq/exports.txt",
"LIBPQ");
$self->GenerateDefFile(
"src/interfaces/ecpg/ecpglib/ecpglib.def",
"src/interfaces/ecpg/ecpglib/exports.txt",
"LIBECPG");
$self->GenerateDefFile(
"src/interfaces/ecpg/compatlib/compatlib.def",
"src/interfaces/ecpg/compatlib/exports.txt",
"LIBECPG_COMPAT");
$self->GenerateDefFile(
"src/interfaces/ecpg/pgtypeslib/pgtypeslib.def",
"src/interfaces/ecpg/pgtypeslib/exports.txt",
"LIBPGTYPES");
chdir('src/backend/utils');
my $pg_proc_dat = '../../../src/include/catalog/pg_proc.dat';
if ( IsNewer('fmgr-stamp', 'Gen_fmgrtab.pl')
|| IsNewer('fmgr-stamp', '../catalog/Catalog.pm')
|| IsNewer('fmgr-stamp', $pg_proc_dat)
|| IsNewer('fmgr-stamp', '../../../src/include/access/transam.h'))
{
system(
"perl -I ../catalog Gen_fmgrtab.pl --include-path ../../../src/include/ $pg_proc_dat"
);
open(my $f, '>', 'fmgr-stamp')
|| confess "Could not touch fmgr-stamp";
close($f);
}
chdir('../../..');
if (IsNewer(
'src/include/utils/fmgroids.h',
'src/backend/utils/fmgroids.h'))
{
copyFile('src/backend/utils/fmgroids.h',
'src/include/utils/fmgroids.h');
}
if (IsNewer(
'src/include/utils/fmgrprotos.h',
'src/backend/utils/fmgrprotos.h'))
{
copyFile(
'src/backend/utils/fmgrprotos.h',
'src/include/utils/fmgrprotos.h');
}
if (IsNewer(
'src/include/storage/lwlocknames.h',
'src/backend/storage/lmgr/lwlocknames.txt'))
{
print "Generating lwlocknames.c and lwlocknames.h...\n";
chdir('src/backend/storage/lmgr');
system('perl generate-lwlocknames.pl lwlocknames.txt');
chdir('../../../..');
}
if (IsNewer(
'src/include/storage/lwlocknames.h',
'src/backend/storage/lmgr/lwlocknames.h'))
{
copyFile(
'src/backend/storage/lmgr/lwlocknames.h',
'src/include/storage/lwlocknames.h');
}
if (IsNewer('src/include/utils/probes.h', 'src/backend/utils/probes.d'))
{
print "Generating probes.h...\n";
system(
'perl src/backend/utils/Gen_dummy_probes.pl src/backend/utils/probes.d > src/include/utils/probes.h'
);
}
if ($self->{options}->{python}
&& IsNewer(
'src/pl/plpython/spiexceptions.h',
'src/backend/utils/errcodes.txt'))
{
print "Generating spiexceptions.h...\n";
system(
'perl src/pl/plpython/generate-spiexceptions.pl src/backend/utils/errcodes.txt > src/pl/plpython/spiexceptions.h'
);
}
if (IsNewer(
'src/include/utils/errcodes.h',
'src/backend/utils/errcodes.txt'))
{
print "Generating errcodes.h...\n";
system(
'perl src/backend/utils/generate-errcodes.pl src/backend/utils/errcodes.txt > src/backend/utils/errcodes.h'
);
copyFile('src/backend/utils/errcodes.h',
'src/include/utils/errcodes.h');
}
if (IsNewer(
'src/pl/plpgsql/src/plerrcodes.h',
'src/backend/utils/errcodes.txt'))
{
print "Generating plerrcodes.h...\n";
system(
'perl src/pl/plpgsql/src/generate-plerrcodes.pl src/backend/utils/errcodes.txt > src/pl/plpgsql/src/plerrcodes.h'
);
}
if ($self->{options}->{tcl}
&& IsNewer(
'src/pl/tcl/pltclerrcodes.h', 'src/backend/utils/errcodes.txt'))
{
print "Generating pltclerrcodes.h...\n";
system(
'perl src/pl/tcl/generate-pltclerrcodes.pl src/backend/utils/errcodes.txt > src/pl/tcl/pltclerrcodes.h'
);
}
if (IsNewer('src/bin/psql/sql_help.h', 'src/bin/psql/create_help.pl'))
{
print "Generating sql_help.h...\n";
chdir('src/bin/psql');
system("perl create_help.pl ../../../doc/src/sgml/ref sql_help");
chdir('../../..');
}
if (IsNewer('src/common/kwlist_d.h', 'src/include/parser/kwlist.h'))
{
print "Generating kwlist_d.h...\n";
system(
'perl -I src/tools src/tools/gen_keywordlist.pl --extern -o src/common src/include/parser/kwlist.h'
);
}
if (IsNewer(
'src/pl/plpgsql/src/pl_reserved_kwlist_d.h',
'src/pl/plpgsql/src/pl_reserved_kwlist.h')
|| IsNewer(
'src/pl/plpgsql/src/pl_unreserved_kwlist_d.h',
'src/pl/plpgsql/src/pl_unreserved_kwlist.h'))
{
print
"Generating pl_reserved_kwlist_d.h and pl_unreserved_kwlist_d.h...\n";
chdir('src/pl/plpgsql/src');
system(
'perl -I ../../../tools ../../../tools/gen_keywordlist.pl --varname ReservedPLKeywords pl_reserved_kwlist.h'
);
system(
'perl -I ../../../tools ../../../tools/gen_keywordlist.pl --varname UnreservedPLKeywords pl_unreserved_kwlist.h'
);
chdir('../../../..');
}
if (IsNewer(
'src/interfaces/ecpg/preproc/c_kwlist_d.h',
'src/interfaces/ecpg/preproc/c_kwlist.h')
|| IsNewer(
'src/interfaces/ecpg/preproc/ecpg_kwlist_d.h',
'src/interfaces/ecpg/preproc/ecpg_kwlist.h'))
{
print "Generating c_kwlist_d.h and ecpg_kwlist_d.h...\n";
chdir('src/interfaces/ecpg/preproc');
system(
'perl -I ../../../tools ../../../tools/gen_keywordlist.pl --varname ScanCKeywords --no-case-fold c_kwlist.h'
);
system(
'perl -I ../../../tools ../../../tools/gen_keywordlist.pl --varname ScanECPGKeywords ecpg_kwlist.h'
);
chdir('../../../..');
}
if (IsNewer(
'src/interfaces/ecpg/preproc/preproc.y',
'src/backend/parser/gram.y'))
{
print "Generating preproc.y...\n";
chdir('src/interfaces/ecpg/preproc');
system('perl parse.pl < ../../../backend/parser/gram.y > preproc.y');
chdir('../../../..');
}
unless (-f "src/port/pg_config_paths.h")
{
print "Generating pg_config_paths.h...\n";
open(my $o, '>', 'src/port/pg_config_paths.h')
|| confess "Could not open pg_config_paths.h";
print $o <<EOF;
#define PGBINDIR "/bin"
#define PGSHAREDIR "/share"
#define SYSCONFDIR "/etc"
#define INCLUDEDIR "/include"
#define PKGINCLUDEDIR "/include"
#define INCLUDEDIRSERVER "/include/server"
#define LIBDIR "/lib"
#define PKGLIBDIR "/lib"
#define LOCALEDIR "/share/locale"
#define DOCDIR "/doc"
#define HTMLDIR "/doc"
#define MANDIR "/man"
EOF
close($o);
}
my $mf = Project::read_file('src/backend/catalog/Makefile');
$mf =~ s{\\\r?\n}{}g;
$mf =~ /^CATALOG_HEADERS\s*:?=(.*)$/gm
|| croak "Could not find CATALOG_HEADERS in Makefile\n";
my @bki_srcs = split /\s+/, $1;
$mf =~ /^POSTGRES_BKI_DATA\s*:?=[^,]+,(.*)\)$/gm
|| croak "Could not find POSTGRES_BKI_DATA in Makefile\n";
my @bki_data = split /\s+/, $1;
my $need_genbki = 0;
foreach my $bki (@bki_srcs, @bki_data)
{
next if $bki eq "";
if (IsNewer(
'src/backend/catalog/bki-stamp',
"src/include/catalog/$bki"))
{
$need_genbki = 1;
last;
}
}
$need_genbki = 1
if IsNewer('src/backend/catalog/bki-stamp',
'src/backend/catalog/genbki.pl');
$need_genbki = 1
if IsNewer('src/backend/catalog/bki-stamp',
'src/backend/catalog/Catalog.pm');
if ($need_genbki)
{
chdir('src/backend/catalog');
my $bki_srcs = join(' ../../../src/include/catalog/', @bki_srcs);
system(
"perl genbki.pl --include-path ../../../src/include/ --set-version=$majorver $bki_srcs"
);
open(my $f, '>', 'bki-stamp')
|| confess "Could not touch bki-stamp";
close($f);
chdir('../../..');
}
if (IsNewer(
'src/include/catalog/header-stamp',
'src/backend/catalog/bki-stamp'))
{
# Copy generated headers to include directory.
opendir(my $dh, 'src/backend/catalog/')
|| die "Can't opendir src/backend/catalog/ $!";
my @def_headers = grep { /pg_\w+_d\.h$/ } readdir($dh);
closedir $dh;
foreach my $def_header (@def_headers)
{
copyFile(
"src/backend/catalog/$def_header",
"src/include/catalog/$def_header");
}
copyFile(
'src/backend/catalog/schemapg.h',
'src/include/catalog/schemapg.h');
copyFile(
'src/backend/catalog/system_fk_info.h',
'src/include/catalog/system_fk_info.h');
open(my $chs, '>', 'src/include/catalog/header-stamp')
|| confess "Could not touch header-stamp";
close($chs);
}
open(my $o, '>', "doc/src/sgml/version.sgml")
|| croak "Could not write to version.sgml\n";
print $o <<EOF;
<!ENTITY version "$package_version">
<!ENTITY majorversion "$majorver">
EOF
close($o);
return;
}
# Read lines from input file and substitute symbols using the same
# logic that config.status uses. There should be one call of this for
# each AC_CONFIG_HEADERS call in configure.ac.
#
# If the "required" argument is true, we also keep track which of our
# defines have been found and error out if any are left unused at the
# end. That way we avoid accumulating defines in this file that are
# no longer used by configure.
# Transform $config_header_in into $config_header, replacing each
# "#undef SYMBOL" line according to %$defines (config.status-style).
# A defined value becomes "#define SYMBOL value"; an undef value becomes
# a commented-out #undef; a symbol missing from %$defines is an error.
# When $required is true, any %$defines entry never seen in the template
# is also an error, so stale defines can't accumulate in the caller.
sub GenerateConfigHeader
{
	my ($self, $config_header, $defines, $required) = @_;
	my $config_header_in = $config_header . '.in';
	# Regenerate only when the template — or this script itself — changed.
	if ( IsNewer($config_header, $config_header_in)
		|| IsNewer($config_header, __FILE__))
	{
		# Track which defines get consumed, for the $required check below.
		my %defines_copy = %$defines;
		open(my $i, '<', $config_header_in)
		  || confess "Could not open $config_header_in\n";
		open(my $o, '>', $config_header)
		  || confess "Could not write to $config_header\n";
		print $o
		  "/* $config_header. Generated from $config_header_in by src/tools/msvc/Solution.pm. */\n";
		while (<$i>)
		{
			if (m/^#(\s*)undef\s+(\w+)/)
			{
				my $ws = $1;
				my $macro = $2;
				if (exists $defines->{$macro})
				{
					if (defined $defines->{$macro})
					{
						print $o "#${ws}define $macro ", $defines->{$macro},
						  "\n";
					}
					else
					{
						print $o "/* #${ws}undef $macro */\n";
					}
					delete $defines_copy{$macro};
				}
				else
				{
					# $. is the current input line number of the template.
					croak
					  "undefined symbol: $macro at $config_header line $.";
				}
			}
			else
			{
				# Non-#undef lines pass through unchanged.
				print $o $_;
			}
		}
		close($o);
		close($i);
		if ($required && scalar(keys %defines_copy) > 0)
		{
			croak "unused defines: " . join(' ', keys %defines_copy);
		}
	}
}
# Write a linker module-definition file ($deffile) for library $libname
# from a two-column exports list ($txtfile) whose non-comment, non-blank
# lines are "name ordinal".  The file is only regenerated when the
# exports list is newer than the existing .def file.
sub GenerateDefFile
{
	my ($self, $deffile, $txtfile, $libname) = @_;

	return unless IsNewer($deffile, $txtfile);

	print "Generating $deffile...\n";
	open(my $txt, '<', $txtfile) || confess("Could not open $txtfile\n");
	open(my $def, '>', $deffile) || confess("Could not open $deffile\n");
	print $def "LIBRARY $libname\nEXPORTS\n";
	while (my $line = <$txt>)
	{
		# Skip comments and blank lines in the exports list.
		next if $line =~ /^#/ || $line =~ /^\s*$/;
		my ($export_name, $ordinal) = split ' ', $line;
		print $def " $export_name @ $ordinal\n";
	}
	close($def);
	close($txt);
	return;
}
# Create a project object for the current VS version, register it under
# solution folder $folder, optionally seed it from $initialdir, and attach
# the include/library paths for every enabled optional dependency
# (zlib, OpenSSL, NLS, GSSAPI, iconv, ICU, libxml2, libxslt, lz4, uuid).
# Returns the project so the caller can customize it further.
sub AddProject
{
	my ($self, $name, $type, $folder, $initialdir) = @_;
	my $proj =
	  VSObjectFactory::CreateProject($self->{vcver}, $name, $type, $self);
	push @{ $self->{projects}->{$folder} }, $proj;
	$proj->AddDir($initialdir) if ($initialdir);
	if ($self->{options}->{zlib})
	{
		$proj->AddIncludeDir($self->{options}->{zlib} . '\include');
		$proj->AddLibrary($self->{options}->{zlib} . '\lib\zdll.lib');
	}
	if ($self->{options}->{openssl})
	{
		$proj->AddIncludeDir($self->{options}->{openssl} . '\include');
		my ($digit1, $digit2, $digit3) = $self->GetOpenSSLVersion();
		# Starting at version 1.1.0 the OpenSSL installers have
		# changed their library names from:
		# - libeay to libcrypto
		# - ssleay to libssl
		# NOTE(review): this component-wise comparison covers the OpenSSL
		# versions that actually exist (1.0.x, 1.1.x, 3.x.x) but is not a
		# true version comparison — e.g. a hypothetical 2.0.0 would fall
		# into the old-names branch.  Confirm if other versions matter.
		if (   ($digit1 >= '3' && $digit2 >= '0' && $digit3 >= '0')
			|| ($digit1 >= '1' && $digit2 >= '1' && $digit3 >= '0'))
		{
			my $dbgsuffix;
			my $libsslpath;
			my $libcryptopath;
			# The format name of the libraries is slightly
			# different between the Win32 and Win64 platform, so
			# adapt.
			if (-e "$self->{options}->{openssl}/lib/VC/sslcrypto32MD.lib")
			{
				# Win32 here, with a debugging library set.
				$dbgsuffix = 1;
				$libsslpath = '\lib\VC\libssl32.lib';
				$libcryptopath = '\lib\VC\libcrypto32.lib';
			}
			elsif (-e "$self->{options}->{openssl}/lib/VC/sslcrypto64MD.lib")
			{
				# Win64 here, with a debugging library set.
				$dbgsuffix = 1;
				$libsslpath = '\lib\VC\libssl64.lib';
				$libcryptopath = '\lib\VC\libcrypto64.lib';
			}
			else
			{
				# On both Win32 and Win64 the same library
				# names are used without a debugging context.
				$dbgsuffix = 0;
				$libsslpath = '\lib\libssl.lib';
				$libcryptopath = '\lib\libcrypto.lib';
			}
			$proj->AddLibrary($self->{options}->{openssl} . $libsslpath,
				$dbgsuffix);
			$proj->AddLibrary($self->{options}->{openssl} . $libcryptopath,
				$dbgsuffix);
		}
		else
		{
			# Choose which set of libraries to use depending on if
			# debugging libraries are in place in the installer.
			if (-e "$self->{options}->{openssl}/lib/VC/ssleay32MD.lib")
			{
				$proj->AddLibrary(
					$self->{options}->{openssl} . '\lib\VC\ssleay32.lib', 1);
				$proj->AddLibrary(
					$self->{options}->{openssl} . '\lib\VC\libeay32.lib', 1);
			}
			else
			{
				# We don't expect the config-specific library
				# to be here, so don't ask for it in last
				# parameter.
				$proj->AddLibrary(
					$self->{options}->{openssl} . '\lib\ssleay32.lib', 0);
				$proj->AddLibrary(
					$self->{options}->{openssl} . '\lib\libeay32.lib', 0);
			}
		}
	}
	if ($self->{options}->{nls})
	{
		$proj->AddIncludeDir($self->{options}->{nls} . '\include');
		$proj->AddLibrary($self->{options}->{nls} . '\lib\libintl.lib');
	}
	if ($self->{options}->{gss})
	{
		$proj->AddIncludeDir($self->{options}->{gss} . '\include');
		$proj->AddIncludeDir($self->{options}->{gss} . '\include\krb5');
		# MIT Kerberos ships differently-named libs per architecture.
		if ($self->{platform} eq 'Win32')
		{
			$proj->AddLibrary(
				$self->{options}->{gss} . '\lib\i386\krb5_32.lib');
			$proj->AddLibrary(
				$self->{options}->{gss} . '\lib\i386\comerr32.lib');
			$proj->AddLibrary(
				$self->{options}->{gss} . '\lib\i386\gssapi32.lib');
		}
		else
		{
			$proj->AddLibrary(
				$self->{options}->{gss} . '\lib\amd64\krb5_64.lib');
			$proj->AddLibrary(
				$self->{options}->{gss} . '\lib\amd64\comerr64.lib');
			$proj->AddLibrary(
				$self->{options}->{gss} . '\lib\amd64\gssapi64.lib');
		}
	}
	if ($self->{options}->{iconv})
	{
		$proj->AddIncludeDir($self->{options}->{iconv} . '\include');
		$proj->AddLibrary($self->{options}->{iconv} . '\lib\iconv.lib');
	}
	if ($self->{options}->{icu})
	{
		$proj->AddIncludeDir($self->{options}->{icu} . '\include');
		# ICU uses \lib for 32-bit and \lib64 for 64-bit builds.
		if ($self->{platform} eq 'Win32')
		{
			$proj->AddLibrary($self->{options}->{icu} . '\lib\icuin.lib');
			$proj->AddLibrary($self->{options}->{icu} . '\lib\icuuc.lib');
			$proj->AddLibrary($self->{options}->{icu} . '\lib\icudt.lib');
		}
		else
		{
			$proj->AddLibrary($self->{options}->{icu} . '\lib64\icuin.lib');
			$proj->AddLibrary($self->{options}->{icu} . '\lib64\icuuc.lib');
			$proj->AddLibrary($self->{options}->{icu} . '\lib64\icudt.lib');
		}
	}
	if ($self->{options}->{xml})
	{
		$proj->AddIncludeDir($self->{options}->{xml} . '\include');
		$proj->AddIncludeDir($self->{options}->{xml} . '\include\libxml2');
		$proj->AddLibrary($self->{options}->{xml} . '\lib\libxml2.lib');
	}
	if ($self->{options}->{xslt})
	{
		$proj->AddIncludeDir($self->{options}->{xslt} . '\include');
		$proj->AddLibrary($self->{options}->{xslt} . '\lib\libxslt.lib');
	}
	if ($self->{options}->{lz4})
	{
		$proj->AddIncludeDir($self->{options}->{lz4} . '\include');
		$proj->AddLibrary($self->{options}->{lz4} . '\lib\liblz4.lib');
	}
	if ($self->{options}->{uuid})
	{
		$proj->AddIncludeDir($self->{options}->{uuid} . '\include');
		$proj->AddLibrary($self->{options}->{uuid} . '\lib\uuid.lib');
	}
	return $proj;
}
# Regenerate all derived files, save every registered project file, and
# then write the top-level pgsql.sln tying projects and solution folders
# together for the detected platform.
sub Save
{
	my ($self) = @_;
	# Maps solution-folder name -> its generated GUID.
	my %flduid;
	$self->GenerateFiles();
	foreach my $fld (keys %{ $self->{projects} })
	{
		foreach my $proj (@{ $self->{projects}->{$fld} })
		{
			$proj->Save();
		}
	}
	open(my $sln, '>', "pgsql.sln") || croak "Could not write to pgsql.sln\n";
	print $sln <<EOF;
Microsoft Visual Studio Solution File, Format Version $self->{solutionFileVersion}
# $self->{visualStudioName}
EOF
	# Subclasses may inject extra header lines (e.g. VS version stamps).
	print $sln $self->GetAdditionalHeaders();
	# Declare every project, then one folder entry per non-empty folder.
	foreach my $fld (keys %{ $self->{projects} })
	{
		foreach my $proj (@{ $self->{projects}->{$fld} })
		{
			print $sln <<EOF;
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "$proj->{name}", "$proj->{name}$proj->{filenameExtension}", "$proj->{guid}"
EndProject
EOF
		}
		if ($fld ne "")
		{
			# Folders need a GUID of their own; outside Windows we cannot
			# call Win32::GuidGen, so use a placeholder.
			$flduid{$fld} = $^O eq "MSWin32" ? Win32::GuidGen() : 'FAKE';
			print $sln <<EOF;
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "$fld", "$fld", "$flduid{$fld}"
EndProject
EOF
		}
	}
	print $sln <<EOF;
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
		Debug|$self->{platform}= Debug|$self->{platform}
		Release|$self->{platform} = Release|$self->{platform}
	EndGlobalSection
	GlobalSection(ProjectConfigurationPlatforms) = postSolution
EOF
	# Emit Debug/Release build mappings for every project.
	foreach my $fld (keys %{ $self->{projects} })
	{
		foreach my $proj (@{ $self->{projects}->{$fld} })
		{
			print $sln <<EOF;
		$proj->{guid}.Debug|$self->{platform}.ActiveCfg = Debug|$self->{platform}
		$proj->{guid}.Debug|$self->{platform}.Build.0 = Debug|$self->{platform}
		$proj->{guid}.Release|$self->{platform}.ActiveCfg = Release|$self->{platform}
		$proj->{guid}.Release|$self->{platform}.Build.0 = Release|$self->{platform}
EOF
		}
	}
	print $sln <<EOF;
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
	EndGlobalSection
	GlobalSection(NestedProjects) = preSolution
EOF
	# Nest each project under its solution folder's GUID.
	foreach my $fld (keys %{ $self->{projects} })
	{
		next if ($fld eq "");
		foreach my $proj (@{ $self->{projects}->{$fld} })
		{
			print $sln "\t\t$proj->{guid} = $flduid{$fld}\n";
		}
	}
	print $sln <<EOF;
	EndGlobalSection
EndGlobal
EOF
	close($sln);
	return;
}
# Produce the pseudo-"configure arguments" string recorded in the build
# (mirroring which optional features this MSVC build was generated with).
sub GetFakeConfigure
{
	my $self = shift;

	my $opts = $self->{options};
	my @flags = ('--enable-thread-safety');
	push @flags, '--enable-cassert' if $opts->{asserts};
	push @flags, '--enable-nls' if $opts->{nls};
	push @flags, '--enable-tap-tests' if $opts->{tap_tests};
	push @flags, '--with-ldap' if $opts->{ldap};
	push @flags, '--without-zlib' unless $opts->{zlib};
	push @flags, '--with-extra-version' if $opts->{extraver};
	push @flags, '--with-ssl=openssl' if $opts->{openssl};
	push @flags, '--with-uuid' if $opts->{uuid};
	push @flags, '--with-libxml' if $opts->{xml};
	push @flags, '--with-libxslt' if $opts->{xslt};
	push @flags, '--with-lz4' if $opts->{lz4};
	push @flags, '--with-gssapi' if $opts->{gss};
	push @flags, '--with-icu' if $opts->{icu};
	push @flags, '--with-tcl' if $opts->{tcl};
	push @flags, '--with-perl' if $opts->{perl};
	push @flags, '--with-python' if $opts->{python};

	# Propagate a non-default port number when one was configured.
	my $port = $opts->{'--with-pgport'};
	push @flags, "--with-pgport=$port" if defined $port;

	return join(' ', @flags);
}
package VS2013Solution;

#
# Package that encapsulates a Visual Studio 2013 solution file
#

use Carp;
use strict;
use warnings;
use base qw(Solution);

no warnings qw(redefine);    ## no critic

# Constructor: delegate to Solution::_new() and then pin the version
# strings that identify the generated .sln file as a VS2013 solution.
sub new
{
	my $classname = shift;
	my $self = $classname->SUPER::_new(@_);
	bless($self, $classname);

	$self->{solutionFileVersion}        = '12.00';
	$self->{vcver}                      = '12.00';
	$self->{visualStudioName}           = 'Visual Studio 2013';
	$self->{VisualStudioVersion}        = '12.0.21005.1';
	$self->{MinimumVisualStudioVersion} = '10.0.40219.1';

	return $self;
}
package VS2015Solution;

#
# Package that encapsulates a Visual Studio 2015 solution file
#

use Carp;
use strict;
use warnings;
use base qw(Solution);

no warnings qw(redefine);    ## no critic

# Constructor: delegate to Solution::_new() and then pin the version
# strings that identify the generated .sln file as a VS2015 solution.
sub new
{
	my $classname = shift;
	my $self = $classname->SUPER::_new(@_);
	bless($self, $classname);

	$self->{solutionFileVersion}        = '12.00';
	$self->{vcver}                      = '14.00';
	$self->{visualStudioName}           = 'Visual Studio 2015';
	$self->{VisualStudioVersion}        = '14.0.24730.2';
	$self->{MinimumVisualStudioVersion} = '10.0.40219.1';

	return $self;
}
package VS2017Solution;

#
# Package that encapsulates a Visual Studio 2017 solution file
#

use Carp;
use strict;
use warnings;
use base qw(Solution);

no warnings qw(redefine);    ## no critic

# Constructor: delegate to Solution::_new() and then pin the version
# strings that identify the generated .sln file as a VS2017 solution.
sub new
{
	my $classname = shift;
	my $self = $classname->SUPER::_new(@_);
	bless($self, $classname);

	$self->{solutionFileVersion}        = '12.00';
	$self->{vcver}                      = '15.00';
	$self->{visualStudioName}           = 'Visual Studio 2017';
	$self->{VisualStudioVersion}        = '15.0.26730.3';
	$self->{MinimumVisualStudioVersion} = '10.0.40219.1';

	return $self;
}
package VS2019Solution;

#
# Package that encapsulates a Visual Studio 2019 solution file
#

use Carp;
use strict;
use warnings;
use base qw(Solution);

no warnings qw(redefine);    ## no critic

# Constructor: delegate to Solution::_new() and then pin the version
# strings that identify the generated .sln file as a VS2019 solution.
sub new
{
	my $classname = shift;
	my $self = $classname->SUPER::_new(@_);
	bless($self, $classname);

	$self->{solutionFileVersion}        = '12.00';
	$self->{vcver}                      = '16.00';
	$self->{visualStudioName}           = 'Visual Studio 2019';
	$self->{VisualStudioVersion}        = '16.0.28729.10';
	$self->{MinimumVisualStudioVersion} = '10.0.40219.1';

	return $self;
}
package VS2022Solution;

#
# Package that encapsulates a Visual Studio 2022 solution file
#

use Carp;
use strict;
use warnings;
use base qw(Solution);

no warnings qw(redefine);    ## no critic

# Constructor: delegate to Solution::_new() and then pin the version
# strings that identify the generated .sln file as a VS2022 solution.
sub new
{
	my $classname = shift;
	my $self = $classname->SUPER::_new(@_);
	bless($self, $classname);

	$self->{solutionFileVersion}        = '12.00';
	$self->{vcver}                      = '17.00';
	$self->{visualStudioName}           = 'Visual Studio 2022';
	$self->{VisualStudioVersion}        = '17.0.31903.59';
	$self->{MinimumVisualStudioVersion} = '10.0.40219.1';

	return $self;
}
# GetAdditionalHeaders
# Returns the extra header lines written at the top of the .sln file for
# solution formats that carry explicit VS version stamps.  ($f is accepted
# for interface compatibility but not used.)
sub GetAdditionalHeaders
{
	my ($self, $f) = @_;

	return sprintf(
		"VisualStudioVersion = %s\nMinimumVisualStudioVersion = %s\n",
		$self->{VisualStudioVersion},
		$self->{MinimumVisualStudioVersion});
}
1;
| 32.087758 | 127 | 0.561214 |
ed5de8381f8edde9398acc2b69825f6268fe644d | 24,263 | pm | Perl | Load/plugin/perl/LoadEstsFromFastaFile.pm | EuPathDB/ApiCommonData | 28e1ec4ff9d6ce3aeb96f4ecffa8b410fcd83f82 | [
"Apache-2.0"
]
| 1 | 2022-03-15T08:20:22.000Z | 2022-03-15T08:20:22.000Z | Load/plugin/perl/LoadEstsFromFastaFile.pm | EuPathDB/ApiCommonData | 28e1ec4ff9d6ce3aeb96f4ecffa8b410fcd83f82 | [
"Apache-2.0"
]
| 3 | 2019-12-17T17:33:51.000Z | 2022-03-23T13:32:13.000Z | Load/plugin/perl/LoadEstsFromFastaFile.pm | EuPathDB/ApiCommonData | 28e1ec4ff9d6ce3aeb96f4ecffa8b410fcd83f82 | [
"Apache-2.0"
]
| 1 | 2022-03-15T08:20:23.000Z | 2022-03-15T08:20:23.000Z | package ApiCommonData::Load::Plugin::LoadEstsFromFastaFile;
#vvvvvvvvvvvvvvvvvvvvvvvvv GUS4_STATUS vvvvvvvvvvvvvvvvvvvvvvvvv
# GUS4_STATUS | SRes.OntologyTerm | auto | absent
# GUS4_STATUS | SRes.SequenceOntology | auto | fixed
# GUS4_STATUS | Study.OntologyEntry | auto | absent
# GUS4_STATUS | SRes.GOTerm | auto | absent
# GUS4_STATUS | Dots.RNAFeatureExon | auto | absent
# GUS4_STATUS | RAD.SageTag | auto | absent
# GUS4_STATUS | RAD.Analysis | auto | absent
# GUS4_STATUS | ApiDB.Profile | auto | absent
# GUS4_STATUS | Study.Study | auto | absent
# GUS4_STATUS | Dots.Isolate | auto | absent
# GUS4_STATUS | DeprecatedTables | auto | absent # leaving Sres.Contact
# GUS4_STATUS | Pathway | auto | absent
# GUS4_STATUS | DoTS.SequenceVariation | auto | absent
# GUS4_STATUS | RNASeq Junctions | auto | absent
# GUS4_STATUS | Simple Rename | auto | absent
# GUS4_STATUS | ApiDB Tuning Gene | auto | absent
# GUS4_STATUS | Rethink | auto | absent
# GUS4_STATUS | dots.gene | manual | reviewed
#^^^^^^^^^^^^^^^^^^^^^^^^^ End GUS4_STATUS ^^^^^^^^^^^^^^^^^^^^
@ISA = qw(GUS::PluginMgr::Plugin);
use strict;
use GUS::PluginMgr::Plugin;
use File::Basename;
use GUS::Model::DoTS::ExternalNASequence;
use GUS::Model::DoTS::EST;
use GUS::Model::DoTS::Library;
use GUS::Model::SRes::Contact;
use GUS::Model::SRes::Taxon;
use GUS::Model::SRes::OntologyTerm;
use Bio::PrimarySeq;
use Bio::Tools::SeqStats;
use GUS::Model::SRes::TaxonName;
# NOTE(review): removed a dead "my $argsDeclaration = [];" that used to be
# here -- it was immediately masked by the full "my $argsDeclaration = [...]"
# declaration below, triggering a '"my" variable masks earlier declaration'
# warning and serving no purpose.
my $purposeBrief = 'Insert EST sequences from a FASTA file.';
my $purpose = <<PLUGIN_PURPOSE;
Insert or update sequences from a FASTA. A set of regular expressions provided on the command line extract from the definition lines of the input sequences various information to stuff into the database.
PLUGIN_PURPOSE
my $tablesAffected =
[ ['DoTS::EST','One row per EST'],['DoTS::ExternalNASequence', 'one row per EST'],['SRES::Contact','one row per library'],['SRES::Library','one row per library']
];
my $tablesDependedOn =
[['SRES::Taxon','taxon_id required for library and externalnasequence tables'],['SRes::OntologyTerm', 'OntologyTerm term for EST']
];
my $howToRestart = "Get the total number of ESTs processed from log file, second column, that number plus one for startAt argument";
my $failureCases = <<PLUGIN_FAILURE_CASES;
PLUGIN_FAILURE_CASES
my $notes = <<PLUGIN_NOTES;
PLUGIN_NOTES
my $documentation = { purpose=>$purpose,
purposeBrief=>$purposeBrief,
tablesAffected=>$tablesAffected,
tablesDependedOn=>$tablesDependedOn,
howToRestart=>$howToRestart,
failureCases=>$failureCases,
notes=>$notes
};
my $argsDeclaration =
[
stringArg({ name => 'sourceIdRegex',
descr => 'regex for identifier from defline, for xest.accession and externalnasequence.source_id',
reqd => 1,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'taxonNameRegex',
descr => 'regex for taxonname.name from defline to get taxon_id',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'checkSQL',
descr => 'sql statement used to query for na_sequence_id of an EST that is already in the database',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'qualityStartRegex',
descr => 'regex to find start of quality sequence, otherwise set to 1',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'possiblyReversedRegex',
descr => 'regex for whether sequence is reversed',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'poorQualityRegex',
descr => 'regex for poor quality trace from defline, otherwise set to 0',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
booleanArg({name => 'possiblyReversed',
descr => 'if likely reversed, field will be set to 1 in EST table for all sequences in file, alternative to regex',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
integerArg({name => 'startAt',
descr => 'number of entry to begin loading, for restart',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'putativeFullLengthRegex',
descr => 'regex for whether sequence is supposed to be full length',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
enumArg({ name => 'putativeFullLength',
descr => 'indicates all sequences are putatively full length, alternative to regex. true or false',
constraintFunc => undef,
reqd => 1,
isList => 0,
enum => "true,false",
}),
fileArg({ name => 'fastaFile',
descr => 'The name of the input fasta file',
reqd => 1,
constraintFunc => undef,
mustExist => 1,
format =>"",
isList => 0 }),
stringArg({ name => 'externalDatabaseName',
descr => 'The name of the ExternalDatabase from which the input sequences have come',
reqd => 1,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'externalDatabaseVersion',
descr => 'The version of the ExternalDatabaseRelease from whith the input sequences have come',
reqd => 1,
constraintFunc => undef,
isList => 0 }),
stringArg({name => 'SOTermName',
descr => 'The extDbRlsName of the Sequence Ontology to use',
reqd => 1,
constraintFunc => undef,
isList => 0
}),
stringArg({name => 'SOExtDbRlsSpec',
descr => 'The extDbRlsName of the Sequence Ontology to use',
reqd => 1,
constraintFunc => undef,
isList => 0
}),
integerArg({ name => 'ncbiTaxId',
descr => 'The taxon id from NCBI for these sequences.',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'regexSecondaryId',
descr => 'The regular expression to pick the secondary id of the sequence from the defline',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'regexDesc',
descr => 'The regular expression to pick the description of the sequence from the defline',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'regexSeqVersion',
descr => 'The regular expression to pick the sequence version e.g. >\S+\.(\d+) for >NM_47654.1',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'contactName',
descr => 'Name of contact, used to create row in contact table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'contactAddress1',
descr => 'First line of address for contact, used to create row in contact table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'contactAddress2',
descr => 'Second line of address for contact, used to create row in contact table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'contactEmail',
descr => 'Email for contact, used to create row in contact table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'contactPhone',
descr => 'Phone for contact, used to create row in contact table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'contactFax',
descr => 'Fax for contact, used to create row in contact table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryStrain',
descr => 'organism strain from which library mRNA was derived',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryVector',
descr => 'vector used for the creation of the library clones',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryStage',
descr => 'stage used for the creation of the library clones',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryDesc',
descr => 'Description of the sequence from the defline for comment_string field of library table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryStageRegex',
descr => 'regex for the stage used for the creation of the library clones',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryDescRegex',
descr => 'regex for the description of the sequence from the defline for comment_string field of library table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryName',
descr => 'name of library for dbest_name field of library table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
stringArg({ name => 'libraryNameRegex',
descr => 'regex for name of library for dbest_name field of library table',
reqd => 0,
constraintFunc => undef,
isList => 0 }),
booleanArg({ name => 'isImage',
descr => 'true if sequences are from IMAGE consortium, otherwise will be set to 0',
reqd => 0,
constraintFunc => undef,
default => 0 }),
stringArg({ name => 'taxonName',
descr => 'Description of the sequence from the defline for comment_string field of library table',
reqd => 0,
constraintFunc => undef,
isList => 0 })
];
# Constructor.  Blesses an empty hash and registers the plugin's metadata
# (argument declarations and documentation) with the GUS plugin framework.
sub new() {
  my ($class) = @_;

  my $self = bless {}, $class;

  my %init = (
      requiredDbVersion => 4.0,
      cvsRevision       => '$Revision$', # cvs fills this in!
      name              => ref($self),
      argsDeclaration   => $argsDeclaration,
      documentation     => $documentation,
  );
  $self->initialize(\%init);

  return $self;
}
# Unbuffer STDOUT so progress logging appears immediately.
$| = 1;

# run()
# Plugin entry point.  Resolves the external database release, SO term and
# (optionally) taxon, creates the library and contact rows, then streams the
# FASTA file.  Returns a summary string for the framework.
sub run {
  my $self = shift;

  # Counters updated by processFile(): loaded entries vs. restart-skipped.
  $self->{totalCount} = 0;
  $self->{skippedCount} = 0;

  $self->{external_database_release_id} =
    $self->getExtDbRlsId($self->getArg('externalDatabaseName'),
			 $self->getArg('externalDatabaseVersion'));

  $self->log("loading sequences with external database release id $self->{external_database_release_id}");

  # Caches $self->{sequenceOntologyId}; dies if the SO term is unknown.
  $self->fetchSequenceOntologyId();

  # Taxon can be given either as an NCBI tax id or a scientific name;
  # whichever is supplied caches $self->{taxonId}.
  if ($self->getArg('ncbiTaxId')) {
    $self->fetchTaxonId();
  }
  if ($self->getArg('taxonName')) {
    $self->fetchTaxonIdFromName();
  }

  # Library row only if a name was given; contact row is always made.
  $self->makeLibraryRow() if $self->getArg('libraryName');
  $self->makeContactRow();

  $self->processFile();

  my $finalCount = $self->{totalCount} + $self->{skippedCount};
  my $res = "Run finished: $finalCount ESTs entered for library_id " . $self->{libraryId};
  return $res;
}
# processFile()
# Stream through the (possibly gzipped) FASTA file.  For each defline,
# parse the source id and optional attributes using the command-line
# regexes, then load one ExternalNASequence row plus one child EST row.
# Entries already present in the database (per checkSQL) are skipped, as
# are the first --startAt entries when restarting.
sub processFile{
  my ($self) = @_;

  my $file = $self->getArg('fastaFile');

  $self->logVerbose("loading sequences from $file\n");

  if ($file =~ /gz$/) {
    open(F, "gunzip -c $file |") || die "Can't open $file for reading";
  } else {
    open(F,"$file") || die "Can't open $file for reading";
  }

  # Per-entry state, reset on each defline.
  my $source_id;
  my $description;
  my $secondary_id;
  my $seq;
  my $seq_version;
  my $possiblyReversed;
  my $putativeFullLength;
  my $poorQuality;
  my $seqLength;
  my $qualityStart;
  my $libName;
  my $libStage;
  my $libDesc;

  # Hoist the optional regex arguments out of the per-line loop.  The
  # original used "my $x = ... if cond;" inside the loop, which is
  # undefined behaviour in Perl and let values leak between iterations.
  # getArg() returns undef for optional arguments that were not supplied.
  my $sourceIdRegex           = $self->getArg('sourceIdRegex');
  my $regexSecondaryId        = $self->getArg('regexSecondaryId');
  my $taxonNameRegex          = $self->getArg('taxonNameRegex');
  my $regexDescrip            = $self->getArg('regexDesc');
  my $regexSeqVersion         = $self->getArg('regexSeqVersion');
  my $regexQualityStart       = $self->getArg('qualityStartRegex');
  my $possiblyReversedRegex   = $self->getArg('possiblyReversedRegex');
  my $libraryDescRegex        = $self->getArg('libraryDescRegex');
  my $libraryStageRegex       = $self->getArg('libraryStageRegex');
  my $libraryNameRegex        = $self->getArg('libraryNameRegex');
  my $putativeFullLengthRegex = $self->getArg('putativeFullLengthRegex');
  my $poorQualityRegex        = $self->getArg('poorQualityRegex');

  my $sql = $self->getArg('checkSQL') ? $self->getArg('checkSQL') : "select na_sequence_id from dots.externalnasequence where source_id = ? and external_database_release_id = $self->{external_database_release_id}";

  my $checkStmt = $self->getAlgInvocation()->getQueryHandle()->prepare($sql);

  while (<F>) {
    if (/^\>/) { ## defline - finish the previous entry, then parse this one
      $self->undefPointerCache();

      if ($self->getArg('startAt')
	  && $self->{skippedCount} < $self->getArg('startAt')) {
	$self->{skippedCount}++;
	$seq = "";
	next;
      }

      # Submit the previous entry.  Guard on $source_id as well as $seq so
      # that an entry skipped below ("already in db") is never submitted.
      if ($source_id && $seq) {
	$self->process($source_id,$secondary_id,$seq_version,$seq,$possiblyReversed,$putativeFullLength,$poorQuality,$seqLength,$qualityStart, $description);
      }

      # BUG FIX: reset the sequence *before* any skip path below.  The
      # original only reset at the end of this branch, so an entry skipped
      # because it was already in the database kept appending its residues
      # to the previous entry's buffer.
      $seq = "";

      if (/$sourceIdRegex/ && $1) {
	$source_id = $1;
      } else {
	my $forgotParens = ($sourceIdRegex !~ /\(/)? "(Forgot parens?)" : "";
	$self->userError("Unable to parse source_id from $_ using regex '$sourceIdRegex' $forgotParens");
      }

      my $id = $self->checkIfInDb($checkStmt,$source_id);
      if ($id) {
	# BUG FIX: log while $source_id still holds the accession; the
	# original blanked it first and then logged an empty string.
	$self->log ("$source_id already in database with external_database_release_id $self->{external_database_release_id}");
	$source_id = "";
	next;
      }

      # Defaults in case this defline doesn't match the optional regexes.
      $secondary_id = ""; $seq_version = 1; $possiblyReversed = 0; $putativeFullLength = 0; $poorQuality = 0; $qualityStart = 1; $description = "";

      if ($regexSecondaryId && /$regexSecondaryId/) {
	$secondary_id = $1;
      }

      # A taxon name on the defline overrides the cached taxon id.
      if ($taxonNameRegex && /$taxonNameRegex/) {
	$self->fetchTaxonIdFromName($1);
      }

      if ($regexDescrip && /$regexDescrip/) {
	$description = $1;
      }

      if ($regexSeqVersion && /$regexSeqVersion/) {
	$seq_version = $1;
      }

      if ($regexQualityStart && /$regexQualityStart/) {
	$qualityStart = $1;
      }

      # Reversal flag: defline regex wins, then the global boolean arg.
      if ($possiblyReversedRegex && /$possiblyReversedRegex/) {
	$possiblyReversed = 1;
      }
      elsif ($self->getArg('possiblyReversed')) {
	$possiblyReversed = 1;
      }
      else {
	$possiblyReversed = 0;
      }

      if ($libraryDescRegex && /$libraryDescRegex/) {
	$libDesc = $1;
      }

      if ($libraryStageRegex && /$libraryStageRegex/) {
	$libStage = $1;
      }

      if ($libraryNameRegex && /$libraryNameRegex/) {
	$libName = $1;
      }

      # A per-entry library name creates (or finds) a library row and
      # updates $self->{libraryId} for subsequent ESTs.
      if ($libName){
	$self->makeLibraryRow($libName,$libStage,$libDesc);
      }

      # Full-length flag: defline regex wins, then the global enum arg.
      if ($putativeFullLengthRegex && /$putativeFullLengthRegex/) {
	$putativeFullLength = 1;
      }
      elsif ($self->getArg('putativeFullLength') eq 'true') {
	$putativeFullLength = 1;
      }
      else {
	$putativeFullLength = 0;
      }

      if ($poorQualityRegex && /$poorQualityRegex/) {
	$poorQuality = 1;
      }
    }
    else {
      # Sequence line: append, strip all whitespace, track running length.
      $seq .= $_;
      $seq =~ s/\s//g;
      $seqLength = length($seq);
    }
  }

  # Submit the final entry in the file.
  $self->process($source_id,$secondary_id,$seq_version,$seq,$possiblyReversed,$putativeFullLength,$poorQuality,$seqLength,$qualityStart, $description) if ($source_id && $seq);

  close(F);
}
##SUBS
# makeLibraryRow($lib, $libStage, $libDesc)
# Find-or-create the DoTS.Library row for this run and cache its id in
# $self->{libraryId}.  Arguments override the corresponding command-line
# values (used when the library name/stage/description are parsed from a
# defline); optional fields are only set when the row is newly created.
sub makeLibraryRow {
  my($self,$lib,$libStage,$libDesc) = @_;

  # Defline-supplied name wins over the --libraryName argument.
  my $name = $lib ? $lib : $self->getArg('libraryName');
  my $taxonId = $self->{taxonId};
  my $isImage = $self->getArg('isImage') ? 1 : 0 ;
  my $library = GUS::Model::DoTS::Library->new({'dbest_name'=>$name,'taxon_id'=>$taxonId,'is_image'=>$isImage});
  # Only populate optional columns and submit when the row doesn't already
  # exist; an existing row is reused untouched.
  unless ($library->retrieveFromDB()) {
    if ($self->getArg('libraryStrain')) {
      my $strain = $self->getArg('libraryStrain') ;
      $library->setStrain($strain);
    }
    if ($self->getArg('libraryVector')) {
      my $vector = $self->getArg('libraryVector');
      $library->setVector($vector);
    }
    my $stage = $libStage ? $libStage : $self->getArg('libraryStage');
    if ($stage) {
      $library->setStage($stage);
    }
    my $description = $libDesc ? $libDesc : $self->getArg('libraryDesc');
    if ($description) {
      $library->setCommentString($description);
    }
    $library->submit();
  }
  # Cache the id for the EST rows created later in the run.
  $self->{libraryId} = $library->getId();
  $library->undefPointerCache();
}
# makeContactRow()
# Find-or-create the SRes.Contact row identified by name + address from
# the command-line arguments, and cache its id in $self->{contactId}.
# Optional attributes (email/phone/fax) are only set on a newly created row.
sub makeContactRow {
  my ($self) = @_;

  my $contact = GUS::Model::SRes::Contact->new({
      'name'     => $self->getArg('contactName'),
      'address1' => $self->getArg('contactAddress1'),
      'address2' => $self->getArg('contactAddress2'),
  });

  unless ($contact->retrieveFromDB()) {
    # Only set the optional attributes that were actually supplied.
    $contact->setEmail($self->getArg('contactEmail')) if $self->getArg('contactEmail');
    $contact->setPhone($self->getArg('contactPhone')) if $self->getArg('contactPhone');
    $contact->setFax($self->getArg('contactFax'))     if $self->getArg('contactFax');
    $contact->submit();
  }

  $self->{contactId} = $contact->getId();
  $contact->undefPointerCache();
}
# checkIfInDb($checkStmt, $source_id)
# Execute the prepared duplicate-check statement for the given source id.
# Returns the na_sequence_id of an existing row, or 0 if none was found.
sub checkIfInDb {
  my ($self, $stmt, $sourceId) = @_;

  $stmt->execute($sourceId);

  # Use the returned row *count* for the test (matching the original
  # list-assignment-in-boolean-context semantics), not the value itself.
  my @row = $stmt->fetchrow_array();
  if (@row) {
    $stmt->finish();
    return $row[0];
  }
  return 0;
}
# process(...)
# Load a single FASTA entry: build the ExternalNASequence row and its
# child EST row, submit both in one transaction, then log progress.
sub process {
  my ($self, $source_id, $secondary_id, $seq_version, $seq,
      $possiblyReversed, $putativeFullLength, $poorQuality,
      $seqLength, $qualityStart, $description) = @_;

  my $naSeq = $self->createNewExternalSequence(
      $source_id, $seq, $description, $seq_version, $secondary_id);
  my $est = $self->createNewEST(
      $source_id, $possiblyReversed, $putativeFullLength,
      $poorQuality, $seqLength, $qualityStart);

  # Submitting the parent also submits the attached EST child.
  $naSeq->addChild($est);
  $naSeq->submit();
  $naSeq->undefPointerCache();

  $self->{totalCount}++;
  my $total = $self->{totalCount} + $self->{skippedCount};
  $self->log("processed sourceId: $source_id and total processed: $total");
}
# createNewExternalSequence($source_id, $seq, $description, $seq_version, $secondary_id)
# Build (but do not submit) a DoTS.ExternalNASequence object for one FASTA
# entry, tagged with the run's external database release, taxon and SO term.
sub createNewExternalSequence {
  my($self, $source_id,$seq,$description,$seq_version,$secondary_id) = @_;

  my $aas = GUS::Model::DoTS::ExternalNASequence->
    new({'external_database_release_id' => $self->{external_database_release_id},
	 'source_id' => $source_id,
	 'taxon_id' => $self->{taxonId},
	 'sequence_version' => $seq_version,
	 'sequence_ontology_id' => $self->{sequenceOntologyId} });

  # Not all schema versions have this column, hence the attribute check.
  if ($secondary_id && $aas->isValidAttribute('secondary_identifier')) {
    $aas->setSecondaryIdentifier($secondary_id);
  }

  if ($description) {
    # Strip quotes and truncate to the column's 255-character limit.
    $description =~ s/\"//g; $description =~ s/\'//g;
    $aas->set('description',substr($description,0,255));
  }
  $aas->setSequence($seq);
  # Also populates the per-base (A/T/C/G/other) count columns.
  $self->getMonomerCount($aas,$seq);
  return $aas;
}
# createNewEST($source_id, $possiblyReversed, $putativeFullLength,
#              $poorQuality, $seqLength, $qualityStart)
# Build (but do not submit) the DoTS.EST child row for one FASTA entry.
# Library and contact ids were cached by makeLibraryRow()/makeContactRow().
sub createNewEST {
  my ($self, $sourceId, $possiblyReversed, $putativeFullLength,
      $poorQuality, $seqLength, $qualityStart) = @_;

  my %attrs = (
      'library_id'                => $self->{libraryId},
      'contact_id'                => $self->{contactId},
      'accession'                 => $sourceId,
      'possibly_reversed'         => $possiblyReversed,
      'putative_full_length_read' => $putativeFullLength,
      'trace_poor_quality'        => $poorQuality,
      'quality_start'             => $qualityStart,
      'seq_length'                => $seqLength,
  );

  return GUS::Model::DoTS::EST->new(\%attrs);
}
# getMonomerCount($aas, $seq)
# Count the A/T/C/G residues in $seq (gaps stripped first) via BioPerl's
# SeqStats and store the counts, plus a combined "other" count for any
# remaining symbols (N, ambiguity codes, ...), on the sequence object.
sub getMonomerCount{
  my ($self, $aas, $seq)=@_;

  my $countA = 0;
  my $countT = 0;
  my $countC = 0;
  my $countG = 0;
  my $countOther = 0;

  # Gap characters would make SeqStats reject the sequence.
  $seq =~ s/-//g;

  my $seqobj = Bio::PrimarySeq->new(-seq=>$seq,
				    -alphabet=>'dna');

  my $seqStats  =  Bio::Tools::SeqStats->new(-seq=>$seqobj);

  my $monomersHash = $seqStats->count_monomers();

  foreach my $base (keys %$monomersHash) {
    if ($base eq 'A'){
      $countA = $$monomersHash{$base};
    }
    elsif ($base eq 'T'){
      $countT = $$monomersHash{$base};
    }
    elsif ($base eq 'C'){
      $countC = $$monomersHash{$base};
    }
    elsif ($base eq 'G'){
      $countG = $$monomersHash{$base};
    }
    else{
      # BUG FIX: accumulate instead of overwrite, so that several distinct
      # non-ACGT symbols (e.g. both N and Y) are all counted rather than
      # only the last one iterated.
      $countOther += $$monomersHash{$base};
    }
  }
  $aas->setACount($countA);
  $aas->setTCount($countT);
  $aas->setCCount($countC);
  $aas->setGCount($countG);
  $aas->setOtherCount($countOther);

  return;
}
# fetchSequenceOntologyId()
# Look up the SRes.OntologyTerm row named by --SOTermName within the
# release given by --SOExtDbRlsSpec, caching its id in
# $self->{sequenceOntologyId}.  Dies via userError() if not found.
sub fetchSequenceOntologyId {
  my ($self) = @_;

  my $name = $self->getArg('SOTermName');
  my $extDbRlsSpec = $self->getArg('SOExtDbRlsSpec');

  my $extDbRlsId = $self->getExtDbRlsId($extDbRlsSpec);

  my $SOTerm = GUS::Model::SRes::OntologyTerm->new({'name' => $name ,
                                                    external_database_release_id => $extDbRlsId
                                                   });

  $SOTerm->retrieveFromDB;

  $self->{sequenceOntologyId} = $SOTerm->getId();

  $self->{sequenceOntologyId}
    || $self->userError("Can't find SO term '$name' in database");

  # BUG FIX: replaced a leftover "print STDERR" debug statement with a
  # proper framework log call.
  $self->logDebug("sequence ontology id: $self->{sequenceOntologyId}");
}
# fetchTaxonId()
# Resolve the --ncbiTaxId argument to a SRes.Taxon row and cache its
# taxon_id in $self->{taxonId}; dies via userError() if no such row.
sub fetchTaxonId {
  my ($self) = @_;

  my $ncbiTaxId = $self->getArg('ncbiTaxId');

  my $taxon = GUS::Model::SRes::Taxon->new({ ncbi_tax_id => $ncbiTaxId });
  unless ($taxon->retrieveFromDB) {
    $self->userError("The NCBI tax ID '$ncbiTaxId' provided on the command line is not found in the database");
  }

  $self->{taxonId} = $taxon->getTaxonId();
}
# fetchTaxonIdFromName([$name])
# Resolve a scientific name (the argument, or --taxonName) to a taxon_id
# and cache it in $self->{taxonId}; dies via userError() if not found.
sub fetchTaxonIdFromName {
  my ($self,$name) = @_;

  # BUG FIX: the original re-declared "my $taxonName" for the model object,
  # masking the name string, so the error message interpolated a hash
  # reference instead of the name.  Keep the string and object separate.
  my $taxonNameStr = $name ? $name : $self->getArg('taxonName');

  my $taxonName = GUS::Model::SRes::TaxonName->new({name=>$taxonNameStr,name_class=>'scientific name'});

  # Also corrected the message: this is a taxon *name*, not an NCBI tax id.
  $taxonName->retrieveFromDB
    || $self->userError ("The taxon name '$taxonNameStr' provided on the command line or as a regex is not found in the database");

  $self->{taxonId} = $taxonName->getTaxonId();
}
# undoTables()
# Tables this plugin writes to, listed child-first so that an undo can
# delete rows without violating foreign keys.
sub undoTables {
  return qw(
    DoTS.EST
    DoTS.ExternalNASequence
    DoTS.Library
  );
}
1;
| 32.437166 | 216 | 0.56683 |
ed604baa8a63c49a89898038d815de66ce8f729b | 7,353 | pl | Perl | net/united-linux-lib.pl | vsilvar/webmin | 9c736481d7d07eb4c8dcdb2cacff2365b74efbe0 | [
"BSD-3-Clause"
]
| 1 | 2021-03-01T12:06:31.000Z | 2021-03-01T12:06:31.000Z | net/united-linux-lib.pl | vsilvar/webmin | 9c736481d7d07eb4c8dcdb2cacff2365b74efbe0 | [
"BSD-3-Clause"
]
| 3 | 2020-04-30T14:00:11.000Z | 2021-05-10T23:28:17.000Z | net/united-linux-lib.pl | vsilvar/webmin | 9c736481d7d07eb4c8dcdb2cacff2365b74efbe0 | [
"BSD-3-Clause"
]
| 2 | 2020-11-04T06:21:49.000Z | 2020-11-06T11:02:20.000Z | # united-linux-lib.pl
# Networking functions for united linux
$net_scripts_dir = "/etc/sysconfig/network";
$routes_config = "/etc/sysconfig/network/routes";
$sysctl_config = "/etc/sysconfig/sysctl";
do 'linux-lib.pl';
# boot_interfaces()
# Returns a list of interfaces brought up at boot time
# Each element is a hash ref parsed from one ifcfg-* file under
# /etc/sysconfig/network, with keys: fullname, name, virtual, up, address,
# netmask, broadcast, dhcp, edit, index and file.
sub boot_interfaces
{
local(@rv, $f);
opendir(CONF, &translate_filename($net_scripts_dir));
while($f = readdir(CONF)) {
	# Only files named ifcfg-<iface>, e.g. ifcfg-eth0 or ifcfg-eth0:1
	next if ($f !~ /^ifcfg-([a-z0-9:\.]+)$/);
	local (%conf, $b);
	$b->{'fullname'} = $1;
	&read_env_file("$net_scripts_dir/$f", \%conf);
	# Split "eth0:1" into real interface name and virtual number
	if ($b->{'fullname'} =~ /(\S+):(\d+)/) {
		$b->{'name'} = $1;
		$b->{'virtual'} = $2;
		}
	else { $b->{'name'} = $b->{'fullname'}; }
	$b->{'up'} = ($conf{'STARTMODE'} eq 'onboot');
	# SuSE/UL allows either IPADDR=a.b.c.d/prefix or separate
	# PREFIXLEN / NETMASK settings - handle all three forms.
	local $pfx;
	if ($conf{'IPADDR'} =~ /^(\S+)\/(\d+)$/) {
		$b->{'address'} = $1;
		$pfx = $2;
		}
	else {
		$b->{'address'} = $conf{'IPADDR'};
		}
	$pfx = $conf{'PREFIXLEN'} if (!$pfx);
	if ($pfx) {
		$b->{'netmask'} = &prefix_to_mask($pfx);
		}
	else {
		$b->{'netmask'} = $conf{'NETMASK'};
		}
	$b->{'broadcast'} = $conf{'BROADCAST'};
	$b->{'dhcp'} = ($conf{'BOOTPROTO'} eq 'dhcp');
	# PPP and IrDA interfaces are managed elsewhere, so not editable here
	$b->{'edit'} = ($b->{'name'} !~ /^ppp|irlan/);
	$b->{'index'} = scalar(@rv);
	$b->{'file'} = "$net_scripts_dir/$f";
	push(@rv, $b);
	}
closedir(CONF);
return @rv;
}
# save_interface(&details)
# Create or update a boot-time interface
# Writes the interface's ifcfg-* file under /etc/sysconfig/network,
# preserving any settings in the file that this function doesn't manage.
sub save_interface
{
local(%conf);
local $name = $_[0]->{'virtual'} ne "" ? $_[0]->{'name'}.":".$_[0]->{'virtual'}
				       : $_[0]->{'name'};
&lock_file("$net_scripts_dir/ifcfg-$name");
&read_env_file("$net_scripts_dir/ifcfg-$name", \%conf);
$conf{'IPADDR'} = $_[0]->{'address'};
local($ip1, $ip2, $ip3, $ip4) = split(/\./, $_[0]->{'address'});
$conf{'NETMASK'} = $_[0]->{'netmask'};
local($nm1, $nm2, $nm3, $nm4) = split(/\./, $_[0]->{'netmask'});
if ($_[0]->{'address'} && $_[0]->{'netmask'}) {
	# Derive the network address octet-by-octet by ANDing the IP
	# with the netmask.
	$conf{'NETWORK'} = sprintf "%d.%d.%d.%d",
				($ip1 & int($nm1))&0xff,
				($ip2 & int($nm2))&0xff,
				($ip3 & int($nm3))&0xff,
				($ip4 & int($nm4))&0xff;
	}
else {
	$conf{'NETWORK'} = '';
	}
# The netmask is now stored explicitly, so drop any old prefix setting
delete($conf{'PREFIXLEN'});
$conf{'BROADCAST'} = $_[0]->{'broadcast'};
# Only flip between onboot and manual; other start modes are preserved.
$conf{'STARTMODE'} = $_[0]->{'up'} ? "onboot" :
		     $conf{'STARTMODE'} eq "onboot" ? "manual" :
		     $conf{'STARTMODE'};
$conf{'BOOTPROTO'} = $_[0]->{'dhcp'} ? "dhcp" : "static";
# Timestamp marker used by SuSE tools to detect config changes
$conf{'UNIQUE'} = time();
&write_env_file("$net_scripts_dir/ifcfg-$name", \%conf);
&unlock_file("$net_scripts_dir/ifcfg-$name");
}
# delete_interface(&details)
# Delete a boot-time interface by removing its ifcfg-* config file.
sub delete_interface
{
my ($b) = @_;
local $name;
if ($b->{'virtual'} ne "") {
	# Virtual interfaces use the eth0:1 style file name
	$name = $b->{'name'}.":".$b->{'virtual'};
	}
else {
	$name = $b->{'name'};
	}
&unlink_logged("$net_scripts_dir/ifcfg-$name");
}
# can_edit(what)
# Can some boot-time interface parameter be edited?
# Everything except the MTU and BOOTP settings is supported on this OS.
sub can_edit
{
my ($what) = @_;
return $what ne "mtu" && $what ne "bootp";
}
# valid_boot_address(address)
# Is some address valid for a bootup interface?  On this OS any valid
# IPv4 address is acceptable.
sub valid_boot_address
{
my ($addr) = @_;
return &check_ipaddress($addr);
}
# get_hostname()
# Returns the hostname configured in the network config file, falling
# back to the hostname reported by the system.
sub get_hostname
{
local %conf;
&read_env_file($network_config, \%conf);
return $conf{'HOSTNAME'} || &get_system_hostname(1);
}
# save_hostname(name)
# Sets the system hostname immediately, records it in /etc/HOSTNAME and
# in the network config file, and clears webmin's cached hostname.
sub save_hostname
{
local %conf;
# Apply the new name to the running system first
&system_logged("hostname $_[0] >/dev/null 2>&1");
&open_lock_tempfile(HOST, ">/etc/HOSTNAME");
&print_tempfile(HOST, $_[0],"\n");
&close_tempfile(HOST);
&lock_file($network_config);
&read_env_file($network_config, \%conf);
$conf{'HOSTNAME'} = $_[0];
&write_env_file($network_config, \%conf);
&unlock_file($network_config);
undef(@main::get_system_hostname);	# clear cache
}
# get_domainname()
# Returns the current NIS domain name, as reported by the domainname
# command (with its trailing newline removed).
sub get_domainname
{
local $out;
&execute_command("domainname", undef, \$out, undef);
chop($out);
return $out;
}
# save_domainname(domain)
# Sets the system NIS domain name and records it in the network config
# file.  An empty domain removes the NISDOMAIN setting entirely.
sub save_domainname
{
my ($dname) = @_;
local %conf;
&execute_command("domainname ".quotemeta($dname));
&read_env_file($network_config, \%conf);
if ($dname) {
	$conf{'NISDOMAIN'} = $dname;
	}
else {
	delete($conf{'NISDOMAIN'});
	}
&write_env_file($network_config, \%conf);
}
# routing_config_files()
# Returns the config files that hold static routes and the IP forwarding
# setting, for lock/unlock purposes.
sub routing_config_files
{
my @files = ( $routes_config, $sysctl_config );
return @files;
}
# routing_input()
# Emit the HTML form rows for editing the default route, the IP forwarding
# flag and the table of static routes, based on /etc/sysconfig/network/routes
# and /etc/sysconfig/sysctl.
sub routing_input
{
local (@routes, $r);
&open_readfile(ROUTES, $routes_config);
while(<ROUTES>) {
	s/#.*$//;
	s/\r|\n//g;
	# Each route line is: dest gateway netmask device [type],
	# with "-" standing for an unset field.
	local @r = map { $_ eq '-' ? undef : $_ } split(/\s+/, $_);
	push(@routes, \@r) if (@r);
	}
close(ROUTES);

# show default router and device
local ($def) = grep { $_->[0] eq "default" } @routes;
print &ui_table_row($text{'routes_default'},
	&ui_opt_textbox("gateway", $def->[1], 15, $text{'routes_none'}));

print &ui_table_row($text{'routes_device2'},
	&ui_opt_textbox("gatewaydev", $def->[3], 6, $text{'routes_none'}));

# Forwarding enabled?
&read_env_file($sysctl_config, \%sysctl);
print &ui_table_row($text{'routes_forward'},
	&ui_yesno_radio("forward", $sysctl{'IP_FORWARD'} eq 'yes'));

# show static network routes, plus one blank row for adding a new route
my $i = 0;
my @table;
foreach my $r (@routes, [ ]) {
	next if ($r eq $def);
	push(@table, [ &ui_textbox("dev_$i", $r->[3], 6),
		       &ui_textbox("net_$i", $r->[0], 15),
		       &ui_textbox("netmask_$i", $r->[2], 15),
		       &ui_textbox("gw_$i", $r->[1], 15),
		       &ui_textbox("type_$i", $r->[4], 10) ]);
	# BUG FIX: increment the row index.  Without this every row was
	# named dev_0/net_0/..., so parse_routing() could only ever see
	# one static route.
	$i++;
	}
print &ui_table_row($text{'routes_static'}, &ui_columns_table([ $text{'routes_ifc'}, $text{'routes_net'},
                                                                $text{'routes_mask'}, $text{'routes_gateway'},
                                                                $text{'routes_type'} ],
                                                              undef, \@table, undef, 1));
}
# parse_routing()
# Validate and save the routing form inputs: the default route, the table
# of static routes, and the IP forwarding flag in the sysctl config.
sub parse_routing
{
# Parse route inputs
local (@routes, $r, $i);
if (!$in{'gateway_def'}) {
	&to_ipaddress($in{'gateway'}) ||
		&error(&text('routes_edefault', $in{'gateway'}));
	local @def = ( "default", $in{'gateway'}, undef, undef );
	if (!$in{'gatewaydev_def'}) {
		$in{'gatewaydev'} =~ /^\S+$/ ||
			&error(&text('routes_edevice', $in{'gatewaydev'}));
		$def[3] = $in{'gatewaydev'};
		}
	push(@routes, \@def);
	}
for($i=0; defined($in{"dev_$i"}); $i++) {
	# Rows with an empty destination are ignored (deleted routes)
	next if (!$in{"net_$i"});
	# Destination may be a plain address or address/prefix
	&check_ipaddress($in{"net_$i"}) ||
		$in{"net_$i"} =~ /^(\S+)\/(\d+)$/ && &check_ipaddress($1) ||
		&error(&text('routes_enet', $in{"net_$i"}));
	# BUG FIX: the error call used an undeclared $dev, so the message
	# always showed an empty device name.
	$in{"dev_$i"} =~ /^\S*$/ || &error(&text('routes_edevice', $in{"dev_$i"}));
	!$in{"netmask_$i"} || &check_ipaddress($in{"netmask_$i"}) ||
		&error(&text('routes_emask', $in{"netmask_$i"}));
	!$in{"gw_$i"} || &check_ipaddress($in{"gw_$i"}) ||
		&error(&text('routes_egateway', $in{"gw_$i"}));
	$in{"type_$i"} =~ /^\S*$/ ||
		&error(&text('routes_etype', $in{"type_$i"}));
	push(@routes, [ $in{"net_$i"}, $in{"gw_$i"}, $in{"netmask_$i"},
			$in{"dev_$i"}, $in{"type_$i"} ] );
	}

# Save routes and routing option; unset fields are written as "-"
&open_tempfile(ROUTES, ">$routes_config");
foreach $r (@routes) {
	&print_tempfile(ROUTES,join(" ", map { $_ eq '' ? "-" : $_ } @$r),"\n");
	}
&close_tempfile(ROUTES);
local $lref = &read_file_lines($sysctl_config);
for($i=0; $i<@$lref; $i++) {
	if ($lref->[$i] =~ /^\s*IP_FORWARD\s*=/) {
		$lref->[$i] = "IP_FORWARD=".($in{'forward'} ? "yes" : "no");
		}
	}
&flush_file_lines();
}
# apply_network()
# Apply the interface and routing settings
# Restarts the whole networking subsystem so that changes written to the
# config files take effect; output is discarded.
sub apply_network
{
&system_logged("(cd / ; /etc/init.d/network stop ; /etc/init.d/network start) >/dev/null 2>&1");
}
# supports_address6([&iface])
# Returns 1 if managing IPv6 interfaces is supported.  IPv6 configuration
# is not implemented for this United Linux module, so always returns 0.
sub supports_address6
{
my ($iface) = @_;
return 0;
}
1;
| 27.132841 | 113 | 0.581259 |
ed8ca9221339bf673e96256eedcde99a2dd6549b | 359 | pm | Perl | lib/DDG/Spice/DogoBooks.pm | rezhajulio/zeroclickinfo-spice | b45d330ebd9d253837ade7a7fb90bbdcf73714ba | [
"Apache-2.0"
]
| 1 | 2020-08-25T15:10:24.000Z | 2020-08-25T15:10:24.000Z | lib/DDG/Spice/DogoBooks.pm | rezhajulio/zeroclickinfo-spice | b45d330ebd9d253837ade7a7fb90bbdcf73714ba | [
"Apache-2.0"
]
| null | null | null | lib/DDG/Spice/DogoBooks.pm | rezhajulio/zeroclickinfo-spice | b45d330ebd9d253837ade7a7fb90bbdcf73714ba | [
"Apache-2.0"
]
| null | null | null | package DDG::Spice::DogoBooks;
use DDG::Spice;
triggers any => "dogobooks", "dogo books", "dogo", "kids", "kid", "child", "children";
spice to => 'http://api.dogomedia.com/api/v2/books/search.json?query=$1&api_key={{ENV{DDG_SPICE_DOGO_APIKEY}}}';
spice wrap_jsonp_callback => 1;
handle query_lc => sub {
return $_ if $_ =~ /book/i;
return;
};
1;
| 22.4375 | 112 | 0.654596 |
ed8207e2f70e55ecb8f26e4946a2729b214560e6 | 3,663 | pl | Perl | TAO/orbsvcs/tests/Redundant_Naming/run_test.pl | cflowe/ACE | 5ff60b41adbe1772372d1a43bcc1f2726ff8f810 | [
"DOC"
]
| 36 | 2015-01-10T07:27:33.000Z | 2022-03-07T03:32:08.000Z | TAO/orbsvcs/tests/Redundant_Naming/run_test.pl | cflowe/ACE | 5ff60b41adbe1772372d1a43bcc1f2726ff8f810 | [
"DOC"
]
| 2 | 2018-08-13T07:30:51.000Z | 2019-02-25T03:04:31.000Z | TAO/orbsvcs/tests/Redundant_Naming/run_test.pl | cflowe/ACE | 5ff60b41adbe1772372d1a43bcc1f2726ff8f810 | [
"DOC"
]
| 38 | 2015-01-08T14:12:06.000Z | 2022-01-19T08:33:00.000Z | eval '(exit $?0)' && eval 'exec perl -S $0 ${1+"$@"}'
& eval 'exec perl -S $0 $argv:q'
if 0;
# $Id: run_test.pl 96760 2013-02-05 21:11:03Z stanleyk $
# -*- perl -*-
# This is a Perl script that runs a Naming Service test. It starts
# all the servers and clients as necessary.
use lib "$ENV{ACE_ROOT}/bin";
use PerlACE::TestTarget;
use Cwd;
$startdir = getcwd();
$quiet = 0;
# check for -q flag
if ($ARGV[0] eq '-q') {
    $quiet = 1;
}
my $test = PerlACE::TestTarget::create_target (1) || die "Create target 1 failed\n";
$hostname = $test->HostName ();
# Variables for command-line arguments to client and server
# executables.  Ports are randomized to allow concurrent test runs.
$ns_orb_port1 = 10001 + $test->RandomPort ();
$ns_orb_port2 = 10002 + $test->RandomPort ();
$ns_endpoint1 = "iiop://$hostname:$ns_orb_port1";
$ns_endpoint2 = "iiop://$hostname:$ns_orb_port2";
$iorfile1 = "ns1.ior";
$iorfile2 = "ns2.ior";
## Allow the user to determine where the persistent file will be located
## just in case the current directory is not suitable for locking.
## We can't change the name of the persistent file because that is not
## sufficient to work around locking problems for Tru64 when the current
## directory is NFS mounted from a system that does not properly support
## locking.
foreach my $possible ($ENV{TMPDIR}, $ENV{TEMP}, $ENV{TMP}) {
    if (defined $possible && -d $possible) {
        if (chdir($possible)) {
            last;
        }
    }
}
my $test_iorfile1 = $test->LocalFile ($iorfile1);
my $test_iorfile2 = $test->LocalFile ($iorfile2);
$status = 0;
print "INFO: Running the test in ", getcwd(), "\n";
# Make sure that the directory to use to hold the naming contexts exists
# and is cleaned out
if ( ! -d "NameService" ) {
    mkdir (NameService, 0777);
}
else {
    chdir "NameService";
    opendir(THISDIR, ".");
    @allfiles = grep(!/^\.\.?$/, readdir(THISDIR));
    closedir(THISDIR);
    foreach $tmp (@allfiles){
        $test->DeleteFile ($tmp);
    }
    chdir "..";
}
# Run two Naming Servers in redundant mode and one client. Client uses iors
# in files to find the individual copies of the Naming Servers.
# NOTE(review): the first server passes $iorfile1 to -o while the second
# passes $test_iorfile2 — looks inconsistent; confirm against PerlACE
# LocalFile semantics before changing.
my $args = "-ORBEndPoint $ns_endpoint1 -o $iorfile1 -m 0 -r NameService";
my $prog = "$startdir/../../Naming_Service/tao_cosnaming";
$NS1 = $test->CreateProcess ("$prog", "$args");
$test->DeleteFile ($iorfile1);
$NS1->Spawn ();
if ($test->WaitForFileTimed ($iorfile1,
$test->ProcessStartWaitInterval()) == -1) {
    print STDERR "ERROR: cannot find file <$test_iorfile1>\n";
    $NS1->Kill (); $NS1->TimedWait (1);
    exit 1;
}
$args = "-ORBEndPoint $ns_endpoint2 -o $test_iorfile2 -m 0 -r NameService";
$prog = "$startdir/../../Naming_Service/tao_cosnaming";
$NS2 = $test->CreateProcess ("$prog", "$args");
$test->DeleteFile ($iorfile2);
$NS2->Spawn ();
if ($test->WaitForFileTimed ($iorfile2,
$test->ProcessStartWaitInterval()) == -1) {
    print STDERR "ERROR: cannot find file <$test_iorfile2>\n";
    $NS2->Kill (); $NS2->TimedWait (1);
    exit 1;
}
## Even though the ior file is present, the redundant naming service
## isn't really ready to go (most of the time). Sleeping 1 second
## allows the redundant naming service to get to a usable state.
sleep(1);
# Run the client against both redundant naming service replicas.
$args = "-p file://$test_iorfile1 -q file://$test_iorfile2";
$prog = "$startdir/client";
$CL = $test->CreateProcess ("$prog", "$args");
$client = $CL->SpawnWaitKill ($test->ProcessStartWaitInterval());
if ($client != 0) {
    print STDERR "ERROR: client returned $client\n";
    $status = 1;
}
# Clean up both servers and their ior files regardless of test outcome.
$NS1->Kill ();
$NS2->Kill ();
$test->DeleteFile ($iorfile1);
$test->DeleteFile ($iorfile2);
exit $status;
| 26.933824 | 84 | 0.643735 |
ed76a793e098decdc8571566f73ba12362ffd36b | 449 | pl | Perl | ARTe/work/tools/cygwin/lib/perl5/5.14/unicore/lib/Sc/Sund.pl | melvin-mancini/Multitasking-RealTime-Arduino-System | 6999beaf28f69b4c4a8f8badcc60f66e6e118477 | [
"MIT"
]
| 5 | 2018-12-18T20:19:43.000Z | 2022-02-21T21:53:09.000Z | Slic3r/Linux/lib/std/unicore/lib/Sc/Sund.pl | thomaspreece10/STLExtract | 2bea6d38135ebfda2558a33cc93565211e30a2bb | [
"MIT"
]
| null | null | null | Slic3r/Linux/lib/std/unicore/lib/Sc/Sund.pl | thomaspreece10/STLExtract | 2bea6d38135ebfda2558a33cc93565211e30a2bb | [
"MIT"
]
| 6 | 2016-09-23T02:54:47.000Z | 2022-03-10T22:04:19.000Z | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 6.0.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by the Perl program only. The format and even
# the name or existence of this file are subject to change without notice.
# Don't use it directly.
return <<'END';
1B80 1BAA
1BAE 1BB9
END
| 29.933333 | 78 | 0.681514 |
ed2982be6687100ea028660262fcc4594c94b59f | 2,307 | pm | Perl | lib/Annotation/ParalyzerClusterAnnotator.pm | shengqh/ngsperl | f81d5bf30171950583bb1ab656f51eabc1e9caf6 | [
"Apache-2.0"
]
| 6 | 2016-03-25T17:05:39.000Z | 2019-05-13T07:03:55.000Z | lib/Annotation/ParalyzerClusterAnnotator.pm | shengqh/ngsperl | f81d5bf30171950583bb1ab656f51eabc1e9caf6 | [
"Apache-2.0"
]
| null | null | null | lib/Annotation/ParalyzerClusterAnnotator.pm | shengqh/ngsperl | f81d5bf30171950583bb1ab656f51eabc1e9caf6 | [
"Apache-2.0"
]
| 9 | 2015-04-02T16:41:57.000Z | 2022-02-22T07:25:33.000Z | #!/usr/bin/perl
package Annotation::ParalyzerClusterAnnotator;

# Generates a PBS script that annotates PARalyzer cluster output with
# cqstools, producing one <sample>.ann.csv per sample.

use strict;
use warnings;

use File::Basename;

use CQS::PBS;
use CQS::ConfigUtils;
use CQS::SystemUtils;
use CQS::FileUtils;
use CQS::NGSCommon;
use CQS::StringUtils;
use CQS::UniqueTask;

our @ISA = qw(CQS::UniqueTask);

# Constructor: registers this task's name and the suffix used for its
# generated PBS file names.
sub new {
  my ($class) = @_;
  my $self = $class->SUPER::new();
  $self->{_name}   = __PACKAGE__;
  $self->{_suffix} = "_an";
  bless $self, $class;
  return $self;
}

# Writes one PBS script containing a cqstools paralyzer_annotation command
# per sample. Requires "coordinate_files" (array ref) in the config section.
sub perform {
  my ( $self, $config, $section ) = @_;

  my ( $task_name, $path_file, $pbs_desc, $target_dir, $log_dir, $pbs_dir, $result_dir, $option, $sh_direct, $cluster ) = $self->init_parameter( $config, $section );

  my $corFiles = $config->{$section}{coordinate_files} or die "define coordinate_files (array) in section $section first!";

  my %raw_files = %{ get_raw_files( $config, $section ) };

  my $pbs_file = $self->get_pbs_filename( $pbs_dir, $task_name );
  my $log      = $self->get_log_filename( $log_dir, $task_name );
  my $log_desc = $cluster->get_log_description($log);

  my $pbs = $self->open_pbs( $pbs_file, $pbs_desc, $log_desc, $path_file, $result_dir );

  for my $sample_name ( sort keys %raw_files ) {
    my @bam_files = @{ $raw_files{$sample_name} };
    my $bam_file  = $bam_files[0];

    # Annotation output sits next to the input: foo.bam -> foo.ann.csv.
    my $annFile = change_extension( $bam_file, ".ann.csv" );
    my $cfiles  = merge_string( ',', @{$corFiles} );

    print $pbs "cqstools paralyzer_annotation $option -i $bam_file -c $cfiles -o $annFile \n";
  }

  $self->close_pbs( $pbs, $pbs_file );
}

# Reports, per sample, the expected .ann.csv output file (filtered through
# $pattern) so downstream tasks can locate this task's results.
# Note: the previously-read "fasta_format" config flag was dead code (never
# used) and has been removed; samples are iterated in sorted order for
# deterministic behavior, matching perform().
sub result {
  my ( $self, $config, $section, $pattern ) = @_;

  my ( $task_name, $path_file, $pbs_desc, $target_dir, $log_dir, $pbs_dir, $result_dir, $option, $sh_direct ) = $self->init_parameter( $config, $section, 0 );

  my %raw_files = %{ get_raw_files( $config, $section ) };

  my $result = {};
  for my $sample_name ( sort keys %raw_files ) {
    my @bam_files = @{ $raw_files{$sample_name} };
    my $bam_file  = $bam_files[0];
    my $annFile   = change_extension( $bam_file, ".ann.csv" );

    my @result_files = ();
    push( @result_files, $annFile );
    $result->{$sample_name} = filter_array( \@result_files, $pattern );
  }
  return $result;
}

1;
| 28.481481 | 165 | 0.64456 |
ed8a50987ce78bfb78dab4b9be0ae4bd9e8e1471 | 1,413 | pm | Perl | lib/Data/Object/Signatures.pm | manwar/Data-Object | 39068b33f4b14edd33917aceff40e0f98ee6e04b | [
"Artistic-1.0"
]
| null | null | null | lib/Data/Object/Signatures.pm | manwar/Data-Object | 39068b33f4b14edd33917aceff40e0f98ee6e04b | [
"Artistic-1.0"
]
| null | null | null | lib/Data/Object/Signatures.pm | manwar/Data-Object | 39068b33f4b14edd33917aceff40e0f98ee6e04b | [
"Artistic-1.0"
]
| null | null | null | # ABSTRACT: Signatures Object for Perl 5
package Data::Object::Signatures;
use strict;
use warnings;
use 5.014;
use Data::Object;
use Data::Object::Library;
use Scalar::Util;
use parent 'Type::Tiny::Signatures';
our @DEFAULTS = @Type::Tiny::Signatures::DEFAULTS = 'Data::Object::Library';
# VERSION
1;
=encoding utf8
=head1 SYNOPSIS
use Data::Object::Signatures;
method hello (Str $name) {
say "Hello $name, how are you?";
}
=cut
=head1 DESCRIPTION
Data::Object::Signatures is a subclass of L<Type::Tiny::Signatures> providing
method and function signatures supporting all the type constraints provided by
L<Data::Object::Library>.
=cut
=head1 SEE ALSO
=over 4
=item *
L<Data::Object::Array>
=item *
L<Data::Object::Class>
=item *
L<Data::Object::Class::Syntax>
=item *
L<Data::Object::Code>
=item *
L<Data::Object::Float>
=item *
L<Data::Object::Hash>
=item *
L<Data::Object::Integer>
=item *
L<Data::Object::Number>
=item *
L<Data::Object::Role>
=item *
L<Data::Object::Role::Syntax>
=item *
L<Data::Object::Regexp>
=item *
L<Data::Object::Scalar>
=item *
L<Data::Object::String>
=item *
L<Data::Object::Undef>
=item *
L<Data::Object::Universal>
=item *
L<Data::Object::Autobox>
=item *
L<Data::Object::Immutable>
=item *
L<Data::Object::Library>
=item *
L<Data::Object::Prototype>
=item *
L<Data::Object::Signatures>
=back
=cut
| 10.78626 | 78 | 0.66242 |
ed102fc223ccf53ba24e3b0962d2173f13bbe04c | 1,462 | pm | Perl | lib/Google/Ads/GoogleAds/V4/Services/AccountBudgetProposalService.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
]
| null | null | null | lib/Google/Ads/GoogleAds/V4/Services/AccountBudgetProposalService.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
]
| null | null | null | lib/Google/Ads/GoogleAds/V4/Services/AccountBudgetProposalService.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
]
| null | null | null | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V4::Services::AccountBudgetProposalService;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseService);

# Fetches a single AccountBudgetProposal resource (HTTP GET on the
# resource-name template path).
sub get {
  my ( $self, $request_body ) = @_;

  return $self->SUPER::call(
    'GET',
    'v4/{+resourceName}',
    $request_body,
    'Google::Ads::GoogleAds::V4::Resources::AccountBudgetProposal'
  );
}

# Submits a mutate operation for account budget proposals (HTTP POST on the
# customer-scoped mutate path).
sub mutate {
  my ( $self, $request_body ) = @_;

  return $self->SUPER::call(
    'POST',
    'v4/customers/{+customerId}/accountBudgetProposals:mutate',
    $request_body,
    'Google::Ads::GoogleAds::V4::Services::AccountBudgetProposalService::MutateAccountBudgetProposalResponse'
  );
}

1;
| 31.782609 | 106 | 0.72777 |
ed31c6bd2ea91d79f422d5950de8d8eb2b19c9db | 4,789 | pl | Perl | scripts/ccds/prepare_cdstrack.pl | jmgonzmart/ensembl-analysis | 41c1d362bc0abce91a81a6615b3d61a6b82b7da5 | [
"Apache-2.0"
]
| null | null | null | scripts/ccds/prepare_cdstrack.pl | jmgonzmart/ensembl-analysis | 41c1d362bc0abce91a81a6615b3d61a6b82b7da5 | [
"Apache-2.0"
]
| null | null | null | scripts/ccds/prepare_cdstrack.pl | jmgonzmart/ensembl-analysis | 41c1d362bc0abce91a81a6615b3d61a6b82b7da5 | [
"Apache-2.0"
]
| null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2022] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
# NCBI provides SQL Server files, which are not compatible with MySQL.
# This script converts the SQL Server files provided by NCBI to MySQL files named "new_*.sql"
# and replaces empty values between tabs with \N in "Interpretations.txt".
# It also creates the extra table "EnsemblWithdrawals" which contains a list of actions related to the withdrawal comments from the annotators. If a CCDS has been withdrawn we may or may not want to remove the transcripts for our geneset altogether. This file tells us what to do for each withdrawal.
use warnings;
use strict;
use Getopt::Long;
# directory where the input files are located and the output files will be written
my $dir = undef;
&GetOptions(
'dir:s' => \$dir,
);
unless (defined $dir){
die "need to define -dir \n";
}
#3 files to edit:
my $sql = "sql/";
my $data = "data/";
my $create_tables = "createTables.sql";
my $interpretations = "Interpretations.txt";
my $create_keys = "createKeys.sql";
#fix create_tables
open (IN, $dir."/".$sql.$create_tables) or die "can't open $dir/".$sql.$create_tables."\n";
open (OUT, ">".$dir."/".$sql."new_".$create_tables) or die "can't open $dir/".$sql."new_".$create_tables."\n";
$/ = "\n\n";
my $line = "";
while(<IN>){
next if ($_=~/ALTER TABLE/);
my $entry = $_;
$entry =~s/\[dbo\]\.//g;
$entry =~s/CREATE\s+TABLE\s+dbo\./CREATE TABLE /g;
$entry =~s/\bGO\b/;/gi;
$entry =~s/\[gap_count\] \[int\] NOT NULL ,/gap_count int NOT NULL/ ;
$entry =~s/\[//g;
$entry =~s/\]//g;
$entry =~s/IDENTITY\s*\(1,\s*1\)/UNIQUE AUTO_INCREMENT/g;
$entry =~s/General_//g;
$entry =~s/NOT FOR REPLICATION //g;
$entry =~s/ ON PRIMARY TEXTIMAGE_ON PRIMARY/ COLLATE=latin1_swedish_ci ENGINE=MyISAM/g;
$entry =~s/ ON PRIMARY/ COLLATE=latin1_swedish_ci ENGINE=MyISAM/g;
$line .= "$entry\n";
}
$line =~ s/,\s*\n*\s*\)\s*ENGINE/\n) ENGINE/g;
print OUT $line;
$line = undef;
close IN;
#Add entry for extra table detailing Withdrawals:
print OUT "\n\nCREATE TABLE EnsemblWithdrawals (\n".
"\tccds_uid int NOT NULL ,\n".
"\taction ENUM( 'Keep', 'Remove transcript', 'Remove gene' ), \n".
"\tcomment text COLLATE Latin1_BIN NULL\n".
") COLLATE=latin1_swedish_ci ENGINE=MyISAM\n".
";\n";
close OUT;
#fix interpretations
open (IN, $dir."/".$data.$interpretations) or die "can't open $dir/".$data.$interpretations."\n";
open (OUT, ">".$dir."/".$data."new_".$interpretations) or die "can't open $dir/".$data."new_".$interpretations."\n";
$/ = "\n";
while(<IN>){
my $entry = $_;
$entry =~s/\t\t/\t\\N\t/g;
$entry =~s/\t\t/\t\\N\t/g;
$entry =~s/\t$/\t\\N/g;
while ($entry=~/\r/){
$entry =~s/\r//g;
}
print OUT "$entry";
}
close IN;
close OUT;
#fix create_keys
open (IN, $dir."/".$sql.$create_keys) or die "can't open $dir/".$sql.$create_keys."\n";
open (OUT, ">".$dir."/".$sql."new_".$create_keys) or die "can't open $dir/".$sql."new_".$create_keys."\n";
$/ = "\n\n";
while(<IN>){
my $entry = $_;
next if ($entry =~ /---------------------- Primary keys ------------------------/) ;
next if ($entry =~ /---------------------- Foreign keys ------------------------/) ;
$entry =~s/\[dbo\]\.//g;
$entry =~s/\bGO\b/;/gi;
$entry =~s/\[//g;
$entry =~s/\]//g;
$entry =~s/ WITH FILLFACTOR = 90 ON PRIMARY//g;
#separate out the alter_table clauses
if ($entry =~/FOREIGN KEY/){
my @rows = split/CONSTRAINT/, $entry;
my $first_line = shift @rows;
foreach my $constraint (@rows){
$constraint = join " CONSTRAINT ", $first_line, $constraint;
$constraint =~s/\),/\);\n/;
print OUT $constraint."\n";
}
}else{
print OUT "$entry";
}
}
close IN;
#Add entry for extra table:
print OUT "\n\nALTER TABLE EnsemblWithdrawals ADD\n".
"\tCONSTRAINT PK_ensemblWithdrawals PRIMARY KEY CLUSTERED\n".
"\t(\n".
"\t\tccds_uid\n".
"\t)\n".
";\n";
print OUT "\n\nALTER TABLE EnsemblWithdrawals ADD\n".
"\tCONSTRAINT FK_EnsemblWithdrawals_CcdsUids FOREIGN KEY\n".
"\t(\n".
"\t\tccds_uid\n".
"\t) REFERENCES CcdsUids (\n".
"\t\tccds_uid\n".
");\n";
close OUT;
| 27.210227 | 300 | 0.641052 |
ed4f8fc108a6d549b26778df3cdddbd69c9d4f96 | 3,727 | pm | Perl | lib/MIP/Program/Telomerecat.pm | BuildJet/MIP | f1f63117a7324e37dbcaa16c0298f4b4c857d44c | [
"MIT"
]
| null | null | null | lib/MIP/Program/Telomerecat.pm | BuildJet/MIP | f1f63117a7324e37dbcaa16c0298f4b4c857d44c | [
"MIT"
]
| null | null | null | lib/MIP/Program/Telomerecat.pm | BuildJet/MIP | f1f63117a7324e37dbcaa16c0298f4b4c857d44c | [
"MIT"
]
| null | null | null | package MIP::Program::Telomerecat;
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use strict;
use utf8;
use warnings;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw{ :all };
## MIPs lib/
use MIP::Constants qw{ $SPACE };
use MIP::Unix::Standard_streams qw{ unix_standard_streams };
use MIP::Unix::Write_to_file qw{ unix_write_to_file };
BEGIN {
require Exporter;
use base qw{ Exporter };
# Set the version for version checking
our $VERSION = 1.00;
# Functions and variables which can be optionally exported
our @EXPORT_OK = qw{ telomerecat_bam2length };
}
sub telomerecat_bam2length {
## Function : Perl wrapper for telomerecat bam2length. Based on version 3.4.0
## Returns : @commands
## Arguments: $filehandle => Filehandle to write to
## : $infile_paths_ref => Bam files
## : $outfile_path => Outfile path
## : $processes => Number of processes
## : $stderrfile_path => Stderrfile path
## : $stderrfile_path_append => Append stderr info to file path
## : $stdinfile_path => Stdinfile path
## : $stdoutfile_path => Stdoutfile path
## : $temp_directory => Temporary directory
my ($arg_href) = @_;
## Flatten argument(s)
my $filehandle;
my $infile_paths_ref;
my $outfile_path;
my $stderrfile_path;
my $stderrfile_path_append;
my $stdinfile_path;
my $stdoutfile_path;
my $temp_directory;
## Default(s)
my $processes;
my $tmpl = {
filehandle => {
store => \$filehandle,
},
infile_paths_ref => {
default => [],
required => 1,
store => \$infile_paths_ref,
strict_type => 1,
},
outfile_path => {
required => 1,
store => \$outfile_path,
strict_type => 1,
},
processes => {
allow => [ undef, qr/\A \d+ \z/xms ],
default => 1,
store => \$processes,
strict_type => 1,
},
stderrfile_path => {
store => \$stderrfile_path,
strict_type => 1,
},
stderrfile_path_append => {
store => \$stderrfile_path_append,
strict_type => 1,
},
stdinfile_path => { store => \$stdinfile_path, strict_type => 1, },
stdoutfile_path => {
store => \$stdoutfile_path,
strict_type => 1,
},
temp_directory => { store => \$temp_directory, strict_type => 1, },
};
check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};
my @commands = qw{ telomerecat bam2length };
push @commands, q{--output} . $SPACE . $outfile_path;
if ($processes) {
push @commands, q{-p} . $SPACE . $processes;
}
if ($temp_directory) {
push @commands, q{--temp_dir} . $SPACE . $temp_directory;
}
push @commands, join $SPACE, @{$infile_paths_ref};
push @commands,
unix_standard_streams(
{
stderrfile_path => $stderrfile_path,
stderrfile_path_append => $stderrfile_path_append,
stdinfile_path => $stdinfile_path,
stdoutfile_path => $stdoutfile_path,
}
);
unix_write_to_file(
{
commands_ref => \@commands,
filehandle => $filehandle,
separator => $SPACE,
}
);
return @commands;
}
1;
| 26.81295 | 77 | 0.540649 |
73d78978c012f5b613c1d915893baafc740ae4b5 | 326 | t | Perl | tests/embeddedcode1.t | nie-game/terra | 36a544595e59c6066ab9e5b5fa923b82b4be0c41 | [
"MIT"
]
| 1,575 | 2015-01-01T13:40:05.000Z | 2019-10-24T22:08:08.000Z | tests/embeddedcode1.t | nie-game/terra | 36a544595e59c6066ab9e5b5fa923b82b4be0c41 | [
"MIT"
]
| 304 | 2015-01-02T22:35:30.000Z | 2019-10-23T20:43:18.000Z | tests/embeddedcode1.t | nie-game/terra | 36a544595e59c6066ab9e5b5fa923b82b4be0c41 | [
"MIT"
]
| 150 | 2015-01-06T07:18:01.000Z | 2019-10-24T22:08:10.000Z | import "lib/embeddedcode"
local c = 3
local a = 4
local b = defexp(a) a + c
local d = b(10)
assert(d == 13)
local e = def(x)
return x + c + a
end
local f = e(4)
assert(f == 11)
local g = deft(x : int) return x + c + a end
g:disas()
assert(g(4) == 11)
local h = deftexp(x : int) x + c + a
h:disas()
assert(h(5) == 12)
| 14.173913 | 44 | 0.567485 |
ed342f6e038f037dcf20f94b602136c3055096e7 | 1,976 | pm | Perl | local/lib/perl5/DateTime/TimeZone/America/Caracas.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
]
| null | null | null | local/lib/perl5/DateTime/TimeZone/America/Caracas.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
]
| null | null | null | local/lib/perl5/DateTime/TimeZone/America/Caracas.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
]
| null | null | null | # This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/Q713JNUf8G/southamerica. Olson data version 2016a
#
# Do not edit this file directly.
#
package DateTime::TimeZone::America::Caracas;
$DateTime::TimeZone::America::Caracas::VERSION = '1.95';

use strict;

use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;

@DateTime::TimeZone::America::Caracas::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );

# Observance spans generated from the Olson database. Each row:
# [ utc_start, utc_end, local_start, local_end, offset_seconds,
#   is_dst (presumably; always 0 here), abbreviation ].
my $spans =
[
    [
DateTime::TimeZone::NEG_INFINITY, #    utc_start
59611177664, #      utc_end 1890-01-01 04:27:44 (Wed)
DateTime::TimeZone::NEG_INFINITY, #  local_start
59611161600, #    local_end 1890-01-01 00:00:00 (Wed)
-16064,
0,
'LMT',
    ],
    [
59611177664, #    utc_start 1890-01-01 04:27:44 (Wed)
60308944060, #      utc_end 1912-02-12 04:27:40 (Mon)
59611161604, #  local_start 1890-01-01 00:00:04 (Wed)
60308928000, #    local_end 1912-02-12 00:00:00 (Mon)
-16060,
0,
'CMT',
    ],
    [
60308944060, #    utc_start 1912-02-12 04:27:40 (Mon)
61977933000, #      utc_end 1965-01-01 04:30:00 (Fri)
60308927860, #  local_start 1912-02-11 23:57:40 (Sun)
61977916800, #    local_end 1965-01-01 00:00:00 (Fri)
-16200,
0,
'VET',
    ],
    [
61977933000, #    utc_start 1965-01-01 04:30:00 (Fri)
63332866800, #      utc_end 2007-12-09 07:00:00 (Sun)
61977918600, #  local_start 1965-01-01 00:30:00 (Fri)
63332852400, #    local_end 2007-12-09 03:00:00 (Sun)
-14400,
0,
'VET',
    ],
    [
63332866800, #    utc_start 2007-12-09 07:00:00 (Sun)
DateTime::TimeZone::INFINITY, #      utc_end
63332850600, #  local_start 2007-12-09 02:30:00 (Sun)
DateTime::TimeZone::INFINITY, #  local_end
-16200,
0,
'VET',
    ],
];

sub olson_version {'2016a'}

sub has_dst_changes {0}

sub _max_year {2026}

sub _new_instance {
    return shift->_init( @_, spans => $spans );
}

1;
| 23.807229 | 90 | 0.674595 |
ed1c472f1f83fae1a4dc30c7f3174943532bb051 | 8,413 | t | Perl | S03-operators/set_intersection.t | jmaslak/roast | d69446499800e7cb274c0c240691a8199e69b22c | [
"Artistic-2.0"
]
| null | null | null | S03-operators/set_intersection.t | jmaslak/roast | d69446499800e7cb274c0c240691a8199e69b22c | [
"Artistic-2.0"
]
| null | null | null | S03-operators/set_intersection.t | jmaslak/roast | d69446499800e7cb274c0c240691a8199e69b22c | [
"Artistic-2.0"
]
| null | null | null | use v6;
use Test;
# This test file tests the following set operators:
# (&) intersection (ASCII)
# ∩ intersection
# Empty mutables that have the internal hash allocated
(my $esh = <a>.SetHash)<a>:delete;
(my $ebh = <a>.BagHash)<a>:delete;
(my $emh = <a>.MixHash)<a>:delete;
my @types = Set, SetHash, Bag, BagHash, Mix, MixHash;
# single parameter, result
my @pairs =
<a b c>.Set, <a b c>.Set,
<a b c>.SetHash, <a b c>.SetHash,
<a b c>.Bag, <a b c>.Bag,
<a b c>.BagHash, <a b c>.BagHash,
<a b c>.Mix, <a b c>.Mix,
<a b c>.MixHash, <a b c>.MixHash,
<a b c>, <a b c>.Set,
{:42a,:0b}, <a>.Set,
:{:42a,:0b}, <a>.Set,
42, 42.Set,
;
# two parameters, result
my @triplets =
# result should be a Set(Hash)
set(), set(), set(),
SetHash.new, set(), SetHash.new,
set(), SetHash.new, set(),
SetHash.new, SetHash.new, SetHash.new,
$esh, set(), SetHash.new,
set(), $esh, set(),
$esh, $esh, SetHash.new,
<a b>.Set, set(), set(),
<a b>.SetHash, set(), SetHash.new,
<a b>.Set, <a b>.Set, <a b>.Set,
<a b>.SetHash, <a b>.SetHash, <a b>.SetHash,
<a b>.Set, <c d>.Set, set(),
<a b c>.Set, <b c d>.Set, <b c>.Set,
<a b>.SetHash, <c d>.SetHash, SetHash.new,
<a b c>.SetHash, <b c d>.SetHash, <b c>.SetHash,
# result should be a Bag(Hash)
bag(), bag(), bag(),
BagHash.new, bag(), BagHash.new,
bag(), BagHash.new, bag(),
BagHash.new, BagHash.new, BagHash.new,
$ebh, bag(), BagHash.new,
bag(), $ebh, bag(),
$ebh, $ebh, BagHash.new,
<a b b>.Bag, bag(), bag(),
<a b b>.BagHash, bag(), BagHash.new,
<a b b>.Bag, <a b>.Bag, <a b>.Bag,
<a b b>.BagHash, <a b>.BagHash, <a b>.BagHash,
<a b b>.Bag, <c d>.Bag, bag(),
<a b b c>.Bag, <b c d>.Bag, <b c>.Bag,
<a b b>.BagHash, <c d>.BagHash, BagHash.new,
<a b b c>.BagHash, <b c d>.BagHash, <b c>.BagHash,
# result should be a Mix(Hash)
mix(), mix(), mix(),
MixHash.new, mix(), MixHash.new,
mix(), MixHash.new, mix(),
MixHash.new, MixHash.new, MixHash.new,
$emh, mix(), MixHash.new,
mix(), $emh, mix(),
$emh, $emh, MixHash.new,
(a=>pi,b=>tau).Mix, mix(), mix(),
(a=>pi,b=>tau).MixHash, mix(), MixHash.new,
(a=>pi,b=>tau).Mix, <a b>.Mix, <a b>.Mix,
(a=>pi,b=>tau).MixHash, <a b>.MixHash, <a b>.MixHash,
(a=>pi,b=>tau).Mix, <c d>.Mix, mix(),
(a=>pi,b=>tau).Mix, <b c>.Mix, <b>.Mix,
(a=>pi,b=>tau).MixHash, <c d>.MixHash, MixHash.new,
(a=>pi,b=>tau).MixHash, <b c>.MixHash, <b>.MixHash,
# coercions
<a b>.Set, <a b b>.Bag, <a b>.Bag,
<a b>.Set, <a b b>.BagHash, <a b>.Bag,
<a b>.SetHash, <a b b>.Bag, <a b>.BagHash,
<a b>.SetHash, <a b b>.BagHash, <a b>.BagHash,
<a b>.Bag, <a b b>.Mix, <a b>.Mix,
<a b>.Bag, <a b b>.MixHash, <a b>.Mix,
<a b>.BagHash, <a b b>.Mix, <a b>.MixHash,
<a b>.BagHash, <a b b>.MixHash, <a b>.MixHash,
<a b c>.Set, <a b c d>, <a b c>.Set,
<a b c>.SetHash, <a b c d>, <a b c>.SetHash,
<a b c>.Bag, <a b c d>, <a b c>.Bag,
<a b c>.BagHash, <a b c d>, <a b c>.BagHash,
<a b c>.Mix, <a b c d>, <a b c>.Mix,
<a b c>.MixHash, <a b c d>, <a b c>.MixHash,
<a b c>, <c d e>, <c>.Set,
(:42a,:0b), (:c,:42d,"e"), set(),
(:42a,:0b), (:a,:42d,"e"), <a>.Set,
{:42a,:0b}, {:a,:c,:42d}, <a>.Set,
:{42=>"a",666=>""}, :{55=>"c",66=>1}, set(),
:{42=>"a",666=>""}, :{55=>"c",666=>1}, set(),
:{42=>"a",666=>""}, :{42=>"c",666=>1}, 42.Set,
:{42=>"a",666=>""}, {:c,:42d}, set(),
:{a=>42,666=>""}, {:a,:42d}, <a>.Set,
{:42a,:0b}, <c d e>, set(),
{:42a,:0b}, <a d e>, <a>.Set,
:{42=>"a",666=>""}, <a b c>, set(),
:{a=>42,666=>""}, <a b c>, <a>.Set,
42, 666, set(),
;
# List with 3 parameters, result
my @quads =
[<a b c>.Set, <b c d>.Set, <c d e>.Set], <c>.Set,
[<a b c>.Bag, <b c d>.Bag, <c d e>.Bag], <c>.Bag,
[<a b c>.Mix, <b c d>.Mix, <c d e>.Mix], <c>.Mix,
[<a b c>.Set, <b c d>.Set, <c d e>.Bag], <c>.Bag,
[<a b c>.Set, <b c d>.Set, <c d e>.Mix], <c>.Mix,
[<a b c>.Set, <b c d>.Bag, <c d e>.Mix], <c>.Mix,
[<a b c>, <b c d>, <c d e>], <c>.Set,
[<a b c>, <b c d>, <c d e>.Set], <c>.Set,
[<a b c>, <b c d>, <c d e>.Bag], <c>.Bag,
[<a b c>, <b c d>, <c d e>.Mix], <c>.Mix,
[<a b c>, <b c d>.Bag, <c d e>.Mix], <c>.Mix,
[{:a,:b,:c}, {:b,:c,:d}, {:c,:d,:e}], <c>.Set,
[{:a,:b,:c}, {:b,:c,:d}, <c d e>.Set], <c>.Set,
[{:a,:b,:c}, {:b,:c,:d}, <c d e>.Bag], <c>.Bag,
[{:a,:b,:c}, {:b,:c,:d}, <c d e>.Mix], <c>.Mix,
[{:a,:b,:c}, <b c d>, {:c,:d,:e}], <c>.Set,
[{:a,:b,:c}, <b c d>, <c d e>.Set], <c>.Set,
[{:a,:b,:c}, <b c d>, <c d e>.Bag], <c>.Bag,
[{:a,:b,:c}, <b c d>, <c d e>.Mix], <c>.Mix,
[(:42a).Bag, (:7a).Bag, (:43a).Bag], (:7a).Bag,
[(:42a).Bag, bag(), (:43a).Bag], bag(),
[(a=>-42).Mix, <a>.Mix, (:42a).Mix], (a=>-42).Mix,
[(a=>-42).Mix, set(), (:42a).Mix], mix(),
[(a=>-42).Mix, bag(), (:42a).Mix], mix(),
[(a=>-42).Mix, mix(), (:42a).Mix], mix(),
[(a=>-42).Mix, <b>.Set, (:42a).Bag], mix(),
[(a=>-42).Mix, <b>.Bag, (:42a).Bag], mix(),
[(a=>-42).Mix, <b>.Mix, (:42a).Bag], mix(),
<a b c>, set()
;
plan 2 * (1 + 3 * @types + @pairs/2 + @triplets/3 + 6 * @quads/2);
# intersection
for
&infix:<(&)>, "(&)",
&infix:<∩>, "∩"
-> &op, $name {
is-deeply op(), set(), "does $name\() return set()";
for @types -> \qh {
#exit dd qh unless
is-deeply op(qh.new,qh.new,qh.new), qh.new,
"Sequence of empty {qh.^name} is the empty {qh.^name}";
throws-like { op(qh.new,^Inf) }, X::Cannot::Lazy,
"Cannot {qh.perl}.new $name lazy list";
throws-like { op(qh.new(<a b c>),^Inf) }, X::Cannot::Lazy,
"Cannot {qh.perl}.new(<a b c>) $name lazy list";
}
for @pairs -> $parameter, $result {
#exit dd $parameter, $result unless
is-deeply op($parameter.item), $result,
"infix:<$name>(|$parameter.gist())";
}
for @triplets -> $left, $right, $result {
#exit dd $left, $right, $result unless
is-deeply op($left,$right), $result,
"$left.gist() $name $right.gist()";
}
for @quads -> @params, $result {
for @params.permutations -> @mixed {
#exit dd @mixed, $result unless
is-deeply op(|@mixed), $result,
"[$name] @mixed>>.gist()";
}
}
}
# vim: ft=perl6
| 42.065 | 67 | 0.358493 |
ed008018e5fa2855d9e9ff72811a863cf55cd06b | 11,139 | pl | Perl | tools/intlmap.pl | npocmaka/Windows-Server-2003 | 5c6fe3db626b63a384230a1aa6b92ac416b0765f | [
"Unlicense"
]
| 17 | 2020-11-13T13:42:52.000Z | 2021-09-16T09:13:13.000Z | tools/intlmap.pl | sancho1952007/Windows-Server-2003 | 5c6fe3db626b63a384230a1aa6b92ac416b0765f | [
"Unlicense"
]
| 2 | 2020-10-19T08:02:06.000Z | 2020-10-19T08:23:18.000Z | tools/intlmap.pl | sancho1952007/Windows-Server-2003 | 5c6fe3db626b63a384230a1aa6b92ac416b0765f | [
"Unlicense"
]
| 14 | 2020-11-14T09:43:20.000Z | 2021-08-28T08:59:57.000Z |
# USE section
use lib $ENV{ "RazzleToolPath" };
use GetParams;
use CkLang;
use strict;
no strict 'vars';
# GLOBALS
$ScriptName=$0;
$SDXROOT=$ENV{SDXROOT};
$INTLVIEW_FNAME="$SDXROOT\\tools\\intlview.map";
%SDMAP=();
%INTLVIEW=();
$Lang="";
$Project="";
$Quiet="";
$FullView="";
# NOTE(review): $CODEBASE is not referenced in the visible portion of this
# script — confirm whether it is used by helper subs further down.
$CODEBASE="NT";
$BRANCH=$ENV{_BuildBranch};
$LOGGING=0;
$ERROR=1;
$WARNING=2;
%MSG=($LOGGING => "", $ERROR => "error: ", $WARNING => "warning: ");
# MAIN {
# Parse the command line parameters.
&ParseCmdLine( @ARGV );
# print $FullView ? "Selective Mapping\n" : "Selective/Exclusionary Mapping\n";
# Verify the command line parameters and the environment.
&VerifyParams();
# Make sd.map writable before updating the enlistment.
&MarkReadWrite();
# Load data from the "SD.MAP" file.
&LoadSDMap();
# Load the international client-view template (intlview.map).
&LoadIntlview();
# A language implies the two <lang>_res / <lang>_bin projects; otherwise
# enlist/map the code projects (optionally restricted to one project).
if ( $Lang ) {
    &EnlistLangProj($Lang);
} else {
    &EnlistCodeProj($Project);
}
# Mark sd.map as read only.
&MarkReadOnly();
# } # MAIN
# Enlist, then map, the two per-language projects (<lang>_res, <lang>_bin).
# Both projects are enlisted before any mapping happens, preserving the
# original operation order.
sub EnlistLangProj {
    my ( $langname ) = @_;
    my @lang_projects = ( $langname . "_res", $langname . "_bin" );
    &EnlistProject($_) for @lang_projects;
    &MapProject($_) for @lang_projects;
} # EnlistLangProj
# Enlist and map every project listed in %INTLVIEW, in sorted order.  When a
# specific project name is given, act on that one only (names are compared
# case-insensitively).
sub EnlistCodeProj {
    my ( $projname ) = @_;
    for my $proj (sort keys %INTLVIEW) {
        next if $projname && lc($projname) ne lc($proj);
        &EnlistProject($proj);
        &MapProject($proj);
    }
} # EnlistCodeProj
# Enlist the given project via "sdx enlist" unless it is already present in
# %SDMAP.  Prompts the user first unless -quiet was given.  Fatal error if
# the enlistment cannot be verified afterwards.
# NOTE(review): $LOGMSG is not among the globals defined at the top of this
# script ($LOGGING/$ERROR/$WARNING are) — confirm it is defined elsewhere in
# the file, otherwise DisplayMsg receives undef.
sub EnlistProject {
    my ( $projname ) = @_;
    if ( !$Quiet ) {
        &AskForOK( $projname );
    }
    printf "\n";
    &DisplayMsg( $LOGMSG, "Enlisting project \"$projname\" in branch \"$BRANCH\"...");
    # %SDMAP keys are lowercased project names; presence means already enlisted.
    if ( exists $SDMAP{lc($projname)} ) {
        &DisplayMsg( $LOGMSG, "Already enlisted in project \"$projname\".");
        return;
    }
    &ExecuteCmd( "sdx enlist $projname /q");
    # As sdx does not set the error level in case of errors, reload the SD.MAP
    # mapping file to verify that the project got actually enlisted.
    &LoadSDMap();
    if ( ! exists $SDMAP{lc($projname)} ){
        &FatalError( "Enlist project \"$projname\" command failed." );
    } else {
        &DisplayMsg( $LOGMSG, "Project \"$projname\" enlisted successfully." );
    }
    return;
} # EnlistProject
# Rewrite the SD client view for one project: dump the current client spec
# to sd.client, let UpdateClient edit it, then feed it back to "sd client -i".
# Runs inside the project's working directory; the temp sd.client is always
# deleted afterwards.
sub MapProject {
    my ( $projname ) = @_;
    my $workdir = "$SDXROOT\\$SDMAP{lc($projname)}";
    &DisplayMsg( $LOGMSG, "cd /d $workdir");
    chdir ( $workdir ) ||
        &FatalError( "Unable to change directory to \"$workdir\" to update project's \"$projname\" client view.");
    # Dump the current client spec to a file for editing.
    &ExecuteCmd( "sd client -o \> sd.client" );
    if ( &UpdateClient( $projname ) ) {
        # Push the edited spec back into the SD server.
        &ExecuteCmd( "sd client -i \< sd.client" );
        &DisplayMsg( $LOGMSG, "Project \"$projname\" mapped successfully." );
    }
    &ExecuteCmd( "del sd.client" );
} # MapProject
# Edit the dumped client spec (sd.client) in place: locate the "View:"
# section, optionally remove stale per-language mapping lines, truncate the
# spec there, and append this project's view lines from %INTLVIEW (with the
# <client> placeholder replaced by the real client name).  Returns 1.
sub UpdateClient {
    my ( $projname ) = @_;
    my $workdir = "$SDXROOT\\$SDMAP{lc($projname)}";
    my @sdclient = ();
    my $i = 0;
    my $j = 0;
    open( FILE, "sd.client" ) ||
        &FatalError("Unable to open file \"$workdir\\sd.client\" for reading.");
    @sdclient = <FILE>;
    close( FILE );
    # Find the start of the View: section.
    for ( $i=0; $i < @sdclient; $i++ ) {
        if ( $sdclient[$i] =~ /^View:/ ) {
            last;
        }
    }
    if ( $i == @sdclient ) {
        # NOTE(review): fatal error raised with an empty message — consider
        # reporting "no View: section found in sd.client".
        &FatalError( "");
    }
    # NOTE(review): $lang is not a visible global (only $Lang is set at file
    # scope); if $lang is undef, /$lang/i is the empty pattern, which in Perl
    # reuses the LAST SUCCESSFUL match rather than matching everything —
    # confirm this condition behaves as intended.
    if ($projname=~/$lang/i) {
        while( $i < @sdclient ) {
            if ( ($sdclient[$i] =~ /$projname/i) || ($sdclient[$i] =~ /^\s*$/) ) {
                # remove this language's item in client view
                splice(@sdclient, $i, 1);
            } else {
                $i++;
            }
        }
    }
    # Truncate everything after position $i, then append the project's view
    # lines from intlview.map, one per line, tab-indented.
    $#sdclient=$i;
    for ( $j=0; $j < @{$INTLVIEW{lc($projname)}}; $j++ ) {
        $sdclient[$i+$j+1] = sprintf( "\t%s\n", ${$INTLVIEW{lc($projname)}}[$j]);
        $sdclient[$i+$j+1] =~ s/\<client\>/$SDMAP{client}/;
    }
    &DisplayMsg( $LOGMSG, "Saving project's \"$projname\" updated client view...");
    open(FILE, ">sd.client") ||
        &FatalError( "Unable to open \"$workdir\\sd.client\" for writing." );
    for ($i=0; $i < @sdclient; $i++ ) {
        printf FILE "$sdclient[$i]";
    }
    close (FILE);
    return 1;
} # UpdateClient
# Parse $INTLVIEW_FNAME into the global %INTLVIEW hash: each
# non-comment line is "<key> <view mapping...>".  <lang>/<branch>
# placeholders are substituted with the lower-cased globals; language
# entries are skipped when no language was requested, and
# exclusionary ('-') lines are skipped in full-view mode.
sub LoadIntlview {
my @mapfile = ();
my $i=0;
my $key;
my $value;
open( FILE, $INTLVIEW_FNAME ) ||
&FatalError( "Unable to load input file $INTLVIEW_FNAME." );
@mapfile = <FILE>;
close( FILE );
for ($i=0; $i < @mapfile; $i++) {
foreach ($mapfile[$i]) {
SWITCH: {
# commented line
if ( /\s*;/ ) { last SWITCH;}
# valid entry
if (/\s*(\S+)\s+(\S+.*)/ ) {
($key, $value) = ($1, $2);
# Ignore language rows when enlisting the source projects.
next if (( !$Lang ) && ( $key =~ /lang/ ));
$key =~ s/\<lang\>/lc$Lang/eg;
$value =~ s/\<lang\>/lc$Lang/eg;
$value =~ s/\<branch\>/lc$BRANCH/eg;
# Full view: drop exclusionary mapping lines.
next if $FullView and $value =~/^\-/;
push @{$INTLVIEW{lc($key)}}, $value;
last SWITCH;
}
# default
last SWITCH;
} # SWITCH
} # foreach
} # for
# foreach (sort keys %INTLVIEW) {
# for ($i=0; $i < @{$INTLVIEW{$_}}; $i++ ) {
# printf "key=$_, value[$i]=${$INTLVIEW{$_}}[$i]\n";
# }
# }
} # LoadIntlview
# (Re)load %SDMAP from %SDXROOT%\sd.map.  Lines are "<key> = <value>";
# '#' starts a comment.  Afterwards the codebase, branch and client
# entries are sanity-checked against the globals: a codebase mismatch
# is fatal, a branch mismatch only warns, a missing client is fatal.
sub LoadSDMap {
my @mapfile=();
my $i=0;
my $sdmap="$SDXROOT\\sd.map";
# Start from a clean slate — this sub is also used to re-verify
# enlistments after running sdx.
foreach (keys %SDMAP) {
delete $SDMAP{$_};
}
( -e $sdmap ) || &FatalError( "Unable to find file $sdmap" );
open( FILE, $sdmap ) || &FatalError( "Unable to open file $sdmap." );
@mapfile=<FILE>;
close(FILE);
for ($i=0; $i < @mapfile; $i++) {
foreach ($mapfile[$i]) {
SWITCH: {
# commented lines
if ( /\s*#/ ) { last SWITCH;}
# valid entries
if (/\s*(\S+)\s*=\s*(\S+)/ ) {
$SDMAP{lc($1)}=$2;
last SWITCH;
}
# default
last SWITCH;
} # SWITCH
} # foreach
} # for
# verify codebase
( exists $SDMAP{codebase} ) ||
&FatalError( "CODEBASE is not listed in the SD mapping file $sdmap." );
( lc($SDMAP{codebase}) eq lc($CODEBASE) ) ||
&FatalError( "Codebase '$CODEBASE' does not match the SDXROOT '$SDMAP{codebase}' codebase." );
# verify branch
( exists $SDMAP{branch} ) ||
&FatalError( "BRANCH is not listed in the SD mapping file $sdmap." );
( lc($SDMAP{branch}) eq lc($BRANCH) ) ||
&DisplayMsg( $WARNING, "Branch \"$BRANCH\" does not match the SDXROOT \"$SDMAP{branch}\" branch.");
# verify client
( exists $SDMAP{client} ) ||
&FatalError( "CLIENT is not listed in the SD mapping file $sdmap." );
} # LoadSDMap
# Make %SDXROOT%\sd.map hidden and read-only so it is not edited by
# accident.
sub MarkReadOnly {
    my $map_file = "$SDXROOT\\sd.map";
    &ExecuteCmd( "attrib +h +r $map_file");
} # MarkReadOnly
# Clear the read-only bit on %SDXROOT%\sd.map (keeping it hidden) so
# it can be rewritten.
sub MarkReadWrite {
    my $map_file = "$SDXROOT\\sd.map";
    &ExecuteCmd( "attrib +h -r $map_file");
} # MarkReadWrite
# Tell the user which project is about to be enlisted and give them a
# 30 second window to abort with Ctrl+C before any work starts.
sub AskForOK {
    my ( $projname ) = @_;
    # Use print, not printf: the old code passed the interpolated
    # message as a printf FORMAT string, so a '%' in the project name
    # would be mis-expanded as a format directive.
    print( "\n About to enlist in project $projname.\n" );
    print( " Press Ctrl+C in the next 30s if you wish to exit script.\n");
    print( " ...");
    sleep 30;
} # AskForOK
# Log a shell command, run it with system(), and abort the script via
# FatalError when it exits non-zero.
sub ExecuteCmd {
    my ( $cmd ) = @_;
    DisplayMsg( $LOGMSG, $cmd );
    &FatalError( "Command \"$cmd\" failed." ) if system( "$cmd" );
} # ExecuteCmd
# Declare the command-line syntax and hand the raw arguments to
# GetParameters.  -o lists the option letters ("b:l:p:fq", ':' means
# the option takes a value) and -p the globals they populate, in the
# same order: BRANCH, Lang, Project, FullView, Quiet.
sub ParseCmdLine {
my @arguments = @_;
my @syntax = (
-n => '',
-o => 'b:l:p:fq',
-p => "BRANCH Lang Project FullView Quiet"
);
&GetParameters( \@syntax, \@arguments );
} # ParseCmdLine
# Run the GetParams option processor over the declared syntax and the
# actual arguments; GetParams sets the globals named in the -p list
# (and $HELP for -?).  Shows usage and exits when help was requested.
sub GetParameters {
my ($syntax, $arguments)=@_;
my $getparams=GetParams->new;
# -Process is a code ref inside the GetParams object.
&{$getparams->{-Process}}(@$syntax,@$arguments);
if ( $HELP ) {
&Usage();
}
} # GetParameters
# Validate the parsed command line: the script must run inside a
# razzle; "intl" is an alias for the source projects; language
# projects are restricted to a known branch set (falling back to
# "main" with a warning); language and project are mutually exclusive.
sub VerifyParams {
    # Must run this script from a razzle
    if ( !$ENV{RazzleToolPath} || !$ENV{SDXROOT} ) {
        &DisplayMsg( $ERROR, "Please run this script from a razzle." );
        &Usage();
    }
    if ( $Lang ) {
        if ( uc($Lang) eq "INTL" ) {
            # "intl" means: enlist the source projects, no language.
            $Lang = "";
        }
        else
        {
            # Only MAIN is accepted for language projects.
            if ( ! &cklang::CkLang($Lang) ) {
                &DisplayMsg( $ERROR, "Invalid language $Lang." );
                &Usage();
            }
            my %valid_branch = map { $_ => 1 }
                qw(main locpart idx01 idx02 idx03 dnsrv dnsrv_dev srv03_rtm);
            if ( !$valid_branch{lc($BRANCH)} ) {
                $BRANCH = "main";
                &DisplayMsg( $WARNING, "Branch has been reset to \"$BRANCH\". The only valid branches for language projects are \"main\", \"locpart\", \"idx01\", \"idx02\", \"idx03\", \"dnsrv\", \"dnsrv_dev\" and \"srv03_rtm\"." );
            }
        }
    }
    if ( $Lang && $Project ) {
        &DisplayMsg( $ERROR, "You can specify language or project, but not both." );
        &Usage();
    }
} # VerifyParams
# Print a status line "<script> : <class prefix><text>", where $_[0]
# is the message-class key looked up in %MSG (log/warning/error) and
# $_[1] is the message text.
sub DisplayMsg {
# $_[0]/$_[1]: the old @_[0]/@_[1] were one-element array slices,
# which trigger "better written as $_[0]" warnings under 'use warnings'.
print "$ScriptName : $MSG{$_[0]}$_[1]\n";
} # DisplayMsg
# Report an error-class message through DisplayMsg and terminate the
# whole script with exit status 1.
sub FatalError {
    my ( $message ) = @_;
    &DisplayMsg( $ERRMSG, $message );
    exit 1;
} # FatalError
# Print the command-line help text (the heredoc below is user-facing
# output and is left untouched) and exit with status 1.
sub Usage {
print <<USAGE;
perl $0 - Enlist and map the projects pertinent to Whistler international.
Usage:
perl $0 [-b:<branch>] [-l:<language> | -p:<project>] [-q] [-f]
<branch > Source depot branch.
Enlist in the specified branch.
Default is %_BuildBranch%, $BRANCH in this case.
<language> Language.
If not specified, enlist in the source projects.
Acepted values are "intl" and any language
listed in codes.txt.
If "intl", enlist the source projects.
Otherwise, enlist and map the language projects
<language>_res and <language>_bin.
<project> Project name.
If specified, enlist and map the given source project.
<f>ullfview Flag to supply when generating integration/reverse integration
enlistment.
If specified, client view will be build without
exclusionary mapping ('-') lines.
If no project or language is specified, enlist and map the source
projects. Tools\\intlview.map lists the projects and the client
view mappings pertinent to international builds.
q Quiet mode.
Examples:
perl $0 /q
Enlist and map the source projects, default branch.
perl $0 -b:beta1 -l:intl /q
Same as above, beta1 branch.
perl $0 -p:Admin
Enlist and map the admin project, default branch.
perl $0 -l:jpn /q /f
Enlist and map jpn for integration clients.
perl $0 -l:ger /q
Enlist and exclusionary map ger_res, ger_bin, main branch for
ger project build. The client view will contain e.g.
//depot/main/ger_res/... //XXXX/loc/res/ger/...
-//depot/main/ger_res/.../EDBs/... //XXXX/loc/res/ger/.../EDBs/...
USAGE
# Non-zero exit: Usage is reached on request (-?) or on bad arguments.
exit(1);
} # Usage
ed5dbb6dc2bc482bf2c518ca8164a9b040c446a9 | 568 | t | Perl | t/03-pointer.t | salortiz/NativeHelpers-Blob | b00a4899ce219dae5fe97e9e414d01dd92874f53 | [
"Artistic-2.0"
]
| 7 | 2016-09-20T15:58:23.000Z | 2020-07-01T23:11:47.000Z | t/03-pointer.t | salortiz/NativeHelpers-Blob | b00a4899ce219dae5fe97e9e414d01dd92874f53 | [
"Artistic-2.0"
]
| 7 | 2016-10-17T18:50:01.000Z | 2019-08-25T03:20:45.000Z | t/03-pointer.t | salortiz/NativeHelpers-Blob | b00a4899ce219dae5fe97e9e414d01dd92874f53 | [
"Artistic-2.0"
]
| 5 | 2016-06-04T20:39:25.000Z | 2019-02-26T16:56:40.000Z | use v6;
# Tests for NativeHelpers::Pointer: typed Pointer arithmetic
# (succ/pred/+/-/++) over a CArray[uint16] backing store.
use Test;
use NativeCall;
plan 10;
use NativeHelpers::Pointer;
# Backing storage: 10, 20, ..., 100 as native uint16 values.
my CArray[uint16] $a .= new: 10, 20 ... 100;
my $p = nativecast(Pointer[uint16], $a);
is $p.deref, 10, 'expected 10';
ok (my $np = $p.succ), 'succ works';
isa-ok $np, Pointer[uint16];
# succ must advance by exactly one element, i.e. sizeof(uint16) bytes.
is $np - $p, nativesizeof(uint16), 'expected offset';
is $np.deref, 20, 'expected 20';
ok $np++, 'postfix ++';
is $np.deref, 30, 'expected 30';
$np = $p + 3;
is $np.deref, 40, 'expected 40';
ok $p == $np.pred.pred.pred, 'pred works';
# Arithmetic on an untyped (void) Pointer has no element size to use.
dies-ok {
Pointer.new.succ;
}, "void pointer not allowed";
| 15.777778 | 53 | 0.612676 |
ed42359792b9c143430fb021f4957b66e4c98119 | 1,790 | pm | Perl | modules/Bio/EnsEMBL/Variation/Pipeline/Remapping/FinishFilterMapping.pm | at7/ensembl-variation | 3cec964c173b0d676823f4947f35deaf3db7f119 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Variation/Pipeline/Remapping/FinishFilterMapping.pm | at7/ensembl-variation | 3cec964c173b0d676823f4947f35deaf3db7f119 | [
"Apache-2.0"
]
| null | null | null | modules/Bio/EnsEMBL/Variation/Pipeline/Remapping/FinishFilterMapping.pm | at7/ensembl-variation | 3cec964c173b0d676823f4947f35deaf3db7f119 | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env perl
# Copyright [1999-2016] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<helpdesk.org>.
=cut
# eHive step that aggregates the per-job statistics files written by
# the filter-mapping analysis: sums every "key=value" line found in
# <pipeline_dir>/statistics/*.txt and writes the totals to
# <pipeline_dir>/overall_counts.txt.
package Bio::EnsEMBL::Variation::Pipeline::Remapping::FinishFilterMapping;
use strict;
use warnings;
use FileHandle;
use base ('Bio::EnsEMBL::Hive::Process');
# No inputs to fetch; present to satisfy the Hive process interface.
sub fetch_input {
my $self = shift;
}
sub run {
my $self = shift;
my $working_dir = $self->param('pipeline_dir');
my $statistics_dir = "$working_dir/statistics";
my $overall_counts = {};
opendir(DIR, $statistics_dir) or die $!;
while (my $file = readdir(DIR)) {
if ($file =~ m/\.txt$/) {
my $fh = FileHandle->new("$statistics_dir/$file", 'r');
while (<$fh>) {
chomp;
# Each line is expected to be "<statistic>=<count>"; counts for the
# same statistic are summed across all files.
my ($stats, $count) = split/=/;
$overall_counts->{$stats} += $count;
}
$fh->close();
}
}
closedir(DIR);
my $fh = FileHandle->new("$working_dir/overall_counts.txt", 'w');
while (my ($stats, $counts) = each %$overall_counts) {
print $fh "$stats=$counts\n";
}
$fh->close();
}
1;
| 25.942029 | 102 | 0.678212 |
ed5eca2579ab60aa52f39297806825f6d1b109cd | 7,241 | pm | Perl | lib/File/Tmpdir/Functions.pm | tokiclover/browser-home-profile | 6a9305c5542d2ac6220d8e114e2edfa2018cf074 | [
"BSD-2-Clause-FreeBSD"
]
| 1 | 2021-06-24T22:01:38.000Z | 2021-06-24T22:01:38.000Z | lib/File/Tmpdir/Functions.pm | tokiclover/browser-home-profile | 6a9305c5542d2ac6220d8e114e2edfa2018cf074 | [
"BSD-2-Clause-FreeBSD"
]
| null | null | null | lib/File/Tmpdir/Functions.pm | tokiclover/browser-home-profile | 6a9305c5542d2ac6220d8e114e2edfa2018cf074 | [
"BSD-2-Clause-FreeBSD"
]
| null | null | null | #
# $Header: File::Tmpdir Exp $
# $Author: (c) 2016 tokiclover <tokiclover@gmail.com> Exp $
# $License: MIT (or 2-clause/new/simplified BSD) Exp $
# $Version: 1.3 2016/03/18 Exp $
#
package File::Tmpdir::Functions;
use v5.14.0;
use strict;
use warnings;
use Exporter;
our ($VERSION, @ISA, @EXPORT, @EXPORT_OK, %EXPORT_TAGS);
our (%COLOR, @BG, @FG, %PRINT_INFO, $NAME);
$VERSION = "1.3";
($PRINT_INFO{cols}, $PRINT_INFO{len}, $PRINT_INFO{eol}) = (tput('cols', 1), 0, "");
eval_colors(tput('colors', 1));
@ISA = qw(Exporter);
@EXPORT = qw();
@EXPORT_OK = qw(
pr_info pr_warn pr_error pr_begin pr_end pr_die
eval_colors mount_info sigwinch_handler tput yesno
%COLOR @BG @FG $NAME
);
%EXPORT_TAGS = (
print => [qw(pr_info pr_warn pr_error pr_begin pr_end pr_die)],
misc => [qw(eval_colors tput mount_info yesno)],
color => [qw(%COLOR @BG @FG)],
);
=head1 NAME
File::Tmpdir::Functions - Print and miscellaneous functions
=cut
=head1 SYNOPSIS
use File::Tmpdir qw(:color :print :misc);
=cut
=head1 DESCRIPTION
use File::Tmpdir qw(:print);
Some reusable helpers to format print output prepended with C<name:> or C<[name]>
(name refer to global C<$File::Tmpdir::NAME>) with ANSI color (escapes) support.
use File::Tmpdir qw(:color);
C<%COLOR> hold the usual color attributes e.g. C<$COLOR{bold}>, C<$COLOR{reset}>,
C<$COLOR{underline}> etc; and the common 8 {back,fore}ground named colors prefixed
with C<{bg,fg}-> e.g. C<$COLOR{'fg-blue'}>, C<$COLOR{'bg-yellow'}> etc.
C<@FG> and C<@BG> hold the numeral colors, meaning that, C<@FG[0..255]> and
C<@BG[0..255]> are usable after a C<eval_colors(256)> initialization call.
C<@BG[0..7]> and C<@FG[0..7]> being the named colors included in C<%COLOR>.
=cut
=head1 FUNCTIONS
=cut
=head2 pr_error(str)
Print error message to stderr e.g.:
pr_error("Failed to do this");
=cut
# Print an error-level message to STDERR, prefixed with the script
# name (package global $NAME) when it is defined, and record the
# printed length in %PRINT_INFO for the pr_begin/pr_end formatting.
sub pr_error {
	my $msg = join ' ', @_;
	my $pfx;

	$PRINT_INFO{len} = length($msg) + length($NAME) + 2;
	$pfx = "$COLOR{'fg-magenta'}${NAME}:$COLOR{reset}" if defined($NAME);
	print STDERR "$PRINT_INFO{eol}$COLOR{'fg-red'}:ERROR:$COLOR{reset} $pfx $msg\n";
}
=head2 pr_die(err, str)
Print error level message to stderr and exit program like C<die> would do.
pr_die($?, "Failed to launch $command!");
=cut
# Print an error message (via pr_error) and terminate the program
# with the given exit status — a die()-like helper.
sub pr_die {
	my $status = shift;
	pr_error(@_);
	exit($status);
}
=head2 pr_info(str)
Print info level message to stdout.
pr_info("Running perl $] version.");
=cut
# Print an info-level message to STDOUT, prefixed with the script
# name (package global $NAME) when defined; updates %PRINT_INFO so
# pr_end can right-align its status suffix.
sub pr_info {
	my $msg = join ' ', @_;
	my $pfx;

	$PRINT_INFO{len} = length($msg) + length($NAME) + 2;
	$pfx = "$COLOR{'fg-yellow'}${NAME}:$COLOR{reset}" if defined($NAME);
	print "$PRINT_INFO{eol}$COLOR{'fg-blue'}INFO:$COLOR{reset} $pfx $msg\n";
}
=head2 pr_warn(str)
Print warning level message to stdout.
pr_warn("Configuration file not found.");
=cut
# Print a warning-level message to STDOUT, prefixed with the script
# name (package global $NAME) when defined; updates %PRINT_INFO like
# the other pr_* helpers.
sub pr_warn {
	my $msg = join ' ', @_;
	my $pfx;

	$PRINT_INFO{len} = length($msg) + length($NAME) + 2;
	$pfx = "$COLOR{'fg-red'}${NAME}:$COLOR{reset}" if defined($NAME);
	print STDOUT "$PRINT_INFO{eol}$COLOR{'fg-yellow'}WARN:$COLOR{reset} $pfx $msg\n";
}
=head2 pr_begin(str)
Print the beginning of a formated message to stdout.
pr_begin("Mounting device");
=cut
# Print the beginning of a formatted "action ... [Ok]/[No]" line
# (no trailing newline).  Sets %PRINT_INFO{eol} so a later pr_* call
# knows a line is pending, and records the printed length so pr_end
# can right-align its status suffix.
sub pr_begin {
	my ($msg, $pfx) = (join ' ', @_);
	# Flush any previous unterminated pr_begin line first.
	print $PRINT_INFO{eol} if defined($PRINT_INFO{eol});
	$PRINT_INFO{eol} = "\n";
	$PRINT_INFO{len} = length($msg)+length($NAME)+2;
	$pfx = "${COLOR{'fg-magenta'}}[$COLOR{'fg-blue'}${NAME}$COLOR{'fg-magenta'}]$COLOR{reset}"
		if defined($NAME);
	printf "%s", "$pfx $msg";
}
=head2 pr_end(err[, str])
Print the end of a formated message to stdout which is just a colored C<[Ok]>
or C<[No]> (if no further arguments are found) after running a commmand.
pr_end($?);
=cut
# Finish a pr_begin line: print an optional message plus a colored
# [Ok] (when the first argument is 0) or [No] suffix, right-aligned
# to the terminal width recorded in %PRINT_INFO, then reset the
# pending-line state.
sub pr_end {
	my ($val, $sfx) = (shift);
	my $msg = join ' ', @_;
	# Remaining width after what pr_begin already printed.
	my $len = $PRINT_INFO{cols} - $PRINT_INFO{len};

	if ($val == 0) {
		$sfx="${COLOR{'fg-blue'}}[$COLOR{'fg-green'}Ok$COLOR{'fg-blue'}]$COLOR{reset}";
	} else {
		$sfx="${COLOR{'fg-yellow'}}[$COLOR{'fg-red'}No$COLOR{'fg-yellow'}]$COLOR{reset}";
	}
	printf "%${len}s\n", "$msg $sfx";
	($PRINT_INFO{eol}, $PRINT_INFO{len}) = ('', 0);
}
=head2 yesno([01]|{true|false|etc})
A tiny helper to simplify case incensitive yes/no configuration querries.
=cut
# Normalise a truthy/falsy configuration value (case-insensitive).
# Returns 0 for 0/disable/off/false/no, 1 for 1/enable/on/true/yes,
# and undef (empty list in list context) for anything unrecognised.
sub yesno {
	my $val = shift // 0;

	# Anchor the alternations: the old unanchored patterns matched
	# substrings, so e.g. "song" was taken as "on" (1), "10" as "0",
	# and "none" as "no".
	if ($val =~ m/\A(?:0|disable|off|false|no)\z/i) {
		return 0;
	}
	elsif ($val =~ m/\A(?:1|enable|on|true|yes)\z/i) {
		return 1;
	}
	else { return }
}
=head2 eval_colors(NUMBER)
Set up colors (used for output for the print helper family.) Default to 8 colors,
if no argument passed. Else, valid argument would be 8 or 256.
=cut
# Populate the package globals %COLOR, @FG and @BG with ANSI escape
# sequences.  Always installs the attribute escapes (bold, underline,
# ...); with an argument of 8 or >=256 it also fills @FG/@BG[0..n-1]
# and the named fg-*/bg-* entries using the matching SGR scheme
# (3x/4x for 8 colors, 38;5;/48;5; for 256 colors).
sub eval_colors {
	my $NUM = shift // 0;
	my @bc = ('none', 'bold', 'faint', 'italic', 'underline', 'blink',
		'rapid-blink', 'inverse', 'conceal', 'no-italic', 'no-underline',
		'no-blink', 'reveal', 'default'
	);
	# SGR codes paired one-to-one with the attribute names above.
	my @val = (0..8, 23..25, 28, '39;49');
	my ($esc, $bg, $fg, $num, $c) = ("\e[");
	%COLOR = map { $bc[$_], "$esc${val[$_]}m" } 0..$#val;
	$COLOR{'reset'} = "${esc}0m";

	if ($NUM >= 256) {
		($bg, $fg, $num) = ('48;5;', '38;5;', $NUM-1);
	}
	elsif ($NUM == 8) {
		($bg, $fg, $num) = (4, 3, $NUM-1);
	}
	# NOTE(review): with any other $NUM (including the default 0) $bg,
	# $fg and $num stay undef here, so this loop runs once with empty
	# prefixes — confirm callers always pass 8 or 256.
	for $c (0..$num) {
		$BG[$c] = "$esc$bg${c}m";
		$FG[$c] = "$esc$fg${c}m";
	}
	@bc = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white');
	for $c (0..$#bc) {
		$COLOR{"bg-$bc[$c]"} = "$esc$bg${c}m";
		$COLOR{"fg-$bc[$c]"} = "$esc$fg${c}m";
	}
}
=head2 mount_info ([OPT,] DIR|DEV [,FILE])
A tiny helper to simplify probing usage of mounted points or device, swap
device and kernel module.
mount_info('-s', '/dev/zram0'); # whether the specified device is swap
mount_info('-m', 'zram) ; # whether the kernel module is loaded
mount_info('/dev/zram1') ; # whether the specified device is mounted
=cut
# Probe a /proc table for a pattern: with -s look in /proc/swaps
# (swap device), with -m in /proc/modules (loaded module), otherwise
# in /proc/mounts (mounted device/dir) or an explicitly given file.
# Returns 1 when a line matches, 0 when none does, undef on open
# failure (or for an empty file).
sub mount_info {
	my ($opt, $file) = shift;
	return unless defined($opt);

	if ($opt eq "-s") {
		$file = "/proc/swaps";
		# The real pattern is the next argument.
		$opt = shift;
	}
	elsif ($opt eq "-m") {
		$file = "/proc/modules";
		$opt = shift;
	}
	else {
		# NOTE(review): $file is never assigned before this point, so the
		# optional FILE argument documented in the POD is ignored — confirm.
		$file = "/proc/mounts" unless defined($file);
	}
	my ($FILE, $ret);
	unless (open($FILE, q(<), $file)) {
		pr_error("Failed to open $file: $!");
		return;
	}
	# NOTE(review): $opt is interpolated unescaped into the regex;
	# regex metacharacters in a device path would change the match.
	while (<$FILE>) {
		if (m|$opt\b|) { $ret = 1; last; }
		else { $ret = 0 }
	}
	close($FILE);
	return $ret;
}
=head2 sigwinch_handler()
Handle window resize signal to update the colon length of the terminal.
=cut
# SIGWINCH handler: refresh the cached terminal width used by pr_end
# for right-aligning its status suffix.
sub sigwinch_handler {
	$PRINT_INFO{cols} = tput('cols', 1);
}
#$SIG{WINCH} = \&sigwinch_handler;
=head2 tput(cap[, 1])
Simple helper to querry C<terminfo(5)> capabilities without a shell (implied by
the `cmd` construct.) Second argument enable integer conversion.
tput('cols', 1); # to get the actual terminal colon length
=cut
# Query a terminfo capability by running tput(1) without a shell.
# Returns the output lines; with a true second argument, the first
# line coerced to an integer (e.g. tput('cols', 1)).  Returns undef
# when no capability name is given; dies when tput cannot be started.
sub tput {
	my ($cap, $conv) = @_;
	return unless defined($cap);
	# List-form pipe open: no shell involved, $cap is a plain argv word.
	open(my $TPUT, '-|', 'tput', $cap) or die "Failed to launch tput: $!";
	chomp(my @val = <$TPUT>);
	close($TPUT);
	return int($val[0]) if yesno($conv);
	return @val;
}
1;
__END__
=head1 AUTHOR
tokiclover <tokiclover@gmail.com>
=cut
=head1 LICENSE
This program is free software; you can redistribute it and/or modify it
under the MIT License or under the same terms as Perl itself.
=cut
#
# vim:fenc=utf-8:ci:pi:sts=2:sw=2:ts=2:
#
| 23.208333 | 91 | 0.619804 |
ed7a66e63ace30920600376ee0310821246597b5 | 342 | t | Perl | test/blackbox-tests/test-cases/github2272/run.t | AltGr/dune | 5cd4eb6f228649218722caa14a6a5630cba991b0 | [
"MIT"
]
| null | null | null | test/blackbox-tests/test-cases/github2272/run.t | AltGr/dune | 5cd4eb6f228649218722caa14a6a5630cba991b0 | [
"MIT"
]
| null | null | null | test/blackbox-tests/test-cases/github2272/run.t | AltGr/dune | 5cd4eb6f228649218722caa14a6a5630cba991b0 | [
"MIT"
]
| null | null | null | The @all alias should only build enabled libraries
$ dune build @all --display short
ocamldep disabled/.foo.objs/foo.ml.d
ocamlc disabled/.foo.objs/byte/foo.{cmi,cmo,cmt}
ocamlc disabled/foo.cma
ocamlopt disabled/.foo.objs/native/foo.{cmx,o}
ocamlopt disabled/foo.{a,cmxa}
ocamlopt disabled/foo.cmxs
| 38 | 56 | 0.687135 |
ed7dc5627e77d76fd3dde864eb560327aa662f71 | 1,149 | pl | Perl | CentOS_Ubuntu20.04/stunnel/cronjob/stunnel.pl | nu11secur1ty/openvpn-server | 7d7d8ce3fe209180871894161f7a519eb22f8c1f | [
"MIT"
]
| 4 | 2020-12-21T06:07:10.000Z | 2022-03-03T14:04:16.000Z | CentOS_Ubuntu20.04/stunnel/cronjob/stunnel.pl | nu11secur1ty/openvpn-server | 7d7d8ce3fe209180871894161f7a519eb22f8c1f | [
"MIT"
]
| null | null | null | CentOS_Ubuntu20.04/stunnel/cronjob/stunnel.pl | nu11secur1ty/openvpn-server | 7d7d8ce3fe209180871894161f7a519eb22f8c1f | [
"MIT"
]
| null | null | null | #!/usr/bin/perl
# @nu11secur1ty, g1d 2021
use strict;
use warnings;
use diagnostics;
no warnings 'numeric';
my $status_not_ok = 0;
my $check_status = `pgrep -f stunnel`;
if ($check_status == 0){
print "No pid found\n";
$status_not_ok = 1;
}else{
print "Status already started\n";
}
# Starting the service
if ($status_not_ok == 1) {
my $starting = `systemctl start stunnel4.service`;
print "The problem is fixed: $check_status\n";
}
# Listening Service:
my $bad_status = "refused";
my $stunnel_already = `systemctl status stunnel4.service | grep -o "already"`;
my $already_L_443_check = $stunnel_already eq $bad_status;
if ($already_L_443_check == 0) {
print "Listening on 443 is OK\n";
}else{
print "Listening on 443 is not runnig\n";
}
# Fix Listeninig
my $g00d_status = "already";
my $stunnel_refused = `systemctl status stunnel4.service | grep -o "refused"`;
my $already_L_443_fix = $stunnel_refused eq $bad_status;
if ($already_L_443_fix == 0){
my $stunnel_reload = `systemctl restart stunnel4.service`;
print "The stunel listening on port 443 is runing\n";
}else{
print "The new listening status is OK\n";
}
exit 0;
| 24.446809 | 78 | 0.709312 |
ed860355c300ee9b0d6c2ef3ae70811a22494d65 | 113 | pl | Perl | examples/prolog/simpsons.pl | scarf-sh/bnfc | 157f9eb2b6d79308049f6cee92fbab3dc2e3a6ef | [
"BSD-3-Clause"
]
| 449 | 2015-01-01T23:46:52.000Z | 2022-03-31T19:09:56.000Z | examples/prolog/simpsons.pl | scarf-sh/bnfc | 157f9eb2b6d79308049f6cee92fbab3dc2e3a6ef | [
"BSD-3-Clause"
]
| 293 | 2015-01-05T17:30:01.000Z | 2022-03-31T19:50:15.000Z | examples/prolog/simpsons.pl | hangingman/bnfc | b9d96e87a6f9051792555b7060cdd5ad2d31ff80 | [
"BSD-3-Clause"
]
| 142 | 2015-01-16T08:09:00.000Z | 2022-03-28T16:12:46.000Z |
% Family facts: child(Child, Parent).
child(bart,homer).
child(homer,abe).
child(maggie,homer).
% X is a grandchild of Y when X's parent Z is itself a child of Y.
grandchild(X,Y) :-
child(X,Z),
child(Z,Y).
| 10.272727 | 20 | 0.610619 |
ed3ec93aab71db219f4639a1a9bff7dc2ba6675a | 1,392 | t | Perl | S06-advanced/lexical-subs.t | peschwa/roast | da6f66aab60fbf9c9d043a00d23fd7249912a87a | [
"Artistic-2.0"
]
| null | null | null | S06-advanced/lexical-subs.t | peschwa/roast | da6f66aab60fbf9c9d043a00d23fd7249912a87a | [
"Artistic-2.0"
]
| null | null | null | S06-advanced/lexical-subs.t | peschwa/roast | da6f66aab60fbf9c9d043a00d23fd7249912a87a | [
"Artistic-2.0"
]
| 1 | 2019-10-08T10:15:36.000Z | 2019-10-08T10:15:36.000Z | use v6;
use Test;
plan 11;
{
sub f() {
my sub g(){"g"}; my sub h(){g()}; h();
};
is(f(), 'g', 'can indirectly call lexical sub');
eval_dies_ok('g', 'lexical sub not visible outside current scope');
}
{
sub foo($x) { $x + 1 }
sub callit(&foo) {
foo(1);
}
is(foo(1), 2, 'calls subs passed as &foo parameter');
is(callit({ $^x + 2 }), 3, "lexical subs get precedence over package subs");
}
#?rakudo skip 'cannot parse operator names yet'
{
sub infix:<@@> ($x, $y) { $x + $y }
sub foo2(&infix:<@@>) {
2 @@ 3;
}
is(2 @@ 3, 5);
is(foo2({ $^a * $^b }), 6);
}
{
my sub test_this { #OK not used
ok 1, "Could call ok from within a lexical sub";
return 1;
}
EVAL 'test_this()';
if ($!) {
ok 0, "Could call ok from within a lexical sub";
}
}
# used to be http://rt.perl.org/rt3/Ticket/Display.html?id=65498
{
sub a { 'outer' };
{
my sub a { 'inner' };
is a(), 'inner', 'inner lexical hides outer sub of same name';
}
is a(), 'outer', '... but only where it is visisble';
}
{
package TestScope {
sub f { };
}
#?pugs todo
dies_ok { TestScope::f }, 'subs without scoping modifiers are not entered in the namespace';
}
# RT #57788
{
#?pugs todo
eval_dies_ok 'sub a { }; sub a { }';
}
# vim: ft=perl6 :
| 18.810811 | 96 | 0.510057 |
ed72a6b6c52ea83944b9e464dfda64460a69b1cb | 892 | t | Perl | t/002_apptest.t | git-the-cpan/Catalyst-Authentication-Credential-Authen-Simple | 66f9746e1f12e33de09a174a99ff8d7ac75aeda2 | [
"Artistic-1.0"
]
| null | null | null | t/002_apptest.t | git-the-cpan/Catalyst-Authentication-Credential-Authen-Simple | 66f9746e1f12e33de09a174a99ff8d7ac75aeda2 | [
"Artistic-1.0"
]
| null | null | null | t/002_apptest.t | git-the-cpan/Catalyst-Authentication-Credential-Authen-Simple | 66f9746e1f12e33de09a174a99ff8d7ac75aeda2 | [
"Artistic-1.0"
]
| null | null | null | use strict;
use warnings;
use Test::More;
plan tests => 35;
use lib 't/lib';
use Catalyst::Test qw/AuthTestApp2/;
cmp_ok(get("/authed_ok?username=bob&password=uniquepass"), 'eq', 'authed Bob Smith', "bob authed through onlyone");
cmp_ok(get("/authed_ok?username=john&password=uniquepass"), 'eq', 'authed John Smith', "john authed through onlyone");
cmp_ok(get("/authed_ko?username=bob&password=bob") , 'eq', 'not authed', "bob not authed through stub");
cmp_ok(get("/authed_ko?username=john&password=john") , 'eq', 'not authed', "john not authed through stub");
cmp_ok(get("/authed_ko?username=bob&password=xxx") , 'eq', 'not authed', "bob not authed");
cmp_ok(get("/authed_ko?username=john&password=xxx") , 'eq', 'not authed', "john not authed");
cmp_ok(get("/authed_ko?username=notuser&password=uniquepass"), 'eq', 'not authed', "unexistant user not authed");
| 44.6 | 118 | 0.695067 |
ed7ac4ac214d218eadaa7c23a88f279204cd8357 | 196 | pm | Perl | lib/App/SD/Collection/Ticket.pm | jrockway/sd | bd0c479f98638981afe521f24777b2abaf392b0d | [
"MIT"
]
| 2 | 2016-05-09T01:55:30.000Z | 2020-02-07T03:29:15.000Z | lib/App/SD/Collection/Ticket.pm | rafl/sd | 14cab64206be163b52a3cb97244dc170e5123da2 | [
"MIT"
]
| null | null | null | lib/App/SD/Collection/Ticket.pm | rafl/sd | 14cab64206be163b52a3cb97244dc170e5123da2 | [
"MIT"
]
| null | null | null | package App::SD::Collection::Ticket;
use Any::Moose;
extends 'Prophet::Collection';
use constant record_class => 'App::SD::Model::Ticket';
__PACKAGE__->meta->make_immutable;
no Any::Moose;
1;
| 16.333333 | 54 | 0.72449 |
ed8561ce42596eb33e75adabf67111414e262641 | 234 | al | Perl | src/test/resources/definition_o_02.al | janisz/knowledge-representation | 4eb0c93ed3dc29c431c99a7e3c4865dba48a5fea | [
"0BSD"
]
| null | null | null | src/test/resources/definition_o_02.al | janisz/knowledge-representation | 4eb0c93ed3dc29c431c99a7e3c4865dba48a5fea | [
"0BSD"
]
| 19 | 2016-06-23T10:52:34.000Z | 2021-07-27T04:45:36.000Z | src/main/resources/definition_o_02.al | janisz/knowledge-representation | 4eb0c93ed3dc29c431c99a7e3c4865dba48a5fea | [
"0BSD"
]
| 1 | 2017-01-14T00:44:42.000Z | 2017-01-14T00:44:42.000Z | initially a
a triggers (Janek, A)
(Janek, A) causes -a
(Janek, A) invokes (Janek, B)
ScenarioOne {
ACS = {
},
OBS = {
}
}
always performed (Janek, A) at 0 when ScenarioOne
always performed (Janek, B) at 1 when ScenarioOne
| 15.6 | 49 | 0.649573 |
ed52c9becfe802ecb769485d7db3d900b75877fa | 2,869 | t | Perl | S12-class/augment-supersede.t | perl6/roast | 30e8226c1a0562b9364ee9ea2730763374d79a3d | [
"Artistic-2.0"
]
| 99 | 2015-03-03T13:01:44.000Z | 2020-03-05T15:21:43.000Z | S12-class/augment-supersede.t | perl6/roast | 30e8226c1a0562b9364ee9ea2730763374d79a3d | [
"Artistic-2.0"
]
| 331 | 2015-02-17T15:26:22.000Z | 2020-03-16T18:29:49.000Z | S12-class/augment-supersede.t | perl6/roast | 30e8226c1a0562b9364ee9ea2730763374d79a3d | [
"Artistic-2.0"
]
| 136 | 2015-02-02T13:34:10.000Z | 2020-02-18T02:26:59.000Z | use v6;
use Test;
plan 14;
# L<S12/"Open vs Closed Classes"/"Otherwise you'll get a class redefinition error.">
use MONKEY-TYPING;
{
class Foo {
method a {'called Foo.a'}
}
augment class Foo {
method b {'called Foo.b'}
}
my $o = Foo.new;
is($o.a, 'called Foo.a', 'basic method call works');
is($o.b, 'called Foo.b', 'added method call works');
dies-ok { EVAL('augment class NonExistent { }') },
'augment on non-existent class dies';
}
# https://github.com/Raku/old-issue-tracker/issues/1746
{
my class LexFoo { };
augment class LexFoo { method b { 'called LexFoo.b' } };
is LexFoo.b, 'called LexFoo.b', 'can augment lexical class';
}
# https://github.com/Raku/old-issue-tracker/issues/1876
{
augment class Hash {
method foo() { self.keys };
}
is { a => 1 }.foo, 'a', 'can augment Hash';
}
# https://github.com/Raku/old-issue-tracker/issues/1073
throws-like q[
class MethodClash { method foo() { 3 } };
augment class MethodClash { method foo() { 3 } };
], X::Syntax::Augment::WithoutMonkeyTyping, 'cannot override a method by monkey-typing';
# https://github.com/Raku/old-issue-tracker/issues/1950
eval-lives-ok q[
use MONKEY-TYPING;
role Bar { has $.counter; }
class Pub does Bar { has $.saloon; }
augment class Pub { method snug() { } }
], 'augmenting a class which has a role composed works';
#?rakudo skip 'redeclaration of symbol Bar'
{
use MONKEY-TYPING;
class Bar {
method c {'called Bar.c'}
}
supersede class Bar {
method d {'called Bar.d'}
}
my $o = Bar.new;
throws-like '$o.c', Exception, 'overridden method is gone completely';
is($o.d, 'called Bar.d', 'new method is present instead');
}
# https://github.com/Raku/old-issue-tracker/issues/1792
{
lives-ok {
class A { multi method a() { }};
augment class A { multi method a() { } }
}, 'cannot add multis with augment'
}
# https://github.com/Raku/old-issue-tracker/issues/1447
# some integers produces from ranges didn't have
# methods that augment added. Weird.
{
augment class Int {
method prime { True };
}
my $primes = 0;
lives-ok {
for 1..5 {
$primes++ if .prime;
}
}, 'integers produced from ranges have augmented methods';
}
# https://github.com/Raku/old-issue-tracker/issues/3080
{
try EVAL 'class F { also is F; }';
ok ~$! ~~ / 'cannot inherit from itself' /, "used to crash rakudo";
}
# https://github.com/Raku/old-issue-tracker/issues/3081
{
try EVAL 'class ::F { ... }; class F is ::F';
ok ~$! ~~ / 'cannot inherit from itself' /, "used to crash rakudo";
}
eval-lives-ok 'class A { class B {} }; use MONKEY; augment class A { augment class B { } }',
'Augmenting a nested package lives';
# vim: expandtab shiftwidth=4
| 25.616071 | 92 | 0.612409 |
Subsets and Splits