#!perl
=head1 Driver application for testing of schema change checks

This application reads test definition files (*.precheck) and executes the
test scenarios defined in them by setting up a mock environment: the schema
change checks I<think> they are being executed as a regular check, but
everything from the database connection to the user input is mocked.
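
A typical invocation, assuming a standard Perl test harness, is:

    prove t/16-prechecks.t
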
=head2 Definition of test scenarios in *.precheck files

Each precheck test definition file is a Perl source file which holds
exactly one hash. Because it is a Perl source file, comments can be
added just as in regular Perl code.

The names of the precheck files below t/16-prechecks/ use the same paths
as the schema change check files below sql/changes/ that they test.
E.g. the test file t/16-prechecks/1.5/abstract_tables.precheck defines
tests for the checks defined in sql/changes/1.5/abstract_tables.sql.checks.pl.

The keys of the hash correspond to the titles of the checks in the file
to be tested. The values associated with the keys are arrays of hashes;
each hash in the array defines a test case for the specific check to be
tested. These keys are supported:

=over

=item failure_data

Defines a L<DBD::Mock> resultset (rows failing the check) for the
query defined by the check; see the example after this list.

=item failure_session

A list of L<DBD::Mock::Session states|
https://metacpan.org/pod/DBD::Mock#DBD::Mock::Session> to be used
I<after> the initial state with the failing query. These could be
necessary or desirable for e.g. queries issued as part of the
C<dropdown_sql> DSL keyword.

In general, failure session statements are prepended to the submit session,
because the failure procedure is run again on submit (with an empty failure
row set) to discover the grids and other controls that are used in the
response.

However, when statements are driven by the failure data itself, they occur
only in the true failure session and should not be copied to the submit
session (where the failure rowset is empty). Marking statements with
C<< failure_data_based => 1 >> results in them being filtered out of the
submit session data.

=item submit_session

A list of L<DBD::Mock::Session states|
https://metacpan.org/pod/DBD::Mock#DBD::Mock::Session> used to validate
the correct submission of corrected data back to the database.

=item response

A hash used to generate the response data, as documented in
L<LedgerSMB::Database::SchemaChecks::JSON>.

Note that when the response is missing, a JSON formatted data structure
is printed as part of the error message, to help with creating one.

=back
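
=head2 Example

The sketch below shows the overall shape of a .precheck file. The check
title, query, and response keys are invented for illustration; the exact
shape of the C<response> hash is defined by
L<LedgerSMB::Database::SchemaChecks::JSON>.

    # t/16-prechecks/1.5/example.precheck (hypothetical)
    {
        'Fix rows with missing names' => [
            {
                # DBD::Mock resultset; the first row lists the column names
                failure_data => [
                    [ 'id', 'name' ],
                    [ 1, undef ],
                ],
                # statements expected when the corrected data is submitted
                submit_session => [
                    {
                        statement => 'UPDATE entity SET name = ? WHERE id = ?',
                        results   => [],
                    },
                ],
                # mocked user input (keys are placeholders; see
                # LedgerSMB::Database::SchemaChecks::JSON)
                response => {
                    confirm => 'save',
                },
            },
        ],
    }
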
=cut

use Test2::V0;
use Data::Dumper;
use DBI;
use DBD::Mock::Session;
use File::Find::Rule;
use File::Temp;
use JSON::MaybeXS;
use LedgerSMB::Database::ChangeChecks qw( run_checks load_checks );
use LedgerSMB::Database::SchemaChecks::JSON qw( json_formatter_context );
use List::Util qw( first );

my @schemacheck_tests = File::Find::Rule->new
->name('*.precheck')->in('t/16-prechecks');
my @schemachecks = File::Find::Rule->new
->name('*.checks.pl')->in('sql/changes');
is(scalar(@schemachecks), scalar(@schemacheck_tests),
   'All schema checks are tested');

sub _slurp {
my ($fn) = @_;
open my $fh, '<:encoding(UTF-8)', $fn
or die "Failed to open generated response file '$fn': $!";
local $/ = undef;
my $content = <$fh>;
close $fh
or warn "Failed to close generated response file '$fn': $!";
return $content;
}

sub _schemacheck_file {
my ($schemacheck_test) = @_;
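    # Map a test definition file to the schema change check file it tests,
    # e.g. t/16-prechecks/1.5/abstract_tables.precheck
    #      -> sql/changes/1.5/abstract_tables.sql.checks.pl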
my $schemacheck = $schemacheck_test;
$schemacheck =~ s!^t/16-prechecks/!sql/changes/!
or die "Can't map '$schemacheck_test' to schema change check file";
$schemacheck =~ s!.precheck$!.sql.checks.pl!
or die "Can't map '$schemacheck_test' to schema change check file";
if (! -f $schemacheck) {
die "Schema change check file ($schemacheck) associated"
. " with $schemacheck_test doesn't exist";
}
return $schemacheck;
}

sub _create_dbh_for_failure_session {
my ($check, $test) = @_;
my $dbh = DBI->connect('dbi:Mock:', '', '', { PrintError => 0 });
$test->{failure_session} //= [];
my $session = DBD::Mock::Session->new(
'initial/failure',
{
statement => $check->{query},
results => $test->{failure_data},
},
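        # Copy each extra state and strip our private 'failure_data_based'
        # marker before handing the states to DBD::Mock::Session: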
        map {
            my %c = %$_;
            delete $c{failure_data_based};
            \%c
        } @{$test->{failure_session}},
);
$dbh->{mock_session} = $session;
return $dbh;
}

sub _create_dbh_for_submit_session {
my ($check, $test) = @_;
my $dbh = DBI->connect('dbi:Mock:', '', '', { RaiseError => 1 });
$test->{submit_session} //= [];
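    # The submit session replays the check query (still returning the
    # failing rows), then the failure-session statements not driven by
    # the failure data, then the submit statements, and finally the
    # check query once more, now returning no rows: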
my @set = (
{
statement => $check->{query},
results => $test->{failure_data},
},
(grep { not $_->{failure_data_based} } @{$test->{failure_session}}),
@{$test->{submit_session}},
# Check returns no further failing rows:
{
statement => $check->{query},
results => [],
},
);
#print STDERR Dumper(\@set);
my $session = DBD::Mock::Session->new(
'submit',
@set,
);
$dbh->{mock_session} = $session;
return $dbh;
}

my $json = JSON::MaybeXS->new( utf8 => 1 );

sub _save_JSON_response_file {
my ($check, $response, $dir) = @_;
my $fn = LedgerSMB::Database::SchemaChecks::JSON::_response_filename(
$dir->dirname, $check
);
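    # $json encodes to UTF-8 bytes already (utf8 => 1), so open the
    # file in raw mode to avoid encoding twice: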
    open my $fh, '>:raw', $fn
or die "Unable to create JSON response file '$fn': $!";
print $fh $json->encode({ response => $response })
or die "Unable to generate JSON response file '$fn': $!";
close $fh
or warn "Unable to close JSON response file '$fn': $!";
}

sub _run_schemacheck_test {
my ($check, $test) = @_;
my $dir = File::Temp->newdir;
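    # The temporary directory doubles as the JSON formatter's output
    # directory; File::Temp cleans it up when $dir goes out of scope.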
my $out;
my $dbh;
ok lives {
        # Most checks here aren't immediately visible:
        # the database session checks that the correct queries
        # and expected responses are being generated. When they
        # aren't, an error is thrown, which we catch by wrapping
        # the block in 'ok lives'
$dbh = _create_dbh_for_failure_session($check, $test);
$out = json_formatter_context {
return ! run_checks($dbh, checks => [ $check ]);
} $dir->dirname;
ok(defined($out), 'JSON failure output was generated');
ok(-f $out, 'JSON failure output exists');
$dbh->disconnect;
} or diag $@ . Dumper($dbh->{mock_all_history});
if ($test->{response}) {
ok lives {
$dbh = _create_dbh_for_submit_session($check, $test);
_save_JSON_response_file($check, $test->{response}, $dir);
$out = json_formatter_context {
return ! run_checks($dbh, checks => [ $check ]);
} $dir->dirname;
$dbh->disconnect;
ok(! defined($out), 'No new failures occurred');
} or diag $@ . Dumper($dbh->{mock_all_history});
}
    elsif (ref $check->{on_submit}) {
        fail 'No response defined; use the failure output below to define one';
diag _slurp($out);
}
else {
note "no response: $check->{title}\n\n";
}
}

sub _run_schemacheck_tests {
my ($check, $tests) = @_;
_run_schemacheck_test($check, $_) for @$tests;
}

sub _run_schemachecks_tests {
my ($schemacheck_test) = @_;
my $schemacheck_file = _schemacheck_file($schemacheck_test);
subtest $schemacheck_file => sub {
my @checks = load_checks($schemacheck_file);
my $tests = eval _slurp($schemacheck_test);
die "Unable to load schema checks from file $schemacheck_test: $@"
if defined $@ and not defined $tests;
for my $test (keys %$tests) {
my $check = first { $_->{title} eq $test } @checks;
ok( defined($check),
"Found check for which tests ($test) have been"
. " defined in $schemacheck_file");
if ($check) {
_run_schemacheck_tests($check, $tests->{$test});
}
}
}
}

if (@schemacheck_tests) {
    eval {
        _run_schemachecks_tests($_) for @schemacheck_tests;
    };
    diag $@ if $@;
    done_testing;
}
else {
plan skip_all => "No test definition files found";
}