diff --git a/author_tools/regen_schema.pl b/author_tools/regen_schema.pl index 2a5a60e6e..10cc76dc3 100644 --- a/author_tools/regen_schema.pl +++ b/author_tools/regen_schema.pl @@ -86,14 +86,27 @@ skip_load_external => 1, + moniker_map => sub { + my ($table, $name) = @_; + + return "JobTry" if $name eq 'JobTries'; + return $name; + }, + rel_name_map => sub { my ($info) = @_; - return "facets" if $info->{name} eq 'facet'; - return "orphans" if $info->{name} eq 'orphan'; - return "reports" if $info->{name} eq 'reportings'; - return "renderings" if $info->{name} eq 'renders'; - return "children" if $info->{name} eq 'events' && $info->{local_class} eq 'App::Yath::Schema::Result::Event'; - return "parent_event" if $info->{name} eq 'parent'; + + return 'children' + if $info->{remote_columns} + && $info->{local_columns} + && $info->{name} eq 'events' + && $info->{local_class} eq 'App::Yath::Schema::Result::Event' + && $info->{remote_class} eq 'App::Yath::Schema::Result::Event' + && $info->{remote_columns}->[0] eq 'parent_event' + && $info->{local_columns}->[0] eq 'event_id'; + + return "coverage" if $info->{name} eq 'coverages'; + return "reports" if $info->{name} eq 'reportings'; return $info->{name}; }, @@ -106,7 +119,7 @@ "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", + #"Tree::AdjacencyList", "UUIDColumns", ], }, diff --git a/dist.ini b/dist.ini index a9dae9a93..0cd72373e 100644 --- a/dist.ini +++ b/dist.ini @@ -77,7 +77,6 @@ DBI = 0 DBIx::Class::UUIDColumns = 0 DBIx::QuickDB = 0.000020 Data::Dumper = 0 -Data::UUID = 0 DateTime = 0 DateTime::Format::MySQL = 0 DateTime::Format::Pg = 0 @@ -200,6 +199,10 @@ System::Info = 0.064 Cpanel::JSON::XS = 0 DBD::Pg = 0 DBD::mysql = 0 +UUID = 0.35 +Data::UUID = 1.227 +UUID::Tiny = 1.04 +Data::UUID::MT = 1.001 DBIx::Class::Storage::DBI::MariaDB = 0 DBIx::Class::Storage::DBI::mysql::Retryable = 0 diff --git a/lib/App/Yath/Command/db.pm b/lib/App/Yath/Command/db.pm 
index 05ae1465f..7c8009035 100644 --- a/lib/App/Yath/Command/db.pm +++ b/lib/App/Yath/Command/db.pm @@ -90,7 +90,7 @@ use feature 'state'; use App::Yath::Schema::Util qw/schema_config_from_settings/; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use Test2::Harness::Util qw/clean_path/; @@ -252,27 +252,26 @@ sub load_file { $project //= "oops"; unless ($projects{$project}) { - my $p = $config->schema->resultset('Project')->find_or_create({name => $project, project_idx => gen_uuid()}); + my $p = $config->schema->resultset('Project')->find_or_create({name => $project}); $projects{$project} = $p; } my $logfile = $config->schema->resultset('LogFile')->create({ - log_file_idx => gen_uuid(), name => $file, local_file => $file =~ m{^/} ? $file : "./demo/$file", }); - state $user = $config->schema->resultset('User')->find_or_create({username => 'root', password => 'root', realname => 'root', user_idx => gen_uuid()}); + state $user = $config->schema->resultset('User')->find_or_create({username => 'root', password => 'root', realname => 'root'}); my $run = $config->schema->resultset('Run')->create({ run_id => gen_uuid(), - user_idx => $user->user_idx, + user_id => $user->user_id, mode => 'complete', buffer => 'job', status => 'pending', - project_idx => $projects{$project}->project_idx, + project_id => $projects{$project}->project_id, - log_file_idx => $logfile->log_file_idx, + log_file_id => $logfile->log_file_id, }); return $run; @@ -400,7 +399,7 @@ use App::Yath::Server::Config; use App::Yath::Schema::Importer; use App::Yath::Server; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use DBIx::QuickDB; use Plack::Builder; diff --git a/lib/App/Yath/Command/db/publish.pm b/lib/App/Yath/Command/db/publish.pm index e35ca58ce..0f35aa6e3 100644 --- a/lib/App/Yath/Command/db/publish.pm +++ b/lib/App/Yath/Command/db/publish.pm @@ -4,10 +4,12 @@ use warnings; our $VERSION = '2.000000'; +use Time::HiRes 
qw/time/; + use IO::Uncompress::Bunzip2 qw($Bunzip2Error); use IO::Uncompress::Gunzip qw($GunzipError); -use App::Yath::Schema::Util qw/schema_config_from_settings/; +use App::Yath::Schema::Util qw/schema_config_from_settings format_duration/; use Test2::Harness::Util::JSON qw/decode_json/; use App::Yath::Schema::RunProcessor; @@ -72,9 +74,16 @@ sub run { $project =~ s/^\s+//g; $project =~ s/\s+$//g; + my $start = time; + my $cb = App::Yath::Schema::RunProcessor->process_lines($settings, project => $project); - my $run = eval { $cb->(scalar(<$fh>)) } or return $self->fail($@); + my $run; + eval { + my $ln = <$fh>; + $run = $cb->($ln); + 1 + } or return $self->fail($@); $SIG{INT} = sub { print STDERR "\nCought SIGINT...\n"; @@ -104,12 +113,17 @@ sub run { $cb->(); - print "Published Run (status: " . $run->status . ")\n"; + my $end = time; + + my $dur = format_duration($end - $start); + + print "Published Run. [Status: " . $run->status . ", Duration: $dur]\n"; return 0; } sub fail { + print STDERR "FAIL!\n\n"; my $self = shift; my ($err, $run) = @_; diff --git a/lib/App/Yath/Command/server.pm b/lib/App/Yath/Command/server.pm index 73f088508..18932185b 100644 --- a/lib/App/Yath/Command/server.pm +++ b/lib/App/Yath/Command/server.pm @@ -7,7 +7,7 @@ use feature 'state'; use App::Yath::Server; use App::Yath::Schema::Util qw/schema_config_from_settings/; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use App::Yath::Schema::ImportModes qw/is_mode/; use Test2::Harness::Util qw/clean_path/; @@ -174,14 +174,13 @@ sub load_file { state $user = $config->schema->resultset('User')->find_or_create({username => 'root', password => 'root', realname => 'root'}); my $run = $config->schema->resultset('Run')->create({ - run_id => gen_uuid(), - user_idx => $user->user_idx, + user_id => $user->user_id, mode => $mode, buffer => 'job', status => 'pending', - project_idx => $projects{$project}->project_idx, + project_id => 
$projects{$project}->project_id, - log_file_idx => $logfile->log_file_idx, + log_file_id => $logfile->log_file_id, }); return $run; @@ -378,7 +377,7 @@ use App::Yath::Server::Config; use App::Yath::Schema::Importer; use App::Yath::Server; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use DBIx::QuickDB; use Plack::Builder; @@ -457,18 +456,17 @@ sub run { single_run => 1, ); - my $user = $config->schema->resultset('User')->create({username => 'root', password => 'root', realname => 'root', user_idx => gen_uuid()}); - my $proj = $config->schema->resultset('Project')->create({name => 'default', project_idx => gen_uuid()}); + my $user = $config->schema->resultset('User')->create({username => 'root', password => 'root', realname => 'root'}); + my $proj = $config->schema->resultset('Project')->create({name => 'default'}); $config->schema->resultset('Run')->create({ run_id => gen_uuid(), - user_idx => $user->user_idx, + user_id => $user->user_id, mode => 'complete', status => 'pending', - project_idx => $proj->project_idx, + project_id => $proj->project_id, log_file => { - log_file_idx => gen_uuid(), name => $self->{+LOG_FILE}, local_file => $self->{+LOG_FILE}, }, diff --git a/lib/App/Yath/Options/Publish.pm b/lib/App/Yath/Options/Publish.pm index 8db5c826d..d87aea3f2 100644 --- a/lib/App/Yath/Options/Publish.pm +++ b/lib/App/Yath/Options/Publish.pm @@ -25,11 +25,11 @@ option_group {group => 'publish', prefix => 'publish', category => "Publish Opti description => 'When buffering DB writes, force a flush when an event is recieved at least N seconds after the last flush.', ); - option buffering => ( + option buffer_size => ( type => 'Scalar', - long_examples => [ ' none', ' job', ' diag', ' run' ], - description => 'Type of buffering to use, if "none" then events are written to the db one at a time, which is SLOW', - default => 'diag', + long_examples => [ ' 100' ], + description => 'Maximum number of events, coverage, or reporting 
items to buffer before flushing them (each has its own buffer of this size, and each job has its own event buffer of this size)', + default => 100, ); option retry => ( diff --git a/lib/App/Yath/Plugin/DB.pm b/lib/App/Yath/Plugin/DB.pm index cd2bbba50..f0a52bec6 100644 --- a/lib/App/Yath/Plugin/DB.pm +++ b/lib/App/Yath/Plugin/DB.pm @@ -5,7 +5,8 @@ use warnings; our $VERSION = '2.000000'; use App::Yath::Schema::Util qw/schema_config_from_settings/; -use Test2::Harness::Util qw/mod2file looks_like_uuid/; +use Test2::Harness::Util qw/mod2file/; +use Test2::Harness::Util::UUID qw/looks_like_uuid/; use Getopt::Yath; use parent 'App::Yath::Plugin'; diff --git a/lib/App/Yath/Plugin/Git.pm b/lib/App/Yath/Plugin/Git.pm index 8fa2b6c09..83f9a4e6f 100644 --- a/lib/App/Yath/Plugin/Git.pm +++ b/lib/App/Yath/Plugin/Git.pm @@ -75,11 +75,8 @@ sub run_fields { if ($branch) { $data{branch} = $branch; - - my $short = length($branch) > 20 ? substr($branch, 0, 20) : $branch; - - $field->{details} = $short; - $field->{raw} = $branch; + $field->{details} = $branch; + $field->{raw} = $long_sha; } else { $short_sha ||= substr($long_sha, 0, 16); diff --git a/lib/App/Yath/Renderer/DB.pm b/lib/App/Yath/Renderer/DB.pm index 275bfcafa..f16f65648 100644 --- a/lib/App/Yath/Renderer/DB.pm +++ b/lib/App/Yath/Renderer/DB.pm @@ -16,7 +16,7 @@ use Time::HiRes qw/time/; use Test2::Harness::Util qw/clean_path/; use Test2::Harness::IPC::Util qw/start_process/; use Test2::Harness::Util::JSON qw/encode_ascii_json/; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use parent 'App::Yath::Renderer'; use Test2::Harness::Util::HashBase qw{ diff --git a/lib/App/Yath/Renderer/Server.pm b/lib/App/Yath/Renderer/Server.pm index f49f0e9b6..db8dd6f97 100644 --- a/lib/App/Yath/Renderer/Server.pm +++ b/lib/App/Yath/Renderer/Server.pm @@ -11,7 +11,7 @@ use App::Yath::Schema::RunProcessor; use Test2::Util qw/pkg_to_file/; use Test2::Harness::Util qw/mod2file/; use 
App::Yath::Server::Util qw/share_dir share_file dbd_driver qdb_driver/; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use DBIx::QuickDB; use Plack::Builder; diff --git a/lib/App/Yath/Schema.pm b/lib/App/Yath/Schema.pm index 9273fee79..5726486b6 100644 --- a/lib/App/Yath/Schema.pm +++ b/lib/App/Yath/Schema.pm @@ -4,10 +4,6 @@ use strict; use warnings; use Carp qw/confess/; -use App::Yath::Schema::UUID qw/uuid_inflate/; - -use Test2::Harness::Util::UUID qw/gen_uuid/; - our $VERSION = '2.000000'; use base 'DBIx::Class::Schema'; @@ -42,42 +38,37 @@ sub vague_run_search { my ($project, $run, $user); my $query = $params{query} // {status => 'complete'}; - my $attrs = $params{attrs} // {order_by => {'-desc' => 'run_idx'}, rows => 1}; + my $attrs = $params{attrs} // {order_by => {'-desc' => 'run_id'}, rows => 1}; $attrs->{offset} = $params{idx} if $params{idx}; if (my $username = $params{username}) { $user = $self->resultset('User')->find({username => $username}) || die "Invalid Username ($username)"; - $query->{user_idx} = $user->user_idx; + $query->{user_id} = $user->user_id; } if (my $project_name = $params{project_name}) { $project = $self->resultset('Project')->find({name => $project_name}) || die "Invalid Project ($project)"; - $query->{project_idx} = $project->project_idx; + $query->{project_id} = $project->project_id; } if (my $source = $params{source}) { - my $uuid = uuid_inflate($source); - - if ($uuid) { - $run = $self->resultset('Run')->find({%$query, run_id => $uuid}, $attrs); - return $run if $run; - } + my $run = $self->resultset('Run')->find_by_id_or_uuid($source, $query, $attrs); + return $run if $run; if (my $p = $self->resultset('Project')->find({name => $source})) { die "Project mismatch ($source)" - if $project && $project->project_idx ne $p->project_idx; + if $project && $project->project_id ne $p->project_id; - $query->{project_idx} = $p->project_idx; + $query->{project_id} = $p->project_id; } elsif (my $u = 
$self->resultset('User')->find({username => $source})) { die "User mismatch ($source)" - if $user && $user->user_idx ne $u->user_idx; + if $user && $user->user_id ne $u->user_id; - $query->{user_idx} = $u->user_idx; + $query->{user_id} = $u->user_id; } else { - die "No UUID match in runs ($uuid)" if $uuid; die "No match for source ($source)"; } } diff --git a/lib/App/Yath/Schema/Dumper.pm b/lib/App/Yath/Schema/Dumper.pm index 6147d46db..5676d3ffb 100644 --- a/lib/App/Yath/Schema/Dumper.pm +++ b/lib/App/Yath/Schema/Dumper.pm @@ -6,7 +6,7 @@ our $VERSION = '2.000000'; use App::Yath::Schema::Util qw/format_duration/; use Test2::Harness::Util::JSON qw/encode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use Time::HiRes qw/time/; use Parallel::Runner; use IO::Compress::Bzip2; @@ -95,7 +95,7 @@ sub dump { || ($spec->{data_type} eq 'binary' && $spec->{size} == 16) || ($spec->{data_type} eq 'char' && $spec->{size} == 36); - $data{$col} = defined($data{$col}) ? uuid_inflate($data{$col})->string : undef; + $data{$col} = defined($data{$col}) ? $data{$col} : undef; } print $fh encode_json(\%data), "\n"; diff --git a/lib/App/Yath/Schema/ImportModes.pm b/lib/App/Yath/Schema/ImportModes.pm index 7d1ddcfe3..d0de3597c 100644 --- a/lib/App/Yath/Schema/ImportModes.pm +++ b/lib/App/Yath/Schema/ImportModes.pm @@ -68,12 +68,13 @@ sub record_all_events { my $mode = _get_mode(%params); - my $job = $params{job}; + my $try = $params{try}; + my $job = $params{job} // $try ? 
$try->job : undef; my $fail = $params{fail}; my $is_harness_out = $params{is_harness_out}; - croak "must specify either 'job' or 'fail' and 'is_harness_out'" - unless $job || (defined($fail) && defined($is_harness_out)); + croak "must specify either 'try' or 'fail' and 'is_harness_out'" + unless $try || (defined($fail) && defined($is_harness_out)); # Always true in complete mode return 1 if $mode >= $MODES{complete}; @@ -86,7 +87,7 @@ sub record_all_events { return 1 if $is_harness_out; # QVF and QVFD are all events when failing - $fail //= $job->fail; + $fail //= $try->fail; return 1 if $fail && $mode >= $MODES{qvf}; return 0; diff --git a/lib/App/Yath/Schema/Importer.pm b/lib/App/Yath/Schema/Importer.pm index 69ef38638..c63b7a372 100644 --- a/lib/App/Yath/Schema/Importer.pm +++ b/lib/App/Yath/Schema/Importer.pm @@ -12,7 +12,7 @@ use App::Yath::Schema::RunProcessor; use Test2::Harness::Util::HashBase qw/-config -worker_id/; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use Test2::Harness::Util::JSON qw/decode_json/; use IO::Uncompress::Bunzip2 qw($Bunzip2Error); @@ -51,12 +51,12 @@ sub run { while (!defined($max) || $max--) { $schema->resultset('Run')->search( - {status => 'pending', log_file_idx => {'!=' => undef}}, + {status => 'pending', log_file_id => {'!=' => undef}}, {order_by => {-asc => 'added'}, rows => 1}, - )->update({status => 'running', worker_id => $worker_id->{string}}); + )->update({status => 'running', worker_id => $worker_id}); my $run = $schema->resultset('Run')->search( - {status => 'running', worker_id => $worker_id->{string}}, + {status => 'running', worker_id => $worker_id}, {order_by => {-asc => 'added'}, rows => 1}, )->first; diff --git a/lib/App/Yath/Schema/Loader.pm b/lib/App/Yath/Schema/Loader.pm index 3f7c7ecec..add7bf9f3 100644 --- a/lib/App/Yath/Schema/Loader.pm +++ b/lib/App/Yath/Schema/Loader.pm @@ -6,7 +6,7 @@ our $VERSION = '2.000000'; use App::Yath::Schema::Util qw/format_duration/; use 
Test2::Harness::Util::JSON qw/decode_json/; -use App::Yath::Schema::UUID qw/uuid_deflate/; + use Time::HiRes qw/time/; use Parallel::Runner; use IO::Uncompress::Bunzip2; @@ -74,17 +74,6 @@ sub load { my $row = decode_json($line); - for my $col (keys %$cols_info) { - my $spec = $cols_info->{$col}; - next if $col eq 'trace_id'; - next - unless ($spec->{data_type} eq 'uuid') - || ($spec->{data_type} eq 'binary' && $spec->{size} == 16) - || ($spec->{data_type} eq 'char' && $spec->{size} == 36); - - $row->{$col} = uuid_deflate($row->{$col}); - } - my $ok = eval { $rs->create($row); 1 }; my $err = $@; next if $ok; diff --git a/lib/App/Yath/Schema/Overlay/Event.pm b/lib/App/Yath/Schema/Overlay/Event.pm index 326a500de..e4b82207a 100644 --- a/lib/App/Yath/Schema/Overlay/Event.pm +++ b/lib/App/Yath/Schema/Overlay/Event.pm @@ -14,13 +14,16 @@ use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; -__PACKAGE__->parent_column('parent_id'); +__PACKAGE__->inflate_column( + rendered => { + inflate => DBIx::Class::InflateColumn::Serializer::JSON->get_unfreezer('rendered', {}), + deflate => DBIx::Class::InflateColumn::Serializer::JSON->get_freezer('rendered', {}), + }, +); sub run { shift->job->run } sub user { shift->job->run->user } -sub facets { shift->facet } - sub in_mode { my $self = shift; return App::Yath::Schema::ImportModes::event_in_mode(event => $self, @_); @@ -53,7 +56,7 @@ sub line_data { my $is_parent = $cols{is_subtest} ? 1 : 0; my $causes_fail = $cols{causes_fail} ? 1 : 0; - $out{lines} = [map { [$_->facet, $_->real_tag, $_->message, $_->data] } $self->renders]; + $out{lines} = $self->rendered // []; if ($has_binary) { for my $binary ($self->binaries) { @@ -63,7 +66,7 @@ sub line_data { 'binary', $binary->is_image ? 
'IMAGE' : 'BINARY', $filename, - $binary->binary_idx, + $binary->binary_id, ]; } } @@ -81,25 +84,6 @@ sub line_data { return \%out; } -__PACKAGE__->has_many( - "events", - "App::Yath::Schema::Result::Event", - {"foreign.parent_id" => "self.event_id"}, - {cascade_copy => 0, cascade_delete => 0}, -); - -__PACKAGE__->belongs_to( - "parent_rel", - "App::Yath::Schema::Result::Event", - {event_id => "parent_id"}, - { - is_deferrable => 0, - join_type => "LEFT", - on_delete => "NO ACTION", - on_update => "NO ACTION", - }, -); - 1; __END__ diff --git a/lib/App/Yath/Schema/Overlay/Facet.pm b/lib/App/Yath/Schema/Overlay/Facet.pm deleted file mode 100644 index e230218de..000000000 --- a/lib/App/Yath/Schema/Overlay/Facet.pm +++ /dev/null @@ -1,68 +0,0 @@ -package App::Yath::Schema::Overlay::Facet; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::Facet; -use utf8; -use strict; -use warnings; - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -__PACKAGE__->inflate_column( - data => { - inflate => DBIx::Class::InflateColumn::Serializer::JSON->get_unfreezer('data', {}), - deflate => DBIx::Class::InflateColumn::Serializer::JSON->get_freezer('data', {}), - }, -); - - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::Facet - Overlay for Facet result class. - -=head1 DESCRIPTION - -This is where custom (not autogenerated) code for the Facet result class lives. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/Overlay/Job.pm b/lib/App/Yath/Schema/Overlay/Job.pm index 79b872ff7..6aa55cfdb 100644 --- a/lib/App/Yath/Schema/Overlay/Job.pm +++ b/lib/App/Yath/Schema/Overlay/Job.pm @@ -7,8 +7,6 @@ use utf8; use strict; use warnings; -use App::Yath::Schema::ImportModes qw/record_all_events mode_check/; - use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; @@ -42,44 +40,44 @@ sub short_file { } my %COMPLETE_STATUS = (complete => 1, failed => 1, canceled => 1, broken => 1); -sub complete { return $COMPLETE_STATUS{$_[0]->status} // 0 } +#sub complete { return $COMPLETE_STATUS{$_[0]->status} // 0 } +sub complete { 0 } sub sig { my $self = shift; - my $job_parameter = $self->job_parameter; - - return join ";" => ( - (map {$self->$_ // ''} qw/status pass_count fail_count name file fail/), - $job_parameter ? length($job_parameter->parameters) : (''), - ($self->job_fields->count), - ); + return "FIXME"; +# return join ";" => ( +# (map {$self->$_ // ''} qw/name file fail/), +# $parameters ? length($parameters) : (''), +# ($self->job_fields->count), +# ); } -sub short_job_fields { - my $self = shift; - my %params = @_; - - my @fields = $params{prefetched_fields} ? $self->job_fields : $self->job_fields->search( - undef, { - remove_columns => ['data'], - '+select' => ['data IS NOT NULL AS has_data'], - '+as' => ['has_data'], - } - )->all; - - my @out; - for my $jf (@fields) { - my $fields = {$jf->get_all_fields}; - - my $has_data = delete $fields->{data}; - $fields->{has_data} //= $has_data ? \'1' : \'0'; - - push @out => $fields; - } - - return \@out; -} +#sub short_job_fields { +# my $self = shift; +# my %params = @_; +# +# my @fields = $params{prefetched_fields} ? 
$self->job_fields : $self->job_fields->search( +# undef, { +# remove_columns => ['data'], +# '+select' => ['data IS NOT NULL AS has_data'], +# '+as' => ['has_data'], +# } +# )->all; +# +# my @out; +# for my $jf (@fields) { +# my $fields = {$jf->get_all_fields}; +# +# my $has_data = delete $fields->{data}; +# $fields->{has_data} //= $has_data ? \'1' : \'0'; +# +# push @out => $fields; +# } +# +# return \@out; +#} sub TO_JSON { my $self = shift; @@ -88,10 +86,8 @@ sub TO_JSON { $cols{short_file} = $self->short_file; $cols{shortest_file} = $self->shortest_file; - # Inflate - $cols{parameters} = $self->job_parameter->parameters; - - $cols{fields} = $self->short_job_fields(prefetched => $cols{prefetched_fields}); + # FIXME? +# $cols{fields} = $self->short_job_fields(prefetched => $cols{prefetched_fields}); return \%cols; } @@ -100,6 +96,11 @@ my @GLANCE_FIELDS = qw{ exit_code fail fail_count job_key job_try retry name pas sub glance_data { my $self = shift; + my %params = @_; + + # FIXME: Handle this + my $try_id = $params{try_id}; + my %cols = $self->get_all_fields; my %data; @@ -109,54 +110,11 @@ sub glance_data { $data{short_file} = $self->short_file; $data{shortest_file} = $self->shortest_file; - $data{fields} = $self->short_job_fields(prefetched => $cols{prefetched_fields}); +# $data{fields} = $self->short_job_fields(prefetched => $cols{prefetched_fields}); return \%data; } -sub normalize_to_mode { - my $self = shift; - my %params = @_; - - my $mode = $params{mode} // $self->run->mode; - - # No need to purge anything - return if record_all_events(mode => $mode, job => $self); - return if mode_check($mode, 'complete'); - - if (mode_check($mode, 'summary', 'qvf')) { - my $has_binary = $self->events->search({has_binary => 1}); - while (my $e = $has_binary->next()) { - $has_binary->binaries->delete; - $e->delete; - } - - $self->events->delete; - return; - } - - my $query = { - is_diag => 0, - is_harness => 0, - is_time => 0, - }; - - if (mode_check($mode, 'qvfds')) { - 
$query->{'-not'} = {is_subtest => 1, nested => 0}; - } - elsif(!mode_check($mode, 'qvfd')) { - die "Unknown mode '$mode'"; - } - - my $has_binary = $self->events->search({%$query, has_binary => 1}); - while (my $e = $has_binary->next()) { - $has_binary->binaries->delete; - $e->delete; - } - - $self->events->search($query)->delete(); -} - 1; __END__ diff --git a/lib/App/Yath/Schema/Overlay/JobParameter.pm b/lib/App/Yath/Schema/Overlay/JobParameter.pm deleted file mode 100644 index 0ff2a2008..000000000 --- a/lib/App/Yath/Schema/Overlay/JobParameter.pm +++ /dev/null @@ -1,67 +0,0 @@ -package App::Yath::Schema::Overlay::JobParameter; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::JobParameter; -use utf8; -use strict; -use warnings; - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -__PACKAGE__->inflate_column( - parameters => { - inflate => DBIx::Class::InflateColumn::Serializer::JSON->get_unfreezer('parameters', {}), - deflate => DBIx::Class::InflateColumn::Serializer::JSON->get_freezer('parameters', {}), - }, -); - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::JobParameter - Overlay for JobParameter result class. - -=head1 DESCRIPTION - -This is where custom (not autogenerated) code for the JobParameter result class lives. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/Overlay/JobTry.pm b/lib/App/Yath/Schema/Overlay/JobTry.pm new file mode 100644 index 000000000..5c4b83b9c --- /dev/null +++ b/lib/App/Yath/Schema/Overlay/JobTry.pm @@ -0,0 +1,94 @@ +package App::Yath::Schema::Overlay::JobTry; +our $VERSION = '2.000000'; + +package + App::Yath::Schema::Result::JobTry; +use utf8; +use strict; +use warnings; + +use App::Yath::Schema::ImportModes qw/record_all_events mode_check/; + +use Carp qw/confess/; +confess "You must first load a App::Yath::Schema::NAME module" + unless $App::Yath::Schema::LOADED; + +sub normalize_to_mode { + my $self = shift; + my %params = @_; + + my $mode = $params{mode} // $self->job->run->mode; + + # No need to purge anything + return if record_all_events(mode => $mode, job => $self->job, try => $self); + return if mode_check($mode, 'complete'); + + if (mode_check($mode, 'summary', 'qvf')) { + $self->events->delete; + return; + } + + my $query = { + is_diag => 0, + is_harness => 0, + is_time => 0, + }; + + if (mode_check($mode, 'qvfds')) { + $query->{'-not'} = {is_subtest => 1, nested => 0}; + } + elsif(!mode_check($mode, 'qvfd')) { + die "Unknown mode '$mode'"; + } + + $self->events->search($query)->delete(); +} + + +1; + +__END__ + +=pod + +=encoding UTF-8 + +=head1 NAME + +App::Yath::Schema::Result::JobTry - Overlay for JobTry result class. + +=head1 DESCRIPTION + +This is where custom (not autogenerated) code for the JobTry result class lives. + +=head1 SOURCE + +The source code repository for Test2-Harness can be found at +L. + +=head1 MAINTAINERS + +=over 4 + +=item Chad Granum Eexodist@cpan.orgE + +=back + +=head1 AUTHORS + +=over 4 + +=item Chad Granum Eexodist@cpan.orgE + +=back + +=head1 COPYRIGHT + +Copyright Chad Granum Eexodist7@gmail.comE. + +This program is free software; you can redistribute it and/or +modify it under the same terms as Perl itself. 
+ +See L + +=cut diff --git a/lib/App/Yath/Schema/Overlay/JobField.pm b/lib/App/Yath/Schema/Overlay/JobTryField.pm similarity index 80% rename from lib/App/Yath/Schema/Overlay/JobField.pm rename to lib/App/Yath/Schema/Overlay/JobTryField.pm index 36880f2ec..4cbc1afb8 100644 --- a/lib/App/Yath/Schema/Overlay/JobField.pm +++ b/lib/App/Yath/Schema/Overlay/JobTryField.pm @@ -1,14 +1,12 @@ -package App::Yath::Schema::Overlay::JobField; +package App::Yath::Schema::Overlay::JobTryField; our $VERSION = '2.000000'; package - App::Yath::Schema::Result::JobField; + App::Yath::Schema::Result::JobTryField; use utf8; use strict; use warnings; -use Test2::Harness::Util::JSON qw/decode_json/; - use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; @@ -28,6 +26,7 @@ sub TO_JSON { } 1; + __END__ =pod @@ -36,11 +35,11 @@ __END__ =head1 NAME -App::Yath::Schema::Result::JobField - Overlay for JobField result class. +App::Yath::Schema::Result::JobTryField - Overlay for JobTryField result class. =head1 DESCRIPTION -This is where custom (not autogenerated) code for the JobField result class lives. +This is where custom (not autogenerated) code for the JobTryField result class lives. 
=head1 SOURCE diff --git a/lib/App/Yath/Schema/Overlay/Orphan.pm b/lib/App/Yath/Schema/Overlay/Orphan.pm deleted file mode 100644 index 06fcd1bb5..000000000 --- a/lib/App/Yath/Schema/Overlay/Orphan.pm +++ /dev/null @@ -1,67 +0,0 @@ -package App::Yath::Schema::Overlay::Orphan; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::Orphan; -use utf8; -use strict; -use warnings; - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -__PACKAGE__->inflate_column( - data => { - inflate => DBIx::Class::InflateColumn::Serializer::JSON->get_unfreezer('data', {}), - deflate => DBIx::Class::InflateColumn::Serializer::JSON->get_freezer('data', {}), - }, -); - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::Orphan - Overlay for Orphan result class. - -=head1 DESCRIPTION - -This is where custom (not autogenerated) code for the Orphan result class lives. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/Overlay/Project.pm b/lib/App/Yath/Schema/Overlay/Project.pm index dcf5ef43a..e04fbcb2d 100644 --- a/lib/App/Yath/Schema/Overlay/Project.pm +++ b/lib/App/Yath/Schema/Overlay/Project.pm @@ -8,7 +8,7 @@ use strict; use warnings; use Statistics::Basic qw/median/; -use App::Yath::Schema::UUID qw/uuid_deflate/; + use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" @@ -20,12 +20,12 @@ sub last_covered_run { my $query = { status => 'complete', - project_idx => $self->project_idx, + project_id => $self->project_id, has_coverage => 1, }; my $attrs = { - order_by => {'-desc' => 'run_idx'}, + order_by => {'-desc' => 'run_id'}, rows => 1, }; @@ -57,13 +57,13 @@ sub durations { SELECT test_files.filename, jobs.duration FROM jobs JOIN runs USING(run_id) - JOIN test_files USING(test_file_idx) - JOIN users USING(user_idx) - WHERE runs.project_idx = ? + JOIN test_files USING(test_file_id) + JOIN users USING(user_id) + WHERE runs.project_id = ? AND jobs.duration IS NOT NULL AND test_files.filename IS NOT NULL EOT - my @vals = (uuid_deflate($self->project_idx)); + my @vals = ($self->project_id); my ($user_append, @user_args) = $username ? ("users.username = ?", $username) : (); @@ -77,9 +77,9 @@ sub durations { my $sth = $dbh->prepare(<<" EOT"); SELECT run_id FROM runs - JOIN users USING(user_idx) + JOIN users USING(user_id) $where - ORDER BY run_idx DESC + ORDER BY run_id DESC LIMIT ? 
EOT diff --git a/lib/App/Yath/Schema/Overlay/Render.pm b/lib/App/Yath/Schema/Overlay/Render.pm deleted file mode 100644 index 9aa211a40..000000000 --- a/lib/App/Yath/Schema/Overlay/Render.pm +++ /dev/null @@ -1,69 +0,0 @@ -package App::Yath::Schema::Overlay::Render; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::Render; -use utf8; -use strict; -use warnings; - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -__PACKAGE__->inflate_column( - data => { - inflate => DBIx::Class::InflateColumn::Serializer::JSON->get_unfreezer('data', {}), - deflate => DBIx::Class::InflateColumn::Serializer::JSON->get_freezer('data', {}), - }, -); - -sub real_tag { $_[0]->other_tag // $_[0]->tag } - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::Render - Overlay for Render result class. - -=head1 DESCRIPTION - -This is where custom (not autogenerated) code for the Render result class lives. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/Overlay/Run.pm b/lib/App/Yath/Schema/Overlay/Run.pm index 3034c5ccf..5e0824426 100644 --- a/lib/App/Yath/Schema/Overlay/Run.pm +++ b/lib/App/Yath/Schema/Overlay/Run.pm @@ -15,31 +15,22 @@ use App::Yath::Schema::DateTimeFormat qw/DTF/; __PACKAGE__->belongs_to( "user_join", "App::Yath::Schema::Result::User", - { user_idx => "user_idx" }, + { user_id => "user_id" }, { is_deferrable => 0, on_delete => "NO ACTION", on_update => "NO ACTION" }, ); -if ($App::Yath::Schema::LOADED eq 'Percona') { - __PACKAGE__->might_have( - "run_parameter", - "App::Yath::Schema::Result::RunParameter", - { "foreign.run_id" => "self.run_id" }, - { cascade_copy => 0, cascade_delete => 1 }, - ); -} - my %COMPLETE_STATUS = (complete => 1, failed => 1, canceled => 1, broken => 1); sub complete { return $COMPLETE_STATUS{$_[0]->status} // 0 } sub sig { my $self = shift; - my $run_parameter = $self->run_parameter; + my $parameters = $self->parameters; return join ";" => ( - (map {$self->$_ // ''} qw/status pinned passed failed retried concurrency/), - $run_parameter ? length($run_parameter->parameters) : (''), - ($self->run_fields->count), + (map {$self->$_ // ''} qw/status pinned passed failed retried concurrency_j concurrency_x/), + $parameters ? 
length($parameters) : (''), + (scalar $self->run_fields->count), ); } @@ -47,7 +38,7 @@ sub short_run_fields { my $self = shift; return $self->run_fields->search(undef, { - remove_columns => ['data'], + remove_columns => ['data', 'parameters'], '+select' => ['data IS NOT NULL AS has_data'], '+as' => ['has_data'], })->all; @@ -57,13 +48,8 @@ sub TO_JSON { my $self = shift; my %cols = $self->get_all_fields; - # Inflate - if (my $p = $self->run_parameter) { - $cols{parameters} = $p->parameters; - } - - $cols{user} //= $self->user->username; - $cols{project} //= $self->project->name; + $cols{user} //= $self->user->username; + $cols{project} //= $self->project->name; $cols{fields} = []; for my $rf ($cols{prefetched_fields} ? $self->run_fields : $self->short_run_fields) { @@ -75,7 +61,7 @@ sub TO_JSON { push @{$cols{fields}} => $fields; } - my $dt = DTF()->parse_datetime( $cols{added} ); + my $dt = DTF()->parse_datetime($cols{added}); $cols{added} = $dt->strftime("%Y-%m-%d %I:%M%P"); @@ -98,14 +84,14 @@ sub normalize_to_mode { $_->normalize_to_mode(mode => $mode) for $self->jobs->all; } -sub expanded_coverages { +sub expanded_coverage { my $self = shift; my ($query) = @_; - $self->coverages->search( + $self->coverage->search( $query, { - order_by => [qw/test_file_idx source_file_idx source_sub_idx/], + order_by => [qw/test_file_id source_file_id source_sub_id/], join => [qw/test_file source_file source_sub coverage_manager/], '+columns' => { test_file => 'test_file.filename', @@ -122,7 +108,7 @@ sub coverage_data { my (%params) = @_; my $query = $params{query}; - my $rs = $self->expanded_coverages($query); + my $rs = $self->expanded_coverage($query); my $curr_test; my $data; diff --git a/lib/App/Yath/Schema/Overlay/RunParameter.pm b/lib/App/Yath/Schema/Overlay/RunParameter.pm deleted file mode 100644 index a39a7a8c1..000000000 --- a/lib/App/Yath/Schema/Overlay/RunParameter.pm +++ /dev/null @@ -1,68 +0,0 @@ -package App::Yath::Schema::Overlay::RunParameter; -our $VERSION 
= '2.000000'; - -package - App::Yath::Schema::Result::RunParameter; -use utf8; -use strict; -use warnings; - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -__PACKAGE__->inflate_column( - parameters => { - inflate => DBIx::Class::InflateColumn::Serializer::JSON->get_unfreezer('parameters', {}), - deflate => DBIx::Class::InflateColumn::Serializer::JSON->get_freezer('parameters', {}), - }, -); - - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::RunParameter - Overlay for RunParameter result class. - -=head1 DESCRIPTION - -This is where custom (not autogenerated) code for the RunParameter result class lives. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. - -See L - -=cut diff --git a/lib/App/Yath/Schema/Overlay/JobOutput.pm b/lib/App/Yath/Schema/Overlay/Sync.pm similarity index 75% rename from lib/App/Yath/Schema/Overlay/JobOutput.pm rename to lib/App/Yath/Schema/Overlay/Sync.pm index 68ca9ded4..8daa1ddeb 100644 --- a/lib/App/Yath/Schema/Overlay/JobOutput.pm +++ b/lib/App/Yath/Schema/Overlay/Sync.pm @@ -1,8 +1,8 @@ -package App::Yath::Schema::Overlay::JobOutput; +package App::Yath::Schema::Overlay::Sync; our $VERSION = '2.000000'; package - App::Yath::Schema::Result::JobOutput; + App::Yath::Schema::Result::Sync; use utf8; use strict; use warnings; @@ -21,11 +21,11 @@ __END__ =head1 NAME -App::Yath::Schema::Result::JobOutput - Overlay for JobOutput result class. +App::Yath::Schema::Result::Sync - Overlay for Sync result class. 
=head1 DESCRIPTION -This is where custom (not autogenerated) code for the JobOutput result class lives. +This is where custom (not autogenerated) code for the Sync result class lives. =head1 SOURCE diff --git a/lib/App/Yath/Schema/Overlay/User.pm b/lib/App/Yath/Schema/Overlay/User.pm index c107d66c7..57c19e506 100644 --- a/lib/App/Yath/Schema/Overlay/User.pm +++ b/lib/App/Yath/Schema/Overlay/User.pm @@ -11,7 +11,7 @@ use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use Carp qw/croak/; use constant COST => 8; @@ -68,7 +68,7 @@ sub gen_api_key { return $self->result_source->schema->resultset('ApiKey')->create( { - user_idx => $self->user_idx, + user_id => $self->user_id, value => gen_uuid(), status => 'active', name => $name, diff --git a/lib/App/Yath/Schema/Overlay/Version.pm b/lib/App/Yath/Schema/Overlay/Version.pm new file mode 100644 index 000000000..7801e1ce3 --- /dev/null +++ b/lib/App/Yath/Schema/Overlay/Version.pm @@ -0,0 +1,60 @@ +package App::Yath::Schema::Overlay::Version; +our $VERSION = '2.000000'; + +package + App::Yath::Schema::Result::Version; +use utf8; +use strict; +use warnings; + +use Carp qw/confess/; +confess "You must first load a App::Yath::Schema::NAME module" + unless $App::Yath::Schema::LOADED; + +1; + +__END__ + +=pod + +=encoding UTF-8 + +=head1 NAME + +App::Yath::Schema::Result::Version - Overlay for Version result class. + +=head1 DESCRIPTION + +This is where custom (not autogenerated) code for the Version result class lives. + +=head1 SOURCE + +The source code repository for Test2-Harness can be found at +L. + +=head1 MAINTAINERS + +=over 4 + +=item Chad Granum Eexodist@cpan.orgE + +=back + +=head1 AUTHORS + +=over 4 + +=item Chad Granum Eexodist@cpan.orgE + +=back + +=head1 COPYRIGHT + +Copyright Chad Granum Eexodist7@gmail.comE. 
+ +This program is free software; you can redistribute it and/or +modify it under the same terms as Perl itself. + +See L + +=cut diff --git a/lib/App/Yath/Schema/PostgreSQL/ApiKey.pm b/lib/App/Yath/Schema/PostgreSQL/ApiKey.pm index 0c9f0453f..7fd082bea 100644 --- a/lib/App/Yath/Schema/PostgreSQL/ApiKey.pm +++ b/lib/App/Yath/Schema/PostgreSQL/ApiKey.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("api_keys"); __PACKAGE__->add_columns( + "value", + { data_type => "uuid", is_nullable => 0, size => 16 }, "api_key_id", { data_type => "bigint", @@ -30,10 +31,6 @@ __PACKAGE__->add_columns( }, "user_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "name", - { data_type => "varchar", is_nullable => 0, size => 128 }, - "value", - { data_type => "varchar", is_nullable => 0, size => 36 }, "status", { data_type => "enum", @@ -44,6 +41,8 @@ __PACKAGE__->add_columns( }, is_nullable => 0, }, + "name", + { data_type => "varchar", is_nullable => 0, size => 128 }, ); __PACKAGE__->set_primary_key("api_key_id"); __PACKAGE__->add_unique_constraint("api_keys_value_key", ["value"]); @@ -55,7 +54,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Binary.pm b/lib/App/Yath/Schema/PostgreSQL/Binary.pm index c03e0342c..4d7aa62a6 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Binary.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Binary.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("binaries"); __PACKAGE__->add_columns( + "event_uuid", + { data_type => "uuid", 
is_nullable => 0, size => 16 }, "binary_id", { data_type => "bigint", @@ -29,13 +30,13 @@ __PACKAGE__->add_columns( sequence => "binaries_binary_id_seq", }, "event_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, + { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, + "is_image", + { data_type => "boolean", default_value => \"false", is_nullable => 0 }, "filename", { data_type => "varchar", is_nullable => 0, size => 512 }, "description", { data_type => "text", is_nullable => 1 }, - "is_image", - { data_type => "boolean", default_value => \"false", is_nullable => 0 }, "data", { data_type => "bytea", is_nullable => 0 }, ); @@ -44,11 +45,16 @@ __PACKAGE__->belongs_to( "event", "App::Yath::Schema::Result::Event", { event_id => "event_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, + { + is_deferrable => 0, + join_type => "LEFT", + on_delete => "CASCADE", + on_update => "NO ACTION", + }, ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Config.pm b/lib/App/Yath/Schema/PostgreSQL/Config.pm index 15191f228..f4fb2a9e1 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Config.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Config.pm @@ -16,14 +16,13 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("config"); __PACKAGE__->add_columns( "config_id", { - data_type => "bigint", + data_type => "integer", is_auto_increment => 1, is_nullable => 0, sequence => "config_config_id_seq", @@ -37,7 +36,7 @@ __PACKAGE__->set_primary_key("config_id"); __PACKAGE__->add_unique_constraint("config_setting_key", ["setting"]); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by 
DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Coverage.pm b/lib/App/Yath/Schema/PostgreSQL/Coverage.pm index e2d444ebb..8655066bb 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Coverage.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Coverage.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("coverage"); __PACKAGE__->add_columns( + "event_uuid", + { data_type => "uuid", is_nullable => 0, size => 16 }, "coverage_id", { data_type => "bigint", @@ -28,9 +29,7 @@ __PACKAGE__->add_columns( is_nullable => 0, sequence => "coverage_coverage_id_seq", }, - "event_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, - "job_id", + "job_try_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, "coverage_manager_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, @@ -47,10 +46,10 @@ __PACKAGE__->add_columns( ); __PACKAGE__->set_primary_key("coverage_id"); __PACKAGE__->add_unique_constraint( - "coverage_run_id_job_id_test_file_id_source_file_id_source_s_key", + "coverage_run_id_job_try_id_test_file_id_source_file_id_sour_key", [ "run_id", - "job_id", + "job_try_id", "test_file_id", "source_file_id", "source_sub_id", @@ -68,20 +67,9 @@ __PACKAGE__->belongs_to( }, ); __PACKAGE__->belongs_to( - "event", - "App::Yath::Schema::Result::Event", - { event_id => "event_id" }, - { - is_deferrable => 0, - join_type => "LEFT", - on_delete => "SET NULL", - on_update => "NO ACTION", - }, -); -__PACKAGE__->belongs_to( - "job", - "App::Yath::Schema::Result::Job", - { job_id => "job_id" }, + "job_try", + "App::Yath::Schema::Result::JobTry", + { job_try_id => "job_try_id" }, { is_deferrable => 0, join_type => "LEFT", @@ -115,7 +103,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader 
v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/CoverageManager.pm b/lib/App/Yath/Schema/PostgreSQL/CoverageManager.pm index cd821585b..56775491b 100644 --- a/lib/App/Yath/Schema/PostgreSQL/CoverageManager.pm +++ b/lib/App/Yath/Schema/PostgreSQL/CoverageManager.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("coverage_manager"); @@ -34,14 +33,14 @@ __PACKAGE__->add_columns( __PACKAGE__->set_primary_key("coverage_manager_id"); __PACKAGE__->add_unique_constraint("coverage_manager_package_key", ["package"]); __PACKAGE__->has_many( - "coverages", + "coverage", "App::Yath::Schema::Result::Coverage", { "foreign.coverage_manager_id" => "self.coverage_manager_id" }, { cascade_copy => 0, cascade_delete => 1 }, ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Email.pm b/lib/App/Yath/Schema/PostgreSQL/Email.pm index 81f0fcb76..c87d212e3 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Email.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Email.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("email"); @@ -30,12 +29,12 @@ __PACKAGE__->add_columns( }, "user_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, + "verified", + { data_type => "boolean", default_value => \"false", is_nullable => 0 }, "local", { data_type => "citext", is_nullable => 0 }, "domain", { data_type => "citext", is_nullable => 0 }, - "verified", - { data_type => "boolean", 
default_value => \"false", is_nullable => 0 }, ); __PACKAGE__->set_primary_key("email_id"); __PACKAGE__->add_unique_constraint("email_local_domain_key", ["local", "domain"]); @@ -59,7 +58,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/EmailVerificationCode.pm b/lib/App/Yath/Schema/PostgreSQL/EmailVerificationCode.pm index e909c4947..036c2609b 100644 --- a/lib/App/Yath/Schema/PostgreSQL/EmailVerificationCode.pm +++ b/lib/App/Yath/Schema/PostgreSQL/EmailVerificationCode.pm @@ -16,15 +16,14 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("email_verification_codes"); __PACKAGE__->add_columns( - "email_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, "evcode", { data_type => "uuid", is_nullable => 0, size => 16 }, + "email_id", + { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, ); __PACKAGE__->set_primary_key("email_id"); __PACKAGE__->belongs_to( @@ -35,7 +34,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Event.pm b/lib/App/Yath/Schema/PostgreSQL/Event.pm index 1f263b557..f85bf3a3d 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Event.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Event.pm @@ -16,11 +16,16 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("events"); __PACKAGE__->add_columns( + "event_uuid", + { data_type => "uuid", is_nullable => 
0, size => 16 }, + "trace_uuid", + { data_type => "uuid", is_nullable => 1, size => 16 }, + "parent_uuid", + { data_type => "uuid", is_foreign_key => 1, is_nullable => 1, size => 16 }, "event_id", { data_type => "bigint", @@ -28,18 +33,16 @@ __PACKAGE__->add_columns( is_nullable => 0, sequence => "events_event_id_seq", }, - "job_id", + "job_try_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, "parent_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, - "event_uuid", - { data_type => "uuid", is_nullable => 0, size => 16 }, - "trace_uuid", - { data_type => "uuid", is_nullable => 1, size => 16 }, - "stamp", - { data_type => "timestamp", is_nullable => 0 }, - "event_ord", + "event_idx", { data_type => "integer", is_nullable => 0 }, + "event_sdx", + { data_type => "integer", is_nullable => 0 }, + "stamp", + { data_type => "timestamp", is_nullable => 1 }, "nested", { data_type => "smallint", is_nullable => 0 }, "is_subtest", @@ -50,22 +53,27 @@ __PACKAGE__->add_columns( { data_type => "boolean", is_nullable => 0 }, "is_time", { data_type => "boolean", is_nullable => 0 }, - "is_assert", - { data_type => "boolean", is_nullable => 0 }, "causes_fail", { data_type => "boolean", is_nullable => 0 }, - "has_binary", - { data_type => "boolean", is_nullable => 0 }, "has_facets", { data_type => "boolean", is_nullable => 0 }, "has_orphan", { data_type => "boolean", is_nullable => 0 }, - "has_resources", + "has_binaries", { data_type => "boolean", is_nullable => 0 }, + "facets", + { data_type => "jsonb", is_nullable => 1 }, + "orphan", + { data_type => "jsonb", is_nullable => 1 }, + "rendered", + { data_type => "jsonb", is_nullable => 1 }, ); __PACKAGE__->set_primary_key("event_id"); __PACKAGE__->add_unique_constraint("events_event_uuid_key", ["event_uuid"]); -__PACKAGE__->add_unique_constraint("events_job_id_event_ord_key", ["job_id", "event_ord"]); +__PACKAGE__->add_unique_constraint( + "events_job_try_id_event_idx_event_sdx_key", + 
["job_try_id", "event_idx", "event_sdx"], +); __PACKAGE__->has_many( "binaries", "App::Yath::Schema::Result::Binary", @@ -73,37 +81,25 @@ __PACKAGE__->has_many( { cascade_copy => 0, cascade_delete => 1 }, ); __PACKAGE__->has_many( - "children", + "events_parent_uuids", "App::Yath::Schema::Result::Event", - { "foreign.parent_id" => "self.event_id" }, + { "foreign.parent_uuid" => "self.event_uuid" }, { cascade_copy => 0, cascade_delete => 1 }, ); __PACKAGE__->has_many( - "coverages", - "App::Yath::Schema::Result::Coverage", - { "foreign.event_id" => "self.event_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->might_have( - "facets", - "App::Yath::Schema::Result::Facet", - { "foreign.event_id" => "self.event_id" }, + "events_parents", + "App::Yath::Schema::Result::Event", + { "foreign.parent_id" => "self.event_id" }, { cascade_copy => 0, cascade_delete => 1 }, ); __PACKAGE__->belongs_to( - "job", - "App::Yath::Schema::Result::Job", - { job_id => "job_id" }, + "job_try", + "App::Yath::Schema::Result::JobTry", + { job_try_id => "job_try_id" }, { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, ); -__PACKAGE__->might_have( - "orphans", - "App::Yath::Schema::Result::Orphan", - { "foreign.event_id" => "self.event_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); __PACKAGE__->belongs_to( - "parent_event", + "parent", "App::Yath::Schema::Result::Event", { event_id => "parent_id" }, { @@ -113,27 +109,20 @@ __PACKAGE__->belongs_to( on_update => "NO ACTION", }, ); -__PACKAGE__->has_many( - "renderings", - "App::Yath::Schema::Result::Render", - { "foreign.event_id" => "self.event_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->has_many( - "reports", - "App::Yath::Schema::Result::Reporting", - { "foreign.event_id" => "self.event_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->might_have( - "resource", - "App::Yath::Schema::Result::Resource", - { "foreign.event_id" => "self.event_id" 
}, - { cascade_copy => 0, cascade_delete => 1 }, +__PACKAGE__->belongs_to( + "parent_uuid", + "App::Yath::Schema::Result::Event", + { event_uuid => "parent_uuid" }, + { + is_deferrable => 0, + join_type => "LEFT", + on_delete => "NO ACTION", + on_update => "NO ACTION", + }, ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Host.pm b/lib/App/Yath/Schema/PostgreSQL/Host.pm index cffc5078c..d985910ac 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Host.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Host.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("hosts"); @@ -33,9 +32,15 @@ __PACKAGE__->add_columns( ); __PACKAGE__->set_primary_key("host_id"); __PACKAGE__->add_unique_constraint("hosts_hostname_key", ["hostname"]); +__PACKAGE__->has_many( + "resources", + "App::Yath::Schema::Result::Resource", + { "foreign.host_id" => "self.host_id" }, + { cascade_copy => 0, cascade_delete => 1 }, +); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Job.pm b/lib/App/Yath/Schema/PostgreSQL/Job.pm index b47b6a515..2b8f0b219 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Job.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Job.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("jobs"); __PACKAGE__->add_columns( + "job_uuid", + { data_type => "uuid", is_nullable => 0, size => 16 }, "job_id", { data_type => "bigint", @@ -31,85 +32,19 @@ 
__PACKAGE__->add_columns( "run_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, "test_file_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, - "job_uuid", - { data_type => "uuid", is_nullable => 0, size => 16 }, - "job_try", - { data_type => "integer", is_nullable => 0 }, - "status", - { - data_type => "enum", - default_value => "pending", - extra => { - custom_type_name => "queue_status", - list => ["pending", "running", "complete", "broken", "canceled"], - }, - is_nullable => 0, - }, + { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, "is_harness_out", - { data_type => "boolean", default_value => \"false", is_nullable => 0 }, - "fail", + { data_type => "boolean", is_nullable => 0 }, + "failed", + { data_type => "boolean", is_nullable => 0 }, + "passed", { data_type => "boolean", is_nullable => 1 }, - "retry", - { data_type => "boolean", is_nullable => 1 }, - "name", - { data_type => "text", is_nullable => 1 }, - "exit_code", - { data_type => "integer", is_nullable => 1 }, - "launch", - { data_type => "timestamp", is_nullable => 1 }, - "start", - { data_type => "timestamp", is_nullable => 1 }, - "ended", - { data_type => "timestamp", is_nullable => 1 }, - "duration", - { data_type => "double precision", is_nullable => 1 }, - "pass_count", - { data_type => "bigint", is_nullable => 1 }, - "fail_count", - { data_type => "bigint", is_nullable => 1 }, ); __PACKAGE__->set_primary_key("job_id"); -__PACKAGE__->add_unique_constraint("jobs_job_uuid_job_try_key", ["job_uuid", "job_try"]); -__PACKAGE__->has_many( - "coverages", - "App::Yath::Schema::Result::Coverage", - { "foreign.job_id" => "self.job_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->has_many( - "events", - "App::Yath::Schema::Result::Event", - { "foreign.job_id" => "self.job_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->has_many( - "job_fields", - "App::Yath::Schema::Result::JobField", - { 
"foreign.job_id" => "self.job_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); +__PACKAGE__->add_unique_constraint("jobs_job_uuid_key", ["job_uuid"]); __PACKAGE__->has_many( - "job_outputs", - "App::Yath::Schema::Result::JobOutput", - { "foreign.job_id" => "self.job_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->might_have( - "job_parameter", - "App::Yath::Schema::Result::JobParameter", - { "foreign.job_id" => "self.job_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->has_many( - "renderings", - "App::Yath::Schema::Result::Render", - { "foreign.job_id" => "self.job_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); -__PACKAGE__->has_many( - "reports", - "App::Yath::Schema::Result::Reporting", + "jobs_tries", + "App::Yath::Schema::Result::JobTry", { "foreign.job_id" => "self.job_id" }, { cascade_copy => 0, cascade_delete => 1 }, ); @@ -123,16 +58,11 @@ __PACKAGE__->belongs_to( "test_file", "App::Yath::Schema::Result::TestFile", { test_file_id => "test_file_id" }, - { - is_deferrable => 0, - join_type => "LEFT", - on_delete => "CASCADE", - on_update => "NO ACTION", - }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/JobOutput.pm b/lib/App/Yath/Schema/PostgreSQL/JobOutput.pm deleted file mode 100644 index c792a1daa..000000000 --- a/lib/App/Yath/Schema/PostgreSQL/JobOutput.pm +++ /dev/null @@ -1,97 +0,0 @@ -use utf8; -package App::Yath::Schema::PostgreSQL::JobOutput; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::JobOutput; - -# Created by DBIx::Class::Schema::Loader -# DO NOT MODIFY ANY PART OF THIS FILE - -use strict; -use warnings; - -use parent 'App::Yath::Schema::ResultBase'; -__PACKAGE__->load_components( - 
"InflateColumn::DateTime", - "InflateColumn::Serializer", - "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", - "UUIDColumns", -); -__PACKAGE__->table("job_outputs"); -__PACKAGE__->add_columns( - "job_output_id", - { - data_type => "bigint", - is_auto_increment => 1, - is_nullable => 0, - sequence => "job_outputs_job_output_id_seq", - }, - "job_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "stream", - { - data_type => "enum", - extra => { custom_type_name => "io_stream", list => ["STDOUT", "STDERR"] }, - is_nullable => 0, - }, - "output", - { data_type => "text", is_nullable => 0 }, -); -__PACKAGE__->set_primary_key("job_output_id"); -__PACKAGE__->add_unique_constraint("job_outputs_job_id_stream_key", ["job_id", "stream"]); -__PACKAGE__->belongs_to( - "job", - "App::Yath::Schema::Result::Job", - { job_id => "job_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, -); - - -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 -# DO NOT MODIFY ANY PART OF THIS FILE - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::PostgreSQL::JobOutput - Autogenerated result class for JobOutput in PostgreSQL. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/PostgreSQL/JobParameter.pm b/lib/App/Yath/Schema/PostgreSQL/JobParameter.pm deleted file mode 100644 index 07763dd34..000000000 --- a/lib/App/Yath/Schema/PostgreSQL/JobParameter.pm +++ /dev/null @@ -1,83 +0,0 @@ -use utf8; -package App::Yath::Schema::PostgreSQL::JobParameter; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::JobParameter; - -# Created by DBIx::Class::Schema::Loader -# DO NOT MODIFY ANY PART OF THIS FILE - -use strict; -use warnings; - -use parent 'App::Yath::Schema::ResultBase'; -__PACKAGE__->load_components( - "InflateColumn::DateTime", - "InflateColumn::Serializer", - "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", - "UUIDColumns", -); -__PACKAGE__->table("job_parameters"); -__PACKAGE__->add_columns( - "job_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "parameters", - { data_type => "jsonb", is_nullable => 1 }, -); -__PACKAGE__->set_primary_key("job_id"); -__PACKAGE__->belongs_to( - "job", - "App::Yath::Schema::Result::Job", - { job_id => "job_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, -); - - -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 -# DO NOT MODIFY ANY PART OF THIS FILE - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::PostgreSQL::JobParameter - Autogenerated result class for JobParameter in PostgreSQL. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/PostgreSQL/JobTry.pm b/lib/App/Yath/Schema/PostgreSQL/JobTry.pm new file mode 100644 index 000000000..a7cbdae79 --- /dev/null +++ b/lib/App/Yath/Schema/PostgreSQL/JobTry.pm @@ -0,0 +1,156 @@ +use utf8; +package App::Yath::Schema::PostgreSQL::JobTry; +our $VERSION = '2.000000'; + +package + App::Yath::Schema::Result::JobTry; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY ANY PART OF THIS FILE + +use strict; +use warnings; + +use parent 'App::Yath::Schema::ResultBase'; +__PACKAGE__->load_components( + "InflateColumn::DateTime", + "InflateColumn::Serializer", + "InflateColumn::Serializer::JSON", + "UUIDColumns", +); +__PACKAGE__->table("job_tries"); +__PACKAGE__->add_columns( + "job_try_id", + { + data_type => "bigint", + is_auto_increment => 1, + is_nullable => 0, + sequence => "job_tries_job_try_id_seq", + }, + "job_id", + { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, + "pass_count", + { data_type => "bigint", is_nullable => 1 }, + "fail_count", + { data_type => "bigint", is_nullable => 1 }, + "exit_code", + { data_type => "integer", is_nullable => 1 }, + "launch", + { data_type => "timestamp", is_nullable => 1 }, + "start", + { data_type => "timestamp", is_nullable => 1 }, + "ended", + { data_type => "timestamp", is_nullable => 1 }, + "status", + { + data_type => "enum", + default_value => "pending", + extra => { + custom_type_name => "queue_stat", + list => ["pending", "running", "complete", "broken", "canceled"], + }, + is_nullable => 0, + }, + "job_try_ord", + { data_type => "smallint", is_nullable => 0 }, + "fail", + { data_type => "boolean", is_nullable => 1 }, + "retry", + { data_type => "boolean", is_nullable => 1 }, + "duration", + { + data_type => "numeric", + default_value => \"null", + is_nullable => 1, + size => [14, 4], + }, + "parameters", + { data_type => "jsonb", is_nullable => 1 }, + "stdout", + { data_type => "text", is_nullable => 1 }, + "stderr", + { data_type => 
"text", is_nullable => 1 }, +); +__PACKAGE__->set_primary_key("job_try_id"); +__PACKAGE__->add_unique_constraint( + "job_tries_job_try_id_job_try_ord_key", + ["job_try_id", "job_try_ord"], +); +__PACKAGE__->has_many( + "coverage", + "App::Yath::Schema::Result::Coverage", + { "foreign.job_try_id" => "self.job_try_id" }, + { cascade_copy => 0, cascade_delete => 1 }, +); +__PACKAGE__->has_many( + "events", + "App::Yath::Schema::Result::Event", + { "foreign.job_try_id" => "self.job_try_id" }, + { cascade_copy => 0, cascade_delete => 1 }, +); +__PACKAGE__->belongs_to( + "job", + "App::Yath::Schema::Result::Job", + { job_id => "job_id" }, + { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, +); +__PACKAGE__->has_many( + "job_try_fields", + "App::Yath::Schema::Result::JobTryField", + { "foreign.job_try_id" => "self.job_try_id" }, + { cascade_copy => 0, cascade_delete => 1 }, +); +__PACKAGE__->has_many( + "reports", + "App::Yath::Schema::Result::Reporting", + { "foreign.job_try_id" => "self.job_try_id" }, + { cascade_copy => 0, cascade_delete => 1 }, +); + + +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 +# DO NOT MODIFY ANY PART OF THIS FILE + +1; + +__END__ + +=pod + +=encoding UTF-8 + +=head1 NAME + +App::Yath::Schema::PostgreSQL::JobTry - Autogenerated result class for JobTry in PostgreSQL. + +=head1 SOURCE + +The source code repository for Test2-Harness can be found at +L. + +=head1 MAINTAINERS + +=over 4 + +=item Chad Granum Eexodist@cpan.orgE + +=back + +=head1 AUTHORS + +=over 4 + +=item Chad Granum Eexodist@cpan.orgE + +=back + +=head1 COPYRIGHT + +Copyright Chad Granum Eexodist7@gmail.comE. + +This program is free software; you can redistribute it and/or +modify it under the same terms as Perl itself. 
+ +See L + +=cut diff --git a/lib/App/Yath/Schema/PostgreSQL/JobField.pm b/lib/App/Yath/Schema/PostgreSQL/JobTryField.pm similarity index 74% rename from lib/App/Yath/Schema/PostgreSQL/JobField.pm rename to lib/App/Yath/Schema/PostgreSQL/JobTryField.pm index 7d56682dd..d29c062ab 100644 --- a/lib/App/Yath/Schema/PostgreSQL/JobField.pm +++ b/lib/App/Yath/Schema/PostgreSQL/JobTryField.pm @@ -1,9 +1,9 @@ use utf8; -package App::Yath::Schema::PostgreSQL::JobField; +package App::Yath::Schema::PostgreSQL::JobTryField; our $VERSION = '2.000000'; package - App::Yath::Schema::Result::JobField; + App::Yath::Schema::Result::JobTryField; # Created by DBIx::Class::Schema::Loader # DO NOT MODIFY ANY PART OF THIS FILE @@ -16,22 +16,21 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); -__PACKAGE__->table("job_fields"); +__PACKAGE__->table("job_try_fields"); __PACKAGE__->add_columns( + "event_uuid", + { data_type => "uuid", is_nullable => 0, size => 16 }, "job_field_id", { data_type => "bigint", is_auto_increment => 1, is_nullable => 0, - sequence => "job_fields_job_field_id_seq", + sequence => "job_try_fields_job_field_id_seq", }, - "job_id", + "job_try_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "job_field_uuid", - { data_type => "uuid", is_nullable => 0, size => 16 }, "name", { data_type => "varchar", is_nullable => 0, size => 64 }, "data", @@ -44,16 +43,15 @@ __PACKAGE__->add_columns( { data_type => "text", is_nullable => 1 }, ); __PACKAGE__->set_primary_key("job_field_id"); -__PACKAGE__->add_unique_constraint("job_fields_job_field_uuid_key", ["job_field_uuid"]); __PACKAGE__->belongs_to( - "job", - "App::Yath::Schema::Result::Job", - { job_id => "job_id" }, + "job_try", + "App::Yath::Schema::Result::JobTry", + { job_try_id => "job_try_id" }, { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, ); -# Created by 
DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; @@ -66,7 +64,7 @@ __END__ =head1 NAME -App::Yath::Schema::PostgreSQL::JobField - Autogenerated result class for JobField in PostgreSQL. +App::Yath::Schema::PostgreSQL::JobTryField - Autogenerated result class for JobTryField in PostgreSQL. =head1 SOURCE diff --git a/lib/App/Yath/Schema/PostgreSQL/LogFile.pm b/lib/App/Yath/Schema/PostgreSQL/LogFile.pm index 85ac4f52e..6e679aa7c 100644 --- a/lib/App/Yath/Schema/PostgreSQL/LogFile.pm +++ b/lib/App/Yath/Schema/PostgreSQL/LogFile.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("log_files"); @@ -44,7 +43,7 @@ __PACKAGE__->has_many( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Permission.pm b/lib/App/Yath/Schema/PostgreSQL/Permission.pm index ef0aabf5c..14725cd3a 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Permission.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Permission.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("permissions"); @@ -59,7 +58,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/PrimaryEmail.pm b/lib/App/Yath/Schema/PostgreSQL/PrimaryEmail.pm index 0f079e5db..97432ceae 100644 --- a/lib/App/Yath/Schema/PostgreSQL/PrimaryEmail.pm +++ 
b/lib/App/Yath/Schema/PostgreSQL/PrimaryEmail.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("primary_email"); @@ -42,7 +41,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Project.pm b/lib/App/Yath/Schema/PostgreSQL/Project.pm index 0ef1391b8..7126f086c 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Project.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Project.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("projects"); @@ -28,10 +27,10 @@ __PACKAGE__->add_columns( is_nullable => 0, sequence => "projects_project_id_seq", }, - "name", - { data_type => "citext", is_nullable => 0 }, "owner", { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, + "name", + { data_type => "citext", is_nullable => 0 }, ); __PACKAGE__->set_primary_key("project_id"); __PACKAGE__->add_unique_constraint("projects_name_key", ["name"]); @@ -66,7 +65,7 @@ __PACKAGE__->has_many( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Render.pm b/lib/App/Yath/Schema/PostgreSQL/Render.pm deleted file mode 100644 index 41e95508d..000000000 --- a/lib/App/Yath/Schema/PostgreSQL/Render.pm +++ /dev/null @@ -1,150 +0,0 @@ -use utf8; -package App::Yath::Schema::PostgreSQL::Render; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::Render; - -# Created by DBIx::Class::Schema::Loader -# DO 
NOT MODIFY ANY PART OF THIS FILE - -use strict; -use warnings; - -use parent 'App::Yath::Schema::ResultBase'; -__PACKAGE__->load_components( - "InflateColumn::DateTime", - "InflateColumn::Serializer", - "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", - "UUIDColumns", -); -__PACKAGE__->table("renders"); -__PACKAGE__->add_columns( - "render_id", - { - data_type => "bigint", - is_auto_increment => 1, - is_nullable => 0, - sequence => "renders_render_id_seq", - }, - "job_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "event_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "facet", - { data_type => "varchar", is_nullable => 0, size => 64 }, - "tag", - { - data_type => "enum", - extra => { - custom_type_name => "tags", - list => [ - "other", - "ABOUT", - "ARRAY", - "BRIEF", - "CONTROL", - "CRITICAL", - "DEBUG", - "DIAG", - "ENCODING", - "ERROR", - "FACETS", - "FAIL", - "FAILED", - "FATAL", - "HALT", - "HARNESS", - "KILL", - "NO PLAN", - "PASS", - "PASSED", - "PLAN", - "REASON", - "SHOW", - "SKIP ALL", - "SKIPPED", - "STDERR", - "TAGS", - "TIMEOUT", - "VERSION", - "WARN", - "WARNING", - ], - }, - is_nullable => 0, - }, - "other_tag", - { - data_type => "varchar", - default_value => \"null", - is_nullable => 1, - size => 8, - }, - "message", - { data_type => "text", is_nullable => 0 }, - "data", - { data_type => "jsonb", is_nullable => 1 }, -); -__PACKAGE__->set_primary_key("render_id"); -__PACKAGE__->belongs_to( - "event", - "App::Yath::Schema::Result::Event", - { event_id => "event_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, -); -__PACKAGE__->belongs_to( - "job", - "App::Yath::Schema::Result::Job", - { job_id => "job_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, -); - - -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 -# DO NOT MODIFY ANY PART OF THIS FILE - -1; - -__END__ - -=pod - -=encoding UTF-8 - 
-=head1 NAME - -App::Yath::Schema::PostgreSQL::Render - Autogenerated result class for Render in PostgreSQL. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. - -See L - -=cut diff --git a/lib/App/Yath/Schema/PostgreSQL/Reporting.pm b/lib/App/Yath/Schema/PostgreSQL/Reporting.pm index d5e0af514..a6356826b 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Reporting.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Reporting.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("reporting"); @@ -28,9 +27,7 @@ __PACKAGE__->add_columns( is_nullable => 0, sequence => "reporting_reporting_id_seq", }, - "event_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, - "job_id", + "job_try_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, "test_file_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, @@ -41,7 +38,15 @@ __PACKAGE__->add_columns( "run_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, "job_try", - { data_type => "integer", is_nullable => 1 }, + { data_type => "smallint", is_nullable => 1 }, + "retry", + { data_type => "smallint", is_nullable => 0 }, + "abort", + { data_type => "smallint", is_nullable => 0 }, + "fail", + { data_type => "smallint", is_nullable => 0 }, + "pass", + { data_type => "smallint", is_nullable => 0 }, "subtest", { data_type => "varchar", @@ -50,32 +55,13 @@ __PACKAGE__->add_columns( size => 512, }, "duration", - { data_type => "double precision", is_nullable => 0 
}, - "fail", - { data_type => "smallint", default_value => 0, is_nullable => 0 }, - "pass", - { data_type => "smallint", default_value => 0, is_nullable => 0 }, - "retry", - { data_type => "smallint", default_value => 0, is_nullable => 0 }, - "abort", - { data_type => "smallint", default_value => 0, is_nullable => 0 }, + { data_type => "numeric", is_nullable => 0, size => [14, 4] }, ); __PACKAGE__->set_primary_key("reporting_id"); __PACKAGE__->belongs_to( - "event", - "App::Yath::Schema::Result::Event", - { event_id => "event_id" }, - { - is_deferrable => 0, - join_type => "LEFT", - on_delete => "SET NULL", - on_update => "NO ACTION", - }, -); -__PACKAGE__->belongs_to( - "job", - "App::Yath::Schema::Result::Job", - { job_id => "job_id" }, + "job_try", + "App::Yath::Schema::Result::JobTry", + { job_try_id => "job_try_id" }, { is_deferrable => 0, join_type => "LEFT", @@ -114,7 +100,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Resource.pm b/lib/App/Yath/Schema/PostgreSQL/Resource.pm index 71acbb5ae..8e4e3a16c 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Resource.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Resource.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("resources"); __PACKAGE__->add_columns( + "event_uuid", + { data_type => "uuid", is_nullable => 0, size => 16 }, "resource_id", { data_type => "bigint", @@ -28,42 +29,36 @@ __PACKAGE__->add_columns( is_nullable => 0, sequence => "resources_resource_id_seq", }, - "event_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, "resource_type_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, "run_id", { data_type 
=> "bigint", is_foreign_key => 1, is_nullable => 0 }, + "host_id", + { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, + "stamp", + { data_type => "timestamp", is_nullable => 0 }, + "resource_ord", + { data_type => "integer", is_nullable => 0 }, "data", { data_type => "jsonb", is_nullable => 0 }, - "line", - { data_type => "bigint", is_nullable => 0 }, ); __PACKAGE__->set_primary_key("resource_id"); -__PACKAGE__->add_unique_constraint("resources_event_id_key", ["event_id"]); +__PACKAGE__->add_unique_constraint( + "resources_run_id_resource_ord_key", + ["run_id", "resource_ord"], +); __PACKAGE__->belongs_to( - "event", - "App::Yath::Schema::Result::Event", - { event_id => "event_id" }, - { - is_deferrable => 0, - join_type => "LEFT", - on_delete => "SET NULL", - on_update => "NO ACTION", - }, + "host", + "App::Yath::Schema::Result::Host", + { host_id => "host_id" }, + { is_deferrable => 0, on_delete => "SET NULL", on_update => "NO ACTION" }, ); __PACKAGE__->belongs_to( "resource_type", - "App::Yath::Schema::Result::Resource", - { resource_id => "resource_type_id" }, + "App::Yath::Schema::Result::ResourceType", + { resource_type_id => "resource_type_id" }, { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, ); -__PACKAGE__->has_many( - "resources", - "App::Yath::Schema::Result::Resource", - { "foreign.resource_type_id" => "self.resource_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); __PACKAGE__->belongs_to( "run", "App::Yath::Schema::Result::Run", @@ -72,7 +67,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/ResourceType.pm b/lib/App/Yath/Schema/PostgreSQL/ResourceType.pm index 06a4cdfd2..88b101f57 100644 --- a/lib/App/Yath/Schema/PostgreSQL/ResourceType.pm +++ b/lib/App/Yath/Schema/PostgreSQL/ResourceType.pm 
@@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("resource_types"); @@ -29,13 +28,19 @@ __PACKAGE__->add_columns( sequence => "resource_types_resource_type_id_seq", }, "name", - { data_type => "text", is_nullable => 0 }, + { data_type => "varchar", is_nullable => 0, size => 512 }, ); __PACKAGE__->set_primary_key("resource_type_id"); __PACKAGE__->add_unique_constraint("resource_types_name_key", ["name"]); +__PACKAGE__->has_many( + "resources", + "App::Yath::Schema::Result::Resource", + { "foreign.resource_type_id" => "self.resource_type_id" }, + { cascade_copy => 0, cascade_delete => 1 }, +); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Run.pm b/lib/App/Yath/Schema/PostgreSQL/Run.pm index bc64515db..55d80bfca 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Run.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Run.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("runs"); __PACKAGE__->add_columns( + "run_uuid", + { data_type => "uuid", is_nullable => 0, size => 16 }, "run_id", { data_type => "bigint", @@ -34,37 +35,37 @@ __PACKAGE__->add_columns( { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, "log_file_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 }, - "run_uuid", - { data_type => "uuid", is_nullable => 0, size => 16 }, + "sync_id", + { data_type => "integer", is_foreign_key => 1, is_nullable => 1 }, + "passed", + { data_type => "integer", is_nullable => 1 }, + "failed", + { data_type => "integer", is_nullable => 1 }, + "to_retry", + { data_type => 
"integer", is_nullable => 1 }, + "retried", + { data_type => "integer", is_nullable => 1 }, + "concurrency_j", + { data_type => "integer", is_nullable => 1 }, + "concurrency_x", + { data_type => "integer", is_nullable => 1 }, + "added", + { + data_type => "timestamp", + default_value => \"current_timestamp", + is_nullable => 0, + original => { default_value => \"now()" }, + }, "status", { data_type => "enum", default_value => "pending", extra => { - custom_type_name => "queue_status", + custom_type_name => "queue_stat", list => ["pending", "running", "complete", "broken", "canceled"], }, is_nullable => 0, }, - "worker_id", - { data_type => "text", is_nullable => 1 }, - "error", - { data_type => "text", is_nullable => 1 }, - "pinned", - { data_type => "boolean", default_value => \"false", is_nullable => 0 }, - "has_coverage", - { data_type => "boolean", default_value => \"false", is_nullable => 0 }, - "has_resources", - { data_type => "boolean", default_value => \"false", is_nullable => 0 }, - "duration", - { data_type => "text", is_nullable => 1 }, - "added", - { - data_type => "timestamp", - default_value => \"current_timestamp", - is_nullable => 0, - original => { default_value => \"now()" }, - }, "mode", { data_type => "enum", @@ -75,29 +76,32 @@ __PACKAGE__->add_columns( }, is_nullable => 0, }, - "buffer", + "canon", + { data_type => "boolean", is_nullable => 0 }, + "pinned", + { data_type => "boolean", default_value => \"false", is_nullable => 0 }, + "has_coverage", + { data_type => "boolean", is_nullable => 1 }, + "has_resources", + { data_type => "boolean", is_nullable => 1 }, + "parameters", + { data_type => "jsonb", is_nullable => 1 }, + "worker_id", + { data_type => "text", is_nullable => 1 }, + "error", + { data_type => "text", is_nullable => 1 }, + "duration", { - data_type => "enum", - default_value => "job", - extra => { - custom_type_name => "run_buffering", - list => ["none", "diag", "job", "run"], - }, - is_nullable => 0, + data_type => "numeric", 
+ default_value => \"null", + is_nullable => 1, + size => [14, 4], }, - "passed", - { data_type => "integer", is_nullable => 1 }, - "failed", - { data_type => "integer", is_nullable => 1 }, - "retried", - { data_type => "integer", is_nullable => 1 }, - "concurrency", - { data_type => "integer", is_nullable => 1 }, ); __PACKAGE__->set_primary_key("run_id"); __PACKAGE__->add_unique_constraint("runs_run_uuid_key", ["run_uuid"]); __PACKAGE__->has_many( - "coverages", + "coverage", "App::Yath::Schema::Result::Coverage", { "foreign.run_id" => "self.run_id" }, { cascade_copy => 0, cascade_delete => 1 }, @@ -143,18 +147,23 @@ __PACKAGE__->has_many( { "foreign.run_id" => "self.run_id" }, { cascade_copy => 0, cascade_delete => 1 }, ); -__PACKAGE__->might_have( - "run_parameter", - "App::Yath::Schema::Result::RunParameter", - { "foreign.run_id" => "self.run_id" }, - { cascade_copy => 0, cascade_delete => 1 }, -); __PACKAGE__->has_many( "sweeps", "App::Yath::Schema::Result::Sweep", { "foreign.run_id" => "self.run_id" }, { cascade_copy => 0, cascade_delete => 1 }, ); +__PACKAGE__->belongs_to( + "sync", + "App::Yath::Schema::Result::Sync", + { sync_id => "sync_id" }, + { + is_deferrable => 0, + join_type => "LEFT", + on_delete => "SET NULL", + on_update => "NO ACTION", + }, +); __PACKAGE__->belongs_to( "user", "App::Yath::Schema::Result::User", @@ -163,7 +172,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/RunField.pm b/lib/App/Yath/Schema/PostgreSQL/RunField.pm index add3f37fb..064c64377 100644 --- a/lib/App/Yath/Schema/PostgreSQL/RunField.pm +++ b/lib/App/Yath/Schema/PostgreSQL/RunField.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", 
"UUIDColumns", ); __PACKAGE__->table("run_fields"); __PACKAGE__->add_columns( + "event_uuid", + { data_type => "uuid", is_nullable => 0, size => 16 }, "run_field_id", { data_type => "bigint", @@ -30,8 +31,6 @@ __PACKAGE__->add_columns( }, "run_id", { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "run_field_uuid", - { data_type => "uuid", is_nullable => 0, size => 16 }, "name", { data_type => "varchar", is_nullable => 0, size => 64 }, "data", @@ -44,7 +43,6 @@ __PACKAGE__->add_columns( { data_type => "text", is_nullable => 1 }, ); __PACKAGE__->set_primary_key("run_field_id"); -__PACKAGE__->add_unique_constraint("run_fields_run_field_uuid_key", ["run_field_uuid"]); __PACKAGE__->belongs_to( "run", "App::Yath::Schema::Result::Run", @@ -53,7 +51,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/RunParameter.pm b/lib/App/Yath/Schema/PostgreSQL/RunParameter.pm deleted file mode 100644 index f5146db36..000000000 --- a/lib/App/Yath/Schema/PostgreSQL/RunParameter.pm +++ /dev/null @@ -1,83 +0,0 @@ -use utf8; -package App::Yath::Schema::PostgreSQL::RunParameter; -our $VERSION = '2.000000'; - -package - App::Yath::Schema::Result::RunParameter; - -# Created by DBIx::Class::Schema::Loader -# DO NOT MODIFY ANY PART OF THIS FILE - -use strict; -use warnings; - -use parent 'App::Yath::Schema::ResultBase'; -__PACKAGE__->load_components( - "InflateColumn::DateTime", - "InflateColumn::Serializer", - "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", - "UUIDColumns", -); -__PACKAGE__->table("run_parameters"); -__PACKAGE__->add_columns( - "run_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "parameters", - { data_type => "jsonb", is_nullable => 1 }, -); -__PACKAGE__->set_primary_key("run_id"); -__PACKAGE__->belongs_to( - 
"run", - "App::Yath::Schema::Result::Run", - { run_id => "run_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, -); - - -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 -# DO NOT MODIFY ANY PART OF THIS FILE - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::PostgreSQL::RunParameter - Autogenerated result class for RunParameter in PostgreSQL. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. - -See L - -=cut diff --git a/lib/App/Yath/Schema/PostgreSQL/Session.pm b/lib/App/Yath/Schema/PostgreSQL/Session.pm index 4c2665de0..3a9642698 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Session.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Session.pm @@ -16,11 +16,12 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("sessions"); __PACKAGE__->add_columns( + "session_uuid", + { data_type => "uuid", is_nullable => 0, size => 16 }, "session_id", { data_type => "bigint", @@ -28,8 +29,6 @@ __PACKAGE__->add_columns( is_nullable => 0, sequence => "sessions_session_id_seq", }, - "session_uuid", - { data_type => "uuid", is_nullable => 0, size => 16 }, "active", { data_type => "boolean", default_value => \"true", is_nullable => 1 }, ); @@ -43,7 +42,7 @@ __PACKAGE__->has_many( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git 
a/lib/App/Yath/Schema/PostgreSQL/SessionHost.pm b/lib/App/Yath/Schema/PostgreSQL/SessionHost.pm index 0bdf67892..dca57e83d 100644 --- a/lib/App/Yath/Schema/PostgreSQL/SessionHost.pm +++ b/lib/App/Yath/Schema/PostgreSQL/SessionHost.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("session_hosts"); @@ -75,7 +74,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/SourceFile.pm b/lib/App/Yath/Schema/PostgreSQL/SourceFile.pm index 9d73b25f8..7cf037e6c 100644 --- a/lib/App/Yath/Schema/PostgreSQL/SourceFile.pm +++ b/lib/App/Yath/Schema/PostgreSQL/SourceFile.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("source_files"); @@ -34,14 +33,14 @@ __PACKAGE__->add_columns( __PACKAGE__->set_primary_key("source_file_id"); __PACKAGE__->add_unique_constraint("source_files_filename_key", ["filename"]); __PACKAGE__->has_many( - "coverages", + "coverage", "App::Yath::Schema::Result::Coverage", { "foreign.source_file_id" => "self.source_file_id" }, { cascade_copy => 0, cascade_delete => 1 }, ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/SourceSub.pm b/lib/App/Yath/Schema/PostgreSQL/SourceSub.pm index 448fd67a6..4f0389ece 100644 --- a/lib/App/Yath/Schema/PostgreSQL/SourceSub.pm +++ b/lib/App/Yath/Schema/PostgreSQL/SourceSub.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( 
"InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("source_subs"); @@ -34,14 +33,14 @@ __PACKAGE__->add_columns( __PACKAGE__->set_primary_key("source_sub_id"); __PACKAGE__->add_unique_constraint("source_subs_subname_key", ["subname"]); __PACKAGE__->has_many( - "coverages", + "coverage", "App::Yath::Schema::Result::Coverage", { "foreign.source_sub_id" => "self.source_sub_id" }, { cascade_copy => 0, cascade_delete => 1 }, ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Sweep.pm b/lib/App/Yath/Schema/PostgreSQL/Sweep.pm index 095b35dbe..1afee8a49 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Sweep.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Sweep.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("sweeps"); @@ -43,7 +42,7 @@ __PACKAGE__->belongs_to( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Orphan.pm b/lib/App/Yath/Schema/PostgreSQL/Sync.pm similarity index 52% rename from lib/App/Yath/Schema/PostgreSQL/Orphan.pm rename to lib/App/Yath/Schema/PostgreSQL/Sync.pm index 158cfa595..0b251a30c 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Orphan.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Sync.pm @@ -1,9 +1,9 @@ use utf8; -package App::Yath::Schema::PostgreSQL::Orphan; +package App::Yath::Schema::PostgreSQL::Sync; our $VERSION = '2.000000'; package - App::Yath::Schema::Result::Orphan; + App::Yath::Schema::Result::Sync; # Created by DBIx::Class::Schema::Loader # DO 
NOT MODIFY ANY PART OF THIS FILE @@ -16,28 +16,37 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); -__PACKAGE__->table("orphans"); +__PACKAGE__->table("syncs"); __PACKAGE__->add_columns( - "event_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "data", - { data_type => "jsonb", is_nullable => 0 }, - "line", + "sync_id", + { + data_type => "integer", + is_auto_increment => 1, + is_nullable => 0, + sequence => "syncs_sync_id_seq", + }, + "last_run_id", { data_type => "bigint", is_nullable => 0 }, + "last_project_id", + { data_type => "bigint", is_nullable => 0 }, + "last_user_id", + { data_type => "bigint", is_nullable => 0 }, + "source", + { data_type => "varchar", is_nullable => 0, size => 64 }, ); -__PACKAGE__->set_primary_key("event_id"); -__PACKAGE__->belongs_to( - "event", - "App::Yath::Schema::Result::Event", - { event_id => "event_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, +__PACKAGE__->set_primary_key("sync_id"); +__PACKAGE__->add_unique_constraint("syncs_source_key", ["source"]); +__PACKAGE__->has_many( + "runs", + "App::Yath::Schema::Result::Run", + { "foreign.sync_id" => "self.sync_id" }, + { cascade_copy => 0, cascade_delete => 1 }, ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; @@ -50,7 +59,7 @@ __END__ =head1 NAME -App::Yath::Schema::PostgreSQL::Orphan - Autogenerated result class for Orphan in PostgreSQL. +App::Yath::Schema::PostgreSQL::Sync - Autogenerated result class for Sync in PostgreSQL. 
=head1 SOURCE diff --git a/lib/App/Yath/Schema/PostgreSQL/TestFile.pm b/lib/App/Yath/Schema/PostgreSQL/TestFile.pm index bb9b2368c..4e8c80acd 100644 --- a/lib/App/Yath/Schema/PostgreSQL/TestFile.pm +++ b/lib/App/Yath/Schema/PostgreSQL/TestFile.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("test_files"); @@ -34,7 +33,7 @@ __PACKAGE__->add_columns( __PACKAGE__->set_primary_key("test_file_id"); __PACKAGE__->add_unique_constraint("test_files_filename_key", ["filename"]); __PACKAGE__->has_many( - "coverages", + "coverage", "App::Yath::Schema::Result::Coverage", { "foreign.test_file_id" => "self.test_file_id" }, { cascade_copy => 0, cascade_delete => 1 }, @@ -53,7 +52,7 @@ __PACKAGE__->has_many( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/User.pm b/lib/App/Yath/Schema/PostgreSQL/User.pm index 81e9d8204..846afa526 100644 --- a/lib/App/Yath/Schema/PostgreSQL/User.pm +++ b/lib/App/Yath/Schema/PostgreSQL/User.pm @@ -16,7 +16,6 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); __PACKAGE__->table("users"); @@ -28,8 +27,6 @@ __PACKAGE__->add_columns( is_nullable => 0, sequence => "users_user_id_seq", }, - "username", - { data_type => "citext", is_nullable => 0 }, "pw_hash", { data_type => "varchar", @@ -44,8 +41,6 @@ __PACKAGE__->add_columns( is_nullable => 1, size => 22, }, - "realname", - { data_type => "text", is_nullable => 1 }, "role", { data_type => "enum", @@ -53,6 +48,10 @@ __PACKAGE__->add_columns( extra => { custom_type_name => "user_type", list => ["admin", "user"] }, is_nullable => 0, }, + "username", + { 
data_type => "citext", is_nullable => 0 }, + "realname", + { data_type => "text", is_nullable => 1 }, ); __PACKAGE__->set_primary_key("user_id"); __PACKAGE__->add_unique_constraint("users_username_key", ["username"]); @@ -106,7 +105,7 @@ __PACKAGE__->has_many( ); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; diff --git a/lib/App/Yath/Schema/PostgreSQL/Facet.pm b/lib/App/Yath/Schema/PostgreSQL/Version.pm similarity index 54% rename from lib/App/Yath/Schema/PostgreSQL/Facet.pm rename to lib/App/Yath/Schema/PostgreSQL/Version.pm index 4dfa064b1..22db3a03c 100644 --- a/lib/App/Yath/Schema/PostgreSQL/Facet.pm +++ b/lib/App/Yath/Schema/PostgreSQL/Version.pm @@ -1,9 +1,9 @@ use utf8; -package App::Yath::Schema::PostgreSQL::Facet; +package App::Yath::Schema::PostgreSQL::Version; our $VERSION = '2.000000'; package - App::Yath::Schema::Result::Facet; + App::Yath::Schema::Result::Version; # Created by DBIx::Class::Schema::Loader # DO NOT MODIFY ANY PART OF THIS FILE @@ -16,28 +16,32 @@ __PACKAGE__->load_components( "InflateColumn::DateTime", "InflateColumn::Serializer", "InflateColumn::Serializer::JSON", - "Tree::AdjacencyList", "UUIDColumns", ); -__PACKAGE__->table("facets"); +__PACKAGE__->table("versions"); __PACKAGE__->add_columns( - "event_id", - { data_type => "bigint", is_foreign_key => 1, is_nullable => 0 }, - "data", - { data_type => "jsonb", is_nullable => 0 }, - "line", - { data_type => "bigint", is_nullable => 0 }, -); -__PACKAGE__->set_primary_key("event_id"); -__PACKAGE__->belongs_to( - "event", - "App::Yath::Schema::Result::Event", - { event_id => "event_id" }, - { is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" }, + "version", + { data_type => "numeric", is_nullable => 0, size => [10, 6] }, + "version_id", + { + data_type => "integer", + is_auto_increment => 1, + is_nullable => 0, + sequence => 
"versions_version_id_seq", + }, + "updated", + { + data_type => "timestamp", + default_value => \"current_timestamp", + is_nullable => 0, + original => { default_value => \"now()" }, + }, ); +__PACKAGE__->set_primary_key("version_id"); +__PACKAGE__->add_unique_constraint("versions_version_key", ["version"]); -# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-21 17:11:11 +# Created by DBIx::Class::Schema::Loader v0.07052 @ 2024-05-29 14:47:42 # DO NOT MODIFY ANY PART OF THIS FILE 1; @@ -50,7 +54,7 @@ __END__ =head1 NAME -App::Yath::Schema::PostgreSQL::Facet - Autogenerated result class for Facet in PostgreSQL. +App::Yath::Schema::PostgreSQL::Version - Autogenerated result class for Version in PostgreSQL. =head1 SOURCE diff --git a/lib/App/Yath/Schema/Queries.pm b/lib/App/Yath/Schema/Queries.pm index d82d386e2..9c0643188 100644 --- a/lib/App/Yath/Schema/Queries.pm +++ b/lib/App/Yath/Schema/Queries.pm @@ -8,7 +8,7 @@ use Carp qw/croak/; use Test2::Harness::Util::HashBase qw/{+CONFIG}->schema; my $project = $schema->resultset('Project')->find({name => $project_name}) or return []; - my $sth = $dbh->prepare("SELECT distinct($field) FROM runs WHERE project_idx = ? ORDER BY $field ASC"); - $sth->execute(uuid_deflate($project->project_idx)) or die $sth->errstr; + my $sth = $dbh->prepare("SELECT distinct($field) FROM runs WHERE project_id = ? 
ORDER BY $field ASC"); + $sth->execute($project->project_id) or die $sth->errstr; my $rows = $sth->fetchall_arrayref; return [map { $_->[0] } @$rows]; } diff --git a/lib/App/Yath/Schema/Result/JobField.pm b/lib/App/Yath/Schema/Result/JobField.pm deleted file mode 100644 index b764b3877..000000000 --- a/lib/App/Yath/Schema/Result/JobField.pm +++ /dev/null @@ -1,60 +0,0 @@ -package App::Yath::Schema::Result::JobField; -use utf8; -use strict; -use warnings; - -our $VERSION = '2.000000'; - -# DO NOT MODIFY THIS FILE, GENERATED BY author_tools/regen_schema.pl - - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -require "App/Yath/Schema/${App::Yath::Schema::LOADED}/JobField.pm"; -require "App/Yath/Schema/Overlay/JobField.pm"; - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::JobField - Autogenerated result class for JobField. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/Result/JobParameter.pm b/lib/App/Yath/Schema/Result/JobParameter.pm deleted file mode 100644 index a4468a51e..000000000 --- a/lib/App/Yath/Schema/Result/JobParameter.pm +++ /dev/null @@ -1,60 +0,0 @@ -package App::Yath::Schema::Result::JobParameter; -use utf8; -use strict; -use warnings; - -our $VERSION = '2.000000'; - -# DO NOT MODIFY THIS FILE, GENERATED BY author_tools/regen_schema.pl - - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -require "App/Yath/Schema/${App::Yath::Schema::LOADED}/JobParameter.pm"; -require "App/Yath/Schema/Overlay/JobParameter.pm"; - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::JobParameter - Autogenerated result class for JobParameter. - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/lib/App/Yath/Schema/Result/Render.pm b/lib/App/Yath/Schema/Result/JobTry.pm similarity index 78% rename from lib/App/Yath/Schema/Result/Render.pm rename to lib/App/Yath/Schema/Result/JobTry.pm index 166184812..6e3309a4e 100644 --- a/lib/App/Yath/Schema/Result/Render.pm +++ b/lib/App/Yath/Schema/Result/JobTry.pm @@ -1,4 +1,4 @@ -package App::Yath::Schema::Result::Render; +package App::Yath::Schema::Result::JobTry; use utf8; use strict; use warnings; @@ -12,8 +12,8 @@ use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; -require "App/Yath/Schema/${App::Yath::Schema::LOADED}/Render.pm"; -require "App/Yath/Schema/Overlay/Render.pm"; +require "App/Yath/Schema/${App::Yath::Schema::LOADED}/JobTry.pm"; +require "App/Yath/Schema/Overlay/JobTry.pm"; 1; @@ -25,7 +25,7 @@ __END__ =head1 NAME -App::Yath::Schema::Result::Render - Autogenerated result class for Render. +App::Yath::Schema::Result::JobTry - Autogenerated result class for JobTry. 
=head1 SOURCE diff --git a/lib/App/Yath/Schema/Result/JobOutput.pm b/lib/App/Yath/Schema/Result/JobTryField.pm similarity index 76% rename from lib/App/Yath/Schema/Result/JobOutput.pm rename to lib/App/Yath/Schema/Result/JobTryField.pm index 293bad2a9..34196e995 100644 --- a/lib/App/Yath/Schema/Result/JobOutput.pm +++ b/lib/App/Yath/Schema/Result/JobTryField.pm @@ -1,4 +1,4 @@ -package App::Yath::Schema::Result::JobOutput; +package App::Yath::Schema::Result::JobTryField; use utf8; use strict; use warnings; @@ -12,8 +12,8 @@ use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; -require "App/Yath/Schema/${App::Yath::Schema::LOADED}/JobOutput.pm"; -require "App/Yath/Schema/Overlay/JobOutput.pm"; +require "App/Yath/Schema/${App::Yath::Schema::LOADED}/JobTryField.pm"; +require "App/Yath/Schema/Overlay/JobTryField.pm"; 1; @@ -25,7 +25,7 @@ __END__ =head1 NAME -App::Yath::Schema::Result::JobOutput - Autogenerated result class for JobOutput. +App::Yath::Schema::Result::JobTryField - Autogenerated result class for JobTryField. =head1 SOURCE diff --git a/lib/App/Yath/Schema/Result/RunParameter.pm b/lib/App/Yath/Schema/Result/RunParameter.pm deleted file mode 100644 index 4258cdc07..000000000 --- a/lib/App/Yath/Schema/Result/RunParameter.pm +++ /dev/null @@ -1,60 +0,0 @@ -package App::Yath::Schema::Result::RunParameter; -use utf8; -use strict; -use warnings; - -our $VERSION = '2.000000'; - -# DO NOT MODIFY THIS FILE, GENERATED BY author_tools/regen_schema.pl - - -use Carp qw/confess/; -confess "You must first load a App::Yath::Schema::NAME module" - unless $App::Yath::Schema::LOADED; - -require "App/Yath/Schema/${App::Yath::Schema::LOADED}/RunParameter.pm"; -require "App/Yath/Schema/Overlay/RunParameter.pm"; - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::Result::RunParameter - Autogenerated result class for RunParameter. 
- -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. - -See L - -=cut diff --git a/lib/App/Yath/Schema/Result/Facet.pm b/lib/App/Yath/Schema/Result/Sync.pm similarity index 78% rename from lib/App/Yath/Schema/Result/Facet.pm rename to lib/App/Yath/Schema/Result/Sync.pm index 2dab475d9..a126bc8f7 100644 --- a/lib/App/Yath/Schema/Result/Facet.pm +++ b/lib/App/Yath/Schema/Result/Sync.pm @@ -1,4 +1,4 @@ -package App::Yath::Schema::Result::Facet; +package App::Yath::Schema::Result::Sync; use utf8; use strict; use warnings; @@ -12,8 +12,8 @@ use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; -require "App/Yath/Schema/${App::Yath::Schema::LOADED}/Facet.pm"; -require "App/Yath/Schema/Overlay/Facet.pm"; +require "App/Yath/Schema/${App::Yath::Schema::LOADED}/Sync.pm"; +require "App/Yath/Schema/Overlay/Sync.pm"; 1; @@ -25,7 +25,7 @@ __END__ =head1 NAME -App::Yath::Schema::Result::Facet - Autogenerated result class for Facet. +App::Yath::Schema::Result::Sync - Autogenerated result class for Sync. 
=head1 SOURCE diff --git a/lib/App/Yath/Schema/Result/Orphan.pm b/lib/App/Yath/Schema/Result/Version.pm similarity index 77% rename from lib/App/Yath/Schema/Result/Orphan.pm rename to lib/App/Yath/Schema/Result/Version.pm index 7e47d49cd..4db5a7109 100644 --- a/lib/App/Yath/Schema/Result/Orphan.pm +++ b/lib/App/Yath/Schema/Result/Version.pm @@ -1,4 +1,4 @@ -package App::Yath::Schema::Result::Orphan; +package App::Yath::Schema::Result::Version; use utf8; use strict; use warnings; @@ -12,8 +12,8 @@ use Carp qw/confess/; confess "You must first load a App::Yath::Schema::NAME module" unless $App::Yath::Schema::LOADED; -require "App/Yath/Schema/${App::Yath::Schema::LOADED}/Orphan.pm"; -require "App/Yath/Schema/Overlay/Orphan.pm"; +require "App/Yath/Schema/${App::Yath::Schema::LOADED}/Version.pm"; +require "App/Yath/Schema/Overlay/Version.pm"; 1; @@ -25,7 +25,7 @@ __END__ =head1 NAME -App::Yath::Schema::Result::Orphan - Autogenerated result class for Orphan. +App::Yath::Schema::Result::Version - Autogenerated result class for Version. 
=head1 SOURCE diff --git a/lib/App/Yath/Schema/ResultSet.pm b/lib/App/Yath/Schema/ResultSet.pm index 338b2d0de..8df7d8c41 100644 --- a/lib/App/Yath/Schema/ResultSet.pm +++ b/lib/App/Yath/Schema/ResultSet.pm @@ -6,8 +6,46 @@ our $VERSION = '2.000000'; use parent 'DBIx::Class::ResultSet'; +use Carp qw/croak/; +use Test2::Harness::Util::UUID qw/looks_like_uuid/; + __PACKAGE__->load_components('Helper::ResultSet::RemoveColumns'); +sub find_by_id_or_uuid { + my $self = shift; + my ($id, $query, $attrs) = @_; + + $query //= {}; + $attrs //= {}; + + my $rs = $self->result_source; + + my ($pcol, @extra) = $rs->primary_columns; + croak "find_by_id_or_uuid() cannot be used on this class as it has more than one primary key column" if @extra; + + my $ucol; + if ($pcol =~ m/^(.+)_id$/) { + $ucol = "${1}_uuid"; + croak "find_by_id_or_uuid() cannot be used on this class as it ha sno '$ucol' column" + unless $rs->has_column($ucol); + } + else { + croak "Not sure how to turn '$pcol' into the uuid column"; + } + + if (looks_like_uuid($id)) { + $query->{$ucol} = $id; + } + elsif ($id =~ m/^\d+$/) { + $query->{$pcol} = $id; + } + else { + croak "'$id' does not look like either a numeric ID or a UUID"; + } + + return $self->find($query, $attrs); +} + 1; __END__ diff --git a/lib/App/Yath/Schema/RunProcessor.pm b/lib/App/Yath/Schema/RunProcessor.pm index b828eb60b..beb490e28 100644 --- a/lib/App/Yath/Schema/RunProcessor.pm +++ b/lib/App/Yath/Schema/RunProcessor.pm @@ -1,6 +1,7 @@ package App::Yath::Schema::RunProcessor; use strict; use warnings; +use utf8; our $VERSION = '2.000000'; @@ -10,7 +11,7 @@ use Data::Dumper; use List::Util qw/first min max/; use Time::HiRes qw/time sleep/; use MIME::Base64 qw/decode_base64/; -use Sys::Hostname qw/hostname/; +use Scalar::Util qw/weaken/; use Clone qw/clone/; use Carp qw/croak confess/; @@ -20,8 +21,8 @@ use Test2::Util::Facets2Legacy qw/causes_fail/; use App::Yath::Schema::Config; use App::Yath::Schema::Util qw/format_duration 
is_invalid_subtest_name schema_config_from_settings/; -use App::Yath::Schema::UUID qw/gen_uuid uuid_inflate uuid_deflate uuid_mass_deflate/; -use Test2::Harness::Util::JSON qw/encode_json decode_json/; +use Test2::Harness::Util::UUID qw/gen_uuid/; +use Test2::Harness::Util::JSON qw/encode_ascii_json decode_json/; use App::Yath::Schema::ImportModes qw{ %MODES @@ -34,60 +35,91 @@ use App::Yath::Schema::ImportModes qw{ use Test2::Harness::Util::HashBase qw{ 'ABOUT', - 'ARRAY' => 'ARRAY', - 'BRIEF' => 'BRIEF', - 'CONTROL' => 'CONTROL', - 'CRITICAL' => 'CRITICAL', - 'DEBUG' => 'DEBUG', - 'DIAG' => 'DIAG', - 'ENCODING' => 'ENCODING', - 'ERROR' => 'ERROR', - 'FACETS' => 'FACETS', - 'FAIL' => 'FAIL', - 'FAILED' => 'FAILED', - 'FATAL' => 'FATAL', - 'HALT' => 'HALT', - 'HARNESS' => 'HARNESS', - 'KILL' => 'KILL', - 'NO PLAN' => 'NO PLAN', - 'PASS' => 'PASS', - 'PASSED' => 'PASSED', - 'PLAN' => 'PLAN', - 'REASON' => 'REASON', - 'SHOW' => 'SHOW', - 'SKIP ALL' => 'SKIP ALL', - 'SKIPPED' => 'SKIPPED', - 'STDERR' => 'STDERR', - 'TAGS' => 'TAGS', - 'TIMEOUT' => 'TIMEOUT', - 'VERSION' => 'VERSION', - 'WARN' => 'WARN', - 'WARNING' => 'WARNING', -); +sub schema { $_[0]->{+CONFIG}->schema } + +sub init { + my $self = shift; + + croak "'config' is a required attribute" + unless $self->{+CONFIG}; + + $self->{+DISCONNECT_RETRY} //= 15; + + my $run; + if ($run = $self->{+RUN}) { + $self->{+RUN_ID} = $run->run_id; + $self->{+MODE} = $MODES{$run->mode}; + + $self->retry_on_disconnect("update status for run '$self->{+RUN_ID}'" => sub { $run->update({status => 'pending'}) }); + } + else { + my $run_uuid = $self->{+RUN_UUID} // croak "either 'run' or 'run_uuid' must be provided"; + my $mode = $self->{+MODE} // croak "'mode' is a required attribute unless 'run' is specified"; + $self->{+MODE} = $MODES{$mode} // croak "Invalid mode '$mode'"; + + my $schema = $self->schema; + my $run = $schema->resultset('Run')->create({ + run_uuid => $run_uuid, + user_id => $self->user_id, + project_id => 
$self->project_id, + mode => $mode, + status => 'pending', + }); + + $self->{+RUN} = $run; + } + + $run->discard_changes; + + $self->{+PROJECT_ID} //= $run->project_id; + + confess "No project id?!?" unless $self->{+PROJECT_ID}; + + $self->{+ID_CACHE} = {}; + + $self->{+CLONE_FACETS} //= 1; + + $self->{+RESOURCE_ORD} //= 1; + + $self->{+COVERAGE} = []; + $self->{+RESOURCES} = []; + $self->{+REPORTING} = []; + $self->{+RUN_FIELDS} = []; + $self->{+TRY_FIELDS} = []; +} sub process_stdin { my $class = shift; @@ -96,7 +128,6 @@ sub process_stdin { return $class->process_handle(\*STDIN, $settings); } - sub process_handle { my $class = shift; my ($fh, $settings) = @_; @@ -113,6 +144,7 @@ sub process_lines { my ($settings, %params) = @_; my $done = 0; + my $idx = 1; my ($next, $last, $run); return sub { my $line = shift; @@ -124,10 +156,10 @@ sub process_lines { $last->(); } elsif ($next) { - $next->($line); + $next->($line, $idx++); } else { - ($next, $last, $run) = $class->_process_first_line($line, $settings, %params); + ($next, $last, $run) = $class->_process_first_line($line, $idx++, $settings, %params); } return $run; @@ -136,7 +168,7 @@ sub process_lines { sub _process_first_line { my $class = shift; - my ($line, $settings, %params) = @_; + my ($line, $idx, $settings, %params) = @_; my $run; my $config = schema_config_from_settings($settings); @@ -151,41 +183,54 @@ sub _process_first_line { my $f = $e->{facet_data}; my $self; - my $run_id; + my ($run_id, $run_uuid); if (my $runf = $f->{harness_run}) { - $run_id = uuid_inflate($runf->{run_id}) or die "No run-id?"; + $run_uuid = $runf->{run_id} or die "No run-uuid?"; my $pub = $settings->group('publish') or die "No publish settings"; + # Legacy logs + $runf->{settings} //= delete $f->{harness_settings}; + my $proj = $runf->{settings}->{yath}->{project} || $params{project} || $settings->yath->project or die "Project name could not be determined"; my $user = $settings->yath->user // $ENV{USER}; my $p = 
$config->schema->resultset('Project')->find_or_create({name => $proj}); my $u = $config->schema->resultset('User')->find_or_create({username => $user, role => 'user'}); - if (my $old = $config->schema->resultset('Run')->find({run_id => $run_id})) { - die "Run with id '$run_id' is already published. Use --publish-force to override it." unless $settings->publish->force; + if (my $old = $config->schema->resultset('Run')->find({run_uuid => $run_uuid})) { + die "Run with uuid '$run_uuid' is already published. Use --publish-force to override it." unless $settings->publish->force; $old->delete; } $run = $config->schema->resultset('Run')->create({ - run_id => $run_id, + run_uuid => $run_uuid, + canon => 1, mode => $pub->mode, - buffer => $pub->buffering, status => 'pending', - user_idx => $u->user_idx, - project_idx => $p->project_idx, + user_id => $u->user_id, + project_id => $p->project_id, }); + $run_id = $run->run_id; + $self = $class->new( - settings => $settings, - config => $config, - run => $run, - interval => $pub->flush_interval, + settings => $settings, + config => $config, + run => $run, + run_id => $run_id, + run_uuid => $run_uuid, + interval => $pub->flush_interval, + buffer_size => $pub->buffer_size, + user => $u, + user_id => $u->user_id, + project => $p, + project_id => $p->project_id, + clone_facets => 0, ); $self->start(); - $self->process_event($e, $f); + $self->process_event($e, $f, $idx); } else { die "First event did not contain run data"; @@ -206,14 +251,15 @@ sub _process_first_line { $SIG{TERM} = sub { $self->set_signal('TERM'); die "Caught Signal 'TERM'\n"; }; my @errors; + $self->{+ERRORS} = \@errors; return ( sub { - my $line = shift; + my ($line, $idx) = @_; return if eval { my $e = decode_json($line); - $self->process_event($e); + $self->process_event($e, undef, $idx); 1; }; my $err = $@; @@ -236,7 +282,7 @@ sub _process_first_line { sub retry_on_disconnect { my $self = shift; - my ($description, $callback) = @_; + my ($description, 
$callback, $on_exception) = @_; my ($attempt, $err); for my $i (0 .. ($self->{+DISCONNECT_RETRY} - 1)) { @@ -255,7 +301,9 @@ sub retry_on_disconnect { $self->schema->storage->ensure_connected(); } - die qq{Failed "$description" (attempt $attempt)\n$err\n}; + $on_exception->() if $on_exception; + print STDERR qq{Failed "$description" (attempt $attempt)\n$err\n}; + exit(0); } sub populate { @@ -264,10 +312,13 @@ sub populate { return unless $data && @$data; + local $ENV{DBIC_DT_SEARCH_OK} = 1; + $self->retry_on_disconnect( "Populate '$type'", sub { no warnings 'once'; + local $Data::Dumper::Sortkeys = 1; local $Data::Dumper::Freezer = 'T2HarnessFREEZE'; local *DateTime::T2HarnessFREEZE = sub { my $x = $_[0]->ymd . " " . $_[0]->hms; $_[0] = \$x }; my $rs = $self->schema->resultset($type); @@ -279,7 +330,6 @@ sub populate { warn "\nDuplicate found:\n====\n$err\n====\n\nPopulating '$type' 1 at a time.\n"; for my $item (@$data) { - uuid_mass_deflate($item); next if eval { $rs->create($item); 1 }; my $err = $@; @@ -296,362 +346,108 @@ sub populate { } sub format_stamp { - my $self = shift; + my $self = shift; my $stamp = shift; return undef unless $stamp; unless (ref($stamp)) { - $self->{+FIRST_STAMP} = $self->{+FIRST_STAMP} ? min($self->{+FIRST_STAMP}, $stamp) : $stamp; - $self->{+LAST_STAMP} = $self->{+LAST_STAMP} ? 
max($self->{+LAST_STAMP}, $stamp) : $stamp; + my $recalc = 0; + if (!$self->{+FIRST_STAMP} || $self->{+FIRST_STAMP} > $stamp) { + $self->{+FIRST_STAMP} = $stamp; + $recalc = 1; + } + + if (!$self->{+LAST_STAMP} || $self->{+LAST_STAMP} < $stamp) { + $self->{+LAST_STAMP} = $stamp; + $recalc = 1; + } + + $self->{+DURATION} = $self->{+LAST_STAMP} - $self->{+FIRST_STAMP} if $recalc; } return DateTime->from_epoch(epoch => $stamp, time_zone => 'local'); } -sub schema { $_[0]->{+CONFIG}->schema } - -sub init { +sub job0_uuid { my $self = shift; - - croak "'config' is a required attribute" - unless $self->{+CONFIG}; - - $self->{+DISCONNECT_RETRY} //= 15; - - my $run; - if ($run = $self->{+RUN}) { - $self->{+RUN_ID} = $run->run_id; - $self->{+MODE} = $MODES{$run->mode}; - - $self->retry_on_disconnect("update status for run '$self->{+RUN_ID}'" => sub { $run->update({status => 'pending'}) }); - } - else { - my $run_id = $self->{+RUN_ID} // croak "either 'run' or 'run_id' must be provided"; - my $mode = $self->{+MODE} // croak "'mode' is a required attribute unless 'run' is specified"; - $self->{+MODE} = $MODES{$mode} // croak "Invalid mode '$mode'"; - - my $schema = $self->schema; - my $run = $schema->resultset('Run')->create({ - run_id => $run_id, - user_idx => $self->user_idx, - project_idx => $self->project_idx, - mode => $mode, - status => 'pending', - }); - - $self->{+RUN} = $run; - } - - $run->discard_changes; - - $self->{+PROJECT_IDX} //= $run->project_idx; - - confess "No project idx?!?" 
unless $self->{+PROJECT_IDX}; - - $self->{+RUN_ID} = uuid_inflate($self->{+RUN_ID}); - $self->{+USER_IDX} = uuid_inflate($self->{+USER_IDX}); - $self->{+PROJECT_IDX} = uuid_inflate($self->{+PROJECT_IDX}); - - $self->{+ID_CACHE} = {}; - $self->{+COVERAGE} = []; - - $self->{+PASSED} = 0; - $self->{+FAILED} = 0; - - $self->{+JOB0_ID} = gen_uuid(); + return $self->{+JOB0_UUID} //= $self->job0->{job_uuid}; } -sub flush_all { +sub job0_id { my $self = shift; - - my $all = $self->{+JOBS}; - for my $jobs (values %$all) { - for my $job (values %$jobs) { - $job->{done} = 'end'; - $self->flush(job => $job); - } - } - - $self->flush_events(); - $self->flush_reporting(); + return $self->{+JOB0_ID} //= $self->job0->{job_id}; } -sub flush { +sub job0_try { my $self = shift; - my %params = @_; - - my $job = $params{job} or croak "job is required"; - my $res = $job->{result}; - - my $bmode = $self->run->buffer; - my $int = $self->{+INTERVAL}; - - # Always update if needed - $self->retry_on_disconnect("update run" => sub { $self->run->insert_or_update() }); - - my $flush = $params{force} ? 'force' : 0; - $flush ||= 'always' if $bmode eq 'none'; - $flush ||= 'diag' if $bmode eq 'diag' && $res->fail && $params{is_diag}; - $flush ||= 'job' if $job->{done}; - $flush ||= 'status' if $res->is_column_changed('status'); - $flush ||= 'fail' if $res->is_column_changed('fail'); - - if ($int && !$flush) { - my $last = $self->{+LAST_FLUSH}; - $flush = 'interval' if !$last || $int < time - $last; - } - - return "" unless $flush; - $self->{+LAST_FLUSH} = time; - - $self->retry_on_disconnect("update job result" => sub { $res->update() }); - - $self->flush_events(); - $self->flush_reporting(); - - if (my $done = $job->{done}) { - # Last time we need to write this, so clear it. - delete $self->{+JOBS}->{$job->{job_id}}->{$job->{job_try}}; - - unless ($res->status eq 'complete') { - my $status = $self->{+SIGNAL} ? 
'canceled' : 'broken'; - $status = 'canceled' if $done eq 'end'; - $res->status($status); - } - - # Normalize the fail/pass - my $fail = $res->fail ? 1 : 0; - $res->fail($fail); - - $res->normalize_to_mode(mode => $self->{+MODE}); - } - - $self->retry_on_disconnect("update job result" => sub { $res->update() }); - - return $flush; + return $self->{+JOB0_TRY} //= $self->get_job_try($self->job0, 0); } -sub flush_events { +sub job0 { my $self = shift; - - return if mode_check($self->{+MODE}, 'summary'); - - my @write; - - my $jobs = $self->{+JOBS}; - for my $tries (values %$jobs) { - for my $job (values %$tries) { - my $events = $job->{events}; - my $deferred = $job->{deffered_events} //= []; - - if (record_all_events(mode => $self->{+MODE}, job => $job->{result})) { - push @write => (@$deferred, @$events); - @$deferred = (); - } - else { - for my $event (@$events) { - if (event_in_mode(event => $event, record_all_event => 0, mode => $self->{+MODE}, job => $job->{result})) { - push @write => $event; - } - else { - push @$deferred => $event; - } - } - } - - @$events = (); - } - } - - return unless @write; - - my @dups; - my @write_bin; - my @write_facets; - my @write_orphans; - my @write_render; - - for my $e (@write) { - if (my $bins = delete $e->{binaries}) { - push @write_bin => @$bins; - } - - my $rendered = 0; - for my $t (qw/facets orphan/) { # Order matters - my $l = "${t}_line"; - my $list = $t eq 'facets' ? \@write_facets : \@write_orphans; - - if (my $data = delete $e->{$t}) { - $e->{"has_$t"} = 1; - my $line = delete $e->{$l}; - push @$list => { - event_id => $e->{event_id}, - line => $line, - data => $data, - }; - - # Prefer rendering from facets, but if we only have an orphan render that - next if $rendered; - - my $facets = decode_json($data); - - $e->{is_assert} = $facets->{assert} ? 
1 : 0; - - my $lines = App::Yath::Renderer::Default::Composer->render_super_verbose($facets) or next; - next unless @$lines; - - $rendered = 1; - - push @write_render => map { - my ($tag, $other_tag); - unless ($tag = $DB_TAGS{$_->[1]}) { - $tag = 'other'; - $other_tag = $_->[1]; - } - - { - event_id => $e->{event_id}, - job_key => $e->{job_key}, - - facet => $_->[0], - tag => $tag, - other_tag => $other_tag, - message => $_->[2] // "", - data => $_->[3] ? encode_json($_->[3]) : undef, - } - } @$lines; - } - else { - $e->{"has_$t"} = 0; - delete $e->{$l}; - } - } - } - - local $ENV{DBIC_DT_SEARCH_OK} = 1; - $self->populate(Event => \@write); - $self->populate(Render => \@write_render); - $self->populate(Facet => \@write_facets); - $self->populate(Orphan => \@write_orphans); - $self->populate(Binary => \@write_bin); + return $self->{+JOB0} //= $self->get_job($self->{+JOB0_UUID} //= gen_uuid()); } -sub flush_reporting { +sub user { my $self = shift; - return; - - my @write; - - my %mixin_run = ( - user_idx => $self->user_idx, - run_id => $self->{+RUN_ID}, - project_idx => $self->{+PROJECT_IDX}, - ); - - my $jobs = $self->{+JOBS}; - for my $tries (values %$jobs) { - for my $job (values %$tries) { - my $strip_event_id = 0; - - $strip_event_id = 1 unless record_subtest_events( - job => $job->{result}, - fail => $job->{result}->fail, - mode => $self->{+MODE}, - - is_harness_out => 0, - ); - - my %mixin = ( - %mixin_run, - job_try => $job->{job_try} // 0, - job_key => $job->{job_key}, - test_file_idx => $job->{result}->test_file_idx, - ); - - if (my $duration = $job->{duration}) { - my $fail = $job->{result}->fail // 0; - my $pass = $fail ? 0 : 1; - my $retry = $job->{result}->retry // 0; - my $abort = (defined($fail) || defined($retry)) ? 
0 : 1; - - push @write => { - duration => $duration, - pass => $pass, - fail => $fail, - abort => $abort, - retry => $retry, - %mixin, - }; - } - - my $reporting = delete $job->{reporting}; - - for my $rep (@$reporting) { - next unless defined $rep->{duration}; - next unless defined $rep->{subtest}; - - delete $rep->{event_id} if $strip_event_id; + return $self->{+USER} if $self->{+USER}; + return $self->{+USER} = $self->{+RUN}->user if $self->{+RUN}; - %$rep = ( - %mixin, - %$rep, - ); + my $schema = $self->schema; - push @write => $rep; - } - } + if (my $user_id = $self->{+USER_ID}) { + my $user = $schema->resultset('User')->find({user_id => $user_id}) or confess "Invalid user id: $user_id"; + return $self->{+USER} = $user; } - return unless @write; - - local $ENV{DBIC_DT_SEARCH_OK} = 1; + if (my $username = $self->{+USER_NAME}) { + my $user = $schema->resultset('User')->find({username => $username}) or confess "Invalid user name: $username"; + return $self->{+USER} = $user; + } - $self->populate(Reporting => \@write); + confess "No user, user_name, or user_id specified"; } -sub user { +sub user_id { my $self = shift; - - return $self->{+RUN}->user if $self->{+RUN}; - return $self->{+USER} if $self->{+USER}; - - my $user_idx = $self->{+USER_IDX} // confess "No user or user_idx specified"; - - my $schema = $self->schema; - my $user = $schema->resultset('User')->find({user_idx => $user_idx}); - return $user if $user; - confess "Invalid user_idx: $user_idx"; + return $self->{+USER_ID} //= $self->user->user_id; } -sub user_idx { +sub user_name { my $self = shift; - - return $self->{+RUN}->user_idx if $self->{+RUN}; - return $self->{+USER}->user_idx if $self->{+USER}; - return $self->{+USER_IDX} if $self->{+USER_IDX}; + return $self->{+USER_NAME} //= $self->user->username; } sub project { my $self = shift; - return $self->{+RUN}->project if $self->{+RUN}; return $self->{+PROJECT} if $self->{+PROJECT}; - - my $project_idx = $self->{+PROJECT_IDX} // confess "No project 
or project_idx specified"; + return $self->{+PROJECT} = $self->{+RUN}->project if $self->{+RUN}; my $schema = $self->schema; - my $project = $schema->resultset('Project')->find({project_idx => $project_idx}); - return $project if $project; - confess "Invalid project_idx: $project_idx"; + + if (my $project_id = $self->{+PROJECT_ID}) { + my $project = $schema->resultset('Project')->find({project_id => $project_id}) or confess "Invalid project id: $project_id"; + return $self->{+PROJECT} = $project; + } + + if (my $name = $self->{+PROJECT_NAME}) { + my $project = $schema->resultset('Project')->find({projectname => $name}) or confess "Invalid project name: $name"; + return $self->{+PROJECT} = $project; + } + + confess "No project, project_name, or project_id specified"; } -sub project_idx { +sub project_id { my $self = shift; + return $self->{+PROJECT_ID} //= $self->project->project_id; +} - return $self->{+RUN}->project_idx if $self->{+RUN}; - return $self->{+PROJECT}->project_idx if $self->{+PROJECT}; - return $self->{+PROJECT_IDX} if $self->{+PROJECT_IDX}; +sub project_name { + my $self = shift; + return $self->{+PROJECT_NAME} //= $self->project->name; } sub start { @@ -665,347 +461,450 @@ sub start { sub get_job { my $self = shift; - my (%params) = @_; + my ($job_uuid, %params) = @_; my $is_harness_out = 0; - my $job_id = $params{job_id}; - if (!$job_id || $job_id eq '0') { - $job_id = $self->{+JOB0_ID}; + my $test_file_id; + + if (!$job_uuid || $job_uuid eq '0' || $job_uuid eq $self->{+JOB0_UUID}) { + $job_uuid = $self->job0_uuid; $is_harness_out = 1; + $test_file_id = $self->get_test_file_id('HARNESS INTERNAL LOG'); } - $job_id = uuid_inflate($job_id); + my $run_id = $self->{+RUN}->run_id; my $job_try = $params{job_try} // 0; - my $job = $self->{+JOBS}->{$job_id}->{$job_try}; - return $job if $job; + if (my $job = $self->{+JOBS}->{$job_uuid}) { + return $job; + } + + my $result; + + $test_file_id //= $self->{+FILE_CACHE}->{$job_uuid}; - my $key = gen_uuid(); + 
for my $spec ($params{queue}, $params{job_spec}) { + last if $test_file_id; + next unless $spec; - my $test_file_idx = undef; - if (my $queue = $params{queue}) { - my $file = $queue->{rel_file} // $queue->{file}; - $test_file_idx = $self->get_test_file_idx($file) if $file; - $self->{+FILE_CACHE}->{$job_id} //= $test_file_idx if $test_file_idx; + my $file = $spec->{rel_file} // $spec->{file}; + $test_file_id = $self->get_test_file_id($file) if $file; + $self->{+FILE_CACHE}->{$job_uuid} = $test_file_id; } - $test_file_idx //= $self->{+FILE_CACHE}->{$job_id}; + die "Could not find a test file name or id" unless $test_file_id; - my $result; $self->retry_on_disconnect( "vivify job" => sub { $result = $self->schema->resultset('Job')->update_or_create({ - status => 'pending', - job_key => $key, - job_id => $job_id, - job_try => $job_try, + job_uuid => $job_uuid, + run_id => $run_id, + test_file_id => $test_file_id, is_harness_out => $is_harness_out, - run_id => $self->{+RUN}->run_id, - fail_count => 0, - pass_count => 0, - test_file_idx => $test_file_idx, - - $is_harness_out ? (name => "HARNESS INTERNAL LOG") : (), + passed => undef, + failed => 0, }); } ); - # In case we are resuming. - $self->retry_on_disconnect("delete old events" => sub { $result->events->delete_all() }); - - # Prevent duplicate coverage when --retry is used - if ($job_try) { - if ($App::Yath::Schema::LOADED =~ m/(mysql|percona|mariadb)/i) { - my $schema = $self->schema; - $schema->storage->connected; # Make sure we are connected - my $dbh = $schema->storage->dbh; - - my $query = <<" EOT"; - DELETE coverage - FROM coverage - JOIN jobs USING(job_key) - WHERE job_id = ? 
- EOT - - my $sth = $dbh->prepare($query); - $sth->execute($job_id) or die $sth->errstr; - } - else { - $self->retry_on_disconnect( - "delete old coverage" => sub { - $self->schema->resultset('Coverage')->search({'job.job_id' => $job_id}, {join => 'job'})->delete; - } - ); - } - } + my $job_id = $result->job_id; - if (my $old = $self->{+JOBS}->{$job_id}->{$job_try - 1}) { - $self->{+UNCOVER}->{$old->{job_key}}++; - } + my $job = { + run_id => $run_id, + job_id => $job_id, + job_uuid => $job_uuid, + test_file_id => $test_file_id, - $job = { - job_key => $key, - job_id => $job_id, - job_try => $job_try, + is_harness_out => $is_harness_out, - event_ord => 1, - events => [], - orphans => {}, - reporting => [], + tries => [], - result => $result, + result => $result, }; - return $self->{+JOBS}->{$job_id}->{$job_try} = $job; + return $self->{+JOBS}->{$job_uuid} = $job; } -sub process_event { +sub get_job_try { my $self = shift; - my ($event, $f, %params) = @_; - - $f //= $event->{facet_data}; - $f = $f ? 
clone($f) : {}; + my ($job, $try_ord) = @_; - $self->start unless $self->{+RUNNING}; + $try_ord //= 0; - if (my $res = delete $f->{db_resources}) { - $self->insert_resources($res); - return unless keys %$f; + if (my $try = $job->{tries}->[$try_ord]) { + return $try; } - my $job = $params{job} // $self->get_job(%{$f->{harness} // {}}, queue => $f->{harness_job_queued}); + my $result; + $self->retry_on_disconnect( + "vivify job try" => sub { + $result = $self->schema->resultset('JobTry')->update_or_create({ + job_id => $job->{job_id}, + job_try_ord => $try_ord, + }); + } + ); + + my $try = { + job_try_id => $result->job_try_id, + job_try_ord => $try_ord, + result => $result, - my $e = $self->_process_event($event, $f, %params, job => $job); - clean($e); + orphan_events => {}, + ready_events => [], - if (my $od = $e->{orphan}) { - $job->{orphans}->{$e->{event_id}} = $e; - } - else { - if (my $o = delete $job->{orphans}->{$e->{event_id}}) { - $e->{orphan} = $o->{orphan}; - $e->{orphan_line} = $o->{orphan_line} if defined $o->{orphan_line}; - $e->{stamp} //= $o->{stamp}; - } - push @{$job->{events}} => $e; - } + job => $job, + run_id => $job->{run_id}, + job_id => $job->{job_id}, + job_uuid => $job->{job_uuid}, + }; - $self->flush(job => $job, is_diag => $e->{is_diag}); + weaken($try->{job}); - return; + return $job->{tries}->[$try_ord] = $try } -sub host { - my $self = shift; - return $self->{+HOST} //= $self->{+CONFIG}->schema->resultset('Host')->find_or_create({hostname => hostname()}); +sub clean { + my ($s) = @_; + return 0 unless defined $s; + my $r = ref($_[0]) or return 1; + if ($r eq 'HASH') { return clean_hash(@_) } + elsif ($r eq 'ARRAY') { return clean_array(@_) } + return 1; } -sub insert_resources { - my $self = shift; - my ($res) = @_; +sub clean_hash { + my ($s) = @_; + my $vals = 0; - my $stamp = $res->{stamp}; - my $items = $res->{items}; - my $batch_id = $res->{batch_id}; + for my $key (keys %$s) { + my $v = clean($s->{$key}); + if ($v) { $vals++ } + 
else { delete $s->{$key} } + } - my $config = $self->{+CONFIG}; + $_[0] = undef unless $vals; - my $run_id = $self->run->run_id; - my $host_idx = $self->host->host_idx; + return $vals; +} - my $res_rs = $config->schema->resultset('Resource'); - my $batch_rs = $config->schema->resultset('ResourceBatch'); +sub clean_array { + my ($s) = @_; - my $dt_stamp = DateTime->from_epoch(epoch => $stamp, time_zone => 'local'); + @$s = grep { clean($_) } @$s; - my $batch = $batch_rs->create({ - resource_batch_idx => $batch_id, - run_id => uuid_inflate($run_id), - host_idx => $host_idx, - stamp => $dt_stamp, - }); + return @$s if @$s; - $res_rs->populate($items); + $_[0] = undef; + return 0; } -sub finish { +sub _get__id { my $self = shift; - my (@errors) = @_; + my ($type, $id_field, $field, $id) = @_; - $self->flush_all(); + return undef unless $id; - my $run = $self->run; + return $self->{+ID_CACHE}->{$type}->{$id_field}->{$field}->{$id} + if $self->{+ID_CACHE}->{$type}->{$id_field}->{$field}->{$id}; - my $status; - my $dur_stat; - my $aborted = 0; + my $spec = {$field => $id}; - if (@errors) { - my $error = join "\n" => @errors; - $status = {status => 'broken', error => $error}; - $dur_stat = 'abort'; - } - else { - my $stat; - if ($self->{+SIGNAL}) { - $stat = 'canceled'; - $dur_stat = 'abort'; - $aborted = 1; - } - else { - $stat = 'complete'; - $dur_stat = $self->{+FAILED} ? 
'fail' : 'pass'; + # id fields are always auto-increment, uuid is always uuid + $spec->{$id_field} = gen_uuid() if $id_field =~ m/_uuid$/; + + my $result = $self->schema->resultset($type)->find_or_create($spec); + + return $self->{+ID_CACHE}->{$type}->{$id_field}->{$field}->{$id} = $result->$id_field; +} + +sub get_test_file_id { + my $self = shift; + my ($file) = @_; + + return undef unless $file; + + my @parts = split /(\/t2?\/)/, $file; + my $new; + while (my $part = shift @parts) { + if ($part =~ m{/(t2?)/} && !$new) { + $new = "$1/"; + next; } - $status = {status => $stat, passed => $self->{+PASSED}, failed => $self->{+FAILED}, retried => $self->{+RETRIED}}; + next unless $new; + + $new .= $part; } - if ($self->{+FIRST_STAMP} && $self->{+LAST_STAMP}) { - my $duration = $self->{+LAST_STAMP} - $self->{+FIRST_STAMP}; - $status->{duration} = format_duration($duration); + $file = $new if $new; - $self->retry_on_disconnect("insert duration row" => sub { - my $fail = $aborted ? 0 : $self->{+FAILED} ? 1 : 0; - my $pass = ($fail || $aborted) ? 
0 : 1; - my $row = { - run_id => $self->{+RUN_ID}, - user_idx => $self->user_idx, - project_idx => $self->project_idx, - duration => $duration, - retry => 0, - pass => $pass, - fail => $fail, - abort => $aborted, - }; - $self->schema->resultset('Reporting')->create($row); - }); - } + $file =~ s{^\.+/+}{}; - $self->retry_on_disconnect("update run status" => sub { $run->update($status) }); + return $self->_get__id('TestFile' => 'test_file_id', filename => $file); +} - return $status; +sub _pull_facet_binaries { + my $self = shift; + my ($f, $params) = @_; + + my $bin = $f->{binary} or return undef; + return undef unless @$bin; + + my $e_uuid = $params->{e_uuid}; + my @binaries; + + for my $file (@$bin) { + my $data = delete $file->{data}; + $file->{data} = 'Extracted to the "binaries" table'; + + push @binaries => { + event_uuid => $e_uuid, + filename => $file->{filename}, + description => $file->{details}, + data => decode_base64($data), + is_image => $file->{is_image} // $file->{filename} =~ m/\.(a?png|gif|jpe?g|svg|bmp|ico)$/ ? 
1 : 0, + }; + } + + return undef unless @binaries; + return \@binaries; } -sub _process_event { +sub _pull_facet_resource { my $self = shift; - my ($event, $f, %params) = @_; - my $job = $params{job}; + my ($f, $params) = @_; + + my $resf = $f->{resource_state} or return undef; + my $data = delete $resf->{data}; + $resf->{data} = 'Extracted to the "resources" table'; + + my $ord = $self->{+RESOURCE_ORD}++; + my $mod = $resf->{module}; + my $host = $resf->{host}; + my $e_uuid = $params->{e_uuid}; + my $stamp = $self->format_stamp($f->{harness}->{stamp}); + + my $resource_type_id = $self->_get__id(ResourceType => 'resource_type_id', name => $mod) or die "Could not get resource_type id"; + my $host_id = $self->_get__id(Host => 'host_id', hostname => $host) or die "Could not get host id"; + + push @{$self->{+RESOURCES} //= []} => { + run_id => $self->{+RUN_ID}, + host_id => $host_id, + resource_type_id => $resource_type_id, + resource_ord => $ord, + event_uuid => $e_uuid, + data => encode_ascii_json($data), + stamp => $stamp, + }; +} - my $ord = $job->{event_ord}++; +sub _pull_facet_run_coverage { + my $self = shift; + my ($f, $params) = @_; + my $c = $self->_pull_facet__coverage($f, 'run', $params); - my $harness = $f->{harness} // {}; - my $trace = $f->{trace} // {}; + my $files = $c->{files}; + my $meta = $c->{testmeta}; - my $e_id = uuid_inflate($harness->{event_id} // $event->{event_id} // die "No event id!"); - my $nested = $f->{hubs}->[0]->{nested} || 0; + my $try = $params->{try}; + my $e_uuid = $params->{e_uuid}; - my @binaries; - if ($f->{binary} && @{$f->{binary}}) { - for my $file (@{$f->{binary}}) { - my $data = delete $file->{data}; - $file->{data} = 'removed'; - - push @binaries => { - event_id => $e_id, - filename => $file->{filename}, - description => $file->{details}, - data => decode_base64($data), - is_image => $file->{is_image} // $file->{filename} =~ m/\.(a?png|gif|jpe?g|svg|bmp|ico)$/ ? 
1 : 0, - }; + for my $source (keys %$files) { + my $subs = $files->{$source}; + for my $sub (keys %$subs) { + my $tests = $subs->{$sub}; + for my $test (keys %$tests) { + push @{$self->{+COVERAGE} //= []} => $self->_pre_process_coverage( + event_uuid => $e_uuid, + test => $test, + source => $source, + sub => $sub, + manager => $meta->{$test}->{manager}, + meta => $tests->{$test} + ); + } } } - my $fail = causes_fail($f) ? 1 : 0; - - my $is_diag = $fail; - $is_diag ||= 1 if $f->{errors} && @{$f->{errors}}; - $is_diag ||= 1 if $f->{assert} && !($f->{assert}->{pass} || $f->{amnesty}); - $is_diag ||= 1 if $f->{info} && first { $_->{debug} || $_->{important} } @{$f->{info}}; - $is_diag //= 0; + return; +} - my $is_harness = (first { substr($_, 0, 8) eq 'harness_' } keys %$f) ? 1 : 0; +sub _pre_process_coverage { + my $self = shift; + my %params = @_; - my $is_time = $f->{harness_job_end} ? ($f->{harness_job_end}->{times} ? 1 : 0) : 0; + my $e_uuid = $params{event_uuid}; + my $test_id = $self->get_test_file_id($params{test}) or confess("Could not get test id (for '$params{test}')"); - my $is_subtest = $f->{parent} ? 1 : 0; + my $source_id = $self->_get__id(SourceFile => 'source_file_id', filename => $params{source}) or die "Could not get source id"; + my $sub_id = $self->_get__id(SourceSub => 'source_sub_id', subname => $params{sub}) or die "Could not get sub id"; + my $manager_id = $self->_get__id(CoverageManager => 'coverage_manager_id', package => $params{manager}); - my $e = { - event_id => $e_id, - event_ord => $ord, - nested => $nested, - is_subtest => $is_subtest, - is_diag => $is_diag, - is_harness => $is_harness, - is_time => $is_time, - causes_fail => $fail, - trace_id => $trace->{uuid}, - job_key => $job->{job_key}, - stamp => $self->format_stamp($harness->{stamp} || $event->{stamp} || $params{stamp}), - binaries => \@binaries, - has_binary => @binaries ? 
1 : 0, + return { + run_id => $self->{+RUN_ID}, + event_uuid => $e_uuid, + test_file_id => $test_id, + source_file_id => $source_id, + source_sub_id => $sub_id, + coverage_manager_id => $manager_id, + + $manager_id ? (metadata => encode_ascii_json($params{meta})) : (), + $params{job_try_id} ? (job_try_id => $params{job_try_id}) : (), }; +} - my $orphan = $nested ? 1 : 0; - if (my $p = $params{parent_id}) { - $e->{parent_id} ||= $p; - $orphan = 0; - } +sub _pull_facet_job_try_coverage { + my $self = shift; + my ($f, $params) = @_; + my $c = $self->_pull_facet__coverage($f, 'job', $params); - if ($orphan) { - clean($f); + my $job = $params->{job}; + my $try = $params->{try}; + my $e_uuid = $params->{e_uuid}; - if ($f->{parent} && $f->{parent}->{children}) { - $f->{parent}->{children} = "Removed"; + for my $source (keys %{$c->{files}}) { + my $subs = $c->{files}->{$source}; + for my $sub (keys %$subs) { + my $test = $c->{test} // $job->{result}->file; + + push @{$self->{+COVERAGE} //= []} => $self->_pre_process_coverage( + event_uuid => $e_uuid, + job_try_id => $try->{job_try_id}, + test => $test, + source => $source, + sub => $sub, + manager => $c->{manager}, + meta => $subs->{$sub}, + ); } + } + + return; +} + +sub _pull_facet__coverage { + my $self = shift; + my ($f, $type, $params) = @_; + my $e_uuid = $params->{e_uuid}; - $e->{orphan} = encode_json($f); - $e->{orphan_line} = $params{line} if $params{line}; + my $c = delete $f->{"${type}_coverage"} or return undef; + + $f->{"${type}_coverage"} = 'Extracted to the "coverage" table'; + return $c; +} + +sub _pull_facet_children { + my $self = shift; + my ($f, $params) = @_; + + my $p = $f->{parent} or return undef; + my $c = $p->{children} or return undef; + return undef unless @$c; + $f->{parent}->{children} = 'Extracted to populate "events" table'; + + return $c; +} + +sub _pull_facet__fields { + my $self = shift; + my ($f, $type, $params) = @_; + + my @fields; + if (my $fs = $f->{"${type}_fields"}) { + push 
@fields => @{$fs}; + $f->{"${type}_fields"} = qq{Extracted to populate "${type}_fields" table}; } - else { - if (my $fields = $f->{run_fields}) { - $self->add_run_fields($fields); + + if (my $fs = $f->{"harness_${type}_fields"}) { + push @fields => @{$fs}; + $f->{"harness_${type}_fields"} = qq{Extracted to populate "${type}_fields" table}; + } + + if (my $p = $f->{"harness_${type}"}) { + if (my $fs = $p->{fields}) { + push @fields => @{$fs}; + $p->{"fields"} = qq{Extracted to populate "${type}_fields" table}; } - if (my $job_coverage = $f->{job_coverage}) { - $self->add_job_coverage($job, $job_coverage); - $f->{job_coverage} = "Removed, used to populate the job_coverage table"; + if (my $fs = $p->{"${type}_fields"}) { + push @fields => @{$fs}; + $p->{"${type}_fields"} = qq{Extracted to populate "${type}_fields" table}; } - if (my $run_coverage = $f->{run_coverage}) { - $f->{run_coverage} = "Removed, used to populate the run_coverage table"; - $self->add_run_coverage($run_coverage); + if (my $fs = $p->{"harness_${type}_fields"}) { + push @fields => @{$fs}; + $p->{"harness_${type}_fields"} = qq{Extracted to populate "${type}_fields" table}; } + } - if ($f->{parent} && $f->{parent}->{children}) { - $self->process_event({}, $_, job => $job, parent_id => $e_id, line => $params{line}) for @{$f->{parent}->{children}}; - $f->{parent}->{children} = "Removed, used to populate events table"; + return undef unless @fields; - $self->add_subtest_duration($job, $e, $f) unless $nested; - } + my %mixin = $type eq 'run' ? 
(run_id => $self->{+RUN_ID}) : (job_try_id => $params->{try}->{job_try_id}); + my $e_uuid = $params->{e_uuid}; - unless ($nested) { - my $res = $job->{result}; - if ($fail) { - $res->fail_count($res->fail_count + 1); - $res->fail(1); - } - $res->pass_count($res->pass_count + 1) if $f->{assert} && !$fail; + for my $field (@fields) { + my $name = $field->{name} || 'unknown'; - $self->update_other($job, $f) if $e->{is_harness}; - } + my $row = { + %mixin, + event_uuid => $e_uuid, + name => $name, + details => $field->{details} || $name, + }; + + $row->{raw} = $field->{raw} if $field->{raw}; + $row->{link} = $field->{link} if $field->{link}; + + $row->{data} = encode_ascii_json($field->{data}) if $field->{data}; - clean($f); - $e->{facets} = encode_json($f); - $e->{facets_line} = $params{line} if $params{line}; + if ($type eq 'run') { + push @{$self->{+RUN_FIELDS} //= []} => $row; + } + else { + push @{$self->{+TRY_FIELDS} //= []} => $row; + } } +} + +sub _pull_facet__params { + my $self = shift; + my ($f, $type, $params) = @_; + + my $p = $f->{"harness_${type}"} or return undef; + $f->{"harness_${type}"} = qq{Extracted to populate "${type}.parameters" column}; - return $e; + return $p; } -sub add_subtest_duration { +sub _pull_facet_run_fields { my $self = shift; - my ($job, $e, $f) = @_; + my ($f, $params) = @_; + return $self->_pull_facet__fields($f, 'run', $params); +} + +sub _pull_facet_run_params { + my $self = shift; + my ($f, $params) = @_; + return $self->_pull_facet__params($f, 'run', $params); +} + +sub _pull_facet_job_try_fields { + my $self = shift; + my ($f, $params) = @_; + return $self->_pull_facet__fields($f, 'job', $params); +} + +sub _pull_facet_job_try_params { + my $self = shift; + my ($f, $params) = @_; + return $self->_pull_facet__params($f, 'job', $params); +} + +sub _pull_facet_reporting { + my $self = shift; + my ($f, $params) = @_; return if $f->{hubs}->[0]->{nested}; @@ -1018,376 +917,729 @@ sub add_subtest_duration { my $stop = 
$parent->{stop_stamp} // return; my $duration = $stop - $start // return; - push @{$job->{reporting}} => { - duration => $duration, + my $try = $params->{try}; + my $job = $params->{job}; + + my $test_file_id = $job->{is_harness_out} ? undef : $job->{test_file_id}; + + push @{$self->{+REPORTING} //= []} => { + run_id => $self->run_id, + user_id => $self->user_id, + project_id => $self->project_id, + + job_try_id => $try->{job_try_id}, + job_try => $try->{job_try_ord}, + test_file_id => $test_file_id, + subtest => $st, - event_id => $e->{event_id}, + duration => $duration, + abort => 0, retry => 0, + $assert->{pass} ? (pass => 1, fail => 0) : (fail => 1, pass => 0), }; } -sub add_job_coverage { +sub _pull_facet_run_updates { my $self = shift; - my ($job, $job_coverage) = @_; + my ($f, $params) = @_; - my $job_id = $job->{job_id}; - my $job_try = $job->{job_try} // 0; + my $delta = $self->{+RUN_DELTA} //= {}; - # Do not add coverage if a retry has already started. Events could be out of order. 
- return if $self->{+JOBS}->{$job_id}->{$job_try + 1}; - return if $self->{+UNCOVER} && $self->{+UNCOVER}->{$job->{job_key}}; + $delta->{'=has_coverage'} = 1 if $f->{job_coverage} || $f->{run_coverage}; - for my $source (keys %{$job_coverage->{files}}) { - my $subs = $job_coverage->{files}->{$source}; - for my $sub (keys %$subs) { - my $test = $job_coverage->{test} // $job->{result}->file; - - $self->_add_coverage( - job_key => $job->{job_key}, - test => $test, - source => $source, - sub => $sub, - manager => $job_coverage->{manager}, - meta => $subs->{$sub}, - ); + $delta->{'=has_resources'} = 1 if $f->{resource_state}; + + if (my $run_params = $self->_pull_facet_run_params($f, $params)) { + $delta->{'=parameters'} = encode_ascii_json($run_params); + + my $settings = $run_params->{settings}; + + if (my $r = $settings->{resource}) { + if (my $j = $r->{slots}) { + $delta->{'=concurrency_j'} = $j; + } + + if (my $x = $r->{job_slots}) { + $delta->{'=concurrency_x'} = $x; + } + } + elsif (my $r2 = $settings->{runner}) { #Legacy logs + if (my $j = $r2->{job_count}) { + $delta->{'=concurrency_j'} = $j; + } + } + } + + if (my $job_exit = $f->{harness_job_end}) { + if ($job_exit->{fail}) { + if ($job_exit->{retry}) { + $delta->{'Δto_retry'} += 1; + } + else { + $delta->{'Δfailed'} += 1; + } + } + else { + $delta->{'Δpassed'} += 1; + } + + if ($params->{try}->{job_try_ord}) { + $delta->{'Δto_retry'} -= 1; + $delta->{'Δretried'} += 1; + } + } + + if (my $dur = $self->{+DURATION}) { + unless ($params->{run}->{duration} && $dur <= $params->{run}->{duration}) { + $delta->{'=duration'} = $dur; + $params->{run}->{duration} = $dur; } } - $self->flush_coverage; + return; } -sub add_run_coverage { +sub _pull_facet_job_updates { my $self = shift; - my ($run_coverage) = @_; + my ($f, $params) = @_; - my $files = $run_coverage->{files}; - my $meta = $run_coverage->{testmeta}; + my $job_exit = $f->{harness_job_end} or return undef; - for my $source (keys %$files) { - my $subs = 
$files->{$source}; - for my $sub (keys %$subs) { - my $tests = $subs->{$sub}; - for my $test (keys %$tests) { - $self->_add_coverage( - test => $test, - source => $source, - sub => $sub, - manager => $meta->{$test}->{manager}, - meta => $tests->{$test} - ); - } - } + my $delta = $params->{job}->{delta} //= {}; + + if ($job_exit->{fail}) { + $delta->{'=failed'} = 1; + } + else { + $delta->{'=passed'} = 1; } - $self->flush_coverage; + return; } -sub _add_coverage { +sub _pull_facet_job_try_updates { my $self = shift; - my %params = @_; + my ($f, $params) = @_; - my $test_id = $self->get_test_file_idx($params{test}) or confess("Could not get test id (for '$params{test}')"); + return undef if $params->{nested}; - my $source_id = $self->_get__id(SourceFile => 'source_file_idx', filename => $params{source}) or die "Could not get source id"; - my $sub_id = $self->_get__id(SourceSub => 'source_sub_idx', subname => $params{sub}) or die "Could not get sub id"; - my $manager_id = $self->_get__id(CoverageManager => 'coverage_manager_idx', package => $params{manager}); + my $delta = $params->{try}->{delta} //= {}; - my $meta = $manager_id ? 
encode_json($params{meta}) : undef; + if (my $job_params = $self->_pull_facet_job_try_params($f, $params)) { + $delta->{'=parameters'} = encode_ascii_json($job_params); + } - my $coverage = $self->{+COVERAGE} //= []; + if ($params->{causes_fail}) { + $delta->{'=fail_count'} += 1; + } + elsif (my $assert = $f->{assert}) { + $delta->{'=pass_count'} += 1; + } - push @$coverage => { - run_id => $self->{+RUN_ID}, - test_file_idx => $test_id, - source_file_idx => $source_id, - source_sub_idx => $sub_id, - coverage_manager_idx => $manager_id, - metadata => $meta, - job_key => $params{job_key}, - }; -} + if (my $job_start = $f->{harness_job_start}) { + $delta->{'=start'} = $self->format_stamp($job_start->{stamp}); + } -sub flush_coverage { - my $self = shift; + if (my $job_launch = $f->{harness_job_launch}) { + $delta->{'=launch'} = $self->format_stamp($job_launch->{stamp}); + $delta->{'=status'} = 'running'; + } + + if (my $job_exit = $f->{harness_job_exit}) { + $delta->{'=exit_code'} = $job_exit->{exit} if $job_exit->{exit}; + + $delta->{'=stdout'} = clean_output($job_exit->{stdout}) if $job_exit->{stdout}; + $delta->{'=stderr'} = clean_output($job_exit->{stderr}) if $job_exit->{stderr}; + } + + if (my $job_end = $f->{harness_job_end}) { + my $try = $params->{try}; + my $job = $params->{job}; + + my $report = { + run_id => $self->run_id, + user_id => $self->user_id, + project_id => $self->project_id, + + job_try_id => $try->{job_try_id}, + job_try => $try->{job_try_ord}, + test_file_id => $job->{test_file_id}, + + abort => $self->{+SIGNAL} ? 1 : 0, + }; + + if ($job_end->{fail}) { + $delta->{'=fail'} += 1; + $delta->{'=retry'} += $job_end->{retry} ? 1 : 0; + + $report->{fail} = 1; + $report->{pass} = 0; + $report->{retry} = $job_end->{retry} ? 
1 : 0; + } + else { + $delta->{'=retry'} = 0; + + $report->{fail} = 0; + $report->{pass} = 1; + $report->{retry} = 0; + } - my $coverage = $self->{+COVERAGE} or return; - return unless @$coverage; + my $duration = 0; + $duration = $job_end->{times}->{totals}->{total} if $job_end->{times} && $job_end->{times}->{totals} && $job_end->{times}->{totals}->{total}; - $self->retry_on_disconnect("update has_coverage" => sub { $self->{+RUN}->update({has_coverage => 1}) }) - unless $self->{+RUN}->has_coverage; + $delta->{'=ended'} = $self->format_stamp($job_end->{stamp}); + $delta->{'=status'} = 'complete'; + $delta->{'=duration'} = $duration if $duration; - $self->populate(Coverage => $coverage); + $params->{try}->{done} = 1; - @$coverage = (); + $report->{duration} = $duration // 0; + + push @{$self->{+REPORTING} //= []} => $report; + } return; } -sub _get__id { - my $self = shift; - my ($type, $id_field, $field, $id) = @_; - my $out = $self->_get___id(@_); - return $out unless defined $out; - return $out if $id_field =~ m/_idx$/; - return uuid_inflate($out); +sub clean_output { + my $text = shift; + + return undef unless defined $text; + $text =~ s/^T2-HARNESS-ESYNC: \d+\n//gm; + chomp($text); + + return undef unless length($text); + return $text; } -sub _get___id { +sub process_event { my $self = shift; - my ($type, $id_field, $field, $id) = @_; + my ($event, $f, $idx, @oops) = @_; - return undef unless $id; + croak "Too many arguments" if @oops; - return $self->{+ID_CACHE}->{$type}->{$id_field}->{$field}->{$id} - if $self->{+ID_CACHE}->{$type}->{$id_field}->{$field}->{$id}; + $f //= $event->{facet_data} // die "No facet data!"; + $f = clone($f) if $self->{+CLONE_FACETS}; - my $spec = {$field => $id}; + my $harness = $f->{harness} or die "No 'harness' facet!"; - # idx fields are always auto-increment, otherwise the id is uuid - $spec->{$id_field} = gen_uuid() unless $id_field =~ m/_idx$/; + my $job = $self->get_job($harness->{job_id}, queue => $f->{harness_job_queued}, 
job_spec => $f->{harness_job}); + my $try = $self->get_job_try($job, $harness->{job_try}); - my $result = $self->schema->resultset($type)->find_or_create($spec); + my $sdx = 1; - return $self->{+ID_CACHE}->{$type}->{$id_field}->{$field}->{$id} = $result->$id_field; + my $ok = eval { + my @todo = ([$f, event => $event]); + while (my $set = shift @todo) { + my ($sf, %sp) = @$set; + push @todo => $self->_process_event($sf, %sp, job => $job, try => $try, idx => $idx, sdx => $sdx++); + } + + 1; + }; + my $err = $@; + + $self->flush($job, $try); + + die $err unless $ok; + + return; } -sub get_test_file_idx { +sub validate_uuid { my $self = shift; - my ($file) = @_; + my ($uuid) = @_; - return undef unless $file; + confess "No uuid provided" unless $uuid; + confess "UUID '$uuid' Contains invalid characters ($1)" if $uuid =~ m/([^a-fA-F0-9\-])/; - return $self->_get__id('TestFile' => 'test_file_idx', filename => $file); + return 1; } -sub add_io_streams { +sub _process_event { my $self = shift; - my ($job, @streams) = @_; + my ($f, %params) = @_; - my $job_key = $job->job_key; + my ($e_uuid, $formatted_stamp); + if (my $harness = $f->{harness}) { + $e_uuid = $harness->{event_id} // die "No event id!"; + $formatted_stamp = $harness->{stamp} ? 
$self->format_stamp($harness->{stamp}) : undef; + } + else { + $e_uuid = $f->{about}->{uuid} if $f->{about} && $f->{about}->{uuid}; + $e_uuid //= gen_uuid(); + } - my @write; - for my $s (@streams) { - my ($stream, $output) = @$s; - $output = clean_output($output); - next unless defined $output && length($output); + my $rendered = App::Yath::Renderer::Default::Composer->render_super_verbose($f); + $rendered = undef unless $rendered && @$rendered; - push @write => { - job_key => $job_key, - stream => uc($stream), - output => $output, - }; + my $job = $params{job}; + my $try = $params{try}; + my $idx = $params{idx}; + my $sdx = $params{sdx}; + + my $trace = $f->{trace} // {}; + + die "An event cannot be its own parent" if $params{parent} && $e_uuid eq $params{parent}; + + # Since we directly insert this into a query later we need to make absolutely sure it is a UUID and not any kind of injection. + $self->validate_uuid($e_uuid); + + my $fail = causes_fail($f) ? 1 : 0; + my $is_diag = $fail; + $is_diag ||= 1 if $f->{errors} && @{$f->{errors}}; + $is_diag ||= 1 if $f->{assert} && !($f->{assert}->{pass} || $f->{amnesty}); + $is_diag ||= 1 if $f->{info} && first { $_->{debug} || $_->{important} } @{$f->{info}}; + $is_diag //= 0; + + my $is_time = $f->{harness_job_end} ? ($f->{harness_job_end}->{times} ? 1 : 0) : 0; + my $is_harness = (first { substr($_, 0, 8) eq 'harness_' } keys %$f) ? 1 : 0; + my $is_subtest = $f->{parent} ? 
1 : 0; + + my $nested = $f->{hubs}->[0]->{nested} || 0; + + my $pull_params = { + %params, + causes_fail => $fail, + is_diag => $is_diag, + e_uuid => $e_uuid, + is_time => $is_time, + is_harness => $is_harness, + is_subtest => $is_subtest, + nested => $nested, + }; + + $self->_pull_facet_job_updates($f, $pull_params); + $self->_pull_facet_job_try_fields($f, $pull_params); + $self->_pull_facet_job_try_updates($f, $pull_params); + $self->_pull_facet_job_try_coverage($f, $pull_params); + $self->_pull_facet_run_fields($f, $pull_params); + $self->_pull_facet_run_updates($f, $pull_params); + $self->_pull_facet_run_coverage($f, $pull_params); + $self->_pull_facet_resource($f, $pull_params); + + my $children = $self->_pull_facet_children($f, $pull_params); + my $binaries = $self->_pull_facet_binaries($f, $pull_params); + + $self->_pull_facet_reporting($f, $pull_params) if $children; + + # Nested items are orphans unless they have a parent. + my $orphan = $nested ? 1 : 0; + $orphan = 0 if $params{parent}; + $orphan = 1 if $params{orphan}; + + my $e; + $e = $try->{orphan_events}->{$e_uuid} // {}; + + %$e = ( + %$e, + + job_try_id => $try->{job_try_id}, + + event_uuid => $e_uuid, + trace_uuid => $trace->{uuid}, + + stamp => $formatted_stamp, + event_idx => $idx, + event_sdx => $sdx, + nested => $nested, + + is_subtest => $is_subtest, + is_diag => $is_diag, + is_harness => $is_harness, + is_time => $is_time, + + causes_fail => $fail, + + $params{parent} ? (parent_uuid => $params{parent}) : (), + + # Facet version wins if we have one, but we want them here if all we + # got was an orphan. + + $binaries ? (has_binaries => 1, rel_binaries => $binaries) : (has_binaries => 0), + + $rendered ? 
(rendered => $rendered) : (), + ); + + if ($orphan) { + $e->{has_facets} //= 0; + $e->{has_orphan} = 1; + + clean($e->{orphan} = $f); + + $try->{orphan_events}->{$e_uuid} = $e; + } + else { + delete $try->{orphan_events}->{$e_uuid}; + $e->{has_orphan} //= 0; + $e->{has_facets} = 1; + + clean($e->{facets} = $f); + + push @{$try->{ready_events} //= []} => $e; } - $self->populate('JobOutput' => \@write); + $try->{urgent} = 1 if $is_diag; + + return unless $children && @$children; + + return map {[$_, job => $job, try => $try, idx => $idx, parent => $e_uuid, orphan => $orphan]} @$children; } -sub add_run_fields { +sub finish { my $self = shift; - my ($fields) = @_; + my (@errors) = @_; - my $run = $self->{+RUN}; - my $run_id = $run->run_id; + $self->{+DONE} = 1; - return $self->_add_fields( - fields => $fields, - type => 'RunField', - key_field => 'run_field_id', - attrs => {run_id => $run_id}, - ); + $self->flush_all(); + + my $run = $self->run; + + my $status; + my $aborted = 0; + + if (@errors) { + my $error = join "\n" => @errors; + $status = {status => 'broken', error => $error}; + } + else { + my $stat; + if ($self->{+SIGNAL}) { + $stat = 'canceled'; + $aborted = 1; + } + else { + $stat = 'complete'; + } + + $status = {status => $stat}; + } + + if (my $dur = $self->{+DURATION}) { + $self->retry_on_disconnect("insert duration report row" => sub { + my $fail = $aborted ? 0 : $run->failed ? 1 : 0; + my $pass = ($fail || $aborted) ? 0 : 1; + + my $row = { + run_id => $self->{+RUN_ID}, + user_id => $self->user_id, + project_id => $self->project_id, + duration => $dur, + retry => 0, + pass => $pass, + fail => $fail, + abort => $aborted, + }; + + $self->schema->resultset('Reporting')->create($row); + }); + } + + $self->retry_on_disconnect("update run status" => sub { $run->update($status) }); + + return $status; +} + +sub DESTROY { + return; + my $self = shift; + return if $self->{+DONE}; + $self->finish("Unknown issue, destructor closed out import process. 
\$@ was: $@", @{$self->{+ERRORS}}); } -sub add_job_fields { +sub flush_all { my $self = shift; - my ($job, $fields) = @_; - my $job_key = $job->job_key; + $self->flush_run(); + $self->flush_coverage(); + $self->flush_reporting(); + $self->flush_try_fields(); - return $self->_add_fields( - fields => $fields, - type => 'JobField', - key_field => 'job_field_id', - attrs => {job_key => $job_key}, - ); + for my $job (values %{$self->{+JOBS}}) { + + $self->flush_job($job); + + for my $try (@{$job->{tries} // []}) { + next unless $try; + + $self->flush_try($try); + $self->flush_events($try); + } + } } -sub _add_fields { +sub flush_run { my $self = shift; - my %params = @_; - my $fields = $params{fields}; - my $type = $params{type}; - my $key_field = $params{key_field}; - my $attrs = $params{attrs} // {}; + if (my $delta = delete $self->{+RUN_DELTA}) { + $self->apply_delta($self->{+RUN}, $delta); + } - my @add; - for my $field (@$fields) { - my $id = gen_uuid; - my $new = {%$attrs, $key_field => $id}; + my $run_fields = delete $self->{+RUN_FIELDS}; + my $resources = delete $self->{+RESOURCES}; - $new->{name} = $field->{name} || 'unknown'; - $new->{details} = $field->{details} || $new->{name}; - $new->{raw} = $field->{raw} if $field->{raw}; - $new->{link} = $field->{link} if $field->{link}; - $new->{data} = encode_json($field->{data}) if $field->{data}; + $self->populate(RunField => $run_fields) if $run_fields && @$run_fields; + $self->populate(Resource => $resources) if $resources && @$resources; - push @add => $new; + return; +} - # Replace the item in the $fields array with the id - $field = $id; +sub flush_coverage { + my $self = shift; + + my $coverage = delete $self->{+COVERAGE}; + if ($coverage && @$coverage) { + $self->populate(Coverage => $coverage); + return 1; } - $self->populate($type => \@add); + return 0; } -sub clean_output { - my $text = shift; +sub flush_reporting { + my $self = shift; - return undef unless defined $text; - $text =~ s/^T2-HARNESS-ESYNC: 
\d+\n//gm; - chomp($text); + my $reporting = delete $self->{+REPORTING}; + if ($reporting && @$reporting) { + $self->populate(Reporting => $reporting); + return 1; + } - return undef unless length($text); - return $text; + return 0; } -sub clean { - my ($s) = @_; - return 0 unless defined $s; - my $r = ref($_[0]) or return 1; - if ($r eq 'HASH') { return clean_hash(@_) } - elsif ($r eq 'ARRAY') { return clean_array(@_) } - return 1; +sub flush_try_fields { + my $self = shift; + + my $job_fields = delete $self->{+TRY_FIELDS}; + $self->populate(JobTryField => $job_fields) if $job_fields && @$job_fields; + + return; } -sub clean_hash { - my ($s) = @_; - my $vals = 0; +sub flush_job { + my $self = shift; + my ($job) = @_; - for my $key (keys %$s) { - my $v = clean($s->{$key}); - if ($v) { $vals++ } - else { delete $s->{$key} } + if (my $delta = delete $job->{delta}) { + $self->apply_delta($job->{result}, $delta); } +} - $_[0] = undef unless $vals; +sub flush_try { + my $self = shift; + my ($try) = @_; - return $vals; -} + my $delta = delete $try->{delta}; -sub clean_array { - my ($s) = @_; + if ($self->{+DONE} || $try->{done}) { + $delta //= {}; + my $res = $try->{result}; + my $status = $res->status || ''; - @$s = grep { clean($_) } @$s; + unless ($status eq 'complete') { + my $status = $self->{+SIGNAL} ? 'canceled' : 'broken'; + $status = 'canceled' if $self->{+DONE} && !$try->{done}; - return @$s if @$s; + $delta->{'=status'} = $status; + } - $_[0] = undef; - return 0; + my $fail = 0; + $fail ||= $delta->{'=fail'}; + $fail ||= $res->fail; + + # Normalize the fail/pass + $delta->{'=fail'} = $fail ? 
1 : 0; + } + + $self->apply_delta($try->{result}, $delta) if $delta; + + return; } -sub update_other { +sub apply_delta { my $self = shift; - my ($job, $f) = @_; + my ($res, $delta) = @_; - my $run = $self->{+RUN}; + my $update = {}; - if (my $run_data = $f->{harness_run}) { - my $settings = $run_data->{settings} //= $f->{harness_settings}; + for my $field (keys %$delta) { + my $val = $delta->{$field}; - if (my $c = $settings->{resource}->{slots}) { - $run->concurrency($c); + if ($field =~ s/^=//) { + $update->{$field} = $val; + } + elsif ($field =~ s/^Δ//) { + $update->{$field} = ($res->$field // 0) + $val; } + } - clean($run_data); - $self->schema->resultset('RunParameter')->find_or_create({ - run_id => $run->run_id, - parameters => $run_data, - }); + $self->retry_on_disconnect("update $res" => sub { $res->update($update) }, sub { print STDERR Dumper($update) }); +} - if (my $fields = $run_data->{harness_run_fields} // $run_data->{fields}) { - $self->add_run_fields($fields); - } +sub flush { + my $self = shift; + my ($job, $try) = @_; + + my $changed = 0; + + # Always flush these, they are things we want to have up to date + $self->flush_run(); + $self->flush_job($job); + $self->flush_try($try); + $self->flush_try_fields(); + + my $int_flush = 0; + my $int = $self->{+INTERVAL}; + if ($int) { + my $last = $self->{+LAST_FLUSH}; + $int_flush = 1 if !$last || $int < time - $last; } - my $job_result = $job->{result}; - my %cols = $job_result->get_columns; + my $bs = $self->{+BUFFER_SIZE}; + my $flushed; - # Handle job events - if (my $job_data = $f->{harness_job}) { - #$cols{test_file_idx} ||= $self->get_test_file_idx($job_data->{file}); - $cols{name} ||= $job_data->{job_name}; - $f->{harness_job} = "Removed, see job with job_key $cols{job_key}"; + if (my $e = $try->{ready_events}) { + my $urgent = delete $try->{urgent}; + $flushed += $self->flush_events($try, urgent => $urgent) if $try->{done} || $urgent || $int_flush || ($e && @$e >= $bs); + } - clean($job_data); - 
$self->schema->resultset('JobParameter')->find_or_create({ - job_key => uuid_deflate($job_result->job_key), - parameters => $job_data, - }); + if (my $c = $self->{+COVERAGE}) { + $flushed += $self->flush_coverage() if $int_flush || ($bs && @$c >= $bs); + } + if (my $r = $self->{+REPORTING}) { + $flushed += $self->flush_reporting() if $int_flush || ($bs && @$r >= $bs); } - if (my $job_exit = $f->{harness_job_exit}) { - #$cols{test_file_idx} ||= $self->get_test_file_idx($job_exit->{file}); - $cols{exit_code} = $job_exit->{exit}; - if ($job_exit->{retry} && $job_exit->{retry} eq 'will-retry') { - $cols{retry} = 1; - $self->{+RETRIED}++; - $self->{+FAILED}--; - } - else { - $cols{retry} = 0; + $self->{+LAST_FLUSH} = time if $flushed; + + return; +} + +sub flush_events { + my $self = shift; + my ($try, %params) = @_; + + return if mode_check($self->{+MODE}, 'summary'); + + my $events = $try->{ready_events} //= []; + my $deferred = $try->{deffered_events} //= []; + + my $urgent = $params{urgent}; + my $done = $self->{+DONE} || $try->{done}; + + if ($done) { + my @orphans = values %{delete($try->{orphan_events}) // {}}; + + if (@orphans) { + my $msg = "Left with " . scalar(@orphans) . 
" orphaned events"; + push @{$self->{+ERRORS}} => "$msg."; + warn $msg; } - $self->add_io_streams( - $job_result, - [STDERR => delete $job_exit->{stderr}], - [STDOUT => delete $job_exit->{stdout}], - ); + push @$events => @orphans; } - if (my $job_start = $f->{harness_job_start}) { - $cols{test_file_idx} ||= $self->get_test_file_idx($job_start->{rel_file}) if $job_start->{rel_file}; - $cols{test_file_idx} ||= $self->get_test_file_idx($job_start->{file}); - $cols{start} = $self->format_stamp($job_start->{stamp}); + + my (@write_events, @write_bin, $parent_ids); + + if (record_all_events(mode => $self->{+MODE}, job => $try->{job}->{result}, try => $try->{result})) { + for my $event (@$deferred, @$events) { + $event->{facets} = encode_ascii_json($event->{facets}) if $event->{facets}; + $event->{orphan} = encode_ascii_json($event->{orphan}) if $event->{orphan}; + $event->{rendered} = encode_ascii_json($event->{rendered}) if $event->{rendered}; + + $parent_ids++ if $event->{parent_uuid}; + + push @write_events => $event; + push @write_bin => @{delete($event->{rel_binaries}) // []}; + } + + @$deferred = (); } - if (my $job_launch = $f->{harness_job_launch}) { - $cols{status} = 'running'; + else { + for my $event (@$events) { + if (event_in_mode(event => $event, record_all_event => 0, mode => $self->{+MODE}, job => $try->{job}->{result}, try => $try->{result})) { + $event->{facets} = encode_ascii_json($event->{facets}) if $event->{facets}; + $event->{orphan} = encode_ascii_json($event->{orphan}) if $event->{orphan}; + $event->{rendered} = encode_ascii_json($event->{rendered}) if $event->{rendered}; + + $parent_ids++ if $event->{parent_uuid}; - $cols{test_file_idx} ||= $self->get_test_file_idx($job_launch->{file}); - $cols{launch} = $self->format_stamp($job_launch->{stamp}); + push @write_events => $event; + push @write_bin => @{delete($event->{rel_binaries}) // []}; + } + else { + push @$deferred => $event; + } + } } - if (my $job_end = $f->{harness_job_end}) { - 
#$cols{test_file_idx} ||= $self->get_test_file_idx($job_end->{file}); - $cols{fail} ||= $job_end->{fail} ? 1 : 0; - $cols{ended} = $self->format_stamp($job_end->{stamp}); - $cols{fail} ? $self->{+FAILED}++ : $self->{+PASSED}++; + @$events = (); - # All done - $job->{done} = 1; - $cols{status} = 'complete'; + my $out = 0; - if ($job_end->{rel_file} && $job_end->{times} && $job_end->{times}->{totals} && $job_end->{times}->{totals}->{total}) { - my $tfile_id = $cols{test_file_idx} ||= $self->get_test_file_idx($job_end->{rel_file}) if $job_end->{rel_file}; + if (@write_events || @write_bin) { + $out = 1; + $try->{normalized} = 0; - if (my $duration = $job_end->{times}->{totals}->{total}) { - $job->{duration} = $duration; - $cols{duration} = $duration; - } + if (@write_events) { + @write_events = sort { $a->{event_idx} <=> $b->{event_idx} || $a->{event_sdx} <=> $b->{event_sdx} } @write_events; + $self->populate(Event => \@write_events) } + $self->populate(Binary => \@write_bin) if @write_bin; + } - if (my $job_fields = $f->{harness_job_fields}) { - $self->add_job_fields($job_result, $job_fields); + + if ($done && !$try->{normalized}) { + @$deferred = (); # Not going to happen at this point + $try->{result}->normalize_to_mode(mode => $self->{+MODE}); + $try->{normalized} = 1; + + $self->fix_event_tree($try) if $parent_ids; + $self->remove_orphans($try) unless $try->{result}->fail; } - $job_result->set_columns(\%cols); + return $out; +} - return; +sub fix_event_tree { + my $self = shift; + my ($try) = @_; + + # FIXME: Need different mysql syntax, also test sqlite + my $dbh = $self->{+CONFIG}->connect; + my $sth = $dbh->prepare(<<" EOT"); + UPDATE events + SET parent_id = event2.event_id + FROM events AS event2 + WHERE events.job_try_id = ? 
+ AND events.job_try_id = event2.job_try_id + AND events.parent_id IS NULL + AND events.parent_uuid = event2.event_uuid + EOT + + $sth->execute($try->{job_try_id}) or die $sth->errstr; } -1; +sub remove_orphans { + my $self = shift; + my ($try) = @_; + + # FIXME: Need different mysql syntax, also test sqlite + my $dbh = $self->{+CONFIG}->connect; + my $sth = $dbh->prepare(<<" EOT"); + UPDATE events + SET orphan = NULL + WHERE job_try_id = ? + AND orphan IS NOT NULL + EOT + + $sth->execute($try->{job_try_id}) or die $sth->errstr; +} __END__ diff --git a/lib/App/Yath/Schema/Sweeper.pm b/lib/App/Yath/Schema/Sweeper.pm index 2201dc527..e876546c3 100644 --- a/lib/App/Yath/Schema/Sweeper.pm +++ b/lib/App/Yath/Schema/Sweeper.pm @@ -2,7 +2,7 @@ package App::Yath::Schema::Sweeper; use strict; use warnings; use Time::HiRes qw/time/; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; our $VERSION = '2.000000'; @@ -130,7 +130,7 @@ sub sweep_run { } if ($params{coverage}) { - $run->coverages->delete; + $run->coverage->delete; $run->update({has_coverage => 0}) unless $params{runs}; } diff --git a/lib/App/Yath/Schema/Sync.pm b/lib/App/Yath/Schema/Sync.pm index 59c702561..5a50c7645 100644 --- a/lib/App/Yath/Schema/Sync.pm +++ b/lib/App/Yath/Schema/Sync.pm @@ -6,7 +6,7 @@ use DBI; use Scope::Guard; use Carp qw/croak/; use Test2::Harness::Util::JSON qw/encode_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; our $VERSION = '2.000000'; @@ -46,9 +46,6 @@ sub sync { my $cache = $params{cache} // {}; my $debug = $params{debug} // 0; - my $from_uuidf = $params{from_uuid_format} // 'binary'; - my $to_uuidf = $params{to_uuid_format} // 'binary'; - my ($rh, $wh); pipe($rh, $wh) or die "Could not open pipe: $!"; $wh->autoflush(1); @@ -68,7 +65,6 @@ sub sync { rh => $rh, cache => $cache, debug => $debug, - uuidf => $to_uuidf, ); $guard->dismiss(); @@ -83,7 +79,6 @@ sub sync { wh => 
$wh, skip => $skip, debug => $debug, - uuidf => $from_uuidf, ); close($wh); @@ -153,7 +148,7 @@ sub _get_dbh_runs { my @out; while (my $run = $sth->fetchrow_arrayref()) { - push @out => uuid_inflate($run->[0])->string; + push @out => $run->[0]; } return \@out; @@ -168,7 +163,6 @@ sub write_sync { my $dbh = $params{dbh} or croak "'dbh' is required"; my $run_ids = $params{run_ids} or croak "'run_ids' must be an arrayref of run ids"; my $wh = $params{wh} or croak "'wh' is required and must be a writable filehandle"; - my $uuidf = $params{uuidf} // 'binary'; my $skip = $params{skip} // {}; my $debug = $params{debug} // 0; @@ -186,7 +180,7 @@ sub write_sync { my $counter = 0; my $subcount = 0; for my $run_id (@$run_ids) { - my $run_uuid = uuid_inflate($run_id)->$uuidf; + my $run_uuid = $run_id; my @args = ($dbh, $run_uuid, $skip); for my $meth (@to_dump) { @@ -214,7 +208,6 @@ sub read_sync { my $dbh = $params{dbh} or croak "'dbh' is required"; my $run_ids = $params{run_ids} or croak "'run_ids' must be an arrayref of run ids"; my $rh = $params{rh} or croak "'rh' is required and must be a readable filehandle"; - my $uuidf = $params{uuidf} // 'binary'; my $cache = $params{cache} // {}; my $debug = $params{debug} // 0; @@ -262,7 +255,7 @@ sub read_sync { my $method = "import_$type"; next if eval { - $self->$method(dbh => $dbh, item => $data->{$type}, uuidf => $uuidf, cache => $cache); + $self->$method(dbh => $dbh, item => $data->{$type}, cache => $cache); 1; }; @@ -278,7 +271,7 @@ sub read_sync { sub get_or_create_id { my $self = shift; - my ($cache, $dbh, $uuidf, $table, $field, $via_field, $via_value) = @_; + my ($cache, $dbh, $table, $field, $via_field, $via_value) = @_; return undef unless $via_value; @@ -287,7 +280,7 @@ sub get_or_create_id { sub _get_or_create_id { my $self = shift; - my ($cache, $dbh, $uuidf, $table, $field, $via_field, $via_value) = @_; + my ($cache, $dbh, $table, $field, $via_field, $via_value) = @_; my $sql = "SELECT $field FROM $table WHERE 
$via_field = ?"; @@ -295,19 +288,17 @@ sub _get_or_create_id { $sth->execute($via_value) or die "MySQL Error: " . $dbh->errstr; if ($sth->rows) { my $row = $sth->fetchrow_hashref(); - return uuid_inflate($row->{$field})->string; + return $row->{$field}; } my $uuid = gen_uuid(); - $self->insert($dbh, $uuidf, $table, {$field => $uuid, $via_field => $via_value}); - return $uuid->string; + $self->insert($dbh, $table, {$field => $uuid, $via_field => $via_value}); + return $uuid; } sub insert { my $self = shift; - my ($dbh, $uuidf, $table, $data) = @_; - - _fix_uuids($uuidf => $data); + my ($dbh, $table, $data) = @_; my $sql = "INSERT INTO $table("; my (@fields, @vars); @@ -321,63 +312,6 @@ sub insert { $sth->execute(@vars) or die "Insert failed: " . $dbh->errstr; } -sub stringify_uuids { - my ($in) = @_; - _fix_uuids(string => $in); -} - -sub binarify_uuids { - my ($in) = @_; - _fix_uuids(binary => $in); -} - -my @ID_FIELDS = qw{ - coverage_idx - coverage_manager_idx - event_id - job_field_id - job_id - job_key - parent_id - project_idx - reporting_idx - run_field_id - run_id - source_file_idx - source_sub_idx - test_file_idx - user_idx -}; - -sub _fix_uuids { - my ($method => $in) = @_; - return unless $in; - my $type = ref($in); - - if (!$type) { - return uuid_inflate($in)->$method; - } - if ($type eq 'App::Yath::Schema::UUID') { - return $in->$method; - } - if ($type eq 'HASH') { - # Cannot do all_id or _key fields, some are not uuids... 
- # This is a list of safe ones - for my $key (@ID_FIELDS) { - next unless exists $in->{$key}; - $in->{$key} = _fix_uuids($method, $in->{$key}); - } - } - elsif($type eq 'ARRAY') { - _fix_uuids($method, $_) for @$in; - } - else { - die "Unsupported type '$type' '$in'"; - } - - return $in; -} - sub render_runs { my $self = shift; my ($dbh, $run_id, $skip) = @_; @@ -388,14 +322,14 @@ sub render_runs { passed, failed, retried, concurrency, parameters, has_coverage, users.username, projects.name as project_name FROM runs - JOIN users USING(user_idx) - JOIN projects USING(project_idx) + JOIN users USING(user_id) + JOIN projects USING(project_id) WHERE run_id = ? EOT $sth->execute($run_id) or die "MySQL Error: " . $dbh->errstr; - my $run = stringify_uuids($sth->fetchrow_hashref()); + my $run = $sth->fetchrow_hashref(); delete $run->{has_coverage} if $skip->{coverage}; return {run => $run}; @@ -411,7 +345,7 @@ sub render_run_fields { WHERE run_id = ? EOT $sth->execute($run_id) or die "MySQL Error: " . $dbh->errstr; - my $run_fields = stringify_uuids($sth->fetchall_arrayref({})); + my $run_fields = $sth->fetchall_arrayref({}); return map { +{run_field => $_} } @$run_fields; } @@ -431,7 +365,7 @@ sub render_jobs { EOT $sth->execute($run_id) or die "MySQL Error: " . $dbh->errstr; - my $jobs = stringify_uuids($sth->fetchall_arrayref({})); + my $jobs = $sth->fetchall_arrayref({}); return map { +{job => $_} } @$jobs; } @@ -447,7 +381,7 @@ sub render_job_fields { EOT $sth->execute($run_id) or die "MySQL Error: " . $dbh->errstr; - my $job_fields = stringify_uuids($sth->fetchall_arrayref({})); + my $job_fields = $sth->fetchall_arrayref({}); return map { +{job_field => $_} } @$job_fields; } @@ -464,7 +398,7 @@ sub render_events { EOT $sth->execute($run_id) or die "MySQL Error: " . 
$dbh->errstr; - my $events = stringify_uuids($sth->fetchall_arrayref({})); + my $events = $sth->fetchall_arrayref({}); return map { +{event => $_} } @$events; } @@ -482,7 +416,7 @@ EOT $sth->execute($run_id) or die "MySQL Error: " . $dbh->errstr; - my $binaries = stringify_uuids($sth->fetchall_arrayref({})); + my $binaries = $sth->fetchall_arrayref({}); return map { +{binary => $_} } @$binaries; } @@ -503,7 +437,7 @@ WHERE run_id = ? EOT $sth->execute($run_id) or die "MySQL Error: " . $dbh->errstr; - my $reporting = stringify_uuids($sth->fetchall_arrayref({})); + my $reporting = $sth->fetchall_arrayref({}); return map { +{reporting => $_} } @$reporting; } @@ -527,7 +461,7 @@ EOT $sth->execute($run_id) or die "MySQL Error: " . $dbh->errstr; - my $coverage = stringify_uuids($sth->fetchall_arrayref({})); + my $coverage = $sth->fetchall_arrayref({}); return map { +{coverage => $_} } @$coverage; } @@ -536,14 +470,13 @@ sub import_run { my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $cache = $params{cache}; my $run = $params{item}; - $run->{user_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'users' => 'user_idx', username => delete $run->{username}); - $run->{project_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'projects' => 'project_idx', name => delete $run->{project_name}); + $run->{user_idx} = $self->get_or_create_id($cache, $dbh, 'users' => 'user_idx', username => delete $run->{username}); + $run->{project_idx} = $self->get_or_create_id($cache, $dbh, 'projects' => 'project_idx', name => delete $run->{project_name}); - $self->insert($dbh, $uuidf, runs => $run); + $self->insert($dbh, runs => $run); } sub import_run_field { @@ -551,10 +484,9 @@ my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $run_field = $params{item}; - $self->insert($dbh, $uuidf, run_fields => $run_field); + $self->insert($dbh, run_fields =>
$run_field); } sub import_job { @@ -562,13 +494,12 @@ my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $cache = $params{cache}; my $job = $params{item}; - $job->{test_file_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'test_files' => 'test_file_idx', filename => delete $job->{file}); + $job->{test_file_idx} = $self->get_or_create_id($cache, $dbh, 'test_files' => 'test_file_idx', filename => delete $job->{file}); - $self->insert($dbh, $uuidf, jobs => $job); + $self->insert($dbh, jobs => $job); } sub import_job_field { @@ -576,10 +507,9 @@ my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $job_field = $params{item}; - $self->insert($dbh, $uuidf, job_fields => $job_field); + $self->insert($dbh, job_fields => $job_field); } sub import_event { @@ -587,10 +517,9 @@ my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $event = $params{item}; - $self->insert($dbh, $uuidf, events => $event); + $self->insert($dbh, events => $event); } sub import_binary { @@ -598,10 +527,9 @@ my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $binary = $params{item}; - $self->insert($dbh, $uuidf, binaries => $binary); + $self->insert($dbh, binaries => $binary); } sub import_reporting { @@ -609,15 +537,14 @@ my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $cache = $params{cache}; my $reporting = $params{item}; - $reporting->{project_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'projects' => 'project_idx', name => delete $reporting->{project_name}); - $reporting->{user_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'users' => 'user_idx', username => delete $reporting->{username}); - $reporting->{test_file_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'test_files' => 'test_file_idx', filename => delete $reporting->{test_file}); + 
$reporting->{project_idx} = $self->get_or_create_id($cache, $dbh, 'projects' => 'project_idx', name => delete $reporting->{project_name}); + $reporting->{user_idx} = $self->get_or_create_id($cache, $dbh, 'users' => 'user_idx', username => delete $reporting->{username}); + $reporting->{test_file_idx} = $self->get_or_create_id($cache, $dbh, 'test_files' => 'test_file_idx', filename => delete $reporting->{test_file}); - $self->insert($dbh, $uuidf, reporting => $reporting); + $self->insert($dbh, reporting => $reporting); } sub import_coverage { @@ -625,16 +552,15 @@ my %params = @_; my $dbh = $params{dbh}; - my $uuidf = $params{uuidf}; my $cache = $params{cache}; my $coverage = $params{item}; - $coverage->{test_file_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'test_files' => 'test_file_idx', filename => delete $coverage->{test_file}); - $coverage->{source_file_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'source_files' => 'source_file_idx', filename => delete $coverage->{source_file}); - $coverage->{source_sub_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'source_subs' => 'source_sub_idx', subname => delete $coverage->{source_sub}); - $coverage->{coverage_manager_idx} = $self->get_or_create_id($cache, $dbh, $uuidf, 'coverage_manager' => 'coverage_manager_idx', package => delete $coverage->{coverage_manager}); + $coverage->{test_file_idx} = $self->get_or_create_id($cache, $dbh, 'test_files' => 'test_file_idx', filename => delete $coverage->{test_file}); + $coverage->{source_file_idx} = $self->get_or_create_id($cache, $dbh, 'source_files' => 'source_file_idx', filename => delete $coverage->{source_file}); + $coverage->{source_sub_idx} = $self->get_or_create_id($cache, $dbh, 'source_subs' => 'source_sub_idx', subname => delete $coverage->{source_sub}); + $coverage->{coverage_manager_idx} = $self->get_or_create_id($cache, $dbh, 'coverage_manager' => 'coverage_manager_idx', package => delete $coverage->{coverage_manager}); - 
$self->insert($dbh, $uuidf, coverage => $coverage); + $self->insert($dbh, coverage => $coverage); } 1; @@ -716,9 +642,6 @@ Copy data from the source database to the destination database. skip => {}, # Optional hashref of (TABLE => bool) for tables to skip cache => {}, # Optional uuid cache map. debug => 0, # Optional, turn on for verbosity - - from_uuid_format => 'binary', # Defaults to 'binary' may be 'string' for older databases - to_uuid_format => 'binary', # Defaults to 'binary' may be 'string' for older databases ); =item $sync->write_sync(...) @@ -729,7 +652,6 @@ Output the data to jsonl format. dbh => $dbh, # Source database run_ids => $run_ids, # list of run_ids to sync wh => $wh, # Where to print the jsonl data - uuidf => $uuidf, # UUID format, defaults to 'binary', 'string' is also valid. skip => $skip, # Optional hashref of (TABLE => bool) for tables to skip debug => 0, # Optional, turn on for verbosity ); @@ -742,32 +664,23 @@ Read the jsonl data and insert it into the database. dbh => $dbh, # Destination database run_ids => $run_ids, # list of run_ids to sync rh => $rh, # Where to read the jsonl data - uuidf => $uuidf, # UUID format, defaults to 'binary', 'string' is also valid. cache => $cache, # Optional uuid cache map. debug => 0, # Optional, turn on for verbosity ); -=item $uuid = $sync->get_or_create_id($cache, $dbh, $uuidf, $table, $uuid_field, $value_field, $value) +=item $uuid = $sync->get_or_create_id($cache, $dbh, $table, $uuid_field, $value_field, $value) Create or find a common link in the database (think project, user, etc). my $uuid = $sync->get_or_create_id( - $cache, $dbh, 'binary', + $cache, $dbh, users => 'user_idx', username => 'bob', ); -=item $sync->insert($dbh, $uuidf, $table, $data) - -Insert $data as a row into $table using the $uuidf uuid format. - -=item $sync->stringify_uuids($thing) - -Takes a string or nested data structure, will convert uuid's in id fields to the string form. 
- -=item $sync->binarify_uuids($thing) +=item $sync->insert($dbh, $table, $data) -Takes a string or nested data structure, will convert uuid's in id fields to the binary form. +Insert $data as a row into $table. =back diff --git a/lib/App/Yath/Schema/UUID.pm b/lib/App/Yath/Schema/UUID.pm deleted file mode 100644 index e1dc4d56f..000000000 --- a/lib/App/Yath/Schema/UUID.pm +++ /dev/null @@ -1,242 +0,0 @@ -package App::Yath::Schema::UUID; -use strict; -use warnings; - -our $VERSION = '2.000000'; - -use overload( - fallback => 1, - '""' => sub { $_[0]->magic_stringify }, - bool => sub { 1 }, -); - -use Data::UUID; -use Scalar::Util qw/blessed reftype/; -use Test2::Harness::Util qw/looks_like_uuid/; -use Test2::Harness::Util::UUID qw/UG/; - -require Test2::Harness::Util::UUID; -require bytes; - -use Importer Importer => 'import'; -our @EXPORT_OK = qw/uuid_inflate uuid_deflate gen_uuid uuid_mass_inflate uuid_mass_deflate looks_like_uuid_36_or_16/; - -sub gen_uuid { - my $binary = UG()->create(); - my $forsql = _reorder_bin($binary); - my $string = UG()->to_string($binary); - - return bless( - { - binary => $forsql, - string => lc($string), - }, - __PACKAGE__ - ); -} - -sub new { - my $class = shift; - my ($val) = @_; - $val //= lc(Test2::Harness::Util::UUID::gen_uuid()); - return uuid_inflate($val); -} - -sub _reorder_bin { - my $bin = shift; - - return join '' => ( - scalar(reverse(substr($bin, 6, 2))), - scalar(reverse(substr($bin, 4, 2))), - scalar(reverse(substr($bin, 0, 4))), - substr($bin, 8, 8), - ); -} - -sub _unorder_bin { - my ($bin) = @_; - - return join '' => ( - scalar(reverse(substr($bin, 4, 4))), - scalar(reverse(substr($bin, 2, 2))), - scalar(reverse(substr($bin, 0, 2))), - substr($bin, 8, 8), - ); -} - -sub uuid_inflate { - my ($val) = @_; - return undef unless $val; - return $val if blessed($val) && $val->isa(__PACKAGE__); - - my $size = bytes::length($val); - - my $out; - if ($size == 16) { - my $unbin = UG()->to_string(_unorder_bin($val)); - - $out = 
{ - string => lc($unbin), - binary => $val, - }; - } - elsif ($size == 36) { - $val = $val; - - my $bin = UG()->from_string($val); - - $out = { - string => lc($val), - binary => _reorder_bin($bin), - }; - } - - return undef unless $out; - - return bless($out, __PACKAGE__); -} - -sub magic_stringify { - my $self = shift; - return $self->{string} unless $App::Yath::Schema::LOADED && $App::Yath::Schema::LOADED =~ m/(mysql|percona)/i; - - my $i = 0; - while (my @call = caller($i++)) { - return $self->{binary} if $call[0] =~ m/DBIx::Class::Storage::DBI/; - return $self->{string} if $i > 2; - } - - $self->{string}; -} - -sub uuid_deflate { - my ($val) = @_; - return undef unless $val; - $val = uuid_inflate($val) unless blessed($val) && $val->isa(__PACKAGE__); - return undef unless $val; - return $val->{binary} if $App::Yath::Schema::LOADED && $App::Yath::Schema::LOADED =~ m/(mysql|percona)/i; - return $val->{string}; -} - -*deflate = \&uuid_deflate; -*inflate = \&uuid_inflate; - -sub binary { $_[0]->{binary} } -sub string { $_[0]->{string} } -sub TO_JSON { $_[0]->{string} } - -sub uuid_mass_inflate { _uuid_mass_flate($_[0], \&uuid_inflate, \&uuid_mass_inflate) } -sub uuid_mass_deflate { _uuid_mass_flate($_[0], \&uuid_deflate, \&uuid_mass_deflate) } - -sub _uuid_mass_flate { - my ($val_do_not_use, $flate, $mass_flate) = @_; - return $_[0] unless $_[0]; - - if (blessed($_[0])) { - return $_[0] = $flate->($_[0]) if $_[0]->isa(__PACKAGE__); - return $_[0]; - } - - return $_[0] = $flate->($_[0]) if looks_like_uuid_36_or_16($_[0]); - - my $type = reftype($_[0]) or return; - - if ($type eq 'HASH') { - my @list = grep { - my $ok = 1; - $ok &&= $_ eq 'owner' || (m/_(id|key)$/ && $_ ne 'trace_id'); - $ok &&= looks_like_uuid_36_or_16($_[0]->{$_}); - - my $rt = reftype($_[0]->{$_}) // ''; - $ok ||= $rt eq 'HASH' || $rt eq 'ARRAY'; - - $ok; - } keys %{$_[0]}; - - $_[0]->{$_} = _uuid_mass_flate($_[0]->{$_}, $flate, $mass_flate) for @list; - } - elsif($type eq 'ARRAY') { - $_ = 
_uuid_mass_flate($_, $flate, $mass_flate) for grep { - my $ok = looks_like_uuid_36_or_16($_); - - my $dt = reftype($_) // ''; - $ok ||= 1 if $dt eq 'HASH' || $dt eq 'ARRAY'; - - $ok; - } @{$_[0]}; - } - - return $_[0]; -} - -sub looks_like_uuid_36_or_16 { - my ($val) = @_; - return 0 unless $val; - my $len = length($val); - - if ($len == 16) { - return 1 if $val !~ m/^[[:ascii:]]+$/s; - return 0; - } - elsif ($len == 36) { - return unless $val =~ m/-/; - return looks_like_uuid($val); - } - - return 0; -} - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -App::Yath::Schema::UUID - FIXME - -=head1 DESCRIPTION - -=head1 SYNOPSIS - -=head1 EXPORTS - -=over 4 - -=back - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut - diff --git a/lib/App/Yath/Schema/Util.pm b/lib/App/Yath/Schema/Util.pm index 33b3f467f..da776f53d 100644 --- a/lib/App/Yath/Schema/Util.pm +++ b/lib/App/Yath/Schema/Util.pm @@ -7,7 +7,7 @@ our $VERSION = '2.000000'; use Carp qw/croak/; use Test2::Harness::Util qw/mod2file/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use Importer Importer => 'import'; @@ -119,8 +119,6 @@ sub schema_config_from_settings { sub find_job { my ($schema, $uuid, $try) = @_; - $uuid = uuid_inflate($uuid) or croak "Invalid job identifier"; - my $jobs = $schema->resultset('Job'); if (length $try) { diff --git a/lib/App/Yath/Server/Controller/Binary.pm b/lib/App/Yath/Server/Controller/Binary.pm index 5d12dd707..bdc4a7673 100644 --- a/lib/App/Yath/Server/Controller/Binary.pm +++ b/lib/App/Yath/Server/Controller/Binary.pm @@ -8,7 +8,7 @@ use App::Yath::Server::Response qw/resp error/; use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase; -use App::Yath::Schema::UUID qw/uuid_inflate/; + sub title { 'Binary' } @@ -20,14 +20,14 @@ sub handle { my $res = resp(200); die error(404 => 'Missing route') unless $route; - my $binary_idx = $route->{binary_idx} or die error(404 => "Invalid Route"); + my $binary_id = $route->{binary_id} or die error(404 => "Invalid Route"); - error(404 => 'No id') unless $binary_idx; + error(404 => 'No id') unless $binary_id; my $schema = $self->schema; - my $binary = $schema->resultset('Binary')->find({binary_idx => $binary_idx}); + my $binary = $schema->resultset('Binary')->find({binary_id => $binary_id}); - error(404 => 'No such binary file') unless $binary_idx; + error(404 => 'No such binary file') unless $binary_id; my $filename = $binary->filename; diff --git a/lib/App/Yath/Server/Controller/Coverage.pm b/lib/App/Yath/Server/Controller/Coverage.pm index 97418f3cf..d0acba348 100644 --- a/lib/App/Yath/Server/Controller/Coverage.pm +++ b/lib/App/Yath/Server/Controller/Coverage.pm @@ -7,7 +7,7 @@ our $VERSION = '2.000000'; 
use List::Util qw/max/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json encode_pretty_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase; @@ -39,15 +39,13 @@ sub handle { $run = $project->last_covered_run(user => $username); } else { - $source = uuid_inflate($source) or die error(404 => 'Invalid Run'); - $run = eval { $schema->resultset('Run')->find({run_id => $source}) } or warn $@; - die error(405) unless $run; + $run = $schema->resultset('Run')->find_by_id_or_uuid($source) or die error(405); } die error(404) unless $run; if ($delete) { - $run->coverages->delete; + $run->coverage->delete; $run->update({has_coverage => 0}); } else { diff --git a/lib/App/Yath/Server/Controller/Download.pm b/lib/App/Yath/Server/Controller/Download.pm index f52f07dd3..3b82d3472 100644 --- a/lib/App/Yath/Server/Controller/Download.pm +++ b/lib/App/Yath/Server/Controller/Download.pm @@ -9,7 +9,7 @@ use Text::Xslate(qw/mark_raw/); use App::Yath::Util qw/share_dir/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -33,7 +33,6 @@ sub handle { } else { my $it = $route->{id} or die error(404 => 'No id'); - $it = uuid_inflate($it) or die error(404 => 'Invalid Run'); my $schema = $self->schema; $run = $schema->resultset('Run')->find({run_id => $it}) or die error(404 => 'Invalid Run'); } diff --git a/lib/App/Yath/Server/Controller/Events.pm b/lib/App/Yath/Server/Controller/Events.pm index ad1635595..49d4db8a9 100644 --- a/lib/App/Yath/Server/Controller/Events.pm +++ b/lib/App/Yath/Server/Controller/Events.pm @@ -7,7 +7,7 @@ our $VERSION = '2.000000'; use List::Util qw/max/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON 
qw/encode_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase; @@ -29,18 +29,16 @@ sub handle { my $p = $req->parameters; my (%query, %attrs, $rs, $meth, $event); - my $event_id = uuid_inflate($it) or die error(404 => "Invalid event id"); - if ($route->{from} eq 'single_event') { - $event = $schema->resultset('Event')->find({event_id => $event_id}, {remove_columns => [qw/orphan/]}) + $event = $schema->resultset('Event')->find({event_id => $it}, {remove_columns => [qw/orphan/]}) or die error(404 => 'Invalid Event'); } else { - $event = $schema->resultset('Event')->find({event_id => $event_id}, {remove_columns => [qw/orphan facets/]}) + $event = $schema->resultset('Event')->find({event_id => $it}, {remove_columns => [qw/orphan facets/]}) or die error(404 => 'Invalid Event'); } - $attrs{order_by} = {-asc => 'event_idx'}; + $attrs{order_by} = {-asc => 'event_id'}; if ($route->{from} eq 'single_event') { $res->content_type('application/json'); @@ -51,21 +49,22 @@ sub handle { if ($p->{load_subtests}) { # If we are loading subtests then we want ALL descendants, so here # we take the parent event and find the next event of the same - # nesting level, then we want all events with an event_idx between + # nesting level, then we want all events with an event_id between # them (in the same job); my $end_at = $schema->resultset('Event')->find( - {%query, nested => $event->nested, event_idx => {'>' => $event->event_idx}}, + {%query, nested => $event->nested, event_id => {'>' => $event->event_id}}, { - columns => [qw/event_idx/], + columns => [qw/event_id/], %attrs, }, ); - $query{event_ord} = {'>' => $event->event_idx, '<' => $end_at->event_idx}; + # FIXME: This should be using event_idx and event_sdx + $query{event_id} = {'>' => $event->event_id, '<' => $end_at->event_id}; } else { # We want direct descendants only - $query{'parent_id'} = $event_id; + $query{'parent_id'} = $it; } $rs 
= $schema->resultset('Event')->search( diff --git a/lib/App/Yath/Server/Controller/Files.pm b/lib/App/Yath/Server/Controller/Files.pm index d7e730d41..75995ce7f 100644 --- a/lib/App/Yath/Server/Controller/Files.pm +++ b/lib/App/Yath/Server/Controller/Files.pm @@ -7,7 +7,7 @@ our $VERSION = '2.000000'; use List::Util qw/max/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json encode_pretty_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase; @@ -34,7 +34,7 @@ sub handle { my $schema = $self->schema; my $query = {status => 'complete'}; - my $attrs = {order_by => {'-desc' => 'run_idx'}, rows => 1}; + my $attrs = {order_by => {'-desc' => 'run_id'}, rows => 1}; $attrs->{offset} = $idx if $idx; @@ -52,12 +52,19 @@ sub handle { die error(400 => "Invalid Request: $err") unless $ok; die error(404 => 'No Data') unless $run; - my $search = {retry => 0}; - $search->{fail} = 1 if $failed; + my $search = {is_harness_out => 0}; + if ($failed) { + $search->{fail} = 1; + $search->{retry} = 0; + } my $files = $run->jobs->search( $search, - {join => 'test_file', order_by => 'test_file.filename'}, + { + join => ['jobs_tries', 'test_file'], + order_by => 'test_file.filename', + group_by => ['me.job_id', 'test_file.filename'], + }, ); unless($json) { diff --git a/lib/App/Yath/Server/Controller/Interactions.pm b/lib/App/Yath/Server/Controller/Interactions.pm index 11341208c..5d7b47470 100644 --- a/lib/App/Yath/Server/Controller/Interactions.pm +++ b/lib/App/Yath/Server/Controller/Interactions.pm @@ -10,7 +10,7 @@ use App::Yath::Server::Response qw/resp error/; use App::Yath::Util qw/share_dir/; use App::Yath::Schema::Util qw/find_job/; use Test2::Harness::Util::JSON qw/encode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -23,7 +23,12 @@ sub handle { 
my $req = $self->{+REQUEST}; - my $id = uuid_inflate($route->{id}) or die error(404 => 'No event id provided'); + my $schema = $self->schema; + my $id = $route->{id} or die error(404 => 'No event id provided'); + + my $event = $schema->resultset('Event')->find_by_id_or_uuid($id) + or die error(404 => 'Invalid Event'); + my $context = $route->{context} // 1; return $self->data($id, $context) if $route->{data}; @@ -43,10 +48,10 @@ sub handle { my $content = $tx->render( 'interactions.tx', { - base_uri => $req->base->as_string, - event_id => $id, - user => $req->user, - data_uri => $data_uri, + base_uri => $req->base->as_string, + event_id => $event->event_uuid, + user => $req->user, + data_uri => $data_uri, context_count => $context, } ); @@ -60,29 +65,32 @@ sub data { my ($id, $context) = @_; my $schema = $self->schema; + # Get event - my $event = $schema->resultset('Event')->find({event_id => $id}) + my $event = $schema->resultset('Event')->find_by_id_or_uuid($id) or die error(404 => 'Invalid Event'); my $stamp = $event->get_column('stamp') or die "No stamp?!"; - # Get job - my $job = $event->job_key or die error(500 => "Could not find job"); - - # Get run from event - my $run = $job->run or die error(500 => "Could not find run"); + # Get job id + my $try = $event->job_try; + my $job = $try->job; + my $run = $job->run; # Get tests from run where the start and end surround the event - my $job_rs = $run->jobs( + my $try_rs = $schema->resultset('JobTry')->search( { - job_key => {'!=' => $job->job_key}, - ended => {'>=' => $stamp}, + 'job.job_id' => {'!=' => $job->job_id}, + 'me.ended' => {'>=' => $stamp }, '-or' => [ - {launch => {'<=' => $stamp}}, - {start => {'<=' => $stamp}}, + {'me.launch' => {'<=' => $stamp}}, + {'me.start' => {'<=' => $stamp}}, ], }, - {order_by => 'job_idx'}, + { + join => 'job', + order_by => 'job_try_id', + }, ); my $req = $self->{+REQUEST}; @@ -93,7 +101,7 @@ sub data { {type => 'run', data => $run}, {type => 'job', data => 
$job->glance_data}, {type => 'event', data => $event->line_data}, - {type => 'count', data => $job_rs->count}, + {type => 'count', data => $try_rs->count}, ); my $advance = sub { @@ -117,8 +125,8 @@ sub data { $event_rs = undef; } - if (my $job = $job_rs->next) { - push @out => {type => 'job', data => $job->glance_data}; + if (my $try = $try_rs->next) { + push @out => {type => 'job', data => $try->glance_data}; $event_rs = $job->events( { @@ -132,7 +140,7 @@ sub data { }, ], }, - {order_by => 'event_idx'}, + {order_by => ['event_idx', 'event_sdx']}, ); return 0; diff --git a/lib/App/Yath/Server/Controller/JobField.pm b/lib/App/Yath/Server/Controller/JobTryField.pm similarity index 84% rename from lib/App/Yath/Server/Controller/JobField.pm rename to lib/App/Yath/Server/Controller/JobTryField.pm index 4d8733a4d..a667c8bec 100644 --- a/lib/App/Yath/Server/Controller/JobField.pm +++ b/lib/App/Yath/Server/Controller/JobTryField.pm @@ -1,4 +1,4 @@ -package App::Yath::Server::Controller::JobField; +package App::Yath::Server::Controller::JobTryField; use strict; use warnings; @@ -9,7 +9,7 @@ use Text::Xslate(qw/mark_raw/); use App::Yath::Util qw/share_dir/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -26,9 +26,8 @@ sub handle { die error(404 => 'Missing route') unless $route; my $it = $route->{id} or die error(404 => 'No id'); - $it = uuid_inflate($it) or die error(404 => "Invalid id"); my $schema = $self->schema; - my $field = $schema->resultset('JobField')->find({job_field_id => $it}) or die error(404 => 'Invalid Field'); + my $field = $schema->resultset('JobTryField')->find({job_try_field_id => $it}) or die error(404 => 'Invalid Field'); if (my $act = $route->{action}) { if ($act eq 'delete') { diff --git a/lib/App/Yath/Server/Controller/Lookup.pm 
b/lib/App/Yath/Server/Controller/Lookup.pm index afb962d15..5a29dc576 100644 --- a/lib/App/Yath/Server/Controller/Lookup.pm +++ b/lib/App/Yath/Server/Controller/Lookup.pm @@ -4,12 +4,11 @@ use warnings; our $VERSION = '2.000000'; -use Scalar::Util qw/blessed/; use App::Yath::Server::Response qw/resp error/; use App::Yath::Util qw/share_dir/; use App::Yath::Schema::Util qw/find_job/; use Test2::Harness::Util::JSON qw/encode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -57,7 +56,7 @@ sub data { my $req = $self->{+REQUEST}; my $res = resp(200); - my @sources = qw/run jobs event/; + my @sources = qw/run job event/; my @out; @@ -91,19 +90,13 @@ sub lookup_run { my ($lookup, $state) = @_; return unless $lookup; - if (blessed($lookup)) { - $lookup = $lookup->run_id; - } - else { - $lookup = uuid_inflate($lookup); - } return if $state->{run}->{$lookup}++; my $schema = $self->schema; my $rs = $schema->resultset('Run'); - my $run = eval { $rs->find({run_id => $lookup}) }; + my $run = $rs->find_by_id_or_uuid($lookup); return () unless $run; return ( @@ -111,36 +104,44 @@ sub lookup_run { ); } -sub lookup_jobs { +sub lookup_job { my $self = shift; - my ($lookup, $state) = @_; + my ($lookup, $state, $try_id) = @_; return unless $lookup; - if (blessed($lookup)) { - $lookup = $lookup->job_key; - } - else { - $lookup = uuid_inflate($lookup); - } return if $state->{job}->{$lookup}++; my $schema = $self->schema; my $rs = $schema->resultset('Job'); + my $job = $rs->find_by_id_or_uuid($lookup); + return () unless $job; - my @out; + # FIXME: Make sure getting only a specific job_try_id works + return ( + $self->lookup_run($job->run_id, $state), + encode_json({type => 'job', data => $job->glance_data(try_id => $try_id)}) . 
"\n", + ); +} + +sub lookup_job_try { + my $self = shift; + my ($lookup, $state) = @_; + + return unless $lookup; - for my $key (qw/job_id job_key/) { - my $jobs = eval { $rs->search({$key => $lookup}) }; + return if $state->{job_try}->{$lookup}++; - while (my $job = eval { $jobs->next }) { - push @out => $self->lookup_run($job->run_id, $state); - push @out => encode_json({type => 'job', data => $job->glance_data }) . "\n"; - } - } + my $schema = $self->schema; + + my $rs = $schema->resultset('JobTry'); + my $try = $rs->find({job_try_id => $lookup}); + return () unless $try; - return @out; + return ( + $self->lookup_job($try->job_id, $state, try => $try->job_try_id), + ); } sub lookup_event { @@ -148,24 +149,18 @@ sub lookup_event { my ($lookup, $state) = @_; return unless $lookup; - if (blessed($lookup)) { - $lookup = $lookup->event_id; - } - else { - $lookup = uuid_inflate($lookup); - } return if $state->{event}->{$lookup}++; my $schema = $self->schema; my $rs = $schema->resultset('Event'); - my $event = eval { $rs->find({event_id => $lookup}) }; + my $event = $rs->find_by_id_or_uuid($lookup); return () unless $event; return ( - $self->lookup_jobs($event->job_key, $state), + $self->lookup_job_try($event->job_try_id, $state), encode_json({type => 'event', data => $event->line_data }) . 
"\n" ); } diff --git a/lib/App/Yath/Server/Controller/Project.pm b/lib/App/Yath/Server/Controller/Project.pm index 33ff64216..7b4992456 100644 --- a/lib/App/Yath/Server/Controller/Project.pm +++ b/lib/App/Yath/Server/Controller/Project.pm @@ -11,7 +11,6 @@ use App::Yath::Util qw/share_dir/; use App::Yath::Schema::Util qw/format_duration parse_duration is_invalid_subtest_name/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_deflate uuid_inflate/; use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase; @@ -31,22 +30,22 @@ sub users { my $dbh = $schema->storage->dbh; my $query = <<" EOT"; - SELECT users.user_idx AS user_idx, users.username AS username + SELECT users.user_id AS user_id, users.username AS username FROM users - JOIN runs USING(user_idx) - WHERE runs.project_idx = ? - GROUP BY user_idx + JOIN runs USING(user_id) + WHERE runs.project_id = ? + GROUP BY user_id EOT my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx)) or die $sth->errstr; + $sth->execute($project->project_id) or die $sth->errstr; my $owner = $project->owner; my @out; for my $row (@{$sth->fetchall_arrayref // []}) { - my ($user_idx, $username) = @$row; - my $is_owner = ($owner && $user_idx eq $owner->user_idx) ? 1 : 0; - push @out => {user_idx => $user_idx, username => $username, owner => $is_owner}; + my ($user_id, $username) = @$row; + my $is_owner = ($owner && $user_id eq $owner->user_id) ? 1 : 0; + push @out => {user_id => $user_id, username => $username, owner => $is_owner}; } @out = sort { $b->{owner} cmp $a->{owner} || $a->{username} cmp $b->{username} } @out; @@ -183,13 +182,13 @@ sub get_add_query { return ('') unless $n || @$users || $range; - return ("AND run_idx > (SELECT MAX(run_idx) - ? FROM runs)\n", $n) + return ("AND run_id > (SELECT MAX(run_id) - ? 
FROM runs)\n", $n) unless @$users || $range; my @add_vals; - my $user_query = 'user_idx in (' . join(',' => map { '?' } @$users) . ')'; - push @add_vals => map { uuid_deflate($_) } @$users; + my $user_query = 'user_id in (' . join(',' => map { '?' } @$users) . ')'; + push @add_vals => @$users; return ("AND $user_query\n", @add_vals) unless $n || $range; @@ -198,9 +197,9 @@ sub get_add_query { if ($range) { my $query = <<" EOT"; - SELECT min(run_idx) AS min, max(run_idx) AS max + SELECT min(run_id) AS min, max(run_id) AS max FROM runs - WHERE project_idx = ? + WHERE project_id = ? AND added >= ? AND added <= ? EOT @@ -209,28 +208,28 @@ sub get_add_query { $end = parse_date($end); my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx), $start, $end) or die $sth->errstr; + $sth->execute($project->project_id, $start, $end) or die $sth->errstr; my $ords = $sth->fetchrow_hashref; - my $ord_query = "run_idx >= ? AND run_idx <= ?"; + my $ord_query = "run_id >= ? AND run_id <= ?"; push @add_vals => ($ords->{min}, $ords->{max}); return ("AND $user_query AND $ord_query", @add_vals) if @$users; return ("AND $ord_query", @add_vals); } my $query = <<" EOT"; - SELECT run_idx, run_id + SELECT run_id FROM reporting - WHERE project_idx = ? + WHERE project_id = ? AND $user_query - GROUP BY run_idx, run_id - ORDER BY run_idx DESC + GROUP BY run_id + ORDER BY run_id DESC LIMIT ? EOT my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx), @add_vals, $n) or die $sth->errstr; + $sth->execute($project->project_id, @add_vals, $n) or die $sth->errstr; my @ids = map { $_->[1] } @{$sth->fetchall_arrayref}; return ('') unless @ids; @@ -251,17 +250,17 @@ sub _build_stat_run_list { my $query = <<" EOT"; SELECT run_id FROM reporting - WHERE project_idx = ? + WHERE project_id = ? 
$add_query ORDER BY run_id DESC EOT my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx), @add_vals) or die $sth->errstr; + $sth->execute($project->project_id, @add_vals) or die $sth->errstr; - my @ids = map { uuid_inflate($_->[0]) } @{$sth->fetchall_arrayref}; + my @ids = map { $_->[0] } @{$sth->fetchall_arrayref}; - my @items = map { $_->TO_JSON } $schema->resultset('Run')->search({run_id => {'-in' => \@ids}}, {order_by => {'-DESC' => 'run_idx'}})->all; + my @items = map { $_->TO_JSON } $schema->resultset('Run')->search({run_id => {'-in' => \@ids}}, {order_by => {'-DESC' => 'run_id'}})->all; $stat->{runs} = \@items; } @@ -279,24 +278,24 @@ sub _build_stat_expensive_files { SELECT test_files.filename AS filename, SUM(duration) AS total_duration, AVG(duration) AS average_duration, - COUNT(DISTINCT(run_id)) AS runs, + COUNT(DISTINCT(run_id)) AS runs, COUNT(duration) AS tries, - COUNT(DISTINCT(user_idx)) AS users, + COUNT(DISTINCT(user_id)) AS users, SUM(pass) AS pass, SUM(fail) AS fail, SUM(retry) AS retry, SUM(abort) AS abort FROM reporting - LEFT JOIN test_files USING(test_file_idx) - WHERE project_idx = ? + LEFT JOIN test_files USING(test_file_id) + WHERE project_id = ? AND subtest IS NULL - AND test_file_idx IS NOT NULL + AND test_file_id IS NOT NULL $add_query GROUP BY filename EOT my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx), @add_vals) or die $sth->errstr; + $sth->execute($project->project_id, @add_vals) or die $sth->errstr; my @rows; for my $row (sort { $b->[1] <=> $a->[1] } @{$sth->fetchall_arrayref}) { @@ -335,21 +334,21 @@ sub _build_stat_expensive_subtests { AVG(duration) AS average_duration, COUNT(DISTINCT(run_id)) AS runs, COUNT(duration) AS tries, - COUNT(DISTINCT(user_idx)) AS users, + COUNT(DISTINCT(user_id)) AS users, SUM(pass) AS pass, SUM(fail) AS fail, SUM(abort) AS abort FROM reporting - LEFT JOIN test_files USING(test_file_idx) - WHERE project_idx = ? 
+ LEFT JOIN test_files USING(test_file_id) + WHERE project_id = ? AND subtest IS NOT NULL - AND test_file_idx IS NOT NULL + AND test_file_id IS NOT NULL $add_query GROUP BY filename, subtest EOT my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx), @add_vals) or die $sth->errstr; + $sth->execute($project->project_id, @add_vals) or die $sth->errstr; my @rows; for my $row (sort { $b->[2] <=> $a->[2] } @{$sth->fetchall_arrayref}) { @@ -390,16 +389,16 @@ SUM(fail) AS fail, SUM(abort) AS abort FROM reporting - LEFT JOIN users USING(user_idx) - WHERE project_idx = ? - AND job_key IS NULL + LEFT JOIN users USING(user_id) + WHERE project_id = ? + AND job_try_id IS NULL AND subtest IS NULL $add_query GROUP BY username EOT my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx), @add_vals) or die $sth->errstr; + $sth->execute($project->project_id, @add_vals) or die $sth->errstr; my @rows; for my $row (sort { $b->[1] <=> $a->[1] } @{$sth->fetchall_arrayref}) { @@ -431,38 +430,47 @@ sub _build_stat_user_summary { my ($add_query, @add_vals) = $self->get_add_query($project, $stat); + my $query = <<" EOT"; SELECT SUM(duration) AS total_duration, AVG(duration) AS average_duration, COUNT(DISTINCT(run_id)) AS runs, - COUNT(DISTINCT(user_idx)) AS users, + COUNT(DISTINCT(user_id)) AS users, SUM(pass) AS pass, SUM(fail) AS fail, SUM(retry) AS retry, SUM(abort) AS abort, - CASE WHEN test_file_idx IS NULL THEN FALSE ELSE TRUE END AS has_file, + CASE WHEN test_file_id IS NULL THEN FALSE ELSE TRUE END AS has_file, CASE WHEN subtest IS NULL THEN FALSE ELSE TRUE END AS has_subtest, COUNT(subtest) AS total_subtests, - COUNT(test_file_idx) AS total_test_files, + COUNT(test_file_id) AS total_test_files, COUNT(DISTINCT(subtest)) AS unique_subtests, - COUNT(DISTINCT(test_file_idx)) AS unique_test_files + COUNT(DISTINCT(test_file_id)) AS unique_test_files FROM reporting - WHERE 
project_idx = ? + WHERE project_id = ? $add_query GROUP BY has_file, has_subtest - ORDER BY has_File, has_subtest + ORDER BY has_file, has_subtest EOT my $sth = $dbh->prepare($query); - $sth->execute(uuid_deflate($project->project_idx), @add_vals) or die $sth->errstr; + $sth->execute($project->project_id, @add_vals) or die $sth->errstr; - my $runs = $sth->fetchrow_hashref; + my ($runs, $jobs, $subs); + while (my $row = $sth->fetchrow_hashref) { + if ($row->{has_file} && $row->{has_subtest}) { + $subs = $row; + } + elsif ($row->{has_file}) { + $jobs = $row; + } + else { + $runs = $row; + } + } return $stat->{text} = "No run data." unless $runs->{runs}; - my $jobs = $sth->fetchrow_hashref; - my $subs = $sth->fetchrow_hashref; - $stat->{pair_sets} = []; push @{$stat->{pair_sets}} => [ @@ -514,9 +523,9 @@ sub _build_stat_uncovered { { 'me.name' => 'coverage', 'me.data' => { '!=' => undef }, - 'run.project_idx' => $project->project_idx, + 'run.project_id' => $project->project_id, 'run.has_coverage' => 1, - @$users ? (user_idx => {'-in' => $users}) : () + @$users ? (user_id => {'-in' => $users}) : () }, { join => 'run', @@ -556,13 +565,13 @@ sub _build_stat_coverage { { 'me.name' => 'coverage', 'me.data' => { '!=' => undef }, - 'run.project_idx' => $project->project_idx, + 'run.project_id' => $project->project_id, 'run.has_coverage' => 1, - @$users ? (user_idx => {'-in' => $users}) : () + @$users ? (user_id => {'-in' => $users}) : () }, { join => 'run', - order_by => {'-DESC' => 'run.added'}, + order_by => {'-DESC' => 'run_id'}, $n ? 
(rows => $n) : (), }, )->all; diff --git a/lib/App/Yath/Server/Controller/ReRun.pm b/lib/App/Yath/Server/Controller/ReRun.pm index 89518ff5a..a47bdc6db 100644 --- a/lib/App/Yath/Server/Controller/ReRun.pm +++ b/lib/App/Yath/Server/Controller/ReRun.pm @@ -6,7 +6,7 @@ our $VERSION = '2.000000'; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json encode_pretty_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase; @@ -26,15 +26,11 @@ sub handle { my $project_name = $route->{project}; my $username = $route->{username}; - if ($run_id) { - $run_id = uuid_inflate($run_id) or die error(404 => "Invalid run id"); - } - error(404 => 'No source') unless $run_id || ($project_name && $username); my $schema = $self->schema; my $query = {}; - my $attrs = {order_by => {'-desc' => 'run_idx'}, rows => 1}; + my $attrs = {order_by => {'-desc' => 'run_id'}, rows => 1}; my $run; my $ok = eval { diff --git a/lib/App/Yath/Server/Controller/Resources.pm b/lib/App/Yath/Server/Controller/Resources.pm index c440bc4a1..2dd6e0605 100644 --- a/lib/App/Yath/Server/Controller/Resources.pm +++ b/lib/App/Yath/Server/Controller/Resources.pm @@ -12,7 +12,6 @@ use App::Yath::Schema::Util qw/find_job/; use App::Yath::Schema::DateTimeFormat qw/DTF/; use Test2::Harness::Util::JSON qw/encode_json decode_json/; use Test2::Util::Times qw/render_duration/; -use App::Yath::Schema::UUID qw/uuid_inflate uuid_deflate/; use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -29,7 +28,7 @@ sub handle { my $id = $route->{id} or die error(404 => 'No id provided'); # Specific instant - my $batch = uuid_inflate($route->{batch}); + my $batch = $route->{batch}; if ($route->{data}) { return $self->data_stamps($req, $id) unless $batch; @@ -85,16 +84,15 @@ sub get_thing { $search_args->{global} = 1; } else { - my $uuid = uuid_inflate($id); - if ($uuid && 
eval { $thing = $run_rs->find({run_id => $uuid}) }) { - $search_args->{run_id} = $uuid; + if (eval { $thing = $run_rs->find({run_id => $id}) }) { + $search_args->{run_id} = $id; $done_check = sub { return 1 if $thing->complete; return 0; }; } - elsif (($uuid && eval { $thing = $host_rs->find({host_idx => $uuid}) }) || eval { $thing = $host_rs->find({hostname => $id}) }) { - $search_args->{host_idx} = $thing->host_idx; + elsif ((eval { $thing = $host_rs->find({host_id => $id}) }) || eval { $thing = $host_rs->find({hostname => $id}) }) { + $search_args->{host_id} = $thing->host_id; } else { die error(404 => 'Invalid Job ID or Host ID'); @@ -118,11 +116,11 @@ sub get_stamps { my @vals; if ($search_args->{run_id}) { $fields = "run_id = ?"; - push @vals => uuid_deflate($search_args->{run_id}); + push @vals => $search_args->{run_id}; } - elsif ($search_args->{host_idx}) { - $fields = "host_idx = ?"; - push @vals => uuid_deflate($search_args->{host_idx}); + elsif ($search_args->{host_id}) { + $fields = "host_id = ?"; + push @vals => $search_args->{host_id}; } if ($$start) { @@ -130,14 +128,12 @@ sub get_stamps { push @vals => $$start; } - my $sth = $dbh->prepare("SELECT resource_batch_idx, stamp FROM resource_batch WHERE " . $fields . " ORDER BY stamp ASC"); + my $sth = $dbh->prepare("SELECT resource_batch_id, stamp FROM resource_batch WHERE " . $fields . 
" ORDER BY stamp ASC"); $sth->execute(@vals) or die $sth->errstr; my $rows = $sth->fetchall_arrayref; return unless @$rows; - $_->[0] = uuid_inflate($_->[0]) for @$rows; - $$start = $rows->[-1]->[1]; return $rows; @@ -155,8 +151,8 @@ sub data_stamps { if (my $run_id = $search_args->{run_id}) { push @out => { run_id => $run_id }; } - if (my $host_idx = $search_args->{host_idx}) { - push @out => { host_idx => $host_idx }; + if (my $host_id = $search_args->{host_id}) { + push @out => { host_id => $host_id }; } my $start = time; @@ -222,13 +218,13 @@ sub render_stamp_resources { my %params = @_; my $search_args = $params{search_args}; - my $batch_id = uuid_inflate($params{batch}); + my $batch_id = $params{batch}; my $schema = $self->schema; my $res_rs = $schema->resultset('Resource'); my @res_list; - my $resources = $res_rs->search({resource_batch_idx => $batch_id}, {order_by => {'-asc' => 'batch_ord'}}); + my $resources = $res_rs->search({resource_batch_id => $batch_id}, {order_by => {'-asc' => 'batch_ord'}}); while (my $res = $resources->next) { push @res_list => $self->render_resource($res); } diff --git a/lib/App/Yath/Server/Controller/Run.pm b/lib/App/Yath/Server/Controller/Run.pm index bbe4e3151..bb74a7655 100644 --- a/lib/App/Yath/Server/Controller/Run.pm +++ b/lib/App/Yath/Server/Controller/Run.pm @@ -9,7 +9,7 @@ use Text::Xslate(qw/mark_raw/); use App::Yath::Util qw/share_dir/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -32,9 +32,8 @@ sub handle { } else { my $it = $route->{id} or die error(404 => 'No id'); - $it = uuid_inflate($it) or die error(404 => "Invalid run id"); my $schema = $self->schema; - $run = $schema->resultset('Run')->find({run_id => $it}) or die error(404 => 'Invalid Run'); + $run = $schema->resultset('Run')->find_by_id_or_uuid($it) or die 
error(404 => 'Invalid Run'); } if (my $act = $route->{action}) { @@ -43,7 +42,7 @@ sub handle { } elsif ($act eq 'parameters') { $res->content_type('application/json'); - $res->raw_body($run->run_parameter->parameters); + $res->raw_body($run->parameters); return $res; } elsif ($act eq 'cancel') { @@ -51,32 +50,6 @@ sub handle { } elsif ($act eq 'delete') { die error(400 => "Cannot delete a pinned run") if $run->pinned; - - $run->coverages->delete; - $run->reportings->delete; - - my $batches = $run->resource_batches; - while (my $batch = $batches->next) { - $batch->resources->delete; - $batch->delete; - } - - my $jobs = $run->jobs; - - while (my $job = $jobs->next()) { - my $has_binary = $job->events->search({has_binary => 1}); - while (my $e = $has_binary->next()) { - $has_binary->binaries->delete; - $e->delete; - } - - $job->events->delete; - $job->job_fields->delete; - $job->delete; - } - - $run->run_fields->delete; - $run->sweeps->delete; $run->delete; } } diff --git a/lib/App/Yath/Server/Controller/RunField.pm b/lib/App/Yath/Server/Controller/RunField.pm index 7c1b67f4a..19c5cfa80 100644 --- a/lib/App/Yath/Server/Controller/RunField.pm +++ b/lib/App/Yath/Server/Controller/RunField.pm @@ -9,7 +9,7 @@ use Text::Xslate(qw/mark_raw/); use App::Yath::Util qw/share_dir/; use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json decode_json/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -25,7 +25,7 @@ sub handle { die error(404 => 'Missing route') unless $route; - my $it = uuid_inflate($route->{id}) or die error(404 => 'No id'); + my $it = $route->{id} or die error(404 => 'No id'); my $schema = $self->schema; my $field = $schema->resultset('RunField')->find({run_field_id => $it}) or die error(404 => 'Invalid Field'); diff --git a/lib/App/Yath/Server/Controller/Stream.pm b/lib/App/Yath/Server/Controller/Stream.pm index 
41e7796ee..ea76cbdfd 100644 --- a/lib/App/Yath/Server/Controller/Stream.pm +++ b/lib/App/Yath/Server/Controller/Stream.pm @@ -7,7 +7,8 @@ our $VERSION = '2.000000'; use List::Util qw/max/; use Scalar::Util qw/blessed/; use App::Yath::Schema::Util qw/find_job/; -use App::Yath::Schema::UUID qw/uuid_inflate/; +use Test2::Harness::Util::UUID qw/looks_like_uuid/; + use App::Yath::Server::Response qw/resp error/; use Test2::Harness::Util::JSON qw/encode_json/; use JSON::PP(); @@ -79,24 +80,7 @@ sub stream_runs { my $schema = $self->schema; - # FIXME: Does this make it faster or slower? - my $opts = { -# remove_columns => [qw/run_fields.data/], -# -# join => [qw/user_join project run_fields/], -# '+columns' => { -# 'prefetched_fields' => \'1', -# 'run_fields.run_field_id' => 'run_fields.run_field_id', -# 'run_fields.name' => 'run_fields.name', -# 'run_fields.details' => 'run_fields.details', -# 'run_fields.raw' => 'run_fields.raw', -# 'run_fields.link' => 'run_fields.link', -# 'run_fields.has_data', => \"run_fields.data IS NOT NULL", -# 'user' => \'user_join.username', -# 'project' => \'project.name', -# }, - }; - + my $opts = {remove_columns => [qw/parameters/]}; my %params = ( type => 'run', @@ -104,8 +88,8 @@ sub stream_runs { track_status => 1, id_field => 'run_id', - ord_field => 'added', - sort_field => 'added', + ord_field => 'run_id', + sort_field => 'run_id', search_base => $schema->resultset('Run'), initial_limit => RUN_LIMIT, @@ -117,22 +101,24 @@ sub stream_runs { my $run_id = $route->{run_id}; my $user_id = $route->{user_id}; my $project_id = $route->{project_id}; + + my ($project, $user, $run); if($run_id) { - $run_id = uuid_inflate($run_id) or die error(404 => "Invalid run id"); + $params{id_field} = 'run_uuid' if looks_like_uuid($run_id); return $self->stream_single(%params, id => $run_id); } if ($project_id) { my $p_rs = $schema->resultset('Project'); - $project = eval { $p_rs->find({name => $project_id}) } // eval { $p_rs->find({project_idx => 
$project_id}) } // die error(404 => 'Invalid Project'); - $params{search_base} = $params{search_base}->search_rs({'me.project_idx' => $project->project_idx}); + $project = eval { $p_rs->find({name => $project_id}) } // eval { $p_rs->find({project_id => $project_id}) } // die error(404 => 'Invalid Project'); + $params{search_base} = $params{search_base}->search_rs({'me.project_id' => $project->project_id}); } elsif ($user_id) { my $u_rs = $schema->resultset('User'); - $user = eval { $u_rs->find({username => $user_id}) } // eval { $u_rs->find({user_idx => $user_id}) } // die error(404 => 'Invalid User'); - $params{search_base} = $params{search_base}->search_rs({'me.user_idx' => $user->user_idx}); + $user = eval { $u_rs->find({username => $user_id}) } // eval { $u_rs->find({user_id => $user_id}) } // die error(404 => 'Invalid User'); + $params{search_base} = $params{search_base}->search_rs({'me.user_id' => $user->user_id}); } return $self->stream_set(%params); @@ -162,18 +148,16 @@ sub stream_jobs { req => $req, track_status => 1, - id_field => 'job_key', - ord_field => 'job_idx', + id_field => 'job_id', + ord_field => 'job_id', method => 'glance_data', search_base => scalar($run->jobs), custom_opts => $opts, - order_by => [{'-desc' => 'status'}, {'-desc' => [qw/job_try job_idx name/]}], + order_by => [{'-desc' => 'status'}, {'-desc' => [qw/job_try job_id name/]}], ); if (my $job_uuid = $route->{job}) { - $job_uuid = uuid_inflate($job_uuid) or die error(404 => "Invalid job id"); - my $schema = $self->schema; return $self->stream_single(%params, item => find_job($schema, $job_uuid, $route->{try})); } @@ -198,8 +182,8 @@ sub stream_events { track_status => 0, id_field => 'event_id', - ord_field => 'event_ord', - sort_field => 'event_ord', + ord_field => 'event_idx', + sort_field => 'event_idx', sort_dir => '-asc', method => 'line_data', custom_query => $query, @@ -271,6 +255,7 @@ sub stream_set { my $order_by = $params{order_by} // $sort_field ? 
{$sort_dir => $sort_field} : croak "Must specify either 'order_by' or 'sort_field'"; my $items = $search_base->search($custom_query, {%$custom_opts, order_by => $order_by, $limit ? (rows => $limit) : ()}); + my @buffer; my $start = time; my $ord; @@ -289,7 +274,7 @@ sub stream_set { return 0; }, sub { - unless ($items) { + unless ($items || @buffer) { my $val; if (blessed($ord) && $ord->isa('DateTime')) { my $schema = $self->schema; @@ -314,8 +299,8 @@ sub stream_set { ); } - while (my $item = $items->next()) { - $ord = $item->$ord_field; + while (my $item = shift(@buffer) || $items->next()) { + $ord = max($ord || 0, $item->$ord_field); my $update = JSON::PP::false; if ($track) { @@ -334,8 +319,8 @@ sub stream_set { } } - my $data = $method ? $item->$method : $item->TO_JSON; - return encode_json({type => $type, update => $update, data => $data}) . "\n"; + @buffer = $method ? $item->$method : $item->TO_JSON; + return encode_json({type => $type, update => $update, data => shift(@buffer)}) . "\n"; } $items = undef; diff --git a/lib/App/Yath/Server/Controller/Upload.pm b/lib/App/Yath/Server/Controller/Upload.pm index 886795fea..5c688c851 100644 --- a/lib/App/Yath/Server/Controller/Upload.pm +++ b/lib/App/Yath/Server/Controller/Upload.pm @@ -6,11 +6,10 @@ our $VERSION = '2.000000'; use Text::Xslate(); -use Test2::Harness::Util::UUID qw/gen_uuid/; use Test2::Harness::Util::JSON qw/decode_json/; use Test2::Harness::Util qw/open_file/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use App::Yath::Schema::Queries(); use App::Yath::Util qw/share_dir/; @@ -97,13 +96,13 @@ sub process_form { my $run = $self->schema->resultset('Run')->create({ $run_id ? (run_id => $run_id) : (), - user_idx => ref($user) ? $user->user_idx : 1, - project_idx => $project->project_idx, + user_id => ref($user) ? 
$user->user_id : 1, + project_id => $project->project_id, mode => $mode, status => 'pending', log_file => { - log_file_idx => $run_id, + log_file_id => $run_id, # FIXME name => $file, data => do { local $/; <$fh> }, }, @@ -120,7 +119,7 @@ sub api_user { return unless $key_val; my $schema = $self->schema; - my $key = $schema->resultset('ApiKey')->find({value => uuid_inflate($key_val)}) + my $key = $schema->resultset('ApiKey')->find({value => $key_val}) or return undef; return undef unless $key->status eq 'active'; diff --git a/lib/App/Yath/Server/Controller/User.pm b/lib/App/Yath/Server/Controller/User.pm index c4c66f92a..9eacd5f98 100644 --- a/lib/App/Yath/Server/Controller/User.pm +++ b/lib/App/Yath/Server/Controller/User.pm @@ -7,7 +7,7 @@ our $VERSION = '2.000000'; use Text::Xslate(); use App::Yath::Util qw/share_dir/; use App::Yath::Server::Response qw/resp error/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use Email::Sender::Simple qw(sendmail); use Email::Simple; @@ -65,14 +65,14 @@ sub process_form { # This one we allow non-post, all others need post. 
if ('logout' eq $action) { - $req->session_host->update({'user_idx' => undef}); + $req->session_host->update({'user_id' => undef}); return $res->add_msg("You have been logged out."); } elsif ($action eq 'verify') { - my $evcode_id = $p->{verification_code} + my $evcode = $p->{verification_code} or return $res->add_error("Invalid verification code"); - my $code = $schema->resultset('EmailVerificationCode')->find({evcode_id => uuid_inflate($evcode_id)}) + my $code = $schema->resultset('EmailVerificationCode')->find({evcode => $evcode}) or return $res->add_error("Invalid verification code"); my $email = $code->email; @@ -94,7 +94,7 @@ sub process_form { return $res->add_error("Invalid username or password") unless $user && $user->verify_password($password); - $req->session_host->update({'user_idx' => $user->user_idx}); + $req->session_host->update({'user_id' => $user->user_id}); return $res->add_msg("You have been logged in."); } @@ -111,7 +111,7 @@ sub process_form { return $res->add_error("This email is already in use."); } - my $email = eval { $schema->resultset('Email')->create({user_idx => $user->user_idx, local => $local, domain => $domain}) }; + my $email = eval { $schema->resultset('Email')->create({user_id => $user->user_id, local => $local, domain => $domain}) }; $res->add_error("Unable to add email: $@") unless $email; $self->send_verification_code($email); @@ -145,24 +145,24 @@ sub process_form { return $res->add_msg("Password Changed."); } - if ($p->{api_key_idx} && $KEY_ACTION_MAP{$action}) { - my $key_id = uuid_inflate($p->{api_key_idx}); + if ($p->{api_key} && $KEY_ACTION_MAP{$action}) { + my $api_key = $p->{api_key}; my $user = $req->user or return $res->add_error("You must be logged in"); - my $key = $schema->resultset('ApiKey')->find({api_key_idx => $key_id, user_idx => $user->user_idx}); + my $key = $schema->resultset('ApiKey')->find({value => $api_key, user_id => $user->user_id}); return $res->add_error("Invalid key") unless $key; 
$key->update({status => $KEY_ACTION_MAP{$action}}); return $res->add_msg("Key status changed."); } - if (my $email_idx = $p->{email_idx}) { + if (my $email_id = $p->{email_id}) { my $user = $req->user or return $res->add_error("You must be logged in"); - my $email = $schema->resultset('Email')->find({email_idx => $email_idx, user_idx => $user->user_idx}); + my $email = $schema->resultset('Email')->find({email_id => $email_id, user_id => $user->user_id}); return $res->add_error("Invalid Email") unless $email; if ($action eq 'make primary') { - my $pri = $schema->resultset('PrimaryEmail')->update_or_create({user_idx => $user->user_idx, email_idx => $email_idx}); + my $pri = $schema->resultset('PrimaryEmail')->update_or_create({user_id => $user->user_id, email_id => $email_id}); return $res->add_error("Could not make email primary: $@") unless $pri; return $res->add_msg("Set primary email address."); } @@ -201,8 +201,8 @@ sub send_verification_code { my $our_email = $schema->config('email') or die "System email address is not set"; - my $code = $schema->resultset('EmailVerificationCode')->find_or_create({email_idx => $email->email_idx}); - my $text = $code->evcode_id; + my $code = $schema->resultset('EmailVerificationCode')->find_or_create({email_id => $email->email_id}); + my $text = $code->evcode; my $msg = Email::Simple->create( header => [ diff --git a/lib/App/Yath/Server/Controller/View.pm b/lib/App/Yath/Server/Controller/View.pm index 9afcf1f4f..cb542b8d0 100644 --- a/lib/App/Yath/Server/Controller/View.pm +++ b/lib/App/Yath/Server/Controller/View.pm @@ -8,7 +8,7 @@ use Text::Xslate(qw/mark_raw/); use App::Yath::Util qw/share_dir/; use App::Yath::Schema::Util qw/find_job/; use App::Yath::Server::Response qw/resp error/; -use App::Yath::Schema::UUID qw/uuid_inflate/; + use parent 'App::Yath::Server::Controller'; use Test2::Harness::Util::HashBase qw/-title/; @@ -38,27 +38,25 @@ sub handle { my @url; if ($project_id) { my $p_rs = $schema->resultset('Project'); - 
$project = eval { $p_rs->find({name => $project_id}) } // eval { $p_rs->find({project_idx => $project_id}) } // die error(404 => 'Invalid Project'); + $project = eval { $p_rs->find({name => $project_id}) } // eval { $p_rs->find({project_id => $project_id}) } // die error(404 => 'Invalid Project'); $self->{+TITLE} .= ">" . $project->name; @url = ('project', $project_id); } elsif ($user_id) { my $u_rs = $schema->resultset('User'); - $user = eval { $u_rs->find({username => $user_id}) } // eval { $u_rs->find({user_idx => $user_id}) } // die error(404 => 'Invalid User'); + $user = eval { $u_rs->find({username => $user_id}) } // eval { $u_rs->find({user_id => $user_id}) } // die error(404 => 'Invalid User'); $self->{+TITLE} .= ">" . $user->username; @url = ('user', $user_id); } elsif($run_id) { - $run_id = uuid_inflate($run_id) or die error(404 => 'Invalid Run'); push @url => $run_id; - $run = eval { $schema->resultset('Run')->find({run_id => $run_id}) } or die error(404 => 'Invalid Run'); + $run = eval { $schema->resultset('Run')->find_by_id_or_uuid($run_id) } or die error(404 => 'Invalid Run'); $self->{+TITLE} .= ">" . $run->project->name; my $job_try = $route->{try}; if (my $job_uuid = $route->{job}) { - $job_uuid = uuid_inflate($job_uuid) or die error(404 => 'Invalid Job'); my $job = find_job($schema, $job_uuid, $job_try) or die error(404 => 'Invalid Job'); $self->{+TITLE} .= ">" . 
($job->shortest_file // 'HARNESS'); push @url => $job_uuid; diff --git a/lib/App/Yath/Server/Plack.pm b/lib/App/Yath/Server/Plack.pm index 8eeaffd2b..91626a485 100644 --- a/lib/App/Yath/Server/Plack.pm +++ b/lib/App/Yath/Server/Plack.pm @@ -15,8 +15,6 @@ use Plack::Builder; use Plack::App::Directory; use Plack::App::File; -use App::Yath::Schema::UUID qw/gen_uuid uuid_inflate/; - use App::Yath::Server::Request; use App::Yath::Server::Controller::Upload; use App::Yath::Server::Controller::Recent; @@ -24,7 +22,7 @@ use App::Yath::Server::Controller::User; use App::Yath::Server::Controller::Run; use App::Yath::Server::Controller::RunField; use App::Yath::Server::Controller::Job; -use App::Yath::Server::Controller::JobField; +use App::Yath::Server::Controller::JobTryField; use App::Yath::Server::Controller::Download; use App::Yath::Server::Controller::Sweeper; use App::Yath::Server::Controller::Project; @@ -120,8 +118,8 @@ sub router { $router->connect('/run/field/:id' => {controller => 'App::Yath::Server::Controller::RunField'}); $router->connect('/run/field/:id/delete' => {controller => 'App::Yath::Server::Controller::RunField', action => 'delete'}); - $router->connect('/job/field/:id' => {controller => 'App::Yath::Server::Controller::JobField'}); - $router->connect('/job/field/:id/delete' => {controller => 'App::Yath::Server::Controller::JobField', action => 'delete'}); + $router->connect('/job/field/:id' => {controller => 'App::Yath::Server::Controller::JobTryField'}); + $router->connect('/job/field/:id/delete' => {controller => 'App::Yath::Server::Controller::JobTryField', action => 'delete'}); $router->connect('/job/:job' => {controller => 'App::Yath::Server::Controller::Job'}); $router->connect('/job/:job/:try' => {controller => 'App::Yath::Server::Controller::Job'}); @@ -137,20 +135,20 @@ sub router { $router->connect('/coverage/:source/:user' => {controller => 'App::Yath::Server::Controller::Coverage'}); $router->connect('/coverage/:source/delete' => 
{controller => 'App::Yath::Server::Controller::Coverage', delete => 1}); - $router->connect('/failed/:source' => {controller => 'App::Yath::Server::Controller::Files', failed => 1}); - $router->connect('/failed/:source/json' => {controller => 'App::Yath::Server::Controller::Files', failed => 1, json => 1}); - $router->connect('/failed/:project/:idx' => {controller => 'App::Yath::Server::Controller::Files', failed => 1, json => 1}); - $router->connect('/failed/:project/:username/:idx' => {controller => 'App::Yath::Server::Controller::Files', failed => 1, json => 1}); + $router->connect('/failed/:source' => {controller => 'App::Yath::Server::Controller::Files', failed => 1}); + $router->connect('/failed/:source/json' => {controller => 'App::Yath::Server::Controller::Files', failed => 1, json => 1}); + $router->connect('/failed/:project/:id' => {controller => 'App::Yath::Server::Controller::Files', failed => 1, json => 1}); + $router->connect('/failed/:project/:username/:id' => {controller => 'App::Yath::Server::Controller::Files', failed => 1, json => 1}); - $router->connect('/files/:source' => {controller => 'App::Yath::Server::Controller::Files', failed => 0}); - $router->connect('/files/:source/json' => {controller => 'App::Yath::Server::Controller::Files', failed => 0, json => 1}); - $router->connect('/files/:project/:idx' => {controller => 'App::Yath::Server::Controller::Files', failed => 0, json => 1}); - $router->connect('/files/:project/:username/:idx' => {controller => 'App::Yath::Server::Controller::Files', failed => 0, json => 1}); + $router->connect('/files/:source' => {controller => 'App::Yath::Server::Controller::Files', failed => 0}); + $router->connect('/files/:source/json' => {controller => 'App::Yath::Server::Controller::Files', failed => 0, json => 1}); + $router->connect('/files/:project/:id' => {controller => 'App::Yath::Server::Controller::Files', failed => 0, json => 1}); + $router->connect('/files/:project/:username/:id' => {controller => 
'App::Yath::Server::Controller::Files', failed => 0, json => 1}); $router->connect('/rerun/:run_id' => {controller => 'App::Yath::Server::Controller::ReRun'}); $router->connect('/rerun/:project/:username' => {controller => 'App::Yath::Server::Controller::ReRun'}); - $router->connect('/binary/:binary_idx' => {controller => 'App::Yath::Server::Controller::Binary'}); + $router->connect('/binary/:binary_id' => {controller => 'App::Yath::Server::Controller::Binary'}); $router->connect('/download/:id' => {controller => 'App::Yath::Server::Controller::Download'}); @@ -213,7 +211,7 @@ sub handle_request { $user = $self->schema->resultset('User')->find({username => 'root'}); } elsif ($session_host) { - $user = $session_host->user if $session_host->user_idx; + $user = $session_host->user if $session_host->user_id; } $req->set_user($user) if $user; @@ -293,7 +291,7 @@ sub handle_request { } } - $res->cookies->{id} = {value => $session->session_id, httponly => 1, expires => '+1M'} + $res->cookies->{uuid} = {value => $session->session_uuid, httponly => 1, expires => '+1M'} if $session; return $res->finalize; diff --git a/lib/App/Yath/Server/Request.pm b/lib/App/Yath/Server/Request.pm index dac5694ba..f81361077 100644 --- a/lib/App/Yath/Server/Request.pm +++ b/lib/App/Yath/Server/Request.pm @@ -6,7 +6,7 @@ our $VERSION = '2.000000'; use Carp qw/croak/; -use App::Yath::Schema::UUID qw/gen_uuid uuid_inflate/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use parent 'Plack::Request'; use Test2::Harness::Util::HashBase qw{ @@ -36,13 +36,13 @@ sub session { my $session; my $cookies = $self->cookies; - if (my $id = uuid_inflate($cookies->{id})) { - $session = $schema->resultset('Session')->find({session_id => $id}); + if (my $uuid = $cookies->{uuid}) { + $session = $schema->resultset('Session')->find({session_uuid => $uuid}); $session = undef unless $session && $session->active; } $session ||= $schema->resultset('Session')->create( - {session_id => gen_uuid}, + {session_uuid => 
gen_uuid}, ); return $self->{+SESSION} = $session; diff --git a/lib/App/Yath/Server/Tester.pm b/lib/App/Yath/Server/Tester.pm index dd4e14d45..7a4ef4c9a 100644 --- a/lib/App/Yath/Server/Tester.pm +++ b/lib/App/Yath/Server/Tester.pm @@ -18,7 +18,7 @@ use Carp qw/croak/; use Time::HiRes qw/sleep/; use Test2::Util qw/pkg_to_file/; use App::Yath::Server::Util qw/dbd_driver qdb_driver share_dir share_file/; -use App::Yath::Schema::UUID qw/gen_uuid/; +use Test2::Harness::Util::UUID qw/gen_uuid/; use Scope::Guard qw/guard/; use File::Temp qw/tempfile/; diff --git a/lib/Test2/Harness/Resource.pm b/lib/Test2/Harness/Resource.pm index bbcaaa4b4..a2aacf27f 100644 --- a/lib/Test2/Harness/Resource.pm +++ b/lib/Test2/Harness/Resource.pm @@ -8,6 +8,7 @@ use Carp qw/croak/; use Term::Table; use Time::HiRes qw/time/; +use Sys::Hostname qw/hostname/; use Test2::Harness::Util qw/parse_exit/; use Test2::Harness::IPC::Util qw/start_collected_process ipc_connect set_procname/; @@ -18,12 +19,12 @@ use Test2::Harness::Util::HashBase qw{ host } +sub host { $_[0]->{+HOST} //= hostname() } + sub DESTROY { my $self = shift; $self->cleanup(); @@ -144,6 +148,7 @@ sub send_data_event { resource_state => { module => ref($self) || $self, data => $data, + host => $self->{+HOST}, }, }, }); diff --git a/lib/Test2/Harness/Run.pm b/lib/Test2/Harness/Run.pm index 66d51d11b..2d410737e 100644 --- a/lib/Test2/Harness/Run.pm +++ b/lib/Test2/Harness/Run.pm @@ -122,9 +122,10 @@ sub send_initial_events { facet_data => { harness_job_queued => { - file => $job->test_file->file, - job_id => $job->job_id, - stamp => $stamp, + file => $job->test_file->file, + rel_file => $job->test_file->relative, + job_id => $job->job_id, + stamp => $stamp, } }, ); diff --git a/lib/Test2/Harness/Util.pm b/lib/Test2/Harness/Util.pm index e90f78bdd..d525d2c50 100644 --- a/lib/Test2/Harness/Util.pm +++ b/lib/Test2/Harness/Util.pm @@ -41,8 +41,6 @@ our @EXPORT_OK = ( render_status_data - looks_like_uuid - clean_path find_in_updir }, 
@@ -75,15 +73,6 @@ sub find_in_updir { return; } -sub looks_like_uuid { - my ($in) = @_; - - return undef unless defined $in; - return undef unless length($in) == 36; - return undef unless $in =~ m/^[0-9A-F\-]+$/i; - return $in; -} - sub is_same_file { my ($file1, $file2) = @_; diff --git a/lib/Test2/Harness/Util/UUID.pm b/lib/Test2/Harness/Util/UUID.pm deleted file mode 100644 index f7a81f096..000000000 --- a/lib/Test2/Harness/Util/UUID.pm +++ /dev/null @@ -1,89 +0,0 @@ -package Test2::Harness::Util::UUID; -use strict; -use warnings; - -our $VERSION = '2.000000'; - -use Data::UUID; -use Importer 'Importer' => 'import'; - -our @EXPORT = qw/gen_uuid/; -our @EXPORT_OK = qw/UG gen_uuid/; - -my ($UG, $UG_PID); - -sub UG { - return $UG if $UG && $UG_PID && $UG_PID == $$; - - $UG_PID = $$; - return $UG = Data::UUID->new; -} - -# Initialize it here in this PID to start -UG(); - -sub gen_uuid { UG()->create_str() } - -1; - -__END__ - -=pod - -=encoding UTF-8 - -=head1 NAME - -Test2::Harness::Util::UUID - Utils for generating UUIDs. - -=head1 DESCRIPTION - -This module provides a consistent UUID source for all of Test2::Harness. - -=head1 SYNOPSIS - - use Test2::Harness::Util::UUID qw/gen_uuid/; - - my $uuid = gen_uuid; - -=head1 EXPORTS - -=over 4 - -=item $uuid = gen_uuid() - -Generate a UUID. - -=back - -=head1 SOURCE - -The source code repository for Test2-Harness can be found at -L. - -=head1 MAINTAINERS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 AUTHORS - -=over 4 - -=item Chad Granum Eexodist@cpan.orgE - -=back - -=head1 COPYRIGHT - -Copyright Chad Granum Eexodist7@gmail.comE. - -This program is free software; you can redistribute it and/or -modify it under the same terms as Perl itself. 
- -See L - -=cut diff --git a/share/js/eventtable.js b/share/js/eventtable.js index 457f00560..97469f13c 100644 --- a/share/js/eventtable.js +++ b/share/js/eventtable.js @@ -78,7 +78,7 @@ t2hui.eventtable.expand_lines = function(item) { 'item': item, 'set_ord': count++, 'set_total': item.lines.length, - 'id': item.event_id, + 'id': item.event_uuid, }); tools = false; }); @@ -94,10 +94,10 @@ t2hui.eventtable.message_builder = function(item, dest, data, table) { if (item.item.is_parent == false) { return } - var events_uri = base_uri + 'event/' + item.item.event_id + '/events'; + var events_uri = base_uri + 'event/' + item.item.event_uuid + '/events'; var jumpto = window.location.hash.substr(1); - var highlight = item.item.event_id === jumpto ? true : false; + var highlight = item.item.event_uuid === jumpto ? true : false; var expand = $('
+
'); @@ -126,17 +126,17 @@ t2hui.eventtable.message_builder = function(item, dest, data, table) { if (highlight) { row.addClass('highlight'); - $('[data-parent-id="' + item.item.event_id + '"]').addClass('highlight'); + $('[data-parent-id="' + item.item.event_uuid + '"]').addClass('highlight'); } else { row.removeClass('highlight'); - $('[data-parent-id="' + item.item.event_id + '"]').removeClass('highlight'); + $('[data-parent-id="' + item.item.event_uuid + '"]').removeClass('highlight'); } }); } }, function(e) { - var params = {"data": {"parent-id": item.item.event_id}}; + var params = {"data": {"parent-id": item.item.event_uuid}}; if (highlight) { params.class = "highlight"; } @@ -256,7 +256,7 @@ t2hui.eventtable.tool_builder = function(item, tools, data) { $('#modal_body').text("loading..."); $('#free_modal').slideDown(); - var uri = base_uri + 'event/' + item.item.event_id; + var uri = base_uri + 'event/' + item.item.event_uuid; $.ajax(uri, { 'data': { 'content-type': 'application/json' }, @@ -277,7 +277,7 @@ t2hui.eventtable.tool_builder = function(item, tools, data) { $('#modal_body').text("loading..."); $('#free_modal').slideDown(); - var uri = base_uri + 'event/' + item.item.event_id; + var uri = base_uri + 'event/' + item.item.event_uuid; $.ajax(uri, { 'data': { 'content-type': 'application/json' }, @@ -304,7 +304,7 @@ t2hui.eventtable.modify_row = function(row, item, table, controls) { row.addClass('facet_' + item.facet); row.addClass('tag_' + ctag); - row.attr('data-event-id', item.item.event_id); + row.attr('data-event-id', item.item.event_uuid); if (!controls.filters.seen[tag]) { controls.filters.state[tag] = !controls.filters.hide[tag]; diff --git a/share/js/fieldtable.js b/share/js/fieldtable.js index 3153fc88f..7adb7b7d3 100644 --- a/share/js/fieldtable.js +++ b/share/js/fieldtable.js @@ -169,7 +169,6 @@ function FieldTable(spec) { var attr = me.spec.dynamic_field_attribute; if (attr && item[attr]) { - console.log(attr, "xxx", item[attr]); 
item[attr].forEach(function(field) { var col = me.render_dynamic_col(field, field.name, item); if (me.hidden_columns[field.name]) { @@ -222,7 +221,6 @@ function FieldTable(spec) { toolrow.prepend(td); } - console.log(field); if (field.has_data) { var viewer = $('
'); var td = $(''); diff --git a/share/js/interactions.js b/share/js/interactions.js index 43317df4e..bcdf5ae52 100644 --- a/share/js/interactions.js +++ b/share/js/interactions.js @@ -4,7 +4,7 @@ function build_interactions(item, state) { if (item.type === 'run') { var run_table = t2hui.runtable.build_table(); content.append(run_table.render()); - run_table.render_item(item.data, item.data.run_id); + run_table.render_item(item.data, item.data.run_uuid); return; } @@ -24,7 +24,7 @@ function build_interactions(item, state) { content.empty(); state = {}; - var uri = base_uri + 'interactions/data/' + event_id + '/' + val; + var uri = base_uri + 'interactions/data/' + event_uuid + '/' + val; t2hui.fetch(uri, {}, function(item) { build_interactions(item, state) }); return true; }); @@ -36,16 +36,16 @@ function build_interactions(item, state) { else if (item.type === 'job') { state.event_table = null; - content.append('
'); + content.append('
'); var job_table = t2hui.jobtable.build_table(null); if (state.list) { - state.list.append('
  • ' + item.data.file + '
  • '); + state.list.append('
  • ' + item.data.file + '
  • '); } content.append(job_table.render()); - job_table.render_item(item.data, item.data.job_key); + job_table.render_item(item.data, item.data.job_uuid); return; } @@ -61,7 +61,7 @@ function build_interactions(item, state) { state.event_table = event_table; } - state.event_table.render_item(item.data, item.data.event_id); + state.event_table.render_item(item.data, item.data.event_uuid); } } diff --git a/share/js/jobtable.js b/share/js/jobtable.js index 2e8e24875..efdac754e 100644 --- a/share/js/jobtable.js +++ b/share/js/jobtable.js @@ -85,7 +85,7 @@ t2hui.jobtable.tool_builder = function(item, tools, data) { $('#modal_body').text("loading..."); $('#free_modal').slideDown(); - var uri = base_uri + 'job/' + item.job_key; + var uri = base_uri + 'job/' + item.job_uuid; $.ajax(uri, { 'data': { 'content-type': 'application/json' }, @@ -96,7 +96,7 @@ t2hui.jobtable.tool_builder = function(item, tools, data) { }); }); - var link = base_uri + 'view/' + item.run_id + '/' + item.job_key; + var link = base_uri + 'view/' + item.run_uuid + '/' + item.job_uuid; var go = $(''); tools.append(go); }; diff --git a/share/js/lookup.js b/share/js/lookup.js index 7c1308dab..750a9f1d3 100644 --- a/share/js/lookup.js +++ b/share/js/lookup.js @@ -32,7 +32,7 @@ $(function() { state.event_table = event_table; } - state.event_table.render_item(item.data, item.data.event_id); + state.event_table.render_item(item.data, item.data.event_uuid); } else if (item.type === 'job') { if (!state.job_table) { @@ -41,7 +41,7 @@ $(function() { jobs.append(job_table.render()); state.job_table = job_table; } - state.job_table.render_item(item.data, item.data.job_key); + state.job_table.render_item(item.data, item.data.job_uuid); } else if (item.type === 'run') { if (!state.run_table) { @@ -50,7 +50,7 @@ $(function() { runs.append(run_table.render()); state.run_table = run_table; } - state.run_table.render_item(item.data, item.data.run_id); + state.run_table.render_item(item.data, item.data.run_uuid); } } 
); diff --git a/share/js/project.js b/share/js/project.js index 51843a681..b5d99a2c6 100644 --- a/share/js/project.js +++ b/share/js/project.js @@ -192,7 +192,7 @@ t2hui.project_stats.reload = function(all) { var run_table = t2hui.runtable.build_table(); div.html(run_table.render()); item.runs.forEach(function(run) { - run_table.render_item(run, run.run_id); + run_table.render_item(run, run.run_uuid); }) } diff --git a/share/js/resources.js b/share/js/resources.js index d90bab6df..52d92209e 100644 --- a/share/js/resources.js +++ b/share/js/resources.js @@ -123,8 +123,8 @@ $(function() { if (!item) { return } if (item.complete) { complete = true } - if (item.run_id) { - var stream_url = base_uri + 'stream/run/' + item.run_id; + if (item.run_uuid) { + var stream_url = base_uri + 'stream/run/' + item.run_uuid; var run_table = t2hui.runtable.build_table(); runs.append(run_table.render()); @@ -133,7 +133,7 @@ $(function() { {}, function(item) { if (item.type === 'run') { - run_table.render_item(item.data, item.data.run_id); + run_table.render_item(item.data, item.data.run_uuid); } } ); diff --git a/share/js/runtable.js b/share/js/runtable.js index cd0767781..8af416df8 100644 --- a/share/js/runtable.js +++ b/share/js/runtable.js @@ -86,10 +86,18 @@ t2hui.runtable.build_user = function(item, col) { }; t2hui.runtable.build_concurrency = function(item, col) { - var val = item.concurrency; - if (val === null) { return }; - if (val === undefined) { return }; - col.text("-j" + val); + var valj = item.concurrency_j; + var valx = item.concurrency_x; + if (valj === null) { return }; + if (valj === undefined) { return }; + + var val = "-j" + valj; + + if (valx) { + val = val + ":" + valx; + } + + col.text(val); }; t2hui.runtable.build_pass = function(item, col) { @@ -104,7 +112,7 @@ t2hui.runtable.build_fail = function(item, col) { if (val === null) { return }; if (val === undefined) { return }; if (val == 0) { col.append($('
    ' + val + '
    ')) } - else { col.append($('' + val + '')) } + else { col.append($('' + val + '')) } }; t2hui.runtable.build_retry = function(item, col) { @@ -116,8 +124,8 @@ t2hui.runtable.build_retry = function(item, col) { }; t2hui.runtable.tool_builder = function(item, tools, data) { - var link = base_uri + 'view/' + item.run_id; - var downlink = base_uri + 'download/' + item.run_id; + var link = base_uri + 'view/' + item.run_uuid; + var downlink = base_uri + 'download/' + item.run_uuid; var params = $('
    '); tools.append(params); @@ -125,7 +133,7 @@ t2hui.runtable.tool_builder = function(item, tools, data) { $('#modal_body').html("Loading..."); $('#free_modal').slideDown(); - var url = base_uri + 'run/' + item.run_id + '/parameters'; + var url = base_uri + 'run/' + item.run_uuid + '/parameters'; $.ajax(url, { 'data': { 'content-type': 'application/json' }, 'error': function(a, b, c) { alert("Failed to load run paramaters") }, @@ -179,7 +187,7 @@ t2hui.runtable.tool_builder = function(item, tools, data) { var ok = confirm("Are you sure you wish to cancel this run? This action cannot be undone!\nNote: This only changes the runs status, it will not stop a running test. This is used to 'fix' an aborted run that is still set to 'running'"); if (!ok) { return; } - var url = base_uri + 'run/' + item.run_id + '/cancel'; + var url = base_uri + 'run/' + item.run_uuid + '/cancel'; $.ajax(url, { 'data': { 'content-type': 'application/json' }, 'error': function(a, b, c) { alert("Failed to cancel run") }, @@ -199,18 +207,18 @@ t2hui.runtable.tool_builder = function(item, tools, data) { var ok = confirm("Are you sure you wish to delete this run? 
This action cannot be undone!"); if (!ok) { return; } - var url = base_uri + 'run/' + item.run_id + '/delete'; + var url = base_uri + 'run/' + item.run_uuid + '/delete'; $.ajax(url, { 'data': { 'content-type': 'application/json' }, 'error': function(a, b, c) { alert("Could not delete run") }, 'success': function() { - $('tr#' + item.run_id).remove(); + $('tr#' + item.run_uuid).remove(); }, }); }); } - var resources = $(''); + var resources = $(''); tools.append(resources); var cimg = $(''); @@ -220,7 +228,7 @@ t2hui.runtable.tool_builder = function(item, tools, data) { dcover.append(dcimg); if (item.has_coverage && item.status === 'complete') { - var curl = base_uri + 'coverage/' + item.run_id; + var curl = base_uri + 'coverage/' + item.run_uuid; var clink = $(''); clink.append(cimg); cover.append(clink); @@ -268,7 +276,7 @@ t2hui.runtable.tool_builder = function(item, tools, data) { tools.prepend(pintool); pintool.click(function() { - var url = base_uri + 'run/' + item.run_id + '/pin'; + var url = base_uri + 'run/' + item.run_uuid + '/pin'; $.ajax(url, { 'data': { 'content-type': 'application/json' }, 'error': function(a, b, c) { alert("Failed to pin run") }, diff --git a/share/js/view.js b/share/js/view.js index 11dc49b48..2b19ffeb5 100644 --- a/share/js/view.js +++ b/share/js/view.js @@ -18,8 +18,8 @@ $(function() { }}, function(item) { if (item.type === 'event') { - item.data.run_id = state.run.run_id; - item.data.job_key = state.job.job_key; + item.data.run_uuid = state.run.run_uuid; + item.data.job_uuid = state.job.job_uuid; state.event = item.data; if (!state.event_table) { var event_controls = t2hui.eventtable.build_controls(state.run, state.job); @@ -36,17 +36,17 @@ $(function() { } } - state.event_table.render_item(item.data, item.data.event_id); + state.event_table.render_item(item.data, item.data.event_uuid); } else if (item.type === 'job') { - item.data.run_id = state.run.run_id; + item.data.run_uuid = state.run.run_uuid; state.job = item.data; if 
(!state.job_table) { var job_table = t2hui.jobtable.build_table(state.run); jobs.append(job_table.render()); state.job_table = job_table; } - state.job_table.render_item(item.data, item.data.job_key); + state.job_table.render_item(item.data, item.data.job_uuid); } else if (item.type === 'run') { state.run = item.data; @@ -55,7 +55,7 @@ $(function() { runs.append(run_table.render()); state.run_table = run_table; } - state.run_table.render_item(item.data, item.data.run_id); + state.run_table.render_item(item.data, item.data.run_uuid); } } ); diff --git a/share/schema/PostgreSQL.sql b/share/schema/PostgreSQL.sql index ea32ac012..2e872ae02 100644 --- a/share/schema/PostgreSQL.sql +++ b/share/schema/PostgreSQL.sql @@ -1,7 +1,7 @@ CREATE EXTENSION "citext"; CREATE EXTENSION "uuid-ossp"; -CREATE TYPE queue_status AS ENUM( +CREATE TYPE queue_stat AS ENUM( 'pending', 'running', 'complete', @@ -23,59 +23,23 @@ CREATE TYPE run_modes AS ENUM( 'complete' ); -CREATE TYPE run_buffering AS ENUM( - 'none', - 'diag', - 'job', - 'run' -); - CREATE TYPE user_type AS ENUM( 'admin', -- Can add users and set permissions 'user' -- Can manage reports for their projects ); -CREATE TYPE io_stream AS ENUM( - 'STDOUT', - 'STDERR' -); - -CREATE TYPE tags AS ENUM( - 'other', -- Catch all for any not in this enum - 'ABOUT', - 'ARRAY', - 'BRIEF', - 'CONTROL', - 'CRITICAL', - 'DEBUG', - 'DIAG', - 'ENCODING', - 'ERROR', - 'FACETS', - 'FAIL', - 'FAILED', - 'FATAL', - 'HALT', - 'HARNESS', - 'KILL', - 'NO PLAN', - 'PASS', - 'PASSED', - 'PLAN', - 'REASON', - 'SHOW', - 'SKIP ALL', - 'SKIPPED', - 'STDERR', - 'TAGS', - 'TIMEOUT', - 'VERSION', - 'WARN', - 'WARNING' +CREATE TABLE versions( + version NUMERIC(10,6) NOT NULL, + version_id SERIAL PRIMARY KEY, + updated TIMESTAMP NOT NULL DEFAULT now(), + + UNIQUE(version) ); +INSERT INTO versions(version) VALUES('2.000000'); + CREATE TABLE config( - config_id BIGSERIAL PRIMARY KEY, + config_id SERIAL PRIMARY KEY, setting VARCHAR(128) NOT NULL, value VARCHAR(256) 
NOT NULL, @@ -84,11 +48,11 @@ CREATE TABLE config( CREATE TABLE users ( user_id BIGSERIAL NOT NULL PRIMARY KEY, - username CITEXT NOT NULL, pw_hash VARCHAR(31) DEFAULT NULL, pw_salt VARCHAR(22) DEFAULT NULL, - realname TEXT DEFAULT NULL, role user_type NOT NULL DEFAULT 'user', + username CITEXT NOT NULL, + realname TEXT DEFAULT NULL, UNIQUE(username) ); @@ -96,9 +60,9 @@ CREATE TABLE users ( CREATE TABLE email ( email_id BIGSERIAL NOT NULL PRIMARY KEY, user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, + verified BOOL NOT NULL DEFAULT FALSE, local CITEXT NOT NULL, domain CITEXT NOT NULL, - verified BOOL NOT NULL DEFAULT FALSE, UNIQUE(local, domain) ); @@ -119,13 +83,13 @@ CREATE TABLE hosts ( ); CREATE TABLE email_verification_codes ( - email_id BIGINT NOT NULL REFERENCES email(email_id) ON DELETE CASCADE PRIMARY KEY, - evcode UUID NOT NULL + evcode UUID NOT NULL, + email_id BIGINT NOT NULL REFERENCES email(email_id) ON DELETE CASCADE PRIMARY KEY ); CREATE TABLE sessions ( - session_id BIGSERIAL NOT NULL PRIMARY KEY, session_uuid UUID NOT NULL, + session_id BIGSERIAL NOT NULL PRIMARY KEY, active BOOL DEFAULT TRUE, UNIQUE(session_uuid) @@ -147,11 +111,11 @@ CREATE TABLE session_hosts ( CREATE INDEX IF NOT EXISTS session_hosts_session ON session_hosts(session_id); CREATE TABLE api_keys ( + value UUID NOT NULL, api_key_id BIGSERIAL NOT NULL PRIMARY KEY, user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, - name VARCHAR(128) NOT NULL, - value VARCHAR(36) NOT NULL, status api_key_status NOT NULL DEFAULT 'active', + name VARCHAR(128) NOT NULL, UNIQUE(value) ); @@ -166,8 +130,8 @@ CREATE TABLE log_files ( CREATE TABLE projects ( project_id BIGSERIAL NOT NULL PRIMARY KEY, - name CITEXT NOT NULL, owner BIGINT DEFAULT NULL REFERENCES users(user_id) ON DELETE SET NULL, + name CITEXT NOT NULL, UNIQUE(name) ); @@ -181,44 +145,52 @@ CREATE TABLE permissions ( UNIQUE(project_id, user_id) ); -CREATE TABLE runs ( - run_id BIGSERIAL NOT NULL 
PRIMARY KEY, - user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, - project_id BIGINT NOT NULL REFERENCES projects(project_id) ON DELETE CASCADE, - log_file_id BIGINT DEFAULT NULL REFERENCES log_files(log_file_id) ON DELETE SET NULL, - - run_uuid UUID NOT NULL, - - status queue_status NOT NULL DEFAULT 'pending', - - worker_id TEXT DEFAULT NULL, - error TEXT DEFAULT NULL, - - pinned BOOL NOT NULL DEFAULT FALSE, +CREATE TABLE syncs ( + sync_id SERIAL NOT NULL PRIMARY KEY, + last_run_id BIGINT NOT NULL, + last_project_id BIGINT NOT NULL, + last_user_id BIGINT NOT NULL, + source VARCHAR(64) NOT NULL, - -- FIXME - has_coverage BOOL NOT NULL DEFAULT FALSE, - has_resources BOOL NOT NULL DEFAULT FALSE, + UNIQUE(source) +); - -- FIXME: Do we need this? - duration TEXT DEFAULT NULL, +CREATE TABLE runs ( + run_uuid UUID NOT NULL, - -- User Input - added TIMESTAMP NOT NULL DEFAULT now(), - mode run_modes NOT NULL DEFAULT 'qvfd', - buffer run_buffering NOT NULL DEFAULT 'job', + run_id BIGSERIAL NOT NULL PRIMARY KEY, + user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, + project_id BIGINT NOT NULL REFERENCES projects(project_id) ON DELETE CASCADE, + log_file_id BIGINT DEFAULT NULL REFERENCES log_files(log_file_id) ON DELETE SET NULL, - -- From Log + sync_id INTEGER DEFAULT NULL REFERENCES syncs(sync_id) ON DELETE SET NULL, passed INTEGER DEFAULT NULL, failed INTEGER DEFAULT NULL, + to_retry INTEGER DEFAULT NULL, retried INTEGER DEFAULT NULL, - concurrency INTEGER DEFAULT NULL, + concurrency_j INTEGER DEFAULT NULL, + concurrency_x INTEGER DEFAULT NULL, + added TIMESTAMP NOT NULL DEFAULT now(), + + status queue_stat NOT NULL DEFAULT 'pending', + mode run_modes NOT NULL DEFAULT 'qvfd', + + canon BOOL NOT NULL, -- Should be true if sync_id was never set + pinned BOOL NOT NULL DEFAULT FALSE, + has_coverage BOOL DEFAULT NULL, + has_resources BOOL DEFAULT NULL, + + parameters JSONB DEFAULT NULL, + worker_id TEXT DEFAULT NULL, + error TEXT DEFAULT 
NULL, + duration NUMERIC(14,4) DEFAULT NULL, UNIQUE(run_uuid) ); CREATE INDEX IF NOT EXISTS run_projects ON runs(project_id); CREATE INDEX IF NOT EXISTS run_status ON runs(status); CREATE INDEX IF NOT EXISTS run_user ON runs(user_id); +CREATE INDEX IF NOT EXISTS run_canon ON runs(run_id, canon); CREATE TABLE sweeps ( sweep_id BIGSERIAL NOT NULL PRIMARY KEY, @@ -229,26 +201,6 @@ CREATE TABLE sweeps ( ); CREATE INDEX IF NOT EXISTS sweep_runs ON sweeps(run_id); -CREATE TABLE run_fields ( - run_field_id BIGSERIAL NOT NULL PRIMARY KEY, - run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, - run_field_uuid UUID NOT NULL, - name VARCHAR(64) NOT NULL, - data JSONB DEFAULT NULL, - details TEXT DEFAULT NULL, - raw TEXT DEFAULT NULL, - link TEXT DEFAULT NULL, - - UNIQUE(run_field_uuid) -); -CREATE INDEX IF NOT EXISTS run_fields_run_id ON run_fields(run_id); -CREATE INDEX IF NOT EXISTS run_fields_name ON run_fields(name); - -CREATE TABLE run_parameters ( - run_id BIGINT NOT NULL PRIMARY KEY REFERENCES runs(run_id) ON DELETE CASCADE, - parameters JSONB DEFAULT NULL -); - CREATE TABLE test_files ( test_file_id BIGSERIAL NOT NULL PRIMARY KEY, filename VARCHAR(255) NOT NULL, @@ -256,143 +208,132 @@ CREATE TABLE test_files ( UNIQUE(filename) ); -CREATE TABLE jobs ( - job_id BIGSERIAL NOT NULL PRIMARY KEY, - run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, - test_file_id BIGINT DEFAULT NULL REFERENCES test_files(test_file_id) ON DELETE CASCADE, +INSERT INTO test_files(filename) VALUES('HARNESS INTERNAL LOG'); - job_uuid UUID NOT NULL, - job_try INT NOT NULL, - status queue_status NOT NULL DEFAULT 'pending', - - is_harness_out BOOL NOT NULL DEFAULT FALSE, - - -- Summaries - fail BOOL DEFAULT NULL, - retry BOOL DEFAULT NULL, - name TEXT DEFAULT NULL, - exit_code INT DEFAULT NULL, - launch TIMESTAMP DEFAULT NULL, - start TIMESTAMP DEFAULT NULL, - ended TIMESTAMP DEFAULT NULL, +CREATE TABLE jobs ( + job_uuid UUID NOT NULL, - duration DOUBLE PRECISION 
DEFAULT NULL, + job_id BIGSERIAL NOT NULL PRIMARY KEY, + run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, + test_file_id BIGINT NOT NULL REFERENCES test_files(test_file_id) ON DELETE CASCADE, - pass_count BIGINT DEFAULT NULL, - fail_count BIGINT DEFAULT NULL, + is_harness_out BOOL NOT NULL, + failed BOOL NOT NULL, + passed BOOL DEFAULT NULL, - UNIQUE(job_uuid, job_try) + UNIQUE(job_uuid) ); CREATE INDEX IF NOT EXISTS job_runs ON jobs(run_id); -CREATE INDEX IF NOT EXISTS job_fail ON jobs(fail); CREATE INDEX IF NOT EXISTS job_file ON jobs(test_file_id); -CREATE TABLE job_parameters ( - job_id BIGINT NOT NULL PRIMARY KEY REFERENCES jobs(job_id) ON DELETE CASCADE, - parameters JSONB DEFAULT NULL -); +CREATE TABLE job_tries ( + job_try_id BIGSERIAL NOT NULL PRIMARY KEY, + job_id BIGINT NOT NULL REFERENCES jobs(job_id) ON DELETE CASCADE, + pass_count BIGINT DEFAULT NULL, + fail_count BIGINT DEFAULT NULL, -CREATE TABLE job_outputs ( - job_output_id BIGSERIAL NOT NULL PRIMARY KEY, - job_id BIGINT NOT NULL REFERENCES jobs(job_id) ON DELETE CASCADE, - stream io_stream NOT NULL, - output TEXT NOT NULL, + exit_code INTEGER DEFAULT NULL, + launch TIMESTAMP DEFAULT NULL, + start TIMESTAMP DEFAULT NULL, + ended TIMESTAMP DEFAULT NULL, - UNIQUE(job_id, stream) -); + status queue_stat NOT NULL DEFAULT 'pending', -CREATE TABLE job_fields ( - job_field_id BIGSERIAL NOT NULL PRIMARY KEY, - job_id BIGINT NOT NULL REFERENCES jobs(job_id) ON DELETE CASCADE, - job_field_uuid UUID NOT NULL, - name VARCHAR(64) NOT NULL, - data JSONB DEFAULT NULL, - details TEXT DEFAULT NULL, - raw TEXT DEFAULT NULL, - link TEXT DEFAULT NULL, + job_try_ord SMALLINT NOT NULL, - UNIQUE(job_field_uuid) -); -CREATE INDEX IF NOT EXISTS job_fields_job_id ON job_fields(job_id); -CREATE INDEX IF NOT EXISTS job_fields_name ON job_fields(name); + fail BOOL DEFAULT NULL, + retry BOOL DEFAULT NULL, + duration NUMERIC(14,4) DEFAULT NULL, -CREATE TABLE events ( - event_id BIGSERIAL NOT NULL PRIMARY KEY, + 
parameters JSONB DEFAULT NULL, + stdout TEXT DEFAULT NULL, + stderr TEXT DEFAULT NULL, - job_id BIGINT NOT NULL REFERENCES jobs(job_id) ON DELETE CASCADE, - parent_id BIGINT DEFAULT NULL REFERENCES events(event_id) ON DELETE CASCADE, + UNIQUE(job_try_id, job_try_ord) +); +CREATE INDEX IF NOT EXISTS job_try_fail ON job_tries(fail); +CREATE INDEX IF NOT EXISTS job_try_job_fail ON job_tries(job_id, fail); +CREATE TABLE events ( event_uuid UUID NOT NULL, trace_uuid UUID DEFAULT NULL, + parent_uuid UUID DEFAULT NULL REFERENCES events(event_uuid), + + event_id BIGSERIAL NOT NULL PRIMARY KEY, + job_try_id BIGINT NOT NULL REFERENCES job_tries(job_try_id) ON DELETE CASCADE, + parent_id BIGINT DEFAULT NULL REFERENCES events(event_id) ON DELETE CASCADE, + + event_idx INTEGER NOT NULL, -- Line number from log, or event number from stream + event_sdx INTEGER NOT NULL, -- Event sequence number from the line (IE parent + subtest events) + stamp TIMESTAMP DEFAULT NULL, - stamp TIMESTAMP NOT NULL, - event_ord INTEGER NOT NULL, nested SMALLINT NOT NULL, is_subtest BOOL NOT NULL, is_diag BOOL NOT NULL, is_harness BOOL NOT NULL, is_time BOOL NOT NULL, - is_assert BOOL NOT NULL, causes_fail BOOL NOT NULL, - has_binary BOOL NOT NULL, has_facets BOOL NOT NULL, has_orphan BOOL NOT NULL, - has_resources BOOL NOT NULL, + has_binaries BOOL NOT NULL, - UNIQUE(job_id, event_ord), + facets JSONB DEFAULT NULL, + orphan JSONB DEFAULT NULL, + rendered JSONB DEFAULT NULL, + + UNIQUE(job_try_id, event_idx, event_sdx), UNIQUE(event_uuid) ); -CREATE INDEX IF NOT EXISTS event_job_ts ON events(job_id, stamp); -CREATE INDEX IF NOT EXISTS event_job_st ON events(job_id, is_subtest); CREATE INDEX IF NOT EXISTS event_parent ON events(parent_id); -CREATE INDEX IF NOT EXISTS event_trace ON events(trace_uuid); +CREATE INDEX IF NOT EXISTS event_job_ts ON events(job_try_id, stamp); +CREATE INDEX IF NOT EXISTS event_job_st ON events(job_try_id, is_subtest); -CREATE TABLE renders ( - render_id BIGSERIAL NOT NULL 
PRIMARY KEY, - job_id BIGINT NOT NULL REFERENCES jobs(job_id) ON DELETE CASCADE, - event_id BIGINT NOT NULL REFERENCES events(event_id) ON DELETE CASCADE, +CREATE TABLE binaries ( + event_uuid UUID NOT NULL, - facet VARCHAR(64) NOT NULL, - tag tags NOT NULL, + binary_id BIGSERIAL NOT NULL PRIMARY KEY, + event_id BIGINT DEFAULT NULL REFERENCES events(event_id) ON DELETE CASCADE, - other_tag VARCHAR(8) DEFAULT NULL, + is_image BOOL NOT NULL DEFAULT FALSE, - message TEXT NOT NULL, - data JSONB DEFAULT NULL + filename VARCHAR(512) NOT NULL, + description TEXT DEFAULT NULL, + data BYTEA NOT NULL ); -CREATE INDEX IF NOT EXISTS render_event on renders(event_id); -CREATE INDEX IF NOT EXISTS render_job on renders(job_id); -CREATE INDEX IF NOT EXISTS render_job_tag on renders(job_id, tag); -CREATE INDEX IF NOT EXISTS render_job_ot_tag on renders(job_id, tag, other_tag); +CREATE INDEX IF NOT EXISTS binaries_event ON binaries(event_id); + +CREATE TABLE run_fields ( + event_uuid UUID NOT NULL, -CREATE TABLE facets ( - event_id BIGINT NOT NULL PRIMARY KEY REFERENCES events(event_id) ON DELETE CASCADE, - data JSONB NOT NULL, - line BIGINT NOT NULL, + run_field_id BIGSERIAL NOT NULL PRIMARY KEY, + run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, - UNIQUE(event_id) + name VARCHAR(64) NOT NULL, + data JSONB DEFAULT NULL, + details TEXT DEFAULT NULL, + raw TEXT DEFAULT NULL, + link TEXT DEFAULT NULL ); +CREATE INDEX IF NOT EXISTS run_fields_run_id ON run_fields(run_id); +CREATE INDEX IF NOT EXISTS run_fields_name ON run_fields(name); -CREATE TABLE orphans ( - event_id BIGINT NOT NULL PRIMARY KEY REFERENCES events(event_id) ON DELETE CASCADE, - data JSONB NOT NULL, - line BIGINT NOT NULL, +CREATE TABLE job_try_fields ( + event_uuid UUID NOT NULL, - UNIQUE(event_id) -); + job_field_id BIGSERIAL NOT NULL PRIMARY KEY, + job_try_id BIGINT NOT NULL REFERENCES job_tries(job_try_id) ON DELETE CASCADE, -CREATE TABLE binaries ( - binary_id BIGSERIAL NOT NULL PRIMARY KEY, - 
event_id BIGINT NOT NULL REFERENCES events(event_id) ON DELETE CASCADE, - filename VARCHAR(512) NOT NULL, - description TEXT DEFAULT NULL, - is_image BOOL NOT NULL DEFAULT FALSE, - data BYTEA NOT NULL + name VARCHAR(64) NOT NULL, + data JSONB DEFAULT NULL, + details TEXT DEFAULT NULL, + raw TEXT DEFAULT NULL, + link TEXT DEFAULT NULL ); -CREATE INDEX IF NOT EXISTS binaries_event ON binaries(event_id); +CREATE INDEX IF NOT EXISTS job_try_fields_job_id ON job_try_fields(job_try_id); +CREATE INDEX IF NOT EXISTS job_try_fields_name ON job_try_fields(name); CREATE TABLE source_files ( source_file_id BIGSERIAL NOT NULL PRIMARY KEY, @@ -409,22 +350,26 @@ CREATE TABLE source_subs ( ); CREATE TABLE resource_types( - resource_type_id BIGSERIAL NOT NULL PRIMARY KEY, - name TEXT NOT NULL, + resource_type_id BIGSERIAL NOT NULL PRIMARY KEY, + name VARCHAR(512) NOT NULL, UNIQUE(name) ); CREATE TABLE resources ( + event_uuid UUID NOT NULL, + resource_id BIGSERIAL NOT NULL PRIMARY KEY, - event_id BIGINT DEFAULT NULL REFERENCES events(event_id) ON DELETE SET NULL, - resource_type_id BIGINT NOT NULL REFERENCES resources(resource_id) ON DELETE CASCADE, - run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, + resource_type_id BIGINT NOT NULL REFERENCES resource_types(resource_type_id) ON DELETE CASCADE, + run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, + host_id BIGINT NOT NULL REFERENCES hosts(host_id) ON DELETE SET NULL, + + stamp TIMESTAMP NOT NULL, + resource_ord INTEGER NOT NULL, data JSONB NOT NULL, - line BIGINT NOT NULL, - UNIQUE(event_id) + UNIQUE(run_id, resource_ord) ); CREATE INDEX IF NOT EXISTS res_data_runs ON resources(run_id); CREATE INDEX IF NOT EXISTS res_data_res ON resources(resource_type_id); @@ -438,11 +383,11 @@ CREATE TABLE coverage_manager ( ); CREATE TABLE coverage ( + event_uuid UUID NOT NULL, + coverage_id BIGSERIAL NOT NULL PRIMARY KEY, - -- FIXME: Make sure this gets imported - event_id BIGINT DEFAULT NULL REFERENCES 
events(event_id) ON DELETE SET NULL, - job_id BIGINT DEFAULT NULL REFERENCES jobs(job_id) ON DELETE SET NULL, + job_try_id BIGINT DEFAULT NULL REFERENCES job_tries(job_try_id) ON DELETE SET NULL, coverage_manager_id BIGINT DEFAULT NULL REFERENCES coverage_manager(coverage_manager_id) ON DELETE CASCADE, run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, @@ -452,31 +397,31 @@ CREATE TABLE coverage ( metadata JSONB DEFAULT NULL, - UNIQUE(run_id, job_id, test_file_id, source_file_id, source_sub_id) + UNIQUE(run_id, job_try_id, test_file_id, source_file_id, source_sub_id) ); CREATE INDEX IF NOT EXISTS coverage_from_source ON coverage(source_file_id, source_sub_id); CREATE INDEX IF NOT EXISTS coverage_from_run_source ON coverage(run_id, source_file_id, source_sub_id); -CREATE INDEX IF NOT EXISTS coverage_from_job ON coverage(job_id); +CREATE INDEX IF NOT EXISTS coverage_from_job ON coverage(job_try_id); CREATE TABLE reporting ( - reporting_id BIGSERIAL NOT NULL PRIMARY KEY, + reporting_id BIGSERIAL NOT NULL PRIMARY KEY, + + job_try_id BIGINT DEFAULT NULL REFERENCES job_tries(job_try_id) ON DELETE SET NULL, + test_file_id BIGINT DEFAULT NULL REFERENCES test_files(test_file_id) ON DELETE CASCADE, - event_id BIGINT DEFAULT NULL REFERENCES events(event_id) ON DELETE SET NULL, - job_id BIGINT DEFAULT NULL REFERENCES jobs(job_id) ON DELETE SET NULL, - test_file_id BIGINT DEFAULT NULL REFERENCES test_files(test_file_id) ON DELETE CASCADE, + project_id BIGINT NOT NULL REFERENCES projects(project_id) ON DELETE CASCADE, + user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, + run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, - project_id BIGINT NOT NULL REFERENCES projects(project_id) ON DELETE CASCADE, - user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, - run_id BIGINT NOT NULL REFERENCES runs(run_id) ON DELETE CASCADE, + job_try SMALLINT DEFAULT NULL, - job_try INT DEFAULT NULL, - subtest VARCHAR(512) DEFAULT 
NULL, - duration DOUBLE PRECISION NOT NULL, + retry SMALLINT NOT NULL, + abort SMALLINT NOT NULL, + fail SMALLINT NOT NULL, + pass SMALLINT NOT NULL, - fail SMALLINT NOT NULL DEFAULT 0, - pass SMALLINT NOT NULL DEFAULT 0, - retry SMALLINT NOT NULL DEFAULT 0, - abort SMALLINT NOT NULL DEFAULT 0 + subtest VARCHAR(512) DEFAULT NULL, + duration NUMERIC(14,4) NOT NULL ); CREATE INDEX IF NOT EXISTS reporting_run ON reporting(run_id); CREATE INDEX IF NOT EXISTS reporting_user ON reporting(user_id);