From: Sven Schöling
Date: Thu, 8 Oct 2015 12:22:28 +0000 (+0200)
Subject: GDPdU Export - first version
X-Git-Tag: release-3.5.4~1987
X-Git-Url: http://wagnertech.de/git?a=commitdiff_plain;h=11e509315cee0449974caa6d681fa26e730bbeca;p=kivitendo-erp.git

GDPdU Export - first version
---

diff --git a/SL/Controller/Gdpdu.pm b/SL/Controller/Gdpdu.pm
new file mode 100644
index 000000000..8b0cbb063
--- /dev/null
+++ b/SL/Controller/Gdpdu.pm
@@ -0,0 +1,91 @@
+package SL::Controller::Gdpdu;
+
+# TODO:
+# - handle dependent/exclusive checkboxes via javascript
+
+use strict;
+
+use parent qw(SL::Controller::Base);
+
+use SL::GDPDU;
+use SL::Locale::String qw(t8);
+use SL::Helper::Flash;
+
+use Rose::Object::MakeMethods::Generic (
+  'scalar --get_set_init' => [ qw(from to tables) ],
+);
+
+__PACKAGE__->run_before('check_auth');
+
+sub action_filter {
+  my ($self) = @_;
+
+  $self->from(DateTime->today->add(years => -1)->add(days => 1)) if !$self->from;
+  $self->to(DateTime->today)                                     if !$self->to;
+
+  $self->render('gdpdu/filter', title => t8('GDPDU Export'));
+}
+
+sub action_export {
+  my ($self) = @_;
+
+  if (!$self->check_inputs) {
+    $self->action_filter;
+    return;
+  }
+
+  my $gdpdu = SL::GDPDU->new(
+    company    => $::instance_conf->get_company,
+    location   => $::instance_conf->get_address,
+    from       => $self->from,
+    to         => $self->to,
+    tables     => $self->tables,
+    all_tables => !@{ $self->tables } && $::form->{all_tables},
+  );
+
+  my $filename = $gdpdu->generate_export;
+
+  $self->send_file($filename, name => t8('gdpdu-#1-#2.zip', $self->from->ymd, $self->to->ymd), unlink => 1);
+}
+
+#--- other stuff
+
+sub check_auth { $::auth->assert('report') }
+
+sub check_inputs {
+  my ($self) = @_;
+
+  my $error = 0;
+
+  if ($::form->{tables}) {
+    $self->tables([ keys %{ $::form->{tables} } ]);
+    # these three get inferred
+    push @{ $self->tables }, 'invoice'              if $::form->{tables}{ar} || $::form->{tables}{ap};
+    push @{ $self->tables }, 'orderitems'           if $::form->{tables}{oe};
+    push @{ $self->tables }, 'delivery_order_items' if $::form->{tables}{delivery_orders};
+  }
+
+  if (!@{ $self->tables } && !$::form->{all_tables}) {
+    flash('error', t8('No, I really do need checked tables to export.'));
+    $error = 1;
+  }
+
+  if (!$::form->{from}) {
+    my $epoch = DateTime->new(day => 1, month => 1, year => 1900);
+    flash('info', t8('No start date given, setting to #1', $epoch->to_kivitendo));
+    $self->from($epoch);
+  }
+
+  if (!$::form->{to}) {
+    flash('info', t8('No end date given, setting to today'));
+    $self->to(DateTime->today);
+  }
+
+  !$error;
+}
+
+sub init_from   { DateTime->from_kivitendo($::form->{from}) }
+sub init_to     { DateTime->from_kivitendo($::form->{to}) }
+sub init_tables { [ ] }
+
+1;
diff --git a/SL/GDPDU.pm b/SL/GDPDU.pm
new file mode 100644
index 000000000..cccc0527e
--- /dev/null
+++ b/SL/GDPDU.pm
@@ -0,0 +1,484 @@
+package SL::GDPDU;
+
+# TODO:
+# translations
+# optional: make this usable as a background job
+
+use strict;
+use utf8;
+
+use parent qw(Rose::Object);
+
+use Text::CSV_XS;
+use XML::Writer;
+use Archive::Zip;
+use File::Temp ();
+use File::Spec ();
+use List::UtilsBy qw(partition_by);
+
+use SL::DB::Helper::ALL;  # since we work on meta data, we need everything
+use SL::DB::Helper::Mappings;
+use SL::Locale::String qw(t8);
+
+use Rose::Object::MakeMethods::Generic (
+  scalar                  => [ qw(from to tables writer company location) ],
+  'scalar --get_set_init' => [ qw(files tempfiles export_ids) ],
+);
+
+# %known_tables describes the exportable tables. For each entry we find:
+# key:         table name
+# name:        short name, translated
+# description: long description, translated
+# transdate:   column used to filter from/to, empty if the table is filtered otherwise
+# keep:        arrayref of columns that should be saved for further referencing
+# tables:      arrayref with one column and one or many table.column references that were kept earlier
+my %known_tables = (
+  ar                   => { name => t8('Invoice'),              description => t8('Sales Invoices and Accounts Receivables'),   keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
+  ap                   => { name => t8('Purchase Invoice'),     description => t8('Purchase Invoices and Accounts Payables'),   keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
+  oe                   => { name => t8('Orders'),               description => t8('Orders and Quotations, Sales and Purchase'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
+  delivery_orders      => { name => t8('Delivery Orders'),      description => t8('Delivery Orders'),                           keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
+  gl                   => { name => t8('General Ledger'),       description => t8('General Ledger Entries'),                    keep => [ qw(id) ],                       transdate => 'transdate', },
+  invoice              => { name => t8('Invoice Positions'),    description => t8('Positions for all Invoices'),                keep => [ qw(parts_id) ], tables => [ trans_id => "ar.id", "ap.id" ] },
+  orderitems           => { name => t8('OrderItems'),           description => t8('Positions for all Orders'),                  keep => [ qw(parts_id) ], tables => [ trans_id => "oe.id" ] },
+  delivery_order_items => { name => t8('Delivery Order Items'), description => t8('Positions for all Delivery Orders'),         keep => [ qw(parts_id) ], tables => [ delivery_order_id => "delivery_orders.id" ] },
+  acc_trans            => { name => t8('Transactions'),         description => t8('All general ledger entries'),                keep => [ qw(chart_id) ], tables => [ trans_id => "ar.id", "ap.id", "oe.id", "delivery_orders.id", "gl.id" ] },
+  chart                => { name => t8('Charts'),               description => t8('Chart of Accounts'),                         tables => [ id => "acc_trans.chart_id" ] },
+  customer             => { name => t8('Customers'),            description => t8('Customer Master Data'),                      tables => [ id => "ar.customer_id", "ap.customer_id", "oe.customer_id", "delivery_orders.customer_id" ] },
+  vendor               => { name => t8('Vendors'),              description => t8('Vendor Master Data'),                        tables => [ id => "ar.vendor_id", "ap.vendor_id", "oe.vendor_id", "delivery_orders.vendor_id" ] },
+  parts                => { name => t8('Parts'),                description => t8('Parts, Services, and Assemblies'),           tables => [ id => "invoice.parts_id", "orderitems.parts_id", "delivery_order_items.parts_id" ] },
+);
+
+# rows in this listing are tiers.
+# tables may depend on ids in a tier above them
+my @export_table_order = qw(
+  ar ap gl oe delivery_orders
+  invoice orderitems delivery_order_items
+  customer vendor
+  parts
+  acc_trans
+  chart
+);
+
+# needed because the standard dbh sets datestyle to german and we don't want to mess with that
+my $date_format = 'DD.MM.YYYY';
+
+# callbacks that produce the xml spec for these column types
+my %column_types = (
+  'Rose::DB::Object::Metadata::Column::Integer'   => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
+  'Rose::DB::Object::Metadata::Column::BigInt'    => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
+  'Rose::DB::Object::Metadata::Column::Text'      => sub { $_[0]->tag('AlphaNumeric') },
+  'Rose::DB::Object::Metadata::Column::Varchar'   => sub { $_[0]->tag('AlphaNumeric') },
+  'Rose::DB::Object::Metadata::Column::Character' => sub { $_[0]->tag('AlphaNumeric') },
+  'Rose::DB::Object::Metadata::Column::Numeric'   => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 5) }) },
+  'Rose::DB::Object::Metadata::Column::Date'      => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
+  'Rose::DB::Object::Metadata::Column::Timestamp' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
+  'Rose::DB::Object::Metadata::Column::Float'     => sub { $_[0]->tag('Numeric') },
+  'Rose::DB::Object::Metadata::Column::Boolean'   => sub { $_[0]
+    ->tag('AlphaNumeric', sub { $_[0]
+      ->tag('Map', sub { $_[0]
+        ->tag('From', 1)
+        ->tag('To', t8('true'))
+      })
+      ->tag('Map', sub { $_[0]
+        ->tag('From', 0)
+        ->tag('To', t8('false'))
+      })
+    })
+  },
+);
+
+sub generate_export {
+  my ($self) = @_;
+
+  # verify data
+  $self->from && 'DateTime' eq ref $self->from or die 'need from date';
+  $self->to   && 'DateTime' eq ref $self->to   or die 'need to date';
+  $self->from <= $self->to                     or die 'from date must not be later than to date';
+  $self->tables && @{ $self->tables }          or die 'need tables';
+  for (@{ $self->tables }) {
+    next if $known_tables{$_};
+    die "unknown table '$_'";
+  }
+
+  # get data from those tables and save to csv
+  # for that we need to build queries that fetch all the columns
+  for ($self->sorted_tables) {
+    $self->do_csv_export($_);
+  }
+
+  # write xml file
+  $self->do_xml_file;
+
+  # add dtd
+  $self->files->{'gdpdu-01-08-2002.dtd'} = File::Spec->catfile('users', 'gdpdu-01-08-2002.dtd');
+
+  # make zip
+  my ($fh, $zipfile) = File::Temp::tempfile();
+  my $zip            = Archive::Zip->new;
+
+  while (my ($name, $file) = each %{ $self->files }) {
+    $zip->addFile($file, $name);
+  }
+
+  $zip->writeToFileHandle($fh) == Archive::Zip::AZ_OK() or die 'error writing zip file';
+  close($fh);
+
+  return $zipfile;
+}
+
+sub do_xml_file {
+  my ($self) = @_;
+
+  my ($fh, $filename) = File::Temp::tempfile();
+  binmode($fh, ':utf8');
+
+  $self->files->{'INDEX.XML'} = $filename;
+  push @{ $self->tempfiles }, $filename;
+
+  my $writer = XML::Writer->new(
+    OUTPUT   => $fh,
+    ENCODING => 'UTF-8',
+  );
+
+  $self->writer($writer);
+  $self->writer->xmlDecl('UTF-8');
+  $self->writer->doctype('DataSet', undef, "gdpdu-01-08-2002.dtd");
+  $self->tag('DataSet', sub { $self
+    ->tag('Version', '1.0')
+    ->tag('DataSupplier', sub { $self
+      ->tag('Name', $self->client_name)
+      ->tag('Location', $self->client_location)
+      ->tag('Comment', $self->make_comment)
+    })
+    ->tag('Media', sub { $self
+      ->tag('Name', t8('DataSet #1', 1));
+      for (@{ $self->tables }) { $self
+        ->table($_)
+      }
+    })
+  });
+  close($fh);
+}
+
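+# For orientation: the INDEX.XML assembled by do_xml_file() above and table()
+# below is expected to come out roughly like this (sketch only, heavily
+# abbreviated; the actual content depends on the selected tables):
+#
+#   <?xml version="1.0" encoding="UTF-8"?>
+#   <!DOCTYPE DataSet SYSTEM "gdpdu-01-08-2002.dtd">
+#   <DataSet>
+#     <Version>1.0</Version>
+#     <DataSupplier>
+#       <Name>...</Name> <Location>...</Location> <Comment>...</Comment>
+#     </DataSupplier>
+#     <Media>
+#       <Name>DataSet 1</Name>
+#       <Table>
+#         <URL>ar.csv</URL>
+#         <Name>...</Name> <Description>...</Description>
+#         <Validity>...</Validity>
+#         <VariableLength>...</VariableLength>
+#       </Table>
+#       ...
+#     </Media>
+#   </DataSet>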
+sub table {
+  my ($self, $table) = @_;
+  my $writer = $self->writer;
+
+  $self->tag('Table', sub { $self
+    ->tag('URL', "$table.csv")
+    ->tag('Name', $known_tables{$table}{name})
+    ->tag('Description', $known_tables{$table}{description})
+    ->tag('Validity', sub { $self
+      ->tag('Range', sub { $self
+        ->tag('From', $self->from->to_kivitendo(dateformat => 'dd.mm.yyyy'))
+        ->tag('To',   $self->to->to_kivitendo(dateformat => 'dd.mm.yyyy'))
+      })
+      ->tag('Format', $date_format)
+    })
+    ->tag('DecimalSymbol', '.')
+    ->tag('DigitGroupingSymbol', '')
+    ->tag('VariableLength', sub { $self
+      ->columns($table)
+      ->foreign_keys($table)
+    })
+  });
+}
+
+sub _table_columns {
+  my ($table) = @_;
+  my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
+
+  # PrimaryKeys must come before regular columns, so partition first
+  partition_by { 1 * $_->is_primary_key_member } $package->meta->columns;
+}
+
+sub columns {
+  my ($self, $table) = @_;
+
+  my %cols_by_primary_key = _table_columns($table);
+
+  for my $column (@{ $cols_by_primary_key{1} }) {
+    my $type = $column_types{ ref $column };
+
+    die "unknown col type @{[ ref $column ]}" unless $type;
+
+    $self->tag('VariablePrimaryKey', sub { $self
+      ->tag('Name', $column->name);
+      $type->($self);
+    })
+  }
+
+  for my $column (@{ $cols_by_primary_key{0} }) {
+    my $type = $column_types{ ref $column };
+
+    die "unknown col type @{[ ref $column ]}" unless $type;
+
+    $self->tag('VariableColumn', sub { $self
+      ->tag('Name', $column->name);
+      $type->($self);
+    })
+  }
+
+  $self;
+}
+
+sub foreign_keys {
+  my ($self, $table) = @_;
+  my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
+
+  my %requested = map { $_ => 1 } @{ $self->tables };
+
+  for my $rel ($package->meta->foreign_keys) {
+    next unless $requested{ $rel->class->meta->table };
+
+    # ok, now extract the columns used as foreign key
+    my %key_columns = $rel->key_columns;
+
+    if (1 != keys %key_columns) {
+      die "multi keys? we don't support this currently. fix it please";
+    }
+
+    if ($table eq $rel->class->meta->table) {
+      # self referential foreign keys are a PITA to export correctly. skip!
+      next;
+    }
+
+    $self->tag('ForeignKey', sub {
+      $_[0]->tag('Name', $_) for keys %key_columns;
+      $_[0]->tag('References', $rel->class->meta->table);
+    });
+  }
+}
+
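+# A note on the queries built in do_csv_export() below: for a table that is
+# filtered by its transdate column (for example 'ar') the generated statement
+# will look roughly like
+#
+#   SELECT id, transdate, ... FROM ar WHERE transdate >= ? AND transdate <= ?
+#
+# (sketch only; the column list comes from the Rose metadata, and timestamp
+# columns are cast with ::date). Tables that are keyed off previously exported
+# rows get an id list instead, e.g. "WHERE trans_id IN (?, ?, ...)".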
+sub do_csv_export {
+  my ($self, $table) = @_;
+
+  my $csv = Text::CSV_XS->new({ binary => 1, eol => "\n", sep_char => ",", quote_char => '"' });
+
+  my ($fh, $filename) = File::Temp::tempfile();
+  binmode($fh, ':utf8');
+
+  $self->files->{"$table.csv"} = $filename;
+  push @{ $self->tempfiles }, $filename;
+
+  # in the right order (primary keys first)
+  my %cols_by_primary_key = _table_columns($table);
+  my @columns   = (@{ $cols_by_primary_key{1} }, @{ $cols_by_primary_key{0} });
+  my %col_index = do { my $i = 0; map {; "$_" => $i++ } @columns };
+
+  # and normalize date stuff
+  my @select_tokens = map { (ref $_) =~ /Time/ ? $_->name . '::date' : $_->name } @columns;
+
+  my @where_tokens;
+  my @values;
+  if ($known_tables{$table}{transdate}) {
+    if ($self->from) {
+      push @where_tokens, "$known_tables{$table}{transdate} >= ?";
+      push @values, $self->from;
+    }
+    if ($self->to) {
+      push @where_tokens, "$known_tables{$table}{transdate} <= ?";
+      push @values, $self->to;
+    }
+  }
+  if ($known_tables{$table}{tables}) {
+    my ($col, @col_specs) = @{ $known_tables{$table}{tables} };
+    my %ids;
+    for (@col_specs) {
+      my ($ftable, $fkey) = split /\./, $_;
+      if (!exists $self->export_ids->{$ftable}{$fkey}) {
+        # check if we forgot to keep it
+        if (!grep { $_ eq $fkey } @{ $known_tables{$ftable}{keep} || [] }) {
+          die "unknown table spec '$_' for table $table, did you forget to keep $fkey in $ftable?"
+        } else {
+          # hmm, most likely just an empty set.
+          $self->export_ids->{$ftable}{$fkey} = {};
+        }
+      }
+      $ids{$_}++ for keys %{ $self->export_ids->{$ftable}{$fkey} };
+    }
+    if (keys %ids) {
+      push @where_tokens, "$col IN (@{[ join ',', ('?') x keys %ids ]})";
+      push @values, keys %ids;
+    } else {
+      push @where_tokens, '1=0';
+    }
+  }
+
+  my $where_clause = @where_tokens ? 'WHERE ' . join ' AND ', @where_tokens : '';
+
+  my $query = "SELECT " . join(', ', @select_tokens) . " FROM $table $where_clause";
+
+  my $sth = $::form->get_standard_dbh->prepare($query);
+  $sth->execute(@values) or die "error executing query $query: " . $sth->errstr;
+
+  while (my $row = $sth->fetch) {
+    for my $keep_col (@{ $known_tables{$table}{keep} || [] }) {
+      next if !$row->[$col_index{$keep_col}];
+      $self->export_ids->{$table}{$keep_col} ||= {};
+      $self->export_ids->{$table}{$keep_col}{$row->[$col_index{$keep_col}]}++;
+    }
+    $csv->print($fh, $row) or $csv->error_diag;
+  }
+  $sth->finish();
+}
+
+sub tag {
+  my ($self, $tag, $content) = @_;
+
+  $self->writer->startTag($tag);
+  if ('CODE' eq ref $content) {
+    $content->($self);
+  } else {
+    $self->writer->characters($content);
+  }
+  $self->writer->endTag;
+  return $self;
+}
+
+sub make_comment {
+  my $gdpdu_version = API_VERSION();
+  my $kivi_version  = $::form->read_version;
+  my $person        = $::myconfig{name};
+  my $contact       = join ', ',
+    (t8("Email") . ": $::myconfig{email}") x!! $::myconfig{email},
+    (t8("Tel")   . ": $::myconfig{tel}")   x!! $::myconfig{tel},
+    (t8("Fax")   . ": $::myconfig{fax}")   x!! $::myconfig{fax};
+
+  t8('DataSet for GDPdU version #1. Created with kivitendo #2 by #3 (#4)',
+    $gdpdu_version, $kivi_version, $person, $contact
+  );
+}
+
+sub client_name {
+  $_[0]->company
+}
+
+sub client_location {
+  $_[0]->location
+}
+
+sub sorted_tables {
+  my ($self) = @_;
+
+  my %given = map { $_ => 1 } @{ $self->tables };
+
+  grep { $given{$_} } @export_table_order;
+}
+
+sub all_tables {
+  my ($self, $yesno) = @_;
+
+  $self->tables(\@export_table_order) if $yesno;
+}
+
+sub init_files      { +{} }
+sub init_export_ids { +{} }
+sub init_tempfiles  { [] }
+
+sub API_VERSION {
+  DateTime->new(year => 2002, month => 8, day => 14)->to_kivitendo;
+}
+
+sub DESTROY {
+  unlink $_ for @{ $_[0]->tempfiles || [] };
+}
+
+1;
+
+__END__
+
+=encoding utf-8
+
+=head1 NAME
+
+SL::GDPDU - IDEA export generator
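+
+=head1 SYNOPSIS
+
+A minimal usage sketch; the values shown here are only placeholders:
+
+  use SL::GDPDU;
+
+  my $gdpdu = SL::GDPDU->new(
+    company  => 'Example Company GmbH',
+    location => 'Example Town',
+    from     => DateTime->new(year => 2014, month =>  1, day =>  1),
+    to       => DateTime->new(year => 2014, month => 12, day => 31),
+    tables   => [ qw(ar invoice acc_trans chart customer) ],
+  );
+
+  # returns an absolute path to a temporary zip file
+  my $zip_file = $gdpdu->generate_export;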
+
+=head1 FUNCTIONS
+
+=over 4
+
+=item C<new>
+
+Create a new export object. The following parameters may be given:
+
+=over 4
+
+=item company
+
+The name of the company, needed for the supplier header
+
+=item location
+
+Location of the company, needed for the supplier header
+
+=item from
+
+=item to
+
+Will only include records in the specified date range. Data pulled from other
+tables will be culled to match what is needed for these records.
+
+=item tables
+
+A list of tables to be exported.
+
+=item all_tables
+
+Alternative to C<tables>, enables all known tables.
+
+=back
+
+=item C<generate_export>
+
+Do the work. Will return an absolute path to a temp file where all export
+files are zipped together; a sketch of the archive contents follows this list.
+
+=back
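+
+The generated archive is expected to contain roughly the following files; the
+exact set of CSV files depends on the tables selected for the export (sketch,
+not a verbatim listing):
+
+  INDEX.XML
+  gdpdu-01-08-2002.dtd
+  ar.csv
+  acc_trans.csv
+  chart.csv
+  ...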
+
+=head1 CAVEATS
+
+=over 4
+
+=item *
+
+Date format support is poor. The official docs state that only C, C, C,
+and C are supported; timestamps do not exist.
+
+=item *
+
+Number parsing seems to be fragile. Official docs state that behaviour for too
+low C<Accuracy> settings is undefined.
+
+There is no dedicated integer type.
+
+=item *
+
+Currently C<ar> and C<ap> have a foreign key to themselves with the name
+C<storno_id>. If this foreign key is present in the C<INDEX.XML> then the
+storno records have to be present as well. Since this is extremely awkward to
+code and confusing for the examiner as to why there are records outside of the
+time range, this export skips all self-referential foreign keys.
+
+=item *
+
+Documentation for foreign keys is extremely weird. Instead of giving column
+maps it assumes that foreign keys map to the primary keys given for the target
+table, and in that order. Foreign keys to keys that are not primary seem to be
+impossible. Changing the type is also not allowed (which actually makes sense).
+Hopefully there are no bugs there.
+
+=item *
+
+It is currently disallowed to export the whole dataset. It is not clear
+whether this is wanted.
+
+=back
+
+=head1 AUTHOR
+
+Sven Schöling E<lt>s.schoeling@linet-services.deE<gt>
+
+=cut
diff --git a/menus/user/00-erp.yaml b/menus/user/00-erp.yaml
index 5ef1f7f1d..0ff4e243e 100644
--- a/menus/user/00-erp.yaml
+++ b/menus/user/00-erp.yaml
@@ -854,6 +854,13 @@
   access: report
   params:
     action: LiquidityProjection/show
+- parent: reports
+  id: reports_gdpdu_export
+  name: GDPDU Export
+  order: 1000
+  access: report
+  params:
+    action: Gdpdu/filter
 - id: batch_printing
   name: Batch Printing
   icon: printing
diff --git a/templates/webpages/gdpdu/filter.html b/templates/webpages/gdpdu/filter.html
new file mode 100644
index 000000000..83c39c7bf
--- /dev/null
+++ b/templates/webpages/gdpdu/filter.html
@@ -0,0 +1,44 @@
+[%- USE HTML %]
+[%- USE T8 %]
+[%- USE L %]
+[%- USE LxERP %]
+
+<h1>[% title | html %]</h1>
+
+[%- PROCESS 'common/flash.html' %]
+
+<p>
+ [% 'This export will include all records in the given time range and all supplicant information from checked entities. You will receive a single zip file. Please extract this file onto the data medium requested by your auditor.' | $T8 %]
+</p>
+
+<form method="post" action="controller.pl">
+ <table>
+  <tr>
+   <th align="right">[% 'From Date' | $T8 %]</th>
+   <td>[% L.date_tag('from', SELF.from) %]</td>
+  </tr>
+  <tr>
+   <th align="right">[% 'To Date' | $T8 %]</th>
+   <td>[% L.date_tag('to', SELF.to) %]</td>
+  </tr>
+  <tr>
+   <th align="right">[% 'Include in Report' | $T8 %]</th>
+   <td>
+    [% L.checkbox_tag('tables.ar', label=LxERP.t8('Invoices'), checked=1) %]
+    [% L.checkbox_tag('tables.ap', label=LxERP.t8('Purchase Invoices'), checked=1) %]
+    [% L.checkbox_tag('tables.gl', label=LxERP.t8('GL Transactions'), checked=1) %]
+    [% L.checkbox_tag('tables.delivery_orders', label=LxERP.t8('Delivery Orders'), checked=1) %]
+    [% L.checkbox_tag('tables.oe', label=LxERP.t8('Quotations and orders'), checked=1) %]
+    [% L.checkbox_tag('tables.customer', label=LxERP.t8('Customers'), checked=1) %]
+    [% L.checkbox_tag('tables.vendor', label=LxERP.t8('Vendors'), checked=1) %]
+    [% L.checkbox_tag('tables.parts', label=LxERP.t8('Parts'), checked=1) %]
+    [% L.checkbox_tag('tables.acc_trans', label=LxERP.t8('Transactions'), checked=1) %]
+    [% L.checkbox_tag('tables.chart', label=LxERP.t8('Charts'), checked=1) %]
+   </td>
+  </tr>
+ </table>
+
+[% L.hidden_tag('action', 'Gdpdu/dispatch') %]
+
+[% L.submit_tag('action_export', LxERP.t8('Export')) %]
+
+</form>
diff --git a/users/gdpdu-01-08-2002.dtd b/users/gdpdu-01-08-2002.dtd new file mode 100644 index 000000000..68b7bc5b4 --- /dev/null +++ b/users/gdpdu-01-08-2002.dtd @@ -0,0 +1,189 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +