4 # optional: background jobable
9 use parent qw(Rose::Object);
16 use List::UtilsBy qw(partition_by);
18 use SL::DB::Helper::ALL; # since we work on meta data, we need everything
19 use SL::DB::Helper::Mappings;
20 use SL::Locale::String qw(t8);
22 use Rose::Object::MakeMethods::Generic (
23 scalar => [ qw(from to tables writer company location) ],
24 'scalar --get_set_init' => [ qw(files tempfiles export_ids) ],
# NOTE(review): the lines below appear to be the body of a
# "my %known_tables = ( ... )" declaration whose opening line is not visible
# in this excerpt. Each entry describes one exportable DB table.
29 # name: short name, translated
30 # description: long description, translated
31 # transdate: column used to filter from/to, empty if table is filtered otherwise
32 # keep: arrayref of columns that should be saved for further referencing
33 # tables: arrayref with one column and one or many table.column references that were kept earlier
# Document tables: filtered directly by transdate, their ids are kept so that
# dependent tables (positions, acc_trans, master data) can be culled to match.
35 ar => { name => t8('Invoice'), description => t8('Sales Invoices and Accounts Receivables'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
36 ap => { name => t8('Purchase Invoice'), description => t8('Purchase Invoices and Accounts Payables'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
37 oe => { name => t8('Orders'), description => t8('Orders and Quotations, Sales and Purchase'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
38 delivery_orders => { name => t8('Delivery Orders'), description => t8('Delivery Orders'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
39 gl => { name => t8('General Ledger'), description => t8('General Ledger Entries'), keep => [ qw(id) ], transdate => 'transdate', },
# Dependent tables: filtered via "tables" specs against ids kept above
# (e.g. invoice rows are selected by trans_id IN (exported ar.id / ap.id)).
40 invoice => { name => t8('Invoice Positions'), description => t8('Positions for all Invoices'), keep => [ qw(parts_id) ], tables => [ trans_id => "ar.id", "ap.id" ] },
41 orderitems => { name => t8('OrderItems'), description => t8('Positions for all Orders'), keep => [ qw(parts_id) ], tables => [ trans_id => "oe.id" ] },
42 delivery_order_items => { name => t8('Delivery Order Items'), description => t8('Positions for all Delivery Orders'), keep => [ qw(parts_id) ], tables => [ delivery_order_id => "delivery_orders.id" ] },
43 acc_trans => { name => t8('Transactions'), description => t8('All general ledger entries'), keep => [ qw(chart_id) ], tables => [ trans_id => "ar.id", "ap.id", "oe.id", "delivery_orders.id", "gl.id" ] },
44 chart => { name => t8('Charts'), description => t8('Chart of Accounts'), tables => [ id => "acc_trans.chart_id" ] },
45 customer => { name => t8('Customers'), description => t8('Customer Master Data'), tables => [ id => "ar.customer_id", "ap.customer_id", "oe.customer_id", "delivery_orders.customer_id" ] },
46 vendor => { name => t8('Vendors'), description => t8('Vendor Master Data'), tables => [ id => "ar.vendor_id", "ap.vendor_id", "oe.vendor_id", "delivery_orders.vendor_id" ] },
47 parts => { name => t8('Parts'), description => t8('Parts, Services, and Assemblies'), tables => [ id => "invoice.parts_id", "orderitems.parts_id", "delivery_order_items.parts_id" ] },
50 # rows in this listing are tiers.
51 # tables may depend on ids in a tier above them
# NOTE(review): the qw() list is truncated here — the remaining tiers
# (presumably acc_trans, chart, customer, vendor, parts) and the closing
# ");" are outside the visible excerpt.
52 my @export_table_order = qw(
53 ar ap gl oe delivery_orders
54 invoice orderitems delivery_order_items
# Date format advertised in INDEX.XML and used when rendering date columns.
61 # needed because the standard dbh sets datestyle german and we don't want to mess with that
62 my $date_format = 'DD.MM.YYYY';
# NOTE(review): body of a "my %column_types = ( ... )" declaration (opening
# line not visible). Maps Rose::DB::Object column metadata class names to
# callbacks that emit the matching GDPdU XML type spec; $_[0] is the exporter
# object ($self), dispatched in columns() via ref($column).
64 # callbacks that produce the xml spec for these column types
66 'Rose::DB::Object::Metadata::Column::Integer' => sub { $_[0]->tag('Numeric') }, # see Caveats for integer issues
67 'Rose::DB::Object::Metadata::Column::BigInt' => sub { $_[0]->tag('Numeric') }, # see Caveats for integer issues
68 'Rose::DB::Object::Metadata::Column::Text' => sub { $_[0]->tag('AlphaNumeric') },
69 'Rose::DB::Object::Metadata::Column::Varchar' => sub { $_[0]->tag('AlphaNumeric') },
70 'Rose::DB::Object::Metadata::Column::Character' => sub { $_[0]->tag('AlphaNumeric') },
# Numerics are exported with a fixed accuracy of 5 decimal places.
71 'Rose::DB::Object::Metadata::Column::Numeric' => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 5) }) },
# Timestamps are cast to date in do_csv_export, so both use the date format.
72 'Rose::DB::Object::Metadata::Column::Date' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
73 'Rose::DB::Object::Metadata::Column::Timestamp' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
74 'Rose::DB::Object::Metadata::Column::Float' => sub { $_[0]->tag('Numeric') },
# Boolean: rendered as AlphaNumeric with Map entries translating the raw DB
# values to localized true/false strings. NOTE(review): the 'From' lines of
# each Map (and the enclosing tag) are outside the visible excerpt — the
# three maps presumably cover true, false, and NULL/empty; confirm in the
# full source.
75 'Rose::DB::Object::Metadata::Column::Boolean' => sub { $_[0]
77 ->tag('Map', sub { $_[0]
79 ->tag('To', t8('true'))
81 ->tag('Map', sub { $_[0]
83 ->tag('To', t8('false'))
85 ->tag('Map', sub { $_[0]
87 ->tag('To', t8('false'))
# NOTE(review): interior of generate_export (sub line not visible).
# Validates the configuration, exports each requested table to CSV,
# writes INDEX.XML plus the DTD, and zips everything into a temp file.
# Sanity checks: both range bounds must be DateTime objects in order,
# and at least one known table must be requested.
96 $self->from && 'DateTime' eq ref $self->from or die 'need from date';
97 $self->to && 'DateTime' eq ref $self->to or die 'need to date';
98 $self->from <= $self->to or die 'from date must be earlier or equal than to date';
99 $self->tables && @{ $self->tables } or die 'need tables';
100 for (@{ $self->tables }) {
101 next if $known_tables{$_};
102 die "unknown table '$_'";
105 # get data from those tables and save to csv
106 # for that we need to build queries that fetch all the columns
# Export in dependency order so export_ids of referenced tables are
# populated before dependent tables are culled against them.
107 for ($self->sorted_tables) {
108 $self->do_csv_export($_);
# Ship the official GDPdU DTD alongside INDEX.XML.
115 $self->files->{'gdpdu-01-08-2002.dtd'} = File::Spec->catfile('users', 'gdpdu-01-08-2002.dtd');
# Bundle all collected files (INDEX.XML, DTD, per-table CSVs) into a zip.
118 my ($fh, $zipfile) = File::Temp::tempfile();
119 my $zip = Archive::Zip->new;
121 while (my ($name, $file) = each %{ $self->files }) {
122 $zip->addFile($file, $name);
125 $zip->writeToFileHandle($fh) == Archive::Zip::AZ_OK() or die 'error writing zip file';
# NOTE(review): interior of do_xml_file (sub line not visible). Builds the
# GDPdU INDEX.XML into a temp file and registers it for packaging/cleanup.
134 my ($fh, $filename) = File::Temp::tempfile();
135 binmode($fh, ':utf8');
137 $self->files->{'INDEX.XML'} = $filename;
138 push @{ $self->tempfiles }, $filename;
# NOTE(review): XML::Writer constructor arguments (OUTPUT/ENCODING etc.)
# are outside the visible excerpt.
140 my $writer = XML::Writer->new(
145 $self->writer($writer);
146 $self->writer->xmlDecl('UTF-8');
147 $self->writer->doctype('DataSet', undef, "gdpdu-01-08-2002.dtd");
# DataSet > Version/DataSupplier/Media structure as required by the DTD;
# tag() chains return $self, which allows this fluent style.
148 $self->tag('DataSet', sub { $self
149 ->tag('Version', '1.0')
150 ->tag('DataSupplier', sub { $self
151 ->tag('Name', $self->client_name)
152 ->tag('Location', $self->client_location)
153 ->tag('Comment', $self->make_comment)
155 ->tag('Media', sub { $self
156 ->tag('Name', t8('DataSet #1', 1));
157 for (reverse $self->sorted_tables) { $self # see CAVEATS for table order
# NOTE(review): interior of the per-table INDEX.XML writer (sub line not
# visible). Emits one <Table> element: URL of the CSV, localized name and
# description, validity range, CSV format parameters, column specs and
# foreign keys.
166 my ($self, $table) = @_;
167 my $writer = $self->writer;
169 $self->tag('Table', sub { $self
170 ->tag('URL', "$table.csv")
171 ->tag('Name', $known_tables{$table}{name})
172 ->tag('Description', $known_tables{$table}{description})
# Validity range mirrors the requested from/to export window.
173 ->tag('Validity', sub { $self
174 ->tag('Range', sub { $self
175 ->tag('From', $self->from->to_kivitendo(dateformat => 'dd.mm.yyyy'))
176 ->tag('To', $self->to->to_kivitendo(dateformat => 'dd.mm.yyyy'))
178 ->tag('Format', $date_format)
# CSV dialect advertised to the importer; must match Text::CSV_XS settings
# in do_csv_export (',' separator, '"' quoting, implicit CRLF records).
181 ->tag('DecimalSymbol', '.')
182 ->tag('DigitGroupingSymbol', '|') # see CAVEATS in documentation
183 ->tag('VariableLength', sub { $self
184 ->tag('ColumnDelimiter', ',') # see CAVEATS for missing RecordDelimiter
185 ->tag('TextEncapsulator', '"')
187 ->foreign_keys($table)
# NOTE(review): interior of _table_columns (sub line not visible).
# Returns the table's Rose column metadata objects partitioned into a hash:
# key 1 => arrayref of primary-key columns, key 0 => arrayref of the rest.
194 my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
# "1 *" forces the partition key to a plain 0/1 number.
196 # PrimaryKeys must come before regular columns, so partition first
197 partition_by { 1 * $_->is_primary_key_member } $package->meta->columns;
# NOTE(review): interior of the column-spec writer (sub line not visible).
# Emits VariablePrimaryKey elements for primary-key columns first, then
# VariableColumn elements for the remaining columns; each gets its Name and
# (via the %column_types callback) its type spec.
201 my ($self, $table) = @_;
203 my %cols_by_primary_key = _table_columns($table);
# Primary key columns — must precede regular columns in the XML.
205 for my $column (@{ $cols_by_primary_key{1} }) {
206 my $type = $column_types{ ref $column };
# Fail loudly on column classes without a registered type callback.
208 die "unknown col type @{[ ref $column ]}" unless $type;
210 $self->tag('VariablePrimaryKey', sub { $self
211 ->tag('Name', $column->name);
# Non-key columns.
216 for my $column (@{ $cols_by_primary_key{0} }) {
217 my $type = $column_types{ ref $column };
219 die "unknown col type @{[ ref $column]}" unless $type;
221 $self->tag('VariableColumn', sub { $self
222 ->tag('Name', $column->name);
# NOTE(review): interior of foreign_keys (sub line not visible). Emits a
# <ForeignKey> element for every Rose foreign-key relationship of $table
# whose target table is also part of this export.
231 my ($self, $table) = @_;
232 my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
234 my %requested = map { $_ => 1 } @{ $self->tables };
236 for my $rel ($package->meta->foreign_keys) {
# Only reference tables that are actually exported.
237 next unless $requested{ $rel->class->meta->table };
239 # ok, now extract the columns used as foreign key
240 my %key_columns = $rel->key_columns;
# GDPdU maps foreign keys positionally onto the target's primary key;
# composite keys are not implemented here.
242 if (1 != keys %key_columns) {
243 die "multi keys? we don't support this currently. fix it please";
246 if ($table eq $rel->class->meta->table) {
247 # self referential foreign keys are a PITA to export correctly. skip!
251 $self->tag('ForeignKey', sub {
252 $_[0]->tag('Name', $_) for keys %key_columns;
253 $_[0]->tag('References', $rel->class->meta->table);
# NOTE(review): interior of do_csv_export (sub line not visible). Dumps one
# table to "$table.csv": builds a SELECT restricted either by the transdate
# window or by previously exported foreign ids, records kept id columns in
# export_ids for dependent tables, and writes CSV rows.
259 my ($self, $table) = @_;
# eol "\r\n" matches the implicit GDPdU RecordDelimiter (CRLF).
261 my $csv = Text::CSV_XS->new({ binary => 1, eol => "\r\n", sep_char => ",", quote_char => '"' });
263 my ($fh, $filename) = File::Temp::tempfile();
264 binmode($fh, ':utf8');
266 $self->files->{"$table.csv"} = $filename;
267 push @{ $self->tempfiles }, $filename;
269 # in the right order (primary keys first)
270 my %cols_by_primary_key = _table_columns($table);
271 my @columns = (@{ $cols_by_primary_key{1} }, @{ $cols_by_primary_key{0} });
# Map column name to its position in the SELECT/result row.
272 my %col_index = do { my $i = 0; map {; "$_" => $i++ } @columns };
# Timestamp columns are cast to plain dates (::date) to match the
# advertised Date format in INDEX.XML.
274 # and normalize date stuff
275 my @select_tokens = map { (ref $_) =~ /Time/ ? $_->name . '::date' : $_->name } @columns;
# Case 1: document tables — filter directly by the transdate window.
279 if ($known_tables{$table}{transdate}) {
281 push @where_tokens, "$known_tables{$table}{transdate} >= ?";
282 push @values, $self->from;
285 push @where_tokens, "$known_tables{$table}{transdate} <= ?";
286 push @values, $self->to;
# Case 2: dependent tables — filter $col against the union of ids kept
# from the referenced tables' earlier exports.
289 if ($known_tables{$table}{tables}) {
290 my ($col, @col_specs) = @{ $known_tables{$table}{tables} };
293 my ($ftable, $fkey) = split /\./, $_;
294 if (!exists $self->export_ids->{$ftable}{$fkey}) {
295 # check if we forgot to keep it
296 if (!grep { $_ eq $fkey } @{ $known_tables{$ftable}{keep} || [] }) {
297 die "unknown table spec '$_' for table $table, did you forget to keep $fkey in $ftable?"
299 # hmm, most likely just an empty set.
300 $self->export_ids->{$ftable}{$fkey} = {};
303 $ids{$_}++ for keys %{ $self->export_ids->{$ftable}{$fkey} };
# Placeholder list sized to the id set; '1=0' selects nothing when the
# referenced export produced no ids.
306 push @where_tokens, "$col IN (@{[ join ',', ('?') x keys %ids ]})";
307 push @values, keys %ids;
309 push @where_tokens, '1=0';
313 my $where_clause = @where_tokens ? 'WHERE ' . join ' AND ', @where_tokens : '';
315 my $query = "SELECT " . join(', ', @select_tokens) . " FROM $table $where_clause";
317 my $sth = $::form->get_standard_dbh->prepare($query);
318 $sth->execute(@values) or die "error executing query $query: " . $sth->errstr;
320 while (my $row = $sth->fetch) {
# Remember kept column values (e.g. ids) for dependent table exports;
# NULL/0/'' values are deliberately skipped.
321 for my $keep_col (@{ $known_tables{$table}{keep} || [] }) {
322 next if !$row->[$col_index{$keep_col}];
323 $self->export_ids->{$table}{$keep_col} ||= {};
324 $self->export_ids->{$table}{$keep_col}{$row->[$col_index{$keep_col}]}++;
# IDEA's importer cannot handle record delimiters inside data.
326 s/\r\n/ /g for @$row; # see CAVEATS
328 $csv->print($fh, $row) or $csv->error_diag;
# NOTE(review): interior of tag() (sub line not visible). Small XML helper:
# writes <$tag>, then either invokes a coderef $content to emit nested tags
# or writes $content as character data, then closes the tag. Presumably
# returns $self to enable the fluent ->tag(...)->tag(...) chains used above
# — confirm against the full source.
334 my ($self, $tag, $content) = @_;
336 $self->writer->startTag($tag);
337 if ('CODE' eq ref $content) {
340 $self->writer->characters($content);
342 $self->writer->endTag;
# NOTE(review): interior of make_comment (sub line not visible). Builds the
# DataSupplier Comment string from the GDPdU API version, the kivitendo
# version, and the current user's name and contact data.
347 my $gdpdu_version = API_VERSION();
348 my $kivi_version = $::form->read_version;
349 my $person = $::myconfig{name};
# "LIST x!! COND" idiom: includes each contact part only when the
# corresponding config value is true (x!! yields 1 or 0 repetitions).
350 my $contact = join ', ',
351 (t8("Email") . ": $::myconfig{email}" ) x!! $::myconfig{email},
352 (t8("Tel") . ": $::myconfig{tel}" ) x!! $::myconfig{tel},
353 (t8("Fax") . ": $::myconfig{fax}" ) x!! $::myconfig{fax};
355 t8('DataSet for GDPdU version #1. Created with kivitendo #2 by #3 (#4)',
356 $gdpdu_version, $kivi_version, $person, $contact
# NOTE(review): fragments of three accessors; bodies/sub lines are partly
# outside the visible excerpt.
364 sub client_location {
# sorted_tables: the requested tables in canonical dependency order —
# intersects @export_table_order with the configured table list.
371 my %given = map { $_ => 1 } @{ $self->tables };
373 grep { $given{$_} } @export_table_order;
# all_tables($yesno): convenience setter that selects every known table.
377 my ($self, $yesno) = @_;
379 $self->tables(\@export_table_order) if $yesno;
# Rose::Object::MakeMethods 'get_set_init' default builders — each returns a
# fresh, empty container per instance.
sub init_files {
  # Map of { name inside the zip archive => path of the file on disk }.
  return +{};
}

sub init_export_ids {
  # Nested map of kept ids: { table => { column => { id => seen count } } }.
  return +{};
}

sub init_tempfiles {
  # Temp files created during export, unlinked again in DESTROY.
  return [];
}
# NOTE(review): fragments of two subs (sub lines not visible).
# API_VERSION: the GDPdU specification date (2002-08-14) rendered in the
# user's date format — matches the "gdpdu-01-08-2002.dtd" spec revision.
387 DateTime->new(year => 2002, month => 8, day => 14)->to_kivitendo;
# DESTROY: remove all temp files (CSVs, INDEX.XML) created by this export.
391 unlink $_ for @{ $_[0]->tempfiles || [] };
402 SL::GDPDU - IDEA export generator
410 Create new export object. C<PARAMS> may contain:
416 The name of the company, needed for the supplier header
420 Location of the company, needed for the supplier header
426 Will only include records in the specified date range. Data pulled from other
427 tables will be culled to match what is needed for these records.
431 A list of tables to be exported.
435 Alternative to C<tables>, enables all known tables.
439 =item C<generate_export>
441 Do the work. Will return an absolute path to a temp file where all export files
452 Date format is shit. The official docs state that only C<YY>, C<YYYY>, C<MM>,
453 and C<DD> are supported, timestamps do not exist.
457 Number parsing seems to be fragile. Official docs state that behaviour for too
458 low C<Accuracy> settings is undefined. Accuracy of 0 is not taken to mean
459 Integer but instead generates a warning for redundancy.
461 There is no dedicated integer type.
465 Currently C<ar> and C<ap> have a foreign key to themselves with the name
466 C<storno_id>. If this foreign key is present in the C<INDEX.XML> then the
467 storno records have to be too. Since this is extremely awkward to code and
468 confusing for the examiner as to why there are records outside of the time
469 range, this export skips all self-referential foreign keys.
473 Documentation for foreign keys is extremely weird. Instead of giving column
474 maps it assumes that foreign keys map to the primary keys given for the target
475 table, and in that order. Foreign keys to keys that are not primary seems to be
476 impossible. Changing type is also not allowed (which actually makes sense).
477 Hopefully there are no bugs there.
481 It's currently disallowed to export the whole dataset. It's not clear if this
486 It is not possible to set an empty C<DigitGroupingSymbol> since then the import
487 will just work with the default. This was asked in their forum, and the
488 response actually was:
490 Einfache Lösung: Definieren Sie das Tausendertrennzeichen als Komma, auch
491 wenn es nicht verwendet wird. Sollten Sie das Komma bereits als Feldtrenner
492 verwenden, so wählen Sie als Tausendertrennzeichen eine Alternative wie das
495 L<http://www.gdpdu-portal.com/forum/index.php?mode=thread&id=1392>
499 It is not possible to define a C<RecordDelimiter> with XML entities. The
entity sequence C<&#13;&#10;>
500 generates the error message:
502 C<RecordDelimiter>-Wert (&#13;&#10;) sollte immer aus ein oder zwei Zeichen
bestehen.
505 Instead we just use the implicit default RecordDelimiter CRLF.
511 Foreign keys seem only to work with previously defined tables (which would be
516 The CSV import library used in IDEA is not able to parse newlines (or more
517 exactly RecordDelimiter) in data. So this export substitutes all of these with
524 Sven Schöling E<lt>s.schoeling@linet-services.deE<gt>