package SL::GDPDU;
# optional: background jobable

use strict;
use parent qw(Rose::Object);

use Archive::Zip;
use File::Spec;
use File::Temp ();
use Text::CSV_XS;
use XML::Writer;
use List::UtilsBy qw(partition_by);
use SL::DB::Helper::ALL; # since we work on meta data, we need everything
use SL::DB::Helper::Mappings;
use SL::Locale::String qw(t8);

use Rose::Object::MakeMethods::Generic (
  scalar                  => [ qw(from to tables writer company location) ],
  'scalar --get_set_init' => [ qw(files tempfiles export_ids) ],
);
# name:        short name, translated
# description: long description, translated
# transdate:   column used to filter from/to, empty if table is filtered otherwise
# keep:        arrayref of columns that should be saved for further referencing
# tables:      arrayref with one column and one or many table.column references that were kept earlier
my %known_tables = (
  ar                   => { name => t8('Invoice'),              description => t8('Sales Invoices and Accounts Receivables'),   keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  ap                   => { name => t8('Purchase Invoice'),     description => t8('Purchase Invoices and Accounts Payables'),   keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  oe                   => { name => t8('Orders'),               description => t8('Orders and Quotations, Sales and Purchase'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  delivery_orders      => { name => t8('Delivery Orders'),      description => t8('Delivery Orders'),                           keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
  gl                   => { name => t8('General Ledger'),       description => t8('General Ledger Entries'),                    keep => [ qw(id) ],                       transdate => 'transdate', },
  invoice              => { name => t8('Invoice Positions'),    description => t8('Positions for all Invoices'),                keep => [ qw(parts_id) ], tables => [ trans_id => "ar.id", "ap.id" ] },
  orderitems           => { name => t8('OrderItems'),           description => t8('Positions for all Orders'),                  keep => [ qw(parts_id) ], tables => [ trans_id => "oe.id" ] },
  delivery_order_items => { name => t8('Delivery Order Items'), description => t8('Positions for all Delivery Orders'),         keep => [ qw(parts_id) ], tables => [ delivery_order_id => "delivery_orders.id" ] },
  acc_trans            => { name => t8('Transactions'),         description => t8('All general ledger entries'),                keep => [ qw(chart_id) ], tables => [ trans_id => "ar.id", "ap.id", "oe.id", "delivery_orders.id", "gl.id" ] },
  chart                => { name => t8('Charts'),               description => t8('Chart of Accounts'),                         tables => [ id => "acc_trans.chart_id" ] },
  customer             => { name => t8('Customers'),            description => t8('Customer Master Data'),                      tables => [ id => "ar.customer_id", "ap.customer_id", "oe.customer_id", "delivery_orders.customer_id" ] },
  vendor               => { name => t8('Vendors'),              description => t8('Vendor Master Data'),                        tables => [ id => "ar.vendor_id", "ap.vendor_id", "oe.vendor_id", "delivery_orders.vendor_id" ] },
  parts                => { name => t8('Parts'),                description => t8('Parts, Services, and Assemblies'),           tables => [ id => "invoice.parts_id", "orderitems.parts_id", "delivery_order_items.parts_id" ] },
);
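# Illustrative note (not part of the original metadata): 'keep' and 'tables'
# work together. While 'ar' is exported its 'id' values are remembered, and
# 'invoice' is then restricted with  trans_id IN (<kept ar.id and ap.id values>)
# when do_csv_export() builds its WHERE clause further down.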
# rows in this listing are tiers.
# tables may depend on ids in a tier above them
my @export_table_order = qw(
  ar ap gl oe delivery_orders
  invoice orderitems delivery_order_items
  acc_trans customer vendor parts
  chart
);
# needed because the standard dbh sets the datestyle to German and we don't want to mess with that
my $date_format = 'DD.MM.YYYY';

# callbacks that produce the xml spec for these column types
my %column_types = (
  'Rose::DB::Object::Metadata::Column::Integer'   => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
  'Rose::DB::Object::Metadata::Column::BigInt'    => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
  'Rose::DB::Object::Metadata::Column::Text'      => sub { $_[0]->tag('AlphaNumeric') },
  'Rose::DB::Object::Metadata::Column::Varchar'   => sub { $_[0]->tag('AlphaNumeric') },
  'Rose::DB::Object::Metadata::Column::Character' => sub { $_[0]->tag('AlphaNumeric') },
  'Rose::DB::Object::Metadata::Column::Numeric'   => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 5) }) },
  'Rose::DB::Object::Metadata::Column::Date'      => sub { $_[0]->tag('Date',    sub { $_[0]->tag('Format', $date_format) }) },
  'Rose::DB::Object::Metadata::Column::Timestamp' => sub { $_[0]->tag('Date',    sub { $_[0]->tag('Format', $date_format) }) },
  'Rose::DB::Object::Metadata::Column::Float'     => sub { $_[0]->tag('Numeric') },
  'Rose::DB::Object::Metadata::Column::Boolean'   => sub { $_[0]
    ->tag('AlphaNumeric', sub { $_[0]
      ->tag('Map', sub { $_[0]
        ->tag('From', 1)
        ->tag('To',   t8('true'))
      })
      ->tag('Map', sub { $_[0]
        ->tag('From', 0)
        ->tag('To',   t8('false'))
      })
      ->tag('Map', sub { $_[0]
        ->tag('From', '')
        ->tag('To',   t8('false'))
      })
    })
  },
);
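# For illustration only (not in the original source): combined with the tag()
# helper defined further down, the Date callback above produces markup along
# the lines of
#   <Date><Format>DD.MM.YYYY</Format></Date>
# and the Integer callback produces
#   <Numeric><Accuracy>0</Accuracy></Numeric>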
sub generate_export {
  my ($self) = @_;

  # verify data
  $self->from && 'DateTime' eq ref $self->from or die 'need from date';
  $self->to   && 'DateTime' eq ref $self->to   or die 'need to date';
  $self->from <= $self->to                     or die 'from date must be earlier than or equal to the to date';
  $self->tables && @{ $self->tables }          or die 'need tables';
  for (@{ $self->tables }) {
    next if $known_tables{$_};
    die "unknown table '$_'";
  }

  # get data from those tables and save to csv
  # for that we need to build queries that fetch all the columns
  for ($self->sorted_tables) {
    $self->do_csv_export($_);
  }

  # write the meta information (INDEX.XML) and add the DTD
  $self->do_xml_file;
  $self->files->{'gdpdu-01-08-2002.dtd'} = File::Spec->catfile('users', 'gdpdu-01-08-2002.dtd');

  # zip everything up
  my ($fh, $zipfile) = File::Temp::tempfile();
  my $zip            = Archive::Zip->new;

  while (my ($name, $file) = each %{ $self->files }) {
    $zip->addFile($file, $name);
  }

  $zip->writeToFileHandle($fh) == Archive::Zip::AZ_OK() or die 'error writing zip file';
  close($fh);

  return $zipfile;
}
sub do_xml_file {
  my ($self) = @_;

  my ($fh, $filename) = File::Temp::tempfile();
  binmode($fh, ':utf8');

  $self->files->{'INDEX.XML'} = $filename;
  push @{ $self->tempfiles }, $filename;

  my $writer = XML::Writer->new(
    OUTPUT => $fh,
  );

  $self->writer($writer);
  $self->writer->xmlDecl('UTF-8');
  $self->writer->doctype('DataSet', undef, "gdpdu-01-08-2002.dtd");
  $self->tag('DataSet', sub { $self
    ->tag('Version', '1.0')
    ->tag('DataSupplier', sub { $self
      ->tag('Name',     $self->client_name)
      ->tag('Location', $self->client_location)
      ->tag('Comment',  $self->make_comment)
    })
    ->tag('Media', sub { $self
      ->tag('Name', t8('DataSet #1', 1));
      for (@{ $self->tables }) { $self
        ->table($_);
      }
    })
  });

  close($fh);
}
sub table {
  my ($self, $table) = @_;
  my $writer = $self->writer;

  $self->tag('Table', sub { $self
    ->tag('URL', "$table.csv")
    ->tag('Name', $known_tables{$table}{name})
    ->tag('Description', $known_tables{$table}{description})
    ->tag('Validity', sub { $self
      ->tag('Range', sub { $self
        ->tag('From', $self->from->to_kivitendo(dateformat => 'dd.mm.yyyy'))
        ->tag('To',   $self->to->to_kivitendo(dateformat => 'dd.mm.yyyy'))
      })
      ->tag('Format', $date_format)
    })
    ->tag('DecimalSymbol', '.')
    ->tag('DigitGroupingSymbol', '|') # see CAVEATS in documentation
    ->tag('VariableLength', sub { $self
      ->tag('ColumnDelimiter', ',')
      ->tag('RecordDelimiter', '&#x0A;')
      ->tag('TextEncapsulator', '"')
      ->columns($table)
      ->foreign_keys($table)
    })
  });
}
sub _table_columns {
  my ($table) = @_;
  my $package = SL::DB::Helper::Mappings::get_package_for_table($table);

  # PrimaryKeys must come before regular columns, so partition first
  partition_by { 1 * $_->is_primary_key_member } $package->meta->columns;
}
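# For reference (illustrative): partition_by returns a hash keyed by the
# block's result, so _table_columns() yields something like
#   ( 1 => [ <primary key column objects> ], 0 => [ <all other columns> ] )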
sub columns {
  my ($self, $table) = @_;

  my %cols_by_primary_key = _table_columns($table);

  for my $column (@{ $cols_by_primary_key{1} }) {
    my $type = $column_types{ ref $column };
    die "unknown col type @{[ ref $column ]}" unless $type;

    $self->tag('VariablePrimaryKey', sub { $self
      ->tag('Name', $column->name);
      $type->($self);
    })
  }

  for my $column (@{ $cols_by_primary_key{0} }) {
    my $type = $column_types{ ref $column };
    die "unknown col type @{[ ref $column ]}" unless $type;

    $self->tag('VariableColumn', sub { $self
      ->tag('Name', $column->name);
      $type->($self);
    })
  }

  $self;
}
sub foreign_keys {
  my ($self, $table) = @_;
  my $package = SL::DB::Helper::Mappings::get_package_for_table($table);

  my %requested = map { $_ => 1 } @{ $self->tables };

  for my $rel ($package->meta->foreign_keys) {
    next unless $requested{ $rel->class->meta->table };

    # ok, now extract the columns used as foreign key
    my %key_columns = $rel->key_columns;

    if (1 != keys %key_columns) {
      die "multi keys? we don't support this currently. fix it please";
    }

    if ($table eq $rel->class->meta->table) {
      # self referential foreign keys are a PITA to export correctly. skip!
      next;
    }

    $self->tag('ForeignKey', sub {
      $_[0]->tag('Name', $_) for keys %key_columns;
      $_[0]->tag('References', $rel->class->meta->table);
    });
  }
}
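# Illustrative example of the output: for the 'ar' table with 'customer'
# among the requested tables, the loop above emits roughly
#   <ForeignKey><Name>customer_id</Name><References>customer</References></ForeignKey>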
sub do_csv_export {
  my ($self, $table) = @_;

  my $csv = Text::CSV_XS->new({ binary => 1, eol => "\n", sep_char => ",", quote_char => '"' });

  my ($fh, $filename) = File::Temp::tempfile();
  binmode($fh, ':utf8');

  $self->files->{"$table.csv"} = $filename;
  push @{ $self->tempfiles }, $filename;

  # get the columns in the right order (primary keys first)
  my %cols_by_primary_key = _table_columns($table);
  my @columns             = (@{ $cols_by_primary_key{1} }, @{ $cols_by_primary_key{0} });
  my %col_index           = do { my $i = 0; map {; "$_" => $i++ } @columns };

  # and normalize date stuff
  my @select_tokens = map { (ref $_) =~ /Time/ ? $_->name . '::date' : $_->name } @columns;

  my @where_tokens;
  my @values;
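  # Illustrative: %col_index maps column names to their position in the select
  # list (the Rose column objects stringify to their names here), e.g.
  #   ( id => 0, transdate => 1, ... )
  # and timestamp columns are cast with ::date so only the date part is exported.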
  if ($known_tables{$table}{transdate}) {
    if ($self->from) {
      push @where_tokens, "$known_tables{$table}{transdate} >= ?";
      push @values,       $self->from;
    }
    if ($self->to) {
      push @where_tokens, "$known_tables{$table}{transdate} <= ?";
      push @values,       $self->to;
    }
  }
  if ($known_tables{$table}{tables}) {
    my ($col, @col_specs) = @{ $known_tables{$table}{tables} };

    my %ids;
    for (@col_specs) {
      my ($ftable, $fkey) = split /\./, $_;
      if (!exists $self->export_ids->{$ftable}{$fkey}) {
        # check if we forgot to keep it
        if (!grep { $_ eq $fkey } @{ $known_tables{$ftable}{keep} || [] }) {
          die "unknown table spec '$_' for table $table, did you forget to keep $fkey in $ftable?";
        } else {
          # hmm, most likely just an empty set.
          $self->export_ids->{$ftable}{$fkey} = {};
        }
      }
      $ids{$_}++ for keys %{ $self->export_ids->{$ftable}{$fkey} };
    }

    if (keys %ids) {
      push @where_tokens, "$col IN (@{[ join ',', ('?') x keys %ids ]})";
      push @values, keys %ids;
    } else {
      push @where_tokens, '1=0';
    }
  }
  my $where_clause = @where_tokens ? 'WHERE ' . join ' AND ', @where_tokens : '';

  my $query = "SELECT " . join(', ', @select_tokens) . " FROM $table $where_clause";

  my $sth = $::form->get_standard_dbh->prepare($query);
  $sth->execute(@values) or die "error executing query $query: " . $sth->errstr;
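  # For illustration only: for a table filtered by transdate the generated
  # statement looks roughly like
  #   SELECT <all columns> FROM ar WHERE transdate >= ? AND transdate <= ?
  # with @values supplying the from/to dates for the placeholders.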
  while (my $row = $sth->fetch) {
    for my $keep_col (@{ $known_tables{$table}{keep} || [] }) {
      next if !$row->[$col_index{$keep_col}];
      $self->export_ids->{$table}{$keep_col} ||= {};
      $self->export_ids->{$table}{$keep_col}{$row->[$col_index{$keep_col}]}++;
    }
    $csv->print($fh, $row) or $csv->error_diag;
  }

  close($fh);
}
sub tag {
  my ($self, $tag, $content) = @_;
  $self->writer->startTag($tag);
  if ('CODE' eq ref $content) {
    $content->($self);
  } else {
    $self->writer->characters($content);
  }
  $self->writer->endTag;
  return $self;
}
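# Usage sketch (illustrative): the helper chains, so
#   $self->tag('Version', '1.0')->tag('Name', $name);
# writes <Version>1.0</Version><Name>...</Name>, and passing a coderef as
# content nests further tags inside the surrounding element.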
sub make_comment {
  my $gdpdu_version = API_VERSION();
  my $kivi_version  = $::form->read_version;
  my $person        = $::myconfig{name};
  # 'x!!' repeats the parenthesized element once if the config value is true
  # and zero times otherwise, i.e. only contact data that is actually set gets included
  my $contact       = join ', ',
    (t8("Email") . ": $::myconfig{email}") x!! $::myconfig{email},
    (t8("Tel")   . ": $::myconfig{tel}")   x!! $::myconfig{tel},
    (t8("Fax")   . ": $::myconfig{fax}")   x!! $::myconfig{fax};

  t8('DataSet for GDPdU version #1. Created with kivitendo #2 by #3 (#4)',
    $gdpdu_version, $kivi_version, $person, $contact
  );
}
sub client_location {
  $_[0]->location
}
sub sorted_tables {
  my ($self) = @_;
  my %given = map { $_ => 1 } @{ $self->tables };
  grep { $given{$_} } @export_table_order;
}
sub all_tables {
  my ($self, $yesno) = @_;
  $self->tables(\@export_table_order) if $yesno;
}
sub init_files      { +{} }
sub init_export_ids { +{} }
sub init_tempfiles  { [] }
sub API_VERSION {
  DateTime->new(year => 2002, month => 8, day => 14)->to_kivitendo;
}
sub DESTROY {
  unlink $_ for @{ $_[0]->tempfiles || [] };
}
1;

__END__

=encoding utf-8

=head1 NAME

SL::GDPDU - IDEA export generator
=head1 FUNCTIONS

=over 4

=item C<new PARAMS>

Create a new export object. C<PARAMS> may contain:

=over 4

=item C<company>

The name of the company, needed for the supplier header.

=item C<location>

Location of the company, needed for the supplier header.

=item C<from>

=item C<to>

Will only include records in the specified date range. Data pulled from other
tables will be culled to match what is needed for these records.

=item C<tables>

A list of tables to be exported.

=item C<all_tables>

Alternative to C<tables>, enables all known tables.

=back
=item C<generate_export>

Do the work. Will return an absolute path to a temp file where all export
files are zipped together.
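A minimal usage sketch, assuming the parameters documented above (the values
and the table selection are purely illustrative):

  my $export = SL::GDPDU->new(
    company  => 'Example Company GmbH',
    location => 'Example City',
    from     => DateTime->new(year => 2015, month =>  1, day =>  1),
    to       => DateTime->new(year => 2015, month => 12, day => 31),
    tables   => [ qw(ar acc_trans chart customer) ],
  );

  my $zip_path = $export->generate_export;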
=head1 BUGS AND CAVEATS

=over 4

=item *

Date format is shit. The official docs state that only C<YY>, C<YYYY>, C<MM>,
and C<DD> are supported; timestamps do not exist.

=item *

Number parsing seems to be fragile. The official docs state that the behaviour
for too low C<Accuracy> settings is undefined.

There is no dedicated integer type.
=item *

Currently C<ar> and C<ap> have a foreign key to themselves with the name
C<storno_id>. If this foreign key is present in the C<INDEX.XML> then the
storno records have to be included as well. Since this is extremely awkward to
code and confusing for the examiner as to why there are records outside of the
time range, this export skips all self-referential foreign keys.
=item *

The documentation for foreign keys is extremely weird. Instead of giving
column maps it assumes that foreign keys map to the primary keys given for the
target table, and in that order. Foreign keys to keys that are not primary
seem to be impossible. Changing the type is also not allowed (which actually
makes sense). Hopefully there are no bugs there.
=item *

It's currently disallowed to export the whole dataset. It's not clear if this
=item *

It is not possible to set an empty C<DigitGroupingSymbol> since the import
will then just work with the default. This was asked in their forum, and the
response (translated from German) actually was:

  Simple solution: define the thousands separator as a comma, even if it is
  not used. If you are already using the comma as the field separator, choose
  an alternative for the thousands separator, such as the pipe symbol |.

L<http://www.gdpdu-portal.com/forum/index.php?mode=thread&id=1392>
=back

=head1 AUTHOR

Sven Schöling E<lt>s.schoeling@linet-services.deE<gt>

=cut