5 # optional: background jobable
10 use parent qw(Rose::Object);
17 use List::UtilsBy qw(partition_by);
19 use SL::DB::Helper::ALL; # since we work on meta data, we need everything
20 use SL::DB::Helper::Mappings;
21 use SL::Locale::String qw(t8);
23 use Rose::Object::MakeMethods::Generic (
24 scalar => [ qw(from to tables writer company location) ],
25 'scalar --get_set_init' => [ qw(files tempfiles export_ids) ],
# Metadata registry for every table this export knows about (fragment of the
# %known_tables hash; the surrounding "my %known_tables = ( ... );" lines are
# outside this view). Keys per entry:
30 # name: short name, translated
31 # description: long description, translated
32 # transdate: column used to filter from/to, empty if table is filtered otherwise
33 # keep: arrayref of columns that should be saved for further referencing
34 # tables: arrayref with one column and one or many table.column references that were kept earlier
# Document tables (ar/ap/oe/delivery_orders/gl) filter by transdate; position
# and master-data tables are culled via 'tables' references to ids kept earlier.
36 ar => { name => t8('Invoice'), description => t8('Sales Invoices and Accounts Receivables'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
37 ap => { name => t8('Purchase Invoice'), description => t8('Purchase Invoices and Accounts Payables'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
38 oe => { name => t8('Orders'), description => t8('Orders and Quotations, Sales and Purchase'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
39 delivery_orders => { name => t8('Delivery Orders'), description => t8('Delivery Orders'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
40 gl => { name => t8('General Ledger'), description => t8('General Ledger Entries'), keep => [ qw(id) ], transdate => 'transdate', },
41 invoice => { name => t8('Invoice Positions'), description => t8('Positions for all Invoices'), keep => [ qw(parts_id) ], tables => [ trans_id => "ar.id", "ap.id" ] },
42 orderitems => { name => t8('OrderItems'), description => t8('Positions for all Orders'), keep => [ qw(parts_id) ], tables => [ trans_id => "oe.id" ] },
43 delivery_order_items => { name => t8('Delivery Order Items'), description => t8('Positions for all Delivery Orders'), keep => [ qw(parts_id) ], tables => [ delivery_order_id => "delivery_orders.id" ] },
44 acc_trans => { name => t8('Transactions'), description => t8('All general ledger entries'), keep => [ qw(chart_id) ], tables => [ trans_id => "ar.id", "ap.id", "oe.id", "delivery_orders.id", "gl.id" ] },
45 chart => { name => t8('Charts'), description => t8('Chart of Accounts'), tables => [ id => "acc_trans.chart_id" ] },
46 customer => { name => t8('Customers'), description => t8('Customer Master Data'), tables => [ id => "ar.customer_id", "ap.customer_id", "oe.customer_id", "delivery_orders.customer_id" ] },
47 vendor => { name => t8('Vendors'), description => t8('Vendor Master Data'), tables => [ id => "ar.vendor_id", "ap.vendor_id", "oe.vendor_id", "delivery_orders.vendor_id" ] },
48 parts => { name => t8('Parts'), description => t8('Parts, Services, and Assemblies'), tables => [ id => "invoice.parts_id", "orderitems.parts_id", "delivery_order_items.parts_id" ] },
51 # rows in this listing are tiers.
52 # tables may depend on ids in a tier above them
# Export order: documents first, then their positions, so that the ids 'keep'ed
# while exporting a tier are available when a lower tier culls by them.
# (List appears truncated in this view; remaining tiers/closing paren not shown.)
53 my @export_table_order = qw(
54 ar ap gl oe delivery_orders
55 invoice orderitems delivery_order_items
62 # needed because the standard dbh sets datestyle german and we don't want to mess with that
# GDPdU-style date format pattern, emitted both in the XML spec and used to
# describe the CSV date columns.
63 my $date_format = 'DD.MM.YYYY';
65 # callbacks that produce the xml spec for these column types
# Keyed by the Rose column metadata class name; each callback receives $self
# and writes the type-describing child elements via the tag() helper.
# (Fragment of the %column_types hash; the "my %column_types = (" / closing
# lines are outside this view.)
67 'Rose::DB::Object::Metadata::Column::Integer' => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
68 'Rose::DB::Object::Metadata::Column::BigInt' => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
69 'Rose::DB::Object::Metadata::Column::Text' => sub { $_[0]->tag('AlphaNumeric') },
70 'Rose::DB::Object::Metadata::Column::Varchar' => sub { $_[0]->tag('AlphaNumeric') },
71 'Rose::DB::Object::Metadata::Column::Character' => sub { $_[0]->tag('AlphaNumeric') },
# Accuracy 5: fixed decimal precision for NUMERIC columns in the export spec.
72 'Rose::DB::Object::Metadata::Column::Numeric' => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 5) }) },
73 'Rose::DB::Object::Metadata::Column::Date' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
# Timestamps are exported as plain dates (see do_csv_export: '::date' cast).
74 'Rose::DB::Object::Metadata::Column::Timestamp' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
75 'Rose::DB::Object::Metadata::Column::Float' => sub { $_[0]->tag('Numeric') },
# Booleans get exported as AlphaNumeric with Map entries translating the raw
# values to localized true/false strings. NOTE(review): the 'From' tags are not
# visible in this fragment — confirm the full mapping against the original file.
76 'Rose::DB::Object::Metadata::Column::Boolean' => sub { $_[0]
78 ->tag('Map', sub { $_[0]
80 ->tag('To', t8('true'))
82 ->tag('Map', sub { $_[0]
84 ->tag('To', t8('false'))
86 ->tag('Map', sub { $_[0]
88 ->tag('To', t8('false'))
# Body fragment of the export driver (the enclosing "sub" line is outside this
# view). Validates the configured parameters, exports each requested table to
# CSV in dependency order, then bundles all produced files into a zip archive.
# Dies with a descriptive message on any invalid configuration.
97 $self->from && 'DateTime' eq ref $self->from or die 'need from date';
98 $self->to && 'DateTime' eq ref $self->to or die 'need to date';
99 $self->from <= $self->to or die 'from date must be earlier or equal than to date';
100 $self->tables && @{ $self->tables } or die 'need tables';
# every requested table must be one of the known exportable tables
101 for (@{ $self->tables }) {
102 next if $known_tables{$_};
103 die "unknown table '$_'";
106 # get data from those tables and save to csv
107 # for that we need to build queries that fetch all the columns
# sorted_tables preserves @export_table_order so kept ids exist before they
# are needed by dependent tables
108 for ($self->sorted_tables) {
109 $self->do_csv_export($_);
# ship the official DTD alongside the generated INDEX.XML
116 $self->files->{'gdpdu-01-08-2002.dtd'} = File::Spec->catfile('users', 'gdpdu-01-08-2002.dtd');
# collect everything registered in $self->files into a zip on a temp file
119 my ($fh, $zipfile) = File::Temp::tempfile();
120 my $zip = Archive::Zip->new;
122 while (my ($name, $file) = each %{ $self->files }) {
123 $zip->addFile($file, $name);
126 $zip->writeToFileHandle($fh) == Archive::Zip::AZ_OK() or die 'error writing zip file';
# Body fragment of the INDEX.XML generator (enclosing "sub" line not in view).
# Writes the GDPdU DataSet description to a temp file, registers it for
# inclusion in the zip and for cleanup in DESTROY.
135 my ($fh, $filename) = File::Temp::tempfile();
136 binmode($fh, ':utf8');
138 $self->files->{'INDEX.XML'} = $filename;
139 push @{ $self->tempfiles }, $filename;
# constructor args (OUTPUT handle, encoding, ...) continue on lines not shown here
141 my $writer = XML::Writer->new(
# stash the writer on the object so the tag() helper can reach it
146 $self->writer($writer);
147 $self->writer->xmlDecl('UTF-8');
148 $self->writer->doctype('DataSet', undef, "gdpdu-01-08-2002.dtd");
# nested closures mirror the XML element nesting of the DataSet document
149 $self->tag('DataSet', sub { $self
150 ->tag('Version', '1.0')
151 ->tag('DataSupplier', sub { $self
152 ->tag('Name', $self->client_name)
153 ->tag('Location', $self->client_location)
154 ->tag('Comment', $self->make_comment)
156 ->tag('Media', sub { $self
157 ->tag('Name', t8('DataSet #1', 1));
# one <Table> element per requested table (emission continues past this view)
158 for (@{ $self->tables }) { $self
# Body fragment of the per-table XML writer (enclosing "sub" line not in view).
# Emits one <Table> element describing $table: CSV file URL, translated
# name/description, validity range, CSV format parameters, then the column
# specs and foreign keys.
167 my ($self, $table) = @_;
168 my $writer = $self->writer;
170 $self->tag('Table', sub { $self
171 ->tag('URL', "$table.csv")
172 ->tag('Name', $known_tables{$table}{name})
173 ->tag('Description', $known_tables{$table}{description})
# validity range mirrors the from/to filter used in do_csv_export
174 ->tag('Validity', sub { $self
175 ->tag('Range', sub { $self
176 ->tag('From', $self->from->to_kivitendo(dateformat => 'dd.mm.yyyy'))
177 ->tag('To', $self->to->to_kivitendo(dateformat => 'dd.mm.yyyy'))
179 ->tag('Format', $date_format)
181 ->tag('DecimalSymbol', '.')
182 ->tag('DigitGroupingSymbol', '|') # see CAVEATS in documentation
# CSV dialect must match the Text::CSV_XS settings in do_csv_export
183 ->tag('VariableLength', sub { $self
184 ->tag('ColumnDelimiter', ',')
185 ->tag('RecordDelimiter', '&x0A;')
186 ->tag('TextEncapsulator', '"')
188 ->foreign_keys($table)
# Body fragment of _table_columns (enclosing "sub" line not in view).
# Returns the Rose column metadata objects for $table partitioned into primary
# key members (key 1) and regular columns (key 0).
195 my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
197 # PrimaryKeys must come before regular columns, so partition first
# "1 *" forces the boolean into a numeric 0/1 partition key
198 partition_by { 1 * $_->is_primary_key_member } $package->meta->columns;
# Body fragment of the column-spec writer (enclosing "sub" line not in view).
# Emits <VariablePrimaryKey> elements for primary-key columns first, then
# <VariableColumn> elements for the rest; each gets its type spec via the
# %column_types callback for its Rose column class. Dies on unmapped types.
202 my ($self, $table) = @_;
204 my %cols_by_primary_key = _table_columns($table);
# primary keys: key 1 of the partition from _table_columns
206 for my $column (@{ $cols_by_primary_key{1} }) {
207 my $type = $column_types{ ref $column };
209 die "unknown col type @{[ ref $column ]}" unless $type;
211 $self->tag('VariablePrimaryKey', sub { $self
212 ->tag('Name', $column->name);
# regular (non-primary-key) columns: key 0
217 for my $column (@{ $cols_by_primary_key{0} }) {
218 my $type = $column_types{ ref $column };
220 die "unknown col type @{[ ref $column]}" unless $type;
222 $self->tag('VariableColumn', sub { $self
223 ->tag('Name', $column->name);
# Body fragment of foreign_keys (enclosing "sub" line not in view).
# Emits one <ForeignKey> element per Rose foreign key of $table whose target
# table is also part of this export. Skips self-referential keys (see CAVEATS
# in the POD) and dies on composite keys, which are unsupported.
232 my ($self, $table) = @_;
233 my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
# only reference tables that are actually included in this export
235 my %requested = map { $_ => 1 } @{ $self->tables };
237 for my $rel ($package->meta->foreign_keys) {
238 next unless $requested{ $rel->class->meta->table };
240 # ok, now extract the columns used as foreign key
241 my %key_columns = $rel->key_columns;
243 if (1 != keys %key_columns) {
244 die "multi keys? we don't support this currently. fix it please";
247 if ($table eq $rel->class->meta->table) {
248 # self referential foreign keys are a PITA to export correctly. skip!
252 $self->tag('ForeignKey', sub {
# single key guaranteed by the check above, so the loop writes one Name
253 $_[0]->tag('Name', $_) for keys %key_columns;
254 $_[0]->tag('References', $rel->class->meta->table);
# Body fragment of do_csv_export (enclosing "sub" line not in view).
# Dumps $table to "<table>.csv" in a temp file: builds a SELECT with the
# columns in spec order (primary keys first), filters either by the from/to
# date range (transdate tables) or by ids kept from previously exported tables,
# and records this table's 'keep' column values for later dependents.
260 my ($self, $table) = @_;
# CSV dialect must stay in sync with the VariableLength spec in the XML
262 my $csv = Text::CSV_XS->new({ binary => 1, eol => "\n", sep_char => ",", quote_char => '"' });
264 my ($fh, $filename) = File::Temp::tempfile();
265 binmode($fh, ':utf8');
267 $self->files->{"$table.csv"} = $filename;
268 push @{ $self->tempfiles }, $filename;
270 # in the right order (primary keys first)
271 my %cols_by_primary_key = _table_columns($table);
272 my @columns = (@{ $cols_by_primary_key{1} }, @{ $cols_by_primary_key{0} });
# map column (stringified) to its position in the result rows
273 my %col_index = do { my $i = 0; map {; "$_" => $i++ } @columns };
275 # and normalize date stuff
# timestamp columns are cast to date, matching the Date type in the XML spec
276 my @select_tokens = map { (ref $_) =~ /Time/ ? $_->name . '::date' : $_->name } @columns;
# case 1: table is filtered directly by its transdate column
280 if ($known_tables{$table}{transdate}) {
282 push @where_tokens, "$known_tables{$table}{transdate} >= ?";
283 push @values, $self->from;
286 push @where_tokens, "$known_tables{$table}{transdate} <= ?";
287 push @values, $self->to;
# case 2: table is culled by ids kept while exporting referenced tables
290 if ($known_tables{$table}{tables}) {
291 my ($col, @col_specs) = @{ $known_tables{$table}{tables} };
# each spec is "ftable.fkey"; collect the union of all kept id values
294 my ($ftable, $fkey) = split /\./, $_;
295 if (!exists $self->export_ids->{$ftable}{$fkey}) {
296 # check if we forgot to keep it
297 if (!grep { $_ eq $fkey } @{ $known_tables{$ftable}{keep} || [] }) {
298 die "unknown table spec '$_' for table $table, did you forget to keep $fkey in $ftable?"
300 # hmm, most likely just an empty set.
301 $self->export_ids->{$ftable}{$fkey} = {};
304 $ids{$_}++ for keys %{ $self->export_ids->{$ftable}{$fkey} };
# placeholder list sized to the number of distinct ids
307 push @where_tokens, "$col IN (@{[ join ',', ('?') x keys %ids ]})";
308 push @values, keys %ids;
# no referenced ids at all: select nothing rather than everything
310 push @where_tokens, '1=0';
314 my $where_clause = @where_tokens ? 'WHERE ' . join ' AND ', @where_tokens : '';
316 my $query = "SELECT " . join(', ', @select_tokens) . " FROM $table $where_clause";
318 my $sth = $::form->get_standard_dbh->prepare($query);
319 $sth->execute(@values) or die "error executing query $query: " . $sth->errstr;
321 while (my $row = $sth->fetch) {
# remember values of 'keep' columns so dependent tables can filter by them
322 for my $keep_col (@{ $known_tables{$table}{keep} || [] }) {
323 next if !$row->[$col_index{$keep_col}];
324 $self->export_ids->{$table}{$keep_col} ||= {};
325 $self->export_ids->{$table}{$keep_col}{$row->[$col_index{$keep_col}]}++;
327 $csv->print($fh, $row) or $csv->error_diag;
# Body fragment of the tag() helper (enclosing "sub" line not in view).
# Writes one XML element: opens $tag, then either invokes $content as a
# nested-content callback (when it is a coderef) or writes it as character
# data, and closes the element. Presumably returns $self for chaining, as the
# call sites ("->tag(...)->tag(...)") rely on — the return is outside this view.
333 my ($self, $tag, $content) = @_;
335 $self->writer->startTag($tag);
336 if ('CODE' eq ref $content) {
339 $self->writer->characters($content);
341 $self->writer->endTag;
# Body fragment of make_comment (enclosing "sub" line not in view).
# Builds the translated DataSupplier comment string: GDPdU API version,
# kivitendo version, exporting user, and whatever contact info is configured.
346 my $gdpdu_version = API_VERSION();
347 my $kivi_version = $::form->read_version;
348 my $person = $::myconfig{name};
# "x!!" repeats the string once when the config value is true, zero times
# otherwise — i.e. only configured contact channels make it into the list
349 my $contact = join ', ',
350 (t8("Email") . ": $::myconfig{email}" ) x!! $::myconfig{email},
351 (t8("Tel") . ": $::myconfig{tel}" ) x!! $::myconfig{tel},
352 (t8("Fax") . ": $::myconfig{fax}" ) x!! $::myconfig{fax};
354 t8('DataSet for GDPdU version #1. Created with kivitendo #2 by #3 (#4)',
355 $gdpdu_version, $kivi_version, $person, $contact
# Small accessor/default subs (several bodies are only partially visible here).
363 sub client_location {
# sorted_tables fragment: filter the canonical @export_table_order down to the
# requested tables, preserving the tier order required by the export
370 my %given = map { $_ => 1 } @{ $self->tables };
372 grep { $given{$_} } @export_table_order;
# all_tables fragment: a true $yesno selects every known table for export
376 my ($self, $yesno) = @_;
378 $self->tables(\@export_table_order) if $yesno;
# lazy init defaults for the --get_set_init accessors declared above
381 sub init_files { +{} }
382 sub init_export_ids { +{} }
383 sub init_tempfiles { [] }
# API_VERSION fragment: the GDPdU spec date (2002-08-14), formatted for display
386 DateTime->new(year => 2002, month => 8, day => 14)->to_kivitendo;
# DESTROY fragment: remove every temp file registered during the export
390 unlink $_ for @{ $_[0]->tempfiles || [] };
401 SL::GDPDU - IDEA export generator
409 Create new export object. C<PARAMS> may contain:
415 The name of the company, needed for the supplier header
419 Location of the company, needed for the supplier header
425 Will only include records in the specified date range. Data pulled from other
426 tables will be culled to match what is needed for these records.
430 A list of tables to be exported.
434 Alternative to C<tables>, enables all known tables.
438 =item C<generate_export>
440 Do the work. Will return an absolute path to a temp file where all export files
451 Date format is shit. The official docs state that only C<YY>, C<YYYY>, C<MM>,
452 and C<DD> are supported, timestamps do not exist.
456 Number parsing seems to be fragile. Official docs state that behaviour for too
457 low C<Accuracy> settings is undefined.
459 There is no dedicated integer type.
463 Currently C<ar> and C<ap> have a foreign key to themselves with the name
464 C<storno_id>. If this foreign key is present in the C<INDEX.XML> then the
465 storno records have to be too. Since this is extremely awkward to code and
466 confusing for the examiner as to why there are records outside of the time
467 range, this export skips all self-referential foreign keys.
471 Documentation for foreign keys is extremely weird. Instead of giving column
472 maps it assumes that foreign keys map to the primary keys given for the target
473 table, and in that order. Foreign keys to keys that are not primary seems to be
474 impossible. Changing type is also not allowed (which actually makes sense).
475 Hopefully there are no bugs there.
479 It's currently disallowed to export the whole dataset. It's not clear if this
484 It is not possible to set an empty C<DigitGroupingSymbol> since then the import
485 will just work with the default. This was asked in their forum, and the
486 response actually was:
488 Einfache Lösung: Definieren Sie das Tausendertrennzeichen als Komma, auch
489 wenn es nicht verwendet wird. Sollten Sie das Komma bereits als Feldtrenner
490 verwenden, so wählen Sie als Tausendertrennzeichen eine Alternative wie das
493 L<http://www.gdpdu-portal.com/forum/index.php?mode=thread&id=1392>
499 Sven Schöling E<lt>s.schoeling@linet-services.deE<gt>