5 # optional: background jobable
10 use parent qw(Rose::Object);
17 use List::UtilsBy qw(partition_by);
19 use SL::DB::Helper::ALL; # since we work on meta data, we need everything
20 use SL::DB::Helper::Mappings;
21 use SL::Locale::String qw(t8);
23 use Rose::Object::MakeMethods::Generic (
24 scalar => [ qw(from to tables writer company location) ],
25 'scalar --get_set_init' => [ qw(files tempfiles export_ids) ],
30 # name: short name, translated
31 # description: long description, translated
32 # transdate: column used to filter from/to, empty if table is filtered otherwise
33 # keep: arrayref of columns that should be saved for further referencing
34 # tables: arrayref with one column and one or many table.column references that were kept earlier
36 ar => { name => t8('Invoice'), description => t8('Sales Invoices and Accounts Receivables'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
37 ap => { name => t8('Purchase Invoice'), description => t8('Purchase Invoices and Accounts Payables'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
38 oe => { name => t8('Orders'), description => t8('Orders and Quotations, Sales and Purchase'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
39 delivery_orders => { name => t8('Delivery Orders'), description => t8('Delivery Orders'), keep => [ qw(id customer_id vendor_id) ], transdate => 'transdate', },
40 gl => { name => t8('General Ledger'), description => t8('General Ledger Entries'), keep => [ qw(id) ], transdate => 'transdate', },
41 invoice => { name => t8('Invoice Positions'), description => t8('Positions for all Invoices'), keep => [ qw(parts_id) ], tables => [ trans_id => "ar.id", "ap.id" ] },
42 orderitems => { name => t8('OrderItems'), description => t8('Positions for all Orders'), keep => [ qw(parts_id) ], tables => [ trans_id => "oe.id" ] },
43 delivery_order_items => { name => t8('Delivery Order Items'), description => t8('Positions for all Delivery Orders'), keep => [ qw(parts_id) ], tables => [ delivery_order_id => "delivery_orders.id" ] },
44 acc_trans => { name => t8('Transactions'), description => t8('All general ledger entries'), keep => [ qw(chart_id) ], tables => [ trans_id => "ar.id", "ap.id", "oe.id", "delivery_orders.id", "gl.id" ] },
45 chart => { name => t8('Charts'), description => t8('Chart of Accounts'), tables => [ id => "acc_trans.chart_id" ] },
46 customer => { name => t8('Customers'), description => t8('Customer Master Data'), tables => [ id => "ar.customer_id", "ap.customer_id", "oe.customer_id", "delivery_orders.customer_id" ] },
47 vendor => { name => t8('Vendors'), description => t8('Vendor Master Data'), tables => [ id => "ar.vendor_id", "ap.vendor_id", "oe.vendor_id", "delivery_orders.vendor_id" ] },
48 parts => { name => t8('Parts'), description => t8('Parts, Services, and Assemblies'), tables => [ id => "invoice.parts_id", "orderitems.parts_id", "delivery_order_items.parts_id" ] },
51 # rows in this listing are tiers.
52 # tables may depend on ids in a tier above them
53 my @export_table_order = qw(
54 ar ap gl oe delivery_orders
55 invoice orderitems delivery_order_items
62 # needed because the standard dbh sets datestyle german and we don't want to mess with that
63 my $date_format = 'DD.MM.YYYY';
65 # callbacks that produce the xml spec for these column types
67 'Rose::DB::Object::Metadata::Column::Integer' => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
68 'Rose::DB::Object::Metadata::Column::BigInt' => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 0) }) },
69 'Rose::DB::Object::Metadata::Column::Text' => sub { $_[0]->tag('AlphaNumeric') },
70 'Rose::DB::Object::Metadata::Column::Varchar' => sub { $_[0]->tag('AlphaNumeric') },
71 'Rose::DB::Object::Metadata::Column::Character' => sub { $_[0]->tag('AlphaNumeric') },
72 'Rose::DB::Object::Metadata::Column::Numeric' => sub { $_[0]->tag('Numeric', sub { $_[0]->tag('Accuracy', 5) }) },
73 'Rose::DB::Object::Metadata::Column::Date' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
74 'Rose::DB::Object::Metadata::Column::Timestamp' => sub { $_[0]->tag('Date', sub { $_[0]->tag('Format', $date_format) }) },
75 'Rose::DB::Object::Metadata::Column::Float' => sub { $_[0]->tag('Numeric') },
76 'Rose::DB::Object::Metadata::Column::Boolean' => sub { $_[0]->tag('AlphaNumeric', sub { $_[0]
77 ->tag('Map', sub { $_[0]
79 ->tag('To', t8('true'))
81 ->tag('Map', sub { $_[0]
83 ->tag('To', t8('false'))
85 ->tag('Map', sub { $_[0]
87 ->tag('To', t8('false'))
96 $self->from && 'DateTime' eq ref $self->from or die 'need from date';
97 $self->to && 'DateTime' eq ref $self->to or die 'need to date';
98 $self->from <= $self->to or die 'from date must be earlier or equal than to date';
99 $self->tables && @{ $self->tables } or die 'need tables';
100 for (@{ $self->tables }) {
101 next if $known_tables{$_};
102 die "unknown table '$_'";
105 # get data from those tables and save to csv
106 # for that we need to build queries that fetch all the columns
107 for ($self->sorted_tables) {
108 $self->do_csv_export($_);
115 $self->files->{'gdpdu-01-08-2002.dtd'} = File::Spec->catfile('users', 'gdpdu-01-08-2002.dtd');
118 my ($fh, $zipfile) = File::Temp::tempfile();
119 my $zip = Archive::Zip->new;
121 while (my ($name, $file) = each %{ $self->files }) {
122 $zip->addFile($file, $name);
125 $zip->writeToFileHandle($fh) == Archive::Zip::AZ_OK() or die 'error writing zip file';
134 my ($fh, $filename) = File::Temp::tempfile();
135 binmode($fh, ':utf8');
137 $self->files->{'INDEX.XML'} = $filename;
138 push @{ $self->tempfiles }, $filename;
140 my $writer = XML::Writer->new(
145 $self->writer($writer);
146 $self->writer->xmlDecl('UTF-8');
147 $self->writer->doctype('DataSet', undef, "gdpdu-01-08-2002.dtd");
148 $self->tag('DataSet', sub { $self
149 ->tag('Version', '1.0')
150 ->tag('DataSupplier', sub { $self
151 ->tag('Name', $self->client_name)
152 ->tag('Location', $self->client_location)
153 ->tag('Comment', $self->make_comment)
155 ->tag('Media', sub { $self
156 ->tag('Name', t8('DataSet #1', 1));
157 for (@{ $self->tables }) { $self
166 my ($self, $table) = @_;
167 my $writer = $self->writer;
169 $self->tag('Table', sub { $self
170 ->tag('URL', "$table.csv")
171 ->tag('Name', $known_tables{$table}{name})
172 ->tag('Description', $known_tables{$table}{description})
173 ->tag('Validity', sub { $self
174 ->tag('Range', sub { $self
175 ->tag('From', $self->from->to_kivitendo(dateformat => 'dd.mm.yyyy'))
176 ->tag('To', $self->to->to_kivitendo(dateformat => 'dd.mm.yyyy'))
178 ->tag('Format', $date_format)
180 ->tag('DecimalSymbol', '.')
181 ->tag('DigitGroupingSymbol', '')
182 ->tag('VariableLength', sub { $self
184 ->foreign_keys($table)
191 my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
193 # PrimaryKeys must come before regular columns, so partition first
194 partition_by { 1 * $_->is_primary_key_member } $package->meta->columns;
198 my ($self, $table) = @_;
200 my %cols_by_primary_key = _table_columns($table);
202 for my $column (@{ $cols_by_primary_key{1} }) {
203 my $type = $column_types{ ref $column };
205 die "unknown col type @{[ ref $column ]}" unless $type;
207 $self->tag('VariablePrimaryKey', sub { $self
208 ->tag('Name', $column->name);
213 for my $column (@{ $cols_by_primary_key{0} }) {
214 my $type = $column_types{ ref $column };
216 die "unknown col type @{[ ref $column]}" unless $type;
218 $self->tag('VariableColumn', sub { $self
219 ->tag('Name', $column->name);
228 my ($self, $table) = @_;
229 my $package = SL::DB::Helper::Mappings::get_package_for_table($table);
231 my %requested = map { $_ => 1 } @{ $self->tables };
233 for my $rel ($package->meta->foreign_keys) {
234 next unless $requested{ $rel->class->meta->table };
236 # ok, now extract the columns used as foreign key
237 my %key_columns = $rel->key_columns;
239 if (1 != keys %key_columns) {
240 die "multi keys? we don't support this currently. fix it please";
243 if ($table eq $rel->class->meta->table) {
244 # self referential foreign keys are a PITA to export correctly. skip!
248 $self->tag('ForeignKey', sub {
249 $_[0]->tag('Name', $_) for keys %key_columns;
250 $_[0]->tag('References', $rel->class->meta->table);
256 my ($self, $table) = @_;
258 my $csv = Text::CSV_XS->new({ binary => 1, eol => "\n", sep_char => ",", quote_char => '"' });
260 my ($fh, $filename) = File::Temp::tempfile();
261 binmode($fh, ':utf8');
263 $self->files->{"$table.csv"} = $filename;
264 push @{ $self->tempfiles }, $filename;
266 # in the right order (primary keys first)
267 my %cols_by_primary_key = _table_columns($table);
268 my @columns = (@{ $cols_by_primary_key{1} }, @{ $cols_by_primary_key{0} });
269 my %col_index = do { my $i = 0; map {; "$_" => $i++ } @columns };
271 # and normalize date stuff
272 my @select_tokens = map { (ref $_) =~ /Time/ ? $_->name . '::date' : $_->name } @columns;
276 if ($known_tables{$table}{transdate}) {
278 push @where_tokens, "$known_tables{$table}{transdate} >= ?";
279 push @values, $self->from;
282 push @where_tokens, "$known_tables{$table}{transdate} <= ?";
283 push @values, $self->to;
286 if ($known_tables{$table}{tables}) {
287 my ($col, @col_specs) = @{ $known_tables{$table}{tables} };
290 my ($ftable, $fkey) = split /\./, $_;
291 if (!exists $self->export_ids->{$ftable}{$fkey}) {
292 # check if we forgot to keep it
293 if (!grep { $_ eq $fkey } @{ $known_tables{$ftable}{keep} || [] }) {
294 die "unknown table spec '$_' for table $table, did you forget to keep $fkey in $ftable?"
296 # hmm, most likely just an empty set.
297 $self->export_ids->{$ftable}{$fkey} = {};
300 $ids{$_}++ for keys %{ $self->export_ids->{$ftable}{$fkey} };
303 push @where_tokens, "$col IN (@{[ join ',', ('?') x keys %ids ]})";
304 push @values, keys %ids;
306 push @where_tokens, '1=0';
310 my $where_clause = @where_tokens ? 'WHERE ' . join ' AND ', @where_tokens : '';
312 my $query = "SELECT " . join(', ', @select_tokens) . " FROM $table $where_clause";
314 my $sth = $::form->get_standard_dbh->prepare($query);
315 $sth->execute(@values) or die "error executing query $query: " . $sth->errstr;
317 while (my $row = $sth->fetch) {
318 for my $keep_col (@{ $known_tables{$table}{keep} || [] }) {
319 next if !$row->[$col_index{$keep_col}];
320 $self->export_ids->{$table}{$keep_col} ||= {};
321 $self->export_ids->{$table}{$keep_col}{$row->[$col_index{$keep_col}]}++;
323 $csv->print($fh, $row) or $csv->error_diag;
329 my ($self, $tag, $content) = @_;
331 $self->writer->startTag($tag);
332 if ('CODE' eq ref $content) {
335 $self->writer->characters($content);
337 $self->writer->endTag;
342 my $gdpdu_version = API_VERSION();
343 my $kivi_version = $::form->read_version;
344 my $person = $::myconfig{name};
345 my $contact = join ', ',
346 (t8("Email") . ": $::myconfig{email}" ) x!! $::myconfig{email},
347 (t8("Tel") . ": $::myconfig{tel}" ) x!! $::myconfig{tel},
348 (t8("Fax") . ": $::myconfig{fax}" ) x!! $::myconfig{fax};
350 t8('DataSet for GDPdU version #1. Created with kivitendo #2 by #3 (#4)',
351 $gdpdu_version, $kivi_version, $person, $contact
359 sub client_location {
366 my %given = map { $_ => 1 } @{ $self->tables };
368 grep { $given{$_} } @export_table_order;
372 my ($self, $yesno) = @_;
374 $self->tables(\@export_table_order) if $yesno;
# Lazy initializer for the 'files' accessor (declared above via
# Rose::Object::MakeMethods 'scalar --get_set_init'): starts as an
# empty hash ref, later mapping archive member names to paths on disk.
sub init_files {
  return +{};
}
# Lazy initializer for the 'export_ids' accessor (see the
# 'scalar --get_set_init' declaration above): an empty hash ref that
# will collect kept ids per table/column during the export.
sub init_export_ids {
  return +{};
}
# Lazy initializer for the 'tempfiles' accessor (see the
# 'scalar --get_set_init' declaration above): an empty array ref of
# temp file paths to be cleaned up later.
sub init_tempfiles {
  return [];
}
382 DateTime->new(year => 2002, month => 8, day => 14)->to_kivitendo;
386 unlink $_ for @{ $_[0]->tempfiles || [] };
397 SL::GDPDU - IDEA export generator
405 Create new export object. C<PARAMS> may contain:
411 The name of the company, needed for the supplier header
415 Location of the company, needed for the supplier header
421 Will only include records in the specified date range. Data pulled from other
422 tables will be culled to match what is needed for these records.
426 A list of tables to be exported.
430 Alternative to C<tables>, enables all known tables.
434 =item C<generate_export>
436 Do the work. Will return an absolute path to a temp file where all export files
447 The supported date formats are very limited. The official docs state that only C<YY>, C<YYYY>, C<MM>,
448 and C<DD> are supported, timestamps do not exist.
452 Number parsing seems to be fragile. Official docs state that behaviour for too
453 low C<Accuracy> settings is undefined.
455 There is no dedicated integer type.
459 Currently C<ar> and C<ap> have a foreign key to themselves with the name
460 C<storno_id>. If this foreign key is present in the C<INDEX.XML> then the
461 storno records have to be too. Since this is extremely awkward to code and
462 confusing for the examiner as to why there are records outside of the time
463 range, this export skips all self-referential foreign keys.
467 Documentation for foreign keys is extremely weird. Instead of giving column
468 maps it assumes that foreign keys map to the primary keys given for the target
469 table, and in that order. Foreign keys to keys that are not primary seems to be
470 impossible. Changing type is also not allowed (which actually makes sense).
471 Hopefully there are no bugs there.
475 It's currently disallowed to export the whole dataset. It's not clear if this
482 Sven Schöling E<lt>s.schoeling@linet-services.deE<gt>