[Lazarus] Writing >1000 TBufDataset records to file is extremely slow

Werner Pamler werner.pamler at freenet.de
Sun Mar 26 23:53:08 CEST 2017


While trying to extend the import/export example of fpspreadsheet from a
dBase table to a TBufDataset, I came across the following issue with
TBufDataset: the data are posted to the dataset as quickly as usual, but
writing the dataset to file takes extremely long once there are more
than a few thousand records.

Run the demo attached below. On my system, I measure these (non-linearly 
scaling) execution times for writing the TBufDataset table to file:

1000 records -- 0.9 seconds
2000 records -- 8.8 seconds
3000 records -- 31.1 seconds
etc.
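
To put numbers on the scaling: going from 1000 to 2000 records multiplies
the time by 8.8 / 0.9, i.e. roughly 10, and going from 1000 to 3000
records by 31.1 / 0.9, i.e. roughly 35. The time therefore grows clearly
faster than quadratically with the record count.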

Compared to that, writing the same data to a dbf file happens in the
blink of an eye (a shortened sketch of that code path follows below). Is
there anything I am doing wrong, or should I report a bug?
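
For reference, this is roughly what the dbf code path looks like (only a
sketch: the field list is shortened, the field names are adapted to the
dbf naming limits, and it reuses the constants of the demo below; the
real code in the fpspreadsheet example differs in the details). As far
as I can see, TDbf writes each record to the .dbf file when it is
posted, so there is no separate save step:

procedure CreateDbfTable;
// requires the dbf unit in the uses clause
var
   dbfTable: TDbf;
   i: Integer;
   t: TDateTime;
begin
   ForceDirectories(DATADIR);
   dbfTable := TDbf.Create(nil);
   try
     dbfTable.FilePath := DATADIR;
     dbfTable.TableName := TABLENAME + DB_EXT[0];  // 'people.dbf'
     dbfTable.TableLevel := 4;                     // dBase IV format
     dbfTable.FieldDefs.Add('LASTNAME', ftString, 15);
     dbfTable.FieldDefs.Add('FIRSTNAME', ftString, 10);
     dbfTable.FieldDefs.Add('CITY', ftString, 15);
     dbfTable.CreateTable;
     dbfTable.Open;
     t := now;
     for i := 1 to NUM_RECORDS do begin
       dbfTable.Append;
       dbfTable.FieldByName('LASTNAME').AsString :=
         LAST_NAMES[Random(NUM_LAST_NAMES)];
       dbfTable.FieldByName('FIRSTNAME').AsString :=
         FIRST_NAMES[Random(NUM_FIRST_NAMES)];
       dbfTable.FieldByName('CITY').AsString :=
         CITIES[Random(NUM_CITIES)];
       dbfTable.Post;  // record is written out here, no separate save step
     end;
     dbfTable.Close;
     WriteLn('dbf total: ',
       FormatFloat('0.000 s', (now - t) * SECONDS_PER_DAY));
   finally
     dbfTable.Free;
   end;
end;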

----------------------- snip -------------------------------

program project1;

{$mode objfpc}{$H+}

uses
   SysUtils, Classes, db, bufdataset;

const
   // Parameters for generating database file contents
   NUM_LAST_NAMES = 8;
   NUM_FIRST_NAMES = 8;
   NUM_CITIES = 10;
   LAST_NAMES: array[0..NUM_LAST_NAMES-1] of string = (
     'Chaplin', 'Washington', 'Dylan', 'Springsteen', 'Brando',
     'Monroe', 'Dean', 'Lincoln');
   FIRST_NAMES: array[0..NUM_FIRST_NAMES-1] of string = (
     'Charley', 'George', 'Bob', 'Bruce', 'Marlon',
     'Marylin', 'James', 'Abraham');
   CITIES: array[0..NUM_CITIES-1] of string = (
     'New York', 'Los Angeles', 'San Francisco', 'Chicago', 'Miami',
     'New Orleans', 'Washington', 'Boston', 'Seattle', 'Las Vegas');

   TABLENAME = 'people';  // name of the database table; the extension will be added
   DATADIR = 'data';      // subdirectory where the database file is stored
   DB_EXT: array[0..1] of string = (
     '.dbf', '.db');

const
   NUM_RECORDS = 1000;
   SECONDS_PER_DAY = 24 * 60 * 60;

var
   FExportDataset: TBufDataset;

procedure CreateDatabase;
var
   i: Integer;
   startDate: TDate;
   maxAge: Integer = 100 * 365;
   fn: String;
   stream: TMemoryStream;
   t: TDateTime;
begin
   ForceDirectories(DATADIR);
   startDate := Date();

   fn := DATADIR + DirectorySeparator + TABLENAME + DB_EXT[1];
   DeleteFile(fn);

   FExportDataset := TBufDataset.Create(nil);
//  FExportDataset.Filename := fn;

   FExportDataset.FieldDefs.Add('Last name', ftString, 15);
   FExportDataset.FieldDefs.Add('First name', ftString, 10);
   FExportDataset.FieldDefs.Add('City', ftString, 15);
   FExportDataset.FieldDefs.Add('Birthday', ftDate);
   FExportDataset.FieldDefs.Add('Salary', ftCurrency);
   FExportDataset.FieldDefs.Add('Work begin', ftDateTime);
   FExportDataset.FieldDefs.Add('Work end', ftDateTime);
   FExportDataset.FieldDefs.Add('Size', ftFloat);
   FExportDataset.CreateDataset;

   FExportDataset.Open;

   // Random data
   for i:=1 to NUM_RECORDS do begin
     if (i mod 100 = 0) then
       WriteLn(Format('Adding record %d...', [i]));
     FExportDataset.Insert;
     FExportDataset.FieldByName('Last name').AsString :=
       LAST_NAMES[Random(NUM_LAST_NAMES)];
     FExportDataset.FieldByName('First name').AsString :=
       FIRST_NAMES[Random(NUM_FIRST_NAMES)];
     FExportDataset.FieldByName('City').AsString :=
       CITIES[Random(NUM_CITIES)];
     FExportDataset.FieldByName('Birthday').AsDateTime :=
       startDate - Random(maxAge);
     FExportDataset.FieldByName('Salary').AsFloat := 1000 + Random(9000);
     FExportDataset.FieldByName('Size').AsFloat := (160 + Random(50)) / 100;
     FExportDataset.FieldByName('Work begin').AsDateTime :=
       40000 + EncodeTime(6 + Random(4), Random(60), Random(60), 0);
     FExportDataset.FieldByName('Work end').AsDateTime :=
       EncodeTime(15 + Random(4), Random(60), Random(60), 0);
     FExportDataset.Post;
   end;

   WriteLn('Saving...');
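   // The SaveToStream call below is the step that becomes extremely slow
   // for larger record counts.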
   t := now;
   stream := TMemoryStream.Create;
   try
     FExportDataset.SaveToStream(stream);
     stream.Position := 0;
     WriteLn('Written to memory stream: ',
       FormatFloat('0.000 s', (now - t) * SECONDS_PER_DAY));
     stream.SaveToFile(fn);
   finally
     stream.Free;
   end;
   WriteLn('Done. Total time needed for saving: ',
     FormatFloat('0.000 s', (now - t) * SECONDS_PER_DAY));

   FExportDataset.Close;

   WriteLn(Format('Created file "%s" in folder "%s".', [
     ExtractFileName(fn), ExtractFileDir(fn)]));
   FExportDataset.Free;
end;

begin
   CreateDatabase;

   WriteLn;
   WriteLn('Press ENTER to close.');
   ReadLn;
end.


