Subject | Performing multiple INSERT statements causes long delay at every Nth statement |
---|---|
Author | Pierre van Wyngaard |
Post date | 2012-05-08T15:36:56Z |
I am using Firebird 1.52, Delphi 2009 on a Windows Vista Business OS. The
Delphi tool I use for database access is Interbase Express 12.12.
I have a database kc4net.fdb from which I want to backup data to another
database pbackup.fdb.
I read all records from each table in kc4net and produce an INSERT string
which gets executed and only committed at the end of every table. All goes
well except for one table AAPLU. The records get copied fine, but every
200th or so record there is a delay of something like a minute before it
finishes the execute statement. Then it will very quickly copy another 200+
records and get stuck again for a minute or so.
I have spent a lot of time trying to find out what the conditions are when
this happens. The pbackup database starts off as a brand new, clean
database. I have also tried making a copy of the kc4net database, renaming
it to pbackup and creating the relevant tables (in case there is something
different in the properties of the two databases). It has the same problem.
I tried changing the page size from 4096 to 8192 - with no luck.
In another part of the program I import data from old Paradox tables into
kc4net, and one of the tables that get populated this way is AAPLU. There is
no delay when I run this import.
What I have now done is to read all the records from kc4net's AAPLU table
into an array, and only then write all the records from the array to
pbackup. That works - no delay. So it seems that the fact that I read each
record from one database and then write to another database before reading
the next record, causes the delay.
I have spent a couple of days now on this problem and am at my wit's end.
I'll appreciate any suggestions to help me find why this is happening and
what I can do about it. The array solution, though it works, seems very
inelegant.
Thanks.
Pierre
Below is the code that runs the backup. I show first the version that causes
the delay and then the version using an array which has no delay (IBQuery1
is linked to kc4net and IBSQL3 is linked to pbackup):
Var
FldCnt: integer;
FldNam: array[0..99] of string;
FldTp: array[0..99] of integer;
procedure GetTblFlds(tbl: string);
var
Tp: integer;
begin
with DMAq do begin
with IBQuery1 do begin
SQL.Text:='select r.RDB$FIELD_NAME AS field_name,'
+' f.RDB$FIELD_TYPE AS field_type'
+' FROM RDB$RELATION_FIELDS r'
+' LEFT JOIN RDB$FIELDS f ON r.RDB$FIELD_SOURCE = f.RDB$FIELD_NAME'
+' WHERE r.RDB$RELATION_NAME='''+UpperCase(tbl)+''''
+' ORDER BY r.RDB$FIELD_POSITION';
Transaction.StartTransaction;
Open;
FldCnt:=0;
while not eof do begin
Inc(FldCnt);
if FldCnt>100 then begin
Close;
Transaction.Commit;
MessageDlg('More than 100 fields in table '+tbl+'. Cannot complete
this operation.',
mtError, [mbOK], 0);
exit;
end;
FldNam[FldCnt-1]:=trim(fieldbyname('field_name').AsString);
Tp:=fieldbyname('field_type').AsInteger;
case Tp of
7: FldTp[FldCnt-1]:=1; {smallint}
8: FldTp[FldCnt-1]:=2; {integer}
27: FldTp[FldCnt-1]:=3; {double precision}
35: FldTp[FldCnt-1]:=4; {timestamp}
14: FldTp[FldCnt-1]:=5; {char}
end;
next;
end;
Close;
Transaction.Commit;
end;
end;
end;
procedure PBupData(tbl: string);
var
N: integer;
s: string;
begin
GetTblFlds(tbl);
with DMAq do begin
IBSQL3.Transaction.StartTransaction;
with IBQuery1 do begin
SQL.Text:='select * from '+tbl;
Transaction.StartTransaction;
Open;
while not eof do begin
s:='insert into '+tbl+' values (';
for N:=0 to FldCnt-1 do begin
case FldTp[N] of
1: s:=s+','+fields[N].AsString;
2: s:=s+','+fields[N].AsString;
3: s:=s+','+FloatToStr(fields[N].AsFloat);
4: s:=s+','''+formatdatetime('mm/dd/yyyy hh:nn:ss',
fields[N].AsDateTime)+'''';
5: s:=s+','''+trim(fields[N].AsString)+'''';
end;
end;
IBSQL3.SQL.Text:=s+')';
IBSQL3.ExecQuery;
next;
end;
Close;
Transaction.Commit;
IBSQL3.Transaction.Commit;
end;
end;
end;
PBupData('AAPLU'); //This is called for every table in kc4net
Here is the modified PBupData procedure that works without delay when
reading first into an array:
procedure PBupData(tbl: string);
var
N, cnt: integer;
s: string;
arr: array[1..2000] of string;
begin
GetTblFlds(tbl);
with DMAq do begin
with IBQuery1 do begin
SQL.Text:='select * from '+tbl;
Transaction.StartTransaction;
Open;
cnt:=0;
while not eof do begin
s:='insert into '+tbl+' values (';
for N:=0 to FldCnt-1 do begin
case FldTp[N] of
1: s:=s+','+fields[N].AsString;
2: s:=s+','+fields[N].AsString;
3: s:=s+','+FloatToStr(fields[N].AsFloat);
4: s:=s+','''+formatdatetime('mm/dd/yyyy hh:nn:ss',
fields[N].AsDateTime)+'''';
5: s:=s+','''+trim(fields[N].AsString)+'''';
end;
end;
Inc(cnt);
arr[cnt]:=s+')';
next;
end;
Close;
Transaction.Commit;
IBSQL3.Transaction.StartTransaction;
for N:=1 to cnt do begin
IBSQL3.SQL.Text:=arr[N];
IBSQL3.ExecQuery;
end;
IBSQL3.Transaction.Commit;
end;
end;
end;
[Non-text portions of this message have been removed]
Delphi tool I use for database access is Interbase Express 12.12.
I have a database kc4net.fdb from which I want to backup data to another
database pbackup.fdb.
I read all records from each table in kc4net and produce an INSERT string
which gets executed and only committed at the end of every table. All goes
well except for one table AAPLU. The records get copied fine, but every
200th or so record there is a delay of something like a minute before it
finishes the execute statement. Then it will very quickly copy another 200+
records and get stuck again for a minute or so.
I have spent a lot of time trying to find out what the conditions are when
this happens. The pbackup database starts off as a brand new, clean
database. I have also tried making a copy of the kc4net database, renaming
it to pbackup and creating the relevant tables (in case there is something
different in the properties of the two databases). It has the same problem.
I tried changing the page size from 4096 to 8192 - with no luck.
In another part of the program I import data from old Paradox tables into
kc4net, and one of the tables that get populated this way is AAPLU. There is
no delay when I run this import.
What I have now done is to read all the records from kc4net's AAPLU table
into an array, and only then write all the records from the array to
pbackup. That works - no delay. So it seems that the fact that I read each
record from one database and then write to another database before reading
the next record, causes the delay.
I have spent a couple of days now on this problem and am at my wit's end.
I'll appreciate any suggestions to help me find why this is happening and
what I can do about it. The array solution, though it works, seems very
inelegant.
Thanks.
Pierre
Below is the code that runs the backup. I show first the version that causes
the delay and then the version using an array which has no delay (IBQuery1
is linked to kc4net and IBSQL3 is linked to pbackup):
Var
FldCnt: integer;
FldNam: array[0..99] of string;
FldTp: array[0..99] of integer;
procedure GetTblFlds(tbl: string);
var
Tp: integer;
begin
with DMAq do begin
with IBQuery1 do begin
SQL.Text:='select r.RDB$FIELD_NAME AS field_name,'
+' f.RDB$FIELD_TYPE AS field_type'
+' FROM RDB$RELATION_FIELDS r'
+' LEFT JOIN RDB$FIELDS f ON r.RDB$FIELD_SOURCE = f.RDB$FIELD_NAME'
+' WHERE r.RDB$RELATION_NAME='''+UpperCase(tbl)+''''
+' ORDER BY r.RDB$FIELD_POSITION';
Transaction.StartTransaction;
Open;
FldCnt:=0;
while not eof do begin
Inc(FldCnt);
if FldCnt>100 then begin
Close;
Transaction.Commit;
MessageDlg('More than 100 fields in table '+tbl+'. Cannot complete
this operation.',
mtError, [mbOK], 0);
exit;
end;
FldNam[FldCnt-1]:=trim(fieldbyname('field_name').AsString);
Tp:=fieldbyname('field_type').AsInteger;
case Tp of
7: FldTp[FldCnt-1]:=1; {smallint}
8: FldTp[FldCnt-1]:=2; {integer}
27: FldTp[FldCnt-1]:=3; {double precision}
35: FldTp[FldCnt-1]:=4; {timestamp}
14: FldTp[FldCnt-1]:=5; {char}
end;
next;
end;
Close;
Transaction.Commit;
end;
end;
end;
procedure PBupData(tbl: string);
var
N: integer;
s: string;
begin
GetTblFlds(tbl);
with DMAq do begin
IBSQL3.Transaction.StartTransaction;
with IBQuery1 do begin
SQL.Text:='select * from '+tbl;
Transaction.StartTransaction;
Open;
while not eof do begin
s:='insert into '+tbl+' values (';
for N:=0 to FldCnt-1 do begin
case FldTp[N] of
1: s:=s+','+fields[N].AsString;
2: s:=s+','+fields[N].AsString;
3: s:=s+','+FloatToStr(fields[N].AsFloat);
4: s:=s+','''+formatdatetime('mm/dd/yyyy hh:nn:ss',
fields[N].AsDateTime)+'''';
5: s:=s+','''+trim(fields[N].AsString)+'''';
end;
end;
IBSQL3.SQL.Text:=s+')';
IBSQL3.ExecQuery;
next;
end;
Close;
Transaction.Commit;
IBSQL3.Transaction.Commit;
end;
end;
end;
PBupData('AAPLU'); //This is called for every table in kc4net
Here is the modified PBupData procedure that works without delay when
reading first into an array:
procedure PBupData(tbl: string);
var
N, cnt: integer;
s: string;
arr: array[1..2000] of string;
begin
GetTblFlds(tbl);
with DMAq do begin
with IBQuery1 do begin
SQL.Text:='select * from '+tbl;
Transaction.StartTransaction;
Open;
cnt:=0;
while not eof do begin
s:='insert into '+tbl+' values (';
for N:=0 to FldCnt-1 do begin
case FldTp[N] of
1: s:=s+','+fields[N].AsString;
2: s:=s+','+fields[N].AsString;
3: s:=s+','+FloatToStr(fields[N].AsFloat);
4: s:=s+','''+formatdatetime('mm/dd/yyyy hh:nn:ss',
fields[N].AsDateTime)+'''';
5: s:=s+','''+trim(fields[N].AsString)+'''';
end;
end;
Inc(cnt);
arr[cnt]:=s+')';
next;
end;
Close;
Transaction.Commit;
IBSQL3.Transaction.StartTransaction;
for N:=1 to cnt do begin
IBSQL3.SQL.Text:=arr[N];
IBSQL3.ExecQuery;
end;
IBSQL3.Transaction.Commit;
end;
end;
end;
[Non-text portions of this message have been removed]