Yes, all the inserts are done via ExecNonQueryBatch.
To avoid OOM errors I added some code to the Sub SetData in the B4XTable class:
' Loads the table data from Data, inserting every row into the in-memory DB in batches.
' Data: a List of rows; each row is either a List or a 1D array of Objects. The form is
'   detected from the first element only, so all rows must share the same form.
' Returns True (as a ResumableSub) once all inserts have completed and the view refreshed.
Public Sub SetData(Data As List) As ResumableSub
Dim n As Int 'number of statements queued in the current batch
' Recreate the in-memory database and the data table before inserting.
Dim rs As ResumableSub = SetNewMemoryDB
Wait For (rs) Complete (bResult As Boolean)
CreateTable
' Build "INSERT INTO data VALUES (?,?,...)" with one placeholder per non-void column.
Dim sb As StringBuilder
sb.Initialize
sb.Append("INSERT INTO data VALUES (")
For Each Column As B4XTableColumn In Columns
If Column.ColumnType = COLUMN_TYPE_VOID Then Continue 'void columns are not stored
sb.Append("?,")
Next
sb.Remove(sb.Length - 1, sb.Length) 'take the last comma off
sb.Append(")") 'add the final bracket
If Data.Size > 0 Then
If Data.Get(0) Is List Then
For Each lrow As Object In Data
n = n + 1
sql1.AddNonQueryToBatch(sb.ToString, lrow) 'lrow will be a list
' Flush every 100000 queued statements so the pending batch does not exhaust memory.
If n = 100000 Then 'to avoid an OOM error!
n = 0
Dim SenderFilter As Object = sql1.ExecNonQueryBatch("SQL1")
Wait For (SenderFilter) SQL1_NonQueryComplete (Success As Boolean)
End If
Next
Else
For Each row() As Object In Data
n = n + 1
sql1.AddNonQueryToBatch(sb.ToString, row) 'row will be a 1D string array
' Same periodic flush as the List branch above.
If n = 100000 Then 'to avoid an OOM error!
n = 0
Dim SenderFilter As Object = sql1.ExecNonQueryBatch("SQL1")
Wait For (SenderFilter) SQL1_NonQueryComplete (Success As Boolean)
End If
Next
End If
End If
'Log("SetData, sb.ToString: " & sb.ToString)
' Execute whatever is still queued in the batch and wait for completion.
Dim SenderFilter As Object = sql1.ExecNonQueryBatch("SQL1")
' Dim n As Long = DateTime.Now
Wait For (SenderFilter) SQL1_NonQueryComplete (Success As Boolean)
' Log($"Set data time: ${DateTime.Now - n}ms"$)
' Update the row counts and paging state, then redraw the visible page.
CountAll = Data.Size
mCurrentCount = CountAll
miCountOfPages = Ceil(mCurrentCount / mRowsPerPage)
RefreshNow
Return True
End Sub
I also added some code to limit the size of the list passed to SetData, eg by reading a large file in chunks.
This is the code that gets the file parts to read:
' Computes split points for reading a large text file in parts, nudging each split
' to an end-of-line byte so that no line is cut in half.
' strFolder / strFile: location of the file to split.
' btEndOfLineByte: the byte value that marks a line break (e.g. 10 for LF).
' Returns an array of byte offsets: element i is the index of the last byte of part i.
'   An empty array is returned when the file fits within iMaxTextFileSize.
Sub GetTextFileParts(strFolder As String, strFile As String, btEndOfLineByte As Byte) As Long()
Dim i As Int
Dim n As Int
Dim iParts As Int
Dim iBytesInPart As Int
Dim iBytes As Int
Dim lPosition As Long
Dim arrPartEnds() As Long
RAF.Initialize(strFolder, strFile, True)
If RAF.Size <= iMaxTextFileSize Then
RAF.Close 'fix: close the file before the early return so the handle is not leaked
Return arrPartEnds 'empty array: the file can be read in a single part
End If
iParts = (RAF.Size / iMaxTextFileSize) + 1
iBytesInPart = RAF.Size / iParts
Dim arrPartEnds(iParts) As Long
' Provisional, evenly spaced part ends.
For i = 0 To iParts - 2
arrPartEnds(i) = iBytesInPart * (i + 1)
Next
'index of last byte in the file
arrPartEnds(iParts - 1) = RAF.Size - 1
' Shift each provisional end to the first end-of-line byte found in a window
' centred on it, so parts never split a line.
For i = 0 To iParts - 2 'note last element of arrPartEnds remains the same
Dim arrBytes(iBytesToCheckForLinebreaks) As Byte
lPosition = arrPartEnds(i) - iBytesToCheckForLinebreaks / 2
iBytes = RAF.ReadBytes(arrBytes, 0, iBytesToCheckForLinebreaks, lPosition)
For n = 0 To iBytes - 1
If arrBytes(n) = btEndOfLineByte Then
arrPartEnds(i) = lPosition + n
Exit
End If
Next
'NOTE(review): if no end-of-line byte falls inside the window the provisional end
'is kept and a line may be split - enlarge iBytesToCheckForLinebreaks if that occurs
Next
RAF.Close
Return arrPartEnds
End Sub
All this works well and I can show a 625 MB file via B4XTable.
There are situations, though, where setting the B4XTable data via SetData with a passed list seems inefficient, and it could be done better with direct data
transfer via an attached DB; I am still interested to see whether this can be done.
I understand that an in-memory DB can't be attached to another in-memory DB, so I have to do this via the regular file based application SQLite DB.
RBS