■ jwFreeNote_03
Help! blob field takes too much memory...
From: Theo Buys (development_at_noiwal.nl)
Date: Tue, 10 Aug 2004 12:10:15 +0200

When I read BLOBs from an Access database with ADO (using the ADO classes from Carlos Antollini) in VC++, it takes a lot of memory. When reading records without selecting the blob field, my test program uses about 8652Kb. When reading records and also selecting the blob field, it uses about 21600Kb! The blob data is no larger than 22Kb. What is going on? Does anybody know?

class CWordsRecordset : public CADORecordset
{
public:
    void Fetch();

    CString    m_strWord;
    CByteArray m_baSpeech;
    // ... more members
};

void CWordsRecordset::Fetch()
{
    if (IsBof() || IsEof())
        return;

    GetFieldValue(_T("word"), m_strWord);

    CADOFieldInfo Fi;
    GetFieldInfo(_T("speech"), &Fi);
    m_baSpeech.SetSize(Fi.m_lSize);
    GetChunk(_T("speech"), (LPVOID)m_baSpeech.GetData());
}

BOOL CADORecordset::GetChunk(LPCTSTR lpFieldName, LPVOID lpData)
{
    FieldPtr pField = m_pRecordset->Fields->GetItem(lpFieldName);
    return GetChunk(pField, lpData);
}

BOOL CADORecordset::GetChunk(FieldPtr pField, LPVOID lpData)
{
    long       lngSize, lngOffSet = 0;
    _variant_t varChunk;
    UCHAR      chData;
    HRESULT    hr;
    long       lBytesCopied = 0;

    lngSize = pField->ActualSize;

    while (lngOffSet < lngSize)
    {
        try
        {
            varChunk = pField->GetChunk(ChunkSize);

            // Copy the data only up to the actual size of the field.
            for (long lIndex = 0; lIndex <= (ChunkSize - 1); lIndex++)
            {
                hr = SafeArrayGetElement(varChunk.parray, &lIndex, &chData);
                if (SUCCEEDED(hr))
                {
                    ((UCHAR*)lpData)[lBytesCopied] = chData;
                    lBytesCopied++;
                }
                else
                    break;
            }
            lngOffSet += ChunkSize;
        }
        catch (_com_error &e)
        {
            dump_com_error(e);
            return FALSE;
        }
    }
    lngOffSet = 0;
    return TRUE;
}
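For comparison, here is a minimal sketch (not from the original post) of a bulk-copy variant of GetChunk: instead of calling SafeArrayGetElement once per byte, it locks the SAFEARRAY returned by Field::GetChunk and copies each chunk with memcpy, stopping at the field's ActualSize. It assumes the usual #import of msado15.dll plus the same ChunkSize constant and dump_com_error helper used by the CADORecordset class.

// Sketch only: bulk-copy variant of GetChunk under the assumptions above.
BOOL GetChunkBulk(FieldPtr pField, LPVOID lpData)
{
    long lngSize      = pField->ActualSize;   // total bytes in the BLOB field
    long lBytesCopied = 0;

    while (lBytesCopied < lngSize)
    {
        try
        {
            // Ask ADO for the next chunk; it returns a VT_ARRAY|VT_UI1 variant
            // (or VT_NULL when there is nothing left to read).
            _variant_t varChunk = pField->GetChunk(ChunkSize);
            if (varChunk.vt == VT_NULL || varChunk.parray == NULL)
                break;

            // How many bytes did this chunk actually contain?
            long lLower = 0, lUpper = 0;
            SafeArrayGetLBound(varChunk.parray, 1, &lLower);
            SafeArrayGetUBound(varChunk.parray, 1, &lUpper);
            long lChunkBytes = lUpper - lLower + 1;

            // Never copy past the declared field size.
            if (lBytesCopied + lChunkBytes > lngSize)
                lChunkBytes = lngSize - lBytesCopied;

            // Lock the SAFEARRAY and copy the whole chunk in one memcpy.
            UCHAR* pChunkData = NULL;
            if (FAILED(SafeArrayAccessData(varChunk.parray, (void**)&pChunkData)))
                return FALSE;
            memcpy((UCHAR*)lpData + lBytesCopied, pChunkData, lChunkBytes);
            SafeArrayUnaccessData(varChunk.parray);

            lBytesCopied += lChunkBytes;
        }
        catch (_com_error &e)
        {
            dump_com_error(e);
            return FALSE;
        }
    }
    return TRUE;
}

This only tightens the copy loop itself; whether it changes the working-set figures quoted above depends on how ADO buffers the BLOB column internally.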