path: root/Source/TableDump.m
author     rowanbeentje <rowan@beent.je>    2009-10-15 13:36:41 +0000
committer  rowanbeentje <rowan@beent.je>    2009-10-15 13:36:41 +0000
commit     778c3556b551077d488af6378108db0630775953 (patch)
tree       d256b6ef298d480cbe220c9d463284610131439d /Source/TableDump.m
parent     a1f940c4592776c23d335d76232441aad3157eec (diff)
download   sequelpro-778c3556b551077d488af6378108db0630775953.tar.gz
           sequelpro-778c3556b551077d488af6378108db0630775953.tar.bz2
           sequelpro-778c3556b551077d488af6378108db0630775953.zip
- Fix a problem preventing the end of large CSV files from being imported
Diffstat (limited to 'Source/TableDump.m')
-rw-r--r--    Source/TableDump.m    2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Source/TableDump.m b/Source/TableDump.m
index 9b75c757..d5e6d563 100644
--- a/Source/TableDump.m
+++ b/Source/TableDump.m
@@ -878,7 +878,7 @@
 // Extract and process any full CSV rows found so far. Also trigger processing if all
 // rows have been read, in order to ensure short files are still processed.
-while ((csvRowArray = [csvParser getRowAsArrayAndTrimString:YES stringIsComplete:allDataRead]) || (allDataRead && !fieldMappingArray)) {
+while ((csvRowArray = [csvParser getRowAsArrayAndTrimString:YES stringIsComplete:allDataRead]) || (allDataRead && [parsedRows count])) {
 // If valid, add the row array and length to local storage
 if (csvRowArray) {
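
The loop changed above streams the CSV file in chunks, extracting complete rows as they arrive; the revised condition keeps a final processing pass running once all data has been read, so the tail of a large file is no longer skipped. Below is a minimal, self-contained sketch of that buffer-and-flush pattern. NextRow() is a hypothetical helper standing in for the real SPCSVParser row extraction, not Sequel Pro's actual API.

#import <Foundation/Foundation.h>

// Hypothetical, simplified stand-in for a streaming CSV parse loop:
// rows are extracted as chunks arrive, and a final pass runs once
// allDataRead is YES so the last row (which may lack a trailing
// newline) is never dropped.
static NSArray *NextRow(NSMutableString *buffer, BOOL allDataRead) {
    NSRange newline = [buffer rangeOfString:@"\n"];
    NSString *line = nil;
    if (newline.location != NSNotFound) {
        line = [buffer substringToIndex:newline.location];
        [buffer deleteCharactersInRange:NSMakeRange(0, newline.location + 1)];
    } else if (allDataRead && [buffer length]) {
        // No trailing newline, but the stream is finished: flush the remainder.
        line = [NSString stringWithString:buffer];
        [buffer setString:@""];
    }
    return line ? [line componentsSeparatedByString:@","] : nil;
}

int main(void) {
    @autoreleasepool {
        NSArray *chunks = @[@"a,1\nb,2\nc,", @"3\nd,4"];   // last row has no newline
        NSMutableString *buffer = [NSMutableString string];
        NSMutableArray *parsedRows = [NSMutableArray array];

        for (NSUInteger i = 0; i < [chunks count]; i++) {
            [buffer appendString:chunks[i]];
            BOOL allDataRead = (i == [chunks count] - 1);

            NSArray *row;
            while ((row = NextRow(buffer, allDataRead))) {
                [parsedRows addObject:row];
            }
        }
        NSLog(@"Imported %lu rows", (unsigned long)[parsedRows count]);  // 4, including "d,4"
    }
    return 0;
}

Compile with clang -framework Foundation; with the end-of-data flush in place the trailing "d,4" row is still imported, whereas without it that row would sit in the buffer and be lost.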