Export to JSON, BSON, CSV fails
Solved
Seems to be a problem with the cursor instance: "cursor.size is not a function".
error:
{ "message" : "cursor.size is not a function", "stack" : "script:102:25" }
auto-generated script:
import * as path from "path";
import * as fs from "fs";
const promisify = require("bluebird").promisify;
const appendFileAsync = promisify(fs.appendFile);
const BATCH_SIZE = 2000;

let filepath = "/Users/danielgivoni/Downloads/farmer_production.users.csv";
let targetPath = path.dirname(filepath);
if (!fs.existsSync(targetPath)) require("mkdirp").sync(targetPath);
if (fs.existsSync(filepath)) fs.unlinkSync(filepath);

const tab = "campain_email_farmers_query.js";
const queryScript = `db.getCollection("users").aggregate([
    { "$lookup" : { "from" : "farmers", "localField" : "FarmerIds", "foreignField" : "uniqueId", "as" : "farms" } },
    { "$match" : { "$and" : [
        { "farms" : { "$ne" : [ { } ] } },
        { "farms.name" : { "$exists" : true } },
        { "farms.stores.emails" : { "$ne" : [ ] } },
        { "farms.valid" : { "$ne" : false } }
    ] } },
    { "$unwind" : "$identityVerifer" },
    { "$unwind" : "$farms" },
    { "$unwind" : "$farms.stores" },
    { "$unwind" : "$farms.stores.emails" },
    { "$project" : { "telephone" : "$identityVerifer", "locale" : 1, "farm_name" : "$farms.name", "email" : "$farms.stores.emails", "_id" : 0, "currency" : "$farms.stores.currencyCode" } }
], { "$clusterTime" : { "clusterTime" : Timestamp(1569397676, 1), "signature" : { "hash" : BinData(0,"5fFgdo90tggglO5WlKOPjOQ4Ey8="), "keyId" : NumberLong("6726444850310283265") } } })`;

let skip = 0;
let limit = Number.MAX_SAFE_INTEGER;
let connection = "d<..........>-a0.mlab.com:3<........>";
let db = "farmer_production";
let fields = [];
let isFirstRead = true;

const cursor = mb.getCursorFromQueryScript({ connection, db, queryScript, skip, limit });
const totalCount = cursor.size(); // the reported error ("cursor.size is not a function") is thrown here
let processedCount = 0;
await(mb.batchReadFromCursor(cursor, BATCH_SIZE, (docs) => {
    return async(() => {
        let readLength = docs.length;
        if (!readLength) { return; }
        if (_.isEmpty(fields)) { fields = mb.tryGetFields(_.sampleSize(docs, 5)); }
        let csvContent = mb.docsToCSV({ docs, fields, delimiter: ",", withColumnTitle: isFirstRead });
        isFirstRead = false;
        await(appendFileAsync(filepath, csvContent));
        processedCount += readLength;
        const percent = (processedCount / totalCount * 100).toFixed(1);
        console.log(`${percent}% ${processedCount}/${totalCount} docs exported to "${path.basename(filepath)}".`);
        sleep(10);
    })();
}));
_.delay(() => mb.openFolder(targetPath), 1000);
sleep(100);
console.log(`\nexport ${processedCount} docs from ${tab} to ${filepath} finished.`);
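A possible stopgap until a fixed build is available (a sketch only; guarding size() this way is my own assumption, not NoSQLBooster's code): patch the two lines of the generated script that depend on the total count, so the export can proceed without it:

// assumes only cursor.size() is broken and the rest of the script still runs
const totalCount = (typeof cursor.size === "function") ? cursor.size() : 0;
// ...and inside the batch callback, skip the percentage when the total is unknown:
const percent = totalCount > 0 ? `${(processedCount / totalCount * 100).toFixed(1)}% ` : "";
console.log(`${percent}${processedCount}/${totalCount || "?"} docs exported to "${path.basename(filepath)}".`);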
other exports fail as well:
#script:62:9 +0.953s
export 0 docs from farmer_production:farmers@<........>b.com:<....>6 to /Users/danielgivoni/Downloads/farmer_production.farmers111.json finished.
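One quick way to confirm the "0 docs" result comes from the export code path rather than an empty result set (a sketch, run in an ordinary shell tab against the same connection; countDocuments is standard MongoDB shell API):

// prints the number of documents actually in the collection being exported
db.getCollection("farmers").countDocuments({})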
It works fine on version 5.1.2.
I am experiencing the same issue on 5.2.1
We have worked out a new test build to resolve the export issue; please download it and give it a try.
Windows: https://nosqlbooster.com/s3/download/releasesv5/nosqlbooster4mongo-5.2.3-beta.1.exe
macOS: https://nosqlbooster.com/s3/download/releasesv5/nosqlbooster4mongo-5.2.3-beta.1.dmg
Linux: https://nosqlbooster.com/s3/download/releasesv5/nosqlbooster4mongo-5.2.3-beta.1.AppImage
Thank you for your patience.
Resolved in 5.2.3