Mongo convert embedded document to array

遥遥无期 2020-12-18 10:50

Is there a way to convert a nested document structure into an array? Below is an example:

Input

\"experience\" : {
        \"0\" : {         


        
3 Answers
  •  抹茶落季 2020-12-18 11:11

    You still need to iterate over the content, but you should write back using bulk operations instead:

    Either for MongoDB 2.6 and greater:

    var bulk = db.collection.initializeUnorderedBulkOp(),
        count = 0;
    
    // Match only documents whose "experience" is still an object rather than an array
    db.collection.find({ 
       "$where": "return !Array.isArray(this.experience)"
    }).forEach(function(doc) {
        // Queue an update that wraps the embedded "0" entry in a real array
        bulk.find({ "_id": doc._id }).updateOne({
            "$set": { "experience": [doc.experience["0"]] }
        });
        count++;
    
        // Execute the batch once every 1000 queued updates
        if ( count % 1000 == 0 ) {
            bulk.execute();    
            bulk = db.collection.initializeUnorderedBulkOp();
        }
    })
    
    // Flush any updates left in the final, partial batch
    if ( count % 1000 != 0 )
        bulk.execute();
    

    Or, in modern releases (MongoDB 3.2 and greater), the bulkWrite() method is preferred:

    var ops = [];
    
    // Again, select only documents whose "experience" has not yet been converted
    db.collection.find({ 
       "$where": "return !Array.isArray(this.experience)"
    }).forEach(function(doc) {
       ops.push({
           "updateOne": {
               "filter": { "_id": doc._id },
               "update": { "$set": { "experience": [doc.experience["0"]] } }
           }
       });
    
       if ( ops.length == 1000 ) {
           db.collection.bulkWrite(ops,{ "ordered": false })
           ops = [];
       }
    })
    
    if ( ops.length > 0 )
        db.collection.bulkWrite(ops,{ "ordered": false });
    

    When writing back to the database while iterating over a cursor, bulk write operations with "ordered" set to false are the way to go. There is only one write/response round trip per batch of 1000 requests, which removes a great deal of overhead, and "unordered" means the writes can be applied in parallel rather than strictly in sequence. Both of these make the conversion faster.
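
    The snippets above wrap only the "0" entry, matching the truncated example in the question. If "experience" can hold several numeric keys ("0", "1", ...), a variant of the same bulkWrite() pattern could collect every value in key order. This is only a sketch under that assumption:

    var ops = [];
    
    db.collection.find({ 
       "$where": "return !Array.isArray(this.experience)"
    }).forEach(function(doc) {
       // Collect every embedded value, sorted by its numeric key,
       // since Object.keys() does not guarantee numeric ordering
       var asArray = Object.keys(doc.experience)
           .sort(function(a, b) { return Number(a) - Number(b) })
           .map(function(k) { return doc.experience[k] });
    
       ops.push({
           "updateOne": {
               "filter": { "_id": doc._id },
               "update": { "$set": { "experience": asArray } }
           }
       });
    
       if ( ops.length == 1000 ) {
           db.collection.bulkWrite(ops,{ "ordered": false })
           ops = [];
       }
    })
    
    if ( ops.length > 0 )
        db.collection.bulkWrite(ops,{ "ordered": false });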
