How can I remove duplicate elements (complex objects) from an array?

In every document, records is an array containing many duplicate objects, and the buy_items array inside each record also contains many duplicate items.

How do I clean up the duplicated items?

Original documents:

{
  "_id": "0005d116qwwewdq82a1b84f148fa6027d429f3e",
  "records": [
    {
      "DATE": new Date("1996-02-08T08:00:00+0800"),
      "buy_items": [
        "5210 ",
        "5210 ",
        "5210 "
      ]
    },
    {
      "DATE": new Date("1996-02-08T08:00:00+0800"),
      "buy_items": [
        "5210 ",
        "5210 ",
        "5210 "
      ]
    },
    {
      "DATE": new Date("2012-12-08T08:00:00+0800"),
      "buy_items": [
        "5210 ",
        "1234 ",
        " "
      ]
    }
  ]
}

Expected Result:

{
  "_id": "0005d116qwwewdq82a1b84f148fa6027d429f3e",
  "records": [
    {
      "DATE": new Date("1996-02-08T08:00:00+0800"),
      "buy_items": [
        "5210 "
      ]
    },
    {
      "DATE": new Date("2012-12-08T08:00:00+0800"),
      "buy_items": [
        "5210 ",
        "1234 ",
        " "
      ]
    }
  ]
}

With Michael's solution, the result might look like this:

{
  "_id": "0005d116qwwewdq82a1b84f148fa6027d429f3e",
  "records": [
    {
      "date": new Date("1996-02-08T08:00:00+0800"),
      "buy_items": [
        "5210 ",
        "1234 ",
        " "
      ]
    }
  ]
}

+3




3 answers


You can remove the duplicate objects using the aggregation framework:

db.collection.aggregate(
    [
        { $unwind: "$records" },
        { $unwind: "$records.buy_items" },
        { $group: { "_id": { id: "$_id", date: "$records.DATE" }, buy_items: { $addToSet: "$records.buy_items" } } },
        { $group: { "_id": "$_id.id", records: { $push: { "date": "$_id.date", "buy_items": "$buy_items" } } } },
        { $sort: { "records.0.date": 1 } },
        { $out: "collection" }
    ]
)

The $out operator lets you write the aggregation result to a specified collection, replacing that collection if it already exists.
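
If you would rather not overwrite the source collection while you check the output, the same pipeline can write its result somewhere else first; this is just a sketch, and records_deduped is a hypothetical target name:

db.collection.aggregate([
    { $unwind: "$records" },
    { $unwind: "$records.buy_items" },
    { $group: { "_id": { id: "$_id", date: "$records.DATE" }, buy_items: { $addToSet: "$records.buy_items" } } },
    { $group: { "_id": "$_id.id", records: { $push: { "date": "$_id.date", "buy_items": "$buy_items" } } } },
    { $sort: { "records.0.date": 1 } },
    // Writing to a different collection name leaves the source collection untouched
    { $out: "records_deduped" }
])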




Better yet, use the "Bulk" operations API:

var bulk = db.collection.initializeOrderedBulkOp(),
    count = 0;

db.collection.aggregate([
    { "$unwind": "$records" }, 
    { "$project": { 
        "date": "$records.DATE", 
        "buy_items": { "$setIntersection": "$records.buy_items" }
    }}, 
    { "$unwind": "$buy_items" }, 
    { "$group": { 
        "_id": { "id": "$_id", "date": "$date" }, 
        "buy_items": { "$addToSet": "$buy_items" }
    }},
    { "$group": { 
        "_id": "$_id.id", 
        "records": { "$push": { 
            "date": "$_id.date", 
            "buy_items": "$buy_items" 
        }}
    }}
]).forEach(function(doc) {
    bulk.find({ "_id": doc._id }).updateOne({
        "$set": { "records": doc.records }
    });
    count++;
    if (count % 500 == 0) {
        bulk.execute();
        bulk = db.collection.initializeOrderedBulkOp();
    }
});

if (count % 500 != 0)
    bulk.execute();

Result:

{
    "_id" : "0005d116qwwewdq82a1b84f148fa6027d429f3e",
    "records" : [
            {
                    "date" : ISODate("2012-12-08T00:00:00Z"),
                    "buy_items" : [
                            " ",
                            "1234 ",
                            "5210 "
                    ]
            },
            {
                    "date" : ISODate("1996-02-08T00:00:00Z"),
                    "buy_items" : [
                            "5210 "
                    ]
            }
    ]
}

+3




If you want to update your current collection in place, without creating a new collection or dropping the previous one, you can do it as follows. I tried this, but you have to run two different update commands.

First, update records using distinct as follows:

db.collectionName.update({},{"$set":{"records":db.collectionName.distinct('records')}})



Then run a second update for buy_items, again using distinct, as follows:

db.collectionName.update({},{"$set":{"records.0.buy_items":db.collectionName.distinct('records.buy_items')}})
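
For reference, distinct() by itself simply returns the array of unique values for the given field across the matching documents, which is what the two updates above write back into the document; you can run it on its own first to inspect what will be stored (same collectionName as above):

// Unique embedded record objects in the collection
db.collectionName.distinct('records')

// Unique buy_items values across all records
db.collectionName.distinct('records.buy_items')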


If you want to avoid two update requests, follow Michael's answer.

+1




You can try using the find() and forEach() methods to iterate over all the documents, check for uniqueness, and filter out the distinct values like this:

db.collection.find().forEach(function(doc){
    var records = [], seen = {};
    doc.records.forEach(function (item){
        // Keep only the first occurrence of each value in buy_items
        var uniqueBuyItems = item["buy_items"].filter(function(i, pos) {
            return item["buy_items"].indexOf(i) == pos;
        });
        item["buy_items"] = uniqueBuyItems;
        // Skip the record if its buy_items match the previously kept record's
        if (JSON.stringify(item["buy_items"]) !== JSON.stringify(seen["buy_items"])) {
            records.push(item);
            seen["buy_items"] = item["buy_items"];
        }
    });
    doc.records = records;
    db.collection.save(doc);
})
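
One caveat: save() is deprecated and may not be available in newer shells such as mongosh. If that applies to you, the write at the end of the loop can be expressed with updateOne instead; a small sketch under that assumption:

// Hypothetical drop-in replacement for the db.collection.save(doc) call
// inside the forEach loop above: only the records field is rewritten.
db.collection.updateOne(
    { "_id": doc._id },
    { "$set": { "records": doc.records } }
);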

      

+1

