Prepare the DB with a set of 10K documents, each around 1K in length.
# Populate the 'test' collection with 10K documents of roughly 1K each.
from pymongo import MongoClient
from bson.objectid import ObjectId

client = MongoClient('localhost', 27017)
db = client['test']
collection = db['test']

# Start from an empty collection. remove() was removed in PyMongo 3+;
# delete_many({}) is the supported way to drop all documents.
collection.delete_many({})

# Build a ~1K document: 10 top-level keys, each holding a 10-key sub-document.
# Use distinct loop names to avoid shadowing the outer comprehension variable.
letters = 'abcdefghij'
doc = {outer: {inner: 1 for inner in letters} for outer in letters}

for i in range(10000):
    # ObjectId accepts a raw 12-byte value; b'foo-bar-%04d' is exactly 12 bytes.
    doc['_id'] = ObjectId(b'foo-bar-%04d' % i)
    # insert() is deprecated; insert_one() inserts a single document.
    collection.insert_one(doc)
Then try to update a couple of fields on randomized ids.
# Update two nested fields on 10K randomly chosen documents.
from pymongo import MongoClient
from bson.objectid import ObjectId
import random

client = MongoClient('localhost', 27017)
db = client['test']
collection = db['test']

for i in range(10000):
    # Pick a random existing _id (the setup step generated foo-bar-0000..9999,
    # each exactly 12 bytes as ObjectId requires).
    id_ = ObjectId(b'foo-bar-%04d' % random.randint(0, 9999))
    # update() is deprecated in PyMongo 3+; update_one() applies the $set
    # to the single document matching the filter.
    collection.update_one(
        {'_id': id_},
        {'$set': {'b.a': i, 'f.c': i * 33}},
    )
It takes less than 1.5 seconds, which means a throughput greater than 7.5K updates per second.
MongoDB is not a bad choice for a massive-update pattern.