There really is no way to do this other than looping the results, as it is presently not possible to refer to the existing values of a document's fields during an update operation.
So your basic construct needs to look something like this (in whatever language you are using):
db.collection.find({}).forEach(function(doc) {
    db.collection.update(
        { "_id": doc._id },
        {
            "title": doc.title,
            "energy": {
                "electricity": doc.energyElectricty,
                "heat": doc.energyHeat,
                "cooling": doc.energyCooling
            }
        }
    );
});
Note that since the update document contains no update operators, it replaces each matched document entirely, keeping only the "_id", "title" and re-shaped "energy" fields, which is also what removes the old flat fields.
You could do this a little more efficiently with "bulk updates", available from MongoDB 2.6 and upwards:
var batch = [];
var count = 0;

db.collection.find({}).forEach(function(doc) {
    batch.push({
        "q": { "_id": doc._id },
        "u": {
            "title": doc.title,
            "energy": {
                "electricity": doc.energyElectricty,
                "heat": doc.energyHeat,
                "cooling": doc.energyCooling
            }
        }
    });
    count++;

    // Send the accumulated update statements every 500 documents
    if ( count % 500 == 0 ) {
        db.runCommand({ "update": "collection", "updates": batch });
        batch = [];
    }
});

// Send anything left over from the last incomplete batch
if ( batch.length > 0 ) {
    db.runCommand({ "update": "collection", "updates": batch });
}
So while every document still gets updated over the wire, the requests are only sent once per 500 items (or however many you feel comfortably sits under the 16MB BSON limit).
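If you are running this from the MongoDB 2.6 shell itself, the Bulk operations API is another way to express the same batching without assembling the update command document by hand. A minimal sketch, assuming the same field names as above:

var bulk = db.collection.initializeUnorderedBulkOp();
var count = 0;

db.collection.find({}).forEach(function(doc) {
    // Queue a replacement of the matched document with the re-shaped form
    bulk.find({ "_id": doc._id }).replaceOne({
        "title": doc.title,
        "energy": {
            "electricity": doc.energyElectricty,
            "heat": doc.energyHeat,
            "cooling": doc.energyCooling
        }
    });
    count++;

    // Execute every 500 operations and start a fresh batch
    if ( count % 500 == 0 ) {
        bulk.execute();
        bulk = db.collection.initializeUnorderedBulkOp();
    }
});

// Execute whatever is left from the last incomplete batch
if ( count % 500 != 0 ) {
    bulk.execute();
}

It is the same wire protocol underneath; the shell just takes care of building the batched update command for you.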
Of course, since you mention this came from a CSV import, you can always re-shape your input and import the collection again if that turns out to be a reasonable option, along the lines of the rough sketch below.
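A rough sketch of what that re-import could look like from the shell, assuming a hypothetical file path, no header row, and a column order of title, electricity, heat, cooling (adjust all of those to your actual file), writing into a separate target collection:

var lines = cat("/path/to/data.csv").split("\n");

lines.forEach(function(line) {
    if ( line.trim() === "" ) return;   // skip blank lines
    var fields = line.split(",");

    // Insert directly in the nested shape, so no update pass is needed
    db.newcollection.insert({
        "title": fields[0],
        "energy": {
            "electricity": parseFloat(fields[1]),
            "heat": parseFloat(fields[2]),
            "cooling": parseFloat(fields[3])
        }
    });
});

Or transform the CSV outside of MongoDB and feed the result to mongoimport; either way the documents arrive already in the shape you want.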