I have a very large file that contains a ton of JSON strings (over 100K), with one string on each line.
I want to read each line, insert it into a database, and after each item has been inserted, update another document in a second database with basic information from the initial insert. Since I am a Node.js newcomer, I am having trouble wrapping my head around what I am doing wrong. Here is what I have so far.
// Stream the input file one line at a time and hand each parsed JSON
// object to asyncAdd for insertion + indexing.
var lineReader = require("line-reader"); // fixed: original had "-" instead of "="

lineReader.eachLine(filePath, function(line, last){
    // Skip blank lines (e.g. a trailing newline at EOF) so JSON.parse
    // doesn't throw on empty input. The original guard "count == 1"
    // referenced an undefined variable and would have thrown.
    if(line.trim().length > 0){
        // NOTE(review): eachLine does NOT wait for asyncAdd's database
        // work to finish before reading the next line, so inserts run
        // concurrently and log output may interleave. To force strict
        // "added N / indexing N / indexed N" ordering, use eachLine's
        // async form (function(line, last, cb)) and invoke cb from
        // asyncAdd's completion callback.
        asyncAdd(JSON.parse(line));
    }
});
// Running tally of items handed to the inserter; used only in log messages.
var counter = 0;

/**
 * Insert one parsed JSON object into the primary database, then update the
 * index document in the second database with the saved result.
 *
 * @param {Object} jsonString - parsed object for one input line (name kept
 *                              for compatibility; it is an object, not a string)
 * @param {Function} [done]   - optional node-style callback(err, savedDocument)
 *                              invoked once both steps have finished; defaults
 *                              to a no-op so existing callers are unaffected.
 *                              Pass eachLine's "cb" here to process lines
 *                              strictly one at a time.
 */
function asyncAdd(jsonString, done){
    done = done || function(){};
    async.waterfall([
        // Step 1: insert the document into the primary database.
        function(callback){
            counter++;
            // NOTE(review): addJson's callback exposes only "doc" with no
            // error argument — confirm it cannot fail silently.
            addJson(jsonString, function(doc){
                // Log BEFORE advancing the waterfall so "Added N" is
                // guaranteed to precede "indexing: N" (the original called
                // callback first, which could reorder the output).
                console.log("Added " + counter);
                callback(null, doc);
            });
        },
        // Step 2: update the index document with the inserted result.
        function(doc, callback){
            console.log("indexing: " + counter);
            updateDBIndex(doc, callback);
        }
    ],
    // Final handler: report the outcome, then signal completion.
    function(err, results){
        if(err){
            console.error("Error " + err);
            return done(err);
        }
        console.log("indexed " + counter);
        done(null, results);
    });
}
Basically, if my file looks like:
{"_id": "1", "item":"glove", "color": "red"}\n
{"_id": "4", "item":"hat", "color" : "red"}\n
{"_id": "6", "item":"hat","color" : "blue"}\n
I want the output to look like: "Added 1", "indexing: 1", "indexed 1", "Added 2", "indexing: 2", "indexed 2", "Added 3", "indexing: 3", "indexed 3".
Any help will be more than appreciated! Thank you!
 
    