I'm having trouble creating processes in parallel with Node and exiting them when they're done, where each worker makes a simple HTTP GET request. I've noticed that if I fire a process.exit() inside of a callback for appendFile, some files will not be created or appended in a Node cluster setup. Ideally, the way below is how I would like to fire events, since the process exits as soon as its job is done:
var rp = require("request-promise"),
    config = require("./config"),
    cluster = require("cluster"),
    os = require("os"),
    fs = require("fs");
var keywordArray = [
    'keyword1',
    'keyword2',
    ...
];
if (cluster.isMaster) {
    var numCPUs = os.cpus().length;
    var clusterDivision = Math.ceil(keywordArray.length/numCPUs);
    // Reset the json if previously set
    keywordArray.forEach(function(arrayItem) {
        fs.unlink(config.dataDirectory + arrayItem + '.json', function(err) {
            if (err) return console.error(err);
            console.log('successfully unlinked ' + arrayItem + '.json from ' + config.dataDirectory);
        });
    });
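    // NOTE: these unlinks are asynchronous, so they can race with the workers
    // forked below; a worker could conceivably append to a file before the
    // master finishes deleting it. fs.unlinkSync would rule that out.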
    // Create a worker for each CPU
    // Separate the array out evenly for each worker
    for (var j=1;j<=numCPUs;j++) {
        var removed = keywordArray.splice(0, clusterDivision);
        if (removed.length > 0) {
            // The array contains something so let's do something with the keyword
            console.log('creating a worker');
            cluster.fork().send(removed);
        } else {
            // We don't need a worker here
        }
    }
    process.on('exit', function() {
        console.log('exited');
    });
} else if (cluster.isWorker) {
    // Code to run if we're in a worker process
    // Receive the keyword slice the master sent so it's available to this worker
    process.on('message', function(seperatedArrayItem) {
        seperatedArrayItem.forEach(function(arrayItem) {
            function radarRequest(err, response, body) {
                var responseBody = JSON.parse(body);
                console.log(arrayItem); 
                fs.appendFile(config.dataDirectory + arrayItem + '.json', JSON.stringify(responseBody.results, null, '\t'), function (err) {
                    if (err) console.error(err);
                    else console.log('success writing file');
                    // Ideally the worker exits right here, as soon as its write is done
                    process.exit(0);
                });
            }
            rp({
                url: config.radarSearchURI + 
                '?key='+ config.apiKey + 
                '&location=' + config.latitude + ',' + config.longitude + 
                '&radius=' + config.searchRadius + 
                '&keyword=' + arrayItem, headers: config.headers
            }, radarRequest);
        });
    });
}
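My suspicion is that this fails because each worker receives several keywords, so the first appendFile callback to complete exits the worker while the other requests are still in flight. If that's the case, a completion counter might be closer to what I need. This is just a sketch, and pending is a name I made up:

var rp = require("request-promise"),
    config = require("./config"),
    fs = require("fs");

process.on('message', function(seperatedArrayItem) {
    // One outstanding request/write per keyword in this worker's slice
    var pending = seperatedArrayItem.length;
    seperatedArrayItem.forEach(function(arrayItem) {
        rp({
            url: config.radarSearchURI +
            '?key='+ config.apiKey +
            '&location=' + config.latitude + ',' + config.longitude +
            '&radius=' + config.searchRadius +
            '&keyword=' + arrayItem, headers: config.headers
        }, function(err, response, body) {
            if (err) {
                console.error(err);
                if (--pending === 0) process.exit(0);
                return;
            }
            var responseBody = JSON.parse(body);
            fs.appendFile(config.dataDirectory + arrayItem + '.json',
                JSON.stringify(responseBody.results, null, '\t'), function(err) {
                if (err) console.error(err);
                // Exit only once every keyword's append has finished
                if (--pending === 0) process.exit(0);
            });
        });
    });
});

I'm not sure this is the idiomatic way to know when a batch of asynchronous appends has finished, though.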
So far, the only way I can make sure all files are properly appended is by using a timeout, which is exactly what I don't want to (and shouldn't have to) do. Is there another way I can ensure an appendFile has happened successfully and then kill the Node process? Here's a way that works (assuming the process doesn't take longer than 5 seconds):
process.on('message', function(seperatedArrayItem) {
    seperatedArrayItem.forEach(function(arrayItem) {
        function radarRequest(err, response, body) {
            var responseBody = JSON.parse(body);
            console.log(arrayItem); 
            fs.appendFile(config.dataDirectory + arrayItem + '.json', JSON.stringify(responseBody.results, null, '\t'), function (err) {
            if (err) return console.error(err);
                console.log('success writing file');
            });
        }
        rp({
            url: config.radarSearchURI + 
            '?key='+ config.apiKey + 
            '&location=' + config.latitude + ',' + config.longitude + 
            '&radius=' + config.searchRadius + 
            '&keyword=' + arrayItem, headers: config.headers
        }, radarRequest);
    });
    setTimeout(function() {
        process.exit(0);
    }, 5000);
});
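Alternatively, since request-promise already returns promises, I imagine something along these lines could let the worker exit only after every request-and-append pair has settled. This is only a sketch: appendFilePromise is a helper I made up, not an existing fs API.

var rp = require("request-promise"),
    config = require("./config"),
    fs = require("fs");

// Hypothetical helper: wrap fs.appendFile in a promise
function appendFilePromise(file, data) {
    return new Promise(function(resolve, reject) {
        fs.appendFile(file, data, function(err) {
            if (err) return reject(err);
            resolve();
        });
    });
}

process.on('message', function(seperatedArrayItem) {
    var jobs = seperatedArrayItem.map(function(arrayItem) {
        return rp({
            url: config.radarSearchURI +
            '?key='+ config.apiKey +
            '&location=' + config.latitude + ',' + config.longitude +
            '&radius=' + config.searchRadius +
            '&keyword=' + arrayItem, headers: config.headers
        }).then(function(body) {
            var responseBody = JSON.parse(body);
            return appendFilePromise(config.dataDirectory + arrayItem + '.json',
                JSON.stringify(responseBody.results, null, '\t'));
        });
    });
    // Exit only once every request/append pair has finished (or one has failed)
    Promise.all(jobs).then(function() {
        process.exit(0);
    }, function(err) {
        console.error(err);
        process.exit(1);
    });
});

The rejection handler is there so a single failed request doesn't leave the worker running forever.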