I'm using this asynchronous recursive function to iterate through a directory's files and folders, and when a .css file is found, I append data to a file called 'common.css'.
var walk = function(dir, done) {
 var results = [];
 fs.readdir(dir, function(err, list) {
      if (err) return done(err);
      var pending = list.length;
      if (!pending) return done(null);
      list.forEach(function(file) {
           file = dir + '/' + file;
           fs.stat(file, function(err, stat) {
                if (stat && stat.isDirectory()) {
                     minimizer(file, function(err, res) {
                          results = results.concat(res);
                          if (!--pending) done(null);
                     });
                } else if(file) {
                     fs.open('common.css', 'a', 666, function( err, id ) {
                         fs.write( id, 'hello', null, 'utf8', function(){
                             fs.close(id, function(){
                                  console.log('file closed');
                             });
                         });
                     });
                }
                if (!--pending) done(null);
           });  
      });
});   }
The problem is that, being asyncronous, I sense that there are times, where several instances of the function are writing to the file at the same time. Is there any way I can control this flux and queue the writing tasks?
The purpose of this code is to merge the .css files that are found in the directory. I cannot use external tools in this project.
EDIT ANSWER: Well, for this purpose, I can just gather the paths of all .css files I want to merge, and after I have them all, call a syncronous function to write them.
That's what the
var results = []
is there for. I just realized it.
 
     
     
     
     
     
    