I am writing a small program that processes a big text file and performs some replacements. The problem is that it never stops allocating new memory, so eventually it runs out. I have reduced it to a simple program that merely counts the number of lines (see the code below), yet it still allocates more and more memory as it runs. I must admit that I know little about Boost, and Boost.Spirit in particular. Could you please tell me what I am doing wrong? Thanks a million!
#include <iostream>
#include <string>

#include <boost/bind.hpp>
#include <boost/mpl/bool.hpp>
#include <boost/ref.hpp>
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/support_istream_iterator.hpp>
// Token identifiers emitted by the lexer.
// User-defined ids start at 100 to stay clear of low reserved values.
enum token_ids {
    ID_EOL = 100  // a single newline character was matched
};
// Lexer definition: the grammar recognizes exactly one token, the
// newline character, reported with id ID_EOL.
template <typename Lexer>
struct var_replace_tokens : boost::spirit::lex::lexer<Lexer> {
    var_replace_tokens() {
        // Register the newline pattern under its token id.
        this->self.add("\n", ID_EOL);
    }
};
// Callback functor handed to lex::tokenize; invoked once per token.
struct replacer {
    typedef bool result_type;  // required by boost::bind

    // Increments the line counter whenever an end-of-line token is seen.
    // Returning true tells tokenize() to keep going.
    template <typename Token>
    bool operator()(Token const& tok, std::size_t& line_count) const {
        if (tok.id() == ID_EOL) {
            ++line_count;
        }
        return true;  // never abort tokenization
    }
};
int main(int argc, char **argv) {
    size_t lines=0;
    var_replace_tokens< boost::spirit::lex::lexertl::lexer< boost::spirit::lex::lexertl::token< boost::spirit::istream_iterator> > > var_replace_functor;
    cin.unsetf(std::ios::skipws);
    boost::spirit::istream_iterator first(cin);
    boost::spirit::istream_iterator last;
    bool r = boost::spirit::lex::tokenize(first, last, var_replace_functor,  boost::bind(replacer(), _1, boost::ref(lines)));
    if (r) {
        cerr<<"Lines processed: "<<lines<<endl;
    }  else {
        string rest(first, last);
        cerr << "Processing failed at: "<<rest<<" (line "<<lines<<")"<<endl;
    }
}
