I am trying to read and process each line of a large file and then write the result to another file, but memory runs out quickly. Is there a better way to do this? Please help.
import time
import numpy as np

time_start = time.time()
tt_X = []
with open(filenameRead, 'r') as fr, open(fileNameWrite, 'w') as fw:
    for line in fr:
        # the third comma-separated field holds the space-separated values
        _, _, D_tmp = line.strip().split(',')
        # process each line: parse, normalise by 215 and reshape into 15 samples of 4 x 101 x 101
        D_tmp = np.array(D_tmp.split()).astype(int).reshape(15, 4, 101, 101) / 215.
        # keep channels 1-3 and crop the last row/column of each of the 15 samples
        for i in range(15):
            tt_X.append(D_tmp[i, 1:4, :-1, :-1])
        # encode the 15 samples of this line in one batch and write the flattened result
        encoded = encoder.predict(np.array(tt_X)).flatten()
        fw.write(' '.join(encoded.astype(str)) + '\n')
        tt_X = []
time_end = time.time()
print 'The elapsed time is ', time_end - time_start
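For reference, here is a stripped-down sketch of the same loop that I thought might use less memory (assuming encoder is a Keras-style model with a predict method and the file layout stays the same): it parses each line straight into a float32 array and slices the whole batch at once instead of building a Python list. I am not sure whether this is actually the right direction.

import time
import numpy as np

time_start = time.time()
with open(filenameRead, 'r') as fr, open(fileNameWrite, 'w') as fw:
    for line in fr:
        _, _, raw = line.strip().split(',')
        # parse directly into float32 and normalise (half the size of the float64 array built above)
        data = np.array(raw.split(), dtype=np.float32) / np.float32(215.)
        data = data.reshape(15, 4, 101, 101)
        # one batch of 15 samples: channels 1-3, last row/column cropped (a view, no extra copy)
        batch = data[:, 1:4, :-1, :-1]
        encoded = encoder.predict(batch).flatten()
        fw.write(' '.join(encoded.astype(str)) + '\n')
time_end = time.time()
print 'The elapsed time is ', time_end - time_start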
