I have a for loop inside a function, and it runs fine for data frames with fewer than 10,000 rows, but the time the loop takes seems to grow exponentially as the number of rows increases. I have read this post about optimizing loops, but I don't know how to apply it to my situation.
Here is the for loop:
for (i in 1:nrow(data.frame)) {
    event <- as.character(data.frame[i, "Event"])
    if (i >= 20) {
        # get the previous 20 rows (the one-second window ending at row i)
        one.sec.interval <- data[(i - (20 - 1)):i, ]
        # get the covariance matrix of the window
        cov.matrix <- var(one.sec.interval)
        # the diagonal of the covariance matrix holds the variance of each feature
        variance.of.features <- diag(cov.matrix)
        # reformat the variance vector into a one-row data frame for easier manipulation
        variance.of.features <- data.frame(matrix(variance.of.features, 1, length(variance.of.features)))
        # rename the variance columns of the features
        colnames(variance.of.features) <- c('Back.Pelvis.Acc.X.sd', 'Back.Pelvis.Acc.Y.sd', 'Back.Pelvis.Acc.Z.sd',
                                            'Back.Pelvis.Gyro.X.sd', 'Back.Pelvis.Gyro.Y.sd', 'Back.Pelvis.Gyro.Z.sd',
                                            'Back.Trunk.Acc.X.sd', 'Back.Trunk.Acc.Y.sd', 'Back.Trunk.Acc.Z.sd',
                                            'Back.Trunk.Gyro.X.sd', 'Back.Trunk.Gyro.Y.sd', 'Back.Trunk.Gyro.Z.sd')
        # create the new feature vector: the current row plus the window variances
        new.feature.vector <- cbind(data[i, ], variance.of.features)
        new.feature.vector$Event <- event
        # store the new row in the output data frame
        one.sec.interval.data[i - (20 - 1), ] <- new.feature.vector
    }
}
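For reference, here is a minimal sketch of the kind of input the loop works on and what one iteration computes. The column names are taken from the rename step above (minus the ".sd" suffix); the toy values, the number of rows, and the assumption that `data` holds only the numeric sensor columns are placeholders, not my actual data:

# minimal sketch of the assumed input: 12 numeric sensor columns
set.seed(1)
sensor.cols <- c('Back.Pelvis.Acc.X', 'Back.Pelvis.Acc.Y', 'Back.Pelvis.Acc.Z',
                 'Back.Pelvis.Gyro.X', 'Back.Pelvis.Gyro.Y', 'Back.Pelvis.Gyro.Z',
                 'Back.Trunk.Acc.X', 'Back.Trunk.Acc.Y', 'Back.Trunk.Acc.Z',
                 'Back.Trunk.Gyro.X', 'Back.Trunk.Gyro.Y', 'Back.Trunk.Gyro.Z')
data <- as.data.frame(matrix(rnorm(100 * 12), ncol = 12,
                             dimnames = list(NULL, sensor.cols)))

# what one iteration (say i = 25) computes: the per-column variance of the
# trailing 20-row window, which then gets bound onto the current row
i <- 25
window <- data[(i - 19):i, ]
diag(var(window))   # same values as variance.of.features in the loop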