I have a dataset which looks like this
    ID  885038  885039  885040  885041  885042  885043  885044  Class
1267359 2       0       0       0       0       1       0      0
1295720 0       0       0       0       0       1       0      0
1295721 0       0       0       0       0       1       0      0
1295723 0       0       0       0       0       1       0      0
1295724 0       0       0       1       0       1       0      0
1295725 0       0       0       1       0       1       0      0
1295726 2       0       0       0       0       1       0      1
1295727 2       0       0       0       0       1       0      1
1295740 0       0       0       0       0       1       0      1
1295742 0       0       0       0       0       1       0      1
1295744 0       0       0       0       0       1       0      1
1295745 0       0       0       0       0       1       0      1
1295746 0       0       0       0       0       1       0      1
With the intention of doing recursive feature elimination, I followed the steps
- Train the SVM classifier
- compute the ranking criterion for all features
- remove the features with smallest ranking values
- Go to 1.
Following is the R code I have written for doing the same, however, it doesn't show any error and the loop continues with the lengths of the training set.
data <- read.csv("dummy - Copy.csv", header = TRUE)
rownames(data) <- data[,1]
data<-data[,-1]
for (k in 1:length(data)){
  inTraining <- createDataPartition(data$Class, p = .70, list = FALSE)
  training <- data[ inTraining,]
  testing  <- data[-inTraining,]
  ## Building the model ####
  svm.model <- svm(Class ~ ., data = training, cross=10,metric="ROC",type="eps-regression",kernel="linear",na.action=na.omit,probability = TRUE)
  ###### auc  measure #######
  #prediction and ROC
  svm.model$index
  svm.pred <- predict(svm.model, testing, probability = TRUE)
  #calculating auc
  c <- as.numeric(svm.pred)
  c = c - 1
  pred <- prediction(c, testing$Class)
  perf <- performance(pred,"tpr","fpr")
  plot(perf,fpr.stop=0.1)
  auc <- performance(pred, measure = "auc")
  auc <- auc@y.values[[1]]
  #compute the weight vector
  w = t(svm.model$coefs)%*%svm.model$SV
  #compute ranking criteria
  weight_matrix = w * w
  #rank the features
  w_transpose <- t(weight_matrix)
  w2 <- as.matrix(w_transpose[order(w_transpose[,1], decreasing = FALSE),])
  a <- as.matrix(w2[which(w2 == min(w2)),]) #to get the rows with minimum values
  row.names(a) -> remove
  data<- data[,setdiff(colnames(data),remove)]
  print(length(data))
  length <- (length(data))
  cols_names <- colnames(data)
  print(auc)
  output <- paste(length,auc,sep=";")
  write(output, file = "output.txt",append = TRUE)
  write(cols_names, file = paste(length,"cols_selected", ".txt", sep=""))
}
The printed output is like
[1] 3
[1] 0.5
[1] 2
[1] 0.5
[1] 2
[1] 0.5
[1] 2
[1] 0.75
[1] 2
[1] 1
[1] 2
[1] 0.75
[1] 2
[1] 0.5
[1] 2
[1] 0.75
But when I pick any of the feature subset, For e.g. Feature 3 and build an SVM model using the above code (without the loop), I don't get the same AUC value of 0.75.
data <- read.csv("3.csv", header = TRUE)
rownames(data) <- data[,1]
data<-data[,-1]
  inTraining <- createDataPartition(data$Class, p = .70, list = FALSE)
  training <- data[ inTraining,]
  testing  <- data[-inTraining,]
  ## Building the model ####
  svm.model <- svm(Class ~ ., data = training, cross=10,metric="ROC",type="eps-regression",kernel="linear",na.action=na.omit,probability = TRUE)
  ###### auc  measure #######
  #prediction and ROC
  svm.model$index
  svm.pred <- predict(svm.model, testing, probability = TRUE)
  #calculating auc
  c <- as.numeric(svm.pred)
  c = c - 1
  pred <- prediction(c, testing$Class)
  perf <- performance(pred,"tpr","fpr")
  plot(perf,fpr.stop=0.1)
  auc <- performance(pred, measure = "auc")
  auc <- auc@y.values[[1]]
  print(auc)
prints output 
    [1] 3
    [1] 0.75 (instead of 0.5)
Both the codes are same (one with a recursive loop, another one is without any recursive loop) still there is a difference in AUC values for the same feature subset.
The 3 features (885041, 885043 and Class) for both the codes is the same, but it gives different AUC values. 
 
     
    