I have the following vector:
x = c(1, 2, 3, 10, 20, 30)
At each index, 3 consecutive elements are summed, resulting in the following vector:
c(6, 15, 33, 60)
Thus, the first element is 1 + 2 + 3 = 6, the second element is 2 + 3 + 10 = 15, etc.
I have the following vector:
x = c(1, 2, 3, 10, 20, 30)
At each index, 3 consecutive elements are summed, resulting in the following vector:
c(6, 15, 33, 60)
Thus, the first element is 1 + 2 + 3 = 6, the second element is 2 + 3 + 10 = 15, etc.
 
    
     
    
    What you have is a vector, not an array. You can use the rollapply function from the zoo package to get what you need.
> x <- c(1, 2, 3, 10, 20, 30)
> library(zoo)
> rollapply(x, 3, sum)
[1]  6 15 33 60
Take a look at ?rollapply for further details on what rollapply does and how to use it.
 
    
    I put together a package for handling these kinds of 'roll'ing functions that offers functionality similar to zoo's rollapply, but with Rcpp on the backend. Check out RcppRoll on CRAN.
# Benchmark: zoo::rollapply vs. RcppRoll::roll_sum vs. a base-R
# cumsum-based rolling sum, on 100,000 standard-normal draws.
library(microbenchmark)
library(zoo)
library(RcppRoll)
x <- rnorm(1E5)
# Sanity check: both rolling-sum implementations agree on window width 3
# (the assignments to m1/m2 keep the results around for inspection).
all.equal( m1 <- rollapply(x, 3, sum), m2 <- roll_sum(x, 3) )
## from flodel
# Rolling sum as the difference of two cumulative sums: subtracting the
# cumsum lagged by n leaves sum(x[(i-n+1):i]) at position i; the first
# n - 1 positions have no complete window and tail(..., -n + 1) drops them.
rsum.cumsum <- function(x, n = 3L) {
  tail(cumsum(x) - cumsum(c(rep(0, n), head(x, -n))), -n + 1)
}
# Time each implementation 10 times; the expression labels here must
# match the captured microbenchmark output shown below.
microbenchmark(
  unit="ms",
  times=10,
  rollapply(x, 3, sum),
  roll_sum(x, 3),
  rsum.cumsum(x, 3)
)
gives me
Unit: milliseconds
                 expr         min          lq      median         uq         max neval
 rollapply(x, 3, sum) 1056.646058 1068.867550 1076.550463 1113.71012 1131.230825    10
       roll_sum(x, 3)    0.405992    0.442928    0.457642    0.51770    0.574455    10
    rsum.cumsum(x, 3)    2.610119    2.821823    6.469593   11.33624   53.798711    10
You might find it useful if speed is a concern.
 
    
     
    
    If speed is a concern, you could use a convolution filter and chop off the ends:
# Rolling sum via a convolution filter, base R only.
# sides = 1 aligns each sum with the last element of its window, so
# positions 1..(n-1) are NA padding and n:length(x) holds the
# length(x) - n + 1 complete window sums. This is correct for any
# n >= 1 — the original trim of the first and last element was only
# correct for n = 3. stats:: is spelled out so a loaded dplyr cannot
# mask filter().
rsum.filter <- function(x, n = 3L) {
  sums <- stats::filter(x, rep(1, n), sides = 1)
  sums[n:length(x)]
}
Or even faster, write it as the difference between two cumulative sums:
# Rolling sum as a difference of cumulative sums, base R only.
# cumsum(x)[i] minus the cumsum lagged by n gives sum(x[(i-n+1):i]);
# only the last length(x) - n + 1 positions hold complete windows, and
# tail(..., length(x) - n + 1) keeps exactly those. (The original
# tail(..., -n + 1) returned an empty vector for n = 1.)
rsum.cumsum <- function(x, n = 3L) {
  tail(cumsum(x) - cumsum(c(rep(0, n), head(x, -n))), length(x) - n + 1)
}
Both use base functions only. Some benchmarks:
# Benchmark input: a random permutation of the integers 1..1000.
x <- sample(1:1000)
# zoo::rollapply reference implementation.
rsum.rollapply <- function(x, n = 3L) rollapply(x, n, sum)
# Naive base-R version: explicitly sum each length-n window; one window
# starts at every index 1..(length(x) - n + 1).
rsum.sapply    <- function(x, n = 3L) sapply(1:(length(x)-n+1),function(i){
                                       sum(x[i:(i+n-1)])})
library(microbenchmark)
# Compare all four; rsum.filter and rsum.cumsum are defined just above.
# Expression labels must match the captured output shown below.
microbenchmark(
  rsum.rollapply(x),
  rsum.sapply(x),
  rsum.filter(x),
  rsum.cumsum(x)
)
# Unit: microseconds
#               expr       min        lq    median         uq       max neval
#  rsum.rollapply(x) 12891.315 13267.103 14635.002 17081.5860 28059.998   100
#     rsum.sapply(x)  4287.533  4433.180  4547.126  5148.0205 12967.866   100
#     rsum.filter(x)   170.165   208.661   269.648   290.2465   427.250   100
#     rsum.cumsum(x)    97.539   130.289   142.889   159.3055   449.237   100
Also, I imagine all methods would be faster if x and all applied weights were integers instead of numerics.
 
    
    Using just the base R you could do:
# Rolling sum over windows of `grp` consecutive elements, base R only.
v <- c(1, 2, 3, 10, 20, 30)
grp <- 3
# vapply (rather than sapply) guarantees a plain numeric vector even for
# degenerate input; seq_len avoids the 1:0 trap when the vector is
# shorter than the window. One window starts at each index
# 1..(length(v) - grp + 1).
res <- vapply(seq_len(length(v) - grp + 1),
              function(i) sum(v[i:(i + grp - 1)]),
              numeric(1))
> res
[1]  6 15 33 60
Another way, faster than sapply (comparable to @flodel's rsum.cumsum), is the following:
# Matrix-based rolling sum: outer() lays out every window of width grp
# as one row of an (n-windows x grp) matrix, and rowSums() collapses
# each window to its sum.
res <- rowSums(outer(seq_len(length(v) - grp + 1), seq_len(grp),
                     FUN = function(i, j) v[i + j - 1]))
Here's flodel's benchmark updated:
# flodel's benchmark extended with the outer()/rowSums variant,
# on a random permutation of 1..1000.
x <- sample(1:1000)
# zoo::rollapply reference implementation.
rsum.rollapply <- function(x, n = 3L) rollapply(x, n, sum)
# Naive per-window sum.
rsum.sapply    <- function(x, n = 3L) sapply(1:(length(x)-n+1),function(i){sum(x[i:(i+n-1)])})
# Convolution filter; the end-trim assumes n = 3.
rsum.filter <- function(x, n = 3L) filter(x, rep(1, n))[-c(1, length(x))]
# Difference of two cumulative sums.
rsum.cumsum <- function(x, n = 3L) tail(cumsum(x) - cumsum(c(rep(0, n), head(x, -n))), -n + 1)
# outer() window matrix collapsed with rowSums.
rsum.outer <- function(x, n = 3L) rowSums(outer(1:(length(x)-n+1),1:n,FUN=function(i,j){x[(j - 1) + i]}))
library(microbenchmark)
# Expression labels must match the captured output shown below.
microbenchmark(
  rsum.rollapply(x),
  rsum.sapply(x),
  rsum.filter(x),
  rsum.cumsum(x),
  rsum.outer(x)
)
# Unit: microseconds
#              expr      min        lq     median         uq       max neval
# rsum.rollapply(x) 9464.495 9929.4480 10223.2040 10752.7960 11808.779   100
#    rsum.sapply(x) 3013.394 3251.1510  3466.9875  4031.6195  7029.333   100
#    rsum.filter(x)  161.278  178.7185   229.7575   242.2375   359.676   100
#    rsum.cumsum(x)   65.280   70.0800    88.1600    95.1995   181.758   100
#     rsum.outer(x)   66.880   73.7600    82.8795    87.0400   131.519   100
 
    
    If you need real speed, try
# Fastest base-R rolling sum: one cumulative sum, indexed twice.
# cs[i] - cs[i - n] equals sum(x[(i-n+1):i]); the leading 0 stands in
# for cs[0]. seq.int(n, length(x)) picks the positions with complete
# windows and seq_len(length(x) - n) builds the lagged prefix safely —
# the original -(1:(n-1)) / 1:(length(x)-n) indexing misfires when
# n = 1 (1:0 counts down) or n = length(x).
rsum.cumdiff <- function(x, n = 3L) {
  cs <- cumsum(x)
  cs[seq.int(n, length(x))] - c(0, cs[seq_len(length(x) - n)])
}
It's all in base R, and updating flodel's microbenchmark speaks for itself
# Full benchmark of every rolling-sum implementation in this thread,
# on a random permutation of 1..1000.
x <- sample(1:1000)
# zoo::rollapply reference implementation.
rsum.rollapply <- function(x, n = 3L) rollapply(x, n, sum)
# Naive per-window sum.
rsum.sapply    <- function(x, n = 3L) sapply(1:(length(x)-n+1),function(i){sum(x[i:(i+n-1)])})
# Convolution filter; the end-trim assumes n = 3.
rsum.filter <- function(x, n = 3L) filter(x, rep(1, n))[-c(1, length(x))]
# Difference of two cumulative sums.
rsum.cumsum <- function(x, n = 3L) tail(cumsum(x) - cumsum(c(rep(0, n), head(x, -n))), -n + 1)
# outer() window matrix collapsed with rowSums.
rsum.outer <- function(x, n = 3L) rowSums(outer(1:(length(x)-n+1),1:n,FUN=function(i,j){x[(j - 1) + i]}))
# Single cumsum indexed twice — the fastest variant in the timings below.
rsum.cumdiff <- function(x, n = 3L) (cs <- cumsum(x))[-(1:(n-1))] - c(0, cs[1:(length(x)-n)])
# Pairwise agreement checks: all six implementations produce the same
# rolling sums on the same input.
all.equal(rsum.rollapply(x), rsum.sapply(x))
# [1] TRUE
all.equal(rsum.sapply(x), rsum.filter(x))
# [1] TRUE
all.equal(rsum.filter(x), rsum.outer(x))
# [1] TRUE
all.equal(rsum.outer(x), rsum.cumsum(x))
# [1] TRUE
all.equal(rsum.cumsum(x), rsum.cumdiff(x))
# [1] TRUE
library(microbenchmark)
# Expression labels must match the captured output shown below.
microbenchmark(
  rsum.rollapply(x),
  rsum.sapply(x),
  rsum.filter(x),
  rsum.cumsum(x),
  rsum.outer(x),
  rsum.cumdiff(x)
)
# Unit: microseconds
#               expr      min        lq       mean    median        uq       max neval
#  rsum.rollapply(x) 3369.211 4104.2415 4630.89799 4391.7560 4767.2710 12002.904   100
#     rsum.sapply(x)  850.425  999.2730 1355.56383 1086.0610 1246.5450  6915.877   100
#     rsum.filter(x)   48.970   67.1525   97.28568   96.2430  113.6975   248.728   100
#     rsum.cumsum(x)   47.515   62.7885   89.12085   82.1825  106.6675   230.303   100
#      rsum.outer(x)   69.819   85.3340  160.30133   92.6070  109.0920  5740.119   100
#    rsum.cumdiff(x)    9.698   12.6070   70.01785   14.3040   17.4555  5346.423   100
## R version 3.5.1 "Feather Spray"
## zoo and microbenchmark compiled under R 3.5.3
Oddly enough, everything is faster the second time through microbenchmark:
# Identical benchmark repeated in the same session; timings improve on
# the second run once the code paths are warm.
microbenchmark(
       rsum.rollapply(x),
       rsum.sapply(x),
       rsum.filter(x),
       rsum.cumsum(x),
       rsum.outer(x),
       rsum.cumdiff(x)
   )
# Unit: microseconds
#               expr      min        lq       mean    median        uq      max neval
#  rsum.rollapply(x) 3127.272 3477.5750 3869.38566 3593.4540 3858.9080 7836.603   100
#     rsum.sapply(x)  844.122  914.4245 1059.89841  965.3335 1032.2425 5184.968   100
#     rsum.filter(x)   47.031   60.8490   80.53420   74.1830   90.9100  260.365   100
#     rsum.cumsum(x)   45.092   55.2740   69.90630   64.4855   81.4555  122.668   100
#      rsum.outer(x)   68.850   76.6070   88.49533   82.1825   91.8800  166.304   100
#    rsum.cumdiff(x)    9.213   11.1520   13.18387   12.1225   13.5770   49.456   100
 
    
    library runner may also be used
# runner::sum_run with window width k = 3; na_pad = TRUE left-pads the
# first k - 1 positions with NA so the output has the same length as x.
# (TRUE is spelled out: T is an ordinary reassignable binding in R.)
x <- c(1, 2, 3, 10, 20, 30)
runner::sum_run(x, k = 3, na_pad = TRUE)
#> [1] NA NA  6 15 33 60
or slider is also useful
# slider::slide_sum with before = 2 uses a trailing window of width 3;
# complete = TRUE emits NA until a full window is available, so the
# output has the same length as x.
# (TRUE is spelled out: T is an ordinary reassignable binding in R.)
x <- c(1, 2, 3, 10, 20, 30)
slider::slide_sum(x, before = 2, complete = TRUE)
#> [1] NA NA  6 15 33 60
Created on 2021-06-14 by the reprex package (v2.0.0)
