I found another way to do it:
const { spawn } = require('child_process');
/**
 * Runs a single command, streaming its stdout/stderr to the parent process
 * in real time.
 *
 * Note: the command string is split on spaces, so quoted arguments
 * containing spaces are NOT supported; `&&` chaining is not supported either.
 *
 * @param {string} command - Command and space-separated arguments, e.g. 'ls -la'.
 * @returns {Promise<void>} Resolves on exit code 0; rejects otherwise.
 */
export const execCommand = async (command) => {
  return new Promise((resolve, reject) => {
    // Naive whitespace split — fine for simple commands, breaks on quoted args.
    const [cmd, ...args] = command.split(' ');
    const childProcess = spawn(cmd, args);
    // Forward child output as it arrives (real-time streaming).
    childProcess.stdout.on('data', (data) => {
      process.stdout.write(data.toString());
    });
    childProcess.stderr.on('data', (data) => {
      process.stderr.write(data.toString());
    });
    // Fires when the process could not be spawned at all (e.g. ENOENT).
    childProcess.on('error', (error) => {
      reject(error);
    });
    // Use 'close' instead of 'exit': 'close' guarantees the stdio streams
    // have been fully flushed, so no trailing output is lost.
    childProcess.on('close', (code, signal) => {
      if (code === 0) {
        resolve();
      } else if (signal !== null) {
        // When the child is killed by a signal, code is null — report the
        // signal instead of the misleading "exited with code null".
        reject(new Error(`Command was terminated by signal ${signal}.`));
      } else {
        reject(new Error(`Command exited with code ${code}.`));
      }
    });
  });
};
This code gives you the ability to get real-time output from the executed command and redirects all stdout and stderr to the parent process. It also lets you invoke the command the same way you would in bash/sh (a single string input). Here I use process.stdout.write for more accurate output instead of the console.log used in other answers.
Usage:
await execCommand('sudo apt-get update');
await execCommand('sudo apt-get install -y docker.io docker-compose');
Note: unlike exec, this does not support chaining multiple commands with &&, so each command must be run with its own execCommand call.
And here is a simplified version that supports both realtime streaming and shell execution:
const { spawn } = require('child_process');
/**
 * Runs a command through the system shell with the child's stdio inherited
 * from the parent, so output streams in real time and shell features
 * (quoting, `&&` chaining, redirection) work as in bash/sh.
 *
 * @param {string} command - Full shell command line, e.g. 'a && b'.
 * @returns {Promise<void>} Resolves on exit code 0; rejects otherwise.
 */
export const execCommand = async (command) => {
  return new Promise((resolve, reject) => {
    const childProcess = spawn(command, {
      // Reuse the parent's stdin/stdout/stderr directly — no piping needed.
      stdio: 'inherit',
      // Let the shell parse the command string (enables &&, quoting, etc.).
      shell: true
    });
    // Fires when the process could not be spawned at all.
    childProcess.on('error', (error) => {
      reject(error);
    });
    // 'close' fires once the process has fully terminated.
    childProcess.on('close', (code, signal) => {
      if (code === 0) {
        resolve();
      } else if (signal !== null) {
        // code is null when the child was killed by a signal.
        reject(new Error(`Command was terminated by signal ${signal}.`));
      } else {
        reject(new Error(`Command exited with code ${code}.`));
      }
    });
  });
};
Usage:
await execCommand('sudo apt-get update && sudo apt-get install -y docker.io docker-compose');