A relatively simple approach is to start a new thread that copies the process's stdout to Python's stdout once you are finished processing it. Here is an example that runs webpack-dev-server and scrapes the URL from its output (the URL scraping itself is not actually implemented):
#!/usr/bin/env python3
import subprocess
import sys
import os
from threading import Thread
from typing import IO


def pipe(a: IO[bytes], b: IO[bytes]):
  # Copy lines from stream a to stream b, flushing after each write
  # so output appears immediately instead of being buffered.
  for x in a:
    b.write(x)
    b.flush()

process = subprocess.Popen(
  [
    "node_modules/.bin/webpack-dev-server",
    # Note that I had to add --color because for some reason
    # webpack-dev-server detects stdout as supporting colour,
    # but not stderr.
    "--color",
    # This prints a progress bar using ANSI escape characters
    # which works too!
    "--progress",
  ],
  cwd=".",
  env=dict(os.environ, NODE_ENV="development"),
  stdout=subprocess.PIPE,
)

for line in process.stdout:
  sys.stdout.buffer.write(line)
  # Flushing is necessary; otherwise stdout buffers more than one
  # line at a time.
  sys.stdout.buffer.flush()
  # Process the text however you want.
  if b"Project is running at" in line:
    print("Server started at address ??")
    break

# Start a thread to do the pipe copying.
thread = Thread(target=pipe, args=(process.stdout, sys.stdout.buffer))
thread.start()
print("Now we can do other stuff and the process will continue to print to stdout")
Note that I haven't given any thought to proper cleanup: stopping the thread, closing the pipe, waiting for the process to exit, etc.
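As a rough sketch of what that cleanup might look like (assuming you want to shut the dev server down when your script is done, and that the copy thread should simply exit when the pipe closes):

try:
  pass  # ... do other stuff while the thread mirrors the output ...
finally:
  # Terminating the child closes the write end of the pipe, so the
  # copy loop in the thread sees EOF and returns.
  process.terminate()
  process.wait()
  thread.join()
  process.stdout.close()

Note that terminate() sends SIGTERM on POSIX; if webpack-dev-server ignores it, you may need process.kill() as a fallback.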