I'm trying to run this function every 2 seconds, forever:
import requests
from bs4 import BeautifulSoup
import asyncio

async def scrape():
    test = []
    r = requests.get(coin_desk)
    soup = BeautifulSoup(r.text, features='xml') 
    title = soup.find_all('title')[2]
    await asyncio.sleep(2)
    for x in title:
        test.append(x)
        print(test)
if __name__ == '__main__':
    try:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(scrape())
    except (KeyboardInterrupt, SystemExit):
        pass
If I use run_forever() instead of run_until_complete(scrape()), nothing gets printed out; the loop just runs forever and seems to skip the function entirely.
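For clarity, this is the run_forever() variant I mean (a minimal sketch; the scrape() coroutine is unchanged from above, only the last call differs):

import asyncio

# scrape() is the same coroutine defined above

if __name__ == '__main__':
    try:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_forever()  # replaces loop.run_until_complete(scrape())
    except (KeyboardInterrupt, SystemExit):
        pass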