A simple function to upload files to a Google Cloud Storage (GCS) bucket.
# pip install --upgrade google-cloud-storage
from google.cloud import storage
def upload_to_bucket(blob_name, path_to_file, bucket_name):
    """ Upload data to a bucket"""
     
    # Explicitly use service account credentials by specifying the private key
    # file.
    storage_client = storage.Client.from_service_account_json(
        'creds.json')
    # buckets = list(storage_client.list_buckets())  # optional: list all buckets
    bucket = storage_client.get_bucket(bucket_name)
    blob = bucket.blob(blob_name)
    blob.upload_from_filename(path_to_file)
    
    # return the public URL of the uploaded file
    return blob.public_url
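For example, assuming an existing bucket called 'my-bucket' and a local file '/tmp/photo.png' (both placeholder names), the call looks like this:

url = upload_to_bucket('photos/photo.png', '/tmp/photo.png', 'my-bucket')
print(url)  # e.g. https://storage.googleapis.com/my-bucket/photos/photo.png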
You can generate a credentials file by following this guide: https://cloud.google.com/storage/docs/reference/libraries?authuser=1#client-libraries-install-python
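As an alternative to passing the key file explicitly, the client library can also pick up credentials from the GOOGLE_APPLICATION_CREDENTIALS environment variable. A minimal sketch, assuming 'creds.json' is the service account key you downloaded:

import os
from google.cloud import storage

# Point Application Default Credentials at the service account key (path is a placeholder)
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = 'creds.json'

# No explicit key file needed here any more
storage_client = storage.Client()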
Asynchronous Example:
import asyncio
import aiohttp
# pip install aiofile
from aiofile import AIOFile
# pip install gcloud-aio-storage
from gcloud.aio.storage import Storage 
BUCKET_NAME = '<bucket_name>'
FILE_NAME = 'requirements.txt'
async def async_upload_to_bucket(blob_name, file_obj, folder='uploads'):
    """ Upload csv files to bucket. """
    async with aiohttp.ClientSession() as session:
        storage = Storage(service_file='./creds.json', session=session) 
        status = await storage.upload(BUCKET_NAME, f'{folder}/{blob_name}', file_obj)
        # status holds the metadata of the uploaded object
        # print(status)
        return status['selfLink']
        
async def main():
    async with AIOFile(FILE_NAME, mode='r') as afp:
        f = await afp.read()
        url = await async_upload_to_bucket(FILE_NAME, f)
        print(url)
# Python 3.6: drive the coroutine with an explicit event loop
loop = asyncio.get_event_loop()
loop.run_until_complete(main())

# Python 3.7+: prefer asyncio.run, which creates and closes the loop for you
# asyncio.run(main())
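Since storage.upload also accepts in-memory data (str or bytes), the same pattern works without reading a file from disk first. A small sketch, assuming the same BUCKET_NAME and creds.json as above and a placeholder object name:

async def upload_text(blob_name, text, folder='uploads'):
    """Upload an in-memory string as an object in the bucket."""
    async with aiohttp.ClientSession() as session:
        storage = Storage(service_file='./creds.json', session=session)
        status = await storage.upload(BUCKET_NAME, f'{folder}/{blob_name}', text)
        return status['selfLink']

# e.g. asyncio.run(upload_text('hello.txt', 'hello world'))  # Python 3.7+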