For now, I do it like this:
file-io-helpers.ts
// Per-file lock registry: maps a file path to the promise of the operation
// currently "holding" that file. An entry exists only while an operation is
// in flight and is removed once it settles.
const mutexMap = new Map<string, Promise<void>>();
/**
 * Serializes access per file path: concurrent calls for the same file are
 * queued so only one create-or-append operation runs at a time.
 *
 * @param file           path of the file to create or append to
 * @param txtIfNotExists text written when the file does not exist yet
 * @param txtIfExists    text appended when the file already exists
 * @throws whatever the underlying file operation throws (lock is still released)
 */
export async function createOrAppend(file: string, txtIfNotExists: string, txtIfExists: string): Promise<void> {
    // Wait until no other operation holds the lock for this file.
    // Swallow rejections here: a failure of ANOTHER call must not
    // prevent THIS call from running.
    let mutex: Promise<void> | undefined;
    while ((mutex = mutexMap.get(file)) != null) {
        await mutex.catch(() => { /* other call's failure — ignore */ });
    }

    // Acquire the lock. There is no `await` between the loop exit above and
    // this set(), so check-and-set is atomic on the single JS thread.
    mutex = _createOrAppend_INTERNAL(file, txtIfNotExists, txtIfExists);
    mutexMap.set(file, mutex);
    try {
        await mutex;
    } finally {
        // Release the lock even when the write failed; without this finally a
        // rejection would leave the map entry behind and deadlock all later
        // calls for this file. Only delete our own entry, in case a successor
        // has already replaced it.
        if (mutexMap.get(file) === mutex) {
            mutexMap.delete(file);
        }
    }
}
/**
 * Performs the actual write (no locking — callers go through createOrAppend).
 *
 * BUG FIX: the appendFile promises must be awaited. Without `await`, the
 * returned promise (used as the mutex) resolved before the write had actually
 * finished, reintroducing the race condition the mutex is meant to prevent.
 */
async function _createOrAppend_INTERNAL(file: string, txtIfNotExists: string, txtIfExists: string): Promise<void> {
    if (await checkFileExists(file)) {
        // file exists: append the "exists" text
        await appendFile(file, txtIfExists);
    } else {
        // file is new: appendFile creates it and writes the "new file" text
        await appendFile(file, txtIfNotExists);
    }
}
// inspired by https://stackoverflow.com/a/35008327/7869582
/**
 * Resolves to true when `file` is visible on disk, false otherwise.
 * Never rejects — any access error is treated as "does not exist".
 */
export async function checkFileExists(file: string): Promise<boolean> {
    try {
        await access(file, constants.F_OK);
        return true;
    } catch {
        return false;
    }
}
// // was just a test (writes the start line multiple times when used with the test in file-io-helpers.spec.ts)
// export async function createOrAppendNoSync(file: string, txtIfNotExists: string, txtIfExists: string) {
//     await _createOrAppend_INTERNAL(file, txtIfNotExists, txtIfExists);
// }
I use it like this:
await createOrAppend(
    filePath,
    "text to write if file is new",
    "text to append if file exists"
);
And a test (file-io-helpers.spec.ts):
import { readFile, unlink } from 'fs/promises';
import { firstValueFrom, forkJoin, timer } from 'rxjs';
import { createOrAppend } from './file-io-helpers';
// time measurement inspired by https://stackoverflow.com/a/14551263/7869582
// Reference point for elapsed-time measurement, captured at module load.
const start = process.hrtime();

/** Milliseconds elapsed since this module was loaded. */
function msSinceStart(): number {
    const [seconds, nanos] = process.hrtime(start);
    return seconds * 1000 + nanos / 1e6; // nanoseconds -> milliseconds
}
// Shared target file that all concurrent test writers append to.
const testFile = `testfile.txt`;
async function myTest(counter: number) {
    
    // wait random time
    const sleepTime = Math.floor(Math.random() * 50);
    await firstValueFrom(timer(sleepTime));
    
    const sinceStart = msSinceStart();
    const counterStr = `[${counter}]`.padEnd(5);
    // createOrAppendNoSync(
    await createOrAppend(
        testFile,
          `${counterStr} start  (${sinceStart.toFixed(4)}ms / ${sleepTime}ms)`,
        `\n${counterStr} append (${sinceStart.toFixed(4)}ms / ${sleepTime}ms)`
    );
    
}
describe('file-io-helper mutex', () => {

    /**
     * Writes to a test file concurrently using createOrAppend. Despite the
     * many concurrent calls, the file must contain exactly one "start" line
     * (the first) and every other line must be an "append" line.
     */
    it('random access', async () => {

        // Delete a leftover test file from an earlier run. BUG FIX: the
        // deletion must be awaited (otherwise it races with the writers
        // below) and its rejection must be handled (unlink rejects with
        // ENOENT when the file does not exist yet).
        await unlink(testFile).catch(() => { /* no former test file */ });

        // array of test promises — all writers start concurrently
        const allTests = [...Array(100).keys()].map(myTest);
        // forkJoin fires once all inner promises have completed;
        // firstValueFrom just waits for that single emission
        await firstValueFrom(forkJoin(allTests));

        // ...then check the file. readFile already resolves to a string,
        // so a single await before split is sufficient.
        const lines = (await readFile(testFile, 'utf-8')).split('\n');
        for (let i = 0; i < lines.length; i++) {
            const line = lines[i];
            if (i === 0) {
                // first line must contain exactly one occurrence of "start"
                const indexes = [...line.matchAll(new RegExp("start", 'gi'))].map(a => a.index);
                expect(indexes.length).toBe(1);
            } else {
                // all other lines must not include "start"
                expect(line).not.toContain("start");
            }
        }

    });
});
I am happy to get other/better/cleaner solutions — especially if you think that race conditions could still occur. I am not totally sure whether my test above behaves the same as if the code were triggered from outside (e.g. by Express requests).
Note that this solution does not protect against other processes changing the same file. Feel free to add that for completeness — it could become relevant for me as well.