In my opinion, you need to write your own small parser here, for example:
def tokenizer(string):
    """Yield (token_type, payload) pairs for a bracketed, single-quoted list literal.

    Emits ("LIST_OPEN", None) for "[", ("LIST_CLOSE", None) for "]", and
    ("VALUE", text) for each '...'-quoted run of characters.  Everything
    else (commas, whitespace) is skipped.
    """
    in_quote = False
    chunk = ""
    for ch in string:
        if not in_quote:
            if ch == "'":
                in_quote = True
            elif ch == "[":
                yield ("LIST_OPEN", None)
            elif ch == "]":
                yield ("LIST_CLOSE", None)
            # any other character outside quotes is a separator — ignore it
        elif ch == "'":
            # closing quote: flush the collected text as one VALUE token
            yield ("VALUE", chunk)
            chunk = ""
            in_quote = False
        else:
            chunk += ch
def parser(tokens):
    """Build a (possibly nested) list from a stream of tokenizer tokens.

    ``tokens`` must be a single shared iterator (e.g. the tokenizer
    generator): the recursive call consumes from the same stream, which is
    how nested "[" ... "]" spans are handed off.  Passing a plain list
    would restart iteration in each recursive call and break nesting.

    Returns the parsed top-level value.  For an empty token stream this
    returns [] (the original ``return lst[0]`` raised IndexError there).
    """
    lst = []
    for kind, payload in tokens:
        if kind == "LIST_OPEN":
            # delegate to a recursive call, which consumes up to the
            # matching LIST_CLOSE from the same iterator
            lst.append(parser(tokens))
        elif kind == "LIST_CLOSE":
            return lst
        elif kind == "VALUE":
            lst.append(payload)
    # Stream exhausted at top level: unwrap the single parsed value,
    # guarding against an empty stream.
    return lst[0] if lst else lst
With some test assertions:
# Round-trip checks: tokenizing a list literal and parsing the token stream
# reproduces the equivalent Python list, including one level of nesting.
assert parser(tokenizer("['HES', ['ATRM', 'SIF', 'NAV']]")) == ['HES', ['ATRM', 'SIF', 'NAV']]
assert parser(tokenizer("[['ATRM', 'SIF', 'NWPX'], ['NAV','SENEA'], ['HES','AGYS', 'CBST', 'GTIM', 'XRSC']]")) == [['ATRM', 'SIF', 'NWPX'], ['NAV','SENEA'], ['HES','AGYS', 'CBST', 'GTIM', 'XRSC']]
The idea is to first tokenize your string into values and structural commands, and then convert that token stream into an actual nested list.