I needed a pseudo-random number generation scheme (independent of the standard libraries) for one of my projects, so I tried a simple LCG-based RNG. It seems to work, except that the C++ and Python versions produce different values for some inputs and identical values for others. The relevant code for both is below. I cannot find the error; any help will be greatly appreciated!
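For reference, the recurrence both programs are supposed to implement is the usual LCG step

    x_{n+1} = (A * x_n + C) mod M

with A = 1103515245, C = 12345 and M = 2^31 (A and C are, as far as I know, the constants from the sample rand() implementation in the C standard; M = 2^31 is my own choice).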
(C++)
// file: lgc.cc
// compile and run with: g++ lgc.cc -o lgc && ./lgc
#include <cstdio>
#include <cstdint>
using namespace std;
uint64_t LGC(uint64_t x) {
  uint64_t A = 1103515245;
  uint64_t C = 12345;
  uint64_t M = (1 << 31);
  return (A * x + C) % M;
}
int main(int argc, char* argv[]) {
  for (auto x : {485288831, 485288832, 10, 16, 255, 756}) {
    uint64_t y = LGC(x);
    printf("%u %u\n", (uint32_t)x, (uint32_t) y);
  }
  return 0;
}
(Python)
# file: lgc.py
# run with: python3 lgc.py
def LGC(x):
    A = 1103515245
    C = 12345
    M = 2 ** 31
    return (A * x + C) % M

for x in [485288831, 485288832, 10, 16, 255, 756]:
    y = LGC(x)
    print(x, y)
(Results: C++)
485288831 3822790476
485288832 631338425
10 2445230203
16 476387081
255 2223525580
756 1033882141
(Results: Python)
485288831 1675306828
485288832 631338425
10 297746555
16 476387081
255 76041932
756 1033882141