Here is a program that reads decimal ASCII digits one by one and converts them into an integer. The result is accumulated in the EDI register:
global _start
%macro kernel 4
    mov eax, %1
    mov ebx, %2
    mov ecx, %3
    mov edx, %4
    int 80h
%endmacro
section .bss
    symbol resb 1
section .text
_start:
    mov esi, 10
    xor edi, edi
.loop:
    kernel 3, 0, symbol, 1 ; load 1 char from STDIN into symbol
    test eax, eax          ; nothing loaded - EOF
    jz .quit
    xor ebx, ebx
    mov bl, [symbol]
    sub bl, '0'
    cmp bl, 9
    jg .quit               ; not a number
    mov eax, edi           ; previously accumulated number
    mul esi                ; eax *= 10
    lea edi, [eax + ebx]
    jmp .loop
.quit:
    mov eax, 1             ; sys_exit
    mov ebx, edi           ; the accumulated value becomes the exit status
    int 80h
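For context, the loop accumulates edi = edi*10 + digit, so for input 42 EDI would go 0, 4, 42. The kernel macro only loads the four syscall registers, so the read in the loop expands to an ordinary sys_read (my expansion by hand, not actual nasm output):

    mov eax, 3      ; sys_read
    mov ebx, 0      ; fd 0 = stdin
    mov ecx, symbol ; destination buffer
    mov edx, 1      ; read a single byte
    int 80h

The final int 80h at .quit is sys_exit, so whatever ends up in EDI is handed back to the shell as the exit status (the shell only sees its low 8 bits).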
I assemble and link it:
$ nasm -g -f elf32 st3-18a.asm
$ ld -g -m elf_i386 st3-18a.o -o st3-18a
$ ./st3-18a
2[Enter]
Ctrl-d
When I run this code in gdb step by step, everything is correct, and the result stored in EDI at the end is 2 (a sketch of the gdb session is below). But when I run it without a debugger and echo the program's return value:
$ ./st3-18a
2[Enter]
Ctrl-d
$ echo $?
238
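For reference, the gdb session I mean is roughly this (commands only; I repeat stepi through the loop and check EDI before the exit syscall):

$ gdb ./st3-18a
(gdb) break _start
(gdb) run
(gdb) stepi
(gdb) info registers edi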
Why is the exit status 238 (0xEE)? What is wrong?