I'm learning low-level programming, and I've tried to write and run a simple program that prints the ASCII code of an entered character.
section .bss
    i   resb 1
    o   resb 8
    ; i for input, o for output
section .text
    global _start
_start:
    mov eax, 3
    mov ebx, 0
    mov ecx, i
    mov edx, 1
    int 0x80
    ; get one char
    mov ecx, o
    add ecx, 7
    mov edx, 0
    ; point ECX at the last of the 8 bytes labeled o
    while:
        mov al, [i]
        mov bl, 0
        cmp al, bl
        jz  end
        mov bl, 10
        div bl
        add ah, '0'
        mov [i], al
        mov [ecx], ah
        dec ecx
        inc edx
        jmp while
    end:
    ; the loop above converts the code to decimal digits
    mov eax, 4
    mov ebx, 1
    int 0x80
    ; ECX is ready - it was decremented in the loop down to the highest digit
    ; and so is EDX - it was incremented once per digit, so it holds the count
    mov eax, 1
    mov ebx, 0
    int 0x80
It assembles (nasm) and links (ld) successfully, but at runtime:
Floating point exception (core dumped)
I already fixed one bug (I had somehow put 0 instead of 10 as the divisor), and that version worked well, but it was only an intermediate test that printed the digits in reverse order. Then I reserved an 8-byte field to accumulate the loop's results from right to left, with the write interrupt (4, 1, .., ..) after end, which brought me back to some funny CPU stuff.
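For context on the message itself: no floating point is involved. DIV raises the CPU's divide-error exception (#DE) when the quotient does not fit in the destination register (AL for an 8-bit divisor), and Linux delivers that as SIGFPE, which the shell prints as "Floating point exception". Here is a minimal standalone snippet, separate from the program above and shown purely as an illustration, that triggers the same message by overflowing an 8-bit divide (assuming a 32-bit build, e.g. nasm -f elf32 and ld -m elf_i386):

section .text
    global _start
_start:
    mov ax, 0x1234      ; for an 8-bit divisor the dividend is all of AX
    mov bl, 2           ; 0x1234 / 2 = 0x091A, which does not fit in AL
    div bl              ; divide error (#DE) -> SIGFPE -> "Floating point exception"
    mov eax, 1          ; sys_exit (never reached)
    mov ebx, 0
    int 0x80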
Googling gave no results. What's wrong? Thanks in advance.
UPD: if you're browsing SOF questions on this subject because of serious trouble with a core-dump exception, don't waste your time on this one; in my case the epic (and already solved) problem was inside my own head: I made some school-level mistake.
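For anyone who reads on anyway, the divide error in the code above comes from div bl: it divides the whole 16-bit AX by BL, but the loop reloads only AL each pass, so from the second iteration onward AH still holds the previous remainder plus '0'. Once AH is non-zero the quotient no longer fits in AL and the CPU raises a divide error. Below is a sketch of one way to repair it (clearing AH before each division, and stepping ECX back onto the most significant digit before the write, because the loop decrements ECX once more after the last store). This is my reconstruction, not necessarily the exact fix that was applied:

section .bss
    i   resb 1              ; input character
    o   resb 8              ; output digits, filled right to left
section .text
    global _start
_start:
    mov eax, 3              ; sys_read(stdin, i, 1)
    mov ebx, 0
    mov ecx, i
    mov edx, 1
    int 0x80
    mov ecx, o
    add ecx, 7              ; ECX -> last byte of the 8-byte buffer
    mov edx, 0              ; digit counter
    while:
        mov al, [i]
        cmp al, 0
        jz  end
        xor ah, ah          ; clear AH: div bl divides all of AX, not just AL
        mov bl, 10
        div bl              ; AL = quotient, AH = remainder
        add ah, '0'         ; remainder -> ASCII digit
        mov [i], al         ; keep the quotient for the next pass
        mov [ecx], ah       ; store the digit right to left
        dec ecx
        inc edx
        jmp while
    end:
    inc ecx                 ; step back onto the most significant digit
    mov eax, 4              ; sys_write(stdout, ECX, EDX bytes)
    mov ebx, 1
    int 0x80
    mov eax, 1              ; sys_exit
    mov ebx, 0
    int 0x80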