; FIXME: this is implemented in an assembly function because otherwise the xar4
; argument might not actually be passed to this function, as it's unused until
; the USER1 handler itself so the compiler can omit it. With GNU-style extended
; inline assembly and register variables this is not necessary, and this
; function could be implemented in C where it could be inlined and the compiler
; will arrange for the argument to be present in xar4 when the trap instruction
; executes.

        .sect ".text:rt_syscall"

;------------------------------------------------------------------------------
; rt_syscall_0 / rt_syscall_1
; Request a syscall by raising the USER1 software interrupt (trap #20).
; rt_syscall_0: the syscall number arrives in al and is moved into ar4, then
;               execution falls through into rt_syscall_1.
; rt_syscall_1: expects the value to already be in xar4 (see FIXME above) when
;               the trap executes; the USER1 handler reads it from there.
;------------------------------------------------------------------------------
        .global rt_syscall_0
        .global rt_syscall_1
        .asmfunc
rt_syscall_0:
        ; The zero argument version passes the syscall number in al, so move it here.
        mov ar4, al
rt_syscall_1:
        trap #20                        ; Trigger the USER1 software interrupt.
        lretr
        .endasmfunc

; The ABI specifies that arg1 and arg2 are passed on the stack, but the
; interrupt stack frame will not have these arguments in the right place, so
; move them to volatile registers first and then pass them as stack arguments
; to rt_syscall_run in the user1 handler.
;------------------------------------------------------------------------------
; rt_syscall_2 / rt_syscall_3
; Same trap as rt_syscall_0/1, but for syscalls whose last argument(s) are on
; the caller's stack. Copy the stack arguments into the volatile registers
; xar6 (arg1, at *-sp[4]) and xar7 (arg2, at *-sp[6]) before trapping so the
; USER1 handler can re-push them as stack arguments for rt_syscall_run.
; rt_syscall_3 loads the extra argument and falls through into rt_syscall_2.
;------------------------------------------------------------------------------
        .global rt_syscall_2
        .global rt_syscall_3
        .asmfunc
rt_syscall_3:
        movl xar7, *-sp[6]              ; xar7 = arg2 (second stack argument)
rt_syscall_2:
        movl xar6, *-sp[4]              ; xar6 = arg1 (first stack argument)
        trap #20                        ; Trigger the USER1 software interrupt.
        lretr
        .endasmfunc

;------------------------------------------------------------------------------
; push_volatile: save the volatile (caller-saved) register set on the stack.
; The layout must mirror pop_volatile exactly, and the total size must match
; the addb sp, #N adjustments in rt_user1_handler below. FPU registers are
; only saved when the corresponding FPU32/FPU64 build symbols are set.
;------------------------------------------------------------------------------
push_volatile .macro
        push ar1h:ar0h                  ; ar0h/ar1h can only be pushed as a pair
        push xar4
        push xar5
        push xar6
        push xar7
        push xt
        .if .TMS320C2800_FPU32
        push rb
        mov32 *sp++, r0h
        mov32 *sp++, r1h
        mov32 *sp++, r2h
        mov32 *sp++, r3h
        .if .TMS320C2800_FPU64
        mov32 *sp++, r0l
        mov32 *sp++, r1l
        mov32 *sp++, r2l
        mov32 *sp++, r3l
        .endif ; .TMS320C2800_FPU64
        .endif ; .TMS320C2800_FPU32
        .endm

;------------------------------------------------------------------------------
; pop_volatile: restore the volatile register set in the exact reverse order
; of push_volatile.
;------------------------------------------------------------------------------
pop_volatile .macro
        .if .TMS320C2800_FPU32
        .if .TMS320C2800_FPU64
        mov32 r3l, *--sp
        mov32 r2l, *--sp
        mov32 r1l, *--sp
        mov32 r0l, *--sp
        .endif ; .TMS320C2800_FPU64
        mov32 r3h, *--sp
        mov32 r2h, *--sp
        mov32 r1h, *--sp
        mov32 r0h, *--sp
        pop rb
        .endif ; .TMS320C2800_FPU32
        pop xt
        pop xar7
        pop xar6
        pop xar5
        pop xar4
        pop ar1h:ar0h
        .endm

;------------------------------------------------------------------------------
; push_non_volatile: save the non-volatile (callee-saved) register set on the
; stack. The layout must mirror pop_non_volatile exactly.
;------------------------------------------------------------------------------
push_non_volatile .macro
        push xar2
        push xar3
        push rpc
        .if .TMS320C2800_FPU32
        mov32 *sp++, r4h
        mov32 *sp++, r5h
        mov32 *sp++, r6h
        mov32 *sp++, r7h
        .if .TMS320C2800_FPU64
        mov32 *sp++, r4l
        mov32 *sp++, r5l
        mov32 *sp++, r6l
        mov32 *sp++, r7l
        .endif ; .TMS320C2800_FPU64
        mov32 *sp++, stf
        .endif ; .TMS320C2800_FPU32
        .endm

;------------------------------------------------------------------------------
; pop_non_volatile: restore the non-volatile register set in the exact reverse
; order of push_non_volatile.
;------------------------------------------------------------------------------
pop_non_volatile .macro
        .if .TMS320C2800_FPU32
        mov32 stf, *--sp
        .if .TMS320C2800_FPU64
        mov32 r7l, *--sp
        mov32 r6l, *--sp
        mov32 r5l, *--sp
        mov32 r4l, *--sp
        .endif ; .TMS320C2800_FPU64
        mov32 r7h, *--sp
        mov32 r6h, *--sp
        mov32 r5h, *--sp
        mov32 r4h, *--sp
        .endif ; .TMS320C2800_FPU32
        pop rpc
        pop xar3
        pop xar2
        .endm

;------------------------------------------------------------------------------
; swapcontext: save the non-volatile context on the current stack, store the
; resulting stack pointer through *rt_context_prev so the scheduler can resume
; the outgoing task later, switch sp to the new task's stack (low half of
; xar4), and restore the incoming task's non-volatile context from it.
;------------------------------------------------------------------------------
swapcontext .macro
        push_non_volatile
        ; Store the new stack pointer with the saved context.
        .ref rt_context_prev
        movl xar6, #rt_context_prev
        movl xar6, *xar6                ; xar6 = *rt_context_prev (save slot)
        mov al, sp
        mov *xar6, al                   ; record outgoing task's sp
        mov sp, ar4                     ; Switch to the new task's stack.
        pop_non_volatile
        .endm

        .sect ".text:rt_user1_handler"
        .global rt_user1_handler
        .asmfunc
        .ref rt_syscall_run
;------------------------------------------------------------------------------
; USER1 (trap #20) handler. Dispatches to rt_syscall_run(arg1, arg2) with the
; arguments re-pushed from xar6/xar7, then context-switches if rt_syscall_run
; left a non-zero new-context value in xar4.
;------------------------------------------------------------------------------
rt_user1_handler:
        and ier, #0xbfff                ; Mask the datalog interrupt.
        eint                            ; allow nested interrupts while dispatching
        asp                             ; align the stack pointer
        spm 0                           ; no product shift; establish C-callable state
        clrc page0, ovm
        clrc amode
        ; NOTE: the caller of rt_syscall is expecting to have its volatile context
        ; clobbered, so there's no need to save and restore it before calling a
        ; function.
        push xar7                       ; Pass arg2 on the stack.
        push xar6                       ; Pass arg1 on the stack.
        lcr rt_syscall_run
        subb sp, #4                     ; drop the two 32-bit stack arguments
        nasp                            ; undo the earlier asp alignment
        movl acc, xar4
        bf $0, eq                       ; If there's no new context to switch to, return early.
        ; Allocate space on the stack for the rest of the volatile context, but
        ; saving it is not necessary. ar1h:ar0h is special because ar1h is
        ; non-volatile, ar0h is volatile, but they can only be pushed together.
        ; ar0h is already clobbered by not saving it before calling rt_syscall_run,
        ; but ar1h must be saved here now that we know a context switch is needed.
        push ar1h:ar0h
        ; These sizes must equal the push_volatile layout minus the ar1h:ar0h
        ; pair pushed just above.
        .if .TMS320C2800_FPU32
        .if .TMS320C2800_FPU64
        addb sp, #28 ; integer + sp + dp
        .else ; !.TMS320C2800_FPU64
        addb sp, #20 ; integer + sp
        .endif ; .TMS320C2800_FPU64
        .else ; !.TMS320C2800_FPU32
        addb sp, #10 ; integer only
        .endif ; .TMS320C2800_FPU32
        swapcontext
        pop_volatile                    ; restore the incoming task's volatile context
$0:
        dint                            ; Errata 4.1.2, caution while using nested interrupts.
        iret
        .endasmfunc

        .sect ".text:rt_datalog_handler"
        .global rt_datalog_handler
        .asmfunc
        .ref rt_syscall_run_pending
;------------------------------------------------------------------------------
; Datalog interrupt handler. Unlike the USER1 handler this does not arrive via
; rt_syscall, so the interrupted code's full volatile context is saved with
; push_volatile before calling rt_syscall_run_pending and restored on both the
; switch and no-switch paths. Context-switches if xar4 comes back non-zero.
;------------------------------------------------------------------------------
rt_datalog_handler:
        eint                            ; Re-enable interrupts. (This interrupt is masked still.)
        push_volatile
        asp                             ; align the stack pointer
        spm 0
        clrc page0, ovm
        clrc amode
        lcr rt_syscall_run_pending
        nasp
        movl acc, xar4
        bf $0, eq                       ; If there's no new context to switch to, return early.
        swapcontext
$0:
        pop_volatile
        dint                            ; Errata 4.1.2, caution while using nested interrupts.
        iret
        .endasmfunc

        .sect ".text:rt_start"
        .global rt_start
        .asmfunc
        .ref rt_start_context
;------------------------------------------------------------------------------
; rt_start: fetch the first task's saved context (rt_start_context leaves its
; stack pointer in ar4), switch to that stack, restore the full saved context,
; and enter the task via iret.
;------------------------------------------------------------------------------
rt_start:
        lcr rt_start_context
        mov sp, ar4                     ; switch to the first task's stack
        pop_non_volatile
        pop_volatile
        iret
        .endasmfunc

        .sect ".text:rt_task_entry"
        .global rt_task_entry
        .asmfunc
;------------------------------------------------------------------------------
; rt_task_entry: common task trampoline. Calls the task function whose address
; is in xar4; if the task function ever returns, branch to rt_task_exit.
;------------------------------------------------------------------------------
rt_task_entry:
        lcr *xar4                       ; call the task's entry function
        .ref rt_task_exit
        lb rt_task_exit                 ; task returned; run the exit path
        .endasmfunc