diff options
Diffstat (limited to 'lib/smc/aarch64/asm_smc.S')
-rw-r--r-- | lib/smc/aarch64/asm_smc.S | 73 |
1 file changed, 73 insertions, 0 deletions
diff --git a/lib/smc/aarch64/asm_smc.S b/lib/smc/aarch64/asm_smc.S new file mode 100644 index 0000000..056e5cb --- /dev/null +++ b/lib/smc/aarch64/asm_smc.S @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * Neither the name of ARM nor the names of its contributors may be used + * to endorse or promote products derived from this software without specific + * prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include <asm_macros.S> + + .globl asm_tftf_smc64 + + .section .text, "ax" + + +/* --------------------------------------------------------------------------- + * smc_ret_values asm_tftf_smc64(uint64_t arg0, + * uint64_t arg1, + * uint64_t arg2, + * uint64_t arg3, + * uint64_t arg4, + * uint64_t arg5, + * uint64_t arg6); + * --------------------------------------------------------------------------- + */ +func asm_tftf_smc64 + /* + * According to the AAPCS64, x8 is the indirect result location + * register. It contains the address of the memory block that the caller + * has reserved to hold the result, i.e. the smc_ret_values structure + * in our case. + * x8 might be clobbered across the SMC call so save it on the stack. + * Although x8 contains an 8 byte value, we are allocating 16bytes on the stack + * to respect 16byte stack-alignment. + */ + str x8, [sp, #-16]! + + /* SMC arguments are already stored in x0-x6 */ + smc #0 + + /* Pop x8 into a caller-saved register */ + ldr x9, [sp], #16 + + /* + * Return values are stored in x0-x3, put them in the 'smc_ret_values' + * return structure + */ + stp x0, x1, [x9, #0] + stp x2, x3, [x9, #16] + ret +endfunc asm_tftf_smc64 |