py/emitnative: Let Viper int-indexed code use appropriate operands.
This commit extends the generic ASM API by adding the rest of the
ASM_{LOAD,STORE}[size]_REG_REG_OFFSET macros whenever applicable.

The Viper int-indexed load/store code generator was changed to use those
API functions if they are available, falling back to backend-specific
implementations if possible and ultimately to a generic implementation.

Right now all backends except for x64 implement load16, load32, and
store32 operations (x64 only implements load16).

Signed-off-by: Alessandro Gatti <a.gatti@frob.it>
commit 78ee1bac60
parent e43a3849d9
committed by Damien George
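For context, the change layers the macros in two tiers: each backend header provides size-specific ASM_{LOAD,STORE}{8,16,32}_REG_REG_OFFSET operations where it can, the generic word-sized ASM_{LOAD,STORE}_REG_REG_OFFSET macros delegate to the 32-bit variants, and the Viper emitter tests for a size-specific macro with #ifdef. Below is a small self-contained mock of that layering; the ASM_* names mirror the diff that follows, but the asm_t type, the printing macro bodies, and the register numbers are made up for illustration only:

#include <stdio.h>

typedef struct { const char *name; } asm_t;

// --- what a backend header (e.g. py/asmarm.h) would provide ----------------
#define ASM_LOAD16_REG_REG_OFFSET(as, rd, rb, off) \
    printf("%s: ldrh r%d, [r%d, #%d]\n", (as)->name, (rd), (rb), 2 * (off))
#define ASM_LOAD32_REG_REG_OFFSET(as, rd, rb, off) \
    printf("%s: ldr  r%d, [r%d, #%d]\n", (as)->name, (rd), (rb), 4 * (off))

// --- the generic word-sized macro now just delegates to the 32-bit one -----
#define ASM_LOAD_REG_REG_OFFSET(as, rd, rb, off) \
    ASM_LOAD32_REG_REG_OFFSET((as), (rd), (rb), (off))

// --- how the Viper emitter picks an implementation -------------------------
static void emit_load16_indexed(asm_t *as, int rd, int rb, int index) {
    #ifdef ASM_LOAD16_REG_REG_OFFSET
    // Preferred path: the backend has a 16-bit load with an immediate offset.
    ASM_LOAD16_REG_REG_OFFSET(as, rd, rb, index);
    #else
    // Otherwise the emitter falls back to a backend-specific sequence or,
    // ultimately, to the generic "compute the address, then plain load"
    // sequence kept in emit_native_load_subscr().
    #endif
}

int main(void) {
    asm_t as = { "mock" };
    ASM_LOAD_REG_REG_OFFSET(&as, 0, 1, 2);  // prints: ldr  r0, [r1, #8]
    emit_load16_indexed(&as, 0, 1, 3);      // prints: ldrh r0, [r1, #6]
    return 0;
}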
py/asmarm.h
@@ -208,16 +208,18 @@ void asm_arm_bx_reg(asm_arm_t *as, uint reg_src);
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_arm_sub_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_arm_mul_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) ASM_LOAD32_REG_REG_OFFSET((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_arm_ldrb_reg_reg((as), (reg_dest), (reg_base))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_arm_ldrh_reg_reg((as), (reg_dest), (reg_base))
#define ASM_LOAD16_REG_REG_OFFSET(as, reg_dest, reg_base, uint16_offset) asm_arm_ldrh_reg_reg_offset((as), (reg_dest), (reg_base), 2 * (uint16_offset))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD32_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))

#define ASM_STORE_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_str_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
#define ASM_STORE_REG_REG_OFFSET(as, reg_value, reg_base, word_offset) ASM_STORE32_REG_REG_OFFSET((as), (reg_value), (reg_base), (word_offset))
#define ASM_STORE8_REG_REG(as, reg_value, reg_base) asm_arm_strb_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE16_REG_REG(as, reg_value, reg_base) asm_arm_strh_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE32_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 0)
#define ASM_STORE32_REG_REG_OFFSET(as, reg_value, reg_base, word_offset) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 4 * (word_offset))

#define ASM_LOAD8_REG_REG_REG(as, reg_dest, reg_base, reg_index) asm_arm_ldrb_reg_reg_reg((as), (reg_dest), (reg_base), (reg_index))
#define ASM_LOAD16_REG_REG_REG(as, reg_dest, reg_base, reg_index) asm_arm_ldrh_reg_reg_reg((as), (reg_dest), (reg_base), (reg_index))
py/asmrv32.h
@@ -732,11 +732,12 @@ void asm_rv32_emit_store_reg_reg_offset(asm_rv32_t *state, mp_uint_t source, mp_
#define ASM_JUMP_IF_REG_NONZERO(state, rs, label, bool_test) asm_rv32_emit_jump_if_reg_nonzero(state, rs, label)
#define ASM_JUMP_IF_REG_ZERO(state, rs, label, bool_test) asm_rv32_emit_jump_if_reg_eq(state, rs, ASM_RV32_REG_ZERO, label)
#define ASM_JUMP_REG(state, rs) asm_rv32_opcode_cjr(state, rs)
#define ASM_LOAD_REG_REG_OFFSET(state, rd, rs, offset) ASM_LOAD32_REG_REG_OFFSET(state, rd, rs, offset)
#define ASM_LOAD16_REG_REG_OFFSET(state, rd, rs, offset) asm_rv32_emit_load16_reg_reg_offset(state, rd, rs, offset)
#define ASM_LOAD16_REG_REG(state, rd, rs) asm_rv32_opcode_lhu(state, rd, rs, 0)
#define ASM_LOAD32_REG_REG(state, rd, rs) ASM_LOAD_REG_REG_OFFSET(state, rd, rs, 0)
#define ASM_LOAD32_REG_REG(state, rd, rs) ASM_LOAD32_REG_REG_OFFSET(state, rd, rs, 0)
#define ASM_LOAD32_REG_REG_OFFSET(state, rd, rs, offset) asm_rv32_emit_load_reg_reg_offset(state, rd, rs, offset)
#define ASM_LOAD8_REG_REG(state, rd, rs) asm_rv32_opcode_lbu(state, rd, rs, 0)
#define ASM_LOAD_REG_REG_OFFSET(state, rd, rs, offset) asm_rv32_emit_load_reg_reg_offset(state, rd, rs, offset)
#define ASM_LSL_REG_REG(state, rd, rs) asm_rv32_opcode_sll(state, rd, rd, rs)
#define ASM_LSR_REG_REG(state, rd, rs) asm_rv32_opcode_srl(state, rd, rd, rs)
#define ASM_MOV_LOCAL_REG(state, local, rs) asm_rv32_emit_mov_local_reg(state, local, rs)
@@ -749,10 +750,11 @@ void asm_rv32_emit_store_reg_reg_offset(asm_rv32_t *state, mp_uint_t source, mp_
#define ASM_NEG_REG(state, rd) asm_rv32_opcode_sub(state, rd, ASM_RV32_REG_ZERO, rd)
#define ASM_NOT_REG(state, rd) asm_rv32_opcode_xori(state, rd, rd, -1)
#define ASM_OR_REG_REG(state, rd, rs) asm_rv32_opcode_or(state, rd, rd, rs)
#define ASM_STORE_REG_REG_OFFSET(state, rd, rs, offset) ASM_STORE32_REG_REG_OFFSET(state, rd, rs, offset)
#define ASM_STORE16_REG_REG(state, rs1, rs2) asm_rv32_opcode_sh(state, rs1, rs2, 0)
#define ASM_STORE32_REG_REG(state, rs1, rs2) ASM_STORE_REG_REG_OFFSET(state, rs1, rs2, 0)
#define ASM_STORE32_REG_REG(state, rs1, rs2) ASM_STORE32_REG_REG_OFFSET(state, rs1, rs2, 0)
#define ASM_STORE32_REG_REG_OFFSET(state, rd, rs, offset) asm_rv32_emit_store_reg_reg_offset(state, rd, rs, offset)
#define ASM_STORE8_REG_REG(state, rs1, rs2) asm_rv32_opcode_sb(state, rs1, rs2, 0)
#define ASM_STORE_REG_REG_OFFSET(state, rd, rs, offset) asm_rv32_emit_store_reg_reg_offset(state, rd, rs, offset)
#define ASM_SUB_REG_REG(state, rd, rs) asm_rv32_opcode_sub(state, rd, rd, rs)
#define ASM_XOR_REG_REG(state, rd, rs) asm_rv32_emit_optimised_xor(state, rd, rs)
#define ASM_CLR_REG(state, rd)
py/asmthumb.h
@@ -462,16 +462,18 @@ void asm_thumb_b_rel12(asm_thumb_t *as, int rel);
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_thumb_sub_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_MUL, (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_thumb_ldr_reg_reg_i12_optimised((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) ASM_LOAD32_REG_REG_OFFSET((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrb_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrh_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD16_REG_REG_OFFSET(as, reg_dest, reg_base, uint16_offset) asm_thumb_ldrh_reg_reg_i12_optimised((as), (reg_dest), (reg_base), (uint16_offset))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD32_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_thumb_ldr_reg_reg_i12_optimised((as), (reg_dest), (reg_base), (word_offset))

#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), (word_offset))
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) ASM_STORE32_REG_REG_OFFSET((as), (reg_src), (reg_base), (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_thumb_strb_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_thumb_strh_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), (word_offset))

#define ASM_LOAD8_REG_REG_REG(as, reg_dest, reg_base, reg_index) asm_thumb_ldrb_rlo_rlo_rlo((as), (reg_dest), (reg_base), (reg_index))
#define ASM_LOAD16_REG_REG_REG(as, reg_dest, reg_base, reg_index) \
py/asmx86.h
@@ -200,16 +200,18 @@ void asm_x86_call_ind(asm_x86_t *as, size_t fun_id, mp_uint_t n_args, int temp_r
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x86_sub_r32_r32((as), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x86_mul_r32_r32((as), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x86_mov_mem32_to_r32((as), (reg_base), 4 * (word_offset), (reg_dest))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) ASM_LOAD32_REG_REG_OFFSET((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem8_to_r32zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem16_to_r32zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG_OFFSET(as, reg_dest, reg_base, uint16_offset) asm_x86_mov_mem16_to_r32zx((as), (reg_base), 2 * (uint16_offset), (reg_dest))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem32_to_r32((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD32_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x86_mov_mem32_to_r32((as), (reg_base), 4 * (word_offset), (reg_dest))

#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 4 * (word_offset))
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) ASM_STORE32_REG_REG_OFFSET((as), (reg_src), (reg_base), (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x86_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x86_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 4 * (word_offset))

#endif // GENERIC_ASM_API
py/asmxtensa.h
@@ -413,7 +413,7 @@ void asm_xtensa_l32r(asm_xtensa_t *as, mp_uint_t reg, mp_uint_t label);
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_xtensa_op_sub((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_xtensa_op_mull((as), (reg_dest), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_xtensa_l32i_optimised((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) ASM_LOAD32_REG_REG_OFFSET((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_xtensa_op_l8ui((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_xtensa_op_l16ui((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD16_REG_REG_OFFSET(as, reg_dest, reg_base, uint16_offset) asm_xtensa_op_l16ui((as), (reg_dest), (reg_base), (uint16_offset))
@@ -423,13 +423,14 @@ void asm_xtensa_l32r(asm_xtensa_t *as, mp_uint_t reg, mp_uint_t label);
        asm_xtensa_op_l16ui((as), (reg_dest), (reg_base), 0); \
    } while (0)
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_xtensa_op_l32i_n((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD32_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_xtensa_l32i_optimised((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD32_REG_REG_REG(as, reg_dest, reg_base, reg_index) \
    do { \
        asm_xtensa_op_addx4((as), (reg_base), (reg_index), (reg_base)); \
        asm_xtensa_op_l32i_n((as), (reg_dest), (reg_base), 0); \
    } while (0)

#define ASM_STORE_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_xtensa_s32i_optimised((as), (reg_dest), (reg_base), (word_offset))
#define ASM_STORE_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) ASM_STORE32_REG_REG_OFFSET((as), (reg_dest), (reg_base), (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_xtensa_op_s8i((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_xtensa_op_s16i((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG_REG(as, reg_val, reg_base, reg_index) \
@@ -438,6 +439,7 @@ void asm_xtensa_l32r(asm_xtensa_t *as, mp_uint_t reg, mp_uint_t label);
        asm_xtensa_op_s16i((as), (reg_val), (reg_base), 0); \
    } while (0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_xtensa_op_s32i_n((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_xtensa_s32i_optimised((as), (reg_dest), (reg_base), (word_offset))
#define ASM_STORE32_REG_REG_REG(as, reg_val, reg_base, reg_index) \
    do { \
        asm_xtensa_op_addx4((as), (reg_base), (reg_index), (reg_base)); \
py/emitnative.c
@@ -1537,6 +1537,9 @@ static void emit_native_load_subscr(emit_t *emit) {
        switch (vtype_base) {
            case VTYPE_PTR8: {
                // pointer to 8-bit memory
                #ifdef ASM_LOAD8_REG_REG_OFFSET
                ASM_LOAD8_REG_REG_OFFSET(emit->as, REG_RET, reg_base, index_value);
                #else
                #if N_THUMB
                if (index_value >= 0 && index_value < 32) {
                    asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
@@ -1561,10 +1564,14 @@ static void emit_native_load_subscr(emit_t *emit) {
                    reg_base = reg_index;
                }
                ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
                #endif
                break;
            }
            case VTYPE_PTR16: {
                // pointer to 16-bit memory
                #ifdef ASM_LOAD16_REG_REG_OFFSET
                ASM_LOAD16_REG_REG_OFFSET(emit->as, REG_RET, reg_base, index_value);
                #else
                #if N_THUMB
                if (index_value >= 0 && index_value < 32) {
                    asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
@@ -1589,10 +1596,14 @@ static void emit_native_load_subscr(emit_t *emit) {
                    reg_base = reg_index;
                }
                ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
                #endif
                break;
            }
            case VTYPE_PTR32: {
                // pointer to 32-bit memory
                #ifdef ASM_LOAD32_REG_REG_OFFSET
                ASM_LOAD32_REG_REG_OFFSET(emit->as, REG_RET, reg_base, index_value);
                #else
                #if N_THUMB
                if (index_value >= 0 && index_value < 32) {
                    asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
@@ -1617,6 +1628,7 @@ static void emit_native_load_subscr(emit_t *emit) {
                    reg_base = reg_index;
                }
                ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
                #endif
                break;
            }
            default:
@@ -1809,6 +1821,9 @@ static void emit_native_store_subscr(emit_t *emit) {
        switch (vtype_base) {
            case VTYPE_PTR8: {
                // pointer to 8-bit memory
                #ifdef ASM_STORE8_REG_REG_OFFSET
                ASM_STORE8_REG_REG_OFFSET(emit->as, reg_value, reg_base, index_value);
                #else
                // TODO optimise to use thumb strb r1, [r2, r3]
                #if N_THUMB
                if (index_value >= 0 && index_value < 32) {
@@ -1837,10 +1852,14 @@ static void emit_native_store_subscr(emit_t *emit) {
                    reg_base = reg_index;
                }
                ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
                #endif
                break;
            }
            case VTYPE_PTR16: {
                // pointer to 16-bit memory
                #ifdef ASM_STORE16_REG_REG_OFFSET
                ASM_STORE16_REG_REG_OFFSET(emit->as, reg_value, reg_base, index_value);
                #else
                #if N_THUMB
                if (index_value >= 0 && index_value < 32) {
                    asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
@@ -1864,10 +1883,14 @@ static void emit_native_store_subscr(emit_t *emit) {
                    reg_base = reg_index;
                }
                ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
                #endif
                break;
            }
            case VTYPE_PTR32: {
                // pointer to 32-bit memory
                #ifdef ASM_STORE32_REG_REG_OFFSET
                ASM_STORE32_REG_REG_OFFSET(emit->as, reg_value, reg_base, index_value);
                #else
                #if N_THUMB
                if (index_value >= 0 && index_value < 32) {
                    asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
@@ -1896,6 +1919,7 @@ static void emit_native_store_subscr(emit_t *emit) {
                    reg_base = reg_index;
                }
                ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
                #endif
                break;
            }
            default:
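Where a backend does not define one of the size-specific offset macros, the emitter keeps the generic fallback visible in the context lines above: materialise the scaled index in a register, add the base, and finish with the plain REG_REG load or store. A rough self-contained mock of that fallback for a 16-bit load follows; the ASM_* names mirror the generic API, while the asm_t type, the printing bodies, and the register numbers are illustrative assumptions:

#include <stdio.h>

typedef struct { const char *name; } asm_t;

// Mock generic ASM API pieces used by the fallback path.
#define ASM_MOV_REG_IMM(as, rd, imm)   printf("%s: mov  r%d, #%d\n", (as)->name, (rd), (imm))
#define ASM_ADD_REG_REG(as, rd, rs)    printf("%s: add  r%d, r%d\n", (as)->name, (rd), (rs))
#define ASM_LOAD16_REG_REG(as, rd, rb) printf("%s: ldrh r%d, [r%d]\n", (as)->name, (rd), (rb))

// Generic fallback for a 16-bit load at a constant index when the backend
// offers no ASM_LOAD16_REG_REG_OFFSET: scale the index to a byte offset,
// add it to the base, then do a plain 16-bit load from that address.
static void load16_generic(asm_t *as, int reg_ret, int reg_base, int reg_index, int index_value) {
    ASM_MOV_REG_IMM(as, reg_index, index_value << 1); // byte offset = 2 * index
    ASM_ADD_REG_REG(as, reg_index, reg_base);         // reg_index = base + offset
    ASM_LOAD16_REG_REG(as, reg_ret, reg_index);       // load from (base + 2*index)
}

int main(void) {
    asm_t as = { "mock" };
    load16_generic(&as, 0, 1, 2, 5); // mov r2, #10; add r2, r1; ldrh r0, [r2]
    return 0;
}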