/* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 3 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 * 02110-1301 USA
 */
/* This file is included in vm_engine.c */
/* Fetch the top 1/2/3 operands from the VM value stack into local SCM
   variables, popping all but the first slot (the result of a "function"
   instruction is written back over that slot by RETURN).  NULLSTACK
   clears the vacated slots so the GC does not see stale references.  */
#define ARGS1(a1)       SCM a1 = sp[0];
#define ARGS2(a1,a2)    SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

/* Store X in the result slot and dispatch to the next instruction.  */
#define RETURN(x)       do { *sp = x; NEXT; } while (0)
32 VM_DEFINE_FUNCTION (128, not, "not", 1)
35 RETURN (scm_from_bool (scm_is_false (x
)));
38 VM_DEFINE_FUNCTION (129, not_not
, "not-not", 1)
41 RETURN (scm_from_bool (!scm_is_false (x
)));
44 VM_DEFINE_FUNCTION (130, eq
, "eq?", 2)
47 RETURN (scm_from_bool (scm_is_eq (x
, y
)));
50 VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
53 RETURN (scm_from_bool (!scm_is_eq (x
, y
)));
56 VM_DEFINE_FUNCTION (132, nullp
, "null?", 1)
59 RETURN (scm_from_bool (scm_is_null (x
)));
62 VM_DEFINE_FUNCTION (133, not_nullp
, "not-null?", 1)
65 RETURN (scm_from_bool (!scm_is_null (x
)));
68 VM_DEFINE_FUNCTION (134, eqv
, "eqv?", 2)
73 if (SCM_IMP (x
) || SCM_IMP (y
))
76 RETURN (scm_eqv_p (x
, y
));
79 VM_DEFINE_FUNCTION (135, equal
, "equal?", 2)
84 if (SCM_IMP (x
) || SCM_IMP (y
))
87 RETURN (scm_equal_p (x
, y
));
90 VM_DEFINE_FUNCTION (136, pairp
, "pair?", 1)
93 RETURN (scm_from_bool (scm_is_pair (x
)));
96 VM_DEFINE_FUNCTION (137, listp
, "list?", 1)
99 RETURN (scm_from_bool (scm_ilength (x
) >= 0));
102 VM_DEFINE_FUNCTION (138, symbolp
, "symbol?", 1)
105 RETURN (scm_from_bool (scm_is_symbol (x
)));
108 VM_DEFINE_FUNCTION (139, vectorp
, "vector?", 1)
111 RETURN (scm_from_bool (SCM_I_IS_VECTOR (x
)));
119 VM_DEFINE_FUNCTION (140, cons
, "cons", 2)
126 #define VM_VALIDATE_CONS(x, proc) \
127 VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))
129 VM_DEFINE_FUNCTION (141, car
, "car", 1)
132 VM_VALIDATE_CONS (x
, "car");
133 RETURN (SCM_CAR (x
));
136 VM_DEFINE_FUNCTION (142, cdr
, "cdr", 1)
139 VM_VALIDATE_CONS (x
, "cdr");
140 RETURN (SCM_CDR (x
));
143 VM_DEFINE_INSTRUCTION (143, set_car
, "set-car!", 0, 2, 0)
147 VM_VALIDATE_CONS (x
, "set-car!");
152 VM_DEFINE_INSTRUCTION (144, set_cdr
, "set-cdr!", 0, 2, 0)
156 VM_VALIDATE_CONS (x
, "set-cdr!");
163 * Numeric relational tests
167 #define REL(crel,srel) \
170 if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
171 RETURN (scm_from_bool (((scm_t_signed_bits) SCM_UNPACK (x)) \
172 crel ((scm_t_signed_bits) SCM_UNPACK (y)))); \
174 RETURN (srel (x, y)); \
177 VM_DEFINE_FUNCTION (145, ee
, "ee?", 2)
179 REL (==, scm_num_eq_p
);
182 VM_DEFINE_FUNCTION (146, lt
, "lt?", 2)
187 VM_DEFINE_FUNCTION (147, le
, "le?", 2)
192 VM_DEFINE_FUNCTION (148, gt
, "gt?", 2)
197 VM_DEFINE_FUNCTION (149, ge
, "ge?", 2)
/*
 * Numeric functions
 */

/* The maximum/minimum tagged integers.  */
#define INUM_MAX \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
#define INUM_MIN \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))
/* Distance between two consecutive tagged inums.  */
#define INUM_STEP                                \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_INUM1)    \
   - (scm_t_signed_bits) SCM_UNPACK (SCM_INUM0))

/* Binary arithmetic: compute in a 64-bit temporary with the C operator
   CFUNC when both operands are inums and the result still fits a
   fixnum; otherwise sync and call the generic SFUNC.  */
#define FUNC2(CFUNC,SFUNC)                              \
{                                                       \
  ARGS2 (x, y);                                         \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))               \
    {                                                   \
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))                              \
        RETURN (SCM_I_MAKINUM (n));                     \
    }                                                   \
  SYNC_REGISTER ();                                     \
  RETURN (SFUNC (x, y));                                \
}
233 /* Assembly tagged integer arithmetic routines. This code uses the
234 `asm goto' feature introduced in GCC 4.5. */
236 #if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)
238 /* The macros below check the CPU's overflow flag to improve fixnum
239 arithmetic. The %rcx register is explicitly clobbered because `asm
240 goto' can't have outputs, in which case the `r' constraint could be
241 used to let the register allocator choose a register.
243 TODO: Use `cold' label attribute in GCC 4.6.
244 http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */
246 # define ASM_ADD(x, y) \
248 asm volatile goto ("mov %1, %%rcx; " \
249 "test %[tag], %%cl; je %l[slow_add]; " \
250 "test %[tag], %0; je %l[slow_add]; " \
251 "sub %[tag], %%rcx; " \
252 "add %0, %%rcx; jo %l[slow_add]; " \
253 "mov %%rcx, (%[vsp])\n" \
255 : "r" (x), "r" (y), \
256 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
257 : "rcx", "memory", "cc" \
264 # define ASM_SUB(x, y) \
266 asm volatile goto ("mov %0, %%rcx; " \
267 "test %[tag], %%cl; je %l[slow_sub]; " \
268 "test %[tag], %1; je %l[slow_sub]; " \
269 "sub %1, %%rcx; jo %l[slow_sub]; " \
270 "add %[tag], %%rcx; " \
271 "mov %%rcx, (%[vsp])\n" \
273 : "r" (x), "r" (y), \
274 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
275 : "rcx", "memory", "cc" \
285 VM_DEFINE_FUNCTION (150, add
, "add", 2)
293 RETURN (scm_sum (x
, y
));
297 VM_DEFINE_FUNCTION (151, add1
, "add1", 1)
301 /* Check for overflow. We must avoid overflow in the signed
302 addition below, even if X is not an inum. */
303 if (SCM_LIKELY ((scm_t_signed_bits
) SCM_UNPACK (x
) <= INUM_MAX
- INUM_STEP
))
307 /* Add 1 to the integer without untagging. */
308 result
= SCM_PACK ((scm_t_signed_bits
) SCM_UNPACK (x
) + INUM_STEP
);
310 if (SCM_LIKELY (SCM_I_INUMP (result
)))
315 RETURN (scm_sum (x
, SCM_I_MAKINUM (1)));
318 VM_DEFINE_FUNCTION (152, sub
, "sub", 2)
321 FUNC2 (-, scm_difference
);
326 RETURN (scm_difference (x
, y
));
330 VM_DEFINE_FUNCTION (153, sub1
, "sub1", 1)
334 /* Check for overflow. We must avoid overflow in the signed
335 subtraction below, even if X is not an inum. */
336 if (SCM_LIKELY ((scm_t_signed_bits
) SCM_UNPACK (x
) >= INUM_MIN
+ INUM_STEP
))
340 /* Substract 1 from the integer without untagging. */
341 result
= SCM_PACK ((scm_t_signed_bits
) SCM_UNPACK (x
) - INUM_STEP
);
343 if (SCM_LIKELY (SCM_I_INUMP (result
)))
348 RETURN (scm_difference (x
, SCM_I_MAKINUM (1)));
354 VM_DEFINE_FUNCTION (154, mul
, "mul", 2)
358 RETURN (scm_product (x
, y
));
361 VM_DEFINE_FUNCTION (155, div
, "div", 2)
365 RETURN (scm_divide (x
, y
));
368 VM_DEFINE_FUNCTION (156, quo
, "quo", 2)
372 RETURN (scm_quotient (x
, y
));
375 VM_DEFINE_FUNCTION (157, rem
, "rem", 2)
379 RETURN (scm_remainder (x
, y
));
382 VM_DEFINE_FUNCTION (158, mod
, "mod", 2)
386 RETURN (scm_modulo (x
, y
));
389 VM_DEFINE_FUNCTION (159, ash
, "ash", 2)
392 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
394 if (SCM_I_INUM (y
) < 0)
396 /* Right shift, will be a fixnum. */
397 if (SCM_I_INUM (y
) > -SCM_I_FIXNUM_BIT
)
398 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) >> -SCM_I_INUM (y
)));
402 /* Left shift. See comments in scm_ash. */
404 scm_t_signed_bits nn
, bits_to_shift
;
407 bits_to_shift
= SCM_I_INUM (y
);
409 if (bits_to_shift
< SCM_I_FIXNUM_BIT
-1
411 (SCM_SRS (nn
, (SCM_I_FIXNUM_BIT
-1 - bits_to_shift
)) + 1)
413 RETURN (SCM_I_MAKINUM (nn
<< bits_to_shift
));
419 RETURN (scm_ash (x
, y
));
422 VM_DEFINE_FUNCTION (160, logand
, "logand", 2)
425 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
426 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) & SCM_I_INUM (y
)));
428 RETURN (scm_logand (x
, y
));
431 VM_DEFINE_FUNCTION (161, logior
, "logior", 2)
434 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
435 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) | SCM_I_INUM (y
)));
437 RETURN (scm_logior (x
, y
));
440 VM_DEFINE_FUNCTION (162, logxor
, "logxor", 2)
443 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
444 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) ^ SCM_I_INUM (y
)));
446 RETURN (scm_logxor (x
, y
));
454 VM_DEFINE_FUNCTION (163, vector_ref
, "vector-ref", 2)
456 scm_t_signed_bits i
= 0;
458 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect
)
460 && ((i
= SCM_I_INUM (idx
)) >= 0)
461 && i
< SCM_I_VECTOR_LENGTH (vect
)))
462 RETURN (SCM_I_VECTOR_ELTS (vect
)[i
]);
466 RETURN (scm_vector_ref (vect
, idx
));
470 VM_DEFINE_INSTRUCTION (164, vector_set
, "vector-set", 0, 3, 0)
472 scm_t_signed_bits i
= 0;
474 POP3 (val
, idx
, vect
);
475 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect
)
477 && ((i
= SCM_I_INUM (idx
)) >= 0)
478 && i
< SCM_I_VECTOR_LENGTH (vect
)))
479 SCM_I_VECTOR_WELTS (vect
)[i
] = val
;
483 scm_vector_set_x (vect
, idx
, val
);
488 VM_DEFINE_INSTRUCTION (165, make_array
, "make-array", 3, -1, 1)
494 len
= (len
<< 8) + FETCH ();
495 len
= (len
<< 8) + FETCH ();
498 PRE_CHECK_UNDERFLOW (len
);
499 ret
= scm_from_contiguous_array (shape
, sp
- len
+ 1, len
);
509 #define VM_VALIDATE_STRUCT(obj, proc) \
510 VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_struct (proc, obj))
512 VM_DEFINE_FUNCTION (166, struct_p
, "struct?", 1)
515 RETURN (scm_from_bool (SCM_STRUCTP (obj
)));
518 VM_DEFINE_FUNCTION (167, struct_vtable
, "struct-vtable", 1)
521 VM_VALIDATE_STRUCT (obj
, "struct_vtable");
522 RETURN (SCM_STRUCT_VTABLE (obj
));
525 VM_DEFINE_INSTRUCTION (168, make_struct
, "make-struct", 2, -1, 1)
527 unsigned h
= FETCH ();
528 unsigned l
= FETCH ();
529 scm_t_bits n
= ((h
<< 8U) + l
);
530 SCM vtable
= sp
[-(n
- 1)];
531 const SCM
*inits
= sp
- n
+ 2;
536 if (SCM_LIKELY (SCM_STRUCTP (vtable
)
537 && SCM_VTABLE_FLAG_IS_SET (vtable
, SCM_VTABLE_FLAG_SIMPLE
)
538 && (SCM_STRUCT_DATA_REF (vtable
, scm_vtable_index_size
) + 1
540 && !SCM_VTABLE_INSTANCE_FINALIZER (vtable
)))
542 /* Verily, we are making a simple struct with the right number of
543 initializers, and no finalizer. */
544 ret
= scm_words ((scm_t_bits
)SCM_STRUCT_DATA (vtable
) | scm_tc3_struct
,
546 SCM_SET_CELL_WORD_1 (ret
, (scm_t_bits
)SCM_CELL_OBJECT_LOC (ret
, 2));
547 memcpy (SCM_STRUCT_DATA (ret
), inits
, (n
- 1) * sizeof (SCM
));
550 ret
= scm_c_make_structv (vtable
, 0, n
- 1, (scm_t_bits
*) inits
);
558 VM_DEFINE_FUNCTION (169, struct_ref
, "struct-ref", 2)
562 if (SCM_LIKELY (SCM_STRUCTP (obj
)
563 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj
,
564 SCM_VTABLE_FLAG_SIMPLE
)
565 && SCM_I_INUMP (pos
)))
568 scm_t_bits index
, len
;
570 /* True, an inum is a signed value, but cast to unsigned it will
571 certainly be more than the length, so we will fall through if
572 index is negative. */
573 index
= SCM_I_INUM (pos
);
574 vtable
= SCM_STRUCT_VTABLE (obj
);
575 len
= SCM_STRUCT_DATA_REF (vtable
, scm_vtable_index_size
);
577 if (SCM_LIKELY (index
< len
))
579 scm_t_bits
*data
= SCM_STRUCT_DATA (obj
);
580 RETURN (SCM_PACK (data
[index
]));
585 RETURN (scm_struct_ref (obj
, pos
));
588 VM_DEFINE_FUNCTION (170, struct_set
, "struct-set", 3)
590 ARGS3 (obj
, pos
, val
);
592 if (SCM_LIKELY (SCM_STRUCTP (obj
)
593 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj
,
594 SCM_VTABLE_FLAG_SIMPLE
)
595 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj
,
596 SCM_VTABLE_FLAG_SIMPLE_RW
)
597 && SCM_I_INUMP (pos
)))
600 scm_t_bits index
, len
;
602 /* See above regarding index being >= 0. */
603 index
= SCM_I_INUM (pos
);
604 vtable
= SCM_STRUCT_VTABLE (obj
);
605 len
= SCM_STRUCT_DATA_REF (vtable
, scm_vtable_index_size
);
606 if (SCM_LIKELY (index
< len
))
608 scm_t_bits
*data
= SCM_STRUCT_DATA (obj
);
609 data
[index
] = SCM_UNPACK (val
);
615 RETURN (scm_struct_set_x (obj
, pos
, val
));
622 VM_DEFINE_FUNCTION (171, class_of
, "class-of", 1)
625 if (SCM_INSTANCEP (obj
))
626 RETURN (SCM_CLASS_OF (obj
));
628 RETURN (scm_class_of (obj
));
631 /* FIXME: No checking whatsoever. */
632 VM_DEFINE_FUNCTION (172, slot_ref
, "slot-ref", 2)
635 ARGS2 (instance
, idx
);
636 slot
= SCM_I_INUM (idx
);
637 RETURN (SCM_PACK (SCM_STRUCT_DATA (instance
) [slot
]));
640 /* FIXME: No checking whatsoever. */
641 VM_DEFINE_INSTRUCTION (173, slot_set
, "slot-set", 0, 3, 0)
643 SCM instance
, idx
, val
;
645 POP3 (val
, idx
, instance
);
646 slot
= SCM_I_INUM (idx
);
647 SCM_STRUCT_DATA (instance
) [slot
] = SCM_UNPACK (val
);
/*
 * Bytevectors
 */

/* Raise a wrong-type error unless X is a bytevector.  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)                 \
  VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))

/* Endianness-dispatching reader: pop the endianness argument; if it is
   the native one, jump to the corresponding native-ref opcode,
   otherwise call the generic accessor.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_ref);                             \
  {                                                                     \
    ARGS2 (bv, idx);                                                    \
    SYNC_REGISTER ();                                                   \
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));      \
  }                                                                     \
}

/* Return true (non-zero) if PTR has suitable alignment for TYPE.  */
#define ALIGNED_P(ptr, type)                    \
  ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
675 VM_DEFINE_FUNCTION (174, bv_u16_ref
, "bv-u16-ref", 3)
676 BV_REF_WITH_ENDIANNESS (u16
, u16
)
677 VM_DEFINE_FUNCTION (175, bv_s16_ref
, "bv-s16-ref", 3)
678 BV_REF_WITH_ENDIANNESS (s16
, s16
)
679 VM_DEFINE_FUNCTION (176, bv_u32_ref
, "bv-u32-ref", 3)
680 BV_REF_WITH_ENDIANNESS (u32
, u32
)
681 VM_DEFINE_FUNCTION (177, bv_s32_ref
, "bv-s32-ref", 3)
682 BV_REF_WITH_ENDIANNESS (s32
, s32
)
683 VM_DEFINE_FUNCTION (178, bv_u64_ref
, "bv-u64-ref", 3)
684 BV_REF_WITH_ENDIANNESS (u64
, u64
)
685 VM_DEFINE_FUNCTION (179, bv_s64_ref
, "bv-s64-ref", 3)
686 BV_REF_WITH_ENDIANNESS (s64
, s64
)
687 VM_DEFINE_FUNCTION (180, bv_f32_ref
, "bv-f32-ref", 3)
688 BV_REF_WITH_ENDIANNESS (f32
, ieee_single
)
689 VM_DEFINE_FUNCTION (181, bv_f64_ref
, "bv-f64-ref", 3)
690 BV_REF_WITH_ENDIANNESS (f64
, ieee_double
)
692 #undef BV_REF_WITH_ENDIANNESS
/* Native-order reader for integer types that always fit a fixnum:
   inline load when the index is an in-range inum and the pointer is
   suitably aligned; generic accessor otherwise.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)                   \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    RETURN (SCM_I_MAKINUM (*int_ptr));                                  \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));            \
    }                                                                   \
}

/* Native-order reader for integer types that may not fit a fixnum:
   like BV_FIXABLE_INT_REF, but box the value when necessary.  */
#define BV_INT_REF(stem, type, size)                                    \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    {                                                                   \
      scm_t_ ## type x = *int_ptr;                                      \
      if (SCM_FIXABLE (x))                                              \
        RETURN (SCM_I_MAKINUM (x));                                     \
      else                                                              \
        {                                                               \
          SYNC_REGISTER ();                                             \
          RETURN (scm_from_ ## type (x));                               \
        }                                                               \
    }                                                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));        \
    }                                                                   \
}

/* Native-order reader for IEEE floating-point types.  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)                         \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const type *float_ptr;                                                \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  SYNC_REGISTER ();                                                     \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    RETURN (scm_from_double (*float_ptr));                              \
  else                                                                  \
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));       \
}
767 VM_DEFINE_FUNCTION (182, bv_u8_ref
, "bv-u8-ref", 2)
768 BV_FIXABLE_INT_REF (u8
, u8
, uint8
, 1)
769 VM_DEFINE_FUNCTION (183, bv_s8_ref
, "bv-s8-ref", 2)
770 BV_FIXABLE_INT_REF (s8
, s8
, int8
, 1)
771 VM_DEFINE_FUNCTION (184, bv_u16_native_ref
, "bv-u16-native-ref", 2)
772 BV_FIXABLE_INT_REF (u16
, u16_native
, uint16
, 2)
773 VM_DEFINE_FUNCTION (185, bv_s16_native_ref
, "bv-s16-native-ref", 2)
774 BV_FIXABLE_INT_REF (s16
, s16_native
, int16
, 2)
775 VM_DEFINE_FUNCTION (186, bv_u32_native_ref
, "bv-u32-native-ref", 2)
776 #if SIZEOF_VOID_P > 4
777 BV_FIXABLE_INT_REF (u32
, u32_native
, uint32
, 4)
779 BV_INT_REF (u32
, uint32
, 4)
781 VM_DEFINE_FUNCTION (187, bv_s32_native_ref
, "bv-s32-native-ref", 2)
782 #if SIZEOF_VOID_P > 4
783 BV_FIXABLE_INT_REF (s32
, s32_native
, int32
, 4)
785 BV_INT_REF (s32
, int32
, 4)
787 VM_DEFINE_FUNCTION (188, bv_u64_native_ref
, "bv-u64-native-ref", 2)
788 BV_INT_REF (u64
, uint64
, 8)
789 VM_DEFINE_FUNCTION (189, bv_s64_native_ref
, "bv-s64-native-ref", 2)
790 BV_INT_REF (s64
, int64
, 8)
791 VM_DEFINE_FUNCTION (190, bv_f32_native_ref
, "bv-f32-native-ref", 2)
792 BV_FLOAT_REF (f32
, ieee_single
, float, 4)
793 VM_DEFINE_FUNCTION (191, bv_f64_native_ref
, "bv-f64-native-ref", 2)
794 BV_FLOAT_REF (f64
, ieee_double
, double, 8)
796 #undef BV_FIXABLE_INT_REF
802 #define BV_SET_WITH_ENDIANNESS(stem, fn_stem) \
806 if (scm_is_eq (endianness, scm_i_native_endianness)) \
807 goto VM_LABEL (bv_##stem##_native_set); \
809 SCM bv, idx, val; POP3 (val, idx, bv); \
811 scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness); \
816 VM_DEFINE_INSTRUCTION (192, bv_u16_set
, "bv-u16-set", 0, 4, 0)
817 BV_SET_WITH_ENDIANNESS (u16
, u16
)
818 VM_DEFINE_INSTRUCTION (193, bv_s16_set
, "bv-s16-set", 0, 4, 0)
819 BV_SET_WITH_ENDIANNESS (s16
, s16
)
820 VM_DEFINE_INSTRUCTION (194, bv_u32_set
, "bv-u32-set", 0, 4, 0)
821 BV_SET_WITH_ENDIANNESS (u32
, u32
)
822 VM_DEFINE_INSTRUCTION (195, bv_s32_set
, "bv-s32-set", 0, 4, 0)
823 BV_SET_WITH_ENDIANNESS (s32
, s32
)
824 VM_DEFINE_INSTRUCTION (196, bv_u64_set
, "bv-u64-set", 0, 4, 0)
825 BV_SET_WITH_ENDIANNESS (u64
, u64
)
826 VM_DEFINE_INSTRUCTION (197, bv_s64_set
, "bv-s64-set", 0, 4, 0)
827 BV_SET_WITH_ENDIANNESS (s64
, s64
)
828 VM_DEFINE_INSTRUCTION (198, bv_f32_set
, "bv-f32-set", 0, 4, 0)
829 BV_SET_WITH_ENDIANNESS (f32
, ieee_single
)
830 VM_DEFINE_INSTRUCTION (199, bv_f64_set
, "bv-f64-set", 0, 4, 0)
831 BV_SET_WITH_ENDIANNESS (f64
, ieee_double
)
833 #undef BV_SET_WITH_ENDIANNESS
/* Native-order writer for integer types that fit a fixnum: inline
   store when the index is in range, the pointer aligned, and the value
   an inum within [MIN, MAX]; generic setter otherwise.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)         \
{                                                                       \
  scm_t_signed_bits i, j = 0;                                           \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP3 (val, idx, bv);                                                  \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))              \
                  && (SCM_I_INUMP (val))                                \
                  && ((j = SCM_I_INUM (val)) >= min)                    \
                  && (j <= max)))                                       \
    *int_ptr = (scm_t_ ## type) j;                                      \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);              \
    }                                                                   \
  NEXT;                                                                 \
}

/* Native-order writer for wide integer types; the slow path converts
   and range-checks via scm_to_TYPE / the generic setter.  */
#define BV_INT_SET(stem, type, size)                                    \
{                                                                       \
  scm_t_signed_bits i = 0;                                              \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP3 (val, idx, bv);                                                  \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    *int_ptr = scm_to_ ## type (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);          \
    }                                                                   \
  NEXT;                                                                 \
}

/* Native-order writer for IEEE floating-point types.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)                         \
{                                                                       \
  scm_t_signed_bits i = 0;                                              \
  SCM bv, idx, val;                                                     \
  type *float_ptr;                                                      \
                                                                        \
  POP3 (val, idx, bv);                                                  \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    *float_ptr = scm_to_double (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);       \
    }                                                                   \
  NEXT;                                                                 \
}
910 VM_DEFINE_INSTRUCTION (200, bv_u8_set
, "bv-u8-set", 0, 3, 0)
911 BV_FIXABLE_INT_SET (u8
, u8
, uint8
, 0, SCM_T_UINT8_MAX
, 1)
912 VM_DEFINE_INSTRUCTION (201, bv_s8_set
, "bv-s8-set", 0, 3, 0)
913 BV_FIXABLE_INT_SET (s8
, s8
, int8
, SCM_T_INT8_MIN
, SCM_T_INT8_MAX
, 1)
914 VM_DEFINE_INSTRUCTION (202, bv_u16_native_set
, "bv-u16-native-set", 0, 3, 0)
915 BV_FIXABLE_INT_SET (u16
, u16_native
, uint16
, 0, SCM_T_UINT16_MAX
, 2)
916 VM_DEFINE_INSTRUCTION (203, bv_s16_native_set
, "bv-s16-native-set", 0, 3, 0)
917 BV_FIXABLE_INT_SET (s16
, s16_native
, int16
, SCM_T_INT16_MIN
, SCM_T_INT16_MAX
, 2)
918 VM_DEFINE_INSTRUCTION (204, bv_u32_native_set
, "bv-u32-native-set", 0, 3, 0)
919 #if SIZEOF_VOID_P > 4
920 BV_FIXABLE_INT_SET (u32
, u32_native
, uint32
, 0, SCM_T_UINT32_MAX
, 4)
922 BV_INT_SET (u32
, uint32
, 4)
924 VM_DEFINE_INSTRUCTION (205, bv_s32_native_set
, "bv-s32-native-set", 0, 3, 0)
925 #if SIZEOF_VOID_P > 4
926 BV_FIXABLE_INT_SET (s32
, s32_native
, int32
, SCM_T_INT32_MIN
, SCM_T_INT32_MAX
, 4)
928 BV_INT_SET (s32
, int32
, 4)
930 VM_DEFINE_INSTRUCTION (206, bv_u64_native_set
, "bv-u64-native-set", 0, 3, 0)
931 BV_INT_SET (u64
, uint64
, 8)
932 VM_DEFINE_INSTRUCTION (207, bv_s64_native_set
, "bv-s64-native-set", 0, 3, 0)
933 BV_INT_SET (s64
, int64
, 8)
934 VM_DEFINE_INSTRUCTION (208, bv_f32_native_set
, "bv-f32-native-set", 0, 3, 0)
935 BV_FLOAT_SET (f32
, ieee_single
, float, 4)
936 VM_DEFINE_INSTRUCTION (209, bv_f64_native_set
, "bv-f64-native-set", 0, 3, 0)
937 BV_FLOAT_SET (f64
, ieee_double
, double, 8)
939 #undef BV_FIXABLE_INT_SET
944 (defun renumber-ops ()
945 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
948 (let ((counter 127)) (goto-char (point-min))
949 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
951 (number-to-string (setq counter (1+ counter)))