1 /* Copyright (C) 2001, 2009, 2010 Free Software Foundation, Inc.
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
19 /* This file is included in vm_engine.c */
/* Fetch 1..3 operands from the top of the value stack into locals.
   ARGS1 leaves the top slot in place (RETURN will overwrite it);
   ARGS2/ARGS3 drop the extra consumed slots and NULLSTACK them
   (presumably so the GC does not see stale values — confirm).  */
#define ARGS1(a1)      SCM a1 = sp[0];
#define ARGS2(a1,a2)   SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

/* Store the result in the top stack slot and dispatch the next
   instruction.  */
#define RETURN(x) do { *sp = x; NEXT; } while (0)
32 VM_DEFINE_FUNCTION (128, not, "not", 1)
35 RETURN (scm_from_bool (scm_is_false (x
)));
38 VM_DEFINE_FUNCTION (129, not_not
, "not-not", 1)
41 RETURN (scm_from_bool (!scm_is_false (x
)));
44 VM_DEFINE_FUNCTION (130, eq
, "eq?", 2)
47 RETURN (scm_from_bool (scm_is_eq (x
, y
)));
50 VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
53 RETURN (scm_from_bool (!scm_is_eq (x
, y
)));
56 VM_DEFINE_FUNCTION (132, nullp
, "null?", 1)
59 RETURN (scm_from_bool (scm_is_null (x
)));
62 VM_DEFINE_FUNCTION (133, not_nullp
, "not-null?", 1)
65 RETURN (scm_from_bool (!scm_is_null (x
)));
68 VM_DEFINE_FUNCTION (134, eqv
, "eqv?", 2)
73 if (SCM_IMP (x
) || SCM_IMP (y
))
76 RETURN (scm_eqv_p (x
, y
));
79 VM_DEFINE_FUNCTION (135, equal
, "equal?", 2)
84 if (SCM_IMP (x
) || SCM_IMP (y
))
87 RETURN (scm_equal_p (x
, y
));
90 VM_DEFINE_FUNCTION (136, pairp
, "pair?", 1)
93 RETURN (scm_from_bool (scm_is_pair (x
)));
96 VM_DEFINE_FUNCTION (137, listp
, "list?", 1)
99 RETURN (scm_from_bool (scm_ilength (x
) >= 0));
102 VM_DEFINE_FUNCTION (138, symbolp
, "symbol?", 1)
105 RETURN (scm_from_bool (scm_is_symbol (x
)));
108 VM_DEFINE_FUNCTION (139, vectorp
, "vector?", 1)
111 RETURN (scm_from_bool (SCM_I_IS_VECTOR (x
)));
119 VM_DEFINE_FUNCTION (140, cons
, "cons", 2)
/* Bail out to the VM's shared not-a-pair error handler unless X is a
   pair; PROC names the failing instruction for the error message.  */
#define VM_VALIDATE_CONS(x, proc)               \
  if (SCM_UNLIKELY (!scm_is_pair (x)))          \
    { func_name = proc;                         \
      finish_args = x;                          \
      goto vm_error_not_a_pair;                 \
    }
133 VM_DEFINE_FUNCTION (141, car
, "car", 1)
136 VM_VALIDATE_CONS (x
, "car");
137 RETURN (SCM_CAR (x
));
140 VM_DEFINE_FUNCTION (142, cdr
, "cdr", 1)
143 VM_VALIDATE_CONS (x
, "cdr");
144 RETURN (SCM_CDR (x
));
147 VM_DEFINE_INSTRUCTION (143, set_car
, "set-car!", 0, 2, 0)
152 VM_VALIDATE_CONS (x
, "set-car!");
157 VM_DEFINE_INSTRUCTION (144, set_cdr
, "set-cdr!", 0, 2, 0)
162 VM_VALIDATE_CONS (x
, "set-cdr!");
/*
 * Numeric relational tests
 */

/* Compare two operands: inline C comparison CREL when both are
   fixnums, otherwise fall back to the generic predicate SREL.  */
#define REL(crel,srel)                                                  \
{                                                                       \
  ARGS2 (x, y);                                                         \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))                               \
    RETURN (scm_from_bool (SCM_I_INUM (x) crel SCM_I_INUM (y)));        \
  SYNC_REGISTER ();                                                     \
  RETURN (srel (x, y));                                                 \
}
182 VM_DEFINE_FUNCTION (145, ee
, "ee?", 2)
184 REL (==, scm_num_eq_p
);
187 VM_DEFINE_FUNCTION (146, lt
, "lt?", 2)
192 VM_DEFINE_FUNCTION (147, le
, "le?", 2)
197 VM_DEFINE_FUNCTION (148, gt
, "gt?", 2)
202 VM_DEFINE_FUNCTION (149, ge
, "ge?", 2)
/* The maximum/minimum tagged integers, i.e. the extreme bit patterns a
   tagged fixnum SCM can take, used by add1/sub1 below to detect
   overflow before doing tagged arithmetic.  */
#define INUM_MAX (INTPTR_MAX - 1)
#define INUM_MIN (INTPTR_MIN + scm_tc2_int)
/* Binary arithmetic: apply the C operator CFUNC when both operands are
   fixnums and the 64-bit result still fits a fixnum; otherwise fall
   back to the generic procedure SFUNC.  */
#define FUNC2(CFUNC,SFUNC)                              \
{                                                       \
  ARGS2 (x, y);                                         \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))               \
    {                                                   \
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))                              \
        RETURN (SCM_I_MAKINUM (n));                     \
    }                                                   \
  SYNC_REGISTER ();                                     \
  RETURN (SFUNC (x, y));                                \
}
232 /* Assembly tagged integer arithmetic routines. This code uses the
233 `asm goto' feature introduced in GCC 4.5. */
235 #if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)
237 /* The macros below check the CPU's overflow flag to improve fixnum
238 arithmetic. The %rcx register is explicitly clobbered because `asm
239 goto' can't have outputs, in which case the `r' constraint could be
240 used to let the register allocator choose a register.
242 TODO: Use `cold' label attribute in GCC 4.6.
243 http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */
245 # define ASM_ADD(x, y) \
247 asm volatile goto ("mov %1, %%rcx; " \
248 "test %[tag], %%cl; je %l[slow_add]; " \
249 "test %[tag], %0; je %l[slow_add]; " \
250 "add %0, %%rcx; jo %l[slow_add]; " \
251 "sub %[tag], %%rcx; " \
252 "mov %%rcx, (%[vsp])\n" \
254 : "r" (x), "r" (y), \
255 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
263 # define ASM_SUB(x, y) \
265 asm volatile goto ("mov %0, %%rcx; " \
266 "test %[tag], %%cl; je %l[slow_sub]; " \
267 "test %[tag], %1; je %l[slow_sub]; " \
268 "sub %1, %%rcx; jo %l[slow_sub]; " \
269 "add %[tag], %%rcx; " \
270 "mov %%rcx, (%[vsp])\n" \
272 : "r" (x), "r" (y), \
273 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
284 VM_DEFINE_FUNCTION (150, add
, "add", 2)
292 RETURN (scm_sum (x
, y
));
296 VM_DEFINE_FUNCTION (151, add1
, "add1", 1)
300 /* Check for overflow. */
301 if (SCM_LIKELY ((scm_t_intptr
) x
< INUM_MAX
))
305 /* Add the integers without untagging. */
306 result
= SCM_PACK ((scm_t_intptr
) x
307 + (scm_t_intptr
) SCM_I_MAKINUM (1)
310 if (SCM_LIKELY (SCM_I_INUMP (result
)))
315 RETURN (scm_sum (x
, SCM_I_MAKINUM (1)));
318 VM_DEFINE_FUNCTION (152, sub
, "sub", 2)
321 FUNC2 (-, scm_difference
);
326 RETURN (scm_difference (x
, y
));
330 VM_DEFINE_FUNCTION (153, sub1
, "sub1", 1)
334 /* Check for underflow. */
335 if (SCM_LIKELY ((scm_t_intptr
) x
> INUM_MIN
))
339 /* Substract the integers without untagging. */
340 result
= SCM_PACK ((scm_t_intptr
) x
341 - (scm_t_intptr
) SCM_I_MAKINUM (1)
344 if (SCM_LIKELY (SCM_I_INUMP (result
)))
349 RETURN (scm_difference (x
, SCM_I_MAKINUM (1)));
355 VM_DEFINE_FUNCTION (154, mul
, "mul", 2)
359 RETURN (scm_product (x
, y
));
362 VM_DEFINE_FUNCTION (155, div
, "div", 2)
366 RETURN (scm_divide (x
, y
));
369 VM_DEFINE_FUNCTION (156, quo
, "quo", 2)
373 RETURN (scm_quotient (x
, y
));
376 VM_DEFINE_FUNCTION (157, rem
, "rem", 2)
380 RETURN (scm_remainder (x
, y
));
383 VM_DEFINE_FUNCTION (158, mod
, "mod", 2)
387 RETURN (scm_modulo (x
, y
));
390 VM_DEFINE_FUNCTION (159, ash
, "ash", 2)
393 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
395 if (SCM_I_INUM (y
) < 0)
396 /* Right shift, will be a fixnum. */
397 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) >> -SCM_I_INUM (y
)));
399 /* Left shift. See comments in scm_ash. */
401 long nn
, bits_to_shift
;
404 bits_to_shift
= SCM_I_INUM (y
);
406 if (bits_to_shift
< SCM_I_FIXNUM_BIT
-1
408 (SCM_SRS (nn
, (SCM_I_FIXNUM_BIT
-1 - bits_to_shift
)) + 1)
410 RETURN (SCM_I_MAKINUM (nn
<< bits_to_shift
));
416 RETURN (scm_ash (x
, y
));
419 VM_DEFINE_FUNCTION (160, logand
, "logand", 2)
422 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
423 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) & SCM_I_INUM (y
)));
425 RETURN (scm_logand (x
, y
));
428 VM_DEFINE_FUNCTION (161, logior
, "logior", 2)
431 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
432 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) | SCM_I_INUM (y
)));
434 RETURN (scm_logior (x
, y
));
437 VM_DEFINE_FUNCTION (162, logxor
, "logxor", 2)
440 if (SCM_I_INUMP (x
) && SCM_I_INUMP (y
))
441 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x
) ^ SCM_I_INUM (y
)));
443 RETURN (scm_logxor (x
, y
));
451 VM_DEFINE_FUNCTION (163, vector_ref
, "vector-ref", 2)
455 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect
)
457 && ((i
= SCM_I_INUM (idx
)) >= 0)
458 && i
< SCM_I_VECTOR_LENGTH (vect
)))
459 RETURN (SCM_I_VECTOR_ELTS (vect
)[i
]);
463 RETURN (scm_vector_ref (vect
, idx
));
467 VM_DEFINE_INSTRUCTION (164, vector_set
, "vector-set", 0, 3, 0)
471 POP (val
); POP (idx
); POP (vect
);
472 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect
)
474 && ((i
= SCM_I_INUM (idx
)) >= 0)
475 && i
< SCM_I_VECTOR_LENGTH (vect
)))
476 SCM_I_VECTOR_WELTS (vect
)[i
] = val
;
480 scm_vector_set_x (vect
, idx
, val
);
485 VM_DEFINE_INSTRUCTION (165, make_array
, "make-array", 3, -1, 1)
491 len
= (len
<< 8) + FETCH ();
492 len
= (len
<< 8) + FETCH ();
495 PRE_CHECK_UNDERFLOW (len
);
496 ret
= scm_from_contiguous_array (shape
, sp
- len
+ 1, len
);
/* Bail out to the VM's shared not-a-struct error handler unless OBJ is
   a struct; PROC names the failing instruction for the error message.  */
#define VM_VALIDATE_STRUCT(obj, proc)           \
  if (SCM_UNLIKELY (!SCM_STRUCTP (obj)))        \
    {                                           \
      func_name = proc;                         \
      finish_args = (obj);                      \
      goto vm_error_not_a_struct;               \
    }
514 VM_DEFINE_FUNCTION (166, struct_p
, "struct?", 1)
517 RETURN (scm_from_bool (SCM_STRUCTP (obj
)));
520 VM_DEFINE_FUNCTION (167, struct_vtable
, "struct-vtable", 1)
523 VM_VALIDATE_STRUCT (obj
, "struct_vtable");
524 RETURN (SCM_STRUCT_VTABLE (obj
));
527 VM_DEFINE_INSTRUCTION (168, make_struct
, "make-struct", 2, -1, 1)
529 unsigned h
= FETCH ();
530 unsigned l
= FETCH ();
531 scm_t_bits n
= ((h
<< 8U) + l
);
532 SCM vtable
= sp
[-(n
- 1)];
533 const SCM
*inits
= sp
- n
+ 2;
538 if (SCM_LIKELY (SCM_STRUCTP (vtable
)
539 && SCM_VTABLE_FLAG_IS_SET (vtable
, SCM_VTABLE_FLAG_SIMPLE
)
540 && (SCM_STRUCT_DATA_REF (vtable
, scm_vtable_index_size
) + 1
542 && !SCM_VTABLE_INSTANCE_FINALIZER (vtable
)))
544 /* Verily, we are making a simple struct with the right number of
545 initializers, and no finalizer. */
546 ret
= scm_words ((scm_t_bits
)SCM_STRUCT_DATA (vtable
) | scm_tc3_struct
,
548 SCM_SET_CELL_WORD_1 (ret
, (scm_t_bits
)SCM_CELL_OBJECT_LOC (ret
, 2));
549 memcpy (SCM_STRUCT_DATA (ret
), inits
, (n
- 1) * sizeof (SCM
));
552 ret
= scm_c_make_structv (vtable
, 0, n
- 1, (scm_t_bits
*) inits
);
560 VM_DEFINE_FUNCTION (169, struct_ref
, "struct-ref", 2)
564 if (SCM_LIKELY (SCM_STRUCTP (obj
)
565 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj
,
566 SCM_VTABLE_FLAG_SIMPLE
)
567 && SCM_I_INUMP (pos
)))
570 scm_t_bits index
, len
;
572 index
= SCM_I_INUM (pos
);
573 vtable
= SCM_STRUCT_VTABLE (obj
);
574 len
= SCM_STRUCT_DATA_REF (vtable
, scm_vtable_index_size
);
576 if (SCM_LIKELY (index
< len
))
578 scm_t_bits
*data
= SCM_STRUCT_DATA (obj
);
579 RETURN (SCM_PACK (data
[index
]));
584 RETURN (scm_struct_ref (obj
, pos
));
587 VM_DEFINE_FUNCTION (170, struct_set
, "struct-set", 3)
589 ARGS3 (obj
, pos
, val
);
591 if (SCM_LIKELY (SCM_STRUCTP (obj
)
592 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj
,
593 SCM_VTABLE_FLAG_SIMPLE
)
594 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj
,
595 SCM_VTABLE_FLAG_SIMPLE_RW
)
596 && SCM_I_INUMP (pos
)))
599 scm_t_bits index
, len
;
601 index
= SCM_I_INUM (pos
);
602 vtable
= SCM_STRUCT_VTABLE (obj
);
603 len
= SCM_STRUCT_DATA_REF (vtable
, scm_vtable_index_size
);
604 if (SCM_LIKELY (index
< len
))
606 scm_t_bits
*data
= SCM_STRUCT_DATA (obj
);
607 data
[index
] = SCM_UNPACK (val
);
613 RETURN (scm_struct_set_x (obj
, pos
, val
));
620 VM_DEFINE_FUNCTION (171, class_of
, "class-of", 1)
623 if (SCM_INSTANCEP (obj
))
624 RETURN (SCM_CLASS_OF (obj
));
626 RETURN (scm_class_of (obj
));
629 VM_DEFINE_FUNCTION (172, slot_ref
, "slot-ref", 2)
632 ARGS2 (instance
, idx
);
633 slot
= SCM_I_INUM (idx
);
634 RETURN (SCM_PACK (SCM_STRUCT_DATA (instance
) [slot
]));
637 VM_DEFINE_INSTRUCTION (173, slot_set
, "slot-set", 0, 3, 0)
639 SCM instance
, idx
, val
;
644 slot
= SCM_I_INUM (idx
);
645 SCM_STRUCT_DATA (instance
) [slot
] = SCM_UNPACK (val
);
/* Bail out to the VM's shared not-a-bytevector error handler unless X
   is a bytevector; PROC names the failing instruction.  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)                 \
  do                                                    \
    {                                                   \
      if (SCM_UNLIKELY (!SCM_BYTEVECTOR_P (x)))         \
        {                                               \
          func_name = proc;                             \
          finish_args = x;                              \
          goto vm_error_not_a_bytevector;               \
        }                                               \
    }                                                   \
  while (0)
/* Endianness-dispatching bytevector ref: native-endian requests jump
   to the corresponding native-ref opcode; anything else goes through
   the generic accessor with the explicit endianness.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
                                                                        \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_ref);                             \
  {                                                                     \
    ARGS2 (bv, idx);                                                    \
    SYNC_REGISTER ();                                                   \
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));      \
  }                                                                     \
}
/* Return true (non-zero) if PTR has suitable alignment for TYPE.  */
#define ALIGNED_P(ptr, type)                    \
  ((scm_t_uintptr) (ptr) % alignof (type) == 0)
682 VM_DEFINE_FUNCTION (174, bv_u16_ref
, "bv-u16-ref", 3)
683 BV_REF_WITH_ENDIANNESS (u16
, u16
)
684 VM_DEFINE_FUNCTION (175, bv_s16_ref
, "bv-s16-ref", 3)
685 BV_REF_WITH_ENDIANNESS (s16
, s16
)
686 VM_DEFINE_FUNCTION (176, bv_u32_ref
, "bv-u32-ref", 3)
687 BV_REF_WITH_ENDIANNESS (u32
, u32
)
688 VM_DEFINE_FUNCTION (177, bv_s32_ref
, "bv-s32-ref", 3)
689 BV_REF_WITH_ENDIANNESS (s32
, s32
)
690 VM_DEFINE_FUNCTION (178, bv_u64_ref
, "bv-u64-ref", 3)
691 BV_REF_WITH_ENDIANNESS (u64
, u64
)
692 VM_DEFINE_FUNCTION (179, bv_s64_ref
, "bv-s64-ref", 3)
693 BV_REF_WITH_ENDIANNESS (s64
, s64
)
694 VM_DEFINE_FUNCTION (180, bv_f32_ref
, "bv-f32-ref", 3)
695 BV_REF_WITH_ENDIANNESS (f32
, ieee_single
)
696 VM_DEFINE_FUNCTION (181, bv_f64_ref
, "bv-f64-ref", 3)
697 BV_REF_WITH_ENDIANNESS (f64
, ieee_double
)
699 #undef BV_REF_WITH_ENDIANNESS
/* Native-endian integer ref for types that always fit a fixnum:
   inline aligned in-bounds reads, generic accessor (which also raises
   the errors) otherwise.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)                   \
{                                                                       \
  long i = 0;                                                           \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    RETURN (SCM_I_MAKINUM (*int_ptr));                                  \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));            \
    }                                                                   \
}
/* Native-endian integer ref for types that may exceed the fixnum
   range: inline read when aligned and in bounds, boxing the value if
   it is not fixable; generic accessor otherwise.  */
#define BV_INT_REF(stem, type, size)                                    \
{                                                                       \
  long i = 0;                                                           \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    {                                                                   \
      scm_t_ ## type x = *int_ptr;                                      \
      if (SCM_FIXABLE (x))                                              \
        RETURN (SCM_I_MAKINUM (x));                                     \
      else                                                              \
        {                                                               \
          SYNC_REGISTER ();                                             \
          RETURN (scm_from_ ## type (x));                               \
        }                                                               \
    }                                                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));        \
    }                                                                   \
}
/* Native-endian float ref: inline aligned in-bounds reads, widened to
   double; generic accessor otherwise.  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)                         \
{                                                                       \
  long i = 0;                                                           \
  const type *float_ptr;                                                \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    RETURN (scm_from_double (*float_ptr));                              \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));     \
    }                                                                   \
}
774 VM_DEFINE_FUNCTION (182, bv_u8_ref
, "bv-u8-ref", 2)
775 BV_FIXABLE_INT_REF (u8
, u8
, uint8
, 1)
776 VM_DEFINE_FUNCTION (183, bv_s8_ref
, "bv-s8-ref", 2)
777 BV_FIXABLE_INT_REF (s8
, s8
, int8
, 1)
778 VM_DEFINE_FUNCTION (184, bv_u16_native_ref
, "bv-u16-native-ref", 2)
779 BV_FIXABLE_INT_REF (u16
, u16_native
, uint16
, 2)
780 VM_DEFINE_FUNCTION (185, bv_s16_native_ref
, "bv-s16-native-ref", 2)
781 BV_FIXABLE_INT_REF (s16
, s16_native
, int16
, 2)
782 VM_DEFINE_FUNCTION (186, bv_u32_native_ref
, "bv-u32-native-ref", 2)
783 #if SIZEOF_VOID_P > 4
784 BV_FIXABLE_INT_REF (u32
, u32_native
, uint32
, 4)
786 BV_INT_REF (u32
, uint32
, 4)
788 VM_DEFINE_FUNCTION (187, bv_s32_native_ref
, "bv-s32-native-ref", 2)
789 #if SIZEOF_VOID_P > 4
790 BV_FIXABLE_INT_REF (s32
, s32_native
, int32
, 4)
792 BV_INT_REF (s32
, int32
, 4)
794 VM_DEFINE_FUNCTION (188, bv_u64_native_ref
, "bv-u64-native-ref", 2)
795 BV_INT_REF (u64
, uint64
, 8)
796 VM_DEFINE_FUNCTION (189, bv_s64_native_ref
, "bv-s64-native-ref", 2)
797 BV_INT_REF (s64
, int64
, 8)
798 VM_DEFINE_FUNCTION (190, bv_f32_native_ref
, "bv-f32-native-ref", 2)
799 BV_FLOAT_REF (f32
, ieee_single
, float, 4)
800 VM_DEFINE_FUNCTION (191, bv_f64_native_ref
, "bv-f64-native-ref", 2)
801 BV_FLOAT_REF (f64
, ieee_double
, double, 8)
803 #undef BV_FIXABLE_INT_REF
/* Endianness-dispatching bytevector set: native-endian requests jump
   to the corresponding native-set opcode; anything else goes through
   the generic mutator with the explicit endianness.  */
#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
                                                                        \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_set);                             \
  {                                                                     \
    SCM bv, idx, val; POP (val); POP (idx); POP (bv);                   \
    SYNC_REGISTER ();                                                   \
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);        \
    NEXT;                                                               \
  }                                                                     \
}
822 VM_DEFINE_INSTRUCTION (192, bv_u16_set
, "bv-u16-set", 0, 4, 0)
823 BV_SET_WITH_ENDIANNESS (u16
, u16
)
824 VM_DEFINE_INSTRUCTION (193, bv_s16_set
, "bv-s16-set", 0, 4, 0)
825 BV_SET_WITH_ENDIANNESS (s16
, s16
)
826 VM_DEFINE_INSTRUCTION (194, bv_u32_set
, "bv-u32-set", 0, 4, 0)
827 BV_SET_WITH_ENDIANNESS (u32
, u32
)
828 VM_DEFINE_INSTRUCTION (195, bv_s32_set
, "bv-s32-set", 0, 4, 0)
829 BV_SET_WITH_ENDIANNESS (s32
, s32
)
830 VM_DEFINE_INSTRUCTION (196, bv_u64_set
, "bv-u64-set", 0, 4, 0)
831 BV_SET_WITH_ENDIANNESS (u64
, u64
)
832 VM_DEFINE_INSTRUCTION (197, bv_s64_set
, "bv-s64-set", 0, 4, 0)
833 BV_SET_WITH_ENDIANNESS (s64
, s64
)
834 VM_DEFINE_INSTRUCTION (198, bv_f32_set
, "bv-f32-set", 0, 4, 0)
835 BV_SET_WITH_ENDIANNESS (f32
, ieee_single
)
836 VM_DEFINE_INSTRUCTION (199, bv_f64_set
, "bv-f64-set", 0, 4, 0)
837 BV_SET_WITH_ENDIANNESS (f64
, ieee_double
)
839 #undef BV_SET_WITH_ENDIANNESS
/* Native-endian integer set for fixnum-sized types: inline aligned
   in-bounds stores of fixnum values within [MIN, MAX]; generic mutator
   (which also raises the errors) otherwise.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)         \
{                                                                       \
  long i = 0, j = 0;                                                    \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))              \
                  && (SCM_I_INUMP (val))                                \
                  && ((j = SCM_I_INUM (val)) >= min)                    \
                  && (j <= max)))                                       \
    *int_ptr = (scm_t_ ## type) j;                                      \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);              \
    }                                                                   \
  NEXT;                                                                 \
}
/* Native-endian integer set for wide types: inline aligned in-bounds
   stores via the checked scm_to_* conversion; generic mutator
   otherwise.  */
#define BV_INT_SET(stem, type, size)                                    \
{                                                                       \
  long i = 0;                                                           \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    *int_ptr = scm_to_ ## type (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);          \
    }                                                                   \
  NEXT;                                                                 \
}
/* Native-endian float set: inline aligned in-bounds stores, narrowing
   from double; generic mutator otherwise.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)                         \
{                                                                       \
  long i = 0;                                                           \
  SCM bv, idx, val;                                                     \
  type *float_ptr;                                                      \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    *float_ptr = scm_to_double (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);       \
    }                                                                   \
  NEXT;                                                                 \
}
907 VM_DEFINE_INSTRUCTION (200, bv_u8_set
, "bv-u8-set", 0, 3, 0)
908 BV_FIXABLE_INT_SET (u8
, u8
, uint8
, 0, SCM_T_UINT8_MAX
, 1)
909 VM_DEFINE_INSTRUCTION (201, bv_s8_set
, "bv-s8-set", 0, 3, 0)
910 BV_FIXABLE_INT_SET (s8
, s8
, int8
, SCM_T_INT8_MIN
, SCM_T_INT8_MAX
, 1)
911 VM_DEFINE_INSTRUCTION (202, bv_u16_native_set
, "bv-u16-native-set", 0, 3, 0)
912 BV_FIXABLE_INT_SET (u16
, u16_native
, uint16
, 0, SCM_T_UINT16_MAX
, 2)
913 VM_DEFINE_INSTRUCTION (203, bv_s16_native_set
, "bv-s16-native-set", 0, 3, 0)
914 BV_FIXABLE_INT_SET (s16
, s16_native
, int16
, SCM_T_INT16_MIN
, SCM_T_INT16_MAX
, 2)
915 VM_DEFINE_INSTRUCTION (204, bv_u32_native_set
, "bv-u32-native-set", 0, 3, 0)
916 #if SIZEOF_VOID_P > 4
917 BV_FIXABLE_INT_SET (u32
, u32_native
, uint32
, 0, SCM_T_UINT32_MAX
, 4)
919 BV_INT_SET (u32
, uint32
, 4)
921 VM_DEFINE_INSTRUCTION (205, bv_s32_native_set
, "bv-s32-native-set", 0, 3, 0)
922 #if SIZEOF_VOID_P > 4
923 BV_FIXABLE_INT_SET (s32
, s32_native
, int32
, SCM_T_INT32_MIN
, SCM_T_INT32_MAX
, 4)
925 BV_INT_SET (s32
, int32
, 4)
927 VM_DEFINE_INSTRUCTION (206, bv_u64_native_set
, "bv-u64-native-set", 0, 3, 0)
928 BV_INT_SET (u64
, uint64
, 8)
929 VM_DEFINE_INSTRUCTION (207, bv_s64_native_set
, "bv-s64-native-set", 0, 3, 0)
930 BV_INT_SET (s64
, int64
, 8)
931 VM_DEFINE_INSTRUCTION (208, bv_f32_native_set
, "bv-f32-native-set", 0, 3, 0)
932 BV_FLOAT_SET (f32
, ieee_single
, float, 4)
933 VM_DEFINE_INSTRUCTION (209, bv_f64_native_set
, "bv-f64-native-set", 0, 3, 0)
934 BV_FLOAT_SET (f64
, ieee_double
, double, 8)
936 #undef BV_FIXABLE_INT_SET
941 (defun renumber-ops ()
942 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
945 (let ((counter 127)) (goto-char (point-min))
946 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
948 (number-to-string (setq counter (1+ counter)))