Use `alignof_type' instead of `alignof'.
[bpt/guile.git] / libguile / vm-i-scheme.c
1 /* Copyright (C) 2001, 2009, 2010, 2011 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm_engine.c */
20
21 \f
22 /*
23 * Predicates
24 */
25
/* Operand-binding helpers: bind the top one/two/three stack cells to
   local SCM variables and drop all but one slot, so that RETURN can
   overwrite the remaining top-of-stack cell in place.  NULLSTACK
   clears the vacated cells (for the benefit of conservative GC).  */
#define ARGS1(a1)	SCM a1 = sp[0];
#define ARGS2(a1,a2)	SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3)	SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

/* Store the result in the slot freed by ARGSn, then dispatch the next
   VM instruction.  */
#define RETURN(x)	do { *sp = x; NEXT; } while (0)
31
32 VM_DEFINE_FUNCTION (128, not, "not", 1)
33 {
34 ARGS1 (x);
35 RETURN (scm_from_bool (scm_is_false (x)));
36 }
37
38 VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
39 {
40 ARGS1 (x);
41 RETURN (scm_from_bool (!scm_is_false (x)));
42 }
43
44 VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
45 {
46 ARGS2 (x, y);
47 RETURN (scm_from_bool (scm_is_eq (x, y)));
48 }
49
50 VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
51 {
52 ARGS2 (x, y);
53 RETURN (scm_from_bool (!scm_is_eq (x, y)));
54 }
55
56 VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
57 {
58 ARGS1 (x);
59 RETURN (scm_from_bool (scm_is_null (x)));
60 }
61
62 VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
63 {
64 ARGS1 (x);
65 RETURN (scm_from_bool (!scm_is_null (x)));
66 }
67
VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
{
  ARGS2 (x, y);
  /* eq? objects are always eqv?.  */
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  /* Two distinct immediates are never eqv?.  */
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  /* The generic predicate may allocate or throw, so flush the VM's
     cached registers first.  */
  SYNC_REGISTER ();
  RETURN (scm_eqv_p (x, y));
}
78
VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
{
  ARGS2 (x, y);
  /* eq? objects are always equal?.  */
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  /* Two distinct immediates are never equal?.  */
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  /* Deep comparison may allocate, recurse, or throw.  */
  SYNC_REGISTER ();
  RETURN (scm_equal_p (x, y));
}
89
90 VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
91 {
92 ARGS1 (x);
93 RETURN (scm_from_bool (scm_is_pair (x)));
94 }
95
96 VM_DEFINE_FUNCTION (137, listp, "list?", 1)
97 {
98 ARGS1 (x);
99 RETURN (scm_from_bool (scm_ilength (x) >= 0));
100 }
101
102 VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
103 {
104 ARGS1 (x);
105 RETURN (scm_from_bool (scm_is_symbol (x)));
106 }
107
108 VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
109 {
110 ARGS1 (x);
111 RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
112 }
113
114 \f
115 /*
116 * Basic data
117 */
118
VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  /* CONS allocates a fresh pair (x . y) and stores it into its first
     argument — here X is reused as the destination.  CONS may trigger
     GC, which is why it is defined by the engine, not open-coded.  */
  CONS (x, x, y);
  RETURN (x);
}
125
/* Jump to the VM's shared not-a-pair error handler when X is not a
   pair, reporting PROC (the Scheme-visible instruction name) as the
   offending operation and X as the culprit.  */
#define VM_VALIDATE_CONS(x, proc)		\
  if (SCM_UNLIKELY (!scm_is_pair (x)))		\
    { func_name = proc;				\
      finish_args = x;				\
      goto vm_error_not_a_pair;			\
    }
132
VM_DEFINE_FUNCTION (141, car, "car", 1)
{
  ARGS1 (x);
  /* Type-checks X; jumps to the shared error handler on failure.  */
  VM_VALIDATE_CONS (x, "car");
  RETURN (SCM_CAR (x));
}
139
VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
{
  ARGS1 (x);
  /* Type-checks X; jumps to the shared error handler on failure.  */
  VM_VALIDATE_CONS (x, "cdr");
  RETURN (SCM_CDR (x));
}
146
VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
{
  SCM x, y;
  /* Stack layout: pair below, new value on top.  */
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT;
}
155
VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
{
  SCM x, y;
  /* Stack layout: pair below, new value on top.  */
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT;
}
164
165 \f
166 /*
167 * Numeric relational tests
168 */
169
#undef REL
/* Numeric comparison template.  When both operands are fixnums,
   compare their tagged representations directly: both carry the same
   tag in the low bits, so a signed comparison of the raw words orders
   them exactly like the untagged values.  Otherwise defer to the
   generic predicate SREL, syncing registers first since it may
   allocate or throw.  */
#define REL(crel,srel)						\
{								\
  ARGS2 (x, y);							\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))			\
    RETURN (scm_from_bool (((scm_t_signed_bits) SCM_UNPACK (x))	\
			   crel ((scm_t_signed_bits) SCM_UNPACK (y)))); \
  SYNC_REGISTER ();						\
  RETURN (srel (x, y));						\
}
180
/* The five numeric relational instructions, all fixnum-fast via REL
   (see above).  */
VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  REL (==, scm_num_eq_p);
}

VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  REL (<, scm_less_p);
}

VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  REL (<=, scm_leq_p);
}

VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  REL (>, scm_gr_p);
}

VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  REL (>=, scm_geq_p);
}
205
206 \f
207 /*
208 * Numeric functions
209 */
210
/* The maximum/minimum tagged integers, i.e. the raw scm_t_bits values
   of the largest and smallest fixnum SCM objects.  Used below to
   detect overflow before untagging.  */
#undef INUM_MAX
#undef INUM_MIN
#define INUM_MAX (INTPTR_MAX - 1)
#define INUM_MIN (INTPTR_MIN + scm_tc2_int)
216
#undef FUNC2
/* Binary arithmetic template.  When both operands are fixnums, compute
   with 64-bit integers — wide enough that fixnum inputs cannot
   overflow — and return a fixnum if the result fits.  Otherwise fall
   back to the generic SFUNC from the number tower, syncing registers
   since it may allocate (e.g. a bignum) or throw.  */
#define FUNC2(CFUNC,SFUNC)				\
{							\
  ARGS2 (x, y);						\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))		\
    {							\
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))				\
	RETURN (SCM_I_MAKINUM (n));			\
    }							\
  SYNC_REGISTER ();					\
  RETURN (SFUNC (x, y));				\
}
230
/* Assembly tagged integer arithmetic routines.  This code uses the
   `asm goto' feature introduced in GCC 4.5.  */

#if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)

/* The macros below check the CPU's overflow flag to improve fixnum
   arithmetic.  The %rcx register is explicitly clobbered because `asm
   goto' can't have outputs, in which case the `r' constraint could be
   used to let the register allocator choose a register.

   The two `test' instructions verify that both operands carry the
   fixnum tag bit; any non-fixnum bails out to the slow path, as does
   an overflowing add/sub (`jo').  On success the result — with one
   redundant tag adjusted away — is stored directly into the stack cell
   at [vsp], and NEXT dispatches.  The slow_NNN label placed after the
   block is the asm-goto target for the generic fallback.

   TODO: Use `cold' label attribute in GCC 4.6.
   http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */

# define ASM_ADD(x, y)						\
    {								\
      asm volatile goto ("mov %1, %%rcx; "			\
			 "test %[tag], %%cl; je %l[slow_add]; "	\
			 "test %[tag], %0; je %l[slow_add]; "	\
			 "add %0, %%rcx; jo %l[slow_add]; "	\
			 "sub %[tag], %%rcx; "			\
			 "mov %%rcx, (%[vsp])\n"		\
			 : /* no outputs */			\
			 : "r" (x), "r" (y),			\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)\
			 : "rcx", "memory"			\
			 : slow_add);				\
      NEXT;							\
    }								\
  slow_add:							\
    do { } while (0)

# define ASM_SUB(x, y)						\
    {								\
      asm volatile goto ("mov %0, %%rcx; "			\
			 "test %[tag], %%cl; je %l[slow_sub]; "	\
			 "test %[tag], %1; je %l[slow_sub]; "	\
			 "sub %1, %%rcx; jo %l[slow_sub]; "	\
			 "add %[tag], %%rcx; "			\
			 "mov %%rcx, (%[vsp])\n"		\
			 : /* no outputs */			\
			 : "r" (x), "r" (y),			\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)\
			 : "rcx", "memory"			\
			 : slow_sub);				\
      NEXT;							\
    }								\
  slow_sub:							\
    do { } while (0)

#endif
281
282
VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  /* Portable path: fixnum-fast addition via FUNC2.  */
  FUNC2 (+, scm_sum);
#else
  /* x86-64 path: ASM_ADD dispatches NEXT itself on the fast path and
     falls through to here (via its slow_add label) otherwise.  */
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}
294
VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  Comparing the raw word against INUM_MAX also
     weeds out most non-fixnums, but not all — hence the SCM_I_INUMP
     re-check on the result below.  */
  if (SCM_LIKELY ((scm_t_intptr) SCM_UNPACK (x) < INUM_MAX))
    {
      SCM result;

      /* Add the integers without untagging: adding two tagged values
         double-counts the tag, so subtract one tag afterwards.  */
      result = SCM_PACK ((scm_t_intptr) SCM_UNPACK (x)
                         + (scm_t_intptr) SCM_UNPACK (SCM_I_MAKINUM (1))
                         - scm_tc2_int);

      /* Only valid if X really was a fixnum.  */
      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  /* Generic path: bignums, flonums, type errors.  */
  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}
316
VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  /* Portable path: fixnum-fast subtraction via FUNC2.  */
  FUNC2 (-, scm_difference);
#else
  /* x86-64 path: ASM_SUB dispatches NEXT itself on the fast path and
     falls through to here (via its slow_sub label) otherwise.  */
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}
328
VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for underflow; see add1 above for the symmetric logic.  */
  if (SCM_LIKELY ((scm_t_intptr) SCM_UNPACK (x) > INUM_MIN))
    {
      SCM result;

      /* Subtract the integers without untagging: subtracting two
         tagged values cancels the tag, so add one tag back.  */
      result = SCM_PACK ((scm_t_intptr) SCM_UNPACK (x)
                         - (scm_t_intptr) SCM_UNPACK (SCM_I_MAKINUM (1))
                         + scm_tc2_int);

      /* Only valid if X really was a fixnum.  */
      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  /* Generic path: bignums, flonums, type errors.  */
  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}
350
/* ASM_ADD/ASM_SUB are only used by the add/sub instructions above.  */
# undef ASM_ADD
# undef ASM_SUB

/* The remaining arithmetic instructions have no fixnum fast path and
   always defer to the generic number tower; SYNC_REGISTER is needed
   because these may allocate (bignums, fractions) or throw.  */

VM_DEFINE_FUNCTION (154, mul, "mul", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_product (x, y));
}

VM_DEFINE_FUNCTION (155, div, "div", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_divide (x, y));
}

VM_DEFINE_FUNCTION (156, quo, "quo", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_quotient (x, y));
}

VM_DEFINE_FUNCTION (157, rem, "rem", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_remainder (x, y));
}

VM_DEFINE_FUNCTION (158, mod, "mod", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_modulo (x, y));
}
388
VM_DEFINE_FUNCTION (159, ash, "ash", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
        /* Right shift, will be a fixnum.
           NOTE(review): a shift count >= the word width is undefined
           behavior in C; presumably fixnum magnitudes keep -y below
           that in practice — confirm upstream.  */
        RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
      else
        /* Left shift.  See comments in scm_ash.  */
        {
          scm_t_signed_bits nn, bits_to_shift;

          nn = SCM_I_INUM (x);
          bits_to_shift = SCM_I_INUM (y);

          /* The shifted value fits in a fixnum iff every bit that
             would be shifted out agrees with the sign bit: after
             arithmetic-shifting those bits down, the residue must be
             all zeros or all ones (i.e. residue + 1 <= 1 unsigned).  */
          if (bits_to_shift < SCM_I_FIXNUM_BIT-1
              && ((scm_t_bits)
                  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
                  <= 1))
            RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
          /* fall through */
        }
      /* fall through */
    }
  /* Generic path: bignum results, non-fixnum operands, errors.  */
  SYNC_REGISTER ();
  RETURN (scm_ash (x, y));
}
417
/* Bitwise instructions.  For two fixnums the operation can be done on
   the untagged values directly and always yields a fixnum; otherwise
   defer to the generic bignum-capable routine.  */

VM_DEFINE_FUNCTION (160, logand, "logand", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logand (x, y));
}

VM_DEFINE_FUNCTION (161, logior, "logior", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logior (x, y));
}

VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logxor (x, y));
}
444
445 \f
446 /*
447 * Vectors and arrays
448 */
449
VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  scm_t_signed_bits i = 0;
  ARGS2 (vect, idx);
  /* Fast path: a non-weak vector indexed by an in-range fixnum can be
     read directly.  Note I is assigned inside the condition.  */
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      /* Generic accessor handles weak vectors and raises range/type
         errors; it may throw, so sync first.  */
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}
465
VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  scm_t_signed_bits i = 0;
  SCM vect, idx, val;
  /* Stack: vector, index, value (value on top).  */
  POP3 (val, idx, vect);
  /* Fast path mirrors vector_ref above; I assigned in-condition.  */
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      /* Generic setter handles weak vectors and errors.  */
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}
483
VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  /* Three instruction-stream bytes form a 24-bit big-endian count of
     initializer values on the stack.  */
  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  /* scm_from_contiguous_array allocates; sync before calling, and make
     sure the stack really holds LEN values.  */
  SYNC_REGISTER ();
  PRE_CHECK_UNDERFLOW (len);
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}
500
501 \f
502 /*
503 * Structs
504 */
/* Jump to the VM's shared not-a-struct error handler when OBJ is not a
   struct, reporting PROC as the operation name and OBJ as the
   culprit.  */
#define VM_VALIDATE_STRUCT(obj, proc)           \
  if (SCM_UNLIKELY (!SCM_STRUCTP (obj)))        \
    {                                           \
      func_name = proc;                         \
      finish_args = (obj);                      \
      goto vm_error_not_a_struct;               \
    }
512
513 VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
514 {
515 ARGS1 (obj);
516 RETURN (scm_from_bool (SCM_STRUCTP (obj)));
517 }
518
519 VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
520 {
521 ARGS1 (obj);
522 VM_VALIDATE_STRUCT (obj, "struct_vtable");
523 RETURN (SCM_STRUCT_VTABLE (obj));
524 }
525
VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  /* Two instruction-stream bytes form a 16-bit big-endian count N of
     stack operands: the vtable plus N-1 field initializers.  */
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  /* Both branches below allocate.  */
  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer.  Allocate it in one go and
         copy the initializers straight into the field area.  */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    /* General case: opaque fields, finalizers, wrong arity, etc.  */
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}
558
VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  /* Fast path: a "simple" struct (all fields are boxed SCM values)
     indexed by an in-range fixnum.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* True, an inum is a signed value, but cast to unsigned it will
         certainly be more than the length, so we will fall through if
         index is negative. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  /* Generic path: opaque/unboxed fields, range and type errors.  */
  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}
588
VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  /* Fast path: a simple *and writable* struct indexed by an in-range
     fixnum.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* See above regarding index being >= 0. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  /* Generic path handles read-only fields, range and type errors.  */
  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}
618
619 \f
620 /*
621 * GOOPS support
622 */
VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
{
  ARGS1 (obj);
  /* GOOPS instances carry their class directly; everything else goes
     through the generic (possibly allocating) lookup.  */
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  SYNC_REGISTER ();
  RETURN (scm_class_of (obj));
}
631
/* FIXME: No checking whatsoever. */
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  /* IDX is assumed to be a valid fixnum slot index into a struct
     INSTANCE; neither is verified (see FIXME above), so the compiler
     must only emit this for known-safe accesses.  */
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}
640
/* FIXME: No checking whatsoever. */
VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  /* Stack: instance, index, value (value on top).  Same unchecked
     contract as slot-ref above.  */
  POP3 (val, idx, instance);
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}
651
652 \f
653 /*
654 * Bytevectors
655 */
/* Jump to the VM's shared not-a-bytevector error handler when X is not
   a bytevector, reporting PROC as the operation name and X as the
   culprit.  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)			\
  do							\
    {							\
      if (SCM_UNLIKELY (!SCM_BYTEVECTOR_P (x)))		\
        {						\
          func_name = proc;				\
          finish_args = x;				\
          goto vm_error_not_a_bytevector;		\
        }						\
    }							\
  while (0)
667
/* Endianness-dispatching bytevector reference: pop the endianness
   operand; if it is the native one, tail-jump to the corresponding
   native-ref instruction's label, otherwise call the generic
   byte-swapping accessor.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_ref);				\
  {									\
    ARGS2 (bv, idx);							\
    SYNC_REGISTER ();							\
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));	\
  }									\
}

/* Return true (non-zero) if PTR has suitable alignment for TYPE. */
#define ALIGNED_P(ptr, type)			\
  ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
684
/* Endianness-taking bytevector reads; each dispatches to its native
   counterpart below when given the native endianness.  */
VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS
703
/* Native-endianness integer read whose value always fits in a fixnum.
   Fast path requires: fixnum index, in bounds, and an aligned pointer.
   NOTE(review): I and INT_PTR are computed before SCM_I_INUMP (idx) is
   checked; the bogus pointer is never dereferenced, but its mere
   computation is questionable — confirm this is intentional.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)			\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    RETURN (SCM_I_MAKINUM (*int_ptr));					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));		\
    }									\
}

/* Native-endianness integer read that may exceed the fixnum range:
   box the value as a bignum (scm_from_TYPE) when it does not fit.  */
#define BV_INT_REF(stem, type, size)					\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    {									\
      scm_t_ ## type x = *int_ptr;					\
      if (SCM_FIXABLE (x))						\
        RETURN (SCM_I_MAKINUM (x));					\
      else								\
        {								\
          SYNC_REGISTER ();						\
          RETURN (scm_from_ ## type (x));				\
        }								\
    }									\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));	\
    }									\
}

/* Native-endianness float read; always allocates a flonum, hence the
   unconditional SYNC_REGISTER before either branch.  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i;							\
  const type *float_ptr;						\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  SYNC_REGISTER ();							\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    RETURN (scm_from_double (*float_ptr));				\
  else									\
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));	\
}
776
/* Native-endianness bytevector reads.  u32/s32 always fit in a fixnum
   only on 64-bit words, hence the SIZEOF_VOID_P dispatch.  */
VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF
809
810
811
/* Endianness-dispatching bytevector write: pop the endianness operand;
   if it is the native one, tail-jump to the corresponding native-set
   instruction's label, otherwise call the generic byte-swapping
   setter.  */
#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_set);				\
  {									\
    SCM bv, idx, val; POP3 (val, idx, bv);				\
    SYNC_REGISTER ();							\
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);	\
    NEXT;								\
  }									\
}

VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS
844
/* Native-endianness integer write where any legal value is a fixnum:
   fast path requires a fixnum index in bounds, aligned pointer, and a
   fixnum value within [MIN, MAX].  I and J are assigned inside the
   condition.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)		\
{									\
  scm_t_signed_bits i, j = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))		\
                  && (SCM_I_INUMP (val))				\
                  && ((j = SCM_I_INUM (val)) >= min)			\
                  && (j <= max)))					\
    *int_ptr = (scm_t_ ## type) j;					\
  else									\
    {									\
      /* Generic setter handles range/type errors and bignums.  */	\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);		\
    }									\
  NEXT;									\
}

/* Native-endianness integer write whose value may be a bignum: convert
   with scm_to_TYPE on the fast path (which throws on overflow).  */
#define BV_INT_SET(stem, type, size)					\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    *int_ptr = scm_to_ ## type (val);					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);		\
    }									\
  NEXT;									\
}

/* Native-endianness float write; scm_to_double on the fast path throws
   for non-real values.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  type *float_ptr;							\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    *float_ptr = scm_to_double (val);					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);	\
    }									\
  NEXT;									\
}
919
/* Native-endianness bytevector writes; u32/s32 values are guaranteed
   fixnums only on 64-bit words, hence the SIZEOF_VOID_P dispatch.  */
VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET
952
953 /*
954 (defun renumber-ops ()
955 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
956 (interactive "")
957 (save-excursion
958 (let ((counter 127)) (goto-char (point-min))
959 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
960 (replace-match
961 (number-to-string (setq counter (1+ counter)))
962 t t nil 1)))))
963 */
964
965 /*
966 Local Variables:
967 c-file-style: "gnu"
968 End:
969 */