/* VM: Redefine INUM_MIN and INUM_MAX without assumptions.
   Source: libguile/vm-i-scheme.c (bpt/guile.git). */
1 /* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm_engine.c */
20
21 \f
22 /*
23 * Predicates
24 */
25
/* Fetch opcode arguments from the top of the VM stack into local SCM
   variables.  ARGS1 leaves the stack pointer alone (RETURN below simply
   overwrites the top slot); ARGS2/ARGS3 shrink the stack and clear the
   vacated slots via NULLSTACK so the GC never scans stale references.  */
#define ARGS1(a1)	SCM a1 = sp[0];
#define ARGS2(a1,a2)	SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3)	SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

/* Store the result in the (already adjusted) top stack slot and
   dispatch the next opcode.  */
#define RETURN(x)	do { *sp = x; NEXT; } while (0)
31
/* not: #t iff X is #f.  */
VM_DEFINE_FUNCTION (128, not, "not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_false (x)));
}

/* not-not: canonicalize any value to #t/#f (true iff X is not #f).  */
VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_false (x)));
}

/* eq?: pointer/immediate identity.  */
VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (scm_is_eq (x, y)));
}

/* not-eq?: negation of eq?.  */
VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (!scm_is_eq (x, y)));
}

/* null?: is X the empty list?  */
VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_null (x)));
}

/* not-null?: negation of null?.  */
VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_null (x)));
}

/* eqv?: eq? objects are always eqv?; if either operand is an immediate
   and they are not eq?, they cannot be eqv?, so only pairs of heap
   objects reach the generic scm_eqv_p.  SYNC_REGISTER first, since the
   callout may allocate or throw.  */
VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_eqv_p (x, y));
}

/* equal?: same short-cuts as eqv? above; deep comparison only for two
   heap objects, via scm_equal_p (may allocate/throw, hence the sync).  */
VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_equal_p (x, y));
}

/* pair?  */
VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_pair (x)));
}

/* list?: true for proper lists only (scm_ilength is negative for
   improper or circular lists).  */
VM_DEFINE_FUNCTION (137, listp, "list?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_ilength (x) >= 0));
}

/* symbol?  */
VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_symbol (x)));
}

/* vector?  */
VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
}
113
114 \f
115 /*
116 * Basic data
117 */
118
/* cons: allocate a fresh pair.  The CONS macro reuses X as the
   destination; it allocates, and is assumed to take care of syncing VM
   registers itself (NOTE(review): defined in vm-engine.c — confirm).  */
VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  CONS (x, x, y);
  RETURN (x);
}

/* Signal a VM error unless X is a pair; PROC names the opcode in the
   error message.  */
#define VM_VALIDATE_CONS(x, proc)					\
  VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))

/* car of a pair, with type check.  */
VM_DEFINE_FUNCTION (141, car, "car", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "car");
  RETURN (SCM_CAR (x));
}

/* cdr of a pair, with type check.  */
VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "cdr");
  RETURN (SCM_CDR (x));
}

/* set-car!: pops value then pair; no value is pushed back.  */
VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
{
  SCM x, y;
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT;
}

/* set-cdr!: pops value then pair; no value is pushed back.  */
VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
{
  SCM x, y;
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT;
}
160
161 \f
162 /*
163 * Numeric relational tests
164 */
165
#undef REL
/* Numeric comparison template: CREL is the C operator for the fixnum
   fast path, SREL the generic scm_* fallback.  When both operands are
   fixnums, their raw SCM bit patterns can be compared directly as
   signed integers because the fixnum encoding is order-preserving
   (value shifted left with identical low tag bits — NOTE(review):
   relies on the tagging scheme in tags.h).  The fallback may allocate
   or throw, hence SYNC_REGISTER.  */
#define REL(crel,srel)						\
{								\
  ARGS2 (x, y);							\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))			\
    RETURN (scm_from_bool (((scm_t_signed_bits) SCM_UNPACK (x))	\
			   crel ((scm_t_signed_bits) SCM_UNPACK (y)))); \
  SYNC_REGISTER ();						\
  RETURN (srel (x, y));						\
}

VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  REL (==, scm_num_eq_p);
}

VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  REL (<, scm_less_p);
}

VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  REL (<=, scm_leq_p);
}

VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  REL (>, scm_gr_p);
}

VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  REL (>=, scm_geq_p);
}
201
202 \f
203 /*
204 * Numeric functions
205 */
206
207 /* The maximum/minimum tagged integers. */
#undef INUM_MAX
#undef INUM_MIN

/* The maximum/minimum *tagged* integers, i.e. the raw SCM bit patterns
   of the largest and smallest fixnums.  Defined via SCM_I_MAKINUM so
   that no assumption is made about the tagging/encoding details.  */
#define INUM_MAX \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
#define INUM_MIN \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))

#undef FUNC2
/* Binary arithmetic template: CFUNC is the C operator for the fixnum
   fast path, SFUNC the generic fallback.  The intermediate is a 64-bit
   integer; fixnums carry fewer than 63 value bits, so CFUNC applied to
   two untagged fixnums cannot overflow it.  The result is re-tagged
   only when it still fits a fixnum.  */
#define FUNC2(CFUNC,SFUNC)				\
{							\
  ARGS2 (x, y);						\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))		\
    {							\
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))				\
	RETURN (SCM_I_MAKINUM (n));			\
    }							\
  SYNC_REGISTER ();					\
  RETURN (SFUNC (x, y));				\
}
228
/* Assembly tagged integer arithmetic routines.  This code uses the
   `asm goto' feature introduced in GCC 4.5.  */

#if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)

/* The macros below check the CPU's overflow flag to improve fixnum
   arithmetic.  The %rcx register is explicitly clobbered because `asm
   goto' can't have outputs, in which case the `r' constraint could be
   used to let the register allocator choose a register.

   TODO: Use `cold' label attribute in GCC 4.6.
   http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */

/* ASM_ADD (x, y): if both operands carry the fixnum tag and the tagged
   addition does not overflow, store the re-tagged sum in the top stack
   slot and dispatch; otherwise fall through to the slow path.  Adding
   two tagged fixnums yields the tag twice, so one tag's worth is
   subtracted to re-normalize.  NB: the macro body *defines* the
   slow_add label, so ASM_ADD can appear at most once per opcode body,
   with the slow path following it textually.  */
# define ASM_ADD(x, y)							\
    {									\
      asm volatile goto ("mov %1, %%rcx; "				\
			 "test %[tag], %%cl; je %l[slow_add]; "		\
			 "test %[tag], %0; je %l[slow_add]; "		\
			 "add %0, %%rcx; jo %l[slow_add]; "		\
			 "sub %[tag], %%rcx; "				\
			 "mov %%rcx, (%[vsp])\n"			\
			 : /* no outputs */				\
			 : "r" (x), "r" (y),				\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)	\
			 : "rcx", "memory"				\
			 : slow_add);					\
      NEXT;								\
    }									\
  slow_add:								\
    do { } while (0)

/* ASM_SUB (x, y): like ASM_ADD but for subtraction; the tags cancel
   when subtracting, so one tag's worth is added back.  Same single-use
   slow_sub label caveat as above.  */
# define ASM_SUB(x, y)							\
    {									\
      asm volatile goto ("mov %0, %%rcx; "				\
			 "test %[tag], %%cl; je %l[slow_sub]; "		\
			 "test %[tag], %1; je %l[slow_sub]; "		\
			 "sub %1, %%rcx; jo %l[slow_sub]; "		\
			 "add %[tag], %%rcx; "				\
			 "mov %%rcx, (%[vsp])\n"			\
			 : /* no outputs */				\
			 : "r" (x), "r" (y),				\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)	\
			 : "rcx", "memory"				\
			 : slow_sub);					\
      NEXT;								\
    }									\
  slow_sub:								\
    do { } while (0)

#endif
279
280
/* add: fixnum fast path (in C or, when available, via ASM_ADD's
   overflow-flag check), generic scm_sum otherwise.  */
VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  FUNC2 (+, scm_sum);
#else
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}

/* add1: increment X by one without untagging.  The fast path applies
   when the raw bits of X, viewed as signed, are below INUM_MAX: for a
   fixnum that rules out overflow, and for a non-fixnum the final
   SCM_I_INUMP check rejects RESULT (adding the untagged increment — a
   multiple of the alignment — cannot make a non-fixnum look like a
   fixnum; NOTE(review): relies on the scm_tc2_int tagging scheme,
   confirm against tags.h).  */
VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  */
  if (SCM_LIKELY ((scm_t_intptr) SCM_UNPACK (x) < INUM_MAX))
    {
      SCM result;

      /* Add the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) SCM_UNPACK (x)
			 + (scm_t_intptr) SCM_UNPACK (SCM_I_MAKINUM (1))
			 - scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
	RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}
314
/* sub: fixnum fast path (in C or via ASM_SUB), generic
   scm_difference otherwise.  */
VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  FUNC2 (-, scm_difference);
#else
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}

/* sub1: decrement X by one without untagging; mirror image of add1
   above, guarding against underflow with INUM_MIN and relying on the
   final SCM_I_INUMP check to reject non-fixnum inputs.  */
VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for underflow.  */
  if (SCM_LIKELY ((scm_t_intptr) SCM_UNPACK (x) > INUM_MIN))
    {
      SCM result;

      /* Subtract the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) SCM_UNPACK (x)
			 - (scm_t_intptr) SCM_UNPACK (SCM_I_MAKINUM (1))
			 + scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
	RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}

/* The asm helpers are single-use per opcode; retire them here.  */
# undef ASM_ADD
# undef ASM_SUB
351
/* mul: no fixnum fast path; always defers to scm_product, which may
   allocate (hence SYNC_REGISTER).  */
VM_DEFINE_FUNCTION (154, mul, "mul", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_product (x, y));
}

/* div: exact/inexact division via scm_divide (may produce a rational,
   may throw on division by zero).  */
VM_DEFINE_FUNCTION (155, div, "div", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_divide (x, y));
}

/* quo: truncating integer quotient.  */
VM_DEFINE_FUNCTION (156, quo, "quo", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_quotient (x, y));
}

/* rem: remainder, sign follows the dividend.  */
VM_DEFINE_FUNCTION (157, rem, "rem", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_remainder (x, y));
}

/* mod: modulo, sign follows the divisor.  */
VM_DEFINE_FUNCTION (158, mod, "mod", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_modulo (x, y));
}
386
387 VM_DEFINE_FUNCTION (159, ash, "ash", 2)
388 {
389 ARGS2 (x, y);
390 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
391 {
392 if (SCM_I_INUM (y) < 0)
393 {
394 /* Right shift, will be a fixnum. */
395 if (SCM_I_INUM (y) > -SCM_I_FIXNUM_BIT)
396 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
397 /* fall through */
398 }
399 else
400 /* Left shift. See comments in scm_ash. */
401 {
402 scm_t_signed_bits nn, bits_to_shift;
403
404 nn = SCM_I_INUM (x);
405 bits_to_shift = SCM_I_INUM (y);
406
407 if (bits_to_shift < SCM_I_FIXNUM_BIT-1
408 && ((scm_t_bits)
409 (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
410 <= 1))
411 RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
412 /* fall through */
413 }
414 /* fall through */
415 }
416 SYNC_REGISTER ();
417 RETURN (scm_ash (x, y));
418 }
419
/* logand: bitwise AND.  Fixnum pairs are untagged, combined, and
   re-tagged; the result of AND on two fixnum values is always fixable,
   so no range check is needed.  */
VM_DEFINE_FUNCTION (160, logand, "logand", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logand (x, y));
}

/* logior: bitwise inclusive OR; same fast-path reasoning as logand.  */
VM_DEFINE_FUNCTION (161, logior, "logior", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logior (x, y));
}

/* logxor: bitwise exclusive OR; same fast-path reasoning as logand.  */
VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logxor (x, y));
}
446
447 \f
448 /*
449 * Vectors and arrays
450 */
451
/* vector-ref: fast path for a non-weak vector indexed by an in-range
   fixnum.  I is pre-initialized only to placate compilers; it is
   assigned inside the short-circuited condition before being read.
   Out-of-range, weak, or non-vector cases defer to scm_vector_ref,
   which raises the appropriate error.  */
VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  scm_t_signed_bits i = 0;
  ARGS2 (vect, idx);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}

/* vector-set: pops value, index, vector; same fast-path structure as
   vector-ref, writing through the writable-elements pointer.  */
VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  scm_t_signed_bits i = 0;
  SCM vect, idx, val;
  POP3 (val, idx, vect);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}

/* make-array: a 24-bit big-endian instruction operand gives LEN, the
   number of initializer values sitting on the stack; SHAPE is popped
   first.  The initializers are passed in stack order to
   scm_from_contiguous_array, then dropped, and the array is pushed.  */
VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  SYNC_REGISTER ();
  PRE_CHECK_UNDERFLOW (len);
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}
502
503 \f
504 /*
505 * Structs
506 */
/* Signal a VM error unless OBJ is a struct.  */
#define VM_VALIDATE_STRUCT(obj, proc)			\
  VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_struct (proc, obj))

/* struct?  */
VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
{
  ARGS1 (obj);
  RETURN (scm_from_bool (SCM_STRUCTP (obj)));
}

/* struct-vtable, with type check.  (NOTE(review): the procedure name in
   the error message uses an underscore, unlike the hyphenated names
   used elsewhere — likely a historical inconsistency.)  */
VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
{
  ARGS1 (obj);
  VM_VALIDATE_STRUCT (obj, "struct_vtable");
  RETURN (SCM_STRUCT_VTABLE (obj));
}

/* make-struct: two 8-bit operands form N, the number of stack operands
   (the vtable at sp[-(n-1)] followed by N-1 initializers).  For a
   "simple" vtable of exactly the right size with no finalizer, the
   struct is allocated and filled inline; otherwise the generic
   scm_c_make_structv does the work.  All N operands are dropped and the
   new struct pushed.  */
VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer.  */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      /* Word 1 points at the struct's own data slots.  */
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}
555
/* struct-ref: fast path for a "simple" struct indexed by a fixnum;
   anything else (opaque fields, non-fixnum index, out of range) defers
   to scm_struct_ref.  */
VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* True, an inum is a signed value, but cast to unsigned it will
         certainly be more than the length, so we will fall through if
         index is negative. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}

/* struct-set: like struct-ref but additionally requires the vtable's
   SIMPLE_RW flag (all fields writable); returns the stored value.  */
VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* See above regarding index being >= 0. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}
615
616 \f
617 /*
618 * GOOPS support
619 */
/* class-of: GOOPS instances carry their class directly; everything
   else goes through the generic scm_class_of (which may allocate the
   class lazily, hence SYNC_REGISTER).  */
VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
{
  ARGS1 (obj);
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  SYNC_REGISTER ();
  RETURN (scm_class_of (obj));
}

/* slot-ref: raw read of slot IDX of a GOOPS instance.
   FIXME: No checking whatsoever -- IDX is assumed to be a valid fixnum
   and INSTANCE a struct; the compiler must guarantee this.  */
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}

/* slot-set: raw write of slot IDX of a GOOPS instance.
   FIXME: No checking whatsoever (see slot-ref above).  */
VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  POP3 (val, idx, instance);
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}
648
649 \f
650 /*
651 * Bytevectors
652 */
/* Signal a VM error unless X is a bytevector.  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)				\
  VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))

/* Endianness-dispatching reference: pop the endianness argument; when
   it is the native one, jump (computed goto via VM_LABEL) straight into
   the corresponding native-ref opcode body; otherwise call the generic
   endianness-aware accessor.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_ref);				\
  {									\
    ARGS2 (bv, idx);							\
    SYNC_REGISTER ();							\
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));	\
  }									\
}

/* Return true (non-zero) if PTR has suitable alignment for TYPE. */
#define ALIGNED_P(ptr, type)			\
  ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
672
/* Endianness-taking bytevector reference opcodes; each body is a
   BV_REF_WITH_ENDIANNESS expansion that jumps into the matching
   native-ref opcode below when the native endianness is requested.  */
VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS
691
/* Native-endianness integer reference whose value always fits a fixnum.
   I and INT_PTR are computed before IDX is validated, but are only
   *used* once the guard (fixnum index, in bounds, aligned pointer)
   has passed; any other case defers to the generic accessor, which
   raises the proper error.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)			\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    RETURN (SCM_I_MAKINUM (*int_ptr));					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));		\
    }									\
}

/* Like BV_FIXABLE_INT_REF, but the value may exceed the fixnum range,
   so fixability is checked at run time and a bignum is allocated when
   needed (hence the extra SYNC_REGISTER on that path).  */
#define BV_INT_REF(stem, type, size)					\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    {									\
      scm_t_ ## type x = *int_ptr;					\
      if (SCM_FIXABLE (x))						\
        RETURN (SCM_I_MAKINUM (x));					\
      else								\
        {								\
          SYNC_REGISTER ();						\
          RETURN (scm_from_ ## type (x));				\
        }								\
    }									\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));	\
    }									\
}

/* Floating-point reference: registers are synced up front because both
   outcomes allocate (scm_from_double boxes a flonum).  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i;							\
  const type *float_ptr;						\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  SYNC_REGISTER ();							\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    RETURN (scm_from_double (*float_ptr));				\
  else									\
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));	\
}
764
/* Native-endianness reference opcodes.  On targets where a 32-bit value
   may not fit in a fixnum (SIZEOF_VOID_P <= 4), the 32-bit variants use
   the range-checking BV_INT_REF expansion instead.  */
VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF
797
798
799
/* Endianness-dispatching store: pop the endianness argument; when it is
   the native one, jump into the corresponding native-set opcode body;
   otherwise call the generic endianness-aware setter.  */
#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_set);				\
  {									\
    SCM bv, idx, val; POP3 (val, idx, bv);				\
    SYNC_REGISTER ();							\
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);	\
    NEXT;								\
  }									\
}

VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS
832
/* Native-endianness integer store where the value must be a fixnum in
   [MIN, MAX].  As in the ref macros, I and INT_PTR are computed before
   IDX is validated but only used after the guard passes; anything that
   fails the guard (including an out-of-range value) goes through the
   generic setter, which raises the proper error.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)		\
{									\
  scm_t_signed_bits i, j = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))		\
                  && (SCM_I_INUMP (val))				\
                  && ((j = SCM_I_INUM (val)) >= min)			\
                  && (j <= max)))					\
    *int_ptr = (scm_t_ ## type) j;					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);		\
    }									\
  NEXT;									\
}

/* Native-endianness store for values that may exceed the fixnum range;
   VAL is converted by scm_to_<type>, which range-checks and may throw
   (the conversion happens only after the index/alignment guard).  */
#define BV_INT_SET(stem, type, size)					\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    *int_ptr = scm_to_ ## type (val);					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);		\
    }									\
  NEXT;									\
}

/* Floating-point store; VAL is converted by scm_to_double (may throw
   on non-real input) on the fast path.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  type *float_ptr;							\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    *float_ptr = scm_to_double (val);					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);	\
    }									\
  NEXT;									\
}
907
/* Native-endianness store opcodes; mirrors the ref instantiations
   above, including the 32-bit fixability split on SIZEOF_VOID_P.  */
VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET
940
941 /*
942 (defun renumber-ops ()
943 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
944 (interactive "")
945 (save-excursion
946 (let ((counter 127)) (goto-char (point-min))
947 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
948 (replace-match
949 (number-to-string (setq counter (1+ counter)))
950 t t nil 1)))))
951 */
952
953 /*
954 Local Variables:
955 c-file-style: "gnu"
956 End:
957 */