/* Fix VM 'ash' for right shifts by large amounts.
   [bpt/guile.git] / libguile / vm-i-scheme.c  */
1 /* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm_engine.c */
20
21 \f
22 /*
23 * Predicates
24 */
25
/* Operand helpers for the instruction bodies below: ARGSn binds the top
   n values of the VM value stack to the named locals, popping n-1 of
   them (NULLSTACK clears the vacated slots when stack-nulling is
   enabled).  RETURN overwrites the new top-of-stack with the result and
   dispatches the next opcode.  */
#define ARGS1(a1)	SCM a1 = sp[0];
#define ARGS2(a1,a2)	SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3)	SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

#define RETURN(x)	do { *sp = x; NEXT; } while (0)
31
/* (not x): #t iff X is #f.  */
VM_DEFINE_FUNCTION (128, not, "not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_false (x)));
}

/* (not (not x)): #t iff X is not #f — i.e. canonicalize to a boolean.  */
VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_false (x)));
}

/* (eq? x y): pointer/immediate identity.  */
VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (scm_is_eq (x, y)));
}

/* (not (eq? x y)).  */
VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (!scm_is_eq (x, y)));
}

/* (null? x).  */
VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_null (x)));
}

/* (not (null? x)).  */
VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_null (x)));
}
67
/* (eqv? x y).  Fast paths: identical objects are eqv?; two objects of
   which at least one is an immediate and which are not identical are
   not.  Everything else defers to scm_eqv_p, which may allocate, hence
   SYNC_REGISTER first.  */
VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_eqv_p (x, y));
}

/* (equal? x y), with the same immediate fast paths as eqv? above.  */
VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_equal_p (x, y));
}
89
/* (pair? x).  */
VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_pair (x)));
}

/* (list? x): #t iff X is a proper (finite, non-circular) list;
   scm_ilength returns a negative value otherwise.  */
VM_DEFINE_FUNCTION (137, listp, "list?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_ilength (x) >= 0));
}

/* (symbol? x).  */
VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_symbol (x)));
}

/* (vector? x).  */
VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
}
113
114 \f
115 /*
116 * Basic data
117 */
118
/* (cons x y): allocate a fresh pair.  CONS may GC, and stores the new
   pair back into X before it is pushed.  */
VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  CONS (x, x, y);
  RETURN (x);
}
125
/* Signal a wrong-type VM error naming PROC unless X is a pair.  */
#define VM_VALIDATE_CONS(x, proc)		\
  VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))

/* (car x), with a pair type check.  */
VM_DEFINE_FUNCTION (141, car, "car", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "car");
  RETURN (SCM_CAR (x));
}

/* (cdr x), with a pair type check.  */
VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "cdr");
  RETURN (SCM_CDR (x));
}

/* (set-car! x y): mutate the car of pair X; pushes no result.  */
VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
{
  SCM x, y;
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT;
}

/* (set-cdr! x y): mutate the cdr of pair X; pushes no result.  */
VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
{
  SCM x, y;
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT;
}
160
161 \f
162 /*
163 * Numeric relational tests
164 */
165
/* Body of a numeric comparison instruction.  When both operands are
   fixnums, compare the tagged words directly — the tag bits are equal
   on both sides, so the C relation CREL on the raw (signed) SCM words
   agrees with the numeric relation.  Otherwise call the generic Scheme
   predicate SREL, which may allocate/throw, hence SYNC_REGISTER.  */
#undef REL
#define REL(crel,srel)                                                \
{                                                                     \
  ARGS2 (x, y);                                                       \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))                             \
    RETURN (scm_from_bool (((scm_t_signed_bits) SCM_UNPACK (x))       \
                           crel ((scm_t_signed_bits) SCM_UNPACK (y)))); \
  SYNC_REGISTER ();                                                   \
  RETURN (srel (x, y));                                               \
}
176
/* (= x y).  */
VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  REL (==, scm_num_eq_p);
}

/* (< x y).  */
VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  REL (<, scm_less_p);
}

/* (<= x y).  */
VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  REL (<=, scm_leq_p);
}

/* (> x y).  */
VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  REL (>, scm_gr_p);
}

/* (>= x y).  */
VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  REL (>=, scm_geq_p);
}
201
202 \f
203 /*
204 * Numeric functions
205 */
206
/* The maximum/minimum *tagged* integers, i.e. the raw SCM word values
   that bound the fixnum range (used by add1/sub1 below to detect
   overflow without untagging).  */
#undef INUM_MAX
#undef INUM_MIN
#define INUM_MAX (INTPTR_MAX - 1)
#define INUM_MIN (INTPTR_MIN + scm_tc2_int)

/* Body of a binary arithmetic instruction.  Fixnum fast path computes
   in 64 bits (cannot overflow for fixnum operands) and retags when the
   result is still fixable; otherwise falls back to the generic
   function SFUNC, which may allocate, hence SYNC_REGISTER.  */
#undef FUNC2
#define FUNC2(CFUNC,SFUNC)                            \
{                                                     \
  ARGS2 (x, y);                                       \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))             \
    {                                                 \
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))                            \
        RETURN (SCM_I_MAKINUM (n));                   \
    }                                                 \
  SYNC_REGISTER ();                                   \
  RETURN (SFUNC (x, y));                              \
}
226
/* Assembly tagged integer arithmetic routines.  This code uses the
   `asm goto' feature introduced in GCC 4.5.  */

#if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)

/* The macros below check the CPU's overflow flag to improve fixnum
   arithmetic.  The %rcx register is explicitly clobbered because `asm
   goto' can't have outputs, in which case the `r' constraint could be
   used to let the register allocator choose a register.

   TODO: Use `cold' label attribute in GCC 4.6.
   http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */

/* Add tagged operands X and Y directly: both `test's verify the fixnum
   tag bit on each operand; `jo' bails out on signed overflow; one tag
   is subtracted so the sum stays correctly tagged, and the result is
   stored straight into the top stack slot.  Any failed check jumps to
   the slow_add label, after which the caller does the generic sum.  */
# define ASM_ADD(x, y)							\
    {									\
      asm volatile goto ("mov %1, %%rcx; "				\
			 "test %[tag], %%cl; je %l[slow_add]; "		\
			 "test %[tag], %0; je %l[slow_add]; "		\
			 "add %0, %%rcx; jo %l[slow_add]; "		\
			 "sub %[tag], %%rcx; "				\
			 "mov %%rcx, (%[vsp])\n"			\
			 : /* no outputs */				\
			 : "r" (x), "r" (y),				\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)	\
			 : "rcx", "memory"				\
			 : slow_add);					\
      NEXT;								\
    }									\
  slow_add:								\
    do { } while (0)

/* Like ASM_ADD but for subtraction: the tag cancels in the difference,
   so one tag is added back to re-tag the result.  */
# define ASM_SUB(x, y)							\
    {									\
      asm volatile goto ("mov %0, %%rcx; "				\
			 "test %[tag], %%cl; je %l[slow_sub]; "		\
			 "test %[tag], %1; je %l[slow_sub]; "		\
			 "sub %1, %%rcx; jo %l[slow_sub]; "		\
			 "add %[tag], %%rcx; "				\
			 "mov %%rcx, (%[vsp])\n"			\
			 : /* no outputs */				\
			 : "r" (x), "r" (y),				\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)	\
			 : "rcx", "memory"				\
			 : slow_sub);					\
      NEXT;								\
    }									\
  slow_sub:								\
    do { } while (0)

#endif
277
278
/* (+ x y): fixnum fast path via FUNC2, or the x86-64 asm fast path when
   ASM_ADD is available; falls back to generic scm_sum.  */
VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  FUNC2 (+, scm_sum);
#else
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}
290
/* (1+ x): increment without untagging.  Adding the tagged constant 1
   and subtracting one tag yields the correctly tagged successor; the
   INUM_MAX bound plus the final SCM_I_INUMP check weed out non-fixnum
   inputs and overflow, which go through scm_sum instead.  */
VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  */
  if (SCM_LIKELY ((scm_t_intptr) SCM_UNPACK (x) < INUM_MAX))
    {
      SCM result;

      /* Add the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) SCM_UNPACK (x)
                         + (scm_t_intptr) SCM_UNPACK (SCM_I_MAKINUM (1))
                         - scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}
312
/* (- x y): fixnum fast path via FUNC2, or the x86-64 asm fast path when
   ASM_SUB is available; falls back to generic scm_difference.  */
VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  FUNC2 (-, scm_difference);
#else
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}
324
/* (1- x): decrement without untagging — mirror image of add1 above,
   guarding against underflow past INUM_MIN.  */
VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for underflow.  */
  if (SCM_LIKELY ((scm_t_intptr) SCM_UNPACK (x) > INUM_MIN))
    {
      SCM result;

      /* Substract the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) SCM_UNPACK (x)
                         - (scm_t_intptr) SCM_UNPACK (SCM_I_MAKINUM (1))
                         + scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}

# undef ASM_ADD
# undef ASM_SUB
349
/* (* x y): no fixnum fast path here; always the generic routine.  */
VM_DEFINE_FUNCTION (154, mul, "mul", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_product (x, y));
}

/* (/ x y).  */
VM_DEFINE_FUNCTION (155, div, "div", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_divide (x, y));
}

/* (quotient x y).  */
VM_DEFINE_FUNCTION (156, quo, "quo", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_quotient (x, y));
}

/* (remainder x y).  */
VM_DEFINE_FUNCTION (157, rem, "rem", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_remainder (x, y));
}

/* (modulo x y).  */
VM_DEFINE_FUNCTION (158, mod, "mod", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_modulo (x, y));
}
384
/* (ash x y): arithmetic shift of X left by Y bits (right for negative
   Y), fast-pathed when both are fixnums.  */
VM_DEFINE_FUNCTION (159, ash, "ash", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
        {
          /* Right shift, will be a fixnum.  Only shift inline when the
             count is strictly less than the fixnum width: a C shift by
             >= the width of the operand type is undefined behavior, so
             larger counts fall through to scm_ash.  */
          if (SCM_I_INUM (y) > -SCM_I_FIXNUM_BIT)
            RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
          /* fall through */
        }
      else
        /* Left shift. See comments in scm_ash. */
        {
          scm_t_signed_bits nn, bits_to_shift;

          nn = SCM_I_INUM (x);
          bits_to_shift = SCM_I_INUM (y);

          /* Shift inline only if the result provably fits a fixnum:
             every bit shifted out must equal the sign bit (the SRS+1
             trick checks exactly that).  NOTE(review): `nn <<
             bits_to_shift' on negative nn relies on the compiler's
             arithmetic-shift behavior, as in scm_ash — confirm.  */
          if (bits_to_shift < SCM_I_FIXNUM_BIT-1
              && ((scm_t_bits)
                  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
                  <= 1))
            RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
          /* fall through */
        }
      /* fall through */
    }
  SYNC_REGISTER ();
  RETURN (scm_ash (x, y));
}
417
/* (logand x y): bitwise AND; fixnum pair handled inline (the result of
   AND-ing two fixnums is always fixable).  */
VM_DEFINE_FUNCTION (160, logand, "logand", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logand (x, y));
}

/* (logior x y): bitwise inclusive OR, same fast path as logand.  */
VM_DEFINE_FUNCTION (161, logior, "logior", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logior (x, y));
}

/* (logxor x y): bitwise exclusive OR, same fast path as logand.  */
VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logxor (x, y));
}
444
445 \f
446 /*
447 * Vectors and arrays
448 */
449
/* (vector-ref v i): direct element load when V is a non-weak vector and
   I is an in-range fixnum; anything else (weak vectors, bad types,
   out-of-range) goes through scm_vector_ref for full checking.  */
VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  scm_t_signed_bits i = 0;
  ARGS2 (vect, idx);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}
465
/* (vector-set! v i x): direct element store under the same conditions
   as vector-ref above; pushes no result.  */
VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  scm_t_signed_bits i = 0;
  SCM vect, idx, val;
  POP3 (val, idx, vect);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}
483
/* make-array: build an array with the popped SHAPE from LEN initial
   values on the stack.  LEN is a 3-byte big-endian immediate operand.
   scm_from_contiguous_array may allocate/throw, hence SYNC_REGISTER.  */
VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  SYNC_REGISTER ();
  PRE_CHECK_UNDERFLOW (len);
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}
500
501 \f
502 /*
503 * Structs
504 */
/* Signal a wrong-type VM error naming PROC unless OBJ is a struct.  */
#define VM_VALIDATE_STRUCT(obj, proc)			\
  VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_struct (proc, obj))

/* (struct? obj).  */
VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
{
  ARGS1 (obj);
  RETURN (scm_from_bool (SCM_STRUCTP (obj)));
}
513
514 VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
515 {
516 ARGS1 (obj);
517 VM_VALIDATE_STRUCT (obj, "struct_vtable");
518 RETURN (SCM_STRUCT_VTABLE (obj));
519 }
520
/* make-struct: construct a struct from N stack arguments (vtable plus
   N-1 initializers), where N is a 2-byte big-endian immediate.  Fast
   path: a "simple" vtable with no finalizer and exactly N-1 fields is
   allocated and initialized directly with memcpy; everything else goes
   through scm_c_make_structv.  */
VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer. */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}
553
/* (struct-ref obj pos): direct field load for "simple" structs with an
   in-range fixnum index; everything else through scm_struct_ref.  */
VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* True, an inum is a signed value, but cast to unsigned it will
         certainly be more than the length, so we will fall through if
         index is negative. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}
583
/* (struct-set! obj pos val): direct field store for "simple"
   read-write structs with an in-range fixnum index; otherwise
   scm_struct_set_x does full checking.  Returns VAL.  */
VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* See above regarding index being >= 0. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}
613
614 \f
615 /*
616 * GOOPS support
617 */
/* (class-of obj): fast path for GOOPS instances, whose class is stored
   in the header; other objects go through generic scm_class_of.  */
VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
{
  ARGS1 (obj);
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  SYNC_REGISTER ();
  RETURN (scm_class_of (obj));
}
626
/* FIXME: No checking whatsoever. */
/* (slot-ref instance idx): raw struct-data load — the compiler is
   trusted to emit this only with a valid instance and fixnum index.  */
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}

/* FIXME: No checking whatsoever. */
/* (slot-set! instance idx val): raw struct-data store, same trust
   assumptions as slot-ref; pushes no result.  */
VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  POP3 (val, idx, instance);
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}
646
647 \f
648 /*
649 * Bytevectors
650 */
/* Signal a wrong-type VM error naming PROC unless X is a bytevector.  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)					\
  VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))

/* Body of an endianness-taking bytevector ref instruction: pop the
   endianness; if it is the native one, jump to the corresponding
   native-ref instruction's label, otherwise call the generic accessor
   with the explicit endianness.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_ref);				\
  {									\
    ARGS2 (bv, idx);							\
    SYNC_REGISTER ();							\
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));	\
  }									\
}

/* Return true (non-zero) if PTR has suitable alignment for TYPE.  */
#define ALIGNED_P(ptr, type)			\
  ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
670
/* Endianness-dispatching bytevector ref instructions, one per element
   type, all expanded from BV_REF_WITH_ENDIANNESS above.  */
VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS
689
/* Body of a native-endianness integer ref whose element type always
   fits a fixnum: load directly when the index is an in-bounds fixnum
   and the computed pointer is properly aligned; otherwise fall back to
   the checked C accessor.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)			\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    RETURN (SCM_I_MAKINUM (*int_ptr));					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));		\
    }									\
}
711
/* Like BV_FIXABLE_INT_REF, but for element types whose values may not
   fit a fixnum: the loaded value is boxed via scm_from_TYPE (which may
   allocate, hence SYNC_REGISTER) when it is not fixable.  */
#define BV_INT_REF(stem, type, size)					\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    {									\
      scm_t_ ## type x = *int_ptr;					\
      if (SCM_FIXABLE (x))						\
        RETURN (SCM_I_MAKINUM (x));					\
      else								\
        {								\
          SYNC_REGISTER ();						\
          RETURN (scm_from_ ## type (x));				\
        }								\
    }									\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));	\
    }									\
}
742
/* Body of a native-endianness float ref: both branches box the result
   (scm_from_double allocates), so SYNC_REGISTER happens up front.  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i;							\
  const type *float_ptr;						\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  SYNC_REGISTER ();							\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    RETURN (scm_from_double (*float_ptr));				\
  else									\
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));	\
}
762
/* Native-endianness bytevector ref instructions.  u32/s32 use the
   fixable fast path only on 64-bit hosts, where any 32-bit value fits
   a fixnum.  */
VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF
795
796
797
/* Body of an endianness-taking bytevector set instruction: pop the
   endianness; jump to the native-set instruction's label when native,
   otherwise call the generic setter with the explicit endianness.  */
#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_set);				\
  {									\
    SCM bv, idx, val; POP3 (val, idx, bv);				\
    SYNC_REGISTER ();							\
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);	\
    NEXT;								\
  }									\
}
811
/* Endianness-dispatching bytevector set instructions, one per element
   type, all expanded from BV_SET_WITH_ENDIANNESS above.  */
VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS
830
/* Body of a native-endianness integer set for fixnum-sized element
   types: store directly when the index is an in-bounds fixnum, the
   pointer is aligned, and the value is a fixnum within [MIN, MAX];
   otherwise fall back to the checked C setter.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)		\
{									\
  scm_t_signed_bits i, j = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))		\
                  && (SCM_I_INUMP (val))				\
                  && ((j = SCM_I_INUM (val)) >= min)			\
                  && (j <= max)))					\
    *int_ptr = (scm_t_ ## type) j;					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);		\
    }									\
  NEXT;									\
}
857
/* Like BV_FIXABLE_INT_SET, but for wide element types: the value is
   converted with scm_to_TYPE (which range-checks and may throw) rather
   than being restricted to fixnums.  */
#define BV_INT_SET(stem, type, size)					\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    *int_ptr = scm_to_ ## type (val);					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);		\
    }									\
  NEXT;									\
}
881
/* Body of a native-endianness float set: direct store via
   scm_to_double under the usual fixnum-index/bounds/alignment
   conditions, else the checked C setter.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  type *float_ptr;							\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    *float_ptr = scm_to_double (val);					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);	\
    }									\
  NEXT;									\
}
905
/* Native-endianness bytevector set instructions.  As with the refs,
   u32/s32 take the fixnum fast path only on 64-bit hosts.  */
VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET
938
939 /*
940 (defun renumber-ops ()
941 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
942 (interactive "")
943 (save-excursion
944 (let ((counter 127)) (goto-char (point-min))
945 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
946 (replace-match
947 (number-to-string (setq counter (1+ counter)))
948 t t nil 1)))))
949 */
950
951 /*
952 Local Variables:
953 c-file-style: "gnu"
954 End:
955 */