VM: Add "cc" to the clobber list of ASM_ADD and ASM_SUB.
[bpt/guile.git] / libguile / vm-i-scheme.c
1 /* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm_engine.c */
20
21 \f
22 /*
23 * Predicates
24 */
25
/* Fetch the top one, two, or three stack items into local SCM
   variables.  ARGS2 and ARGS3 also consume their extra operands:
   they decrement SP and clear the vacated slots via NULLSTACK so the
   GC does not scan stale references.  */
#define ARGS1(a1) SCM a1 = sp[0];
#define ARGS2(a1,a2) SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

/* Store X into the top stack slot and dispatch the next opcode.  */
#define RETURN(x) do { *sp = x; NEXT; } while (0)
31
32 VM_DEFINE_FUNCTION (128, not, "not", 1)
33 {
34 ARGS1 (x);
35 RETURN (scm_from_bool (scm_is_false (x)));
36 }
37
38 VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
39 {
40 ARGS1 (x);
41 RETURN (scm_from_bool (!scm_is_false (x)));
42 }
43
44 VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
45 {
46 ARGS2 (x, y);
47 RETURN (scm_from_bool (scm_is_eq (x, y)));
48 }
49
50 VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
51 {
52 ARGS2 (x, y);
53 RETURN (scm_from_bool (!scm_is_eq (x, y)));
54 }
55
56 VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
57 {
58 ARGS1 (x);
59 RETURN (scm_from_bool (scm_is_null (x)));
60 }
61
62 VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
63 {
64 ARGS1 (x);
65 RETURN (scm_from_bool (!scm_is_null (x)));
66 }
67
68 VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
69 {
70 ARGS2 (x, y);
71 if (scm_is_eq (x, y))
72 RETURN (SCM_BOOL_T);
73 if (SCM_IMP (x) || SCM_IMP (y))
74 RETURN (SCM_BOOL_F);
75 SYNC_REGISTER ();
76 RETURN (scm_eqv_p (x, y));
77 }
78
79 VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
80 {
81 ARGS2 (x, y);
82 if (scm_is_eq (x, y))
83 RETURN (SCM_BOOL_T);
84 if (SCM_IMP (x) || SCM_IMP (y))
85 RETURN (SCM_BOOL_F);
86 SYNC_REGISTER ();
87 RETURN (scm_equal_p (x, y));
88 }
89
90 VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
91 {
92 ARGS1 (x);
93 RETURN (scm_from_bool (scm_is_pair (x)));
94 }
95
96 VM_DEFINE_FUNCTION (137, listp, "list?", 1)
97 {
98 ARGS1 (x);
99 RETURN (scm_from_bool (scm_ilength (x) >= 0));
100 }
101
102 VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
103 {
104 ARGS1 (x);
105 RETURN (scm_from_bool (scm_is_symbol (x)));
106 }
107
108 VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
109 {
110 ARGS1 (x);
111 RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
112 }
113
114 \f
115 /*
116 * Basic data
117 */
118
VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  /* CONS allocates a fresh pair of X and Y, storing it back into X.
     NOTE(review): allocation presumably syncs registers for GC inside
     the CONS macro -- confirm against vm-engine.c.  */
  CONS (x, x, y);
  RETURN (x);
}
125
/* Bail out through vm_error_not_a_pair (reporting PROC) unless X is a
   pair.  */
#define VM_VALIDATE_CONS(x, proc)                                       \
  VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))
128
129 VM_DEFINE_FUNCTION (141, car, "car", 1)
130 {
131 ARGS1 (x);
132 VM_VALIDATE_CONS (x, "car");
133 RETURN (SCM_CAR (x));
134 }
135
136 VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
137 {
138 ARGS1 (x);
139 VM_VALIDATE_CONS (x, "cdr");
140 RETURN (SCM_CDR (x));
141 }
142
143 VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
144 {
145 SCM x, y;
146 POP2 (y, x);
147 VM_VALIDATE_CONS (x, "set-car!");
148 SCM_SETCAR (x, y);
149 NEXT;
150 }
151
152 VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
153 {
154 SCM x, y;
155 POP2 (y, x);
156 VM_VALIDATE_CONS (x, "set-cdr!");
157 SCM_SETCDR (x, y);
158 NEXT;
159 }
160
161 \f
162 /*
163 * Numeric relational tests
164 */
165
/* Numeric comparison.  When both operands are fixnums, CREL is applied
   to the raw tagged bits: the tag occupies the same low bits of both
   words, so comparing tagged values orders exactly like comparing the
   untagged integers.  Anything else goes through the generic predicate
   SREL, which may allocate (hence SYNC_REGISTER).  */
#undef REL
#define REL(crel,srel)                                                  \
{                                                                       \
  ARGS2 (x, y);                                                         \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))                               \
    RETURN (scm_from_bool (((scm_t_signed_bits) SCM_UNPACK (x))         \
                           crel ((scm_t_signed_bits) SCM_UNPACK (y)))); \
  SYNC_REGISTER ();                                                     \
  RETURN (srel (x, y));                                                 \
}
176
VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  /* Numeric `=' on the top two stack items.  */
  REL (==, scm_num_eq_p);
}
181
VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  /* Numeric `<'.  */
  REL (<, scm_less_p);
}
186
VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  /* Numeric `<='.  */
  REL (<=, scm_leq_p);
}
191
VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  /* Numeric `>'.  */
  REL (>, scm_gr_p);
}
196
VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  /* Numeric `>='.  */
  REL (>=, scm_geq_p);
}
201
202 \f
203 /*
204 * Numeric functions
205 */
206
/* The maximum/minimum tagged integers, i.e. the SCM_UNPACKed bit
   patterns of the largest and smallest fixnums.  */
#undef INUM_MAX
#undef INUM_MIN
#undef INUM_STEP
#define INUM_MAX \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
#define INUM_MIN \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))
/* Distance between two consecutive fixnums in their tagged
   representation (the difference of the tagged 1 and tagged 0).  */
#define INUM_STEP                                \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_INUM1)    \
   - (scm_t_signed_bits) SCM_UNPACK (SCM_INUM0))
218
/* Fixnum fast path for a binary arithmetic operator.  The untagged
   operands are combined in a 64-bit intermediate, so the + and - uses
   below cannot overflow for fixnum inputs; the result is re-tagged
   only when it is itself fixable.  Everything else goes through the
   generic SFUNC, which may allocate (hence SYNC_REGISTER).  */
#undef FUNC2
#define FUNC2(CFUNC,SFUNC)                              \
{                                                       \
  ARGS2 (x, y);                                         \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))               \
    {                                                   \
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))                              \
        RETURN (SCM_I_MAKINUM (n));                     \
    }                                                   \
  SYNC_REGISTER ();                                     \
  RETURN (SFUNC (x, y));                                \
}
232
/* Assembly tagged integer arithmetic routines.  This code uses the
   `asm goto' feature introduced in GCC 4.5.  */

#if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)

/* The macros below check the CPU's overflow flag to improve fixnum
   arithmetic.  The %rcx register is explicitly clobbered because `asm
   goto' can't have outputs, in which case the `r' constraint could be
   used to let the register allocator choose a register.

   TODO: Use `cold' label attribute in GCC 4.6.
   http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */

/* Add tagged fixnums X and Y, storing the tagged sum directly into the
   stack slot at [vsp] and dispatching NEXT on success.  Branch to
   `slow_add' when either operand lacks the fixnum tag bit (the TEST
   checks) or the addition overflows (JO).  TEST/SUB/ADD all modify the
   flags, hence "cc" in the clobber list; "memory" covers the store
   through [vsp].  */
# define ASM_ADD(x, y)                                                  \
    {                                                                   \
      asm volatile goto ("mov %1, %%rcx; "                              \
                         "test %[tag], %%cl; je %l[slow_add]; "         \
                         "test %[tag], %0; je %l[slow_add]; "           \
                         "sub %[tag], %%rcx; "                          \
                         "add %0, %%rcx; jo %l[slow_add]; "             \
                         "mov %%rcx, (%[vsp])\n"                        \
                         : /* no outputs */                             \
                         : "r" (x), "r" (y),                            \
                           [vsp] "r" (sp), [tag] "i" (scm_tc2_int)      \
                         : "rcx", "memory", "cc"                        \
                         : slow_add);                                   \
      NEXT;                                                             \
    }                                                                   \
  slow_add:                                                             \
    do { } while (0)

/* Same pattern for subtraction: subtracting the tagged Y cancels the
   tag, which is then re-added; branch to `slow_sub' on non-fixnum
   operands or overflow.  Flags are modified, hence "cc".  */
# define ASM_SUB(x, y)                                                  \
    {                                                                   \
      asm volatile goto ("mov %0, %%rcx; "                              \
                         "test %[tag], %%cl; je %l[slow_sub]; "         \
                         "test %[tag], %1; je %l[slow_sub]; "           \
                         "sub %1, %%rcx; jo %l[slow_sub]; "             \
                         "add %[tag], %%rcx; "                          \
                         "mov %%rcx, (%[vsp])\n"                        \
                         : /* no outputs */                             \
                         : "r" (x), "r" (y),                            \
                           [vsp] "r" (sp), [tag] "i" (scm_tc2_int)      \
                         : "rcx", "memory", "cc"                        \
                         : slow_sub);                                   \
      NEXT;                                                             \
    }                                                                   \
  slow_sub:                                                             \
    do { } while (0)

#endif
283
284
VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  /* Portable path: fixnum addition in a 64-bit intermediate, generic
     scm_sum otherwise.  */
  FUNC2 (+, scm_sum);
#else
  /* Fast path: ASM_ADD dispatches NEXT itself on success and falls
     through to here for non-fixnum operands or overflow.  */
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}
296
VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  We must avoid overflow in the signed
     addition below, even if X is not an inum.  */
  if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) <= INUM_MAX - INUM_STEP))
    {
      SCM result;

      /* Add 1 to the integer without untagging.  */
      result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) + INUM_STEP);

      /* If X was not a fixnum to begin with, this tag check fails and
         we fall through to the generic path below.  */
      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}
317
VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  /* Portable path: fixnum subtraction in a 64-bit intermediate,
     generic scm_difference otherwise.  */
  FUNC2 (-, scm_difference);
#else
  /* Fast path: ASM_SUB dispatches NEXT itself on success and falls
     through to here for non-fixnum operands or overflow.  */
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}
329
VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  We must avoid overflow in the signed
     subtraction below, even if X is not an inum.  */
  if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) >= INUM_MIN + INUM_STEP))
    {
      SCM result;

      /* Subtract 1 from the integer without untagging.  */
      result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) - INUM_STEP);

      /* If X was not a fixnum to begin with, this tag check fails and
         we fall through to the generic path below.  */
      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}
350
/* The ASM_* helpers are local to the `add' and `sub' instructions
   above.  */
# undef ASM_ADD
# undef ASM_SUB
353
354 VM_DEFINE_FUNCTION (154, mul, "mul", 2)
355 {
356 ARGS2 (x, y);
357 SYNC_REGISTER ();
358 RETURN (scm_product (x, y));
359 }
360
361 VM_DEFINE_FUNCTION (155, div, "div", 2)
362 {
363 ARGS2 (x, y);
364 SYNC_REGISTER ();
365 RETURN (scm_divide (x, y));
366 }
367
368 VM_DEFINE_FUNCTION (156, quo, "quo", 2)
369 {
370 ARGS2 (x, y);
371 SYNC_REGISTER ();
372 RETURN (scm_quotient (x, y));
373 }
374
375 VM_DEFINE_FUNCTION (157, rem, "rem", 2)
376 {
377 ARGS2 (x, y);
378 SYNC_REGISTER ();
379 RETURN (scm_remainder (x, y));
380 }
381
382 VM_DEFINE_FUNCTION (158, mod, "mod", 2)
383 {
384 ARGS2 (x, y);
385 SYNC_REGISTER ();
386 RETURN (scm_modulo (x, y));
387 }
388
VM_DEFINE_FUNCTION (159, ash, "ash", 2)
{
  /* Arithmetic shift of X by Y bits (negative Y shifts right); fast
     paths apply when both are fixnums and the result provably fits in
     a fixnum.  */
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
        {
          /* Right shift, will be a fixnum. */
          if (SCM_I_INUM (y) > -SCM_I_FIXNUM_BIT)
            RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
          /* fall through */
        }
      else
        /* Left shift. See comments in scm_ash. */
        {
          scm_t_signed_bits nn, bits_to_shift;

          nn = SCM_I_INUM (x);
          bits_to_shift = SCM_I_INUM (y);

          /* The result fits in a fixnum iff every bit that would be
             shifted out equals the sign bit: then the arithmetic
             right shift by (FIXNUM_BIT-1 - shift) yields 0 or -1, and
             adding 1 gives 0 or 1.
             NOTE(review): `nn << bits_to_shift' on negative NN is
             undefined per ISO C; this presumably relies on GCC's
             two's-complement behavior, as scm_ash does -- confirm.  */
          if (bits_to_shift < SCM_I_FIXNUM_BIT-1
              && ((scm_t_bits)
                  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
                  <= 1))
            RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
          /* fall through */
        }
      /* fall through */
    }
  SYNC_REGISTER ();
  RETURN (scm_ash (x, y));
}
421
422 VM_DEFINE_FUNCTION (160, logand, "logand", 2)
423 {
424 ARGS2 (x, y);
425 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
426 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
427 SYNC_REGISTER ();
428 RETURN (scm_logand (x, y));
429 }
430
431 VM_DEFINE_FUNCTION (161, logior, "logior", 2)
432 {
433 ARGS2 (x, y);
434 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
435 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
436 SYNC_REGISTER ();
437 RETURN (scm_logior (x, y));
438 }
439
440 VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
441 {
442 ARGS2 (x, y);
443 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
444 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
445 SYNC_REGISTER ();
446 RETURN (scm_logxor (x, y));
447 }
448
449 \f
450 /*
451 * Vectors and arrays
452 */
453
VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  scm_t_signed_bits i = 0;
  ARGS2 (vect, idx);
  /* Fast path: a non-weak vector indexed by an in-range fixnum.  I is
     captured by the assignment inside the condition.  */
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      /* The generic accessor handles everything else and signals any
         type or range error.  */
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}
469
VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  scm_t_signed_bits i = 0;
  SCM vect, idx, val;
  POP3 (val, idx, vect);
  /* Fast path: write directly when VECT is a non-weak vector and IDX
     an in-range fixnum (I is set by the assignment in the test).  */
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      /* The generic setter handles everything else and signals any
         type or range error.  */
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}
487
VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  /* LEN is a 24-bit big-endian immediate operand: the number of
     initializer elements on the stack.  */
  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  SYNC_REGISTER ();
  /* Make sure the stack really holds LEN initializers before reading
     them.  */
  PRE_CHECK_UNDERFLOW (len);
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}
504
505 \f
506 /*
507 * Structs
508 */
/* Bail out through vm_error_not_a_struct (reporting PROC) unless OBJ
   is a struct.  */
#define VM_VALIDATE_STRUCT(obj, proc)                   \
  VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_struct (proc, obj))
511
512 VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
513 {
514 ARGS1 (obj);
515 RETURN (scm_from_bool (SCM_STRUCTP (obj)));
516 }
517
518 VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
519 {
520 ARGS1 (obj);
521 VM_VALIDATE_STRUCT (obj, "struct_vtable");
522 RETURN (SCM_STRUCT_VTABLE (obj));
523 }
524
VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  /* N, a 16-bit immediate, counts the stacked operands: the vtable
     plus N-1 initializers.  */
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer.  Allocate it inline and copy
         the initializers straight into the struct data.  */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    /* Slow path: let the generic struct constructor do the full
       validation and initialization.  */
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}
557
VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  /* Fast path: a "simple" struct (all fields are plain object slots)
     indexed by an in-range fixnum.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* True, an inum is a signed value, but cast to unsigned it will
         certainly be more than the length, so we will fall through if
         index is negative. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  /* Slow path signals any type or range error.  */
  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}
587
VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  /* Fast path: a "simple", writable struct indexed by an in-range
     fixnum.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* See above regarding index being >= 0. */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  /* Slow path signals any type or range error.  */
  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}
617
618 \f
619 /*
620 * GOOPS support
621 */
622 VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
623 {
624 ARGS1 (obj);
625 if (SCM_INSTANCEP (obj))
626 RETURN (SCM_CLASS_OF (obj));
627 SYNC_REGISTER ();
628 RETURN (scm_class_of (obj));
629 }
630
/* FIXME: No checking whatsoever. */
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  /* IDX is trusted to be a fixnum indexing a valid slot of INSTANCE's
     struct data -- no type or range check is performed (see FIXME).  */
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}
639
/* FIXME: No checking whatsoever. */
VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  POP3 (val, idx, instance);
  /* As with slot-ref, IDX is trusted blindly (see FIXME).  */
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}
650
651 \f
652 /*
653 * Bytevectors
654 */
/* Bail out through vm_error_not_a_bytevector (reporting PROC) unless X
   is a bytevector.  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)                 \
  VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))
657
/* Dispatch a bytevector read on the endianness argument (topmost on
   the stack): native-endian requests tail-jump to the corresponding
   bv_<stem>_native_ref instruction; anything else calls the generic
   accessor with the requested endianness.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_ref);                             \
  {                                                                     \
    ARGS2 (bv, idx);                                                    \
    SYNC_REGISTER ();                                                   \
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));      \
  }                                                                     \
}
670
/* Return true (non-zero) if PTR has suitable alignment for TYPE, i.e.
   its address is a multiple of the type's natural alignment.  */
#define ALIGNED_P(ptr, type)                    \
  ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
674
/* Endianness-dispatching bytevector reads.  Each instruction takes a
   bytevector, a byte index, and an endianness symbol, and forwards
   native-endian requests to the corresponding bv-*-native-ref
   instruction defined below.  */
VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS
693
/* Read a SIZE-byte integer of scm_t_TYPE from bytevector BV at byte
   offset IDX.  Every value of the type fits in a fixnum, so the fast
   path tags the loaded value directly; it requires IDX to be a fixnum,
   in range, and the access naturally aligned.  I and INT_PTR are
   computed before IDX is validated, but only dereferenced under the
   SCM_I_INUMP guard, so a bogus IDX merely produces an unused pointer
   value.  Everything else is handled (and errors signalled) by the
   generic accessor.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)                   \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    RETURN (SCM_I_MAKINUM (*int_ptr));                                  \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));            \
    }                                                                   \
}
715
/* Like BV_FIXABLE_INT_REF, but for integer types whose values may not
   fit in a fixnum: the loaded value is tagged when fixable and boxed
   via scm_from_TYPE (which may allocate) otherwise.  Out-of-range or
   unaligned accesses go through the generic native accessor.  */
#define BV_INT_REF(stem, type, size)                                    \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    {                                                                   \
      scm_t_ ## type x = *int_ptr;                                      \
      if (SCM_FIXABLE (x))                                              \
        RETURN (SCM_I_MAKINUM (x));                                     \
      else                                                              \
        {                                                               \
          SYNC_REGISTER ();                                             \
          RETURN (scm_from_ ## type (x));                               \
        }                                                               \
    }                                                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));        \
    }                                                                   \
}
746
/* Read a SIZE-byte float of TYPE from bytevector BV at byte offset
   IDX.  Both result paths allocate (scm_from_double boxes a flonum),
   so SYNC_REGISTER is done up front.  Out-of-range or unaligned
   accesses defer to the generic native accessor.  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)                         \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const type *float_ptr;                                                \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  SYNC_REGISTER ();                                                     \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    RETURN (scm_from_double (*float_ptr));                              \
  else                                                                  \
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));       \
}
766
/* Native-endianness bytevector reads.  8- and 16-bit values always fit
   in a fixnum; 32-bit values only do on platforms with 64-bit words,
   hence the SIZEOF_VOID_P dispatch; 64-bit values may need boxing.  */
VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF
799
800
801
/* Dispatch a bytevector write on the endianness argument (topmost on
   the stack): native-endian requests tail-jump to the corresponding
   bv_<stem>_native_set instruction; anything else goes through the
   generic setter with the requested endianness.  */
#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_set);                             \
  {                                                                     \
    SCM bv, idx, val; POP3 (val, idx, bv);                              \
    SYNC_REGISTER ();                                                   \
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);        \
    NEXT;                                                               \
  }                                                                     \
}
815
/* Endianness-dispatching bytevector writes; native-endian requests are
   forwarded to the corresponding bv-*-native-set instruction below.  */
VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS
834
/* Store fixnum VAL as a SIZE-byte scm_t_TYPE at byte offset IDX of
   bytevector BV.  The fast path requires a fixnum index in range, a
   naturally aligned address, and a fixnum value within [MIN, MAX].
   I, J, and INT_PTR are precomputed but only consumed under their
   respective guards.  Anything else goes through the generic setter,
   which also signals errors.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)         \
{                                                                       \
  scm_t_signed_bits i, j = 0;                                           \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP3 (val, idx, bv);                                                  \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))              \
                  && (SCM_I_INUMP (val))                                \
                  && ((j = SCM_I_INUM (val)) >= min)                    \
                  && (j <= max)))                                       \
    *int_ptr = (scm_t_ ## type) j;                                      \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);              \
    }                                                                   \
  NEXT;                                                                 \
}
861
/* Like BV_FIXABLE_INT_SET, but for integer types whose values may not
   fit in a fixnum: the fast path converts VAL with scm_to_TYPE, which
   presumably signals on out-of-range values -- confirm against
   numbers.c.  Out-of-range indices and unaligned accesses go through
   the generic native setter.  */
#define BV_INT_SET(stem, type, size)                                    \
{                                                                       \
  scm_t_signed_bits i = 0;                                              \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP3 (val, idx, bv);                                                  \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    *int_ptr = scm_to_ ## type (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);          \
    }                                                                   \
  NEXT;                                                                 \
}
885
/* Store VAL, converted with scm_to_double, as a SIZE-byte float of
   TYPE at byte offset IDX of bytevector BV.  The fast path requires a
   fixnum index in range and a naturally aligned address; everything
   else goes through the generic native setter, which also signals
   errors.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)                         \
{                                                                       \
  scm_t_signed_bits i = 0;                                              \
  SCM bv, idx, val;                                                     \
  type *float_ptr;                                                      \
                                                                        \
  POP3 (val, idx, bv);                                                  \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    *float_ptr = scm_to_double (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);       \
    }                                                                   \
  NEXT;                                                                 \
}
909
/* Native-endianness bytevector writes, mirroring the native reads
   above: fixable types take a tagged-value fast path; 32-bit types
   depend on the word size; 64-bit types always convert.  */
VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET
942
943 /*
944 (defun renumber-ops ()
945 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
946 (interactive "")
947 (save-excursion
948 (let ((counter 127)) (goto-char (point-min))
949 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
950 (replace-match
951 (number-to-string (setq counter (1+ counter)))
952 t t nil 1)))))
953 */
954
955 /*
956 Local Variables:
957 c-file-style: "gnu"
958 End:
959 */