VM: Support 32-bit x86 in ASM_ADD and ASM_SUB.
[bpt/guile.git] / libguile / vm-i-scheme.c
1 /* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm_engine.c */
20
21 \f
/*
 * Predicates
 */

/* Load the top one, two or three operands of the current instruction
   from the VM stack into SCM locals.  ARGS2/ARGS3 also drop all but
   the bottom-most slot (NULLSTACK clears the vacated cells) so that
   RETURN below can store the result in place.  */
#define ARGS1(a1) SCM a1 = sp[0];
#define ARGS2(a1,a2) SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

/* Overwrite the remaining operand slot with X and dispatch the next
   instruction.  */
#define RETURN(x) do { *sp = x; NEXT; } while (0)
31
VM_DEFINE_FUNCTION (128, not, "not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_false (x)));
}

VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_false (x)));
}

VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (!scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_null (x)));
}

VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_null (x)));
}

VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
{
  ARGS2 (x, y);
  /* Fast paths: identical objects are eqv?, and two non-identical
     immediates can never be.  Only call out for two heap objects.  */
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  /* scm_eqv_p may allocate, so synchronize the VM registers first.  */
  SYNC_REGISTER ();
  RETURN (scm_eqv_p (x, y));
}

VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
{
  ARGS2 (x, y);
  /* Same fast paths as eqv? above.  */
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_equal_p (x, y));
}

VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_pair (x)));
}

VM_DEFINE_FUNCTION (137, listp, "list?", 1)
{
  ARGS1 (x);
  /* scm_ilength returns a negative value for improper or circular
     lists, so a non-negative length means a proper list.  */
  RETURN (scm_from_bool (scm_ilength (x) >= 0));
}

VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_symbol (x)));
}

VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
}
113
114 \f
/*
 * Basic data
 */

VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  /* CONS allocates a fresh pair and stores it in its first argument;
     X is reused here as the destination.  */
  CONS (x, x, y);
  RETURN (x);
}

/* Raise a `not a pair' VM error unless X is a pair.  */
#define VM_VALIDATE_CONS(x, proc)					\
  VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))

VM_DEFINE_FUNCTION (141, car, "car", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "car");
  RETURN (SCM_CAR (x));
}

VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "cdr");
  RETURN (SCM_CDR (x));
}

VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
{
  SCM x, y;
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT;
}

VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
{
  SCM x, y;
  POP2 (y, x);
  VM_VALIDATE_CONS (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT;
}
160
161 \f
/*
 * Numeric relational tests
 */

/* Compare the top two operands.  When both are immediate fixnums,
   compare the tagged words directly as signed integers: both words
   carry the same low tag bits, so CREL on the raw representations
   orders them like the untagged values.  Otherwise defer to the
   generic Scheme predicate SREL (which may allocate, hence
   SYNC_REGISTER).  */
#undef REL
#define REL(crel,srel)						\
{								\
  ARGS2 (x, y);							\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))			\
    RETURN (scm_from_bool (((scm_t_signed_bits) SCM_UNPACK (x))	\
			   crel ((scm_t_signed_bits) SCM_UNPACK (y)))); \
  SYNC_REGISTER ();						\
  RETURN (srel (x, y));						\
}

VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  REL (==, scm_num_eq_p);
}

VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  REL (<, scm_less_p);
}

VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  REL (<=, scm_leq_p);
}

VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  REL (>, scm_gr_p);
}

VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  REL (>=, scm_geq_p);
}
201
202 \f
/*
 * Numeric functions
 */

/* The maximum/minimum tagged integers, i.e. the raw word values of the
   largest and smallest fixnums, and the distance between the tagged
   representations of two consecutive fixnums.  */
#undef INUM_MAX
#undef INUM_MIN
#undef INUM_STEP
#define INUM_MAX \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
#define INUM_MIN \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))
#define INUM_STEP                                \
  ((scm_t_signed_bits) SCM_UNPACK (SCM_INUM1)    \
   - (scm_t_signed_bits) SCM_UNPACK (SCM_INUM0))

/* Binary arithmetic fast path: apply CFUNC to two untagged fixnums in
   64-bit arithmetic (wide enough that adding or subtracting two
   fixnums cannot overflow) and retag the result if it still fits a
   fixnum; otherwise fall back to the generic SFUNC.  */
#undef FUNC2
#define FUNC2(CFUNC,SFUNC)				\
{							\
  ARGS2 (x, y);						\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))		\
    {							\
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))				\
	RETURN (SCM_I_MAKINUM (n));			\
    }							\
  SYNC_REGISTER ();					\
  RETURN (SFUNC (x, y));				\
}
232
233 /* Assembly tagged integer arithmetic routines. This code uses the
234 `asm goto' feature introduced in GCC 4.5. */
235
236 #if SCM_GNUC_PREREQ (4, 5) && (defined __x86_64__ || defined __i386__)
237
238 # undef _CX
239 # ifdef __x86_64__
240 # define _CX "rcx"
241 # else
242 # define _CX "ecx"
243 # endif
244
245 /* The macros below check the CPU's overflow flag to improve fixnum
246 arithmetic. The _CX register (%rcx or %ecx) is explicitly
247 clobbered because `asm goto' can't have outputs, in which case the
248 `r' constraint could be used to let the register allocator choose a
249 register.
250
251 TODO: Use `cold' label attribute in GCC 4.6.
252 http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */
253
254 # define ASM_ADD(x, y) \
255 { \
256 asm volatile goto ("mov %1, %%"_CX"; " \
257 "test %[tag], %%cl; je %l[slow_add]; " \
258 "test %[tag], %0; je %l[slow_add]; " \
259 "sub %[tag], %%"_CX"; " \
260 "add %0, %%"_CX"; jo %l[slow_add]; " \
261 "mov %%"_CX", (%[vsp])\n" \
262 : /* no outputs */ \
263 : "r" (x), "r" (y), \
264 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
265 : "rcx", "memory", "cc" \
266 : slow_add); \
267 NEXT; \
268 } \
269 slow_add: \
270 do { } while (0)
271
272 # define ASM_SUB(x, y) \
273 { \
274 asm volatile goto ("mov %0, %%"_CX"; " \
275 "test %[tag], %%cl; je %l[slow_sub]; " \
276 "test %[tag], %1; je %l[slow_sub]; " \
277 "sub %1, %%"_CX"; jo %l[slow_sub]; " \
278 "add %[tag], %%"_CX"; " \
279 "mov %%"_CX", (%[vsp])\n" \
280 : /* no outputs */ \
281 : "r" (x), "r" (y), \
282 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
283 : "rcx", "memory", "cc" \
284 : slow_sub); \
285 NEXT; \
286 } \
287 slow_sub: \
288 do { } while (0)
289
290 #endif
291
292
VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  FUNC2 (+, scm_sum);
#else
  /* Assembly fast path; ASM_ADD falls through to the code below when
     an operand is not a fixnum or the addition overflows.  */
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}
304
VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  We must avoid overflow in the signed
     addition below, even if X is not an inum.  */
  if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) <= INUM_MAX - INUM_STEP))
    {
      SCM result;

      /* Add 1 to the integer without untagging.  */
      result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) + INUM_STEP);

      /* If X was not a fixnum to begin with, RESULT will not look like
         one either, so this check also rejects non-fixnum inputs.  */
      if (SCM_LIKELY (SCM_I_INUMP (result)))
	RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}
325
VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  FUNC2 (-, scm_difference);
#else
  /* Assembly fast path; ASM_SUB falls through to the code below when
     an operand is not a fixnum or the subtraction overflows.  */
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}
337
VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  We must avoid overflow in the signed
     subtraction below, even if X is not an inum.  */
  if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) >= INUM_MIN + INUM_STEP))
    {
      SCM result;

      /* Subtract 1 from the integer without untagging.  */
      result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) - INUM_STEP);

      /* As in add1, this also rejects non-fixnum inputs.  */
      if (SCM_LIKELY (SCM_I_INUMP (result)))
	RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}

/* ASM_ADD and ASM_SUB are only used by the add/sub instructions above.  */
# undef ASM_ADD
# undef ASM_SUB
361
/* The following arithmetic instructions have no fixnum fast path; they
   synchronize the VM registers (the callees may allocate or throw) and
   defer to the generic numeric tower.  */

VM_DEFINE_FUNCTION (154, mul, "mul", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_product (x, y));
}

VM_DEFINE_FUNCTION (155, div, "div", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_divide (x, y));
}

VM_DEFINE_FUNCTION (156, quo, "quo", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_quotient (x, y));
}

VM_DEFINE_FUNCTION (157, rem, "rem", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_remainder (x, y));
}

VM_DEFINE_FUNCTION (158, mod, "mod", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_modulo (x, y));
}
396
VM_DEFINE_FUNCTION (159, ash, "ash", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
        {
          /* Right shift, will be a fixnum.  Shifts of a full fixnum
             width or more fall through to scm_ash.  */
          if (SCM_I_INUM (y) > -SCM_I_FIXNUM_BIT)
            RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
          /* fall through */
        }
      else
        /* Left shift.  See comments in scm_ash.  */
        {
          scm_t_signed_bits nn, bits_to_shift;

          nn = SCM_I_INUM (x);
          bits_to_shift = SCM_I_INUM (y);

          /* Shift in place only when the result provably fits a
             fixnum: the bits shifted out of NN must all equal its sign
             bit, which the SCM_SRS test below checks (the arithmetic
             shift yields 0 or -1, so adding 1 gives 0 or 1).  */
          if (bits_to_shift < SCM_I_FIXNUM_BIT-1
              && ((scm_t_bits)
                  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
                  <= 1))
            RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
          /* fall through */
        }
      /* fall through */
    }
  SYNC_REGISTER ();
  RETURN (scm_ash (x, y));
}
429
/* Bitwise operations: fixnum fast path operating on the untagged
   values, generic fallback otherwise.  */

VM_DEFINE_FUNCTION (160, logand, "logand", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logand (x, y));
}

VM_DEFINE_FUNCTION (161, logior, "logior", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logior (x, y));
}

VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logxor (x, y));
}
456
457 \f
/*
 * Vectors and arrays
 */

VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  scm_t_signed_bits i = 0;
  ARGS2 (vect, idx);
  /* Fast path: a non-weak vector indexed by an in-range fixnum.
     Anything else (weak vectors, arrays, bignum or out-of-range
     indices) goes through scm_vector_ref, which also raises the
     appropriate errors.  */
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}

VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  scm_t_signed_bits i = 0;
  SCM vect, idx, val;
  POP3 (val, idx, vect);
  /* Same fast-path criteria as vector-ref above.  */
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}
495
VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  /* LEN is a 3-byte big-endian instruction operand: the number of
     initializer values on the stack.  */
  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  SYNC_REGISTER ();
  PRE_CHECK_UNDERFLOW (len);
  /* The LEN initializers sit contiguously on the stack ending at SP.  */
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}
512
513 \f
/*
 * Structs
 */
/* Raise a `not a struct' VM error unless OBJ is a struct.  */
#define VM_VALIDATE_STRUCT(obj, proc)			\
  VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_struct (proc, obj))

VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
{
  ARGS1 (obj);
  RETURN (scm_from_bool (SCM_STRUCTP (obj)));
}

VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
{
  ARGS1 (obj);
  VM_VALIDATE_STRUCT (obj, "struct_vtable");
  RETURN (SCM_STRUCT_VTABLE (obj));
}
532
VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  /* N, a 2-byte big-endian operand, counts the values on the stack:
     the vtable followed by N-1 initializers.  */
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer.  Allocate and initialize it
         inline rather than going through scm_c_make_structv.  */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      /* Word 1 points at the struct's own data area (word 2 onward).  */
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}
565
VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  /* Fast path: a simple struct with a fixnum index.  Everything else
     (opaque fields, non-fixnum POS, range errors) is handled by
     scm_struct_ref below.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* True, an inum is a signed value, but cast to unsigned it will
         certainly be more than the length, so we will fall through if
         index is negative.  */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}
595
VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  /* Fast path: a simple *read-write* struct with a fixnum index; the
     generic scm_struct_set_x handles everything else, including
     read-only structs.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* See struct-ref above regarding index being >= 0.  */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}
625
626 \f
/*
 * GOOPS support
 */
VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
{
  ARGS1 (obj);
  /* Instances carry their class directly; all other objects go
     through the generic scm_class_of.  */
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  SYNC_REGISTER ();
  RETURN (scm_class_of (obj));
}

/* FIXME: No checking whatsoever. */
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}

/* FIXME: No checking whatsoever. */
VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  POP3 (val, idx, instance);
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}
658
659 \f
/*
 * Bytevectors
 */
/* Raise a `not a bytevector' VM error unless X is a bytevector.  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)					\
  VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))

/* Pop the endianness operand; if it is the native endianness, jump to
   the corresponding native-ref instruction, otherwise call the
   generic byte-swapping accessor.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_ref);				\
  {									\
    ARGS2 (bv, idx);							\
    SYNC_REGISTER ();							\
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));	\
  }									\
}

/* Return true (non-zero) if PTR has suitable alignment for TYPE.  */
#define ALIGNED_P(ptr, type)			\
  ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)

VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS
701
/* Native-endianness integer ref whose value always fits in a fixnum.
   Note that I and INT_PTR are computed before IDX is validated; they
   are only *used* once the guard below has checked that IDX is an
   in-range fixnum and the access is aligned.  Unaligned or
   out-of-range accesses fall back to the generic accessor, which
   raises the appropriate error.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)			\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    RETURN (SCM_I_MAKINUM (*int_ptr));					\
  else									\
    {									\
      SYNC_REGISTER ();						\
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));		\
    }									\
}

/* Like BV_FIXABLE_INT_REF, but the value may exceed the fixnum range,
   in which case it is boxed via scm_from_<type>.  */
#define BV_INT_REF(stem, type, size)					\
{									\
  scm_t_signed_bits i;							\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    {									\
      scm_t_ ## type x = *int_ptr;					\
      if (SCM_FIXABLE (x))						\
        RETURN (SCM_I_MAKINUM (x));					\
      else								\
        {								\
          SYNC_REGISTER ();						\
          RETURN (scm_from_ ## type (x));				\
        }								\
    }									\
  else									\
    {									\
      SYNC_REGISTER ();						\
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));	\
    }									\
}

/* Native-endianness float ref; the result is always boxed as a double,
   hence the unconditional SYNC_REGISTER.  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i;							\
  const type *float_ptr;						\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  SYNC_REGISTER ();							\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    RETURN (scm_from_double (*float_ptr));				\
  else									\
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));	\
}

VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
/* 32-bit values only fit a fixnum on 64-bit platforms.  */
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF
807
808
809
/* Pop the endianness operand; if it is the native endianness, jump to
   the corresponding native-set instruction, otherwise call the
   generic byte-swapping setter.  */
#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_set);				\
  {									\
    SCM bv, idx, val; POP3 (val, idx, bv);				\
    SYNC_REGISTER ();							\
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);	\
    NEXT;								\
  }									\
}

VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS
842
/* Native-endianness integer set for types whose full range [MIN, MAX]
   fits in a fixnum.  The fast path requires an in-range, aligned
   fixnum index and a fixnum value within [MIN, MAX]; everything else
   is handed to the generic setter, which raises the appropriate
   error.  As with the ref macros, I and INT_PTR are computed before
   IDX is validated but only used after the guard.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)		\
{									\
  scm_t_signed_bits i, j = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))		\
                  && (SCM_I_INUMP (val))				\
                  && ((j = SCM_I_INUM (val)) >= min)			\
                  && (j <= max)))					\
    *int_ptr = (scm_t_ ## type) j;					\
  else									\
    {									\
      SYNC_REGISTER ();						\
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);		\
    }									\
  NEXT;								\
}

/* Like BV_FIXABLE_INT_SET, but the value may not fit a fixnum, so it
   is converted with scm_to_<type> (which range-checks and may throw).  */
#define BV_INT_SET(stem, type, size)					\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    *int_ptr = scm_to_ ## type (val);					\
  else									\
    {									\
      SYNC_REGISTER ();						\
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);		\
    }									\
  NEXT;								\
}

/* Native-endianness float set; VAL is converted via scm_to_double.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)				\
{									\
  scm_t_signed_bits i = 0;						\
  SCM bv, idx, val;							\
  type *float_ptr;							\
									\
  POP3 (val, idx, bv);							\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    *float_ptr = scm_to_double (val);					\
  else									\
    {									\
      SYNC_REGISTER ();						\
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);	\
    }									\
  NEXT;								\
}
917
VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
/* 32-bit values only fit a fixnum on 64-bit platforms.  */
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET
950
951 /*
952 (defun renumber-ops ()
953 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
954 (interactive "")
955 (save-excursion
956 (let ((counter 127)) (goto-char (point-min))
957 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
958 (replace-match
959 (number-to-string (setq counter (1+ counter)))
960 t t nil 1)))))
961 */
962
963 /*
964 Local Variables:
965 c-file-style: "gnu"
966 End:
967 */