/* [bpt/guile.git] libguile/vm-i-scheme.c
   Change: Add optimized tagged integer addition/subtractions for x86_64.  */
1 /* Copyright (C) 2001, 2009, 2010 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm_engine.c */
20
21 \f
22 /*
23 * Predicates
24 */
25
/* Operand-fetch helpers: bind the top stack item(s) to local SCM
   variables.  ARGS2/ARGS3 also drop all but one slot so that RETURN can
   store the result in place; NULLSTACK clears the vacated slots.  */
#define ARGS1(a1) SCM a1 = sp[0];
#define ARGS2(a1,a2) SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

/* Overwrite the remaining operand slot with X, then dispatch to the
   next instruction.  */
#define RETURN(x) do { *sp = x; NEXT; } while (0)
31
/* (not x): #t iff X is #f.  */
VM_DEFINE_FUNCTION (128, not, "not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_false (x)));
}

/* Negated form, emitted by the compiler for (not (not x)) patterns.  */
VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_false (x)));
}

/* (eq? x y): pointer/immediate identity.  */
VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (!scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_null (x)));
}

VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_null (x)));
}

/* (eqv? x y): fast paths for identical objects and for pairs of
   immediates (two distinct immediates are never eqv?); otherwise defer
   to scm_eqv_p.  SYNC_REGISTER precedes the call out to libguile, as
   everywhere in this file.  */
VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_eqv_p (x, y));
}

/* (equal? x y): same fast paths as eqv?, deferring to scm_equal_p for
   the structural comparison.  */
VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_equal_p (x, y));
}

VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_pair (x)));
}

/* (list? x): proper list iff scm_ilength reports a non-negative
   length.  */
VM_DEFINE_FUNCTION (137, listp, "list?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_ilength (x) >= 0));
}

VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_symbol (x)));
}

VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
}
113
114 \f
115 /*
116 * Basic data
117 */
118
/* (cons x y): allocate a fresh pair.  CONS comes from the VM engine;
   it stores the new pair in its first argument, so X is reused as the
   result.  */
VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  CONS (x, x, y);
  RETURN (x);
}

/* Bail out to the VM's not-a-pair error handler, recording the
   operation name (PROC) and the offending object (X) for the error
   message.  */
#define VM_VALIDATE_CONS(x, proc)		\
  if (SCM_UNLIKELY (!scm_is_pair (x)))		\
    { func_name = proc;				\
      finish_args = x;				\
      goto vm_error_not_a_pair;			\
    }
132
/* (car x): type-checked pair access.  */
VM_DEFINE_FUNCTION (141, car, "car", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "car");
  RETURN (SCM_CAR (x));
}

/* (cdr x): type-checked pair access.  */
VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "cdr");
  RETURN (SCM_CDR (x));
}

/* (set-car! x y): an instruction, not a function — it pushes no result,
   it just pops both operands and mutates the pair.  */
VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
{
  SCM x, y;
  POP (y);
  POP (x);
  VM_VALIDATE_CONS (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT;
}

/* (set-cdr! x y): as set-car!, for the cdr slot.  */
VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
{
  SCM x, y;
  POP (y);
  POP (x);
  VM_VALIDATE_CONS (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT;
}
166
167 \f
168 /*
169 * Numeric relational tests
170 */
171
/* Numeric comparison template: when both operands are fixnums, compare
   the untagged values directly with the C operator CREL; otherwise call
   the generic Scheme predicate SREL (which may signal a type error,
   hence the SYNC_REGISTER first).  */
#undef REL
#define REL(crel,srel)						\
{								\
  ARGS2 (x, y);							\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))			\
    RETURN (scm_from_bool (SCM_I_INUM (x) crel SCM_I_INUM (y)));\
  SYNC_REGISTER ();						\
  RETURN (srel (x, y));						\
}

/* (= x y) */
VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  REL (==, scm_num_eq_p);
}

/* (< x y) */
VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  REL (<, scm_less_p);
}

/* (<= x y) */
VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  REL (<=, scm_leq_p);
}

/* (> x y) */
VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  REL (>, scm_gr_p);
}

/* (>= x y) */
VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  REL (>=, scm_geq_p);
}
206
207 \f
208 /*
209 * Numeric functions
210 */
211
/* The maximum/minimum tagged integers, i.e. the bounds of a fixnum's
   SCM representation viewed as a signed intptr (tag bits included).
   Used by add1/sub1 below to pre-check for overflow on the still-tagged
   value.  */
#undef INUM_MAX
#undef INUM_MIN
#define INUM_MAX (INTPTR_MAX - 1)
#define INUM_MIN (INTPTR_MIN + scm_tc2_int)

/* Binary arithmetic template: untag, apply CFUNC in 64-bit arithmetic,
   and retag when the result still fits a fixnum; otherwise fall back to
   the generic libguile function SFUNC.  */
#undef FUNC2
#define FUNC2(CFUNC,SFUNC)				\
{							\
  ARGS2 (x, y);						\
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))		\
    {							\
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))				\
	RETURN (SCM_I_MAKINUM (n));			\
    }							\
  SYNC_REGISTER ();					\
  RETURN (SFUNC (x, y));				\
}
231
/* Assembly tagged integer arithmetic routines.  This code uses the
   `asm goto' feature introduced in GCC 4.5.  */

#if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)

/* The macros below check the CPU's overflow flag to improve fixnum
   arithmetic.  The %rcx register is explicitly clobbered because `asm
   goto' can't have outputs, in which case the `r' constraint could be
   used to let the register allocator choose a register.

   TODO: Use `cold' label attribute in GCC 4.6.
   http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html  */

/* Fast tagged addition: verify the fixnum tag bit (scm_tc2_int) on both
   operands, add the tagged values, and subtract one tag so the result
   is correctly tagged.  Any failed tag test or signed overflow (`jo')
   branches to the slow_add label, which falls through to the generic
   path in the instruction body.  The result is stored directly into the
   stack slot at SP, hence the "memory" clobber.  */
# define ASM_ADD(x, y)							\
    {									\
      asm volatile goto ("mov %1, %%rcx; "				\
			 "test %[tag], %%cl; je %l[slow_add]; "		\
			 "test %[tag], %0; je %l[slow_add]; "		\
			 "add %0, %%rcx; jo %l[slow_add]; "		\
			 "sub %[tag], %%rcx; "				\
			 "mov %%rcx, (%[vsp])\n"			\
			 : /* no outputs */				\
			 : "r" (x), "r" (y),				\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)	\
			 : "rcx", "memory"				\
			 : slow_add);					\
      NEXT;								\
    }									\
  slow_add:								\
    do { } while (0)

/* Fast tagged subtraction: as ASM_ADD, but the tags cancel by
   subtraction so one tag is added back afterwards.  */
# define ASM_SUB(x, y)							\
    {									\
      asm volatile goto ("mov %0, %%rcx; "				\
			 "test %[tag], %%cl; je %l[slow_sub]; "		\
			 "test %[tag], %1; je %l[slow_sub]; "		\
			 "sub %1, %%rcx; jo %l[slow_sub]; "		\
			 "add %[tag], %%rcx; "				\
			 "mov %%rcx, (%[vsp])\n"			\
			 : /* no outputs */				\
			 : "r" (x), "r" (y),				\
			   [vsp] "r" (sp), [tag] "i" (scm_tc2_int)	\
			 : "rcx", "memory"				\
			 : slow_sub);					\
      NEXT;								\
    }									\
  slow_sub:								\
    do { } while (0)

#endif
282
283
/* (+ x y): use the assembly fast path when available, else the generic
   FUNC2 template.  When ASM_ADD branches to its slow label, control
   falls through to the scm_sum call below.  */
VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  FUNC2 (+, scm_sum);
#else
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}

/* (1+ x): increment without untagging.  The INUM_MAX comparison rules
   out tagged values that would overflow; the SCM_I_INUMP check on the
   result then rejects the case where X was not a fixnum in the first
   place (NOTE(review): this presumably relies on heap-object alignment
   keeping the fixnum tag bit clear after the offset — confirm against
   tags.h).  */
VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  */
  if (SCM_LIKELY ((scm_t_intptr) x < INUM_MAX))
    {
      SCM result;

      /* Add the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) x
			 + (scm_t_intptr) SCM_I_MAKINUM (1)
			 - scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
	RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}
317
/* (- x y): mirror of `add', using ASM_SUB / scm_difference.  */
VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  FUNC2 (-, scm_difference);
#else
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}

/* (1- x): mirror of `add1', checking against INUM_MIN instead.  */
VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for underflow.  */
  if (SCM_LIKELY ((scm_t_intptr) x > INUM_MIN))
    {
      SCM result;

      /* Substract the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) x
			 - (scm_t_intptr) SCM_I_MAKINUM (1)
			 + scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
	RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}

/* The asm fast paths are only meant for add/sub above.  */
# undef ASM_ADD
# undef ASM_SUB
354
/* The remaining arithmetic instructions have no fixnum fast path; they
   simply call the corresponding libguile functions.  */

VM_DEFINE_FUNCTION (154, mul, "mul", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_product (x, y));
}

VM_DEFINE_FUNCTION (155, div, "div", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_divide (x, y));
}

VM_DEFINE_FUNCTION (156, quo, "quo", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_quotient (x, y));
}

VM_DEFINE_FUNCTION (157, rem, "rem", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_remainder (x, y));
}

VM_DEFINE_FUNCTION (158, mod, "mod", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_modulo (x, y));
}
389
/* (ash x y): arithmetic shift.  Right shifts of a fixnum always fit a
   fixnum; left shifts take the fast path only when the guard below
   proves no significant bits are lost (all bits at and above the
   fixnum boundary must agree with the sign).  Everything else falls
   through to scm_ash.  NOTE(review): `nn << bits_to_shift' on negative
   NN is undefined behavior in ISO C — apparently relied upon here as a
   GCC-ism; confirm against scm_ash's own implementation.  */
VM_DEFINE_FUNCTION (159, ash, "ash", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
	/* Right shift, will be a fixnum.  */
	RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
      else
	/* Left shift.  See comments in scm_ash.  */
	{
	  long nn, bits_to_shift;

	  nn = SCM_I_INUM (x);
	  bits_to_shift = SCM_I_INUM (y);

	  if (bits_to_shift < SCM_I_FIXNUM_BIT-1
	      && ((unsigned long)
		  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
		  <= 1))
	    RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
	  /* fall through */
	}
      /* fall through */
    }
  SYNC_REGISTER ();
  RETURN (scm_ash (x, y));
}
418
/* Bitwise operations: on two fixnums the untagged results can never
   exceed the operands' magnitudes, so retagging is always safe — no
   SCM_FIXABLE check is needed.  Bignums go through libguile.  */

VM_DEFINE_FUNCTION (160, logand, "logand", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logand (x, y));
}

VM_DEFINE_FUNCTION (161, logior, "logior", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logior (x, y));
}

VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logxor (x, y));
}
445
446 \f
447 /*
448 * Vectors and arrays
449 */
450
/* (vector-ref v i): direct element access when V is a non-weak vector
   and I is an in-range, non-negative fixnum; otherwise defer to
   scm_vector_ref, which performs full type/range checking and handles
   weak vectors.  */
VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  long i = 0;
  ARGS2 (vect, idx);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}
466
/* (vector-set! v i x): same fast-path conditions as vector-ref; stores
   in place and pushes nothing.  */
VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  long i = 0;
  SCM vect, idx, val;
  POP (val); POP (idx); POP (vect);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}
484
/* make-array: LEN is a 24-bit big-endian immediate operand.  Pops the
   shape, then consumes the LEN initializer values that lie below it on
   the stack, pushing the resulting array.  PRE_CHECK_UNDERFLOW guards
   against a stack shallower than LEN.  */
VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  SYNC_REGISTER ();
  PRE_CHECK_UNDERFLOW (len);
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}
501
502 \f
503 /*
504 * Structs
505 */
/* Bail out to the VM's not-a-struct error handler, recording the
   operation name and offending object (cf. VM_VALIDATE_CONS).  */
#define VM_VALIDATE_STRUCT(obj, proc)		\
  if (SCM_UNLIKELY (!SCM_STRUCTP (obj)))	\
    {						\
      func_name = proc;				\
      finish_args = (obj);			\
      goto vm_error_not_a_struct;		\
    }

/* (struct? x) */
VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
{
  ARGS1 (obj);
  RETURN (scm_from_bool (SCM_STRUCTP (obj)));
}
519
520 VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
521 {
522 ARGS1 (obj);
523 VM_VALIDATE_STRUCT (obj, "struct_vtable");
524 RETURN (SCM_STRUCT_VTABLE (obj));
525 }
526
/* make-struct: N is a 16-bit big-endian immediate operand counting the
   vtable plus its N-1 initializers on the stack.  For a "simple" vtable
   with exactly the right number of initializers and no finalizer, the
   struct is allocated and filled inline with scm_words/memcpy;
   otherwise the general scm_c_make_structv path is used.
   NOTE(review): the (scm_t_bits *) cast on INITS discards const —
   scm_c_make_structv presumably does not mutate them; confirm.  */
VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer.  */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}
559
/* (struct-ref obj pos): direct field read when OBJ is a struct of a
   "simple" vtable (all fields unboxed-free, readable) and POS is an
   in-range fixnum; otherwise defer to scm_struct_ref for full
   checking.  Note INDEX is unsigned, so a negative fixnum wraps large
   and fails the length check.  */
VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}
586
/* (struct-set! obj pos val): direct field write; the fast path
   additionally requires the SIMPLE_RW vtable flag (all fields
   writable).  Returns VAL, like scm_struct_set_x.  */
VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}
615
616 \f
617 /*
618 * GOOPS support
619 */
/* (class-of x): fast path for GOOPS instances, whose class is stored in
   the object header; everything else goes through scm_class_of.  */
VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
{
  ARGS1 (obj);
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  SYNC_REGISTER ();
  RETURN (scm_class_of (obj));
}

/* slot-ref: UNCHECKED slot access — the compiler only emits this when
   it has proven INSTANCE is a struct and IDX an in-range fixnum.  */
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}

/* slot-set: unchecked slot mutation, same contract as slot-ref.  */
VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  POP (val);
  POP (idx);
  POP (instance);
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}
648
649 \f
650 /*
651 * Bytevectors
652 */
/* Bail out to the VM's not-a-bytevector error handler (cf.
   VM_VALIDATE_CONS).  */
#define VM_VALIDATE_BYTEVECTOR(x, proc)		\
  do						\
    {						\
      if (SCM_UNLIKELY (!SCM_BYTEVECTOR_P (x)))	\
        {					\
          func_name = proc;			\
          finish_args = x;			\
          goto vm_error_not_a_bytevector;	\
        }					\
    }						\
  while (0)

/* Endianness-dispatching reference: pop the endianness operand; if it
   is the native one, jump straight to the corresponding native-ref
   instruction's label; otherwise call the generic accessor.  */
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_ref);				\
  {									\
    ARGS2 (bv, idx);							\
    SYNC_REGISTER ();							\
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));	\
  }									\
}

/* Return true (non-zero) if PTR has suitable alignment for TYPE.  */
#define ALIGNED_P(ptr, type)			\
  ((scm_t_uintptr) (ptr) % alignof (type) == 0)
681
/* Endianness-taking bytevector refs; each dispatches to its native
   sibling below when the native endianness is requested.  */
VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS
700
/* Native-endianness integer ref whose TYPE always fits a fixnum: read
   directly through an aligned pointer when IDX is an in-range,
   non-negative fixnum, else defer to the generic accessor.  Note that I
   and INT_PTR are computed before IDX is known to be a fixnum; they are
   only *used* once all checks pass.  */
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)			\
{									\
  long i;								\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    RETURN (SCM_I_MAKINUM (*int_ptr));					\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));		\
    }									\
}

/* As BV_FIXABLE_INT_REF, but TYPE may exceed the fixnum range, so the
   fast path still checks SCM_FIXABLE and falls back to the boxing
   conversion scm_from_TYPE when it does not fit.  */
#define BV_INT_REF(stem, type, size)					\
{									\
  long i;								\
  const scm_t_ ## type *int_ptr;					\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    {									\
      scm_t_ ## type x = *int_ptr;					\
      if (SCM_FIXABLE (x))						\
        RETURN (SCM_I_MAKINUM (x));					\
      else								\
        {								\
          SYNC_REGISTER ();						\
          RETURN (scm_from_ ## type (x));				\
        }								\
    }									\
  else									\
    {									\
      SYNC_REGISTER ();							\
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));	\
    }									\
}

/* Float ref: SYNC_REGISTER happens before either branch because both
   scm_from_double and the generic accessor call into libguile.  */
#define BV_FLOAT_REF(stem, fn_stem, type, size)				\
{									\
  long i;								\
  const type *float_ptr;						\
  ARGS2 (bv, idx);							\
									\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  SYNC_REGISTER ();							\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    RETURN (scm_from_double (*float_ptr));				\
  else									\
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));	\
}
773
/* Native-endianness refs.  32-bit values only fit a fixnum when the
   word size exceeds 4 bytes, hence the SIZEOF_VOID_P split.  */
VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF
806
807
808
/* Endianness-dispatching store: mirror of BV_REF_WITH_ENDIANNESS; jumps
   to the native-set instruction for the native endianness, otherwise
   calls the generic setter.  */
#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)				\
{									\
  SCM endianness;							\
  POP (endianness);							\
  if (scm_is_eq (endianness, scm_i_native_endianness))			\
    goto VM_LABEL (bv_##stem##_native_set);				\
  {									\
    SCM bv, idx, val; POP (val); POP (idx); POP (bv);			\
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);	\
    NEXT;								\
  }									\
}

VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS
840
/* Native integer store for a TYPE narrower than a fixnum: the fast path
   also range-checks the value (MIN <= j <= MAX) so the narrowing cast
   is lossless; any failed check goes through the generic setter, which
   raises the appropriate error.  */
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)		\
{									\
  long i, j = 0;							\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP (val); POP (idx); POP (bv);					\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))		\
                  && (SCM_I_INUMP (val))				\
                  && ((j = SCM_I_INUM (val)) >= min)			\
                  && (j <= max)))					\
    *int_ptr = (scm_t_ ## type) j;					\
  else									\
    scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);		\
  NEXT;									\
}

/* 64-bit (or wide) integer store: VAL may be a bignum, so the fast path
   converts it with scm_to_TYPE, which raises on range errors.  */
#define BV_INT_SET(stem, type, size)					\
{									\
  long i = 0;								\
  SCM bv, idx, val;							\
  scm_t_ ## type *int_ptr;						\
									\
  POP (val); POP (idx); POP (bv);					\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);	\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))		\
    *int_ptr = scm_to_ ## type (val);					\
  else									\
    scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);		\
  NEXT;									\
}

/* Float store: converts with scm_to_double, which accepts any real.  */
#define BV_FLOAT_SET(stem, fn_stem, type, size)				\
{									\
  long i = 0;								\
  SCM bv, idx, val;							\
  type *float_ptr;							\
									\
  POP (val); POP (idx); POP (bv);					\
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");			\
  i = SCM_I_INUM (idx);							\
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);		\
									\
  if (SCM_LIKELY (SCM_I_INUMP (idx)					\
                  && (i >= 0)						\
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))		\
                  && (ALIGNED_P (float_ptr, type))))			\
    *float_ptr = scm_to_double (val);					\
  else									\
    scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);		\
  NEXT;									\
}
906
/* Native-endianness stores, mirroring the ref table above.  */
VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET
939
940 /*
941 (defun renumber-ops ()
942 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
943 (interactive "")
944 (save-excursion
945 (let ((counter 127)) (goto-char (point-min))
946 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
947 (replace-match
948 (number-to-string (setq counter (1+ counter)))
949 t t nil 1)))))
950 */
951
952 /*
953 Local Variables:
954 c-file-style: "gnu"
955 End:
956 */