/* Copyright (C) 2001, 2009, 2010 Free Software Foundation, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 3 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 * 02110-1301 USA
 */

/* This file is included in vm_engine.c */

\f
/*
 * Predicates
 */

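/* The ARGS1/ARGS2/ARGS3 helpers below read an instruction's operands from
   the top of the VM value stack, popping all but the lowest operand slot
   (NULLSTACK clears the vacated slots when stack clearing is enabled);
   RETURN overwrites the remaining slot with the result and dispatches the
   next opcode via NEXT.  */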
#define ARGS1(a1)       SCM a1 = sp[0];
#define ARGS2(a1,a2)    SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

#define RETURN(x) do { *sp = x; NEXT; } while (0)

VM_DEFINE_FUNCTION (128, not, "not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_false (x)));
}

VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_false (x)));
}

VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (!scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_null (x)));
}

VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_null (x)));
}

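/* For `eqv?' and `equal?': objects that are `eq?' are trivially equal, and
   an immediate that is not `eq?' to the other operand cannot be `eqv?' (or
   `equal?') to it.  Everything else needs a call into libguile, hence the
   SYNC_REGISTER before it.  */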
VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_eqv_p (x, y));
}

VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_equal_p (x, y));
}

VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_pair (x)));
}

VM_DEFINE_FUNCTION (137, listp, "list?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_ilength (x) >= 0));
}

VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_symbol (x)));
}

VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
}

\f
/*
 * Basic data
 */

VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  CONS (x, x, y);
  RETURN (x);
}

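/* VM_VALIDATE_CONS stashes the procedure name and the offending object and
   jumps to the VM's shared wrong-type-argument handler (the
   vm_error_not_a_pair label in the VM engine).  */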
#define VM_VALIDATE_CONS(x, proc)               \
  if (SCM_UNLIKELY (!scm_is_pair (x)))          \
    { func_name = proc;                         \
      finish_args = x;                          \
      goto vm_error_not_a_pair;                 \
    }

VM_DEFINE_FUNCTION (141, car, "car", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "car");
  RETURN (SCM_CAR (x));
}

VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "cdr");
  RETURN (SCM_CDR (x));
}

VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
{
  SCM x, y;
  POP (y);
  POP (x);
  VM_VALIDATE_CONS (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT;
}

VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
{
  SCM x, y;
  POP (y);
  POP (x);
  VM_VALIDATE_CONS (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT;
}

\f
/*
 * Numeric relational tests
 */

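/* Why REL below can compare fixnums without untagging them: an inum N is
   stored as the word 4*N + scm_tc2_int (i.e. 4*N + 2), and n -> 4n + 2 is
   strictly increasing, so comparing the raw words as signed integers
   orders them exactly like the untagged values.  The sketch below is
   illustrative only and never compiled; `fixnum_compare_sketch' is a
   made-up name that merely restates REL's fast path.  */
#if 0
static int
fixnum_compare_sketch (void)
{
  SCM a = SCM_I_MAKINUM (-7), b = SCM_I_MAKINUM (42);
  int fast = (scm_t_signed_bits) (a) < (scm_t_signed_bits) (b); /* tagged words */
  int slow = scm_is_true (scm_less_p (a, b));                   /* untagging compare */
  return fast == slow;          /* always 1 when both operands are inums */
}
#endif
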
#undef REL
#define REL(crel,srel)                                          \
{                                                               \
  ARGS2 (x, y);                                                 \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))                       \
    RETURN (scm_from_bool ((scm_t_signed_bits) (x)              \
                           crel (scm_t_signed_bits) (y)));      \
  SYNC_REGISTER ();                                             \
  RETURN (srel (x, y));                                         \
}

VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  REL (==, scm_num_eq_p);
}

VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  REL (<, scm_less_p);
}

VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  REL (<=, scm_leq_p);
}

VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  REL (>, scm_gr_p);
}

VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  REL (>=, scm_geq_p);
}

\f
/*
 * Numeric functions
 */

/* The maximum/minimum tagged integers.  */
#undef INUM_MAX
#undef INUM_MIN
#define INUM_MAX (INTPTR_MAX - 1)
#define INUM_MIN (INTPTR_MIN + scm_tc2_int)

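/* FUNC2 is the generic binary fixnum fast path: untag both operands, do the
   operation in a 64-bit temporary, and retag the result if it still fits in
   a fixnum; otherwise (or if either operand is not an inum) fall back to
   the generic libguile procedure.  */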
#undef FUNC2
#define FUNC2(CFUNC,SFUNC)                              \
{                                                       \
  ARGS2 (x, y);                                         \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))               \
    {                                                   \
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
      if (SCM_FIXABLE (n))                              \
        RETURN (SCM_I_MAKINUM (n));                     \
    }                                                   \
  SYNC_REGISTER ();                                     \
  RETURN (SFUNC (x, y));                                \
}

/* Assembly tagged integer arithmetic routines.  This code uses the
   `asm goto' feature introduced in GCC 4.5.  */

#if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)

/* The macros below check the CPU's overflow flag to improve fixnum
   arithmetic.  The %rcx register is explicitly clobbered because `asm
   goto' can't have outputs; if it could, an `r' output constraint would
   let the register allocator choose a register.

   TODO: Use the `cold' label attribute in GCC 4.6.
   http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html  */

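/* In ASM_ADD and ASM_SUB, the two `test' instructions bail out to the slow
   path unless both operands carry the inum tag (scm_tc2_int), and `jo'
   bails out on signed overflow.  Since an inum N is the word 4*N + 2,
   (4a + 2) + (4b + 2) minus one tag is 4*(a + b) + 2, so subtracting (for
   add) or adding back (for sub) a single tag leaves a correctly tagged
   result, which is stored straight into the stack slot at [vsp].  */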
# define ASM_ADD(x, y)                                          \
  {                                                             \
    asm volatile goto ("mov %1, %%rcx; "                        \
                       "test %[tag], %%cl; je %l[slow_add]; "   \
                       "test %[tag], %0; je %l[slow_add]; "     \
                       "add %0, %%rcx; jo %l[slow_add]; "       \
                       "sub %[tag], %%rcx; "                    \
                       "mov %%rcx, (%[vsp])\n"                  \
                       : /* no outputs */                       \
                       : "r" (x), "r" (y),                      \
                         [vsp] "r" (sp), [tag] "i" (scm_tc2_int)\
                       : "rcx", "memory"                        \
                       : slow_add);                             \
    NEXT;                                                       \
  }                                                             \
 slow_add:                                                      \
  do { } while (0)

# define ASM_SUB(x, y)                                          \
  {                                                             \
    asm volatile goto ("mov %0, %%rcx; "                        \
                       "test %[tag], %%cl; je %l[slow_sub]; "   \
                       "test %[tag], %1; je %l[slow_sub]; "     \
                       "sub %1, %%rcx; jo %l[slow_sub]; "       \
                       "add %[tag], %%rcx; "                    \
                       "mov %%rcx, (%[vsp])\n"                  \
                       : /* no outputs */                       \
                       : "r" (x), "r" (y),                      \
                         [vsp] "r" (sp), [tag] "i" (scm_tc2_int)\
                       : "rcx", "memory"                        \
                       : slow_sub);                             \
    NEXT;                                                       \
  }                                                             \
 slow_sub:                                                      \
  do { } while (0)

#endif


VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  FUNC2 (+, scm_sum);
#else
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}

VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

  /* Check for overflow.  */
  if (SCM_LIKELY ((scm_t_intptr) x < INUM_MAX))
    {
      SCM result;

      /* Add the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) x
                         + (scm_t_intptr) SCM_I_MAKINUM (1)
                         - scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}

VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  FUNC2 (-, scm_difference);
#else
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}

VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for underflow.  */
  if (SCM_LIKELY ((scm_t_intptr) x > INUM_MIN))
    {
      SCM result;

      /* Subtract the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) x
                         - (scm_t_intptr) SCM_I_MAKINUM (1)
                         + scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}

# undef ASM_ADD
# undef ASM_SUB

VM_DEFINE_FUNCTION (154, mul, "mul", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_product (x, y));
}

VM_DEFINE_FUNCTION (155, div, "div", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_divide (x, y));
}

VM_DEFINE_FUNCTION (156, quo, "quo", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_quotient (x, y));
}

VM_DEFINE_FUNCTION (157, rem, "rem", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_remainder (x, y));
}

VM_DEFINE_FUNCTION (158, mod, "mod", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_modulo (x, y));
}

VM_DEFINE_FUNCTION (159, ash, "ash", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
        /* Right shift, will be a fixnum.  */
        RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
      else
        /* Left shift.  See comments in scm_ash.  */
        {
          long nn, bits_to_shift;

          nn = SCM_I_INUM (x);
          bits_to_shift = SCM_I_INUM (y);

          if (bits_to_shift < SCM_I_FIXNUM_BIT-1
              && ((unsigned long)
                  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
                  <= 1))
            RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
          /* fall through */
        }
      /* fall through */
    }
  SYNC_REGISTER ();
  RETURN (scm_ash (x, y));
}

VM_DEFINE_FUNCTION (160, logand, "logand", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logand (x, y));
}

VM_DEFINE_FUNCTION (161, logior, "logior", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logior (x, y));
}

VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logxor (x, y));
}

\f
/*
 * Vectors and arrays
 */

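/* vector-ref and vector-set handle the common case inline: a non-weak
   vector indexed by a nonnegative, in-range inum.  Everything else (weak
   vectors, non-inum or out-of-range indices) is punted to the generic
   libguile accessors, which also raise the errors.  */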
VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  long i = 0;
  ARGS2 (vect, idx);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}

VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  long i = 0;
  SCM vect, idx, val;
  POP (val); POP (idx); POP (vect);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}

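/* make-array carries a 24-bit operand: three instruction bytes are
   combined big-endian into LEN.  The shape is popped, and the LEN values
   remaining on top of the stack become the array's contents.  */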
VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  SYNC_REGISTER ();
  PRE_CHECK_UNDERFLOW (len);
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}

\f
/*
 * Structs
 */
#define VM_VALIDATE_STRUCT(obj, proc)           \
  if (SCM_UNLIKELY (!SCM_STRUCTP (obj)))        \
    {                                           \
      func_name = proc;                         \
      finish_args = (obj);                      \
      goto vm_error_not_a_struct;               \
    }

VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
{
  ARGS1 (obj);
  RETURN (scm_from_bool (SCM_STRUCTP (obj)));
}

VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
{
  ARGS1 (obj);
  VM_VALIDATE_STRUCT (obj, "struct_vtable");
  RETURN (SCM_STRUCT_VTABLE (obj));
}

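/* make-struct's 16-bit operand N counts the vtable plus its initializers:
   the vtable sits N-1 slots below the stack top, with the N-1 initializers
   above it.  The inline path only covers `simple' vtables (every field a
   boxed scm value) with no finalizer and exactly one initializer per
   field; anything else goes through scm_c_make_structv.  */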
VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer.  */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}

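/* struct-ref and struct-set take the inline path only for instances of
   `simple' vtables (plus the read-write flag for struct-set), where the
   field at an in-range inum index is a plain boxed word that can be read
   or written directly; otherwise the generic accessors do the full
   checking.  */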
VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}

VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}

\f
/*
 * GOOPS support
 */
VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
{
  ARGS1 (obj);
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  SYNC_REGISTER ();
  RETURN (scm_class_of (obj));
}

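/* slot-ref and slot-set index the instance's data words directly and do no
   type or range checking of their own.  */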
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}

VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  POP (val);
  POP (idx);
  POP (instance);
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}

\f
/*
 * Bytevectors
 */
#define VM_VALIDATE_BYTEVECTOR(x, proc)                 \
  do                                                    \
    {                                                   \
      if (SCM_UNLIKELY (!SCM_BYTEVECTOR_P (x)))         \
        {                                               \
          func_name = proc;                             \
          finish_args = x;                              \
          goto vm_error_not_a_bytevector;               \
        }                                               \
    }                                                   \
  while (0)

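/* The endianness-dispatching reference instructions pop the endianness
   argument first: if it is the native endianness they jump straight to the
   corresponding *-native-ref instruction (sharing its code); otherwise
   they call the generic scm_bytevector_*_ref with the requested
   endianness.  */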
#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_ref);                             \
  {                                                                     \
    ARGS2 (bv, idx);                                                    \
    SYNC_REGISTER ();                                                   \
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));      \
  }                                                                     \
}

/* Return true (non-zero) if PTR has suitable alignment for TYPE.  */
#define ALIGNED_P(ptr, type)                    \
  ((scm_t_uintptr) (ptr) % alignof (type) == 0)

VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS

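/* Three flavours of native reference: BV_FIXABLE_INT_REF is for integer
   types that always fit in a fixnum on this word size, BV_INT_REF must
   additionally check SCM_FIXABLE and may allocate a bignum, and
   BV_FLOAT_REF boxes the value as a double.  All of them fall back to the
   generic accessor when the index is not an in-range, suitably aligned
   inum.  */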
#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)                   \
{                                                                       \
  long i;                                                               \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    RETURN (SCM_I_MAKINUM (*int_ptr));                                  \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));            \
    }                                                                   \
}

#define BV_INT_REF(stem, type, size)                                    \
{                                                                       \
  long i;                                                               \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    {                                                                   \
      scm_t_ ## type x = *int_ptr;                                      \
      if (SCM_FIXABLE (x))                                              \
        RETURN (SCM_I_MAKINUM (x));                                     \
      else                                                              \
        {                                                               \
          SYNC_REGISTER ();                                             \
          RETURN (scm_from_ ## type (x));                               \
        }                                                               \
    }                                                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));        \
    }                                                                   \
}

#define BV_FLOAT_REF(stem, fn_stem, type, size)                         \
{                                                                       \
  long i;                                                               \
  const type *float_ptr;                                                \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  SYNC_REGISTER ();                                                     \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    RETURN (scm_from_double (*float_ptr));                              \
  else                                                                  \
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));       \
}

VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF



#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_set);                             \
  {                                                                     \
    SCM bv, idx, val; POP (val); POP (idx); POP (bv);                   \
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);        \
    NEXT;                                                               \
  }                                                                     \
}

VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS

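/* The store side mirrors the reference macros: BV_FIXABLE_INT_SET also
   range-checks the fixnum value against [min, max] before storing,
   BV_INT_SET converts the value with scm_to_<type>, and BV_FLOAT_SET
   stores a double narrowed to the target type.  Anything that fails the
   fast-path checks goes through the generic setter.  */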
#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)         \
{                                                                       \
  long i, j = 0;                                                        \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))              \
                  && (SCM_I_INUMP (val))                                \
                  && ((j = SCM_I_INUM (val)) >= min)                    \
                  && (j <= max)))                                       \
    *int_ptr = (scm_t_ ## type) j;                                      \
  else                                                                  \
    scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);                \
  NEXT;                                                                 \
}

#define BV_INT_SET(stem, type, size)                                    \
{                                                                       \
  long i = 0;                                                           \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    *int_ptr = scm_to_ ## type (val);                                   \
  else                                                                  \
    scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);            \
  NEXT;                                                                 \
}

#define BV_FLOAT_SET(stem, fn_stem, type, size)                         \
{                                                                       \
  long i = 0;                                                           \
  SCM bv, idx, val;                                                     \
  type *float_ptr;                                                      \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    *float_ptr = scm_to_double (val);                                   \
  else                                                                  \
    scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);         \
  NEXT;                                                                 \
}

VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET

/*
  (defun renumber-ops ()
    "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
    (interactive "")
    (save-excursion
      (let ((counter 127)) (goto-char (point-min))
        (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
          (replace-match
           (number-to-string (setq counter (1+ counter)))
           t t nil 1)))))
*/

/*
  Local Variables:
  c-file-style: "gnu"
  End:
*/