fix a failure to sync regs in vm bytevector ops
libguile/vm-i-scheme.c
/* Copyright (C) 2001, 2009, 2010 Free Software Foundation, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 3 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 * 02110-1301 USA
 */

/* This file is included in vm_engine.c */

\f
/*
 * Predicates
 */

#define ARGS1(a1)       SCM a1 = sp[0];
#define ARGS2(a1,a2)    SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
#define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);

#define RETURN(x)       do { *sp = x; NEXT; } while (0)
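/* To illustrate the stack discipline of the macros above: for a
   two-argument primitive, sp[-1] holds the first operand and sp[0] the
   second; ARGS2 binds both and pops one slot, so the following RETURN
   overwrites the remaining operand slot with the result and dispatches
   to the next instruction via NEXT.  */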

VM_DEFINE_FUNCTION (128, not, "not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_false (x)));
}

VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_false (x)));
}

VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
{
  ARGS2 (x, y);
  RETURN (scm_from_bool (!scm_is_eq (x, y)));
}

VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_null (x)));
}

VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (!scm_is_null (x)));
}

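/* A note on the SYNC_REGISTER calls below (and throughout this file):
   before calling out to a libguile procedure such as scm_eqv_p, which
   may allocate, throw, or reenter the VM, the cached ip/sp/fp are
   presumably written back to the VM object so that the callee sees a
   consistent stack; the all-immediate fast paths can skip this.  */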
VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_eqv_p (x, y));
}

VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
{
  ARGS2 (x, y);
  if (scm_is_eq (x, y))
    RETURN (SCM_BOOL_T);
  if (SCM_IMP (x) || SCM_IMP (y))
    RETURN (SCM_BOOL_F);
  SYNC_REGISTER ();
  RETURN (scm_equal_p (x, y));
}

VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_pair (x)));
}

VM_DEFINE_FUNCTION (137, listp, "list?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_ilength (x) >= 0));
}

VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (scm_is_symbol (x)));
}

VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
{
  ARGS1 (x);
  RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
}

\f
/*
 * Basic data
 */

VM_DEFINE_FUNCTION (140, cons, "cons", 2)
{
  ARGS2 (x, y);
  CONS (x, x, y);
  RETURN (x);
}

#define VM_VALIDATE_CONS(x, proc)               \
  if (SCM_UNLIKELY (!scm_is_pair (x)))          \
    { func_name = proc;                         \
      finish_args = x;                          \
      goto vm_error_not_a_pair;                 \
    }

VM_DEFINE_FUNCTION (141, car, "car", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "car");
  RETURN (SCM_CAR (x));
}

VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
{
  ARGS1 (x);
  VM_VALIDATE_CONS (x, "cdr");
  RETURN (SCM_CDR (x));
}

VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
{
  SCM x, y;
  POP (y);
  POP (x);
  VM_VALIDATE_CONS (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT;
}

VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
{
  SCM x, y;
  POP (y);
  POP (x);
  VM_VALIDATE_CONS (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT;
}

\f
/*
 * Numeric relational tests
 */

#undef REL
#define REL(crel,srel)                                          \
{                                                               \
  ARGS2 (x, y);                                                 \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))                       \
    RETURN (scm_from_bool ((scm_t_signed_bits) (x)              \
                           crel (scm_t_signed_bits) (y)));      \
  SYNC_REGISTER ();                                             \
  RETURN (srel (x, y));                                         \
}
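
/* Comparing the tagged words directly is valid because inum tagging is
   order-preserving: assuming the usual encoding, a fixnum N is stored as
   the word 4*N + scm_tc2_int, so when both operands are inums the raw
   bits compare in the same order as the untagged values.  */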

VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
{
  REL (==, scm_num_eq_p);
}

VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
{
  REL (<, scm_less_p);
}

VM_DEFINE_FUNCTION (147, le, "le?", 2)
{
  REL (<=, scm_leq_p);
}

VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
{
  REL (>, scm_gr_p);
}

VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
{
  REL (>=, scm_geq_p);
}

\f
/*
 * Numeric functions
 */

/* The maximum/minimum tagged integers.  */
#undef INUM_MAX
#undef INUM_MIN
#define INUM_MAX (INTPTR_MAX - 1)
#define INUM_MIN (INTPTR_MIN + scm_tc2_int)
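/* To spell out the encoding assumed here: with the 2-bit inum tag
   (scm_tc2_int == 2), a fixnum N is stored as the word 4*N + scm_tc2_int,
   so the largest tagged word that fits in an intptr is INTPTR_MAX - 1 and
   the smallest is INTPTR_MIN + scm_tc2_int, exactly the values above.  */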

#undef FUNC2
#define FUNC2(CFUNC,SFUNC)                                      \
{                                                               \
  ARGS2 (x, y);                                                 \
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))                       \
    {                                                           \
      scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);      \
      if (SCM_FIXABLE (n))                                      \
        RETURN (SCM_I_MAKINUM (n));                             \
    }                                                           \
  SYNC_REGISTER ();                                             \
  RETURN (SFUNC (x, y));                                        \
}

/* Assembly tagged integer arithmetic routines.  This code uses the
   `asm goto' feature introduced in GCC 4.5.  */

#if defined __x86_64__ && SCM_GNUC_PREREQ (4, 5)

/* The macros below check the CPU's overflow flag to improve fixnum
   arithmetic.  The %rcx register is explicitly clobbered because `asm
   goto' can't have outputs; if it could, an `r' output constraint could
   be used to let the register allocator choose a register.

   TODO: Use the `cold' label attribute in GCC 4.6.
   http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html  */
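/* A sketch of what ASM_ADD below does with the tag arithmetic, assuming
   the 2-bit inum tag scm_tc2_int == 2: the `test' instructions bail out
   to the slow path unless both operands have the inum tag bit set; then
   for x = 4a + 2 and y = 4b + 2 the machine code computes
   x + y - scm_tc2_int == 4*(a + b) + 2, i.e. the tagged sum, jumping to
   the slow path (generic scm_sum) if the 64-bit addition overflows.  */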

# define ASM_ADD(x, y)                                                  \
    {                                                                   \
      asm volatile goto ("mov %1, %%rcx; "                              \
                         "test %[tag], %%cl; je %l[slow_add]; "         \
                         "test %[tag], %0; je %l[slow_add]; "           \
                         "add %0, %%rcx; jo %l[slow_add]; "             \
                         "sub %[tag], %%rcx; "                          \
                         "mov %%rcx, (%[vsp])\n"                        \
                         : /* no outputs */                             \
                         : "r" (x), "r" (y),                            \
                           [vsp] "r" (sp), [tag] "i" (scm_tc2_int)      \
                         : "rcx", "memory"                              \
                         : slow_add);                                   \
      NEXT;                                                             \
    }                                                                   \
  slow_add:                                                             \
    do { } while (0)

# define ASM_SUB(x, y)                                                  \
    {                                                                   \
      asm volatile goto ("mov %0, %%rcx; "                              \
                         "test %[tag], %%cl; je %l[slow_sub]; "         \
                         "test %[tag], %1; je %l[slow_sub]; "           \
                         "sub %1, %%rcx; jo %l[slow_sub]; "             \
                         "add %[tag], %%rcx; "                          \
                         "mov %%rcx, (%[vsp])\n"                        \
                         : /* no outputs */                             \
                         : "r" (x), "r" (y),                            \
                           [vsp] "r" (sp), [tag] "i" (scm_tc2_int)      \
                         : "rcx", "memory"                              \
                         : slow_sub);                                   \
      NEXT;                                                             \
    }                                                                   \
  slow_sub:                                                             \
    do { } while (0)

#endif


VM_DEFINE_FUNCTION (150, add, "add", 2)
{
#ifndef ASM_ADD
  FUNC2 (+, scm_sum);
#else
  ARGS2 (x, y);
  ASM_ADD (x, y);
  SYNC_REGISTER ();
  RETURN (scm_sum (x, y));
#endif
}

VM_DEFINE_FUNCTION (151, add1, "add1", 1)
{
  ARGS1 (x);

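  /* The fast path below operates on the tagged word directly: x and
     SCM_I_MAKINUM (1) both carry the inum tag, so adding the raw words
     and subtracting one tag gives the correctly tagged sum,
     (4n + tag) + (4 + tag) - tag == 4(n + 1) + tag.  */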
  /* Check for overflow.  */
  if (SCM_LIKELY ((scm_t_intptr) x < INUM_MAX))
    {
      SCM result;

      /* Add the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) x
                         + (scm_t_intptr) SCM_I_MAKINUM (1)
                         - scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
}

VM_DEFINE_FUNCTION (152, sub, "sub", 2)
{
#ifndef ASM_SUB
  FUNC2 (-, scm_difference);
#else
  ARGS2 (x, y);
  ASM_SUB (x, y);
  SYNC_REGISTER ();
  RETURN (scm_difference (x, y));
#endif
}

VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
{
  ARGS1 (x);

  /* Check for underflow.  */
  if (SCM_LIKELY ((scm_t_intptr) x > INUM_MIN))
    {
      SCM result;

      /* Subtract the integers without untagging.  */
      result = SCM_PACK ((scm_t_intptr) x
                         - (scm_t_intptr) SCM_I_MAKINUM (1)
                         + scm_tc2_int);

      if (SCM_LIKELY (SCM_I_INUMP (result)))
        RETURN (result);
    }

  SYNC_REGISTER ();
  RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
}

# undef ASM_ADD
# undef ASM_SUB

VM_DEFINE_FUNCTION (154, mul, "mul", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_product (x, y));
}

VM_DEFINE_FUNCTION (155, div, "div", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_divide (x, y));
}

VM_DEFINE_FUNCTION (156, quo, "quo", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_quotient (x, y));
}

VM_DEFINE_FUNCTION (157, rem, "rem", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_remainder (x, y));
}

VM_DEFINE_FUNCTION (158, mod, "mod", 2)
{
  ARGS2 (x, y);
  SYNC_REGISTER ();
  RETURN (scm_modulo (x, y));
}

VM_DEFINE_FUNCTION (159, ash, "ash", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
        /* Right shift, will be a fixnum.  */
        RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) >> -SCM_I_INUM (y)));
      else
        /* Left shift.  See comments in scm_ash.  */
        {
          scm_t_signed_bits nn, bits_to_shift;

          nn = SCM_I_INUM (x);
          bits_to_shift = SCM_I_INUM (y);

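          /* The test below (mirroring scm_ash) arithmetic-shifts nn
             right so that only the bits a left shift by bits_to_shift
             would push past the fixnum range remain; requiring that
             residue to be 0 or -1 (all equal to the sign bit) guarantees
             the shifted result is still a fixnum.  */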
          if (bits_to_shift < SCM_I_FIXNUM_BIT-1
              && ((scm_t_bits)
                  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
                  <= 1))
            RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
          /* fall through */
        }
      /* fall through */
    }
  SYNC_REGISTER ();
  RETURN (scm_ash (x, y));
}

VM_DEFINE_FUNCTION (160, logand, "logand", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) & SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logand (x, y));
}

VM_DEFINE_FUNCTION (161, logior, "logior", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) | SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logior (x, y));
}

VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  SYNC_REGISTER ();
  RETURN (scm_logxor (x, y));
}

\f
/*
 * Vectors and arrays
 */

VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
{
  scm_t_signed_bits i = 0;
  ARGS2 (vect, idx);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
  else
    {
      SYNC_REGISTER ();
      RETURN (scm_vector_ref (vect, idx));
    }
}

VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
{
  scm_t_signed_bits i = 0;
  SCM vect, idx, val;
  POP (val); POP (idx); POP (vect);
  if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
                  && SCM_I_INUMP (idx)
                  && ((i = SCM_I_INUM (idx)) >= 0)
                  && i < SCM_I_VECTOR_LENGTH (vect)))
    SCM_I_VECTOR_WELTS (vect)[i] = val;
  else
    {
      SYNC_REGISTER ();
      scm_vector_set_x (vect, idx, val);
    }
  NEXT;
}

VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
{
  scm_t_uint32 len;
  SCM shape, ret;

  len = FETCH ();
  len = (len << 8) + FETCH ();
  len = (len << 8) + FETCH ();
  POP (shape);
  SYNC_REGISTER ();
  PRE_CHECK_UNDERFLOW (len);
  ret = scm_from_contiguous_array (shape, sp - len + 1, len);
  DROPN (len);
  PUSH (ret);
  NEXT;
}

\f
/*
 * Structs
 */
#define VM_VALIDATE_STRUCT(obj, proc)           \
  if (SCM_UNLIKELY (!SCM_STRUCTP (obj)))        \
    {                                           \
      func_name = proc;                         \
      finish_args = (obj);                      \
      goto vm_error_not_a_struct;               \
    }

VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
{
  ARGS1 (obj);
  RETURN (scm_from_bool (SCM_STRUCTP (obj)));
}

VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
{
  ARGS1 (obj);
  VM_VALIDATE_STRUCT (obj, "struct-vtable");
  RETURN (SCM_STRUCT_VTABLE (obj));
}

VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
{
  unsigned h = FETCH ();
  unsigned l = FETCH ();
  scm_t_bits n = ((h << 8U) + l);
  SCM vtable = sp[-(n - 1)];
  const SCM *inits = sp - n + 2;
  SCM ret;

  SYNC_REGISTER ();

  if (SCM_LIKELY (SCM_STRUCTP (vtable)
                  && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
                  && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
                      == n)
                  && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
    {
      /* Verily, we are making a simple struct with the right number of
         initializers, and no finalizer.  */
      ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
                       n + 1);
      SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
      memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
    }
  else
    ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);

  DROPN (n);
  PUSH (ret);

  NEXT;
}

VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
{
  ARGS2 (obj, pos);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* True, an inum is a signed value, but cast to unsigned it will
         certainly be more than the length, so we will fall through if
         index is negative.  */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);

      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          RETURN (SCM_PACK (data[index]));
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_ref (obj, pos));
}

VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
{
  ARGS3 (obj, pos, val);

  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && SCM_I_INUMP (pos)))
    {
      SCM vtable;
      scm_t_bits index, len;

      /* See above regarding index being >= 0.  */
      index = SCM_I_INUM (pos);
      vtable = SCM_STRUCT_VTABLE (obj);
      len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
      if (SCM_LIKELY (index < len))
        {
          scm_t_bits *data = SCM_STRUCT_DATA (obj);
          data[index] = SCM_UNPACK (val);
          RETURN (val);
        }
    }

  SYNC_REGISTER ();
  RETURN (scm_struct_set_x (obj, pos, val));
}

\f
/*
 * GOOPS support
 */
VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
{
  ARGS1 (obj);
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  SYNC_REGISTER ();
  RETURN (scm_class_of (obj));
}

/* FIXME: No checking whatsoever.  */
VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
{
  size_t slot;
  ARGS2 (instance, idx);
  slot = SCM_I_INUM (idx);
  RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
}

/* FIXME: No checking whatsoever.  */
VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
{
  SCM instance, idx, val;
  size_t slot;
  POP (val);
  POP (idx);
  POP (instance);
  slot = SCM_I_INUM (idx);
  SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
  NEXT;
}

\f
/*
 * Bytevectors
 */
#define VM_VALIDATE_BYTEVECTOR(x, proc)                 \
  do                                                    \
    {                                                   \
      if (SCM_UNLIKELY (!SCM_BYTEVECTOR_P (x)))         \
        {                                               \
          func_name = proc;                             \
          finish_args = x;                              \
          goto vm_error_not_a_bytevector;               \
        }                                               \
    }                                                   \
  while (0)

#define BV_REF_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_ref);                             \
  {                                                                     \
    ARGS2 (bv, idx);                                                    \
    SYNC_REGISTER ();                                                   \
    RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness));      \
  }                                                                     \
}
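
/* Note the SYNC_REGISTER before the non-native fallback above: the
   generic scm_bytevector_*_ref procedures can allocate (bignum or flonum
   results) and can throw on a bad index or endianness, so the VM's
   cached registers are flushed first.  The native-endianness case
   instead jumps to the corresponding *-native-ref instruction, which
   does its own syncing where needed.  */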

/* Return true (non-zero) if PTR has suitable alignment for TYPE.  */
#define ALIGNED_P(ptr, type)                    \
  ((scm_t_uintptr) (ptr) % alignof (type) == 0)
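/* For example, with a typical alignof (scm_t_uint32) of 4, ALIGNED_P (p,
   scm_t_uint32) holds exactly when the low two bits of the address are
   zero; misaligned (or out-of-range) accesses take the generic slow path
   in the macros below.  */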

VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
BV_REF_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
BV_REF_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
BV_REF_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
BV_REF_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
BV_REF_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
BV_REF_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
BV_REF_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
BV_REF_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_REF_WITH_ENDIANNESS

#define BV_FIXABLE_INT_REF(stem, fn_stem, type, size)                   \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    RETURN (SCM_I_MAKINUM (*int_ptr));                                  \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx));            \
    }                                                                   \
}

#define BV_INT_REF(stem, type, size)                                    \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const scm_t_ ## type *int_ptr;                                        \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    {                                                                   \
      scm_t_ ## type x = *int_ptr;                                      \
      if (SCM_FIXABLE (x))                                              \
        RETURN (SCM_I_MAKINUM (x));                                     \
      else                                                              \
        {                                                               \
          SYNC_REGISTER ();                                             \
          RETURN (scm_from_ ## type (x));                               \
        }                                                               \
    }                                                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx));        \
    }                                                                   \
}

#define BV_FLOAT_REF(stem, fn_stem, type, size)                         \
{                                                                       \
  scm_t_signed_bits i;                                                  \
  const type *float_ptr;                                                \
  ARGS2 (bv, idx);                                                      \
                                                                        \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  SYNC_REGISTER ();                                                     \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    RETURN (scm_from_double (*float_ptr));                              \
  else                                                                  \
    RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx));       \
}
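
/* Unlike the integer reference macros, BV_FLOAT_REF syncs registers
   before the range/alignment test, evidently because both of its
   branches allocate: scm_from_double boxes the result as a flonum, and
   the fallback *_native_ref can also throw on a bad index.  */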

VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
BV_FIXABLE_INT_REF (s8, s8, int8, 1)
VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
#else
BV_INT_REF (u32, uint32, 4)
#endif
VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
#else
BV_INT_REF (s32, int32, 4)
#endif
VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
BV_INT_REF (u64, uint64, 8)
VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
BV_INT_REF (s64, int64, 8)
VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
BV_FLOAT_REF (f32, ieee_single, float, 4)
VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
BV_FLOAT_REF (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_REF
#undef BV_INT_REF
#undef BV_FLOAT_REF



#define BV_SET_WITH_ENDIANNESS(stem, fn_stem)                           \
{                                                                       \
  SCM endianness;                                                       \
  POP (endianness);                                                     \
  if (scm_is_eq (endianness, scm_i_native_endianness))                  \
    goto VM_LABEL (bv_##stem##_native_set);                             \
  {                                                                     \
    SCM bv, idx, val; POP (val); POP (idx); POP (bv);                   \
    SYNC_REGISTER ();                                                   \
    scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness);        \
    NEXT;                                                               \
  }                                                                     \
}
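
/* As with the ref side, the non-native fallback above calls the generic
   scm_bytevector_*_set_x procedures, which type-check their arguments
   and may throw, so registers are synced first; these fallback call
   sites are presumably what the "sync regs" fix in this commit is
   about.  */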

VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u16, u16)
VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s16, s16)
VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u32, u32)
VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s32, s32)
VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (u64, u64)
VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (s64, s64)
VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f32, ieee_single)
VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
BV_SET_WITH_ENDIANNESS (f64, ieee_double)

#undef BV_SET_WITH_ENDIANNESS

#define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size)         \
{                                                                       \
  scm_t_signed_bits i, j = 0;                                           \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))              \
                  && (SCM_I_INUMP (val))                                \
                  && ((j = SCM_I_INUM (val)) >= min)                    \
                  && (j <= max)))                                       \
    *int_ptr = (scm_t_ ## type) j;                                      \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val);              \
    }                                                                   \
  NEXT;                                                                 \
}

#define BV_INT_SET(stem, type, size)                                    \
{                                                                       \
  scm_t_signed_bits i = 0;                                              \
  SCM bv, idx, val;                                                     \
  scm_t_ ## type *int_ptr;                                              \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);      \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (int_ptr, scm_t_ ## type))))            \
    *int_ptr = scm_to_ ## type (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## stem ## _native_set_x (bv, idx, val);          \
    }                                                                   \
  NEXT;                                                                 \
}

#define BV_FLOAT_SET(stem, fn_stem, type, size)                         \
{                                                                       \
  scm_t_signed_bits i = 0;                                              \
  SCM bv, idx, val;                                                     \
  type *float_ptr;                                                      \
                                                                        \
  POP (val); POP (idx); POP (bv);                                       \
  VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set");                      \
  i = SCM_I_INUM (idx);                                                 \
  float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i);              \
                                                                        \
  if (SCM_LIKELY (SCM_I_INUMP (idx)                                     \
                  && (i >= 0)                                           \
                  && (i + size <= SCM_BYTEVECTOR_LENGTH (bv))           \
                  && (ALIGNED_P (float_ptr, type))))                    \
    *float_ptr = scm_to_double (val);                                   \
  else                                                                  \
    {                                                                   \
      SYNC_REGISTER ();                                                 \
      scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val);       \
    }                                                                   \
  NEXT;                                                                 \
}

VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
#else
BV_INT_SET (u32, uint32, 4)
#endif
VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
#else
BV_INT_SET (s32, int32, 4)
#endif
VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
BV_INT_SET (u64, uint64, 8)
VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
BV_INT_SET (s64, int64, 8)
VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
BV_FLOAT_SET (f32, ieee_single, float, 4)
VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
BV_FLOAT_SET (f64, ieee_double, double, 8)

#undef BV_FIXABLE_INT_SET
#undef BV_INT_SET
#undef BV_FLOAT_SET

/*
  (defun renumber-ops ()
    "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
    (interactive "")
    (save-excursion
      (let ((counter 127)) (goto-char (point-min))
        (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
          (replace-match
           (number-to-string (setq counter (1+ counter)))
           t t nil 1)))))
*/

/*
  Local Variables:
  c-file-style: "gnu"
  End:
*/