libguile/vm-i-scheme.c
1 /* Copyright (C) 2001, 2009-2014 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm_engine.c */
20
21 \f
22 /*
23 * Predicates
24 */
25
26 #define ARGS1(a1) SCM a1 = sp[0];
27 #define ARGS2(a1,a2) SCM a1 = sp[-1], a2 = sp[0]; sp--; NULLSTACK (1);
28 #define ARGS3(a1,a2,a3) SCM a1 = sp[-2], a2 = sp[-1], a3 = sp[0]; sp -= 2; NULLSTACK (2);
29
30 #define RETURN(x) do { *sp = x; NEXT; } while (0)
31
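/* Illustrative note (not part of the original source): for a
two-operand function such as `add', the operands sit in sp[-1] and
sp[0].  ARGS2 binds them and drops the stack pointer by one, so
RETURN then overwrites the remaining top-of-stack slot:

before ARGS2:   ... | x | y   <- sp
after ARGS2:    ... | x       <- sp
after RETURN:   ... | result  <- sp

i.e. a two-in/one-out primitive nets exactly one stack slot.  */
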
32 VM_DEFINE_FUNCTION (128, not, "not", 1)
33 {
34 ARGS1 (x);
35 RETURN (scm_from_bool (scm_is_false (x)));
36 }
37
38 VM_DEFINE_FUNCTION (129, not_not, "not-not", 1)
39 {
40 ARGS1 (x);
41 RETURN (scm_from_bool (!scm_is_false (x)));
42 }
43
44 VM_DEFINE_FUNCTION (130, eq, "eq?", 2)
45 {
46 ARGS2 (x, y);
47 RETURN (scm_from_bool (scm_is_eq (x, y)));
48 }
49
50 VM_DEFINE_FUNCTION (131, not_eq, "not-eq?", 2)
51 {
52 ARGS2 (x, y);
53 RETURN (scm_from_bool (!scm_is_eq (x, y)));
54 }
55
56 VM_DEFINE_FUNCTION (132, nullp, "null?", 1)
57 {
58 ARGS1 (x);
59 RETURN (scm_from_bool (scm_is_null (x)));
60 }
61
62 VM_DEFINE_FUNCTION (133, not_nullp, "not-null?", 1)
63 {
64 ARGS1 (x);
65 RETURN (scm_from_bool (!scm_is_null (x)));
66 }
67
68 VM_DEFINE_FUNCTION (134, eqv, "eqv?", 2)
69 {
70 ARGS2 (x, y);
71 if (scm_is_eq (x, y))
72 RETURN (SCM_BOOL_T);
73 if (SCM_IMP (x) || SCM_IMP (y))
74 RETURN (SCM_BOOL_F);
75 SYNC_REGISTER ();
76 RETURN (scm_eqv_p (x, y));
77 }
78
79 VM_DEFINE_FUNCTION (135, equal, "equal?", 2)
80 {
81 ARGS2 (x, y);
82 if (scm_is_eq (x, y))
83 RETURN (SCM_BOOL_T);
84 if (SCM_IMP (x) || SCM_IMP (y))
85 RETURN (SCM_BOOL_F);
86 SYNC_REGISTER ();
87 RETURN (scm_equal_p (x, y));
88 }
89
90 VM_DEFINE_FUNCTION (136, pairp, "pair?", 1)
91 {
92 ARGS1 (x);
93 RETURN (scm_from_bool (scm_is_pair (x)));
94 }
95
96 VM_DEFINE_FUNCTION (137, listp, "list?", 1)
97 {
98 ARGS1 (x);
99 RETURN (scm_from_bool (scm_ilength (x) >= 0));
100 }
101
102 VM_DEFINE_FUNCTION (138, symbolp, "symbol?", 1)
103 {
104 ARGS1 (x);
105 RETURN (scm_from_bool (scm_is_symbol (x)));
106 }
107
108 VM_DEFINE_FUNCTION (139, vectorp, "vector?", 1)
109 {
110 ARGS1 (x);
111 RETURN (scm_from_bool (SCM_I_IS_VECTOR (x)));
112 }
113
114 \f
115 /*
116 * Basic data
117 */
118
119 VM_DEFINE_FUNCTION (140, cons, "cons", 2)
120 {
121 ARGS2 (x, y);
122 CONS (x, x, y);
123 RETURN (x);
124 }
125
126 #define VM_VALIDATE_CONS(x, proc) \
127 VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))
128
129 VM_DEFINE_FUNCTION (141, car, "car", 1)
130 {
131 ARGS1 (x);
132 VM_VALIDATE_CONS (x, "car");
133 RETURN (SCM_CAR (x));
134 }
135
136 VM_DEFINE_FUNCTION (142, cdr, "cdr", 1)
137 {
138 ARGS1 (x);
139 VM_VALIDATE_CONS (x, "cdr");
140 RETURN (SCM_CDR (x));
141 }
142
143 VM_DEFINE_INSTRUCTION (143, set_car, "set-car!", 0, 2, 0)
144 {
145 SCM x, y;
146 POP2 (y, x);
147 VM_VALIDATE_CONS (x, "set-car!");
148 SCM_SETCAR (x, y);
149 NEXT;
150 }
151
152 VM_DEFINE_INSTRUCTION (144, set_cdr, "set-cdr!", 0, 2, 0)
153 {
154 SCM x, y;
155 POP2 (y, x);
156 VM_VALIDATE_CONS (x, "set-cdr!");
157 SCM_SETCDR (x, y);
158 NEXT;
159 }
160
161 \f
162 /*
163 * Numeric relational tests
164 */
165
166 #undef REL
167 #define REL(crel,srel) \
168 { \
169 ARGS2 (x, y); \
170 if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
171 RETURN (scm_from_bool (((scm_t_signed_bits) SCM_UNPACK (x)) \
172 crel ((scm_t_signed_bits) SCM_UNPACK (y)))); \
173 SYNC_REGISTER (); \
174 RETURN (srel (x, y)); \
175 }
176
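/* Hedged worked example (assuming the usual fixnum encoding in which
SCM_I_MAKINUM (n) packs n as (n << 2) + scm_tc2_int, with
scm_tc2_int == 2): the map n -> 4n + 2 is strictly increasing, so
comparing the *tagged* words as scm_t_signed_bits yields the same
ordering as comparing the fixnums themselves.  For instance 3 and -1
become 14 and -2, and 14 > -2 just as 3 > -1.  That is why REL can
skip untagging entirely on its fast path.  */
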
177 VM_DEFINE_FUNCTION (145, ee, "ee?", 2)
178 {
179 REL (==, scm_num_eq_p);
180 }
181
182 VM_DEFINE_FUNCTION (146, lt, "lt?", 2)
183 {
184 REL (<, scm_less_p);
185 }
186
187 VM_DEFINE_FUNCTION (147, le, "le?", 2)
188 {
189 REL (<=, scm_leq_p);
190 }
191
192 VM_DEFINE_FUNCTION (148, gt, "gt?", 2)
193 {
194 REL (>, scm_gr_p);
195 }
196
197 VM_DEFINE_FUNCTION (149, ge, "ge?", 2)
198 {
199 REL (>=, scm_geq_p);
200 }
201
202 \f
203 /*
204 * Numeric functions
205 */
206
207 /* The maximum/minimum tagged integers. */
208 #undef INUM_MAX
209 #undef INUM_MIN
210 #undef INUM_STEP
211 #define INUM_MAX \
212 ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
213 #define INUM_MIN \
214 ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))
215 #define INUM_STEP \
216 ((scm_t_signed_bits) SCM_UNPACK (SCM_INUM1) \
217 - (scm_t_signed_bits) SCM_UNPACK (SCM_INUM0))
218
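/* Hedged sketch of what these expand to under the usual two-bit
tagging (scm_tc2_int == 2, assumed here for illustration): SCM_INUM0
unpacks to 2 and SCM_INUM1 to 6, so INUM_STEP is 4, the distance
between consecutive fixnums in their tagged representation.
INUM_MAX and INUM_MIN are the tagged words of the most positive and
most negative fixnums, which lets `add1' and `sub1' below perform
their overflow checks directly on tagged values.  */
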
219 #undef FUNC2
220 #define FUNC2(CFUNC,SFUNC) \
221 { \
222 ARGS2 (x, y); \
223 if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
224 { \
225 scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y);\
226 if (SCM_FIXABLE (n)) \
227 RETURN (SCM_I_MAKINUM (n)); \
228 } \
229 SYNC_REGISTER (); \
230 RETURN (SFUNC (x, y)); \
231 }
232
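/* Rough sketch of how FUNC2 is used (illustration only; `add' below
really is defined this way whenever no ASM_ADD is available):

VM_DEFINE_FUNCTION (150, add, "add", 2)
{
  FUNC2 (+, scm_sum);
}

The scm_t_int64 intermediate cannot overflow for fixnum + or -,
because fixnums carry at most SCM_I_FIXNUM_BIT (at most 62) bits, so
the fast path only needs the SCM_FIXABLE check before re-tagging the
result.  */
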
233 /* Assembly tagged integer arithmetic routines. This code uses the
234 `asm goto' feature introduced in GCC 4.5. */
235
236 #if SCM_GNUC_PREREQ (4, 5) && (defined __x86_64__ || defined __i386__)
237
238 # undef _CX
239 # if SIZEOF_VOID_P == 8
240 # define _CX "rcx"
241 # elif SIZEOF_VOID_P == 4
242 # define _CX "ecx"
243 # else
244 # error unsupported word size
245 # endif
246
247 /* The macros below check the CPU's overflow flag to improve fixnum
248 arithmetic. The _CX register (%rcx or %ecx) is explicitly
249 clobbered because `asm goto' can't have outputs; if it could, an
250 `r' output constraint would let the register allocator choose a
251 register.
252
253 TODO: Use `cold' label attribute in GCC 4.6.
254 http://gcc.gnu.org/ml/gcc-patches/2010-10/msg01777.html */
255
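/* Worked example of the tag arithmetic used by ASM_ADD and ASM_SUB
(hedged; it assumes the standard encoding where an inum n is the
word 4n + 2, i.e. scm_tc2_int == 2):

x = 4a + 2,  y = 4b + 2
(y - 2) + x = 4a + 4b + 2 = 4 (a + b) + 2
(x - y) + 2 = 4 (a - b) + 2

so subtracting the tag from one operand and combining with the other
yields a correctly tagged result without untagging, and the overflow
flag tested by `jo' fires exactly when the mathematical result no
longer fits in a fixnum.  */
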
256 # define ASM_ADD(x, y) \
257 { \
258 asm volatile goto ("mov %1, %%"_CX"; " \
259 "test %[tag], %%cl; je %l[slow_add]; " \
260 "test %[tag], %0; je %l[slow_add]; " \
261 "sub %[tag], %%"_CX"; " \
262 "add %0, %%"_CX"; jo %l[slow_add]; " \
263 "mov %%"_CX", (%[vsp])\n" \
264 : /* no outputs */ \
265 : "r" (x), "r" (y), \
266 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
267 : _CX, "memory", "cc" \
268 : slow_add); \
269 NEXT; \
270 } \
271 slow_add: \
272 do { } while (0)
273
274 # define ASM_SUB(x, y) \
275 { \
276 asm volatile goto ("mov %0, %%"_CX"; " \
277 "test %[tag], %%cl; je %l[slow_sub]; " \
278 "test %[tag], %1; je %l[slow_sub]; " \
279 "sub %1, %%"_CX"; jo %l[slow_sub]; " \
280 "add %[tag], %%"_CX"; " \
281 "mov %%"_CX", (%[vsp])\n" \
282 : /* no outputs */ \
283 : "r" (x), "r" (y), \
284 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
285 : _CX, "memory", "cc" \
286 : slow_sub); \
287 NEXT; \
288 } \
289 slow_sub: \
290 do { } while (0)
291
292 # define ASM_MUL(x, y) \
293 { \
294 scm_t_signed_bits xx = SCM_I_INUM (x); \
295 asm volatile goto ("mov %1, %%"_CX"; " \
296 "test %[tag], %%cl; je %l[slow_mul]; " \
297 "sub %[tag], %%"_CX"; " \
298 "test %[tag], %0; je %l[slow_mul]; " \
299 "imul %2, %%"_CX"; jo %l[slow_mul]; " \
300 "add %[tag], %%"_CX"; " \
301 "mov %%"_CX", (%[vsp])\n" \
302 : /* no outputs */ \
303 : "r" (x), "r" (y), "r" (xx), \
304 [vsp] "r" (sp), [tag] "i" (scm_tc2_int) \
305 : _CX, "memory", "cc" \
306 : slow_mul); \
307 NEXT; \
308 } \
309 slow_mul: \
310 do { } while (0)
311
312 #endif
313
314 #if SCM_GNUC_PREREQ (4, 5) && defined __arm__
315
316 # define ASM_ADD(x, y) \
317 if (SCM_LIKELY (SCM_I_INUMP (x) && SCM_I_INUMP (y))) \
318 { \
319 asm volatile goto ("adds r0, %0, %1; bvs %l[slow_add]; " \
320 "str r0, [%[vsp]]\n" \
321 : /* no outputs */ \
322 : "r" (x), "r" (y - scm_tc2_int), \
323 [vsp] "r" (sp) \
324 : "r0", "memory", "cc" \
325 : slow_add); \
326 NEXT; \
327 } \
328 slow_add: \
329 do { } while (0)
330
331 # define ASM_SUB(x, y) \
332 if (SCM_LIKELY (SCM_I_INUMP (x) && SCM_I_INUMP (y))) \
333 { \
334 asm volatile goto ("subs r0, %0, %1; bvs %l[slow_sub]; " \
335 "str r0, [%[vsp]]\n" \
336 : /* no outputs */ \
337 : "r" (x), "r" (y - scm_tc2_int), \
338 [vsp] "r" (sp) \
339 : "r0", "memory", "cc" \
340 : slow_sub); \
341 NEXT; \
342 } \
343 slow_sub: \
344 do { } while (0)
345
346 # if defined (__ARM_ARCH_3M__) || defined (__ARM_ARCH_4__) \
347 || defined (__ARM_ARCH_4T__) || defined (__ARM_ARCH_5__) \
348 || defined (__ARM_ARCH_5T__) || defined (__ARM_ARCH_5E__) \
349 || defined (__ARM_ARCH_5TE__) || defined (__ARM_ARCH_5TEJ__) \
350 || defined (__ARM_ARCH_6__) || defined (__ARM_ARCH_6J__) \
351 || defined (__ARM_ARCH_6K__) || defined (__ARM_ARCH_6Z__) \
352 || defined (__ARM_ARCH_6ZK__) || defined (__ARM_ARCH_6T2__) \
353 || defined (__ARM_ARCH_6M__) || defined (__ARM_ARCH_7__) \
354 || defined (__ARM_ARCH_7A__) || defined (__ARM_ARCH_7R__) \
355 || defined (__ARM_ARCH_7M__) || defined (__ARM_ARCH_7EM__) \
356 || defined (__ARM_ARCH_8A__)
357
358 /* The ARM architectures listed above support the SMULL instruction. */
359
360 # define ASM_MUL(x, y) \
361 if (SCM_LIKELY (SCM_I_INUMP (x) && SCM_I_INUMP (y))) \
362 { \
363 scm_t_signed_bits rlo, rhi; \
364 asm ("smull %0, %1, %2, %3\n" \
365 : "=&r" (rlo), "=&r" (rhi) \
366 : "r" (SCM_UNPACK (x) - scm_tc2_int), \
367 "r" (SCM_I_INUM (y))); \
368 if (SCM_LIKELY (SCM_SRS (rlo, 31) == rhi)) \
369 RETURN (SCM_PACK (rlo + scm_tc2_int)); \
370 } \
371 do { } while (0)
372
373 # endif
374
375 #endif
376
377 VM_DEFINE_FUNCTION (150, add, "add", 2)
378 {
379 #ifndef ASM_ADD
380 FUNC2 (+, scm_sum);
381 #else
382 ARGS2 (x, y);
383 ASM_ADD (x, y);
384 SYNC_REGISTER ();
385 RETURN (scm_sum (x, y));
386 #endif
387 }
388
389 VM_DEFINE_FUNCTION (151, add1, "add1", 1)
390 {
391 ARGS1 (x);
392
393 /* Check for overflow. We must avoid overflow in the signed
394 addition below, even if X is not an inum. */
395 if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) <= INUM_MAX - INUM_STEP))
396 {
397 SCM result;
398
399 /* Add 1 to the integer without untagging. */
400 result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) + INUM_STEP);
401
402 if (SCM_LIKELY (SCM_I_INUMP (result)))
403 RETURN (result);
404 }
405
406 SYNC_REGISTER ();
407 RETURN (scm_sum (x, SCM_I_MAKINUM (1)));
408 }
409
410 VM_DEFINE_FUNCTION (152, sub, "sub", 2)
411 {
412 #ifndef ASM_SUB
413 FUNC2 (-, scm_difference);
414 #else
415 ARGS2 (x, y);
416 ASM_SUB (x, y);
417 SYNC_REGISTER ();
418 RETURN (scm_difference (x, y));
419 #endif
420 }
421
422 VM_DEFINE_FUNCTION (153, sub1, "sub1", 1)
423 {
424 ARGS1 (x);
425
426 /* Check for overflow. We must avoid overflow in the signed
427 subtraction below, even if X is not an inum. */
428 if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) >= INUM_MIN + INUM_STEP))
429 {
430 SCM result;
431
432 /* Subtract 1 from the integer without untagging. */
433 result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) - INUM_STEP);
434
435 if (SCM_LIKELY (SCM_I_INUMP (result)))
436 RETURN (result);
437 }
438
439 SYNC_REGISTER ();
440 RETURN (scm_difference (x, SCM_I_MAKINUM (1)));
441 }
442
443 VM_DEFINE_FUNCTION (154, mul, "mul", 2)
444 {
445 ARGS2 (x, y);
446 #ifdef ASM_MUL
447 ASM_MUL (x, y);
448 #endif
449 SYNC_REGISTER ();
450 RETURN (scm_product (x, y));
451 }
452
453 # undef ASM_ADD
454 # undef ASM_SUB
455 # undef ASM_MUL
456
457 VM_DEFINE_FUNCTION (155, div, "div", 2)
458 {
459 ARGS2 (x, y);
460 SYNC_REGISTER ();
461 RETURN (scm_divide (x, y));
462 }
463
464 VM_DEFINE_FUNCTION (156, quo, "quo", 2)
465 {
466 ARGS2 (x, y);
467 SYNC_REGISTER ();
468 RETURN (scm_quotient (x, y));
469 }
470
471 VM_DEFINE_FUNCTION (157, rem, "rem", 2)
472 {
473 ARGS2 (x, y);
474 SYNC_REGISTER ();
475 RETURN (scm_remainder (x, y));
476 }
477
478 VM_DEFINE_FUNCTION (158, mod, "mod", 2)
479 {
480 ARGS2 (x, y);
481 SYNC_REGISTER ();
482 RETURN (scm_modulo (x, y));
483 }
484
485 VM_DEFINE_FUNCTION (159, ash, "ash", 2)
486 {
487 ARGS2 (x, y);
488 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
489 {
490 if (SCM_I_INUM (y) < 0)
491 /* Right shift, will be a fixnum. */
492 RETURN (SCM_I_MAKINUM
493 (SCM_SRS (SCM_I_INUM (x),
494 (-SCM_I_INUM (y) <= SCM_I_FIXNUM_BIT-1)
495 ? -SCM_I_INUM (y) : SCM_I_FIXNUM_BIT-1)));
496 else
497 /* Left shift. See comments in scm_ash. */
498 {
499 scm_t_signed_bits nn, bits_to_shift;
500
501 nn = SCM_I_INUM (x);
502 bits_to_shift = SCM_I_INUM (y);
503
504 if (bits_to_shift < SCM_I_FIXNUM_BIT-1
505 && ((scm_t_bits)
506 (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
507 <= 1))
508 RETURN (SCM_I_MAKINUM (nn < 0
509 ? -(-nn << bits_to_shift)
510 : (nn << bits_to_shift)));
511 /* fall through */
512 }
513 /* fall through */
514 }
515 SYNC_REGISTER ();
516 RETURN (scm_ash (x, y));
517 }
518
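/* Hedged explanation of the left-shift safety test above: `nn << b'
still fits in a fixnum exactly when the arithmetic shift
SCM_SRS (nn, SCM_I_FIXNUM_BIT-1 - b) is 0 (small non-negative nn) or
-1 (small negative nn); adding 1 maps those two values to 1 and 0,
so the unsigned comparison `<= 1' accepts exactly them.  For
example, with SCM_I_FIXNUM_BIT == 62 (a 64-bit build, assumed here)
and b == 3, the fast path is taken iff -2^58 <= nn <= 2^58 - 1, so
that nn << 3 stays inside the fixnum range.  */
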
519 VM_DEFINE_FUNCTION (160, logand, "logand", 2)
520 {
521 ARGS2 (x, y);
522 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
523 /* Compute bitwise AND without untagging */
524 RETURN (SCM_PACK (SCM_UNPACK (x) & SCM_UNPACK (y)));
525 SYNC_REGISTER ();
526 RETURN (scm_logand (x, y));
527 }
528
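/* Why `logand' above and `logior' below need no untagging (hedged,
assuming the 4n + 2 fixnum encoding): for x = 4a + 2 and y = 4b + 2
the two tag bits are 10 in both words, and AND and OR both leave
them as 10 while combining the payload bits, so

(4a + 2) & (4b + 2) = 4 (a & b) + 2
(4a + 2) | (4b + 2) = 4 (a | b) + 2

are already correctly tagged fixnums.  XOR would clear the tag
(2 ^ 2 == 0), which is why `logxor' untags with SCM_I_INUM first.  */
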
529 VM_DEFINE_FUNCTION (161, logior, "logior", 2)
530 {
531 ARGS2 (x, y);
532 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
533 /* Compute bitwise OR without untagging */
534 RETURN (SCM_PACK (SCM_UNPACK (x) | SCM_UNPACK (y)));
535 SYNC_REGISTER ();
536 RETURN (scm_logior (x, y));
537 }
538
539 VM_DEFINE_FUNCTION (162, logxor, "logxor", 2)
540 {
541 ARGS2 (x, y);
542 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
543 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
544 SYNC_REGISTER ();
545 RETURN (scm_logxor (x, y));
546 }
547
548 \f
549 /*
550 * Vectors and arrays
551 */
552
553 VM_DEFINE_FUNCTION (163, vector_ref, "vector-ref", 2)
554 {
555 scm_t_signed_bits i = 0;
556 ARGS2 (vect, idx);
557 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
558 && SCM_I_INUMP (idx)
559 && ((i = SCM_I_INUM (idx)) >= 0)
560 && i < SCM_I_VECTOR_LENGTH (vect)))
561 RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
562 else
563 {
564 SYNC_REGISTER ();
565 RETURN (scm_vector_ref (vect, idx));
566 }
567 }
568
569 VM_DEFINE_INSTRUCTION (164, vector_set, "vector-set", 0, 3, 0)
570 {
571 scm_t_signed_bits i = 0;
572 SCM vect, idx, val;
573 POP3 (val, idx, vect);
574 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
575 && SCM_I_INUMP (idx)
576 && ((i = SCM_I_INUM (idx)) >= 0)
577 && i < SCM_I_VECTOR_LENGTH (vect)))
578 SCM_I_VECTOR_WELTS (vect)[i] = val;
579 else
580 {
581 SYNC_REGISTER ();
582 scm_vector_set_x (vect, idx, val);
583 }
584 NEXT;
585 }
586
587 VM_DEFINE_INSTRUCTION (165, make_array, "make-array", 3, -1, 1)
588 {
589 scm_t_uint32 len;
590 SCM shape, ret;
591
592 len = FETCH ();
593 len = (len << 8) + FETCH ();
594 len = (len << 8) + FETCH ();
595 POP (shape);
596 SYNC_REGISTER ();
597 PRE_CHECK_UNDERFLOW (len);
598 ret = scm_from_contiguous_array (shape, sp - len + 1, len);
599 DROPN (len);
600 PUSH (ret);
601 NEXT;
602 }
603
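/* Note on the operand encoding above (illustrative): the three
FETCHed bytes form a 24-bit big-endian operand count, e.g. the byte
sequence 0x01 0x02 0x03 yields len = 0x010203 = 66051.  The shape is
popped first; the len elements still on the stack are then passed to
scm_from_contiguous_array and dropped.  */
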
604 \f
605 /*
606 * Structs
607 */
608 #define VM_VALIDATE_STRUCT(obj, proc) \
609 VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_struct (proc, obj))
610
611 VM_DEFINE_FUNCTION (166, struct_p, "struct?", 1)
612 {
613 ARGS1 (obj);
614 RETURN (scm_from_bool (SCM_STRUCTP (obj)));
615 }
616
617 VM_DEFINE_FUNCTION (167, struct_vtable, "struct-vtable", 1)
618 {
619 ARGS1 (obj);
620 VM_VALIDATE_STRUCT (obj, "struct_vtable");
621 RETURN (SCM_STRUCT_VTABLE (obj));
622 }
623
624 VM_DEFINE_INSTRUCTION (168, make_struct, "make-struct", 2, -1, 1)
625 {
626 unsigned h = FETCH ();
627 unsigned l = FETCH ();
628 scm_t_bits n = ((h << 8U) + l);
629 SCM vtable = sp[-(n - 1)];
630 const SCM *inits = sp - n + 2;
631 SCM ret;
632
633 SYNC_REGISTER ();
634
635 if (SCM_LIKELY (SCM_STRUCTP (vtable)
636 && SCM_VTABLE_FLAG_IS_SET (vtable, SCM_VTABLE_FLAG_SIMPLE)
637 && (SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size) + 1
638 == n)
639 && !SCM_VTABLE_INSTANCE_FINALIZER (vtable)))
640 {
641 /* Verily, we are making a simple struct with the right number of
642 initializers, and no finalizer. */
643 ret = scm_words ((scm_t_bits)SCM_STRUCT_DATA (vtable) | scm_tc3_struct,
644 n + 1);
645 SCM_SET_CELL_WORD_1 (ret, (scm_t_bits)SCM_CELL_OBJECT_LOC (ret, 2));
646 memcpy (SCM_STRUCT_DATA (ret), inits, (n - 1) * sizeof (SCM));
647 }
648 else
649 ret = scm_c_make_structv (vtable, 0, n - 1, (scm_t_bits *) inits);
650
651 DROPN (n);
652 PUSH (ret);
653
654 NEXT;
655 }
656
657 VM_DEFINE_FUNCTION (169, struct_ref, "struct-ref", 2)
658 {
659 ARGS2 (obj, pos);
660
661 if (SCM_LIKELY (SCM_STRUCTP (obj)
662 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
663 SCM_VTABLE_FLAG_SIMPLE)
664 && SCM_I_INUMP (pos)))
665 {
666 SCM vtable;
667 scm_t_bits index, len;
668
669 /* An inum is a signed value, but once cast to the unsigned
670 scm_t_bits type a negative index becomes larger than any possible
671 length, so negative indices fall through to the slow path below. */
672 index = SCM_I_INUM (pos);
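/* For example (illustration): a negative pos such as -1 becomes
index == (scm_t_bits) -1, the largest scm_t_bits value, which can
never be smaller than len, so the bounds test below rejects it.  */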
673 vtable = SCM_STRUCT_VTABLE (obj);
674 len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
675
676 if (SCM_LIKELY (index < len))
677 {
678 scm_t_bits *data = SCM_STRUCT_DATA (obj);
679 RETURN (SCM_PACK (data[index]));
680 }
681 }
682
683 SYNC_REGISTER ();
684 RETURN (scm_struct_ref (obj, pos));
685 }
686
687 VM_DEFINE_FUNCTION (170, struct_set, "struct-set", 3)
688 {
689 ARGS3 (obj, pos, val);
690
691 if (SCM_LIKELY (SCM_STRUCTP (obj)
692 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
693 SCM_VTABLE_FLAG_SIMPLE)
694 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
695 SCM_VTABLE_FLAG_SIMPLE_RW)
696 && SCM_I_INUMP (pos)))
697 {
698 SCM vtable;
699 scm_t_bits index, len;
700
701 /* See above regarding index being >= 0. */
702 index = SCM_I_INUM (pos);
703 vtable = SCM_STRUCT_VTABLE (obj);
704 len = SCM_STRUCT_DATA_REF (vtable, scm_vtable_index_size);
705 if (SCM_LIKELY (index < len))
706 {
707 scm_t_bits *data = SCM_STRUCT_DATA (obj);
708 data[index] = SCM_UNPACK (val);
709 RETURN (val);
710 }
711 }
712
713 SYNC_REGISTER ();
714 RETURN (scm_struct_set_x (obj, pos, val));
715 }
716
717 \f
718 /*
719 * GOOPS support
720 */
721 VM_DEFINE_FUNCTION (171, class_of, "class-of", 1)
722 {
723 ARGS1 (obj);
724 if (SCM_INSTANCEP (obj))
725 RETURN (SCM_CLASS_OF (obj));
726 SYNC_REGISTER ();
727 RETURN (scm_class_of (obj));
728 }
729
730 /* FIXME: No checking whatsoever. */
731 VM_DEFINE_FUNCTION (172, slot_ref, "slot-ref", 2)
732 {
733 size_t slot;
734 ARGS2 (instance, idx);
735 slot = SCM_I_INUM (idx);
736 RETURN (SCM_PACK (SCM_STRUCT_DATA (instance) [slot]));
737 }
738
739 /* FIXME: No checking whatsoever. */
740 VM_DEFINE_INSTRUCTION (173, slot_set, "slot-set", 0, 3, 0)
741 {
742 SCM instance, idx, val;
743 size_t slot;
744 POP3 (val, idx, instance);
745 slot = SCM_I_INUM (idx);
746 SCM_STRUCT_DATA (instance) [slot] = SCM_UNPACK (val);
747 NEXT;
748 }
749
750 \f
751 /*
752 * Bytevectors
753 */
754 #define VM_VALIDATE_BYTEVECTOR(x, proc) \
755 VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))
756
757 #define BV_REF_WITH_ENDIANNESS(stem, fn_stem) \
758 { \
759 SCM endianness; \
760 POP (endianness); \
761 if (scm_is_eq (endianness, scm_i_native_endianness)) \
762 goto VM_LABEL (bv_##stem##_native_ref); \
763 { \
764 ARGS2 (bv, idx); \
765 SYNC_REGISTER (); \
766 RETURN (scm_bytevector_##fn_stem##_ref (bv, idx, endianness)); \
767 } \
768 }
769
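/* Sketch of the dispatch above (illustrative): an instruction such as
bv-u16-ref pops its endianness operand first; if it is the native
endianness object it jumps straight to the bv-u16-native-ref label
defined further below and reuses that fast path, otherwise it falls
back to the generic scm_bytevector_u16_ref with the requested
endianness.  */
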
770 /* Return true (non-zero) if PTR has suitable alignment for TYPE. */
771 #define ALIGNED_P(ptr, type) \
772 ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
773
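/* For instance (hedged; assumes alignof (scm_t_uint32) == 4 on the
host and a word-aligned bytevector buffer): a bv-u32-native-ref at
index 2 fails ALIGNED_P and is punted to the generic accessor, while
index 4 can be read directly through the cast pointer.  */
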
774 VM_DEFINE_FUNCTION (174, bv_u16_ref, "bv-u16-ref", 3)
775 BV_REF_WITH_ENDIANNESS (u16, u16)
776 VM_DEFINE_FUNCTION (175, bv_s16_ref, "bv-s16-ref", 3)
777 BV_REF_WITH_ENDIANNESS (s16, s16)
778 VM_DEFINE_FUNCTION (176, bv_u32_ref, "bv-u32-ref", 3)
779 BV_REF_WITH_ENDIANNESS (u32, u32)
780 VM_DEFINE_FUNCTION (177, bv_s32_ref, "bv-s32-ref", 3)
781 BV_REF_WITH_ENDIANNESS (s32, s32)
782 VM_DEFINE_FUNCTION (178, bv_u64_ref, "bv-u64-ref", 3)
783 BV_REF_WITH_ENDIANNESS (u64, u64)
784 VM_DEFINE_FUNCTION (179, bv_s64_ref, "bv-s64-ref", 3)
785 BV_REF_WITH_ENDIANNESS (s64, s64)
786 VM_DEFINE_FUNCTION (180, bv_f32_ref, "bv-f32-ref", 3)
787 BV_REF_WITH_ENDIANNESS (f32, ieee_single)
788 VM_DEFINE_FUNCTION (181, bv_f64_ref, "bv-f64-ref", 3)
789 BV_REF_WITH_ENDIANNESS (f64, ieee_double)
790
791 #undef BV_REF_WITH_ENDIANNESS
792
793 #define BV_FIXABLE_INT_REF(stem, fn_stem, type, size) \
794 { \
795 scm_t_signed_bits i; \
796 const scm_t_ ## type *int_ptr; \
797 ARGS2 (bv, idx); \
798 \
799 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref"); \
800 i = SCM_I_INUM (idx); \
801 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
802 \
803 if (SCM_LIKELY (SCM_I_INUMP (idx) \
804 && (i >= 0) \
805 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
806 && (ALIGNED_P (int_ptr, scm_t_ ## type)))) \
807 RETURN (SCM_I_MAKINUM (*int_ptr)); \
808 else \
809 { \
810 SYNC_REGISTER (); \
811 RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx)); \
812 } \
813 }
814
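/* Hedged summary of the fast path above: it is taken only when idx is
a non-negative fixnum, the full `size'-byte access stays inside the
bytevector, and the computed pointer is aligned for the C type.
"Fixable" means every value of the type is itself a fixnum: true for
8- and 16-bit integers everywhere, but for 32-bit integers only on
64-bit builds, which is exactly how the macros are instantiated
below (see the SIZEOF_VOID_P conditionals).  All other cases fall
back to the generic scm_bytevector_*_ref procedures.  */
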
815 #define BV_INT_REF(stem, type, size) \
816 { \
817 scm_t_signed_bits i; \
818 const scm_t_ ## type *int_ptr; \
819 ARGS2 (bv, idx); \
820 \
821 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref"); \
822 i = SCM_I_INUM (idx); \
823 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
824 \
825 if (SCM_LIKELY (SCM_I_INUMP (idx) \
826 && (i >= 0) \
827 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
828 && (ALIGNED_P (int_ptr, scm_t_ ## type)))) \
829 { \
830 scm_t_ ## type x = *int_ptr; \
831 if (SCM_FIXABLE (x)) \
832 RETURN (SCM_I_MAKINUM (x)); \
833 else \
834 { \
835 SYNC_REGISTER (); \
836 RETURN (scm_from_ ## type (x)); \
837 } \
838 } \
839 else \
840 { \
841 SYNC_REGISTER (); \
842 RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx)); \
843 } \
844 }
845
846 #define BV_FLOAT_REF(stem, fn_stem, type, size) \
847 { \
848 scm_t_signed_bits i; \
849 const type *float_ptr; \
850 ARGS2 (bv, idx); \
851 \
852 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref"); \
853 i = SCM_I_INUM (idx); \
854 float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
855 \
856 SYNC_REGISTER (); \
857 if (SCM_LIKELY (SCM_I_INUMP (idx) \
858 && (i >= 0) \
859 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
860 && (ALIGNED_P (float_ptr, type)))) \
861 RETURN (scm_from_double (*float_ptr)); \
862 else \
863 RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx)); \
864 }
865
866 VM_DEFINE_FUNCTION (182, bv_u8_ref, "bv-u8-ref", 2)
867 BV_FIXABLE_INT_REF (u8, u8, uint8, 1)
868 VM_DEFINE_FUNCTION (183, bv_s8_ref, "bv-s8-ref", 2)
869 BV_FIXABLE_INT_REF (s8, s8, int8, 1)
870 VM_DEFINE_FUNCTION (184, bv_u16_native_ref, "bv-u16-native-ref", 2)
871 BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2)
872 VM_DEFINE_FUNCTION (185, bv_s16_native_ref, "bv-s16-native-ref", 2)
873 BV_FIXABLE_INT_REF (s16, s16_native, int16, 2)
874 VM_DEFINE_FUNCTION (186, bv_u32_native_ref, "bv-u32-native-ref", 2)
875 #if SIZEOF_VOID_P > 4
876 BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4)
877 #else
878 BV_INT_REF (u32, uint32, 4)
879 #endif
880 VM_DEFINE_FUNCTION (187, bv_s32_native_ref, "bv-s32-native-ref", 2)
881 #if SIZEOF_VOID_P > 4
882 BV_FIXABLE_INT_REF (s32, s32_native, int32, 4)
883 #else
884 BV_INT_REF (s32, int32, 4)
885 #endif
886 VM_DEFINE_FUNCTION (188, bv_u64_native_ref, "bv-u64-native-ref", 2)
887 BV_INT_REF (u64, uint64, 8)
888 VM_DEFINE_FUNCTION (189, bv_s64_native_ref, "bv-s64-native-ref", 2)
889 BV_INT_REF (s64, int64, 8)
890 VM_DEFINE_FUNCTION (190, bv_f32_native_ref, "bv-f32-native-ref", 2)
891 BV_FLOAT_REF (f32, ieee_single, float, 4)
892 VM_DEFINE_FUNCTION (191, bv_f64_native_ref, "bv-f64-native-ref", 2)
893 BV_FLOAT_REF (f64, ieee_double, double, 8)
894
895 #undef BV_FIXABLE_INT_REF
896 #undef BV_INT_REF
897 #undef BV_FLOAT_REF
898
899
900
901 #define BV_SET_WITH_ENDIANNESS(stem, fn_stem) \
902 { \
903 SCM endianness; \
904 POP (endianness); \
905 if (scm_is_eq (endianness, scm_i_native_endianness)) \
906 goto VM_LABEL (bv_##stem##_native_set); \
907 { \
908 SCM bv, idx, val; POP3 (val, idx, bv); \
909 SYNC_REGISTER (); \
910 scm_bytevector_##fn_stem##_set_x (bv, idx, val, endianness); \
911 NEXT; \
912 } \
913 }
914
915 VM_DEFINE_INSTRUCTION (192, bv_u16_set, "bv-u16-set", 0, 4, 0)
916 BV_SET_WITH_ENDIANNESS (u16, u16)
917 VM_DEFINE_INSTRUCTION (193, bv_s16_set, "bv-s16-set", 0, 4, 0)
918 BV_SET_WITH_ENDIANNESS (s16, s16)
919 VM_DEFINE_INSTRUCTION (194, bv_u32_set, "bv-u32-set", 0, 4, 0)
920 BV_SET_WITH_ENDIANNESS (u32, u32)
921 VM_DEFINE_INSTRUCTION (195, bv_s32_set, "bv-s32-set", 0, 4, 0)
922 BV_SET_WITH_ENDIANNESS (s32, s32)
923 VM_DEFINE_INSTRUCTION (196, bv_u64_set, "bv-u64-set", 0, 4, 0)
924 BV_SET_WITH_ENDIANNESS (u64, u64)
925 VM_DEFINE_INSTRUCTION (197, bv_s64_set, "bv-s64-set", 0, 4, 0)
926 BV_SET_WITH_ENDIANNESS (s64, s64)
927 VM_DEFINE_INSTRUCTION (198, bv_f32_set, "bv-f32-set", 0, 4, 0)
928 BV_SET_WITH_ENDIANNESS (f32, ieee_single)
929 VM_DEFINE_INSTRUCTION (199, bv_f64_set, "bv-f64-set", 0, 4, 0)
930 BV_SET_WITH_ENDIANNESS (f64, ieee_double)
931
932 #undef BV_SET_WITH_ENDIANNESS
933
934 #define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size) \
935 { \
936 scm_t_signed_bits i, j = 0; \
937 SCM bv, idx, val; \
938 scm_t_ ## type *int_ptr; \
939 \
940 POP3 (val, idx, bv); \
941 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set"); \
942 i = SCM_I_INUM (idx); \
943 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
944 \
945 if (SCM_LIKELY (SCM_I_INUMP (idx) \
946 && (i >= 0) \
947 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
948 && (ALIGNED_P (int_ptr, scm_t_ ## type)) \
949 && (SCM_I_INUMP (val)) \
950 && ((j = SCM_I_INUM (val)) >= min) \
951 && (j <= max))) \
952 *int_ptr = (scm_t_ ## type) j; \
953 else \
954 { \
955 SYNC_REGISTER (); \
956 scm_bytevector_ ## fn_stem ## _set_x (bv, idx, val); \
957 } \
958 NEXT; \
959 }
960
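/* Illustrative note: the min/max test above keeps a store on the fast
path only when the value already fits the target type.  E.g.
bv-s8-set accepts a fixnum in [-128, 127] directly, while 200 or a
bignum falls through to scm_bytevector_s8_set_x, which does the full
type and range checking and raises the appropriate error.  */
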
961 #define BV_INT_SET(stem, type, size) \
962 { \
963 scm_t_signed_bits i = 0; \
964 SCM bv, idx, val; \
965 scm_t_ ## type *int_ptr; \
966 \
967 POP3 (val, idx, bv); \
968 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set"); \
969 i = SCM_I_INUM (idx); \
970 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
971 \
972 if (SCM_LIKELY (SCM_I_INUMP (idx) \
973 && (i >= 0) \
974 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
975 && (ALIGNED_P (int_ptr, scm_t_ ## type)))) \
976 *int_ptr = scm_to_ ## type (val); \
977 else \
978 { \
979 SYNC_REGISTER (); \
980 scm_bytevector_ ## stem ## _native_set_x (bv, idx, val); \
981 } \
982 NEXT; \
983 }
984
985 #define BV_FLOAT_SET(stem, fn_stem, type, size) \
986 { \
987 scm_t_signed_bits i = 0; \
988 SCM bv, idx, val; \
989 type *float_ptr; \
990 \
991 POP3 (val, idx, bv); \
992 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set"); \
993 i = SCM_I_INUM (idx); \
994 float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
995 \
996 if (SCM_LIKELY (SCM_I_INUMP (idx) \
997 && (i >= 0) \
998 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
999 && (ALIGNED_P (float_ptr, type)))) \
1000 *float_ptr = scm_to_double (val); \
1001 else \
1002 { \
1003 SYNC_REGISTER (); \
1004 scm_bytevector_ ## fn_stem ## _native_set_x (bv, idx, val); \
1005 } \
1006 NEXT; \
1007 }
1008
1009 VM_DEFINE_INSTRUCTION (200, bv_u8_set, "bv-u8-set", 0, 3, 0)
1010 BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1)
1011 VM_DEFINE_INSTRUCTION (201, bv_s8_set, "bv-s8-set", 0, 3, 0)
1012 BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1)
1013 VM_DEFINE_INSTRUCTION (202, bv_u16_native_set, "bv-u16-native-set", 0, 3, 0)
1014 BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2)
1015 VM_DEFINE_INSTRUCTION (203, bv_s16_native_set, "bv-s16-native-set", 0, 3, 0)
1016 BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2)
1017 VM_DEFINE_INSTRUCTION (204, bv_u32_native_set, "bv-u32-native-set", 0, 3, 0)
1018 #if SIZEOF_VOID_P > 4
1019 BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4)
1020 #else
1021 BV_INT_SET (u32, uint32, 4)
1022 #endif
1023 VM_DEFINE_INSTRUCTION (205, bv_s32_native_set, "bv-s32-native-set", 0, 3, 0)
1024 #if SIZEOF_VOID_P > 4
1025 BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4)
1026 #else
1027 BV_INT_SET (s32, int32, 4)
1028 #endif
1029 VM_DEFINE_INSTRUCTION (206, bv_u64_native_set, "bv-u64-native-set", 0, 3, 0)
1030 BV_INT_SET (u64, uint64, 8)
1031 VM_DEFINE_INSTRUCTION (207, bv_s64_native_set, "bv-s64-native-set", 0, 3, 0)
1032 BV_INT_SET (s64, int64, 8)
1033 VM_DEFINE_INSTRUCTION (208, bv_f32_native_set, "bv-f32-native-set", 0, 3, 0)
1034 BV_FLOAT_SET (f32, ieee_single, float, 4)
1035 VM_DEFINE_INSTRUCTION (209, bv_f64_native_set, "bv-f64-native-set", 0, 3, 0)
1036 BV_FLOAT_SET (f64, ieee_double, double, 8)
1037
1038 #undef BV_FIXABLE_INT_SET
1039 #undef BV_INT_SET
1040 #undef BV_FLOAT_SET
1041
1042 /*
1043 (defun renumber-ops ()
1044 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
1045 (interactive "")
1046 (save-excursion
1047 (let ((counter 127)) (goto-char (point-min))
1048 (while (re-search-forward "^VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
1049 (replace-match
1050 (number-to-string (setq counter (1+ counter)))
1051 t t nil 1)))))
1052 */
1053
1054 /*
1055 Local Variables:
1056 c-file-style: "gnu"
1057 End:
1058 */