Add thread-local lock-free, TLS-free freelists.
[bpt/guile.git] / libguile / vm-engine.c
1 /* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013, 2014 Free Software Foundation, Inc.
2 *
3 * This library is free software; you can redistribute it and/or
4 * modify it under the terms of the GNU Lesser General Public License
5 * as published by the Free Software Foundation; either version 3 of
6 * the License, or (at your option) any later version.
7 *
8 * This library is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this library; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
16 * 02110-1301 USA
17 */
18
19 /* This file is included in vm.c multiple times. */
20
21
22 #define UNPACK_8_8_8(op,a,b,c) \
23 do \
24 { \
25 a = (op >> 8) & 0xff; \
26 b = (op >> 16) & 0xff; \
27 c = op >> 24; \
28 } \
29 while (0)
30
31 #define UNPACK_8_16(op,a,b) \
32 do \
33 { \
34 a = (op >> 8) & 0xff; \
35 b = op >> 16; \
36 } \
37 while (0)
38
39 #define UNPACK_16_8(op,a,b) \
40 do \
41 { \
42 a = (op >> 8) & 0xffff; \
43 b = op >> 24; \
44 } \
45 while (0)
46
47 #define UNPACK_12_12(op,a,b) \
48 do \
49 { \
50 a = (op >> 8) & 0xfff; \
51 b = op >> 20; \
52 } \
53 while (0)
54
55 #define UNPACK_24(op,a) \
56 do \
57 { \
58 a = op >> 8; \
59 } \
60 while (0)
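/* Worked example (added for illustration): each 32-bit instruction word
   keeps its opcode in the low 8 bits and packs operands above it, so if
   op = 0x030201ff (0xff standing in for the opcode byte) then

       UNPACK_8_8_8 (op, a, b, c)   =>  a = 0x01, b = 0x02, c = 0x03
       UNPACK_8_16  (op, a, b)      =>  a = 0x01, b = 0x0302
       UNPACK_24    (op, a)         =>  a = 0x030201

   i.e. the macros shift the opcode byte away and mask out each field.  */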
61
62
63 /* Assign some registers by hand. There used to be a bigger list here,
64 but it was never tested, and in the case of x86-32, was a source of
65 compilation failures. It can be revived if it's useful, but my naive
66 hope is that simply annotating the locals with "register" will be a
67 sufficient hint to the compiler. */
68 #ifdef __GNUC__
69 # if defined __x86_64__
70 /* GCC 4.6 chooses %rbp for IP_REG and %rbx for SP_REG, which works
71 well. Tell it to keep the jump table in r12, which is
72 callee-saved. */
73 # define JT_REG asm ("r12")
74 # endif
75 #endif
76
77 #ifndef IP_REG
78 # define IP_REG
79 #endif
80 #ifndef FP_REG
81 # define FP_REG
82 #endif
83 #ifndef JT_REG
84 # define JT_REG
85 #endif
86
87 #define VM_ASSERT(condition, handler) \
88 do { \
89 if (SCM_UNLIKELY (!(condition))) \
90 { \
91 SYNC_IP(); \
92 handler; \
93 } \
94 } while (0)
95
96 #ifdef VM_ENABLE_ASSERTIONS
97 # define ASSERT(condition) VM_ASSERT (condition, abort())
98 #else
99 # define ASSERT(condition)
100 #endif
101
102 #if VM_USE_HOOKS
103 #define RUN_HOOK(exp) \
104 do { \
105 if (SCM_UNLIKELY (vp->trace_level > 0)) \
106 { \
107 SYNC_IP (); \
108 exp; \
109 CACHE_FP (); \
110 } \
111 } while (0)
112 #else
113 #define RUN_HOOK(exp)
114 #endif
115 #define RUN_HOOK0(h) RUN_HOOK (vm_dispatch_##h##_hook (vp))
116 #define RUN_HOOK1(h, arg) RUN_HOOK (vm_dispatch_##h##_hook (vp, arg))
117
118 #define APPLY_HOOK() \
119 RUN_HOOK0 (apply)
120 #define PUSH_CONTINUATION_HOOK() \
121 RUN_HOOK0 (push_continuation)
122 #define POP_CONTINUATION_HOOK(old_fp) \
123 RUN_HOOK1 (pop_continuation, old_fp)
124 #define NEXT_HOOK() \
125 RUN_HOOK0 (next)
126 #define ABORT_CONTINUATION_HOOK() \
127 RUN_HOOK0 (abort)
128
129 #define VM_HANDLE_INTERRUPTS \
130 SCM_ASYNC_TICK_WITH_GUARD_CODE (thread, SYNC_IP (), CACHE_FP ())
131
132
133 /* Virtual Machine
134
135 This is Guile's new virtual machine. When I say "new", I mean
136 relative to the current virtual machine. At some point it will
137 become "the" virtual machine, and we'll delete this paragraph. As
138 such, the rest of the comments speak as if there's only one VM.
139 Unlike the old VM, local 0 is the procedure, and the
140 first argument is local 1. At some point in the future we should
141 change the fp to point to the procedure and not to local 1.
142
143 <more overview here>
144 */
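/* Sketch of the frame layout as implied by the macros and opcodes in this
   file (added for illustration; which of the two frame words holds the
   return address and which the dynamic link is left to the SCM_FRAME_*
   accessors):

       ...                          <- caller's frame
       return address, dynamic link <- two words, at fp[-2] and fp[-1]
       local 0                      <- fp: the procedure
       local 1                      <- the first argument
       ...
       local N-1                    <- vp->sp after ALLOC_FRAME/RESET_FRAME (n)

   LOCAL_REF and LOCAL_SET index off of fp.  */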
145
146
147 /* The VM has three pieces of state: the instruction pointer (IP), the frame
148 pointer (FP), and the top-of-stack pointer (SP). We cache the first
149 two of these in machine registers, local to the VM, because they are
150 used extensively by the VM. As the SP is used more by code outside
151 the VM than by the VM itself, we don't bother caching it locally.
152
153 Since the FP changes infrequently, relative to the IP, we keep vp->fp
154 in sync with the local FP. This would be a big loss for the IP,
155 though, so instead of updating vp->ip all the time, we call SYNC_IP
156 whenever we would need to know the IP of the top frame. In practice,
157 we need to SYNC_IP whenever we call out of the VM to a function that
158 would like to walk the stack, perhaps as the result of an
159 exception.
160
161 One more thing. We allow the stack to move when it expands.
162 Therefore if you call out to a C procedure that could call Scheme
163 code, or otherwise push anything on the stack, you will need to
164 CACHE_FP afterwards to restore the possibly-changed FP. */
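/* Illustrative pattern (added; scm_foo stands for any C call that can
   allocate, throw, or run Scheme code):

       SYNC_IP ();               // publish ip so the stack can be walked
       result = scm_foo (x, y);
       CACHE_FP ();              // the stack may have moved; refetch fp

   RETURN_EXP below packages exactly this shape; SYNC_IP alone is enough
   for callees that only need to inspect the stack.  */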
165
166 #define SYNC_IP() vp->ip = (ip)
167
168 #define CACHE_FP() fp = (vp->fp)
169 #define CACHE_REGISTER() \
170 do { \
171 ip = vp->ip; \
172 fp = vp->fp; \
173 } while (0)
174
175
176
177 /* After advancing vp->sp, but before writing any stack slots, check
178 that it is actually in bounds. If it is not in bounds, we expand the
179 stack, possibly by moving it elsewhere in memory; therefore no
180 pointer into the stack besides FP is valid across a CHECK_OVERFLOW
181 call. Be careful! */
182 #define CHECK_OVERFLOW() \
183 do { \
184 if (SCM_UNLIKELY (vp->sp >= vp->stack_limit)) \
185 { \
186 SYNC_IP (); \
187 vm_expand_stack (vp); \
188 CACHE_FP (); \
189 } \
190 } while (0)
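/* For example (added for illustration), holding a stack pointer across a
   macro that does CHECK_OVERFLOW, such as ALLOC_FRAME below, is wrong:

       SCM *slot = LOCAL_ADDRESS (3);
       ALLOC_FRAME (nlocals);     // may expand and move the stack
       *slot = x;                 // possibly dangling

   re-deriving the location afterwards is safe:

       ALLOC_FRAME (nlocals);
       LOCAL_SET (3, x);
*/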
191
192 /* Reserve stack space for a frame. Will check that there is sufficient
193 stack space for N locals, including the procedure. Invoke after
194 preparing the new frame and setting the fp and ip. */
195 #define ALLOC_FRAME(n) \
196 do { \
197 vp->sp = LOCAL_ADDRESS (n - 1); \
198 CHECK_OVERFLOW (); \
199 if (vp->sp > vp->sp_max_since_gc) \
200 vp->sp_max_since_gc = vp->sp; \
201 } while (0)
202
203 /* Reset the current frame to hold N locals. Used when we know that no
204 stack expansion is needed. */
205 #define RESET_FRAME(n) \
206 do { \
207 vp->sp = LOCAL_ADDRESS (n - 1); \
208 if (vp->sp > vp->sp_max_since_gc) \
209 vp->sp_max_since_gc = vp->sp; \
210 } while (0)
211
212 /* Compute the number of locals in the frame. At a call, this is equal
213 to the number of actual arguments when a function is first called,
214 plus one for the function. */
215 #define FRAME_LOCALS_COUNT_FROM(slot) \
216 (vp->sp + 1 - LOCAL_ADDRESS (slot))
217 #define FRAME_LOCALS_COUNT() \
218 FRAME_LOCALS_COUNT_FROM (0)
219
220 /* Restore registers after returning from a frame. */
221 #define RESTORE_FRAME() \
222 do { \
223 } while (0)
224
225
226 #ifdef HAVE_LABELS_AS_VALUES
227 # define BEGIN_DISPATCH_SWITCH /* */
228 # define END_DISPATCH_SWITCH /* */
229 # define NEXT(n) \
230 do \
231 { \
232 ip += n; \
233 NEXT_HOOK (); \
234 op = *ip; \
235 goto *jump_table[op & 0xff]; \
236 } \
237 while (0)
238 # define VM_DEFINE_OP(opcode, tag, name, meta) \
239 op_##tag:
240 #else
241 # define BEGIN_DISPATCH_SWITCH \
242 vm_start: \
243 NEXT_HOOK (); \
244 op = *ip; \
245 switch (op & 0xff) \
246 {
247 # define END_DISPATCH_SWITCH \
248 }
249 # define NEXT(n) \
250 do \
251 { \
252 ip += n; \
253 goto vm_start; \
254 } \
255 while (0)
256 # define VM_DEFINE_OP(opcode, tag, name, meta) \
257 op_##tag: \
258 case opcode:
259 #endif
260
261 #define LOCAL_ADDRESS(i) (&SCM_FRAME_LOCAL (fp, i))
262 #define LOCAL_REF(i) SCM_FRAME_LOCAL (fp, i)
263 #define LOCAL_SET(i,o) SCM_FRAME_LOCAL (fp, i) = o
264
265 #define VARIABLE_REF(v) SCM_VARIABLE_REF (v)
266 #define VARIABLE_SET(v,o) SCM_VARIABLE_SET (v, o)
267 #define VARIABLE_BOUNDP(v) (!scm_is_eq (VARIABLE_REF (v), SCM_UNDEFINED))
268
269 #define RETURN_ONE_VALUE(ret) \
270 do { \
271 SCM val = ret; \
272 SCM *old_fp; \
273 VM_HANDLE_INTERRUPTS; \
274 old_fp = fp; \
275 ip = SCM_FRAME_RETURN_ADDRESS (fp); \
276 fp = vp->fp = SCM_FRAME_DYNAMIC_LINK (fp); \
277 /* Clear frame. */ \
278 old_fp[-1] = SCM_BOOL_F; \
279 old_fp[-2] = SCM_BOOL_F; \
280 /* Leave proc. */ \
281 SCM_FRAME_LOCAL (old_fp, 1) = val; \
282 vp->sp = &SCM_FRAME_LOCAL (old_fp, 1); \
283 POP_CONTINUATION_HOOK (old_fp); \
284 NEXT (0); \
285 } while (0)
286
287 /* While we could generate the list-unrolling code here, it's fine for
288 now to just tail-call (apply values vals). */
289 #define RETURN_VALUE_LIST(vals_) \
290 do { \
291 SCM vals = vals_; \
292 VM_HANDLE_INTERRUPTS; \
293 fp[0] = vm_builtin_apply; \
294 fp[1] = vm_builtin_values; \
295 fp[2] = vals; \
296 RESET_FRAME (3); \
297 ip = (scm_t_uint32 *) vm_builtin_apply_code; \
298 goto op_tail_apply; \
299 } while (0)
300
301 #define BR_NARGS(rel) \
302 scm_t_uint32 expected; \
303 UNPACK_24 (op, expected); \
304 if (FRAME_LOCALS_COUNT() rel expected) \
305 { \
306 scm_t_int32 offset = ip[1]; \
307 offset >>= 8; /* Sign-extending shift. */ \
308 NEXT (offset); \
309 } \
310 NEXT (2)
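/* Worked example of the "sign-extending shift" idiom used throughout the
   branch macros (added for illustration). The 24-bit offset lives in the
   upper 24 bits of its word, so if ip[1] is 0xffffff00 then

       scm_t_int32 offset = ip[1];   // reinterpreted as signed: negative
       offset >>= 8;                 // arithmetic shift => 0xffffffff == -1

   giving a branch one word backwards. Right-shifting a negative signed
   value is implementation-defined in C; this relies on the arithmetic
   shift that GCC-compatible compilers provide.  */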
311
312 #define BR_UNARY(x, exp) \
313 scm_t_uint32 test; \
314 SCM x; \
315 UNPACK_24 (op, test); \
316 x = LOCAL_REF (test); \
317 if ((ip[1] & 0x1) ? !(exp) : (exp)) \
318 { \
319 scm_t_int32 offset = ip[1]; \
320 offset >>= 8; /* Sign-extending shift. */ \
321 if (offset < 0) \
322 VM_HANDLE_INTERRUPTS; \
323 NEXT (offset); \
324 } \
325 NEXT (2)
326
327 #define BR_BINARY(x, y, exp) \
328 scm_t_uint16 a, b; \
329 SCM x, y; \
330 UNPACK_12_12 (op, a, b); \
331 x = LOCAL_REF (a); \
332 y = LOCAL_REF (b); \
333 if ((ip[1] & 0x1) ? !(exp) : (exp)) \
334 { \
335 scm_t_int32 offset = ip[1]; \
336 offset >>= 8; /* Sign-extending shift. */ \
337 if (offset < 0) \
338 VM_HANDLE_INTERRUPTS; \
339 NEXT (offset); \
340 } \
341 NEXT (2)
342
343 #define BR_ARITHMETIC(crel,srel) \
344 { \
345 scm_t_uint16 a, b; \
346 SCM x, y; \
347 UNPACK_12_12 (op, a, b); \
348 x = LOCAL_REF (a); \
349 y = LOCAL_REF (b); \
350 if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
351 { \
352 scm_t_signed_bits x_bits = SCM_UNPACK (x); \
353 scm_t_signed_bits y_bits = SCM_UNPACK (y); \
354 if ((ip[1] & 0x1) ? !(x_bits crel y_bits) : (x_bits crel y_bits)) \
355 { \
356 scm_t_int32 offset = ip[1]; \
357 offset >>= 8; /* Sign-extending shift. */ \
358 if (offset < 0) \
359 VM_HANDLE_INTERRUPTS; \
360 NEXT (offset); \
361 } \
362 NEXT (2); \
363 } \
364 else \
365 { \
366 SCM res; \
367 SYNC_IP (); \
368 res = srel (x, y); \
369 CACHE_FP (); \
370 if ((ip[1] & 0x1) ? scm_is_false (res) : scm_is_true (res)) \
371 { \
372 scm_t_int32 offset = ip[1]; \
373 offset >>= 8; /* Sign-extending shift. */ \
374 if (offset < 0) \
375 VM_HANDLE_INTERRUPTS; \
376 NEXT (offset); \
377 } \
378 NEXT (2); \
379 } \
380 }
381
382 #define ARGS1(a1) \
383 scm_t_uint16 dst, src; \
384 SCM a1; \
385 UNPACK_12_12 (op, dst, src); \
386 a1 = LOCAL_REF (src)
387 #define ARGS2(a1, a2) \
388 scm_t_uint8 dst, src1, src2; \
389 SCM a1, a2; \
390 UNPACK_8_8_8 (op, dst, src1, src2); \
391 a1 = LOCAL_REF (src1); \
392 a2 = LOCAL_REF (src2)
393 #define RETURN(x) \
394 do { LOCAL_SET (dst, x); NEXT (1); } while (0)
395 #define RETURN_EXP(exp) \
396 do { SCM __x; SYNC_IP (); __x = exp; CACHE_FP (); RETURN (__x); } while (0)
397
398 /* The maximum/minimum tagged integers. */
399 #define INUM_MAX \
400 ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
401 #define INUM_MIN \
402 ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))
403 #define INUM_STEP \
404 ((scm_t_signed_bits) SCM_UNPACK (SCM_INUM1) \
405 - (scm_t_signed_bits) SCM_UNPACK (SCM_INUM0))
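/* Note added for illustration, assuming Guile's usual fixnum encoding in
   which an immediate integer n is represented as the bits (n << 2) | 2:
   under that scheme SCM_INUM0 is 0x2, SCM_INUM1 is 0x6, so INUM_STEP is 4,
   and tagging preserves order -- which is why BR_ARITHMETIC above can
   compare the raw SCM_UNPACK bits of two fixnums directly with <, <= and
   == rather than untagging them first.  */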
406
407 #define BINARY_INTEGER_OP(CFUNC,SFUNC) \
408 { \
409 ARGS2 (x, y); \
410 if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
411 { \
412 scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y); \
413 if (SCM_FIXABLE (n)) \
414 RETURN (SCM_I_MAKINUM (n)); \
415 } \
416 RETURN_EXP (SFUNC (x, y)); \
417 }
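/* Usage sketch (added; the elided operand layout is illustrative): the
   arithmetic ops defined later in this file use this macro roughly as

       VM_DEFINE_OP (..., add, "add", ...)
       {
         BINARY_INTEGER_OP (+, scm_sum);
       }

   so the fixnum fast path does plain C arithmetic on the untagged values
   and falls back to the generic procedure (here scm_sum) when an operand
   is not a fixnum or the result does not fit in a fixnum.  */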
418
419 #define VM_VALIDATE_PAIR(x, proc) \
420 VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))
421
422 #define VM_VALIDATE_STRUCT(obj, proc) \
423 VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_pair (proc, obj))
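/* Note (added): on failure this reports a "not a pair" error for what is
   a struct check; it appears to be a copy/paste of VM_VALIDATE_PAIR above. */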
424
425 #define VM_VALIDATE_BYTEVECTOR(x, proc) \
426 VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))
427
428 /* Return true (non-zero) if PTR has suitable alignment for TYPE. */
429 #define ALIGNED_P(ptr, type) \
430 ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
431
432 static SCM
433 VM_NAME (scm_i_thread *thread, struct scm_vm *vp,
434 scm_i_jmp_buf *registers, int resume)
435 {
436 /* Instruction pointer: A pointer to the opcode that is currently
437 running. */
438 register scm_t_uint32 *ip IP_REG;
439
440 /* Frame pointer: A pointer into the stack, off of which we index
441 arguments and local variables. Pushed at function calls, popped on
442 returns. */
443 register SCM *fp FP_REG;
444
445 /* Current opcode: A cache of *ip. */
446 register scm_t_uint32 op;
447
448 #ifdef HAVE_LABELS_AS_VALUES
449 static const void *jump_table_[256] = {
450 #define LABEL_ADDR(opcode, tag, name, meta) &&op_##tag,
451 FOR_EACH_VM_OPERATION(LABEL_ADDR)
452 #undef LABEL_ADDR
453 };
454 register const void **jump_table JT_REG;
455 /* Attempt to keep the jump table pointer in a register. This saves one
456 load instruction at each instruction dispatch. */
457 jump_table = jump_table_;
458 #endif
459
460 /* Load VM registers. */
461 CACHE_REGISTER ();
462
463 VM_HANDLE_INTERRUPTS;
464
465 /* Usually a call to the VM happens on application, with the boot
466 continuation on the next frame. Sometimes it happens after a
467 non-local exit however; in that case the VM state is all set up,
468 and we have but to jump to the next opcode. */
469 if (SCM_UNLIKELY (resume))
470 NEXT (0);
471
472 apply:
473 while (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp)))
474 {
475 SCM proc = SCM_FRAME_PROGRAM (fp);
476
477 if (SCM_STRUCTP (proc) && SCM_STRUCT_APPLICABLE_P (proc))
478 {
479 LOCAL_SET (0, SCM_STRUCT_PROCEDURE (proc));
480 continue;
481 }
482 if (SCM_HAS_TYP7 (proc, scm_tc7_smob) && SCM_SMOB_APPLICABLE_P (proc))
483 {
484 scm_t_uint32 n = FRAME_LOCALS_COUNT();
485
486 /* Shuffle args up. */
487 RESET_FRAME (n + 1);
488 while (n--)
489 LOCAL_SET (n + 1, LOCAL_REF (n));
490
491 LOCAL_SET (0, SCM_SMOB_DESCRIPTOR (proc).apply_trampoline);
492 continue;
493 }
494
495 SYNC_IP();
496 vm_error_wrong_type_apply (proc);
497 }
498
499 /* Let's go! */
500 ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
501 NEXT (0);
502
503 BEGIN_DISPATCH_SWITCH;
504
505
506 \f
507
508 /*
509 * Call and return
510 */
511
512 /* halt _:24
513 *
514 * Bring the VM to a halt, returning all the values from the stack.
515 */
516 VM_DEFINE_OP (0, halt, "halt", OP1 (U8_X24))
517 {
518 /* Boot closure in r0, empty frame in r1/r2, proc in r3, values from r4. */
519
520 scm_t_uint32 nvals = FRAME_LOCALS_COUNT_FROM (4);
521 SCM ret;
522
523 if (nvals == 1)
524 ret = LOCAL_REF (4);
525 else
526 {
527 scm_t_uint32 n;
528 ret = SCM_EOL;
529 for (n = nvals; n > 0; n--)
530 ret = scm_inline_cons (thread, LOCAL_REF (4 + n - 1), ret);
531 ret = scm_values (ret);
532 }
533
534 vp->ip = SCM_FRAME_RETURN_ADDRESS (fp);
535 vp->sp = SCM_FRAME_PREVIOUS_SP (fp);
536 vp->fp = SCM_FRAME_DYNAMIC_LINK (fp);
537
538 return ret;
539 }
540
541 /* call proc:24 _:8 nlocals:24
542 *
543 * Call a procedure. PROC is the local corresponding to a procedure.
544 * The two values below PROC will be overwritten by the saved call
545 * frame data. The new frame will have space for NLOCALS locals: one
546 * for the procedure, and the rest for the arguments which should
547 * already have been pushed on.
548 *
549 * When the call returns, execution proceeds with the next
550 * instruction. There may be any number of values on the return
551 * stack; the precise number can be had by subtracting the address of
552 * PROC from the post-call SP.
553 */
554 VM_DEFINE_OP (1, call, "call", OP2 (U8_U24, X8_U24))
555 {
556 scm_t_uint32 proc, nlocals;
557 SCM *old_fp;
558
559 UNPACK_24 (op, proc);
560 UNPACK_24 (ip[1], nlocals);
561
562 VM_HANDLE_INTERRUPTS;
563
564 old_fp = fp;
565 fp = vp->fp = old_fp + proc;
566 SCM_FRAME_SET_DYNAMIC_LINK (fp, old_fp);
567 SCM_FRAME_SET_RETURN_ADDRESS (fp, ip + 2);
568
569 RESET_FRAME (nlocals);
570
571 PUSH_CONTINUATION_HOOK ();
572 APPLY_HOOK ();
573
574 if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
575 goto apply;
576
577 ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
578 NEXT (0);
579 }
580
581 /* tail-call nlocals:24
582 *
583 * Tail-call a procedure. Requires that the procedure and all of the
584 * arguments have already been shuffled into position. Will reset the
585 * frame to NLOCALS.
586 */
587 VM_DEFINE_OP (2, tail_call, "tail-call", OP1 (U8_U24))
588 {
589 scm_t_uint32 nlocals;
590
591 UNPACK_24 (op, nlocals);
592
593 VM_HANDLE_INTERRUPTS;
594
595 RESET_FRAME (nlocals);
596
597 APPLY_HOOK ();
598
599 if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
600 goto apply;
601
602 ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
603 NEXT (0);
604 }
605
606 /* tail-call/shuffle from:24
607 *
608 * Tail-call a procedure. The procedure should already be set to slot
609 * 0. The rest of the args are taken from the frame, starting at
610 * FROM, shuffled down to start at slot 0. This is part of the
611 * implementation of the call-with-values builtin.
612 */
613 VM_DEFINE_OP (3, tail_call_shuffle, "tail-call/shuffle", OP1 (U8_U24))
614 {
615 scm_t_uint32 n, from, nlocals;
616
617 UNPACK_24 (op, from);
618
619 VM_HANDLE_INTERRUPTS;
620
621 VM_ASSERT (from > 0, abort ());
622 nlocals = FRAME_LOCALS_COUNT ();
623
624 for (n = 0; from + n < nlocals; n++)
625 LOCAL_SET (n + 1, LOCAL_REF (from + n));
626
627 RESET_FRAME (n + 1);
628
629 APPLY_HOOK ();
630
631 if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
632 goto apply;
633
634 ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
635 NEXT (0);
636 }
637
638 /* receive dst:12 proc:12 _:8 nlocals:24
639 *
640 * Receive a single return value from a call whose procedure was in
641 * PROC, asserting that the call actually returned at least one
642 * value. Afterwards, resets the frame to NLOCALS locals.
643 */
644 VM_DEFINE_OP (4, receive, "receive", OP2 (U8_U12_U12, X8_U24) | OP_DST)
645 {
646 scm_t_uint16 dst, proc;
647 scm_t_uint32 nlocals;
648 UNPACK_12_12 (op, dst, proc);
649 UNPACK_24 (ip[1], nlocals);
650 VM_ASSERT (FRAME_LOCALS_COUNT () > proc + 1, vm_error_no_values ());
651 LOCAL_SET (dst, LOCAL_REF (proc + 1));
652 RESET_FRAME (nlocals);
653 NEXT (2);
654 }
655
656 /* receive-values proc:24 allow-extra?:1 _:7 nvalues:24
657 *
658 * Receive a return of multiple values from a call whose procedure was
659 * in PROC. If fewer than NVALUES values were returned, signal an
660 * error. Unless ALLOW-EXTRA? is true, require that the number of
661 * return values equals NVALUES exactly. After receive-values has
662 * run, the values can be copied down via `mov'.
663 */
664 VM_DEFINE_OP (5, receive_values, "receive-values", OP2 (U8_U24, B1_X7_U24))
665 {
666 scm_t_uint32 proc, nvalues;
667 UNPACK_24 (op, proc);
668 UNPACK_24 (ip[1], nvalues);
669 if (ip[1] & 0x1)
670 VM_ASSERT (FRAME_LOCALS_COUNT () > proc + nvalues,
671 vm_error_not_enough_values ());
672 else
673 VM_ASSERT (FRAME_LOCALS_COUNT () == proc + 1 + nvalues,
674 vm_error_wrong_number_of_values (nvalues));
675 NEXT (2);
676 }
677
678 /* return src:24
679 *
680 * Return a value.
681 */
682 VM_DEFINE_OP (6, return, "return", OP1 (U8_U24))
683 {
684 scm_t_uint32 src;
685 UNPACK_24 (op, src);
686 RETURN_ONE_VALUE (LOCAL_REF (src));
687 }
688
689 /* return-values _:24
690 *
691 * Return a number of values from a call frame. This opcode
692 * corresponds to an application of `values' in tail position. As
693 * with tail calls, we expect that the values have already been
694 * shuffled down to a contiguous array starting at slot 1.
695 * We also expect the frame has already been reset.
696 */
697 VM_DEFINE_OP (7, return_values, "return-values", OP1 (U8_X24))
698 {
699 SCM *old_fp;
700
701 VM_HANDLE_INTERRUPTS;
702
703 old_fp = fp;
704 ip = SCM_FRAME_RETURN_ADDRESS (fp);
705 fp = vp->fp = SCM_FRAME_DYNAMIC_LINK (fp);
706
707 /* Clear stack frame. */
708 old_fp[-1] = SCM_BOOL_F;
709 old_fp[-2] = SCM_BOOL_F;
710
711 POP_CONTINUATION_HOOK (old_fp);
712
713 NEXT (0);
714 }
715
716
717 \f
718
719 /*
720 * Specialized call stubs
721 */
722
723 /* subr-call ptr-idx:24
724 *
725 * Call a subr, passing all locals in this frame as arguments. Fetch
726 * the foreign pointer from PTR-IDX, a free variable. Return from the
727 * calling frame. This instruction is part of the trampolines
728 * created in gsubr.c, and is not generated by the compiler.
729 */
730 VM_DEFINE_OP (8, subr_call, "subr-call", OP1 (U8_U24))
731 {
732 scm_t_uint32 ptr_idx;
733 SCM pointer, ret;
734 SCM (*subr)();
735
736 UNPACK_24 (op, ptr_idx);
737
738 pointer = SCM_PROGRAM_FREE_VARIABLE_REF (LOCAL_REF (0), ptr_idx);
739 subr = SCM_POINTER_VALUE (pointer);
740
741 SYNC_IP ();
742
743 switch (FRAME_LOCALS_COUNT_FROM (1))
744 {
745 case 0:
746 ret = subr ();
747 break;
748 case 1:
749 ret = subr (fp[1]);
750 break;
751 case 2:
752 ret = subr (fp[1], fp[2]);
753 break;
754 case 3:
755 ret = subr (fp[1], fp[2], fp[3]);
756 break;
757 case 4:
758 ret = subr (fp[1], fp[2], fp[3], fp[4]);
759 break;
760 case 5:
761 ret = subr (fp[1], fp[2], fp[3], fp[4], fp[5]);
762 break;
763 case 6:
764 ret = subr (fp[1], fp[2], fp[3], fp[4], fp[5], fp[6]);
765 break;
766 case 7:
767 ret = subr (fp[1], fp[2], fp[3], fp[4], fp[5], fp[6], fp[7]);
768 break;
769 case 8:
770 ret = subr (fp[1], fp[2], fp[3], fp[4], fp[5], fp[6], fp[7], fp[8]);
771 break;
772 case 9:
773 ret = subr (fp[1], fp[2], fp[3], fp[4], fp[5], fp[6], fp[7], fp[8], fp[9]);
774 break;
775 case 10:
776 ret = subr (fp[1], fp[2], fp[3], fp[4], fp[5], fp[6], fp[7], fp[8], fp[9], fp[10]);
777 break;
778 default:
779 abort ();
780 }
781
782 CACHE_FP ();
783
784 if (SCM_UNLIKELY (SCM_VALUESP (ret)))
785 /* multiple values returned to continuation */
786 RETURN_VALUE_LIST (scm_struct_ref (ret, SCM_INUM0));
787 else
788 RETURN_ONE_VALUE (ret);
789 }
790
791 /* foreign-call cif-idx:12 ptr-idx:12
792 *
793 * Call a foreign function. Fetch the CIF and foreign pointer from
794 * CIF-IDX and PTR-IDX, both free variables. Return from the calling
795 * frame. Arguments are taken from the stack. This instruction is
796 * part of the trampolines created by the FFI, and is not generated by
797 * the compiler.
798 */
799 VM_DEFINE_OP (9, foreign_call, "foreign-call", OP1 (U8_U12_U12))
800 {
801 scm_t_uint16 cif_idx, ptr_idx;
802 SCM closure, cif, pointer, ret;
803
804 UNPACK_12_12 (op, cif_idx, ptr_idx);
805
806 closure = LOCAL_REF (0);
807 cif = SCM_PROGRAM_FREE_VARIABLE_REF (closure, cif_idx);
808 pointer = SCM_PROGRAM_FREE_VARIABLE_REF (closure, ptr_idx);
809
810 SYNC_IP ();
811
812 // FIXME: separate args
813 ret = scm_i_foreign_call (scm_inline_cons (thread, cif, pointer),
814 LOCAL_ADDRESS (1));
815
816 CACHE_FP ();
817
818 if (SCM_UNLIKELY (SCM_VALUESP (ret)))
819 /* multiple values returned to continuation */
820 RETURN_VALUE_LIST (scm_struct_ref (ret, SCM_INUM0));
821 else
822 RETURN_ONE_VALUE (ret);
823 }
824
825 /* continuation-call contregs:24
826 *
827 * Return to a continuation, nonlocally. The arguments to the
828 * continuation are taken from the stack. CONTREGS is a free variable
829 * containing the reified continuation. This instruction is part of
830 * the implementation of undelimited continuations, and is not
831 * generated by the compiler.
832 */
833 VM_DEFINE_OP (10, continuation_call, "continuation-call", OP1 (U8_U24))
834 {
835 SCM contregs;
836 scm_t_uint32 contregs_idx;
837
838 UNPACK_24 (op, contregs_idx);
839
840 contregs =
841 SCM_PROGRAM_FREE_VARIABLE_REF (LOCAL_REF (0), contregs_idx);
842
843 SYNC_IP ();
844 scm_i_check_continuation (contregs);
845 vm_return_to_continuation (scm_i_contregs_vp (contregs),
846 scm_i_contregs_vm_cont (contregs),
847 FRAME_LOCALS_COUNT_FROM (1),
848 LOCAL_ADDRESS (1));
849 scm_i_reinstate_continuation (contregs);
850
851 /* no NEXT */
852 abort ();
853 }
854
855 /* compose-continuation cont:24
856 *
857 * Compose a partial continuation with the current continuation. The
858 * arguments to the continuation are taken from the stack. CONT is a
859 * free variable containing the reified continuation. This
860 * instruction is part of the implementation of partial continuations,
861 * and is not generated by the compiler.
862 */
863 VM_DEFINE_OP (11, compose_continuation, "compose-continuation", OP1 (U8_U24))
864 {
865 SCM vmcont;
866 scm_t_uint32 cont_idx;
867
868 UNPACK_24 (op, cont_idx);
869 vmcont = SCM_PROGRAM_FREE_VARIABLE_REF (LOCAL_REF (0), cont_idx);
870
871 SYNC_IP ();
872 VM_ASSERT (SCM_VM_CONT_REWINDABLE_P (vmcont),
873 vm_error_continuation_not_rewindable (vmcont));
874 vm_reinstate_partial_continuation (vp, vmcont, FRAME_LOCALS_COUNT_FROM (1),
875 LOCAL_ADDRESS (1),
876 &thread->dynstack,
877 registers);
878 CACHE_REGISTER ();
879 NEXT (0);
880 }
881
882 /* tail-apply _:24
883 *
884 * Tail-apply the procedure in local slot 0 to the rest of the
885 * arguments. This instruction is part of the implementation of
886 * `apply', and is not generated by the compiler.
887 */
888 VM_DEFINE_OP (12, tail_apply, "tail-apply", OP1 (U8_X24))
889 {
890 int i, list_idx, list_len, nlocals;
891 SCM list;
892
893 VM_HANDLE_INTERRUPTS;
894
895 nlocals = FRAME_LOCALS_COUNT ();
896 // At a minimum, there should be apply, f, and the list.
897 VM_ASSERT (nlocals >= 3, abort ());
898 list_idx = nlocals - 1;
899 list = LOCAL_REF (list_idx);
900 list_len = scm_ilength (list);
901
902 VM_ASSERT (list_len >= 0, vm_error_apply_to_non_list (list));
903
904 nlocals = nlocals - 2 + list_len;
905 ALLOC_FRAME (nlocals);
906
907 for (i = 1; i < list_idx; i++)
908 LOCAL_SET (i - 1, LOCAL_REF (i));
909
910 /* Null out these slots, just in case there are fewer than 2 elements
911 in the list. */
912 LOCAL_SET (list_idx - 1, SCM_UNDEFINED);
913 LOCAL_SET (list_idx, SCM_UNDEFINED);
914
915 for (i = 0; i < list_len; i++, list = SCM_CDR (list))
916 LOCAL_SET (list_idx - 1 + i, SCM_CAR (list));
917
918 APPLY_HOOK ();
919
920 if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
921 goto apply;
922
923 ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
924 NEXT (0);
925 }
926
927 /* call/cc _:24
928 *
929 * Capture the current continuation, and tail-apply the procedure in
930 * local slot 1 to it. This instruction is part of the implementation
931 * of `call/cc', and is not generated by the compiler.
932 */
933 VM_DEFINE_OP (13, call_cc, "call/cc", OP1 (U8_X24))
934 {
935 SCM vm_cont, cont;
936 scm_t_dynstack *dynstack;
937 int first;
938
939 VM_HANDLE_INTERRUPTS;
940
941 SYNC_IP ();
942 dynstack = scm_dynstack_capture_all (&thread->dynstack);
943 vm_cont = scm_i_vm_capture_stack (vp->stack_base,
944 SCM_FRAME_DYNAMIC_LINK (fp),
945 SCM_FRAME_PREVIOUS_SP (fp),
946 SCM_FRAME_RETURN_ADDRESS (fp),
947 dynstack,
948 0);
949 /* FIXME: Seems silly to capture the registers here, when they are
950 already captured in the registers local, which here we are
951 copying out to the heap; and likewise, the setjmp(&registers)
952 code already has the non-local return handler. But oh
953 well! */
954 cont = scm_i_make_continuation (&first, vp, vm_cont);
955
956 if (first)
957 {
958 LOCAL_SET (0, LOCAL_REF (1));
959 LOCAL_SET (1, cont);
960 RESET_FRAME (2);
961
962 APPLY_HOOK ();
963
964 if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
965 goto apply;
966
967 ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
968 NEXT (0);
969 }
970 else
971 {
972 CACHE_REGISTER ();
973 ABORT_CONTINUATION_HOOK ();
974 NEXT (0);
975 }
976 }
977
978 /* abort _:24
979 *
980 * Abort to a prompt handler. The tag is expected in r1, and the rest
981 * of the values in the frame are returned to the prompt handler.
982 * This corresponds to a tail application of abort-to-prompt.
983 */
984 VM_DEFINE_OP (14, abort, "abort", OP1 (U8_X24))
985 {
986 scm_t_uint32 nlocals = FRAME_LOCALS_COUNT ();
987
988 ASSERT (nlocals >= 2);
989 /* FIXME: Really we should capture the caller's registers. Until
990 then, manually advance the IP so that when the prompt resumes,
991 it continues with the next instruction. */
992 ip++;
993 SYNC_IP ();
994 vm_abort (vp, LOCAL_REF (1), nlocals - 2, LOCAL_ADDRESS (2),
995 SCM_EOL, LOCAL_ADDRESS (0), registers);
996
997 /* vm_abort should not return */
998 abort ();
999 }
1000
1001 /* builtin-ref dst:12 idx:12
1002 *
1003 * Load a builtin stub by index into DST.
1004 */
1005 VM_DEFINE_OP (15, builtin_ref, "builtin-ref", OP1 (U8_U12_U12) | OP_DST)
1006 {
1007 scm_t_uint16 dst, idx;
1008
1009 UNPACK_12_12 (op, dst, idx);
1010 LOCAL_SET (dst, scm_vm_builtin_ref (idx));
1011
1012 NEXT (1);
1013 }
1014
1015
1016 \f
1017
1018 /*
1019 * Function prologues
1020 */
1021
1022 /* br-if-nargs-ne expected:24 _:8 offset:24
1023 * br-if-nargs-lt expected:24 _:8 offset:24
1024 * br-if-nargs-gt expected:24 _:8 offset:24
1025 *
1026 * If the number of actual arguments is not equal, less than, or greater
1027 * than EXPECTED, respectively, add OFFSET, a signed 24-bit number, to
1028 * the current instruction pointer.
1029 */
1030 VM_DEFINE_OP (16, br_if_nargs_ne, "br-if-nargs-ne", OP2 (U8_U24, X8_L24))
1031 {
1032 BR_NARGS (!=);
1033 }
1034 VM_DEFINE_OP (17, br_if_nargs_lt, "br-if-nargs-lt", OP2 (U8_U24, X8_L24))
1035 {
1036 BR_NARGS (<);
1037 }
1038 VM_DEFINE_OP (18, br_if_nargs_gt, "br-if-nargs-gt", OP2 (U8_U24, X8_L24))
1039 {
1040 BR_NARGS (>);
1041 }
1042
1043 /* assert-nargs-ee expected:24
1044 * assert-nargs-ge expected:24
1045 * assert-nargs-le expected:24
1046 *
1047 * If the number of actual arguments is not ==, >=, or <= EXPECTED,
1048 * respectively, signal an error.
1049 */
1050 VM_DEFINE_OP (19, assert_nargs_ee, "assert-nargs-ee", OP1 (U8_U24))
1051 {
1052 scm_t_uint32 expected;
1053 UNPACK_24 (op, expected);
1054 VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
1055 vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
1056 NEXT (1);
1057 }
1058 VM_DEFINE_OP (20, assert_nargs_ge, "assert-nargs-ge", OP1 (U8_U24))
1059 {
1060 scm_t_uint32 expected;
1061 UNPACK_24 (op, expected);
1062 VM_ASSERT (FRAME_LOCALS_COUNT () >= expected,
1063 vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
1064 NEXT (1);
1065 }
1066 VM_DEFINE_OP (21, assert_nargs_le, "assert-nargs-le", OP1 (U8_U24))
1067 {
1068 scm_t_uint32 expected;
1069 UNPACK_24 (op, expected);
1070 VM_ASSERT (FRAME_LOCALS_COUNT () <= expected,
1071 vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
1072 NEXT (1);
1073 }
1074
1075 /* alloc-frame nlocals:24
1076 *
1077 * Ensure that there is space on the stack for NLOCALS local variables,
1078 * setting them all to SCM_UNDEFINED, except for the NARGS slots already
1079 * in place: the procedure and the arguments that were passed to it.
1080 */
1081 VM_DEFINE_OP (22, alloc_frame, "alloc-frame", OP1 (U8_U24))
1082 {
1083 scm_t_uint32 nlocals, nargs;
1084 UNPACK_24 (op, nlocals);
1085
1086 nargs = FRAME_LOCALS_COUNT ();
1087 ALLOC_FRAME (nlocals);
1088 while (nlocals-- > nargs)
1089 LOCAL_SET (nlocals, SCM_UNDEFINED);
1090
1091 NEXT (1);
1092 }
1093
1094 /* reset-frame nlocals:24
1095 *
1096 * Like alloc-frame, but doesn't check that the stack is big enough.
1097 * Used to reset the frame size to something less than the size that
1098 * was previously set via alloc-frame.
1099 */
1100 VM_DEFINE_OP (23, reset_frame, "reset-frame", OP1 (U8_U24))
1101 {
1102 scm_t_uint32 nlocals;
1103 UNPACK_24 (op, nlocals);
1104 RESET_FRAME (nlocals);
1105 NEXT (1);
1106 }
1107
1108 /* assert-nargs-ee/locals expected:12 nlocals:12
1109 *
1110 * Equivalent to a sequence of assert-nargs-ee and reserve-locals. The
1111 * number of locals reserved is EXPECTED + NLOCALS.
1112 */
1113 VM_DEFINE_OP (24, assert_nargs_ee_locals, "assert-nargs-ee/locals", OP1 (U8_U12_U12))
1114 {
1115 scm_t_uint16 expected, nlocals;
1116 UNPACK_12_12 (op, expected, nlocals);
1117 VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
1118 vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
1119 ALLOC_FRAME (expected + nlocals);
1120 while (nlocals--)
1121 LOCAL_SET (expected + nlocals, SCM_UNDEFINED);
1122
1123 NEXT (1);
1124 }
1125
1126 /* br-if-npos-gt nreq:24 _:8 npos:24 _:8 offset:24
1127 *
1128 * Find the first positional argument after NREQ. If it is greater
1129 * than NPOS, jump to OFFSET.
1130 *
1131 * This instruction is only emitted for functions with multiple
1132 * clauses, where an earlier clause takes keyword arguments but no rest argument.
1133 * See "Case-lambda" in the manual, for more on how case-lambda
1134 * chooses the clause to apply.
1135 */
1136 VM_DEFINE_OP (25, br_if_npos_gt, "br-if-npos-gt", OP3 (U8_U24, X8_U24, X8_L24))
1137 {
1138 scm_t_uint32 nreq, npos;
1139
1140 UNPACK_24 (op, nreq);
1141 UNPACK_24 (ip[1], npos);
1142
1143 /* We can only have too many positionals if there are more
1144 arguments than NPOS. */
1145 if (FRAME_LOCALS_COUNT() > npos)
1146 {
1147 scm_t_uint32 n;
1148 for (n = nreq; n < npos; n++)
1149 if (scm_is_keyword (LOCAL_REF (n)))
1150 break;
1151 if (n == npos && !scm_is_keyword (LOCAL_REF (n)))
1152 {
1153 scm_t_int32 offset = ip[2];
1154 offset >>= 8; /* Sign-extending shift. */
1155 NEXT (offset);
1156 }
1157 }
1158 NEXT (3);
1159 }
1160
1161 /* bind-kwargs nreq:24 flags:8 nreq-and-opt:24 _:8 ntotal:24 kw-offset:32
1162 *
1163 * flags := allow-other-keys:1 has-rest:1 _:6
1164 *
1165 * Find the last positional argument, and shuffle all the rest above
1166 * NTOTAL. Initialize the intervening locals to SCM_UNDEFINED. Then
1167 * load the constant at KW-OFFSET words from the current IP, and use it
1168 * to bind keyword arguments. If HAS-REST, collect all shuffled
1169 * arguments into a list, and store it in NREQ-AND-OPT. Finally, clear
1170 * the arguments that we shuffled up.
1171 *
1172 * A macro-mega-instruction.
1173 */
1174 VM_DEFINE_OP (26, bind_kwargs, "bind-kwargs", OP4 (U8_U24, U8_U24, X8_U24, N32))
1175 {
1176 scm_t_uint32 nreq, nreq_and_opt, ntotal, npositional, nkw, n, nargs;
1177 scm_t_int32 kw_offset;
1178 scm_t_bits kw_bits;
1179 SCM kw;
1180 char allow_other_keys, has_rest;
1181
1182 UNPACK_24 (op, nreq);
1183 allow_other_keys = ip[1] & 0x1;
1184 has_rest = ip[1] & 0x2;
1185 UNPACK_24 (ip[1], nreq_and_opt);
1186 UNPACK_24 (ip[2], ntotal);
1187 kw_offset = ip[3];
1188 kw_bits = (scm_t_bits) (ip + kw_offset);
1189 VM_ASSERT (!(kw_bits & 0x7), abort());
1190 kw = SCM_PACK (kw_bits);
1191
1192 nargs = FRAME_LOCALS_COUNT ();
1193
1194 /* look in optionals for first keyword or last positional */
1195 /* starting after the last required positional arg */
1196 npositional = nreq;
1197 while (/* while we have args */
1198 npositional < nargs
1199 /* and we still have positionals to fill */
1200 && npositional < nreq_and_opt
1201 /* and we haven't reached a keyword yet */
1202 && !scm_is_keyword (LOCAL_REF (npositional)))
1203 /* bind this optional arg (by leaving it in place) */
1204 npositional++;
1205 nkw = nargs - npositional;
1206 /* shuffle non-positional arguments above ntotal */
1207 ALLOC_FRAME (ntotal + nkw);
1208 n = nkw;
1209 while (n--)
1210 LOCAL_SET (ntotal + n, LOCAL_REF (npositional + n));
1211 /* and fill optionals & keyword args with SCM_UNDEFINED */
1212 n = npositional;
1213 while (n < ntotal)
1214 LOCAL_SET (n++, SCM_UNDEFINED);
1215
1216 VM_ASSERT (has_rest || (nkw % 2) == 0,
1217 vm_error_kwargs_length_not_even (SCM_FRAME_PROGRAM (fp)));
1218
1219 /* Now bind keywords, in the order given. */
1220 for (n = 0; n < nkw; n++)
1221 if (scm_is_keyword (LOCAL_REF (ntotal + n)))
1222 {
1223 SCM walk;
1224 for (walk = kw; scm_is_pair (walk); walk = SCM_CDR (walk))
1225 if (scm_is_eq (SCM_CAAR (walk), LOCAL_REF (ntotal + n)))
1226 {
1227 SCM si = SCM_CDAR (walk);
1228 LOCAL_SET (SCM_I_INUMP (si) ? SCM_I_INUM (si) : scm_to_uint32 (si),
1229 LOCAL_REF (ntotal + n + 1));
1230 break;
1231 }
1232 VM_ASSERT (scm_is_pair (walk) || allow_other_keys,
1233 vm_error_kwargs_unrecognized_keyword (SCM_FRAME_PROGRAM (fp),
1234 LOCAL_REF (ntotal + n)));
1235 n++;
1236 }
1237 else
1238 VM_ASSERT (has_rest, vm_error_kwargs_invalid_keyword (SCM_FRAME_PROGRAM (fp),
1239 LOCAL_REF (ntotal + n)));
1240
1241 if (has_rest)
1242 {
1243 SCM rest = SCM_EOL;
1244 n = nkw;
1245 while (n--)
1246 rest = scm_inline_cons (thread, LOCAL_REF (ntotal + n), rest);
1247 LOCAL_SET (nreq_and_opt, rest);
1248 }
1249
1250 RESET_FRAME (ntotal);
1251
1252 NEXT (4);
1253 }
1254
1255 /* bind-rest dst:24
1256 *
1257 * Collect any arguments at or above DST into a list, and store that
1258 * list at DST.
1259 */
1260 VM_DEFINE_OP (27, bind_rest, "bind-rest", OP1 (U8_U24) | OP_DST)
1261 {
1262 scm_t_uint32 dst, nargs;
1263 SCM rest = SCM_EOL;
1264
1265 UNPACK_24 (op, dst);
1266 nargs = FRAME_LOCALS_COUNT ();
1267
1268 if (nargs <= dst)
1269 {
1270 ALLOC_FRAME (dst + 1);
1271 while (nargs < dst)
1272 LOCAL_SET (nargs++, SCM_UNDEFINED);
1273 }
1274 else
1275 {
1276 while (nargs-- > dst)
1277 {
1278 rest = scm_inline_cons (thread, LOCAL_REF (nargs), rest);
1279 LOCAL_SET (nargs, SCM_UNDEFINED);
1280 }
1281
1282 RESET_FRAME (dst + 1);
1283 }
1284
1285 LOCAL_SET (dst, rest);
1286
1287 NEXT (1);
1288 }
1289
1290
1291 \f
1292
1293 /*
1294 * Branching instructions
1295 */
1296
1297 /* br offset:24
1298 *
1299 * Add OFFSET, a signed 24-bit number, to the current instruction
1300 * pointer.
1301 */
1302 VM_DEFINE_OP (28, br, "br", OP1 (U8_L24))
1303 {
1304 scm_t_int32 offset = op;
1305 offset >>= 8; /* Sign-extending shift. */
1306 NEXT (offset);
1307 }
1308
1309 /* br-if-true test:24 invert:1 _:7 offset:24
1310 *
1311 * If the value in TEST is true for the purposes of Scheme, add
1312 * OFFSET, a signed 24-bit number, to the current instruction pointer.
1313 */
1314 VM_DEFINE_OP (29, br_if_true, "br-if-true", OP2 (U8_U24, B1_X7_L24))
1315 {
1316 BR_UNARY (x, scm_is_true (x));
1317 }
1318
1319 /* br-if-null test:24 invert:1 _:7 offset:24
1320 *
1321 * If the value in TEST is the end-of-list or Lisp nil, add OFFSET, a
1322 * signed 24-bit number, to the current instruction pointer.
1323 */
1324 VM_DEFINE_OP (30, br_if_null, "br-if-null", OP2 (U8_U24, B1_X7_L24))
1325 {
1326 BR_UNARY (x, scm_is_null (x));
1327 }
1328
1329 /* br-if-nil test:24 invert:1 _:7 offset:24
1330 *
1331 * If the value in TEST is false to Lisp, add OFFSET, a signed 24-bit
1332 * number, to the current instruction pointer.
1333 */
1334 VM_DEFINE_OP (31, br_if_nil, "br-if-nil", OP2 (U8_U24, B1_X7_L24))
1335 {
1336 BR_UNARY (x, scm_is_lisp_false (x));
1337 }
1338
1339 /* br-if-pair test:24 invert:1 _:7 offset:24
1340 *
1341 * If the value in TEST is a pair, add OFFSET, a signed 24-bit number,
1342 * to the current instruction pointer.
1343 */
1344 VM_DEFINE_OP (32, br_if_pair, "br-if-pair", OP2 (U8_U24, B1_X7_L24))
1345 {
1346 BR_UNARY (x, scm_is_pair (x));
1347 }
1348
1349 /* br-if-struct test:24 invert:1 _:7 offset:24
1350 *
1351 * If the value in TEST is a struct, add OFFSET, a signed 24-bit
1352 * number, to the current instruction pointer.
1353 */
1354 VM_DEFINE_OP (33, br_if_struct, "br-if-struct", OP2 (U8_U24, B1_X7_L24))
1355 {
1356 BR_UNARY (x, SCM_STRUCTP (x));
1357 }
1358
1359 /* br-if-char test:24 invert:1 _:7 offset:24
1360 *
1361 * If the value in TEST is a char, add OFFSET, a signed 24-bit number,
1362 * to the current instruction pointer.
1363 */
1364 VM_DEFINE_OP (34, br_if_char, "br-if-char", OP2 (U8_U24, B1_X7_L24))
1365 {
1366 BR_UNARY (x, SCM_CHARP (x));
1367 }
1368
1369 /* br-if-tc7 test:24 invert:1 tc7:7 offset:24
1370 *
1371 * If the value in TEST has the TC7 given in the second word, add
1372 * OFFSET, a signed 24-bit number, to the current instruction pointer.
1373 */
1374 VM_DEFINE_OP (35, br_if_tc7, "br-if-tc7", OP2 (U8_U24, B1_U7_L24))
1375 {
1376 BR_UNARY (x, SCM_HAS_TYP7 (x, (ip[1] >> 1) & 0x7f));
1377 }
1378
1379 /* br-if-eq a:12 b:12 invert:1 _:7 offset:24
1380 *
1381 * If the value in A is eq? to the value in B, add OFFSET, a signed
1382 * 24-bit number, to the current instruction pointer.
1383 */
1384 VM_DEFINE_OP (36, br_if_eq, "br-if-eq", OP2 (U8_U12_U12, B1_X7_L24))
1385 {
1386 BR_BINARY (x, y, scm_is_eq (x, y));
1387 }
1388
1389 /* br-if-eqv a:12 b:12 invert:1 _:7 offset:24
1390 *
1391 * If the value in A is eqv? to the value in B, add OFFSET, a signed
1392 * 24-bit number, to the current instruction pointer.
1393 */
1394 VM_DEFINE_OP (37, br_if_eqv, "br-if-eqv", OP2 (U8_U12_U12, B1_X7_L24))
1395 {
1396 BR_BINARY (x, y,
1397 scm_is_eq (x, y)
1398 || (SCM_NIMP (x) && SCM_NIMP (y)
1399 && scm_is_true (scm_eqv_p (x, y))));
1400 }
1401
1402 // FIXME: remove, have compiler inline eqv test instead
1403 /* br-if-equal a:12 b:12 invert:1 _:7 offset:24
1404 *
1405 * If the value in A is equal? to the value in B, add OFFSET, a signed
1406 * 24-bit number, to the current instruction pointer.
1407 */
1408 // FIXME: Should sync_ip before calling out and cache_fp before coming
1409 // back! Another reason to remove this opcode!
1410 VM_DEFINE_OP (38, br_if_equal, "br-if-equal", OP2 (U8_U12_U12, B1_X7_L24))
1411 {
1412 BR_BINARY (x, y,
1413 scm_is_eq (x, y)
1414 || (SCM_NIMP (x) && SCM_NIMP (y)
1415 && scm_is_true (scm_equal_p (x, y))));
1416 }
1417
1418 /* br-if-= a:12 b:12 invert:1 _:7 offset:24
1419 *
1420 * If the value in A is = to the value in B, add OFFSET, a signed
1421 * 24-bit number, to the current instruction pointer.
1422 */
1423 VM_DEFINE_OP (39, br_if_ee, "br-if-=", OP2 (U8_U12_U12, B1_X7_L24))
1424 {
1425 BR_ARITHMETIC (==, scm_num_eq_p);
1426 }
1427
1428 /* br-if-< a:12 b:12 invert:1 _:7 offset:24
1429 *
1430 * If the value in A is < to the value in B, add OFFSET, a signed
1431 * 24-bit number, to the current instruction pointer.
1432 */
1433 VM_DEFINE_OP (40, br_if_lt, "br-if-<", OP2 (U8_U12_U12, B1_X7_L24))
1434 {
1435 BR_ARITHMETIC (<, scm_less_p);
1436 }
1437
1438 /* br-if-<= a:12 b:12 invert:1 _:7 offset:24
1439 *
1440 * If the value in A is <= to the value in B, add OFFSET, a signed
1441 * 24-bit number, to the current instruction pointer.
1442 */
1443 VM_DEFINE_OP (41, br_if_le, "br-if-<=", OP2 (U8_U12_U12, B1_X7_L24))
1444 {
1445 BR_ARITHMETIC (<=, scm_leq_p);
1446 }
1447
1448
1449 \f
1450
1451 /*
1452 * Lexical binding instructions
1453 */
1454
1455 /* mov dst:12 src:12
1456 *
1457 * Copy a value from one local slot to another.
1458 */
1459 VM_DEFINE_OP (42, mov, "mov", OP1 (U8_U12_U12) | OP_DST)
1460 {
1461 scm_t_uint16 dst;
1462 scm_t_uint16 src;
1463
1464 UNPACK_12_12 (op, dst, src);
1465 LOCAL_SET (dst, LOCAL_REF (src));
1466
1467 NEXT (1);
1468 }
1469
1470 /* long-mov dst:24 _:8 src:24
1471 *
1472 * Copy a value from one local slot to another.
1473 */
1474 VM_DEFINE_OP (43, long_mov, "long-mov", OP2 (U8_U24, X8_U24) | OP_DST)
1475 {
1476 scm_t_uint32 dst;
1477 scm_t_uint32 src;
1478
1479 UNPACK_24 (op, dst);
1480 UNPACK_24 (ip[1], src);
1481 LOCAL_SET (dst, LOCAL_REF (src));
1482
1483 NEXT (2);
1484 }
1485
1486 /* box dst:12 src:12
1487 *
1488 * Create a new variable holding SRC, and place it in DST.
1489 */
1490 VM_DEFINE_OP (44, box, "box", OP1 (U8_U12_U12) | OP_DST)
1491 {
1492 scm_t_uint16 dst, src;
1493 UNPACK_12_12 (op, dst, src);
1494 LOCAL_SET (dst, scm_inline_cell (thread, scm_tc7_variable,
1495 SCM_UNPACK (LOCAL_REF (src))));
1496 NEXT (1);
1497 }
1498
1499 /* box-ref dst:12 src:12
1500 *
1501 * Unpack the variable at SRC into DST, asserting that the variable is
1502 * actually bound.
1503 */
1504 VM_DEFINE_OP (45, box_ref, "box-ref", OP1 (U8_U12_U12) | OP_DST)
1505 {
1506 scm_t_uint16 dst, src;
1507 SCM var;
1508 UNPACK_12_12 (op, dst, src);
1509 var = LOCAL_REF (src);
1510 VM_ASSERT (SCM_VARIABLEP (var),
1511 vm_error_not_a_variable ("variable-ref", var));
1512 VM_ASSERT (VARIABLE_BOUNDP (var),
1513 vm_error_unbound (SCM_FRAME_PROGRAM (fp), var));
1514 LOCAL_SET (dst, VARIABLE_REF (var));
1515 NEXT (1);
1516 }
1517
1518 /* box-set! dst:12 src:12
1519 *
1520 * Set the contents of the variable at DST to SRC.
1521 */
1522 VM_DEFINE_OP (46, box_set, "box-set!", OP1 (U8_U12_U12))
1523 {
1524 scm_t_uint16 dst, src;
1525 SCM var;
1526 UNPACK_12_12 (op, dst, src);
1527 var = LOCAL_REF (dst);
1528 VM_ASSERT (SCM_VARIABLEP (var),
1529 vm_error_not_a_variable ("variable-set!", var));
1530 VARIABLE_SET (var, LOCAL_REF (src));
1531 NEXT (1);
1532 }
1533
1534 /* make-closure dst:24 offset:32 _:8 nfree:24
1535 *
1536 * Make a new closure, and write it to DST. The code for the closure
1537 * will be found at OFFSET words from the current IP. OFFSET is a
1538 * signed 32-bit integer. Space for NFREE free variables will be
1539 * allocated.
1540 */
1541 VM_DEFINE_OP (47, make_closure, "make-closure", OP3 (U8_U24, L32, X8_U24) | OP_DST)
1542 {
1543 scm_t_uint32 dst, nfree, n;
1544 scm_t_int32 offset;
1545 SCM closure;
1546
1547 UNPACK_24 (op, dst);
1548 offset = ip[1];
1549 UNPACK_24 (ip[2], nfree);
1550
1551 // FIXME: Assert range of nfree?
1552 closure = scm_inline_words (thread, scm_tc7_program | (nfree << 16),
1553 nfree + 2);
1554 SCM_SET_CELL_WORD_1 (closure, ip + offset);
1555 // FIXME: Elide these initializations?
1556 for (n = 0; n < nfree; n++)
1557 SCM_PROGRAM_FREE_VARIABLE_SET (closure, n, SCM_BOOL_F);
1558 LOCAL_SET (dst, closure);
1559 NEXT (3);
1560 }
1561
1562 /* free-ref dst:12 src:12 _:8 idx:24
1563 *
1564 * Load free variable IDX from the closure SRC into local slot DST.
1565 */
1566 VM_DEFINE_OP (48, free_ref, "free-ref", OP2 (U8_U12_U12, X8_U24) | OP_DST)
1567 {
1568 scm_t_uint16 dst, src;
1569 scm_t_uint32 idx;
1570 UNPACK_12_12 (op, dst, src);
1571 UNPACK_24 (ip[1], idx);
1572 /* CHECK_FREE_VARIABLE (src); */
1573 LOCAL_SET (dst, SCM_PROGRAM_FREE_VARIABLE_REF (LOCAL_REF (src), idx));
1574 NEXT (2);
1575 }
1576
1577 /* free-set! dst:12 src:12 _:8 idx:24
1578 *
1579 * Set free variable IDX of the closure DST to SRC.
1580 */
1581 VM_DEFINE_OP (49, free_set, "free-set!", OP2 (U8_U12_U12, X8_U24))
1582 {
1583 scm_t_uint16 dst, src;
1584 scm_t_uint32 idx;
1585 UNPACK_12_12 (op, dst, src);
1586 UNPACK_24 (ip[1], idx);
1587 /* CHECK_FREE_VARIABLE (src); */
1588 SCM_PROGRAM_FREE_VARIABLE_SET (LOCAL_REF (dst), idx, LOCAL_REF (src));
1589 NEXT (2);
1590 }
1591
1592
1593 \f
1594
1595 /*
1596 * Immediates and statically allocated non-immediates
1597 */
1598
1599 /* make-short-immediate dst:8 low-bits:16
1600 *
1601 * Make an immediate whose low bits are LOW-BITS, and whose top bits are
1602 * 0.
1603 */
1604 VM_DEFINE_OP (50, make_short_immediate, "make-short-immediate", OP1 (U8_U8_I16) | OP_DST)
1605 {
1606 scm_t_uint8 dst;
1607 scm_t_bits val;
1608
1609 UNPACK_8_16 (op, dst, val);
1610 LOCAL_SET (dst, SCM_PACK (val));
1611 NEXT (1);
1612 }
1613
1614 /* make-long-immediate dst:24 low-bits:32
1615 *
1616 * Make an immediate whose low bits are LOW-BITS, and whose top bits are
1617 * 0.
1618 */
1619 VM_DEFINE_OP (51, make_long_immediate, "make-long-immediate", OP2 (U8_U24, I32))
1620 {
1621 scm_t_uint32 dst;
1622 scm_t_bits val;
1623
1624 UNPACK_24 (op, dst);
1625 val = ip[1];
1626 LOCAL_SET (dst, SCM_PACK (val));
1627 NEXT (2);
1628 }
1629
1630 /* make-long-long-immediate dst:24 high-bits:32 low-bits:32
1631 *
1632 * Make an immediate with HIGH-BITS and LOW-BITS.
1633 */
1634 VM_DEFINE_OP (52, make_long_long_immediate, "make-long-long-immediate", OP3 (U8_U24, A32, B32) | OP_DST)
1635 {
1636 scm_t_uint32 dst;
1637 scm_t_bits val;
1638
1639 UNPACK_24 (op, dst);
1640 #if SIZEOF_SCM_T_BITS > 4
1641 val = ip[1];
1642 val <<= 32;
1643 val |= ip[2];
1644 #else
1645 ASSERT (ip[1] == 0);
1646 val = ip[2];
1647 #endif
1648 LOCAL_SET (dst, SCM_PACK (val));
1649 NEXT (3);
1650 }
1651
1652 /* make-non-immediate dst:24 offset:32
1653 *
1654 * Load a pointer to statically allocated memory into DST. The
1655 * object's memory will be found OFFSET 32-bit words away from the
1656 * current instruction pointer. OFFSET is a signed value. The
1657 * intention here is that the compiler would produce an object file
1658 * containing the words of a non-immediate object, and this
1659 * instruction creates a pointer to that memory, effectively
1660 * resurrecting that object.
1661 *
1662 * Whether the object is mutable or immutable depends on where it was
1663 * allocated by the compiler, and loaded by the loader.
1664 */
1665 VM_DEFINE_OP (53, make_non_immediate, "make-non-immediate", OP2 (U8_U24, N32) | OP_DST)
1666 {
1667 scm_t_uint32 dst;
1668 scm_t_int32 offset;
1669 scm_t_uint32* loc;
1670 scm_t_bits unpacked;
1671
1672 UNPACK_24 (op, dst);
1673 offset = ip[1];
1674 loc = ip + offset;
1675 unpacked = (scm_t_bits) loc;
1676
1677 VM_ASSERT (!(unpacked & 0x7), abort());
1678
1679 LOCAL_SET (dst, SCM_PACK (unpacked));
1680
1681 NEXT (2);
1682 }
1683
1684 /* static-ref dst:24 offset:32
1685 *
1686 * Load a SCM value into DST. The SCM value will be fetched from
1687 * memory, OFFSET 32-bit words away from the current instruction
1688 * pointer. OFFSET is a signed value.
1689 *
1690 * The intention is for this instruction to be used to load constants
1691 * that the compiler is unable to statically allocate, like symbols.
1692 * These values would be initialized when the object file loads.
1693 */
1694 VM_DEFINE_OP (54, static_ref, "static-ref", OP2 (U8_U24, S32))
1695 {
1696 scm_t_uint32 dst;
1697 scm_t_int32 offset;
1698 scm_t_uint32* loc;
1699 scm_t_uintptr loc_bits;
1700
1701 UNPACK_24 (op, dst);
1702 offset = ip[1];
1703 loc = ip + offset;
1704 loc_bits = (scm_t_uintptr) loc;
1705 VM_ASSERT (ALIGNED_P (loc, SCM), abort());
1706
1707 LOCAL_SET (dst, *((SCM *) loc_bits));
1708
1709 NEXT (2);
1710 }
1711
1712 /* static-set! src:24 offset:32
1713 *
1714 * Store a SCM value into memory, OFFSET 32-bit words away from the
1715 * current instruction pointer. OFFSET is a signed value.
1716 */
1717 VM_DEFINE_OP (55, static_set, "static-set!", OP2 (U8_U24, LO32))
1718 {
1719 scm_t_uint32 src;
1720 scm_t_int32 offset;
1721 scm_t_uint32* loc;
1722
1723 UNPACK_24 (op, src);
1724 offset = ip[1];
1725 loc = ip + offset;
1726 VM_ASSERT (ALIGNED_P (loc, SCM), abort());
1727
1728 *((SCM *) loc) = LOCAL_REF (src);
1729
1730 NEXT (2);
1731 }
1732
1733 /* static-patch! _:24 dst-offset:32 src-offset:32
1734 *
1735 * Patch a pointer at DST-OFFSET to point to SRC-OFFSET. Both offsets
1736 * are signed 32-bit values, indicating a memory address as a number
1737 * of 32-bit words away from the current instruction pointer.
1738 */
1739 VM_DEFINE_OP (56, static_patch, "static-patch!", OP3 (U8_X24, LO32, L32))
1740 {
1741 scm_t_int32 dst_offset, src_offset;
1742 void *src;
1743 void** dst_loc;
1744
1745 dst_offset = ip[1];
1746 src_offset = ip[2];
1747
1748 dst_loc = (void **) (ip + dst_offset);
1749 src = ip + src_offset;
1750 VM_ASSERT (ALIGNED_P (dst_loc, void*), abort());
1751
1752 *dst_loc = src;
1753
1754 NEXT (3);
1755 }
1756
1757 \f
1758
1759 /*
1760 * Mutable top-level bindings
1761 */
1762
1763 /* There are three slightly different ways to resolve toplevel
1764 variables.
1765
1766 1. A toplevel reference outside of a function. These need to be
1767 looked up when the expression is evaluated -- no later, and no
1768 earlier. They are looked up relative to the module that is
1769 current when the expression is evaluated. For example:
1770
1771 (if (foo) a b)
1772
1773 The "resolve" instruction resolves the variable (box), and then
1774 access is via box-ref or box-set!.
1775
1776 2. A toplevel reference inside a function. These are looked up
1777 relative to the module that was current when the function was
1778 defined. Unlike code at the toplevel, which is usually run only
1779 once, these bindings benefit from memoized lookup, in which the
1780 variable resulting from the lookup is cached in the function.
1781
1782 (lambda () (if (foo) a b))
1783
1784 The toplevel-box instruction is equivalent to "resolve", but
1785 caches the resulting variable in statically allocated memory.
1786
1787 3. A reference to an identifier with respect to a particular
1788 module. This can happen for primitive references, and
1789 references residualized by macro expansions. These can always
1790 be cached. Use module-box for these.
1791 */
1792
1793 /* current-module dst:24
1794 *
1795 * Store the current module in DST.
1796 */
1797 VM_DEFINE_OP (57, current_module, "current-module", OP1 (U8_U24) | OP_DST)
1798 {
1799 scm_t_uint32 dst;
1800
1801 UNPACK_24 (op, dst);
1802
1803 SYNC_IP ();
1804 LOCAL_SET (dst, scm_current_module ());
1805
1806 NEXT (1);
1807 }
1808
1809 /* resolve dst:24 bound?:1 _:7 sym:24
1810 *
1811 * Resolve SYM in the current module, and place the resulting variable
1812 * in DST.
1813 */
1814 VM_DEFINE_OP (58, resolve, "resolve", OP2 (U8_U24, B1_X7_U24) | OP_DST)
1815 {
1816 scm_t_uint32 dst;
1817 scm_t_uint32 sym;
1818 SCM var;
1819
1820 UNPACK_24 (op, dst);
1821 UNPACK_24 (ip[1], sym);
1822
1823 SYNC_IP ();
1824 var = scm_lookup (LOCAL_REF (sym));
1825 CACHE_FP ();
1826 if (ip[1] & 0x1)
1827 VM_ASSERT (VARIABLE_BOUNDP (var),
1828 vm_error_unbound (fp[0], LOCAL_REF (sym)));
1829 LOCAL_SET (dst, var);
1830
1831 NEXT (2);
1832 }
1833
1834 /* define! sym:12 val:12
1835 *
1836 * Look up a binding for SYM in the current module, creating it if
1837 * necessary. Set its value to VAL.
1838 */
1839 VM_DEFINE_OP (59, define, "define!", OP1 (U8_U12_U12))
1840 {
1841 scm_t_uint16 sym, val;
1842 UNPACK_12_12 (op, sym, val);
1843 SYNC_IP ();
1844 scm_define (LOCAL_REF (sym), LOCAL_REF (val));
1845 CACHE_FP ();
1846 NEXT (1);
1847 }
1848
1849 /* toplevel-box dst:24 var-offset:32 mod-offset:32 sym-offset:32 bound?:1 _:31
1850 *
1851 * Load a SCM value. The SCM value will be fetched from memory,
1852 * VAR-OFFSET 32-bit words away from the current instruction pointer.
1853 * VAR-OFFSET is a signed value. Up to here, toplevel-box is like
1854 * static-ref.
1855 *
1856 * Then, if the loaded value is a variable, it is placed in DST, and control
1857 * flow continues.
1858 *
1859 * Otherwise, we have to resolve the variable. In that case we load
1860 * the module from MOD-OFFSET, just as we loaded the variable.
1861 * Usually the module gets set when the closure is created. The name
1862 * is an offset to a symbol.
1863 *
1864 * We use the module and the symbol to resolve the variable, placing it in
1865 * DST, and caching the resolved variable so that we will hit the cache next
1866 * time.
1867 */
1868 VM_DEFINE_OP (60, toplevel_box, "toplevel-box", OP5 (U8_U24, S32, S32, N32, B1_X31) | OP_DST)
1869 {
1870 scm_t_uint32 dst;
1871 scm_t_int32 var_offset;
1872 scm_t_uint32* var_loc_u32;
1873 SCM *var_loc;
1874 SCM var;
1875
1876 UNPACK_24 (op, dst);
1877 var_offset = ip[1];
1878 var_loc_u32 = ip + var_offset;
1879 VM_ASSERT (ALIGNED_P (var_loc_u32, SCM), abort());
1880 var_loc = (SCM *) var_loc_u32;
1881 var = *var_loc;
1882
1883 if (SCM_UNLIKELY (!SCM_VARIABLEP (var)))
1884 {
1885 SCM mod, sym;
1886 scm_t_int32 mod_offset = ip[2]; /* signed */
1887 scm_t_int32 sym_offset = ip[3]; /* signed */
1888 scm_t_uint32 *mod_loc = ip + mod_offset;
1889 scm_t_uint32 *sym_loc = ip + sym_offset;
1890
1891 SYNC_IP ();
1892
1893 VM_ASSERT (ALIGNED_P (mod_loc, SCM), abort());
1894 VM_ASSERT (ALIGNED_P (sym_loc, SCM), abort());
1895
1896 mod = *((SCM *) mod_loc);
1897 sym = *((SCM *) sym_loc);
1898
1899 /* If the toplevel scope was captured before modules were
1900 booted, use the root module. */
1901 if (scm_is_false (mod))
1902 mod = scm_the_root_module ();
1903
1904 var = scm_module_lookup (mod, sym);
1905 CACHE_FP ();
1906 if (ip[4] & 0x1)
1907 VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (fp[0], sym));
1908
1909 *var_loc = var;
1910 }
1911
1912 LOCAL_SET (dst, var);
1913 NEXT (5);
1914 }
1915
1916 /* module-box dst:24 var-offset:32 mod-offset:32 sym-offset:32 bound?:1 _:31
1917 *
1918 * Like toplevel-box, except MOD-OFFSET points at the name of a module
1919 * instead of the module itself.
1920 */
1921 VM_DEFINE_OP (61, module_box, "module-box", OP5 (U8_U24, S32, N32, N32, B1_X31) | OP_DST)
1922 {
1923 scm_t_uint32 dst;
1924 scm_t_int32 var_offset;
1925 scm_t_uint32* var_loc_u32;
1926 SCM *var_loc;
1927 SCM var;
1928
1929 UNPACK_24 (op, dst);
1930 var_offset = ip[1];
1931 var_loc_u32 = ip + var_offset;
1932 VM_ASSERT (ALIGNED_P (var_loc_u32, SCM), abort());
1933 var_loc = (SCM *) var_loc_u32;
1934 var = *var_loc;
1935
1936 if (SCM_UNLIKELY (!SCM_VARIABLEP (var)))
1937 {
1938 SCM modname, sym;
1939 scm_t_int32 modname_offset = ip[2]; /* signed */
1940 scm_t_int32 sym_offset = ip[3]; /* signed */
1941 scm_t_uint32 *modname_words = ip + modname_offset;
1942 scm_t_uint32 *sym_loc = ip + sym_offset;
1943
1944 SYNC_IP ();
1945
1946 VM_ASSERT (!(((scm_t_uintptr) modname_words) & 0x7), abort());
1947 VM_ASSERT (ALIGNED_P (sym_loc, SCM), abort());
1948
1949 modname = SCM_PACK ((scm_t_bits) modname_words);
1950 sym = *((SCM *) sym_loc);
1951
1952 if (!scm_module_system_booted_p)
1953 {
1954 #ifdef VM_ENABLE_PARANOID_ASSERTIONS
1955 ASSERT
1956 (scm_is_true
 1957                (scm_equal_p (modname,
 1958                              scm_list_2 (SCM_BOOL_T,
 1959                                          scm_from_utf8_symbol ("guile")))));
1960 #endif
1961 var = scm_lookup (sym);
1962 }
1963 else if (scm_is_true (SCM_CAR (modname)))
1964 var = scm_public_lookup (SCM_CDR (modname), sym);
1965 else
1966 var = scm_private_lookup (SCM_CDR (modname), sym);
1967
1968 CACHE_FP ();
1969
1970 if (ip[4] & 0x1)
1971 VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (fp[0], sym));
1972
1973 *var_loc = var;
1974 }
1975
1976 LOCAL_SET (dst, var);
1977 NEXT (5);
1978 }
1979
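  /* Illustrative sketch, not part of the VM: the slow paths above are
   * roughly what a C program does when it resolves a toplevel or
   * module-level variable by hand through the public API.  The helper
   * name below is hypothetical; it assumes code running in Guile mode
   * (e.g. under scm_with_guile).
   *
   *   #include <libguile.h>
   *
   *   static void
   *   lookup_example (void)
   *   {
   *     SCM var, val;
   *
   *     // Resolve "car" in the current module, as toplevel-box does
   *     // once the module system is booted.
   *     var = scm_c_lookup ("car");
   *     val = scm_variable_ref (var);
   *
   *     // Resolve a public binding by module name, as module-box does
   *     // when the car of MODNAME is true.
   *     var = scm_c_public_lookup ("guile", "cdr");
   *     val = scm_variable_ref (var);
   *     (void) val;
   *   }
   *
   * The opcodes additionally cache the resolved variable object back
   * into the instruction stream, so the lookup only happens once.
   */
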
1980 \f
1981
1982 /*
1983 * The dynamic environment
1984 */
1985
1986 /* prompt tag:24 escape-only?:1 _:7 proc-slot:24 _:8 handler-offset:24
1987 *
1988 * Push a new prompt on the dynamic stack, with a tag from TAG and a
1989 * handler at HANDLER-OFFSET words from the current IP. The handler
1990 * will expect a multiple-value return as if from a call with the
1991 * procedure at PROC-SLOT.
1992 */
1993 VM_DEFINE_OP (62, prompt, "prompt", OP3 (U8_U24, B1_X7_U24, X8_L24))
1994 {
1995 scm_t_uint32 tag, proc_slot;
1996 scm_t_int32 offset;
1997 scm_t_uint8 escape_only_p;
1998 scm_t_dynstack_prompt_flags flags;
1999
2000 UNPACK_24 (op, tag);
2001 escape_only_p = ip[1] & 0x1;
2002 UNPACK_24 (ip[1], proc_slot);
2003 offset = ip[2];
2004 offset >>= 8; /* Sign extension */
2005
2006 /* Push the prompt onto the dynamic stack. */
2007 flags = escape_only_p ? SCM_F_DYNSTACK_PROMPT_ESCAPE_ONLY : 0;
2008 scm_dynstack_push_prompt (&thread->dynstack, flags,
2009 LOCAL_REF (tag),
2010 fp - vp->stack_base,
2011 LOCAL_ADDRESS (proc_slot) - vp->stack_base,
2012 ip + offset,
2013 registers);
2014 NEXT (3);
2015 }
2016
2017 /* wind winder:12 unwinder:12
2018 *
2019 * Push wind and unwind procedures onto the dynamic stack. Note that
 2020    * neither is actually called; the compiler should emit calls to wind
 2021    * and unwind for the normal dynamic-wind control flow. Also note that
 2022    * the compiler should have inserted checks that the wind and unwind
2023 * procs are thunks, if it could not prove that to be the case.
2024 */
2025 VM_DEFINE_OP (63, wind, "wind", OP1 (U8_U12_U12))
2026 {
2027 scm_t_uint16 winder, unwinder;
2028 UNPACK_12_12 (op, winder, unwinder);
2029 scm_dynstack_push_dynwind (&thread->dynstack,
2030 LOCAL_REF (winder), LOCAL_REF (unwinder));
2031 NEXT (1);
2032 }
2033
2034 /* unwind _:24
2035 *
2036 * A normal exit from the dynamic extent of an expression. Pop the top
2037 * entry off of the dynamic stack.
2038 */
2039 VM_DEFINE_OP (64, unwind, "unwind", OP1 (U8_X24))
2040 {
2041 scm_dynstack_pop (&thread->dynstack);
2042 NEXT (1);
2043 }
2044
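  /* Illustrative sketch, not part of the VM: the C-level counterpart of
   * this wind/unwind discipline is the scm_dynwind API, which pushes and
   * pops entries on the same dynamic stack.  The helper names below are
   * hypothetical.
   *
   *   #include <libguile.h>
   *   #include <stdlib.h>
   *
   *   static void
   *   free_buffer (void *p)
   *   {
   *     free (p);
   *   }
   *
   *   static void
   *   with_buffer (void)
   *   {
   *     void *buf = malloc (1024);
   *     scm_dynwind_begin (0);
   *     // Called on normal exit and on non-local exit through this frame.
   *     scm_dynwind_unwind_handler (free_buffer, buf, SCM_F_WIND_EXPLICITLY);
   *     // ... body that may throw or leave via a continuation ...
   *     scm_dynwind_end ();
   *   }
   */
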
2045 /* push-fluid fluid:12 value:12
2046 *
2047 * Dynamically bind VALUE to FLUID.
2048 */
2049 VM_DEFINE_OP (65, push_fluid, "push-fluid", OP1 (U8_U12_U12))
2050 {
2051 scm_t_uint32 fluid, value;
2052
2053 UNPACK_12_12 (op, fluid, value);
2054
2055 scm_dynstack_push_fluid (&thread->dynstack,
2056 LOCAL_REF (fluid), LOCAL_REF (value),
2057 thread->dynamic_state);
2058 NEXT (1);
2059 }
2060
2061 /* pop-fluid _:24
2062 *
2063 * Leave the dynamic extent of a with-fluid* expression, restoring the
2064 * fluid to its previous value.
2065 */
2066 VM_DEFINE_OP (66, pop_fluid, "pop-fluid", OP1 (U8_X24))
2067 {
2068 /* This function must not allocate. */
2069 scm_dynstack_unwind_fluid (&thread->dynstack,
2070 thread->dynamic_state);
2071 NEXT (1);
2072 }
2073
2074 /* fluid-ref dst:12 src:12
2075 *
2076 * Reference the fluid in SRC, and place the value in DST.
2077 */
2078 VM_DEFINE_OP (67, fluid_ref, "fluid-ref", OP1 (U8_U12_U12) | OP_DST)
2079 {
2080 scm_t_uint16 dst, src;
2081 size_t num;
2082 SCM fluid, fluids;
2083
2084 UNPACK_12_12 (op, dst, src);
2085 fluid = LOCAL_REF (src);
2086 fluids = SCM_I_DYNAMIC_STATE_FLUIDS (thread->dynamic_state);
2087 if (SCM_UNLIKELY (!SCM_FLUID_P (fluid))
2088 || ((num = SCM_I_FLUID_NUM (fluid)) >= SCM_SIMPLE_VECTOR_LENGTH (fluids)))
2089 {
2090 /* Punt dynstate expansion and error handling to the C proc. */
2091 SYNC_IP ();
2092 LOCAL_SET (dst, scm_fluid_ref (fluid));
2093 }
2094 else
2095 {
2096 SCM val = SCM_SIMPLE_VECTOR_REF (fluids, num);
2097 if (scm_is_eq (val, SCM_UNDEFINED))
2098 val = SCM_I_FLUID_DEFAULT (fluid);
2099 VM_ASSERT (!scm_is_eq (val, SCM_UNDEFINED),
2100 vm_error_unbound_fluid (SCM_FRAME_PROGRAM (fp), fluid));
2101 LOCAL_SET (dst, val);
2102 }
2103
2104 NEXT (1);
2105 }
2106
2107 /* fluid-set fluid:12 val:12
2108 *
 2109    * Set the value of the fluid in FLUID to the value in VAL.
2110 */
2111 VM_DEFINE_OP (68, fluid_set, "fluid-set", OP1 (U8_U12_U12))
2112 {
2113 scm_t_uint16 a, b;
2114 size_t num;
2115 SCM fluid, fluids;
2116
2117 UNPACK_12_12 (op, a, b);
2118 fluid = LOCAL_REF (a);
2119 fluids = SCM_I_DYNAMIC_STATE_FLUIDS (thread->dynamic_state);
2120 if (SCM_UNLIKELY (!SCM_FLUID_P (fluid))
2121 || ((num = SCM_I_FLUID_NUM (fluid)) >= SCM_SIMPLE_VECTOR_LENGTH (fluids)))
2122 {
2123 /* Punt dynstate expansion and error handling to the C proc. */
2124 SYNC_IP ();
2125 scm_fluid_set_x (fluid, LOCAL_REF (b));
2126 }
2127 else
2128 SCM_SIMPLE_VECTOR_SET (fluids, num, LOCAL_REF (b));
2129
2130 NEXT (1);
2131 }
2132
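  /* Illustrative sketch, not part of the VM: the fast paths above index
   * the dynamic state's fluid vector directly; the slow paths call the
   * ordinary C procedures, which can also be used directly.  The helper
   * name is hypothetical and assumes code running in Guile mode.
   *
   *   #include <libguile.h>
   *
   *   static void
   *   fluid_example (void)
   *   {
   *     SCM f, v;
   *
   *     f = scm_make_fluid ();
   *     scm_fluid_set_x (f, scm_from_int (42));   // like fluid-set
   *     v = scm_fluid_ref (f);                    // like fluid-ref, => 42
   *     (void) v;
   *   }
   */
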
2133
2134 \f
2135
2136 /*
2137 * Strings, symbols, and keywords
2138 */
2139
2140 /* string-length dst:12 src:12
2141 *
2142 * Store the length of the string in SRC in DST.
2143 */
2144 VM_DEFINE_OP (69, string_length, "string-length", OP1 (U8_U12_U12) | OP_DST)
2145 {
2146 ARGS1 (str);
2147 if (SCM_LIKELY (scm_is_string (str)))
2148 RETURN (SCM_I_MAKINUM (scm_i_string_length (str)));
2149 else
2150 {
2151 SYNC_IP ();
2152 RETURN (scm_string_length (str));
2153 }
2154 }
2155
2156 /* string-ref dst:8 src:8 idx:8
2157 *
2158 * Fetch the character at position IDX in the string in SRC, and store
2159 * it in DST.
2160 */
2161 VM_DEFINE_OP (70, string_ref, "string-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2162 {
2163 scm_t_signed_bits i = 0;
2164 ARGS2 (str, idx);
2165 if (SCM_LIKELY (scm_is_string (str)
2166 && SCM_I_INUMP (idx)
2167 && ((i = SCM_I_INUM (idx)) >= 0)
2168 && i < scm_i_string_length (str)))
2169 RETURN (SCM_MAKE_CHAR (scm_i_string_ref (str, i)));
2170 else
2171 {
2172 SYNC_IP ();
2173 RETURN (scm_string_ref (str, idx));
2174 }
2175 }
2176
2177 /* No string-set! instruction, as there is no good fast path there. */
2178
2179 /* string->number dst:12 src:12
2180 *
2181 * Parse a string in SRC to a number, and store in DST.
2182 */
2183 VM_DEFINE_OP (71, string_to_number, "string->number", OP1 (U8_U12_U12) | OP_DST)
2184 {
2185 scm_t_uint16 dst, src;
2186
2187 UNPACK_12_12 (op, dst, src);
2188 SYNC_IP ();
2189 LOCAL_SET (dst,
2190 scm_string_to_number (LOCAL_REF (src),
2191 SCM_UNDEFINED /* radix = 10 */));
2192 NEXT (1);
2193 }
2194
2195 /* string->symbol dst:12 src:12
2196 *
2197 * Parse a string in SRC to a symbol, and store in DST.
2198 */
2199 VM_DEFINE_OP (72, string_to_symbol, "string->symbol", OP1 (U8_U12_U12) | OP_DST)
2200 {
2201 scm_t_uint16 dst, src;
2202
2203 UNPACK_12_12 (op, dst, src);
2204 SYNC_IP ();
2205 LOCAL_SET (dst, scm_string_to_symbol (LOCAL_REF (src)));
2206 NEXT (1);
2207 }
2208
2209 /* symbol->keyword dst:12 src:12
2210 *
2211 * Make a keyword from the symbol in SRC, and store it in DST.
2212 */
2213 VM_DEFINE_OP (73, symbol_to_keyword, "symbol->keyword", OP1 (U8_U12_U12) | OP_DST)
2214 {
2215 scm_t_uint16 dst, src;
2216 UNPACK_12_12 (op, dst, src);
2217 SYNC_IP ();
2218 LOCAL_SET (dst, scm_symbol_to_keyword (LOCAL_REF (src)));
2219 NEXT (1);
2220 }
2221
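  /* Illustrative sketch, not part of the VM: these conversion opcodes
   * simply defer to the corresponding libguile procedures, which are
   * callable from C.  The helper name is hypothetical and assumes code
   * running in Guile mode.
   *
   *   #include <libguile.h>
   *
   *   static void
   *   conversion_example (void)
   *   {
   *     SCM n, sym, kw;
   *
   *     n = scm_string_to_number (scm_from_utf8_string ("255"),
   *                               SCM_UNDEFINED);        // radix 10
   *     sym = scm_string_to_symbol (scm_from_utf8_string ("foo"));
   *     kw = scm_symbol_to_keyword (sym);                // #:foo
   *     (void) n; (void) kw;
   *   }
   */
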
2222 \f
2223
2224 /*
2225 * Pairs
2226 */
2227
2228 /* cons dst:8 car:8 cdr:8
2229 *
2230 * Cons CAR and CDR, and store the result in DST.
2231 */
2232 VM_DEFINE_OP (74, cons, "cons", OP1 (U8_U8_U8_U8) | OP_DST)
2233 {
2234 ARGS2 (x, y);
2235 RETURN (scm_inline_cons (thread, x, y));
2236 }
2237
2238 /* car dst:12 src:12
2239 *
2240 * Place the car of SRC in DST.
2241 */
2242 VM_DEFINE_OP (75, car, "car", OP1 (U8_U12_U12) | OP_DST)
2243 {
2244 ARGS1 (x);
2245 VM_VALIDATE_PAIR (x, "car");
2246 RETURN (SCM_CAR (x));
2247 }
2248
2249 /* cdr dst:12 src:12
2250 *
2251 * Place the cdr of SRC in DST.
2252 */
2253 VM_DEFINE_OP (76, cdr, "cdr", OP1 (U8_U12_U12) | OP_DST)
2254 {
2255 ARGS1 (x);
2256 VM_VALIDATE_PAIR (x, "cdr");
2257 RETURN (SCM_CDR (x));
2258 }
2259
2260 /* set-car! pair:12 car:12
2261 *
 2262    * Set the car of the pair in PAIR to CAR.
2263 */
2264 VM_DEFINE_OP (77, set_car, "set-car!", OP1 (U8_U12_U12))
2265 {
2266 scm_t_uint16 a, b;
2267 SCM x, y;
2268 UNPACK_12_12 (op, a, b);
2269 x = LOCAL_REF (a);
2270 y = LOCAL_REF (b);
2271 VM_VALIDATE_PAIR (x, "set-car!");
2272 SCM_SETCAR (x, y);
2273 NEXT (1);
2274 }
2275
2276 /* set-cdr! pair:12 cdr:12
2277 *
 2278    * Set the cdr of the pair in PAIR to CDR.
2279 */
2280 VM_DEFINE_OP (78, set_cdr, "set-cdr!", OP1 (U8_U12_U12))
2281 {
2282 scm_t_uint16 a, b;
2283 SCM x, y;
2284 UNPACK_12_12 (op, a, b);
2285 x = LOCAL_REF (a);
2286 y = LOCAL_REF (b);
 2287       VM_VALIDATE_PAIR (x, "set-cdr!");
2288 SCM_SETCDR (x, y);
2289 NEXT (1);
2290 }
2291
2292
2293 \f
2294
2295 /*
2296 * Numeric operations
2297 */
2298
2299 /* add dst:8 a:8 b:8
2300 *
2301 * Add A to B, and place the result in DST.
2302 */
2303 VM_DEFINE_OP (79, add, "add", OP1 (U8_U8_U8_U8) | OP_DST)
2304 {
2305 BINARY_INTEGER_OP (+, scm_sum);
2306 }
2307
2308 /* add1 dst:12 src:12
2309 *
2310 * Add 1 to the value in SRC, and place the result in DST.
2311 */
2312 VM_DEFINE_OP (80, add1, "add1", OP1 (U8_U12_U12) | OP_DST)
2313 {
2314 ARGS1 (x);
2315
2316 /* Check for overflow. We must avoid overflow in the signed
2317 addition below, even if X is not an inum. */
2318 if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) <= INUM_MAX - INUM_STEP))
2319 {
2320 SCM result;
2321
2322 /* Add 1 to the integer without untagging. */
2323 result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) + INUM_STEP);
2324
2325 if (SCM_LIKELY (SCM_I_INUMP (result)))
2326 RETURN (result);
2327 }
2328
2329 RETURN_EXP (scm_sum (x, SCM_I_MAKINUM (1)));
2330 }
2331
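  /* Illustrative sketch, not part of the VM: why adding INUM_STEP to the
   * tagged word increments the fixnum without untagging.  Assuming the
   * usual encoding where fixnum N is represented as (N << 2) | 2, the tag
   * bits are untouched by adding 4, so only the payload changes:
   *
   *   #include <stdint.h>
   *   #include <assert.h>
   *
   *   #define TAG(n)   ((((intptr_t) (n)) << 2) | 2)
   *   #define UNTAG(w) (((intptr_t) (w)) >> 2)
   *
   *   static void
   *   tagged_increment_example (void)
   *   {
   *     intptr_t w = TAG (41);
   *     assert (UNTAG (w + 4) == 42);   // 4 plays the role of INUM_STEP
   *   }
   *
   * The guard above keeps the signed addition from overflowing, and the
   * SCM_I_INUMP check on the result catches the case where X was not a
   * fixnum in the first place.
   */
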
2332 /* sub dst:8 a:8 b:8
2333 *
2334 * Subtract B from A, and place the result in DST.
2335 */
2336 VM_DEFINE_OP (81, sub, "sub", OP1 (U8_U8_U8_U8) | OP_DST)
2337 {
2338 BINARY_INTEGER_OP (-, scm_difference);
2339 }
2340
2341 /* sub1 dst:12 src:12
2342 *
2343 * Subtract 1 from SRC, and place the result in DST.
2344 */
2345 VM_DEFINE_OP (82, sub1, "sub1", OP1 (U8_U12_U12) | OP_DST)
2346 {
2347 ARGS1 (x);
2348
2349 /* Check for overflow. We must avoid overflow in the signed
2350 subtraction below, even if X is not an inum. */
2351 if (SCM_LIKELY ((scm_t_signed_bits) SCM_UNPACK (x) >= INUM_MIN + INUM_STEP))
2352 {
2353 SCM result;
2354
 2355             /* Subtract 1 from the integer without untagging. */
2356 result = SCM_PACK ((scm_t_signed_bits) SCM_UNPACK (x) - INUM_STEP);
2357
2358 if (SCM_LIKELY (SCM_I_INUMP (result)))
2359 RETURN (result);
2360 }
2361
2362 RETURN_EXP (scm_difference (x, SCM_I_MAKINUM (1)));
2363 }
2364
2365 /* mul dst:8 a:8 b:8
2366 *
2367 * Multiply A and B, and place the result in DST.
2368 */
2369 VM_DEFINE_OP (83, mul, "mul", OP1 (U8_U8_U8_U8) | OP_DST)
2370 {
2371 ARGS2 (x, y);
2372 RETURN_EXP (scm_product (x, y));
2373 }
2374
2375 /* div dst:8 a:8 b:8
2376 *
2377 * Divide A by B, and place the result in DST.
2378 */
2379 VM_DEFINE_OP (84, div, "div", OP1 (U8_U8_U8_U8) | OP_DST)
2380 {
2381 ARGS2 (x, y);
2382 RETURN_EXP (scm_divide (x, y));
2383 }
2384
2385 /* quo dst:8 a:8 b:8
2386 *
2387 * Divide A by B, and place the quotient in DST.
2388 */
2389 VM_DEFINE_OP (85, quo, "quo", OP1 (U8_U8_U8_U8) | OP_DST)
2390 {
2391 ARGS2 (x, y);
2392 RETURN_EXP (scm_quotient (x, y));
2393 }
2394
2395 /* rem dst:8 a:8 b:8
2396 *
2397 * Divide A by B, and place the remainder in DST.
2398 */
2399 VM_DEFINE_OP (86, rem, "rem", OP1 (U8_U8_U8_U8) | OP_DST)
2400 {
2401 ARGS2 (x, y);
2402 RETURN_EXP (scm_remainder (x, y));
2403 }
2404
2405 /* mod dst:8 a:8 b:8
2406 *
2407 * Place the modulo of A by B in DST.
2408 */
2409 VM_DEFINE_OP (87, mod, "mod", OP1 (U8_U8_U8_U8) | OP_DST)
2410 {
2411 ARGS2 (x, y);
2412 RETURN_EXP (scm_modulo (x, y));
2413 }
2414
2415 /* ash dst:8 a:8 b:8
2416 *
2417 * Shift A arithmetically by B bits, and place the result in DST.
2418 */
2419 VM_DEFINE_OP (88, ash, "ash", OP1 (U8_U8_U8_U8) | OP_DST)
2420 {
2421 ARGS2 (x, y);
2422 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
2423 {
2424 if (SCM_I_INUM (y) < 0)
2425 /* Right shift, will be a fixnum. */
2426 RETURN (SCM_I_MAKINUM
2427 (SCM_SRS (SCM_I_INUM (x),
2428 (-SCM_I_INUM (y) <= SCM_I_FIXNUM_BIT-1)
2429 ? -SCM_I_INUM (y) : SCM_I_FIXNUM_BIT-1)));
2430 else
2431 /* Left shift. See comments in scm_ash. */
2432 {
2433 scm_t_signed_bits nn, bits_to_shift;
2434
2435 nn = SCM_I_INUM (x);
2436 bits_to_shift = SCM_I_INUM (y);
2437
2438 if (bits_to_shift < SCM_I_FIXNUM_BIT-1
2439 && ((scm_t_bits)
2440 (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
2441 <= 1))
2442 RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
2443 /* fall through */
2444 }
2445 /* fall through */
2446 }
2447 RETURN_EXP (scm_ash (x, y));
2448 }
2449
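  /* Illustrative sketch, not part of the VM: the left-shift guard above
   * takes the fast path only when all the bits that would be shifted out,
   * together with the new sign bit, equal the original sign bit -- that
   * is, when the high part of NN is all zeros or all ones.  In plain C,
   * assuming arithmetic right shift of negative values:
   *
   *   #include <stdint.h>
   *
   *   static int
   *   shift_fits_fixnum (intptr_t nn, int b, int fixnum_bit)
   *   {
   *     intptr_t top = nn >> (fixnum_bit - 1 - b);
   *     return ((uintptr_t) (top + 1)) <= 1;   // top must be 0 or -1
   *   }
   */
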
2450 /* logand dst:8 a:8 b:8
2451 *
2452 * Place the bitwise AND of A and B into DST.
2453 */
2454 VM_DEFINE_OP (89, logand, "logand", OP1 (U8_U8_U8_U8) | OP_DST)
2455 {
2456 ARGS2 (x, y);
2457 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
2458 /* Compute bitwise AND without untagging */
2459 RETURN (SCM_PACK (SCM_UNPACK (x) & SCM_UNPACK (y)));
2460 RETURN_EXP (scm_logand (x, y));
2461 }
2462
2463 /* logior dst:8 a:8 b:8
2464 *
2465 * Place the bitwise inclusive OR of A with B in DST.
2466 */
2467 VM_DEFINE_OP (90, logior, "logior", OP1 (U8_U8_U8_U8) | OP_DST)
2468 {
2469 ARGS2 (x, y);
2470 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
2471 /* Compute bitwise OR without untagging */
2472 RETURN (SCM_PACK (SCM_UNPACK (x) | SCM_UNPACK (y)));
2473 RETURN_EXP (scm_logior (x, y));
2474 }
2475
2476 /* logxor dst:8 a:8 b:8
2477 *
2478 * Place the bitwise exclusive OR of A with B in DST.
2479 */
2480 VM_DEFINE_OP (91, logxor, "logxor", OP1 (U8_U8_U8_U8) | OP_DST)
2481 {
2482 ARGS2 (x, y);
2483 if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
2484 RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
2485 RETURN_EXP (scm_logxor (x, y));
2486 }
2487
2488 /* make-vector/immediate dst:8 length:8 init:8
2489 *
2490 * Make a short vector of known size and write it to DST. The vector
 2491    * will have space for LENGTH slots, where LENGTH is an immediate value.
 2492    * The slots are filled with the value in slot INIT.
2493 */
2494 VM_DEFINE_OP (92, make_vector_immediate, "make-vector/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
2495 {
2496 scm_t_uint8 dst, init;
2497 scm_t_int32 length, n;
2498 SCM val, vector;
2499
2500 UNPACK_8_8_8 (op, dst, length, init);
2501
2502 val = LOCAL_REF (init);
2503 vector = scm_inline_words (thread, scm_tc7_vector | (length << 8),
2504 length + 1);
2505 for (n = 0; n < length; n++)
2506 SCM_SIMPLE_VECTOR_SET (vector, n, val);
2507 LOCAL_SET (dst, vector);
2508 NEXT (1);
2509 }
2510
2511 /* vector-length dst:12 src:12
2512 *
2513 * Store the length of the vector in SRC in DST.
2514 */
2515 VM_DEFINE_OP (93, vector_length, "vector-length", OP1 (U8_U12_U12) | OP_DST)
2516 {
2517 ARGS1 (vect);
2518 if (SCM_LIKELY (SCM_I_IS_VECTOR (vect)))
2519 RETURN (SCM_I_MAKINUM (SCM_I_VECTOR_LENGTH (vect)));
2520 else
2521 {
2522 SYNC_IP ();
2523 RETURN (scm_vector_length (vect));
2524 }
2525 }
2526
2527 /* vector-ref dst:8 src:8 idx:8
2528 *
2529 * Fetch the item at position IDX in the vector in SRC, and store it
2530 * in DST.
2531 */
2532 VM_DEFINE_OP (94, vector_ref, "vector-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2533 {
2534 scm_t_signed_bits i = 0;
2535 ARGS2 (vect, idx);
2536 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
2537 && SCM_I_INUMP (idx)
2538 && ((i = SCM_I_INUM (idx)) >= 0)
2539 && i < SCM_I_VECTOR_LENGTH (vect)))
2540 RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
2541 else
2542 {
2543 SYNC_IP ();
2544 RETURN (scm_vector_ref (vect, idx));
2545 }
2546 }
2547
2548 /* vector-ref/immediate dst:8 src:8 idx:8
2549 *
 2550    * Fetch the item at slot IDX in the vector in SRC, and store it in DST.
 2551    * IDX is an immediate value. Useful for building data types with vectors.
2552 */
2553 VM_DEFINE_OP (95, vector_ref_immediate, "vector-ref/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
2554 {
2555 scm_t_uint8 dst, src, idx;
2556 SCM v;
2557
2558 UNPACK_8_8_8 (op, dst, src, idx);
2559 v = LOCAL_REF (src);
2560 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (v)
2561 && idx < SCM_I_VECTOR_LENGTH (v)))
2562 LOCAL_SET (dst, SCM_I_VECTOR_ELTS (LOCAL_REF (src))[idx]);
2563 else
2564 LOCAL_SET (dst, scm_c_vector_ref (v, idx));
2565 NEXT (1);
2566 }
2567
2568 /* vector-set! dst:8 idx:8 src:8
2569 *
2570 * Store SRC into the vector DST at index IDX.
2571 */
2572 VM_DEFINE_OP (96, vector_set, "vector-set!", OP1 (U8_U8_U8_U8))
2573 {
2574 scm_t_uint8 dst, idx_var, src;
2575 SCM vect, idx, val;
2576 scm_t_signed_bits i = 0;
2577
2578 UNPACK_8_8_8 (op, dst, idx_var, src);
2579 vect = LOCAL_REF (dst);
2580 idx = LOCAL_REF (idx_var);
2581 val = LOCAL_REF (src);
2582
2583 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
2584 && SCM_I_INUMP (idx)
2585 && ((i = SCM_I_INUM (idx)) >= 0)
2586 && i < SCM_I_VECTOR_LENGTH (vect)))
2587 SCM_I_VECTOR_WELTS (vect)[i] = val;
2588 else
2589 {
2590 SYNC_IP ();
2591 scm_vector_set_x (vect, idx, val);
2592 }
2593 NEXT (1);
2594 }
2595
2596 /* vector-set!/immediate dst:8 idx:8 src:8
2597 *
2598 * Store SRC into the vector DST at index IDX. Here IDX is an
2599 * immediate value.
2600 */
2601 VM_DEFINE_OP (97, vector_set_immediate, "vector-set!/immediate", OP1 (U8_U8_U8_U8))
2602 {
2603 scm_t_uint8 dst, idx, src;
2604 SCM vect, val;
2605
2606 UNPACK_8_8_8 (op, dst, idx, src);
2607 vect = LOCAL_REF (dst);
2608 val = LOCAL_REF (src);
2609
2610 if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
2611 && idx < SCM_I_VECTOR_LENGTH (vect)))
2612 SCM_I_VECTOR_WELTS (vect)[idx] = val;
2613 else
2614 {
2615 SYNC_IP ();
2616 scm_vector_set_x (vect, scm_from_uint8 (idx), val);
2617 }
2618 NEXT (1);
2619 }
2620
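  /* Illustrative sketch, not part of the VM: the fallback branches above
   * are ordinary libguile calls, and the same vector operations are
   * available from C.  The helper name is hypothetical and assumes code
   * running in Guile mode.
   *
   *   #include <libguile.h>
   *
   *   static void
   *   vector_example (void)
   *   {
   *     SCM v, x;
   *
   *     v = scm_c_make_vector (3, SCM_INUM0);      // #(0 0 0)
   *     scm_c_vector_set_x (v, 1, scm_from_int (7));
   *     x = scm_c_vector_ref (v, 1);               // => 7
   *     (void) x;
   *   }
   */
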
2621
2622 \f
2623
2624 /*
2625 * Structs and GOOPS
2626 */
2627
2628 /* struct-vtable dst:12 src:12
2629 *
2630 * Store the vtable of SRC into DST.
2631 */
2632 VM_DEFINE_OP (98, struct_vtable, "struct-vtable", OP1 (U8_U12_U12) | OP_DST)
2633 {
2634 ARGS1 (obj);
 2635       VM_VALIDATE_STRUCT (obj, "struct-vtable");
2636 RETURN (SCM_STRUCT_VTABLE (obj));
2637 }
2638
2639 /* allocate-struct/immediate dst:8 vtable:8 nfields:8
2640 *
2641 * Allocate a new struct with VTABLE, and place it in DST. The struct
2642 * will be constructed with space for NFIELDS fields, which should
2643 * correspond to the field count of the VTABLE.
2644 */
2645 VM_DEFINE_OP (99, allocate_struct_immediate, "allocate-struct/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
2646 {
2647 scm_t_uint8 dst, vtable, nfields;
2648 SCM ret;
2649
2650 UNPACK_8_8_8 (op, dst, vtable, nfields);
2651
2652 SYNC_IP ();
2653 ret = scm_allocate_struct (LOCAL_REF (vtable), SCM_I_MAKINUM (nfields));
2654 LOCAL_SET (dst, ret);
2655
2656 NEXT (1);
2657 }
2658
2659 /* struct-ref/immediate dst:8 src:8 idx:8
2660 *
2661 * Fetch the item at slot IDX in the struct in SRC, and store it
2662 * in DST. IDX is an immediate unsigned 8-bit value.
2663 */
2664 VM_DEFINE_OP (100, struct_ref_immediate, "struct-ref/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
2665 {
2666 scm_t_uint8 dst, src, idx;
2667 SCM obj;
2668
2669 UNPACK_8_8_8 (op, dst, src, idx);
2670
2671 obj = LOCAL_REF (src);
2672
2673 if (SCM_LIKELY (SCM_STRUCTP (obj)
2674 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
2675 SCM_VTABLE_FLAG_SIMPLE)
2676 && idx < SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
2677 scm_vtable_index_size)))
2678 RETURN (SCM_STRUCT_SLOT_REF (obj, idx));
2679
2680 SYNC_IP ();
2681 RETURN (scm_struct_ref (obj, SCM_I_MAKINUM (idx)));
2682 }
2683
2684 /* struct-set!/immediate dst:8 idx:8 src:8
2685 *
2686 * Store SRC into the struct DST at slot IDX. IDX is an immediate
2687 * unsigned 8-bit value.
2688 */
2689 VM_DEFINE_OP (101, struct_set_immediate, "struct-set!/immediate", OP1 (U8_U8_U8_U8))
2690 {
2691 scm_t_uint8 dst, idx, src;
2692 SCM obj, val;
2693
2694 UNPACK_8_8_8 (op, dst, idx, src);
2695
2696 obj = LOCAL_REF (dst);
2697 val = LOCAL_REF (src);
2698
2699 if (SCM_LIKELY (SCM_STRUCTP (obj)
2700 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
2701 SCM_VTABLE_FLAG_SIMPLE)
2702 && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
2703 SCM_VTABLE_FLAG_SIMPLE_RW)
2704 && idx < SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
2705 scm_vtable_index_size)))
2706 {
2707 SCM_STRUCT_SLOT_SET (obj, idx, val);
2708 NEXT (1);
2709 }
2710
2711 SYNC_IP ();
2712 scm_struct_set_x (obj, SCM_I_MAKINUM (idx), val);
2713 NEXT (1);
2714 }
2715
2716 /* class-of dst:12 type:12
2717 *
 2718    * Store the class of the object in SRC into DST.
2719 */
2720 VM_DEFINE_OP (102, class_of, "class-of", OP1 (U8_U12_U12) | OP_DST)
2721 {
2722 ARGS1 (obj);
2723 if (SCM_INSTANCEP (obj))
2724 RETURN (SCM_CLASS_OF (obj));
2725 SYNC_IP ();
2726 RETURN (scm_class_of (obj));
2727 }
2728
2729 VM_DEFINE_OP (103, unused_103, NULL, NOP)
2730 VM_DEFINE_OP (104, unused_104, NULL, NOP)
2731 goto op_unused_255;
2732
2733 \f
2734
2735 /*
2736 * Arrays, packed uniform arrays, and bytevectors.
2737 */
2738
2739 /* load-typed-array dst:8 type:8 shape:8 offset:32 len:32
2740 *
2741 * Load the contiguous typed array located at OFFSET 32-bit words away
2742 * from the instruction pointer, and store into DST. LEN is a byte
2743 * length. OFFSET is signed.
2744 */
2745 VM_DEFINE_OP (105, load_typed_array, "load-typed-array", OP3 (U8_U8_U8_U8, N32, U32) | OP_DST)
2746 {
2747 scm_t_uint8 dst, type, shape;
2748 scm_t_int32 offset;
2749 scm_t_uint32 len;
2750
2751 UNPACK_8_8_8 (op, dst, type, shape);
2752 offset = ip[1];
2753 len = ip[2];
2754 SYNC_IP ();
2755 LOCAL_SET (dst, scm_from_contiguous_typed_array (LOCAL_REF (type),
2756 LOCAL_REF (shape),
2757 ip + offset, len));
2758 NEXT (3);
2759 }
2760
2761 /* make-array dst:12 type:12 _:8 fill:12 bounds:12
2762 *
2763 * Make a new array with TYPE, FILL, and BOUNDS, storing it in DST.
2764 */
2765 VM_DEFINE_OP (106, make_array, "make-array", OP2 (U8_U12_U12, X8_U12_U12) | OP_DST)
2766 {
2767 scm_t_uint16 dst, type, fill, bounds;
2768 UNPACK_12_12 (op, dst, type);
2769 UNPACK_12_12 (ip[1], fill, bounds);
2770 SYNC_IP ();
2771 LOCAL_SET (dst, scm_make_typed_array (LOCAL_REF (type), LOCAL_REF (fill),
2772 LOCAL_REF (bounds)));
2773 NEXT (2);
2774 }
2775
2776 /* bv-u8-ref dst:8 src:8 idx:8
2777 * bv-s8-ref dst:8 src:8 idx:8
2778 * bv-u16-ref dst:8 src:8 idx:8
2779 * bv-s16-ref dst:8 src:8 idx:8
2780 * bv-u32-ref dst:8 src:8 idx:8
2781 * bv-s32-ref dst:8 src:8 idx:8
2782 * bv-u64-ref dst:8 src:8 idx:8
2783 * bv-s64-ref dst:8 src:8 idx:8
2784 * bv-f32-ref dst:8 src:8 idx:8
2785 * bv-f64-ref dst:8 src:8 idx:8
2786 *
2787 * Fetch the item at byte offset IDX in the bytevector SRC, and store
2788 * it in DST. All accesses use native endianness.
2789 */
2790 #define BV_FIXABLE_INT_REF(stem, fn_stem, type, size) \
2791 do { \
2792 scm_t_signed_bits i; \
2793 const scm_t_ ## type *int_ptr; \
2794 ARGS2 (bv, idx); \
2795 \
2796 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref"); \
2797 i = SCM_I_INUM (idx); \
2798 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
2799 \
2800 if (SCM_LIKELY (SCM_I_INUMP (idx) \
2801 && (i >= 0) \
2802 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
2803 && (ALIGNED_P (int_ptr, scm_t_ ## type)))) \
2804 RETURN (SCM_I_MAKINUM (*int_ptr)); \
2805 else \
2806 { \
2807 SYNC_IP (); \
2808 RETURN (scm_bytevector_ ## fn_stem ## _ref (bv, idx)); \
2809 } \
2810 } while (0)
2811
2812 #define BV_INT_REF(stem, type, size) \
2813 do { \
2814 scm_t_signed_bits i; \
2815 const scm_t_ ## type *int_ptr; \
2816 ARGS2 (bv, idx); \
2817 \
2818 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref"); \
2819 i = SCM_I_INUM (idx); \
2820 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
2821 \
2822 if (SCM_LIKELY (SCM_I_INUMP (idx) \
2823 && (i >= 0) \
2824 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
2825 && (ALIGNED_P (int_ptr, scm_t_ ## type)))) \
2826 { \
2827 scm_t_ ## type x = *int_ptr; \
2828 if (SCM_FIXABLE (x)) \
2829 RETURN (SCM_I_MAKINUM (x)); \
2830 else \
2831 { \
2832 SYNC_IP (); \
2833 RETURN (scm_from_ ## type (x)); \
2834 } \
2835 } \
2836 else \
2837 { \
2838 SYNC_IP (); \
2839 RETURN (scm_bytevector_ ## stem ## _native_ref (bv, idx)); \
2840 } \
2841 } while (0)
2842
2843 #define BV_FLOAT_REF(stem, fn_stem, type, size) \
2844 do { \
2845 scm_t_signed_bits i; \
2846 const type *float_ptr; \
2847 ARGS2 (bv, idx); \
2848 \
2849 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref"); \
2850 i = SCM_I_INUM (idx); \
2851 float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
2852 \
2853 SYNC_IP (); \
2854 if (SCM_LIKELY (SCM_I_INUMP (idx) \
2855 && (i >= 0) \
2856 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
2857 && (ALIGNED_P (float_ptr, type)))) \
2858 RETURN (scm_from_double (*float_ptr)); \
2859 else \
2860 RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx)); \
2861 } while (0)
2862
2863 VM_DEFINE_OP (107, bv_u8_ref, "bv-u8-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2864 BV_FIXABLE_INT_REF (u8, u8, uint8, 1);
2865
2866 VM_DEFINE_OP (108, bv_s8_ref, "bv-s8-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2867 BV_FIXABLE_INT_REF (s8, s8, int8, 1);
2868
2869 VM_DEFINE_OP (109, bv_u16_ref, "bv-u16-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2870 BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2);
2871
2872 VM_DEFINE_OP (110, bv_s16_ref, "bv-s16-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2873 BV_FIXABLE_INT_REF (s16, s16_native, int16, 2);
2874
2875 VM_DEFINE_OP (111, bv_u32_ref, "bv-u32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2876 #if SIZEOF_VOID_P > 4
2877 BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4);
2878 #else
2879 BV_INT_REF (u32, uint32, 4);
2880 #endif
2881
2882 VM_DEFINE_OP (112, bv_s32_ref, "bv-s32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2883 #if SIZEOF_VOID_P > 4
2884 BV_FIXABLE_INT_REF (s32, s32_native, int32, 4);
2885 #else
2886 BV_INT_REF (s32, int32, 4);
2887 #endif
2888
2889 VM_DEFINE_OP (113, bv_u64_ref, "bv-u64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2890 BV_INT_REF (u64, uint64, 8);
2891
2892 VM_DEFINE_OP (114, bv_s64_ref, "bv-s64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2893 BV_INT_REF (s64, int64, 8);
2894
2895 VM_DEFINE_OP (115, bv_f32_ref, "bv-f32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2896 BV_FLOAT_REF (f32, ieee_single, float, 4);
2897
2898 VM_DEFINE_OP (116, bv_f64_ref, "bv-f64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
2899 BV_FLOAT_REF (f64, ieee_double, double, 8);
2900
2901 /* bv-u8-set! dst:8 idx:8 src:8
2902 * bv-s8-set! dst:8 idx:8 src:8
2903 * bv-u16-set! dst:8 idx:8 src:8
2904 * bv-s16-set! dst:8 idx:8 src:8
2905 * bv-u32-set! dst:8 idx:8 src:8
2906 * bv-s32-set! dst:8 idx:8 src:8
2907 * bv-u64-set! dst:8 idx:8 src:8
2908 * bv-s64-set! dst:8 idx:8 src:8
2909 * bv-f32-set! dst:8 idx:8 src:8
2910 * bv-f64-set! dst:8 idx:8 src:8
2911 *
2912 * Store SRC into the bytevector DST at byte offset IDX. Multibyte
2913 * values are written using native endianness.
2914 */
2915 #define BV_FIXABLE_INT_SET(stem, fn_stem, type, min, max, size) \
2916 do { \
2917 scm_t_uint8 dst, idx, src; \
2918 scm_t_signed_bits i, j = 0; \
2919 SCM bv, scm_idx, val; \
2920 scm_t_ ## type *int_ptr; \
2921 \
2922 UNPACK_8_8_8 (op, dst, idx, src); \
2923 bv = LOCAL_REF (dst); \
2924 scm_idx = LOCAL_REF (idx); \
2925 val = LOCAL_REF (src); \
2926 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set!"); \
2927 i = SCM_I_INUM (scm_idx); \
2928 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
2929 \
2930 if (SCM_LIKELY (SCM_I_INUMP (scm_idx) \
2931 && (i >= 0) \
2932 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
2933 && (ALIGNED_P (int_ptr, scm_t_ ## type)) \
2934 && (SCM_I_INUMP (val)) \
2935 && ((j = SCM_I_INUM (val)) >= min) \
2936 && (j <= max))) \
2937 *int_ptr = (scm_t_ ## type) j; \
2938 else \
2939 { \
2940 SYNC_IP (); \
2941 scm_bytevector_ ## fn_stem ## _set_x (bv, scm_idx, val); \
2942 } \
2943 NEXT (1); \
2944 } while (0)
2945
2946 #define BV_INT_SET(stem, type, size) \
2947 do { \
2948 scm_t_uint8 dst, idx, src; \
2949 scm_t_signed_bits i; \
2950 SCM bv, scm_idx, val; \
2951 scm_t_ ## type *int_ptr; \
2952 \
2953 UNPACK_8_8_8 (op, dst, idx, src); \
2954 bv = LOCAL_REF (dst); \
2955 scm_idx = LOCAL_REF (idx); \
2956 val = LOCAL_REF (src); \
2957 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set!"); \
2958 i = SCM_I_INUM (scm_idx); \
2959 int_ptr = (scm_t_ ## type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
2960 \
2961 if (SCM_LIKELY (SCM_I_INUMP (scm_idx) \
2962 && (i >= 0) \
2963 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
2964 && (ALIGNED_P (int_ptr, scm_t_ ## type)))) \
2965 *int_ptr = scm_to_ ## type (val); \
2966 else \
2967 { \
2968 SYNC_IP (); \
2969 scm_bytevector_ ## stem ## _native_set_x (bv, scm_idx, val); \
2970 } \
2971 NEXT (1); \
2972 } while (0)
2973
2974 #define BV_FLOAT_SET(stem, fn_stem, type, size) \
2975 do { \
2976 scm_t_uint8 dst, idx, src; \
2977 scm_t_signed_bits i; \
2978 SCM bv, scm_idx, val; \
2979 type *float_ptr; \
2980 \
2981 UNPACK_8_8_8 (op, dst, idx, src); \
2982 bv = LOCAL_REF (dst); \
2983 scm_idx = LOCAL_REF (idx); \
2984 val = LOCAL_REF (src); \
2985 VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set!"); \
2986 i = SCM_I_INUM (scm_idx); \
2987 float_ptr = (type *) (SCM_BYTEVECTOR_CONTENTS (bv) + i); \
2988 \
2989 if (SCM_LIKELY (SCM_I_INUMP (scm_idx) \
2990 && (i >= 0) \
2991 && (i + size <= SCM_BYTEVECTOR_LENGTH (bv)) \
2992 && (ALIGNED_P (float_ptr, type)))) \
2993 *float_ptr = scm_to_double (val); \
2994 else \
2995 { \
2996 SYNC_IP (); \
2997 scm_bytevector_ ## fn_stem ## _native_set_x (bv, scm_idx, val); \
2998 } \
2999 NEXT (1); \
3000 } while (0)
3001
3002 VM_DEFINE_OP (117, bv_u8_set, "bv-u8-set!", OP1 (U8_U8_U8_U8))
3003 BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1);
3004
3005 VM_DEFINE_OP (118, bv_s8_set, "bv-s8-set!", OP1 (U8_U8_U8_U8))
3006 BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1);
3007
3008 VM_DEFINE_OP (119, bv_u16_set, "bv-u16-set!", OP1 (U8_U8_U8_U8))
3009 BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2);
3010
3011 VM_DEFINE_OP (120, bv_s16_set, "bv-s16-set!", OP1 (U8_U8_U8_U8))
3012 BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2);
3013
3014 VM_DEFINE_OP (121, bv_u32_set, "bv-u32-set!", OP1 (U8_U8_U8_U8))
3015 #if SIZEOF_VOID_P > 4
3016 BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4);
3017 #else
3018 BV_INT_SET (u32, uint32, 4);
3019 #endif
3020
3021 VM_DEFINE_OP (122, bv_s32_set, "bv-s32-set!", OP1 (U8_U8_U8_U8))
3022 #if SIZEOF_VOID_P > 4
3023 BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4);
3024 #else
3025 BV_INT_SET (s32, int32, 4);
3026 #endif
3027
3028 VM_DEFINE_OP (123, bv_u64_set, "bv-u64-set!", OP1 (U8_U8_U8_U8))
3029 BV_INT_SET (u64, uint64, 8);
3030
3031 VM_DEFINE_OP (124, bv_s64_set, "bv-s64-set!", OP1 (U8_U8_U8_U8))
3032 BV_INT_SET (s64, int64, 8);
3033
3034 VM_DEFINE_OP (125, bv_f32_set, "bv-f32-set!", OP1 (U8_U8_U8_U8))
3035 BV_FLOAT_SET (f32, ieee_single, float, 4);
3036
3037 VM_DEFINE_OP (126, bv_f64_set, "bv-f64-set!", OP1 (U8_U8_U8_U8))
3038 BV_FLOAT_SET (f64, ieee_double, double, 8);
3039
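  /* Illustrative sketch, not part of the VM: the slow paths of the
   * bytevector opcodes call the ordinary bytevector procedures, whose C
   * entry points can be used directly.  The helper name is hypothetical
   * and assumes code running in Guile mode.
   *
   *   #include <libguile.h>
   *
   *   static void
   *   bytevector_example (void)
   *   {
   *     SCM bv;
   *     scm_t_uint8 b;
   *
   *     bv = scm_c_make_bytevector (16);
   *     scm_c_bytevector_set_x (bv, 0, 0xff);           // cf. bv-u8-set!
   *     b = scm_c_bytevector_ref (bv, 0);               // cf. bv-u8-ref
   *     scm_bytevector_ieee_double_native_set_x         // cf. bv-f64-set!
   *       (bv, scm_from_size_t (8), scm_from_double (3.5));
   *     (void) b;
   *   }
   */
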
3040 VM_DEFINE_OP (127, unused_127, NULL, NOP)
3041 VM_DEFINE_OP (128, unused_128, NULL, NOP)
3042 VM_DEFINE_OP (129, unused_129, NULL, NOP)
3043 VM_DEFINE_OP (130, unused_130, NULL, NOP)
3044 VM_DEFINE_OP (131, unused_131, NULL, NOP)
3045 VM_DEFINE_OP (132, unused_132, NULL, NOP)
3046 VM_DEFINE_OP (133, unused_133, NULL, NOP)
3047 VM_DEFINE_OP (134, unused_134, NULL, NOP)
3048 VM_DEFINE_OP (135, unused_135, NULL, NOP)
3049 VM_DEFINE_OP (136, unused_136, NULL, NOP)
3050 VM_DEFINE_OP (137, unused_137, NULL, NOP)
3051 VM_DEFINE_OP (138, unused_138, NULL, NOP)
3052 VM_DEFINE_OP (139, unused_139, NULL, NOP)
3053 VM_DEFINE_OP (140, unused_140, NULL, NOP)
3054 VM_DEFINE_OP (141, unused_141, NULL, NOP)
3055 VM_DEFINE_OP (142, unused_142, NULL, NOP)
3056 VM_DEFINE_OP (143, unused_143, NULL, NOP)
3057 VM_DEFINE_OP (144, unused_144, NULL, NOP)
3058 VM_DEFINE_OP (145, unused_145, NULL, NOP)
3059 VM_DEFINE_OP (146, unused_146, NULL, NOP)
3060 VM_DEFINE_OP (147, unused_147, NULL, NOP)
3061 VM_DEFINE_OP (148, unused_148, NULL, NOP)
3062 VM_DEFINE_OP (149, unused_149, NULL, NOP)
3063 VM_DEFINE_OP (150, unused_150, NULL, NOP)
3064 VM_DEFINE_OP (151, unused_151, NULL, NOP)
3065 VM_DEFINE_OP (152, unused_152, NULL, NOP)
3066 VM_DEFINE_OP (153, unused_153, NULL, NOP)
3067 VM_DEFINE_OP (154, unused_154, NULL, NOP)
3068 VM_DEFINE_OP (155, unused_155, NULL, NOP)
3069 VM_DEFINE_OP (156, unused_156, NULL, NOP)
3070 VM_DEFINE_OP (157, unused_157, NULL, NOP)
3071 VM_DEFINE_OP (158, unused_158, NULL, NOP)
3072 VM_DEFINE_OP (159, unused_159, NULL, NOP)
3073 VM_DEFINE_OP (160, unused_160, NULL, NOP)
3074 VM_DEFINE_OP (161, unused_161, NULL, NOP)
3075 VM_DEFINE_OP (162, unused_162, NULL, NOP)
3076 VM_DEFINE_OP (163, unused_163, NULL, NOP)
3077 VM_DEFINE_OP (164, unused_164, NULL, NOP)
3078 VM_DEFINE_OP (165, unused_165, NULL, NOP)
3079 VM_DEFINE_OP (166, unused_166, NULL, NOP)
3080 VM_DEFINE_OP (167, unused_167, NULL, NOP)
3081 VM_DEFINE_OP (168, unused_168, NULL, NOP)
3082 VM_DEFINE_OP (169, unused_169, NULL, NOP)
3083 VM_DEFINE_OP (170, unused_170, NULL, NOP)
3084 VM_DEFINE_OP (171, unused_171, NULL, NOP)
3085 VM_DEFINE_OP (172, unused_172, NULL, NOP)
3086 VM_DEFINE_OP (173, unused_173, NULL, NOP)
3087 VM_DEFINE_OP (174, unused_174, NULL, NOP)
3088 VM_DEFINE_OP (175, unused_175, NULL, NOP)
3089 VM_DEFINE_OP (176, unused_176, NULL, NOP)
3090 VM_DEFINE_OP (177, unused_177, NULL, NOP)
3091 VM_DEFINE_OP (178, unused_178, NULL, NOP)
3092 VM_DEFINE_OP (179, unused_179, NULL, NOP)
3093 VM_DEFINE_OP (180, unused_180, NULL, NOP)
3094 VM_DEFINE_OP (181, unused_181, NULL, NOP)
3095 VM_DEFINE_OP (182, unused_182, NULL, NOP)
3096 VM_DEFINE_OP (183, unused_183, NULL, NOP)
3097 VM_DEFINE_OP (184, unused_184, NULL, NOP)
3098 VM_DEFINE_OP (185, unused_185, NULL, NOP)
3099 VM_DEFINE_OP (186, unused_186, NULL, NOP)
3100 VM_DEFINE_OP (187, unused_187, NULL, NOP)
3101 VM_DEFINE_OP (188, unused_188, NULL, NOP)
3102 VM_DEFINE_OP (189, unused_189, NULL, NOP)
3103 VM_DEFINE_OP (190, unused_190, NULL, NOP)
3104 VM_DEFINE_OP (191, unused_191, NULL, NOP)
3105 VM_DEFINE_OP (192, unused_192, NULL, NOP)
3106 VM_DEFINE_OP (193, unused_193, NULL, NOP)
3107 VM_DEFINE_OP (194, unused_194, NULL, NOP)
3108 VM_DEFINE_OP (195, unused_195, NULL, NOP)
3109 VM_DEFINE_OP (196, unused_196, NULL, NOP)
3110 VM_DEFINE_OP (197, unused_197, NULL, NOP)
3111 VM_DEFINE_OP (198, unused_198, NULL, NOP)
3112 VM_DEFINE_OP (199, unused_199, NULL, NOP)
3113 VM_DEFINE_OP (200, unused_200, NULL, NOP)
3114 VM_DEFINE_OP (201, unused_201, NULL, NOP)
3115 VM_DEFINE_OP (202, unused_202, NULL, NOP)
3116 VM_DEFINE_OP (203, unused_203, NULL, NOP)
3117 VM_DEFINE_OP (204, unused_204, NULL, NOP)
3118 VM_DEFINE_OP (205, unused_205, NULL, NOP)
3119 VM_DEFINE_OP (206, unused_206, NULL, NOP)
3120 VM_DEFINE_OP (207, unused_207, NULL, NOP)
3121 VM_DEFINE_OP (208, unused_208, NULL, NOP)
3122 VM_DEFINE_OP (209, unused_209, NULL, NOP)
3123 VM_DEFINE_OP (210, unused_210, NULL, NOP)
3124 VM_DEFINE_OP (211, unused_211, NULL, NOP)
3125 VM_DEFINE_OP (212, unused_212, NULL, NOP)
3126 VM_DEFINE_OP (213, unused_213, NULL, NOP)
3127 VM_DEFINE_OP (214, unused_214, NULL, NOP)
3128 VM_DEFINE_OP (215, unused_215, NULL, NOP)
3129 VM_DEFINE_OP (216, unused_216, NULL, NOP)
3130 VM_DEFINE_OP (217, unused_217, NULL, NOP)
3131 VM_DEFINE_OP (218, unused_218, NULL, NOP)
3132 VM_DEFINE_OP (219, unused_219, NULL, NOP)
3133 VM_DEFINE_OP (220, unused_220, NULL, NOP)
3134 VM_DEFINE_OP (221, unused_221, NULL, NOP)
3135 VM_DEFINE_OP (222, unused_222, NULL, NOP)
3136 VM_DEFINE_OP (223, unused_223, NULL, NOP)
3137 VM_DEFINE_OP (224, unused_224, NULL, NOP)
3138 VM_DEFINE_OP (225, unused_225, NULL, NOP)
3139 VM_DEFINE_OP (226, unused_226, NULL, NOP)
3140 VM_DEFINE_OP (227, unused_227, NULL, NOP)
3141 VM_DEFINE_OP (228, unused_228, NULL, NOP)
3142 VM_DEFINE_OP (229, unused_229, NULL, NOP)
3143 VM_DEFINE_OP (230, unused_230, NULL, NOP)
3144 VM_DEFINE_OP (231, unused_231, NULL, NOP)
3145 VM_DEFINE_OP (232, unused_232, NULL, NOP)
3146 VM_DEFINE_OP (233, unused_233, NULL, NOP)
3147 VM_DEFINE_OP (234, unused_234, NULL, NOP)
3148 VM_DEFINE_OP (235, unused_235, NULL, NOP)
3149 VM_DEFINE_OP (236, unused_236, NULL, NOP)
3150 VM_DEFINE_OP (237, unused_237, NULL, NOP)
3151 VM_DEFINE_OP (238, unused_238, NULL, NOP)
3152 VM_DEFINE_OP (239, unused_239, NULL, NOP)
3153 VM_DEFINE_OP (240, unused_240, NULL, NOP)
3154 VM_DEFINE_OP (241, unused_241, NULL, NOP)
3155 VM_DEFINE_OP (242, unused_242, NULL, NOP)
3156 VM_DEFINE_OP (243, unused_243, NULL, NOP)
3157 VM_DEFINE_OP (244, unused_244, NULL, NOP)
3158 VM_DEFINE_OP (245, unused_245, NULL, NOP)
3159 VM_DEFINE_OP (246, unused_246, NULL, NOP)
3160 VM_DEFINE_OP (247, unused_247, NULL, NOP)
3161 VM_DEFINE_OP (248, unused_248, NULL, NOP)
3162 VM_DEFINE_OP (249, unused_249, NULL, NOP)
3163 VM_DEFINE_OP (250, unused_250, NULL, NOP)
3164 VM_DEFINE_OP (251, unused_251, NULL, NOP)
3165 VM_DEFINE_OP (252, unused_252, NULL, NOP)
3166 VM_DEFINE_OP (253, unused_253, NULL, NOP)
3167 VM_DEFINE_OP (254, unused_254, NULL, NOP)
3168 VM_DEFINE_OP (255, unused_255, NULL, NOP)
3169 {
3170 vm_error_bad_instruction (op);
3171 abort (); /* never reached */
3172 }
3173
3174 END_DISPATCH_SWITCH;
3175 }
3176
3177
3178 #undef ABORT_CONTINUATION_HOOK
3179 #undef ALIGNED_P
3180 #undef APPLY_HOOK
3181 #undef ARGS1
3182 #undef ARGS2
3183 #undef BEGIN_DISPATCH_SWITCH
3184 #undef BINARY_INTEGER_OP
3185 #undef BR_ARITHMETIC
3186 #undef BR_BINARY
3187 #undef BR_NARGS
3188 #undef BR_UNARY
3189 #undef BV_FIXABLE_INT_REF
3190 #undef BV_FIXABLE_INT_SET
3191 #undef BV_FLOAT_REF
3192 #undef BV_FLOAT_SET
3193 #undef BV_INT_REF
3194 #undef BV_INT_SET
3195 #undef CACHE_REGISTER
3196 #undef CHECK_OVERFLOW
3197 #undef END_DISPATCH_SWITCH
3198 #undef FREE_VARIABLE_REF
3199 #undef INIT
3200 #undef INUM_MAX
3201 #undef INUM_MIN
3202 #undef LOCAL_REF
3203 #undef LOCAL_SET
3204 #undef NEXT
3205 #undef NEXT_HOOK
3206 #undef NEXT_JUMP
3207 #undef POP_CONTINUATION_HOOK
3208 #undef PUSH_CONTINUATION_HOOK
3209 #undef RETURN
3210 #undef RETURN_ONE_VALUE
3211 #undef RETURN_VALUE_LIST
3212 #undef RUN_HOOK
3213 #undef RUN_HOOK0
3214 #undef RUN_HOOK1
3215 #undef SYNC_IP
3216 #undef UNPACK_8_8_8
3217 #undef UNPACK_8_16
3218 #undef UNPACK_16_8
3219 #undef UNPACK_12_12
3220 #undef UNPACK_24
3221 #undef VARIABLE_BOUNDP
3222 #undef VARIABLE_REF
3223 #undef VARIABLE_SET
3224 #undef VM_CHECK_FREE_VARIABLE
3225 #undef VM_CHECK_OBJECT
3226 #undef VM_CHECK_UNDERFLOW
3227 #undef VM_DEFINE_OP
3228 #undef VM_INSTRUCTION_TO_LABEL
3229 #undef VM_USE_HOOKS
3230 #undef VM_VALIDATE_BYTEVECTOR
3231 #undef VM_VALIDATE_PAIR
3232 #undef VM_VALIDATE_STRUCT
3233
3234 /*
3235 (defun renumber-ops ()
3236 "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
3237 (interactive "")
3238 (save-excursion
3239 (let ((counter -1)) (goto-char (point-min))
3240 (while (re-search-forward "^ *VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
3241 (replace-match
3242 (number-to-string (setq counter (1+ counter)))
3243 t t nil 1)))))
3244 (renumber-ops)
3245 */
3246 /*
3247 Local Variables:
3248 c-file-style: "gnu"
3249 End:
3250 */