-/* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
+/* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013,
+ * 2014, 2015 Free Software Foundation, Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
RUN_HOOK0 (abort)
#define VM_HANDLE_INTERRUPTS \
- SCM_ASYNC_TICK_WITH_GUARD_CODE (current_thread, SYNC_IP (), CACHE_FP ())
+ SCM_ASYNC_TICK_WITH_GUARD_CODE (thread, SYNC_IP (), CACHE_FP ())
/* Virtual Machine
- This is Guile's new virtual machine. When I say "new", I mean
- relative to the current virtual machine. At some point it will
- become "the" virtual machine, and we'll delete this paragraph. As
- such, the rest of the comments speak as if there's only one VM.
- In difference from the old VM, local 0 is the procedure, and the
- first argument is local 1. At some point in the future we should
- change the fp to point to the procedure and not to local 1.
-
- <more overview here>
- */
-
-
-/* The VM has three state bits: the instruction pointer (IP), the frame
+ The VM has three state bits: the instruction pointer (IP), the frame
pointer (FP), and the top-of-stack pointer (SP). We cache the first
two of these in machine registers, local to the VM, because they are
used extensively by the VM. As the SP is used more by code outside
} while (0)
-
-/* After advancing vp->sp, but before writing any stack slots, check
- that it is actually in bounds. If it is not in bounds, currently we
- signal an error. In the future we may expand the stack instead,
- possibly by moving it elsewhere, therefore no pointer into the stack
- besides FP is valid across a CHECK_OVERFLOW call. Be careful! */
-#define CHECK_OVERFLOW() \
- do { \
- if (SCM_UNLIKELY (vp->sp >= vp->stack_limit)) \
- { \
- SYNC_IP (); \
- vm_expand_stack (vp); \
- CACHE_FP (); \
- } \
- } while (0)
-
/* Reserve stack space for a frame. Will check that there is sufficient
stack space for N locals, including the procedure. Invoke after
- preparing the new frame and setting the fp and ip. */
+ preparing the new frame and setting the fp and ip.
+
+ If there is not enough space for this frame, we try to expand the
+ stack, possibly relocating it somewhere else in the address space.
+ Because of the possible relocation, no pointer into the stack besides
+ FP is valid across an ALLOC_FRAME call. Be careful! */
#define ALLOC_FRAME(n) \
do { \
- vp->sp = LOCAL_ADDRESS (n - 1); \
- CHECK_OVERFLOW (); \
+ SCM *new_sp = LOCAL_ADDRESS (n - 1); \
+ if (new_sp > vp->sp_max_since_gc) \
+ { \
+ if (SCM_UNLIKELY (new_sp >= vp->stack_limit)) \
+ { \
+ SYNC_IP (); \
+ vm_expand_stack (vp, new_sp); \
+ CACHE_FP (); \
+ } \
+ else \
+ vp->sp_max_since_gc = vp->sp = new_sp; \
+ } \
+ else \
+ vp->sp = new_sp; \
} while (0)
/* Reset the current frame to hold N locals. Used when we know that no
#define RESET_FRAME(n) \
do { \
vp->sp = LOCAL_ADDRESS (n - 1); \
+ if (vp->sp > vp->sp_max_since_gc) \
+ vp->sp_max_since_gc = vp->sp; \
} while (0)
/* Compute the number of locals in the frame. At a call, this is equal
{ \
scm_t_int32 offset = ip[1]; \
offset >>= 8; /* Sign-extending shift. */ \
- if (offset < 0) \
+ if (offset <= 0) \
VM_HANDLE_INTERRUPTS; \
NEXT (offset); \
} \
{ \
scm_t_int32 offset = ip[1]; \
offset >>= 8; /* Sign-extending shift. */ \
- if (offset < 0) \
+ if (offset <= 0) \
VM_HANDLE_INTERRUPTS; \
NEXT (offset); \
} \
{ \
scm_t_int32 offset = ip[1]; \
offset >>= 8; /* Sign-extending shift. */ \
- if (offset < 0) \
+ if (offset <= 0) \
VM_HANDLE_INTERRUPTS; \
NEXT (offset); \
} \
{ \
scm_t_int32 offset = ip[1]; \
offset >>= 8; /* Sign-extending shift. */ \
- if (offset < 0) \
+ if (offset <= 0) \
VM_HANDLE_INTERRUPTS; \
NEXT (offset); \
} \
((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
static SCM
-VM_NAME (scm_i_thread *current_thread, struct scm_vm *vp,
+VM_NAME (scm_i_thread *thread, struct scm_vm *vp,
scm_i_jmp_buf *registers, int resume)
{
/* Instruction pointer: A pointer to the opcode that is currently
NEXT (0);
apply:
- while (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp)))
+ while (!SCM_PROGRAM_P (LOCAL_REF (0)))
{
- SCM proc = SCM_FRAME_PROGRAM (fp);
+ SCM proc = LOCAL_REF (0);
if (SCM_STRUCTP (proc) && SCM_STRUCT_APPLICABLE_P (proc))
{
}
/* Let's go! */
- ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
+ ip = SCM_PROGRAM_CODE (LOCAL_REF (0));
+
+ APPLY_HOOK ();
+
NEXT (0);
BEGIN_DISPATCH_SWITCH;
scm_t_uint32 n;
ret = SCM_EOL;
for (n = nvals; n > 0; n--)
- ret = scm_cons (LOCAL_REF (4 + n - 1), ret);
+ ret = scm_inline_cons (thread, LOCAL_REF (4 + n - 1), ret);
ret = scm_values (ret);
}
VM_HANDLE_INTERRUPTS;
+ PUSH_CONTINUATION_HOOK ();
+
old_fp = fp;
fp = vp->fp = old_fp + proc;
SCM_FRAME_SET_DYNAMIC_LINK (fp, old_fp);
RESET_FRAME (nlocals);
- PUSH_CONTINUATION_HOOK ();
+ if (SCM_UNLIKELY (!SCM_PROGRAM_P (LOCAL_REF (0))))
+ goto apply;
+
+ ip = SCM_PROGRAM_CODE (LOCAL_REF (0));
+
APPLY_HOOK ();
- if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
- goto apply;
+ NEXT (0);
+ }
+
+ /* call-label proc:24 _:8 nlocals:24 label:32
+ *
+ * Call a procedure in the same compilation unit.
+ *
+ * This instruction is just like "call", except that instead of
+ * dereferencing PROC to find the call target, the call target is
+ * known to be at LABEL, a signed 32-bit offset in 32-bit units from
+ * the current IP. Since PROC is not dereferenced, it may be some
+ * other representation of the closure.
+ */
+ VM_DEFINE_OP (2, call_label, "call-label", OP3 (U8_U24, X8_U24, L32))
+ {
+ scm_t_uint32 proc, nlocals;
+ scm_t_int32 label;
+ SCM *old_fp;
+
+ UNPACK_24 (op, proc);
+ UNPACK_24 (ip[1], nlocals);
+ label = ip[2];
+
+ VM_HANDLE_INTERRUPTS;
+
+ PUSH_CONTINUATION_HOOK ();
+
+ old_fp = fp;
+ fp = vp->fp = old_fp + proc;
+ SCM_FRAME_SET_DYNAMIC_LINK (fp, old_fp);
+ SCM_FRAME_SET_RETURN_ADDRESS (fp, ip + 3);
+
+ RESET_FRAME (nlocals);
+
+ ip += label;
+
+ APPLY_HOOK ();
- ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
NEXT (0);
}
* arguments have already been shuffled into position. Will reset the
* frame to NLOCALS.
*/
- VM_DEFINE_OP (2, tail_call, "tail-call", OP1 (U8_U24))
+ VM_DEFINE_OP (3, tail_call, "tail-call", OP1 (U8_U24))
{
scm_t_uint32 nlocals;
RESET_FRAME (nlocals);
+ if (SCM_UNLIKELY (!SCM_PROGRAM_P (LOCAL_REF (0))))
+ goto apply;
+
+ ip = SCM_PROGRAM_CODE (LOCAL_REF (0));
+
APPLY_HOOK ();
- if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
- goto apply;
+ NEXT (0);
+ }
+
+ /* tail-call-label nlocals:24 label:32
+ *
+ * Tail-call a known procedure. As call is to call-label, tail-call
+ * is to tail-call-label.
+ */
+ VM_DEFINE_OP (4, tail_call_label, "tail-call-label", OP2 (U8_U24, L32))
+ {
+ scm_t_uint32 nlocals;
+ scm_t_int32 label;
+
+ UNPACK_24 (op, nlocals);
+ label = ip[1];
+
+ VM_HANDLE_INTERRUPTS;
+
+ RESET_FRAME (nlocals);
+
+ ip += label;
+
+ APPLY_HOOK ();
- ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
NEXT (0);
}
* FROM, shuffled down to start at slot 0. This is part of the
* implementation of the call-with-values builtin.
*/
- VM_DEFINE_OP (3, tail_call_shuffle, "tail-call/shuffle", OP1 (U8_U24))
+ VM_DEFINE_OP (5, tail_call_shuffle, "tail-call/shuffle", OP1 (U8_U24))
{
scm_t_uint32 n, from, nlocals;
RESET_FRAME (n + 1);
- APPLY_HOOK ();
-
- if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
+ if (SCM_UNLIKELY (!SCM_PROGRAM_P (LOCAL_REF (0))))
goto apply;
- ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
+ ip = SCM_PROGRAM_CODE (LOCAL_REF (0));
+
+ APPLY_HOOK ();
+
NEXT (0);
}
* PROC, asserting that the call actually returned at least one
* value. Afterwards, resets the frame to NLOCALS locals.
*/
- VM_DEFINE_OP (4, receive, "receive", OP2 (U8_U12_U12, X8_U24) | OP_DST)
+ VM_DEFINE_OP (6, receive, "receive", OP2 (U8_U12_U12, X8_U24) | OP_DST)
{
scm_t_uint16 dst, proc;
scm_t_uint32 nlocals;
* return values equals NVALUES exactly. After receive-values has
* run, the values can be copied down via `mov'.
*/
- VM_DEFINE_OP (5, receive_values, "receive-values", OP2 (U8_U24, B1_X7_U24))
+ VM_DEFINE_OP (7, receive_values, "receive-values", OP2 (U8_U24, B1_X7_U24))
{
scm_t_uint32 proc, nvalues;
UNPACK_24 (op, proc);
*
* Return a value.
*/
- VM_DEFINE_OP (6, return, "return", OP1 (U8_U24))
+ VM_DEFINE_OP (8, return, "return", OP1 (U8_U24))
{
scm_t_uint32 src;
UNPACK_24 (op, src);
* shuffled down to a contiguous array starting at slot 1.
* We also expect the frame has already been reset.
*/
- VM_DEFINE_OP (7, return_values, "return-values", OP1 (U8_X24))
+ VM_DEFINE_OP (9, return_values, "return-values", OP1 (U8_X24))
{
SCM *old_fp;
* calling frame. This instruction is part of the trampolines
* created in gsubr.c, and is not generated by the compiler.
*/
- VM_DEFINE_OP (8, subr_call, "subr-call", OP1 (U8_U24))
+ VM_DEFINE_OP (10, subr_call, "subr-call", OP1 (U8_U24))
{
scm_t_uint32 ptr_idx;
SCM pointer, ret;
pointer = SCM_PROGRAM_FREE_VARIABLE_REF (LOCAL_REF (0), ptr_idx);
subr = SCM_POINTER_VALUE (pointer);
- VM_HANDLE_INTERRUPTS;
SYNC_IP ();
switch (FRAME_LOCALS_COUNT_FROM (1))
* part of the trampolines created by the FFI, and is not generated by
* the compiler.
*/
- VM_DEFINE_OP (9, foreign_call, "foreign-call", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (11, foreign_call, "foreign-call", OP1 (U8_U12_U12))
{
scm_t_uint16 cif_idx, ptr_idx;
SCM closure, cif, pointer, ret;
pointer = SCM_PROGRAM_FREE_VARIABLE_REF (closure, ptr_idx);
SYNC_IP ();
- VM_HANDLE_INTERRUPTS;
// FIXME: separate args
- ret = scm_i_foreign_call (scm_cons (cif, pointer), LOCAL_ADDRESS (1));
+ ret = scm_i_foreign_call (scm_inline_cons (thread, cif, pointer),
+ LOCAL_ADDRESS (1));
CACHE_FP ();
* the implementation of undelimited continuations, and is not
* generated by the compiler.
*/
- VM_DEFINE_OP (10, continuation_call, "continuation-call", OP1 (U8_U24))
+ VM_DEFINE_OP (12, continuation_call, "continuation-call", OP1 (U8_U24))
{
SCM contregs;
scm_t_uint32 contregs_idx;
* instruction is part of the implementation of partial continuations,
* and is not generated by the compiler.
*/
- VM_DEFINE_OP (11, compose_continuation, "compose-continuation", OP1 (U8_U24))
+ VM_DEFINE_OP (13, compose_continuation, "compose-continuation", OP1 (U8_U24))
{
SCM vmcont;
scm_t_uint32 cont_idx;
vm_error_continuation_not_rewindable (vmcont));
vm_reinstate_partial_continuation (vp, vmcont, FRAME_LOCALS_COUNT_FROM (1),
LOCAL_ADDRESS (1),
- &current_thread->dynstack,
+ &thread->dynstack,
registers);
CACHE_REGISTER ();
NEXT (0);
* arguments. This instruction is part of the implementation of
* `apply', and is not generated by the compiler.
*/
- VM_DEFINE_OP (12, tail_apply, "tail-apply", OP1 (U8_X24))
+ VM_DEFINE_OP (14, tail_apply, "tail-apply", OP1 (U8_X24))
{
int i, list_idx, list_len, nlocals;
SCM list;
for (i = 0; i < list_len; i++, list = SCM_CDR (list))
LOCAL_SET (list_idx - 1 + i, SCM_CAR (list));
- APPLY_HOOK ();
-
- if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
+ if (SCM_UNLIKELY (!SCM_PROGRAM_P (LOCAL_REF (0))))
goto apply;
- ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
+ ip = SCM_PROGRAM_CODE (LOCAL_REF (0));
+
+ APPLY_HOOK ();
+
NEXT (0);
}
* local slot 1 to it. This instruction is part of the implementation
* of `call/cc', and is not generated by the compiler.
*/
- VM_DEFINE_OP (13, call_cc, "call/cc", OP1 (U8_X24))
+ VM_DEFINE_OP (15, call_cc, "call/cc", OP1 (U8_X24))
{
SCM vm_cont, cont;
scm_t_dynstack *dynstack;
VM_HANDLE_INTERRUPTS;
SYNC_IP ();
- dynstack = scm_dynstack_capture_all (&current_thread->dynstack);
+ dynstack = scm_dynstack_capture_all (&thread->dynstack);
vm_cont = scm_i_vm_capture_stack (vp->stack_base,
SCM_FRAME_DYNAMIC_LINK (fp),
SCM_FRAME_PREVIOUS_SP (fp),
LOCAL_SET (1, cont);
RESET_FRAME (2);
- APPLY_HOOK ();
-
- if (SCM_UNLIKELY (!SCM_PROGRAM_P (SCM_FRAME_PROGRAM (fp))))
+ if (SCM_UNLIKELY (!SCM_PROGRAM_P (LOCAL_REF (0))))
goto apply;
- ip = SCM_PROGRAM_CODE (SCM_FRAME_PROGRAM (fp));
+ ip = SCM_PROGRAM_CODE (LOCAL_REF (0));
+
+ APPLY_HOOK ();
+
NEXT (0);
}
else
* of the values in the frame are returned to the prompt handler.
* This corresponds to a tail application of abort-to-prompt.
*/
- VM_DEFINE_OP (14, abort, "abort", OP1 (U8_X24))
+ VM_DEFINE_OP (16, abort, "abort", OP1 (U8_X24))
{
scm_t_uint32 nlocals = FRAME_LOCALS_COUNT ();
*
* Load a builtin stub by index into DST.
*/
- VM_DEFINE_OP (15, builtin_ref, "builtin-ref", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (17, builtin_ref, "builtin-ref", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst, idx;
* than EXPECTED, respectively, add OFFSET, a signed 24-bit number, to
* the current instruction pointer.
*/
- VM_DEFINE_OP (16, br_if_nargs_ne, "br-if-nargs-ne", OP2 (U8_U24, X8_L24))
+ VM_DEFINE_OP (18, br_if_nargs_ne, "br-if-nargs-ne", OP2 (U8_U24, X8_L24))
{
BR_NARGS (!=);
}
- VM_DEFINE_OP (17, br_if_nargs_lt, "br-if-nargs-lt", OP2 (U8_U24, X8_L24))
+ VM_DEFINE_OP (19, br_if_nargs_lt, "br-if-nargs-lt", OP2 (U8_U24, X8_L24))
{
BR_NARGS (<);
}
- VM_DEFINE_OP (18, br_if_nargs_gt, "br-if-nargs-gt", OP2 (U8_U24, X8_L24))
+ VM_DEFINE_OP (20, br_if_nargs_gt, "br-if-nargs-gt", OP2 (U8_U24, X8_L24))
{
BR_NARGS (>);
}
* If the number of actual arguments is not ==, >=, or <= EXPECTED,
* respectively, signal an error.
*/
- VM_DEFINE_OP (19, assert_nargs_ee, "assert-nargs-ee", OP1 (U8_U24))
+ VM_DEFINE_OP (21, assert_nargs_ee, "assert-nargs-ee", OP1 (U8_U24))
{
scm_t_uint32 expected;
UNPACK_24 (op, expected);
VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
- vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
+ vm_error_wrong_num_args (LOCAL_REF (0)));
NEXT (1);
}
- VM_DEFINE_OP (20, assert_nargs_ge, "assert-nargs-ge", OP1 (U8_U24))
+ VM_DEFINE_OP (22, assert_nargs_ge, "assert-nargs-ge", OP1 (U8_U24))
{
scm_t_uint32 expected;
UNPACK_24 (op, expected);
VM_ASSERT (FRAME_LOCALS_COUNT () >= expected,
- vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
+ vm_error_wrong_num_args (LOCAL_REF (0)));
NEXT (1);
}
- VM_DEFINE_OP (21, assert_nargs_le, "assert-nargs-le", OP1 (U8_U24))
+ VM_DEFINE_OP (23, assert_nargs_le, "assert-nargs-le", OP1 (U8_U24))
{
scm_t_uint32 expected;
UNPACK_24 (op, expected);
VM_ASSERT (FRAME_LOCALS_COUNT () <= expected,
- vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
+ vm_error_wrong_num_args (LOCAL_REF (0)));
NEXT (1);
}
* setting them all to SCM_UNDEFINED, except those nargs values that
* were passed as arguments and procedure.
*/
- VM_DEFINE_OP (22, alloc_frame, "alloc-frame", OP1 (U8_U24))
+ VM_DEFINE_OP (24, alloc_frame, "alloc-frame", OP1 (U8_U24))
{
scm_t_uint32 nlocals, nargs;
UNPACK_24 (op, nlocals);
* Used to reset the frame size to something less than the size that
* was previously set via alloc-frame.
*/
- VM_DEFINE_OP (23, reset_frame, "reset-frame", OP1 (U8_U24))
+ VM_DEFINE_OP (25, reset_frame, "reset-frame", OP1 (U8_U24))
{
scm_t_uint32 nlocals;
UNPACK_24 (op, nlocals);
* Equivalent to a sequence of assert-nargs-ee and reserve-locals. The
* number of locals reserved is EXPECTED + NLOCALS.
*/
- VM_DEFINE_OP (24, assert_nargs_ee_locals, "assert-nargs-ee/locals", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (26, assert_nargs_ee_locals, "assert-nargs-ee/locals", OP1 (U8_U12_U12))
{
scm_t_uint16 expected, nlocals;
UNPACK_12_12 (op, expected, nlocals);
VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
- vm_error_wrong_num_args (SCM_FRAME_PROGRAM (fp)));
+ vm_error_wrong_num_args (LOCAL_REF (0)));
ALLOC_FRAME (expected + nlocals);
while (nlocals--)
LOCAL_SET (expected + nlocals, SCM_UNDEFINED);
* See "Case-lambda" in the manual, for more on how case-lambda
* chooses the clause to apply.
*/
- VM_DEFINE_OP (25, br_if_npos_gt, "br-if-npos-gt", OP3 (U8_U24, X8_U24, X8_L24))
+ VM_DEFINE_OP (27, br_if_npos_gt, "br-if-npos-gt", OP3 (U8_U24, X8_U24, X8_L24))
{
scm_t_uint32 nreq, npos;
*
* A macro-mega-instruction.
*/
- VM_DEFINE_OP (26, bind_kwargs, "bind-kwargs", OP4 (U8_U24, U8_U24, X8_U24, N32))
+ VM_DEFINE_OP (28, bind_kwargs, "bind-kwargs", OP4 (U8_U24, U8_U24, X8_U24, N32))
{
scm_t_uint32 nreq, nreq_and_opt, ntotal, npositional, nkw, n, nargs;
scm_t_int32 kw_offset;
LOCAL_SET (n++, SCM_UNDEFINED);
VM_ASSERT (has_rest || (nkw % 2) == 0,
- vm_error_kwargs_length_not_even (SCM_FRAME_PROGRAM (fp)));
+ vm_error_kwargs_length_not_even (LOCAL_REF (0)));
/* Now bind keywords, in the order given. */
for (n = 0; n < nkw; n++)
break;
}
VM_ASSERT (scm_is_pair (walk) || allow_other_keys,
- vm_error_kwargs_unrecognized_keyword (SCM_FRAME_PROGRAM (fp),
+ vm_error_kwargs_unrecognized_keyword (LOCAL_REF (0),
LOCAL_REF (ntotal + n)));
n++;
}
else
- VM_ASSERT (has_rest, vm_error_kwargs_invalid_keyword (SCM_FRAME_PROGRAM (fp),
+ VM_ASSERT (has_rest, vm_error_kwargs_invalid_keyword (LOCAL_REF (0),
LOCAL_REF (ntotal + n)));
if (has_rest)
SCM rest = SCM_EOL;
n = nkw;
while (n--)
- rest = scm_cons (LOCAL_REF (ntotal + n), rest);
+ rest = scm_inline_cons (thread, LOCAL_REF (ntotal + n), rest);
LOCAL_SET (nreq_and_opt, rest);
}
* Collect any arguments at or above DST into a list, and store that
* list at DST.
*/
- VM_DEFINE_OP (27, bind_rest, "bind-rest", OP1 (U8_U24) | OP_DST)
+ VM_DEFINE_OP (29, bind_rest, "bind-rest", OP1 (U8_U24) | OP_DST)
{
scm_t_uint32 dst, nargs;
SCM rest = SCM_EOL;
{
while (nargs-- > dst)
{
- rest = scm_cons (LOCAL_REF (nargs), rest);
+ rest = scm_inline_cons (thread, LOCAL_REF (nargs), rest);
LOCAL_SET (nargs, SCM_UNDEFINED);
}
* Add OFFSET, a signed 24-bit number, to the current instruction
* pointer.
*/
- VM_DEFINE_OP (28, br, "br", OP1 (U8_L24))
+ VM_DEFINE_OP (30, br, "br", OP1 (U8_L24))
{
scm_t_int32 offset = op;
offset >>= 8; /* Sign-extending shift. */
+ if (offset <= 0)
+ VM_HANDLE_INTERRUPTS;
NEXT (offset);
}
* If the value in TEST is true for the purposes of Scheme, add
* OFFSET, a signed 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (29, br_if_true, "br-if-true", OP2 (U8_U24, B1_X7_L24))
+ VM_DEFINE_OP (31, br_if_true, "br-if-true", OP2 (U8_U24, B1_X7_L24))
{
BR_UNARY (x, scm_is_true (x));
}
* If the value in TEST is the end-of-list or Lisp nil, add OFFSET, a
* signed 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (30, br_if_null, "br-if-null", OP2 (U8_U24, B1_X7_L24))
+ VM_DEFINE_OP (32, br_if_null, "br-if-null", OP2 (U8_U24, B1_X7_L24))
{
BR_UNARY (x, scm_is_null (x));
}
* If the value in TEST is false to Lisp, add OFFSET, a signed 24-bit
* number, to the current instruction pointer.
*/
- VM_DEFINE_OP (31, br_if_nil, "br-if-nil", OP2 (U8_U24, B1_X7_L24))
+ VM_DEFINE_OP (33, br_if_nil, "br-if-nil", OP2 (U8_U24, B1_X7_L24))
{
BR_UNARY (x, scm_is_lisp_false (x));
}
* If the value in TEST is a pair, add OFFSET, a signed 24-bit number,
* to the current instruction pointer.
*/
- VM_DEFINE_OP (32, br_if_pair, "br-if-pair", OP2 (U8_U24, B1_X7_L24))
+ VM_DEFINE_OP (34, br_if_pair, "br-if-pair", OP2 (U8_U24, B1_X7_L24))
{
BR_UNARY (x, scm_is_pair (x));
}
* If the value in TEST is a struct, add OFFSET, a signed 24-bit
* number, to the current instruction pointer.
*/
- VM_DEFINE_OP (33, br_if_struct, "br-if-struct", OP2 (U8_U24, B1_X7_L24))
+ VM_DEFINE_OP (35, br_if_struct, "br-if-struct", OP2 (U8_U24, B1_X7_L24))
{
BR_UNARY (x, SCM_STRUCTP (x));
}
* If the value in TEST is a char, add OFFSET, a signed 24-bit number,
* to the current instruction pointer.
*/
- VM_DEFINE_OP (34, br_if_char, "br-if-char", OP2 (U8_U24, B1_X7_L24))
+ VM_DEFINE_OP (36, br_if_char, "br-if-char", OP2 (U8_U24, B1_X7_L24))
{
BR_UNARY (x, SCM_CHARP (x));
}
* If the value in TEST has the TC7 given in the second word, add
* OFFSET, a signed 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (35, br_if_tc7, "br-if-tc7", OP2 (U8_U24, B1_U7_L24))
+ VM_DEFINE_OP (37, br_if_tc7, "br-if-tc7", OP2 (U8_U24, B1_U7_L24))
{
BR_UNARY (x, SCM_HAS_TYP7 (x, (ip[1] >> 1) & 0x7f));
}
* If the value in A is eq? to the value in B, add OFFSET, a signed
* 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (36, br_if_eq, "br-if-eq", OP2 (U8_U12_U12, B1_X7_L24))
+ VM_DEFINE_OP (38, br_if_eq, "br-if-eq", OP2 (U8_U12_U12, B1_X7_L24))
{
BR_BINARY (x, y, scm_is_eq (x, y));
}
* If the value in A is eqv? to the value in B, add OFFSET, a signed
* 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (37, br_if_eqv, "br-if-eqv", OP2 (U8_U12_U12, B1_X7_L24))
+ VM_DEFINE_OP (39, br_if_eqv, "br-if-eqv", OP2 (U8_U12_U12, B1_X7_L24))
{
BR_BINARY (x, y,
scm_is_eq (x, y)
*/
// FIXME: Should sync_ip before calling out and cache_fp before coming
// back! Another reason to remove this opcode!
- VM_DEFINE_OP (38, br_if_equal, "br-if-equal", OP2 (U8_U12_U12, B1_X7_L24))
+ VM_DEFINE_OP (40, br_if_equal, "br-if-equal", OP2 (U8_U12_U12, B1_X7_L24))
{
BR_BINARY (x, y,
scm_is_eq (x, y)
* If the value in A is = to the value in B, add OFFSET, a signed
* 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (39, br_if_ee, "br-if-=", OP2 (U8_U12_U12, B1_X7_L24))
+ VM_DEFINE_OP (41, br_if_ee, "br-if-=", OP2 (U8_U12_U12, B1_X7_L24))
{
BR_ARITHMETIC (==, scm_num_eq_p);
}
* If the value in A is < to the value in B, add OFFSET, a signed
* 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (40, br_if_lt, "br-if-<", OP2 (U8_U12_U12, B1_X7_L24))
+ VM_DEFINE_OP (42, br_if_lt, "br-if-<", OP2 (U8_U12_U12, B1_X7_L24))
{
BR_ARITHMETIC (<, scm_less_p);
}
* If the value in A is <= to the value in B, add OFFSET, a signed
* 24-bit number, to the current instruction pointer.
*/
- VM_DEFINE_OP (41, br_if_le, "br-if-<=", OP2 (U8_U12_U12, B1_X7_L24))
+ VM_DEFINE_OP (43, br_if_le, "br-if-<=", OP2 (U8_U12_U12, B1_X7_L24))
{
BR_ARITHMETIC (<=, scm_leq_p);
}
*
* Copy a value from one local slot to another.
*/
- VM_DEFINE_OP (42, mov, "mov", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (44, mov, "mov", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst;
scm_t_uint16 src;
*
* Copy a value from one local slot to another.
*/
- VM_DEFINE_OP (43, long_mov, "long-mov", OP2 (U8_U24, X8_U24) | OP_DST)
+ VM_DEFINE_OP (45, long_mov, "long-mov", OP2 (U8_U24, X8_U24) | OP_DST)
{
scm_t_uint32 dst;
scm_t_uint32 src;
*
* Create a new variable holding SRC, and place it in DST.
*/
- VM_DEFINE_OP (44, box, "box", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (46, box, "box", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst, src;
UNPACK_12_12 (op, dst, src);
- LOCAL_SET (dst, scm_cell (scm_tc7_variable, SCM_UNPACK (LOCAL_REF (src))));
+ LOCAL_SET (dst, scm_inline_cell (thread, scm_tc7_variable,
+ SCM_UNPACK (LOCAL_REF (src))));
NEXT (1);
}
* Unpack the variable at SRC into DST, asserting that the variable is
* actually bound.
*/
- VM_DEFINE_OP (45, box_ref, "box-ref", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (47, box_ref, "box-ref", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst, src;
SCM var;
var = LOCAL_REF (src);
VM_ASSERT (SCM_VARIABLEP (var),
vm_error_not_a_variable ("variable-ref", var));
- VM_ASSERT (VARIABLE_BOUNDP (var),
- vm_error_unbound (SCM_FRAME_PROGRAM (fp), var));
+ VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (var));
LOCAL_SET (dst, VARIABLE_REF (var));
NEXT (1);
}
*
* Set the contents of the variable at DST to SET.
*/
- VM_DEFINE_OP (46, box_set, "box-set!", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (48, box_set, "box-set!", OP1 (U8_U12_U12))
{
scm_t_uint16 dst, src;
SCM var;
* signed 32-bit integer. Space for NFREE free variables will be
* allocated.
*/
- VM_DEFINE_OP (47, make_closure, "make-closure", OP3 (U8_U24, L32, X8_U24) | OP_DST)
+ VM_DEFINE_OP (49, make_closure, "make-closure", OP3 (U8_U24, L32, X8_U24) | OP_DST)
{
scm_t_uint32 dst, nfree, n;
scm_t_int32 offset;
UNPACK_24 (ip[2], nfree);
// FIXME: Assert range of nfree?
- closure = scm_words (scm_tc7_program | (nfree << 16), nfree + 2);
+ closure = scm_inline_words (thread, scm_tc7_program | (nfree << 16),
+ nfree + 2);
SCM_SET_CELL_WORD_1 (closure, ip + offset);
// FIXME: Elide these initializations?
for (n = 0; n < nfree; n++)
*
* Load free variable IDX from the closure SRC into local slot DST.
*/
- VM_DEFINE_OP (48, free_ref, "free-ref", OP2 (U8_U12_U12, X8_U24) | OP_DST)
+ VM_DEFINE_OP (50, free_ref, "free-ref", OP2 (U8_U12_U12, X8_U24) | OP_DST)
{
scm_t_uint16 dst, src;
scm_t_uint32 idx;
*
* Set free variable IDX from the closure DST to SRC.
*/
- VM_DEFINE_OP (49, free_set, "free-set!", OP2 (U8_U12_U12, X8_U24))
+ VM_DEFINE_OP (51, free_set, "free-set!", OP2 (U8_U12_U12, X8_U24))
{
scm_t_uint16 dst, src;
scm_t_uint32 idx;
* Make an immediate whose low bits are LOW-BITS, and whose top bits are
* 0.
*/
- VM_DEFINE_OP (50, make_short_immediate, "make-short-immediate", OP1 (U8_U8_I16) | OP_DST)
+ VM_DEFINE_OP (52, make_short_immediate, "make-short-immediate", OP1 (U8_U8_I16) | OP_DST)
{
scm_t_uint8 dst;
scm_t_bits val;
* Make an immediate whose low bits are LOW-BITS, and whose top bits are
* 0.
*/
- VM_DEFINE_OP (51, make_long_immediate, "make-long-immediate", OP2 (U8_U24, I32))
+ VM_DEFINE_OP (53, make_long_immediate, "make-long-immediate", OP2 (U8_U24, I32) | OP_DST)
{
scm_t_uint32 dst;
scm_t_bits val;
*
* Make an immediate with HIGH-BITS and LOW-BITS.
*/
- VM_DEFINE_OP (52, make_long_long_immediate, "make-long-long-immediate", OP3 (U8_U24, A32, B32) | OP_DST)
+ VM_DEFINE_OP (54, make_long_long_immediate, "make-long-long-immediate", OP3 (U8_U24, A32, B32) | OP_DST)
{
scm_t_uint32 dst;
scm_t_bits val;
* Whether the object is mutable or immutable depends on where it was
* allocated by the compiler, and loaded by the loader.
*/
- VM_DEFINE_OP (53, make_non_immediate, "make-non-immediate", OP2 (U8_U24, N32) | OP_DST)
+ VM_DEFINE_OP (55, make_non_immediate, "make-non-immediate", OP2 (U8_U24, N32) | OP_DST)
{
scm_t_uint32 dst;
scm_t_int32 offset;
* that the compiler is unable to statically allocate, like symbols.
* These values would be initialized when the object file loads.
*/
- VM_DEFINE_OP (54, static_ref, "static-ref", OP2 (U8_U24, S32))
+ VM_DEFINE_OP (56, static_ref, "static-ref", OP2 (U8_U24, S32) | OP_DST)
{
scm_t_uint32 dst;
scm_t_int32 offset;
* Store a SCM value into memory, OFFSET 32-bit words away from the
* current instruction pointer. OFFSET is a signed value.
*/
- VM_DEFINE_OP (55, static_set, "static-set!", OP2 (U8_U24, LO32))
+ VM_DEFINE_OP (57, static_set, "static-set!", OP2 (U8_U24, LO32))
{
scm_t_uint32 src;
scm_t_int32 offset;
* are signed 32-bit values, indicating a memory address as a number
* of 32-bit words away from the current instruction pointer.
*/
- VM_DEFINE_OP (56, static_patch, "static-patch!", OP3 (U8_X24, LO32, L32))
+ VM_DEFINE_OP (58, static_patch, "static-patch!", OP3 (U8_X24, LO32, L32))
{
scm_t_int32 dst_offset, src_offset;
void *src;
*
* Store the current module in DST.
*/
- VM_DEFINE_OP (57, current_module, "current-module", OP1 (U8_U24) | OP_DST)
+ VM_DEFINE_OP (59, current_module, "current-module", OP1 (U8_U24) | OP_DST)
{
scm_t_uint32 dst;
* Resolve SYM in the current module, and place the resulting variable
* in DST.
*/
- VM_DEFINE_OP (58, resolve, "resolve", OP2 (U8_U24, B1_X7_U24) | OP_DST)
+ VM_DEFINE_OP (60, resolve, "resolve", OP2 (U8_U24, B1_X7_U24) | OP_DST)
{
scm_t_uint32 dst;
scm_t_uint32 sym;
var = scm_lookup (LOCAL_REF (sym));
CACHE_FP ();
if (ip[1] & 0x1)
- VM_ASSERT (VARIABLE_BOUNDP (var),
- vm_error_unbound (fp[0], LOCAL_REF (sym)));
+ VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (LOCAL_REF (sym)));
LOCAL_SET (dst, var);
NEXT (2);
* Look up a binding for SYM in the current module, creating it if
* necessary. Set its value to VAL.
*/
- VM_DEFINE_OP (59, define, "define!", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (61, define, "define!", OP1 (U8_U12_U12))
{
scm_t_uint16 sym, val;
UNPACK_12_12 (op, sym, val);
* DST, and caching the resolved variable so that we will hit the cache next
* time.
*/
- VM_DEFINE_OP (60, toplevel_box, "toplevel-box", OP5 (U8_U24, S32, S32, N32, B1_X31) | OP_DST)
+ VM_DEFINE_OP (62, toplevel_box, "toplevel-box", OP5 (U8_U24, S32, S32, N32, B1_X31) | OP_DST)
{
scm_t_uint32 dst;
scm_t_int32 var_offset;
var = scm_module_lookup (mod, sym);
CACHE_FP ();
if (ip[4] & 0x1)
- VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (fp[0], sym));
+ VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (sym));
*var_loc = var;
}
* Like toplevel-box, except MOD-OFFSET points at the name of a module
* instead of the module itself.
*/
- VM_DEFINE_OP (61, module_box, "module-box", OP5 (U8_U24, S32, N32, N32, B1_X31) | OP_DST)
+ VM_DEFINE_OP (63, module_box, "module-box", OP5 (U8_U24, S32, N32, N32, B1_X31) | OP_DST)
{
scm_t_uint32 dst;
scm_t_int32 var_offset;
if (!scm_module_system_booted_p)
{
-#ifdef VM_ENABLE_PARANOID_ASSERTIONS
- ASSERT
- (scm_is_true
- scm_equal_p (modname,
- scm_list_2 (SCM_BOOL_T,
- scm_from_utf8_symbol ("guile"))));
-#endif
+ ASSERT (scm_is_true
+ scm_equal_p (modname,
+ scm_list_2
+ (SCM_BOOL_T,
+ scm_from_utf8_symbol ("guile"))));
var = scm_lookup (sym);
}
else if (scm_is_true (SCM_CAR (modname)))
CACHE_FP ();
if (ip[4] & 0x1)
- VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (fp[0], sym));
+ VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (sym));
*var_loc = var;
}
* will expect a multiple-value return as if from a call with the
* procedure at PROC-SLOT.
*/
- VM_DEFINE_OP (62, prompt, "prompt", OP3 (U8_U24, B1_X7_U24, X8_L24))
+ VM_DEFINE_OP (64, prompt, "prompt", OP3 (U8_U24, B1_X7_U24, X8_L24))
{
scm_t_uint32 tag, proc_slot;
scm_t_int32 offset;
/* Push the prompt onto the dynamic stack. */
flags = escape_only_p ? SCM_F_DYNSTACK_PROMPT_ESCAPE_ONLY : 0;
- scm_dynstack_push_prompt (&current_thread->dynstack, flags,
+ scm_dynstack_push_prompt (&thread->dynstack, flags,
LOCAL_REF (tag),
fp - vp->stack_base,
LOCAL_ADDRESS (proc_slot) - vp->stack_base,
* the compiler should have inserted checks that they wind and unwind
* procs are thunks, if it could not prove that to be the case.
*/
- VM_DEFINE_OP (63, wind, "wind", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (65, wind, "wind", OP1 (U8_U12_U12))
{
scm_t_uint16 winder, unwinder;
UNPACK_12_12 (op, winder, unwinder);
- scm_dynstack_push_dynwind (&current_thread->dynstack,
+ scm_dynstack_push_dynwind (&thread->dynstack,
LOCAL_REF (winder), LOCAL_REF (unwinder));
NEXT (1);
}
* A normal exit from the dynamic extent of an expression. Pop the top
* entry off of the dynamic stack.
*/
- VM_DEFINE_OP (64, unwind, "unwind", OP1 (U8_X24))
+ VM_DEFINE_OP (66, unwind, "unwind", OP1 (U8_X24))
{
- scm_dynstack_pop (&current_thread->dynstack);
+ scm_dynstack_pop (&thread->dynstack);
NEXT (1);
}
*
* Dynamically bind VALUE to FLUID.
*/
- VM_DEFINE_OP (65, push_fluid, "push-fluid", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (67, push_fluid, "push-fluid", OP1 (U8_U12_U12))
{
scm_t_uint32 fluid, value;
UNPACK_12_12 (op, fluid, value);
- scm_dynstack_push_fluid (&current_thread->dynstack,
+ scm_dynstack_push_fluid (&thread->dynstack,
LOCAL_REF (fluid), LOCAL_REF (value),
- current_thread->dynamic_state);
+ thread->dynamic_state);
NEXT (1);
}
* Leave the dynamic extent of a with-fluid* expression, restoring the
* fluid to its previous value.
*/
- VM_DEFINE_OP (66, pop_fluid, "pop-fluid", OP1 (U8_X24))
+ VM_DEFINE_OP (68, pop_fluid, "pop-fluid", OP1 (U8_X24))
{
/* This function must not allocate. */
- scm_dynstack_unwind_fluid (&current_thread->dynstack,
- current_thread->dynamic_state);
+ scm_dynstack_unwind_fluid (&thread->dynstack,
+ thread->dynamic_state);
NEXT (1);
}
*
* Reference the fluid in SRC, and place the value in DST.
*/
- VM_DEFINE_OP (67, fluid_ref, "fluid-ref", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (69, fluid_ref, "fluid-ref", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst, src;
size_t num;
UNPACK_12_12 (op, dst, src);
fluid = LOCAL_REF (src);
- fluids = SCM_I_DYNAMIC_STATE_FLUIDS (current_thread->dynamic_state);
+ fluids = SCM_I_DYNAMIC_STATE_FLUIDS (thread->dynamic_state);
if (SCM_UNLIKELY (!SCM_FLUID_P (fluid))
|| ((num = SCM_I_FLUID_NUM (fluid)) >= SCM_SIMPLE_VECTOR_LENGTH (fluids)))
{
if (scm_is_eq (val, SCM_UNDEFINED))
val = SCM_I_FLUID_DEFAULT (fluid);
VM_ASSERT (!scm_is_eq (val, SCM_UNDEFINED),
- vm_error_unbound_fluid (SCM_FRAME_PROGRAM (fp), fluid));
+ vm_error_unbound_fluid (fluid));
LOCAL_SET (dst, val);
}
*
* Set the value of the fluid in DST to the value in SRC.
*/
- VM_DEFINE_OP (68, fluid_set, "fluid-set", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (70, fluid_set, "fluid-set", OP1 (U8_U12_U12))
{
scm_t_uint16 a, b;
size_t num;
UNPACK_12_12 (op, a, b);
fluid = LOCAL_REF (a);
- fluids = SCM_I_DYNAMIC_STATE_FLUIDS (current_thread->dynamic_state);
+ fluids = SCM_I_DYNAMIC_STATE_FLUIDS (thread->dynamic_state);
if (SCM_UNLIKELY (!SCM_FLUID_P (fluid))
|| ((num = SCM_I_FLUID_NUM (fluid)) >= SCM_SIMPLE_VECTOR_LENGTH (fluids)))
{
*
* Store the length of the string in SRC in DST.
*/
- VM_DEFINE_OP (69, string_length, "string-length", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (71, string_length, "string-length", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (str);
if (SCM_LIKELY (scm_is_string (str)))
* Fetch the character at position IDX in the string in SRC, and store
* it in DST.
*/
- VM_DEFINE_OP (70, string_ref, "string-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (72, string_ref, "string-ref", OP1 (U8_U8_U8_U8) | OP_DST)
{
scm_t_signed_bits i = 0;
ARGS2 (str, idx);
*
* Parse a string in SRC to a number, and store in DST.
*/
- VM_DEFINE_OP (71, string_to_number, "string->number", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (73, string_to_number, "string->number", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst, src;
*
* Parse a string in SRC to a symbol, and store in DST.
*/
- VM_DEFINE_OP (72, string_to_symbol, "string->symbol", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (74, string_to_symbol, "string->symbol", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst, src;
*
* Make a keyword from the symbol in SRC, and store it in DST.
*/
- VM_DEFINE_OP (73, symbol_to_keyword, "symbol->keyword", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (75, symbol_to_keyword, "symbol->keyword", OP1 (U8_U12_U12) | OP_DST)
{
scm_t_uint16 dst, src;
UNPACK_12_12 (op, dst, src);
*
* Cons CAR and CDR, and store the result in DST.
*/
- VM_DEFINE_OP (74, cons, "cons", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (76, cons, "cons", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
- RETURN (scm_cons (x, y));
+ RETURN (scm_inline_cons (thread, x, y));
}
/* car dst:12 src:12
*
* Place the car of SRC in DST.
*/
- VM_DEFINE_OP (75, car, "car", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (77, car, "car", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (x);
VM_VALIDATE_PAIR (x, "car");
*
* Place the cdr of SRC in DST.
*/
- VM_DEFINE_OP (76, cdr, "cdr", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (78, cdr, "cdr", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (x);
VM_VALIDATE_PAIR (x, "cdr");
*
* Set the car of DST to SRC.
*/
- VM_DEFINE_OP (77, set_car, "set-car!", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (79, set_car, "set-car!", OP1 (U8_U12_U12))
{
scm_t_uint16 a, b;
SCM x, y;
*
* Set the cdr of DST to SRC.
*/
- VM_DEFINE_OP (78, set_cdr, "set-cdr!", OP1 (U8_U12_U12))
+ VM_DEFINE_OP (80, set_cdr, "set-cdr!", OP1 (U8_U12_U12))
{
scm_t_uint16 a, b;
SCM x, y;
*
* Add A to B, and place the result in DST.
*/
- VM_DEFINE_OP (79, add, "add", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (81, add, "add", OP1 (U8_U8_U8_U8) | OP_DST)
{
BINARY_INTEGER_OP (+, scm_sum);
}
*
* Add 1 to the value in SRC, and place the result in DST.
*/
- VM_DEFINE_OP (80, add1, "add1", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (82, add1, "add1", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (x);
*
* Subtract B from A, and place the result in DST.
*/
- VM_DEFINE_OP (81, sub, "sub", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (83, sub, "sub", OP1 (U8_U8_U8_U8) | OP_DST)
{
BINARY_INTEGER_OP (-, scm_difference);
}
*
* Subtract 1 from SRC, and place the result in DST.
*/
- VM_DEFINE_OP (82, sub1, "sub1", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (84, sub1, "sub1", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (x);
*
* Multiply A and B, and place the result in DST.
*/
- VM_DEFINE_OP (83, mul, "mul", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (85, mul, "mul", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
RETURN_EXP (scm_product (x, y));
*
* Divide A by B, and place the result in DST.
*/
- VM_DEFINE_OP (84, div, "div", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (86, div, "div", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
RETURN_EXP (scm_divide (x, y));
*
* Divide A by B, and place the quotient in DST.
*/
- VM_DEFINE_OP (85, quo, "quo", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (87, quo, "quo", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
RETURN_EXP (scm_quotient (x, y));
*
* Divide A by B, and place the remainder in DST.
*/
- VM_DEFINE_OP (86, rem, "rem", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (88, rem, "rem", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
RETURN_EXP (scm_remainder (x, y));
*
* Place the modulo of A by B in DST.
*/
- VM_DEFINE_OP (87, mod, "mod", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (89, mod, "mod", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
RETURN_EXP (scm_modulo (x, y));
*
* Shift A arithmetically by B bits, and place the result in DST.
*/
- VM_DEFINE_OP (88, ash, "ash", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (90, ash, "ash", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
&& ((scm_t_bits)
(SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
<= 1))
- RETURN (SCM_I_MAKINUM (nn << bits_to_shift));
+ RETURN (SCM_I_MAKINUM (nn < 0
+ ? -(-nn << bits_to_shift)
+ : (nn << bits_to_shift)));
/* fall through */
}
/* fall through */
*
* Place the bitwise AND of A and B into DST.
*/
- VM_DEFINE_OP (89, logand, "logand", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (91, logand, "logand", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
*
* Place the bitwise inclusive OR of A with B in DST.
*/
- VM_DEFINE_OP (90, logior, "logior", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (92, logior, "logior", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
*
* Place the bitwise exclusive OR of A with B in DST.
*/
- VM_DEFINE_OP (91, logxor, "logxor", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (93, logxor, "logxor", OP1 (U8_U8_U8_U8) | OP_DST)
{
ARGS2 (x, y);
if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
RETURN_EXP (scm_logxor (x, y));
}
+ /* make-vector dst:8 length:8 init:8
+ *
+ * Make a vector and write it to DST. The vector will have space for
+ * LENGTH slots. They will be filled with the value in slot INIT.
+ */
+ VM_DEFINE_OP (94, make_vector, "make-vector", OP1 (U8_U8_U8_U8) | OP_DST)
+ {
+ scm_t_uint8 dst, init, length;
+
+ UNPACK_8_8_8 (op, dst, length, init);
+
+ LOCAL_SET (dst, scm_make_vector (LOCAL_REF (length), LOCAL_REF (init)));
+
+ NEXT (1);
+ }
+
/* make-vector/immediate dst:8 length:8 init:8
*
* Make a short vector of known size and write it to DST. The vector
* will have space for LENGTH slots, an immediate value. They will be
* filled with the value in slot INIT.
*/
- VM_DEFINE_OP (92, make_vector_immediate, "make-vector/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (95, make_vector_immediate, "make-vector/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
{
scm_t_uint8 dst, init;
scm_t_int32 length, n;
UNPACK_8_8_8 (op, dst, length, init);
val = LOCAL_REF (init);
- vector = scm_words (scm_tc7_vector | (length << 8), length + 1);
+ vector = scm_inline_words (thread, scm_tc7_vector | (length << 8),
+ length + 1);
for (n = 0; n < length; n++)
SCM_SIMPLE_VECTOR_SET (vector, n, val);
LOCAL_SET (dst, vector);
*
* Store the length of the vector in SRC in DST.
*/
- VM_DEFINE_OP (93, vector_length, "vector-length", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (96, vector_length, "vector-length", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (vect);
- if (SCM_LIKELY (SCM_I_IS_VECTOR (vect)))
- RETURN (SCM_I_MAKINUM (SCM_I_VECTOR_LENGTH (vect)));
- else
- {
- SYNC_IP ();
- RETURN (scm_vector_length (vect));
- }
+ VM_ASSERT (SCM_I_IS_VECTOR (vect),
+ vm_error_not_a_vector ("vector-ref", vect));
+ RETURN (SCM_I_MAKINUM (SCM_I_VECTOR_LENGTH (vect)));
}
/* vector-ref dst:8 src:8 idx:8
* Fetch the item at position IDX in the vector in SRC, and store it
* in DST.
*/
- VM_DEFINE_OP (94, vector_ref, "vector-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (97, vector_ref, "vector-ref", OP1 (U8_U8_U8_U8) | OP_DST)
{
scm_t_signed_bits i = 0;
ARGS2 (vect, idx);
- if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
- && SCM_I_INUMP (idx)
- && ((i = SCM_I_INUM (idx)) >= 0)
- && i < SCM_I_VECTOR_LENGTH (vect)))
- RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
- else
- {
- SYNC_IP ();
- RETURN (scm_vector_ref (vect, idx));
- }
+ VM_ASSERT (SCM_I_IS_VECTOR (vect),
+ vm_error_not_a_vector ("vector-ref", vect));
+ VM_ASSERT ((SCM_I_INUMP (idx)
+ && ((i = SCM_I_INUM (idx)) >= 0)
+ && i < SCM_I_VECTOR_LENGTH (vect)),
+ vm_error_out_of_range ("vector-ref", idx));
+ RETURN (SCM_I_VECTOR_ELTS (vect)[i]);
}
/* vector-ref/immediate dst:8 src:8 idx:8
* Fill DST with the item IDX elements into the vector at SRC. Useful
* for building data types using vectors.
*/
- VM_DEFINE_OP (95, vector_ref_immediate, "vector-ref/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (98, vector_ref_immediate, "vector-ref/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
{
scm_t_uint8 dst, src, idx;
SCM v;
UNPACK_8_8_8 (op, dst, src, idx);
v = LOCAL_REF (src);
- if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (v)
- && idx < SCM_I_VECTOR_LENGTH (v)))
- LOCAL_SET (dst, SCM_I_VECTOR_ELTS (LOCAL_REF (src))[idx]);
- else
- LOCAL_SET (dst, scm_c_vector_ref (v, idx));
+ VM_ASSERT (SCM_I_IS_VECTOR (v),
+ vm_error_not_a_vector ("vector-ref", v));
+ VM_ASSERT (idx < SCM_I_VECTOR_LENGTH (v),
+ vm_error_out_of_range ("vector-ref", scm_from_size_t (idx)));
+ LOCAL_SET (dst, SCM_I_VECTOR_ELTS (LOCAL_REF (src))[idx]);
NEXT (1);
}
*
* Store SRC into the vector DST at index IDX.
*/
- VM_DEFINE_OP (96, vector_set, "vector-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (99, vector_set, "vector-set!", OP1 (U8_U8_U8_U8))
{
scm_t_uint8 dst, idx_var, src;
SCM vect, idx, val;
idx = LOCAL_REF (idx_var);
val = LOCAL_REF (src);
- if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
- && SCM_I_INUMP (idx)
- && ((i = SCM_I_INUM (idx)) >= 0)
- && i < SCM_I_VECTOR_LENGTH (vect)))
- SCM_I_VECTOR_WELTS (vect)[i] = val;
- else
- {
- SYNC_IP ();
- scm_vector_set_x (vect, idx, val);
- }
+ VM_ASSERT (SCM_I_IS_VECTOR (vect),
+ vm_error_not_a_vector ("vector-ref", vect));
+ VM_ASSERT ((SCM_I_INUMP (idx)
+ && ((i = SCM_I_INUM (idx)) >= 0)
+ && i < SCM_I_VECTOR_LENGTH (vect)),
+ vm_error_out_of_range ("vector-ref", idx));
+ SCM_I_VECTOR_WELTS (vect)[i] = val;
NEXT (1);
}
* Store SRC into the vector DST at index IDX. Here IDX is an
* immediate value.
*/
- VM_DEFINE_OP (97, vector_set_immediate, "vector-set!/immediate", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (100, vector_set_immediate, "vector-set!/immediate", OP1 (U8_U8_U8_U8))
{
scm_t_uint8 dst, idx, src;
SCM vect, val;
vect = LOCAL_REF (dst);
val = LOCAL_REF (src);
- if (SCM_LIKELY (SCM_I_IS_NONWEAK_VECTOR (vect)
- && idx < SCM_I_VECTOR_LENGTH (vect)))
- SCM_I_VECTOR_WELTS (vect)[idx] = val;
- else
- {
- SYNC_IP ();
- scm_vector_set_x (vect, scm_from_uint8 (idx), val);
- }
+ VM_ASSERT (SCM_I_IS_VECTOR (vect),
+ vm_error_not_a_vector ("vector-ref", vect));
+ VM_ASSERT (idx < SCM_I_VECTOR_LENGTH (vect),
+ vm_error_out_of_range ("vector-ref", scm_from_size_t (idx)));
+ SCM_I_VECTOR_WELTS (vect)[idx] = val;
NEXT (1);
}
*
* Store the vtable of SRC into DST.
*/
- VM_DEFINE_OP (98, struct_vtable, "struct-vtable", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (101, struct_vtable, "struct-vtable", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (obj);
VM_VALIDATE_STRUCT (obj, "struct_vtable");
* will be constructed with space for NFIELDS fields, which should
* correspond to the field count of the VTABLE.
*/
- VM_DEFINE_OP (99, allocate_struct_immediate, "allocate-struct/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (102, allocate_struct_immediate, "allocate-struct/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
{
scm_t_uint8 dst, vtable, nfields;
SCM ret;
* Fetch the item at slot IDX in the struct in SRC, and store it
* in DST. IDX is an immediate unsigned 8-bit value.
*/
- VM_DEFINE_OP (100, struct_ref_immediate, "struct-ref/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (103, struct_ref_immediate, "struct-ref/immediate", OP1 (U8_U8_U8_U8) | OP_DST)
{
scm_t_uint8 dst, src, idx;
SCM obj;
* Store SRC into the struct DST at slot IDX. IDX is an immediate
* unsigned 8-bit value.
*/
- VM_DEFINE_OP (101, struct_set_immediate, "struct-set!/immediate", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (104, struct_set_immediate, "struct-set!/immediate", OP1 (U8_U8_U8_U8))
{
scm_t_uint8 dst, idx, src;
SCM obj, val;
*
* Store the vtable of SRC into DST.
*/
- VM_DEFINE_OP (102, class_of, "class-of", OP1 (U8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (105, class_of, "class-of", OP1 (U8_U12_U12) | OP_DST)
{
ARGS1 (obj);
if (SCM_INSTANCEP (obj))
RETURN (scm_class_of (obj));
}
- VM_DEFINE_OP (103, unused_103, NULL, NOP)
- VM_DEFINE_OP (104, unused_104, NULL, NOP)
- goto op_unused_255;
-
\f
/*
* from the instruction pointer, and store into DST. LEN is a byte
* length. OFFSET is signed.
*/
- VM_DEFINE_OP (105, load_typed_array, "load-typed-array", OP3 (U8_U8_U8_U8, N32, U32) | OP_DST)
+ VM_DEFINE_OP (106, load_typed_array, "load-typed-array", OP3 (U8_U8_U8_U8, N32, U32) | OP_DST)
{
scm_t_uint8 dst, type, shape;
scm_t_int32 offset;
NEXT (3);
}
- /* make-array dst:12 type:12 _:8 fill:12 bounds:12
+ /* make-array dst:8 type:8 fill:8 _:8 bounds:24
*
* Make a new array with TYPE, FILL, and BOUNDS, storing it in DST.
*/
- VM_DEFINE_OP (106, make_array, "make-array", OP2 (U8_U12_U12, X8_U12_U12) | OP_DST)
+ VM_DEFINE_OP (107, make_array, "make-array", OP2 (U8_U8_U8_U8, X8_U24) | OP_DST)
{
- scm_t_uint16 dst, type, fill, bounds;
- UNPACK_12_12 (op, dst, type);
- UNPACK_12_12 (ip[1], fill, bounds);
+ scm_t_uint8 dst, type, fill, bounds;
+ UNPACK_8_8_8 (op, dst, type, fill);
+ UNPACK_24 (ip[1], bounds);
SYNC_IP ();
LOCAL_SET (dst, scm_make_typed_array (LOCAL_REF (type), LOCAL_REF (fill),
LOCAL_REF (bounds)));
RETURN (scm_bytevector_ ## fn_stem ## _native_ref (bv, idx)); \
} while (0)
- VM_DEFINE_OP (107, bv_u8_ref, "bv-u8-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (108, bv_u8_ref, "bv-u8-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_FIXABLE_INT_REF (u8, u8, uint8, 1);
- VM_DEFINE_OP (108, bv_s8_ref, "bv-s8-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (109, bv_s8_ref, "bv-s8-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_FIXABLE_INT_REF (s8, s8, int8, 1);
- VM_DEFINE_OP (109, bv_u16_ref, "bv-u16-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (110, bv_u16_ref, "bv-u16-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_FIXABLE_INT_REF (u16, u16_native, uint16, 2);
- VM_DEFINE_OP (110, bv_s16_ref, "bv-s16-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (111, bv_s16_ref, "bv-s16-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_FIXABLE_INT_REF (s16, s16_native, int16, 2);
- VM_DEFINE_OP (111, bv_u32_ref, "bv-u32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (112, bv_u32_ref, "bv-u32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (u32, u32_native, uint32, 4);
#else
BV_INT_REF (u32, uint32, 4);
#endif
- VM_DEFINE_OP (112, bv_s32_ref, "bv-s32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (113, bv_s32_ref, "bv-s32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_REF (s32, s32_native, int32, 4);
#else
BV_INT_REF (s32, int32, 4);
#endif
- VM_DEFINE_OP (113, bv_u64_ref, "bv-u64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (114, bv_u64_ref, "bv-u64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_INT_REF (u64, uint64, 8);
- VM_DEFINE_OP (114, bv_s64_ref, "bv-s64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (115, bv_s64_ref, "bv-s64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_INT_REF (s64, int64, 8);
- VM_DEFINE_OP (115, bv_f32_ref, "bv-f32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (116, bv_f32_ref, "bv-f32-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_FLOAT_REF (f32, ieee_single, float, 4);
- VM_DEFINE_OP (116, bv_f64_ref, "bv-f64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ VM_DEFINE_OP (117, bv_f64_ref, "bv-f64-ref", OP1 (U8_U8_U8_U8) | OP_DST)
BV_FLOAT_REF (f64, ieee_double, double, 8);
/* bv-u8-set! dst:8 idx:8 src:8
NEXT (1); \
} while (0)
- VM_DEFINE_OP (117, bv_u8_set, "bv-u8-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (118, bv_u8_set, "bv-u8-set!", OP1 (U8_U8_U8_U8))
BV_FIXABLE_INT_SET (u8, u8, uint8, 0, SCM_T_UINT8_MAX, 1);
- VM_DEFINE_OP (118, bv_s8_set, "bv-s8-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (119, bv_s8_set, "bv-s8-set!", OP1 (U8_U8_U8_U8))
BV_FIXABLE_INT_SET (s8, s8, int8, SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1);
- VM_DEFINE_OP (119, bv_u16_set, "bv-u16-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (120, bv_u16_set, "bv-u16-set!", OP1 (U8_U8_U8_U8))
BV_FIXABLE_INT_SET (u16, u16_native, uint16, 0, SCM_T_UINT16_MAX, 2);
- VM_DEFINE_OP (120, bv_s16_set, "bv-s16-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (121, bv_s16_set, "bv-s16-set!", OP1 (U8_U8_U8_U8))
BV_FIXABLE_INT_SET (s16, s16_native, int16, SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2);
- VM_DEFINE_OP (121, bv_u32_set, "bv-u32-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (122, bv_u32_set, "bv-u32-set!", OP1 (U8_U8_U8_U8))
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (u32, u32_native, uint32, 0, SCM_T_UINT32_MAX, 4);
#else
BV_INT_SET (u32, uint32, 4);
#endif
- VM_DEFINE_OP (122, bv_s32_set, "bv-s32-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (123, bv_s32_set, "bv-s32-set!", OP1 (U8_U8_U8_U8))
#if SIZEOF_VOID_P > 4
BV_FIXABLE_INT_SET (s32, s32_native, int32, SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4);
#else
BV_INT_SET (s32, int32, 4);
#endif
- VM_DEFINE_OP (123, bv_u64_set, "bv-u64-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (124, bv_u64_set, "bv-u64-set!", OP1 (U8_U8_U8_U8))
BV_INT_SET (u64, uint64, 8);
- VM_DEFINE_OP (124, bv_s64_set, "bv-s64-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (125, bv_s64_set, "bv-s64-set!", OP1 (U8_U8_U8_U8))
BV_INT_SET (s64, int64, 8);
- VM_DEFINE_OP (125, bv_f32_set, "bv-f32-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (126, bv_f32_set, "bv-f32-set!", OP1 (U8_U8_U8_U8))
BV_FLOAT_SET (f32, ieee_single, float, 4);
- VM_DEFINE_OP (126, bv_f64_set, "bv-f64-set!", OP1 (U8_U8_U8_U8))
+ VM_DEFINE_OP (127, bv_f64_set, "bv-f64-set!", OP1 (U8_U8_U8_U8))
BV_FLOAT_SET (f64, ieee_double, double, 8);
- VM_DEFINE_OP (127, unused_127, NULL, NOP)
- VM_DEFINE_OP (128, unused_128, NULL, NOP)
- VM_DEFINE_OP (129, unused_129, NULL, NOP)
- VM_DEFINE_OP (130, unused_130, NULL, NOP)
- VM_DEFINE_OP (131, unused_131, NULL, NOP)
+ /* br-if-logtest a:12 b:12 invert:1 _:7 offset:24
+ *
+ * If the exact integer in A has any bits in common with the exact
+ * integer in B, add OFFSET, a signed 24-bit number, to the current
+ * instruction pointer.
+ */
+ VM_DEFINE_OP (128, br_if_logtest, "br-if-logtest", OP2 (U8_U12_U12, B1_X7_L24))
+ {
+ BR_BINARY (x, y,
+ ((SCM_I_INUMP (x) && SCM_I_INUMP (y))
+ ? (SCM_UNPACK (x) & SCM_UNPACK (y) & ~scm_tc2_int)
+ : scm_is_true (scm_logtest (x, y))));
+ }
+
+ /* FIXME: Move above */
+
+ /* allocate-struct dst:8 vtable:8 nfields:8
+ *
+ * Allocate a new struct with VTABLE, and place it in DST. The struct
+ * will be constructed with space for NFIELDS fields, which should
+ * correspond to the field count of the VTABLE.
+ */
+ VM_DEFINE_OP (129, allocate_struct, "allocate-struct", OP1 (U8_U8_U8_U8) | OP_DST)
+ {
+ scm_t_uint8 dst, vtable, nfields;
+ SCM ret;
+
+ UNPACK_8_8_8 (op, dst, vtable, nfields);
+
+ SYNC_IP ();
+ ret = scm_allocate_struct (LOCAL_REF (vtable), LOCAL_REF (nfields));
+ LOCAL_SET (dst, ret);
+
+ NEXT (1);
+ }
+
+ /* struct-ref dst:8 src:8 idx:8
+ *
+ * Fetch the item at slot IDX in the struct in SRC, and store it
+ * in DST.
+ */
+ VM_DEFINE_OP (130, struct_ref, "struct-ref", OP1 (U8_U8_U8_U8) | OP_DST)
+ {
+ scm_t_uint8 dst, src, idx;
+ SCM obj;
+ SCM index;
+
+ UNPACK_8_8_8 (op, dst, src, idx);
+
+ obj = LOCAL_REF (src);
+ index = LOCAL_REF (idx);
+
+ if (SCM_LIKELY (SCM_STRUCTP (obj)
+ && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
+ SCM_VTABLE_FLAG_SIMPLE)
+ && SCM_I_INUMP (index)
+ && SCM_I_INUM (index) >= 0
+ && SCM_I_INUM (index) < (SCM_STRUCT_DATA_REF
+ (SCM_STRUCT_VTABLE (obj),
+ scm_vtable_index_size))))
+ RETURN (SCM_STRUCT_SLOT_REF (obj, SCM_I_INUM (index)));
+
+ SYNC_IP ();
+ RETURN (scm_struct_ref (obj, index));
+ }
+
+ /* struct-set! dst:8 idx:8 src:8
+ *
+ * Store SRC into the struct DST at slot IDX.
+ */
+ VM_DEFINE_OP (131, struct_set, "struct-set!", OP1 (U8_U8_U8_U8))
+ {
+ scm_t_uint8 dst, idx, src;
+ SCM obj, val, index;
+
+ UNPACK_8_8_8 (op, dst, idx, src);
+
+ obj = LOCAL_REF (dst);
+ val = LOCAL_REF (src);
+ index = LOCAL_REF (idx);
+
+ if (SCM_LIKELY (SCM_STRUCTP (obj)
+ && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
+ SCM_VTABLE_FLAG_SIMPLE)
+ && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
+ SCM_VTABLE_FLAG_SIMPLE_RW)
+ && SCM_I_INUMP (index)
+ && SCM_I_INUM (index) >= 0
+ && SCM_I_INUM (index) < (SCM_STRUCT_DATA_REF
+ (SCM_STRUCT_VTABLE (obj),
+ scm_vtable_index_size))))
+ {
+ SCM_STRUCT_SLOT_SET (obj, SCM_I_INUM (index), val);
+ NEXT (1);
+ }
+
+ SYNC_IP ();
+ scm_struct_set_x (obj, index, val);
+ NEXT (1);
+ }
+
VM_DEFINE_OP (132, unused_132, NULL, NOP)
VM_DEFINE_OP (133, unused_133, NULL, NOP)
VM_DEFINE_OP (134, unused_134, NULL, NOP)
#undef BV_INT_REF
#undef BV_INT_SET
#undef CACHE_REGISTER
-#undef CHECK_OVERFLOW
#undef END_DISPATCH_SWITCH
#undef FREE_VARIABLE_REF
#undef INIT