+\f
+/* Bytevectors as generalized vectors & arrays. */
+
/* Common prologue for the complex-number accessors below: validate BV as
   a bytevector, convert INDEX to a byte offset, and range-check an access
   to two consecutive _TYPE values (real and imaginary part) starting at
   byte C_INDEX.  Expects `bv' and `index' in scope and FUNC_NAME
   defined; declares `c_len', `c_index', and `c_bv'.  */
#define COMPLEX_ACCESSOR_PROLOGUE(_type) \
 size_t c_len, c_index; \
 char *c_bv; \
 \
 SCM_VALIDATE_BYTEVECTOR (1, bv); \
 c_index = scm_to_size_t (index); \
 \
 c_len = SCM_BYTEVECTOR_LENGTH (bv); \
 c_bv = (char *) SCM_BYTEVECTOR_CONTENTS (bv); \
 \
 /* Overflow-safe range check: the former `c_index + 2 * sizeof (_type) \
    - 1 >= c_len' form could wrap around for a huge C_INDEX and wrongly \
    accept an out-of-range index.  Here the subtraction cannot \
    underflow because C_INDEX < C_LEN is established first.  */ \
 if (SCM_UNLIKELY (c_index >= c_len \
                   || c_len - c_index < 2 * sizeof (_type))) \
   scm_out_of_range (FUNC_NAME, index);
+
/* Template for native access to complex numbers of type TYPE: read the
   real and imaginary parts (two consecutive TYPE values starting at
   byte `index' of bytevector `bv') and return them as a Scheme complex
   number.  Relies on `bv' and `index' being in scope; validation and
   range checking happen in COMPLEX_ACCESSOR_PROLOGUE.  */
#define COMPLEX_NATIVE_REF(_type) \
  SCM result; \
 \
  COMPLEX_ACCESSOR_PROLOGUE (_type); \
 \
  { \
    _type real, imag; \
 \
    /* Copy byte-wise: the target bytes may be unaligned for _type.  */ \
    memcpy (&real, &c_bv[c_index], sizeof (_type)); \
    memcpy (&imag, &c_bv[c_index + sizeof (_type)], sizeof (_type)); \
 \
    result = scm_c_make_rectangular (real, imag); \
  } \
 \
  return result;
+
/* Return, as a Scheme complex number, the single-precision complex
   value stored at byte offset INDEX of bytevector BV.  */
static SCM
bytevector_ref_c32 (SCM bv, SCM index)
#define FUNC_NAME "bytevector_ref_c32"
{
  COMPLEX_NATIVE_REF (float);
}
#undef FUNC_NAME
+
/* Return, as a Scheme complex number, the double-precision complex
   value stored at byte offset INDEX of bytevector BV.  */
static SCM
bytevector_ref_c64 (SCM bv, SCM index)
#define FUNC_NAME "bytevector_ref_c64"
{
  COMPLEX_NATIVE_REF (double);
}
#undef FUNC_NAME
+
/* Type of a C function implementing a `bytevector-...-ref' operation,
   taking a bytevector and a byte index.  */
typedef SCM (*scm_t_bytevector_ref_fn)(SCM, SCM);

/* Reader for each array element type, indexed by element-type value
   (see `bv_handle_ref').  NULL entries are element types that are not
   backed by bytevectors.  */
static const scm_t_bytevector_ref_fn
bytevector_ref_fns[SCM_ARRAY_ELEMENT_TYPE_LAST + 1] =
{
  NULL, /* SCM */
  NULL, /* CHAR */
  NULL, /* BIT */
  scm_bytevector_u8_ref, /* VU8 */
  scm_bytevector_u8_ref, /* U8 */
  scm_bytevector_s8_ref, /* S8 */
  scm_bytevector_u16_native_ref, /* U16 */
  scm_bytevector_s16_native_ref, /* S16 */
  scm_bytevector_u32_native_ref, /* U32 */
  scm_bytevector_s32_native_ref, /* S32 */
  scm_bytevector_u64_native_ref, /* U64 */
  scm_bytevector_s64_native_ref, /* S64 */
  scm_bytevector_ieee_single_native_ref, /* F32 */
  scm_bytevector_ieee_double_native_ref, /* F64 */
  bytevector_ref_c32, /* C32 */
  bytevector_ref_c64 /* C64 */
};
+
+static SCM
+bv_handle_ref (scm_t_array_handle *h, size_t index)
+{
+ SCM byte_index;
+ scm_t_bytevector_ref_fn ref_fn;
+
+ ref_fn = bytevector_ref_fns[h->element_type];
+ byte_index =
+ scm_from_size_t (index * scm_array_handle_uniform_element_size (h));
+ return ref_fn (h->array, byte_index);
+}
+
/* Template for native modification of complex numbers of type TYPE:
   store the real and imaginary parts of Scheme complex VALUE as two
   consecutive TYPE values starting at byte `index' of bytevector `bv'.
   Relies on `bv', `index', and `value' being in scope; validation and
   range checking happen in COMPLEX_ACCESSOR_PROLOGUE.  */
#define COMPLEX_NATIVE_SET(_type) \
  COMPLEX_ACCESSOR_PROLOGUE (_type); \
 \
  { \
    _type real, imag; \
    real = scm_c_real_part (value); \
    imag = scm_c_imag_part (value); \
 \
    /* Copy byte-wise: the target bytes may be unaligned for _type.  */ \
    memcpy (&c_bv[c_index], &real, sizeof (_type)); \
    memcpy (&c_bv[c_index + sizeof (_type)], &imag, sizeof (_type)); \
  } \
 \
  return SCM_UNSPECIFIED;
+
/* Store Scheme complex VALUE as a single-precision complex number at
   byte offset INDEX of bytevector BV; return the unspecified value.  */
static SCM
bytevector_set_c32 (SCM bv, SCM index, SCM value)
#define FUNC_NAME "bytevector_set_c32"
{
  COMPLEX_NATIVE_SET (float);
}
#undef FUNC_NAME
+
/* Store Scheme complex VALUE as a double-precision complex number at
   byte offset INDEX of bytevector BV; return the unspecified value.  */
static SCM
bytevector_set_c64 (SCM bv, SCM index, SCM value)
#define FUNC_NAME "bytevector_set_c64"
{
  COMPLEX_NATIVE_SET (double);
}
#undef FUNC_NAME
+
+typedef SCM (*scm_t_bytevector_set_fn)(SCM, SCM, SCM);
+
+const scm_t_bytevector_set_fn bytevector_set_fns[SCM_ARRAY_ELEMENT_TYPE_LAST + 1] =
+{
+ NULL, /* SCM */
+ NULL, /* CHAR */
+ NULL, /* BIT */
+ scm_bytevector_u8_set_x, /* VU8 */
+ scm_bytevector_u8_set_x, /* U8 */
+ scm_bytevector_s8_set_x,
+ scm_bytevector_u16_native_set_x,
+ scm_bytevector_s16_native_set_x,
+ scm_bytevector_u32_native_set_x,
+ scm_bytevector_s32_native_set_x,
+ scm_bytevector_u64_native_set_x,
+ scm_bytevector_s64_native_set_x,
+ scm_bytevector_ieee_single_native_set_x,
+ scm_bytevector_ieee_double_native_set_x,
+ bytevector_set_c32,
+ bytevector_set_c64
+};
+
+static void
+bv_handle_set_x (scm_t_array_handle *h, size_t index, SCM val)
+{
+ SCM byte_index;
+ scm_t_bytevector_set_fn set_fn;
+
+ set_fn = bytevector_set_fns[h->element_type];
+ byte_index =
+ scm_from_size_t (index * scm_array_handle_uniform_element_size (h));
+ set_fn (h->array, byte_index, val);
+}
+
/* Initialize array handle H so that it describes bytevector V as a
   one-dimensional array of V's element type.  */
static void
bytevector_get_handle (SCM v, scm_t_array_handle *h)
{
  h->array = v;
  h->ndims = 1;                 /* bytevectors are rank-1 arrays */
  h->dims = &h->dim0;
  h->dim0.lbnd = 0;
  /* Upper bound counts elements of the typed view, not bytes.
     NOTE(review): presumably ubnd is signed so an empty bytevector
     yields -1 (empty range) -- confirm the handle's field type.  */
  h->dim0.ubnd = SCM_BYTEVECTOR_TYPED_LENGTH (v) - 1;
  h->dim0.inc = 1;
  h->element_type = SCM_BYTEVECTOR_ELEMENT_TYPE (v);
  h->elements = h->writable_elements = SCM_BYTEVECTOR_CONTENTS (v);
}