1
Fork 0
mirror of https://git.savannah.gnu.org/git/guile.git synced 2025-05-02 21:10:27 +02:00

64-bit intrinsic args and return values passed indirectly on 32-bit

* libguile/intrinsics.h (INDIRECT_INT64_INTRINSICS): New definition.  If
  true, int64 args and return values are passed by reference.  Here to
  make JIT easier.
* libguile/intrinsics.c (indirect_scm_to_int64, indirect_scm_to_uint64):
  (indirect_scm_to_uint64_truncate, indirect_scm_from_int64):
  (indirect_scm_from_uint64, indirect_lsh, indirect_rsh): New indirect
  variants.
  (scm_bootstrap_intrinsics): Use indirect variants as appropriate.
* libguile/vm-engine.c: Update to call indirect intrinsics if
  appropriate.
This commit is contained in:
Andy Wingo 2018-08-13 16:27:11 +02:00
parent d4abe8bbed
commit 0188bd3816
3 changed files with 113 additions and 10 deletions

View file

@ -95,6 +95,34 @@ scm_to_uint64_truncate (SCM x)
return scm_to_uint64 (scm_logand (x, scm_from_uint64 ((uint64_t) -1)));
}
#if INDIRECT_INT64_INTRINSICS
/* Indirect variant of scm_to_int64: write the converted value through
   OUT instead of returning it, for targets whose registers can't hold
   a 64-bit value.  */
static void
indirect_scm_to_int64 (int64_t *out, SCM obj)
{
  int64_t v = scm_to_int64 (obj);
  *out = v;
}
/* Indirect variant of scm_to_uint64: write the converted value through
   OUT instead of returning it.  */
static void
indirect_scm_to_uint64 (uint64_t *out, SCM obj)
{
  uint64_t v = scm_to_uint64 (obj);
  *out = v;
}
/* Indirect variant of scm_to_uint64_truncate: write the truncated
   conversion result through OUT instead of returning it.  */
static void
indirect_scm_to_uint64_truncate (uint64_t *out, SCM obj)
{
  uint64_t v = scm_to_uint64_truncate (obj);
  *out = v;
}
/* Indirect variant of scm_from_int64: the 64-bit input is passed by
   reference rather than by value.  */
static SCM
indirect_scm_from_int64 (int64_t *src)
{
  int64_t v = *src;
  return scm_from_int64 (v);
}
/* Indirect variant of scm_from_uint64: the 64-bit input is passed by
   reference rather than by value.  */
static SCM
indirect_scm_from_uint64 (uint64_t *src)
{
  uint64_t v = *src;
  return scm_from_uint64 (v);
}
#endif
static SCM
logsub (SCM x, SCM y)
{
@ -206,6 +234,19 @@ rsh (SCM a, uint64_t b)
return scm_ash (a, scm_difference (SCM_INUM0, scm_from_uint64 (b)));
}
#if INDIRECT_INT64_INTRINSICS
/* Indirect variant of lsh: the 64-bit shift count is passed by
   reference rather than by value.  */
static SCM
indirect_lsh (SCM obj, uint64_t *count)
{
  uint64_t n = *count;
  return lsh (obj, n);
}
/* Indirect variant of rsh: the 64-bit shift count is passed by
   reference rather than by value.  */
static SCM
indirect_rsh (SCM obj, uint64_t *count)
{
  uint64_t n = *count;
  return rsh (obj, n);
}
#endif
static SCM
lsh_immediate (SCM a, uint8_t b)
{
@ -390,11 +431,19 @@ scm_bootstrap_intrinsics (void)
scm_vm_intrinsics.symbol_to_keyword = scm_symbol_to_keyword;
scm_vm_intrinsics.class_of = scm_class_of;
scm_vm_intrinsics.scm_to_f64 = scm_to_double;
#if INDIRECT_INT64_INTRINSICS
scm_vm_intrinsics.scm_to_u64 = indirect_scm_to_uint64;
scm_vm_intrinsics.scm_to_u64_truncate = indirect_scm_to_uint64_truncate;
scm_vm_intrinsics.scm_to_s64 = indirect_scm_to_int64;
scm_vm_intrinsics.u64_to_scm = indirect_scm_from_uint64;
scm_vm_intrinsics.s64_to_scm = indirect_scm_from_int64;
#else
scm_vm_intrinsics.scm_to_u64 = scm_to_uint64;
scm_vm_intrinsics.scm_to_u64_truncate = scm_to_uint64_truncate;
scm_vm_intrinsics.scm_to_s64 = scm_to_int64;
scm_vm_intrinsics.u64_to_scm = scm_from_uint64;
scm_vm_intrinsics.s64_to_scm = scm_from_int64;
#endif
scm_vm_intrinsics.logsub = logsub;
scm_vm_intrinsics.wind = wind;
scm_vm_intrinsics.unwind = unwind;
@ -404,8 +453,13 @@ scm_bootstrap_intrinsics (void)
scm_vm_intrinsics.fluid_set_x = fluid_set_x;
scm_vm_intrinsics.push_dynamic_state = push_dynamic_state;
scm_vm_intrinsics.pop_dynamic_state = pop_dynamic_state;
#if INDIRECT_INT64_INTRINSICS
scm_vm_intrinsics.lsh = indirect_lsh;
scm_vm_intrinsics.rsh = indirect_rsh;
#else
scm_vm_intrinsics.lsh = lsh;
scm_vm_intrinsics.rsh = rsh;
#endif
scm_vm_intrinsics.lsh_immediate = lsh_immediate;
scm_vm_intrinsics.rsh_immediate = rsh_immediate;
scm_vm_intrinsics.heap_numbers_equal_p = scm_i_heap_numbers_equal_p;

View file

@ -34,15 +34,33 @@ typedef SCM (*scm_t_scm_from_scm_uimm_intrinsic) (SCM, uint8_t);
typedef void (*scm_t_scm_sz_u32_intrinsic) (SCM, size_t, uint32_t);
typedef SCM (*scm_t_scm_from_scm_intrinsic) (SCM);
typedef double (*scm_t_f64_from_scm_intrinsic) (SCM);

/* If we don't have 64-bit registers, the intrinsics will take and
   return 64-bit values by reference.  */
#if SIZEOF_UINTPTR_T >= 8
#define INDIRECT_INT64_INTRINSICS 0
#else
#define INDIRECT_INT64_INTRINSICS 1
#endif

#if INDIRECT_INT64_INTRINSICS
/* Indirect forms: 64-bit arguments and results travel by pointer.  */
typedef void (*scm_t_u64_from_scm_intrinsic) (uint64_t*, SCM);
typedef void (*scm_t_s64_from_scm_intrinsic) (int64_t*, SCM);
typedef SCM (*scm_t_scm_from_u64_intrinsic) (uint64_t*);
typedef SCM (*scm_t_scm_from_s64_intrinsic) (int64_t*);
typedef SCM (*scm_t_scm_from_scm_u64_intrinsic) (SCM, uint64_t*);
#else
/* Direct forms: 64-bit arguments and results travel by value.  */
typedef uint64_t (*scm_t_u64_from_scm_intrinsic) (SCM);
typedef int64_t (*scm_t_s64_from_scm_intrinsic) (SCM);
typedef SCM (*scm_t_scm_from_u64_intrinsic) (uint64_t);
typedef SCM (*scm_t_scm_from_s64_intrinsic) (int64_t);
typedef SCM (*scm_t_scm_from_scm_u64_intrinsic) (SCM, uint64_t);
#endif

typedef void (*scm_t_thread_intrinsic) (scm_thread*);
typedef void (*scm_t_thread_scm_intrinsic) (scm_thread*, SCM);
typedef void (*scm_t_thread_scm_scm_intrinsic) (scm_thread*, SCM, SCM);
typedef SCM (*scm_t_scm_from_thread_scm_intrinsic) (scm_thread*, SCM);
/* NOTE(review): the stale by-value re-declaration of
   scm_t_scm_from_scm_u64_intrinsic that followed here conflicted with
   the indirect (uint64_t*) definition above when
   INDIRECT_INT64_INTRINSICS is 1 — a typedef redeclaration with an
   incompatible type.  It has been removed; both variants now live
   solely inside the conditional block above.  */
typedef int (*scm_t_bool_from_scm_scm_intrinsic) (SCM, SCM);
typedef enum scm_compare (*scm_t_compare_from_scm_scm_intrinsic) (SCM, SCM);
typedef void (*scm_t_thread_sp_intrinsic) (scm_thread*, union scm_vm_stack_element*);

View file

@ -1420,16 +1420,24 @@ VM_NAME (scm_thread *thread)
/* call-u64<-scm dst:12 src:12 idx:32
 *
 * Call the SCM->u64 intrinsic with index IDX, converting the SCM value
 * in slot SRC and storing the 64-bit result in slot DST.  On targets
 * where INDIRECT_INT64_INTRINSICS is set, the result is written through
 * an out-pointer into the stack slot instead of being returned.  */
VM_DEFINE_OP (56, call_u64_from_scm, "call-u64<-scm", DOP2 (X8_S12_S12, C32))
  {
    uint16_t dst, src;
    scm_t_u64_from_scm_intrinsic intrinsic;

    UNPACK_12_12 (op, dst, src);
    intrinsic = intrinsics[ip[1]];

    SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
    /* Write the result directly into the destination stack slot.  */
    intrinsic (& SP_REF_U64 (dst), SP_REF (src));
#else
    {
      uint64_t res = intrinsic (SP_REF (src));
      SP_SET_U64 (dst, res);
    }
#endif
    /* No CACHE_SP () after the intrinsic, as the indirect variants
       have an out argument that points at the stack; stack relocation
       during this kind of intrinsic is not supported!  */

    NEXT (2);
  }
@ -1677,16 +1685,24 @@ VM_NAME (scm_thread *thread)
/* call-s64<-scm dst:12 src:12 idx:32
 *
 * Call the SCM->s64 intrinsic with index IDX, converting the SCM value
 * in slot SRC and storing the signed 64-bit result in slot DST.  On
 * targets where INDIRECT_INT64_INTRINSICS is set, the result is written
 * through an out-pointer into the stack slot instead of being
 * returned.  */
VM_DEFINE_OP (77, call_s64_from_scm, "call-s64<-scm", DOP2 (X8_S12_S12, C32))
  {
    uint16_t dst, src;
    scm_t_s64_from_scm_intrinsic intrinsic;

    UNPACK_12_12 (op, dst, src);
    intrinsic = intrinsics[ip[1]];

    SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
    /* Write the result directly into the destination stack slot.  */
    intrinsic (& SP_REF_S64 (dst), SP_REF (src));
#else
    {
      int64_t res = intrinsic (SP_REF (src));
      SP_SET_S64 (dst, res);
    }
#endif
    /* No CACHE_SP () after the intrinsic, as the indirect variants
       have an out argument that points at the stack; stack relocation
       during this kind of intrinsic is not supported!  */

    NEXT (2);
  }
@ -1701,10 +1717,17 @@ VM_NAME (scm_thread *thread)
intrinsic = intrinsics[ip[1]];
SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
res = intrinsic (& SP_REF_U64 (src));
#else
res = intrinsic (SP_REF_U64 (src));
CACHE_SP ();
#endif
SP_SET (dst, res);
/* No CACHE_SP () after the intrinsic, as the indirect variants
pass stack pointers directly; stack relocation during this kind
of intrinsic is not supported! */
NEXT (2);
}
@ -1718,7 +1741,11 @@ VM_NAME (scm_thread *thread)
intrinsic = intrinsics[ip[1]];
SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
res = intrinsic (& SP_REF_S64 (src));
#else
res = intrinsic (SP_REF_S64 (src));
#endif
CACHE_SP ();
SP_SET (dst, res);
@ -1872,7 +1899,11 @@ VM_NAME (scm_thread *thread)
intrinsic = intrinsics[ip[1]];
SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
res = intrinsic (SP_REF (a), & SP_REF_U64 (b));
#else
res = intrinsic (SP_REF (a), SP_REF_U64 (b));
#endif
CACHE_SP ();
SP_SET (dst, res);