// ivy runtime: rt/src/lib.rs
use crate::{int::ObjInt, lam::ObjLam};
use std::sync::atomic::{AtomicU32, Ordering};

// Module declarations reconstructed: this file is the crate root, and the
// int::, lam::, and sys:: paths used below must be declared here.
mod int;
mod lam;
mod sys;
const _STDOUT: i32 = 1;
const STDERR: i32 = 2;
// Prints to stderr, gated on the IVY_RT_TRACE environment variable.
// (Macro name reconstructed from the trace! call sites below.)
macro_rules! trace {
    ($fmt:literal $(, $arg:expr)* $(,)?) => {
        if std::env::var("IVY_RT_TRACE").is_ok() {
            eprintln!($fmt, $($arg),*);
        }
    };
}
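// A minimal sketch (not in the original file) of driving the macro: only the
// variable's presence is checked, so any value enables tracing, for example
// `IVY_RT_TRACE=1 ./program`.
#[cfg(test)]
mod trace_demo {
    #[test]
    fn trace_is_env_gated() {
        // Prints to stderr only when IVY_RT_TRACE is set; otherwise a no-op.
        trace!("TRACE {:016x}", 0xdead_beef_usize);
    }
}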
// ObjTag reconstructed from its uses (ObjTag::Lam, ObjTag::Int) below.
#[derive(PartialEq, Eq)]
enum ObjTag {
    Lam,
    Int,
}

// Fields reconstructed from their uses (rc in incref/decref and ivy_clone,
// tag in is_int/is_lam); the original layout may differ.
pub struct ObjHeader {
    rc: AtomicU32,
    tag: ObjTag,
}
// Obj is one machine word, viewed either as raw bits or as a pointer; the
// int, box_int, and box_lam fields are reconstructed from their uses below.
#[derive(Clone, Copy)]
pub union Obj {
    int: usize,
    header: *mut ObjHeader,
    box_int: *mut ObjInt,
    box_lam: *mut ObjLam,
}
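// A minimal sketch (not in the original file) of the tagging invariant the
// predicates below rely on: zero is null, an odd word is an unboxed small
// integer (assuming a `(n << 1) | 1` encoding, which the original may not
// use), and an even non-zero word points at a heap object's ObjHeader.
#[cfg(test)]
mod tag_demo {
    use super::Obj;

    #[test]
    fn tagging_predicates() {
        // 42 encoded as a hypothetical small int: low bit set, so not a box.
        let small = Obj { int: (42usize << 1) | 1 };
        assert!(!small.is_null());
        assert!(!small.is_box());

        // The all-zero word is the null object.
        let null = Obj { int: 0 };
        assert!(null.is_null());
        assert!(!null.is_box());
    }
}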
// #[no_mangle] is assumed on these extern "C" entry points; the attribute
// lines are not visible in this listing.
#[no_mangle]
pub unsafe extern "C" fn ivy_debug(obj: Obj) -> Obj {
    println!("DEBUG {:016x}", obj.int);
    obj
}
#[no_mangle]
pub unsafe extern "C" fn ivy_abort(msg: *const u8, len: usize) -> ! {
    sys::write(STDERR, msg, len);
    // Tail reconstructed: the function must diverge, and sys::exit is the
    // obvious candidate given ivy_exit below; the exit code is a guess.
    sys::exit(1)
}
#[no_mangle]
pub unsafe extern "C" fn ivy_exit(code: i32) -> ! {
    // Body reconstructed from the signature.
    sys::exit(code)
}
#[no_mangle]
pub unsafe extern "C" fn ivy_free(obj: Obj) {
    // Guard reconstructed: null and unboxed values own no heap allocation.
    if obj.is_null() || !obj.is_box() {
        return;
    }
    sys::free(obj.header as *mut u8)
}
#[no_mangle]
pub unsafe extern "C" fn ivy_incref(obj: Obj) {
    // Body reconstructed: only boxed objects carry a refcount.
    if obj.is_box() {
        obj.incref();
    }
}
#[no_mangle]
pub unsafe extern "C" fn ivy_decref(obj: Obj) {
    // Body reconstructed to mirror ivy_incref.
    if obj.is_box() {
        obj.decref();
    }
}
#[no_mangle]
pub unsafe extern "C" fn ivy_clone(obj: Obj) -> Obj {
    // Control flow around the surviving lines is reconstructed. Null and
    // unboxed ints are plain words: copying the word is the clone.
    if obj.is_null() || !obj.is_box() {
        return obj;
    }
    if obj.is_int() {
        unimplemented!("copying boxed integers")
    }
    // Shallow-copy the closure's allocation, then reset the copy's refcount
    // to zero; the new object has no owners until someone increfs it.
    let lam = &*obj.box_lam;
    let size = lam.size();
    let data = sys::malloc(size);
    core::ptr::copy(obj.box_lam as *const u8, data, size);
    let box_hdr = data as *mut ObjHeader;
    *(*box_hdr).rc.get_mut() = 0;
    trace!("COPY {:016x} {:016x}", obj.int, box_hdr as usize);
    let box_lam = data as *mut ObjLam;
    Obj { box_lam }
}

impl Obj {
    fn is_null(self) -> bool {
        unsafe { self.int == 0 }
    }
    fn is_box(self) -> bool {
        !self.is_null() && unsafe { self.int & 1 == 0 }
    }
    unsafe fn is_int(self) -> bool {
        !self.is_null() && (!self.is_box() || (*self.header).tag == ObjTag::Int)
    }
    unsafe fn is_lam(self) -> bool {
        self.is_box() && (*self.header).tag == ObjTag::Lam
    }
    unsafe fn incref(self) {
        // Ordering::Relaxed is appropriate here, since we assume that each thread with access to a
        // reference owns at least one reference (rather than simply borrowing it). Therefore,
        // another thread cannot decrement it to 0 while we are performing this increment (since we
        // own a reference), so we only need consistency and not ordering.
        (*self.header).rc.fetch_add(1, Ordering::Relaxed);
    }
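    // Aside (not from the original file): std::sync::Arc::clone uses this
    // same Relaxed increment, with one extra guard, aborting the process if
    // the count would overflow (e.g. via mem::forget in a loop):
    //
    //     let old = self.inner().strong.fetch_add(1, Ordering::Relaxed);
    //     if old > MAX_REFCOUNT { abort(); }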
    unsafe fn decref(self) {
        // Ordering::AcqRel is appropriate here. I believe we need the Acquire in order to ensure
        // we see all previous increments/decrements, so we can properly see that the decref is
        // decrementing to 0, and we need the Release in order to ensure that we see all writes to
        // the memory before we deallocate.
        // (Check against 1 instead of 0 since we're loading the old refcount.)
        if (*self.header).rc.fetch_sub(1, Ordering::AcqRel) == 1 {
            self.dealloc();
        }
    }
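    // Aside (not from the original file): std::sync::Arc reaches the same
    // guarantee slightly more cheaply, with a Release decrement on every
    // drop and an Acquire fence only on the thread that hit zero:
    //
    //     if self.inner().strong.fetch_sub(1, Ordering::Release) != 1 {
    //         return;
    //     }
    //     std::sync::atomic::fence(Ordering::Acquire);
    //     // ...run Drop and free the allocation...
    //
    // AcqRel on every decrement, as above, is the simpler, stricter choice.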
    unsafe fn dealloc(self) {
        // Closure path (any tag dispatch above it is not shown): release
        // every object the lam still holds, then free its own allocation.
        // The loop bodies are reconstructed; unfilled params may be null,
        // so they go through the guarded entry point.
        let lam = &mut *self.box_lam;
        for param in lam.params_mut() {
            ivy_decref(*param);
        }
        for upvar in lam.upvars_mut() {
            ivy_decref(*upvar);
        }
        sys::free(self.header as *mut u8);
    }
}