]>
Witch of Git - ivy/blob - rt/src/lib.rs
1 use crate::{int
::ObjInt
, lam
::ObjLam
};
2 use std
::sync
::atomic
::{AtomicU32
, Ordering
};
/// POSIX file descriptor for standard output. Currently unused; the leading
/// underscore silences the dead-code lint.
const _STDOUT: i32 = 1;
/// POSIX file descriptor for standard error; `ivy_abort` writes its message here.
const STDERR: i32 = 2;
14 ($fmt
:literal $
(, $arg
:expr
)* $
(,)?
) => {
15 if std
::env
::var("IVY_RT_TRACE").is
_ok
() {
16 eprintln
!($fmt
, $
($arg
),*);
22 #[derive(PartialEq, Eq, Clone, Copy)]
29 pub struct ObjHeader
{
35 #[derive(Clone, Copy)]
39 header
: *mut ObjHeader
,
45 pub unsafe extern "C" fn ivy_debug(obj
: Obj
) -> Obj
{
46 println
!("DEBUG {:016x}", obj
.int
);
51 pub unsafe extern "C" fn ivy_abort(msg
: *const u8, len
: usize) -> ! {
52 sys
::write(STDERR
, msg
, len
);
57 pub unsafe extern "C" fn ivy_exit(code
: i32) -> ! {
62 pub unsafe extern "C" fn ivy_free(obj
: Obj
) {
66 sys
::free(obj
.header
as *mut u8)
70 pub unsafe extern "C" fn ivy_incref(obj
: Obj
) {
75 pub unsafe extern "C" fn ivy_decref(obj
: Obj
) {
80 pub unsafe extern "C" fn ivy_clone(obj
: Obj
) -> Obj
{
81 if obj
.is
_n
ull
() || !obj
.is
_box
() {
85 None
=> unreachable
!(),
86 Some(ObjTag
::Int
) => {
87 unimplemented
!("copying boxed integers")
89 Some(ObjTag
::Lam
) => {
90 let lam
= &*obj
.box_lam
;
91 let size
= lam
.size();
92 let data
= sys
::malloc(size
);
93 core
::ptr
::copy(obj
.box_lam
as *const u8, data
, size
);
94 let box_hdr
= data
as *mut ObjHeader
;
95 *(*box_hdr
).rc
.get_mut() = 0;
96 trace
!("COPY {:016x} {:016x}", obj
.int
, box_hdr
as usize);
97 let box_lam
= data
as *mut ObjLam
;
104 fn is_null(self) -> bool
{
105 unsafe { self.int
== 0 }
108 fn is_box(self) -> bool
{
109 !self.is
_n
ull
() && unsafe { self.int
& 1 == 0 }
112 unsafe fn tag(self) -> Option
<ObjTag
> {
115 } else if self.is
_box
() {
116 Some((*self.header
).tag
)
122 unsafe fn incref(self) {
126 // Ordering::Relaxed is appropriate here, since we assume that each thread with access to a
127 // reference owns at least one reference (rather than simply borrowing it). Therefore,
128 // another thread cannot decrement it to 0 while we are performing this increment (since we
129 // own a reference), so we only need consistency and not ordering.
130 (*self.header
).rc
.fetch
_add
(1, Ordering
::Relaxed
);
133 unsafe fn decref(self) {
137 // Ordering::AcqRel is appropriate here. I believe we need the Acquire in order to ensure
138 // we see all previous increments/decrements, so we can properly see that the decref is
139 // decrementing to 0, and we need the Release in order to ensure that we see all writes to
140 // the memory before we deallocate.
141 // (Check against 1 instead of 0 since we're loading the old refcount.)
142 if (*self.header
).rc
.fetch
_s
ub
(1, Ordering
::AcqRel
) == 1 {
147 unsafe fn dealloc(self) {
148 trace
!("FREE {:016x}", self.int
);
153 None
| Some(ObjTag
::Int
) => (),
154 Some(ObjTag
::Lam
) => {
155 let lam
= &mut *self.box_lam
;
156 for param
in lam
.params() {
159 for upvar
in lam
.upvars
() {
164 sys
::free(self.header
as *mut u8);