// ivy: rt/src/lib.rs
use crate::{int::ObjInt, lam::ObjLam};
use std::sync::atomic::{AtomicU32, Ordering};

pub mod int;
pub mod lam;
pub mod sys;

const _STDIN: i32 = 0;
const _STDOUT: i32 = 1;
const STDERR: i32 = 2;

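// Logging sketch: with IVY_RT_TRACE set in the environment,
// `trace!("INC {:016x}", 0x2a)` prints "INC 000000000000002a" to stderr;
// without it, the expansion does nothing beyond the env-var lookup.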
#[macro_export]
macro_rules! trace {
    ($fmt:literal $(, $arg:expr)* $(,)?) => {
        if std::env::var("IVY_RT_TRACE").is_ok() {
            eprintln!($fmt, $($arg),*);
        }
    }
}

#[repr(u8)]
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum ObjTag {
    Lam = 0,
    Int = 1,
}

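// Header shared by all heap objects. With #[repr(C)] this is one tag byte,
// three bytes of explicit padding, and a 32-bit refcount: 8 bytes total.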
#[repr(C)]
pub struct ObjHeader {
    tag: ObjTag,
    _pad: [u8; 3],
    rc: AtomicU32,
}

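// An Obj is one machine word interpreted by its low bits (see is_null and
// is_box below): the zero word is null, an odd word is an unboxed integer,
// and any other word is a pointer to a heap object starting with ObjHeader.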
#[derive(Clone, Copy)]
#[repr(C)]
pub union Obj {
    int: i64,
    header: *mut ObjHeader,
    box_lam: *mut ObjLam,
    box_int: *mut ObjInt,
}

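// Entry points exposed to compiled code. Presumably the ivy compiler emits
// calls to these unmangled C-ABI symbols; each receives Obj words by value.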
#[no_mangle]
pub unsafe extern "C" fn ivy_debug(obj: Obj) -> Obj {
    println!("DEBUG {:016x}", obj.int);
    obj
}

#[no_mangle]
pub unsafe extern "C" fn ivy_abort(msg: *const u8, len: usize) -> ! {
    sys::write(STDERR, msg, len);
    sys::exit(1);
}

#[no_mangle]
pub unsafe extern "C" fn ivy_exit(code: i32) -> ! {
    sys::exit(code)
}

#[no_mangle]
pub unsafe extern "C" fn ivy_free(obj: Obj) {
    if !obj.is_box() {
        return;
    }
    sys::free(obj.header as *mut u8)
}

#[no_mangle]
pub unsafe extern "C" fn ivy_incref(obj: Obj) {
    obj.incref();
}

#[no_mangle]
pub unsafe extern "C" fn ivy_decref(obj: Obj) {
    obj.decref();
}

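// Copy an object. Null and unboxed integers are immediate values and are
// returned unchanged; a boxed lambda is copied via lam::ivy_clone_lam, and
// boxed integers are not yet supported.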
#[no_mangle]
pub unsafe extern "C" fn ivy_clone(obj: Obj) -> Obj {
    if obj.is_null() || !obj.is_box() {
        return obj;
    }
    match obj.tag() {
        None => unreachable!(),
        Some(ObjTag::Int) => {
            unimplemented!("copying boxed integers")
        }
        Some(ObjTag::Lam) => {
            let box_lam = lam::ivy_clone_lam(&*obj.box_lam);
            Obj { box_lam }
        }
    }
}

impl Obj {
    fn is_null(self) -> bool {
        unsafe { self.int == 0 }
    }

    fn is_box(self) -> bool {
        !self.is_null() && unsafe { self.int & 1 == 0 }
    }

    unsafe fn tag(self) -> Option<ObjTag> {
        if self.is_null() {
            None
        } else if self.is_box() {
            Some((*self.header).tag)
        } else {
            Some(ObjTag::Int)
        }
    }

    unsafe fn incref(self) {
        trace!("INC {:016x}", self.int);
        if !self.is_box() {
            return;
        }
        // Ordering::Relaxed is appropriate here: we assume each thread that
        // can reach an object owns at least one reference to it (rather than
        // merely borrowing one), so another thread cannot drop the count to 0
        // while this increment is in flight. We need atomicity, not ordering.
        (*self.header).rc.fetch_add(1, Ordering::Relaxed);
    }

    unsafe fn decref(self) {
        trace!("DEC {:016x}", self.int);
        if !self.is_box() {
            return;
        }
        // Ordering::AcqRel is appropriate here. The Release ensures all of
        // this thread's uses of the object happen-before the count reaches 0,
        // and the Acquire ensures that when we do see it hit 0, every other
        // thread's writes to the object are visible before we deallocate.
        // (Check against 1 instead of 0 since fetch_sub returns the old
        // refcount.)
        if (*self.header).rc.fetch_sub(1, Ordering::AcqRel) == 1 {
            self.dealloc();
        }
    }

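    // Free an object's storage. A lambda first releases its stored
    // arguments and captured upvars; boxed integers hold no references,
    // so the allocation is returned to the allocator with no extra work.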
    unsafe fn dealloc(self) {
        trace!("FREE {:016x}", self.int);
        if !self.is_box() {
            return;
        }
        match self.tag() {
            None | Some(ObjTag::Int) => (),
            Some(ObjTag::Lam) => {
                let lam = &mut *self.box_lam;
                for param in lam.params() {
                    param.decref();
                }
                for upvar in lam.upvars() {
                    upvar.decref();
                }
            }
        }
        sys::free(self.header as *mut u8);
    }
}
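
// A minimal sketch of tests for the low-bit tagging predicates, assuming
// the representation described above; the fake "boxed" word below is only
// inspected bit-wise and never dereferenced.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tagging_predicates() {
        // The zero word is null and not a box.
        let null = Obj { int: 0 };
        assert!(null.is_null());
        assert!(!null.is_box());

        // An odd word is an immediate, unboxed integer.
        let imm = Obj { int: 5 };
        assert!(!imm.is_null());
        assert!(!imm.is_box());
        assert!(matches!(unsafe { imm.tag() }, Some(ObjTag::Int)));

        // Any even, non-null word reads as a heap pointer; is_box only
        // looks at the bits, so nothing is dereferenced here.
        let boxed = Obj { int: 8 };
        assert!(boxed.is_box());
    }
}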