ivy / rt / src / lib.rs
use std::sync::atomic::{Ordering, AtomicU32};

const _STDIN: i32 = 0;
const _STDOUT: i32 = 1;
const STDERR: i32 = 2;

macro_rules! trace {
    ($fmt:literal $(, $arg:expr)* $(,)?) => {
        if std::env::var("IVY_RT_TRACE").is_ok() {
            eprintln!($fmt, $($arg),*);
        }
    }
}

#[repr(u8)]
#[derive(PartialEq, Eq)]
pub enum ObjTag {
    Lam = 0,
    Int = 1,
}

#[repr(C)]
pub struct ObjHeader {
    tag: ObjTag,
    _pad: [u8; 3],
    rc: AtomicU32,
}

#[repr(C)]
pub struct ObjInt {
    tag: ObjTag,
    _pad: [u8; 3],
    rc: AtomicU32,
    value: i64,
}

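// The parameter and upvar slots of a closure are stored inline, immediately
// after this header: first `params` argument slots, then `upvars` capture
// slots (see `raw_fields_mut`, `params_mut`, and `upvars_mut` below).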
#[repr(C)]
pub struct ObjLam {
    tag: ObjTag,
    _pad: [u8; 1],
    upvars: u16,
    rc: AtomicU32,
    func: extern "C" fn(&ObjLam) -> Obj,
    params: u16,
    filled: u16,
}

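// An `Obj` is a single machine word, distinguished by its low bit:
//   * 0 is the null object,
//   * an odd word is an immediate integer, with the value in the upper 63 bits,
//   * any other (even, malloc-aligned) word is a pointer to a heap object that
//     begins with an `ObjHeader`.
// See `is_null`, `is_box`, and `is_int` below.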
#[derive(Clone, Copy)]
#[repr(C)]
pub union Obj {
    int: i64,
    header: *mut ObjHeader,
    box_lam: *mut ObjLam,
}

mod sys {
    extern "C" {
        pub fn write(fd: i32, buf: *const u8, len: usize) -> isize;
        pub fn exit(code: i32) -> !;
        pub fn malloc(size: usize) -> *mut u8;
        pub fn free(ptr: *mut u8);
    }
}

#[no_mangle]
pub unsafe extern "C" fn ivy_debug(obj: Obj) -> Obj {
    println!("DEBUG {:016x}", obj.int);
    obj
}

#[no_mangle]
pub unsafe extern "C" fn ivy_abort(msg: *const u8, len: usize) -> ! {
    sys::write(STDERR, msg, len);
    sys::exit(1);
}

#[no_mangle]
pub unsafe extern "C" fn ivy_exit(code: i32) -> ! {
    sys::exit(code)
}

#[no_mangle]
pub unsafe extern "C" fn ivy_check_int(obj: Obj) {
    if !obj.is_int() {
        panic!("ivy_check_int called with non-integer object {:016x}.", obj.int);
    }
}

#[no_mangle]
pub unsafe extern "C" fn ivy_check_lam(obj: Obj) {
    if !obj.is_lam() {
        panic!("ivy_check_lam called with non-lambda object {:016x}.", obj.int);
    }
}

// This should probably be a macro rather than a call?
// But it might be good to have it for completeness.
// Or maybe it's valuable if we want to support big integers.
#[no_mangle]
pub unsafe extern "C" fn ivy_make_int(value: i64) -> Obj {
    // Shift the value up one bit and set the low tag bit that marks an
    // immediate integer; `is_box` treats even non-null words as heap pointers.
    Obj { int: (value << 1) | 1 }
}

#[no_mangle]
pub unsafe extern "C" fn ivy_make_lam(
    func: extern "C" fn(&ObjLam) -> Obj,
    params: u16,
    upvars: u16,
) -> Obj {
    let size = ObjLam::size_of(params, upvars);
    let box_lam = sys::malloc(size) as *mut ObjLam;
    box_lam.write(ObjLam {
        tag: ObjTag::Lam,
        _pad: [0; 1],
        upvars,
        rc: AtomicU32::new(0),
        func,
        params,
        filled: 0,
    });
    (*box_lam)
        .raw_fields_mut()
        .write_bytes(0, (params + upvars) as usize);
    trace!("MAKE {:016x} {:016x}", box_lam as usize, func as usize);
    Obj { box_lam }
}

#[no_mangle]
pub unsafe extern "C" fn ivy_free(obj: Obj) {
    if !obj.is_box() {
        return;
    }
    sys::free(obj.header as *mut u8)
}

#[no_mangle]
pub unsafe extern "C" fn ivy_incref(obj: Obj) {
    obj.incref();
}

#[no_mangle]
pub unsafe extern "C" fn ivy_decref(obj: Obj) {
    obj.decref();
}

#[no_mangle]
pub unsafe extern "C" fn ivy_clone(obj: Obj) -> Obj {
    if obj.is_null() || !obj.is_box() {
        return obj;
    }
    if obj.is_int() {
        unimplemented!("copying boxed integers")
    }
    let lam = &*obj.box_lam;
    let size = lam.size();
    let data = sys::malloc(size);
    core::ptr::copy(obj.box_lam as *const u8, data, size);
    let box_lam = data as *mut ObjLam;
    *(*box_lam).rc.get_mut() = 0;
    trace!("COPY {:016x} {:016x}", obj.int, box_lam as usize);
    Obj { box_lam }
}

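// `ivy_app` copies the closure before filling in the next argument, so the
// original partial application can be shared and applied again; `ivy_app_mut`
// fills the next argument slot in place and runs the body once every
// parameter is filled.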
#[no_mangle]
pub unsafe extern "C" fn ivy_app(fun: Obj, arg: Obj) -> Obj {
    ivy_app_mut(ivy_clone(fun), arg)
}

#[no_mangle]
pub unsafe extern "C" fn ivy_app_mut(fun: Obj, arg: Obj) -> Obj {
    trace!("APP {:016x} {:016x}", fun.int, arg.int);
    if !fun.is_lam() {
        panic!("ivy_app called with a non-lam as the function: {:016x}.", fun.int);
    }
    let lam = &mut *fun.box_lam;
    if lam.filled < lam.params {
        if arg.is_null() {
            println!(
                "Lam @ {:016x} ({:016x}) has {} of {} arguments filled.",
                fun.int, lam.func as usize, lam.filled, lam.params
            );
            panic!("ivy_app called with a null arg.");
        }
        arg.incref();
        let idx = lam.filled as usize;
        lam.params_mut()[idx] = arg;
        lam.filled += 1;
    } else if lam.params == lam.filled {
        if !arg.is_null() {
            panic!("ivy_app called for a 0-arity application with a non-null arg.");
        }
    }

    if lam.params == lam.filled {
        trace!("RUN {:016x}", fun.int);
        (lam.func)(lam)
    } else {
        trace!("UPD8 {:016x}", fun.int);
        fun.incref();
        fun
    }
}

impl Obj {
    fn is_null(self) -> bool {
        unsafe { self.int == 0 }
    }

    fn is_box(self) -> bool {
        !self.is_null() && unsafe { self.int & 1 == 0 }
    }

    unsafe fn is_int(self) -> bool {
        !self.is_null() && (!self.is_box() || (*self.header).tag == ObjTag::Int)
    }

    unsafe fn is_lam(self) -> bool {
        self.is_box() && (*self.header).tag == ObjTag::Lam
    }

    unsafe fn incref(self) {
        if !self.is_box() {
            return;
        }
        // Ordering::Relaxed is appropriate here, since we assume that each thread with access to a
        // reference owns at least one reference (rather than simply borrowing it). Therefore,
        // another thread cannot decrement it to 0 while we are performing this increment (since we
        // own a reference), so we only need consistency and not ordering.
        (*self.header).rc.fetch_add(1, Ordering::Relaxed);
    }

    unsafe fn decref(self) {
        if !self.is_box() {
            return;
        }
        // Ordering::AcqRel is appropriate here. I believe we need the Acquire in order to ensure
        // we see all previous increments/decrements, so we can properly see that the decref is
        // decrementing to 0, and we need the Release in order to ensure that we see all writes to
        // the memory before we deallocate.
        // (Check against 1 instead of 0 since we're loading the old refcount.)
        if (*self.header).rc.fetch_sub(1, Ordering::AcqRel) == 1 {
            self.dealloc();
        }
    }

    unsafe fn dealloc(self) {
        if !self.is_box() {
            return;
        }
        if self.is_lam() {
            let lam = &mut *self.box_lam;
            for param in lam.params_mut() {
                param.decref();
            }
            for upvar in lam.upvars_mut() {
                upvar.decref();
            }
        }
        sys::free(self.header as *mut u8);
    }
}

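// Layout helpers for the inline slots. The literal `8` below is the size of
// one `Obj` slot (a single 64-bit word).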
impl ObjLam {
    fn size_of(params: u16, upvars: u16) -> usize {
        core::mem::size_of::<ObjLam>() + params as usize * 8 + upvars as usize * 8
    }

    fn size(&self) -> usize {
        ObjLam::size_of(self.params, self.upvars)
    }

    unsafe fn raw_fields_mut(&mut self) -> *mut Obj {
        (self as *mut ObjLam).add(1) as *mut Obj
    }

    unsafe fn params_mut(&mut self) -> &mut [Obj] {
        let ptr = self.raw_fields_mut();
        core::slice::from_raw_parts_mut(ptr, self.params as usize)
    }

    unsafe fn upvars_mut(&mut self) -> &mut [Obj] {
        let ptr = self.raw_fields_mut().add(self.params as usize);
        core::slice::from_raw_parts_mut(ptr, self.upvars as usize)
    }
}
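
// A minimal sketch of how these entry points fit together, written as a unit
// test for this crate. The lambda body `add_one` and the test are illustrative
// assumptions, not part of the runtime's ABI; the body reads its single
// argument slot by offsetting past the ObjLam header, mirroring
// `raw_fields_mut`. The closure objects are deliberately leaked to keep the
// example small.
#[cfg(test)]
mod tests {
    use super::*;

    // Hypothetical lambda body: add one to an immediate integer argument.
    extern "C" fn add_one(lam: &ObjLam) -> Obj {
        unsafe {
            // The first (and only) parameter slot sits directly after the header.
            let arg = *((lam as *const ObjLam).add(1) as *const Obj);
            ivy_make_int((arg.int >> 1) + 1)
        }
    }

    #[test]
    fn apply_add_one() {
        unsafe {
            // Build a one-parameter closure, apply it to the immediate int 2,
            // and check that the result is the immediate int 3.
            let lam = ivy_make_lam(add_one, 1, 0);
            let two = ivy_make_int(2);
            let three = ivy_app(lam, two);
            assert!(three.is_int());
            assert_eq!(three.int >> 1, 3);
        }
    }
}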