Skip to content

Trace ptr_or_offset in jl_genericmemoryref_t #252

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 6 commits into
base: mmtk-support-moving-upstream
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion mmtk/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name = "mmtk-julia"
version = "0.30.3"
authors = ["Eduardo Souza <ledusou@gmail.com>", "Yi Lin <qinsoon@gmail.com>"]
build = "build.rs"
edition = "2018"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

Expand Down
83 changes: 65 additions & 18 deletions mmtk/src/julia_scanning.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,54 @@ pub unsafe fn mmtk_jl_to_typeof(t: Address) -> *const jl_datatype_t {
t.to_ptr::<jl_datatype_t>()
}

// jl_genericmemoryref_t embeds two pointer fields (ptr_or_offset and mem), but Julia's
// layout metadata only identifies it via mem. ptr_or_offset therefore has to be traced
// explicitly here; mem is still handled by the generic data-type tracing path.
fn trace_ptr_or_offset_in_genericmemoryref<SV: SlotVisitor<JuliaVMSlot>>(
    closure: &mut SV,
    r: *mut jl_genericmemoryref_t,
) {
    let ptr_or_offset = unsafe { (*r).ptr_or_offset };
    if !mmtk_object_is_managed_by_mmtk(ptr_or_offset as usize) {
        return;
    }
    // The address of the ptr_or_offset field itself — this is the slot the GC may update.
    let ptr_or_ref_slot = Address::from_ptr(unsafe { ::std::ptr::addr_of!((*r).ptr_or_offset) });
    // Sanity check: the slot really contains the value we read above.
    assert_eq!(ptr_or_offset, unsafe {
        ptr_or_ref_slot.load::<*mut std::ffi::c_void>()
    });
    trace_internal_pointer(ptr_or_ref_slot, closure);
}

// Trace an interior pointer stored at `slot`. The stored pointer may point into the
// middle of an object rather than at its header, so resolve the enclosing object
// first; if that object is moved, the slot is updated with the interior offset
// re-applied so it keeps pointing at the same field.
fn trace_internal_pointer(slot: Address, closure: &mut impl SlotVisitor<JuliaVMSlot>) {
    let internal_pointer = unsafe { slot.load::<Address>() };
    // Resolve the object containing this interior pointer (no upper bound on the search).
    let Some(object) =
        memory_manager::find_object_from_internal_pointer(internal_pointer, usize::MAX)
    else {
        return;
    };
    // Preserve the interior offset across a potential move of the base object.
    let offset = internal_pointer - object.to_raw_address();
    process_offset_slot(closure, slot, offset);
}

// Symbol names used to recognize `RefValue{GenericMemoryRef}` instances, which carry
// an interior pointer (ptr_or_offset) that must be traced explicitly.
const REFVALUE_NAME: &std::ffi::CStr = c"RefValue";
const GENERICMEMORYREF_NAME: &std::ffi::CStr = c"GenericMemoryRef";

/// Returns true when `dt` is the datatype `RefValue{T}` where `T` is itself a
/// datatype whose type name equals `name` (e.g. `RefValue{GenericMemoryRef}`).
///
/// NOTE(review): this matches on symbol names only, not on the defining module —
/// a user-defined type also named `RefValue` would match; confirm this is acceptable.
fn is_refvalue_of(dt: *const jl_datatype_t, name: &std::ffi::CStr) -> bool {
    unsafe {
        // The outer type's name must be exactly "RefValue".
        if (*(*dt).name).name != crate::jl_symbol(REFVALUE_NAME.as_ptr() as *const i8) {
            return false;
        }

        // RefValue takes exactly one type parameter.
        if mmtk_jl_svec_len(Address::from_mut_ptr((*dt).parameters)) != 1 {
            return false;
        }

        // The single parameter must itself be a datatype (not a TypeVar, etc.).
        let t = mmtk_jl_svecref((*dt).parameters, 0);
        if !mmtk_jl_is_datatype(t) {
            return false;
        }

        // Finally, compare the parameter's type name against the requested symbol.
        let t_dt = t as *mut jl_datatype_t;
        (*(*t_dt).name).name == crate::jl_symbol(name.as_ptr() as *const i8)
    }
}

/// Shorthand for `is_refvalue_of(dt, GENERICMEMORYREF_NAME)`.
// NOTE(review): the identifier has a typo ("generimemoryref" — missing a 'c');
// kept as-is because callers reference this exact name.
fn is_refvalue_of_generimemoryref(dt: *const jl_datatype_t) -> bool {
    is_refvalue_of(dt, GENERICMEMORYREF_NAME)
}

// Debugging aid: when true, object types are printed as they are scanned.
const PRINT_OBJ_TYPE: bool = false;

trait ValidOffset: Copy {
Expand Down Expand Up @@ -234,24 +282,8 @@ pub unsafe fn scan_julia_object<SV: SlotVisitor<JuliaVMSlot>>(obj: Address, clos
}
let vt = vtag.to_ptr::<jl_datatype_t>();
if (*vt).name == jl_array_typename {
let a = obj.to_ptr::<jl_array_t>();
let memref = (*a).ref_;

let ptr_or_offset = memref.ptr_or_offset;
// if the object moves its pointer inside the array object (void* ptr_or_offset) needs to be updated as well
if mmtk_object_is_managed_by_mmtk(ptr_or_offset as usize) {
let ptr_or_ref_slot = Address::from_ptr(::std::ptr::addr_of!((*a).ref_.ptr_or_offset));
let mem_addr_as_usize = memref.mem as usize;
let ptr_or_offset_as_usize = ptr_or_offset as usize;
if ptr_or_offset_as_usize > mem_addr_as_usize {
let offset = ptr_or_offset_as_usize - mem_addr_as_usize;

// Only update the offset pointer if the offset is valid (> 0)
if offset > 0 {
process_offset_slot(closure, ptr_or_ref_slot, offset);
}
}
}
let a = obj.to_mut_ptr::<jl_array_t>();
trace_ptr_or_offset_in_genericmemoryref(closure, &mut (*a).ref_);
}
if (*vt).name == jl_genericmemory_typename {
if PRINT_OBJ_TYPE {
Expand Down Expand Up @@ -379,6 +411,15 @@ pub unsafe fn scan_julia_object<SV: SlotVisitor<JuliaVMSlot>>(obj: Address, clos
return;
}

if (*vt).name == jl_genericmemoryref_typename {
let gmr = obj.to_mut_ptr::<jl_genericmemoryref_t>();
trace_ptr_or_offset_in_genericmemoryref(closure, gmr);
}
if is_refvalue_of_generimemoryref(vt) {
let gmr = obj.to_mut_ptr::<jl_genericmemoryref_t>();
trace_ptr_or_offset_in_genericmemoryref(closure, gmr);
}

let layout = (*vt).layout;
let npointers = (*layout).npointers;
if npointers != 0 {
Expand Down Expand Up @@ -792,3 +833,9 @@ pub unsafe fn mmtk_jl_bt_entry_jlvalue(
debug_assert!(!entry.is_null());
unsafe { ObjectReference::from_raw_address_unchecked(Address::from_mut_ptr(entry)) }
}

/// Returns true when the Julia value `vt` is tagged as a `jl_datatype_t`.
///
/// Compares the value's type tag against the small-typeof tag for datatypes.
/// NOTE(review): the `<< 4` shift presumably mirrors how Julia encodes small
/// typeof tags in `jl_typetagof` — confirm against the Julia runtime headers.
///
/// # Safety
/// `vt` must be a valid pointer to a Julia value whose type tag can be read.
pub unsafe fn mmtk_jl_is_datatype(vt: *const jl_datatype_t) -> bool {
    let type_tag = mmtk_jl_typetagof(Address::from_ptr(vt));

    type_tag.as_usize() == ((jl_small_typeof_tags_jl_datatype_tag as usize) << 4)
}
1 change: 1 addition & 0 deletions mmtk/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@ extern "C" {
pub fn jl_log_pinning_event(pinned_object: Address, filename: *const i8, lineno: i32);
pub fn jl_gc_log();
pub static jl_true: *mut crate::julia_types::jl_value_t;
pub fn jl_symbol(name: *const i8) -> *mut crate::julia_types::jl_sym_t;
}

#[macro_export]
Expand Down
29 changes: 15 additions & 14 deletions mmtk/src/object_model.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,9 @@ pub(crate) const LOS_METADATA_SPEC: VMLocalLOSMarkNurserySpec =

impl ObjectModel<JuliaVM> for VMObjectModel {
const GLOBAL_LOG_BIT_SPEC: VMGlobalLogBitSpec = LOGGING_SIDE_METADATA_SPEC;
// See https://github.com/mmtk/mmtk-core/issues/1331
const LOCAL_FORWARDING_POINTER_SPEC: VMLocalForwardingPointerSpec =
VMLocalForwardingPointerSpec::in_header(-64);
VMLocalForwardingPointerSpec::in_header(0);

const LOCAL_PINNING_BIT_SPEC: VMLocalPinningBitSpec = LOCAL_PINNING_METADATA_BITS_SPEC;
const LOCAL_FORWARDING_BITS_SPEC: VMLocalForwardingBitsSpec =
Expand All @@ -66,6 +67,8 @@ impl ObjectModel<JuliaVM> for VMObjectModel {
const UNIFIED_OBJECT_REFERENCE_ADDRESS: bool = false;
const OBJECT_REF_OFFSET_LOWER_BOUND: isize = 0;

const NEED_VO_BITS_DURING_TRACING: bool = true;

fn copy(
from: ObjectReference,
semantics: CopySemantics,
Expand Down Expand Up @@ -126,10 +129,7 @@ impl ObjectModel<JuliaVM> for VMObjectModel {
debug_assert!(test_hash_state(to_obj, UNHASHED));
to_obj
} else if test_hash_state(from, HASHED) {
info!(
"Moving a hashed object {} with size = {}. New size = {}",
from, cur_bytes, new_bytes
);
debug!("Moving a hashed object {} with size = {}. New size = {}", from, cur_bytes, new_bytes);
// if cur_bytes == new_bytes you end up copying the whole src
// but before you say that dst += STORED_HASH_BYTES so you don't have space
// in dst to copy src
Expand All @@ -138,10 +138,8 @@ impl ObjectModel<JuliaVM> for VMObjectModel {

// Store hash
let hash = from.to_raw_address().as_usize();
unsafe {
dst.store::<usize>(hash);
}
info!("Store hash {:x} into {}", hash, dst);
unsafe { dst.store::<usize>(hash); }
debug!("Store hash {:x} into {}", hash, dst);
dst += STORED_HASH_BYTES;

// Copy the object
Expand All @@ -153,13 +151,13 @@ impl ObjectModel<JuliaVM> for VMObjectModel {
unsafe { ObjectReference::from_raw_address_unchecked(dst + header_offset) };
copy_context.post_copy(to_obj, new_bytes, semantics);

info!("old object {}, new objectt {}", from, to_obj);
debug!("old object {}, new objectt {}", from, to_obj);

// set_hash_state(from, UNHASHED);
set_hash_state(to_obj, HASHED_AND_MOVED);
to_obj
} else if test_hash_state(from, HASHED_AND_MOVED) {
info!("Moving a hashed+moved object {}", from);
debug!("Moving a hashed+moved object {}", from);
debug_assert_eq!(cur_bytes, new_bytes);
debug_assert_eq!(from.to_raw_address(), from_start + 16usize);
debug_assert_eq!(header_offset, 16);
Expand Down Expand Up @@ -201,12 +199,15 @@ impl ObjectModel<JuliaVM> for VMObjectModel {
}

// zero from_obj (for debugging purposes)
// We cannot zero from_obj. We use find_object_from_internal_pointer during trace.
// So we will need to access from_obj after it is being moved to calculate its size.
// We cannot zero from_obj. See https://github.com/mmtk/mmtk-core/issues/1331
#[cfg(debug_assertions)]
{
use atomic::Ordering;
unsafe {
libc::memset(from_start.to_mut_ptr(), 0, cur_bytes);
}
// unsafe {
// libc::memset(from_start.to_mut_ptr(), 0, cur_bytes);
// }

Self::LOCAL_FORWARDING_BITS_SPEC.store_atomic::<JuliaVM, u8>(
from,
Expand Down