return (page_table[page].allocated & BOXED_PAGE_FLAG);
}
-static inline boolean code_page_p(page_index_t page) {
- /* This is used by the conservative pinning logic to determine if
- * a page can contain code objects. Ideally, we'd be able to
- * check the page allocation flag to see if it is CODE_PAGE_FLAG,
- * but this turns out not to be reliable (in fact, badly
- * unreliable) at the moment. On the upside, all code objects are
- * boxed objects, so we can simply re-use the boxed_page_p() logic
- * for a tighter result than merely "is this page allocated". */
-#if 0
- return (page_table[page].allocated & CODE_PAGE_FLAG) == CODE_PAGE_FLAG;
-#else
- return page_boxed_p(page);
-#endif
-}
-
/* True iff PAGE holds boxed objects and is not part of an open
 * allocation region. Guard-clause form: bail out early when the page
 * is not boxed, matching the original short-circuit order. */
static inline boolean page_boxed_no_region_p(page_index_t page) {
    if (!page_boxed_p(page))
        return 0;
    return page_no_region_p(page);
}
* address) which should prevent us from moving the referred-to thing?
* This is called from preserve_pointers() */
static int
-possibly_valid_dynamic_space_pointer(lispobj *pointer)
+possibly_valid_dynamic_space_pointer(lispobj *pointer, page_index_t addr_page_index)
{
lispobj *start_addr;
if (widetag_of(*start_addr) == CODE_HEADER_WIDETAG)
return 1;
+ /* Large object pages only contain ONE object, and it will never
+ * be a CONS. However, arrays and bignums can be allocated larger
+ * than necessary and then shrunk to fit, leaving what look like
+ * (0 . 0) CONSes at the end. These appear valid to
+ * looks_like_valid_lisp_pointer_p(), so pick them off here. */
+ if (page_table[addr_page_index].large_object &&
+ (lowtag_of((lispobj)pointer) == LIST_POINTER_LOWTAG))
+ return 0;
+
return looks_like_valid_lisp_pointer_p(pointer, start_addr);
}
* expensive but important, since it vastly reduces the
* probability that random garbage will be bogusly interpreted as
* a pointer which prevents a page from moving. */
- if (!possibly_valid_dynamic_space_pointer(addr))
+ if (!possibly_valid_dynamic_space_pointer(addr, addr_page_index))
return 0;
#endif
/* Adjust any large objects before promotion as they won't be
* copied after promotion. */
if (page_table[first_page].large_object) {
- /* Large objects (specifically vectors and bignums) can
- * shrink, leaving a "tail" of zeroed space, which appears to
- * the filter above as a seris of valid conses, both car and
- * cdr of which contain the fixnum zero, but will be
- * deallocated when the GC shrinks the large object region to
- * fit the object within. We allow raw pointers within code
- * space, but for boxed and unboxed space we do not, nor do
- * pointers to within a non-code object appear valid above. A
- * cons cell will never merit allocation to a large object
- * page, so pick them off now, before we try to adjust the
- * object. */
- if ((lowtag_of((lispobj)addr) == LIST_POINTER_LOWTAG) &&
- !code_page_p(first_page)) {
- return;
- }
maybe_adjust_large_object(page_address(first_page));
/* It may have moved to unboxed pages. */
region_allocation = page_table[first_page].allocated;
* FIXME: Add a variable to enable this
* dynamically. */
/*
- if (!possibly_valid_dynamic_space_pointer((lispobj *)thing)) {
+ if (!possibly_valid_dynamic_space_pointer((lispobj *)thing, page_index)) {
lose("ptr %p to invalid object %p\n", thing, start);
}
*/