* during a heap verify? */
boolean verify_dynamic_code_check = 0;
+#ifdef LISP_FEATURE_X86
/* Should we check code objects for fixup errors after they are transported? */
boolean check_code_fixups = 0;
+#endif
/* Should we check that newly allocated regions are zero filled? */
boolean gencgc_zero_check = 0;
return page_address(page_index)-page_table[page_index].scan_start_offset;
}
+/* True if the page starts a contiguous block, i.e. its data does not
+ * continue an object begun on an earlier page.  A scan_start_offset
+ * of 0 means the scan start is the page's own address (see
+ * page_scan_start above). */
+static inline boolean
+page_starts_contiguous_block_p(page_index_t page_index)
+{
+ return page_table[page_index].scan_start_offset == 0;
+}
+
+/* True if the page is the last page in a contiguous block.
+ *
+ * NOTE: the order of the disjuncts matters -- the short-circuiting
+ * (page_index + 1) >= last_free_page test ensures that
+ * page_table[page_index + 1] is only read when page_index + 1 is
+ * below last_free_page.  Do not reorder. */
+static inline boolean
+page_ends_contiguous_block_p(page_index_t page_index, generation_index_t gen)
+{
+ return (/* page doesn't fill block */
+ (page_table[page_index].bytes_used < GENCGC_CARD_BYTES)
+ /* page is last allocated page */
+ || ((page_index + 1) >= last_free_page)
+ /* next page free */
+ || page_free_p(page_index + 1)
+ /* next page contains no data */
+ || (page_table[page_index + 1].bytes_used == 0)
+ /* next page is in different generation */
+ || (page_table[page_index + 1].gen != gen)
+ /* next page starts its own contiguous block */
+ || (page_starts_contiguous_block_p(page_index + 1)));
+}
+
/* Find the page index within the page_table for the given
* address. Return -1 on failure. */
inline page_index_t
/* If the page was free then set up the gen, and
* scan_start_offset. */
if (page_table[first_page].bytes_used == 0)
- gc_assert(page_table[first_page].scan_start_offset == 0);
+ gc_assert(page_starts_contiguous_block_p(first_page));
page_table[first_page].allocated &= ~(OPEN_REGION_PAGE_FLAG);
gc_assert(page_table[first_page].allocated & page_type_flag);
* new areas, but let's do it for them all (they'll probably
* be written anyway?). */
- gc_assert(page_table[first_page].scan_start_offset == 0);
+ gc_assert(page_starts_contiguous_block_p(first_page));
next_page = first_page;
remaining_bytes = nwords*N_WORD_BYTES;
*
* Currently only absolute fixups to the constant vector, or to the
* code area are checked. */
+#ifdef LISP_FEATURE_X86
void
sniff_code_object(struct code *code, os_vm_size_t displacement)
{
-#ifdef LISP_FEATURE_X86
sword_t nheader_words, ncode_words, nwords;
os_vm_address_t constants_start_addr = NULL, constants_end_addr, p;
os_vm_address_t code_start_addr, code_end_addr;
"/code start = %x, end = %x\n",
code_start_addr, code_end_addr));
}
-#endif
}
+#endif
+#ifdef LISP_FEATURE_X86
void
gencgc_apply_code_fixups(struct code *old_code, struct code *new_code)
{
-/* x86-64 uses pc-relative addressing instead of this kludge */
-#ifndef LISP_FEATURE_X86_64
sword_t nheader_words, ncode_words, nwords;
os_vm_address_t constants_start_addr, constants_end_addr;
os_vm_address_t code_start_addr, code_end_addr;
if (check_code_fixups) {
sniff_code_object(new_code,displacement);
}
-#endif
}
-
+#endif
static lispobj
trans_boxed_large(lispobj object)
* but lets do it for them all (they'll probably be written
* anyway?). */
- gc_assert(page_table[first_page].scan_start_offset == 0);
+ gc_assert(page_starts_contiguous_block_p(first_page));
next_page = first_page;
remaining_bytes = nwords*N_WORD_BYTES;
first_page = find_page_index(page_scan_start(addr_page_index))
#else
first_page = addr_page_index;
- while (page_table[first_page].scan_start_offset != 0) {
+ while (!page_starts_contiguous_block_p(first_page)) {
--first_page;
/* Do some checks. */
gc_assert(page_table[first_page].bytes_used == GENCGC_CARD_BYTES);
/* Adjust any large objects before promotion as they won't be
* copied after promotion. */
if (page_table[first_page].large_object) {
- maybe_adjust_large_object(page_address(first_page));
- /* If a large object has shrunk then addr may now point to a
- * free area in which case it's ignored here. Note it gets
- * through the valid pointer test above because the tail looks
- * like conses. */
- if (page_free_p(addr_page_index)
- || (page_table[addr_page_index].bytes_used == 0)
- /* Check the offset within the page. */
- || (((uword_t)addr & (GENCGC_CARD_BYTES - 1))
- > page_table[addr_page_index].bytes_used)) {
- FSHOW((stderr,
- "weird? ignore ptr 0x%x to freed area of large object\n",
- addr));
+ /* Large objects (specifically vectors and bignums) can
+ * shrink, leaving a "tail" of zeroed space, which appears to
+ * the filter above as a series of valid conses, both car and
+ * cdr of which contain the fixnum zero, but will be
+ * deallocated when the GC shrinks the large object region to
+ * fit the object within. We allow raw pointers within code
+ * space, but for boxed and unboxed space we do not, nor do
+ * pointers to within a non-code object appear valid above. A
+ * cons cell will never merit allocation to a large object
+ * page, so pick them off now, before we try to adjust the
+ * object. */
+ if ((lowtag_of((lispobj)addr) == LIST_POINTER_LOWTAG) &&
+ !code_page_p(first_page)) {
return;
}
+ maybe_adjust_large_object(page_address(first_page));
/* It may have moved to unboxed pages. */
region_allocation = page_table[first_page].allocated;
}
gc_assert(!page_table[i].write_protected);
/* Check whether this is the last page in this contiguous block.. */
- if ((page_table[i].bytes_used < GENCGC_CARD_BYTES)
- /* ..or it is CARD_BYTES and is the last in the block */
- || page_free_p(i+1)
- || (page_table[i+1].bytes_used == 0) /* next page free */
- || (page_table[i+1].gen != from_space) /* diff. gen */
- || (page_table[i+1].scan_start_offset == 0))
+ if (page_ends_contiguous_block_p(i, from_space))
break;
}
int write_protected=1;
/* This should be the start of a region */
- gc_assert(page_table[i].scan_start_offset == 0);
+ gc_assert(page_starts_contiguous_block_p(i));
/* Now work forward until the end of the region */
for (last_page = i; ; last_page++) {
write_protected =
write_protected && page_table[last_page].write_protected;
- if ((page_table[last_page].bytes_used < GENCGC_CARD_BYTES)
- /* Or it is CARD_BYTES and is the last in the block */
- || (!page_boxed_p(last_page+1))
- || (page_table[last_page+1].bytes_used == 0)
- || (page_table[last_page+1].gen != generation)
- || (page_table[last_page+1].scan_start_offset == 0))
+ if (page_ends_contiguous_block_p(last_page, generation))
break;
}
if (!write_protected) {
/* Check whether this is the last page in this
* contiguous block */
- if ((page_table[last_page].bytes_used < GENCGC_CARD_BYTES)
- /* Or it is CARD_BYTES and is the last in the block */
- || (!page_boxed_p(last_page+1))
- || (page_table[last_page+1].bytes_used == 0)
- || (page_table[last_page+1].gen != generation)
- || (page_table[last_page+1].scan_start_offset == 0))
+ if (page_ends_contiguous_block_p(last_page, generation))
break;
}
#ifdef COMPLEX_LONG_FLOAT_WIDETAG
case COMPLEX_LONG_FLOAT_WIDETAG:
#endif
+#ifdef SIMD_PACK_WIDETAG
+ case SIMD_PACK_WIDETAG:
+#endif
case SIMPLE_BASE_STRING_WIDETAG:
#ifdef SIMPLE_CHARACTER_STRING_WIDETAG
case SIMPLE_CHARACTER_STRING_WIDETAG:
&& (page_table[i].bytes_used != 0)
&& (page_table[i].gen == generation)) {
page_index_t last_page;
- int region_allocation = page_table[i].allocated;
/* This should be the start of a contiguous block */
- gc_assert(page_table[i].scan_start_offset == 0);
+ gc_assert(page_starts_contiguous_block_p(i));
/* Need to find the full extent of this contiguous block in case
objects span pages. */
for (last_page = i; ;last_page++)
/* Check whether this is the last page in this contiguous
* block. */
- if ((page_table[last_page].bytes_used < GENCGC_CARD_BYTES)
- /* Or it is CARD_BYTES and is the last in the block */
- || (page_table[last_page+1].allocated != region_allocation)
- || (page_table[last_page+1].bytes_used == 0)
- || (page_table[last_page+1].gen != generation)
- || (page_table[last_page+1].scan_start_offset == 0))
+ if (page_ends_contiguous_block_p(last_page, generation))
break;
verify_space(page_address(i),