#include "constant.h"
#include "id_table.h"
#include "internal.h"
#include "internal/imemo.h"
#include "vm_callinfo.h"
size_t rb_iseq_memsize(const rb_iseq_t *iseq);
void rb_iseq_mark_and_move(rb_iseq_t *iseq, bool reference_updating);
void rb_iseq_free(const rb_iseq_t *iseq);
const char *
rb_imemo_name(enum imemo_type type)
{
    /* No default case, so the compiler warns when an imemo type is missing. */
    switch (type) {
#define IMEMO_NAME(x) case imemo_##x: return #x;
        IMEMO_NAME(callcache);
        IMEMO_NAME(constcache);
        /* ... IMEMO_NAME cases for the remaining imemo types elided in this excerpt ... */
        IMEMO_NAME(parser_strterm);
        IMEMO_NAME(throw_data);
#undef IMEMO_NAME
    }
    rb_bug("unreachable");
}
VALUE
rb_imemo_new(enum imemo_type type, VALUE v0)
{
    size_t size = RVALUE_SIZE;
    VALUE flags = T_IMEMO | FL_WB_PROTECTED | (type << FL_USHIFT);
    NEWOBJ_OF(obj, void, v0, flags, size, 0);

    return (VALUE)obj;
}
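/*
 * For reference (a sketch, not imemo.c code): the type tag packed into the
 * flags above is decoded by imemo_type() in internal/imemo.h, essentially
 *
 *     static inline enum imemo_type
 *     imemo_type(VALUE imemo)
 *     {
 *         return (RBASIC(imemo)->flags >> FL_USHIFT) & IMEMO_MASK;
 *     }
 *
 * so an imemo object spends ordinary flag bits, not a separate field, on
 * identifying which imemo_type variant it is.
 */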
static rb_imemo_tmpbuf_t *
rb_imemo_tmpbuf_new(void)
{
    /* Body elided in this excerpt; it allocates and returns a fresh, empty
     * imemo_tmpbuf object. */
}
void *
rb_alloc_tmp_buffer_with_count(volatile VALUE *store, size_t size, size_t cnt)
{
    void *ptr;
    rb_imemo_tmpbuf_t *tmpbuf;

    /* Keep this order: allocate the empty imemo first, then xmalloc, so a
     * failing allocation cannot leak the other half. */
    tmpbuf = rb_imemo_tmpbuf_new();
    *store = (VALUE)tmpbuf;
    ptr = ruby_xmalloc(size);
    tmpbuf->ptr = ptr;
    tmpbuf->cnt = cnt;

    return ptr;
}
void *
rb_alloc_tmp_buffer(volatile VALUE *store, long len)
{
    long cnt;

    if (len < 0 || (cnt = (long)roomof(len, sizeof(VALUE))) < 0) {
        rb_raise(rb_eArgError, "negative buffer size (or size too big)");
    }

    return rb_alloc_tmp_buffer_with_count(store, len, cnt);
}
void
rb_free_tmp_buffer(volatile VALUE *store)
{
    rb_imemo_tmpbuf_t *s = (rb_imemo_tmpbuf_t *)ATOMIC_VALUE_EXCHANGE(*store, 0);

    if (s) {
        void *ptr = ATOMIC_PTR_EXCHANGE(s->ptr, 0);
        s->cnt = 0;
        ruby_xfree(ptr);
    }
}
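/*
 * Usage sketch (illustrative only, not part of imemo.c): C extensions reach
 * rb_alloc_tmp_buffer_with_count()/rb_free_tmp_buffer() through the public
 * ALLOCV_N()/ALLOCV_END() macros. The imemo_tmpbuf stored in `v` keeps the
 * raw buffer reachable by the GC, so it cannot leak even if an exception
 * skips the ALLOCV_END() call. `example_scratch` is a hypothetical helper.
 */
#if 0
static void
example_scratch(long n)
{
    VALUE v;
    VALUE *buf = ALLOCV_N(VALUE, v, n);  /* -> rb_alloc_tmp_buffer_with_count() */

    for (long i = 0; i < n; i++) buf[i] = Qnil;
    /* ... use buf; a raise here leaves the tmpbuf for the GC to reclaim ... */

    ALLOCV_END(v);                       /* -> rb_free_tmp_buffer() */
}
#endif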
rb_imemo_tmpbuf_t *
rb_imemo_tmpbuf_parser_heap(void *buf, rb_imemo_tmpbuf_t *old_heap, size_t cnt)
{
    rb_imemo_tmpbuf_t *tmpbuf = rb_imemo_tmpbuf_new();

    tmpbuf->ptr = buf;
    tmpbuf->next = old_heap;
    tmpbuf->cnt = cnt;

    return tmpbuf;
}
VALUE
rb_imemo_new_debug(enum imemo_type type, VALUE v0, const char *file, int line)
{
    VALUE memo = rb_imemo_new(type, v0);

    fprintf(stderr, "memo %p (type: %d) @ %s:%d\n", (void *)memo, imemo_type(memo), file, line);

    return memo;
}
size_t
rb_imemo_memsize(VALUE obj)
{
    size_t size = 0;

    switch (imemo_type(obj)) {
      case imemo_ast:
        rb_bug("imemo_ast is obsolete");
        break;
      case imemo_callcache:
        break;
      case imemo_constcache:
        break;
      /* ... cases for the remaining imemo types elided in this excerpt ... */
      case imemo_iseq:
        size += rb_iseq_memsize((rb_iseq_t *)obj);
        break;
      case imemo_ment:
        size += sizeof(((rb_method_entry_t *)obj)->def);
        break;
      case imemo_parser_strterm:
        break;
      case imemo_throw_data:
        break;
      case imemo_tmpbuf:
        size += ((rb_imemo_tmpbuf_t *)obj)->cnt * sizeof(VALUE);
        break;
      default:
        rb_bug("unreachable");
    }

    return size;
}
static enum rb_id_table_iterator_result
cc_table_mark_i(VALUE ccs_ptr, void *data)
{
    struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
    VALUE klass = (VALUE)data;

    VM_ASSERT(vm_ccs_p(ccs));
#if VM_CHECK_MODE > 0
    VALUE lookup_val;
    VM_ASSERT(rb_id_table_lookup(RCLASS_CC_TBL(klass), ccs->cme->called_id, &lookup_val));
    VM_ASSERT(lookup_val == ccs_ptr);
#endif

    if (METHOD_ENTRY_INVALIDATED(ccs->cme)) {
        rb_vm_ccs_free(ccs);
        return ID_TABLE_DELETE;
    }
    else {
        rb_gc_mark_movable((VALUE)ccs->cme);

        for (int i=0; i<ccs->len; i++) {
            VM_ASSERT(klass == ccs->entries[i].cc->klass);
            VM_ASSERT(vm_cc_check_cme(ccs->entries[i].cc, ccs->cme));

            rb_gc_mark_movable((VALUE)ccs->entries[i].cc);
        }
        return ID_TABLE_CONTINUE;
    }
}
void
rb_cc_table_mark(VALUE klass)
{
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);

    if (cc_tbl) {
        rb_id_table_foreach_values(cc_tbl, cc_table_mark_i, (void *)klass);
    }
}
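/*
 * Shape of the data walked above (informal): each class carries a CC table,
 * an rb_id_table keyed by method id whose values are rb_class_cc_entries
 * ("ccs"). A ccs pairs one callable method entry (cme) with the array of
 * call caches (cc) that resolved to it, so marking a class's CC table means
 * marking the cme plus every cc of every ccs, as cc_table_mark_i does.
 */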
static bool
moved_or_living_object_strictly_p(VALUE obj)
{
    /* Body elided in this excerpt; it returns true iff obj is non-zero and
     * is either already moved (BUILTIN_TYPE(obj) == T_MOVED) or a live,
     * non-garbage heap object. */
}
static void
mark_and_move_method_entry(rb_method_entry_t *ment, bool reference_updating)
{
    rb_method_definition_t *def = ment->def;

    rb_gc_mark_and_move(&ment->owner);
    rb_gc_mark_and_move(&ment->defined_class);

    if (def) {
        switch (def->type) {
          case VM_METHOD_TYPE_ISEQ:
            if (def->body.iseq.iseqptr) {
                rb_gc_mark_and_move_ptr(&def->body.iseq.iseqptr);
            }
            rb_gc_mark_and_move_ptr(&def->body.iseq.cref);

            if (!reference_updating) {
                if (def->iseq_overload && ment->defined_class) {
                    /* The entry can be a key of the "overloaded_cme" table,
                     * so it must be pinned. */
                    rb_gc_mark((VALUE)ment);
                }
            }
            break;
          case VM_METHOD_TYPE_ATTRSET:
          case VM_METHOD_TYPE_IVAR:
            rb_gc_mark_and_move(&def->body.attr.location);
            break;
          case VM_METHOD_TYPE_BMETHOD:
            rb_gc_mark_and_move(&def->body.bmethod.proc);
            if (!reference_updating) {
                if (def->body.bmethod.hooks) rb_hook_list_mark(def->body.bmethod.hooks);
            }
            break;
          case VM_METHOD_TYPE_ALIAS:
            rb_gc_mark_and_move_ptr(&def->body.alias.original_me);
            break;
          case VM_METHOD_TYPE_REFINED:
            rb_gc_mark_and_move_ptr(&def->body.refined.orig_me);
            break;
          case VM_METHOD_TYPE_CFUNC:
          case VM_METHOD_TYPE_ZSUPER:
          case VM_METHOD_TYPE_MISSING:
          case VM_METHOD_TYPE_OPTIMIZED:
          case VM_METHOD_TYPE_UNDEF:
          case VM_METHOD_TYPE_NOTIMPLEMENTED:
            break;
        }
    }
}
void
rb_imemo_mark_and_move(VALUE obj, bool reference_updating)
{
    switch (imemo_type(obj)) {
      case imemo_ast:
        rb_bug("imemo_ast is obsolete");
        break;
      case imemo_callcache: {
        const struct rb_callcache *cc = (const struct rb_callcache *)obj;

        /* (A long comment on the cc->klass/cc->cme_ invariants is elided
         * in this excerpt.) */
        if (reference_updating) {
            if (!cc->klass) {
                /* The cc has already been invalidated; nothing to update. */
            }
            else if (moved_or_living_object_strictly_p(cc->klass) &&
                     moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
                *((VALUE *)&cc->klass) = rb_gc_location(cc->klass);
                *((struct rb_callable_method_entry_struct **)&cc->cme_) =
                    (struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cc->cme_);
            }
            else {
                vm_cc_invalidate(cc);
            }
        }
        else {
            if (vm_cc_super_p(cc) || vm_cc_refinement_p(cc)) {
                rb_gc_mark_movable((VALUE)cc->cme_);
                rb_gc_mark_movable((VALUE)cc->klass);
            }
        }

        break;
      }
      case imemo_callinfo:
        break;
      case imemo_constcache: {
        struct iseq_inline_constant_cache_entry *ice = (struct iseq_inline_constant_cache_entry *)obj;

        rb_gc_mark_and_move(&ice->value);

        break;
      }
      case imemo_cref: {
        rb_cref_t *cref = (rb_cref_t *)obj;

        rb_gc_mark_and_move(&cref->klass_or_self);
        rb_gc_mark_and_move_ptr(&cref->next);
        rb_gc_mark_and_move(&cref->refinements);

        break;
      }
      case imemo_env: {
        rb_env_t *env = (rb_env_t *)obj;

        if (LIKELY(env->ep)) {
            /* env->ep can still be NULL immediately after newobj(). */
            RUBY_ASSERT(rb_gc_location(env->ep[VM_ENV_DATA_INDEX_ENV]) == rb_gc_location(obj));
            RUBY_ASSERT(reference_updating || VM_ENV_ESCAPED_P(env->ep));

            for (unsigned int i = 0; i < env->env_size; i++) {
                rb_gc_mark_and_move((VALUE *)&env->env[i]);
            }

            rb_gc_mark_and_move_ptr(&env->iseq);

            if (reference_updating) {
                ((VALUE *)env->ep)[VM_ENV_DATA_INDEX_ENV] = rb_gc_location(env->ep[VM_ENV_DATA_INDEX_ENV]);
            }
            else {
                if (!VM_ENV_FLAGS(env->ep, VM_ENV_FLAG_WB_REQUIRED)) {
                    VM_ENV_FLAGS_SET(env->ep, VM_ENV_FLAG_WB_REQUIRED);
                }
                rb_gc_mark_movable((VALUE)rb_vm_env_prev_env(env));
            }
        }

        break;
      }
      case imemo_ifunc: {
        struct vm_ifunc *ifunc = (struct vm_ifunc *)obj;

        if (!reference_updating) {
            rb_gc_mark_maybe((VALUE)ifunc->data);
        }

        break;
      }
      case imemo_iseq:
        rb_iseq_mark_and_move((rb_iseq_t *)obj, reference_updating);
        break;
      case imemo_memo: {
        struct MEMO *memo = (struct MEMO *)obj;

        rb_gc_mark_and_move((VALUE *)&memo->v1);
        rb_gc_mark_and_move((VALUE *)&memo->v2);
        if (!reference_updating) {
            rb_gc_mark_maybe(memo->u3.value);
        }

        break;
      }
      case imemo_ment:
        mark_and_move_method_entry((rb_method_entry_t *)obj, reference_updating);
        break;
      case imemo_parser_strterm:
        break;
      case imemo_svar: {
        struct vm_svar *svar = (struct vm_svar *)obj;

        rb_gc_mark_and_move((VALUE *)&svar->lastline);
        rb_gc_mark_and_move((VALUE *)&svar->backref);
        rb_gc_mark_and_move((VALUE *)&svar->others);

        break;
      }
      case imemo_throw_data: {
        struct vm_throw_data *throw_data = (struct vm_throw_data *)obj;

        rb_gc_mark_and_move((VALUE *)&throw_data->throw_obj);

        break;
      }
      case imemo_tmpbuf: {
        const rb_imemo_tmpbuf_t *m = (const rb_imemo_tmpbuf_t *)obj;

        if (!reference_updating) {
            do {
                rb_gc_mark_locations(m->ptr, m->ptr + m->cnt);
            } while ((m = m->next) != NULL);
        }

        break;
      }
      default:
        rb_bug("unreachable");
    }
}
static enum rb_id_table_iterator_result
free_const_entry_i(VALUE value, void *data)
{
    rb_const_entry_t *ce = (rb_const_entry_t *)value;

    xfree(ce);

    return ID_TABLE_CONTINUE;
}

void
rb_free_const_table(struct rb_id_table *tbl)
{
    rb_id_table_foreach_values(tbl, free_const_entry_i, 0);
    rb_id_table_free(tbl);
}
static void
vm_ccs_free(struct rb_class_cc_entries *ccs, int alive, VALUE klass)
{
    if (ccs->entries) {
        for (int i=0; i<ccs->len; i++) {
            const struct rb_callcache *cc = ccs->entries[i].cc;
            if (!alive) {
                /* The cc may already have been swept; skip it unless it is
                 * still a live callcache that points back at klass. */
                if (!(rb_gc_pointer_to_heap_p((VALUE)cc) &&
                      !rb_objspace_garbage_object_p((VALUE)cc) &&
                      IMEMO_TYPE_P(cc, imemo_callcache) &&
                      cc->klass == klass)) continue;
            }

            VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
            vm_cc_invalidate(cc);
        }
        ruby_xfree(ccs->entries);
    }
    ruby_xfree(ccs);
}

void
rb_vm_ccs_free(struct rb_class_cc_entries *ccs)
{
    RB_DEBUG_COUNTER_INC(ccs_free);
    vm_ccs_free(ccs, true, Qundef);
}
static enum rb_id_table_iterator_result
cc_table_free_i(VALUE ccs_ptr, void *data)
{
    struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
    VALUE klass = (VALUE)data;

    VM_ASSERT(vm_ccs_p(ccs));

    vm_ccs_free(ccs, false, klass);

    return ID_TABLE_CONTINUE;
}
void
rb_cc_table_free(VALUE klass)
{
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);

    if (cc_tbl) {
        rb_id_table_foreach_values(cc_tbl, cc_table_free_i, (void *)klass);
        rb_id_table_free(cc_tbl);
    }
}
void
rb_imemo_free(VALUE obj)
{
    switch (imemo_type(obj)) {
      case imemo_ast:
        rb_bug("imemo_ast is obsolete");
        break;
      case imemo_callcache:
        RB_DEBUG_COUNTER_INC(obj_imemo_callcache);
        break;
      case imemo_callinfo: {
        const struct rb_callinfo *ci = (const struct rb_callinfo *)obj;

        if (ci->kwarg) {
            ((struct rb_callinfo_kwarg *)ci->kwarg)->references--;
            if (ci->kwarg->references == 0) xfree((void *)ci->kwarg);
        }
        RB_DEBUG_COUNTER_INC(obj_imemo_callinfo);
        break;
      }
      case imemo_constcache:
        RB_DEBUG_COUNTER_INC(obj_imemo_constcache);
        break;
      case imemo_cref:
        RB_DEBUG_COUNTER_INC(obj_imemo_cref);
        break;
      case imemo_env:
        xfree((VALUE *)((rb_env_t *)obj)->env);
        RB_DEBUG_COUNTER_INC(obj_imemo_env);
        break;
      case imemo_ifunc:
        RB_DEBUG_COUNTER_INC(obj_imemo_ifunc);
        break;
      case imemo_iseq:
        rb_iseq_free((rb_iseq_t *)obj);
        RB_DEBUG_COUNTER_INC(obj_imemo_iseq);
        break;
      case imemo_memo:
        RB_DEBUG_COUNTER_INC(obj_imemo_memo);
        break;
      case imemo_ment:
        rb_free_method_entry((rb_method_entry_t *)obj);
        RB_DEBUG_COUNTER_INC(obj_imemo_ment);
        break;
      case imemo_parser_strterm:
        RB_DEBUG_COUNTER_INC(obj_imemo_parser_strterm);
        break;
      case imemo_svar:
        RB_DEBUG_COUNTER_INC(obj_imemo_svar);
        break;
      case imemo_throw_data:
        RB_DEBUG_COUNTER_INC(obj_imemo_throw_data);
        break;
      case imemo_tmpbuf:
        xfree(((rb_imemo_tmpbuf_t *)obj)->ptr);
        RB_DEBUG_COUNTER_INC(obj_imemo_tmpbuf);
        break;
      default:
        rb_bug("unreachable");
    }
}