Lines Matching defs:gch

322 GenCollectedHeap* gch = GenCollectedHeap::heap();
323 assert(gch->kind() == CollectedHeap::GenCollectedHeap,
326 gch->gen_policy()->size_policy();
394 GenCollectedHeap* gch = GenCollectedHeap::heap();
395 size_t expected_promotion = MIN2(gch->get_gen(0)->capacity(),
740 GenCollectedHeap* gch = GenCollectedHeap::heap();
741 _young_gen = gch->prev_gen(_cmsGen);
742 if (gch->supports_inline_contig_alloc()) {
743 _top_addr = gch->top_addr();
744 _end_addr = gch->end_addr();
859 GenCollectedHeap* gch = GenCollectedHeap::heap();
871 gch->used(), gch->capacity());
874 gch->used() / K, gch->capacity() / K);
977 GenCollectedHeap* gch = GenCollectedHeap::heap();
978 Generation* prev_gen = gch->_gens[prev_level];
1564 GenCollectedHeap* gch = GenCollectedHeap::heap();
1565 assert(gch->collector_policy()->is_two_generation_policy(),
1567 if (gch->incremental_collection_will_fail(true /* consult_young */)) {
1688 GenCollectedHeap* gch = GenCollectedHeap::heap();
1689 unsigned int gc_count = gch->total_full_collections();
1902 GenCollectedHeap* gch = GenCollectedHeap::heap();
1903 GCCause::Cause gc_cause = gch->gc_cause();
1910 gch->collector_policy());
1946 GenCollectedHeap* gch = GenCollectedHeap::heap();
1947 assert(gch->collector_policy()->is_two_generation_policy(),
1951 if (gch->incremental_collection_will_fail(false /* don't consult_young */)) {
1959 GCCause::is_user_requested_gc(gch->gc_cause()) ||
1960 gch->incremental_collection_will_fail(true /* consult_young */));
1997 GenCollectedHeap* gch = GenCollectedHeap::heap();
2003 gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
2006 if (PrintGC && Verbose && !(GCCause::is_user_requested_gc(gch->gc_cause()))) {
2094 size_policy()->msc_collection_end(gch->gc_cause());
2199 GenCollectedHeap* gch = GenCollectedHeap::heap();
2225 gch->increment_total_full_collections(); // ... starting a collection cycle
2226 _collection_count_start = gch->total_full_collections();
2365 size_policy()->concurrent_phases_end(gch->gc_cause(),
2366 gch->prev_gen(_cmsGen)->capacity(),
2461 GenCollectedHeap* gch = GenCollectedHeap::heap();
2462 _last_heap_summary = gch->create_heap_summary();
2463 _last_perm_gen_summary = gch->create_perm_gen_summary();
2576 GenCollectedHeap* gch = GenCollectedHeap::heap();
2577 size_policy()->ms_collection_end(gch->gc_cause());
2934 GenCollectedHeap* gch = GenCollectedHeap::heap();
2935 gch->ensure_parsability(false); // fill TLABs, but no need to retire them
2937 gch->save_marks();
2962 GenCollectedHeap* gch = GenCollectedHeap::heap();
2966 gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
2968 gch->gen_process_strong_roots(_cmsGen->level(),
3013 GenCollectedHeap* gch = GenCollectedHeap::heap();
3018 gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
3019 gch->gen_process_strong_roots(_cmsGen->level(),
3610 GenCollectedHeap* gch = GenCollectedHeap::heap();
3615 gch->ensure_parsability(false); // fill TLABs, but no need to retire them
3617 gch->save_marks();
3625 gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
3626 gch->gen_process_strong_roots(_cmsGen->level(),
3648 size_policy()->checkpoint_roots_initial_end(gch->gc_cause());
3705 GenCollectedHeap* gch = GenCollectedHeap::heap();
3706 size_policy()->ms_collection_marking_end(gch->gc_cause());
4931 GenCollectedHeap* gch = GenCollectedHeap::heap();
4934 FlagSetting fl(gch->_is_gc_active, false);
4939 gch->do_collection(true, // full (i.e. force, see below)
4977 GenCollectedHeap* gch = GenCollectedHeap::heap();
4999 gch->ensure_parsability(false); // fill TLAB's, but no need to retire them
5001 gch->save_marks();
5106 size_policy()->checkpoint_roots_final_end(gch->gc_cause());
5171 GenCollectedHeap* gch = GenCollectedHeap::heap();
5211 gch->gen_process_strong_roots(_collector->_cmsGen->level(),
5603 GenCollectedHeap* gch = GenCollectedHeap::heap();
5604 FlexibleWorkGang* workers = gch->workers();
5623 gch->set_par_threads(n_workers);
5628 // gch->rem_set()->prepare_for_younger_refs_iterate(true); // parallel
5656 GenCollectedHeap::StrongRootsScope srs(gch);
5660 GenCollectedHeap::StrongRootsScope srs(gch);
5663 gch->set_par_threads(0); // 0 ==> non-parallel.
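
The matches at 5603-5663 outline the parallel remark set-up: fetch the work gang from the heap, publish the worker count with set_par_threads(), open a GenCollectedHeap::StrongRootsScope for the traversal (apparently once per serial/parallel branch, hence the two occurrences at 5656 and 5660), and reset the thread count afterwards. A minimal sketch of that shape, assuming the surrounding HotSpot declarations; `tsk`, the remark gang task, is not among the matches and stands in as a placeholder:

  GenCollectedHeap* gch = GenCollectedHeap::heap();
  FlexibleWorkGang* workers = gch->workers();
  int n_workers = workers->active_workers();      // assumption: gang already sized
  gch->set_par_threads(n_workers);                // publish worker count (5623)
  if (n_workers > 1) {
    GenCollectedHeap::StrongRootsScope srs(gch);  // parallel branch (5656)
    workers->run_task(&tsk);                      // tsk: placeholder remark task
  } else {
    GenCollectedHeap::StrongRootsScope srs(gch);  // serial branch (5660)
    tsk.work(0);                                  // run on the calling thread
  }
  gch->set_par_threads(0);                        // 0 ==> non-parallel (5663)
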
5673 GenCollectedHeap* gch = GenCollectedHeap::heap();
5745 gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
5746 GenCollectedHeap::StrongRootsScope srs(gch);
5747 gch->gen_process_strong_roots(_cmsGen->level(),
5911 GenCollectedHeap* gch = GenCollectedHeap::heap();
5912 FlexibleWorkGang* workers = gch->workers();
5924 GenCollectedHeap* gch = GenCollectedHeap::heap();
5925 FlexibleWorkGang* workers = gch->workers();
5959 GenCollectedHeap* gch = GenCollectedHeap::heap();
5961 FlexibleWorkGang* workers = gch->workers();
6191 GenCollectedHeap* gch = GenCollectedHeap::heap();
6192 gch->clear_incremental_collection_failed(); // Worth retrying as fresh space may have been freed up
6193 gch->update_full_collections_completed(_collection_count_start);
6240 GenCollectedHeap* gch = GenCollectedHeap::heap();
6241 assert(gch->kind() == CollectedHeap::GenCollectedHeap,
6244 gch->gen_policy()->size_policy();
6325 GenCollectedHeap* gch = GenCollectedHeap::heap();
6327 AdaptiveSizePolicyOutput(sp, gch->total_collections());
9154 GenCollectedHeap* gch = (GenCollectedHeap*) GenCollectedHeap::heap();
9156 (CMSAdaptiveSizePolicy*) gch->gen_policy()->size_policy();
9186 GenCollectedHeap* gch = GenCollectedHeap::heap();
9188 (CMSGCAdaptivePolicyCounters*) gch->collector_policy()->counters();
9200 GenCollectedHeap* gch = GenCollectedHeap::heap();
9215 GenCollectedHeap* gch = GenCollectedHeap::heap();
9228 GenCollectedHeap* gch = (GenCollectedHeap*) GenCollectedHeap::heap();
9240 assert(gch->kind() == CollectedHeap::GenCollectedHeap,
9244 Generation* prev_gen = gch->get_gen(prev_level);
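
Nearly every cluster above opens with the same idiom: take the GenCollectedHeap singleton, optionally assert its kind (323, 6241, 9240), and reach the CMS size policy through the generation policy (326, 6244, 9156). A minimal sketch of that idiom, assuming the HotSpot headers this file includes; the assert message is elided in the matches and is a placeholder here:

  GenCollectedHeap* gch = GenCollectedHeap::heap();
  assert(gch->kind() == CollectedHeap::GenCollectedHeap,
         "wrong heap kind");                      // message not shown in the matches
  CMSAdaptiveSizePolicy* sp = (CMSAdaptiveSizePolicy*)
    gch->gen_policy()->size_policy();             // cast as at 9156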