blob: da891fed77c78a498dd8374f58285462faffdaad [file] [log] [blame]
/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "instrumentation.h"
#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "jni.h"
#include "oat_file.h"
#include "object_callbacks.h"
#include "safe_map.h"
#include "thread_pool.h"
namespace art {
class CompiledMethod;
class CompilerCallbacks;
namespace mirror {
class ArtMethod;
} // namespace mirror
namespace jit {
class JitInstrumentationCache;
class JitCodeCache {
static constexpr size_t kMaxCapacity = 1 * GB;
static constexpr size_t kDefaultCapacity = 2 * MB;
// Create the code cache with a code + data capacity equal to "capacity", error message is passed
// in the out arg error_msg.
static JitCodeCache* Create(size_t capacity, std::string* error_msg);
const uint8_t* CodeCachePtr() const {
return code_cache_ptr_;
size_t CodeCacheSize() const {
return code_cache_ptr_ - code_cache_begin_;
size_t CodeCacheRemain() const {
return code_cache_end_ - code_cache_ptr_;
const uint8_t* DataCachePtr() const {
return data_cache_ptr_;
size_t DataCacheSize() const {
return data_cache_ptr_ - data_cache_begin_;
size_t DataCacheRemain() const {
return data_cache_end_ - data_cache_ptr_;
size_t NumMethods() const {
return num_methods_;
// Return true if the code cache contains the code pointer which si the entrypoint of the method.
bool ContainsMethod(mirror::ArtMethod* method) const
// Return true if the code cache contains a code ptr.
bool ContainsCodePtr(const void* ptr) const;
// Reserve a region of code of size at least "size". Returns null if there is no more room.
uint8_t* ReserveCode(Thread* self, size_t size) LOCKS_EXCLUDED(lock_);
// Add a data array of size (end - begin) with the associated contents, returns null if there
// is no more room.
uint8_t* AddDataArray(Thread* self, const uint8_t* begin, const uint8_t* end)
// Get code for a method, returns null if it is not in the jit cache.
const void* GetCodeFor(mirror::ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(lock_);
// Save the compiled code for a method so that GetCodeFor(method) will return old_code_ptr if the
// entrypoint isn't within the cache.
void SaveCompiledCode(mirror::ArtMethod* method, const void* old_code_ptr)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(lock_);
// Takes ownership of code_mem_map.
explicit JitCodeCache(MemMap* code_mem_map);
// Unimplemented, TODO: Determine if it is necessary.
void FlushInstructionCache();
// Lock which guards.
Mutex lock_;
// Mem map which holds code and data. We do this since we need to have 32 bit offsets from method
// headers in code cache which point to things in the data cache. If the maps are more than 4GB
// apart, having multiple maps wouldn't work.
std::unique_ptr<MemMap> mem_map_;
// Code cache section.
uint8_t* code_cache_ptr_;
const uint8_t* code_cache_begin_;
const uint8_t* code_cache_end_;
// Data cache section.
uint8_t* data_cache_ptr_;
const uint8_t* data_cache_begin_;
const uint8_t* data_cache_end_;
size_t num_methods_;
// TODO: This relies on methods not moving.
// This map holds code for methods if they were deoptimized by the instrumentation stubs. This is
// required since we have to implement ClassLinker::GetQuickOatCodeFor for walking stacks.
SafeMap<mirror::ArtMethod*, const void*> method_code_map_ GUARDED_BY(lock_);
} // namespace jit
} // namespace art