/* * Copyright (C) 2011-2020 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

#pragma once

#if ENABLE(DFG_JIT)

#include "CCallHelpers.h"
#include "CodeBlock.h"
#include "DFGDisassembler.h"
#include "DFGGraph.h"
#include "DFGInlineCacheWrapper.h"
#include "DFGJITCode.h"
#include "DFGOSRExitCompilationInfo.h"
#include "GPRInfo.h"
#include "HandlerInfo.h"
#include "JITCode.h"
#include "JITInlineCacheGenerator.h"
#include "LinkBuffer.h"
#include "MacroAssembler.h"
#include "PCToCodeOriginMap.h"

namespace JSC {

class AbstractSamplingCounter;
class CodeBlock;
class VM;

namespace DFG {

class JITCodeGenerator;
class NodeToRegisterMap;
class OSRExitJumpPlaceholder;
class SlowPathGenerator;
class SpeculativeJIT;
class SpeculationRecovery;

struct EntryLocation;
struct OSRExit;

// === CallLinkRecord ===
//
// A record of a call out from JIT code that needs linking to a helper function.
// Every CallLinkRecord contains a reference to the call instruction & the function
// that it needs to be linked to.
//
// NOTE(review): FunctionPtr normally carries a PtrTag template argument in this
// codebase; the angle-bracket contents appear to have been stripped from this
// chunk by extraction -- confirm against the upstream header.
struct CallLinkRecord {
    CallLinkRecord(MacroAssembler::Call call, FunctionPtr function)
        : m_call(call)
        , m_function(function)
    {
    }

    MacroAssembler::Call m_call; // The call instruction to patch at link time.
    FunctionPtr m_function; // The helper function the call must be linked to.
};

// === JITCompiler ===
//
// DFG::JITCompiler is responsible for generating JIT code from the dataflow graph.
// It does so by delegating to the speculative & non-speculative JITs, which
// generate to a MacroAssembler (which the JITCompiler owns through an inheritance
// relationship). The JITCompiler holds references to information required during
// compilation, and also records information used in linking (e.g. a list of all
// calls to be linked).
class JITCompiler : public CCallHelpers {
public:
    JITCompiler(Graph& dfg);
    ~JITCompiler();

    void compile();
    void compileFunction();

    // Accessors for properties.
    Graph& graph() { return m_graph; }

    // Methods to set labels for the disassembler.
// Records the current position as the start of the generated code: always
// feeds the PC -> CodeOrigin map (entry corresponds to bytecode index 0),
// and additionally labels the disassembler when one is attached.
void setStartOfCode()
{
    m_pcToCodeOriginMapBuilder.appendItem(labelIgnoringWatchpoints(), CodeOrigin(BytecodeIndex(0)));
    // A disassembler is usually not attached, hence the LIKELY early return.
    if (LIKELY(!m_disassembler))
        return;
    m_disassembler->setStartOfCode(labelIgnoringWatchpoints());
}

// Labels, for the disassembler (if any), where the code for the given basic
// block begins.
void setForBlockIndex(BlockIndex blockIndex)
{
    if (LIKELY(!m_disassembler))
        return;
    m_disassembler->setForBlockIndex(blockIndex, labelIgnoringWatchpoints());
}

// Labels, for the disassembler (if any), where the code for the given DFG
// node begins.
void setForNode(Node* node)
{
    if (LIKELY(!m_disassembler))
        return;
    m_disassembler->setForNode(node, labelIgnoringWatchpoints());
}

void setEndOfMainPath();
void setEndOfCode();

// Registers a code origin with the compiled code's common data and returns
// the call-site index that refers to it.
CallSiteIndex addCallSite(CodeOrigin codeOrigin)
{
    return m_jitCode->common.codeOrigins->addCodeOrigin(codeOrigin);
}

// Registers the code origin and emits a store of the resulting call-site
// index into the current call frame, then returns that index.
CallSiteIndex emitStoreCodeOrigin(CodeOrigin codeOrigin)
{
    CallSiteIndex callSite = addCallSite(codeOrigin);
    emitStoreCallSiteIndex(callSite);
    return callSite;
}

// Stores the raw call-site index bits into the tag word of the
// argumentCountIncludingThis call-frame slot.
void emitStoreCallSiteIndex(CallSiteIndex callSite)
{
    store32(TrustedImm32(callSite.bits()), tagFor(CallFrameSlot::argumentCountIncludingThis));
}

// Add a call out from JIT code, without an exception check.
// NOTE(review): throughout this span, template argument lists appear to have
// been stripped by extraction (FunctionPtr, retagged(), InlineCacheWrapper,
// "template" without a parameter list, RefPtr, Vector) -- confirm against the
// upstream header before relying on the exact spellings below.

// Emits a call (tagged OperationPtrTag) and records it, together with the
// retagged target function, for linking later.
Call appendCall(const FunctionPtr function)
{
    Call functionCall = call(OperationPtrTag);
    m_calls.append(CallLinkRecord(functionCall, function.retagged()));
    return functionCall;
}

// Like appendCall(), but the function pointer is recorded as-is, without
// retagging.
Call appendOperationCall(const FunctionPtr function)
{
    Call functionCall = call(OperationPtrTag);
    m_calls.append(CallLinkRecord(functionCall, function));
    return functionCall;
}

void exceptionCheck();

// Emits an exception check whose jump is collected separately: these exits
// must roll back the call frame rather than take the normal exception path.
void exceptionCheckWithCallFrameRollback()
{
    m_exceptionChecksWithCallFrameRollback.append(emitExceptionCheck(vm()));
}

// Appends a fresh OSR exit compilation record seeded with the given failure
// jumps, and returns a reference to it for further setup by the caller.
OSRExitCompilationInfo& appendExitInfo(MacroAssembler::JumpList jumpsToFail = MacroAssembler::JumpList())
{
    OSRExitCompilationInfo info;
    info.m_failureJumps = jumpsToFail;
    m_exitCompilationInfo.append(info);
    return m_exitCompilationInfo.last();
}

#if USE(JSVALUE32_64)
void* addressOfDoubleConstant(Node*);
#endif

// The add* helpers below pair each inline-cache generator with its slow-path
// generator so the two can be finalized together at link time.
void addGetById(const JITGetByIdGenerator& gen, SlowPathGenerator* slowPath)
{
    m_getByIds.append(InlineCacheWrapper(gen, slowPath));
}

void addGetByIdWithThis(const JITGetByIdWithThisGenerator& gen, SlowPathGenerator* slowPath)
{
    m_getByIdsWithThis.append(InlineCacheWrapper(gen, slowPath));
}

void addGetByVal(const JITGetByValGenerator& gen, SlowPathGenerator* slowPath)
{
    m_getByVals.append(InlineCacheWrapper(gen, slowPath));
}

void addPutById(const JITPutByIdGenerator& gen, SlowPathGenerator* slowPath)
{
    m_putByIds.append(InlineCacheWrapper(gen, slowPath));
}

void addDelById(const JITDelByIdGenerator& gen, SlowPathGenerator* slowPath)
{
    m_delByIds.append(InlineCacheWrapper(gen, slowPath));
}

void addDelByVal(const JITDelByValGenerator& gen, SlowPathGenerator* slowPath)
{
    m_delByVals.append(InlineCacheWrapper(gen, slowPath));
}

void addInstanceOf(const JITInstanceOfGenerator& gen, SlowPathGenerator* slowPath)
{
    m_instanceOfs.append(InlineCacheWrapper(gen, slowPath));
}

void addInById(const JITInByIdGenerator& gen, SlowPathGenerator* slowPath)
{
    m_inByIds.append(InlineCacheWrapper(gen, slowPath));
}

// Records a JS call (fast path, slow path, and the patchable callee check)
// for linking through its CallLinkInfo.
void addJSCall(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo* info)
{
    m_jsCalls.append(JSCallRecord(fastCall, slowCall, targetToCheck, info));
}

void addJSDirectCall(Call call, Label slowPath, CallLinkInfo* info)
{
    m_jsDirectCalls.append(JSDirectCallRecord(call, slowPath, info));
}

void addJSDirectTailCall(PatchableJump patchableJump, Call call, Label slowPath, CallLinkInfo* info)
{
    m_jsDirectTailCalls.append(JSDirectTailCallRecord(patchableJump, call, slowPath, info));
}

// Lazily registers a cell as a weak reference held by this compilation plan.
void addWeakReference(JSCell* target)
{
    m_graph.m_plan.weakReferences().addLazily(target);
}

// Registers every structure in the set as a weak reference.
void addWeakReferences(const StructureSet& structureSet)
{
    for (unsigned i = structureSet.size(); i--;)
        addWeakReference(structureSet[i]);
}

// Compares against a weakly-held cell pointer; the cell is registered as a
// weak reference so the code is jettisoned if it dies.
template Jump branchWeakPtr(RelationalCondition cond, T left, JSCell* weakPtr)
{
    Jump result = branchPtr(cond, left, TrustedImmPtr(weakPtr));
    addWeakReference(weakPtr);
    return result;
}

// Compares against a registered structure. On 64-bit the comparison uses the
// 32-bit structure ID; on 32-bit it compares the structure pointer itself.
template Jump branchWeakStructure(RelationalCondition cond, T left, RegisteredStructure weakStructure)
{
    Structure* structure = weakStructure.get();
#if USE(JSVALUE64)
    Jump result = branch32(cond, left, TrustedImm32(structure->id()));
    return result;
#else
    return branchPtr(cond, left, TrustedImmPtr(structure));
#endif
}

void noticeOSREntry(BasicBlock&, JITCompiler::Label blockHead, LinkBuffer&);
void noticeCatchEntrypoint(BasicBlock&, JITCompiler::Label blockHead, LinkBuffer&, Vector&& argumentFormats);

RefPtr jitCode() { return m_jitCode; }

// NOTE(review): the chunk is truncated here, mid-declaration.
Vector