CodeBlock.h revision 2bde8e466a4451c7319e3a072d118917957d6554
/*
 * Copyright (C) 2008, 2009, 2010 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
#include "RegExp.h"
#include "UString.h"
#include <wtf/FastAllocBase.h>
#include <wtf/PassOwnPtr.h>
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

#if ENABLE(JIT)
#include "StructureStubInfo.h"
#endif

// Register numbers used in bytecode operations have different meaning according to their ranges:
//      0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame, see RegisterFile.h.
//      0x00000000-0x3FFFFFFF  Forwards indices from the CallFrame pointer are local vars and temporaries within the function's callframe.
//      0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
static const int FirstConstantRegisterIndex = 0x40000000;
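// For example, a bytecode operand of 0x40000001 (FirstConstantRegisterIndex + 1) names entry 1 of the
// CodeBlock's constant pool; constantRegister() and getConstant() below subtract this base to index it.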

namespace JSC {

    enum HasSeenShouldRepatch {
        hasSeenShouldRepatch
    };

    class ExecState;

    enum CodeType { GlobalCode, EvalCode, FunctionCode };

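    // The register just before the arguments register holds the original ("unmodified") arguments object;
    // this helper recovers its index, presumably so the original value stays reachable even if the
    // 'arguments' variable is later reassigned.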
    inline int unmodifiedArgumentsRegister(int argumentsRegister) { return argumentsRegister - 1; }

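    // Sentinel register index (INT_MAX) marking the absence of a 'this' object at a call site.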
    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        CodeLocationLabel nativeCode;
#endif
    };

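    // Maps a bytecode instruction back to a source range. The divot is the character position used when
    // reporting errors for the expression; startOffset and endOffset describe the expression's extent around
    // the divot, which is why they get 7 bits (MaxOffset) while the divot itself gets 25 (MaxDivot).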
    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };

    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };

#if ENABLE(JIT)
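    // Records the code locations the JIT patches when linking a call site directly to its callee; the
    // seenOnce()/setSeen() pair lets the runtime hold off repatching until the site has been hit at least once.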
    struct CallLinkInfo {
        CallLinkInfo()
            : hasSeenShouldRepatch(false)
        {
        }

        CodeLocationNearCall callReturnLocation;
        CodeLocationDataLabelPtr hotPathBegin;
        CodeLocationNearCall hotPathOther;
        WriteBarrier<JSFunction> callee;
        bool hasSeenShouldRepatch;

        void setUnlinked() { callee.clear(); }
        bool isLinked() { return callee; }

        bool seenOnce()
        {
            return hasSeenShouldRepatch;
        }

        void setSeen()
        {
            hasSeenShouldRepatch = true;
        }
    };

    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
            , cachedPrototypeStructure(0)
        {
        }

        bool seenOnce()
        {
            ASSERT(!cachedStructure);
            return cachedPrototypeStructure;
        }

        void setSeen()
        {
            ASSERT(!cachedStructure && !cachedPrototypeStructure);
            // We use the values of cachedStructure & cachedPrototypeStructure to indicate the
            // current state.
            //     - In the initial state, both are null.
            //     - Once this transition has been taken once, cachedStructure is
            //       null and cachedPrototypeStructure is set to a non-null value.
            //     - Once the call is linked both structures are set to non-null values.
            cachedPrototypeStructure = (Structure*)1;
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
        Structure* cachedPrototypeStructure;
    };

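    // Caches the outcome of a global variable resolve for a given bytecode offset: once the lookup succeeds,
    // the Structure and property offset recorded here let later executions skip the full lookup.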
    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };

    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeOffset {
        CallReturnOffsetToBytecodeOffset(unsigned callReturnOffset, unsigned bytecodeOffset)
            : callReturnOffset(callReturnOffset)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeOffset;
    };

    // valueAtPosition helpers for the binarySearch algorithm.
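    // Note that binarySearch requires sorted input; the vectors these helpers are applied to are appended to
    // in code-generation order, so their return locations and offsets are expected to increase monotonically.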

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }

    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeOffset* pc)
    {
        return pc->callReturnOffset;
    }
#endif

    class CodeBlock {
        WTF_MAKE_FAST_ALLOCATED;
        friend class JIT;
    protected:
        CodeBlock(ScriptExecutable* ownerExecutable, CodeType, JSGlobalObject*, PassRefPtr<SourceProvider>, unsigned sourceOffset, SymbolTable* symbolTable, bool isConstructor);

        WriteBarrier<JSGlobalObject> m_globalObject;
        Heap* m_heap;

    public:
        virtual ~CodeBlock();

        void markAggregate(MarkStack&);
        void refStructures(Instruction* vPC) const;
        void derefStructures(Instruction* vPC) const;

        static void dumpStatistics();

#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*) const;
        void printStructures(const Instruction*) const;
        void printStructure(const char* name, const Instruction*, int operand) const;
#endif

        bool isStrictMode() const { return m_isStrictMode; }

        inline bool isKnownNotImmediate(int index)
        {
            if (index == m_thisRegister && !m_isStrictMode)
                return true;

            if (isConstantRegisterIndex(index))
                return getConstant(index).isCell();

            return false;
        }

        ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
        {
            return index >= m_numVars;
        }

        HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
        int lineNumberForBytecodeOffset(unsigned bytecodeOffset);
        void expressionRangeForBytecodeOffset(unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);

#if ENABLE(JIT)

        StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
        {
            return *(binarySearch<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
        }

        CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binarySearch<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
        }

        MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binarySearch<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
        }

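        // Maps a machine return address back to the bytecode offset of the call that produced it, using the
        // sorted call return index vector; falls back to offset 1 when no indices were recorded for this block.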
        unsigned bytecodeOffset(ReturnAddressPtr returnAddress)
        {
            if (!m_rareData)
                return 1;
            Vector<CallReturnOffsetToBytecodeOffset>& callIndices = m_rareData->m_callReturnIndexVector;
            if (!callIndices.size())
                return 1;
            return binarySearch<CallReturnOffsetToBytecodeOffset, unsigned, getCallReturnOffset>(callIndices.begin(), callIndices.size(), getJITCode().offsetOf(returnAddress.value()))->bytecodeOffset;
        }
#endif
#if ENABLE(INTERPRETER)
        unsigned bytecodeOffset(Instruction* returnAddress)
        {
            return static_cast<Instruction*>(returnAddress) - instructions().begin();
        }
#endif

        void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
        bool isNumericCompareFunction() { return m_isNumericCompareFunction; }

        Vector<Instruction>& instructions() { return m_instructions; }
        void discardBytecode() { m_instructions.clear(); }

#ifndef NDEBUG
        unsigned instructionCount() { return m_instructionCount; }
        void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
#endif

#if ENABLE(JIT)
        JITCode& getJITCode() { return m_isConstructor ? ownerExecutable()->generatedJITCodeForConstruct() : ownerExecutable()->generatedJITCodeForCall(); }
        ExecutablePool* executablePool() { return getJITCode().getExecutablePool(); }
#endif

        ScriptExecutable* ownerExecutable() const { return m_ownerExecutable.get(); }

        void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }

        void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
        int thisRegister() const { return m_thisRegister; }

        void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
        bool needsFullScopeChain() const { return m_needsFullScopeChain; }
        void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
        bool usesEval() const { return m_usesEval; }

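        // m_argumentsRegister uses -1 as "no arguments register", which is why setArgumentsRegister() asserts
        // against -1 and usesArguments() below simply compares with it.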
        void setArgumentsRegister(int argumentsRegister)
        {
            ASSERT(argumentsRegister != -1);
            m_argumentsRegister = argumentsRegister;
            ASSERT(usesArguments());
        }
        int argumentsRegister()
        {
            ASSERT(usesArguments());
            return m_argumentsRegister;
        }
        void setActivationRegister(int activationRegister)
        {
            m_activationRegister = activationRegister;
        }
        int activationRegister()
        {
            ASSERT(needsFullScopeChain());
            return m_activationRegister;
        }
        bool usesArguments() const { return m_argumentsRegister != -1; }

        CodeType codeType() const { return m_codeType; }

        SourceProvider* source() const { return m_source.get(); }
        unsigned sourceOffset() const { return m_sourceOffset; }

        size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
        void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
        unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
        unsigned lastJumpTarget() const { return m_jumpTargets.last(); }

        void createActivation(CallFrame*);

#if ENABLE(INTERPRETER)
        void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
        void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
        bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
#endif
#if ENABLE(JIT)
        size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
        void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
        StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }

        void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
        GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
        bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);

        size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
        void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
        CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }

        void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
        MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }
#endif

        // Exception handling support

        size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
        void addExceptionHandler(const HandlerInfo& handler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(handler); }
        HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

        void addExpressionInfo(const ExpressionRangeInfo& expressionInfo)
        {
            createRareDataIfNecessary();
            m_rareData->m_expressionInfo.append(expressionInfo);
        }

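        // Only appends a new LineInfo entry when the line number changes; consecutive instructions on the same
        // source line share the previous entry, keeping the table compact.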
        void addLineInfo(unsigned bytecodeOffset, int lineNo)
        {
            createRareDataIfNecessary();
            Vector<LineInfo>& lineInfo = m_rareData->m_lineInfo;
            if (!lineInfo.size() || lineInfo.last().lineNumber != lineNo) {
                LineInfo info = { bytecodeOffset, lineNo };
                lineInfo.append(info);
            }
        }

        bool hasExpressionInfo() { return m_rareData && m_rareData->m_expressionInfo.size(); }
        bool hasLineInfo() { return m_rareData && m_rareData->m_lineInfo.size(); }
        // We only generate exception handling info if the user is debugging
        // (and may want line number info), or if the function contains an exception handler.
        bool needsCallReturnIndices()
        {
            return m_rareData &&
                (m_rareData->m_expressionInfo.size() || m_rareData->m_lineInfo.size() || m_rareData->m_exceptionHandlers.size());
        }

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeOffset>& callReturnIndexVector()
        {
            createRareDataIfNecessary();
            return m_rareData->m_callReturnIndexVector;
        }
#endif

        // Constant Pool

        size_t numberOfIdentifiers() const { return m_identifiers.size(); }
        void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
        Identifier& identifier(int index) { return m_identifiers[index]; }

        size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
        void addConstant(JSValue v)
        {
            m_constantRegisters.append(WriteBarrier<Unknown>());
            m_constantRegisters.last().set(m_globalObject->globalData(), m_ownerExecutable.get(), v);
        }
        WriteBarrier<Unknown>& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
        ALWAYS_INLINE bool isConstantRegisterIndex(int index) const { return index >= FirstConstantRegisterIndex; }
        ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].get(); }

        unsigned addFunctionDecl(FunctionExecutable* n)
        {
            unsigned size = m_functionDecls.size();
            m_functionDecls.append(WriteBarrier<FunctionExecutable>());
            m_functionDecls.last().set(m_globalObject->globalData(), m_ownerExecutable.get(), n);
            return size;
        }
        FunctionExecutable* functionDecl(int index) { return m_functionDecls[index].get(); }
        int numberOfFunctionDecls() { return m_functionDecls.size(); }
        unsigned addFunctionExpr(FunctionExecutable* n)
        {
            unsigned size = m_functionExprs.size();
            m_functionExprs.append(WriteBarrier<FunctionExecutable>());
            m_functionExprs.last().set(m_globalObject->globalData(), m_ownerExecutable.get(), n);
            return size;
        }
        FunctionExecutable* functionExpr(int index) { return m_functionExprs[index].get(); }

        unsigned addRegExp(PassRefPtr<RegExp> r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
        RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }

        JSGlobalObject* globalObject() { return m_globalObject.get(); }

        // Jump Tables

        size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
        SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }

        size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
        SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }

        size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
        StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
        StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }


        SymbolTable* symbolTable() { return m_symbolTable; }
        SharedSymbolTable* sharedSymbolTable() { ASSERT(m_codeType == FunctionCode); return static_cast<SharedSymbolTable*>(m_symbolTable); }

        EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }

        void shrinkToFit();

        // FIXME: Make these remaining members private.

        int m_numCalleeRegisters;
        int m_numVars;
        int m_numCapturedVars;
        int m_numParameters;
        bool m_isConstructor;

    private:
#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;

        CString registerName(ExecState*, int r) const;
        void printUnaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printBinaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printConditionalJump(ExecState*, const Vector<Instruction>::const_iterator&, Vector<Instruction>::const_iterator&, int location, const char* op) const;
        void printGetByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
#endif

        void createRareDataIfNecessary()
        {
            if (!m_rareData)
                m_rareData = adoptPtr(new RareData);
        }

        WriteBarrier<ScriptExecutable> m_ownerExecutable;
        JSGlobalData* m_globalData;

        Vector<Instruction> m_instructions;
#ifndef NDEBUG
        unsigned m_instructionCount;
#endif

        int m_thisRegister;
        int m_argumentsRegister;
        int m_activationRegister;

        bool m_needsFullScopeChain;
        bool m_usesEval;
        bool m_isNumericCompareFunction;
        bool m_isStrictMode;

        CodeType m_codeType;

        RefPtr<SourceProvider> m_source;
        unsigned m_sourceOffset;

#if ENABLE(INTERPRETER)
        Vector<unsigned> m_propertyAccessInstructions;
        Vector<unsigned> m_globalResolveInstructions;
#endif
#if ENABLE(JIT)
        Vector<StructureStubInfo> m_structureStubInfos;
        Vector<GlobalResolveInfo> m_globalResolveInfos;
        Vector<CallLinkInfo> m_callLinkInfos;
        Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
#endif

        Vector<unsigned> m_jumpTargets;

        // Constant Pool
        Vector<Identifier> m_identifiers;
        COMPILE_ASSERT(sizeof(Register) == sizeof(WriteBarrier<Unknown>), Register_must_be_same_size_as_WriteBarrier_Unknown);
        Vector<WriteBarrier<Unknown> > m_constantRegisters;
        Vector<WriteBarrier<FunctionExecutable> > m_functionDecls;
        Vector<WriteBarrier<FunctionExecutable> > m_functionExprs;

        SymbolTable* m_symbolTable;

        struct RareData {
            WTF_MAKE_FAST_ALLOCATED;
        public:
            Vector<HandlerInfo> m_exceptionHandlers;

            // Rare Constants
            Vector<RefPtr<RegExp> > m_regexps;

            // Jump Tables
            Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
            Vector<SimpleJumpTable> m_characterSwitchJumpTables;
            Vector<StringJumpTable> m_stringSwitchJumpTables;

            EvalCodeCache m_evalCodeCache;

            // Expression info - present if debugging.
            Vector<ExpressionRangeInfo> m_expressionInfo;
            // Line info - present if profiling or debugging.
            Vector<LineInfo> m_lineInfo;
#if ENABLE(JIT)
            Vector<CallReturnOffsetToBytecodeOffset> m_callReturnIndexVector;
#endif
        };
#if COMPILER(MSVC)
        friend void WTF::deleteOwnedPtr<RareData>(RareData*);
#endif
        OwnPtr<RareData> m_rareData;
    };

    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class GlobalCodeBlock : public CodeBlock {
    public:
        GlobalCodeBlock(ScriptExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset)
            : CodeBlock(ownerExecutable, codeType, globalObject, sourceProvider, sourceOffset, &m_unsharedSymbolTable, false)
        {
        }

    private:
        SymbolTable m_unsharedSymbolTable;
    };

    class ProgramCodeBlock : public GlobalCodeBlock {
    public:
        ProgramCodeBlock(ProgramExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : GlobalCodeBlock(ownerExecutable, codeType, globalObject, sourceProvider, 0)
        {
        }
    };

    class EvalCodeBlock : public GlobalCodeBlock {
    public:
        EvalCodeBlock(EvalExecutable* ownerExecutable, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : GlobalCodeBlock(ownerExecutable, EvalCode, globalObject, sourceProvider, 0)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

        const Identifier& variable(unsigned index) { return m_variables[index]; }
        unsigned numVariables() { return m_variables.size(); }
        void adoptVariables(Vector<Identifier>& variables)
        {
            ASSERT(m_variables.isEmpty());
            m_variables.swap(variables);
        }

    private:
        int m_baseScopeDepth;
        Vector<Identifier> m_variables;
    };

    class FunctionCodeBlock : public CodeBlock {
    public:
        // Rather than using the usual RefCounted::create idiom for SharedSymbolTable we just use new
        // as we need to initialise the CodeBlock before we could initialise any RefPtr to hold the shared
        // symbol table, so we just pass as a raw pointer with a ref count of 1.  We then manually deref
        // in the destructor.
        FunctionCodeBlock(FunctionExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, bool isConstructor)
            : CodeBlock(ownerExecutable, codeType, globalObject, sourceProvider, sourceOffset, SharedSymbolTable::create().leakRef(), isConstructor)
        {
        }
        ~FunctionCodeBlock()
        {
            sharedSymbolTable()->deref();
        }
    };

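    // Registers are addressed relative to the call frame: constant "register" indices are redirected into the
    // owning CodeBlock's constant pool, while all other indices address the frame directly.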
    inline Register& ExecState::r(int index)
    {
        CodeBlock* codeBlock = this->codeBlock();
        if (codeBlock->isConstantRegisterIndex(index))
            return *reinterpret_cast<Register*>(&codeBlock->constantRegister(index));
        return this[index];
    }

    inline Register& ExecState::uncheckedR(int index)
    {
        ASSERT(index < FirstConstantRegisterIndex);
        return this[index];
    }

} // namespace JSC

#endif // CodeBlock_h