/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_

#include "base/casts.h"
#include "base/macros.h"
#include "code_generator.h"
#include "data_type-inl.h"
#include "dex/dex_file-inl.h"
#include "locations.h"
#include "mirror/var_handle.h"
#include "nodes.h"
#include "utils/assembler.h"
#include "utils/label.h"

namespace art {

// Default slow-path for fallback (calling the managed code to handle the intrinsic) in an
// intrinsified call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
//
// Note: If an invoke wasn't sharpened, we will put down an invoke-virtual here. That's potentially
//       sub-optimal (compared to a direct pointer call), but this is a slow-path.
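//
// A sketch of typical use (illustrative only; the calling-convention visitor
// InvokeDexCallingConventionVisitorX86 and the `__` assembler macro are assumed from an x86
// back end, and the fast-path code is elided):
//
//   SlowPathCode* slow_path = new (codegen->GetScopedAllocator())
//       IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>(invoke);
//   codegen->AddSlowPath(slow_path);
//   __ j(kEqual, slow_path->GetEntryLabel());  // Take the fallback on this condition.
//   // ... fast-path code ...
//   __ Bind(slow_path->GetExitLabel());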

template <typename TDexCallingConvention,
          typename TSlowPathCode = SlowPathCode,
          typename TAssembler = Assembler>
class IntrinsicSlowPath : public TSlowPathCode {
 public:
  explicit IntrinsicSlowPath(HInvoke* invoke) : TSlowPathCode(invoke), invoke_(invoke) { }

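  // Moves the intrinsified invoke's arguments into the positions required by the regular Dex
  // calling convention and returns the location in which the called ArtMethod* is expected.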
  Location MoveArguments(CodeGenerator* codegen) {
    TDexCallingConvention calling_convention_visitor;
    IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor);
    return calling_convention_visitor.GetMethodLocation();
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());
    assembler->Bind(this->GetEntryLabel());

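    // Save the live registers recorded in the invoke's locations; the call below may clobber them.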
    this->SaveLiveRegisters(codegen, invoke_->GetLocations());

    Location method_loc = MoveArguments(codegen);

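    // Re-issue the original, non-intrinsified call according to the invoke kind.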
    if (invoke_->IsInvokeStaticOrDirect()) {
      HInvokeStaticOrDirect* invoke_static_or_direct = invoke_->AsInvokeStaticOrDirect();
      DCHECK_NE(invoke_static_or_direct->GetMethodLoadKind(), MethodLoadKind::kRecursive);
      DCHECK_NE(invoke_static_or_direct->GetCodePtrLocation(),
                CodePtrLocation::kCallCriticalNative);
      codegen->GenerateStaticOrDirectCall(invoke_static_or_direct, method_loc, this);
    } else if (invoke_->IsInvokeVirtual()) {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this);
    } else {
      DCHECK(invoke_->IsInvokePolymorphic());
      codegen->GenerateInvokePolymorphicCall(invoke_->AsInvokePolymorphic(), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegisterKind());  // TODO: Replace this when we support output in memory.
      // We want to double-check that we don't overwrite a live register with the return
      // value.
      // Note: For the possible kNoOutputOverlap case we can't simply remove the OUT register
      // from the GetLiveRegisters() - theoretically it might be needed after the return from
      // the slow path.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->OverlapsRegisters(out));
      codegen->MoveFromReturnRegister(out, invoke_->GetType());
    }

    this->RestoreLiveRegisters(codegen, invoke_->GetLocations());
    assembler->Jump(this->GetExitLabel());
  }

  const char* GetDescription() const override { return "IntrinsicSlowPath"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPath);
};

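// Returns the number of coordinate arguments expected by a VarHandle accessor intrinsic: the
// accessor's arguments minus the VarHandle receiver itself and minus the value arguments implied
// by the access mode template (e.g. one new value for "set", expected and new values for
// "compareAndSet").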
static inline size_t GetExpectedVarHandleCoordinatesCount(HInvoke* invoke) {
  mirror::VarHandle::AccessModeTemplate access_mode_template =
      mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
  size_t var_type_count = mirror::VarHandle::GetNumberOfVarTypeParameters(access_mode_template);
  size_t accessor_argument_count = invoke->GetNumberOfArguments() - 1;

  return accessor_argument_count - var_type_count;
}

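// Returns the data type at position `index` in the shorty of the invoke-polymorphic's proto
// (index 0 is the return type; parameters follow).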
static inline DataType::Type GetDataTypeFromShorty(HInvoke* invoke, uint32_t index) {
  DCHECK(invoke->IsInvokePolymorphic());
  const DexFile& dex_file = invoke->GetBlock()->GetGraph()->GetDexFile();
  const char* shorty = dex_file.GetShorty(invoke->AsInvokePolymorphic()->GetProtoIndex());
  DCHECK_LT(index, strlen(shorty));

  return DataType::FromShorty(shorty[index]);
}

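// Returns true if the intrinsic is one of the VarHandle getAndBitwise{Or,Xor,And} operations,
// in any of their plain, acquire or release memory-order variants.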
static inline bool IsVarHandleGetAndBitwiseOp(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleGetAndBitwiseOr:
    case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
    case Intrinsics::kVarHandleGetAndBitwiseXor:
    case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
    case Intrinsics::kVarHandleGetAndBitwiseAnd:
    case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
      return true;
    default:
      return false;
  }
}

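// Returns true if the intrinsic is VarHandle getAndAdd in its plain, acquire or release variant.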
static inline bool IsVarHandleGetAndAdd(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleGetAndAdd:
    case Intrinsics::kVarHandleGetAndAddAcquire:
    case Intrinsics::kVarHandleGetAndAddRelease:
      return true;
    default:
      return false;
  }
}

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_