#define __ _masm->

-class DowncallStubGenerator : public StubCodeGenerator {
-  BasicType* _signature;
-  int _num_args;
-  BasicType _ret_bt;
-  const ABIDescriptor& _abi;
-
-  const GrowableArray<VMStorage>& _input_registers;
-  const GrowableArray<VMStorage>& _output_registers;
-
-  bool _needs_return_buffer;
-  int _captured_state_mask;
-  bool _needs_transition;
-
-  int _frame_complete;
-  int _frame_size_slots;
-  OopMapSet* _oop_maps;
-public:
-  DowncallStubGenerator(CodeBuffer* buffer,
-                        BasicType* signature,
-                        int num_args,
-                        BasicType ret_bt,
-                        const ABIDescriptor& abi,
-                        const GrowableArray<VMStorage>& input_registers,
-                        const GrowableArray<VMStorage>& output_registers,
-                        bool needs_return_buffer,
-                        int captured_state_mask,
-                        bool needs_transition)
-   : StubCodeGenerator(buffer, PrintMethodHandleStubs),
-     _signature(signature),
-     _num_args(num_args),
-     _ret_bt(ret_bt),
-     _abi(abi),
-     _input_registers(input_registers),
-     _output_registers(output_registers),
-     _needs_return_buffer(needs_return_buffer),
-     _captured_state_mask(captured_state_mask),
-     _needs_transition(needs_transition),
-     _frame_complete(0),
-     _frame_size_slots(0),
-     _oop_maps(nullptr) {
-  }
-
-  void generate();
-
-  int frame_complete() const {
-    return _frame_complete;
-  }
-
-  int framesize() const {
-    return (_frame_size_slots >> (LogBytesPerWord - LogBytesPerInt));
-  }
-
-  OopMapSet* oop_maps() const {
-    return _oop_maps;
-  }
-};
-
static const int native_invoker_code_base_size = 256;
static const int native_invoker_size_per_arg = 8;
@@ -108,10 +51,10 @@ RuntimeStub* DowncallLinker::make_downcall_stub(BasicType* signature,
  int code_size = native_invoker_code_base_size + (num_args * native_invoker_size_per_arg);
  int locs_size = 1; // must be non-zero
  CodeBuffer code("nep_invoker_blob", code_size, locs_size);
-  DowncallStubGenerator g(&code, signature, num_args, ret_bt, abi,
-                          input_registers, output_registers,
-                          needs_return_buffer, captured_state_mask,
-                          needs_transition);
+  StubGenerator g(&code, signature, num_args, ret_bt, abi,
+                  input_registers, output_registers,
+                  needs_return_buffer, captured_state_mask,
+                  needs_transition);
  g.generate();
  code.log_section_sizes("nep_invoker_blob");
@@ -134,7 +77,39 @@ RuntimeStub* DowncallLinker::make_downcall_stub(BasicType* signature,
  return stub;
}

-void DowncallStubGenerator::generate() {
+static constexpr int RFP_BIAS = 16; // skip old rfp and return address
+
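+// Oop arguments arrive together with a long offset. This helper folds that offset into
+// the oop base (each operand may live in a register or in a stack slot addressed off
+// rfp) so the downcall target receives a plain address.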
+void DowncallLinker::StubGenerator::pd_add_offset_to_oop(VMStorage reg_oop, VMStorage reg_offset, VMStorage tmp1, VMStorage tmp2) const {
+  Register r_tmp1 = as_Register(tmp1);
+  Register r_tmp2 = as_Register(tmp2);
+  if (reg_oop.is_reg()) {
+    assert(reg_oop.type() == StorageType::INTEGER, "expected");
+    Register reg_oop_reg = as_Register(reg_oop);
+    if (reg_offset.is_reg()) {
+      assert(reg_offset.type() == StorageType::INTEGER, "expected");
+      __ add(reg_oop_reg, reg_oop_reg, as_Register(reg_offset));
+    } else {
+      assert(reg_offset.is_stack(), "expected");
+      assert(reg_offset.stack_size() == 8, "expected long");
+      Address offset_addr(rfp, RFP_BIAS + reg_offset.offset());
+      __ ldr(r_tmp1, offset_addr);
+      __ add(reg_oop_reg, reg_oop_reg, r_tmp1);
+    }
+  } else {
+    assert(reg_oop.is_stack(), "expected");
+    assert(reg_oop.stack_size() == 8, "expected long");
+    assert(reg_offset.is_stack(), "expected");
+    assert(reg_offset.stack_size() == 8, "expected long");
+    Address offset_addr(rfp, RFP_BIAS + reg_offset.offset());
+    Address oop_addr(rfp, RFP_BIAS + reg_oop.offset());
+    __ ldr(r_tmp1, offset_addr);
+    __ ldr(r_tmp2, oop_addr);
+    __ add(r_tmp1, r_tmp1, r_tmp2);
+    __ str(r_tmp1, oop_addr);
+  }
+}
+
+void DowncallLinker::StubGenerator::generate() {
  enum layout {
    rfp_off,
    rfp_off2,
@@ -150,23 +125,16 @@ void DowncallStubGenerator::generate() {
  Register tmp1 = r9;
  Register tmp2 = r10;

-  VMStorage shuffle_reg = as_VMStorage(r19);
-  JavaCallingConvention in_conv;
-  NativeCallingConvention out_conv(_input_registers);
-  ArgumentShuffle arg_shuffle(_signature, _num_args, _signature, _num_args, &in_conv, &out_conv, shuffle_reg);
-
-#ifndef PRODUCT
-  LogTarget(Trace, foreign, downcall) lt;
-  if (lt.is_enabled()) {
-    ResourceMark rm;
-    LogStream ls(lt);
-    arg_shuffle.print_on(&ls);
-  }
-#endif
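+  // Compute the Java-side argument locations, then filter out the offset slots that
+  // accompany oop arguments; has_objects records whether any oops are passed at all.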
+  GrowableArray<VMStorage> java_regs;
+  ForeignGlobals::java_calling_convention(_signature, _num_args, java_regs);
+  bool has_objects = false;
+  GrowableArray<VMStorage> filtered_java_regs = ForeignGlobals::downcall_filter_offset_regs(java_regs, _signature,
+                                                                                            _num_args, has_objects);
+  assert(!(_needs_transition && has_objects), "can not pass objects when doing transition");

  int allocated_frame_size = 0;
  assert(_abi._shadow_space_bytes == 0, "not expecting shadow space on AArch64");
-  allocated_frame_size += arg_shuffle.out_arg_bytes();
+  allocated_frame_size += ForeignGlobals::compute_out_arg_bytes(_input_registers);

  bool should_save_return_value = !_needs_return_buffer;
  RegSpiller out_reg_spiller(_output_registers);
@@ -193,6 +161,33 @@ void DowncallStubGenerator::generate() {
    allocated_frame_size += BytesPerWord;
  }

+  // The space we have allocated will look like:
+  //
+  // FP-> |                     |
+  //      |---------------------| = frame_bottom_offset = frame_size
+  //      | (optional)          |
+  //      | capture state buf   |
+  //      |---------------------| = StubLocations::CAPTURED_STATE_BUFFER
+  //      | (optional)          |
+  //      | return buffer       |
+  //      |---------------------| = StubLocations::RETURN_BUFFER
+  // SP-> | out/stack args      | or | out_reg_spiller area |
+  //
+  // Note how the last chunk can be shared, since the 3 uses occur at different times.
+
+  VMStorage shuffle_reg = as_VMStorage(r19);
+  GrowableArray<VMStorage> out_regs = ForeignGlobals::replace_place_holders(_input_registers, locs);
+  ArgumentShuffle arg_shuffle(filtered_java_regs, out_regs, shuffle_reg);
+
+#ifndef PRODUCT
+  LogTarget(Trace, foreign, downcall) lt;
+  if (lt.is_enabled()) {
+    ResourceMark rm;
+    LogStream ls(lt);
+    arg_shuffle.print_on(&ls);
+  }
+#endif
+
  _frame_size_slots = align_up(framesize + (allocated_frame_size >> LogBytesPerInt), 4);
  assert(is_even(_frame_size_slots/2), "sp not 16-byte aligned");
@@ -218,8 +213,12 @@ void DowncallStubGenerator::generate() {
    __ stlrw(tmp1, tmp2);
  }

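+  // Any oop arguments were passed together with an offset; fold the offsets into the
+  // oop bases (using tmp1/tmp2 as scratch) before the arguments are shuffled below.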
+  if (has_objects) {
+    add_offsets_to_oops(java_regs, as_VMStorage(tmp1), as_VMStorage(tmp2));
+  }
+
  __ block_comment("{ argument shuffle");
-  arg_shuffle.generate(_masm, shuffle_reg, 0, _abi._shadow_space_bytes, locs);
+  arg_shuffle.generate(_masm, shuffle_reg, 0, _abi._shadow_space_bytes);
  __ block_comment("} argument shuffle");

  __ blr(as_Register(locs.get(StubLocations::TARGET_ADDRESS)));