1use std::ops::DerefMut;
5
6use move_binary_format::errors::PartialVMResult;
7use move_core_types::{
8 gas_algebra::{AbstractMemorySize, InternalGas, NumArgs, NumBytes},
9 language_storage::ModuleId,
10};
11use move_vm_runtime::{
12 execution::Type,
13 shared::{
14 gas::{GasMeter, SimpleInstruction},
15 views::{SizeConfig, ValueView},
16 },
17};
18use sui_types::gas_model::{
19 gas_predicates::{native_function_threshold_exceeded, use_legacy_abstract_size},
20 tables::{GasStatus, REFERENCE_SIZE, STRUCT_SIZE, VEC_SIZE},
21};
22
/// Newtype adapter that implements the Move VM's `GasMeter` trait on top of
/// any mutably-dereferenceable handle to Sui's `GasStatus` (e.g. `&mut
/// GasStatus`), so the VM can meter execution against Sui's gas tables.
pub struct SuiGasMeter<G: DerefMut<Target = GasStatus>>(pub G);
24
/// Returns the operand-stack effect of a "simple" (constant-cost,
/// non-generic) instruction as `(pops, pushes, pop_size, push_size)`, where
/// the counts are numbers of stack slots and the sizes are abstract memory
/// sizes removed from / added to the stack.
///
/// NOTE(review): several arms use fixed sizes that do not match the actual
/// operand types — every cast charges a `U8`-sized pop regardless of source
/// type, and arithmetic/bitwise/shift ops pop two `U8`s and push a `U256`.
/// These appear to be deliberate gas-model constants (changing them would
/// change gas charging and thus consensus) — confirm against the protocol's
/// gas schedule before "fixing".
fn get_simple_instruction_stack_change(
    instr: SimpleInstruction,
) -> (u64, u64, AbstractMemorySize, AbstractMemorySize) {
    use SimpleInstruction::*;

    match instr {
        // Pure control flow: no operands consumed or produced.
        Nop | Ret => (0, 0, 0.into(), 0.into()),
        // Conditional branches consume the boolean condition.
        BrTrue | BrFalse => (1, 0, Type::Bool.size(), 0.into()),
        Branch => (0, 0, 0.into(), 0.into()),
        // Literal loads push one value of the literal's type.
        LdU8 => (0, 1, 0.into(), Type::U8.size()),
        LdU16 => (0, 1, 0.into(), Type::U16.size()),
        LdU32 => (0, 1, 0.into(), Type::U32.size()),
        LdU64 => (0, 1, 0.into(), Type::U64.size()),
        LdU128 => (0, 1, 0.into(), Type::U128.size()),
        LdU256 => (0, 1, 0.into(), Type::U256.size()),
        LdTrue | LdFalse => (0, 1, 0.into(), Type::Bool.size()),
        // Reference manipulation: one reference in, one reference out.
        FreezeRef => (1, 1, REFERENCE_SIZE, REFERENCE_SIZE),
        // Borrowing a local pushes a fresh reference.
        ImmBorrowLoc | MutBorrowLoc => (0, 1, 0.into(), REFERENCE_SIZE),
        ImmBorrowField | MutBorrowField | ImmBorrowFieldGeneric | MutBorrowFieldGeneric => {
            (1, 1, REFERENCE_SIZE, REFERENCE_SIZE)
        }
        // Casts: pop size is a fixed U8 constant (see NOTE above); push size
        // matches the destination type.
        CastU8 => (1, 1, Type::U8.size(), Type::U8.size()),
        CastU16 => (1, 1, Type::U8.size(), Type::U16.size()),
        CastU32 => (1, 1, Type::U8.size(), Type::U32.size()),
        CastU64 => (1, 1, Type::U8.size(), Type::U64.size()),
        CastU128 => (1, 1, Type::U8.size(), Type::U128.size()),
        CastU256 => (1, 1, Type::U8.size(), Type::U256.size()),
        // Arithmetic/bitwise/shift: fixed two-U8 pop, U256 push (see NOTE).
        Add | Sub | Mul | Mod | Div => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
        BitOr | BitAnd | Xor => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
        Shl | Shr => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
        Or | And => (
            2,
            1,
            Type::Bool.size() + Type::Bool.size(),
            Type::Bool.size(),
        ),
        // Comparisons pop two operands (fixed U8 sizing) and push a bool.
        Lt | Gt | Le | Ge => (2, 1, Type::U8.size() + Type::U8.size(), Type::Bool.size()),
        Not => (1, 1, Type::Bool.size(), Type::Bool.size()),
        // Abort consumes the u64 abort code and terminates the frame.
        Abort => (1, 0, Type::U64.size(), 0.into()),
    }
}
72
/// `GasMeter` implementation that forwards every VM event to the underlying
/// `GasStatus`. The charge amounts below are gas-model constants; the
/// `GasStatus::charge` argument order is inferred from usage as
/// `(instructions, pushes, pops, size_increase, size_decrease)` — confirm
/// against `GasStatus`'s definition.
impl<G: DerefMut<Target = GasStatus>> GasMeter for SuiGasMeter<G> {
    fn charge_simple_instr(&mut self, instr: SimpleInstruction) -> PartialVMResult<()> {
        // Table-driven stack effect; charge exactly one instruction with it.
        let (pops, pushes, pop_size, push_size) = get_simple_instruction_stack_change(instr);
        self.0
            .charge(1, pushes, pops, push_size.into(), pop_size.into())
    }

    fn charge_pop(&mut self, popped_val: impl ValueView) -> PartialVMResult<()> {
        // Popping removes one value; stack shrinks by its abstract size.
        let decr_size = abstract_memory_size(&self.0, popped_val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    fn charge_native_function(
        &mut self,
        amount: InternalGas,
        ret_vals: Option<impl ExactSizeIterator<Item = impl ValueView>>,
    ) -> PartialVMResult<()> {
        // Number of values the native pushes back onto the operand stack
        // (zero when the native returns nothing).
        let pushes = ret_vals
            .as_ref()
            .map(|ret_vals| ret_vals.len())
            .unwrap_or(0) as u64;
        // Total abstract size of the returned values.
        let size_increase = match ret_vals {
            Some(mut ret_vals) => ret_vals.try_fold(
                AbstractMemorySize::zero(),
                |acc, elem| -> PartialVMResult<_> {
                    Ok(acc + abstract_memory_size(&self.0, elem)?)
                },
            )?,
            None => AbstractMemorySize::zero(),
        };
        self.0.record_native_call();
        if native_function_threshold_exceeded(self.0.gas_model_version, self.0.num_native_calls) {
            // Past the native-call threshold, the native's cost is routed
            // through `charge` as an instruction-count-like quantity.
            self.0
                .charge(amount.into(), pushes, 0, size_increase.into(), 0)
        } else {
            // Normal path: charge the stack effect with zero instructions,
            // then deduct the native's own cost directly from the budget.
            self.0.charge(0, pushes, 0, size_increase.into(), 0)?;
            self.0.deduct_gas(amount)
        }
    }

    fn charge_native_function_before_execution(
        &mut self,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // All arguments are popped when dispatching to a native.
        let pops = args.len() as u64;
        // NOTE(review): unlike `charge_call` below, this fold is seeded with
        // `pops` rather than zero, so the size decrease includes one extra
        // unit per argument on top of the arguments' abstract sizes. This
        // looks like a deliberate gas-model constant — confirm against the
        // upstream gas schedule before changing.
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(pops),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    fn charge_call(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // Calling pops every argument; size decrease is the sum of their
        // abstract sizes (seeded with zero, unlike the native pre-charge).
        let pops = args.len() as u64;
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    fn charge_call_generic(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // Generic calls are charged identically to non-generic `charge_call`;
        // type arguments do not affect the charge here.
        let pops = args.len() as u64;
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    fn charge_ld_const(&mut self, size: NumBytes) -> PartialVMResult<()> {
        // Loading a constant pushes one value sized by its byte length.
        self.0.charge(1, 1, 0, u64::from(size), 0)
    }

    fn charge_ld_const_after_deserialization(
        &mut self,
        _val: impl ValueView,
    ) -> PartialVMResult<()> {
        // Deserializing the constant is not charged separately; the cost is
        // covered by `charge_ld_const`.
        Ok(())
    }

    fn charge_copy_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Copying a local pushes a full copy: grow by its abstract size.
        let incr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 1, 0, incr_size.into(), 0)
    }

    fn charge_move_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        if reweight_move_loc(self.0.gas_model_version) {
            // Newer gas models: moving a local is charged a flat
            // reference-sized increase (the value is relocated, not copied).
            self.0.charge(1, 1, 0, REFERENCE_SIZE.into(), 0)
        } else {
            // Legacy models charged the full abstract size of the value.
            let incr_size = abstract_memory_size(&self.0, val)?;
            self.0.charge(1, 1, 0, incr_size.into(), 0)
        }
    }

    fn charge_store_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Storing pops the value off the stack into a local slot.
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    fn charge_pack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Pack pops each field and pushes one struct; only the flat
        // STRUCT_SIZE is charged as increase — field sizes are not summed.
        let num_fields = args.len() as u64;
        self.0.charge(1, 1, num_fields, STRUCT_SIZE.into(), 0)
    }

    fn charge_unpack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Mirror of `charge_pack`: pop one struct (flat STRUCT_SIZE
        // decrease), push each field.
        let num_fields = args.len() as u64;
        self.0.charge(1, num_fields, 1, 0, STRUCT_SIZE.into())
    }

    fn charge_variant_switch(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Switching on an enum consumes the scrutinee value.
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    fn charge_read_ref(&mut self, ref_val: impl ValueView) -> PartialVMResult<()> {
        // Reading through a reference replaces the reference with a copy of
        // the value. Newer models size the copy with reference traversal.
        let size = if reweight_read_ref(self.0.gas_model_version) {
            abstract_memory_size_with_traversal(&self.0, ref_val)?
        } else {
            abstract_memory_size(&self.0, ref_val)?
        };
        self.0.charge(1, 1, 1, size.into(), REFERENCE_SIZE.into())
    }

    fn charge_write_ref(
        &mut self,
        new_val: impl ValueView,
        old_val: impl ValueView,
    ) -> PartialVMResult<()> {
        // Write-ref pops the reference and the new value. Newer models
        // correct the push count to zero (nothing is pushed back).
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 2)
        } else {
            (1, 2)
        };
        let incr_size = abstract_memory_size(&self.0, new_val)?;
        let decr_size = abstract_memory_size(&self.0, old_val)?;
        self.0
            .charge(1, pushes, pops, incr_size.into(), decr_size.into())
    }

    fn charge_eq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        // Both operands are consumed (sized with traversal) and a bool is
        // pushed. Note the increase also re-adds the operands' sizes — a
        // gas-model constant; the net stack size change is just the bool.
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        self.0.charge(
            1,
            1,
            2,
            (Type::Bool.size() + size_reduction).into(),
            size_reduction.into(),
        )
    }

    fn charge_neq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        // Same shape as `charge_eq`, except the operand-size re-add on the
        // increase side is gated on the traverse-refs feature version.
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        let size_increase = if enable_traverse_refs(self.0.gas_model_version) {
            Type::Bool.size() + size_reduction
        } else {
            Type::Bool.size()
        };
        self.0
            .charge(1, 1, 2, size_increase.into(), size_reduction.into())
    }

    fn charge_vec_pack<'a>(
        &mut self,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // VecPack pops each element and pushes one vector at flat VEC_SIZE;
        // element sizes are not summed (cf. `charge_pack`).
        let num_args = args.len() as u64;
        self.0.charge(1, 1, num_args, VEC_SIZE.into(), 0)
    }

    fn charge_vec_len(&mut self) -> PartialVMResult<()> {
        // Pops the vector reference, pushes a u64 length.
        self.0
            .charge(1, 1, 1, Type::U64.size().into(), REFERENCE_SIZE.into())
    }

    fn charge_vec_borrow(&mut self, _is_mut: bool, _is_success: bool) -> PartialVMResult<()> {
        // Pops the vector reference and the u64 index, pushes an element
        // reference; charged the same whether or not the borrow succeeds.
        self.0.charge(
            1,
            1,
            2,
            REFERENCE_SIZE.into(),
            (REFERENCE_SIZE + Type::U64.size()).into(),
        )
    }

    fn charge_vec_push_back(&mut self, _val: impl ValueView) -> PartialVMResult<()> {
        // Pops the vector reference and the element; the element's own size
        // is not charged here (it stays live inside the vector).
        self.0.charge(1, 0, 2, 0, REFERENCE_SIZE.into())
    }

    fn charge_vec_pop_back(&mut self, _val: Option<impl ValueView>) -> PartialVMResult<()> {
        // Pops the vector reference, pushes the popped element.
        self.0.charge(1, 1, 1, 0, REFERENCE_SIZE.into())
    }

    fn charge_vec_unpack(
        &mut self,
        expect_num_elements: NumArgs,
        _elems: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // VecUnpack pops the vector (flat VEC_SIZE decrease) and pushes the
        // expected number of elements.
        let pushes = u64::from(expect_num_elements);
        self.0.charge(1, pushes, 1, 0, VEC_SIZE.into())
    }

    fn charge_vec_swap(&mut self) -> PartialVMResult<()> {
        // Pops the vector reference and two u64 indices.
        let size_decrease = REFERENCE_SIZE + Type::U64.size() + Type::U64.size();
        // NOTE(review): the legacy branch charges (1 push, 1 pop) while the
        // newer model charges (0 pushes, 3 pops) — the asymmetry appears to
        // be a preserved historical gas-model quirk; confirm upstream before
        // changing.
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 3)
        } else {
            (1, 1)
        };
        self.0.charge(1, pushes, pops, 0, size_decrease.into())
    }

    fn charge_drop_frame(
        &mut self,
        _locals: impl Iterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Dropping a frame's locals is free under this gas model.
        Ok(())
    }

    fn remaining_gas(&self) -> InternalGas {
        // In unmetered mode report an effectively infinite budget.
        if !self.0.charge {
            return InternalGas::new(u64::MAX);
        }
        self.0.gas_left
    }
}
374
375fn abstract_memory_size(
376 status: &GasStatus,
377 val: impl ValueView,
378) -> PartialVMResult<AbstractMemorySize> {
379 let config = size_config_for_gas_model_version(status.gas_model_version, false);
380 val.abstract_memory_size(&config)
381}
382
383fn abstract_memory_size_with_traversal(
384 status: &GasStatus,
385 val: impl ValueView,
386) -> PartialVMResult<AbstractMemorySize> {
387 let config = size_config_for_gas_model_version(status.gas_model_version, true);
388 val.abstract_memory_size(&config)
389}
390
/// Reference traversal during abstract-size computation is enabled starting
/// with gas model version 10.
fn enable_traverse_refs(gas_model_version: u64) -> bool {
    gas_model_version >= 10
}
394
/// From gas model version 11 onward, `ReadRef` sizes the copied value with
/// reference traversal.
fn reweight_read_ref(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}
399
/// From gas model version 11 onward, `MoveLoc` is charged a flat
/// reference-sized increase instead of the value's full abstract size.
fn reweight_move_loc(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}
404
/// From gas model version 11 onward, `WriteRef`/`VecSwap` use corrected
/// push/pop counts that shrink the tracked stack size.
fn reduce_stack_size(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}
409
410fn size_config_for_gas_model_version(
411 gas_model_version: u64,
412 should_traverse_refs: bool,
413) -> SizeConfig {
414 if use_legacy_abstract_size(gas_model_version) {
415 SizeConfig {
416 traverse_references: false,
417 include_vector_size: false,
418 }
419 } else if should_traverse_refs {
420 SizeConfig {
421 traverse_references: enable_traverse_refs(gas_model_version),
422 include_vector_size: true,
423 }
424 } else {
425 SizeConfig {
426 traverse_references: false,
427 include_vector_size: true,
428 }
429 }
430}