1use std::ops::DerefMut;
5
6use move_binary_format::errors::PartialVMResult;
7use move_core_types::gas_algebra::{AbstractMemorySize, InternalGas, NumArgs, NumBytes};
8use move_vm_runtime::{
9 execution::Type,
10 shared::{
11 gas::{GasMeter, SimpleInstruction},
12 views::{SizeConfig, ValueView},
13 },
14};
15use sui_types::gas_model::{
16 gas_predicates::{
17 legacy_charge_native_pops_args, native_function_threshold_exceeded,
18 use_legacy_abstract_size,
19 },
20 tables::{GasStatus, REFERENCE_SIZE, STRUCT_SIZE, VEC_SIZE},
21};
22
/// Adapter that implements the Move VM's `GasMeter` trait on top of a Sui
/// `GasStatus`. Generic over any mutable handle to a `GasStatus` (e.g.
/// `&mut GasStatus` or an owned guard) via `DerefMut`.
pub struct SuiGasMeter<G: DerefMut<Target = GasStatus>>(pub G);
24
25fn get_simple_instruction_stack_change(
27 instr: SimpleInstruction,
28) -> PartialVMResult<(u64, u64, AbstractMemorySize, AbstractMemorySize)> {
29 use SimpleInstruction::*;
30
31 Ok(match instr {
32 Nop | Ret => (0, 0, 0.into(), 0.into()),
34 BrTrue | BrFalse => (1, 0, Type::Bool.size()?, 0.into()),
35 Branch => (0, 0, 0.into(), 0.into()),
36 LdU8 => (0, 1, 0.into(), Type::U8.size()?),
37 LdU16 => (0, 1, 0.into(), Type::U16.size()?),
38 LdU32 => (0, 1, 0.into(), Type::U32.size()?),
39 LdU64 => (0, 1, 0.into(), Type::U64.size()?),
40 LdU128 => (0, 1, 0.into(), Type::U128.size()?),
41 LdU256 => (0, 1, 0.into(), Type::U256.size()?),
42 LdTrue | LdFalse => (0, 1, 0.into(), Type::Bool.size()?),
43 FreezeRef => (1, 1, REFERENCE_SIZE, REFERENCE_SIZE),
44 ImmBorrowLoc | MutBorrowLoc => (0, 1, 0.into(), REFERENCE_SIZE),
45 ImmBorrowField | MutBorrowField | ImmBorrowFieldGeneric | MutBorrowFieldGeneric => {
46 (1, 1, REFERENCE_SIZE, REFERENCE_SIZE)
47 }
48 CastU8 => (1, 1, Type::U8.size()?, Type::U8.size()?),
51 CastU16 => (1, 1, Type::U8.size()?, Type::U16.size()?),
52 CastU32 => (1, 1, Type::U8.size()?, Type::U32.size()?),
53 CastU64 => (1, 1, Type::U8.size()?, Type::U64.size()?),
54 CastU128 => (1, 1, Type::U8.size()?, Type::U128.size()?),
55 CastU256 => (1, 1, Type::U8.size()?, Type::U256.size()?),
56 Add | Sub | Mul | Mod | Div => (
59 2,
60 1,
61 Type::U8.size()? + Type::U8.size()?,
62 Type::U256.size()?,
63 ),
64 BitOr | BitAnd | Xor => (
65 2,
66 1,
67 Type::U8.size()? + Type::U8.size()?,
68 Type::U256.size()?,
69 ),
70 Shl | Shr => (
71 2,
72 1,
73 Type::U8.size()? + Type::U8.size()?,
74 Type::U256.size()?,
75 ),
76 Or | And => (
77 2,
78 1,
79 Type::Bool.size()? + Type::Bool.size()?,
80 Type::Bool.size()?,
81 ),
82 Lt | Gt | Le | Ge => (
83 2,
84 1,
85 Type::U8.size()? + Type::U8.size()?,
86 Type::Bool.size()?,
87 ),
88 Not => (1, 1, Type::Bool.size()?, Type::Bool.size()?),
89 Abort => (1, 0, Type::U64.size()?, 0.into()),
90 })
91}
92
// NOTE(review): the positional arguments to `GasStatus::charge` appear, from
// the call sites below, to be (instructions, stack_pushes, stack_pops,
// stack_size_increase, stack_size_decrease) — confirm against the definition
// of `GasStatus::charge` in `sui_types::gas_model::tables`.
impl<G: DerefMut<Target = GasStatus>> GasMeter for SuiGasMeter<G> {
    /// Charges one instruction using the precomputed stack effect for `instr`.
    fn charge_simple_instr(&mut self, instr: SimpleInstruction) -> PartialVMResult<()> {
        let (pops, pushes, pop_size, push_size) = get_simple_instruction_stack_change(instr)?;
        self.0
            .charge(1, pushes, pops, push_size.into(), pop_size.into())
    }

    /// Charges a `Pop`: one value leaves the stack, shrinking it by the
    /// popped value's abstract size.
    fn charge_pop(&mut self, popped_val: impl ValueView) -> PartialVMResult<()> {
        let decr_size = abstract_memory_size(&self.0, popped_val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    /// Charges a native function's cost plus the stack growth from its return
    /// values. Above the version-dependent native-call threshold the native
    /// `amount` is folded into the metered `charge` call; below it, stack
    /// effects are charged at zero instruction cost and `amount` is deducted
    /// directly via `deduct_gas`.
    fn charge_native_function(
        &mut self,
        amount: InternalGas,
        ret_vals: Option<impl ExactSizeIterator<Item = impl ValueView>>,
    ) -> PartialVMResult<()> {
        // Number of values the native call pushes back onto the stack.
        let pushes = ret_vals
            .as_ref()
            .map(|ret_vals| ret_vals.len())
            .unwrap_or(0) as u64;
        // Total abstract size of the returned values.
        let size_increase = match ret_vals {
            Some(mut ret_vals) => ret_vals.try_fold(
                AbstractMemorySize::zero(),
                |acc, elem| -> PartialVMResult<_> {
                    Ok(acc + abstract_memory_size(&self.0, elem)?)
                },
            )?,
            None => AbstractMemorySize::zero(),
        };
        // Count this call before testing the threshold, so the threshold sees
        // the up-to-date call count.
        self.0.record_native_call();
        if native_function_threshold_exceeded(self.0.gas_model_version, self.0.num_native_calls) {
            // Charge everything, including the native cost, through the
            // regular charge path.
            self.0
                .charge(amount.into(), pushes, 0, size_increase.into(), 0)
        } else {
            // Charge only the stack effects, then deduct the native cost
            // separately.
            self.0.charge(0, pushes, 0, size_increase.into(), 0)?;
            self.0.deduct_gas(amount)
        }
    }

    /// Charges the stack shrinkage from a native call consuming its
    /// arguments. Under legacy gas models the argument pops and sizes are
    /// charged here; newer models charge a flat single instruction.
    fn charge_native_function_before_execution(
        &mut self,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        if legacy_charge_native_pops_args(self.0.gas_model_version) {
            let pops = args.len() as u64;
            // NOTE(review): the fold is seeded with `pops` (not zero) — this
            // looks like a preserved legacy quirk of the old gas model; do
            // not "fix" without a protocol-version gate.
            let stack_reduction_size = args.try_fold(
                AbstractMemorySize::new(pops),
                |acc, elem| -> PartialVMResult<_> {
                    Ok(acc + abstract_memory_size(&self.0, elem)?)
                },
            )?;
            self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
        } else {
            self.0.charge(1, 0, 0, 0, 0)
        }
    }

    /// Charges a non-generic call: arguments are popped off the caller's
    /// stack, shrinking it by their combined abstract size.
    fn charge_call(
        &mut self,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        let pops = args.len() as u64;
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charges a generic call; same charge shape as `charge_call`.
    fn charge_call_generic(
        &mut self,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        let pops = args.len() as u64;
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charges loading a constant: one push sized by the constant's
    /// serialized byte length.
    fn charge_ld_const(&mut self, size: NumBytes) -> PartialVMResult<()> {
        self.0.charge(1, 1, 0, u64::from(size), 0)
    }

    /// No additional charge after constant deserialization; the cost was
    /// already covered by `charge_ld_const`.
    fn charge_ld_const_after_deserialization(
        &mut self,
        _val: impl ValueView,
    ) -> PartialVMResult<()> {
        Ok(())
    }

    /// Charges `CopyLoc`: the stack grows by the full abstract size of the
    /// copied value.
    fn charge_copy_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        let incr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 1, 0, incr_size.into(), 0)
    }

    /// Charges `MoveLoc`. Newer gas models charge a flat reference-sized
    /// push (the value is moved, not copied); legacy models charge the full
    /// abstract size of the value.
    fn charge_move_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        if reweight_move_loc(self.0.gas_model_version) {
            self.0.charge(1, 1, 0, REFERENCE_SIZE.into(), 0)
        } else {
            let incr_size = abstract_memory_size(&self.0, val)?;
            self.0.charge(1, 1, 0, incr_size.into(), 0)
        }
    }

    /// Charges `StLoc`: one value is popped into a local, shrinking the
    /// stack by its abstract size.
    fn charge_store_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    /// Charges `Pack`: the field values are popped and replaced by one
    /// struct, charged at a flat `STRUCT_SIZE` increase.
    fn charge_pack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        let num_fields = args.len() as u64;
        self.0.charge(1, 1, num_fields, STRUCT_SIZE.into(), 0)
    }

    /// Charges `Unpack`: the mirror of `charge_pack` — one struct popped,
    /// its fields pushed.
    fn charge_unpack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        let num_fields = args.len() as u64;
        self.0.charge(1, num_fields, 1, 0, STRUCT_SIZE.into())
    }

    /// Charges a variant switch: the scrutinee value is consumed.
    fn charge_variant_switch(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    /// Charges `ReadRef`: a reference is popped and the referenced value
    /// pushed. Newer gas models measure the value with reference traversal
    /// enabled; legacy models use the non-traversing size.
    fn charge_read_ref(&mut self, ref_val: impl ValueView) -> PartialVMResult<()> {
        let size = if reweight_read_ref(self.0.gas_model_version) {
            abstract_memory_size_with_traversal(&self.0, ref_val)?
        } else {
            abstract_memory_size(&self.0, ref_val)?
        };
        self.0.charge(1, 1, 1, size.into(), REFERENCE_SIZE.into())
    }

    /// Charges `WriteRef`: both the new value and the reference are popped.
    /// Newer gas models drop the legacy spurious push.
    fn charge_write_ref(
        &mut self,
        new_val: impl ValueView,
        old_val: impl ValueView,
    ) -> PartialVMResult<()> {
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 2)
        } else {
            (1, 2)
        };
        let incr_size = abstract_memory_size(&self.0, new_val)?;
        let decr_size = abstract_memory_size(&self.0, old_val)?;
        self.0
            .charge(1, pushes, pops, incr_size.into(), decr_size.into())
    }

    /// Charges `Eq`: both operands (measured with reference traversal) are
    /// consumed and a boolean pushed. The operand size appears in both the
    /// increase and the decrease, so the net stack-size change is just the
    /// boolean — presumably to price the comparison work itself; confirm
    /// against the gas model spec before altering.
    fn charge_eq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        self.0.charge(
            1,
            1,
            2,
            (Type::Bool.size()? + size_reduction).into(),
            size_reduction.into(),
        )
    }

    /// Charges `Neq`; same shape as `charge_eq`, except that legacy gas
    /// models (before reference traversal) count only the boolean as the
    /// size increase.
    fn charge_neq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        let size_increase = if enable_traverse_refs(self.0.gas_model_version) {
            Type::Bool.size()? + size_reduction
        } else {
            Type::Bool.size()?
        };
        self.0
            .charge(1, 1, 2, size_increase.into(), size_reduction.into())
    }

    /// Charges `VecPack`: the element values are popped and replaced by one
    /// vector, charged at a flat `VEC_SIZE` increase.
    fn charge_vec_pack<'a>(
        &mut self,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        let num_args = args.len() as u64;
        self.0.charge(1, 1, num_args, VEC_SIZE.into(), 0)
    }

    /// Charges `VecLen`: a vector reference popped, a `u64` length pushed.
    fn charge_vec_len(&mut self) -> PartialVMResult<()> {
        self.0
            .charge(1, 1, 1, Type::U64.size()?.into(), REFERENCE_SIZE.into())
    }

    /// Charges `VecImmBorrow`/`VecMutBorrow`: a reference and a `u64` index
    /// popped, an element reference pushed.
    fn charge_vec_borrow(&mut self, _is_mut: bool, _is_success: bool) -> PartialVMResult<()> {
        self.0.charge(
            1,
            1,
            2,
            REFERENCE_SIZE.into(),
            (REFERENCE_SIZE + Type::U64.size()?).into(),
        )
    }

    /// Charges `VecPushBack`: the vector reference and the element are
    /// popped; only the reference size is counted as the decrease.
    fn charge_vec_push_back(&mut self, _val: impl ValueView) -> PartialVMResult<()> {
        self.0.charge(1, 0, 2, 0, REFERENCE_SIZE.into())
    }

    /// Charges `VecPopBack`: the vector reference is popped and the element
    /// pushed.
    fn charge_vec_pop_back(&mut self, _val: Option<impl ValueView>) -> PartialVMResult<()> {
        self.0.charge(1, 1, 1, 0, REFERENCE_SIZE.into())
    }

    /// Charges `VecUnpack`: the vector is popped and the expected number of
    /// elements pushed, with a flat `VEC_SIZE` decrease.
    fn charge_vec_unpack(
        &mut self,
        expect_num_elements: NumArgs,
        _elems: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        let pushes = u64::from(expect_num_elements);
        self.0.charge(1, pushes, 1, 0, VEC_SIZE.into())
    }

    /// Charges `VecSwap`: a vector reference and two `u64` indices are
    /// consumed. Newer gas models use the accurate (0 pushes, 3 pops) shape;
    /// the legacy (1, 1) shape is preserved for gas compatibility.
    fn charge_vec_swap(&mut self) -> PartialVMResult<()> {
        let size_decrease = REFERENCE_SIZE + Type::U64.size()? + Type::U64.size()?;
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 3)
        } else {
            (1, 1)
        };
        self.0.charge(1, pushes, pops, 0, size_decrease.into())
    }

    /// Dropping a frame's locals is free.
    fn charge_drop_frame(
        &mut self,
        _locals: impl Iterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        Ok(())
    }

    /// Remaining gas budget; reports unlimited gas when metering is
    /// disabled (`charge` flag is false).
    fn remaining_gas(&self) -> InternalGas {
        if !self.0.charge {
            return InternalGas::new(u64::MAX);
        }
        self.0.gas_left
    }
}
393
394fn abstract_memory_size(
395 status: &GasStatus,
396 val: impl ValueView,
397) -> PartialVMResult<AbstractMemorySize> {
398 let config = size_config_for_gas_model_version(status.gas_model_version, false);
399 val.abstract_memory_size(&config)
400}
401
402fn abstract_memory_size_with_traversal(
403 status: &GasStatus,
404 val: impl ValueView,
405) -> PartialVMResult<AbstractMemorySize> {
406 let config = size_config_for_gas_model_version(status.gas_model_version, true);
407 val.abstract_memory_size(&config)
408}
409
/// Reference traversal during size measurement is enabled from gas model
/// version 10 onward.
fn enable_traverse_refs(gas_model_version: u64) -> bool {
    gas_model_version >= 10
}
413
/// `ReadRef` uses the traversal-based size from gas model version 11 onward.
fn reweight_read_ref(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}
418
/// `MoveLoc` is charged at a flat reference size from gas model version 11
/// onward.
fn reweight_move_loc(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}
423
/// Corrected push/pop counts for `WriteRef` and `VecSwap` apply from gas
/// model version 11 onward.
fn reduce_stack_size(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}
428
429fn size_config_for_gas_model_version(
430 gas_model_version: u64,
431 should_traverse_refs: bool,
432) -> SizeConfig {
433 if use_legacy_abstract_size(gas_model_version) {
434 SizeConfig {
435 traverse_references: false,
436 include_vector_size: false,
437 }
438 } else if should_traverse_refs {
439 SizeConfig {
440 traverse_references: enable_traverse_refs(gas_model_version),
441 include_vector_size: true,
442 }
443 } else {
444 SizeConfig {
445 traverse_references: false,
446 include_vector_size: true,
447 }
448 }
449}