// sui_adapter_latest/gas_meter.rs

// Copyright (c) Mysten Labs, Inc.
// SPDX-License-Identifier: Apache-2.0

use std::ops::DerefMut;

use move_binary_format::errors::PartialVMResult;
use move_core_types::{
    gas_algebra::{AbstractMemorySize, InternalGas, NumArgs, NumBytes},
    language_storage::ModuleId,
};
use move_vm_types::{
    gas::{GasMeter, SimpleInstruction},
    loaded_data::runtime_types::Type,
    views::{SizeConfig, TypeView, ValueView},
};
use sui_types::gas_model::{
    gas_predicates::{native_function_threshold_exceeded, use_legacy_abstract_size},
    tables::{GasStatus, REFERENCE_SIZE, STRUCT_SIZE, VEC_SIZE},
};

/// Adapter that implements the Move VM's `GasMeter` trait on top of a Sui
/// `GasStatus`. `G` is any mutably-dereferencing handle to the status
/// (e.g. `&mut GasStatus`), so the same meter works over owned or borrowed
/// gas state.
pub struct SuiGasMeter<G: DerefMut<Target = GasStatus>>(pub G);

/// Returns a tuple of (<pops>, <pushes>, <stack_size_decrease>, <stack_size_increase>)
/// describing the statically-known operand-stack effect of `instr`, used to
/// charge stack accounting without inspecting runtime values.
fn get_simple_instruction_stack_change(
    instr: SimpleInstruction,
) -> (u64, u64, AbstractMemorySize, AbstractMemorySize) {
    use SimpleInstruction::*;

    match instr {
        // NB: The `Ret` pops are accounted for in `Call` instructions, so we say `Ret` has no pops.
        Nop | Ret => (0, 0, 0.into(), 0.into()),
        // Conditional branches consume the bool condition; `Branch` touches nothing.
        BrTrue | BrFalse => (1, 0, Type::Bool.size(), 0.into()),
        Branch => (0, 0, 0.into(), 0.into()),
        // Constant loads push one value of the corresponding type.
        LdU8 => (0, 1, 0.into(), Type::U8.size()),
        LdU16 => (0, 1, 0.into(), Type::U16.size()),
        LdU32 => (0, 1, 0.into(), Type::U32.size()),
        LdU64 => (0, 1, 0.into(), Type::U64.size()),
        LdU128 => (0, 1, 0.into(), Type::U128.size()),
        LdU256 => (0, 1, 0.into(), Type::U256.size()),
        LdTrue | LdFalse => (0, 1, 0.into(), Type::Bool.size()),
        // Reference-producing instructions swap or push a reference-sized entry.
        FreezeRef => (1, 1, REFERENCE_SIZE, REFERENCE_SIZE),
        ImmBorrowLoc | MutBorrowLoc => (0, 1, 0.into(), REFERENCE_SIZE),
        ImmBorrowField | MutBorrowField | ImmBorrowFieldGeneric | MutBorrowFieldGeneric => {
            (1, 1, REFERENCE_SIZE, REFERENCE_SIZE)
        }
        // Since we don't have the size of the value being cast here we take a conservative
        // over-approximation: it is _always_ getting cast from the smallest integer type.
        CastU8 => (1, 1, Type::U8.size(), Type::U8.size()),
        CastU16 => (1, 1, Type::U8.size(), Type::U16.size()),
        CastU32 => (1, 1, Type::U8.size(), Type::U32.size()),
        CastU64 => (1, 1, Type::U8.size(), Type::U64.size()),
        CastU128 => (1, 1, Type::U8.size(), Type::U128.size()),
        CastU256 => (1, 1, Type::U8.size(), Type::U256.size()),
        // NB: We don't know the size of what integers we're dealing with, so we conservatively
        // over-approximate by popping the smallest integers, and push the largest.
        Add | Sub | Mul | Mod | Div => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
        BitOr | BitAnd | Xor => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
        Shl | Shr => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
        Or | And => (
            2,
            1,
            Type::Bool.size() + Type::Bool.size(),
            Type::Bool.size(),
        ),
        // Comparisons pop two (conservatively small) operands and push a bool.
        Lt | Gt | Le | Ge => (2, 1, Type::U8.size() + Type::U8.size(), Type::Bool.size()),
        Not => (1, 1, Type::Bool.size(), Type::Bool.size()),
        // `Abort` consumes the abort code and terminates; nothing is pushed.
        Abort => (1, 0, Type::U64.size(), 0.into()),
    }
}

// `GasStatus::charge` is invoked throughout as
// `charge(<instructions>, <pushes>, <pops>, <stack size increase>, <stack size decrease>)`
// (inferred from the call sites and comments below — confirm against `GasStatus`).
impl<G: DerefMut<Target = GasStatus>> GasMeter for SuiGasMeter<G> {
    /// Charge an instruction and fail if not enough gas units are left.
    fn charge_simple_instr(&mut self, instr: SimpleInstruction) -> PartialVMResult<()> {
        // Look up the statically-known stack effect and charge one instruction
        // plus the corresponding push/pop accounting.
        let (pops, pushes, pop_size, push_size) = get_simple_instruction_stack_change(instr);
        self.0
            .charge(1, pushes, pops, push_size.into(), pop_size.into())
    }

    /// Charge a `Pop`: one pop, shrinking the stack by the popped value's
    /// abstract size.
    fn charge_pop(&mut self, popped_val: impl ValueView) -> PartialVMResult<()> {
        let decr_size = abstract_memory_size(&self.0, popped_val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    /// Charge the cost a native function reported (`amount`) plus the stack
    /// pushes caused by its return values.
    fn charge_native_function(
        &mut self,
        amount: InternalGas,
        ret_vals: Option<impl ExactSizeIterator<Item = impl ValueView>>,
    ) -> PartialVMResult<()> {
        // Charge for the number of pushes on to the stack that the return of this function is
        // going to cause.
        let pushes = ret_vals
            .as_ref()
            .map(|ret_vals| ret_vals.len())
            .unwrap_or(0) as u64;
        // Calculate the number of bytes that are getting pushed onto the stack.
        let size_increase = match ret_vals {
            Some(mut ret_vals) => ret_vals.try_fold(
                AbstractMemorySize::zero(),
                |acc, elem| -> PartialVMResult<_> {
                    Ok(acc + abstract_memory_size(&self.0, elem)?)
                },
            )?,
            None => AbstractMemorySize::zero(),
        };
        self.0.record_native_call();
        // Past a per-transaction native-call threshold the charging scheme switches:
        // the native cost is folded into the instruction-count dimension instead of
        // being deducted directly.
        if native_function_threshold_exceeded(self.0.gas_model_version, self.0.num_native_calls) {
            // Charge for the stack operations. We don't count this as an "instruction" since we
            // already accounted for the `Call` instruction in the
            // `charge_native_function_before_execution` call.
            // The amount returned by the native function is viewed as the "virtual" instruction cost
            // for the native function, and will be charged and contribute to the overall cost tier of
            // the transaction accordingly.
            self.0
                .charge(amount.into(), pushes, 0, size_increase.into(), 0)
        } else {
            // Charge for the stack operations. We don't count this as an "instruction" since we
            // already accounted for the `Call` instruction in the
            // `charge_native_function_before_execution` call.
            self.0.charge(0, pushes, 0, size_increase.into(), 0)?;
            // Now charge the gas that the native function told us to charge.
            self.0.deduct_gas(amount)
        }
    }

    /// Charge the argument pops (and the initiating `Call` bytecode) that happen
    /// before a native function runs.
    fn charge_native_function_before_execution(
        &mut self,
        _ty_args: impl ExactSizeIterator<Item = impl TypeView>,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Determine the number of pops that are going to be needed for this function call, and
        // charge for them.
        let pops = args.len() as u64;
        // Calculate the size decrease of the stack from the above pops.
        // NOTE(review): the fold is seeded with `AbstractMemorySize::new(pops)` (one
        // abstract byte per popped argument), unlike `charge_call` below which seeds
        // with zero. Preserved as-is since changing it would alter gas charging;
        // confirm whether the asymmetry is intentional.
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(pops),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        // Track that this is going to be popping from the operand stack. We also increment the
        // instruction count as we need to account for the `Call` bytecode that initiated this
        // native call.
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge a `Call`: arguments move from the operand stack into the callee's
    /// locals.
    fn charge_call(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // We will have to perform this many pops for the call.
        let pops = args.len() as u64;
        // Size stays the same -- we're just moving it from the operand stack to the locals. But
        // the size on the operand stack is reduced by sum_{args} arg.size().
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge a `CallGeneric`; same accounting as `charge_call` (type arguments
    /// are not charged here).
    fn charge_call_generic(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        _ty_args: impl ExactSizeIterator<Item = impl TypeView>,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // We have to perform this many pops from the operand stack for this function call.
        let pops = args.len() as u64;
        // Calculate the size reduction on the operand stack.
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        // Charge for the pops, no pushes, and account for the stack size decrease. Also track the
        // `CallGeneric` instruction we must have encountered for this.
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge an `LdConst` by the serialized byte size of the constant.
    fn charge_ld_const(&mut self, size: NumBytes) -> PartialVMResult<()> {
        // Charge for the load from the locals onto the stack.
        self.0.charge(1, 1, 0, u64::from(size), 0)
    }

    fn charge_ld_const_after_deserialization(
        &mut self,
        _val: impl ValueView,
    ) -> PartialVMResult<()> {
        // We already charged for this based on the bytes that we're loading so don't charge again.
        Ok(())
    }

    /// Charge a `CopyLoc`: the stack grows by the size of the copied value.
    fn charge_copy_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Charge for the copy of the local onto the stack.
        let incr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 1, 0, incr_size.into(), 0)
    }

    /// Charge a `MoveLoc`. Newer gas models treat the move as reference-sized
    /// (the value itself is not copied); older models charge the full value size.
    fn charge_move_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        if reweight_move_loc(self.0.gas_model_version) {
            self.0.charge(1, 1, 0, REFERENCE_SIZE.into(), 0)
        } else {
            // Charge for the move of the local on to the stack. Note that we charge here since we
            // aren't tracking the local size (at least not yet). If we were, this should be a net-zero
            // operation in terms of memory usage.
            let incr_size = abstract_memory_size(&self.0, val)?;
            self.0.charge(1, 1, 0, incr_size.into(), 0)
        }
    }

    /// Charge a `StLoc`: one value pops off the stack into a local.
    fn charge_store_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Charge for the storing of the value on the stack into a local. Note here that if we were
        // also accounting for the size of the locals that this would be a net-zero operation in
        // terms of memory.
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    /// Charge a `Pack`: field values collapse into one struct value plus struct
    /// overhead.
    fn charge_pack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We perform `num_fields` number of pops.
        let num_fields = args.len() as u64;
        // The actual amount of memory on the stack is staying the same with the addition of some
        // extra size for the struct, so the size doesn't really change much.
        self.0.charge(1, 1, num_fields, STRUCT_SIZE.into(), 0)
    }

    /// Charge an `Unpack`: the inverse of `charge_pack`.
    fn charge_unpack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We perform `num_fields` number of pushes.
        let num_fields = args.len() as u64;
        self.0.charge(1, num_fields, 1, 0, STRUCT_SIZE.into())
    }

    /// Charge a `VariantSwitch`: the scrutinee value is consumed.
    fn charge_variant_switch(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // We perform a single pop of a value from the stack.
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    fn charge_read_ref(&mut self, ref_val: impl ValueView) -> PartialVMResult<()> {
        // We read the reference so we are decreasing the size of the stack by the size of the
        // reference, and adding to it the size of the value that has been read from that
        // reference. Newer gas models size the read value by traversing through
        // references; older models use the non-traversing size.
        let size = if reweight_read_ref(self.0.gas_model_version) {
            abstract_memory_size_with_traversal(&self.0, ref_val)?
        } else {
            abstract_memory_size(&self.0, ref_val)?
        };
        self.0.charge(1, 1, 1, size.into(), REFERENCE_SIZE.into())
    }

    /// Charge a `WriteRef`: the new value and the reference are popped; the old
    /// value behind the reference is dropped.
    fn charge_write_ref(
        &mut self,
        new_val: impl ValueView,
        old_val: impl ValueView,
    ) -> PartialVMResult<()> {
        // TODO(tzakian): We should account for this elsewhere as the owner of data the
        // reference points to won't be on the stack. For now though, we treat it as adding to the
        // stack size.
        // Newer gas models stop counting a phantom push for the written value.
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 2)
        } else {
            (1, 2)
        };
        let incr_size = abstract_memory_size(&self.0, new_val)?;
        let decr_size = abstract_memory_size(&self.0, old_val)?;
        self.0
            .charge(1, pushes, pops, incr_size.into(), decr_size.into())
    }

    /// Charge an `Eq`: both operands are consumed (sized with traversal) and a
    /// bool is pushed.
    fn charge_eq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        // NOTE(review): unlike `charge_neq` below, the size increase here always
        // includes `size_reduction` regardless of gas model version — preserved
        // as-is; confirm the asymmetry is intentional.
        self.0.charge(
            1,
            1,
            2,
            (Type::Bool.size() + size_reduction).into(),
            size_reduction.into(),
        )
    }

    /// Charge a `Neq`; mirrors `charge_eq`, but the operand sizes only inflate
    /// the size increase on newer gas models.
    fn charge_neq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        let size_increase = if enable_traverse_refs(self.0.gas_model_version) {
            Type::Bool.size() + size_reduction
        } else {
            Type::Bool.size()
        };
        self.0
            .charge(1, 1, 2, size_increase.into(), size_reduction.into())
    }

    /// Charge a `VecPack`: the elements collapse into one vector plus vector
    /// metadata overhead.
    fn charge_vec_pack<'a>(
        &mut self,
        _ty: impl TypeView + 'a,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We will perform `num_args` number of pops.
        let num_args = args.len() as u64;
        // The amount of data on the stack stays constant except we have some extra metadata for
        // the vector to hold the length of the vector.
        self.0.charge(1, 1, num_args, VEC_SIZE.into(), 0)
    }

    /// Charge a `VecLen`: the vector reference is swapped for a u64 length.
    fn charge_vec_len(&mut self, _ty: impl TypeView) -> PartialVMResult<()> {
        self.0
            .charge(1, 1, 1, Type::U64.size().into(), REFERENCE_SIZE.into())
    }

    /// Charge a `VecImmBorrow`/`VecMutBorrow`: reference + index are consumed,
    /// an element reference is pushed.
    fn charge_vec_borrow(
        &mut self,
        _is_mut: bool,
        _ty: impl TypeView,
        _is_success: bool,
    ) -> PartialVMResult<()> {
        self.0.charge(
            1,
            1,
            2,
            REFERENCE_SIZE.into(),
            (REFERENCE_SIZE + Type::U64.size()).into(),
        )
    }

    /// Charge a `VecPushBack`: the element and vector reference are consumed.
    fn charge_vec_push_back(
        &mut self,
        _ty: impl TypeView,
        _val: impl ValueView,
    ) -> PartialVMResult<()> {
        // The value was already on the stack, so we aren't increasing the number of bytes on the stack.
        self.0.charge(1, 0, 2, 0, REFERENCE_SIZE.into())
    }

    /// Charge a `VecPopBack`: the vector reference is swapped for the popped
    /// element.
    fn charge_vec_pop_back(
        &mut self,
        _ty: impl TypeView,
        _val: Option<impl ValueView>,
    ) -> PartialVMResult<()> {
        self.0.charge(1, 1, 1, 0, REFERENCE_SIZE.into())
    }

    /// Charge a `VecUnpack`: the vector is consumed and each element is pushed.
    fn charge_vec_unpack(
        &mut self,
        _ty: impl TypeView,
        expect_num_elements: NumArgs,
        _elems: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Charge for the pushes
        let pushes = u64::from(expect_num_elements);
        // The stack size stays pretty much the same modulo the additional vector size
        self.0.charge(1, pushes, 1, 0, VEC_SIZE.into())
    }

    /// Charge a `VecSwap`: the reference and two u64 indices are consumed.
    fn charge_vec_swap(&mut self, _ty: impl TypeView) -> PartialVMResult<()> {
        let size_decrease = REFERENCE_SIZE + Type::U64.size() + Type::U64.size();
        // Newer gas models account for all three pops explicitly instead of a
        // net push/pop pair.
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 3)
        } else {
            (1, 1)
        };
        self.0.charge(1, pushes, pops, 0, size_decrease.into())
    }

    /// Dropping a frame is free: `Ret`/`Call` accounting already covers it.
    fn charge_drop_frame(
        &mut self,
        _locals: impl Iterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        Ok(())
    }

    fn remaining_gas(&self) -> InternalGas {
        // When metering is disabled (`charge == false`), report effectively
        // unlimited gas so execution never aborts on gas.
        if !self.0.charge {
            return InternalGas::new(u64::MAX);
        }
        self.0.gas_left
    }
}

390fn abstract_memory_size(
391    status: &GasStatus,
392    val: impl ValueView,
393) -> PartialVMResult<AbstractMemorySize> {
394    let config = size_config_for_gas_model_version(status.gas_model_version, false);
395    val.abstract_memory_size(&config)
396}
397
398fn abstract_memory_size_with_traversal(
399    status: &GasStatus,
400    val: impl ValueView,
401) -> PartialVMResult<AbstractMemorySize> {
402    let config = size_config_for_gas_model_version(status.gas_model_version, true);
403    val.abstract_memory_size(&config)
404}
405
/// Whether abstract-size computation may traverse through references
/// (gas model versions 10 and above).
fn enable_traverse_refs(gas_model_version: u64) -> bool {
    gas_model_version >= 10
}

/// Whether `ReadRef` is charged by the full (traversed) size of the value read.
/// Enabled only when the gas model version is strictly greater than 10, i.e.
/// versions 11 and above. (The previous comment said "10 and above", which
/// contradicted the predicate; the code's behavior is kept unchanged.)
fn reweight_read_ref(gas_model_version: u64) -> bool {
    gas_model_version > 10
}

/// Whether `MoveLoc` is charged as a reference-sized move instead of the full
/// value size. Enabled only when the gas model version is strictly greater
/// than 10, i.e. versions 11 and above. (The previous comment said "10 and
/// above", which contradicted the predicate; behavior is kept unchanged.)
fn reweight_move_loc(gas_model_version: u64) -> bool {
    gas_model_version > 10
}

/// Whether `WriteRef`/`VecSwap` use the tightened push/pop accounting.
/// Enabled only when the gas model version is strictly greater than 10, i.e.
/// versions 11 and above. (The previous comment said "10 and above", which
/// contradicted the predicate; behavior is kept unchanged.)
fn reduce_stack_size(gas_model_version: u64) -> bool {
    gas_model_version > 10
}

/// Whether abstract value sizes use fine-grained accounting
/// (gas model versions 11 and above).
fn enable_fine_grained_value_size(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}

429fn size_config_for_gas_model_version(
430    gas_model_version: u64,
431    should_traverse_refs: bool,
432) -> SizeConfig {
433    if use_legacy_abstract_size(gas_model_version) {
434        SizeConfig {
435            traverse_references: false,
436            include_vector_size: false,
437            fine_grained_value_size: false,
438        }
439    } else if should_traverse_refs {
440        SizeConfig {
441            traverse_references: enable_traverse_refs(gas_model_version),
442            include_vector_size: true,
443            fine_grained_value_size: enable_fine_grained_value_size(gas_model_version),
444        }
445    } else {
446        SizeConfig {
447            traverse_references: false,
448            include_vector_size: true,
449            fine_grained_value_size: enable_fine_grained_value_size(gas_model_version),
450        }
451    }
452}