sui_adapter_latest/
gas_meter.rs

1// Copyright (c) Mysten Labs, Inc.
2// SPDX-License-Identifier: Apache-2.0
3
4use move_binary_format::errors::PartialVMResult;
5use move_core_types::{
6    gas_algebra::{AbstractMemorySize, InternalGas, NumArgs, NumBytes},
7    language_storage::ModuleId,
8};
9use move_vm_types::{
10    gas::{GasMeter, SimpleInstruction},
11    loaded_data::runtime_types::Type,
12    views::{SizeConfig, TypeView, ValueView},
13};
14use sui_types::gas_model::{
15    gas_predicates::{native_function_threshold_exceeded, use_legacy_abstract_size},
16    tables::{GasStatus, REFERENCE_SIZE, STRUCT_SIZE, VEC_SIZE},
17};
18
/// Gas meter used by the Sui adapter: a thin wrapper around a mutable
/// [`GasStatus`] that implements the Move VM [`GasMeter`] trait.
pub struct SuiGasMeter<'g>(pub &'g mut GasStatus);
20
21/// Returns a tuple of (<pops>, <pushes>, <stack_size_decrease>, <stack_size_increase>)
22fn get_simple_instruction_stack_change(
23    instr: SimpleInstruction,
24) -> (u64, u64, AbstractMemorySize, AbstractMemorySize) {
25    use SimpleInstruction::*;
26
27    match instr {
28        // NB: The `Ret` pops are accounted for in `Call` instructions, so we say `Ret` has no pops.
29        Nop | Ret => (0, 0, 0.into(), 0.into()),
30        BrTrue | BrFalse => (1, 0, Type::Bool.size(), 0.into()),
31        Branch => (0, 0, 0.into(), 0.into()),
32        LdU8 => (0, 1, 0.into(), Type::U8.size()),
33        LdU16 => (0, 1, 0.into(), Type::U16.size()),
34        LdU32 => (0, 1, 0.into(), Type::U32.size()),
35        LdU64 => (0, 1, 0.into(), Type::U64.size()),
36        LdU128 => (0, 1, 0.into(), Type::U128.size()),
37        LdU256 => (0, 1, 0.into(), Type::U256.size()),
38        LdTrue | LdFalse => (0, 1, 0.into(), Type::Bool.size()),
39        FreezeRef => (1, 1, REFERENCE_SIZE, REFERENCE_SIZE),
40        ImmBorrowLoc | MutBorrowLoc => (0, 1, 0.into(), REFERENCE_SIZE),
41        ImmBorrowField | MutBorrowField | ImmBorrowFieldGeneric | MutBorrowFieldGeneric => {
42            (1, 1, REFERENCE_SIZE, REFERENCE_SIZE)
43        }
44        // Since we don't have the size of the value being cast here we take a conservative
45        // over-approximation: it is _always_ getting cast from the smallest integer type.
46        CastU8 => (1, 1, Type::U8.size(), Type::U8.size()),
47        CastU16 => (1, 1, Type::U8.size(), Type::U16.size()),
48        CastU32 => (1, 1, Type::U8.size(), Type::U32.size()),
49        CastU64 => (1, 1, Type::U8.size(), Type::U64.size()),
50        CastU128 => (1, 1, Type::U8.size(), Type::U128.size()),
51        CastU256 => (1, 1, Type::U8.size(), Type::U256.size()),
52        // NB: We don't know the size of what integers we're dealing with, so we conservatively
53        // over-approximate by popping the smallest integers, and push the largest.
54        Add | Sub | Mul | Mod | Div => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
55        BitOr | BitAnd | Xor => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
56        Shl | Shr => (2, 1, Type::U8.size() + Type::U8.size(), Type::U256.size()),
57        Or | And => (
58            2,
59            1,
60            Type::Bool.size() + Type::Bool.size(),
61            Type::Bool.size(),
62        ),
63        Lt | Gt | Le | Ge => (2, 1, Type::U8.size() + Type::U8.size(), Type::Bool.size()),
64        Not => (1, 1, Type::Bool.size(), Type::Bool.size()),
65        Abort => (1, 0, Type::U64.size(), 0.into()),
66    }
67}
68
impl GasMeter for SuiGasMeter<'_> {
    /// Charge an instruction and fail if not enough gas units are left.
    ///
    /// NOTE: as this call site demonstrates, the argument order of `GasStatus::charge`
    /// is (instructions, pushes, pops, stack_size_increase, stack_size_decrease).
    fn charge_simple_instr(&mut self, instr: SimpleInstruction) -> PartialVMResult<()> {
        let (pops, pushes, pop_size, push_size) = get_simple_instruction_stack_change(instr);
        self.0
            .charge(1, pushes, pops, push_size.into(), pop_size.into())
    }

    /// Charge a `Pop`: one value leaves the stack, shrinking it by that value's size.
    fn charge_pop(&mut self, popped_val: impl ValueView) -> PartialVMResult<()> {
        self.0
            .charge(1, 0, 1, 0, abstract_memory_size(self.0, popped_val).into())
    }

    /// Charge for a native function's execution and for pushing its return values.
    ///
    /// `amount` is the cost reported by the native function itself; how it is applied
    /// depends on whether the native-call threshold has been exceeded (see below).
    fn charge_native_function(
        &mut self,
        amount: InternalGas,
        ret_vals: Option<impl ExactSizeIterator<Item = impl ValueView>>,
    ) -> PartialVMResult<()> {
        // Charge for the number of pushes on to the stack that the return of this function is
        // going to cause.
        let pushes = ret_vals
            .as_ref()
            .map(|ret_vals| ret_vals.len())
            .unwrap_or(0) as u64;
        // Calculate the number of bytes that are getting pushed onto the stack.
        let size_increase = ret_vals
            .map(|ret_vals| {
                ret_vals.fold(AbstractMemorySize::zero(), |acc, elem| {
                    acc + abstract_memory_size(self.0, elem)
                })
            })
            .unwrap_or_else(AbstractMemorySize::zero);
        self.0.record_native_call();
        if native_function_threshold_exceeded(self.0.gas_model_version, self.0.num_native_calls) {
            // Charge for the stack operations. We don't count this as an "instruction" since we
            // already accounted for the `Call` instruction in the
            // `charge_native_function_before_execution` call.
            // The amount returned by the native function is viewed as the "virtual" instruction cost
            // for the native function, and will be charged and contribute to the overall cost tier of
            // the transaction accordingly.
            self.0
                .charge(amount.into(), pushes, 0, size_increase.into(), 0)
        } else {
            // Charge for the stack operations. We don't count this as an "instruction" since we
            // already accounted for the `Call` instruction in the
            // `charge_native_function_before_execution` call.
            self.0.charge(0, pushes, 0, size_increase.into(), 0)?;
            // Now charge the gas that the native function told us to charge.
            self.0.deduct_gas(amount)
        }
    }

    /// Charge the argument pops (and the initiating `Call` bytecode) for a native call.
    fn charge_native_function_before_execution(
        &mut self,
        _ty_args: impl ExactSizeIterator<Item = impl TypeView>,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Determine the number of pops that are going to be needed for this function call, and
        // charge for them.
        let pops = args.len() as u64;
        // Calculate the size decrease of the stack from the above pops.
        // NOTE(review): the fold here is seeded with `pops` (so the size decrease includes
        // one extra abstract byte per argument), whereas `charge_call` seeds with 0 —
        // looks intentional but worth confirming against the gas model.
        let stack_reduction_size = args.fold(AbstractMemorySize::new(pops), |acc, elem| {
            acc + abstract_memory_size(self.0, elem)
        });
        // Track that this is going to be popping from the operand stack. We also increment the
        // instruction count as we need to account for the `Call` bytecode that initiated this
        // native call.
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge a `Call`: pop the arguments off the operand stack into the callee's locals.
    fn charge_call(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // We will have to perform this many pops for the call.
        let pops = args.len() as u64;
        // Size stays the same -- we're just moving it from the operand stack to the locals. But
        // the size on the operand stack is reduced by sum_{args} arg.size().
        let stack_reduction_size = args.fold(AbstractMemorySize::new(0), |acc, elem| {
            acc + abstract_memory_size(self.0, elem)
        });
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge a `CallGeneric`: identical accounting to `charge_call`; type arguments
    /// are ignored for stack purposes.
    fn charge_call_generic(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        _ty_args: impl ExactSizeIterator<Item = impl TypeView>,
        args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // We have to perform this many pops from the operand stack for this function call.
        let pops = args.len() as u64;
        // Calculate the size reduction on the operand stack.
        let stack_reduction_size = args.fold(AbstractMemorySize::new(0), |acc, elem| {
            acc + abstract_memory_size(self.0, elem)
        });
        // Charge for the pops, no pushes, and account for the stack size decrease. Also track the
        // `CallGeneric` instruction we must have encountered for this.
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge a constant load, priced by the constant's serialized byte size.
    fn charge_ld_const(&mut self, size: NumBytes) -> PartialVMResult<()> {
        // Charge for the load from the locals onto the stack.
        self.0.charge(1, 1, 0, u64::from(size), 0)
    }

    fn charge_ld_const_after_deserialization(
        &mut self,
        _val: impl ValueView,
    ) -> PartialVMResult<()> {
        // We already charged for this based on the bytes that we're loading so don't charge again.
        Ok(())
    }

    /// Charge a `CopyLoc`: the stack grows by the size of the copied value.
    fn charge_copy_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Charge for the copy of the local onto the stack.
        self.0
            .charge(1, 1, 0, abstract_memory_size(self.0, val).into(), 0)
    }

    /// Charge a `MoveLoc`. Newer gas model versions charge a flat reference-sized
    /// increase instead of the moved value's full abstract size.
    fn charge_move_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        if reweight_move_loc(self.0.gas_model_version) {
            self.0.charge(1, 1, 0, REFERENCE_SIZE.into(), 0)
        } else {
            // Charge for the move of the local on to the stack. Note that we charge here since we
            // aren't tracking the local size (at least not yet). If we were, this should be a net-zero
            // operation in terms of memory usage.
            self.0
                .charge(1, 1, 0, abstract_memory_size(self.0, val).into(), 0)
        }
    }

    /// Charge a `StLoc`: pop one value off the stack into a local.
    fn charge_store_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Charge for the storing of the value on the stack into a local. Note here that if we were
        // also accounting for the size of the locals that this would be a net-zero operation in
        // terms of memory.
        self.0
            .charge(1, 0, 1, 0, abstract_memory_size(self.0, val).into())
    }

    /// Charge a `Pack`/`PackGeneric`: pop the fields, push the struct.
    fn charge_pack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We perform `num_fields` number of pops.
        let num_fields = args.len() as u64;
        // The actual amount of memory on the stack is staying the same with the addition of some
        // extra size for the struct, so the size doesn't really change much.
        self.0.charge(1, 1, num_fields, STRUCT_SIZE.into(), 0)
    }

    /// Charge an `Unpack`/`UnpackGeneric`: pop the struct, push its fields.
    fn charge_unpack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We perform `num_fields` number of pushes.
        let num_fields = args.len() as u64;
        self.0.charge(1, num_fields, 1, 0, STRUCT_SIZE.into())
    }

    /// Charge a variant switch: the scrutinee value is popped off the stack.
    fn charge_variant_switch(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // We perform a single pop of a value from the stack.
        self.0
            .charge(1, 0, 1, 0, abstract_memory_size(self.0, val).into())
    }

    /// Charge a `ReadRef`: swap the reference for the value it points to.
    fn charge_read_ref(&mut self, ref_val: impl ValueView) -> PartialVMResult<()> {
        // We read the reference so we are decreasing the size of the stack by the size of the
        // reference, and adding to it the size of the value that has been read from that
        // reference.
        // Newer gas model versions size the read value with reference traversal enabled.
        let size = if reweight_read_ref(self.0.gas_model_version) {
            abstract_memory_size_with_traversal(self.0, ref_val)
        } else {
            abstract_memory_size(self.0, ref_val)
        };
        self.0.charge(1, 1, 1, size.into(), REFERENCE_SIZE.into())
    }

    /// Charge a `WriteRef`: pop the new value and the reference; older gas model
    /// versions also counted a (spurious) push.
    fn charge_write_ref(
        &mut self,
        new_val: impl ValueView,
        old_val: impl ValueView,
    ) -> PartialVMResult<()> {
        // TODO(tzakian): We should account for this elsewhere as the owner of data the
        // reference points to won't be on the stack. For now though, we treat it as adding to the
        // stack size.
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 2)
        } else {
            (1, 2)
        };
        self.0.charge(
            1,
            pushes,
            pops,
            abstract_memory_size(self.0, new_val).into(),
            abstract_memory_size(self.0, old_val).into(),
        )
    }

    /// Charge an `Eq`: both operands (sized with reference traversal) are popped and
    /// a bool is pushed. The size increase mirrors the reduction plus the bool.
    fn charge_eq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        let size_reduction = abstract_memory_size_with_traversal(self.0, lhs)
            + abstract_memory_size_with_traversal(self.0, rhs);
        self.0.charge(
            1,
            1,
            2,
            (Type::Bool.size() + size_reduction).into(),
            size_reduction.into(),
        )
    }

    /// Charge a `Neq`: like `charge_eq`, except the size increase only includes the
    /// operand sizes on gas model versions with reference traversal enabled.
    fn charge_neq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        let size_reduction = abstract_memory_size_with_traversal(self.0, lhs)
            + abstract_memory_size_with_traversal(self.0, rhs);
        let size_increase = if enable_traverse_refs(self.0.gas_model_version) {
            Type::Bool.size() + size_reduction
        } else {
            Type::Bool.size()
        };
        self.0
            .charge(1, 1, 2, size_increase.into(), size_reduction.into())
    }

    /// Charge a `VecPack`: pop the elements, push the vector.
    fn charge_vec_pack<'a>(
        &mut self,
        _ty: impl TypeView + 'a,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We will perform `num_args` number of pops.
        let num_args = args.len() as u64;
        // The amount of data on the stack stays constant except we have some extra metadata for
        // the vector to hold the length of the vector.
        self.0.charge(1, 1, num_args, VEC_SIZE.into(), 0)
    }

    /// Charge a `VecLen`: pop the vector reference, push a u64 length.
    fn charge_vec_len(&mut self, _ty: impl TypeView) -> PartialVMResult<()> {
        self.0
            .charge(1, 1, 1, Type::U64.size().into(), REFERENCE_SIZE.into())
    }

    /// Charge a vector borrow: pop the vector reference and the u64 index, push an
    /// element reference.
    fn charge_vec_borrow(
        &mut self,
        _is_mut: bool,
        _ty: impl TypeView,
        _is_success: bool,
    ) -> PartialVMResult<()> {
        self.0.charge(
            1,
            1,
            2,
            REFERENCE_SIZE.into(),
            (REFERENCE_SIZE + Type::U64.size()).into(),
        )
    }

    /// Charge a `VecPushBack`: pop the vector reference and the element.
    fn charge_vec_push_back(
        &mut self,
        _ty: impl TypeView,
        _val: impl ValueView,
    ) -> PartialVMResult<()> {
        // The value was already on the stack, so we aren't increasing the number of bytes on the stack.
        self.0.charge(1, 0, 2, 0, REFERENCE_SIZE.into())
    }

    /// Charge a `VecPopBack`: pop the vector reference, push the popped element.
    fn charge_vec_pop_back(
        &mut self,
        _ty: impl TypeView,
        _val: Option<impl ValueView>,
    ) -> PartialVMResult<()> {
        self.0.charge(1, 1, 1, 0, REFERENCE_SIZE.into())
    }

    /// Charge a `VecUnpack`: pop the vector, push all of its elements.
    fn charge_vec_unpack(
        &mut self,
        _ty: impl TypeView,
        expect_num_elements: NumArgs,
        _elems: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Charge for the pushes
        let pushes = u64::from(expect_num_elements);
        // The stack size stays pretty much the same modulo the additional vector size
        self.0.charge(1, pushes, 1, 0, VEC_SIZE.into())
    }

    /// Charge a `VecSwap`: consumes the vector reference and two u64 indices. Push/pop
    /// accounting differs by gas model version (newer versions count 3 pops, 0 pushes).
    fn charge_vec_swap(&mut self, _ty: impl TypeView) -> PartialVMResult<()> {
        let size_decrease = REFERENCE_SIZE + Type::U64.size() + Type::U64.size();
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 3)
        } else {
            (1, 1)
        };
        self.0.charge(1, pushes, pops, 0, size_decrease.into())
    }

    /// Dropping a frame is free: locals are not charged on frame teardown.
    fn charge_drop_frame(
        &mut self,
        _locals: impl Iterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        Ok(())
    }

    /// Gas remaining in the meter. When metering is disabled (`charge` flag unset),
    /// report effectively unlimited gas.
    fn remaining_gas(&self) -> InternalGas {
        if !self.0.charge {
            return InternalGas::new(u64::MAX);
        }
        self.0.gas_left
    }
}
385
386fn abstract_memory_size(status: &GasStatus, val: impl ValueView) -> AbstractMemorySize {
387    let config = size_config_for_gas_model_version(status.gas_model_version, false);
388    val.abstract_memory_size(&config)
389}
390
391fn abstract_memory_size_with_traversal(
392    status: &GasStatus,
393    val: impl ValueView,
394) -> AbstractMemorySize {
395    let config = size_config_for_gas_model_version(status.gas_model_version, true);
396    val.abstract_memory_size(&config)
397}
398
/// Reference traversal during abstract-size computation is enabled starting with
/// gas model version 10.
fn enable_traverse_refs(gas_model_version: u64) -> bool {
    gas_model_version >= 10
}
402
/// Whether `ReadRef` should be charged using the traversed (deep) size of the value.
///
/// Fix: the previous comment claimed "versions 10 and above", but the code checks
/// `> 10`, i.e. the reweighting takes effect in version 11 and above. The code is
/// authoritative (gas behavior is consensus-critical), so the comment is corrected.
fn reweight_read_ref(gas_model_version: u64) -> bool {
    // Reweighting `ReadRef` is only done in gas model versions 11 and above.
    gas_model_version > 10
}
407
/// Whether `MoveLoc` should be charged a flat reference-sized cost instead of the
/// moved value's full abstract size.
///
/// Fix: the previous comment claimed "versions 10 and above", but the code checks
/// `> 10`, i.e. version 11 and above. The code is authoritative (gas behavior is
/// consensus-critical), so the comment is corrected.
fn reweight_move_loc(gas_model_version: u64) -> bool {
    // Reweighting `MoveLoc` is only done in gas model versions 11 and above.
    gas_model_version > 10
}
412
/// Whether `WriteRef`/`VecSwap` use the corrected push/pop stack accounting.
///
/// Fix: the previous comment claimed "versions 10 and above", but the code checks
/// `> 10`, i.e. version 11 and above. The code is authoritative (gas behavior is
/// consensus-critical), so the comment is corrected.
fn reduce_stack_size(gas_model_version: u64) -> bool {
    // Reducing stack size is only done in gas model versions 11 and above.
    gas_model_version > 10
}
417
/// Fine-grained abstract value sizing is enabled starting with gas model version 11.
fn enable_fine_grained_value_size(gas_model_version: u64) -> bool {
    gas_model_version >= 11
}
421
422fn size_config_for_gas_model_version(
423    gas_model_version: u64,
424    should_traverse_refs: bool,
425) -> SizeConfig {
426    if use_legacy_abstract_size(gas_model_version) {
427        SizeConfig {
428            traverse_references: false,
429            include_vector_size: false,
430            fine_grained_value_size: false,
431        }
432    } else if should_traverse_refs {
433        SizeConfig {
434            traverse_references: enable_traverse_refs(gas_model_version),
435            include_vector_size: true,
436            fine_grained_value_size: enable_fine_grained_value_size(gas_model_version),
437        }
438    } else {
439        SizeConfig {
440            traverse_references: false,
441            include_vector_size: true,
442            fine_grained_value_size: enable_fine_grained_value_size(gas_model_version),
443        }
444    }
445}