sui_adapter_latest/
gas_meter.rs

1// Copyright (c) Mysten Labs, Inc.
2// SPDX-License-Identifier: Apache-2.0
3
4use std::ops::DerefMut;
5
6use move_binary_format::errors::PartialVMResult;
7use move_core_types::{
8    gas_algebra::{AbstractMemorySize, InternalGas, NumArgs, NumBytes},
9    language_storage::ModuleId,
10};
11use move_vm_runtime::{
12    execution::Type,
13    shared::{
14        gas::{GasMeter, SimpleInstruction},
15        views::{SizeConfig, ValueView},
16    },
17};
18use sui_types::gas_model::{
19    gas_predicates::{native_function_threshold_exceeded, use_legacy_abstract_size},
20    tables::{GasStatus, REFERENCE_SIZE, STRUCT_SIZE, VEC_SIZE},
21};
22
/// Newtype adapter implementing the Move VM [`GasMeter`] trait on top of a Sui
/// [`GasStatus`]. `G` is any mutably-dereferencing wrapper around `GasStatus`
/// (e.g. `&mut GasStatus`), so the meter can borrow rather than own the status.
pub struct SuiGasMeter<G: DerefMut<Target = GasStatus>>(pub G);
24
25/// Returns a tuple of (<pops>, <pushes>, <stack_size_decrease>, <stack_size_increase>)
26fn get_simple_instruction_stack_change(
27    instr: SimpleInstruction,
28) -> PartialVMResult<(u64, u64, AbstractMemorySize, AbstractMemorySize)> {
29    use SimpleInstruction::*;
30
31    Ok(match instr {
32        // NB: The `Ret` pops are accounted for in `Call` instructions, so we say `Ret` has no pops.
33        Nop | Ret => (0, 0, 0.into(), 0.into()),
34        BrTrue | BrFalse => (1, 0, Type::Bool.size()?, 0.into()),
35        Branch => (0, 0, 0.into(), 0.into()),
36        LdU8 => (0, 1, 0.into(), Type::U8.size()?),
37        LdU16 => (0, 1, 0.into(), Type::U16.size()?),
38        LdU32 => (0, 1, 0.into(), Type::U32.size()?),
39        LdU64 => (0, 1, 0.into(), Type::U64.size()?),
40        LdU128 => (0, 1, 0.into(), Type::U128.size()?),
41        LdU256 => (0, 1, 0.into(), Type::U256.size()?),
42        LdTrue | LdFalse => (0, 1, 0.into(), Type::Bool.size()?),
43        FreezeRef => (1, 1, REFERENCE_SIZE, REFERENCE_SIZE),
44        ImmBorrowLoc | MutBorrowLoc => (0, 1, 0.into(), REFERENCE_SIZE),
45        ImmBorrowField | MutBorrowField | ImmBorrowFieldGeneric | MutBorrowFieldGeneric => {
46            (1, 1, REFERENCE_SIZE, REFERENCE_SIZE)
47        }
48        // Since we don't have the size of the value being cast here we take a conservative
49        // over-approximation: it is _always_ getting cast from the smallest integer type.
50        CastU8 => (1, 1, Type::U8.size()?, Type::U8.size()?),
51        CastU16 => (1, 1, Type::U8.size()?, Type::U16.size()?),
52        CastU32 => (1, 1, Type::U8.size()?, Type::U32.size()?),
53        CastU64 => (1, 1, Type::U8.size()?, Type::U64.size()?),
54        CastU128 => (1, 1, Type::U8.size()?, Type::U128.size()?),
55        CastU256 => (1, 1, Type::U8.size()?, Type::U256.size()?),
56        // NB: We don't know the size of what integers we're dealing with, so we conservatively
57        // over-approximate by popping the smallest integers, and push the largest.
58        Add | Sub | Mul | Mod | Div => (
59            2,
60            1,
61            Type::U8.size()? + Type::U8.size()?,
62            Type::U256.size()?,
63        ),
64        BitOr | BitAnd | Xor => (
65            2,
66            1,
67            Type::U8.size()? + Type::U8.size()?,
68            Type::U256.size()?,
69        ),
70        Shl | Shr => (
71            2,
72            1,
73            Type::U8.size()? + Type::U8.size()?,
74            Type::U256.size()?,
75        ),
76        Or | And => (
77            2,
78            1,
79            Type::Bool.size()? + Type::Bool.size()?,
80            Type::Bool.size()?,
81        ),
82        Lt | Gt | Le | Ge => (
83            2,
84            1,
85            Type::U8.size()? + Type::U8.size()?,
86            Type::Bool.size()?,
87        ),
88        Not => (1, 1, Type::Bool.size()?, Type::Bool.size()?),
89        Abort => (1, 0, Type::U64.size()?, 0.into()),
90    })
91}
92
// NOTE(review): throughout this impl, `GasStatus::charge` is called as
// `charge(num_instructions, pushes, pops, size_increase, size_decrease)`.
// That argument order is inferred from the call sites below — confirm against
// the `GasStatus` definition in `sui_types::gas_model::tables`.
impl<G: DerefMut<Target = GasStatus>> GasMeter for SuiGasMeter<G> {
    /// Charge an instruction and fail if not enough gas units are left.
    fn charge_simple_instr(&mut self, instr: SimpleInstruction) -> PartialVMResult<()> {
        let (pops, pushes, pop_size, push_size) = get_simple_instruction_stack_change(instr)?;
        self.0
            .charge(1, pushes, pops, push_size.into(), pop_size.into())
    }

    /// Charge for a `Pop`: one pop, stack shrinks by the popped value's abstract size.
    fn charge_pop(&mut self, popped_val: impl ValueView) -> PartialVMResult<()> {
        let decr_size = abstract_memory_size(&self.0, popped_val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    /// Charge for the completion of a native call: the pushes of its return
    /// values plus the gas `amount` the native function itself reported.
    fn charge_native_function(
        &mut self,
        amount: InternalGas,
        ret_vals: Option<impl ExactSizeIterator<Item = impl ValueView>>,
    ) -> PartialVMResult<()> {
        // Charge for the number of pushes on to the stack that the return of this function is
        // going to cause.
        let pushes = ret_vals
            .as_ref()
            .map(|ret_vals| ret_vals.len())
            .unwrap_or(0) as u64;
        // Calculate the number of bytes that are getting pushed onto the stack.
        let size_increase = match ret_vals {
            Some(mut ret_vals) => ret_vals.try_fold(
                AbstractMemorySize::zero(),
                |acc, elem| -> PartialVMResult<_> {
                    Ok(acc + abstract_memory_size(&self.0, elem)?)
                },
            )?,
            None => AbstractMemorySize::zero(),
        };
        // Track the native-call count; it feeds the threshold check below.
        self.0.record_native_call();
        if native_function_threshold_exceeded(self.0.gas_model_version, self.0.num_native_calls) {
            // Charge for the stack operations. We don't count this as an "instruction" since we
            // already accounted for the `Call` instruction in the
            // `charge_native_function_before_execution` call.
            // The amount returned by the native function is viewed as the "virtual" instruction cost
            // for the native function, and will be charged and contribute to the overall cost tier of
            // the transaction accordingly.
            self.0
                .charge(amount.into(), pushes, 0, size_increase.into(), 0)
        } else {
            // Charge for the stack operations. We don't count this as an "instruction" since we
            // already accounted for the `Call` instruction in the
            // `charge_native_function_before_execution` call.
            self.0.charge(0, pushes, 0, size_increase.into(), 0)?;
            // Now charge the gas that the native function told us to charge.
            self.0.deduct_gas(amount)
        }
    }

    /// Charge for popping the arguments of a native call off the operand stack
    /// (plus the `Call` instruction that initiated the native call).
    fn charge_native_function_before_execution(
        &mut self,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Determine the number of pops that are going to be needed for this function call, and
        // charge for them.
        let pops = args.len() as u64;
        // Calculate the size decrease of the stack from the above pops.
        // NOTE(review): the fold is seeded with `pops` rather than 0 (compare
        // `charge_call`, which seeds with 0) — presumably intentional for this
        // gas model; confirm before changing, as this affects charged amounts.
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(pops),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        // Track that this is going to be popping from the operand stack. We also increment the
        // instruction count as we need to account for the `Call` bytecode that initiated this
        // native call.
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge for a (non-generic) Move `Call`: pop the arguments into the
    /// callee's locals.
    fn charge_call(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // We will have to perform this many pops for the call.
        let pops = args.len() as u64;
        // Size stays the same -- we're just moving it from the operand stack to the locals. But
        // the size on the operand stack is reduced by sum_{args} arg.size()?.
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge for a `CallGeneric`: same accounting as `charge_call`.
    fn charge_call_generic(
        &mut self,
        _module_id: &ModuleId,
        _func_name: &str,
        mut args: impl ExactSizeIterator<Item = impl ValueView>,
        _num_locals: NumArgs,
    ) -> PartialVMResult<()> {
        // We have to perform this many pops from the operand stack for this function call.
        let pops = args.len() as u64;
        // Calculate the size reduction on the operand stack.
        let stack_reduction_size = args.try_fold(
            AbstractMemorySize::new(0),
            |acc, elem| -> PartialVMResult<_> { Ok(acc + abstract_memory_size(&self.0, elem)?) },
        )?;
        // Charge for the pops, no pushes, and account for the stack size decrease. Also track the
        // `CallGeneric` instruction we must have encountered for this.
        self.0.charge(1, 0, pops, 0, stack_reduction_size.into())
    }

    /// Charge for `LdConst` by the constant's serialized byte size.
    fn charge_ld_const(&mut self, size: NumBytes) -> PartialVMResult<()> {
        // Charge for the load from the locals onto the stack.
        self.0.charge(1, 1, 0, u64::from(size), 0)
    }

    fn charge_ld_const_after_deserialization(
        &mut self,
        _val: impl ValueView,
    ) -> PartialVMResult<()> {
        // We already charged for this based on the bytes that we're loading so don't charge again.
        Ok(())
    }

    /// Charge for `CopyLoc`: the copy pushes a duplicate of the local.
    fn charge_copy_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Charge for the copy of the local onto the stack.
        let incr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 1, 0, incr_size.into(), 0)
    }

    /// Charge for `MoveLoc`. Newer gas models charge a flat reference-sized
    /// increase; older ones charge the moved value's abstract size.
    fn charge_move_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        if reweight_move_loc(self.0.gas_model_version) {
            self.0.charge(1, 1, 0, REFERENCE_SIZE.into(), 0)
        } else {
            // Charge for the move of the local on to the stack. Note that we charge here since we
            // aren't tracking the local size (at least not yet). If we were, this should be a net-zero
            // operation in terms of memory usage.
            let incr_size = abstract_memory_size(&self.0, val)?;
            self.0.charge(1, 1, 0, incr_size.into(), 0)
        }
    }

    fn charge_store_loc(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // Charge for the storing of the value on the stack into a local. Note here that if we were
        // also accounting for the size of the locals that this would be a net-zero operation in
        // terms of memory.
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    /// Charge for `Pack`/`PackGeneric`: pop the fields, push one struct.
    fn charge_pack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We perform `num_fields` number of pops.
        let num_fields = args.len() as u64;
        // The actual amount of memory on the stack is staying the same with the addition of some
        // extra size for the struct, so the size doesn't really change much.
        self.0.charge(1, 1, num_fields, STRUCT_SIZE.into(), 0)
    }

    /// Charge for `Unpack`/`UnpackGeneric`: pop one struct, push its fields.
    fn charge_unpack(
        &mut self,
        _is_generic: bool,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We perform `num_fields` number of pushes.
        let num_fields = args.len() as u64;
        self.0.charge(1, num_fields, 1, 0, STRUCT_SIZE.into())
    }

    /// Charge for `VariantSwitch`: the scrutinee value is consumed.
    fn charge_variant_switch(&mut self, val: impl ValueView) -> PartialVMResult<()> {
        // We perform a single pop of a value from the stack.
        let decr_size = abstract_memory_size(&self.0, val)?;
        self.0.charge(1, 0, 1, 0, decr_size.into())
    }

    fn charge_read_ref(&mut self, ref_val: impl ValueView) -> PartialVMResult<()> {
        // We read the reference so we are decreasing the size of the stack by the size of the
        // reference, and adding to it the size of the value that has been read from that
        // reference.
        // Newer gas models (see `reweight_read_ref`) size the value with reference
        // traversal enabled; older ones use the shallow size.
        let size = if reweight_read_ref(self.0.gas_model_version) {
            abstract_memory_size_with_traversal(&self.0, ref_val)?
        } else {
            abstract_memory_size(&self.0, ref_val)?
        };
        self.0.charge(1, 1, 1, size.into(), REFERENCE_SIZE.into())
    }

    /// Charge for `WriteRef`: the new value and the reference are consumed and
    /// the old value behind the reference is replaced.
    fn charge_write_ref(
        &mut self,
        new_val: impl ValueView,
        old_val: impl ValueView,
    ) -> PartialVMResult<()> {
        // TODO(tzakian): We should account for this elsewhere as the owner of data the
        // reference points to won't be on the stack. For now though, we treat it as adding to the
        // stack size.
        // Newer gas models stop counting a phantom push here (see `reduce_stack_size`).
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 2)
        } else {
            (1, 2)
        };
        let incr_size = abstract_memory_size(&self.0, new_val)?;
        let decr_size = abstract_memory_size(&self.0, old_val)?;
        self.0
            .charge(1, pushes, pops, incr_size.into(), decr_size.into())
    }

    /// Charge for `Eq`: both operands are popped and a `Bool` is pushed.
    fn charge_eq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        // Deep (with-traversal) sizes of both operands, since equality compares contents.
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        // NOTE(review): the operand size is charged on the increase side as well
        // as the decrease side — presumably pricing the comparison work itself;
        // confirm against the gas model spec.
        self.0.charge(
            1,
            1,
            2,
            (Type::Bool.size()? + size_reduction).into(),
            size_reduction.into(),
        )
    }

    /// Charge for `Neq`: mirrors `charge_eq`, except the size-increase term is
    /// only inflated by the operand sizes on gas models with reference traversal.
    fn charge_neq(&mut self, lhs: impl ValueView, rhs: impl ValueView) -> PartialVMResult<()> {
        let size_reduction = abstract_memory_size_with_traversal(&self.0, lhs)?
            + abstract_memory_size_with_traversal(&self.0, rhs)?;
        let size_increase = if enable_traverse_refs(self.0.gas_model_version) {
            Type::Bool.size()? + size_reduction
        } else {
            Type::Bool.size()?
        };
        self.0
            .charge(1, 1, 2, size_increase.into(), size_reduction.into())
    }

    /// Charge for `VecPack`: pop the elements, push one vector.
    fn charge_vec_pack<'a>(
        &mut self,
        args: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // We will perform `num_args` number of pops.
        let num_args = args.len() as u64;
        // The amount of data on the stack stays constant except we have some extra metadata for
        // the vector to hold the length of the vector.
        self.0.charge(1, 1, num_args, VEC_SIZE.into(), 0)
    }

    /// Charge for `VecLen`: pop the vector reference, push a `u64` length.
    fn charge_vec_len(&mut self) -> PartialVMResult<()> {
        self.0
            .charge(1, 1, 1, Type::U64.size()?.into(), REFERENCE_SIZE.into())
    }

    /// Charge for `VecImmBorrow`/`VecMutBorrow`: pop the vector reference and
    /// the `u64` index, push an element reference.
    fn charge_vec_borrow(&mut self, _is_mut: bool, _is_success: bool) -> PartialVMResult<()> {
        self.0.charge(
            1,
            1,
            2,
            REFERENCE_SIZE.into(),
            (REFERENCE_SIZE + Type::U64.size()?).into(),
        )
    }

    /// Charge for `VecPushBack`: pop the vector reference and the element.
    fn charge_vec_push_back(&mut self, _val: impl ValueView) -> PartialVMResult<()> {
        // The value was already on the stack, so we aren't increasing the number of bytes on the stack.
        self.0.charge(1, 0, 2, 0, REFERENCE_SIZE.into())
    }

    /// Charge for `VecPopBack`: pop the vector reference, push the element.
    fn charge_vec_pop_back(&mut self, _val: Option<impl ValueView>) -> PartialVMResult<()> {
        self.0.charge(1, 1, 1, 0, REFERENCE_SIZE.into())
    }

    /// Charge for `VecUnpack`: pop the vector, push its elements.
    fn charge_vec_unpack(
        &mut self,
        expect_num_elements: NumArgs,
        _elems: impl ExactSizeIterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        // Charge for the pushes
        let pushes = u64::from(expect_num_elements);
        // The stack size stays pretty much the same modulo the additional vector size
        self.0.charge(1, pushes, 1, 0, VEC_SIZE.into())
    }

    /// Charge for `VecSwap`: the vector reference and two `u64` indices are consumed.
    fn charge_vec_swap(&mut self) -> PartialVMResult<()> {
        let size_decrease = REFERENCE_SIZE + Type::U64.size()? + Type::U64.size()?;
        // Newer gas models count the three actual pops; older ones used (1, 1).
        let (pushes, pops) = if reduce_stack_size(self.0.gas_model_version) {
            (0, 3)
        } else {
            (1, 1)
        };
        self.0.charge(1, pushes, pops, 0, size_decrease.into())
    }

    /// Dropping a frame is free: the `Call`/`Ret` accounting already covered it.
    fn charge_drop_frame(
        &mut self,
        _locals: impl Iterator<Item = impl ValueView>,
    ) -> PartialVMResult<()> {
        Ok(())
    }

    /// Remaining gas, or effectively unlimited when metering is disabled
    /// (`GasStatus::charge == false`, e.g. for system transactions).
    fn remaining_gas(&self) -> InternalGas {
        if !self.0.charge {
            return InternalGas::new(u64::MAX);
        }
        self.0.gas_left
    }
}
394
395fn abstract_memory_size(
396    status: &GasStatus,
397    val: impl ValueView,
398) -> PartialVMResult<AbstractMemorySize> {
399    let config = size_config_for_gas_model_version(status.gas_model_version, false);
400    val.abstract_memory_size(&config)
401}
402
403fn abstract_memory_size_with_traversal(
404    status: &GasStatus,
405    val: impl ValueView,
406) -> PartialVMResult<AbstractMemorySize> {
407    let config = size_config_for_gas_model_version(status.gas_model_version, true);
408    val.abstract_memory_size(&config)
409}
410
/// Whether abstract-size computation may traverse through references.
/// Enabled for gas model versions 10 and above.
fn enable_traverse_refs(gas_model_version: u64) -> bool {
    gas_model_version >= 10
}
414
/// Whether `charge_read_ref` should size the read value with reference
/// traversal enabled instead of the shallow size.
fn reweight_read_ref(gas_model_version: u64) -> bool {
    // Reweighting `ReadRef` is only done in gas model versions 11 and above
    // (the previous comment said "10 and above", contradicting the `> 10`
    // check; the code is the deployed behavior, so the comment is corrected).
    gas_model_version > 10
}
419
/// Whether `charge_move_loc` should charge a flat reference-sized stack
/// increase instead of the moved value's abstract size.
fn reweight_move_loc(gas_model_version: u64) -> bool {
    // Reweighting `MoveLoc` is only done in gas model versions 11 and above
    // (the previous comment said "10 and above", contradicting the `> 10`
    // check; the code is the deployed behavior, so the comment is corrected).
    gas_model_version > 10
}
424
/// Whether `charge_write_ref` / `charge_vec_swap` should use the corrected
/// push/pop counts (no phantom pushes; all actual pops counted).
fn reduce_stack_size(gas_model_version: u64) -> bool {
    // Reducing stack size is only done in gas model versions 11 and above
    // (the previous comment said "10 and above", contradicting the `> 10`
    // check; the code is the deployed behavior, so the comment is corrected).
    gas_model_version > 10
}
429
430fn size_config_for_gas_model_version(
431    gas_model_version: u64,
432    should_traverse_refs: bool,
433) -> SizeConfig {
434    if use_legacy_abstract_size(gas_model_version) {
435        SizeConfig {
436            traverse_references: false,
437            include_vector_size: false,
438        }
439    } else if should_traverse_refs {
440        SizeConfig {
441            traverse_references: enable_traverse_refs(gas_model_version),
442            include_vector_size: true,
443        }
444    } else {
445        SizeConfig {
446            traverse_references: false,
447            include_vector_size: true,
448        }
449    }
450}