use std::{
collections::BTreeMap,
fs,
path::{Path, PathBuf},
};
use anyhow::{bail, ensure, Context, Result};
use client::Client;
use fastcrypto::encoding::{Base64, Encoding};
use query::{limits, packages, SuiAddress, UInt53};
use sui_types::object::Object;
use tracing::info;
mod client;
mod query;
/// Fetch every Move package published before `before_checkpoint` (and after any
/// checkpoint recorded by a previous run in `output_dir`) from the GraphQL service at
/// `rpc_url`, and write each one out under `output_dir`.
///
/// On success, persists a `last-checkpoint` watermark in `output_dir` so a subsequent
/// invocation resumes where this one left off.
pub async fn dump(
    rpc_url: String,
    output_dir: PathBuf,
    before_checkpoint: Option<u64>,
) -> Result<()> {
    // Fail fast if we have nowhere writable to put results.
    ensure_output_directory(&output_dir)?;

    let client = Client::new(rpc_url)?;

    // Resume from a previous run's watermark, if one exists.
    let after_checkpoint = read_last_checkpoint(&output_dir)?;
    let page_size = max_page_size(&client).await?;

    let (last_checkpoint, packages) =
        fetch_packages(&client, page_size, after_checkpoint, before_checkpoint).await?;

    for package in &packages {
        let SuiAddress(address) = &package.address;
        dump_package(&output_dir, package)
            .with_context(|| format!("Failed to dump package {address}"))?;
    }

    // Only record a watermark when the fetch produced one.
    if let Some(watermark) = last_checkpoint {
        write_last_checkpoint(&output_dir, watermark)?;
    }

    Ok(())
}
fn ensure_output_directory(path: impl Into<PathBuf>) -> Result<()> {
let path: PathBuf = path.into();
if !path.exists() {
fs::create_dir_all(&path).context("Making output directory")?;
return Ok(());
}
ensure!(
path.is_dir(),
"Output path is not a directory: {}",
path.display()
);
let metadata = fs::metadata(&path).context("Getting metadata for output path")?;
ensure!(
!metadata.permissions().readonly(),
"Output directory is not writable: {}",
path.display()
);
Ok(())
}
/// Load the checkpoint watermark left by a previous run, if any.
///
/// Returns `Ok(None)` when no `last-checkpoint` file exists in `output` (a fresh run);
/// errors if the file exists but cannot be read or parsed as a JSON `u64`.
fn read_last_checkpoint(output: &Path) -> Result<Option<u64>> {
    let watermark = output.join("last-checkpoint");
    if !watermark.exists() {
        // No watermark file means nothing to resume from.
        return Ok(None);
    }

    let raw = fs::read_to_string(&watermark).context("Failed to read last checkpoint")?;
    let checkpoint =
        serde_json::from_str::<u64>(&raw).context("Failed to parse last checkpoint")?;

    info!("Resuming download after checkpoint {checkpoint}");
    Ok(Some(checkpoint))
}
fn write_last_checkpoint(output: &Path, checkpoint: u64) -> Result<()> {
let path = output.join("last-checkpoint");
let content =
serde_json::to_string(&checkpoint).context("Failed to serialize last checkpoint")?;
fs::write(path, content).context("Failed to write last checkpoint")?;
Ok(())
}
/// Ask the GraphQL service for the largest page size it will serve, so pagination can
/// use as few round-trips as possible.
async fn max_page_size(client: &Client) -> Result<i32> {
    let limits = client
        .query(limits::build())
        .await
        .context("Failed to fetch max page size")?;
    Ok(limits.service_config.max_page_size)
}
/// Fetch all packages published after `after_checkpoint` and before `before_checkpoint`
/// (both optional bounds), paginating through the GraphQL API `page_size` nodes at a
/// time.
///
/// Returns the checkpoint watermark to persist alongside the fetched packages -- the
/// checkpoint these results are believed complete up to, if one can be determined.
async fn fetch_packages(
    client: &Client,
    page_size: i32,
    after_checkpoint: Option<u64>,
    before_checkpoint: Option<u64>,
) -> Result<(Option<u64>, Vec<packages::MovePackage>)> {
    // First page: no cursor. This query also reports the checkpoint it was served at,
    // which bounds how fresh the results can be.
    let packages::Query {
        checkpoint: checkpoint_viewed_at,
        packages:
            packages::MovePackageConnection {
                mut page_info,
                mut nodes,
            },
    } = client
        .query(packages::build(
            page_size,
            None,
            after_checkpoint.map(UInt53),
            before_checkpoint.map(UInt53),
        ))
        .await
        .with_context(|| "Failed to fetch page 1 of packages.")?;

    // Follow `end_cursor` until the service reports there are no further pages,
    // accumulating nodes as we go. `i` is only used for logging/error messages.
    for i in 2.. {
        if !page_info.has_next_page {
            break;
        }

        let packages = client
            .query(packages::build(
                page_size,
                page_info.end_cursor,
                after_checkpoint.map(UInt53),
                before_checkpoint.map(UInt53),
            ))
            .await
            .with_context(|| format!("Failed to fetch page {i} of packages."))?
            .packages;

        nodes.extend(packages.nodes);
        page_info = packages.page_info;
        info!(
            "Fetched page {i} ({} package{} so far).",
            nodes.len(),
            if nodes.len() == 1 { "" } else { "s" },
        );
    }

    use packages::Checkpoint as C;

    // Decide the watermark to record:
    // - Both bounds known (and `before_checkpoint > 0`): complete up to the earlier of
    //   the service's checkpoint and `before_checkpoint - 1` (the bound is exclusive;
    //   the `b > 0` guard avoids `u64` underflow).
    // - Otherwise, fall back to whichever of the two values is available.
    // NOTE(review): the fallback arm records `before_checkpoint` itself rather than
    // `before_checkpoint - 1`, unlike the first arm -- confirm this asymmetry is
    // intentional, since a later run resuming "after" that value would skip packages
    // from that checkpoint.
    let last_checkpoint = match (checkpoint_viewed_at, before_checkpoint) {
        (
            Some(C {
                sequence_number: UInt53(v),
            }),
            Some(b),
        ) if b > 0 => Some(v.min(b - 1)),
        (
            Some(C {
                sequence_number: UInt53(c),
            }),
            _,
        )
        | (_, Some(c)) => Some(c),
        _ => None,
    };

    Ok((last_checkpoint, nodes))
}
/// Write a single fetched package out to disk under `output_dir`.
///
/// The package lands in a fresh directory named `<id>.<version>`, containing the raw
/// object BCS (`object.bcs`), its linkage and type-origin tables as JSON, and one
/// `.mv` file per bytecode module. Fails if that directory already exists.
fn dump_package(output_dir: &Path, pkg: &packages::MovePackage) -> Result<()> {
    let encoded = match &pkg.bcs {
        Some(query::Base64(b)) => b,
        None => bail!("Missing BCS"),
    };

    let bytes = Base64::decode(encoded).context("Failed to decode BCS")?;
    let object: Object = bcs::from_bytes(&bytes).context("Failed to deserialize")?;
    let id = object.id();

    let package = match object.data.try_as_package() {
        Some(p) => p,
        None => bail!("Not a package"),
    };

    // Map each datatype ("module::name") to the package that originally defined it.
    let mut origins = BTreeMap::new();
    for origin in package.type_origin_table() {
        origins.insert(
            format!("{}::{}", origin.module_name, origin.datatype_name),
            origin.package.to_string(),
        );
    }

    let package_dir = output_dir.join(format!("{}.{}", id, package.version().value()));
    fs::create_dir(&package_dir).context("Failed to make output directory")?;

    let linkage_json = serde_json::to_string_pretty(package.linkage_table())
        .context("Failed to serialize linkage")?;
    let origins_json =
        serde_json::to_string_pretty(&origins).context("Failed to serialize type origins")?;

    fs::write(package_dir.join("object.bcs"), bytes).context("Failed to write object BCS")?;
    fs::write(package_dir.join("linkage.json"), linkage_json).context("Failed to write linkage")?;
    fs::write(package_dir.join("origins.json"), origins_json)
        .context("Failed to write type origins")?;

    for (module_name, module_bytes) in package.serialized_module_map() {
        fs::write(package_dir.join(format!("{module_name}.mv")), module_bytes)
            .with_context(|| format!("Failed to write module: {module_name}"))?;
    }

    Ok(())
}