Skip to content

Commit

Permalink
[temporary-storage] Add API to access last known ObjectRef from paren…
Browse files Browse the repository at this point in the history
…t sync (MystenLabs#3618)

* [temporary-storage] Add API to access last known ObjectRef from parent sync

- In an effort to move the version info out of Move, we will need to be able to query the last known version when unwrapping objects
  • Loading branch information
tnowacki authored Jul 29, 2022
1 parent dafbaa2 commit 3566707
Show file tree
Hide file tree
Showing 9 changed files with 225 additions and 152 deletions.
26 changes: 17 additions & 9 deletions crates/sui-adapter/src/in_memory_storage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,15 @@ use sui_types::{
base_types::{ObjectID, ObjectRef, SequenceNumber},
error::{SuiError, SuiResult},
object::Object,
storage::{BackingPackageStore, DeleteKind},
storage::{BackingPackageStore, DeleteKind, ParentSync},
};

// TODO: We should use AuthorityTemporaryStore instead.
// Keeping this functionally identical to AuthorityTemporaryStore is a pain.
#[derive(Default, Debug)]
/// Simple in-memory object store used by the adapter; keeps live objects plus
/// the last known ref of anything deleted so `ParentSync` queries can answer.
#[derive(Debug, Default)]
pub struct InMemoryStorage {
    /// All live objects, keyed by their `ObjectID`.
    persistent: BTreeMap<ObjectID, Object>,
    /// Last known `ObjectRef` for each object removed from `persistent`
    /// (recorded on deletion, cleared if the object is re-inserted).
    /// Backing data for the `ParentSync` impl below.
    last_entry_for_deleted: BTreeMap<ObjectID, ObjectRef>,
}

impl BackingPackageStore for InMemoryStorage {
Expand All @@ -23,9 +24,10 @@ impl BackingPackageStore for InMemoryStorage {
}
}

impl BackingPackageStore for &mut InMemoryStorage {
fn get_package(&self, package_id: &ObjectID) -> SuiResult<Option<Object>> {
(**self).get_package(package_id)
impl ParentSync for InMemoryStorage {
    /// Returns the last known `ObjectRef` recorded when `object_id` was
    /// deleted/wrapped, or `None` if no such deletion was ever recorded.
    fn get_latest_parent_entry_ref(&self, object_id: ObjectID) -> SuiResult<Option<ObjectRef>> {
        // Callers should only ask about objects that are no longer live.
        debug_assert!(!self.persistent.contains_key(&object_id));
        let last_known = self.last_entry_for_deleted.get(&object_id).cloned();
        Ok(last_known)
    }
}

Expand Down Expand Up @@ -61,7 +63,10 @@ impl InMemoryStorage {
for o in objects {
persistent.insert(o.id(), o);
}
Self { persistent }
Self {
persistent,
last_entry_for_deleted: BTreeMap::new(),
}
}

pub fn get_object(&self, id: &ObjectID) -> Option<&Object> {
Expand All @@ -77,7 +82,9 @@ impl InMemoryStorage {
}

pub fn insert_object(&mut self, object: Object) {
self.persistent.insert(object.id(), object);
let id = object.id();
self.last_entry_for_deleted.remove(&id);
self.persistent.insert(id, object);
}

pub fn objects(&self) -> &BTreeMap<ObjectID, Object> {
Expand All @@ -99,8 +106,9 @@ impl InMemoryStorage {
self.insert_object(new_object);
}
for (id, _) in deleted {
let obj_opt = self.persistent.remove(&id);
assert!(obj_opt.is_some())
let obj = self.persistent.remove(&id).unwrap();
self.last_entry_for_deleted
.insert(id, obj.compute_object_reference());
}
}
}
183 changes: 97 additions & 86 deletions crates/sui-adapter/src/temporary_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,35 +8,37 @@ use std::collections::{BTreeMap, HashSet};
use sui_types::base_types::{
ObjectDigest, ObjectID, ObjectRef, SequenceNumber, SuiAddress, TransactionDigest,
};
use sui_types::error::{ExecutionError, SuiError};
use sui_types::error::{ExecutionError, SuiError, SuiResult};
use sui_types::fp_bail;
use sui_types::messages::{ExecutionStatus, InputObjects, TransactionEffects};
use sui_types::object::{Data, Object};
use sui_types::storage::{BackingPackageStore, DeleteKind, Storage};
use sui_types::storage::{BackingPackageStore, DeleteKind, ParentSync, Storage};
use sui_types::{
event::Event,
gas::{GasCostSummary, SuiGasStatus},
object::Owner,
};

pub type InnerTemporaryStore = (
BTreeMap<ObjectID, Object>,
Vec<ObjectRef>,
BTreeMap<ObjectID, (ObjectRef, Object)>,
BTreeMap<ObjectID, (SequenceNumber, DeleteKind)>,
Vec<Event>,
);
/// The state extracted from a `TemporaryStore` once execution is finished.
pub struct InnerTemporaryStore {
    /// The input objects the transaction executed against.
    pub objects: BTreeMap<ObjectID, Object>,
    /// Refs of the inputs that were mutable.
    pub mutable_inputs: Vec<ObjectRef>,
    /// Objects written during execution, paired with their freshly computed
    /// `ObjectRef`s.
    pub written: BTreeMap<ObjectID, (ObjectRef, Object)>,
    /// Objects deleted during execution: the version at deletion and the kind
    /// of deletion (normal / unwrap-then-delete / wrap).
    pub deleted: BTreeMap<ObjectID, (SequenceNumber, DeleteKind)>,
}

pub struct TemporaryStore<S> {
// The backing store for retrieving Move packages onchain.
// When executing a Move call, the dependent packages are not going to be
// in the input objects. They will be feteched from the backing store.
package_store: S,
// in the input objects. They will be fetched from the backing store.
// Also used for fetching the backing parent_sync to get the last known version for wrapped
// objects
store: S,
tx_digest: TransactionDigest,
objects: BTreeMap<ObjectID, Object>,
mutable_inputs: Vec<ObjectRef>, // Inputs that are mutable
written: BTreeMap<ObjectID, (ObjectRef, Object)>, // Objects written
mutable_inputs: Vec<ObjectRef>, // Inputs that are mutable
written: BTreeMap<ObjectID, Object>, // Objects written
/// Objects actively deleted.
/// Child count is Some for Normal/UnwrapThenDelete events, and is None for wraps
deleted: BTreeMap<ObjectID, (SequenceNumber, DeleteKind)>,
/// Ordered sequence of events emitted by execution
events: Vec<Event>,
Expand All @@ -45,18 +47,30 @@ pub struct TemporaryStore<S> {
created_object_ids: HashSet<ObjectID>,
}

/// Moves a `TemporaryStore`'s result fields out into an `InnerTemporaryStore`,
/// computing a fresh `ObjectRef` for every written object. Implemented as a
/// macro (rather than a `self`-consuming method) so that callers such as
/// `to_effects` can still use the remaining fields (e.g. `events`) afterwards
/// via partial moves.
macro_rules! into_inner {
    ($store:ident) => {{
        let written_with_refs = $store
            .written
            .into_iter()
            .map(|(id, object)| {
                let object_ref = object.compute_object_reference();
                (id, (object_ref, object))
            })
            .collect();
        InnerTemporaryStore {
            objects: $store.objects,
            mutable_inputs: $store.mutable_inputs,
            written: written_with_refs,
            deleted: $store.deleted,
        }
    }};
}

impl<S> TemporaryStore<S> {
/// Creates a new store associated with an authority store, and populates it with
/// initial objects.
pub fn new(
package_store: S,
input_objects: InputObjects,
tx_digest: TransactionDigest,
) -> Self {
pub fn new(store: S, input_objects: InputObjects, tx_digest: TransactionDigest) -> Self {
let mutable_inputs = input_objects.mutable_inputs();
let objects = input_objects.into_object_map();
Self {
package_store,
store,
tx_digest,
objects,
mutable_inputs,
Expand All @@ -72,7 +86,7 @@ impl<S> TemporaryStore<S> {
&self.objects
}

pub fn written(&self) -> &BTreeMap<ObjectID, (ObjectRef, Object)> {
pub fn written(&self) -> &BTreeMap<ObjectID, Object> {
&self.written
}

Expand All @@ -86,13 +100,7 @@ impl<S> TemporaryStore<S> {
{
self.check_invariants();
}
(
self.objects,
self.mutable_inputs,
self.written,
self.deleted,
self.events,
)
into_inner!(self)
}

/// For every object from active_inputs (i.e. all mutable objects), if they are not
Expand All @@ -108,8 +116,7 @@ impl<S> TemporaryStore<S> {
let mut object = self.objects[id].clone();
// Active input object must be Move object.
object.data.try_as_move_mut().unwrap().increment_version();
self.written
.insert(*id, (object.compute_object_reference(), object));
self.written.insert(*id, object);
}
}
}
Expand All @@ -132,7 +139,7 @@ impl<S> TemporaryStore<S> {
)?;
objects_to_update.push(gas_object.clone());

for (object_id, (_object_ref, object)) in &mut self.written {
for (object_id, object) in &mut self.written {
let (old_object_size, storage_rebate) =
if let Some(old_object) = self.objects.get(object_id) {
(
Expand Down Expand Up @@ -179,73 +186,75 @@ impl<S> TemporaryStore<S> {
}

pub fn to_effects(
&self,
self,
shared_object_refs: Vec<ObjectRef>,
transaction_digest: &TransactionDigest,
transaction_dependencies: Vec<TransactionDigest>,
gas_cost_summary: GasCostSummary,
status: ExecutionStatus,
gas_object_ref: ObjectRef,
) -> TransactionEffects {
) -> (InnerTemporaryStore, TransactionEffects) {
let written = self
.written
.iter()
.map(|(id, obj)| (*id, (obj.compute_object_reference(), obj.owner)))
.collect::<BTreeMap<_, _>>();

// In the case of special transactions that don't require a gas object,
// we don't really care about the effects to gas, just use the input for it.
let updated_gas_object_info = if gas_object_ref.0 == ObjectID::ZERO {
(gas_object_ref, Owner::AddressOwner(SuiAddress::default()))
} else {
let (gas_reference, gas_object) = &self.written[&gas_object_ref.0];
(*gas_reference, gas_object.owner)
written[&gas_object_ref.0]
};
TransactionEffects {
let mut created = vec![];
let mut mutated = vec![];
let mut unwrapped = vec![];
for (id, object_ref_and_owner) in written {
match (
self.created_object_ids.contains(&id),
self.objects.contains_key(&id),
) {
(true, _) => created.push(object_ref_and_owner),
(false, true) => mutated.push(object_ref_and_owner),
(false, false) => {
// wrapped objects must have their version set to 1 + the last known version in
// the `parent_sync`
debug_assert!(object_ref_and_owner.0 .1.value() > 1);
unwrapped.push(object_ref_and_owner)
}
}
}

let mut deleted = vec![];
let mut wrapped = vec![];
for (id, (version, kind)) in &self.deleted {
match kind {
DeleteKind::Normal | DeleteKind::UnwrapThenDelete => {
deleted.push((*id, *version, ObjectDigest::OBJECT_DIGEST_DELETED))
}
DeleteKind::Wrap => {
wrapped.push((*id, *version, ObjectDigest::OBJECT_DIGEST_WRAPPED))
}
}
}
let inner = into_inner!(self);

let effects = TransactionEffects {
status,
gas_used: gas_cost_summary,
shared_objects: shared_object_refs,
transaction_digest: *transaction_digest,
created: self
.written
.iter()
.filter(|(id, _)| self.created_object_ids.contains(*id))
.map(|(_, (object_ref, object))| (*object_ref, object.owner))
.collect(),
mutated: self
.written
.iter()
.filter(|(id, _)| self.objects.contains_key(*id))
.map(|(_, (object_ref, object))| (*object_ref, object.owner))
.collect(),
unwrapped: self
.written
.iter()
.filter(|(id, _)| {
!self.objects.contains_key(*id) && !self.created_object_ids.contains(*id)
})
.map(|(_, (object_ref, object))| (*object_ref, object.owner))
.collect(),
deleted: self
.deleted
.iter()
.filter_map(|(id, (version, kind))| {
if kind != &DeleteKind::Wrap {
Some((*id, *version, ObjectDigest::OBJECT_DIGEST_DELETED))
} else {
None
}
})
.collect(),
wrapped: self
.deleted
.iter()
.filter_map(|(id, (version, kind))| {
if kind == &DeleteKind::Wrap {
Some((*id, *version, ObjectDigest::OBJECT_DIGEST_WRAPPED))
} else {
None
}
})
.collect(),
created,
mutated,
unwrapped,
deleted,
wrapped,
gas_object: updated_gas_object_info,
events: self.events.clone(),
events: self.events,
dependencies: transaction_dependencies,
}
};
(inner, effects)
}

/// An internal check of the invariants (will only fire in debug)
Expand Down Expand Up @@ -295,10 +304,7 @@ impl<S> Storage for TemporaryStore<S> {
fn read_object(&self, id: &ObjectID) -> Option<&Object> {
// there should be no read after delete
debug_assert!(self.deleted.get(id) == None);
match self.written.get(id) {
Some((_, obj)) => Some(obj),
None => self.objects.get(id),
}
self.written.get(id).or_else(|| self.objects.get(id))
}

fn set_create_object_ids(&mut self, ids: HashSet<ObjectID>) {
Expand All @@ -325,8 +331,7 @@ impl<S> Storage for TemporaryStore<S> {
// The adapter is not very disciplined at filling in the correct
// previous transaction digest, so we ensure it is correct here.
object.previous_transaction = self.tx_digest;
self.written
.insert(object.id(), (object.compute_object_reference(), object));
self.written.insert(object.id(), object);
}

fn delete_object(&mut self, id: &ObjectID, version: SequenceNumber, kind: DeleteKind) {
Expand Down Expand Up @@ -359,7 +364,7 @@ impl<S: BackingPackageStore> ModuleResolver for TemporaryStore<S> {
let package_obj;
let package = match self.read_object(package_id) {
Some(object) => object,
None => match self.package_store.get_package(package_id)? {
None => match self.store.get_package(package_id)? {
Some(object) => {
package_obj = object;
&package_obj
Expand Down Expand Up @@ -415,3 +420,9 @@ impl<S> ResourceResolver for TemporaryStore<S> {
}
}
}

impl<S: ParentSync> ParentSync for TemporaryStore<S> {
fn get_latest_parent_entry_ref(&self, object_id: ObjectID) -> SuiResult<Option<ObjectRef>> {
self.store.get_latest_parent_entry_ref(object_id)
}
}
Loading

0 comments on commit 3566707

Please sign in to comment.