Commit
fix: avoid panic in some cases
vincent-herlemont committed Aug 24, 2024
1 parent 93771b0 commit db43ab2
Showing 13 changed files with 134 additions and 80 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -83,7 +83,7 @@ fn main() -> Result<(), db_type::Error> {
let mut db = Builder::new().create_in_memory(&MODELS)?;

// Insert data (open a read-write transaction)
let rw = db.rw_transaction().unwrap();
let rw = db.rw_transaction()?;
rw.insert(Item { id: 1, name: "red".to_string() })?;
rw.insert(Item { id: 2, name: "green".to_string() })?;
rw.insert(Item { id: 3, name: "blue".to_string() })?;
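
A note on the pattern: because the README's `main` returns `Result<(), db_type::Error>`, the `?` operator can replace `.unwrap()` and propagate the error instead of panicking. A minimal sketch reusing only the calls visible in this hunk — the `Item` model and `MODELS` static are assumed to be defined as in the rest of the README, and the trailing `rw.commit()?` is an assumption consistent with the crate's examples:

```rust
fn main() -> Result<(), db_type::Error> {
    let mut db = Builder::new().create_in_memory(&MODELS)?;

    // Open a read-write transaction; was `db.rw_transaction().unwrap()`.
    let rw = db.rw_transaction()?;
    rw.insert(Item { id: 1, name: "red".to_string() })?;
    rw.commit()?; // assumed commit step, as in the crate's other examples
    Ok(())
}
```
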
9 changes: 8 additions & 1 deletion benches/overhead_data_size.rs
@@ -134,7 +134,14 @@ fn use_native_db_insert(db: &Database, data: Data) {

fn use_native_db_scan(db: &Database) -> Vec<Data> {
let r = db.r_transaction().unwrap();
let out = r.scan().primary().unwrap().all().unwrap().try_collect().unwrap();
let out = r
.scan()
.primary()
.unwrap()
.all()
.unwrap()
.try_collect()
.unwrap();
out
}

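
`try_collect` here is presumably itertools' `Itertools::try_collect` (or an equivalent): it folds an iterator of `Result<T, E>` into `Result<Vec<T>, E>`, stopping at the first error, which is why each step of the chain above still ends in `.unwrap()` inside the benchmark. A generic, std-only illustration of the same idea (names are made up):

```rust
// Collecting an iterator of Results short-circuits on the first Err.
fn parse_all(inputs: &[&str]) -> Result<Vec<u32>, std::num::ParseIntError> {
    inputs
        .iter()
        .map(|s| s.parse::<u32>())
        .collect::<Result<Vec<_>, _>>() // same effect as itertools' try_collect
}
```
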
10 changes: 8 additions & 2 deletions native_db_macro/src/keys.rs
@@ -84,9 +84,15 @@ impl<O: ToTokenStream> KeyDefinition<O> {

pub(crate) fn ident(&self) -> Ident {
if self.is_field() {
self.field_name.as_ref().unwrap().clone()
self.field_name
.as_ref()
.expect("Trying to get an undefined field name")
.clone()
} else {
self.function_name.as_ref().unwrap().clone()
self.function_name
.as_ref()
.expect("Trying to get an undefined function name")
.clone()
}
}

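
The change from `.unwrap()` to `.expect(...)` does not remove the panic; it attaches a message naming the broken invariant, which is the pattern used throughout the macro crate in this commit. A toy illustration:

```rust
// `expect` panics with the given message; `unwrap` only says
// "called `Option::unwrap()` on a `None` value".
fn field_ident(field_name: Option<&str>) -> &str {
    field_name.expect("Trying to get an undefined field name")
}
```
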
33 changes: 24 additions & 9 deletions native_db_macro/src/model_attributes.rs
@@ -1,5 +1,6 @@
use crate::keys::{KeyDefinition, KeyOptions};
use crate::struct_name::StructName;
use proc_macro::Ident;
use std::collections::HashSet;
use syn::meta::ParseNestedMeta;
use syn::parse::Result;
@@ -21,12 +22,16 @@ impl ModelAttributes {
if meta.path.is_ident("primary_key") {
let mut key: KeyDefinition<()> = KeyDefinition::new_empty(self.struct_name.clone());
meta.parse_nested_meta(|meta| {
let ident = meta
.path
.get_ident()
.expect("Expected ident for primary_key");
if key.is_empty() {
key.set_function_name(meta.path.get_ident().unwrap().clone());
key.set_function_name(ident.clone());
} else {
panic!(
"Unknown attribute: {}",
meta.path.get_ident().unwrap().to_string()
"Unknown attribute \"{}\" for primary_key",
ident.to_string()
);
}
Ok(())
@@ -36,16 +41,20 @@ impl ModelAttributes {
let mut key: KeyDefinition<KeyOptions> =
KeyDefinition::new_empty(self.struct_name.clone());
meta.parse_nested_meta(|meta| {
let ident = meta
.path
.get_ident()
.expect("Expected ident for secondary_key");
if key.is_empty() {
key.set_function_name(meta.path.get_ident().unwrap().clone());
key.set_function_name(ident.clone());
} else if meta.path.is_ident("unique") {
key.options.unique = true;
} else if meta.path.is_ident("optional") {
key.options.optional = true;
} else {
panic!(
"Unknown attribute: {}",
meta.path.get_ident().unwrap().to_string()
"Unknown attribute \"{}\" for secondary_key",
ident.to_string()
);
}
Ok(())
@@ -54,7 +63,7 @@ impl ModelAttributes {
} else {
panic!(
"Unknown attribute: {}",
meta.path.get_ident().unwrap().to_string()
meta.path.get_ident().expect("Expected ident").to_string()
);
}
Ok(())
@@ -65,7 +74,10 @@ impl ModelAttributes {
if attr.path().is_ident("primary_key") {
self.primary_key = Some(KeyDefinition::new_field(
self.struct_name.clone(),
field.ident.clone().unwrap(),
field
.ident
.clone()
.expect("Parsed field expected to have an ident for primary_key"),
(),
));
} else if attr.path().is_ident("secondary_key") {
@@ -85,7 +97,10 @@

self.secondary_keys.insert(KeyDefinition::new_field(
self.struct_name.clone(),
field.ident.clone().unwrap(),
field
.ident
.clone()
.expect("Parsed field expected to have an ident for secondary_key"),
secondary_options,
));
}
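
The recurring shape in this file is to hoist the repeated `meta.path.get_ident().unwrap()` into a single binding with a descriptive `expect`, so both branches reuse it and the panic names the attribute being parsed. A hypothetical, self-contained sketch of that shape using syn 2's `parse_nested_meta` — the helper name and messages are made up, not the crate's actual code:

```rust
use syn::{parse::Result, Attribute, Ident};

fn parse_primary_key(attr: &Attribute) -> Result<Ident> {
    let mut name: Option<Ident> = None;
    attr.parse_nested_meta(|meta| {
        // Hoisted once; every branch below reuses the same `ident`.
        let ident = meta
            .path
            .get_ident()
            .expect("Expected ident for primary_key");
        if name.is_none() {
            name = Some(ident.clone());
        } else {
            panic!("Unknown attribute \"{}\" for primary_key", ident);
        }
        Ok(())
    })?;
    Ok(name.expect("primary_key requires a function name"))
}
```
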
14 changes: 8 additions & 6 deletions src/database.rs
@@ -177,15 +177,17 @@ impl<'a> Database<'a> {
let comparator = VersionReq::parse(selector)
.expect(format!("Invalid version selector: {}", selector).as_str());

// If there is no previous version, the database is coming from <=0.7.1
if metadata.previous_version().is_none() {
return Ok(true);
}

let previous_version = Version::parse(metadata.previous_version().unwrap()).expect(
let previous_version = if let Some(previous_version) = metadata.previous_version() {
previous_version
} else {
return Ok(true);
};

let previous_version = Version::parse(previous_version).expect(
format!(
"Invalid previous version: {}",
metadata.previous_version().unwrap()
previous_version
)
.as_str(),
);
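
This hunk replaces the `is_none()` early return plus `.unwrap()` with a single `if let … else` binding, so the value is only read once it is known to exist. For reference, a let-else sketch (Rust 1.65+) expresses the same control flow even more compactly; `metadata.previous_version()` and the `Ok(true)` early return are taken from the hunk, the `Version` type is presumably semver's, and the `unwrap_or_else` variant is an illustrative alternative that also avoids building the panic message on the success path:

```rust
let Some(previous_version) = metadata.previous_version() else {
    // No previous version: the database is coming from <= 0.7.1.
    return Ok(true);
};
let previous_version = semver::Version::parse(previous_version)
    // Only formats the message if parsing actually fails, unlike
    // `.expect(format!(...).as_str())`, which formats eagerly.
    .unwrap_or_else(|_| panic!("Invalid previous version: {previous_version}"));
```
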
8 changes: 8 additions & 0 deletions src/db_type/error.rs
@@ -23,9 +23,17 @@ pub enum Error {
#[error("Redb storage error")]
RedbStorageError(#[from] redb::StorageError),

#[cfg(feature = "redb1")]
#[error("Redb redb1 storage error")]
Redb1StorageError(#[from] redb1::StorageError),

#[error("Redb table error")]
RedbTableError(#[from] redb::TableError),

#[cfg(feature = "redb1")]
#[error("Redb redb1 table error")]
Redb1TableError(#[from] redb1::TableError),

#[error("Redb commit error")]
RedbCommitError(#[from] redb::CommitError),

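
These two feature-gated variants are what allow the redb 1.x upgrade path (src/upgrade/redb1_to_redb2.rs further down) to use `?` instead of `.unwrap()`. A minimal sketch, assuming the enum is derived with `thiserror` (the `#[error]`/`#[from]` attributes suggest it) and that `redb1` is the renamed redb 1.x dependency:

```rust
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
    #[cfg(feature = "redb1")]
    #[error("Redb redb1 storage error")]
    Redb1StorageError(#[from] redb1::StorageError),
}

// `#[from]` also derives `impl From<redb1::StorageError> for Error`, which is
// exactly the conversion `?` needs when a redb1 call fails.
```
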
8 changes: 4 additions & 4 deletions src/transaction/internal/rw_transaction.rs
@@ -229,7 +229,7 @@ impl<'db> InternalRwTransaction<'db> {
let new_table_definition = self
.primary_table_definitions
.get(T::native_db_model().primary_key.unique_table_name.as_str())
.unwrap();
.expect("Fatal error: table definition not found during migration");
if new_table_definition
.native_model_options
.native_model_legacy
@@ -269,7 +269,7 @@ impl<'db> InternalRwTransaction<'db> {
"Impossible to migrate the table {} because multiple old tables with data exist: {}, {}",
T::native_db_model().primary_key.unique_table_name,
new_primary_table_definition.redb.name(),
old_table_definition.unwrap().redb.name()
old_table_definition.expect("Unreachable").redb.name()
);
} else if table.len()? > 0 {
old_table_definition = Some(new_primary_table_definition);
@@ -293,7 +293,7 @@ impl<'db> InternalRwTransaction<'db> {

// List all data from the old table
for old_data in self.concrete_primary_drain(old_table_definition.model.clone())? {
let (decoded_item, _) = native_model::decode::<T>(old_data.0).unwrap();
let (decoded_item, _) = native_model::decode::<T>(old_data.0)?;
let decoded_item = decoded_item.native_db_input()?;
self.concrete_insert(T::native_db_model(), decoded_item)?;
}
@@ -303,7 +303,7 @@ impl<'db> InternalRwTransaction<'db> {

pub fn refresh<T: ToInput + Debug>(&self) -> Result<()> {
for data in self.concrete_primary_drain(T::native_db_model())? {
let (decoded_item, _) = native_model::decode::<T>(data.0).unwrap();
let (decoded_item, _) = native_model::decode::<T>(data.0)?;
let decoded_item = decoded_item.native_db_input()?;
self.concrete_insert(T::native_db_model(), decoded_item)?;
}
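
Swapping `.unwrap()` for `?` on `native_model::decode` only compiles because the transaction's error type can be converted `From` the decode error (the same mechanism the new redb1 variants in src/db_type/error.rs provide for the upgrade path). The general mechanism, with entirely hypothetical names:

```rust
#[derive(Debug)]
struct MyError(String);

// `?` needs a From impl from the callee's error into the caller's error type.
impl From<std::num::ParseIntError> for MyError {
    fn from(e: std::num::ParseIntError) -> Self {
        MyError(e.to_string())
    }
}

fn parse(s: &str) -> Result<u32, MyError> {
    Ok(s.parse::<u32>()?) // was: s.parse().unwrap()
}
```
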
6 changes: 2 additions & 4 deletions src/transaction/query/scan/primary_scan.rs
@@ -49,7 +49,7 @@
/// let r = db.r_transaction()?;
///
/// // Get all values
/// let _values: Vec<Data> = r.scan().primary()?.all().unwrap().try_collect()?;
/// let _values: Vec<Data> = r.scan().primary()?.all()?.try_collect()?;
/// Ok(())
/// }
/// ```
@@ -137,9 +137,7 @@
start_with: impl ToKey + 'a,
) -> Result<PrimaryScanIteratorStartWith<'a, T>> {
let start_with = start_with.to_key();
let range = self
.primary_table
.range::<Key>(start_with.clone()..)?;
let range = self.primary_table.range::<Key>(start_with.clone()..)?;

Ok(PrimaryScanIteratorStartWith {
range,
33 changes: 8 additions & 25 deletions src/transaction/query/scan/secondary_scan.rs
@@ -61,7 +61,7 @@
/// let r = db.r_transaction()?;
///
/// // Get only values that have the secondary key set (name is not None)
/// let _values: Vec<Data> = r.scan().secondary(DataKey::name)?.all().unwrap().try_collect()?;
/// let _values: Vec<Data> = r.scan().secondary(DataKey::name)?.all()?.try_collect()?;
/// Ok(())
/// }
/// ```
@@ -112,7 +112,7 @@
/// let r = db.r_transaction()?;
///
/// // Get only values that have the secondary key name from C to the end
/// let _values: Vec<Data> = r.scan().secondary(DataKey::name)?.range("C"..).unwrap().try_collect()?;
/// let _values: Vec<Data> = r.scan().secondary(DataKey::name)?.range("C"..)?.try_collect()?;
/// Ok(())
/// }
/// ```
@@ -126,9 +126,9 @@
.secondary_table
.range::<Key>(database_inner_key_value_range)?
{
let (_, l_primary_keys) = keys.unwrap();
let (_, l_primary_keys) = keys?;
for primary_key in l_primary_keys {
let primary_key = primary_key.unwrap();
let primary_key = primary_key?;
primary_keys.push(primary_key);
}
}
@@ -170,7 +170,7 @@
/// let r = db.r_transaction()?;
///
/// // Get only values that have the secondary key name starting with "hello"
/// let _values: Vec<Data> = r.scan().secondary(DataKey::name)?.start_with("hello").unwrap().try_collect()?;
/// let _values: Vec<Data> = r.scan().secondary(DataKey::name)?.start_with("hello")?.try_collect()?;
/// Ok(())
/// }
/// ```
@@ -180,11 +180,8 @@
) -> Result<SecondaryScanIterator<'a, PrimaryTable, T>> {
let start_with = start_with.to_key();
let mut primary_keys = vec![];
for keys in self
.secondary_table
.range::<Key>(start_with.clone()..)?
{
let (l_secondary_key, l_primary_keys) = keys.unwrap();
for keys in self.secondary_table.range::<Key>(start_with.clone()..)? {
let (l_secondary_key, l_primary_keys) = keys?;
if !l_secondary_key
.value()
.as_slice()
Expand All @@ -193,7 +190,7 @@ where
break;
}
for primary_key in l_primary_keys {
let primary_key = primary_key.unwrap();
let primary_key = primary_key?;
primary_keys.push(primary_key);
}
}
@@ -284,19 +281,5 @@
}
_ => None,
}
// match self.range.next() {
// Some(Ok((secondary_key, primary_key))) => {
// if secondary_key
// .value()
// .as_slice()
// .starts_with(self.start_with.as_slice())
// {
// unwrap_item(self.primary_table.get(primary_key.value()).unwrap())
// } else {
// None
// }
// }
// _ => None,
// }
}
}
16 changes: 7 additions & 9 deletions src/upgrade/redb1_to_redb2.rs
@@ -39,10 +39,10 @@ fn upgrade_primary_table(
let mut redb2_table = redb2_write_txn.open_table(redb2_primary_table_definition)?;

use redb1::ReadableTable;
for r in redb1_table.iter().unwrap() {
let (key, value) = r.unwrap();
for r in redb1_table.iter()? {
let (key, value) = r?;
let key = Redb2DatabaseInnerKeyValue::from(key.value());
redb2_table.insert(key, value.value()).unwrap();
redb2_table.insert(key, value.value())?;
}
}

@@ -65,18 +65,16 @@
let redb2_write_txn = db2.begin_write()?;

{
let redb1_table = redb1_read_txn
.open_table(redb1_primary_table_definition)
.unwrap();
let redb1_table = redb1_read_txn.open_table(redb1_primary_table_definition)?;
let mut redb2_table =
redb2_write_txn.open_multimap_table(redb2_primary_table_definition)?;

use redb1::ReadableTable;
for r in redb1_table.iter().unwrap() {
let (key, value) = r.unwrap();
for r in redb1_table.iter()? {
let (key, value) = r?;
let key = Redb2DatabaseInnerKeyValue::from(key.value());
let value = Redb2DatabaseInnerKeyValue::from(value.value());
redb2_table.insert(key, value).unwrap();
redb2_table.insert(key, value)?;
}
}

6 changes: 4 additions & 2 deletions tests/migrate/with_secondary_keys.rs
@@ -165,7 +165,8 @@ fn test_migrate() {
.secondary(ItemV2Key::first_name_key)
.unwrap()
.start_with("Alexandre")
.unwrap().try_collect()
.unwrap()
.try_collect()
.unwrap();
assert_eq!(
item,
@@ -182,7 +183,8 @@
.secondary(ItemV2Key::last_name_key)
.unwrap()
.start_with("Verne")
.unwrap().try_collect()
.unwrap()
.try_collect()
.unwrap();
assert_eq!(
item,