Skip to content

Commit

Permalink
Unnest migration_core module structure
Browse files Browse the repository at this point in the history
We would probably also want JSON-RPC on wasm (WASI).
  • Loading branch information
tomhoule committed Jan 5, 2022
1 parent 15a8ce8 commit 1e1c08c
Show file tree
Hide file tree
Showing 5 changed files with 190 additions and 5 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build-wasm.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,11 +36,11 @@ jobs:
command: build
args: -p migration-connector --release --target=wasm32-unknown-unknown

- name: Build the migration-core crate with default features
- name: Build the migration-core crate
uses: actions-rs/cargo@v1
with:
command: build
args: -p migration-core --release --target=wasm32-unknown-unknown
args: -p migration-core --release --target=wasm32-unknown-unknown --no-default-features

- name: Build the prisma-fmt crate
uses: actions-rs/cargo@v1
Expand Down
1 change: 1 addition & 0 deletions migration-engine/core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,5 +22,6 @@ tracing-futures = "0.2"
url = "2.1.1"

[features]
default = ["sql"]
mongodb = ["mongodb-migration-connector"]
sql = ["sql-migration-connector"]
104 changes: 101 additions & 3 deletions migration-engine/core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,120 @@

//! The top-level library crate for the migration engine.
pub mod commands;
#[doc(hidden)]
pub mod query_engine;

mod api;
mod core_error;
mod rpc;

pub mod commands;
pub use self::{api::GenericApi, core_error::*, rpc::rpc_api};

pub use core_error::*;
pub use migration_connector;

#[cfg(not(target_arch = "wasm32"))]
mod native;
pub use native::*;

#[cfg(not(target_arch = "wasm32"))]
pub use native::*;
mod native;

use datamodel::{
common::{
preview_features::PreviewFeature,
provider_names::{MSSQL_SOURCE_NAME, MYSQL_SOURCE_NAME, POSTGRES_SOURCE_NAME, SQLITE_SOURCE_NAME},
},
Datasource,
};
use enumflags2::BitFlags;
use std::env;
use user_facing_errors::{common::InvalidConnectionString, KnownError};

use datamodel::{Configuration, Datamodel};

/// Parse a Prisma schema string into its `Configuration` and `Datamodel`,
/// mapping any parse failure into a schema-parser `CoreError`.
fn parse_schema(schema: &str) -> CoreResult<(Configuration, Datamodel)> {
    match datamodel::parse_schema(schema) {
        Ok(parsed) => Ok(parsed),
        Err(err) => Err(CoreError::new_schema_parser_error(err)),
    }
}

#[cfg(feature = "mongodb")]
use datamodel::common::provider_names::MONGODB_SOURCE_NAME;
#[cfg(feature = "mongodb")]
use mongodb_migration_connector::MongoDbMigrationConnector;
#[cfg(feature = "sql")]
use sql_migration_connector::SqlMigrationConnector;

/// Top-level constructor for the migration engine API.
///
/// Parses the datasource out of `datamodel` and returns the connector
/// matching its active provider. Returns an error for unknown providers or
/// for providers whose feature flag is disabled (their match arms are
/// compiled out).
pub async fn migration_api(datamodel: &str) -> CoreResult<Box<dyn api::GenericApi>> {
    let (source, url, preview_features, shadow_database_url) = parse_configuration(datamodel)?;

    match source.active_provider.as_str() {
        #[cfg(feature = "sql")]
        POSTGRES_SOURCE_NAME => {
            let mut u = url::Url::parse(&url).map_err(|err| {
                let details = user_facing_errors::quaint::invalid_connection_string_description(&format!(
                    "Error parsing connection string: {}",
                    err
                ));

                CoreError::from(KnownError::new(InvalidConnectionString { details }))
            })?;

            // Force `statement_cache_size=0` on Postgres connection strings:
            // drop any user-provided value for the key, keep all other query
            // parameters, then append the override once. (This replaces the
            // previous two-pass replace-then-append-if-absent logic.)
            let params: Vec<(String, String)> =
                u.query_pairs().map(|(k, v)| (k.to_string(), v.to_string())).collect();

            u.query_pairs_mut().clear();

            for (k, v) in params {
                if k != "statement_cache_size" {
                    u.query_pairs_mut().append_pair(&k, &v);
                }
            }

            u.query_pairs_mut().append_pair("statement_cache_size", "0");

            let connector = SqlMigrationConnector::new(u.to_string(), preview_features, shadow_database_url)?;

            Ok(Box::new(connector))
        }
        #[cfg(feature = "sql")]
        MYSQL_SOURCE_NAME | SQLITE_SOURCE_NAME | MSSQL_SOURCE_NAME => {
            let connector = SqlMigrationConnector::new(url, preview_features, shadow_database_url)?;

            Ok(Box::new(connector))
        }
        #[cfg(feature = "mongodb")]
        MONGODB_SOURCE_NAME => Ok(Box::new(MongoDbMigrationConnector::new(url, preview_features))),
        provider => Err(CoreError::from_msg(format!(
            "`{}` is not a supported connector.",
            provider
        ))),
    }
}

/// Parse the Prisma schema's configuration block and extract everything the
/// connector constructors need: the first datasource, its resolved URL, the
/// enabled preview features, and the optional shadow database URL.
///
/// Returns a schema-parser `CoreError` when the schema or the URLs are
/// invalid, and a descriptive error when the schema has no datasource.
fn parse_configuration(datamodel: &str) -> CoreResult<(Datasource, String, BitFlags<PreviewFeature>, Option<String>)> {
    let config = datamodel::parse_configuration(datamodel)
        .map(|validated_config| validated_config.subject)
        .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?;

    // Read preview features before moving `datasources` out of `config`.
    let preview_features = config.preview_features();

    // Take the datasource first: the previous code indexed `datasources[0]`
    // before this emptiness check ran, panicking on schemas without a
    // datasource instead of returning this error.
    let source = config
        .datasources
        .into_iter()
        .next()
        .ok_or_else(|| CoreError::from_msg("There is no datasource in the schema.".into()))?;

    // URLs may reference environment variables; resolve them here.
    let url = source
        .load_url(|key| env::var(key).ok())
        .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?;

    let shadow_database_url = source
        .load_shadow_database_url()
        .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?;

    Ok((source, url, preview_features, shadow_database_url))
}
86 changes: 86 additions & 0 deletions migration-engine/core/src/query_engine.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
//! Query Engine test setup.
use crate::SqlMigrationConnector;
#[cfg(feature = "mongodb")]
use datamodel::common::provider_names::MONGODB_SOURCE_NAME;
use datamodel::common::provider_names::{
COCKROACHDB_SOURCE_NAME, MSSQL_SOURCE_NAME, MYSQL_SOURCE_NAME, POSTGRES_SOURCE_NAME, SQLITE_SOURCE_NAME,
};
use migration_connector::{ConnectorResult, DiffTarget, MigrationConnector};
#[cfg(feature = "mongodb")]
use mongodb_migration_connector::MongoDbMigrationConnector;

/// Database setup for connector-test-kit-rs.
pub async fn setup(prisma_schema: &str) -> ConnectorResult<()> {
let (source, url, preview_features, _shadow_database_url) = super::parse_configuration(prisma_schema)?;

match &source.active_provider {
provider
if [
MYSQL_SOURCE_NAME,
POSTGRES_SOURCE_NAME,
SQLITE_SOURCE_NAME,
MSSQL_SOURCE_NAME,
COCKROACHDB_SOURCE_NAME,
]
.contains(&provider.as_str()) =>
{
// 1. creates schema & database
SqlMigrationConnector::qe_setup(&url).await?;
let api = SqlMigrationConnector::new(url, preview_features, None)?;

// 2. create the database schema for given Prisma schema
{
let (config, schema) = crate::parse_schema(prisma_schema).unwrap();
let migration = api
.diff(DiffTarget::Empty, DiffTarget::Datamodel((&config, &schema)))
.await
.unwrap();
api.database_migration_step_applier()
.apply_migration(&migration)
.await
.unwrap();
};
}

#[cfg(feature = "mongodb")]
provider if provider == MONGODB_SOURCE_NAME => {
let connector = MongoDbMigrationConnector::new(url, preview_features);
// Drop database. Creation is automatically done when collections are created.
connector.drop_database().await?;
let (_, schema) = crate::parse_schema(prisma_schema).unwrap();
connector.create_collections(&schema).await?;
}

x => unimplemented!("Connector {} is not supported yet", x),
};

Ok(())
}

/// Database teardown for connector-test-kit-rs.
pub async fn teardown(prisma_schema: &str) -> ConnectorResult<()> {
let (source, url, _, _) = super::parse_configuration(prisma_schema)?;

match &source.active_provider {
provider
if [
MYSQL_SOURCE_NAME,
POSTGRES_SOURCE_NAME,
SQLITE_SOURCE_NAME,
MSSQL_SOURCE_NAME,
COCKROACHDB_SOURCE_NAME,
]
.contains(&provider.as_str()) =>
{
SqlMigrationConnector::qe_teardown(&url).await?;
}

#[cfg(feature = "mongodb")]
provider if provider == MONGODB_SOURCE_NAME => {}

x => unimplemented!("Connector {} is not supported yet", x),
};

Ok(())
}
File renamed without changes.

0 comments on commit 1e1c08c

Please sign in to comment.