import migrations and don't require diesel_cli for admins
#555
Merged
Plume_migration_agent
merged 9 commits from integrated-migrations
into master
5 years ago
@ -0,0 +1,6 @@
|
||||
-- This file should undo anything in `up.sql`
-- Lines prefixed with `--#!` are not SQL: the `import_migrations!` proc macro
-- extracts them as the body of a Rust closure that is run as one step of this
-- migration. Plain `--` comments (like this one) are discarded by the macro.
-- This closure removes the on-disk search index that `up.sql` created.
--#!|_conn, path: &Path| {
--#! let mut pb = path.to_path_buf();
--#! pb.push("search_index");
--#! std::fs::remove_dir_all(pb).map_err(Error::from)
--#!}
|
@ -0,0 +1,10 @@
|
||||
-- Your SQL goes here
-- Lines prefixed with `--#!` are not SQL: the `import_migrations!` proc macro
-- extracts them as the body of a Rust closure that is run as one step of this
-- migration. Plain `--` comments (like this one) are discarded by the macro.
-- This closure creates the search index under the working directory and
-- fills it from the database.
--#!|conn: &Connection, path: &Path| {
--#! let mut pb = path.to_path_buf();
--#! pb.push("search_index");
--#! let searcher = super::search::Searcher::create(&pb)?;
--#! searcher.fill(conn)?;
--#! searcher.commit();
--#! Ok(())
--#!}
|
||||
|
@ -0,0 +1,6 @@
|
||||
-- This file should undo anything in `up.sql`
-- Lines prefixed with `--#!` are not SQL: the `import_migrations!` proc macro
-- extracts them as the body of a Rust closure that is run as one step of this
-- migration. Plain `--` comments (like this one) are discarded by the macro.
-- This closure removes the on-disk search index that `up.sql` created.
--#!|_conn, path: &Path| {
--#! let mut pb = path.to_path_buf();
--#! pb.push("search_index");
--#! std::fs::remove_dir_all(pb).map_err(Error::from)
--#!}
|
@ -0,0 +1,10 @@
|
||||
-- Your SQL goes here
-- Lines prefixed with `--#!` are not SQL: the `import_migrations!` proc macro
-- extracts them as the body of a Rust closure that is run as one step of this
-- migration. Plain `--` comments (like this one) are discarded by the macro.
-- This closure creates the search index under the working directory and
-- fills it from the database.
--#!|conn: &Connection, path: &Path| {
--#! let mut pb = path.to_path_buf();
--#! pb.push("search_index");
--#! let searcher = super::search::Searcher::create(&pb)?;
--#! searcher.fill(conn)?;
--#! searcher.commit();
--#! Ok(())
--#!}
|
||||
|
@ -0,0 +1,59 @@
|
||||
use clap::{App, Arg, ArgMatches, SubCommand};
|
||||
|
||||
use plume_models::{migrations::IMPORTED_MIGRATIONS, Connection};
|
||||
use std::path::Path;
|
||||
|
||||
pub fn command<'a, 'b>() -> App<'a, 'b> {
|
||||
SubCommand::with_name("migration")
|
||||
.about("Manage migrations")
|
||||
.subcommand(
|
||||
SubCommand::with_name("run")
|
||||
.arg(
|
||||
Arg::with_name("path")
|
||||
.short("p")
|
||||
.long("path")
|
||||
.takes_value(true)
|
||||
.required(false)
|
||||
.help("Path to Plume's working directory"),
|
||||
)
|
||||
.about("Run migrations"),
|
||||
)
|
||||
.subcommand(
|
||||
SubCommand::with_name("redo")
|
||||
.arg(
|
||||
Arg::with_name("path")
|
||||
.short("p")
|
||||
.long("path")
|
||||
.takes_value(true)
|
||||
.required(false)
|
||||
.help("Path to Plume's working directory"),
|
||||
)
|
||||
.about("Rerun latest migration"),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) {
|
||||
let conn = conn;
|
||||
match args.subcommand() {
|
||||
("run", Some(x)) => run_(x, conn),
|
||||
("redo", Some(x)) => redo(x, conn),
|
||||
("", None) => command().print_help().unwrap(),
|
||||
_ => println!("Unknown subcommand"),
|
||||
}
|
||||
}
|
||||
|
||||
fn run_<'a>(args: &ArgMatches<'a>, conn: &Connection) {
|
||||
let path = args.value_of("path").unwrap_or(".");
|
||||
|
||||
IMPORTED_MIGRATIONS
|
||||
.run_pending_migrations(conn, Path::new(path))
|
||||
.expect("Failed to run migrations")
|
||||
}
|
||||
|
||||
fn redo<'a>(args: &ArgMatches<'a>, conn: &Connection) {
|
||||
let path = args.value_of("path").unwrap_or(".");
|
||||
|
||||
IMPORTED_MIGRATIONS
|
||||
.rerun_last_migration(conn, Path::new(path))
|
||||
.expect("Failed to rerun migrations")
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "plume-macro"
|
||||
version = "0.1.0"
|
||||
authors = ["Trinity Pointard <trinity.pointard@insa-rennes.fr>"]
|
||||
edition = "2018"
|
||||
description = "Plume procedural macros"
|
||||
license = "AGPLv3"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
proc-macro2 = "0.4"
|
||||
quote = "0.6.12"
|
||||
syn = "0.11.4"
|
||||
|
||||
|
||||
[features]
|
||||
default = []
|
||||
postgres = []
|
||||
sqlite = []
|
@ -0,0 +1,139 @@
|
||||
#![recursion_limit = "128"]
|
||||
extern crate proc_macro;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
extern crate syn;
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::TokenStream as TokenStream2;
|
||||
use std::fs::{read_dir, File};
|
||||
use std::io::Read;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[proc_macro]
|
||||
pub fn import_migrations(input: TokenStream) -> TokenStream {
|
||||
assert!(input.is_empty());
|
||||
let migration_dir = if cfg!(feature = "postgres") {
|
||||
"migrations/postgres"
|
||||
} else if cfg!(feature = "sqlite") {
|
||||
"migrations/sqlite"
|
||||
} else {
|
||||
"migrations"
|
||||
};
|
||||
let mut files = read_dir(migration_dir)
|
||||
.unwrap()
|
||||
.map(|dir| dir.unwrap())
|
||||
.filter(|dir| dir.file_type().unwrap().is_dir())
|
||||
.map(|dir| dir.path())
|
||||
.collect::<Vec<_>>();
|
||||
files.sort_unstable();
|
||||
let migrations = files
|
||||
.into_iter()
|
||||
.map(|path| {
|
||||
let mut up = path.clone();
|
||||
let mut down = path.clone();
|
||||
up.push("up.sql");
|
||||
down.push("down.sql");
|
||||
let mut up_sql = String::new();
|
||||
let mut down_sql = String::new();
|
||||
File::open(up).unwrap().read_to_string(&mut up_sql).unwrap();
|
||||
File::open(down)
|
||||
.unwrap()
|
||||
.read_to_string(&mut down_sql)
|
||||
.unwrap();
|
||||
let name = path
|
||||
.file_name()
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.chars()
|
||||
.filter(char::is_ascii_digit)
|
||||
.take(14)
|
||||
|
||||
.collect::<String>();
|
||||
(name, up_sql, down_sql)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let migrations_name = migrations.iter().map(|m| &m.0).collect::<Vec<_>>();
|
||||
let migrations_up = migrations
|
||||
.iter()
|
||||
.map(|m| m.1.as_str())
|
||||
.map(file_to_migration)
|
||||
.collect::<Vec<_>>();
|
||||
let migrations_down = migrations
|
||||
.iter()
|
||||
.map(|m| m.2.as_str())
|
||||
.map(file_to_migration)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
/*
|
||||
enum Action {
|
||||
Sql(&'static str),
|
||||
Function(&'static Fn(&Connection, &Path) -> Result<()>)
|
||||
}*/
|
||||
|
||||
quote!(
|
||||
ImportedMigrations(
|
||||
&[#(ComplexMigration{name: #migrations_name, up: #migrations_up, down: #migrations_down}),*]
|
||||
)
|
||||
).into()
|
||||
}
|
||||
|
||||
fn file_to_migration(file: &str) -> TokenStream2 {
|
||||
let mut sql = true;
|
||||
let mut acc = String::new();
|
||||
let mut actions = vec![];
|
||||
for line in file.lines() {
|
||||
if sql {
|
||||
if line.starts_with("--#!") {
|
||||
if !acc.trim().is_empty() {
|
||||
actions.push(quote!(Action::Sql(#acc)));
|
||||
}
|
||||
sql = false;
|
||||
acc = line[4..].to_string();
|
||||
acc.push('\n');
|
||||
} else if line.starts_with("--") {
|
||||
continue;
|
||||
} else {
|
||||
acc.push_str(line);
|
||||
acc.push('\n');
|
||||
}
|
||||
} else {
|
||||
if line.starts_with("--#!") {
|
||||
acc.push_str(&line[4..]);
|
||||
acc.push('\n');
|
||||
} else if line.starts_with("--") {
|
||||
continue;
|
||||
} else {
|
||||
let func: TokenStream2 = trampoline(TokenStream::from_str(&acc).unwrap().into());
|
||||
actions.push(quote!(Action::Function(&#func)));
|
||||
sql = true;
|
||||
acc = line.to_string();
|
||||
acc.push('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
if !acc.trim().is_empty() {
|
||||
if sql {
|
||||
actions.push(quote!(Action::Sql(#acc)));
|
||||
} else {
|
||||
let func: TokenStream2 = trampoline(TokenStream::from_str(&acc).unwrap().into());
|
||||
actions.push(quote!(Action::Function(&#func)));
|
||||
}
|
||||
}
|
||||
|
||||
quote!(
|
||||
&[#(#actions),*]
|
||||
)
|
||||
}
|
||||
|
||||
/// Build a trampoline to allow reference to closure from const context
|
||||
fn trampoline(closure: TokenStream2) -> TokenStream2 {
|
||||
quote! {
|
||||
{
|
||||
fn trampoline<'a, 'b>(conn: &'a Connection, path: &'b Path) -> Result<()> {
|
||||
(#closure)(conn, path)
|
||||
}
|
||||
trampoline
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,122 @@
|
||||
use Connection;
|
||||
use Error;
|
||||
use Result;
|
||||
|
||||
use diesel::connection::{Connection as Conn, SimpleConnection};
|
||||
use migrations_internals::{setup_database, MigrationConnection};
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
#[allow(dead_code)] //variants might not be constructed if not required by current migrations
|
||||
enum Action {
|
||||
Sql(&'static str),
|
||||
Function(&'static Fn(&Connection, &Path) -> Result<()>),
|
||||
}
|
||||
|
||||
impl Action {
|
||||
fn run(&self, conn: &Connection, path: &Path) -> Result<()> {
|
||||
match self {
|
||||
Action::Sql(sql) => conn.batch_execute(sql).map_err(Error::from),
|
||||
Action::Function(f) => f(conn, path),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ComplexMigration {
|
||||
name: &'static str,
|
||||
up: &'static [Action],
|
||||
down: &'static [Action],
|
||||
}
|
||||
|
||||
impl ComplexMigration {
|
||||
fn run(&self, conn: &Connection, path: &Path) -> Result<()> {
|
||||
println!("Running migration {}", self.name);
|
||||
for step in self.up {
|
||||
step.run(conn, path)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn revert(&self, conn: &Connection, path: &Path) -> Result<()> {
|
||||
println!("Reverting migration {}", self.name);
|
||||
for step in self.down {
|
||||
step.run(conn, path)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// The full, chronologically ordered list of migrations embedded at compile
/// time by the `import_migrations!` proc macro.
pub struct ImportedMigrations(&'static [ComplexMigration]);
|
||||
|
||||
impl ImportedMigrations {
|
||||
pub fn run_pending_migrations(&self, conn: &Connection, path: &Path) -> Result<()> {
|
||||
Review
Should these Should these `dbg!` be here?
trinity-1686a
commented 5 years ago
Review
no :x no :x
|
||||
use diesel::dsl::sql;
|
||||
use diesel::sql_types::Bool;
|
||||
use diesel::{select, RunQueryDsl};
|
||||
#[cfg(feature = "postgres")]
|
||||
let schema_exists: bool = select(sql::<Bool>(
|
||||
"EXISTS \
|
||||
(SELECT 1 \
|
||||
FROM information_schema.tables \
|
||||
WHERE table_name = '__diesel_schema_migrations')",
|
||||
))
|
||||
.get_result(conn)?;
|
||||
#[cfg(feature = "sqlite")]
|
||||
let schema_exists: bool = select(sql::<Bool>(
|
||||
"EXISTS \
|
||||
(SELECT 1 \
|
||||
FROM sqlite_master \
|
||||
WHERE type = 'table' \
|
||||
AND name = '__diesel_schema_migrations')",
|
||||
))
|
||||
.get_result(conn)?;
|
||||
|
||||
if !schema_exists {
|
||||
setup_database(conn)?;
|
||||
}
|
||||
|
||||
let latest_migration = conn.latest_run_migration_version()?;
|
||||
let latest_id = if let Some(migration) = latest_migration {
|
||||
self.0
|
||||
.binary_search_by_key(&migration.as_str(), |mig| mig.name)
|
||||
.map(|id| id + 1)
|
||||
.map_err(|_| Error::NotFound)?
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let to_run = &self.0[latest_id..];
|
||||
for migration in to_run {
|
||||
conn.transaction(|| {
|
||||
migration.run(conn, path)?;
|
||||
conn.insert_new_migration(migration.name)
|
||||
.map_err(Error::from)
|
||||
})?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn is_pending(&self, conn: &Connection) -> Result<bool> {
|
||||
let latest_migration = conn.latest_run_migration_version()?;
|
||||
if let Some(migration) = latest_migration {
|
||||
Ok(self.0.last().expect("no migrations found").name != migration)
|
||||
} else {
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rerun_last_migration(&self, conn: &Connection, path: &Path) -> Result<()> {
|
||||
let latest_migration = conn.latest_run_migration_version()?;
|
||||
let id = latest_migration
|
||||
.and_then(|m| self.0.binary_search_by_key(&m.as_str(), |m| m.name).ok())?;
|
||||
let migration = &self.0[id];
|
||||
conn.transaction(|| {
|
||||
migration.revert(conn, path)?;
|
||||
migration.run(conn, path)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub const IMPORTED_MIGRATIONS: ImportedMigrations = {
|
||||
import_migrations! {}
|
||||
};
|
@ -1,6 +1,5 @@
|
||||
#!/bin/bash
# Package a Plume release: binaries, static assets and the migrations for the
# selected backend ($FEATURES, e.g. "postgres" or "sqlite").
set -e              # abort on the first failing command instead of shipping a partial archive

mkdir -p bin        # -p: don't fail when bin/ already exists (e.g. a local rerun)
cp target/release/{plume,plm} bin
cp "$(which diesel)" bin
strip -s bin/*
# Quote the expansion so an oddly-set FEATURES can't word-split the tar argument.
tar -cvzf plume.tar.gz bin/ static/ "migrations/$FEATURES"
|
Loading…
Reference in New Issue
this seems like a strange thing to `take()` — what is this whole expression supposed to do?

diesel records whether a migration was run by storing a 14-digit code. For `2019-03-06-115158_blog_images`, it is translated to `20190306115158`. To do that, I iterate over the characters of the folder name, keep only the digits, and take the first 14 of them (any further digits are ignored). See the `Iterator::take` documentation.