Merge pull request #4410 from AndreCostaaa/feat-keep-versions-unique
feat(cli): generate unique versions
weiznich authored Jan 3, 2025
2 parents cec3cd0 + 8a30ec2 commit 977a2b0
Showing 6 changed files with 224 additions and 18 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -28,6 +28,7 @@ in a way that makes the pools suitable for use in parallel tests.
### Fixed

* Use a single space instead of two spaces between `DELETE FROM`.
* Diesel CLI now ensures that migration versions are always unique. If it fails to generate a unique version, it will return an error. The new version format remains compatible with older Diesel versions.

## [2.2.2] 2024-07-19

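
For context, the tests in this commit pin the new directory-name shape to `YYYY-MM-DD-HHMMSS-XXXX_name`, where the trailing `XXXX` is a four-digit hex counter. A minimal sketch of that naming scheme (illustrative only; `chrono` and the helper name are assumptions, not code from diesel_cli):

```rust
// Illustration of the directory-name shape asserted by the tests in this
// commit; `chrono` and `example_versioned_name` are assumptions for the
// sketch, not Diesel's actual helper.
use chrono::Utc;

fn example_versioned_name(migration_name: &str, subversion: u16) -> String {
    // Timestamp portion matching the `\d{4}-\d{2}-\d{2}-\d{6}` pattern
    // checked by the updated test regex below.
    let timestamp = Utc::now().format("%Y-%m-%d-%H%M%S");
    // Four hex digits keep two migrations generated within the same second
    // distinct, e.g. `2025-01-03-101530-0000_create_users` vs
    // `2025-01-03-101530-0001_create_posts`.
    format!("{timestamp}-{subversion:04x}_{migration_name}")
}

fn main() {
    println!("{}", example_versioned_name("create_users", 0));
}
```

The suffix only extends the familiar timestamp prefix, which is what lets the changelog entry above note that the format stays compatible with older Diesel versions.
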
1 change: 1 addition & 0 deletions diesel_cli/Cargo.toml
@@ -57,6 +57,7 @@ tracing = "0.1"
tracing-subscriber = { version = "0.3.10", features = ["env-filter"] }
thiserror = "2.0.0"
similar-asserts = "1.6.0"
fd-lock = "4.0.2"

[dependencies.diesel]
version = "~2.2.0"
17 changes: 12 additions & 5 deletions diesel_cli/src/errors.rs
@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};

use crate::infer_schema_internals::TableName;

@@ -18,7 +18,7 @@ pub enum Error {
ProjectRootNotFound(PathBuf),
#[error("The --database-url argument must be passed, or the DATABASE_URL environment variable must be set.")]
DatabaseUrlMissing,
#[error("Encountered an IO error: {0} {n}", n=print_optional_path(.1))]
#[error("Encountered an IO error: {0} for `{n}`", n=print_optional_path(.1))]
IoError(#[source] std::io::Error, Option<PathBuf>),
#[error("Failed to execute a database query: {0}")]
QueryError(#[from] diesel::result::Error),
@@ -64,10 +64,17 @@ pub enum Error {
NoSchemaKeyFound(String),
#[error("Failed To Run rustfmt")]
RustFmtFail(String),
#[error("Failed to acquire migration folder lock: {1} for `{n}`", n=print_path(.0))]
FailedToAcquireMigrationFolderLock(PathBuf, String),
#[error("Tried to generate too many migrations with the same version `{1}` - Migrations folder is `{n}`", n=print_path(.0))]
TooManyMigrations(PathBuf, String),
#[error("Specified migration version `{1}` already exists inside `{n}`", n=print_path(.0))]
DuplicateMigrationVersion(PathBuf, String),
}

fn print_path(path: &Path) -> String {
format!("{}", path.display())
}
fn print_optional_path(path: &Option<PathBuf>) -> String {
path.as_ref()
.map(|p| format!(" for `{}`", p.display()))
.unwrap_or_default()
path.as_ref().map(|p| print_path(p)).unwrap_or_default()
}
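
The new variants rely on thiserror's named format arguments, where an expression such as `print_path(.0)` is evaluated against a positional field. A self-contained sketch of that pattern with a hypothetical error type and message (not Diesel's own):

```rust
use std::path::{Path, PathBuf};
use thiserror::Error;

fn print_path(path: &Path) -> String {
    format!("{}", path.display())
}

// Hypothetical error type: it only demonstrates the thiserror pattern used
// above, where a named format argument `n` is computed from the positional
// field `.0` by a helper function, alongside the plain positional `{1}`.
#[derive(Debug, Error)]
enum DemoError {
    #[error("Failed to acquire migration folder lock: {1} for `{n}`", n = print_path(.0))]
    FailedToAcquireLock(PathBuf, String),
}

fn main() {
    let err = DemoError::FailedToAcquireLock(PathBuf::from("migrations"), "interrupted".into());
    assert_eq!(
        err.to_string(),
        "Failed to acquire migration folder lock: interrupted for `migrations`"
    );
}
```
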
122 changes: 116 additions & 6 deletions diesel_cli/src/migrations/mod.rs
@@ -4,13 +4,14 @@ use diesel::backend::Backend;
use diesel::migration::{Migration, MigrationSource};
use diesel::Connection;
use diesel_migrations::{FileBasedMigrations, HarnessWithOutput, MigrationError, MigrationHarness};
use fd_lock::RwLock;
use std::any::Any;
use std::collections::{HashMap, HashSet};
use std::env;
use std::error::Error;
use std::fmt::Display;
use std::fs::{self};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use std::{env, io};

use crate::database::InferConnection;
use crate::{config::Config, regenerate_schema_if_file_specified};
@@ -85,6 +86,17 @@ pub(super) fn run_migration_command(matches: &ArgMatches) -> Result<(), crate::e
println!("{result:?}");
}
("generate", args) => {
let migrations_folder = migrations_dir(matches)?;
let mut lock = RwLock::new(migration_folder_lock(migrations_folder.clone())?);
// This blocks until we can get the lock
// Will throw an error if we receive a termination signal
let _ = lock.write().map_err(|err| {
crate::errors::Error::FailedToAcquireMigrationFolderLock(
migrations_folder.clone(),
err.to_string(),
)
})?;

let migration_name = args
.get_one::<String>("MIGRATION_NAME")
.expect("Clap ensure this argument is set");
@@ -128,10 +140,12 @@ pub(super) fn run_migration_command(matches: &ArgMatches) -> Result<(), crate::e
(String::new(), String::new())
};
let version = migration_version(args);
let versioned_name = format!("{version}_{migration_name}");
let migration_dir = migrations_dir(matches)?.join(versioned_name);
fs::create_dir(&migration_dir)
.map_err(|e| crate::errors::Error::IoError(e, Some(migration_dir.clone())))?;
let migration_dir = create_migration_dir(
migrations_folder,
migration_name,
version,
args_contains_version(args),
)?;

match args
.get_one::<String>("MIGRATION_FORMAT")
@@ -157,6 +171,95 @@ pub(super) fn run_migration_command(matches: &ArgMatches) -> Result<(), crate::e
Ok(())
}

/// Opens the .diesel_lock file inside the migrations folder
/// Creates the file if it does not exist
/// A lock can be acquired on this file to make sure we don't have multiple instances of diesel
/// doing migration work
/// See [run_migration_command]::generate for an example
fn migration_folder_lock(dir: PathBuf) -> Result<File, crate::errors::Error> {
let path = dir.join(".diesel_lock");
match File::create_new(&path) {
Ok(file) => Ok(file),
Err(err) => {
if matches!(err.kind(), io::ErrorKind::AlreadyExists) {
File::open(&path).map_err(|err| crate::errors::Error::IoError(err, Some(path)))
} else {
Err(crate::errors::Error::IoError(err, Some(path)))
}
}
}
}

fn create_migration_dir<'a>(
migrations_dir: PathBuf,
migration_name: &str,
version: Box<dyn Display + 'a>,
explicit_version: bool,
) -> Result<PathBuf, crate::errors::Error> {
const MAX_MIGRATIONS_PER_SEC: u16 = u16::MAX;
fn is_duplicate_version(full_version: &str, migration_folders: &Vec<PathBuf>) -> bool {
for folder in migration_folders {
if folder.to_string_lossy().starts_with(full_version) {
return true;
}
}
false
}

fn create(
migrations_dir: &Path,
version: &str,
migration_name: &str,
) -> Result<PathBuf, crate::errors::Error> {
let versioned_name = format!("{version}_{migration_name}");
let path = migrations_dir.join(versioned_name);

fs::create_dir(&path)
.map_err(|e| crate::errors::Error::IoError(e, Some(path.to_path_buf())))?;
Ok(path.to_path_buf())
}

let migration_folders: Vec<PathBuf> = migrations_dir
.read_dir()
.map_err(|err| crate::errors::Error::IoError(err, Some(migrations_dir.clone())))?
.filter_map(|e| {
if let Ok(e) = e {
if e.path().is_dir() {
return Some(e.path().file_name()?.into());
}
}
None
})
.collect();

// if there's an explicit version try to use it
if explicit_version {
let version = format!("{version}");
if is_duplicate_version(&version, &migration_folders) {
return Err(crate::errors::Error::DuplicateMigrationVersion(
migrations_dir,
version,
));
}
return create(&migrations_dir, &version, migration_name);
}

// else add a subversion so the versions stay unique
for subversion in 0..=MAX_MIGRATIONS_PER_SEC {
let full_version = format!("{version}-{subversion:04x}");
if is_duplicate_version(&full_version, &migration_folders) {
continue;
}
return create(&migrations_dir, &full_version, migration_name);
}
// if we get here it means the user is trying to generate > `MAX_MIGRATION_PER_SEC`
// migrations per second
Err(crate::errors::Error::TooManyMigrations(
migrations_dir,
version.to_string(),
))
}

fn generate_sql_migration(
path: &Path,
with_down: bool,
@@ -198,6 +301,13 @@ fn generate_sql_migration(
Ok(())
}

fn args_contains_version(matches: &ArgMatches) -> bool {
if let Ok(exists) = matches.try_contains_id("MIGRATION_VERSION") {
return exists;
}
false
}

fn migration_version<'a>(matches: &'a ArgMatches) -> Box<dyn Display + 'a> {
matches
.get_one::<String>("MIGRATION_VERSION")
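
In `run_migration_command`, the `generate` branch first takes an advisory write lock on the `.diesel_lock` file via `fd-lock`, so concurrent invocations serialize, and `create_migration_dir` then scans the existing migration folders for the first unused `-XXXX` hex subversion. A condensed, standalone sketch of that search (the function name and test data are illustrative, not Diesel's):

```rust
// Condensed sketch of the uniqueness search above (not Diesel's actual
// helper): given the names of existing migration folders and a base
// timestamp version, find the first free `-XXXX` hex subversion.
fn first_free_version(base_version: &str, existing_folders: &[String]) -> Option<String> {
    (0..=u16::MAX)
        .map(|subversion| format!("{base_version}-{subversion:04x}"))
        .find(|candidate| {
            // A version is taken if any existing folder name starts with it,
            // e.g. `2025-01-03-101530-0000_create_users`.
            !existing_folders
                .iter()
                .any(|folder| folder.starts_with(candidate.as_str()))
        })
}

fn main() {
    let existing = vec![
        "2025-01-03-101530-0000_create_users".to_string(),
        "2025-01-03-101530-0001_create_posts".to_string(),
    ];
    assert_eq!(
        first_free_version("2025-01-03-101530", &existing),
        Some("2025-01-03-101530-0002".to_string())
    );
}
```

Exhausting all `u16::MAX + 1` candidates corresponds to the `TooManyMigrations` error returned by the diff above.
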
84 changes: 81 additions & 3 deletions diesel_cli/tests/migration_generate.rs
@@ -1,3 +1,4 @@
use std::fs::DirEntry;
use std::path::{Path, PathBuf};
use std::{fs::File, io::Read};

@@ -15,16 +16,16 @@ fn migration_generate_creates_a_migration_with_the_proper_name() {
// check overall output
let expected_stdout = Regex::new(
"\
Creating migrations.\\d{4}-\\d{2}-\\d{2}-\\d{6}_hello.up.sql
Creating migrations.\\d{4}-\\d{2}-\\d{2}-\\d{6}_hello.down.sql\
Creating migrations.\\d{4}-\\d{2}-\\d{2}-\\d{6}-0000_hello.up.sql
Creating migrations.\\d{4}-\\d{2}-\\d{2}-\\d{6}-0000_hello.down.sql\
",
)
.unwrap();
assert!(result.is_success(), "Command failed: {:?}", result);
assert!(expected_stdout.is_match(result.stdout()));

// check timestamps are properly formatted
let captured_timestamps = Regex::new(r"(?P<stamp>[\d-]*)_hello").unwrap();
let captured_timestamps = Regex::new(r"(?P<stamp>[\d-]*)-0000_hello").unwrap();
let mut stamps_found = 0;
for caps in captured_timestamps.captures_iter(result.stdout()) {
let timestamp = NaiveDateTime::parse_from_str(&caps["stamp"], TIMESTAMP_FORMAT);
@@ -278,6 +279,83 @@ fn migration_generate_from_diff_except_tables() {
test_generate_migration("diff_except_tables", vec!["-e", "table_b", "table_c"]);
}

#[test]
fn migration_generate_with_duplicate_specified_version_fails() {
const VERSION_ARG: &str = "--version=12345";

let p = project("generate_duplicate_specified_versions").build();

p.command("setup").run();

let result = p
.command("migration")
.arg("generate")
.arg("hello1")
.arg(VERSION_ARG)
.run();

assert!(result.is_success(), "Result was unsuccessful {:?}", result);

let result = p
.command("migration")
.arg("generate")
.arg("hello2")
.arg(VERSION_ARG)
.run();

assert!(!result.is_success(), "Result was successful {:?}", result);
}

#[test]
fn migration_generate_different_versions() {
const MIGRATIONS_NO: usize = 26;
fn extract_version(entry: &DirEntry) -> String {
entry
.file_name()
.to_string_lossy()
.split('_')
.next()
.expect("To have _ in migration path")
.to_string()
}
let p = project("generate_different_versions").build();
p.command("setup").run();

for i in 1..=MIGRATIONS_NO {
p.command("migration")
.arg("generate")
.arg(format!("mig{}", i))
.run();
}

let paths: Vec<DirEntry> = p
.directory_entries("migrations")
.unwrap()
.filter_map(|e| {
if let Ok(e) = e {
if e.path().is_dir() {
return Some(e);
}
}
None
})
.collect();

for i in 0..paths.len() {
for j in (i + 1)..paths.len() {
let version_i = extract_version(&paths[i]);
let version_j = extract_version(&paths[j]);
assert_ne!(version_i, version_j);
}
}
if cfg!(feature = "postgres") {
// Includes the 00000 migration created on 'setup'
assert_eq!(paths.len(), MIGRATIONS_NO + 1);
} else {
assert_eq!(paths.len(), MIGRATIONS_NO);
}
}

#[cfg(feature = "sqlite")]
const BACKEND: &str = "sqlite";
#[cfg(feature = "postgres")]
17 changes: 13 additions & 4 deletions diesel_cli/tests/support/project_builder.rs
@@ -3,8 +3,8 @@ extern crate dotenvy;
#[cfg(not(feature = "sqlite"))]
extern crate url;

use std::fs::{self, File};
use std::io::prelude::*;
use std::fs::{self, File, ReadDir};
use std::io::{self, prelude::*};
use std::path::{Path, PathBuf};
use tempfile::{Builder, TempDir};

@@ -85,8 +85,13 @@ impl Project {
.join("migrations")
.read_dir()
.expect("Error reading directory")
.map(|e| Migration {
path: e.expect("error reading entry").path(),
.filter_map(|e| {
if let Ok(e) = e {
if e.path().is_dir() {
return Some(Migration { path: e.path() });
}
}
None
})
.collect()
}
@@ -132,6 +137,10 @@ impl Project {
self.directory.path().join(path).exists()
}

pub fn directory_entries<P: AsRef<Path>>(&self, path: P) -> Result<ReadDir, io::Error> {
fs::read_dir(self.directory.path().join(path))
}

pub fn file_contents<P: AsRef<Path>>(&self, path: P) -> String {
let mut f = File::open(self.directory.path().join(path)).expect("Could not open file");
let mut result = String::new();
