Skip to content

Commit

Permalink
Clippy a bunch of code again (#1412)
Browse files Browse the repository at this point in the history
This runs `cargo clippy --fix` again and then finally just-about
finishes the cleanup in pgrx-pg-sys manually so that it doesn't emit a
bunch of pointless warnings, modulo a few cases left on purpose.
  • Loading branch information
workingjubilee authored Nov 29, 2023
1 parent f9f8911 commit 1535a0a
Show file tree
Hide file tree
Showing 15 changed files with 54 additions and 61 deletions.
6 changes: 3 additions & 3 deletions cargo-pgrx/src/command/install.rs
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ pub(crate) fn install_extension(
true,
&package_manifest_path,
&mut output_tracking,
&pg_config,
pg_config,
)?;
}

Expand Down Expand Up @@ -218,7 +218,7 @@ pub(crate) fn install_extension(
false,
&package_manifest_path,
&mut output_tracking,
&pg_config,
pg_config,
)?;
}

Expand Down Expand Up @@ -409,7 +409,7 @@ fn copy_sql_files(
true,
&package_manifest_path,
output_tracking,
&pg_config,
pg_config,
)?;
}
}
Expand Down
2 changes: 1 addition & 1 deletion cargo-pgrx/src/command/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -297,7 +297,7 @@ pub(crate) fn generate_schema(
// The next action may take a few seconds, we'd like the user to know we're thinking.
eprintln!("{} SQL entities", " Discovering".bold().green(),);

let postmaster_stub_built = create_stub(&postmaster_path, &postmaster_stub_dir)?;
let postmaster_stub_built = create_stub(postmaster_path, &postmaster_stub_dir)?;

// Inspect the symbol table for a list of `__pgrx_internals` we should have the generator call
let mut lib_so = target_dir_with_profile.clone();
Expand Down
52 changes: 23 additions & 29 deletions pgrx-pg-sys/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -377,19 +377,15 @@ fn rewrite_items(
fn extract_oids(code: &syn::File) -> BTreeMap<syn::Ident, Box<syn::Expr>> {
let mut oids = BTreeMap::new(); // we would like to have a nice sorted set
for item in &code.items {
match item {
Item::Const(ItemConst { ident, ty, expr, .. }) => {
// Retype as strings for easy comparison
let name = ident.to_string();
let ty_str = ty.to_token_stream().to_string();

// This heuristic identifies "OIDs"
// We're going to wrap the const declarations to be our newtype Oid
if ty_str == "u32" && is_builtin_oid(&name) {
oids.insert(ident.clone(), expr.clone());
}
}
_ => {}
let Item::Const(ItemConst { ident, ty, expr, .. }) = item else { continue };
// Retype as strings for easy comparison
let name = ident.to_string();
let ty_str = ty.to_token_stream().to_string();

// This heuristic identifies "OIDs"
// We're going to wrap the const declarations to be our newtype Oid
if ty_str == "u32" && is_builtin_oid(&name) {
oids.insert(ident.clone(), expr.clone());
}
}
oids
Expand All @@ -402,7 +398,7 @@ fn is_builtin_oid(name: &str) -> bool {
}

fn rewrite_oid_consts(
items: &Vec<syn::Item>,
items: &[syn::Item],
oids: &BTreeMap<syn::Ident, Box<syn::Expr>>,
) -> Vec<syn::Item> {
items
Expand All @@ -418,9 +414,7 @@ fn rewrite_oid_consts(
.collect()
}

fn format_builtin_oid_impl<'a>(
oids: BTreeMap<syn::Ident, Box<syn::Expr>>,
) -> proc_macro2::TokenStream {
fn format_builtin_oid_impl(oids: BTreeMap<syn::Ident, Box<syn::Expr>>) -> proc_macro2::TokenStream {
let enum_variants: proc_macro2::TokenStream;
let from_impl: proc_macro2::TokenStream;
(enum_variants, from_impl) = oids
Expand Down Expand Up @@ -451,12 +445,12 @@ fn format_builtin_oid_impl<'a>(
}

/// Implement our `PgNode` marker trait for `pg_sys::Node` and its "subclasses"
fn impl_pg_node(items: &Vec<syn::Item>) -> eyre::Result<proc_macro2::TokenStream> {
fn impl_pg_node(items: &[syn::Item]) -> eyre::Result<proc_macro2::TokenStream> {
let mut pgnode_impls = proc_macro2::TokenStream::new();

// we scope most of the computation so we can borrow `items` and then
// extend it at the very end.
let struct_graph: StructGraph = StructGraph::from(&items[..]);
let struct_graph: StructGraph = StructGraph::from(items);

// collect all the structs with `NodeTag` as their first member,
// these will serve as roots in our forest of `Node`s
Expand Down Expand Up @@ -586,7 +580,7 @@ impl<'a> From<&'a [syn::Item]> for StructGraph<'a> {
for item in items.iter() {
// grab the first field if it is struct
let (id, first_field) = match &item {
&syn::Item::Struct(syn::ItemStruct {
syn::Item::Struct(syn::ItemStruct {
ident: id,
fields: syn::Fields::Named(fields),
..
Expand Down Expand Up @@ -689,7 +683,7 @@ impl Ord for StructDescriptor<'_> {
fn get_bindings(
major_version: u16,
pg_config: &PgConfig,
include_h: &PathBuf,
include_h: &path::Path,
) -> eyre::Result<syn::File> {
let bindings = if let Some(info_dir) =
target_env_tracked(&format!("PGRX_TARGET_INFO_PATH_PG{major_version}"))
Expand All @@ -712,7 +706,7 @@ fn get_bindings(
fn run_bindgen(
major_version: u16,
pg_config: &PgConfig,
include_h: &PathBuf,
include_h: &path::Path,
) -> eyre::Result<String> {
eprintln!("Generating bindings for pg{major_version}");
let configure = pg_config.configure()?;
Expand Down Expand Up @@ -834,13 +828,13 @@ fn pg_target_include_flags(pg_version: u16, pg_config: &PgConfig) -> eyre::Resul
}
}

fn build_shim(shim_src: &PathBuf, shim_dst: &PathBuf, pg_config: &PgConfig) -> eyre::Result<()> {
fn build_shim(
shim_src: &path::Path,
shim_dst: &path::Path,
pg_config: &PgConfig,
) -> eyre::Result<()> {
let major_version = pg_config.major_version()?;
let mut libpgrx_cshim: PathBuf = shim_dst.clone();

libpgrx_cshim.push(format!("libpgrx-cshim-{}.a", major_version));

eprintln!("libpgrx_cshim={}", libpgrx_cshim.display());
// then build the shim for the version feature currently being built
build_shim_for_version(shim_src, shim_dst, pg_config)?;

Expand All @@ -855,8 +849,8 @@ fn build_shim(shim_src: &PathBuf, shim_dst: &PathBuf, pg_config: &PgConfig) -> e
}

fn build_shim_for_version(
shim_src: &PathBuf,
shim_dst: &PathBuf,
shim_src: &path::Path,
shim_dst: &path::Path,
pg_config: &PgConfig,
) -> eyre::Result<()> {
let path_env = prefix_path(pg_config.parent_path());
Expand Down
12 changes: 6 additions & 6 deletions pgrx-pg-sys/build/clang.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ pub(crate) fn detect_include_paths_for(
!is_hidden(entry) && {
entry_contains(entry, "clang")
|| entry_contains(entry, "include")
|| entry_contains(entry, &*clang_major_fmt)
|| entry_contains(entry, &clang_major_fmt)
// we always want to descend from a lib dir, but only one step
// as we don't really want to search all of /usr/lib's subdirs
|| os_str_contains(entry.file_name(), "lib")
Expand All @@ -85,7 +85,7 @@ pub(crate) fn detect_include_paths_for(
.filter_map(|e| e.ok()) // be discreet
// We now need something that looks like it actually satisfies all our constraints
.filter(|entry| {
entry_contains(entry, &*clang_major_fmt)
entry_contains(entry, &clang_major_fmt)
&& entry_contains(entry, "clang")
&& entry_contains(entry, "include")
})
Expand All @@ -96,28 +96,28 @@ pub(crate) fn detect_include_paths_for(
})
.filter_map(|entry| {
let mut pbuf = entry.into_path();
if pbuf.pop() && pbuf.is_dir() && os_str_contains(&*pbuf.file_name()?, "include") {
if pbuf.pop() && pbuf.is_dir() && os_str_contains(pbuf.file_name()?, "include") {
Some(pbuf)
} else {
None
}
})
.collect::<Vec<_>>();

if paths.len() > 0 {
if !paths.is_empty() {
paths.sort();
paths.dedup();
break;
}
}
// If we have anything better to recommend, don't autodetect!
let autodetect = paths.len() == 0;
let autodetect = paths.is_empty();
eprintln!("Found include dirs {:?}", paths);
(autodetect, paths)
}

fn is_hidden(entry: &DirEntry) -> bool {
entry.file_name().to_str().map(|s| s.starts_with(".")).unwrap_or(false)
entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
}

fn entry_contains(entry: &DirEntry, needle: &str) -> bool {
Expand Down
7 changes: 3 additions & 4 deletions pgrx-pg-sys/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,9 @@
#![cfg_attr(nightly, feature(strict_provenance))]

#[cfg(
any(
// no features at all will cause problems
not(any(feature = "pg12", feature = "pg13", feature = "pg14", feature = "pg15", feature = "pg16")),
))]
// no features at all will cause problems
not(any(feature = "pg12", feature = "pg13", feature = "pg14", feature = "pg15", feature = "pg16")),
)]
std::compile_error!("exactly one feature must be provided (pg12, pg13, pg14, pg15, pg16)");

mod cshim;
Expand Down
2 changes: 1 addition & 1 deletion pgrx-pg-sys/src/submodules/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ pub use datum::Datum;
pub use htup::*;
pub use oids::*;
pub use pg_try::*;
#[cfg(any(feature = "pg12"))]
#[cfg(feature = "pg12")]
pub use polyfill::*;
pub use utils::*;

Expand Down
2 changes: 1 addition & 1 deletion pgrx-pg-sys/src/submodules/panic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -575,7 +575,7 @@ fn do_ereport(ereport: ErrorReportWithLevel) {
/// to be freed in case level < ERROR
#[inline(always)]
#[rustfmt::skip] // my opinion wins
#[cfg(any(feature = "pg12"))]
#[cfg(feature = "pg12")]
fn do_ereport_impl(ereport: ErrorReportWithLevel) {

extern "C" {
Expand Down
2 changes: 1 addition & 1 deletion pgrx-pg-sys/src/submodules/polyfill.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,5 +15,5 @@ mod typalign {
pub const TYPALIGN_DOUBLE: u8 = b'd';
}

#[cfg(any(feature = "pg12"))]
#[cfg(feature = "pg12")]
pub use typalign::*;
4 changes: 2 additions & 2 deletions pgrx-sql-entity-graph/src/extension_sql/entity.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ impl ToSql for ExtensionSqlEntity {
let ExtensionSqlEntity { file, line, sql, creates, requires, .. } = self;
let creates = if !creates.is_empty() {
let joined =
creates.into_iter().map(|i| format!("-- {}", i)).collect::<Vec<_>>().join("\n");
creates.iter().map(|i| format!("-- {}", i)).collect::<Vec<_>>().join("\n");
format!(
"\
-- creates:\n\
Expand All @@ -84,7 +84,7 @@ impl ToSql for ExtensionSqlEntity {
};
let requires = if !requires.is_empty() {
let joined =
requires.into_iter().map(|i| format!("-- {}", i)).collect::<Vec<_>>().join("\n");
requires.iter().map(|i| format!("-- {}", i)).collect::<Vec<_>>().join("\n");
format!(
"\
-- requires:\n\
Expand Down
8 changes: 4 additions & 4 deletions pgrx-sql-entity-graph/src/pgrx_sql.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1108,7 +1108,7 @@ fn connect_hashes(
enums,
);

if let Some((_, extern_index)) = externs.into_iter().find(|(extern_item, _)| {
if let Some((_, extern_index)) = externs.iter().find(|(extern_item, _)| {
item.module_path == extern_item.module_path && extern_item.name == item.fn_name()
}) {
graph.add_edge(*extern_index, index, SqlGraphRelationship::RequiredBy);
Expand Down Expand Up @@ -1414,7 +1414,7 @@ fn make_extern_connection(
full_path: &str,
externs: &HashMap<PgExternEntity, NodeIndex>,
) -> eyre::Result<()> {
match externs.into_iter().find(|(extern_item, _)| full_path == extern_item.full_path) {
match externs.iter().find(|(extern_item, _)| full_path == extern_item.full_path) {
Some((_, extern_index)) => {
graph.add_edge(*extern_index, index, SqlGraphRelationship::RequiredBy);
Ok(())
Expand All @@ -1436,11 +1436,11 @@ fn make_type_or_enum_connection(
types: &HashMap<PostgresTypeEntity, NodeIndex>,
enums: &HashMap<PostgresEnumEntity, NodeIndex>,
) -> bool {
let found_ty = types.into_iter().find(|(ty_item, _)| ty_item.id_matches(ty_id));
let found_ty = types.iter().find(|(ty_item, _)| ty_item.id_matches(ty_id));
if let Some((_, ty_index)) = found_ty {
graph.add_edge(*ty_index, index, SqlGraphRelationship::RequiredBy);
}
let found_enum = enums.into_iter().find(|(ty_item, _)| ty_item.id_matches(ty_id));
let found_enum = enums.iter().find(|(ty_item, _)| ty_item.id_matches(ty_id));
if let Some((_, ty_index)) = found_enum {
graph.add_edge(*ty_index, index, SqlGraphRelationship::RequiredBy);
}
Expand Down
2 changes: 1 addition & 1 deletion pgrx-tests/src/framework.rs
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ pub fn client() -> eyre::Result<(postgres::Client, String)> {
)
.wrap_err("There was an issue attempting to get the session ID from Postgres")?;

let session_id = match sid_query_result.get(0) {
let session_id = match sid_query_result.first() {
Some(row) => row.get::<&str, &str>("sid").to_string(),
None => Err(eyre!("Failed to obtain a client Session ID from Postgres"))?,
};
Expand Down
2 changes: 1 addition & 1 deletion pgrx-version-updater/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ fn update_files(args: &UpdateFilesArgs) {
}

// Recursively walk down all directories to extract out any existing Cargo.toml files
for entry in WalkDir::new(&current_dir)
for entry in WalkDir::new(current_dir)
.into_iter()
.filter_entry(|e| is_not_excluded_dir(e))
.filter_map(|v| v.ok())
Expand Down
2 changes: 1 addition & 1 deletion pgrx/src/hooks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -472,7 +472,7 @@ unsafe extern "C" fn pgrx_process_utility(
.inner
}

#[cfg(any(feature = "pg12"))]
#[cfg(feature = "pg12")]
#[pg_guard]
unsafe extern "C" fn pgrx_planner(
parse: *mut pg_sys::Query,
Expand Down
6 changes: 3 additions & 3 deletions pgrx/src/list/old_list.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ impl<T> PgList<T> {
}
}

#[cfg(any(feature = "pg12"))]
#[cfg(feature = "pg12")]
#[inline]
pub unsafe fn replace_ptr(&mut self, i: usize, with: *mut T) {
let cell = pg_sys::pgrx_list_nth_cell(self.list, i as i32);
Expand All @@ -124,7 +124,7 @@ impl<T> PgList<T> {
cell.as_mut().expect("cell is null").ptr_value = with as void_mut_ptr;
}

#[cfg(any(feature = "pg12"))]
#[cfg(feature = "pg12")]
#[inline]
pub fn replace_int(&mut self, i: usize, with: i32) {
unsafe {
Expand All @@ -142,7 +142,7 @@ impl<T> PgList<T> {
}
}

#[cfg(any(feature = "pg12"))]
#[cfg(feature = "pg12")]
#[inline]
pub fn replace_oid(&mut self, i: usize, with: pg_sys::Oid) {
unsafe {
Expand Down
6 changes: 3 additions & 3 deletions pgrx/src/trigger_support/pg_trigger.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ impl<'a> PgTrigger<'a> {
// containing a known good `TriggerData` which also contains a known good `Trigger`... and the user agreed to
// our `unsafe` constructor safety rules, we choose to trust this is indeed a valid pointer offered to us by
// PostgreSQL, and that it trusts it.
unsafe { PgHeapTuple::from_trigger_data(&*self.trigger_data, TriggerTuple::New) }
unsafe { PgHeapTuple::from_trigger_data(self.trigger_data, TriggerTuple::New) }
}

/// Returns the old database row for UPDATE/DELETE operations in row-level triggers.
Expand All @@ -95,7 +95,7 @@ impl<'a> PgTrigger<'a> {
// containing a known good `TriggerData` which also contains a known good `Trigger`... and the user agreed to
// our `unsafe` constructor safety rules, we choose to trust this is indeed a valid pointer offered to us by
// PostgreSQL, and that it trusts it.
unsafe { PgHeapTuple::from_trigger_data(&*self.trigger_data, TriggerTuple::Old) }
unsafe { PgHeapTuple::from_trigger_data(self.trigger_data, TriggerTuple::Old) }
}

/// Variable that contains the name of the trigger actually fired
Expand Down Expand Up @@ -206,7 +206,7 @@ impl<'a> PgTrigger<'a> {
let slice: &[*mut c_char] =
unsafe { core::slice::from_raw_parts(tgargs, tgnargs.try_into()?) };
let args = slice
.into_iter()
.iter()
.map(|v| {
// Safety: Given that we have a known good `FunctionCallInfo`, which PostgreSQL has checked is indeed a trigger,
// containing a known good `TriggerData` which also contains a known good `Trigger`... and the user agreed to
Expand Down

0 comments on commit 1535a0a

Please sign in to comment.