From 9311595feed5136e9d94ed8097f907fb82a4894d Mon Sep 17 00:00:00 2001
From: Merel Kuijs
Date: Mon, 15 Jan 2024 22:28:34 +0100
Subject: [PATCH] Resolves deprecation error

---
 ncem/data.py                       | 2 +-
 ncem/estimators/base_estimator.py  | 6 +++---
 ncem/interpretation/interpreter.py | 8 ++++----
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/ncem/data.py b/ncem/data.py
index c078d27f..a0b02555 100644
--- a/ncem/data.py
+++ b/ncem/data.py
@@ -247,7 +247,7 @@ def plot_degree_vs_dist(
         mean_d = [np.mean(degree) for degree in degrees]
         print(np.mean(mean_d))
         mean_degree += mean_d
-        distances += [np.int(dist * lateral_resolution)] * len(mean_d)
+        distances += [int(dist * lateral_resolution)] * len(mean_d)
 
     sns_data = pd.DataFrame(
         {
diff --git a/ncem/estimators/base_estimator.py b/ncem/estimators/base_estimator.py
index ad803e98..881f9f78 100644
--- a/ncem/estimators/base_estimator.py
+++ b/ncem/estimators/base_estimator.py
@@ -307,7 +307,7 @@ def get_data(
         )
         if robustness:
             np.random.seed(robustness_seed)
-            n_images = np.int(len(self.data.img_celldata) * robustness)
+            n_images = int(len(self.data.img_celldata) * robustness)
             print(n_images)
             image_keys = list(
                 np.random.choice(
@@ -332,10 +332,10 @@ def get_data(
         if segmentation_robustness:
             node_fraction = segmentation_robustness[0]
             overflow_fraction = segmentation_robustness[1]
-            total_size = np.int(self.data.celldata.shape[0] * node_fraction)
+            total_size = int(self.data.celldata.shape[0] * node_fraction)
 
             for key, ad in self.data.img_celldata.items():
-                size = np.int(ad.shape[0] * node_fraction)
+                size = int(ad.shape[0] * node_fraction)
                 random_indices = np.random.choice(ad.shape[0], size=size, replace=False)
                 a = ad.obsp["adjacency_matrix_connectivities"].toarray()
                 err_ad = ad.copy()
diff --git a/ncem/interpretation/interpreter.py b/ncem/interpretation/interpreter.py
index e1f73b1e..4ef3ad7c 100644
--- a/ncem/interpretation/interpreter.py
+++ b/ncem/interpretation/interpreter.py
@@ -499,7 +499,7 @@ def _get_np_data(
         for k, v in nodes_idx.items():
             count = count + len(v)
 
-        with tqdm(total=np.int(count / self.n_eval_nodes_per_graph)) as pbar:
+        with tqdm(total=int(count / self.n_eval_nodes_per_graph)) as pbar:
             for _step, (x_batch, y_batch) in enumerate(ds):
                 target_batch, interaction_batch, sf_batch, node_covar_batch, g_batch = x_batch
                 target.append(target_batch.numpy().squeeze())
@@ -919,7 +919,7 @@ def get_sender_receiver_effects(self, params_type: str = "ols", significance_thr
         print("calculating inv fim.")
         fim_inv = get_fim_inv(x_design, y)
-        interaction_shape = np.int(self.n_features_0**2)
+        interaction_shape = int(self.n_features_0**2)
         params = params[:, self.n_features_0 : interaction_shape + self.n_features_0]
         is_sign, pvalues, qvalues = wald_test(
             params=params, fisher_inv=fim_inv, significance_threshold=significance_threshold
         )
@@ -1902,7 +1902,7 @@ def _get_np_data(
     def get_sender_receiver_effects(self, params_type: str = "ols", significance_threshold: float = 0.05):
         data = {"target": self.data.celldata.obsm["node_types"], "proportions": self.data.celldata.obsm["proportions"]}
         target = np.asarray(dmatrix("target-1", data))
-        interaction_shape = np.int(self.n_features_0**2)
+        interaction_shape = int(self.n_features_0**2)
         interactions = np.asarray(dmatrix("target:proportions-1", data))
         y = self.data.celldata.X
 
@@ -1922,7 +1922,7 @@ def get_sender_receiver_effects(self, params_type: str = "ols", significance_thr
         is_sign, pvalues, qvalues = wald_test(
             params=params, fisher_inv=fim_inv, significance_threshold=significance_threshold
         )
-        interaction_shape = np.int(self.n_features_0**2)
+        interaction_shape = int(self.n_features_0**2)
         # subset to interaction terms
         is_sign = is_sign[self.n_features_0 : interaction_shape + self.n_features_0, :]
         pvalues = pvalues[self.n_features_0 : interaction_shape + self.n_features_0, :]
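
For context: np.int was only a deprecated alias for the builtin int. The alias was deprecated in NumPy 1.20 and removed in NumPy 1.24, so the calls patched above fail with an AttributeError on current NumPy. Below is a minimal Python sketch of the failure mode and the drop-in replacement, assuming NumPy >= 1.24; the dist and lateral_resolution values are invented for illustration only.

    import numpy as np

    dist, lateral_resolution = 3, 10.5  # invented example values

    # Before the fix (NumPy >= 1.24): the removed alias fails.
    #   np.int(dist * lateral_resolution)
    #   AttributeError: module 'numpy' has no attribute 'int'

    # After the fix: the builtin int is a drop-in replacement and
    # truncates toward zero exactly as the old alias did.
    print(int(dist * lateral_resolution))       # 31

    # Where a NumPy scalar type is genuinely needed, an explicit
    # width such as np.int64 (or np.int_) is still available.
    print(np.int64(dist * lateral_resolution))  # 31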