Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Some PEP8 changes #46

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 13 additions & 22 deletions bct/algorithms/centrality.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,13 +173,14 @@ def entropy(w_):
pnm[np.logical_not(pnm)] = 1
return -np.sum(pnm * np.log(pnm), axis=1) / np.log(m)

#explicitly ignore compiler warning for division by zero
# explicitly ignore compiler warning for division by zero
with np.errstate(invalid='ignore'):
Hpos = entropy(W * (W > 0))
Hneg = entropy(-W * (W < 0))

return Hpos, Hneg


def edge_betweenness_bin(G):
'''
Edge betweenness centrality is the fraction of all shortest paths in
Expand Down Expand Up @@ -346,7 +347,6 @@ def eigenvector_centrality_und(CIJ):
'''
from scipy import linalg

n = len(CIJ)
vals, vecs = linalg.eig(CIJ)
i = np.argmax(vals)
return np.abs(vecs[:, i])
Expand Down Expand Up @@ -486,16 +486,11 @@ def gateway_coef_sign(W, ci, centrality_type='degree'):
np.fill_diagonal(W, 0)

def gcoef(W):
#strength
s = np.sum(W, axis=1)
#neighbor community affiliation
Gc = np.inner((W != 0), np.diag(ci))
#community specific neighbors
Sc2 = np.zeros((n,))
#extra modular weighting
ksm = np.zeros((n,))
#intra modular weighting
centm = np.zeros((n,))
s = np.sum(W, axis=1) # strength
Gc = np.inner((W != 0), np.diag(ci)) # neighbor community affiliation
Sc2 = np.zeros((n,)) # community specific neighbors
ksm = np.zeros((n,)) # extra modular weighting
centm = np.zeros((n,))  # intra modular weighting

if centrality_type == 'degree':
cent = s.copy()
Expand All @@ -508,19 +503,14 @@ def gcoef(W):
print(np.sum(ks))
Sc2 += ks ** 2
for j in range(1, nr_modules+1):
#calculate extramodular weights
# calculate extramodular weights
ksm[ci == j] += ks[ci == j] / np.sum(ks[ci == j])

#calculate intramodular weights
# calculate intramodular weights
centm[ci == i] = np.sum(cent[ci == i])

#print(Gc)
#print(centm)
#print(ksm)
#print(ks)

centm = centm / max(centm)
#calculate total weights
# calculate total weights
gs = (1 - ksm * centm) ** 2

Gw = 1 - Sc2 * gs / s ** 2
Expand All @@ -532,7 +522,7 @@ def gcoef(W):
G_pos = gcoef(W * (W > 0))
G_neg = gcoef(-W * (W < 0))
return G_pos, G_neg


def kcoreness_centrality_bd(CIJ):
'''
Expand Down Expand Up @@ -780,13 +770,14 @@ def pcoef(W_):
P[np.where(np.logical_not(P))] = 0 # p_ind=0 if no (out)neighbors
return P

#explicitly ignore compiler warning for division by zero
# explicitly ignore compiler warning for division by zero
with np.errstate(invalid='ignore'):
Ppos = pcoef(W * (W > 0))
Pneg = pcoef(-W * (W < 0))

return Ppos, Pneg


def subgraph_centrality(CIJ):
'''
The subgraph centrality of a node is a weighted sum of closed walks of
Expand Down
33 changes: 16 additions & 17 deletions bct/algorithms/clustering.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ def clustering_coef_wu_sign(W, coef_type='default'):
'''
Returns the weighted clustering coefficient generalized or separated
for positive and negative weights.

Three algorithms are supported, hereafter referred to as default, zhang,
and constantini.

Expand Down Expand Up @@ -319,6 +319,7 @@ def clustering_coef_wu_sign(W, coef_type='default'):
C = cyc3 / cyc2
return C


def consensus_und(D, tau, reps=1000):
'''
This algorithm seeks a consensus partition of the
Expand Down Expand Up @@ -385,11 +386,7 @@ def unique_partitions(cis):
dup = np.where(np.sum(np.abs(cis.T - cis[:, 0]), axis=1) == 0)
cis = np.delete(cis, dup, axis=1)
c = np.delete(c, dup)
# count+=1
# print count,c,dup
# if count>10:
# class QualitativeError(): pass
# raise QualitativeError()

return np.transpose(ciu)

n = len(D)
Expand Down Expand Up @@ -448,14 +445,15 @@ def get_components(A, no_depend=False):
'''

if not np.all(A == A.T): # ensure matrix is undirected
raise BCTParamError('get_components can only be computed for undirected'
' matrices. If your matrix is noisy, correct it with np.around')

raise BCTParamError('get_components can only be computed for '
'undirected matrices. If your matrix is '
'noisy, correct it with np.around')

A = binarize(A, copy=True)
n = len(A)
np.fill_diagonal(A, 1)

edge_map = [{u,v} for u in range(n) for v in range(n) if A[u,v] == 1]
edge_map = [{u, v} for u in range(n) for v in range(n) if A[u, v] == 1]
union_sets = []
for item in edge_map:
temp = []
Expand All @@ -468,8 +466,8 @@ def get_components(A, no_depend=False):
temp.append(item)
union_sets = temp

comps = np.array([i+1 for v in range(n) for i in
range(len(union_sets)) if v in union_sets[i]])
comps = np.array([i+1 for v in range(n) for i in
range(len(union_sets)) if v in union_sets[i]])
comp_sizes = np.array([len(s) for s in union_sets])

return comps, comp_sizes
Expand Down Expand Up @@ -517,8 +515,9 @@ def get_components_old(A, no_depend=False):
# nonsquare matrices cannot be symmetric; no need to check

if not np.all(A == A.T): # ensure matrix is undirected
raise BCTParamError('get_components can only be computed for undirected'
' matrices. If your matrix is noisy, correct it with np.around')
raise BCTParamError('get_components can only be computed for '
'undirected matrices. If your matrix is '
'noisy, correct it with np.around')

A = binarize(A, copy=True)
n = len(A)
Expand Down Expand Up @@ -589,9 +588,9 @@ def transitivity_bd(A):
= 2 * (K(K-1)/2 - diag(A^2))
= K(K-1) - 2(diag(A^2))
'''
S = A + A.T # symmetrized input graph
K = np.sum(S, axis=1) # total degree (in+out)
cyc3 = np.diag(np.dot(S, np.dot(S, S))) / 2 # number of 3-cycles
S = A + A.T # symmetrized input graph
K = np.sum(S, axis=1) # total degree (in+out)
cyc3 = np.diag(np.dot(S, np.dot(S, S))) / 2 # number of 3-cycles
CYC3 = K * (K - 1) - 2 * np.diag(np.dot(A, A)) # number of all possible 3-cycles
return np.sum(cyc3) / np.sum(CYC3)

Expand Down
50 changes: 25 additions & 25 deletions bct/algorithms/core.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import division, print_function
import numpy as np
from bct.utils import BCTParamError
from .degree import degrees_dir, degrees_und, strengths_dir, strengths_und
from .degree import strengths_und_sign

Expand Down Expand Up @@ -132,8 +133,8 @@ def assortativity_wei(CIJ, flag=0):


def core_periphery_dir(W, gamma=1, C0=None):
'''
The optimal core/periphery subdivision is a partition of the network
'''
The optimal core/periphery subdivision is a partition of the network
into two nonoverlapping groups of nodes, a core group and a periphery
group. The number of core-group edges is maximized, and the number of
within periphery edges is minimized.
Expand Down Expand Up @@ -161,13 +162,13 @@ def core_periphery_dir(W, gamma=1, C0=None):
n = len(W)
np.fill_diagonal(W, 0)

if C0 == None:
if C0 is None:
C = np.random.randint(2, size=(n,))
else:
C = C0.copy()

#methodological note, the core-detection null model is not corrected
#for degree cf community detection (to enable detection of hubs)
# methodological note, the core-detection null model is not corrected
# for degree cf community detection (to enable detection of hubs)

s = np.sum(W)
p = np.mean(W)
Expand All @@ -178,25 +179,25 @@ def core_periphery_dir(W, gamma=1, C0=None):
q = np.sum(B[np.ix_(cix, cix)]) - np.sum(B[np.ix_(ncix, ncix)])

print(q)
#sqish
# sqish

flag = True
it = 0
while flag:
it += 1
it += 1
if it > 100:
raise BCTParamError('Infinite Loop aborted')

flag = False
#initial node indices
ixes = np.arange(n)
# initial node indices
ixes = np.arange(n)

Ct = C.copy()
while len(ixes) > 0:
Qt = np.zeros((n,))
ctix, = np.where(Ct)
nctix, = np.where(np.logical_not(Ct))
q0 = (np.sum(B[np.ix_(ctix, ctix)]) -
q0 = (np.sum(B[np.ix_(ctix, ctix)]) -
np.sum(B[np.ix_(nctix, nctix)]))
Qt[ctix] = q0 - 2 * np.sum(B[ctix, :], axis=1)
Qt[nctix] = q0 + 2 * np.sum(B[nctix, :], axis=1)
Expand All @@ -206,15 +207,15 @@ def core_periphery_dir(W, gamma=1, C0=None):
print(np.where(np.abs(Qt[ixes]-max_Qt) < 1e-10))
print(Qt[ixes])
print(max_Qt)
#tunourn
# tunourn
u = u[np.random.randint(len(u))]
print(np.sum(Ct))
Ct[ixes[u]] = np.logical_not(Ct[ixes[u]])
print(np.sum(Ct))
#casga
# casga

ixes = np.delete(ixes, u)

print(max_Qt - q)
print(len(ixes))

Expand All @@ -223,7 +224,7 @@ def core_periphery_dir(W, gamma=1, C0=None):
C = Ct.copy()
cix, = np.where(C)
ncix, = np.where(np.logical_not(C))
q = (np.sum(B[np.ix_(cix, cix)]) -
q = (np.sum(B[np.ix_(cix, cix)]) -
np.sum(B[np.ix_(ncix, ncix)]))

cix, = np.where(C)
Expand Down Expand Up @@ -378,7 +379,7 @@ def local_assortativity_wu_sign(W):
----------
W : NxN np.ndarray
undirected connection matrix with positive and negative weights

Returns
-------
loc_assort_pos : Nx1 np.ndarray
Expand All @@ -399,19 +400,20 @@ def local_assortativity_wu_sign(W):

for curr_node in range(n):
jp = np.where(W[curr_node, :] > 0)
loc_assort_pos[curr_node] = np.sum(np.abs(str_pos[jp] -
str_pos[curr_node])) / str_pos[curr_node]
loc_assort_pos[curr_node] = np.sum(np.abs(
str_pos[jp] - str_pos[curr_node])) / str_pos[curr_node]
jn = np.where(W[curr_node, :] < 0)
loc_assort_neg[curr_node] = np.sum(np.abs(str_neg[jn] -
str_neg[curr_node])) / str_neg[curr_node]
loc_assort_neg[curr_node] = np.sum(np.abs(
str_neg[jn] - str_neg[curr_node])) / str_neg[curr_node]

loc_assort_pos = ((r_pos + 1) / n -
loc_assort_pos / np.sum(loc_assort_pos))
loc_assort_pos = ((r_pos + 1) / n -
loc_assort_pos / np.sum(loc_assort_pos))
loc_assort_neg = ((r_neg + 1) / n -
loc_assort_neg / np.sum(loc_assort_neg))
loc_assort_neg / np.sum(loc_assort_neg))

return loc_assort_pos, loc_assort_neg


def rich_club_bd(CIJ, klevel=None):
'''
The rich club coefficient, R, at level k is the fraction of edges that
Expand Down Expand Up @@ -484,7 +486,7 @@ def rich_club_bu(CIJ, klevel=None):
'''
deg = degrees_und(CIJ) # compute degree of each node

if klevel == None:
if klevel is None:
klevel = int(np.max(deg))

R = np.zeros((klevel,))
Expand Down Expand Up @@ -518,7 +520,6 @@ def rich_club_wd(CIJ, klevel=None):
Rw : Kx1 np.ndarray
vector of rich-club coefficients for levels 1 to klevel
'''
nr_nodes = len(CIJ)
# degree of each node is defined here as in+out
deg = np.sum((CIJ != 0), axis=0) + np.sum((CIJ.T != 0), axis=0)

Expand Down Expand Up @@ -565,7 +566,6 @@ def rich_club_wu(CIJ, klevel=None):
Rw : Kx1 np.ndarray
vector of rich-club coefficients for levels 1 to klevel
'''
nr_nodes = len(CIJ)
deg = np.sum((CIJ != 0), axis=0)

if klevel is None:
Expand Down
9 changes: 4 additions & 5 deletions bct/algorithms/degree.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,11 +175,10 @@ def strengths_und_sign(W):
total negative weight
'''
W = W.copy()
n = len(W)
np.fill_diagonal(W, 0) # clear diagonal
np.fill_diagonal(W, 0) # clear diagonal
Spos = np.sum(W * (W > 0), axis=0) # positive strengths
Sneg = np.sum(W * (W < 0), axis=0) # negative strengths
Sneg = np.sum(W * (W < 0), axis=0) # negative strengths

vpos = np.sum(W[W > 0]) # positive weight
vneg = np.sum(W[W < 0]) # negative weight
vpos = np.sum(W[W > 0]) # positive weight
vneg = np.sum(W[W < 0]) # negative weight
return Spos, Sneg, vpos, vneg
Loading