# Author: Gabriela Miyazato Szini and Andrea Titton - March 2021

import numpy as np
from itertools import permutations, combinations

np.random.seed(110792)
verbose = True

# Initialization
N = 5  # number of nodes in the network
n_dyads = N*(N-1)  # number of directed dyads

u_ij = np.random.normal(0, 1, (N, N))  # drawing the error terms
np.fill_diagonal(u_ij, 0)

# drawing the fixed effects as in Jochmans (2016)
a_i = np.random.beta(2, 2, size=(N, 1)) - 1/2
x_ij = -np.abs(a_i - a_i.T)
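
# --- Illustrative check (an addition, not part of the original gist) ---
# By construction x_ij = -|a_i - a_j|, so it is symmetric with a zero diagonal.
assert np.allclose(x_ij, x_ij.T)
assert np.allclose(np.diag(x_ij), 0)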

# Utils
def perm_np(to: int, size: int) -> np.ndarray:
    """
    Generates an array with one row per `size`-length permutation of range(`to`)
    """
    it = permutations(range(to), size)
    return np.array(list(it))


def var_tetrad(A: np.ndarray, t: np.ndarray) -> np.ndarray:
    """
    Double difference of A for each tetrad (row) of t, vectorized
    """
    first = A[t[:, 0], t[:, 1]] - A[t[:, 0], t[:, 2]]
    second = A[t[:, 3], t[:, 1]] - A[t[:, 3], t[:, 2]]

    return first - second


def double_diff(mat, i, j, k, l):
    """
    Double difference of mat for a single tetrad (i, j, k, l)
    """
    return (mat[i, j] - mat[i, k]) - (mat[l, j] - mat[l, k])

def s(X, U, tetrad):
    """
    Score of a tetrad: product of the double differences of X and U
    """
    return double_diff(X, *tetrad) * double_diff(U, *tetrad)

# List of dyads and tetrads
dyads, tetrads = perm_np(N, 2), perm_np(N, 4)

verbose and print("Setting up the U-statistic...")

X = var_tetrad(x_ij, tetrads)
U = var_tetrad(u_ij, tetrads)

u_stat = np.mean(X * U)
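
# --- Illustrative sanity check (an addition, not part of the original gist) ---
# A minimal sketch: the vectorized var_tetrad above should agree with an
# explicit loop that applies double_diff tetrad by tetrad.
u_stat_loop = np.mean([
    double_diff(x_ij, *tetrad) * double_diff(u_ij, *tetrad)
    for tetrad in tetrads
])
assert np.isclose(u_stat, u_stat_loop), "vectorized and looped U-statistics should agree"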

#-- Calculating the variance of the U-statistic --#
# To calculate the variance we need to obtain \Delta_2; here we follow the
# consistent estimator proposed in Graham (2018), which requires the average
# of \bar{s}_ij \bar{s}_ij' over dyads.
verbose and print("Computing Graham estimator...")

s_ij_bar = np.zeros(n_dyads)

for l, dyad in enumerate(dyads):
    verbose and print(f"Dyad {l+1} / {n_dyads}", end="\r")
    i, j = dyad

    # for a given dyad (i, j), we average the scores over the combinations of four nodes that contain both i and j
    comb = filter(
        lambda c: (i in c) and (j in c),
        combinations(range(N), 4)
    )

    # for a given combination, the score is the average over its permutations
    s_ij = [
        np.mean([s(x_ij, u_ij, perm) for perm in permutations(c)])
        for c in comb
    ]

    s_ij_bar[l] = np.mean(s_ij)

# Delta_2 is simply the average of \bar{s}_ij \bar{s}_ij' over all dyads
delta_2 = np.mean(s_ij_bar * s_ij_bar)

print("\n...done!")