-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathsimulation-consolidation-correlated34.py
145 lines (126 loc) · 5.72 KB
/
simulation-consolidation-correlated34.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
#!/usr/bin/env python3
'''
Created on Apr 5, 2018
Converting code to Python 3.4 so it can run on the Harvard server.
This is code for consolidation.
@author: Michael Muthukrishna
'''
import argparse
from human_social_network_generator34 import human_social_network_iterations_correlated
from numpy import random
import csv
import copy as C
from MyNetworkFunctions import *
# Global switches: verbose console tracing and per-generation JSON graph dumps.
debug_mode = False
output_json_graphs = False
#Create the folder for storing the output files (if it doesn't exist)
data_folder = "./data_consol_corr/"
#pathlib.Path(data_folder).mkdir(exist_ok=True)
# Beta-distribution (alpha, beta) parameter pairs for the trait distributions.
# The -1/0/1 command-line skew flags select an entry by direct list indexing
# (flag -1 reaches the last entry via Python's negative indexing).
beta_params = [[4,4],[2.5,3.5],[3.5,2.5]]
# Correlation imposed between the extraversion and conformity traits.
ext_conf_corr = -0.3
parser = argparse.ArgumentParser(description="Run DSIT simulation over Muthukrishna-Schaller network")
# NOTE(review): the defaults are ints, but the output-filename concatenation in
# __main__ expects strings; the defaults are only reachable when debug_mode
# relaxes 'required' — confirm before enabling debug_mode.
parser.add_argument('-e', '--extraversion', help='-1=negative skew, 0=approximate normal, 1=positive skew', required=(not debug_mode), default=0)
parser.add_argument('-c', '--conformity', help='-1=negative skew, 0=approximate normal, 1=positive skew', required=(not debug_mode), default=0)
parser.add_argument('-i', '--iterations', help='int - number of iterations', required=(not debug_mode), default=10)
parser.add_argument('-n', '--sim_num', help='int - number of simulation', required=(not debug_mode), default=-1)
#############################################################################
#### Helper functions #######################################################
#############################################################################
def shouldIChange(graph, nodeNum):
    """Decide whether node ``nodeNum`` should flip its binary 'value' attribute.

    The probability of conforming is the node's 'conformity' attribute scaled
    by the fraction of neighbors that hold the opposite value (DSIT-style
    social influence).  Uses the networkx 1.x ``graph.node`` attribute-dict
    API, consistent with the rest of this file.

    Parameters
    ----------
    graph : networkx graph whose nodes carry 'conformity' and 'value' attributes
    nodeNum : node key to evaluate

    Returns
    -------
    bool : True if the node should switch its value this step.
    """
    conformity = graph.node[nodeNum]['conformity']
    myValue = graph.node[nodeNum]['value']
    sameTally = 0
    diffTally = 0
    neighbors = graph.neighbors(nodeNum)
    for n in neighbors:
        if graph.node[n]['value'] == myValue:
            sameTally = sameTally + 1
        else:
            diffTally = diffTally + 1
    # Guard: an isolated node has nobody to conform to.  Without this check
    # the division below raises ZeroDivisionError when there are no neighbors.
    if sameTally + diffTally == 0:
        return False
    prob_of_conforming = conformity * diffTally / (sameTally + diffTally)
    if (random.random() < prob_of_conforming):
        return True
    return False
def simulate(graph, fileName, iterations=1):
    """Run the DSIT social-influence process on independent copies of ``graph``.

    Each iteration re-randomizes the binary 'value' attribute on every node of
    a deep copy of ``graph``, then repeatedly picks a random node and lets it
    conform (via shouldIChange) until 2*N consecutive picks produce no change.
    A summary row is written to ``fileName + '.csv'`` at the start and end of
    each iteration, and graph snapshots are saved as JSON (always at
    generation 0; later generations only when ``output_json_graphs`` is set).

    Parameters
    ----------
    graph : networkx graph with 'conformity' attributes on its nodes
    fileName : str - path prefix for the CSV summary and JSON snapshot files
    iterations : int - number of independent simulation runs (default 1)
    """
    random.seed()
    graphSummaryDataFileName = fileName + '.csv'
    fields = ['iteration', 'gen', 'influenceMoveCount', '0:1 Distribution']
    # 'with' guarantees the CSV is closed even if a run raises; newline='' is
    # the csv-module requirement to avoid spurious blank rows on Windows.
    with open(graphSummaryDataFileName, 'w', newline='') as f:
        csvwr = csv.DictWriter(f, fieldnames=fields, delimiter=',')
        csvwr.writeheader()
        for i in range(iterations):
            if debug_mode:
                print("Iteration:" + str(i))
            # Work on a copy so every iteration starts from the same topology.
            g = C.deepcopy(graph)
            # Randomize the binary opinion on every node (0 or 1).
            for node in g.nodes():
                g.add_node(node, value=random.randint(2))
            data = {'iteration': i, 'gen': 0, 'influenceMoveCount': 0,
                    '0:1 Distribution': zeroToOne(g)}
            # (Disabled extras from the original: similarity/clumpiness and
            # community-size metrics could be recomputed and added here.)
            csvwr.writerow(data)
            # Always snapshot the initial (generation 0) state.
            save_to_jsonfile(fileName + '_iter_' + str(i) + '_gen_' + str(0) + '.json', g)
            # Apply influence moves until 2*N consecutive picks change nothing.
            nStayedSame = 0
            count = 0
            numNodes = len(g.nodes())
            while (nStayedSame < 2 * numNodes):
                if debug_mode:
                    print("Count:" + str(count))
                count = count + 1
                randNode = random.choice(g.nodes())
                # Calculate if value should change and change if necessary.
                if (shouldIChange(g, randNode)):
                    newValue = (g.node[randNode]['value'] + 1) % 2  # flip 0 <-> 1
                    g.add_node(randNode, value=newValue)
                    nStayedSame = 0
                else:
                    nStayedSame = nStayedSame + 1
            # Only the initial and final states are written, to save space.
            # (Indent this under the while loop to log every generation.)
            data = {'iteration': i, 'gen': count, 'influenceMoveCount': count,
                    '0:1 Distribution': zeroToOne(g)}
            csvwr.writerow(data)
            # Final snapshot is gated, and only lands on whole-N generations.
            if output_json_graphs and count % numNodes == 0:
                save_to_jsonfile(fileName + '_iter_' + str(i) + '_gen_' + str(count) + '.json', g)
if __name__ == '__main__':
    args = parser.parse_args()
    if debug_mode:
        print("Create network")
    # The -1/0/1 skew flags index beta_params directly: 0 -> [4,4],
    # 1 -> [2.5,3.5], and -1 -> the last entry [3.5,2.5] via Python's
    # negative indexing.  Params: extraversion (a,b) + conformity (a,b)
    # + [trait correlation, 900].
    params = beta_params[int(args.extraversion)] + beta_params[int(args.conformity)] + [ext_conf_corr, 900]
    G = human_social_network_iterations_correlated((30, 30), 50, False, *params)
    if debug_mode:
        print("Run DSIT")
    # str() guards the filename concatenation: argparse yields strings for
    # user-supplied flags, but the int defaults (reachable when debug_mode
    # relaxes 'required') would otherwise raise TypeError here.
    simulate(G,
             data_folder + 'graph_ext_' + str(args.extraversion)
             + '_conf_' + str(args.conformity)
             + '_simnum_' + str(args.sim_num),
             int(args.iterations))