Commit 70763f1

committed
exercise 3
1 parent fad1511 commit 70763f1

File tree

6 files changed: +1082 -0 lines changed
Binary file (57.9 KB) not shown.
Binary file (34.9 KB) not shown.

1_monte_carlo/generate_data.py

Lines changed: 212 additions & 0 deletions
@@ -0,0 +1,212 @@
import numpy as np
import scipy
from scipy.sparse.csgraph import connected_components
from scipy.sparse import csr_matrix
import time
from numba import jit

import pickle  # for input/output


def init_system(Lx, Ly):
    """Determine the bond array and an initial state of spins"""
    N = Lx * Ly

    def xy_to_n(x, y):
        return x*Ly + y

    def n_to_xy(n):
        return n // Ly, np.mod(n, Ly)

    # easy way:
    bonds = []
    for x in range(Lx):
        for y in range(Ly):
            n = xy_to_n(x, y)
            m1 = xy_to_n((x+1) % Lx, y)
            m2 = xy_to_n(x, (y+1) % Ly)
            bonds.append([n, m1])
            bonds.append([n, m2])
    bonds = np.array(bonds)
    spins = np.random.randint(0, 2, size=(N,))*2 - 1
    return spins, bonds, N


# In[3]:


# part c)
@jit(nopython=True)
def get_weights(spins, bonds, T):
    weights = np.zeros(len(bonds))
    p = np.exp(-2./T)  # set J = 1
    for b in range(len(bonds)):
        n = bonds[b, 0]
        m = bonds[b, 1]
        #if spins[n] != spins[m]:
        #    weights[b] = 0.
        #else:
        #    if np.random.rand() < p:
        #        weights[b] = 0.
        #    else:
        #        weights[b] = 1.
        if spins[n] == spins[m] and np.random.rand() > p:
            weights[b] = 1.
    return weights


# In[4]:


# part d)
@jit(nopython=True)
def flip_spins(spins, N_components, labels):
    flip_cluster = np.random.random(N_components) < 0.5  # N_components True/False values with 50/50 chance
    for n in range(len(spins)):
        cluster = labels[n]
        if flip_cluster[cluster]:
            spins[n] = -spins[n]
    # done


# In[5]:


def swendsen_wang_update(spins, bonds, T):
    """Perform one update of the Swendsen-Wang algorithm"""
    N = len(spins)
    weights = get_weights(spins, bonds, T)
    graph = csr_matrix((weights, (bonds[:, 0], bonds[:, 1])), shape=(N, N))
    graph += csr_matrix((weights, (bonds[:, 1], bonds[:, 0])), shape=(N, N))
    N_components, labels = connected_components(graph, directed=False)
    flip_spins(spins, N_components, labels)


# In[6]:


@jit(nopython=True)
def energy(spins, bonds):
    Nbonds = len(bonds)
    energy = 0.
    for b in range(Nbonds):
        energy -= spins[bonds[b, 0]] * spins[bonds[b, 1]]
    return energy


def energy2(spins, bonds):
    """alternative implementation, gives the same results, but does not require jit to be fast"""
    return -1. * np.sum(spins[bonds[:, 0]] * spins[bonds[:, 1]])


def magnetization(spins):
    return np.sum(spins)


# In[7]:

#########
# starting from here, I changed stuff compared to the notebook sol2_swendsen_wang.ipynb


def simulation(spins, bonds, T, N_measure=100):
    """Perform a Monte Carlo simulation at a given temperature"""
    # no thermalization here
    Es = []
    Ms = []
    for n in range(N_measure):
        swendsen_wang_update(spins, bonds, T)
        Es.append(energy(spins, bonds))
        Ms.append(magnetization(spins))
    return np.array(Es), np.array(Ms)


# The full simulation at different temperatures
def gen_data_L(Ts, L, N_measure=10000, N_bins=10):
    print("generate data for L = {L: 3d}".format(L=L), flush=True)
    assert(N_measure//N_bins >= 10)
    spins, bonds, N = init_system(L, L)
    spins = np.random.randint(0, 2, size=(N,))*2 - 1
    obs = ['E', 'C', 'M', 'absM', 'chi', 'UB']
    data = dict((key, []) for key in obs)
    t0 = time.time()
    for T in Ts:
        if N_measure > 1000:
            print("simulating L={L: 3d}, T={T:.3f}".format(L=L, T=T), flush=True)
        # thermalize. Rule of thumb: spend ~10-20% of the simulation time without measurement
        simulation(spins, bonds, T, N_measure//10)
        # simulate with measurements
        bins = dict((key, []) for key in obs)
        for b in range(N_bins):
            E, M = simulation(spins, bonds, T, N_measure//N_bins)
            bins['E'].append(np.mean(E)/N)
            bins['C'].append(np.var(E)/(T**2*N))
            bins['M'].append(np.mean(M)/N)
            bins['absM'].append(np.mean(np.abs(M))/N)
            bins['chi'].append(np.var(np.abs(M))/(T*N))
            bins['UB'].append(1.5*(1. - np.mean(M**4)/(3.*np.mean(M**2)**2)))
        for key in obs:
            bin = bins[key]
            data[key].append((np.mean(bin), np.std(bin)/np.sqrt(N_bins)))
    print("generating data for L ={L: 3d} took {t: 6.1f}s".format(L=L, t=time.time()-t0))
    # convert into arrays
    for key in obs:
        data[key] = np.array(data[key])
    # good practice: save meta-data along with the original data
    data['L'] = L
    data['observables'] = obs
    data['Ts'] = Ts
    data['N_measure'] = N_measure
    data['N_bins'] = N_bins
    return data


def save_data(filename, data):
    """Save an (almost) arbitrary python object to disc."""
    with open(filename, 'wb') as f:
        pickle.dump(data, f)
    # done


def load_data(filename):
    """Load and return data saved to disc with the function `save_data`."""
    with open(filename, 'rb') as f:
        data = pickle.load(f)
    return data


if __name__ == "__main__":
    Tc_guess = None
    # Tc_guess = 2.27  # good guess for the 2D Ising model; uncomment this to get
    #                  # many T-points around this value for large L (-> long runtime!)
    if Tc_guess is None:
        N_measure = 1000  # just a quick guess
        Ls = [8, 16, 32]
        output_filename = 'data_ising_square.pkl'
    else:
        N_measure = 10000
        Ls = [8, 16, 32, 64, 128, 256]
        output_filename = 'data_ising_square_largeL.pkl'
    data = dict()
    for L in Ls:
        if Tc_guess is None:
            # no guess for Tc available -> scan a wide range to get a first guess
            Ts = np.linspace(1., 4., 50)
        else:
            # choose T-values L-dependent: more points around Tc
            Ts = np.linspace(Tc_guess - 0.5, Tc_guess + 0.5, 21)
            Ts = np.append(Ts, np.linspace(Tc_guess - 8./L, Tc_guess + 8./L, 50))
        Ts = np.sort(Ts)[::-1]
        data[L] = gen_data_L(Ts, L, N_measure)
    data['Ls'] = Ls
    save_data(output_filename, data)
    # data structure:
    # data = {'Ls': [8, 16, ...],
    #         8: {'observables': ['E', 'M', 'C', ...],
    #             'Ts': (np.array of temperature values),
    #             'E': (np.array of mean & error, shape (len(Ts), 2)),
    #             'C': (np.array of mean & error, shape (len(Ts), 2)),
    #             ... (further observables & metadata)
    #            }
    #         ... (further L values with same structure)
    #        }

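The data-structure comment at the end of the script indicates how the pickle file can be read back for plotting. Below is a minimal sketch of such a read-back; matplotlib is an assumption here (the script itself only uses numpy, scipy, numba and pickle), the filename 'data_ising_square.pkl' and the keys 'Ls', 'Ts', 'absM', 'UB' come from the script above.

import pickle
import matplotlib.pyplot as plt

with open('data_ising_square.pkl', 'rb') as f:  # written by save_data() in generate_data.py
    data = pickle.load(f)

fig, axes = plt.subplots(1, 2, figsize=(10, 4))
for L in data['Ls']:
    Ts = data[L]['Ts']
    for ax, key, label in zip(axes, ['absM', 'UB'], [r'$\langle |m| \rangle$', r'$U_B$']):
        mean, err = data[L][key].T  # each observable is stored as (mean, error) per temperature
        ax.errorbar(Ts, mean, yerr=err, label="L={L:d}".format(L=L))
        ax.set_xlabel("T")
        ax.set_ylabel(label)
axes[0].legend()
plt.tight_layout()
plt.show()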
1_monte_carlo/hw03.pdf

Binary file (139 KB) not shown.

1_monte_carlo/sol3_finite_size_scaling.ipynb

Lines changed: 594 additions & 0 deletions
Large diffs are not rendered by default.
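Since the notebook's diff is not rendered, here is a rough, hypothetical sketch of the kind of finite-size data collapse one could attempt with the stored observables (not necessarily what the notebook actually does), assuming the exact 2D Ising values Tc = 2/ln(1+sqrt(2)) ≈ 2.269, beta = 1/8, nu = 1, and matplotlib for plotting:

import numpy as np
import pickle
import matplotlib.pyplot as plt

Tc = 2. / np.log(1. + np.sqrt(2.))  # exact critical temperature of the 2D Ising model
beta, nu = 1. / 8., 1.              # exact 2D Ising critical exponents

with open('data_ising_square.pkl', 'rb') as f:
    data = pickle.load(f)

for L in data['Ls']:
    Ts = data[L]['Ts']
    absM, absM_err = data[L]['absM'].T      # columns: mean and error per temperature
    x = (Ts - Tc) * L**(1. / nu)            # rescaled distance from criticality
    y = absM * L**(beta / nu)               # rescaled order parameter
    plt.errorbar(x, y, yerr=absM_err * L**(beta / nu), label="L={L:d}".format(L=L))
plt.xlabel(r"$(T - T_c)\, L^{1/\nu}$")
plt.ylabel(r"$\langle |m| \rangle\, L^{\beta/\nu}$")
plt.legend()
plt.show()

If the exponents and Tc are chosen correctly, the curves for different L should fall on top of each other near x = 0.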

1_monte_carlo/sol3_finite_size_scaling_triangular.ipynb

Lines changed: 276 additions & 0 deletions
Large diffs are not rendered by default.
