#!/usr/bin/python
'''
1. Install Python and numpy.
2. Create an input comma-separated file whose columns are, in order:
   Name, ra, dec, distance (ra and dec in degrees, distance in Mpc;
   no header line).
3. The catalogs cluster.dat, 2mass_group.dat, sdss_group.dat and
   Karachentsev.dat are expected to be in the same directory.
4. Run this script as: python ScreeningThreadedAscii.py
5. It will ask for the input catalog, the output file name (default
   ScreeningOut.cat) and the number of threads (default 5).
6. The output catalog has the columns: Name, ra, dec, distance to the
   object, distance to the closest object, and the external potential
   for Compton scales of 10 Mpc (phi10), 8 Mpc, ... down to 1 Mpc.
'''
import numpy as np
import threading
import os
import gc
import csv

if __name__ == '__main__':

    def ScreeningFunc(incata, outcata):
        '''Compute the external potential for every object in one input chunk.'''
        G = 4.3e-3                            # gravitational constant
        scales = [10., 8., 5., 3., 2., 1.]    # Compton scales in Mpc (phi10 ... phi1)
        ii = 1
        DisOn = 0   # flag: should this neighbour be added to the potential?
        outfh = open(outcata, 'w', newline='')
        f = csv.writer(outfh, delimiter=',')
        f.writerow(['Name', 'ra', 'dec', 'd', 'mind',
                    'phi10', 'phi8', 'phi5', 'phi3', 'phi2', 'phi1'])
        for l in open(incata):
            l = l.strip()
            if not l:          # skip blank lines left over from the file split
                continue
            row = l.split(',')
            ra1, dec1, R1 = float(row[1]), float(row[2]), float(row[3])
            d = []                        # separations to contributing neighbours
            pots = [0.0] * len(scales)    # external potential per Compton scale
            print(ii)
            # Coarse pre-selection of neighbours within 5 deg in RA and Dec and
            # 10 Mpc in distance (a symmetric window, via np.abs, is assumed to
            # be the intent here).
            con = (np.abs(AllRa - ra1) < 5.0) & \
                  (np.abs(AllDec - dec1) < 5.0) & \
                  (np.abs(AllDis - R1) < 10)
            SubRa = AllRa[con]
            SubDec = AllDec[con]
            SubDis = AllDis[con]
            SubMass = AllMass[con]
            ra1 = ra1 * np.pi / 180.0
            dec1 = dec1 * np.pi / 180.0
            for k in range(SubRa.shape[0]):
                ra2, dec2, R2, M = SubRa[k], SubDec[k], SubDis[k], SubMass[k]
                ra2 = ra2 * np.pi / 180.0
                dec2 = dec2 * np.pi / 180.0
                # angular separation (spherical law of cosines)
                ang = np.arccos(np.cos(dec1) * np.cos(dec2) * np.cos(ra1 - ra2) +
                                np.sin(dec1) * np.sin(dec2))
                if R1 < 10:
                    # nearby targets: full 3-D separation
                    dis = np.sqrt(R1**2.0 + R2**2.0 - 2.0 * R1 * R2 * np.cos(ang))
                    DisOn = 1
                else:
                    # distant targets: approximate transverse separation
                    if abs(R1 - R2) < 10.:
                        dis = 1.41 * R1 * ang
                        DisOn = 1
                if DisOn:
                    DisOn = 0
                    d.append(dis)
                    # Virial radius in pc, taking it to be the radius within which
                    # the average density is 200 times the critical density of the
                    # universe.
                    VirialR = (3. * M / (4. * np.pi * 200 * 6.75e-8))**(1 / 3.)
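                    # Unit conventions (inferred from the constants used here):
                    # G = 4.3e-3 is Newton's constant in pc (km/s)^2 / Msun and
                    # 300000 km/s is the speed of light, so with masses in Msun
                    # and separations converted from Mpc to pc (dis * 1e6) the
                    # sums below give the dimensionless potential Phi / c^2;
                    # 6.75e-8 is read as the critical density in Msun / pc^3 for
                    # the adopted Hubble constant.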
                    VirialR = VirialR / 1.0e6   # convert pc -> Mpc
                    for si, scale in enumerate(scales):
                        # Add this neighbour to the potential for every Compton
                        # scale it falls within; the 100 pc lower bound makes sure
                        # the object itself is not used to find its own potential.
                        if dis < scale + VirialR and dis * 1e6 > 100:
                            pots[si] += G * M / (300000**2.0 * dis * 1e6)
            # distance to the closest contributing neighbour (-9999 if none)
            mind = min(d) if d else -9999
            f.writerow([row[0], ra1 * 180 / np.pi, dec1 * 180 / np.pi, R1, mind] + pots)
            ii += 1
        outfh.close()

    # Remove chunk and partial-result files left over from a previous run.
    os.system('rm -f ox* x*')

    incata = input('In catalog > ')
    outcata = input('Output catalog > ')
    no_of_threads = input('No of threads > ')
    if len(outcata) == 0:
        outcata = 'ScreeningOut.cat'
    if len(incata) == 0:
        incata = 'test.csv'
    if len(no_of_threads) == 0:
        no_of_threads = 5
    no_of_threads = int(no_of_threads)

    # Split the input catalog into one chunk file (x00, x01, ...) per thread.
    indata = open(incata, 'r').read().split('\n')
    nLs = int(np.ceil(len(indata) / float(no_of_threads)))   # lines per chunk
    for kk in range(no_of_threads):
        odata = indata[kk * nLs: (kk + 1) * nLs]
        xf = open('x%02d' % kk, 'w')
        xf.write('\n'.join(odata))
        xf.close()

    # Read the catalogs of massive objects; columns are ra, dec, distance, mass.
    clusters = np.genfromtxt('cluster.dat')
    mass2 = np.genfromtxt('2mass_group.dat')
    sdss = np.genfromtxt('sdss_group.dat')
    kara = np.genfromtxt('Karachentsev.dat')
    AllCatalog = np.concatenate((clusters, mass2, sdss, kara))
    AllRa = AllCatalog[:, 0]
    AllDec = AllCatalog[:, 1]
    AllDis = AllCatalog[:, 2]
    AllMass = AllCatalog[:, 3]
    clusters, mass2, sdss, kara, AllCatalog = 0, 0, 0, 0, 0
    gc.collect()

    # One worker thread per chunk; each writes its own partial result file.
    threads_arr = []
    for i in range(no_of_threads):
        ic = 'x%02d' % i
        oc = 'ox%02d.txt' % i
        t = threading.Thread(target=ScreeningFunc, args=(ic, oc))
        t.start()
        threads_arr.append(t)
    for th in threads_arr:
        th.join()

    # Merge the partial results into the final output catalog.
    os.system('cat ox*txt > %s' % outcata)
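
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original pipeline): the helper below
# writes a tiny input catalog in the format this script expects -- comma
# separated Name,ra,dec,distance rows with ra/dec in degrees and distance in
# Mpc, and no header line.  The object names and coordinates are made up
# purely for demonstration; the function is never called by the script.
def _write_example_catalog(path='test.csv'):
    rows = [('EXAMPLE_A', 150.0000, 2.2000, 17.5),    # hypothetical object
            ('EXAMPLE_B', 210.3000, -5.1000, 42.0)]   # hypothetical object
    with open(path, 'w') as fh:
        for name, ra, dec, dist in rows:
            fh.write('%s,%.4f,%.4f,%.2f\n' % (name, ra, dec, dist))

# A typical interactive run would then look like:
#   $ python ScreeningThreadedAscii.py
#   In catalog > test.csv
#   Output catalog > ScreeningOut.cat
#   No of threads > 5
# ---------------------------------------------------------------------------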