| Rev. | feb0096336fc5b85e6e43481b48a74591cfc504a |
|---|---|
| Tamanho | 1,992 bytes |
| Hora | 2009-11-10 21:42:33 |
| Autor | lorenzo |
| Mensagem de Log | I modified this code which now provides an alternative (and equivalent) method |
#!/usr/bin/env python
import scipy as s
import pylab as p
import numpy as n
import sys
import string
def get_duration(presence_list, delta_slice):
    """Return the per-tag visit durations found in presence_list.

    Parameters:
        presence_list: 2-column integer array; column 0 is a timestamp,
            column 1 is a tag id (one row per observation).
        delta_slice: duration of a single time slice, in seconds, added
            to every tag's observed time span.

    Returns a 1-D integer array with one duration per distinct tag id
    (in ascending tag-id order), with non-positive entries dropped.
    """
    # n.unique replaces the removed alias s.unique1d; plain NumPy is used
    # throughout instead of the deprecated NumPy aliases in the scipy namespace.
    tag_id_list = n.unique(presence_list[:, 1])
    # Explicit zero-filled buffer (the original used arange as a throwaway
    # integer buffer, which obscured that every slot gets overwritten).
    duration_list = n.zeros(len(tag_id_list), dtype="int64")
    for i in range(len(tag_id_list)):
        tag_sel = n.where(presence_list[:, 1] == tag_id_list[i])[0]
        times = presence_list[tag_sel, 0]
        duration_list[i] = times.max() - times.min() + delta_slice
    # A tag observed only once has span 0, so its duration collapses to
    # delta_slice; when that is not positive the visit duration is
    # undefined, hence non-positive entries are filtered out.
    sel = n.where(duration_list > 0)
    duration_list = duration_list[sel]
    return (duration_list)
def duplicate_and_mix_array(my_2d_arr):
    """Unfold a 3-column time-dependent edge list into 2-column form.

    Each input row (t, a, b) becomes two consecutive output rows
    (t, a) and (t, b), which casts the edge list into a shape more
    similar to the bootcount (presence) list.

    Parameters:
        my_2d_arr: array with at least 3 columns: time, node a, node b.

    Returns an int64 array of shape (2 * len(my_2d_arr), 2).
    """
    n_rows = len(my_2d_arr)
    # Allocate with the target dtype directly instead of the original
    # float zeros + reshape + astype round-trip; strided slices replace
    # the explicit even/odd index arrays.
    new_arr = n.zeros((2 * n_rows, 2), dtype="int64")
    new_arr[0::2, 0] = my_2d_arr[:, 0]  # even rows: time, node a
    new_arr[1::2, 0] = my_2d_arr[:, 0]  # odd rows: same time, node b
    new_arr[0::2, 1] = my_2d_arr[:, 1]
    new_arr[1::2, 1] = my_2d_arr[:, 2]
    return (new_arr)
read_edgelist = 1  # tells whether the data are already in the form of an edge list or not
filename = sys.argv[1]
# Each input line: whitespace-separated integers (a timestamp followed by ids).
# Context manager replaces the bare open/close pair; str.split replaces the
# long-deprecated string.split module function.
with open(filename) as f:
    presence_list = [[int(tok) for tok in line.split()] for line in f]
presence_list = n.array(presence_list, dtype="int64")
delta_slice = 20  # number of seconds in one time slice
if (read_edgelist != 1):
    # Input is already a 2-column presence list.
    visit_duration = get_duration(presence_list, delta_slice)
    n.savetxt("visit_duration_afresh.dat", visit_duration, fmt='%d')
else:
    # Input is a 3-column edge list: unfold it to presence-list shape first.
    presence_list = duplicate_and_mix_array(presence_list)
    n.savetxt("presence_list_afresh.dat", presence_list, fmt='%d')
    visit_duration = get_duration(presence_list, delta_slice)
    n.savetxt("visit_duration_afresh.dat", visit_duration, fmt='%d')
print("So far so good")