修訂 | 3f535e30889fb692dfee04aeb5423599bd902db7 (tree) |
---|---|
時間 | 2010-01-11 01:55:33 |
作者 | lorenzo |
Committer | lorenzo
A small script (to be integrated into the older code) to generate the pairs of IDs associated with
each contact (NOT the timeslice — the real, uninterrupted contact).
@@ -0,0 +1,50 @@ | ||
1 | +#!/usr/bin/env python | |
2 | +import scipy as s | |
3 | +import pylab as p | |
4 | +import numpy as n | |
5 | +import sys | |
6 | +import string | |
7 | + | |
def my_hash(arr):
    """Pack a pair of IDs into a single 64-bit integer.

    Operates on values that have already combined tag_id and bootcount
    (each taking 16 bits), so each element fits in 32 bits.

    Recover the parts with:
        my_hash(arr) >> 32          -> first (smaller) element
        my_hash(arr) & 0xFFFFFFFF   -> second (larger) element

    The pair is sorted first, so the hash is order-independent.
    (The rows of sliced data passed in here are reportedly already
    sorted elsewhere, so the sort may be redundant — kept for safety.)
    """
    # NOTE: scipy.sort (a top-level NumPy alias) was removed in SciPy 1.x;
    # use numpy directly for the same behavior.
    arr = n.sort(arr)

    return (arr[0] << 32) | arr[1]
18 | + | |
19 | + | |
def my_unhash(longnum):
    """Inverse of my_hash: split a 64-bit packed value into its two IDs.

    Returns a length-2 int64 array: [high 32 bits, low 32 bits].
    """
    # Allocate directly as int64 (scipy.zeros was removed in SciPy 1.x,
    # and zeros(...).astype(...) allocated twice).
    arr_out = n.zeros(2, dtype="int64")

    arr_out[0] = longnum >> 32          # first element of the original pair
    arr_out[1] = longnum & 0xFFFFFFFF   # second element of the original pair

    return arr_out
28 | + | |
29 | + | |
30 | + | |
31 | + | |
32 | + | |
33 | + | |
# --- Script body: decode a file of packed (hashed) contact IDs back into
# --- ID pairs and save both a 2-column and a 1-column version.
# Usage: <script> <file-of-hashed-interactions>
#
# Modernized from Python-2-only constructs (`print` statement, `xrange`,
# `string.split`, list-returning `map`) and from the removed top-level
# SciPy aliases (scipy.array/scipy.zeros); behavior is unchanged.

# Read whitespace-separated integers, one hashed interaction per line.
with open(sys.argv[1]) as f:
    hashed_interactions = [[int(tok) for tok in line.split()] for line in f]

hashed_interactions = n.array(hashed_interactions, dtype="int64")

# Each packed value decodes to a (first ID, second ID) pair.
decoupled_arr = n.zeros((len(hashed_interactions), 2), dtype="int64")

for i in range(len(hashed_interactions)):
    decoupled_arr[i, :] = my_unhash(hashed_interactions[i])

n.savetxt("decoupled_hash_for_contact_durations_1_.dat", decoupled_arr, fmt='%d')
# Same data flattened to one value per line.
n.savetxt("decoupled_hash_1d_for_contact_durations_1_.dat",
          decoupled_arr.reshape(-1, 1), fmt='%d')

print("So far so good")