|
GAMBIT
v1.5.0-2191-ga4742ac
a Global And Modular BSM Inference Tool
|
|
def | get_dset_lengths (d, group, dsets) |
|
def | check_lengths (d) |
|
def | copy_dset (indset, outdset, nextempty, basemsg="") |
|
def | copy_dset_whole (indset, outdset, nextempty, basemsg="") |
|
def | cantor_pairing (x, y) |
|
def | check_for_duplicates (fout, group) |
|
Tools for reading/writing data from HDF5 files
◆ cantor_pairing()
def hdf5tools.cantor_pairing |
( |
|
x, |
|
|
|
y |
|
) |
| |
◆ check_for_duplicates()
def hdf5tools.check_for_duplicates |
( |
|
fout, |
|
|
|
group |
|
) |
| |
Definition at line 71 of file hdf5tools.py.
References cantor_pairing(), and Gambit::Scanner.zip().
Referenced by Gambit::DescriptionDatabase.loadFile().
72 pointIDs_out = fout[group][ "pointID"] 73 mpiranks_out = fout[group][ "MPIrank"] 74 pointIDs_isvalid_out = np.array(fout[group][ "pointID_isvalid"][:],dtype=np.bool) 75 mpiranks_isvalid_out = np.array(fout[group][ "MPIrank_isvalid"][:],dtype=np.bool) 76 mask_out = (pointIDs_isvalid_out & mpiranks_isvalid_out) 79 np.array(pointIDs_out[mask_out],dtype=np.longlong), 80 np.array(mpiranks_out[mask_out],dtype=np.longlong) 83 pid = pointIDs_out[mask_out] 84 rank = mpiranks_out[mask_out] 86 for ID,p,r in zip(ids,pid,rank): 88 print " Spotted first entry ({0},{1})".format(r,p) 89 Nmatches = np.sum(ID==ids) 91 print " Error! ID", ID, "is duplicated {0} times!".format(Nmatches) 93 matches = (p==pid) & (r==rank) 94 Nmatches2 = np.sum(matches) 96 print " ...MPIrank/pointID ({0},{1}) duplicate count: {2}".format(r,p,Nmatches2) 97 dup_locs = np.where(matches) 98 print " Indices of duplicates are:", dup_locs 100 print " ...No duplicate pid and rank pairs detected! This seems to indicate that something is screwed up in the Cantor pairing" 103 raise ValueError( "Duplicates detected in output dataset!") 107
auto zip(const T &... containers) -> boost::iterator_range< boost::zip_iterator< decltype(boost::make_tuple(std::begin(containers)...))>> Use to combine containers in a range-based loop: for (auto &&x : zip(a, b)){...}.
◆ check_lengths()
def hdf5tools.check_lengths |
( |
|
d | ) |
|
Definition at line 28 of file hdf5tools.py.
30 for key,value in d.items(): 34 raise ValueError( "Length of dataset '{0}' is inconsistent with the others in the target group! (length was {1}; previous dataset had length={2})".format(key,value,length))
◆ copy_dset()
def hdf5tools.copy_dset |
( |
|
indset, |
|
|
|
outdset, |
|
|
|
nextempty, |
|
|
|
basemsg = "" |
|
) |
| |
Definition at line 40 of file hdf5tools.py.
References int.
40 def copy_dset(indset,outdset,nextempty,basemsg=""): 41 lengthtocopy = indset.shape[0] 42 chunksleft = math.ceil(lengthtocopy/float(chunksize)) 43 remainder = lengthtocopy % chunksize 48 if(chunksleft % 1 == 0): 49 sys.stdout.write( "{0}: {1}% \r".format(basemsg, int(100*(Nchunks-chunksleft)/Nchunks))) 51 if(remainder!=0 and chunksleft==1): 53 outdset[nextempty+start:nextempty+start+stride] = indset[start:start+stride]
◆ copy_dset_whole()
def hdf5tools.copy_dset_whole |
( |
|
indset, |
|
|
|
outdset, |
|
|
|
nextempty, |
|
|
|
basemsg = "" |
|
) |
| |
Definition at line 59 of file hdf5tools.py.
60 lengthtocopy = indset.shape[0] 62 sys.stdout.write( "{0} \r".format(basemsg)) 64 outdset[nextempty:nextempty+lengthtocopy] = indset
◆ get_dset_lengths()
def hdf5tools.get_dset_lengths |
( |
|
d, |
|
|
|
group, |
|
|
|
dsets |
|
) |
| |
Definition at line 14 of file hdf5tools.py.
15 for itemname in group: 16 item = group[itemname] 17 if isinstance(item,h5py.Dataset): 19 dsets.add((itemname,item.dtype)) 21 d[itemname] += item.shape[0] 23 d[itemname] = item.shape[0] 24 if isinstance(item,h5py.Group):
◆ bufferlength
int hdf5tools.bufferlength = 100 |
◆ chunksize
int hdf5tools.chunksize = 1000 |
◆ max_ppidpairs
|