
Python - crash when running an HDF5 file

I am currently running a Python file from my terminal. Specifically, I am testing a script that I use to make sure everything runs as expected.

I run into trouble when I try to open the file. It contains:

from os.path import isfile 
import numpy as np 
import h5py 

def gcPath(basePath,snapNum,chunkNum=0): 
    """ Return absolute path to a group catalog HDF5 file (modify as needed). """ 
    gcPath = basePath + '/groups_%03d/' % snapNum 
    filePath1 = gcPath + 'groups_%03d.%d.hdf5' % (snapNum, chunkNum) 
    filePath2 = gcPath + 'fof_subhalo_tab_%03d.%d.hdf5' % (snapNum, chunkNum) 

    if isfile(filePath1): 
     return filePath1 
    return filePath2 

def offsetPath(basePath, snapNum): 
    """ Return absolute path to a separate offset file (modify as needed). """ 
    offsetPath = basePath + '../postprocessing/offsets/offsets_%03d.hdf5' % snapNum 

    return offsetPath 

def loadObjects(basePath,snapNum,gName,nName,fields): 
    """ Load either halo or subhalo information from the group catalog. """ 
    result = {} 

    # make sure fields is not a single element 
    if isinstance(fields, basestring): 
     fields = [fields] 

    # load header from first chunk 
    with h5py.File(gcPath(basePath,snapNum),'r') as f: 

     header = dict(f['Header'].attrs.items()) 
     result['count'] = f['Header'].attrs['N'+nName+'_Total'] 

     if not result['count']: 
      print 'warning: zero groups, empty return (snap='+str(snapNum)+').' 
      return result 

     # if fields not specified, load everything 
     if not fields: 
      fields = f[gName].keys() 

     for field in fields: 
      # verify existence 
      if not field in f[gName].keys(): 
       raise Exception("Group catalog does not have requested field ["+field+"]!") 

      # replace local length with global 
      shape = list(f[gName][field].shape) 
      shape[0] = result['count'] 

      # allocate within return dict 
      result[field] = np.zeros(shape, dtype=f[gName][field].dtype) 

    # loop over chunks 
    wOffset = 0 

    for i in range(header['NumFiles']): 
     f = h5py.File(gcPath(basePath,snapNum,i),'r') 

     if not f['Header'].attrs['N'+nName+'_ThisFile']: 
      f.close() 
      continue # empty file chunk 

     # loop over each requested field 
     for field in fields: 
      # shape and type 
      shape = f[gName][field].shape 

      # read data local to the current file 
      if len(shape) == 1: 
       result[field][wOffset:wOffset+shape[0]] = f[gName][field][0:shape[0]] 
      else: 
       result[field][wOffset:wOffset+shape[0],:] = f[gName][field][0:shape[0],:] 


     wOffset += shape[0] 
     f.close() 

    # only a single field? then return the array instead of a single item dict 
    if len(fields) == 1: 
     return result[fields[0]] 

    return result 

def loadSubhalos(basePath,snapNum,fields=None): 
    """ Load all subhalo information from the entire group catalog for one snapshot 
     (optionally restrict to a subset given by fields). """ 

    return loadObjects(basePath,snapNum,"Subhalo","subgroups",fields) 

def loadHalos(basePath,snapNum,fields=None): 
    """ Load all halo information from the entire group catalog for one snapshot 
     (optionally restrict to a subset given by fields). """ 

    return loadObjects(basePath,snapNum,"Group","groups",fields) 

def loadHeader(basePath,snapNum): 
    """ Load the group catalog header. """ 
    with h5py.File(gcPath(basePath,snapNum),'r') as f: 
     header = dict(f['Header'].attrs.items()) 

    return header 

def load(basePath,snapNum): 
    """ Load complete group catalog all at once. """ 
    r = {} 
    r['subhalos'] = loadSubhalos(basePath,snapNum) 
    r['halos'] = loadHalos(basePath,snapNum) 
    r['header'] = loadHeader(basePath,snapNum) 
    return r 

def loadSingle(basePath,snapNum,haloID=-1,subhaloID=-1): 
    """ Return complete group catalog information for one halo or subhalo. """ 
    if (haloID < 0 and subhaloID < 0) or (haloID >= 0 and subhaloID >= 0): 
     raise Exception("Must specify either haloID or subhaloID (and not both).") 

    gName = "Subhalo" if subhaloID >= 0 else "Group" 
    searchID = subhaloID if subhaloID >= 0 else haloID 

    # old or new format 
    if 'fof_subhalo' in gcPath(basePath,snapNum): 
     # use separate 'offsets_nnn.hdf5' files 
     with h5py.File(offsetPath(basePath,snapNum),'r') as f: 
      offsets = f['FileOffsets/'+gName][()] 
    else: 
     # use header of group catalog 
     with h5py.File(gcPath(basePath,snapNum),'r') as f: 
      offsets = f['Header'].attrs['FileOffsets_'+gName] 

    offsets = searchID - offsets 
    fileNum = np.max(np.where(offsets >= 0)) 
    groupOffset = offsets[fileNum] 

    # load halo/subhalo fields into a dict 
    result = {} 

    with h5py.File(gcPath(basePath,snapNum,fileNum),'r') as f: 
     for haloProp in f[gName].keys(): 
      result[haloProp] = f[gName][haloProp][groupOffset] 

    return result 
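
For reference, here is a minimal usage sketch of these helpers. The base path, snapshot number, and field name below are placeholders, and an Illustris-style group catalog layout is assumed:

import groupcat   # the module above, saved as groupcat.py

basePath = '/Users/me/sims/Illustris-3/output'   # placeholder path
snapNum = 135                                    # placeholder snapshot number

# load one field for all subhalos (a single field returns a bare array),
# plus the catalog header
masses = groupcat.loadSubhalos(basePath, snapNum, fields='SubhaloMass')
header = groupcat.loadHeader(basePath, snapNum)
print(masses.shape)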

Basically, the script fetches the HDF5 files defined for analysis from my user directory. When I run it through my Python environment (I am using Jupyter, which came with a large list of preinstalled packages), I get:

HDF5-DIAG: Error detected in HDF5 (1.8.13) thread 0: 
    #000: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #001: H5Tconv.c line 8622 in H5T__conv_double_ulong(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #002: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #003: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #004: H5Tconv.c line 8568 in H5T__conv_float_ulong(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #005: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #006: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #007: H5Tconv.c line 8650 in H5T__conv_ldouble_long(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #008: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #009: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #010: H5Tconv.c line 8595 in H5T__conv_double_long(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #011: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #012: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #013: H5Tconv.c line 8541 in H5T__conv_float_long(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #014: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #015: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #016: H5Tconv.c line 7858 in H5T__conv_ulong_ldouble(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #017: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #018: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #019: H5Tconv.c line 7831 in H5T__conv_ulong_double(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #020: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #021: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #022: H5Tconv.c line 7804 in H5T__conv_ulong_float(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #023: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #024: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #025: H5Tconv.c line 7777 in H5T__conv_long_ldouble(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #026: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #027: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #028: H5Tconv.c line 7751 in H5T__conv_long_double(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 
    #029: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path 
    major: Datatype 
    minor: Unable to initialize object 
    #030: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function 
    major: Datatype 
    minor: Unable to initialize object 
    #031: H5Tconv.c line 7726 in H5T__conv_long_float(): disagreement about datatype size 
    major: Datatype 
    minor: Unable to initialize object 


Traceback (most recent call last): 
  File "groupcat.py", line 6, in <module> 
    import h5py 
  File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/h5py/__init__.py", line 23, in <module> 
    from . import _conv 
  File "h5py/h5t.pxd", line 14, in init h5py._conv (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/_conv.c:6961) 
  File "h5py/h5t.pyx", line 139, in init h5py.h5t (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:20285) 
  File "h5py/h5t.pyx", line 73, in h5py.h5t.lockid (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:2565) 
  File "h5py/h5t.pyx", line 42, in h5py.h5t.typewrap (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:2199) 
TypeError: Unable to initialize conversion function (Disagreement about datatype size) 
Segmentation fault: 11 

This is what comes back, so perhaps the problem is that some particular package is not installed. Looking at the traceback, though, the issue appears to be h5py, and h5py is already installed.
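
One quick sanity check here is to confirm which Python and which h5py/HDF5 build are actually being loaded, since a "disagreement about datatype size" at import time usually points at a mismatch between the HDF5 library h5py was compiled against and the one found at runtime. A small diagnostic sketch (it assumes the import gets far enough to query the version module):

import sys
print(sys.version)                 # which Python interpreter is running

import h5py
print(h5py.__version__)            # installed h5py package version
print(h5py.version.hdf5_version)   # HDF5 library version h5py was built against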


Try the 'driver' parameter: h5py.File(fname, mode='r', driver='core') – Chr
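
In context, that suggestion amounts to something like the following (a sketch; the file name is a placeholder for one of the catalog chunks):

import h5py

fname = 'groups_135/groups_135.0.hdf5'            # placeholder chunk file name
with h5py.File(fname, mode='r', driver='core') as f:   # 'core' driver reads the file into memory
    print(list(f.keys()))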

Answer:

Your sample code is quite long; a more concise example would make it easier to help.

With HDF5 errors, the most important error is the first one shown. Here that is "unable to initialize conversion function". What kinds of data are in the HDF5 file? Can they be mapped to NumPy data types?
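
One way to answer that is to walk the file and print the dtype of every dataset, for example (a sketch; 'yourfile.hdf5' is a placeholder):

import h5py

def print_dtypes(name, obj):
    # report the HDF5/NumPy dtype of each dataset encountered
    if isinstance(obj, h5py.Dataset):
        print(name + ': ' + str(obj.dtype))

with h5py.File('yourfile.hdf5', 'r') as f:
    f.visititems(print_dtypes)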
