PDL-IO-HDF5-0.762/0000755000175000017500000000000015004706725013074 5ustar osboxesosboxesPDL-IO-HDF5-0.762/META.json0000644000175000017500000000317215004706725014520 0ustar osboxesosboxes{ "abstract" : "PDL Interface to the HDF5 Data Format", "author" : [ "unknown", "John Cerney ", "Andrew Benson " ], "dynamic_config" : 1, "generated_by" : "ExtUtils::MakeMaker version 7.72, CPAN::Meta::Converter version 2.150010", "license" : [ "perl_5", "perl_5" ], "meta-spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : 2 }, "name" : "PDL-IO-HDF5", "no_index" : { "directory" : [ "t", "inc" ] }, "prereqs" : { "build" : { "requires" : { "ExtUtils::MakeMaker" : "0", "PDL" : "2.064" } }, "configure" : { "requires" : { "ExtUtils::MakeMaker" : "0", "PDL" : "2.064" } }, "runtime" : { "requires" : { "PDL" : "2.064" } }, "test" : { "requires" : { "Test::More" : "0.88" } } }, "release_status" : "stable", "resources" : { "bugtracker" : { "web" : "https://github.com/PDLPorters/pdl-io-hdf5/issues" }, "homepage" : "http://pdl.perl.org/", "license" : [ "http://dev.perl.org/licenses/" ], "repository" : { "type" : "git", "web" : "https://github.com/PDLPorters/pdl-io-hdf5" } }, "version" : "0.762", "x_meta_spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : "2" }, "x_serialization_backend" : "JSON::PP version 4.04" } PDL-IO-HDF5-0.762/HDF5/0000755000175000017500000000000015004706725013562 5ustar osboxesosboxesPDL-IO-HDF5-0.762/HDF5/tkview.pm0000644000175000017500000001523614701402215015426 0ustar osboxesosboxespackage PDL::IO::HDF5::tkview; # Experimental module to view HDF5 using perl/tk and PDL::IO::HDF5 modules use Tk 800; use Tk::Tree; use IO::File; =head1 NAME PDL::IO::HDF5::tkview - View HDF5 files using perl/tk and PDL::IO::HDF5 modules =head1 DESCRIPTION This is a experimental object to view HDF5 files the PDL::IO::HDF5 module. The HDF files are displayed in a tree structure using Tk::Tree =head1 SYNOPSIS use Tk; use PDL::IO::HDF5::tkview use PDL::IO::HDF5; my $mw = MainWindow->new; my $h5 = new PDL::IO::HDF5('datafile.h5'); # open HDF5 file object my $tkview = new PDL::IO::HDF5::tkview( $mw, $h5); MainLoop; =head1 MEMBER DATA =over 1 =item mw Tk window where the file structure is displayed. =item H5obj PDL::IO::HDF5 Object =item hl Tk Hlist object =item dataDisplaySub Sub ref to execute when a dataset is double-clicked. This defaults to a print of the dataset. See L for details. Tk Hlist object =back =head1 METHODS =head2 new =for ref PDL::IO::HDF5::tkview Constructor - creates new object B =for usage $tkview = new PDL::IO::HDF5::tkview( $mw, $H5obj); Where: $mw Tk window $H5obj PDL::IO::HDF5::Object =cut # Cube Image Pixmap (ppm) format. raw data string $cubeImage = '/* XPM */ static char * cube_xpm[] = { "12 12 3 1", " c #FFFFFFFFFFFF", ". c #000000000000", "X c #FFFFFFFF0000", " ........", " .XXXXXX..", " .XXXXXX.X.", " ........XX.", " .XXXXXX.XX.", " .XXXXXX.XX.", " .XXXXXX.XX.", " .XXXXXX.XX.", " .XXXXXX.X. ", " .XXXXXX.. ", " ........ 
", " "};'; # ----------------------------------------------- # Routine to create the array_display window sub new{ my $type = shift; # get the class type my $mw = $_[0]; my $H5obj = $_[1]; my $self = {}; # setup member variables: $self->{mw} = $mw; $self->{H5obj} = $H5obj; bless $self, $type; # setup the window if (defined $H5obj){ my $hl = $mw->Scrolled('Tree',-separator => $;,-drawbranch => 1, -width => '15', -bg => 'white'); $hl->configure(-opencmd => [\&More,$self, $hl]); $hl->configure(-command => [\&activateCmd,$self]); # command to called when entry double-clicked my $name = $H5obj->filename; $hl->add($name, -text => $name, -data => $H5obj, -itemtype => 'imagetext'); $hl->setmode($name => 'close'); # Get Images for display $self->{groupImage} = $mw->Pixmap(-file => Tk->findINC('winfolder.xpm') ); $self->{cubeImage} = $mw->Pixmap(-data => $cubeImage ); AddChildren($self,$hl,$name,$H5obj); $hl->pack(-expand=> 1, -fill => 'both'); $self->{hl} = $hl; # Set Default dataDisplaySub $self->{dataDisplaySub} = sub{ print $_[0]}; } return $self; } # sub to add elements to the hlist after an element in the list has been expanded (i.e. clicked-on) sub AddChildren { my $self = shift; my ($hl,$path,$data) = @_; # hl list object, location, data my $w; my $name; my $text; if( ref($data) =~ /Group/ || !($path =~ /$;/ ) ){ # Current Item to expand is a group or top level of file # Display any Attributes First: my @attrs; # attributes stored my %attrs; @attrs = sort $data->attrs; if( @attrs){ # set attribute hash if there are attributes @attrs{@attrs} = $data->attrGet(@attrs); # attrget not defined yet } my ($attr, $attrValue); foreach $attr(@attrs){ # add each attribute to the display $attrValue = $attrs{$attr}; $text = "$attr: $attrValue"; $hl->add("$path$;"."_Attr$attr", -text => $text, -data => $attrValue); } # Display Datasets next: my @datasets; # dataset names stored @datasets = sort $data->datasets; # get list of datasets in the current group/file my ($dataset, @dims); foreach $dataset(@datasets){ # add each attribute to the display my $datasetData = $data->dataset($dataset); @dims = $datasetData->dims; # get the dims of the dataset if( @dims){ # > 0-dimensional dataset $text = "$dataset: Dims ".join(", ",@dims); } else{ # zero-dimensional dataset $text = "$dataset: ".$datasetData->get; } $hl->add("$path$;"."_Dset$dataset", -image => $self->{cubeImage}, -text => $text, -data => $data); } # Display Groups Next my @groups; # groups stored @groups = sort $data->groups; my ($group, $groupName); foreach $groupName(@groups){ # Add each group to the display # data element is the parent object and the group name. $hl->add("$path$;"."_Group$groupName", -image => $self->{groupImage}, -text => $groupName, -data => [ $data,$groupName] ); $hl->setmode( "$path$;"."_Group$groupName", "open"); } } } # This Sub called when a element of the H-list is expanded/collapsed. (i.e. 
clicked-on) sub More { my $self = shift; my ($w,$item) = @_; # hl list object, hlist item name if( defined $w->info('children',$item) > 0){ #get rid of old elements if it has already been opened # print "Has children\n"; $w->delete('offsprings',$item); } # print "item = $item\n"; my $data = $w->entrycget($item,'-data'); #get the data ref for this entry my @levels = split($;,$item); if( @levels && ( $levels[-1] =~ /^_Group/) ){ # if this is a group then get the group object my ($obj, $groupName) = @$data; $data = $obj->group($groupName); } $self->AddChildren($w,$item,$data); } =head2 dataDisplaySubSet =for ref Set the dataDisplaySub data member. B =for usage # Data Display sub to call when a dataset is double-clicked my $dataDisplay = sub{ my $data = $_[0]; print "I'm Displaying This $data\n";}; $tkview->dataDisplaySubSet($dataDisplay); The dataDisplaySub data member is a perl sub ref that is called when a dataset is double-clicked. This data member is initially set to just print the dataset's data to the command line. Using the L method, different actions for displaying the data can be "plugged-in". =cut sub dataDisplaySubSet { my ($self, $subref) = @_; $self->{dataDisplaySub} = $subref; } #------------------------------------------------------------------- =head2 activateCmd =for ref Internal Display method invoked whenever a tree element is activated (i.e. double-clicked). This method does nothing unless a dataset element has been selected. It that cases it calls $self->dataDisplaySub with the data. =cut sub activateCmd{ my $self = shift; my ($name) = (@_); # Name of the hlist element that was selected return unless($name =~ /$;_Dset(.+)$/); # only process datasets my $datasetName = $1; my $hlist = $self->{hl}; my $group = $hlist->entrycget($name,'-data'); my $PDL = $group->dataset($datasetName)->get; my $dataDisplaySub = $self->{dataDisplaySub}; &$dataDisplaySub($PDL) } 1; PDL-IO-HDF5-0.762/HDF5/Dataset.pm0000644000175000017500000014524115004706605015511 0ustar osboxesosboxespackage PDL::IO::HDF5::Dataset; use Carp; use strict; use Config; use PDL::Core::Dev; # Global mapping variables our ($H5T_STRING, $H5T_REFERENCE, %PDLtoHDF5internalTypeMapping, %HDF5toPDLfileMapping, %PDLtoHDF5fileMapping); =head1 NAME PDL::IO::HDF5::Dataset - PDL::IO::HDF5 Helper Object representing HDF5 datasets. =head1 DESCRIPTION This is a helper-object used by PDL::IO::HDF5 to interface with HDF5 format's dataset objects. Information on the HDF5 Format can be found at the HDF Group's web site at http://www.hdfgroup.org . =head1 SYNOPSIS See L =head1 MEMBER DATA =over 1 =item ID ID number given to the dataset by the HDF5 library =item name Name of the dataset. =item parent Ref to parent object (group) that owns this dateset. =item fileObj Ref to the L object that owns this object. =back =head1 METHODS =head2 new =for ref PDL::IO::HDF5::Dataset Constructor - creates new object B =for usage This object will usually be created using the calling format detailed in the L. The following syntax is used by the L object to build the object. $a = new PDL::IO::HDF5:Dataset( name => $name, parent => $parent, fileObj => $fileObj); Args: $name Name of the dataset $parent Parent Object that owns this dataset $fileObj PDL::HDF object that owns this dateset. 
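
In practice this constructor is rarely called directly; a dataset object
is normally obtained from its parent group or file object, as described
in the synopsis of L<PDL::IO::HDF5>. A minimal sketch (the file, group
and dataset names here are only illustrative):

  my $hdf5    = new PDL::IO::HDF5('datafile.h5');
  my $dataset = $hdf5->group('mygroup')->dataset('mydata');
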
=cut sub new{ my $type = shift; my %parms = @_; my $self = {}; my @DataMembers = qw( name parent fileObj); my %DataMembers; @DataMembers{ @DataMembers } = @DataMembers; # hash for quick lookup # check for proper supplied names: my $varName; foreach $varName(keys %parms){ unless( defined($DataMembers{$varName})){ carp("Error Calling ".__PACKAGE__." Constuctor\n \'$varName\' not a valid data member\n"); return undef; } unless( defined($parms{$varName})){ carp("Error Calling ".__PACKAGE__." Constuctor\n \'$varName\' not supplied\n"); return undef; } $self->{$varName} = $parms{$varName}; } my $parent = $self->{parent}; my $groupID = $parent->IDget; my $groupName = $parent->nameGet; my $name = $self->{name}; my $datasetID; ##### # Turn Error Reporting off for the following, so H5 lib doesn't complain # if the group isn't found. PDL::IO::HDF5::H5errorOff(); my $rc = PDL::IO::HDF5::H5Gget_objinfo($groupID, $name,1,0); PDL::IO::HDF5::H5errorOn(); # See if the dataset exists: if ($rc >= 0) { #DataSet Exists open it: $datasetID = PDL::IO::HDF5::H5Dopen($groupID, $name); if ($datasetID < 0) { carp "Error Calling ".__PACKAGE__." Constuctor: Can't open existing dataset '$name'\n"; return undef; } } else{ # dataset didn't exist, set datasetID = 0 ## (Have to put off opening the dataset ### until it is written to (Must know dims, etc to create) $datasetID = 0; } $self->{ID} = $datasetID; bless $self, $type; return $self; } =head2 DESTROY =for ref PDL::IO::HDF5::Dataset Destructor - Closes the dataset object B =for usage No Usage. Automatically called =cut sub DESTROY { my $self = shift; my $datasetID = $self->{ID}; # print "In DataSet DEstroy\n"; if ($datasetID && (PDL::IO::HDF5::H5Dclose($self->{ID}) < 0)) { warn("Error closing HDF5 Dataset '".$self->{name}."' in file:group: '".$self->{filename}.":".$self->{group}."'\n"); } } =head2 set =for ref Write data to the HDF5 dataset B =for usage $dataset->set($pdl, unlimited => 1); # Write the array data in the dataset Options: unlimited If present, the dataset is created with unlimited dimensions. =cut ############################################################################# # Mapping of PDL types to HDF5 types for writing to a dataset # # Mapping of PDL types to what HDF5 calls them while we are dealing with them # outside of the HDF5 file. %PDLtoHDF5internalTypeMapping = ( $PDL::Types::PDL_SB => PDL::IO::HDF5::H5T_NATIVE_INT8(), $PDL::Types::PDL_B => PDL::IO::HDF5::H5T_NATIVE_UINT8(), $PDL::Types::PDL_S => PDL::IO::HDF5::H5T_NATIVE_INT16(), $PDL::Types::PDL_US => PDL::IO::HDF5::H5T_NATIVE_UINT16(), $PDL::Types::PDL_L => PDL::IO::HDF5::H5T_NATIVE_INT32(), $PDL::Types::PDL_UL => PDL::IO::HDF5::H5T_NATIVE_UINT32(), $PDL::Types::PDL_ULL => PDL::IO::HDF5::H5T_NATIVE_UINT64(), $PDL::Types::PDL_LL => PDL::IO::HDF5::H5T_NATIVE_INT64(), $PDL::Types::PDL_F => PDL::IO::HDF5::H5T_NATIVE_FLOAT(), $PDL::Types::PDL_D => PDL::IO::HDF5::H5T_NATIVE_DOUBLE(), $PDL::Types::PDL_LD => PDL::IO::HDF5::H5T_NATIVE_LDOUBLE(), # no HDF5 direct support for long doubles, nor complex numbers # deliberately not supporting indx as likely to cause mayhem ); # Mapping of PDL types to what types they are written to in the HDF5 file. 
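# Note: %PDLtoHDF5internalTypeMapping (above) uses HDF5 *native* types,
# matching PDL's in-memory layout on this machine, while the
# %PDLtoHDF5fileMapping tables below use explicitly sized, explicitly
# endian HDF5 types for the on-disk representation.  isbigendian()
# (exported by PDL::Core::Dev) selects the big- or little-endian table so
# that the file types match this machine's byte order.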
if (isbigendian()) { %PDLtoHDF5fileMapping = ( $PDL::Types::PDL_SB => PDL::IO::HDF5::H5T_STD_I8BE(), $PDL::Types::PDL_B => PDL::IO::HDF5::H5T_STD_U8BE(), $PDL::Types::PDL_S => PDL::IO::HDF5::H5T_STD_I16BE(), $PDL::Types::PDL_US => PDL::IO::HDF5::H5T_STD_U16BE(), $PDL::Types::PDL_L => PDL::IO::HDF5::H5T_STD_I32BE(), $PDL::Types::PDL_UL => PDL::IO::HDF5::H5T_STD_U32BE(), $PDL::Types::PDL_ULL => PDL::IO::HDF5::H5T_STD_U64BE(), $PDL::Types::PDL_LL => PDL::IO::HDF5::H5T_STD_I64BE(), $PDL::Types::PDL_F => PDL::IO::HDF5::H5T_IEEE_F32BE(), $PDL::Types::PDL_D => PDL::IO::HDF5::H5T_IEEE_F64BE(), ); } else { # Little endian. %PDLtoHDF5fileMapping = ( $PDL::Types::PDL_SB => PDL::IO::HDF5::H5T_STD_I8LE(), $PDL::Types::PDL_B => PDL::IO::HDF5::H5T_STD_U8LE(), $PDL::Types::PDL_S => PDL::IO::HDF5::H5T_STD_I16LE(), $PDL::Types::PDL_US => PDL::IO::HDF5::H5T_STD_U16LE(), $PDL::Types::PDL_L => PDL::IO::HDF5::H5T_STD_I32LE(), $PDL::Types::PDL_UL => PDL::IO::HDF5::H5T_STD_U32LE(), $PDL::Types::PDL_ULL => PDL::IO::HDF5::H5T_STD_U64LE(), $PDL::Types::PDL_LL => PDL::IO::HDF5::H5T_STD_I64LE(), $PDL::Types::PDL_F => PDL::IO::HDF5::H5T_IEEE_F32LE(), $PDL::Types::PDL_D => PDL::IO::HDF5::H5T_IEEE_F64LE(), ); } sub set { my ($self, $pdl) = @_; my %options = @_ > 2 ? @_[2..$#_] : (); my ($parent, $datasetID, $name) = @$self{qw(parent ID name)}; my $groupID = $parent->IDget; my $internalhdf5_type; # hdf5 type that describes the way data is stored in memory my $hdf5Filetype; # hdf5 type that describes the way data will be stored in the file. my @dims; # hdf5 equivalent dims for the supplied PDL my $type = $pdl->get_datatype; # get PDL datatype if ($pdl->isa('PDL::Char')) { # Special Case for PDL::Char Objects (fixed length strings) @dims = $pdl->dims; my $length = shift @dims; # String length is the first dim of the PDL for PDL::Char # Create Null-Terminated String Type $internalhdf5_type = PDL::IO::HDF5::H5Tcopy(PDL::IO::HDF5::H5T_C_S1()); PDL::IO::HDF5::H5Tset_size($internalhdf5_type, $length ); # make legth of type equal to strings $hdf5Filetype = $internalhdf5_type; # memory and file storage will be the same type @dims = reverse(@dims); # HDF5 stores columns/rows in reverse order than pdl } else { # Other PDL Types unless (defined($internalhdf5_type = $PDLtoHDF5internalTypeMapping{$type})) { carp "Error Calling ".__PACKAGE__."::set: Can't map PDL type to HDF5 datatype\n"; return undef; } unless (defined($hdf5Filetype = $PDLtoHDF5fileMapping{$type})) { carp "Error Calling ".__PACKAGE__."::set: Can't map PDL type to HDF5 datatype\n"; return undef; } @dims = reverse($pdl->dims); # HDF5 stores columns/rows in reverse order than pdl } my $udims = my $dims = PDL::IO::HDF5::packList(@dims); if (exists($options{unlimited})) { my $udim = pack ("L*", (PDL::IO::HDF5::H5S_UNLIMITED())); my $rank = scalar(@dims)*2; $udims = $udim x $rank; } my $dataspaceID = PDL::IO::HDF5::H5Screate_simple(scalar(@dims), $dims , $udims); if ($dataspaceID < 0) { carp("Can't Open Dataspace in ".__PACKAGE__.":set\n"); return undef; } if ($datasetID == 0) { # Dataset not created yet my $propertiesID; if (exists($options{unlimited})) { $propertiesID = PDL::IO::HDF5::H5Pcreate(PDL::IO::HDF5::H5P_DATASET_CREATE()); if ($propertiesID < 0) { carp("Can't Open Properties in ".__PACKAGE__.":set\n"); return undef; } if (@dims and PDL::IO::HDF5::H5Pset_chunk($propertiesID,scalar(@dims),$dims) < 0) { carp("Error setting chunk size in ".__PACKAGE__.":set\n"); return undef; } # /* Create the dataset. 
*/ $datasetID = PDL::IO::HDF5::H5Dcreate($groupID, $name, $hdf5Filetype, $dataspaceID, $propertiesID); } else { # /* Create the dataset. */ $datasetID = PDL::IO::HDF5::H5Dcreate($groupID, $name, $hdf5Filetype, $dataspaceID, PDL::IO::HDF5::H5P_DEFAULT()); } if ($datasetID < 0) { carp("Can't Create Dataspace in ".__PACKAGE__.":set\n"); return undef; } $self->{ID} = $datasetID; if (exists($options{unlimited})) { if (PDL::IO::HDF5::H5Pclose($propertiesID) < 0) { carp("Error closing properties in ".__PACKAGE__.":set\n"); return undef; } } } if (@dims and PDL::IO::HDF5::H5Dextend($datasetID,$dims) < 0) { carp("Error extending dataset in ".__PACKAGE__.":set\n"); return undef; } # Write the actual data: my $data = ${$pdl->get_dataref}; if (PDL::IO::HDF5::H5Dwrite($datasetID, $internalhdf5_type, PDL::IO::HDF5::H5S_ALL(), PDL::IO::HDF5::H5S_ALL(), PDL::IO::HDF5::H5P_DEFAULT(), $data) < 0) { carp("Error Writing to dataset in ".__PACKAGE__.":set\n"); return undef; } # /* Terminate access to the data space. */ carp("Can't close Dataspace in ".__PACKAGE__.":set\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return 1; } =head2 get =for ref Get data from a HDF5 dataset to a PDL B =for usage $pdl = $dataset->get; # Read the Array from the HDF5 dataset, create a PDL from it # and put in $pdl # Assuming $dataset is three dimensional # with dimensions (20,100,90) The I method can also be used to obtain particular slices or hyperslabs of the dataset array. For example, if $dataset is three dimensional with dimensions (20,100,90) then we could do: $start=pdl([0,0,0]); # We begin the slice at the very beginning $end=pdl([19,0,0]); # We take the first vector of the array, $stride=pdl([2,1,1]); # taking only every two values of the vector $pdl = $dataset->get($start,$end,[$stride]); # Read a slice or # hyperslab from the HDF5 dataset. # $start, $end and optionally $stride # should be PDL vectors with length the # number of dimensions of the dataset. # $start gives the starting coordinates # in the array. # $end gives the ending coordinate # in the array # $stride gives the steps taken from one # coordinate to the next of the slice The mapping of HDF5 datatypes in the file to PDL datatypes in memory will be according to the following table. 
HDF5 File Type PDL Type ------------------------ ----------------- PDL::IO::HDF5::H5T_C_S1() => PDL::Char Object (Special Case for Char Strings) PDL::IO::HDF5::H5T_STD_I8BE() => $PDL::Types::PDL_SB, PDL::IO::HDF5::H5T_STD_I8LE() => $PDL::Types::PDL_SB, PDL::IO::HDF5::H5T_STD_U8BE() => $PDL::Types::PDL_B, PDL::IO::HDF5::H5T_STD_U8LE() => $PDL::Types::PDL_B, PDL::IO::HDF5::H5T_STD_I16BE() => $PDL::Types::PDL_S, PDL::IO::HDF5::H5T_STD_I16LE() => $PDL::Types::PDL_S, PDL::IO::HDF5::H5T_STD_U16BE() => $PDL::Types::PDL_U, PDL::IO::HDF5::H5T_STD_U16LE() => $PDL::Types::PDL_U, PDL::IO::HDF5::H5T_STD_I32BE() => $PDL::Types::PDL_L, PDL::IO::HDF5::H5T_STD_I32LE() => $PDL::Types::PDL_L, PDL::IO::HDF5::H5T_STD_U32LE() => $PDL::Types::PDL_UL, PDL::IO::HDF5::H5T_STD_U32BE() => $PDL::Types::PDL_UL, PDL::IO::HDF5::H5T_STD_I64LE() => $PDL::Types::PDL_LL, PDL::IO::HDF5::H5T_STD_I64BE() => $PDL::Types::PDL_LL, PDL::IO::HDF5::H5T_STD_U64LE() => $PDL::Types::PDL_ULL, PDL::IO::HDF5::H5T_STD_U64BE() => $PDL::Types::PDL_ULL, PDL::IO::HDF5::H5T_IEEE_F32BE() => $PDL::Types::PDL_F, PDL::IO::HDF5::H5T_IEEE_F32LE() => $PDL::Types::PDL_F, PDL::IO::HDF5::H5T_IEEE_F64BE() => $PDL::Types::PDL_D, PDL::IO::HDF5::H5T_IEEE_F64LE() => $PDL::Types::PDL_D For HDF5 File types not in this table, this method will attempt to map it to the default PDL type PDL_D. If the dataset being read is a scalar reference, the referenced dataset region will be read instead. B Character arrays are returned as the special L fixed-length string type. For fixed-length HDF5 string arrays, this is a direct mapping to the PDL::Char datatype. For HDF5 variable-length string arrays, the data is converted to a fixed-length character array, with a string size equal to the maximum size of all the strings in the array. 
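
As a small illustration of the string handling described above (the
dataset name is only illustrative), a string dataset is returned as a
L<PDL::Char>, and the individual strings can be recovered with its
C<atstr> method:

  my $strings = $group->dataset('labels')->get;   # returns a PDL::Char
  my $first   = $strings->atstr(0);               # first string element
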
=cut ############################################################################# # Mapping of HDF5 file types to PDL types %HDF5toPDLfileMapping = ( PDL::IO::HDF5::H5T_STD_I8BE() => $PDL::Types::PDL_SB, PDL::IO::HDF5::H5T_STD_I8LE() => $PDL::Types::PDL_SB, PDL::IO::HDF5::H5T_STD_U8BE() => $PDL::Types::PDL_B, PDL::IO::HDF5::H5T_STD_U8LE() => $PDL::Types::PDL_B, PDL::IO::HDF5::H5T_STD_I16BE() => $PDL::Types::PDL_S, PDL::IO::HDF5::H5T_STD_I16LE() => $PDL::Types::PDL_S, PDL::IO::HDF5::H5T_STD_U16BE() => $PDL::Types::PDL_U, PDL::IO::HDF5::H5T_STD_U16LE() => $PDL::Types::PDL_U, PDL::IO::HDF5::H5T_STD_I32BE() => $PDL::Types::PDL_L, PDL::IO::HDF5::H5T_STD_I32LE() => $PDL::Types::PDL_L, PDL::IO::HDF5::H5T_STD_U32LE() => $PDL::Types::PDL_UL, PDL::IO::HDF5::H5T_STD_U32BE() => $PDL::Types::PDL_UL, PDL::IO::HDF5::H5T_STD_I64LE() => $PDL::Types::PDL_LL, PDL::IO::HDF5::H5T_STD_I64BE() => $PDL::Types::PDL_LL, PDL::IO::HDF5::H5T_STD_U64LE() => $PDL::Types::PDL_ULL, PDL::IO::HDF5::H5T_STD_U64BE() => $PDL::Types::PDL_ULL, PDL::IO::HDF5::H5T_IEEE_F32BE() => $PDL::Types::PDL_F, PDL::IO::HDF5::H5T_IEEE_F32LE() => $PDL::Types::PDL_F, PDL::IO::HDF5::H5T_IEEE_F64BE() => $PDL::Types::PDL_D, PDL::IO::HDF5::H5T_IEEE_F64LE() => $PDL::Types::PDL_D ); $H5T_STRING = PDL::IO::HDF5::H5T_STRING (); #HDF5 string type $H5T_REFERENCE = PDL::IO::HDF5::H5T_REFERENCE(); #HDF5 reference type sub get{ my $self = shift; my $start = shift; my $end = shift; my $stride = shift; my $pdl; my $rc; # H5 library call return code my $parent = $self->{parent}; my $groupID = $parent->IDget; my $datasetID = $self->{ID}; my $name = $self->{name}; my $stringSize; # String size, if we are retrieving a string type my $PDLtype; # PDL type that the data will be mapped to my $internalhdf5_type; # Type that represents how HDF5 will store the data in memory (after retreiving from # the file) my $ReturnType = 'PDL'; # Default object returned is PDL. If strings are store, then this will # return PDL::Char my $isReference = 0; # Indicates if dataset is a reference my $datasetReference; # Data set reference my $referencedDatasetID; # ID of referenced dataset # Get the HDF5 file datatype; my $HDF5type = PDL::IO::HDF5::H5Dget_type($datasetID ); unless( $HDF5type >= 0 ){ carp "Error Calling ".__PACKAGE__."::get: Can't get HDF5 Dataset type.\n"; return undef; } # Check for string type: my $varLenString = 0; # Flag = 1 if reading variable-length string array if (PDL::IO::HDF5::H5Tget_class($HDF5type ) == $H5T_STRING) { # String type # Check for variable length string" if (!PDL::IO::HDF5::H5Tis_variable_str($HDF5type)) { # Not a variable length string $stringSize = PDL::IO::HDF5::H5Tget_size($HDF5type); unless( $stringSize >= 0 ){ carp "Error Calling ".__PACKAGE__."::get: Can't get HDF5 String Datatype Size.\n"; carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; return undef; } $internalhdf5_type = $HDF5type; # internal storage the same as the file storage. 
} else{ # Variable-length String, set flag $varLenString = 1; # Create variable-length type for reading from the file $internalhdf5_type = PDL::IO::HDF5::H5Tcopy(PDL::IO::HDF5::H5T_C_S1() ); PDL::IO::HDF5::H5Tset_size( $internalhdf5_type, PDL::IO::HDF5::H5T_VARIABLE() ); } $PDLtype = $PDL::Types::PDL_B; $ReturnType = 'PDL::Char'; # For strings, we return a PDL::Char } elsif (PDL::IO::HDF5::H5Tget_class($HDF5type) == $H5T_REFERENCE) { # Reference type # Flag that dataset is a reference $isReference = 1; # Check that the reference dataset is a single element my $dataspaceID = PDL::IO::HDF5::H5Dget_space($datasetID); my $Ndims = PDL::IO::HDF5::H5Sget_simple_extent_ndims($dataspaceID); if ($Ndims != 0) { carp("Can't handle non-scalar references ".__PACKAGE__.":get\n"); carp("Can't close Dataspace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } # Read the reference my $howBig = PDL::IO::HDF5::H5Tget_size(PDL::IO::HDF5::H5T_STD_REF_DSETREG()); $datasetReference = ' ' x $howBig; $rc = PDL::IO::HDF5::H5Dread($datasetID, PDL::IO::HDF5::H5T_STD_REF_DSETREG(), PDL::IO::HDF5::H5S_ALL(), PDL::IO::HDF5::H5S_ALL(), PDL::IO::HDF5::H5P_DEFAULT(), $datasetReference); # Dereference the reference $referencedDatasetID = PDL::IO::HDF5::H5Rdereference($datasetID,PDL::IO::HDF5::H5R_DATASET_REGION(),$datasetReference); # Get the data type of the dereferenced object $HDF5type = PDL::IO::HDF5::H5Dget_type($referencedDatasetID); # Map the HDF5 file datatype to a PDL datatype $PDLtype = $PDL::Types::PDL_D; # Default type is double my $defaultType; foreach $defaultType( keys %HDF5toPDLfileMapping){ if (PDL::IO::HDF5::H5Tequal($defaultType,$HDF5type) > 0) { $PDLtype = $HDF5toPDLfileMapping{$defaultType}; last; } } # Get the HDF5 internal datatype that corresponds to the PDL type unless( defined($PDLtoHDF5internalTypeMapping{$PDLtype}) ){ carp "Error Calling ".__PACKAGE__."::set: Can't map PDL type to HDF5 datatype\n"; return undef; } $internalhdf5_type = $PDLtoHDF5internalTypeMapping{$PDLtype}; } else{ # Normal Numeric Type # Map the HDF5 file datatype to a PDL datatype $PDLtype = $PDL::Types::PDL_D; # Default type is double my $defaultType; foreach $defaultType( keys %HDF5toPDLfileMapping){ if (PDL::IO::HDF5::H5Tequal($defaultType,$HDF5type) > 0) { $PDLtype = $HDF5toPDLfileMapping{$defaultType}; last; } } # Get the HDF5 internal datatype that corresponds to the PDL type unless( defined($PDLtoHDF5internalTypeMapping{$PDLtype}) ){ carp "Error Calling ".__PACKAGE__."::set: Can't map PDL type to HDF5 datatype\n"; return undef; } $internalhdf5_type = $PDLtoHDF5internalTypeMapping{$PDLtype}; } my $dataspaceID; if ($isReference == 1) { # Get the dataspace from the reference $dataspaceID = PDL::IO::HDF5::H5Rget_region($datasetID,PDL::IO::HDF5::H5R_DATASET_REGION(),$datasetReference); # Now reset the dataset ID to that of the referenced dataset for all further use $datasetID = $referencedDatasetID; } else { # Get the dataspace from the dataset itself $dataspaceID = PDL::IO::HDF5::H5Dget_space($datasetID); } if ($dataspaceID < 0) { carp("Can't Open Dataspace in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; return undef; } # Get the number of dims: my $Ndims = PDL::IO::HDF5::H5Sget_simple_extent_ndims($dataspaceID); if ($Ndims < 0) { carp("Can't Get Number of Dims in Dataspace in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't 
close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } my @dims = ( 0..($Ndims-1)); my ($mem_space,$file_space); if ($isReference == 1) { my @startAt = ( 0..($Ndims-1)); my @endAt = ( 0..($Ndims-1)); my $startAt = PDL::IO::HDF5::packList(@startAt); my $endAt = PDL::IO::HDF5::packList(@endAt); my $rc = PDL::IO::HDF5::H5Sget_select_bounds($dataspaceID, $startAt, $endAt ); if ($rc < 0) { carp("Error getting number of dims in dataspace in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } @startAt = PDL::IO::HDF5::unpackList($startAt); @endAt = PDL::IO::HDF5::unpackList($endAt); for(my $i=0;$i<=$#dims;++$i) { $dims[$i] = $endAt[$i]-$startAt[$i]+1; } if (not defined $start) { $start = PDL->zeros($Ndims); $end = PDL->zeros($Ndims); $start .= PDL->pdl(@startAt); $end .= PDL->pdl(@endAt); } else { $start += PDL->pdl(@startAt); $end += PDL->pdl(@startAt); } } if (not defined $start) { # Initialize Dims structure: my $dims = PDL::IO::HDF5::packList(@dims); my $dims2 = PDL::IO::HDF5::packList(@dims); my $rc = PDL::IO::HDF5::H5Sget_simple_extent_dims($dataspaceID, $dims, $dims2 ); if ($rc != $Ndims) { carp("Error getting number of dims in dataspace in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } @dims = PDL::IO::HDF5::unpackList($dims); # get the dim sizes from the binary structure } else { if (($start->getndims != 1) || ($start->getdim(0) != $Ndims)) { carp("Wrong dimensions in start PDL in ".__PACKAGE__."\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } my $start2 = PDL::IO::HDF5::packList(reverse($start->list)); if (not defined $end) { carp("No end supplied in ".__PACKAGE__."\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } if (($end->getndims != 1) || ($end->getdim(0) != $Ndims)) { carp("Wrong dimensions in end PDL in ".__PACKAGE__."\n") ; carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } my $length2; if (defined $stride) { if (($stride->getndims != 1) || ($stride->getdim(0) != $Ndims)) { carp("Wrong dimensions in stride in ".__PACKAGE__."\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } @dims=reverse((($end-$start+1)/$stride)->list); $length2 = PDL::IO::HDF5::packList(@dims); } else { @dims=reverse(($end-$start+1)->list); $length2 = PDL::IO::HDF5::packList(@dims); $stride=PDL::Core::ones($Ndims); } my $mem_dims = PDL::IO::HDF5::packList(@dims); my $stride2 = PDL::IO::HDF5::packList(reverse($stride->list)); my $block=PDL::Core::ones($Ndims); my $block2 = PDL::IO::HDF5::packList(reverse($block->list)); # Slice the 
data $file_space = PDL::IO::HDF5::H5Dget_space($datasetID); $rc=PDL::IO::HDF5::H5Sselect_hyperslab($file_space, 0, $start2, $stride2, $length2, $block2); if ($rc < 0) { carp("Error slicing data from file in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } $mem_space = PDL::IO::HDF5::H5Screate_simple($Ndims, $mem_dims, $mem_dims); } # Create initial PDL null array with the proper datatype $pdl = $ReturnType->null; $pdl->set_datatype($PDLtype); my @pdldims; # dims of the PDL my $datatypeSize; # Size of one element of data stored if (defined( $stringSize)) { # Fixed-Length String types @pdldims = ($stringSize,reverse(@dims)); # HDF5 stores columns/rows in reverse order than pdl, # 1st PDL dim is the string length (for PDL::Char) $datatypeSize = PDL::howbig($pdl->get_datatype); } elsif ($varLenString) { # Variable-length String # (Variable length string arrays will be converted to fixed-length strings later) @pdldims = (reverse(@dims)); # HDF5 stores columns/rows in reverse order than pdl # Variable length strings are stored as arrays of string pointers, so get that size # This will by 4 bytes on 32-bit machines, and 8 bytes on 64-bit machines. $datatypeSize = PDL::IO::HDF5::bufPtrSize(); } else{ # Normal Numeric types # (Variable length string arrays will be converted to fixed-length strings later) @pdldims = (reverse(@dims)); # HDF5 stores columns/rows in reverse order than pdl $datatypeSize = PDL::howbig($pdl->get_datatype); } $pdl->setdims(\@pdldims); my $nelems = 1; foreach (@pdldims){ $nelems *= $_; }; # calculate the number of elements my $datasize = $nelems * $datatypeSize; # Create empty space for the data # Incrementally, to get around problem on win32 my $howBig = $datatypeSize; my $data = ' ' x $howBig; foreach my $dim(@pdldims){ $data = $data x $dim; } # Read the data: if (not defined $start) { $rc = PDL::IO::HDF5::H5Dread($datasetID, $internalhdf5_type, PDL::IO::HDF5::H5S_ALL(), PDL::IO::HDF5::H5S_ALL(), PDL::IO::HDF5::H5P_DEFAULT(), $data); } else { $rc = PDL::IO::HDF5::H5Dread($datasetID, $internalhdf5_type, $mem_space, $file_space, PDL::IO::HDF5::H5P_DEFAULT(), $data); } if ($rc < 0) { carp("Error reading data from file in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } if ($varLenString) { # Convert variable-length string to fixed-length string, to be compatible with the PDL::Char type my $maxsize = PDL::IO::HDF5::findMaxVarLenSize($data, $nelems); # Create empty space for the fixed-length data # Incrementally, to get around problem on win32 my $howBig = $maxsize + 1; # Adding one to include the null string terminator my $fixeddata = ' ' x $howBig; foreach my $dim(@pdldims){ $fixeddata = $fixeddata x $dim; } PDL::IO::HDF5::copyVarLenToFixed($data, $fixeddata, $nelems, $maxsize); # Reclaim data from HDF5 system (HDF5 allocates memory when it reads variable-length strings) $rc = PDL::IO::HDF5::H5Dvlen_reclaim ($internalhdf5_type, $dataspaceID, PDL::IO::HDF5::H5P_DEFAULT(), $data); if ($rc < 0) { carp("Error reclaiming memeory while reading data from file in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace 
in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } # Adjust for fixed-length PDL creation $data = $fixeddata; unshift @pdldims, ($maxsize+1); } # Setup the PDL with the proper dimensions and data $pdl->setdims(\@pdldims); # Update the PDL data with the data read from the file ${$pdl->get_dataref()} = $data; $pdl->upd_data(); # /* Terminate access to the data space. */ carp("Can't close Dataspace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; # /* Terminate access to the data type. */ carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; return $pdl; } =head2 dims =for ref Get the dims for a HDF5 dataset. For example, a 3 x 4 array would return a perl array (3,4); B =for usage @pdl = $dataset->dims; # Get an array of dims. =cut sub dims{ my $self = shift; my $parent = $self->{parent}; my $groupID = $parent->IDget; my $datasetID = $self->{ID}; my $name = $self->{name}; my $dataspaceID = PDL::IO::HDF5::H5Dget_space($datasetID); if ($dataspaceID < 0) { carp("Can't Open Dataspace in ".__PACKAGE__.":get\n"); return undef; } # Get the number of dims: my $Ndims = PDL::IO::HDF5::H5Sget_simple_extent_ndims($dataspaceID); if ($Ndims < 0) { carp("Can't Get Number of Dims in Dataspace in ".__PACKAGE__.":get\n"); return undef; } # Initialize Dims structure: my @dims = ( 0..($Ndims-1)); my $dims = PDL::IO::HDF5::packList(@dims); my $dims2 = PDL::IO::HDF5::packList(@dims); my $rc = PDL::IO::HDF5::H5Sget_simple_extent_dims($dataspaceID, $dims, $dims2 ); if ($rc != $Ndims) { carp("Error getting number of dims in dataspace in ".__PACKAGE__.":get\n"); carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } @dims = PDL::IO::HDF5::unpackList($dims); # get the dim sizes from the binary structure return reverse @dims; # return dims in the order that PDL will store them } =head2 attrSet =for ref Set the value of an attribute(s) Attribute types supported are null-terminated strings and PDL matrices B =for usage $dataset->attrSet( 'attr1' => 'attr1Value', 'attr2' => 'attr2 value', 'attr3' => $pdl, . . . ); Returns undef on failure, 1 on success. =cut sub attrSet { my $self = shift; my %attrs = @_; # get atribute hash my $datasetID = $self->{ID}; unless( $datasetID){ # Error checking carp("Can't Set Attribute for empty dataset. 
Try writing some data to it first:\n"); carp(" in file:group: '".$self->{filename}.":".$self->{group}."'\n"); return undef; } my($key,$value); my $typeID; # id used for attribute my $dataspaceID; # id used for the attribute dataspace my $attrID; foreach $key( sort keys %attrs){ $value = $attrs{$key}; if (ref($value) =~ /^PDL/) { my $internalhdf5_type; # hdf5 type that describes the way data is stored in memory my @dims; # hdf5 equivalent dims for the supplied PDL my $type = $value->get_datatype; # get PDL datatype if ($value->isa('PDL::Char')) { # Special Case for PDL::Char Objects (fixed length strings) @dims = $value->dims; my $length = shift @dims; # String length is the first dim of the PDL for PDL::Char # Create Null-Terminated String Type $internalhdf5_type = PDL::IO::HDF5::H5Tcopy(PDL::IO::HDF5::H5T_C_S1()); PDL::IO::HDF5::H5Tset_size($internalhdf5_type, $length ); # make legth of type eaual to strings $typeID = $internalhdf5_type; # memory and file storage will be the same type @dims = reverse(@dims); # HDF5 stores columns/rows in reverse order than pdl } else { # Other PDL Types unless( defined($PDLtoHDF5internalTypeMapping{$type}) ){ carp "Error Calling ".__PACKAGE__."::set: Can't map PDL type to HDF5 datatype\n"; return undef; } $internalhdf5_type = $PDLtoHDF5internalTypeMapping{$type}; $typeID = PDL::IO::HDF5::H5Tcopy($internalhdf5_type); @dims = reverse($value->dims); # HDF5 stores columns/rows in reverse order than pdl } my $dims = PDL::IO::HDF5::packList(@dims); $value = ${$value->get_dataref}; $dataspaceID = PDL::IO::HDF5::H5Screate_simple(scalar(@dims), $dims , $dims); if ($dataspaceID < 0) { carp("Can't Open Dataspace in ".__PACKAGE__.":set\n"); return undef; } } else { # Create Null-Terminated String Type $typeID = PDL::IO::HDF5::H5Tcopy(PDL::IO::HDF5::H5T_C_S1()); PDL::IO::HDF5::H5Tset_size($typeID, length($value) || 1 ); # make legth of type eaual to length of $value or 1 if zero $dataspaceID = PDL::IO::HDF5::H5Screate_simple(0, 0, 0); } #Note: If a attr already exists, then it will be deleted an re-written # Delete the attribute first PDL::IO::HDF5::H5errorOff(); # keep h5 lib from complaining PDL::IO::HDF5::H5Adelete($datasetID, $key); PDL::IO::HDF5::H5errorOn(); $attrID = PDL::IO::HDF5::H5Acreate($datasetID, $key, $typeID, $dataspaceID, PDL::IO::HDF5::H5P_DEFAULT()); if ($attrID < 0) { carp "Error in ".__PACKAGE__." attrSet; Can't create attribute '$key'\n"; PDL::IO::HDF5::H5Sclose($dataspaceID); PDL::IO::HDF5::H5Tclose($typeID); # Cleanup return undef; } # Write the attribute data. if (PDL::IO::HDF5::H5Awrite($attrID, $typeID, $value) < 0) { carp "Error in ".__PACKAGE__." attrSet; Can't write attribute '$key'\n"; PDL::IO::HDF5::H5Aclose($attrID); PDL::IO::HDF5::H5Sclose($dataspaceID); PDL::IO::HDF5::H5Tclose($typeID); # Cleanup return undef; } # Cleanup PDL::IO::HDF5::H5Aclose($attrID); PDL::IO::HDF5::H5Sclose($dataspaceID); PDL::IO::HDF5::H5Tclose($typeID); } # Clear-out the attribute index, it is no longer valid with the updates # we just made. $self->{fileObj}->clearAttrIndex; return 1; } =head2 attrDel =for ref Delete attribute(s) B =for usage $dataset->attrDel( 'attr1', 'attr2', . . . ); Returns undef on failure, 1 on success. 
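
A minimal sketch of a set/delete round trip (the attribute name is
arbitrary):

  $dataset->attrSet( units => 'meters' );
  my @attrNames = $dataset->attrs;       # now includes 'units'
  $dataset->attrDel( 'units' );          # remove it again
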
=cut sub attrDel { my $self = shift; my @attrs = @_; # get atribute names my $datasetID = $self->{ID}; my $attr; my $rc; #Return code returned by H5Adelete foreach $attr( @attrs ){ # Note: We don't consider errors here as cause for aborting, we just # complain using carp if (PDL::IO::HDF5::H5Adelete($datasetID, $attr) < 0) { carp "Error in ".__PACKAGE__." attrDel; Error Deleting attribute '$attr'\n"; } } # Clear-out the attribute index, it is no longer valid with the updates # we just made. $self->{fileObj}->clearAttrIndex; return 1; } =head2 attrs =for ref Get a list of all attribute names associated with a dataset B =for usage @attrs = $dataset->attrs; =cut sub attrs { my $self = shift; my $datasetID = $self->{ID}; my $defaultMaxSize = 256; # default max size of a attribute name my $noAttr = PDL::IO::HDF5::H5Aget_num_attrs($datasetID); # get the number of attributes my $attrIndex = 0; # attribute Index my @attrNames = (); my $attributeID; my $attrNameSize; # size of the attribute name my $attrName; # attribute name # Go thru each attribute and get the name for( $attrIndex = 0; $attrIndex < $noAttr; $attrIndex++){ $attributeID = PDL::IO::HDF5::H5Aopen_idx($datasetID, $attrIndex ); if ($attributeID < 0) { carp "Error in ".__PACKAGE__." attrs; Error Opening attribute number $attrIndex\n"; next; } #init attrname to 256 length string (Maybe this not necessary with # the typemap) $attrName = ' ' x 256; # Get the name $attrNameSize = PDL::IO::HDF5::H5Aget_name($attributeID, 256, $attrName ); # If the name is greater than 256, try again with the proper size: if ($attrNameSize > 256) { $attrName = ' ' x $attrNameSize; $attrNameSize = PDL::IO::HDF5::H5Aget_name($attributeID, $attrNameSize, $attrName ); } push @attrNames, $attrName; # Close the attr: PDL::IO::HDF5::H5Aclose($attributeID); } return @attrNames; } =head2 attrGet =for ref Get the value of an attribute(s) Currently the attribute types supported are null-terminated strings and PDLs. 
B =for usage my @attrs = $dataset->attrGet( 'attr1', 'attr2'); =cut sub attrGet { my $self = shift; my @attrs = @_; # get atribute array my $datasetID = $self->{ID}; my($attrName,$attrValue); my @attrValues; #return array my $typeID; # id used for attribute my $dataspaceID; # id used for the attribute dataspace my $attrID; my $stringSize; my $Ndims; foreach $attrName( @attrs) { undef($stringSize); $attrValue = undef; # Open the Attribute $attrID = PDL::IO::HDF5::H5Aopen_name($datasetID, $attrName ); unless( $attrID >= 0){ carp "Error Calling ".__PACKAGE__."::attrget: Can't open HDF5 Attribute name '$attrName'.\n"; next; } # Open the data-space $dataspaceID = PDL::IO::HDF5::H5Aget_space($attrID); if ($dataspaceID < 0) { carp("Can't Open Dataspace for Attribute name '$attrName' in ".__PACKAGE__."::attrget\n"); carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; next; } # Check to see if the dataspace is simple if (PDL::IO::HDF5::H5Sis_simple($dataspaceID) < 0) { carp("Warning: Non-Simple Dataspace for Attribute name '$attrName' ".__PACKAGE__."::attrget\n"); carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; next; } # Get the number of dims: $Ndims = PDL::IO::HDF5::H5Sget_simple_extent_ndims($dataspaceID); unless( $Ndims >= 0){ if ($Ndims < 0) { carp("Warning: Can't Get Number of Dims in Attribute name '$attrName' Dataspace in ".__PACKAGE__.":get\n"); } #if ($Ndims > 0) { # carp("Warning: Non-Scalar Dataspace for Attribute name '$attrName' Dataspace in ".__PACKAGE__.":get\n"); #} carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; next; } my $HDF5type; if ($Ndims == 0) { # If it is a scalar we do this # Get the HDF5 dataset datatype; $HDF5type = PDL::IO::HDF5::H5Aget_type($attrID ); unless( $HDF5type >= 0 ){ carp "Error Calling ".__PACKAGE__."::attrGet: Can't get HDF5 Dataset type in Attribute name '$attrName'.\n"; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; next; } # Get the size so we can allocate space for it my $size = PDL::IO::HDF5::H5Tget_size($HDF5type); unless( $size){ carp "Error Calling ".__PACKAGE__."::attrGet: Can't get HDF5 Dataset type size in Attribute name '$attrName'.\n"; carp("Can't close Datatype in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; next; } #init attr value to the length of the type my $data = ' ' x ($size); my $PDLtype; my $ReturnType; my $internalhdf5_type; if (PDL::IO::HDF5::H5Tget_class($HDF5type ) == PDL::IO::HDF5::H5T_STRING()) { # String type $PDLtype = $PDL::Types::PDL_B; $internalhdf5_type = $HDF5type; # internal storage the same as the file storage. 
$ReturnType = 'PDL::Char'; # For strings, we return a PDL::Char $stringSize = PDL::IO::HDF5::H5Tget_size($HDF5type); unless ($stringSize >= 0) { carp "Error Calling ".__PACKAGE__."::attrGet: Can't get HDF5 String Datatype Size.\n"; carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; return undef; } } else { # Normal Numeric Type # Map the HDF5 file datatype to a PDL datatype $PDLtype = $PDL::Types::PDL_D; # Default type is double $ReturnType = 'PDL'; my $defaultType; foreach $defaultType( keys %HDF5toPDLfileMapping){ if (PDL::IO::HDF5::H5Tequal($defaultType,$HDF5type) > 0) { $PDLtype = $HDF5toPDLfileMapping{$defaultType}; last; } } # Get the HDF5 internal datatype that corresponds to the PDL type unless( defined($PDLtoHDF5internalTypeMapping{$PDLtype}) ){ carp "Error Calling ".__PACKAGE__."::attrGet: Can't map PDL type to HDF5 datatype\n"; return undef; } $internalhdf5_type = $PDLtoHDF5internalTypeMapping{$PDLtype}; } if (PDL::IO::HDF5::H5Aread($attrID, $internalhdf5_type, $data) < 0) { carp "Error Calling ".__PACKAGE__."::attrGet: Can't read Attribute Value for Attribute name '$attrName'.\n"; carp("Can't close Datatype in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; next; } $attrValue = $ReturnType->null; $attrValue->set_datatype($PDLtype); my @pdldims; if (defined $stringSize) { # String types @pdldims = $stringSize; } else { @pdldims = 1; } $attrValue->setdims(\@pdldims); # Update the PDL data with the data read from the file ${$attrValue->get_dataref()} = $data; $attrValue->upd_data(); # End of scalar option } else { # This is a PDL # Get the HDF5 dataset datatype; $HDF5type = PDL::IO::HDF5::H5Aget_type($attrID ); unless ($HDF5type >= 0) { carp "Error Calling ".__PACKAGE__."::attrGet: Can't get HDF5 Dataset type in Attribute name '$attrName'.\n"; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; next; } #********************************************************* my $stringSize; my $PDLtype; my $internalhdf5_type; my $typeID; my $ReturnType = 'PDL'; # Default object returned is PDL. If strings are store, then this will # Check for string type: my $varLenString = 0; # Flag = 1 if reading variable-length string array if (PDL::IO::HDF5::H5Tget_class($HDF5type ) == $H5T_STRING) { # String type # Check for variable length string" if (! PDL::IO::HDF5::H5Tis_variable_str($HDF5type )) { # Not a variable length string $stringSize = PDL::IO::HDF5::H5Tget_size($HDF5type); unless ($stringSize >= 0) { carp "Error Calling ".__PACKAGE__."::get: Can't get HDF5 String Datatype Size.\n"; carp("Can't close Datatype in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; return undef; } $internalhdf5_type = $HDF5type; # internal storage the same as the file storage. 
} else { # Variable-length String, set flag $varLenString = 1; # Create variable-length type for reading from the file $internalhdf5_type = PDL::IO::HDF5::H5Tcopy(PDL::IO::HDF5::H5T_C_S1() ); PDL::IO::HDF5::H5Tset_size( $internalhdf5_type, PDL::IO::HDF5::H5T_VARIABLE() ); } $PDLtype = $PDL::Types::PDL_B; $internalhdf5_type = $HDF5type; # internal storage the same as the file storage. $typeID=$HDF5type; $ReturnType = 'PDL::Char'; # For strings, we return a PDL::Char } else { # Normal Numeric Type # Map the HDF5 file datatype to a PDL datatype $PDLtype = $PDL::Types::PDL_D; # Default type is double my $defaultType; foreach $defaultType( keys %HDF5toPDLfileMapping){ if (PDL::IO::HDF5::H5Tequal($defaultType,$HDF5type) > 0) { $PDLtype = $HDF5toPDLfileMapping{$defaultType}; last; } } # Get the HDF5 internal datatype that corresponds to the PDL type unless( defined($PDLtoHDF5internalTypeMapping{$PDLtype}) ){ carp "Error Calling ".__PACKAGE__."::set: Can't map PDL type to HDF5 datatype\n"; carp("Can't close Datatype in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; return undef; } $internalhdf5_type = $PDLtoHDF5internalTypeMapping{$PDLtype}; #$internalhdf5_type = $HDF5type; # internal storage the same as the file storage. #$typeID = PDL::IO::HDF5::H5Tcopy($internalhdf5_type); $typeID = $internalhdf5_type; } # End of String or Numeric type # Initialize Dims structure: my @dims = ( 0..($Ndims-1)); my $dims = PDL::IO::HDF5::packList(@dims); my $dims2 = PDL::IO::HDF5::packList(@dims); my $rc = PDL::IO::HDF5::H5Sget_simple_extent_dims($dataspaceID, $dims, $dims2 ); if ($rc != $Ndims) { carp("Error getting number of dims in dataspace in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; return undef; } @dims = PDL::IO::HDF5::unpackList($dims); # get the dim sizes from the binary structure # Create initial PDL null array with the proper datatype $attrValue = $ReturnType->null; $attrValue->set_datatype($PDLtype); my @pdldims; # dims of the PDL my $datatypeSize; # Size of one element of data stored if (defined $stringSize) { # Fixed-Length String types @pdldims = ($stringSize,reverse(@dims)); # HDF5 stores columns/rows in reverse order than pdl, # 1st PDL dim is the string length (for PDL::Char) $datatypeSize = PDL::howbig($attrValue->get_datatype); } elsif ($varLenString) { # Variable-length String # (Variable length string arrays will be converted to fixed-length strings later) @pdldims = (reverse(@dims)); # HDF5 stores columns/rows in reverse order than pdl # Variable length strings are stored as arrays of string pointers, so get that size # This will by 4 bytes on 32-bit machines, and 8 bytes on 64-bit machines. 
$datatypeSize = PDL::IO::HDF5::bufPtrSize(); } else { # Normal Numeric types @pdldims = (reverse(@dims)); # HDF5 stores columns/rows in reverse order than pdl, $datatypeSize = PDL::howbig($attrValue->get_datatype); } $attrValue->setdims(\@pdldims); my $nelems = 1; foreach (@pdldims) { $nelems *= $_; } # calculate the number of elements my $datasize = $nelems * $datatypeSize; # Create empty space for the data # Incrementally, to get around problem on win32 my $howBig = $datatypeSize; my $data = ' ' x $howBig; foreach my $dim(@pdldims){ $data = $data x $dim; } # Read the data: $rc = PDL::IO::HDF5::H5Aread($attrID,$internalhdf5_type,$data); if ($rc < 0 ) { carp("Error reading data from file in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; return undef; } if ($varLenString) { # Convert variable-length string to fixed-length string, to be compatible with the PDL::Char type my $maxsize = PDL::IO::HDF5::findMaxVarLenSize($data, $nelems); # Create empty space for the fixed-length data # Incrementally, to get around problem on win32 my $howBig = $maxsize + 1; # Adding one to include the null string terminator my $fixeddata = ' ' x $howBig; foreach my $dim(@pdldims){ $fixeddata = $fixeddata x $dim; } PDL::IO::HDF5::copyVarLenToFixed($data, $fixeddata, $nelems, $maxsize); # Reclaim data from HDF5 system (HDF5 allocates memory when it reads variable-length strings) $rc = PDL::IO::HDF5::H5Dvlen_reclaim ($internalhdf5_type, $dataspaceID, PDL::IO::HDF5::H5P_DEFAULT(), $data); if ($rc < 0) { carp("Error reclaiming memeory while reading data from file in ".__PACKAGE__.":get\n"); carp("Can't close Datatype in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":get\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; return undef; } # Adjust for fixed-length PDL creation $data = $fixeddata; unshift @pdldims, ($maxsize+1); } # Setup the PDL with the proper dimensions and data $attrValue->setdims(\@pdldims); ${$attrValue->get_dataref()} = $data; $attrValue->upd_data(); #************************************************ } # End of PDL option # Cleanup carp("Can't close Datatype in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Tclose($HDF5type) < 0; carp("Can't close DataSpace in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Sclose($dataspaceID) < 0; carp("Can't close Attribute in ".__PACKAGE__.":attrGet\n") if PDL::IO::HDF5::H5Aclose($attrID) < 0; } continue { if ($Ndims == 0) { if (defined($stringSize)) { push @attrValues, $attrValue->atstr(0); } else { push @attrValues, $attrValue->index(0); } } else { push @attrValues, $attrValue; } } return @attrValues; } =head2 IDget =for ref Returns the HDF5 library ID for this object B =for usage my $ID = $dataSetObj->IDget; =cut sub IDget{ my $self = shift; return $self->{ID}; } =head2 nameGet =for ref Returns the HDF5 Dataset Name for this object. B =for usage my $name = $datasetObj->nameGet; =cut sub nameGet{ my $self = shift; return $self->{name}; } 1; PDL-IO-HDF5-0.762/HDF5/Group.pm0000644000175000017500000003301214701402152015201 0ustar osboxesosboxespackage PDL::IO::HDF5::Group; use Carp; use strict; =head1 NAME PDL::IO::HDF5::Group - PDL::IO::HDF5 Helper Object representing HDF5 groups. 
=head1 DESCRIPTION This is a helper-object used by PDL::IO::HDF5 to interface with HDF5 format's group objects. Information on the HDF5 Format can be found at the HDF Group's web site at http://www.hdfgroup.org . =head1 SYNOPSIS See L =head1 MEMBER DATA =over 1 =item ID ID number given to the group by the HDF5 library =item name Name of the group. (Absolute to the root group '/'. e.g. /maingroup/subgroup) =item parent Ref to parent object (file or group) that owns this group. =item fileObj Ref to the L object that owns this object. =back =head1 METHODS =head2 new =for ref PDL::IO::HDF5::Group Constructor - creates new object B =for usage This object will usually be created using the calling format detailed in the L. The following syntax is used by the L object to build the object. $a = new PDL::IO::HDF5:Group( name => $name, parent => $parent, fileObj => $fileObj ); Args: $name Name of the group (relative to the parent) $parent Parent Object that owns this group $fileObj PDL::HDF (Top Level) object that owns this group. =cut sub new{ my $type = shift; my %parms = @_; my $self = {}; my @DataMembers = qw( name parent fileObj); my %DataMembers; @DataMembers{ @DataMembers } = @DataMembers; # hash for quick lookup # check for proper supplied names: my $varName; foreach $varName(keys %parms){ unless( defined($DataMembers{$varName})){ carp("Error Calling ".__PACKAGE__." Constuctor\n \'$varName\' not a valid data member\n"); return undef; } $self->{$varName} = $parms{$varName}; } my $parent = $self->{parent}; my $parentID = $parent->IDget; my $parentName = $parent->nameGet; my $groupName = $self->{name}; my $groupID; # Adjust groupname to be absolute: if( $parentName ne '/') { # Parent is not the root group $self->{name} = "$parentName/$groupName"; } else{ # Parent is root group $self->{name} = "$parentName$groupName"; } # Turn Error Reporting off for the following, so H5 lib doesn't complain # if the group isn't found. PDL::IO::HDF5::H5errorOff(); my $rc = PDL::IO::HDF5::H5Gget_objinfo($parentID, $groupName,1,0); PDL::IO::HDF5::H5errorOn(); # See if the group exists: if( $rc >= 0){ #Group Exists open it: $groupID = PDL::IO::HDF5::H5Gopen($parentID, $groupName); } else{ # group didn't exist, create it: $groupID = PDL::IO::HDF5::H5Gcreate($parentID, $groupName, 0); # Clear-out the attribute index, it is no longer valid with the updates # we just made. $self->{fileObj}->clearAttrIndex; } # Try Opening the Group First (Assume it already exists) if($groupID < 0 ){ carp "Error Calling ".__PACKAGE__." Constuctor: Can't open or create group '$groupName'\n"; return undef; } $self->{ID} = $groupID; bless $self, $type; return $self; } =head2 DESTROY =for ref PDL::IO::HDF5 Destructor - Closes the HDF5::Group Object. B =for usage No Usage. Automatically called =cut sub DESTROY { my $self = shift; #print "In Group Destroy\n"; if( PDL::IO::HDF5::H5Gclose($self->{ID}) < 0){ warn("Error closing HDF5 Group '".$self->{name}."' in file '".$self->{parentName}."'\n"); } } =head2 attrSet =for ref Set the value of an attribute(s) Supports null-terminated strings, integers and floating point scalar and 1D array attributes. B =for usage $group->attrSet( 'attr1' => 'attr1Value', 'attr2' => 'attr2 value', 'attr3' => $pdl, . . . ); Returns undef on failure, 1 on success. 
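
For example, a group can carry both string and numeric (PDL) attributes;
a minimal sketch (attribute names and values are only illustrative):

  use PDL;
  $group->attrSet( title   => 'Run 42',
                   scaling => pdl( 1.0, 0.5, 0.25 ) );
  my ($title, $scaling) = $group->attrGet( 'title', 'scaling' );
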
=cut sub attrSet { my $self = shift; # Attribute setting for groups is exactly like datasets # Call datasets directly (This breaks OO inheritance, but is # better than duplicating code from the dataset object here return $self->PDL::IO::HDF5::Dataset::attrSet(@_); } =head2 attrGet =for ref Get the value of an attribute(s) Supports null-terminated strings, integer and floating point scalar and 1D array attributes. B =for usage my @attrs = $group->attrGet( 'attr1', 'attr2'); =cut sub attrGet { my $self = shift; # Attribute reading for groups is exactly like datasets # Call datasets directly (This breaks OO inheritance, but is # better than duplicating code from the dataset object here return $self->PDL::IO::HDF5::Dataset::attrGet(@_); } =head2 attrDel =for ref Delete attribute(s) B =for usage $group->attrDel( 'attr1', 'attr2', . . . ); Returns undef on failure, 1 on success. =cut sub attrDel { my $self = shift; my @attrs = @_; # get atribute names my $groupID = $self->{ID}; my $attr; my $rc; #Return code returned by H5Adelete foreach $attr( @attrs ){ # Note: We don't consider errors here as cause for aborting, we just # complain using carp if( PDL::IO::HDF5::H5Adelete($groupID, $attr) < 0){ carp "Error in ".__PACKAGE__." attrDel; Error Deleting attribute '$attr'\n"; } } # Clear-out the attribute index, it is no longer valid with the updates # we just made. $self->{fileObj}->clearAttrIndex; return 1; } =head2 attrs =for ref Get a list of all attribute names in a group B =for usage @attrs = $group->attrs; =cut sub attrs { my $self = shift; my $groupID = $self->{ID}; my $defaultMaxSize = 256; # default max size of a attribute name my $noAttr = PDL::IO::HDF5::H5Aget_num_attrs($groupID); # get the number of attributes my $attrIndex = 0; # attribute Index my @attrNames = (); my $attributeID; my $attrNameSize; # size of the attribute name my $attrName; # attribute name # Go thru each attribute and get the name for( $attrIndex = 0; $attrIndex < $noAttr; $attrIndex++){ $attributeID = PDL::IO::HDF5::H5Aopen_idx($groupID, $attrIndex ); if( $attributeID < 0){ carp "Error in ".__PACKAGE__." attrs; Error Opening attribute number $attrIndex\n"; next; } #init attrname to 256 length string (Maybe this not necessary with # the typemap) $attrName = ' ' x 256; # Get the name $attrNameSize = PDL::IO::HDF5::H5Aget_name($attributeID, 256, $attrName ); # If the name is greater than 256, try again with the proper size: if( $attrNameSize > 256 ){ $attrName = ' ' x $attrNameSize; $attrNameSize = PDL::IO::HDF5::H5Aget_name($attributeID, $attrNameSize, $attrName ); } push @attrNames, $attrName; # Close the attr: PDL::IO::HDF5::H5Aclose($attributeID); } return @attrNames; } =head2 dataset =for ref Open an existing or create a new dataset in a group. B =for usage $dataset = $group->dataset('newdataset'); Returns undef on failure, 1 on success. =cut sub dataset { my $self = shift; my $name = $_[0]; my $groupID = $self->{ID}; # get the group name of the current group my $dataset = PDL::IO::HDF5::Dataset->new( name=> $name, parent => $self, fileObj => $self->{fileObj} ); } =head2 datasets =for ref Get a list of all dataset names in a group. (Relative to the current group) B =for usage @datasets = $group->datasets; =cut sub datasets { my $self = shift; my $groupID = $self->{ID}; my @totalDatasets = PDL::IO::HDF5::H5GgetDatasetNames($groupID,"."); return @totalDatasets; } =head2 group =for ref Open an existing or create a new group in an existing group. 
B =for usage $newgroup = $oldgroup->group("newgroup"); Returns undef on failure, 1 on success. =cut sub group { my $self = shift; my $name = $_[0]; # get the group name my $group = new PDL::IO::HDF5::Group( name=> $name, parent => $self, fileObj => $self->{fileObj} ); return $group; } =head2 groups =for ref Get a list of all group names in a group. (Relative to the current group) B =for usage @groupNames = $group->groups; =cut sub groups { my $self = shift; my $groupID = $self->{ID}; my @totalgroups = PDL::IO::HDF5::H5GgetGroupNames($groupID,'.'); return @totalgroups; } =head2 _buildAttrIndex =for ref Internal Recursive Method to build the attribute index hash for the object For the purposes of indexing groups by their attributes, the attributes are applied hierarchial. i.e. any attributes of the higher level groups are assumed to be apply for the lower level groups. B =for usage $group->_buildAttrIndex($index, $currentAttrs); Input/Output: $index: Total Index hash ref $currentAttrs: Hash refs of the attributes valid for the current group. =cut sub _buildAttrIndex{ my ($self, $index, $currentAttrs) = @_; # Take care of any attributes in the current group my @attrs = $self->attrs; my @attrValues = $self->attrGet(@attrs); # Get the group name my $groupName = $self->nameGet; my %indexElement; # element of the index for this group %indexElement = %$currentAttrs; # Initialize index element # with attributes valid at the # group above # Add (or overwrite) attributes for this group # i.e. local group attributes take precedence over # higher-level attributes @indexElement{@attrs} = @attrValues; $index->{$groupName} = \%indexElement; # Now Do any subgroups: my @subGroups = $self->groups; my $subGroup; foreach $subGroup(@subGroups){ $self->group($subGroup)->_buildAttrIndex($index,\%indexElement); } } =head2 IDget =for ref Returns the HDF5 library ID for this object B =for usage my $ID = $groupObj->IDget; =cut sub IDget{ my $self = shift; return $self->{ID}; } =head2 nameGet =for ref Returns the HDF5 Group Name for this object. (Relative to the root group) B =for usage my $name = $groupObj->nameGet; =cut sub nameGet{ my $self = shift; return $self->{name}; } ####--------------------------------------------------------- =head2 reference =for ref Creates a reference to a region of a dataset. B =for usage $groupObj->reference($referenceName,$datasetObj,@regionStart,@regionCount); Create a reference named $referenceName within the group $groupObj to a subroutine of the dataset $datasetObj. The region to be referenced is defined by the @regionStart and @regionCount arrays. =cut sub reference{ my $self = shift; my $datasetObj = shift; my $referenceName = shift; my @regionStart = shift; my @regionCount = shift; # Get the dataset ID. my $dataSubsetID = $datasetObj->IDget; # Get the dataspace of the dataset. my $dataSubsetSpaceID = PDL::IO::HDF5::H5Dget_space($dataSubsetID); if( $dataSubsetSpaceID <= 0 ){ carp("Can't get dataspacein ".__PACKAGE__.":reference\n"); return undef; } # Select a hyperslab from this dataspace. 
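	# H5Sselect_hyperslab expects the start/stride/count/block arrays as
	# packed 64-bit hsize_t values in C (row-major) order, whereas PDL lists
	# dimensions fastest-varying first, so each list is reversed and packed
	# with packList below. The stride and block arrays are set to all ones,
	# i.e. a simple contiguous region is selected.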
my $Ndims = $#regionStart+1; my $start = new PDL @regionStart; my $length = new PDL @regionCount; my $start2 = PDL::IO::HDF5::packList(reverse($start->list)); my $length2 = PDL::IO::HDF5::packList(reverse($length->list)); my $stride = PDL::Core::ones($Ndims); my $stride2 = PDL::IO::HDF5::packList(reverse($stride->list)); my $block = PDL::Core::ones($Ndims); my $block2 = PDL::IO::HDF5::packList(reverse($block->list)); my $rc = PDL::IO::HDF5::H5Sselect_hyperslab($dataSubsetSpaceID,0,$start2,$stride2,$length2,$block2); if ( $rc < 0 ) { carp("Error slicing data space in ".__PACKAGE__.":reference\n"); carp("Can't close DataSpace in ".__PACKAGE__.":reference\n") if( PDL::IO::HDF5::H5Sclose($dataSubsetSpaceID) < 0); return undef; } # Create a dataspace for the reference dataset. my $dataspaceID = PDL::IO::HDF5::H5Screate_simple(0,0,0); if( $dataspaceID < 0 ){ carp("Can't Open Dataspace in ".__PACKAGE__.":reference\n"); return undef; } # Create the reference dataset. my $dataSetID = PDL::IO::HDF5::H5Dcreate($self->{ID},$referenceName, PDL::IO::HDF5::H5T_STD_REF_DSETREG(), $dataspaceID, PDL::IO::HDF5::H5P_DEFAULT()); if( $dataSetID < 0){ carp("Can't Create Dataset in ".__PACKAGE__.":reference\n"); return undef; } # Create the reference. my $howBig = PDL::IO::HDF5::H5Tget_size(PDL::IO::HDF5::H5T_STD_REF_DSETREG()); my $datasetReference = ' ' x $howBig; if ( PDL::IO::HDF5::H5Rcreate($datasetReference,$datasetObj->{parent}->{ID},$datasetObj->{name},PDL::IO::HDF5::H5R_DATASET_REGION(),$dataSubsetSpaceID) < 0 ) { carp("Can't Create Reference Dataset in ".__PACKAGE__.":reference\n"); return undef; } # Write the reference dataset. if( PDL::IO::HDF5::H5Dwrite($dataSetID,PDL::IO::HDF5::H5T_STD_REF_DSETREG(),PDL::IO::HDF5::H5S_ALL(),PDL::IO::HDF5::H5S_ALL(),PDL::IO::HDF5::H5P_DEFAULT(),$datasetReference) < 0 ){ carp("Error Writing to dataset in ".__PACKAGE__.":reference\n"); return undef; } # Close the dataset dataspace. PDL::IO::HDF5::H5Sclose($dataspaceID); PDL::IO::HDF5::H5Sclose($dataSubsetSpaceID); PDL::IO::HDF5::H5Dclose($dataSetID); return 1; } ####--------------------------------------------------------- =head2 unlink =for ref Unlink an object from a group. B =for usage $groupObj->unlink($objectName); Unlink the named object from the group. =cut sub unlink{ my $self = shift; my $objectName = shift; # Get the dataset ID. my $groupID = $self->{ID}; # Do the unlink. if ( PDL::IO::HDF5::H5Ldelete($groupID,$objectName,PDL::IO::HDF5::H5P_DEFAULT()) < 0 ) { carp("Can't unlink object in ".__PACKAGE__.":unlink\n"); return undef; } return 1; } 1; PDL-IO-HDF5-0.762/hdf5.pd0000644000175000017500000016353115004706631014254 0ustar osboxesosboxesuse Config; our $VERSION = '0.762'; pp_setversion(qq{'$VERSION'}); # Necessary includes for .xs file pp_addhdr(<<'EOH'); #include #define PDLchar pdl #define PDLuchar pdl #define PDLshort pdl #define PDLint pdl #define PDLlong pdl #define PDLfloat pdl #define PDLdouble pdl #define uchar unsigned char EOH pp_bless ("PDL::IO::HDF5"); pp_addpm(<<'EOPM'); =head1 NAME PDL::IO::HDF5 - PDL Interface to the HDF5 Data Format. =head1 DESCRIPTION This package provides an object-oriented interface for Ls to the HDF5 data-format. Information on the HDF5 Format can be found at the HDF Group's web site at http://www.hdfgroup.org . =head2 LIMITATIONS Currently this interface only provides a subset of the total HDF5 library's capability. =over 1 =item * Only HDF5 Simple datatypes are supported. No HDF5 Compound datatypes are supported since PDL doesn't support them. 
=item * Only HDF5 Simple dataspaces are supported. =back =head1 SYNOPSIS use PDL::IO::HDF5; # Files ####### my $newfile = new PDL::IO::HDF5("newfile.hdf"); # create new hdf5 or open existing file. my $attrValue = $existingFile->attrGet('AttrName'); # Get attribute value for file $existingFile->attSet('AttrName' => 'AttrValue'); # Set attribute value(s) for file # Groups ###### my $group = $newfile->group("/mygroup"); # create a new or open existing group my @groups = $existingFile->groups; # get a list of all the groups at the root '/' # level. my @groups = $group->groups; # get a list of all the groups at the "mygroup" # level. my $group2 = $group->group('newgroup'); # Create/open a new group in existing group "mygroup" $group->unlink('datasetName'); # Delete a dataset from a group $group->reference($dataset,'refName',\@start,\@count); # Create a scalar reference to a subregion of a # dataset, with specified start index and count. my $attrValue = $group->attrGet('AttrName'); # Get attribute value for a group $group->attrSet('AttrName' => 'AttrValue'); # Set attribute value(s) for a group $group->attrDel('AttrName1', 'AttrName2'); # Delete attribute(s) for a group @attrs = $group->attrs; # Get List of attributes for a group # Data Sets ######## my $dataset = $group->dataset( 'datasetName'); # create a new or open existing dataset # in an existing group my $dataset = $newfile->dataset( 'datasetName'); # create a new or open existing dataset # in the root group of a file my $dataset2 = $newfile->dataset( 'datasetName'); # create a new or open existing dataset # in the root group. my @datasets = $existingFile->datasets; # get a list of all datasets in the root '/' group my @datasets = $group->datasets; # get a list of all datasets in a group @dims = $dataset->dims; # get a list of dimensions for the dataset $pdl = $dataset->get(); # Get the array data in the dataset $pdl = $dataset->get($start,$length,$stride); # Get a slice or hyperslab of the array data in the dataset $dataset->set($pdl, unlimited => 1); # Set the array data in the dataset my $attrValue = $dataset->attrGet('AttrName'); # Get attribute value for a dataset $dataset->attSet('AttrName' => 'AttrValue'); # Set attribute value(s) for a dataset =head1 MEMBER DATA =over 1 =item ID ID number given to the file by the HDF5 library =item filename Name of the file. =item accessMode Access Mode?? ( read /write etc????) =item attrIndex Quick lookup index of group names to attribute values. Autogenerated as-needed by the L, L, L methods. Any attribute writes or group creations will delete this data member, because it will no longer be valid. The index is of this form: { groupName1 => { attr1 => value, attr2 => value }. groupName2 => { attr1 => value, attr3 => value }. . . . } For the purposes of indexing groups by their attributes, the attributes are applied hierarchically. i.e. any attributes of the higher level groups are assumed to be apply for the lower level groups. =item groupIndex Quick lookup index of attribute names/values group names. This index is used by the L method to quickly find any group(s) that have attribute that match a desired set. The index is of this form: { "attr1\0attt2" => { "value1\0value2' => [ group1, group2, ...], "value3\0value3' => [ groupA ], . . . }, "att1" => { "value1' => [ group1, group2, ...], "value3' => [ groupA ] . . . }, . . . } The first level of the index maps the attribute name combinations that have indexes built to their index. 
The second level maps the corresponding attribute values with the group(s) where these attributes take on these values. groupName1 => { attr1 => value, attr2 => value }. groupName2 => { attr1 => value, attr3 => value }. . . . } For the purposes of indexing groups by their attributes, the attributes are applied hierarchically. i.e. any attributes of the higher level groups are assumed to be apply for the lower level groups. =back =head1 METHODS =head2 new =for ref PDL::IO::HDF5 constructor - creates PDL::IO::HDF5 object for reading or writing data. B =for usage $a = new PDL::IO::HDF5( $filename ); Arguments: 1) The name of the file. If this file exists and you want to write to it, prepend the name with the '>' character: ">name.nc" Returns undef on failure. B =for example $hdf5obj = new PDL::IO::HDF5( "file.hdf" ); =cut sub new { my $type = shift; my $file = shift; my $self = {}; my $rc; my $write; if (substr($file, 0, 1) eq '>') { # open for writing $file = substr ($file, 1); # chop off > $write = 1; } my $fileID; # HDF file id if (-e $file) { # Existing File if ($write) { $fileID = H5Fopen($file, H5F_ACC_RDWR(), H5P_DEFAULT()); if( $fileID < 0){ carp("Can't Open Existing HDF file '$file' for writing\n"); return undef; } $self->{accessMode} = 'w'; } else { # Open read-only $fileID = H5Fopen($file, H5F_ACC_RDONLY(), H5P_DEFAULT()); if( $fileID < 0){ carp("Can't Open Existing HDF file '$file' for reading\n"); return undef; } $self->{accessMode} = 'r'; } } else{ # File doesn't exist, create it: $fileID = H5Fcreate($file, H5F_ACC_TRUNC(), H5P_DEFAULT(), H5P_DEFAULT()); if( $fileID < 0){ carp("Can't Open New HDF file '$file' for writing\n"); return undef; } $self->{accessMode} = 'w'; } # Record file name, ID $self->{filename} = $file; $self->{ID} = $fileID; $self->{attrIndex} = undef; # Initialize attrIndex $self->{groupIndex} = undef; # Initialize groupIndex bless $self, $type; } =head2 filename =for ref Get the filename for the HDF5 file B =for usage my $filename = $HDFfile->filename; =cut sub filename { my $self = shift; return $self->{filename}; } =head2 group =for ref Open or create a group in the root "/" group (i.e. top level) of the HDF5 file. B =for usage $HDFfile->group("groupName"); Returns undef on failure, 1 on success. =cut sub group { my $self = shift; my $name = $_[0]; # get the group name my $parentID = $self->{ID}; my $parentName = ''; my $group = new PDL::IO::HDF5::Group( 'name'=> $name, parent => $self, fileObj => $self ); } =head2 groups =for ref Get a list of groups in the root "/" group (i.e. top level) of the HDF5 file. B =for usage @groups = $HDFfile->groups; =cut sub groups { my $self = shift; my @groups = $self->group("/")->groups; return @groups; } =head2 unlink =for ref Unlink an object from the root "/" group (i.e. top level) of the HDF5 file. B =for usage $HDFfile->unlink($name); =cut sub unlink { my $self = shift; my $name = $_[0]; $self->group("/")->unlink($name); return 1; } =head2 dataset =for ref Open or create a dataset in the root "/" group (i.e. top level) of the HDF5 file. B =for usage $HDFfile->dataset("groupName"); Returns undef on failure, 1 on success. Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->dataset("groupName"); =cut sub dataset { my $self = shift; my $name = $_[0]; # get the dataset name return $self->group("/")->dataset($name); } =head2 datasets =for ref Get a list of all dataset names in the root "/" group. 
B =for usage @datasets = $HDF5file->datasets; Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->datasets; =cut sub datasets{ my $self = shift; my $name = $_[0]; # get the dataset name return $self->group("/")->datasets; } =head2 attrSet =for ref Set the value of an attribute(s) in the root '/' group of the file. Currently attribute types supported are null-terminated strings and any PDL type. B =for usage $HDFfile->attrSet( 'attr1' => 'attr1Value', 'attr2' => 'attr2 value', 'attr3' => $pdl, . . . ); Returns undef on failure, 1 on success. Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->attrSet( 'attr1' => 'attr1Value', 'attr2' => 'attr2 value', 'attr3' => $pdl, . . . ); =cut sub attrSet { my $self = shift; my %attrs = @_; # get atribute hash return $self->group("/")->attrSet(%attrs); } =head2 attrGet =for ref Get the value of an attribute(s) in the root '/' group of the file. Currently the attribute types supported are null-terminated strings and PDLs. B =for usage @attrValues = $HDFfile->attrGet( 'attr1', 'attr2' ); =cut sub attrGet { my $self = shift; my @attrs = @_; # get atribute hash return $self->group("/")->attrGet(@attrs); } =head2 attrDel =for ref Delete attribute(s) in the root "/" group of the file. B =for usage $HDFfile->attrDel( 'attr1', 'attr2', . . . ); Returns undef on failure, 1 on success. Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->attrDel( 'attr1', 'attr2', . . . ); =cut sub attrDel { my $self = shift; my @attrs = @_; # get atribute names return $self->group("/")->attrDel(@attrs); } =head2 attrs =for ref Get a list of all attribute names in the root "/" group of the file. B =for usage @attrs = $HDFfile->attrs; Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->attrs =cut sub attrs { my $self = shift; return $self->group("/")->attrs; } =head2 reference =for ref Create a reference to part of a dataset in the root "/" group of the file. B =for usage $HDFfile->reference; Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->reference($referenceName,$datasetObj,@regionStart,@regionCount); Create a reference named $referenceName within the root group "/" to a subroutine of the dataset $datasetObj. The region to be referenced is defined by the @regionStart and @regionCount arrays. =cut sub reference { my $self = shift; my $datasetObj = shift; my $referenceName = shift; my @regionStart = shift; my @regionCount = shift; return $self->group("/")->reference($datasetObj,$referenceName,\@regionStart,\@regionCount); } =head2 _buildAttrIndex =for ref Internal Method to build the attribute index hash for the object B =for usage $hdf5obj->_buildAttrIndex; Output: Updated attrIndex data member =cut sub _buildAttrIndex{ my ($self) = @_; # Take care of any attributes in the current group my @attrs = $self->attrs; my @attrValues = $self->attrGet(@attrs); my $index = $self->{attrIndex} = {}; my %indexElement; # element of the index for this group @indexElement{@attrs} = @attrValues; $index->{'/'} = \%indexElement; my $topLevelAttrs = { %indexElement }; # Now Do any subgroups: my @subGroups = $self->groups; my $subGroup; foreach $subGroup(@subGroups){ $self->group($subGroup)->_buildAttrIndex($index,$topLevelAttrs); } } =head2 clearAttrIndex =for ref Method to clear the attribute index hash for the object. This is a mostly internal method that is called whenever some part of the HDF5 file has changed and the L index is no longer valid. 
B =for usage $hdf5obj->clearAttrIndex; =cut sub clearAttrIndex{ my $self = shift; $self->{attrIndex} = undef; } =head2 _buildGroupIndex =for ref Internal Method to build the groupIndex hash for the object B =for usage $hdf5obj->_buildGroupIndex(@attrs); where: @attrs List of attribute names to build a group index on. Output: Updated groupIndex data member =cut sub _buildGroupIndex{ my ($self,@attrs) = @_; @attrs = sort @attrs; # Sort the attributes so the order won't matter # Generate attrIndex if not there yet defined( $self->{attrIndex}) || $self->_buildAttrIndex; my $attrIndex = $self->{attrIndex}; my $groupIndexElement = {}; # Element of the group index that we will build my $group; my $attrIndexElement; # Attr index for the current group my @attrValues; # attr values corresponding to @attrs for the current group my $key; # group index key # Go Thru All Groups foreach $group(sort keys %$attrIndex){ $attrIndexElement = $attrIndex->{$group}; @attrValues = map defined($_) ? $_ : '_undef_', @$attrIndexElement{@attrs}; # Groups with undefined attr will get a '_undef_' string for the value # Use multi-dimensional array emulation for the hash # key here because it should be quicker. if( defined( $groupIndexElement->{$key = join($;,@attrValues)}) ) { # if already defined, add to the list push @{$groupIndexElement->{$key}}, $group; } else{ # not already defined create new element $groupIndexElement->{$key} = [ $group ]; } } # initialize group index if it doesn't exist. unless( defined $self->{groupIndex} ){ $self->{groupIndex} = {} }; # Use multi-dimensional array emulation for the hash # key here because it should be quicker. $self->{groupIndex}{join($;,@attrs)} = $groupIndexElement; } =head2 clearGroupIndex =for ref Method to clear the group index hash for the object. This is a mostly internal method that is called whenever some part of the HDF5 file has changed and the L index is no longer valid. B =for usage $hdf5obj->clearGroupIndex; =cut sub clearGroupIndex{ my $self = shift; $self->{groupIndex} = undef; } =head2 getGroupsByAttr =for ref Get the group names which attributes match a given set of values. This method enables database-like queries to be made. I.e. you can get answers to questions like 'Which groups have attr1 = value1, and attr3 = value2?'. B =for usage @groupNames = $hdf5Obj->getGroupsByAttr( 'attr1' => 'value1', 'attr2' => 'value2' ); =cut sub getGroupsByAttr{ my $self = shift; my %attrHash = @_; my @keys = sort keys %attrHash; # Use multi-dimensional array emulation for the hash # key here because it should be quicker. my $compositeKey = join($;, @keys); # Generate groupIndex if not there yet defined( $self->{groupIndex}{$compositeKey} ) || $self->_buildGroupIndex(@keys); $groupIndex = $self->{groupIndex}{$compositeKey}; my @values = @attrHash{@keys}; my $compositeValues = join($;, @values); if( defined($groupIndex->{$compositeValues} )){ return @{$groupIndex->{$compositeValues}}; } else{ return (); } } =head2 allAttrValues =for ref Returns information about group attributes defined in the HDF5 datafile. B =for usage # Single Attr Usage. Returns an array of all # values of attribute 'attrName' in the file. $hdf5obj->allAttrValues('attrName'); # Multiple Attr Usage. Returns an 2D array of all # values of attributes 'attr1', 'attr2' in the file. 
# Higher-Level $hdf5obj->allAttrValues('attr1', 'attr2'); =cut sub allAttrValues{ my $self = shift; my @attrs = @_; # Generate attrIndex if not there yet defined( $self->{attrIndex}) || $self->_buildAttrIndex; my $attrIndex = $self->{attrIndex}; if( @attrs == 1) { # Single Argument Processing my $attr = $attrs[0]; my $group; my @values; my $grpAttrHash; # attr hash for a particular group # Go thru each group and look for instances of $attr foreach $group( keys %$attrIndex){ $grpAttrHash = $attrIndex->{$group}; if( defined($grpAttrHash->{$attr})){ push @values, $grpAttrHash->{$attr}; } } return @values; } else{ # Multiple argument processing my $group; my @values; my $grpAttrHash; # attr hash for a particular group my $attr; # individual attr name my $allAttrSeen; # flag = 0 if we have not seen all of the # desired attributes in the current group my $value; # Current value of the @values array that we # will return # Go thru each group and look for instances of $attr foreach $group( keys %$attrIndex){ $grpAttrHash = $attrIndex->{$group}; # Go thru each attribute $allAttrSeen = 1; # assume we will se all atributes, set to zero if we don't $value = []; foreach $attr(@attrs){ if( defined($grpAttrHash->{$attr})){ push @$value, $grpAttrHash->{$attr}; } else{ $allAttrSeen = 0; } } push @values, $value if $allAttrSeen; #add to values array if we got anything } return @values; } } =head2 allAttrNames =for ref Returns a sorted list of all the group attribute names that are defined in the file. B =for usage my @attrNames = $hdf5obj->allAttrNames; =cut sub allAttrNames{ my $self = shift; # Generate attrIndex if not there yet defined( $self->{attrIndex}) || $self->_buildAttrIndex; my $attrIndex = $self->{attrIndex}; my $group; my %names; my $grpAttrHash; # attr hash for a particular group my @currentNames; # Go thru each group and look for instances of $attr foreach $group( keys %$attrIndex){ $grpAttrHash = $attrIndex->{$group}; @currentNames = keys %$grpAttrHash; @names{@currentNames} = @currentNames; } return sort keys %names; } =head2 IDget =for ref Returns the HDF5 library ID for this object B =for usage my $ID = $hdf5obj->IDget; =cut sub IDget{ my $self = shift; return $self->{ID}; } =head2 nameGet =for ref Returns the HDF5 Group Name for this object. (Always '/', i.e. the root group for this top-level object) B =for usage my $name = $hdf5obj->nameGet; =cut sub nameGet{ my $self = shift; return '/'; } =head2 DESTROY =for ref PDL::IO::HDF5 Destructor - Closes the HDF5 file B =for usage No Usage. Automatically called =cut sub DESTROY { my $self = shift; if( H5Fclose($self->{ID}) < 0){ warn("Error closing HDF5 file ".$self->{filename}."\n"); } } # # Utility function (Not a Method!!!) # to pack a perl list into a binary structure # to be interpreted as a C array of long longs. This code is build # during the make process to do the Right Thing for big and little # endian machines sub packList { my @list = @_; if(ref($_[0])){ croak(__PACKAGE__."::packList is not a method!\n"); } EOPM # Packing of long int array structure differs depending on # if the current machine is little or big endian. This logic # probably won't work for 'weird' byte order machine, but for most # others (intel, vax, sun, etc) it should be OK. 
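#  As an illustrative example: on a little-endian machine packList(3,4)
#  interleaves zeros to give pack("L*", 3,0,4,0), whose byte layout matches
#  two 64-bit hsize_t values (3 and 4); on a big-endian machine the zeros
#  are interspersed before each value instead.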
# if( $Config{'byteorder'} =~ /^1/){ # little endian pp_addpm("\t".'@list = map (( $_,0 ), @list); # Intersperse zeros to make 64 bit hsize_t'); } else{ # Big Endian Machine pp_addpm("\t".'@list = map (( 0,$_ ), @list); # Intersperse zeros to make 64 bit hsize_t'); } pp_addpm(<<'EOPM'); my $list = pack ("L*", @list); return $list; } EOPM pp_addpm(<<'EOPM'); # # Utility function (Not a Method!!!) # to unpack a perl list from a binary structure # that is a C array of long longs. This code is build # during the make process to do the Right Thing for big and little # endian machines sub unpackList{ if(ref($_[0])){ croak(__PACKAGE__."::unpackList is not a method!\n"); } my ($binaryStruct) = (@_); # input binary structure my $listLength = length($binaryStruct) / 8; # list returned will be the # number of bytes in the input struct/8, since # the output numbers are 64bit. EOPM # UnPacking of long int array structure differs depending on # if the current machine is little or big endian. This logic # probably won't work for 'weird' byte order machine, but for most # others (intel, vax, sun, etc) it should be OK. # if( $Config{'byteorder'} =~ /^1/){ # little endian pp_addpm("\t".'my $unpackString = "Lxxxx" x $listLength; # 4 xxxx used to toss upper 32 bits'); } else{ # Big Endian Machine pp_addpm("\t".'my $unpackString = "xxxxL" x $listLength; # 4 xxxx used to toss upper 32 bits'); } pp_addpm(<<'EOPM'); my @list = unpack( $unpackString, $binaryStruct ); return @list; } =head1 AUTHORS John Cerney, j-cerney1@raytheon.com Andrew Benson, abenson@obs.carnegiescience.edu =cut EOPM # Read in a modified hdf.h file. Define # a low-level perl interface to hdf from these definitions. sub create_low_level { # This file must be modified to only include # hdf5 3 function definitions. # Also, all C function declarations must be on one line. my $defn = shift; my @lines = split (/\n/, $defn); foreach (@lines) { next if (/^\#/); # Skip commented out lines next if (/^\s*$/); # Skip blank lines my ($return_type, $func_name, $parms) = /^(\w+\**)\s+(\w+)\s*\((.*?)\)\;/; my @parms = split (/,/, $parms); my @vars = (); my @types = (); my %output = (); foreach $parm (@parms) { my ($varname) = ($parm =~ /(\w+)$/); $parm =~ s/$varname$//; # parm now contains the full C type $output{$varname} = 1 if (($parm =~ /\*/) && ($parm !~ /const/)); $parm =~ s/const //; # get rid of 'const' in C type $parm =~ s/^\s+//; $parm =~ s/\s+$//; # pare off the variable type from 'parm' push (@vars, $varname); push (@types, $parm); } my $xsout = ''; $xsout .= "$return_type\n"; $xsout .= "$func_name (" . join (", ", @vars) . ")\n"; for (my $i=0;$i<@vars;$i++) { $xsout .= "\t$types[$i]\t$vars[$i]\n"; } $xsout .= "CODE:\n"; $xsout .= "\tRETVAL = $func_name ("; for (my $i=0;$i<@vars;$i++) { if ($types[$i] =~ /PDL/) { ($type = $types[$i]) =~ s/PDL//; # Get rid of PDL type when writine xs CODE section $xsout .= "($type)$vars[$i]"."->data,"; } else { $xsout .= "$vars[$i],"; } } chop ($xsout) if( $xsout =~ /\,$/s); # remove last comma, if present $xsout .= ");\n"; $xsout .= "OUTPUT:\n"; $xsout .= "\tRETVAL\n"; foreach $var (sort keys %output) { $xsout .= "\t$var\n"; } $xsout .= "\n\n"; pp_addxs ('', $xsout); } } #------------------------------------------------------------------------- # Create low level interface from edited hdr5 header file. 
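#  Each single-line C prototype listed below is turned into a thin XS wrapper
#  of the same name under the PDL::IO::HDF5 package; for example the line
#  "herr_t H5Fclose (hid_t file_id);" produces an XS routine
#  PDL::IO::HDF5::H5Fclose($file_id) that forwards directly to the C call.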
#------------------------------------------------------------------------- create_low_level (<<'EODEF'); # HDF5 Functions we create an interface to using the perl XS code # # Note: H5Gget_objinfo arg statbuf has been changed from a H5G_stat_t type to # a const void type to avoid compilation errors. This function only used # to determine if a group exists, so the statbuf variable is not used as # I/O variable as stated in the HDF5 docs. hid_t H5Fcreate (const char *name, unsigned flags, hid_t create_id, hid_t access_id); hid_t H5Fopen (const char *name, unsigned flags, hid_t access_id); herr_t H5Fclose (hid_t file_id); # # Dataspace functions hid_t H5Screate_simple (int rank, const hsize_t * dims, const hsize_t * maxdims); herr_t H5Sclose(hid_t space_id); int H5Sget_simple_extent_ndims(hid_t space_id); int H5Sget_simple_extent_dims(hid_t space_id, hsize_t *dims, hsize_t *maxdims); herr_t H5Sselect_hyperslab(hid_t space_id, int op, const hsize_t *start, const hsize_t *stride, const hsize_t *count, const hsize_t *block); herr_t H5Sget_select_bounds(hid_t space_id, hsize_t *start, hsize_t *end); hid_t H5Pcreate(hid_t cls_id); herr_t H5Pset_chunk(hid_t plist, int ndims, const hsize_t *dim); herr_t H5Pclose(hid_t plist); # # # Dataset Functions hid_t H5Dcreate (hid_t loc_id, const char *name, hid_t type_id, hid_t space_id, hid_t create_plist_id); hid_t H5Dopen (hid_t loc_id, const char *name); herr_t H5Dwrite (hid_t dataset_id, hid_t mem_type_id, hid_t mem_space_id, hid_t file_space_id, hid_t xfer_plist_id, const char * buf); herr_t H5Dextend(hid_t dataset_id, const hsize_t *size); # H5Dread buf type changed from void * to I8 * so that is can be catergorized separately in the # typemap as a T_PVI translation herr_t H5Dread (hid_t dataset_id, hid_t mem_type_id, hid_t mem_space_id, hid_t file_space_id, hid_t xfer_plist_id, I8 * buf); hid_t H5Dclose (hid_t dataset_id); hid_t H5Dget_type(hid_t dataset_id); hid_t H5Dget_space(hid_t dataset_id); # H5Dvlen_reclaim buf type changed from void * to I8 * so that it can be categorised separately in the # typemap as a T_PVI translation herr_t H5Dvlen_reclaim(hid_t type_id, hid_t space_id, hid_t plist_id, I8 *buf); hid_t H5Gcreate(hid_t loc_id, const char *name, size_t size_hint); hid_t H5Gopen(hid_t loc_id, const char *name); herr_t H5Gclose(hid_t group_id); herr_t H5Gget_objinfo(hid_t loc_id, const char *name, hbool_t follow_link, const void *statbuf); herr_t H5errorOn(); herr_t H5errorOff(); # # Attribute Functions hid_t H5Aopen_name(hid_t loc_id, const char *name); hid_t H5Acreate(hid_t loc_id, const char *name, hid_t type_id, hid_t space_id, hid_t create_plist); # Note: attrib write only supports char buffer right now herr_t H5Awrite (hid_t attr_id, hid_t mem_type_id, I8 * buf); herr_t H5Adelete(hid_t loc_id, const char * name); herr_t H5Aclose(hid_t attr_id); int H5Aget_num_attrs(hid_t loc_id); hid_t H5Aopen_idx(hid_t loc_id, unsigned int idx); ssize_t H5Aget_name(hid_t attr_id, size_t buf_size, char *buf); htri_t H5Sis_simple(hid_t space_id); hid_t H5Aget_space(hid_t attr_id); hid_t H5Aget_type(hid_t attr_id); # The Attrib read only supports char buffer right now herr_t H5Aread(hid_t attr_id, hid_t mem_type_id, I8 *buf); # Type Functions: herr_t H5Tset_size(hid_t type_id, size_t size); herr_t H5Tclose(hid_t type_id); hid_t H5Tcopy(hid_t type_id); size_t H5Tget_size(hid_t type_id); #hid_t H5Tget_super(hid_t type); htri_t H5Tequal(hid_t type_id1, hid_t type_id2); H5T_class_t H5Tget_class(hid_t type_id); htri_t H5Tis_variable_str(hid_t type_id); # 
Reference Functions: H5G_obj_t H5Rget_obj_type(hid_t id, H5R_type_t ref_type, I8 *ref); hid_t H5Rget_region(hid_t dataset, H5R_type_t ref_type, I8 *ref); hid_t H5Rdereference(hid_t dataset, H5R_type_t ref_type, I8 *ref); herr_t H5Rcreate(I8 *ref, hid_t loc_id, const char *name, H5R_type_t ref_type, hid_t space_id); # Link functions: herr_t H5Ldelete(hid_t loc_id, const char *name, hid_t lapl_id); EODEF # Add Optional HDF Constants to export list. pp_add_exported('', <<'EOPM'); H5F_ACC_DEBUG H5F_ACC_EXCL H5F_ACC_RDONLY H5F_ACC_RDWR H5F_ACC_TRUNC H5P_DEFAULT H5P_DATASET_CREATE H5S_ALL H5S_UNLIMITED H5T_ALPHA_B16 H5T_ALPHA_B32 H5T_ALPHA_B64 H5T_ALPHA_B8 H5T_ALPHA_F32 H5T_ALPHA_F64 H5T_ALPHA_I16 H5T_ALPHA_I32 H5T_ALPHA_I64 H5T_ALPHA_I8 H5T_ALPHA_U16 H5T_ALPHA_U32 H5T_ALPHA_U64 H5T_ALPHA_U8 H5T_C_S1 H5T_FORTRAN_S1 H5T_IEEE_F32BE H5T_IEEE_F32LE H5T_IEEE_F64BE H5T_IEEE_F64LE H5T_INTEL_B16 H5T_INTEL_B32 H5T_INTEL_B64 H5T_INTEL_B8 H5T_INTEL_F32 H5T_INTEL_F64 H5T_INTEL_I16 H5T_INTEL_I32 H5T_INTEL_I64 H5T_INTEL_I8 H5T_INTEL_U16 H5T_INTEL_U32 H5T_INTEL_U64 H5T_INTEL_U8 H5T_MIPS_B16 H5T_MIPS_B32 H5T_MIPS_B64 H5T_MIPS_B8 H5T_MIPS_F32 H5T_MIPS_F64 H5T_MIPS_I16 H5T_MIPS_I32 H5T_MIPS_I64 H5T_MIPS_I8 H5T_MIPS_U16 H5T_MIPS_U32 H5T_MIPS_U64 H5T_MIPS_U8 H5T_NATIVE_B16 H5T_NATIVE_B32 H5T_NATIVE_B64 H5T_NATIVE_B8 H5T_NATIVE_CHAR H5T_NATIVE_DOUBLE H5T_NATIVE_FLOAT H5T_NATIVE_HBOOL H5T_NATIVE_HERR H5T_NATIVE_HSIZE H5T_NATIVE_HSSIZE H5T_NATIVE_INT H5T_NATIVE_INT16 H5T_NATIVE_INT32 H5T_NATIVE_INT64 H5T_NATIVE_INT8 H5T_NATIVE_INT_FAST16 H5T_NATIVE_INT_FAST32 H5T_NATIVE_INT_FAST64 H5T_NATIVE_INT_FAST8 H5T_NATIVE_INT_LEAST16 H5T_NATIVE_INT_LEAST32 H5T_NATIVE_INT_LEAST64 H5T_NATIVE_INT_LEAST8 H5T_NATIVE_LDOUBLE H5T_NATIVE_LLONG H5T_NATIVE_LONG H5T_NATIVE_OPAQUE H5T_NATIVE_SCHAR H5T_NATIVE_SHORT H5T_NATIVE_UCHAR H5T_NATIVE_UINT H5T_NATIVE_UINT16 H5T_NATIVE_UINT32 H5T_NATIVE_UINT64 H5T_NATIVE_UINT8 H5T_NATIVE_UINT_FAST16 H5T_NATIVE_UINT_FAST32 H5T_NATIVE_UINT_FAST64 H5T_NATIVE_UINT_FAST8 H5T_NATIVE_UINT_LEAST16 H5T_NATIVE_UINT_LEAST32 H5T_NATIVE_UINT_LEAST64 H5T_NATIVE_UINT_LEAST8 H5T_NATIVE_ULLONG H5T_NATIVE_ULONG H5T_NATIVE_USHORT H5T_STD_B16BE H5T_STD_B16LE H5T_STD_B32BE H5T_STD_B32LE H5T_STD_B64BE H5T_STD_B64LE H5T_STD_B8BE H5T_STD_B8LE H5T_STD_I16BE H5T_STD_I16LE H5T_STD_I32BE H5T_STD_I32LE H5T_STD_I64BE H5T_STD_I64LE H5T_STD_I8BE H5T_STD_I8LE H5T_STD_REF_DSETREG H5T_STD_REF_OBJ H5T_STD_U16BE H5T_STD_U16LE H5T_STD_U32BE H5T_STD_U32LE H5T_STD_U64BE H5T_STD_U64LE H5T_STD_U8BE H5T_STD_U8LE H5T_STRING H5T_UNIX_D32BE H5T_UNIX_D32LE H5T_UNIX_D64BE H5T_UNIX_D64LE H5T_REFERENCE H5R_DATASET_REGION EOPM ############################################################### # XS Code that implements self-contained turn-on/off for # the h5 library error reporting. We can turn error reporting # selectively on and off to keep the library from complaining # when we are doing things like checking to see if a particular # group name exists. pp_addhdr(<<'EOXS'); /* ############################################################### # # H5 Library error reporting turn-on/off functions # # */ herr_t H5errorOff() { return H5Eset_auto(NULL, NULL ); } herr_t H5errorOn() { return H5Eset_auto((herr_t(*)(void*))H5Eprint, stderr ); } /* ############################################################### # # Operator Interation Functions (Supplied to and called by 'H5Giterate') # used to get the number of datasets in a group, # and the names of the dataset in the group. 
# # */ /* * Operator function to get number of datasets */ herr_t incIfDset(hid_t loc_id, const char *name, void *opdata) { H5G_stat_t statbuf; unsigned int * dsetCount; dsetCount = (unsigned int *) opdata; /* * Get type of the object and increment *dsetCount * if it is a dataset * The name of the object is passed to this function by * the Library. Some magic :-) */ H5Gget_objinfo(loc_id, name, FALSE, &statbuf); if( statbuf.type == H5G_DATASET){ (*dsetCount)++; } return 0; } /* * Operator function to fill up char array of dataset names * * opdata is a pointer to an Array of strings (i.e. 2D char array) */ herr_t getName_if_Dset(hid_t loc_id, const char *name, void *opdata) { H5G_stat_t statbuf; char ** datasetName; char *** tempptr; tempptr = (char ***) opdata; datasetName = *tempptr; /* * Get type of the object. * If it is a dataset, get allocate space for it at *datasetName * Increment *tempptr so we will be looking at the next name space when * this function is called again. * * Note: The calling function must take care of freeing memory allocateed * */ H5Gget_objinfo(loc_id, name, FALSE, &statbuf); if( statbuf.type == H5G_DATASET){ *datasetName = (char *) malloc( (strlen(name)+1) * sizeof(char)); if( *datasetName == NULL){ printf("PDL::IO::HDF5; Out of Memory in getName_if_Dset\n"); exit(1); } strcpy(*datasetName,name); (*tempptr)++; } return 0; } /* * Operator function to get number of groups in a particular location */ herr_t incIfGroup(hid_t loc_id, const char *name, void *opdata) { H5G_stat_t statbuf; unsigned int * groupCount; groupCount = (unsigned int *) opdata; /* * Get type of the object and increment *groupCount * if it is a group * The name of the object is passed to this function by * the Library. Some magic :-) */ H5Gget_objinfo(loc_id, name, FALSE, &statbuf); if( statbuf.type == H5G_GROUP){ (*groupCount)++; } return 0; } /* * Operator function to fill up char array of group names * * opdata is a pointer to an Array of strings (i.e. 2D char array) */ herr_t getName_if_Group(hid_t loc_id, const char *name, void *opdata) { H5G_stat_t statbuf; char ** groupName; char *** tempptr; tempptr = (char ***) opdata; groupName = *tempptr; /* * Get type of the object. * If it is a group, get allocate space for it at *groupName * Increment *tempptr so we will be looking at the next name space when * this function is called again. 
* * Note: The calling function must take care of freeing memory allocateed * */ H5Gget_objinfo(loc_id, name, FALSE, &statbuf); if( statbuf.type == H5G_GROUP){ *groupName = (char *) malloc( (strlen(name)+1) * sizeof(char)); if( *groupName == NULL){ printf("PDL::IO::HDF5; Out of Memory in getName_if_Group\n"); exit(1); } strcpy(*groupName,name); (*tempptr)++; } return 0; } EOXS ############################################################### # XS Code that implements the HDF constants # Using the AUTOLOAD routine, any calls to hdf5 constants, like # H5F_ACC_RDONLY will call the 'constant' routine here and return # the value of the #defined'ed H5F_ACC_RDONLY pp_addhdr(<<'EOXS'); /* ############################################################### # # Functions to handle interfacing HDF5 constants with perl # # This originally generated using h2xs and manually editing # */ static int not_here(char *s) { croak("%s not implemented on this architecture", s); return -1; } hid_t constant(char *name, int arg) { errno = 0; switch (*name) { case 'A': break; case 'B': break; case 'C': break; case 'D': break; case 'E': break; case 'F': break; case 'G': break; case 'H': if (strEQ(name, "H5F_ACC_DEBUG")) #ifdef H5F_ACC_DEBUG return H5F_ACC_DEBUG; #else goto not_there; #endif if (strEQ(name, "H5F_ACC_EXCL")) #ifdef H5F_ACC_EXCL return H5F_ACC_EXCL; #else goto not_there; #endif if (strEQ(name, "H5F_ACC_RDONLY")) #ifdef H5F_ACC_RDONLY return H5F_ACC_RDONLY; #else goto not_there; #endif if (strEQ(name, "H5F_ACC_RDWR")) #ifdef H5F_ACC_RDWR return H5F_ACC_RDWR; #else goto not_there; #endif if (strEQ(name, "H5F_ACC_TRUNC")) #ifdef H5F_ACC_TRUNC return H5F_ACC_TRUNC; #else goto not_there; #endif if (strEQ(name, "H5P_DEFAULT")) #ifdef H5P_DEFAULT return H5P_DEFAULT; #else goto not_there; #endif if (strEQ(name, "H5P_DATASET_CREATE")) #ifdef H5P_DATASET_CREATE return H5P_DATASET_CREATE; #else goto not_there; #endif if (strEQ(name, "H5S_ALL")) #ifdef H5S_ALL return H5S_ALL; #else goto not_there; #endif if (strEQ(name, "H5S_UNLIMITED")) #ifdef H5S_UNLIMITED return H5S_UNLIMITED; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_B16")) #ifdef H5T_ALPHA_B16 return H5T_ALPHA_B16; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_B32")) #ifdef H5T_ALPHA_B32 return H5T_ALPHA_B32; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_B64")) #ifdef H5T_ALPHA_B64 return H5T_ALPHA_B64; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_B8")) #ifdef H5T_ALPHA_B8 return H5T_ALPHA_B8; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_F32")) #ifdef H5T_ALPHA_F32 return H5T_ALPHA_F32; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_F64")) #ifdef H5T_ALPHA_F64 return H5T_ALPHA_F64; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_I16")) #ifdef H5T_ALPHA_I16 return H5T_ALPHA_I16; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_I32")) #ifdef H5T_ALPHA_I32 return H5T_ALPHA_I32; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_I64")) #ifdef H5T_ALPHA_I64 return H5T_ALPHA_I64; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_I8")) #ifdef H5T_ALPHA_I8 return H5T_ALPHA_I8; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_U16")) #ifdef H5T_ALPHA_U16 return H5T_ALPHA_U16; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_U32")) #ifdef H5T_ALPHA_U32 return H5T_ALPHA_U32; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_U64")) #ifdef H5T_ALPHA_U64 return H5T_ALPHA_U64; #else goto not_there; #endif if (strEQ(name, "H5T_ALPHA_U8")) #ifdef H5T_ALPHA_U8 return 
H5T_ALPHA_U8; #else goto not_there; #endif if (strEQ(name, "H5T_C_S1")) #ifdef H5T_C_S1 return H5T_C_S1; #else goto not_there; #endif if (strEQ(name, "H5T_FORTRAN_S1")) #ifdef H5T_FORTRAN_S1 return H5T_FORTRAN_S1; #else goto not_there; #endif if (strEQ(name, "H5T_IEEE_F32BE")) #ifdef H5T_IEEE_F32BE return H5T_IEEE_F32BE; #else goto not_there; #endif if (strEQ(name, "H5T_IEEE_F32LE")) #ifdef H5T_IEEE_F32LE return H5T_IEEE_F32LE; #else goto not_there; #endif if (strEQ(name, "H5T_IEEE_F64BE")) #ifdef H5T_IEEE_F64BE return H5T_IEEE_F64BE; #else goto not_there; #endif if (strEQ(name, "H5T_IEEE_F64LE")) #ifdef H5T_IEEE_F64LE return H5T_IEEE_F64LE; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_B16")) #ifdef H5T_INTEL_B16 return H5T_INTEL_B16; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_B32")) #ifdef H5T_INTEL_B32 return H5T_INTEL_B32; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_B64")) #ifdef H5T_INTEL_B64 return H5T_INTEL_B64; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_B8")) #ifdef H5T_INTEL_B8 return H5T_INTEL_B8; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_F32")) #ifdef H5T_INTEL_F32 return H5T_INTEL_F32; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_F64")) #ifdef H5T_INTEL_F64 return H5T_INTEL_F64; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_I16")) #ifdef H5T_INTEL_I16 return H5T_INTEL_I16; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_I32")) #ifdef H5T_INTEL_I32 return H5T_INTEL_I32; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_I64")) #ifdef H5T_INTEL_I64 return H5T_INTEL_I64; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_I8")) #ifdef H5T_INTEL_I8 return H5T_INTEL_I8; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_U16")) #ifdef H5T_INTEL_U16 return H5T_INTEL_U16; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_U32")) #ifdef H5T_INTEL_U32 return H5T_INTEL_U32; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_U64")) #ifdef H5T_INTEL_U64 return H5T_INTEL_U64; #else goto not_there; #endif if (strEQ(name, "H5T_INTEL_U8")) #ifdef H5T_INTEL_U8 return H5T_INTEL_U8; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_B16")) #ifdef H5T_MIPS_B16 return H5T_MIPS_B16; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_B32")) #ifdef H5T_MIPS_B32 return H5T_MIPS_B32; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_B64")) #ifdef H5T_MIPS_B64 return H5T_MIPS_B64; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_B8")) #ifdef H5T_MIPS_B8 return H5T_MIPS_B8; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_F32")) #ifdef H5T_MIPS_F32 return H5T_MIPS_F32; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_F64")) #ifdef H5T_MIPS_F64 return H5T_MIPS_F64; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_I16")) #ifdef H5T_MIPS_I16 return H5T_MIPS_I16; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_I32")) #ifdef H5T_MIPS_I32 return H5T_MIPS_I32; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_I64")) #ifdef H5T_MIPS_I64 return H5T_MIPS_I64; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_I8")) #ifdef H5T_MIPS_I8 return H5T_MIPS_I8; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_U16")) #ifdef H5T_MIPS_U16 return H5T_MIPS_U16; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_U32")) #ifdef H5T_MIPS_U32 return H5T_MIPS_U32; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_U64")) #ifdef H5T_MIPS_U64 return H5T_MIPS_U64; #else goto not_there; #endif if (strEQ(name, "H5T_MIPS_U8")) #ifdef H5T_MIPS_U8 return 
H5T_MIPS_U8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_B16")) #ifdef H5T_NATIVE_B16 return H5T_NATIVE_B16; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_B32")) #ifdef H5T_NATIVE_B32 return H5T_NATIVE_B32; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_B64")) #ifdef H5T_NATIVE_B64 return H5T_NATIVE_B64; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_B8")) #ifdef H5T_NATIVE_B8 return H5T_NATIVE_B8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_CHAR")) #ifdef H5T_NATIVE_CHAR return H5T_NATIVE_CHAR; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_DOUBLE")) #ifdef H5T_NATIVE_DOUBLE return H5T_NATIVE_DOUBLE; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_FLOAT")) #ifdef H5T_NATIVE_FLOAT return H5T_NATIVE_FLOAT; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_HBOOL")) #ifdef H5T_NATIVE_HBOOL return H5T_NATIVE_HBOOL; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_HERR")) #ifdef H5T_NATIVE_HERR return H5T_NATIVE_HERR; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_HSIZE")) #ifdef H5T_NATIVE_HSIZE return H5T_NATIVE_HSIZE; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_HSSIZE")) #ifdef H5T_NATIVE_HSSIZE return H5T_NATIVE_HSSIZE; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT")) #ifdef H5T_NATIVE_INT return H5T_NATIVE_INT; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT16")) #ifdef H5T_NATIVE_INT16 return H5T_NATIVE_INT16; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT32")) #ifdef H5T_NATIVE_INT32 return H5T_NATIVE_INT32; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT64")) #ifdef H5T_NATIVE_INT64 return H5T_NATIVE_INT64; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT8")) #ifdef H5T_NATIVE_INT8 return H5T_NATIVE_INT8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_FAST16")) #ifdef H5T_NATIVE_INT_FAST16 return H5T_NATIVE_INT_FAST16; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_FAST32")) #ifdef H5T_NATIVE_INT_FAST32 return H5T_NATIVE_INT_FAST32; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_FAST64")) #ifdef H5T_NATIVE_INT_FAST64 return H5T_NATIVE_INT_FAST64; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_FAST8")) #ifdef H5T_NATIVE_INT_FAST8 return H5T_NATIVE_INT_FAST8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_LEAST16")) #ifdef H5T_NATIVE_INT_LEAST16 return H5T_NATIVE_INT_LEAST16; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_LEAST32")) #ifdef H5T_NATIVE_INT_LEAST32 return H5T_NATIVE_INT_LEAST32; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_LEAST64")) #ifdef H5T_NATIVE_INT_LEAST64 return H5T_NATIVE_INT_LEAST64; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_INT_LEAST8")) #ifdef H5T_NATIVE_INT_LEAST8 return H5T_NATIVE_INT_LEAST8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_LDOUBLE")) #ifdef H5T_NATIVE_LDOUBLE return H5T_NATIVE_LDOUBLE; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_LLONG")) #ifdef H5T_NATIVE_LLONG return H5T_NATIVE_LLONG; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_LONG")) #ifdef H5T_NATIVE_LONG return H5T_NATIVE_LONG; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_OPAQUE")) #ifdef H5T_NATIVE_OPAQUE return H5T_NATIVE_OPAQUE; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_SCHAR")) #ifdef H5T_NATIVE_SCHAR return H5T_NATIVE_SCHAR; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_SHORT")) #ifdef H5T_NATIVE_SHORT return 
H5T_NATIVE_SHORT; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UCHAR")) #ifdef H5T_NATIVE_UCHAR return H5T_NATIVE_UCHAR; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT")) #ifdef H5T_NATIVE_UINT return H5T_NATIVE_UINT; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT16")) #ifdef H5T_NATIVE_UINT16 return H5T_NATIVE_UINT16; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT32")) #ifdef H5T_NATIVE_UINT32 return H5T_NATIVE_UINT32; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT64")) #ifdef H5T_NATIVE_UINT64 return H5T_NATIVE_UINT64; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT8")) #ifdef H5T_NATIVE_UINT8 return H5T_NATIVE_UINT8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_FAST16")) #ifdef H5T_NATIVE_UINT_FAST16 return H5T_NATIVE_UINT_FAST16; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_FAST32")) #ifdef H5T_NATIVE_UINT_FAST32 return H5T_NATIVE_UINT_FAST32; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_FAST64")) #ifdef H5T_NATIVE_UINT_FAST64 return H5T_NATIVE_UINT_FAST64; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_FAST8")) #ifdef H5T_NATIVE_UINT_FAST8 return H5T_NATIVE_UINT_FAST8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_LEAST16")) #ifdef H5T_NATIVE_UINT_LEAST16 return H5T_NATIVE_UINT_LEAST16; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_LEAST32")) #ifdef H5T_NATIVE_UINT_LEAST32 return H5T_NATIVE_UINT_LEAST32; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_LEAST64")) #ifdef H5T_NATIVE_UINT_LEAST64 return H5T_NATIVE_UINT_LEAST64; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_UINT_LEAST8")) #ifdef H5T_NATIVE_UINT_LEAST8 return H5T_NATIVE_UINT_LEAST8; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_ULLONG")) #ifdef H5T_NATIVE_ULLONG return H5T_NATIVE_ULLONG; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_ULONG")) #ifdef H5T_NATIVE_ULONG return H5T_NATIVE_ULONG; #else goto not_there; #endif if (strEQ(name, "H5T_NATIVE_USHORT")) #ifdef H5T_NATIVE_USHORT return H5T_NATIVE_USHORT; #else goto not_there; #endif if (strEQ(name, "H5T_STRING")) return H5T_STRING; /* This was manually enter to get the enumerated type */ if (strEQ(name, "H5T_STD_B16BE")) #ifdef H5T_STD_B16BE return H5T_STD_B16BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_B16LE")) #ifdef H5T_STD_B16LE return H5T_STD_B16LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_B32BE")) #ifdef H5T_STD_B32BE return H5T_STD_B32BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_B32LE")) #ifdef H5T_STD_B32LE return H5T_STD_B32LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_B64BE")) #ifdef H5T_STD_B64BE return H5T_STD_B64BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_B64LE")) #ifdef H5T_STD_B64LE return H5T_STD_B64LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_B8BE")) #ifdef H5T_STD_B8BE return H5T_STD_B8BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_B8LE")) #ifdef H5T_STD_B8LE return H5T_STD_B8LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_I16BE")) #ifdef H5T_STD_I16BE return H5T_STD_I16BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_I16LE")) #ifdef H5T_STD_I16LE return H5T_STD_I16LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_I32BE")) #ifdef H5T_STD_I32BE return H5T_STD_I32BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_I32LE")) #ifdef H5T_STD_I32LE return H5T_STD_I32LE; #else goto not_there; #endif if (strEQ(name, 
"H5T_STD_I64BE")) #ifdef H5T_STD_I64BE return H5T_STD_I64BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_I64LE")) #ifdef H5T_STD_I64LE return H5T_STD_I64LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_I8BE")) #ifdef H5T_STD_I8BE return H5T_STD_I8BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_I8LE")) #ifdef H5T_STD_I8LE return H5T_STD_I8LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_REF_DSETREG")) #ifdef H5T_STD_REF_DSETREG return H5T_STD_REF_DSETREG; #else goto not_there; #endif if (strEQ(name, "H5T_STD_REF_OBJ")) #ifdef H5T_STD_REF_OBJ return H5T_STD_REF_OBJ; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U16BE")) #ifdef H5T_STD_U16BE return H5T_STD_U16BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U16LE")) #ifdef H5T_STD_U16LE return H5T_STD_U16LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U32BE")) #ifdef H5T_STD_U32BE return H5T_STD_U32BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U32LE")) #ifdef H5T_STD_U32LE return H5T_STD_U32LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U64BE")) #ifdef H5T_STD_U64BE return H5T_STD_U64BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U64LE")) #ifdef H5T_STD_U64LE return H5T_STD_U64LE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U8BE")) #ifdef H5T_STD_U8BE return H5T_STD_U8BE; #else goto not_there; #endif if (strEQ(name, "H5T_STD_U8LE")) #ifdef H5T_STD_U8LE return H5T_STD_U8LE; #else goto not_there; #endif if (strEQ(name, "H5T_UNIX_D32BE")) #ifdef H5T_UNIX_D32BE return H5T_UNIX_D32BE; #else goto not_there; #endif if (strEQ(name, "H5T_UNIX_D32LE")) #ifdef H5T_UNIX_D32LE return H5T_UNIX_D32LE; #else goto not_there; #endif if (strEQ(name, "H5T_UNIX_D64BE")) #ifdef H5T_UNIX_D64BE return H5T_UNIX_D64BE; #else goto not_there; #endif if (strEQ(name, "H5T_UNIX_D64LE")) #ifdef H5T_UNIX_D64LE return H5T_UNIX_D64LE; #else goto not_there; #endif if (strEQ(name, "H5T_REFERENCE")) return H5T_REFERENCE; /* This was manually enter to get the enumerated type */ if (strEQ(name, "H5R_DATASET_REGION")) return H5R_DATASET_REGION; /* This was manually enter to get the enumerated type */ break; case 'I': break; case 'J': break; case 'K': break; case 'L': break; case 'M': break; case 'N': break; case 'O': break; case 'P': break; case 'Q': break; case 'R': break; case 'S': break; case 'T': break; case 'U': break; case 'V': break; case 'W': break; case 'X': break; case 'Y': break; case 'Z': break; } errno = EINVAL; return 0; not_there: errno = ENOENT; return 0; } /* ############################################################# */ EOXS pp_addxs('',<<'EOXS'); hid_t constant(name,arg) char * name int arg EOXS ############### Add Autoload Routine for the hdf5 constants ########## pp_addpm( {At => Top}, <<'EOPM'); use PDL::Lite; use PDL::Char; # Require needed here becuase dataset uses some of the XS # calls that are defined in PDL::IO::HDF5 (like PDL::IO::HDF5::H5T_NATIVE_CHAR() ) # Doing a 'use' would make use of the calls before they are defined. # require PDL::IO::HDF5::Group; require PDL::IO::HDF5::Dataset; use Carp; sub AUTOLOAD { # This AUTOLOAD is used to 'autoload' constants from the constant() # XS function. If a constant is not found then control is passed # to the AUTOLOAD in AutoLoader. my $constname; ($constname = $AUTOLOAD) =~ s/.*:://; croak "& not defined" if $constname eq 'constant'; my $val = constant($constname, @_ ? $_[0] : 0); if ($! != 0) { if ($! 
=~ /Invalid/) { $AutoLoader::AUTOLOAD = $AUTOLOAD; goto &AutoLoader::AUTOLOAD; } else { croak "Your vendor has not defined hdf5 macro $constname"; } } *$AUTOLOAD = sub { $val }; goto &$AUTOLOAD; } EOPM # Code that implements the dataset count and dataset name functions pp_addxs('',<<'EOXS'); # Code to get the number of datasets in a group int H5GgetDatasetCount( groupID, groupName ) hid_t groupID char * groupName CODE: int dsetCount = 0; H5Giterate(groupID, groupName, NULL, incIfDset, &dsetCount); RETVAL = dsetCount; OUTPUT: RETVAL # Code to get the names of the datasets in a group void H5GgetDatasetNames( groupID, groupName ) hid_t groupID char * groupName PREINIT: int dsetCount = 0; char ** datasetNames; /* Array of dataset names */ char ** datasetPtr; /* temp pointer to datasetNames */ int i; /* Index variable */ PPCODE: /* Get the number of datasets */ H5Giterate(groupID, groupName, NULL, incIfDset, &dsetCount); if( dsetCount > 0){ /* Datasets found */ /* Allocate Space for array of strings */ datasetNames = (char **) malloc( dsetCount * sizeof(char *)); if( datasetNames == NULL){ printf("PDL::IO::HDF5; out of Memory in H5GgetDatasetNames\n"); exit(1); } datasetPtr = datasetNames; H5Giterate(groupID, groupName, NULL, getName_if_Dset, &datasetPtr); EXTEND(SP, dsetCount); /* Make room for results on the return stack */ for( i = 0; i< dsetCount; i++){ /* Push Names onto return stack */ /* printf("Name found = '%s'\n",datasetNames[i]); */ PUSHs(sv_2mortal(newSVpv(datasetNames[i],0))); free(datasetNames[i]); /* Release Memory */ } free(datasetNames); } # Code to get the number of groups in a group/file int H5GgetGroupCount( groupID, groupName ) hid_t groupID char * groupName CODE: int groupCount = 0; H5Giterate(groupID, groupName, NULL, incIfGroup, &groupCount); RETVAL = groupCount; OUTPUT: RETVAL # Code to get the names of the groups in a group/file void H5GgetGroupNames( groupID, groupName ) hid_t groupID char * groupName PREINIT: int groupCount = 0; char ** groupNames; /* Array of group names */ char ** groupPtr; /* temp pointer to groupnames */ int i; /* Index variable */ PPCODE: /* Get the number of datasets */ H5Giterate(groupID, groupName, NULL, incIfGroup, &groupCount); if( groupCount > 0){ /* Groups found */ /* Allocate Space for array of strings */ groupNames = (char **) malloc( groupCount * sizeof(char *)); if( groupNames == NULL){ printf("PDL::IO::HDF5; out of Memory in H5GgetGroupNames\n"); exit(1); } groupPtr = groupNames; H5Giterate(groupID, groupName, NULL, getName_if_Group, &groupPtr); EXTEND(SP, groupCount); /* Make room for results on the return stack */ for( i = 0; i< groupCount; i++){ /* Push Names onto return stack */ /* printf("Name found = '%s'\n",datasetNames[i]); */ PUSHs(sv_2mortal(newSVpv(groupNames[i],0))); free(groupNames[i]); /* Release Memory */ } free(groupNames); } # Code to get the maximum length of strings in a ragged character array int findMaxVarLenSize( buf, numelem ) I8 * buf int numelem CODE: int i; int maxStrSize; int len; char** rdata; /* Convert input generic pointer to character array */ rdata = (char **) buf; /* Find max string length */ maxStrSize = 0; for(i=0; i maxStrSize ) maxStrSize = len; } } /* end for */ RETVAL = maxStrSize; OUTPUT: RETVAL # Function to copy the variable length strings from an input buffer varlenbuff to a supplied # fixed-length string buffer fixedbuf. # Number of elements (numelem) and maximum length of any variable length string (maxVarlensize) # must be supplied. 
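# Illustrative example (a sketch of the layout this routine assumes, not taken
# from the HDF5 docs): with numelem = 3 and input strings "ab", "abcd" and "a",
# maxVarlensize would be 4 (as returned by findMaxVarLenSize above), so each
# output slot is maxVarlensize+1 = 5 bytes and the strings are copied to
# offsets 0, 5 and 10 of fixedbuf, each terminated by a null byte.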
# Output is the number of elements converted int copyVarLenToFixed( varlenbuff, fixedbuf, numelem, maxVarlensize ) I8 * varlenbuff I8 * fixedbuf int numelem int maxVarlensize CODE: int fixlenbufferInc; /* size of strings, including the null byte */ int i; char** rdata; char* tempPtr; fixlenbufferInc = maxVarlensize + 1; /* size of strings, including the null byte */ /* Convert input generic pointer to character array */ rdata = (char **) varlenbuff; tempPtr = (char*) fixedbuf; /* Copy variable length strings to fixed length strings */ for(i=0; iSvPDLV($arg)->data) T_PDLS $var = (short *)(PDL->SvPDLV($arg)->data) T_PDLUS $var = (unsigned short *)(PDL->SvPDLV($arg)->data) T_PDLL $var = (long *)(PDL->SvPDLV($arg)->data) T_PDLLL $var = (long long *)(PDL->SvPDLV($arg)->data) T_PDLF $var = (float *)(PDL->SvPDLV($arg)->data) T_PDLD $var = (double *)(PDL->SvPDLV($arg)->data) ############################################################################# OUTPUT # T_PVI typemap copies the data in $var to $arg, up to the # length of length($arg). This differs from the T_PV typemap # where the data is copied up to a Null char (string terminator) # T_PVI's will be used for getting raw data blocks out of the C-code T_PVI sv_setpvn((SV*)$arg, (char *) $var, SvCUR($arg)); T_PDLB PDL->SetSV_PDL($arg,$var); T_PDLS PDL->SetSV_PDL($arg,$var); T_PDLUS PDL->SetSV_PDL($arg,$var); T_PDLL PDL->SetSV_PDL($arg,$var); T_PDLLL PDL->SetSV_PDL($arg,$var); T_PDLF PDL->SetSV_PDL($arg,$var); T_PDLD PDL->SetSV_PDL($arg,$var); PDL-IO-HDF5-0.762/MANIFEST.SKIP0000644000175000017500000000217514701373256015001 0ustar osboxesosboxes#!start included ExtUtils/MANIFEST.SKIP # Avoid version control files. \bRCS\b \bCVS\b \bSCCS\b ,v$ \B\.svn\b \B\.git\b \B\.gitignore\b \b_darcs\b \B\.cvsignore$ # Avoid VMS specific MakeMaker generated files \bDescrip.MMS$ \bDESCRIP.MMS$ \bdescrip.mms$ # Avoid Makemaker generated and utility files. \bMANIFEST\.bak \bMakefile$ \bblib/ \bMakeMaker-\d \bpm_to_blib\.ts$ \bpm_to_blib$ \bblibdirs\.ts$ # 6.18 through 6.25 generated this \b_eumm/ # 7.05_05 and above # Avoid Module::Build generated and utility files. \bBuild$ \b_build/ \bBuild.bat$ \bBuild.COM$ \bBUILD.COM$ \bbuild.com$ # and Module::Build::Tiny generated files \b_build_params$ # Avoid temp and backup files. ~$ \.old$ \#$ \b\.# \.bak$ \.tmp$ \.# \.rej$ \..*\.sw.?$ # Avoid OS-specific files/dirs # Mac OSX metadata \B\.DS_Store # Mac OSX SMB mount metadata files \B\._ # Avoid Devel::Cover and Devel::CoverX::Covered files. \bcover_db\b \bcovered\b # Avoid prove files \B\.prove$ # Avoid MYMETA files ^MYMETA\. #!end included ExtUtils/MANIFEST.SKIP ^HDF5\..* ^xt/ # Avoid CI configuration/runtime files \B\.github/workflows\b \B\.github/actions\b PDL-IO-HDF5-0.762/t/0000755000175000017500000000000015004706725013337 5ustar osboxesosboxesPDL-IO-HDF5-0.762/t/total.t0000644000175000017500000001427214741020337014650 0ustar osboxesosboxes# Script to test the PDL::IO::HDF5 objects together in the # way they would normally be used # # i.e. 
the way they would normally be used as described # in the PDL::IO::HDF5 synopsis use strict; use warnings; use PDL; use PDL::Char; use PDL::IO::HDF5; use Test::More; # New File Check: my $filename = "total.hdf5"; # get rid of filename if it already exists unlink $filename if( -e $filename); ok(my $hdfobj = new PDL::IO::HDF5($filename)); # Set attribute for file (root group) ok($hdfobj->attrSet( 'attr1' => 'dudeman', 'attr2' => 'What??')); # Try Setting attr for an existing attr ok($hdfobj->attrSet( 'attr1' => 'dudeman23')); # Add a attribute and then delete it ok($hdfobj->attrSet( 'dummyAttr' => 'dummyman', 'dummyAttr2' => 'dummyman')); ok($hdfobj->attrDel( 'dummyAttr', 'dummyAttr2' )); # Get list of attributes my @attrs = $hdfobj->attrs; is(join(",",sort @attrs), 'attr1,attr2' ); # Get a list of attribute values my @attrValues = $hdfobj->attrGet(sort @attrs); is(join(",",@attrValues), 'dudeman23,What??' ); ############################################## # Create a dataset in the root group my $dataset = $hdfobj->dataset('rootdataset'); my $pdl = sequence(5,4); ok($dataset->set($pdl, unlimited => 1) ); # Create String dataset using PDL::Char my $dataset2 = $hdfobj->dataset('charData'); my $pdlChar = new PDL::Char( [ ["abccc", "def", "ghi"],["jkl", "mno", 'pqr'] ] ); ok($dataset2->set($pdlChar, unlimited => 1)); my $pdl2 = $dataset->get; ok((($pdl - $pdl2)->sum) < .001 ); my @dims = $dataset->dims; is( join(", ",@dims), '5, 4' ); # Get a list of datasets (should be two) my @datasets = $hdfobj->datasets; is( scalar(@datasets), 2 ); ############################################# my $group = $hdfobj->group("mygroup"); my $subgroup = $group->group("subgroup"); ### Try a non-deault data-set type (float) #### # Create a dataset in the subgroup $dataset = $subgroup->dataset('my dataset'); $pdl = sequence(5,4)->float; # Try a non-default data type ok( $dataset->set($pdl, unlimited => 1) ); $pdl2 = $dataset->get; ok( (($pdl - $pdl2)->sum) < .001 ); # Check for the PDL returned being a float is( $pdl->type, 'float' ); # Get a hyperslab $pdl = $dataset->get(pdl([0,0]), pdl([4,0])); # Get the first vector of the PDL # Check to see if the dims are as expected. my @pdlDims = $pdl->dims; is_deeply( \@pdlDims, [5, 1] ); ### Try a non-default data-set type (int/long) #### # Create a dataset in the subgroup $dataset = $subgroup->dataset('my dataset2'); $pdl = sequence(5,4)->long; # Try a non-default data type ok( $dataset->set($pdl, unlimited => 1) ); $pdl2 = $dataset->get; ok( (($pdl - $pdl2)->sum) < .001 ); # Check for the PDL returned being a int/long is( $pdl->type, 'long' ); ################ Set Attributes at the Dataset Level ############### # Set attribute for group ok( $dataset->attrSet( 'attr1' => 'DSdudeman', 'attr2' => 'DSWhat??')); # Try Setting attr for an existing attr ok($dataset->attrSet( 'attr1' => 'DSdudeman23')); # Add a attribute and then delete it ok( $dataset->attrSet( 'dummyAttr' => 'dummyman', 'dummyAttr2' => 'dummyman')); ok( $dataset->attrDel( 'dummyAttr', 'dummyAttr2' )); # Get list of attributes @attrs = $dataset->attrs; is( join(",",sort @attrs), 'attr1,attr2' ); # Get a list of attribute values @attrValues = $dataset->attrGet(sort @attrs); is( join(",",@attrValues), 'DSdudeman23,DSWhat??' 
); ################ Set Attributes at the Group Level ############### # Set attribute for group ok( $group->attrSet( 'attr1' => 'dudeman', 'attr2' => 'What??')); # Try Setting attr for an existing attr ok($group->attrSet( 'attr1' => 'dudeman23')); # Add a attribute and then delete it ok( $group->attrSet( 'dummyAttr' => 'dummyman', 'dummyAttr2' => 'dummyman')); ok( $group->attrDel( 'dummyAttr', 'dummyAttr2' )); # Get list of attributes @attrs = $group->attrs; is( join(",",sort @attrs), 'attr1,attr2' ); # Get a list of datasets (should be none) @datasets = $group->datasets; is( scalar(@datasets), 0 ); # Create another group my $group2 = $hdfobj->group("dude2"); # Get a list of groups in the root group my @groups = $hdfobj->groups; is( join(",",sort @groups), 'dude2,mygroup' ); # Get a list of groups in group2 (should be none) @groups = $group2->groups; is( scalar(@groups), 0 ); undef $hdfobj; { # Script to test the attribute index functionality of the PDL::IO::HDF5 Class # New File Check: my $filename = "total.hdf5"; ok(my $hdfobj = PDL::IO::HDF5->new($filename)); # It is normally a no-no to call a internal method, but we # are just testing here: $hdfobj->_buildAttrIndex; my $baseline = { '/' => { attr1 => 'dudeman23', attr2 => 'What??', }, '/dude2' => { attr1 => 'dudeman23', attr2 => 'What??', }, '/mygroup' => { attr1 => 'dudeman23', attr2 => 'What??', }, '/mygroup/subgroup' => { attr1 => 'dudeman23', attr2 => 'What??', }, }; is_deeply($hdfobj->{attrIndex}, $baseline); my @values = $hdfobj->allAttrValues('attr1'); $baseline = [ 'dudeman23', 'dudeman23', 'dudeman23', 'dudeman23' ]; is_deeply \@values, $baseline; @values = $hdfobj->allAttrValues('attr1','attr2'); $baseline = [ [ 'dudeman23', 'What??', ], [ 'dudeman23', 'What??', ], [ 'dudeman23', 'What??', ], [ 'dudeman23', 'What??', ] ]; is_deeply \@values, $baseline; my @names = $hdfobj->allAttrNames; is_deeply \@names, [ 'attr1', 'attr2', ]; # Test building the groupIndex $hdfobj->_buildGroupIndex('attr1','attr2'); $hdfobj->_buildGroupIndex('attr2'); $hdfobj->_buildGroupIndex('attr1','attr3'); $baseline = { 'attr1attr2' => { 'dudeman23What??' => [ '/', '/dude2', '/mygroup', '/mygroup/subgroup' ] }, 'attr1attr3' => { 'dudeman23_undef_' => [ '/', '/dude2', '/mygroup', '/mygroup/subgroup' ] }, 'attr2' => { 'What??' 
=> [ '/', '/dude2', '/mygroup', '/mygroup/subgroup' ] } }; my $result = $hdfobj->{groupIndex}; is_deeply $result, $baseline or diag explain $result; my @groups = $hdfobj->getGroupsByAttr( 'attr1' => 'dudeman23', 'attr2' => 'What??'); $baseline = [ '/', '/dude2', '/mygroup', '/mygroup/subgroup', ]; is_deeply \@groups, $baseline; # clean up file } unlink $filename if( -e $filename); done_testing; PDL-IO-HDF5-0.762/t/xData.t0000644000175000017500000000301115004706605014555 0ustar osboxesosboxes# Test case for HDF5 extensible datasets use strict; use warnings; use PDL; use PDL::IO::HDF5; use Test::More; use File::Spec::Functions; my $filename = "xData.hdf5"; unlink $filename if -e $filename; # get rid of file if it already exists my $hdf5 = new PDL::IO::HDF5($filename); my $group=$hdf5->group('group1'); # Store an extensible dataset my $dataset=$group->dataset('xdata'); my $data1 = pdl [ 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0 ]; $dataset->set($data1, unlimited => 1); # read the dataset my $xdata = $group->dataset("xdata")->get(); my $expected = '[2 3 4 5 6 7 8 9 10 11 12]'; is( "$xdata", $expected); # write more data my $data2 = pdl [ 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0 ]; $dataset->set($data2, unlimited => 1); # read the dataset $xdata = $group->dataset("xdata")->get(); $expected = '[2 3 4 5 6 7 8 9 10 11 12 13 14]'; is( "$xdata", $expected); unlink $filename if -e $filename; # clean up file $hdf5 = PDL::IO::HDF5->new(catfile(qw(t sbyte.hdf5))); $dataset = $hdf5->dataset('data2'); my $got = $dataset->get; $expected = pdl(-127, 127); # deliberately type double ok +(($got - $expected)->sum) < .001 or diag "got=$got\nexpected=$expected"; unlink $filename if -e $filename; # clean up file $hdf5 = PDL::IO::HDF5->new($filename); $dataset = $hdf5->dataset('data'); $dataset->set(pdl(42), unlimited => 1); $got = $dataset->get; ok +(($got - 42)->sum) < .001 or diag "got=$got\nexpected=$expected"; unlink $filename if -e $filename; # clean up file done_testing; PDL-IO-HDF5-0.762/t/file.t0000644000175000017500000000066014101574542014442 0ustar osboxesosboxesuse strict; use warnings; use PDL::IO::HDF5; use Test::More; # New File Check: my $filename = "newFile.hdf5"; # get rid of filename if it already exists unlink $filename if( -e $filename); ok(new PDL::IO::HDF5($filename)); #Existing File for Writing Check ok(new PDL::IO::HDF5(">".$filename)); #Existing File for Reading Check ok(new PDL::IO::HDF5($filename)); # clean up file unlink $filename if( -e $filename); done_testing; PDL-IO-HDF5-0.762/t/vlenString.t0000644000175000017500000000146614701377432015670 0ustar osboxesosboxes# Test case for reading variable-length string arrays. 
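# (The strings are read from the varlen.hdf5 file shipped at the top level of
# this distribution, so this test performs no writes of its own.)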
# These are converted to fixed-length PDL::Char types when read use strict; use warnings; use PDL; use PDL::Char; use PDL::IO::HDF5; use Test::More; # New File Check: my $filename = "varlen.hdf5"; ok(my $h5obj = new PDL::IO::HDF5(">".$filename)); my $dataset = $h5obj->dataset("Dataset"); my $pdl = $dataset->get(); my @dims = $pdl->dims; is(join(", ", @dims), "93, 4"); is($pdl->atstr(2), "Now we are engaged in a great civil war,"); ###### Now check variable-length string attribute array ### ($pdl) = $dataset->attrGet('Attr1'); @dims = $pdl->dims; is(join(", ", @dims), "14, 4"); is($pdl->atstr(2), "Attr String 3"); ###### Now check variable-length string attribute scalar ### ($pdl) = $dataset->attrGet('attr2'); is($pdl, "dude"); done_testing; PDL-IO-HDF5-0.762/t/sbyte.hdf50000644000175000017500000000665214701423027015240 0ustar osboxesosboxes‰HDF  ÿÿÿÿÿÿÿÿª ÿÿÿÿÿÿÿÿ`ˆ¨ˆ¨TREEÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ0HEAPXÈdata2HÿÿÿÿÿÿÿÿxÖg€SNOD TREEÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ¨ PDL-IO-HDF5-0.762/t/attribPDL.t0000644000175000017500000000613014701416634015351 0ustar osboxesosboxes# Test case for HDF5 attributes that are pdls # This is a new feature as-of version 0.64 use strict; use warnings; use PDL; use PDL::Char; use PDL::IO::HDF5; use Config; my $have_LL = $Config{ivsize} == 4 ? 0 : 1; use Test::More; my $filename = "attrib.hdf5"; # get rid of filename if it already exists unlink $filename if( -e $filename); my $hdf5 = new PDL::IO::HDF5($filename); # Create pdls to store: my $pchar = PDL::Char->new( [['abc', 'def', 'ghi'],['jkl', 'mno', 'pqr']] ); my $bt=pdl([[1.2,1.3,1.4],[1.5,1.6,1.7],[1.8,1.9,2.0]]); my $group=$hdf5->group('Radiometric information'); # Store a dataset my $dataset=$group->dataset('SP_BT'); $dataset->set($bt, unlimited => 1); # Store a scalar and pdl attribute $dataset->attrSet('UNITS'=>'K'); $dataset->attrSet('NUM_COL'=>pdl(long,[[1,2,3],[4,5,6]])); $dataset->attrSet('NUM_COLLONG'=>pdl(longlong,[[123456789123456784,2,3],[4,5,6]])) if $have_LL; $dataset->attrSet('NUM_ROW'=>$pchar); $dataset->attrSet('SCALING'=>'pepe'); $dataset->attrSet('OFFSET'=>pdl(double,[0.0074])); $dataset->attrSet('ID'=>pdl(long,87)); $dataset->attrSet('IDLONG'=>pdl(longlong,123456789123456784)) if $have_LL; $dataset->attrSet('TEMPERATURE'=>pdl(double,3.1415927)); # Set group attribute $group->attrSet('GroupPDLAttr'=>pdl(long,[[1,2,3],[4,5,6]])); ######## Now Read HDF5 file ##### my $hdf2= new PDL::IO::HDF5($filename); my $group2=$hdf2->group('Radiometric information'); my $dataset2=$group2->dataset('SP_BT'); my $expected; $expected = pdl ' [ [1.2 1.3 1.4] [1.5 1.6 1.7] [1.8 1.9 2] ] '; my $bt2=$dataset2->get(); ok all(approx($bt2, $expected)) or diag "got: $bt2"; $expected = 'K'; my ($units)=$dataset2->attrGet('UNITS'); is($units, $expected); $expected = pdl ' [ [1 2 3] [4 5 6] ] '; my ($numcol)=$dataset2->attrGet('NUM_COL'); ok all(approx($numcol, $expected)) or diag "got: $numcol"; isa_ok($numcol, 'PDL'); if($have_LL) { $expected = '123456789123456784 2 3 4 5 6'; my ($numcollong)=$dataset2->attrGet('NUM_COLLONG'); is(sprintf("%18i %18i %18i %18i %18i %18i",$numcollong->list()), $expected); } $expected = "[ [ 'abc' 'def' 'ghi' ] [ 'jkl' 'mno' 'pqr' ] ] "; my ($numrow)=$dataset2->attrGet('NUM_ROW'); is("$numrow", $expected); $expected = 'pepe'; my ($scaling)=$dataset2->attrGet('SCALING'); is($scaling, $expected); $expected = pdl '[0.0074]'; my ($offset)=$dataset2->attrGet('OFFSET'); ok all(approx($offset, $expected)) or diag "got: $offset"; $expected = '87'; my ($id)=$dataset2->attrGet('ID'); is("$id", $expected); if($have_LL) { 
$expected = '123456789123456784'; my ($idlong)=$dataset2->attrGet('IDLONG'); is("$idlong", $expected); } $expected = pdl '3.1415927'; my ($temperature)=$dataset2->attrGet('TEMPERATURE'); ok all(approx($temperature, $expected)) or diag "got: $temperature"; # Check Group PDL Attribute $expected = pdl ' [ [1 2 3] [4 5 6] ] '; my ($numcol2)=$group2->attrGet('GroupPDLAttr'); ok all(approx($numcol2, $expected)) or diag "got: $numcol2"; isa_ok($numcol2, 'PDL'); # clean up file unlink $filename if( -e $filename); done_testing; PDL-IO-HDF5-0.762/t/reference.t0000644000175000017500000000163214101575610015456 0ustar osboxesosboxes# Test case for HDF5 references use strict; use warnings; use PDL; use PDL::IO::HDF5; use Test::More; my $filename = "reference.hdf5"; # get rid of filename if it already exists unlink $filename if( -e $filename); my $hdf5 = new PDL::IO::HDF5($filename); my $group=$hdf5->group('group1'); # Store a dataset my $dataset=$hdf5->dataset('data1'); my $data = pdl [ 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0 ]; $dataset->set($data, unlimited => 1); # create the reference my @regionStart = ( 3 ); my @regionCount = ( 3 ); $hdf5->reference($dataset,"myRef",\@regionStart,\@regionCount); my $expected = 'data1, myRef'; my @datasets1=$hdf5->datasets(); is(join(', ',@datasets1), $expected); # dereference the dataset my $ref = $hdf5->dataset("myRef"); my $dereferenced = $ref->get(); $expected = '[5 6 7]'; is("$dereferenced", $expected); # clean up file unlink $filename if( -e $filename); done_testing; PDL-IO-HDF5-0.762/t/group.t0000644000175000017500000000473014701420073014654 0ustar osboxesosboxes# Script to test the group/dataset object separately. # i.e. not the way they would normally be used as described # in the PDL::IO::HDF5 synopsis use strict; use warnings; use PDL; use PDL::IO::HDF5; use PDL::IO::HDF5::Group; use PDL::IO::HDF5::Dataset; use Test::More; # New File Check: my $filename = "group.hdf5"; # get rid of filename if it already exists unlink $filename if( -e $filename); ok(my $hdfobj = new PDL::IO::HDF5($filename)); my $group = new PDL::IO::HDF5::Group( name => '/dude', parent => $hdfobj, fileObj => $hdfobj); # Set attribute for group ok($group->attrSet( 'attr1' => 'dudeman', 'attr2' => 'What??')); # Try Setting attr for an existing attr ok($group->attrSet( 'attr1' => 'dudeman23')); # Add a attribute and then delete it ok( $group->attrSet( 'dummyAttr' => 'dummyman', 'dummyAttr2' => 'dummyman')); ok( $group->attrDel( 'dummyAttr', 'dummyAttr2' )); # Get list of attributes my @attrs = $group->attrs; is( join(",",sort @attrs), 'attr1,attr2' ); # Get a list of attribute values my @attrValues = $group->attrGet(sort @attrs); is( join(",",@attrValues), 'dudeman23,What??' 
); # Get a list of datasets (should be none) my @datasets = $group->datasets; is( scalar(@datasets), 0 ); # Create another group my $group2 = new PDL::IO::HDF5::Group( 'name'=> '/dude2', parent => $hdfobj, fileObj => $hdfobj); # open the root group my $rootGroup = new PDL::IO::HDF5::Group( 'name'=> '/', parent => $hdfobj, fileObj => $hdfobj); # Get a list of groups my @groups = $rootGroup->groups; is( join(",",sort @groups), 'dude,dude2' ); # Get a list of groups in group2 (should be none) @groups = $group2->groups; is( scalar(@groups), 0 ); # Create a dataset in the root group my $dataset = new PDL::IO::HDF5::Dataset( 'name'=> 'data1', parent => $rootGroup, fileObj => $hdfobj); my $pdl = sequence(5,4); ok( $dataset->set($pdl, unlimited => 1) ); my $pdl2 = $dataset->get; ok( (($pdl - $pdl2)->sum) < .001 ); # Set attribute for dataset ok( $dataset->attrSet( 'attr1' => 'dataset dudeman', 'attr2' => 'Huh What??')); # Try Setting attr for an existing attr ok($dataset->attrSet( 'attr1' => 'dataset dudeman23')); # Add a attribute and then delete it ok( $dataset->attrSet( 'dummyAttr' => 'dummyman', 'dummyAttr2' => 'dummyman')); ok( $dataset->attrDel( 'dummyAttr', 'dummyAttr2' )); # Get list of attributes @attrs = $dataset->attrs; is( join(",",sort @attrs), 'attr1,attr2' ); # clean up file unlink $filename if( -e $filename); done_testing; PDL-IO-HDF5-0.762/t/unlink.t0000644000175000017500000000146714101576461015033 0ustar osboxesosboxes# Test case for HDF5 unlink function use strict; use warnings; use PDL; use PDL::IO::HDF5; use Test::More; my $filename = "unlink.hdf5"; # get rid of filename if it already exists unlink $filename if( -e $filename); my $hdf5 = new PDL::IO::HDF5($filename); my $group=$hdf5->group('group1'); # Store a dataset my $dataset=$group->dataset('data1'); my $data = pdl [ 2.0, 3.0, 4.0 ]; $dataset->set($data, unlimited => 1); my $expected = 'data1'; my @datasets1=$group->datasets(); #print "datasets '".join(", ",@datasets1)."'\n"; is(join(', ',@datasets1), $expected); # Remove the dataset. $group->unlink('data1'); $expected = ''; my @datasets2=$group->datasets(); #print "datasets '".join(", ",@datasets2)."'\n"; is(join(', ',@datasets2), $expected); # clean up file unlink $filename if( -e $filename); done_testing; PDL-IO-HDF5-0.762/COPYRIGHT0000644000175000017500000000060613301073635014364 0ustar osboxesosboxesFor the PDL version of the HDF5 interface: Copyright (c) 2014 Chris Marshall, Andrew Benson. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. Copyright (c) 2001 John Cerney. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. PDL-IO-HDF5-0.762/GENERATED/0000755000175000017500000000000015004706725014372 5ustar osboxesosboxesPDL-IO-HDF5-0.762/GENERATED/PDL/0000755000175000017500000000000015004706725015011 5ustar osboxesosboxesPDL-IO-HDF5-0.762/GENERATED/PDL/IO/0000755000175000017500000000000015004706725015320 5ustar osboxesosboxesPDL-IO-HDF5-0.762/GENERATED/PDL/IO/HDF5.pm0000644000175000017500000005333515004706725016355 0ustar osboxesosboxes# # GENERATED WITH PDL::PP from hdf5.pd! Don't modify! 
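# (This copy is regenerated from hdf5.pd by the pdlpp_mkgen step run during
# 'make dist' in Makefile.PL, so any changes belong in hdf5.pd.)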
# package PDL::IO::HDF5; our @EXPORT_OK = qw( ); our %EXPORT_TAGS = (Func=>\@EXPORT_OK); use PDL::Core qw/ barf/; use PDL::Exporter; use DynaLoader; our $VERSION = '0.762'; our @ISA = ( 'PDL::Exporter','DynaLoader' ); push @PDL::Core::PP, __PACKAGE__; bootstrap PDL::IO::HDF5 $VERSION; #line 2513 "hdf5.pd" use PDL::Lite; use PDL::Char; # Require needed here becuase dataset uses some of the XS # calls that are defined in PDL::IO::HDF5 (like PDL::IO::HDF5::H5T_NATIVE_CHAR() ) # Doing a 'use' would make use of the calls before they are defined. # require PDL::IO::HDF5::Group; require PDL::IO::HDF5::Dataset; use Carp; sub AUTOLOAD { # This AUTOLOAD is used to 'autoload' constants from the constant() # XS function. If a constant is not found then control is passed # to the AUTOLOAD in AutoLoader. my $constname; ($constname = $AUTOLOAD) =~ s/.*:://; croak "& not defined" if $constname eq 'constant'; my $val = constant($constname, @_ ? $_[0] : 0); if ($! != 0) { if ($! =~ /Invalid/) { $AutoLoader::AUTOLOAD = $AUTOLOAD; goto &AutoLoader::AUTOLOAD; } else { croak "Your vendor has not defined hdf5 macro $constname"; } } *$AUTOLOAD = sub { $val }; goto &$AUTOLOAD; } #line 22 "hdf5.pd" =head1 NAME PDL::IO::HDF5 - PDL Interface to the HDF5 Data Format. =head1 DESCRIPTION This package provides an object-oriented interface for Ls to the HDF5 data-format. Information on the HDF5 Format can be found at the HDF Group's web site at http://www.hdfgroup.org . =head2 LIMITATIONS Currently this interface only provides a subset of the total HDF5 library's capability. =over 1 =item * Only HDF5 Simple datatypes are supported. No HDF5 Compound datatypes are supported since PDL doesn't support them. =item * Only HDF5 Simple dataspaces are supported. =back =head1 SYNOPSIS use PDL::IO::HDF5; # Files ####### my $newfile = new PDL::IO::HDF5("newfile.hdf"); # create new hdf5 or open existing file. my $attrValue = $existingFile->attrGet('AttrName'); # Get attribute value for file $existingFile->attSet('AttrName' => 'AttrValue'); # Set attribute value(s) for file # Groups ###### my $group = $newfile->group("/mygroup"); # create a new or open existing group my @groups = $existingFile->groups; # get a list of all the groups at the root '/' # level. my @groups = $group->groups; # get a list of all the groups at the "mygroup" # level. my $group2 = $group->group('newgroup'); # Create/open a new group in existing group "mygroup" $group->unlink('datasetName'); # Delete a dataset from a group $group->reference($dataset,'refName',\@start,\@count); # Create a scalar reference to a subregion of a # dataset, with specified start index and count. my $attrValue = $group->attrGet('AttrName'); # Get attribute value for a group $group->attrSet('AttrName' => 'AttrValue'); # Set attribute value(s) for a group $group->attrDel('AttrName1', 'AttrName2'); # Delete attribute(s) for a group @attrs = $group->attrs; # Get List of attributes for a group # Data Sets ######## my $dataset = $group->dataset( 'datasetName'); # create a new or open existing dataset # in an existing group my $dataset = $newfile->dataset( 'datasetName'); # create a new or open existing dataset # in the root group of a file my $dataset2 = $newfile->dataset( 'datasetName'); # create a new or open existing dataset # in the root group. 
my @datasets = $existingFile->datasets; # get a list of all datasets in the root '/' group my @datasets = $group->datasets; # get a list of all datasets in a group @dims = $dataset->dims; # get a list of dimensions for the dataset $pdl = $dataset->get(); # Get the array data in the dataset $pdl = $dataset->get($start,$length,$stride); # Get a slice or hyperslab of the array data in the dataset $dataset->set($pdl, unlimited => 1); # Set the array data in the dataset my $attrValue = $dataset->attrGet('AttrName'); # Get attribute value for a dataset $dataset->attSet('AttrName' => 'AttrValue'); # Set attribute value(s) for a dataset =head1 MEMBER DATA =over 1 =item ID ID number given to the file by the HDF5 library =item filename Name of the file. =item accessMode Access Mode?? ( read /write etc????) =item attrIndex Quick lookup index of group names to attribute values. Autogenerated as-needed by the L, L, L methods. Any attribute writes or group creations will delete this data member, because it will no longer be valid. The index is of this form: { groupName1 => { attr1 => value, attr2 => value }. groupName2 => { attr1 => value, attr3 => value }. . . . } For the purposes of indexing groups by their attributes, the attributes are applied hierarchically. i.e. any attributes of the higher level groups are assumed to be apply for the lower level groups. =item groupIndex Quick lookup index of attribute names/values group names. This index is used by the L method to quickly find any group(s) that have attribute that match a desired set. The index is of this form: { "attr1\0attt2" => { "value1\0value2' => [ group1, group2, ...], "value3\0value3' => [ groupA ], . . . }, "att1" => { "value1' => [ group1, group2, ...], "value3' => [ groupA ] . . . }, . . . } The first level of the index maps the attribute name combinations that have indexes built to their index. The second level maps the corresponding attribute values with the group(s) where these attributes take on these values. groupName1 => { attr1 => value, attr2 => value }. groupName2 => { attr1 => value, attr3 => value }. . . . } For the purposes of indexing groups by their attributes, the attributes are applied hierarchically. i.e. any attributes of the higher level groups are assumed to be apply for the lower level groups. =back =head1 METHODS =head2 new =for ref PDL::IO::HDF5 constructor - creates PDL::IO::HDF5 object for reading or writing data. B =for usage $a = new PDL::IO::HDF5( $filename ); Arguments: 1) The name of the file. If this file exists and you want to write to it, prepend the name with the '>' character: ">name.nc" Returns undef on failure. 
B =for example $hdf5obj = new PDL::IO::HDF5( "file.hdf" ); =cut sub new { my $type = shift; my $file = shift; my $self = {}; my $rc; my $write; if (substr($file, 0, 1) eq '>') { # open for writing $file = substr ($file, 1); # chop off > $write = 1; } my $fileID; # HDF file id if (-e $file) { # Existing File if ($write) { $fileID = H5Fopen($file, H5F_ACC_RDWR(), H5P_DEFAULT()); if( $fileID < 0){ carp("Can't Open Existing HDF file '$file' for writing\n"); return undef; } $self->{accessMode} = 'w'; } else { # Open read-only $fileID = H5Fopen($file, H5F_ACC_RDONLY(), H5P_DEFAULT()); if( $fileID < 0){ carp("Can't Open Existing HDF file '$file' for reading\n"); return undef; } $self->{accessMode} = 'r'; } } else{ # File doesn't exist, create it: $fileID = H5Fcreate($file, H5F_ACC_TRUNC(), H5P_DEFAULT(), H5P_DEFAULT()); if( $fileID < 0){ carp("Can't Open New HDF file '$file' for writing\n"); return undef; } $self->{accessMode} = 'w'; } # Record file name, ID $self->{filename} = $file; $self->{ID} = $fileID; $self->{attrIndex} = undef; # Initialize attrIndex $self->{groupIndex} = undef; # Initialize groupIndex bless $self, $type; } =head2 filename =for ref Get the filename for the HDF5 file B =for usage my $filename = $HDFfile->filename; =cut sub filename { my $self = shift; return $self->{filename}; } =head2 group =for ref Open or create a group in the root "/" group (i.e. top level) of the HDF5 file. B =for usage $HDFfile->group("groupName"); Returns undef on failure, 1 on success. =cut sub group { my $self = shift; my $name = $_[0]; # get the group name my $parentID = $self->{ID}; my $parentName = ''; my $group = new PDL::IO::HDF5::Group( 'name'=> $name, parent => $self, fileObj => $self ); } =head2 groups =for ref Get a list of groups in the root "/" group (i.e. top level) of the HDF5 file. B =for usage @groups = $HDFfile->groups; =cut sub groups { my $self = shift; my @groups = $self->group("/")->groups; return @groups; } =head2 unlink =for ref Unlink an object from the root "/" group (i.e. top level) of the HDF5 file. B =for usage $HDFfile->unlink($name); =cut sub unlink { my $self = shift; my $name = $_[0]; $self->group("/")->unlink($name); return 1; } =head2 dataset =for ref Open or create a dataset in the root "/" group (i.e. top level) of the HDF5 file. B =for usage $HDFfile->dataset("groupName"); Returns undef on failure, 1 on success. Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->dataset("groupName"); =cut sub dataset { my $self = shift; my $name = $_[0]; # get the dataset name return $self->group("/")->dataset($name); } =head2 datasets =for ref Get a list of all dataset names in the root "/" group. B =for usage @datasets = $HDF5file->datasets; Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->datasets; =cut sub datasets{ my $self = shift; my $name = $_[0]; # get the dataset name return $self->group("/")->datasets; } =head2 attrSet =for ref Set the value of an attribute(s) in the root '/' group of the file. Currently attribute types supported are null-terminated strings and any PDL type. B =for usage $HDFfile->attrSet( 'attr1' => 'attr1Value', 'attr2' => 'attr2 value', 'attr3' => $pdl, . . . ); Returns undef on failure, 1 on success. Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->attrSet( 'attr1' => 'attr1Value', 'attr2' => 'attr2 value', 'attr3' => $pdl, . . . 
); =cut sub attrSet { my $self = shift; my %attrs = @_; # get atribute hash return $self->group("/")->attrSet(%attrs); } =head2 attrGet =for ref Get the value of an attribute(s) in the root '/' group of the file. Currently the attribute types supported are null-terminated strings and PDLs. B =for usage @attrValues = $HDFfile->attrGet( 'attr1', 'attr2' ); =cut sub attrGet { my $self = shift; my @attrs = @_; # get atribute hash return $self->group("/")->attrGet(@attrs); } =head2 attrDel =for ref Delete attribute(s) in the root "/" group of the file. B =for usage $HDFfile->attrDel( 'attr1', 'attr2', . . . ); Returns undef on failure, 1 on success. Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->attrDel( 'attr1', 'attr2', . . . ); =cut sub attrDel { my $self = shift; my @attrs = @_; # get atribute names return $self->group("/")->attrDel(@attrs); } =head2 attrs =for ref Get a list of all attribute names in the root "/" group of the file. B =for usage @attrs = $HDFfile->attrs; Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->attrs =cut sub attrs { my $self = shift; return $self->group("/")->attrs; } =head2 reference =for ref Create a reference to part of a dataset in the root "/" group of the file. B =for usage $HDFfile->reference; Note: This is a convenience method that is equivalent to: $HDFfile->group("/")->reference($referenceName,$datasetObj,@regionStart,@regionCount); Create a reference named $referenceName within the root group "/" to a subroutine of the dataset $datasetObj. The region to be referenced is defined by the @regionStart and @regionCount arrays. =cut sub reference { my $self = shift; my $datasetObj = shift; my $referenceName = shift; my @regionStart = shift; my @regionCount = shift; return $self->group("/")->reference($datasetObj,$referenceName,\@regionStart,\@regionCount); } =head2 _buildAttrIndex =for ref Internal Method to build the attribute index hash for the object B =for usage $hdf5obj->_buildAttrIndex; Output: Updated attrIndex data member =cut sub _buildAttrIndex{ my ($self) = @_; # Take care of any attributes in the current group my @attrs = $self->attrs; my @attrValues = $self->attrGet(@attrs); my $index = $self->{attrIndex} = {}; my %indexElement; # element of the index for this group @indexElement{@attrs} = @attrValues; $index->{'/'} = \%indexElement; my $topLevelAttrs = { %indexElement }; # Now Do any subgroups: my @subGroups = $self->groups; my $subGroup; foreach $subGroup(@subGroups){ $self->group($subGroup)->_buildAttrIndex($index,$topLevelAttrs); } } =head2 clearAttrIndex =for ref Method to clear the attribute index hash for the object. This is a mostly internal method that is called whenever some part of the HDF5 file has changed and the L index is no longer valid. B =for usage $hdf5obj->clearAttrIndex; =cut sub clearAttrIndex{ my $self = shift; $self->{attrIndex} = undef; } =head2 _buildGroupIndex =for ref Internal Method to build the groupIndex hash for the object B =for usage $hdf5obj->_buildGroupIndex(@attrs); where: @attrs List of attribute names to build a group index on. 
Output: Updated groupIndex data member =cut sub _buildGroupIndex{ my ($self,@attrs) = @_; @attrs = sort @attrs; # Sort the attributes so the order won't matter # Generate attrIndex if not there yet defined( $self->{attrIndex}) || $self->_buildAttrIndex; my $attrIndex = $self->{attrIndex}; my $groupIndexElement = {}; # Element of the group index that we will build my $group; my $attrIndexElement; # Attr index for the current group my @attrValues; # attr values corresponding to @attrs for the current group my $key; # group index key # Go Thru All Groups foreach $group(sort keys %$attrIndex){ $attrIndexElement = $attrIndex->{$group}; @attrValues = map defined($_) ? $_ : '_undef_', @$attrIndexElement{@attrs}; # Groups with undefined attr will get a '_undef_' string for the value # Use multi-dimensional array emulation for the hash # key here because it should be quicker. if( defined( $groupIndexElement->{$key = join($;,@attrValues)}) ) { # if already defined, add to the list push @{$groupIndexElement->{$key}}, $group; } else{ # not already defined create new element $groupIndexElement->{$key} = [ $group ]; } } # initialize group index if it doesn't exist. unless( defined $self->{groupIndex} ){ $self->{groupIndex} = {} }; # Use multi-dimensional array emulation for the hash # key here because it should be quicker. $self->{groupIndex}{join($;,@attrs)} = $groupIndexElement; } =head2 clearGroupIndex =for ref Method to clear the group index hash for the object. This is a mostly internal method that is called whenever some part of the HDF5 file has changed and the L index is no longer valid. B =for usage $hdf5obj->clearGroupIndex; =cut sub clearGroupIndex{ my $self = shift; $self->{groupIndex} = undef; } =head2 getGroupsByAttr =for ref Get the group names which attributes match a given set of values. This method enables database-like queries to be made. I.e. you can get answers to questions like 'Which groups have attr1 = value1, and attr3 = value2?'. B =for usage @groupNames = $hdf5Obj->getGroupsByAttr( 'attr1' => 'value1', 'attr2' => 'value2' ); =cut sub getGroupsByAttr{ my $self = shift; my %attrHash = @_; my @keys = sort keys %attrHash; # Use multi-dimensional array emulation for the hash # key here because it should be quicker. my $compositeKey = join($;, @keys); # Generate groupIndex if not there yet defined( $self->{groupIndex}{$compositeKey} ) || $self->_buildGroupIndex(@keys); $groupIndex = $self->{groupIndex}{$compositeKey}; my @values = @attrHash{@keys}; my $compositeValues = join($;, @values); if( defined($groupIndex->{$compositeValues} )){ return @{$groupIndex->{$compositeValues}}; } else{ return (); } } =head2 allAttrValues =for ref Returns information about group attributes defined in the HDF5 datafile. B =for usage # Single Attr Usage. Returns an array of all # values of attribute 'attrName' in the file. $hdf5obj->allAttrValues('attrName'); # Multiple Attr Usage. Returns an 2D array of all # values of attributes 'attr1', 'attr2' in the file. 
# Higher-Level $hdf5obj->allAttrValues('attr1', 'attr2'); =cut sub allAttrValues{ my $self = shift; my @attrs = @_; # Generate attrIndex if not there yet defined( $self->{attrIndex}) || $self->_buildAttrIndex; my $attrIndex = $self->{attrIndex}; if( @attrs == 1) { # Single Argument Processing my $attr = $attrs[0]; my $group; my @values; my $grpAttrHash; # attr hash for a particular group # Go thru each group and look for instances of $attr foreach $group( keys %$attrIndex){ $grpAttrHash = $attrIndex->{$group}; if( defined($grpAttrHash->{$attr})){ push @values, $grpAttrHash->{$attr}; } } return @values; } else{ # Multiple argument processing my $group; my @values; my $grpAttrHash; # attr hash for a particular group my $attr; # individual attr name my $allAttrSeen; # flag = 0 if we have not seen all of the # desired attributes in the current group my $value; # Current value of the @values array that we # will return # Go thru each group and look for instances of $attr foreach $group( keys %$attrIndex){ $grpAttrHash = $attrIndex->{$group}; # Go thru each attribute $allAttrSeen = 1; # assume we will se all atributes, set to zero if we don't $value = []; foreach $attr(@attrs){ if( defined($grpAttrHash->{$attr})){ push @$value, $grpAttrHash->{$attr}; } else{ $allAttrSeen = 0; } } push @values, $value if $allAttrSeen; #add to values array if we got anything } return @values; } } =head2 allAttrNames =for ref Returns a sorted list of all the group attribute names that are defined in the file. B =for usage my @attrNames = $hdf5obj->allAttrNames; =cut sub allAttrNames{ my $self = shift; # Generate attrIndex if not there yet defined( $self->{attrIndex}) || $self->_buildAttrIndex; my $attrIndex = $self->{attrIndex}; my $group; my %names; my $grpAttrHash; # attr hash for a particular group my @currentNames; # Go thru each group and look for instances of $attr foreach $group( keys %$attrIndex){ $grpAttrHash = $attrIndex->{$group}; @currentNames = keys %$grpAttrHash; @names{@currentNames} = @currentNames; } return sort keys %names; } =head2 IDget =for ref Returns the HDF5 library ID for this object B =for usage my $ID = $hdf5obj->IDget; =cut sub IDget{ my $self = shift; return $self->{ID}; } =head2 nameGet =for ref Returns the HDF5 Group Name for this object. (Always '/', i.e. the root group for this top-level object) B =for usage my $name = $hdf5obj->nameGet; =cut sub nameGet{ my $self = shift; return '/'; } =head2 DESTROY =for ref PDL::IO::HDF5 Destructor - Closes the HDF5 file B =for usage No Usage. Automatically called =cut sub DESTROY { my $self = shift; if( H5Fclose($self->{ID}) < 0){ warn("Error closing HDF5 file ".$self->{filename}."\n"); } } # # Utility function (Not a Method!!!) # to pack a perl list into a binary structure # to be interpreted as a C array of long longs. This code is build # during the make process to do the Right Thing for big and little # endian machines sub packList { my @list = @_; if(ref($_[0])){ croak(__PACKAGE__."::packList is not a method!\n"); } #line 1080 "hdf5.pd" @list = map (( $_,0 ), @list); # Intersperse zeros to make 64 bit hsize_t #line 1088 "hdf5.pd" my $list = pack ("L*", @list); return $list; } #line 1095 "hdf5.pd" # # Utility function (Not a Method!!!) # to unpack a perl list from a binary structure # that is a C array of long longs. 
This code is build # during the make process to do the Right Thing for big and little # endian machines sub unpackList{ if(ref($_[0])){ croak(__PACKAGE__."::unpackList is not a method!\n"); } my ($binaryStruct) = (@_); # input binary structure my $listLength = length($binaryStruct) / 8; # list returned will be the # number of bytes in the input struct/8, since # the output numbers are 64bit. #line 1122 "hdf5.pd" my $unpackString = "Lxxxx" x $listLength; # 4 xxxx used to toss upper 32 bits #line 1129 "hdf5.pd" my @list = unpack( $unpackString, $binaryStruct ); return @list; } =head1 AUTHORS John Cerney, j-cerney1@raytheon.com Andrew Benson, abenson@obs.carnegiescience.edu =cut #line 1113 "HDF5.pm" # Exit with OK status 1; PDL-IO-HDF5-0.762/META.yml0000644000175000017500000000172715004706725014354 0ustar osboxesosboxes--- abstract: 'PDL Interface to the HDF5 Data Format' author: - unknown - 'John Cerney ' - 'Andrew Benson ' build_requires: ExtUtils::MakeMaker: '0' PDL: '2.064' Test::More: '0.88' configure_requires: ExtUtils::MakeMaker: '0' PDL: '2.064' dynamic_config: 1 generated_by: 'ExtUtils::MakeMaker version 7.72, CPAN::Meta::Converter version 2.150010' license: open_source meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: '1.4' name: PDL-IO-HDF5 no_index: directory: - t - inc requires: PDL: '2.064' resources: bugtracker: https://github.com/PDLPorters/pdl-io-hdf5/issues homepage: http://pdl.perl.org/ license: http://dev.perl.org/licenses/ repository: https://github.com/PDLPorters/pdl-io-hdf5 version: '0.762' x_meta_spec: url: http://search.cpan.org/perldoc?CPAN::Meta::Spec version: '2' x_serialization_backend: 'CPAN::Meta::YAML version 0.018' PDL-IO-HDF5-0.762/Changes0000644000175000017500000000301715004706647014373 0ustar osboxesosboxes0.762 2025-05-01 - fix Dataset::set not working with scalar ndarrays (#9) - thanks @arodland for report 0.761 2025-01-12 - build issues fixed (#6) - thanks @a-shahba 0.76 2024-10-09 - handle PDL 2.064+ types like signed byte (#5) - thanks @a-shahba for report 0.75 2021-08-08 - fix t/attribPDL.t to work on longdouble - thanks @eserte for report 0.74 2021-08-01 - applied various Debian patches - thanks @sebastic 0.73 2015-06-08 - Fix indexing problem with PDL-IO-HDF5 0.72 0.72 2015-06-08 - Fix indexing problem with PDL-IO-HDF5 0.70 0.71 2015-06-08 - Fix indexing problem with PDL-IO-HDF5 0.70 0.70 2015-06-08 - This release features improved online POD via http://metacpan.org and http://search.cpan.org - improved library detection - new support for datasets with native endianness. 0.6501 2014-01-26 - Fix missing version info in provides META key (apparently having VERSION_FROM => 'hdf5.pd' doesn't fill in the blanks for the meta data. 0.65 2014-01-26 - Fix missing META data information to index PDL::IO::HDF5 since it is provided by hdf5.pd which is not recognized by CPAN indexers. 0.64 2014-01-26 - This is an update release to PDL::IO::HDF5 with improved platform detection and build process. 
- New features and tests (thanks to Andrew Benson) - Unlinking of datasets - Extensible datasets - Creation of scalar references - Automatic dereferencing of scalar references - Support for longlong integer attributes and datasets - Better HDF5 library detection on cygwin - Added longlong tests - Other cleanup and fixs PDL-IO-HDF5-0.762/tkviewtest0000755000175000017500000000523213301073635015230 0ustar osboxesosboxes#!/usr/bin/perl # Demo script using PDL::IO::HDF5 and Tk to show an HDF5 file structure # use Tk; use PDL::IO::HDF5::tkview; use PDL::IO::HDF5; use Tk::Balloon; my $maxElements = 50; # Largest Array (in number of elements) that we # will try to show in a popup balloon my $filename = shift @ARGV || 'varlen.hdf5'; my $mw = MainWindow->new; my $b = $mw->Balloon; my $h5 = new PDL::IO::HDF5($filename); # open HDF5 file object my $tkview = new PDL::IO::HDF5::tkview( $mw, $h5); my $tree = $tkview->{hl}; my $lastItem = ''; my $mouseItem; my ($pointerX,$pointerY); my @BBox = (0,0,0,0); $b->attach($tree, -balloonposition => 'mouse', -postcommand => sub { #print "Box for $item is ".join(", ",@BBox)."\n"; #print "Box for $mouseItem is ".join(", ",@BBox)."\n"; #print "y = $pointerY\n"; if( ($pointerY >= $BBox[1] ) && ($pointerY <= $BBox[3]) && # Popup balloon if withing bounding box $mouseItem =~ /$;_Dset(.+)$/ ){ # and a dataset item my $datasetName = $1; my $text = $tree->entrycget($mouseItem,'-text'); my $elements = 1; if( $text =~ /\: Dims (.+)$/ ){ my @dims = split(',',$1); my $message; foreach (@dims){ $elements *= $_; } } if( $elements > $maxElements){ $message = "$elements Elements: Too Big To Display"; } else{ my $group = $tree->entrycget($mouseItem,'-data'); my $PDL = $group->dataset($datasetName)->get; $message = "$PDL"; } $b->{"clients"}{$tree}{-balloonmsg} = $message; return 1; } 0; }, -motioncommand => sub { # my $e = $tree->XEvent; # print "xevent is a ".ref($e)."\n"; ($pointerX,$pointerY) = $tree->pointerxy; $pointerX -= $tree->rootx; $pointerY -= $tree->rooty; $mouseItem = $tree->nearest($pointerY); # print "MouseItem = '$mouseItem'\n"; my $infoBBox = $tree->infoBbox($mouseItem); # print "infoBBox = '$infoBBox'\n"; return 1 unless defined($infoBBox); if( ref($infoBBox)){ # Handle the different ways that # tk does the bounding box for 800.015 and 800.018, etc @BBox = @$infoBBox; } else{ @BBox = split(' ', $infoBBox); } # print "Bbox = ".join(", ",@BBox)."\n"; # print "lastItem = '$lastItem', mouseItem = '$mouseItem'\n"; if( ( $lastItem eq $mouseItem ) && ($pointerY >= $BBox[1] ) && ($pointerY <= $BBox[3]) ){ # Same item, and withing it's bounding box don't cancel the Balloon 0; } else{ # New item - cancel it so a new balloon will # be posted $lastItem = $mouseItem; 1; } } ); MainLoop; PDL-IO-HDF5-0.762/README0000644000175000017500000000227514701377432013764 0ustar osboxesosboxesPDL::IO::HDF5 From The Man Pages: ------------------- NAME PDL::IO::HDF5 - PDL Interface to the HDF5 Data Format. DESCRIPTION This package provides an object-oriented interface for the PDL package to the HDF5 data-format. Information on the HDF5 Format can be found at the HDF Group's web site at http://www.hdfgroup.org/ . LIMITATIONS Currently this interface only provides a subset of the total HDF5 library's capability. o Only HDF5 Simple datatypes are supported. No HDF5 Compound datatypes are supported since PDL doesn't support them. o Only HDF5 Simple dataspaces are supported. Also Included: -------------- An experimental module for interactive viewing of HDF5 files using perl/Tk is also included. 
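Using it requires the Tk module (the viewer code asks for Tk 800 or newer)
in addition to the requirements listed below.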
The file tkviewtest is a short demo of this capability. The following are required for installation: -------------------------------------------- -- PDL v2.004 -- HDF5 version 1.2.0 or greater Installation: ------------ Installation should be the normal: perl Makefile.PL make make test (as root) make install Acknowledgements ---------------- The idea for this module is based on the code of Doug Hunt's PDL::netCDF module. PDL-IO-HDF5-0.762/varlen.hdf50000644000175000017500000003011015004706705015124 0ustar osboxesosboxes‰HDF  ÿÿÿÿÿÿÿÿH0ÿÿÿÿÿÿÿÿ €`HEAP€DatasetðTREEÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿà €` 0HaæÑR xAttr1    SNODÐ\U(XGCOLXtesting whether that nation or any nation so conceived and so dedicated can long endure.(Now we are engaged in a great civil war,Uconceived in liberty and dedicated to the proposition that all men are created equal.\Four score and seven years ago our forefathers brought forth on this continent a new nation,xGCOL Attr String 4 Attr String 3 Attr String 2 Attr String 1p (attr2dudePDL-IO-HDF5-0.762/MANIFEST0000644000175000017500000000102415004706725014222 0ustar osboxesosboxesChanges COPYRIGHT hdf5.pd HDF5/Dataset.pm HDF5/Group.pm HDF5/tkview.pm Makefile.PL MANIFEST This list of files MANIFEST.SKIP README t/attribPDL.t t/file.t t/group.t t/reference.t t/sbyte.hdf5 t/total.t t/unlink.t t/vlenString.t t/xData.t tkviewtest typemap varlen.hdf5 META.yml Module YAML meta-data (added by MakeMaker) META.json Module JSON meta-data (added by MakeMaker) GENERATED/PDL/IO/HDF5.pm mod=PDL::IO::HDF5 pd=hdf5.pd (added by pdlpp_mkgen) PDL-IO-HDF5-0.762/Makefile.PL0000644000175000017500000001325314756352375015065 0ustar osboxesosboxesuse strict; use warnings; use PDL::Core::Dev; # Pick up development utilities use ExtUtils::MakeMaker; use Config; ## Search for hdf5 library and include file $ENV{HDF5_PATH} ||= ''; sub macos_get_lib_path { return if $^O ne 'darwin'; my $pref = `brew --prefix hdf5`; return if !$pref; chomp $pref; "$pref/lib"; } sub get_lib_paths { permutate(grep $_, ( macos_get_lib_path(), $ENV{HDF5_PATH}."/lib", $ENV{HDF5_PATH}."/lib64", $ENV{HDF5_LIBDIR}, split(/ /, $Config{libpth}), # TODO: This will break for paths with spaces '/usr/local/hdf5/lib', '/usr/local/lib', '/opt/local/lib', '/usr/lib', '/opt/lib', '/usr/lib64', split(":",$ENV{LD_LIBRARY_PATH}||''), )); } sub permutate { ( @_, (map "$_/serial", @_), (map "$_/hdf5/serial", @_), ); } my @lib_base = qw(hdf5 hdf5_serial); my ($hdf5_lib_path, $hdf5_lib_base); DIR: foreach my $libdir ( get_lib_paths() ) { for my $extension (".$Config{dlext}", $Config{_a}, ".dll.a") { for my $base (@lib_base) { my $shortfile = "lib$base$extension"; my $file = "$libdir/$shortfile"; next if !-e $file; $hdf5_lib_path = $libdir; $hdf5_lib_base = $base; print "Found $shortfile at $file\n"; last DIR; } } } # We don't do a die here, because we would get bogus emails from CPAN testers unless(defined ($hdf5_lib_path) ){ print "####### Cannot find hdf5 library, libhdf5.so or libhdf5.a. ####### Please add the correct library path to Makefile.PL or install HDF\n"; exit(); } my $hdf5_include_path; foreach my $incdir ( permutate($Config{usrinc}), (map { my $s = $_; $s =~ s/\/lib[^\/]*/\/include/; $s } get_lib_paths()), ) { my $shortfile = "hdf5.h"; my $file = "$incdir/$shortfile"; if (-e $file) { $hdf5_include_path = $incdir; print "Found $shortfile at $file\n"; last; } } # We don't do a die here, because we would get bogus emails from CPAN testers unless ( defined ($hdf5_include_path) ){ print "####### Cannot find hdf5 header file, hdf5.h. 
####### Please add the correct include path to Makefile.PL or install HDF5\n"; exit(); } # Flags to include jpeg and/or zlib during compilation my $jpegLib = 0; my $zLib = 0; if( -e "$hdf5_include_path/H5config.h"){ open( H5CONFIG, "$hdf5_include_path/H5config.h") or die("Can't Open Include File '$hdf5_include_path/H5config.h'\n"); while(defined( $_ = )){ $jpegLib = 1 if( /^\s*\#define\s+HAVE_LIBJPEG\s+1/ ); $zLib = 1 if( /^\s*\#define\s+HAVE_LIBZ\s+1/ ); } } #If in win32, add the required defined for the HDF5 libs to work: my $define_win32HDF = ''; if ($Config{'osname'} =~ /win32/i) { $define_win32HDF = '-D _HDF5USEDLL_ -D HASATTRIBUTE '; print "Defining _HDF5USEDLL_ for win32\n"; } my $LIBS = "-L$hdf5_lib_path -l$hdf5_lib_base "; $LIBS .= " -lz" if($zLib); $LIBS .= " -ljpeg" if($jpegLib); $LIBS .= " -lm"; my $package = ["hdf5.pd",'HDF5','PDL::IO::HDF5']; my $meta_merge = { 'name' => 'PDL-IO-HDF5', 'abstract' => 'PDL Interface to the HDF5 Data Format', 'release_status' => 'stable', # 'testing', 'author' => [ 'John Cerney ', 'Andrew Benson ', ], 'license' => [ 'perl_5' ], 'meta_spec' => { 'version' => '2', 'url' => 'http://search.cpan.org/perldoc?CPAN::Meta::Spec', }, 'prereqs' => { 'runtime' => { 'requires' => { 'PDL' => '2.064', }, }, 'build' => { 'requires' => { 'ExtUtils::MakeMaker' => '0', 'PDL' => '2.064', }, }, test => { requires => { 'Test::More' => '0.88', # done_testing }, }, 'configure' => { 'requires' => { 'ExtUtils::MakeMaker' => '0', 'PDL' => '2.064', # new types like ULL, CLD }, }, }, resources => { license => [ 'http://dev.perl.org/licenses/' ], homepage => 'http://pdl.perl.org/', bugtracker => { web => 'https://github.com/PDLPorters/pdl-io-hdf5/issues', }, repository => { url => 'git@github.com:PDLPorters/pdl-io-hdf5.git', web => 'https://github.com/PDLPorters/pdl-io-hdf5', type => 'git', }, }, 'dynamic_config' => 1, 'meta-spec' => { 'version' => '2', 'url' => 'http://search.cpan.org/perldoc?CPAN::Meta::Spec', }, }; # create GENERATED subdir with *.pm files during 'make dist' (to make metacpan.org happy) my $preop = '$(PERL) -I$(INST_ARCHLIB) -I$(INST_LIB) -MPDL::Core::Dev -e pdlpp_mkgen $(DISTVNAME)'; WriteMakefile( 'NAME' => 'PDL::IO::HDF5', 'CCFLAGS' => "$Config{ccflags} $define_win32HDF -DH5_USE_16_API -g", 'CONFIGURE_REQUIRES' => { PDL => '2.004' }, 'BUILD_REQUIRES' => { PDL => '2.004' }, # 'TEST_REQUIRES' => { PDL => '2.004' }, 'PREREQ_PM' => { PDL => '2.004' }, 'LICENSE' => 'perl', 'VERSION_FROM' => 'hdf5.pd', 'META_MERGE' => $meta_merge, 'TYPEMAPS' => [&PDL_TYPEMAP()], 'OBJECT' => 'HDF5.o ', 'PM' => { 'HDF5.pm' => '$(INST_LIBDIR)/HDF5.pm', 'HDF5/Group.pm' => '$(INST_LIBDIR)/HDF5/Group.pm', 'HDF5/Dataset.pm' => '$(INST_LIBDIR)/HDF5/Dataset.pm', 'HDF5/tkview.pm' => '$(INST_LIBDIR)/HDF5/tkview.pm', }, 'INC' => &PDL_INCLUDE()." -I$hdf5_include_path", 'LIBS' => [$LIBS], 'clean' => {'FILES' => 'HDF5.pm HDF5.xs HDF5.o HDF5.c newFile.hdf5'}, 'dist' => { COMPRESS => 'gzip', SUFFIX => 'gz', PREOP => $preop }, ); sub MY::postamble { pdlpp_postamble($package); }
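
# Illustrative note, not used by the build: if the HDF5 library is installed in
# a non-standard location, the search above can be steered at configure time
# with the HDF5_PATH (or HDF5_LIBDIR) environment variable. For example, with a
# copy of HDF5 under the hypothetical prefix /opt/hdf5:
#
#   HDF5_PATH=/opt/hdf5 perl Makefile.PL
#   make && make test && make install
#
# get_lib_paths() then also searches /opt/hdf5/lib and /opt/hdf5/lib64, and the
# corresponding include directory is derived from whichever lib directory
# matches.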