dump time information to HDF5 file

Thorsten Liebig 2010-04-29 19:26:45 +02:00
parent cd1b02dbc7
commit d16545c776
4 changed files with 13 additions and 7 deletions


@@ -307,7 +307,7 @@ bool ProcessFields::DumpMultiScalarArray2VTK(ofstream &file, string names[], FDT
return true;
}
-bool ProcessFields::DumpVectorArray2HDF5(string filename, string name, FDTD_FLOAT const* const* const* const* array, unsigned int const* numLines)
+bool ProcessFields::DumpVectorArray2HDF5(string filename, string name, FDTD_FLOAT const* const* const* const* array, unsigned int const* numLines, float time)
{
const H5std_string FILE_NAME(filename);
const H5std_string DATASET_NAME( name );
@@ -329,6 +329,11 @@ bool ProcessFields::DumpVectorArray2HDF5(string filename, string name, FDTD_FLOA
// datatype.setOrder( H5T_ORDER_LE );
H5::DataSet dataset = group.createDataSet( DATASET_NAME, datatype, dataspace );
+hsize_t t_dimsf[] = {1};
+H5::DataSpace t_dataspace( 1, t_dimsf );
+H5::Attribute attr = dataset.createAttribute("time",H5::PredType::NATIVE_FLOAT,t_dataspace);
+attr.write( H5::PredType::NATIVE_FLOAT , &time);
// I have not the slightest idea why this array-copy action is necessary... but it's the only way hdf5 does what it is supposed to do anyway!!
// at least it is safe in case FDTD_FLOAT was defined as double...
// why does hdf5 write the dimensions backwards??? or matlab???
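
For reference, here is a minimal, self-contained sketch of the write pattern the added lines follow: create a dataset under /FieldData and attach a one-element float attribute named "time" to it via the HDF5 C++ API. The file name, dataset name, and data values below are placeholders for illustration, not taken from the commit.

#include <H5Cpp.h>
#include <vector>

int main()
{
    // Hypothetical values -- the real code gets these from the FDTD engine.
    const float time = 1.23e-9f;          // physical time of this dump in seconds
    std::vector<float> data(10, 0.0f);    // stand-in for the field array

    H5::H5File file("demo.h5", H5F_ACC_TRUNC);
    H5::Group group = file.createGroup("/FieldData");

    hsize_t dimsf[1] = {10};
    H5::DataSpace dataspace(1, dimsf);
    H5::DataSet dataset = group.createDataSet("000000", H5::PredType::NATIVE_FLOAT, dataspace);
    dataset.write(&data[0], H5::PredType::NATIVE_FLOAT);

    // One-element "time" attribute attached to the dataset, as in the diff above.
    hsize_t t_dimsf[1] = {1};
    H5::DataSpace t_dataspace(1, t_dimsf);
    H5::Attribute attr = dataset.createAttribute("time", H5::PredType::NATIVE_FLOAT, t_dataspace);
    attr.write(H5::PredType::NATIVE_FLOAT, &time);
    return 0;
}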


@@ -57,7 +57,7 @@ public:
static bool DumpScalarArray2VTK(ofstream &file, string name, FDTD_FLOAT const* const* const* array, double const* const* discLines, unsigned int const* numLines, unsigned int precision=12);
static bool DumpMultiScalarArray2VTK(ofstream &file, string names[], FDTD_FLOAT const* const* const* const* array, unsigned int numFields, double const* const* discLines, unsigned int const* numLines, unsigned int precision=12);
-static bool DumpVectorArray2HDF5(string filename, string name, FDTD_FLOAT const* const* const* const* array, unsigned int const* numLines);
+static bool DumpVectorArray2HDF5(string filename, string name, FDTD_FLOAT const* const* const* const* array, unsigned int const* numLines, float time=0);
double CalcTotalEnergy() const;


@@ -84,7 +84,7 @@ void ProcessFieldsTD::DumpCellInterpol(string filename)
{
stringstream ss;
ss << std::setw( pad_length ) << std::setfill( '0' ) << Eng->GetNumberOfTimesteps();
-DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),E_T,numDLines);
+DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),E_T,numDLines,Eng->GetNumberOfTimesteps()*Op->GetTimestep());
}
else
cerr << "ProcessFieldsTD::DumpCellInterpol: unknown File-Type" << endl;
@@ -148,7 +148,7 @@ void ProcessFieldsTD::DumpCellInterpol(string filename)
{
stringstream ss;
ss << std::setw( pad_length ) << std::setfill( '0' ) << Eng->GetNumberOfTimesteps();
-DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),H_T,numDLines);
+DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),H_T,numDLines,(0.5+Eng->GetNumberOfTimesteps())*Op->GetTimestep());
}
else
cerr << "ProcessFieldsTD::DumpCellInterpol: unknown File-Type" << endl;
@@ -198,7 +198,7 @@ void ProcessFieldsTD::DumpNoInterpol(string filename)
{
stringstream ss;
ss << std::setw( pad_length ) << std::setfill( '0' ) << Eng->GetNumberOfTimesteps();
-DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),E_T,numLines);
+DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),E_T,numLines,Eng->GetNumberOfTimesteps()*Op->GetTimestep());
}
else
cerr << "ProcessFieldsTD::DumpNoInterpol: unknown File-Type" << endl;
@@ -244,7 +244,7 @@ void ProcessFieldsTD::DumpNoInterpol(string filename)
{
stringstream ss;
ss << std::setw( pad_length ) << std::setfill( '0' ) << Eng->GetNumberOfTimesteps();
-DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),H_T,numLines);
+DumpVectorArray2HDF5(filename.c_str(),string( ss.str() ),H_T,numLines,(0.5+Eng->GetNumberOfTimesteps())*Op->GetTimestep());
}
else
cerr << "ProcessFieldsTD::DumpNoInterpol: unknown File-Type" << endl;
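
The call sites above derive the dump time from the timestep counter: E-field dumps use n*dT, while H-field dumps add half a timestep, since in the leapfrog (Yee) update the H field is staggered by dT/2 relative to E. A tiny sketch of that arithmetic with assumed names (ETime/HTime and the example dT are illustrative only, not openEMS code):

#include <iostream>

// Assumed helpers mirroring the expressions passed to DumpVectorArray2HDF5 above.
static double ETime(unsigned int numTS, double dT) { return numTS * dT; }
static double HTime(unsigned int numTS, double dT) { return (numTS + 0.5) * dT; }

int main()
{
    const double dT = 1e-12;  // hypothetical FDTD timestep in seconds
    std::cout << "E dump time: " << ETime(100, dT) << " s" << std::endl;  // 1e-10 s
    std::cout << "H dump time: " << HTime(100, dT) << " s" << std::endl;  // 1.005e-10 s
    return 0;
}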


@@ -15,11 +15,12 @@ for n=1:numel(info.GroupHierarchy.Groups)
if strcmp(info.GroupHierarchy.Groups(n).Name,'/FieldData')
for m=1:numel(info.GroupHierarchy.Groups(n).Datasets)
names{m} = info.GroupHierarchy.Groups(n).Datasets(m).Name;
+hdf_fielddata.time(m) = double(info.GroupHierarchy.Groups(n).Datasets(m).Attributes.Value);
end
end
end
hdf_fielddata.names = names;
for n=1:numel(names)
-hdf_fielddata.values{n} = hdf5read(file,names{n});
+hdf_fielddata.values{n} = double(hdf5read(file,names{n}));
end
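
To cross-check what the MATLAB reader above picks up, the "time" attribute can also be read back directly with the HDF5 C++ API. A hedged sketch, assuming a file laid out like the write example earlier (the file name demo.h5 and dataset name 000000 are placeholders):

#include <H5Cpp.h>
#include <iostream>

int main()
{
    // Open the file and dataset written earlier and read its "time" attribute back.
    H5::H5File file("demo.h5", H5F_ACC_RDONLY);
    H5::DataSet dataset = file.openGroup("/FieldData").openDataSet("000000");

    H5::Attribute attr = dataset.openAttribute("time");
    float time = 0;
    attr.read(H5::PredType::NATIVE_FLOAT, &time);
    std::cout << "dump time: " << time << " s" << std::endl;
    return 0;
}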