function hdf_fielddata = ReadHDF5FieldData(file)
% function hdf_fielddata = ReadHDF5FieldData(file)
%
% Read the time-domain and/or frequency-domain field data stored below
% '/FieldData' in an openEMS HDF5 dump file.
%
% returns:
% % time domain data (if present)
% hdf_fielddata.TD.time
% hdf_fielddata.TD.names
% hdf_fielddata.TD.values
%
% % frequency domain data (if present)
% hdf_fielddata.FD.frequency
% hdf_fielddata.FD.names
% hdf_fielddata.FD.values
%
% example: values of timestep 12:
% hdf_fielddata.TD.values{12}: array (x,y,z,polarization)
%
% plot z-field component along y-direction for timestep 12:
% plot( hdf_fielddata.TD.values{12}(1,:,1,3) )
%
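% example usage (a minimal sketch; the file name 'Et.h5' is only a placeholder
% for a field dump written by a previous openEMS run):
%   field = ReadHDF5FieldData('Et.h5');
%   if isfield(field,'TD')
%       numel(field.TD.values)                % number of stored timesteps
%       plot( field.TD.values{end}(1,:,1,3) ) % z-component along y, last timestep
%   end
%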
% openEMS matlab interface
% -----------------------
% author: Thorsten Liebig
%
% See also ReadHDF5Mesh ReadHDF5Dump
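% hdf5info/hdf5read are MATLAB functions that are not available in Octave;
% under Octave dispatch to the dedicated Octave reader below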
isOctave = exist('OCTAVE_VERSION','builtin') ~= 0;
if isOctave
    hdf_fielddata = ReadHDF5FieldData_octave(file);
    return
end

info = hdf5info(file);
TD.names = {};
FD.names = {};
hdf_fielddata = [];

for n=1:numel(info.GroupHierarchy.Groups)
    if strcmp(info.GroupHierarchy.Groups(n).Name,'/FieldData')
        %found /FieldData, look for either TD or FD data
        for nGroup=1:numel(info.GroupHierarchy.Groups(n).Groups)
            %search and read TD data
            if strcmp(info.GroupHierarchy.Groups(n).Groups(nGroup).Name,'/FieldData/TD')
                for m=1:numel(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets)
                    TD.names{m} = info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Name;
                    for a = 1:numel(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Attributes)
                        str = regexp(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Attributes(a).Name,'\w/*\w*','match');
                        TD.(str{end})(m) = double(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Attributes(a).Value);
                    end
                end
            end
            %search and read FD data
            if strcmp(info.GroupHierarchy.Groups(n).Groups(nGroup).Name,'/FieldData/FD')
                for m=1:numel(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets)
                    FD.names{m} = info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Name;
                    for a = 1:numel(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Attributes)
                        str = regexp(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Attributes(a).Name,'\w/*\w*','match');
                        FD.(str{end})(m) = double(info.GroupHierarchy.Groups(n).Groups(nGroup).Datasets(m).Attributes(a).Value);
                    end
                end
            end
        end
    end
end

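% read the actual time-domain field values for all datasets found above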
if (numel(TD.names)>0)
    hdf_fielddata.TD=TD;
    for n=1:numel(hdf_fielddata.TD.names)
        hdf_fielddata.TD.values{n} = double(hdf5read(file,hdf_fielddata.TD.names{n}));
    end
end

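% read the frequency-domain field values; they are stored either as dataset
% pairs '/FieldData/FD/f<n>_real' and '/FieldData/FD/f<n>_imag' or as a single
% dataset '/FieldData/FD/f<n>'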
if (numel(FD.names)>0)
    hdf_fielddata.FD=FD;
    Nr_freq = numel(FD.names);
    for n=1:Nr_freq
        name = ['/FieldData/FD/f' int2str(n-1) '_real'];
        ind = find(strcmp(FD.names,name));
        if isempty(ind)
            ind = find(strcmp(FD.names,['/FieldData/FD/f' int2str(n-1)]));
            if ~isempty(ind)
                hdf_fielddata.FD.values{n} = double(hdf5read(file,FD.names{ind}));
            end
        else
            hdf_fielddata.FD.values{n} = double(hdf5read(file,FD.names{ind}));
            name = ['/FieldData/FD/f' int2str(n-1) '_imag'];
            ind = find(strcmp(FD.names,name));
            hdf_fielddata.FD.values{n} = hdf_fielddata.FD.values{n} + 1j*double(hdf5read(file,FD.names{ind}));
        end
    end
end

function hdf_fielddata = ReadHDF5FieldData_octave(file)
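% Octave variant: hdf5info/hdf5read are not available here, so the complete
% file is loaded at once and converted to the same output structure as above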
hdf = load( '-hdf5', file );
if ~isfield(hdf,'FieldData')
    error('no field data found')
end

if isfield(hdf.FieldData,'TD')
    %read TD data
    hdf_fielddata_names = fieldnames(hdf.FieldData.TD);
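    % the leading character that Octave prepends to non-identifier dataset
    % names (the numeric timestep ids) is stripped again below to rebuild the
    % original HDF5 path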
    for n=1:numel(hdf_fielddata_names)
        hdf_fielddata.TD.values{n} = hdf.FieldData.TD.(hdf_fielddata_names{n});
        hdf_fielddata.TD.names{n} = ['/FieldData/TD/' hdf_fielddata_names{n}(2:end)];
        hdf_fielddata.TD.time(n) = h5readatt_octave(file, hdf_fielddata.TD.names{n},'time');
    end
end

if isfield(hdf.FieldData,'FD')
    %read FD data
    hdf_fielddata_names = fieldnames(hdf.FieldData.FD);
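    % the FD field names are assumed to come in sorted pairs 'f<n>_imag'/'f<n>_real';
    % each pair is combined into one complex-valued array below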
    for n=1:numel(hdf_fielddata_names)/2
        hdf_fielddata.FD.values{n} = hdf.FieldData.FD.(hdf_fielddata_names{2*n}) + 1j*hdf.FieldData.FD.(hdf_fielddata_names{2*n-1});
        hdf_fielddata.FD.names{n} = ['/FieldData/FD/' hdf_fielddata_names{2*n-1}(1:end-5)];
        hdf_fielddata.FD.frequency(n) = h5readatt_octave(file,['/FieldData/FD/' hdf_fielddata_names{2*n}],'frequency');
    end
end