matlab: allow running MPI openEMS on an HPC

Signed-off-by: Thorsten Liebig <Thorsten.Liebig@gmx.de>
Thorsten Liebig 2013-05-15 16:03:48 +02:00
parent a607bc6969
commit 8698f7a448
2 changed files with 54 additions and 49 deletions

matlab/RunOpenEMS.m

@@ -59,7 +59,7 @@ function RunOpenEMS(Sim_Path, Sim_File, opts, Settings)
 % WriteOpenEMS('/tmp/path_to_run_in/myfile.xml', FDTD, CSX)
 % RunOpenEMS('/tmp/path_to_run_in','myfile.xml','-v')
 %
-% See also WriteOpenEMS FindFreeSSH InitCSX InitFDTD
+% See also WriteOpenEMS FindFreeSSH InitCSX InitFDTD RunOpenEMS_MPI
 %
 % openEMS matlab interface
 % -----------------------

matlab/RunOpenEMS_MPI.m

@@ -42,9 +42,10 @@ cd(Sim_Path);
 scp_options = '-C -o "PasswordAuthentication no" -o "StrictHostKeyChecking no"';
 ssh_options = [scp_options ' -x'];
-Remote_Nodes = Settings.MPI.Hosts;
-HostList = '';
-for n=1:numel(Remote_Nodes)
+if isfield(Settings.MPI,'Hosts')
+    Remote_Nodes = Settings.MPI.Hosts;
+    HostList = '';
+    for n=1:numel(Remote_Nodes)
     remote_name = Remote_Nodes{n};
     if (n==1)
@@ -69,6 +70,7 @@ for n=1:numel(Remote_Nodes)
         disp(res);
         error('openEMS:RunOpenEMS',['scp to remote ' remote_name ' failed!']);
     end
+    end
 end
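
The new isfield guard makes Settings.MPI.Hosts optional: with the field present, the loop above builds a host list and stages files on the remote nodes; without it, the run stays local. A minimal sketch of the two configurations this enables (all field values are illustrative, not taken from the commit):

% Remote/HPC run: Hosts present, reached via ssh/scp as in the loop above
Settings = [];
Settings.MPI.Binary     = '/opt/openEMS/openEMS_MPI';   % example path to the MPI-enabled binary
Settings.MPI.GlobalArgs = '';                           % optional extra mpiexec arguments
Settings.MPI.Hosts      = {'node01','node02','node03'}; % remote node names (examples)

% Local run: simply omit the Hosts field
Settings_local = [];
Settings_local.MPI.Binary = '/opt/openEMS/openEMS_MPI';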
@@ -79,28 +81,30 @@ else
     append_unix = [];
 end
-disp(['Running remote openEMS_MPI in working dir: ' work_path]);
 if ~isfield(Settings.MPI,'GlobalArgs')
     Settings.MPI.GlobalArgs = '';
 end
 if isfield(Settings.MPI,'Hosts')
+    disp(['Running remote openEMS_MPI in working dir: ' work_path]);
     [status] = system(['mpiexec -host ' HostList ' -n ' int2str(NrProc) ' -wdir ' work_path ' ' Settings.MPI.Binary ' ' Sim_File ' ' opts ' ' append_unix]);
 else
-    [status] = system(['mpiexec ' Settings.MPI.GlobalArgs ' -n ' int2str(NrProc) ' -wdir ' work_path ' ' Settings.MPI.Binary ' ' Sim_File ' ' opts ' ' append_unix]);
+    disp('Running local openEMS_MPI');
+    [status] = system(['mpiexec ' Settings.MPI.GlobalArgs ' -n ' int2str(NrProc) ' ' Settings.MPI.Binary ' ' Sim_File ' ' opts ' ' append_unix]);
 end
 if (status~=0)
     error('openEMS:RunOpenEMS','mpirun openEMS failed!');
 end
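
Note that the two command lines assembled above differ in more than the host list: the new local branch also drops -wdir, so openEMS runs in the current directory rather than in a temporary remote working path. A sketch of both resulting commands with illustrative values (HostList is assumed to be a comma-separated string, as built by the loop in the first hunk):

% Illustrative reconstruction of the two command strings; all values are examples.
NrProc    = 4;
work_path = '/tmp/openEMS_MPI_XXXXXX';   % temp dir created on the remote nodes (example)
HostList  = 'node01,node02';             % assumed format produced by the host-list loop
Binary    = '/opt/openEMS/openEMS_MPI';  % example path
cmd_remote = ['mpiexec -host ' HostList ' -n ' int2str(NrProc) ' -wdir ' work_path ' ' Binary ' sim.xml -v'];
cmd_local  = ['mpiexec -n ' int2str(NrProc) ' ' Binary ' sim.xml -v'];
disp(cmd_remote)
disp(cmd_local)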
-disp( 'Remote simulation done... copying back results and cleaning up...' );
+if isfield(Settings.MPI,'Hosts')
+    disp( 'Remote simulation done... copying back results and cleaning up...' );
 if (strncmp(work_path,'/tmp/',5)~=1) % savety precaution...
     error('openEMS:RunOpenEMS','working path invalid for deletion');
 end
 for n=1:numel(Remote_Nodes)
     remote_name = Remote_Nodes{n};
     disp(['Copy data from remote node: ' remote_name]);
     [stat, res] = unix(['scp -r ' scp_options ' ' remote_name ':' work_path '/* ''' pwd '''/']);
@@ -115,6 +119,7 @@ for n=1:numel(Remote_Nodes)
         disp(res);
         warning('openEMS:RunOpenEMS','remote cleanup failed!');
     end
+    end
 end
 cd(savePath);
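
Taken together, an HPC invocation under this commit could look like the sketch below. The RunOpenEMS_MPI call signature is not shown in this diff, so the argument order is an assumption inferred from the variables the function uses (Sim_Path, Sim_File, NrProc, opts, Settings):

% Hypothetical end-to-end sketch; the call signature and all values are assumptions.
Settings = [];
Settings.MPI.Binary     = '/opt/openEMS/openEMS_MPI';
Settings.MPI.GlobalArgs = '';
Settings.MPI.Hosts      = {'node01','node02'};  % drop this field for a purely local MPI run
NrProc = 4;
% WriteOpenEMS('/tmp/MPI_sim/sim.xml', FDTD, CSX);                    % as in the RunOpenEMS help text
% RunOpenEMS_MPI('/tmp/MPI_sim', 'sim.xml', NrProc, '-v', Settings);  % assumed argument order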