-
Notifications
You must be signed in to change notification settings - Fork 62
ParaView Visualization
Cameron Smith edited this page Sep 17, 2015
·
9 revisions
An embarrassingly parallel part surface extraction script using an MPI-enabled build of pvpython
follows. Copy this code into a file named extractSurface.py
and run it with the command:
mpirun -np numProcesses pvpython ./extractSurface.py someMesh.pvtu /path/to/input/dir /path/to/output/dir
Assuming all goes well, a pvtp
file and directories containing vtp
files will be created in the /path/to/output/dir
directory. It is critical that pvpython have MPI support. See the CCI wiki for info on the ParaView install.
Follow the CCI wiki instructions to visualize the *vtp
files in parallel on the CCI.
#### import the simple module from the paraview
from paraview.simple import *
import sys
import os
from mpi4py import MPI
#### disable automatic camera reset on 'Show'
paraview.simple._DisableFirstRenderCameraReset()
# Command-line arguments:
#   sys.argv[1] - filename of the .pvtu index file (located inside inputDir)
#   sys.argv[2] - directory containing the .pvtu file and its piece files
#   sys.argv[3] - directory the extracted surfaces are written to
inputDir = sys.argv[2]
outputDir = sys.argv[3]
# Open the .pvtu index file; it is read and closed later in the script.
f = open(inputDir+'/'+sys.argv[1],'r')
def setUpPVTP(piece):
    """Write a template 'temp.pvtp' in the working directory.

    Runs a single piece through the reader -> ExtractSurface -> parallel
    polydata writer pipeline so that the resulting .pvtp carries the correct
    field/header information. The <Piece> entries it contains are wrong (only
    one source) and are rewritten later by the rank-0 finalization code.

    piece -- filename of one piece's .vtu file, relative to inputDir
    """
    reader = XMLUnstructuredGridReader(FileName=[inputDir + '/' + piece])
    surf = ExtractSurface(Input=reader)
    # HACK: the parallel writer is used purely to produce a header template;
    # DataMode=0 requests ASCII output so the file can be edited as text.
    writer = XMLPPolyDataWriter(FileName='temp.pvtp', Input=surf, DataMode=0)
    writer.UpdatePipeline()
    # Tear the pipeline down so ParaView releases this piece's memory.
    Delete(surf)
    del surf
    Delete(reader)
    del reader
def extract(piece, pieceId):
    """Extract the outer surface of one unstructured-grid piece and save it
    as outputDir/<pieceId // 1000>/<piece basename>.vtp.

    piece   -- filename of the piece's .vtu file, relative to inputDir
    pieceId -- global index of the piece; selects the 1000-piece
               subdirectory created by the rank-0 setup code.
               (Renamed from 'id', which shadowed the builtin; the in-file
               caller passes it positionally.)
    """
    # create a new 'XML Unstructured Grid Reader'
    part = XMLUnstructuredGridReader(FileName=[inputDir + '/' + piece])
    # create a new 'Extract Surface'
    surface = ExtractSurface(Input=part)
    # piece[:-4] strips the '.vtu' extension; floor division picks the
    # subdirectory and stays an int under Python 3 as well.
    fn = outputDir + '/' + str(pieceId // 1000) + '/' + piece[:-4] + '.vtp'
    w = XMLPolyDataWriter(FileName=fn, Input=surface)
    w.UpdatePipeline()
    # Explicitly delete the pipeline objects so ParaView frees this piece's
    # memory before the next one is processed.
    Delete(surface)
    del surface
    Delete(part)
    del part
def extractSet(pieces, start, end):
    """Extract the surfaces of pieces[start:end], preserving each piece's
    global index so output files land in the right subdirectory."""
    for idx, piece in enumerate(pieces[start:end], start):
        extract(piece, idx)
# Rank and size of the MPI job; each rank later processes a contiguous
# chunk of the piece list.
rank = MPI.COMM_WORLD.Get_rank()
n = MPI.COMM_WORLD.size
# Parse the .pvtu index file, collecting the source filename of each piece.
lines = f.readlines()
pieces = []
for line in lines:
    if '<Piece Source=' in line:
        # Strip the leading '<Piece Source="' (15 chars) and the trailing
        # '"/>' plus newline, leaving just the .vtu filename.
        # NOTE(review): assumes one <Piece> element per line in exactly this
        # layout — true for files written by the VTK parallel writers.
        pieces.append(line[15:-4])
f.close()
# (removed dead code: 'first = True' was assigned but never read)
# Rank 0 writes the header template and lays out the output directory tree;
# everyone else waits at the barrier until the directories exist.
if rank == 0:
    setUpPVTP(pieces[0])
if rank == 0:
    # One subdirectory per 1000 pieces keeps directory sizes manageable.
    numDir = len(pieces) // 1000
    if not os.path.exists(outputDir):
        os.makedirs(outputDir)
    for d in range(numDir + 1):
        subdir = outputDir + '/' + str(d)
        if not os.path.exists(subdir):
            os.makedirs(subdir)
MPI.COMM_WORLD.Barrier()
# All processes compute the same split for themselves — no communication
# needed. Floor division is required: under Python 3 plain '/' yields a
# float, and range() in extractSet would then raise TypeError.
piecesPerProc = len(pieces) // n
procSplit = []
for i in range(n):
    procSplit.append((i * piecesPerProc, (i + 1) * piecesPerProc))
# Fold the remainder onto the last rank. Extending the last tuple's end
# (rather than reading procSplit[n-2]) also behaves correctly when the job
# runs on a single rank (n == 1), where the old code skipped the first chunk.
procSplit[-1] = (procSplit[-1][0], len(pieces))
extractSet(pieces, procSplit[rank][0], procSplit[rank][1])
if rank == 0:
    # Rewrite the template pvtp: keep its header (everything up to the first
    # <Piece ...> entry), then emit one <Piece> line per extracted .vtp file
    # and close the XML elements the header opened.
    with open('temp.pvtp', 'r') as temp:
        templines = temp.readlines()
    # sys.argv[1][:-5] strips the '.pvtu' extension from the input filename.
    with open(outputDir + '/' + sys.argv[1][:-5] + '.pvtp', 'w') as pvtp:
        for line in templines:
            if '<Piece Source=' in line:
                break
            pvtp.write(line)
        for i in range(len(pieces)):
            # i // 1000 matches the subdirectory layout used by extract().
            pvtp.write('<Piece Source="' + str(i // 1000) + '/' + pieces[i][:-4] + '.vtp"/>\n')
        pvtp.write('</PPolyData>\n</VTKFile>\n')
    # os.remove instead of shelling out to 'rm': portable, no subprocess.
    os.remove('temp.pvtp')