diff --git a/.gitignore b/.gitignore index e52a0630e20..7b09105d5dd 100644 --- a/.gitignore +++ b/.gitignore @@ -104,6 +104,7 @@ CMakeLists.txt petsc-doc-env doc/manualpages src/binding/petsc4py/.eggs +src/binding/petsc4py/petsc4py.egg-info src/binding/petsc4py/src/include/petsc4py/petsc4py.PETSc.h src/binding/petsc4py/src/include/petsc4py/petsc4py.PETSc_api.h src/binding/petsc4py/src/libpetsc4py/libpetsc4py.c @@ -180,7 +181,6 @@ src/benchmarks/streams/CUDAVersion src/benchmarks/streams/OpenMPVersion src/benchmarks/streams/PthreadVersion src/benchmarks/streams/scaling.png -horse.ply /docs/ **/f90module*.f90 compile_commands.json diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b3ef7c32cae..056a191f3ac 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -74,6 +74,8 @@ check-ci-settings: - if [ ! -z ${INIT_SCRIPT+x} ]; then echo "sourcing ${INIT_SCRIPT}"; if [ -f ${HOME}/${INIT_SCRIPT} ]; then source ${HOME}/${INIT_SCRIPT}; else source ${INIT_SCRIPT}; fi; fi - printf "PATH:$PATH\n" - printf "PYTHONPATH:$PYTHONPATH\n" + - printf "python:${PYTHON}\n" + - ${PYTHON} -m pip list --user - printf "CONFIG_OPTS:${CONFIG_OPTS}\n" - printf "PETSC_OPTIONS:$PETSC_OPTIONS\n" - if [ ! -z ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME+x} ]; then printf "CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME\n" ;fi @@ -83,7 +85,7 @@ check-ci-settings: - make CFLAGS="${MAKE_TEST_CFLAGS}" CXXFLAGS="${MAKE_TEST_CXXFLAGS}" FFLAGS="${MAKE_TEST_FFLAGS}" CUDAFLAGS="${MAKE_CUDAFLAGS}" allgtests-tap gmakesearch="${TEST_SEARCH}" TIMEOUT=${TIMEOUT} variables: PYTHON: python3 - MAKE_CFLAGS: -Werror + MAKE_CFLAGS: -Werror -Wmissing-field-initializers MAKE_CXXFLAGS: -Werror -Wzero-as-null-pointer-constant MAKE_FFLAGS: -Werror MAKE_TEST_CFLAGS: -Werror @@ -214,10 +216,17 @@ petsc4py-pip: script: - python3 -m venv pip-builds - source pip-builds/bin/activate - - python3 -m pip install --upgrade pip==23.0.1 - - python3 -m pip install . - - python3 -m pip install src/binding/petsc4py - - python3 -m petsc4py + - python -m pip install --upgrade pip + - unset PETSC_DIR PETSC_ARCH + - export PIP_WHEEL_DIR="${PWD}/wheelhouse" + - export PIP_FIND_LINKS="${PIP_WHEEL_DIR}" + - mkdir -p "${PIP_WHEEL_DIR}" + - python -m pip wheel . + - python -m pip wheel src/binding/petsc4py + - python -m pip install --pre --no-index --no-cache-dir petsc4py + - python -m pip list + - python -m petsc --prefix + - python -m petsc4py checksource: extends: .test-basic @@ -229,10 +238,12 @@ checksource: - vermin --version - make vermin - PATH=/nfs/gce/projects/petsc/soft/u22.04/spack-2024-03-06/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.4.0/llvm-18.1.0-7hsf4hzma6aebqsd4ifb5brchcooktkp/bin:$PATH make checkclangformat SHELL=bash + - PATH=/nfs/gce/projects/petsc/soft/u22.04/spack-2024-03-06/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.4.0/llvm-18.1.5-ug2n4c7cb3xrf3dxn7fkffuvcgfdzzpk/bin:$PATH make checkclangformat SHELL=bash - make checkbadSource SHELL=bash - make checkbadFileChange SHELL=bash - make -f gmakefile check_output SHELL=bash - make check_petsc4py_rst + - make -C src/binding/petsc4py lint artifacts: name: "$CI_JOB_NAME" when: always @@ -1199,7 +1210,7 @@ analyze-pipeline: python3 -m venv $VENV && . 
$VENV/bin/activate && cd doc && - python3 -m pip install -r requirements.txt + python -m pip install -r requirements.txt # # Build documentation and make available for review using GitLab pages diff --git a/.gitlab/CODEOWNERS b/.gitlab/CODEOWNERS index c5d97ebf1a5..29735c257df 100644 --- a/.gitlab/CODEOWNERS +++ b/.gitlab/CODEOWNERS @@ -18,6 +18,7 @@ src/ts/event/ @abhyshr @caidao22 @BarrySmith /src/ksp/pc/impls/gamg/ @markadams4 @jedbrown /src/ksp/pc/impls/hpddm/ @prj- @stefanozampini /src/ksp/ksp/impls/hpddm/ @prj- @stefanozampini +/src/ksp/ksp/utils/lmvm/ @hsuh @tisaac /src/binding/petsc4py/ @dalcinl @wence @pefarrell @joseroman @jedbrown @knepley @psanan @stefanozampini diff --git a/config/BuildSystem/config/compilers.py b/config/BuildSystem/config/compilers.py index 7093da08ace..1d5ad2c4baa 100644 --- a/config/BuildSystem/config/compilers.py +++ b/config/BuildSystem/config/compilers.py @@ -249,11 +249,15 @@ def checkCLibraries(self): # Intel 11 has a bogus -long_double option if arg == '-long_double': continue - # if options of type -L foobar if arg == '-lto_library': lib = next(argIter) self.logPrint('Skipping Apple LLVM linker option -lto_library '+lib) continue + # ASan + if arg in ['-lasan', '-lubsan']: + self.logPrint('Skipping ASan libraries') + continue + # if options of type -L foobar if arg == '-L': lib = next(argIter) self.logPrint('Found -L '+lib, 4, 'compilers') @@ -581,6 +585,10 @@ def checkCxxLibraries(self): if arg == '-long_double': continue + # ASan + if arg in ['-lasan', '-lubsan']: + self.logPrint('Skipping ASan libraries') + continue # if options of type -L foobar if arg == '-L': lib = next(argIter) @@ -982,6 +990,7 @@ def checkFortranLibraries(self): while 1: arg = next(argIter) self.logPrint( 'Checking arg '+arg, 4, 'compilers') + # Intel compiler sometimes puts " " around an option like "-lsomething" if arg.startswith('"') and arg.endswith('"'): arg = arg[1:-1] @@ -990,6 +999,10 @@ def checkFortranLibraries(self): if arg.endswith('"') and arg[:-1].find('"') == -1: arg = arg[:-1] + # ASan + if arg in ['-lasan', '-lubsan']: + self.logPrint('Skipping ASan libraries') + continue if arg == '-lto_library': lib = next(argIter) self.logPrint('Skipping Apple LLVM linker option -lto_library '+lib) continue @@ -1077,6 +1090,9 @@ def checkFortranLibraries(self): continue elif arg == '-lLTO' and self.setCompilers.isDarwin(self.log): self.logPrint('Skipping -lTO') + elif arg == '-lnvc': + self.logPrint('Skipping -lnvc: https://forums.developer.nvidia.com/t/failed-cuda-device-detection-when-explicitly-linking-libnvc/203225') + continue elif arg.find('-libpath:')>=0: self.logPrint('Skipping Intel oneAPI ifort (on Microsoft Windows) compiler option: '+arg) continue diff --git a/config/BuildSystem/config/compilersFortran.py b/config/BuildSystem/config/compilersFortran.py index b46a0b50585..8fe5bf5b30c 100644 --- a/config/BuildSystem/config/compilersFortran.py +++ b/config/BuildSystem/config/compilersFortran.py @@ -135,8 +135,11 @@ def checkFortranTypeInitialize(self): def checkFortranTypeStar(self): '''Determine whether the Fortran compiler handles type(*)''' + '''Newer nvfortran supports type(*), but it introduces extra arguments at the end that interfere with char * lengths,''' + '''so it cannot be used in interface definitions.''' + '''Not using type(*) :: b(:) in the test below prevents this compiler from certifying that it has type(*)''' self.pushLanguage('FC') - if self.checkCompile(body = ' interface\n subroutine a(b)\n type(*) :: b(:)\n end subroutine\n end interface\n'): + if self.checkCompile(body = ' 
interface\n subroutine a(b)\n type(*) b\n end subroutine\n end interface\n'): self.addDefine('HAVE_FORTRAN_TYPE_STAR', 1) self.logPrint('Fortran compiler supports type(*)') else: diff --git a/config/BuildSystem/config/framework.py b/config/BuildSystem/config/framework.py index 29c9889cd6c..e4989f0a966 100644 --- a/config/BuildSystem/config/framework.py +++ b/config/BuildSystem/config/framework.py @@ -490,6 +490,8 @@ def filterCompileOutput(self, output,flag = '', filterAlways = 0): if output.find('warning: ISO C90 does not support') >= 0: return output if output.find('warning: ISO C does not support') >= 0: return output if output.find('warning #2650: attributes ignored here') >= 0: return output + if output.find('warning #3175: unrecognized gcc optimization level') >= 0: return output + if output.find('warning: unknown attribute') >= 0: return output if output.find('Warning: attribute visibility is unsupported and will be skipped') >= 0: return output if output.find('(E) Invalid statement found within an interface block. Executable statement, statement function or syntax error encountered.') >= 0: return output elif self.argDB['ignoreCompileOutput'] and not filterAlways: diff --git a/config/BuildSystem/config/package.py b/config/BuildSystem/config/package.py index 7cecf0100a9..af8f445d185 100644 --- a/config/BuildSystem/config/package.py +++ b/config/BuildSystem/config/package.py @@ -48,6 +48,7 @@ def __init__(self, framework): self.foundversion = '' # version of the package actually found self.version_tuple = '' # version of the package actually found (tuple) self.requiresversion = 0 # error if the version information is not found + self.requirekandr = 0 # package requires KandR compiler flags to build # These are specified for the package self.required = 0 # 1 means the package is required @@ -407,11 +408,8 @@ def updatePackageCFlags(self,flags): outflags = self.removeVisibilityFlag(flags.split()) outflags = self.removeWarningFlags(outflags) outflags = self.removeCoverageFlag(outflags) - with self.Language('C'): - if config.setCompilers.Configure.isClang(self.getCompiler(), self.log): - outflags.append('-Wno-implicit-function-declaration') - if config.setCompilers.Configure.isDarwin(self.log): - outflags.append('-fno-common') + if self.requirekandr: + outflags += self.setCompilers.KandRFlags return ' '.join(outflags) def updatePackageFFlags(self,flags): @@ -573,11 +571,11 @@ def generateLibList(self, directory, liblist = None): alllibs.append(libs) return alllibs - def getIncludeDirs(self, prefix, includeDir): - if isinstance(includeDir, list): - iDirs = [inc for inc in includeDir if os.path.isabs(inc)] + [os.path.join(prefix, inc) for inc in includeDir if not os.path.isabs(inc)] - return [inc for inc in iDirs if os.path.exists(inc)] - return os.path.join(prefix, includeDir) + def getIncludeDirs(self, prefix, includeDirs): + if not isinstance(includeDirs, list): + includeDirs = [includeDirs] + iDirs = [inc for inc in includeDirs if os.path.isabs(inc)] + [os.path.join(prefix, inc) for inc in includeDirs if not os.path.isabs(inc)] + return [inc for inc in iDirs if os.path.exists(inc)] def addToArgs(self,args,key,value): found = 0 @@ -1202,51 +1200,50 @@ def infinitePatch(str): setattr(self.compilers, flagsArg, oldFlags+extraFlags+' '+self.headers.toString(self.dinclude)) self.compilers.saveLog() - # X.py uses a weird list of two headers. 
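The rpartition-based extraction at the end of this function is easiest to see in isolation. Below is a minimal standalone sketch, run against a hardcoded sample of preprocessor output shaped like the MUMPS case; the final slice between the outer parentheses is paraphrased, since this hunk only shows its comment:

# Recover '5.4.1' from: const char *ver = "petscpkgver(" "\"5.4.1\"" ")";
output = 'const char *ver = "petscpkgver(" "\\"5.4.1\\"" ")";'
# The marker sits near the end of the (possibly huge) output, so partition backwards
[mid, right] = output.rpartition('petscpkgver')[1:]
version = ''
if mid:                               # 'petscpkgver' was found
  verLine = right.split(';',1)[0]     # keep the text before the first ';'
  for char in ['\\', ' ', '"']:       # strip backslashes, spaces, and quotes
    verLine = verLine.replace(char, '')
  version = verLine[verLine.find('(')+1:verLine.rfind(')')]
print(version)                        # -> 5.4.1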
+ # Multiple headers are tried in order if not isinstance(self.versioninclude,list): headerList = [self.versioninclude] else: headerList = self.versioninclude - includeLines = '' for header in headerList: - includeLines += '#include "'+header+'"\n' - try: - # We once used '#include "'+self.versioninclude+'"\npetscpkgver('+self.versionname+');\n', - # but some preprocessors are picky (ex. dpcpp -E), reporting errors on the code above even - # it is just supposed to do preprocessing: - # - # error: C++ requires a type specifier for all declarations - # petscpkgver(__SYCL_COMPILER_VERSION); - # ^ - # - # So we instead use this compilable code. - output = self.outputPreprocess( + try: + # We once used '#include "'+self.versioninclude+'"\npetscpkgver('+self.versionname+');\n', + # but some preprocessors are picky (ex. dpcpp -E), reporting errors on the code above even + # though it is just supposed to do preprocessing: + # + # error: C++ requires a type specifier for all declarations + # petscpkgver(__SYCL_COMPILER_VERSION); + # ^ + # + # So we instead use this compilable code. + output = self.outputPreprocess( ''' -{x} +#include "{x}" #define PetscXstr_(s) PetscStr_(s) #define PetscStr_(s) #s const char *ver = "petscpkgver(" PetscXstr_({y}) ")"; -'''.format(x=includeLines, y=self.versionname)) - # Ex. char *ver = "petscpkgver(" "20211206" ")"; - # But after stripping spaces, quotes etc below, it becomes char*ver=petscpkgver(20211206); - self.logWrite(self.compilers.restoreLog()) - except: - self.log.write('For '+self.package+' unable to run preprocessor to obtain version information, skipping version check\n') +'''.format(x=header, y=self.versionname)) + # Ex. char *ver = "petscpkgver(" "20211206" ")"; + # But after stripping spaces, quotes etc below, it becomes char*ver=petscpkgver(20211206); + except: + output = None self.logWrite(self.compilers.restoreLog()) - self.popLanguage() - setattr(self.compilers, flagsArg,oldFlags) - self.version = '' - return + if output: + break self.popLanguage() setattr(self.compilers, flagsArg,oldFlags) + if not output: + self.log.write('For '+self.package+' unable to run preprocessor to obtain version information, skipping version check\n') + self.version = '' + return # the preprocessor output might be very long, but the petscpkgver line should be at the end. Therefore, we partition it backwards [mid, right] = output.rpartition('petscpkgver')[1:] version = '' if mid: # if mid is not empty, then it should be 'petscpkgver', meaning we found the version string verLine = right.split(';',1)[0] # get the string before the first ';'. Preprocessor might dump multiline result. self.log.write('Found the raw version string: ' + verLine +'\n') - # strip backslashs, spaces and quotes. Note Mumps' version macro has "" around it, giving output: (" "\"5.4.1\"" ")"; + # strip backslashes, spaces, and quotes. 
Note MUMPS' version macro has "" around it, giving output: (" "\"5.4.1\"" ")"; for char in ['\\', ' ', '"']: verLine = verLine.replace(char, '') # get the string between the outer () diff --git a/config/BuildSystem/config/packages/AMReX.py b/config/BuildSystem/config/packages/AMReX.py index 317c3fb2ba7..97788156005 100644 --- a/config/BuildSystem/config/packages/AMReX.py +++ b/config/BuildSystem/config/packages/AMReX.py @@ -53,7 +53,7 @@ def formCMakeConfigureArgs(self): if self.cuda.found: GPUBackend = 'CUDA' # Prefer cmake options instead of -DAMReX_CUDA_ARCH - args.append('-DCMAKE_CUDA_ARCHITECTURES:STRING="{}"'.format(self.cuda.cmakeArch())) + args.extend(self.cuda.getCmakeCUDAArchFlag()) elif self.hip.found: GPUBackend = 'HIP' args.append('-DCMAKE_HIP_ARCHITECTURES="'+self.hip.hipArch+'"') diff --git a/config/BuildSystem/config/packages/Chaco.py b/config/BuildSystem/config/packages/Chaco.py index f6e83cca5aa..e2c03889d72 100644 --- a/config/BuildSystem/config/packages/Chaco.py +++ b/config/BuildSystem/config/packages/Chaco.py @@ -11,8 +11,9 @@ def __init__(self, framework): self.includes = [] #Chaco does not have an include file self.liblist = [['libchaco.a']] self.downloadonWindows = 1 - self.requires32bitint = 1; # 1 means that the package will not work with 64-bit integers + self.requires32bitint = 1 # 1 means that the package will not work with 64-bit integers self.hastests = 1 + self.requirekandr = 1 return def setupDependencies(self, framework): diff --git a/config/BuildSystem/config/packages/MFEM.py b/config/BuildSystem/config/packages/MFEM.py index 47a76aa13db..ed2157afcf4 100644 --- a/config/BuildSystem/config/packages/MFEM.py +++ b/config/BuildSystem/config/packages/MFEM.py @@ -17,6 +17,7 @@ def __init__(self, framework): self.skippackagewithoptions = 1 self.builtafterpetsc = 1 self.noMPIUni = 1 + self.precisions = ['single', 'double'] return def setupHelp(self, help): @@ -35,6 +36,7 @@ def setupDependencies(self, framework): self.cuda = framework.require('config.packages.cuda',self) self.hip = framework.require('config.packages.hip',self) self.openmp = framework.require('config.packages.openmp',self) + self.scalar = framework.require('PETSc.options.scalarTypes',self) self.deps = [self.mpi,self.hypre,self.metis] self.odeps = [self.slepc,self.ceed,self.cuda,self.openmp] return @@ -111,6 +113,8 @@ def Install(self): g.write('MFEM_MPIEXEC = '+self.mpi.getMakeMacro('MPIEXEC')+'\n') g.write('MFEM_USE_METIS_5 = YES\n') g.write('MFEM_USE_METIS = YES\n') + if self.scalar.precision == 'single': + g.write('MFEM_PRECISION = single\n') g.write('MFEM_USE_PETSC = YES\n') g.write('HYPRE_OPT = '+self.headers.toString(self.hypre.include)+'\n') g.write('HYPRE_LIB = '+self.libraries.toString(self.hypre.lib)+'\n') @@ -218,6 +222,9 @@ def Install(self): echo "Error installing MFEM. 
Check ${PETSC_ARCH}/lib/petsc/conf/mfem.log" && \\\n\ echo "********************************************************************" && \\\n\ exit 1)']) + exampleDirBuild = os.path.join(buildDir, 'examples', 'petsc') + self.addMakeRule('mfem-check', '', ['@echo "Running MFEM/PETSc check examples"',\ + '-@cd '+exampleDirBuild+' ; ${OMAKE} ex1p-test-par']) if self.argDB['prefix'] and not 'package-prefix-hash' in self.argDB: self.addMakeRule('mfem-build','') diff --git a/config/BuildSystem/config/packages/MOAB.py b/config/BuildSystem/config/packages/MOAB.py index 5447fdc5aed..e8d77d272ca 100644 --- a/config/BuildSystem/config/packages/MOAB.py +++ b/config/BuildSystem/config/packages/MOAB.py @@ -3,7 +3,7 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.download = ['https://web.cels.anl.gov/projects/petsc/download/externalpackages/moab-5.0.0.tar.gz'] + self.download = ['https://web.cels.anl.gov/projects/sigma/downloads/moab/moab-5.5.0.tar.gz'] self.downloaddirnames = ['moab'] # Check for moab::Core and includes/libraries to verify build self.functions = ['Core'] @@ -11,7 +11,6 @@ def __init__(self, framework): self.includes = ['moab/Core.hpp'] self.liblist = [['libiMesh.a', 'libMOAB.a'],['libMOAB.a']] self.buildLanguages = ['Cxx'] - self.maxCxxVersion = 'c++14' self.precisions = ['single','double'] self.hastests = 1 self.useddirectly = 0 diff --git a/config/BuildSystem/config/packages/MPI.py b/config/BuildSystem/config/packages/MPI.py index 5159aea97c3..dbd1fdab89f 100644 --- a/config/BuildSystem/config/packages/MPI.py +++ b/config/BuildSystem/config/packages/MPI.py @@ -739,17 +739,24 @@ def checkMPIDistro(self): MPI_VER = ' '+MPICHPKG+'_VERSION: '+mpich_numversion except: self.logPrint('Unable to parse '+MPICHPKG+' version from header. Probably a buggy preprocessor') - for mpichpkg in ['i_mpi','mvapich2','mpich']: - MPICHPKG = mpichpkg.upper() + for MPICHPKG in ['MPICH', 'I_MPI', 'MVAPICH2']: mpich_test = '#include <mpi.h>\nint mpich_ver = '+MPICHPKG+'_NUMVERSION;\n' if self.checkCompile(mpich_test): buf = self.outputPreprocess(mpich_test) try: mpich_numversion = re.compile('\nint mpich_ver ='+HASHLINESPACE+'([0-9]+)'+HASHLINESPACE+';').search(buf).group(1) - self.addDefine('HAVE_'+MPICHPKG+'_NUMVERSION',mpich_numversion) MPI_VER += ' '+MPICHPKG+'_NUMVERSION: '+mpich_numversion - if mpichpkg == 'mpich': self.mpich_numversion = mpich_numversion - if mpichpkg == 'i_mpi': self.isIntelMPI = 1 + self.addDefine('HAVE_'+MPICHPKG, 1) + # For I_MPI and MVAPICH2, we cannot use petscpkg_version.h since they are not petsc packages yet. + # Instead, we use PETSC_PKG_'MPICHPKG'_NUMVERSION to record the configure-time version for later compile-time checking. + self.addDefine('PKG_'+MPICHPKG+'_NUMVERSION',mpich_numversion) + if MPICHPKG == 'MPICH': + self.mpich_numversion = mpich_numversion + MAJ = int(mpich_numversion)//10000000 # See comments in MPICH.py + MIN = int(mpich_numversion)//100000%100 + REV = int(mpich_numversion)//1000%100 + self.mpich.version_tuple = (MAJ, MIN, REV) # setting version_tuple gets mpich included in petscpkg_version.h + elif MPICHPKG == 'I_MPI': self.isIntelMPI = 1 except: self.logPrint('Unable to parse '+MPICHPKG+' version from header. 
Probably a buggy preprocessor') if MPI_VER: @@ -774,11 +781,10 @@ def checkMPIDistro(self): ompi_major_version = re.compile('\nint ompi_major ='+HASHLINESPACE+'([0-9]+)'+HASHLINESPACE+';').search(buf).group(1) ompi_minor_version = re.compile('\nint ompi_minor ='+HASHLINESPACE+'([0-9]+)'+HASHLINESPACE+';').search(buf).group(1) ompi_release_version = re.compile('\nint ompi_release ='+HASHLINESPACE+'([0-9]+)'+HASHLINESPACE+';').search(buf).group(1) - self.addDefine('HAVE_OMPI_MAJOR_VERSION',ompi_major_version) - self.addDefine('HAVE_OMPI_MINOR_VERSION',ompi_minor_version) - self.addDefine('HAVE_OMPI_RELEASE_VERSION',ompi_release_version) self.ompi_major_version = ompi_major_version self.ompi_version = tuple([int(i) for i in [ompi_major_version,ompi_minor_version,ompi_release_version]]) + self.openmpi.version_tuple = self.ompi_version # setting version_tuple gets openmpi included in petscpkg_version.h, so one can use the macros defined there + self.addDefine('HAVE_OPENMPI', 1) # we have openmpi, though it is not necessarily installed by --download-openmpi self.mpi_pkg_version = ' OMPI_VERSION: '+ompi_major_version+'.'+ompi_minor_version+'.'+ompi_release_version+'\n' MPI_VER = ' OMPI_VERSION: '+ompi_major_version+'.'+ompi_minor_version+'.'+ompi_release_version except: diff --git a/config/BuildSystem/config/packages/MPICH.py b/config/BuildSystem/config/packages/MPICH.py index 91297b74fcf..7de3c4b3cc3 100644 --- a/config/BuildSystem/config/packages/MPICH.py +++ b/config/BuildSystem/config/packages/MPICH.py @@ -4,12 +4,13 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.version = '4.2.0' - self.download_darwin = ['https://web.cels.anl.gov/projects/petsc/download/externalpackages/mpich-4.2.0-p1.tar.gz'] + self.version = '4.2.1' self.download = ['https://github.com/pmodels/mpich/releases/download/v'+self.version+'/mpich-'+self.version+'.tar.gz', 'https://www.mpich.org/static/downloads/'+self.version+'/mpich-'+self.version+'.tar.gz', # does not always work from Python? So add in web.cels URL below 'https://web.cels.anl.gov/projects/petsc/download/externalpackages'+'/mpich-'+self.version+'.tar.gz'] self.download_git = ['git://https://github.com/pmodels/mpich.git'] + self.versionname = 'MPICH_NUMVERSION' + self.includes = ['mpi.h'] self.gitsubmodules = ['.'] self.downloaddirnames = ['mpich'] self.skippackagewithoptions = 1 @@ -26,6 +27,12 @@ def setupDependencies(self, framework): self.odeps = [self.cuda, self.hip, self.hwloc] return + def versionToStandardForm(self,ver): + '''Converts from MPICH 10007201 notation to standard notation 1.0.7''' + # See the format at https://github.com/pmodels/mpich/blob/main/src/include/mpi.h.in#L78 + # 1 digit for MAJ, 2 digits for MIN, 2 digits for REV, 1 digit for EXT and 2 digits for EXT_NUMBER + return ".".join(map(str,[int(ver)//10000000, int(ver)//100000%100, int(ver)//1000%100])) + def setupHelp(self, help): config.package.GNUPackage.setupHelp(self,help) import nargs @@ -69,8 +76,11 @@ def formGNUConfigureArgs(self): mpich_device = 'ch3:nemesis' if self.cuda.found: args.append('--with-cuda='+self.cuda.cudaDir) - if hasattr(self.cuda,'cudaArch'): - args.append('--with-cuda-sm='+self.cuda.cudaArch) # MPICH's default to --with-cuda-sm=XX is 'all' + if hasattr(self.cuda,'cudaArch'): # MPICH's default for --with-cuda-sm is 'auto', which auto-detects the arch of the visible GPUs (similar to our `native`). 
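The branch that follows is easier to read as a standalone mapping; a sketch with a hypothetical helper name, mirroring the logic below:

# Map PETSc's cudaArch setting onto MPICH's --with-cuda-sm configure option
def mpich_cuda_sm_args(cudaArch):
  if cudaArch is None:                     # unset: let MPICH auto-detect ('auto')
    return []
  if cudaArch == 'all':                    # MPICH dropped 'all'; 'all-major' is the closest
    return ['--with-cuda-sm=all-major']
  return ['--with-cuda-sm='+cudaArch]      # version lists pass through, e.g. '70,80'

assert mpich_cuda_sm_args(None) == []
assert mpich_cuda_sm_args('all') == ['--with-cuda-sm=all-major']
assert mpich_cuda_sm_args('70,80') == ['--with-cuda-sm=70,80']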
+ if self.cuda.cudaArch == 'all': + args.append('--with-cuda-sm=all-major') # MPICH stopped supporting 'all' thus we do it with 'all-major' + else: + args.append('--with-cuda-sm='+self.cuda.cudaArch) mpich_device = 'ch4:ucx' elif self.hip.found: args.append('--with-hip='+self.hip.hipDir) diff --git a/config/BuildSystem/config/packages/MUMPS.py b/config/BuildSystem/config/packages/MUMPS.py index 4fa052fc801..7cbab7c252e 100644 --- a/config/BuildSystem/config/packages/MUMPS.py +++ b/config/BuildSystem/config/packages/MUMPS.py @@ -3,12 +3,12 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.version = '5.6.2' + self.version = '5.7.1' self.minversion = '5.2.1' self.versionname = 'MUMPS_VERSION' self.requiresversion = 1 self.gitcommit = 'v'+self.version - self.download = ['https://graal.ens-lyon.fr/MUMPS/MUMPS_'+self.version+'.tar.gz', + self.download = ['https://mumps-solver.org/MUMPS_'+self.version+'.tar.gz', 'https://web.cels.anl.gov/projects/petsc/download/externalpackages/MUMPS_'+self.version+'.tar.gz'] self.downloaddirnames = ['petsc-pkg-mumps','MUMPS'] self.buildLanguages = ['C','FC'] @@ -172,7 +172,7 @@ def Install(self): # To avoid a bug related to MPI_IN_PLACE and old MPICH releases, see MR 4410 self.avoid_mpi_in_place = 0 if 'download-mumps-avoid-mpi-in-place' in self.framework.clArgDB: # user-provided value takes precedence - self.avoid_mpi_in_place = self.clArgDB['download-mumps-avoid-mpi-in-place'] + self.avoid_mpi_in_place = self.framework.clArgDB['download-mumps-avoid-mpi-in-place'] elif hasattr(self.mpi, 'mpich_numversion') and int(self.mpi.mpich_numversion) < 40000101: self.avoid_mpi_in_place = 1 if self.avoid_mpi_in_place: diff --git a/config/BuildSystem/config/packages/OpenMPI.py b/config/BuildSystem/config/packages/OpenMPI.py index 1c24da23161..4b81609336c 100644 --- a/config/BuildSystem/config/packages/OpenMPI.py +++ b/config/BuildSystem/config/packages/OpenMPI.py @@ -4,10 +4,12 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.version = '5.0.2' + self.version = '5.0.3' self.download = ['https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-'+self.version+'.tar.gz', 'https://web.cels.anl.gov/projects/petsc/download/externalpackages/openmpi-'+self.version+'.tar.gz'] self.download_git = ['git://https://github.com/open-mpi/ompi.git'] + self.versionname = 'OMPI_MAJOR_VERSION.OMPI_MINOR_VERSION.OMPI_RELEASE_VERSION' + self.includes = ['mpi.h'] self.gitsubmodules = ['.'] self.downloaddirnames = ['openmpi','ompi'] self.skippackagewithoptions = 1 @@ -57,7 +59,7 @@ def formGNUConfigureArgs(self): def preInstall(self): if not self.getExecutable('perl'): - raise RuntimeError('Cannot find perl required by --download-openmpi, install perl (possibly with a package manager) and run ./configure again') + raise RuntimeError('Cannot find perl required by --download-openmpi, install perl (possibly with a package manager) and run ./configure again') self.Bootstrap('AUTOMAKE_JOBS=%d ./autogen.pl' % self.make.make_np) def checkDownload(self): diff --git a/config/BuildSystem/config/packages/PTScotch.py b/config/BuildSystem/config/packages/PTScotch.py index 0288e95a97b..bd75d13aec0 100644 --- a/config/BuildSystem/config/packages/PTScotch.py +++ b/config/BuildSystem/config/packages/PTScotch.py @@ -3,7 +3,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) 
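PTScotch below is one of several packages in this patch (with Chaco, PaStiX, scalapack, spai, and sprng) that set the new requirekandr attribute from package.py; the flags themselves come from checkKandRFlags in setCompilers.py near the end of the patch. A condensed sketch of the round trip, assuming a GCC/Clang-style compiler that accepts every candidate flag:

# Flags checkKandRFlags probes for; only those the compiler accepts are kept
KANDR_CANDIDATES = ['-Wno-implicit-int', '-Wno-int-conversion',
                    '-Wno-implicit-function-declaration',
                    '-Wno-deprecated-non-prototype', '-fno-common']

def update_package_cflags(flags, requirekandr, kandr_flags):
  outflags = flags.split()   # warning/visibility/coverage filtering elided
  if requirekandr:
    outflags += kandr_flags  # mirrors updatePackageCFlags in package.py
  return ' '.join(outflags)

print(update_package_cflags('-O2 -g', True, KANDR_CANDIDATES))
# -> -O2 -g -Wno-implicit-int ... -fno-common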
- self.version = '7.0.3' + self.version = '7.0.4' self.versionname = 'SCOTCH_VERSION.SCOTCH_RELEASE.SCOTCH_PATCHLEVEL' self.gitcommit = 'v'+self.version self.download = ['git://https://gitlab.inria.fr/scotch/scotch.git', @@ -16,6 +16,7 @@ def __init__(self, framework): self.functionsDefine = ['SCOTCH_ParMETIS_V3_NodeND'] self.includes = ['ptscotch.h'] self.hastests = 1 + self.requirekandr = 1 return def setupDependencies(self, framework): diff --git a/config/BuildSystem/config/packages/PaStiX.py b/config/BuildSystem/config/packages/PaStiX.py index 94b49c4c8ea..a04463dfedb 100644 --- a/config/BuildSystem/config/packages/PaStiX.py +++ b/config/BuildSystem/config/packages/PaStiX.py @@ -3,7 +3,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.version = '5.2.3' + self.version = '5.2.3-p1' self.versionname = 'PASTIX_MAJOR_VERSION.PASTIX_MEDIUM_VERSION.PASTIX_MINOR_VERSION' # 'https://gforge.inria.fr/frs/download.php/file/36212/pastix_'+self.version+'.tar.bz2', self.download = ['https://web.cels.anl.gov/projects/petsc/download/externalpackages/pastix_'+self.version+'.tar.bz2'] @@ -16,6 +16,7 @@ def __init__(self, framework): self.buildLanguages = ['C','FC'] self.hastests = 1 self.hastestsdatafiles= 1 + self.requirekandr = 1 return diff --git a/config/BuildSystem/config/packages/SuiteSparse.py b/config/BuildSystem/config/packages/SuiteSparse.py index 6999ad611e1..90d7765a031 100644 --- a/config/BuildSystem/config/packages/SuiteSparse.py +++ b/config/BuildSystem/config/packages/SuiteSparse.py @@ -6,7 +6,7 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) self.minversion = '5.6.0' - self.version = '7.6.1' + self.version = '7.7.0' self.versioninclude = 'SuiteSparse_config.h' self.versionname = 'SUITESPARSE_MAIN_VERSION.SUITESPARSE_SUB_VERSION.SUITESPARSE_SUBSUB_VERSION' self.gitcommit = 'v'+self.version diff --git a/config/BuildSystem/config/packages/SuperLU_DIST.py b/config/BuildSystem/config/packages/SuperLU_DIST.py index 321ac707562..2051cb41f8a 100644 --- a/config/BuildSystem/config/packages/SuperLU_DIST.py +++ b/config/BuildSystem/config/packages/SuperLU_DIST.py @@ -5,9 +5,9 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) self.minversion = '6.3.0' - self.version = '8.2.1' + self.version = '9.0.0' self.versionname = 'SUPERLU_DIST_MAJOR_VERSION.SUPERLU_DIST_MINOR_VERSION.SUPERLU_DIST_PATCH_VERSION' - self.gitcommit = 'v'+self.version + self.gitcommit = '2e39ceca001f594dc63426f2b500c82f5ce312a3' # v9.0.0+, i.e.: master May 20, 2024 self.download = ['git://https://github.com/xiaoyeli/superlu_dist','https://github.com/xiaoyeli/superlu_dist/archive/'+self.gitcommit+'.tar.gz'] self.functions = ['set_default_options_dist'] self.includes = ['superlu_ddefs.h'] @@ -47,7 +47,7 @@ def formCMakeConfigureArgs(self): args[place]=item[:-1]+' '+self.headers.toString(self.cuda.include)+' -DDEBUGlevel=0 -DPRNTlevel=0"' args.append('-DTPL_ENABLE_CUDALIB=TRUE') args.append('-DTPL_CUDA_LIBRARIES="'+self.libraries.toString(self.cuda.dlib)+'"') - args.append('-DCMAKE_CUDA_ARCHITECTURES:STRING="{}"'.format(self.cuda.cmakeArch())) + args.extend(self.cuda.getCmakeCUDAArchFlag()) with self.Language('CUDA'): # already set in package.py so could be removed, but why are MPI include paths listed here args.append('-DCMAKE_CUDA_FLAGS="'+self.getCompilerFlags()+' 
'+self.mpi.includepaths+' '+self.headers.toString(self.cuda.include)+' -DDEBUGlevel=0 -DPRNTlevel=0"') diff --git a/config/BuildSystem/config/packages/Trilinos.py b/config/BuildSystem/config/packages/Trilinos.py index a9a061c39ae..5f5a51ba9b7 100644 --- a/config/BuildSystem/config/packages/Trilinos.py +++ b/config/BuildSystem/config/packages/Trilinos.py @@ -109,7 +109,7 @@ def formCMakeConfigureArgs(self): trequires = 1 if trequires: if bf.find('(Dev)') > -1: - self.requirespath = 0 + self.requiresrpath = 0 bf = fd.readline() fd.close() diff --git a/config/BuildSystem/config/packages/X.py b/config/BuildSystem/config/packages/X.py index 24c53681ae6..694ef58e2ae 100644 --- a/config/BuildSystem/config/packages/X.py +++ b/config/BuildSystem/config/packages/X.py @@ -4,8 +4,6 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.versionname = 'XORG_VERSION_MAJOR.XORG_VERSION_MINOR.XORG_VERSION_PATCH.XORG_VERSION_SNAP' - self.versioninclude = ['xorg/xorg-server.h','xorg/xorgVersion.h'] self.functions = ['XSetWMName'] self.includes = ['X11/Xlib.h'] self.liblist = [['libX11.a']] @@ -14,10 +12,6 @@ def __init__(self, framework): self.testoptions_whennotfound = '-nox_warning' return - def versionToStandardForm(self,ver): - '''Completes the arithmetic needed to compute the version number from the numerical strings''' - return '.'.join([str(int(eval(i))) for i in ver.split('.')]) - def getSearchDirectories(self): '''Generate list of possible locations of X11''' yield '' diff --git a/config/BuildSystem/config/packages/Zoltan.py b/config/BuildSystem/config/packages/Zoltan.py index 2504c6223cf..39520014779 100644 --- a/config/BuildSystem/config/packages/Zoltan.py +++ b/config/BuildSystem/config/packages/Zoltan.py @@ -4,7 +4,7 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.version = '3.83' + self.version = '3.901' self.versionname = 'ZOLTAN_VERSION_NUMBER' self.download = ['https://web.cels.anl.gov/projects/petsc/download/externalpackages/zoltan_distrib_v'+self.version+'.tar.gz'] self.functions = ['Zoltan_LB_Partition'] @@ -12,6 +12,11 @@ def __init__(self, framework): self.liblist = [['libzoltan.a']] self.buildLanguages = ['C','Cxx'] + def setupHelp(self, help): + config.package.GNUPackage.setupHelp(self,help) + import nargs + help.addArgument('ZOLTAN', '-with-zoltan-fortran-bindings', nargs.ArgBool(None, 0, 'Use/build Zoltan Fortran interface')) + def setupDependencies(self, framework): config.package.GNUPackage.setupDependencies(self, framework) self.parmetis = framework.require('config.packages.parmetis',self) @@ -28,9 +33,13 @@ def formGNUConfigureArgs(self): args.append('--enable-mpi') args.append('CPPFLAGS="'+self.headers.toStringNoDupes(self.dinclude)+'"') args.append('LIBS="'+self.libraries.toStringNoDupes(self.dlib)+'"') - if hasattr(self.compilers, 'FC'): - args.append('--enable-f90interface') - self.addToArgs(args,'FCFLAGS',self.headers.toStringNoDupes(self.dinclude)) + if self.argDB['with-zoltan-fortran-bindings']: + if hasattr(self.compilers, 'FC'): + args.append('--enable-f90interface') + self.addToArgs(args,'FCFLAGS',self.headers.toStringNoDupes(self.dinclude)) + else: + raise RuntimeError('Cannot build Zoltan Fortran bindings with --with-fc=0 or with a malfunctioning Fortran compiler.') + if self.parmetis.found: args.append('--with-parmetis') if self.ptscotch.found: @@ -41,6 +50,8 @@ def Install(self): '''Zoltan does not have a 
make clean''' packageDir = os.path.join(self.packageDir,'petsc-build') args = self.formGNUConfigureArgs() + if self.download and self.argDB['download-'+self.downloadname.lower()+'-configure-arguments']: + args.append(self.argDB['download-'+self.downloadname.lower()+'-configure-arguments']) args = ' '.join(args) conffile = os.path.join(self.packageDir,self.package+'.petscconf') fd = open(conffile, 'w') diff --git a/config/BuildSystem/config/packages/amgx.py b/config/BuildSystem/config/packages/amgx.py index 3929c209908..12c2ce4cb7d 100644 --- a/config/BuildSystem/config/packages/amgx.py +++ b/config/BuildSystem/config/packages/amgx.py @@ -31,6 +31,6 @@ def formCMakeConfigureArgs(self): args.append('-DCMAKE_BUILD_TYPE=RelWithTraces') #args.append('-DCMAKE_CXX_FLAGS="-O3"') #args.append('-DCMAKE_C_FLAGS="-O3"') - args.append('-DCMAKE_CUDA_ARCHITECTURES:STRING="{}"'.format(self.cuda.cmakeArch())) + args.extend(self.cuda.getCmakeCUDAArchFlag()) args.append('-DCUDAToolkit_ROOT=' + self.cuda.cudaDir) return args diff --git a/config/BuildSystem/config/packages/cgns.py b/config/BuildSystem/config/packages/cgns.py index 301dc8ffabb..3fba3772f87 100644 --- a/config/BuildSystem/config/packages/cgns.py +++ b/config/BuildSystem/config/packages/cgns.py @@ -5,9 +5,10 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.version = '4.3.0' - self.gitcommit = 'v' + self.version - self.download = ['git://https://github.com/cgns/cgns', 'https://github.com/cgns/cgns/archive/{}.tar.gz'.format(self.gitcommit)] + #self.version = '4.3.0' + #self.gitcommit = 'v' + self.version + self.gitcommit = '30157c3a893074f0ff28cddff9d746ab613c84ba' # develop Mar-5-2024 (v4.4.0+) + self.download = ['git://https://github.com/cgns/cgns', 'https://github.com/cgns/cgns/archive/{}.tar.gz'.format(self.gitcommit)] self.functions = ['cgp_close'] self.includes = ['cgnslib.h'] self.liblist = [['libcgns.a'], @@ -30,3 +31,12 @@ def formCMakeConfigureArgs(self): if self.hdf5.directory: args.append('-DHDF5_ROOT:PATH={}'.format(self.hdf5.directory)) return args + + def configureLibrary(self): + config.package.Package.configureLibrary(self) + oldFlags = self.compilers.CPPFLAGS + self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include) + if not self.checkCompile('#include "cgnslib.h"', '#if (CG_SIZEOF_SIZE < '+str(self.getDefaultIndexSize())+')\n#error incompatible CG_SIZEOF_SIZE\n#endif\n'): + raise RuntimeError('CGNS specified is incompatible!\n--with-64-bit-indices option requires CGNS built with CGNS_ENABLE_64BIT.\nSuggest using --download-cgns for a compatible CGNS') + self.compilers.CPPFLAGS = oldFlags + return diff --git a/config/BuildSystem/config/packages/cmake.py b/config/BuildSystem/config/packages/cmake.py index 5ab6c92c0cc..cef32d300fc 100644 --- a/config/BuildSystem/config/packages/cmake.py +++ b/config/BuildSystem/config/packages/cmake.py @@ -4,7 +4,7 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.version = '3.28.3' + self.version = '3.29.3' self.download = ['https://github.com/Kitware/CMake/releases/download/v'+self.version+'/cmake-'+self.version+'.tar.gz', 'https://gitlab.kitware.com/cmake/cmake/-/archive/v'+self.version+'/cmake-v'+self.version+'.tar.gz'] self.download_solaris = ['https://cmake.org/files/v3.11/cmake-3.11.4.tar.gz', diff --git a/config/BuildSystem/config/packages/cuda.py b/config/BuildSystem/config/packages/cuda.py index 
134e3463a13..fa6951875bf 100644 --- a/config/BuildSystem/config/packages/cuda.py +++ b/config/BuildSystem/config/packages/cuda.py @@ -75,6 +75,9 @@ def cudaArchList(self): a list of the given cuda arch numbers. raises RuntimeError if cuda arch is not a list of version numbers ''' + if not hasattr(self,'cudaArch'): + raise RuntimeError('cudaArch is not set') from None + arch_list = self.cudaArch.split(',') try: @@ -114,9 +117,12 @@ def clangArchFlags(self): raise RuntimeError('clang only supports cuda archs specified as version number(s) (got "'+self.cudaArch+'")') return ''.join(' --cuda-gpu-arch=sm_'+gen for gen in self.cudaArchList()) - def cmakeArch(self): + def getCmakeCUDAArchFlag(self): # CMake supports 'all', 'all-major', 'native', and a semicolon-separated list of numbers - return self.cudaArch.replace(',', ';') + if hasattr(self,'cudaArch'): + return ['-DCMAKE_CUDA_ARCHITECTURES:STRING="{}"'.format(self.cudaArch.replace(',', ';'))] + else: + return [] def setupDependencies(self, framework): config.package.Package.setupDependencies(self, framework) @@ -219,6 +225,7 @@ def generateLibList(self, directory): stubliblist = config.package.Package.generateLibList(self, nvhpcStubLibDir) liblist.append(mathliblist[0]+cudaliblist[0]+stubliblist[0]) liblist.append(mathliblist[1]+cudaliblist[1]+stubliblist[1]) + self.math_libs_dir = os.path.join(nvhpcDir,'math_libs') # might be used by Kokkos-Kernels # 'directory' is in format D, and we peel 'directory' three times. # We preserve the version info in case a NVHPC installation provides multiple cuda versions and we'd like to respect user's choice @@ -237,6 +244,7 @@ def generateLibList(self, directory): stubliblist = config.package.Package.generateLibList(self, nvhpcStubVerLibDir) liblist.append(mathliblist[0]+cudaliblist[0]+stubliblist[0]) liblist.append(mathliblist[1]+cudaliblist[1]+stubliblist[1]) + self.math_libs_dir = os.path.join(nvhpcDir,'math_libs',ver) return liblist def checkSizeofVoidP(self): @@ -302,11 +310,13 @@ def setCudaDir(self): else: nvccDir = os.path.dirname(self.systemNvcc) # /path/bin d = os.path.dirname(nvccDir) # /path + # d might be /to/Linux_x86_64/21.7/cuda or /to/Linux_x86_64/21.7/cuda/12.2, check if math_libs exist. If yes, we are using NVHPC + if os.path.exists(os.path.join(d,'..','math_libs')) or os.path.exists(os.path.join(d,'..','..','math_libs')): + self.isnvhpc = 1 if os.path.exists(os.path.join(d,'include','cuda.h')): # CUDAToolkit with a structure /path/{bin/nvcc, include/cuda.h} self.cudaDir = d - elif os.path.exists(os.path.normpath(os.path.join(d,'..','cuda','include','cuda.h'))): # NVHPC, see above + elif os.path.exists(os.path.normpath(os.path.join(d,'..','cuda','include','cuda.h'))): # could be NVHPC self.cudaDir = os.path.normpath(os.path.join(d,'..','cuda')) # get rid of .. 
in path, getting /path/Linux_x86_64/21.5/cuda - self.isnvhpc = 1 if not hasattr(self, 'cudaDir'): raise RuntimeError('CUDA directory not found!') @@ -351,10 +361,16 @@ def configureLibrary(self): #include #include #include ''' - body = '''int cerr; + body = '''cudaError_t cerr; cudaDeviceProp dp; cerr = cudaGetDeviceProperties(&dp, 0); - if (cerr) printf("Error calling cudaGetDeviceProperties\\n"); + if (cerr) { + #if (CUDART_VERSION >= 8000) + printf("Error calling cudaGetDeviceProperties with CUDA error %d (%s) : %s\\n", (int)cerr, cudaGetErrorName(cerr), cudaGetErrorString(cerr)); + #else + printf("Error calling cudaGetDeviceProperties with CUDA error %d\\n", (int)cerr); + #endif + } else printf("%d\\n",10*dp.major+dp.minor); return(cerr);''' self.pushLanguage('CUDA') @@ -374,6 +390,9 @@ def configureLibrary(self): else: self.log.write('petsc-supplied CUDA device query test found the CUDA Capability is '+str(gen)+'\n') self.cudaArch = str(gen) + # Store min cuda arch at configure time for later error diagnosis + if self.cudaArchIsVersionList(): + self.addDefine('PKG_CUDA_MIN_ARCH', min(self.cudaArchList())) # Check flags validity if hasattr(self,'cudaArch'): diff --git a/config/BuildSystem/config/packages/hip.py b/config/BuildSystem/config/packages/hip.py index 1c74e9a7413..6674e35e4e8 100644 --- a/config/BuildSystem/config/packages/hip.py +++ b/config/BuildSystem/config/packages/hip.py @@ -11,7 +11,7 @@ def __init__(self, framework): self.minversion = '5.0.0' # Check version from rocm-core here, as HIP_VERSION_PATCH (e.g., 31061 from hip_version.h) is not necessarily the AMD advertised patch version, e.g., in 5.6.0 self.versionname = 'ROCM_VERSION_MAJOR.ROCM_VERSION_MINOR.ROCM_VERSION_PATCH' - self.versioninclude = 'rocm_version.h' + self.versioninclude = ['rocm-core/rocm_version.h', 'rocm_version.h'] self.requiresversion = 1 self.functionsCxx = [1,'', 'rocblas_create'] self.includes = ['hip/hip_runtime.h'] diff --git a/config/BuildSystem/config/packages/hpddm.py b/config/BuildSystem/config/packages/hpddm.py index 443b99f0e73..264dde616c8 100644 --- a/config/BuildSystem/config/packages/hpddm.py +++ b/config/BuildSystem/config/packages/hpddm.py @@ -3,8 +3,8 @@ class Configure(config.package.Package): def __init__(self,framework): config.package.Package.__init__(self,framework) - # self.version = '2.2.5' - self.gitcommit = '201eecd26177f88d7bb6287251877d8013fb64d2' # main jan-01-2024 + self.version = '2.3.0' + self.gitcommit = '66c82a45db9fb64ece5a69de8b4d27896b96c30e' # main may-12-2024 self.download = ['git://https://github.com/hpddm/hpddm','https://github.com/hpddm/hpddm/archive/'+self.gitcommit+'.tar.gz'] self.minversion = '2.2.1' self.versionname = 'HPDDM_VERSION' diff --git a/config/BuildSystem/config/packages/hypre.py b/config/BuildSystem/config/packages/hypre.py index bbb610bc754..c714ad1a4d1 100644 --- a/config/BuildSystem/config/packages/hypre.py +++ b/config/BuildSystem/config/packages/hypre.py @@ -16,10 +16,7 @@ def __init__(self, framework): self.includes = ['HYPRE.h'] self.liblist = [['libHYPRE.a']] self.buildLanguages = ['C','Cxx'] - # Per hypre users guide section 7.5 - install manually on windows for MS compilers. 
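The precision restriction being removed here is replaced by explicit configure switches added further down in formGNUConfigureArgs; a minimal sketch of that mapping, with a hypothetical helper name:

# hypre's default build is double precision; other precisions need a switch
def hypre_precision_args(precision):
  if precision == 'single':
    return ['--enable-single']
  if precision == '__float128':
    return ['--enable-longdouble']
  return []

assert hypre_precision_args('double') == []
assert hypre_precision_args('single') == ['--enable-single']
assert hypre_precision_args('__float128') == ['--enable-longdouble']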
- self.precisions = ['double'] - # HYPRE is supposed to work with complex number - #self.complex = 0 + self.precisions = ['single', 'double', '__float128'] self.hastests = 1 self.hastestsdatafiles = 1 @@ -75,6 +72,12 @@ def formGNUConfigureArgs(self): args.append('--with-blas=no') args.append('--with-lapack=no') + # floating point precisions + if self.scalar.precision == 'single': + args.append('--enable-single') + elif self.scalar.precision == '__float128': + args.append('--enable-longdouble') + # HYPRE automatically detects essl symbols and includes essl.h! # There are no configure options to disable it programmatically if hasattr(self.blasLapack,'essl'): @@ -90,6 +93,7 @@ def formGNUConfigureArgs(self): if self.hip.found: stdflag = '-std=c++14' hipbuild = True + args.append('ROCM_PATH="{0}"'.format(self.hip.hipDir)) args.append('--with-hip') if not hasharch: if not 'with-hypre-gpu-arch' in self.framework.clArgDB: diff --git a/config/BuildSystem/config/packages/kokkos-kernels.py b/config/BuildSystem/config/packages/kokkos-kernels.py index b69a76a6da9..a80b102d7f5 100644 --- a/config/BuildSystem/config/packages/kokkos-kernels.py +++ b/config/BuildSystem/config/packages/kokkos-kernels.py @@ -4,7 +4,7 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.gitcommit = '4.2.01' + self.gitcommit = '4.3.01' self.minversion = '3.7.01' self.versionname = 'KOKKOSKERNELS_VERSION' self.download = ['git://https://github.com/kokkos/kokkos-kernels.git','https://github.com/kokkos/kokkos-kernels/archive/'+self.gitcommit+'.tar.gz'] @@ -80,6 +80,10 @@ def formCMakeConfigureArgs(self): args.append('-DKokkosKernels_ENABLE_TPL_CUBLAS=OFF') # These are turned ON by KK by default when CUDA is enabled args.append('-DKokkosKernels_ENABLE_TPL_CUSPARSE=OFF') args.append('-DKokkosKernels_ENABLE_TPL_CUSOLVER=OFF') + elif hasattr(self.cuda, 'math_libs_dir'): # KK-4.3+ failed to locate nvhpc math_libs on Perlmutter@NERSC, so we set them explicitly + args.append('-DCUBLAS_ROOT='+self.cuda.math_libs_dir) + args.append('-DCUSPARSE_ROOT='+self.cuda.math_libs_dir) + args.append('-DCUSOLVER_ROOT='+self.cuda.math_libs_dir) elif self.hip.found: args = self.rmArgsStartsWith(args,'-DCMAKE_CXX_COMPILER=') args.append('-DCMAKE_CXX_COMPILER='+self.getCompiler('HIP')) @@ -117,5 +121,13 @@ def formCMakeConfigureArgs(self): return args def configureLibrary(self): + needRestore = False self.buildLanguages= self.kokkos.buildLanguages + if self.cuda.found and not self.cuda.cudaclang: + oldFlags = self.setCompilers.CUDAPPFLAGS + self.setCompilers.CUDAPPFLAGS += " -ccbin " + self.getCompiler('Cxx') + needRestore = True + config.package.CMakePackage.configureLibrary(self) + + if needRestore: self.setCompilers.CUDAPPFLAGS = oldFlags diff --git a/config/BuildSystem/config/packages/kokkos.py b/config/BuildSystem/config/packages/kokkos.py index 1aaf8846018..1b385b00d4f 100644 --- a/config/BuildSystem/config/packages/kokkos.py +++ b/config/BuildSystem/config/packages/kokkos.py @@ -4,7 +4,7 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.gitcommit = '4.2.01' + self.gitcommit = '4.3.01' self.minversion = '3.7.01' self.versionname = 'KOKKOS_VERSION' self.download = ['git://https://github.com/kokkos/kokkos.git','https://github.com/kokkos/kokkos/archive/'+self.gitcommit+'.tar.gz'] diff --git a/config/BuildSystem/config/packages/metis.py 
b/config/BuildSystem/config/packages/metis.py index ff5aa06909f..467a2627202 100644 --- a/config/BuildSystem/config/packages/metis.py +++ b/config/BuildSystem/config/packages/metis.py @@ -4,7 +4,7 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) self.versionname = 'METIS_VER_MAJOR.METIS_VER_MINOR.METIS_VER_SUBMINOR' - self.gitcommit = 'v5.1.0-p11' + self.gitcommit = 'v5.1.0-p12' self.download = ['git://https://bitbucket.org/petsc/pkg-metis.git','https://bitbucket.org/petsc/pkg-metis/get/'+self.gitcommit+'.tar.gz'] self.downloaddirnames = ['petsc-pkg-metis'] self.functions = ['METIS_PartGraphKway'] diff --git a/config/BuildSystem/config/packages/ml.py b/config/BuildSystem/config/packages/ml.py index 25615ba3cc1..05cc4e345cc 100644 --- a/config/BuildSystem/config/packages/ml.py +++ b/config/BuildSystem/config/packages/ml.py @@ -29,10 +29,54 @@ def setupDependencies(self, framework): self.mpi = framework.require('config.packages.MPI',self) self.blasLapack = framework.require('config.packages.BlasLapack',self) self.mathlib = framework.require('config.packages.mathlib',self) + self.metis = framework.require('config.packages.metis',self) self.deps = [self.mpi,self.blasLapack,self.cxxlibs,self.mathlib] + self.odeps = [self.metis] return + # older versions of Trilinos require passing rpath with the various library paths + # this caused problems on Apple with CMake generating command lines that are too long + # Trilinos was fixed to handle the rpath internally using CMake + def toStringNoDupes(self,string): + string = self.libraries.toStringNoDupes(string) + if self.requiresrpath: return string + newstring = '' + for i in string.split(' '): + if i.find('-rpath') == -1: + newstring = newstring+' '+i + return newstring.strip() + + def toString(self,string): + string = self.libraries.toString(string) + if self.requiresrpath: return string + newstring = '' + for i in string.split(' '): + if i.find('-rpath') == -1: + newstring = newstring+' '+i + return newstring.strip() + def formCMakeConfigureArgs(self): + if '++' in self.externalPackagesDir: + raise RuntimeError('Cannot build ml in a folder containing "++"') + self.requiresrpath = 1 + # Get trilinos version + # if version is 120900 (Dev) or higher than don't require rpaths + trequires = 0 + fd = open(os.path.join(self.packageDir,'Version.cmake')) + bf = fd.readline() + while bf: + if bf.startswith('SET(Trilinos_MAJOR_MINOR_VERSION'): + bf = bf[34:39] + bf = int(bf) + if bf > 120900: + self.requiresrpath = 0 + if bf == 120900: + trequires = 1 + if trequires: + if bf.find('(Dev)') > -1: + self.requiresrpath = 0 + bf = fd.readline() + fd.close() args = config.package.CMakePackage.formCMakeConfigureArgs(self) args.append('-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES=OFF') args.append('-DTrilinos_ENABLE_ALL_PACKAGES=OFF') @@ -41,6 +85,10 @@ def formCMakeConfigureArgs(self): args.append('-DTPL_LAPACK_LIBRARIES="'+self.libraries.toString(self.blasLapack.dlib)+'"') args.append('-DBUILD_SHARED_LIBS=ON') args.append('-DTPL_ENABLE_MPI=ON') + if self.metis.found: + args.append('-DTPL_ENABLE_METIS=ON') + args.append('-DTPL_METIS_LIBRARIES="'+self.toStringNoDupes(self.metis.lib)+'"') + args.append('-DTPL_METIS_INCLUDE_DIRS="'+self.headers.toStringNoDupes(self.metis.include)[2:]+'"') if not hasattr(self.compilers, 'FC'): args.append('-DTrilinos_ENABLE_Fortran=OFF') diff --git a/config/BuildSystem/config/packages/mmg.py b/config/BuildSystem/config/packages/mmg.py index 
51c2e58fcd1..bd1ed34d406 100644 --- a/config/BuildSystem/config/packages/mmg.py +++ b/config/BuildSystem/config/packages/mmg.py @@ -4,7 +4,7 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.gitcommit = 'a9e4fd6a1b028d5fd7a7f0ab7eef15c5fde5a4a3' # develop feb-13-2024 + self.gitcommit = 'e89f046ce10e969b5d46af9eb058ec6dc43af2d8' # develop apr-25-2024 self.download = ['git://https://github.com/MmgTools/mmg.git','https://github.com/MmgTools/mmg/archive/'+self.gitcommit+'.tar.gz'] self.versionname = 'MMG_VERSION_RELEASE' self.includes = ['mmg/libmmg.h'] diff --git a/config/BuildSystem/config/packages/mpi4py.py b/config/BuildSystem/config/packages/mpi4py.py index 78e7735421c..5884fc05ea6 100644 --- a/config/BuildSystem/config/packages/mpi4py.py +++ b/config/BuildSystem/config/packages/mpi4py.py @@ -5,7 +5,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.download = ['https://github.com/mpi4py/mpi4py/releases/download/3.1.5/mpi4py-3.1.5.tar.gz'] + self.download = ['https://github.com/mpi4py/mpi4py/releases/download/3.1.6/mpi4py-3.1.6.tar.gz'] self.functions = [] self.includes = [] self.useddirectly = 0 diff --git a/config/BuildSystem/config/packages/openblas.py b/config/BuildSystem/config/packages/openblas.py index 0013b13d2a6..60e4d68d6e9 100644 --- a/config/BuildSystem/config/packages/openblas.py +++ b/config/BuildSystem/config/packages/openblas.py @@ -11,8 +11,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.version = '0.3.21' - self.gitcommit = 'v'+self.version + self.gitcommit = '9af2a9dc3b506f696137c2fe0b28d3d6c218b0ac' # develop Mar-25-2024 (0.3.26+) self.versionname = 'OPENBLAS_VERSION' self.download = ['git://https://github.com/xianyi/OpenBLAS.git','https://github.com/xianyi/OpenBLAS/archive/'+self.gitcommit+'.tar.gz'] self.versioninclude = 'openblas_config.h' @@ -81,6 +80,8 @@ def Install(self): cmdline+=" "+self.argDB['download-openblas-make-options'] if not self.argDB['with-shared-libraries']: cmdline += " NO_SHARED=1 " + else: + cmdline += " NO_STATIC=1 " cmdline += " MAKE_NB_JOBS="+str(self.make.make_np)+" " usespthreads = False if 'download-openblas-use-pthreads' in self.argDB and self.argDB['download-openblas-use-pthreads']: @@ -100,7 +101,7 @@ def Install(self): else: cmdline += " USE_THREAD=0 " cmdline += " NO_EXPRECISION=1 " - cmdline += " libs netlib re_lapack shared " + cmdline += " shared " self.include = [os.path.join(self.installDir,'include')] libdir = self.libDir @@ -113,13 +114,13 @@ def Install(self): try: self.logPrintBox('Compiling OpenBLAS; this may take several minutes') - output1,err1,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && make '+cmdline, timeout=2500, log = self.log) + output1,err1,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && '+self.make.make+' '+cmdline, timeout=2500, log = self.log) except RuntimeError as e: self.logPrint('Error running make on '+blasDir+': '+str(e)) raise RuntimeError('Error running make on '+blasDir) try: self.logPrintBox('Installing OpenBLAS') - output2,err2,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && make PREFIX='+self.installDir+' '+cmdline+' install', timeout=60, log = self.log) + output2,err2,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && '+self.make.make+' PREFIX='+self.installDir+' 
'+cmdline+' install', timeout=60, log = self.log) except RuntimeError as e: self.logPrint('Error moving '+blasDir+' libraries: '+str(e)) raise RuntimeError('Error moving '+blasDir+' libraries') diff --git a/config/BuildSystem/config/packages/p4est.py b/config/BuildSystem/config/packages/p4est.py index 1099aa44c25..10392abb22d 100644 --- a/config/BuildSystem/config/packages/p4est.py +++ b/config/BuildSystem/config/packages/p4est.py @@ -4,7 +4,8 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.gitcommit = 'aafb87d93e33dffe24d67533d56bc2f0cdb72605' # master oct-26-2023 + self.version = '2.8.6' + self.gitcommit = 'v'+self.version self.download = ['git://https://github.com/cburstedde/p4est','https://github.com/cburstedde/p4est/archive/'+self.gitcommit+'.tar.gz'] self.versionname = 'P4EST_VERSION_MAJOR.P4EST_VERSION_MINOR.P4EST_VERSION_POINT' self.versioninclude = 'p4est_config.h' diff --git a/config/BuildSystem/config/packages/parmmg.py b/config/BuildSystem/config/packages/parmmg.py index 0591f96bf85..16b1cdea0d4 100644 --- a/config/BuildSystem/config/packages/parmmg.py +++ b/config/BuildSystem/config/packages/parmmg.py @@ -4,7 +4,7 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.gitcommit = '11fec662f8b494eb41f5d13f1aaa12b6311d9b25' # develop feb-26-2024 + self.gitcommit = 'f8a5338ea1bb2c778bfb4559c2c3974ba15b4730' # develop apr-22-2024 self.download = ['git://https://github.com/MmgTools/ParMmg.git','https://github.com/MmgTools/ParMmg/archive/'+self.gitcommit+'.tar.gz'] self.versionname = 'PMMG_VERSION_RELEASE' self.includes = ['parmmg/libparmmg.h'] diff --git a/config/BuildSystem/config/packages/scalapack.py b/config/BuildSystem/config/packages/scalapack.py index 63f5cc8b274..a4bf7ec9a52 100644 --- a/config/BuildSystem/config/packages/scalapack.py +++ b/config/BuildSystem/config/packages/scalapack.py @@ -20,6 +20,7 @@ def __init__(self, framework): self.makerulename = 'scalapack' self.minCmakeVersion = (3,9,0) self.libDirs = ['lib',os.path.join('lib','intel64')] + self.requirekandr = 1 return def setupDependencies(self, framework): diff --git a/config/BuildSystem/config/packages/slate.py b/config/BuildSystem/config/packages/slate.py index 2de2d3b5f4d..336bf7fef8b 100644 --- a/config/BuildSystem/config/packages/slate.py +++ b/config/BuildSystem/config/packages/slate.py @@ -39,7 +39,7 @@ def formCMakeConfigureArgs(self): if self.cuda.found: args.append('-Dgpu_backend=cuda') - args.append('-DCMAKE_CUDA_ARCHITECTURES:STRING="{}"'.format(self.cuda.cmakeArch())) + args.extend(self.cuda.getCmakeCUDAArchFlag()) elif self.hip.found: args.append('-Dgpu_backend=hip') args.append('-DCMAKE_HIP_ARCHITECTURES="'+self.hip.hipArch+'"') # cmake supports format like "gfx801;gfx900" diff --git a/config/BuildSystem/config/packages/slepc.py b/config/BuildSystem/config/packages/slepc.py index 2a32bbdd2c2..668634e7855 100644 --- a/config/BuildSystem/config/packages/slepc.py +++ b/config/BuildSystem/config/packages/slepc.py @@ -3,7 +3,8 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.gitcommit = '7a35a7b972895e94e214c2664698eba8c3004b4b' # jose/threadsafe-petscoptionsgetviewer Nov 23, 2023 + self.gitcommit = 'f411c6140a224d8aa23c687eec932852ff777229' # jose/minor-fortran-stub-cleanup, jun 11, 2024 to main + #self.gitcommit = 'v'+self.version self.download = 
['git://https://gitlab.com/slepc/slepc.git','https://gitlab.com/slepc/slepc/-/archive/'+self.gitcommit+'/slepc-'+self.gitcommit+'.tar.gz'] self.functions = [] self.includes = [] diff --git a/config/BuildSystem/config/packages/sowing.py index 9667e153abd..3334ed71568 100644 --- a/config/BuildSystem/config/packages/sowing.py +++ b/config/BuildSystem/config/packages/sowing.py @@ -8,8 +8,8 @@ def noCheck(command, status, output, error): class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.minversion = '1.1.26.8' - self.gitcommit = 'v1.1.26-p8' + self.minversion = '1.1.26.12' + self.gitcommit = 'v1.1.26.12' self.download = ['git://https://bitbucket.org/petsc/pkg-sowing.git','https://bitbucket.org/petsc/pkg-sowing/get/'+self.gitcommit+'.tar.gz'] self.downloaddirnames = ['petsc-pkg-sowing'] self.downloadonWindows = 1 @@ -54,10 +54,11 @@ def alternateConfigureLibrary(self): def checkBfortVersion(self): '''Check if the bfort version is recent enough''' + self.logPrint("Checking bfort version\n") try: import re (output, error, status) = config.base.Configure.executeShellCommand(self.bfort+' -version', checkCommand=noCheck, log = self.log) - ver = re.compile(r'bfort \(sowing\) release ([0-9]+).([0-9]+).([0-9]+)').match(output) + ver = re.compile(r'bfort \(sowing\) release ([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)').match(output) foundversion = tuple(map(int,ver.groups())) self.foundversion = ".".join(map(str,foundversion)) except (RuntimeError,AttributeError) as e: @@ -154,7 +155,7 @@ def buildFortranStubs(self): arch = '' else: arch = self.arch - generatefortranstubs.main(self.petscdir.dir, arch,self.bfort, os.path.join(self.petscdir.dir,'src'),0) + generatefortranstubs.main(self.petscdir.dir, arch,self.bfort, self.petscdir.dir,0) if self.fortran.fortranIsF90: generatefortranstubs.processf90interfaces(self.petscdir.dir,arch,0) self.framework.actions.addArgument('PETSc', 'File creation', 'Generated Fortran stubs') diff --git a/config/BuildSystem/config/packages/spai.py index 708341a35b9..91ff11d0fe7 100644 --- a/config/BuildSystem/config/packages/spai.py +++ b/config/BuildSystem/config/packages/spai.py @@ -12,6 +12,7 @@ def __init__(self, framework): self.requires32bitint = 1 self.complex = 0 self.hastests = 1 + self.requirekandr = 1 return def setupDependencies(self, framework): diff --git a/config/BuildSystem/config/packages/sprng.py index ab94ffa5663..4a780ab9cba 100644 --- a/config/BuildSystem/config/packages/sprng.py +++ b/config/BuildSystem/config/packages/sprng.py @@ -3,10 +3,11 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.download = ['https://web.cels.anl.gov/projects/petsc/download/externalpackages/sprng-1.0.tar.gz'] - self.functions = ['make_new_seed_mpi'] - self.includes = ['sprng.h'] - self.liblist = [['liblcg.a']] + self.download = ['https://web.cels.anl.gov/projects/petsc/download/externalpackages/sprng-1.0.tar.gz'] + self.functions = ['make_new_seed_mpi'] + self.includes = ['sprng.h'] + self.liblist = [['liblcg.a']] + self.requirekandr = 1 return def setupDependencies(self, framework):
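For reference, the tightened ``checkBfortVersion()`` above now expects a four-component release number from ``bfort -version`` (the old pattern accepted only three components). A standalone sketch of the parse and comparison, not part of the patch, using the same pattern and the new ``minversion``::

    import re

    # Example banner; the real text comes from running `bfort -version`
    output = 'bfort (sowing) release 1.1.26.12'

    # Same four-component pattern as in checkBfortVersion() above
    ver = re.compile(r'bfort \(sowing\) release ([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)').match(output)
    foundversion = tuple(map(int, ver.groups()))  # (1, 1, 26, 12)

    # Tuple comparison gives the usual version ordering against the new minimum
    minversion = tuple(map(int, '1.1.26.12'.split('.')))
    assert foundversion >= minversion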
diff --git a/config/BuildSystem/config/packages/strumpack.py index 9b28235fd34..7ec5fa2bca9 100644 --- a/config/BuildSystem/config/packages/strumpack.py +++ b/config/BuildSystem/config/packages/strumpack.py @@ -90,7 +90,7 @@ def formCMakeConfigureArgs(self): # https://portal.nersc.gov/project/sparse/strumpack/master/GPU_Support.html if self.cuda.found: args.append('-DSTRUMPACK_USE_CUDA=ON') - args.append('-DCMAKE_CUDA_ARCHITECTURES:STRING="{}"'.format(self.cuda.cmakeArch())) + args.extend(self.cuda.getCmakeCUDAArchFlag()) elif self.hip.found: args.append('-DSTRUMPACK_USE_HIP=ON') # Not using -DHIP_HIPCC_FLAGS=--amdgpu-target=gfx906 as mentioned in the doc, because we prefer standardized cmake options diff --git a/config/BuildSystem/config/packages/zfp.py index 63b21a64d92..f9d0bc97351 100644 --- a/config/BuildSystem/config/packages/zfp.py +++ b/config/BuildSystem/config/packages/zfp.py @@ -39,7 +39,7 @@ def formCMakeConfigureArgs(self): # if self.cuda.found: # args.append('-DZFP_WITH_CUDA=ON') - # args.append('-DCMAKE_CUDA_ARCHITECTURES:STRING="{}"'.format(self.cuda.cmakeArch())) + # args.extend(self.cuda.getCmakeCUDAArchFlag()) # else: # args.append('-DZFP_WITH_CUDA=OFF') diff --git a/config/BuildSystem/config/setCompilers.py index 58c9c43d4bb..bcf0181ae2a 100644 --- a/config/BuildSystem/config/setCompilers.py +++ b/config/BuildSystem/config/setCompilers.py @@ -262,7 +262,7 @@ def isClang(compiler, log): (output, error, status) = config.base.Configure.executeShellCommand(compiler+' --help | head -n 500', log = log, logOutputflg = False) output = output + error found = (any([s in output for s in ['Emit Clang AST']]) - and not any([s in output for s in ['Intel(R)','Win32 Development Tool Front End']])) + and not any([s in output for s in ['Win32 Development Tool Front End']])) if found: if log: log.write('Detected CLANG compiler\n') return 1 @@ -281,6 +281,19 @@ def isHIP(compiler, log): except RuntimeError: pass + @staticmethod + def isOneAPI(compiler, log): + '''Returns true if the compiler is an Intel oneAPI compiler''' + try: + (output, error, status) = config.base.Configure.executeShellCommand(compiler+' --version', log = log) + output = output + error + found = any([s in output for s in ['Intel(R) oneAPI']]) + if found: + if log: log.write('Detected Intel oneAPI compiler\n') + return 1 + except RuntimeError: + pass + @staticmethod def isSYCL(compiler, log): '''Returns true if the compiler is a SYCL compiler''' @@ -2132,6 +2145,15 @@ def checkPIC(self): self.popLanguage() return + def checkKandRFlags(self): + '''Check C compiler flags that allow compiling K and R code (needed for some external packages)''' + self.KandRFlags = [] + with self.Language('C'): + if config.setCompilers.Configure.isGNU(self.getCompiler(), self.log) or config.setCompilers.Configure.isClang(self.getCompiler(), self.log): + for f in ['-Wno-implicit-int', '-Wno-int-conversion', '-Wno-implicit-function-declaration', '-Wno-deprecated-non-prototype', '-fno-common']: + if self.checkCompilerFlag(f, compilerOnly = 1): + self.KandRFlags.append(f) + def checkLargeFileIO(self): '''check for large file support with 64-bit offset''' if not self.argDB['with-large-file-io']: @@ -2851,6 +2873,7 @@ def configure(self): if Configure.isCygwin(self.log): self.executeTest(self.checkLinkerWindows) self.executeTest(self.checkPIC) + self.executeTest(self.checkKandRFlags) self.executeTest(self.checkSharedLinkerPaths) self.executeTest(self.checkLibC) self.executeTest(self.checkDynamicLinker)
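The new ``checkKandRFlags()`` above collects warning-suppression flags so that external packages that still ship pre-ANSI (K&R) C, such as scalapack, spai, and sprng (which now set ``requirekandr = 1`` in their hunks above), keep building with recent GCC and Clang, where implicit-int and implicit-function-declaration diagnostics have become hard errors. A standalone sketch of the filtering logic, with ``accepts()`` standing in for the real ``checkCompilerFlag()`` probe (an assumption, not the actual implementation)::

    candidate_flags = ['-Wno-implicit-int', '-Wno-int-conversion',
                       '-Wno-implicit-function-declaration',
                       '-Wno-deprecated-non-prototype', '-fno-common']

    def accepts(flag):
        # Stand-in for self.checkCompilerFlag(flag, compilerOnly=1), which
        # test-compiles a stub with the flag appended and rejects flags the
        # compiler does not understand
        return True

    KandRFlags = [f for f in candidate_flags if accepts(f)]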
diff --git a/config/BuildSystem/config/utilities/fortranCommandLine.py b/config/BuildSystem/config/utilities/fortranCommandLine.py index c19092c43fb..3ac311d560a 100755 --- a/config/BuildSystem/config/utilities/fortranCommandLine.py +++ b/config/BuildSystem/config/utilities/fortranCommandLine.py @@ -6,6 +6,7 @@ def __init__(self, framework): config.base.Configure.__init__(self, framework) self.headerPrefix = '' self.substPrefix = '' + self.have_command_argument = False return def __str__(self): @@ -30,72 +31,12 @@ def configureFortranCommandLine(self): self.libraries.pushLanguage('FC') self.libraries.saveLog() - if self.libraries.check('','', call = ' integer i\n character*(80) arg\n i = command_argument_count()\n call get_command_argument(i,arg)'): + if self.libraries.check('','', call = ' integer i\n character(len=80) arg\n i = command_argument_count()\n call get_command_argument(i,arg)'): self.logWrite(self.libraries.restoreLog()) self.libraries.popLanguage() - self.addDefine('HAVE_FORTRAN_GET_COMMAND_ARGUMENT',1) - return - - # These are for when the routines are called from C - # We should unify the naming conventions of these. - self.pushLanguage('C') - self.libraries.saveLog() - self.functions.saveLog() - if self.functions.check('ipxfargc_', libraries = self.compilers.flibs): - self.logWrite(self.functions.restoreLog()) - self.logWrite(self.libraries.restoreLog()) - self.popLanguage() - self.addDefine('HAVE_PXFGETARG_NEW',1) - return - - self.pushLanguage('C') - self.libraries.saveLog() - self.functions.saveLog() - if self.functions.check('f90_unix_MP_iargc', libraries = self.compilers.flibs): - self.logWrite(self.functions.restoreLog()) - self.logWrite(self.libraries.restoreLog()) - self.popLanguage() - self.addDefine('HAVE_NAGF90',1) - return - - self.pushLanguage('C') - self.libraries.saveLog() - self.functions.saveLog() - if self.functions.check('PXFGETARG', libraries = self.compilers.flibs): - self.logWrite(self.functions.restoreLog()) - self.logWrite(self.libraries.restoreLog()) - self.popLanguage() - self.addDefine('HAVE_PXFGETARG',1) - return - - self.pushLanguage('C') - self.libraries.saveLog() - self.functions.saveLog() - if self.functions.check('iargc_', libraries = self.compilers.flibs): - self.logWrite(self.functions.restoreLog()) - self.logWrite(self.libraries.restoreLog()) - self.popLanguage() - self.addDefine('HAVE_BGL_IARGC',1) - return - - self.pushLanguage('C') - self.libraries.saveLog() - self.functions.saveLog() - if self.functions.check('GETARG@16', libraries = self.compilers.flibs): - self.logWrite(self.functions.restoreLog()) - self.logWrite(self.libraries.restoreLog()) - self.popLanguage() - self.addDefine('USE_NARGS',1) - self.addDefine('HAVE_IARG_COUNT_PROGNAME',1) - return - - self.pushLanguage('C') - self.libraries.saveLog() - self.functions.saveLog() - self.functions.check('_gfortran_iargc', libraries = self.compilers.flibs) - self.logWrite(self.functions.restoreLog()) - self.logWrite(self.libraries.restoreLog()) - self.popLanguage() + self.have_command_argument = True + else: + self.logPrint("Missing GET_COMMAND_ARGUMENT() support in Fortran!") return def configure(self):
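The lone remaining check in ``configureFortranCommandLine()`` above hands ``self.libraries.check()`` a Fortran body exercising the F2003 intrinsics; the whole chain of compiler-specific C fallbacks (``ipxfargc_``, ``f90_unix_MP_iargc``, ``PXFGETARG``, ``GETARG@16``, and friends) is gone. A sketch, not part of the patch, of the probe body as configure passes it (the wrapping into a program unit is done by the library check itself)::

    # The call body passed to self.libraries.check() above; the check wraps it
    # in a program unit and test-links it, so it is roughly equivalent to
    # compiling:
    #
    #     integer i
    #     character(len=80) arg
    #     i = command_argument_count()
    #     call get_command_argument(i,arg)
    #
    call_body = (' integer i\n'
                 ' character(len=80) arg\n'
                 ' i = command_argument_count()\n'
                 ' call get_command_argument(i,arg)')

If the probe fails, ``have_command_argument`` stays ``False`` and, as the ``config/PETSc/Configure.py`` hunk below shows, configuring the Fortran bindings now aborts with a ``RuntimeError`` instead of falling back.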
diff --git a/config/PETSc/Configure.py index 80ccc27c8b3..1aea0b0244c 100644 --- a/config/PETSc/Configure.py +++ b/config/PETSc/Configure.py @@ -93,6 +93,7 @@ def setupDependencies(self, framework): self.blasLapack = framework.require('config.packages.BlasLapack',self) self.mpi = framework.require('config.packages.MPI', self) self.fortran = framework.require('config.compilersFortran', self) + self.ftncmdline = framework.require('config.utilities.fortranCommandLine',self) self.externalpackagesdir = framework.require('PETSc.options.externalpackagesdir',self) for utility in sorted(os.listdir(os.path.join('config','PETSc','options'))): @@ -329,6 +330,8 @@ def Dump(self): if not self.fortran.fortranIsF90: raise RuntimeError('Error! Fortran compiler "'+self.compilers.FC+'" does not support F90! PETSc fortran bindings require a F90 compiler') self.addDefine('USE_FORTRAN_BINDINGS','1') + if not self.ftncmdline.have_command_argument: + raise RuntimeError('Error! Fortran compiler "'+self.compilers.FC+'" does not support F2003 GET_COMMAND_ARGUMENT()!') self.setCompilers.pushLanguage('FC') # need FPPFLAGS in config/setCompilers self.addMakeMacro('FPP_FLAGS',self.setCompilers.FPPFLAGS) diff --git a/config/PETSc/options/scalarTypes.py index 02dae7b4c59..289235d3b28 100755 --- a/config/PETSc/options/scalarTypes.py +++ b/config/PETSc/options/scalarTypes.py @@ -80,6 +80,11 @@ def configureScalarType(self): self.popLanguage() return + def checkNoFiniteMathOnly(self): + '''Check if attribute for ignoring finite-math-only optimization is valid, for isnan() and isinf()''' + if self.checkCompile('','__attribute__((optimize ("no-finite-math-only"))) int foo(void);'): + self.addDefine('HAVE_NO_FINITE_MATH_ONLY',1) + def configurePrecision(self): '''Set the default real number precision for PETSc objects''' self.log.write('Checking C compiler works with __float128\n') @@ -157,4 +162,5 @@ def configurePrecision(self): def configure(self): self.executeTest(self.configureScalarType) self.executeTest(self.configurePrecision) + self.executeTest(self.checkNoFiniteMathOnly) return diff --git a/config/examples/arch-alcf-polaris.py index fe4742febb3..eff945a9e95 100755 --- a/config/examples/arch-alcf-polaris.py +++ b/config/examples/arch-alcf-polaris.py @@ -2,18 +2,18 @@ # Use GNU compilers: # -# module load cudatoolkit-standalone PrgEnv-gnu cray-libsci -# # Note cray-libsci provides BLAS etc. In summary, we have -# -# module load cudatoolkit-standalone/11.8.0 PrgEnv-gnu gcc/10.3.0 cray-libsci +# module use /soft/modulefiles +# module unload darshan +# module load cudatoolkit-standalone/12.4.1 PrgEnv-gnu cray-libsci # # $ module list # Currently Loaded Modules: -# 1) craype-x86-rome 5) craype-accel-nvidia80 9) cray-dsmml/0.2.2 13) PrgEnv-gnu/8.3.3 -# 2) libfabric/1.15.2.0 6) cmake/3.23.2 10) cray-pmi/6.1.10 14) cray-libsci/23.02.1.1 -# 3) craype-network-ofi 7) cudatoolkit-standalone/11.8.0 11) cray-pals/1.2.11 15) gcc/10.3.0 -# 4) perftools-base/23.03.0 8) craype/2.7.20 12) cray-libpals/1.2.11 16) cray-mpich/8.1.25 +# 1) libfabric/1.15.2.0 6) nghttp2/1.57.0-ciat5hu 11) cray-dsmml/0.2.2 16) craype-x86-milan +# 2) craype-network-ofi 7) curl/8.4.0-2ztev25 12) cray-mpich/8.1.28 17) PrgEnv-gnu/8.5.0 +# 3) perftools-base/23.12.0 8) cmake/3.27.7 13) cray-pmi/6.1.13 18) cray-libsci/23.12.5 +# 4) gcc-native/12.3 9) cudatoolkit-standalone/12.4.1 14) cray-pals/1.3.4 +# 5) spack-pe-base/0.6.1 10) craype/2.7.30 15) cray-libpals/1.3.4 if __name__ == '__main__': import sys @@ -30,6 +30,7 @@ '--with-cuda-arch=80', # Since there is no easy way to auto-detect the cuda arch on the gpu-less Polaris login nodes, we explicitly set it. 
'--download-kokkos', '--download-kokkos-kernels', + '--download-hypre', ] configure.petsc_configure(configure_options) diff --git a/config/examples/arch-ci-freebsd-c-single-opt.py b/config/examples/arch-ci-freebsd-c-single-opt.py index 2a685044515..c2900916ce1 100755 --- a/config/examples/arch-ci-freebsd-c-single-opt.py +++ b/config/examples/arch-ci-freebsd-c-single-opt.py @@ -13,6 +13,7 @@ '--download-superlu_dist', '--download-metis', '--download-parmetis', + '--download-hypre', '--download-cmake', # needed by metis/parmetis '--with-strict-petscerrorcode', ] diff --git a/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py b/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py index e80c03884af..b906e5f4282 100755 --- a/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py +++ b/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py @@ -30,7 +30,7 @@ '--download-chaco=1', '--download-spai=1', '--download-netcdf=1', - '--download-moab=1', + #'--download-moab=1', # moab-5.5: OS freebsd12.1 is not supported # if needed, use https://web.cels.anl.gov/projects/petsc/download/externalpackages/moab-5.0.0.tar.gz '--download-saws', '--download-ks', '--download-codipack=1', diff --git a/config/examples/arch-ci-linux-cuda112-omp.py b/config/examples/arch-ci-linux-cuda112-omp.py index 722207e981e..c72a99a3a5d 100755 --- a/config/examples/arch-ci-linux-cuda112-omp.py +++ b/config/examples/arch-ci-linux-cuda112-omp.py @@ -22,6 +22,7 @@ '--download-hypre', '--download-hypre-configure-arguments=--enable-unified-memory', '--with-strict-petscerrorcode', + '--download-mpich=1', #'--with-coverage', ] diff --git a/config/examples/arch-ci-linux-intel-mkl-single.py b/config/examples/arch-ci-linux-intel-mkl-single.py index 25aa20dad51..48941006cda 100755 --- a/config/examples/arch-ci-linux-intel-mkl-single.py +++ b/config/examples/arch-ci-linux-intel-mkl-single.py @@ -26,6 +26,7 @@ '--download-superlu_dist', '--download-metis', '--download-parmetis', + '--download-hypre', '--with-strict-petscerrorcode', ] configure.petsc_configure(configure_options) diff --git a/config/examples/arch-ci-linux-opt-arm.py b/config/examples/arch-ci-linux-opt-arm.py index df52db8a854..2ab32a67e43 100755 --- a/config/examples/arch-ci-linux-opt-arm.py +++ b/config/examples/arch-ci-linux-opt-arm.py @@ -13,7 +13,7 @@ '--download-mpich-device=ch3:sock', '--download-mpich-configure-arguments=--enable-error-messages=all --enable-g', # note --enable-g=memit - used by --with-debugging=1 does not help '--download-openblas=1', - '--download-openblas-make-options=TARGET=GENERIC', + #'--download-openblas-make-options=TARGET=GENERIC', '--download-hypre=1', '--download-cmake=1', '--download-metis=1', diff --git a/config/examples/arch-ci-linux-pkgs-dbg.py b/config/examples/arch-ci-linux-pkgs-dbg.py index df951d52f48..afe28771724 100755 --- a/config/examples/arch-ci-linux-pkgs-dbg.py +++ b/config/examples/arch-ci-linux-pkgs-dbg.py @@ -6,55 +6,58 @@ configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, '--with-coverage', - #'--download-mpich=1', use system MPI as elemental fails with this - '--download-fblaslapack=1', - '--download-hypre=1', - '--download-cmake=1', - '--download-metis=1', - '--download-parmetis=1', - '--download-ptscotch=1', - '--download-suitesparse=1', - '--download-triangle=1', - '--download-superlu=1', - '--download-superlu_dist=1', - '--download-scalapack=1', - '--download-mumps=1', - # '--download-elemental=1', # disabled since its maxCxxVersion is c++14, but Kokkos-4.0's minCxxVersion is c++17 - '--download-spai=1', - # '--download-moab=1', 
# disabled since its maxCxxVersion is c++14, but Kokkos-4.0's minCxxVersion is c++17 - '--download-parms=1', - '--download-chaco=1', - '--download-fftw=1', - '--download-pastix=1', - '--download-hwloc=1', + #'--download-mpich', use system MPI as elemental fails with this + '--download-fblaslapack', + '--download-hypre', + '--download-cmake', + '--download-metis', + '--download-parmetis', + '--download-ptscotch', + '--download-suitesparse', + '--download-triangle', + '--download-superlu', + '--download-superlu_dist', + '--download-scalapack', + '--download-mumps', + # '--download-elemental', # disabled since its maxCxxVersion is c++14, but Kokkos-4.0's minCxxVersion is c++17 + '--download-spai', + '--download-moab', + '--download-parms', + '--download-chaco', + '--download-fftw', + '--download-pastix', + '--download-hwloc', '--download-ctetgen', '--download-netcdf', '--download-hdf5', - '--with-zlib=1', + '--with-zlib', '--download-exodusii', '--download-pnetcdf', '--download-party', '--download-yaml', '--download-ml', '--download-sundials2', - '--download-p4est=1', + '--download-p4est', '--download-eigen', '--download-pragmatic', - '--download-mmg=1', - '--download-parmmg=1', - '--download-hpddm=1', - '--download-bamg=1', - '--download-htool=1', - '--download-mfem=1', - '--download-glvis=1', - '--with-opengl=1', - '--download-revolve=1', - '--download-cams=1', + '--download-mmg', + '--download-parmmg', + '--download-hpddm', + '--download-bamg', + '--download-htool', + '--download-mfem', + '--download-glvis', + '--with-opengl', + '--download-revolve', + '--download-cams', '--download-slepc', '--download-kokkos', '--download-kokkos-kernels', '--with-dmlandau-3d', '--with-strict-petscerrorcode', + '--download-mpi4py', + '--with-petsc4py', + '--with-debugging', ] if __name__ == '__main__': diff --git a/config/examples/arch-ci-osx-cxx-pkgs-opt-arm.py b/config/examples/arch-ci-osx-cxx-pkgs-opt-arm.py index 925bdda4b2f..f8b29510a4e 100755 --- a/config/examples/arch-ci-osx-cxx-pkgs-opt-arm.py +++ b/config/examples/arch-ci-osx-cxx-pkgs-opt-arm.py @@ -39,7 +39,7 @@ '--download-suitesparse=1', '--download-chaco=1', '--download-spai=1', - # '--download-moab=1', # disabled since its maxCxxVersion is c++14, but Kokkos-4.0's minCxxVersion is c++17 + '--download-moab=1', #'--download-saws', #needs /usr/bin/python [missing in newer MacOS] '--download-revolve=1', '--download-cams=1', diff --git a/config/examples/arch-mswin-icx-ifort.py b/config/examples/arch-mswin-icx-ifort.py index d566a460ec0..4a78686235f 100755 --- a/config/examples/arch-mswin-icx-ifort.py +++ b/config/examples/arch-mswin-icx-ifort.py @@ -8,7 +8,8 @@ '--with-blaslapack-lib=-L/cygdrive/c/PROGRA~2/Intel/oneAPI/mkl/latest/lib mkl_intel_lp64_dll.lib mkl_sequential_dll.lib mkl_core_dll.lib', '--with-cc=win32fe icl --use icx', '--with-cxx=win32fe icl --use icx', - '--with-fc=win32fe ifort', + '--with-fc=win32fe ifort -Qdiag-disable:10448', + 'FPPFLAGS=-I/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest/include/mpi', '--with-mpi-include=/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest/include', '--with-mpi-lib=/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest/lib/impi.lib', '--with-mpiexec=/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest/bin/mpiexec -localonly', diff --git a/config/examples/arch-mswin-icx.py b/config/examples/arch-mswin-icx.py index 65e41fd7365..b0a2ef83c13 100755 --- a/config/examples/arch-mswin-icx.py +++ b/config/examples/arch-mswin-icx.py @@ -10,6 +10,7 @@ '--with-cxx=icx', '--with-fc=ifx', '--with-shared-libraries=0', + 
'FPPFLAGS=-I/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest/include/mpi', '--with-mpi-include=/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest/include', '--with-mpi-lib=/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest/lib/impi.lib', '--with-mpiexec=/cygdrive/c/PROGRA~2/Intel/oneAPI/mpi/latest//bin/mpiexec -localonly', diff --git a/config/examples/arch-nersc-perlmutter-opt.py b/config/examples/arch-nersc-perlmutter-opt.py index 00109fc6ebb..5cc5a5fcb6b 100755 --- a/config/examples/arch-nersc-perlmutter-opt.py +++ b/config/examples/arch-nersc-perlmutter-opt.py @@ -2,7 +2,7 @@ # Example configure script for Perlmutter, the HPE Cray EX system at NERSC/LBNL equipped with # AMD EPYC CPUS and NVIDIA A100 GPUS. Here we target the GPU compute nodes and builds with -# support for the CUDA/cuSPARSE, Kokkos, and ViennaCL back-ends. +# support for the CUDA/cuSPARSE, Kokkos, and ViennaCL back-ends. # # Currently, configuring PETSc on the system does not require loading many , if any, non-default modules. # As documented at https://docs.nersc.gov/systems/perlmutter/software/#mpi, typical settings might be @@ -11,6 +11,7 @@ # module load cudatoolkit # module load PrgEnv-gnu # module load craype-accel-nvidia80 +# module load cray-python # # The above are currently present in the default environment. Users may wish to 'module load' a # different programming environment (which will generally force a reload of certain related modules, @@ -23,7 +24,9 @@ import configure configure_options = [ '--with-make-np=8', # Must limit size of parallel build to stay within resource limitations imposed by the center - '--with-mpiexec=srun -G4', # '-G4' requests all four GPUs present on a Perlmutter GPU compute node. + # '-G4' requests all four GPUs present on a Perlmutter GPU compute node. + # --gpu-bind=none to avoid the gpu-aware mpi runtime error: (GTL DEBUG: 0) cuIpcOpenMemHandle: invalid argument, CUDA_ERROR_INVALID_VALUE, line no 360 + '--with-mpiexec=srun -G4 --gpu-bind=none', '--with-batch=0', # Use the Cray compiler wrappers, regardless of the underlying compilers loaded by the programming environment module: @@ -41,24 +44,12 @@ '--CUDAFLAGS= -g -O3', '--with-debugging=0', # Disable debugging for production builds; use '--with-debugging=1' for development work. - # Set sowing-cc and sowing-cxx explicitly, as this prevents errors caused by compiling sowing with GCC when a - # programming environment other than PrgEnv-gnu has been loaded. If there is this compiler mismatch, we will see - # errors like - # - # /opt/nvidia/hpc_sdk/Linux_x86_64/22.5/compilers/include/bits/floatn.h:60:17: error: two or more data types in declaration specifiers - # typedef float _Float32; - # ^~~~~~~~ - '--download-sowing-cc=cc', # Note that sowing is only needed when Fortran bindings are required. - '--download-sowing-cc=CC', - - # Build with support for CUDA/cuSPARSE, Kokkos/Kokkos Kernels, and ViennaCL back-ends: '--with-cuda=1', '--with-cuda-arch=80', '--download-viennacl', '--download-kokkos', '--download-kokkos-kernels', - '--with-kokkos-kernels-tpl=0', # Use native Kokkos kernels, rather than NVIDIA-provided ones. 
# Download and build a few commonly-used packages: '--download-hypre', diff --git a/config/install.py b/config/install.py index ed019043c6b..b740c612455 100755 --- a/config/install.py +++ b/config/install.py @@ -290,6 +290,143 @@ def fixConf(self): self.fixConfFile(os.path.join(self.destConfDir,file)) return + def fixPythonWheel(self): + import glob + import shutil + # + for pattern in ( + self.destLibDir + '/*.a', + self.destLibDir + '/*.la', + self.destLibDir + '/pkgconfig', # TODO: keep? + self.destConfDir + '/configure-hash', + self.destConfDir + '/uninstall.py', + self.destConfDir + '/reconfigure-*.py', + self.destConfDir + '/pkg.conf.*', + self.destConfDir + '/pkg.git*.*', + self.destConfDir + '/modules', # TODO: keep? + self.destShareDir + '/*/examples/src/*', + self.destShareDir + '/*/datafiles', + ): + for pathname in glob.glob(pattern): + if os.path.isdir(pathname): + shutil.rmtree(pathname) + elif os.path.exists(pathname): + os.remove(pathname) + # + for filename in ( + self.destIncludeDir + '/petscconf.h', + self.destIncludeDir + '/petscconfiginfo.h', + self.destIncludeDir + '/petscmachineinfo.h', + self.destShareDir + '/petsc/examples/gmakefile.test', + self.destConfDir + '/rules', + self.destConfDir + '/rules_doc.mk', + self.destConfDir + '/rules_util.mk', + self.destConfDir + '/petscrules', + self.destConfDir + '/variables', + self.destConfDir + '/petscvariables', + ): + with open(filename, 'r') as oldFile: + contents = oldFile.read() + contents = contents.replace(self.installDir, '${PETSC_DIR}') + contents = contents.replace(self.rootDir, '${PETSC_DIR}') + contents = re.sub( + r'^(PYTHON(_EXE)?) = (.*)$', + r'\1 = python%d' % sys.version_info[0], + contents, flags=re.MULTILINE, + ) + with open(filename, 'w') as newFile: + newFile.write(contents) + # + def lsdir(dirname, *patterns): + return glob.glob(os.path.join(dirname, *patterns)) + def shell(*args): + return self.executeShellCommand(' '.join(args))[0] + libdir = os.path.join(self.installDir, 'lib') + if sys.platform == 'linux': + libraries = [ + lib for lib in lsdir(self.destLibDir, 'lib*.so*') + if not os.path.islink(lib) + ] + for shlib in libraries: + # fix shared library rpath + rpath = shell('patchelf', '--print-rpath', shlib) + rpath = rpath.split(os.path.pathsep) + if libdir in rpath: + rpath.insert(0, '$ORIGIN') + while libdir in rpath: + rpath.remove(libdir) + if rpath: + rpath = os.path.pathsep.join(rpath) + shell('patchelf', '--set-rpath', "'%s'" % rpath, shlib) + # fix shared library file and symlink + basename = os.path.basename(shlib) + libname, ext, _ = basename.partition('.so') + liblink = libname + ext + soname = shell('patchelf', '--print-soname', shlib) + for symlink in lsdir(self.destLibDir, liblink + '*'): + if os.path.islink(symlink): + os.unlink(symlink) + curdir = os.getcwd() + try: + os.chdir(os.path.dirname(shlib)) + if soname != basename: + os.rename(basename, soname) + if soname != liblink: + os.symlink(soname, liblink) + finally: + os.chdir(curdir) + if sys.platform == 'darwin': + def otool(cmd, dylib): + pattern = r''' + ^\s+ cmd \s %s$\n + ^\s+ cmdsize \s \d+$\n + ^\s+ (?:name|path) \s (.*) \s \(offset \s \d+\)$ + ''' % cmd + return re.findall( + pattern, shell('otool', '-l', dylib), + flags=re.VERBOSE | re.MULTILINE, + ) + libraries = [ + lib for lib in lsdir(self.destLibDir, 'lib*.dylib') + if not os.path.islink(lib) + ] + for dylib in libraries: + install_name = otool('LC_ID_DYLIB', dylib)[0] + dependencies = otool('LC_LOAD_DYLIB', dylib) + runtime_path = otool('LC_RPATH', dylib) + # 
fix shared library install name and rpath + install_name = '@rpath/' + os.path.basename(install_name) + shell('install_name_tool', '-id', install_name, dylib) + if libdir in runtime_path: + shell('install_name_tool', '-delete_rpath', libdir, dylib) + for rpath in ('@loader_path',): + if rpath not in runtime_path: + shell('install_name_tool', '-add_rpath', rpath, dylib) + for dep in dependencies: + if os.path.dirname(dep) in (libdir,): + newid = '@rpath/' + os.path.basename(dep) + shell('install_name_tool', '-change', dep, newid, dylib) + # fix shared library file and symlink + basename = os.path.basename(dylib) + libname, ext = os.path.splitext(basename) + libname = libname.partition('.')[0] + liblink = libname + ext + dyname = os.path.basename(install_name) + for symlink in lsdir(self.destLibDir, libname + '*' + ext): + if os.path.islink(symlink): + os.unlink(symlink) + curdir = os.getcwd() + try: + os.chdir(os.path.dirname(dylib)) + if dyname != basename: + os.rename(basename, dyname) + if dyname != liblink: + os.symlink(dyname, liblink) + finally: + os.chdir(curdir) + # + return + def createUninstaller(self): uninstallscript = os.path.join(self.destConfDir, 'uninstall.py') f = open(uninstallscript, 'w') @@ -429,14 +566,20 @@ def runcopy(self): self.installBin() self.installLib() self.installShare() + self.createUninstaller() return def runfix(self): self.fixConf() + using_build_backend = any( + os.environ.get(prefix + '_BUILD_BACKEND') + for prefix in ('_PYPROJECT_HOOKS', 'PEP517') + ) + if using_build_backend: + self.fixPythonWheel() return def rundone(self): - self.createUninstaller() if self.destDir == self.installDir: self.outputInstallDone() else: diff --git a/config/petsc_harness.sh b/config/petsc_harness.sh index 23f339c1def..3313bb7f707 100644 --- a/config/petsc_harness.sh +++ b/config/petsc_harness.sh @@ -311,7 +311,7 @@ function petsc_mpiexec_cudamemcheck() { # cuda-memcheck command re="${executable}" for i in "$@"; do - # first occurence of the presence of petsc_arch is the executable, + # first occurrence of the presence of petsc_arch is the executable, # except when we install MPI ourselves if [[ $i =~ ${re} ]]; then # found it, put cuda memcheck command in diff --git a/doc/build_classic_docs.py b/doc/build_classic_docs.py index 20318e07e89..30b7394f728 100755 --- a/doc/build_classic_docs.py +++ b/doc/build_classic_docs.py @@ -34,6 +34,7 @@ def main(stage,outdir): '--with-mkl_sparse_optimize=0', '--with-mkl_sparse=0', '--with-debugging=0', + '--download-sowing=1', 'COPTFLAS=-O0', '--with-petsc4py', 'PETSC_ARCH=' + petsc_arch, diff --git a/doc/build_man_examples_links.py b/doc/build_man_examples_links.py index b5546970686..d4feb5faecb 100755 --- a/doc/build_man_examples_links.py +++ b/doc/build_man_examples_links.py @@ -17,7 +17,7 @@ def processdir(petsc_dir,dir,keyre,mdict,uses): '''Loop over tutorials, call processfile() on each''' #print('Processing '+dir) for file in os.listdir(dir): - if os.path.isfile(os.path.join(dir,file)) and (file.endswith('.c') or file.endswith('.cxx')): processfile(petsc_dir,dir,file,keyre,mdict,uses) + if os.path.isfile(os.path.join(dir,file)) and (file.endswith('.c') or file.endswith('.cxx') or file.endswith('.F90')): processfile(petsc_dir,dir,file,keyre,mdict,uses) def loadmanualpagescit(petsc_dir): '''Loads and parses the manualpages.cit file generated by Sowing doctext''' @@ -33,8 +33,10 @@ def loadmanualpagescit(petsc_dir): if not m: raise RuntimeError('Cannot find PATTERN '+str(PATTERN)+' in manualpages.cit line '+line) if 
re.match(EXCLUDE_PATTERN,m.group(1)): continue - mdict[' '+m.group(1)+' '] = m.group(3) - mdict['\('+m.group(1)+'\('] = m.group(3) + mdict[r' '+m.group(1)+r' '] = m.group(3) + mdict[r' '+m.group(1)+r'\)'] = m.group(3) + mdict[r' '+m.group(1)+r','] = m.group(3) + mdict[r'\('+m.group(1)+r'\('] = m.group(3) # sort to find enclosing names first mdict = dict(sorted(mdict.items(), key=lambda item: len(item[0]), reverse = True)) keyre = re.compile('|'.join(list(mdict.keys()))) @@ -52,6 +54,7 @@ def main(petsc_dir): if len(uses[i[1:-1]]) > 0: manpage = os.path.join(petsc_dir,'doc','manualpages',mdict[i]) set_uses = set(uses[i[1:-1]]) + uses[i[1:-1]] = [] with open(manpage,'a') as fd: fd.write('\n## Examples\n') for j in set_uses: diff --git a/doc/changes/321.rst b/doc/changes/321.rst new file mode 100644 index 00000000000..02bf405d997 --- /dev/null +++ b/doc/changes/321.rst @@ -0,0 +1,195 @@ +============= +Changes: 3.21 +============= + +.. STYLE GUIDELINES: * Capitalize sentences * Use imperative, e.g., Add, Improve, Change, etc. * Don't use a period (.) at the end of entries * If multiple sentences are needed, use a period or semicolon to divide sentences, but not at the end of the final sentence +.. rubric:: General: +- Add single precision support for using HYPRE and MFEM +- Require Fortran 2003 compiler with GET_COMMAND_ARGUMENT() support for building PETSc with Fortran bindings +.. rubric:: Configure/Build: +- Add ``--download-blis-use-openmp=0`` to force ``download-blis`` to not build with OpenMP when ``with-openmp`` is provided +- Add ``PetscBLASSetNumThreads()`` and ``PetscBLASGetNumThreads()`` for controlling how many threads the BLAS routines use +- Change ``win_cl`` and similar ``win32fe`` compiler wrappers to ``win32fe_cl`` +- Add build support for Intel oneAPI compilers ``icx`` and ``ifx`` on Microsoft Windows with compiler wrappers ``win32fe_icx`` and ``win32fe_ifx`` (only static library build with ``ifx``) +- Add lib/petsc/bin/maint/runjobs.py list-of-jobs script to allow submitting a subset of the jobs to the CI +.. rubric:: Sys: +- Add ``PetscBench`` an object class for managing benchmarks in PETSc +- Deprecate ``PetscVoidFunction``, ``PetscVoidStarFunction``, and ``PetscErrorCodeFunction`` typedefs in favor of + ``PetscVoidFn`` and ``PetscErrorCodeFn`` +- Add ``PetscOptionsBoundedReal()`` and ``PetscOptionsRangeReal()`` +- Rename Petsc stream types to ``PETSC_STREAM_DEFAULT``, ``PETSC_STREAM_NONBLOCKING``, ``PETSC_STREAM_DEFAULT_WITH_BARRIER`` and ``PETSC_STREAM_NONBLOCKING_WITH_BARRIER``. The root device context uses ``PETSC_STREAM_DEFAULT`` by default +.. rubric:: Event Logging: +.. rubric:: PetscViewer: +- Change ``PetscViewerRestoreSubViewer()`` to no longer need a call to ``PetscViewerFlush()`` after it +- Introduce ``PetscOptionsRestoreViewer()`` that must be called after ``PetscOptionsGetViewer()`` and ``PetscOptionsGetViewers()`` + to ensure thread safety +- Add ``PetscViewerASCIIWORLDSetFileUnit()`` +.. rubric:: PetscDraw: +.. rubric:: AO: +.. rubric:: IS: +- Add ``ISLocalToGlobalMappingGetNodeInfo()`` and ``ISLocalToGlobalMappingRestoreNodeInfo()`` to access neighboring information of local indices +- Add support to load an ``ISLocalToGlobalMapping`` via ``ISLocalToGlobalMappingLoad()`` from data previously stored using ``ISLocalToGlobalMappingView()`` +- Add ``ISLocalToGlobalMappingGetBlockMultiLeavesSF()`` to share local block data +.. rubric:: VecScatter / PetscSF: +- Add MPI-4.0 persistent neighborhood collectives support. 
Use ``-sf_neighbor_persistent`` along with ``-sf_type neighbor`` to enable it +- Add ``PetscSFCreateStridedSF()`` to communicate strided blocks of data + +.. rubric:: PF: + +.. rubric:: Vec: + +- Add ``VecGhostGetGhostIS()`` to get the ghost indices of a ghosted vector +- Add ``-vec_mdot_use_gemv`` to let ``VecMDot()``, ``VecMTDot()`` use BLAS2 ``gemv()`` instead of custom unrolled kernel. Default is on +- Add ``-vec_maxpy_use_gemv`` to let ``VecMAXPY()`` use BLAS2 ``gemv()`` instead of custom unrolled kernel. Default is off +- ``VecReplaceArray()`` on the first Vec obtained from ``VecDuplicateVecs()`` with either of the two above \*_use_gemv options won't work anymore. If needed, turn them off or use ``VecDuplicateVec()`` instead +- ``VecScale()`` is now a logically collective operation +- Add ``VecISShift()`` to shift a part of the vector +- ``VecISSet()`` no longer accepts NULL as the index set +- ``VecLoad()`` automatically determines whether the file was written using 32-bit or 64-bit indices, and files can be read with PETSc built either way + +.. rubric:: PetscSection: + +- Add ``PetscSectionGetBlockStarts()`` and ``PetscSectionSetBlockStarts()`` +- Add argument to ``PetscSectionCreateGlobalSection()`` that can ignore the local section permutation +- Add ``PetscSectionCreateComponentSubsection()`` + +.. rubric:: PetscPartitioner: + +- Add edge weights to ``PetscPartitionerPartition()`` + +.. rubric:: Mat: + +- Reset ``MATLMVM`` history vectors if size is changed +- Add specific support for ``MatMultHermitianTranspose()`` and ``MatMultHermitianTransposeAdd()`` in ``MATSHELL``, ``MATDENSE``, ``MATNEST``, and ``MATSCALAPACK`` +- Add function ``MatProductGetAlgorithm()`` +- ``MATTRANSPOSEVIRTUAL``, ``MATHERMITIANTRANSPOSEVIRTUAL``, ``MATNORMAL``, ``MATNORMALHERMITIAN``, and ``MATCOMPOSITE`` now derive from ``MATSHELL``. This implies a new behavior for those ``Mat``, as calling ``MatAssemblyBegin()``/``MatAssemblyEnd()`` destroys scalings and shifts for ``MATSHELL``, but it was not previously the case for other ``MatType`` +- Add function ``MatGetRowSumAbs()`` to compute vector of L1 norms of rows ([B]AIJ only) +- Add partial support for ``MatBackwardSolve()``/``MatForwardSolve()`` with ``MATSOLVERMKL_PARDISO`` and ``MATSOLVERMKL_CPARDISO`` +- Deprecate ``MATIS`` options ``-matis_xxx``. Use ``-mat_is_xxx`` +- Add support for repeated entries in the local part of the local to global map for ``MATIS`` via the routines ``MatISSetAllowRepeated()`` and ``MatISGetAllowRepeated()`` +- Add support to dump and load a matrix of ``MATIS`` type +- Add dense representations of symmetric Broyden matrices ``MATLMVMDBFGS``, ``MATLMVMDDFP``, and ``MATLMVMDQN`` +- Add ``MATSELLHIP`` and the corresponding HIP kernels for matrix-vector multiplication + +.. rubric:: MatCoarsen: + +- Add ``MatCoarsenSetMaximumIterations()`` with corresponding option ``-mat_coarsen_max_it <4>``. The number of iterations of the coarsening method. Used for the HEM coarsener +- Add ``MatCoarsenSetThreshold()`` with corresponding option ``-mat_coarsen_threshold <-1>``. Threshold for filtering the graph for HEM. Like GAMG, < 0 means no filtering +- Change API for several PetscCD methods used internally in ``PCGAMG`` and ``MatCoarsen`` (e.g., change ``PetscCDSetChuckSize()`` to ``PetscCDSetChunckSize()``), remove ``Mat`` argument from ``PetscCDGetASMBlocks()`` + +.. rubric:: PC: + +- Add ``PCGAMGSetLowMemoryFilter()`` with corresponding option ``-pc_gamg_low_memory_threshold_filter``. 
Use the system ``MatFilter`` graph/matrix filter, without a temporary copy of the graph, otherwise use a method that can be faster +- Add ``PCGAMGASMSetHEM()`` with corresponding option ``-pc_gamg_asm_hem_aggs N``. Use ASM smoother constructed from N applications of heavy edge matching +- ``PCMAT`` now uses ``MatSolve()`` if implemented by the matrix type +- Add ``PCLMVMSetUpdateVec()`` for the automatic update of the LMVM preconditioner inside a SNES solve +- Add ``PCGAMGSetInjectionIndex()`` with corresponding option ``-pc_gamg_injection_index i,j,k...``. Inject provided indices of fine grid operator as first coarse grid restriction (sort of p-multigrid for C1 elements) +- Add ``PC_JACOBI_ROWL1`` to ``PCJacobiType`` to use (scaled) l1 row norms for diagonal approximation with scaling of off-diagonal elements +- Add ``PCJacobiSetRowl1Scale()`` and ``-pc_jacobi_rowl1_scale scale`` to access new scale member of PC_Jacobi class, for new row l1 Jacobi +- Add ``-mg_fine_...`` prefix alias for fine grid options to override ``-mg_levels_...`` options, like ``-mg_coarse_...`` +- The generated sub-matrices in ``PCFIELDSPLIT``, ``PCASM``, and ``PCBJACOBI`` now retain any null space or near null space attached to them even if the non-zero structure of the outer matrix changes +- Add support for multiple subdomains per MPI process in ``PCBDDC`` +- Add ``PCJacobiGetDiagonal()`` to access the Jacobi inverted matrix diagonal and its square root variant + +.. rubric:: KSP: + +.. rubric:: SNES: + +- Add support for Quasi-Newton models in ``SNESNEWTONTR`` via ``SNESNewtonTRSetQNType`` +- Add support for trust region norm customization in ``SNESNEWTONTR`` via ``SNESNewtonTRSetNormType`` +- Remove default of ``KSPPREONLY`` and ``PCLU`` for ``SNESNASM`` subdomain solves: for ``SNESASPIN`` use ``-npc_sub_ksp_type preonly -npc_sub_pc_type lu`` +- Add function typedefs ``SNESInitialGuessFn``, ``SNESFunctionFn``, ``SNESObjectiveFn``, ``SNESJacobianFn``, and ``SNESNGSFn`` +- Deprecate ``DMDASNESFunction``, ``DMDASNESJacobian``, ``DMDASNESObjective``, ``DMDASNESFunctionVec``, ``DMDASNESJacobianVec``, and ``DMDASNESObjectiveVec`` + in favor of ``DMDASNESFunctionFn``, ``DMDASNESJacobianFn``, ``DMDASNESObjectiveFn``, ``DMDASNESFunctionVecFn``, ``DMDASNESJacobianVecFn``, and ``DMDASNESObjectiveVecFn`` + +.. rubric:: SNESLineSearch: + +- Deprecate ``SNESLineSearchShellSetUserFunc()`` and ``SNESLineSearchShellGetUserFunc()`` in favor of ``SNESLineSearchShellSetApply()`` and ``SNESLineSearchShellGetApply()`` + +.. rubric:: TS: + +- Add support for custom predictor callbacks in the second-order generalized-alpha method using ``TSAlpha2SetPredictor()`` +- Allow adaptivity to change time step size in first step of second-order generalized-alpha method +- Add ``TSSetPostEventStep()`` to control the first step after event +- Rename ``TSSetPostEventIntervalStep()`` to ``TSSetPostEventSecondStep()``, controlling the second step after event +- Rename option ``-ts_event_post_eventinterval_step`` to ``-ts_event_post_event_second_step`` +- Change the (event) indicator functions type from ``PetscScalar[]`` to ``PetscReal[]`` in the user ``indicator()`` callback set by ``TSSetEventHandler()`` +- Add ``TSGetStepRollBack()`` to access the internal rollback flag +- Add boolean flag to ``TSSetResize()`` to control when to resize + +.. rubric:: TAO: + +- Deprecate ``TaoCancelMonitors()`` (resp. ``-tao_cancelmonitors``) in favor of ``TaoMonitorCancel()`` (resp. 
``-tao_monitor_cancel``) +- Deprecate ``-tao_view_gradient``, ``-tao_view_ls_residual``, ``-tao_view_solution``, and ``-tao_view_stepdirection`` in favor of + ``-tao_monitor_gradient``, ``-tao_monitor_ls_residual``, ``-tao_monitor_solution``, and ``-tao_monitor_step`` +- Deprecate ``-tao_draw_solution``, ``-tao_draw_gradient``, and ``-tao_draw_step`` in favor of ``-tao_monitor_solution_draw``, ``-tao_monitor_gradient_draw``, and ``-tao_monitor_step_draw`` +- Deprecate ``TaoSetMonitor()`` in favor of ``TaoMonitorSet()`` +- Deprecate all of the provided ``Tao`` monitor routine names in favor of the standard PETSc naming conventions + +.. rubric:: DM/DA: + +- Add MPI reduction inside ``SNESComputeObjective_DMDA()``. No need to call reduction in local callback +- Deprecate ``PetscSimplePointFunc`` in favor of ``PetscSimplePointFn`` +- Move ``DMPlexReorderDefaultFlag`` to ``DMReorderDefaultFlag`` +- Add ``DMCreateSectionPermutation()``, ``DMReorderSectionGetType()``, and ``DMReorderSectionSetType()`` +- Add ``DMReorderSectionGetDefault()`` and ``DMReorderSectionSetDefault()`` to allow point permutations when sections are built automatically +- Change interface to ``DMCreateSectionSubDM()`` to add component specification +- Add ``DMDAGetBoundaryType()`` + +.. rubric:: DMSwarm: + +- Add continuous ``DM`` argument to ``DMSwarmProjectFields()`` +- Add ``DMSwarmGetFieldInfo()`` +- Add ``DMSwarmVectorGetField()`` + +.. rubric:: DMPlex: + +- Drop support for MED, i.e. remove ``DMPlexCreateMedFromFile()`` and ``--with-med`` +- Change prototype of ``DMPlexSetSNESLocalFEM()``. Now it accepts a single context and a Boolean indicating whether to use the objective function callback +- Replace ``DMProjectCoordinates()`` with ``DMSetCoordinateDisc()`` +- Add argument to ``DMPlexCreateCoordinateSpace()`` +- Add ``DMPlexCoordMap`` and some default maps +- Add Boolean argument to ``DMPlexPartitionLabelCreateSF()`` to sort ranks +- Add ``DMClearAuxiliaryVec()`` to clear the auxiliary data +- Add ignoreLabelHalo, sanitizeSubmesh, and ownershipTransferSF arguments to ``DMPlexFilter()`` +- Change ``DMPlexSetIsoperiodicFaceSF()``, ``DMPlexGetIsoperiodicFaceSF()``, and ``DMPlexSetIsoperiodicFaceTransform()`` to accept multiple values for different periodic pairings + +.. rubric:: FE/FV: + +- Add Jacobian type argument to ``PetscFEIntegrateBdJacobian()`` +- Add ``PetscFVClone()`` +- Add ``PetscFVCreateDualSpace()`` + +.. rubric:: DMNetwork: + +.. rubric:: DMStag: + +- Add support for ``DMLocalToLocalBegin()`` and ``DMLocalToLocalEnd()`` +- Add ``DMStagSetRefinementFactor()`` and ``DMStagGetRefinementFactor()`` to set and get the refinement ratio +- Add support for arbitrary refinement ratio and degree of freedom in interpolation and restriction + +.. rubric:: DT: + +- Add ``PetscDSUpdateBoundaryLabels()`` + +.. rubric:: Fortran: diff --git a/doc/changes/dev.rst b/doc/changes/dev.rst index 28cb484ba8c..b7e5363cb2b 100644 --- a/doc/changes/dev.rst +++ b/doc/changes/dev.rst @@ -13,170 +13,89 @@ Changes: Development .. 
rubric:: Configure/Build: -- Add ``--download-blis-use-openmp=0`` to force ``download-blis`` to not build with OpenMP when ``with-openmp`` is provided -- Add ```PetscBLASSetNumThreads()`` and ``PetscBLASGetNumThreads()`` for controlling how many threads the BLAS routines use -- Change ``win_cl`` and similar ``win32fe`` compiler wrappers to ``win32fe_cl`` -- Add build support for Intel oneAPI compilers ``icx`` and ``ifx`` on Microsoft Windows with compiler wrappers ``win32fe_icx`` and ``win32fe_ifx`` (only static library build with ``ifx``) - .. rubric:: Sys: -- Add ``PetscBench`` an object class for managing benchmarks in PETSc -- Deprecate ``PetscVoidFunction``, ``PetscVoidStarFunction``, and ``PetscErrorCodeFunction`` typedefs in favor of - ``PetscVoidFn`` and ``PetscErrorCodeFn`` -- Add ``PetscOptionsBoundedReal()`` and ``PetscOptionsRangeReal()`` -- Rename Petsc stream types to ``PETSC_STREAM_DEFAULT``, ``PETSC_STREAM_NONBLOCKING``, ``PETSC_STREAM_DEFAULT_WITH_BARRIER`` and ``PETSC_STREAM_NONBLOCKING_WITH_BARRIER``. The root device context uses ``PETSC_STREAM_DEFAULT`` by default - .. rubric:: Event Logging: .. rubric:: PetscViewer: -- Change ``PetscViewerRestoreSubViewer()`` to no longer need a call to ``PetscViewerFlush()`` after it -- Introduce ``PetscOptionsRestoreViewer()`` that must be called after ``PetscOptionsGetViewer()`` and ``PetscOptionsGetViewers()`` - to ensure thread safety - .. rubric:: PetscDraw: .. rubric:: AO: .. rubric:: IS: -- Add ``ISLocalToGlobalMappingGetNodeInfo()`` and ``ISLocalToGlobalMappingRestoreNodeInfo()`` to access neighboring information of local indices -- Add support to load an ``ISLocalToGlobalMapping`` via ``ISLocalToGlobalMappingLoad()`` from data previously stored using ``ISLocalToGlobalMappingView()`` - .. rubric:: VecScatter / PetscSF: -- Add MPI-4.0 persistent neighborhood collectives support. Use -sf_neighbor_persistent along with -sf_type neighbor to enable it -- Add ``PetscSFCreateStridedSF()`` to communicate strided blocks of data - .. rubric:: PF: .. rubric:: Vec: -- Add ``VecGhostGetGhostIS()`` to get the ghost indices of a ghosted vector -- Add ``-vec_mdot_use_gemv`` to let ``VecMDot()``, ``VecMTDot()`` use BLAS2 ``gemv()`` instead of custom unrolled kernel. Default is on -- Add ``-vec_maxpy_use_gemv`` to let ``VecMAXPY()`` use BLAS2 ``gemv()`` instead of custom unrolled kernel. Default is off -- ``VecReplaceArray()`` on the first Vec obtained from ``VecDuplicateVecs()`` with either of the two above \*_use_gemv options won't work anymore. If needed, turn them off or use ``VecDuplicateVec()`` instead -- ``VecScale()`` is now a logically collective operation -- Add ``VecISShift()`` to shift a part of the vector -- ``VecISSet()`` does no longer accept NULL as index set -- ``VecLoad()`` automatically determines whether the file was written using 32-bit or 64-bit indices, and files can read with PETSc built either way +- The ``IS`` passed to ``VecISAXPY()``, ``VecISCopy()``, ``VecISSet()``, and ``VecISShift()`` must have the same communicator as the vectors used +- Make ``VecLock`` API active in optimized mode +- ``VecNestSetSubVec()`` and ``VecNestSetSubVecs()`` now take references to input vectors rather than creating duplicates .. rubric:: PetscSection: -- Add ``PetscSectionGetBlockStarts()`` and ``PetscSectionSetBlockStarts()`` -- Add argument to ``PetscSectionCreateGlobalSection()`` that can ignore the local section permutation -- Add ``PetscSectionCreateComponentSubsection()`` - .. rubric:: PetscPartitioner: .. 
rubric:: Mat: -- Reset ``MATLMVM`` history vectors if size is changed -- Add specific support for ``MatMultHermitianTranspose()`` and ``MatMultHermitianTransposeAdd()`` in ``MATSHELL``, ``MATDENSE``, ``MATNEST``, and ``MATSCALAPACK`` -- Add function ``MatProductGetAlgorithm()`` -- ``MATTRANSPOSEVIRTUAL``, ``MATHERMITIANTRANSPOSEVIRTUAL``, ``MATNORMAL``, ``MATNORMALHERMITIAN``, and ``MATCOMPOSITE`` now derive from ``MATSHELL``. This implies a new behavior for those ``Mat``, as calling ``MatAssemblyBegin()``/``MatAssemblyEnd()`` destroys scalings and shifts for ``MATSHELL``, but it was not previously the case for other ``MatType`` -- Add function ``MatGetRowSumAbs()`` to compute vector of L1 norms of rows ([B]AIJ only) -- Add partial support for ``MatBackwardSolve()``/``MatForwardSolve()`` with ``MATSOLVERMKL_PARDISO`` and ``MATSOLVERMKL_CPARDISO`` -- Deprecate ``MATIS`` options ``-matis_xxx``. Use ``-mat_is_xxx`` -- Add support for repeated entries in the local part of the local to global map for ``MATIS`` via the routines ``MatISSetAllowRepeated()`` and ``MatISGetAllowRepeated()``. -- Add support to dump and load a matrix of ``MATIS`` type. - .. rubric:: MatCoarsen: -- Add ``MatCoarsenSetMaximumIterations()`` with corresponding option ``-mat_coarsen_max_it <4>``. The number of iteration of the coarsening method. Used for the HEM coarsener -- Add ``MatCoarsenSetThreshold()`` with corresponding option ``-mat_coarsen_threshold <-1>``. Threshold for filtering graph for HEM. Like GAMG < 0 means no filtering -- Change API for several PetscCD methods used internally in ``PCGAMG`` and ``MatCoarsen`` (eg, change ``PetscCDSetChuckSize()`` to ``PetscCDSetChunckSize()``), remove ``Mat`` argument from``PetscCDGetASMBlocks()`` - .. rubric:: PC: -- Add ``PCGAMGSetLowMemoryFilter()`` with corresponding option ``-pc_gamg_low_memory_threshold_filter``. Use the system ``MatFilter`` graph/matrix filter, without a temporary copy of the graph, otherwise use method that can be faster -- Add ``PCGAMGASMSetHEM()`` with corresponding option ``-pc_gamg_asm_hem_aggs N``. Use ASM smoother constructed from N applications of heavy edge matching -- ``PCMAT`` use ``MatSolve()`` if implemented by the matrix type -- Add ``PCLMVMSetUpdateVec()`` for the automatic update of the LMVM preconditioner inside a SNES solve -- Add ``PCGAMGSetInjectionIndex()`` with corresponding option ``-pc_gamg_injection_index i,j,k...``. 
Inject provided indices of fine grid operator as first coarse grid restriction (sort of p-multigrid for C1 elements) -- Add ``PC_JACOBI_ROWL1`` to ``PCJacobiType`` to use (scaled) l1 row norms for diagonal approximation with scaling of off-diagonal elements -- Add ``PCJacobiSetRowl1Scale()`` and ``-pc_jacobi_rowl1_scale scale`` to access new scale member of PC_Jacobi class, for new row l1 Jacobi -- Add ``-mg_fine_...`` prefix alias for fine grid options to override ``-mg_levels_...`` options, like ``-mg_coarse_...`` -- The generated sub-matrices in ``PCFIELDSPLIT``, ``PCASM``, and ``PCBJACOBI`` now retain any null space or near null space attached to them even if the non-zero structure of the outer matrix changes +- Add support in ``PCFieldSplitSetFields()`` including with ``-pc_fieldsplit_%d_fields fields`` for ``MATNEST``, making it possible to + utilize multiple levels of ``PCFIELDSPLIT`` with ``MATNEST`` from the command line +- Add ``PCCompositeSpecialSetAlphaMat()`` API to use a matrix other than the identity in + preconditioners based on an alternating direction iteration, e.g., setting :math:`M` for + :math:`P = (A + \alpha M) M^{-1} (\alpha M + B)` + +- Change the option database keys for coarsening for ``PCGAMG`` to use the prefix ``-pc_gamg_``, for example ``-pc_gamg_mat_coarsen_type``
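Spelling out the preconditioner referenced a few entries above: with ``PCCompositeSpecialSetAlphaMat()`` the weighting matrix :math:`M` in the alternating direction product becomes user-settable, i.e.

.. math::

   P = (A + \alpha M)\, M^{-1}\, (\alpha M + B),

so the two half-sweeps :math:`A + \alpha M` and :math:`\alpha M + B` share the weighting matrix :math:`M`; with :math:`M = I` this reduces to the previous identity-based form :math:`P = (A + \alpha I)(\alpha I + B)`.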
 .. rubric:: KSP: .. rubric:: SNES: -- Add support for Quasi-Newton models in ``SNESNEWTONTR`` via ``SNESNewtonTRSetQNType`` -- Add support for trust region norm customization in ``SNESNEWTONTR`` via ``SNESNewtonTRSetNormType`` -- Remove default of ``KSPPREONLY`` and ``PCLU`` for ``SNESNASM`` subdomain solves: for ``SNESASPIN`` use ``-npc_sub_ksp_type preonly -npc_sub_pc_type lu`` -- Add function typedefs ``SNESInitialGuessFn``, ``SNESFunctionFn``, ``SNESObjectiveFn``, ``SNESJacobianFn``, and ``SNESNGSFn`` -- Deprecate ``DMDASNESFunction``, ``DMDASNESJacobian``, ``DMDASNESObjective``, ``DMDASNESFunctionVec``, ``DMDASNESJacobianVec``, and ``DMDASNESObjectiveVec`` - in favor of ``DMDASNESFunctionFn``, ``DMDASNESJacobianFn``, ``DMDASNESObjectiveFn``, ``DMDASNESFunctionVecFn``, ``DMDASNESJacobianVecFn``, and ``DMDASNESObjectiveVecFn`` .. rubric:: SNESLineSearch: -- Deprecate ``SNESLineSearchShellSetUserFunc()`` and ``SNESLineSearchShellGetUserFunc()`` in favor of ``SNESLineSearchShellSetApply()`` and ``SNESLineSearchShellGetApply()`` .. rubric:: TS: -- Add support for custom predictor callbacks in the second-order generalized-alpha method using ``TSAlpha2SetPredictor()`` -- Allow adaptivity to change time step size in first step of second-order generalized-alpha method. -- Add ``TSSetPostEventStep()`` to control the first step after event -- Rename ``TSSetPostEventIntervalStep()`` to ``TSSetPostEventSecondStep()``, controlling the second step after event -- Rename option ``-ts_event_post_eventinterval_step`` to ``-ts_event_post_event_second_step`` -- Change the (event) indicator functions type from ``PetscScalar[]`` to ``PetscReal[]`` in the user ``indicator()`` callback set by ``TSSetEventHandler()`` -- Add ``TSGetStepRollBack()`` to access the internal rollback flag -- Add boolean flag to ``TSSetResize()`` to control when to resize +- Add Rosenbrock-W methods from :cite:`rang2015improved` with :math:`B_{PR}` stability: ``TSROSWR34PRW``, ``TSROSWR3PRL2``, ``TSROSWRODASPR``, and ``TSROSWRODASPR2`` .. rubric:: TAO: -- Deprecate ``TaoCancelMonitors()`` (resp. ``-tao_cancelmonitors``) in favor of ``TaoMonitorCancel()`` (resp. ``-tao_monitor_cancel``) -- Deprecate ``-tao_view_gradient``, ``-tao_view_ls_residual``, ``-tao_view_solution``, and ``-tao_view_stepdirection`` in favor of - ``-tao_monitor_gradient``, ``-tao_monitor_ls_residual``, ``-tao_monitor_solution``, and ``-tao_monitor_step`` -- Deprecate ``-tao_draw_solution``, ``-tao_draw_gradient``, and ``-tao_draw_step`` in favor of ``-tao_monitor_solution_draw``, ``-tao_monitor_gradient_draw``, and ``-tao_monitor_step_draw`` -- Deprecate ``TaoSetMonitor()`` in favor of ``TaoMonitorSet()`` -- Deprecate all of the provided ``Tao`` monitor routine names in favor of the standard PETSc naming conventions - .. rubric:: DM/DA: -- Add MPI reduction inside ``SNESComputeObjective_DMDA()``. No need to call reduction in local callback -- Deprecate ``PetscSimplePointFunc`` in favor of ``PetscSimplePointFn`` -- Move ``DMPlexReorderDefaultFlag`` to ``DMReorderDefaultFlag`` -- Add ``DMCreateSectionPermutation()``, ``DMReorderSectionGetType()``, and ``DMReorderSectionSetType()`` -- Add ``DMReorderSectionGetDefault()`` and ``DMReorderSectionSetDefault()`` to allow point permutations when sections are built automatically -- Change interface to ``DMCreateSectionSubDM()`` to add component specification -- Add ``DMDAGetBoundaryType()`` +- Add ``DMGetSparseLocalize()`` and ``DMSetSparseLocalize()`` +- Add ``DMGeomModelRegister()``, ``DMGeomModelRegisterAll()``, ``DMGeomModelRegisterDestroy()``, ``DMSnapToGeomModel()``, ``DMSetSnapToGeomModel()`` to support registering geometric models +- Add ``DMGetOutputSequenceLength()`` .. rubric:: DMSwarm: -- Add continuous ``DM`` argument to ``DMSwarmProjectFields()`` -- Add ``DMSwarmGetFieldInfo()`` -- Add ``DMSwarmVectorGetField()`` - .. rubric:: DMPlex: -- Drop support for MED, i.e. remove ``DMPlexCreateMedFromFile()`` and ``--with-med`` -- Change protototype of ``DMPlexSetSNESLocalFEM()``. Now it accepts a single context and a Boolean indicating to use the objective function callback -- Replace ``DMProjectCoordinates()`` with ``DMSetCoordinateDisc()`` -- Add argument to ``DMPlexCreateCoordinateSpace()`` -- Add ``DMPlexCoordMap`` and some default maps -- Add Boolean argument to ``DMPlexPartitionLabelCreateSF()`` to sort ranks -- Add ``DMClearAuxiliaryVec()`` to clear the auxiliary data -- Add ignoreLabelHalo, sanitizeSubmesh, and ownershipTransferSF arguments to ``DMPlexFilter()`` +- Add ``DMLabelGetValueBounds()`` +- Add ``DMPlexOrientLabel()`` +- Add an argument to ``DMPlexLabelCohesiveComplete()`` in order to change behavior at the surface boundary +- Remove ``DMPlexSnapToGeomModel()`` +- Add refinement argument to ``DMPlexCreateHexCylinderMesh()`` +- Now ``DMPlexComputeBdIntegral()`` takes one function per field +- Add ``DMPlexCreateEdgeNumbering()`` .. rubric:: FE/FV: -- Add Jacobian type argument to ``PetscFEIntegrateBdJacobian()`` -- Add ``PetscFVClone()`` -- Add ``PetscFVCreateDualSpace()`` - .. rubric:: DMNetwork: .. rubric:: DMStag: -- Add support for ``DMLocalToLocalBegin()`` and ``DMLocalToLocalEnd()`` -- Add ``DMStagSetRefinementFactor()`` and ``DMStagGetRefinementFactor()`` to set and get the refinement ratio -- Add support for arbitrary refinement ratio and degree of freedom in interpolation and restriction .. rubric:: DT: -- Add ``PetscDSUpdateBoundaryLabels()`` - .. 
rubric:: Fortran: + +- Add ``PETSC_NULL_ENUM`` to be used instead of ``PETSC_NULL_INTEGER`` when a pointer to an ``enum`` is expected in a PETSc function call +- Add ``PETSC_NULL_INTEGER_ARRAY``, ``PETSC_NULL_SCALAR_ARRAY``, and ``PETSC_NULL_REAL_ARRAY`` for use instead of + ``PETSC_NULL_INTEGER``, ``PETSC_NULL_SCALAR``, and ``PETSC_NULL_REAL`` when an array is expected in a PETSc function call +- Add automatically generated interface definitions for most PETSc functions to detect illegal usage at compile time +- Add ``PetscObjectIsNull()`` for users to check if a PETSc object is ``NULL`` +- Change the PETSc Fortran API so that non-array values, ``v``, passed to PETSc routines expecting arrays must be cast with ``[v]`` in the calling sequence diff --git a/doc/changes/index.rst b/doc/changes/index.rst index 072d798c2ac..8db0212d5f2 100644 --- a/doc/changes/index.rst +++ b/doc/changes/index.rst @@ -24,6 +24,7 @@ editor. :maxdepth: 1 main - PETSc development version + 3.21 - Public Release, Mar 29, 2024 <321> 3.20 - Public Release, Sep 28, 2023 <320> 3.19 - Public Release, Mar 30, 2023 <319> 3.18 - Public Release, Sep 30, 2022 <318> diff --git a/doc/community/meetings/meeting.rst b/doc/community/meetings/meeting.rst index 139c833ed77..ab758170aa7 100644 --- a/doc/community/meetings/meeting.rst +++ b/doc/community/meetings/meeting.rst @@ -28,12 +28,12 @@ simulations by scientists and engineers. Upcoming Meetings ================= -- `The 2024 user meeting will take place May 23-24 in Cologne, Germany. `__ -- The 2025 user meeting will take place in North America. Suggestions for a meeting location? +- The 2025 user meeting will take place in Buffalo, New York, USA. Previous Meetings ================= +- `Cologne, Germany; May 23-24, 2024 `__ - :any:`Chicago, Illinois, USA; June 5-7, 2023 <2023_meeting>` - `Atlanta, Georgia, USA; June 5-7, 2019 `__ - `London, UK; June 4-6, 2018 `__ diff --git a/doc/community/petsc_team.rst b/doc/community/petsc_team.rst index ad41a800a17..860ef42b15f 100644 --- a/doc/community/petsc_team.rst +++ b/doc/community/petsc_team.rst @@ -118,6 +118,10 @@ The full list of contributors can be found `on GitLab `__. +to your branch in the PETSc Discord channel ``testing-ci-forum``. -1. Check the issue's threads to see if the error is listed and add it there, with a link to your MR (e.g. ``!1234``). Otherwise, create a new thread. -2. Click the three dots in the top right of the thread and select "Copy link". -3. Add this link in your MR description. +Check the forum's threads to see if the error is listed and add it there, with a link to your MR (e.g. ``!1234``). Otherwise, create a new thread. diff --git a/doc/developers/style.rst b/doc/developers/style.rst index 18175955d4c..a97e3f28708 100644 --- a/doc/developers/style.rst +++ b/doc/developers/style.rst @@ -536,7 +536,7 @@ Sphinx later processes. | a formatted comment of a function that will be used for documentation and a Fortran interface. - | ``/*@C`` - | a formatted comment of a function that will be used only for documentation, not to generate a Fortran interface. In general, such labeled C functions should have a custom Fortran interface provided. Functions that take ``char*`` or function pointer arguments must have the ``C`` symbol and a custom Fortran interface provided. + | a formatted comment of a function that will be used only for documentation, not to generate a Fortran interface. Certain constructs and usages do not yet support automatically generating a Fortran interface. 
In general, such labeled C functions should have a custom Fortran interface provided. - | ``/*E`` | a formatted comment of an enum used for documentation only. Note that each of these needs to be listed in ``lib/petsc/conf/bfort-petsc.txt`` as a native and defined in the corresponding ``include/petsc/finclude/petscxxx.h`` Fortran include file and the values set as parameters in the file ``src/SECTION/f90-mod/petscSUBSECTION.h``, for example, ``src/vec/f90-mod/petscis.h``. @@ -552,7 +552,7 @@ Sphinx later processes. The Fortran interface files supplied manually by the developer go into the two directories ``ftn-custom`` and ``f90-custom``, while those generated by -Sowing go into ``ftn-auto``. +Sowing go into ``ftn-auto`` directories in the ``$PETSC_ARCH/src`` directory tree. Each include file that contains formatted comments needs to have a line of the form diff --git a/doc/index.rst b/doc/index.rst index 89206a4ee1f..ebefc268207 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -18,10 +18,6 @@ PETSc is developed as :ref:`open-source `, :any:`requests ` will take place May 23, 24 in Cologne, Germany. - .. admonition:: News: Book on numerical methods using PETSc **PETSc for Partial Differential Equations: Numerical Solutions in C and Python**, by Ed Bueler. diff --git a/doc/install/download.rst b/doc/install/download.rst index a4e956e7ce4..fc4834c809c 100644 --- a/doc/install/download.rst +++ b/doc/install/download.rst @@ -31,16 +31,16 @@ Alternative: Obtain Release Version with Tarball Tarball which contains only the source. Documentation available `online `__. -- `petsc-3.20.5.tar.gz `__ +- `petsc-3.21.2.tar.gz `__ Tarball which includes all documentation, recommended for offline use. -- `petsc-with-docs-3.20.5.tar.gz `__ +- `petsc-with-docs-3.21.2.tar.gz `__ Tarball to enable a separate installation of petsc4py. -- `petsc4py-3.20.5.tar.gz `__ +- `petsc4py-3.21.2.tar.gz `__ To extract the sources use: @@ -77,3 +77,20 @@ or if you already have a local clone of petsc git repository More details on contributing to PETSc development are at :any:`ch_contributing`. The development version of the documentation, which is largely the same as the release documentation is `available `__. + +Release Schedule +================ + +We intend to provide new releases every 6 months, and patch updates to the current release every month. + +New releases (for example: 3.20.0, 3.21.0, 3.22.0, etc.): + +- March (end of the month) +- September (end of the month) + +New patch updates (for example: 3.21.1, 3.21.2, 3.21.3, etc.): + +- Last week of every month (or the first week of the next month, if delayed) + +With a new release of PETSc, the old version will no longer get patch updates. I.e., when 3.22.0 is released, bug fixes +and any updates will go to 3.22.x, and petsc-3.21, petsc-3.20, etc., will not get any additional patch updates. diff --git a/doc/install/install.rst b/doc/install/install.rst index 77b6d3d8924..f91f51b2d6b 100644 --- a/doc/install/install.rst +++ b/doc/install/install.rst @@ -568,10 +568,10 @@ configure time. For example: .. 
code-block:: console - $ ./configure --prefix=/opt/petsc/petsc-3.20.0-mpich --with-mpi-dir=/opt/mpich + $ ./configure --prefix=/opt/petsc/petsc-3.21.0-mpich --with-mpi-dir=/opt/mpich $ make $ make install [DESTDIR=/tmp/petsc-pkg] - $ ./configure --prefix=/opt/petsc/petsc-3.20.0-openmpi --with-mpi-dir=/opt/openmpi + $ ./configure --prefix=/opt/petsc/petsc-3.21.0-openmpi --with-mpi-dir=/opt/openmpi $ make $ make install [DESTDIR=/tmp/petsc-pkg] diff --git a/doc/manual/fortran.rst b/doc/manual/fortran.rst index 357c602af62..b640f96d598 100644 --- a/doc/manual/fortran.rst +++ b/doc/manual/fortran.rst @@ -50,6 +50,14 @@ For example, PETSc types like ``PetscInt`` and ``PetscReal`` are simply aliases for basic Fortran types and cannot be written as ``type(tPetscInt)`` +PETSc objects are always automatically initialized when declared, so you do not need to (and should not) do + +.. code-block:: fortran + + type(tXXX) x = PETSC_NULL_XXX + XXX x = PETSC_NULL_XXX + + Calling Sequences ^^^^^^^^^^^^^^^^^ @@ -94,6 +102,56 @@ For proper error handling one should not use the above syntax instead one should PetscCallA(KSPSolve(ksp, b, x, ierr)) ! Fortran main program PetscCall(KSPSolve(ksp, b, x)) // C +Passing Arrays +^^^^^^^^^^^^^^ + +Many PETSc functions take arrays as arguments; in Fortran they must be passed as arrays even if the "array" +is of length one (unlike Fortran 77 where one can pass scalars to functions expecting arrays). When passing +a single value one can use the Fortran [] notation to pass the scalar as an array, for example + +.. code-block:: fortran + + PetscCall(VecSetValues(v, one, [i], [val], ierr)) + +This trick can only be used for arrays used to pass data into a PETSc routine; it cannot be used +for arrays used to receive data from a PETSc routine. For example, + +.. code-block:: fortran + + PetscCall(VecGetValues(v, one, idx, [val], ierr)) + +is invalid and will not set ``val`` with the correct value. + +For PETSc routine arguments that return a character string, you should pass a string long enough to hold the +result. For example, + +.. code-block:: fortran + + character(80) str + PetscCall(KSPGetType(ksp,str,ierr)) + +The result is copied into ``str``. + +For PETSc routine arguments that return an array of ``PetscInt``, ``PetscScalar``, ``PetscReal`` or of PETSc objects, +there are two possibilities. In the first, the Fortran routine must pass in an array of sufficient size to hold the result. For example, + + +.. code-block:: fortran + + PetscInt lx(64) + PetscCall(DMDAGetOwnershipRanges(a, lx, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, ierr)) + +In the second form, one passes in a pointer to an array and the PETSc routine returns an array containing the values. + +.. code-block:: fortran + + PetscScalar, pointer :: array(:) + PetscCall(VecGetArrayF90(v, array, ierr)) + +In this second form, the PETSc routine often has a name that ends with ``F90``. + +The form to use is documented in the manual page of the routine. + Passing Null Pointers ^^^^^^^^^^^^^^^^^^^^^ @@ -110,6 +168,35 @@ command in Fortran: PetscCall(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, PETSC_NULL_CHARACTER, '-name', N, flg, ierr)) +Where the code expects an array, use ``PETSC_NULL_XXX_ARRAY``. For example: + +.. code-block:: fortran + + PetscCall(MatCreateSeqDense(comm, m, n, PETSC_NULL_SCALAR_ARRAY, A, ierr)) + +Finally, when a subroutine returns a ``PetscObject`` through an argument, to check whether it is ``NULL`` you must use: + +.. 
code-block:: fortran + + if (PetscObjectIsNull(dm)) then + if (.not. PetscObjectIsNull(dm)) then + +you cannot use + +.. code-block:: fortran + + if (dm .eq. PETSC_NULL_DM) then + +Note that + +.. code-block:: fortran + + if (PetscObjectIsNull(PETSC_NULL_VEC)) then + +will always return true for any PETSc object. + +These specializations are required because of Fortran's strict type checking system and lack of a concept of ``NULL``. + Matrix, Vector and IS Indices ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/manual/index.rst b/doc/manual/index.rst index 694c8744042..5b77d030bfe 100644 --- a/doc/manual/index.rst +++ b/doc/manual/index.rst @@ -51,6 +51,7 @@ Prepared by **P. Sanan** :sup:`1,8`, **J. Sarich** :sup:`1`, **B. F. Smith** :sup:`1,17`, +**H. Suh** :sup:`1`, **S. Zampini** :sup:`4`, **H. Zhang** :sup:`1,5`, **H. Zhang** :sup:`1`, and diff --git a/doc/manual/ksp.rst b/doc/manual/ksp.rst index 40c67abbe09..9932d550c5b 100644 --- a/doc/manual/ksp.rst +++ b/doc/manual/ksp.rst @@ -315,7 +315,7 @@ can be used by the options database command * - Method - KSPType - - Options Database Name + - Options Database * - Richardson - ``KSPRICHARDSON`` - ``richardson`` @@ -437,8 +437,8 @@ can be used by the options database command - ``KSPPYTHON`` - ``python`` * - Shell for no ``KSP`` method - - ``KSPPREONLY`` (or ``KSPNONE``) - - ``preonly`` (or ``none``) + - ``KSPNONE`` + - ``none`` Note: the bi-conjugate gradient method requires application of both the @@ -709,7 +709,7 @@ iterative linear solvers. * - Method - PCType - - Options Database Name + - Options Database * - Jacobi - ``PCJACOBI`` - ``jacobi`` @@ -1077,20 +1077,20 @@ Algebraic Multigrid (AMG) Preconditioners PETSc has a native algebraic multigrid preconditioner ``PCGAMG`` – *gamg* – and interfaces to three external AMG packages: *hypre*, *ML* -and *AMGx* (CUDA platforms only), that can be downloaded in the -configuration phase (eg, ``--download-hypre`` ) and used by -specifiying that command line parameter (eg, ``-pc_type hypre``). +and *AMGx* (CUDA platforms only) that can be downloaded in the +configuration phase (e.g., ``--download-hypre`` ) and used by +specifying that command line parameter (e.g., ``-pc_type hypre``). *Hypre* is relatively monolithic in that a PETSc matrix is converted into a hypre -matrix and then *hypre* is called to do the entire solve. *ML* is more -modular in that PETSc only has *ML* generate the coarse grid spaces -(columns of the prolongation operator), which is core of an AMG method, +matrix, and then *hypre* is called to solve the entire problem. *ML* is more +modular because PETSc only has *ML* generate the coarse grid spaces +(columns of the prolongation operator), which is the core of an AMG method, and then constructs a ``PCMG`` with Galerkin coarse grid operator construction. ``PCGAMG`` is designed from the beginning to be modular, to allow for new components to be added easily and also populates a multigrid preconditioner ``PCMG`` so generic multigrid parameters are used (see :any:`sec_mg`). PETSc provides a fully supported (smoothed) aggregation AMG, but supports the addition of new methods (``-pc_type gamg -pc_gamg_type agg`` or ``PCSetType(pc,PCGAMG)`` and -``PCGAMGSetType(pc,PCGAMGAGG)``. Examples of extension are a reference implementations of +``PCGAMGSetType(pc, PCGAMGAGG)``). Examples of extension are reference implementations of a classical AMG method (``-pc_gamg_type classical``), a (2D) hybrid geometric AMG method (``-pc_gamg_type geo``) that are not supported. 
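+As a minimal sketch of the API route just mentioned (illustrative only: the
+variables ``ksp``, ``pc``, and the assembled matrix ``A`` are assumed to exist,
+and error checking with ``PetscCall()`` is omitted):
+
+.. code-block::
+
+   KSPCreate(PETSC_COMM_WORLD, &ksp);
+   KSPSetOperators(ksp, A, A);
+   KSPGetPC(ksp, &pc);
+   PCSetType(pc, PCGAMG);        /* select GAMG */
+   PCGAMGSetType(pc, PCGAMGAGG); /* aggregation AMG */
+   PCGAMGSetNSmooths(pc, 1);     /* one prolongation smoothing step: smoothed aggregation */
+   KSPSetFromOptions(ksp);       /* let -pc_gamg_* command line options override */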
A 2.5D AMG method DofColumns :cite:`isaacstadlerghattas2015` supports 2D coarsenings extruded in the third dimension. ``PCGAMG`` does require the use @@ -1099,31 +1099,116 @@ can use ``MATAIJ`` instead of ``MATBAIJ`` without changing any code other than t constructor (or the ``-mat_type`` from the command line). For instance, ``MatSetValuesBlocked`` works with ``MATAIJ`` matrices. +**Important parameters for ``PCGAMGAGG``** + +* Control the generation of the coarse grid + + * ``-pc_gamg_aggressive_coarsening`` Use aggressive coarsening on the finest n levels to construct the coarser mesh. + See ``PCGAMGSetAggressiveLevels()``. A larger value produces a preconditioner that is faster to create and apply, but the convergence may be slower. + + * ``-pc_gamg_low_memory_threshold_filter`` Filter small matrix entries before coarsening the mesh. + See ``PCGAMGSetLowMemoryFilter()``. + + * ``-pc_gamg_threshold`` The threshold of small values to drop when ``-pc_gamg_low_memory_threshold_filter`` is used. A + negative value means keeping even the locations with 0.0. See ``PCGAMGSetThreshold()``. + + * ``-pc_gamg_threshold_scale`` Set a scale factor applied to each coarser level when ``-pc_gamg_low_memory_threshold_filter`` is used. + See ``PCGAMGSetThresholdScale()``. + + * ``-pc_gamg_mat_coarsen_type`` Algorithm used to coarsen the matrix graph. See ``MatCoarsenSetType()``. + + * ``-pc_gamg_mat_coarsen_max_it`` Maximum HEM iterations to use. See ``MatCoarsenSetMaximumIterations()``. + + * ``-pc_gamg_aggressive_mis_k`` k distance in MIS coarsening (>2 is 'aggressive') to use in coarsening. + See ``PCGAMGMISkSetAggressive()``. A larger value produces a preconditioner that is faster to create and solve with, but the convergence may be slower. + This option and the previous option work to determine how aggressively the grids are coarsened. + + * ``-pc_gamg_mis_k_minimum_degree_ordering`` Use a minimum degree ordering in the greedy MIS algorithm used to coarsen. + See ``PCGAMGMISkSetMinDegreeOrdering()``. + +* Control the generation of the prolongation for ``PCGAMGAGG`` + + * ``-pc_gamg_agg_nsmooths`` Number of smoothing steps to be used in constructing the prolongation. For symmetric problems, + generally, one or more is best. For some strongly nonsymmetric problems, 0 may be best. See ``PCGAMGSetNSmooths()``. + +* Control the amount of parallelism on the levels + + * ``-pc_gamg_process_eq_limit`` Sets the minimum number of equations allowed per process when coarsening (otherwise, fewer MPI processes + are used for the coarser mesh). A larger value will cause the coarser problems to be run on fewer MPI processes, resulting + in less communication and possibly a faster time to solution. See ``PCGAMGSetProcEqLim()``. + + * ``-pc_gamg_rank_reduction_factors`` Set a schedule for MPI rank reduction on coarse grids. See ``PCGAMGSetRankReductionFactors()``. + This overrides the lessening of processes that would arise from ``-pc_gamg_process_eq_limit``. + + * ``-pc_gamg_repartition`` Run a partitioner on each coarser mesh generated rather than using the default partition arising from the + finer mesh. See ``PCGAMGSetRepartition()``. This increases the preconditioner setup time but will result in less time per + iteration of the solver. + + * ``-pc_gamg_parallel_coarse_grid_solver`` Allow the coarse grid solve to run in parallel, depending on the value of ``-pc_gamg_coarse_eq_limit``. + See ``PCGAMGSetParallelCoarseGridSolve()``. If the coarse grid problem is large, then this can + improve the time to solution. 
+ + * ``-pc_gamg_coarse_eq_limit`` Sets the minimum number of equations allowed per process on the coarsest level when coarsening + (otherwise fewer MPI processes will be used). A larger value will cause the coarse problems to be run on fewer MPI processes. + This only applies if ``-pc_gamg_parallel_coarse_grid_solver`` is set to true. See ``PCGAMGSetCoarseEqLim()``. + +* Control the smoothers + + * ``-pc_mg_levels`` Set the maximum number of levels to use. + + * ``-mg_levels_ksp_type`` If ``KSPCHEBYSHEV`` or ``KSPRICHARDSON`` is not used, then the Krylov + method for the entire multigrid solve has to be a flexible method such as ``KSPFGMRES``. Generally, the + stronger the Krylov method the faster the convergence, but with more cost per iteration. See ``KSPSetType()``. + + * ``-mg_levels_ksp_max_it`` Sets the number of iterations to run the smoother on each level. Generally, the more iterations, + the faster the convergence, but with more cost per multigrid iteration. See ``PCMGSetNumberSmooth()``. + + * ``-mg_levels_ksp_xxx`` Sets options for the ``KSP`` in the smoother on the levels. + + * ``-mg_levels_pc_type`` Sets the smoother to use on each level. See ``PCSetType()``. Generally, the + stronger the preconditioner the faster the convergence, but with more cost per iteration. + + * ``-mg_levels_pc_xxx`` Sets options for the ``PC`` in the smoother on the levels. + + * ``-mg_coarse_ksp_type`` Sets the solver ``KSPType`` to use on the coarsest level. + + * ``-mg_coarse_pc_type`` Sets the solver ``PCType`` to use on the coarsest level. + + * ``-pc_gamg_asm_use_agg`` Use ``PCASM`` as the smoother on each level, with the aggregates defined by the coarsening process as + the subdomains. This option automatically switches the smoother on the levels to be ``PCASM``. + + * ``-mg_levels_pc_asm_overlap`` Use non-zero overlap with ``-pc_gamg_asm_use_agg``. See ``PCASMSetOverlap()``. + +* Control the multigrid algorithm + + * ``-pc_mg_type`` The type of multigrid to use. Usually, multiplicative is the fastest. + + * ``-pc_mg_cycle_type`` Use V- or W-cycle with ``-pc_mg_type`` ``multiplicative``. + ``PCGAMG`` provides unsmoothed aggregation (``-pc_gamg_agg_nsmooths 0``) and smoothed aggregation (``-pc_gamg_agg_nsmooths 1`` or -``PCGAMGSetNSmooths(pc,1)``). Smoothed aggregation (SA) is recommended +``PCGAMGSetNSmooths(pc,1)``). Smoothed aggregation (SA), :cite:`vanek1996algebraic`, :cite:`vanek2001convergence`, is recommended for symmetric positive definite systems. Unsmoothed aggregation can be -useful for asymmetric problems and problems where highest eigen -estimates are problematic. If poor convergence rates are observed using -the smoothed version one can test unsmoothed aggregation. +useful for asymmetric problems and problems where the highest eigenvalue estimates are problematic. If poor convergence rates are observed using +the smoothed version, one can test unsmoothed aggregation. **Eigenvalue estimates:** The parameters for the KSP eigen estimator, used for SA, can be set with ``-pc_gamg_esteig_ksp_max_it`` and ``-pc_gamg_esteig_ksp_type``. For example, CG generally converges to the -highest eigenvalue fast than GMRES (the default for KSP) if your problem +highest eigenvalue faster than GMRES (the default for KSP) if your problem is symmetric positive definite. One can specify CG with ``-pc_gamg_esteig_ksp_type cg``. 
The default for ``-pc_gamg_esteig_ksp_max_it`` is 10, which we have found is pretty safe with a (default) safety factor of 1.1. One can specify the range of real -eigenvalues, in the same way that one can for Chebyshev KSP solvers +eigenvalues in the same way as with Chebyshev KSP solvers (smoothers), with ``-pc_gamg_eigenvalues ``. GAMG sets the MG smoother type to chebyshev by default. By default, GAMG uses its eigen estimate, if it has one, for Chebyshev smoothers if the smoother uses Jacobi preconditioning. This can be overridden with ``-pc_gamg_use_sa_esteig ``. -AMG methods requires knowledge of the number of degrees of freedom per -vertex, the default is one (a scalar problem). Vector problems like +AMG methods require knowledge of the number of degrees of freedom per +vertex; the default is one (a scalar problem). Vector problems like elasticity should set the block size of the matrix appropriately with ``-mat_block_size bs`` or ``MatSetBlockSize(mat,bs)``. Equations must be ordered in “vertex-major” ordering (e.g., @@ -1132,59 +1217,59 @@ ordered in “vertex-major” ordering (e.g., **Near null space:** Smoothed aggregation requires an explicit representation of the (near) null space of the operator for optimal performance. One can provide an orthonormal set of null space vectors -with ``MatSetNearNullSpace()``. The vector of all ones is the default, +with ``MatSetNearNullSpace()``. The vector of all ones is the default for each variable given by the block size (e.g., the translational rigid body modes). For elasticity, where rotational rigid body modes are -required to complete the near null space you can use +required to complete the near null space, you can use ``MatNullSpaceCreateRigidBody()`` to create the null space vectors and then ``MatSetNearNullSpace()``. **Coarse grid data model:** The GAMG framework provides for reducing the number of active processes on coarse grids to reduce communication costs when there is not enough parallelism to keep relative communication -costs down. Most AMG solver reduce to just one active process on the +costs down. Most AMG solvers reduce to just one active process on the coarsest grid (the PETSc MG framework also supports redundantly solving -the coarse grid on all processes to potentially reduce communication -costs), although this forcing to one process can be overridden if one +the coarse grid on all processes to potentially reduce communication +costs). However, this forcing to one process can be overridden if one wishes to use a parallel coarse grid solver. GAMG generalizes this by -reducing the active number of processes on other coarse grids as well. +reducing the active number of processes on other coarse grids. GAMG will select the number of active processors by fitting the desired -number of equation per process (set with +number of equations per process (set with ``-pc_gamg_process_eq_limit <50>,``) at each level given that size of each level. If :math:`P_i < P` processors are desired on a level -:math:`i` then the first :math:`P_i` processes are populated with the grid +:math:`i`, then the first :math:`P_i` processes are populated with the grid and the remaining are empty on that grid. One can, and probably should, repartition the coarse grids with ``-pc_gamg_repartition ``, otherwise an integer process reduction factor (:math:`q`) is selected and the equations on the first :math:`q` processes are moved to process -0, and so on. 
As mentioned multigrid generally coarsens the problem -until it is small enough to be solved with an exact solver (eg, LU or -SVD) in a relatively small time. GAMG will stop coarsening when the -number of equation on a grid falls below at threshold give by +0, and so on. As mentioned, multigrid generally coarsens the problem +until it is small enough to be solved with an exact solver (e.g., LU or +SVD) in a relatively short time. GAMG will stop coarsening when the +number of equations on a grid falls below the threshold given by ``-pc_gamg_coarse_eq_limit <50>,``. **Coarse grid parameters:** There are several options to provide parameters to the coarsening algorithm and parallel data layout. Run a -code that uses ``PCGAMG`` with ``-help`` to get full listing of GAMG -parameters with short parameter descriptions. The rate of coarsening is +code using ``PCGAMG`` with ``-help`` to get a full listing of GAMG +parameters with short descriptions. The rate of coarsening is critical in AMG performance – too slow coarsening will result in an overly expensive solver per iteration and too fast coarsening will result in decrease in the convergence rate. ``-pc_gamg_threshold <-1>`` and ``-pc_gamg_aggressive_coarsening `` are the primary parameters that control coarsening rates, which is very important for AMG performance. A greedy maximal independent set (MIS) algorithm is used in coarsening. -Squaring the graph implements so called MIS-2, the root vertex in an -aggregate is more than two edges away from another root vertex, instead +Squaring the graph implements MIS-2; the root vertex in an +aggregate is more than two edges away from another root vertex instead of more than one in MIS. The threshold parameter sets a normalized threshold for which edges are removed from the MIS graph, thereby coarsening slower. Zero will keep all non-zero edges, a negative number -will keep zero edges, a positive number will drop small edges. Typical +will keep zero edges, and a positive number will drop small edges. Typical finite threshold values are in the range of :math:`0.01 - 0.05`. There are additional parameters for changing the weights on coarse grids. -The parallel MIS algorithms requires symmetric weights/matrix. Thus ``PCGAMG`` +The parallel MIS algorithms require symmetric weights/matrices. Thus ``PCGAMG`` will automatically make the graph symmetric if it is not symmetric. Since this -has additional cost users should indicate the symmetry of the matrices they +has additional cost, users should indicate the symmetry of the matrices they provide by calling .. code-block:: @@ -1197,7 +1282,7 @@ or MatSetOption(mat,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE (or PETSC_FALSE)). -If they know that the matrix will always have symmetry, despite future changes +If they know that the matrix will always have symmetry despite future changes to the matrix (with, for example, ``MatSetValues()``) then they should also call .. code-block:: @@ -1210,11 +1295,11 @@ or MatSetOption(mat,MAT_STRUCTURAL_SYMMETRY_ETERNAL,PETSC_TRUE (or PETSC_FALSE)). -Using this information allows the algorithm to skip the unnecessary computations. +Using this information allows the algorithm to skip unnecessary computations. -**Trouble shooting algebraic multigrid methods:** If ``PCGAMG``, *ML*, *AMGx* or -*hypre* does not perform well the first thing to try is one of the other -methods. 
Often the default parameters or just the strengths of different +**Troubleshooting algebraic multigrid methods:** If ``PCGAMG``, *ML*, *AMGx* or +*hypre* does not perform well, the first thing to try is one of the other +methods. Often, the default parameters or just the strengths of different algorithms can fix performance problems or provide useful information to guide further debugging. There are several sources of poor performance of AMG solvers and often special purpose methods must be developed to @@ -1223,8 +1308,8 @@ performance degradation that may not be fixed with parameters in PETSc currently: non-elliptic operators, curl/curl operators, highly stretched grids or highly anisotropic problems, large jumps in material coefficients with complex geometry (AMG is particularly well suited to -jumps in coefficients but it is not a perfect solution), highly -incompressible elasticity, not to mention ill-posed problems, and many +jumps in coefficients, but it is not a perfect solution), highly +incompressible elasticity, not to mention ill-posed problems and many others. For Grad-Div and Curl-Curl operators, you may want to try the Auxiliary-space Maxwell Solver (AMS, ``-pc_type hypre -pc_hypre_type ams``) or the Auxiliary-space Divergence @@ -1237,9 +1322,9 @@ operator, which can be set using ``PCHYPRESetDiscreteCurl()``. **I am converging slowly, what do I do?** AMG methods are sensitive to coarsening rates and methods; for GAMG use ``-pc_gamg_threshold `` -or ``PCGAMGSetThreshold()`` to regulate coarsening rates, higher values decrease +or ``PCGAMGSetThreshold()`` to regulate coarsening rates; higher values decrease coarsening rate. Squaring the graph is the second mechanism for -increasing coarsening rate. Use ``-pc_gamg_aggressive_coarsening ``, or +increasing the coarsening rate. Use ``-pc_gamg_aggressive_coarsening ``, or ``PCGAMGSetAggressiveLevels(pc,N)``, to aggressively coarsen (MIS-2) the graph on the finest N levels. A high threshold (e.g., :math:`x=0.08`) will result in an expensive but potentially powerful preconditioner, and a low threshold @@ -1248,60 +1333,60 @@ cheaper solves, and generally worse convergence rates. One can run with ``-info :pc`` and grep for ``PCGAMG`` to get statistics on each level, which can be used to see if you are coarsening at an -appropriate rate. With smoothed aggregation you generally want to coarse -at about a rate of 3:1 in each dimension. Coarsening too slow will +appropriate rate. With smoothed aggregation, you generally want to coarsen +at about a rate of 3:1 in each dimension. Coarsening too slowly will result in large numbers of non-zeros per row on coarse grids (this is reported). The number of non-zeros can go up very high, say about 300 -(times the degrees-of-freedom per vertex) on a 3D hex mesh. One can also +(times the degrees of freedom per vertex) on a 3D hex mesh. One can also look at the grid complexity, which is also reported (the ratio of the total number of matrix entries for all levels to the number of matrix entries on the fine level). Grid complexity should be well under 2.0 and preferably around :math:`1.3` or lower. If convergence is poor and the Galerkin coarse grid construction is much smaller than the time for each -solve then one can safely decrease the coarsening rate. +solve, one can safely decrease the coarsening rate. ``-pc_gamg_threshold`` :math:`-1.0` is the simplest and most robust -option, and is recommended if poor convergence rates are observed, at -least until the source of the problem is discovered. 
In conclusion, if -convergence is slow then decreasing the coarsening rate (increasing the -threshold) should be tried. +option and is recommended if poor convergence rates are observed, at +least until the source of the problem is discovered. In conclusion, decreasing the coarsening rate (increasing the +threshold) should be tried if convergence is slow. **A note on Chebyshev smoothers.** Chebyshev solvers are attractive as multigrid smoothers because they can target a specific interval of the -spectrum which is the purpose of a smoother. The spectral bounds for +spectrum, which is the purpose of a smoother. The spectral bounds for Chebyshev solvers are simple to compute because they rely on the highest eigenvalue of your (diagonally preconditioned) operator, which is conceptually simple to compute. However, if this highest eigenvalue -estimate is not accurate (too low) then the solvers can fail with and +estimate is not accurate (too low), the solvers can fail with an indefinite preconditioner message. One can run with ``-info`` and grep for ``PCGAMG`` to get these estimates or use ``-ksp_view``. These highest eigenvalues are generally between 1.5-3.0. For symmetric positive -definite systems CG is a better eigenvalue estimator -``-mg_levels_esteig_ksp_type cg``. Indefinite matrix messages are often -caused by bad Eigen estimates. Explicitly damped Jacobi or Krylov -smoothers can provide an alternative to Chebyshev and *hypre* has +definite systems, CG is a better eigenvalue estimator +``-mg_levels_esteig_ksp_type cg``. Bad eigenvalue estimates often cause indefinite matrix messages. Explicitly damped Jacobi or Krylov +smoothers can provide an alternative to Chebyshev, and *hypre* has alternative smoothers. -**Now am I solving alright, can I expect better?** If you find that you +**Now, am I solving alright? Can I expect better?** If you find that you are getting nearly one digit in reduction of the residual per iteration and are using a modest number of point smoothing steps (e.g., 1-4 iterations of SOR), then you may be fairly close to textbook multigrid -efficiency. Although you also need to check the setup costs. This can be +efficiency. However, you also need to check the setup costs. This can be determined by running with ``-log_view`` and check that the time for the Galerkin coarse grid construction (``MatPtAP()``) is not (much) more than the time spent in each solve (``KSPSolve()``). If the ``MatPtAP()`` time is -too large then one can increase the coarsening rate by decreasing the +too large, then one can increase the coarsening rate by decreasing the threshold and using aggressive coarsening (``-pc_gamg_aggressive_coarsening ``, squares the graph on the finest N -levels). Likewise if your ``MatPtAP()`` time is small and your convergence -rate is not ideal then you could decrease the coarsening rate. +levels). Likewise, if your ``MatPtAP()`` time is short and your convergence +rate is not ideal, you could decrease the coarsening rate. -PETSc’s AMG solver is constructed as a framework for developers to -easily add AMG capabilities, like a new AMG methods or an AMG component +PETSc’s AMG solver is a framework for developers to +easily add AMG capabilities, such as new AMG methods or an AMG component like a matrix triple product. Contact us directly if you are interested in contributing. -It is possible but not recommended to use algebraic multigrid as a "standalone" solver, that is not accelerating it with a Krylov method. 
Use a ``KSPType`` of ``KSPRICHARDSON`` -(or equivalently `-ksp_type richardson`) to achieve this. Using ``KSPPREONLY`` will not work since it only applies a single cycle of multigrid. +Using algebraic multigrid as a "standalone" solver, that is, without accelerating it with a Krylov method, is possible but not recommended. +Use a ``KSPType`` of ``KSPRICHARDSON`` +(or equivalently ``-ksp_type richardson``) to achieve this. Using ``KSPPREONLY`` will not work since it only applies a single multigrid cycle. + Adaptive Interpolation `````````````````````` @@ -1854,18 +1939,16 @@ the incomplete factorization. .. _sec_block_matrices: -Solving Block Matrices -~~~~~~~~~~~~~~~~~~~~~~ +Solving Block Matrices with PCFIELDSPLIT +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Block matrices represent an important class of problems in numerical linear algebra and offer the possibility of far more efficient iterative -solvers than just treating the entire matrix as black box. In this -section we use the common linear algebra definition of block matrices -where matrices are divided in a small, problem-size independent (two, -three or so) number of very large blocks. These blocks arise naturally -from the underlying physics or discretization of the problem, for -example, the velocity and pressure. Under a certain numbering of -unknowns the matrix can be written as +solvers than just treating the entire matrix as a black box. In this +section, we use the common linear algebra definition of block matrices, where matrices are divided into a small, problem-size independent (two, +three, or so) number of very large blocks. These blocks arise naturally +from the underlying physics or discretization of the problem, such as the velocity and pressure. Under a certain numbering of +unknowns, the matrix can be written as .. math:: @@ -1876,10 +1959,9 @@ unknowns the matrix can be written as A_{30} & A_{31} & A_{32} & A_{33} \\ \end{array} \right), -where each :math:`A_{ij}` is an entire block. On a parallel computer the -matrices are not explicitly stored this way. Instead, each process will -own some of the rows of :math:`A_{0*}`, :math:`A_{1*}` etc. On a -process, the blocks may be stored one block followed by another +where each :math:`A_{ij}` is an entire block. The matrices on a parallel computer are not explicitly stored this way. Instead, each process will +own some rows of :math:`A_{0*}`, :math:`A_{1*}` etc. On a +process, the blocks may be stored one block followed by another .. math:: @@ -1893,7 +1975,7 @@ process, the blocks may be stored one block followed by another ... \\ \end{array} \right) -or interlaced, for example with two blocks +or interlaced, for example, with two blocks .. math:: @@ -1906,35 +1988,110 @@ or interlaced, for example with two blocks ... \end{array} \right). -Note that for interlaced storage the number of rows/columns of each +Note that for interlaced storage, the number of rows/columns of each block must be the same size. Matrices obtained with ``DMCreateMatrix()`` where the ``DM`` is a ``DMDA`` are always stored interlaced. Block -matrices can also be stored using the ``MATNEST`` format which holds +matrices can also be stored using the ``MATNEST`` format, which holds separate assembled blocks. Each of these nested matrices is itself distributed in parallel. It is more efficient to use ``MATNEST`` with the methods described in this section because there are fewer copies and -better formats (e.g. 
``MATBAIJ`` or ``MATSBAIJ``) can be used for the +better formats (e.g., ``MATBAIJ`` or ``MATSBAIJ``) can be used for the components, but it is not possible to use many other methods with ``MATNEST``. See :any:`sec_matnest` for more on assembling block matrices without depending on a specific matrix format. -The PETSc ``PCFIELDSPLIT`` preconditioner is used to implement the -“block” solvers in PETSc. There are three ways to provide the +The PETSc ``PCFIELDSPLIT`` preconditioner implements the +“block” solvers in PETSc, :cite:`elman2008tcp`. There are three ways to provide the information that defines the blocks. If the matrices are stored as interlaced then ``PCFieldSplitSetFields()`` can be called repeatedly to indicate which fields belong to each block. More generally ``PCFieldSplitSetIS()`` can be used to indicate exactly which -rows/columns of the matrix belong to a particular block. You can provide -names for each block with these routines, if you do not provide names -they are numbered from 0. With these two approaches the blocks may -overlap (though generally they will not). If only one block is defined +rows/columns of the matrix belong to a particular block (field). You can provide +names for each block with these routines; if you do not, they are numbered from 0. With these two approaches, the blocks may +overlap (though they generally will not). If only one block is defined, then the complement of the matrices is used to define the other block. -Finally the option ``-pc_fieldsplit_detect_saddle_point`` causes two +Finally, the option ``-pc_fieldsplit_detect_saddle_point`` causes two diagonal blocks to be found, one associated with all rows/columns that have zeros on the diagonals and the rest. -For simplicity in the rest of the section we restrict our matrices to -two-by-two blocks. So the matrix is +**Important parameters for ``PCFIELDSPLIT``** + +- Control the fields used + + - ``-pc_fieldsplit_detect_saddle_point`` Generate two fields; the first consists of all rows with a nonzero on the diagonal, and the second will be all rows + with zero on the diagonal. See ``PCFieldSplitSetDetectSaddlePoint()``. + + - ``-pc_fieldsplit_dm_splits`` Use the ``DM`` attached to the preconditioner to determine the fields. See ``PCFieldSplitSetDMSplits()`` and + ``DMCreateFieldDecomposition()``. + + - ``-pc_fieldsplit_%d_fields`` Use f1, f2, .. to define field ``d``. The ``fn`` are in the range of 0, ..., bs-1 where bs is the block size + of the matrix or set with ``PCFieldSplitSetBlockSize()``. See ``PCFieldSplitSetFields()``. + + - ``-pc_fieldsplit_default`` Automatically add any fields needed that have not been supplied explicitly by ``-pc_fieldsplit_%d_fields``. + + - ``PCFieldSplitSetIS()`` Provide the ``IS`` that defines a particular field. + +- Control the type of the block preconditioner + + - ``-pc_fieldsplit_type`` The order in which the field solves are applied. + For symmetric problems where ``KSPCG`` is used, ``symmetric_multiplicative`` must be used instead of ``multiplicative``. ``additive`` is the least expensive + to apply but provides the worst convergence. ``schur`` requires either a good preconditioner for the Schur complement or a naturally well-conditioned + Schur complement, but when it works well it can be extremely effective. See ``PCFieldSplitSetType()``. ``gkb`` is for symmetric saddle-point problems (the lower-right + block is zero). 
+ + - ``-pc_fieldsplit_diag_use_amat`` Use the first matrix that is passed to ``KSPSetOperators()`` to construct the block-diagonal sub-matrices used in the algorithms; + by default, the second matrix is used. + + - Options for Schur preconditioner: ``-pc_fieldsplit_type`` + ``schur`` + + - ``-pc_fieldsplit_schur_fact_type`` See ``PCFieldSplitSetSchurFactType()``. ``full`` reduces the iterations but each iteration requires additional + field solves. + + - ``-pc_fieldsplit_schur_precondition`` How the Schur complement is preconditioned. See ``PCFieldSplitSetSchurPre()``. + + - ``-fieldsplit_1_mat_schur_complement_ainv_type`` Use the lumped diagonal of :math:`A_{00}` when ``-pc_fieldsplit_schur_precondition`` + ``selfp`` is used. + + - ``-pc_fieldsplit_schur_scale`` Controls the sign flip of :math:`S` for ``-pc_fieldsplit_schur_fact_type`` ``diag``. + See ``PCFieldSplitSetSchurScale()``. + + - ``-fieldsplit_1_xxx`` Controls the solver for the Schur complement system. + If a ``DM`` provided the fields, use the second field name set in the ``DM`` instead of 1. + + - ``-fieldsplit_1_pc_type`` ``lsc`` ``-fieldsplit_1_lsc_pc_xxx`` Use + the least squares commutators :cite:`elmanhowleshadidshuttleworthtuminaro2006` :cite:`silvester2001efficient` + preconditioner for the Schur complement with any preconditioner for the least-squares matrix, see ``PCLSC``. + If a ``DM`` provided the fields, use the second field name set in the ``DM`` instead of 1. + + - ``-fieldsplit_upper_xxx`` Set options for the solver used in the upper solve when ``-pc_fieldsplit_schur_fact_type`` + ``upper`` or ``full`` is used. Defaults to + using the solver as provided with ``-fieldsplit_0_xxx``. + + - ``-fieldsplit_1_inner_xxx`` Set the options for the solver inside the application of the Schur complement; + defaults to using the solver as provided with ``-fieldsplit_0_xxx``. If a ``DM`` provides the fields, use the second field name set in the ``DM`` instead of 1. + + - Options for GKB preconditioner: ``-pc_fieldsplit_type`` ``gkb`` + + - ``-pc_fieldsplit_gkb_tol`` See ``PCFieldSplitSetGKBTol()``. + + - ``-pc_fieldsplit_gkb_delay`` See ``PCFieldSplitSetGKBDelay()``. + + - ``-pc_fieldsplit_gkb_nu`` See ``PCFieldSplitSetGKBNu()``. + + - ``-pc_fieldsplit_gkb_maxit`` See ``PCFieldSplitSetGKBMaxit()``. + + - ``-pc_fieldsplit_gkb_monitor`` Monitor the convergence of the inner solver. + +- Options for additive and multiplicative field solvers: + + - ``-fieldsplit_%d_xxx`` Set options for the solver for field number ``d``. For example, ``-fieldsplit_0_pc_type`` + ``jacobi``. When the fields are obtained from a ``DM``, use the + field name instead of ``d``. + +A sketch showing how several of these options fit together is given below. + + +For simplicity, we restrict our matrices to two-by-two blocks in the rest of the section. So the matrix is .. math:: @@ -1943,7 +2100,7 @@ two-by-two blocks. So the matrix is A_{10} & A_{11} \\ \end{array} \right). -On occasion the user may provide another matrix that is used to +On occasion, the user may provide another matrix that is used to construct parts of the preconditioner .. math:: @@ -1954,10 +2111,10 @@ construct parts of the preconditioner \end{array} \right). For notational simplicity define :math:`\text{ksp}(A,Ap)` to mean -approximately solving a linear system using ``KSP`` with operator +approximately solving a linear system using ``KSP`` with the operator :math:`A` and preconditioner built from matrix :math:`Ap`. 
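+As the promised sketch of how the options above fit together (illustrative only:
+the index sets ``isu`` and ``isp`` and the split names are placeholders, and error
+checking with ``PetscCall()`` is omitted), a two-field Schur complement solve could
+be configured as
+
+.. code-block::
+
+   KSPCreate(PETSC_COMM_WORLD, &ksp);
+   KSPSetOperators(ksp, A, A);
+   KSPGetPC(ksp, &pc);
+   PCSetType(pc, PCFIELDSPLIT);
+   PCFieldSplitSetIS(pc, "0", isu);  /* rows/columns of the first field  */
+   PCFieldSplitSetIS(pc, "1", isp);  /* rows/columns of the second field */
+   PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR);
+   KSPSetFromOptions(ksp);
+
+and then tuned from the command line with, for example,
+``-pc_fieldsplit_schur_fact_type full`` or ``-fieldsplit_1_pc_type lsc``.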
-For matrices defined with any number of blocks there are three “block” +For matrices defined with any number of blocks, there are three “block” algorithms available: block Jacobi, .. math:: @@ -2049,7 +2206,7 @@ internal KSPs are given by ``-fieldsplit_name_``. By default blocks :math:`A_{00}, A_{01}` and so on are extracted out of ``Pmat``, the matrix that the ``KSP`` uses to build the preconditioner, -and not out of ``Amat`` (i.e., :math:`A` itself). As discussed above in +and not out of ``Amat`` (i.e., :math:`A` itself). As discussed above, in :any:`sec_combining-pcs`, however, it is possible to use ``Amat`` instead of ``Pmat`` by calling ``PCSetUseAmat(pc)`` or using ``-pc_use_amat`` on the command line. @@ -2061,7 +2218,7 @@ argument ``-pc_fieldsplit_diag_use_amat``. Similarly, ``-pc_fieldsplit_off_diag_use_amat`` will cause the off-diagonal blocks :math:`A_{01},A_{10}` etc. to be extracted out of ``Amat``. -For two-by-two blocks only, there is another family of solvers, based on +For two-by-two blocks only, there is another family of solvers based on Schur complements. The inverse of the Schur complement factorization is .. math:: @@ -2134,7 +2291,7 @@ Schur complements. The inverse of the Schur complement factorization is 0 & I \\ \end{array} \right). -The preconditioner is accessed with ``-pc_fieldsplit_type schur`` and is +The preconditioner is accessed with ``-pc_fieldsplit_type`` ``schur`` and is implemented as .. math:: @@ -2164,7 +2321,7 @@ Where the approximate Schur complement. There are several variants of the Schur complement preconditioner -obtained by dropping some of the terms, these can be obtained with +obtained by dropping some of the terms; these can be obtained with ``-pc_fieldsplit_schur_fact_type `` or the function ``PCFieldSplitSetSchurFactType()``. Note that the ``diag`` form uses the preconditioner @@ -2177,19 +2334,19 @@ uses the preconditioner \end{array} \right). This is done to ensure the preconditioner is positive definite for a -common class of problems, saddle points with a positive definite -:math:`A_{00}`: for these the Schur complement is negative definite. +common class of problems, saddle points with a positive definite +:math:`A_{00}`: for these, the Schur complement is negative definite. The effectiveness of the Schur complement preconditioner depends on the availability of a good preconditioner :math:`\hat Sp` for the Schur complement matrix. In general, you are responsible for supplying :math:`\hat Sp` via ``PCFieldSplitSetSchurPre(pc,PC_FIELDSPLIT_SCHUR_PRE_USER,Sp)``. -In the absence of a good problem-specific :math:`\hat Sp`, you can use -some of the built-in options. +Without a good problem-specific :math:`\hat Sp`, you can use +some built-in options. Using ``-pc_fieldsplit_schur_precondition user`` on the command line -activates the matrix supplied programmatically as explained above. +activates the matrix supplied programmatically, as explained above. With ``-pc_fieldsplit_schur_precondition a11`` (default) :math:`\hat Sp = A_{11}` is used to build a preconditioner for @@ -2200,10 +2357,10 @@ Otherwise, ``-pc_fieldsplit_schur_precondition self`` will set build the preconditioner. The problem with the last approach is that :math:`\hat S` is used in -unassembled, matrix-free form, and many preconditioners (e.g., ILU) +the unassembled, matrix-free form, and many preconditioners (e.g., ILU) cannot be built out of such matrices. 
Instead, you can *assemble* an approximation to :math:`\hat S` by inverting :math:`A_{00}`, but only -approximately, so as to ensure the sparsity of :math:`\hat Sp` as much +approximately, to ensure the sparsity of :math:`\hat Sp` as much as possible. Specifically, using ``-pc_fieldsplit_schur_precondition selfp`` will assemble :math:`\hat Sp = A_{11} - A_{10} \text{inv}(A_{00}) A_{01}`. @@ -2224,10 +2381,10 @@ This uses for the preconditioner to :math:`\hat{S}` the operator .. math:: \text{ksp}(A_{10} A_{01},A_{10} A_{01}) A_{10} A_{00} A_{01} \text{ksp}(A_{10} A_{01},A_{10} A_{01}) -which, of course, introduces two additional inner solves for each +which, of course, introduces two additional inner solves for each application of the Schur complement. The options prefix for this inner ``KSP`` is ``-fieldsplit_1_lsc_``. Instead of constructing the matrix -:math:`A_{10} A_{01}` the user can provide their own matrix. This is +:math:`A_{10} A_{01}`, users can provide their own matrix. This is done by attaching the matrix/matrices to the :math:`Sp` matrix they provide with diff --git a/doc/manual/mat.rst b/doc/manual/mat.rst index e62fb3d5b77..27772a6135e 100644 --- a/doc/manual/mat.rst +++ b/doc/manual/mat.rst @@ -154,6 +154,12 @@ informs the user that all rows from ``first_row`` to ``last_row-1`` (since the value returned in ``last_row`` is one more than the global index of the last local row) will be stored on the local process. +If the ``Mat`` was obtained from a ``DM`` with ``DMCreateMatrix()``, then the range values are determined by the specific ``DM``. +If the ``Mat`` was created directly, the range values are determined by the local sizes passed to ``MatSetSizes()`` or ``MatCreateAIJ()`` (and similar low-level functions for other ``MatType``). +If ``PETSC_DECIDE`` was passed as the local size, then the matrix uses default values for the range using ``PetscSplitOwnership()``. +For certain ``DM``, such as ``DMDA``, it is better to use ``DM``-specific routines, such as ``DMDAGetGhostCorners()``, to determine +the local values in the matrix. See :any:`sec_matlayout` for full details on row and column layouts. + In the sparse matrix implementations, once the assembly routines have been called, the matrices are compressed and can be used for matrix-vector multiplication, etc. Any space for preallocated nonzeros @@ -569,10 +575,22 @@ applications based on a fixed number of stored update vectors. - ``MATLMVMDFP`` - ``lmvmdfp`` - SPD + * - Dense Davidon-Fletcher-Powell (DFP) :cite:`keyprefix-ErwayMarcia2017` + - ``MATLMVMDDFP`` + - ``lmvmddfp`` + - SPD * - Broyden-Fletcher-Goldfarb-Shanno (BFGS) :cite:`keyprefix-nocedal2006numerical` - ``MATLMVMBFGS`` - ``lmvmbfgs`` - SPD + * - Dense Broyden-Fletcher-Goldfarb-Shanno (BFGS) :cite:`keyprefix-ErwayMarcia2017` + - ``MATLMVMDBFGS`` + - ``lmvmdbfgs`` + - SPD + * - Dense Quasi-Newton + - ``MATLMVMDQN`` + - ``lmvmdqn`` + - SPD * - Restricted Broyden Family :cite:`keyprefix-erway2017solving` - ``MATLMVMSymBrdn`` - ``lmvmsymbrdn`` @@ -614,8 +632,9 @@ LMVM matrices can be applied to vectors in forward mode via ``MatSolve()``. They also support ``MatCreateVecs()``, ``MatDuplicate()`` and ``MatCopy()`` operations. -Restricted Broyden Family, DFP and BFGS methods additionally implement -special Jacobian initialization and scaling options available via +Restricted Broyden Family, DFP and BFGS methods, including their dense +versions, additionally implement special Jacobian initialization and +scaling options available via 
We describe these choices below: @@ -654,6 +673,24 @@ choices below: results in a significant reduction in the number of function evaluations taken to compute a solution. +The dense implementations are numerically equivalent to DFP and BFGS, +but they try to minimize memory transfer at the cost of storage +:cite:`keyprefix-ErwayMarcia2017`. Generally, dense formulations of DFP +and BFGS, ``MATLMVMDDFP`` and ``MATLMVMDBFGS``, should be faster than +classical recursive versions - on both CPU and GPU. It should be noted +that ``MatMult`` of dense BFGS, and ``MatSolve`` of dense DFP requires +Cholesky factorization, which may be numerically unstable, if a Jacobian +option other than ``none`` is used. Therefore, the default +implementation is to enable classical recursive algorithms to avoid +the Cholesky factorization. This option can be toggled via +``-mat_lbfgs_recursive`` and ``-mat_ldfp_recursive``. + +Dense Quasi-Newton, ``MATLMVMDQN`` is an implementation that uses +``MatSolve`` of ``MATLMVMDBFGS`` for its ``MatSolve``, and uses +``MatMult`` of ``MATLMVMDDFP`` for its ``MatMult``. It can be +seen as a hybrid implementation to avoid both recursive implementation +and Cholesky factorization, trading numerical accuracy for performances. + Note that the user-provided initial Jacobian via ``MatLMVMSetJ0()`` overrides and disables all built-in initialization methods. diff --git a/doc/manual/snes.rst b/doc/manual/snes.rst index 3d417965c10..65cd8b9ba1e 100644 --- a/doc/manual/snes.rst +++ b/doc/manual/snes.rst @@ -86,7 +86,8 @@ follows: SNESCreate(MPI_Comm comm,SNES *snes); -The user must then set routines for evaluating the residual function :math:numref:`fx0` and its associated Jacobian matrix, as +The user must then set routines for evaluating the residual function :math:numref:`fx0` +and, *possibly*, its associated Jacobian matrix, as discussed in the following sections. To choose a nonlinear solution method, the user can either call @@ -172,7 +173,11 @@ The arguments of the routine ``FormJacobian()`` are the current iterate, ``x``; the (approximate) Jacobian matrix, ``Amat``; the matrix from which the preconditioner is constructed, ``Pmat`` (which is usually the same as ``Amat``); and an optional user-defined Jacobian context, -``ctx``, for application-specific data. Note that the ``SNES`` solvers +``ctx``, for application-specific data. The ``FormJacobian()`` +callback is only invoked if the solver requires it, always +*after* ``FormFunction()`` has been called at the current iterate. + +Note that the ``SNES`` solvers are all data-structure neutral, so the full range of PETSc matrix formats (including “matrix-free” methods) can be used. :any:`ch_matrices` discusses information regarding diff --git a/doc/manual/ts.rst b/doc/manual/ts.rst index 63f73e4a3aa..d23e629d16a 100644 --- a/doc/manual/ts.rst +++ b/doc/manual/ts.rst @@ -1060,7 +1060,7 @@ Discretized finite element problems often have the form :math:`M \dot u = G(t, u Such problems can be solved using ``DMTSSetIFunction()`` with implicit integrators. When :math:`M` is nonsingular (i.e., the problem is an ODE, not a DAE), explicit integrators can be applied to :math:`\dot u = M^{-1} G(t, u)` or :math:`\dot u = \hat M^{-1} G(t, u)`, where :math:`\hat M` is the lumped mass matrix. While the true mass matrix generally has a dense inverse and thus must be solved iteratively, the lumped mass matrix is diagonal (e.g., computed via collocated quadrature or row sums of :math:`M`). 
-To have PETSc create and apply a (lumped) mass matrix automatically, first use ``DMTSSetRHSFunction()` to specify :math:`G` and set a ``PetscFE` using ``DMAddField()`` and ``DMCreateDS()``, then call either ``DMTSCreateRHSMassMatrix()`` or ``DMTSCreateRHSMassMatrixLumped()`` to automatically create the mass matrix and a ``KSP`` that will be used to apply :math:`M^{-1}`. +To have PETSc create and apply a (lumped) mass matrix automatically, first use ``DMTSSetRHSFunction()`` to specify :math:`G` and set a ``PetscFE`` using ``DMAddField()`` and ``DMCreateDS()``, then call either ``DMTSCreateRHSMassMatrix()`` or ``DMTSCreateRHSMassMatrixLumped()`` to automatically create the mass matrix and a ``KSP`` that will be used to apply :math:`M^{-1}`. This ``KSP`` can be customized using the ``"mass_"`` prefix. .. _section_sa: diff --git a/doc/manual/vec.rst b/doc/manual/vec.rst index d6dfb3a5bda..fd698f3c956 100644 --- a/doc/manual/vec.rst +++ b/doc/manual/vec.rst @@ -470,6 +470,18 @@ process, while ``end`` specifies *one more than* the last owned by the local process. This command is useful, for instance, in assembling parallel vectors. +If the ``Vec`` was obtained from a ``DM`` with ``DMCreateGlobalVector()``, then the range values are determined by the specific ``DM``. +If the ``Vec`` was created directly, the range values are determined by the local size passed to ``VecSetSizes()`` or ``VecCreateMPI()``. +If ``PETSC_DECIDE`` was passed as the local size, then the vector uses default values for the range using ``PetscSplitOwnership()``. +For certain ``DM``, such as ``DMDA``, it is better to use ``DM``-specific routines, such as ``DMDAGetGhostCorners()``, to determine +the local values in the vector. + +Very occasionally, all MPI processes need to know all the range values; these can be obtained with + +.. code-block:: + + VecGetOwnershipRanges(Vec vec,PetscInt range[]); + The number of elements stored locally can be accessed with .. code-block:: diff --git a/doc/overview/index.rst b/doc/overview/index.rst index c0263a28a1d..6d3bdd5ad99 100644 --- a/doc/overview/index.rst +++ b/doc/overview/index.rst @@ -21,6 +21,7 @@ managing parallel PDE discretizations including parallel matrix and vector assem integrator_table tao_solve_table discrete_table + plex_transform_table ../manual/index ../manualpages/index ../changes/index diff --git a/doc/overview/plex_transform_table.rst b/doc/overview/plex_transform_table.rst new file mode 100644 index 00000000000..fc63a9af4d0 --- /dev/null +++ b/doc/overview/plex_transform_table.rst @@ -0,0 +1,55 @@ +.. _plex_transform_table: + +============================================ +Summary of Unstructured Mesh Transformations +============================================ + +.. 
list-table:: + :widths: auto + :align: center + :header-rows: 1 + + * - + - ``DMPlexTransformType`` + - Accepts Active Label + - Description + * - Mesh filtering + - transform_filter + - Yes + - Preserve a subset of the mesh marked by a `DMLabel` + * - Regular Refinement + - refine_regular + - No + - Splits all $k$-cells into $2^k$ pieces + * - Alfeld Refinement + - refine_alfeld + - No + - Barycentric refinement for simplices + * - Skeleton-based Refinement (SBR) + - refine_sbr + - Yes + - Simplicial refinement from Plaza and Carey + * - 1D Refinement + - refine_1d + - No + - Optimized refinement for 1D meshes that preserves the canonical ordering + * - Simplex-to-Box transform + - refine_tobox + - No + - Replaces each simplex cell with $2^d$ box cells + * - Box-to-Simplex transform + - refine_tosimplex + - No + - Replaces each box cell with simplex cells + * - Mesh extrusion + - extrude + - Yes + - Extrude $n$ layers of cells from a surface + * - Boundary Layer Extrusion + - refine_boundary_layer + - Yes + - Creates $n$ layers of tensor cells along marked boundaries + * - Cohesive cell extrusion + - cohesive_extrude + - Yes + - Extrude a layer of cells into a mesh from an internal surface diff --git a/doc/overview/previous_release_docs.rst b/doc/overview/previous_release_docs.rst index b9e5d7252c2..121f6079c8c 100644 --- a/doc/overview/previous_release_docs.rst +++ b/doc/overview/previous_release_docs.rst @@ -6,6 +6,7 @@ Documentation for previous PETSc releases .. toctree:: :maxdepth: 1 + 3.20 3.19 3.18 3.17 diff --git a/doc/petsc.bib b/doc/petsc.bib index fc466296ca2..77afb9953d3 100644 --- a/doc/petsc.bib +++ b/doc/petsc.bib @@ -1058,6 +1058,17 @@ @Article{ mills2021 Rupp and Barry F. Smith and Stefano Zampini and Hong Zhang and Junchao Zhang} } +@Article{ mills2024, + title = {{PETSc/TAO} Developments for Early Exascale Systems}, + author = {Richard Tran Mills and Mark F. Adams and Satish Balay and Jed Brown and + Jacob Faibussowitsch and Tobin Isaac and Matthew G. 
Knepley and Todd Munson and + Hansol Suh and Stefano Zampini and Hong Zhang and Junchao Zhang}, + journal = {International Journal of High Performance Computing Applications}, + url = {http://arxiv.org/abs/2401.05868}, + note = {Submitted}, + year = {2024} +} + @Article{ rupp:2016:pis:2988256.2907944, author = {Karl Rupp and Josef Weinbub and Ansgar J\"{u}ngel and Tibor Grasser}, title = {Pipelined Iterative Solvers with Kernel Fusion for Graphics Processing Units}, @@ -2955,7 +2966,7 @@ @Article{ isaacknepley2017 year = {2017}, note = {\url{http://arxiv.org/abs/1508.02470}}, petsc_uses = {DMPlex} -}} +} @Article{ knepleylangegorman2017, title = {Unstructured Overlapping Mesh Distribution in Parallel}, @@ -3022,21 +3033,202 @@ @Article{ farrellknepleywechsungmitchell2020 volume = {47}, number = {3}, pages = {1--22}, + issn = {0098-3500}, + doi = {10.1145/3445791}, + publisher = {Association for Computing Machinery}, + issue_date = {September 2021}, year = {2021}, petsc_uses = {KSP,DMPlex} } +@Article{ laakmann2022augmented, + title = {An augmented Lagrangian preconditioner for the magnetohydrodynamics equations at high Reynolds and coupling numbers}, + author = {Fabian Laakmann and Patrick E Farrell and Lawrence Mitchell}, + journal = {SIAM Journal on Scientific Computing}, + volume = {44}, + number = {4}, + pages = {B1018--B1044}, + year = {2022} +} + +@Article{ papadopoulosfarrellsurowiec2021, + title = {Computing multiple solutions of topology optimization problems}, + author = {Papadopoulos, Ioannis PA and Farrell, Patrick E and Surowiec, Thomas M}, + journal = {SIAM Journal on Scientific Computing}, + volume = {43}, + number = {3}, + pages = {A1555--A1582}, + year = {2021} +} + +@Article{ jiangzhanghuschneiderzorinpanozzo2021, + title = {Bijective and coarse high-order tetrahedral meshes}, + author = {Jiang, Zhongshi and Zhang, Ziyi and Hu, Yixin and Schneider, Teseo and Zorin, Denis and Panozzo, Daniele}, + journal = {ACM Transactions on Graphics (TOG)}, + volume = {40}, + number = {4}, + pages = {1--16}, + year = {2021} +} + +@Article{ abumaclachlanfarrell2023, + title = {Monolithic multigrid for implicit {Runge--Kutta} discretizations of incompressible fluid flow}, + author = {Abu-Labdeh, Razan and MacLachlan, Scott and Farrell, Patrick E}, + journal = {Journal of Computational Physics}, + volume = {478}, + pages = {111961}, + year = {2023} +} + +@Article{ farrellkirbymarchena2021, + title = {{Irksome}: Automating {Runge--Kutta} time-stepping for finite element methods}, + author = {Farrell, Patrick E and Kirby, Robert C and Marchena-Menendez, Jorge}, + journal = {ACM Transactions on Mathematical Software (TOMS)}, + volume = {47}, + number = {4}, + pages = {1--26}, + year = {2021} +} + +@Article{ farrellgazca2020, + title = {An augmented Lagrangian preconditioner for implicitly constituted non-Newtonian incompressible flow}, + author = {Farrell, Patrick E and Gazca-Orozco, Pablo Alexei}, + journal = {SIAM Journal on Scientific Computing}, + volume = {42}, + number = {6}, + pages = {B1329--B1349}, + year = {2020} +} + @Article{ farrellmitchellscottwechsung2022, - title = {Robust multigrid methods for nearly incompressible elasticity using macro - elements}, - author = {Patrick E Farrell and Lawrence Mitchell and L Ridgway Scott and Florian - Wechsung}, + title = {Robust multigrid methods for nearly incompressible elasticity using macro elements}, + author = {Farrell, Patrick E and Mitchell, Lawrence and Scott, L Ridgway and Wechsung, Florian}, journal = {IMA Journal of Numerical 
Analysis}, - issn = {0272-4979}, - doi = {10.1093/imanum/drab083}, - url = {https://doi.org/10.1093/imanum/drab083}, - year = {2022}, - petsc_uses = {KSP,DMPlex} + volume = {42}, + number = {4}, + pages = {3306--3329}, + year = {2022} +} + +@Article{ shihstadlerwechsung2022, + title = {Robust multigrid techniques for augmented Lagrangian preconditioning of incompressible Stokes equations with extreme viscosity variations}, + author = {Shih, Yu-hsuan and Stadler, Georg and Wechsung, Florian}, + journal = {SIAM Journal on Scientific Computing}, + volume = {45}, + number = {3}, + pages = {S27--S53}, + year = {2022} +} + +@Article{ adlerhehumaclachlanohm2022, + title = {Monolithic multigrid for a reduced-quadrature discretization of poroelasticity}, + author = {Adler, James H and He, Yunhui and Hu, Xiaozhe and MacLachlan, Scott and Ohm, Peter}, + journal = {SIAM Journal on Scientific Computing}, + volume = {45}, + number = {3}, + pages = {S54--S81}, + year = {2022} +} + +@Article{ xiafarrellwechsung2021, + title = {Augmented Lagrangian preconditioners for the Oseen--Frank model of nematic and cholesteric liquid crystals}, + author = {Xia, Jingmin and Farrell, Patrick E and Wechsung, Florian}, + journal = {BIT Numerical Mathematics}, + volume = {61}, + pages = {607--644}, + year = {2021} +} + +@Article{ budisahukuchtamardalzikatanov2023, + title = {Algebraic multigrid methods for metric-perturbed coupled problems}, + author = {Budisa, Ana and Hu, Xiaozhe and Kuchta, Miroslav and Mardal, Kent-Andre and Zikatanov, Ludmil Tomov}, + journal = {arXiv preprint arXiv:2305.06073}, + year = {2023} +} + +@Article{ laakmannhufarrell2023, + title = {Structure-preserving and helicity-conserving finite element approximations and preconditioning for the Hall MHD equations}, + author = {Laakmann, Fabian and Hu, Kaibo and Farrell, Patrick E}, + journal = {Journal of Computational Physics}, + volume = {492}, + pages = {112410}, + year = {2023} +} + +@Article{ hongkrauskuchtalymberymardalrognes2022, + title = {Robust approximation of generalized Biot-Brinkman problems}, + author = {Hong, Qingguo and Kraus, Johannes and Kuchta, Miroslav and Lymbery, Maria and Mardal, Kent-Andr{\'e} and Rognes, Marie E}, + journal = {Journal of Scientific Computing}, + volume = {93}, + number = {3}, + pages = {77}, + year = {2022} +} + +@Article{ boonkuchtamardalruiz2021, + title = {Robust preconditioners for perturbed saddle-point problems and conservative discretizations of {Biot's} equations utilizing total pressure}, + author = {Boon, Wietse M and Kuchta, Miroslav and Mardal, Kent-Andre and Ruiz-Baier, Ricardo}, + journal = {SIAM Journal on Scientific Computing}, + volume = {43}, + number = {4}, + pages = {B961--B983}, + year = {2021} +} + +@Article{ papadopoulosfarrell2023, + title = {Preconditioners for computing multiple solutions in three-dimensional fluid topology optimization}, + author = {Papadopoulos, Ioannis PA and Farrell, Patrick E}, + journal = {SIAM Journal on Scientific Computing}, + volume = {45}, + number = {6}, + pages = {B853--B883}, + year = {2023} +} + +@Article{ farrellhamdanmaclachlan2022, + title = {A new mixed finite-element method for H2 elliptic problems}, + author = {Farrell, Patrick E and Hamdan, Abdalaziz and MacLachlan, Scott P}, + journal = {Computers \& Mathematics with Applications}, + volume = {128}, + pages = {300--319}, + year = {2022} +} + +@Article{ harpertuminaro2023, + title = {Compression and Reduced Representation Techniques for Patch-Based Relaxation}, + author = {Harper, Graham and 
Tuminaro, Ray}, + journal = {arXiv preprint arXiv:2306.10025}, + year = {2023} +} + +@Article{ kirbykernell2021, + title = {Preconditioning mixed finite elements for tide models}, + author = {Kirby, Robert C and Kernell, Tate}, + journal = {Computers \& Mathematics with Applications}, + volume = {82}, + pages = {212--227}, + year = {2021} +} + +@Book{ xia2023, + title = {Computing and Analysing Energy Minimisation Problems in Liquid Crystals: Implementation Using Firedrake}, + author = {Xia, Jingmin}, + year = {2023}, + publisher = {World Scientific} +} + +@PhDThesis{ leveque2022, + title = {Preconditioned iterative methods for optimal control problems with time-dependent PDEs as constraints}, + author = {Leveque, Santolo}, + school = {The University of Edinburgh}, + year = {2022} +} + +@PhDThesis{ brubeck2022, + title = {Optimal-complexity and robust multigrid methods for high-order FEM}, + author = {Pablo D. Brubeck}, + school = {Oxford University}, + year = {2022} } @Misc{ firedrakeproject, @@ -8320,7 +8512,7 @@ @Misc{ petsc-web-page title = {{PETS}c {W}eb page}, url = {https://petsc.org/}, howpublished = {\url{https://petsc.org/}}, - year = {2023} + year = {2024} } @Unpublished{ petsc-debian-package, @@ -8817,9 +9009,9 @@ @TechReport{ petsc-user-ref Zhang}, title = {{PETSc/TAO} Users Manual}, institution = {Argonne National Laboratory}, - number = {ANL-21/39 - Revision 3.20}, + number = {ANL-21/39 - Revision 3.21}, doi = {10.2172/2205494}, - year = {2023} + year = {2024} } % url = {https://petsc.org/release} @@ -11187,7 +11379,19 @@ @Article{ rang_2005 number = {4}, pages = {761--787}, volume = {45}, - publisher = {Springer} + publisher = {Springer}, + doi = {10.1007/s10543-005-0035-y} +} + +@article{rang2015improved, + title = {Improved traditional {R}osenbrock--{W}anner methods for stiff {ODE}s and {DAE}s}, + author = {Rang, Joachim}, + journal = {Journal of Computational and Applied Mathematics}, + volume = {286}, + pages = {128--144}, + year = {2015}, + publisher = {Elsevier}, + doi = {10.1016/j.cam.2015.03.010} } @Article{ gottliebketchesonshu2009, @@ -22564,6 +22768,15 @@ @Unpublished{ awanoufabienguzmanstern2020 year = {2020} } +@Article{ fabienguzmanneilanzytoon2022, + title = {Low-order divergence-free approximations for the Stokes problem on Worsey--Farin and Powell--Sabin splits}, + author = {Maurice Fabien and Johnny Guzm{\'a}n and Michael Neilan and Ahmed Zytoon}, + journal = {Computer Methods in Applied Mechanics and Engineering}, + volume = {390}, + pages = {114444}, + year = {2022}, +} + @Article{ chaabanegiraultrivierethompson2018, title = {A stable enriched {G}alerkin element for the {S}tokes problem}, author = {Nabil Chaabane and Vivette Girault and B\'eatrice Rivi\`ere and Travis Thompson}, @@ -22632,6 +22845,14 @@ @Article{ walkerknepleyaagaardwilliams2023 year = {2023} } +@Article{ bhavsar2024influence, + title = {Influence of initial slab dip, inter-plate coupling, and nonlinear rheology on dynamic weakening at the lithosphere-asthenosphere boundary}, + author = {Vivek Bhavsar and Margarete A. Jadamec and Matthew G. 
Knepley}, + journal = {Journal of Geophysical Research: Solid Earth}, + note = {Submitted}, + year = {2024}, +} + @InProceedings{ actorfuentesriviere2020b, title = {Identification of Kernels in a Convolutional Neural Network: Connections Between Level Set Equation and Deep Learning for Image Segmentation}, @@ -38988,11 +39209,12 @@ @InProceedings{ nathawaniknepley2022b } @Article{ nathawaniknepley2023, - title = {One-dimensional model to simulate shear-induced droplet formation}, + title = {A one-dimensional mathematical model for shear-induced droplet formation in co-flowing fluids}, author = {Darsh K. Nathawani and Matthew G. Knepley}, - journal = {International Journal of Multiphase Flow}, - note = {Submitted}, - year = {2023} + journal = {Theoretical and Computational Fluid Dynamics}, + url = {https://rdcu.be/dFtlR}, + doi = {10.1007/s00162-024-00690-5}, + year = {2024} } @InProceedings{ georgalisnathawaniknepleypatra2023, @@ -39050,6 +39272,16 @@ @Article{ finnknepleypusztayadams2023 year = {2023} } +@Article{ hamhaplaknepleymitchellsagiyama2024, + title = {Efficient N-to-M Checkpointing Algorithm for Finite Element Simulations}, + author = {David A. Ham and Vaclav Hapla and Matthew G. Knepley and Lawrence Mitchell and Koki Sagiyama}, + journal = {SIAM Journal on Scientific Computing}, + note = {Under review}, + url = {https://arXiv.org/abs/2401.05868}, + year = {2024}, + petsc_uses = {DMPlex} +} + @Article{ eggersdupont1994, title = {Drop formation in a one-dimensional approximation of the {Navier--Stokes} equation}, @@ -39104,7 +39336,7 @@ @article{MckayEriksonKozdon2019 year = {2019}, } -article{EricksonOReillyNordstrom2019, +@article{EricksonOReillyNordstrom2019, title = {Accuracy of stable, high-order finite difference methods for hyperbolic systems with non-smooth wave speeds}, author = {Brittany A Erickson and Ossian O’Reilly and Jan Nordstr{\"o}m}, journal = {Journal of Scientific Computing}, @@ -39170,3 +39402,13 @@ @article{SEAS2023 pages = {499--523}, year = {2023}, } + +@article{ErwayMarcia2017, + title = {On solving large-scale limited-memory quasi-{N}ewton equations}, + author = {Erway, Jennifer B and Marcia, Roummel F}, + journal = {Linear Algebra and its Applications}, + volume = {515}, + pages = {196--225}, + year = {2017}, + publisher = {Elsevier}, +} diff --git a/doc/requirements.txt b/doc/requirements.txt index 26fbc3116fb..1142527a91e 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -11,3 +11,4 @@ Jinja2==3.1.3 # docutils 0.19.1 produces bad HTML from the references in PCFIELDSPLIT docutils==0.17.1 numpy +wheel diff --git a/gmakefile b/gmakefile index 6af12062605..623135d4e81 100644 --- a/gmakefile +++ b/gmakefile @@ -296,7 +296,7 @@ gmakeinfo: -@echo "-----------------------------------------" -@echo "Using configure Options: ${CONFIGURE_OPTIONS}" -@echo "Using configuration flags:" - -@grep "\#define " ${PETSCCONF_H} | tail -n +2 + -@grep "#define " ${PETSCCONF_H} | tail -n +2 -@echo "-----------------------------------------" -@echo "Using C compile: ${PETSC_CCOMPILE_SINGLE}" -@if [ "${MPICC_SHOW}" != "" ]; then \ diff --git a/gmakefile.test b/gmakefile.test index 4d93b5b0f7c..18739b6bcfd 100644 --- a/gmakefile.test +++ b/gmakefile.test @@ -297,7 +297,11 @@ $(foreach pkg, $(pkgs), $(call concattestlang,$(pkg),F F90)) : $(libpetscall) # Testing convenience targets .PHONY: test pre-clean -test: report_tests +# The Fortran compiler may use outdated module files in the tutorial and test directories, so delete them all +remove-test-mod-files: + -@find 
src -name "*.mod" -not -path "src/binding/*" -delete + +test: remove-test-mod-files report_tests pre-clean: @$(RM) -rf $(TESTDIR)/counts $(TESTLOGTAPFILE) $(TESTLOGERRFILE) diff --git a/include/petsc/finclude/petscao.h b/include/petsc/finclude/petscao.h index 3943524c8dd..482f6c95e02 100644 --- a/include/petsc/finclude/petscao.h +++ b/include/petsc/finclude/petscao.h @@ -7,7 +7,7 @@ #include "petsc/finclude/petscis.h" -#define AO PetscFortranAddr +#define AO type(tPetscAO) #define AOType character*(80) #define AOBASIC 'basic' diff --git a/include/petsc/finclude/petscdm.h b/include/petsc/finclude/petscdm.h index 2e182c39e0b..24c9389a8fb 100644 --- a/include/petsc/finclude/petscdm.h +++ b/include/petsc/finclude/petscdm.h @@ -7,6 +7,7 @@ #include "petsc/finclude/petscis.h" #include "petsc/finclude/petscvec.h" #include "petsc/finclude/petscmat.h" +#include "petsc/finclude/petscdt.h" #define DMType character*(80) #define DMBoundaryType PetscEnum @@ -24,18 +25,17 @@ #define DMReorderDefaultFlag PetscEnum #define DM type(tDM) - -#define DMAdaptor PetscFortranAddr -#define PetscQuadrature PetscFortranAddr -#define PetscWeakForm PetscFortranAddr -#define PetscDS PetscFortranAddr -#define PetscFE PetscFortranAddr -#define PetscSpace PetscFortranAddr -#define PetscDualSpace PetscFortranAddr -#define PetscFV PetscFortranAddr -#define PetscLimiter PetscFortranAddr -#define PetscPartitioner PetscFortranAddr -#define DMField PetscFortranAddr +#define DMAdaptor type(tDMAdaptor) +#define PetscQuadrature type(tPetscQuadrature) +#define PetscWeakForm type(tPetscWeakForm) +#define PetscDS type(tPetscDS) +#define PetscFE type(tPetscFE) +#define PetscSpace type(tPetscSpace) +#define PetscDualSpace type(tPetscDualSpace) +#define PetscFV type(tPetscFV) +#define PetscLimiter type(tPetscLimiter) +#define PetscPartitioner type(tPetscPartitioner) +#define DMField type(tDMField) #define DMDA 'da' #define DMCOMPOSITE 'composite' diff --git a/include/petsc/finclude/petscdmda.h b/include/petsc/finclude/petscdmda.h index 2a9493f63b5..b94c3abc521 100644 --- a/include/petsc/finclude/petscdmda.h +++ b/include/petsc/finclude/petscdmda.h @@ -45,6 +45,4 @@ #define DMDAInterpolationType PetscEnum #define DMDAElementType PetscEnum -#define PetscGLL PetscFortranAddr - #endif diff --git a/include/petsc/finclude/petscdraw.h b/include/petsc/finclude/petscdraw.h index 2547508f6d1..299a4c02c3e 100644 --- a/include/petsc/finclude/petscdraw.h +++ b/include/petsc/finclude/petscdraw.h @@ -6,16 +6,17 @@ #if !defined (PETSCDRAWDEF_H) #define PETSCDRAWDEF_H -#define PetscDraw PetscFortranAddr -#define PetscDrawLG PetscFortranAddr -#define PetscDrawAxis PetscFortranAddr -#define PetscDrawSP PetscFortranAddr -#define PetscDrawHG PetscFortranAddr -#define PetscDrawMesh PetscFortranAddr +#define PetscDraw type(tPetscDraw) +#define PetscDrawLG type(tPetscDrawLG) +#define PetscDrawAxis type(tPetscDrawAxis) +#define PetscDrawSP type(tPetscDrawSP) +#define PetscDrawHG type(tPetscDrawHG) +#define PetscDrawMesh type(tPetscDrawMesh) +#define PetscDrawBar type(tPetscDrawBar) #define PetscDrawButton PetscEnum #define PetscDrawType character*(80) #define PetscDrawMarkerType PetscEnum -#define PetscDrawBar PetscFortranAddr + ! ! types of draw context ! diff --git a/include/petsc/finclude/petscdt.h b/include/petsc/finclude/petscdt.h new file mode 100644 index 00000000000..572d44826e9 --- /dev/null +++ b/include/petsc/finclude/petscdt.h @@ -0,0 +1,10 @@ +! +! Include file for Fortran use of the DT package in PETSc +! 
+#if !defined (PETSCDTDEF_H) +#define PETSCDTDEF_H + +#define PetscDTNodeType PetscEnum +#define PetscGaussLobattoLegendreCreateType PetscEnum + +#endif diff --git a/include/petsc/finclude/petscis.h b/include/petsc/finclude/petscis.h index a3ca143b3db..606d3018d55 100644 --- a/include/petsc/finclude/petscis.h +++ b/include/petsc/finclude/petscis.h @@ -19,11 +19,12 @@ #define ISType character*(80) #define ISInfo PetscEnum #define ISInfoType PetscEnum -#define ISLocalToGlobalMapping PetscFortranAddr #define ISGlobalToLocalType character*(80) #define ISGlobalToLocalMappingMode PetscEnum #define ISColoringType PetscEnum +#define ISLocalToGlobalMapping type(tISLocalToGlobalMapping) + #define ISColoringValue PETSC_IS_COLORING_VALUE_TYPE_F #define ISGENERAL 'general' diff --git a/include/petsc/finclude/petscksp.h b/include/petsc/finclude/petscksp.h index f5634f77a8d..e3a3b9899f2 100644 --- a/include/petsc/finclude/petscksp.h +++ b/include/petsc/finclude/petscksp.h @@ -19,6 +19,7 @@ #define KSPGMRESCGSRefinementType PetscEnum #define MatSchurComplementAinvType PetscEnum #define MatLMVMSymBroydenScaleType PetscEnum +#define MatLMVMDenseType PetscEnum #define KSPHPDDMType PetscEnum #define KSPChebyshevKind PetscEnum diff --git a/include/petsc/finclude/petscmat.h b/include/petsc/finclude/petscmat.h index 55752a6335a..f5580a05651 100644 --- a/include/petsc/finclude/petscmat.h +++ b/include/petsc/finclude/petscmat.h @@ -12,10 +12,11 @@ #define Mat type(tMat) #define MatNullSpace type(tMatNullSpace) #define MatFDColoring type(tMatFDColoring) +#define MatColoring type(tMatColoring) +#define MatTransposeColoring type(tMatTransposeColoring) +#define MatPartitioning type(tMatPartitioning) +#define MatCoarsen type(tMatCoarsen) -#define MatColoring PetscFortranAddr -#define MatPartitioning PetscFortranAddr -#define MatCoarsen PetscFortranAddr #define MatAIJIndices PetscFortranAddr #define MatType character*(80) #define MatSolverType character*(80) @@ -164,7 +165,10 @@ #define MATDUMMY 'dummy' #define MATLMVM 'lmvm' #define MATLMVMDFP 'lmvmdfp' +#define MATLMVMDDFP 'lmvmddfp' #define MATLMVMBFGS 'lmvmbfgs' +#define MATLMVMDBFGS 'lmvmdbfgs' +#define MATLMVMDQN 'lmvmdqn' #define MATLMVMSR1 'lmvmsr1' #define MATLMVMBROYDEN 'lmvmbroyden' #define MATLMVMBADBROYDEN 'lmvmbadbroyden' diff --git a/include/petsc/finclude/petscpc.h b/include/petsc/finclude/petscpc.h index 6d152e4563d..395b73f9253 100644 --- a/include/petsc/finclude/petscpc.h +++ b/include/petsc/finclude/petscpc.h @@ -1,5 +1,4 @@ ! -! ! Include file for Fortran use of the PC (preconditioner) package in PETSc ! #if !defined (PETSCPCDEF_H) @@ -10,13 +9,17 @@ #define PC type(tPC) +#define PCType character*(80) #define PCSide PetscEnum #define PCJacobiType PetscEnum #define PCASMType PetscEnum #define PCGASMType PetscEnum +#define PCMGType PetscEnum +#define PCMGCycleType PetscEnum +#define PCMGGalerkinType PetscEnum +#define PCMGCoarseSpaceType PetscEnum #define PCCompositeType PetscEnum #define PCRichardsonConvergedReason PetscEnum -#define PCType character*(80) #define PCFieldSplitSchurPreType PetscEnum #define PCPARMSGlobalType PetscEnum #define PCPARMSLocalType PetscEnum @@ -26,6 +29,10 @@ #define PCGAMGClassicalType character*(80) #define PCGAMGLayoutType PetscEnum #define PCHPDDMCoarseCorrectionType PetscEnum +#define PCExoticType PetscEnum +#define PCDeflationSpaceType PetscEnum +#define PCBDDCInterfaceExtType PetscEnum +#define PCFailedReason PetscEnum ! ! GAMG types ! @@ -37,7 +44,6 @@ ! 
#define PCGAMGCLASSICALDIRECT 'direct' #define PCGAMGCLASSICALSTANDARD 'standard' - ! ! Various preconditioners ! @@ -94,12 +100,4 @@ #define PCH2OPUS 'h2opus' #define PCMPI 'mpi' -#define PCMGType PetscEnum -#define PCMGCycleType PetscEnum -#define PCMGGalerkinType PetscEnum -#define PCExoticType PetscEnum -#define PCDeflationSpaceType PetscEnum -#define PCBDDCInterfaceExtType PetscEnum -#define PCHPDDMCoarseCorrectionType PetscEnum -#define PCFailedReason PetscEnum #endif diff --git a/include/petsc/finclude/petscsnes.h b/include/petsc/finclude/petscsnes.h index bdc10f7a6c8..e73665f428a 100644 --- a/include/petsc/finclude/petscsnes.h +++ b/include/petsc/finclude/petscsnes.h @@ -7,7 +7,8 @@ #include "petsc/finclude/petscksp.h" #define SNES type(tSNES) - +#define SNESLineSearch type(tSNESLineSearch) +#define MatMFFD type(tMatMFFD) #define PetscConvEst type(tPetscConvEst) #define SNESType character*(80) @@ -15,9 +16,7 @@ #define SNESConvergedReason PetscEnum #define SNESLineSearchReason PetscEnum #define SNESLineSearchType character*(80) -#define MatMFFD PetscFortranAddr -#define MatMFFDType PetscFortranAddr -#define SNESLineSearch PetscFortranAddr +#define MatMFFDType character*(80) #define SNESLineSearchOrder PetscEnum #define SNESNormSchedule PetscEnum #define SNESQNType PetscEnum @@ -29,6 +28,8 @@ #define SNESNGMRESSelectType PetscEnum #define SNESNewtonTRFallbackType PetscEnum #define SNESNewtonTRQNType PetscEnum +#define SNESCompositeType PetscEnum +#define SNESFunctionType PetscEnum ! ! SNESType diff --git a/include/petsc/finclude/petscsys.h b/include/petsc/finclude/petscsys.h index 71199588a7a..27e29225bea 100644 --- a/include/petsc/finclude/petscsys.h +++ b/include/petsc/finclude/petscsys.h @@ -82,6 +82,10 @@ #define PetscLogStage PetscFortranInt #define PetscVoid PetscFortranAddr ! +#define PetscDeviceType PetscEnum +#define PetscDevice type(tPetscDevice) +#define PetscDeviceContext type(tPetscDeviceContext) +! #define PetscCopyMode PetscEnum ! #define PetscDataType PetscEnum @@ -163,9 +167,11 @@ ! that are not used in the numerical computations, but rather in logging, ! timing etc. ! -#define PetscObject PetscFortranAddr +#define PetscObject type(tPetscObject) #define PetscLogDouble PetscFortranDouble ! +#define PetscObjectIsNull(obj) (obj%v == 0 .or. obj%v == -2 .or. obj%v == -3) +! ! Macros for error checking ! 
#define SETERRQ(c, ierr, s) call PetscError(c, ierr, 0, s); return @@ -189,7 +195,7 @@ #define PetscCheckA(err, c, ierr, s) if (.not.(err)) then; SETERRA(c, ierr, s); endif #define PetscCheck(err, c, ierr, s) if (.not.(err)) then; SETERRQ(c, ierr, s); endif -#define PetscMatlabEngine PetscFortranAddr +#define PetscMatlabEngine type(tPetscMatlabEngine) #if !defined(PetscFlush) #if defined(PETSC_HAVE_FORTRAN_FLUSH) @@ -217,4 +223,5 @@ #define PetscFunctionList PetscFortranAddr #define PetscInfoCommFlag PetscEnum + #endif diff --git a/include/petsc/finclude/petsctao.h b/include/petsc/finclude/petsctao.h index bb7c523aca4..db7d7ba131b 100644 --- a/include/petsc/finclude/petsctao.h +++ b/include/petsc/finclude/petsctao.h @@ -4,7 +4,7 @@ #include "petsc/finclude/petscts.h" #define Tao type(tTao) -#define TaoLineSearch PetscFortranAddr +#define TaoLineSearch type(tTaoLineSearch) #define TaoConvergedReason PetscEnum #define TaoType character*(80) #define TaoLineSearchType character*(80) @@ -12,6 +12,7 @@ #define TaoADMMRegularizerType PetscEnum #define TaoALMMType PetscEnum #define TaoBNCGType PetscEnum +#define TaoSubsetType PetscEnum #define TAOLMVM 'lmvm' #define TAONLS 'nls' diff --git a/include/petsc/finclude/petscts.h b/include/petsc/finclude/petscts.h index 655d608d604..7c5bf5148f8 100644 --- a/include/petsc/finclude/petscts.h +++ b/include/petsc/finclude/petscts.h @@ -9,6 +9,7 @@ #define TS type(tTS) #define TSAdapt type(tTSAdapt) #define TSTrajectory type(tTSTrajectory) +#define TSGLLEAdapt type(tTSGLLEAdapt) #define TSType character*(80) #define TSAdaptType character*(80) @@ -20,6 +21,7 @@ #define TSProblemType PetscEnum #define TSSundialsGramSchmidtType PetscEnum #define TSSundialsLmmType PetscEnum +#define TSTrajectoryMemoryType PetscEnum #define TSEULER 'euler' #define TSBEULER 'beuler' diff --git a/include/petsc/private/cpp/object_pool.hpp b/include/petsc/private/cpp/object_pool.hpp index b33e613970c..a15efec151d 100644 --- a/include/petsc/private/cpp/object_pool.hpp +++ b/include/petsc/private/cpp/object_pool.hpp @@ -251,23 +251,29 @@ class ConstructorInterface { PetscErrorCode destroy(value_type *ptr) const noexcept { + const Derived &underlying = this->underlying(); + PetscFunctionBegin; - PetscCall(this->underlying().destroy_(ptr)); + PetscCall(underlying.destroy_(ptr)); PetscFunctionReturn(PETSC_SUCCESS); } template <typename... Args> PetscErrorCode reset(value_type *val, Args &&...args) const noexcept { + const Derived &underlying = this->underlying(); + PetscFunctionBegin; - PetscCall(this->underlying().reset_(val, std::forward<Args>(args)...)); + PetscCall(underlying.reset_(val, std::forward<Args>(args)...)); PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode invalidate(value_type *ptr) const noexcept { + const Derived &underlying = this->underlying(); + PetscFunctionBegin; - PetscCall(this->underlying().invalidate_(ptr)); + PetscCall(underlying.invalidate_(ptr)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/include/petsc/private/cupmatomics.hpp b/include/petsc/private/cupmatomics.hpp index 26c8145e1bd..ccaa2344351 100644 --- a/include/petsc/private/cupmatomics.hpp +++ b/include/petsc/private/cupmatomics.hpp @@ -493,7 +493,7 @@ __device__ static double atomicExch(double *address, double val) } #endif -__device__ static llint atomicExch(llint *address, llint val) +__device__ static inline llint atomicExch(llint *address, llint val) { return (llint)(atomicExch((ullint *)address, (ullint)val)); } @@ -534,7 +534,7 @@ struct AtomicInsert { Atomic add operations */ -__device__ static llint 
atomicAdd(llint *address, llint val) +__device__ static inline llint atomicAdd(llint *address, llint val) { return (llint)atomicAdd((ullint *)address, (ullint)val); } @@ -583,7 +583,7 @@ struct AtomicAdd { HIP has no atomicMult at all, so we build our own with atomicCAS */ #if defined(PETSC_USE_REAL_DOUBLE) -__device__ static double atomicMult(double *address, double val) +__device__ static inline double atomicMult(double *address, double val) { ullint *address_as_ull = (ullint *)(address); ullint old = *address_as_ull, assumed; @@ -607,7 +607,7 @@ __device__ static float atomicMult(float *address, float val) } #endif -__device__ static int atomicMult(int *address, int val) +__device__ static inline int atomicMult(int *address, int val) { int *address_as_int = (int *)(address); int old = *address_as_int, assumed; @@ -618,7 +618,7 @@ __device__ static int atomicMult(int *address, int val) return (int)old; } -__device__ static llint atomicMult(llint *address, llint val) +__device__ static inline llint atomicMult(llint *address, llint val) { ullint *address_as_ull = (ullint *)(address); ullint old = *address_as_ull, assumed; @@ -725,7 +725,7 @@ struct AtomicMax { As of ROCm 3.10, the llint atomicAnd/Or/Xor(llint*, llint) is not supported */ -__device__ static llint atomicAnd(llint *address, llint val) +__device__ static inline llint atomicAnd(llint *address, llint val) { ullint *address_as_ull = (ullint *)(address); ullint old = *address_as_ull, assumed; @@ -735,7 +735,7 @@ __device__ static llint atomicAnd(llint *address, llint val) } while (assumed != old); return (llint)old; } -__device__ static llint atomicOr(llint *address, llint val) +__device__ static inline llint atomicOr(llint *address, llint val) { ullint *address_as_ull = (ullint *)(address); ullint old = *address_as_ull, assumed; @@ -746,7 +746,7 @@ __device__ static llint atomicOr(llint *address, llint val) return (llint)old; } -__device__ static llint atomicXor(llint *address, llint val) +__device__ static inline llint atomicXor(llint *address, llint val) { ullint *address_as_ull = (ullint *)(address); ullint old = *address_as_ull, assumed; diff --git a/include/petsc/private/cupmblasinterface.hpp b/include/petsc/private/cupmblasinterface.hpp index e8a92894455..60dc46f2f37 100644 --- a/include/petsc/private/cupmblasinterface.hpp +++ b/include/petsc/private/cupmblasinterface.hpp @@ -231,7 +231,7 @@ class cupmBlasHandleWrapper { #define PETSC_CUPMBLAS_FP_INPUT_TYPE PETSC_CUPMBLAS_FP_INPUT_TYPE_U #define PETSC_CUPMBLAS_FP_RETURN_TYPE PETSC_CUPMBLAS_FP_RETURN_TYPE_L template <> -struct BlasInterfaceImpl : Interface { +struct PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL BlasInterfaceImpl : Interface { // typedefs using cupmBlasHandle_t = cupmBlasHandleWrapper; using cupmBlasError_t = cublasStatus_t; @@ -262,6 +262,7 @@ struct BlasInterfaceImpl : Interface { // level 1 BLAS PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, axpy) + PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, copy) PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, scal) PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION_EXACT(STANDARD, dot, PetscIfPetscDefined(USE_COMPLEX, dotc, dot)) PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION_EXACT(STANDARD, dotu, PetscIfPetscDefined(USE_COMPLEX, dotu, dot)) @@ -272,6 +273,8 @@ struct BlasInterfaceImpl : Interface { // level 2 BLAS PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, gemv) + PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, trmv) + PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, trsv) // level 3 BLAS PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, gemm) @@ -296,7 +299,7 @@ 
struct BlasInterfaceImpl : Interface { #define PETSC_CUPMBLAS_FP_INPUT_TYPE PETSC_CUPMBLAS_FP_INPUT_TYPE_U #define PETSC_CUPMBLAS_FP_RETURN_TYPE PETSC_CUPMBLAS_FP_RETURN_TYPE_L template <> -struct BlasInterfaceImpl : Interface { +struct PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL BlasInterfaceImpl : Interface { // typedefs using cupmBlasHandle_t = cupmBlasHandleWrapper; using cupmBlasError_t = hipblasStatus_t; @@ -327,6 +330,7 @@ struct BlasInterfaceImpl : Interface { // level 1 BLAS PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, axpy) + PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, copy) PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, scal) PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION_EXACT(STANDARD, dot, PetscIfPetscDefined(USE_COMPLEX, dotc, dot)) PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION_EXACT(STANDARD, dotu, PetscIfPetscDefined(USE_COMPLEX, dotu, dot)) @@ -337,6 +341,8 @@ struct BlasInterfaceImpl : Interface { // level 2 BLAS PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, gemv) + PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, trmv) + PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, trsv) // level 3 BLAS PETSC_CUPMBLAS_ALIAS_BLAS_FUNCTION(STANDARD, gemm) @@ -385,6 +391,7 @@ struct BlasInterfaceImpl : Interface { using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasSetPointerMode; \ /* level 1 BLAS */ \ using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXaxpy; \ + using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXcopy; \ using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXscal; \ using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXdot; \ using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXdotu; \ @@ -394,6 +401,8 @@ struct BlasInterfaceImpl : Interface { using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXasum; \ /* level 2 BLAS */ \ using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXgemv; \ + using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXtrmv; \ + using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXtrsv; \ /* level 3 BLAS */ \ using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXgemm; \ using ::Petsc::device::cupm::impl::BlasInterfaceImpl::cupmBlasXtrsm; \ diff --git a/include/petsc/private/cupminterface.hpp b/include/petsc/private/cupminterface.hpp index fc573e2559e..a9b13fcfac7 100644 --- a/include/petsc/private/cupminterface.hpp +++ b/include/petsc/private/cupminterface.hpp @@ -549,6 +549,12 @@ struct Interface : InterfaceImpl { #define PETSC_PKG_CUDA_VERSION_GE(...) 0 #define CUPM_DEFINED_PETSC_PKG_CUDA_VERSION_GE #endif + +#if !defined(PETSC_PKG_HIP_VERSION_LT) + #define PETSC_PKG_HIP_VERSION_LT(...) 
0 + #define CUPM_DEFINED_PETSC_PKG_HIP_VERSION_LT +#endif + static PetscErrorCode PetscCUPMGetMemType(const void *data, PetscMemType *type, PetscBool *registered = nullptr, PetscBool *managed = nullptr) noexcept { cupmPointerAttributes_t attr; @@ -569,14 +575,14 @@ struct Interface : InterfaceImpl { cerr = cupmPointerGetAttributes(&attr, data); cerr = cupmGetLastError(); // HIP seems to always have used memoryType though -#if (defined(CUDART_VERSION) && (CUDART_VERSION < 10000)) || defined(__HIP_PLATFORM_HCC__) +#if (defined(CUDART_VERSION) && (CUDART_VERSION < 10000)) || (defined(__HIP_PLATFORM_HCC__) && PETSC_PKG_HIP_VERSION_LT(5, 5, 0)) const auto mtype = attr.memoryType; if (managed) *managed = static_cast((cerr == cupmSuccess) && attr.isManaged); #else if (PETSC_PKG_CUDA_VERSION_GE(11, 0, 0) && (T == DeviceType::CUDA)) PetscCallCUPM(cerr); const auto mtype = attr.type; if (managed) *managed = static_cast(mtype == cupmMemoryTypeManaged); -#endif // CUDART_VERSION && CUDART_VERSION < 10000 || __HIP_PLATFORM_HCC__ +#endif // CUDART_VERSION && CUDART_VERSION < 10000 || (defined(__HIP_PLATFORM_HCC__) && PETSC_PKG_HIP_VERSION_LT(5, 5, 0)) if (type) *type = ((cerr == cupmSuccess) && (mtype == cupmMemoryTypeDevice)) ? PETSC_MEMTYPE_CUPM() : PETSC_MEMTYPE_HOST; if (registered && (cerr == cupmSuccess) && (mtype == cupmMemoryTypeHost)) *registered = PETSC_TRUE; PetscFunctionReturn(PETSC_SUCCESS); @@ -584,6 +590,9 @@ struct Interface : InterfaceImpl { #if defined(CUPM_DEFINED_PETSC_PKG_CUDA_VERSION_GE) #undef PETSC_PKG_CUDA_VERSION_GE #endif +#if defined(CUPM_DEFINED_PETSC_PKG_HIP_VERSION_LT) + #undef PETSC_PKG_HIP_VERSION_LT +#endif PETSC_NODISCARD static PETSC_CONSTEXPR_14 cupmMemcpyKind_t PetscDeviceCopyModeToCUPMMemcpyKind(PetscDeviceCopyMode mode) noexcept { diff --git a/include/petsc/private/cupmobject.hpp b/include/petsc/private/cupmobject.hpp index 8feee31b054..49276033595 100644 --- a/include/petsc/private/cupmobject.hpp +++ b/include/petsc/private/cupmobject.hpp @@ -39,7 +39,7 @@ namespace // regular versions would be an enormous pain to square with the templated types... 
// ========================================================================================== template -class UseCUPMHostAllocGuard : Interface { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL UseCUPMHostAllocGuard : Interface { public: PETSC_CUPM_INHERIT_INTERFACE_TYPEDEFS_USING(T); @@ -115,7 +115,7 @@ inline bool UseCUPMHostAllocGuard::value() const noexcept } // anonymous namespace template -class RestoreableArray : Interface { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL RestoreableArray : Interface { public: PETSC_CUPM_INHERIT_INTERFACE_TYPEDEFS_USING(T); @@ -189,7 +189,7 @@ inline RestoreableArray::operator cupm_pointer_type() const noexcept } template -class CUPMObject : SolverInterface { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL CUPMObject : SolverInterface { protected: PETSC_CUPMSOLVER_INHERIT_INTERFACE_TYPEDEFS_USING(T); diff --git a/include/petsc/private/cupmsolverinterface.hpp b/include/petsc/private/cupmsolverinterface.hpp index 7bae5839802..1cf3feb3e67 100644 --- a/include/petsc/private/cupmsolverinterface.hpp +++ b/include/petsc/private/cupmsolverinterface.hpp @@ -50,7 +50,7 @@ struct SolverInterfaceImpl; #if PetscDefined(HAVE_CUDA) template <> -struct SolverInterfaceImpl : BlasInterface { +struct PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL SolverInterfaceImpl : BlasInterface { // typedefs using cupmSolverHandle_t = cusolverDnHandle_t; using cupmSolverError_t = cusolverStatus_t; @@ -166,7 +166,7 @@ struct SolverInterfaceImpl : BlasInterface { #if PetscDefined(HAVE_HIP) template <> -struct SolverInterfaceImpl : BlasInterface { +struct PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL SolverInterfaceImpl : BlasInterface { // typedefs using cupmSolverHandle_t = hipsolverHandle_t; using cupmSolverError_t = hipsolverStatus_t; diff --git a/include/petsc/private/deviceimpl.h b/include/petsc/private/deviceimpl.h index df2e0a5fd09..75c3b546260 100644 --- a/include/petsc/private/deviceimpl.h +++ b/include/petsc/private/deviceimpl.h @@ -3,6 +3,10 @@ #include #include +#if defined(PETSC_HAVE_CUPM) +PETSC_INTERN int PetscDeviceCUPMRuntimeArch; // The real CUDA/HIP arch the code is run with. 
For log view and error diagnosis +#endif + /* logging support */ PETSC_INTERN PetscLogEvent CUBLAS_HANDLE_CREATE; PETSC_INTERN PetscLogEvent CUSOLVER_HANDLE_CREATE; diff --git a/include/petsc/private/dmdaimpl.h b/include/petsc/private/dmdaimpl.h index f77d1665435..cc50d849e51 100644 --- a/include/petsc/private/dmdaimpl.h +++ b/include/petsc/private/dmdaimpl.h @@ -93,7 +93,6 @@ PETSC_INTERN PetscErrorCode DMView_DA_Matlab(DM, PetscViewer); PETSC_INTERN PetscErrorCode DMView_DA_Binary(DM, PetscViewer); PETSC_INTERN PetscErrorCode DMView_DA_VTK(DM, PetscViewer); PETSC_INTERN PetscErrorCode DMView_DA_GLVis(DM, PetscViewer); -PETSC_EXTERN PetscErrorCode DMDAVTKWriteAll(PetscObject, PetscViewer); PETSC_INTERN PetscErrorCode DMDASelectFields(DM, PetscInt *, PetscInt **); PETSC_INTERN PetscErrorCode DMCreateGlobalVector_DA(DM, Vec *); diff --git a/include/petsc/private/dmimpl.h b/include/petsc/private/dmimpl.h index beb20e21f59..7a042bb15c6 100644 --- a/include/petsc/private/dmimpl.h +++ b/include/petsc/private/dmimpl.h @@ -88,6 +88,7 @@ struct _DMOps { PetscErrorCode (*getboundingbox)(DM, PetscReal *, PetscReal *); PetscErrorCode (*getlocalboundingbox)(DM, PetscReal[], PetscReal[], PetscInt[], PetscInt[]); PetscErrorCode (*locatepointssubdomain)(DM, Vec, PetscMPIInt **); + PetscErrorCode (*snaptogeommodel)(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); PetscErrorCode (*projectfunctionlocal)(DM, PetscReal, PetscErrorCode (**)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar *, void *), void **, InsertMode, Vec); PetscErrorCode (*projectfunctionlabellocal)(DM, PetscReal, DMLabel, PetscInt, const PetscInt[], PetscInt, const PetscInt[], PetscErrorCode (**)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar *, void *), void **, InsertMode, Vec); @@ -276,8 +277,9 @@ struct _p_DM { // Affine transform applied in DMGlobalToLocal struct { - VecScatter affine_to_local; - Vec affine; + PetscInt num_affines; + VecScatter *affine_to_local; + Vec *affine; PetscErrorCode (*setup)(DM); } periodic; /* Constraints */ @@ -412,7 +414,8 @@ PETSC_EXTERN PetscErrorCode DMView_GLVis(DM, PetscViewer, PetscErrorCode (*)(DM, */ #if defined(PETSC_HAVE_HDF5) -PETSC_EXTERN PetscErrorCode DMSequenceLoad_HDF5_Internal(DM, const char *, PetscInt, PetscScalar *, PetscViewer); +PETSC_EXTERN PetscErrorCode DMSequenceLoad_HDF5_Internal(DM, const char[], PetscInt, PetscScalar *, PetscViewer); +PETSC_EXTERN PetscErrorCode DMSequenceGetLength_HDF5_Internal(DM, const char[], PetscInt *, PetscViewer); #endif static inline PetscErrorCode DMGetLocalOffset_Private(DM dm, PetscInt point, PetscInt *start, PetscInt *end) diff --git a/include/petsc/private/dmpleximpl.h b/include/petsc/private/dmpleximpl.h index 82ff1b01550..dd96678dc23 100644 --- a/include/petsc/private/dmpleximpl.h +++ b/include/petsc/private/dmpleximpl.h @@ -216,11 +216,12 @@ typedef struct { // Periodicity struct { // Specified by the user - PetscScalar transform[4][4]; // geometric transform - PetscSF face_sf; // root(donor faces) <-- leaf(local faces) + PetscInt num_face_sfs; // number of face_sfs + PetscSF *face_sfs; // root(donor faces) <-- leaf(local faces) + PetscScalar (*transform)[4][4]; // geometric transform // Created eagerly (depends on points) PetscSF composed_sf; // root(non-periodic global points) <-- leaf(local points) - IS periodic_points; + IS *periodic_points; } periodic; /* Projection */ @@ -232,7 +233,6 @@ typedef struct { PetscReal scale[NUM_PETSC_UNITS]; /* The scale for each SI unit */ /* Geometry */ - PetscBool 
ignoreModel; /* Ignore the geometry model during refinement */ PetscReal minradius; /* Minimum distance from cell centroid to face */ PetscBool useHashLocation; /* Use grid hashing for point location */ PetscGridHash lbox; /* Local box for searching */ @@ -300,14 +300,6 @@ PETSC_INTERN PetscErrorCode VecView_Plex_HDF5_Native_Internal(Vec, PetscViewer); PETSC_INTERN PetscErrorCode VecView_Plex_Local_HDF5_Internal(Vec, PetscViewer); PETSC_INTERN PetscErrorCode VecLoad_Plex_HDF5_Internal(Vec, PetscViewer); PETSC_INTERN PetscErrorCode VecLoad_Plex_HDF5_Native_Internal(Vec, PetscViewer); - -struct _n_DMPlexStorageVersion { - int major, minor, subminor; -}; -typedef struct _n_DMPlexStorageVersion *DMPlexStorageVersion; - -PETSC_EXTERN PetscErrorCode PetscViewerHDF5GetDMPlexStorageVersionReading(PetscViewer, DMPlexStorageVersion *); -PETSC_EXTERN PetscErrorCode PetscViewerHDF5GetDMPlexStorageVersionWriting(PetscViewer, DMPlexStorageVersion *); #endif PETSC_EXTERN PetscErrorCode VecView_Plex_Local_CGNS(Vec, PetscViewer); @@ -357,8 +349,9 @@ PETSC_INTERN PetscErrorCode DMPlexAnchorsModifyMat(DM, PetscSection, PetscInt, P PETSC_INTERN PetscErrorCode DMPlexAnchorsModifyMat_Internal(DM, PetscSection, PetscInt, PetscInt, const PetscInt[], const PetscInt ***, PetscInt, PetscInt, const PetscScalar[], PetscInt *, PetscInt *, PetscInt *[], PetscScalar *[], PetscInt[], PetscBool, PetscBool); PETSC_INTERN PetscErrorCode DMPlexAnchorsGetSubMatModification(DM, PetscSection, PetscInt, PetscInt, const PetscInt[], const PetscInt ***, PetscInt *, PetscInt *, PetscInt *[], PetscInt[], PetscScalar *[]); PETSC_INTERN PetscErrorCode DMPlexLocatePoint_Internal(DM, PetscInt, const PetscScalar[], PetscInt, PetscInt *); -/* these two are PETSC_EXTERN just because of src/dm/impls/plex/tests/ex18.c */ +/* this is PETSC_EXTERN just because of src/dm/impls/plex/tests/ex18.c */ PETSC_EXTERN PetscErrorCode DMPlexOrientInterface_Internal(DM); +PETSC_INTERN PetscErrorCode DMPlexOrientCells_Internal(DM, IS, IS); /* Applications may use this function */ PETSC_EXTERN PetscErrorCode DMPlexCreateNumbering_Plex(DM, PetscInt, PetscInt, PetscInt, PetscInt *, PetscSF, IS *); @@ -715,7 +708,7 @@ static inline void DMPlex_Transpose3D_Internal(PetscScalar A[]) static inline void DMPlex_Invert2D_Internal(PetscReal invJ[], PetscReal J[], PetscReal detJ) { // Allow zero volume cells - const PetscReal invDet = detJ == 0 ? 1.0 : 1.0 / detJ; + const PetscReal invDet = detJ == 0 ? 1.0 : (PetscReal)1.0 / detJ; invJ[0] = invDet * J[3]; invJ[1] = -invDet * J[1]; @@ -727,7 +720,7 @@ static inline void DMPlex_Invert2D_Internal(PetscReal invJ[], PetscReal J[], Pet static inline void DMPlex_Invert3D_Internal(PetscReal invJ[], PetscReal J[], PetscReal detJ) { // Allow zero volume cells - const PetscReal invDet = detJ == 0 ? 1.0 : 1.0 / detJ; + const PetscReal invDet = detJ == 0 ? 
1.0 : (PetscReal)1.0 / detJ; invJ[0 * 3 + 0] = invDet * (J[1 * 3 + 1] * J[2 * 3 + 2] - J[1 * 3 + 2] * J[2 * 3 + 1]); invJ[0 * 3 + 1] = invDet * (J[0 * 3 + 2] * J[2 * 3 + 1] - J[0 * 3 + 1] * J[2 * 3 + 2]); @@ -825,6 +818,8 @@ PETSC_INTERN PetscErrorCode DMCreateNeumannOverlap_Plex(DM, IS *, Mat *, PetscEr PETSC_INTERN PetscErrorCode DMPlexMarkBoundaryFaces_Internal(DM, PetscInt, PetscInt, DMLabel, PetscBool); PETSC_INTERN PetscErrorCode DMPlexDistributeOverlap_Internal(DM, PetscInt, MPI_Comm, const char *, PetscSF *, DM *); +PETSC_INTERN PetscErrorCode DMPlexMarkSubmesh_Interpolated(DM, DMLabel, PetscInt, PetscBool, PetscBool, DMLabel, DM); + PETSC_INTERN PetscErrorCode DMPeriodicCoordinateSetUp_Internal(DM); /* Functions in the vtable */ @@ -854,3 +849,6 @@ PETSC_INTERN void coordMap_shear(PetscInt, PetscInt, PetscInt, const PetscInt[], PETSC_INTERN void coordMap_flare(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]); PETSC_INTERN void coordMap_annulus(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]); PETSC_INTERN void coordMap_shell(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]); + +PETSC_EXTERN PetscErrorCode DMSnapToGeomModel_EGADS(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); +PETSC_EXTERN PetscErrorCode DMSnapToGeomModel_EGADSLite(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); diff --git a/include/petsc/private/dmplextransformimpl.h b/include/petsc/private/dmplextransformimpl.h index e4966d26214..23f47e27dd7 100644 --- a/include/petsc/private/dmplextransformimpl.h +++ b/include/petsc/private/dmplextransformimpl.h @@ -71,6 +71,17 @@ typedef struct { PetscInt **ornt; /* The array of orientation for each target cell */ } DMPlexTransform_Extrude; +typedef struct { + PetscInt debug; // Debugging level + PetscBool useTensor; // Flag to create tensor cells + PetscReal width; // The width of a cohesive cell + PetscInt *Nt; // The array of the number of target types + DMPolytopeType **target; // The array of target types + PetscInt **size; // The array of the number of each target type + PetscInt **cone; // The array of cones for each target cell + PetscInt **ornt; // The array of orientation for each target cell +} DMPlexTransform_Cohesive; + typedef struct { PetscInt dummy; } DMPlexRefine_Regular; diff --git a/include/petsc/private/fortranimpl.h b/include/petsc/private/fortranimpl.h index 070e963e169..73127be0f28 100644 --- a/include/petsc/private/fortranimpl.h +++ b/include/petsc/private/fortranimpl.h @@ -15,8 +15,12 @@ PETSC_EXTERN void *PETSC_NULL_SCALAR_Fortran; PETSC_EXTERN void *PETSC_NULL_DOUBLE_Fortran; PETSC_EXTERN void *PETSC_NULL_REAL_Fortran; PETSC_EXTERN void *PETSC_NULL_BOOL_Fortran; +PETSC_EXTERN void *PETSC_NULL_ENUM_Fortran; +PETSC_EXTERN void *PETSC_NULL_INTEGER_ARRAY_Fortran; +PETSC_EXTERN 
void *PETSC_NULL_SCALAR_ARRAY_Fortran; +PETSC_EXTERN void *PETSC_NULL_REAL_ARRAY_Fortran; +PETSC_EXTERN void *PETSC_NULL_MPI_COMM_Fortran; PETSC_EXTERN void (*PETSC_NULL_FUNCTION_Fortran)(void); -PETSC_EXTERN void *PETSC_NULL_MPI_COMM_Fortran; PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, PetscInt); @@ -34,7 +38,7 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define FIXCHAR(a, n, b) \ do { \ if ((a) == PETSC_NULL_CHARACTER_Fortran) { \ - (b) = (a) = NULL; \ + (b) = (a) = PETSC_NULLPTR; \ } else { \ while (((n) > 0) && ((a)[(n) - 1] == ' ')) (n)--; \ *ierr = PetscMalloc1((n) + 1, &(b)); \ @@ -65,22 +69,34 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe The cast through PETSC_UINTPTR_T is so that compilers that warn about casting to/from void * to void(*)(void) will not complain about these comparisons. It is not know if this works for all compilers */ -#define FORTRANNULLINTEGER(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_INTEGER_Fortran) -#define FORTRANNULLSCALAR(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_SCALAR_Fortran) +#define FORTRANNULLINTEGER(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_INTEGER_Fortran || ((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_INTEGER_ARRAY_Fortran) +#define FORTRANNULLSCALAR(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_SCALAR_Fortran || ((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_SCALAR_ARRAY_Fortran) +#define FORTRANNULLREAL(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_REAL_Fortran || ((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_REAL_ARRAY_Fortran) #define FORTRANNULLDOUBLE(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_DOUBLE_Fortran) -#define FORTRANNULLREAL(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_REAL_Fortran) #define FORTRANNULLBOOL(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_BOOL_Fortran) +#define FORTRANNULLENUM(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_ENUM_Fortran) #define FORTRANNULLCHARACTER(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_CHARACTER_Fortran) #define FORTRANNULLFUNCTION(a) (((void (*)(void))(PETSC_UINTPTR_T)a) == PETSC_NULL_FUNCTION_Fortran) #define FORTRANNULLOBJECT(a) (*(void **)(PETSC_UINTPTR_T)a == (void *)0) #define FORTRANNULLMPICOMM(a) (((void *)(PETSC_UINTPTR_T)a) == PETSC_NULL_MPI_COMM_Fortran) +/* + A Fortran object with a value of (void*) 0 corresponds to a NULL object in C and is indicated in Fortran by PETSC_NULL_XXXX + A Fortran object with a value of (void*) -2 is an object that was never created or was destroyed (see checkFortranTypeInitialize()). + A Fortran object with a value of (void*) -3 happens when a PETSc routine returns in one of its arguments a NULL object + (it cannot return a value of (void*) 0 because if later the returned variable is passed to a creation routine, + it would think one has passed in a PETSC_NULL_XXX and error). + + This is needed because Fortran always uses pass by reference so one cannot pass a NULL address, only an address with special + values at the location. 
+*/ + #define CHKFORTRANNULLINTEGER(a) \ do { \ - if (FORTRANNULLINTEGER(a)) { \ - a = NULL; \ + if (FORTRANNULLINTEGER(a) || FORTRANNULLENUM(a)) { \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLDOUBLE(a) || FORTRANNULLSCALAR(a) || FORTRANNULLREAL(a) || FORTRANNULLBOOL(a) || FORTRANNULLFUNCTION(a) || FORTRANNULLCHARACTER(a) || FORTRANNULLMPICOMM(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_INTEGER"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_INTEGER"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ @@ -89,9 +105,9 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define CHKFORTRANNULLSCALAR(a) \ do { \ if (FORTRANNULLSCALAR(a)) { \ - a = NULL; \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLINTEGER(a) || FORTRANNULLDOUBLE(a) || FORTRANNULLREAL(a) || FORTRANNULLBOOL(a) || FORTRANNULLFUNCTION(a) || FORTRANNULLCHARACTER(a) || FORTRANNULLMPICOMM(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_SCALAR"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_SCALAR"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ @@ -100,9 +116,9 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define CHKFORTRANNULLDOUBLE(a) \ do { \ if (FORTRANNULLDOUBLE(a)) { \ - a = NULL; \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLINTEGER(a) || FORTRANNULLSCALAR(a) || FORTRANNULLREAL(a) || FORTRANNULLBOOL(a) || FORTRANNULLFUNCTION(a) || FORTRANNULLCHARACTER(a) || FORTRANNULLMPICOMM(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_DOUBLE"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_DOUBLE"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ @@ -111,9 +127,9 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define CHKFORTRANNULLREAL(a) \ do { \ if (FORTRANNULLREAL(a)) { \ - a = NULL; \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLINTEGER(a) || FORTRANNULLDOUBLE(a) || FORTRANNULLSCALAR(a) || FORTRANNULLBOOL(a) || FORTRANNULLFUNCTION(a) || FORTRANNULLCHARACTER(a) || FORTRANNULLMPICOMM(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_REAL"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_REAL"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ @@ -121,10 +137,10 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define CHKFORTRANNULLOBJECT(a) \ do { \ - if (*(void **)a == (void *)0) { \ - a = NULL; \ + if (!(*(void **)a)) { \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLINTEGER(a) || FORTRANNULLDOUBLE(a) || FORTRANNULLSCALAR(a) || FORTRANNULLREAL(a) || FORTRANNULLBOOL(a) || FORTRANNULLFUNCTION(a) || FORTRANNULLCHARACTER(a) || FORTRANNULLMPICOMM(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use 
PETSC_NULL_XXX where XXX is the name of a particular object class"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_XXX where XXX is the name of a particular object class"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ @@ -133,9 +149,9 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define CHKFORTRANNULLBOOL(a) \ do { \ if (FORTRANNULLBOOL(a)) { \ - a = NULL; \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLSCALAR(a) || FORTRANNULLINTEGER(a) || FORTRANNULLDOUBLE(a) || FORTRANNULLSCALAR(a) || FORTRANNULLREAL(a) || FORTRANNULLFUNCTION(a) || FORTRANNULLCHARACTER(a) || FORTRANNULLMPICOMM(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_BOOL"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_BOOL"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ @@ -144,9 +160,9 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define CHKFORTRANNULLFUNCTION(a) \ do { \ if (FORTRANNULLFUNCTION(a)) { \ - a = NULL; \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLOBJECT(a) || FORTRANNULLSCALAR(a) || FORTRANNULLDOUBLE(a) || FORTRANNULLREAL(a) || FORTRANNULLINTEGER(a) || FORTRANNULLBOOL(a) || FORTRANNULLCHARACTER(a) || FORTRANNULLMPICOMM(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_FUNCTION"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_FUNCTION"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ @@ -155,22 +171,38 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool, const char *, Pe #define CHKFORTRANNULLMPICOMM(a) \ do { \ if (FORTRANNULLMPICOMM(a)) { \ - a = NULL; \ + a = PETSC_NULLPTR; \ } else if (FORTRANNULLINTEGER(a) || FORTRANNULLDOUBLE(a) || FORTRANNULLSCALAR(a) || FORTRANNULLREAL(a) || FORTRANNULLBOOL(a) || FORTRANNULLFUNCTION(a) || FORTRANNULLCHARACTER(a)) { \ - *ierr = PetscError(PETSC_COMM_SELF, __LINE__, "fortran_interface_unknown_file", __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_MPI_COMM"); \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Use PETSC_NULL_MPI_COMM"); \ *ierr = PETSC_ERR_ARG_BADPTR; \ return; \ } \ } while (0) -/* The two macros are used at the beginning and end of PETSc object Fortran destroy routines XxxDestroy(). -2 is in consistent with - the one used in checkFortranTypeInitialize() at compilersFortran.py. 
- */ +/* In the beginning of Fortran XxxCreate() ensure the object is neither NULL nor already created */ +#define PETSC_FORTRAN_OBJECT_CREATE(a) \ + do { \ + if (!(*(void **)a)) { \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Cannot create PETSC_NULL_XXX object"); \ + *ierr = PETSC_ERR_ARG_WRONG; \ + return; \ + } else if (*((void **)(a)) != (void *)-2 && *((void **)(a)) != (void *)-3) { \ + *ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_WRONG, PETSC_ERROR_INITIAL, "Cannot create already existing object"); \ + *ierr = PETSC_ERR_ARG_WRONG; \ + return; \ + } \ + } while (0) -/* In the beginning of Fortran XxxDestroy(a), if the input object was destroyed, change it to a petsc C NULL object so that it won't crash C XxxDestory() */ +/* + In the beginning of Fortran XxxDestroy(a), if the input object was destroyed, change it to a PETSc C NULL object so that it won't crash C XxxDestroy() + If it is PETSC_NULL_XXX, just return, since these objects cannot be destroyed +*/ #define PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(a) \ do { \ - if (*((void **)(a)) == (void *)-2) *(a) = NULL; \ + if (!*(void **)a || *((void **)(a)) == (void *)-2 || *((void **)(a)) == (void *)-3) { \ + *ierr = PETSC_SUCCESS; \ + return; \ + } \ } while (0) /* After C XxxDestroy(a) is called, change a's state from NULL to destroyed, so that it can be used/destroyed again by Fortran. @@ -206,6 +238,52 @@ typedef PETSC_UINTPTR_T PetscFortranAddr; #define PETSC_VIEWER_MATLAB_WORLD_FORTRAN 14 #define PETSC_VIEWER_MATLAB_SELF_FORTRAN 15 +#include + +static inline PetscViewer PetscPatchDefaultViewers(PetscViewer *v) +{ + if (!v) return PETSC_NULLPTR; + if (!(*(void **)v)) return PETSC_NULLPTR; + switch ((*(PetscFortranAddr *)v)) { + case PETSC_VIEWER_DRAW_WORLD_FORTRAN: + return PETSC_VIEWER_DRAW_WORLD; + case PETSC_VIEWER_DRAW_SELF_FORTRAN: + return PETSC_VIEWER_DRAW_SELF; + + case PETSC_VIEWER_STDOUT_WORLD_FORTRAN: + return PETSC_VIEWER_STDOUT_WORLD; + case PETSC_VIEWER_STDOUT_SELF_FORTRAN: + return PETSC_VIEWER_STDOUT_SELF; + + case PETSC_VIEWER_STDERR_WORLD_FORTRAN: + return PETSC_VIEWER_STDERR_WORLD; + case PETSC_VIEWER_STDERR_SELF_FORTRAN: + return PETSC_VIEWER_STDERR_SELF; + + case PETSC_VIEWER_BINARY_WORLD_FORTRAN: + return PETSC_VIEWER_BINARY_WORLD; + case PETSC_VIEWER_BINARY_SELF_FORTRAN: + return PETSC_VIEWER_BINARY_SELF; + +#if defined(PETSC_HAVE_MATLAB) + case PETSC_VIEWER_MATLAB_SELF_FORTRAN: + return PETSC_VIEWER_MATLAB_SELF; + case PETSC_VIEWER_MATLAB_WORLD_FORTRAN: + return PETSC_VIEWER_MATLAB_WORLD; +#endif + +#if defined(PETSC_USE_SOCKET_VIEWER) + case PETSC_VIEWER_SOCKET_WORLD_FORTRAN: + return PETSC_VIEWER_SOCKET_WORLD; + case PETSC_VIEWER_SOCKET_SELF_FORTRAN: + return PETSC_VIEWER_SOCKET_SELF; +#endif + + default: + return *v; + } +} + #if defined(PETSC_USE_SOCKET_VIEWER) #define PetscPatchDefaultViewers_Fortran_Socket(vin, v) \ } \ diff --git a/include/petsc/private/isimpl.h b/include/petsc/private/isimpl.h index 454b8066908..01c2c2475f3 100644 --- a/include/petsc/private/isimpl.h +++ b/include/petsc/private/isimpl.h @@ -80,7 +80,8 @@ struct _p_ISLocalToGlobalMapping { PetscInt **info_indices; PetscInt *info_nodec; PetscInt **info_nodei; - void *data; /* type specific data is stored here */ + PetscSF multileaves_sf; /* SF to communicate from local block indices to multi-leaves */ + void *data; /* type specific data is stored here */ }; struct _n_ISColoring { diff --git
a/include/petsc/private/kernels/blockinvert.h b/include/petsc/private/kernels/blockinvert.h index f92d200ca96..b363de3af33 100644 --- a/include/petsc/private/kernels/blockinvert.h +++ b/include/petsc/private/kernels/blockinvert.h @@ -18,8 +18,9 @@ src/mat/impls/baij/seq */ -PETSC_INTERN PetscErrorCode PetscLINPACKgefa(MatScalar *, PetscInt, PetscInt *, PetscBool, PetscBool *); -PETSC_INTERN PetscErrorCode PetscLINPACKgedi(MatScalar *, PetscInt, PetscInt *, MatScalar *); +PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode PetscLINPACKgefa(MatScalar *, PetscInt, PetscInt *, PetscBool, PetscBool *); +PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode PetscLINPACKgedi(MatScalar *, PetscInt, PetscInt *, MatScalar *); + PETSC_EXTERN PetscErrorCode PetscKernel_A_gets_inverse_A_2(MatScalar *, PetscReal, PetscBool, PetscBool *); PETSC_EXTERN PetscErrorCode PetscKernel_A_gets_inverse_A_3(MatScalar *, PetscReal, PetscBool, PetscBool *); diff --git a/include/petsc/private/matdensecupmimpl.h b/include/petsc/private/matdensecupmimpl.h index be4772b8dd3..d225c760e2f 100644 --- a/include/petsc/private/matdensecupmimpl.h +++ b/include/petsc/private/matdensecupmimpl.h @@ -59,7 +59,7 @@ namespace impl // ========================================================================================== template -class MatDense_CUPM_Base : protected device::cupm::impl::CUPMObject { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL MatDense_CUPM_Base : protected device::cupm::impl::CUPMObject { public: PETSC_CUPMOBJECT_HEADER(T); @@ -152,7 +152,7 @@ class MatDense_MPI_CUPM; // ========================================================================================== template -class MatDense_CUPM : protected MatDense_CUPM_Base { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL MatDense_CUPM : protected MatDense_CUPM_Base { private: static PetscErrorCode CheckSaneSequentialMatSizes_(Mat) noexcept; @@ -190,7 +190,7 @@ class MatDense_CUPM : protected MatDense_CUPM_Base { template template -class MatDense_CUPM::MatrixArray : public device::cupm::impl::RestoreableArray { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL MatDense_CUPM::MatrixArray : public device::cupm::impl::RestoreableArray { using base_type = device::cupm::impl::RestoreableArray; public: diff --git a/include/petsc/private/matimpl.h b/include/petsc/private/matimpl.h index cb9fb7b6bc2..57c332dbabd 100644 --- a/include/petsc/private/matimpl.h +++ b/include/petsc/private/matimpl.h @@ -218,6 +218,7 @@ struct _MatOps { PetscErrorCode (*transposesymbolic)(Mat, Mat *); PetscErrorCode (*eliminatezeros)(Mat, PetscBool); PetscErrorCode (*getrowsumabs)(Mat, Vec); + PetscErrorCode (*getfactor)(Mat, MatSolverType, MatFactorType, Mat *); }; /* If you add MatOps entries above also add them to the MATOP enum @@ -752,9 +753,10 @@ typedef struct { } MatParentState; PETSC_EXTERN PetscErrorCode MatFactorDumpMatrix(Mat); -PETSC_INTERN PetscErrorCode MatShift_Basic(Mat, PetscScalar); PETSC_INTERN PetscErrorCode MatSetBlockSizes_Default(Mat, PetscInt, PetscInt); +PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatShift_Basic(Mat, PetscScalar); + static inline PetscErrorCode MatPivotCheck_nz(PETSC_UNUSED Mat mat, const MatFactorInfo *info, FactorShiftCtx *sctx, PETSC_UNUSED PetscInt row) { PetscReal _rs = sctx->rs; @@ -1755,3 +1757,4 @@ PETSC_EXTERN PetscLogEvent MAT_H2Opus_Compress; PETSC_EXTERN PetscLogEvent MAT_H2Opus_Orthog; PETSC_EXTERN PetscLogEvent MAT_H2Opus_LR; PETSC_EXTERN PetscLogEvent MAT_CUDACopyToGPU; +PETSC_EXTERN PetscLogEvent MAT_HIPCopyToGPU; diff --git 
a/include/petsc/private/partitionerimpl.h b/include/petsc/private/partitionerimpl.h index 388c65898ae..c15974009e6 100644 --- a/include/petsc/private/partitionerimpl.h +++ b/include/petsc/private/partitionerimpl.h @@ -14,7 +14,7 @@ struct _PetscPartitionerOps { PetscErrorCode (*reset)(PetscPartitioner); PetscErrorCode (*view)(PetscPartitioner, PetscViewer); PetscErrorCode (*destroy)(PetscPartitioner); - PetscErrorCode (*partition)(PetscPartitioner, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscSection, PetscSection, PetscSection, IS *); + PetscErrorCode (*partition)(PetscPartitioner, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscSection, PetscSection, PetscSection, PetscSection, IS *); }; struct _p_PetscPartitioner { @@ -28,4 +28,5 @@ struct _p_PetscPartitioner { PetscBool viewGraph; PetscBool noGraph; /* if true, the partitioner does not need the connectivity graph, only the number of local vertices */ PetscBool usevwgt; /* if true, the partitioner looks at the local section vertSection to weight the vertices of the graph */ + PetscBool useewgt; /* if true, the partitioner looks at the topology to weight the edges of the graph */ }; diff --git a/include/petsc/private/pcbddcimpl.h b/include/petsc/private/pcbddcimpl.h index acf3f139bc4..256af22e7ab 100644 --- a/include/petsc/private/pcbddcimpl.h +++ b/include/petsc/private/pcbddcimpl.h @@ -94,6 +94,7 @@ typedef struct { /* Some defaults on selecting vertices and constraints*/ PetscBool use_local_adj; + PetscInt local_adj_square; PetscBool use_vertices; PetscBool use_faces; PetscBool use_edges; diff --git a/include/petsc/private/pcbddcprivateimpl.h b/include/petsc/private/pcbddcprivateimpl.h index b4e447cb3c9..57d9a1b2040 100644 --- a/include/petsc/private/pcbddcprivateimpl.h +++ b/include/petsc/private/pcbddcprivateimpl.h @@ -4,8 +4,11 @@ #include /* main functions */ -PETSC_EXTERN PetscErrorCode PCBDDCAnalyzeInterface(PC); -PETSC_EXTERN PetscErrorCode PCBDDCConstraintsSetUp(PC); +PETSC_INTERN PetscErrorCode PCBDDCAnalyzeInterface(PC); +PETSC_INTERN PetscErrorCode PCBDDCConstraintsSetUp(PC); + +/* load or dump customization */ +PETSC_EXTERN PetscErrorCode PCBDDCLoadOrViewCustomization(PC, PetscBool, const char *); /* reset functions */ PETSC_EXTERN PetscErrorCode PCBDDCResetTopography(PC); @@ -30,72 +33,72 @@ PETSC_EXTERN PetscErrorCode PCBDDCGraphGetDirichletDofsB(PCBDDCGraph, IS *); PETSC_EXTERN PetscErrorCode PCBDDCDestroyGraphCandidatesIS(void *); /* interface for scaling operator */ -PETSC_EXTERN PetscErrorCode PCBDDCScalingSetUp(PC); -PETSC_EXTERN PetscErrorCode PCBDDCScalingDestroy(PC); -PETSC_EXTERN PetscErrorCode PCBDDCScalingRestriction(PC, Vec, Vec); -PETSC_EXTERN PetscErrorCode PCBDDCScalingExtension(PC, Vec, Vec); +PETSC_INTERN PetscErrorCode PCBDDCScalingSetUp(PC); +PETSC_INTERN PetscErrorCode PCBDDCScalingDestroy(PC); +PETSC_INTERN PetscErrorCode PCBDDCScalingRestriction(PC, Vec, Vec); +PETSC_INTERN PetscErrorCode PCBDDCScalingExtension(PC, Vec, Vec); /* nullspace correction */ -PETSC_EXTERN PetscErrorCode PCBDDCNullSpaceAssembleCorrection(PC, PetscBool, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCNullSpaceAssembleCorrection(PC, PetscBool, PetscBool); /* utils */ -PETSC_EXTERN PetscErrorCode PCBDDCComputeLocalMatrix(PC, Mat); -PETSC_EXTERN PetscErrorCode PCBDDCSetUpLocalWorkVectors(PC); -PETSC_EXTERN PetscErrorCode PCBDDCSetUpSolvers(PC); -PETSC_EXTERN PetscErrorCode PCBDDCSetUpLocalScatters(PC); -PETSC_EXTERN PetscErrorCode PCBDDCSetUpLocalSolvers(PC, PetscBool, PetscBool); -PETSC_EXTERN PetscErrorCode 
PCBDDCSetUpCorrection(PC, PetscScalar **); -PETSC_EXTERN PetscErrorCode PCBDDCSetUpCoarseSolver(PC, PetscScalar *); -PETSC_EXTERN PetscErrorCode PCBDDCComputePrimalNumbering(PC, PetscInt *, PetscInt **); -PETSC_EXTERN PetscErrorCode PCBDDCScatterCoarseDataBegin(PC, InsertMode, ScatterMode); -PETSC_EXTERN PetscErrorCode PCBDDCScatterCoarseDataEnd(PC, InsertMode, ScatterMode); -PETSC_EXTERN PetscErrorCode PCBDDCApplyInterfacePreconditioner(PC, PetscBool); -PETSC_EXTERN PetscErrorCode PCBDDCOrthonormalizeVecs(PetscInt *, Vec[]); -PETSC_EXTERN PetscErrorCode PCBDDCSetUseExactDirichlet(PC, PetscBool); -PETSC_EXTERN PetscErrorCode PCBDDCSetLevel(PC, PetscInt); -PETSC_EXTERN PetscErrorCode PCBDDCGlobalToLocal(VecScatter, Vec, Vec, IS, IS *); -PETSC_EXTERN PetscErrorCode PCBDDCAdaptiveSelection(PC); -PETSC_EXTERN PetscErrorCode PCBDDCConsistencyCheckIS(PC, MPI_Op, IS *); -PETSC_EXTERN PetscErrorCode PCBDDCComputeLocalTopologyInfo(PC); -PETSC_EXTERN PetscErrorCode MatCreateSubMatrixUnsorted(Mat, IS, IS, Mat *); -PETSC_EXTERN PetscErrorCode PCBDDCDetectDisconnectedComponents(PC, PetscBool, PetscInt *, IS *[], IS *); -PETSC_EXTERN PetscErrorCode MatSeqAIJCompress(Mat, Mat *); -PETSC_EXTERN PetscErrorCode PCBDDCReuseSolversBenignAdapt(PCBDDCReuseSolvers, Vec, Vec, PetscBool, PetscBool); -PETSC_EXTERN PetscErrorCode PCBDDCComputeNoNetFlux(Mat, Mat, PetscBool, IS, PCBDDCGraph, MatNullSpace *); -PETSC_EXTERN PetscErrorCode PCBDDCNullSpaceCreate(MPI_Comm, PetscBool, PetscInt, Vec[], MatNullSpace *); -PETSC_EXTERN PetscErrorCode PCBDDCNedelecSupport(PC); -PETSC_EXTERN PetscErrorCode PCBDDCAddPrimalVerticesLocalIS(PC, IS); -PETSC_EXTERN PetscErrorCode PCBDDCComputeFakeChange(PC, PetscBool, PCBDDCGraph, PCBDDCSubSchurs, Mat *, IS *, IS *, PetscBool *); +PETSC_INTERN PetscErrorCode PCBDDCComputeLocalMatrix(PC, Mat); +PETSC_INTERN PetscErrorCode PCBDDCSetUpLocalWorkVectors(PC); +PETSC_INTERN PetscErrorCode PCBDDCSetUpSolvers(PC); +PETSC_INTERN PetscErrorCode PCBDDCSetUpLocalScatters(PC); +PETSC_INTERN PetscErrorCode PCBDDCSetUpLocalSolvers(PC, PetscBool, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCSetUpCorrection(PC, Mat *); +PETSC_INTERN PetscErrorCode PCBDDCSetUpCoarseSolver(PC, Mat); +PETSC_INTERN PetscErrorCode PCBDDCComputePrimalNumbering(PC, PetscInt *, PetscInt **); +PETSC_INTERN PetscErrorCode PCBDDCScatterCoarseDataBegin(PC, InsertMode, ScatterMode); +PETSC_INTERN PetscErrorCode PCBDDCScatterCoarseDataEnd(PC, InsertMode, ScatterMode); +PETSC_INTERN PetscErrorCode PCBDDCApplyInterfacePreconditioner(PC, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCOrthonormalizeVecs(PetscInt *, Vec[]); +PETSC_INTERN PetscErrorCode PCBDDCSetUseExactDirichlet(PC, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCSetLevel(PC, PetscInt); +PETSC_INTERN PetscErrorCode PCBDDCGlobalToLocal(VecScatter, Vec, Vec, IS, IS *); +PETSC_INTERN PetscErrorCode PCBDDCAdaptiveSelection(PC); +PETSC_INTERN PetscErrorCode PCBDDCConsistencyCheckIS(PC, MPI_Op, IS *); +PETSC_INTERN PetscErrorCode PCBDDCComputeLocalTopologyInfo(PC); +PETSC_INTERN PetscErrorCode PCBDDCDetectDisconnectedComponents(PC, PetscBool, PetscInt *, IS *[], IS *); +PETSC_INTERN PetscErrorCode PCBDDCReuseSolversBenignAdapt(PCBDDCReuseSolvers, Vec, Vec, PetscBool, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCComputeNoNetFlux(Mat, Mat, PetscBool, IS, PCBDDCGraph, MatNullSpace *); +PETSC_INTERN PetscErrorCode PCBDDCNullSpaceCreate(MPI_Comm, PetscBool, PetscInt, Vec[], MatNullSpace *); +PETSC_INTERN PetscErrorCode PCBDDCNedelecSupport(PC); +PETSC_INTERN PetscErrorCode 
PCBDDCAddPrimalVerticesLocalIS(PC, IS); +PETSC_INTERN PetscErrorCode PCBDDCComputeFakeChange(PC, PetscBool, PCBDDCGraph, PCBDDCSubSchurs, Mat *, IS *, IS *, PetscBool *); +PETSC_INTERN PetscErrorCode MatCreateSubMatrixUnsorted(Mat, IS, IS, Mat *); +PETSC_INTERN PetscErrorCode MatSeqAIJCompress(Mat, Mat *); /* benign subspace trick */ -PETSC_EXTERN PetscErrorCode PCBDDCBenignPopOrPushB0(PC, PetscBool); -PETSC_EXTERN PetscErrorCode PCBDDCBenignGetOrSetP0(PC, Vec, PetscBool); -PETSC_EXTERN PetscErrorCode PCBDDCBenignDetectSaddlePoint(PC, PetscBool, IS *); -PETSC_EXTERN PetscErrorCode PCBDDCBenignCheck(PC, IS); -PETSC_EXTERN PetscErrorCode PCBDDCBenignShellMat(PC, PetscBool); -PETSC_EXTERN PetscErrorCode PCBDDCBenignRemoveInterior(PC, Vec, Vec); +PETSC_INTERN PetscErrorCode PCBDDCBenignPopOrPushB0(PC, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCBenignGetOrSetP0(PC, Vec, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCBenignDetectSaddlePoint(PC, PetscBool, IS *); +PETSC_INTERN PetscErrorCode PCBDDCBenignCheck(PC, IS); +PETSC_INTERN PetscErrorCode PCBDDCBenignShellMat(PC, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCBenignRemoveInterior(PC, Vec, Vec); /* feti-dp */ -PETSC_EXTERN PetscErrorCode PCBDDCCreateFETIDPMatContext(PC, FETIDPMat_ctx *); -PETSC_EXTERN PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx); -PETSC_EXTERN PetscErrorCode PCBDDCCreateFETIDPPCContext(PC, FETIDPPC_ctx *); -PETSC_EXTERN PetscErrorCode PCBDDCSetupFETIDPPCContext(Mat, FETIDPPC_ctx); -PETSC_EXTERN PetscErrorCode FETIDPPCApply(PC, Vec, Vec); -PETSC_EXTERN PetscErrorCode FETIDPPCApplyTranspose(PC, Vec, Vec); -PETSC_EXTERN PetscErrorCode FETIDPPCView(PC, PetscViewer); -PETSC_EXTERN PetscErrorCode PCBDDCDestroyFETIDPPC(PC); -PETSC_EXTERN PetscErrorCode FETIDPMatMult(Mat, Vec, Vec); -PETSC_EXTERN PetscErrorCode FETIDPMatMultTranspose(Mat, Vec, Vec); +PETSC_INTERN PetscErrorCode PCBDDCCreateFETIDPMatContext(PC, FETIDPMat_ctx *); +PETSC_INTERN PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx); +PETSC_INTERN PetscErrorCode PCBDDCCreateFETIDPPCContext(PC, FETIDPPC_ctx *); +PETSC_INTERN PetscErrorCode PCBDDCSetupFETIDPPCContext(Mat, FETIDPPC_ctx); +PETSC_INTERN PetscErrorCode FETIDPPCApply(PC, Vec, Vec); +PETSC_INTERN PetscErrorCode FETIDPPCApplyTranspose(PC, Vec, Vec); +PETSC_INTERN PetscErrorCode FETIDPPCView(PC, PetscViewer); +PETSC_INTERN PetscErrorCode PCBDDCDestroyFETIDPPC(PC); +PETSC_INTERN PetscErrorCode FETIDPMatMult(Mat, Vec, Vec); +PETSC_INTERN PetscErrorCode FETIDPMatMultTranspose(Mat, Vec, Vec); -PETSC_EXTERN PetscErrorCode PCBDDCDestroyFETIDPMat(Mat); +PETSC_INTERN PetscErrorCode PCBDDCDestroyFETIDPMat(Mat); /* interface to SubSchurs */ -PETSC_EXTERN PetscErrorCode PCBDDCInitSubSchurs(PC); -PETSC_EXTERN PetscErrorCode PCBDDCSetUpSubSchurs(PC); +PETSC_INTERN PetscErrorCode PCBDDCInitSubSchurs(PC); +PETSC_INTERN PetscErrorCode PCBDDCSetUpSubSchurs(PC); /* sub schurs API */ -PETSC_EXTERN PetscErrorCode PCBDDCSubSchursCreate(PCBDDCSubSchurs *); -PETSC_EXTERN PetscErrorCode PCBDDCSubSchursInit(PCBDDCSubSchurs, const char *, IS, IS, PCBDDCGraph, ISLocalToGlobalMapping, PetscBool, PetscBool); -PETSC_EXTERN PetscErrorCode PCBDDCSubSchursReset(PCBDDCSubSchurs); -PETSC_EXTERN PetscErrorCode PCBDDCSubSchursDestroy(PCBDDCSubSchurs *); -PETSC_EXTERN PetscErrorCode PCBDDCSubSchursSetUp(PCBDDCSubSchurs, Mat, Mat, PetscBool, PetscInt[], PetscInt[], PetscInt, Vec, PetscBool, PetscBool, PetscBool, PetscInt, PetscInt[], IS[], Mat, IS); +PETSC_INTERN PetscErrorCode PCBDDCSubSchursCreate(PCBDDCSubSchurs *); 
+PETSC_INTERN PetscErrorCode PCBDDCSubSchursInit(PCBDDCSubSchurs, const char *, IS, IS, PCBDDCGraph, ISLocalToGlobalMapping, PetscBool, PetscBool); +PETSC_INTERN PetscErrorCode PCBDDCSubSchursReset(PCBDDCSubSchurs); +PETSC_INTERN PetscErrorCode PCBDDCSubSchursDestroy(PCBDDCSubSchurs *); +PETSC_INTERN PetscErrorCode PCBDDCSubSchursSetUp(PCBDDCSubSchurs, Mat, Mat, PetscBool, PetscInt[], PetscInt[], PetscInt, Vec, PetscBool, PetscBool, PetscBool, PetscInt, PetscInt[], IS[], Mat, IS); diff --git a/include/petsc/private/pcbddcstructsimpl.h b/include/petsc/private/pcbddcstructsimpl.h index b3ca1234ae5..0ce6bfc3cf2 100644 --- a/include/petsc/private/pcbddcstructsimpl.h +++ b/include/petsc/private/pcbddcstructsimpl.h @@ -10,26 +10,35 @@ #define PCBDDCGRAPH_LOCAL_PERIODIC_MARK -3 #define PCBDDCGRAPH_SPECIAL_MARK -4 -/* Structure for local graph partitioning */ +/* Metadata information on node */ +typedef struct { + PetscBool touched; + PetscInt subset; + PetscInt which_dof; + PetscInt special_dof; + PetscInt local_sub; + PetscInt count; + PetscInt *neighbours_set; + PetscInt local_groups_count; + PetscInt *local_groups; +} PCBDDCGraphNode; + +/* Data structure for local graph partitioning */ struct _PCBDDCGraph { PetscBool setupcalled; /* graph information */ ISLocalToGlobalMapping l2gmap; PetscInt nvtxs; PetscInt nvtxs_global; - PetscBT touched; - PetscInt *count; - PetscInt **neighbours_set; - PetscInt *subset; - PetscInt *which_dof; - PetscInt *special_dof; + PCBDDCGraphNode *nodes; PetscInt custom_minimal_size; PetscBool twodim; PetscBool twodimset; PetscBool has_dirichlet; + PetscBool multi_element; IS dirdofs; IS dirdofsB; - PetscInt commsizelimit; + PetscBool seq_graph; PetscInt maxcount; /* data for connected components */ PetscInt ncc; @@ -42,9 +51,10 @@ struct _PCBDDCGraph { PetscInt **subset_idxs; PetscInt *subset_ncc; PetscInt *subset_ref_node; - /* data for periodic dofs */ - PetscInt *mirrors; - PetscInt **mirrors_set; + PetscInt *gsubset_size; + PetscInt *interface_ref_rsize; + PetscSF interface_ref_sf; + PetscSF interface_subset_sf; /* placeholders for connectivity relation between dofs */ PetscInt nvtxs_csr; PetscInt *xadj; diff --git a/include/petsc/private/pcgamgimpl.h b/include/petsc/private/pcgamgimpl.h index f5889a5692e..9b0d76b7a5c 100644 --- a/include/petsc/private/pcgamgimpl.h +++ b/include/petsc/private/pcgamgimpl.h @@ -28,9 +28,10 @@ typedef struct gamg_TAG { PetscBool cpu_pin_coarse_grids; PetscInt min_eq_proc; PetscInt asm_hem_aggs; + MatCoarsen asm_crs; /* used to generate ASM aggregates */ PetscInt coarse_eq_limit; PetscReal threshold_scale; - PetscReal threshold[PETSC_MG_MAXLEVELS]; /* common quatity to many AMG methods so keep it up here */ + PetscReal threshold[PETSC_MG_MAXLEVELS]; /* common quantity to many AMG methods so keep it up here */ PetscInt level_reduction_factors[PETSC_MG_MAXLEVELS]; PetscInt current_level; /* stash construction state */ /* these 4 are all related to the method data and should be in the subctx */ diff --git a/include/petsc/private/pcmgimpl.h b/include/petsc/private/pcmgimpl.h index db5e557dd7d..3ac4b5032a5 100644 --- a/include/petsc/private/pcmgimpl.h +++ b/include/petsc/private/pcmgimpl.h @@ -6,7 +6,7 @@ #include #include #include -#define PETSC_MG_MAXLEVELS 10 +#define PETSC_MG_MAXLEVELS 15 /* Each level has its own copy of this data. Level (0) is always the coarsest level and Level (levels-1) is the finest. 
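Since PETSC_MG_MAXLEVELS statically sizes the per-level arrays seen above (for example PCGAMG's threshold[] and level_reduction_factors[]), raising it from 10 to 15 increases the deepest hierarchy a user may request before setup is rejected. A minimal sketch of asking for a depth the new bound permits, assuming an existing KSP named ksp; the level count of 12 is illustrative only:

  PC pc;
  PetscCall(KSPGetPC(ksp, &pc));          /* the preconditioner context attached to the solver */
  PetscCall(PCSetType(pc, PCMG));         /* select the multigrid preconditioner */
  PetscCall(PCMGSetLevels(pc, 12, NULL)); /* 12 levels: above the old cap of 10, within the new 15 */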
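The PCBDDCGraphNode change earlier in this patch replaces the parallel arrays that _PCBDDCGraph used to carry (touched, count, subset, ...) with one array of per-node structs, so all metadata for a dof is read from a single record. A hedged traversal sketch under the new layout, assuming a set-up PCBDDCGraph named graph; the selection criterion is illustrative only:

  for (PetscInt i = 0; i < graph->nvtxs; i++) {
    PCBDDCGraphNode *node = &graph->nodes[i];
    /* a count above one marks a dof shared with neighboring subdomains */
    if (!node->touched && node->count > 1) {
      /* node->neighbours_set[] holds the subdomains sharing this dof */
    }
  }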
diff --git a/include/petsc/private/petscdsimpl.h b/include/petsc/private/petscdsimpl.h index a18b7920f23..c1543d07a23 100644 --- a/include/petsc/private/petscdsimpl.h +++ b/include/petsc/private/petscdsimpl.h @@ -88,14 +88,15 @@ struct _PetscDSOps { struct _p_PetscDS { PETSCHEADER(struct _PetscDSOps); - void *data; /* Implementation object */ - PetscDS *subprobs; /* The subspaces for each dimension */ - PetscBool setup; /* Flag for setup */ - PetscInt dimEmbed; /* The real space coordinate dimension */ - PetscInt Nf; /* The number of solution fields */ - PetscObject *disc; /* The discretization for each solution field (PetscFE, PetscFV, etc.) */ - PetscBool *cohesive; /* Flag for cohesive discretization */ - PetscBool isCohesive; /* We are on a cohesive cell, meaning lower dimensional FE used on a 0-volume cell. Normal fields appear on both endcaps, whereas cohesive field only appear once in the middle */ + void *data; /* Implementation object */ + PetscDS *subprobs; /* The subspaces for each dimension */ + PetscBool setup; /* Flag for setup */ + PetscInt dimEmbed; /* The real space coordinate dimension */ + PetscInt Nf; /* The number of solution fields */ + PetscObject *disc; /* The discretization for each solution field (PetscFE, PetscFV, etc.) */ + PetscBool *cohesive; /* Flag for cohesive discretization */ + PetscBool isCohesive; /* We are on a cohesive cell, meaning lower dimensional FE used on a 0-volume cell. Normal fields appear on both endcaps, whereas cohesive field only appear once in the middle */ + PetscInt printIntegrate; /* Debugging level for kernels */ /* Quadrature */ PetscBool forceQuad; /* Flag to force matching quadratures in discretizations */ IS *quadPerm[DM_NUM_POLYTOPES]; /* qP[ct][o]: q point permutation for orientation o of integ domain */ diff --git a/include/petsc/private/petscimpl.h b/include/petsc/private/petscimpl.h index e7e91ef7dc3..b254080d7ac 100644 --- a/include/petsc/private/petscimpl.h +++ b/include/petsc/private/petscimpl.h @@ -478,7 +478,7 @@ PETSC_ASSERT_POINTER_IMPL_SPECIALIZATION(int32_t, PETSC_INT32); PETSC_ASSERT_POINTER_IMPL_SPECIALIZATION(uint32_t, PETSC_INT32); PETSC_ASSERT_POINTER_IMPL_SPECIALIZATION(int64_t, PETSC_INT64); PETSC_ASSERT_POINTER_IMPL_SPECIALIZATION(uint64_t, PETSC_INT64); - #if !defined(PETSC_SKIP_COMPLEX) + #if defined(PETSC_HAVE_COMPLEX) PETSC_ASSERT_POINTER_IMPL_SPECIALIZATION(PetscComplex, PETSC_COMPLEX); #endif @@ -494,7 +494,7 @@ PETSC_ASSERT_POINTER_IMPL_SPECIALIZATION(PetscComplex, PETSC_COMPLEX); #elif PETSC_C_VERSION >= 11 #define PETSC_GENERIC_CV(type, result) type * : result, const type * : result, volatile type * : result, const volatile type * : result - #if !PetscDefined(SKIP_COMPLEX) + #if PetscDefined(HAVE_COMPLEX) #define PETSC_GENERIC_CV_COMPLEX(result) PETSC_GENERIC_CV(PetscComplex, result) #else #define PETSC_GENERIC_CV_COMPLEX(result) @@ -518,7 +518,7 @@ PETSC_ASSERT_POINTER_IMPL_SPECIALIZATION(PetscComplex, PETSC_COMPLEX); #define PETSC_GENERIC_CV_STRINGIZE(type) PETSC_GENERIC_CV(type, PetscStringize(type)) - #if !PetscDefined(SKIP_COMPLEX) + #if PetscDefined(HAVE_COMPLEX) #define PETSC_GENERIC_CV_STRINGIZE_COMPLEX PETSC_GENERIC_CV_STRINGIZE(PetscComplex) #else #define PETSC_GENERIC_CV_STRINGIZE_COMPLEX @@ -1477,15 +1477,16 @@ static inline PetscErrorCode PetscSpinlockDestroy(PetscSpinlock *ck_spinlock) } #elif (defined(__cplusplus) && defined(PETSC_HAVE_CXX_ATOMIC)) || (!defined(__cplusplus) && defined(PETSC_HAVE_STDATOMIC_H)) #if defined(__cplusplus) + // See the example at 
https://en.cppreference.com/w/cpp/atomic/atomic_flag #include #define petsc_atomic_flag std::atomic_flag - #define petsc_atomic_flag_test_and_set(p) std::atomic_flag_test_and_set_explicit(p, std::memory_order_relaxed) - #define petsc_atomic_flag_clear(p) std::atomic_flag_clear_explicit(p, std::memory_order_relaxed) + #define petsc_atomic_flag_test_and_set(p) std::atomic_flag_test_and_set_explicit(p, std::memory_order_acquire) + #define petsc_atomic_flag_clear(p) std::atomic_flag_clear_explicit(p, std::memory_order_release) #else #include #define petsc_atomic_flag atomic_flag - #define petsc_atomic_flag_test_and_set(p) atomic_flag_test_and_set_explicit(p, memory_order_relaxed) - #define petsc_atomic_flag_clear(p) atomic_flag_clear_explicit(p, memory_order_relaxed) + #define petsc_atomic_flag_test_and_set(p) atomic_flag_test_and_set_explicit(p, memory_order_acquire) + #define petsc_atomic_flag_clear(p) atomic_flag_clear_explicit(p, memory_order_release) #endif typedef petsc_atomic_flag PetscSpinlock; diff --git a/include/petsc/private/vecimpl.h b/include/petsc/private/vecimpl.h index 4c2728a579f..c028cffc0ee 100644 --- a/include/petsc/private/vecimpl.h +++ b/include/petsc/private/vecimpl.h @@ -159,9 +159,9 @@ struct _p_Vec { PetscBool array_gotten; VecStash stash, bstash; /* used for storing off-proc values during assembly */ PetscBool petscnative; /* means the ->data starts with VECHEADER and can use VecGetArrayFast()*/ + PetscInt lock; /* lock state. vector can be free (=0), locked for read (>0) or locked for write(<0) */ #if PetscDefined(USE_DEBUG) PetscStack lockstack; /* the file,func,line of where locks are added */ - PetscInt lock; /* lock state. vector can be free (=0), locked for read (>0) or locked for write(<0) */ #endif PetscOffloadMask offloadmask; /* a mask which indicates where the valid vector data is (GPU, CPU or both) */ #if defined(PETSC_HAVE_DEVICE) @@ -469,3 +469,11 @@ PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecWAXPYAsync_Private(Vec, PetscScala PetscUseTypeMethod(v, name, __VA_ARGS__); \ } \ } while (0) + +// return in lda the vector's local size rounded up so that lda * sizeof(PetscScalar) is a multiple of alignment bytes; lda is a pointer to a PetscInt +#define VecGetLocalSizeAligned(v, alignment, lda) \ + do { \ + PetscInt n = (v)->map->n; \ + const size_t s = (alignment) / sizeof(PetscScalar); \ + *(lda) = ((n + s - 1) / s) * s; \ + } while (0) diff --git a/include/petsc/private/viewercgnsimpl.h b/include/petsc/private/viewercgnsimpl.h index a6d1365ef4b..44a34abf5e0 100644 --- a/include/petsc/private/viewercgnsimpl.h +++ b/include/petsc/private/viewercgnsimpl.h @@ -24,5 +24,31 @@ typedef struct { PetscCheck(!_cgns_ier, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS error %d %s", _cgns_ier, cg_get_error()); \ } while (0) +#if !defined(PRIdCGSIZE) + #if CG_SIZEOF_SIZE == 32 + // cgsize_t is defined as int + #define MPIU_CGSIZE MPI_INT + #define PRIdCGSIZE "d" + #else + #if defined(_WIN32) + // cgsize_t is defined as __int64, which is synonymous with long long + #define MPIU_CGSIZE MPI_LONG_LONG + #define PRIdCGSIZE "lld" + #else + // cgsize_t is defined as long + #define MPIU_CGSIZE MPI_LONG + #define PRIdCGSIZE "ld" + #endif + #endif +#else + #if CG_SIZEOF_SIZE == 32 + // cgsize_t is defined as int32_t + #define MPIU_CGSIZE MPI_INT32_T + #else + // cgsize_t is defined as int64_t + #define MPIU_CGSIZE MPI_INT64_T + #endif +#endif + PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode PetscViewerCGNSCheckBatch_Internal(PetscViewer); PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode PetscViewerCGNSFileOpen_Internal(PetscViewer, PetscInt); diff --git 
a/include/petscbag.h index 9298a44252d..ed4ac0126f5 100644 --- a/include/petscbag.h +++ b/include/petscbag.h @@ -26,6 +26,8 @@ PetscCall(PetscBagSetName(bag,"MyParameters")); PetscCall(PetscBagRegisterInt(bag,&params.height,22,"height","Height of the water tower")); .ve + Note: + `PetscBag` is a C struct; it is not a `PetscObject` .seealso: `PetscBagSetName()`, `PetscBagGetName()`, `PetscBagView()`, `PetscBagLoad()`, `PetscBagGetData()` `PetscBagRegisterReal()`, `PetscBagRegisterInt()`, `PetscBagRegisterBool()`, `PetscBagRegisterScalar()` @@ -50,7 +52,7 @@ PETSC_EXTERN PetscErrorCode PetscBagRegisterBoolArray(PetscBag, void *, PetscInt PETSC_EXTERN PetscErrorCode PetscBagGetNames(PetscBag, const char *[]); PETSC_EXTERN PetscErrorCode PetscBagSetFromOptions(PetscBag); -PETSC_EXTERN PetscErrorCode PetscBagGetName(PetscBag, char **); +PETSC_EXTERN PetscErrorCode PetscBagGetName(PetscBag, const char **); PETSC_EXTERN PetscErrorCode PetscBagSetName(PetscBag, const char *, const char *); PETSC_EXTERN PetscErrorCode PetscBagSetOptionsPrefix(PetscBag, const char *); diff --git a/include/petscblaslapack.h b/include/petscblaslapack.h index 8898cb54107..4c7cd0ee0be 100644 --- a/include/petscblaslapack.h +++ b/include/petscblaslapack.h @@ -120,6 +120,7 @@ BLAS_EXTERN void BLASsyr2k_(const char *, const char *, const PetscBLASInt *, co BLAS_EXTERN void BLASgemv_(const char *, const PetscBLASInt *, const PetscBLASInt *, const PetscScalar *, const PetscScalar *, const PetscBLASInt *, const PetscScalar *, const PetscBLASInt *, const PetscScalar *, PetscScalar *, const PetscBLASInt *); BLAS_EXTERN void LAPACKgetrs_(const char *, const PetscBLASInt *, const PetscBLASInt *, const PetscScalar *, const PetscBLASInt *, const PetscBLASInt *, PetscScalar *, const PetscBLASInt *, PetscBLASInt *); BLAS_EXTERN void BLAStrmv_(const char *, const char *, const char *, const PetscBLASInt *, const PetscScalar *, const PetscBLASInt *, PetscScalar *, const PetscBLASInt *); +BLAS_EXTERN void BLAStrsv_(const char *, const char *, const char *, const PetscBLASInt *, const PetscScalar *, const PetscBLASInt *, PetscScalar *, const PetscBLASInt *); BLAS_EXTERN void BLASgemm_(const char *, const char *, const PetscBLASInt *, const PetscBLASInt *, const PetscBLASInt *, const PetscScalar *, const PetscScalar *, const PetscBLASInt *, const PetscScalar *, const PetscBLASInt *, const PetscScalar *, PetscScalar *, const PetscBLASInt *); BLAS_EXTERN void BLASREALgemm_(const char *, const char *, const PetscBLASInt *, const PetscBLASInt *, const PetscBLASInt *, const PetscReal *, const PetscReal *, const PetscBLASInt *, const PetscReal *, const PetscBLASInt *, const PetscReal *, PetscReal *, const PetscBLASInt *); BLAS_EXTERN void BLASsymm_(const char *, const char *, const PetscBLASInt *, const PetscBLASInt *, const PetscScalar *, const PetscScalar *, const PetscBLASInt *, const PetscScalar *, const PetscBLASInt *, const PetscScalar *, PetscScalar *, const PetscBLASInt *); diff --git a/include/petscblaslapack_mangle.h b/include/petscblaslapack_mangle.h index d0f22d2a287..087acfc9427 100644 --- a/include/petscblaslapack_mangle.h +++ b/include/petscblaslapack_mangle.h @@ -161,6 +161,7 @@ #define BLASgemv_ PETSCBLAS(gemv, GEMV) #define LAPACKgetrs_ PETSCBLAS(getrs, GETRS) #define BLAStrmv_ PETSCBLAS(trmv, TRMV) +#define BLAStrsv_ PETSCBLAS(trsv, TRSV) #define BLASgemm_ PETSCBLAS(gemm, GEMM) #define BLASsymm_ PETSCBLAS(symm, SYMM) #define BLASsyrk_ PETSCBLAS(syrk, SYRK) diff --git a/include/petscdm.h 
index 07559521dfd..78d04921c64 100644 --- a/include/petscdm.h +++ b/include/petscdm.h @@ -46,6 +46,7 @@ PETSC_EXTERN const char *const DMBoundaryConditionTypes[]; PETSC_EXTERN const char *const DMBlockingTypes[]; PETSC_EXTERN PetscFunctionList DMList; PETSC_EXTERN DMGeneratorFunctionList DMGenerateList; +PETSC_EXTERN PetscFunctionList DMGeomModelList; PETSC_EXTERN PetscErrorCode DMCreate(MPI_Comm, DM *); PETSC_EXTERN PetscErrorCode DMClone(DM, DM *); PETSC_EXTERN PetscErrorCode DMSetType(DM, DMType); @@ -112,6 +113,9 @@ PETSC_EXTERN PetscErrorCode DMGenerate(DM, const char[], PetscBool, DM *); PETSC_EXTERN PetscErrorCode DMGenerateRegister(const char[], PetscErrorCode (*)(DM, PetscBool, DM *), PetscErrorCode (*)(DM, PetscReal *, DM *), PetscErrorCode (*)(DM, Vec, DMLabel, DMLabel, DM *), PetscInt); PETSC_EXTERN PetscErrorCode DMGenerateRegisterAll(void); PETSC_EXTERN PetscErrorCode DMGenerateRegisterDestroy(void); +PETSC_EXTERN PetscErrorCode DMGeomModelRegister(const char[], PetscErrorCode (*)(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[])); +PETSC_EXTERN PetscErrorCode DMGeomModelRegisterAll(void); +PETSC_EXTERN PetscErrorCode DMGeomModelRegisterDestroy(void); PETSC_EXTERN PetscErrorCode DMAdaptLabel(DM, DMLabel, DM *); PETSC_EXTERN PetscErrorCode DMAdaptMetric(DM, Vec, DMLabel, DMLabel, DM *); @@ -168,6 +172,8 @@ PETSC_EXTERN PetscErrorCode DMGetLocalBoundingBox(DM, PetscReal[], PetscReal[]); PETSC_EXTERN PetscErrorCode DMGetBoundingBox(DM, PetscReal[], PetscReal[]); PETSC_EXTERN PetscErrorCode DMSetCoordinateDisc(DM, PetscFE, PetscBool); PETSC_EXTERN PetscErrorCode DMLocatePoints(DM, Vec, DMPointLocationType, PetscSF *); +PETSC_EXTERN PetscErrorCode DMSnapToGeomModel(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); +PETSC_EXTERN PetscErrorCode DMSetSnapToGeomModel(DM, const char[]); /* Periodicity support */ PETSC_EXTERN PetscErrorCode DMGetPeriodicity(DM, const PetscReal *[], const PetscReal *[], const PetscReal *[]); @@ -176,6 +182,8 @@ PETSC_EXTERN PetscErrorCode DMLocalizeCoordinate(DM, const PetscScalar[], PetscB PETSC_EXTERN PetscErrorCode DMLocalizeCoordinates(DM); PETSC_EXTERN PetscErrorCode DMGetCoordinatesLocalized(DM, PetscBool *); PETSC_EXTERN PetscErrorCode DMGetCoordinatesLocalizedLocal(DM, PetscBool *); +PETSC_EXTERN PetscErrorCode DMGetSparseLocalize(DM, PetscBool *); +PETSC_EXTERN PetscErrorCode DMSetSparseLocalize(DM, PetscBool); /* block hook interface */ PETSC_EXTERN PetscErrorCode DMSubDomainHookAdd(DM, PetscErrorCode (*)(DM, DM, void *), PetscErrorCode (*)(DM, VecScatter, VecScatter, DM, void *), void *); @@ -292,7 +300,8 @@ PETSC_EXTERN PetscErrorCode DMSetDefaultConstraints(DM, PetscSection, Mat, Vec); PETSC_EXTERN PetscErrorCode DMGetOutputDM(DM, DM *); PETSC_EXTERN PetscErrorCode DMGetOutputSequenceNumber(DM, PetscInt *, PetscReal *); PETSC_EXTERN PetscErrorCode DMSetOutputSequenceNumber(DM, PetscInt, PetscReal); -PETSC_EXTERN PetscErrorCode DMOutputSequenceLoad(DM, PetscViewer, const char *, PetscInt, PetscReal *); +PETSC_EXTERN PetscErrorCode DMOutputSequenceLoad(DM, PetscViewer, const char[], PetscInt, PetscReal *); +PETSC_EXTERN PetscErrorCode DMGetOutputSequenceLength(DM, PetscViewer, const char[], PetscInt *); PETSC_EXTERN PetscErrorCode DMGetNumFields(DM, PetscInt *); PETSC_EXTERN PetscErrorCode DMSetNumFields(DM, PetscInt); diff --git a/include/petscdmda.h b/include/petscdmda.h index ef7dd7ec75e..ca76bc03454 100644 --- a/include/petscdmda.h +++ b/include/petscdmda.h @@ -231,3 +231,5 @@ PETSC_EXTERN PetscErrorCode 
DMDAConvertToCell(DM, MatStencil, PetscInt *); PETSC_EXTERN PetscErrorCode DMDASetVertexCoordinates(DM, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal); PETSC_EXTERN PetscErrorCode DMDASetPreallocationCenterDimension(DM, PetscInt); PETSC_EXTERN PetscErrorCode DMDAGetPreallocationCenterDimension(DM, PetscInt *); + +PETSC_EXTERN PetscErrorCode DMDAVTKWriteAll(PetscObject, PetscViewer); diff --git a/include/petscdmda_kokkos.hpp b/include/petscdmda_kokkos.hpp index cd2e0d1a865..ff73d4f2c45 100644 --- a/include/petscdmda_kokkos.hpp +++ b/include/petscdmda_kokkos.hpp @@ -24,7 +24,7 @@ PetscErrorCode DMDAVecGetKokkosOffsetView(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); PetscErrorCode DMDAVecGetKokkosOffsetViewWrite(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + da - the distributed array @@ -152,7 +152,7 @@ PetscErrorCode DMDAVecGetKokkosOffsetViewWrite(DM, Vec, Kokkos::Experimental::Of PetscErrorCode DMDAVecRestoreKokkosOffsetView(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); PetscErrorCode DMDAVecRestoreKokkosOffsetViewWrite(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + da - the distributed array @@ -204,7 +204,7 @@ PetscErrorCode DMDAVecRestoreKokkosOffsetViewWrite(DM, Vec, Kokkos::Experimental PetscErrorCode DMDAVecGetKokkosOffsetViewDOF(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); PetscErrorCode DMDAVecGetKokkosOffsetViewDOFWrite(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + da - the distributed array @@ -316,7 +316,7 @@ PetscErrorCode DMDAVecGetKokkosOffsetViewDOFWrite(DM, Vec, Kokkos::Experimental: PetscErrorCode DMDAVecRestoreKokkosOffsetViewDOF(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); PetscErrorCode DMDAVecRestoreKokkosOffsetViewDOFWrite(DM da,Vec v,Kokkos::Experimental::OffsetView* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + da - the distributed array diff --git a/include/petscdmlabel.h b/include/petscdmlabel.h index 6ce809a3e7c..92b016d5e0c 100644 --- a/include/petscdmlabel.h +++ b/include/petscdmlabel.h @@ -68,6 +68,7 @@ PETSC_EXTERN PetscErrorCode DMLabelDestroyIndex(DMLabel); PETSC_EXTERN PetscErrorCode DMLabelHasValue(DMLabel, PetscInt, PetscBool *); PETSC_EXTERN PetscErrorCode DMLabelHasPoint(DMLabel, PetscInt, PetscBool *); PETSC_EXTERN PetscErrorCode DMLabelGetBounds(DMLabel, PetscInt *, PetscInt *); +PETSC_EXTERN PetscErrorCode DMLabelGetValueBounds(DMLabel, PetscInt *, PetscInt *); PETSC_EXTERN PetscErrorCode DMLabelFilter(DMLabel, PetscInt, PetscInt); PETSC_EXTERN PetscErrorCode DMLabelPermute(DMLabel, IS, DMLabel *); PETSC_EXTERN PetscErrorCode DMLabelDistribute(DMLabel, PetscSF, DMLabel *); diff --git a/include/petscdmmoab.h b/include/petscdmmoab.h index 6c7bd46b2c6..088e6aca745 100644 --- a/include/petscdmmoab.h +++ b/include/petscdmmoab.h @@ -2,8 +2,8 @@ #include /*I "petscvec.h" I*/ #include /*I "petscmat.h" I*/ -#include /*I "petscdm.h" I*/ -#include /*I "petscdt.h" I*/ +#include /*I "petscdm.h" I*/ +#include /*I "petscdt.h" I*/ #include #include /*I "moab/Core.hpp" I*/ diff --git a/include/petscdmplex.h b/include/petscdmplex.h index c9f8c750052..7be02a25c5e 100644 --- a/include/petscdmplex.h +++ b/include/petscdmplex.h @@ -61,6 +61,7 @@ PETSC_EXTERN PetscErrorCode DMPlexStratify(DM); PETSC_EXTERN 
PetscErrorCode DMPlexEqual(DM, DM, PetscBool *); PETSC_EXTERN PetscErrorCode DMPlexOrientPoint(DM, PetscInt, PetscInt); PETSC_EXTERN PetscErrorCode DMPlexOrient(DM); +PETSC_EXTERN PetscErrorCode DMPlexOrientLabel(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexPreallocateOperator(DM, PetscInt, PetscInt[], PetscInt[], PetscInt[], PetscInt[], Mat, PetscBool); PETSC_EXTERN PetscErrorCode DMPlexGetPointLocal(DM, PetscInt, PetscInt *, PetscInt *); PETSC_EXTERN PetscErrorCode DMPlexPointLocalRead(DM, PetscInt, const PetscScalar *, void *); @@ -116,6 +117,7 @@ PETSC_EXTERN PetscErrorCode DMPlexFilter(DM, DMLabel, PetscInt, PetscBool, Petsc PETSC_EXTERN PetscErrorCode DMPlexGetCellNumbering(DM, IS *); PETSC_EXTERN PetscErrorCode DMPlexGetVertexNumbering(DM, IS *); PETSC_EXTERN PetscErrorCode DMPlexCreatePointNumbering(DM, IS *); +PETSC_EXTERN PetscErrorCode DMPlexCreateEdgeNumbering(DM, IS *); PETSC_EXTERN PetscErrorCode DMPlexCreateRankField(DM, Vec *); PETSC_EXTERN PetscErrorCode DMPlexCreateLabelField(DM, DMLabel, Vec *); @@ -171,7 +173,7 @@ PETSC_EXTERN PetscErrorCode DMPlexCreateBoxMesh(MPI_Comm, PetscInt, PetscBool, c PETSC_EXTERN PetscErrorCode DMPlexCreateBoxSurfaceMesh(MPI_Comm, PetscInt, const PetscInt[], const PetscReal[], const PetscReal[], PetscBool, DM *); PETSC_EXTERN PetscErrorCode DMPlexCreateSphereMesh(MPI_Comm, PetscInt, PetscBool, PetscReal, DM *); PETSC_EXTERN PetscErrorCode DMPlexCreateBallMesh(MPI_Comm, PetscInt, PetscReal, DM *); -PETSC_EXTERN PetscErrorCode DMPlexCreateHexCylinderMesh(MPI_Comm, DMBoundaryType, DM *); +PETSC_EXTERN PetscErrorCode DMPlexCreateHexCylinderMesh(MPI_Comm, DMBoundaryType, PetscInt, DM *); PETSC_EXTERN PetscErrorCode DMPlexCreateTPSMesh(MPI_Comm, DMPlexTPSType, const PetscInt[], const DMBoundaryType[], PetscBool, PetscInt, PetscInt, PetscReal, DM *); PETSC_EXTERN PetscErrorCode DMPlexCreateWedgeCylinderMesh(MPI_Comm, PetscInt, PetscBool, DM *); PETSC_EXTERN PetscErrorCode DMPlexCreateWedgeBoxMesh(MPI_Comm, const PetscInt[], const PetscReal[], const PetscReal[], const DMBoundaryType[], PetscBool, PetscBool, DM *); @@ -179,9 +181,9 @@ PETSC_EXTERN PetscErrorCode DMPlexCreateHypercubicMesh(MPI_Comm, PetscInt, const PETSC_EXTERN PetscErrorCode DMPlexExtrude(DM, PetscInt, PetscReal, PetscBool, PetscBool, PetscBool, const PetscReal[], const PetscReal[], DM *); PETSC_EXTERN PetscErrorCode DMPlexInflateToGeomModel(DM); -PETSC_EXTERN PetscErrorCode DMPlexSetIsoperiodicFaceSF(DM, PetscSF); -PETSC_EXTERN PetscErrorCode DMPlexGetIsoperiodicFaceSF(DM, PetscSF *); -PETSC_EXTERN PetscErrorCode DMPlexSetIsoperiodicFaceTransform(DM, const PetscScalar[]); +PETSC_EXTERN PetscErrorCode DMPlexSetIsoperiodicFaceSF(DM, PetscInt, PetscSF *); +PETSC_EXTERN PetscErrorCode DMPlexGetIsoperiodicFaceSF(DM, PetscInt *, const PetscSF **); +PETSC_EXTERN PetscErrorCode DMPlexSetIsoperiodicFaceTransform(DM, PetscInt, const PetscScalar *); PETSC_EXTERN PetscErrorCode DMPlexCheck(DM); PETSC_EXTERN PetscErrorCode DMPlexCheckSymmetry(DM); @@ -190,6 +192,7 @@ PETSC_EXTERN PetscErrorCode DMPlexCheckFaces(DM, PetscInt); PETSC_EXTERN PetscErrorCode DMPlexCheckGeometry(DM); PETSC_EXTERN PetscErrorCode DMPlexCheckPointSF(DM, PetscSF, PetscBool); PETSC_EXTERN PetscErrorCode DMPlexCheckInterfaceCones(DM); +PETSC_EXTERN PetscErrorCode DMPlexCheckOrphanVertices(DM); PETSC_EXTERN PetscErrorCode DMPlexCheckCellShape(DM, PetscBool, PetscReal); PETSC_EXTERN PetscErrorCode DMPlexComputeOrthogonalQuality(DM, PetscFV, PetscReal, Vec *, DMLabel *); @@ -279,7 +282,7 @@ PETSC_EXTERN PetscErrorCode 
DMGetEnclosureRelation(DM, DM, DMEnclosureType *); PETSC_EXTERN PetscErrorCode DMGetEnclosurePoint(DM, DM, DMEnclosureType, PetscInt, PetscInt *); PETSC_EXTERN PetscErrorCode DMPlexLabelComplete(DM, DMLabel); -PETSC_EXTERN PetscErrorCode DMPlexLabelCohesiveComplete(DM, DMLabel, DMLabel, PetscInt, PetscBool, DM); +PETSC_EXTERN PetscErrorCode DMPlexLabelCohesiveComplete(DM, DMLabel, DMLabel, PetscInt, PetscBool, PetscBool, DM); PETSC_EXTERN PetscErrorCode DMPlexLabelAddCells(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexLabelAddFaceCells(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexLabelClearCells(DM, DMLabel); @@ -392,7 +395,7 @@ PETSC_EXTERN PetscErrorCode DMPlexComputeL2FieldDiff(DM, PetscReal, PetscErrorCo PETSC_EXTERN PetscErrorCode DMPlexComputeL2DiffVec(DM, PetscReal, PetscErrorCode (**)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar *, void *), void **, Vec, Vec); PETSC_EXTERN PetscErrorCode DMPlexComputeCellwiseIntegralFEM(DM, Vec, Vec, void *); PETSC_EXTERN PetscErrorCode DMPlexComputeIntegralFEM(DM, Vec, PetscScalar *, void *); -PETSC_EXTERN PetscErrorCode DMPlexComputeBdIntegral(DM, Vec, DMLabel, PetscInt, const PetscInt[], void (*)(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]), PetscScalar *, void *); +PETSC_EXTERN PetscErrorCode DMPlexComputeBdIntegral(DM, Vec, DMLabel, PetscInt, const PetscInt[], void (**)(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]), PetscScalar *, void *); PETSC_EXTERN PetscErrorCode DMPlexComputeInterpolatorNested(DM, DM, PetscBool, Mat, void *); PETSC_EXTERN PetscErrorCode DMPlexComputeInterpolatorGeneral(DM, DM, Mat, void *); PETSC_EXTERN PetscErrorCode DMPlexComputeClementInterpolant(DM, Vec, Vec); @@ -457,7 +460,6 @@ PETSC_EXTERN PetscErrorCode DMPlexNaturalToGlobalEnd(DM, Vec, Vec); PETSC_EXTERN PetscErrorCode DMPlexCreateNaturalVector(DM, Vec *); /* mesh adaptation */ -PETSC_EXTERN PetscErrorCode DMPlexSnapToGeomModel(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); PETSC_EXTERN PetscErrorCode DMPlexMetricSetFromOptions(DM); PETSC_EXTERN PetscErrorCode DMPlexMetricSetIsotropic(DM, PetscBool); PETSC_EXTERN PetscErrorCode DMPlexMetricIsIsotropic(DM, PetscBool *); @@ -541,3 +543,15 @@ PETSC_EXTERN PetscErrorCode DMPlexPointQueueFront(DMPlexPointQueue, PetscInt *); PETSC_EXTERN PetscErrorCode DMPlexPointQueueBack(DMPlexPointQueue, PetscInt *); PETSC_EXTERN PetscBool DMPlexPointQueueEmpty(DMPlexPointQueue); PETSC_EXTERN PetscErrorCode DMPlexPointQueueEmptyCollective(PetscObject, DMPlexPointQueue, PetscBool *); + +#if defined(PETSC_HAVE_HDF5) +struct _n_DMPlexStorageVersion { + int major, minor, subminor; +}; +typedef struct _n_DMPlexStorageVersion *DMPlexStorageVersion; + +PETSC_EXTERN PetscErrorCode PetscViewerHDF5GetDMPlexStorageVersionReading(PetscViewer, DMPlexStorageVersion *); +PETSC_EXTERN PetscErrorCode PetscViewerHDF5SetDMPlexStorageVersionReading(PetscViewer, DMPlexStorageVersion); +PETSC_EXTERN PetscErrorCode PetscViewerHDF5GetDMPlexStorageVersionWriting(PetscViewer, 
DMPlexStorageVersion *); +PETSC_EXTERN PetscErrorCode PetscViewerHDF5SetDMPlexStorageVersionWriting(PetscViewer, DMPlexStorageVersion); +#endif diff --git a/include/petscdmplextransform.h b/include/petscdmplextransform.h index ecbdf01e62c..6a734588141 100644 --- a/include/petscdmplextransform.h +++ b/include/petscdmplextransform.h @@ -5,6 +5,16 @@ PETSC_EXTERN PetscClassId DMPLEXTRANSFORM_CLASSID; +/*J + DMPlexTransformType - String with the name of a PETSc DMPlexTransform type + + Level: beginner + + Note: + See [](plex_transform_table) for a table of available transform types + +.seealso: [](plex_transform_table), [](ch_unstructured), `DMPlexTransformCreate()`, `DMPlexTransform`, `DMPlexTransformRegister()` +J*/ typedef const char *DMPlexTransformType; #define DMPLEXREFINEREGULAR "refine_regular" #define DMPLEXREFINEALFELD "refine_alfeld" @@ -15,6 +25,7 @@ typedef const char *DMPlexTransformType; #define DMPLEXREFINETOSIMPLEX "refine_tosimplex" #define DMPLEXREFINE1D "refine_1d" #define DMPLEXEXTRUDE "extrude" +#define DMPLEXCOHESIVEEXTRUDE "cohesive_extrude" #define DMPLEXTRANSFORMFILTER "transform_filter" PETSC_EXTERN PetscFunctionList DMPlexTransformList; @@ -77,4 +88,9 @@ PETSC_EXTERN PetscErrorCode DMPlexTransformExtrudeSetNormal(DMPlexTransform, con PETSC_EXTERN PetscErrorCode DMPlexTransformExtrudeSetNormalFunction(DMPlexTransform, PetscErrorCode (*)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *)); PETSC_EXTERN PetscErrorCode DMPlexTransformExtrudeSetThicknesses(DMPlexTransform, PetscInt, const PetscReal[]); +PETSC_EXTERN PetscErrorCode DMPlexTransformCohesiveExtrudeGetTensor(DMPlexTransform, PetscBool *); +PETSC_EXTERN PetscErrorCode DMPlexTransformCohesiveExtrudeSetTensor(DMPlexTransform, PetscBool); +PETSC_EXTERN PetscErrorCode DMPlexTransformCohesiveExtrudeGetWidth(DMPlexTransform, PetscReal *); +PETSC_EXTERN PetscErrorCode DMPlexTransformCohesiveExtrudeSetWidth(DMPlexTransform, PetscReal); + PETSC_EXTERN PetscErrorCode DMPlexCreateEphemeral(DMPlexTransform, const char[], DM *); diff --git a/include/petscdmplextypes.h b/include/petscdmplextypes.h index 987fff96df5..b37f093f5bc 100644 --- a/include/petscdmplextypes.h +++ b/include/petscdmplextypes.h @@ -45,7 +45,7 @@ PETSC_EXTERN const char *const DMPlexShapes[]; . `DM_COORD_MAP_SHEAR` - The shear (additive) map along some dimension . `DM_COORD_MAP_FLARE` - The flare (multiplicative) map along some dimension . `DM_COORD_MAP_ANNULUS` - The map from a rectangle to an annulus -. `DM_COORD_MAP_SHELL` - The map from a rectangular solid to an spherical shell +- `DM_COORD_MAP_SHELL` - The map from a rectangular solid to a spherical shell Level: beginner diff --git a/include/petscdmstag.h b/include/petscdmstag.h index d14dbc78d55..4e867b5a99a 100644 --- a/include/petscdmstag.h +++ b/include/petscdmstag.h @@ -16,6 +16,9 @@ The order of the enum entries is significant, as it corresponds to the canonical numbering of DOFs, and the fact that the numbering starts at 0 may also be used by the implementation. + Fortran Note: + Currently there is no Fortran support; it could easily be added. + .seealso: [](ch_stag), `DMSTAG`, `DMDA`, `DMStagStencil`, `DMStagGetLocationSlot()`, `DMStagStencilType` E*/ typedef enum { @@ -59,9 +62,11 @@ PETSC_EXTERN const char *const DMStagStencilLocations[]; /* Corresponding string Level: beginner - Note: + Notes: The component (c) field must always be set, even if there is a single component at a given location (in which case c should be set to 0). + This is a struct, not a `PetscObject`. A small usage sketch follows. 
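+ Example Usage (an illustrative sketch; assumes an existing 2d `DMSTAG` `dm` and a compatible global vector `vec`):
+.vb
+  DMStagStencil pos;
+  PetscScalar   val = 1.0;
+
+  pos.loc = DMSTAG_ELEMENT; // an element-centered location
+  pos.i   = 2;              // global element indices in x and y
+  pos.j   = 3;
+  pos.c   = 0;              // component number at this location
+  PetscCall(DMStagVecSetValuesStencil(dm, vec, 1, &pos, &val, INSERT_VALUES));
+  PetscCall(VecAssemblyBegin(vec));
+  PetscCall(VecAssemblyEnd(vec));
+.ve
+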
+ .seealso: [](ch_stag), `DMSTAG`, `DMDA`, `DMStagMatSetValuesStencil()`, `DMStagVecSetValuesStencil()`, `DMStagStencilLocation`, `DMStagSetStencilWidth()`, `DMStagSetStencilType()`, `DMStagVecGetValuesStencil()`, `DMStagStencilLocation` S*/ diff --git a/include/petscdmswarm.h b/include/petscdmswarm.h index 5da48da04da..eb291edc01b 100644 --- a/include/petscdmswarm.h +++ b/include/petscdmswarm.h @@ -141,6 +141,8 @@ PETSC_EXTERN PetscErrorCode DMSwarmInitializeCoordinates(DM); PETSC_EXTERN PetscErrorCode DMSwarmInitializeVelocities(DM, PetscProbFunc, const PetscReal[]); PETSC_EXTERN PetscErrorCode DMSwarmInitializeVelocitiesFromOptions(DM, const PetscReal[]); +PETSC_EXTERN PetscErrorCode DMSwarmCreatePointPerCellCount(DM, PetscInt *, PetscInt **); + // Interface to internal storage PETSC_EXTERN PetscErrorCode DMSwarmDataFieldGetEntries(const DMSwarmDataField, void **); PETSC_EXTERN PetscErrorCode DMSwarmDataFieldRestoreEntries(const DMSwarmDataField, void **); diff --git a/include/petscdstypes.h b/include/petscdstypes.h index c217f6ede8d..57f4184609c 100644 --- a/include/petscdstypes.h +++ b/include/petscdstypes.h @@ -31,6 +31,9 @@ typedef struct _p_PetscWeakForm *PetscWeakForm; Level: intermediate + Note: + This is a struct, not a `PetscObject` + .seealso: `DMPlexSNESComputeResidualFEM()`, `DMPlexSNESComputeJacobianFEM()`, `DMPlexSNESComputeBoundaryFEM()` S*/ typedef struct _PetscFormKey { diff --git a/include/petscerror.h b/include/petscerror.h index 59f55ead5d6..04c6ecfafce 100644 --- a/include/petscerror.h +++ b/include/petscerror.h @@ -27,7 +27,7 @@ Synopsis: #include - PetscErrorCode SETERRQ(MPI_Comm comm,PetscErrorCode ierr,char *message,...) + PetscErrorCode SETERRQ(MPI_Comm comm, PetscErrorCode ierr, char *message, ...) Collective @@ -72,7 +72,7 @@ PETSC_EXTERN PetscMPIInt PETSC_MPI_ERROR_CODE; Synopsis: #include - PetscErrorCode SETERRMPI(MPI_Comm comm,PetscErrorCode ierr,char *message,...) + PetscErrorCode SETERRMPI(MPI_Comm comm, PetscErrorCode ierr, char *message, ...) Collective @@ -96,7 +96,7 @@ M*/ Synopsis: #include - PetscErrorCode SETERRA(MPI_Comm comm,PetscErrorCode ierr,char *message) + PetscErrorCode SETERRA(MPI_Comm comm, PetscErrorCode ierr, char *message) Collective @@ -121,7 +121,7 @@ M*/ Synopsis: #include - PetscErrorCode SETERRABORT(MPI_Comm comm,PetscErrorCode ierr,char *message,...) + PetscErrorCode SETERRABORT(MPI_Comm comm, PetscErrorCode ierr, char *message, ...) Collective @@ -152,7 +152,7 @@ M*/ } while (0) /*MC - PetscCheck - Check that a particular condition is true + PetscCheck - Checks that a particular condition is true; if not true, then returns the provided error code Synopsis: #include @@ -327,10 +327,10 @@ M*/ `PetscUseTypeMethod()` or `PetscTryTypeMethod()` should be used when calling functions pointers contained in a PETSc object's `ops` array Fortran Notes: - The Fortran function from which this is used must declare a variable PetscErrorCode ierr and ierr must be + The Fortran function in which this is used must declare a `PetscErrorCode` variable necessarily named `ierr`, and `ierr` must be the final argument to the PETSc function being called. - In the main program and in Fortran subroutines that do not have ierr as the final return parameter one + In the main program and in Fortran subroutines that do not have `ierr` as the final return parameter, one should use `PetscCallA()` Example Fortran Usage: @@ -339,8 +339,8 @@ M*/ Vec v ... 
- PetscCall(VecShift(v,1.0,ierr)) - PetscCallA(VecShift(v,1.0,ierr)) + PetscCall(VecShift(v, 1.0, ierr)) + PetscCallA(VecShift(v, 1.0, ierr)) .ve .seealso: `SETERRQ()`, `PetscCheck()`, `PetscAssert()`, `PetscTraceBackErrorHandler()`, `PetscCallMPI()`, @@ -349,12 +349,12 @@ M*/ M*/ /*MC - PetscCallA - Fortran-only macro that should be used in the main program to call PETSc functions instead of using - PetscCall() which should be used in other Fortran subroutines + PetscCallA - Fortran-only macro that should be used in the main program and subroutines that do not have `ierr` as the final return parameter, to call PETSc functions instead of using + `PetscCall()` which should be used in other Fortran subroutines Synopsis: #include - PetscErrorCode PetscCallA(PetscFunction(arguments,ierr)) + PetscErrorCode PetscCallA(PetscFunction(arguments, ierr)) Collective @@ -366,6 +366,7 @@ M*/ Notes: This should only be used with Fortran. With C/C++, use `PetscCall()` always. + The Fortran function in which this is used must declare a `PetscErrorCode` variable necessarily named `ierr` Use `SETERRA()` to set an error in a Fortran main program and `SETERRQ()` in Fortran subroutines .seealso: `SETERRQ()`, `SETERRA()`, `SETERRABORT()`, `PetscCall()`, `CHKERRA()`, `PetscCallAbort()` @@ -377,7 +378,7 @@ M*/ Synopsis: #include - void PetscCallBack(const char *functionname,PetscFunction(args)) + void PetscCallBack(const char *functionname, PetscFunction(args)) Not Collective; No Fortran Support @@ -387,7 +388,7 @@ M*/ Example Usage: .vb - PetscCallBack("XXX callback to do something",a->callback(...)); + PetscCallBack("XXX callback to do something", a->callback(...)); .ve Level: developer @@ -1152,7 +1153,7 @@ PETSC_DEPRECATED_FUNCTION(3, 13, 0, "PetscSignalSegvCheckPointerOrMpi()", ) stat Synopsis: #include - PetscErrorCode (*PetscErrorPrintf)(const char format[],...); + PetscErrorCode (*PetscErrorPrintf)(const char format[], ...); Not Collective; No Fortran Support diff --git a/include/petscfe.h b/include/petscfe.h index cefb2130fe4..bbae5527b09 100644 --- a/include/petscfe.h +++ b/include/petscfe.h @@ -16,6 +16,9 @@ Level: intermediate + Note: + This is a struct, not a `PetscObject` + .seealso: `PetscFE`, `PetscFEGeomCreate()`, `PetscFEGeomDestroy()`, `PetscFEGeomGetChunk()`, `PetscFEGeomRestoreChunk()`, `PetscFEGeomGetPoint()`, `PetscFEGeomGetCellPoint()`, `PetscFEGeomComplete()`, `PetscSpace`, `PetscDualSpace` M*/ diff --git a/include/petscis.h b/include/petscis.h index 34cdaa86a7c..ab1b1791552 100644 --- a/include/petscis.h +++ b/include/petscis.h @@ -192,15 +192,24 @@ PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingSetUp(ISLocalToGlobalMapping); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping, PetscViewer); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingLoad(ISLocalToGlobalMapping, PetscViewer); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping, PetscObject, const char[]); - +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm, PetscInt, const ISLocalToGlobalMapping[], ISLocalToGlobalMapping *); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping, ISLocalToGlobalMapping *); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping, PetscInt *); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping, const PetscInt **); +PETSC_EXTERN PetscErrorCode 
ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping, const PetscInt **); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping, const PetscInt **); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping, const PetscInt **); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping, PetscInt *); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping, PetscInt); + PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping, PetscInt, const PetscInt[], PetscInt[]); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping, PetscInt, const PetscInt[], PetscInt[]); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping, IS, IS *); PETSC_EXTERN PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping, ISGlobalToLocalMappingMode, PetscInt, const PetscInt[], PetscInt *, PetscInt[]); PETSC_EXTERN PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping, ISGlobalToLocalMappingMode, PetscInt, const PetscInt[], PetscInt *, PetscInt[]); PETSC_EXTERN PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping, ISGlobalToLocalMappingMode, IS, IS *); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping, PetscInt *); + PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping, PetscInt *, PetscInt *[], PetscInt **[]); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping, PetscInt *, PetscInt *[], PetscInt **[]); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping, PetscInt *, PetscInt *[], PetscInt *[], PetscInt **[]); @@ -209,14 +218,7 @@ PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockNodeInfo(ISLocalToGlob PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreBlockNodeInfo(ISLocalToGlobalMapping, PetscInt *, PetscInt *[], PetscInt **[]); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping, PetscInt *, PetscInt *[], PetscInt *[], PetscInt **[]); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping, PetscInt *, PetscInt *[], PetscInt *[], PetscInt **[]); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping, const PetscInt **); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping, const PetscInt **); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping, const PetscInt **); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping, const PetscInt **); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm, PetscInt, const ISLocalToGlobalMapping[], ISLocalToGlobalMapping *); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping, PetscInt *); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping, PetscInt); -PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping, ISLocalToGlobalMapping *); +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockMultiLeavesSF(ISLocalToGlobalMapping, PetscSF *); /*E ISColoringType - determines if the coloring is for the entire parallel grid/graph/matrix @@ -286,76 +288,9 @@ struct _n_PetscLayout { PetscInt oldbs; /* And again */ }; -/*@C - PetscLayoutFindOwner - Find the owning MPI 
process for a global index - - Not Collective; No Fortran Support - - Input Parameters: -+ map - the layout -- idx - global index to find the owner of - - Output Parameter: -. owner - the owning rank - - Level: developer - -.seealso: `PetscLayout`, `PetscLayoutFindOwnerIndex()` -@*/ -static inline PetscErrorCode PetscLayoutFindOwner(PetscLayout map, PetscInt idx, PetscMPIInt *owner) -{ - PetscMPIInt lo = 0, hi, t; - - PetscFunctionBegin; - *owner = -1; /* GCC erroneously issues warning about possibly uninitialized use when error condition */ - PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first"); - PetscAssert(idx >= 0 && idx <= map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT " is out of range", idx); - hi = map->size; - while (hi - lo > 1) { - t = lo + (hi - lo) / 2; - if (idx < map->range[t]) hi = t; - else lo = t; - } - *owner = lo; - PetscFunctionReturn(PETSC_SUCCESS); -} - -/*@C - PetscLayoutFindOwnerIndex - Find the owning MPI process and the local index on that process for a global index - - Not Collective; No Fortran Support - - Input Parameters: -+ map - the layout -- idx - global index to find the owner of - - Output Parameters: -+ owner - the owning rank -- lidx - local index used by the owner for `idx` - - Level: developer - -.seealso: `PetscLayout`, `PetscLayoutFindOwner()` -@*/ -static inline PetscErrorCode PetscLayoutFindOwnerIndex(PetscLayout map, PetscInt idx, PetscMPIInt *owner, PetscInt *lidx) -{ - PetscMPIInt lo = 0, hi, t; - - PetscFunctionBegin; - PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first"); - PetscAssert(idx >= 0 && idx <= map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT " is out of range", idx); - hi = map->size; - while (hi - lo > 1) { - t = lo + (hi - lo) / 2; - if (idx < map->range[t]) hi = t; - else lo = t; - } - if (owner) *owner = lo; - if (lidx) *lidx = idx - map->range[lo]; - PetscFunctionReturn(PETSC_SUCCESS); -} - PETSC_EXTERN PetscErrorCode PetscLayoutCreate(MPI_Comm, PetscLayout *); +PETSC_EXTERN PetscErrorCode PetscLayoutFindOwner(PetscLayout, PetscInt, PetscMPIInt *); +PETSC_EXTERN PetscErrorCode PetscLayoutFindOwnerIndex(PetscLayout, PetscInt, PetscMPIInt *, PetscInt *); PETSC_EXTERN PetscErrorCode PetscLayoutCreateFromSizes(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscLayout *); PETSC_EXTERN PetscErrorCode PetscLayoutCreateFromRanges(MPI_Comm, const PetscInt[], PetscCopyMode, PetscInt, PetscLayout *); PETSC_EXTERN PetscErrorCode PetscLayoutSetUp(PetscLayout); diff --git a/include/petscksp.h b/include/petscksp.h index 9ef370587a4..8344c438406 100644 --- a/include/petscksp.h +++ b/include/petscksp.h @@ -986,6 +986,9 @@ PETSC_EXTERN PetscErrorCode MatCreateSchurComplementPmat(Mat, Mat, Mat, Mat, Mat PETSC_EXTERN PetscErrorCode MatCreateLMVMDFP(MPI_Comm, PetscInt, PetscInt, Mat *); PETSC_EXTERN PetscErrorCode MatCreateLMVMBFGS(MPI_Comm, PetscInt, PetscInt, Mat *); +PETSC_EXTERN PetscErrorCode MatCreateLMVMDBFGS(MPI_Comm, PetscInt, PetscInt, Mat *); +PETSC_EXTERN PetscErrorCode MatCreateLMVMDDFP(MPI_Comm, PetscInt, PetscInt, Mat *); +PETSC_EXTERN PetscErrorCode MatCreateLMVMDQN(MPI_Comm, PetscInt, PetscInt, Mat *); PETSC_EXTERN PetscErrorCode MatCreateLMVMSR1(MPI_Comm, PetscInt, PetscInt, Mat *); PETSC_EXTERN PetscErrorCode MatCreateLMVMBroyden(MPI_Comm, PetscInt, PetscInt, Mat *); PETSC_EXTERN PetscErrorCode 
MatCreateLMVMBadBroyden(MPI_Comm, PetscInt, PetscInt, Mat *); @@ -1010,10 +1013,24 @@ PETSC_EXTERN PetscErrorCode MatLMVMGetJ0(Mat, Mat *); PETSC_EXTERN PetscErrorCode MatLMVMGetJ0PC(Mat, PC *); PETSC_EXTERN PetscErrorCode MatLMVMGetJ0KSP(Mat, KSP *); PETSC_EXTERN PetscErrorCode MatLMVMSetHistorySize(Mat, PetscInt); +PETSC_EXTERN PetscErrorCode MatLMVMGetHistorySize(Mat, PetscInt *); PETSC_EXTERN PetscErrorCode MatLMVMGetUpdateCount(Mat, PetscInt *); PETSC_EXTERN PetscErrorCode MatLMVMGetRejectCount(Mat, PetscInt *); PETSC_EXTERN PetscErrorCode MatLMVMSymBroydenSetDelta(Mat, PetscScalar); +/*E + MatLMVMSymBroydenScaleType - Scaling type for symmetric Broyden. + + Values: ++ `MAT_LMVM_SYMBROYDEN_SCALE_NONE` - No scaling +. `MAT_LMVM_SYMBROYDEN_SCALE_SCALAR` - scalar scaling +. `MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL` - diagonal scaling +- `MAT_LMVM_SYMBROYDEN_SCALE_USER` - user-provided scale option + + Level: intermediate + +.seealso: [](ch_matrices), `MatLMVM`, `MatLMVMSymBroydenSetScaleType()` +E*/ typedef enum { MAT_LMVM_SYMBROYDEN_SCALE_NONE = 0, MAT_LMVM_SYMBROYDEN_SCALE_SCALAR = 1, @@ -1024,6 +1041,25 @@ PETSC_EXTERN const char *const MatLMVMSymBroydenScaleTypes[]; PETSC_EXTERN PetscErrorCode MatLMVMSymBroydenSetScaleType(Mat, MatLMVMSymBroydenScaleType); +/*E + MatLMVMDenseType - Memory storage strategy for the dense variants of `MATLMVM`. + + Values: ++ `MAT_LMVM_DENSE_REORDER` - reorders memory to minimize the number of kernel launches +- `MAT_LMVM_DENSE_INPLACE` - computes in place to minimize memory movement + + Level: intermediate + +.seealso: [](ch_matrices), `MatLMVM`, `MatLMVMDenseSetType()` +E*/ +typedef enum { + MAT_LMVM_DENSE_REORDER, + MAT_LMVM_DENSE_INPLACE +} MatLMVMDenseType; +PETSC_EXTERN const char *const MatLMVMDenseTypes[]; + +PETSC_EXTERN PetscErrorCode MatLMVMDenseSetType(Mat, MatLMVMDenseType); + PETSC_EXTERN PetscErrorCode KSPSetDM(KSP, DM); PETSC_EXTERN PetscErrorCode KSPSetDMActive(KSP, PetscBool); PETSC_EXTERN PetscErrorCode KSPGetDM(KSP, DM *); @@ -1094,3 +1130,5 @@ PETSC_EXTERN PetscErrorCode DMCheckInterpolator(DM, Mat, Mat, Mat, PetscReal); PETSC_EXTERN PetscErrorCode PCBJKOKKOSSetKSP(PC, KSP); PETSC_EXTERN PetscErrorCode PCBJKOKKOSGetKSP(PC, KSP *); + +PETSC_EXTERN PetscErrorCode DMCopyDMKSP(DM, DM); diff --git a/include/petsclog.h b/include/petsclog.h index a9368832d01..c264ec21103 100644 --- a/include/petsclog.h +++ b/include/petsclog.h @@ -414,7 +414,7 @@ static inline PETSC_UNUSED PetscErrorCode PetscLogObjectDestroy(PetscObject o) #define PETSC_FLOPS_PER_OP 1.0 #endif -/*@C +/*@ PetscLogFlops - Log how many flops are performed in a calculation Input Parameter: @@ -722,7 +722,7 @@ PETSC_EXTERN PetscErrorCode PetscLogGpuTime(void); PETSC_EXTERN PetscErrorCode PetscLogGpuTimeBegin(void); PETSC_EXTERN PetscErrorCode PetscLogGpuTimeEnd(void); -/*@C +/*@ PetscLogGpuFlops - Log how many flops are performed in a calculation on the device Input Parameter: @@ -737,6 +737,9 @@ PETSC_EXTERN PetscErrorCode PetscLogGpuTimeEnd(void); The values are also added to the total flop count for the MPI rank that is set with `PetscLogFlops()`; hence the number of flops just on the CPU would be the value set with `PetscLogFlops()` minus the value set with `PetscLogGpuFlops()` + Developer Note: + Currently the Fortran stub generator cannot process files in include + .seealso: [](ch_profiling), `PetscLogView()`, `PetscLogFlops()`, `PetscLogGpuTimeBegin()`, `PetscLogGpuTimeEnd()` @*/ static inline PetscErrorCode PetscLogGpuFlops(PetscLogDouble n)
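For orientation, two short usage sketches for APIs added in the hunks above; every name not declared in the sketches (the vectors `x` and `g`, the kernel and its flop count) is an illustrative assumption, not part of this patch. First, creating one of the new dense LMVM variants and selecting its storage strategy:
.vb
  Mat      B;
  PetscInt n = 100, N = 100; /* illustrative local/global sizes */

  PetscCall(MatCreateLMVMDBFGS(PETSC_COMM_WORLD, n, N, &B));
  PetscCall(MatLMVMSetHistorySize(B, 10));
  PetscCall(MatLMVMDenseSetType(B, MAT_LMVM_DENSE_INPLACE)); /* compute in place to minimize memory movement */
  /* per accepted iterate: PetscCall(MatLMVMUpdate(B, x, g)); then apply with MatSolve() or MatMult() */
.ve
and second, the device-side flop logging documented for `PetscLogGpuFlops()`, combined with the existing GPU timer routines:
.vb
  PetscLogEvent  USER_KERNEL;
  PetscClassId   classid;
  PetscLogDouble flops = 2.0e6; /* assumed flop count of the kernel below */

  PetscCall(PetscClassIdRegister("User kernels", &classid));
  PetscCall(PetscLogEventRegister("UserKernel", classid, &USER_KERNEL));
  PetscCall(PetscLogEventBegin(USER_KERNEL, 0, 0, 0, 0));
  PetscCall(PetscLogGpuTimeBegin());
  /* ... launch the device kernel here ... */
  PetscCall(PetscLogGpuTimeEnd());
  PetscCall(PetscLogGpuFlops(flops)); /* counted toward both the GPU total and the rank's total flops */
  PetscCall(PetscLogEventEnd(USER_KERNEL, 0, 0, 0, 0));
.ve
diff --git a/include/petscmat.h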
b/include/petscmat.h index 9daabc9739f..784049f0575 100644 --- a/include/petscmat.h +++ b/include/petscmat.h @@ -119,10 +119,16 @@ typedef const char *MatType; #define MATSELLCUDA "sellcuda" #define MATSEQSELLCUDA "seqsellcuda" #define MATMPISELLCUDA "mpisellcuda" +#define MATSELLHIP "sellhip" +#define MATSEQSELLHIP "seqsellhip" +#define MATMPISELLHIP "mpisellhip" #define MATDUMMY "dummy" #define MATLMVM "lmvm" #define MATLMVMDFP "lmvmdfp" +#define MATLMVMDDFP "lmvmddfp" #define MATLMVMBFGS "lmvmbfgs" +#define MATLMVMDBFGS "lmvmdbfgs" +#define MATLMVMDQN "lmvmdqn" #define MATLMVMSR1 "lmvmsr1" #define MATLMVMBROYDEN "lmvmbroyden" #define MATLMVMBADBROYDEN "lmvmbadbroyden" @@ -367,6 +373,7 @@ PETSC_EXTERN PetscErrorCode MatSetOptionsPrefixFactor(Mat, const char[]); PETSC_EXTERN PetscErrorCode MatAppendOptionsPrefixFactor(Mat, const char[]); PETSC_EXTERN PetscErrorCode MatAppendOptionsPrefix(Mat, const char[]); PETSC_EXTERN PetscErrorCode MatGetOptionsPrefix(Mat, const char *[]); +PETSC_EXTERN PetscErrorCode MatGetState(Mat, PetscObjectState *); PETSC_EXTERN PetscErrorCode MatSetErrorIfFailure(Mat, PetscBool); PETSC_EXTERN PetscFunctionList MatList; @@ -523,6 +530,7 @@ PETSC_EXTERN PetscErrorCode MatHYPRESetPreallocation(Mat, PetscInt, const PetscI PETSC_EXTERN PetscErrorCode MatPythonSetType(Mat, const char[]); PETSC_EXTERN PetscErrorCode MatPythonGetType(Mat, const char *[]); +PETSC_EXTERN PetscErrorCode MatPythonCreate(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, const char[], Mat *); PETSC_EXTERN PetscErrorCode MatResetPreallocation(Mat); PETSC_EXTERN PetscErrorCode MatSetUp(Mat); @@ -690,8 +698,8 @@ PETSC_EXTERN PetscErrorCode MatSetBlockSize(Mat, PetscInt); PETSC_EXTERN PetscErrorCode MatGetBlockSizes(Mat, PetscInt *, PetscInt *); PETSC_EXTERN PetscErrorCode MatSetBlockSizes(Mat, PetscInt, PetscInt); PETSC_EXTERN PetscErrorCode MatSetBlockSizesFromMats(Mat, Mat, Mat); -PETSC_EXTERN PetscErrorCode MatSetVariableBlockSizes(Mat, PetscInt, PetscInt *); -PETSC_EXTERN PetscErrorCode MatGetVariableBlockSizes(Mat, PetscInt *, const PetscInt **); +PETSC_EXTERN PetscErrorCode MatSetVariableBlockSizes(Mat, PetscInt, const PetscInt[]); +PETSC_EXTERN PetscErrorCode MatGetVariableBlockSizes(Mat, PetscInt *, const PetscInt *[]); PETSC_EXTERN PetscErrorCode MatDenseGetColumn(Mat, PetscInt, PetscScalar *[]); PETSC_EXTERN PetscErrorCode MatDenseRestoreColumn(Mat, PetscScalar *[]); @@ -2015,8 +2023,14 @@ typedef enum { MATOP_FIND_OFFBLOCK_ENTRIES = 143, MATOP_MPICONCATENATESEQ = 144, MATOP_DESTROYSUBMATRICES = 145, - MATOP_TRANSPOSE_SOLVE = 146, - MATOP_GET_VALUES_LOCAL = 147 + MATOP_MAT_TRANSPOSE_SOLVE = 146, + MATOP_GET_VALUES_LOCAL = 147, + MATOP_CREATE_GRAPH = 148, + /* MATOP_PLACEHOLDER_149=149, */ + MATOP_TRANSPOSE_SYMBOLIC = 150, + MATOP_ELIMINATE_ZEROS = 151, + MATOP_GET_ROW_SUM_ABS = 152, + MATOP_GET_FACTOR = 153 } MatOperation; PETSC_EXTERN PetscErrorCode MatSetOperation(Mat, MatOperation, void (*)(void)); PETSC_EXTERN PetscErrorCode MatGetOperation(Mat, MatOperation, void (**)(void)); @@ -2188,26 +2202,28 @@ PETSC_EXTERN PetscErrorCode PetscViewerMathematicaPutCSRMatrix(PetscViewer, Pets #ifdef PETSC_HAVE_H2OPUS PETSC_EXTERN_TYPEDEF typedef PetscScalar(MatH2OpusKernelFn)(PetscInt, PetscReal[], PetscReal[], void *); PETSC_EXTERN_TYPEDEF typedef MatH2OpusKernelFn *MatH2OpusKernel; -PETSC_EXTERN PetscErrorCode MatCreateH2OpusFromKernel(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, const PetscReal[], PetscBool, MatH2OpusKernelFn *, void *, PetscReal, PetscInt, PetscInt, Mat *); 
-PETSC_EXTERN PetscErrorCode MatCreateH2OpusFromMat(Mat, PetscInt, const PetscReal[], PetscBool, PetscReal, PetscInt, PetscInt, PetscInt, PetscReal, Mat *); -PETSC_EXTERN PetscErrorCode MatH2OpusSetSamplingMat(Mat, Mat, PetscInt, PetscReal); -PETSC_EXTERN PetscErrorCode MatH2OpusOrthogonalize(Mat); -PETSC_EXTERN PetscErrorCode MatH2OpusCompress(Mat, PetscReal); -PETSC_EXTERN PetscErrorCode MatH2OpusSetNativeMult(Mat, PetscBool); -PETSC_EXTERN PetscErrorCode MatH2OpusGetNativeMult(Mat, PetscBool *); -PETSC_EXTERN PetscErrorCode MatH2OpusGetIndexMap(Mat, IS *); -PETSC_EXTERN PetscErrorCode MatH2OpusMapVec(Mat, PetscBool, Vec, Vec *); -PETSC_EXTERN PetscErrorCode MatH2OpusLowRankUpdate(Mat, Mat, Mat, PetscScalar); + +PETSC_EXTERN PetscErrorCode MatCreateH2OpusFromKernel(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, const PetscReal[], PetscBool, MatH2OpusKernelFn *, void *, PetscReal, PetscInt, PetscInt, Mat *); +PETSC_EXTERN PetscErrorCode MatCreateH2OpusFromMat(Mat, PetscInt, const PetscReal[], PetscBool, PetscReal, PetscInt, PetscInt, PetscInt, PetscReal, Mat *); +PETSC_EXTERN PetscErrorCode MatH2OpusSetSamplingMat(Mat, Mat, PetscInt, PetscReal); +PETSC_EXTERN PetscErrorCode MatH2OpusOrthogonalize(Mat); +PETSC_EXTERN PetscErrorCode MatH2OpusCompress(Mat, PetscReal); +PETSC_EXTERN PetscErrorCode MatH2OpusSetNativeMult(Mat, PetscBool); +PETSC_EXTERN PetscErrorCode MatH2OpusGetNativeMult(Mat, PetscBool *); +PETSC_EXTERN PetscErrorCode MatH2OpusGetIndexMap(Mat, IS *); +PETSC_EXTERN PetscErrorCode MatH2OpusMapVec(Mat, PetscBool, Vec, Vec *); +PETSC_EXTERN PetscErrorCode MatH2OpusLowRankUpdate(Mat, Mat, Mat, PetscScalar); #endif #ifdef PETSC_HAVE_HTOOL PETSC_EXTERN_TYPEDEF typedef PetscErrorCode(MatHtoolKernelFn)(PetscInt, PetscInt, PetscInt, const PetscInt *, const PetscInt *, PetscScalar *, void *); PETSC_EXTERN_TYPEDEF typedef MatHtoolKernelFn *MatHtoolKernel; -PETSC_EXTERN PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, const PetscReal[], const PetscReal[], MatHtoolKernelFn *, void *, Mat *); -PETSC_EXTERN PetscErrorCode MatHtoolSetKernel(Mat, MatHtoolKernelFn *, void *); -PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationSource(Mat, IS *); -PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationTarget(Mat, IS *); -PETSC_EXTERN PetscErrorCode MatHtoolUsePermutation(Mat, PetscBool); + +PETSC_EXTERN PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, const PetscReal[], const PetscReal[], MatHtoolKernelFn *, void *, Mat *); +PETSC_EXTERN PetscErrorCode MatHtoolSetKernel(Mat, MatHtoolKernelFn *, void *); +PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationSource(Mat, IS *); +PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationTarget(Mat, IS *); +PETSC_EXTERN PetscErrorCode MatHtoolUsePermutation(Mat, PetscBool); /*E MatHtoolCompressorType - Indicates the type of compressor used by a `MATHTOOL` @@ -2530,6 +2546,8 @@ PETSC_EXTERN PetscErrorCode MatDenseHIPPlaceArray(Mat, const PetscScalar *); PETSC_EXTERN PetscErrorCode MatDenseHIPReplaceArray(Mat, const PetscScalar *); PETSC_EXTERN PetscErrorCode MatDenseHIPResetArray(Mat); PETSC_EXTERN PetscErrorCode MatDenseHIPSetPreallocation(Mat, PetscScalar *); +PETSC_EXTERN PetscErrorCode MatCreateSeqSELLHIP(MPI_Comm, PetscInt, PetscInt, PetscInt, const PetscInt[], Mat *); +PETSC_EXTERN PetscErrorCode MatCreateSELLHIP(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, const PetscInt[], PetscInt, const PetscInt[], Mat *); #endif #if 
defined(PETSC_HAVE_VIENNACL) diff --git a/include/petscmath.h b/include/petscmath.h index 3ee8eb15afa..98544f646b3 100644 --- a/include/petscmath.h +++ b/include/petscmath.h @@ -708,7 +708,7 @@ M*/ Level: beginner -.seealso: `PetscMax()`, `PetscMin()`, `PetscAbsInt()`, `PetscSqr()` +.seealso: `PetscReal`, `PetscMax()`, `PetscMin()`, `PetscAbsInt()`, `PetscSqr()` M*/ #if defined(PETSC_USE_REAL_SINGLE) #define PetscAbsReal(a) fabsf(a) @@ -741,6 +741,33 @@ M*/ M*/ #define PetscSqr(a) ((a) * (a)) +/*MC + PetscRealConstant - a compile-time macro that ensures a given constant real number is properly represented in the configured + precision of `PetscReal`, be it half, single, double, or 128-bit + + Synopsis: + #include + PetscReal PetscRealConstant(real_number) + + Not Collective + + Input Parameter: +. real_number - the real number, for example 1.5 + + Level: beginner + + Note: + For example, if PETSc is configured with `--with-precision=__float128` and one writes +.vb + PetscReal d = 1.5; +.ve + the constant is first evaluated in double precision and then extended to the 128-bit representation, so for constants not exactly representable in double precision the result can be far from the correct value. Hence, one should write +.vb + PetscReal d = PetscRealConstant(1.5); +.ve + +.seealso: `PetscReal` +M*/ #if defined(PETSC_USE_REAL_SINGLE) #define PetscRealConstant(constant) constant##F #elif defined(PETSC_USE_REAL_DOUBLE) @@ -754,10 +781,99 @@ M*/ /* Basic constants */ +/*MC + PETSC_PI - the value of $\pi$ to the correct precision of `PetscReal`. + + Level: beginner + +.seealso: `PetscReal`, `PETSC_PHI`, `PETSC_SQRT2` +M*/ + +/*MC + PETSC_PHI - the value of $\phi$, the Golden Ratio, to the correct precision of `PetscReal`. + + Level: beginner + +.seealso: `PetscReal`, `PETSC_PI`, `PETSC_SQRT2` +M*/ + +/*MC + PETSC_SQRT2 - the value of $\sqrt{2}$ to the correct precision of `PetscReal`. + + Level: beginner + +.seealso: `PetscReal`, `PETSC_PI`, `PETSC_PHI` +M*/ + #define PETSC_PI PetscRealConstant(3.1415926535897932384626433832795029) #define PETSC_PHI PetscRealConstant(1.6180339887498948482045868343656381) #define PETSC_SQRT2 PetscRealConstant(1.4142135623730950488016887242096981) +/*MC + PETSC_MAX_REAL - the largest real value that can be stored in a `PetscReal` + + Level: beginner + +.seealso: `PETSC_MIN_REAL`, `PETSC_REAL_MIN`, `PETSC_MACHINE_EPSILON`, `PETSC_SQRT_MACHINE_EPSILON`, `PETSC_SMALL` +M*/ + +/*MC + PETSC_MIN_REAL - the smallest real value that can be stored in a `PetscReal`, generally this is the negative of `PETSC_MAX_REAL` + + Level: beginner + +.seealso: `PETSC_MAX_REAL`, `PETSC_REAL_MIN`, `PETSC_MACHINE_EPSILON`, `PETSC_SQRT_MACHINE_EPSILON`, `PETSC_SMALL` +M*/ + +/*MC + PETSC_REAL_MIN - the smallest positive normalized real value that can be stored in a `PetscReal`. + + Level: beginner + + Note: + See the floating point arithmetic literature for a discussion of normalized and subnormal floating point numbers + + Developer Note: + The naming is confusing as there is both a `PETSC_REAL_MIN` and `PETSC_MIN_REAL` with different meanings.
+ +.seealso: `PETSC_MAX_REAL`, `PETSC_MIN_REAL`, `PETSC_MACHINE_EPSILON`, `PETSC_SQRT_MACHINE_EPSILON`, `PETSC_SMALL` +M*/ + +/*MC + PETSC_MACHINE_EPSILON - the machine epsilon for the precision of `PetscReal` + + Level: beginner + + Note: + See the floating point arithmetic literature for a discussion of machine epsilon + +.seealso: `PETSC_MAX_REAL`, `PETSC_MIN_REAL`, `PETSC_REAL_MIN`, `PETSC_SQRT_MACHINE_EPSILON`, `PETSC_SMALL` +M*/ + +/*MC + PETSC_SQRT_MACHINE_EPSILON - the square root of the machine epsilon for the precision of `PetscReal` + + Level: beginner + + Note: + See `PETSC_MACHINE_EPSILON` + +.seealso: `PETSC_MAX_REAL`, `PETSC_MIN_REAL`, `PETSC_REAL_MIN`, `PETSC_MACHINE_EPSILON`, `PETSC_SMALL` +M*/ + +/*MC + PETSC_SMALL - an arbitrary "small" number whose value depends on the precision of `PetscReal`; it is used in some PETSc examples + and in `PetscApproximateLTE()` and `PetscApproximateGTE()` to determine if a computation was successful. + + Level: beginner + + Note: + See `PETSC_MACHINE_EPSILON` + +.seealso: `PetscApproximateLTE()`, `PetscApproximateGTE()`, `PETSC_MAX_REAL`, `PETSC_MIN_REAL`, `PETSC_REAL_MIN`, `PETSC_MACHINE_EPSILON`, + `PETSC_SQRT_MACHINE_EPSILON` +M*/ + #if defined(PETSC_USE_REAL_SINGLE) #define PETSC_MAX_REAL 3.40282346638528860e+38F #define PETSC_MIN_REAL (-PETSC_MAX_REAL) diff --git a/include/petscmatlab.h b/include/petscmatlab.h index 4768a415350..2c4bc5e1f68 100644 --- a/include/petscmatlab.h +++ b/include/petscmatlab.h @@ -29,7 +29,7 @@ typedef struct _p_PetscMatlabEngine *PetscMatlabEngine; PETSC_EXTERN PetscErrorCode PetscMatlabEngineCreate(MPI_Comm, const char[], PetscMatlabEngine *); PETSC_EXTERN PetscErrorCode PetscMatlabEngineDestroy(PetscMatlabEngine *); PETSC_EXTERN PetscErrorCode PetscMatlabEngineEvaluate(PetscMatlabEngine, const char[], ...); -PETSC_EXTERN PetscErrorCode PetscMatlabEngineGetOutput(PetscMatlabEngine, char **); +PETSC_EXTERN PetscErrorCode PetscMatlabEngineGetOutput(PetscMatlabEngine, const char **); PETSC_EXTERN PetscErrorCode PetscMatlabEnginePrintOutput(PetscMatlabEngine, FILE *); PETSC_EXTERN PetscErrorCode PetscMatlabEnginePut(PetscMatlabEngine, PetscObject); PETSC_EXTERN PetscErrorCode PetscMatlabEngineGet(PetscMatlabEngine, PetscObject); diff --git a/include/petscpartitioner.h b/include/petscpartitioner.h index cab4f42a81c..90c097bfb1a 100644 --- a/include/petscpartitioner.h +++ b/include/petscpartitioner.h @@ -47,7 +47,7 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerReset(PetscPartitioner); PETSC_EXTERN PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner); PETSC_EXTERN PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner, PetscObject, const char[]); PETSC_EXTERN PetscErrorCode PetscPartitionerView(PetscPartitioner, PetscViewer); -PETSC_EXTERN PetscErrorCode PetscPartitionerPartition(PetscPartitioner, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscSection, PetscSection, PetscSection, IS *); +PETSC_EXTERN PetscErrorCode PetscPartitionerPartition(PetscPartitioner, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscSection, PetscSection, PetscSection, PetscSection, IS *); PETSC_EXTERN PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner, PetscInt, const PetscInt[], const PetscInt[]); PETSC_EXTERN PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner, PetscBool); diff --git a/include/petscpc.h b/include/petscpc.h index d1489861df1..1a2b890c8b5 100644 --- a/include/petscpc.h +++ b/include/petscpc.h @@ -133,6 +133,7 @@ PETSC_EXTERN PetscErrorCode PCJacobiSetUseAbs(PC, PetscBool); PETSC_EXTERN PetscErrorCode PCJacobiGetUseAbs(PC, PetscBool *); PETSC_EXTERN PetscErrorCode
PCJacobiSetFixDiagonal(PC, PetscBool); PETSC_EXTERN PetscErrorCode PCJacobiGetFixDiagonal(PC, PetscBool *); +PETSC_EXTERN PetscErrorCode PCJacobiGetDiagonal(PC pc, Vec, Vec); PETSC_EXTERN PetscErrorCode PCJacobiSetRowl1Scale(PC, PetscReal); PETSC_EXTERN PetscErrorCode PCJacobiGetRowl1Scale(PC, PetscReal *); PETSC_EXTERN PetscErrorCode PCSORSetSymmetric(PC, MatSORType); @@ -248,6 +249,7 @@ PETSC_EXTERN PetscErrorCode PCCompositeAddPC(PC, PC); PETSC_EXTERN PetscErrorCode PCCompositeGetNumberPC(PC, PetscInt *); PETSC_EXTERN PetscErrorCode PCCompositeGetPC(PC, PetscInt, PC *); PETSC_EXTERN PetscErrorCode PCCompositeSpecialSetAlpha(PC, PetscScalar); +PETSC_EXTERN PetscErrorCode PCCompositeSpecialSetAlphaMat(PC, Mat); PETSC_EXTERN PetscErrorCode PCRedundantSetNumber(PC, PetscInt); PETSC_EXTERN PetscErrorCode PCRedundantSetScatter(PC, VecScatter, VecScatter); @@ -499,6 +501,7 @@ PETSC_EXTERN PetscErrorCode PCHPDDMGetSTShareSubKSP(PC, PetscBool *); PETSC_EXTERN PetscErrorCode PCHPDDMSetDeflationMat(PC, IS, Mat); PETSC_EXTERN PetscErrorCode PCHPDDMFinalizePackage(void); PETSC_EXTERN PetscErrorCode PCHPDDMInitializePackage(void); +PETSC_EXTERN PetscErrorCode PCHPDDMGetComplexities(PC, PetscReal *, PetscReal *); PETSC_EXTERN PetscErrorCode PCAmgXGetResources(PC, void *); diff --git a/include/petscpctypes.h b/include/petscpctypes.h index 8e5d5d32e8f..7de71c326f6 100644 --- a/include/petscpctypes.h +++ b/include/petscpctypes.h @@ -102,11 +102,11 @@ typedef enum { #define PC_SIDE_MAX (PC_SYMMETRIC + 1) /*E - PCRichardsonConvergedReason - reason a `PCRICHARDSON` `PCApplyRichardson()` method terminated + PCRichardsonConvergedReason - reason a `PCApplyRichardson()` method terminated Level: advanced -.seealso: [](sec_pc), `PCRICHARDSON`, `PC`, `PCApplyRichardson()` +.seealso: [](sec_pc), `KSPRICHARDSON`, `PC`, `PCApplyRichardson()` E*/ typedef enum { PCRICHARDSON_CONVERGED_RTOL = 2, @@ -229,11 +229,10 @@ typedef enum { Values: + `PC_FIELDSPLIT_SCHUR_PRE_SELF` - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix. The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM` -. `PC_FIELDSPLIT_SCHUR_PRE_SELFP` - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01. - This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be +. `PC_FIELDSPLIT_SCHUR_PRE_SELFP` - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $Sp = A11 - A10 diag(A00)^{-1} A01$. + This is only a good preconditioner when $diag(A00)$ is a good preconditioner for $A00$. Optionally, $A00$ can be lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump` -. `PC_FIELDSPLIT_SCHUR_PRE_A11` - the preconditioner for the Schur complement is generated from the block diagonal part of the matrix used to define the preconditioner, - associated with the Schur complement (i.e. A11), not the Schur complement matrix +. `PC_FIELDSPLIT_SCHUR_PRE_A11` - the preconditioner for the Schur complement is generated from $A11$, not the Schur complement matrix . `PC_FIELDSPLIT_SCHUR_PRE_USER` - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument to this function). 
- `PC_FIELDSPLIT_SCHUR_PRE_FULL` - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation diff --git a/include/petscsnes.h b/include/petscsnes.h index 43df77edd04..52f71b8222c 100644 --- a/include/petscsnes.h +++ b/include/petscsnes.h @@ -79,8 +79,6 @@ PETSC_EXTERN PetscErrorCode SNESSetWorkVecs(SNES, PetscInt); PETSC_EXTERN PetscErrorCode SNESAddOptionsChecker(PetscErrorCode (*)(SNES)); -PETSC_EXTERN PetscErrorCode SNESSetUpdate(SNES, PetscErrorCode (*)(SNES, PetscInt)); - PETSC_EXTERN PetscErrorCode SNESRegister(const char[], PetscErrorCode (*)(SNES)); PETSC_EXTERN PetscErrorCode SNESGetKSP(SNES, KSP *); @@ -504,6 +502,19 @@ PETSC_EXTERN_TYPEDEF typedef PetscErrorCode(SNESJacobianFn)(SNES snes, Vec u, Ma S*/ PETSC_EXTERN_TYPEDEF typedef PetscErrorCode(SNESNGSFn)(SNES snes, Vec u, Vec b, void *ctx); +/*S + SNESUpdateFn - A prototype of a `SNES` update function that would be passed to `SNESSetUpdate()` + + Calling Sequence: ++ snes - `SNES` context +- step - the current iteration index + + Level: advanced + +.seealso: [](ch_snes), `SNES`, `SNESSetUpdate()` +S*/ +PETSC_EXTERN_TYPEDEF typedef PetscErrorCode(SNESUpdateFn)(SNES snes, PetscInt step); + /* --------- Solving systems of nonlinear equations --------------- */ PETSC_EXTERN PetscErrorCode SNESSetFunction(SNES, Vec, SNESFunctionFn *, void *); PETSC_EXTERN PetscErrorCode SNESGetFunction(SNES, Vec *, SNESFunctionFn **, void **); @@ -528,6 +539,8 @@ PETSC_EXTERN PetscErrorCode SNESSetObjective(SNES, SNESObjectiveFn *, void *); PETSC_EXTERN PetscErrorCode SNESGetObjective(SNES, SNESObjectiveFn **, void **); PETSC_EXTERN PetscErrorCode SNESComputeObjective(SNES, Vec, PetscReal *); +PETSC_EXTERN PetscErrorCode SNESSetUpdate(SNES, SNESUpdateFn *); + /*E SNESNormSchedule - Frequency with which the norm is computed during a nonlinear solve @@ -1167,6 +1180,20 @@ PETSC_EXTERN PetscErrorCode SNESNASMGetSNES(SNES, PetscInt, SNES *); PETSC_EXTERN PetscErrorCode SNESNASMGetNumber(SNES, PetscInt *); PETSC_EXTERN PetscErrorCode SNESNASMSetWeight(SNES, Vec); +/*E + SNESCompositeType - Determines how two or more nonlinear solvers are composed with the `SNESType` of `SNESCOMPOSITE` + + Values: ++ `SNES_COMPOSITE_ADDITIVE` - results from the application of all solvers are added together +. `SNES_COMPOSITE_MULTIPLICATIVE` - solvers are applied sequentially to the residual freshly + computed after the previous solver application +- `SNES_COMPOSITE_ADDITIVEOPTIMAL` - uses the linear combination of the solutions obtained with each solver that approximately minimizes the function + value at the new iteration. + + Level: beginner + +.seealso: [](ch_snes), `SNESCOMPOSITE`, `PCCOMPOSITE`, `PCFIELDSPLIT`, `PC`, `PCCompositeSetType()`, `PCCompositeType` +E*/ typedef enum { SNES_COMPOSITE_ADDITIVE, SNES_COMPOSITE_MULTIPLICATIVE, diff --git a/include/petscstring.h b/include/petscstring.h index 56925c0f245..53b1a706afe 100644 --- a/include/petscstring.h +++ b/include/petscstring.h @@ -596,7 +596,7 @@ static inline PetscErrorCode PetscStrbeginswith(const char a[], const char b[], beginning at location `a`. Copying between regions that overlap will take place correctly. Use `PetscMemcpy()` if the locations do not overlap - Not Collective + Not Collective, No Fortran Support Input Parameters: + b - pointer to initial memory space @@ -650,7 +650,7 @@ static inline PetscErrorCode PetscMemmove(void *a, const void *b, size_t n) beginning at location `a`.
The two memory regions CANNOT overlap, use `PetscMemmove()` in that case. - Not Collective + Not Collective, No Fortran Support Input Parameters: + b - pointer to initial memory space @@ -712,7 +712,7 @@ static inline PetscErrorCode PetscMemcpy(void *a, const void *b, size_t n) /*@C PetscMemzero - Zeros the specified memory. - Not Collective + Not Collective, No Fortran Support Input Parameters: + a - pointer to beginning memory location diff --git a/include/petscsys.h b/include/petscsys.h index 7bfc17aecfa..759d119dd9a 100644 --- a/include/petscsys.h +++ b/include/petscsys.h @@ -15,6 +15,7 @@ directory as the other PETSc include files. */ #include +#include #include #include #include @@ -75,28 +76,28 @@ #ifndef MPIUNI_H #error "PETSc was configured with --with-mpi=0 but now appears to be compiling using a different mpi.h" #endif -#elif defined(PETSC_HAVE_I_MPI_NUMVERSION) +#elif defined(PETSC_HAVE_I_MPI) #if !defined(I_MPI_NUMVERSION) #error "PETSc was configured with I_MPI but now appears to be compiling using a non-I_MPI mpi.h" - #elif I_MPI_NUMVERSION != PETSC_HAVE_I_MPI_NUMVERSION + #elif I_MPI_NUMVERSION != PETSC_PKG_I_MPI_NUMVERSION #error "PETSc was configured with one I_MPI mpi.h version but now appears to be compiling using a different I_MPI mpi.h version" #endif -#elif defined(PETSC_HAVE_MVAPICH2_NUMVERSION) +#elif defined(PETSC_HAVE_MVAPICH2) #if !defined(MVAPICH2_NUMVERSION) #error "PETSc was configured with MVAPICH2 but now appears to be compiling using a non-MVAPICH2 mpi.h" - #elif MVAPICH2_NUMVERSION != PETSC_HAVE_MVAPICH2_NUMVERSION + #elif MVAPICH2_NUMVERSION != PETSC_PKG_MVAPICH2_NUMVERSION #error "PETSc was configured with one MVAPICH2 mpi.h version but now appears to be compiling using a different MVAPICH2 mpi.h version" #endif -#elif defined(PETSC_HAVE_MPICH_NUMVERSION) +#elif defined(PETSC_HAVE_MPICH) #if !defined(MPICH_NUMVERSION) || defined(MVAPICH2_NUMVERSION) || defined(I_MPI_NUMVERSION) #error "PETSc was configured with MPICH but now appears to be compiling using a non-MPICH mpi.h" - #elif (MPICH_NUMVERSION / 100000000 != PETSC_HAVE_MPICH_NUMVERSION / 100000000) || (MPICH_NUMVERSION / 100000 < PETSC_HAVE_MPICH_NUMVERSION / 100000) || (MPICH_NUMVERSION / 100000 == PETSC_HAVE_MPICH_NUMVERSION / 100000 && MPICH_NUMVERSION % 100000 / 1000 < PETSC_HAVE_MPICH_NUMVERSION % 100000 / 1000) + #elif !PETSC_PKG_MPICH_VERSION_EQ(MPICH_NUMVERSION / 10000000, MPICH_NUMVERSION / 100000 % 100, MPICH_NUMVERSION / 1000 % 100) #error "PETSc was configured with one MPICH mpi.h version but now appears to be compiling using a different MPICH mpi.h version" #endif -#elif defined(PETSC_HAVE_OMPI_MAJOR_VERSION) +#elif defined(PETSC_HAVE_OPENMPI) #if !defined(OMPI_MAJOR_VERSION) #error "PETSc was configured with Open MPI but now appears to be compiling using a non-Open MPI mpi.h" - #elif (OMPI_MAJOR_VERSION != PETSC_HAVE_OMPI_MAJOR_VERSION) || (OMPI_MINOR_VERSION < PETSC_HAVE_OMPI_MINOR_VERSION) || (OMPI_MINOR_VERSION == PETSC_HAVE_OMPI_MINOR_VERSION && OMPI_RELEASE_VERSION < PETSC_HAVE_OMPI_RELEASE_VERSION) + #elif !PETSC_PKG_OPENMPI_VERSION_EQ(OMPI_MAJOR_VERSION, OMPI_MINOR_VERSION, OMPI_RELEASE_VERSION) #error "PETSc was configured with one Open MPI mpi.h version but now appears to be compiling using a different Open MPI mpi.h version" #endif #elif defined(PETSC_HAVE_MSMPI_VERSION) @@ -278,16 +279,32 @@ M*/ /*MC PETSC_MPI_THREAD_REQUIRED - the required threading support used if PETSc initializes MPI with `MPI_Init_thread()`. 
+ No Fortran Support + Level: beginner Note: - By default `PETSC_MPI_THREAD_REQUIRED` equals `MPI_THREAD_FUNNELED` when the MPI implementation provides MPI_Init_thread(), otherwise it equals `MPI_THREAD_SINGLE` + By default `PETSC_MPI_THREAD_REQUIRED` equals `MPI_THREAD_FUNNELED` when the MPI implementation provides `MPI_Init_thread()`, otherwise it equals `MPI_THREAD_SINGLE` .seealso: `PetscInitialize()` M*/ PETSC_EXTERN PetscMPIInt PETSC_MPI_THREAD_REQUIRED; +/*MC + PetscBeganMPI - indicates if PETSc initialized MPI during `PetscInitialize()` or if MPI was already initialized. + + Synopsis: + #include + PetscBool PetscBeganMPI; + + No Fortran Support + + Level: developer + +.seealso: `PetscInitialize()`, `PetscInitializeCalled()` +M*/ PETSC_EXTERN PetscBool PetscBeganMPI; + PETSC_EXTERN PetscBool PetscErrorHandlingInitialized; PETSC_EXTERN PetscBool PetscInitializeCalled; PETSC_EXTERN PetscBool PetscFinalizeCalled; @@ -1286,7 +1303,7 @@ PETSC_EXTERN PetscErrorCode PetscMallocGetStack(void *, PetscStack **); PETSC_EXTERN PetscErrorCode PetscObjectsDump(FILE *, PetscBool); PETSC_EXTERN PetscErrorCode PetscObjectsView(PetscViewer); -PETSC_EXTERN PetscErrorCode PetscObjectsGetObject(const char *, PetscObject *, char **); +PETSC_EXTERN PetscErrorCode PetscObjectsGetObject(const char *, PetscObject *, const char **); PETSC_EXTERN PetscErrorCode PetscObjectListDestroy(PetscObjectList *); PETSC_EXTERN PetscErrorCode PetscObjectListFind(PetscObjectList, const char[], PetscObject *); PETSC_EXTERN PetscErrorCode PetscObjectListReverseFind(PetscObjectList, PetscObject, char **, PetscBool *); @@ -1415,11 +1432,9 @@ PETSC_EXTERN PetscErrorCode PetscHelpPrintfDefault(MPI_Comm, const char[], ...) PETSC_EXTERN PetscErrorCode PetscFormatConvertGetSize(const char *, size_t *); PETSC_EXTERN PetscErrorCode PetscFormatConvert(const char *, char *); -#if defined(PETSC_HAVE_POPEN) PETSC_EXTERN PetscErrorCode PetscPOpen(MPI_Comm, const char[], const char[], const char[], FILE **); PETSC_EXTERN PetscErrorCode PetscPClose(MPI_Comm, FILE *); PETSC_EXTERN PetscErrorCode PetscPOpenSetMachine(const char[]); -#endif PETSC_EXTERN PetscErrorCode PetscSynchronizedPrintf(MPI_Comm, const char[], ...) PETSC_ATTRIBUTE_FORMAT(2, 3); PETSC_EXTERN PetscErrorCode PetscSynchronizedFPrintf(MPI_Comm, FILE *, const char[], ...) 
PETSC_ATTRIBUTE_FORMAT(3, 4); @@ -1608,10 +1623,9 @@ PETSC_EXTERN PetscErrorCode MPIU_File_read_at_all(MPI_File, MPI_Offset, void *, static inline PetscErrorCode PetscIntCast(PetscInt64 a, PetscInt *b) { PetscFunctionBegin; - *b = 0; // if using 64-bit indices already then this comparison is tautologically true PetscCheck(a < PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt64_FMT " is too big for PetscInt, you may need to ./configure using --with-64-bit-indices", a); - *b = (PetscInt)a; + if (b) *b = (PetscInt)a; PetscFunctionReturn(PETSC_SUCCESS); } @@ -2096,6 +2110,19 @@ PETSC_EXTERN PetscErrorCode PetscMkdir(const char[]); PETSC_EXTERN PetscErrorCode PetscMkdtemp(char[]); PETSC_EXTERN PetscErrorCode PetscRMTree(const char[]); +/*MC + PetscBinaryBigEndian - indicates whether values in memory are stored in big-endian format + + Synopsis: + #include + PetscBool PetscBinaryBigEndian(void); + + No Fortran Support + + Level: developer + +.seealso: `PetscInitialize()`, `PetscFinalize()`, `PetscInitializeCalled` +M*/ static inline PetscBool PetscBinaryBigEndian(void) { long _petsc_v = 1; @@ -2212,9 +2239,32 @@ PETSC_EXTERN PetscErrorCode PetscSegBufferExtractInPlace(PetscSegBuffer, void *) PETSC_EXTERN PetscErrorCode PetscSegBufferGetSize(PetscSegBuffer, size_t *); PETSC_EXTERN PetscErrorCode PetscSegBufferUnuse(PetscSegBuffer, size_t); -/* Type-safe wrapper to encourage use of PETSC_RESTRICT. Does not use PetscFunctionBegin because the error handling - * prevents the compiler from completely erasing the stub. This is called in inner loops so it has to be as fast as - * possible. */ +/*MC + PetscSegBufferGetInts - access an array of `PetscInt` from a `PetscSegBuffer` + + Synopsis: + #include + PetscErrorCode PetscSegBufferGetInts(PetscSegBuffer seg, size_t count, PetscInt *PETSC_RESTRICT *slot); + + No Fortran Support + + Input Parameters: ++ seg - `PetscSegBuffer` buffer +- count - number of entries needed + + Output Parameter: +. slot - address of new buffer for contiguous data + + Level: intermediate + + Developer Note: + Type-safe wrapper to encourage use of PETSC_RESTRICT. Does not use PetscFunctionBegin because the error handling + prevents the compiler from completely erasing the stub. This is called in inner loops so it has to be as fast as + possible. + +.seealso: `PetscSegBuffer`, `PetscSegBufferGet()`, `PetscInitialize()`, `PetscFinalize()`, `PetscInitializeCalled` +M*/ +/* */ static inline PetscErrorCode PetscSegBufferGetInts(PetscSegBuffer seg, size_t count, PetscInt *PETSC_RESTRICT *slot) { return PetscSegBufferGet(seg, count, (void **)slot); @@ -2231,7 +2281,7 @@ PETSC_EXTERN PetscErrorCode (*PetscVFPrintf)(FILE *, const char[] PETSC_EXTERN PetscSegBuffer PetscCitationsList; -/*@C +/*@ PetscCitationsRegister - Register a bibtex item to obtain credit for an implemented algorithm used in the code.
Not Collective; No Fortran Support diff --git a/include/petsctao.h b/include/petsctao.h index 083b9b0d024..1c5e33bf356 100644 --- a/include/petsctao.h +++ b/include/petsctao.h @@ -508,4 +508,6 @@ PETSC_EXTERN PetscErrorCode TaoEstimateActiveBounds(Vec, Vec, Vec, Vec, Vec, Vec PETSC_EXTERN PetscErrorCode TaoBoundStep(Vec, Vec, Vec, IS, IS, IS, PetscReal, Vec); PETSC_EXTERN PetscErrorCode TaoBoundSolution(Vec, Vec, Vec, PetscReal, PetscInt *, Vec); +PETSC_EXTERN PetscErrorCode MatCreateSubMatrixFree(Mat, IS, IS, Mat *); + #include diff --git a/include/petsctime.h b/include/petsctime.h index d137f9d5c9d..6f8b9972dae 100644 --- a/include/petsctime.h +++ b/include/petsctime.h @@ -12,13 +12,9 @@ PETSC_EXTERN PetscErrorCode PetscGetCPUTime(PetscLogDouble *); /* Global counters */ PETSC_EXTERN PetscLogDouble petsc_BaseTime; -/*MC +/*@ PetscTime - Returns the current time from some base time in the past in seconds. - Synopsis: - #include - PetscErrorCode PetscTime(PetscLogDouble *v) - Not Collective Output Parameter: @@ -41,15 +37,16 @@ PETSC_EXTERN PetscLogDouble petsc_BaseTime; stages and events in application codes. .seealso: `PetscTimeSubtract()`, `PetscTimeAdd()`, `PetscLogStageRegister()`, `PetscLogEventRegister()`, `PetscLogEventBegin()`, `PetscLogEventEnd()` -M*/ +@*/ +static inline PetscErrorCode PetscTime(PetscLogDouble *v) +{ + *v = MPI_Wtime(); + return PETSC_SUCCESS; +} -/*MC +/*@ PetscTimeSubtract - Subtracts the current time (in seconds) from the value `v`. - Synopsis: - #include - PetscErrorCode PetscTimeSubtract(PetscLogDouble *v) - Not Collective Input Parameter: @@ -67,15 +64,16 @@ M*/ stages and events in application codes. .seealso: `PetscTime()`, `PetscTimeAdd()`, `PetscLogStageRegister()`, `PetscLogEventRegister()`, `PetscLogEventBegin()`, `PetscLogEventEnd()` -M*/ +@*/ +static inline PetscErrorCode PetscTimeSubtract(PetscLogDouble *v) +{ + *v -= MPI_Wtime(); + return PETSC_SUCCESS; +} -/*MC +/*@ PetscTimeAdd - Adds the current time (in seconds) to the value `v`. - Synopsis: - #include - PetscErrorCode PetscTimeAdd(PetscLogDouble *v) - Not Collective Input Parameter: @@ -91,20 +89,7 @@ M*/ The options database command `-log_view` activates PETSc library timing. 
.seealso: `PetscTime()`, `PetscTimeSubtract()`, `PetscLogStageRegister()`, `PetscLogEventRegister()`, `PetscLogEventBegin()`, `PetscLogEventEnd()` -M*/ - -static inline PetscErrorCode PetscTime(PetscLogDouble *v) -{ - *v = MPI_Wtime(); - return PETSC_SUCCESS; -} - -static inline PetscErrorCode PetscTimeSubtract(PetscLogDouble *v) -{ - *v -= MPI_Wtime(); - return PETSC_SUCCESS; -} - +@*/ static inline PetscErrorCode PetscTimeAdd(PetscLogDouble *v) { *v += MPI_Wtime(); diff --git a/include/petscts.h b/include/petscts.h index 27426b5c2f3..83eeabdd617 100644 --- a/include/petscts.h +++ b/include/petscts.h @@ -1367,7 +1367,11 @@ typedef const char *TSRosWType; #define TSROSW2P "2p" #define TSROSWRA3PW "ra3pw" #define TSROSWRA34PW2 "ra34pw2" +#define TSROSWR34PRW "r34prw" +#define TSROSWR3PRL2 "r3prl2" #define TSROSWRODAS3 "rodas3" +#define TSROSWRODASPR "rodaspr" +#define TSROSWRODASPR2 "rodaspr2" #define TSROSWSANDU3 "sandu3" #define TSROSWASSP3P3S1C "assp3p3s1c" #define TSROSWLASSP3P4S2C "lassp3p4s2c" diff --git a/include/petscvec.h b/include/petscvec.h index ff8bf23d83d..366dc95ce31 100644 --- a/include/petscvec.h +++ b/include/petscvec.h @@ -145,6 +145,7 @@ PETSC_EXTERN PetscErrorCode VecZeroEntries(Vec); PETSC_EXTERN PetscErrorCode VecSetOptionsPrefix(Vec, const char[]); PETSC_EXTERN PetscErrorCode VecAppendOptionsPrefix(Vec, const char[]); PETSC_EXTERN PetscErrorCode VecGetOptionsPrefix(Vec, const char *[]); +PETSC_EXTERN PetscErrorCode VecGetState(Vec, PetscObjectState *); PETSC_EXTERN PetscErrorCode VecSetSizes(Vec, PetscInt, PetscInt); @@ -370,13 +371,9 @@ PETSC_EXTERN PetscErrorCode VecSetPreallocationCOO(Vec, PetscCount, const PetscI PETSC_EXTERN PetscErrorCode VecSetPreallocationCOOLocal(Vec, PetscCount, PetscInt[]); PETSC_EXTERN PetscErrorCode VecSetValuesCOO(Vec, const PetscScalar[], InsertMode); -/*MC +/*@C VecSetValue - Set a single entry into a vector. - Synopsis: - #include - PetscErrorCode VecSetValue(Vec v,PetscInt row,PetscScalar value, InsertMode mode); - Not Collective Input Parameters: @@ -397,7 +394,7 @@ PETSC_EXTERN PetscErrorCode VecSetValuesCOO(Vec, const PetscScalar[], InsertMode `VecSetValue()` uses 0-based indices in Fortran as well as in C. .seealso: [](ch_vectors), `VecSetValues()`, `VecAssemblyBegin()`, `VecAssemblyEnd()`, `VecSetValuesBlockedLocal()`, `VecSetValueLocal()` -M*/ +@*/ static inline PetscErrorCode VecSetValue(Vec v, PetscInt i, PetscScalar va, InsertMode mode) { return VecSetValues(v, 1, &i, &va, mode); @@ -480,13 +477,9 @@ PETSC_EXTERN PetscErrorCode VecViennaCLRestoreCLMemWrite(Vec); PETSC_EXTERN PetscErrorCode VecViennaCLGetCLMem(Vec, PETSC_UINTPTR_T *); PETSC_EXTERN PetscErrorCode VecViennaCLRestoreCLMem(Vec); -/*MC +/*@C VecSetValueLocal - Set a single entry into a vector using the local numbering, see `VecSetValuesLocal()` - Synopsis: - #include - PetscErrorCode VecSetValueLocal(Vec v,PetscInt row,PetscScalar value, InsertMode mode); - Not Collective Input Parameters: @@ -507,7 +500,7 @@ PETSC_EXTERN PetscErrorCode VecViennaCLRestoreCLMem(Vec); `VecSetValues()` uses 0-based indices in Fortran as well as in C. 
.seealso: [](ch_vectors), `VecSetValuesLocal()`, `VecSetValues()`, `VecAssemblyBegin()`, `VecAssemblyEnd()`, `VecSetValuesBlockedLocal()`, `VecSetValue()` -M*/ +@*/ static inline PetscErrorCode VecSetValueLocal(Vec v, PetscInt i, PetscScalar va, InsertMode mode) { return VecSetValuesLocal(v, 1, &i, &va, mode); } @@ -638,12 +631,12 @@ static inline PetscErrorCode VecRestoreArrayPair(Vec x, Vec y, PetscScalar **xv, PetscFunctionReturn(PETSC_SUCCESS); } -#if PetscDefined(USE_DEBUG) -PETSC_EXTERN PetscErrorCode VecLockReadPush(Vec); -PETSC_EXTERN PetscErrorCode VecLockReadPop(Vec); -PETSC_EXTERN PetscErrorCode VecLockWriteSet(Vec, PetscBool); -PETSC_EXTERN PetscErrorCode VecLockGet(Vec, PetscInt *); -PETSC_EXTERN PetscErrorCode VecLockGetLocation(Vec, const char *[], const char *[], int *); +PETSC_EXTERN PetscErrorCode VecLockReadPush(Vec); +PETSC_EXTERN PetscErrorCode VecLockReadPop(Vec); +PETSC_EXTERN PetscErrorCode VecLockWriteSet(Vec, PetscBool); +PETSC_EXTERN PetscErrorCode VecLockGet(Vec, PetscInt *); +PETSC_EXTERN PetscErrorCode VecLockGetLocation(Vec, const char *[], const char *[], int *); + static inline PetscErrorCode VecSetErrorIfLocked(Vec x, PetscInt arg) { PetscInt state; @@ -660,21 +653,19 @@ static inline PetscErrorCode VecSetErrorIfLocked(Vec x, PetscInt arg) } PetscFunctionReturn(PETSC_SUCCESS); } + /* The three are deprecated */ -PETSC_EXTERN PETSC_DEPRECATED_FUNCTION(3, 11, 0, "VecLockReadPush()", ) PetscErrorCode VecLockPush(Vec); -PETSC_EXTERN PETSC_DEPRECATED_FUNCTION(3, 11, 0, "VecLockReadPop()", ) PetscErrorCode VecLockPop(Vec); - #define VecLocked(x, arg) VecSetErrorIfLocked(x, arg) PETSC_DEPRECATED_MACRO(3, 11, 0, "VecSetErrorIfLocked()", ) -#else - #define VecLockReadPush(x) PETSC_SUCCESS - #define VecLockReadPop(x) PETSC_SUCCESS - #define VecLockGet(x, s) (*(s) = 0, PETSC_SUCCESS) - #define VecSetErrorIfLocked(x, arg) PETSC_SUCCESS - #define VecLockWriteSet(x, flg) PETSC_SUCCESS - /* The three are deprecated */ - #define VecLockPush(x) PETSC_SUCCESS - #define VecLockPop(x) PETSC_SUCCESS - #define VecLocked(x, arg) PETSC_SUCCESS -#endif +PETSC_DEPRECATED_FUNCTION(3, 11, 0, "VecLockReadPush()", ) static inline PetscErrorCode VecLockPush(Vec v) +{ + return VecLockReadPush(v); +} + +PETSC_DEPRECATED_FUNCTION(3, 11, 0, "VecLockReadPop()", ) static inline PetscErrorCode VecLockPop(Vec v) +{ + return VecLockReadPop(v); +} + +#define VecLocked(x, arg) VecSetErrorIfLocked(x, arg) PETSC_DEPRECATED_MACRO(3, 11, 0, "VecSetErrorIfLocked()", ) /*E VecOperation - Enumeration of overridable methods in the `Vec` implementation function-table. diff --git a/include/petscvec_kokkos.hpp b/include/petscvec_kokkos.hpp index 624366b43eb..4309871a144 100644 --- a/include/petscvec_kokkos.hpp +++ b/include/petscvec_kokkos.hpp @@ -25,7 +25,7 @@ PetscErrorCode VecGetKokkosView (Vec v,Kokkos::View* kv); PetscErrorCode VecGetKokkosView (Vec v,Kokkos::View* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameter: .
v - the vector of type `VECKOKKOS` @@ -62,7 +62,7 @@ PetscErrorCode VecGetKokkosView(Vec, Kokkos::View *) PetscErrorCode VecRestoreKokkosView (Vec v,Kokkos::View* kv); PetscErrorCode VecRestoreKokkosView (Vec v,Kokkos::View* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + v - the vector of type `VECKOKKOS` @@ -92,7 +92,7 @@ PetscErrorCode VecRestoreKokkosView(Vec, Kokkos::View PetscErrorCode VecGetKokkosViewWrite (Vec v,Kokkos::View* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameter: . v - the vector of type `VECKOKKOS` @@ -124,7 +124,7 @@ PetscErrorCode VecGetKokkosViewWrite(Vec, Kokkos::View PetscErrorCode VecRestoreKokkosViewWrite (Vec v,Kokkos::View* kv); - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + v - the vector of type `VECKOKKOS` diff --git a/include/petscversion.h b/include/petscversion.h index f04e53c8a80..b4b229c009b 100644 --- a/include/petscversion.h +++ b/include/petscversion.h @@ -4,9 +4,9 @@ #define PETSC_VERSION_RELEASE 0 #define PETSC_VERSION_MAJOR 3 -#define PETSC_VERSION_MINOR 20 -#define PETSC_VERSION_SUBMINOR 5 -#define PETSC_RELEASE_DATE "Sep 28, 2023" +#define PETSC_VERSION_MINOR 21 +#define PETSC_VERSION_SUBMINOR 2 +#define PETSC_RELEASE_DATE "Mar 29, 2024" #define PETSC_VERSION_DATE "unknown" #if !defined(PETSC_VERSION_GIT) diff --git a/include/petscviewer.h b/include/petscviewer.h index 16ac6791fd0..5383426bc68 100644 --- a/include/petscviewer.h +++ b/include/petscviewer.h @@ -73,7 +73,6 @@ PETSC_EXTERN PetscErrorCode PetscViewerDrawGetDrawLG(PetscViewer, PetscInt, Pets PETSC_EXTERN PetscErrorCode PetscViewerDrawGetDrawAxis(PetscViewer, PetscInt, PetscDrawAxis *); PETSC_EXTERN PetscErrorCode PetscViewerMathematicaOpen(MPI_Comm, int, const char[], const char[], PetscViewer *); -PETSC_EXTERN PetscErrorCode PetscViewerSiloOpen(MPI_Comm, const char[], PetscViewer *); PETSC_EXTERN PetscErrorCode PetscViewerMatlabOpen(MPI_Comm, const char[], PetscFileMode, PetscViewer *); /*E diff --git a/lib/petsc/bin/maint/builddist b/lib/petsc/bin/maint/builddist index 503df7acedd..9332afc0dac 100755 --- a/lib/petsc/bin/maint/builddist +++ b/lib/petsc/bin/maint/builddist @@ -21,6 +21,8 @@ #echo "(6) tag the new release with git and make a new clone for the release" #echo "(7) got the users manual CLEARED by ANL publications for release" +set -e + # If version specified on the commandline, set the version echo "Starting date: `date +'%a, %d %b %Y %H:%M:%S %z'`" @@ -110,9 +112,9 @@ python3 -m venv $VENV source $VENV/bin/activate cd $PETSC_DIR/doc python3 -m pip install -r requirements.txt -PETSCBUIDTARBALL=1 make html BUILDDIR="../docs" SPHINXOPTS="-T -E -j 6" +PETSCBUIDTARBALL=1 make html BUILDDIR="../docs" SPHINXOPTS="-T -E -j 1" mv ../docs/html/* ../docs/html/.[!.]* ../docs -make latexpdf SPHINXOPTS="-j 6" && cp _build/latex/manual.pdf ../docs/manual/ +make latexpdf SPHINXOPTS="-j 1" && cp _build/latex/manual.pdf ../docs/manual/ # Build fortranstubs cd $PETSC_DIR/ diff --git a/lib/petsc/bin/maint/generatefortranstubs.py b/lib/petsc/bin/maint/generatefortranstubs.py index a610b9e8239..de12d1dfcdb 100644 --- a/lib/petsc/bin/maint/generatefortranstubs.py +++ b/lib/petsc/bin/maint/generatefortranstubs.py @@ -3,9 +3,12 @@ # Generates fortran stubs for PETSc using the Sowing bfort program # # This tool looks for the values MANSEC and [BFORT]SUBMANSEC (where BFORTSUBMANSEC has priority over SUBMANSEC) -# defined in the makefile (or include file when there is
no makefile) +# defined in the makefile # -# The F90 generated interface fils are stored in src/MANSEC/f90-mod/ftn-auto-interfaces/petsc[BFORT]SUBMANSEC.h90 +# The F90 generated interface files are stored in $PETSC_ARCH/src/MANSEC/f90-mod/ftn-auto-interfaces/petsc[BFORT]SUBMANSEC.h90 +# The Fortran stub files are stored in $PETSC_ARCH/directory/ftn-auto where directory is the directory of the original source +# +# Stubs/interfaces generated from include can only involve sys files # # These are then included by the petsc[[BFORT]SUB]MANSECmod.F90 files to create the Fortran module files # @@ -43,9 +46,9 @@ def findLineCol(filename, string): data = re.subn('\00','',data)[0] data = re.subn('\nvoid ','\nPETSC_EXTERN void ',data)[0] - data = re.subn('\nPetscErrorCode ','\nPETSC_EXTERN void ',data)[0] + data = re.subn('\n[ ]*PetscErrorCode ','\nPETSC_EXTERN void ',data)[0] data = re.subn(r'Petsc([ToRm]*)Pointer\(int\)','Petsc\\1Pointer(void*)',data)[0] - data = re.subn(r'PetscToPointer\(a\) \(a\)','PetscToPointer(a) (*(PetscFortranAddr *)(a))',data)[0] + data = re.subn(r'PetscToPointer\(a\) \(a\)','PetscToPointer(a) (a ? *(PetscFortranAddr *)(a) : 0)',data)[0] data = re.subn(r'PetscFromPointer\(a\) \(int\)\(a\)','PetscFromPointer(a) (PetscFortranAddr)(a)',data)[0] data = re.subn(r'PetscToPointer\( \*\(int\*\)','PetscToPointer(',data)[0] data = re.subn('MPI_Comm comm','MPI_Comm *comm',data)[0] @@ -102,7 +105,7 @@ def FixDir(petscdir,petscarch,parentdir,dir,verbose): for filename in [f for f in os.listdir(parentdir) if re.match(r'f90module[0-9]+.f90', f)]: os.remove(os.path.join(parentdir, filename)) return - + mfile=os.path.abspath(os.path.join(parentdir,'makefile')) try: fd=open(mfile,'r') @@ -130,6 +133,10 @@ def FixDir(petscdir,petscarch,parentdir,dir,verbose): if line.find('MANSEC') >=0 and not line.find('SUBMANSEC') >=0: mansec = line.split('=')[1].lower().strip() + # this is a hack for the include directory and will be wrong for any + # non-sys static include functions + if mansec == 'unknown': mansec = 'sys' + if submansec == 'unknown': submansec = 'sys' if not bfortsubmansec == 'unknown': submansec = bfortsubmansec @@ -150,7 +157,6 @@ def FixDir(petscdir,petscarch,parentdir,dir,verbose): txt = fd.read() fd.close() if txt: - if mansec == 'unknown': raise RuntimeError("Cannot have unknown mansec in " + parentdir) if not os.path.isdir(os.path.join(petscdir,petscarch,'src',mansec,'f90-mod','ftn-auto-interfaces')): os.makedirs(os.path.join(petscdir,petscarch,'src',mansec,'f90-mod','ftn-auto-interfaces')) if not os.path.isdir(os.path.join(petscdir,petscarch,'src',mansec,'f90-mod','ftn-auto-interfaces',submansec+'-tmpdir')): os.makedirs(os.path.join(petscdir,petscarch,'src',mansec,'f90-mod','ftn-auto-interfaces',submansec+'-tmpdir')) fname = os.path.join(petscdir,petscarch,'src',mansec,'f90-mod','ftn-auto-interfaces',submansec+'-tmpdir',os.path.relpath(parentdir,petscdir).replace('/','_')+'.h90') @@ -176,18 +182,19 @@ def processDir(petscdir, petscarch,bfort, verbose, dirpath, dirnames, filenames) ''' Runs bfort on a directory and then fixes the files generated by bfort including moving generated F90 fortran interface files''' if not dirpath.startswith(petscdir): raise RuntimeError("Error, the directory being processed "+dirpath+" does not begin with PETSC_DIR "+petscdir) sdirpath = dirpath.replace(petscdir+'/','') + if sdirpath == 'include': sdirpath = os.path.join('src','sys') outdir = os.path.join(petscdir,petscarch,sdirpath,'ftn-auto') if filenames: PrepFtnDir(outdir) options = ['-dir 
'+outdir, '-mnative', '-ansi', '-nomsgs', '-noprofile', '-anyname', '-mapptr', '-mpi', '-shortargname', '-ferr', '-ptrprefix Petsc', '-ptr64 PETSC_USE_POINTER_CONVERSION', '-fcaps PETSC_HAVE_FORTRAN_CAPS', '-fuscore PETSC_HAVE_FORTRAN_UNDERSCORE', - '-f90mod_skip_header','-on_error_abort'] + '-f90mod_skip_header', '-on_error_abort', '-fstring'] split_ct = 10 for i in range(0, len(filenames), split_ct): cmd = 'BFORT_CONFIG_PATH='+os.path.join(petscdir,'lib','petsc','conf')+' '+bfort+' '+' '.join(options+filenames[i:i+split_ct])+' -f90modfile f90module'+str(i)+'.f90' try: - output = check_output(cmd, cwd=dirpath, shell=True, stderr=subprocess.STDOUT) + output = check_output(cmd, cwd=dirpath, shell=True).decode('utf-8') except subprocess.CalledProcessError as e: raise SystemError(str(e)+'\nIn '+dirpath+'\n'+e.output.decode(encoding='UTF-8',errors='replace')); FixDir(petscdir,petscarch, dirpath,outdir,verbose) @@ -250,11 +257,15 @@ def processf90interfaces(petscdir,petscarch,verbose): for sfile in os.listdir(tmpDir): if verbose: print(' Copying in '+sfile) with open(os.path.join(tmpDir,sfile),'r') as fdr: - for ibuf in fdr.read().split(' subroutine')[1:]: + buf = fdr.read() + if buf.startswith('#if defined(PETSC_HAVE_FORTRAN_TYPE_STAR)'): + fd.write('#if defined(PETSC_HAVE_FORTRAN_TYPE_STAR)\n') + for ibuf in buf.split(' subroutine')[1:]: ibuf = ' subroutine'+ibuf ibuf = ibuf.replace('integer z','PetscErrorCode z') ibuf = ibuf.replace('integer a ! MPI_Comm','MPI_Comm a ! MPI_Comm') - plist = [p for p in ptypes if ' '+p[1:]+' ' in ibuf] + plist = [p for p in ptypes if ' '+p[1:]+' ' in ibuf or ('AO' in ibuf and p == 'tPetscAO')] # see src/vec/f90-mod/petscao.h + if 'PetscObject' in ibuf: plist.append('tPetscObject') if plist: ibuf = ibuf.replace(')',')\n import '+','.join(set(plist)),1) fd.write(ibuf) shutil.rmtree(tmpDir) @@ -262,10 +273,11 @@ def processf90interfaces(petscdir,petscarch,verbose): return def main(petscdir,petscarch,bfort,dir,verbose): - for dirpath, dirnames, filenames in os.walk(dir, topdown=True): - filenames = [i for i in filenames if not i.find('#') > -1 and os.path.splitext(i)[1] in ['.c','.h','.cxx','.cu']] - dirnames[:] = [d for d in dirnames if d not in ['output','tutorials','tests','binding','benchmarks'] and not d.startswith('ftn-') and not d.startswith('f90-')] - processDir(petscdir, petscarch,bfort, verbose, dirpath, dirnames, filenames) + for p in [ os.path.join(dir,'include'), os.path.join(dir,'src') ]: + for dirpath, dirnames, filenames in os.walk(p, topdown=True): + filenames = [i for i in filenames if not i.find('#') > -1 and os.path.splitext(i)[1] in ['.c','.h','.cxx','.cu']] + dirnames[:] = [d for d in dirnames if d not in ['output', 'binding', 'tests', 'tutorials', 'yaml']] + processDir(petscdir, petscarch,bfort, verbose, dirpath, dirnames, filenames) return # # generatefortranstubs bfortexecutable -verbose ----- generates fortran stubs for a directory and all its children @@ -311,7 +323,5 @@ def not_empty(v): assert isinstance(args.bfort, (list, tuple)) bfort_exec = args.bfort[0] assert isinstance(bfort_exec, str) - ret = main( - args.petsc_dir, args.petsc_arch, bfort_exec, os.path.join(args.petsc_dir, 'src'), args.verbose - ) + ret = main(args.petsc_dir, args.petsc_arch, bfort_exec, args.petsc_dir, args.verbose) sys.exit(ret) diff --git a/lib/petsc/bin/maint/petsclinter/petsclinter/classes/docs/_doc_section.py index c468234d9dd..2e34f9e3d64 100644 ---
a/lib/petsc/bin/maint/petsclinter/petsclinter/classes/docs/_doc_section.py +++ b/lib/petsc/bin/maint/petsclinter/petsclinter/classes/docs/_doc_section.py @@ -462,9 +462,9 @@ def _check_fortran_interface(self, docstring: PetscDocStringImpl, fnargs: tuple[ for arg in fnargs: kind = self._get_deref_pointer_cursor_type(arg).kind - if kind in clx_char_type_kinds: - requires_c.append((arg, 'char pointer')) - elif kind in clx_function_type_kinds: + #if kind in clx_char_type_kinds: + # requires_c.append((arg, 'char pointer')) + if kind in clx_function_type_kinds: requires_c.append((arg, 'function pointer')) if len(requires_c): diff --git a/lib/petsc/bin/maint/petsclinter/petsclinter/classes/docs/_doc_str.py b/lib/petsc/bin/maint/petsclinter/petsclinter/classes/docs/_doc_str.py index 670db5c5c7c..fc86d33a89d 100644 --- a/lib/petsc/bin/maint/petsclinter/petsclinter/classes/docs/_doc_str.py +++ b/lib/petsc/bin/maint/petsclinter/petsclinter/classes/docs/_doc_str.py @@ -335,7 +335,7 @@ def is_heading(self, line: str, prev_line: str) -> Verdict: whether the line is a heading """ def handle_header_with_colon(text: str) -> Verdict: - if text.endswith('\:'): + if text.endswith(r'\:'): return Verdict.NOT_HEADING textlo = text.casefold() diff --git a/lib/petsc/bin/maint/runjobs.py b/lib/petsc/bin/maint/runjobs.py new file mode 100755 index 00000000000..a25f4eb526a --- /dev/null +++ b/lib/petsc/bin/maint/runjobs.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python3 + +import sys +from subprocess import check_output + +# runjobs.py [-f] [job1 job2 ... jobN] +# +# Sets a list of jobs to run upon the next push of the branch that is in a merge request. +# If no jobs are listed then all jobs in the pipeline are run but without a need to un-pause the pipeline on the GitLab site. +# +# -f: when commits in the local "branch" are not in sync with "origin/branch" - runjobs.py will not create a new local CI commit +# for the specified "jobs list". Use '-f' to force the creation of this commit [and then use 'git push -f' to update this +# branch's contents at GitLab] - if the intention is to overwrite these differences with your local files. +# Otherwise, sync your local branch with "origin/branch" changes before running runjobs.py. 
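+# For example (hypothetical job name): running ./lib/petsc/bin/maint/runjobs.py linux-gcc-quad selects the job whose
+# .gitlab-ci.yml entry contains TEST_ARCH: arch-ci-linux-gcc-quad; a subsequent git push then runs only that job.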
+# +force = (len(sys.argv) > 1 and sys.argv[1] == '-f') +if force: + alljobs = (sys.argv[2:] == []) + jobs = sys.argv[2:] +else: + alljobs = (sys.argv[1:] == []) + jobs = sys.argv[1:] + +try: + check_output(r'git diff-index --quiet HEAD --', shell=True) +except Exception: + print('Do not run on a repository with any uncommited changes') + sys.exit(0) + +if not force: + try: + check_output('git fetch', shell=True).decode('utf-8') + branch = check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode('utf-8').strip() + base = check_output('git merge-base ' + branch + ' remotes/origin/' + branch, shell=True).decode('utf-8').strip() + aref = check_output('git rev-parse ' + branch, shell=True).decode('utf-8').strip() + bref = check_output('git rev-parse remotes/origin/' + branch, shell=True).decode('utf-8').strip() + except Exception: + print('Unable to run git commits, not submitting jobs') + sys.exit(0) + if not aref == bref and not bref == base: + print('Your repository is behind or has diverged from GitLab, not running jobs') + print('If you plan to run git push with -f then use -f with this command') + sys.exit(0) + +with open('.gitlab-ci.yml','r') as fd: + ci = fd.read() + +Success_Message = 'Do a git push to start the job(s); after the pipeline finishes REMOVE commit ' +File_Message = '# gitlab-ci.yml was automatically generated (by lib/petsc/bin/maint/runjobs.py) for running CI jobs: ' + +start = ci.find(File_Message) +if start > -1: + start += 100 + end = ci.find('\n') + try: commit = check_output('git rev-list -1 HEAD .gitlab-ci.yml', shell=True).decode('utf-8').strip() + except Exception: commit = '' + if (not alljobs and not ci[start:end] == ' all jobs' and eval(ci[start:end]) == jobs) or (alljobs and ci[start:end] == ' all jobs'): + print(Success_Message + commit) + else: + print('runjobs.py was previously run (with different jobs), rerun after you REMOVE commit '+commit) + sys.exit(0) + +arches = list(jobs) +for arch in arches.copy(): + if ci.find('TEST_ARCH: arch-ci-'+arch) > -1: + arches.remove(arch) +if arches: + print('Could not locate job(s) '+str(arches)) + sys.exit(0) + +extraJob=''' +using-runjobs: + extends: .test-basic + stage: .pre + tags: + - gce-stage1 + script: + - exit 5 + variables: + GIT_STRATEGY: none + allow_failure: true + +''' + +with open('.gitlab-ci.yml','w') as fd: + if not alljobs: fd.write(File_Message + ' '+str(jobs)+'\n') + else: fd.write(File_Message + 'all jobs\n') + for a in ci.split('\n\n'): + if a.startswith('pause-for-approval:'): continue + if not alljobs: + if a.startswith('# job for analyzing the coverage results '): break + if a.find('CONFIG_OPTS: ') > -1: continue + if a.find('petsc4py-') > -1: continue + if a.find('check-each-commit:') > -1: continue + test_arch = a.find('TEST_ARCH: ') + if test_arch > -1: + arch = a[test_arch+19:] + arch = arch[:arch.find('\n')] + if arch in jobs: + fd.write(a+'\n\n') + else: + fd.write(a+'\n\n') + else: + fd.write(a+'\n\n') + if not alljobs: fd.write(extraJob) + +try: + output = check_output('git add .gitlab-ci.yml', shell=True) + if not alljobs: + output = check_output('git commit -m"DRAFT: CI: Temporary commit, remove before merge! Runs only jobs: '+str(jobs)+'"', shell=True) + else: + output = check_output('git commit -m"DRAFT: CI: Temporary commit, remove before merge! 
Runs all jobs immediately, no unpause needed at GitLab"', shell=True) +except Exception: + print('Unable to commit changed .gitlab-ci.yml file') + sys.exit(0) + +try: commit = check_output('git rev-list -1 HEAD .gitlab-ci.yml', shell=True).decode('utf-8').strip() +except Exception: commit = '' +print(Success_Message + commit) diff --git a/lib/petsc/bin/maint/toclapack.sh b/lib/petsc/bin/maint/toclapack.sh index f2e14e64eec..04248a1511a 100755 --- a/lib/petsc/bin/maint/toclapack.sh +++ b/lib/petsc/bin/maint/toclapack.sh @@ -207,7 +207,7 @@ dist: cleanblaslapck cleanlib # 2) Transform fortran source to c from blas and lapack -# Create list of files that won't be processed by fc2 +# Create list of files that won't be processed by f2c # Those functions correspond to extra precision routines and codes with f90 constructs cat > ${TMP}/skip.list << EOF SXLASRC = sgesvxx.o sgerfsx.o sla_gerfsx_extended.o sla_geamv.o \ diff --git a/lib/petsc/bin/maint/xclude b/lib/petsc/bin/maint/xclude index 2e2d9c38c2c..3e1737f04fc 100755 --- a/lib/petsc/bin/maint/xclude +++ b/lib/petsc/bin/maint/xclude @@ -19,7 +19,7 @@ petsc-dist/externalpackages petsc-dist/tmp petsc-dist/src/binding/petsc4py/build petsc-dist/src/binding/petsc4py/dist -petsc-dist/src/binding/petsc4py/petsc4py.egg-info +petsc-dist/src/binding/petsc4py/src/petsc4py.egg-info petsc-dist/src/binding/petsc4py/.eggs petsc-dist/lib/petsc/conf/petscvariables petsc-dist/config/BuildSystem/.hg diff --git a/lib/petsc/conf/bfort-petsc.txt b/lib/petsc/conf/bfort-petsc.txt index 9393dd9ea70..3fcbf9787c8 100644 --- a/lib/petsc/conf/bfort-petsc.txt +++ b/lib/petsc/conf/bfort-petsc.txt @@ -3,9 +3,15 @@ # petscao.h nativeptr AO -# petscbag.h +# petscbag.h This is not a PETSc object nativeptr PetscBag +# this is not a PETSc object +native PetscSFNode + +# petscdt.h +native PetscDTNodeType + # petscdm.h native DMBoundaryType native DMPointLocationType @@ -43,8 +49,6 @@ native DMDALocalInfo native DMDAInterpolationType native DMDAElementType -nativeptr PetscGLL - # dmlabel.h nativeptr DMLabel @@ -159,6 +163,7 @@ native PCHPDDMCoarseCorrectionType native PCMGType native PCMGCycleType native PCMGGalerkinType +native PCMGCoarseSpaceType native PCExoticType native PCDeflationSpaceType native PCBDDCInterfaceExtType @@ -171,7 +176,7 @@ nativeptr PetscConvEst native SNESConvergedReason native SNESLineSearchReason nativeptr MatMFFD -nativeptr MatMFFDType +native MatMFFDType nativeptr SNESLineSearch native SNESLineSearchOrder native SNESNormSchedule @@ -186,6 +191,7 @@ native SNESNewtonTRFallbackType # petscsnesfas.h native SNESFASType +native SNESFunctionType # petsctao.h nativeptr Tao @@ -194,6 +200,7 @@ native TaoConvergedReason native TaoADMMUpdateType native TaoADMMRegularizerType native TaoALMMType +native TaoSubsetType # petscsys.h native PetscBool @@ -202,6 +209,9 @@ native PetscOffset native PetscFortranAddr native PetscCount +nativeptr PetscDevice +nativeptr PetscDeviceContext + native PetscInt native PetscInt64 native PetscObjectState @@ -234,7 +244,7 @@ native PetscReal native MatScalar -native PetscObject +nativeptr PetscObject native PetscLogDouble nativeptr PetscMatlabEngine @@ -289,3 +299,66 @@ nativeptr PetscViewerAndFormat nativeptr PetscViewers native PetscFileMode native PetscViewerFormat + +char AOType +char PetscBenchType +char CharacteristicType +char DMType +char DMFieldType +char DMForestTopology +char DMForestAdaptivityStrategy +char DMLabelType +char DMPlexTransformType +char PetscDrawType +char PetscDSType +char PetscDualSpaceType +char 
PetscFEType +char PetscLimiterType +char PetscFVType +char ISType +char ISLocalToGlobalMappingType +char KSPType +char KSPGuessType +char PetscLogHandlerType +char MatType +char MatSolverType +char MatProductAlgorithm +char MatOrderingType +char MatColoringType +char MatPartitioningType +char MatMFFDType +char MatCoarsenType +char PetscPartitionerType +char PCType +char PCGAMGType +char PCGAMGClassicalType +char PFType +char PetscSectionSymType +char PetscSFType +char SNESType +char SNESLineSearchType +char SNESMSType +char PetscSpaceType +char PetscRandomType +char TaoType +char TaoLineSearchType +char TSType +char TSTrajectoryType +char TSSSPType +char TSAdaptType +char TSGLLEAdaptType +char TSGLLEAcceptType +char TSGLLEType +char TSRKType +char TSMPRKType +char TSIRKType +char TSGLEEType +char TSARKIMEXType +char TSDIRKType +char TSRosWType +char TSBasicSymplecticType +char VecType +char VecTaggerType +char PetscViewerType +char DMForestTopology +char DMForestAdaptivityStrategy diff --git a/lib/petsc/conf/rules b/lib/petsc/conf/rules index 8fc3002c623..a62da1a9f32 100644 --- a/lib/petsc/conf/rules +++ b/lib/petsc/conf/rules @@ -240,7 +240,7 @@ endif ifdef SEPARATE_COMPILE_LINK % : %.c ${PETSC_COMPILE_SINGLE} $< - ${CLINKER} $(EXEFLAGS) -o $@ $@.o $(filter-out $@.o $<,$^) $(PETSC_LIB) + ${CLINKER} $(LOADLIBES) -o $@ $@.o $(filter-out $@.o $<,$^) $(LDLIBS) endif # We don't have % : %.cu target as we can't use nvcc as linker - due to difference in linker options [wrt CLINKER etc.]. For example, nvcc does not accept -Wl,-rpath diff --git a/lib/petsc/conf/rules_util.mk b/lib/petsc/conf/rules_util.mk index ff6a24e8252..72addbf3cbf 100644 --- a/lib/petsc/conf/rules_util.mk +++ b/lib/petsc/conf/rules_util.mk @@ -121,6 +121,8 @@ checkbadSource: -@git --no-pager grep -n -P 'PetscFunctionReturn(ierr)' -- ${GITSRC} >> checkbadSource.out;true -@echo "----- .seealso with leading white spaces ---------------------------" >> checkbadSource.out -@git --no-pager grep -n -P -E '^[ ]+.seealso:' -- ${GITSRC} ':!src/sys/tests/linter/*' >> checkbadSource.out;true + -@echo "----- .seealso with double backticks -------------------------------" >> checkbadSource.out + -@git --no-pager grep -n -P -E '^.seealso:.*``' -- ${GITSRC} >> checkbadSource.out;true -@echo "----- Defining a returning macro without PetscMacroReturns() -------" >> checkbadSource.out -@git --no-pager grep -n -P 'define .*\w+;\s+do' -- ${GITSRC} | grep -E -v '(PetscMacroReturns|PetscDrawCollectiveBegin|MatPreallocateBegin|PetscOptionsBegin|PetscObjectOptionsBegin|PetscOptionsHeadEnd)' >> checkbadSource.out;true -@echo "----- Defining an error checking macro using CHKERR style ----------" >> checkbadSource.out @@ -145,9 +147,9 @@ checkbadSource: -@git --no-pager grep -n -E -B 1 ' PetscFunctionReturn' -- ${GITSRC} | grep -E '\-[0-9]+-$$' | grep -v '^--$$' >> checkbadSource.out;true -@echo "----- No blank line before PetscFunctionBegin and derivatives ------" >> checkbadSource.out -@git --no-pager grep -n -E -B 1 ' PetscFunctionBegin(User|Hot){0,1};' -- ${GITSRC} ':!src/sys/tests/*' ':!src/sys/tutorials/*' | grep -E '\-[0-9]+-.*;' | grep -v '^--$$' | grep -v '\\' >> checkbadSource.out;true - -@echo "----- Uneeded parentheses [!&~*](foo[->|.]bar) ---------------------" >> checkbadSource.out + -@echo "----- Unneeded parentheses [!&~*](foo[->|.]bar) --------------------" >> checkbadSource.out -@git --no-pager grep -n -P -E '([\!\&\~\*\(]|\)\)|\([^,\*\(]+\**\))\(([a-zA-Z0-9_]+((\.|->)[a-zA-Z0-9_]+|\[[a-zA-Z0-9_ \%\+\*\-]+\])+)\)' -- ${GITSRC} >> 
checkbadSource.out;true - @a=`cat checkbadSource.out | wc -l`; l=`expr $$a - 29` ;\ + @a=`cat checkbadSource.out | wc -l`; l=`expr $$a - 30` ;\ if [ $$l -gt 0 ] ; then \ echo $$l " files with errors detected in source code formatting" ;\ cat checkbadSource.out ;\ @@ -165,6 +167,16 @@ checkbadSource: fi @test ! -s badSourceChar.out +# Run a linter in a Python virtual environment to check (and fix) the formatting of PETSc manual pages +# V=1 verbose +# REPLACE=1 replace ill-formatted docs with correctly formatted docs +env-lint: + @if [[ `which llvm-config` == "" ]]; then echo "llvm-config for version 14 must be in your path"; exit 1; fi + @if [ `llvm-config --version | cut -f1 -d"."` != 14 ]; then echo "llvm-config for version 14 must be in your path"; exit 1; fi + @python3 -m venv petsc-lint-env + @source petsc-lint-env/bin/activate && python3 -m pip install --quiet -r lib/petsc/bin/maint/petsclinter/requirements.txt && \ + python3 ${PETSC_DIR}/lib/petsc/bin/maint/petsclinter --verbose=${V} --apply-patches=${REPLACE} --clang_lib=`llvm-config --libdir`/libclang.dylib --werror 1 ./src ; deactivate + # Run a linter to check (and fix) the formatting of PETSc manual pages lint: ${PYTHON} ${PETSC_DIR}/lib/petsc/bin/maint/petsclinter --verbose=${V} --apply-patches=${REPLACE} $(LINTER_OPTIONS) ${DIRECTORY} diff --git a/lib/petsc/conf/variables b/lib/petsc/conf/variables index 12820873af9..cd9f3928174 100644 --- a/lib/petsc/conf/variables +++ b/lib/petsc/conf/variables @@ -87,7 +87,7 @@ PETSC_COMPILE_SINGLE = ${PCC} -o $*.o -c ${PCC_FLAGS} ${${CLANGUAGE}FLAGS} ${ PETSC_CCOMPILE_SINGLE = ${CC} -o $*.o -c ${CC_FLAGS} ${CFLAGS} ${CPPFLAGS} PETSC_CXXCOMPILE_SINGLE = ${CXX} -o $*.o -c ${CXX_FLAGS} ${CXXFLAGS} ${CXXCPPFLAGS} PETSC_FCOMPILE_SINGLE = ${FC} -o $*.o -c ${FC_FLAGS} ${FFLAGS} ${FCPPFLAGS} -PETSC_CUCOMPILE_SINGLE = ${CUDAC} -o $*.o -c $(MPICXX_INCLUDES) ${CUDAC_FLAGS} ${CUDAFLAGS} ${CUDAC_HOSTFLAGS} ${CUDACPPFLAGS} ${CXXCPPFLAGS} +PETSC_CUCOMPILE_SINGLE = ${CUDAC} -o $*.o -c $(MPICXX_INCLUDES) ${CUDAC_FLAGS} ${CUDAFLAGS} ${CUDAC_HOSTFLAGS} ${CUDACPPFLAGS} --compiler-options="${CXXCPPFLAGS}" PETSC_HIPCOMPILE_SINGLE = ${HIPC} -o $*.o $(MPICXX_INCLUDES) -c $(HIPC_FLAGS) $(HIPPP_FLAGS) $(HIPFLAGS) $(HIPPPFLAGS) $(HIPOPTFLAGS) $(HIPC_DEPFLAGS) $(CXXFLAGS) $(CXXCPPFLAGS) $(CXX_DEPFLAGS) PETSC_SYCLCOMPILE_SINGLE = ${SYCLC} -o $*.o $(MPICXX_INCLUDES) -c $(SYCLC_FLAGS) $(SYCLPP_FLAGS) $(SYCLFLAGS) $(SYCLPPFLAGS) $(SYCLOPTFLAGS) $(SYCLC_DEPFLAGS) $(CXXFLAGS) $(CXXCPPFLAGS) # diff --git a/makefile b/makefile index ca1830b4f42..a1f01afe134 100644 --- a/makefile +++ b/makefile @@ -134,7 +134,7 @@ check: echo "*mpiexec not found*. 
cannot run make check"; \ else \ ${RM} -f check_error;\ - ${RUN_TEST} PETSC_OPTIONS="${PETSC_OPTIONS} ${PETSC_TEST_OPTIONS}" PATH="${PETSC_DIR}/${PETSC_ARCH}/lib:${PATH}" check_build 2>&1 | tee ./${PETSC_ARCH}/lib/petsc/conf/check.log; \ + ${RUN_TEST} OMP_NUM_THREADS=1 PETSC_OPTIONS="${PETSC_OPTIONS} ${PETSC_TEST_OPTIONS}" PATH="${PETSC_DIR}/${PETSC_ARCH}/lib:${PATH}" check_build 2>&1 | tee ./${PETSC_ARCH}/lib/petsc/conf/check.log; \ if [ -f check_error ]; then \ echo "Error while running make check"; \ ${RM} -f check_error;\ @@ -217,6 +217,9 @@ check_build: +@if [ "${SLEPC}" = "yes" ]; then \ ${OMAKE_SELF} PETSC_ARCH=${PETSC_ARCH} PETSC_DIR=${PETSC_DIR} slepc-check; \ fi; + +@if [ "${MFEM}" = "yes" ]; then \ + ${OMAKE_SELF} PETSC_ARCH=${PETSC_ARCH} PETSC_DIR=${PETSC_DIR} mfem-check; \ + fi; -@echo "Completed PETSc check examples" # ********* Rules for make install ******************************************************************************************************************* diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000000..e9c565938be --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,6 @@ +[build-system] +requires = [ + "setuptools", "wheel", + "patchelf; platform_system=='Linux'", +] +build-backend = "setuptools.build_meta" diff --git a/setup.py b/setup.py index d9221eea2fc..7d0171df801 100755 --- a/setup.py +++ b/setup.py @@ -12,11 +12,11 @@ .. note:: - To install ``PETSc`` and ``petsc4py`` (``mpi4py`` is optional - but highly recommended) use:: + To install the ``PETSc`` and ``petsc4py`` packages + (``mpi4py`` is optional but highly recommended) use:: - $ python -m pip install numpy mpi4py (or pip install numpy mpi4py) - $ python -m pip install petsc petsc4py (or pip install petsc petsc4py) + $ python -m pip install numpy mpi4py + $ python -m pip install petsc petsc4py .. tip:: @@ -25,21 +25,22 @@ $ python -m pip install Cython numpy mpi4py $ python -m pip install --no-deps https://gitlab.com/petsc/petsc/-/archive/main/petsc-main.tar.gz - To set the MPI compilers use the environmental variables ``MPICC``, ``MPICXX``, ``MPIF90``. - Provide any ``PETSc`` ./configure options using the environmental variable ``PETSC_CONFIGURE_OPTIONS``. Do not use the ``PETSc`` ``./configure`` options ``--with-cc``, ``--with-cxx``, ``--with-fc``, or ``--with-mpi-dir``. - - If ``mpi4py`` is installed the compilers will obtained from that installation and ``MPICC``, ``MPICXX``, ``MPIF90`` will be ignored. + To set the MPI compilers use the environmental variables ``MPICC``, ``MPICXX``, ``MPIFORT``. + If ``mpi4py`` is installed the compilers will be obtained from that installation and ``MPICC``, ``MPICXX``, ``MPIFORT`` will be ignored. """ -import sys, os +import re +import os +import sys +import shlex +import shutil from setuptools import setup +from wheel.bdist_wheel import bdist_wheel as _bdist_wheel from setuptools.command.install import install as _install -from distutils.util import get_platform, split_quoted -from distutils.spawn import find_executable from distutils import log init_py = """\ @@ -50,12 +51,26 @@ def get_petsc_dir(): import os return os.path.dirname(__file__) + def get_config(): conf = {} conf['PETSC_DIR'] = get_petsc_dir() return conf """ +main_py = """\ +# Author: PETSc Team +# Contact: petsc-maint@mcs.anl.gov + +if __name__ == "__main__": + import sys + if "--prefix" in sys.argv: + from . 
import get_petsc_dir + print(get_petsc_dir()) + del get_petsc_dir + del sys +""" + metadata = { 'provides' : ['petsc'], 'zip_safe' : False, @@ -63,34 +78,38 @@ def get_config(): CONFIGURE_OPTIONS = [] + def bootstrap(): # Set PETSC_DIR and PETSC_ARCH PETSC_DIR = os.path.abspath(os.getcwd()) - PETSC_ARCH = 'arch-python-' + get_platform() + PETSC_ARCH = 'arch-python' os.environ['PETSC_DIR'] = PETSC_DIR os.environ['PETSC_ARCH'] = PETSC_ARCH sys.path.insert(0, os.path.join(PETSC_DIR, 'config')) sys.path.insert(0, os.path.join(PETSC_DIR, 'lib','petsc','conf')) - # Generate package __init__.py file - from distutils.dir_util import mkpath + + # Generate package __init__.py and __main__.py files pkgdir = os.path.join('config', 'pypi') - if not os.path.exists(pkgdir): mkpath(pkgdir) - pkgfile = os.path.join(pkgdir, '__init__.py') - fh = open(pkgfile, 'w') - fh.write(init_py) - fh.close() + os.makedirs(pkgdir, exist_ok=True) + for pyfile, contents in ( + ('__init__.py', init_py), + ('__main__.py', main_py), + ): + with open(os.path.join(pkgdir, pyfile), 'w') as fh: + fh.write(contents) + # Configure options options = os.environ.get('PETSC_CONFIGURE_OPTIONS', '') - CONFIGURE_OPTIONS.extend(split_quoted(options)) + CONFIGURE_OPTIONS.extend(shlex.split(options)) for i in CONFIGURE_OPTIONS: if i.startswith('--with-mpi-dir='): - raise RuntimeError("Do not use --with-mpi-dir, use the environmental variables MPICC, MPICXX, MPIF90") + raise RuntimeError("Do not use --with-mpi-dir, use the environmental variables MPICC, MPICXX, MPIFORT") if i.startswith('--with-cc='): raise RuntimeError("Do not use --with-cc, use the environmental variable MPICC") if i.startswith('--with-cxx=') and i != "--with-cxx=0": raise RuntimeError("Do not use --with-cxx, use the environmental variable MPICXX") if i.startswith('--with-fc=') and i != "--with-fc=0": - raise RuntimeError("Do not use --with-fc, use the environmental variable MPIF90") + raise RuntimeError("Do not use --with-fc, use the environmental variable MPIFORT") if '--with-mpi=0' not in CONFIGURE_OPTIONS: # Simple-minded lookup for MPI and mpi4py @@ -102,12 +121,13 @@ def bootstrap(): except ImportError: # mpi4py is not installed mpi4py = None mpicc = (os.environ.get('MPICC') or - find_executable('mpicc')) + shutil.which('mpicc')) except AttributeError: # mpi4py is too old pass if not mpi4py and mpicc: metadata['install_requires'] = ['mpi4py>=1.2.2'] + def config(prefix, dry_run=False): log.info('PETSc: configure') options = [ @@ -125,11 +145,13 @@ def config(prefix, dry_run=False): conf = mpi4py.get_config() mpicc = conf.get('mpicc') mpicxx = conf.get('mpicxx') - mpif90 = conf.get('mpif90') + mpifort = conf.get('mpifort') or conf.get('mpif90') except (ImportError, AttributeError): - mpicc = os.environ.get('MPICC') or find_executable('mpicc') - mpicxx = os.environ.get('MPICXX') or find_executable('mpicxx') - mpif90 = os.environ.get('MPIF90') or find_executable('mpif90') + mpicc = os.environ.get('MPICC') or shutil.which('mpicc') + mpicxx = os.environ.get('MPICXX') or shutil.which('mpicxx') + mpifort = os.environ.get('MPIFORT') or os.environ.get('MPIF90') + mpifort = mpifort or shutil.which('mpifort') + mpifort = mpifort or shutil.which('mpif90') if mpicc: options.append('--with-cc='+mpicc) if '--with-cxx=0' not in CONFIGURE_OPTIONS: @@ -138,8 +160,8 @@ def config(prefix, dry_run=False): else: options.append('--with-cxx=0') if '--with-fc=0' not in CONFIGURE_OPTIONS: - if mpif90: - options.append('--with-fc='+mpif90) + if mpifort: + options.append('--with-fc='+mpifort) 
else: options.append('--with-fc=0') options.append('--with-sowing=0') @@ -151,7 +173,8 @@ def config(prefix, dry_run=False): for opt in options: log.info(' '*4 + opt) # Run PETSc configure - if dry_run: return + if dry_run: + return use_config_py = False if use_config_py: import configure @@ -162,12 +185,40 @@ def config(prefix, dry_run=False): python = sys.executable command = [python, './configure'] + options status = os.system(" ".join(command)) - if status != 0: raise RuntimeError(status) + if status != 0: + raise RuntimeError(status) + # Fix PETSc configuration + using_build_backend = any( + os.environ.get(prefix + '_BUILD_BACKEND') + for prefix in ('_PYPROJECT_HOOKS', 'PEP517') + ) + if using_build_backend: + pdir = os.environ['PETSC_DIR'] + parch = os.environ['PETSC_ARCH'] + include = os.path.join(pdir, parch, 'include') + for filename in ( + 'petscconf.h', + 'petscconfiginfo.h', + 'petscmachineinfo.h', + ): + filename = os.path.join(include, filename) + with open(filename, 'r') as old_fh: + contents = old_fh.read() + contents = contents.replace(prefix, '${PETSC_DIR}') + contents = re.sub( + r'^(#define PETSC_PYTHON_EXE) "(.*)"$', + r'\1 "python%d"' % sys.version_info[0], + contents, flags=re.MULTILINE, + ) + with open(filename, 'w') as new_fh: + new_fh.write(contents) + def build(dry_run=False): log.info('PETSc: build') # Run PETSc build - if dry_run: return + if dry_run: + return use_builder_py = False if use_builder_py: import builder @@ -175,15 +226,18 @@ def build(dry_run=False): import logger logger.Logger.defaultLog = None else: - make = find_executable('make') + make = shutil.which('make') command = [make, 'all'] status = os.system(" ".join(command)) - if status != 0: raise RuntimeError(status) + if status != 0: + raise RuntimeError(status) + def install(dry_run=False): log.info('PETSc: install') # Run PETSc installer - if dry_run: return + if dry_run: + return use_install_py = False if use_install_py: import install @@ -191,10 +245,12 @@ def install(dry_run=False): import logger logger.Logger.defaultLog = None else: - make = find_executable('make') + make = shutil.which('make') command = [make, 'install'] status = os.system(" ".join(command)) - if status != 0: raise RuntimeError(status) + if status != 0: + raise RuntimeError(status) + class context(object): def __init__(self): @@ -209,11 +265,11 @@ def exit(self): sys.argv[:] = self.sys_argv os.chdir(self.wdir) + class cmd_install(_install): def initialize_options(self): _install.initialize_options(self) - self.optimize = 1 def finalize_options(self): _install.finalize_options(self) @@ -245,8 +301,20 @@ def get_outputs(self): outputs += _install.get_outputs(self) return outputs + +class cmd_bdist_wheel(_bdist_wheel): + + def finalize_options(self): + super().finalize_options() + self.root_is_pure = False + self.build_number = None + + def get_tag(self): + plat_tag = super().get_tag()[-1] + return (self.python_tag, "none", plat_tag) + + def version(): - import re version_re = { 'major' : re.compile(r"#define\s+PETSC_VERSION_MAJOR\s+(\d+)"), 'minor' : re.compile(r"#define\s+PETSC_VERSION_MINOR\s+(\d+)"), @@ -265,13 +333,18 @@ def version(): v = "%d.%d.0.dev%d" % (major, minor+1, 0) return v + def tarball(): VERSION = version() - if '.dev' in VERSION: return None + if '.dev' in VERSION: + return None return ('https://web.cels.anl.gov/projects/petsc/download/release-snapshots/' 'petsc-%s.tar.gz#egg=petsc-%s' % (VERSION, VERSION)) -description = __doc__.split('\n')[1:-1]; del description[1:3] + +description = 
__doc__.split('\n')[1:-1] +del description[1:3] + classifiers = """ Development Status :: 5 - Production/Stable Intended Audience :: Developers @@ -286,29 +359,31 @@ def tarball(): Topic :: Software Development :: Libraries """ -if 'bdist_wheel' in sys.argv: - sys.stderr.write("petsc: this package cannot be built as a wheel\n") - sys.exit(1) - bootstrap() -setup(name='petsc', - version=version(), - description=description.pop(0), - long_description='\n'.join(description), - classifiers= classifiers.split('\n')[1:-1], - keywords = ['PETSc', 'MPI'], - platforms=['POSIX'], - license='BSD', - - url='https://petsc.org/', - download_url=tarball(), - - author='PETSc Team', - author_email='petsc-maint@mcs.anl.gov', - maintainer='Lisandro Dalcin', - maintainer_email='dalcinl@gmail.com', - - packages = ['petsc'], - package_dir = {'petsc': 'config/pypi'}, - cmdclass={'install': cmd_install}, - **metadata) +setup( + name='petsc', + version=version(), + description=description.pop(0), + long_description='\n'.join(description), + long_description_content_type='text/x-rst', + classifiers=classifiers.split('\n')[1:-1], + keywords = ['PETSc', 'MPI'], + platforms=['POSIX'], + license='BSD-2-Clause', + + url='https://petsc.org/', + download_url=tarball(), + + author='PETSc Team', + author_email='petsc-maint@mcs.anl.gov', + maintainer='Lisandro Dalcin', + maintainer_email='dalcinl@gmail.com', + + packages=['petsc'], + package_dir= {'petsc': 'config/pypi'}, + cmdclass={ + 'install': cmd_install, + 'bdist_wheel': cmd_bdist_wheel, + }, + **metadata +) diff --git a/share/petsc/datafiles/meshes/horse.ply.bz2 b/share/petsc/datafiles/meshes/horse.ply.bz2 deleted file mode 100644 index 88d60093978..00000000000 Binary files a/share/petsc/datafiles/meshes/horse.ply.bz2 and /dev/null differ diff --git a/share/petsc/suppressions/lsan b/share/petsc/suppressions/lsan index f5fa389fea9..e0cd0d4b0ec 100644 --- a/share/petsc/suppressions/lsan +++ b/share/petsc/suppressions/lsan @@ -3,3 +3,4 @@ leak:hwloc__add_info # when MPI_Abort() is called, some MPI implementations do not free # all that has been allocated during the initialization (but in general, there is no leak in PetscInitialize_Common) leak:PetscInitialize_Common +leak:libpython diff --git a/src/benchmarks/run_petsc_benchmarks.sh b/src/benchmarks/run_petsc_benchmarks.sh index a4802b5aa4c..c089a58a4b0 100644 --- a/src/benchmarks/run_petsc_benchmarks.sh +++ b/src/benchmarks/run_petsc_benchmarks.sh @@ -72,7 +72,7 @@ fi # taken as the final result. 
run_spmv_benchmarks() { [ "${DRY_RUN}" == "true" ] && return - if [ "${EXECUTOR}" == "cuda" ]; then + if [ "${EXECUTOR}" == "cuda" ] || [ "${EXECUTOR}" == "hip" ]; then ${LAUNCHER} ../mat/tests/bench_spmv -formats "${FORMATS}" -repetitions 5 -use_gpu -AJSON "$1" else ${LAUNCHER} ../mat/tests/bench_spmv -formats "${FORMATS}" -repetitions 5 -AJSON "$1" @@ -174,8 +174,8 @@ for (( p=${LOOP_START}; p < ${LOOP_END}; ++p )); do generate_suite_sparse_input "$i" >"${RESULT_FILE}" echo -e "${PREFIX}Running SpMV for ${GROUP}/${NAME}" 1>&2 run_spmv_benchmarks "${RESULT_FILE}" - echo -e "${PREFIX}Cleaning up problem ${GROUP}/${NAME}" 1>&2 - [ "${DRY_RUN}" != "true" ] && ${SSGET} -i "$i" -c >/dev/null + # echo -e "${PREFIX}Cleaning up problem ${GROUP}/${NAME}" 1>&2 + # [ "${DRY_RUN}" != "true" ] && ${SSGET} -i "$i" -c >/dev/null else append_suite_sparse_input "$i" >>"${RESULT_FILE}" fi diff --git a/src/binding/petsc4py/.cython-lint.toml b/src/binding/petsc4py/.cython-lint.toml new file mode 100644 index 00000000000..ec5e6791ac7 --- /dev/null +++ b/src/binding/petsc4py/.cython-lint.toml @@ -0,0 +1,9 @@ +[tool.cython-lint] +ignore = [ + "E125", # continuation line with same indent as next logical line + "E221", # multiple spaces before operator + "E261", # at least two spaces before inline comment + "E501", # line too long + "E701", # multiple statements on one line (colon) + "E702", # multiple statements on one line (semicolon) +] diff --git a/src/binding/petsc4py/.gitignore b/src/binding/petsc4py/.gitignore index 963a15ed74f..56f24181a57 100644 --- a/src/binding/petsc4py/.gitignore +++ b/src/binding/petsc4py/.gitignore @@ -4,6 +4,7 @@ dist MANIFEST .tox **/__pycache__ +*env/ docs/*.html docs/*.info diff --git a/src/binding/petsc4py/.ruff.toml b/src/binding/petsc4py/.ruff.toml new file mode 100644 index 00000000000..d70464286b3 --- /dev/null +++ b/src/binding/petsc4py/.ruff.toml @@ -0,0 +1,41 @@ +include = ["*.py", "src/*.py"] +exclude = ["demo/legacy", "*env"] + +[lint] +select = [ + "A", + "B", + "C", + # "D", + "E", + "F", + "G", + # "I", + "S", + "W", + "UP", + # "ARG", + # "ISC", + "PIE", + # "PTH", + "PYI", + "RET", + # "RUF", + # "TRY", + "YTT", +] + +ignore = [ + "E501", # Line too long + "E731", # Do not assign a `lambda` expression, use a `def` + "G004", # Logging statement uses f-string + "S110", # `try`-`except`-`pass` detected, consider logging the exception + "UP015", # [*] Unnecessary open mode parameters + "C901", # `function` is too complex +] + +[format] +quote-style = "single" + +[lint.per-file-ignores] +"demo/*" = ["E402", "PIE790",] diff --git a/src/binding/petsc4py/CHANGES.rst b/src/binding/petsc4py/CHANGES.rst index b62e58e547e..166ca80b476 100644 --- a/src/binding/petsc4py/CHANGES.rst +++ b/src/binding/petsc4py/CHANGES.rst @@ -6,6 +6,11 @@ CHANGES: PETSc for Python :Contact: dalcinl@gmail.com +Release 3.21.0 +============== + +- Update to PETSc 3.21 release. + Release 3.20.0 ============== diff --git a/src/binding/petsc4py/DESCRIPTION.rst b/src/binding/petsc4py/DESCRIPTION.rst index 5cfdb1ba24b..0700e184e36 100644 --- a/src/binding/petsc4py/DESCRIPTION.rst +++ b/src/binding/petsc4py/DESCRIPTION.rst @@ -40,6 +40,6 @@ project. D. May, L. Curfman McInnes, R. Mills, L. Mitchell, T. Munson, J. Roman, K. Rupp, P. Sanan, J Sarich, B. Smith, S. Zampini, H. Zhang, and H. Zhang, J. Zhang, - *PETSc/TAO Users Manual*, ANL-21/39 - Revision 3.20, 2023. + *PETSc/TAO Users Manual*, ANL-21/39 - Revision 3.21, 2024. 
http://dx.doi.org/10.2172/2205494, https://petsc.org/release/docs/manual/manual.pdf diff --git a/src/binding/petsc4py/conf/confpetsc.py b/src/binding/petsc4py/conf/confpetsc.py index c18d4fd991c..28d98943acc 100644 --- a/src/binding/petsc4py/conf/confpetsc.py +++ b/src/binding/petsc4py/conf/confpetsc.py @@ -6,6 +6,13 @@ import glob import copy import warnings +from distutils import log +from distutils import sysconfig +from distutils.util import execute +from distutils.util import split_quoted +from distutils.errors import DistutilsError +from distutils.text_file import TextFile + try: from cStringIO import StringIO @@ -26,38 +33,37 @@ from distutils.core import Extension as _Extension from distutils.core import Command + def import_command(cmd): try: from importlib import import_module except ImportError: + def import_module(n): return __import__(n, fromlist=[None]) + try: - if not setuptools: raise ImportError + if not setuptools: + raise ImportError mod = import_module('setuptools.command.' + cmd) return getattr(mod, cmd) except ImportError: mod = import_module('distutils.command.' + cmd) return getattr(mod, cmd) -_config = import_command('config') -_build = import_command('build') -_build_ext = import_command('build_ext') -_install = import_command('install') -from distutils import log -from distutils import sysconfig -from distutils.util import execute -from distutils.util import split_quoted -from distutils.errors import DistutilsError +_config = import_command('config') +_build = import_command('build') +_build_ext = import_command('build_ext') +_install = import_command('install') try: from setuptools import modified except ImportError: - try: - from setuptools import dep_util as modified - except ImportError: - from distutils import dep_util as modified + try: + from setuptools import dep_util as modified + except ImportError: + from distutils import dep_util as modified try: from packaging.version import Version @@ -73,53 +79,62 @@ def import_module(n): CYTHON = '3.0.0' + def cython_req(): return CYTHON + def cython_chk(VERSION, verbose=True): # def warn(message): - if not verbose: return - ruler, ws, nl = "*"*80, " " ,"\n" + if not verbose: + return + ruler, ws, nl = '*' * 80, ' ', '\n' pyexe = sys.executable - advise = "$ %s -m pip install --upgrade cython" % pyexe - def printer(*s): sys.stderr.write(" ".join(s)+"\n") + advise = '$ %s -m pip install --upgrade cython' % pyexe + + def printer(*s): + sys.stderr.write(' '.join(s) + '\n') + printer(ruler, nl) printer(ws, message, nl) printer(ws, ws, advise, nl) printer(ruler) + # try: import Cython except ImportError: - warn("You need Cython to generate C source files.") + warn('You need Cython to generate C source files.') return False # CYTHON_VERSION = Cython.__version__ - m = re.match(r"(\d+\.\d+(?:\.\d+)?).*", CYTHON_VERSION) + m = re.match(r'(\d+\.\d+(?:\.\d+)?).*', CYTHON_VERSION) if not m: - warn("Cannot parse Cython version string {0!r}" - .format(CYTHON_VERSION)) + warn(f'Cannot parse Cython version string {CYTHON_VERSION!r}') return False REQUIRED = Version(VERSION) PROVIDED = Version(m.groups()[0]) if PROVIDED < REQUIRED: - warn("You need Cython >= {0} (you have version {1})" - .format(VERSION, CYTHON_VERSION)) + warn(f'You need Cython >= {VERSION} (you have version {CYTHON_VERSION})') return False # if verbose: - log.info("using Cython %s" % CYTHON_VERSION) + log.info('using Cython %s' % CYTHON_VERSION) return True + def cython_run( - source, target=None, - depends=(), includes=(), - workdir=None, force=False, - 
VERSION="0.0", + source, + target=None, + depends=(), + includes=(), + workdir=None, + force=False, + VERSION='0.0', ): if target is None: - target = os.path.splitext(source)[0]+'.c' + target = os.path.splitext(source)[0] + '.c' cwd = os.getcwd() try: if workdir: @@ -128,8 +143,7 @@ def cython_run( for dep in depends: alldeps += glob.glob(dep) if not (force or modified.newer_group(alldeps, target)): - log.debug("skipping '%s' -> '%s' (up-to-date)", - source, target) + log.debug("skipping '%s' -> '%s' (up-to-date)", source, target) return finally: os.chdir(cwd) @@ -137,7 +151,7 @@ def cython_run( if setuptools and not cython_chk(VERSION, verbose=False): if sys.modules.get('Cython'): removed = getattr(sys.modules['Cython'], '__version__', '') - log.info("removing Cython %s from sys.modules" % removed) + log.info('removing Cython %s from sys.modules' % removed) pkgname = re.compile(r'cython(\.|$)', re.IGNORECASE) for modname in list(sys.modules.keys()): if pkgname.match(modname): @@ -149,7 +163,7 @@ def cython_run( category = setuptools.SetuptoolsDeprecationWarning warnings.simplefilter('ignore', category) log.info("fetching build requirement '%s'" % require) - install_setup_requires(dict(setup_requires=[require])) + install_setup_requires({'setup_requires': [require]}) except Exception: log.info("failed to fetch build requirement '%s'" % require) if not cython_chk(VERSION): @@ -157,6 +171,7 @@ def cython_run( # log.info("cythonizing '%s' -> '%s'", source, target) from cythonize import cythonize + args = [] if workdir: args += ['--working', workdir] @@ -165,13 +180,12 @@ def cython_run( args += ['--output-file', target] err = cythonize(args) if err: - raise DistutilsError( - "Cython failure: '%s' -> '%s'" % (source, target) - ) + raise DistutilsError(f"Cython failure: '{source}' -> '{target}'") # -------------------------------------------------------------------- + def fix_config_vars(names, values): values = list(values) if 'CONDA_BUILD' in os.environ: @@ -193,43 +207,32 @@ def fix_config_vars(names, values): values[i] = flag return values + def get_config_vars(*names): # Core Python configuration values = sysconfig.get_config_vars(*names) # Do any distutils flags fixup right now - values = fix_config_vars(names, values) - return values + return fix_config_vars(names, values) -from distutils.unixccompiler import UnixCCompiler -rpath_option_orig = UnixCCompiler.runtime_library_dir_option -def rpath_option(compiler, dir): - option = rpath_option_orig(compiler, dir) - if sys.platform[:5] == 'linux': - if option.startswith('-R'): - option = option.replace('-R', '-Wl,-rpath,', 1) - elif option.startswith('-Wl,-R'): - option = option.replace('-Wl,-R', '-Wl,-rpath,', 1) - return option -UnixCCompiler.runtime_library_dir_option = rpath_option # -------------------------------------------------------------------- -class PetscConfig: +class PetscConfig: def __init__(self, petsc_dir, petsc_arch, dest_dir=None): if dest_dir is None: dest_dir = os.environ.get('DESTDIR') - self.configdict = { } + self.configdict = {} if not petsc_dir: - raise DistutilsError("PETSc not found") + raise DistutilsError('PETSc not found') if not os.path.isdir(petsc_dir): - raise DistutilsError("invalid PETSC_DIR: %s" % petsc_dir) - self.version = self._get_petsc_version(petsc_dir) + raise DistutilsError('invalid PETSC_DIR: %s' % petsc_dir) + self.version = self._get_petsc_version(petsc_dir) self.configdict = self._get_petsc_config(petsc_dir, petsc_arch) - self.PETSC_DIR = self['PETSC_DIR'] + self.PETSC_DIR = self['PETSC_DIR'] 
self.PETSC_ARCH = self['PETSC_ARCH'] self.DESTDIR = dest_dir - language_map = {'CONLY':'c', 'CXXONLY':'c++'} + language_map = {'CONLY': 'c', 'CXXONLY': 'c++'} self.language = language_map[self['PETSC_LANGUAGE']] def __getitem__(self, item): @@ -245,23 +248,26 @@ def configure(self, extension, compiler=None): def _get_petsc_version(self, petsc_dir): import re + version_re = { - 'major' : re.compile(r"#define\s+PETSC_VERSION_MAJOR\s+(\d+)"), - 'minor' : re.compile(r"#define\s+PETSC_VERSION_MINOR\s+(\d+)"), - 'micro' : re.compile(r"#define\s+PETSC_VERSION_SUBMINOR\s+(\d+)"), - 'release': re.compile(r"#define\s+PETSC_VERSION_RELEASE\s+(-*\d+)"), - } + 'major': re.compile(r'#define\s+PETSC_VERSION_MAJOR\s+(\d+)'), + 'minor': re.compile(r'#define\s+PETSC_VERSION_MINOR\s+(\d+)'), + 'micro': re.compile(r'#define\s+PETSC_VERSION_SUBMINOR\s+(\d+)'), + 'release': re.compile(r'#define\s+PETSC_VERSION_RELEASE\s+(-*\d+)'), + } petscversion_h = os.path.join(petsc_dir, 'include', 'petscversion.h') - with open(petscversion_h, 'rt') as f: data = f.read() + with open(petscversion_h, 'rt') as f: + data = f.read() major = int(version_re['major'].search(data).groups()[0]) minor = int(version_re['minor'].search(data).groups()[0]) micro = int(version_re['micro'].search(data).groups()[0]) release = int(version_re['release'].search(data).groups()[0]) - return (major, minor, micro), (release == 1) + return (major, minor, micro), (release == 1) def _get_petsc_config(self, petsc_dir, petsc_arch): from os.path import join, isdir, exists - PETSC_DIR = petsc_dir + + PETSC_DIR = petsc_dir PETSC_ARCH = petsc_arch # confdir = join('lib', 'petsc', 'conf') @@ -273,7 +279,7 @@ def _get_petsc_config(self, petsc_dir, petsc_arch): # variables = join(PETSC_DIR, confdir, 'variables') if not exists(variables): - variables = join(PETSC_DIR, PETSC_ARCH, confdir, 'variables') + variables = join(PETSC_DIR, PETSC_ARCH, confdir, 'variables') petscvariables = join(PETSC_DIR, PETSC_ARCH, confdir, 'petscvariables') # with open(variables) as f: @@ -281,11 +287,10 @@ def _get_petsc_config(self, petsc_dir, petsc_arch): with open(petscvariables) as f: contents += f.read() # - confstr = 'PETSC_DIR = %s\n' % PETSC_DIR + confstr = 'PETSC_DIR = %s\n' % PETSC_DIR confstr += 'PETSC_ARCH = %s\n' % PETSC_ARCH confstr += contents - confdict = makefile(StringIO(confstr)) - return confdict + return makefile(StringIO(confstr)) def _configure_ext(self, ext, dct, append=False): extdict = ext.__dict__ @@ -304,16 +309,22 @@ def configure_extension(self, extension): # we might be building against PETSc in staging location (DESTDIR) when # DESTDIR is set, so append DESTDIR (if nonempty) to those paths petsc_inc = flaglist(prepend_to_flags(self.DESTDIR, self['PETSC_CC_INCLUDES'])) - lib_flags = prepend_to_flags(self.DESTDIR, '-L%s %s' % \ - (self['PETSC_LIB_DIR'], self['PETSC_LIB_BASIC'])) + lib_flags = prepend_to_flags( + self.DESTDIR, + '-L{} {}'.format(self['PETSC_LIB_DIR'], self['PETSC_LIB_BASIC']), + ) petsc_lib = flaglist(lib_flags) # runtime_library_dirs is not supported on Windows if sys.platform != 'win32': # if DESTDIR is set, then we're building against PETSc in a staging # directory, but rpath needs to point to final install directory. 
- rpath = strip_prefix(self.DESTDIR, self['PETSC_LIB_DIR']) - petsc_lib['runtime_library_dirs'].append(rpath) - + rpath = [strip_prefix(self.DESTDIR, self['PETSC_LIB_DIR'])] + if sys.modules.get('petsc') is not None: + if sys.platform == 'darwin': + rpath = ['@loader_path/../../petsc/lib'] + else: + rpath = ['$ORIGIN/../../petsc/lib'] + petsc_lib['runtime_library_dirs'].extend(rpath) # Link in extra libraries on static builds if self['BUILDSHAREDLIB'] != 'yes': petsc_ext_lib = split_quoted(self['PETSC_EXTERNAL_LIB_BASIC']) @@ -322,26 +333,31 @@ def configure_extension(self, extension): self._configure_ext(extension, petsc_lib) def configure_compiler(self, compiler): - if compiler.compiler_type != 'unix': return + if compiler.compiler_type != 'unix': + return getenv = os.environ.get # distutils C/C++ compiler - (cc, cflags, ccshared, cxx) = get_config_vars( - 'CC', 'CFLAGS', 'CCSHARED', 'CXX') + (cc, cflags, ccshared, cxx) = get_config_vars('CC', 'CFLAGS', 'CCSHARED', 'CXX') ccshared = getenv('CCSHARED', ccshared or '') cflags = getenv('CFLAGS', cflags or '') cflags = cflags.replace('-Wstrict-prototypes', '') # distutils linker - (ldflags, ldshared, so_ext) = get_config_vars( - 'LDFLAGS', 'LDSHARED', 'SO') + (ldflags, ldshared, so_ext) = get_config_vars('LDFLAGS', 'LDSHARED', 'SO') ld = cc ldshared = getenv('LDSHARED', ldshared) ldflags = getenv('LDFLAGS', cflags + ' ' + (ldflags or '')) ldcmd = split_quoted(ld) + split_quoted(ldflags) - ldshared = [flg for flg in split_quoted(ldshared) if flg not in ldcmd and (flg.find('/lib/spack/env')<0)] + ldshared = [ + flg + for flg in split_quoted(ldshared) + if flg not in ldcmd and (flg.find('/lib/spack/env') < 0) + ] ldshared = str.join(' ', ldshared) + # def get_flags(cmd): - if not cmd: return '' + if not cmd: + return '' cmd = split_quoted(cmd) if os.path.basename(cmd[0]) == 'xcrun': del cmd[0] @@ -354,11 +370,12 @@ def get_flags(cmd): continue break return ' '.join(cmd[1:]) + # PETSc C compiler PCC = self['PCC'] PCC_FLAGS = get_flags(cc) + ' ' + self['PCC_FLAGS'] PCC_FLAGS = PCC_FLAGS.replace('-fvisibility=hidden', '') - PCC = getenv('PCC', PCC) + ' ' + getenv('PCCFLAGS', PCC_FLAGS) + PCC = getenv('PCC', PCC) + ' ' + getenv('PCCFLAGS', PCC_FLAGS) PCC_SHARED = str.join(' ', (PCC, ccshared, cflags)) # PETSc C++ compiler PCXX = PCC if self.language == 'c++' else self.get('CXX', cxx) @@ -370,36 +387,28 @@ def get_flags(cmd): PLD_SHARED = str.join(' ', (PLD, ldshared, ldflags)) # compiler.set_executables( - compiler = PCC, - compiler_cxx = PCXX, - linker_exe = PLD, - compiler_so = PCC_SHARED, - linker_so = PLD_SHARED, - ) + compiler=PCC, + compiler_cxx=PCXX, + linker_exe=PLD, + compiler_so=PCC_SHARED, + linker_so=PLD_SHARED, + ) compiler.shared_lib_extension = so_ext - # - if sys.platform == 'darwin': - for attr in ('preprocessor', - 'compiler', 'compiler_cxx', 'compiler_so', - 'linker_so', 'linker_exe'): - compiler_cmd = getattr(compiler, attr, []) - while '-mno-fused-madd' in compiler_cmd: - compiler_cmd.remove('-mno-fused-madd') def log_info(self): - PETSC_DIR = self['PETSC_DIR'] + PETSC_DIR = self['PETSC_DIR'] PETSC_ARCH = self['PETSC_ARCH'] - version = ".".join([str(i) for i in self.version[0]]) - release = ("development", "release")[self.version[1]] + version = '.'.join([str(i) for i in self.version[0]]) + release = ('development', 'release')[self.version[1]] version_info = version + ' ' + release integer_size = '%s-bit' % self['PETSC_INDEX_SIZE'] - scalar_type = self['PETSC_SCALAR'] - precision = self['PETSC_PRECISION'] - language = 
self['PETSC_LANGUAGE'] - compiler = self['PCC'] - linker = self['PCC_LINKER'] - log.info('PETSC_DIR: %s' % PETSC_DIR ) - log.info('PETSC_ARCH: %s' % PETSC_ARCH ) + scalar_type = self['PETSC_SCALAR'] + precision = self['PETSC_PRECISION'] + language = self['PETSC_LANGUAGE'] + compiler = self['PCC'] + linker = self['PCC_LINKER'] + log.info('PETSC_DIR: %s' % PETSC_DIR) + log.info('PETSC_ARCH: %s' % PETSC_ARCH) log.info('version: %s' % version_info) log.info('integer-size: %s' % integer_size) log.info('scalar-type: %s' % scalar_type) @@ -408,30 +417,30 @@ def log_info(self): log.info('compiler: %s' % compiler) log.info('linker: %s' % linker) + # -------------------------------------------------------------------- + class Extension(_Extension): pass + # -------------------------------------------------------------------- cmd_petsc_opts = [ - ('petsc-dir=', None, - "define PETSC_DIR, overriding environmental variables"), - ('petsc-arch=', None, - "define PETSC_ARCH, overriding environmental variables"), - ] + ('petsc-dir=', None, 'define PETSC_DIR, overriding environmental variables'), + ('petsc-arch=', None, 'define PETSC_ARCH, overriding environmental variables'), +] class config(_config): - Configure = PetscConfig user_options = _config.user_options + cmd_petsc_opts def initialize_options(self): _config.initialize_options(self) - self.petsc_dir = None + self.petsc_dir = None self.petsc_arch = None def get_config_arch(self, arch): @@ -440,22 +449,23 @@ def get_config_arch(self, arch): def run(self): _config.run(self) self.petsc_dir = config.get_petsc_dir(self.petsc_dir) - if self.petsc_dir is None: return + if self.petsc_dir is None: + return petsc_arch = config.get_petsc_arch(self.petsc_dir, self.petsc_arch) log.info('-' * 70) log.info('PETSC_DIR: %s' % self.petsc_dir) arch_list = petsc_arch - if not arch_list : - arch_list = [ None ] + if not arch_list: + arch_list = [None] for arch in arch_list: conf = self.get_config_arch(arch) - archname = conf.PETSC_ARCH or conf['PETSC_ARCH'] + archname = conf.PETSC_ARCH or conf['PETSC_ARCH'] scalar_type = conf['PETSC_SCALAR'] - precision = conf['PETSC_PRECISION'] - language = conf['PETSC_LANGUAGE'] - compiler = conf['PCC'] - linker = conf['PCC_LINKER'] - log.info('-'*70) + precision = conf['PETSC_PRECISION'] + language = conf['PETSC_LANGUAGE'] + compiler = conf['PCC'] + linker = conf['PCC_LINKER'] + log.info('-' * 70) log.info('PETSC_ARCH: %s' % archname) log.info(' * scalar-type: %s' % scalar_type) log.info(' * precision: %s' % precision) @@ -464,35 +474,40 @@ def run(self): log.info(' * linker: %s' % linker) log.info('-' * 70) - #@staticmethod + # @staticmethod def get_petsc_dir(petsc_dir): - if not petsc_dir: return None + if not petsc_dir: + return None petsc_dir = os.path.expandvars(petsc_dir) if not petsc_dir or '$PETSC_DIR' in petsc_dir: try: import petsc + petsc_dir = petsc.get_petsc_dir() except ImportError: - log.warn("PETSC_DIR not specified") + log.warn('PETSC_DIR not specified') return None petsc_dir = os.path.expanduser(petsc_dir) petsc_dir = os.path.abspath(petsc_dir) return config.chk_petsc_dir(petsc_dir) + get_petsc_dir = staticmethod(get_petsc_dir) - #@staticmethod + # @staticmethod def chk_petsc_dir(petsc_dir): if not os.path.isdir(petsc_dir): log.error('invalid PETSC_DIR: %s (ignored)' % petsc_dir) return None return petsc_dir + chk_petsc_dir = staticmethod(chk_petsc_dir) - #@staticmethod + # @staticmethod def get_petsc_arch(petsc_dir, petsc_arch): - if not petsc_dir: return None + if not petsc_dir: + return None petsc_arch = 
os.path.expandvars(petsc_arch) - if (not petsc_arch or '$PETSC_ARCH' in petsc_arch): + if not petsc_arch or '$PETSC_ARCH' in petsc_arch: petsc_arch = '' petsc_conf = os.path.join(petsc_dir, 'lib', 'petsc', 'conf') if os.path.isdir(petsc_conf): @@ -504,9 +519,10 @@ def get_petsc_arch(petsc_dir, petsc_arch): petsc_arch = unique(petsc_arch) petsc_arch = [arch for arch in petsc_arch if arch] return config.chk_petsc_arch(petsc_dir, petsc_arch) + get_petsc_arch = staticmethod(get_petsc_arch) - #@staticmethod + # @staticmethod def chk_petsc_arch(petsc_dir, petsc_arch): valid_archs = [] for arch in petsc_arch: @@ -514,20 +530,22 @@ def chk_petsc_arch(petsc_dir, petsc_arch): if os.path.isdir(arch_path): valid_archs.append(arch) else: - log.warn("invalid PETSC_ARCH: %s (ignored)" % arch) + log.warn('invalid PETSC_ARCH: %s (ignored)' % arch) return valid_archs + chk_petsc_arch = staticmethod(chk_petsc_arch) class build(_build): - user_options = _build.user_options - user_options += [( - 'inplace', - 'i', - "ignore build-lib and put compiled extensions into the source " - "directory alongside your pure Python modules", - )] + user_options += [ + ( + 'inplace', + 'i', + 'ignore build-lib and put compiled extensions into the source ' + 'directory alongside your pure Python modules', + ) + ] user_options += cmd_petsc_opts boolean_options = _build.boolean_options @@ -536,32 +554,28 @@ class build(_build): def initialize_options(self): _build.initialize_options(self) self.inplace = None - self.petsc_dir = None + self.petsc_dir = None self.petsc_arch = None def finalize_options(self): _build.finalize_options(self) if self.inplace is None: self.inplace = False - self.set_undefined_options('config', - ('petsc_dir', 'petsc_dir'), - ('petsc_arch', 'petsc_arch')) - self.petsc_dir = config.get_petsc_dir(self.petsc_dir) - self.petsc_arch = config.get_petsc_arch(self.petsc_dir, - self.petsc_arch) + self.set_undefined_options( + 'config', ('petsc_dir', 'petsc_dir'), ('petsc_arch', 'petsc_arch') + ) + self.petsc_dir = config.get_petsc_dir(self.petsc_dir) + self.petsc_arch = config.get_petsc_arch(self.petsc_dir, self.petsc_arch) - sub_commands = \ - [('build_src', lambda *args: True)] + \ - _build.sub_commands + sub_commands = [('build_src', lambda *args: True)] + _build.sub_commands class build_src(Command): - description = "build C sources from Cython files" + description = 'build C sources from Cython files' user_options = [ - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ] + ('force', 'f', 'forcibly build everything (ignore file timestamps)'), + ] boolean_options = ['force'] @@ -569,57 +583,33 @@ def initialize_options(self): self.force = False def finalize_options(self): - self.set_undefined_options('build', - ('force', 'force'), - ) + self.set_undefined_options( + 'build', + ('force', 'force'), + ) def run(self): sources = getattr(self, 'sources', []) for source in sources: - cython_run( - force=self.force, - VERSION=cython_req(), - **source - ) + cython_run(force=self.force, VERSION=cython_req(), **source) class build_ext(_build_ext): - user_options = _build_ext.user_options + cmd_petsc_opts def initialize_options(self): _build_ext.initialize_options(self) self.inplace = None - self.petsc_dir = None + self.petsc_dir = None self.petsc_arch = None self._outputs = [] def finalize_options(self): _build_ext.finalize_options(self) self.set_undefined_options('build', ('inplace', 'inplace')) - self.set_undefined_options('build', - ('petsc_dir', 'petsc_dir'), - ('petsc_arch', 'petsc_arch')) - 
if ((sys.platform.startswith('linux') or - sys.platform.startswith('gnu') or - sys.platform.startswith('sunos')) and - sysconfig.get_config_var('Py_ENABLE_SHARED')): - py_version = sysconfig.get_python_version() - bad_pylib_dir = os.path.join(sys.prefix, "lib", - "python" + py_version, - "config") - try: - self.library_dirs.remove(bad_pylib_dir) - except ValueError: - pass - pylib_dir = sysconfig.get_config_var("LIBDIR") - if pylib_dir not in self.library_dirs: - self.library_dirs.append(pylib_dir) - if pylib_dir not in self.rpath: - self.rpath.append(pylib_dir) - if sys.exec_prefix == '/usr': - self.library_dirs.remove(pylib_dir) - self.rpath.remove(pylib_dir) + self.set_undefined_options( + 'build', ('petsc_dir', 'petsc_dir'), ('petsc_arch', 'petsc_arch') + ) def _copy_ext(self, ext): extclass = ext.__class__ @@ -635,14 +625,14 @@ def _copy_ext(self, ext): def _build_ext_arch(self, ext, pkgpath, arch): build_temp = self.build_temp - build_lib = self.build_lib + build_lib = self.build_lib try: self.build_temp = os.path.join(build_temp, arch) - self.build_lib = os.path.join(build_lib, pkgpath, arch) + self.build_lib = os.path.join(build_lib, pkgpath, arch) _build_ext.build_extension(self, ext) finally: self.build_temp = build_temp - self.build_lib = build_lib + self.build_lib = build_lib def get_config_arch(self, arch): return config.Configure(self.petsc_dir, arch) @@ -652,7 +642,7 @@ def build_extension(self, ext): return _build_ext.build_extension(self, ext) petsc_arch = self.petsc_arch if not petsc_arch: - petsc_arch = [ None ] + petsc_arch = [None] for arch in petsc_arch: config = self.get_config_arch(arch) ARCH = arch or config['PETSC_ARCH'] @@ -664,6 +654,7 @@ def build_extension(self, ext): pkgpath, newext = self._copy_ext(ext) config.configure(newext, self.compiler) self._build_ext_arch(newext, pkgpath, ARCH) + return None def run(self): self.build_sources() @@ -675,8 +666,9 @@ def build_sources(self): def build_extensions(self, *args, **kargs): self.PETSC_ARCH_LIST = [] - _build_ext.build_extensions(self, *args,**kargs) - if not self.PETSC_ARCH_LIST: return + _build_ext.build_extensions(self, *args, **kargs) + if not self.PETSC_ARCH_LIST: + return self.build_configuration(self.PETSC_ARCH_LIST) def build_configuration(self, arch_list): @@ -684,27 +676,39 @@ def build_configuration(self, arch_list): template, variables = self.get_config_data(arch_list) config_data = template % variables # - build_lib = self.build_lib - dist_name = self.distribution.get_name() - config_file = os.path.join(build_lib, dist_name, 'lib', - dist_name.replace('4py', '') + '.cfg') + build_lib = self.build_lib + dist_name = self.distribution.get_name() + config_file = os.path.join( + build_lib, dist_name, 'lib', dist_name.replace('4py', '') + '.cfg' + ) + # def write_file(filename, data): with open(filename, 'w') as fh: fh.write(config_data) - execute(write_file, (config_file, config_data), - msg='writing %s' % config_file, - verbose=self.verbose, dry_run=self.dry_run) + + execute( + write_file, + (config_file, config_data), + msg='writing %s' % config_file, + verbose=self.verbose, + dry_run=self.dry_run, + ) def get_config_data(self, arch_list): DESTDIR = self.DESTDIR - template = "\n".join([ - "PETSC_DIR = %(PETSC_DIR)s", - "PETSC_ARCH = %(PETSC_ARCH)s", - ]) + "\n" + template = ( + '\n'.join( + [ + 'PETSC_DIR = %(PETSC_DIR)s', + 'PETSC_ARCH = %(PETSC_ARCH)s', + ] + ) + + '\n' + ) variables = { - 'PETSC_DIR' : strip_prefix(DESTDIR, self.petsc_dir), - 'PETSC_ARCH' : os.path.pathsep.join(arch_list), + 
'PETSC_DIR': strip_prefix(DESTDIR, self.petsc_dir), + 'PETSC_ARCH': os.path.pathsep.join(arch_list), } return template, variables @@ -745,12 +749,10 @@ def get_outputs(self): else: outfile = os.path.join(self.build_lib, filename) outputs.append(outfile) - outputs = list(set(outputs)) - return outputs + return list(set(outputs)) class install(_install): - def initialize_options(self): with warnings.catch_warnings(): if setuptools: @@ -771,6 +773,7 @@ def initialize_options(self): # -------------------------------------------------------------------- + def setup(**attrs): cmdclass = attrs.setdefault('cmdclass', {}) for cmd in cmdclass_list: @@ -781,15 +784,18 @@ def setup(**attrs): version = cython_req() if not cython_chk(version, verbose=False): reqs = attrs.setdefault('setup_requires', []) - reqs += ['Cython=='+version] + reqs += ['Cython>=' + version] return _setup(**attrs) + # -------------------------------------------------------------------- if setuptools: try: from setuptools.command import egg_info as mod_egg_info + _FileList = mod_egg_info.FileList + class FileList(_FileList): def process_template_line(self, line): level = log.set_threshold(log.ERROR) @@ -797,22 +803,27 @@ def process_template_line(self, line): _FileList.process_template_line(self, line) finally: log.set_threshold(level) + mod_egg_info.FileList = FileList except (ImportError, AttributeError): pass # -------------------------------------------------------------------- + def append(seq, item): if item not in seq: seq.append(item) + def append_dict(conf, dct): for key, values in dct.items(): if key in conf: for value in values: if value not in conf[key]: conf[key].append(value) + + def unique(seq): res = [] for item in seq: @@ -820,22 +831,20 @@ def unique(seq): res.append(item) return res -def flaglist(flags): +def flaglist(flags): conf = { - 'define_macros' : [], - 'undef_macros' : [], - 'include_dirs' : [], - - 'libraries' : [], - 'library_dirs' : [], + 'define_macros': [], + 'undef_macros': [], + 'include_dirs': [], + 'libraries': [], + 'library_dirs': [], 'runtime_library_dirs': [], + 'extra_compile_args': [], + 'extra_link_args': [], + } - 'extra_compile_args' : [], - 'extra_link_args' : [], - } - - if type(flags) is str: + if isinstance(flags, str): flags = flags.split() switch = '-Wl,' @@ -854,7 +863,6 @@ def flaglist(flags): append_next_word = None for word in flags: - if append_next_word is not None: append(append_next_word, word) append_next_word = None @@ -862,39 +870,41 @@ def flaglist(flags): switch, value = word[0:2], word[2:] - if switch == "-I": + if switch == '-I': append(conf['include_dirs'], value) - elif switch == "-D": + elif switch == '-D': try: - idx = value.index("=") - macro = (value[:idx], value[idx+1:]) + idx = value.index('=') + macro = (value[:idx], value[idx + 1 :]) except ValueError: macro = (value, None) append(conf['define_macros'], macro) - elif switch == "-U": + elif switch == '-U': append(conf['undef_macros'], value) - elif switch == "-l": + elif switch == '-l': append(conf['libraries'], value) - elif switch == "-L": + elif switch == '-L': append(conf['library_dirs'], value) - elif switch == "-R": + elif switch == '-R': append(conf['runtime_library_dirs'], value) - elif word.startswith("-Wl"): + elif word.startswith('-Wl'): linkopts = word.split(',') append_dict(conf, flaglist(linkopts[1:])) - elif word == "-rpath": + elif word == '-rpath': append_next_word = conf['runtime_library_dirs'] - elif word == "-Xlinker": + elif word == '-Xlinker': append_next_word = 
conf['extra_link_args'] else: - #log.warn("unrecognized flag '%s'" % word) + # log.warn("unrecognized flag '%s'" % word) pass return conf + def prepend_to_flags(path, flags): """Prepend a path to compiler flags with absolute paths""" if not path: return flags + def append_path(m): switch = m.group(1) open_quote = m.group(4) @@ -904,23 +914,23 @@ def append_path(m): moded_path = os.path.normpath(path + os.path.sep + old_path) return switch + open_quote + moded_path + close_quote return m.group(0) - return re.sub(r'((^|\s+)(-I|-L))(\s*["\']?)(\S+)(["\']?)', - append_path, flags) + + return re.sub(r'((^|\s+)(-I|-L))(\s*["\']?)(\S+)(["\']?)', append_path, flags) + def strip_prefix(prefix, string): if not prefix: return string return re.sub(r'^' + prefix, '', string) -# -------------------------------------------------------------------- -from distutils.text_file import TextFile +# -------------------------------------------------------------------- # Regexes needed for parsing Makefile-like syntaxes -import re as _re -_variable_rx = _re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") -_findvar1_rx = _re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") -_findvar2_rx = _re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") +_variable_rx = re.compile(r'([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)') +_findvar1_rx = re.compile(r'\$\(([A-Za-z][A-Za-z0-9_]*)\)') +_findvar2_rx = re.compile(r'\${([A-Za-z][A-Za-z0-9_]*)}') + def makefile(fileobj, dct=None): """Parse a Makefile-style file. @@ -929,10 +939,7 @@ def makefile(fileobj, dct=None): optional dictionary is passed in as the second argument, it is used instead of a new dictionary. """ - fp = TextFile(file=fileobj, - strip_comments=1, - skip_blanks=1, - join_lines=1) + fp = TextFile(file=fileobj, strip_comments=1, skip_blanks=1, join_lines=1) if dct is None: dct = {} @@ -941,20 +948,24 @@ def makefile(fileobj, dct=None): while 1: line = fp.readline() - if line is None: # eof + if line is None: # eof break m = _variable_rx.match(line) if m: n, v = m.group(1, 2) v = str.strip(v) - if "$" in v: + if '$' in v: notdone[n] = v else: - try: v = int(v) - except ValueError: pass + try: + v = int(v) + except ValueError: + pass done[n] = v - try: del notdone[n] - except KeyError: pass + try: + del notdone[n] + except KeyError: + pass fp.close() # do variable interpolation here @@ -971,14 +982,15 @@ def makefile(fileobj, dct=None): # get it on a subsequent round found = False else: - done[n] = item = "" + done[n] = item = '' if found: - after = value[m.end():] - value = value[:m.start()] + item + after - if "$" in after: + after = value[m.end() :] + value = value[: m.start()] + item + after + if '$' in after: notdone[name] = value else: - try: value = int(value) + try: + value = int(value) except ValueError: done[name] = str.strip(value) else: @@ -992,4 +1004,5 @@ def makefile(fileobj, dct=None): dct.update(done) return dct + # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/conf/cyautodoc.py b/src/binding/petsc4py/conf/cyautodoc.py index b36b46145b4..d7784c106aa 100644 --- a/src/binding/petsc4py/conf/cyautodoc.py +++ b/src/binding/petsc4py/conf/cyautodoc.py @@ -8,24 +8,24 @@ ExpressionWriter as BaseExpressionWriter, AnnotationWriter as BaseAnnotationWriter, ) +from Cython.Compiler.Errors import error class ExpressionWriter(BaseExpressionWriter): - def visit_IndexNode(self, node): self.visit(node.base) - self.put(u"[") + self.put('[') if isinstance(node.index, TupleNode): if node.index.subexpr_nodes(): self.emit_sequence(node.index) 
else: - self.put(u"()") + self.put('()') else: self.visit(node.index) - self.put(u"]") + self.put(']') def visit_UnicodeNode(self, node): - self.emit_string(node, "") + self.emit_string(node, '') class AnnotationWriter(ExpressionWriter, BaseAnnotationWriter): @@ -33,7 +33,6 @@ class AnnotationWriter(ExpressionWriter, BaseAnnotationWriter): class EmbedSignature(CythonTransform): - def __init__(self, context): super(EmbedSignature, self).__init__(context) self.class_name = None @@ -44,15 +43,11 @@ def _select_format(self, embed, clinic): def _fmt_expr(self, node): writer = ExpressionWriter() - result = writer.write(node) - # print(type(node).__name__, '-->', result) - return result + return writer.write(node) def _fmt_annotation(self, node): writer = AnnotationWriter() - result = writer.write(node) - # print(type(node).__name__, '-->', result) - return result + return writer.write(node) def _fmt_arg(self, arg): annotation = None @@ -88,10 +83,16 @@ def _fmt_star_arg(self, arg): arg_doc = arg_doc + (': %s' % annotation) return arg_doc - def _fmt_arglist(self, args, - npoargs=0, npargs=0, pargs=None, - nkargs=0, kargs=None, - hide_self=False): + def _fmt_arglist( + self, + args, + npoargs=0, + npargs=0, + pargs=None, + nkargs=0, + kargs=None, + hide_self=False, + ): arglist = [] for arg in args: if not hide_self or not arg.entry.is_self_arg: @@ -112,18 +113,26 @@ def _fmt_arglist(self, args, def _fmt_ret_type(self, ret): if ret is PyrexTypes.py_object_type: return None - else: - return ret.declaration_code("", for_display=1) - - def _fmt_signature(self, cls_name, func_name, args, - npoargs=0, npargs=0, pargs=None, - nkargs=0, kargs=None, - return_expr=None, - return_type=None, hide_self=False): - arglist = self._fmt_arglist(args, - npoargs, npargs, pargs, - nkargs, kargs, - hide_self=hide_self) + return ret.declaration_code('', for_display=1) + + def _fmt_signature( + self, + node, + cls_name, + func_name, + args, + npoargs=0, + npargs=0, + pargs=None, + nkargs=0, + kargs=None, + return_expr=None, + return_type=None, + hide_self=False, + ): + arglist = self._fmt_arglist( + args, npoargs, npargs, pargs, nkargs, kargs, hide_self=hide_self + ) arglist_doc = ', '.join(arglist) func_doc = '%s(%s)' % (func_name, arglist_doc) if cls_name: @@ -137,25 +146,31 @@ def _fmt_signature(self, cls_name, func_name, args, if ret_doc: docfmt = self._select_format('%s -> %s', '%s -> (%s)') func_doc = docfmt % (func_doc, ret_doc) + else: + if not func_doc.startswith('_') and not func_name.startswith('_'): + error( + node.pos, + f'cyautodoc._fmt_signature: missing return type for {func_doc}', + ) return func_doc def _fmt_relative_position(self, pos): - return 'Source code at ' + ':'.join((str(pos[0].get_filenametable_entry()), str(pos[1]))) + return 'Source code at ' + ':'.join( + (str(pos[0].get_filenametable_entry()), str(pos[1])) + ) def _embed_signature(self, signature, pos, node_doc): pos = self._fmt_relative_position(pos) if node_doc: - docfmt = self._select_format("%s\n%s\n%s", "%s\n--\n\n%s") + docfmt = self._select_format('%s\n%s\n%s', '%s\n--\n\n%s') return docfmt % (signature, node_doc, pos) - else: - docfmt = self._select_format("%s\n%s", "%s\n--\n\n%s") - return docfmt % (signature, pos) + docfmt = self._select_format('%s\n%s', '%s\n--\n\n%s') + return docfmt % (signature, pos) def __call__(self, node): if not Options.docstrings: return node - else: - return super(EmbedSignature, self).__call__(node) + return super(EmbedSignature, self).__call__(node) def visit_ClassDefNode(self, node): oldname = 
self.class_name @@ -180,41 +195,40 @@ def visit_DefNode(self, node): if not self.current_directives['embedsignature']: return node - is_constructor = False hide_self = False if node.entry.is_special: - is_constructor = self.class_node and node.name == '__init__' - if not is_constructor: - return node - class_name, func_name = None, self.class_name - hide_self = True - else: - class_name, func_name = self.class_name, node.name + return node + class_name, func_name = self.class_name, node.name npoargs = getattr(node, 'num_posonly_args', 0) nkargs = getattr(node, 'num_kwonly_args', 0) npargs = len(node.args) - nkargs - npoargs signature = self._fmt_signature( - class_name, func_name, node.args, - npoargs, npargs, node.star_arg, - nkargs, node.starstar_arg, + node, + class_name, + func_name, + node.args, + npoargs, + npargs, + node.star_arg, + nkargs, + node.starstar_arg, return_expr=node.return_type_annotation, - return_type=None, hide_self=hide_self) + return_type=None, + hide_self=hide_self, + ) if signature: - if is_constructor: - doc_holder = self.class_node.entry.type.scope - else: - doc_holder = node.entry + doc_holder = node.entry if doc_holder.doc is not None: old_doc = doc_holder.doc - elif not is_constructor and getattr(node, 'py_func', None) is not None: + elif getattr(node, 'py_func', None) is not None: old_doc = node.py_func.entry.doc else: old_doc = None new_doc = self._embed_signature(signature, node.pos, old_doc) doc_holder.doc = EncodedString(new_doc) - if not is_constructor and getattr(node, 'py_func', None) is not None: + if getattr(node, 'py_func', None) is not None: node.py_func.entry.doc = EncodedString(new_doc) return node @@ -225,9 +239,12 @@ def visit_CFuncDefNode(self, node): return node signature = self._fmt_signature( - self.class_name, node.declarator.base.name, + node, + self.class_name, + node.declarator.base.name, node.declarator.args, - return_type=node.return_type) + return_type=node.return_type, + ) if signature: if node.entry.doc is not None: old_doc = node.entry.doc @@ -252,7 +269,7 @@ def visit_PropertyNode(self, node): type_name = None if entry.visibility == 'public': # property synthesised from a cdef public attribute - type_name = entry.type.declaration_code("", for_display=1) + type_name = entry.type.declaration_code('', for_display=1) if not entry.type.is_pyobject: type_name = "'%s'" % type_name elif entry.type.is_extension_type: @@ -278,23 +295,28 @@ def visit_PropertyNode(self, node): # Monkeypatch AutoDocTransforms.EmbedSignature try: from Cython.Compiler import AutoDocTransforms + AutoDocTransforms.EmbedSignature = EmbedSignature except Exception as exc: import logging + logging.Logger(__name__).exception(exc) # Monkeypatch Nodes.raise_utility_code try: from Cython.Compiler.Nodes import raise_utility_code + code = raise_utility_code.impl try: - ipos = code.index("if (tb) {\n#if CYTHON_COMPILING_IN_PYPY\n") + ipos = code.index('if (tb) {\n#if CYTHON_COMPILING_IN_PYPY\n') except ValueError: ipos = None else: raise_utility_code.impl = code[:ipos] + code[ipos:].replace( - 'CYTHON_COMPILING_IN_PYPY', '!CYTHON_FAST_THREAD_STATE', 1) + 'CYTHON_COMPILING_IN_PYPY', '!CYTHON_FAST_THREAD_STATE', 1 + ) del raise_utility_code, code, ipos except Exception as exc: import logging + logging.Logger(__name__).exception(exc) diff --git a/src/binding/petsc4py/conf/cythonize.py b/src/binding/petsc4py/conf/cythonize.py index 33774f5c204..650f291b86f 100755 --- a/src/binding/petsc4py/conf/cythonize.py +++ b/src/binding/petsc4py/conf/cythonize.py @@ -1,5 +1,6 @@ 
#!/usr/bin/env python3 """Run Cython with custom options.""" + import os import sys @@ -48,5 +49,5 @@ def main(): sys.exit(cythonize(args)) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/src/binding/petsc4py/conf/epydocify.py b/src/binding/petsc4py/conf/epydocify.py index 88723b791b6..b84168d46dd 100755 --- a/src/binding/petsc4py/conf/epydocify.py +++ b/src/binding/petsc4py/conf/epydocify.py @@ -2,29 +2,33 @@ # -------------------------------------------------------------------- -from petsc4py import PETSc # -------------------------------------------------------------------- try: from docutils.nodes import NodeVisitor + NodeVisitor.unknown_visit = lambda self, node: None - NodeVisitor.unknown_departure = lambda self, node: None + NodeVisitor.unknown_departure = lambda self, node: None except ImportError: pass -try: # epydoc 3.0.1 + docutils 0.6 +try: # epydoc 3.0.1 + docutils 0.6 from docutils.nodes import Text from UserString import UserString + if not isinstance(Text, UserString): + def Text_get_data(s): try: return s._data except AttributeError: return s.astext() + def Text_set_data(s, d): s.astext = lambda: d s._data = d + Text.data = property(Text_get_data, Text_set_data) except ImportError: pass @@ -32,18 +36,19 @@ def Text_set_data(s, d): # -------------------------------------------------------------------- from epydoc.docwriter import dotgraph +from epydoc import docstringparser as dsp import re -dotgraph._DOT_VERSION_RE = \ - re.compile(r'dot (?:- Graphviz )version ([\d\.]+)') +import sys +import os +import epydoc.cli -try: +dotgraph._DOT_VERSION_RE = re.compile(r'dot (?:- Graphviz )version ([\d\.]+)') - dotgraph.DotGraph.DEFAULT_HTML_IMAGE_FORMAT +try: dotgraph.DotGraph.DEFAULT_HTML_IMAGE_FORMAT = 'png' except AttributeError: - DotGraph_to_html = dotgraph.DotGraph.to_html DotGraph_run_dot = dotgraph.DotGraph._run_dot @@ -51,14 +56,15 @@ def to_html(self, image_file, image_url, center=True): if image_file[-4:] == '.gif': image_file = image_file[:-4] + '.png' if image_url[-4:] == '.gif': - image_url = image_url[:-4] + '.png' + image_url = image_url[:-4] + '.png' return DotGraph_to_html(self, image_file, image_url) def _run_dot(self, *options): if '-Tgif' in options: opts = list(options) for i, o in enumerate(opts): - if o == '-Tgif': opts[i] = '-Tpng' + if o == '-Tgif': + opts[i] = '-Tpng' options = type(options)(opts) return DotGraph_run_dot(self, *options) @@ -67,27 +73,29 @@ def _run_dot(self, *options): # -------------------------------------------------------------------- -import re _SIGNATURE_RE = re.compile( # Class name (for builtin methods) - r'^\s*((?P<class>\w+)\.)?' + + r'^\s*((?P<class>\w+)\.)?' + + # The function name - r'(?P<func>\w+)' + + r'(?P<func>\w+)' + + # The parameters - r'\(((?P<self>(?:self|cls|mcs)),?)?(?P<params>.*)\)' + + r'\(((?P<self>(?:self|cls|mcs)),?)?(?P<params>.*)\)' + + # The return value (optional) - r'(\s*(->)\s*(?P<return>\S.*?))?'+ + r'(\s*(->)\s*(?P<return>\S.*?))?'
+ + # The end marker - r'\s*(\n|\s+(--|<=+>)\s+|$|\.\s+|\.\n)') + r'\s*(\n|\s+(--|<=+>)\s+|$|\.\s+|\.\n)' +) + -from epydoc import docstringparser as dsp dsp._SIGNATURE_RE = _SIGNATURE_RE # -------------------------------------------------------------------- -import sys, os -import epydoc.cli def epydocify(): dirname = os.path.dirname(__file__) @@ -95,6 +103,7 @@ def epydocify(): sys.argv.append('--config=' + config) epydoc.cli.cli() + if __name__ == '__main__': epydocify() diff --git a/src/binding/petsc4py/conf/requirements-docs.txt b/src/binding/petsc4py/conf/requirements-docs.txt new file mode 100644 index 00000000000..add1c487218 --- /dev/null +++ b/src/binding/petsc4py/conf/requirements-docs.txt @@ -0,0 +1,5 @@ +pydata-sphinx-theme==0.15.1 +sphinx>=7.0.0 +sphobjinv +typing_extensions;python_version<'3.11' +pylit diff --git a/src/binding/petsc4py/conf/requirements-lint.txt b/src/binding/petsc4py/conf/requirements-lint.txt new file mode 100644 index 00000000000..f3029a1bfca --- /dev/null +++ b/src/binding/petsc4py/conf/requirements-lint.txt @@ -0,0 +1,3 @@ +cython >= 3 +cython-lint >= 0.16.2 +ruff == 0.4.1 diff --git a/src/binding/petsc4py/conf/stubgen.py b/src/binding/petsc4py/conf/stubgen.py index 6a13433031b..4357855b94b 100644 --- a/src/binding/petsc4py/conf/stubgen.py +++ b/src/binding/petsc4py/conf/stubgen.py @@ -8,11 +8,7 @@ def is_cyfunction(obj): def is_function(obj): - return ( - inspect.isbuiltin(obj) - or is_cyfunction(obj) - or type(obj) is type(ord) - ) + return inspect.isbuiltin(obj) or is_cyfunction(obj) or type(obj) is type(ord) def is_method(obj): @@ -20,7 +16,8 @@ def is_method(obj): inspect.ismethoddescriptor(obj) or inspect.ismethod(obj) or is_cyfunction(obj) - or type(obj) in ( + or type(obj) + in ( type(str.index), type(str.__add__), type(str.__new__), @@ -29,21 +26,14 @@ def is_method(obj): def is_classmethod(obj): - return ( - inspect.isbuiltin(obj) - or type(obj).__name__ in ( - 'classmethod', - 'classmethod_descriptor', - ) + return inspect.isbuiltin(obj) or type(obj).__name__ in ( + 'classmethod', + 'classmethod_descriptor', ) def is_staticmethod(obj): - return ( - type(obj).__name__ in ( - 'staticmethod', - ) - ) + return type(obj).__name__ in ('staticmethod',) def is_constant(obj): @@ -63,8 +53,7 @@ def is_class(obj): class Lines(list): - - INDENT = " " * 4 + INDENT = ' ' * 4 level = 0 @property @@ -76,7 +65,7 @@ def add(self, lines): if lines is None: return if isinstance(lines, str): - lines = textwrap.dedent(lines).strip().split("\n") + lines = textwrap.dedent(lines).strip().split('\n') indent = self.INDENT * self.level for line in lines: self.append(indent + line) @@ -84,47 +73,49 @@ def add(self, lines): def signature(obj): doc = obj.__doc__ - doc = doc or f"{obj.__name__}: Any" # FIXME remove line + doc = doc or f'{obj.__name__}: Any' # FIXME remove line sig = doc.split('\n', 1)[0].split('.', 1)[-1] return sig or None + def visit_constant(constant): name, value = constant - return f"{name}: Final[{type(value).__name__}] = ..." + return f'{name}: Final[{type(value).__name__}] = ...' def visit_function(function): sig = signature(function) - return f"def {sig}: ..." + return f'def {sig}: ...' def visit_method(method): sig = signature(method) - return f"def {sig}: ..." + return f'def {sig}: ...' 
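To see what these stub-generation helpers produce, here is a minimal self-contained sketch (not part of the patch; the toy ``ping`` function and its docstring are hypothetical, and ``signature`` below mirrors the helper defined in this hunk)::

    # Mirrors conf/stubgen.py's signature() shown above: take the first
    # docstring line and strip the leading "Class." qualifier, if any.
    def signature(obj):
        doc = obj.__doc__ or f'{obj.__name__}: Any'
        return doc.split('\n', 1)[0].split('.', 1)[-1] or None

    def ping(self, n):
        """Toy.ping(self, n: int) -> bool
        Ping the toy object."""

    # Same shape as visit_method()/visit_function() output:
    print(f'def {signature(ping)}: ...')
    # prints: def ping(self, n: int) -> bool: ...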
def visit_datadescr(datadescr): sig = signature(datadescr) - return f"{sig}" + return f'{sig}' def visit_property(prop, name=None): sig = signature(prop.fget) pname = name or prop.fget.__name__ ptype = sig.rsplit('->', 1)[-1].strip() - return f"{pname}: {ptype}" + return f'{pname}: {ptype}' def visit_constructor(cls, name='__init__', args=None): - init = (name == '__init__') + init = name == '__init__' argname = cls.__name__.lower() argtype = cls.__name__ - initarg = args or f"{argname}: Optional[{argtype}] = None" + initarg = args or f'{argname}: Optional[{argtype}] = None' selfarg = 'self' if init else 'cls' rettype = 'None' if init else argtype - arglist = f"{selfarg}, {initarg}" - sig = f"{name}({arglist}) -> {rettype}" - return f"def {sig}: ..." + arglist = f'{selfarg}, {initarg}' + sig = f'{name}({arglist}) -> {rettype}' + return f'def {sig}: ...' + def visit_class(cls, outer=None, done=None): skip = { @@ -134,22 +125,22 @@ def visit_class(cls, outer=None, done=None): '__weakref__', '__pyx_vtable__', '__enum2str', # FIXME refactor implementation - '_traceback_', # FIXME maybe refactor? + '_traceback_', # FIXME maybe refactor? '__lt__', '__le__', '__ge__', '__gt__', } special = { - '__len__': "__len__(self) -> int", - '__bool__': "__bool__(self) -> bool", - '__hash__': "__hash__(self) -> int", - '__int__': "__int__(self) -> int", - '__index__': "__int__(self) -> int", - '__str__': "__str__(self) -> str", - '__repr__': "__repr__(self) -> str", - '__eq__': "__eq__(self, other: object) -> bool", - '__ne__': "__ne__(self, other: object) -> bool", + '__len__': '__len__(self) -> int', + '__bool__': '__bool__(self) -> bool', + '__hash__': '__hash__(self) -> int', + '__int__': '__int__(self) -> int', + '__index__': '__int__(self) -> int', + '__str__': '__str__(self) -> str', + '__repr__': '__repr__(self) -> str', + '__eq__': '__eq__(self, other: object) -> bool', + '__ne__': '__ne__(self, other: object) -> bool', } constructor = ( '__new__', @@ -159,26 +150,28 @@ def visit_class(cls, outer=None, done=None): qualname = cls.__name__ cls_name = cls.__name__ if outer is not None and cls_name.startswith(outer): - cls_name = cls_name[len(outer):] - qualname = f"{outer}.{cls_name}" + cls_name = cls_name[len(outer) :] + qualname = f'{outer}.{cls_name}' override = OVERRIDE.get(qualname, {}) done = set() if done is None else done lines = Lines() try: + class sub(cls): pass + final = False except TypeError: final = True if final: - lines.add = "@final" + lines.add = '@final' base = cls.__base__ if base is object: - lines.add = f"class {cls_name}:" + lines.add = f'class {cls_name}:' else: - lines.add = f"class {cls_name}({base.__name__}):" + lines.add = f'class {cls_name}({base.__name__}):' lines.level += 1 start = len(lines) @@ -216,7 +209,6 @@ def members(seq): continue for name in members(keys): - if name in override: done.add(name) lines.add = override[name] @@ -225,7 +217,7 @@ def members(seq): if name in special: done.add(name) sig = special[name] - lines.add = f"def {sig}: ..." + lines.add = f'def {sig}: ...' 
continue attr = getattr(cls, name) @@ -235,12 +227,12 @@ def members(seq): if name == attr.__name__: obj = dct[name] if is_classmethod(obj): - lines.add = "@classmethod" + lines.add = '@classmethod' elif is_staticmethod(obj): - lines.add = "@staticmethod" + lines.add = '@staticmethod' lines.add = visit_method(attr) elif True: - lines.add = f"{name} = {attr.__name__}" + lines.add = f'{name} = {attr.__name__}' continue if is_datadescr(attr): @@ -258,13 +250,12 @@ def members(seq): lines.add = visit_constant((name, attr)) continue - leftovers = [name for name in keys if - name not in done and name not in skip] + leftovers = [name for name in keys if name not in done and name not in skip] if leftovers: - raise RuntimeError(f"leftovers: {leftovers}") + raise RuntimeError(f'leftovers: {leftovers}') if len(lines) == start: - lines.add = "pass" + lines.add = 'pass' lines.level -= 1 return lines @@ -285,14 +276,17 @@ def visit_module(module, done=None): lines = Lines() keys = list(module.__dict__.keys()) - keys.sort(key=lambda name: name.startswith("_")) + keys.sort(key=lambda name: name.startswith('_')) constants = [ - (name, getattr(module, name)) for name in keys - if all(( - name not in done and name not in skip, - isinstance(getattr(module, name), int), - )) + (name, getattr(module, name)) + for name in keys + if all( + ( + name not in done and name not in skip, + isinstance(getattr(module, name), int), + ) + ) ] for name, value in constants: done.add(name) @@ -301,7 +295,7 @@ def visit_module(module, done=None): else: lines.add = visit_constant((name, value)) if constants: - lines.add = "" + lines.add = '' for name in keys: if name in done or name in skip: @@ -313,19 +307,22 @@ def visit_module(module, done=None): if value.__module__ != module.__name__: continue lines.add = visit_class(value) - lines.add = "" + lines.add = '' instances = [ - (k, getattr(module, k)) for k in keys - if all(( - k not in done and k not in skip, - type(getattr(module, k)) is value, - )) + (k, getattr(module, k)) + for k in keys + if all( + ( + k not in done and k not in skip, + type(getattr(module, k)) is value, + ) + ) ] for attrname, attrvalue in instances: done.add(attrname) lines.add = visit_constant((attrname, attrvalue)) if instances: - lines.add = "" + lines.add = '' continue if is_function(value): @@ -333,10 +330,10 @@ def visit_module(module, done=None): if name == value.__name__: lines.add = visit_function(value) else: - lines.add = f"{name} = {value.__name__}" + lines.add = f'{name} = {value.__name__}' continue - lines.add = "" + lines.add = '' for name in keys: if name in done or name in skip: continue @@ -347,10 +344,9 @@ def visit_module(module, done=None): else: lines.add = visit_constant((name, value)) - leftovers = [name for name in keys if - name not in done and name not in skip] + leftovers = [name for name in keys if name not in done and name not in skip] if leftovers: - raise RuntimeError(f"leftovers: {leftovers}") + raise RuntimeError(f'leftovers: {leftovers}') return lines @@ -410,10 +406,9 @@ def visit_module(module, done=None): """ OVERRIDE = { - 'Error': { - }, - '__pyx_capi__': "__pyx_capi__: Final[Dict[str, Any]] = ...", - '__type_registry__': "__type_registry__: Final[Dict[int, type[Object]]] = ...", + 'Error': {}, + '__pyx_capi__': '__pyx_capi__: Final[Dict[str, Any]] = ...', + '__type_registry__': '__type_registry__: Final[Dict[int, type[Object]]] = ...', } TYPING = """ @@ -422,9 +417,10 @@ def visit_module(module, done=None): def visit_petsc4py_PETSc(done=None): from petsc4py import 
PETSc as module + lines = Lines() lines.add = IMPORTS - lines.add = "" + lines.add = '' lines.add = visit_module(module) lines.add = TYPING return lines diff --git a/src/binding/petsc4py/demo/legacy/poisson2d/poisson2d.py b/src/binding/petsc4py/demo/legacy/poisson2d/poisson2d.py index e794c9e797e..a2865701bcf 100644 --- a/src/binding/petsc4py/demo/legacy/poisson2d/poisson2d.py +++ b/src/binding/petsc4py/demo/legacy/poisson2d/poisson2d.py @@ -9,7 +9,7 @@ # # u = 0 for x = 0, x = 1, y = 0, y = 1 # -# A finite difference approximation with the usual 7-point stencil +# A finite difference approximation with the usual 5-point stencil # is used to discretize the boundary value problem to obtain a # nonlinear system of equations. The problem is solved in a 2D # rectangular domain, using distributed arrays (DAs) to partition diff --git a/src/binding/petsc4py/demo/legacy/wrap-cython/Bratu3D.pyx b/src/binding/petsc4py/demo/legacy/wrap-cython/Bratu3D.pyx index 2d67c1e053c..f0fcb67b463 100644 --- a/src/binding/petsc4py/demo/legacy/wrap-cython/Bratu3D.pyx +++ b/src/binding/petsc4py/demo/legacy/wrap-cython/Bratu3D.pyx @@ -1,7 +1,7 @@ from petsc4py.PETSc cimport Vec, PetscVec from petsc4py.PETSc cimport Mat, PetscMat from petsc4py.PETSc cimport DM, PetscDM -from petsc4py.PETSc cimport SNES, PetscSNES +from petsc4py.PETSc cimport SNES from petsc4py.PETSc import Error @@ -12,18 +12,21 @@ cdef extern from "Bratu3Dimpl.h": int FormFunction (PetscDM da, PetscVec x, PetscVec F, Params *p) int FormJacobian (PetscDM da, PetscVec x, PetscMat J, Params *p) + def formInitGuess(Vec x, DM da, double lambda_): cdef int ierr cdef Params p = {"lambda_" : lambda_} ierr = FormInitGuess(da.dm, x.vec, &p) if ierr != 0: raise Error(ierr) + def formFunction(SNES snes, Vec x, Vec f, DM da, double lambda_): cdef int ierr cdef Params p = {"lambda_" : lambda_} ierr = FormFunction(da.dm, x.vec, f.vec, &p) if ierr != 0: raise Error(ierr) + def formJacobian(SNES snes, Vec x, Mat J, Mat P, DM da, double lambda_): cdef int ierr cdef Params p = {"lambda_" : lambda_} diff --git a/src/binding/petsc4py/demo/poisson2d/poisson2d.py b/src/binding/petsc4py/demo/poisson2d/poisson2d.py index bb998cae845..ee4a6f385e3 100644 --- a/src/binding/petsc4py/demo/poisson2d/poisson2d.py +++ b/src/binding/petsc4py/demo/poisson2d/poisson2d.py @@ -49,13 +49,15 @@ n = OptDB.getInt('n', 5) h = 1.0 / (n + 1) -# Sparse matrices are represented by `PETSc.Mat` objects. -# +# Matrices are instances of the `PETSc.Mat` class. + +A = PETSc.Mat() + +# Create the underlying PETSc C Mat object. # You can omit the ``comm`` argument if your objects live on # `PETSc.COMM_WORLD` but it is a dangerous choice to rely on default values # for such important arguments. -A = PETSc.Mat() A.create(comm=PETSc.COMM_WORLD) # Specify global matrix shape with a tuple. @@ -70,7 +72,8 @@ # # A.setSizes(((PETSc.DECIDE, n * n), (PETSc.DECIDE, n * n))) -# Various `sparse matrix formats <petsc.MatType>` can be selected: +# Here we use a sparse matrix of AIJ type +# Various `matrix formats <petsc.MatType>` can be selected: A.setType(PETSc.Mat.Type.AIJ) @@ -112,7 +115,7 @@ def index_to_grid(r): column = row + 1 A[row, column] = -1.0 / h**2 -# At this stage, any exchange of information required in the matrix assembly +# At this stage, any exchange of information required in the matrix assembly # process has not occurred. We achieve this by calling `Mat.assemblyBegin` and # then `Mat.assemblyEnd`.
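For context, a minimal sketch of the full create/size/type/assemble/solve pattern these demo comments describe; the 3x3 grid and diagonal-only entries are illustrative placeholders, while every call below is documented petsc4py API::

    from petsc4py import PETSc

    A = PETSc.Mat().create(comm=PETSc.COMM_WORLD)
    A.setSizes(((PETSc.DECIDE, 9), (PETSc.DECIDE, 9)))  # 3x3 grid, n*n = 9
    A.setType(PETSc.Mat.Type.AIJ)
    A.setUp()
    rstart, rend = A.getOwnershipRange()
    for row in range(rstart, rend):
        A[row, row] = 4.0      # toy diagonal entries only, for brevity
    A.assemblyBegin()          # start exchanging off-process values...
    A.assemblyEnd()            # ...and complete it; A is now usable

    x, b = A.createVecs()      # right (solution) and left (rhs) vectors
    b.set(1.0)
    ksp = PETSc.KSP().create(comm=PETSc.COMM_WORLD)
    ksp.setOperators(A)
    ksp.setFromOptions()       # honor -ksp_type, -pc_type, ... options
    ksp.solve(b, x)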
diff --git a/src/binding/petsc4py/demo/python_types/ksppython_protocol.py b/src/binding/petsc4py/demo/python_types/ksppython_protocol.py index 9f0fa6dce9b..b3c28f1b15c 100644 --- a/src/binding/petsc4py/demo/python_types/ksppython_protocol.py +++ b/src/binding/petsc4py/demo/python_types/ksppython_protocol.py @@ -1,13 +1,12 @@ from petsc4py.PETSc import KSP -from petsc4py.PETSc import Mat from petsc4py.PETSc import Vec from petsc4py.PETSc import Viewer # A template class with the Python methods supported by KSPPYTHON -class KSPPythonProtocol: +class KSPPythonProtocol: def solve(self, ksp: KSP, b: Vec, x: Vec) -> None: """Solve the linear system with right-hand side b. Return solution in x.""" ... @@ -39,4 +38,3 @@ def buildResidual(self, ksp: KSP, t: Vec, r: Vec) -> None: def reset(self, ksp: KSP) -> None: """Reset the Krylov solver.""" ... - diff --git a/src/binding/petsc4py/demo/python_types/mat.py b/src/binding/petsc4py/demo/python_types/mat.py index ec62197f587..873741f1437 100644 --- a/src/binding/petsc4py/demo/python_types/mat.py +++ b/src/binding/petsc4py/demo/python_types/mat.py @@ -9,7 +9,7 @@ # # u = 0 for x = 0, x = 1, y = 0, y = 1 # -# A finite difference approximation with the usual 7-point stencil +# A finite difference approximation with the usual 5-point stencil # is used to discretize the boundary value problem to obtain a # nonlinear system of equations. The problem is solved in a 2D # rectangular domain, using distributed arrays (DAs) to partition @@ -18,20 +18,21 @@ # ------------------------------------------------------------------------ # We first import petsc4py and sys to initialize PETSc -import sys, petsc4py +import sys +import petsc4py + petsc4py.init(sys.argv) # Import the PETSc module from petsc4py import PETSc + # Here we define a class representing the discretized operator # This allows us to apply the operator "matrix-free" class Poisson2D: - def __init__(self, da): - assert da.getDim() == 2 self.da = da - self.localX = da.createLocalVec() + self.localX = da.createLocalVec() # This is the method that PETSc will look for when applying # the operator. `X` is the PETSc input vector, `Y` the output vector, @@ -39,7 +40,7 @@ def __init__(self, da): def mult(self, mat, X, Y): # Grid sizes mx, my = self.da.getSizes() - hx, hy = [1.0/m for m in [mx, my]] + hx, hy = (1.0 / m for m in [mx, my]) # Bounds for the local part of the grid this process owns (xs, xe), (ys, ye) = self.da.getRanges() @@ -54,20 +55,24 @@ def mult(self, mat, X, Y): # Loop on the local grid and compute the local action of the operator for j in range(ys, ye): for i in range(xs, xe): - u = x[i, j] # center + u = x[i, j] # center u_e = u_w = u_n = u_s = 0 - if i > 0: u_w = x[i-1, j] # west - if i < mx-1: u_e = x[i+1, j] # east - if j > 0: u_s = x[i, j-1] # south - if j < my-1: u_n = x[i, j+1] # north - u_xx = (-u_e + 2*u - u_w)*hy/hx - u_yy = (-u_n + 2*u - u_s)*hx/hy + if i > 0: + u_w = x[i - 1, j] # west + if i < mx - 1: + u_e = x[i + 1, j] # east + if j > 0: + u_s = x[i, j - 1] # south + if j < my - 1: + u_n = x[i, j + 1] # north + u_xx = (-u_e + 2 * u - u_w) * hy / hx + u_yy = (-u_n + 2 * u - u_s) * hx / hy y[i, j] = u_xx + u_yy # This is the method that PETSc will look for when the diagonal of the matrix is needed.
def getDiagonal(self, mat, D): mx, my = self.da.getSizes() - hx, hy = [1.0/m for m in [mx, my]] + hx, hy = (1.0 / m for m in [mx, my]) (xs, xe), (ys, ye) = self.da.getRanges() d = self.da.getVecArray(D) @@ -75,22 +80,24 @@ def getDiagonal(self, mat, D): # Loop on the local grid and compute the diagonal for j in range(ys, ye): for i in range(xs, xe): - d[i, j] = 2*hy/hx + 2*hx/hy + d[i, j] = 2 * hy / hx + 2 * hx / hy # The class can contain other methods that PETSc won't use def formRHS(self, B): b = self.da.getVecArray(B) mx, my = self.da.getSizes() - hx, hy = [1.0/m for m in [mx, my]] + hx, hy = (1.0 / m for m in [mx, my]) (xs, xe), (ys, ye) = self.da.getRanges() for j in range(ys, ye): for i in range(xs, xe): - b[i, j] = 1*hx*hy + b[i, j] = 1 * hx * hy # Access the option database and read options from the command line OptDB = PETSc.Options() -nx, ny = OptDB.getIntArray('grid', (16, 16)) # Read `-grid <nx,ny>`, defaults to 16,16 +nx, ny = OptDB.getIntArray( + 'grid', (16, 16) +) # Read `-grid <nx,ny>`, defaults to 16,16 # Create the distributed memory implementation for structured grid da = PETSc.DMDA().create([nx, ny], stencil_width=1) diff --git a/src/binding/petsc4py/demo/python_types/matpython_protocol.py b/src/binding/petsc4py/demo/python_types/matpython_protocol.py index 4981e92de77..b2995d66cea 100644 --- a/src/binding/petsc4py/demo/python_types/matpython_protocol.py +++ b/src/binding/petsc4py/demo/python_types/matpython_protocol.py @@ -9,8 +9,8 @@ # A template class with the Python methods supported by MATPYTHON -class MatPythonProtocol: +class MatPythonProtocol: def mult(self, A: Mat, x: Vec, y: Vec) -> None: """Matrix vector multiplication: y = A @ x.""" ... @@ -62,7 +62,7 @@ def shift(self, A: Mat, s: Scalar) -> None: def createSubMatrix(self, A: Mat, r: IS, c: IS, out: Mat) -> Mat: """Return the submatrix corresponding to r rows and c columns. - Matrix out must be reused if not None. + Matrix out must be reused if not None. """ ... @@ -70,7 +70,7 @@ def createSubMatrix(self, A: Mat, r: IS, c: IS, out: Mat) -> Mat: def zeroRowsColumns(self, A: Mat, r: IS, diag: Scalar, x: Vec, b: Vec) -> None: """Zero rows and columns of the matrix corresponding to the index set r. - Insert diag on the diagonal and modify vectors x and b accordingly if not None. + Insert diag on the diagonal and modify vectors x and b accordingly if not None. """ ... @@ -111,15 +111,21 @@ def copy(self, A: Mat, B: Mat, op: Mat.Structure) -> None: """Copy the matrix: B = A.""" ... - def productSetFromOptions(self, A: Mat, prodtype: str, X: Mat, Y: Mat, Z: Mat) -> bool: + def productSetFromOptions( + self, A: Mat, prodtype: str, X: Mat, Y: Mat, Z: Mat + ) -> bool: """The boolean flag indicating if the matrix supports prodtype.""" ... - def productSymbolic(self, A: Mat, product: Mat, producttype: str, X: Mat, Y: Mat, Z: Mat) -> None: + def productSymbolic( + self, A: Mat, product: Mat, producttype: str, X: Mat, Y: Mat, Z: Mat + ) -> None: """Perform the symbolic stage of the requested matrix product.""" ... - def productNumeric(self, A: Mat, product: Mat, producttype: str, X: Mat, Y: Mat, Z: Mat) -> None: + def productNumeric( + self, A: Mat, product: Mat, producttype: str, X: Mat, Y: Mat, Z: Mat + ) -> None: """Perform the numeric stage of the requested matrix product.""" ... @@ -147,8 +153,17 @@ def solveTransposeAdd(self, A: Mat, y: Vec, z: Vec, x: Vec) -> None: """Solve the equation: x = inv(A)^T y + z.""" ...
- def SOR(self, A: Mat, b: Vec, omega: float, sortype: Mat.SORType, - shift: float, its: int, lits: int, x: Vec) -> None: + def SOR( + self, + A: Mat, + b: Vec, + omega: float, + sortype: Mat.SORType, + shift: float, + its: int, + lits: int, + x: Vec, + ) -> None: """Perform SOR iterations.""" ... @@ -160,6 +175,6 @@ def imagPart(self, A: Mat) -> None: """Set real part to zero. A = imag(A).""" ... - def realPart(self, A: sMat) -> None: + def realPart(self, A: Mat) -> None: """Set imaginary part to zero. A = real(A).""" ... diff --git a/src/binding/petsc4py/demo/python_types/pc.py b/src/binding/petsc4py/demo/python_types/pc.py index ba239374dce..05ea79cc9af 100644 --- a/src/binding/petsc4py/demo/python_types/pc.py +++ b/src/binding/petsc4py/demo/python_types/pc.py @@ -1,6 +1,5 @@ # The user-defined Python class implementing the Jacobi method. class myJacobi: - # Setup the internal data. In this case, we access the matrix diagonal. def setUp(self, pc): _, P = pc.getOperators() diff --git a/src/binding/petsc4py/demo/python_types/pcpython_protocol.py b/src/binding/petsc4py/demo/python_types/pcpython_protocol.py index 62c16940dc5..0382aea1259 100644 --- a/src/binding/petsc4py/demo/python_types/pcpython_protocol.py +++ b/src/binding/petsc4py/demo/python_types/pcpython_protocol.py @@ -7,8 +7,8 @@ # A template class with the Python methods supported by PCPYTHON -class PCPythonProtocol: +class PCPythonProtocol: def apply(self, pc: PC, b: Vec, x: Vec) -> None: """Apply the preconditioner on vector b, return in x.""" ... @@ -43,6 +43,7 @@ def postSolve(self, pc: PC, ksp: KSP, b: Vec, x: Vec) -> None: This method is allowed to modify the right-hand side b and the solution x. """ + def view(self, pc: PC, viewer: Viewer) -> None: """View the preconditioner.""" ... @@ -58,4 +59,3 @@ def setUp(self, pc: PC) -> None: def reset(self, pc: PC) -> None: """Reset the preconditioner.""" ... 
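A brief usage sketch for these Python-type protocols (illustrative, not part of the patch): attaching a context object that implements the methods above, assuming the myJacobi class from demo/python_types/pc.py is importable as module ``pc``::

    from petsc4py import PETSc
    from pc import myJacobi  # the demo class shown above (assumed importable)

    ksp = PETSc.KSP().create()
    pc = ksp.getPC()
    pc.setType(PETSc.PC.Type.PYTHON)
    pc.setPythonContext(myJacobi())  # PETSc calls setUp()/apply() on it

    # Equivalent from the command line, if pc.py is on sys.path:
    #   -pc_type python -pc_python_type pc.myJacobi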
- diff --git a/src/binding/petsc4py/docs/source/apidoc.py b/src/binding/petsc4py/docs/source/apidoc.py index 07c5ff457c0..99e56f4369f 100644 --- a/src/binding/petsc4py/docs/source/apidoc.py +++ b/src/binding/petsc4py/docs/source/apidoc.py @@ -3,18 +3,16 @@ import inspect import textwrap from sphinx.util import logging + logger = logging.getLogger(__name__) + def is_cyfunction(obj): return type(obj).__name__ == 'cython_function_or_method' def is_function(obj): - return ( - inspect.isbuiltin(obj) - or is_cyfunction(obj) - or type(obj) is type(ord) - ) + return inspect.isbuiltin(obj) or is_cyfunction(obj) or type(obj) is type(ord) def is_method(obj): @@ -22,7 +20,8 @@ def is_method(obj): inspect.ismethoddescriptor(obj) or inspect.ismethod(obj) or is_cyfunction(obj) - or type(obj) in ( + or type(obj) + in ( type(str.index), type(str.__add__), type(str.__new__), @@ -31,25 +30,20 @@ def is_method(obj): def is_classmethod(obj): - return ( - inspect.isbuiltin(obj) - or type(obj).__name__ in ( - 'classmethod', - 'classmethod_descriptor', - ) + return inspect.isbuiltin(obj) or type(obj).__name__ in ( + 'classmethod', + 'classmethod_descriptor', ) def is_staticmethod(obj): - return ( - type(obj).__name__ in ( - 'staticmethod', - ) - ) + return type(obj).__name__ in ('staticmethod',) + def is_constant(obj): return isinstance(obj, (int, float, str, dict)) + def is_datadescr(obj): return inspect.isdatadescriptor(obj) and not hasattr(obj, 'fget') @@ -62,9 +56,12 @@ def is_class(obj): return inspect.isclass(obj) or type(obj) is type(int) -class Lines(list): +def is_hidden(obj): + return obj.__qualname__.startswith('_') + - INDENT = " " * 4 +class Lines(list): + INDENT = ' ' * 4 level = 0 @property @@ -82,16 +79,22 @@ def add(self, lines): self.append(indent + line) -def signature(obj): +def signature(obj, fail=True): doc = obj.__doc__ - doc = doc or f"{obj.__name__}: Any" # FIXME remove line + if not doc: + if fail and not is_hidden(obj): + logger.warning(f'Missing signature for {obj}') + doc = f'{obj.__name__}: Any' sig = doc.partition('\n')[0].split('.', 1)[-1] return sig or None -def docstring(obj): +def docstring(obj, fail=True): doc = obj.__doc__ - doc = doc or '' # FIXME + if not doc: + if fail and not is_hidden(obj): + logger.warning(f'Missing docstring for {obj}') + doc = '' link = None sig = None cl = is_class(obj) @@ -104,23 +107,36 @@ def docstring(obj): summary, _, docbody = doc.partition('\n') summary = summary.strip() docbody = textwrap.dedent(docbody).strip() - if docbody and sig: + + # raise warning if docstring is not provided for a method + if not summary and not is_function(obj) and is_method(obj): + logger.warning(f'docstring: Missing summary for {obj}') + + # warnings for docstrings that are not compliant + if len(summary) > 79: + logger.warning(f'Summary for {obj} too long.') + if docbody: if not summary.endswith('.'): - logger.warning(f'Summary for {sig} does not end with period.') - if len(summary) > 79: - logger.warning(f'Summary for {sig} too long.') + logger.warning(f'Summary for {obj} does not end with period.') # FIXME lines = docbody.split('\n') - for i,l in enumerate(lines): - if len(l) > 79: - logger.warning(f'Line {i} for {sig} too long.') - #init = ("Collective.", "Not collective.", "Logically collective.", "Neighborwise collective.") - #if lines[0] not in init: - # logger.warning(f'Unexpected collectiveness specification for {sig}\nFound {lines[0]}') + for i, line in enumerate(lines): + if len(line) > 79: + logger.warning(f'Line {i} for documentation of {obj} too long.') 
+ if not cl: + init = ( + 'Collective.', + 'Not collective.', + 'Logically collective.', + 'Neighborwise collective.', + 'Collective the first time it is called.', + ) + if lines[0] not in init: + logger.warning(f'Unexpected collectiveness for {sig}\nFound {lines[0]}') if link: linktxt, _, link = link.rpartition(' ') - linkloc = link.replace(':','#L') + linkloc = link.replace(':', '#L') # FIXME do we want to use a special section? # section = f'References\n----------`' section = '\n' @@ -135,66 +151,65 @@ def docstring(obj): doc = f'"""{summary}\n\n{docbody}\n\n"""' else: doc = f'"""{summary}"""' - doc = textwrap.indent(doc, Lines.INDENT) - return doc + return textwrap.indent(doc, Lines.INDENT) def visit_data(constant): name, value = constant typename = type(value).__name__ - kind = "Constant" if isinstance(value, int) else "Object" + kind = 'Constant' if isinstance(value, int) else 'Object' init = f"_def({typename}, '{name}')" - doc = f"#: {kind} ``{name}`` of type :class:`{typename}`" - return f"{name}: {typename} = {init} {doc}\n" + doc = f'#: {kind} ``{name}`` of type :class:`{typename}`' + return f'{name}: {typename} = {init} {doc}\n' def visit_function(function): sig = signature(function) doc = docstring(function) - body = Lines.INDENT + "..." - return f"def {sig}:\n{doc}\n{body}\n" + body = Lines.INDENT + '...' + return f'def {sig}:\n{doc}\n{body}\n' def visit_method(method): sig = signature(method) doc = docstring(method) - body = Lines.INDENT + "..." - return f"def {sig}:\n{doc}\n{body}\n" + body = Lines.INDENT + '...' + return f'def {sig}:\n{doc}\n{body}\n' def visit_datadescr(datadescr, name=None): sig = signature(datadescr) doc = docstring(datadescr) name = sig.partition(':')[0].strip() or datadescr.__name__ - type = sig.partition(':')[2].strip() or 'Any' - sig = f"{name}(self) -> {type}" - body = Lines.INDENT + "..." - return f"@property\ndef {sig}:\n{doc}\n{body}\n" + rtype = sig.partition(':')[2].strip() or 'Any' + sig = f'{name}(self) -> {rtype}' + body = Lines.INDENT + '...' + return f'@property\ndef {sig}:\n{doc}\n{body}\n' def visit_property(prop, name=None): sig = signature(prop.fget) name = name or prop.fget.__name__ - type = sig.rsplit('->', 1)[-1].strip() - sig = f"{name}(self) -> {type}" + rtype = sig.rsplit('->', 1)[-1].strip() + sig = f'{name}(self) -> {rtype}' doc = f'"""{prop.__doc__}"""' doc = textwrap.indent(doc, Lines.INDENT) - body = Lines.INDENT + "..." - return f"@property\ndef {sig}:\n{doc}\n{body}\n" + body = Lines.INDENT + '...' + return f'@property\ndef {sig}:\n{doc}\n{body}\n' def visit_constructor(cls, name='__init__', args=None): - init = (name == '__init__') + init = name == '__init__' argname = cls.__mro__[-2].__name__.lower() argtype = cls.__name__ - initarg = args or f"{argname}: Optional[{argtype}] = None" + initarg = args or f'{argname}: Optional[{argtype}] = None' selfarg = 'self' if init else 'cls' rettype = 'None' if init else argtype - arglist = f"{selfarg}, {initarg}" - sig = f"{name}({arglist}) -> {rettype}" + arglist = f'{selfarg}, {initarg}' + sig = f'{name}({arglist}) -> {rettype}' ret = '...' if init else 'return super().__new__(cls)' body = Lines.INDENT + ret - return f"def {sig}:\n{body}" + return f'def {sig}:\n{body}' def visit_class(cls, outer=None, done=None): @@ -209,29 +224,25 @@ def visit_class(cls, outer=None, done=None): '__ge__', '__gt__', '__enum2str', # FIXME refactor implementation - '_traceback_', # FIXME maybe refactor? + '_traceback_', # FIXME maybe refactor? 
} special = { - '__len__': "__len__(self) -> int", - '__bool__': "__bool__(self) -> bool", - '__hash__': "__hash__(self) -> int", - '__int__': "__int__(self) -> int", - '__index__': "__int__(self) -> int", - '__str__': "__str__(self) -> str", - '__repr__': "__repr__(self) -> str", - '__eq__': "__eq__(self, other: object) -> bool", - '__ne__': "__ne__(self, other: object) -> bool", + '__len__': '__len__(self) -> int', + '__bool__': '__bool__(self) -> bool', + '__hash__': '__hash__(self) -> int', + '__int__': '__int__(self) -> int', + '__index__': '__int__(self) -> int', + '__str__': '__str__(self) -> str', + '__repr__': '__repr__(self) -> str', + '__eq__': '__eq__(self, other: object) -> bool', + '__ne__': '__ne__(self, other: object) -> bool', } - constructor = ( - '__new__', - '__init__', - ) qualname = cls.__name__ cls_name = cls.__name__ if outer is not None and cls_name.startswith(outer): - cls_name = cls_name[len(outer):] - qualname = f"{outer}.{cls_name}" + cls_name = cls_name[len(outer) :] + qualname = f'{outer}.{cls_name}' override = OVERRIDE.get(qualname, {}) done = set() if done is None else done @@ -239,9 +250,9 @@ def visit_class(cls, outer=None, done=None): base = cls.__base__ if base is object: - lines.add = f"class {cls_name}:" + lines.add = f'class {cls_name}:' else: - lines.add = f"class {cls_name}({base.__name__}):" + lines.add = f'class {cls_name}({base.__name__}):' lines.level += 1 lines.add = docstring(cls) @@ -276,7 +287,6 @@ def members(seq): continue for name in members(keys): - if name in override: done.add(name) lines.add = override[name] @@ -285,7 +295,7 @@ def members(seq): if name in special: done.add(name) sig = special[name] - lines.add = f"def {sig}: ..." + lines.add = f'def {sig}: ...' continue attr = getattr(cls, name) @@ -295,12 +305,10 @@ def members(seq): if name == attr.__name__: obj = dct[name] if is_classmethod(obj): - lines.add = "@classmethod" + lines.add = '@classmethod' elif is_staticmethod(obj): - lines.add = "@staticmethod" + lines.add = '@staticmethod' lines.add = visit_method(attr) - elif False: - lines.add = f"{name} = {attr.__name__}" continue if is_datadescr(attr): @@ -318,10 +326,9 @@ def members(seq): lines.add = visit_data((name, attr)) continue - leftovers = [name for name in keys if - name not in done and name not in skip] + leftovers = [name for name in keys if name not in done and name not in skip] if leftovers: - raise RuntimeError(f"leftovers: {leftovers}") + raise RuntimeError(f'leftovers: {leftovers}') lines.level -= 1 return lines @@ -344,14 +351,17 @@ def visit_module(module, done=None): lines = Lines() keys = list(module.__dict__.keys()) - keys.sort(key=lambda name: name.startswith("_")) + keys.sort(key=lambda name: name.startswith('_')) constants = [ - (name, getattr(module, name)) for name in keys - if all(( - name not in done and name not in skip, - is_constant(getattr(module, name)), - )) + (name, getattr(module, name)) + for name in keys + if all( + ( + name not in done and name not in skip, + is_constant(getattr(module, name)), + ) + ) ] for _, value in constants: cls = type(value) @@ -361,7 +371,7 @@ def visit_module(module, done=None): if cls.__module__ == module.__name__: done.add(name) lines.add = visit_class(cls) - lines.add = "" + lines.add = '' for attr in constants: name, value = attr done.add(name) @@ -370,7 +380,7 @@ def visit_module(module, done=None): else: lines.add = visit_data((name, value)) if constants: - lines.add = "" + lines.add = '' for name in keys: if name in done or name in skip: @@ -384,19 
+394,22 @@ def visit_module(module, done=None): if value.__module__ != module.__name__: continue lines.add = visit_class(value) - lines.add = "" + lines.add = '' instances = [ - (k, getattr(module, k)) for k in keys - if all(( - k not in done and k not in skip, - type(getattr(module, k)) is value, - )) + (k, getattr(module, k)) + for k in keys + if all( + ( + k not in done and k not in skip, + type(getattr(module, k)) is value, + ) + ) ] for attrname, attrvalue in instances: done.add(attrname) lines.add = visit_data((attrname, attrvalue)) if instances: - lines.add = "" + lines.add = '' continue if is_function(value): @@ -404,10 +417,10 @@ def visit_module(module, done=None): if name == value.__name__: lines.add = visit_function(value) else: - lines.add = f"{name} = {value.__name__}" + lines.add = f'{name} = {value.__name__}' continue - lines.add = "" + lines.add = '' for name in keys: if name in done or name in skip: continue @@ -418,10 +431,9 @@ def visit_module(module, done=None): else: lines.add = visit_data((name, value)) - leftovers = [name for name in keys if - name not in done and name not in skip] + leftovers = [name for name in keys if name not in done and name not in skip] if leftovers: - raise RuntimeError(f"leftovers: {leftovers}") + raise RuntimeError(f'leftovers: {leftovers}') return lines @@ -498,8 +510,7 @@ def _def(cls, name): return obj """ -OVERRIDE = { -} +OVERRIDE = {} TYPING = """ from .typing import * @@ -508,14 +519,15 @@ def _def(cls, name): def visit_petsc4py_PETSc(done=None): from petsc4py import PETSc + lines = Lines() lines.add = f'"""{PETSc.__doc__}"""' lines.add = IMPORTS - lines.add = "" + lines.add = '' lines.add = HELPERS - lines.add = "" + lines.add = '' lines.add = visit_module(PETSc) - lines.add = "" + lines.add = '' lines.add = TYPING return lines @@ -530,8 +542,7 @@ def generate(filename): def load_module(filename, name=None): if name is None: - name, _ = os.path.splitext( - os.path.basename(filename)) + name, _ = os.path.splitext(os.path.basename(filename)) module = type(sys)(name) module.__file__ = filename module.__package__ = name.rsplit('.', 1)[0] @@ -547,7 +558,8 @@ def load_module(filename, name=None): def replace_module(module): name = module.__name__ - assert name not in _sys_modules + if name in _sys_modules: + raise RuntimeError(f'{name} in modules') _sys_modules[name] = sys.modules[name] sys.modules[name] = module return _sys_modules[name] @@ -555,7 +567,8 @@ def replace_module(module): def restore_module(module): name = module.__name__ - assert name in _sys_modules + if name not in _sys_modules: + raise RuntimeError(f'{name} not in modules') sys.modules[name] = _sys_modules[name] del _sys_modules[name] diff --git a/src/binding/petsc4py/docs/source/citing.rst b/src/binding/petsc4py/docs/source/citing.rst index 8dda61b4a09..1e8242425f1 100644 --- a/src/binding/petsc4py/docs/source/citing.rst +++ b/src/binding/petsc4py/docs/source/citing.rst @@ -1,14 +1,14 @@ Citations ========= -If PETSc for Python been significant to a project that leads to an +If PETSc for Python has been significant to a project that leads to an academic publication, please acknowledge that fact by citing the project. * L. Dalcin, P. Kler, R. Paz, and A. Cosimo, *Parallel Distributed Computing using Python*, Advances in Water Resources, 34(9):1124-1139, 2011. - http://dx.doi.org/10.1016/j.advwatres.2011.04.013 + https://doi.org/10.1016/j.advwatres.2011.04.013 * S. Balay, S. Abhyankar, M. Adams, S. Benson, J. Brown, P. Brune, K. Buschelman, E. Constantinescu, L. 
Dalcin, A. Dener, @@ -17,6 +17,6 @@ project. D. May, L. Curfman McInnes, R. Mills, L. Mitchell, T. Munson, J. Roman, K. Rupp, P. Sanan, J Sarich, B. Smith, S. Zampini, H. Zhang, and H. Zhang, J. Zhang, - *PETSc/TAO Users Manual*, ANL-21/39 - Revision 3.20, 2023. - http://dx.doi.org/10.2172/2205494, + *PETSc/TAO Users Manual*, ANL-21/39 - Revision 3.21, 2024. + https://doi.org/10.2172/2205494, https://petsc.org/release/docs/manual/manual.pdf diff --git a/src/binding/petsc4py/docs/source/conf.py b/src/binding/petsc4py/docs/source/conf.py index 2b24af23bf6..bb6e9a9e2a1 100644 --- a/src/binding/petsc4py/docs/source/conf.py +++ b/src/binding/petsc4py/docs/source/conf.py @@ -24,6 +24,7 @@ sys.path.insert(0, os.path.abspath('.')) _today = datetime.datetime.now() +# FIXME: allow building from build? # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information @@ -31,7 +32,8 @@ package = 'petsc4py' docdir = os.path.abspath(os.path.dirname(__file__)) -topdir = os.path.abspath(os.path.join(docdir, *[os.path.pardir]*2)) +topdir = os.path.abspath(os.path.join(docdir, *[os.path.pardir] * 2)) + def pkg_version(): with open(os.path.join(topdir, 'src', package, '__init__.py')) as f: @@ -42,20 +44,20 @@ def pkg_version(): def get_doc_branch(): release = 1 if topdir.endswith(os.path.join(os.path.sep, 'src', 'binding', package)): - rootdir = os.path.abspath(os.path.join(topdir, *[os.path.pardir]*3)) + rootdir = os.path.abspath(os.path.join(topdir, *[os.path.pardir] * 3)) rootname = package.replace('4py', '') version_h = os.path.join(rootdir, 'include', f'{rootname}version.h') if os.path.exists(version_h) and os.path.isfile(version_h): release_macro = f'{rootname.upper()}_VERSION_RELEASE' - version_re = re.compile(rf"#define\s+{release_macro}\s+([-]*\d+)") + version_re = re.compile(rf'#define\s+{release_macro}\s+([-]*\d+)') with open(version_h, 'r') as f: release = int(version_re.search(f.read()).groups()[0]) return 'release' if release else 'main' -project = 'PETSc for Python' -author = 'Lisandro Dalcin' -copyright = f'{_today.year}, {author}' +__project__ = 'PETSc for Python' +__author__ = 'Lisandro Dalcin' +__copyright__ = f'{_today.year}, {__author__}' release = pkg_version() version = release.rsplit('.', 1)[0] @@ -111,12 +113,13 @@ def get_doc_branch(): # Links depends on the actual branch -> release or main www = f'https://gitlab.com/petsc/petsc/-/tree/{get_doc_branch()}' -extlinks = {'sources': (f'{www}/src/binding/petsc4py/src/%s','')} +extlinks = {'sources': (f'{www}/src/binding/petsc4py/src/%s', '%s')} napoleon_preprocess_types = True try: import sphinx_rtd_theme + if 'sphinx_rtd_theme' not in extensions: extensions.append('sphinx_rtd_theme') except ImportError: @@ -132,6 +135,7 @@ def get_doc_branch(): 'petsc': ('https://petsc.org/release/', None), } + def _mangle_petsc_intersphinx(): """Preprocess the keys in PETSc's intersphinx inventory. 
@@ -148,25 +152,26 @@ def _mangle_petsc_intersphinx(): website = intersphinx_mapping['petsc'][0].partition('/release/')[0] branch = get_doc_branch() - doc_url = f"{website}/{branch}/" - if 'LOC' in os.environ and os.path.isfile(os.path.join(os.environ['LOC'],'objects.inv')): - inventory_url=f"file://" + os.path.join(os.environ['LOC'],'objects.inv') + doc_url = f'{website}/{branch}/' + if 'LOC' in os.environ and os.path.isfile( + os.path.join(os.environ['LOC'], 'objects.inv') + ): + inventory_url = 'file://' + os.path.join(os.environ['LOC'], 'objects.inv') else: - inventory_url=f"{doc_url}objects.inv" - print("Using PETSC inventory from "+inventory_url) + inventory_url = f'{doc_url}objects.inv' + print('Using PETSC inventory from ' + inventory_url) inventory = sphobjinv.Inventory(url=inventory_url) print(inventory) for obj in inventory.objects: - if obj.name.startswith("manualpages"): - obj.name = "petsc." + "/".join(obj.name.split("/")[2:]) - obj.role = "class" - obj.domain = "py" + if obj.name.startswith('manualpages'): + obj.name = 'petsc.' + '/'.join(obj.name.split('/')[2:]) + obj.role = 'class' + obj.domain = 'py' - new_inventory_filename = "petsc_objects.inv" + new_inventory_filename = 'petsc_objects.inv' sphobjinv.writebytes( - new_inventory_filename, - sphobjinv.compress(inventory.data_file(contract=True)) + new_inventory_filename, sphobjinv.compress(inventory.data_file(contract=True)) ) intersphinx_mapping['petsc'] = (doc_url, new_inventory_filename) @@ -192,6 +197,7 @@ def _setup_mpi4py_typing(): def _patch_domain_python(): from sphinx.domains.python import PythonDomain + PythonDomain.object_types['data'].roles += ('class',) @@ -226,7 +232,6 @@ def stringify_annotation(annotation, mode='fully-qualified-except-typing'): # class ClassDocumenterMixin: - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if self.config.autodoc_class_signature == 'separated': @@ -263,13 +268,12 @@ def _monkey_patch_returns(): this we swap ``:class:`` for ``:any:``. """ - _parse_returns_section = \ - NumpyDocstring._parse_returns_section + _parse_returns_section = NumpyDocstring._parse_returns_section @functools.wraps(NumpyDocstring._parse_returns_section) def wrapper(*args, **kwargs): out = _parse_returns_section(*args, **kwargs) - return [line.replace(":class:", ":any:") for line in out] + return [line.replace(':class:', ':any:') for line in out] NumpyDocstring._parse_returns_section = wrapper @@ -280,13 +284,12 @@ def _monkey_patch_see_also(): Napoleon uses :obj: for all names found in "see also" sections but we need :all: so that references to labels work.""" - _parse_numpydoc_see_also_section = \ - NumpyDocstring._parse_numpydoc_see_also_section + _parse_numpydoc_see_also_section = NumpyDocstring._parse_numpydoc_see_also_section @functools.wraps(NumpyDocstring._parse_numpydoc_see_also_section) def wrapper(*args, **kwargs): out = _parse_numpydoc_see_also_section(*args, **kwargs) - return [line.replace(":obj:", ":any:") for line in out] + return [line.replace(':obj:', ':any:') for line in out] NumpyDocstring._parse_numpydoc_see_also_section = wrapper @@ -304,26 +307,27 @@ def _process_demos(*demos): # Convert demo .py files to rst. Also copy the .py file so it can be # linked from the demo rst file. 
try: - os.mkdir("demo") + os.mkdir('demo') except FileExistsError: pass for demo in demos: - demo_dir = os.path.join("demo", os.path.dirname(demo)) - demo_src = os.path.join(os.pardir, os.pardir, "demo", demo) + demo_dir = os.path.join('demo', os.path.dirname(demo)) + demo_src = os.path.join(os.pardir, os.pardir, 'demo', demo) try: os.mkdir(demo_dir) except FileExistsError: pass - with open(demo_src, "r") as infile: - with open(os.path.join( - os.path.join("demo", os.path.splitext(demo)[0] + ".rst")), "w" + with open(demo_src, 'r') as infile: + with open( + os.path.join(os.path.join('demo', os.path.splitext(demo)[0] + '.rst')), + 'w', ) as outfile: converter = pylit.Code2Text(infile) outfile.write(str(converter)) demo_copy_name = os.path.join(demo_dir, os.path.basename(demo)) shutil.copyfile(demo_src, demo_copy_name) html_static_path.append(demo_copy_name) - with open(os.path.join("demo", "demo.rst"), "w") as demofile: + with open(os.path.join('demo', 'demo.rst'), 'w') as demofile: demofile.write(""" petsc4py demos ============== @@ -332,13 +336,12 @@ def _process_demos(*demos): """) for demo in demos: - demofile.write(" " + os.path.splitext(demo)[0] + "\n") - demofile.write("\n") + demofile.write(' ' + os.path.splitext(demo)[0] + '\n') + demofile.write('\n') -html_static_path=[] -_process_demos( - "poisson2d/poisson2d.py" -) + +html_static_path = [] +_process_demos('poisson2d/poisson2d.py') def setup(app): @@ -358,6 +361,7 @@ def setup(app): sys_dwb = sys.dont_write_bytecode sys.dont_write_bytecode = True import apidoc + sys.dont_write_bytecode = sys_dwb name = PETSc.__name__ @@ -387,9 +391,11 @@ def setup(app): typing.overload = typing_overload from petsc4py import typing as tp + for attr in tp.__all__: autodoc_type_aliases[attr] = f'~petsc4py.typing.{attr}' + # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output @@ -397,9 +403,7 @@ def setup(app): # a list of builtin themes. html_theme = 'pydata_sphinx_theme' -html_theme_options = { - "navigation_with_keys":True -} +html_theme_options = {'navigation_with_keys': True} # -- Options for HTMLHelp output ------------------------------------------ @@ -412,7 +416,7 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', f'{package}.tex', project, author, 'howto'), + ('index', f'{package}.tex', __project__, __author__, 'howto'), ] latex_elements = { @@ -423,9 +427,7 @@ def setup(app): # -- Options for manual page output --------------------------------------- # (source start file, name, description, authors, manual section). 
-man_pages = [
-    ('index', package, project, [author], 3)
-]
+man_pages = [('index', package, __project__, [__author__], 3)]


 # -- Options for Texinfo output -------------------------------------------

@@ -433,8 +435,15 @@ def setup(app):
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    ('index', package, project, author,
-     package, f'{project}.', 'Miscellaneous'),
+    (
+        'index',
+        package,
+        __project__,
+        __author__,
+        package,
+        f'{__project__}.',
+        'Miscellaneous',
+    ),
 ]
diff --git a/src/binding/petsc4py/docs/source/contributing.rst b/src/binding/petsc4py/docs/source/contributing.rst
new file mode 100644
index 00000000000..06869540c37
--- /dev/null
+++ b/src/binding/petsc4py/docs/source/contributing.rst
@@ -0,0 +1,54 @@
+Contributing
+============
+
+Contributions from the user community are welcome. See
+the `PETSc developers ` documentation for general
+information on contributions.
+
+New contributions to petsc4py **must** adhere to the coding standards.
+We use cython-lint_ for Cython and ruff_ for Python source code.
+These can be installed using::
+
+  $ python -m pip install -r src/binding/petsc4py/conf/requirements-lint.txt
+
+If you are contributing Cython code, you can check compliance with::
+
+  $ make cython-lint -C src/binding/petsc4py
+
+For Python code, run::
+
+  $ make ruff-lint -C src/binding/petsc4py
+
+Python code can be auto-formatted using::
+
+  $ make ruff-lint RUFF_OPTS='format' -C src/binding/petsc4py
+
+New contributions to petsc4py must be tested.
+Tests are located in the :file:`src/binding/petsc4py/test` folder.
+To add a new test, either add a new :file:`test_xxx.py` or modify a
+pre-existing file according to the
+`unittest `_
+specifications.
+
+If you add a new :file:`test_xxx.py`, you can run the tests using::
+
+  $ cd src/binding/petsc4py
+  $ python test/runtests.py -k test_xxx
+
+If instead you are modifying an existing :file:`test_xxx.py`,
+you can test your additions by using the fully qualified name of the Python
+class or method you are modifying, e.g.::
+
+  $ python test/runtests.py -k test_xxx.class_name.method_name
+
+All new code must include documentation in accordance with the `documentation
+standard `. To check for compliance, run::
+
+  $ make html SPHINXOPTS='-W' -C src/binding/petsc4py/docs/source
+
+.. warning::
+
+   The docstrings must not cause Sphinx warnings.
+
+.. _cython-lint: https://github.com/MarcoGorelli/cython-lint
+.. _ruff: https://docs.astral.sh/ruff
diff --git a/src/binding/petsc4py/docs/source/documentation_standards.rst b/src/binding/petsc4py/docs/source/documentation_standards.rst
index 1d2afc035ce..1c39dd2ef6b 100644
--- a/src/binding/petsc4py/docs/source/documentation_standards.rst
+++ b/src/binding/petsc4py/docs/source/documentation_standards.rst
@@ -1,7 +1,7 @@
-Documentation standards for PETSc4py
+Documentation standards for petsc4py
 ====================================
 
-Subject to exceptions given below, new contributions to PETSc4py **must**
+Subject to exceptions given below, new contributions to petsc4py **must**
 include `type annotations ` for function parameters
 and results, and docstrings on every class, function and method.
 
@@ -14,24 +14,25 @@ Docstring standards
 -------------------
 
 Docstrings are to be written in `numpydoc:format` format.
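+For example, a method docstring conforming to the standards described below
+might look as follows (an illustrative sketch, not taken from the actual
+sources):
+
+.. code-block:: python
+
+    def getSizes(self) -> tuple[int, int]:
+        """Return the local and global sizes.
+
+        Not collective.
+
+        See Also
+        --------
+        getSize, petsc.VecGetLocalSize, petsc.VecGetSize
+
+        """
+        ...
+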
-The first line of a function or method docstring must be a short description of
-the method in imperative mood ("Return the norm of the matrix.") "Return" is
-to be preferred over "Get" in this sentence. A blank line must follow this
-description. Use one-liner descriptions for properties.
+The first line of a class, function or method docstring must be a short
+description in imperative mood ("Return the norm of the matrix."). "Return"
+is to be preferred over "Get" in this sentence. A blank line must follow
+this description. Use one-liner descriptions for properties.
 
-If the corresponding C API documentation lists a function as being collective,
-then this information must be repeated on the next line of the docstring.
-Valid strings are: "Not collective.", "Logically collective.", "Collective.",
-or "Neighborwise collective.".
+If the C API documentation of the corresponding function lists it as being
+collective, then this information must be repeated on the next line of the
+docstring. Valid strings are: "Not collective.", "Logically collective.",
+"Collective.", "Neighborwise collective.", or "Collective the first time it
+is called."
 
 The initial description section can contain more information if this is
 useful. In particular, if there is a PETSc manual chapter about a class, then
 this should be referred to from here.
 
-Use double backticks around literals (like strings and numbers). E.g.
+Use double backticks around literals (like strings and numbers), e.g.,
 \`\`2\`\`, \`\`"foo"\`\`.
 
-Reference PETSc functions simply using backticks. eg: `petsc.KSP`. refers to
+Reference PETSc functions simply using backticks, e.g., `petsc.KSP` refers to
 the PETSc C documentation for KSP.
 
 Do **not** use URLs in docstrings. Always use Intersphinx references.
@@ -41,12 +42,15 @@ allowed by numpydoc may be included if they are useful.
 
 Parameters
 ..........
 
-This is required unless there are no parameters, or it will be completely
-obvious to even a novice user what the parameters do.
+This is required for methods unless there are no parameters, or it will be
+completely obvious to even a novice user what the parameters do.
 
-Types should only be specified in this section if for some reason the types
-provided by typing prove to be inadequate. If no type is being specified, do
-not include a colon (``:``) to the right of the parameter name.
+If a class has a non-trivial constructor, the arguments of the constructor and
+their types must be explicitly documented within this section.
+
+For methods, types should only be specified in this section if for some reason
+the types provided by typing prove to be inadequate. If no type is being
+specified, do not include a colon (``:``) to the right of the parameter name.
 
 Use `Sys.getDefaultComm` when specifying the default communicator.
 
@@ -68,20 +72,15 @@ information useful to users.
 
 Every ``setFromOptions`` must include the link \`petsc_options\`.
 
-Any closely related part of the PETSc4py API not already linked in the
+Any closely related part of the petsc4py API not already linked in the
 docstring should appear (e.g. setters and getters should cross-refer).
 
 If there is a corresponding C API documentation page, this must be linked from
-the "See also" section. E.g. \`petsc.MatSetValues\`.
+the "See also" section, e.g. \`petsc.MatSetValues\`.
 
 End docstring with an empty line - "closing three quotation marks must be on a
 line by itself, preferably preceded by a blank line"
 
-.. warning::
-
-   The docstrings must not cause Sphinx warnings.
-
-
 Type hint standards
 -------------------
 
@@ -96,11 +95,11 @@ Communicators in type signatures must use Python typing instead of c-typing
 (i.e. ``comm: Comm`` not ``Comm comm``). This is because communicators can
 come from ``mpi4py`` and not just the ``petsc4py.PETSc.Comm`` class.
 
-For petsc4py native types that are can be strings, the type is ``argument:
-KSP.Type | str`` (not eg: ``KSPType argument``). If the type is strictly an
+For petsc4py native types that can be strings, the type is ``argument:
+KSP.Type | str`` (not e.g. ``KSPType argument``). If the type is strictly an
 enum the ``| str`` can be omitted. Full signature example::
 
     def setType(self, ksp_type: KSP.Type | str) -> None:
 
-If a NumPy is returned, use ``ArrayInt``/``ArrayReal``/``ArrayScalar`` as the
-return type.
+If a NumPy array is returned, use
+``ArrayBool``/``ArrayInt``/``ArrayReal``/``ArrayScalar`` as the return type.
diff --git a/src/binding/petsc4py/docs/source/index.rst b/src/binding/petsc4py/docs/source/index.rst
index e5110499262..c96e8893bb0 100644
--- a/src/binding/petsc4py/docs/source/index.rst
+++ b/src/binding/petsc4py/docs/source/index.rst
@@ -11,7 +11,7 @@ PETSc for Python
 
 .. topic:: Abstract
 
-   This document describes petsc4py_, a Python_ wrapper to the PETSc_
+   This document describes :mod:`petsc4py`, a Python_ wrapper to the PETSc_
    libraries.
 
    PETSc_ (the Portable, Extensible Toolkit for Scientific
@@ -28,7 +28,7 @@ PETSc for Python
   * mpi4py_: Python bindings for MPI_, the *Message Passing Interface*.
 
-  Other projects depends on petsc4py:
+  Other projects depend on petsc4py:
 
   * slepc4py_: Python bindings for SLEPc_, the *Scalable Library for
     Eigenvalue Problem Computations*.
@@ -42,6 +42,7 @@ PETSc for Python
 
    overview
    install
+   contributing
   citing
 
 .. toctree::
diff --git a/src/binding/petsc4py/docs/source/install.rst b/src/binding/petsc4py/docs/source/install.rst
index bfed0bfac3a..83dacb51623 100644
--- a/src/binding/petsc4py/docs/source/install.rst
+++ b/src/binding/petsc4py/docs/source/install.rst
@@ -26,18 +26,50 @@ The installation of :mod:`petsc4py` supports multiple `PETSC_ARCH
 
   $ PETSC_ARCH='arch-0:...:arch-N' python -m pip install src/binding/petsc4py
 
 If you are cross-compiling, and the :mod:`numpy` module cannot be loaded on
-your build host, then before invoking :file:`pip`, set the
+your build host, then before invoking :program:`pip`, set the
 :envvar:`NUMPY_INCLUDE` environment variable to the path that would be
 returned by :samp:`import numpy; numpy.get_include()`::
 
   $ export NUMPY_INCLUDE=/usr/lib/pythonX/site-packages/numpy/core/include
 
+Running the test suite
+----------------------
+
+When installing from source, the complete petsc4py test suite can be run as::
+
+  $ cd src/binding/petsc4py
+  $ python test/runtests.py
+
+or via the makefile rule ``test``::
+
+  $ make test -C src/binding/petsc4py
+
+Specific tests can be run using the command-line option ``-k``, e.g.::
+
+  $ python test/runtests.py -k test_optdb
+
+to run all the tests provided in :file:`test/test_optdb.py`.
+
+For other command-line options, run::
+
+  $ python test/runtests.py --help
+
+If not otherwise specified, all tests will be run in sequential mode.
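+
+As a quick sanity check, independent of the test suite, one can verify that
+the installed module is importable and print its version::
+
+  $ python -c "import petsc4py; print(petsc4py.__version__)"
+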
+To run all the tests with a given number of MPI processes, for example
+``4``, run::
+
+  $ mpiexec -n 4 python test/runtests.py
+
+or::
+
+  $ make test-4 -C src/binding/petsc4py
+
 Building the documentation
 --------------------------
 
-Install the documentation dependencies using the ``[doc]`` extra::
+Install the documentation dependencies::
 
-    $ python -m pip install "src/binding/petsc4py[doc]"
+    $ python -m pip install -r src/binding/petsc4py/conf/requirements-docs.txt
 
 Then::
 
@@ -49,8 +81,3 @@ The resulting HTML files will be in :file:`_build/html`.
 
 .. note::
 
    Building the documentation requires Python 3.11 or later.
-
-.. note::
-
-   All new code must include documentation in accordance with the `documentation
-   standard `
diff --git a/src/binding/petsc4py/docs/source/links.txt b/src/binding/petsc4py/docs/source/links.txt
index 3b43a5fb997..843c7d5b257 100644
--- a/src/binding/petsc4py/docs/source/links.txt
+++ b/src/binding/petsc4py/docs/source/links.txt
@@ -1,19 +1,13 @@
-.. _MPI: http://www.mpi-forum.org
-
-.. _MPICH: http://www.mpich.org/
-
-.. _Open MPI: http://www.open-mpi.org
+.. _MPI: https://www.mpi-forum.org
 
 .. _PETSc: https://petsc.org
 
-.. _SLEPc: http://slepc.upv.es
-
-.. _Python: http://www.python.org
+.. _SLEPc: https://slepc.upv.es
 
-.. _NumPy: http://www.numpy.org
+.. _Python: https://www.python.org
 
-.. _mpi4py: http://github.com/mpi4py/mpi4py
+.. _NumPy: https://numpy.org
 
-.. _petsc4py: http://gitlab.com/petsc/petsc4py
+.. _mpi4py: https://github.com/mpi4py/mpi4py
 
-.. _slepc4py: http://gitlab.com/slepc/slepc4py
+.. _slepc4py: https://gitlab.com/slepc/slepc
diff --git a/src/binding/petsc4py/docs/source/overview.rst b/src/binding/petsc4py/docs/source/overview.rst
index 5c1d23e7fd8..f087e26aece 100644
--- a/src/binding/petsc4py/docs/source/overview.rst
+++ b/src/binding/petsc4py/docs/source/overview.rst
@@ -1,5 +1,5 @@
-Overview
-========
+PETSc Overview
+==============
 
 PETSc_ is a suite of data structures and routines for the scalable
 (parallel) solution of scientific applications modeled by
@@ -29,7 +29,7 @@ communication and computation.
    D. May, L. Curfman McInnes, R. Mills, L. Mitchell, T. Munson,
    J. Roman, K. Rupp, P. Sanan, J Sarich, B. Smith, S. Zampini,
    H. Zhang, and H. Zhang, J. Zhang,
-   *PETSc/TAO Users Manual*, ANL-21/39 - Revision 3.20, 2023.
+   *PETSc/TAO Users Manual*, ANL-21/39 - Revision 3.21, 2024.
    http://dx.doi.org/10.2172/2205494,
    https://petsc.org/release/docs/manual/manual.pdf
 
@@ -63,14 +63,13 @@ required for many parallel solutions of PDEs.
            structures.
 
 :Mat:      A large suite of data structures and code for the manipulation
-           of parallel sparse matrices. Includes four different parallel
+           of parallel sparse matrices. Includes several different parallel
            matrix data structures, each appropriate for a different class
            of problems.
 
 :PC:       A collection of sequential and parallel preconditioners,
            including (sequential) ILU(k), LU, and (both sequential and
-           parallel) block Jacobi, overlapping additive Schwarz methods
-           and (through BlockSolve95) ILU(0) and ICC(0).
+           parallel) block Jacobi, overlapping additive Schwarz methods.
 
 :KSP:      Parallel implementations of many popular Krylov subspace
            iterative methods, including GMRES, CG, CGS, Bi-CG-Stab, two
diff --git a/src/binding/petsc4py/docs/source/petsc_options.rst b/src/binding/petsc4py/docs/source/petsc_options.rst
index 46a375b8f48..dab377e1fe1 100644
--- a/src/binding/petsc4py/docs/source/petsc_options.rst
+++ b/src/binding/petsc4py/docs/source/petsc_options.rst
@@ -1,5 +1,55 @@
 .. _petsc_options:
 
-Working with PETSc options (TODO)
-=================================
+Working with PETSc options
+==========================
+
+A very powerful feature of PETSc is that objects can be configured via command-line options.
+In this way, one can choose methods and set parameters without changing the source code.
+See the PETSc `manual ` for additional information.
+
+In order to use command-line options in a petsc4py program, it is important to initialize the module as follows:
+
+.. code-block:: python
+
+    # We first import petsc4py and sys to initialize PETSc
+    import sys, petsc4py
+    petsc4py.init(sys.argv)
+
+    # Import the PETSc module
+    from petsc4py import PETSc
+
+Then one can provide command-line options when running a script:
+
+.. code-block:: console
+
+    $ python foo.py -ksp_type gmres -ksp_gmres_restart 100 -ksp_view
+
+When the above initialization method is not possible, PETSc options can also be specified via environment variables or configuration files, e.g.:
+
+.. code-block:: console
+
+    $ PETSC_OPTIONS='-ksp_type gmres -ksp_gmres_restart 100 -ksp_view' python foo.py
+
+Command-line options can be read via an instance of the ``Options`` class. For instance:
+
+.. code-block:: python
+
+    OptDB = PETSc.Options()
+    n = OptDB.getInt('n', 16)
+    eta = OptDB.getReal('eta', 0.014)
+    alpha = OptDB.getScalar('alpha', -12.3)
+
+In this way, if the script is run with
+
+.. code-block:: console
+
+    $ python foo.py -n 50 -alpha 8.8
+
+the options ``n`` and ``alpha`` will get the values ``50`` and ``8.8``, respectively, while ``eta`` will be assigned its default value, ``0.014``.
+
+The options database is also accessible as a Python dictionary, so that one can, for instance, override, insert or delete an option:
+
+.. code-block:: python
+
+    OptDB['draw_pause'] = 1
+    del OptDB['draw_pause']
diff --git a/src/binding/petsc4py/docs/source/petsc_python_types.rst b/src/binding/petsc4py/docs/source/petsc_python_types.rst
index 5daab3f85a3..25f082c4cfb 100644
--- a/src/binding/petsc4py/docs/source/petsc_python_types.rst
+++ b/src/binding/petsc4py/docs/source/petsc_python_types.rst
@@ -7,12 +7,9 @@ Here we discuss details about Python-aware PETSc types that can be used within t
 In particular, we discuss matrices, preconditioners, Krylov solvers,
 nonlinear solvers and ODE integrators.
 
-The low-level, Cython implementation exposing the Python methods is contained in
+The low-level, Cython implementation exposing the Python methods is in
+`src/petsc4py/PETSc/libpetsc4py.pyx `_.
 
-:file:`${PETSC_DIR}/src/binding/petsc4py/src/petsc4py/PETSc/libpetsc4py.pyx`
-
-The scripts can be found in
-:file:`${PETSC_DIR}/src/binding/petsc4py/demo/python_types`.
+The scripts used here can be found at `demo/python_types `_.
 
 .. _petsc_python_mat:
 
diff --git a/src/binding/petsc4py/makefile b/src/binding/petsc4py/makefile
index ee0b6188e53..058685bdfd8 100644
--- a/src/binding/petsc4py/makefile
+++ b/src/binding/petsc4py/makefile
@@ -30,7 +30,7 @@ srcclean:
 	-${RM} src/${package}/${MODULE}_api.h
 
 .PHONY: clean distclean fullclean
-clean:
+clean: srcclean
 	${PYTHON} setup.py clean --all
 distclean: clean
 	-${RM} -r build _configtest.* *.py[co]
@@ -51,6 +51,29 @@ uninstall:
 
 # ----
 
+.PHONY: lint ruff-lint cython-lint
+CYTHONLINT_SRC = src demo
+CYTHONLINT_OPTS =
+RUFF_SRC = .
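+# RUFF_OPTS selects the ruff sub-command: the default 'check' only reports
+# issues, while RUFF_OPTS='format' rewrites the Python sources in place.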
+RUFF_OPTS = check + +cython-lint: + conf/cythonize.sh -Wextra -Werror + cython-lint ${CYTHONLINT_OPTS} ${CYTHONLINT_SRC} + + +ruff-lint: + ruff ${RUFF_OPTS} ${RUFF_SRC} + +lint: + ${RM} -r petsc4py-lint-env + ${PYTHON} -m venv petsc4py-lint-env + . petsc4py-lint-env/bin/activate && python -m pip install -r conf/requirements-lint.txt + . petsc4py-lint-env/bin/activate && ${MAKE} cython-lint + . petsc4py-lint-env/bin/activate && ${MAKE} ruff-lint + +# ---- + .PHONY: docs docs-html docs-pdf docs-misc docs: docs-html docs-pdf docs-misc docs-html: rst2html sphinx-html @@ -106,7 +129,8 @@ docsclean: website: ${RM} -r petsc-doc-env ${PYTHON} -m venv petsc-doc-env - . petsc-doc-env/bin/activate && env CFLAGS=-O0 python -m pip install .[doc] + . petsc-doc-env/bin/activate && env CFLAGS=-O0 python -m pip install . + . petsc-doc-env/bin/activate && python -m pip install -r conf/requirements-docs.txt . petsc-doc-env/bin/activate && ${MAKE} sphinx-html SPHINXOPTS=-Wj2 mkdir -p ${LOC}/petsc4py mv docs/html/* ${LOC}/petsc4py diff --git a/src/binding/petsc4py/pyproject.toml b/src/binding/petsc4py/pyproject.toml new file mode 100644 index 00000000000..6987c0389ed --- /dev/null +++ b/src/binding/petsc4py/pyproject.toml @@ -0,0 +1,7 @@ +[build-system] +requires = [ + "cython >= 3", + "numpy", + "setuptools", +] +build-backend = "setuptools.build_meta" diff --git a/src/binding/petsc4py/setup.cfg b/src/binding/petsc4py/setup.cfg index 38706d385ca..b16be70b9da 100644 --- a/src/binding/petsc4py/setup.cfg +++ b/src/binding/petsc4py/setup.cfg @@ -10,11 +10,3 @@ force_manifest = 1 [nosetests] where = test - -[options.extras_require] -doc = - pydata-sphinx-theme==0.15.1 - sphinx>=7.0.0 - sphobjinv - typing_extensions;python_version<'3.11' - pylit diff --git a/src/binding/petsc4py/setup.py b/src/binding/petsc4py/setup.py index 9eb1270a0ee..f27a1ba934d 100755 --- a/src/binding/petsc4py/setup.py +++ b/src/binding/petsc4py/setup.py @@ -16,10 +16,9 @@ pyver = sys.version_info[:2] if pyver < (2, 6) or (3, 0) <= pyver < (3, 2): - raise RuntimeError("Python version 2.6, 2.7 or >= 3.2 required") + raise RuntimeError('Python version 2.6, 2.7 or >= 3.2 required') if pyver == (2, 6) or pyver == (3, 2): - sys.stderr.write( - "WARNING: Python %d.%d is not supported.\n" % pyver) + sys.stderr.write('WARNING: Python %d.%d is not supported.\n' % pyver) PNAME = 'PETSc' EMAIL = 'petsc-maint@mcs.anl.gov' @@ -29,16 +28,19 @@ # Metadata # -------------------------------------------------------------------- + def F(string): return string.format( Name=PNAME, name=PNAME.lower(), - pyname=PNAME.lower()+'4py', + pyname=PNAME.lower() + '4py', ) + def get_name(): return F('{pyname}') + def get_version(): try: return get_version.result @@ -51,17 +53,20 @@ def get_version(): get_version.result = version return version + def description(): return F('{Name} for Python') + def long_description(): with open(os.path.join(topdir, 'DESCRIPTION.rst')) as f: return f.read() -url = F('https://gitlab.com/{name}/{name}') + +url = F('https://gitlab.com/{name}/{name}') pypiroot = F('https://pypi.io/packages/source') pypislug = F('{pyname}')[0] + F('/{pyname}') -tarball = F('{pyname}-%s.tar.gz' % get_version()) +tarball = F('{pyname}-%s.tar.gz' % get_version()) download = '/'.join([pypiroot, pypislug, tarball]) classifiers = """ @@ -95,24 +100,26 @@ def long_description(): """.strip().split('\n') metadata = { - 'name' : get_name(), - 'version' : get_version(), - 'description' : description(), - 'long_description' : long_description(), - 'url' : url, - 
'download_url' : download, - 'classifiers' : classifiers, - 'keywords' : keywords + PLIST, - 'license' : 'BSD-2-Clause', - 'platforms' : platforms, - 'author' : 'Lisandro Dalcin', - 'author_email' : 'dalcinl@gmail.com', - 'maintainer' : F('{Name} Team'), - 'maintainer_email' : EMAIL, + 'name': get_name(), + 'version': get_version(), + 'description': description(), + 'long_description': long_description(), + 'url': url, + 'download_url': download, + 'classifiers': classifiers, + 'keywords': keywords + PLIST, + 'license': 'BSD-2-Clause', + 'platforms': platforms, + 'author': 'Lisandro Dalcin', + 'author_email': 'dalcinl@gmail.com', + 'maintainer': F('{Name} Team'), + 'maintainer_email': EMAIL, } -metadata.update({ - 'requires': ['numpy'], -}) +metadata.update( + { + 'requires': ['numpy'], + } +) metadata_extra = { 'long_description_content_type': 'text/x-rst', @@ -122,33 +129,36 @@ def long_description(): # Extension modules # -------------------------------------------------------------------- + def sources(): - src = dict( - source=F('{pyname}/{Name}.pyx'), - depends=[ + src = { + 'source': F('{pyname}/{Name}.pyx'), + 'depends': [ F('{pyname}/*.pyx'), F('{pyname}/*.pxd'), F('{pyname}/{Name}/*.pyx'), F('{pyname}/{Name}/*.pxd'), F('{pyname}/{Name}/*.pxi'), ], - workdir='src', - ) + 'workdir': 'src', + } return [src] + def extensions(): from os import walk from glob import glob from os.path import join + # depends = [] glob_join = lambda *args: glob(join(*args)) - for pth, dirs, files in walk('src'): + for pth, _, _ in walk('src'): depends += glob_join(pth, '*.h') depends += glob_join(pth, '*.c') for pkg in map(str.lower, reversed(PLIST)): - if (pkg.upper()+'_DIR') in os.environ: - pd = os.environ[pkg.upper()+'_DIR'] + if (pkg.upper() + '_DIR') in os.environ: + pd = os.environ[pkg.upper() + '_DIR'] pa = os.environ.get('PETSC_ARCH', '') depends += glob_join(pd, 'include', '*.h') depends += glob_join(pd, 'include', pkg, 'private', '*.h') @@ -161,6 +171,7 @@ def extensions(): else: try: import numpy + numpy_includes = [numpy.get_include()] except ImportError: numpy_includes = [] @@ -168,51 +179,57 @@ def extensions(): if F('{pyname}') != 'petsc4py': try: import petsc4py + petsc4py_includes = [petsc4py.get_include()] except ImportError: petsc4py_includes = [] include_dirs.extend(petsc4py_includes) # - ext = dict( - name=F('{pyname}.lib.{Name}'), - sources=[F('src/{pyname}/{Name}.c')], - depends=depends, - include_dirs=[ + ext = { + 'name': F('{pyname}.lib.{Name}'), + 'sources': [F('src/{pyname}/{Name}.c')], + 'depends': depends, + 'include_dirs': [ 'src', F('src/{pyname}/include'), - ] + include_dirs, - define_macros=[ + ] + + include_dirs, + 'define_macros': [ ('MPICH_SKIP_MPICXX', 1), ('OMPI_SKIP_MPICXX', 1), ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION'), ], - ) + } return [ext] + # -------------------------------------------------------------------- # Setup # -------------------------------------------------------------------- + def get_release(): suffix = os.path.join('src', 'binding', F('{pyname}')) if not topdir.endswith(os.path.join(os.path.sep, suffix)): return True release = 1 - rootdir = os.path.abspath(os.path.join(topdir, *[os.path.pardir]*3)) + rootdir = os.path.abspath(os.path.join(topdir, *[os.path.pardir] * 3)) version_h = os.path.join(rootdir, 'include', F('{name}version.h')) release_macro = '%s_VERSION_RELEASE' % F('{name}').upper() - version_re = re.compile(r"#define\s+%s\s+([-]*\d+)" % release_macro) + version_re = re.compile(r'#define\s+%s\s+([-]*\d+)' % release_macro) if 
os.path.exists(version_h) and os.path.isfile(version_h): with open(version_h, 'r') as f: release = int(version_re.search(f.read()).groups()[0]) return bool(release) + def requires(pkgname, major, minor, release=True): minor = minor + int(not release) devel = '' if release else '.dev0' - vmin = "%s.%s%s" % (major, minor, devel) - vmax = "%s.%s" % (major, minor + 1) - return "%s>=%s,<%s" % (pkgname, vmin, vmax) + vmin = f'{major}.{minor}{devel}' + vmax = f'{major}.{minor+1}' + return f'{pkgname}>={vmin},<{vmax}' + def run_setup(): setup_args = metadata.copy() @@ -220,17 +237,27 @@ def run_setup(): x, y = tuple(map(int, vstr)) release = get_release() if not release: - setup_args['version'] = "%d.%d.0.dev0" %(x, y+1) + setup_args['version'] = '%d.%d.0.dev0' % (x, y + 1) if setuptools: setup_args['zip_safe'] = False - setup_args['install_requires'] = ['numpy'] + try: + import numpy + + major = int(numpy.__version__.partition('.')[0]) + numpy_pin = 'numpy>=%d,<%d' % (major, major + 1) + except ImportError: + numpy_pin = 'numpy' + setup_args['setup_requires'] = ['numpy'] + setup_args['install_requires'] = [numpy_pin] for pkg in map(str.lower, PLIST): PKG_DIR = os.environ.get(pkg.upper() + '_DIR') if not (PKG_DIR and os.path.isdir(PKG_DIR)): package = requires(pkg, x, y, release) + setup_args['setup_requires'] += [package] setup_args['install_requires'] += [package] if F('{pyname}') != 'petsc4py': package = requires('petsc4py', x, y, release) + setup_args['setup_requires'] += [package] setup_args['install_requires'] += [package] setup_args.update(metadata_extra) # @@ -240,7 +267,7 @@ def run_setup(): F('{pyname}'), F('{pyname}.lib'), ], - package_dir={'' : 'src'}, + package_dir={'': 'src'}, package_data={ F('{pyname}'): [ F('{Name}.pxd'), @@ -255,20 +282,19 @@ def run_setup(): F('{name}.cfg'), ], }, - cython_sources=[ - src for src in sources() - ], - ext_modules=[ - conf.Extension(**ext) for ext in extensions() - ], - **setup_args + cython_sources=[src for src in sources()], # noqa: C416 + ext_modules=[conf.Extension(**ext) for ext in extensions()], + **setup_args, ) + # -------------------------------------------------------------------- + def main(): run_setup() + if __name__ == '__main__': main() diff --git a/src/binding/petsc4py/src/lib-petsc/custom.h b/src/binding/petsc4py/src/lib-petsc/custom.h index 6665818714a..ba81ddd38fa 100644 --- a/src/binding/petsc4py/src/lib-petsc/custom.h +++ b/src/binding/petsc4py/src/lib-petsc/custom.h @@ -10,6 +10,7 @@ #include #include #include +#include /* ---------------------------------------------------------------- */ @@ -568,6 +569,15 @@ PetscErrorCode SNESSetUseFDColoring(SNES snes,PetscBool flag) PetscFunctionReturn(PETSC_SUCCESS); } +static +PetscErrorCode SNESComputeUpdate(SNES snes) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(snes,SNES_CLASSID,1); + PetscTryTypeMethod(snes, update, snes->iter); + PetscFunctionReturn(PETSC_SUCCESS); +} + /* ---------------------------------------------------------------- */ static @@ -642,11 +652,14 @@ PetscErrorCode TaoHasHessianRoutine(Tao tao, PetscBool* flg) #endif static -PetscErrorCode TaoComputeUpdate(Tao tao) +PetscErrorCode TaoComputeUpdate(Tao tao, PetscReal *f) { PetscFunctionBegin; PetscValidHeaderSpecific(tao,TAO_CLASSID,1); - PetscTryTypeMethod(tao,update,tao->niter,tao->user_update); + if (tao->ops->update) { + PetscUseTypeMethod(tao,update,tao->niter,tao->user_update); + PetscCall(TaoComputeObjective(tao,tao->solution,f)); + } PetscFunctionReturn(PETSC_SUCCESS); } @@ -711,6 +724,14 @@ 
PetscErrorCode DMDACreateND(MPI_Comm comm, PetscFunctionReturn(PETSC_SUCCESS); } +static +PetscErrorCode PetscDeviceReference(PetscDevice device) +{ + PetscFunctionBegin; + PetscCall(PetscDeviceReference_Internal(device)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /* ---------------------------------------------------------------- */ #endif/* PETSC4PY_CUSTOM_H*/ diff --git a/src/binding/petsc4py/src/petsc4py/PETSc.pxd b/src/binding/petsc4py/src/petsc4py/PETSc.pxd index 6b8df2a8181..a1ff2ba9193 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc.pxd +++ b/src/binding/petsc4py/src/petsc4py/PETSc.pxd @@ -98,7 +98,7 @@ cdef extern from "": struct _p_DM ctypedef _p_DM* PetscDM "DM" - struct _p_DMPlexTransform + struct _p_DMPlexTransform ctypedef _p_DMPlexTransform* PetscDMPlexTransform "DMPlexTransform" struct _p_PetscDS @@ -316,17 +316,17 @@ cdef type PyPetscType_Lookup(int) # -------------------------------------------------------------------- cdef extern from * nogil: - ctypedef enum PetscErrorCode: - PETSC_SUCCESS - PETSC_ERR_PLIB - PETSC_ERR_SUP - PETSC_ERR_USER - PETSC_ERR_MEM - PETSC_ERR_MPI - PETSC_ERR_PYTHON - - ctypedef enum PetscErrorType: - PETSC_ERROR_INITIAL - PETSC_ERROR_REPEAT + ctypedef enum PetscErrorCode: + PETSC_SUCCESS + PETSC_ERR_PLIB + PETSC_ERR_SUP + PETSC_ERR_USER + PETSC_ERR_MEM + PETSC_ERR_MPI + PETSC_ERR_PYTHON + + ctypedef enum PetscErrorType: + PETSC_ERROR_INITIAL + PETSC_ERROR_REPEAT cdef PetscErrorCode CHKERR(PetscErrorCode) except PETSC_ERR_PYTHON nogil diff --git a/src/binding/petsc4py/src/petsc4py/PETSc.py b/src/binding/petsc4py/src/petsc4py/PETSc.py index 1118e84d867..cd319650a59 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc.py +++ b/src/binding/petsc4py/src/petsc4py/PETSc.py @@ -1,5 +1,6 @@ ARCH = None from petsc4py.lib import ImportPETSc # noqa: E402 + PETSc = ImportPETSc(ARCH) PETSc._initialize() del PETSc diff --git a/src/binding/petsc4py/src/petsc4py/PETSc.pyx b/src/binding/petsc4py/src/petsc4py/PETSc.pyx index 7dbd471db8d..a056c649d38 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc.pyx @@ -1,17 +1,18 @@ -#cython: language_level=3str -#cython: embedsignature=True -#cython: embedsignature.format=python -#cython: annotation_typing=False -#cython: cdivision=True -#cython: auto_pickle=False -#cython: always_allow_keywords=True -#cython: allow_none_for_extension_args=False -#cython: autotestdict=False -#cython: warn.multiple_declarators=False -#cython: optimize.use_switch=False -#cython: binding=False +# cython: language_level=3str +# cython: embedsignature=True +# cython: embedsignature.format=python +# cython: annotation_typing=False +# cython: cdivision=True +# cython: auto_pickle=False +# cython: always_allow_keywords=True +# cython: allow_none_for_extension_args=False +# cython: autotestdict=False +# cython: warn.multiple_declarators=False +# cython: optimize.use_switch=False +# cython: binding=False -#from __future__ import annotations -cimport cython +# from __future__ import annotations + +cimport cython # no-cython-lint include "PETSc/PETSc.pyx" diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/AO.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/AO.pyx index c9ee58e6930..02537f2c575 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/AO.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/AO.pyx @@ -1,6 +1,7 @@ # -------------------------------------------------------------------- class AOType(object): + """The application ordering types.""" BASIC = S_(AOBASIC) ADVANCED = S_(AOADVANCED) 
MAPPING = S_(AOMAPPING) @@ -8,6 +9,7 @@ class AOType(object): # -------------------------------------------------------------------- + cdef class AO(Object): """Application ordering object.""" Type = AOType @@ -33,7 +35,7 @@ cdef class AO(Object): """ cdef PetscViewer cviewer = NULL if viewer is not None: cviewer = viewer.vwr - CHKERR( AOView(self.ao, cviewer) ) + CHKERR(AOView(self.ao, cviewer)) def destroy(self) -> Self: """Destroy the application ordering. @@ -45,15 +47,14 @@ cdef class AO(Object): petsc.AODestroy """ - CHKERR( AODestroy(&self.ao) ) + CHKERR(AODestroy(&self.ao)) return self def createBasic( self, app: Sequence[int] | IS, petsc: Sequence[int] | IS | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Return a basic application ordering using two orderings. Collective. @@ -88,22 +89,21 @@ cdef class AO(Object): isapp = (app).iset if petsc is not None: ispetsc = (petsc).iset - CHKERR( AOCreateBasicIS(isapp, ispetsc, &newao) ) + CHKERR(AOCreateBasicIS(isapp, ispetsc, &newao)) else: app = iarray_i(app, &napp, &idxapp) if petsc is not None: petsc = iarray_i(petsc, &npetsc, &idxpetsc) assert napp == npetsc, "incompatible array sizes" - CHKERR( AOCreateBasic(ccomm, napp, idxapp, idxpetsc, &newao) ) - CHKERR( PetscCLEAR(self.obj) ); self.ao = newao + CHKERR(AOCreateBasic(ccomm, napp, idxapp, idxpetsc, &newao)) + CHKERR(PetscCLEAR(self.obj)); self.ao = newao return self def createMemoryScalable( self, app: Sequence[int] | IS, petsc: Sequence[int] | IS | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Return a memory scalable application ordering using two orderings. Collective. @@ -141,22 +141,21 @@ cdef class AO(Object): isapp = (app).iset if petsc is not None: ispetsc = (petsc).iset - CHKERR( AOCreateMemoryScalableIS(isapp, ispetsc, &newao) ) + CHKERR(AOCreateMemoryScalableIS(isapp, ispetsc, &newao)) else: app = iarray_i(app, &napp, &idxapp) if petsc is not None: petsc = iarray_i(petsc, &npetsc, &idxpetsc) assert napp == npetsc, "incompatible array sizes" - CHKERR( AOCreateMemoryScalable(ccomm, napp, idxapp, idxpetsc, &newao) ) - CHKERR( PetscCLEAR(self.obj) ); self.ao = newao + CHKERR(AOCreateMemoryScalable(ccomm, napp, idxapp, idxpetsc, &newao)) + CHKERR(PetscCLEAR(self.obj)); self.ao = newao return self def createMapping( self, app: Sequence[int] | IS, petsc: Sequence[int] | IS | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Return an application mapping using two orderings. Collective. 
@@ -188,14 +187,14 @@ cdef class AO(Object): isapp = (app).iset if petsc is not None: ispetsc = (petsc).iset - CHKERR( AOCreateMappingIS(isapp, ispetsc, &newao) ) + CHKERR(AOCreateMappingIS(isapp, ispetsc, &newao)) else: app = iarray_i(app, &napp, &idxapp) if petsc is not None: petsc = iarray_i(petsc, &npetsc, &idxpetsc) assert napp == npetsc, "incompatible array sizes" - CHKERR( AOCreateMapping(ccomm, napp, idxapp, idxpetsc, &newao) ) - CHKERR( PetscCLEAR(self.obj) ); self.ao = newao + CHKERR(AOCreateMapping(ccomm, napp, idxapp, idxpetsc, &newao)) + CHKERR(PetscCLEAR(self.obj)); self.ao = newao return self def getType(self) -> str: @@ -209,7 +208,7 @@ cdef class AO(Object): """ cdef PetscAOType cval = NULL - CHKERR( AOGetType(self.ao, &cval) ) + CHKERR(AOGetType(self.ao, &cval)) return bytes2str(cval) def app2petsc(self, indices: Sequence[int] | IS) -> Sequence[int] | IS: @@ -240,10 +239,10 @@ cdef class AO(Object): cdef PetscInt nidx = 0, *idx = NULL if isinstance(indices, IS): iset = (indices).iset - CHKERR( AOApplicationToPetscIS(self.ao, iset) ) + CHKERR(AOApplicationToPetscIS(self.ao, iset)) else: indices = oarray_i(indices, &nidx, &idx) - CHKERR( AOApplicationToPetsc(self.ao, nidx, idx) ) + CHKERR(AOApplicationToPetsc(self.ao, nidx, idx)) return indices def petsc2app(self, indices: Sequence[int] | IS) -> Sequence[int] | IS: @@ -274,10 +273,10 @@ cdef class AO(Object): cdef PetscInt nidx = 0, *idx = NULL if isinstance(indices, IS): iset = (indices).iset - CHKERR( AOPetscToApplicationIS(self.ao, iset) ) + CHKERR(AOPetscToApplicationIS(self.ao, iset)) else: indices = oarray_i(indices, &nidx, &idx) - CHKERR( AOPetscToApplication(self.ao, nidx, idx) ) + CHKERR(AOPetscToApplication(self.ao, nidx, idx)) return indices # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/CAPI.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/CAPI.pyx index 174d046c9d0..7b86ceeb4d9 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/CAPI.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/CAPI.pyx @@ -1,13 +1,13 @@ -#--------------------------------------------------------------------- +# --------------------------------------------------------------------- cdef inline int setref(void *d, void *s) except -1: - cdef PetscObject *dest = d - cdef PetscObject source = s - CHKERR( PetscINCREF(&source) ) + cdef PetscObject *dest = d + cdef PetscObject source = s + CHKERR(PetscINCREF(&source)) dest[0] = source return 0 -#--------------------------------------------------------------------- +# --------------------------------------------------------------------- # -- Error -- @@ -82,6 +82,7 @@ cdef api PetscRandom PyPetscRandom_Get(object arg) except ? NULL: cdef api Device PyPetscDevice_New(PetscDevice arg): cdef Device ret = Device() + CHKERR(PetscDeviceReference(arg)) ret.device = arg return ret @@ -401,5 +402,4 @@ cdef api PetscDualSpace PyPetscDualSpace_Get(object arg) except ? 
NULL: retv = ob.dualspace return retv - -#--------------------------------------------------------------------- +# --------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Comm.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Comm.pyx index 5e2306ab629..089f7336123 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Comm.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Comm.pyx @@ -30,7 +30,7 @@ cdef class Comm: def __dealloc__(self): if self.isdup: - CHKERR( PetscCommDEALLOC(&self.comm) ) + CHKERR(PetscCommDEALLOC(&self.comm)) self.comm = MPI_COMM_NULL self.isdup = 0 self.base = None @@ -46,7 +46,7 @@ cdef class Comm: cdef MPI_Comm comm2 = o.comm cdef int flag = 0 if comm1 != MPI_COMM_NULL and comm2 != MPI_COMM_NULL: - CHKERR( MPI_Comm_compare(comm1, comm2, &flag) ) + CHKERR(MPI_Comm_compare(comm1, comm2, &flag)) if eq: return (flag==MPI_IDENT or flag==MPI_CONGRUENT) else: return (flag!=MPI_IDENT and flag!=MPI_CONGRUENT) else: @@ -71,7 +71,7 @@ cdef class Comm: if self.comm == MPI_COMM_NULL: return if not self.isdup: raise ValueError("communicator not owned") - CHKERR( PetscCommDestroy(&self.comm) ) + CHKERR(PetscCommDestroy(&self.comm)) self.comm = MPI_COMM_NULL self.isdup = 0 self.base = None @@ -89,7 +89,7 @@ cdef class Comm: if self.comm == MPI_COMM_NULL: raise ValueError("null communicator") cdef MPI_Comm newcomm = MPI_COMM_NULL - CHKERR( PetscCommDuplicate(self.comm, &newcomm, NULL) ) + CHKERR(PetscCommDuplicate(self.comm, &newcomm, NULL)) cdef Comm comm = type(self)() comm.comm = newcomm comm.isdup = 1 @@ -105,7 +105,7 @@ cdef class Comm: if self.comm == MPI_COMM_NULL: raise ValueError("null communicator") cdef int size=0 - CHKERRMPI( MPI_Comm_size(self.comm, &size) ) + CHKERRMPI(MPI_Comm_size(self.comm, &size)) return size def getRank(self) -> int: @@ -117,7 +117,7 @@ cdef class Comm: if self.comm == MPI_COMM_NULL: raise ValueError("null communicator") cdef int rank=0 - CHKERRMPI( MPI_Comm_rank(self.comm, &rank) ) + CHKERRMPI(MPI_Comm_rank(self.comm, &rank)) return rank def barrier(self) -> None: @@ -128,7 +128,7 @@ cdef class Comm: """ if self.comm == MPI_COMM_NULL: raise ValueError("null communicator") - CHKERRMPI( MPI_Barrier(self.comm) ) + CHKERRMPI(MPI_Barrier(self.comm)) # --- properties --- @@ -191,9 +191,9 @@ cdef MPI_Comm PETSC_COMM_DEFAULT = MPI_COMM_NULL cdef MPI_Comm GetComm( object comm, MPI_Comm defv, ) except? MPI_COMM_NULL: - return def_Comm(comm, defv) + return def_Comm(comm, defv) cdef MPI_Comm GetCommDefault(): - return PETSC_COMM_DEFAULT + return PETSC_COMM_DEFAULT # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Const.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Const.pyx index ae84bd9abfc..24e54dcb228 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Const.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Const.pyx @@ -34,6 +34,7 @@ More constants: # ------------------------------------------------------------------------------ + class InsertMode(object): """Insertion mode. @@ -71,6 +72,7 @@ class InsertMode(object): # ------------------------------------------------------------------------------ + class ScatterMode(object): """Scatter mode. @@ -100,6 +102,7 @@ class ScatterMode(object): # ------------------------------------------------------------------------------ + class NormType(object): """Norm type. 
diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DM.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DM.pyx index 325401619be..0f5b55bb413 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DM.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DM.pyx @@ -18,6 +18,7 @@ class DMType(object): PRODUCT = S_(DMPRODUCT) STAG = S_(DMSTAG) + class DMBoundaryType(object): """`DM` Boundary types.""" NONE = DM_BOUNDARY_NONE @@ -26,7 +27,9 @@ class DMBoundaryType(object): PERIODIC = DM_BOUNDARY_PERIODIC TWIST = DM_BOUNDARY_TWIST + class DMPolytopeType(object): + """The `DM` cell types.""" POINT = DM_POLYTOPE_POINT SEGMENT = DM_POLYTOPE_SEGMENT POINT_PRISM_TENSOR = DM_POLYTOPE_POINT_PRISM_TENSOR @@ -45,24 +48,24 @@ class DMPolytopeType(object): UNKNOWN_CELL = DM_POLYTOPE_UNKNOWN_CELL UNKNOWN_FACE = DM_POLYTOPE_UNKNOWN_FACE + class DMReorderDefaultFlag(object): + """The `DM` reordering default flags.""" NOTSET = DM_REORDER_DEFAULT_NOTSET FALSE = DM_REORDER_DEFAULT_FALSE TRUE = DM_REORDER_DEFAULT_TRUE # -------------------------------------------------------------------- -cdef class DM(Object): - """An object describing a computational grid or mesh. - """ +cdef class DM(Object): + """An object describing a computational grid or mesh.""" Type = DMType BoundaryType = DMBoundaryType PolytopeType = DMPolytopeType ReorderDefaultFlag = DMReorderDefaultFlag - """Flag indicating whether `DM` is reordered by default.""" # @@ -87,7 +90,7 @@ cdef class DM(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( DMView(self.dm, vwr) ) + CHKERR(DMView(self.dm, vwr)) def load(self, Viewer viewer) -> Self: """Return a `DM` stored in binary. @@ -112,7 +115,7 @@ cdef class DM(Object): DM.view, DM.load, Object.setName, petsc.DMLoad """ - CHKERR( DMLoad(self.dm, viewer.vwr) ) + CHKERR(DMLoad(self.dm, viewer.vwr)) return self def destroy(self) -> Self: @@ -125,7 +128,7 @@ cdef class DM(Object): petsc.DMDestroy """ - CHKERR( DMDestroy(&self.dm) ) + CHKERR(DMDestroy(&self.dm)) return self def create(self, comm: Comm | None = None) -> Self: @@ -145,8 +148,8 @@ cdef class DM(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newdm = NULL - CHKERR( DMCreate(ccomm, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMCreate(ccomm, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def clone(self) -> DM: @@ -160,7 +163,7 @@ cdef class DM(Object): """ cdef DM dm = type(self)() - CHKERR( DMClone(self.dm, &dm.dm) ) + CHKERR(DMClone(self.dm, &dm.dm)) return dm def setType(self, dm_type: DM.Type | str) -> None: @@ -184,7 +187,7 @@ cdef class DM(Object): """ cdef PetscDMType cval = NULL dm_type = str2bytes(dm_type, &cval) - CHKERR( DMSetType(self.dm, cval) ) + CHKERR(DMSetType(self.dm, cval)) def getType(self) -> str: """Return the `DM` type name. @@ -197,7 +200,7 @@ cdef class DM(Object): """ cdef PetscDMType cval = NULL - CHKERR( DMGetType(self.dm, &cval) ) + CHKERR(DMGetType(self.dm, &cval)) return bytes2str(cval) def getDimension(self) -> int: @@ -211,7 +214,7 @@ cdef class DM(Object): """ cdef PetscInt dim = 0 - CHKERR( DMGetDimension(self.dm, &dim) ) + CHKERR(DMGetDimension(self.dm, &dim)) return toInt(dim) def setDimension(self, dim: int) -> None: @@ -230,7 +233,7 @@ cdef class DM(Object): """ cdef PetscInt cdim = asInt(dim) - CHKERR( DMSetDimension(self.dm, cdim) ) + CHKERR(DMSetDimension(self.dm, cdim)) def getCoordinateDim(self) -> int: """Return the dimension of embedding space for coordinates values. 
@@ -243,7 +246,7 @@ cdef class DM(Object): """ cdef PetscInt dim = 0 - CHKERR( DMGetCoordinateDim(self.dm, &dim) ) + CHKERR(DMGetCoordinateDim(self.dm, &dim)) return toInt(dim) def setCoordinateDim(self, dim: int) -> None: @@ -262,9 +265,9 @@ cdef class DM(Object): """ cdef PetscInt cdim = asInt(dim) - CHKERR( DMSetCoordinateDim(self.dm, cdim) ) + CHKERR(DMSetCoordinateDim(self.dm, cdim)) - def setOptionsPrefix(self, prefix: str) -> None: + def setOptionsPrefix(self, prefix: str | None) -> None: """Set the prefix used for searching for options in the database. Logically collective. @@ -276,7 +279,7 @@ cdef class DM(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( DMSetOptionsPrefix(self.dm, cval) ) + CHKERR(DMSetOptionsPrefix(self.dm, cval)) def getOptionsPrefix(self) -> str: """Return the prefix used for searching for options in the database. @@ -289,10 +292,10 @@ cdef class DM(Object): """ cdef const char *cval = NULL - CHKERR( DMGetOptionsPrefix(self.dm, &cval) ) + CHKERR(DMGetOptionsPrefix(self.dm, &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None) -> None: """Append to the prefix used for searching for options in the database. Logically collective. @@ -304,7 +307,7 @@ cdef class DM(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( DMAppendOptionsPrefix(self.dm, cval) ) + CHKERR(DMAppendOptionsPrefix(self.dm, cval)) def setFromOptions(self) -> None: """Configure the object from the options database. @@ -316,30 +319,7 @@ cdef class DM(Object): petsc_options, petsc.DMSetFromOptions """ - CHKERR( DMSetFromOptions(self.dm) ) - - def viewFromOptions(self, name: str, Object obj=None) -> None: - """View a `DM` based in the options. - - Collective. - - Parameters - ---------- - name - Name used to activate the viewing. - obj - Object provides the prefix for the options database. - - See Also - -------- - petsc.DMViewFromOptions - - """ - cdef const char *cname = NULL - _ = str2bytes(name, &cname) - cdef PetscObject cobj = NULL - if obj is not None: cobj = obj.obj[0] - CHKERR( DMViewFromOptions(self.dm, cobj, cname) ) + CHKERR(DMSetFromOptions(self.dm)) def setUp(self) -> Self: """Return the data structure. @@ -351,15 +331,17 @@ cdef class DM(Object): petsc.DMSetUp """ - CHKERR( DMSetUp(self.dm) ) + CHKERR(DMSetUp(self.dm)) return self # --- application context --- - def setAppCtx(self, appctx): + def setAppCtx(self, appctx: Any) -> None: + """Set the application context.""" self.set_attr('__appctx__', appctx) - def getAppCtx(self): + def getAppCtx(self) -> Any: + """Return the application context.""" return self.get_attr('__appctx__') # @@ -383,7 +365,7 @@ cdef class DM(Object): """ cdef PetscBool uC = useCone cdef PetscBool uCl = useClosure - CHKERR( DMSetBasicAdjacency(self.dm, uC, uCl) ) + CHKERR(DMSetBasicAdjacency(self.dm, uC, uCl)) def getBasicAdjacency(self) -> tuple[bool, bool]: """Return the flags for determining variable influence. 
@@ -404,7 +386,7 @@ cdef class DM(Object): """ cdef PetscBool uC = PETSC_FALSE cdef PetscBool uCl = PETSC_FALSE - CHKERR( DMGetBasicAdjacency(self.dm, &uC, &uCl) ) + CHKERR(DMGetBasicAdjacency(self.dm, &uC, &uCl)) return toBool(uC), toBool(uCl) def setFieldAdjacency(self, field: int, useCone: bool, useClosure: bool) -> None: @@ -429,7 +411,7 @@ cdef class DM(Object): cdef PetscInt f = asInt(field) cdef PetscBool uC = useCone cdef PetscBool uCl = useClosure - CHKERR( DMSetAdjacency(self.dm, f, uC, uCl) ) + CHKERR(DMSetAdjacency(self.dm, f, uC, uCl)) def getFieldAdjacency(self, field: int) -> tuple[bool, bool]: """Return the flags for determining variable influence. @@ -456,7 +438,7 @@ cdef class DM(Object): cdef PetscInt f = asInt(field) cdef PetscBool uC = PETSC_FALSE cdef PetscBool uCl = PETSC_FALSE - CHKERR( DMGetAdjacency(self.dm, f, &uC, &uCl) ) + CHKERR(DMGetAdjacency(self.dm, f, &uC, &uCl)) return toBool(uC), toBool(uCl) # @@ -483,7 +465,7 @@ cdef class DM(Object): cdef PetscInt *ifields = NULL cdef PetscInt numFields = 0 fields = iarray_i(fields, &numFields, &ifields) - CHKERR( DMCreateSubDM( self.dm, numFields, ifields, &iset.iset, &subdm.dm) ) + CHKERR(DMCreateSubDM(self.dm, numFields, ifields, &iset.iset, &subdm.dm)) return iset, subdm # @@ -515,8 +497,8 @@ cdef class DM(Object): cdef PetscDMLabel clbl = NULL label = str2bytes(label, &cval) if cval == NULL: cval = b"" # XXX Should be fixed upstream - CHKERR( DMGetLabel(self.dm, cval, &clbl) ) - CHKERR( DMSetAuxiliaryVec(self.dm, clbl, cvalue, cpart, aux.vec) ) + CHKERR(DMGetLabel(self.dm, cval, &clbl)) + CHKERR(DMSetAuxiliaryVec(self.dm, clbl, cvalue, cpart, aux.vec)) def getAuxiliaryVec(self, label: str | None = None, value: int | None = 0, part: int | None = 0) -> Vec: """Return an auxiliary vector for region. @@ -544,8 +526,8 @@ cdef class DM(Object): cdef Vec aux = Vec() label = str2bytes(label, &cval) if cval == NULL: cval = b"" # XXX Should be fixed upstream - CHKERR( DMGetLabel(self.dm, cval, &clbl) ) - CHKERR( DMGetAuxiliaryVec(self.dm, clbl, cvalue, cpart, &aux.vec) ) + CHKERR(DMGetLabel(self.dm, cval, &clbl)) + CHKERR(DMGetAuxiliaryVec(self.dm, clbl, cvalue, cpart, &aux.vec)) return aux def setNumFields(self, numFields: int) -> None: @@ -559,7 +541,7 @@ cdef class DM(Object): """ cdef PetscInt cnum = asInt(numFields) - CHKERR( DMSetNumFields(self.dm, cnum) ) + CHKERR(DMSetNumFields(self.dm, cnum)) def getNumFields(self) -> int: """Return the number of fields in the `DM`. @@ -572,7 +554,7 @@ cdef class DM(Object): """ cdef PetscInt cnum = 0 - CHKERR( DMGetNumFields(self.dm, &cnum) ) + CHKERR(DMGetNumFields(self.dm, &cnum)) return toInt(cnum) def setField(self, index: int, Object field, label: str | None = None) -> None: @@ -599,7 +581,7 @@ cdef class DM(Object): cdef PetscObject cobj = field.obj[0] cdef PetscDMLabel clbl = NULL assert label is None - CHKERR( DMSetField(self.dm, cidx, clbl, cobj) ) + CHKERR(DMSetField(self.dm, cidx, clbl, cobj)) def getField(self, index: int) -> tuple[Object, None]: """Return the discretization object for a given `DM` field. 
@@ -619,11 +601,11 @@ cdef class DM(Object): cdef PetscInt cidx = asInt(index) cdef PetscObject cobj = NULL cdef PetscDMLabel clbl = NULL - CHKERR( DMGetField(self.dm, cidx, &clbl, &cobj) ) + CHKERR(DMGetField(self.dm, cidx, &clbl, &cobj)) assert clbl == NULL cdef Object field = subtype_Object(cobj)() field.obj[0] = cobj - CHKERR( PetscINCREF(field.obj) ) + CHKERR(PetscINCREF(field.obj)) return (field, None) # TODO REVIEW def addField(self, Object field, label: str | None = None) -> None: @@ -647,7 +629,7 @@ cdef class DM(Object): cdef PetscObject cobj = field.obj[0] cdef PetscDMLabel clbl = NULL assert label is None - CHKERR( DMAddField(self.dm, clbl, cobj) ) + CHKERR(DMAddField(self.dm, clbl, cobj)) def clearFields(self) -> None: """Remove all fields from the `DM`. @@ -659,7 +641,7 @@ cdef class DM(Object): petsc.DMClearFields """ - CHKERR( DMClearFields(self.dm) ) + CHKERR(DMClearFields(self.dm)) def copyFields(self, DM dm) -> None: """Copy the discretizations of this `DM` into another `DM`. @@ -676,7 +658,7 @@ cdef class DM(Object): petsc.DMCopyFields """ - CHKERR( DMCopyFields(self.dm, dm.dm) ) + CHKERR(DMCopyFields(self.dm, dm.dm)) def createDS(self) -> None: """Create discrete systems. @@ -688,7 +670,7 @@ cdef class DM(Object): petsc.DMCreateDS """ - CHKERR( DMCreateDS(self.dm) ) + CHKERR(DMCreateDS(self.dm)) def clearDS(self) -> None: """Remove all discrete systems from the `DM`. @@ -700,7 +682,7 @@ cdef class DM(Object): petsc.DMClearDS """ - CHKERR( DMClearDS(self.dm) ) + CHKERR(DMClearDS(self.dm)) def getDS(self) -> DS: """Return default `DS`. @@ -713,8 +695,8 @@ cdef class DM(Object): """ cdef DS ds = DS() - CHKERR( DMGetDS(self.dm, &ds.ds) ) - CHKERR( PetscINCREF(ds.obj) ) + CHKERR(DMGetDS(self.dm, &ds.ds)) + CHKERR(PetscINCREF(ds.obj)) return ds def copyDS(self, DM dm) -> None: @@ -732,7 +714,7 @@ cdef class DM(Object): petsc.DMCopyDS """ - CHKERR( DMCopyDS(self.dm, dm.dm) ) + CHKERR(DMCopyDS(self.dm, dm.dm)) def copyDisc(self, DM dm) -> None: """Copy fields and discrete systems of a `DM` into another `DM`. @@ -749,7 +731,7 @@ cdef class DM(Object): petsc.DMCopyDisc """ - CHKERR( DMCopyDisc(self.dm, dm.dm) ) + CHKERR(DMCopyDisc(self.dm, dm.dm)) # @@ -764,7 +746,7 @@ cdef class DM(Object): """ cdef PetscInt bs = 1 - CHKERR( DMGetBlockSize(self.dm, &bs) ) + CHKERR(DMGetBlockSize(self.dm, &bs)) return toInt(bs) def setVecType(self, vec_type: Vec.Type | str) -> None: @@ -779,7 +761,7 @@ cdef class DM(Object): """ cdef PetscVecType vtype = NULL vec_type = str2bytes(vec_type, &vtype) - CHKERR( DMSetVecType(self.dm, vtype) ) + CHKERR(DMSetVecType(self.dm, vtype)) def createGlobalVec(self) -> Vec: """Return a global vector. 
@@ -792,7 +774,7 @@ cdef class DM(Object): """ cdef Vec vg = Vec() - CHKERR( DMCreateGlobalVector(self.dm, &vg.vec) ) + CHKERR(DMCreateGlobalVector(self.dm, &vg.vec)) return vg def createLocalVec(self) -> Vec: @@ -806,7 +788,7 @@ cdef class DM(Object): """ cdef Vec vl = Vec() - CHKERR( DMCreateLocalVector(self.dm, &vl.vec) ) + CHKERR(DMCreateLocalVector(self.dm, &vl.vec)) return vl def getGlobalVec(self) -> Vec: @@ -820,8 +802,8 @@ cdef class DM(Object): """ cdef Vec vg = Vec() - CHKERR( DMGetGlobalVector(self.dm, &vg.vec) ) - CHKERR( PetscINCREF(vg.obj) ) + CHKERR(DMGetGlobalVector(self.dm, &vg.vec)) + CHKERR(PetscINCREF(vg.obj)) return vg def restoreGlobalVec(self, Vec vg) -> None: @@ -839,8 +821,8 @@ cdef class DM(Object): petsc.DMRestoreGlobalVector """ - CHKERR( PetscObjectDereference(vg.vec) ) - CHKERR( DMRestoreGlobalVector(self.dm, &vg.vec) ) + CHKERR(PetscObjectDereference(vg.vec)) + CHKERR(DMRestoreGlobalVector(self.dm, &vg.vec)) def getLocalVec(self) -> Vec: """Return a local vector. @@ -853,8 +835,8 @@ cdef class DM(Object): """ cdef Vec vl = Vec() - CHKERR( DMGetLocalVector(self.dm, &vl.vec) ) - CHKERR( PetscINCREF(vl.obj) ) + CHKERR(DMGetLocalVector(self.dm, &vl.vec)) + CHKERR(PetscINCREF(vl.obj)) return vl def restoreLocalVec(self, Vec vl) -> None: @@ -872,8 +854,8 @@ cdef class DM(Object): petsc.DMRestoreLocalVector """ - CHKERR( PetscObjectDereference(vl.vec) ) - CHKERR( DMRestoreLocalVector(self.dm, &vl.vec) ) + CHKERR(PetscObjectDereference(vl.vec)) + CHKERR(DMRestoreLocalVector(self.dm, &vl.vec)) def globalToLocal(self, Vec vg, Vec vl, addv: InsertModeSpec | None = None) -> None: """Update local vectors from global vector. @@ -895,8 +877,8 @@ cdef class DM(Object): """ cdef PetscInsertMode im = insertmode(addv) - CHKERR( DMGlobalToLocalBegin(self.dm, vg.vec, im, vl.vec) ) - CHKERR( DMGlobalToLocalEnd (self.dm, vg.vec, im, vl.vec) ) + CHKERR(DMGlobalToLocalBegin(self.dm, vg.vec, im, vl.vec)) + CHKERR(DMGlobalToLocalEnd (self.dm, vg.vec, im, vl.vec)) def localToGlobal(self, Vec vl, Vec vg, addv: InsertModeSpec | None = None) -> None: """Update global vectors from local vector. @@ -918,8 +900,8 @@ cdef class DM(Object): """ cdef PetscInsertMode im = insertmode(addv) - CHKERR( DMLocalToGlobalBegin(self.dm, vl.vec, im, vg.vec) ) - CHKERR( DMLocalToGlobalEnd(self.dm, vl.vec, im, vg.vec) ) + CHKERR(DMLocalToGlobalBegin(self.dm, vl.vec, im, vg.vec)) + CHKERR(DMLocalToGlobalEnd(self.dm, vl.vec, im, vg.vec)) def localToLocal(self, Vec vl, Vec vlg, addv: InsertModeSpec | None = None) -> None: """Map the values from a local vector to another local vector. @@ -941,8 +923,8 @@ cdef class DM(Object): """ cdef PetscInsertMode im = insertmode(addv) - CHKERR( DMLocalToLocalBegin(self.dm, vl.vec, im, vlg.vec) ) - CHKERR( DMLocalToLocalEnd (self.dm, vl.vec, im, vlg.vec) ) + CHKERR(DMLocalToLocalBegin(self.dm, vl.vec, im, vlg.vec)) + CHKERR(DMLocalToLocalEnd (self.dm, vl.vec, im, vlg.vec)) def getLGMap(self) -> LGMap: """Return local mapping to global mapping. 
@@ -955,8 +937,8 @@ cdef class DM(Object): """ cdef LGMap lgm = LGMap() - CHKERR( DMGetLocalToGlobalMapping(self.dm, &lgm.lgm) ) - CHKERR( PetscINCREF(lgm.obj) ) + CHKERR(DMGetLocalToGlobalMapping(self.dm, &lgm.lgm)) + CHKERR(PetscINCREF(lgm.obj)) return lgm # @@ -972,8 +954,8 @@ cdef class DM(Object): """ cdef DM cdm = type(self)() - CHKERR( DMGetCoarseDM(self.dm, &cdm.dm) ) - CHKERR( PetscINCREF(cdm.obj) ) + CHKERR(DMGetCoarseDM(self.dm, &cdm.dm)) + CHKERR(PetscINCREF(cdm.obj)) return cdm def setCoarseDM(self, DM dm) -> None: @@ -986,7 +968,7 @@ cdef class DM(Object): petsc.DMSetCoarseDM """ - CHKERR( DMSetCoarseDM(self.dm, dm.dm) ) + CHKERR(DMSetCoarseDM(self.dm, dm.dm)) return def getCoordinateDM(self) -> DM: @@ -1000,8 +982,8 @@ cdef class DM(Object): """ cdef DM cdm = type(self)() - CHKERR( DMGetCoordinateDM(self.dm, &cdm.dm) ) - CHKERR( PetscINCREF(cdm.obj) ) + CHKERR(DMGetCoordinateDM(self.dm, &cdm.dm)) + CHKERR(PetscINCREF(cdm.obj)) return cdm def getCoordinateSection(self) -> Section: @@ -1015,8 +997,8 @@ cdef class DM(Object): """ cdef Section sec = Section() - CHKERR( DMGetCoordinateSection(self.dm, &sec.sec) ) - CHKERR( PetscINCREF(sec.obj) ) + CHKERR(DMGetCoordinateSection(self.dm, &sec.sec)) + CHKERR(PetscINCREF(sec.obj)) return sec def setCoordinates(self, Vec c) -> None: @@ -1034,7 +1016,7 @@ cdef class DM(Object): petsc.DMSetCoordinates """ - CHKERR( DMSetCoordinates(self.dm, c.vec) ) + CHKERR(DMSetCoordinates(self.dm, c.vec)) def getCoordinates(self) -> Vec: """Return a global vector with the coordinates associated. @@ -1047,8 +1029,8 @@ cdef class DM(Object): """ cdef Vec c = Vec() - CHKERR( DMGetCoordinates(self.dm, &c.vec) ) - CHKERR( PetscINCREF(c.obj) ) + CHKERR(DMGetCoordinates(self.dm, &c.vec)) + CHKERR(PetscINCREF(c.obj)) return c def setCoordinatesLocal(self, Vec c) -> None: @@ -1066,7 +1048,7 @@ cdef class DM(Object): petsc.DMSetCoordinatesLocal """ - CHKERR( DMSetCoordinatesLocal(self.dm, c.vec) ) + CHKERR(DMSetCoordinatesLocal(self.dm, c.vec)) def getCoordinatesLocal(self) -> Vec: """Return a local vector with the coordinates associated. @@ -1079,8 +1061,8 @@ cdef class DM(Object): """ cdef Vec c = Vec() - CHKERR( DMGetCoordinatesLocal(self.dm, &c.vec) ) - CHKERR( PetscINCREF(c.obj) ) + CHKERR(DMGetCoordinatesLocal(self.dm, &c.vec)) + CHKERR(PetscINCREF(c.obj)) return c def setCellCoordinateDM(self, DM dm) -> None: @@ -1098,7 +1080,7 @@ cdef class DM(Object): petsc.DMSetCellCoordinateDM """ - CHKERR( DMSetCellCoordinateDM(self.dm, dm.dm) ) + CHKERR(DMSetCellCoordinateDM(self.dm, dm.dm)) def getCellCoordinateDM(self) -> DM: """Return the cell coordinate `DM`. @@ -1111,8 +1093,8 @@ cdef class DM(Object): """ cdef DM cdm = type(self)() - CHKERR( DMGetCellCoordinateDM(self.dm, &cdm.dm) ) - CHKERR( PetscINCREF(cdm.obj) ) + CHKERR(DMGetCellCoordinateDM(self.dm, &cdm.dm)) + CHKERR(PetscINCREF(cdm.obj)) return cdm def setCellCoordinateSection(self, dim: int, Section sec) -> None: @@ -1133,7 +1115,7 @@ cdef class DM(Object): """ cdef PetscInt cdim = asInt(dim) - CHKERR( DMSetCellCoordinateSection(self.dm, cdim, sec.sec) ) + CHKERR(DMSetCellCoordinateSection(self.dm, cdim, sec.sec)) def getCellCoordinateSection(self) -> Section: """Return the cell coordinate layout over the `DM`. 
@@ -1146,8 +1128,8 @@ cdef class DM(Object): """ cdef Section sec = Section() - CHKERR( DMGetCellCoordinateSection(self.dm, &sec.sec) ) - CHKERR( PetscINCREF(sec.obj) ) + CHKERR(DMGetCellCoordinateSection(self.dm, &sec.sec)) + CHKERR(PetscINCREF(sec.obj)) return sec def setCellCoordinates(self, Vec c) -> None: @@ -1165,7 +1147,7 @@ cdef class DM(Object): petsc.DMSetCellCoordinates """ - CHKERR( DMSetCellCoordinates(self.dm, c.vec) ) + CHKERR(DMSetCellCoordinates(self.dm, c.vec)) def getCellCoordinates(self) -> Vec: """Return a global vector with the cellwise coordinates. @@ -1178,8 +1160,8 @@ cdef class DM(Object): """ cdef Vec c = Vec() - CHKERR( DMGetCellCoordinates(self.dm, &c.vec) ) - CHKERR( PetscINCREF(c.obj) ) + CHKERR(DMGetCellCoordinates(self.dm, &c.vec)) + CHKERR(PetscINCREF(c.obj)) return c def setCellCoordinatesLocal(self, Vec c) -> None: @@ -1197,7 +1179,7 @@ cdef class DM(Object): petsc.DMSetCellCoordinatesLocal """ - CHKERR( DMSetCellCoordinatesLocal(self.dm, c.vec) ) + CHKERR(DMSetCellCoordinatesLocal(self.dm, c.vec)) def getCellCoordinatesLocal(self) -> Vec: """Return a local vector with the cellwise coordinates. @@ -1210,13 +1192,15 @@ cdef class DM(Object): """ cdef Vec c = Vec() - CHKERR( DMGetCellCoordinatesLocal(self.dm, &c.vec) ) - CHKERR( PetscINCREF(c.obj) ) + CHKERR(DMGetCellCoordinatesLocal(self.dm, &c.vec)) + CHKERR(PetscINCREF(c.obj)) return c def setCoordinateDisc(self, FE disc, project: bool) -> Self: """Project coordinates to a different space. + Collective. + Parameters ---------- disc @@ -1228,13 +1212,13 @@ cdef class DM(Object): """ cdef PetscBool pr = project - CHKERR( DMSetCoordinateDisc(self.dm, disc.fe, pr)) + CHKERR(DMSetCoordinateDisc(self.dm, disc.fe, pr)) return self def getCoordinatesLocalized(self) -> bool: """Check if the coordinates have been localized for cells. - Collective. + Not collective. See Also -------- @@ -1242,7 +1226,7 @@ cdef class DM(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( DMGetCoordinatesLocalized(self.dm, &flag) ) + CHKERR(DMGetCoordinatesLocalized(self.dm, &flag)) return toBool(flag) def getBoundingBox(self) -> tuple[tuple[float, float], ...]: @@ -1255,10 +1239,10 @@ cdef class DM(Object): petsc.DMGetBoundingBox """ - cdef PetscInt i,dim=0 - CHKERR( DMGetCoordinateDim(self.dm, &dim) ) + cdef PetscInt dim=0 + CHKERR(DMGetCoordinateDim(self.dm, &dim)) cdef PetscReal gmin[3], gmax[3] - CHKERR( DMGetBoundingBox(self.dm, gmin, gmax) ) + CHKERR(DMGetBoundingBox(self.dm, gmin, gmax)) return tuple([(toReal(gmin[i]), toReal(gmax[i])) for i from 0 <= i < dim]) @@ -1272,10 +1256,10 @@ cdef class DM(Object): petsc.DMGetLocalBoundingBox """ - cdef PetscInt i,dim=0 - CHKERR( DMGetCoordinateDim(self.dm, &dim) ) + cdef PetscInt dim=0 + CHKERR(DMGetCoordinateDim(self.dm, &dim)) cdef PetscReal lmin[3], lmax[3] - CHKERR( DMGetLocalBoundingBox(self.dm, lmin, lmax) ) + CHKERR(DMGetLocalBoundingBox(self.dm, lmin, lmax)) return tuple([(toReal(lmin[i]), toReal(lmax[i])) for i from 0 <= i < dim]) @@ -1293,7 +1277,7 @@ cdef class DM(Object): petsc.DMLocalizeCoordinates """ - CHKERR( DMLocalizeCoordinates(self.dm) ) + CHKERR(DMLocalizeCoordinates(self.dm)) # def setMatType(self, mat_type: Mat.Type | str) -> None: @@ -1317,7 +1301,7 @@ cdef class DM(Object): """ cdef PetscMatType mtype = NULL mat_type = str2bytes(mat_type, &mtype) - CHKERR( DMSetMatType(self.dm, mtype) ) + CHKERR(DMSetMatType(self.dm, mtype)) def createMat(self) -> Mat: """Return an empty matrix. 
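`getBoundingBox` returns one `(min, max)` pair per coordinate dimension, while `getLocalBoundingBox` is restricted to the calling rank. A sketch, assuming uniform coordinates were set first:

    from petsc4py import PETSc

    da = PETSc.DMDA().create(dim=2, sizes=(4, 4))
    da.setUniformCoordinates(xmin=-1.0, xmax=1.0, ymin=0.0, ymax=2.0)
    for lo, hi in da.getBoundingBox():   # global box across all ranks
        print(lo, hi)
    print(da.getLocalBoundingBox())      # this rank's portion only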
@@ -1330,7 +1314,7 @@ cdef class DM(Object): """ cdef Mat mat = Mat() - CHKERR( DMCreateMatrix(self.dm, &mat.mat) ) + CHKERR(DMCreateMatrix(self.dm, &mat.mat)) return mat def createMassMatrix(self, DM dmf) -> Mat: @@ -1349,7 +1333,7 @@ cdef class DM(Object): """ cdef Mat mat = Mat() - CHKERR( DMCreateMassMatrix(self.dm, dmf.dm, &mat.mat) ) + CHKERR(DMCreateMassMatrix(self.dm, dmf.dm, &mat.mat)) return mat def createInterpolation(self, DM dm) -> tuple[Mat, Vec]: @@ -1369,8 +1353,8 @@ cdef class DM(Object): """ cdef Mat A = Mat() cdef Vec scale = Vec() - CHKERR( DMCreateInterpolation(self.dm, dm.dm, - &A.mat, &scale.vec)) + CHKERR(DMCreateInterpolation(self.dm, dm.dm, + &A.mat, &scale.vec)) return (A, scale) def createInjection(self, DM dm) -> Mat: @@ -1389,7 +1373,7 @@ cdef class DM(Object): """ cdef Mat inject = Mat() - CHKERR( DMCreateInjection(self.dm, dm.dm, &inject.mat) ) + CHKERR(DMCreateInjection(self.dm, dm.dm, &inject.mat)) return inject def createRestriction(self, DM dm) -> Mat: @@ -1408,7 +1392,7 @@ cdef class DM(Object): """ cdef Mat mat = Mat() - CHKERR( DMCreateRestriction(self.dm, dm.dm, &mat.mat) ) + CHKERR(DMCreateRestriction(self.dm, dm.dm, &mat.mat)) return mat def convert(self, dm_type: DM.Type | str) -> DM: @@ -1429,7 +1413,7 @@ cdef class DM(Object): cdef PetscDMType cval = NULL dm_type = str2bytes(dm_type, &cval) cdef PetscDM newdm = NULL - CHKERR( DMConvert(self.dm, cval, &newdm) ) + CHKERR(DMConvert(self.dm, cval, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm return dm @@ -1450,10 +1434,10 @@ cdef class DM(Object): """ cdef MPI_Comm dmcomm = MPI_COMM_NULL - CHKERR( PetscObjectGetComm(self.dm, &dmcomm) ) + CHKERR(PetscObjectGetComm(self.dm, &dmcomm)) dmcomm = def_Comm(comm, dmcomm) cdef PetscDM newdm = NULL - CHKERR( DMRefine(self.dm, dmcomm, &newdm) ) + CHKERR(DMRefine(self.dm, dmcomm, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm return dm @@ -1474,10 +1458,10 @@ cdef class DM(Object): """ cdef MPI_Comm dmcomm = MPI_COMM_NULL - CHKERR( PetscObjectGetComm(self.dm, &dmcomm) ) + CHKERR(PetscObjectGetComm(self.dm, &dmcomm)) dmcomm = def_Comm(comm, dmcomm) cdef PetscDM newdm = NULL - CHKERR( DMCoarsen(self.dm, dmcomm, &newdm) ) + CHKERR(DMCoarsen(self.dm, dmcomm, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm return dm @@ -1499,8 +1483,8 @@ cdef class DM(Object): """ cdef PetscInt i, n = asInt(nlevels) cdef PetscDM *newdmf = NULL - cdef object tmp = oarray_p(empty_p(n), NULL, &newdmf) - CHKERR( DMRefineHierarchy(self.dm, n, newdmf) ) + cdef object unused = oarray_p(empty_p(n), NULL, &newdmf) + CHKERR(DMRefineHierarchy(self.dm, n, newdmf)) cdef DM dmf = None cdef list hierarchy = [] for i from 0 <= i < n: @@ -1526,8 +1510,8 @@ cdef class DM(Object): """ cdef PetscInt i, n = asInt(nlevels) cdef PetscDM *newdmc = NULL - cdef object tmp = oarray_p(empty_p(n),NULL, &newdmc) - CHKERR( DMCoarsenHierarchy(self.dm, n, newdmc) ) + cdef object unused = oarray_p(empty_p(n), NULL, &newdmc) + CHKERR(DMCoarsenHierarchy(self.dm, n, newdmc)) cdef DM dmc = None cdef list hierarchy = [] for i from 0 <= i < n: @@ -1547,7 +1531,7 @@ cdef class DM(Object): """ cdef PetscInt n = 0 - CHKERR( DMGetRefineLevel(self.dm, &n) ) + CHKERR(DMGetRefineLevel(self.dm, &n)) return toInt(n) def setRefineLevel(self, level: int) -> None: @@ -1566,7 +1550,7 @@ cdef class DM(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( DMSetRefineLevel(self.dm, clevel) ) + CHKERR(DMSetRefineLevel(self.dm, clevel)) def getCoarsenLevel(self) -> int: """Return the number of coarsenings. 
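The refinement hunks pair naturally with `createInterpolation`; a sketch assuming the usual PETSc convention that interpolation is built from the coarse DM (the receiver) to the fine DM (the argument):

    from petsc4py import PETSc

    coarse = PETSc.DMDA().create(dim=1, sizes=(9,))
    fine = coarse.refine()                 # new DM, one level finer
    A = fine.createMat()                   # matrix with the fine DM's layout
    P, scale = coarse.createInterpolation(fine)
    print(coarse.getRefineLevel(), fine.getRefineLevel())   # 0, 1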
@@ -1579,7 +1563,7 @@ cdef class DM(Object): """ cdef PetscInt n = 0 - CHKERR( DMGetCoarsenLevel(self.dm, &n) ) + CHKERR(DMGetCoarsenLevel(self.dm, &n)) return toInt(n) # @@ -1602,19 +1586,20 @@ cdef class DM(Object): cdef const char *cval = NULL cdef PetscDMLabel clbl = NULL label = str2bytes(label, &cval) - CHKERR( DMGetLabel(self.dm, cval, &clbl) ) + CHKERR(DMGetLabel(self.dm, cval, &clbl)) cdef DM newdm = DMPlex() - CHKERR( DMAdaptLabel(self.dm, clbl, &newdm.dm) ) + CHKERR(DMAdaptLabel(self.dm, clbl, &newdm.dm)) return newdm def adaptMetric( self, Vec metric, bdLabel: str | None = None, - rgLabel: str | None = None, - ) -> DM: + rgLabel: str | None = None) -> DM: """Return a mesh adapted to the specified metric field. + Collective. + Parameters ---------- metric @@ -1634,12 +1619,12 @@ cdef class DM(Object): cdef PetscDMLabel crglbl = NULL bdLabel = str2bytes(bdLabel, &cval) if cval == NULL: cval = b"" # XXX Should be fixed upstream - CHKERR( DMGetLabel(self.dm, cval, &cbdlbl) ) + CHKERR(DMGetLabel(self.dm, cval, &cbdlbl)) rgLabel = str2bytes(rgLabel, &cval) if cval == NULL: cval = b"" # XXX Should be fixed upstream - CHKERR( DMGetLabel(self.dm, cval, &crglbl) ) + CHKERR(DMGetLabel(self.dm, cval, &crglbl)) cdef DM newdm = DMPlex() - CHKERR( DMAdaptMetric(self.dm, metric.vec, cbdlbl, crglbl, &newdm.dm) ) + CHKERR(DMAdaptMetric(self.dm, metric.vec, cbdlbl, crglbl, &newdm.dm)) return newdm def getLabel(self, name: str) -> DMLabel: @@ -1655,8 +1640,8 @@ cdef class DM(Object): cdef const char *cname = NULL cdef DMLabel dmlabel = DMLabel() name = str2bytes(name, &cname) - CHKERR( DMGetLabel(self.dm, cname, &dmlabel.dmlabel) ) - CHKERR( PetscINCREF(dmlabel.obj) ) + CHKERR(DMGetLabel(self.dm, cname, &dmlabel.dmlabel)) + CHKERR(PetscINCREF(dmlabel.obj)) return dmlabel # @@ -1664,47 +1649,55 @@ cdef class DM(Object): def setLocalSection(self, Section sec) -> None: """Set the `Section` encoding the local data layout for the `DM`. + Collective. + See Also -------- petsc.DMSetLocalSection """ - CHKERR( DMSetLocalSection(self.dm, sec.sec) ) + CHKERR(DMSetLocalSection(self.dm, sec.sec)) def getLocalSection(self) -> Section: """Return the `Section` encoding the local data layout for the `DM`. + Not collective. + See Also -------- petsc.DMGetLocalSection """ cdef Section sec = Section() - CHKERR( DMGetLocalSection(self.dm, &sec.sec) ) - CHKERR( PetscINCREF(sec.obj) ) + CHKERR(DMGetLocalSection(self.dm, &sec.sec)) + CHKERR(PetscINCREF(sec.obj)) return sec def setGlobalSection(self, Section sec) -> None: """Set the `Section` encoding the global data layout for the `DM`. + Collective. + See Also -------- petsc.DMSetGlobalSection """ - CHKERR( DMSetGlobalSection(self.dm, sec.sec) ) + CHKERR(DMSetGlobalSection(self.dm, sec.sec)) def getGlobalSection(self) -> Section: """Return the `Section` encoding the global data layout for the `DM`. + Collective the first time it is called. + See Also -------- petsc.DMGetGlobalSection """ cdef Section sec = Section() - CHKERR( DMGetGlobalSection(self.dm, &sec.sec) ) - CHKERR( PetscINCREF(sec.obj) ) + CHKERR(DMGetGlobalSection(self.dm, &sec.sec)) + CHKERR(PetscINCREF(sec.obj)) return sec setSection = setLocalSection @@ -1719,6 +1712,8 @@ cdef class DM(Object): def createSectionSF(self, Section localsec, Section globalsec) -> None: """Create the `SF` encoding the parallel DOF overlap for the `DM`. + Collective.
+ Parameters ---------- localsec Describe the local data layout. globalsec Describe the global data layout. @@ -1735,30 +1730,34 @@ DM.getSectionSF, petsc.DMCreateSectionSF """ - CHKERR( DMCreateSectionSF(self.dm, localsec.sec, globalsec.sec) ) + CHKERR(DMCreateSectionSF(self.dm, localsec.sec, globalsec.sec)) def getSectionSF(self) -> SF: """Return the `Section` encoding the parallel DOF overlap. + Collective the first time it is called. + See Also -------- petsc.DMGetSectionSF """ cdef SF sf = SF() - CHKERR( DMGetSectionSF(self.dm, &sf.sf) ) - CHKERR( PetscINCREF(sf.obj) ) + CHKERR(DMGetSectionSF(self.dm, &sf.sf)) + CHKERR(PetscINCREF(sf.obj)) return sf def setSectionSF(self, SF sf) -> None: """Set the `Section` encoding the parallel DOF overlap for the `DM`. + Logically collective. + See Also -------- petsc.DMSetSectionSF """ - CHKERR( DMSetSectionSF(self.dm, sf.sf) ) + CHKERR(DMSetSectionSF(self.dm, sf.sf)) createDefaultSF = createSectionSF getDefaultSF = getSectionSF @@ -1767,25 +1766,29 @@ def getPointSF(self) -> SF: """Return the `SF` encoding the parallel DOF overlap for the `DM`. + Not collective. + See Also -------- petsc.DMGetPointSF """ cdef SF sf = SF() - CHKERR( DMGetPointSF(self.dm, &sf.sf) ) - CHKERR( PetscINCREF(sf.obj) ) + CHKERR(DMGetPointSF(self.dm, &sf.sf)) + CHKERR(PetscINCREF(sf.obj)) return sf def setPointSF(self, SF sf) -> None: """Set the `SF` encoding the parallel DOF overlap for the `DM`. + Logically collective. + See Also -------- petsc.DMSetPointSF """ - CHKERR( DMSetPointSF(self.dm, sf.sf) ) + CHKERR(DMSetPointSF(self.dm, sf.sf)) def getNumLabels(self) -> int: """Return the number of labels defined on the `DM`. @@ -1798,7 +1801,7 @@ """ cdef PetscInt nLabels = 0 - CHKERR( DMGetNumLabels(self.dm, &nLabels) ) + CHKERR(DMGetNumLabels(self.dm, &nLabels)) return toInt(nLabels) def getLabelName(self, index: int) -> str: @@ -1818,7 +1821,7 @@ """ cdef PetscInt cindex = asInt(index) cdef const char *cname = NULL - CHKERR( DMGetLabelName(self.dm, cindex, &cname) ) + CHKERR(DMGetLabelName(self.dm, cindex, &cname)) return bytes2str(cname) def hasLabel(self, name: str) -> bool: @@ -1839,7 +1842,7 @@ cdef PetscBool flag = PETSC_FALSE cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMHasLabel(self.dm, cname, &flag) ) + CHKERR(DMHasLabel(self.dm, cname, &flag)) return toBool(flag) def createLabel(self, name: str) -> None: @@ -1859,7 +1862,7 @@ """ cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMCreateLabel(self.dm, cname) ) + CHKERR(DMCreateLabel(self.dm, cname)) def removeLabel(self, name: str) -> None: """Remove and destroy the label by name. @@ -1879,9 +1882,9 @@ cdef const char *cname = NULL cdef PetscDMLabel clbl = NULL name = str2bytes(name, &cname) - CHKERR( DMRemoveLabel(self.dm, cname, &clbl) ) + CHKERR(DMRemoveLabel(self.dm, cname, &clbl)) # TODO: Once DMLabel is wrapped, this should return the label, like the C function. - CHKERR( DMLabelDestroy(&clbl) ) + CHKERR(DMLabelDestroy(&clbl)) def getLabelValue(self, name: str, point: int) -> int: """Return the value in `DMLabel` for the given point.
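A quick sketch of the SF and label queries above, assuming a small `DMPlex` box mesh (box meshes carry a few predefined labels such as the boundary marker):

    from petsc4py import PETSc

    plex = PETSc.DMPlex().createBoxMesh((2, 2), simplex=True)
    sf = plex.getPointSF()               # parallel overlap of mesh points
    for i in range(plex.getNumLabels()):
        name = plex.getLabelName(i)
        print(name, plex.hasLabel(name))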
@@ -1903,7 +1906,7 @@ cdef class DM(Object): cdef PetscInt cpoint = asInt(point), value = 0 cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMGetLabelValue(self.dm, cname, cpoint, &value) ) + CHKERR(DMGetLabelValue(self.dm, cname, cpoint, &value)) return toInt(value) def setLabelValue(self, name: str, point: int, value: int) -> None: @@ -1928,7 +1931,7 @@ cdef class DM(Object): cdef PetscInt cpoint = asInt(point), cvalue = asInt(value) cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMSetLabelValue(self.dm, cname, cpoint, cvalue) ) + CHKERR(DMSetLabelValue(self.dm, cname, cpoint, cvalue)) def clearLabelValue(self, name: str, point: int, value: int) -> None: """Remove a point from a `DMLabel` with given value. @@ -1952,7 +1955,7 @@ cdef class DM(Object): cdef PetscInt cpoint = asInt(point), cvalue = asInt(value) cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMClearLabelValue(self.dm, cname, cpoint, cvalue) ) + CHKERR(DMClearLabelValue(self.dm, cname, cpoint, cvalue)) def getLabelSize(self, name: str) -> int: """Return the number of values that the `DMLabel` takes. @@ -1972,7 +1975,7 @@ cdef class DM(Object): cdef PetscInt size = 0 cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMGetLabelSize(self.dm, cname, &size) ) + CHKERR(DMGetLabelSize(self.dm, cname, &size)) return toInt(size) def getLabelIdIS(self, name: str) -> IS: @@ -1993,7 +1996,7 @@ cdef class DM(Object): cdef const char *cname = NULL name = str2bytes(name, &cname) cdef IS lis = IS() - CHKERR( DMGetLabelIdIS(self.dm, cname, &lis.iset) ) + CHKERR(DMGetLabelIdIS(self.dm, cname, &lis.iset)) return lis def getStratumSize(self, name: str, value: int) -> int: @@ -2017,7 +2020,7 @@ cdef class DM(Object): cdef PetscInt cvalue = asInt(value) cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMGetStratumSize(self.dm, cname, cvalue, &size) ) + CHKERR(DMGetStratumSize(self.dm, cname, cvalue, &size)) return toInt(size) def getStratumIS(self, name: str, value: int) -> IS: @@ -2041,7 +2044,7 @@ cdef class DM(Object): cdef const char *cname = NULL name = str2bytes(name, &cname) cdef IS sis = IS() - CHKERR( DMGetStratumIS(self.dm, cname, cvalue, &sis.iset) ) + CHKERR(DMGetStratumIS(self.dm, cname, cvalue, &sis.iset)) return sis def clearLabelStratum(self, name: str, value: int) -> None: @@ -2064,7 +2067,7 @@ cdef class DM(Object): cdef PetscInt cvalue = asInt(value) cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMClearLabelStratum(self.dm, cname, cvalue) ) + CHKERR(DMClearLabelStratum(self.dm, cname, cvalue)) def setLabelOutput(self, name: str, output: bool) -> None: """Set if a given label should be saved to a view. @@ -2086,7 +2089,7 @@ cdef class DM(Object): cdef const char *cname = NULL name = str2bytes(name, &cname) cdef PetscBool coutput = output - CHKERR( DMSetLabelOutput(self.dm, cname, coutput) ) + CHKERR(DMSetLabelOutput(self.dm, cname, coutput)) def getLabelOutput(self, name: str) -> bool: """Return the output flag for a given label. @@ -2106,7 +2109,7 @@ cdef class DM(Object): cdef const char *cname = NULL name = str2bytes(name, &cname) cdef PetscBool coutput = PETSC_FALSE - CHKERR( DMGetLabelOutput(self.dm, cname, &coutput) ) + CHKERR(DMGetLabelOutput(self.dm, cname, &coutput)) return coutput # backward compatibility @@ -2117,8 +2120,7 @@ cdef class DM(Object): def setKSPComputeOperators( self, operators, args: tuple[Any, ...] 
| None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Matrix associated with the linear system. Collective. @@ -2141,7 +2143,7 @@ cdef class DM(Object): if kargs is None: kargs = {} context = (operators, args, kargs) self.set_attr('__operators__', context) - CHKERR( DMKSPSetComputeOperators(self.dm, KSP_ComputeOps, context) ) + CHKERR(DMKSPSetComputeOperators(self.dm, KSP_ComputeOps, context)) def createFieldDecomposition(self) -> tuple[list, list, list] : """Return a list of `IS` objects. @@ -2162,7 +2164,7 @@ cdef class DM(Object): cdef PetscDM *cdm = NULL cdef char** cnamelist = NULL - CHKERR( DMCreateFieldDecomposition(self.dm, &clen, &cnamelist, &cis, &cdm) ) + CHKERR(DMCreateFieldDecomposition(self.dm, &clen, &cnamelist, &cis, &cdm)) cdef list isets = [ref_IS(cis[i]) for i from 0 <= i < clen] cdef list dms = [] @@ -2173,29 +2175,28 @@ cdef class DM(Object): if cdm != NULL: dm = subtype_DM(cdm[i])() dm.dm = cdm[i] - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) dms.append(dm) else: dms.append(None) name = bytes2str(cnamelist[i]) names.append(name) - CHKERR( PetscFree(cnamelist[i]) ) + CHKERR(PetscFree(cnamelist[i])) - CHKERR( ISDestroy(&cis[i]) ) - CHKERR( DMDestroy(&cdm[i]) ) + CHKERR(ISDestroy(&cis[i])) + CHKERR(DMDestroy(&cdm[i])) - CHKERR( PetscFree(cis) ) - CHKERR( PetscFree(cdm) ) - CHKERR( PetscFree(cnamelist) ) + CHKERR(PetscFree(cis)) + CHKERR(PetscFree(cdm)) + CHKERR(PetscFree(cnamelist)) return (names, isets, dms) # TODO REVIEW def setSNESFunction( self, function: SNESFunction, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set `SNES` residual evaluation function. @@ -2220,15 +2221,14 @@ cdef class DM(Object): if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr('__function__', context) - CHKERR( DMSNESSetFunction(self.dm, SNES_Function, context) ) + CHKERR(DMSNESSetFunction(self.dm, SNES_Function, context)) else: - CHKERR( DMSNESSetFunction(self.dm, NULL, NULL) ) + CHKERR(DMSNESSetFunction(self.dm, NULL, NULL)) def setSNESJacobian( - self, jacobian: SNESJacobianFunction, - args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + self, jacobian: SNESJacobianFunction, + args: tuple[Any, ...] | None = None, + kargs: dict[str, Any] | None = None) -> None: """Set the `SNES` Jacobian evaluation function. Not collective. @@ -2252,17 +2252,16 @@ cdef class DM(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr('__jacobian__', context) - CHKERR( DMSNESSetJacobian(self.dm, SNES_Jacobian, context) ) + CHKERR(DMSNESSetJacobian(self.dm, SNES_Jacobian, context)) else: - CHKERR( DMSNESSetJacobian(self.dm, NULL, NULL) ) + CHKERR(DMSNESSetJacobian(self.dm, NULL, NULL)) def addCoarsenHook( self, coarsenhook: DMCoarsenHookFunction, restricthook: DMRestrictHookFunction, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Add a callback to be executed when restricting to a coarser grid. Logically collective. 
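For the DM-attached SNES callbacks above: the Python function is stashed on the DM and invoked with the conventional `(snes, x, f)` arguments, with `args`/`kargs` forwarded. A minimal sketch; the residual itself is a made-up toy:

    from petsc4py import PETSc

    def residual(snes, x, f, shift=0.0):
        # toy residual F(x) = x - shift
        x.copy(f)
        f.shift(-shift)

    da = PETSc.DMDA().create(dim=1, sizes=(8,))
    da.setSNESFunction(residual, kargs={'shift': 1.0})
    snes = PETSc.SNES().create()
    snes.setDM(da)   # the SNES picks the callback up from the DM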
@@ -2292,7 +2291,7 @@ cdef class DM(Object): coarsenhooks = self.get_attr('__coarsenhooks__') if coarsenhooks is None: coarsenhooks = [coarsencontext] - CHKERR( DMCoarsenHookAdd(self.dm, DM_PyCoarsenHook, NULL, NULL) ) + CHKERR(DMCoarsenHookAdd(self.dm, DM_PyCoarsenHook, NULL, NULL)) else: coarsenhooks.append(coarsencontext) self.set_attr('__coarsenhooks__', coarsenhooks) @@ -2303,7 +2302,7 @@ cdef class DM(Object): restricthooks = self.get_attr('__restricthooks__') if restricthooks is None: restricthooks = [restrictcontext] - CHKERR( DMCoarsenHookAdd(self.dm, NULL, DM_PyRestrictHook, NULL) ) + CHKERR(DMCoarsenHookAdd(self.dm, NULL, DM_PyRestrictHook, NULL)) else: restricthooks.append(restrictcontext) self.set_attr('__restricthooks__', restricthooks) @@ -2311,16 +2310,20 @@ cdef class DM(Object): # --- application context --- property appctx: - def __get__(self): + """Application context.""" + def __get__(self) -> object: return self.getAppCtx() + def __set__(self, value): self.setAppCtx(value) # --- discretization space --- property ds: - def __get__(self): + """Discrete space.""" + def __get__(self) -> DS: return self.getDS() + def __set__(self, value): self.setDS(value) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMComposite.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMComposite.pyx index d624a570354..0adf6fde34c 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMComposite.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMComposite.pyx @@ -20,8 +20,9 @@ cdef class DMComposite(DM): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newdm = NULL - CHKERR( DMCompositeCreate(ccomm, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMCompositeCreate(ccomm, &newdm)) + CHKERR(PetscCLEAR(self.obj)) + self.dm = newdm return self def addDM(self, DM dm, *args: DM) -> None: @@ -41,11 +42,11 @@ cdef class DMComposite(DM): petsc.DMCompositeAddDM """ - CHKERR( DMCompositeAddDM(self.dm, dm.dm) ) + CHKERR(DMCompositeAddDM(self.dm, dm.dm)) cdef object item for item in args: dm = item - CHKERR( DMCompositeAddDM(self.dm, dm.dm) ) + CHKERR(DMCompositeAddDM(self.dm, dm.dm)) def getNumber(self) -> int: """Get number of sub-DMs contained in the composite. 
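`addDM` is variadic: the first sub-DM is required and any further ones are appended in order. A sketch with two 1D DMDAs (sizes are arbitrary):

    from petsc4py import PETSc

    da0 = PETSc.DMDA().create(dim=1, sizes=(4,))
    da1 = PETSc.DMDA().create(dim=1, sizes=(6,))
    pack = PETSc.DMComposite().create()
    pack.addDM(da0, da1)
    print(pack.getNumber())   # -> 2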
@@ -58,7 +59,7 @@ cdef class DMComposite(DM): """ cdef PetscInt n = 0 - CHKERR( DMCompositeGetNumberDM(self.dm, &n) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &n)) return toInt(n) getNumberDM = getNumber @@ -74,15 +75,15 @@ cdef class DMComposite(DM): """ cdef PetscInt i, n = 0 cdef PetscDM *cdms = NULL - CHKERR( DMCompositeGetNumberDM(self.dm, &n) ) - cdef object tmp = oarray_p(empty_p(n), NULL, &cdms) - CHKERR( DMCompositeGetEntriesArray(self.dm, cdms) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &n)) + cdef object unused = oarray_p(empty_p(n), NULL, &cdms) + CHKERR(DMCompositeGetEntriesArray(self.dm, cdms)) cdef DM entry = None cdef list entries = [] for i from 0 <= i < n: entry = subtype_DM(cdms[i])() entry.dm = cdms[i] - CHKERR( PetscINCREF(entry.obj) ) + CHKERR(PetscINCREF(entry.obj)) entries.append(entry) return entries @@ -104,12 +105,12 @@ cdef class DMComposite(DM): """ cdef PetscInt i, n = 0 - CHKERR( DMCompositeGetNumberDM(self.dm, &n) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &n)) cdef PetscVec *clvecs = NULL - cdef object tmp = oarray_p(empty_p(n), NULL, &clvecs) + cdef object unused = oarray_p(empty_p(n), NULL, &clvecs) for i from 0 <= i < n: clvecs[i] = (lvecs[i]).vec - CHKERR( DMCompositeScatterArray(self.dm, gvec.vec, clvecs) ) + CHKERR(DMCompositeScatterArray(self.dm, gvec.vec, clvecs)) def gather(self, Vec gvec, imode: InsertModeSpec, lvecs: Sequence[Vec]) -> None: """Gather split local vectors into a coupled global vector. @@ -132,12 +133,12 @@ cdef class DMComposite(DM): """ cdef PetscInsertMode cimode = insertmode(imode) cdef PetscInt i, n = 0 - CHKERR( DMCompositeGetNumberDM(self.dm, &n) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &n)) cdef PetscVec *clvecs = NULL - cdef object tmp = oarray_p(empty_p(n), NULL, &clvecs) + cdef object unused = oarray_p(empty_p(n), NULL, &clvecs) for i from 0 <= i < n: clvecs[i] = (lvecs[i]).vec - CHKERR( DMCompositeGatherArray(self.dm, cimode, gvec.vec, clvecs) ) + CHKERR(DMCompositeGatherArray(self.dm, cimode, gvec.vec, clvecs)) def getGlobalISs(self) -> list[IS]: """Return the index sets for each composed object in the composite. 
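`scatter` and `gather` move data between the packed global vector and one local vector per sub-DM; a round-trip sketch in the same spirit as the previous example:

    from petsc4py import PETSc

    da0 = PETSc.DMDA().create(dim=1, sizes=(4,))
    da1 = PETSc.DMDA().create(dim=1, sizes=(6,))
    pack = PETSc.DMComposite().create()
    pack.addDM(da0, da1)
    gvec = pack.createGlobalVec()
    lvecs = [dm.createLocalVec() for dm in pack.getEntries()]
    pack.scatter(gvec, lvecs)                                  # split
    pack.gather(gvec, PETSc.InsertMode.INSERT_VALUES, lvecs)   # recombine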
@@ -158,12 +159,12 @@ cdef class DMComposite(DM): """ cdef PetscInt i, n = 0 cdef PetscIS *cis = NULL - CHKERR( DMCompositeGetNumberDM(self.dm, &n) ) - CHKERR( DMCompositeGetGlobalISs(self.dm, &cis) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &n)) + CHKERR(DMCompositeGetGlobalISs(self.dm, &cis)) cdef object isets = [ref_IS(cis[i]) for i from 0 <= i < n] for i from 0 <= i < n: - CHKERR( ISDestroy(&cis[i]) ) - CHKERR( PetscFree(cis) ) + CHKERR(ISDestroy(&cis[i])) + CHKERR(PetscFree(cis)) return isets def getLocalISs(self) -> list[IS]: @@ -184,12 +185,12 @@ cdef class DMComposite(DM): """ cdef PetscInt i, n = 0 cdef PetscIS *cis = NULL - CHKERR( DMCompositeGetNumberDM(self.dm, &n) ) - CHKERR( DMCompositeGetLocalISs(self.dm, &cis) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &n)) + CHKERR(DMCompositeGetLocalISs(self.dm, &cis)) cdef object isets = [ref_IS(cis[i]) for i from 0 <= i < n] for i from 0 <= i < n: - CHKERR( ISDestroy(&cis[i]) ) - CHKERR( PetscFree(cis) ) + CHKERR(ISDestroy(&cis[i])) + CHKERR(PetscFree(cis)) return isets def getLGMaps(self) -> list[LGMap]: @@ -207,12 +208,12 @@ cdef class DMComposite(DM): """ cdef PetscInt i, n = 0 cdef PetscLGMap *clgm = NULL - CHKERR( DMCompositeGetNumberDM(self.dm, &n) ) - CHKERR( DMCompositeGetISLocalToGlobalMappings(self.dm, &clgm) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &n)) + CHKERR(DMCompositeGetISLocalToGlobalMappings(self.dm, &clgm)) cdef object lgms = [ref_LGMap(clgm[i]) for i from 0 <= i < n] for i from 0 <= i < n: - CHKERR( ISLocalToGlobalMappingDestroy(&clgm[i]) ) - CHKERR( PetscFree(clgm) ) + CHKERR(ISLocalToGlobalMappingDestroy(&clgm[i])) + CHKERR(PetscFree(clgm)) return lgms def getAccess(self, Vec gvec, locs: Sequence[int] | None = None) -> Any: diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMDA.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMDA.pyx index 82894180cc2..9c205d397b4 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMDA.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMDA.pyx @@ -1,19 +1,25 @@ # -------------------------------------------------------------------- class DMDAStencilType(object): + """Stencil types.""" STAR = DMDA_STENCIL_STAR BOX = DMDA_STENCIL_BOX + class DMDAInterpolationType(object): + """Interpolation types.""" Q0 = DMDA_INTERPOLATION_Q0 Q1 = DMDA_INTERPOLATION_Q1 + class DMDAElementType(object): + """Element types.""" P1 = DMDA_ELEMENT_P1 Q1 = DMDA_ELEMENT_Q1 # -------------------------------------------------------------------- + cdef class DMDA(DM): """A DM object that is used to manage data for a structured grid.""" @@ -34,8 +40,7 @@ cdef class DMDA(DM): stencil_width: int | None = None, bint setup: bool = True, ownership_ranges: tuple[Sequence[int], ...] | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a ``DMDA`` object. Collective. 
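A sketch of the keyword-style `DMDA` creation whose signature is reflowed above (all values are illustrative):

    from petsc4py import PETSc

    da = PETSc.DMDA().create(
        dim=2,
        dof=2,
        sizes=(16, 16),    # global grid points per dimension
        stencil_type=PETSc.DMDA.StencilType.BOX,
        stencil_width=1,
        comm=PETSc.COMM_WORLD)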
@@ -132,12 +137,12 @@ cdef class DMDA(DM): # create the DMDA object cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newda = NULL - CHKERR( DMDACreateND(ccomm, ndim, ndof, - M, N, P, m, n, p, lx, ly, lz, - btx, bty, btz, stype, swidth, - &newda) ) - if setup and ndim > 0: CHKERR( DMSetUp(newda) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newda + CHKERR(DMDACreateND(ccomm, ndim, ndof, + M, N, P, m, n, p, lx, ly, lz, + btx, bty, btz, stype, swidth, + &newda)) + if setup and ndim > 0: CHKERR(DMSetUp(newda)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newda return self def duplicate( @@ -145,8 +150,7 @@ cdef class DMDA(DM): dof: int | None = None, boundary_type: tuple[DM.BoundaryType | int | str | bool, ...] | None = None, stencil_type: StencilType | None = None, - stencil_width: int | None = None, - ) -> DMDA: + stencil_width: int | None = None) -> DMDA: """Duplicate a DMDA. Collective. @@ -179,17 +183,17 @@ cdef class DMDA(DM): cdef PetscDMBoundaryType btz = DM_BOUNDARY_NONE cdef PetscDMDAStencilType stype = DMDA_STENCIL_BOX cdef PetscInt swidth = PETSC_DECIDE - CHKERR( DMDAGetInfo(self.dm, - &ndim, - &M, &N, &P, - &m, &n, &p, - &ndof, &swidth, - &btx, &bty, &btz, - &stype) ) + CHKERR(DMDAGetInfo(self.dm, + &ndim, + &M, &N, &P, + &m, &n, &p, + &ndof, &swidth, + &btx, &bty, &btz, + &stype)) cdef const PetscInt *lx = NULL, *ly = NULL, *lz = NULL - CHKERR( DMDAGetOwnershipRanges(self.dm, &lx, &ly, &lz) ) + CHKERR(DMDAGetOwnershipRanges(self.dm, &lx, &ly, &lz)) cdef MPI_Comm comm = MPI_COMM_NULL - CHKERR( PetscObjectGetComm(self.dm, &comm) ) + CHKERR(PetscObjectGetComm(self.dm, &comm)) # if dof is not None: ndof = asInt(dof) @@ -201,11 +205,11 @@ cdef class DMDA(DM): swidth = asInt(stencil_width) # cdef DMDA da = DMDA() - CHKERR( DMDACreateND(comm, ndim, ndof, - M, N, P, m, n, p, lx, ly, lz, - btx, bty, btz, stype, swidth, - &da.dm) ) - CHKERR( DMSetUp(da.dm) ) + CHKERR(DMDACreateND(comm, ndim, ndof, + M, N, P, m, n, p, lx, ly, lz, + btx, bty, btz, stype, swidth, + &da.dm)) + CHKERR(DMSetUp(da.dm)) return da # @@ -255,7 +259,7 @@ cdef class DMDA(DM): """ cdef PetscInt ndof = asInt(dof) - CHKERR( DMDASetDof(self.dm, ndof) ) + CHKERR(DMDASetDof(self.dm, ndof)) def getDof(self) -> int: """Return the number of degrees of freedom per node. @@ -268,13 +272,13 @@ cdef class DMDA(DM): """ cdef PetscInt dof = 0 - CHKERR( DMDAGetInfo(self.dm, - NULL, - NULL, NULL, NULL, - NULL, NULL, NULL, - &dof, NULL, - NULL, NULL, NULL, - NULL) ) + CHKERR(DMDAGetInfo(self.dm, + NULL, + NULL, NULL, NULL, + NULL, NULL, NULL, + &dof, NULL, + NULL, NULL, NULL, + NULL)) return toInt(dof) def setSizes(self, sizes: DimsSpec) -> None: @@ -299,10 +303,10 @@ cdef class DMDA(DM): cdef PetscInt P = 1 gdim = asDims(gsizes, &M, &N, &P) cdef PetscInt dim = PETSC_DECIDE - CHKERR( DMDAGetDim(self.dm, &dim) ) + CHKERR(DMDAGetDim(self.dm, &dim)) if dim == PETSC_DECIDE: - CHKERR( DMSetDimension(self.dm, gdim) ) - CHKERR( DMDASetSizes(self.dm, M, N, P) ) + CHKERR(DMSetDimension(self.dm, gdim)) + CHKERR(DMDASetSizes(self.dm, M, N, P)) def getSizes(self) -> tuple[int, ...]: """Return the number of grid points in each dimension. 
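`duplicate` copies the grid and process layout while letting individual properties be overridden, as in this sketch:

    from petsc4py import PETSc

    da = PETSc.DMDA().create(dim=2, sizes=(8, 8), dof=1, stencil_width=1)
    da2 = da.duplicate(stencil_type=PETSc.DMDA.StencilType.STAR,
                       stencil_width=2)   # same grid, wider star stencil
    print(da2.getStencilWidth())          # -> 2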
@@ -318,13 +322,13 @@ cdef class DMDA(DM): cdef PetscInt M = PETSC_DECIDE cdef PetscInt N = PETSC_DECIDE cdef PetscInt P = PETSC_DECIDE - CHKERR( DMDAGetInfo(self.dm, - &dim, - &M, &N, &P, - NULL, NULL, NULL, - NULL, NULL, - NULL, NULL, NULL, - NULL) ) + CHKERR(DMDAGetInfo(self.dm, + &dim, + &M, &N, &P, + NULL, NULL, NULL, + NULL, NULL, + NULL, NULL, NULL, + NULL)) return toDims(dim, M, N, P) def setProcSizes(self, proc_sizes: DimsSpec) -> None: @@ -349,10 +353,10 @@ cdef class DMDA(DM): cdef PetscInt p = PETSC_DECIDE pdim = asDims(psizes, &m, &n, &p) cdef PetscInt dim = PETSC_DECIDE - CHKERR( DMDAGetDim(self.dm, &dim) ) + CHKERR(DMDAGetDim(self.dm, &dim)) if dim == PETSC_DECIDE: - CHKERR( DMSetDimension(self.dm, pdim) ) - CHKERR( DMDASetNumProcs(self.dm, m, n, p) ) + CHKERR(DMSetDimension(self.dm, pdim)) + CHKERR(DMDASetNumProcs(self.dm, m, n, p)) def getProcSizes(self) -> tuple[int, ...]: """Return the number of processes in each dimension. @@ -368,19 +372,18 @@ cdef class DMDA(DM): cdef PetscInt m = PETSC_DECIDE cdef PetscInt n = PETSC_DECIDE cdef PetscInt p = PETSC_DECIDE - CHKERR( DMDAGetInfo(self.dm, - &dim, - NULL, NULL, NULL, - &m, &n, &p, - NULL, NULL, - NULL, NULL, NULL, - NULL) ) + CHKERR(DMDAGetInfo(self.dm, + &dim, + NULL, NULL, NULL, + &m, &n, &p, + NULL, NULL, + NULL, NULL, NULL, + NULL)) return toDims(dim, m, n, p) def setBoundaryType( self, - boundary_type: tuple[DM.BoundaryType | int | str | bool, ...], - ) -> None: + boundary_type: tuple[DM.BoundaryType | int | str | bool, ...]) -> None: """Set the type of ghost nodes on domain boundaries. Not collective. @@ -399,7 +402,7 @@ cdef class DMDA(DM): cdef PetscDMBoundaryType bty = DM_BOUNDARY_NONE cdef PetscDMBoundaryType btz = DM_BOUNDARY_NONE asBoundary(boundary_type, &btx, &bty, &btz) - CHKERR( DMDASetBoundaryType(self.dm, btx, bty, btz) ) + CHKERR(DMDASetBoundaryType(self.dm, btx, bty, btz)) def getBoundaryType(self) -> tuple[DM.BoundaryType, ...]: """Return the type of ghost nodes at boundary in each dimension. @@ -415,13 +418,13 @@ cdef class DMDA(DM): cdef PetscDMBoundaryType btx = DM_BOUNDARY_NONE cdef PetscDMBoundaryType bty = DM_BOUNDARY_NONE cdef PetscDMBoundaryType btz = DM_BOUNDARY_NONE - CHKERR( DMDAGetInfo(self.dm, - &dim, - NULL, NULL, NULL, - NULL, NULL, NULL, - NULL, NULL, - &btx, &bty, &btz, - NULL) ) + CHKERR(DMDAGetInfo(self.dm, + &dim, + NULL, NULL, NULL, + NULL, NULL, NULL, + NULL, NULL, + &btx, &bty, &btz, + NULL)) return toDims(dim, btx, bty, btz) def setStencilType(self, stencil_type: StencilType) -> None: @@ -440,7 +443,7 @@ cdef class DMDA(DM): """ cdef PetscDMDAStencilType stype = asStencil(stencil_type) - CHKERR( DMDASetStencilType(self.dm, stype) ) + CHKERR(DMDASetStencilType(self.dm, stype)) def getStencilType(self) -> StencilType: """Return the stencil type. @@ -453,13 +456,13 @@ cdef class DMDA(DM): """ cdef PetscDMDAStencilType stype = DMDA_STENCIL_BOX - CHKERR( DMDAGetInfo(self.dm, - NULL, - NULL, NULL, NULL, - NULL, NULL, NULL, - NULL, NULL, - NULL, NULL, NULL, - &stype) ) + CHKERR(DMDAGetInfo(self.dm, + NULL, + NULL, NULL, NULL, + NULL, NULL, NULL, + NULL, NULL, + NULL, NULL, NULL, + &stype)) return stype def setStencilWidth(self, stencil_width: int) -> None: @@ -478,7 +481,7 @@ cdef class DMDA(DM): """ cdef PetscInt swidth = asInt(stencil_width) - CHKERR( DMDASetStencilWidth(self.dm, swidth) ) + CHKERR(DMDASetStencilWidth(self.dm, swidth)) def getStencilWidth(self) -> int: """Return the stencil width. 
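The setters above have to run before the DMDA is set up, hence `setup=False` in this sketch:

    from petsc4py import PETSc

    da = PETSc.DMDA().create(dim=2, sizes=(8, 8), setup=False)
    da.setBoundaryType((PETSc.DM.BoundaryType.PERIODIC,
                        PETSc.DM.BoundaryType.NONE))
    da.setStencil(PETSc.DMDA.StencilType.STAR, 1)
    da.setUp()
    print(da.getBoundaryType(), da.getStencil())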
@@ -491,20 +494,19 @@ cdef class DMDA(DM): """ cdef PetscInt swidth = 0 - CHKERR( DMDAGetInfo(self.dm, - NULL, - NULL, NULL, NULL, - NULL, NULL, NULL, - NULL, &swidth, - NULL, NULL, NULL, - NULL) ) + CHKERR(DMDAGetInfo(self.dm, + NULL, + NULL, NULL, NULL, + NULL, NULL, NULL, + NULL, &swidth, + NULL, NULL, NULL, + NULL)) return toInt(swidth) def setStencil( self, stencil_type: StencilType, - stencil_width: int, - ) -> None: + stencil_width: int) -> None: """Set the stencil type and width. Not collective. @@ -524,8 +526,8 @@ cdef class DMDA(DM): """ cdef PetscDMDAStencilType stype = asStencil(stencil_type) cdef PetscInt swidth = asInt(stencil_width) - CHKERR( DMDASetStencilType(self.dm, stype) ) - CHKERR( DMDASetStencilWidth(self.dm, swidth) ) + CHKERR(DMDASetStencilType(self.dm, stype)) + CHKERR(DMDASetStencilWidth(self.dm, swidth)) def getStencil(self) -> tuple[StencilType, int]: """Return the stencil type and width. @@ -539,13 +541,13 @@ cdef class DMDA(DM): """ cdef PetscDMDAStencilType stype = DMDA_STENCIL_BOX cdef PetscInt swidth = 0 - CHKERR( DMDAGetInfo(self.dm, - NULL, - NULL, NULL, NULL, - NULL, NULL, NULL, - NULL, &swidth, - NULL, NULL, NULL, - &stype) ) + CHKERR(DMDAGetInfo(self.dm, + NULL, + NULL, NULL, NULL, + NULL, NULL, NULL, + NULL, &swidth, + NULL, NULL, NULL, + &stype)) return (toStencil(stype), toInt(swidth)) # @@ -564,10 +566,10 @@ cdef class DMDA(DM): """ cdef PetscInt dim=0, x=0, y=0, z=0, m=0, n=0, p=0 - CHKERR( DMDAGetDim(self.dm, &dim) ) - CHKERR( DMDAGetCorners(self.dm, - &x, &y, &z, - &m, &n, &p) ) + CHKERR(DMDAGetDim(self.dm, &dim)) + CHKERR(DMDAGetCorners(self.dm, + &x, &y, &z, + &m, &n, &p)) return ((toInt(x), toInt(x+m)), (toInt(y), toInt(y+n)), (toInt(z), toInt(z+p)))[:dim] @@ -584,10 +586,10 @@ cdef class DMDA(DM): """ cdef PetscInt dim=0, x=0, y=0, z=0, m=0, n=0, p=0 - CHKERR( DMDAGetDim(self.dm, &dim) ) - CHKERR( DMDAGetGhostCorners(self.dm, - &x, &y, &z, - &m, &n, &p) ) + CHKERR(DMDAGetDim(self.dm, &dim)) + CHKERR(DMDAGetGhostCorners(self.dm, + &x, &y, &z, + &m, &n, &p)) return ((toInt(x), toInt(x+m)), (toInt(y), toInt(y+n)), (toInt(z), toInt(z+p)))[:dim] @@ -607,14 +609,14 @@ cdef class DMDA(DM): """ cdef PetscInt dim=0, m=0, n=0, p=0 cdef const PetscInt *lx = NULL, *ly = NULL, *lz = NULL - CHKERR( DMDAGetInfo(self.dm, - &dim, - NULL, NULL, NULL, - &m, &n, &p, - NULL, NULL, - NULL, NULL, NULL, - NULL) ) - CHKERR( DMDAGetOwnershipRanges(self.dm, &lx, &ly, &lz) ) + CHKERR(DMDAGetInfo(self.dm, + &dim, + NULL, NULL, NULL, + &m, &n, &p, + NULL, NULL, + NULL, NULL, NULL, + NULL)) + CHKERR(DMDAGetOwnershipRanges(self.dm, &lx, &ly, &lz)) return toOwnershipRanges(dim, m, n, p, lx, ly, lz) def getCorners(self) -> tuple[tuple[int, ...], tuple[int, ...]]: @@ -639,10 +641,10 @@ cdef class DMDA(DM): """ cdef PetscInt dim=0, x=0, y=0, z=0, m=0, n=0, p=0 - CHKERR( DMDAGetDim(self.dm, &dim) ) - CHKERR( DMDAGetCorners(self.dm, - &x, &y, &z, - &m, &n, &p) ) + CHKERR(DMDAGetDim(self.dm, &dim)) + CHKERR(DMDAGetCorners(self.dm, + &x, &y, &z, + &m, &n, &p)) return ((toInt(x), toInt(y), toInt(z))[:dim], (toInt(m), toInt(n), toInt(p))[:dim]) @@ -661,10 +663,10 @@ cdef class DMDA(DM): """ cdef PetscInt dim=0, x=0, y=0, z=0, m=0, n=0, p=0 - CHKERR( DMDAGetDim(self.dm, &dim) ) - CHKERR( DMDAGetGhostCorners(self.dm, - &x, &y, &z, - &m, &n, &p) ) + CHKERR(DMDAGetDim(self.dm, &dim)) + CHKERR(DMDAGetGhostCorners(self.dm, + &x, &y, &z, + &m, &n, &p)) return ((toInt(x), toInt(y), toInt(z))[:dim], (toInt(m), toInt(n), toInt(p))[:dim]) @@ -692,7 +694,7 @@ cdef class DMDA(DM): cdef PetscInt 
ival = asInt(field) cdef const char *cval = NULL name = str2bytes(name, &cval) - CHKERR( DMDASetFieldName(self.dm, ival, cval) ) + CHKERR(DMDASetFieldName(self.dm, ival, cval)) def getFieldName(self, field: int) -> str: """Return the name of an individual field component. @@ -713,7 +715,7 @@ """ cdef PetscInt ival = asInt(field) cdef const char *cval = NULL - CHKERR( DMDAGetFieldName(self.dm, ival, &cval) ) + CHKERR(DMDAGetFieldName(self.dm, ival, &cval)) return bytes2str(cval) # @@ -742,8 +744,7 @@ ymin: float = 0, ymax: float = 1, zmin: float = 0, - zmax: float = 1, - ) -> None: + zmax: float = 1) -> None: """Set the DMDA coordinates to be a uniform grid. Collective. @@ -775,10 +776,10 @@ cdef PetscReal _xmin = asReal(xmin), _xmax = asReal(xmax) cdef PetscReal _ymin = asReal(ymin), _ymax = asReal(ymax) cdef PetscReal _zmin = asReal(zmin), _zmax = asReal(zmax) - CHKERR( DMDASetUniformCoordinates(self.dm, - _xmin, _xmax, - _ymin, _ymax, - _zmin, _zmax) ) + CHKERR(DMDASetUniformCoordinates(self.dm, + _xmin, _xmax, + _ymin, _ymax, + _zmin, _zmax)) def setCoordinateName(self, index: int, name: str) -> None: """Set the name of the coordinate dimension. @@ -800,7 +801,7 @@ cdef PetscInt ival = asInt(index) cdef const char *cval = NULL name = str2bytes(name, &cval) - CHKERR( DMDASetCoordinateName(self.dm, ival, cval) ) + CHKERR(DMDASetCoordinateName(self.dm, ival, cval)) def getCoordinateName(self, index: int) -> str: """Return the name of a coordinate dimension. @@ -819,7 +820,7 @@ """ cdef PetscInt ival = asInt(index) cdef const char *cval = NULL - CHKERR( DMDAGetCoordinateName(self.dm, ival, &cval) ) + CHKERR(DMDAGetCoordinateName(self.dm, ival, &cval)) return bytes2str(cval) # @@ -838,15 +839,14 @@ """ cdef Vec vn = Vec() - CHKERR( DMDACreateNaturalVector(self.dm, &vn.vec) ) + CHKERR(DMDACreateNaturalVector(self.dm, &vn.vec)) return vn def globalToNatural( self, Vec vg, Vec vn, - addv: InsertMode | None = None, - ) -> None: + addv: InsertMode | None = None) -> None: """Map values to the "natural" grid ordering. Neighborwise collective. @@ -869,15 +869,14 @@ """ cdef PetscInsertMode im = insertmode(addv) - CHKERR( DMDAGlobalToNaturalBegin(self.dm, vg.vec, im, vn.vec) ) - CHKERR( DMDAGlobalToNaturalEnd (self.dm, vg.vec, im, vn.vec) ) + CHKERR(DMDAGlobalToNaturalBegin(self.dm, vg.vec, im, vn.vec)) + CHKERR(DMDAGlobalToNaturalEnd(self.dm, vg.vec, im, vn.vec)) def naturalToGlobal( self, Vec vn, Vec vg, - addv: InsertMode | None = None, - ) -> None: + addv: InsertMode | None = None) -> None: """Map values to the grid ordering. Neighborwise collective.
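A round-trip sketch for the natural-ordering scatters above, mapping between the PETSc (per-process) ordering and the lexicographic "natural" ordering:

    from petsc4py import PETSc

    da = PETSc.DMDA().create(dim=2, sizes=(4, 4))
    vg = da.createGlobalVec()
    vg.set(2.0)
    vn = da.createNaturalVec()
    da.globalToNatural(vg, vn)   # PETSc ordering -> natural ordering
    da.naturalToGlobal(vn, vg)   # and back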
@@ -898,8 +897,8 @@ cdef class DMDA(DM): """ cdef PetscInsertMode im = insertmode(addv) - CHKERR( DMDANaturalToGlobalBegin(self.dm, vn.vec, im, vg.vec) ) - CHKERR( DMDANaturalToGlobalEnd (self.dm, vn.vec, im, vg.vec) ) + CHKERR(DMDANaturalToGlobalBegin(self.dm, vn.vec, im, vg.vec)) + CHKERR(DMDANaturalToGlobalEnd(self.dm, vn.vec, im, vg.vec)) # @@ -920,8 +919,8 @@ cdef class DMDA(DM): """ cdef AO ao = AO() - CHKERR( DMDAGetAO(self.dm, &ao.ao) ) - CHKERR( PetscINCREF(ao.obj) ) + CHKERR(DMDAGetAO(self.dm, &ao.ao)) + CHKERR(PetscINCREF(ao.obj)) return ao def getScatter(self) -> tuple[Scatter, Scatter]: @@ -936,9 +935,9 @@ cdef class DMDA(DM): """ cdef Scatter l2g = Scatter() cdef Scatter g2l = Scatter() - CHKERR( DMDAGetScatter(self.dm, &l2g.sct, &g2l.sct) ) - CHKERR( PetscINCREF(l2g.obj) ) - CHKERR( PetscINCREF(g2l.obj) ) + CHKERR(DMDAGetScatter(self.dm, &l2g.sct, &g2l.sct)) + CHKERR(PetscINCREF(l2g.obj)) + CHKERR(PetscINCREF(g2l.obj)) return (l2g, g2l) # @@ -947,8 +946,7 @@ cdef class DMDA(DM): self, refine_x: int = 2, refine_y: int = 2, - refine_z: int = 2, - ) -> None: + refine_z: int = 2) -> None: """Set the ratios for the DMDA grid refinement. Logically collective. @@ -971,10 +969,10 @@ cdef class DMDA(DM): refine[0] = asInt(refine_x) refine[1] = asInt(refine_y) refine[2] = asInt(refine_z) - CHKERR( DMDASetRefinementFactor(self.dm, - refine[0], - refine[1], - refine[2]) ) + CHKERR(DMDASetRefinementFactor(self.dm, + refine[0], + refine[1], + refine[2])) def getRefinementFactor(self) -> tuple[int, ...]: """Return the ratios that the DMDA grid is refined in each dimension. @@ -986,12 +984,12 @@ cdef class DMDA(DM): setRefinementFactor, petsc.DMDAGetRefinementFactor """ - cdef PetscInt i, dim = 0, refine[3] - CHKERR( DMDAGetDim(self.dm, &dim) ) - CHKERR( DMDAGetRefinementFactor(self.dm, - &refine[0], - &refine[1], - &refine[2]) ) + cdef PetscInt dim = 0, refine[3] + CHKERR(DMDAGetDim(self.dm, &dim)) + CHKERR(DMDAGetRefinementFactor(self.dm, + &refine[0], + &refine[1], + &refine[2])) return tuple([toInt(refine[i]) for 0 <= i < dim]) def setInterpolationType(self, interp_type: InterpolationType) -> None: @@ -1013,7 +1011,7 @@ cdef class DMDA(DM): """ cdef PetscDMDAInterpolationType ival = dainterpolationtype(interp_type) - CHKERR( DMDASetInterpolationType(self.dm, ival) ) + CHKERR(DMDASetInterpolationType(self.dm, ival)) def getInterpolationType(self) -> InterpolationType: """Return the type of interpolation. 
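`setRefinementFactor` changes what a subsequent `refine` produces; a sketch (for a non-periodic grid the fine size should come out near factor times the coarse size, e.g. 13 points from 5 with factor 3):

    from petsc4py import PETSc

    da = PETSc.DMDA().create(dim=2, sizes=(5, 5))
    da.setRefinementFactor(refine_x=3, refine_y=3)
    fine = da.refine()
    print(da.getRefinementFactor())   # -> (3, 3)
    print(fine.getSizes())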
@@ -1026,7 +1024,7 @@ cdef class DMDA(DM): """ cdef PetscDMDAInterpolationType ival = DMDA_INTERPOLATION_Q0 - CHKERR( DMDAGetInterpolationType(self.dm, &ival) ) + CHKERR(DMDAGetInterpolationType(self.dm, &ival)) return ival # @@ -1042,7 +1040,7 @@ cdef class DMDA(DM): """ cdef PetscDMDAElementType ival = daelementtype(elem_type) - CHKERR( DMDASetElementType(self.dm, ival) ) + CHKERR(DMDASetElementType(self.dm, ival)) # FIXME: Return type def getElementType(self) -> ElementType: @@ -1056,7 +1054,7 @@ cdef class DMDA(DM): """ cdef PetscDMDAElementType ival = DMDA_ELEMENT_Q1 - CHKERR( DMDAGetElementType(self.dm, &ival) ) + CHKERR(DMDAGetElementType(self.dm, &ival)) return ival def getElements(self, elem_type: ElementType | None = None) -> ArrayInt: @@ -1084,16 +1082,16 @@ cdef class DMDA(DM): cdef PetscInt nel=0, nen=0 cdef const PetscInt *elems=NULL cdef object elements - CHKERR( DMDAGetDim(self.dm, &dim) ) + CHKERR(DMDAGetDim(self.dm, &dim)) if elem_type is not None: etype = daelementtype(elem_type) - CHKERR( DMDASetElementType(self.dm, etype) ) + CHKERR(DMDASetElementType(self.dm, etype)) try: - CHKERR( DMDAGetElements(self.dm, &nel, &nen, &elems) ) + CHKERR(DMDAGetElements(self.dm, &nel, &nen, &elems)) elements = array_i(nel*nen, elems) elements.shape = (toInt(nel), toInt(nen)) finally: - CHKERR( DMDARestoreElements(self.dm, &nel, &nen, &elems) ) + CHKERR(DMDARestoreElements(self.dm, &nel, &nen, &elems)) return elements # diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMLabel.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMLabel.pyx index 718c80d77b1..cb2df4015af 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMLabel.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMLabel.pyx @@ -15,7 +15,7 @@ cdef class DMLabel(Object): petsc.DMLabelDestroy """ - CHKERR( DMLabelDestroy(&self.dmlabel) ) + CHKERR(DMLabelDestroy(&self.dmlabel)) return self def view(self, Viewer viewer=None) -> None: @@ -35,7 +35,7 @@ cdef class DMLabel(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( DMLabelView(self.dmlabel, vwr) ) + CHKERR(DMLabelView(self.dmlabel, vwr)) def create(self, name: str, comm: Comm | None = None) -> Self: """Create a `DMLabel` object, which is a multimap. @@ -58,8 +58,8 @@ cdef class DMLabel(Object): cdef PetscDMLabel newdmlabel = NULL cdef const char *cname = NULL name = str2bytes(name, &cname) - CHKERR( DMLabelCreate(ccomm, cname, &newdmlabel) ) - CHKERR( PetscCLEAR(self.obj) ); self.dmlabel = newdmlabel + CHKERR(DMLabelCreate(ccomm, cname, &newdmlabel)) + CHKERR(PetscCLEAR(self.obj)); self.dmlabel = newdmlabel return self def duplicate(self) -> DMLabel: @@ -73,7 +73,7 @@ cdef class DMLabel(Object): """ cdef DMLabel new = DMLabel() - CHKERR( DMLabelDuplicate(self.dmlabel, &new.dmlabel) ) + CHKERR(DMLabelDuplicate(self.dmlabel, &new.dmlabel)) return new def reset(self) -> None: @@ -86,7 +86,7 @@ cdef class DMLabel(Object): petsc.DMLabelReset """ - CHKERR( DMLabelReset(self.dmlabel) ) + CHKERR(DMLabelReset(self.dmlabel)) def insertIS(self, IS iset, value: int) -> Self: """Set all points in the `IS` to a value. 
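A standalone `DMLabel` sketch for the create/set/get hunks above; unlabeled points report the default value, -1 unless changed:

    from petsc4py import PETSc

    label = PETSc.DMLabel().create("material")
    label.setValue(0, 7)          # point 0 -> value 7
    label.setValue(1, 7)
    print(label.getValue(0))      # -> 7
    print(label.getValue(2))      # -> -1, the default for unlabeled points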
@@ -106,7 +106,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cvalue = asInt(value) - CHKERR( DMLabelInsertIS(self.dmlabel, iset.iset, cvalue) ) + CHKERR(DMLabelInsertIS(self.dmlabel, iset.iset, cvalue)) return self def setValue(self, point: int, value: int) -> None: @@ -132,7 +132,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt cvalue = asInt(value) - CHKERR( DMLabelSetValue(self.dmlabel, cpoint, cvalue) ) + CHKERR(DMLabelSetValue(self.dmlabel, cpoint, cvalue)) def getValue(self, point: int) -> int: """Return the value a label assigns to a point. @@ -155,7 +155,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt cvalue = 0 - CHKERR( DMLabelGetValue(self.dmlabel, cpoint, &cvalue) ) + CHKERR(DMLabelGetValue(self.dmlabel, cpoint, &cvalue)) return toInt(cvalue) def getDefaultValue(self) -> int: @@ -172,7 +172,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cvalue = 0 - CHKERR( DMLabelGetDefaultValue(self.dmlabel, &cvalue) ) + CHKERR(DMLabelGetDefaultValue(self.dmlabel, &cvalue)) return toInt(cvalue) def setDefaultValue(self, value: int) -> None: @@ -194,7 +194,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cvalue = asInt(value) - CHKERR( DMLabelSetDefaultValue(self.dmlabel, cvalue) ) + CHKERR(DMLabelSetDefaultValue(self.dmlabel, cvalue)) def clearValue(self, point: int, value: int) -> None: """Clear the value a label assigns to a point. @@ -215,11 +215,13 @@ cdef class DMLabel(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt cvalue = asInt(value) - CHKERR( DMLabelClearValue(self.dmlabel, cpoint, cvalue) ) + CHKERR(DMLabelClearValue(self.dmlabel, cpoint, cvalue)) def addStratum(self, value: int) -> None: """Add a new stratum value in a `DMLabel`. + Not collective. + Parameters ---------- value @@ -231,7 +233,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cvalue = asInt(value) - CHKERR( DMLabelAddStratum(self.dmlabel, cvalue) ) + CHKERR(DMLabelAddStratum(self.dmlabel, cvalue)) def addStrata(self, strata: Sequence[int]) -> None: """Add new stratum values in a `DMLabel`. @@ -250,8 +252,8 @@ cdef class DMLabel(Object): """ cdef PetscInt *istrata = NULL cdef PetscInt numStrata = 0 - fields = iarray_i(strata, &numStrata, &istrata) - CHKERR( DMLabelAddStrata(self.dmlabel, numStrata, istrata) ) + strata = iarray_i(strata, &numStrata, &istrata) + CHKERR(DMLabelAddStrata(self.dmlabel, numStrata, istrata)) def addStrataIS(self, IS iset) -> None: """Add new stratum values in a `DMLabel`. @@ -268,7 +270,7 @@ cdef class DMLabel(Object): addStrata, addStratum, petsc.DMLabelAddStrataIS """ - CHKERR( DMLabelAddStrataIS(self.dmlabel, iset.iset) ) + CHKERR(DMLabelAddStrataIS(self.dmlabel, iset.iset)) def getNumValues(self) -> int: """Return the number of values that the `DMLabel` takes. 
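Strata group points by value; a sketch combining `addStrata` with `insertIS` (the stride IS is arbitrary):

    from petsc4py import PETSc

    label = PETSc.DMLabel().create("regions")
    label.addStrata([1, 2, 5])                 # declare stratum values
    iset = PETSc.IS().createStride(4, first=10, step=1)
    label.insertIS(iset, 2)                    # points 10..13 -> value 2
    print(label.getStratumSize(2))             # -> 4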
@@ -281,7 +283,7 @@ cdef class DMLabel(Object): """ cdef PetscInt numValues = 0 - CHKERR( DMLabelGetNumValues(self.dmlabel, &numValues) ) + CHKERR(DMLabelGetNumValues(self.dmlabel, &numValues)) return toInt(numValues) def getValueIS(self) -> IS: @@ -295,7 +297,7 @@ cdef class DMLabel(Object): """ cdef IS iset = IS() - CHKERR( DMLabelGetValueIS(self.dmlabel, &iset.iset) ) + CHKERR(DMLabelGetValueIS(self.dmlabel, &iset.iset)) return iset def stratumHasPoint(self, value: int, point: int) -> bool: @@ -318,7 +320,7 @@ cdef class DMLabel(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt cvalue = asInt(value) cdef PetscBool ccontains = PETSC_FALSE - CHKERR( DMLabelStratumHasPoint(self.dmlabel, cvalue, cpoint, &ccontains) ) + CHKERR(DMLabelStratumHasPoint(self.dmlabel, cvalue, cpoint, &ccontains)) return toBool(ccontains) def hasStratum(self, value: int) -> bool: @@ -338,7 +340,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cvalue = asInt(value) cdef PetscBool cexists = PETSC_FALSE - CHKERR( DMLabelHasStratum(self.dmlabel, cvalue, &cexists) ) + CHKERR(DMLabelHasStratum(self.dmlabel, cvalue, &cexists)) return toBool(cexists) def getStratumSize(self, stratum: int) -> int: @@ -358,7 +360,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cstratum = asInt(stratum) cdef PetscInt csize = 0 - CHKERR( DMLabelGetStratumSize(self.dmlabel, cstratum, &csize) ) + CHKERR(DMLabelGetStratumSize(self.dmlabel, cstratum, &csize)) return toInt(csize) def getStratumIS(self, stratum: int) -> IS: @@ -378,7 +380,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cstratum = asInt(stratum) cdef IS iset = IS() - CHKERR( DMLabelGetStratumIS(self.dmlabel, cstratum, &iset.iset) ) + CHKERR(DMLabelGetStratumIS(self.dmlabel, cstratum, &iset.iset)) return iset def setStratumIS(self, stratum: int, IS iset) -> None: @@ -399,7 +401,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cstratum = asInt(stratum) - CHKERR( DMLabelSetStratumIS(self.dmlabel, cstratum, iset.iset) ) + CHKERR(DMLabelSetStratumIS(self.dmlabel, cstratum, iset.iset)) def clearStratum(self, stratum: int) -> None: """Remove a stratum. @@ -417,7 +419,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cstratum = asInt(stratum) - CHKERR( DMLabelClearStratum(self.dmlabel, cstratum) ) + CHKERR(DMLabelClearStratum(self.dmlabel, cstratum)) def computeIndex(self) -> None: """Create an index structure for membership determination. @@ -431,7 +433,7 @@ cdef class DMLabel(Object): petsc.DMLabelComputeIndex """ - CHKERR( DMLabelComputeIndex(self.dmlabel) ) + CHKERR(DMLabelComputeIndex(self.dmlabel)) def createIndex(self, pStart: int, pEnd: int) -> None: """Create an index structure for membership determination. @@ -451,7 +453,7 @@ cdef class DMLabel(Object): """ cdef PetscInt cpstart = asInt(pStart), cpend = asInt(pEnd) - CHKERR( DMLabelCreateIndex(self.dmlabel, cpstart, cpend) ) + CHKERR(DMLabelCreateIndex(self.dmlabel, cpstart, cpend)) def destroyIndex(self) -> None: """Destroy the index structure. @@ -463,7 +465,7 @@ cdef class DMLabel(Object): createIndex, petsc.DMLabelDestroyIndex """ - CHKERR( DMLabelDestroyIndex(self.dmlabel) ) + CHKERR(DMLabelDestroyIndex(self.dmlabel)) def hasValue(self, value: int) -> bool: """Determine whether a label assigns the value to any point. 
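`createIndex` builds the membership structure that makes point queries cheap; a sketch over an assumed chart of [0, 10):

    from petsc4py import PETSc

    label = PETSc.DMLabel().create("marked")
    label.setValue(3, 1)
    label.createIndex(0, 10)      # index the points in [0, 10)
    print(label.hasPoint(3))      # -> True
    print(label.hasValue(1))      # -> True
    label.destroyIndex()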
@@ -482,7 +484,7 @@ """ cdef PetscInt cvalue = asInt(value) cdef PetscBool cexists = PETSC_FALSE - CHKERR( DMLabelHasValue(self.dmlabel, cvalue, &cexists) ) + CHKERR(DMLabelHasValue(self.dmlabel, cvalue, &cexists)) return toBool(cexists) def hasPoint(self, point: int) -> bool: @@ -504,7 +506,7 @@ """ cdef PetscInt cpoint = asInt(point) cdef PetscBool cexists = PETSC_FALSE - CHKERR( DMLabelHasPoint(self.dmlabel, cpoint, &cexists) ) + CHKERR(DMLabelHasPoint(self.dmlabel, cpoint, &cexists)) return toBool(cexists) def getBounds(self) -> tuple[int, int]: @@ -520,7 +522,7 @@ """ cdef PetscInt cpstart = 0, cpend = 0 - CHKERR( DMLabelGetBounds(self.dmlabel, &cpstart, &cpend) ) + CHKERR(DMLabelGetBounds(self.dmlabel, &cpstart, &cpend)) return toInt(cpstart), toInt(cpend) def filter(self, start: int, end: int) -> None: @@ -541,7 +543,7 @@ """ cdef PetscInt cstart = asInt(start), cend = asInt(end) - CHKERR( DMLabelFilter(self.dmlabel, cstart, cend) ) + CHKERR(DMLabelFilter(self.dmlabel, cstart, cend)) def permute(self, IS permutation) -> DMLabel: """Create a new label with permuted points. @@ -559,7 +561,7 @@ """ cdef DMLabel new = DMLabel() - CHKERR( DMLabelPermute(self.dmlabel, permutation.iset, &new.dmlabel) ) + CHKERR(DMLabelPermute(self.dmlabel, permutation.iset, &new.dmlabel)) return new def distribute(self, SF sf) -> DMLabel: @@ -578,7 +580,7 @@ """ cdef DMLabel new = DMLabel() - CHKERR( DMLabelDistribute(self.dmlabel, sf.sf, &new.dmlabel) ) + CHKERR(DMLabelDistribute(self.dmlabel, sf.sf, &new.dmlabel)) return new def gather(self, SF sf) -> DMLabel: @@ -599,7 +601,7 @@ """ cdef DMLabel new = DMLabel() - CHKERR( DMLabelGather(self.dmlabel, sf.sf, &new.dmlabel) ) + CHKERR(DMLabelGather(self.dmlabel, sf.sf, &new.dmlabel)) return new def convertToSection(self) -> tuple[Section, IS]: @@ -614,7 +616,7 @@ """ cdef Section section = Section() cdef IS iset = IS() - CHKERR( DMLabelConvertToSection(self.dmlabel, &section.sec, &iset.iset) ) + CHKERR(DMLabelConvertToSection(self.dmlabel, &section.sec, &iset.iset)) return section, iset def getNonEmptyStratumValuesIS(self) -> IS: @@ -628,5 +630,5 @@ """ cdef IS iset = IS() - CHKERR( DMLabelGetNonEmptyStratumValuesIS(self.dmlabel, &iset.iset) ) + CHKERR(DMLabelGetNonEmptyStratumValuesIS(self.dmlabel, &iset.iset)) return iset diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMPlex.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMPlex.pyx index a5453177fe8..1c1de2872dc 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMPlex.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMPlex.pyx @@ -3,7 +3,9 @@ cdef class DMPlex(DM): """Encapsulate an unstructured mesh. - DMPlex encapsulates both topology and geometry. It is capable of parallel refinement and coarsening (using Pragmatic or ParMmg) and parallel redistribution for load balancing. It is designed to interface with the `FE` and ``FV`` trial discretization objects. + DMPlex encapsulates both topology and geometry. + It is capable of parallel refinement and coarsening (using Pragmatic or ParMmg) + and parallel redistribution for load balancing.
""" @@ -26,8 +28,8 @@ cdef class DMPlex(DM): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newdm = NULL - CHKERR( DMPlexCreate(ccomm, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreate(ccomm, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def createFromCellList(self, dim: int, cells: Sequence[int], coords: Sequence[float], interpolate: bool | None = True, comm: Comm | None = None) -> Self: @@ -69,23 +71,23 @@ cdef class DMPlex(DM): coords = PyArray_FROM_OTF(coords, NPY_PETSC_REAL, npy_flags) if PyArray_NDIM(cells) != 2: raise ValueError( ("cell indices must have two dimensions: " - "cells.ndim=%d") % (PyArray_NDIM(cells)) ) + "cells.ndim=%d") % (PyArray_NDIM(cells))) if PyArray_NDIM(coords) != 2: raise ValueError( ("coords vertices must have two dimensions: " - "coords.ndim=%d") % (PyArray_NDIM(coords)) ) - numCells = PyArray_DIM(cells, 0) - numCorners = PyArray_DIM(cells, 1) - numVertices = PyArray_DIM(coords, 0) - spaceDim = PyArray_DIM(coords, 1) - cellVertices = PyArray_DATA(cells) + "coords.ndim=%d") % (PyArray_NDIM(coords))) + numCells = PyArray_DIM(cells, 0) + numCorners = PyArray_DIM(cells, 1) + numVertices = PyArray_DIM(coords, 0) + spaceDim = PyArray_DIM(coords, 1) + cellVertices = PyArray_DATA(cells) vertexCoords = PyArray_DATA(coords) - CHKERR( DMPlexCreateFromCellListPetsc(ccomm, cdim, numCells, numVertices, - numCorners, interp, cellVertices, - spaceDim, vertexCoords, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateFromCellListPetsc(ccomm, cdim, numCells, numVertices, + numCorners, interp, cellVertices, + spaceDim, vertexCoords, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self - def createBoxMesh(self, faces: Sequence[int], lower: Sequence[float] | None = (0,0,0), upper: Sequence[float] | None = (1,1,1), + def createBoxMesh(self, faces: Sequence[int], lower: Sequence[float] | None = (0, 0, 0), upper: Sequence[float] | None = (1, 1, 1), simplex: bool | None = True, periodic: Sequence | str | int | bool | None = False, interpolate: bool | None = True, comm: Comm | None = None) -> Self: """Create a mesh on the tensor product of intervals. @@ -102,7 +104,7 @@ cdef class DMPlex(DM): simplex `True` for simplices, `False` for tensor cells. periodic - The boundary type for the X,Y,Z direction, + The boundary type for the X, Y, Z direction, or `None` for `DM.BoundaryType.NONE`. interpolate Flag to create intermediate mesh entities (edges, faces). 
@@ -125,18 +127,18 @@ cdef class DMPlex(DM): cdef PetscReal cupper[3] cupper[0] = cupper[1] = cupper[2] = 1 for i from 0 <= i < dim: cupper[i] = upper[i] - cdef PetscDMBoundaryType btype[3]; + cdef PetscDMBoundaryType btype[3] asBoundary(periodic, &btype[0], &btype[1], &btype[2]) cdef PetscBool csimplex = simplex cdef PetscBool cinterp = interpolate cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newdm = NULL - CHKERR( DMPlexCreateBoxMesh(ccomm, dim, csimplex, cfaces, - clower, cupper, btype, cinterp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateBoxMesh(ccomm, dim, csimplex, cfaces, + clower, cupper, btype, cinterp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self - def createBoxSurfaceMesh(self, faces: Sequence[int], lower: Sequence[float] | None = (0,0,0), upper: Sequence[float] | None = (1,1,1), + def createBoxSurfaceMesh(self, faces: Sequence[int], lower: Sequence[float] | None = (0, 0, 0), upper: Sequence[float] | None = (1, 1, 1), interpolate: bool | None = True, comm: Comm | None = None) -> Self: """Create a mesh on the surface of a box mesh using tensor cells. @@ -175,11 +177,11 @@ cdef class DMPlex(DM): cdef PetscBool cinterp = interpolate cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newdm = NULL - CHKERR( DMPlexCreateBoxSurfaceMesh(ccomm, dim, cfaces, clower, cupper, cinterp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateBoxSurfaceMesh(ccomm, dim, cfaces, clower, cupper, cinterp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self - def createFromFile(self, filename: str, plexname: str | None = "unnamed", interpolate: bool | None = True, comm: Comm | None = None): + def createFromFile(self, filename: str, plexname: str | None = "unnamed", interpolate: bool | None = True, comm: Comm | None = None) -> Self: """Create `DMPlex` from a file. Collective. 
@@ -209,8 +211,8 @@ cdef class DMPlex(DM): cdef const char *pname = NULL filename = str2bytes(filename, &cfile) plexname = str2bytes(plexname, &pname) - CHKERR( DMPlexCreateFromFile(ccomm, cfile, pname, interp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateFromFile(ccomm, cfile, pname, interp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def createCGNS(self, cgid: int, interpolate: bool | None = True, comm: Comm | None = None) -> Self: @@ -237,8 +239,8 @@ cdef class DMPlex(DM): cdef PetscBool interp = interpolate cdef PetscDM newdm = NULL cdef PetscInt ccgid = asInt(cgid) - CHKERR( DMPlexCreateCGNS(ccomm, ccgid, interp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateCGNS(ccomm, ccgid, interp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def createCGNSFromFile(self, filename: str, interpolate: bool | None = True, comm: Comm | None = None) -> Self: @@ -266,8 +268,8 @@ cdef class DMPlex(DM): cdef PetscDM newdm = NULL cdef const char *cfile = NULL filename = str2bytes(filename, &cfile) - CHKERR( DMPlexCreateCGNSFromFile(ccomm, cfile, interp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateCGNSFromFile(ccomm, cfile, interp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def createExodusFromFile(self, filename: str, interpolate: bool | None = True, comm: Comm | None = None) -> Self: @@ -295,8 +297,8 @@ cdef class DMPlex(DM): cdef PetscDM newdm = NULL cdef const char *cfile = NULL filename = str2bytes(filename, &cfile) - CHKERR( DMPlexCreateExodusFromFile(ccomm, cfile, interp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateExodusFromFile(ccomm, cfile, interp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def createExodus(self, exoid: int, interpolate: bool | None = True, comm: Comm | None = None) -> Self: @@ -307,7 +309,7 @@ cdef class DMPlex(DM): Parameters ---------- exoid - The ExodusII id associated with a exodus file and obtained using ex_open. + The ExodusII id associated with a file obtained using ``ex_open``. interpolate Create faces and edges in the mesh, comm @@ -322,8 +324,8 @@ cdef class DMPlex(DM): cdef PetscBool interp = interpolate cdef PetscDM newdm = NULL cdef PetscInt cexoid = asInt(exoid) - CHKERR( DMPlexCreateExodus(ccomm, cexoid, interp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateExodus(ccomm, cexoid, interp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def createGmsh(self, Viewer viewer, interpolate: bool | None = True, comm: Comm | None = None) -> Self: @@ -360,13 +362,15 @@ cdef class DMPlex(DM): cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscBool interp = interpolate cdef PetscDM newdm = NULL - CHKERR( DMPlexCreateGmsh(ccomm, viewer.vwr, interp, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexCreateGmsh(ccomm, viewer.vwr, interp, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def createCohesiveSubmesh(self, hasLagrange: bool, value: int) -> DMPlex: """Extract the hypersurface defined by one face of the cohesive cells. + Collective. 
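# Usage sketch (illustrative, not part of this patch): file-based creation
# from the readers above; "mesh.msh" is a hypothetical Gmsh file, and the
# ExodusII variant requires a PETSc build with Exodus support.
from petsc4py import PETSc

plex = PETSc.DMPlex().createFromFile("mesh.msh", plexname="mymesh",
                                     interpolate=True, comm=PETSc.COMM_WORLD)
# plex = PETSc.DMPlex().createExodusFromFile("mesh.exo", interpolate=True)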
+
         Parameters
         ----------
         hasLagrange
             Flag indicating whether the mesh has Lagrange dofs in the cohesive cells.
         value
             A label value.

         """
         cdef PetscBool flag = hasLagrange
         cdef PetscInt cvalue = asInt(value)
         cdef DM subdm = DMPlex()
-        CHKERR( DMPlexCreateCohesiveSubmesh(self.dm, flag, NULL, cvalue, &subdm.dm) )
+        CHKERR(DMPlexCreateCohesiveSubmesh(self.dm, flag, NULL, cvalue, &subdm.dm))
         return subdm

     def getChart(self) -> tuple[int, int]:
@@ -403,7 +407,7 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         return toInt(pStart), toInt(pEnd)

     def setChart(self, pStart: int, pEnd: int) -> None:
@@ -425,7 +429,7 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cStart = asInt(pStart)
         cdef PetscInt cEnd = asInt(pEnd)
-        CHKERR( DMPlexSetChart(self.dm, cStart, cEnd) )
+        CHKERR(DMPlexSetChart(self.dm, cStart, cEnd))

     def getConeSize(self, p: int) -> int:
         """Return the number of in-edges for this point in the DAG.
@@ -445,10 +449,10 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt csize = 0
-        CHKERR( DMPlexGetConeSize(self.dm, cp, &csize) )
+        CHKERR(DMPlexGetConeSize(self.dm, cp, &csize))
         return toInt(csize)

     def setConeSize(self, p: int, size: int) -> None:
@@ -471,10 +475,10 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt csize = asInt(size)
-        CHKERR( DMPlexSetConeSize(self.dm, cp, csize) )
+        CHKERR(DMPlexSetConeSize(self.dm, cp, csize))

     def getCone(self, p: int) -> ArrayInt:
         """Return the points on the in-edges for this point in the DAG.
@@ -494,12 +498,12 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt ncone = 0
         cdef const PetscInt *icone = NULL
-        CHKERR( DMPlexGetConeSize(self.dm, cp, &ncone) )
-        CHKERR( DMPlexGetCone(self.dm, cp, &icone) )
+        CHKERR(DMPlexGetConeSize(self.dm, cp, &ncone))
+        CHKERR(DMPlexGetCone(self.dm, cp, &icone))
         return array_i(ncone, icone)

     def setCone(self, p: int, cone: Sequence[int], orientation: Sequence[int] | None = None) -> None:
@@ -525,21 +529,21 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         # cone
         cdef PetscInt ncone = 0
         cdef PetscInt *icone = NULL
         cone = iarray_i(cone, &ncone, &icone)
-        CHKERR( DMPlexSetConeSize(self.dm, cp, ncone) )
-        CHKERR( DMPlexSetCone(self.dm, cp, icone) )
+        CHKERR(DMPlexSetConeSize(self.dm, cp, ncone))
+        CHKERR(DMPlexSetCone(self.dm, cp, icone))
         # orientation
         cdef PetscInt norie = 0
         cdef PetscInt *iorie = NULL
         if orientation is not None:
             orientation = iarray_i(orientation, &norie, &iorie)
             assert norie == ncone
-            CHKERR( DMPlexSetConeOrientation(self.dm, cp, iorie) )
+            CHKERR(DMPlexSetConeOrientation(self.dm, cp, iorie))

     def insertCone(self, p: int, conePos: int, conePoint: int) -> None:
         """DMPlexInsertCone - Insert a point into the in-edges for the point p in the DAG.
@@ -564,7 +568,7 @@ cdef class DMPlex(DM):
         cdef PetscInt cp = asInt(p)
         cdef PetscInt cconePos = asInt(conePos)
         cdef PetscInt cconePoint = asInt(conePoint)
-        CHKERR( DMPlexInsertCone(self.dm,cp,cconePos,cconePoint) )
+        CHKERR(DMPlexInsertCone(self.dm, cp, cconePos, cconePoint))

     def insertConeOrientation(self, p: int, conePos: int, coneOrientation: int) -> None:
         """Insert a point orientation for the in-edge for the point p in the DAG.
@@ -589,7 +593,7 @@ cdef class DMPlex(DM):
         cdef PetscInt cp = asInt(p)
         cdef PetscInt cconePos = asInt(conePos)
         cdef PetscInt cconeOrientation = asInt(coneOrientation)
-        CHKERR( DMPlexInsertConeOrientation(self.dm, cp, cconePos, cconeOrientation) )
+        CHKERR(DMPlexInsertConeOrientation(self.dm, cp, cconePos, cconeOrientation))

     def getConeOrientation(self, p: int) -> ArrayInt:
         """Return the orientations on the in-edges for this point in the DAG.
@@ -609,12 +613,12 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt norie = 0
         cdef const PetscInt *iorie = NULL
-        CHKERR( DMPlexGetConeSize(self.dm, cp, &norie) )
-        CHKERR( DMPlexGetConeOrientation(self.dm, cp, &iorie) )
+        CHKERR(DMPlexGetConeSize(self.dm, cp, &norie))
+        CHKERR(DMPlexGetConeOrientation(self.dm, cp, &iorie))
         return array_i(norie, iorie)

     def setConeOrientation(self, p: int, orientation: Sequence[int]) -> None:
@@ -638,15 +642,15 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt ncone = 0
-        CHKERR( DMPlexGetConeSize(self.dm, cp, &ncone) )
+        CHKERR(DMPlexGetConeSize(self.dm, cp, &ncone))
         cdef PetscInt norie = 0
         cdef PetscInt *iorie = NULL
         orientation = iarray_i(orientation, &norie, &iorie)
         assert norie == ncone
-        CHKERR( DMPlexSetConeOrientation(self.dm, cp, iorie) )
+        CHKERR(DMPlexSetConeOrientation(self.dm, cp, iorie))

     def setCellType(self, p: int, ctype: DM.PolytopeType) -> None:
         """Set the polytope type of a given cell.
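# Usage sketch (illustrative, not part of this patch): hand-building a tiny
# DAG with the chart/cone editors above; point 0 is a triangle cell and
# points 1-3 are its vertices.
from petsc4py import PETSc

plex = PETSc.DMPlex().create(comm=PETSc.COMM_SELF)
plex.setChart(0, 4)             # points [0, 4)
plex.setConeSize(0, 3)          # the cell has three points in its cone
plex.setUp()                    # allocate cone storage
plex.setCone(0, [1, 2, 3])      # wire cell -> vertices
plex.symmetrize()               # derive supports from cones
plex.stratify()                 # compute depth strata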
@@ -668,7 +672,7 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscDMPolytopeType val = ctype
-        CHKERR( DMPlexSetCellType(self.dm, cp, val) )
+        CHKERR(DMPlexSetCellType(self.dm, cp, val))

     def getCellType(self, p: int) -> DM.PolytopeType:
         """Return the polytope type of a given cell.
@@ -688,7 +692,7 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscDMPolytopeType ctype = DM_POLYTOPE_UNKNOWN
-        CHKERR( DMPlexGetCellType(self.dm, cp, &ctype) )
+        CHKERR(DMPlexGetCellType(self.dm, cp, &ctype))
         return toInt(ctype)

     def getCellTypeLabel(self) -> DMLabel:
@@ -703,8 +707,8 @@ cdef class DMPlex(DM):

         """
         cdef DMLabel label = DMLabel()
-        CHKERR( DMPlexGetCellTypeLabel(self.dm, &label.dmlabel) )
-        CHKERR( PetscINCREF(label.obj) )
+        CHKERR(DMPlexGetCellTypeLabel(self.dm, &label.dmlabel))
+        CHKERR(PetscINCREF(label.obj))
         return label

     def getSupportSize(self, p: int) -> int:
@@ -725,10 +729,10 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt ssize = 0
-        CHKERR( DMPlexGetSupportSize(self.dm, cp, &ssize) )
+        CHKERR(DMPlexGetSupportSize(self.dm, cp, &ssize))
         return toInt(ssize)

     def setSupportSize(self, p: int, size: int) -> None:
@@ -751,10 +755,10 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt csize = asInt(size)
-        CHKERR( DMPlexSetSupportSize(self.dm, cp, csize) )
+        CHKERR(DMPlexSetSupportSize(self.dm, cp, csize))

     def getSupport(self, p: int) -> ArrayInt:
         """Return the points on the out-edges for this point in the DAG.
@@ -774,12 +778,12 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt nsupp = 0
         cdef const PetscInt *isupp = NULL
-        CHKERR( DMPlexGetSupportSize(self.dm, cp, &nsupp) )
-        CHKERR( DMPlexGetSupport(self.dm, cp, &isupp) )
+        CHKERR(DMPlexGetSupportSize(self.dm, cp, &nsupp))
+        CHKERR(DMPlexGetSupport(self.dm, cp, &isupp))
         return array_i(nsupp, isupp)

     def setSupport(self, p: int, supp: Sequence[int]) -> None:
@@ -803,13 +807,13 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscInt nsupp = 0
         cdef PetscInt *isupp = NULL
         supp = iarray_i(supp, &nsupp, &isupp)
-        CHKERR( DMPlexSetSupportSize(self.dm, cp, nsupp) )
-        CHKERR( DMPlexSetSupport(self.dm, cp, isupp) )
+        CHKERR(DMPlexSetSupportSize(self.dm, cp, nsupp))
+        CHKERR(DMPlexSetSupport(self.dm, cp, isupp))

     def getMaxSizes(self) -> tuple[int, int]:
         """Return the maximum number of in-edges and out-edges of the DAG.
@@ -830,7 +834,7 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt maxConeSize = 0, maxSupportSize = 0
-        CHKERR( DMPlexGetMaxSizes(self.dm, &maxConeSize, &maxSupportSize) )
+        CHKERR(DMPlexGetMaxSizes(self.dm, &maxConeSize, &maxSupportSize))
         return toInt(maxConeSize), toInt(maxSupportSize)

     def symmetrize(self) -> None:
@@ -844,7 +848,7 @@ cdef class DMPlex(DM):
         DMPlex.setCone, petsc.DMPlexSymmetrize

         """
-        CHKERR( DMPlexSymmetrize(self.dm) )
+        CHKERR(DMPlexSymmetrize(self.dm))

     def stratify(self) -> None:
         """Calculate the strata of DAG.
@@ -856,42 +860,48 @@ cdef class DMPlex(DM):
         DM, DMPlex, DMPlex.create, DMPlex.symmetrize, petsc.DMPlexStratify

         """
-        CHKERR( DMPlexStratify(self.dm) )
+        CHKERR(DMPlexStratify(self.dm))

     def orient(self) -> None:
         """Give a consistent orientation to the input mesh.

+        Collective.
+
         See Also
         --------
         DM, DMPlex, DM.create, petsc.DMPlexOrient

         """
-        CHKERR( DMPlexOrient(self.dm) )
+        CHKERR(DMPlexOrient(self.dm))

     def getCellNumbering(self) -> IS:
         """Return a global cell numbering for all cells on this process.

+        Collective the first time it is called.
+
         See Also
         --------
         DM, DMPlex, DMPlex.getVertexNumbering, petsc.DMPlexGetCellNumbering

         """
         cdef IS iset = IS()
-        CHKERR( DMPlexGetCellNumbering(self.dm, &iset.iset) )
-        CHKERR( PetscINCREF(iset.obj) )
+        CHKERR(DMPlexGetCellNumbering(self.dm, &iset.iset))
+        CHKERR(PetscINCREF(iset.obj))
         return iset

     def getVertexNumbering(self) -> IS:
         """Return a global vertex numbering for all vertices on this process.
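# Usage sketch (illustrative, not part of this patch): the support-side
# queries above mirror the cone calls; `plex` is the triangle built in the
# previous sketch.
cone = plex.getCone(0)                   # -> [1, 2, 3]
supp = plex.getSupport(1)                # -> [0]; vertex 1 bounds cell 0
maxCone, maxSupp = plex.getMaxSizes()    # largest cone/support anywhere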
+ Collective the first time it is called. + See Also -------- DM, DMPlex, DMPlex.getCellNumbering, petsc.DMPlexGetVertexNumbering """ cdef IS iset = IS() - CHKERR( DMPlexGetVertexNumbering(self.dm, &iset.iset) ) - CHKERR( PetscINCREF(iset.obj) ) + CHKERR(DMPlexGetVertexNumbering(self.dm, &iset.iset)) + CHKERR(PetscINCREF(iset.obj)) return iset def createPointNumbering(self) -> IS: @@ -905,7 +915,7 @@ cdef class DMPlex(DM): """ cdef IS iset = IS() - CHKERR( DMPlexCreatePointNumbering(self.dm, &iset.iset) ) + CHKERR(DMPlexCreatePointNumbering(self.dm, &iset.iset)) return iset def getDepth(self) -> int: @@ -920,7 +930,7 @@ cdef class DMPlex(DM): """ cdef PetscInt depth = 0 - CHKERR( DMPlexGetDepth(self.dm,&depth) ) + CHKERR(DMPlexGetDepth(self.dm, &depth)) return toInt(depth) def getDepthStratum(self, svalue: int) -> tuple[int, int]: @@ -947,7 +957,7 @@ cdef class DMPlex(DM): """ cdef PetscInt csvalue = asInt(svalue), sStart = 0, sEnd = 0 - CHKERR( DMPlexGetDepthStratum(self.dm, csvalue, &sStart, &sEnd) ) + CHKERR(DMPlexGetDepthStratum(self.dm, csvalue, &sStart, &sEnd)) return (toInt(sStart), toInt(sEnd)) def getHeightStratum(self, svalue: int) -> tuple[int, int]: @@ -974,7 +984,7 @@ cdef class DMPlex(DM): """ cdef PetscInt csvalue = asInt(svalue), sStart = 0, sEnd = 0 - CHKERR( DMPlexGetHeightStratum(self.dm, csvalue, &sStart, &sEnd) ) + CHKERR(DMPlexGetHeightStratum(self.dm, csvalue, &sStart, &sEnd)) return (toInt(sStart), toInt(sEnd)) def getPointDepth(self, point: int) -> int: @@ -995,7 +1005,7 @@ cdef class DMPlex(DM): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt depth = 0 - CHKERR( DMPlexGetPointDepth(self.dm, cpoint, &depth) ) + CHKERR(DMPlexGetPointDepth(self.dm, cpoint, &depth)) return toInt(depth) def getPointHeight(self, point: int) -> int: @@ -1016,7 +1026,7 @@ cdef class DMPlex(DM): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt height = 0 - CHKERR( DMPlexGetPointHeight(self.dm, cpoint, &height) ) + CHKERR(DMPlexGetPointHeight(self.dm, cpoint, &height)) return toInt(height) def getMeet(self, points: Sequence[int]) -> ArrayInt: @@ -1039,11 +1049,11 @@ cdef class DMPlex(DM): cdef PetscInt numCoveringPoints = 0 cdef const PetscInt *coveringPoints = NULL points = iarray_i(points, &numPoints, &ipoints) - CHKERR( DMPlexGetMeet(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints) ) + CHKERR(DMPlexGetMeet(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints)) try: return array_i(numCoveringPoints, coveringPoints) finally: - CHKERR( DMPlexRestoreMeet(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints) ) + CHKERR(DMPlexRestoreMeet(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints)) def getJoin(self, points: Sequence[int]) -> ArrayInt: """Return an array for the join of the set of points. @@ -1065,11 +1075,11 @@ cdef class DMPlex(DM): cdef PetscInt numCoveringPoints = 0 cdef const PetscInt *coveringPoints = NULL points = iarray_i(points, &numPoints, &ipoints) - CHKERR( DMPlexGetJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints) ) + CHKERR(DMPlexGetJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints)) try: return array_i(numCoveringPoints, coveringPoints) finally: - CHKERR( DMPlexRestoreJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints) ) + CHKERR(DMPlexRestoreJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints)) def getFullJoin(self, points: Sequence[int]) -> ArrayInt: """Return an array for the join of the set of points. 
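# Usage sketch (illustrative, not part of this patch): stratum traversal and
# meet/join queries from the calls above, on any stratified DMPlex `plex`.
cStart, cEnd = plex.getHeightStratum(0)  # cells
vStart, vEnd = plex.getDepthStratum(0)   # vertices
depth = plex.getDepth()
if cEnd - cStart > 1:
    shared = plex.getMeet([cStart, cStart + 1])  # interface of two cells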
@@ -1091,11 +1101,11 @@ cdef class DMPlex(DM):
         cdef PetscInt numCoveringPoints = 0
         cdef const PetscInt *coveringPoints = NULL
         points = iarray_i(points, &numPoints, &ipoints)
-        CHKERR( DMPlexGetFullJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints) )
+        CHKERR(DMPlexGetFullJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints))
         try:
             return array_i(numCoveringPoints, coveringPoints)
         finally:
-            CHKERR( DMPlexRestoreJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints) )
+            CHKERR(DMPlexRestoreJoin(self.dm, numPoints, ipoints, &numCoveringPoints, &coveringPoints))

     def getTransitiveClosure(self, p: int, useCone: bool | None = True) -> tuple[ArrayInt, ArrayInt]:
         """Return the points and orientations on the transitive closure of this point.
@@ -1124,17 +1134,17 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p)
         cdef PetscInt pStart = 0, pEnd = 0
-        CHKERR( DMPlexGetChart(self.dm, &pStart, &pEnd) )
+        CHKERR(DMPlexGetChart(self.dm, &pStart, &pEnd))
         assert cp>=pStart and cp<pEnd
         cdef PetscBool cuseCone = useCone
         cdef PetscInt numPoints = 0
         cdef PetscInt *points = NULL
-        CHKERR( DMPlexGetTransitiveClosure(self.dm, cp, cuseCone, &numPoints, &points) )
+        CHKERR(DMPlexGetTransitiveClosure(self.dm, cp, cuseCone, &numPoints, &points))
         try:
             out = array_i(2*numPoints, points)
         finally:
-            CHKERR( DMPlexRestoreTransitiveClosure(self.dm, cp, cuseCone, &numPoints, &points) )
+            CHKERR(DMPlexRestoreTransitiveClosure(self.dm, cp, cuseCone, &numPoints, &points))
         clo = out[::2].copy()
         ori = out[1::2].copy()
         return clo, ori

     def vecGetClosure(self, Section sec, Vec vec, p: int) -> ArrayScalar:
         """Return an array of values on the closure of ``p``.
@@ -1157,11 +1167,11 @@ cdef class DMPlex(DM):

         """
         cdef PetscInt cp = asInt(p), csize = 0
         cdef PetscScalar *cvals = NULL
-        CHKERR( DMPlexVecGetClosure(self.dm, sec.sec, vec.vec, cp, &csize, &cvals) )
+        CHKERR(DMPlexVecGetClosure(self.dm, sec.sec, vec.vec, cp, &csize, &cvals))
         try:
             closure = array_s(csize, cvals)
         finally:
-            CHKERR( DMPlexVecRestoreClosure(self.dm, sec.sec, vec.vec, cp, &csize, &cvals) )
+            CHKERR(DMPlexVecRestoreClosure(self.dm, sec.sec, vec.vec, cp, &csize, &cvals))
         return closure

     def getVecClosure(self, Section sec or None, Vec vec, point: int) -> ArrayScalar:
@@ -1187,11 +1197,11 @@ cdef class DMPlex(DM):
         cdef PetscSection csec = sec.sec if sec is not None else NULL
         cdef PetscInt cp = asInt(point), csize = 0
         cdef PetscScalar *cvals = NULL
-        CHKERR( DMPlexVecGetClosure(self.dm, csec, vec.vec, cp, &csize, &cvals) )
+        CHKERR(DMPlexVecGetClosure(self.dm, csec, vec.vec, cp, &csize, &cvals))
         try:
             closure = array_s(csize, cvals)
         finally:
-            CHKERR( DMPlexVecRestoreClosure(self.dm, csec, vec.vec, cp, &csize, &cvals) )
+            CHKERR(DMPlexVecRestoreClosure(self.dm, csec, vec.vec, cp, &csize, &cvals))
         return closure

     def setVecClosure(self, Section sec or None, Vec vec, point: int, values: Sequence[Scalar], addv: InsertModeSpec | None = None) -> None:
@@ -1222,9 +1232,9 @@ cdef class DMPlex(DM):
         cdef PetscInt cp = asInt(point)
         cdef PetscInt csize = 0
         cdef PetscScalar *cvals = NULL
-        cdef object tmp = iarray_s(values, &csize, &cvals)
+        cdef object unused = iarray_s(values, &csize, &cvals)
         cdef PetscInsertMode im = insertmode(addv)
-        CHKERR( DMPlexVecSetClosure(self.dm, csec, vec.vec, cp, cvals, im) )
+        CHKERR(DMPlexVecSetClosure(self.dm, csec, vec.vec, cp, cvals, im))
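# Usage sketch (illustrative, not part of this patch): closure traversal and
# closure-based Vec access from the calls above; `sec` and `vec` are a
# hypothetical Section/local-Vec pair laid out over `plex`.
from petsc4py import PETSc

cell = plex.getHeightStratum(0)[0]
points, orients = plex.getTransitiveClosure(cell)
vals = plex.getVecClosure(sec, vec, cell)
plex.setVecClosure(sec, vec, cell, vals,
                   addv=PETSc.InsertMode.INSERT_VALUES)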
     def setMatClosure(self, Section sec or None, Section gsec or None, Mat mat, point: int, values: Sequence[Scalar], addv: InsertModeSpec | None = None) -> None:
@@ -1254,14 +1264,14 @@ cdef class DMPlex(DM):
         DM, DMPlex, petsc.DMPlexMatSetClosure

         """
-        cdef PetscSection csec  = sec.sec if sec is not None else NULL
+        cdef PetscSection csec = sec.sec if sec is not None else NULL
         cdef PetscSection cgsec = gsec.sec if gsec is not None else NULL
         cdef PetscInt cp = asInt(point)
         cdef PetscInt csize = 0
         cdef PetscScalar *cvals = NULL
-        cdef object tmp = iarray_s(values, &csize, &cvals)
+        cdef object unused = iarray_s(values, &csize, &cvals)
         cdef PetscInsertMode im = insertmode(addv)
-        CHKERR( DMPlexMatSetClosure(self.dm, csec, cgsec, mat.mat, cp, cvals,
-                                    im) )
+        CHKERR(DMPlexMatSetClosure(self.dm, csec, cgsec, mat.mat, cp, cvals, im))

     def generate(self, DMPlex boundary, name: str | None = None, interpolate: bool | None = True) -> Self:
         """Generate a mesh.
@@ -1287,8 +1297,8 @@ cdef class DMPlex(DM):
         cdef const char *cname = NULL
         if name: name = str2bytes(name, &cname)
         cdef PetscDM newdm = NULL
-        CHKERR( DMPlexGenerate(boundary.dm, cname, interp, &newdm) )
-        CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm
+        CHKERR(DMPlexGenerate(boundary.dm, cname, interp, &newdm))
+        CHKERR(PetscCLEAR(self.obj)); self.dm = newdm
         return self

     def setTriangleOptions(self, opts: str) -> None:
@@ -1309,7 +1319,7 @@ cdef class DMPlex(DM):

         """
         cdef const char *copts = NULL
         opts = str2bytes(opts, &copts)
-        CHKERR( DMPlexTriangleSetOptions(self.dm, copts) )
+        CHKERR(DMPlexTriangleSetOptions(self.dm, copts))

     def setTetGenOptions(self, opts: str) -> None:
         """Set the options used for the Tetgen mesh generator.
@@ -1329,7 +1339,7 @@ cdef class DMPlex(DM):

         """
         cdef const char *copts = NULL
         opts = str2bytes(opts, &copts)
-        CHKERR( DMPlexTetgenSetOptions(self.dm, copts) )
+        CHKERR(DMPlexTetgenSetOptions(self.dm, copts))

     def markBoundaryFaces(self, label: str, value: int | None = None) -> DMLabel:
         """Mark all faces on the boundary.
@@ -1355,12 +1365,14 @@ cdef class DMPlex(DM):
         cdef const char *cval = NULL
         label = str2bytes(label, &cval)
         cdef PetscDMLabel clbl = NULL
-        CHKERR( DMGetLabel(self.dm, cval, &clbl) )
-        CHKERR( DMPlexMarkBoundaryFaces(self.dm, ival, clbl) )
+        CHKERR(DMGetLabel(self.dm, cval, &clbl))
+        CHKERR(DMPlexMarkBoundaryFaces(self.dm, ival, clbl))

     def labelComplete(self, DMLabel label) -> None:
         """Add the transitive closure to the surface.

+        Not collective.
+
         Parameters
         ----------
         label
@@ -1371,11 +1383,14 @@ cdef class DMPlex(DM):
         See Also
         --------
         DM, DMPlex, DMPlex.labelCohesiveComplete, petsc.DMPlexLabelComplete

         """
-        CHKERR( DMPlexLabelComplete(self.dm, label.dmlabel) )
+        CHKERR(DMPlexLabelComplete(self.dm, label.dmlabel))

-    def labelCohesiveComplete(self, DMLabel label, DMLabel bdlabel, bdvalue: int, flip: bool, DMPlex subdm) -> None:
+    def labelCohesiveComplete(self, DMLabel label, DMLabel bdlabel, bdvalue: int,
+                              flip: bool, split: bool, DMPlex subdm) -> None:
         """Add all other mesh pieces to complete the surface.

+        Not collective.
+
         Parameters
         ----------
         label
@@ -1388,6 +1403,9 @@ cdef class DMPlex(DM):
         flip
             Flag to flip the submesh normal and replace points
             on the other side.
+        split
+            Flag to split faces incident on the surface boundary,
+            rather than clamping those faces to the boundary.
         subdm
             The `DMPlex` associated with the label.

@@ -1397,13 +1415,16 @@ cdef class DMPlex(DM):
         petsc.DMPlexLabelCohesiveComplete

         """
-        cdef PetscBool flg = flip
-        cdef PetscInt val = asInt(bdvalue)
-        CHKERR( DMPlexLabelCohesiveComplete(self.dm, label.dmlabel, bdlabel.dmlabel, val, flg, subdm.dm) )
+        cdef PetscBool flg = flip
+        cdef PetscBool flg2 = split
+        cdef PetscInt val = asInt(bdvalue)
+        CHKERR(DMPlexLabelCohesiveComplete(self.dm, label.dmlabel, bdlabel.dmlabel, val, flg, flg2, subdm.dm))

     def setAdjacencyUseAnchors(self, useAnchors: bool = True) -> None:
         """Define adjacency in the mesh using the point-to-point constraints.

+        Logically collective.
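# Usage sketch (illustrative, not part of this patch): boundary marking and
# label completion from the calls above.
plex.createLabel("boundary")
plex.markBoundaryFaces("boundary", value=1)   # tag exterior faces
bnd = plex.getLabel("boundary")
plex.labelComplete(bnd)                        # add the transitive closure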
+ Parameters ---------- useAnchors @@ -1418,11 +1439,13 @@ cdef class DMPlex(DM): """ cdef PetscBool flag = useAnchors - CHKERR( DMPlexSetAdjacencyUseAnchors(self.dm, flag) ) + CHKERR(DMPlexSetAdjacencyUseAnchors(self.dm, flag)) def getAdjacencyUseAnchors(self) -> bool: """Query whether adjacency in the mesh uses the point-to-point constraints. + Not collective. + See Also -------- DMPlex, DMPlex.getAdjacency, DMPlex.distribute @@ -1430,12 +1453,14 @@ cdef class DMPlex(DM): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( DMPlexGetAdjacencyUseAnchors(self.dm, &flag) ) + CHKERR(DMPlexGetAdjacencyUseAnchors(self.dm, &flag)) return toBool(flag) def getAdjacency(self, p: int) -> ArrayInt: """Return all points adjacent to the given point. + Not collective. + Parameters ---------- p @@ -1449,14 +1474,14 @@ cdef class DMPlex(DM): cdef PetscInt cp = asInt(p) cdef PetscInt nadj = PETSC_DETERMINE cdef PetscInt *iadj = NULL - CHKERR( DMPlexGetAdjacency(self.dm, cp, &nadj, &iadj) ) + CHKERR(DMPlexGetAdjacency(self.dm, cp, &nadj, &iadj)) try: adjacency = array_i(nadj, iadj) finally: - CHKERR( PetscFree(iadj) ) + CHKERR(PetscFree(iadj)) return adjacency - def setPartitioner(self, Partitioner part): + def setPartitioner(self, Partitioner part) -> None: """Set the mesh partitioner. Logically collective. @@ -1472,7 +1497,7 @@ cdef class DMPlex(DM): Partitioner.create, petsc.DMPlexSetPartitioner """ - CHKERR( DMPlexSetPartitioner(self.dm, part.part) ) + CHKERR(DMPlexSetPartitioner(self.dm, part.part)) def getPartitioner(self) -> Partitioner: """Return the mesh partitioner. @@ -1487,13 +1512,15 @@ cdef class DMPlex(DM): """ cdef Partitioner part = Partitioner() - CHKERR( DMPlexGetPartitioner(self.dm, &part.part) ) - CHKERR( PetscINCREF(part.obj) ) + CHKERR(DMPlexGetPartitioner(self.dm, &part.part)) + CHKERR(PetscINCREF(part.obj)) return part def rebalanceSharedPoints(self, entityDepth: int | None = 0, useInitialGuess: bool | None = True, parallel: bool | None = True) -> bool: """Redistribute shared points in order to achieve better balancing. + Collective. 
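# Usage sketch (illustrative, not part of this patch): adjacency control and
# partitioner selection from the calls above.
from petsc4py import PETSc

plex.setAdjacencyUseAnchors(False)
adj = plex.getAdjacency(0)                    # points adjacent to point 0
part = plex.getPartitioner()
part.setType(PETSc.Partitioner.Type.SIMPLE)   # pick a partitioner backend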
+ Parameters ---------- entityDepth @@ -1520,7 +1547,7 @@ cdef class DMPlex(DM): cdef PetscBool cuseInitialGuess = asBool(useInitialGuess) cdef PetscBool cparallel = asBool(parallel) cdef PetscBool csuccess = PETSC_FALSE - CHKERR( DMPlexRebalanceSharedPoints(self.dm, centityDepth, cuseInitialGuess, cparallel, &csuccess) ) + CHKERR(DMPlexRebalanceSharedPoints(self.dm, centityDepth, cuseInitialGuess, cparallel, &csuccess)) return toBool(csuccess) def distribute(self, overlap: int | None = 0) -> SF or None: @@ -1546,9 +1573,9 @@ cdef class DMPlex(DM): cdef PetscDM dmParallel = NULL cdef PetscInt coverlap = asInt(overlap) cdef SF sf = SF() - CHKERR( DMPlexDistribute(self.dm, coverlap, &sf.sf, &dmParallel) ) + CHKERR(DMPlexDistribute(self.dm, coverlap, &sf.sf, &dmParallel)) if dmParallel != NULL: - CHKERR( PetscCLEAR(self.obj) ); self.dm = dmParallel + CHKERR(PetscCLEAR(self.obj)); self.dm = dmParallel return sf def distributeOverlap(self, overlap: int | None = 0) -> SF: @@ -1575,9 +1602,9 @@ cdef class DMPlex(DM): cdef PetscInt coverlap = asInt(overlap) cdef SF sf = SF() cdef PetscDM dmOverlap = NULL - CHKERR( DMPlexDistributeOverlap(self.dm, coverlap, - &sf.sf, &dmOverlap) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = dmOverlap + CHKERR(DMPlexDistributeOverlap(self.dm, coverlap, + &sf.sf, &dmOverlap)) + CHKERR(PetscCLEAR(self.obj)); self.dm = dmOverlap return sf def isDistributed(self) -> bool: @@ -1591,12 +1618,14 @@ cdef class DMPlex(DM): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( DMPlexIsDistributed(self.dm, &flag) ) + CHKERR(DMPlexIsDistributed(self.dm, &flag)) return toBool(flag) def isSimplex(self) -> bool: """Return the flag indicating if the first cell is a simplex. + Not collective. + See Also -------- DM, DMPlex, DMPlex.getCellType, DMPlex.getHeightStratum @@ -1604,7 +1633,7 @@ cdef class DMPlex(DM): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( DMPlexIsSimplex(self.dm, &flag) ) + CHKERR(DMPlexIsSimplex(self.dm, &flag)) return toBool(flag) def distributeGetDefault(self) -> bool: @@ -1624,7 +1653,7 @@ cdef class DMPlex(DM): """ cdef PetscBool dist = PETSC_FALSE - CHKERR( DMPlexDistributeGetDefault(self.dm, &dist) ) + CHKERR(DMPlexDistributeGetDefault(self.dm, &dist)) return toBool(dist) def distributeSetDefault(self, flag: bool) -> None: @@ -1644,12 +1673,14 @@ cdef class DMPlex(DM): """ cdef PetscBool dist = asBool(flag) - CHKERR( DMPlexDistributeSetDefault(self.dm, dist) ) + CHKERR(DMPlexDistributeSetDefault(self.dm, dist)) return def distributionSetName(self, name: str) -> None: """Set the name of the specific parallel distribution. + Logically collective. + Parameters ---------- name @@ -1664,11 +1695,13 @@ cdef class DMPlex(DM): cdef const char *cname = NULL if name is not None: name = str2bytes(name, &cname) - CHKERR( DMPlexDistributionSetName(self.dm, cname) ) + CHKERR(DMPlexDistributionSetName(self.dm, cname)) def distributionGetName(self) -> str: """Retrieve the name of the specific parallel distribution. + Not collective. 
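# Usage sketch (illustrative, not part of this patch): the distribution flow
# above, run under MPI with `plex` built on every rank.
sf = plex.distribute(overlap=0)          # migrate cells; SF maps old -> new
if plex.isDistributed():
    ovsf = plex.distributeOverlap(1)     # add one layer of ghost cells
    plex.rebalanceSharedPoints()         # improve shared-point balance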
+ Returns ------- name : str @@ -1681,7 +1714,7 @@ cdef class DMPlex(DM): """ cdef const char *cname = NULL - CHKERR( DMPlexDistributionGetName(self.dm, &cname) ) + CHKERR(DMPlexDistributionGetName(self.dm, &cname)) return bytes2str(cname) def interpolate(self) -> None: @@ -1696,8 +1729,8 @@ cdef class DMPlex(DM): """ cdef PetscDM newdm = NULL - CHKERR( DMPlexInterpolate(self.dm, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexInterpolate(self.dm, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm def uninterpolate(self) -> None: """Convert to a mesh with only cells and vertices. @@ -1711,8 +1744,8 @@ cdef class DMPlex(DM): """ cdef PetscDM newdm = NULL - CHKERR( DMPlexUninterpolate(self.dm, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMPlexUninterpolate(self.dm, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm def distributeField(self, SF sf, Section sec, Vec vec, Section newsec=None, Vec newvec=None) -> tuple[Section, Vec]: @@ -1749,14 +1782,14 @@ cdef class DMPlex(DM): if newsec is None: newsec = Section() if newvec is None: newvec = Vec() if newsec.sec == NULL: - CHKERR( PetscObjectGetComm(sec.sec, &ccomm) ) - CHKERR( PetscSectionCreate(ccomm, &newsec.sec) ) + CHKERR(PetscObjectGetComm(sec.sec, &ccomm)) + CHKERR(PetscSectionCreate(ccomm, &newsec.sec)) if newvec.vec == NULL: - CHKERR( PetscObjectGetComm(vec.vec, &ccomm) ) - CHKERR( VecCreate(ccomm, &newvec.vec) ) - CHKERR( DMPlexDistributeField(self.dm, sf.sf, - sec.sec, vec.vec, - newsec.sec, newvec.vec)) + CHKERR(PetscObjectGetComm(vec.vec, &ccomm)) + CHKERR(VecCreate(ccomm, &newvec.vec)) + CHKERR(DMPlexDistributeField(self.dm, sf.sf, + sec.sec, vec.vec, + newsec.sec, newvec.vec)) return (newsec, newvec) def getMinRadius(self) -> float: @@ -1770,7 +1803,7 @@ cdef class DMPlex(DM): """ cdef PetscReal cminradius = 0. 
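# Usage sketch (illustrative, not part of this patch): migrating a
# Section/Vec pair along with the mesh via distributeField above; `sf` is
# the SF returned by plex.distribute() and `sec`/`vec` a matching layout.
plex.interpolate()                       # add edges/faces in place
newsec, newvec = plex.distributeField(sf, sec, vec)
h = plex.getMinRadius()                  # smallest cell in-radius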
- CHKERR( DMPlexGetMinRadius(self.dm, &cminradius)) + CHKERR(DMPlexGetMinRadius(self.dm, &cminradius)) return asReal(cminradius) def createCoarsePointIS(self) -> IS: @@ -1790,7 +1823,7 @@ cdef class DMPlex(DM): """ cdef IS fpoint = IS() - CHKERR( DMPlexCreateCoarsePointIS(self.dm, &fpoint.iset) ) + CHKERR(DMPlexCreateCoarsePointIS(self.dm, &fpoint.iset)) return fpoint def createSection(self, numComp: Sequence[int], numDof: Sequence[int], @@ -1827,7 +1860,7 @@ cdef class DMPlex(DM): """ # topological dimension cdef PetscInt dim = 0 - CHKERR( DMGetDimension(self.dm, &dim) ) + CHKERR(DMGetDimension(self.dm, &dim)) # components and DOFs cdef PetscInt ncomp = 0, ndof = 0 cdef PetscInt *icomp = NULL, *idof = NULL @@ -1839,18 +1872,19 @@ cdef class DMPlex(DM): cdef PetscInt *bcfield = NULL cdef PetscIS *bccomps = NULL cdef PetscIS *bcpoints = NULL + cdef object unused1, unused2 if bcField is not None: bcField = iarray_i(bcField, &nbc, &bcfield) if bcComps is not None: bcComps = list(bcComps) assert len(bcComps) == nbc - tmp1 = oarray_p(empty_p(nbc), NULL, &bccomps) + unused1 = oarray_p(empty_p(nbc), NULL, &bccomps) for i from 0 <= i < nbc: bccomps[i] = (bcComps[i]).iset if bcPoints is not None: bcPoints = list(bcPoints) assert len(bcPoints) == nbc - tmp2 = oarray_p(empty_p(nbc), NULL, &bcpoints) + unused2 = oarray_p(empty_p(nbc), NULL, &bcpoints) for i from 0 <= i < nbc: bcpoints[i] = (bcPoints[i]).iset else: @@ -1863,9 +1897,9 @@ cdef class DMPlex(DM): if perm is not None: cperm = perm.iset # create section cdef Section sec = Section() - CHKERR( DMPlexCreateSection(self.dm, NULL, icomp, idof, - nbc, bcfield, bccomps, bcpoints, - cperm, &sec.sec) ) + CHKERR(DMPlexCreateSection(self.dm, NULL, icomp, idof, + nbc, bcfield, bccomps, bcpoints, + cperm, &sec.sec)) return sec def getPointLocal(self, point: int) -> tuple[int, int]: @@ -1893,7 +1927,7 @@ cdef class DMPlex(DM): """ cdef PetscInt start = 0, end = 0 cdef PetscInt cpoint = asInt(point) - CHKERR( DMPlexGetPointLocal(self.dm, cpoint, &start, &end) ) + CHKERR(DMPlexGetPointLocal(self.dm, cpoint, &start, &end)) return toInt(start), toInt(end) def getPointLocalField(self, point: int, field: int) -> tuple[int, int]: @@ -1924,7 +1958,7 @@ cdef class DMPlex(DM): cdef PetscInt start = 0, end = 0 cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) - CHKERR( DMPlexGetPointLocalField(self.dm, cpoint, cfield, &start, &end) ) + CHKERR(DMPlexGetPointLocalField(self.dm, cpoint, cfield, &start, &end)) return toInt(start), toInt(end) def getPointGlobal(self, point: int) -> tuple[int, int]: @@ -1952,7 +1986,7 @@ cdef class DMPlex(DM): """ cdef PetscInt start = 0, end = 0 cdef PetscInt cpoint = asInt(point) - CHKERR( DMPlexGetPointGlobal(self.dm, cpoint, &start, &end) ) + CHKERR(DMPlexGetPointGlobal(self.dm, cpoint, &start, &end)) return toInt(start), toInt(end) def getPointGlobalField(self, point: int, field: int) -> tuple[int, int]: @@ -1983,7 +2017,7 @@ cdef class DMPlex(DM): cdef PetscInt start = 0, end = 0 cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) - CHKERR( DMPlexGetPointGlobalField(self.dm, cpoint, cfield, &start, &end) ) + CHKERR(DMPlexGetPointGlobalField(self.dm, cpoint, cfield, &start, &end)) return toInt(start), toInt(end) def createClosureIndex(self, Section sec or None) -> None: @@ -2004,13 +2038,15 @@ cdef class DMPlex(DM): """ cdef PetscSection csec = sec.sec if sec is not None else NULL - CHKERR( DMPlexCreateClosureIndex(self.dm, csec) ) + CHKERR(DMPlexCreateClosureIndex(self.dm, csec)) # def 
setRefinementUniform(self, refinementUniform: bool | None = True) -> None: """Set the flag for uniform refinement. + Logically collective. + Parameters ---------- refinementUniform @@ -2024,11 +2060,13 @@ cdef class DMPlex(DM): """ cdef PetscBool flag = refinementUniform - CHKERR( DMPlexSetRefinementUniform(self.dm, flag) ) + CHKERR(DMPlexSetRefinementUniform(self.dm, flag)) def getRefinementUniform(self) -> bool: """Retrieve the flag for uniform refinement. + Not collective. + Returns ------- refinementUniform : bool @@ -2042,12 +2080,14 @@ cdef class DMPlex(DM): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( DMPlexGetRefinementUniform(self.dm, &flag) ) + CHKERR(DMPlexGetRefinementUniform(self.dm, &flag)) return toBool(flag) def setRefinementLimit(self, refinementLimit: float) -> None: """Set the maximum cell volume for refinement. + Logically collective. + Parameters ---------- refinementLimit @@ -2061,11 +2101,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(refinementLimit) - CHKERR( DMPlexSetRefinementLimit(self.dm, rval) ) + CHKERR(DMPlexSetRefinementLimit(self.dm, rval)) def getRefinementLimit(self) -> float: """Retrieve the maximum cell volume for refinement. + Not collective. + See Also -------- DM, DMPlex, DM.refine, DMPlex.setRefinementLimit @@ -2074,7 +2116,7 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = 0.0 - CHKERR( DMPlexGetRefinementLimit(self.dm, &rval) ) + CHKERR(DMPlexGetRefinementLimit(self.dm, &rval)) return toReal(rval) def getOrdering(self, otype: Mat.OrderingType) -> IS: @@ -2102,7 +2144,7 @@ cdef class DMPlex(DM): cdef PetscDMLabel label = NULL otype = str2bytes(otype, &cval) cdef IS perm = IS() - CHKERR( DMPlexGetOrdering(self.dm, cval, label, &perm.iset) ) + CHKERR(DMPlexGetOrdering(self.dm, cval, label, &perm.iset)) return perm def permute(self, IS perm) -> DMPlex: @@ -2126,7 +2168,7 @@ cdef class DMPlex(DM): """ cdef DMPlex dm = type(self)() - CHKERR( DMPlexPermute(self.dm, perm.iset, &dm.dm) ) + CHKERR(DMPlexPermute(self.dm, perm.iset, &dm.dm)) return dm def reorderGetDefault(self) -> DM.ReorderDefaultFlag: @@ -2140,10 +2182,10 @@ cdef class DMPlex(DM): """ cdef PetscDMReorderDefaultFlag reorder = DM_REORDER_DEFAULT_NOTSET - CHKERR( DMPlexReorderGetDefault(self.dm, &reorder) ) + CHKERR(DMPlexReorderGetDefault(self.dm, &reorder)) return reorder - def reorderSetDefault(self, flag: DM.ReorderDefaultFlag): + def reorderSetDefault(self, flag: DM.ReorderDefaultFlag) -> None: """Set flag indicating whether the DM should be reordered by default. Logically collective. @@ -2159,7 +2201,7 @@ cdef class DMPlex(DM): """ cdef PetscDMReorderDefaultFlag reorder = flag - CHKERR( DMPlexReorderSetDefault(self.dm, reorder) ) + CHKERR(DMPlexReorderSetDefault(self.dm, reorder)) return # @@ -2167,7 +2209,7 @@ cdef class DMPlex(DM): def computeCellGeometryFVM(self, cell: int) -> tuple[float, ArrayReal, ArrayReal]: """Compute the volume for a given cell. - Collective. + Not collective. 
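# Usage sketch (illustrative, not part of this patch): a P1 scalar layout
# via createSection above (one dof per vertex), then uniform refinement and
# an RCM reordering. The section-attachment call name may vary by version.
from petsc4py import PETSc

dim = plex.getDimension()
numDof = [0] * (dim + 1)
numDof[0] = 1                              # dofs live on depth-0 points
sec = plex.createSection([1], numDof)
sec.setUp()
plex.setLocalSection(sec)
start, end = plex.getPointLocal(plex.getDepthStratum(0)[0])
plex.setRefinementUniform(True)
refined = plex.refine()
perm = plex.getOrdering(PETSc.Mat.OrderingType.RCM)
reordered = plex.permute(perm)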
         Parameters
         ----------
@@ -2191,9 +2233,9 @@ cdef class DMPlex(DM):
         """
         cdef PetscInt cdim = 0
         cdef PetscInt ccell = asInt(cell)
-        CHKERR( DMGetCoordinateDim(self.dm, &cdim) )
+        CHKERR(DMGetCoordinateDim(self.dm, &cdim))
         cdef PetscReal vol = 0, centroid[3], normal[3]
-        CHKERR( DMPlexComputeCellGeometryFVM(self.dm, ccell, &vol, centroid, normal) )
+        CHKERR(DMPlexComputeCellGeometryFVM(self.dm, ccell, &vol, centroid, normal))
         return (toReal(vol), array_r(cdim, centroid), array_r(cdim, normal))

     def constructGhostCells(self, labelName: str | None = None) -> int:
@@ -2221,8 +2263,8 @@ cdef class DMPlex(DM):
         labelName = str2bytes(labelName, &cname)
         cdef PetscInt numGhostCells = 0
         cdef PetscDM dmGhosted = NULL
-        CHKERR( DMPlexConstructGhostCells(self.dm, cname, &numGhostCells, &dmGhosted))
-        CHKERR( PetscCLEAR(self.obj) ); self.dm = dmGhosted
+        CHKERR(DMPlexConstructGhostCells(self.dm, cname, &numGhostCells, &dmGhosted))
+        CHKERR(PetscCLEAR(self.obj)); self.dm = dmGhosted
         return toInt(numGhostCells)

     def getSubpointIS(self) -> IS:
@@ -2241,7 +2283,7 @@ cdef class DMPlex(DM):

         """
         cdef IS iset = IS()
-        CHKERR( DMPlexGetSubpointIS(self.dm, &iset.iset) )
+        CHKERR(DMPlexGetSubpointIS(self.dm, &iset.iset))
         PetscINCREF(iset.obj)
         return iset

@@ -2261,18 +2303,30 @@ cdef class DMPlex(DM):

         """
         cdef DMLabel label = DMLabel()
-        CHKERR( DMPlexGetSubpointMap(self.dm, &label.dmlabel) )
+        CHKERR(DMPlexGetSubpointMap(self.dm, &label.dmlabel))
         PetscINCREF(label.obj)
         return label

     # Metric

     def metricSetFromOptions(self) -> None:
-        CHKERR( DMPlexMetricSetFromOptions(self.dm) )
+        """Configure the object from the options database.
+
+        Collective.
+
+        See Also
+        --------
+        petsc_options
+
+        """
+        # FIXME petsc.DMPlexMetricSetFromOptions
+        CHKERR(DMPlexMetricSetFromOptions(self.dm))

     def metricSetUniform(self, uniform: bool) -> None:
         """Record whether the metric is uniform or not.

+        Logically collective.
+
         Parameters
         ----------
         uniform
@@ -2285,11 +2339,13 @@ cdef class DMPlex(DM):

         """
         cdef PetscBool bval = asBool(uniform)
-        CHKERR( DMPlexMetricSetUniform(self.dm, bval) )
+        CHKERR(DMPlexMetricSetUniform(self.dm, bval))

     def metricIsUniform(self) -> bool:
         """Return the flag indicating whether the metric is uniform or not.

+        Not collective.
+
         See Also
         --------
         DMPlex.metricSetUniform, DMPlex.metricRestrictAnisotropyFirst
@@ -2297,12 +2353,14 @@ cdef class DMPlex(DM):

         """
         cdef PetscBool uniform = PETSC_FALSE
-        CHKERR( DMPlexMetricIsUniform(self.dm, &uniform) )
+        CHKERR(DMPlexMetricIsUniform(self.dm, &uniform))
         return toBool(uniform)

     def metricSetIsotropic(self, isotropic: bool) -> None:
         """Record whether the metric is isotropic or not.

+        Logically collective.
+
         Parameters
         ----------
         isotropic
@@ -2315,11 +2373,13 @@ cdef class DMPlex(DM):

         """
         cdef PetscBool bval = asBool(isotropic)
-        CHKERR( DMPlexMetricSetIsotropic(self.dm, bval) )
+        CHKERR(DMPlexMetricSetIsotropic(self.dm, bval))

     def metricIsIsotropic(self) -> bool:
         """Return the flag indicating whether the metric is isotropic or not.

+        Not collective.
+
         See Also
         --------
         DMPlex.metricSetIsotropic, DMPlex.metricIsUniform
@@ -2327,12 +2387,14 @@ cdef class DMPlex(DM):

         """
         cdef PetscBool isotropic = PETSC_FALSE
-        CHKERR( DMPlexMetricIsIsotropic(self.dm, &isotropic) )
+        CHKERR(DMPlexMetricIsIsotropic(self.dm, &isotropic))
         return toBool(isotropic)

     def metricSetRestrictAnisotropyFirst(self, restrictAnisotropyFirst: bool) -> None:
         """Record whether anisotropy is to be restricted before normalization or after.

+        Logically collective.
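# Usage sketch (illustrative, not part of this patch): per-cell FVM geometry
# and the metric flags above; `cell` is any cell point of `plex`.
vol, centroid, normal = plex.computeCellGeometryFVM(cell)
plex.metricSetUniform(False)     # spatially varying metric
plex.metricSetIsotropic(True)    # ...but isotropic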
+ Parameters ---------- restrictAnisotropyFirst @@ -2345,11 +2407,13 @@ cdef class DMPlex(DM): """ cdef PetscBool bval = asBool(restrictAnisotropyFirst) - CHKERR( DMPlexMetricSetRestrictAnisotropyFirst(self.dm, bval) ) + CHKERR(DMPlexMetricSetRestrictAnisotropyFirst(self.dm, bval)) def metricRestrictAnisotropyFirst(self) -> bool: """Return ``true`` if anisotropy is restricted before normalization. + Not collective. + See Also -------- DMPlex.metricIsIsotropic, DMPlex.metricSetRestrictAnisotropyFirst @@ -2357,12 +2421,14 @@ cdef class DMPlex(DM): """ cdef PetscBool restrictAnisotropyFirst = PETSC_FALSE - CHKERR( DMPlexMetricRestrictAnisotropyFirst(self.dm, &restrictAnisotropyFirst) ) + CHKERR(DMPlexMetricRestrictAnisotropyFirst(self.dm, &restrictAnisotropyFirst)) return toBool(restrictAnisotropyFirst) def metricSetNoInsertion(self, noInsert: bool) -> None: """Set the flag indicating whether node insertion should be turned off. + Logically collective. + Parameters ---------- noInsert @@ -2376,11 +2442,13 @@ cdef class DMPlex(DM): """ cdef PetscBool bval = asBool(noInsert) - CHKERR( DMPlexMetricSetNoInsertion(self.dm, bval) ) + CHKERR(DMPlexMetricSetNoInsertion(self.dm, bval)) def metricNoInsertion(self) -> bool: """Return the flag indicating whether node insertion and deletion are turned off. + Not collective. + See Also -------- DMPlex.metricSetNoInsertion, DMPlex.metricNoSwapping @@ -2389,12 +2457,14 @@ cdef class DMPlex(DM): """ cdef PetscBool noInsert = PETSC_FALSE - CHKERR( DMPlexMetricNoInsertion(self.dm, &noInsert) ) + CHKERR(DMPlexMetricNoInsertion(self.dm, &noInsert)) return toBool(noInsert) def metricSetNoSwapping(self, noSwap: bool) -> None: """Set the flag indicating whether facet swapping should be turned off. + Logically collective. + Parameters ---------- noSwap @@ -2408,11 +2478,13 @@ cdef class DMPlex(DM): """ cdef PetscBool bval = asBool(noSwap) - CHKERR( DMPlexMetricSetNoSwapping(self.dm, bval) ) + CHKERR(DMPlexMetricSetNoSwapping(self.dm, bval)) def metricNoSwapping(self) -> bool: """Return the flag indicating whether facet swapping is turned off. + Not collective. + See Also -------- DMPlex.metricSetNoSwapping, DMPlex.metricNoInsertion @@ -2421,12 +2493,14 @@ cdef class DMPlex(DM): """ cdef PetscBool noSwap = PETSC_FALSE - CHKERR( DMPlexMetricNoSwapping(self.dm, &noSwap) ) + CHKERR(DMPlexMetricNoSwapping(self.dm, &noSwap)) return toBool(noSwap) def metricSetNoMovement(self, noMove: bool) -> None: """Set the flag indicating whether node movement should be turned off. + Logically collective. + Parameters ---------- noMove @@ -2440,11 +2514,13 @@ cdef class DMPlex(DM): """ cdef PetscBool bval = asBool(noMove) - CHKERR( DMPlexMetricSetNoMovement(self.dm, bval) ) + CHKERR(DMPlexMetricSetNoMovement(self.dm, bval)) def metricNoMovement(self) -> bool: """Return the flag indicating whether node movement is turned off. + Not collective. + See Also -------- DMPlex.metricSetNoMovement, DMPlex.metricNoInsertion @@ -2453,12 +2529,14 @@ cdef class DMPlex(DM): """ cdef PetscBool noMove = PETSC_FALSE - CHKERR( DMPlexMetricNoMovement(self.dm, &noMove) ) + CHKERR(DMPlexMetricNoMovement(self.dm, &noMove)) return toBool(noMove) def metricSetNoSurf(self, noSurf: bool) -> None: """Set the flag indicating whether surface modification should be turned off. + Logically collective. 
+ Parameters ---------- noSurf @@ -2472,11 +2550,13 @@ cdef class DMPlex(DM): """ cdef PetscBool bval = asBool(noSurf) - CHKERR( DMPlexMetricSetNoSurf(self.dm, bval) ) + CHKERR(DMPlexMetricSetNoSurf(self.dm, bval)) def metricNoSurf(self) -> bool: """Return the flag indicating whether surface modification is turned off. + Not collective. + See Also -------- DMPlex.metricSetNoSurf, DMPlex.metricNoMovement @@ -2485,12 +2565,14 @@ cdef class DMPlex(DM): """ cdef PetscBool noSurf = PETSC_FALSE - CHKERR( DMPlexMetricNoSurf(self.dm, &noSurf) ) + CHKERR(DMPlexMetricNoSurf(self.dm, &noSurf)) return toBool(noSurf) def metricSetVerbosity(self, verbosity: int) -> None: """Set the verbosity of the mesh adaptation package. + Logically collective. + Parameters ---------- verbosity @@ -2503,11 +2585,13 @@ cdef class DMPlex(DM): """ cdef PetscInt ival = asInt(verbosity) - CHKERR( DMPlexMetricSetVerbosity(self.dm, ival) ) + CHKERR(DMPlexMetricSetVerbosity(self.dm, ival)) def metricGetVerbosity(self) -> int: """Return the verbosity of the mesh adaptation package. + Not collective. + Returns ------- verbosity : int @@ -2520,12 +2604,14 @@ cdef class DMPlex(DM): """ cdef PetscInt verbosity = 0 - CHKERR( DMPlexMetricGetVerbosity(self.dm, &verbosity) ) + CHKERR(DMPlexMetricGetVerbosity(self.dm, &verbosity)) return toInt(verbosity) def metricSetNumIterations(self, numIter: int) -> None: """Set the number of parallel adaptation iterations. + Logically collective. + Parameters ---------- numIter @@ -2538,11 +2624,13 @@ cdef class DMPlex(DM): """ cdef PetscInt ival = asInt(numIter) - CHKERR( DMPlexMetricSetNumIterations(self.dm, ival) ) + CHKERR(DMPlexMetricSetNumIterations(self.dm, ival)) def metricGetNumIterations(self) -> int: """Return the number of parallel adaptation iterations. + Not collective. + See Also -------- DMPlex.metricSetNumIterations, DMPlex.metricGetVerbosity @@ -2550,12 +2638,14 @@ cdef class DMPlex(DM): """ cdef PetscInt numIter = 0 - CHKERR( DMPlexMetricGetNumIterations(self.dm, &numIter) ) + CHKERR(DMPlexMetricGetNumIterations(self.dm, &numIter)) return toInt(numIter) def metricSetMinimumMagnitude(self, h_min: float) -> None: """Set the minimum tolerated metric magnitude. + Logically collective. + Parameters ---------- h_min @@ -2568,11 +2658,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(h_min) - CHKERR( DMPlexMetricSetMinimumMagnitude(self.dm, rval) ) + CHKERR(DMPlexMetricSetMinimumMagnitude(self.dm, rval)) def metricGetMinimumMagnitude(self) -> float: """Return the minimum tolerated metric magnitude. + Not collective. + See Also -------- DMPlex.metricSetMinimumMagnitude, DMPlex.metricGetMaximumMagnitude @@ -2580,12 +2672,14 @@ cdef class DMPlex(DM): """ cdef PetscReal h_min = 0 - CHKERR( DMPlexMetricGetMinimumMagnitude(self.dm, &h_min) ) + CHKERR(DMPlexMetricGetMinimumMagnitude(self.dm, &h_min)) return toReal(h_min) def metricSetMaximumMagnitude(self, h_max: float) -> None: """Set the maximum tolerated metric magnitude. + Logically collective. + Parameters ---------- h_max @@ -2598,11 +2692,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(h_max) - CHKERR( DMPlexMetricSetMaximumMagnitude(self.dm, rval) ) + CHKERR(DMPlexMetricSetMaximumMagnitude(self.dm, rval)) def metricGetMaximumMagnitude(self) -> float: """Return the maximum tolerated metric magnitude. + Not collective. 
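# Usage sketch (illustrative, not part of this patch): typical adaptation
# controls set through the accessors above; the values are illustrative.
plex.metricSetVerbosity(-1)              # silence the remesher
plex.metricSetNumIterations(3)           # parallel adaptation sweeps
plex.metricSetMinimumMagnitude(1.0e-8)   # clamp metric magnitudes
plex.metricSetMaximumMagnitude(1.0e+2)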
+ See Also -------- DMPlex.metricSetMaximumMagnitude, DMPlex.metricGetMinimumMagnitude @@ -2610,12 +2706,14 @@ cdef class DMPlex(DM): """ cdef PetscReal h_max = 0 - CHKERR( DMPlexMetricGetMaximumMagnitude(self.dm, &h_max) ) + CHKERR(DMPlexMetricGetMaximumMagnitude(self.dm, &h_max)) return toReal(h_max) def metricSetMaximumAnisotropy(self, a_max: float) -> None: """Set the maximum tolerated metric anisotropy. + Logically collective. + Parameters ---------- a_max @@ -2628,11 +2726,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(a_max) - CHKERR( DMPlexMetricSetMaximumAnisotropy(self.dm, rval) ) + CHKERR(DMPlexMetricSetMaximumAnisotropy(self.dm, rval)) def metricGetMaximumAnisotropy(self) -> float: """Return the maximum tolerated metric anisotropy. + Not collective. + See Also -------- DMPlex.metricSetMaximumAnisotropy, DMPlex.metricGetMaximumMagnitude @@ -2640,12 +2740,14 @@ cdef class DMPlex(DM): """ cdef PetscReal a_max = 0 - CHKERR( DMPlexMetricGetMaximumAnisotropy(self.dm, &a_max) ) + CHKERR(DMPlexMetricGetMaximumAnisotropy(self.dm, &a_max)) return toReal(a_max) def metricSetTargetComplexity(self, targetComplexity: float) -> None: """Set the target metric complexity. + Logically collective. + Parameters ---------- targetComplexity @@ -2658,11 +2760,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(targetComplexity) - CHKERR( DMPlexMetricSetTargetComplexity(self.dm, rval) ) + CHKERR(DMPlexMetricSetTargetComplexity(self.dm, rval)) def metricGetTargetComplexity(self) -> float: """Return the target metric complexity. + Not collective. + See Also -------- DMPlex.metricSetTargetComplexity, DMPlex.metricGetNormalizationOrder @@ -2670,12 +2774,14 @@ cdef class DMPlex(DM): """ cdef PetscReal targetComplexity = 0 - CHKERR( DMPlexMetricGetTargetComplexity(self.dm, &targetComplexity) ) + CHKERR(DMPlexMetricGetTargetComplexity(self.dm, &targetComplexity)) return toReal(targetComplexity) def metricSetNormalizationOrder(self, p: float) -> None: """Set the order p for L-p normalization. + Logically collective. + Parameters ---------- p @@ -2688,11 +2794,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(p) - CHKERR( DMPlexMetricSetNormalizationOrder(self.dm, rval) ) + CHKERR(DMPlexMetricSetNormalizationOrder(self.dm, rval)) def metricGetNormalizationOrder(self) -> float: """Return the order p for L-p normalization. + Not collective. + See Also -------- DMPlex.metricSetNormalizationOrder, DMPlex.metricGetTargetComplexity @@ -2700,12 +2808,14 @@ cdef class DMPlex(DM): """ cdef PetscReal p = 0 - CHKERR( DMPlexMetricGetNormalizationOrder(self.dm, &p) ) + CHKERR(DMPlexMetricGetNormalizationOrder(self.dm, &p)) return toReal(p) def metricSetGradationFactor(self, beta: float) -> None: """Set the metric gradation factor. + Logically collective. + Parameters ---------- beta @@ -2718,11 +2828,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(beta) - CHKERR( DMPlexMetricSetGradationFactor(self.dm, rval) ) + CHKERR(DMPlexMetricSetGradationFactor(self.dm, rval)) def metricGetGradationFactor(self) -> float: """Return the metric gradation factor. + Not collective. + See Also -------- DMPlex.metricSetGradationFactor, DMPlex.metricGetHausdorffNumber @@ -2730,12 +2842,14 @@ cdef class DMPlex(DM): """ cdef PetscReal beta = 0 - CHKERR( DMPlexMetricGetGradationFactor(self.dm, &beta) ) + CHKERR(DMPlexMetricGetGradationFactor(self.dm, &beta)) return toReal(beta) def metricSetHausdorffNumber(self, hausd: float) -> None: """Set the metric Hausdorff number. + Logically collective. 
+ Parameters ---------- hausd @@ -2748,11 +2862,13 @@ cdef class DMPlex(DM): """ cdef PetscReal rval = asReal(hausd) - CHKERR( DMPlexMetricSetHausdorffNumber(self.dm, rval) ) + CHKERR(DMPlexMetricSetHausdorffNumber(self.dm, rval)) def metricGetHausdorffNumber(self) -> float: """Return the metric Hausdorff number. + Not collective. + See Also -------- DMPlex.metricGetGradationFactor, DMPlex.metricSetHausdorffNumber @@ -2760,12 +2876,14 @@ cdef class DMPlex(DM): """ cdef PetscReal hausd = 0 - CHKERR( DMPlexMetricGetHausdorffNumber(self.dm, &hausd) ) + CHKERR(DMPlexMetricGetHausdorffNumber(self.dm, &hausd)) return toReal(hausd) def metricCreate(self, field: int | None = 0) -> Vec: """Create a Riemannian metric field. + Collective. + Parameters ---------- field @@ -2779,12 +2897,14 @@ cdef class DMPlex(DM): """ cdef PetscInt ival = asInt(field) cdef Vec metric = Vec() - CHKERR( DMPlexMetricCreate(self.dm, ival, &metric.vec) ) + CHKERR(DMPlexMetricCreate(self.dm, ival, &metric.vec)) return metric def metricCreateUniform(self, alpha: float, field: int | None = 0) -> Vec: """Construct a uniform isotropic metric. + Collective. + Parameters ---------- alpha @@ -2801,12 +2921,14 @@ cdef class DMPlex(DM): cdef PetscInt ival = asInt(field) cdef PetscReal rval = asReal(alpha) cdef Vec metric = Vec() - CHKERR( DMPlexMetricCreateUniform(self.dm, ival, rval, &metric.vec) ) + CHKERR(DMPlexMetricCreateUniform(self.dm, ival, rval, &metric.vec)) return metric def metricCreateIsotropic(self, Vec indicator, field: int | None = 0) -> Vec: """Construct an isotropic metric from an error indicator. + Collective. + Parameters ---------- indicator @@ -2822,12 +2944,14 @@ cdef class DMPlex(DM): """ cdef PetscInt ival = asInt(field) cdef Vec metric = Vec() - CHKERR( DMPlexMetricCreateIsotropic(self.dm, ival, indicator.vec, &metric.vec) ) + CHKERR(DMPlexMetricCreateIsotropic(self.dm, ival, indicator.vec, &metric.vec)) return metric def metricDeterminantCreate(self, field: int | None = 0) -> tuple[Vec, DM]: """Create the determinant field for a Riemannian metric. + Collective. + Parameters ---------- field @@ -2849,12 +2973,14 @@ cdef class DMPlex(DM): cdef PetscInt ival = asInt(field) cdef Vec determinant = Vec() cdef DM dmDet = DM() - CHKERR( DMPlexMetricDeterminantCreate(self.dm, ival, &determinant.vec, &dmDet.dm) ) + CHKERR(DMPlexMetricDeterminantCreate(self.dm, ival, &determinant.vec, &dmDet.dm)) return (determinant, dmDet) def metricEnforceSPD(self, Vec metric, Vec ometric, Vec determinant, restrictSizes: bool | None = False, restrictAnisotropy: bool | None = False) -> tuple[Vec, Vec]: """Enforce symmetric positive-definiteness of a metric. + Collective. + Parameters ---------- metric @@ -2883,13 +3009,14 @@ cdef class DMPlex(DM): """ cdef PetscBool bval_rs = asBool(restrictSizes) cdef PetscBool bval_ra = asBool(restrictAnisotropy) - cdef DM dmDet = DM() - CHKERR( DMPlexMetricEnforceSPD(self.dm, metric.vec, bval_rs, bval_ra, ometric.vec, determinant.vec) ) + CHKERR(DMPlexMetricEnforceSPD(self.dm, metric.vec, bval_rs, bval_ra, ometric.vec, determinant.vec)) return (ometric, determinant) def metricNormalize(self, Vec metric, Vec ometric, Vec determinant, restrictSizes: bool | None = True, restrictAnisotropy: bool | None = True) -> tuple[Vec, Vec]: """Apply L-p normalization to a metric. + Collective. 
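# Usage sketch (illustrative, not part of this patch): building and
# normalizing a metric with the constructors above.
metric = plex.metricCreateUniform(100.0)           # uniform isotropic metric
out = plex.metricCreate()                          # output metric holder
det, dmDet = plex.metricDeterminantCreate()        # determinant field
plex.metricSetTargetComplexity(1.0e4)
out, det = plex.metricNormalize(metric, out, det)  # L-p normalization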
+ Parameters ---------- metric @@ -2918,12 +3045,14 @@ cdef class DMPlex(DM): """ cdef PetscBool bval_rs = asBool(restrictSizes) cdef PetscBool bval_ra = asBool(restrictAnisotropy) - CHKERR( DMPlexMetricNormalize(self.dm, metric.vec, bval_rs, bval_ra, ometric.vec, determinant.vec) ) + CHKERR(DMPlexMetricNormalize(self.dm, metric.vec, bval_rs, bval_ra, ometric.vec, determinant.vec)) return (ometric, determinant) def metricAverage2(self, Vec metric1, Vec metric2, Vec metricAvg) -> Vec: """Compute and return the unweighted average of two metrics. + Collective. + Parameters ---------- metric1 @@ -2938,12 +3067,14 @@ cdef class DMPlex(DM): DMPlex.metricAverage3, petsc.DMPlexMetricAverage2 """ - CHKERR( DMPlexMetricAverage2(self.dm, metric1.vec, metric2.vec, metricAvg.vec) ) + CHKERR(DMPlexMetricAverage2(self.dm, metric1.vec, metric2.vec, metricAvg.vec)) return metricAvg def metricAverage3(self, Vec metric1, Vec metric2, Vec metric3, Vec metricAvg) -> Vec: """Compute and return the unweighted average of three metrics. + Collective. + Parameters ---------- metric1 @@ -2960,12 +3091,14 @@ cdef class DMPlex(DM): DMPlex.metricAverage2, petsc.DMPlexMetricAverage3 """ - CHKERR( DMPlexMetricAverage3(self.dm, metric1.vec, metric2.vec, metric3.vec, metricAvg.vec) ) + CHKERR(DMPlexMetricAverage3(self.dm, metric1.vec, metric2.vec, metric3.vec, metricAvg.vec)) return metricAvg def metricIntersection2(self, Vec metric1, Vec metric2, Vec metricInt) -> Vec: """Compute and return the intersection of two metrics. + Collective. + Parameters ---------- metric1 @@ -2980,12 +3113,14 @@ cdef class DMPlex(DM): DMPlex.metricIntersection3, petsc.DMPlexMetricIntersection2 """ - CHKERR( DMPlexMetricIntersection2(self.dm, metric1.vec, metric2.vec, metricInt.vec) ) + CHKERR(DMPlexMetricIntersection2(self.dm, metric1.vec, metric2.vec, metricInt.vec)) return metricInt def metricIntersection3(self, Vec metric1, Vec metric2, Vec metric3, Vec metricInt) -> Vec: """Compute the intersection of three metrics. + Collective. + Parameters ---------- metric1 @@ -3002,7 +3137,7 @@ cdef class DMPlex(DM): DMPlex.metricIntersection2, petsc.DMPlexMetricIntersection3 """ - CHKERR( DMPlexMetricIntersection3(self.dm, metric1.vec, metric2.vec, metric3.vec, metricInt.vec) ) + CHKERR(DMPlexMetricIntersection3(self.dm, metric1.vec, metric2.vec, metric3.vec, metricInt.vec)) return metricInt def computeGradientClementInterpolant(self, Vec locX, Vec locC) -> Vec: @@ -3022,7 +3157,7 @@ cdef class DMPlex(DM): DM, DMPlex, petsc.DMPlexComputeGradientClementInterpolant """ - CHKERR( DMPlexComputeGradientClementInterpolant(self.dm, locX.vec, locC.vec) ) + CHKERR(DMPlexComputeGradientClementInterpolant(self.dm, locX.vec, locC.vec)) return locC # View @@ -3043,7 +3178,7 @@ cdef class DMPlex(DM): DMPlex.topologyLoad, Viewer, petsc.DMPlexTopologyView """ - CHKERR( DMPlexTopologyView(self.dm, viewer.vwr)) + CHKERR(DMPlexTopologyView(self.dm, viewer.vwr)) def coordinatesView(self, Viewer viewer) -> None: """Save `DMPlex` coordinates into a file. @@ -3061,7 +3196,7 @@ cdef class DMPlex(DM): DMPlex.coordinatesLoad, Viewer, petsc.DMPlexCoordinatesView """ - CHKERR( DMPlexCoordinatesView(self.dm, viewer.vwr)) + CHKERR(DMPlexCoordinatesView(self.dm, viewer.vwr)) def labelsView(self, Viewer viewer) -> None: """Save `DMPlex` labels into a file. 
@@ -3079,7 +3214,7 @@ cdef class DMPlex(DM): DMPlex.labelsLoad, Viewer, petsc.DMPlexLabelsView """ - CHKERR( DMPlexLabelsView(self.dm, viewer.vwr)) + CHKERR(DMPlexLabelsView(self.dm, viewer.vwr)) def sectionView(self, Viewer viewer, DM sectiondm) -> None: """Save a section associated with a `DMPlex`. @@ -3100,7 +3235,7 @@ cdef class DMPlex(DM): DMPlex.sectionLoad, Viewer, petsc.DMPlexSectionView """ - CHKERR( DMPlexSectionView(self.dm, viewer.vwr, sectiondm.dm)) + CHKERR(DMPlexSectionView(self.dm, viewer.vwr, sectiondm.dm)) def globalVectorView(self, Viewer viewer, DM sectiondm, Vec vec) -> None: """Save a global vector. @@ -3124,7 +3259,7 @@ cdef class DMPlex(DM): DMPlex.localVectorLoad, petsc.DMPlexGlobalVectorView """ - CHKERR( DMPlexGlobalVectorView(self.dm, viewer.vwr, sectiondm.dm, vec.vec)) + CHKERR(DMPlexGlobalVectorView(self.dm, viewer.vwr, sectiondm.dm, vec.vec)) def localVectorView(self, Viewer viewer, DM sectiondm, Vec vec) -> None: """Save a local vector. @@ -3148,7 +3283,7 @@ cdef class DMPlex(DM): DMPlex.localVectorLoad, petsc.DMPlexLocalVectorView """ - CHKERR( DMPlexLocalVectorView(self.dm, viewer.vwr, sectiondm.dm, vec.vec)) + CHKERR(DMPlexLocalVectorView(self.dm, viewer.vwr, sectiondm.dm, vec.vec)) # Load @@ -3175,7 +3310,7 @@ cdef class DMPlex(DM): """ cdef SF sf = SF() - CHKERR( DMPlexTopologyLoad(self.dm, viewer.vwr, &sf.sf)) + CHKERR(DMPlexTopologyLoad(self.dm, viewer.vwr, &sf.sf)) return sf def coordinatesLoad(self, Viewer viewer, SF sfxc) -> None: @@ -3196,7 +3331,7 @@ cdef class DMPlex(DM): SF, Viewer, petsc.DMPlexCoordinatesLoad """ - CHKERR( DMPlexCoordinatesLoad(self.dm, viewer.vwr, sfxc.sf)) + CHKERR(DMPlexCoordinatesLoad(self.dm, viewer.vwr, sfxc.sf)) def labelsLoad(self, Viewer viewer, SF sfxc) -> None: """Load labels into this `DMPlex` object. @@ -3216,7 +3351,7 @@ cdef class DMPlex(DM): DM.view, SF, Viewer, petsc.DMPlexLabelsLoad """ - CHKERR( DMPlexLabelsLoad(self.dm, viewer.vwr, sfxc.sf)) + CHKERR(DMPlexLabelsLoad(self.dm, viewer.vwr, sfxc.sf)) def sectionLoad(self, Viewer viewer, DM sectiondm, SF sfxc) -> tuple[SF, SF]: """Load section into a `DM`. @@ -3252,7 +3387,7 @@ cdef class DMPlex(DM): """ cdef SF gsf = SF() cdef SF lsf = SF() - CHKERR( DMPlexSectionLoad(self.dm, viewer.vwr, sectiondm.dm, sfxc.sf, &gsf.sf, &lsf.sf)) + CHKERR(DMPlexSectionLoad(self.dm, viewer.vwr, sectiondm.dm, sfxc.sf, &gsf.sf, &lsf.sf)) return gsf, lsf def globalVectorLoad(self, Viewer viewer, DM sectiondm, SF sf, Vec vec) -> None: @@ -3278,7 +3413,7 @@ cdef class DMPlex(DM): DMPlex.localVectorView, SF, Viewer, petsc.DMPlexGlobalVectorLoad """ - CHKERR( DMPlexGlobalVectorLoad(self.dm, viewer.vwr, sectiondm.dm, sf.sf, vec.vec)) + CHKERR(DMPlexGlobalVectorLoad(self.dm, viewer.vwr, sectiondm.dm, sf.sf, vec.vec)) def localVectorLoad(self, Viewer viewer, DM sectiondm, SF sf, Vec vec) -> None: """Load on-disk vector data into a local vector. 
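# Usage sketch (illustrative, not part of this patch): an HDF5 checkpoint
# cycle built from the view/load pairs above; "mesh.h5" is hypothetical and
# saved/loaded object names must agree.
from petsc4py import PETSc

vwr = PETSc.Viewer().createHDF5("mesh.h5", mode="w", comm=PETSc.COMM_WORLD)
plex.topologyView(vwr)
plex.coordinatesView(vwr)
vwr.destroy()

vwr = PETSc.Viewer().createHDF5("mesh.h5", mode="r", comm=PETSc.COMM_WORLD)
newplex = PETSc.DMPlex().create(comm=PETSc.COMM_WORLD)
sfxc = newplex.topologyLoad(vwr)
newplex.coordinatesLoad(vwr, sfxc)
vwr.destroy()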
@@ -3303,10 +3438,13 @@ cdef class DMPlex(DM): DMPlex.localVectorView, SF, Viewer, petsc.DMPlexLocalVectorLoad """ - CHKERR( DMPlexLocalVectorLoad(self.dm, viewer.vwr, sectiondm.dm, sf.sf, vec.vec)) + CHKERR(DMPlexLocalVectorLoad(self.dm, viewer.vwr, sectiondm.dm, sf.sf, vec.vec)) # -------------------------------------------------------------------- + + class DMPlexTransformType(object): + """Transformation types.""" REFINEREGULAR = S_(DMPLEXREFINEREGULAR) REFINEALFELD = S_(DMPLEXREFINEALFELD) REFINEPOWELLSABIN = S_(DMPLEXREFINEPOWELLSABIN) @@ -3318,50 +3456,129 @@ class DMPlexTransformType(object): EXTRUDE = S_(DMPLEXEXTRUDE) TRANSFORMFILTER = S_(DMPLEXTRANSFORMFILTER) + cdef class DMPlexTransform(Object): + """Mesh transformations.""" def __cinit__(self): self.obj = &self.tr self.tr = NULL - def apply(self, DM dm): + def apply(self, DM dm) -> DM: + """Apply a mesh transformation. + + Collective. + + """ + # FIXME petsc.DMPlexTransformApply cdef DMPlex newdm = DMPlex() - CHKERR( DMPlexTransformApply(self.tr, dm.dm, &newdm.dm) ) + CHKERR(DMPlexTransformApply(self.tr, dm.dm, &newdm.dm)) return newdm - def create(self, comm=None): + def create(self, comm: Comm | None = None) -> Self: + """Create a mesh transformation. + + Collective. + + See Also + -------- + petsc.DMPlexTransformCreate + + """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDMPlexTransform newtr = NULL - CHKERR( DMPlexTransformCreate(ccomm, &newtr) ) - CHKERR( PetscCLEAR(self.obj) ) + CHKERR(DMPlexTransformCreate(ccomm, &newtr)) + CHKERR(PetscCLEAR(self.obj)) self.tr = newtr return self - def destroy(self): - CHKERR( DMPlexTransformDestroy(&self.tr) ) + def destroy(self) -> Self: + """Destroy a mesh transformation. + + Collective. + + See Also + -------- + petsc.DMPlexTransformDestroy + + """ + CHKERR(DMPlexTransformDestroy(&self.tr)) return self - def getType(self): + def getType(self) -> str: + """Return the transformation type name. + + Not collective. + + See Also + -------- + petsc.DMPlexTransformGetType + + """ cdef PetscDMPlexTransformType cval = NULL - CHKERR( DMPlexTransformGetType(self.tr, &cval) ) + CHKERR(DMPlexTransformGetType(self.tr, &cval)) return bytes2str(cval) - def setUp(self): - CHKERR( DMPlexTransformSetUp(self.tr) ) + def setUp(self) -> Self: + """Set up a mesh transformation. + + Collective. + + """ + # FIXME petsc.DMPlexTransformSetUp + CHKERR(DMPlexTransformSetUp(self.tr)) return self - def setType(self, tr_type): + def setType(self, tr_type: DMPlexTransformType | str) -> None: + """Set the transformation type. + + Collective. + + See Also + -------- + petsc.DMPlexTransformSetType + + """ cdef PetscDMPlexTransformType cval = NULL tr_type = str2bytes(tr_type, &cval) - CHKERR( DMPlexTransformSetType(self.tr, cval) ) + CHKERR(DMPlexTransformSetType(self.tr, cval)) + + def setDM(self, DM dm) -> None: + """Set the `DM` for the transformation. + + Logically collective. + + """ + # FIXME petsc.DMPlexTransformSetDM + CHKERR(DMPlexTransformSetDM(self.tr, dm.dm)) - def setDM(self, DM dm): - CHKERR( DMPlexTransformSetDM(self.tr, dm.dm) ) + def setFromOptions(self) -> None: + """Configure the transformation from the options database. - def setFromOptions(self): - CHKERR( DMPlexTransformSetFromOptions(self.tr) ) + Collective. + + See Also + -------- + petsc_options, petsc.DMPlexTransformSetFromOptions + + """ + CHKERR(DMPlexTransformSetFromOptions(self.tr)) - def view(self, Viewer viewer=None): + def view(self, Viewer viewer=None) -> None: + """View the mesh transformation. + + Collective.
+ + Parameters + ---------- + viewer + A `Viewer` instance or `None` for the default viewer. + + See Also + -------- + Viewer, petsc.DMPlexTransformView + + """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( DMPlexTransformView(self.tr, vwr) ) + CHKERR(DMPlexTransformView(self.tr, vwr)) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMShell.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMShell.pyx index c9d369e0f7b..3e869ee1a3d 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMShell.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMShell.pyx @@ -18,8 +18,8 @@ cdef class DMShell(DM): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newdm = NULL - CHKERR( DMShellCreate(ccomm, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm + CHKERR(DMShellCreate(ccomm, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm return self def setMatrix(self, Mat mat) -> None: @@ -37,7 +37,7 @@ cdef class DMShell(DM): petsc.DMShellSetMatrix """ - CHKERR( DMShellSetMatrix(self.dm, mat.mat) ) + CHKERR(DMShellSetMatrix(self.dm, mat.mat)) def setGlobalVector(self, Vec gv) -> None: """Set a template global vector. @@ -54,7 +54,7 @@ cdef class DMShell(DM): setLocalVector, petsc.DMShellSetGlobalVector """ - CHKERR( DMShellSetGlobalVector(self.dm, gv.vec) ) + CHKERR(DMShellSetGlobalVector(self.dm, gv.vec)) def setLocalVector(self, Vec lv) -> None: """Set a template local vector. @@ -71,14 +71,13 @@ cdef class DMShell(DM): setGlobalVector, petsc.DMShellSetLocalVector """ - CHKERR( DMShellSetLocalVector(self.dm, lv.vec) ) + CHKERR(DMShellSetLocalVector(self.dm, lv.vec)) def setCreateGlobalVector( self, create_gvec: Callable[[DM], Vec] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine to create a global vector. Logically collective. @@ -102,16 +101,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (create_gvec, args, kargs) self.set_attr('__create_global_vector__', context) - CHKERR( DMShellSetCreateGlobalVector(self.dm, DMSHELL_CreateGlobalVector) ) + CHKERR(DMShellSetCreateGlobalVector(self.dm, DMSHELL_CreateGlobalVector)) else: - CHKERR( DMShellSetCreateGlobalVector(self.dm, NULL) ) + CHKERR(DMShellSetCreateGlobalVector(self.dm, NULL)) def setCreateLocalVector( self, create_lvec: Callable[[DM], Vec] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine to create a local vector. Logically collective. @@ -135,9 +133,9 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (create_lvec, args, kargs) self.set_attr('__create_local_vector__', context) - CHKERR( DMShellSetCreateLocalVector(self.dm, DMSHELL_CreateLocalVector) ) + CHKERR(DMShellSetCreateLocalVector(self.dm, DMSHELL_CreateLocalVector)) else: - CHKERR( DMShellSetCreateLocalVector(self.dm, NULL) ) + CHKERR(DMShellSetCreateLocalVector(self.dm, NULL)) def setGlobalToLocal( self, @@ -146,8 +144,7 @@ cdef class DMShell(DM): begin_args: tuple[Any, ...] | None = None, begin_kargs: dict[str, Any] | None = None, end_args: tuple[Any, ...] | None = None, - end_kargs: dict[str, Any] | None = None, - ) -> None: + end_kargs: dict[str, Any] | None = None) -> None: """Set the routines used to perform a global to local scatter. Logically collective. 
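A minimal usage sketch for the DMPlexTransform API above; REFINEREGULAR is one of the type names listed in DMPlexTransformType, and the input mesh is illustrative:

    from petsc4py import PETSc

    dm = PETSc.DMPlex().createBoxMesh([2, 2])
    tr = PETSc.DMPlexTransform().create(comm=dm.getComm())
    tr.setType(PETSc.DMPlexTransformType.REFINEREGULAR)
    tr.setDM(dm)
    tr.setUp()
    refined = tr.apply(dm)    # a new, regularly refined DMPlex
    tr.destroy()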
@@ -187,7 +184,7 @@ cdef class DMShell(DM): context = (end, end_args, end_kargs) self.set_attr('__g2l_end__', context) cend = &DMSHELL_GlobalToLocalEnd - CHKERR( DMShellSetGlobalToLocal(self.dm, cbegin, cend) ) + CHKERR(DMShellSetGlobalToLocal(self.dm, cbegin, cend)) def setGlobalToLocalVecScatter(self, Scatter gtol) -> None: """Set a `Scatter` context for global to local communication. @@ -204,7 +201,7 @@ cdef class DMShell(DM): petsc.DMShellSetGlobalToLocalVecScatter """ - CHKERR( DMShellSetGlobalToLocalVecScatter(self.dm, gtol.sct) ) + CHKERR(DMShellSetGlobalToLocalVecScatter(self.dm, gtol.sct)) def setLocalToGlobal( self, @@ -213,8 +210,7 @@ cdef class DMShell(DM): begin_args: tuple[Any, ...] | None = None, begin_kargs: dict[str, Any] | None = None, end_args: tuple[Any, ...] | None = None, - end_kargs: dict[str, Any] | None = None, - ) -> None: + end_kargs: dict[str, Any] | None = None) -> None: """Set the routines used to perform a local to global scatter. Logically collective. @@ -252,7 +248,7 @@ cdef class DMShell(DM): context = (end, end_args, end_kargs) self.set_attr('__l2g_end__', context) cend = &DMSHELL_LocalToGlobalEnd - CHKERR( DMShellSetLocalToGlobal(self.dm, cbegin, cend) ) + CHKERR(DMShellSetLocalToGlobal(self.dm, cbegin, cend)) def setLocalToGlobalVecScatter(self, Scatter ltog) -> None: """Set a `Scatter` context for local to global communication. @@ -269,7 +265,7 @@ cdef class DMShell(DM): petsc.DMShellSetLocalToGlobalVecScatter """ - CHKERR( DMShellSetLocalToGlobalVecScatter(self.dm, ltog.sct) ) + CHKERR(DMShellSetLocalToGlobalVecScatter(self.dm, ltog.sct)) def setLocalToLocal( self, @@ -278,8 +274,7 @@ cdef class DMShell(DM): begin_args: tuple[Any, ...] | None = None, begin_kargs: dict[str, Any] | None = None, end_args: tuple[Any, ...] | None = None, - end_kargs: dict[str, Any] | None = None, - ) -> None: + end_kargs: dict[str, Any] | None = None) -> None: """Set the routines used to perform a local to local scatter. Logically collective. @@ -319,7 +314,7 @@ cdef class DMShell(DM): context = (end, end_args, end_kargs) self.set_attr('__l2l_end__', context) cend = &DMSHELL_LocalToLocalEnd - CHKERR( DMShellSetLocalToLocal(self.dm, cbegin, cend) ) + CHKERR(DMShellSetLocalToLocal(self.dm, cbegin, cend)) def setLocalToLocalVecScatter(self, Scatter ltol) -> None: """Set a ``Scatter`` context for local to local communication. @@ -336,14 +331,13 @@ cdef class DMShell(DM): petsc.DMShellSetLocalToLocalVecScatter """ - CHKERR( DMShellSetLocalToLocalVecScatter(self.dm, ltol.sct) ) + CHKERR(DMShellSetLocalToLocalVecScatter(self.dm, ltol.sct)) def setCreateMatrix( self, create_matrix: Callable[[DM], Mat] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine to create a matrix. Logically collective. @@ -367,16 +361,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (create_matrix, args, kargs) self.set_attr('__create_matrix__', context) - CHKERR( DMShellSetCreateMatrix(self.dm, DMSHELL_CreateMatrix) ) + CHKERR(DMShellSetCreateMatrix(self.dm, DMSHELL_CreateMatrix)) else: - CHKERR( DMShellSetCreateMatrix(self.dm, NULL) ) + CHKERR(DMShellSetCreateMatrix(self.dm, NULL)) def setCoarsen( self, coarsen: Callable[[DM, Comm], DM] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to coarsen the `DMShell`. Logically collective. 
@@ -400,16 +393,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (coarsen, args, kargs) self.set_attr('__coarsen__', context) - CHKERR( DMShellSetCoarsen(self.dm, DMSHELL_Coarsen) ) + CHKERR(DMShellSetCoarsen(self.dm, DMSHELL_Coarsen)) else: - CHKERR( DMShellSetCoarsen(self.dm, NULL) ) + CHKERR(DMShellSetCoarsen(self.dm, NULL)) def setRefine( self, refine: Callable[[DM, Comm], DM] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to refine the `DMShell`. Logically collective. @@ -433,16 +425,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (refine, args, kargs) self.set_attr('__refine__', context) - CHKERR( DMShellSetRefine(self.dm, DMSHELL_Refine) ) + CHKERR(DMShellSetRefine(self.dm, DMSHELL_Refine)) else: - CHKERR( DMShellSetRefine(self.dm, NULL) ) + CHKERR(DMShellSetRefine(self.dm, NULL)) def setCreateInterpolation( self, create_interpolation: Callable[[DM, DM], tuple[Mat, Vec]] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to create the interpolation operator. Logically collective. @@ -466,16 +457,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (create_interpolation, args, kargs) self.set_attr('__create_interpolation__', context) - CHKERR( DMShellSetCreateInterpolation(self.dm, DMSHELL_CreateInterpolation) ) + CHKERR(DMShellSetCreateInterpolation(self.dm, DMSHELL_CreateInterpolation)) else: - CHKERR( DMShellSetCreateInterpolation(self.dm, NULL) ) + CHKERR(DMShellSetCreateInterpolation(self.dm, NULL)) def setCreateInjection( self, create_injection: Callable[[DM, DM], Mat] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to create the injection operator. Logically collective. @@ -499,16 +489,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (create_injection, args, kargs) self.set_attr('__create_injection__', context) - CHKERR( DMShellSetCreateInjection(self.dm, DMSHELL_CreateInjection) ) + CHKERR(DMShellSetCreateInjection(self.dm, DMSHELL_CreateInjection)) else: - CHKERR( DMShellSetCreateInjection(self.dm, NULL) ) + CHKERR(DMShellSetCreateInjection(self.dm, NULL)) def setCreateRestriction( self, create_restriction: Callable[[DM, DM], Mat] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to create the restriction operator. Logically collective. @@ -532,16 +521,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (create_restriction, args, kargs) self.set_attr('__create_restriction__', context) - CHKERR( DMShellSetCreateRestriction(self.dm, DMSHELL_CreateRestriction) ) + CHKERR(DMShellSetCreateRestriction(self.dm, DMSHELL_CreateRestriction)) else: - CHKERR( DMShellSetCreateRestriction(self.dm, NULL) ) + CHKERR(DMShellSetCreateRestriction(self.dm, NULL)) def setCreateFieldDecomposition( self, decomp: Callable[[DM], tuple[list[str] | None, list[IS] | None, list[DM] | None]] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to create a field decomposition. Logically collective. 
@@ -565,16 +553,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (decomp, args, kargs) self.set_attr('__create_field_decomp__', context) - CHKERR( DMShellSetCreateFieldDecomposition(self.dm, DMSHELL_CreateFieldDecomposition) ) + CHKERR(DMShellSetCreateFieldDecomposition(self.dm, DMSHELL_CreateFieldDecomposition)) else: - CHKERR( DMShellSetCreateFieldDecomposition(self.dm, NULL) ) + CHKERR(DMShellSetCreateFieldDecomposition(self.dm, NULL)) def setCreateDomainDecomposition( self, decomp: Callable[[DM], tuple[list[str] | None, list[IS] | None, list[IS] | None, list[DM] | None]] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to create a domain decomposition. Logically collective. @@ -598,16 +585,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (decomp, args, kargs) self.set_attr('__create_domain_decomp__', context) - CHKERR( DMShellSetCreateDomainDecomposition(self.dm, DMSHELL_CreateDomainDecomposition) ) + CHKERR(DMShellSetCreateDomainDecomposition(self.dm, DMSHELL_CreateDomainDecomposition)) else: - CHKERR( DMShellSetCreateDomainDecomposition(self.dm, NULL) ) + CHKERR(DMShellSetCreateDomainDecomposition(self.dm, NULL)) def setCreateDomainDecompositionScatters( self, scatter: Callable[[DM, list[DM]], tuple[list[Scatter], list[Scatter], list[Scatter]]] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to create the scatter contexts for domain decomposition. Logically collective. @@ -631,16 +617,15 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (scatter, args, kargs) self.set_attr('__create_domain_decomp_scatters__', context) - CHKERR( DMShellSetCreateDomainDecompositionScatters(self.dm, DMSHELL_CreateDomainDecompositionScatters) ) + CHKERR(DMShellSetCreateDomainDecompositionScatters(self.dm, DMSHELL_CreateDomainDecompositionScatters)) else: - CHKERR( DMShellSetCreateDomainDecompositionScatters(self.dm, NULL) ) + CHKERR(DMShellSetCreateDomainDecompositionScatters(self.dm, NULL)) def setCreateSubDM( self, create_subdm: Callable[[DM, Sequence[int]], tuple[IS, DM]] | None, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the routine used to create a sub DM from the `DMShell`. Logically collective. 
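The DMShell setters above take Python callables matching the annotated Callable types. A minimal sketch, assuming an illustrative fixed global size of 100:

    from petsc4py import PETSc

    def create_gvec(dm):                 # Callable[[DM], Vec]
        return PETSc.Vec().createMPI(100, comm=dm.getComm())

    def create_matrix(dm):               # Callable[[DM], Mat]
        A = PETSc.Mat().createAIJ(100, comm=dm.getComm())
        A.setUp()
        return A

    shell = PETSc.DMShell().create(comm=PETSc.COMM_WORLD)
    shell.setCreateGlobalVector(create_gvec)
    shell.setCreateMatrix(create_matrix)
    x = shell.createGlobalVec()          # dispatches to create_gvec
    A = shell.createMat()                # dispatches to create_matrix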
@@ -664,6 +649,6 @@ cdef class DMShell(DM): if kargs is None: kargs = {} context = (create_subdm, args, kargs) self.set_attr('__create_subdm__', context) - CHKERR( DMShellSetCreateSubDM(self.dm, DMSHELL_CreateSubDM) ) + CHKERR(DMShellSetCreateSubDM(self.dm, DMSHELL_CreateSubDM)) else: - CHKERR( DMShellSetCreateSubDM(self.dm, NULL) ) + CHKERR(DMShellSetCreateSubDM(self.dm, NULL)) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMStag.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMStag.pyx index 1bb5616cee1..24d3a0c2715 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMStag.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMStag.pyx @@ -1,11 +1,14 @@ # -------------------------------------------------------------------- class DMStagStencilType(object): + """Stencil types.""" STAR = DMSTAG_STENCIL_STAR BOX = DMSTAG_STENCIL_BOX NONE = DMSTAG_STENCIL_NONE + class DMStagStencilLocation(object): + """Stencil location types.""" NULLLOC = DMSTAG_NULL_LOCATION BACK_DOWN_LEFT = DMSTAG_BACK_DOWN_LEFT BACK_DOWN = DMSTAG_BACK_DOWN @@ -37,6 +40,7 @@ class DMStagStencilLocation(object): # -------------------------------------------------------------------- + cdef class DMStag(DM): """A DM object representing a "staggered grid" or a structured cell complex.""" @@ -54,8 +58,7 @@ cdef class DMStag(DM): proc_sizes: tuple[int, ...] | None = None, ownership_ranges: tuple[Sequence[int], ...] | None = None, comm: Comm | None = None, - setUp: bool | None = False, - ) -> Self: + setUp: bool | None = False) -> Self: """Create a DMDA object. Collective. @@ -145,14 +148,14 @@ cdef class DMStag(DM): # create cdef PetscDM newda = NULL if dim == 1: - CHKERR( DMStagCreate1d(ccomm, btx, M, dof0, dof1, stype, swidth, lx, &newda) ) + CHKERR(DMStagCreate1d(ccomm, btx, M, dof0, dof1, stype, swidth, lx, &newda)) if dim == 2: - CHKERR( DMStagCreate2d(ccomm, btx, bty, M, N, m, n, dof0, dof1, dof2, stype, swidth, lx, ly, &newda) ) + CHKERR(DMStagCreate2d(ccomm, btx, bty, M, N, m, n, dof0, dof1, dof2, stype, swidth, lx, ly, &newda)) if dim == 3: - CHKERR( DMStagCreate3d(ccomm, btx, bty, btz, M, N, P, m, n, p, dof0, dof1, dof2, dof3, stype, swidth, lx, ly, lz, &newda) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newda + CHKERR(DMStagCreate3d(ccomm, btx, bty, btz, M, N, P, m, n, p, dof0, dof1, dof2, dof3, stype, swidth, lx, ly, lz, &newda)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newda if setUp: - CHKERR( DMSetUp(self.dm) ) + CHKERR(DMSetUp(self.dm)) return self # Setters @@ -175,7 +178,7 @@ cdef class DMStag(DM): """ cdef PetscInt sw = asInt(swidth) - CHKERR( DMStagSetStencilWidth(self.dm, sw) ) + CHKERR(DMStagSetStencilWidth(self.dm, sw)) def setStencilType(self, stenciltype: StencilType | str) -> None: """Set elementwise ghost/halo stencil type. @@ -193,12 +196,11 @@ cdef class DMStag(DM): """ cdef PetscDMStagStencilType stype = asStagStencil(stenciltype) - CHKERR( DMStagSetStencilType(self.dm, stype) ) + CHKERR(DMStagSetStencilType(self.dm, stype)) def setBoundaryTypes( self, - boundary_types: tuple[DM.BoundaryType | int | str | bool, ...], - ) -> None: + boundary_types: tuple[DM.BoundaryType | int | str | bool, ...]) -> None: """Set the boundary types. Logically collective. 
@@ -217,7 +219,7 @@ cdef class DMStag(DM): cdef PetscDMBoundaryType bty = DM_BOUNDARY_NONE cdef PetscDMBoundaryType btz = DM_BOUNDARY_NONE asBoundary(boundary_types, &btx, &bty, &btz) - CHKERR( DMStagSetBoundaryTypes(self.dm, btx, bty, btz) ) + CHKERR(DMStagSetBoundaryTypes(self.dm, btx, bty, btz)) def setDof(self, dofs: tuple[int, ...]) -> None: """Set DOFs/stratum. @@ -237,9 +239,9 @@ cdef class DMStag(DM): """ cdef tuple gdofs = tuple(dofs) - cdef PetscInt gdim=PETSC_DECIDE, dof0=1, dof1=0, dof2=0, dof3=0 - gdim = asDofs(gdofs, &dof0, &dof1, &dof2, &dof3) - CHKERR( DMStagSetDOF(self.dm, dof0, dof1, dof2, dof3) ) + cdef PetscInt dof0=1, dof1=0, dof2=0, dof3=0 + asDofs(gdofs, &dof0, &dof1, &dof2, &dof3) + CHKERR(DMStagSetDOF(self.dm, dof0, dof1, dof2, dof3)) def setGlobalSizes(self, sizes: tuple[int, ...]) -> None: """Set global element counts in each dimension. @@ -257,9 +259,9 @@ cdef class DMStag(DM): """ cdef tuple gsizes = tuple(sizes) - cdef PetscInt gdim=PETSC_DECIDE, M=1, N=1, P=1 - gdim = asStagDims(gsizes, &M, &N, &P) - CHKERR( DMStagSetGlobalSizes(self.dm, M, N, P) ) + cdef PetscInt M=1, N=1, P=1 + asStagDims(gsizes, &M, &N, &P) + CHKERR(DMStagSetGlobalSizes(self.dm, M, N, P)) def setProcSizes(self, sizes: tuple[int, ...]) -> None: """Set the number of processes in each dimension in the global process grid. @@ -277,9 +279,9 @@ cdef class DMStag(DM): """ cdef tuple psizes = tuple(sizes) - cdef PetscInt pdim=PETSC_DECIDE, m=PETSC_DECIDE, n=PETSC_DECIDE, p=PETSC_DECIDE - pdim = asStagDims(psizes, &m, &n, &p) - CHKERR( DMStagSetNumRanks(self.dm, m, n, p) ) + cdef PetscInt m=PETSC_DECIDE, n=PETSC_DECIDE, p=PETSC_DECIDE + asStagDims(psizes, &m, &n, &p) + CHKERR(DMStagSetNumRanks(self.dm, m, n, p)) def setOwnershipRanges(self, ranges: tuple[Sequence[int], ...]) -> None: """Set elements per process in each dimension. @@ -298,10 +300,10 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, m=PETSC_DECIDE, n=PETSC_DECIDE, p=PETSC_DECIDE cdef PetscInt *lx = NULL, *ly = NULL, *lz = NULL - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetNumRanks(self.dm, &m, &n, &p) ) - ownership_ranges = asStagOwnershipRanges(ranges, dim, &m, &n, &p, &lx, &ly, &lz) - CHKERR( DMStagSetOwnershipRanges(self.dm, lx, ly, lz) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetNumRanks(self.dm, &m, &n, &p)) + asStagOwnershipRanges(ranges, dim, &m, &n, &p, &lx, &ly, &lz) + CHKERR(DMStagSetOwnershipRanges(self.dm, lx, ly, lz)) # Getters @@ -326,7 +328,7 @@ cdef class DMStag(DM): """ cdef PetscInt epe=0 - CHKERR( DMStagGetEntriesPerElement(self.dm, &epe) ) + CHKERR(DMStagGetEntriesPerElement(self.dm, &epe)) return toInt(epe) def getStencilWidth(self) -> int: @@ -340,7 +342,7 @@ cdef class DMStag(DM): """ cdef PetscInt swidth=0 - CHKERR( DMStagGetStencilWidth(self.dm, &swidth) ) + CHKERR(DMStagGetStencilWidth(self.dm, &swidth)) return toInt(swidth) def getDof(self) -> tuple[int, ...]: @@ -354,9 +356,9 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, dof0=0, dof1=0, dof2=0, dof3=0 - CHKERR( DMStagGetDOF(self.dm, &dof0, &dof1, &dof2, &dof3) ) - CHKERR( DMGetDimension(self.dm, &dim) ) - return toDofs(dim+1,dof0,dof1,dof2,dof3) + CHKERR(DMStagGetDOF(self.dm, &dof0, &dof1, &dof2, &dof3)) + CHKERR(DMGetDimension(self.dm, &dim)) + return toDofs(dim+1, dof0, dof1, dof2, dof3) def getCorners(self) -> tuple[tuple[int, ...], tuple[int, ...], tuple[int, ...]]: """Return starting element index, width and number of partial elements. 
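A minimal creation sketch for the DMStag setters/getters above; the dof and size tuples are illustrative, and the StencilType attribute is assumed to alias the DMStagStencilType class from this file:

    from petsc4py import PETSc

    stag = PETSc.DMStag().create(
        2,                                    # spatial dimension
        dofs=(0, 1, 1),                       # dof per vertex, face, element
        sizes=(8, 8),                         # global elements in x and y
        stencil_type=PETSc.DMStag.StencilType.BOX,
        stencil_width=1,
        setUp=True)
    starts, widths, extras = stag.getCorners()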
@@ -376,8 +378,8 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, x=0, y=0, z=0, m=0, n=0, p=0, nExtrax=0, nExtray=0, nExtraz=0 - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetCorners(self.dm, &x, &y, &z, &m, &n, &p, &nExtrax, &nExtray, &nExtraz) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetCorners(self.dm, &x, &y, &z, &m, &n, &p, &nExtrax, &nExtray, &nExtraz)) return (asInt(x), asInt(y), asInt(z))[:dim], (asInt(m), asInt(n), asInt(p))[:dim], (asInt(nExtrax), asInt(nExtray), asInt(nExtraz))[:dim] def getGhostCorners(self) -> tuple[tuple[int, ...], tuple[int, ...]]: @@ -391,8 +393,8 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, x=0, y=0, z=0, m=0, n=0, p=0 - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetGhostCorners(self.dm, &x, &y, &z, &m, &n, &p) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetGhostCorners(self.dm, &x, &y, &z, &m, &n, &p)) return (asInt(x), asInt(y), asInt(z))[:dim], (asInt(m), asInt(n), asInt(p))[:dim] def getLocalSizes(self) -> tuple[int, ...]: @@ -408,8 +410,8 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, m=PETSC_DECIDE, n=PETSC_DECIDE, p=PETSC_DECIDE - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetLocalSizes(self.dm, &m, &n, &p) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetLocalSizes(self.dm, &m, &n, &p)) return toStagDims(dim, m, n, p) def getGlobalSizes(self) -> tuple[int, ...]: @@ -423,8 +425,8 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, m=PETSC_DECIDE, n=PETSC_DECIDE, p=PETSC_DECIDE - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetGlobalSizes(self.dm, &m, &n, &p) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetGlobalSizes(self.dm, &m, &n, &p)) return toStagDims(dim, m, n, p) def getProcSizes(self) -> tuple[int, ...]: @@ -438,8 +440,8 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, m=PETSC_DECIDE, n=PETSC_DECIDE, p=PETSC_DECIDE - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetNumRanks(self.dm, &m, &n, &p) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetNumRanks(self.dm, &m, &n, &p)) return toStagDims(dim, m, n, p) def getStencilType(self) -> str: @@ -453,7 +455,7 @@ cdef class DMStag(DM): """ cdef PetscDMStagStencilType stype = DMSTAG_STENCIL_BOX - CHKERR( DMStagGetStencilType(self.dm, &stype) ) + CHKERR(DMStagGetStencilType(self.dm, &stype)) return toStagStencil(stype) def getOwnershipRanges(self) -> tuple[Sequence[int], ...]: @@ -468,9 +470,9 @@ cdef class DMStag(DM): """ cdef PetscInt dim=0, m=0, n=0, p=0 cdef const PetscInt *lx = NULL, *ly = NULL, *lz = NULL - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetNumRanks(self.dm, &m, &n, &p) ) - CHKERR( DMStagGetOwnershipRanges(self.dm, &lx, &ly, &lz) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetNumRanks(self.dm, &m, &n, &p)) + CHKERR(DMStagGetOwnershipRanges(self.dm, &lx, &ly, &lz)) return toStagOwnershipRanges(dim, m, n, p, lx, ly, lz) def getBoundaryTypes(self) -> tuple[str, ...]: @@ -487,8 +489,8 @@ cdef class DMStag(DM): cdef PetscDMBoundaryType btx = DM_BOUNDARY_NONE cdef PetscDMBoundaryType bty = DM_BOUNDARY_NONE cdef PetscDMBoundaryType btz = DM_BOUNDARY_NONE - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetBoundaryTypes(self.dm, &btx, &bty, &btz) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetBoundaryTypes(self.dm, &btx, &bty, &btz)) return toStagBoundaryTypes(dim, btx, bty, btz) def getIsFirstRank(self) -> tuple[int, ...]: @@ -503,8 +505,8 @@ cdef class DMStag(DM): """ cdef PetscBool rank0=PETSC_FALSE, 
rank1=PETSC_FALSE, rank2=PETSC_FALSE cdef PetscInt dim=0 - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetIsFirstRank(self.dm, &rank0, &rank1, &rank2) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetIsFirstRank(self.dm, &rank0, &rank1, &rank2)) return toStagDims(dim, rank0, rank1, rank2) def getIsLastRank(self) -> tuple[int, ...]: @@ -519,8 +521,8 @@ cdef class DMStag(DM): """ cdef PetscBool rank0=PETSC_FALSE, rank1=PETSC_FALSE, rank2=PETSC_FALSE cdef PetscInt dim=0 - CHKERR( DMGetDimension(self.dm, &dim) ) - CHKERR( DMStagGetIsLastRank(self.dm, &rank0, &rank1, &rank2) ) + CHKERR(DMGetDimension(self.dm, &dim)) + CHKERR(DMStagGetIsLastRank(self.dm, &rank0, &rank1, &rank2)) return toStagDims(dim, rank0, rank1, rank2) # Coordinate-related functions @@ -532,8 +534,7 @@ cdef class DMStag(DM): ymin: float = 0, ymax: float = 1, zmin: float = 0, - zmax: float = 1, - ) -> None: + zmax: float = 1) -> None: """Set coordinates to be a uniform grid, storing all values. Collective. @@ -562,7 +563,7 @@ cdef class DMStag(DM): cdef PetscReal _xmin = asReal(xmin), _xmax = asReal(xmax) cdef PetscReal _ymin = asReal(ymin), _ymax = asReal(ymax) cdef PetscReal _zmin = asReal(zmin), _zmax = asReal(zmax) - CHKERR( DMStagSetUniformCoordinatesExplicit(self.dm, _xmin, _xmax, _ymin, _ymax, _zmin, _zmax) ) + CHKERR(DMStagSetUniformCoordinatesExplicit(self.dm, _xmin, _xmax, _ymin, _ymax, _zmin, _zmax)) def setUniformCoordinatesProduct( self, @@ -571,8 +572,7 @@ cdef class DMStag(DM): ymin: float = 0, ymax: float = 1, zmin: float = 0, - zmax: float = 1, - ) -> None: + zmax: float = 1) -> None: """Create uniform coordinates, as a product of 1D arrays. Collective. @@ -605,7 +605,7 @@ cdef class DMStag(DM): cdef PetscReal _xmin = asReal(xmin), _xmax = asReal(xmax) cdef PetscReal _ymin = asReal(ymin), _ymax = asReal(ymax) cdef PetscReal _zmin = asReal(zmin), _zmax = asReal(zmax) - CHKERR( DMStagSetUniformCoordinatesProduct(self.dm, _xmin, _xmax, _ymin, _ymax, _zmin, _zmax) ) + CHKERR(DMStagSetUniformCoordinatesProduct(self.dm, _xmin, _xmax, _ymin, _ymax, _zmin, _zmax)) def setUniformCoordinates( self, @@ -614,8 +614,7 @@ cdef class DMStag(DM): ymin: float = 0, ymax: float = 1, zmin: float = 0, - zmax: float = 1, - ) -> None: + zmax: float = 1) -> None: """Set the coordinates to be a uniform grid.. Collective. @@ -653,7 +652,7 @@ cdef class DMStag(DM): cdef PetscReal _xmin = asReal(xmin), _xmax = asReal(xmax) cdef PetscReal _ymin = asReal(ymin), _ymax = asReal(ymax) cdef PetscReal _zmin = asReal(zmin), _zmax = asReal(zmax) - CHKERR( DMStagSetUniformCoordinates(self.dm, _xmin, _xmax, _ymin, _ymax, _zmin, _zmax) ) + CHKERR(DMStagSetUniformCoordinates(self.dm, _xmin, _xmax, _ymin, _ymax, _zmin, _zmax)) def setCoordinateDMType(self, dmtype: DM.Type) -> None: """Set the type to store coordinates. 
@@ -672,7 +671,7 @@ cdef class DMStag(DM): """ cdef PetscDMType cval = NULL dmtype = str2bytes(dmtype, &cval) - CHKERR( DMStagSetCoordinateDMType(self.dm, cval) ) + CHKERR(DMStagSetCoordinateDMType(self.dm, cval)) # Location slot related functions @@ -696,7 +695,7 @@ cdef class DMStag(DM): cdef PetscInt slot=0 cdef PetscInt comp=asInt(c) cdef PetscDMStagStencilLocation sloc = asStagStencilLocation(loc) - CHKERR( DMStagGetLocationSlot(self.dm, sloc, comp, &slot) ) + CHKERR(DMStagGetLocationSlot(self.dm, sloc, comp, &slot)) return toInt(slot) def getProductCoordinateLocationSlot(self, loc: StencilLocation) -> None: @@ -716,7 +715,7 @@ cdef class DMStag(DM): """ cdef PetscInt slot=0 cdef PetscDMStagStencilLocation sloc = asStagStencilLocation(loc) - CHKERR( DMStagGetProductCoordinateLocationSlot(self.dm, sloc, &slot) ) + CHKERR(DMStagGetProductCoordinateLocationSlot(self.dm, sloc, &slot)) return toInt(slot) def getLocationDof(self, loc: StencilLocation) -> int: @@ -736,7 +735,7 @@ cdef class DMStag(DM): """ cdef PetscInt dof=0 cdef PetscDMStagStencilLocation sloc = asStagStencilLocation(loc) - CHKERR( DMStagGetLocationDOF(self.dm, sloc, &dof) ) + CHKERR(DMStagGetLocationDOF(self.dm, sloc, &dof)) return toInt(dof) # Random other functions @@ -762,7 +761,7 @@ cdef class DMStag(DM): petsc.DMStagMigrateVec """ - CHKERR( DMStagMigrateVec(self.dm, vec.vec, dmTo.dm, vecTo.vec ) ) + CHKERR(DMStagMigrateVec(self.dm, vec.vec, dmTo.dm, vecTo.vec)) def createCompatibleDMStag(self, dofs: tuple[int, ...]) -> DM: """Create a compatible ``DMStag`` with different DOFs/stratum. @@ -780,20 +779,19 @@ cdef class DMStag(DM): """ cdef tuple gdofs = tuple(dofs) - cdef PetscInt gdim=PETSC_DECIDE, dof0=1, dof1=0, dof2=0, dof3=0 - gdim = asDofs(gdofs, &dof0, &dof1, &dof2, &dof3) + cdef PetscInt dof0=1, dof1=0, dof2=0, dof3=0 + asDofs(gdofs, &dof0, &dof1, &dof2, &dof3) cdef PetscDM newda = NULL - CHKERR( DMStagCreateCompatibleDMStag(self.dm, dof0, dof1, dof2, dof3, &newda) ) + CHKERR(DMStagCreateCompatibleDMStag(self.dm, dof0, dof1, dof2, dof3, &newda)) cdef DM newdm = type(self)() - CHKERR( PetscCLEAR(newdm.obj) ); newdm.dm = newda + CHKERR(PetscCLEAR(newdm.obj)); newdm.dm = newda return newdm def VecSplitToDMDA( self, Vec vec, loc: StencilLocation, - c: int, - ) -> tuple[DMDA, Vec]: + c: int) -> tuple[DMDA, Vec]: """Return ``DMDA``, ``Vec`` from a subgrid of a ``DMStag``, its ``Vec``. Collective. 
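A short sketch combining the coordinate and location-slot calls above; ELEMENT is assumed to be among the DMStag.StencilLocation names (only a subset of the enum is visible in this hunk):

    from petsc4py import PETSc

    stag = PETSc.DMStag().create(2, dofs=(0, 1, 1), sizes=(8, 8), setUp=True)
    stag.setUniformCoordinatesExplicit(xmin=0.0, xmax=1.0, ymin=0.0, ymax=1.0)
    # slot index of component 0 of the element-centered unknowns,
    # for addressing entries in local arrays
    slot = stag.getLocationSlot(PETSc.DMStag.StencilLocation.ELEMENT, 0)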
@@ -820,19 +818,19 @@ cdef class DMStag(DM): cdef PetscDMStagStencilLocation sloc = asStagStencilLocation(loc) cdef PetscDM pda = NULL cdef PetscVec pdavec = NULL - CHKERR( DMStagVecSplitToDMDA(self.dm, vec.vec, sloc, pc, &pda, &pdavec) ) + CHKERR(DMStagVecSplitToDMDA(self.dm, vec.vec, sloc, pc, &pda, &pdavec)) cdef DM da = DMDA() - CHKERR( PetscCLEAR(da.obj) ); da.dm = pda + CHKERR(PetscCLEAR(da.obj)); da.dm = pda cdef Vec davec = Vec() - CHKERR( PetscCLEAR(davec.obj) ); davec.vec = pdavec - return (da,davec) + CHKERR(PetscCLEAR(davec.obj)); davec.vec = pdavec + return (da, davec) def getVecArray(self, Vec vec) -> None: - """**Not implemented in petsc4py.**""" + """Not implemented.""" raise NotImplementedError('getVecArray for DMStag not yet implemented in petsc4py') def get1dCoordinatecArrays(self) -> None: - """**Not implemented in petsc4py.**""" + """Not implemented.""" raise NotImplementedError('get1dCoordinatecArrays for DMStag not yet implemented in petsc4py') property dim: diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMSwarm.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMSwarm.pyx index 4767bdd73d0..04e811f49cb 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMSwarm.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMSwarm.pyx @@ -1,22 +1,29 @@ # -------------------------------------------------------------------- class DMSwarmType(object): + """Swarm types.""" BASIC = DMSWARM_BASIC PIC = DMSWARM_PIC + class DMSwarmMigrateType(object): + """Swarm migration types.""" MIGRATE_BASIC = DMSWARM_MIGRATE_BASIC MIGRATE_DMCELLNSCATTER = DMSWARM_MIGRATE_DMCELLNSCATTER MIGRATE_DMCELLEXACT = DMSWARM_MIGRATE_DMCELLEXACT MIGRATE_USER = DMSWARM_MIGRATE_USER + class DMSwarmCollectType(object): + """Swarm collection types.""" COLLECT_BASIC = DMSWARM_COLLECT_BASIC COLLECT_DMDABOUNDINGBOX = DMSWARM_COLLECT_DMDABOUNDINGBOX COLLECT_GENERAL = DMSWARM_COLLECT_GENERAL COLLECT_USER = DMSWARM_COLLECT_USER + class DMSwarmPICLayoutType(object): + """Swarm PIC layout types.""" LAYOUT_REGULAR = DMSWARMPIC_LAYOUT_REGULAR LAYOUT_GAUSS = DMSWARMPIC_LAYOUT_GAUSS LAYOUT_SUBDIVISION = DMSWARMPIC_LAYOUT_SUBDIVISION @@ -50,9 +57,9 @@ cdef class DMSwarm(DM): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDM newdm = NULL - CHKERR( DMCreate(ccomm, &newdm) ) - CHKERR( PetscCLEAR(self.obj) ); self.dm = newdm - CHKERR( DMSetType(self.dm, DMSWARM) ) + CHKERR(DMCreate(ccomm, &newdm)) + CHKERR(PetscCLEAR(self.obj)); self.dm = newdm + CHKERR(DMSetType(self.dm, DMSWARM)) return self def createGlobalVectorFromField(self, fieldname: str) -> Vec: @@ -76,7 +83,7 @@ cdef class DMSwarm(DM): cdef const char *cfieldname = NULL cdef Vec vg = Vec() fieldname = str2bytes(fieldname, &cfieldname) - CHKERR( DMSwarmCreateGlobalVectorFromField(self.dm, cfieldname, &vg.vec) ) + CHKERR(DMSwarmCreateGlobalVectorFromField(self.dm, cfieldname, &vg.vec)) return vg def destroyGlobalVectorFromField(self, fieldname: str) -> None: @@ -97,7 +104,7 @@ cdef class DMSwarm(DM): cdef const char *cfieldname = NULL cdef PetscVec vec = NULL fieldname = str2bytes(fieldname, &cfieldname) - CHKERR( DMSwarmDestroyGlobalVectorFromField(self.dm, cfieldname, &vec) ) + CHKERR(DMSwarmDestroyGlobalVectorFromField(self.dm, cfieldname, &vec)) def createLocalVectorFromField(self, fieldname: str) -> Vec: """Create a local `Vec` object associated with a given field. 
@@ -120,7 +127,7 @@ cdef class DMSwarm(DM): cdef const char *cfieldname = NULL cdef Vec vl = Vec() fieldname = str2bytes(fieldname, &cfieldname) - CHKERR( DMSwarmCreateLocalVectorFromField(self.dm, cfieldname, &vl.vec) ) + CHKERR(DMSwarmCreateLocalVectorFromField(self.dm, cfieldname, &vl.vec)) return vl def destroyLocalVectorFromField(self, fieldname: str) -> None: @@ -139,9 +146,9 @@ cdef class DMSwarm(DM): """ cdef const char *cfieldname = NULL - cdef PetscVec vec + cdef PetscVec vec = NULL fieldname = str2bytes(fieldname, &cfieldname) - CHKERR( DMSwarmDestroyLocalVectorFromField(self.dm, cfieldname, &vec) ) + CHKERR(DMSwarmDestroyLocalVectorFromField(self.dm, cfieldname, &vec)) def initializeFieldRegister(self) -> None: """Initiate the registration of fields to a `DMSwarm`. @@ -155,7 +162,7 @@ cdef class DMSwarm(DM): finalizeFieldRegister, petsc.DMSwarmInitializeFieldRegister """ - CHKERR( DMSwarmInitializeFieldRegister(self.dm) ) + CHKERR(DMSwarmInitializeFieldRegister(self.dm)) def finalizeFieldRegister(self) -> None: """Finalize the registration of fields to a `DMSwarm`. @@ -167,7 +174,7 @@ cdef class DMSwarm(DM): initializeFieldRegister, petsc.DMSwarmFinalizeFieldRegister """ - CHKERR( DMSwarmFinalizeFieldRegister(self.dm) ) + CHKERR(DMSwarmFinalizeFieldRegister(self.dm)) def setLocalSizes(self, nlocal: int, buffer: int) -> Self: """Set the length of all registered fields on the `DMSwarm`. @@ -188,7 +195,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt cnlocal = asInt(nlocal) cdef PetscInt cbuffer = asInt(buffer) - CHKERR( DMSwarmSetLocalSizes(self.dm, cnlocal, cbuffer) ) + CHKERR(DMSwarmSetLocalSizes(self.dm, cnlocal, cbuffer)) return self def registerField(self, fieldname: str, blocksize: int, dtype: dtype = ScalarType) -> None: @@ -219,7 +226,7 @@ cdef class DMSwarm(DM): if dtype == ComplexType: ctype = PETSC_COMPLEX assert ctype != PETSC_DATATYPE_UNKNOWN fieldname = str2bytes(fieldname, &cfieldname) - CHKERR( DMSwarmRegisterPetscDatatypeField(self.dm, cfieldname, cblocksize, ctype) ) + CHKERR(DMSwarmRegisterPetscDatatypeField(self.dm, cfieldname, cblocksize, ctype)) def getField(self, fieldname: str) -> Sequence[int | float | complex]: """Return arrays storing all entries associated with a field. @@ -253,8 +260,8 @@ cdef class DMSwarm(DM): cdef PetscReal *data = NULL cdef PetscInt nlocal = 0 fieldname = str2bytes(fieldname, &cfieldname) - CHKERR( DMSwarmGetField(self.dm, cfieldname, &blocksize, &ctype, &data) ) - CHKERR( DMSwarmGetLocalSize(self.dm, &nlocal) ) + CHKERR(DMSwarmGetField(self.dm, cfieldname, &blocksize, &ctype, &data)) + CHKERR(DMSwarmGetLocalSize(self.dm, &nlocal)) cdef int typenum = -1 if ctype == PETSC_INT: typenum = NPY_PETSC_INT if ctype == PETSC_REAL: typenum = NPY_PETSC_REAL @@ -283,7 +290,7 @@ cdef class DMSwarm(DM): cdef PetscInt blocksize = 0 cdef PetscDataType ctype = PETSC_DATATYPE_UNKNOWN fieldname = str2bytes(fieldname, &cfieldname) - CHKERR( DMSwarmRestoreField(self.dm, cfieldname, &blocksize, &ctype, 0) ) + CHKERR(DMSwarmRestoreField(self.dm, cfieldname, &blocksize, &ctype, 0)) def vectorDefineField(self, fieldname: str) -> None: """Set the field from which to define a `Vec` object. @@ -305,7 +312,7 @@ cdef class DMSwarm(DM): """ cdef const char *cval = NULL fieldname = str2bytes(fieldname, &cval) - CHKERR( DMSwarmVectorDefineField(self.dm, cval) ) + CHKERR(DMSwarmVectorDefineField(self.dm, cval)) def addPoint(self) -> None: """Add space for one new point in the `DMSwarm`. 
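The field-registration workflow above as a minimal sketch; the field name and sizes are illustrative:

    from petsc4py import PETSc

    swarm = PETSc.DMSwarm().create(comm=PETSc.COMM_WORLD)
    swarm.setDimension(2)
    swarm.initializeFieldRegister()
    swarm.registerField('mass', 1, dtype=PETSc.RealType)
    swarm.finalizeFieldRegister()
    swarm.setLocalSizes(16, 4)         # 16 points locally, buffer of 4
    mass = swarm.getField('mass')      # NumPy view of the raw field storage
    mass[...] = 1.0
    swarm.restoreField('mass')         # must be paired with getField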
@@ -317,7 +324,7 @@ cdef class DMSwarm(DM): petsc.DMSwarmAddPoint """ - CHKERR( DMSwarmAddPoint(self.dm) ) + CHKERR(DMSwarmAddPoint(self.dm)) def addNPoints(self, npoints: int) -> None: """Add space for a number of new points in the `DMSwarm`. @@ -335,7 +342,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt cnpoints = asInt(npoints) - CHKERR( DMSwarmAddNPoints(self.dm, cnpoints) ) + CHKERR(DMSwarmAddNPoints(self.dm, cnpoints)) def removePoint(self) -> None: """Remove the last point from the `DMSwarm`. @@ -347,7 +354,7 @@ cdef class DMSwarm(DM): petsc.DMSwarmRemovePoint """ - CHKERR( DMSwarmRemovePoint(self.dm) ) + CHKERR(DMSwarmRemovePoint(self.dm)) def removePointAtIndex(self, index: int) -> None: """Remove a specific point from the `DMSwarm`. @@ -365,7 +372,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt cindex = asInt(index) - CHKERR( DMSwarmRemovePointAtIndex(self.dm, cindex) ) + CHKERR(DMSwarmRemovePointAtIndex(self.dm, cindex)) def copyPoint(self, pi: int, pj: int) -> None: """Copy point pi to point pj in the `DMSwarm`. @@ -386,7 +393,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt cpi = asInt(pi) cdef PetscInt cpj = asInt(pj) - CHKERR( DMSwarmCopyPoint(self.dm, cpi, cpj) ) + CHKERR(DMSwarmCopyPoint(self.dm, cpi, cpj)) def getLocalSize(self) -> int: """Return the local length of fields registered. @@ -399,7 +406,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt size = asInt(0) - CHKERR( DMSwarmGetLocalSize(self.dm, &size) ) + CHKERR(DMSwarmGetLocalSize(self.dm, &size)) return toInt(size) def getSize(self) -> int: @@ -413,7 +420,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt size = asInt(0) - CHKERR( DMSwarmGetSize(self.dm, &size) ) + CHKERR(DMSwarmGetSize(self.dm, &size)) return toInt(size) def migrate(self, remove_sent_points: bool = False) -> None: @@ -433,7 +440,7 @@ cdef class DMSwarm(DM): """ cdef PetscBool remove_pts = asBool(remove_sent_points) - CHKERR( DMSwarmMigrate(self.dm, remove_pts) ) + CHKERR(DMSwarmMigrate(self.dm, remove_pts)) def collectViewCreate(self) -> None: """Apply a collection method and gather points in neighbor ranks. @@ -445,7 +452,7 @@ cdef class DMSwarm(DM): collectViewDestroy, petsc.DMSwarmCollectViewCreate """ - CHKERR( DMSwarmCollectViewCreate(self.dm) ) + CHKERR(DMSwarmCollectViewCreate(self.dm)) def collectViewDestroy(self) -> None: """Reset the `DMSwarm` to the size prior to calling `collectViewCreate`. @@ -457,7 +464,7 @@ cdef class DMSwarm(DM): collectViewCreate, petsc.DMSwarmCollectViewDestroy """ - CHKERR( DMSwarmCollectViewDestroy(self.dm) ) + CHKERR(DMSwarmCollectViewDestroy(self.dm)) def setCellDM(self, DM dm) -> None: """Attach a `DM` to a `DMSwarm`. @@ -474,7 +481,7 @@ cdef class DMSwarm(DM): getCellDM, petsc.DMSwarmSetCellDM """ - CHKERR( DMSwarmSetCellDM(self.dm, dm.dm) ) + CHKERR(DMSwarmSetCellDM(self.dm, dm.dm)) def getCellDM(self) -> DM: """Return `DM` cell attached to `DMSwarm`. 
@@ -487,10 +494,10 @@ cdef class DMSwarm(DM): """ cdef PetscDM newdm = NULL - CHKERR( DMSwarmGetCellDM(self.dm, &newdm) ) + CHKERR(DMSwarmGetCellDM(self.dm, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) return dm def setType(self, dmswarm_type: Type | str) -> None: @@ -509,15 +516,14 @@ cdef class DMSwarm(DM): """ cdef PetscDMSwarmType cval = dmswarm_type - CHKERR( DMSwarmSetType(self.dm, cval) ) + CHKERR(DMSwarmSetType(self.dm, cval)) def setPointsUniformCoordinates( self, min: Sequence[float], max: Sequence[float], npoints: Sequence[int], - mode: InsertMode | None = None, - ) -> Self: + mode: InsertMode | None = None) -> Self: """Set point coordinates in a `DMSwarm` on a regular (ijk) grid. Collective. @@ -542,7 +548,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt dim = asInt(0) - CHKERR( DMGetDimension(self.dm, &dim) ) + CHKERR(DMGetDimension(self.dm, &dim)) cdef PetscReal cmin[3] cmin[0] = cmin[1] = cmin[2] = asReal(0.) for i from 0 <= i < dim: cmin[i] = min[i] @@ -553,15 +559,14 @@ cdef class DMSwarm(DM): cnpoints[0] = cnpoints[1] = cnpoints[2] = asInt(0) for i from 0 <= i < dim: cnpoints[i] = npoints[i] cdef PetscInsertMode cmode = insertmode(mode) - CHKERR( DMSwarmSetPointsUniformCoordinates(self.dm, cmin, cmax, cnpoints, cmode) ) + CHKERR(DMSwarmSetPointsUniformCoordinates(self.dm, cmin, cmax, cnpoints, cmode)) return self def setPointCoordinates( self, coordinates: Sequence[float], redundant: bool = False, - mode: InsertMode | None = None - ) -> None: + mode: InsertMode | None = None) -> None: """Set point coordinates in a `DMSwarm` from a user-defined list. Collective. @@ -586,12 +591,12 @@ cdef class DMSwarm(DM): if PyArray_ISFORTRAN(xyz): xyz = PyArray_Copy(xyz) if PyArray_NDIM(xyz) != 2: raise ValueError( ("coordinates must have two dimensions: " - "coordinates.ndim=%d") % (PyArray_NDIM(xyz)) ) + "coordinates.ndim=%d") % (PyArray_NDIM(xyz))) cdef PetscInt cnpoints = PyArray_DIM(xyz, 0) cdef PetscBool credundant = asBool(redundant) cdef PetscInsertMode cmode = insertmode(mode) cdef PetscReal *coords = PyArray_DATA(xyz) - CHKERR( DMSwarmSetPointCoordinates(self.dm, cnpoints, coords, credundant, cmode) ) + CHKERR(DMSwarmSetPointCoordinates(self.dm, cnpoints, coords, credundant, cmode)) def insertPointUsingCellDM(self, layoutType: PICLayoutType, fill_param: int) -> None: """Insert point coordinates within each cell. @@ -613,7 +618,7 @@ cdef class DMSwarm(DM): """ cdef PetscDMSwarmPICLayoutType clayoutType = layoutType cdef PetscInt cfill_param = asInt(fill_param) - CHKERR( DMSwarmInsertPointsUsingCellDM(self.dm, clayoutType, cfill_param) ) + CHKERR(DMSwarmInsertPointsUsingCellDM(self.dm, clayoutType, cfill_param)) def setPointCoordinatesCellwise(self, coordinates: Sequence[float]) -> None: """Insert point coordinates within each cell. @@ -637,10 +642,10 @@ cdef class DMSwarm(DM): if PyArray_ISFORTRAN(xyz): xyz = PyArray_Copy(xyz) if PyArray_NDIM(xyz) != 2: raise ValueError( ("coordinates must have two dimensions: " - "coordinates.ndim=%d") % (PyArray_NDIM(xyz)) ) + "coordinates.ndim=%d") % (PyArray_NDIM(xyz))) cdef PetscInt cnpoints = PyArray_DIM(xyz, 0) cdef PetscReal *coords = PyArray_DATA(xyz) - CHKERR( DMSwarmSetPointCoordinatesCellwise(self.dm, cnpoints, coords) ) + CHKERR(DMSwarmSetPointCoordinatesCellwise(self.dm, cnpoints, coords)) def viewFieldsXDMF(self, filename: str, fieldnames: Sequence[str]) -> None: """Write a selection of `DMSwarm` fields to an XDMF3 file. 
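A minimal particle-in-cell sketch for the cell-DM and insertion calls above; DMSwarm.Type and DMSwarm.PICLayoutType are assumed aliases of the enum classes defined in this file:

    from petsc4py import PETSc

    mesh = PETSc.DMPlex().createBoxMesh([4, 4])
    swarm = PETSc.DMSwarm().create(comm=mesh.getComm())
    swarm.setType(PETSc.DMSwarm.Type.PIC)
    swarm.setDimension(mesh.getDimension())
    swarm.setCellDM(mesh)
    swarm.initializeFieldRegister()
    swarm.finalizeFieldRegister()
    # two Gauss points per cell direction
    swarm.insertPointUsingCellDM(PETSc.DMSwarm.PICLayoutType.LAYOUT_GAUSS, 2)
    swarm.migrate(remove_sent_points=True)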
@@ -664,12 +669,12 @@ cdef class DMSwarm(DM): filename = str2bytes(filename, &cfilename) cdef PetscInt cnfields = len(fieldnames) cdef const char** cfieldnames = NULL - cdef object tmp = oarray_p(empty_p(cnfields), NULL, &cfieldnames) + cdef object unused = oarray_p(empty_p(cnfields), NULL, &cfieldnames) fieldnames = list(fieldnames) for i from 0 <= i < cnfields: fieldnames[i] = str2bytes(fieldnames[i], &cval) cfieldnames[i] = cval - CHKERR( DMSwarmViewFieldsXDMF(self.dm, cfilename, cnfields, cfieldnames ) ) + CHKERR(DMSwarmViewFieldsXDMF(self.dm, cfilename, cnfields, cfieldnames)) def viewXDMF(self, filename: str) -> None: """Write this `DMSwarm` fields to an XDMF3 file. @@ -688,7 +693,7 @@ cdef class DMSwarm(DM): """ cdef const char *cval = NULL filename = str2bytes(filename, &cval) - CHKERR( DMSwarmViewXDMF(self.dm, cval) ) + CHKERR(DMSwarmViewXDMF(self.dm, cval)) def sortGetAccess(self) -> None: """Setup up a `DMSwarm` point sort context. @@ -706,7 +711,7 @@ cdef class DMSwarm(DM): sortRestoreAccess, petsc.DMSwarmSortGetAccess """ - CHKERR( DMSwarmSortGetAccess(self.dm) ) + CHKERR(DMSwarmSortGetAccess(self.dm)) def sortRestoreAccess(self) -> None: """Invalidate the `DMSwarm` point sorting context. @@ -718,7 +723,7 @@ cdef class DMSwarm(DM): sortGetAccess, petsc.DMSwarmSortRestoreAccess """ - CHKERR( DMSwarmSortRestoreAccess(self.dm) ) + CHKERR(DMSwarmSortRestoreAccess(self.dm)) def sortGetPointsPerCell(self, e: int) -> list[int]: """Create an array of point indices for all points in a cell. @@ -739,7 +744,7 @@ cdef class DMSwarm(DM): cdef PetscInt cnpoints = asInt(0) cdef PetscInt *cpidlist = NULL cdef list pidlist = [] - CHKERR( DMSwarmSortGetPointsPerCell(self.dm, ce, &cnpoints, &cpidlist) ) + CHKERR(DMSwarmSortGetPointsPerCell(self.dm, ce, &cnpoints, &cpidlist)) npoints = asInt(cnpoints) for i from 0 <= i < npoints: pidlist.append(asInt(cpidlist[i])) return pidlist @@ -761,7 +766,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt ce = asInt(e) cdef PetscInt npoints = asInt(0) - CHKERR( DMSwarmSortGetNumberOfPointsPerCell(self.dm, ce, &npoints) ) + CHKERR(DMSwarmSortGetNumberOfPointsPerCell(self.dm, ce, &npoints)) return toInt(npoints) def sortGetIsValid(self) -> bool: @@ -777,7 +782,7 @@ cdef class DMSwarm(DM): """ cdef PetscBool isValid = asBool(False) - CHKERR( DMSwarmSortGetIsValid(self.dm, &isValid) ) + CHKERR(DMSwarmSortGetIsValid(self.dm, &isValid)) return toBool(isValid) def sortGetSizes(self) -> tuple[int, int]: @@ -799,7 +804,7 @@ cdef class DMSwarm(DM): """ cdef PetscInt ncells = asInt(0) cdef PetscInt npoints = asInt(0) - CHKERR( DMSwarmSortGetSizes(self.dm, &ncells, &npoints) ) + CHKERR(DMSwarmSortGetSizes(self.dm, &ncells, &npoints)) return (toInt(ncells), toInt(npoints)) def projectFields(self, DM dm, fieldnames: Sequence[str], vecs: Sequence[Vec], mode: ScatterModeSpec = None) -> None: @@ -822,16 +827,16 @@ cdef class DMSwarm(DM): cdef const char *cval = NULL cdef PetscInt cnfields = len(fieldnames) cdef const char** cfieldnames = NULL - cdef object tmp = oarray_p(empty_p(cnfields), NULL, &cfieldnames) - cdef PetscVec *cfieldvecs - cdef object tmp2 = oarray_p(empty_p(cnfields), NULL, &cfieldvecs) + cdef object unused = oarray_p(empty_p(cnfields), NULL, &cfieldnames) + cdef PetscVec *cfieldvecs = NULL + cdef object unused2 = oarray_p(empty_p(cnfields), NULL, &cfieldvecs) cdef PetscScatterMode cmode = scattermode(mode) fieldnames = list(fieldnames) for i from 0 <= i < cnfields: fieldnames[i] = str2bytes(fieldnames[i], &cval) cfieldnames[i] = cval cfieldvecs[i] = 
(<Vec?>(vecs[i])).vec - CHKERR( DMSwarmProjectFields(self.dm, dm.dm, cnfields, cfieldnames, cfieldvecs, cmode) ) + CHKERR(DMSwarmProjectFields(self.dm, dm.dm, cnfields, cfieldnames, cfieldvecs, cmode)) return diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DMUtils.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DMUtils.pyx index 4c1e91bacc0..b9bad6f7fc7 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DMUtils.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DMUtils.pyx @@ -27,8 +27,8 @@ cdef class DMInterpolation: """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_SELF) cdef PetscDMInterpolation newdminterp = NULL - CHKERR( DMInterpolationCreate(ccomm, &newdminterp) ) - CHKERR( DMInterpolationDestroy(&self.dminterp)) + CHKERR(DMInterpolationCreate(ccomm, &newdminterp)) + CHKERR(DMInterpolationDestroy(&self.dminterp)) self.dminterp = newdminterp return self @@ -42,7 +42,7 @@ cdef class DMInterpolation: create, petsc.DMInterpolationDestroy """ - CHKERR( DMInterpolationDestroy(&self.dminterp)) + CHKERR(DMInterpolationDestroy(&self.dminterp)) return self def evaluate(self, DM dm, Vec x, Vec v=None) -> Vec: @@ -67,8 +67,8 @@ cdef class DMInterpolation: if v is None: v = Vec() if v.vec == NULL: - CHKERR( DMInterpolationGetVector(self.dminterp, &v.vec ) ) - CHKERR( DMInterpolationEvaluate(self.dminterp, dm.dm, x.vec, v.vec ) ) + CHKERR(DMInterpolationGetVector(self.dminterp, &v.vec)) + CHKERR(DMInterpolationEvaluate(self.dminterp, dm.dm, x.vec, v.vec)) return v def getCoordinates(self) -> Vec: @@ -85,8 +85,8 @@ cdef class DMInterpolation: """ cdef Vec coords = Vec() - CHKERR( DMInterpolationGetCoordinates(self.dminterp, &coords.vec) ) - CHKERR( PetscINCREF(coords.obj) ) + CHKERR(DMInterpolationGetCoordinates(self.dminterp, &coords.vec)) + CHKERR(PetscINCREF(coords.obj)) return coords def getDim(self) -> int: @@ -100,7 +100,7 @@ cdef class DMInterpolation: """ cdef PetscInt cdim = 0 - CHKERR( DMInterpolationGetDim(self.dminterp, &cdim) ) + CHKERR(DMInterpolationGetDim(self.dminterp, &cdim)) return toInt(cdim) def getDof(self) -> int: @@ -114,7 +114,7 @@ cdef class DMInterpolation: """ cdef PetscInt cdof = 0 - CHKERR( DMInterpolationGetDof(self.dminterp, &cdof) ) + CHKERR(DMInterpolationGetDof(self.dminterp, &cdof)) return toInt(cdof) def setDim(self, dim: int) -> None: @@ -133,7 +133,7 @@ cdef class DMInterpolation: """ cdef PetscInt cdim = asInt(dim) - CHKERR( DMInterpolationSetDim(self.dminterp, cdim) ) + CHKERR(DMInterpolationSetDim(self.dminterp, cdim)) def setDof(self, dof: int) -> None: """Set the number of fields interpolated at a point. @@ -151,14 +151,13 @@ cdef class DMInterpolation: """ cdef PetscInt cdof = asInt(dof) - CHKERR( DMInterpolationSetDof(self.dminterp, cdof) ) + CHKERR(DMInterpolationSetDof(self.dminterp, cdof)) def setUp( self, DM dm, redundantPoints: bool = False, - ignoreOutsideDomain: bool = False, - ) -> None: + ignoreOutsideDomain: bool = False) -> None: """Compute spatial indices for point location during interpolation. Collective. @@ -181,7 +180,7 @@ cdef class DMInterpolation: """ cdef PetscBool credundantPoints = asBool(redundantPoints) cdef PetscBool cignoreOutsideDomain = asBool(ignoreOutsideDomain) - CHKERR( DMInterpolationSetUp(self.dminterp, dm.dm, credundantPoints, cignoreOutsideDomain) ) + CHKERR(DMInterpolationSetUp(self.dminterp, dm.dm, credundantPoints, cignoreOutsideDomain)) def getVector(self) -> Vec: """Return a `Vec` which can hold all the interpolated field values.
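A minimal end-to-end sketch for DMInterpolation; it assumes dm already carries a discretization (so evaluate has a field to sample) and that interpolation points have been registered through the underlying DMInterpolationAddPoints facility:

    from petsc4py import PETSc

    interp = PETSc.DMInterpolation().create(comm=dm.getComm())
    interp.setDim(dm.getDimension())
    interp.setDof(1)
    # ... register interpolation points here (the C-level
    #     DMInterpolationAddPoints step) ...
    interp.setUp(dm)
    x = dm.createGlobalVec()           # the field to sample
    v = interp.evaluate(dm, x)         # values at the registered points
    interp.restoreVector(v)
    interp.destroy()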
@@ -196,7 +195,7 @@ cdef class DMInterpolation: """ cdef Vec vec = Vec() - CHKERR( DMInterpolationGetVector(self.dminterp, &vec.vec)) + CHKERR(DMInterpolationGetVector(self.dminterp, &vec.vec)) return vec def restoreVector(self, Vec vec) -> None: @@ -214,4 +213,4 @@ cdef class DMInterpolation: getVector, petsc.DMInterpolationRestoreVector """ - CHKERR( DMInterpolationRestoreVector(self.dminterp, &vec.vec) ) + CHKERR(DMInterpolationRestoreVector(self.dminterp, &vec.vec)) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DS.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DS.pyx index eaaa35bcaaf..e23116d1b54 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DS.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DS.pyx @@ -1,10 +1,12 @@ # -------------------------------------------------------------------- class DSType(object): + """The Discrete System types.""" BASIC = S_(PETSCDSBASIC) # -------------------------------------------------------------------- + cdef class DS(Object): """Discrete System object.""" @@ -33,7 +35,7 @@ cdef class DS(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscDSView(self.ds, vwr) ) + CHKERR(PetscDSView(self.ds, vwr)) def destroy(self) -> Self: """Destroy the discrete system. @@ -45,7 +47,7 @@ cdef class DS(Object): create, petsc.PetscDSDestroy """ - CHKERR( PetscDSDestroy(&self.ds) ) + CHKERR(PetscDSDestroy(&self.ds)) return self def create(self, comm: Comm | None = None) -> Self: @@ -67,8 +69,8 @@ cdef class DS(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDS newds = NULL - CHKERR( PetscDSCreate(ccomm, &newds) ) - CHKERR( PetscCLEAR(self.obj) ); self.ds = newds + CHKERR(PetscDSCreate(ccomm, &newds)) + CHKERR(PetscCLEAR(self.obj)); self.ds = newds return self def setType(self, ds_type: Type | str) -> None: @@ -88,7 +90,7 @@ cdef class DS(Object): """ cdef PetscDSType cval = NULL ds_type = str2bytes(ds_type, &cval) - CHKERR( PetscDSSetType(self.ds, cval) ) + CHKERR(PetscDSSetType(self.ds, cval)) def getType(self) -> str: """Return the type of the discrete system. @@ -101,7 +103,7 @@ cdef class DS(Object): """ cdef PetscDSType cval = NULL - CHKERR( PetscDSGetType(self.ds, &cval) ) + CHKERR(PetscDSGetType(self.ds, &cval)) return bytes2str(cval) def setFromOptions(self) -> None: @@ -114,7 +116,7 @@ cdef class DS(Object): petsc_options, petsc.PetscDSSetFromOptions """ - CHKERR( PetscDSSetFromOptions(self.ds) ) + CHKERR(PetscDSSetFromOptions(self.ds)) def setUp(self) -> Self: """Construct data structures for the discrete system. 
@@ -126,7 +128,7 @@ cdef class DS(Object): petsc.PetscDSSetUp """ - CHKERR( PetscDSSetUp(self.ds) ) + CHKERR(PetscDSSetUp(self.ds)) return self # @@ -145,7 +147,7 @@ cdef class DS(Object): """ cdef PetscInt dim = 0 - CHKERR( PetscDSGetSpatialDimension(self.ds, &dim) ) + CHKERR(PetscDSGetSpatialDimension(self.ds, &dim)) return toInt(dim) def getCoordinateDimension(self) -> int: @@ -162,7 +164,7 @@ cdef class DS(Object): """ cdef PetscInt dim = 0 - CHKERR( PetscDSGetCoordinateDimension(self.ds, &dim) ) + CHKERR(PetscDSGetCoordinateDimension(self.ds, &dim)) return toInt(dim) def getNumFields(self) -> int: @@ -176,7 +178,7 @@ cdef class DS(Object): """ cdef PetscInt nf = 0 - CHKERR( PetscDSGetNumFields(self.ds, &nf) ) + CHKERR(PetscDSGetNumFields(self.ds, &nf)) return toInt(nf) def getFieldIndex(self, Object disc) -> int: @@ -195,7 +197,7 @@ cdef class DS(Object): """ cdef PetscInt field = 0 - CHKERR( PetscDSGetFieldIndex(self.ds, disc.obj[0], &field) ) + CHKERR(PetscDSGetFieldIndex(self.ds, disc.obj[0], &field)) return toInt(field) def getTotalDimensions(self) -> int: @@ -209,7 +211,7 @@ cdef class DS(Object): """ cdef PetscInt tdim = 0 - CHKERR( PetscDSGetTotalDimension(self.ds, &tdim) ) + CHKERR(PetscDSGetTotalDimension(self.ds, &tdim)) return toInt(tdim) def getTotalComponents(self) -> int: @@ -223,7 +225,7 @@ cdef class DS(Object): """ cdef PetscInt tcmp = 0 - CHKERR( PetscDSGetTotalComponents(self.ds, &tcmp) ) + CHKERR(PetscDSGetTotalComponents(self.ds, &tcmp)) return toInt(tcmp) def getDimensions(self) -> ArrayInt: @@ -237,8 +239,8 @@ cdef class DS(Object): """ cdef PetscInt nf = 0, *dims = NULL - CHKERR( PetscDSGetNumFields(self.ds, &nf) ) - CHKERR( PetscDSGetDimensions(self.ds, &dims) ) + CHKERR(PetscDSGetNumFields(self.ds, &nf)) + CHKERR(PetscDSGetDimensions(self.ds, &dims)) return array_i(nf, dims) def getComponents(self) -> ArrayInt: @@ -252,8 +254,8 @@ cdef class DS(Object): """ cdef PetscInt nf = 0, *cmps = NULL - CHKERR( PetscDSGetNumFields(self.ds, &nf) ) - CHKERR( PetscDSGetComponents(self.ds, &cmps) ) + CHKERR(PetscDSGetNumFields(self.ds, &nf)) + CHKERR(PetscDSGetComponents(self.ds, &cmps)) return array_i(nf, cmps) def setDiscretisation(self, f: int, disc: Object) -> None: @@ -275,9 +277,7 @@ cdef class DS(Object): """ cdef PetscInt cf = asInt(f) cdef FE fe = disc - CHKERR( PetscDSSetDiscretization(self.ds, cf, fe.fe) ) - - + CHKERR(PetscDSSetDiscretization(self.ds, cf, fe.fe)) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/DT.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/DT.pyx index 2d476d0271b..5883004d211 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/DT.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/DT.pyx @@ -23,7 +23,7 @@ cdef class Quad(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscQuadratureView(self.quad, vwr) ) + CHKERR(PetscQuadratureView(self.quad, vwr)) def create(self, comm: Comm | None = None) -> Self: """Create a `Quad` object. 
@@ -42,8 +42,8 @@ cdef class Quad(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscQuadrature newquad = NULL - CHKERR( PetscQuadratureCreate(ccomm, &newquad) ) - CHKERR( PetscCLEAR(self.obj) ); self.quad = newquad + CHKERR(PetscQuadratureCreate(ccomm, &newquad)) + CHKERR(PetscCLEAR(self.obj)); self.quad = newquad return self def duplicate(self) -> Quad: @@ -57,7 +57,7 @@ cdef class Quad(Object): """ cdef Quad newquad = Quad() - CHKERR( PetscQuadratureDuplicate(self.quad, &newquad.quad) ) + CHKERR(PetscQuadratureDuplicate(self.quad, &newquad.quad)) return newquad def destroy(self) -> Self: @@ -70,7 +70,7 @@ cdef class Quad(Object): petsc.PetscQuadratureDestroy """ - CHKERR( PetscQuadratureDestroy(&self.quad) ) + CHKERR(PetscQuadratureDestroy(&self.quad)) return self def getData(self) -> tuple(ArrayReal, ArrayReal): @@ -95,7 +95,7 @@ cdef class Quad(Object): cdef PetscInt cnpoints = 0 cdef const PetscReal *cpoints = NULL cdef const PetscReal *cweights = NULL - CHKERR( PetscQuadratureGetData(self.quad, &cdim, &cnc, &cnpoints, &cpoints, &cweights)) + CHKERR(PetscQuadratureGetData(self.quad, &cdim, &cnc, &cnpoints, &cpoints, &cweights)) return array_r(cnpoints*cdim, cpoints), array_r(cnpoints*cnc, cweights) # FIXME: @@ -112,7 +112,7 @@ cdef class Quad(Object): """ cdef PetscInt cnc = 0 - CHKERR( PetscQuadratureGetNumComponents(self.quad, &cnc) ) + CHKERR(PetscQuadratureGetNumComponents(self.quad, &cnc)) return toInt(cnc) def setNumComponents(self, nc: int) -> None: @@ -131,7 +131,7 @@ cdef class Quad(Object): """ cdef PetscInt cnc = asInt(nc) - CHKERR( PetscQuadratureSetNumComponents(self.quad, cnc) ) + CHKERR(PetscQuadratureSetNumComponents(self.quad, cnc)) def getOrder(self) -> int: """Return the order of the method in the `Quad`. 
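In the same spirit, a small sketch for the Quad wrappers; per the wrapper code, getData returns flat arrays of length npoints*dim (points) and npoints*nc (weights):

>>> from petsc4py import PETSc
>>> q = PETSc.Quad().create(comm=PETSc.COMM_SELF)
>>> q.setNumComponents(1)
>>> q.setOrder(2)                  # order of the quadrature method
>>> (q.getNumComponents(), q.getOrder())
(1, 2)
>>> points, weights = q.getData()  # flat arrays, empty until data is attached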
@@ -144,7 +144,7 @@ cdef class Quad(Object): """ cdef PetscInt corder = 0 - CHKERR( PetscQuadratureGetOrder(self.quad, &corder)) + CHKERR(PetscQuadratureGetOrder(self.quad, &corder)) return toInt(corder) def setOrder(self, order: int) -> None: @@ -164,7 +164,7 @@ cdef class Quad(Object): """ cdef PetscInt corder = asInt(order) - CHKERR( PetscQuadratureSetOrder(self.quad, corder)) + CHKERR(PetscQuadratureSetOrder(self.quad, corder)) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Device.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Device.pyx index 98853f3a864..fe1c1da5b9f 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Device.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Device.pyx @@ -1,20 +1,21 @@ # -------------------------------------------------------------------- class staticproperty(property): - def __get__(self, *args, **kwargs): - return self.fget.__get__(*args, **kwargs)() + def __get__(self, *args, **kwargs): + return self.fget.__get__(*args, **kwargs)() + cdef object make_enum_class(str class_name, str class_docstring, tuple args): - cdef dict enum2str = {} - cdef dict attrs = {} + cdef dict enum2str = {} + cdef dict attrs = {} - for name, c_enum in args: - enum2str[c_enum] = name - attrs[name] = c_enum + for name, c_enum in args: + enum2str[c_enum] = name + attrs[name] = c_enum - attrs['__enum2str'] = enum2str - attrs['__doc__'] = class_docstring - return type(class_name, (object, ), attrs) + attrs['__enum2str'] = enum2str + attrs['__doc__'] = class_docstring + return type(class_name, (object,), attrs) DeviceType = make_enum_class( "DeviceType", @@ -30,7 +31,7 @@ DeviceType = make_enum_class( ("CUDA" , PETSC_DEVICE_CUDA), ("HIP" , PETSC_DEVICE_HIP), ("SYCL" , PETSC_DEVICE_SYCL), - ("DEFAULT" , staticproperty(lambda *_,**__: PETSC_DEVICE_DEFAULT())) + ("DEFAULT" , staticproperty(lambda *_, **__: PETSC_DEVICE_DEFAULT())) ) ) @@ -40,7 +41,8 @@ StreamType = make_enum_class( See Also -------- - DeviceContext, DeviceContext.getStreamType, DeviceContext.setStreamType, petsc.PetscStreamType + DeviceContext, DeviceContext.getStreamType + DeviceContext.setStreamType, petsc.PetscStreamType """, ( @@ -57,7 +59,8 @@ DeviceJoinMode = make_enum_class( See Also -------- - DeviceContext, DeviceContext.join, DeviceContext.fork, petsc.PetscDeviceContextJoinMode + DeviceContext, DeviceContext.join, DeviceContext.fork + petsc.PetscDeviceContextJoinMode """, ( @@ -70,480 +73,511 @@ DeviceJoinMode = make_enum_class( # -------------------------------------------------------------------- cdef class Device: - """The device object. + """The device object. - Represents a handle to an accelerator (which may be the host). + Represents a handle to an accelerator (which may be the host). - See Also - -------- - DeviceContext, petsc.PetscDevice + See Also + -------- + DeviceContext, petsc.PetscDevice - """ + """ - Type = DeviceType + Type = DeviceType - def __cinit__(self): - self.device = NULL + def __cinit__(self): + self.device = NULL - def __dealloc__(self): - self.destroy() + def __dealloc__(self): + self.destroy() - @classmethod - def create(cls, dtype: Type | None = None, device_id: int = DECIDE) -> Device: - """Create a device object. + @classmethod + def create(cls, dtype: Type | None = None, device_id: int = DECIDE) -> Device: + """Create a device object. - Not collective. + Not collective. - Parameters - ---------- - dtype - The type of device to create (or `None` for the default). 
+ Parameters + ---------- + dtype + The type of device to create (or `None` for the default). - device_id - The numeric id of the device to create. + device_id + The numeric id of the device to create. - See Also - -------- - destroy, petsc.PetscDeviceCreate + See Also + -------- + destroy, petsc.PetscDeviceCreate - """ - cdef PetscInt cdevice_id = asInt(device_id) - cdef PetscDeviceType cdevice_type = asDeviceType(dtype if dtype is not None else cls.Type.DEFAULT) - cdef Device device = cls() + """ + cdef PetscInt cdevice_id = asInt(device_id) + cdef PetscDeviceType cdevice_type = asDeviceType(dtype if dtype is not None else cls.Type.DEFAULT) + cdef Device device = cls() - CHKERR(PetscDeviceCreate(cdevice_type, cdevice_id, &device.device)) - return device + CHKERR(PetscDeviceCreate(cdevice_type, cdevice_id, &device.device)) + return device - def destroy(self) -> None: - """Destroy a device object. + def destroy(self) -> None: + """Destroy a device object. - Not collective. + Not collective. - See Also - -------- - create, petsc.PetscDeviceDestroy + See Also + -------- + create, petsc.PetscDeviceDestroy - """ - CHKERR(PetscDeviceDestroy(&self.device)) + """ + CHKERR(PetscDeviceDestroy(&self.device)) - def configure(self) -> None: - """Configure and setup a device object. + def configure(self) -> None: + """Configure and setup a device object. - Not collective. + Not collective. - See Also - -------- - create, petsc.PetscDeviceConfigure + See Also + -------- + create, petsc.PetscDeviceConfigure - """ - CHKERR(PetscDeviceConfigure(self.device)) + """ + CHKERR(PetscDeviceConfigure(self.device)) - def view(self, Viewer viewer=None) -> None: - """View a device object. + def view(self, Viewer viewer=None) -> None: + """View a device object. - Collective. + Collective. - Parameters - ---------- - viewer - A `Viewer` instance or `None` for the default viewer. + Parameters + ---------- + viewer + A `Viewer` instance or `None` for the default viewer. - See Also - -------- - petsc.PetscDeviceView + See Also + -------- + petsc.PetscDeviceView - """ - cdef PetscViewer vwr = NULL + """ + cdef PetscViewer vwr = NULL - if viewer is not None: - vwr = viewer.vwr - CHKERR(PetscDeviceView(self.device, vwr)) + if viewer is not None: + vwr = viewer.vwr + CHKERR(PetscDeviceView(self.device, vwr)) - def getDeviceType(self) -> str: - """Return the type of the device. + def getDeviceType(self) -> str: + """Return the type of the device. - Not collective. + Not collective. - See Also - -------- - type, petsc.PetscDeviceGetType + See Also + -------- + type, petsc.PetscDeviceGetType - """ - cdef PetscDeviceType cdtype + """ + cdef PetscDeviceType cdtype = PETSC_DEVICE_HOST - CHKERR(PetscDeviceGetType(self.device, &cdtype)) - return toDeviceType(cdtype) + CHKERR(PetscDeviceGetType(self.device, &cdtype)) + return toDeviceType(cdtype) - def getDeviceId(self) -> int: - """Return the device id. + def getDeviceId(self) -> int: + """Return the device id. - Not collective. + Not collective. - See Also - -------- - create, petsc.PetscDeviceGetDeviceId + See Also + -------- + create, petsc.PetscDeviceGetDeviceId - """ - cdef PetscInt cdevice_id = 0 + """ + cdef PetscInt cdevice_id = 0 - CHKERR(PetscDeviceGetDeviceId(self.device, &cdevice_id)) - return toInt(cdevice_id) + CHKERR(PetscDeviceGetDeviceId(self.device, &cdevice_id)) + return toInt(cdevice_id) - @staticmethod - def setDefaultType(device_type: Type | str) -> None: - """Set the device type to be used as the default in subsequent calls to `create`. 
+ @staticmethod + def setDefaultType(device_type: Type | str) -> None: + """Set the device type to be used as the default in subsequent calls to `create`. - Not collective. + Not collective. - See Also - -------- - create, petsc.PetscDeviceSetDefaultDeviceType + See Also + -------- + create, petsc.PetscDeviceSetDefaultDeviceType - """ - cdef PetscDeviceType cdevice_type = asDeviceType(device_type) + """ + cdef PetscDeviceType cdevice_type = asDeviceType(device_type) - CHKERR(PetscDeviceSetDefaultDeviceType(cdevice_type)) + CHKERR(PetscDeviceSetDefaultDeviceType(cdevice_type)) - property type: - """The device type.""" - def __get__(self) -> str: - return self.getDeviceType() + property type: + """The device type.""" + def __get__(self) -> str: + return self.getDeviceType() - property device_id: - """The device id.""" - def __get__(self) -> int: - return self.getDeviceId() + property device_id: + """The device id.""" + def __get__(self) -> int: + return self.getDeviceId() # -------------------------------------------------------------------- cdef class DeviceContext(Object): - """DeviceContext object. - - Represents an abstract handle to a device context. - - See Also - -------- - Device, petsc.PetscDeviceContext - - """ - JoinMode = DeviceJoinMode - StreamType = StreamType - - def __cinit__(self): - self.obj = &self.dctx - self.dctx = NULL - - def __dealloc__(self): - self.destroy() - - @classmethod - def create(cls) -> DeviceContext: - """Create an empty DeviceContext. + """DeviceContext object. - Not collective. + Represents an abstract handle to a device context. See Also -------- - Device, petsc.PetscDeviceContextCreate + Device, petsc.PetscDeviceContext """ - cdef DeviceContext dctx = cls() + JoinMode = DeviceJoinMode + StreamType = StreamType - CHKERR(PetscDeviceContextCreate(&dctx.dctx)) - return dctx - - def getStreamType(self) -> str: - """Return the `StreamType`. - - Not collective. - - See Also - -------- - stream_type, setStreamType, petsc.PetscDeviceContextGetStreamType + def __cinit__(self): + self.obj = &self.dctx + self.dctx = NULL - """ - cdef PetscStreamType cstream_type = PETSC_STREAM_DEFAULT + def create(self) -> Self: + """Create an empty DeviceContext. - CHKERR(PetscDeviceContextGetStreamType(self.dctx, &cstream_type)) - return toStreamType(cstream_type) + Not collective. - def setStreamType(self, stream_type: StreamType | str) -> None: - """Set the `StreamType`. + See Also + -------- + destroy, Device, petsc.PetscDeviceContextCreate - Not collective. + """ + cdef PetscDeviceContext dctx = NULL + CHKERR(PetscDeviceContextCreate(&dctx)) + CHKERR(PetscCLEAR(self.obj)); self.dctx = dctx + return self - Parameters - ---------- - stream_type - The type of stream to set + def destroy(self) -> Self: + """Destroy a device context. - See Also - -------- - stream_type, getStreamType, petsc.PetscDeviceContextSetStreamType + Not collective. - """ - cdef PetscStreamType cstream_type = asStreamType(stream_type) + See Also + -------- + create, petsc.PetscDeviceContextDestroy - CHKERR(PetscDeviceContextSetStreamType(self.dctx, cstream_type)) + """ + CHKERR(PetscDeviceContextDestroy(&self.dctx)) + return self - def getDevice(self) -> Device: - """Get the `Device` which this instance is attached to. + def getStreamType(self) -> str: + """Return the `StreamType`. - Not collective. + Not collective. 
-    See Also
-    --------
-    setDevice, device, Device, petsc.PetscDeviceContextGetDevice
+        See Also
+        --------
+        stream_type, setStreamType, petsc.PetscDeviceContextGetStreamType

-    """
-    cdef PetscDevice device = NULL
+        """
+        cdef PetscStreamType cstream_type = PETSC_STREAM_DEFAULT

-    CHKERR(PetscDeviceContextGetDevice(self.dctx, &device))
-    return PyPetscDevice_New(device)
+        CHKERR(PetscDeviceContextGetStreamType(self.dctx, &cstream_type))
+        return toStreamType(cstream_type)

-  def setDevice(self, Device device not None) -> None:
-    """Set the `Device` which this `DeviceContext` is attached to.
+    def setStreamType(self, stream_type: StreamType | str) -> None:
+        """Set the `StreamType`.

-    Collective.
+        Not collective.

-    Parameters
-    ----------
-    device
-        The `Device` to which this instance is attached to.
+        Parameters
+        ----------
+        stream_type
+            The type of stream to set.

-    See Also
-    --------
-    getDevice, device, Device, petsc.PetscDeviceContextSetDevice
+        See Also
+        --------
+        stream_type, getStreamType, petsc.PetscDeviceContextSetStreamType

-    """
-    cdef PetscDevice cdevice = PyPetscDevice_Get(device)
+        """
+        cdef PetscStreamType cstream_type = asStreamType(stream_type)

-    CHKERR(PetscDeviceContextSetDevice(self.dctx, cdevice))
+        CHKERR(PetscDeviceContextSetStreamType(self.dctx, cstream_type))

+    def getDevice(self) -> Device:
+        """Get the `Device` which this instance is attached to.

-  def setUp(self) -> None:
-    """Set up the internal data structures for using the device context.
+        Not collective.

-    Not collective.
+        See Also
+        --------
+        setDevice, device, Device, petsc.PetscDeviceContextGetDevice

-    See Also
-    --------
-    create, petsc.PetscDeviceContextSetUp
+        """
+        cdef PetscDevice device = NULL

-    """
-    CHKERR(PetscDeviceContextSetUp(self.dctx))
+        CHKERR(PetscDeviceContextGetDevice(self.dctx, &device))
+        return PyPetscDevice_New(device)

-  def duplicate(self) -> DeviceContext:
-    """Duplicate a the device context.
+    def setDevice(self, Device device not None) -> None:
+        """Set the `Device` which this `DeviceContext` is attached to.

-    Not collective.
+        Collective.

-    See Also
-    --------
-    create, petsc.PetscDeviceContextDuplicate
+        Parameters
+        ----------
+        device
+            The `Device` to which this instance is attached.

-    """
-    cdef PetscDeviceContext octx = NULL
+        See Also
+        --------
+        getDevice, device, Device, petsc.PetscDeviceContextSetDevice

-    CHKERR(PetscDeviceContextDuplicate(self.dctx, &octx))
-    return PyPetscDeviceContext_New(octx)
+        """
+        cdef PetscDevice cdevice = PyPetscDevice_Get(device)

-  def idle(self) -> bool:
-    """Return whether the underlying stream for the device context is idle.
+        CHKERR(PetscDeviceContextSetDevice(self.dctx, cdevice))

-    Not collective.
+    def setUp(self) -> None:
+        """Set up the internal data structures for using the device context.

-    See Also
-    --------
-    synchronize, petsc.PetscDeviceContextQueryIdle
+        Not collective.

-    """
-    cdef PetscBool is_idle = PETSC_FALSE
+        See Also
+        --------
+        create, petsc.PetscDeviceContextSetUp

-    CHKERR(PetscDeviceContextQueryIdle(self.dctx, &is_idle))
-    return toBool(is_idle)
+        """
+        CHKERR(PetscDeviceContextSetUp(self.dctx))

-  def waitFor(self, other: DeviceContext | None) -> None:
-    """Make this instance wait for ``other``.
+    def duplicate(self) -> DeviceContext:
+        """Duplicate the device context.

-    Not collective.
+        Not collective.
- Parameters - ---------- - other - The other `DeviceContext` to wait for + See Also + -------- + create, petsc.PetscDeviceContextDuplicate - See Also - -------- - fork, join, petsc.PetscDeviceContextWaitForContext + """ + cdef DeviceContext octx = type(self)() - """ - cdef PetscDeviceContext cother = NULL + CHKERR(PetscDeviceContextDuplicate(self.dctx, &octx.dctx)) + return octx - if other is not None: - cother = PyPetscDeviceContext_Get(other) - CHKERR(PetscDeviceContextWaitForContext(self.dctx, cother)) + def idle(self) -> bool: + """Return whether the underlying stream for the device context is idle. - def fork(self, n: int, stream_type: DeviceContext.StreamType | str | None = None) -> list[DeviceContext]: - """Create multiple device contexts which are all logically dependent on this one. + Not collective. - Not collective. + See Also + -------- + synchronize, petsc.PetscDeviceContextQueryIdle - Parameters - ---------- - n - The number of device contexts to create. - stream_type - The type of stream of the forked device context. + """ + cdef PetscBool is_idle = PETSC_FALSE - See Also - -------- - join, waitFor, petsc.PetscDeviceContextFork + CHKERR(PetscDeviceContextQueryIdle(self.dctx, &is_idle)) + return toBool(is_idle) - """ - cdef PetscDeviceContext *subctx = NULL - cdef PetscStreamType cstream_type = PETSC_STREAM_DEFAULT - cdef PetscInt cn = asInt(n) - try: - if stream_type is None: - CHKERR(PetscDeviceContextFork(self.dctx, cn, &subctx)) - else: - cstream_type = asStreamType(stream_type) - CHKERR(PetscDeviceContextForkWithStreamType(self.dctx, cstream_type, cn, &subctx)) - return [PyPetscDeviceContext_New(subctx[i]) for i in range(cn)] - finally: - CHKERR(PetscFree(subctx)) - - def join(self, join_mode: DeviceJoinMode | str, py_sub_ctxs: list[DeviceContext]) -> None: - """Join a set of device contexts on this one. - - Not collective. - - Parameters - ---------- - join_mode - The type of join to perform. - py_sub_ctxs - The list of device contexts to join. + def waitFor(self, other: DeviceContext | None) -> None: + """Make this instance wait for ``other``. - See Also - -------- - fork, waitFor, petsc.PetscDeviceContextJoin + Not collective. - """ - cdef PetscDeviceContext *np_subctx_copy = NULL - cdef PetscDeviceContext *np_subctx = NULL - cdef PetscInt nsub = 0 - cdef PetscDeviceContextJoinMode cjoin_mode = asJoinMode(join_mode) + Parameters + ---------- + other + The other `DeviceContext` to wait for - tmp = oarray_p(py_sub_ctxs, &nsub, &np_subctx) - try: - CHKERR(PetscMalloc((nsub) * sizeof(PetscDeviceContext *), &np_subctx_copy)) - CHKERR(PetscMemcpy(np_subctx_copy, np_subctx, (nsub) * sizeof(PetscDeviceContext *))) - CHKERR(PetscDeviceContextJoin(self.dctx, nsub, cjoin_mode, &np_subctx_copy)) - finally: - CHKERR(PetscFree(np_subctx_copy)) + See Also + -------- + fork, join, petsc.PetscDeviceContextWaitForContext - if cjoin_mode == PETSC_DEVICE_CONTEXT_JOIN_DESTROY: - for i in range(nsub): - py_sub_ctxs[i] = None + """ + cdef PetscDeviceContext cother = NULL - def synchronize(self) -> None: - """Synchronize a device context. + if other is not None: + cother = PyPetscDeviceContext_Get(other) + CHKERR(PetscDeviceContextWaitForContext(self.dctx, cother)) - Not collective. + def fork(self, n: int, stream_type: DeviceContext.StreamType | str | None = None) -> list[DeviceContext]: + """Create multiple device contexts which are all logically dependent on this one. - Notes - ----- - The underlying stream is considered idle after this routine returns, - i.e. 
`idle` will return ``True``.
+        Not collective.

-    See Also
-    --------
-    idle, petsc.PetscDeviceContextSynchronize
+        Parameters
+        ----------
+        n
+            The number of device contexts to create.
+        stream_type
+            The type of stream of the forked device context.

-    """
-    CHKERR(PetscDeviceContextSynchronize(self.dctx))
+        Examples
+        --------
+        The device contexts created must be destroyed using `join`.

-  def setFromOptions(self, comm: Comm | None = None) -> None:
-    """Configure the `DeviceContext` from the options database.
+        >>> dctx = PETSc.DeviceContext().getCurrent()
+        >>> dctxs = dctx.fork(4)
+        >>> ... # perform computations
+        >>> # we can mix various join modes
+        >>> dctx.join(PETSc.DeviceContext.JoinMode.SYNC, dctxs[0:2])
+        >>> dctx.join(PETSc.DeviceContext.JoinMode.SYNC, dctxs[2:])
+        >>> ... # some more computations and joins
+        >>> # dctxs must all be destroyed with JoinMode.DESTROY
+        >>> dctx.join(PETSc.DeviceContext.JoinMode.DESTROY, dctxs)

-    Collective.
+        See Also
+        --------
+        join, waitFor, petsc.PetscDeviceContextFork

-    Parameters
-    ----------
-    comm
-        MPI communicator, defaults to `Sys.getDefaultComm`.
+        """
+        cdef PetscDeviceContext *csubctxs = NULL
+        cdef PetscStreamType cstream_type = PETSC_STREAM_DEFAULT
+        cdef PetscInt cn = asInt(n)
+        cdef list subctxs = []
+        if stream_type is None:
+            CHKERR(PetscDeviceContextFork(self.dctx, cn, &csubctxs))
+        else:
+            cstream_type = asStreamType(stream_type)
+            CHKERR(PetscDeviceContextForkWithStreamType(self.dctx, cstream_type, cn, &csubctxs))
+        # FIXME: without CXX compiler, csubctxs is NULL
+        if csubctxs:
+            subctxs = [None] * cn
+            for i from 0 <= i < cn:
+                subctxs[i] = DeviceContext()
+                (<DeviceContext>subctxs[i]).dctx = csubctxs[i]
+            CHKERR(PetscFree(csubctxs))
+        return subctxs

-    See Also
-    --------
-    Sys.getDefaultComm, petsc.PetscDeviceContextSetFromOptions
+    def join(self, join_mode: DeviceJoinMode | str, py_sub_ctxs: list[DeviceContext]) -> None:
+        """Join a set of device contexts on this one.

-    """
-    cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
+        Not collective.
+
+        Parameters
+        ----------
+        join_mode
+            The type of join to perform.
+        py_sub_ctxs
+            The list of device contexts to join.
+
+        See Also
+        --------
+        fork, waitFor, petsc.PetscDeviceContextJoin
+
+        """
+        cdef PetscDeviceContext *np_subctx = NULL
+        cdef PetscDeviceContextJoinMode cjoin_mode = asJoinMode(join_mode)
+        cdef Py_ssize_t nctxs = len(py_sub_ctxs)
+
+        CHKERR(PetscMalloc(<size_t>(nctxs) * sizeof(PetscDeviceContext *), &np_subctx))
+        for i from 0 <= i < nctxs:
+            dctx = py_sub_ctxs[i]
+            np_subctx[i] = (<DeviceContext>dctx).dctx if dctx is not None else NULL
+        CHKERR(PetscDeviceContextJoin(self.dctx, nctxs, cjoin_mode, &np_subctx))
+
+        if cjoin_mode == PETSC_DEVICE_CONTEXT_JOIN_DESTROY:
+            # in this case, PETSc destroys the contexts and frees the array
+            for i in range(nctxs):
+                (<DeviceContext>py_sub_ctxs[i]).dctx = NULL
+        else:
+            # we need to free the temporary array
+            CHKERR(PetscFree(np_subctx))
+
+    def synchronize(self) -> None:
+        """Synchronize a device context.
+
+        Not collective.
+
+        Notes
+        -----
+        The underlying stream is considered idle after this routine returns,
+        i.e. `idle` will return ``True``.
+
+        See Also
+        --------
+        idle, petsc.PetscDeviceContextSynchronize
+
+        """
+        CHKERR(PetscDeviceContextSynchronize(self.dctx))
+
+    def setFromOptions(self, comm: Comm | None = None) -> None:
+        """Configure the `DeviceContext` from the options database.
+
+        Collective.
+
+        Parameters
+        ----------
+        comm
+            MPI communicator, defaults to `Sys.getDefaultComm`.
+ + See Also + -------- + Sys.getDefaultComm, petsc.PetscDeviceContextSetFromOptions + + """ + cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) + + CHKERR(PetscDeviceContextSetFromOptions(ccomm, self.dctx)) + + @staticmethod + def getCurrent() -> DeviceContext: + """Return the current device context. + + Not collective. + + See Also + -------- + current, setCurrent, petsc.PetscDeviceContextGetCurrentContext + + """ + cdef PetscDeviceContext dctx = NULL + + CHKERR(PetscDeviceContextGetCurrentContext(&dctx)) + return PyPetscDeviceContext_New(dctx) + + @staticmethod + def setCurrent(dctx: DeviceContext | None) -> None: + """Set the current device context. - CHKERR(PetscDeviceContextSetFromOptions(ccomm, self.dctx)) + Not collective. - @staticmethod - def getCurrent() -> DeviceContext: - """Return the current device context. + Parameters + ---------- + dctx + The `DeviceContext` to set as current (or `None` to use + the default context). - Not collective. + See Also + -------- + current, getCurrent, petsc.PetscDeviceContextSetCurrentContext - See Also - -------- - current, setCurrent, petsc.PetscDeviceContextGetCurrentContext + """ + cdef PetscDeviceContext cdctx = NULL - """ - cdef PetscDeviceContext dctx = NULL + if dctx is not None: + cdctx = PyPetscDeviceContext_Get(dctx) + CHKERR(PetscDeviceContextSetCurrentContext(cdctx)) - CHKERR(PetscDeviceContextGetCurrentContext(&dctx)) - return PyPetscDeviceContext_New(dctx) + property stream_type: + """The stream type.""" + def __get__(self) -> str: + return self.getStreamType() - @staticmethod - def setCurrent(dctx: DeviceContext | None) -> None: - """Set the current device context. + def __set__(self, stype: StreamType | str) -> None: + self.setStreamType(stype) - Not collective. + property device: + """The device associated to the device context.""" + def __get__(self) -> Device: + return self.getDevice() - Parameters - ---------- - dctx - The `DeviceContext` to set as current (or `None` to use - the default context). 
+ def __set__(self, Device device) -> None: + self.setDevice(device) - See Also - -------- - current, getCurrent, petsc.PetscDeviceContextSetCurrentContext + property current: + """The current global device context.""" + def __get__(self) -> DeviceContext: + return self.getCurrent() - """ - cdef PetscDeviceContext cdctx = NULL - - if dctx is not None: - cdctx = PyPetscDeviceContext_Get(dctx) - CHKERR(PetscDeviceContextSetCurrentContext(cdctx)) - - property stream_type: - """The stream type.""" - def __get__(self) -> str: - return self.getStreamType() - def __set__(self, stype: StreamType | str) -> None: - self.setStreamType(stype) - - property device: - """The device associated to the device context.""" - def __get__(self) -> Device: - return self.getDevice() - def __set__(self, Device device) -> None: - self.setDevice(device) - - property current: - """The current global device context.""" - def __get__(self) -> DeviceContext: - return self.getCurrent() - def __set__(self, dctx: DeviceContext | None) -> None: - self.setCurrent(dctx) + def __set__(self, dctx: DeviceContext | None) -> None: + self.setCurrent(dctx) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/FE.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/FE.pyx index 6ba9b487ac9..c2cafa5e398 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/FE.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/FE.pyx @@ -1,12 +1,14 @@ # -------------------------------------------------------------------- class FEType(object): + """The finite element types.""" BASIC = S_(PETSCFEBASIC) OPENCL = S_(PETSCFEOPENCL) COMPOSITE = S_(PETSCFECOMPOSITE) # -------------------------------------------------------------------- + cdef class FE(Object): """A PETSc object that manages a finite element space.""" @@ -33,7 +35,7 @@ cdef class FE(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscFEView(self.fe, vwr) ) + CHKERR(PetscFEView(self.fe, vwr)) def destroy(self) -> Self: """Destroy the `FE` object. @@ -45,7 +47,7 @@ cdef class FE(Object): petsc.PetscFEDestroy """ - CHKERR( PetscFEDestroy(&self.fe) ) + CHKERR(PetscFEDestroy(&self.fe)) return self def create(self, comm: Comm | None = None) -> Self: @@ -67,8 +69,8 @@ cdef class FE(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscFE newfe = NULL - CHKERR( PetscFECreate(ccomm, &newfe) ) - CHKERR( PetscCLEAR(self.obj) ); self.fe = newfe + CHKERR(PetscFECreate(ccomm, &newfe)) + CHKERR(PetscCLEAR(self.obj)); self.fe = newfe return self def createDefault( @@ -78,8 +80,7 @@ cdef class FE(Object): isSimplex: bool, qorder: int = DETERMINE, prefix: str = None, - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `FE` for basic FEM computation. Collective. 
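Stepping back to the Device/DeviceContext bindings above, a hedged sketch of the create/setUp/property pattern they enable; this assumes a PETSc build with device support (a host-only build still provides the host device):

>>> from petsc4py import PETSc
>>> dctx = PETSc.DeviceContext().create()
>>> dctx.setFromOptions()
>>> dctx.setUp()
>>> print(dctx.stream_type)    # property form of getStreamType()
>>> dctx.synchronize()
>>> dctx.idle()                # idle right after a synchronize, per the Notes
True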
@@ -113,9 +114,9 @@ cdef class FE(Object): cdef PetscBool cisSimplex = asBool(isSimplex) cdef const char *cprefix = NULL if prefix: - prefix = str2bytes(prefix, &cprefix) - CHKERR( PetscFECreateDefault(ccomm, cdim, cnc, cisSimplex, cprefix, cqorder, &newfe)) - CHKERR( PetscCLEAR(self.obj) ); self.fe = newfe + prefix = str2bytes(prefix, &cprefix) + CHKERR(PetscFECreateDefault(ccomm, cdim, cnc, cisSimplex, cprefix, cqorder, &newfe)) + CHKERR(PetscCLEAR(self.obj)); self.fe = newfe return self def createLagrange( @@ -125,8 +126,7 @@ cdef class FE(Object): isSimplex: bool, k: int, qorder: int = DETERMINE, - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `FE` for the basic Lagrange space of degree k. Collective. @@ -159,8 +159,8 @@ cdef class FE(Object): cdef PetscInt ck = asInt(k) cdef PetscInt cqorder = asInt(qorder) cdef PetscBool cisSimplex = asBool(isSimplex) - CHKERR( PetscFECreateLagrange(ccomm, cdim, cnc, cisSimplex, ck, cqorder, &newfe)) - CHKERR( PetscCLEAR(self.obj) ); self.fe = newfe + CHKERR(PetscFECreateLagrange(ccomm, cdim, cnc, cisSimplex, ck, cqorder, &newfe)) + CHKERR(PetscCLEAR(self.obj)); self.fe = newfe return self def getQuadrature(self) -> Quad: @@ -174,7 +174,7 @@ cdef class FE(Object): """ cdef Quad quad = Quad() - CHKERR( PetscFEGetQuadrature(self.fe, &quad.quad) ) + CHKERR(PetscFEGetQuadrature(self.fe, &quad.quad)) return quad def getDimension(self) -> int: @@ -188,7 +188,7 @@ cdef class FE(Object): """ cdef PetscInt cdim = 0 - CHKERR( PetscFEGetDimension(self.fe, &cdim) ) + CHKERR(PetscFEGetDimension(self.fe, &cdim)) return toInt(cdim) def getSpatialDimension(self) -> int: @@ -202,7 +202,7 @@ cdef class FE(Object): """ cdef PetscInt csdim = 0 - CHKERR( PetscFEGetSpatialDimension(self.fe, &csdim) ) + CHKERR(PetscFEGetSpatialDimension(self.fe, &csdim)) return toInt(csdim) def getNumComponents(self) -> int: @@ -216,7 +216,7 @@ cdef class FE(Object): """ cdef PetscInt comp = 0 - CHKERR( PetscFEGetNumComponents(self.fe, &comp) ) + CHKERR(PetscFEGetNumComponents(self.fe, &comp)) return toInt(comp) def setNumComponents(self, comp: int) -> None: @@ -235,7 +235,7 @@ cdef class FE(Object): """ cdef PetscInt ccomp = asInt(comp) - CHKERR( PetscFESetNumComponents(self.fe, comp) ) + CHKERR(PetscFESetNumComponents(self.fe, ccomp)) def getNumDof(self) -> ndarray: """Return the number of DOFs. @@ -252,8 +252,8 @@ cdef class FE(Object): """ cdef const PetscInt *numDof = NULL cdef PetscInt cdim = 0 - CHKERR( PetscFEGetDimension(self.fe, &cdim) ) - CHKERR( PetscFEGetNumDof(self.fe, &numDof) ) + CHKERR(PetscFEGetDimension(self.fe, &cdim)) + CHKERR(PetscFEGetNumDof(self.fe, &numDof)) return array_i(cdim, numDof) def getTileSizes(self) -> tuple(int, int, int, int): @@ -279,7 +279,7 @@ cdef class FE(Object): """ cdef PetscInt blockSize = 0, numBlocks = 0 cdef PetscInt batchSize = 0, numBatches = 0 - CHKERR( PetscFEGetTileSizes(self.fe, &blockSize, &numBlocks, &batchSize, &numBatches) ) + CHKERR(PetscFEGetTileSizes(self.fe, &blockSize, &numBlocks, &batchSize, &numBatches)) return toInt(blockSize), toInt(numBlocks), toInt(batchSize), toInt(numBatches) def setTileSizes( @@ -287,8 +287,7 @@ cdef class FE(Object): blockSize: int, numBlocks: int, batchSize: int, - numBatches: int, - ) -> None: + numBatches: int) -> None: """Set the tile sizes for evaluation. Not collective. 
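A quick sketch of the Lagrange constructor wrapped above; for a P1 space on triangles the basis has three functions, which is what getDimension reports:

>>> from petsc4py import PETSc
>>> fe = PETSc.FE().createLagrange(dim=2, nc=1, isSimplex=True,
...                                k=1, comm=PETSc.COMM_SELF)
>>> fe.getSpatialDimension()
2
>>> fe.getDimension()          # P1 on a triangle: one basis function per vertex
3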
@@ -311,7 +310,7 @@ cdef class FE(Object): """ cdef PetscInt cblockSize = asInt(blockSize), cnumBlocks = asInt(numBlocks) cdef PetscInt cbatchSize = asInt(batchSize), cnumBatches = asInt(numBatches) - CHKERR( PetscFESetTileSizes(self.fe, blockSize, numBlocks, batchSize, numBatches) ) + CHKERR(PetscFESetTileSizes(self.fe, cblockSize, cnumBlocks, cbatchSize, cnumBatches)) def getFaceQuadrature(self) -> Quad: """Return the `Quad` used to calculate inner products on faces. @@ -324,7 +323,7 @@ cdef class FE(Object): """ cdef Quad quad = Quad() - CHKERR( PetscFEGetFaceQuadrature(self.fe, &quad.quad) ) + CHKERR(PetscFEGetFaceQuadrature(self.fe, &quad.quad)) return quad def setQuadrature(self, Quad quad) -> Self: @@ -342,7 +341,7 @@ cdef class FE(Object): getQuadrature, petsc.PetscFESetQuadrature """ - CHKERR( PetscFESetQuadrature(self.fe, quad.quad) ) + CHKERR(PetscFESetQuadrature(self.fe, quad.quad)) return self def setFaceQuadrature(self, Quad quad) -> Quad: @@ -360,7 +359,7 @@ cdef class FE(Object): getFaceQuadrature, petsc.PetscFESetFaceQuadrature """ - CHKERR( PetscFESetFaceQuadrature(self.fe, quad.quad) ) + CHKERR(PetscFESetFaceQuadrature(self.fe, quad.quad)) return self def setType(self, fe_type: Type | str) -> Self: @@ -380,7 +379,7 @@ cdef class FE(Object): """ cdef PetscFEType cval = NULL fe_type = str2bytes(fe_type, &cval) - CHKERR( PetscFESetType(self.fe, cval) ) + CHKERR(PetscFESetType(self.fe, cval)) return self def getBasisSpace(self) -> Space: @@ -394,7 +393,7 @@ cdef class FE(Object): """ cdef Space sp = Space() - CHKERR( PetscFEGetBasisSpace(self.fe, &sp.space ) ) + CHKERR(PetscFEGetBasisSpace(self.fe, &sp.space)) return sp def setBasisSpace(self, Space sp) -> None: @@ -412,7 +411,7 @@ cdef class FE(Object): getBasisSpace, petsc.PetscFESetBasisSpace """ - CHKERR( PetscFESetBasisSpace(self.fe, sp.space ) ) + CHKERR(PetscFESetBasisSpace(self.fe, sp.space)) def setFromOptions(self) -> None: """Set parameters in a `FE` from the options database. @@ -424,7 +423,7 @@ cdef class FE(Object): petsc_options, petsc.PetscFESetFromOptions """ - CHKERR( PetscFESetFromOptions(self.fe) ) + CHKERR(PetscFESetFromOptions(self.fe)) def setUp(self) -> None: """Construct data structures for the `FE` after the `Type` has been set. @@ -436,7 +435,7 @@ cdef class FE(Object): petsc.PetscFESetUp """ - CHKERR( PetscFESetUp(self.fe) ) + CHKERR(PetscFESetUp(self.fe)) def getDualSpace(self) -> DualSpace: """Return the `DualSpace` used to define the inner product for the `FE`. @@ -449,7 +448,7 @@ cdef class FE(Object): """ cdef DualSpace dspace = DualSpace() - CHKERR( PetscFEGetDualSpace(self.fe, &dspace.dualspace) ) + CHKERR(PetscFEGetDualSpace(self.fe, &dspace.dualspace)) return dspace def setDualSpace(self, DualSpace dspace) -> None: @@ -467,30 +466,7 @@ cdef class FE(Object): getDualSpace, DualSpace, petsc.PetscFESetDualSpace """ - CHKERR( PetscFESetDualSpace(self.fe, dspace.dualspace) ) - - def viewFromOptions(self, name: str, Object obj=None) -> None: - """View from a `FE` based on values in the options database. - - Collective. - - Parameters - ---------- - name - Command line option name. - obj - Optional object that provides the options prefix. 
- - See Also - -------- - petsc_options, petsc.PetscFEViewFromOptions - - """ - cdef const char *cname = NULL - _ = str2bytes(name, &cname) - cdef PetscObject cobj = NULL - if obj is not None: cobj = obj.obj[0] - CHKERR( PetscFEViewFromOptions(self.fe, cobj, cname) ) + CHKERR(PetscFESetDualSpace(self.fe, dspace.dualspace)) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/IS.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/IS.pyx index 69098751016..8ace959b959 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/IS.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/IS.pyx @@ -1,12 +1,14 @@ # -------------------------------------------------------------------- class ISType(object): + """The index set types.""" GENERAL = S_(ISGENERAL) BLOCK = S_(ISBLOCK) STRIDE = S_(ISSTRIDE) # -------------------------------------------------------------------- + cdef class IS(Object): """A collection of indices. @@ -38,7 +40,6 @@ cdef class IS(Object): buf.releasebuffer(view) self # unused - # 'with' statement (PEP 343) def __enter__(self): @@ -69,7 +70,7 @@ cdef class IS(Object): """ cdef PetscViewer cviewer = NULL if viewer is not None: cviewer = viewer.vwr - CHKERR( ISView(self.iset, cviewer) ) + CHKERR(ISView(self.iset, cviewer)) def destroy(self) -> Self: """Destroy the index set. @@ -81,7 +82,7 @@ cdef class IS(Object): petsc.ISDestroy """ - CHKERR( ISDestroy(&self.iset) ) + CHKERR(ISDestroy(&self.iset)) return self def create(self, comm: Comm | None = None) -> Self: @@ -101,8 +102,8 @@ cdef class IS(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscIS newiset = NULL - CHKERR( ISCreate(ccomm, &newiset) ) - CHKERR( PetscCLEAR(self.obj) ); self.iset = newiset + CHKERR(ISCreate(ccomm, &newiset)) + CHKERR(PetscCLEAR(self.obj)); self.iset = newiset return self def setType(self, is_type: IS.Type | str) -> None: @@ -122,7 +123,7 @@ cdef class IS(Object): """ cdef PetscISType cval = NULL is_type = str2bytes(is_type, &cval) - CHKERR( ISSetType(self.iset, cval) ) + CHKERR(ISSetType(self.iset, cval)) def getType(self) -> str: """Return the index set type associated with the IS. @@ -135,14 +136,13 @@ cdef class IS(Object): """ cdef PetscISType cval = NULL - CHKERR( ISGetType(self.iset, &cval) ) + CHKERR(ISGetType(self.iset, &cval)) return bytes2str(cval) def createGeneral( self, indices: Sequence[int], - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create an IS with indices. Collective. @@ -164,16 +164,15 @@ cdef class IS(Object): cdef PetscCopyMode cm = PETSC_COPY_VALUES cdef PetscIS newiset = NULL indices = iarray_i(indices, &nidx, &idx) - CHKERR( ISCreateGeneral(ccomm, nidx, idx, cm, &newiset) ) - CHKERR( PetscCLEAR(self.obj) ); self.iset = newiset + CHKERR(ISCreateGeneral(ccomm, nidx, idx, cm, &newiset)) + CHKERR(PetscCLEAR(self.obj)); self.iset = newiset return self def createBlock( self, bsize: int, indices: Sequence[int], - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create a blocked index set. Collective. 
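For the IS constructors in these hunks, a short sketch; both copy the supplied indices (PETSC_COPY_VALUES in the wrappers), and a block IS expands each block index into bsize consecutive entries:

>>> from petsc4py import PETSc
>>> iset = PETSc.IS().createGeneral([0, 2, 4, 6], comm=PETSc.COMM_SELF)
>>> iset.getSize()
4
>>> bset = PETSc.IS().createBlock(2, [0, 3], comm=PETSc.COMM_SELF)
>>> bset.getSize()             # blocks 0 and 3 expand to entries 0, 1, 6, 7
4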
@@ -198,17 +197,16 @@ cdef class IS(Object): cdef PetscCopyMode cm = PETSC_COPY_VALUES cdef PetscIS newiset = NULL indices = iarray_i(indices, &nidx, &idx) - CHKERR( ISCreateBlock(ccomm, bs, nidx, idx, cm, &newiset) ) - CHKERR( PetscCLEAR(self.obj) ); self.iset = newiset + CHKERR(ISCreateBlock(ccomm, bs, nidx, idx, cm, &newiset)) + CHKERR(PetscCLEAR(self.obj)); self.iset = newiset return self def createStride( self, size: int, - first: int=0, - step: int=0, - comm: Comm | None = None - ) -> Self: + first: int = 0, + step: int = 0, + comm: Comm | None = None) -> Self: """Create an index set consisting of evenly spaced values. Collective. @@ -234,8 +232,8 @@ cdef class IS(Object): cdef PetscInt cfirst = asInt(first) cdef PetscInt cstep = asInt(step) cdef PetscIS newiset = NULL - CHKERR( ISCreateStride(ccomm, csize, cfirst, cstep, &newiset) ) - CHKERR( PetscCLEAR(self.obj) ); self.iset = newiset + CHKERR(ISCreateStride(ccomm, csize, cfirst, cstep, &newiset)) + CHKERR(PetscCLEAR(self.obj)); self.iset = newiset return self def duplicate(self) -> IS: @@ -249,7 +247,7 @@ cdef class IS(Object): """ cdef IS iset = type(self)() - CHKERR( ISDuplicate(self.iset, &iset.iset) ) + CHKERR(ISDuplicate(self.iset, &iset.iset)) return iset def copy(self, IS result=None) -> IS: @@ -276,8 +274,8 @@ cdef class IS(Object): if result is None: result = type(self)() if result.iset == NULL: - CHKERR( ISDuplicate(self.iset, &result.iset) ) - CHKERR( ISCopy(self.iset, result.iset) ) + CHKERR(ISDuplicate(self.iset, &result.iset)) + CHKERR(ISCopy(self.iset, result.iset)) return result def load(self, Viewer viewer) -> Self: @@ -298,9 +296,9 @@ cdef class IS(Object): cdef MPI_Comm comm = MPI_COMM_NULL cdef PetscObject obj = (viewer.vwr) if self.iset == NULL: - CHKERR( PetscObjectGetComm(obj, &comm) ) - CHKERR( ISCreate(comm, &self.iset) ) - CHKERR( ISLoad(self.iset, viewer.vwr) ) + CHKERR(PetscObjectGetComm(obj, &comm)) + CHKERR(ISCreate(comm, &self.iset)) + CHKERR(ISLoad(self.iset, viewer.vwr)) return self def allGather(self) -> IS: @@ -316,7 +314,7 @@ cdef class IS(Object): """ cdef IS iset = IS() - CHKERR( ISAllGather(self.iset, &iset.iset) ) + CHKERR(ISAllGather(self.iset, &iset.iset)) return iset def toGeneral(self) -> Self: @@ -329,7 +327,7 @@ cdef class IS(Object): petsc.ISToGeneral, petsc.ISType """ - CHKERR( ISToGeneral(self.iset) ) + CHKERR(ISToGeneral(self.iset)) return self def buildTwoSided(self, IS toindx=None) -> IS: @@ -359,7 +357,7 @@ cdef class IS(Object): cdef PetscIS ctoindx = NULL if toindx is not None: ctoindx = toindx.iset cdef IS result = IS() - CHKERR( ISBuildTwoSided(self.iset, ctoindx, &result.iset) ) + CHKERR(ISBuildTwoSided(self.iset, ctoindx, &result.iset)) return result def invertPermutation(self, nlocal: int | None = None) -> IS: @@ -383,7 +381,7 @@ cdef class IS(Object): cdef PetscInt cnlocal = PETSC_DECIDE if nlocal is not None: cnlocal = asInt(nlocal) cdef IS iset = IS() - CHKERR( ISInvertPermutation(self.iset, cnlocal, &iset.iset) ) + CHKERR(ISInvertPermutation(self.iset, cnlocal, &iset.iset)) return iset def getSize(self) -> int: @@ -397,7 +395,7 @@ cdef class IS(Object): """ cdef PetscInt N = 0 - CHKERR( ISGetSize(self.iset, &N) ) + CHKERR(ISGetSize(self.iset, &N)) return toInt(N) def getLocalSize(self) -> int: @@ -411,7 +409,7 @@ cdef class IS(Object): """ cdef PetscInt n = 0 - CHKERR( ISGetLocalSize(self.iset, &n) ) + CHKERR(ISGetLocalSize(self.iset, &n)) return toInt(n) def getSizes(self) -> tuple[int, int]: @@ -432,8 +430,8 @@ cdef class IS(Object): """ cdef PetscInt n = 0, N = 0 - 
CHKERR( ISGetLocalSize(self.iset, &n) ) - CHKERR( ISGetSize(self.iset, &N) ) + CHKERR(ISGetLocalSize(self.iset, &n)) + CHKERR(ISGetSize(self.iset, &N)) return (toInt(n), toInt(N)) def getBlockSize(self) -> int: @@ -447,7 +445,7 @@ cdef class IS(Object): """ cdef PetscInt bs = 1 - CHKERR( ISGetBlockSize(self.iset, &bs) ) + CHKERR(ISGetBlockSize(self.iset, &bs)) return toInt(bs) def setBlockSize(self, bs: int) -> None: @@ -466,7 +464,7 @@ cdef class IS(Object): """ cdef PetscInt cbs = asInt(bs) - CHKERR( ISSetBlockSize(self.iset, cbs) ) + CHKERR(ISSetBlockSize(self.iset, cbs)) def sort(self) -> Self: """Sort the indices of an index set. @@ -478,7 +476,7 @@ cdef class IS(Object): petsc.ISSort """ - CHKERR( ISSort(self.iset) ) + CHKERR(ISSort(self.iset)) return self def isSorted(self) -> bool: @@ -492,7 +490,7 @@ cdef class IS(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( ISSorted(self.iset, &flag) ) + CHKERR(ISSorted(self.iset, &flag)) return toBool(flag) def setPermutation(self) -> Self: @@ -505,7 +503,7 @@ cdef class IS(Object): petsc.ISSetPermutation """ - CHKERR( ISSetPermutation(self.iset) ) + CHKERR(ISSetPermutation(self.iset)) return self def isPermutation(self) -> bool: @@ -519,7 +517,7 @@ cdef class IS(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( ISPermutation(self.iset, &flag) ) + CHKERR(ISPermutation(self.iset, &flag)) return toBool(flag) def setIdentity(self) -> Self: @@ -532,7 +530,7 @@ cdef class IS(Object): petsc.ISSetIdentity """ - CHKERR( ISSetIdentity(self.iset) ) + CHKERR(ISSetIdentity(self.iset)) return self def isIdentity(self) -> bool: @@ -546,7 +544,7 @@ cdef class IS(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( ISIdentity(self.iset, &flag) ) + CHKERR(ISIdentity(self.iset, &flag)) return toBool(flag) def equal(self, IS iset) -> bool: @@ -565,7 +563,7 @@ cdef class IS(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( ISEqual(self.iset, iset.iset, &flag) ) + CHKERR(ISEqual(self.iset, iset.iset, &flag)) return toBool(flag) def sum(self, IS iset) -> IS: @@ -584,7 +582,7 @@ cdef class IS(Object): """ cdef IS out = IS() - CHKERR( ISSum(self.iset, iset.iset, &out.iset) ) + CHKERR(ISSum(self.iset, iset.iset, &out.iset)) return out def expand(self, IS iset) -> IS: @@ -611,7 +609,7 @@ cdef class IS(Object): """ cdef IS out = IS() - CHKERR( ISExpand(self.iset, iset.iset, &out.iset) ) + CHKERR(ISExpand(self.iset, iset.iset, &out.iset)) return out def union(self, IS iset) -> IS: @@ -640,13 +638,13 @@ cdef class IS(Object): """ cdef PetscBool flag1=PETSC_FALSE, flag2=PETSC_FALSE - CHKERR( ISSorted(self.iset, &flag1) ) - CHKERR( ISSorted(iset.iset, &flag2) ) + CHKERR(ISSorted(self.iset, &flag1)) + CHKERR(ISSorted(iset.iset, &flag2)) cdef IS out = IS() if flag1==PETSC_TRUE and flag2==PETSC_TRUE: - CHKERR( ISSum(self.iset, iset.iset, &out.iset) ) + CHKERR(ISSum(self.iset, iset.iset, &out.iset)) else: - CHKERR( ISExpand(self.iset, iset.iset, &out.iset) ) + CHKERR(ISExpand(self.iset, iset.iset, &out.iset)) return out def difference(self, IS iset: IS) -> IS: @@ -670,7 +668,7 @@ cdef class IS(Object): """ cdef IS out = IS() - CHKERR( ISDifference(self.iset, iset.iset, &out.iset) ) + CHKERR(ISDifference(self.iset, iset.iset, &out.iset)) return out def complement(self, nmin: int, nmax: int) -> IS: @@ -706,7 +704,7 @@ cdef class IS(Object): cdef PetscInt cnmin = asInt(nmin) cdef PetscInt cnmax = asInt(nmax) cdef IS out = IS() - CHKERR( ISComplement(self.iset, cnmin, cnmax, &out.iset) ) + CHKERR(ISComplement(self.iset, cnmin, cnmax, &out.iset)) return 
out def embed(self, IS iset, drop: bool) -> IS: @@ -737,7 +735,7 @@ cdef class IS(Object): """ cdef PetscBool bval = drop cdef IS out = IS() - CHKERR( ISEmbed(self.iset, iset.iset, bval, &out.iset) ) + CHKERR(ISEmbed(self.iset, iset.iset, bval, &out.iset)) return out def renumber(self, IS mult=None) -> tuple[int, IS]: @@ -767,7 +765,7 @@ cdef class IS(Object): if mult is not None: mlt = mult.iset cdef IS out = IS() cdef PetscInt n = 0 - CHKERR( ISRenumber(self.iset, mlt, &n, &out.iset) ) + CHKERR(ISRenumber(self.iset, mlt, &n, &out.iset)) return (toInt(n), out) # @@ -786,7 +784,7 @@ cdef class IS(Object): cdef PetscInt nidx = 0, *idx = NULL cdef PetscCopyMode cm = PETSC_COPY_VALUES indices = iarray_i(indices, &nidx, &idx) - CHKERR( ISGeneralSetIndices(self.iset, nidx, idx, cm) ) + CHKERR(ISGeneralSetIndices(self.iset, nidx, idx, cm)) def getIndices(self) -> ArrayInt: """Return the indices of the index set. @@ -800,13 +798,13 @@ cdef class IS(Object): """ cdef PetscInt size = 0 cdef const PetscInt *indices = NULL - CHKERR( ISGetLocalSize(self.iset, &size) ) - CHKERR( ISGetIndices(self.iset, &indices) ) + CHKERR(ISGetLocalSize(self.iset, &size)) + CHKERR(ISGetIndices(self.iset, &indices)) cdef object oindices = None try: oindices = array_i(size, indices) finally: - CHKERR( ISRestoreIndices(self.iset, &indices) ) + CHKERR(ISRestoreIndices(self.iset, &indices)) return oindices def setBlockIndices(self, bsize: int, indices: Sequence[int]) -> None: @@ -830,7 +828,7 @@ cdef class IS(Object): cdef PetscInt nidx = 0, *idx = NULL cdef PetscCopyMode cm = PETSC_COPY_VALUES indices = iarray_i(indices, &nidx, &idx) - CHKERR( ISBlockSetIndices(self.iset, bs, nidx, idx, cm) ) + CHKERR(ISBlockSetIndices(self.iset, bs, nidx, idx, cm)) def getBlockIndices(self) -> ArrayInt: """Return the indices of an index set with type `IS.Type.BLOCK`. @@ -844,14 +842,14 @@ cdef class IS(Object): """ cdef PetscInt size = 0, bs = 1 cdef const PetscInt *indices = NULL - CHKERR( ISGetLocalSize(self.iset, &size) ) - CHKERR( ISGetBlockSize(self.iset, &bs) ) - CHKERR( ISBlockGetIndices(self.iset, &indices) ) + CHKERR(ISGetLocalSize(self.iset, &size)) + CHKERR(ISGetBlockSize(self.iset, &bs)) + CHKERR(ISBlockGetIndices(self.iset, &indices)) cdef object oindices = None try: oindices = array_i(size//bs, indices) finally: - CHKERR( ISBlockRestoreIndices(self.iset, &indices) ) + CHKERR(ISBlockRestoreIndices(self.iset, &indices)) return oindices def setStride(self, size: int, first: int = 0, step: int = 1) -> None: @@ -876,7 +874,7 @@ cdef class IS(Object): cdef PetscInt csize = asInt(size) cdef PetscInt cfirst = asInt(first) cdef PetscInt cstep = asInt(step) - CHKERR( ISStrideSetStride(self.iset, csize, cfirst, cstep) ) + CHKERR(ISStrideSetStride(self.iset, csize, cfirst, cstep)) def getStride(self) -> tuple[int, int, int]: """Return size and stride information. 
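And the stride variant, sketched: createStride(size, first, step) enumerates first + i*step; note the wrapper's default step is 0, so pass it explicitly for a genuine stride:

>>> from petsc4py import PETSc
>>> sset = PETSc.IS().createStride(5, first=10, step=2, comm=PETSc.COMM_SELF)
>>> sset.getStride()           # (local size, first, step)
(5, 10, 2)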
@@ -898,8 +896,8 @@ cdef class IS(Object): """ cdef PetscInt size=0, first=0, step=0 - CHKERR( ISGetLocalSize(self.iset, &size) ) - CHKERR( ISStrideGetInfo(self.iset, &first, &step) ) + CHKERR(ISGetLocalSize(self.iset, &size)) + CHKERR(ISStrideGetInfo(self.iset, &first, &step)) return (toInt(size), toInt(first), toInt(step)) def getInfo(self) -> tuple[int, int]: @@ -920,7 +918,7 @@ cdef class IS(Object): """ cdef PetscInt first = 0, step = 0 - CHKERR( ISStrideGetInfo(self.iset, &first, &step) ) + CHKERR(ISStrideGetInfo(self.iset, &first, &step)) return (toInt(first), toInt(step)) # @@ -1051,7 +1049,7 @@ cdef class IS(Object): class GLMapMode(object): - """Enum describing mapping behavior for global-to-local maps when global indices are missing. + """Enum describing mapping behavior when global indices are missing. MASK Give missing global indices a local index of -1. @@ -1068,6 +1066,7 @@ class GLMapMode(object): class LGMapType(object): + """Local to global map types.""" BASIC = S_(ISLOCALTOGLOBALMAPPINGBASIC) HASH = S_(ISLOCALTOGLOBALMAPPINGHASH) @@ -1075,7 +1074,7 @@ class LGMapType(object): # -------------------------------------------------------------------- cdef class LGMap(Object): - """Mapping from an arbitrary local ordering from ``0`` to ``n-1`` to a global PETSc ordering used by a vector or matrix. + """Mapping from a local to a global ordering. See Also -------- @@ -1095,8 +1094,7 @@ cdef class LGMap(Object): def __call__( self, indices: Sequence[int], - result: ArrayInt | None = None - ) -> None: + result: ArrayInt | None = None) -> None: """Convert a locally numbered list of integers to a global numbering. Not collective. @@ -1140,7 +1138,7 @@ cdef class LGMap(Object): """ cdef PetscISLocalToGlobalMappingType cval = NULL lgmap_type = str2bytes(lgmap_type, &cval) - CHKERR( ISLocalToGlobalMappingSetType(self.lgm, cval) ) + CHKERR(ISLocalToGlobalMappingSetType(self.lgm, cval)) def setFromOptions(self) -> None: """Set mapping options from the options database. @@ -1152,7 +1150,7 @@ cdef class LGMap(Object): petsc_options, petsc.ISLocalToGlobalMappingSetFromOptions """ - CHKERR( ISLocalToGlobalMappingSetFromOptions(self.lgm) ) + CHKERR(ISLocalToGlobalMappingSetFromOptions(self.lgm)) def view(self, Viewer viewer=None) -> None: """View the local-to-global mapping. @@ -1171,7 +1169,7 @@ cdef class LGMap(Object): """ cdef PetscViewer cviewer = NULL if viewer is not None: cviewer = viewer.vwr - CHKERR( ISLocalToGlobalMappingView(self.lgm, cviewer) ) + CHKERR(ISLocalToGlobalMappingView(self.lgm, cviewer)) def destroy(self) -> Self: """Destroy the local-to-global mapping. @@ -1183,15 +1181,14 @@ cdef class LGMap(Object): petsc.ISLocalToGlobalMappingDestroy """ - CHKERR( ISLocalToGlobalMappingDestroy(&self.lgm) ) + CHKERR(ISLocalToGlobalMappingDestroy(&self.lgm)) return self def create( self, indices: Sequence[int], bsize: int | None = None, - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create a local-to-global mapping. Not collective. 
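A minimal sketch of building one of these mappings: local indices 0 through n-1 map to the global indices given at creation, with block size 1 unless requested otherwise:

>>> from petsc4py import PETSc
>>> lgmap = PETSc.LGMap().create([10, 20, 30, 40], comm=PETSc.COMM_SELF)
>>> lgmap.getSize()
4
>>> lgmap.getBlockSize()
1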
@@ -1217,9 +1214,9 @@ cdef class LGMap(Object): if bsize is not None: bs = asInt(bsize) if bs == PETSC_DECIDE: bs = 1 indices = iarray_i(indices, &nidx, &idx) - CHKERR( ISLocalToGlobalMappingCreate( - ccomm, bs, nidx, idx, cm, &newlgm) ) - CHKERR( PetscCLEAR(self.obj) ); self.lgm = newlgm + CHKERR(ISLocalToGlobalMappingCreate( + ccomm, bs, nidx, idx, cm, &newlgm)) + CHKERR(PetscCLEAR(self.obj)); self.lgm = newlgm return self def createIS(self, IS iset) -> Self: @@ -1238,9 +1235,9 @@ cdef class LGMap(Object): """ cdef PetscLGMap newlgm = NULL - CHKERR( ISLocalToGlobalMappingCreateIS( - iset.iset, &newlgm) ) - CHKERR( PetscCLEAR(self.obj) ); self.lgm = newlgm + CHKERR(ISLocalToGlobalMappingCreateIS( + iset.iset, &newlgm)) + CHKERR(PetscCLEAR(self.obj)); self.lgm = newlgm return self def createSF(self, SF sf, start: int) -> Self: @@ -1262,8 +1259,8 @@ cdef class LGMap(Object): """ cdef PetscLGMap newlgm = NULL cdef PetscInt cstart = asInt(start) - CHKERR( ISLocalToGlobalMappingCreateSF(sf.sf, cstart, &newlgm) ) - CHKERR( PetscCLEAR(self.obj) ); self.lgm = newlgm + CHKERR(ISLocalToGlobalMappingCreateSF(sf.sf, cstart, &newlgm)) + CHKERR(PetscCLEAR(self.obj)); self.lgm = newlgm return self def getSize(self) -> int: @@ -1277,7 +1274,7 @@ cdef class LGMap(Object): """ cdef PetscInt n = 0 - CHKERR( ISLocalToGlobalMappingGetSize(self.lgm, &n) ) + CHKERR(ISLocalToGlobalMappingGetSize(self.lgm, &n)) return toInt(n) def getBlockSize(self) -> int: @@ -1291,7 +1288,7 @@ cdef class LGMap(Object): """ cdef PetscInt bs = 1 - CHKERR( ISLocalToGlobalMappingGetBlockSize(self.lgm, &bs) ) + CHKERR(ISLocalToGlobalMappingGetBlockSize(self.lgm, &bs)) return toInt(bs) def getIndices(self) -> ArrayInt: @@ -1306,16 +1303,16 @@ cdef class LGMap(Object): """ cdef PetscInt size = 0 cdef const PetscInt *indices = NULL - CHKERR( ISLocalToGlobalMappingGetSize( - self.lgm, &size) ) - CHKERR( ISLocalToGlobalMappingGetIndices( - self.lgm, &indices) ) + CHKERR(ISLocalToGlobalMappingGetSize( + self.lgm, &size)) + CHKERR(ISLocalToGlobalMappingGetIndices( + self.lgm, &indices)) cdef object oindices = None try: oindices = array_i(size, indices) finally: - CHKERR( ISLocalToGlobalMappingRestoreIndices( - self.lgm, &indices) ) + CHKERR(ISLocalToGlobalMappingRestoreIndices( + self.lgm, &indices)) return oindices def getBlockIndices(self) -> ArrayInt: @@ -1330,18 +1327,18 @@ cdef class LGMap(Object): """ cdef PetscInt size = 0, bs = 1 cdef const PetscInt *indices = NULL - CHKERR( ISLocalToGlobalMappingGetSize( - self.lgm, &size) ) - CHKERR( ISLocalToGlobalMappingGetBlockSize( - self.lgm, &bs) ) - CHKERR( ISLocalToGlobalMappingGetBlockIndices( - self.lgm, &indices) ) + CHKERR(ISLocalToGlobalMappingGetSize( + self.lgm, &size)) + CHKERR(ISLocalToGlobalMappingGetBlockSize( + self.lgm, &bs)) + CHKERR(ISLocalToGlobalMappingGetBlockIndices( + self.lgm, &indices)) cdef object oindices = None try: oindices = array_i(size//bs, indices) finally: - CHKERR( ISLocalToGlobalMappingRestoreBlockIndices( - self.lgm, &indices) ) + CHKERR(ISLocalToGlobalMappingRestoreBlockIndices( + self.lgm, &indices)) return oindices def getInfo(self) -> dict[int, ArrayInt]: @@ -1362,9 +1359,9 @@ cdef class LGMap(Object): """ cdef PetscInt i, nproc = 0, *procs = NULL, cdef PetscInt *numprocs = NULL, **indices = NULL - cdef object neighs = { } - CHKERR( ISLocalToGlobalMappingGetInfo( - self.lgm, &nproc, &procs, &numprocs, &indices) ) + cdef dict neighs = {} + CHKERR(ISLocalToGlobalMappingGetInfo( + self.lgm, &nproc, &procs, &numprocs, &indices)) try: for i from 0 <= i < 
nproc: neighs[toInt(procs[i])] = array_i(numprocs[i], indices[i]) @@ -1391,9 +1388,9 @@ cdef class LGMap(Object): """ cdef PetscInt i, nproc = 0, *procs = NULL, cdef PetscInt *numprocs = NULL, **indices = NULL - cdef object neighs = { } - CHKERR( ISLocalToGlobalMappingGetBlockInfo( - self.lgm, &nproc, &procs, &numprocs, &indices) ) + cdef dict neighs = {} + CHKERR(ISLocalToGlobalMappingGetBlockInfo( + self.lgm, &nproc, &procs, &numprocs, &indices)) try: for i from 0 <= i < nproc: neighs[toInt(procs[i])] = array_i(numprocs[i], indices[i]) @@ -1407,8 +1404,7 @@ cdef class LGMap(Object): def apply( self, indices: Sequence[int], - result: ArrayInt | None = None, - ) -> ArrayInt: + result: ArrayInt | None = None) -> ArrayInt: """Convert a locally numbered list of integers to a global numbering. Not collective. @@ -1438,15 +1434,14 @@ cdef class LGMap(Object): if result is None: result = empty_i(niidx) result = oarray_i(result, &noidx, &oidx) assert niidx == noidx, "incompatible array sizes" - CHKERR( ISLocalToGlobalMappingApply( - self.lgm, niidx, iidx, oidx) ) + CHKERR(ISLocalToGlobalMappingApply( + self.lgm, niidx, iidx, oidx)) return result def applyBlock( self, indices: Sequence[int], - result: ArrayInt | None = None, - ) -> ArrayInt: + result: ArrayInt | None = None) -> ArrayInt: """Convert a local block numbering to a global block numbering. Not collective. @@ -1476,8 +1471,8 @@ cdef class LGMap(Object): if result is None: result = empty_i(niidx) result = oarray_i(result, &noidx, &oidx) assert niidx == noidx, "incompatible array sizes" - CHKERR( ISLocalToGlobalMappingApplyBlock( - self.lgm, niidx, iidx, oidx) ) + CHKERR(ISLocalToGlobalMappingApplyBlock( + self.lgm, niidx, iidx, oidx)) return result def applyIS(self, IS iset) -> IS: @@ -1501,15 +1496,14 @@ cdef class LGMap(Object): """ cdef IS result = IS() - CHKERR( ISLocalToGlobalMappingApplyIS( - self.lgm, iset.iset, &result.iset) ) + CHKERR(ISLocalToGlobalMappingApplyIS( + self.lgm, iset.iset, &result.iset)) return result def applyInverse( self, indices: Sequence[int], - mode: GLMapMode | str | None = None, - ) -> ArrayInt: + mode: GLMapMode | str | None = None) -> ArrayInt: """Compute local numbering from global numbering. Not collective. @@ -1538,18 +1532,17 @@ cdef class LGMap(Object): indices = iarray_i(indices, &n, &idx) cdef PetscInt nout = n, *idxout = NULL if cmode != PETSC_IS_GTOLM_MASK: - CHKERR( ISGlobalToLocalMappingApply( - self.lgm, cmode, n, idx, &nout, NULL) ) + CHKERR(ISGlobalToLocalMappingApply( + self.lgm, cmode, n, idx, &nout, NULL)) result = oarray_i(empty_i(nout), &nout, &idxout) - CHKERR( ISGlobalToLocalMappingApply( - self.lgm, cmode, n, idx, &nout, idxout) ) + CHKERR(ISGlobalToLocalMappingApply( + self.lgm, cmode, n, idx, &nout, idxout)) return result def applyBlockInverse( self, indices: Sequence[int], - mode: GLMapMode | str | None = None, - ) -> ArrayInt: + mode: GLMapMode | str | None = None) -> ArrayInt: """Compute blocked local numbering from blocked global numbering. Not collective. 
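How the apply family round-trips, sketched; per the GLMapMode description above, masking maps globals absent from the mapping to -1 (this assumes 'mask' is accepted as the string spelling of the mode):

>>> from petsc4py import PETSc
>>> lgmap = PETSc.LGMap().create([10, 20, 30], comm=PETSc.COMM_SELF)
>>> glob = lgmap.apply([0, 2])                  # local -> global: 10, 30
>>> loc = lgmap.applyInverse([30, 99], 'mask')  # 99 is unmapped, so it becomes -1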
@@ -1578,11 +1571,11 @@ cdef class LGMap(Object): indices = iarray_i(indices, &n, &idx) cdef PetscInt nout = n, *idxout = NULL if cmode != PETSC_IS_GTOLM_MASK: - CHKERR( ISGlobalToLocalMappingApply( - self.lgm, cmode, n, idx, &nout, NULL) ) + CHKERR(ISGlobalToLocalMappingApply( + self.lgm, cmode, n, idx, &nout, NULL)) result = oarray_i(empty_i(nout), &nout, &idxout) - CHKERR( ISGlobalToLocalMappingApplyBlock( - self.lgm, cmode, n, idx, &nout, idxout) ) + CHKERR(ISGlobalToLocalMappingApplyBlock( + self.lgm, cmode, n, idx, &nout, idxout)) return result # diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/KSP.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/KSP.pyx index 3c92d320662..706c2615485 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/KSP.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/KSP.pyx @@ -166,16 +166,9 @@ class KSPType(object): natively in PETSc, e.g., GCRODR, a recycled Krylov method which is similar to KSPLGMRES. `petsc.KSPHPDDM` - Notes - ----- - `KSP Type `__ - `KSP Type table `__ - `Pieplined KSP methods `__ - `Flexible KSP methods `__ - See Also -------- - petsc_options, petsc.KSP + petsc_options, petsc.KSPType """ RICHARDSON = S_(KSPRICHARDSON) @@ -227,6 +220,7 @@ class KSPType(object): FETIDP = S_(KSPFETIDP) HPDDM = S_(KSPHPDDM) + class KSPNormType(object): """KSP norm type. @@ -262,6 +256,7 @@ class KSPNormType(object): UNPRECONDITIONED = NORM_UNPRECONDITIONED NATURAL = NORM_NATURAL + class KSPConvergedReason(object): """KSP Converged Reason. @@ -329,7 +324,7 @@ class KSPConvergedReason(object): `petsc.KSPConvergedReason` """ - #iterating + # iterating CONVERGED_ITERATING = KSP_CONVERGED_ITERATING ITERATING = KSP_CONVERGED_ITERATING # converged @@ -353,6 +348,7 @@ class KSPConvergedReason(object): DIVERGED_INDEFINITE_MAT = KSP_DIVERGED_INDEFINITE_MAT DIVERGED_PCSETUP_FAILED = KSP_DIVERGED_PC_FAILED + class KSPHPDDMType(object): """The *HPDDM* Krylov solver type.""" GMRES = KSP_HPDDM_TYPE_GMRES @@ -366,6 +362,7 @@ class KSPHPDDMType(object): # -------------------------------------------------------------------- + cdef class KSP(Object): """Abstract PETSc object that manages all Krylov methods. @@ -442,7 +439,7 @@ cdef class KSP(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( KSPView(self.ksp, vwr) ) + CHKERR(KSPView(self.ksp, vwr)) def destroy(self) -> Self: """Destroy KSP context. @@ -454,7 +451,7 @@ cdef class KSP(Object): petsc.KSPDestroy """ - CHKERR( KSPDestroy(&self.ksp) ) + CHKERR(KSPDestroy(&self.ksp)) return self def create(self, comm: Comm | None = None) -> Self: @@ -469,8 +466,8 @@ cdef class KSP(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscKSP newksp = NULL - CHKERR( KSPCreate(ccomm, &newksp) ) - CHKERR( PetscCLEAR(self.obj) ); self.ksp = newksp + CHKERR(KSPCreate(ccomm, &newksp)) + CHKERR(PetscCLEAR(self.obj)); self.ksp = newksp return self def setType(self, ksp_type: Type | str) -> None: @@ -507,7 +504,7 @@ cdef class KSP(Object): """ cdef PetscKSPType cval = NULL ksp_type = str2bytes(ksp_type, &cval) - CHKERR( KSPSetType(self.ksp, cval) ) + CHKERR(KSPSetType(self.ksp, cval)) def getType(self) -> str: """Return the KSP type as a string from the `KSP` object. 
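Picking up the KSP bindings, the smallest possible solver-object sketch:

>>> from petsc4py import PETSc
>>> ksp = PETSc.KSP().create(comm=PETSc.COMM_SELF)
>>> ksp.setType(PETSc.KSP.Type.GMRES)
>>> ksp.getType()
'gmres'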
@@ -520,10 +517,10 @@ cdef class KSP(Object): """ cdef PetscKSPType cval = NULL - CHKERR( KSPGetType(self.ksp, &cval) ) + CHKERR(KSPGetType(self.ksp, &cval)) return bytes2str(cval) - def setOptionsPrefix(self, prefix: str) -> None: + def setOptionsPrefix(self, prefix: str | None) -> None: """Set the prefix used for all `KSP` options in the database. Logically collective. @@ -559,7 +556,7 @@ cdef class KSP(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( KSPSetOptionsPrefix(self.ksp, cval) ) + CHKERR(KSPSetOptionsPrefix(self.ksp, cval)) def getOptionsPrefix(self) -> str: """Return the prefix used for all `KSP` options in the database. @@ -572,10 +569,10 @@ cdef class KSP(Object): """ cdef const char *cval = NULL - CHKERR( KSPGetOptionsPrefix(self.ksp, &cval) ) + CHKERR(KSPGetOptionsPrefix(self.ksp, &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None) -> None: """Append to prefix used for all `KSP` options in the database. Logically collective. @@ -598,7 +595,7 @@ cdef class KSP(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( KSPAppendOptionsPrefix(self.ksp, cval) ) + CHKERR(KSPAppendOptionsPrefix(self.ksp, cval)) def setFromOptions(self) -> None: """Set `KSP` options from the options database. @@ -613,7 +610,7 @@ cdef class KSP(Object): petsc_options, petsc.KSPSetFromOptions """ - CHKERR( KSPSetFromOptions(self.ksp) ) + CHKERR(KSPSetFromOptions(self.ksp)) # --- application context --- @@ -665,10 +662,10 @@ cdef class KSP(Object): """ cdef PetscDM newdm = NULL - CHKERR( KSPGetDM(self.ksp, &newdm) ) + CHKERR(KSPGetDM(self.ksp, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) return dm def setDM(self, DM dm) -> None: @@ -702,7 +699,7 @@ cdef class KSP(Object): petsc.KSPSetDM """ - CHKERR( KSPSetDM(self.ksp, dm.dm) ) + CHKERR(KSPSetDM(self.ksp, dm.dm)) def setDMActive(self, flag: bool) -> None: """`DM` should be used to generate system matrix & RHS vector. @@ -726,7 +723,7 @@ cdef class KSP(Object): """ cdef PetscBool cflag = asBool(flag) - CHKERR( KSPSetDMActive(self.ksp, cflag) ) + CHKERR(KSPSetDMActive(self.ksp, cflag)) # --- operators and preconditioner --- @@ -734,8 +731,7 @@ cdef class KSP(Object): self, rhs: KSPRHSFunction, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set routine to compute the right-hand side of the linear system. Logically collective. @@ -763,14 +759,13 @@ cdef class KSP(Object): if kargs is None: kargs = {} context = (rhs, args, kargs) self.set_attr('__rhs__', context) - CHKERR( KSPSetComputeRHS(self.ksp, KSP_ComputeRHS, <void*>context) ) + CHKERR(KSPSetComputeRHS(self.ksp, KSP_ComputeRHS, <void*>context)) def setComputeOperators( self, operators: KSPOperatorsFunction, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set routine to compute the linear operators. Logically collective. @@ -809,7 +804,7 @@ cdef class KSP(Object): if kargs is None: kargs = {} context = (operators, args, kargs) self.set_attr('__operators__', context) - CHKERR( KSPSetComputeOperators(self.ksp, KSP_ComputeOps, <void*>context) ) + CHKERR(KSPSetComputeOperators(self.ksp, KSP_ComputeOps, <void*>context)) def setOperators(self, Mat A=None, Mat P=None) -> None: """Set matrix associated with the linear system. 
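The options-prefix methods above combine with the options database like this (the prefix and option names are illustrative):

    opts = PETSc.Options()
    opts['sub_ksp_type'] = 'cg'

    ksp.setOptionsPrefix('sub_')
    ksp.setFromOptions()   # now honors -sub_ksp_type cg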
@@ -850,7 +845,7 @@ cdef class KSP(Object): if A is not None: amat = A.mat cdef PetscMat pmat=amat if P is not None: pmat = P.mat - CHKERR( KSPSetOperators(self.ksp, amat, pmat) ) + CHKERR(KSPSetOperators(self.ksp, amat, pmat)) def getOperators(self) -> tuple[Mat, Mat]: """Return the matrix associated with the linear system. @@ -874,9 +869,9 @@ cdef class KSP(Object): """ cdef Mat A = Mat(), P = Mat() - CHKERR( KSPGetOperators(self.ksp, &A.mat, &P.mat) ) - CHKERR( PetscINCREF(A.obj) ) - CHKERR( PetscINCREF(P.obj) ) + CHKERR(KSPGetOperators(self.ksp, &A.mat, &P.mat)) + CHKERR(PetscINCREF(A.obj)) + CHKERR(PetscINCREF(P.obj)) return (A, P) def setPC(self, PC pc) -> None: @@ -897,7 +892,7 @@ cdef class KSP(Object): PETSc.KSP, getPC, petsc.KSPSetPC """ - CHKERR( KSPSetPC(self.ksp, pc.pc) ) + CHKERR(KSPSetPC(self.ksp, pc.pc)) def getPC(self) -> PC: """Return the preconditioner. @@ -910,8 +905,8 @@ cdef class KSP(Object): """ cdef PC pc = PC() - CHKERR( KSPGetPC(self.ksp, &pc.pc) ) - CHKERR( PetscINCREF(pc.obj) ) + CHKERR(KSPGetPC(self.ksp, &pc.pc)) + CHKERR(PetscINCREF(pc.obj)) return pc # --- tolerances and convergence --- @@ -921,8 +916,7 @@ cdef class KSP(Object): rtol: float | None = None, atol: float | None = None, divtol: float | None = None, - max_it: int | None = None - ) -> None: + max_it: int | None = None) -> None: """Set various tolerances used by the KSP convergence testers. Logically collective. @@ -958,13 +952,13 @@ cdef class KSP(Object): """ cdef PetscReal crtol, catol, cdivtol - crtol = catol = cdivtol = PETSC_DEFAULT; + crtol = catol = cdivtol = PETSC_DEFAULT if rtol is not None: crtol = asReal(rtol) if atol is not None: catol = asReal(atol) if divtol is not None: cdivtol = asReal(divtol) cdef PetscInt cmaxits = PETSC_DEFAULT if max_it is not None: cmaxits = asInt(max_it) - CHKERR( KSPSetTolerances(self.ksp, crtol, catol, cdivtol, cmaxits) ) + CHKERR(KSPSetTolerances(self.ksp, crtol, catol, cdivtol, cmaxits)) def getTolerances(self) -> tuple[float, float, float, int]: """Return various tolerances used by the KSP convergence tests. @@ -992,15 +986,14 @@ cdef class KSP(Object): """ cdef PetscReal crtol=0, catol=0, cdivtol=0 cdef PetscInt cmaxits=0 - CHKERR( KSPGetTolerances(self.ksp, &crtol, &catol, &cdivtol, &cmaxits) ) + CHKERR(KSPGetTolerances(self.ksp, &crtol, &catol, &cdivtol, &cmaxits)) return (toReal(crtol), toReal(catol), toReal(cdivtol), toInt(cmaxits)) def setConvergenceTest( self, converged: KSPConvergenceTestFunction, args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the function to be used to determine convergence. Logically collective. 
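A sketch tying together the operator, preconditioner, and tolerance setters above (assumes an assembled Mat A):

    ksp.setOperators(A)                        # P defaults to A
    ksp.getPC().setType(PETSc.PC.Type.JACOBI)
    ksp.setTolerances(rtol=1e-8, max_it=200)   # unset arguments keep their defaults
    rtol, atol, divtol, max_it = ksp.getTolerances()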
@@ -1035,8 +1028,8 @@ cdef class KSP(Object): cdef void* cctx = NULL cdef PetscBool islsqr = PETSC_FALSE if converged is not None: - CHKERR( KSPSetConvergenceTest( - self.ksp, KSP_Converged, NULL, NULL) ) + CHKERR(KSPSetConvergenceTest( + self.ksp, KSP_Converged, NULL, NULL)) if args is None: args = () if kargs is None: kargs = {} self.set_attr('__converged__', (converged, args, kargs)) @@ -1045,23 +1038,20 @@ cdef class KSP(Object): # different convergence tests (like KSPLSQR for example) # Now we handle LSQR explicitly, but a proper mechanism, # say KSPGetDefaultConverged would be more appropriate - CHKERR( KSPGetNormType(self.ksp, &normtype) ) + CHKERR(KSPGetNormType(self.ksp, &normtype)) if normtype != KSP_NORM_NONE: - CHKERR( PetscObjectTypeCompare(self.ksp, - KSPLSQR, &islsqr) ) - CHKERR( KSPConvergedDefaultCreate(&cctx) ) + CHKERR(PetscObjectTypeCompare(self.ksp, + KSPLSQR, &islsqr)) + CHKERR(KSPConvergedDefaultCreate(&cctx)) if not islsqr: - CHKERR( KSPSetConvergenceTest( - self.ksp, KSPConvergedDefault, - cctx, KSPConvergedDefaultDestroy) ) + CHKERR(KSPSetConvergenceTest(self.ksp, KSPConvergedDefault, + cctx, KSPConvergedDefaultDestroy)) else: - CHKERR( KSPSetConvergenceTest( - self.ksp, KSPLSQRConvergedDefault, - cctx, KSPConvergedDefaultDestroy) ) + CHKERR(KSPSetConvergenceTest(self.ksp, KSPLSQRConvergedDefault, + cctx, KSPConvergedDefaultDestroy)) else: - CHKERR( KSPSetConvergenceTest( - self.ksp, KSPConvergedSkip, - NULL, NULL) ) + CHKERR(KSPSetConvergenceTest(self.ksp, KSPConvergedSkip, + NULL, NULL)) self.set_attr('__converged__', None) def addConvergenceTest( @@ -1069,8 +1059,7 @@ cdef class KSP(Object): converged: KSPConvergenceTestFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None, - prepend: bool = False - ) -> None: + prepend: bool = False) -> None: """Add the function to be used to determine convergence. Logically collective. @@ -1099,13 +1088,11 @@ cdef class KSP(Object): petsc.KSPSetConvergenceTest, petsc.KSPConvergedDefault """ - cdef PetscKSPNormType normtype = KSP_NORM_NONE - cdef void* cctx = NULL cdef object oconverged = self.get_attr("__converged__") cdef PetscBool pre = asBool(prepend) if converged is None: return if oconverged is not None: raise NotImplementedError("converged callback already set or added") - CHKERR( KSPAddConvergenceTest(self.ksp, KSP_Converged, pre) ) + CHKERR(KSPAddConvergenceTest(self.ksp, KSP_Converged, pre)) if args is None: args = () if kargs is None: kargs = {} self.set_attr('__converged__', (converged, args, kargs)) @@ -1126,6 +1113,8 @@ cdef class KSP(Object): def callConvergenceTest(self, its: int, rnorm: float) -> KSP.ConvergedReason: """Call the convergence test callback. + Collective. + Parameters ---------- its @@ -1141,14 +1130,13 @@ cdef class KSP(Object): cdef PetscInt ival = asInt(its) cdef PetscReal rval = asReal(rnorm) cdef PetscKSPConvergedReason reason = KSP_CONVERGED_ITERATING - CHKERR( KSPConvergenceTestCall(self.ksp, ival, rval, &reason) ) + CHKERR(KSPConvergenceTestCall(self.ksp, ival, rval, &reason)) return reason def setConvergenceHistory( self, length: int | None = None, - reset: bool = False - ) -> None: + reset: bool = False) -> None: """Set the array used to hold the residual history. Not collective. 
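A hedged sketch of a custom convergence test for setConvergenceTest above; the callback receives (ksp, its, rnorm) and returns a ConvergedReason, with the thresholds here purely illustrative:

    def my_test(ksp, its, rnorm):
        if rnorm < 1e-12:
            return PETSc.KSP.ConvergedReason.CONVERGED_ATOL
        if its >= 500:
            return PETSc.KSP.ConvergedReason.DIVERGED_MAX_IT
        return PETSc.KSP.ConvergedReason.ITERATING

    ksp.setConvergenceTest(my_test)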
@@ -1187,7 +1175,7 @@ cdef class KSP(Object): if reset: flag = PETSC_TRUE cdef object hist = oarray_r(empty_r(size), NULL, &data) self.set_attr('__history__', hist) - CHKERR( KSPSetResidualHistory(self.ksp, data, size, flag) ) + CHKERR(KSPSetResidualHistory(self.ksp, data, size, flag)) def getConvergenceHistory(self) -> ArrayReal: """Return array containing the residual history. @@ -1201,7 +1189,7 @@ cdef class KSP(Object): """ cdef const PetscReal *data = NULL cdef PetscInt size = 0 - CHKERR( KSPGetResidualHistory(self.ksp, &data, &size) ) + CHKERR(KSPGetResidualHistory(self.ksp, &data, &size)) return array_r(size, data) def logConvergenceHistory(self, rnorm: float) -> None: @@ -1216,15 +1204,14 @@ cdef class KSP(Object): """ cdef PetscReal rval = asReal(rnorm) - CHKERR( KSPLogResidualHistory(self.ksp, rval) ) + CHKERR(KSPLogResidualHistory(self.ksp, rval)) # --- monitoring --- def setMonitor(self, - monitor: KSPMonitorFunction, - args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None - ) -> None: + monitor: KSPMonitorFunction, + args: tuple[Any, ...] | None = None, + kargs: dict[str, Any] | None = None) -> None: """Set additional function to monitor the residual. Logically collective. @@ -1263,7 +1250,7 @@ cdef class KSP(Object): if monitorlist is None: monitorlist = [] self.set_attr('__monitor__', monitorlist) - CHKERR( KSPMonitorSet(self.ksp, KSP_Monitor, NULL, NULL) ) + CHKERR(KSPMonitorSet(self.ksp, KSP_Monitor, NULL, NULL)) if args is None: args = () if kargs is None: kargs = {} monitorlist.append((monitor, args, kargs)) @@ -1291,7 +1278,7 @@ cdef class KSP(Object): petsc_options, getMonitor, setMonitor, monitor, petsc.KSPMonitorCancel """ - CHKERR( KSPMonitorCancel(self.ksp) ) + CHKERR(KSPMonitorCancel(self.ksp)) self.set_attr('__monitor__', None) cancelMonitor = monitorCancel @@ -1313,7 +1300,7 @@ cdef class KSP(Object): """ cdef PetscInt ival = asInt(its) cdef PetscReal rval = asReal(rnorm) - CHKERR( KSPMonitor(self.ksp, ival, rval) ) + CHKERR(KSPMonitor(self.ksp, ival, rval)) # --- customization --- @@ -1349,7 +1336,7 @@ cdef class KSP(Object): petsc.KSPSetPCSide """ - CHKERR( KSPSetPCSide(self.ksp, side) ) + CHKERR(KSPSetPCSide(self.ksp, side)) def getPCSide(self) -> PC.Side: """Return the preconditioning side. @@ -1362,12 +1349,14 @@ cdef class KSP(Object): """ cdef PetscPCSide side = PC_LEFT - CHKERR( KSPGetPCSide(self.ksp, &side) ) + CHKERR(KSPGetPCSide(self.ksp, &side)) return side def setNormType(self, normtype: NormType) -> None: """Set the norm that is used for convergence testing. + Logically collective. + Parameters ---------- normtype @@ -1388,7 +1377,7 @@ cdef class KSP(Object): petsc.KSPSetCheckNormIteration """ - CHKERR( KSPSetNormType(self.ksp, normtype) ) + CHKERR(KSPSetNormType(self.ksp, normtype)) def getNormType(self) -> NormType: """Return the norm that is used for convergence testing. @@ -1401,12 +1390,14 @@ cdef class KSP(Object): """ cdef PetscKSPNormType normtype = KSP_NORM_NONE - CHKERR( KSPGetNormType(self.ksp, &normtype) ) + CHKERR(KSPGetNormType(self.ksp, &normtype)) return normtype def setComputeEigenvalues(self, flag: bool) -> None: """Set a flag to compute eigenvalues. + Logically collective. + Set a flag so that the extreme eigenvalues will be calculated via a Lanczos or Arnoldi process as the linear system is solved. 
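And a matching sketch for setMonitor above; monitors are appended to a list and each receives (ksp, its, rnorm):

    def my_monitor(ksp, its, rnorm):
        PETSc.Sys.Print(f'it {its:3d}  rnorm {rnorm:.3e}', comm=ksp.getComm())

    ksp.setMonitor(my_monitor)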
@@ -1426,7 +1417,7 @@ cdef class KSP(Object): """ cdef PetscBool compute = asBool(flag) - CHKERR( KSPSetComputeEigenvalues(self.ksp, compute) ) + CHKERR(KSPSetComputeEigenvalues(self.ksp, compute)) def getComputeEigenvalues(self) -> bool: """Return flag indicating whether eigenvalues will be calculated. @@ -1443,7 +1434,7 @@ cdef class KSP(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( KSPGetComputeEigenvalues(self.ksp, &flag) ) + CHKERR(KSPGetComputeEigenvalues(self.ksp, &flag)) return toBool(flag) def setComputeSingularValues(self, flag: bool) -> None: @@ -1470,11 +1461,13 @@ cdef class KSP(Object): """ cdef PetscBool compute = asBool(flag) - CHKERR( KSPSetComputeSingularValues(self.ksp, compute) ) + CHKERR(KSPSetComputeSingularValues(self.ksp, compute)) def getComputeSingularValues(self) -> bool: """Return flag indicating whether singular values will be calculated. + Not collective. + Return the flag indicating whether the extreme singular values will be calculated via a Lanczos or Arnoldi process as the linear system is solved. @@ -1485,7 +1478,7 @@ cdef class KSP(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( KSPGetComputeSingularValues(self.ksp, &flag) ) + CHKERR(KSPGetComputeSingularValues(self.ksp, &flag)) return toBool(flag) # --- initial guess --- @@ -1510,7 +1503,7 @@ cdef class KSP(Object): """ cdef PetscBool guess_nonzero = asBool(flag) - CHKERR( KSPSetInitialGuessNonzero(self.ksp, guess_nonzero) ) + CHKERR(KSPSetInitialGuessNonzero(self.ksp, guess_nonzero)) def getInitialGuessNonzero(self) -> bool: """Determine whether the KSP solver uses a zero initial guess. @@ -1523,7 +1516,7 @@ cdef class KSP(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( KSPGetInitialGuessNonzero(self.ksp, &flag) ) + CHKERR(KSPGetInitialGuessNonzero(self.ksp, &flag)) return toBool(flag) def setInitialGuessKnoll(self, flag: bool) -> None: @@ -1544,11 +1537,13 @@ cdef class KSP(Object): """ cdef PetscBool guess_knoll = asBool(flag) - CHKERR( KSPSetInitialGuessKnoll(self.ksp, guess_knoll) ) + CHKERR(KSPSetInitialGuessKnoll(self.ksp, guess_knoll)) def getInitialGuessKnoll(self) -> bool: """Determine whether the KSP solver is using the Knoll trick. + Not collective. + This uses the Knoll trick; using `PC.apply` to compute the initial guess. @@ -1558,12 +1553,14 @@ cdef class KSP(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( KSPGetInitialGuessKnoll(self.ksp, &flag) ) + CHKERR(KSPGetInitialGuessKnoll(self.ksp, &flag)) return toBool(flag) def setUseFischerGuess(self, model: int, size: int) -> None: """Use the Paul Fischer algorithm to compute initial guesses. + Logically collective. + Use the Paul Fischer algorithm or its variants to compute initial guesses for a set of solves with related right hand sides. @@ -1583,7 +1580,7 @@ cdef class KSP(Object): """ cdef PetscInt ival1 = asInt(model) cdef PetscInt ival2 = asInt(size) - CHKERR( KSPSetUseFischerGuess(self.ksp, ival1, ival2) ) + CHKERR(KSPSetUseFischerGuess(self.ksp, ival1, ival2)) # --- solving --- @@ -1597,7 +1594,7 @@ cdef class KSP(Object): petsc.KSPSetUp """ - CHKERR( KSPSetUp(self.ksp) ) + CHKERR(KSPSetUp(self.ksp)) def reset(self) -> None: """Resets a KSP context. @@ -1612,7 +1609,7 @@ cdef class KSP(Object): petsc.KSPReset """ - CHKERR( KSPReset(self.ksp) ) + CHKERR(KSPReset(self.ksp)) def setUpOnBlocks(self) -> None: """Set up the preconditioner for each block in a block method. 
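The eigenvalue and singular-value flags documented here must be set before the solve; the estimates are then queried afterwards with computeEigenvalues/computeExtremeSingularValues, shown further below (a sketch, assuming vectors b and x exist):

    ksp.setComputeSingularValues(True)
    ksp.solve(b, x)
    smax, smin = ksp.computeExtremeSingularValues()
    PETSc.Sys.Print('condition number estimate:', smax / smin)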
@@ -1627,7 +1624,73 @@ cdef class KSP(Object): petsc.KSPSetUpOnBlocks """ - CHKERR( KSPSetUpOnBlocks(self.ksp) ) + CHKERR(KSPSetUpOnBlocks(self.ksp)) + + def setPreSolve( + self, + presolve: KSPPreSolveFunction | None, + args: tuple[Any, ...] | None = None, + kargs: dict[str, Any] | None = None) -> None: + """Set the function that is called at the beginning of each `KSP.solve`. + + Logically collective. + + Parameters + ---------- + presolve + The callback function. + args + Positional arguments for the callback function. + kargs + Keyword arguments for the callback function. + + See Also + -------- + solve, petsc.KSPSetPreSolve, petsc.KSPSetPostSolve + + """ + if presolve is not None: + if args is None: args = () + if kargs is None: kargs = {} + context = (presolve, args, kargs) + self.set_attr('__presolve__', context) + CHKERR(KSPSetPreSolve(self.ksp, KSP_PreSolve, <void*>context)) + else: + self.set_attr('__presolve__', None) + CHKERR(KSPSetPreSolve(self.ksp, NULL, NULL)) + + def setPostSolve( + self, + postsolve: KSPPostSolveFunction | None, + args: tuple[Any, ...] | None = None, + kargs: dict[str, Any] | None = None) -> None: + """Set the function that is called at the end of each `KSP.solve`. + + Logically collective. + + Parameters + ---------- + postsolve + The callback function. + args + Positional arguments for the callback function. + kargs + Keyword arguments for the callback function. + + See Also + -------- + solve, petsc.KSPSetPreSolve, petsc.KSPSetPostSolve + + """ + if postsolve is not None: + if args is None: args = () + if kargs is None: kargs = {} + context = (postsolve, args, kargs) + self.set_attr('__postsolve__', context) + CHKERR(KSPSetPostSolve(self.ksp, KSP_PostSolve, <void*>context)) + else: + self.set_attr('__postsolve__', None) + CHKERR(KSPSetPostSolve(self.ksp, NULL, NULL)) def solve(self, Vec b, Vec x) -> None: """Solve the linear system. @@ -1708,7 +1771,7 @@ cdef class KSP(Object): cdef PetscVec x_vec = NULL if b is not None: b_vec = b.vec if x is not None: x_vec = x.vec - CHKERR( KSPSolve(self.ksp, b_vec, x_vec) ) + CHKERR(KSPSolve(self.ksp, b_vec, x_vec)) def solveTranspose(self, Vec b, Vec x) -> None: """Solve the transpose of a linear system. @@ -1732,11 +1795,13 @@ cdef class KSP(Object): solve, petsc.KSPSolveTranspose """ - CHKERR( KSPSolveTranspose(self.ksp, b.vec, x.vec) ) + CHKERR(KSPSolveTranspose(self.ksp, b.vec, x.vec)) def matSolve(self, Mat B, Mat X) -> None: """Solve a linear system with multiple right-hand sides. + Collective. + These are stored as a `Mat.Type.DENSE`. Unlike `solve`, ``B`` and ``X`` must be different matrices. @@ -1752,11 +1817,13 @@ cdef class KSP(Object): solve, petsc.KSPMatSolve """ - CHKERR( KSPMatSolve(self.ksp, B.mat, X.mat) ) + CHKERR(KSPMatSolve(self.ksp, B.mat, X.mat)) def matSolveTranspose(self, Mat B, Mat X) -> None: """Solve the transpose of a linear system with multiple RHS. + Collective. 
+ Parameters ---------- B @@ -1769,39 +1836,39 @@ cdef class KSP(Object): solveTranspose, petsc.KSPMatSolve """ - CHKERR( KSPMatSolveTranspose(self.ksp, B.mat, X.mat) ) + CHKERR(KSPMatSolveTranspose(self.ksp, B.mat, X.mat)) def setIterationNumber(self, its: int) -> None: """Use `its` property.""" cdef PetscInt ival = asInt(its) - CHKERR( KSPSetIterationNumber(self.ksp, ival) ) + CHKERR(KSPSetIterationNumber(self.ksp, ival)) def getIterationNumber(self) -> int: """Use `its` property.""" cdef PetscInt ival = 0 - CHKERR( KSPGetIterationNumber(self.ksp, &ival) ) + CHKERR(KSPGetIterationNumber(self.ksp, &ival)) return toInt(ival) def setResidualNorm(self, rnorm: float) -> None: """Use `norm` property.""" cdef PetscReal rval = asReal(rnorm) - CHKERR( KSPSetResidualNorm(self.ksp, rval) ) + CHKERR(KSPSetResidualNorm(self.ksp, rval)) def getResidualNorm(self) -> float: """Use `norm` property.""" cdef PetscReal rval = 0 - CHKERR( KSPGetResidualNorm(self.ksp, &rval) ) + CHKERR(KSPGetResidualNorm(self.ksp, &rval)) return toReal(rval) def setConvergedReason(self, reason: KSP.ConvergedReason) -> None: """Use `reason` property.""" cdef PetscKSPConvergedReason val = reason - CHKERR( KSPSetConvergedReason(self.ksp, val) ) + CHKERR(KSPSetConvergedReason(self.ksp, val)) def getConvergedReason(self) -> KSP.ConvergedReason: """Use `reason` property.""" cdef PetscKSPConvergedReason reason = KSP_CONVERGED_ITERATING - CHKERR( KSPGetConvergedReason(self.ksp, &reason) ) + CHKERR(KSPGetConvergedReason(self.ksp, &reason)) return reason def setHPDDMType(self, hpddm_type: HPDDMType) -> None: @@ -1820,18 +1887,20 @@ cdef class KSP(Object): """ cdef PetscKSPHPDDMType ctype = hpddm_type - CHKERR( KSPHPDDMSetType(self.ksp, ctype) ) + CHKERR(KSPHPDDMSetType(self.ksp, ctype)) def getHPDDMType(self) -> HPDDMType: """Return the Krylov solver type. + Not collective. + See Also -------- petsc.KSPHPDDMGetType """ cdef PetscKSPHPDDMType cval = KSP_HPDDM_TYPE_GMRES - CHKERR( KSPHPDDMGetType(self.ksp, &cval) ) + CHKERR(KSPHPDDMGetType(self.ksp, &cval)) return cval def setErrorIfNotConverged(self, flag: bool) -> None: @@ -1850,7 +1919,7 @@ cdef class KSP(Object): """ cdef PetscBool ernc = asBool(flag) - CHKERR( KSPSetErrorIfNotConverged(self.ksp, ernc) ) + CHKERR(KSPSetErrorIfNotConverged(self.ksp, ernc)) def getErrorIfNotConverged(self) -> bool: """Return the flag indicating the solver will error if divergent. @@ -1863,7 +1932,7 @@ cdef class KSP(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( KSPGetErrorIfNotConverged(self.ksp, &flag) ) + CHKERR(KSPGetErrorIfNotConverged(self.ksp, &flag)) return toBool(flag) def getRhs(self) -> Vec: @@ -1877,13 +1946,15 @@ cdef class KSP(Object): """ cdef Vec vec = Vec() - CHKERR( KSPGetRhs(self.ksp, &vec.vec) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(KSPGetRhs(self.ksp, &vec.vec)) + CHKERR(PetscINCREF(vec.obj)) return vec def getSolution(self) -> Vec: """Return the solution for the linear system to be solved. + Not collective. + Note that this may not be the solution that is stored during the iterative process. @@ -1893,17 +1964,18 @@ cdef class KSP(Object): """ cdef Vec vec = Vec() - CHKERR( KSPGetSolution(self.ksp, &vec.vec) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(KSPGetSolution(self.ksp, &vec.vec)) + CHKERR(PetscINCREF(vec.obj)) return vec def getWorkVecs( self, right: int | None = None, - left: int | None = None - ) -> tuple[list[Vec], list[Vec]] | list[Vec] | None: + left: int | None = None) -> tuple[list[Vec], list[Vec]] | list[Vec] | None: """Create working vectors. 
+ Collective. + Parameters ---------- right @@ -1927,7 +1999,7 @@ cdef class KSP(Object): if L: nl = asInt(left) cdef object vecsr = [] if R else None cdef object vecsl = [] if L else None - CHKERR( KSPCreateVecs(self.ksp, nr, &vr, nl, &vr) ) + CHKERR(KSPCreateVecs(self.ksp, nr, &vr, nl, &vr)) try: for i from 0 <= i < nr: vecsr.append(ref_Vec(vr[i])) @@ -1947,6 +2019,8 @@ cdef class KSP(Object): def buildSolution(self, Vec x=None) -> Vec: """Return the solution vector. + Collective. + Parameters ---------- x @@ -1959,14 +2033,16 @@ cdef class KSP(Object): """ if x is None: x = Vec() if x.vec == NULL: - CHKERR( KSPGetSolution(self.ksp, &x.vec) ) - CHKERR( VecDuplicate(x.vec, &x.vec) ) - CHKERR( KSPBuildSolution(self.ksp, x.vec, NULL) ) + CHKERR(KSPGetSolution(self.ksp, &x.vec)) + CHKERR(VecDuplicate(x.vec, &x.vec)) + CHKERR(KSPBuildSolution(self.ksp, x.vec, NULL)) return x def buildResidual(self, Vec r=None) -> Vec: """Return the residual of the linear system. + Collective. + Parameters ---------- r @@ -1979,14 +2055,16 @@ cdef class KSP(Object): """ if r is None: r = Vec() if r.vec == NULL: - CHKERR( KSPGetRhs(self.ksp, &r.vec) ) - CHKERR( VecDuplicate(r.vec, &r.vec) ) - CHKERR( KSPBuildResidual(self.ksp , NULL, r.vec, &r.vec) ) + CHKERR(KSPGetRhs(self.ksp, &r.vec)) + CHKERR(VecDuplicate(r.vec, &r.vec)) + CHKERR(KSPBuildResidual(self.ksp , NULL, r.vec, &r.vec)) return r def computeEigenvalues(self) -> ArrayComplex: """Compute the extreme eigenvalues for the preconditioned operator. + Not collective. + See Also -------- petsc.KSPComputeEigenvalues @@ -1996,10 +2074,10 @@ cdef class KSP(Object): cdef PetscInt neig = 0 cdef PetscReal *rdata = NULL cdef PetscReal *idata = NULL - CHKERR( KSPGetIterationNumber(self.ksp, &its) ) + CHKERR(KSPGetIterationNumber(self.ksp, &its)) cdef ndarray r = oarray_r(empty_r(its), NULL, &rdata) cdef ndarray i = oarray_r(empty_r(its), NULL, &idata) - CHKERR( KSPComputeEigenvalues(self.ksp, its, rdata, idata, &neig) ) + CHKERR(KSPComputeEigenvalues(self.ksp, its, rdata, idata, &neig)) eigen = empty_c(neig) eigen.real = r[:neig] eigen.imag = i[:neig] @@ -2008,6 +2086,8 @@ cdef class KSP(Object): def computeExtremeSingularValues(self) -> tuple[float, float]: """Compute the extreme singular values for the preconditioned operator. + Collective. + Returns ------- smax : float @@ -2022,7 +2102,7 @@ cdef class KSP(Object): """ cdef PetscReal smax = 0 cdef PetscReal smin = 0 - CHKERR( KSPComputeExtremeSingularValues(self.ksp, &smax, &smin) ) + CHKERR(KSPComputeExtremeSingularValues(self.ksp, &smax, &smin)) return toReal(smax), toReal(smin) # --- GMRES --- @@ -2030,6 +2110,8 @@ cdef class KSP(Object): def setGMRESRestart(self, restart: int) -> None: """Set number of iterations at which KSP restarts. + Logically collective. + Suitable KSPs are: KSPGMRES, KSPFGMRES and KSPLGMRES. Parameters @@ -2043,15 +2125,14 @@ cdef class KSP(Object): """ cdef PetscInt ival = asInt(restart) - CHKERR( KSPGMRESSetRestart(self.ksp, ival) ) + CHKERR(KSPGMRESSetRestart(self.ksp, ival)) # --- Python --- def createPython( self, context: Any = None, - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create a linear solver of Python type. Collective. 
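Two sketches for the solve-phase additions above: the new setPreSolve/setPostSolve hooks, and matSolve with a block of right-hand sides (sizes illustrative):

    def presolve(ksp, b, x):
        PETSc.Sys.Print('starting solve')

    def postsolve(ksp, b, x):
        PETSc.Sys.Print('done in', ksp.getIterationNumber(), 'iterations')

    ksp.setPreSolve(presolve)
    ksp.setPostSolve(postsolve)
    ksp.setPreSolve(None)                       # passing None removes the hook

    n, nrhs = 100, 8                            # B and X must be distinct dense matrices
    B = PETSc.Mat().createDense([n, nrhs]); B.setUp(); B.assemble()
    X = PETSc.Mat().createDense([n, nrhs]); X.setUp(); X.assemble()
    ksp.matSolve(B, X)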
@@ -2071,10 +2152,10 @@ cdef class KSP(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscKSP newksp = NULL - CHKERR( KSPCreate(ccomm, &newksp) ) - CHKERR( PetscCLEAR(self.obj) ); self.ksp = newksp - CHKERR( KSPSetType(self.ksp, KSPPYTHON) ) - CHKERR( KSPPythonSetContext(self.ksp, <void*>context) ) + CHKERR(KSPCreate(ccomm, &newksp)) + CHKERR(PetscCLEAR(self.obj)); self.ksp = newksp + CHKERR(KSPSetType(self.ksp, KSPPYTHON)) + CHKERR(KSPPythonSetContext(self.ksp, <void*>context)) return self def setPythonContext(self, context: Any | None = None) -> None: @@ -2087,7 +2168,7 @@ cdef class KSP(Object): petsc_python_ksp, getPythonContext """ - CHKERR( KSPPythonSetContext(self.ksp, <void*>context) ) + CHKERR(KSPPythonSetContext(self.ksp, <void*>context)) def getPythonContext(self) -> Any: """Return the instance of the class implementing Python methods. @@ -2100,7 +2181,7 @@ cdef class KSP(Object): """ cdef void *context = NULL - CHKERR( KSPPythonGetContext(self.ksp, &context) ) + CHKERR(KSPPythonGetContext(self.ksp, &context)) if context == NULL: return None else: return <object> context @@ -2117,7 +2198,7 @@ cdef class KSP(Object): """ cdef const char *cval = NULL py_type = str2bytes(py_type, &cval) - CHKERR( KSPPythonSetType(self.ksp, cval) ) + CHKERR(KSPPythonSetType(self.ksp, cval)) def getPythonType(self) -> str: """Return the fully qualified Python name of the class used by the solver. @@ -2131,7 +2212,7 @@ cdef class KSP(Object): """ cdef const char *cval = NULL - CHKERR( KSPPythonGetType(self.ksp, &cval) ) + CHKERR(KSPPythonGetType(self.ksp, &cval)) return bytes2str(cval) # --- application context --- @@ -2140,6 +2221,7 @@ cdef class KSP(Object): """The solver application context.""" def __get__(self) -> Any: return self.getAppCtx() + def __set__(self, value): self.setAppCtx(value) @@ -2149,6 +2231,7 @@ cdef class KSP(Object): """The solver `DM`.""" def __get__(self) -> DM: return self.getDM() + def __set__(self, value): self.setDM(value) @@ -2182,6 +2265,7 @@ cdef class KSP(Object): """Whether guess is non-zero.""" def __get__(self) -> bool: return self.getInitialGuessNonzero() + def __set__(self, value): self.setInitialGuessNonzero(value) @@ -2189,6 +2273,7 @@ cdef class KSP(Object): """Whether solver uses Knoll trick.""" def __get__(self) -> bool: return self.getInitialGuessKnoll() + def __set__(self, value): self.setInitialGuessKnoll(value) @@ -2203,6 +2288,7 @@ cdef class KSP(Object): """The side on which preconditioning is performed.""" def __get__(self) -> PC.Side: return self.getPCSide() + def __set__(self, value): self.setPCSide(value) @@ -2210,6 +2296,7 @@ cdef class KSP(Object): """The norm used by the solver.""" def __get__(self) -> NormType: return self.getNormType() + def __set__(self, value): self.setNormType(value) @@ -2219,6 +2306,7 @@ cdef class KSP(Object): """The relative tolerance of the solver.""" def __get__(self) -> float: return self.getTolerances()[0] + def __set__(self, value): self.setTolerances(rtol=value) @@ -2226,6 +2314,7 @@ cdef class KSP(Object): """The absolute tolerance of the solver.""" def __get__(self) -> float: return self.getTolerances()[1] + def __set__(self, value): self.setTolerances(atol=value) @@ -2233,6 +2322,7 @@ cdef class KSP(Object): """The divergence tolerance of the solver.""" def __get__(self) -> float: return self.getTolerances()[2] + def __set__(self, value): self.setTolerances(divtol=value) @@ -2240,6 +2330,7 @@ cdef class KSP(Object): """The maximum number of iterations the solver may take.""" def __get__(self) -> int: return 
self.getTolerances()[3] + def __set__(self, value): self.setTolerances(max_it=value) @@ -2249,6 +2340,7 @@ cdef class KSP(Object): """The current number of iterations the solver has taken.""" def __get__(self) -> int: return self.getIterationNumber() + def __set__(self, value): self.setIterationNumber(value) @@ -2256,6 +2348,7 @@ cdef class KSP(Object): """The norm of the residual at the current iteration.""" def __get__(self) -> float: return self.getResidualNorm() + def __set__(self, value): self.setResidualNorm(value) @@ -2270,6 +2363,7 @@ cdef class KSP(Object): """The converged reason.""" def __get__(self) -> KSP.ConvergedReason: return self.getConvergedReason() + def __set__(self, value): self.setConvergedReason(value) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Log.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Log.pyx index 10b4dae6fb7..3e11005ffef 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Log.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Log.pyx @@ -9,35 +9,35 @@ cdef class Log: """Logging support.""" @classmethod - def Stage(cls, name): + def Stage(cls, name) -> LogStage: if not name: raise ValueError("empty name") cdef const char *cname = NULL name = str2bytes(name, &cname) cdef PetscLogStage stageid = -1 cdef LogStage stage = get_LogStage(name) if stage is not None: return stage - CHKERR( PetscLogStageFindId(cname, &stageid) ) + CHKERR(PetscLogStageFindId(cname, &stageid)) if stageid == -1: - CHKERR( PetscLogStageRegister(cname, &stageid) ) + CHKERR(PetscLogStageRegister(cname, &stageid)) stage = reg_LogStage(name, stageid) return stage @classmethod - def Class(cls, name): + def Class(cls, name) -> LogClass: if not name: raise ValueError("empty name") cdef const char *cname = NULL name = str2bytes(name, &cname) cdef PetscLogClass classid = -1 cdef LogClass klass = get_LogClass(name) if klass is not None: return klass - CHKERR( PetscLogClassFindId(cname, &classid) ) + CHKERR(PetscLogClassFindId(cname, &classid)) if classid == -1: - CHKERR( PetscLogClassRegister(cname, &classid) ) + CHKERR(PetscLogClassRegister(cname, &classid)) klass = reg_LogClass(name, classid) return klass @classmethod - def Event(cls, name, klass=None): + def Event(cls, name, klass=None) -> LogEvent: if not name: raise ValueError("empty name") cdef const char *cname = NULL name = str2bytes(name, &cname) @@ -46,14 +46,14 @@ cdef class Log: if klass is not None: classid = klass cdef LogEvent event = get_LogEvent(name) if event is not None: return event - CHKERR( PetscLogEventFindId(cname, &eventid) ) + CHKERR(PetscLogEventFindId(cname, &eventid)) if eventid == -1: - CHKERR( PetscLogEventRegister(cname, classid, &eventid) ) + CHKERR(PetscLogEventRegister(cname, classid, &eventid)) event = reg_LogEvent(name, eventid) return event @classmethod - def begin(cls): + def begin(cls) -> None: """Turn on logging of objects and events. Collective. 
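The cached constructors above are the usual entry points for profiling; a typical pattern (names illustrative):

    PETSc.Log.begin()                     # start collecting
    stage = PETSc.Log.Stage('Assembly')
    event = PETSc.Log.Event('FormMatrix')

    stage.push()
    with event:                           # LogEvent supports the with-statement
        pass                              # ... timed work ...
    stage.pop()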
@@ -63,7 +63,7 @@ cdef class Log: petsc.PetscLogDefaultBegin """ - CHKERR( PetscLogDefaultBegin() ) + CHKERR(PetscLogDefaultBegin()) @classmethod def view(cls, Viewer viewer=None) -> None: @@ -84,7 +84,7 @@ cdef class Log: cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr if vwr == NULL: vwr = PETSC_VIEWER_STDOUT_WORLD - CHKERR( PetscLogView(vwr) ) + CHKERR(PetscLogView(vwr)) @classmethod def logFlops(cls, flops: float) -> None: @@ -103,7 +103,7 @@ cdef class Log: """ cdef PetscLogDouble cflops=flops - CHKERR( PetscLogFlops(cflops) ) + CHKERR(PetscLogFlops(cflops)) @classmethod def addFlops(cls, flops: float) -> None: @@ -126,7 +126,7 @@ cdef class Log: """ cdef PetscLogDouble cflops=flops - CHKERR( PetscLogFlops(cflops) ) + CHKERR(PetscLogFlops(cflops)) @classmethod def getFlops(cls) -> float: @@ -145,7 +145,7 @@ cdef class Log: """ cdef PetscLogDouble cflops=0 - CHKERR( PetscGetFlops(&cflops) ) + CHKERR(PetscGetFlops(&cflops)) return cflops @classmethod @@ -165,18 +165,18 @@ cdef class Log: """ cdef PetscLogDouble wctime=0 - CHKERR( PetscTime(&wctime) ) + CHKERR(PetscTime(&wctime)) return wctime @classmethod def getCPUTime(cls) -> float: """Return the CPU time.""" cdef PetscLogDouble cputime=0 - CHKERR( PetscGetCPUTime(&cputime) ) + CHKERR(PetscGetCPUTime(&cputime)) return cputime @classmethod - def EventDecorator(cls, name=None, klass=None): + def EventDecorator(cls, name=None, klass=None) -> Any: """Decorate a function with a `PETSc` event.""" def decorator(func): @functools.wraps(func) @@ -202,7 +202,7 @@ cdef class Log: """ cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscLogIsActive(&flag) ) + CHKERR(PetscLogIsActive(&flag)) return toBool(flag) # -------------------------------------------------------------------- @@ -210,7 +210,12 @@ cdef class Log: cdef class LogStage: """Logging support for different stages.""" - cdef readonly PetscLogStage id + cdef PetscLogStage id + + property id: + """The log stage identifier.""" + def __get__(self) -> int: + return self.id def __cinit__(self): self.id = 0 @@ -237,7 +242,7 @@ cdef class LogStage: LogStage.pop, petsc.PetscLogStagePush """ - CHKERR( PetscLogStagePush(self.id) ) + CHKERR(PetscLogStagePush(self.id)) def pop(self) -> None: """Pop a stage from the logging stack. @@ -250,20 +255,23 @@ cdef class LogStage: """ self # unused - CHKERR( PetscLogStagePop() ) + CHKERR(PetscLogStagePop()) # - def getName(self): + def getName(self) -> str: + """Return the current stage name.""" cdef const char *cval = NULL - CHKERR( PetscLogStageFindName(self.id, &cval) ) + CHKERR(PetscLogStageFindName(self.id, &cval)) return bytes2str(cval) property name: - def __get__(self): + """The current stage name.""" + def __get__(self) -> str: return self.getName() + def __set__(self, value): - self; value; # unused + self; value # unused raise TypeError("readonly attribute") # @@ -278,7 +286,7 @@ cdef class LogStage: petsc.PetscLogStageSetActive """ - CHKERR( PetscLogStageSetActive(self.id, PETSC_TRUE) ) + CHKERR(PetscLogStageSetActive(self.id, PETSC_TRUE)) def deactivate(self) -> None: """Deactivate the stage. @@ -290,7 +298,7 @@ cdef class LogStage: petsc.PetscLogStageSetActive """ - CHKERR( PetscLogStageSetActive(self.id, PETSC_FALSE) ) + CHKERR(PetscLogStageSetActive(self.id, PETSC_FALSE)) def getActive(self) -> bool: """Check if the stage is activated. 
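EventDecorator, shown above, wraps a whole function in such an event; a sketch:

    @PETSc.Log.EventDecorator('MyApp.assemble')
    def assemble(A):
        ...                               # every call is logged as 'MyApp.assemble'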
@@ -303,7 +311,7 @@ cdef class LogStage: """ cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscLogStageGetActive(self.id, &flag) ) + CHKERR(PetscLogStageGetActive(self.id, &flag)) return toBool(flag) def setActive(self, flag: bool) -> None: @@ -318,11 +326,13 @@ cdef class LogStage: """ cdef PetscBool tval = PETSC_FALSE if flag: tval = PETSC_TRUE - CHKERR( PetscLogStageSetActive(self.id, tval) ) + CHKERR(PetscLogStageSetActive(self.id, tval)) property active: - def __get__(self): + """Whether the stage is active.""" + def __get__(self) -> bool: return self.getActive() + def __set__(self, value): self.setActive(value) @@ -339,7 +349,7 @@ cdef class LogStage: """ cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscLogStageGetVisible(self.id, &flag) ) + CHKERR(PetscLogStageGetVisible(self.id, &flag)) return toBool(flag) def setVisible(self, flag: bool) -> None: @@ -359,16 +369,18 @@ cdef class LogStage: """ cdef PetscBool tval = PETSC_FALSE if flag: tval = PETSC_TRUE - CHKERR( PetscLogStageSetVisible(self.id, tval) ) + CHKERR(PetscLogStageSetVisible(self.id, tval)) property visible: - def __get__(self): + """Whether the stage is visible.""" + def __get__(self) -> bool: return self.getVisible() + def __set__(self, value): self.setVisible(value) -cdef dict stage_registry = { } +cdef dict stage_registry = {} cdef LogStage get_LogStage(object name): return stage_registry.get(name) @@ -382,8 +394,14 @@ cdef LogStage reg_LogStage(object name, PetscLogStage stageid): # -------------------------------------------------------------------- cdef class LogClass: + """Logging support.""" - cdef readonly PetscLogClass id + cdef PetscLogClass id + + property id: + """The log class identifier.""" + def __get__(self) -> int: + return self.id def __cinit__(self): self.id = PETSC_OBJECT_CLASSID @@ -393,44 +411,53 @@ cdef class LogClass: # - def getName(self): + def getName(self) -> str: + """Return the log class name.""" cdef const char *cval = NULL - CHKERR( PetscLogClassFindName(self.id, &cval) ) + CHKERR(PetscLogClassFindName(self.id, &cval)) return bytes2str(cval) property name: - def __get__(self): + """The log class name.""" + def __get__(self) -> str: return self.getName() + def __set__(self, value): - self; value; # unused + self; value # unused raise TypeError("readonly attribute") # - def activate(self): - CHKERR( PetscLogClassActivate(self.id) ) + def activate(self) -> None: + """Activate the log class.""" + CHKERR(PetscLogClassActivate(self.id)) - def deactivate(self): - CHKERR( PetscLogClassDeactivate(self.id) ) + def deactivate(self) -> None: + """Deactivate the log class.""" + CHKERR(PetscLogClassDeactivate(self.id)) - def getActive(self): + def getActive(self) -> bool: + """Not implemented.""" self # unused raise NotImplementedError - def setActive(self, flag): + def setActive(self, flag: bool) -> None: + """Activate or deactivate the log class.""" if flag: - CHKERR( PetscLogClassActivate(self.id) ) + CHKERR(PetscLogClassActivate(self.id)) else: - CHKERR( PetscLogClassDeactivate(self.id) ) + CHKERR(PetscLogClassDeactivate(self.id)) property active: - def __get__(self): + """Log class activation.""" + def __get__(self) -> bool: return self.getActive() + def __set__(self, value): self.setActive(value) -cdef dict class_registry = { } +cdef dict class_registry = {} cdef LogClass get_LogClass(object name): return class_registry.get(name) @@ -444,8 +471,14 @@ cdef LogClass reg_LogClass(object name, PetscLogClass classid): # -------------------------------------------------------------------- cdef class 
LogEvent: + """Logging support.""" - cdef readonly PetscLogEvent id + cdef PetscLogEvent id + + property id: + """The log event identifier.""" + def __get__(self) -> int: + return self.id def __cinit__(self): self.id = 0 @@ -460,8 +493,6 @@ cdef class LogEvent: def __exit__(self, *exc): self.end() - - def begin(self, *objs) -> None: """Log the beginning of a user event. @@ -479,7 +510,7 @@ cdef class LogEvent: """ cdef PetscObject o[4] event_args2objs(objs, o) - CHKERR( PetscLogEventBegin(self.id, o[0], o[1], o[2], o[3]) ) + CHKERR(PetscLogEventBegin(self.id, o[0], o[1], o[2], o[3])) def end(self, *objs) -> None: """Log the end of a user event. @@ -498,19 +529,22 @@ cdef class LogEvent: """ cdef PetscObject o[4] event_args2objs(objs, o) - CHKERR( PetscLogEventEnd(self.id, o[0], o[1], o[2], o[3]) ) + CHKERR(PetscLogEventEnd(self.id, o[0], o[1], o[2], o[3])) # - def getName(self): + def getName(self) -> str: + """Return the current event name.""" cdef const char *cval = NULL - CHKERR( PetscLogEventFindName(self.id, &cval) ) + CHKERR(PetscLogEventFindName(self.id, &cval)) return bytes2str(cval) property name: - def __get__(self): + """The current event name.""" + def __get__(self) -> str: return self.getName() + def __set__(self, value): - self; value; # unused + self; value # unused raise TypeError("readonly attribute") # @@ -525,7 +559,7 @@ cdef class LogEvent: petsc.PetscLogEventActivate """ - CHKERR( PetscLogEventActivate(self.id) ) + CHKERR(PetscLogEventActivate(self.id)) def deactivate(self) -> None: """Indicate that the event should not be logged. @@ -537,9 +571,10 @@ cdef class LogEvent: petsc.PetscLogEventDeactivate """ - CHKERR( PetscLogEventDeactivate(self.id) ) + CHKERR(PetscLogEventDeactivate(self.id)) - def getActive(self): + def getActive(self) -> bool: + """Not implemented.""" self # unused raise NotImplementedError @@ -559,17 +594,20 @@ cdef class LogEvent: """ if flag: - CHKERR( PetscLogEventActivate(self.id) ) + CHKERR(PetscLogEventActivate(self.id)) else: - CHKERR( PetscLogEventDeactivate(self.id) ) + CHKERR(PetscLogEventDeactivate(self.id)) property active: - def __get__(self): + """Event activation.""" + def __get__(self) -> bool: return self.getActive() + def __set__(self, value): self.setActive(value) - def getActiveAll(self): + def getActiveAll(self) -> bool: + """Not implemented.""" self # unused raise NotImplementedError @@ -590,11 +628,13 @@ cdef class LogEvent: """ cdef PetscBool tval = PETSC_FALSE if flag: tval = PETSC_TRUE - CHKERR( PetscLogEventSetActiveAll(self.id, tval) ) + CHKERR(PetscLogEventSetActiveAll(self.id, tval)) property active_all: - def __get__(self): + """All events activation.""" + def __get__(self) -> bool: return self.getActiveAll() + def __set__(self, value): self.setActiveAll(value) @@ -623,10 +663,10 @@ cdef class LogEvent: cdef PetscEventPerfInfo info cdef PetscInt cstage = PETSC_DETERMINE if stage is not None: cstage = asInt(stage) - CHKERR( PetscLogEventGetPerfInfo(cstage, self.id, &info) ) + CHKERR(PetscLogEventGetPerfInfo(cstage, self.id, &info)) return info -cdef dict event_registry = { } +cdef dict event_registry = {} cdef LogEvent get_LogEvent(object name): return event_registry.get(name) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Mat.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Mat.pyx index 8f2ae16ce2f..8bfcd34fc99 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Mat.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Mat.pyx @@ -82,7 +82,10 @@ class MatType(object): DUMMY = S_(MATDUMMY) LMVM = S_(MATLMVM) LMVMDFP = S_(MATLMVMDFP) + 
LMVMDDFP = S_(MATLMVMDDFP) LMVMBFGS = S_(MATLMVMBFGS) + LMVMDBFGS = S_(MATLMVMDBFGS) + LMVMDQN = S_(MATLMVMDQN) LMVMSR1 = S_(MATLMVMSR1) LMVMBROYDEN = S_(MATLMVMBROYDEN) LMVMBADBROYDEN = S_(MATLMVMBADBROYDEN) @@ -93,6 +96,7 @@ class MatType(object): DIAGONAL = S_(MATDIAGONAL) H2OPUS = S_(MATH2OPUS) + class MatOption(object): """Matrix option. @@ -129,6 +133,7 @@ class MatOption(object): SORTED_FULL = MAT_SORTED_FULL OPTION_MAX = MAT_OPTION_MAX + class MatAssemblyType(object): """Matrix assembly type. @@ -144,12 +149,14 @@ class MatAssemblyType(object): FINAL = FINAL_ASSEMBLY FLUSH = FLUSH_ASSEMBLY + class MatInfoType(object): """Matrix info type.""" LOCAL = MAT_LOCAL GLOBAL_MAX = MAT_GLOBAL_MAX GLOBAL_SUM = MAT_GLOBAL_SUM + class MatStructure(object): """Matrix modification structure. @@ -169,6 +176,7 @@ class MatStructure(object): DIFFERENT = DIFFERENT_NZ = DIFFERENT_NONZERO_PATTERN UNKNOWN = UNKNOWN_NZ = UNKNOWN_NONZERO_PATTERN + class MatDuplicateOption(object): """Matrix duplicate option. @@ -181,6 +189,7 @@ class MatDuplicateOption(object): COPY_VALUES = MAT_COPY_VALUES SHARE_NONZERO_PATTERN = MAT_SHARE_NONZERO_PATTERN + class MatOrderingType(object): """Factored matrix ordering type. @@ -200,6 +209,7 @@ class MatOrderingType(object): AMD = S_(MATORDERINGAMD) METISND = S_(MATORDERINGMETISND) + class MatSolverType(object): """Factored matrix solver type. @@ -229,6 +239,7 @@ class MatSolverType(object): CUDA = S_(MATSOLVERCUDA) SPQR = S_(MATSOLVERSPQR) + class MatFactorShiftType(object): """Factored matrix shift type. @@ -246,6 +257,7 @@ class MatFactorShiftType(object): NZ = MAT_SHIFT_NONZERO PD = MAT_SHIFT_POSITIVE_DEFINITE + class MatSORType(object): """Matrix SOR type. @@ -265,6 +277,7 @@ class MatSORType(object): APPLY_UPPER = SOR_APPLY_UPPER APPLY_LOWER = SOR_APPLY_LOWER + @cython.internal cdef class MatStencil: """Associate structured grid coordinates with matrix indices. @@ -281,6 +294,7 @@ cdef class MatStencil: "First logical grid coordinate." def __get__(self) -> int: return toInt(self.stencil.i) + def __set__(self, value: int) -> None: self.stencil.i = asInt(value) @@ -288,6 +302,7 @@ cdef class MatStencil: "Second logical grid coordinate." def __get__(self) -> int: return toInt(self.stencil.j) + def __set__(self, value: int) -> None: self.stencil.j = asInt(value) @@ -295,6 +310,7 @@ cdef class MatStencil: "Third logical grid coordinate." def __get__(self) -> int: return toInt(self.stencil.k) + def __set__(self, value: int) -> None: self.stencil.k = asInt(value) @@ -302,6 +318,7 @@ cdef class MatStencil: "Field component." def __get__(self) -> int: return toInt(self.stencil.c) + def __set__(self, value: int) -> None: self.stencil.c = asInt(value) @@ -310,6 +327,7 @@ cdef class MatStencil: def __get__(self) -> tuple[int, int, int]: cdef PetscMatStencil *s = &self.stencil return toInt(s.i), toInt(s.j), toInt(s.k) + def __set__(self, value: Sequence[int]) -> None: cdef PetscMatStencil *s = &self.stencil s.i = s.j = s.k = 0 @@ -320,6 +338,7 @@ cdef class MatStencil: def __get__(self) -> int: cdef PetscMatStencil *s = &self.stencil return toInt(s.c) + def __set__(self, value: int) -> None: cdef PetscMatStencil *s = &self.stencil s.c = asInt(value) @@ -451,7 +470,7 @@ cdef class Mat(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( MatView(self.mat, vwr) ) + CHKERR(MatView(self.mat, vwr)) def destroy(self) -> Self: """Destroy the matrix. 
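A hedged sketch of the MatStencil helper documented above, used to address entries of a structured-grid (DMDA) matrix; the grid indices are illustrative:

    row = PETSc.Mat.Stencil(); row.index = (1, 1); row.field = 0
    col = PETSc.Mat.Stencil(); col.index = (1, 0); col.field = 0
    A.setValueStencil(row, col, -1.0)     # assumes A was created from a DMDA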
@@ -463,7 +482,7 @@ cdef class Mat(Object): create, petsc.MatDestroy """ - CHKERR( MatDestroy(&self.mat) ) + CHKERR(MatDestroy(&self.mat)) return self def create(self, comm: Comm | None = None) -> Self: @@ -488,8 +507,8 @@ cdef class Mat(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscMat newmat = NULL - CHKERR( MatCreate(ccomm, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreate(ccomm, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def setType(self, mat_type: Type | str) -> None: @@ -509,13 +528,12 @@ cdef class Mat(Object): """ cdef PetscMatType cval = NULL mat_type = str2bytes(mat_type, &cval) - CHKERR( MatSetType(self.mat, cval) ) + CHKERR(MatSetType(self.mat, cval)) def setSizes( self, size: MatSizeSpec, - bsize: MatBlockSizeSpec | None = None, - ) -> None: + bsize: MatBlockSizeSpec | None = None) -> None: """Set the local, global and block sizes. Collective. @@ -558,12 +576,12 @@ cdef class Mat(Object): """ cdef PetscInt rbs = 0, cbs = 0, m = 0, n = 0, M = 0, N = 0 Mat_Sizes(size, bsize, &rbs, &cbs, &m, &n, &M, &N) - CHKERR( MatSetSizes(self.mat, m, n, M, N) ) + CHKERR(MatSetSizes(self.mat, m, n, M, N)) if rbs != PETSC_DECIDE: if cbs != PETSC_DECIDE: - CHKERR( MatSetBlockSizes(self.mat, rbs, cbs) ) + CHKERR(MatSetBlockSizes(self.mat, rbs, cbs)) else: - CHKERR( MatSetBlockSize(self.mat, rbs) ) + CHKERR(MatSetBlockSize(self.mat, rbs)) def setBlockSize(self, bsize: int) -> None: """Set the matrix block size (same for rows and columns). @@ -581,7 +599,7 @@ cdef class Mat(Object): """ cdef PetscInt bs = asInt(bsize) - CHKERR( MatSetBlockSize(self.mat, bs) ) + CHKERR(MatSetBlockSize(self.mat, bs)) def setBlockSizes(self, row_bsize: int, col_bsize: int) -> None: """Set the row and column block sizes. @@ -602,7 +620,21 @@ cdef class Mat(Object): """ cdef PetscInt rbs = asInt(row_bsize) cdef PetscInt cbs = asInt(col_bsize) - CHKERR( MatSetBlockSizes(self.mat, rbs, cbs) ) + CHKERR(MatSetBlockSizes(self.mat, rbs, cbs)) + + def setVariableBlockSizes(self, blocks: Sequence[int]) -> None: + """Set diagonal point-blocks of the matrix. + + Not collective. + + See Also + -------- + setBlockSize, petsc.MatSetVariableBlockSizes + + """ + cdef PetscInt nb=0, *b=NULL + blocks = iarray_i(blocks, &nb, &b) + CHKERR(MatSetVariableBlockSizes(self.mat, nb, b)) def setVecType(self, vec_type: Vec.Type | str) -> None: """Set the vector type. @@ -621,7 +653,7 @@ cdef class Mat(Object): """ cdef PetscVecType cval = NULL vec_type = str2bytes(vec_type, &cval) - CHKERR( MatSetVecType(self.mat, cval) ) + CHKERR(MatSetVecType(self.mat, cval)) def getVecType(self) -> str: """Return the vector type used by the matrix. @@ -634,7 +666,7 @@ cdef class Mat(Object): """ cdef PetscVecType cval = NULL - CHKERR( MatGetVecType(self.mat, &cval) ) + CHKERR(MatGetVecType(self.mat, &cval)) return bytes2str(cval) def setNestVecType(self, vec_type: Vec.Type | str) -> None: @@ -654,7 +686,7 @@ cdef class Mat(Object): """ cdef PetscVecType cval = NULL vec_type = str2bytes(vec_type, &cval) - CHKERR( MatNestSetVecType(self.mat, cval) ) + CHKERR(MatNestSetVecType(self.mat, cval)) # @@ -664,8 +696,7 @@ cdef class Mat(Object): bsize: MatBlockSizeSpec | None = None, nnz: NNZSpec | None = None, csr: CSRIndicesSpec | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a sparse `Type.AIJ` matrix, optionally preallocating. Collective. 
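Two sketches for the sizing and creation paths here: the new setVariableBlockSizes, and AIJ creation with scalar preallocation (the CSR-array constructor follows below); all values are illustrative:

    A = PETSc.Mat().createAIJ([9, 9], nnz=3, comm=PETSc.COMM_SELF)  # ~3 nonzeros per row
    A.setVariableBlockSizes([2, 3, 4])    # unequal diagonal point-blocks, 2 + 3 + 4 == 9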
@@ -699,7 +730,7 @@ cdef class Mat(Object): # create matrix cdef PetscMat newmat = NULL Mat_Create(MATAIJ, comm, size, bsize, &newmat) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat # preallocate matrix Mat_AllocAIJ(self.mat, nnz, csr) return self @@ -710,8 +741,7 @@ cdef class Mat(Object): bsize: MatBlockSizeSpec, nnz: NNZSpec | None = None, csr: CSRIndicesSpec | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a sparse blocked `Type.BAIJ` matrix, optionally preallocating. Collective. @@ -744,7 +774,7 @@ cdef class Mat(Object): # create matrix cdef PetscMat newmat = NULL Mat_Create(MATBAIJ, comm, size, bsize, &newmat) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat # preallocate matrix Mat_AllocAIJ(self.mat, nnz, csr) return self @@ -752,11 +782,10 @@ cdef class Mat(Object): def createSBAIJ( self, size: MatSizeSpec, - bsize: int, + bsize: MatBlockSizeSpec, nnz: NNZSpec | None = None, csr: CSRIndicesSpec | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a sparse `Type.SBAIJ` matrix in symmetric block format. Collective. @@ -789,7 +818,7 @@ cdef class Mat(Object): # create matrix cdef PetscMat newmat = NULL Mat_Create(MATSBAIJ, comm, size, bsize, &newmat) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat # preallocate matrix Mat_AllocAIJ(self.mat, nnz, csr) return self @@ -800,8 +829,7 @@ cdef class Mat(Object): bsize: MatBlockSizeSpec | None = None, nnz: NNZSpec | None = None, csr: CSRIndicesSpec | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a sparse `Type.AIJCRL` matrix. Collective. @@ -837,7 +865,7 @@ cdef class Mat(Object): # create matrix cdef PetscMat newmat = NULL Mat_Create(MATAIJCRL, comm, size, bsize, &newmat) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat # preallocate matrix Mat_AllocAIJ(self.mat, nnz, csr) return self @@ -863,7 +891,7 @@ cdef class Mat(Object): """ cdef PetscBool done = PETSC_FALSE - CHKERR( MatIsPreallocated(self.mat, &done) ) + CHKERR(MatIsPreallocated(self.mat, &done)) # if done: raise Error(PETSC_ERR_ORDER) Mat_AllocAIJ_NNZ(self.mat, nnz) return self @@ -897,7 +925,7 @@ cdef class Mat(Object): """ cdef PetscBool done = PETSC_FALSE - CHKERR( MatIsPreallocated(self.mat, &done) ) + CHKERR(MatIsPreallocated(self.mat, &done)) # if done: raise Error(PETSC_ERR_ORDER) Mat_AllocAIJ_CSR(self.mat, csr) return self @@ -907,8 +935,7 @@ cdef class Mat(Object): size: MatSizeSpec, csr: CSRSpec | tuple[CSRSpec, CSRSpec], bsize: MatBlockSizeSpec | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a sparse `Type.AIJ` matrix with data in CSR format. Collective. @@ -974,21 +1001,21 @@ cdef class Mat(Object): # create matrix cdef PetscMat newmat = NULL if comm_size(ccomm) == 1: - CHKERR( MatCreateSeqAIJWithArrays( - ccomm, m, n, i, j, v, &newmat) ) + CHKERR(MatCreateSeqAIJWithArrays( + ccomm, m, n, i, j, v, &newmat)) csr = (pi, pj, pv) else: # if off-diagonal components are provided then SplitArrays can be # used (and not cause a copy). 
if oi != NULL and oj != NULL and ov != NULL: - CHKERR( MatCreateMPIAIJWithSplitArrays( - ccomm, m, n, M, N, i, j, v, oi, oj, ov, &newmat) ) + CHKERR(MatCreateMPIAIJWithSplitArrays( + ccomm, m, n, M, N, i, j, v, oi, oj, ov, &newmat)) csr = ((pi, pj, pv), (poi, poj, pov)) else: - CHKERR( MatCreateMPIAIJWithArrays( - ccomm, m, n, M, N, i, j, v, &newmat) ) + CHKERR(MatCreateMPIAIJWithArrays( + ccomm, m, n, M, N, i, j, v, &newmat)) csr = None - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat self.set_attr('__csr__', csr) return self @@ -999,8 +1026,7 @@ cdef class Mat(Object): size: MatSizeSpec, bsize: MatBlockSizeSpec | None = None, array: Sequence[Scalar] | None = None, - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.DENSE` matrix. Collective. @@ -1024,7 +1050,7 @@ cdef class Mat(Object): # create matrix cdef PetscMat newmat = NULL Mat_Create(MATDENSE, comm, size, bsize, &newmat) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat # preallocate matrix if array is not None: array = Mat_AllocDense(self.mat, array) @@ -1037,8 +1063,7 @@ cdef class Mat(Object): bsize: MatBlockSizeSpec | None = None, array: Sequence[Scalar] | None = None, cudahandle: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.DENSECUDA` matrix with optional host and device data. Collective. @@ -1078,15 +1103,15 @@ cdef class Mat(Object): Sys_Layout(ccomm, rbs, &m, &M) Sys_Layout(ccomm, cbs, &n, &N) # create matrix and set sizes - CHKERR( MatCreateDenseCUDA(ccomm, m, n, M, N, (cudahandle), &newmat) ) + CHKERR(MatCreateDenseCUDA(ccomm, m, n, M, N, (cudahandle), &newmat)) # Does block size make sense for MATDENSE? 
- CHKERR( MatSetBlockSizes(newmat, rbs, cbs) ) + CHKERR(MatSetBlockSizes(newmat, rbs, cbs)) else: Mat_Create(MATDENSECUDA, comm, size, bsize, &newmat) if array is not None: array = Mat_AllocDense(self.mat, array) self.set_attr('__array__', array) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def setPreallocationDense(self, array: Sequence[Scalar]) -> Self: @@ -1105,7 +1130,7 @@ cdef class Mat(Object): """ cdef PetscBool done = PETSC_FALSE - CHKERR( MatIsPreallocated(self.mat, &done) ) + CHKERR(MatIsPreallocated(self.mat, &done)) # if done: raise Error(PETSC_ERR_ORDER) array = Mat_AllocDense(self.mat, array) self.set_attr('__array__', array) @@ -1133,8 +1158,8 @@ cdef class Mat(Object): if comm is None: comm = scatter.getComm() cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscMat newmat = NULL - CHKERR( MatCreateScatter(ccomm, scatter.sct, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateScatter(ccomm, scatter.sct, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createNormal(self, Mat mat) -> Self: @@ -1158,8 +1183,8 @@ cdef class Mat(Object): """ cdef PetscMat newmat = NULL - CHKERR( MatCreateNormal(mat.mat, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateNormal(mat.mat, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createTranspose(self, Mat mat) -> Self: @@ -1183,8 +1208,8 @@ cdef class Mat(Object): """ cdef PetscMat newmat = NULL - CHKERR( MatCreateTranspose(mat.mat, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateTranspose(mat.mat, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createNormalHermitian(self, Mat mat) -> Self: @@ -1208,8 +1233,8 @@ cdef class Mat(Object): """ cdef PetscMat newmat = NULL - CHKERR( MatCreateNormalHermitian(mat.mat, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateNormalHermitian(mat.mat, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createHermitianTranspose(self, Mat mat) -> Self: @@ -1233,8 +1258,8 @@ cdef class Mat(Object): """ cdef PetscMat newmat = NULL - CHKERR( MatCreateHermitianTranspose(mat.mat, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateHermitianTranspose(mat.mat, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createLRC(self, Mat A, Mat U, Vec c, Mat V) -> Self: @@ -1280,8 +1305,8 @@ cdef class Mat(Object): if A is not None: Amat = A.mat if c is not None: cvec = c.vec if V is not None: Vmat = V.mat - CHKERR( MatCreateLRC(Amat, Umat, cvec, Vmat, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateLRC(Amat, Umat, cvec, Vmat, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createSubMatrixVirtual(self, Mat A, IS isrow, IS iscol=None) -> Self: @@ -1305,8 +1330,8 @@ cdef class Mat(Object): """ if iscol is None: iscol = isrow cdef PetscMat newmat = NULL - CHKERR( MatCreateSubMatrixVirtual(A.mat, isrow.iset, iscol.iset, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateSubMatrixVirtual(A.mat, isrow.iset, iscol.iset, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createNest( @@ -1314,8 +1339,7 @@ cdef class Mat(Object): mats: Sequence[Sequence[Mat]], isrows: Sequence[IS] | None = None, iscols: Sequence[IS] | None = None, - comm: Comm | None = None, - ) 
-> Self: + comm: Comm | None = None) -> Self: """Create a `Type.NEST` matrix containing multiple submatrices. Collective. @@ -1360,21 +1384,21 @@ cdef class Mat(Object): cdef PetscMat *cmats = NULL cdef PetscIS *cisrows = NULL cdef PetscIS *ciscols = NULL - cdef object tmp1, tmp2, tmp3 - tmp1 = oarray_p(empty_p(nr*nc), NULL, &cmats) + cdef object unused1, unused2, unused3 + unused1 = oarray_p(empty_p(nr*nc), NULL, &cmats) for i from 0 <= i < mr: for j from 0 <= j < mc: mat = mats[i][j] cmats[i*mc+j] = (mat).mat if mat is not None else NULL if isrows is not None: - tmp2 = oarray_p(empty_p(nr), NULL, &cisrows) + unused2 = oarray_p(empty_p(nr), NULL, &cisrows) for i from 0 <= i < mr: cisrows[i] = (isrows[i]).iset if iscols is not None: - tmp3 = oarray_p(empty_p(nc), NULL, &ciscols) + unused3 = oarray_p(empty_p(nc), NULL, &ciscols) for j from 0 <= j < mc: ciscols[j] = (iscols[j]).iset cdef PetscMat newmat = NULL - CHKERR( MatCreateNest(ccomm, nr, cisrows, nc, ciscols, cmats, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateNest(ccomm, nr, cisrows, nc, ciscols, cmats, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createH2OpusFromMat( @@ -1386,10 +1410,11 @@ cdef class Mat(Object): leafsize: int | None = None, maxrank: int | None = None, bs: int | None = None, - rtol: float | None = None, - ) -> Self: + rtol: float | None = None) -> Self: """Create a hierarchical `Type.H2OPUS` matrix sampling from a provided operator. + Collective. + Parameters ---------- A @@ -1443,27 +1468,27 @@ cdef class Mat(Object): if PyArray_ISFORTRAN(xyz): xyz = PyArray_Copy(xyz) if PyArray_NDIM(xyz) != 2: raise ValueError( ("coordinates must have two dimensions: " - "coordinates.ndim=%d") % (PyArray_NDIM(xyz)) ) + "coordinates.ndim=%d") % (PyArray_NDIM(xyz))) nvtx = PyArray_DIM(xyz, 0) - CHKERR( MatGetLocalSize(A.mat, &rl, &cl) ) + CHKERR(MatGetLocalSize(A.mat, &rl, &cl)) if cl != rl: raise ValueError("Not for rectangular matrices") if nvtx < rl: raise ValueError( - ("coordinates size must be at least %d" % rl )) + ("coordinates size must be at least %d" % rl)) cdim = PyArray_DIM(xyz, 1) coords = PyArray_DATA(xyz) cdef PetscMat newmat = NULL - CHKERR( MatCreateH2OpusFromMat(A.mat, cdim, coords, cdist, peta, lsize, maxr, pbs, tol, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateH2OpusFromMat(A.mat, cdim, coords, cdist, peta, lsize, maxr, pbs, tol, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createIS( self, size: MatSizeSpec, + bsize: MatBlockSizeSpec | None = None, LGMap lgmapr = None, LGMap lgmapc = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.IS` matrix representing globally unassembled operators. Collective. @@ -1472,6 +1497,8 @@ cdef class Mat(Object): ---------- size Matrix size. + bsize + Matrix block size. If `None`, a block size of ``1`` is set. lgmapr Optional local-to-global mapping for the rows. If `None`, the local row space matches the global row space. 
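For reference, a minimal petsc4py sketch of the `createNest` path touched above; the block sizes and entries are illustrative, and `None` stands for a zero block:

    from petsc4py import PETSc

    # Two square diagonal blocks, assembled independently.
    A = PETSc.Mat().createAIJ(4, comm=PETSc.COMM_WORLD)
    B = PETSc.Mat().createAIJ(3, comm=PETSc.COMM_WORLD)
    for M in (A, B):
        M.setUp()
        rstart, rend = M.getOwnershipRange()
        for i in range(rstart, rend):
            M.setValue(i, i, 1.0)
        M.assemble()

    # Index sets are deduced when isrows/iscols are omitted.
    N = PETSc.Mat().createNest([[A, None], [None, B]])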
@@ -1493,7 +1520,7 @@ cdef class Mat(Object): cdef PetscLGMap lgmc = NULL cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscInt rbs = 0, cbs = 0, m = 0, n = 0, M = 0, N = 0 - Mat_Sizes(size, None, &rbs, &cbs, &m, &n, &M, &N) + Mat_Sizes(size, bsize, &rbs, &cbs, &m, &n, &M, &N) Sys_Layout(ccomm, rbs, &m, &M) Sys_Layout(ccomm, cbs, &n, &N) # create matrix @@ -1501,19 +1528,18 @@ cdef class Mat(Object): cdef PetscInt bs = 1 if rbs == cbs: bs = rbs if lgmapr is not None: - lgmr = lgmapr.lgm + lgmr = lgmapr.lgm if lgmapc is not None: - lgmc = lgmapc.lgm - CHKERR( MatCreateIS(ccomm, bs, m, n, M, N, lgmr, lgmc, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + lgmc = lgmapc.lgm + CHKERR(MatCreateIS(ccomm, bs, m, n, M, N, lgmr, lgmc, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createConstantDiagonal( self, size: MatSizeSpec, diag: float, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a diagonal matrix of type `Type.CONSTANTDIAGONAL`. Collective. @@ -1538,14 +1564,13 @@ cdef class Mat(Object): Sys_Layout(ccomm, rbs, &m, &M) Sys_Layout(ccomm, cbs, &n, &N) cdef PetscMat newmat = NULL - CHKERR( MatCreateConstantDiagonal(ccomm, m, n, M, N, diag, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateConstantDiagonal(ccomm, m, n, M, N, diag, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createDiagonal( self, - Vec diag, - ) -> Self: + Vec diag) -> Self: """Create a diagonal matrix of type `Type.DIAGONAL`. Collective. @@ -1562,8 +1587,8 @@ cdef class Mat(Object): """ cdef PetscVec dvec = diag.vec cdef PetscMat newmat = NULL - CHKERR( MatCreateDiagonal(dvec, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat + CHKERR(MatCreateDiagonal(dvec, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat return self def createPython(self, size: MatSizeSpec, context: Any = None, comm: Comm | None = None) -> Self: @@ -1594,24 +1619,31 @@ cdef class Mat(Object): # create matrix # FIXME: propagate block sizes? cdef PetscMat newmat = NULL - CHKERR( MatCreate(ccomm, &newmat) ) - CHKERR( PetscCLEAR(self.obj) ); self.mat = newmat - CHKERR( MatSetSizes(self.mat, m, n, M, N) ) - CHKERR( MatSetType(self.mat, MATPYTHON) ) - CHKERR( MatPythonSetContext(self.mat, context) ) + CHKERR(MatCreate(ccomm, &newmat)) + CHKERR(PetscCLEAR(self.obj)); self.mat = newmat + CHKERR(MatSetSizes(self.mat, m, n, M, N)) + CHKERR(MatSetType(self.mat, MATPYTHON)) + CHKERR(MatPythonSetContext(self.mat, context)) + if context: + CHKERR(MatSetUp(self.mat)) return self def setPythonContext(self, context: Any) -> None: """Set the instance of the class implementing the required Python methods. - Not collective. + Logically collective. + + Notes + ----- + In order to use the matrix, `Mat.setUp` must be called after having set + the context. Pass `None` to reset the matrix to its initial state. See Also -------- - petsc_python_mat, getPythonContext + petsc_python_mat, getPythonContext, setPythonType """ - CHKERR( MatPythonSetContext(self.mat, context) ) + CHKERR(MatPythonSetContext(self.mat, context)) def getPythonContext(self) -> Any: """Return the instance of the class implementing the required Python methods. 
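The notes above on `setPythonContext`/`setUp` correspond to the usual matrix-free pattern; a minimal sketch (the context class and its scaling are illustrative):

    from petsc4py import PETSc

    class ScaleBy2:
        # Matrix-free action y = 2*x, standing in for a real operator.
        def mult(self, mat, x, y):
            x.copy(y)       # y <- x
            y.scale(2.0)    # y <- 2*y

    n = 8
    A = PETSc.Mat().createPython([n, n], context=ScaleBy2(), comm=PETSc.COMM_WORLD)
    # createPython now calls setUp() when a context is given (per the change
    # above); after a later setPythonContext()/setPythonType(), setUp() must
    # be called explicitly.
    x, y = A.createVecs()
    x.set(1.0)
    A.mult(x, y)            # y == 2 everywhere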
@@ -1624,7 +1656,7 @@ cdef class Mat(Object): """ cdef void *context = NULL - CHKERR( MatPythonGetContext(self.mat, &context) ) + CHKERR(MatPythonGetContext(self.mat, &context)) if context == NULL: return None else: return context @@ -1633,6 +1665,11 @@ cdef class Mat(Object): Collective. + Notes + ----- + In order to use the matrix, `Mat.setUp` must be called after having set + the type. + See Also -------- petsc_python_mat, setPythonContext, getPythonType @@ -1641,7 +1678,7 @@ cdef class Mat(Object): """ cdef const char *cval = NULL py_type = str2bytes(py_type, &cval) - CHKERR( MatPythonSetType(self.mat, cval) ) + CHKERR(MatPythonSetType(self.mat, cval)) def getPythonType(self) -> str: """Return the fully qualified Python name of the class used by the matrix. @@ -1655,12 +1692,12 @@ cdef class Mat(Object): """ cdef const char *cval = NULL - CHKERR( MatPythonGetType(self.mat, &cval) ) + CHKERR(MatPythonGetType(self.mat, &cval)) return bytes2str(cval) # - def setOptionsPrefix(self, prefix: str) -> None: + def setOptionsPrefix(self, prefix: str | None = None) -> None: """Set the prefix used for searching for options in the database. Logically collective. @@ -1672,7 +1709,7 @@ cdef class Mat(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( MatSetOptionsPrefix(self.mat, cval) ) + CHKERR(MatSetOptionsPrefix(self.mat, cval)) def getOptionsPrefix(self) -> str: """Return the prefix used for searching for options in the database. @@ -1685,10 +1722,10 @@ cdef class Mat(Object): """ cdef const char *cval = NULL - CHKERR( MatGetOptionsPrefix(self.mat, &cval) ) + CHKERR(MatGetOptionsPrefix(self.mat, &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None = None) -> None: """Append to the prefix used for searching for options in the database. Logically collective. @@ -1700,7 +1737,7 @@ cdef class Mat(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( MatAppendOptionsPrefix(self.mat, cval) ) + CHKERR(MatAppendOptionsPrefix(self.mat, cval)) def setFromOptions(self) -> None: """Configure the matrix from the options database. @@ -1712,9 +1749,9 @@ cdef class Mat(Object): petsc_options, petsc.MatSetFromOptions """ - CHKERR( MatSetFromOptions(self.mat) ) + CHKERR(MatSetFromOptions(self.mat)) - def setUp(self) -> None: + def setUp(self) -> Self: """Set up the internal data structures for using the matrix. Collective. @@ -1724,7 +1761,7 @@ cdef class Mat(Object): petsc.MatSetUp """ - CHKERR( MatSetUp(self.mat) ) + CHKERR(MatSetUp(self.mat)) return self def setOption(self, option: Option, flag: bool) -> None: @@ -1737,7 +1774,7 @@ cdef class Mat(Object): getOption, petsc.MatSetOption """ - CHKERR( MatSetOption(self.mat, option, flag) ) + CHKERR(MatSetOption(self.mat, option, flag)) def getOption(self, option: Option) -> bool: """Return the option value. 
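A short sketch of the options workflow these prefix methods support; the `mymat_` prefix and option value are illustrative:

    from petsc4py import PETSc

    opts = PETSc.Options()
    opts["mymat_mat_type"] = "aij"    # consumed below through the prefix

    A = PETSc.Mat().create(comm=PETSc.COMM_WORLD)
    A.setSizes([8, 8])
    A.setOptionsPrefix("mymat_")      # options are searched as -mymat_...
    A.setFromOptions()                # picks up mymat_mat_type
    A.setUp()                         # returns self after this change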
@@ -1750,7 +1787,7 @@ cdef class Mat(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( MatGetOption(self.mat, option, &flag) ) + CHKERR(MatGetOption(self.mat, option, &flag)) return toBool(flag) def getType(self) -> str: @@ -1764,7 +1801,7 @@ cdef class Mat(Object): """ cdef PetscMatType cval = NULL - CHKERR( MatGetType(self.mat, &cval) ) + CHKERR(MatGetType(self.mat, &cval)) return bytes2str(cval) def getSize(self) -> tuple[int, int]: @@ -1778,7 +1815,7 @@ cdef class Mat(Object): """ cdef PetscInt M = 0, N = 0 - CHKERR( MatGetSize(self.mat, &M, &N) ) + CHKERR(MatGetSize(self.mat, &M, &N)) return (toInt(M), toInt(N)) def getLocalSize(self) -> tuple[int, int]: @@ -1792,7 +1829,7 @@ cdef class Mat(Object): """ cdef PetscInt m = 0, n = 0 - CHKERR( MatGetLocalSize(self.mat, &m, &n) ) + CHKERR(MatGetLocalSize(self.mat, &m, &n)) return (toInt(m), toInt(n)) def getSizes(self) -> tuple[LayoutSizeSpec, LayoutSizeSpec]: @@ -1807,8 +1844,8 @@ cdef class Mat(Object): """ cdef PetscInt m = 0, n = 0 cdef PetscInt M = 0, N = 0 - CHKERR( MatGetLocalSize(self.mat, &m, &n) ) - CHKERR( MatGetSize(self.mat, &M, &N) ) + CHKERR(MatGetLocalSize(self.mat, &m, &n)) + CHKERR(MatGetSize(self.mat, &M, &N)) return ((toInt(m), toInt(M)), (toInt(n), toInt(N))) def getBlockSize(self) -> int: @@ -1822,7 +1859,7 @@ cdef class Mat(Object): """ cdef PetscInt bs = 0 - CHKERR( MatGetBlockSize(self.mat, &bs) ) + CHKERR(MatGetBlockSize(self.mat, &bs)) return toInt(bs) def getBlockSizes(self) -> tuple[int, int]: @@ -1836,7 +1873,7 @@ cdef class Mat(Object): """ cdef PetscInt rbs = 0, cbs = 0 - CHKERR( MatGetBlockSizes(self.mat, &rbs, &cbs) ) + CHKERR(MatGetBlockSizes(self.mat, &rbs, &cbs)) return (toInt(rbs), toInt(cbs)) def getOwnershipRange(self) -> tuple[int, int]: @@ -1850,7 +1887,7 @@ cdef class Mat(Object): """ cdef PetscInt ival1 = 0, ival2 = 0 - CHKERR( MatGetOwnershipRange(self.mat, &ival1, &ival2) ) + CHKERR(MatGetOwnershipRange(self.mat, &ival1, &ival2)) return (toInt(ival1), toInt(ival2)) def getOwnershipRanges(self) -> ArrayInt: @@ -1866,11 +1903,11 @@ cdef class Mat(Object): """ cdef const PetscInt *rowrng = NULL - CHKERR( MatGetOwnershipRanges(self.mat, &rowrng) ) + CHKERR(MatGetOwnershipRanges(self.mat, &rowrng)) cdef MPI_Comm comm = MPI_COMM_NULL - CHKERR( PetscObjectGetComm(self.mat, &comm) ) + CHKERR(PetscObjectGetComm(self.mat, &comm)) cdef int size = -1 - CHKERR( MPI_Comm_size(comm, &size) ) + CHKERR(MPI_Comm_size(comm, &size)) return array_i(size+1, rowrng) def getOwnershipRangeColumn(self) -> tuple[int, int]: @@ -1885,7 +1922,7 @@ cdef class Mat(Object): """ cdef PetscInt ival1 = 0, ival2 = 0 - CHKERR( MatGetOwnershipRangeColumn(self.mat, &ival1, &ival2) ) + CHKERR(MatGetOwnershipRangeColumn(self.mat, &ival1, &ival2)) return (toInt(ival1), toInt(ival2)) def getOwnershipRangesColumn(self) -> ArrayInt: @@ -1899,11 +1936,11 @@ cdef class Mat(Object): """ cdef const PetscInt *colrng = NULL - CHKERR( MatGetOwnershipRangesColumn(self.mat, &colrng) ) + CHKERR(MatGetOwnershipRangesColumn(self.mat, &colrng)) cdef MPI_Comm comm = MPI_COMM_NULL - CHKERR( PetscObjectGetComm(self.mat, &comm) ) + CHKERR(PetscObjectGetComm(self.mat, &comm)) cdef int size = -1 - CHKERR( MPI_Comm_size(comm, &size) ) + CHKERR(MPI_Comm_size(comm, &size)) return array_i(size+1, colrng) def getOwnershipIS(self) -> tuple[IS, IS]: @@ -1918,7 +1955,7 @@ cdef class Mat(Object): """ cdef IS rows = IS() cdef IS cols = IS() - CHKERR( MatGetOwnershipIS(self.mat, &rows.iset, &cols.iset) ) + CHKERR(MatGetOwnershipIS(self.mat, &rows.iset, 
&cols.iset)) return (rows, cols) def getInfo(self, info: InfoType = None) -> dict[str, float]: @@ -1938,7 +1975,7 @@ cdef class Mat(Object): """ cdef PetscMatInfoType itype = infotype(info) cdef PetscMatInfo cinfo - CHKERR( MatGetInfo(self.mat, itype, &cinfo) ) + CHKERR(MatGetInfo(self.mat, itype, &cinfo)) return cinfo def duplicate(self, copy: bool = False) -> Mat: @@ -1960,7 +1997,7 @@ cdef class Mat(Object): if copy: flag = MAT_COPY_VALUES if copy > MAT_COPY_VALUES: flag = MAT_SHARE_NONZERO_PATTERN cdef Mat mat = type(self)() - CHKERR( MatDuplicate(self.mat, flag, &mat.mat) ) + CHKERR(MatDuplicate(self.mat, flag, &mat.mat)) return mat def copy(self, Mat result=None, structure: Structure | None = None) -> Mat: @@ -1985,9 +2022,9 @@ cdef class Mat(Object): if result is None: result = type(self)() if result.mat == NULL: - CHKERR( MatDuplicate(self.mat, copy, &result.mat) ) + CHKERR(MatDuplicate(self.mat, copy, &result.mat)) else: - CHKERR( MatCopy(self.mat, result.mat, mstr) ) + CHKERR(MatCopy(self.mat, result.mat, mstr)) return result def load(self, Viewer viewer) -> Self: @@ -2003,9 +2040,9 @@ cdef class Mat(Object): cdef MPI_Comm comm = MPI_COMM_NULL cdef PetscObject obj = (viewer.vwr) if self.mat == NULL: - CHKERR( PetscObjectGetComm(obj, &comm) ) - CHKERR( MatCreate(comm, &self.mat) ) - CHKERR( MatLoad(self.mat, viewer.vwr) ) + CHKERR(PetscObjectGetComm(obj, &comm)) + CHKERR(MatCreate(comm, &self.mat)) + CHKERR(MatLoad(self.mat, viewer.vwr)) return self def convert(self, mat_type: Type | str = None, Mat out=None) -> Mat: @@ -2037,7 +2074,7 @@ cdef class Mat(Object): reuse = MAT_INITIAL_MATRIX else: reuse = MAT_REUSE_MATRIX - CHKERR( MatConvert(self.mat, mtype, reuse, &out.mat) ) + CHKERR(MatConvert(self.mat, mtype, reuse, &out.mat)) return out def transpose(self, Mat out=None) -> Mat: @@ -2064,18 +2101,20 @@ cdef class Mat(Object): reuse = MAT_INITIAL_MATRIX else: reuse = MAT_REUSE_MATRIX - CHKERR( MatTranspose(self.mat, reuse, &out.mat) ) + CHKERR(MatTranspose(self.mat, reuse, &out.mat)) return out def setTransposePrecursor(self, Mat out) -> None: """Set transpose precursor. + Logically collective. + See Also -------- petsc.MatTransposeSetPrecursor """ - CHKERR( MatTransposeSetPrecursor(self.mat, out.mat) ) + CHKERR(MatTransposeSetPrecursor(self.mat, out.mat)) def hermitianTranspose(self, Mat out=None) -> Mat: """Return the transposed Hermitian matrix. 
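The size and ownership queries above support the usual owner-computes assembly loop; a sketch for a 1D Laplacian of illustrative size:

    from petsc4py import PETSc

    n = 16
    A = PETSc.Mat().createAIJ([n, n], nnz=3, comm=PETSc.COMM_WORLD)
    rstart, rend = A.getOwnershipRange()   # rows [rstart, rend) are local
    for i in range(rstart, rend):
        A.setValue(i, i, 2.0)
        if i > 0:
            A.setValue(i, i - 1, -1.0)
        if i < n - 1:
            A.setValue(i, i + 1, -1.0)
    A.assemble()

    B = A.duplicate(copy=True)              # same layout, values copied
    D = A.convert("dense", PETSc.Mat())     # dense copy; passing a fresh Mat
                                            # avoids converting A in place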
@@ -2101,7 +2140,7 @@ cdef class Mat(Object): reuse = MAT_INITIAL_MATRIX else: reuse = MAT_REUSE_MATRIX - CHKERR( MatHermitianTranspose(self.mat, reuse, &out.mat) ) + CHKERR(MatHermitianTranspose(self.mat, reuse, &out.mat)) return out def realPart(self, Mat out=None) -> Mat: @@ -2123,8 +2162,8 @@ cdef class Mat(Object): if out is None: out = self elif out.mat == NULL: - CHKERR( MatDuplicate(self.mat, MAT_COPY_VALUES, &out.mat) ) - CHKERR( MatRealPart(out.mat) ) + CHKERR(MatDuplicate(self.mat, MAT_COPY_VALUES, &out.mat)) + CHKERR(MatRealPart(out.mat)) return out def imagPart(self, Mat out=None) -> Mat: @@ -2146,8 +2185,8 @@ cdef class Mat(Object): if out is None: out = self elif out.mat == NULL: - CHKERR( MatDuplicate(self.mat, MAT_COPY_VALUES, &out.mat) ) - CHKERR( MatImaginaryPart(out.mat) ) + CHKERR(MatDuplicate(self.mat, MAT_COPY_VALUES, &out.mat)) + CHKERR(MatImaginaryPart(out.mat)) return out def conjugate(self, Mat out=None) -> Mat: @@ -2169,8 +2208,8 @@ cdef class Mat(Object): if out is None: out = self elif out.mat == NULL: - CHKERR( MatDuplicate(self.mat, MAT_COPY_VALUES, &out.mat) ) - CHKERR( MatConjugate(out.mat) ) + CHKERR(MatDuplicate(self.mat, MAT_COPY_VALUES, &out.mat)) + CHKERR(MatConjugate(out.mat)) return out def permute(self, IS row, IS col) -> Mat: @@ -2191,7 +2230,7 @@ cdef class Mat(Object): """ cdef Mat mat = Mat() - CHKERR( MatPermute(self.mat, row.iset, col.iset, &mat.mat) ) + CHKERR(MatPermute(self.mat, row.iset, col.iset, &mat.mat)) return mat def equal(self, Mat mat) -> bool: @@ -2205,7 +2244,7 @@ cdef class Mat(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( MatEqual(self.mat, mat.mat, &flag) ) + CHKERR(MatEqual(self.mat, mat.mat, &flag)) return toBool(flag) def isTranspose(self, Mat mat=None, tol: float = 0) -> bool: @@ -2228,7 +2267,7 @@ cdef class Mat(Object): if mat is None: mat = self cdef PetscReal rval = asReal(tol) cdef PetscBool flag = PETSC_FALSE - CHKERR( MatIsTranspose(self.mat, mat.mat, rval, &flag) ) + CHKERR(MatIsTranspose(self.mat, mat.mat, rval, &flag)) return toBool(flag) def isSymmetric(self, tol: float = 0) -> bool: @@ -2248,7 +2287,7 @@ cdef class Mat(Object): """ cdef PetscReal rval = asReal(tol) cdef PetscBool flag = PETSC_FALSE - CHKERR( MatIsSymmetric(self.mat, rval, &flag) ) + CHKERR(MatIsSymmetric(self.mat, rval, &flag)) return toBool(flag) def isSymmetricKnown(self) -> tuple[bool, bool]: @@ -2263,7 +2302,7 @@ cdef class Mat(Object): """ cdef PetscBool flag1 = PETSC_FALSE cdef PetscBool flag2 = PETSC_FALSE - CHKERR( MatIsSymmetricKnown(self.mat, &flag1, &flag2) ) + CHKERR(MatIsSymmetricKnown(self.mat, &flag1, &flag2)) return (toBool(flag1), toBool(flag2)) def isHermitian(self, tol: float = 0) -> bool: @@ -2283,7 +2322,7 @@ cdef class Mat(Object): """ cdef PetscReal rval = asReal(tol) cdef PetscBool flag = PETSC_FALSE - CHKERR( MatIsHermitian(self.mat, rval, &flag) ) + CHKERR(MatIsHermitian(self.mat, rval, &flag)) return toBool(flag) def isHermitianKnown(self) -> tuple[bool, bool]: @@ -2298,13 +2337,21 @@ cdef class Mat(Object): """ cdef PetscBool flag1 = PETSC_FALSE cdef PetscBool flag2 = PETSC_FALSE - CHKERR( MatIsHermitianKnown(self.mat, &flag1, &flag2) ) + CHKERR(MatIsHermitianKnown(self.mat, &flag1, &flag2)) return (toBool(flag1), toBool(flag2)) def isStructurallySymmetric(self) -> bool: - """Return the boolean indicating if the matrix is structurally symmetric.""" + """Return the boolean indicating if the matrix is structurally symmetric. + + Not collective. 
+ + See Also + -------- + petsc.MatIsStructurallySymmetric + + """ cdef PetscBool flag = PETSC_FALSE - CHKERR( MatIsStructurallySymmetric(self.mat, &flag) ) + CHKERR(MatIsStructurallySymmetric(self.mat, &flag)) return toBool(flag) def zeroEntries(self) -> None: @@ -2317,10 +2364,10 @@ cdef class Mat(Object): petsc.MatZeroEntries """ - CHKERR( MatZeroEntries(self.mat) ) + CHKERR(MatZeroEntries(self.mat)) def getValue(self, row, col) -> Scalar: - """Return the value in the (row,col) position. + """Return the value in the (row, col) position. Not collective. @@ -2332,11 +2379,11 @@ cdef class Mat(Object): cdef PetscInt ival1 = asInt(row) cdef PetscInt ival2 = asInt(col) cdef PetscScalar sval = 0 - CHKERR( MatGetValues(self.mat, 1, &ival1, 1, &ival2, &sval) ) + CHKERR(MatGetValues(self.mat, 1, &ival1, 1, &ival2, &sval)) return toScalar(sval) def getValues(self, rows: Sequence[int], cols: Sequence[int], values: ArrayScalar = None) -> ArrayScalar: - """Return the values in the ``zip(rows,cols)`` positions. + """Return the values in the ``zip(rows, cols)`` positions. Not collective. @@ -2368,7 +2415,7 @@ cdef class Mat(Object): """ # row ownership cdef PetscInt rstart=0, rend=0, nrows=0 - CHKERR( MatGetOwnershipRange(self.mat, &rstart, &rend) ) + CHKERR(MatGetOwnershipRange(self.mat, &rstart, &rend)) nrows = rend - rstart # first pass: row pointer array cdef PetscInt *AI = NULL @@ -2376,9 +2423,9 @@ cdef class Mat(Object): cdef PetscInt irow=0, ncols=0 AI[0] = 0 for irow from 0 <= irow < nrows: - CHKERR( MatGetRow(self.mat, irow+rstart, &ncols, NULL, NULL) ) + CHKERR(MatGetRow(self.mat, irow+rstart, &ncols, NULL, NULL)) AI[irow+1] = AI[irow] + ncols - CHKERR( MatRestoreRow(self.mat, irow+rstart, &ncols, NULL, NULL) ) + CHKERR(MatRestoreRow(self.mat, irow+rstart, &ncols, NULL, NULL)) # second pass: column indices and values cdef PetscInt *AJ = NULL cdef ndarray aj = oarray_i(empty_i(AI[nrows]), NULL, &AJ) @@ -2387,10 +2434,10 @@ cdef class Mat(Object): cdef const PetscInt *cols = NULL cdef const PetscScalar *vals = NULL for irow from 0 <= irow < nrows: - CHKERR( MatGetRow(self.mat, irow+rstart, &ncols, &cols, &vals) ) - CHKERR( PetscMemcpy(AJ+AI[irow], cols, ncols*sizeof(PetscInt)) ) - CHKERR( PetscMemcpy(AV+AI[irow], vals, ncols*sizeof(PetscScalar)) ) - CHKERR( MatRestoreRow(self.mat, irow+rstart, &ncols, &cols, &vals) ) + CHKERR(MatGetRow(self.mat, irow+rstart, &ncols, &cols, &vals)) + CHKERR(PetscMemcpy(AJ+AI[irow], cols, ncols*sizeof(PetscInt))) + CHKERR(PetscMemcpy(AV+AI[irow], vals, ncols*sizeof(PetscScalar))) + CHKERR(MatRestoreRow(self.mat, irow+rstart, &ncols, &cols, &vals)) # return (ai, aj, av) @@ -2408,10 +2455,10 @@ cdef class Mat(Object): cdef PetscInt ncols = 0 cdef const PetscInt *icols=NULL cdef const PetscScalar *svals=NULL - CHKERR( MatGetRow(self.mat, irow, &ncols, &icols, &svals) ) + CHKERR(MatGetRow(self.mat, irow, &ncols, &icols, &svals)) cdef object cols = array_i(ncols, icols) cdef object vals = array_s(ncols, svals) - CHKERR( MatRestoreRow(self.mat, irow, &ncols, &icols, &svals) ) + CHKERR(MatRestoreRow(self.mat, irow, &ncols, &icols, &svals)) return (cols, vals) def getRowIJ(self, symmetric: bool = False, compressed: bool = False) -> tuple[ArrayInt, ArrayInt]: @@ -2438,11 +2485,11 @@ cdef class Mat(Object): cdef const PetscInt *ia=NULL cdef const PetscInt *ja=NULL cdef PetscBool done=PETSC_FALSE - CHKERR( MatGetRowIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done) ) + CHKERR(MatGetRowIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done)) cdef object ai=None, aj=None - 
if done != PETSC_FALSE: ai = array_i( n+1, ia) + if done != PETSC_FALSE: ai = array_i(n+1, ia) if done != PETSC_FALSE: aj = array_i(ia[n], ja) - CHKERR( MatRestoreRowIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done) ) + CHKERR(MatRestoreRowIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done)) return (ai, aj) def getColumnIJ(self, symmetric: bool = False, compressed: bool = False) -> tuple[ArrayInt, ArrayInt]: @@ -2468,11 +2515,11 @@ cdef class Mat(Object): cdef const PetscInt *ia=NULL cdef const PetscInt *ja=NULL cdef PetscBool done=PETSC_FALSE - CHKERR( MatGetColumnIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done) ) + CHKERR(MatGetColumnIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done)) cdef object ai=None, aj=None - if done != PETSC_FALSE: ai = array_i( n+1, ia) + if done != PETSC_FALSE: ai = array_i(n+1, ia) if done != PETSC_FALSE: aj = array_i(ia[n], ja) - CHKERR( MatRestoreColumnIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done) ) + CHKERR(MatRestoreColumnIJ(self.mat, shift, symm, bcmp, &n, &ia, &ja, &done)) return (ai, aj) def setValue( @@ -2480,8 +2527,7 @@ cdef class Mat(Object): row: int, col: int, value: Scalar, - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set a value to the ``(row, col)`` entry of the matrix. Not collective. @@ -2506,15 +2552,14 @@ cdef class Mat(Object): cdef PetscInt ival2 = asInt(col) cdef PetscScalar sval = asScalar(value) cdef PetscInsertMode caddv = insertmode(addv) - CHKERR( MatSetValues(self.mat, 1, &ival1, 1, &ival2, &sval, caddv) ) + CHKERR(MatSetValues(self.mat, 1, &ival1, 1, &ival2, &sval, caddv)) def setValues( self, rows: Sequence[int], cols: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values to the rows ⊗ cols entries of the matrix. Not collective. @@ -2547,8 +2592,7 @@ cdef class Mat(Object): J: Sequence[int], V: Sequence[Scalar], addv: InsertModeSpec = None, - rowmap: Sequence[int] = None, - ) -> None: + rowmap: Sequence[int] = None) -> None: """Set a subset of values stored in CSR format. Not collective. @@ -2578,8 +2622,7 @@ cdef class Mat(Object): I: Sequence[int], J: Sequence[int], V: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values stored in CSR format. Not collective. @@ -2607,8 +2650,7 @@ cdef class Mat(Object): rows: Sequence[int], cols: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values to the rows ⊗ cols block entries of the matrix. Not collective. @@ -2643,8 +2685,7 @@ cdef class Mat(Object): J: Sequence[int], V: Sequence[Scalar], addv: InsertModeSpec = None, - rowmap: Sequence[int] = None, - ) -> None: + rowmap: Sequence[int] = None) -> None: """Set a subset of values stored in block CSR format. Not collective. @@ -2674,8 +2715,7 @@ cdef class Mat(Object): I: Sequence[int], J: Sequence[int], V: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values stored in block CSR format. Not collective. @@ -2716,7 +2756,7 @@ cdef class Mat(Object): """ if cmap is None: cmap = rmap - CHKERR( MatSetLocalToGlobalMapping(self.mat, rmap.lgm, cmap.lgm) ) + CHKERR(MatSetLocalToGlobalMapping(self.mat, rmap.lgm, cmap.lgm)) def getLGMap(self) -> tuple[LGMap, LGMap]: """Return the local-to-global mappings.
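A small sketch of the CSR setters/getters above, on one process for brevity; the 3×3 tridiagonal data is illustrative:

    from petsc4py import PETSc

    I = [0, 2, 5, 7]                  # row pointers
    J = [0, 1, 0, 1, 2, 1, 2]         # column indices
    V = [2.0, -1.0, -1.0, 2.0, -1.0, -1.0, 2.0]

    A = PETSc.Mat().createAIJ(3, comm=PETSc.COMM_SELF)
    A.setUp()
    A.setValuesCSR(I, J, V)
    A.assemble()
    ai, aj, av = A.getValuesCSR()     # round-trips the three arrays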
@@ -2730,9 +2770,9 @@ cdef class Mat(Object): """ cdef LGMap cmap = LGMap() cdef LGMap rmap = LGMap() - CHKERR( MatGetLocalToGlobalMapping(self.mat, &rmap.lgm, &cmap.lgm) ) - CHKERR( PetscINCREF(cmap.obj) ) - CHKERR( PetscINCREF(rmap.obj) ) + CHKERR(MatGetLocalToGlobalMapping(self.mat, &rmap.lgm, &cmap.lgm)) + CHKERR(PetscINCREF(cmap.obj)) + CHKERR(PetscINCREF(rmap.obj)) return (rmap, cmap) def setValueLocal( @@ -2740,8 +2780,7 @@ cdef class Mat(Object): row: int, col: int, value: Scalar, - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set a value to the ``(row, col)`` entry of the matrix in local ordering. Not collective. @@ -2766,16 +2805,15 @@ cdef class Mat(Object): cdef PetscInt ival2 = asInt(col) cdef PetscScalar sval = asScalar(value) cdef PetscInsertMode caddv = insertmode(addv) - CHKERR( MatSetValuesLocal( - self.mat, 1, &ival1, 1, &ival2, &sval, caddv) ) + CHKERR(MatSetValuesLocal( + self.mat, 1, &ival1, 1, &ival2, &sval, caddv)) def setValuesLocal( self, rows: Sequence[int], cols: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values to the rows ⊗ cols entries of the matrix in local ordering. Not collective. @@ -2808,8 +2846,7 @@ cdef class Mat(Object): J: Sequence[int], V: Sequence[Scalar], addv: InsertModeSpec = None, - rowmap: Sequence[int] = None, - ) -> None: + rowmap: Sequence[int] = None) -> None: """Set a subset of values stored in CSR format. Not collective. @@ -2839,8 +2876,7 @@ cdef class Mat(Object): I: Sequence[int], J: Sequence[int], V: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values stored in CSR format. Not collective. @@ -2868,8 +2904,7 @@ cdef class Mat(Object): rows: Sequence[int], cols: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values to the rows ⊗ cols block entries of the matrix in local ordering. Not collective. @@ -2904,8 +2939,7 @@ cdef class Mat(Object): J: Sequence[int], V: Sequence[Scalar], addv: InsertModeSpec = None, - rowmap: Sequence[int] = None, - ) -> None: + rowmap: Sequence[int] = None) -> None: """Set a subset of values stored in block CSR format. Not collective. @@ -2935,8 +2969,7 @@ cdef class Mat(Object): I: Sequence[int], J: Sequence[int], V: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set values stored in block CSR format. Not collective. @@ -2981,15 +3014,14 @@ cdef class Mat(Object): ndof = asInt(dof) if starts is not None: asDims(dims, &cstarts[0], &cstarts[1], &cstarts[2]) - CHKERR( MatSetStencil(self.mat, ndim, cdims, cstarts, ndof) ) + CHKERR(MatSetStencil(self.mat, ndim, cdims, cstarts, ndof)) def setValueStencil( self, MatStencil row: Stencil, MatStencil col: Stencil, value: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set a value to row and col stencil. Not collective. @@ -3023,8 +3055,7 @@ cdef class Mat(Object): row: Stencil, col: Stencil, value: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Set a block of values to row and col stencil. Not collective.
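The local-ordering setters require a local-to-global map first; a one-process sketch with an identity map and illustrative values:

    from petsc4py import PETSc

    n = 4
    A = PETSc.Mat().createAIJ(n, comm=PETSc.COMM_SELF)
    A.setUp()
    lgmap = PETSc.LGMap().create(list(range(n)), comm=PETSc.COMM_SELF)
    A.setLGMap(lgmap)                 # one map shared by rows and columns
    A.setValuesLocal([0, 1], [0, 1], [4.0, -1.0, -1.0, 4.0])
    A.assemble()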
@@ -3081,10 +3112,10 @@ cdef class Mat(Object): if x is not None: xvec = x.vec if b is not None: bvec = b.vec if isinstance(rows, IS): - CHKERR( MatZeroRowsIS(self.mat, (rows).iset, sval, xvec, bvec) ) + CHKERR(MatZeroRowsIS(self.mat, (rows).iset, sval, xvec, bvec)) else: rows = iarray_i(rows, &ni, &i) - CHKERR( MatZeroRows(self.mat, ni, i, sval, xvec, bvec) ) + CHKERR(MatZeroRows(self.mat, ni, i, sval, xvec, bvec)) def zeroRowsLocal(self, rows: IS | Sequence[int], diag: Scalar = 1, Vec x=None, Vec b=None) -> None: """Zero selected rows of the matrix in local ordering. @@ -3114,10 +3145,10 @@ cdef class Mat(Object): if x is not None: xvec = x.vec if b is not None: bvec = b.vec if isinstance(rows, IS): - CHKERR( MatZeroRowsLocalIS(self.mat, (rows).iset, sval, xvec, bvec) ) + CHKERR(MatZeroRowsLocalIS(self.mat, (rows).iset, sval, xvec, bvec)) else: rows = iarray_i(rows, &ni, &i) - CHKERR( MatZeroRowsLocal(self.mat, ni, i, sval, xvec, bvec) ) + CHKERR(MatZeroRowsLocal(self.mat, ni, i, sval, xvec, bvec)) def zeroRowsColumns(self, rows: IS | Sequence[int], diag: Scalar = 1, Vec x=None, Vec b=None) -> None: """Zero selected rows and columns of the matrix. @@ -3148,10 +3179,10 @@ cdef class Mat(Object): if x is not None: xvec = x.vec if b is not None: bvec = b.vec if isinstance(rows, IS): - CHKERR( MatZeroRowsColumnsIS(self.mat, (rows).iset, sval, xvec, bvec) ) + CHKERR(MatZeroRowsColumnsIS(self.mat, (rows).iset, sval, xvec, bvec)) else: rows = iarray_i(rows, &ni, &i) - CHKERR( MatZeroRowsColumns(self.mat, ni, i, sval, xvec, bvec) ) + CHKERR(MatZeroRowsColumns(self.mat, ni, i, sval, xvec, bvec)) def zeroRowsColumnsLocal(self, rows: IS | Sequence[int], diag: Scalar = 1, Vec x=None, Vec b=None) -> None: """Zero selected rows and columns of the matrix in local ordering. @@ -3182,10 +3213,10 @@ cdef class Mat(Object): if x is not None: xvec = x.vec if b is not None: bvec = b.vec if isinstance(rows, IS): - CHKERR( MatZeroRowsColumnsLocalIS(self.mat, (rows).iset, sval, xvec, bvec) ) + CHKERR(MatZeroRowsColumnsLocalIS(self.mat, (rows).iset, sval, xvec, bvec)) else: rows = iarray_i(rows, &ni, &i) - CHKERR( MatZeroRowsColumnsLocal(self.mat, ni, i, sval, xvec, bvec) ) + CHKERR(MatZeroRowsColumnsLocal(self.mat, ni, i, sval, xvec, bvec)) def zeroRowsColumnsStencil(self, rows: Sequence[Stencil], diag: Scalar = 1, Vec x=None, Vec b=None) -> None: """Zero selected rows and columns of the matrix. @@ -3211,18 +3242,17 @@ cdef class Mat(Object): """ cdef PetscScalar sval = asScalar(diag) cdef PetscInt nrows = asInt(len(rows)) - cdef PetscMatStencil st - cdef MatStencil r + cdef MatStencil r = 0 cdef PetscMatStencil *crows = NULL - CHKERR( PetscMalloc((nrows+1)*sizeof(st), &crows) ) + CHKERR(PetscMalloc((nrows+1)*sizeof(PetscMatStencil), &crows)) for i in range(nrows): r = rows[i] crows[i] = r.stencil cdef PetscVec xvec = NULL, bvec = NULL if x is not None: xvec = x.vec if b is not None: bvec = b.vec - CHKERR( MatZeroRowsColumnsStencil(self.mat, nrows, crows, sval, xvec, bvec) ) - CHKERR( PetscFree( crows ) ) + CHKERR(MatZeroRowsColumnsStencil(self.mat, nrows, crows, sval, xvec, bvec)) + CHKERR(PetscFree(crows)) def storeValues(self) -> None: """Stash a copy of the matrix values. @@ -3234,7 +3264,7 @@ cdef class Mat(Object): retrieveValues, petsc.MatStoreValues """ - CHKERR( MatStoreValues(self.mat) ) + CHKERR(MatStoreValues(self.mat)) def retrieveValues(self) -> None: """Retrieve a copy of the matrix values previously stored with `storeValues`. 
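The zeroRows* family above is the standard way to impose Dirichlet conditions after assembly; a one-process sketch with illustrative data:

    from petsc4py import PETSc

    n = 8
    A = PETSc.Mat().createAIJ(n, comm=PETSc.COMM_SELF)
    A.setUp()
    for i in range(n):
        A.setValue(i, i, 4.0)
    A.assemble()

    x, b = A.createVecs()
    x.set(0.0)          # known boundary values
    b.set(1.0)          # right-hand side
    # Zero rows/columns 0 and n-1, put 1.0 on their diagonal, and adjust b
    # for the eliminated couplings using the values in x.
    A.zeroRowsColumns([0, n - 1], diag=1.0, x=x, b=b)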
@@ -3246,7 +3276,7 @@ cdef class Mat(Object): storeValues, petsc.MatRetrieveValues """ - CHKERR( MatRetrieveValues(self.mat) ) + CHKERR(MatRetrieveValues(self.mat)) def assemblyBegin(self, assembly: MatAssemblySpec = None) -> None: """Begin an assembling stage of the matrix. @@ -3264,7 +3294,7 @@ cdef class Mat(Object): """ cdef PetscMatAssemblyType flag = assemblytype(assembly) - CHKERR( MatAssemblyBegin(self.mat, flag) ) + CHKERR(MatAssemblyBegin(self.mat, flag)) def assemblyEnd(self, assembly: MatAssemblySpec = None) -> None: """Complete an assembling stage of the matrix initiated with `assemblyBegin`. @@ -3282,7 +3312,7 @@ cdef class Mat(Object): """ cdef PetscMatAssemblyType flag = assemblytype(assembly) - CHKERR( MatAssemblyEnd(self.mat, flag) ) + CHKERR(MatAssemblyEnd(self.mat, flag)) def assemble(self, assembly: MatAssemblySpec = None) -> None: """Assemble the matrix. @@ -3300,8 +3330,8 @@ cdef class Mat(Object): """ cdef PetscMatAssemblyType flag = assemblytype(assembly) - CHKERR( MatAssemblyBegin(self.mat, flag) ) - CHKERR( MatAssemblyEnd(self.mat, flag) ) + CHKERR(MatAssemblyBegin(self.mat, flag)) + CHKERR(MatAssemblyEnd(self.mat, flag)) def isAssembled(self) -> bool: """The boolean flag indicating if the matrix is assembled. @@ -3314,7 +3344,7 @@ cdef class Mat(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( MatAssembled(self.mat, &flag) ) + CHKERR(MatAssembled(self.mat, &flag)) return toBool(flag) def findZeroRows(self) -> IS: @@ -3328,13 +3358,12 @@ cdef class Mat(Object): """ cdef IS zerorows = IS() - CHKERR( MatFindZeroRows(self.mat, &zerorows.iset) ) + CHKERR(MatFindZeroRows(self.mat, &zerorows.iset)) return zerorows def createVecs( self, - side: Literal['r', 'R', 'right', 'Right', 'RIGHT', 'l', 'L', 'left', 'Left', 'LEFT'] | None = None, - ) -> Vec | tuple[Vec, Vec]: + side: Literal['r', 'R', 'right', 'Right', 'RIGHT', 'l', 'L', 'left', 'Left', 'LEFT'] | None = None) -> Vec | tuple[Vec, Vec]: """Return vectors that can be used in matrix vector products. Collective. 
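The two assembly phases above also allow an intermediate flush, e.g. before switching between insert and add modes; a one-process sketch:

    from petsc4py import PETSc

    A = PETSc.Mat().createAIJ(4, comm=PETSc.COMM_SELF)
    A.setUp()
    A.setValue(0, 0, 1.0)
    A.assemble(PETSc.Mat.AssemblyType.FLUSH)   # flush, then keep inserting
    A.setValue(3, 3, 1.0, addv=True)           # now in ADD mode
    A.assemble()                               # final assembly
    assert A.isAssembled()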
@@ -3357,16 +3386,16 @@ cdef class Mat(Object): """ cdef Vec vecr, vecl if side is None: - vecr = Vec(); vecl = Vec(); - CHKERR( MatCreateVecs(self.mat, &vecr.vec, &vecl.vec) ) + vecr = Vec(); vecl = Vec() + CHKERR(MatCreateVecs(self.mat, &vecr.vec, &vecl.vec)) return (vecr, vecl) elif side in ('r', 'R', 'right', 'Right', 'RIGHT'): vecr = Vec() - CHKERR( MatCreateVecs(self.mat, &vecr.vec, NULL) ) + CHKERR(MatCreateVecs(self.mat, &vecr.vec, NULL)) return vecr elif side in ('l', 'L', 'left', 'Left', 'LEFT'): vecl = Vec() - CHKERR( MatCreateVecs(self.mat, NULL, &vecl.vec) ) + CHKERR(MatCreateVecs(self.mat, NULL, &vecl.vec)) return vecl else: raise ValueError("side '%r' not understood" % side) @@ -3382,7 +3411,7 @@ cdef class Mat(Object): """ cdef Vec vecr = Vec() - CHKERR( MatCreateVecs(self.mat, &vecr.vec, NULL) ) + CHKERR(MatCreateVecs(self.mat, &vecr.vec, NULL)) return vecr def createVecLeft(self) -> Vec: @@ -3396,7 +3425,7 @@ cdef class Mat(Object): """ cdef Vec vecl = Vec() - CHKERR( MatCreateVecs(self.mat, NULL, &vecl.vec) ) + CHKERR(MatCreateVecs(self.mat, NULL, &vecl.vec)) return vecl getVecs = createVecs @@ -3426,13 +3455,15 @@ cdef class Mat(Object): if result is None: result = Vec() if result.vec == NULL: - CHKERR( MatCreateVecs(self.mat, NULL, &result.vec) ) - CHKERR( MatGetColumnVector(self.mat, result.vec, ival) ) + CHKERR(MatCreateVecs(self.mat, NULL, &result.vec)) + CHKERR(MatGetColumnVector(self.mat, result.vec, ival)) return result def getRedundantMatrix(self, nsubcomm: int, subcomm: Comm | None = None, Mat out=None) -> Mat: """Return redundant matrices on subcommunicators. + Collective. + Parameters ---------- nsubcomm @@ -3455,7 +3486,7 @@ cdef class Mat(Object): cdef PetscMatReuse reuse = MAT_INITIAL_MATRIX if out is None: out = Mat() if out.mat != NULL: reuse = MAT_REUSE_MATRIX - CHKERR( MatCreateRedundantMatrix(self.mat, _nsubcomm, _subcomm, reuse, &out.mat)) + CHKERR(MatCreateRedundantMatrix(self.mat, _nsubcomm, _subcomm, reuse, &out.mat)) return out def getDiagonal(self, Vec result=None) -> Vec: @@ -3476,8 +3507,8 @@ cdef class Mat(Object): if result is None: result = Vec() if result.vec == NULL: - CHKERR( MatCreateVecs(self.mat, NULL, &result.vec) ) - CHKERR( MatGetDiagonal(self.mat, result.vec) ) + CHKERR(MatCreateVecs(self.mat, NULL, &result.vec)) + CHKERR(MatGetDiagonal(self.mat, result.vec)) return result def getRowSum(self, Vec result=None) -> Vec: @@ -3498,8 +3529,8 @@ cdef class Mat(Object): if result is None: result = Vec() if result.vec == NULL: - CHKERR( MatCreateVecs(self.mat, NULL, &result.vec) ) - CHKERR( MatGetRowSum(self.mat, result.vec) ) + CHKERR(MatCreateVecs(self.mat, NULL, &result.vec)) + CHKERR(MatGetRowSum(self.mat, result.vec)) return result def setDiagonal(self, Vec diag, addv: InsertModeSpec = None) -> None: @@ -3520,7 +3551,7 @@ cdef class Mat(Object): """ cdef PetscInsertMode caddv = insertmode(addv) - CHKERR( MatDiagonalSet(self.mat, diag.vec, caddv) ) + CHKERR(MatDiagonalSet(self.mat, diag.vec, caddv)) def diagonalScale(self, Vec L=None, Vec R=None) -> None: """Perform left and/or right diagonal scaling of the matrix. @@ -3542,7 +3573,7 @@ cdef class Mat(Object): cdef PetscVec vecl=NULL, vecr=NULL if L is not None: vecl = L.vec if R is not None: vecr = R.vec - CHKERR( MatDiagonalScale(self.mat, vecl, vecr) ) + CHKERR(MatDiagonalScale(self.mat, vecl, vecr)) def invertBlockDiagonal(self) -> ArrayScalar: """Return the inverse of the block-diagonal entries. 
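`getDiagonal` and `diagonalScale` combine into a one-sided Jacobi scaling; a one-process sketch with illustrative entries:

    from petsc4py import PETSc

    A = PETSc.Mat().createAIJ(4, comm=PETSc.COMM_SELF)
    A.setUp()
    for i in range(4):
        A.setValue(i, i, float(i + 1))
    A.assemble()

    d = A.getDiagonal()     # d[i] = A[i, i]
    d.reciprocal()          # d[i] = 1 / A[i, i]
    A.diagonalScale(L=d)    # row scaling: A <- diag(d) A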
@@ -3556,9 +3587,9 @@ cdef class Mat(Object): """ cdef PetscInt bs = 0, m = 0 cdef const PetscScalar *cibdiag = NULL - CHKERR( MatGetBlockSize(self.mat, &bs) ) - CHKERR( MatGetLocalSize(self.mat, &m, NULL) ) - CHKERR( MatInvertBlockDiagonal(self.mat, &cibdiag) ) + CHKERR(MatGetBlockSize(self.mat, &bs)) + CHKERR(MatGetLocalSize(self.mat, &m, NULL)) + CHKERR(MatInvertBlockDiagonal(self.mat, &cibdiag)) cdef ndarray ibdiag = array_s(m*bs, cibdiag) ibdiag.shape = (toInt(m//bs), toInt(bs), toInt(bs)) return ibdiag.transpose(0, 2, 1) @@ -3575,7 +3606,7 @@ cdef class Mat(Object): getNullSpace, petsc.MatSetNullSpace """ - CHKERR( MatSetNullSpace(self.mat, nsp.nsp) ) + CHKERR(MatSetNullSpace(self.mat, nsp.nsp)) def getNullSpace(self) -> NullSpace: """Return the nullspace. @@ -3588,8 +3619,8 @@ cdef class Mat(Object): """ cdef NullSpace nsp = NullSpace() - CHKERR( MatGetNullSpace(self.mat, &nsp.nsp) ) - CHKERR( PetscINCREF(nsp.obj) ) + CHKERR(MatGetNullSpace(self.mat, &nsp.nsp)) + CHKERR(PetscINCREF(nsp.obj)) return nsp def setTransposeNullSpace(self, NullSpace nsp) -> None: @@ -3602,7 +3633,7 @@ cdef class Mat(Object): setNullSpace, getTransposeNullSpace, petsc.MatSetTransposeNullSpace """ - CHKERR( MatSetTransposeNullSpace(self.mat, nsp.nsp) ) + CHKERR(MatSetTransposeNullSpace(self.mat, nsp.nsp)) def getTransposeNullSpace(self) -> NullSpace: """Return the transpose nullspace. @@ -3615,8 +3646,8 @@ cdef class Mat(Object): """ cdef NullSpace nsp = NullSpace() - CHKERR( MatGetTransposeNullSpace(self.mat, &nsp.nsp) ) - CHKERR( PetscINCREF(nsp.obj) ) + CHKERR(MatGetTransposeNullSpace(self.mat, &nsp.nsp)) + CHKERR(PetscINCREF(nsp.obj)) return nsp def setNearNullSpace(self, NullSpace nsp) -> None: @@ -3629,7 +3660,7 @@ cdef class Mat(Object): setNullSpace, getNearNullSpace, petsc.MatSetNearNullSpace """ - CHKERR( MatSetNearNullSpace(self.mat, nsp.nsp) ) + CHKERR(MatSetNearNullSpace(self.mat, nsp.nsp)) def getNearNullSpace(self) -> NullSpace: """Return the near-nullspace. @@ -3642,8 +3673,8 @@ cdef class Mat(Object): """ cdef NullSpace nsp = NullSpace() - CHKERR( MatGetNearNullSpace(self.mat, &nsp.nsp) ) - CHKERR( PetscINCREF(nsp.obj) ) + CHKERR(MatGetNearNullSpace(self.mat, &nsp.nsp)) + CHKERR(PetscINCREF(nsp.obj)) return nsp # matrix-vector product @@ -3665,7 +3696,7 @@ cdef class Mat(Object): petsc.MatMult """ - CHKERR( MatMult(self.mat, x.vec, y.vec) ) + CHKERR(MatMult(self.mat, x.vec, y.vec)) def multAdd(self, Vec x, Vec v, Vec y) -> None: """Perform the matrix vector product with addition y = A @ x + v. @@ -3686,7 +3717,7 @@ cdef class Mat(Object): petsc.MatMultAdd """ - CHKERR( MatMultAdd(self.mat, x.vec, v.vec, y.vec) ) + CHKERR(MatMultAdd(self.mat, x.vec, v.vec, y.vec)) def multTranspose(self, Vec x, Vec y) -> None: """Perform the transposed matrix vector product y = A^T @ x. @@ -3705,7 +3736,7 @@ cdef class Mat(Object): petsc.MatMultTranspose """ - CHKERR( MatMultTranspose(self.mat, x.vec, y.vec) ) + CHKERR(MatMultTranspose(self.mat, x.vec, y.vec)) def multTransposeAdd(self, Vec x, Vec v, Vec y) -> None: """Perform the transposed matrix vector product with addition y = A^T @ x + v. @@ -3726,7 +3757,7 @@ cdef class Mat(Object): petsc.MatMultTransposeAdd """ - CHKERR( MatMultTransposeAdd(self.mat, x.vec, v.vec, y.vec) ) + CHKERR(MatMultTransposeAdd(self.mat, x.vec, v.vec, y.vec)) def multHermitian(self, Vec x, Vec y) -> None: """Perform the Hermitian matrix vector product y = A^H @ x. 
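Attaching the constant null space is the standard setup for singular operators such as pure-Neumann Laplacians; in this sketch the matrix itself is only a stand-in:

    from petsc4py import PETSc

    A = PETSc.Mat().createAIJ(4, comm=PETSc.COMM_SELF)
    A.setUp()
    for i in range(4):
        A.setValue(i, i, 1.0)
    A.assemble()

    nsp = PETSc.NullSpace().create(constant=True, comm=A.getComm())
    A.setNullSpace(nsp)        # lets KSP project the kernel out of iterates
    A.setNearNullSpace(nsp)    # also guides AMG (e.g. GAMG) coarse spaces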
@@ -3745,7 +3776,7 @@ cdef class Mat(Object): petsc.MatMultHermitianTranspose """ - CHKERR( MatMultHermitian(self.mat, x.vec, y.vec) ) + CHKERR(MatMultHermitian(self.mat, x.vec, y.vec)) def multHermitianAdd(self, Vec x, Vec v, Vec y) -> None: """Perform the Hermitian matrix vector product with addition y = A^H @ x + v. @@ -3766,7 +3797,7 @@ cdef class Mat(Object): petsc.MatMultHermitianTransposeAdd """ - CHKERR( MatMultHermitianAdd(self.mat, x.vec, v.vec, y.vec) ) + CHKERR(MatMultHermitianAdd(self.mat, x.vec, v.vec, y.vec)) # SOR @@ -3774,12 +3805,11 @@ cdef class Mat(Object): self, Vec b, Vec x, - omega:float = 1.0, - sortype:SORType | None = None, - shift:float = 0.0, - its:int = 1, - lits:int = 1, - ) -> None: + omega: float = 1.0, + sortype: SORType | None = None, + shift: float = 0.0, + its: int = 1, + lits: int = 1) -> None: """Compute relaxation (SOR, Gauss-Seidel) sweeps. Neighborwise collective. @@ -3796,7 +3826,7 @@ cdef class Mat(Object): cdef PetscReal cshift = asReal(shift) cdef PetscInt cits = asInt(its) cdef PetscInt clits = asInt(lits) - CHKERR( MatSOR(self.mat, b.vec, comega, csortype, cshift, cits, clits, x.vec) ) + CHKERR(MatSOR(self.mat, b.vec, comega, csortype, cshift, cits, clits, x.vec)) # @@ -3811,8 +3841,8 @@ cdef class Mat(Object): """ cdef Mat submat = Mat() - CHKERR( MatGetDiagonalBlock(self.mat, &submat.mat) ) - CHKERR( PetscINCREF(submat.obj) ) + CHKERR(MatGetDiagonalBlock(self.mat, &submat.mat)) + CHKERR(PetscINCREF(submat.obj)) return submat def increaseOverlap(self, IS iset, overlap: int = 1) -> None: @@ -3826,7 +3856,7 @@ cdef class Mat(Object): """ cdef PetscInt ival = asInt(overlap) - CHKERR( MatIncreaseOverlap(self.mat, 1, &iset.iset, ival) ) + CHKERR(MatIncreaseOverlap(self.mat, 1, &iset.iset, ival)) def createSubMatrix(self, IS isrow, IS iscol=None, Mat submat=None) -> Mat: """Return a submatrix. @@ -3854,16 +3884,15 @@ cdef class Mat(Object): if iscol is not None: ciscol = iscol.iset if submat is None: submat = Mat() if submat.mat != NULL: reuse = MAT_REUSE_MATRIX - CHKERR( MatCreateSubMatrix(self.mat, isrow.iset, ciscol, - reuse, &submat.mat) ) + CHKERR(MatCreateSubMatrix(self.mat, isrow.iset, ciscol, + reuse, &submat.mat)) return submat def createSubMatrices( self, isrows: IS | Sequence[IS], iscols: IS | Sequence[IS] = None, - submats: Mat | Sequence[Mat] = None, - ) -> Sequence[Mat]: + submats: Mat | Sequence[Mat] = None) -> Sequence[Mat]: """Return several sequential submatrices. Collective. 
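Extracting a (here trivial) parallel submatrix with the calls above, using the ownership index sets; a sketch:

    from petsc4py import PETSc

    A = PETSc.Mat().createAIJ(8, comm=PETSc.COMM_WORLD)
    A.setUp()
    rstart, rend = A.getOwnershipRange()
    for i in range(rstart, rend):
        A.setValue(i, i, 1.0)
    A.assemble()

    rows, cols = A.getOwnershipIS()
    sub = A.createSubMatrix(rows, cols)   # selects everything, so sub equals A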
@@ -3893,26 +3922,25 @@ cdef class Mat(Object): cdef PetscIS *cisrows = NULL cdef PetscIS *ciscols = NULL cdef PetscMat *cmats = NULL - cdef object tmp1, tmp2 cdef Mat mat - tmp1 = oarray_p(empty_p(n), NULL, &cisrows) + cdef object unused1 = oarray_p(empty_p(n), NULL, &cisrows) for i from 0 <= i < n: cisrows[i] = (isrows[i]).iset - tmp2 = oarray_p(empty_p(n), NULL, &ciscols) + cdef object unused2 = oarray_p(empty_p(n), NULL, &ciscols) for i from 0 <= i < n: ciscols[i] = (iscols[i]).iset if submats is not None: reuse = MAT_REUSE_MATRIX submats = list(submats) assert len(submats) == len(isrows) - CHKERR( PetscMalloc((n+1)*sizeof(PetscMat), &cmats) ) + CHKERR(PetscMalloc((n+1)*sizeof(PetscMat), &cmats)) for i from 0 <= i < n: cmats[i] = (submats[i]).mat - CHKERR( MatCreateSubMatrices(self.mat, n, cisrows, ciscols, reuse, &cmats) ) - for i from 0 <= i < n: CHKERR( PetscINCREF(&cmats[i]) ) + CHKERR(MatCreateSubMatrices(self.mat, n, cisrows, ciscols, reuse, &cmats)) + for i from 0 <= i < n: CHKERR(PetscINCREF(&cmats[i])) if reuse == MAT_INITIAL_MATRIX: submats = [None] * n for i from 0 <= i < n: submats[i] = mat = Mat() mat.mat = cmats[i] - CHKERR( MatDestroyMatrices(n, &cmats) ) + CHKERR(MatDestroyMatrices(n, &cmats)) return submats # @@ -3939,11 +3967,11 @@ cdef class Mat(Object): """ if submat is None: submat = Mat() - else: CHKERR( MatDestroy(&submat.mat) ) - CHKERR( MatGetLocalSubMatrix(self.mat, isrow.iset, iscol.iset, &submat.mat) ) + else: CHKERR(MatDestroy(&submat.mat)) + CHKERR(MatGetLocalSubMatrix(self.mat, isrow.iset, iscol.iset, &submat.mat)) return submat - def restoreLocalSubMatrix(self, IS isrow, IS iscol, Mat submat): + def restoreLocalSubMatrix(self, IS isrow, IS iscol, Mat submat) -> None: """Restore a reference to a submatrix obtained with `getLocalSubMatrix`. Collective. @@ -3962,14 +3990,13 @@ cdef class Mat(Object): getLocalSubMatrix, petsc.MatRestoreLocalSubMatrix """ - CHKERR( MatRestoreLocalSubMatrix(self.mat, isrow.iset, iscol.iset, &submat.mat) ) + CHKERR(MatRestoreLocalSubMatrix(self.mat, isrow.iset, iscol.iset, &submat.mat)) # def norm( self, - norm_type: NormTypeSpec = None, - ) -> float | tuple[float, float]: + norm_type: NormTypeSpec = None) -> float | tuple[float, float]: """Compute the requested matrix norm. Collective. @@ -3985,7 +4012,7 @@ cdef class Mat(Object): cdef PetscNormType ntype = PETSC_NORM_FROBENIUS if norm_type is not None: ntype = norm_type cdef PetscReal rval[2] - CHKERR( MatNorm(self.mat, ntype, rval) ) + CHKERR(MatNorm(self.mat, ntype, rval)) if ntype != norm_1_2: return toReal(rval[0]) else: return (toReal(rval[0]), toReal(rval[1])) @@ -4000,7 +4027,7 @@ cdef class Mat(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( MatScale(self.mat, sval) ) + CHKERR(MatScale(self.mat, sval)) def shift(self, alpha: Scalar) -> None: """Shift the matrix. @@ -4013,7 +4040,7 @@ cdef class Mat(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( MatShift(self.mat, sval) ) + CHKERR(MatShift(self.mat, sval)) def chop(self, tol: float) -> None: """Set entries smaller than tol (in absolute value) to zero. @@ -4026,7 +4053,7 @@ cdef class Mat(Object): """ cdef PetscReal rval = asReal(tol) - CHKERR( MatFilter(self.mat, rval, PETSC_FALSE, PETSC_FALSE) ) + CHKERR(MatFilter(self.mat, rval, PETSC_FALSE, PETSC_FALSE)) def setRandom(self, Random random=None) -> None: """Set random values in the matrix.
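The norm/scale/shift/filter entry points in one short sequential sketch (entries illustrative):

    import numpy as np
    from petsc4py import PETSc

    vals = np.array([[1.0, 2.0, 0.0],
                     [0.0, 3.0, 0.0],
                     [0.0, 0.0, 4.0]])
    A = PETSc.Mat().createDense([3, 3], array=vals, comm=PETSc.COMM_SELF)
    A.assemble()

    fro = A.norm()                               # Frobenius (the default)
    one = A.norm(PETSc.NormType.NORM_1)          # max column sum
    inf = A.norm(PETSc.NormType.NORM_INFINITY)   # max row sum
    A.scale(2.0)       # A <- 2A
    A.shift(1.0)       # A <- A + I
    A.chop(1e-12)      # zero out entries below 1e-12 in magnitude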
@@ -4045,7 +4072,7 @@ cdef class Mat(Object): """ cdef PetscRandom rnd = NULL if random is not None: rnd = random.rnd - CHKERR( MatSetRandom(self.mat, rnd) ) + CHKERR(MatSetRandom(self.mat, rnd)) def axpy(self, alpha: Scalar, Mat X, structure: Structure = None) -> None: """Perform the matrix summation ``self`` += ɑ·X. @@ -4068,7 +4095,7 @@ cdef class Mat(Object): """ cdef PetscScalar sval = asScalar(alpha) cdef PetscMatStructure flag = matstructure(structure) - CHKERR( MatAXPY(self.mat, sval, X.mat, flag) ) + CHKERR(MatAXPY(self.mat, sval, X.mat, flag)) def aypx(self, alpha: Scalar, Mat X, structure: Structure = None) -> None: """Perform the matrix summation ``self`` = ɑ·``self`` + X. @@ -4091,7 +4118,7 @@ cdef class Mat(Object): """ cdef PetscScalar sval = asScalar(alpha) cdef PetscMatStructure flag = matstructure(structure) - CHKERR( MatAYPX(self.mat, sval, X.mat, flag) ) + CHKERR(MatAYPX(self.mat, sval, X.mat, flag)) # matrix-matrix product @@ -4099,8 +4126,7 @@ cdef class Mat(Object): self, Mat mat, Mat result=None, - fill: float | None = None - ) -> Mat: + fill: float | None = None) -> Mat: """Perform matrix-matrix multiplication C=AB. Neighborwise collective. @@ -4140,15 +4166,14 @@ cdef class Mat(Object): elif result.mat != NULL: reuse = MAT_REUSE_MATRIX if fill is not None: rval = asReal(fill) - CHKERR( MatMatMult(self.mat, mat.mat, reuse, rval, &result.mat) ) + CHKERR(MatMatMult(self.mat, mat.mat, reuse, rval, &result.mat)) return result def matTransposeMult( self, Mat mat, Mat result=None, - fill: float | None = None - ): + fill: float | None = None) -> Mat: """Perform matrix-matrix multiplication C=ABᵀ. Neighborwise collective. @@ -4188,15 +4213,14 @@ cdef class Mat(Object): elif result.mat != NULL: reuse = MAT_REUSE_MATRIX if fill is not None: rval = asReal(fill) - CHKERR( MatMatTransposeMult(self.mat, mat.mat, reuse, rval, &result.mat) ) + CHKERR(MatMatTransposeMult(self.mat, mat.mat, reuse, rval, &result.mat)) return result def transposeMatMult( self, Mat mat, Mat result=None, - fill: float | None = None - ): + fill: float | None = None) -> Mat: """Perform matrix-matrix multiplication C=AᵀB. Neighborwise collective. @@ -4236,15 +4260,14 @@ cdef class Mat(Object): elif result.mat != NULL: reuse = MAT_REUSE_MATRIX if fill is not None: rval = asReal(fill) - CHKERR( MatTransposeMatMult(self.mat, mat.mat, reuse, rval, &result.mat) ) + CHKERR(MatTransposeMatMult(self.mat, mat.mat, reuse, rval, &result.mat)) return result def ptap( self, Mat P, Mat result=None, - fill: float | None = None - ) -> Mat: + fill: float | None = None) -> Mat: """Create the matrix product C = PᵀAP. Neighborwise collective. @@ -4288,15 +4311,14 @@ cdef class Mat(Object): elif result.mat != NULL: reuse = MAT_REUSE_MATRIX if fill is not None: cfill = asReal(fill) - CHKERR( MatPtAP(self.mat, P.mat, reuse, cfill, &result.mat) ) + CHKERR(MatPtAP(self.mat, P.mat, reuse, cfill, &result.mat)) return result def rart( self, Mat R, Mat result=None, - fill: float | None = None - ) -> Mat: + fill: float | None = None) -> Mat: """Create the matrix product C = RARᵀ. Neighborwise collective.
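A tiny Galerkin projection with `ptap`; the 4×2 interpolation `P` is an illustrative piecewise-constant aggregation:

    from petsc4py import PETSc

    A = PETSc.Mat().createAIJ(4, comm=PETSc.COMM_SELF)
    A.setUp()
    for i in range(4):
        A.setValue(i, i, 2.0)
    A.assemble()

    P = PETSc.Mat().createAIJ([4, 2], comm=PETSc.COMM_SELF)
    P.setUp()
    for i in range(4):
        P.setValue(i, i // 2, 1.0)   # two fine dofs per coarse dof
    P.assemble()

    C = A.ptap(P)        # coarse operator C = PᵀAP, here 2×2
    C = A.ptap(P, C)     # reuse C when A's values change, pattern fixed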
@@ -4336,7 +4358,7 @@ cdef class Mat(Object): elif result.mat != NULL: reuse = MAT_REUSE_MATRIX if fill is not None: cfill = asReal(fill) - CHKERR( MatRARt(self.mat, R.mat, reuse, cfill, &result.mat) ) + CHKERR(MatRARt(self.mat, R.mat, reuse, cfill, &result.mat)) return result def matMatMult( @@ -4344,8 +4366,7 @@ cdef class Mat(Object): Mat B, Mat C, Mat result=None, - fill: float | None = None - ) -> Mat: + fill: float | None = None) -> Mat: """Perform matrix-matrix-matrix multiplication D=ABC. Neighborwise collective. @@ -4382,16 +4403,17 @@ cdef class Mat(Object): elif result.mat != NULL: reuse = MAT_REUSE_MATRIX if fill is not None: cfill = asReal(fill) - CHKERR( MatMatMatMult(self.mat, B.mat, C.mat, reuse, cfill, &result.mat) ) + CHKERR(MatMatMatMult(self.mat, B.mat, C.mat, reuse, cfill, &result.mat)) return result def kron( self, Mat mat, - Mat result=None - ) -> Mat: + Mat result=None) -> Mat: """Compute C, the Kronecker product of A and B. + Collective. + Parameters ---------- mat @@ -4416,12 +4438,14 @@ cdef class Mat(Object): result = Mat() elif result.mat != NULL: reuse = MAT_REUSE_MATRIX - CHKERR( MatSeqAIJKron(self.mat, mat.mat, reuse, &result.mat) ) + CHKERR(MatSeqAIJKron(self.mat, mat.mat, reuse, &result.mat)) return result def bindToCPU(self, flg: bool) -> None: """Mark a matrix to temporarily stay on the CPU. + Collective. + Once marked, perform computations on the CPU. Parameters @@ -4435,18 +4459,20 @@ cdef class Mat(Object): """ cdef PetscBool bindFlg = asBool(flg) - CHKERR( MatBindToCPU(self.mat, bindFlg) ) + CHKERR(MatBindToCPU(self.mat, bindFlg)) def boundToCPU(self) -> bool: """Query if a matrix is bound to the CPU. + Not collective. + See Also -------- petsc.MatBoundToCPU """ cdef PetscBool flg = PETSC_TRUE - CHKERR( MatBoundToCPU(self.mat, &flg) ) + CHKERR(MatBoundToCPU(self.mat, &flg)) return toBool(flg) # XXX factorization @@ -4476,15 +4502,14 @@ cdef class Mat(Object): cdef PetscMatOrderingType cval = NULL ord_type = str2bytes(ord_type, &cval) cdef IS rp = IS(), cp = IS() - CHKERR( MatGetOrdering(self.mat, cval, &rp.iset, &cp.iset) ) + CHKERR(MatGetOrdering(self.mat, cval, &rp.iset, &cp.iset)) return (rp, cp) def reorderForNonzeroDiagonal( self, IS isrow, IS iscol, - atol: float = 0 - ) -> None: + atol: float = 0) -> None: """Change a matrix ordering to remove zeros from the diagonal. Collective. @@ -4506,14 +4531,13 @@ cdef class Mat(Object): """ cdef PetscReal rval = asReal(atol) cdef PetscIS rp = isrow.iset, cp = iscol.iset - CHKERR( MatReorderForNonzeroDiagonal(self.mat, rval, rp, cp) ) + CHKERR(MatReorderForNonzeroDiagonal(self.mat, rval, rp, cp)) def factorLU( self, IS isrow, IS iscol, - options: dict[str, Any] | None = None, - ) -> None: + options: dict[str, Any] | None = None) -> None: """Perform an in-place LU factorization. Collective. @@ -4537,7 +4561,7 @@ cdef class Mat(Object): """ cdef PetscMatFactorInfo info matfactorinfo(PETSC_FALSE, PETSC_FALSE, options, &info) - CHKERR( MatLUFactor(self.mat, isrow.iset, iscol.iset, &info) ) + CHKERR(MatLUFactor(self.mat, isrow.iset, iscol.iset, &info)) def factorSymbolicLU(self, Mat mat, IS isrow, IS iscol, options=None) -> None: """Not implemented.""" @@ -4551,8 +4575,7 @@ cdef class Mat(Object): self, IS isrow, IS iscol, - options: dict[str, Any] | None = None, - ) -> None: + options: dict[str, Any] | None = None) -> None: """Perform an in-place ILU factorization. Collective. 
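The factorization methods act in place on a sequential matrix; a sketch with illustrative data:

    from petsc4py import PETSc

    A = PETSc.Mat().createAIJ(4, comm=PETSc.COMM_SELF)
    A.setUp()
    for i in range(4):
        A.setValue(i, i, 4.0)
        if i > 0:
            A.setValue(i, i - 1, 1.0)
    A.assemble()

    r, c = A.getOrdering("nd")   # nested-dissection permutations
    A.factorLU(r, c)             # in place: A now stores its LU factors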
@@ -4576,7 +4599,7 @@ cdef class Mat(Object): """ cdef PetscMatFactorInfo info matfactorinfo(PETSC_TRUE, PETSC_FALSE, options, &info) - CHKERR( MatILUFactor(self.mat, isrow.iset, iscol.iset, &info) ) + CHKERR(MatILUFactor(self.mat, isrow.iset, iscol.iset, &info)) def factorSymbolicILU(self, IS isrow, IS iscol, options=None) -> None: """Not implemented.""" @@ -4585,8 +4608,7 @@ cdef class Mat(Object): def factorCholesky( self, IS isperm, - options: dict[str, Any] | None = None, - ) -> None: + options: dict[str, Any] | None = None) -> None: """Perform an in-place Cholesky factorization. Collective. @@ -4606,7 +4628,7 @@ cdef class Mat(Object): """ cdef PetscMatFactorInfo info matfactorinfo(PETSC_FALSE, PETSC_TRUE, options, &info) - CHKERR( MatCholeskyFactor(self.mat, isperm.iset, &info) ) + CHKERR(MatCholeskyFactor(self.mat, isperm.iset, &info)) def factorSymbolicCholesky(self, IS isperm, options=None) -> None: """Not implemented.""" @@ -4619,8 +4641,7 @@ cdef class Mat(Object): def factorICC( self, IS isperm, - options: dict[str, Any] | None = None, - ) -> None: + options: dict[str, Any] | None = None) -> None: """Perform an in-place incomplete Cholesky factorization. Collective. @@ -4640,7 +4661,7 @@ cdef class Mat(Object): """ cdef PetscMatFactorInfo info matfactorinfo(PETSC_TRUE, PETSC_TRUE, options, &info) - CHKERR( MatICCFactor(self.mat, isperm.iset, &info) ) + CHKERR(MatICCFactor(self.mat, isperm.iset, &info)) def factorSymbolicICC(self, IS isperm, options=None) -> None: """Not implemented.""" @@ -4668,7 +4689,7 @@ cdef class Mat(Object): """ cdef PetscInt ival1 = 0, ival2 = 0, ival3 = 0 - CHKERR( MatGetInertia(self.mat, &ival1, &ival2, &ival3) ) + CHKERR(MatGetInertia(self.mat, &ival1, &ival2, &ival3)) return (toInt(ival1), toInt(ival2), toInt(ival3)) def setUnfactored(self) -> None: @@ -4681,14 +4702,14 @@ cdef class Mat(Object): """ - CHKERR( MatSetUnfactored(self.mat) ) + CHKERR(MatSetUnfactored(self.mat)) # IS def setISAllowRepeated(self, allow: bool = True) -> None: """Allow repeated entries in the local to global map. - Logically Collective. + Logically collective. Parameters ---------- @@ -4701,20 +4722,20 @@ cdef class Mat(Object): """ cdef PetscBool callow = asBool(allow) - CHKERR( MatISSetAllowRepeated(self.mat, callow) ) + CHKERR(MatISSetAllowRepeated(self.mat, callow)) def getISAllowRepeated(self) -> bool: """Get the flag for repeated entries in the local to global map. - Not Collective. + Not collective. See Also -------- setISAllowRepeated, petsc.MatISGetAllowRepeated """ - cdef PetscBool callow - CHKERR( MatISGetAllowRepeated(self.mat, &callow) ) + cdef PetscBool callow = PETSC_FALSE + CHKERR(MatISGetAllowRepeated(self.mat, &callow)) return asBool(callow) def fixISLocalEmpty(self, fix: bool = True) -> None: @@ -4734,24 +4755,28 @@ cdef class Mat(Object): """ cdef PetscBool cfix = asBool(fix) - CHKERR( MatISFixLocalEmpty(self.mat, cfix) ) + CHKERR(MatISFixLocalEmpty(self.mat, cfix)) def getISLocalMat(self) -> Mat: """Return the local matrix stored inside a `Type.IS` matrix. + Not collective. + See Also -------- petsc.MatISGetLocalMat """ cdef Mat local = Mat() - CHKERR( MatISGetLocalMat(self.mat, &local.mat) ) - CHKERR( PetscINCREF(local.obj) ) + CHKERR(MatISGetLocalMat(self.mat, &local.mat)) + CHKERR(PetscINCREF(local.obj)) return local def restoreISLocalMat(self, Mat local not None) -> None: """Restore the local matrix obtained with `getISLocalMat`. + Not collective.
+ Parameters ---------- local @@ -4762,11 +4787,13 @@ cdef class Mat(Object): petsc.MatISRestoreLocalMat """ - CHKERR( MatISRestoreLocalMat(self.mat, &local.mat) ) + CHKERR(MatISRestoreLocalMat(self.mat, &local.mat)) def setISLocalMat(self, Mat local not None) -> None: """Set the local matrix stored inside a `Type.IS`. + Not collective. + Parameters ---------- local @@ -4777,15 +4804,16 @@ cdef class Mat(Object): petsc.MatISSetLocalMat """ - CHKERR( MatISSetLocalMat(self.mat, local.mat) ) + CHKERR(MatISSetLocalMat(self.mat, local.mat)) def setISPreallocation( self, nnz: Sequence[int], - onnz: Sequence[int], - ) -> Self: + onnz: Sequence[int]) -> Self: """Preallocate memory for a `Type.IS` parallel matrix. + Collective. + Parameters ---------- nnz @@ -4806,7 +4834,7 @@ cdef class Mat(Object): cdef PetscInt *connz = NULL nnz = iarray_i(nnz, NULL, &cnnz) onnz = iarray_i(onnz, NULL, &connz) - CHKERR( MatISSetPreallocation(self.mat, 0, cnnz, 0, connz) ) + CHKERR(MatISSetPreallocation(self.mat, 0, cnnz, 0, connz)) return self # LRC @@ -4836,14 +4864,14 @@ cdef class Mat(Object): cdef Mat U = Mat() cdef Vec c = Vec() cdef Mat V = Mat() - CHKERR( MatLRCGetMats(self.mat, &A.mat, &U.mat, &c.vec, &V.mat) ) - CHKERR( PetscINCREF(A.obj) ) - CHKERR( PetscINCREF(U.obj) ) - CHKERR( PetscINCREF(c.obj) ) - CHKERR( PetscINCREF(V.obj) ) + CHKERR(MatLRCGetMats(self.mat, &A.mat, &U.mat, &c.vec, &V.mat)) + CHKERR(PetscINCREF(A.obj)) + CHKERR(PetscINCREF(U.obj)) + CHKERR(PetscINCREF(c.obj)) + CHKERR(PetscINCREF(V.obj)) return (A, U, c, V) - def setLRCMats(self, Mat A, Mat U, Vec c=None, Mat V=None): + def setLRCMats(self, Mat A, Mat U, Vec c=None, Mat V=None) -> None: """Set the constituents of a `Type.LRC` matrix. Logically collective. @@ -4868,24 +4896,28 @@ cdef class Mat(Object): cdef PetscMat Amat = A.mat if A is not None else NULL cdef PetscVec cvec = c.vec if c is not None else NULL cdef PetscMat Vmat = V.mat if V is not None else NULL - CHKERR( MatLRCSetMats(self.mat, Amat, U.mat, cvec, Vmat) ) + CHKERR(MatLRCSetMats(self.mat, Amat, U.mat, cvec, Vmat)) # H2Opus def H2OpusOrthogonalize(self) -> Self: """Orthogonalize the basis tree of a hierarchical matrix. + Collective. + See Also -------- petsc.MatH2OpusOrthogonalize """ - CHKERR( MatH2OpusOrthogonalize(self.mat) ) + CHKERR(MatH2OpusOrthogonalize(self.mat)) return self - def H2OpusCompress(self, tol: float): + def H2OpusCompress(self, tol: float) -> Self: """Compress a hierarchical matrix. + Collective. + Parameters ---------- tol @@ -4897,12 +4929,14 @@ cdef class Mat(Object): """ cdef PetscReal _tol = asReal(tol) - CHKERR( MatH2OpusCompress(self.mat, _tol) ) + CHKERR(MatH2OpusCompress(self.mat, _tol)) return self - def H2OpusLowRankUpdate(self, Mat U, Mat V=None, s: float = 1.0): + def H2OpusLowRankUpdate(self, Mat U, Mat V=None, s: float = 1.0) -> Self: """Perform a low-rank update of the form ``self`` += sUVᵀ. + Collective. + Parameters ---------- U @@ -4921,7 +4955,7 @@ cdef class Mat(Object): cdef PetscMat vmat = NULL if V is not None: vmat = V.mat - CHKERR( MatH2OpusLowRankUpdate(self.mat, U.mat, vmat, _s) ) + CHKERR(MatH2OpusLowRankUpdate(self.mat, U.mat, vmat, _s)) return self # MUMPS @@ -4945,7 +4979,7 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscInt _ival = asInt(ival) - CHKERR( MatMumpsSetIcntl(self.mat, _icntl, _ival) ); + CHKERR(MatMumpsSetIcntl(self.mat, _icntl, _ival)) def getMumpsIcntl(self, icntl: int) -> int: """Return the MUMPS parameter, ``ICNTL[icntl]``. 
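The `setMumpsIcntl`/`getMumpsIcntl` pair above acts on a factored matrix, typically the one owned by an LU preconditioner. A hedged sketch, assuming PETSc was configured with MUMPS and that `A`, `b`, `x` are a previously assembled linear system:

```python
ksp = PETSc.KSP().create(A.getComm())
ksp.setOperators(A)
pc = ksp.getPC()
pc.setType(PETSc.PC.Type.LU)
pc.setFactorSolverType(PETSc.Mat.SolverType.MUMPS)
pc.setFactorSetUpSolverType()   # create the factor matrix up front
F = pc.getFactorMatrix()
F.setMumpsIcntl(14, 30)         # ICNTL(14): workspace increase, in percent
ksp.solve(b, x)
print(F.getMumpsIcntl(14))
```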
@@ -4959,10 +4993,10 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscInt ival = 0 - CHKERR( MatMumpsGetIcntl(self.mat, _icntl, &ival) ); + CHKERR(MatMumpsGetIcntl(self.mat, _icntl, &ival)) return toInt(ival) - def setMumpsCntl(self, icntl: int, val: float): + def setMumpsCntl(self, icntl: int, val: float) -> None: """Set a MUMPS parameter, ``CNTL[icntl] = val``. Logically collective. @@ -4981,7 +5015,7 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscReal _val = asReal(val) - CHKERR( MatMumpsSetCntl(self.mat, _icntl, _val) ); + CHKERR(MatMumpsSetCntl(self.mat, _icntl, _val)) def getMumpsCntl(self, icntl: int) -> float: """Return the MUMPS parameter, ``CNTL[icntl]``. @@ -4995,7 +5029,7 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscReal val = 0 - CHKERR( MatMumpsGetCntl(self.mat, _icntl, &val) ); + CHKERR(MatMumpsGetCntl(self.mat, _icntl, &val)) return toReal(val) def getMumpsInfo(self, icntl: int) -> int: @@ -5015,7 +5049,7 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscInt ival = 0 - CHKERR( MatMumpsGetInfo(self.mat, _icntl, &ival) ); + CHKERR(MatMumpsGetInfo(self.mat, _icntl, &ival)) return toInt(ival) def getMumpsInfog(self, icntl: int) -> int: @@ -5035,7 +5069,7 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscInt ival = 0 - CHKERR( MatMumpsGetInfog(self.mat, _icntl, &ival) ); + CHKERR(MatMumpsGetInfog(self.mat, _icntl, &ival)) return toInt(ival) def getMumpsRinfo(self, icntl: int) -> float: @@ -5055,7 +5089,7 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscReal val = 0 - CHKERR( MatMumpsGetRinfo(self.mat, _icntl, &val) ); + CHKERR(MatMumpsGetRinfo(self.mat, _icntl, &val)) return toReal(val) def getMumpsRinfog(self, icntl: int) -> float: @@ -5075,7 +5109,7 @@ cdef class Mat(Object): """ cdef PetscInt _icntl = asInt(icntl) cdef PetscReal val = 0 - CHKERR( MatMumpsGetRinfog(self.mat, _icntl, &val) ); + CHKERR(MatMumpsGetRinfog(self.mat, _icntl, &val)) return toReal(val) # solve @@ -5097,7 +5131,7 @@ cdef class Mat(Object): petsc.MatForwardSolve """ - CHKERR( MatForwardSolve(self.mat, b.vec, x.vec) ) + CHKERR(MatForwardSolve(self.mat, b.vec, x.vec)) def solveBackward(self, Vec b, Vec x) -> None: """Solve Ux=b, given a factored matrix A=LU. @@ -5116,7 +5150,7 @@ cdef class Mat(Object): petsc.MatBackwardSolve """ - CHKERR( MatBackwardSolve(self.mat, b.vec, x.vec) ) + CHKERR(MatBackwardSolve(self.mat, b.vec, x.vec)) def solve(self, Vec b, Vec x) -> None: """Solve Ax=b, given a factored matrix. @@ -5139,7 +5173,7 @@ cdef class Mat(Object): KSP.create, solveTranspose, petsc.MatSolve """ - CHKERR(MatSolve(self.mat, b.vec, x.vec) ) + CHKERR(MatSolve(self.mat, b.vec, x.vec)) def solveTranspose(self, Vec b, Vec x) -> None: """Solve Aᵀx=b, given a factored matrix. @@ -5160,7 +5194,7 @@ cdef class Mat(Object): KSP.create, petsc.MatSolve, petsc.MatSolveTranspose """ - CHKERR( MatSolveTranspose(self.mat, b.vec, x.vec) ) + CHKERR(MatSolveTranspose(self.mat, b.vec, x.vec)) def solveAdd(self, Vec b, Vec y, Vec x) -> None: """Solve x=y+A⁻¹b, given a factored matrix. @@ -5183,7 +5217,7 @@ cdef class Mat(Object): KSP.create, petsc.MatSolve, petsc.MatSolveAdd """ - CHKERR( MatSolveAdd(self.mat, b.vec, y.vec, x.vec) ) + CHKERR(MatSolveAdd(self.mat, b.vec, y.vec, x.vec)) def solveTransposeAdd(self, Vec b, Vec y, Vec x) -> None: """Solve x=y+A⁻ᵀb, given a factored matrix. 
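The solve family above requires `self` to be factored already; the `*Add` variants fold an extra vector into the result. A short sketch under that assumption, reusing a matrix `A` factored in place as in the earlier example:

```python
x, b = A.createVecs()     # vectors with compatible layouts
b.set(1.0)
A.solve(b, x)             # MatSolve:          x = A⁻¹ b
A.solveTranspose(b, x)    # MatSolveTranspose: x = A⁻ᵀ b
y = b.duplicate()
y.set(0.5)
A.solveAdd(b, y, x)       # MatSolveAdd:       x = y + A⁻¹ b
```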
@@ -5206,7 +5240,7 @@ cdef class Mat(Object): KSP.create, petsc.MatSolve, petsc.MatSolveTransposeAdd """ - CHKERR( MatSolveTransposeAdd(self.mat, b.vec, y.vec, x.vec) ) + CHKERR(MatSolveTransposeAdd(self.mat, b.vec, y.vec, x.vec)) def matSolve(self, Mat B, Mat X) -> None: """Solve AX=B, given a factored matrix A. @@ -5226,7 +5260,7 @@ cdef class Mat(Object): KSP.create, petsc.MatMatSolve """ - CHKERR( MatMatSolve(self.mat, B.mat, X.mat) ) + CHKERR(MatMatSolve(self.mat, B.mat, X.mat)) # dense matrices @@ -5246,7 +5280,7 @@ cdef class Mat(Object): """ cdef PetscInt _ilda = asInt(lda) - CHKERR( MatDenseSetLDA(self.mat, _ilda) ) + CHKERR(MatDenseSetLDA(self.mat, _ilda)) def getDenseLDA(self) -> int: """Return the leading dimension of the array used by the dense matrix. @@ -5259,7 +5293,7 @@ cdef class Mat(Object): """ cdef PetscInt lda=0 - CHKERR( MatDenseGetLDA(self.mat, &lda) ) + CHKERR(MatDenseGetLDA(self.mat, &lda)) return toInt(lda) def getDenseArray(self, readonly: bool = False) -> ArrayScalar: @@ -5279,25 +5313,25 @@ cdef class Mat(Object): """ cdef PetscInt m=0, N=0, lda=0 cdef PetscScalar *data = NULL - CHKERR( MatGetLocalSize(self.mat, &m, NULL) ) - CHKERR( MatGetSize(self.mat, NULL, &N) ) - CHKERR( MatDenseGetLDA(self.mat, &lda) ) + CHKERR(MatGetLocalSize(self.mat, &m, NULL)) + CHKERR(MatGetSize(self.mat, NULL, &N)) + CHKERR(MatDenseGetLDA(self.mat, &lda)) if readonly: - CHKERR( MatDenseGetArrayRead(self.mat, &data) ) + CHKERR(MatDenseGetArrayRead(self.mat, &data)) else: - CHKERR( MatDenseGetArray(self.mat, &data) ) + CHKERR(MatDenseGetArray(self.mat, &data)) cdef int typenum = NPY_PETSC_SCALAR cdef int itemsize = sizeof(PetscScalar) cdef int flags = NPY_ARRAY_FARRAY cdef npy_intp dims[2], strides[2] - dims[0] = m; strides[0] = sizeof(PetscScalar); - dims[1] = N; strides[1] = (lda*sizeof(PetscScalar)); + dims[0] = m; strides[0] = sizeof(PetscScalar) + dims[1] = N; strides[1] = (lda*sizeof(PetscScalar)) array = PyArray_New(ndarray, 2, dims, typenum, strides, data, itemsize, flags, NULL) if readonly: - CHKERR( MatDenseRestoreArrayRead(self.mat, &data) ) + CHKERR(MatDenseRestoreArrayRead(self.mat, &data)) else: - CHKERR( MatDenseRestoreArray(self.mat, &data) ) + CHKERR(MatDenseRestoreArray(self.mat, &data)) return array def getDenseLocalMatrix(self) -> Mat: @@ -5311,8 +5345,8 @@ cdef class Mat(Object): """ cdef Mat mat = type(self)() - CHKERR( MatDenseGetLocalMatrix(self.mat, &mat.mat) ) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(MatDenseGetLocalMatrix(self.mat, &mat.mat)) + CHKERR(PetscINCREF(mat.obj)) return mat def getDenseColumnVec(self, i: int, mode: AccessModeSpec = 'rw') -> Vec: @@ -5339,12 +5373,12 @@ cdef class Mat(Object): cdef Vec v = Vec() cdef PetscInt _i = asInt(i) if mode == 'rw': - CHKERR( MatDenseGetColumnVec(self.mat, _i, &v.vec) ) + CHKERR(MatDenseGetColumnVec(self.mat, _i, &v.vec)) elif mode == 'r': - CHKERR( MatDenseGetColumnVecRead(self.mat, _i, &v.vec) ) + CHKERR(MatDenseGetColumnVecRead(self.mat, _i, &v.vec)) else: - CHKERR( MatDenseGetColumnVecWrite(self.mat, _i, &v.vec) ) - CHKERR( PetscINCREF(v.obj) ) + CHKERR(MatDenseGetColumnVecWrite(self.mat, _i, &v.vec)) + CHKERR(PetscINCREF(v.obj)) return v def restoreDenseColumnVec(self, i: int, mode: AccessModeSpec = 'rw') -> None: @@ -5367,11 +5401,11 @@ cdef class Mat(Object): """ cdef PetscInt _i = asInt(i) if mode == 'rw': - CHKERR( MatDenseRestoreColumnVec(self.mat, _i, NULL) ) + CHKERR(MatDenseRestoreColumnVec(self.mat, _i, NULL)) elif mode == 'r': - CHKERR( MatDenseRestoreColumnVecRead(self.mat, _i, NULL) ) + 
CHKERR(MatDenseRestoreColumnVecRead(self.mat, _i, NULL)) else: - CHKERR( MatDenseRestoreColumnVecWrite(self.mat, _i, NULL) ) + CHKERR(MatDenseRestoreColumnVecWrite(self.mat, _i, NULL)) # Nest @@ -5385,8 +5419,8 @@ cdef class Mat(Object): petsc.MatNestGetSize """ - cdef PetscInt nrows, ncols - CHKERR( MatNestGetSize(self.mat, &nrows, &ncols) ) + cdef PetscInt nrows = 0, ncols = 0 + CHKERR(MatNestGetSize(self.mat, &nrows, &ncols)) return toInt(nrows), toInt(ncols) def getNestISs(self) -> tuple[list[IS], list[IS]]: @@ -5399,15 +5433,15 @@ cdef class Mat(Object): petsc.MatNestGetISs """ - cdef PetscInt i, nrows =0, ncols = 0 + cdef PetscInt nrows = 0, ncols = 0 cdef PetscIS *cisrows = NULL cdef PetscIS *ciscols = NULL - CHKERR( MatNestGetSize(self.mat, &nrows, &ncols) ) - cdef object tmpr = oarray_p(empty_p(nrows), NULL, &cisrows) - cdef object tmpc = oarray_p(empty_p(ncols), NULL, &ciscols) - CHKERR( MatNestGetISs(self.mat, cisrows, ciscols) ) - cdef object isetsrows = [ref_IS(cisrows[i]) for i from 0 <= i < nrows] - cdef object isetscols = [ref_IS(ciscols[i]) for i from 0 <= i < ncols] + CHKERR(MatNestGetSize(self.mat, &nrows, &ncols)) + cdef object unusedr = oarray_p(empty_p(nrows), NULL, &cisrows) + cdef object unusedc = oarray_p(empty_p(ncols), NULL, &ciscols) + CHKERR(MatNestGetISs(self.mat, cisrows, ciscols)) + isetsrows = [ref_IS(cisrows[i]) for i from 0 <= i < nrows] + isetscols = [ref_IS(ciscols[i]) for i from 0 <= i < ncols] return isetsrows, isetscols def getNestLocalISs(self) -> tuple[list[IS], list[IS]]: @@ -5420,15 +5454,15 @@ cdef class Mat(Object): petsc.MatNestGetLocalISs """ - cdef PetscInt i, nrows =0, ncols = 0 + cdef PetscInt nrows = 0, ncols = 0 cdef PetscIS *cisrows = NULL cdef PetscIS *ciscols = NULL - CHKERR( MatNestGetSize(self.mat, &nrows, &ncols) ) - cdef object tmpr = oarray_p(empty_p(nrows), NULL, &cisrows) - cdef object tmpc = oarray_p(empty_p(ncols), NULL, &ciscols) - CHKERR( MatNestGetLocalISs(self.mat, cisrows, ciscols) ) - cdef object isetsrows = [ref_IS(cisrows[i]) for i from 0 <= i < nrows] - cdef object isetscols = [ref_IS(ciscols[i]) for i from 0 <= i < ncols] + CHKERR(MatNestGetSize(self.mat, &nrows, &ncols)) + cdef object unusedr = oarray_p(empty_p(nrows), NULL, &cisrows) + cdef object unusedc = oarray_p(empty_p(ncols), NULL, &ciscols) + CHKERR(MatNestGetLocalISs(self.mat, cisrows, ciscols)) + isetsrows = [ref_IS(cisrows[i]) for i from 0 <= i < nrows] + isetscols = [ref_IS(ciscols[i]) for i from 0 <= i < ncols] return isetsrows, isetscols def getNestSubMatrix(self, i: int, j: int) -> Mat: @@ -5451,8 +5485,8 @@ cdef class Mat(Object): cdef Mat submat = Mat() cdef PetscInt idxm = asInt(i) cdef PetscInt jdxm = asInt(j) - CHKERR( MatNestGetSubMat(self.mat, idxm, jdxm, &submat.mat) ) - CHKERR( PetscINCREF(submat.obj) ) + CHKERR(MatNestGetSubMat(self.mat, idxm, jdxm, &submat.mat)) + CHKERR(PetscINCREF(submat.obj)) return submat # DM @@ -5468,10 +5502,10 @@ cdef class Mat(Object): """ cdef PetscDM newdm = NULL - CHKERR( MatGetDM(self.mat, &newdm) ) + CHKERR(MatGetDM(self.mat, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) return dm def setDM(self, DM dm) -> None: @@ -5489,7 +5523,7 @@ cdef class Mat(Object): petsc.MatSetDM """ - CHKERR( MatSetDM(self.mat, dm.dm) ) + CHKERR(MatSetDM(self.mat, dm.dm)) # backward compatibility @@ -5501,6 +5535,7 @@ cdef class Mat(Object): """Matrix local and global sizes.""" def __get__(self) -> tuple[tuple[int, int], tuple[int, int]]: return self.getSizes() 
+ def __set__(self, value): self.setSizes(value) @@ -5584,25 +5619,25 @@ cdef class Mat(Object): # By restoring now, we guarantee the sanity of the ObjectState if mode == 'w': if hostmem: - CHKERR( MatDenseGetArrayWrite(self.mat, &a) ) - CHKERR( MatDenseRestoreArrayWrite(self.mat, NULL) ) + CHKERR(MatDenseGetArrayWrite(self.mat, &a)) + CHKERR(MatDenseRestoreArrayWrite(self.mat, NULL)) else: - CHKERR( MatDenseCUDAGetArrayWrite(self.mat, &a) ) - CHKERR( MatDenseCUDARestoreArrayWrite(self.mat, NULL) ) + CHKERR(MatDenseCUDAGetArrayWrite(self.mat, &a)) + CHKERR(MatDenseCUDARestoreArrayWrite(self.mat, NULL)) elif mode == 'r': if hostmem: - CHKERR( MatDenseGetArrayRead(self.mat, &a) ) - CHKERR( MatDenseRestoreArrayRead(self.mat, NULL) ) + CHKERR(MatDenseGetArrayRead(self.mat, &a)) + CHKERR(MatDenseRestoreArrayRead(self.mat, NULL)) else: - CHKERR( MatDenseCUDAGetArrayRead(self.mat, &a) ) - CHKERR( MatDenseCUDARestoreArrayRead(self.mat, NULL) ) + CHKERR(MatDenseCUDAGetArrayRead(self.mat, &a)) + CHKERR(MatDenseCUDARestoreArrayRead(self.mat, NULL)) else: if hostmem: - CHKERR( MatDenseGetArray(self.mat, &a) ) - CHKERR( MatDenseRestoreArray(self.mat, NULL) ) + CHKERR(MatDenseGetArray(self.mat, &a)) + CHKERR(MatDenseRestoreArray(self.mat, NULL)) else: - CHKERR( MatDenseCUDAGetArray(self.mat, &a) ) - CHKERR( MatDenseCUDARestoreArray(self.mat, NULL) ) + CHKERR(MatDenseCUDAGetArray(self.mat, &a)) + CHKERR(MatDenseCUDARestoreArray(self.mat, NULL)) dl_tensor.data = a cdef DLContext* ctx = &dl_tensor.ctx @@ -5627,7 +5662,7 @@ cdef class Mat(Object): raise ValueError('Unsupported PetscScalar type') dtype.lanes = 1 dlm_tensor.manager_ctx = self.mat - CHKERR( PetscObjectReference(self.mat) ) + CHKERR(PetscObjectReference(self.mat)) dlm_tensor.manager_deleter = manager_deleter dlm_tensor.del_obj = PetscDEALLOC return PyCapsule_New(dlm_tensor, 'dltensor', pycapsule_deleter) @@ -5670,7 +5705,7 @@ cdef class NullSpace(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( MatNullSpaceView(self.nsp, vwr) ) + CHKERR(MatNullSpaceView(self.nsp, vwr)) def destroy(self) -> Self: """Destroy the null space. @@ -5682,15 +5717,14 @@ cdef class NullSpace(Object): create, petsc.MatNullSpaceDestroy """ - CHKERR( MatNullSpaceDestroy(&self.nsp) ) + CHKERR(MatNullSpaceDestroy(&self.nsp)) return self def create( self, constant: bool = False, vectors: Sequence[Vec] = (), - comm=None - ) -> Self: + comm=None) -> Self: """Create the null space. Collective. @@ -5714,12 +5748,12 @@ cdef class NullSpace(Object): if constant: has_const = PETSC_TRUE cdef PetscInt i = 0, nv = len(vectors) cdef PetscVec *v = NULL - cdef object tmp2 = oarray_p(empty_p(nv), NULL, &v) + cdef object unused2 = oarray_p(empty_p(nv), NULL, &v) for i from 0 <= i < nv: v[i] = ((vectors[i])).vec cdef PetscNullSpace newnsp = NULL - CHKERR( MatNullSpaceCreate(ccomm, has_const, nv, v, &newnsp) ) - CHKERR( PetscCLEAR(self.obj) ); self.nsp = newnsp + CHKERR(MatNullSpaceCreate(ccomm, has_const, nv, v, &newnsp)) + CHKERR(PetscCLEAR(self.obj)); self.nsp = newnsp return self def createRigidBody(self, Vec coords) -> Self: @@ -5739,16 +5773,15 @@ cdef class NullSpace(Object): """ cdef PetscNullSpace newnsp = NULL - CHKERR( MatNullSpaceCreateRigidBody(coords.vec, &newnsp) ) - CHKERR( PetscCLEAR(self.obj) ); self.nsp = newnsp + CHKERR(MatNullSpaceCreateRigidBody(coords.vec, &newnsp)) + CHKERR(PetscCLEAR(self.obj)); self.nsp = newnsp return self def setFunction( self, function: MatNullFunction, args: tuple[Any, ...] 
| None = None, - kargs: dict[str, Any] | None = None, - ) -> None: + kargs: dict[str, Any] | None = None) -> None: """Set the callback to remove the nullspace. Logically collective. @@ -5768,13 +5801,13 @@ cdef class NullSpace(Object): """ if function is not None: - CHKERR( MatNullSpaceSetFunction( - self.nsp, NullSpace_Function, NULL) ) + CHKERR(MatNullSpaceSetFunction( + self.nsp, NullSpace_Function, NULL)) if args is None: args = () if kargs is None: kargs = {} self.set_attr('__function__', (function, args, kargs)) else: - CHKERR( MatNullSpaceSetFunction(self.nsp, NULL, NULL) ) + CHKERR(MatNullSpaceSetFunction(self.nsp, NULL, NULL)) self.set_attr('__function__', None) # @@ -5789,7 +5822,7 @@ cdef class NullSpace(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( MatNullSpaceGetVecs(self.nsp, &flag, NULL, NULL) ) + CHKERR(MatNullSpaceGetVecs(self.nsp, &flag, NULL, NULL)) return toBool(flag) def getVecs(self) -> list[Vec]: @@ -5804,13 +5837,13 @@ cdef class NullSpace(Object): """ cdef PetscInt i = 0, nv = 0 cdef const PetscVec *v = NULL - CHKERR( MatNullSpaceGetVecs(self.nsp, NULL, &nv, &v) ) + CHKERR(MatNullSpaceGetVecs(self.nsp, NULL, &nv, &v)) cdef Vec vec = None cdef list vectors = [] for i from 0 <= i < nv: vec = Vec() vec.vec = v[i] - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(PetscINCREF(vec.obj)) vectors.append(vec) return vectors @@ -5843,7 +5876,7 @@ cdef class NullSpace(Object): petsc.MatNullSpaceRemove """ - CHKERR( MatNullSpaceRemove(self.nsp, vec.vec) ) + CHKERR(MatNullSpaceRemove(self.nsp, vec.vec)) def test(self, Mat mat) -> bool: """Return if the claimed null space is valid for a matrix. @@ -5861,7 +5894,7 @@ cdef class NullSpace(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( MatNullSpaceTest(self.nsp, mat.mat, &flag) ) + CHKERR(MatNullSpaceTest(self.nsp, mat.mat, &flag)) return toBool(flag) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/MatPartitioning.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/MatPartitioning.pyx index 3bbae811c3c..5b9ef01ec1a 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/MatPartitioning.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/MatPartitioning.pyx @@ -1,6 +1,7 @@ # -------------------------------------------------------------------- class MatPartitioningType(object): + """The partitioning types.""" PARTITIONINGCURRENT = S_(MATPARTITIONINGCURRENT) PARTITIONINGAVERAGE = S_(MATPARTITIONINGAVERAGE) PARTITIONINGSQUARE = S_(MATPARTITIONINGSQUARE) @@ -12,6 +13,7 @@ class MatPartitioningType(object): # -------------------------------------------------------------------- + cdef class MatPartitioning(Object): """Object for managing the partitioning of a matrix or graph.""" @@ -42,7 +44,7 @@ cdef class MatPartitioning(Object): assert self.obj != NULL cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( MatPartitioningView(self.part, vwr) ) + CHKERR(MatPartitioningView(self.part, vwr)) def destroy(self) -> Self: """Destroy the partitioning context. 
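A sketch of the `NullSpace` methods above: declare the constant null space of a singular operator, attach it, and validate it. Assumes `A` is an assembled matrix whose null space really is the constant vector (for example a Neumann Laplacian); `Mat.setNullSpace` belongs to the wider `Mat` API rather than to these hunks:

```python
nsp = PETSc.NullSpace().create(constant=True, comm=A.getComm())
A.setNullSpace(nsp)    # consulted by KSP during solves
assert nsp.test(A)     # wraps MatNullSpaceTest
r = A.createVecs()[0]
r.setRandom()
nsp.remove(r)          # project out the constant component of r
```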
@@ -54,7 +56,7 @@ cdef class MatPartitioning(Object): create, petsc.MatPartitioningDestroy """ - CHKERR( MatPartitioningDestroy(&self.part) ) + CHKERR(MatPartitioningDestroy(&self.part)) return self def create(self, comm: Comm | None = None) -> Self: @@ -73,7 +75,7 @@ cdef class MatPartitioning(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) - CHKERR( MatPartitioningCreate(ccomm, &self.part) ) + CHKERR(MatPartitioningCreate(ccomm, &self.part)) return self def setType(self, matpartitioning_type: Type | str) -> None: @@ -93,7 +95,7 @@ cdef class MatPartitioning(Object): """ cdef PetscMatPartitioningType cval = NULL matpartitioning_type = str2bytes(matpartitioning_type, &cval) - CHKERR( MatPartitioningSetType(self.part, cval) ) + CHKERR(MatPartitioningSetType(self.part, cval)) def getType(self) -> str: """Return the partitioning method. @@ -106,7 +108,7 @@ cdef class MatPartitioning(Object): """ cdef PetscMatPartitioningType cval = NULL - CHKERR( MatPartitioningGetType(self.part, &cval) ) + CHKERR(MatPartitioningGetType(self.part, &cval)) return bytes2str(cval) def setFromOptions(self) -> None: @@ -119,7 +121,7 @@ cdef class MatPartitioning(Object): """ - CHKERR( MatPartitioningSetFromOptions(self.part) ) + CHKERR(MatPartitioningSetFromOptions(self.part)) def setAdjacency(self, Mat adj) -> None: """Set the adjacency graph (matrix) of the thing to be partitioned. @@ -137,7 +139,7 @@ cdef class MatPartitioning(Object): """ - CHKERR( MatPartitioningSetAdjacency(self.part, adj.mat) ) + CHKERR(MatPartitioningSetAdjacency(self.part, adj.mat)) def apply(self, IS partitioning) -> None: """Return a partitioning for the graph represented by a sparse matrix. @@ -152,7 +154,7 @@ cdef class MatPartitioning(Object): """ - CHKERR( MatPartitioningApply(self.part, &partitioning.iset) ) + CHKERR(MatPartitioningApply(self.part, &partitioning.iset)) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Object.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Object.pyx index 6f9d42a5d70..b2c0cd0b088 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Object.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Object.pyx @@ -1,7 +1,13 @@ # -------------------------------------------------------------------- cdef class Object: + """Base class wrapping a PETSc object. + + See Also + -------- + petsc.PetscObject + + """ # --- special methods --- def __cinit__(self): @@ -9,7 +15,7 @@ cdef class Object: self.obj = &self.oval def __dealloc__(self): - CHKERR( PetscDEALLOC(&self.obj[0]) ) + CHKERR(PetscDEALLOC(&self.obj[0])) self.obj = NULL def __richcmp__(self, other, int op): @@ -27,7 +33,7 @@ cdef class Object: cdef Object obj = type(self)() cdef PetscObject o = self.obj[0] if o != NULL: - CHKERR( PetscObjectReference(o) ) + CHKERR(PetscObjectReference(o)) obj.obj[0] = o return obj @@ -53,207 +59,481 @@ cdef class Object: # - def view(self, Viewer viewer=None): + def view(self, Viewer viewer=None) -> None: + """Display the object. + + Collective. + + Parameters + ---------- + viewer + A `Viewer` instance or `None` for the default viewer. + + See Also + -------- + petsc.PetscObjectView + + """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscObjectView(self.obj[0], vwr) ) + CHKERR(PetscObjectView(self.obj[0], vwr)) + + def destroy(self) -> Self: + """Destroy the object.
- def destroy(self): - CHKERR( PetscObjectDestroy(&self.obj[0]) ) + Collective. + + See Also + -------- + petsc.PetscObjectDestroy + + """ + CHKERR(PetscObjectDestroy(&self.obj[0])) return self - def getType(self): + def getType(self) -> str: + """Return the object type name. + + Not collective. + + See Also + -------- + petsc.PetscObjectGetType + + """ cdef const char *cval = NULL - CHKERR( PetscObjectGetType(self.obj[0], &cval) ) + CHKERR(PetscObjectGetType(self.obj[0], &cval)) return bytes2str(cval) # - def setOptionsPrefix(self, prefix): + def setOptionsPrefix(self, prefix: str | None) -> None: + """Set the prefix used for searching for options in the database. + + Logically collective. + + See Also + -------- + petsc_options, getOptionsPrefix, petsc.PetscObjectSetOptionsPrefix + + """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( PetscObjectSetOptionsPrefix(self.obj[0], cval) ) + CHKERR(PetscObjectSetOptionsPrefix(self.obj[0], cval)) + + def getOptionsPrefix(self) -> str: + """Return the prefix used for searching for options in the database. + + Not collective. + + See Also + -------- + petsc_options, setOptionsPrefix, petsc.PetscObjectGetOptionsPrefix - def getOptionsPrefix(self): + """ cdef const char *cval = NULL - CHKERR( PetscObjectGetOptionsPrefix(self.obj[0], &cval) ) + CHKERR(PetscObjectGetOptionsPrefix(self.obj[0], &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix): + def appendOptionsPrefix(self, prefix: str | None) -> None: + """Append to the prefix used for searching for options in the database. + + Logically collective. + + See Also + -------- + petsc_options, setOptionsPrefix, petsc.PetscObjectAppendOptionsPrefix + + """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( PetscObjectAppendOptionsPrefix(self.obj[0], cval) ) + CHKERR(PetscObjectAppendOptionsPrefix(self.obj[0], cval)) + + def setFromOptions(self) -> None: + """Configure the object from the options database. + + Collective. - def setFromOptions(self): - CHKERR( PetscObjectSetFromOptions(self.obj[0]) ) + See Also + -------- + petsc_options, petsc.PetscObjectSetFromOptions - def viewFromOptions(self, name, Object prefix=None): + """ + CHKERR(PetscObjectSetFromOptions(self.obj[0])) + + def viewFromOptions(self, name: str, Object objpre=None) -> None: + """View the object via command line options. + + Collective. + + Parameters + ---------- + name + The command line option. + objpre + Optional object that provides prefix. + + See Also + -------- + petsc_options, petsc.PetscObjectViewFromOptions + + """ cdef PetscObject pobj = NULL cdef const char *cval = NULL - pobj = prefix.obj[0] if prefix is not None else NULL + pobj = objpre.obj[0] if objpre is not None else NULL name = str2bytes(name, &cval) - CHKERR( PetscObjectViewFromOptions(self.obj[0], pobj, cval) ) + CHKERR(PetscObjectViewFromOptions(self.obj[0], pobj, cval)) # - def getComm(self): + def getComm(self) -> Comm: + """Return the communicator of the object. + + Not collective. + + See Also + -------- + petsc.PetscObjectGetComm + + """ cdef Comm comm = Comm() - CHKERR( PetscObjectGetComm(self.obj[0], &comm.comm) ) + CHKERR(PetscObjectGetComm(self.obj[0], &comm.comm)) return comm - def getName(self): + def getName(self) -> str: + """Return the name of the object. + + Not collective.
+ + See Also + -------- + petsc.PetscObjectGetName + + """ cdef const char *cval = NULL - CHKERR( PetscObjectGetName(self.obj[0], &cval) ) + CHKERR(PetscObjectGetName(self.obj[0], &cval)) return bytes2str(cval) - def setName(self, name): + def setName(self, name: str | None) -> None: + """Associate a name with the object. + + Not collective. + + See Also + -------- + petsc.PetscObjectSetName + + """ cdef const char *cval = NULL name = str2bytes(name, &cval) - CHKERR( PetscObjectSetName(self.obj[0], cval) ) + CHKERR(PetscObjectSetName(self.obj[0], cval)) + + def getClassId(self) -> int: + """Return the class identifier of the object. + + Not collective. + + See Also + -------- + petsc.PetscObjectGetClassId - def getClassId(self): + """ cdef PetscClassId classid = 0 - CHKERR( PetscObjectGetClassId(self.obj[0], &classid) ) + CHKERR(PetscObjectGetClassId(self.obj[0], &classid)) return classid - def getClassName(self): + def getClassName(self) -> str: + """Return the class name of the object. + + Not collective. + + See Also + -------- + petsc.PetscObjectGetClassName + + """ cdef const char *cval = NULL - CHKERR( PetscObjectGetClassName(self.obj[0], &cval) ) + CHKERR(PetscObjectGetClassName(self.obj[0], &cval)) return bytes2str(cval) - def getRefCount(self): + def getRefCount(self) -> int: + """Return the reference count of the object. + + Not collective. + + See Also + -------- + petsc.PetscObjectGetReference + + """ if self.obj[0] == NULL: return 0 cdef PetscInt refcnt = 0 - CHKERR( PetscObjectGetReference(self.obj[0], &refcnt) ) + CHKERR(PetscObjectGetReference(self.obj[0], &refcnt)) return toInt(refcnt) # --- general support --- - def compose(self, name, Object obj or None): + def compose(self, name: str | None, Object obj or None) -> None: + """Associate a PETSc object using a key string. + + Logically collective. + + Parameters + ---------- + name + The string identifying the object to be composed. + obj + The object to be composed. + + See Also + -------- + query, petsc.PetscObjectCompose + + """ cdef const char *cval = NULL cdef PetscObject cobj = NULL name = str2bytes(name, &cval) if obj is not None: cobj = obj.obj[0] - CHKERR( PetscObjectCompose(self.obj[0], cval, cobj) ) + CHKERR(PetscObjectCompose(self.obj[0], cval, cobj)) + + def query(self, name: str) -> Object: + """Query for the PETSc object associated with a key string. - def query(self, name): + Not collective. + + See Also + -------- + compose, petsc.PetscObjectQuery + + """ cdef const char *cval = NULL cdef PetscObject cobj = NULL name = str2bytes(name, &cval) - CHKERR( PetscObjectQuery(self.obj[0], cval, &cobj) ) + CHKERR(PetscObjectQuery(self.obj[0], cval, &cobj)) if cobj == NULL: return None cdef Object obj = subtype_Object(cobj)() obj.obj[0] = cobj - CHKERR( PetscINCREF(obj.obj) ) + CHKERR(PetscINCREF(obj.obj)) return obj - def incRef(self): + def incRef(self) -> int: + """Increment the object reference count. + + Logically collective. + + See Also + -------- + getRefCount, petsc.PetscObjectReference + + """ cdef PetscObject obj = self.obj[0] cdef PetscInt refct = 0 if obj != NULL: - CHKERR( PetscObjectReference(obj) ) - CHKERR( PetscObjectGetReference(obj, &refct) ) + CHKERR(PetscObjectReference(obj)) + CHKERR(PetscObjectGetReference(obj, &refct)) - return (refct) + return toInt(refct) - def decRef(self): + def decRef(self) -> int: + """Decrement the object reference count. + + Logically collective.
+ + See Also + -------- + getRefCount, petsc.PetscObjectDereference + + """ cdef PetscObject obj = self.obj[0] cdef PetscInt refct = 0 if obj != NULL: - CHKERR( PetscObjectGetReference(obj, &refct) ) - CHKERR( PetscObjectDereference(obj) ) + CHKERR(PetscObjectGetReference(obj, &refct)) + CHKERR(PetscObjectDereference(obj)) if refct == 1: self.obj[0] = NULL refct -= 1 - return (refct) + return toInt(refct) + + def getAttr(self, name: str) -> object: + """Return the attribute associated with a given name. + + Not collective. + + See Also + -------- + setAttr, getDict - def getAttr(self, name): + """ cdef const char *cval = NULL name = str2bytes(name, &cval) return self.get_attr(cval) - def setAttr(self, name, attr): + def setAttr(self, name: str, attr: object) -> None: + """Set the attribute associated with a given name. + + Not collective. + + See Also + -------- + getAttr, getDict + + """ cdef const char *cval = NULL name = str2bytes(name, &cval) self.set_attr(cval, attr) - def getDict(self): + def getDict(self) -> dict: + """Return the dictionary of attributes. + + Not collective. + + See Also + -------- + setAttr, getAttr + + """ return self.get_dict() # --- state manipulation --- - def stateIncrease(self): + def stateIncrease(self) -> None: + """Increment the PETSc object state. + + Logically collective. + + See Also + -------- + stateGet, stateSet, petsc.PetscObjectStateIncrease + + """ PetscINCSTATE(self.obj) - def stateGet(self): + def stateGet(self) -> int: + """Return the PETSc object state. + + Not collective. + + See Also + -------- + stateSet, stateIncrease, petsc.PetscObjectStateGet + + """ cdef PetscObjectState state = 0 - CHKERR( PetscObjectStateGet(self.obj[0], &state) ) - return toInt(state) + CHKERR(PetscObjectStateGet(self.obj[0], &state)) + return state + + def stateSet(self, state: int) -> None: + """Set the PETSc object state. + + Logically collective. - def stateSet(self, state): + See Also + -------- + stateIncrease, stateGet, petsc.PetscObjectStateSet + + """ cdef PetscObjectState cstate = asInt(state) - CHKERR( PetscObjectStateSet(self.obj[0], cstate) ) + CHKERR(PetscObjectStateSet(self.obj[0], cstate)) # --- tab level --- - def incrementTabLevel(self, tab, Object parent=None): + def incrementTabLevel(self, tab: int, Object parent=None) -> None: + """Increment the PETSc object tab level. + + Logically collective. + + See Also + -------- + setTabLevel, getTabLevel, petsc.PetscObjectIncrementTabLevel + + """ cdef PetscInt ctab = asInt(tab) cdef PetscObject cobj = NULL if parent is None else parent.obj[0] - CHKERR( PetscObjectIncrementTabLevel(self.obj[0], cobj, ctab) ) + CHKERR(PetscObjectIncrementTabLevel(self.obj[0], cobj, ctab)) + + def setTabLevel(self, level: int) -> None: + """Set the PETSc object tab level. - def setTabLevel(self, level): + Logically collective. + + See Also + -------- + incrementTabLevel, getTabLevel, petsc.PetscObjectSetTabLevel + + """ cdef PetscInt clevel = asInt(level) - CHKERR( PetscObjectSetTabLevel(self.obj[0], clevel) ) + CHKERR(PetscObjectSetTabLevel(self.obj[0], clevel)) + + def getTabLevel(self) -> int: + """Return the PETSc object tab level. - def getTabLevel(self): + Not collective.
+ + See Also + -------- + setTabLevel, incrementTabLevel, petsc.PetscObjectGetTabLevel + + """ cdef PetscInt clevel = 0 - CHKERR( PetscObjectGetTabLevel(self.obj[0], &clevel) ) + CHKERR(PetscObjectGetTabLevel(self.obj[0], &clevel)) return toInt(clevel) # --- properties --- property type: - def __get__(self): + """Object type.""" + def __get__(self) -> str: return self.getType() + def __set__(self, value): self.setType(value) property prefix: - def __get__(self): + """Options prefix.""" + def __get__(self) -> str: return self.getOptionsPrefix() + def __set__(self, value): self.setOptionsPrefix(value) property comm: - def __get__(self): + """The object communicator.""" + def __get__(self) -> Comm: return self.getComm() property name: - def __get__(self): + """The object name.""" + def __get__(self) -> str: return self.getName() + def __set__(self, value): self.setName(value) property classid: - def __get__(self): + """The class identifier.""" + def __get__(self) -> int: return self.getClassId() property klass: - def __get__(self): + """The class name.""" + def __get__(self) -> str: return self.getClassName() property refcount: - def __get__(self): + """Reference count.""" + def __get__(self) -> int: return self.getRefCount() # --- ctypes support --- property handle: - def __get__(self): + """Handle for ctypes support.""" + def __get__(self) -> int: cdef PetscObject obj = self.obj[0] return PyLong_FromVoidPtr(obj) # --- Fortran support --- property fortran: - def __get__(self): + """Fortran handle.""" + def __get__(self) -> int: cdef PetscObject obj = self.obj[0] return Object_toFortran(obj) @@ -261,7 +541,7 @@ cdef class Object: include "cyclicgc.pxi" -cdef dict type_registry = { 0 : None } +cdef dict type_registry = {0: None} __type_registry__ = type_registry cdef int PyPetscType_Register(int classid, type cls) except -1: @@ -278,7 +558,7 @@ cdef int PyPetscType_Register(int classid, type cls) except -1: value = type_registry[key] if cls is not value: raise ValueError( - "key: %d, cannot register: %s, " \ + "key: %d, cannot register: %s, " "already registered: %s" % (key, cls, value)) return 0 diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Options.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Options.pyx index 91e6d018433..dea35ebd9fe 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Options.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Options.pyx @@ -1,17 +1,56 @@ # -------------------------------------------------------------------- cdef class Options: + """The options database object. + + A dictionary-like object to store and operate with + command line options. + + Parameters + ---------- + prefix : str, optional + Optional string to prepend to all the options. + + Examples + -------- + Create an option database and operate with it. + + >>> from petsc4py import PETSc + >>> opts = PETSc.Options() + >>> opts['a'] = 1 # insert the command-line option '-a 1' + >>> if 'a' in opts: # if the option is present + ... val = opts['a'] # return the option value as 'str' + >>> a_int = opts.getInt('a') # return the option value as 'int' + >>> a_bool = opts.getBool('a') # return the option value as 'bool' + + Read command line and use default values. + + >>> from petsc4py import PETSc + >>> opts = PETSc.Options() + >>> b_float = opts.getReal('b', 1) # return the value or 1.0 if not present + + Read command line options prepended with a prefix.
+ + >>> from petsc4py import PETSc + >>> opts = PETSc.Options('prefix_') + >>> opts.getString('b', 'some_default_string') # read -prefix_b xxx + + See Also + -------- + petsc_options + + """ cdef PetscOptions opt cdef object _prefix - def __init__(self, prefix=None): + def __init__(self, prefix: str | None = None) -> None: self.opt = NULL - self.prefix = prefix + self.prefix = prefix def __dealloc__(self): if self.opt == NULL: return - CHKERR( PetscOptionsDestroy(&self.opt) ) + CHKERR(PetscOptionsDestroy(&self.opt)) def __contains__(self, item): return self.hasName(item) @@ -26,123 +65,366 @@ cdef class Options: self.delValue(item) property prefix: - def __get__(self): + """Prefix for options.""" + def __get__(self) -> str: return self._prefix + def __set__(self, prefix): self._prefix = getprefix(prefix) + def __del__(self): self._prefix = None # - def create(self): + def create(self) -> Self: + """Create an options database.""" if self.opt != NULL: return - CHKERR( PetscOptionsCreate(&self.opt) ) + CHKERR(PetscOptionsCreate(&self.opt)) return self - def destroy(self): + def destroy(self) -> Self: + """Destroy an options database.""" if self.opt == NULL: return - CHKERR( PetscOptionsDestroy(&self.opt) ) + CHKERR(PetscOptionsDestroy(&self.opt)) return self - def clear(self): + def clear(self) -> Self: + """Clear an options database.""" if self.opt == NULL: return - CHKERR( PetscOptionsClear(self.opt) ) + CHKERR(PetscOptionsClear(self.opt)) return self - def view(self, Viewer viewer=None): + def view(self, Viewer viewer=None) -> None: + """View the options database. + + Collective. + + Parameters + ---------- + viewer + A `Viewer` instance or `None` for the default viewer. + + See Also + -------- + Viewer, petsc.PetscOptionsView + + """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscOptionsView(self.opt, vwr) ) + CHKERR(PetscOptionsView(self.opt, vwr)) + + def prefixPush(self, prefix: str | Options | Object | None) -> None: + """Push a prefix for the options database. + + Logically collective. - def prefixPush(self, prefix): + See Also + -------- + prefixPop, petsc.PetscOptionsPrefixPush + + """ prefix = getprefix(prefix) cdef const char *cprefix = NULL prefix = str2bytes(prefix, &cprefix) - CHKERR( PetscOptionsPrefixPush(self.opt, cprefix) ) + CHKERR(PetscOptionsPrefixPush(self.opt, cprefix)) + + def prefixPop(self) -> None: + """Pop a prefix for the options database. + + Logically collective. - def prefixPop(self): - CHKERR( PetscOptionsPrefixPop(self.opt) ) + See Also + -------- + prefixPush, petsc.PetscOptionsPrefixPop + + """ + CHKERR(PetscOptionsPrefixPop(self.opt)) # - def hasName(self, name): + def hasName(self, name: str) -> bool: + """Return the boolean indicating if the option is in the database.""" cdef const char *pr = NULL cdef const char *nm = NULL - tmp = getpair(self.prefix, name, &pr, &nm) + cdef object unused = getpair(self.prefix, name, &pr, &nm) cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscOptionsHasName(self.opt, pr, nm, &flag) ) + CHKERR(PetscOptionsHasName(self.opt, pr, nm, &flag)) return toBool(flag) - def setValue(self, name, value): + def setValue(self, name: str, + value: bool | int | float | Scalar | Sequence[bool] | Sequence[int] | Sequence[float] | Sequence[Scalar] | str) -> None: + """Set a value for an option. + + Logically collective. + + Parameters + ---------- + name + The string identifying the option. + value + The option value.
+ + See Also + -------- + delValue, petsc.PetscOptionsSetValue + + """ cdef const char *pr = NULL cdef const char *nm = NULL - tmp = getpair(self.prefix, name, &pr, &nm) - if pr == NULL: + cdef object unused = getpair(self.prefix, name, &pr, &nm) + if pr == NULL: option = bytes2str(nm) - else: + else: option = '-%s%s' % (bytes2str(pr), bytes2str(&nm[1])) - if type(value) is bool: + + if isinstance(value, ndarray): + value = value.tolist() + if isinstance(value, (tuple, list)): + value = str(value).replace(' ', '').\ + replace('(', '').replace(')', '').\ + replace('[', '').replace(']', '') + elif isinstance(value, bool): value = str(value).lower() - elif value is not None : + elif value is not None: value = str(value) cdef const char *key = NULL cdef const char *val = NULL option = str2bytes(option, &key) value = str2bytes(value, &val) - CHKERR( PetscOptionsSetValue(self.opt, key, val) ) + CHKERR(PetscOptionsSetValue(self.opt, key, val)) - def delValue(self, name): + def delValue(self, name: str) -> None: + """Delete an option from the database. + + Logically collective. + + See Also + -------- + setValue, petsc.PetscOptionsClearValue + + """ cdef const char *pr = NULL cdef const char *nm = NULL - tmp = getpair(self.prefix, name, &pr, &nm) - if pr == NULL: + cdef object unused = getpair(self.prefix, name, &pr, &nm) + if pr == NULL: option = bytes2str(nm) - else: + else: option = '-%s%s' % (bytes2str(pr), bytes2str(&nm[1])) cdef const char *key = NULL option = str2bytes(option, &key) - CHKERR( PetscOptionsClearValue(self.opt, key) ) + CHKERR(PetscOptionsClearValue(self.opt, key)) # - def getBool(self, name, default=None): + def getBool(self, name: str, default=None) -> bool: + """Return the boolean value associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + getBoolArray, petsc.PetscOptionsGetBool + + """ return getopt(self.opt, OPT_BOOL, self.prefix, name, default) - def getBoolArray(self, name, default=None): + def getBoolArray(self, name: str, default=None) -> ArrayBool: + """Return the boolean values associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + getBool, petsc.PetscOptionsGetBoolArray + + """ return getopt(self.opt, OPT_BOOLARRAY, self.prefix, name, default) - def getInt(self, name, default=None): + def getInt(self, name: str, default=None) -> int: + """Return the integer value associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + getIntArray, petsc.PetscOptionsGetInt + + """ return getopt(self.opt, OPT_INT, self.prefix, name, default) - def getIntArray(self, name, default=None): + def getIntArray(self, name: str, default=None) -> ArrayInt: + """Return the integer array associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. 
+ + See Also + -------- + getInt, petsc.PetscOptionsGetIntArray + + """ return getopt(self.opt, OPT_INTARRAY, self.prefix, name, default) - def getReal(self, name, default=None): + def getReal(self, name: str, default=None) -> float: + """Return the real value associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + getRealArray, petsc.PetscOptionsGetReal + + """ return getopt(self.opt, OPT_REAL, self.prefix, name, default) - def getRealArray(self, name, default=None): + def getRealArray(self, name: str, default=None) -> ArrayReal: + """Return the real array associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + getReal, petsc.PetscOptionsGetRealArray + + """ return getopt(self.opt, OPT_REALARRAY, self.prefix, name, default) - def getScalar(self, name, default=None): + def getScalar(self, name: str, default=None) -> Scalar: + """Return the scalar value associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + getScalarArray, petsc.PetscOptionsGetScalar + + """ return getopt(self.opt, OPT_SCALAR, self.prefix, name, default) - def getScalarArray(self, name, default=None): + def getScalarArray(self, name: str, default=None) -> ArrayScalar: + """Return the scalar array associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + getScalar, petsc.PetscOptionsGetScalarArray + + """ return getopt(self.opt, OPT_SCALARARRAY, self.prefix, name, default) - def getString(self, name, default=None): + def getString(self, name: str, default=None) -> str: + """Return the string associated with the option. + + Not collective. + + Parameters + ---------- + name + The option name. + default + The default value. + If `None`, it raises a `KeyError` if the option is not found. + + See Also + -------- + petsc.PetscOptionsGetString + + """ return getopt(self.opt, OPT_STRING, self.prefix, name, default) # - def insertString(self, string): + def insertString(self, string: str) -> None: + """Insert a string in the options database. + + Logically collective. + + See Also + -------- + petsc.PetscOptionsInsertString + + """ cdef const char *cstring = NULL string = str2bytes(string, &cstring) - CHKERR( PetscOptionsInsertString(self.opt, cstring) ) + CHKERR(PetscOptionsInsertString(self.opt, cstring)) + + def getAll(self) -> dict[str, str]: + """Return all the options and their values. + + Not collective. 
+ + See Also + -------- + petsc.PetscOptionsGetAll - def getAll(self): + """ cdef char *allopts = NULL - CHKERR( PetscOptionsGetAll(self.opt, &allopts) ) + CHKERR(PetscOptionsGetAll(self.opt, &allopts)) options = bytes2str(allopts) - CHKERR( PetscFree(allopts) ) + CHKERR(PetscFree(allopts)) return parseopt(options, self.prefix) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/PC.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/PC.pyx index cca5ed897f1..fad70f7c3e2 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/PC.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/PC.pyx @@ -53,6 +53,7 @@ class PCType(object): HPDDM = S_(PCHPDDM) H2OPUS = S_(PCH2OPUS) + class PCSide(object): """The manner in which the preconditioner is applied.""" # native @@ -64,6 +65,7 @@ class PCSide(object): R = RIGHT S = SYMMETRIC + class PCASMType(object): """The *ASM* subtype.""" NONE = PC_ASM_NONE @@ -71,6 +73,7 @@ class PCASMType(object): RESTRICT = PC_ASM_RESTRICT INTERPOLATE = PC_ASM_INTERPOLATE + class PCGASMType(object): """The *GASM* subtype.""" NONE = PC_GASM_NONE @@ -78,6 +81,7 @@ class PCGASMType(object): RESTRICT = PC_GASM_RESTRICT INTERPOLATE = PC_GASM_INTERPOLATE + class PCMGType(object): """The *MG* subtype.""" MULTIPLICATIVE = PC_MG_MULTIPLICATIVE @@ -85,17 +89,20 @@ class PCMGType(object): FULL = PC_MG_FULL KASKADE = PC_MG_KASKADE + class PCMGCycleType(object): """The *MG* cycle type.""" V = PC_MG_CYCLE_V W = PC_MG_CYCLE_W + class PCGAMGType(object): """The *GAMG* subtype.""" AGG = S_(PCGAMGAGG) GEO = S_(PCGAMGGEO) CLASSICAL = S_(PCGAMGCLASSICAL) + class PCCompositeType(object): """The composite type.""" ADDITIVE = PC_COMPOSITE_ADDITIVE @@ -104,6 +111,7 @@ class PCCompositeType(object): SPECIAL = PC_COMPOSITE_SPECIAL SCHUR = PC_COMPOSITE_SCHUR + class PCFieldSplitSchurPreType(object): """The field split Schur subtype.""" SELF = PC_FIELDSPLIT_SCHUR_PRE_SELF @@ -112,6 +120,7 @@ class PCFieldSplitSchurPreType(object): USER = PC_FIELDSPLIT_SCHUR_PRE_USER FULL = PC_FIELDSPLIT_SCHUR_PRE_FULL + class PCFieldSplitSchurFactType(object): """The field split Schur factorization type.""" DIAG = PC_FIELDSPLIT_SCHUR_FACT_DIAG @@ -119,6 +128,7 @@ class PCFieldSplitSchurFactType(object): UPPER = PC_FIELDSPLIT_SCHUR_FACT_UPPER FULL = PC_FIELDSPLIT_SCHUR_FACT_FULL + class PCPatchConstructType(object): """The patch construction type.""" STAR = PC_PATCH_STAR @@ -127,6 +137,7 @@ class PCPatchConstructType(object): USER = PC_PATCH_USER PYTHON = PC_PATCH_PYTHON + class PCHPDDMCoarseCorrectionType(object): """The *HPDDM* coarse correction type.""" DEFLATED = PC_HPDDM_COARSE_CORRECTION_DEFLATED @@ -134,6 +145,7 @@ class PCHPDDMCoarseCorrectionType(object): BALANCED = PC_HPDDM_COARSE_CORRECTION_BALANCED NONE = PC_HPDDM_COARSE_CORRECTION_NONE + class PCDeflationSpaceType(object): """The deflation space subtype.""" HAAR = PC_DEFLATION_SPACE_HAAR @@ -146,6 +158,7 @@ class PCDeflationSpaceType(object): AGGREGATION = PC_DEFLATION_SPACE_AGGREGATION USER = PC_DEFLATION_SPACE_USER + class PCFailedReason(object): """The reason the preconditioner has failed.""" SETUP_ERROR = PC_SETUP_ERROR @@ -158,6 +171,7 @@ class PCFailedReason(object): # -------------------------------------------------------------------- + cdef class PC(Object): """Preconditioners. 
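The `Options.setValue` hunk earlier adds sequence handling: lists, tuples, and NumPy arrays are flattened into the comma-separated string form the PETSc database expects. A small sketch of that together with the prefix push/pop machinery:

```python
opts = PETSc.Options()
opts.setValue('levels', [2, 4, 8])   # stored as the string '2,4,8'
print(opts.getIntArray('levels'))    # -> array([2, 4, 8])

opts.prefixPush('sub_')
opts['rtol'] = 1e-8                  # inserted as '-sub_rtol 1e-8'
opts.prefixPop()
print(opts.getAll())                 # dict of all current options
```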
@@ -168,11 +182,12 @@ cdef class PC(Object): Examples -------- >>> from petsc4py import PETSc - >>> v = PETSc.Vec().createWithArray([1,2]) - >>> m = PETSc.Mat().createDense(2,array=[[1,0],[0,1]]) + >>> v = PETSc.Vec().createWithArray([1, 2]) + >>> m = PETSc.Mat().createDense(2, array=[[1, 0], [0, 1]]) >>> pc = PETSc.PC().create() >>> pc.setOperators(m) - >>> u = pc(v) # Vec u is created internally, can also be passed as second argument + >>> u = pc(v) # u is created internally + >>> pc.apply(v, u) # u can also be passed as second argument See Also -------- @@ -230,7 +245,7 @@ cdef class PC(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PCView(self.pc, vwr) ) + CHKERR(PCView(self.pc, vwr)) def destroy(self) -> Self: """Destroy the `PC` that was created with `create`. @@ -242,7 +257,7 @@ cdef class PC(Object): petsc.PCDestroy """ - CHKERR( PCDestroy(&self.pc) ) + CHKERR(PCDestroy(&self.pc)) self.pc = NULL return self @@ -267,8 +282,8 @@ cdef class PC(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscPC newpc = NULL - CHKERR( PCCreate(ccomm, &newpc) ) - CHKERR( PetscCLEAR(self.obj) ); self.pc = newpc + CHKERR(PCCreate(ccomm, &newpc)) + CHKERR(PetscCLEAR(self.obj)); self.pc = newpc return self def setType(self, pc_type: Type | str) -> None: @@ -288,7 +303,7 @@ cdef class PC(Object): """ cdef PetscPCType cval = NULL pc_type = str2bytes(pc_type, &cval) - CHKERR( PCSetType(self.pc, cval) ) + CHKERR(PCSetType(self.pc, cval)) def getType(self) -> str: """Return the preconditioner type. @@ -301,10 +316,10 @@ cdef class PC(Object): """ cdef PetscPCType cval = NULL - CHKERR( PCGetType(self.pc, &cval) ) + CHKERR(PCGetType(self.pc, &cval)) return bytes2str(cval) - def setOptionsPrefix(self, prefix: str) -> None: + def setOptionsPrefix(self, prefix: str | None) -> None: """Set the prefix used for all the `PC` options. Logically collective. @@ -321,7 +336,7 @@ cdef class PC(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( PCSetOptionsPrefix(self.pc, cval) ) + CHKERR(PCSetOptionsPrefix(self.pc, cval)) def getOptionsPrefix(self) -> str: """Return the prefix used for all the `PC` options. @@ -334,10 +349,10 @@ cdef class PC(Object): """ cdef const char *cval = NULL - CHKERR( PCGetOptionsPrefix(self.pc, &cval) ) + CHKERR(PCGetOptionsPrefix(self.pc, &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None) -> None: """Append to the prefix used for all the `PC` options. Logically collective. @@ -354,7 +369,7 @@ cdef class PC(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( PCAppendOptionsPrefix(self.pc, cval) ) + CHKERR(PCAppendOptionsPrefix(self.pc, cval)) def setFromOptions(self) -> None: """Set various `PC` parameters from user options. @@ -366,7 +381,7 @@ cdef class PC(Object): petsc_options, petsc.PCSetFromOptions """ - CHKERR( PCSetFromOptions(self.pc) ) + CHKERR(PCSetFromOptions(self.pc)) def setOperators(self, Mat A=None, Mat P=None) -> None: """Set the matrices associated with the linear system. @@ -396,9 +411,9 @@ cdef class PC(Object): if A is not None: amat = A.mat cdef PetscMat pmat=amat if P is not None: pmat = P.mat - CHKERR( PCSetOperators(self.pc, amat, pmat) ) + CHKERR(PCSetOperators(self.pc, amat, pmat)) - def getOperators(self) -> tuple[Mat,Mat]: + def getOperators(self) -> tuple[Mat, Mat]: """Return the matrices associated with a linear system. Not collective. 
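A sketch of the `PC` configuration flow in the hunks above, continuing the class docstring's example (`m` is the assembled operator from there; the `mypc_` prefix is purely illustrative):

```python
opts = PETSc.Options()
opts['mypc_pc_type'] = 'jacobi'

pc = PETSc.PC().create()
pc.setOptionsPrefix('mypc_')
pc.setOperators(m)        # both A and P default to m here
pc.setFromOptions()       # picks up -mypc_pc_type jacobi
A, P = pc.getOperators()
assert pc.getType() == 'jacobi'
```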
@@ -409,9 +424,9 @@ cdef class PC(Object): """ cdef Mat A = Mat(), P = Mat() - CHKERR( PCGetOperators(self.pc, &A.mat, &P.mat) ) - CHKERR( PetscINCREF(A.obj) ) - CHKERR( PetscINCREF(P.obj) ) + CHKERR(PCGetOperators(self.pc, &A.mat, &P.mat)) + CHKERR(PetscINCREF(A.obj)) + CHKERR(PetscINCREF(P.obj)) return (A, P) def setUseAmat(self, flag: bool) -> None: @@ -438,9 +453,9 @@ cdef class PC(Object): cdef PetscBool cflag = PETSC_FALSE if flag: cflag = PETSC_TRUE - CHKERR( PCSetUseAmat(self.pc, cflag) ) + CHKERR(PCSetUseAmat(self.pc, cflag)) - def getUseAmat(self): + def getUseAmat(self) -> bool: """Return the flag to indicate if `PC` is applied to ``A`` or ``P``. Logically collective. @@ -456,7 +471,7 @@ cdef class PC(Object): """ cdef PetscBool cflag = PETSC_FALSE - CHKERR( PCGetUseAmat(self.pc, &cflag) ) + CHKERR(PCGetUseAmat(self.pc, &cflag)) return toBool(cflag) def setReusePreconditioner(self, flag: bool) -> None: @@ -482,9 +497,9 @@ cdef class PC(Object): cdef PetscBool cflag = PETSC_FALSE if flag: cflag = PETSC_TRUE - CHKERR( PCSetReusePreconditioner(self.pc, cflag) ) + CHKERR(PCSetReusePreconditioner(self.pc, cflag)) - def setFailedReason(self, reason: FailedReason | str) -> None: + def setFailedReason(self, reason: FailedReason | str) -> None: """Set the reason the `PC` terminated. Logically collective. @@ -500,7 +515,7 @@ cdef class PC(Object): """ cdef PetscPCFailedReason val = reason - CHKERR( PCSetFailedReason(self.pc, val) ) + CHKERR(PCSetFailedReason(self.pc, val)) def getFailedReason(self) -> FailedReason: """Return the reason the `PC` terminated. @@ -516,7 +531,7 @@ cdef class PC(Object): """ cdef PetscPCFailedReason reason = PC_NOERROR - CHKERR( PCGetFailedReason(self.pc, &reason) ) + CHKERR(PCGetFailedReason(self.pc, &reason)) return reason def getFailedReasonRank(self) -> FailedReason: @@ -532,7 +547,7 @@ cdef class PC(Object): """ cdef PetscPCFailedReason reason = PC_NOERROR - CHKERR( PCGetFailedReasonRank(self.pc, &reason) ) + CHKERR(PCGetFailedReasonRank(self.pc, &reason)) return reason def setUp(self) -> None: @@ -545,7 +560,7 @@ cdef class PC(Object): petsc.PCSetUp """ - CHKERR( PCSetUp(self.pc) ) + CHKERR(PCSetUp(self.pc)) def reset(self) -> None: """Reset the `PC`, removing any allocated vectors and matrices. @@ -557,7 +572,7 @@ cdef class PC(Object): petsc.PCReset """ - CHKERR( PCReset(self.pc) ) + CHKERR(PCReset(self.pc)) def setUpOnBlocks(self) -> None: """Set up the `PC` for each block. @@ -573,7 +588,7 @@ cdef class PC(Object): setUp, petsc.PCSetUpOnBlocks """ - CHKERR( PCSetUpOnBlocks(self.pc) ) + CHKERR(PCSetUpOnBlocks(self.pc)) def apply(self, Vec x, Vec y) -> None: """Apply the `PC` to a vector. @@ -592,7 +607,7 @@ cdef class PC(Object): petsc.PCApply """ - CHKERR( PCApply(self.pc, x.vec, y.vec) ) + CHKERR(PCApply(self.pc, x.vec, y.vec)) def matApply(self, Mat x, Mat y) -> None: """Apply the `PC` to many vectors stored as `Mat.Type.DENSE`. @@ -611,7 +626,7 @@ cdef class PC(Object): petsc.PCMatApply, petsc.PCApply """ - CHKERR( PCMatApply(self.pc, x.mat, y.mat) ) + CHKERR(PCMatApply(self.pc, x.mat, y.mat)) def applyTranspose(self, Vec x, Vec y) -> None: """Apply the transpose of the `PC` to a vector. @@ -633,7 +648,7 @@ cdef class PC(Object): petsc.PCApply """ - CHKERR( PCApplyTranspose(self.pc, x.vec, y.vec) ) + CHKERR(PCApplyTranspose(self.pc, x.vec, y.vec)) def applySymmetricLeft(self, Vec x, Vec y) -> None: """Apply the left part of a symmetric `PC` to a vector. 
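Alongside `apply`, the hunks above expose the `Mat`-valued variant. A hedged sketch of `matApply`, assuming `pc` wraps an n×n operator; each column of the dense matrix is treated as an independent right-hand side:

```python
n = pc.getOperators()[0].getSize()[0]
X = PETSc.Mat().createDense([n, 4])   # 4 right-hand sides
X.setUp()
X.setRandom()
X.assemble()
Y = X.duplicate()
pc.setUp()
pc.matApply(X, Y)                     # wraps PCMatApply
```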
@@ -652,7 +667,7 @@ cdef class PC(Object): petsc.PCApplySymmetricLeft """ - CHKERR( PCApplySymmetricLeft(self.pc, x.vec, y.vec) ) + CHKERR(PCApplySymmetricLeft(self.pc, x.vec, y.vec)) def applySymmetricRight(self, Vec x, Vec y) -> None: """Apply the right part of a symmetric `PC` to a vector. @@ -671,7 +686,7 @@ cdef class PC(Object): petsc.PCApplySymmetricRight """ - CHKERR( PCApplySymmetricRight(self.pc, x.vec, y.vec) ) + CHKERR(PCApplySymmetricRight(self.pc, x.vec, y.vec)) # --- discretization space --- @@ -686,10 +701,10 @@ cdef class PC(Object): """ cdef PetscDM newdm = NULL - CHKERR( PCGetDM(self.pc, &newdm) ) + CHKERR(PCGetDM(self.pc, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) return dm def setDM(self, DM dm) -> None: @@ -707,7 +722,7 @@ cdef class PC(Object): petsc.PCSetDM """ - CHKERR( PCSetDM(self.pc, dm.dm) ) + CHKERR(PCSetDM(self.pc, dm.dm)) def setCoordinates(self, coordinates: Sequence[Sequence[float]]) -> None: """Set the coordinates for the nodes on the local process. @@ -728,11 +743,11 @@ cdef class PC(Object): if PyArray_ISFORTRAN(xyz): xyz = PyArray_Copy(xyz) if PyArray_NDIM(xyz) != 2: raise ValueError( ("coordinates must have two dimensions: " - "coordinates.ndim=%d") % (PyArray_NDIM(xyz)) ) + "coordinates.ndim=%d") % (PyArray_NDIM(xyz))) cdef PetscInt nvtx = PyArray_DIM(xyz, 0) cdef PetscInt ndim = PyArray_DIM(xyz, 1) cdef PetscReal *coords = PyArray_DATA(xyz) - CHKERR( PCSetCoordinates(self.pc, ndim, nvtx, coords) ) + CHKERR(PCSetCoordinates(self.pc, ndim, nvtx, coords)) # --- Python --- @@ -755,10 +770,10 @@ cdef class PC(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscPC newpc = NULL - CHKERR( PCCreate(ccomm, &newpc) ) - CHKERR( PetscCLEAR(self.obj) ); self.pc = newpc - CHKERR( PCSetType(self.pc, PCPYTHON) ) - CHKERR( PCPythonSetContext(self.pc, context) ) + CHKERR(PCCreate(ccomm, &newpc)) + CHKERR(PetscCLEAR(self.obj)); self.pc = newpc + CHKERR(PCSetType(self.pc, PCPYTHON)) + CHKERR(PCPythonSetContext(self.pc, context)) return self def setPythonContext(self, context: Any) -> None: @@ -771,7 +786,7 @@ cdef class PC(Object): petsc_python_pc, getPythonContext """ - CHKERR( PCPythonSetContext(self.pc, context) ) + CHKERR(PCPythonSetContext(self.pc, context)) def getPythonContext(self) -> Any: """Return the instance of the class implementing the required Python methods. @@ -784,7 +799,7 @@ cdef class PC(Object): """ cdef void *context = NULL - CHKERR( PCPythonGetContext(self.pc, &context) ) + CHKERR(PCPythonGetContext(self.pc, &context)) if context == NULL: return None else: return context @@ -800,7 +815,7 @@ cdef class PC(Object): """ cdef const char *cval = NULL py_type = str2bytes(py_type, &cval) - CHKERR( PCPythonSetType(self.pc, cval) ) + CHKERR(PCPythonSetType(self.pc, cval)) def getPythonType(self) -> str: """Return the fully qualified Python name of the class used by the preconditioner. @@ -813,7 +828,7 @@ cdef class PC(Object): """ cdef const char *cval = NULL - CHKERR( PCPythonGetType(self.pc, &cval) ) + CHKERR(PCPythonGetType(self.pc, &cval)) return bytes2str(cval) # --- ASM --- @@ -834,7 +849,7 @@ cdef class PC(Object): """ cdef PetscPCASMType cval = asmtype - CHKERR( PCASMSetType(self.pc, cval) ) + CHKERR(PCASMSetType(self.pc, cval)) def setASMOverlap(self, overlap: int) -> None: """Set the overlap between a pair of subdomains. 
@@ -852,12 +867,12 @@ cdef class PC(Object): """ cdef PetscInt ival = asInt(overlap) - CHKERR( PCASMSetOverlap(self.pc, ival) ) + CHKERR(PCASMSetOverlap(self.pc, ival)) def setASMLocalSubdomains( self, nsd: int, - is_: Sequence[IS] | None = None, + is_sub: Sequence[IS] | None = None, is_local: Sequence[IS] | None = None) -> None: """Set the local subdomains. @@ -867,7 +882,7 @@ cdef class PC(Object): ---------- nsd The number of subdomains for this process. - is\_ + is_sub Defines the subdomains for this process or `None` to determine internally. is_local @@ -883,25 +898,25 @@ cdef class PC(Object): cdef PetscInt i = 0 cdef PetscIS *isets = NULL cdef PetscIS *isets_local = NULL - if is_ is not None: - assert len(is_) == nsd - CHKERR( PetscMalloc(n*sizeof(PetscIS), &isets) ) + if is_sub is not None: + assert len(is_sub) == nsd + CHKERR(PetscMalloc(n*sizeof(PetscIS), &isets)) for i in range(n): - isets[i] = (is_[i]).iset + isets[i] = (is_sub[i]).iset if is_local is not None: assert len(is_local) == nsd - CHKERR( PetscMalloc(n*sizeof(PetscIS), &isets_local) ) + CHKERR(PetscMalloc(n*sizeof(PetscIS), &isets_local)) for i in range(n): isets_local[i] = (is_local[i]).iset - CHKERR( PCASMSetLocalSubdomains(self.pc, n, isets, isets_local) ) - CHKERR( PetscFree(isets) ) - CHKERR( PetscFree(isets_local) ) + CHKERR(PCASMSetLocalSubdomains(self.pc, n, isets, isets_local)) + CHKERR(PetscFree(isets)) + CHKERR(PetscFree(isets_local)) def setASMTotalSubdomains( self, nsd: int, - is_: Sequence[IS] | None=None, - is_local: Sequence[IS] | None=None) -> None: + is_sub: Sequence[IS] | None = None, + is_local: Sequence[IS] | None = None) -> None: """Set the subdomains for all processes. Collective. @@ -910,7 +925,7 @@ cdef class PC(Object): ---------- nsd The number of subdomains for all processes. - is\_ + is_sub Defines the subdomains for all processes or `None` to determine internally. is_local @@ -926,19 +941,19 @@ cdef class PC(Object): cdef PetscInt i = 0 cdef PetscIS *isets = NULL cdef PetscIS *isets_local = NULL - if is_ is not None: - assert len(is_) == nsd - CHKERR( PetscMalloc(n*sizeof(PetscIS), &isets) ) + if is_sub is not None: + assert len(is_sub) == nsd + CHKERR(PetscMalloc(n*sizeof(PetscIS), &isets)) for i in range(n): - isets[i] = (is_[i]).iset + isets[i] = (is_sub[i]).iset if is_local is not None: assert len(is_local) == nsd - CHKERR( PetscMalloc(n*sizeof(PetscIS), &isets_local) ) + CHKERR(PetscMalloc(n*sizeof(PetscIS), &isets_local)) for i in range(n): isets_local[i] = (is_local[i]).iset - CHKERR( PCASMSetTotalSubdomains(self.pc, n, isets, isets_local) ) - CHKERR( PetscFree(isets) ) - CHKERR( PetscFree(isets_local) ) + CHKERR(PCASMSetTotalSubdomains(self.pc, n, isets, isets_local)) + CHKERR(PetscFree(isets)) + CHKERR(PetscFree(isets_local)) def getASMSubKSP(self) -> list[KSP]: """Return the local `KSP` object for all blocks on this process. 
@@ -950,9 +965,9 @@ cdef class PC(Object): petsc.PCASMGetSubKSP """ - cdef PetscInt i = 0, n = 0 + cdef PetscInt n = 0 cdef PetscKSP *p = NULL - CHKERR( PCASMGetSubKSP(self.pc, &n, NULL, &p) ) + CHKERR(PCASMGetSubKSP(self.pc, &n, NULL, &p)) return [ref_KSP(p[i]) for i from 0 <= i None: @@ -971,7 +986,7 @@ cdef class PC(Object): """ cdef PetscBool cdosort = asBool(dosort) - CHKERR( PCASMSetSortIndices(self.pc, cdosort) ) + CHKERR(PCASMSetSortIndices(self.pc, cdosort)) # --- GASM --- @@ -991,7 +1006,7 @@ cdef class PC(Object): """ cdef PetscPCGASMType cval = gasmtype - CHKERR( PCGASMSetType(self.pc, cval) ) + CHKERR(PCGASMSetType(self.pc, cval)) def setGASMOverlap(self, overlap: int) -> None: """Set the overlap between a pair of subdomains. @@ -1009,7 +1024,7 @@ cdef class PC(Object): """ cdef PetscInt ival = asInt(overlap) - CHKERR( PCGASMSetOverlap(self.pc, ival) ) + CHKERR(PCGASMSetOverlap(self.pc, ival)) # --- GAMG --- @@ -1030,7 +1045,7 @@ cdef class PC(Object): """ cdef PetscPCGAMGType cval = NULL gamgtype = str2bytes(gamgtype, &cval) - CHKERR( PCGAMGSetType(self.pc, cval) ) + CHKERR(PCGAMGSetType(self.pc, cval)) def setGAMGLevels(self, levels: int) -> None: """Set the maximum number of levels. @@ -1048,7 +1063,7 @@ cdef class PC(Object): """ cdef PetscInt ival = asInt(levels) - CHKERR( PCGAMGSetNlevels(self.pc, ival) ) + CHKERR(PCGAMGSetNlevels(self.pc, ival)) def setGAMGSmooths(self, smooths: int) -> None: """Set the number of smoothing steps used on all levels. @@ -1066,25 +1081,29 @@ cdef class PC(Object): """ cdef PetscInt ival = asInt(smooths) - CHKERR( PCGAMGSetNSmooths(self.pc, ival) ) + CHKERR(PCGAMGSetNSmooths(self.pc, ival)) # --- Hypre --- def getHYPREType(self) -> str: """Return the `Type.HYPRE` type. + Not collective. + See Also -------- petsc.PCHYPREGetType """ cdef PetscPCHYPREType cval = NULL - CHKERR( PCHYPREGetType(self.pc, &cval) ) + CHKERR(PCHYPREGetType(self.pc, &cval)) return bytes2str(cval) - def setHYPREType(self, hypretype: str): + def setHYPREType(self, hypretype: str) -> None: """Set the `Type.HYPRE` type. + Collective. + Parameters ---------- hypretype @@ -1098,7 +1117,7 @@ cdef class PC(Object): """ cdef PetscPCHYPREType cval = NULL hypretype = str2bytes(hypretype, &cval) - CHKERR( PCHYPRESetType(self.pc, cval) ) + CHKERR(PCHYPRESetType(self.pc, cval)) def setHYPREDiscreteCurl(self, Mat mat) -> None: """Set the discrete curl matrix. @@ -1115,7 +1134,7 @@ cdef class PC(Object): petsc.PCHYPRESetDiscreteCurl """ - CHKERR( PCHYPRESetDiscreteCurl(self.pc, mat.mat) ) + CHKERR(PCHYPRESetDiscreteCurl(self.pc, mat.mat)) def setHYPREDiscreteGradient(self, Mat mat) -> None: """Set the discrete gradient matrix. @@ -1132,7 +1151,7 @@ cdef class PC(Object): petsc.PCHYPRESetDiscreteGradient """ - CHKERR( PCHYPRESetDiscreteGradient(self.pc, mat.mat) ) + CHKERR(PCHYPRESetDiscreteGradient(self.pc, mat.mat)) def setHYPRESetAlphaPoissonMatrix(self, Mat mat) -> None: """Set the vector Poisson matrix. @@ -1149,7 +1168,7 @@ cdef class PC(Object): petsc.PCHYPRESetAlphaPoissonMatrix """ - CHKERR( PCHYPRESetAlphaPoissonMatrix(self.pc, mat.mat) ) + CHKERR(PCHYPRESetAlphaPoissonMatrix(self.pc, mat.mat)) def setHYPRESetBetaPoissonMatrix(self, Mat mat=None) -> None: """Set the Posson matrix. 
@@ -1168,7 +1187,7 @@ cdef class PC(Object): """ cdef PetscMat pmat = NULL if mat is not None: pmat = mat.mat - CHKERR( PCHYPRESetBetaPoissonMatrix(self.pc, pmat) ) + CHKERR(PCHYPRESetBetaPoissonMatrix(self.pc, pmat)) def setHYPRESetInterpolations(self, dim: int, Mat RT_Pi_Full=None, RT_Pi=None, Mat ND_Pi_Full=None, ND_Pi=None) -> None: @@ -1226,11 +1245,11 @@ cdef class PC(Object): Parameters ---------- ozz - A vector representing ``[1,0,0]`` or ``[1,0]`` in 2D. + A vector representing ``[1, 0, 0]`` or ``[1, 0]`` in 2D. zoz - A vector representing ``[0,1,0]`` or ``[0,1]`` in 2D. + A vector representing ``[0, 1, 0]`` or ``[0, 1]`` in 2D. zzo - A vector representing ``[0,0,1]`` or `None` in 2D. + A vector representing ``[0, 0, 1]`` or `None` in 2D. See Also -------- @@ -1239,8 +1258,8 @@ cdef class PC(Object): """ cdef PetscVec zzo_vec = NULL if zzo is not None: zzo_vec = zzo.vec - CHKERR( PCHYPRESetEdgeConstantVectors(self.pc, ozz.vec, zoz.vec, - zzo_vec) ) + CHKERR(PCHYPRESetEdgeConstantVectors(self.pc, ozz.vec, zoz.vec, + zzo_vec)) def setHYPREAMSSetInteriorNodes(self, Vec interior) -> None: """Set the list of interior nodes to a zero conductivity region. @@ -1278,7 +1297,7 @@ cdef class PC(Object): """ cdef PetscMatSolverType cval = NULL solver = str2bytes(solver, &cval) - CHKERR( PCFactorSetMatSolverType(self.pc, cval) ) + CHKERR(PCFactorSetMatSolverType(self.pc, cval)) def getFactorSolverType(self) -> str: """Return the solver package used to perform the factorization. @@ -1291,12 +1310,14 @@ cdef class PC(Object): """ cdef PetscMatSolverType cval = NULL - CHKERR( PCFactorGetMatSolverType(self.pc, &cval) ) + CHKERR(PCFactorGetMatSolverType(self.pc, &cval)) return bytes2str(cval) def setFactorSetUpSolverType(self) -> None: """Set up the factorization solver. + Collective. + This can be called after `KSP.setOperators` or `PC.setOperators`, causes `petsc.MatGetFactor` to be called so then one may set the options for that particular factorization object. @@ -1306,7 +1327,7 @@ cdef class PC(Object): petsc_options, petsc.PCFactorSetUpMatSolverType """ - CHKERR( PCFactorSetUpMatSolverType(self.pc) ) + CHKERR(PCFactorSetUpMatSolverType(self.pc)) def setFactorOrdering( self, @@ -1335,15 +1356,15 @@ cdef class PC(Object): cdef PetscMatOrderingType cval = NULL if ord_type is not None: ord_type = str2bytes(ord_type, &cval) - CHKERR( PCFactorSetMatOrderingType(self.pc, cval) ) + CHKERR(PCFactorSetMatOrderingType(self.pc, cval)) cdef PetscReal rval = 0 if nzdiag is not None: rval = asReal(nzdiag) - CHKERR( PCFactorReorderForNonzeroDiagonal(self.pc, rval) ) + CHKERR(PCFactorReorderForNonzeroDiagonal(self.pc, rval)) cdef PetscBool bval = PETSC_FALSE if reuse is not None: bval = PETSC_TRUE if reuse else PETSC_FALSE - CHKERR( PCFactorSetReuseOrdering(self.pc, bval) ) + CHKERR(PCFactorSetReuseOrdering(self.pc, bval)) def setFactorPivot( self, @@ -1368,11 +1389,11 @@ cdef class PC(Object): cdef PetscReal rval = 0 if zeropivot is not None: rval = asReal(zeropivot) - CHKERR( PCFactorSetZeroPivot(self.pc, rval) ) + CHKERR(PCFactorSetZeroPivot(self.pc, rval)) cdef PetscBool bval = PETSC_FALSE if inblocks is not None: bval = PETSC_TRUE if inblocks else PETSC_FALSE - CHKERR( PCFactorSetPivotInBlocks(self.pc, bval) ) + CHKERR(PCFactorSetPivotInBlocks(self.pc, bval)) def setFactorShift( self, @@ -1380,6 +1401,8 @@ cdef class PC(Object): amount: float | None = None) -> None: """Set options for shifting diagonal entries of a matrix. + Logically collective. 
+ Parameters ---------- shift_type @@ -1396,11 +1419,11 @@ cdef class PC(Object): cdef PetscMatFactorShiftType cval = MAT_SHIFT_NONE if shift_type is not None: cval = matfactorshifttype(shift_type) - CHKERR( PCFactorSetShiftType(self.pc, cval) ) + CHKERR(PCFactorSetShiftType(self.pc, cval)) cdef PetscReal rval = 0 if amount is not None: rval = asReal(amount) - CHKERR( PCFactorSetShiftAmount(self.pc, rval) ) + CHKERR(PCFactorSetShiftAmount(self.pc, rval)) def setFactorLevels(self, levels: int) -> None: """Set the number of levels of fill. @@ -1418,7 +1441,7 @@ cdef class PC(Object): """ cdef PetscInt ival = asInt(levels) - CHKERR( PCFactorSetLevels(self.pc, ival) ) + CHKERR(PCFactorSetLevels(self.pc, ival)) def getFactorMatrix(self) -> Mat: """Return the factored matrix. @@ -1431,8 +1454,8 @@ cdef class PC(Object): """ cdef Mat mat = Mat() - CHKERR( PCFactorGetMatrix(self.pc, &mat.mat) ) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(PCFactorGetMatrix(self.pc, &mat.mat)) + CHKERR(PetscINCREF(mat.obj)) return mat # --- FieldSplit --- @@ -1453,7 +1476,7 @@ cdef class PC(Object): """ cdef PetscPCCompositeType cval = ctype - CHKERR( PCFieldSplitSetType(self.pc, cval) ) + CHKERR(PCFieldSplitSetType(self.pc, cval)) def setFieldSplitIS(self, *fields: Tuple[str, IS]) -> None: """Set the elements for the field split by `IS`. @@ -1479,11 +1502,13 @@ cdef class PC(Object): cdef const char *cname = NULL for name, field in fields: name = str2bytes(name, &cname) - CHKERR( PCFieldSplitSetIS(self.pc, cname, field.iset) ) + CHKERR(PCFieldSplitSetIS(self.pc, cname, field.iset)) def setFieldSplitFields(self, bsize: int, *fields: Tuple[str, Sequence[int]]) -> None: """Sets the elements for the field split. + Collective. + Parameters ---------- bsize @@ -1498,7 +1523,7 @@ cdef class PC(Object): """ cdef PetscInt bs = asInt(bsize) - CHKERR( PCFieldSplitSetBlockSize(self.pc, bs) ) + CHKERR(PCFieldSplitSetBlockSize(self.pc, bs)) cdef object name = None cdef object field = None cdef const char *cname = NULL @@ -1506,43 +1531,47 @@ cdef class PC(Object): for name, field in fields: name = str2bytes(name, &cname) field = iarray_i(field, &nfields, &ifields) - CHKERR( PCFieldSplitSetFields(self.pc, cname, - nfields, ifields, ifields) ) + CHKERR(PCFieldSplitSetFields(self.pc, cname, + nfields, ifields, ifields)) def getFieldSplitSubKSP(self) -> list[KSP]: """Return the `KSP` for all splits. + Not collective. + See Also -------- petsc.PCFieldSplitGetSubKSP """ - cdef PetscInt i = 0, n = 0 + cdef PetscInt n = 0 cdef PetscKSP *p = NULL cdef object subksp = None try: - CHKERR( PCFieldSplitGetSubKSP(self.pc, &n, &p) ) + CHKERR(PCFieldSplitGetSubKSP(self.pc, &n, &p)) subksp = [ref_KSP(p[i]) for i from 0 <= i list[KSP]: """Return the `KSP` for the Schur complement based splits. + Not collective. 
+ See Also -------- petsc.PCFieldSplitSchurGetSubKSP, petsc.PCFieldSplitGetSubKSP """ - cdef PetscInt i = 0, n = 0 + cdef PetscInt n = 0 cdef PetscKSP *p = NULL cdef object subksp = None try: - CHKERR( PCFieldSplitSchurGetSubKSP(self.pc, &n, &p) ) + CHKERR(PCFieldSplitSchurGetSubKSP(self.pc, &n, &p)) subksp = [ref_KSP(p[i]) for i from 0 <= i None: @@ -1561,7 +1590,7 @@ cdef class PC(Object): """ cdef PetscPCFieldSplitSchurFactType cval = ctype - CHKERR( PCFieldSplitSetSchurFactType(self.pc, cval) ) + CHKERR(PCFieldSplitSetSchurFactType(self.pc, cval)) def setFieldSplitSchurPreType( self, @@ -1586,7 +1615,7 @@ cdef class PC(Object): cdef PetscPCFieldSplitSchurPreType pval = ptype cdef PetscMat pmat = NULL if pre is not None: pmat = pre.mat - CHKERR( PCFieldSplitSetSchurPre(self.pc, pval, pmat) ) + CHKERR(PCFieldSplitSetSchurPre(self.pc, pval, pmat)) # --- COMPOSITE --- @@ -1606,7 +1635,7 @@ cdef class PC(Object): """ cdef PetscPCCompositeType cval = ctype - CHKERR( PCCompositeSetType(self.pc, cval) ) + CHKERR(PCCompositeSetType(self.pc, cval)) def getCompositePC(self, n: int) -> None: """Return a component of the composite `PC`. @@ -1625,8 +1654,8 @@ cdef class PC(Object): """ cdef PC pc = PC() cdef cn = asInt(n) - CHKERR( PCCompositeGetPC(self.pc, cn, &pc.pc) ) - CHKERR( PetscINCREF(pc.obj) ) + CHKERR(PCCompositeGetPC(self.pc, cn, &pc.pc)) + CHKERR(PetscINCREF(pc.obj)) return pc def addCompositePCType(self, pc_type: Type | str) -> None: @@ -1646,11 +1675,11 @@ cdef class PC(Object): """ cdef PetscPCType cval = NULL pc_type = str2bytes(pc_type, &cval) - CHKERR( PCCompositeAddPCType(self.pc, cval) ) + CHKERR(PCCompositeAddPCType(self.pc, cval)) # --- KSP --- - def getKSP(self): + def getKSP(self) -> KSP: """Return the `KSP` if the `PC` is `Type.KSP`. Not collective. @@ -1661,8 +1690,8 @@ cdef class PC(Object): """ cdef KSP ksp = KSP() - CHKERR( PCKSPGetKSP(self.pc, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(PCKSPGetKSP(self.pc, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp # --- MG --- @@ -1678,10 +1707,10 @@ cdef class PC(Object): """ cdef PetscPCMGType cval = PC_MG_ADDITIVE - CHKERR( PCMGGetType(self.pc, &cval) ) + CHKERR(PCMGGetType(self.pc, &cval)) return cval - def setMGType(self, mgtype: MGType): + def setMGType(self, mgtype: MGType) -> None: """Set the form of multigrid. Logically collective. @@ -1692,7 +1721,7 @@ cdef class PC(Object): """ cdef PetscPCMGType cval = mgtype - CHKERR( PCMGSetType(self.pc, cval) ) + CHKERR(PCMGSetType(self.pc, cval)) def getMGLevels(self) -> int: """Return the number of `MG` levels. @@ -1705,12 +1734,14 @@ cdef class PC(Object): """ cdef PetscInt levels = 0 - CHKERR( PCMGGetLevels(self.pc, &levels) ) + CHKERR(PCMGGetLevels(self.pc, &levels)) return toInt(levels) def setMGLevels(self, levels: int) -> None: """Set the number of `MG` levels. + Logically collective. + Parameters ---------- levels @@ -1722,7 +1753,7 @@ cdef class PC(Object): """ cdef PetscInt clevels = asInt(levels) - CHKERR( PCMGSetLevels(self.pc, clevels, NULL) ) + CHKERR(PCMGSetLevels(self.pc, clevels, NULL)) def getMGCoarseSolve(self) -> KSP: """Return the `KSP` used on the coarse grid. 
@@ -1735,8 +1766,8 @@ cdef class PC(Object): """ cdef KSP ksp = KSP() - CHKERR( PCMGGetCoarseSolve(self.pc, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(PCMGGetCoarseSolve(self.pc, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp def setMGInterpolation(self, level, Mat mat) -> None: @@ -1757,7 +1788,7 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( PCMGSetInterpolation(self.pc, clevel, mat.mat) ) + CHKERR(PCMGSetInterpolation(self.pc, clevel, mat.mat)) def getMGInterpolation(self, level: int) -> Mat: """Return the interpolation operator for the given level. @@ -1776,8 +1807,8 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) cdef Mat interpolation = Mat() - CHKERR( PCMGGetInterpolation(self.pc, clevel, &interpolation.mat) ) - CHKERR( PetscINCREF(interpolation.obj) ) + CHKERR(PCMGGetInterpolation(self.pc, clevel, &interpolation.mat)) + CHKERR(PetscINCREF(interpolation.obj)) return interpolation def setMGRestriction(self, level: int, Mat mat) -> None: @@ -1798,7 +1829,7 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( PCMGSetRestriction(self.pc, clevel, mat.mat) ) + CHKERR(PCMGSetRestriction(self.pc, clevel, mat.mat)) def getMGRestriction(self, level: int) -> Mat: """Return the restriction operator for the given level. @@ -1817,8 +1848,8 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) cdef Mat restriction = Mat() - CHKERR( PCMGGetRestriction(self.pc, clevel, &restriction.mat) ) - CHKERR( PetscINCREF(restriction.obj) ) + CHKERR(PCMGGetRestriction(self.pc, clevel, &restriction.mat)) + CHKERR(PetscINCREF(restriction.obj)) return restriction def setMGRScale(self, level: int, Vec rscale) -> None: @@ -1839,7 +1870,7 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( PCMGSetRScale(self.pc, clevel, rscale.vec) ) + CHKERR(PCMGSetRScale(self.pc, clevel, rscale.vec)) def getMGRScale(self, level: int) -> Vec: """Return the pointwise scaling for the restriction operator on the given level. @@ -1858,8 +1889,8 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) cdef Vec rscale = Vec() - CHKERR( PCMGGetRScale(self.pc, clevel, &rscale.vec) ) - CHKERR( PetscINCREF(rscale.obj) ) + CHKERR(PCMGGetRScale(self.pc, clevel, &rscale.vec)) + CHKERR(PetscINCREF(rscale.obj)) return rscale def getMGSmoother(self, level: int) -> KSP: @@ -1879,8 +1910,8 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) cdef KSP ksp = KSP() - CHKERR( PCMGGetSmoother(self.pc, clevel, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(PCMGGetSmoother(self.pc, clevel, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp def getMGSmootherDown(self, level: int) -> KSP: @@ -1900,8 +1931,8 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) cdef KSP ksp = KSP() - CHKERR( PCMGGetSmootherDown(self.pc, clevel, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(PCMGGetSmootherDown(self.pc, clevel, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp def getMGSmootherUp(self, level: int) -> KSP: @@ -1921,13 +1952,15 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) cdef KSP ksp = KSP() - CHKERR( PCMGGetSmootherUp(self.pc, clevel, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(PCMGGetSmootherUp(self.pc, clevel, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp def setMGCycleType(self, cycle_type: MGCycleType) -> None: """Set the type of cycles. + Logically collective. 
+ Parameters ---------- cycle_type @@ -1939,7 +1972,7 @@ cdef class PC(Object): """ cdef PetscPCMGCycleType ctype = cycle_type - CHKERR( PCMGSetCycleType(self.pc, ctype) ) + CHKERR(PCMGSetCycleType(self.pc, ctype)) def setMGCycleTypeOnLevel(self, level: int, cycle_type: MGCycleType) -> None: """Set the type of cycle on the given level. @@ -1960,7 +1993,7 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) cdef PetscPCMGCycleType ctype = cycle_type - CHKERR( PCMGSetCycleTypeOnLevel(self.pc, clevel, ctype) ) + CHKERR(PCMGSetCycleTypeOnLevel(self.pc, clevel, ctype)) def setMGRhs(self, level: int, Vec rhs) -> None: """Set the vector where the right-hand side is stored. @@ -1983,7 +2016,7 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( PCMGSetRhs(self.pc, clevel, rhs.vec) ) + CHKERR(PCMGSetRhs(self.pc, clevel, rhs.vec)) def setMGX(self, level: int, Vec x) -> None: """Set the vector where the solution is stored. @@ -2006,7 +2039,7 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( PCMGSetX(self.pc, clevel, x.vec) ) + CHKERR(PCMGSetX(self.pc, clevel, x.vec)) def setMGR(self, level: int, Vec r) -> None: """Set the vector where the residual is stored. @@ -2029,14 +2062,14 @@ cdef class PC(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( PCMGSetR(self.pc, clevel, r.vec) ) + CHKERR(PCMGSetR(self.pc, clevel, r.vec)) # --- BDDC --- def setBDDCLocalAdjacency(self, csr: CSRIndicesSpec) -> None: """Provide a custom connectivity graph for local dofs. - Not Collective. + Not collective. Parameters ---------- @@ -2056,12 +2089,12 @@ cdef class PC(Object): oj = iarray_i(oj, &nj, &j) if (i[0] != 0): raise ValueError("I[0] is %d, expected %d" % - (toInt(i[0]), toInt(0)) ) + (toInt(i[0]), toInt(0))) if (i[ni-1] != nj): raise ValueError("size(J) is %d, expected %d" % - (toInt(nj), toInt(i[ni-1])) ) + (toInt(nj), toInt(i[ni-1]))) - CHKERR( PCBDDCSetLocalAdjacencyGraph(self.pc, ni - 1, i, j, PETSC_COPY_VALUES) ) + CHKERR(PCBDDCSetLocalAdjacencyGraph(self.pc, ni - 1, i, j, PETSC_COPY_VALUES)) def setBDDCDivergenceMat(self, Mat div, trans: bool = False, IS l2l=None) -> None: """Set the linear operator representing ∫ div(u)•p dx. @@ -2087,7 +2120,7 @@ cdef class PC(Object): cdef PetscBool ptrans = trans cdef PetscIS pl2l = NULL if l2l is not None: pl2l = l2l.iset - CHKERR( PCBDDCSetDivergenceMat(self.pc, div.mat, ptrans, pl2l) ) + CHKERR(PCBDDCSetDivergenceMat(self.pc, div.mat, ptrans, pl2l)) def setBDDCDiscreteGradient( self, @@ -2123,7 +2156,7 @@ cdef class PC(Object): cdef PetscInt pfield = asInt(field) cdef PetscBool pgord = gord cdef PetscBool pconforming = conforming - CHKERR( PCBDDCSetDiscreteGradient(self.pc, G.mat, porder, pfield, pgord, pconforming) ) + CHKERR(PCBDDCSetDiscreteGradient(self.pc, G.mat, porder, pfield, pgord, pconforming)) def setBDDCChangeOfBasisMat(self, Mat T, interior: bool = False) -> None: """Set a user defined change of basis for degrees of freedom. @@ -2144,7 +2177,7 @@ cdef class PC(Object): """ cdef PetscBool pinterior = interior - CHKERR( PCBDDCSetChangeOfBasisMat(self.pc, T.mat, pinterior) ) + CHKERR(PCBDDCSetChangeOfBasisMat(self.pc, T.mat, pinterior)) def setBDDCPrimalVerticesIS(self, IS primv) -> None: """Set additional user defined primal vertices. 
@@ -2161,7 +2194,7 @@ cdef class PC(Object): petsc.PCBDDCSetPrimalVerticesIS """ - CHKERR( PCBDDCSetPrimalVerticesIS(self.pc, primv.iset) ) + CHKERR(PCBDDCSetPrimalVerticesIS(self.pc, primv.iset)) def setBDDCPrimalVerticesLocalIS(self, IS primv) -> None: """Set additional user defined primal vertices. @@ -2178,7 +2211,7 @@ cdef class PC(Object): petsc.PCBDDCSetPrimalVerticesLocalIS """ - CHKERR( PCBDDCSetPrimalVerticesLocalIS(self.pc, primv.iset) ) + CHKERR(PCBDDCSetPrimalVerticesLocalIS(self.pc, primv.iset)) def setBDDCCoarseningRatio(self, cratio: int) -> None: """Set the coarsening ratio used in the multilevel version. @@ -2196,7 +2229,7 @@ cdef class PC(Object): """ cdef PetscInt pcratio = asInt(cratio) - CHKERR( PCBDDCSetCoarseningRatio(self.pc, pcratio) ) + CHKERR(PCBDDCSetCoarseningRatio(self.pc, pcratio)) def setBDDCLevels(self, levels: int) -> None: """Set the maximum number of additional levels allowed. @@ -2214,7 +2247,7 @@ cdef class PC(Object): """ cdef PetscInt plevels = asInt(levels) - CHKERR( PCBDDCSetLevels(self.pc, plevels) ) + CHKERR(PCBDDCSetLevels(self.pc, plevels)) def setBDDCDirichletBoundaries(self, IS bndr) -> None: """Set the `IS` defining Dirichlet boundaries for the global problem. @@ -2231,7 +2264,7 @@ cdef class PC(Object): petsc.PCBDDCSetDirichletBoundaries """ - CHKERR( PCBDDCSetDirichletBoundaries(self.pc, bndr.iset) ) + CHKERR(PCBDDCSetDirichletBoundaries(self.pc, bndr.iset)) def setBDDCDirichletBoundariesLocal(self, IS bndr) -> None: """Set the `IS` defining Dirichlet boundaries in local ordering. @@ -2248,7 +2281,7 @@ cdef class PC(Object): setBDDCDirichletBoundaries, petsc.PCBDDCSetDirichletBoundariesLocal """ - CHKERR( PCBDDCSetDirichletBoundariesLocal(self.pc, bndr.iset) ) + CHKERR(PCBDDCSetDirichletBoundariesLocal(self.pc, bndr.iset)) def setBDDCNeumannBoundaries(self, IS bndr) -> None: """Set the `IS` defining Neumann boundaries for the global problem. @@ -2265,7 +2298,7 @@ cdef class PC(Object): petsc.PCBDDCSetNeumannBoundaries """ - CHKERR( PCBDDCSetNeumannBoundaries(self.pc, bndr.iset) ) + CHKERR(PCBDDCSetNeumannBoundaries(self.pc, bndr.iset)) def setBDDCNeumannBoundariesLocal(self, IS bndr) -> None: """Set the `IS` defining Neumann boundaries in local ordering. @@ -2282,7 +2315,7 @@ cdef class PC(Object): setBDDCNeumannBoundaries, petsc.PCBDDCSetNeumannBoundariesLocal """ - CHKERR( PCBDDCSetNeumannBoundariesLocal(self.pc, bndr.iset) ) + CHKERR(PCBDDCSetNeumannBoundariesLocal(self.pc, bndr.iset)) def setBDDCDofsSplitting(self, isfields: IS | Sequence[IS]) -> None: """Set the index set(s) defining fields of the global matrix. @@ -2302,12 +2335,11 @@ cdef class PC(Object): isfields = [isfields] if isinstance(isfields, IS) else list(isfields) cdef Py_ssize_t i, n = len(isfields) cdef PetscIS *cisfields = NULL - cdef object tmp - tmp = oarray_p(empty_p(n), NULL, &cisfields) + cdef object unused = oarray_p(empty_p(n), NULL, &cisfields) for i from 0 <= i < n: cisfields[i] = (isfields[i]).iset - CHKERR( PCBDDCSetDofsSplitting(self.pc, n, cisfields) ) + CHKERR(PCBDDCSetDofsSplitting(self.pc, n, cisfields)) - def setBDDCDofsSplittingLocal(self, isfields: IS | Sequence[IS]): + def setBDDCDofsSplittingLocal(self, isfields: IS | Sequence[IS]) -> None: """Set the index set(s) defining fields of the local subdomain matrix. Collective. 
@@ -2328,20 +2360,21 @@ cdef class PC(Object): isfields = [isfields] if isinstance(isfields, IS) else list(isfields) cdef Py_ssize_t i, n = len(isfields) cdef PetscIS *cisfields = NULL - cdef object tmp - tmp = oarray_p(empty_p(n), NULL, &cisfields) + cdef object unused = oarray_p(empty_p(n), NULL, &cisfields) for i from 0 <= i < n: cisfields[i] = (isfields[i]).iset - CHKERR( PCBDDCSetDofsSplittingLocal(self.pc, n, cisfields) ) + CHKERR(PCBDDCSetDofsSplittingLocal(self.pc, n, cisfields)) # --- Patch --- - def setPatchCellNumbering(self, Section sec not None): - CHKERR( PCPatchSetCellNumbering(self.pc, sec.sec) ) + def setPatchCellNumbering(self, Section sec) -> None: + """Set the cell numbering.""" + CHKERR(PCPatchSetCellNumbering(self.pc, sec.sec)) def setPatchDiscretisationInfo(self, dms, bs, cellNodeMaps, subspaceOffsets, ghostBcNodes, - globalBcNodes): + globalBcNodes) -> None: + """Set discretisation info.""" cdef PetscInt numSubSpaces = 0 cdef PetscInt numGhostBcs = 0, numGlobalBcs = 0 cdef PetscInt *nodesPerCell = NULL @@ -2358,9 +2391,9 @@ cdef class PC(Object): globalBcNodes = iarray_i(globalBcNodes, &numGlobalBcs, &cglobalBcNodes) subspaceOffsets = iarray_i(subspaceOffsets, NULL, &csubspaceOffsets) - CHKERR( PetscMalloc(numSubSpaces*sizeof(PetscInt), &nodesPerCell) ) - CHKERR( PetscMalloc(numSubSpaces*sizeof(PetscDM), &cdms) ) - CHKERR( PetscMalloc(numSubSpaces*sizeof(PetscInt*), &ccellNodeMaps) ) + CHKERR(PetscMalloc(numSubSpaces*sizeof(PetscInt), &nodesPerCell)) + CHKERR(PetscMalloc(numSubSpaces*sizeof(PetscDM), &cdms)) + CHKERR(PetscMalloc(numSubSpaces*sizeof(PetscInt*), &ccellNodeMaps)) for i in range(numSubSpaces): cdms[i] = (dms[i]).dm _, nodes = asarray(cellNodeMaps[i]).shape @@ -2368,44 +2401,49 @@ cdef class PC(Object): nodesPerCell[i] = asInt(nodes) # TODO: refactor on the PETSc side to take ISes? 
- CHKERR( PCPatchSetDiscretisationInfo(self.pc, numSubSpaces, - cdms, cbs, nodesPerCell, - ccellNodeMaps, csubspaceOffsets, - numGhostBcs, cghostBcNodes, - numGlobalBcs, cglobalBcNodes) ) - CHKERR( PetscFree(nodesPerCell) ) - CHKERR( PetscFree(cdms) ) - CHKERR( PetscFree(ccellNodeMaps) ) - - def setPatchComputeOperator(self, operator, args=None, kargs=None): + CHKERR(PCPatchSetDiscretisationInfo(self.pc, numSubSpaces, + cdms, cbs, nodesPerCell, + ccellNodeMaps, csubspaceOffsets, + numGhostBcs, cghostBcNodes, + numGlobalBcs, cglobalBcNodes)) + CHKERR(PetscFree(nodesPerCell)) + CHKERR(PetscFree(cdms)) + CHKERR(PetscFree(ccellNodeMaps)) + + def setPatchComputeOperator(self, operator, args=None, kargs=None) -> None: + """Set compute operator callbacks.""" if args is None: args = () if kargs is None: kargs = {} context = (operator, args, kargs) self.set_attr("__patch_compute_operator__", context) - CHKERR( PCPatchSetComputeOperator(self.pc, PCPatch_ComputeOperator, context) ) + CHKERR(PCPatchSetComputeOperator(self.pc, PCPatch_ComputeOperator, context)) - def setPatchComputeOperatorInteriorFacets(self, operator, args=None, kargs=None): + def setPatchComputeOperatorInteriorFacets(self, operator, args=None, kargs=None) -> None: + """Set compute operator callbacks.""" if args is None: args = () if kargs is None: kargs = {} context = (operator, args, kargs) self.set_attr("__patch_compute_operator_interior_facets__", context) - CHKERR( PCPatchSetComputeOperatorInteriorFacets(self.pc, PCPatch_ComputeOperatorInteriorFacets, context) ) + CHKERR(PCPatchSetComputeOperatorInteriorFacets(self.pc, PCPatch_ComputeOperatorInteriorFacets, context)) - def setPatchComputeFunction(self, function, args=None, kargs=None): + def setPatchComputeFunction(self, function, args=None, kargs=None) -> None: + """Set compute operator callbacks.""" if args is None: args = () if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr("__patch_compute_function__", context) - CHKERR( PCPatchSetComputeFunction(self.pc, PCPatch_ComputeFunction, context) ) + CHKERR(PCPatchSetComputeFunction(self.pc, PCPatch_ComputeFunction, context)) - def setPatchComputeFunctionInteriorFacets(self, function, args=None, kargs=None): + def setPatchComputeFunctionInteriorFacets(self, function, args=None, kargs=None) -> None: + """Set compute operator callbacks.""" if args is None: args = () if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr("__patch_compute_function_interior_facets__", context) - CHKERR( PCPatchSetComputeFunction(self.pc, PCPatch_ComputeFunctionInteriorFacets, context) ) + CHKERR(PCPatchSetComputeFunction(self.pc, PCPatch_ComputeFunctionInteriorFacets, context)) - def setPatchConstructType(self, typ, operator=None, args=None, kargs=None): + def setPatchConstructType(self, typ, operator=None, args=None, kargs=None) -> None: + """Set compute operator callbacks.""" if args is None: args = () if kargs is None: kargs = {} @@ -2416,13 +2454,15 @@ cdef class PC(Object): else: context = None self.set_attr("__patch_construction_operator__", context) - CHKERR( PCPatchSetConstructType(self.pc, typ, PCPatch_UserConstructOperator, context) ) + CHKERR(PCPatchSetConstructType(self.pc, typ, PCPatch_UserConstructOperator, context)) # --- HPDDM --- def setHPDDMAuxiliaryMat(self, IS uis, Mat uaux) -> None: """Set the auxiliary matrix used by the preconditioner. + Logically collective. 
+ Parameters ---------- uis @@ -2435,11 +2475,13 @@ cdef class PC(Object): petsc.PCHPDDMSetAuxiliaryMat """ - CHKERR( PCHPDDMSetAuxiliaryMat(self.pc, uis.iset, uaux.mat, NULL, NULL) ) + CHKERR(PCHPDDMSetAuxiliaryMat(self.pc, uis.iset, uaux.mat, NULL, NULL)) def setHPDDMRHSMat(self, Mat B) -> None: """Set the right-hand side matrix of the preconditioner. + Logically collective. + Parameters ---------- B @@ -2450,11 +2492,13 @@ cdef class PC(Object): petsc.PCHPDDMSetRHSMat """ - CHKERR( PCHPDDMSetRHSMat(self.pc, B.mat) ) + CHKERR(PCHPDDMSetRHSMat(self.pc, B.mat)) def setHPDDMHasNeumannMat(self, has: bool) -> None: """Set to indicate that the `Mat` passed to the `PC` is the local Neumann matrix. + Logically collective. + Parameters ---------- has @@ -2466,7 +2510,7 @@ cdef class PC(Object): """ cdef PetscBool phas = has - CHKERR( PCHPDDMHasNeumannMat(self.pc, phas) ) + CHKERR(PCHPDDMHasNeumannMat(self.pc, phas)) def setHPDDMCoarseCorrectionType(self, correction_type: HPDDMCoarseCorrectionType) -> None: """Set the coarse correction type. @@ -2484,35 +2528,41 @@ cdef class PC(Object): """ cdef PetscPCHPDDMCoarseCorrectionType ctype = correction_type - CHKERR( PCHPDDMSetCoarseCorrectionType(self.pc, ctype) ) + CHKERR(PCHPDDMSetCoarseCorrectionType(self.pc, ctype)) def getHPDDMCoarseCorrectionType(self) -> HPDDMCoarseCorrectionType: """Return the coarse correction type. + Not collective. + See Also -------- petsc.PCHPDDMGetCoarseCorrectionType """ cdef PetscPCHPDDMCoarseCorrectionType cval = PC_HPDDM_COARSE_CORRECTION_DEFLATED - CHKERR( PCHPDDMGetCoarseCorrectionType(self.pc, &cval) ) + CHKERR(PCHPDDMGetCoarseCorrectionType(self.pc, &cval)) return cval def getHPDDMSTShareSubKSP(self) -> bool: """Return true if the `KSP` in SLEPc ``ST`` and the subdomain solver is shared. + Not collective. + See Also -------- petsc.PCHPDDMGetSTShareSubKSP """ cdef PetscBool cval = PETSC_FALSE - CHKERR( PCHPDDMGetSTShareSubKSP(self.pc, &cval) ) + CHKERR(PCHPDDMGetSTShareSubKSP(self.pc, &cval)) return toBool(cval) - def setHPDDMDeflationMat(self, IS uis, Mat U): + def setHPDDMDeflationMat(self, IS uis, Mat U) -> None: """Set the deflation space used to assemble a coarse operator. + Logically collective. + Parameters ---------- uis @@ -2525,13 +2575,15 @@ cdef class PC(Object): petsc.PCHPDDMSetDeflationMat """ - CHKERR( PCHPDDMSetDeflationMat(self.pc, uis.iset, U.mat) ) + CHKERR(PCHPDDMSetDeflationMat(self.pc, uis.iset, U.mat)) # --- SPAI --- def setSPAIEpsilon(self, val: float) -> None: """Set the tolerance for the preconditioner. + Logically collective. + Parameters ---------- val @@ -2543,11 +2595,13 @@ cdef class PC(Object): """ cdef PetscReal cval = asReal(val) - CHKERR( PCSPAISetEpsilon(self.pc, cval) ) + CHKERR(PCSPAISetEpsilon(self.pc, cval)) def setSPAINBSteps(self, nbsteps: int) -> None: """Set the maximum number of improvement steps per row. + Logically collective. + Parameters ---------- nbsteps @@ -2559,11 +2613,13 @@ cdef class PC(Object): """ cdef PetscInt cval = asInt(nbsteps) - CHKERR( PCSPAISetNBSteps(self.pc, cval) ) + CHKERR(PCSPAISetNBSteps(self.pc, cval)) def setSPAIMax(self, maxval: int) -> None: """Set the size of working buffers in the preconditioner. + Logically collective. + Parameters ---------- maxval @@ -2576,11 +2632,13 @@ cdef class PC(Object): """ cdef PetscInt cval = asInt(maxval) - CHKERR( PCSPAISetMax(self.pc, cval) ) + CHKERR(PCSPAISetMax(self.pc, cval)) def setSPAIMaxNew(self, maxval: int) -> None: """Set the maximum number of new non-zero candidates per step. 
+ Logically collective. + Parameters ---------- maxval @@ -2592,11 +2650,13 @@ cdef class PC(Object): """ cdef PetscInt cval = asInt(maxval) - CHKERR( PCSPAISetMaxNew(self.pc, cval) ) + CHKERR(PCSPAISetMaxNew(self.pc, cval)) def setSPAIBlockSize(self, n: int) -> None: """Set the block size of the preconditioner. + Logically collective. + Parameters ---------- n @@ -2608,11 +2668,13 @@ cdef class PC(Object): """ cdef PetscInt cval = asInt(n) - CHKERR( PCSPAISetBlockSize(self.pc, cval) ) + CHKERR(PCSPAISetBlockSize(self.pc, cval)) def setSPAICacheSize(self, size: int) -> None: """Set the cache size. + Logically collective. + Parameters ---------- size @@ -2624,11 +2686,13 @@ cdef class PC(Object): """ cdef PetscInt cval = asInt(size) - CHKERR( PCSPAISetCacheSize(self.pc, cval) ) + CHKERR(PCSPAISetCacheSize(self.pc, cval)) def setSPAIVerbose(self, level: int) -> None: """Set the verbosity level. + Logically collective. + Parameters ---------- level @@ -2640,11 +2704,13 @@ cdef class PC(Object): """ cdef PetscInt cval = asInt(level) - CHKERR( PCSPAISetVerbose(self.pc, cval) ) + CHKERR(PCSPAISetVerbose(self.pc, cval)) def setSPAISp(self, sym: int) -> None: """Set to specify a symmetric sparsity pattern. + Logically collective. + Parameters ---------- sym @@ -2656,7 +2722,7 @@ cdef class PC(Object): """ cdef PetscInt cval = asInt(sym) - CHKERR( PCSPAISetSp(self.pc, cval) ) + CHKERR(PCSPAISetSp(self.pc, cval)) # --- DEFLATION --- @@ -2680,7 +2746,7 @@ cdef class PC(Object): """ cdef PetscBool cflg = asBool(flg) - CHKERR( PCDeflationSetInitOnly(self.pc, cflg) ) + CHKERR(PCDeflationSetInitOnly(self.pc, cflg)) def setDeflationLevels(self, levels: int) -> None: """Set the maximum level of deflation nesting. @@ -2698,7 +2764,7 @@ cdef class PC(Object): """ cdef PetscInt clevels = asInt(levels) - CHKERR( PCDeflationSetLevels(self.pc, clevels) ) + CHKERR(PCDeflationSetLevels(self.pc, clevels)) def setDeflationReductionFactor(self, red: int) -> None: """Set the reduction factor for the preconditioner. @@ -2716,7 +2782,7 @@ cdef class PC(Object): """ cdef PetscInt cred = asInt(red) - CHKERR( PCDeflationSetReductionFactor(self.pc, cred) ) + CHKERR(PCDeflationSetReductionFactor(self.pc, cred)) def setDeflationCorrectionFactor(self, fact: float) -> None: """Set the coarse problem correction factor. @@ -2734,7 +2800,7 @@ cdef class PC(Object): """ cdef PetscScalar cfact = asScalar(fact) - CHKERR( PCDeflationSetCorrectionFactor(self.pc, fact) ) + CHKERR(PCDeflationSetCorrectionFactor(self.pc, cfact)) def setDeflationSpaceToCompute(self, space_type: DeflationSpaceType, size: int) -> None: """Set the deflation space type. @@ -2755,7 +2821,7 @@ cdef class PC(Object): """ cdef PetscInt csize = asInt(size) cdef PetscPCDeflationSpaceType ctype = space_type - CHKERR( PCDeflationSetSpaceToCompute(self.pc, space_type, csize) ) + CHKERR(PCDeflationSetSpaceToCompute(self.pc, ctype, csize)) def setDeflationSpace(self, Mat W, transpose: bool) -> None: """Set the deflation space matrix or its (Hermitian) transpose. @@ -2776,7 +2842,7 @@ cdef class PC(Object): """ cdef PetscBool ctranspose = asBool(transpose) - CHKERR( PCDeflationSetSpace(self.pc, W.mat, ctranspose) ) + CHKERR(PCDeflationSetSpace(self.pc, W.mat, ctranspose)) def setDeflationProjectionNullSpaceMat(self, Mat mat) -> None: """Set the projection null space matrix. 
@@ -2793,7 +2859,7 @@ cdef class PC(Object): petsc.PCDeflationSetProjectionNullSpaceMat """ - CHKERR( PCDeflationSetProjectionNullSpaceMat(self.pc, mat.mat) ) + CHKERR(PCDeflationSetProjectionNullSpaceMat(self.pc, mat.mat)) def setDeflationCoarseMat(self, Mat mat) -> None: """Set the coarse problem matrix. @@ -2810,7 +2876,7 @@ cdef class PC(Object): petsc.PCDeflationSetCoarseMat """ - CHKERR( PCDeflationSetCoarseMat(self.pc, mat.mat) ) + CHKERR(PCDeflationSetCoarseMat(self.pc, mat.mat)) def getDeflationCoarseKSP(self) -> KSP: """Return the coarse problem `KSP`. @@ -2823,21 +2889,23 @@ cdef class PC(Object): """ cdef KSP ksp = KSP() - CHKERR( PCDeflationGetCoarseKSP(self.pc, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(PCDeflationGetCoarseKSP(self.pc, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp def getDeflationPC(self) -> PC: """Return the additional preconditioner. + Not collective. + See Also -------- petsc.PCDeflationGetPC """ cdef PC apc = PC() - CHKERR( PCDeflationGetPC(self.pc, &apc.pc) ) - CHKERR( PetscINCREF(apc.obj) ) + CHKERR(PCDeflationGetPC(self.pc, &apc.pc)) + CHKERR(PetscINCREF(apc.obj)) return apc # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/PETSc.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/PETSc.pyx index dbb727c8302..0ca0007c308 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/PETSc.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/PETSc.pyx @@ -28,13 +28,13 @@ cdef extern from * nogil: # -------------------------------------------------------------------- cdef inline object bytes2str(const char p[]): - if p == NULL: - return None - cdef bytes s = p - if isinstance(s, str): - return s - else: - return s.decode() + if p == NULL: + return None + cdef bytes s = p + if isinstance(s, str): + return s + else: + return s.decode() cdef inline object str2bytes(object s, const char *p[]): if s is None: @@ -46,14 +46,14 @@ cdef inline object str2bytes(object s, const char *p[]): return s cdef inline object S_(const char p[]): - if p == NULL: return None - cdef object s = p - return s if isinstance(s, str) else s.decode() + if p == NULL: return None + cdef object s = p + return s if isinstance(s, str) else s.decode() # -------------------------------------------------------------------- -# Vile hack for raising a exception and not contaminating traceback +# Vile hack for raising an exception and not contaminating traceback cdef extern from *: void PyErr_SetObject(object, object) @@ -94,7 +94,7 @@ cdef inline int SETERRMPI(int ierr) except -1 nogil: cdef int result_len = sizeof(mpi_err_str) memset(mpi_err_str, 0, result_len) - MPI_Error_string(ierr, mpi_err_str, &result_len); + MPI_Error_string(ierr, mpi_err_str, &result_len) result_len cdef char error_str[MPI_MAX_ERROR_STRING+64] @@ -106,7 +106,7 @@ cdef inline int SETERRMPI(int ierr) except -1 nogil: cdef inline PetscErrorCode CHKERRMPI(int ierr) except PETSC_ERR_PYTHON nogil: if ierr == MPI_SUCCESS: - return PETSC_SUCCESS + return PETSC_SUCCESS SETERRMPI(ierr) return PETSC_ERR_PYTHON @@ -125,7 +125,7 @@ cdef extern from "": PetscScalar PyPetscScalar_AsPetscScalar(object) except? 
-1.0 cdef extern from "": - int PyPetscBuffer_FillInfo(Py_buffer*,void*,PetscInt,char,int,int) except -1 + int PyPetscBuffer_FillInfo(Py_buffer*, void*, PetscInt, char, int, int) except -1 void PyPetscBuffer_Release(Py_buffer*) cdef inline object toBool(PetscBool value): @@ -260,11 +260,11 @@ include "libpetsc4py.pyx" cdef extern from "Python.h": int Py_IsInitialized() nogil - int PyList_Insert(object,Py_ssize_t,object) except -1 - int PyList_Append(object,object) except -1 + int PyList_Insert(object, Py_ssize_t, object) except -1 + int PyList_Append(object, object) except -1 cdef extern from * nogil: - PetscErrorCode PetscTBEH(MPI_Comm,int,char*,char*,int,PetscErrorType,char*,void*) + PetscErrorCode PetscTBEH(MPI_Comm, int, char*, char*, int, PetscErrorType, char*, void*) cdef object tracebacklist = [] @@ -328,7 +328,7 @@ cdef PetscErrorCode PetscPythonErrorHandler( cdef extern from "" nogil: void* malloc(size_t) - void* realloc (void*,size_t) + void* realloc (void*, size_t) void free(void*) cdef extern from "" nogil: @@ -336,8 +336,8 @@ cdef extern from "" nogil: pass cdef extern from "" nogil: - void* memset(void*,int,size_t) - void* memcpy(void*,void*,size_t) + void* memset(void*, int, size_t) + void* memcpy(void*, void*, size_t) char* strdup(char*) cdef extern from "" nogil: @@ -347,7 +347,7 @@ cdef extern from "" nogil: cdef extern from "Python.h": int Py_AtExit(void (*)() noexcept nogil) - void PySys_WriteStderr(char*,...) + void PySys_WriteStderr(char*, ...) cdef extern from * nogil: """ @@ -374,7 +374,7 @@ cdef int getinitargs(object args, int *argc, char **argv[]) except -1: v[i] = strdup(args[i]) if v[i] == NULL: raise MemoryError - except: + except Exception: delinitargs(&c, &v); raise argc[0] = c; argv[0] = v return 0 @@ -383,7 +383,7 @@ cdef void delinitargs(int *argc, char **argv[]) noexcept nogil: # dallocate command line arguments cdef int i, c = argc[0] cdef char** v = argv[0] - argc[0] = 0; argv[0] = NULL; + argc[0] = 0; argv[0] = NULL if c >= 0 and v != NULL: for 0 <= i < c: if v[i] != NULL: free(v[i]) @@ -419,9 +419,9 @@ cdef void finalize() noexcept nogil: # -------------------------------------------------------------------- cdef extern from *: - PetscErrorCode (*PetscVFPrintf)(FILE*,const char*,va_list) except PETSC_ERR_PYTHON nogil + PetscErrorCode (*PetscVFPrintf)(FILE*, const char*, va_list) except PETSC_ERR_PYTHON nogil -cdef PetscErrorCode (*prevfprintf)(FILE*,const char*,va_list) except PETSC_ERR_PYTHON nogil +cdef PetscErrorCode (*prevfprintf)(FILE*, const char*, va_list) except PETSC_ERR_PYTHON nogil prevfprintf = NULL cdef PetscErrorCode PetscVFPrintf_PythonStdStream( @@ -430,21 +430,21 @@ cdef PetscErrorCode PetscVFPrintf_PythonStdStream( import sys cdef char cstring[8192] cdef size_t stringlen = sizeof(cstring) - cdef size_t final_pos + cdef size_t final_pos = 0 if (fd == PETSC_STDOUT) and not (sys.stdout == sys.__stdout__): - CHKERR( PetscVSNPrintf(&cstring[0], stringlen, fmt, &final_pos,ap)) + CHKERR(PetscVSNPrintf(&cstring[0], stringlen, fmt, &final_pos, ap)) if final_pos > 0 and cstring[final_pos-1] == '\x00': final_pos -= 1 ustring = cstring[:final_pos].decode('UTF-8') sys.stdout.write(ustring) elif (fd == PETSC_STDERR) and not (sys.stderr == sys.__stderr__): - CHKERR( PetscVSNPrintf(&cstring[0], stringlen, fmt, &final_pos,ap)) + CHKERR(PetscVSNPrintf(&cstring[0], stringlen, fmt, &final_pos, ap)) if final_pos > 0 and cstring[final_pos-1] == '\x00': final_pos -= 1 ustring = cstring[:final_pos].decode('UTF-8') sys.stderr.write(ustring) else: - CHKERR( 
PetscVFPrintfDefault(fd, fmt, ap) ) + CHKERR(PetscVFPrintfDefault(fd, fmt, ap)) return PETSC_SUCCESS cdef int _push_vfprintf( @@ -471,11 +471,11 @@ cdef int initialize(object args, object comm) except -1: global PETSC_COMM_WORLD PETSC_COMM_WORLD = def_Comm(comm, PETSC_COMM_WORLD) # initialize PETSc - CHKERR( PetscInitialize(&PyPetsc_Argc, &PyPetsc_Argv, NULL, NULL) ) + CHKERR(PetscInitialize(&PyPetsc_Argc, &PyPetsc_Argv, NULL, NULL)) # install Python error handler cdef PetscErrorHandlerFunction handler = NULL handler = PetscPythonErrorHandler - CHKERR( PetscPushErrorHandler(handler, NULL) ) + CHKERR(PetscPushErrorHandler(handler, NULL)) # redirect PETSc std streams import sys if (sys.stdout != sys.__stdout__) or (sys.stderr != sys.__stderr__): @@ -535,11 +535,11 @@ cdef int register() except -1: if registercalled: return 0 registercalled = True # register citation - CHKERR( PetscCitationsRegister(citation, NULL) ) + CHKERR(PetscCitationsRegister(citation, NULL)) # make sure all PETSc packages are initialized - CHKERR( PetscInitializePackageAll() ) + CHKERR(PetscInitializePackageAll()) # register custom implementations - CHKERR( PetscPythonRegisterAll() ) + CHKERR(PetscPythonRegisterAll()) # register Python types PyPetscType_Register(PETSC_OBJECT_CLASSID, Object) PyPetscType_Register(PETSC_VIEWER_CLASSID, Viewer) @@ -571,6 +571,7 @@ cdef int register() except -1: # -------------------------------------------------------------------- + def _initialize(args=None, comm=None): import atexit global tracebacklist @@ -590,10 +591,12 @@ def _initialize(args=None, comm=None): # Register finalizer atexit.register(_pre_finalize) + def _pre_finalize(): # Called while the Python interpreter is still running garbage_cleanup() + def _finalize(): finalize() # @@ -616,12 +619,15 @@ def _finalize(): global citations_registry citations_registry.clear() + def _push_python_vfprintf(): _push_vfprintf(&PetscVFPrintf_PythonStdStream) + def _pop_python_vfprintf(): _pop_vfprintf() + def _stdout_is_stderr(): global PETSC_STDOUT, PETSC_STDERR return PETSC_STDOUT == PETSC_STDERR diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Partitioner.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Partitioner.pyx index a9b16fca885..44d4eb23625 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Partitioner.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Partitioner.pyx @@ -1,6 +1,7 @@ # -------------------------------------------------------------------- class PartitionerType(object): + """The partitioner types.""" PARMETIS = S_(PETSCPARTITIONERPARMETIS) PTSCOTCH = S_(PETSCPARTITIONERPTSCOTCH) CHACO = S_(PETSCPARTITIONERCHACO) @@ -11,6 +12,7 @@ class PartitionerType(object): # -------------------------------------------------------------------- + cdef class Partitioner(Object): """A graph partitioner.""" @@ -37,7 +39,7 @@ cdef class Partitioner(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscPartitionerView(self.part, vwr) ) + CHKERR(PetscPartitionerView(self.part, vwr)) def destroy(self) -> Self: """Destroy the partitioner object. 
@@ -49,7 +51,7 @@ cdef class Partitioner(Object): petsc.PetscPartitionerDestroy """ - CHKERR( PetscPartitionerDestroy(&self.part) ) + CHKERR(PetscPartitionerDestroy(&self.part)) return self def create(self, comm: Comm | None = None) -> Self: @@ -71,8 +73,8 @@ cdef class Partitioner(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscPartitioner newpart = NULL - CHKERR( PetscPartitionerCreate(ccomm, &newpart) ) - CHKERR( PetscCLEAR(self.obj) ); self.part = newpart + CHKERR(PetscPartitionerCreate(ccomm, &newpart)) + CHKERR(PetscCLEAR(self.obj)); self.part = newpart return self def setType(self, part_type: Type | str) -> None: @@ -92,7 +94,7 @@ cdef class Partitioner(Object): """ cdef PetscPartitionerType cval = NULL part_type = str2bytes(part_type, &cval) - CHKERR( PetscPartitionerSetType(self.part, cval) ) + CHKERR(PetscPartitionerSetType(self.part, cval)) def getType(self) -> Type: """Return the partitioner type. @@ -105,7 +107,7 @@ cdef class Partitioner(Object): """ cdef PetscPartitionerType cval = NULL - CHKERR( PetscPartitionerGetType(self.part, &cval) ) + CHKERR(PetscPartitionerGetType(self.part, &cval)) return bytes2str(cval) def setFromOptions(self) -> None: @@ -118,7 +120,7 @@ cdef class Partitioner(Object): petsc_options, petsc.PetscPartitionerSetFromOptions """ - CHKERR( PetscPartitionerSetFromOptions(self.part) ) + CHKERR(PetscPartitionerSetFromOptions(self.part)) def setUp(self) -> None: """Construct data structures for the partitioner. @@ -130,7 +132,7 @@ cdef class Partitioner(Object): petsc.PetscPartitionerSetUp """ - CHKERR( PetscPartitionerSetUp(self.part) ) + CHKERR(PetscPartitionerSetUp(self.part)) def reset(self) -> None: """Reset data structures of the partitioner. @@ -142,14 +144,13 @@ cdef class Partitioner(Object): petsc.PetscPartitionerReset """ - CHKERR( PetscPartitionerReset(self.part) ) + CHKERR(PetscPartitionerReset(self.part)) def setShellPartition( self, numProcs: int, sizes: Sequence[int] | None = None, - points: Sequence[int] | None = None, - ) -> None: + points: Sequence[int] | None = None) -> None: """Set a custom partition for a mesh. Collective. @@ -181,8 +182,8 @@ cdef class Partitioner(Object): raise ValueError("Must provide both sizes and points arrays") if points is not None: points = iarray_i(points, NULL, &cpoints) - CHKERR( PetscPartitionerShellSetPartition(self.part, cnumProcs, - csizes, cpoints) ) + CHKERR(PetscPartitionerShellSetPartition(self.part, cnumProcs, + csizes, cpoints)) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Random.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Random.pyx index d7166a7d46e..2dc5f274f61 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Random.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Random.pyx @@ -10,6 +10,7 @@ class RandomType(object): # -------------------------------------------------------------------- + cdef class Random(Object): """The random number generator object. @@ -55,7 +56,7 @@ cdef class Random(Object): assert self.obj != NULL cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscRandomView(self.rnd, vwr) ) + CHKERR(PetscRandomView(self.rnd, vwr)) def destroy(self) -> Self: """Destroy the random number generator object. 
@@ -67,7 +68,7 @@ cdef class Random(Object): petsc.PetscRandomDestroy """ - CHKERR( PetscRandomDestroy(&self.rnd) ) + CHKERR(PetscRandomDestroy(&self.rnd)) return self def create(self, comm: Comm | None = None) -> Self: @@ -86,7 +87,7 @@ cdef class Random(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) - CHKERR( PetscRandomCreate(ccomm, &self.rnd) ) + CHKERR(PetscRandomCreate(ccomm, &self.rnd)) return self def setType(self, rnd_type: Random.Type | str) -> None: @@ -106,7 +107,7 @@ cdef class Random(Object): """ cdef PetscRandomType cval = NULL rnd_type = str2bytes(rnd_type, &cval) - CHKERR( PetscRandomSetType(self.rnd, cval) ) + CHKERR(PetscRandomSetType(self.rnd, cval)) def getType(self) -> str: """Return the type of the random number generator object. @@ -119,7 +120,7 @@ cdef class Random(Object): """ cdef PetscRandomType cval = NULL - CHKERR( PetscRandomGetType(self.rnd, &cval) ) + CHKERR(PetscRandomGetType(self.rnd, &cval)) return bytes2str(cval) def setFromOptions(self) -> None: @@ -132,7 +133,7 @@ cdef class Random(Object): petsc_options, petsc.PetscRandomSetFromOptions """ - CHKERR( PetscRandomSetFromOptions(self.rnd) ) + CHKERR(PetscRandomSetFromOptions(self.rnd)) def getValue(self) -> Scalar: """Generate a scalar random number. @@ -145,7 +146,7 @@ cdef class Random(Object): """ cdef PetscScalar sval = 0 - CHKERR( PetscRandomGetValue(self.rnd, &sval) ) + CHKERR(PetscRandomGetValue(self.rnd, &sval)) return toScalar(sval) def getValueReal(self) -> float: @@ -159,7 +160,7 @@ cdef class Random(Object): """ cdef PetscReal rval = 0 - CHKERR( PetscRandomGetValueReal(self.rnd, &rval) ) + CHKERR(PetscRandomGetValueReal(self.rnd, &rval)) return toReal(rval) def getSeed(self) -> int: @@ -173,7 +174,7 @@ cdef class Random(Object): """ cdef unsigned long seed = 0 - CHKERR( PetscRandomGetSeed(self.rnd, &seed) ) + CHKERR(PetscRandomGetSeed(self.rnd, &seed)) return seed def setSeed(self, seed: int | None = None) -> None: @@ -192,8 +193,8 @@ cdef class Random(Object): """ if seed is not None: - CHKERR( PetscRandomSetSeed(self.rnd, seed) ) - CHKERR( PetscRandomSeed(self.rnd) ) + CHKERR(PetscRandomSetSeed(self.rnd, seed)) + CHKERR(PetscRandomSeed(self.rnd)) def getInterval(self) -> tuple[Scalar, Scalar]: """Return the interval containing the random numbers generated. 
@@ -207,7 +208,7 @@ cdef class Random(Object): """ cdef PetscScalar sval1 = 0 cdef PetscScalar sval2 = 1 - CHKERR( PetscRandomGetInterval(self.rnd, &sval1, &sval2) ) + CHKERR(PetscRandomGetInterval(self.rnd, &sval1, &sval2)) return (toScalar(sval1), toScalar(sval2)) def setInterval(self, interval: tuple[Scalar, Scalar]) -> None: @@ -225,7 +226,7 @@ cdef class Random(Object): low, high = interval sval1 = asScalar(low) sval2 = asScalar(high) - CHKERR( PetscRandomSetInterval(self.rnd, sval1, sval2) ) + CHKERR(PetscRandomSetInterval(self.rnd, sval1, sval2)) # @@ -233,6 +234,7 @@ cdef class Random(Object): """The seed of the random number generator.""" def __get__(self) -> int: return self.getSeed() + def __set__(self, value: int | None) -> None: self.setSeed(value) @@ -240,6 +242,7 @@ cdef class Random(Object): """The interval of the generated random numbers.""" def __get__(self) -> tuple[Scalar, Scalar]: return self.getInterval() + def __set__(self, value: tuple[Scalar, Scalar]): self.setInterval(value) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/SF.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/SF.pyx index 300a4bf27ba..79533f8e566 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/SF.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/SF.pyx @@ -1,6 +1,7 @@ # -------------------------------------------------------------------- class SFType(object): + """The star forest types.""" BASIC = S_(PETSCSFBASIC) NEIGHBOR = S_(PETSCSFNEIGHBOR) ALLGATHERV = S_(PETSCSFALLGATHERV) @@ -12,6 +13,7 @@ class SFType(object): # -------------------------------------------------------------------- + cdef class SF(Object): """Star Forest object for communication. @@ -26,10 +28,6 @@ cdef class SF(Object): self.obj = &self.sf self.sf = NULL - def __dealloc__(self): - CHKERR( PetscSFDestroy(&self.sf) ) - self.sf = NULL - def view(self, Viewer viewer=None) -> None: """View a star forest. @@ -47,7 +45,7 @@ cdef class SF(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscSFView(self.sf, vwr) ) + CHKERR(PetscSFView(self.sf, vwr)) def destroy(self) -> Self: """Destroy the star forest. @@ -59,7 +57,7 @@ cdef class SF(Object): petsc.PetscSFDestroy """ - CHKERR( PetscSFDestroy(&self.sf) ) + CHKERR(PetscSFDestroy(&self.sf)) return self def create(self, comm: Comm | None = None) -> Self: @@ -79,8 +77,8 @@ cdef class SF(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscSF newsf = NULL - CHKERR( PetscSFCreate(ccomm, &newsf) ) - CHKERR( PetscCLEAR(self.obj) ); self.sf = newsf + CHKERR(PetscSFCreate(ccomm, &newsf)) + CHKERR(PetscCLEAR(self.obj)); self.sf = newsf return self def setType(self, sf_type: Type | str) -> None: @@ -100,7 +98,7 @@ cdef class SF(Object): """ cdef PetscSFType cval = NULL sf_type = str2bytes(sf_type, &cval) - CHKERR( PetscSFSetType(self.sf, cval) ) + CHKERR(PetscSFSetType(self.sf, cval)) def getType(self) -> str: """Return the type name of the star forest. @@ -113,7 +111,7 @@ cdef class SF(Object): """ cdef PetscSFType cval = NULL - CHKERR( PetscSFGetType(self.sf, &cval) ) + CHKERR(PetscSFGetType(self.sf, &cval)) return bytes2str(cval) def setFromOptions(self) -> None: @@ -126,7 +124,7 @@ cdef class SF(Object): petsc_options, petsc.PetscSFSetFromOptions """ - CHKERR( PetscSFSetFromOptions(self.sf) ) + CHKERR(PetscSFSetFromOptions(self.sf)) def setUp(self) -> None: """Set up communication structures. 
@@ -138,7 +136,7 @@ cdef class SF(Object): petsc.PetscSFSetUp """ - CHKERR( PetscSFSetUp(self.sf) ) + CHKERR(PetscSFSetUp(self.sf)) def reset(self) -> None: """Reset a star forest so that different sizes or neighbors can be used. @@ -150,7 +148,7 @@ cdef class SF(Object): petsc.PetscSFReset """ - CHKERR( PetscSFReset(self.sf) ) + CHKERR(PetscSFReset(self.sf)) # @@ -180,7 +178,7 @@ cdef class SF(Object): cdef PetscInt nroots = 0, nleaves = 0 cdef const PetscInt *ilocal = NULL cdef const PetscSFNode *iremote = NULL - CHKERR( PetscSFGetGraph(self.sf, &nroots, &nleaves, &ilocal, &iremote) ) + CHKERR(PetscSFGetGraph(self.sf, &nroots, &nleaves, &ilocal, &iremote)) if ilocal == NULL: local = arange(0, nleaves, 1) else: @@ -226,7 +224,7 @@ cdef class SF(Object): else: assert nremote % 2 == 0 nleaves = nremote // 2 - CHKERR( PetscSFSetGraph(self.sf, cnroots, nleaves, ilocal, PETSC_COPY_VALUES, iremote, PETSC_COPY_VALUES) ) + CHKERR(PetscSFSetGraph(self.sf, cnroots, nleaves, ilocal, PETSC_COPY_VALUES, iremote, PETSC_COPY_VALUES)) def setRankOrder(self, flag: bool) -> None: """Sort multi-points for gathers and scatters by rank order. @@ -244,7 +242,7 @@ cdef class SF(Object): """ cdef PetscBool bval = asBool(flag) - CHKERR( PetscSFSetRankOrder(self.sf, bval) ) + CHKERR(PetscSFSetRankOrder(self.sf, bval)) def getMulti(self) -> SF: """Return the inner SF implementing gathers and scatters. @@ -257,8 +255,8 @@ cdef class SF(Object): """ cdef SF sf = SF() - CHKERR( PetscSFGetMultiSF(self.sf, &sf.sf) ) - CHKERR( PetscINCREF(sf.obj) ) + CHKERR(PetscSFGetMultiSF(self.sf, &sf.sf)) + CHKERR(PetscINCREF(sf.obj)) return sf def createInverse(self) -> SF: @@ -275,7 +273,7 @@ cdef class SF(Object): """ cdef SF sf = SF() - CHKERR( PetscSFCreateInverseSF(self.sf, &sf.sf) ) + CHKERR(PetscSFCreateInverseSF(self.sf, &sf.sf)) return sf def computeDegree(self) -> ArrayInt: @@ -289,10 +287,10 @@ cdef class SF(Object): """ cdef const PetscInt *cdegree = NULL - cdef PetscInt nroots - CHKERR( PetscSFComputeDegreeBegin(self.sf, &cdegree) ) - CHKERR( PetscSFComputeDegreeEnd(self.sf, &cdegree) ) - CHKERR( PetscSFGetGraph(self.sf, &nroots, NULL, NULL, NULL) ) + cdef PetscInt nroots = 0 + CHKERR(PetscSFComputeDegreeBegin(self.sf, &cdegree)) + CHKERR(PetscSFComputeDegreeEnd(self.sf, &cdegree)) + CHKERR(PetscSFGetGraph(self.sf, &nroots, NULL, NULL, NULL)) degree = array_i(nroots, cdegree) return degree @@ -317,7 +315,7 @@ cdef class SF(Object): cdef PetscInt *cselected = NULL selected = iarray_i(selected, &nroots, &cselected) cdef SF sf = SF() - CHKERR( PetscSFCreateEmbeddedRootSF(self.sf, nroots, cselected, &sf.sf) ) + CHKERR(PetscSFCreateEmbeddedRootSF(self.sf, nroots, cselected, &sf.sf)) return sf def createEmbeddedLeafSF(self, selected: Sequence[int]) -> SF: @@ -341,7 +339,7 @@ cdef class SF(Object): cdef PetscInt *cselected = NULL selected = iarray_i(selected, &nleaves, &cselected) cdef SF sf = SF() - CHKERR( PetscSFCreateEmbeddedLeafSF(self.sf, nleaves, cselected, &sf.sf) ) + CHKERR(PetscSFCreateEmbeddedLeafSF(self.sf, nleaves, cselected, &sf.sf)) return sf def createSectionSF(self, Section rootSection, remoteOffsets: Sequence[int] | None, Section leafSection) -> SF: @@ -373,8 +371,8 @@ cdef class SF(Object): cdef PetscInt *cremoteOffsets = NULL if remoteOffsets is not None: remoteOffsets = iarray_i(remoteOffsets, &noffsets, &cremoteOffsets) - CHKERR( PetscSFCreateSectionSF(self.sf, rootSection.sec, cremoteOffsets, - leafSection.sec, &sectionSF.sf) ) + CHKERR(PetscSFCreateSectionSF(self.sf, rootSection.sec, cremoteOffsets, +
leafSection.sec, &sectionSF.sf)) return sectionSF def distributeSection(self, Section rootSection, Section leafSection=None) -> tuple[ArrayInt, Section]: @@ -396,20 +394,20 @@ cdef class SF(Object): petsc.PetscSFDistributeSection """ - cdef PetscInt lpStart - cdef PetscInt lpEnd + cdef PetscInt lpStart = 0 + cdef PetscInt lpEnd = 0 cdef PetscInt *cremoteOffsets = NULL cdef ndarray remoteOffsets cdef MPI_Comm ccomm = def_Comm(self.comm, PETSC_COMM_DEFAULT) if leafSection is None: leafSection = Section() if leafSection.sec == NULL: - CHKERR( PetscSectionCreate(ccomm, &leafSection.sec) ) - CHKERR( PetscSFDistributeSection(self.sf, rootSection.sec, - &cremoteOffsets, leafSection.sec) ) - CHKERR( PetscSectionGetChart(leafSection.sec, &lpStart, &lpEnd) ) + CHKERR(PetscSectionCreate(ccomm, &leafSection.sec)) + CHKERR(PetscSFDistributeSection(self.sf, rootSection.sec, + &cremoteOffsets, leafSection.sec)) + CHKERR(PetscSectionGetChart(leafSection.sec, &lpStart, &lpEnd)) remoteOffsets = array_i(lpEnd-lpStart, cremoteOffsets) - CHKERR( PetscFree(cremoteOffsets) ) + CHKERR(PetscFree(cremoteOffsets)) return (remoteOffsets, leafSection) def compose(self, SF sf) -> SF: @@ -430,7 +428,7 @@ cdef class SF(Object): """ cdef SF csf = SF() - CHKERR( PetscSFCompose(self.sf, sf.sf, &csf.sf)) + CHKERR(PetscSFCompose(self.sf, sf.sf, &csf.sf)) return csf def bcastBegin(self, unit: Datatype, ndarray rootdata, ndarray leafdata, op: Op) -> None: @@ -459,8 +457,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) cdef MPI_Op cop = mpi4py_Op_Get(op) - CHKERR( PetscSFBcastBegin(self.sf, dtype, PyArray_DATA(rootdata), - PyArray_DATA(leafdata), cop) ) + CHKERR(PetscSFBcastBegin(self.sf, dtype, PyArray_DATA(rootdata), + PyArray_DATA(leafdata), cop)) def bcastEnd(self, unit: Datatype, ndarray rootdata, ndarray leafdata, op: Op) -> None: """End a broadcast & reduce operation started with `bcastBegin`. @@ -485,8 +483,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) cdef MPI_Op cop = mpi4py_Op_Get(op) - CHKERR( PetscSFBcastEnd(self.sf, dtype, PyArray_DATA(rootdata), - PyArray_DATA(leafdata), cop) ) + CHKERR(PetscSFBcastEnd(self.sf, dtype, PyArray_DATA(rootdata), + PyArray_DATA(leafdata), cop)) def reduceBegin(self, unit: Datatype, ndarray leafdata, ndarray rootdata, op: Op) -> None: """Begin reduction of leafdata into rootdata. @@ -513,8 +511,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) cdef MPI_Op cop = mpi4py_Op_Get(op) - CHKERR( PetscSFReduceBegin(self.sf, dtype, PyArray_DATA(leafdata), - PyArray_DATA(rootdata), cop) ) + CHKERR(PetscSFReduceBegin(self.sf, dtype, PyArray_DATA(leafdata), + PyArray_DATA(rootdata), cop)) def reduceEnd(self, unit: Datatype, ndarray leafdata, ndarray rootdata, op: Op) -> None: """End a reduction operation started with `reduceBegin`. @@ -539,8 +537,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) cdef MPI_Op cop = mpi4py_Op_Get(op) - CHKERR( PetscSFReduceEnd(self.sf, dtype, PyArray_DATA(leafdata), - PyArray_DATA(rootdata), cop) ) + CHKERR(PetscSFReduceEnd(self.sf, dtype, PyArray_DATA(leafdata), + PyArray_DATA(rootdata), cop)) def scatterBegin(self, unit: Datatype, ndarray multirootdata, ndarray leafdata) -> None: """Begin pointwise scatter operation.
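A sketch of the star-forest workflow these hunks touch: setGraph describes who talks to whom, bcast pushes root values to leaves, and reduce accumulates leaf values back into roots. The sizes, the all-to-rank-0 layout, and the real double-precision datatype are illustrative assumptions:

import numpy as np
from mpi4py import MPI
from petsc4py import PETSc

sf = PETSc.SF().create(comm=PETSc.COMM_WORLD)
nroots = nleaves = 3
local = np.arange(nleaves, dtype=PETSc.IntType)
remote = np.zeros((nleaves, 2), dtype=PETSc.IntType)  # (owner rank, root index) pairs
remote[:, 1] = np.arange(nroots)                      # every rank's leaves point at rank 0
sf.setGraph(nroots, local, remote)
sf.setUp()

rootdata = np.arange(nroots, dtype=PETSc.ScalarType)
leafdata = np.zeros(nleaves, dtype=PETSc.ScalarType)
sf.bcastBegin(MPI.DOUBLE, rootdata, leafdata, MPI.REPLACE)   # roots -> leaves
sf.bcastEnd(MPI.DOUBLE, rootdata, leafdata, MPI.REPLACE)
sf.reduceBegin(MPI.DOUBLE, leafdata, rootdata, MPI.SUM)      # leaves -> roots
sf.reduceEnd(MPI.DOUBLE, leafdata, rootdata, MPI.SUM)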
@@ -565,8 +563,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) - CHKERR( PetscSFScatterBegin(self.sf, dtype, PyArray_DATA(multirootdata), - PyArray_DATA(leafdata)) ) + CHKERR(PetscSFScatterBegin(self.sf, dtype, PyArray_DATA(multirootdata), + PyArray_DATA(leafdata))) def scatterEnd(self, unit: Datatype, ndarray multirootdata, ndarray leafdata) -> None: """End scatter operation that was started with `scatterBegin`. @@ -588,8 +586,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) - CHKERR( PetscSFScatterEnd(self.sf, dtype, PyArray_DATA(multirootdata), - PyArray_DATA(leafdata)) ) + CHKERR(PetscSFScatterEnd(self.sf, dtype, PyArray_DATA(multirootdata), + PyArray_DATA(leafdata))) def gatherBegin(self, unit: Datatype, ndarray leafdata, ndarray multirootdata) -> None: """Begin pointwise gather of all leaves into multi-roots. @@ -614,8 +612,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) - CHKERR( PetscSFGatherBegin(self.sf, dtype, PyArray_DATA(leafdata), - PyArray_DATA(multirootdata)) ) + CHKERR(PetscSFGatherBegin(self.sf, dtype, PyArray_DATA(leafdata), + PyArray_DATA(multirootdata))) def gatherEnd(self, unit: Datatype, ndarray leafdata, ndarray multirootdata) -> None: """End gather operation that was started with `gatherBegin`. @@ -638,8 +636,8 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) - CHKERR( PetscSFGatherEnd(self.sf, dtype, PyArray_DATA(leafdata), - PyArray_DATA(multirootdata)) ) + CHKERR(PetscSFGatherEnd(self.sf, dtype, PyArray_DATA(leafdata), + PyArray_DATA(multirootdata))) def fetchAndOpBegin(self, unit: Datatype, rootdata: ndarray, leafdata: ndarray, leafupdate: ndarray, op: Op) -> None: """Begin fetch and update operation. @@ -673,9 +671,9 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) cdef MPI_Op cop = mpi4py_Op_Get(op) - CHKERR( PetscSFFetchAndOpBegin(self.sf, dtype, PyArray_DATA(rootdata), - PyArray_DATA(leafdata), - PyArray_DATA(leafupdate), cop) ) + CHKERR(PetscSFFetchAndOpBegin(self.sf, dtype, PyArray_DATA(rootdata), + PyArray_DATA(leafdata), + PyArray_DATA(leafupdate), cop)) def fetchAndOpEnd(self, unit: Datatype, rootdata: ndarray, leafdata: ndarray, leafupdate: ndarray, op: Op) -> None: """End operation started in a matching call to `fetchAndOpBegin`. @@ -704,9 +702,9 @@ cdef class SF(Object): """ cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit) cdef MPI_Op cop = mpi4py_Op_Get(op) - CHKERR( PetscSFFetchAndOpEnd(self.sf, dtype, PyArray_DATA(rootdata), - PyArray_DATA(leafdata), - PyArray_DATA(leafupdate), cop) ) + CHKERR(PetscSFFetchAndOpEnd(self.sf, dtype, PyArray_DATA(rootdata), + PyArray_DATA(leafdata), + PyArray_DATA(leafupdate), cop)) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/SNES.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/SNES.pyx index c14514247c3..b206b8ffea6 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/SNES.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/SNES.pyx @@ -29,6 +29,7 @@ class SNESType(object): COMPOSITE = S_(SNESCOMPOSITE) PATCH = S_(SNESPATCH) + class SNESNormSchedule(object): """SNES norm schedule. @@ -52,6 +53,7 @@ class SNESNormSchedule(object): FINAL_ONLY = NORM_FINAL_ONLY INITIAL_FINAL_ONLY = NORM_INITIAL_FINAL_ONLY + # FIXME Missing reference petsc.SNESConvergedReason class SNESConvergedReason(object): """SNES solver termination reason. 
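Continuing the star-forest sketch above for the gather/scatter hunks: gather copies every leaf into the "multi-root" space, one slot per incoming leaf, and scatter is its pointwise inverse. Sizing the multi-root buffer by the degree sum is my reading of the multi-SF layout, not something this diff states:

degree = sf.computeDegree()                    # slots per root on this rank
multiroot = np.zeros(int(degree.sum()), dtype=PETSc.ScalarType)
sf.gatherBegin(MPI.DOUBLE, leafdata, multiroot)
sf.gatherEnd(MPI.DOUBLE, leafdata, multiroot)
sf.scatterBegin(MPI.DOUBLE, multiroot, leafdata)
sf.scatterEnd(MPI.DOUBLE, multiroot, leafdata)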
@@ -84,6 +86,7 @@ class SNESConvergedReason(object): # -------------------------------------------------------------------- + cdef class SNES(Object): """Nonlinear equations solver. @@ -124,7 +127,7 @@ cdef class SNES(Object): """ cdef PetscViewer cviewer = NULL if viewer is not None: cviewer = viewer.vwr - CHKERR( SNESView(self.snes, cviewer) ) + CHKERR(SNESView(self.snes, cviewer)) def destroy(self) -> Self: """Destroy the solver. @@ -136,7 +139,7 @@ cdef class SNES(Object): petsc.SNESDestroy """ - CHKERR( SNESDestroy(&self.snes) ) + CHKERR(SNESDestroy(&self.snes)) return self def create(self, comm: Comm | None = None) -> Self: @@ -156,8 +159,8 @@ cdef class SNES(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscSNES newsnes = NULL - CHKERR( SNESCreate(ccomm, &newsnes) ) - CHKERR( PetscCLEAR(self.obj) ); self.snes = newsnes + CHKERR(SNESCreate(ccomm, &newsnes)) + CHKERR(PetscCLEAR(self.obj)); self.snes = newsnes return self def setType(self, snes_type: Type | str) -> None: @@ -177,7 +180,7 @@ cdef class SNES(Object): """ cdef PetscSNESType cval = NULL snes_type = str2bytes(snes_type, &cval) - CHKERR( SNESSetType(self.snes, cval) ) + CHKERR(SNESSetType(self.snes, cval)) def getType(self) -> str: """Return the type of the solver. @@ -190,10 +193,10 @@ cdef class SNES(Object): """ cdef PetscSNESType cval = NULL - CHKERR( SNESGetType(self.snes, &cval) ) + CHKERR(SNESGetType(self.snes, &cval)) return bytes2str(cval) - def setOptionsPrefix(self, prefix: str) -> None: + def setOptionsPrefix(self, prefix: str | None) -> None: """Set the prefix used for searching for options in the database. Logically collective. @@ -205,7 +208,7 @@ cdef class SNES(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( SNESSetOptionsPrefix(self.snes, cval) ) + CHKERR(SNESSetOptionsPrefix(self.snes, cval)) def getOptionsPrefix(self) -> str: """Return the prefix used for searching for options in the database. @@ -218,10 +221,10 @@ cdef class SNES(Object): """ cdef const char *cval = NULL - CHKERR( SNESGetOptionsPrefix(self.snes, &cval) ) + CHKERR(SNESGetOptionsPrefix(self.snes, &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None) -> None: """Append to the prefix used for searching for options in the database. Logically collective. @@ -233,7 +236,7 @@ cdef class SNES(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( SNESAppendOptionsPrefix(self.snes, cval) ) + CHKERR(SNESAppendOptionsPrefix(self.snes, cval)) def setFromOptions(self) -> None: """Configure the solver from the options database. 
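Basic SNES construction matching the hunks above; with the options prefix 'my_' the solver reads scoped database options such as -my_snes_type newtonls:

from petsc4py import PETSc

snes = PETSc.SNES().create(comm=PETSc.COMM_SELF)
snes.setOptionsPrefix('my_')
snes.setType(PETSc.SNES.Type.NEWTONLS)
snes.setFromOptions()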
@@ -245,7 +248,7 @@ cdef class SNES(Object): petsc_options, petsc.SNESSetFromOptions """ - CHKERR( SNESSetFromOptions(self.snes) ) + CHKERR(SNESSetFromOptions(self.snes)) # --- application context --- @@ -254,16 +257,16 @@ cdef class SNES(Object): self.set_attr('__appctx__', appctx) if appctx is not None: registerAppCtx(appctx) - CHKERR( SNESSetApplicationContext(self.snes, appctx) ) + CHKERR(SNESSetApplicationContext(self.snes, appctx)) else: - CHKERR( SNESSetApplicationContext(self.snes, NULL) ) + CHKERR(SNESSetApplicationContext(self.snes, NULL)) def getApplicationContext(self) -> Any: """Return the application context.""" - cdef void *ctx + cdef void *ctx = NULL appctx = self.get_attr('__appctx__') if appctx is None: - CHKERR( SNESGetApplicationContext(self.snes, &ctx) ) + CHKERR(SNESGetApplicationContext(self.snes, &ctx)) appctx = toAppCtx(ctx) return appctx @@ -284,10 +287,10 @@ cdef class SNES(Object): """ cdef PetscDM newdm = NULL - CHKERR( SNESGetDM(self.snes, &newdm) ) + CHKERR(SNESGetDM(self.snes, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) return dm def setDM(self, DM dm) -> None: @@ -300,7 +303,7 @@ cdef class SNES(Object): getDM, petsc.SNESSetDM """ - CHKERR( SNESSetDM(self.snes, dm.dm) ) + CHKERR(SNESSetDM(self.snes, dm.dm)) # --- FAS --- @@ -316,7 +319,7 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( SNESFASSetInterpolation(self.snes, clevel, mat.mat) ) + CHKERR(SNESFASSetInterpolation(self.snes, clevel, mat.mat)) def getFASInterpolation(self, level: int) -> Mat: """Return the `Mat` used to apply the interpolation from level-1 to level. @@ -330,8 +333,8 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) cdef Mat mat = Mat() - CHKERR( SNESFASGetInterpolation(self.snes, clevel, &mat.mat) ) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(SNESFASGetInterpolation(self.snes, clevel, &mat.mat)) + CHKERR(PetscINCREF(mat.obj)) return mat def setFASRestriction(self, level: int, Mat mat) -> None: @@ -346,7 +349,7 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( SNESFASSetRestriction(self.snes, clevel, mat.mat) ) + CHKERR(SNESFASSetRestriction(self.snes, clevel, mat.mat)) def getFASRestriction(self, level: int) -> Mat: """Return the `Mat` used to apply the restriction from level-1 to level. @@ -360,8 +363,8 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) cdef Mat mat = Mat() - CHKERR( SNESFASGetRestriction(self.snes, clevel, &mat.mat) ) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(SNESFASGetRestriction(self.snes, clevel, &mat.mat)) + CHKERR(PetscINCREF(mat.obj)) return mat def setFASInjection(self, level: int, Mat mat) -> None: @@ -376,7 +379,7 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( SNESFASSetInjection(self.snes, clevel, mat.mat) ) + CHKERR(SNESFASSetInjection(self.snes, clevel, mat.mat)) def getFASInjection(self, level: int) -> Mat: """Return the `Mat` used to apply the injection from level-1 to level. 
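Continuing the SNES sketch: the application context may be any Python object (it is mirrored by the snes.appctx property defined near the end of this file), and a DM attaches mesh and data-layout information. The 1D DMDA and its keyword arguments here are illustrative assumptions, not part of this diff:

snes.appctx = {'nu': 1.0}
da = PETSc.DMDA().create(dim=1, dof=1, sizes=(8,), comm=snes.comm)
snes.setDM(da)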
@@ -390,8 +393,8 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) cdef Mat mat = Mat() - CHKERR( SNESFASGetInjection(self.snes, clevel, &mat.mat) ) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(SNESFASGetInjection(self.snes, clevel, &mat.mat)) + CHKERR(PetscINCREF(mat.obj)) return mat def setFASRScale(self, level: int, Vec vec) -> None: @@ -405,7 +408,7 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) - CHKERR( SNESFASSetRScale(self.snes, clevel, vec.vec) ) + CHKERR(SNESFASSetRScale(self.snes, clevel, vec.vec)) def setFASLevels(self, levels: int, comms: Sequence[Comm] = None) -> None: """Set the number of levels to use with FAS. @@ -431,15 +434,15 @@ cdef class SNES(Object): if comms is not None: if clevels != len(comms): raise ValueError("Must provide as many communicators as levels") - CHKERR( PetscMalloc(sizeof(MPI_Comm)*clevels, &ccomms) ) + CHKERR(PetscMalloc(sizeof(MPI_Comm)*clevels, &ccomms)) try: for i, comm in enumerate(comms): ccomms[i] = def_Comm(comm, MPI_COMM_NULL) - CHKERR( SNESFASSetLevels(self.snes, clevels, ccomms) ) + CHKERR(SNESFASSetLevels(self.snes, clevels, ccomms)) finally: - CHKERR( PetscFree(ccomms) ) + CHKERR(PetscFree(ccomms)) else: - CHKERR( SNESFASSetLevels(self.snes, clevels, ccomms) ) + CHKERR(SNESFASSetLevels(self.snes, clevels, ccomms)) def getFASLevels(self) -> int: """Return the number of levels used. @@ -452,7 +455,7 @@ cdef class SNES(Object): """ cdef PetscInt levels = 0 - CHKERR( SNESFASGetLevels(self.snes, &levels) ) + CHKERR(SNESFASGetLevels(self.snes, &levels)) return toInt(levels) def getFASCycleSNES(self, level: int) -> SNES: @@ -468,8 +471,8 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) cdef SNES lsnes = SNES() - CHKERR( SNESFASGetCycleSNES(self.snes, clevel, &lsnes.snes) ) - CHKERR( PetscINCREF(lsnes.obj) ) + CHKERR(SNESFASGetCycleSNES(self.snes, clevel, &lsnes.snes)) + CHKERR(PetscINCREF(lsnes.obj)) return lsnes def getFASCoarseSolve(self) -> SNES: @@ -483,8 +486,8 @@ cdef class SNES(Object): """ cdef SNES smooth = SNES() - CHKERR( SNESFASGetCoarseSolve(self.snes, &smooth.snes) ) - CHKERR( PetscINCREF(smooth.obj) ) + CHKERR(SNESFASGetCoarseSolve(self.snes, &smooth.snes)) + CHKERR(PetscINCREF(smooth.obj)) return smooth def getFASSmoother(self, level: int) -> SNES: @@ -500,8 +503,8 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) cdef SNES smooth = SNES() - CHKERR( SNESFASGetSmoother(self.snes, clevel, &smooth.snes) ) - CHKERR( PetscINCREF(smooth.obj) ) + CHKERR(SNESFASGetSmoother(self.snes, clevel, &smooth.snes)) + CHKERR(PetscINCREF(smooth.obj)) return smooth def getFASSmootherDown(self, level: int) -> SNES: @@ -517,8 +520,8 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) cdef SNES smooth = SNES() - CHKERR( SNESFASGetSmootherDown(self.snes, clevel, &smooth.snes) ) - CHKERR( PetscINCREF(smooth.obj) ) + CHKERR(SNESFASGetSmootherDown(self.snes, clevel, &smooth.snes)) + CHKERR(PetscINCREF(smooth.obj)) return smooth def getFASSmootherUp(self, level: int) -> SNES: @@ -534,8 +537,8 @@ cdef class SNES(Object): """ cdef PetscInt clevel = asInt(level) cdef SNES smooth = SNES() - CHKERR( SNESFASGetSmootherUp(self.snes, clevel, &smooth.snes) ) - CHKERR( PetscINCREF(smooth.obj) ) + CHKERR(SNESFASGetSmootherUp(self.snes, clevel, &smooth.snes)) + CHKERR(PetscINCREF(smooth.obj)) return smooth # --- nonlinear preconditioner --- @@ -551,8 +554,8 @@ cdef class SNES(Object): """ cdef SNES snes = SNES() - CHKERR( SNESGetNPC(self.snes, &snes.snes) ) - CHKERR( 
PetscINCREF(snes.obj) ) + CHKERR(SNESGetNPC(self.snes, &snes.snes)) + CHKERR(PetscINCREF(snes.obj)) return snes def hasNPC(self) -> bool: @@ -566,7 +569,7 @@ cdef class SNES(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( SNESHasNPC(self.snes, &flag) ) + CHKERR(SNESHasNPC(self.snes, &flag)) return toBool(flag) def setNPC(self, SNES snes) -> None: @@ -579,7 +582,7 @@ cdef class SNES(Object): getNPC, hasNPC, setNPCSide, getNPCSide, petsc.SNESSetNPC """ - CHKERR( SNESSetNPC(self.snes, snes.snes) ) + CHKERR(SNESSetNPC(self.snes, snes.snes)) def setNPCSide(self, side: PC.Side) -> None: """Set the nonlinear preconditioning side. @@ -591,7 +594,7 @@ cdef class SNES(Object): setNPC, getNPC, hasNPC, getNPCSide, petsc.SNESSetNPCSide """ - CHKERR( SNESSetNPCSide(self.snes, side) ) + CHKERR(SNESSetNPCSide(self.snes, side)) def getNPCSide(self) -> PC.Side: """Return the nonlinear preconditioning side. @@ -604,12 +607,12 @@ cdef class SNES(Object): """ cdef PetscPCSide side = PC_RIGHT - CHKERR( SNESGetNPCSide(self.snes, &side) ) + CHKERR(SNESGetNPCSide(self.snes, &side)) return side # --- user Function/Jacobian routines --- - def setLineSearchPreCheck(self, precheck: SNESLSPreFunction, + def setLineSearchPreCheck(self, precheck: SNESLSPreFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback that will be called before applying the linesearch. @@ -631,19 +634,19 @@ cdef class SNES(Object): """ cdef PetscSNESLineSearch snesls = NULL - CHKERR( SNESGetLineSearch(self.snes, &snesls) ) + CHKERR(SNESGetLineSearch(self.snes, &snesls)) if precheck is not None: if args is None: args = () if kargs is None: kargs = {} context = (precheck, args, kargs) self.set_attr('__precheck__', context) # FIXME callback - CHKERR( SNESLineSearchSetPreCheck(snesls, SNES_PreCheck, context) ) + CHKERR(SNESLineSearchSetPreCheck(snesls, SNES_PreCheck, context)) else: self.set_attr('__precheck__', None) - CHKERR( SNESLineSearchSetPreCheck(snesls, NULL, NULL) ) + CHKERR(SNESLineSearchSetPreCheck(snesls, NULL, NULL)) - def setInitialGuess(self, initialguess: SNESGuessFunction, + def setInitialGuess(self, initialguess: SNESGuessFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute the initial guess. @@ -669,10 +672,10 @@ cdef class SNES(Object): if kargs is None: kargs = {} context = (initialguess, args, kargs) self.set_attr('__initialguess__', context) - CHKERR( SNESSetComputeInitialGuess(self.snes, SNES_InitialGuess, context) ) + CHKERR(SNESSetComputeInitialGuess(self.snes, SNES_InitialGuess, context)) else: self.set_attr('__initialguess__', None) - CHKERR( SNESSetComputeInitialGuess(self.snes, NULL, NULL) ) + CHKERR(SNESSetComputeInitialGuess(self.snes, NULL, NULL)) def getInitialGuess(self) -> SNESGuessFunction: """Return the callback to compute the initial guess. @@ -686,7 +689,8 @@ cdef class SNES(Object): """ return self.get_attr('__initialguess__') - def setFunction(self, function: SNESFunction, Vec f=None, + def setFunction(self, function: SNESFunction | None, + Vec f=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute the nonlinear function. 
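An assumed callback shape for setLineSearchPreCheck (the wrapper above is still marked FIXME): the callable receives the current iterate x and the proposed step y as Vec objects, may modify y in place, and returns True when it changed the step. The damping threshold is illustrative:

def precheck(x, y):
    ynorm = y.norm()
    if ynorm > 10.0:
        y.scale(10.0 / ynorm)   # damp overly large steps
        return True
    return False

snes.setLineSearchPreCheck(precheck)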
@@ -716,9 +720,9 @@ cdef class SNES(Object): if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr('__function__', context) - CHKERR( SNESSetFunction(self.snes, fvec, SNES_Function, context) ) + CHKERR(SNESSetFunction(self.snes, fvec, SNES_Function, context)) else: - CHKERR( SNESSetFunction(self.snes, fvec, NULL, NULL) ) + CHKERR(SNESSetFunction(self.snes, fvec, NULL, NULL)) def getFunction(self) -> SNESFunction: """Return the callback to compute the nonlinear function. @@ -730,11 +734,12 @@ cdef class SNES(Object): setFunction, petsc.SNESGetFunction """ + cdef PetscErrorCode(*fun)(PetscSNES, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON nogil cdef Vec f = Vec() - cdef void* ctx - cdef PetscErrorCode (*fun)(PetscSNES,PetscVec,PetscVec,void*) - CHKERR( SNESGetFunction(self.snes, &f.vec, &fun, &ctx) ) - CHKERR( PetscINCREF(f.obj) ) + cdef void* ctx = NULL + fun = SNES_Function + CHKERR(SNESGetFunction(self.snes, &f.vec, &fun, &ctx)) + CHKERR(PetscINCREF(f.obj)) cdef object function = self.get_attr('__function__') cdef object context @@ -749,7 +754,7 @@ cdef class SNES(Object): return (f, None) - def setUpdate(self, update: SNESUpdateFunction, + def setUpdate(self, update: SNESUpdateFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute update at the beginning of each step. @@ -775,10 +780,10 @@ cdef class SNES(Object): if kargs is None: kargs = {} context = (update, args, kargs) self.set_attr('__update__', context) - CHKERR( SNESSetUpdate(self.snes, SNES_Update) ) + CHKERR(SNESSetUpdate(self.snes, SNES_Update)) else: self.set_attr('__update__', None) - CHKERR( SNESSetUpdate(self.snes, NULL) ) + CHKERR(SNESSetUpdate(self.snes, NULL)) def getUpdate(self) -> SNESUpdateFunction: """Return the callback to compute the update at the beginning of each step. @@ -792,7 +797,9 @@ cdef class SNES(Object): """ return self.get_attr('__update__') - def setJacobian(self, jacobian: SNESJacobianFunction, Mat J=None, Mat P=None, + def setJacobian(self, + jacobian: SNESJacobianFunction | None, + Mat J=None, Mat P=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute the Jacobian. @@ -825,9 +832,9 @@ cdef class SNES(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr('__jacobian__', context) - CHKERR( SNESSetJacobian(self.snes, Jmat, Pmat, SNES_Jacobian, context) ) + CHKERR(SNESSetJacobian(self.snes, Jmat, Pmat, SNES_Jacobian, context)) else: - CHKERR( SNESSetJacobian(self.snes, Jmat, Pmat, NULL, NULL) ) + CHKERR(SNESSetJacobian(self.snes, Jmat, Pmat, NULL, NULL)) def getJacobian(self) -> tuple[Mat, Mat, SNESJacobianFunction]: """Return the matrices used to compute the Jacobian and the callback tuple. @@ -850,13 +857,14 @@ cdef class SNES(Object): """ cdef Mat J = Mat() cdef Mat P = Mat() - CHKERR( SNESGetJacobian(self.snes, &J.mat, &P.mat, NULL, NULL) ) - CHKERR( PetscINCREF(J.obj) ) - CHKERR( PetscINCREF(P.obj) ) + CHKERR(SNESGetJacobian(self.snes, &J.mat, &P.mat, NULL, NULL)) + CHKERR(PetscINCREF(J.obj)) + CHKERR(PetscINCREF(P.obj)) cdef object jacobian = self.get_attr('__jacobian__') return (J, P, jacobian) - def setObjective(self, objective: SNESObjFunction, + def setObjective(self, + objective: SNESObjFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute the objective function. 
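The canonical residual/Jacobian pair wired through setFunction and setJacobian, continuing the SNES sketch above with the scalar equation x**2 = 2 (sizes and preallocation are illustrative; P is always assembled, J only when it is a distinct matrix):

def residual(snes, x, f):
    f[0] = x[0] * x[0] - 2.0
    f.assemble()

def jacobian(snes, x, J, P):
    P[0, 0] = 2.0 * x[0]
    P.assemble()
    if J != P:
        J.assemble()

f = PETSc.Vec().createSeq(1)
J = PETSc.Mat().createAIJ((1, 1), nnz=1, comm=PETSc.COMM_SELF)
snes.setFunction(residual, f)
snes.setJacobian(jacobian, J)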
@@ -882,9 +890,9 @@ cdef class SNES(Object): if kargs is None: kargs = {} context = (objective, args, kargs) self.set_attr('__objective__', context) - CHKERR( SNESSetObjective(self.snes, SNES_Objective, context) ) + CHKERR(SNESSetObjective(self.snes, SNES_Objective, context)) else: - CHKERR( SNESSetObjective(self.snes, NULL, NULL) ) + CHKERR(SNESSetObjective(self.snes, NULL, NULL)) def getObjective(self) -> SNESObjFunction: """Return the objective callback tuple. @@ -896,7 +904,7 @@ cdef class SNES(Object): setObjective """ - CHKERR( SNESGetObjective(self.snes, NULL, NULL) ) + CHKERR(SNESGetObjective(self.snes, NULL, NULL)) cdef object objective = self.get_attr('__objective__') return objective @@ -917,7 +925,7 @@ cdef class SNES(Object): setFunction, petsc.SNESComputeFunction """ - CHKERR( SNESComputeFunction(self.snes, x.vec, f.vec) ) + CHKERR(SNESComputeFunction(self.snes, x.vec, f.vec)) def computeJacobian(self, Vec x, Mat J, Mat P=None) -> None: """Compute the Jacobian. @@ -940,7 +948,7 @@ cdef class SNES(Object): """ cdef PetscMat jmat = J.mat, pmat = J.mat if P is not None: pmat = P.mat - CHKERR( SNESComputeJacobian(self.snes, x.vec, jmat, pmat) ) + CHKERR(SNESComputeJacobian(self.snes, x.vec, jmat, pmat)) def computeObjective(self, Vec x) -> float: """Compute the value of the objective function. @@ -958,10 +966,11 @@ cdef class SNES(Object): """ cdef PetscReal o = 0 - CHKERR( SNESComputeObjective(self.snes, x.vec, &o) ) + CHKERR(SNESComputeObjective(self.snes, x.vec, &o)) return toReal(o) - def setNGS(self, ngs: SNESNGSFunction, + def setNGS(self, + ngs: SNESNGSFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute nonlinear Gauss-Seidel. @@ -982,11 +991,14 @@ cdef class SNES(Object): getNGS, computeNGS, petsc.SNESSetNGS """ - if args is None: args = () - if kargs is None: kargs = {} - context = (ngs, args, kargs) - self.set_attr('__ngs__', context) - CHKERR( SNESSetNGS(self.snes, SNES_NGS, context) ) + if ngs is not None: + if args is None: args = () + if kargs is None: kargs = {} + context = (ngs, args, kargs) + self.set_attr('__ngs__', context) + CHKERR(SNESSetNGS(self.snes, SNES_NGS, context)) + else: + CHKERR(SNESSetNGS(self.snes, NULL, NULL)) def getNGS(self) -> SNESNGSFunction: """Return the nonlinear Gauss-Seidel callback tuple. @@ -998,7 +1010,7 @@ cdef class SNES(Object): setNGS, computeNGS """ - CHKERR( SNESGetNGS(self.snes, NULL, NULL) ) + CHKERR(SNESGetNGS(self.snes, NULL, NULL)) cdef object ngs = self.get_attr('__ngs__') return ngs @@ -1021,7 +1033,7 @@ cdef class SNES(Object): """ cdef PetscVec bvec = NULL if b is not None: bvec = b.vec - CHKERR( SNESComputeNGS(self.snes, bvec, x.vec) ) + CHKERR(SNESComputeNGS(self.snes, bvec, x.vec)) # --- tolerances and convergence --- @@ -1053,8 +1065,8 @@ cdef class SNES(Object): if atol is not None: catol = asReal(atol) if stol is not None: cstol = asReal(stol) if max_it is not None: cmaxit = asInt(max_it) - CHKERR( SNESSetTolerances(self.snes, catol, crtol, cstol, - cmaxit, PETSC_DEFAULT) ) + CHKERR(SNESSetTolerances(self.snes, catol, crtol, cstol, + cmaxit, PETSC_DEFAULT)) def getTolerances(self) -> tuple[float, float, float, int]: """Return the tolerance parameters used in the solver convergence tests. 
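setTolerances takes keywords and leaves unspecified values at PETSC_DEFAULT; getTolerances returns them in (rtol, atol, stol, max_it) order, as the hunk above shows:

snes.setTolerances(rtol=1e-8, max_it=50)
rtol, atol, stol, max_it = snes.getTolerances()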
@@ -1079,8 +1091,8 @@ cdef class SNES(Object): """ cdef PetscReal crtol=0, catol=0, cstol=0 cdef PetscInt cmaxit=0 - CHKERR( SNESGetTolerances(self.snes, &catol, &crtol, &cstol, - &cmaxit, NULL) ) + CHKERR(SNESGetTolerances(self.snes, &catol, &crtol, &cstol, + &cmaxit, NULL)) return (toReal(crtol), toReal(catol), toReal(cstol), toInt(cmaxit)) def setNormSchedule(self, normsched: NormSchedule) -> None: @@ -1093,7 +1105,7 @@ cdef class SNES(Object): getNormSchedule, petsc.SNESSetNormSchedule """ - CHKERR( SNESSetNormSchedule(self.snes, normsched) ) + CHKERR(SNESSetNormSchedule(self.snes, normsched)) def getNormSchedule(self) -> NormSchedule: """Return the norm schedule. @@ -1106,7 +1118,7 @@ cdef class SNES(Object): """ cdef PetscSNESNormSchedule normsched = SNES_NORM_NONE - CHKERR( SNESGetNormSchedule(self.snes, &normsched) ) + CHKERR(SNESGetNormSchedule(self.snes, &normsched)) return normsched def setConvergenceTest(self, converged: SNESConvergedFunction | Literal["skip", "default"], @@ -1132,17 +1144,17 @@ cdef class SNES(Object): """ if converged == "skip": self.set_attr('__converged__', None) - CHKERR( SNESSetConvergenceTest(self.snes, SNESConvergedSkip, NULL, NULL) ) + CHKERR(SNESSetConvergenceTest(self.snes, SNESConvergedSkip, NULL, NULL)) elif converged is None or converged == "default": self.set_attr('__converged__', None) - CHKERR( SNESSetConvergenceTest(self.snes, SNESConvergedDefault, NULL, NULL) ) + CHKERR(SNESSetConvergenceTest(self.snes, SNESConvergedDefault, NULL, NULL)) else: assert callable(converged) if args is None: args = () if kargs is None: kargs = {} context = (converged, args, kargs) self.set_attr('__converged__', context) - CHKERR( SNESSetConvergenceTest(self.snes, SNES_Converged, context, NULL) ) + CHKERR(SNESSetConvergenceTest(self.snes, SNES_Converged, context, NULL)) def getConvergenceTest(self) -> SNESConvergedFunction: """Return the callback to used as convergence test. @@ -1182,8 +1194,8 @@ cdef class SNES(Object): cdef PetscReal rval2 = asReal(ynorm) cdef PetscReal rval3 = asReal(fnorm) cdef PetscSNESConvergedReason reason = SNES_CONVERGED_ITERATING - CHKERR( SNESConvergenceTestCall(self.snes, ival, - rval1, rval2, rval3, &reason) ) + CHKERR(SNESConvergenceTestCall(self.snes, ival, + rval1, rval2, rval3, &reason)) return reason def converged(self, its: int, xnorm: float, ynorm: float, fnorm: float) -> None: @@ -1211,8 +1223,7 @@ cdef class SNES(Object): cdef PetscReal rval1 = asReal(xnorm) cdef PetscReal rval2 = asReal(ynorm) cdef PetscReal rval3 = asReal(fnorm) - CHKERR( SNESConverged(self.snes, ival, rval1, rval2, rval3) ) - + CHKERR(SNESConverged(self.snes, ival, rval1, rval2, rval3)) def setConvergenceHistory(self, length=None, reset=False) -> None: """Set the convergence history. @@ -1228,7 +1239,7 @@ cdef class SNES(Object): cdef PetscInt *idata = NULL cdef PetscInt size = 1000 cdef PetscBool flag = PETSC_FALSE - #FIXME + # FIXME if length is True: pass elif length is not None: size = asInt(length) if size < 0: size = 1000 @@ -1236,7 +1247,7 @@ cdef class SNES(Object): cdef object rhist = oarray_r(empty_r(size), NULL, &rdata) cdef object ihist = oarray_i(empty_i(size), NULL, &idata) self.set_attr('__history__', (rhist, ihist)) - CHKERR( SNESSetConvergenceHistory(self.snes, rdata, idata, size, flag) ) + CHKERR(SNESSetConvergenceHistory(self.snes, rdata, idata, size, flag)) def getConvergenceHistory(self) -> tuple[ArrayReal, ArrayInt]: """Return the convergence history. 
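The string shortcuts select the built-in tests wired above, while a callable installs a user-defined SNESConvergedFunction:

snes.setConvergenceTest('skip')      # iterate until max_it regardless of norms
snes.setConvergenceTest('default')   # back to SNESConvergedDefault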
@@ -1251,7 +1262,7 @@ cdef class SNES(Object): cdef PetscReal *rdata = NULL cdef PetscInt *idata = NULL cdef PetscInt size = 0 - CHKERR( SNESGetConvergenceHistory(self.snes, &rdata, &idata, &size) ) + CHKERR(SNESGetConvergenceHistory(self.snes, &rdata, &idata, &size)) cdef object rhist = array_r(size, rdata) cdef object ihist = array_i(size, idata) return (rhist, ihist) @@ -1260,7 +1271,7 @@ cdef class SNES(Object): """Log residual norm and linear iterations.""" cdef PetscReal rval = asReal(norm) cdef PetscInt ival = asInt(linear_its) - CHKERR( SNESLogConvergenceHistory(self.snes, rval, ival) ) + CHKERR(SNESLogConvergenceHistory(self.snes, rval, ival)) def setResetCounters(self, reset: bool = True) -> None: """Set the flag to reset the counters. @@ -1273,11 +1284,14 @@ cdef class SNES(Object): """ cdef PetscBool flag = reset - CHKERR( SNESSetCountersReset(self.snes, flag) ) + CHKERR(SNESSetCountersReset(self.snes, flag)) # --- monitoring --- - def setMonitor(self, monitor: SNESMonitorFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: + def setMonitor(self, + monitor: SNESMonitorFunction | None, + args: tuple[Any, ...] | None = None, + kargs: dict[str, Any] | None = None) -> None: """Set the callback used to monitor solver convergence. Logically collective. @@ -1301,7 +1315,7 @@ cdef class SNES(Object): if monitorlist is None: monitorlist = [] self.set_attr('__monitor__', monitorlist) - CHKERR( SNESMonitorSet(self.snes, SNES_Monitor, NULL, NULL) ) + CHKERR(SNESMonitorSet(self.snes, SNES_Monitor, NULL, NULL)) if args is None: args = () if kargs is None: kargs = {} context = (monitor, args, kargs) @@ -1329,7 +1343,7 @@ cdef class SNES(Object): setMonitor, petsc.SNESMonitorCancel """ - CHKERR( SNESMonitorCancel(self.snes) ) + CHKERR(SNESMonitorCancel(self.snes)) self.set_attr('__monitor__', None) cancelMonitor = monitorCancel @@ -1353,7 +1367,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = asInt(its) cdef PetscReal rval = asReal(rnorm) - CHKERR( SNESMonitor(self.snes, ival, rval) ) + CHKERR(SNESMonitor(self.snes, ival, rval)) # --- more tolerances --- @@ -1370,7 +1384,7 @@ cdef class SNES(Object): cdef PetscReal r = PETSC_DEFAULT cdef PetscInt i = PETSC_DEFAULT cdef PetscInt ival = asInt(max_funcs) - CHKERR( SNESSetTolerances(self.snes, r, r, r, i, ival) ) + CHKERR(SNESSetTolerances(self.snes, r, r, r, i, ival)) def getMaxFunctionEvaluations(self) -> int: """Return the maximum allowed number of function evaluations. @@ -1385,7 +1399,7 @@ cdef class SNES(Object): cdef PetscReal *r = NULL cdef PetscInt *i = NULL cdef PetscInt ival = 0 - CHKERR( SNESGetTolerances(self.snes, r, r, r, i, &ival) ) + CHKERR(SNESGetTolerances(self.snes, r, r, r, i, &ival)) return toInt(ival) def getFunctionEvaluations(self) -> int: @@ -1399,7 +1413,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = 0 - CHKERR( SNESGetNumberFunctionEvals(self.snes, &ival) ) + CHKERR(SNESGetNumberFunctionEvals(self.snes, &ival)) return toInt(ival) def setMaxStepFailures(self, max_fails: int) -> None: @@ -1413,7 +1427,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = asInt(max_fails) - CHKERR( SNESSetMaxNonlinearStepFailures(self.snes, ival) ) + CHKERR(SNESSetMaxNonlinearStepFailures(self.snes, ival)) def getMaxStepFailures(self) -> int: """Return the maximum allowed number of step failures. 
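A monitor per SNESMonitorFunction is called once per iteration with the solver, the iteration number, and the residual norm (the print format is illustrative):

def monitor(snes, its, fnorm):
    PETSc.Sys.Print(f'  it {its:3d}  |F| = {fnorm:.6e}')

snes.setMonitor(monitor)
# snes.monitorCancel() would drop every installed monitor again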
@@ -1426,7 +1440,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = 0 - CHKERR( SNESGetMaxNonlinearStepFailures(self.snes, &ival) ) + CHKERR(SNESGetMaxNonlinearStepFailures(self.snes, &ival)) return toInt(ival) def getStepFailures(self) -> int: @@ -1440,7 +1454,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = 0 - CHKERR( SNESGetNonlinearStepFailures(self.snes, &ival) ) + CHKERR(SNESGetNonlinearStepFailures(self.snes, &ival)) return toInt(ival) def setMaxKSPFailures(self, max_fails: int) -> None: @@ -1454,7 +1468,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = asInt(max_fails) - CHKERR( SNESSetMaxLinearSolveFailures(self.snes, ival) ) + CHKERR(SNESSetMaxLinearSolveFailures(self.snes, ival)) def getMaxKSPFailures(self) -> int: """Return the maximum allowed number of linear solve failures. @@ -1467,7 +1481,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = 0 - CHKERR( SNESGetMaxLinearSolveFailures(self.snes, &ival) ) + CHKERR(SNESGetMaxLinearSolveFailures(self.snes, &ival)) return toInt(ival) def getKSPFailures(self) -> int: @@ -1481,7 +1495,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = 0 - CHKERR( SNESGetLinearSolveFailures(self.snes, &ival) ) + CHKERR(SNESGetLinearSolveFailures(self.snes, &ival)) return toInt(ival) setMaxNonlinearStepFailures = setMaxStepFailures @@ -1503,7 +1517,7 @@ cdef class SNES(Object): petsc.SNESSetUp """ - CHKERR( SNESSetUp(self.snes) ) + CHKERR(SNESSetUp(self.snes)) def setUpMatrices(self) -> None: """Ensures that matrices are available for Newton-like methods. @@ -1517,7 +1531,7 @@ cdef class SNES(Object): setUp, petsc.SNESSetUpMatrices """ - CHKERR( SNESSetUpMatrices(self.snes) ) + CHKERR(SNESSetUpMatrices(self.snes)) def reset(self) -> None: """Reset the solver. @@ -1529,7 +1543,7 @@ cdef class SNES(Object): petsc.SNESReset """ - CHKERR( SNESReset(self.snes) ) + CHKERR(SNESReset(self.snes)) def solve(self, Vec b = None, Vec x = None) -> None: """Solve the nonlinear equations. @@ -1552,7 +1566,7 @@ cdef class SNES(Object): cdef PetscVec sol = NULL if b is not None: rhs = b.vec if x is not None: sol = x.vec - CHKERR( SNESSolve(self.snes, rhs, sol) ) + CHKERR(SNESSolve(self.snes, rhs, sol)) def setConvergedReason(self, reason: ConvergedReason) -> None: """Set the termination flag. @@ -1565,7 +1579,7 @@ cdef class SNES(Object): """ cdef PetscSNESConvergedReason eval = reason - CHKERR( SNESSetConvergedReason(self.snes, eval) ) + CHKERR(SNESSetConvergedReason(self.snes, eval)) def getConvergedReason(self) -> ConvergedReason: """Return the termination flag. @@ -1578,7 +1592,7 @@ cdef class SNES(Object): """ cdef PetscSNESConvergedReason reason = SNES_CONVERGED_ITERATING - CHKERR( SNESGetConvergedReason(self.snes, &reason) ) + CHKERR(SNESGetConvergedReason(self.snes, &reason)) return reason def setErrorIfNotConverged(self, flag: bool) -> None: @@ -1592,7 +1606,7 @@ cdef class SNES(Object): """ cdef PetscBool ernc = asBool(flag) - CHKERR( SNESSetErrorIfNotConverged(self.snes, ernc) ) + CHKERR(SNESSetErrorIfNotConverged(self.snes, ernc)) def getErrorIfNotConverged(self) -> bool: """Return the flag indicating error on divergence. 
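A typical solve, continuing the scalar sketch above; b=None requests F(x) = 0, and a non-positive converged reason signals failure:

x = PETSc.Vec().createSeq(1)
x.set(1.0)
snes.solve(None, x)
if snes.getConvergedReason() <= 0:
    raise RuntimeError('SNES failed to converge')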
@@ -1605,7 +1619,7 @@ cdef class SNES(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( SNESGetErrorIfNotConverged(self.snes, &flag) ) + CHKERR(SNESGetErrorIfNotConverged(self.snes, &flag)) return toBool(flag) def setIterationNumber(self, its: int) -> None: @@ -1621,7 +1635,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = asInt(its) - CHKERR( SNESSetIterationNumber(self.snes, ival) ) + CHKERR(SNESSetIterationNumber(self.snes, ival)) def getIterationNumber(self) -> int: """Return the current iteration number. @@ -1634,7 +1648,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = 0 - CHKERR( SNESGetIterationNumber(self.snes, &ival) ) + CHKERR(SNESGetIterationNumber(self.snes, &ival)) return toInt(ival) def setForceIteration(self, force: bool) -> None: @@ -1648,7 +1662,7 @@ cdef class SNES(Object): """ cdef PetscBool bval = asBool(force) - CHKERR( SNESSetForceIteration(self.snes, bval) ) + CHKERR(SNESSetForceIteration(self.snes, bval)) def setFunctionNorm(self, norm: float) -> None: """Set the function norm value. @@ -1663,7 +1677,7 @@ cdef class SNES(Object): """ cdef PetscReal rval = asReal(norm) - CHKERR( SNESSetFunctionNorm(self.snes, rval) ) + CHKERR(SNESSetFunctionNorm(self.snes, rval)) def getFunctionNorm(self) -> float: """Return the function norm. @@ -1676,7 +1690,7 @@ cdef class SNES(Object): """ cdef PetscReal rval = 0 - CHKERR( SNESGetFunctionNorm(self.snes, &rval) ) + CHKERR(SNESGetFunctionNorm(self.snes, &rval)) return toReal(rval) def getLinearSolveIterations(self) -> int: @@ -1690,7 +1704,7 @@ cdef class SNES(Object): """ cdef PetscInt ival = 0 - CHKERR( SNESGetLinearSolveIterations(self.snes, &ival) ) + CHKERR(SNESGetLinearSolveIterations(self.snes, &ival)) return toInt(ival) def getRhs(self) -> Vec: @@ -1704,8 +1718,8 @@ cdef class SNES(Object): """ cdef Vec vec = Vec() - CHKERR( SNESGetRhs(self.snes, &vec.vec) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(SNESGetRhs(self.snes, &vec.vec)) + CHKERR(PetscINCREF(vec.obj)) return vec def getSolution(self) -> Vec: @@ -1719,8 +1733,8 @@ cdef class SNES(Object): """ cdef Vec vec = Vec() - CHKERR( SNESGetSolution(self.snes, &vec.vec) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(SNESGetSolution(self.snes, &vec.vec)) + CHKERR(PetscINCREF(vec.obj)) return vec def setSolution(self, Vec vec) -> None: @@ -1733,7 +1747,7 @@ cdef class SNES(Object): getSolution, petsc.SNESSetSolution """ - CHKERR( SNESSetSolution(self.snes, vec.vec) ) + CHKERR(SNESSetSolution(self.snes, vec.vec)) def getSolutionUpdate(self) -> Vec: """Return the vector holding the solution update. @@ -1746,8 +1760,8 @@ cdef class SNES(Object): """ cdef Vec vec = Vec() - CHKERR( SNESGetSolutionUpdate(self.snes, &vec.vec) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(SNESGetSolutionUpdate(self.snes, &vec.vec)) + CHKERR(PetscINCREF(vec.obj)) return vec # --- linear solver --- @@ -1762,7 +1776,7 @@ cdef class SNES(Object): getKSP, petsc.SNESSetKSP """ - CHKERR( SNESSetKSP(self.snes, ksp.ksp) ) + CHKERR(SNESSetKSP(self.snes, ksp.ksp)) def getKSP(self) -> KSP: """Return the linear solver used by the nonlinear solver. 
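The inner Krylov solver is reached through getKSP (or the ksp property further down) and can be configured directly; the GMRES/ILU choice is illustrative:

ksp = snes.getKSP()
ksp.setType(PETSc.KSP.Type.GMRES)
ksp.getPC().setType(PETSc.PC.Type.ILU)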
@@ -1775,8 +1789,8 @@ cdef class SNES(Object): """ cdef KSP ksp = KSP() - CHKERR( SNESGetKSP(self.snes, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(SNESGetKSP(self.snes, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp def setUseEW(self, flag: bool = True, *targs: Any, **kargs: Any) -> None: @@ -1799,13 +1813,13 @@ cdef class SNES(Object): """ cdef PetscBool bval = flag - CHKERR( SNESKSPSetUseEW(self.snes, bval) ) + CHKERR(SNESKSPSetUseEW(self.snes, bval)) if targs or kargs: self.setParamsEW(*targs, **kargs) def getUseEW(self) -> bool: """Return the flag indicating if the solver uses the Eisenstat-Walker trick. - Not Collective. + Not collective. See Also -------- @@ -1813,7 +1827,7 @@ cdef class SNES(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( SNESKSPGetUseEW(self.snes, &flag) ) + CHKERR(SNESKSPGetUseEW(self.snes, &flag)) return toBool(flag) def setParamsEW(self, @@ -1864,9 +1878,9 @@ cdef class SNES(Object): if alpha is not None: calpha = asReal(alpha) if alpha2 is not None: calpha2 = asReal(alpha2) if threshold is not None: cthreshold = asReal(threshold) - CHKERR( SNESKSPSetParametersEW( + CHKERR(SNESKSPSetParametersEW( self.snes, cversion, crtol_0, crtol_max, - cgamma, calpha, calpha2, cthreshold) ) + cgamma, calpha, calpha2, cthreshold)) def getParamsEW(self) -> dict[str, int | float]: """Get the parameters of the Eisenstat and Walker trick. @@ -1882,16 +1896,16 @@ cdef class SNES(Object): cdef PetscReal rtol_0=0, rtol_max=0 cdef PetscReal gamma=0, alpha=0, alpha2=0 cdef PetscReal threshold=0 - CHKERR( SNESKSPGetParametersEW( + CHKERR(SNESKSPGetParametersEW( self.snes, &version, &rtol_0, &rtol_max, - &gamma, &alpha, &alpha2, &threshold) ) + &gamma, &alpha, &alpha2, &threshold)) return {'version' : toInt(version), 'rtol_0' : toReal(rtol_0), 'rtol_max' : toReal(rtol_max), 'gamma' : toReal(gamma), 'alpha' : toReal(alpha), 'alpha2' : toReal(alpha2), - 'threshold' : toReal(threshold),} + 'threshold' : toReal(threshold), } # --- matrix-free / finite differences --- @@ -1906,7 +1920,7 @@ cdef class SNES(Object): """ cdef PetscBool bval = flag - CHKERR( SNESSetUseMFFD(self.snes, bval) ) + CHKERR(SNESSetUseMFFD(self.snes, bval)) def getUseMF(self) -> bool: """Return the flag indicating if the solver uses matrix-free finite-differencing. @@ -1919,7 +1933,7 @@ cdef class SNES(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( SNESGetUseMFFD(self.snes, &flag) ) + CHKERR(SNESGetUseMFFD(self.snes, &flag)) return toBool(flag) def setUseFD(self, flag=True) -> None: @@ -1933,7 +1947,7 @@ cdef class SNES(Object): """ cdef PetscBool bval = flag - CHKERR( SNESSetUseFDColoring(self.snes, bval) ) + CHKERR(SNESSetUseFDColoring(self.snes, bval)) def getUseFD(self) -> False: """Return ``true`` if the solver uses color finite-differencing for the Jacobian. @@ -1946,7 +1960,7 @@ cdef class SNES(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( SNESGetUseFDColoring(self.snes, &flag) ) + CHKERR(SNESGetUseFDColoring(self.snes, &flag)) return toBool(flag) # --- VI --- @@ -1961,7 +1975,7 @@ cdef class SNES(Object): petsc.SNESVISetVariableBounds """ - CHKERR( SNESVISetVariableBounds(self.snes, xl.vec, xu.vec) ) + CHKERR(SNESVISetVariableBounds(self.snes, xl.vec, xu.vec)) def getVIInactiveSet(self) -> IS: """Return the index set for the inactive set. 
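Eisenstat-Walker and the finite-difference Jacobian modes are plain flags; per the hunks above, extra keywords to setUseEW are forwarded to setParamsEW, and getParamsEW returns a dict:

snes.setUseEW(True, version=2)
snes.setUseMF(True)            # matrix-free Jacobian action, like -snes_mf
params = snes.getParamsEW()    # keys: version, rtol_0, rtol_max, gamma, alpha, alpha2, threshold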
@@ -1974,8 +1988,8 @@ cdef class SNES(Object): """ cdef IS inact = IS() - CHKERR( SNESVIGetInactiveSet(self.snes, &inact.iset) ) - CHKERR( PetscINCREF(inact.obj) ) + CHKERR(SNESVIGetInactiveSet(self.snes, &inact.iset)) + CHKERR(PetscINCREF(inact.obj)) return inact # --- Python --- @@ -1999,10 +2013,10 @@ cdef class SNES(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscSNES newsnes = NULL - CHKERR( SNESCreate(ccomm, &newsnes) ) - CHKERR( PetscCLEAR(self.obj) ); self.snes = newsnes - CHKERR( SNESSetType(self.snes, SNESPYTHON) ) - CHKERR( SNESPythonSetContext(self.snes, context) ) + CHKERR(SNESCreate(ccomm, &newsnes)) + CHKERR(PetscCLEAR(self.obj)); self.snes = newsnes + CHKERR(SNESSetType(self.snes, SNESPYTHON)) + CHKERR(SNESPythonSetContext(self.snes, context)) return self def setPythonContext(self, context: Any) -> None: @@ -2015,7 +2029,7 @@ cdef class SNES(Object): petsc_python_snes, getPythonContext """ - CHKERR( SNESPythonSetContext(self.snes, context) ) + CHKERR(SNESPythonSetContext(self.snes, context)) def getPythonContext(self) -> Any: """Return the instance of the class implementing the required Python methods. @@ -2028,7 +2042,7 @@ cdef class SNES(Object): """ cdef void *context = NULL - CHKERR( SNESPythonGetContext(self.snes, &context) ) + CHKERR(SNESPythonGetContext(self.snes, &context)) if context == NULL: return None else: return context @@ -2045,7 +2059,7 @@ cdef class SNES(Object): """ cdef const char *cval = NULL py_type = str2bytes(py_type, &cval) - CHKERR( SNESPythonSetType(self.snes, cval) ) + CHKERR(SNESPythonSetType(self.snes, cval)) def getPythonType(self) -> str: """Return the fully qualified Python name of the class used by the solver. @@ -2059,7 +2073,7 @@ cdef class SNES(Object): """ cdef const char *cval = NULL - CHKERR( SNESPythonGetType(self.snes, &cval) ) + CHKERR(SNESPythonGetType(self.snes, &cval)) return bytes2str(cval) # --- Composite --- @@ -2077,8 +2091,8 @@ cdef class SNES(Object): cdef PetscInt cn cdef SNES snes = SNES() cn = asInt(n) - CHKERR( SNESCompositeGetSNES(self.snes, cn, &snes.snes) ) - CHKERR( PetscINCREF(snes.obj) ) + CHKERR(SNESCompositeGetSNES(self.snes, cn, &snes.snes)) + CHKERR(PetscINCREF(snes.obj)) return snes def getCompositeNumber(self) -> int: @@ -2092,7 +2106,7 @@ cdef class SNES(Object): """ cdef PetscInt cn = 0 - CHKERR( SNESCompositeGetNumber(self.snes, &cn) ) + CHKERR(SNESCompositeGetNumber(self.snes, &cn)) return toInt(cn) # --- NASM --- @@ -2109,8 +2123,8 @@ cdef class SNES(Object): """ cdef PetscInt cn = asInt(n) cdef SNES snes = SNES() - CHKERR( SNESNASMGetSNES(self.snes, cn, &snes.snes) ) - CHKERR( PetscINCREF(snes.obj) ) + CHKERR(SNESNASMGetSNES(self.snes, cn, &snes.snes)) + CHKERR(PetscINCREF(snes.obj)) return snes def getNASMNumber(self) -> int: @@ -2124,14 +2138,14 @@ cdef class SNES(Object): """ cdef PetscInt cn = 0 - CHKERR( SNESNASMGetNumber(self.snes, &cn) ) + CHKERR(SNESNASMGetNumber(self.snes, &cn)) return toInt(cn) # --- Patch --- def setPatchCellNumbering(self, Section sec) -> None: """Set cell patch numbering.""" - CHKERR( SNESPatchSetCellNumbering(self.snes, sec.sec) ) + CHKERR(SNESPatchSetCellNumbering(self.snes, sec.sec)) def setPatchDiscretisationInfo(self, dms, bs, cellNodeMaps, @@ -2155,9 +2169,9 @@ cdef class SNES(Object): globalBcNodes = iarray_i(globalBcNodes, &numGlobalBcs, &cglobalBcNodes) subspaceOffsets = iarray_i(subspaceOffsets, NULL, &csubspaceOffsets) - CHKERR( PetscMalloc(numSubSpaces*sizeof(PetscInt), &nodesPerCell) ) - CHKERR( 
PetscMalloc(numSubSpaces*sizeof(PetscDM), &cdms) ) - CHKERR( PetscMalloc(numSubSpaces*sizeof(PetscInt*), &ccellNodeMaps) ) + CHKERR(PetscMalloc(numSubSpaces*sizeof(PetscInt), &nodesPerCell)) + CHKERR(PetscMalloc(numSubSpaces*sizeof(PetscDM), &cdms)) + CHKERR(PetscMalloc(numSubSpaces*sizeof(PetscInt*), &ccellNodeMaps)) for i in range(numSubSpaces): cdms[i] = (dms[i]).dm _, nodes = asarray(cellNodeMaps[i]).shape @@ -2165,14 +2179,14 @@ cdef class SNES(Object): nodesPerCell[i] = asInt(nodes) # TODO: refactor on the PETSc side to take ISes? - CHKERR( SNESPatchSetDiscretisationInfo(self.snes, numSubSpaces, - cdms, cbs, nodesPerCell, - ccellNodeMaps, csubspaceOffsets, - numGhostBcs, cghostBcNodes, - numGlobalBcs, cglobalBcNodes) ) - CHKERR( PetscFree(nodesPerCell) ) - CHKERR( PetscFree(cdms) ) - CHKERR( PetscFree(ccellNodeMaps) ) + CHKERR(SNESPatchSetDiscretisationInfo(self.snes, numSubSpaces, + cdms, cbs, nodesPerCell, + ccellNodeMaps, csubspaceOffsets, + numGhostBcs, cghostBcNodes, + numGlobalBcs, cglobalBcNodes)) + CHKERR(PetscFree(nodesPerCell)) + CHKERR(PetscFree(cdms)) + CHKERR(PetscFree(ccellNodeMaps)) def setPatchComputeOperator(self, operator, args=None, kargs=None) -> None: """Set patch compute operator.""" @@ -2180,7 +2194,7 @@ cdef class SNES(Object): if kargs is None: kargs = {} context = (operator, args, kargs) self.set_attr("__patch_compute_operator__", context) - CHKERR( SNESPatchSetComputeOperator(self.snes, PCPatch_ComputeOperator, context) ) + CHKERR(SNESPatchSetComputeOperator(self.snes, PCPatch_ComputeOperator, context)) def setPatchComputeFunction(self, function, args=None, kargs=None) -> None: """Set patch compute function.""" @@ -2188,7 +2202,7 @@ cdef class SNES(Object): if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr("__patch_compute_function__", context) - CHKERR( SNESPatchSetComputeFunction(self.snes, PCPatch_ComputeFunction, context) ) + CHKERR(SNESPatchSetComputeFunction(self.snes, PCPatch_ComputeFunction, context)) def setPatchConstructType(self, typ, operator=None, args=None, kargs=None) -> None: """Set patch construct type.""" @@ -2202,7 +2216,7 @@ cdef class SNES(Object): else: context = None self.set_attr("__patch_construction_operator__", context) - CHKERR( SNESPatchSetConstructType(self.snes, typ, PCPatch_UserConstructOperator, context) ) + CHKERR(SNESPatchSetConstructType(self.snes, typ, PCPatch_UserConstructOperator, context)) # --- application context --- @@ -2210,6 +2224,7 @@ cdef class SNES(Object): """Application context.""" def __get__(self) -> Any: return self.getAppCtx() + def __set__(self, value): self.setAppCtx(value) @@ -2219,6 +2234,7 @@ cdef class SNES(Object): """`DM`.""" def __get__(self) -> DM: return self.getDM() + def __set__(self, value): self.setDM(value) @@ -2228,6 +2244,7 @@ cdef class SNES(Object): """Nonlinear preconditioner.""" def __get__(self) -> SNES: return self.getNPC() + def __set__(self, value): self.setNPC(value) @@ -2254,12 +2271,15 @@ cdef class SNES(Object): """Linear solver.""" def __get__(self) -> KSP: return self.getKSP() + def __set__(self, value): self.setKSP(value) property use_ew: - def __get__(self): + """Use the Eisenstat-Walker trick.""" + def __get__(self) -> bool: return self.getUseEW() + def __set__(self, value): self.setUseEW(value) @@ -2269,6 +2289,7 @@ cdef class SNES(Object): """Relative residual tolerance.""" def __get__(self) -> float: return self.getTolerances()[0] + def __set__(self, value): self.setTolerances(rtol=value) @@ -2276,6 +2297,7 @@ cdef class SNES(Object): 
"""Absolute residual tolerance.""" def __get__(self) -> float: return self.getTolerances()[1] + def __set__(self, value): self.setTolerances(atol=value) @@ -2283,6 +2305,7 @@ cdef class SNES(Object): """Solution update tolerance.""" def __get__(self) -> float: return self.getTolerances()[2] + def __set__(self, value): self.setTolerances(stol=value) @@ -2290,6 +2313,7 @@ cdef class SNES(Object): """Maximum number of iterations.""" def __get__(self) -> int: return self.getTolerances()[3] + def __set__(self, value): self.setTolerances(max_it=value) @@ -2299,6 +2323,7 @@ cdef class SNES(Object): """Maximum number of function evaluations.""" def __get__(self) -> int: return self.getMaxFunctionEvaluations() + def __set__(self, value): self.setMaxFunctionEvaluations(value) @@ -2308,6 +2333,7 @@ cdef class SNES(Object): """Number of iterations.""" def __get__(self) -> int: return self.getIterationNumber() + def __set__(self, value): self.setIterationNumber(value) @@ -2315,6 +2341,7 @@ cdef class SNES(Object): """Function norm.""" def __get__(self) -> float: return self.getFunctionNorm() + def __set__(self, value): self.setFunctionNorm(value) @@ -2329,6 +2356,7 @@ cdef class SNES(Object): """Converged reason.""" def __get__(self) -> ConvergedReason: return self.getConvergedReason() + def __set__(self, value): self.setConvergedReason(value) @@ -2353,6 +2381,7 @@ cdef class SNES(Object): """Boolean indicating if the solver uses matrix-free finite-differencing.""" def __get__(self) -> bool: return self.getUseMF() + def __set__(self, value): self.setUseMF(value) @@ -2360,6 +2389,7 @@ cdef class SNES(Object): """Boolean indicating if the solver uses coloring finite-differencing.""" def __get__(self) -> bool: return self.getUseFD() + def __set__(self, value): self.setUseFD(value) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Scatter.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Scatter.pyx index 939e4846129..81d1444f4ae 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Scatter.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Scatter.pyx @@ -19,6 +19,7 @@ class ScatterType(object): # -------------------------------------------------------------------- + cdef class Scatter(Object): """Scatter object. @@ -31,7 +32,6 @@ cdef class Scatter(Object): """ - Type = ScatterType Mode = ScatterMode @@ -72,7 +72,7 @@ cdef class Scatter(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( VecScatterView(self.sct, vwr) ) + CHKERR(VecScatterView(self.sct, vwr)) def destroy(self) -> Self: """Destroy the scatter. @@ -84,7 +84,7 @@ cdef class Scatter(Object): petsc.VecScatterDestroy """ - CHKERR( VecScatterDestroy(&self.sct) ) + CHKERR(VecScatterDestroy(&self.sct)) return self def create( @@ -92,8 +92,7 @@ cdef class Scatter(Object): Vec vec_from, IS is_from or None, Vec vec_to, - IS is_to or None, - ) -> Self: + IS is_to or None) -> Self: """Create a scatter object. Collective. 
@@ -139,9 +138,9 @@ cdef class Scatter(Object): if is_from is not None: cisfrom = is_from.iset if is_to is not None: cisto = is_to.iset cdef PetscScatter newsct = NULL - CHKERR( VecScatterCreate( - vec_from.vec, cisfrom, vec_to.vec, cisto, &newsct) ) - CHKERR( PetscCLEAR(self.obj) ); self.sct = newsct + CHKERR(VecScatterCreate( + vec_from.vec, cisfrom, vec_to.vec, cisto, &newsct)) + CHKERR(PetscCLEAR(self.obj)); self.sct = newsct return self def setType(self, scatter_type: Type | str) -> None: @@ -155,8 +154,8 @@ cdef class Scatter(Object): """ cdef PetscScatterType cval = NULL - vec_type = str2bytes(scatter_type, &cval) - CHKERR( VecScatterSetType(self.sct, cval) ) + scatter_type = str2bytes(scatter_type, &cval) + CHKERR(VecScatterSetType(self.sct, cval)) def getType(self) -> str: """Return the type of the scatter. @@ -169,7 +168,7 @@ cdef class Scatter(Object): """ cdef PetscScatterType cval = NULL - CHKERR( VecScatterGetType(self.sct, &cval) ) + CHKERR(VecScatterGetType(self.sct, &cval)) return bytes2str(cval) def setFromOptions(self) -> None: @@ -182,7 +181,7 @@ cdef class Scatter(Object): petsc_options, petsc.VecScatterSetFromOptions """ - CHKERR( VecScatterSetFromOptions(self.sct) ) + CHKERR(VecScatterSetFromOptions(self.sct)) def setUp(self) -> Self: """Set up the internal data structures for using the scatter. @@ -194,13 +193,13 @@ cdef class Scatter(Object): petsc.VecScatterSetUp """ - CHKERR( VecScatterSetUp(self.sct) ) + CHKERR(VecScatterSetUp(self.sct)) return self def copy(self) -> Scatter: """Return a copy of the scatter.""" cdef Scatter scatter = Scatter() - CHKERR( VecScatterCopy(self.sct, &scatter.sct) ) + CHKERR(VecScatterCopy(self.sct, &scatter.sct)) return scatter @classmethod @@ -227,8 +226,8 @@ cdef class Scatter(Object): """ cdef Scatter scatter = Scatter() cdef Vec ovec = Vec() - CHKERR( VecScatterCreateToAll( - vec.vec, &scatter.sct, &ovec.vec) ) + CHKERR(VecScatterCreateToAll( + vec.vec, &scatter.sct, &ovec.vec)) return (scatter, ovec) @classmethod @@ -255,8 +254,8 @@ cdef class Scatter(Object): """ cdef Scatter scatter = Scatter() cdef Vec ovec = Vec() - CHKERR( VecScatterCreateToZero( - vec.vec, &scatter.sct, &ovec.vec) ) + CHKERR(VecScatterCreateToZero( + vec.vec, &scatter.sct, &ovec.vec)) return (scatter, ovec) # @@ -265,8 +264,7 @@ cdef class Scatter(Object): Vec vec_from, Vec vec_to, addv: InsertModeSpec = None, - mode: ScatterModeSpec = None, - ) -> None: + mode: ScatterModeSpec = None) -> None: """Begin a generalized scatter from one vector into another. Collective. @@ -281,16 +279,15 @@ cdef class Scatter(Object): """ cdef PetscInsertMode caddv = insertmode(addv) cdef PetscScatterMode csctm = scattermode(mode) - CHKERR( VecScatterBegin(self.sct, vec_from.vec, vec_to.vec, - caddv, csctm) ) + CHKERR(VecScatterBegin(self.sct, vec_from.vec, vec_to.vec, + caddv, csctm)) def end( self, Vec vec_from, Vec vec_to, addv: InsertModeSpec = None, - mode: ScatterModeSpec = None, - ) -> None: + mode: ScatterModeSpec = None) -> None: """Complete a generalized scatter from one vector into another. Collective. 
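toAll/toZero build both the scatter and its destination vector in one call; a transfer is then the begin/end pair, or the combined scatter() defined just below (vector size and values are illustrative):

from petsc4py import PETSc

v = PETSc.Vec().createMPI(16, comm=PETSc.COMM_WORLD)
v.set(1.0)
sct, seq = PETSc.Scatter.toZero(v)   # 'seq' lives on rank 0
sct.scatter(v, seq, addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)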
@@ -305,16 +302,15 @@ cdef class Scatter(Object): """ cdef PetscInsertMode caddv = insertmode(addv) cdef PetscScatterMode csctm = scattermode(mode) - CHKERR( VecScatterEnd(self.sct, vec_from.vec, vec_to.vec, - caddv, csctm) ) + CHKERR(VecScatterEnd(self.sct, vec_from.vec, vec_to.vec, + caddv, csctm)) def scatter( self, Vec vec_from, Vec vec_to, addv: InsertModeSpec = None, - mode: ScatterModeSpec = None, - ) -> None: + mode: ScatterModeSpec = None) -> None: """Perform a generalized scatter from one vector into another. Collective. @@ -337,10 +333,10 @@ cdef class Scatter(Object): """ cdef PetscInsertMode caddv = insertmode(addv) cdef PetscScatterMode csctm = scattermode(mode) - CHKERR( VecScatterBegin(self.sct, vec_from.vec, vec_to.vec, - caddv, csctm) ) - CHKERR( VecScatterEnd(self.sct, vec_from.vec, vec_to.vec, - caddv, csctm) ) + CHKERR(VecScatterBegin(self.sct, vec_from.vec, vec_to.vec, + caddv, csctm)) + CHKERR(VecScatterEnd(self.sct, vec_from.vec, vec_to.vec, + caddv, csctm)) scatterBegin = begin scatterEnd = end diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Section.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Section.pyx index 6de3460115b..755657a03d5 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Section.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Section.pyx @@ -7,10 +7,6 @@ cdef class Section(Object): self.obj = &self.sec self.sec = NULL - def __dealloc__(self): - CHKERR( PetscSectionDestroy(&self.sec) ) - self.sec = NULL - def view(self, Viewer viewer=None) -> None: """View the section. @@ -28,7 +24,7 @@ cdef class Section(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscSectionView(self.sec, vwr) ) + CHKERR(PetscSectionView(self.sec, vwr)) def destroy(self) -> Self: """Destroy a section. @@ -40,7 +36,7 @@ cdef class Section(Object): petsc.PetscSectionDestroy """ - CHKERR( PetscSectionDestroy(&self.sec) ) + CHKERR(PetscSectionDestroy(&self.sec)) return self def create(self, comm: Comm | None = None) -> Self: @@ -77,8 +73,8 @@ cdef class Section(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscSection newsec = NULL - CHKERR( PetscSectionCreate(ccomm, &newsec) ) - CHKERR( PetscCLEAR(self.obj) ); self.sec = newsec + CHKERR(PetscSectionCreate(ccomm, &newsec)) + CHKERR(PetscCLEAR(self.obj)); self.sec = newsec return self def clone(self) -> Section: @@ -94,7 +90,7 @@ cdef class Section(Object): """ cdef Section sec =
type(self)() - CHKERR( PetscSectionClone(self.sec, &sec.sec) ) + CHKERR(PetscSectionClone(self.sec, &sec.sec)) return sec def setUp(self) -> None: @@ -109,7 +105,7 @@ cdef class Section(Object): petsc.PetscSectionSetUp """ - CHKERR( PetscSectionSetUp(self.sec) ) + CHKERR(PetscSectionSetUp(self.sec)) def reset(self) -> None: """Free all section data. @@ -121,7 +117,7 @@ cdef class Section(Object): petsc.PetscSectionReset """ - CHKERR( PetscSectionReset(self.sec) ) + CHKERR(PetscSectionReset(self.sec)) def getNumFields(self) -> int: """Return the number of fields in a section. @@ -136,7 +132,7 @@ cdef class Section(Object): """ cdef PetscInt numFields = 0 - CHKERR( PetscSectionGetNumFields(self.sec, &numFields) ) + CHKERR(PetscSectionGetNumFields(self.sec, &numFields)) return toInt(numFields) def setNumFields(self, numFields: int) -> None: @@ -155,7 +151,7 @@ cdef class Section(Object): """ cdef PetscInt cnumFields = asInt(numFields) - CHKERR( PetscSectionSetNumFields(self.sec, cnumFields) ) + CHKERR(PetscSectionSetNumFields(self.sec, cnumFields)) def getFieldName(self, field: int) -> str: """Return the name of a field in the section. @@ -174,7 +170,7 @@ cdef class Section(Object): """ cdef PetscInt cfield = asInt(field) cdef const char *fieldName = NULL - CHKERR( PetscSectionGetFieldName(self.sec,cfield,&fieldName) ) + CHKERR(PetscSectionGetFieldName(self.sec, cfield, &fieldName)) return bytes2str(fieldName) def setFieldName(self, field: int, fieldName: str) -> None: @@ -197,7 +193,7 @@ cdef class Section(Object): cdef PetscInt cfield = asInt(field) cdef const char *cname = NULL fieldName = str2bytes(fieldName, &cname) - CHKERR( PetscSectionSetFieldName(self.sec,cfield,cname) ) + CHKERR(PetscSectionSetFieldName(self.sec, cfield, cname)) def getFieldComponents(self, field: int) -> int: """Return the number of field components for the given field. @@ -215,7 +211,7 @@ cdef class Section(Object): """ cdef PetscInt cfield = asInt(field), cnumComp = 0 - CHKERR( PetscSectionGetFieldComponents(self.sec,cfield,&cnumComp) ) + CHKERR(PetscSectionGetFieldComponents(self.sec, cfield, &cnumComp)) return toInt(cnumComp) def setFieldComponents(self, field: int, numComp: int) -> None: @@ -237,7 +233,7 @@ cdef class Section(Object): """ cdef PetscInt cfield = asInt(field) cdef PetscInt cnumComp = asInt(numComp) - CHKERR( PetscSectionSetFieldComponents(self.sec,cfield,cnumComp) ) + CHKERR(PetscSectionSetFieldComponents(self.sec, cfield, cnumComp)) def getChart(self) -> tuple[int, int]: """Return the range in which points (indices) lie for this section. @@ -253,7 +249,7 @@ cdef class Section(Object): """ cdef PetscInt pStart = 0, pEnd = 0 - CHKERR( PetscSectionGetChart(self.sec, &pStart, &pEnd) ) + CHKERR(PetscSectionGetChart(self.sec, &pStart, &pEnd)) return toInt(pStart), toInt(pEnd) def setChart(self, pStart: int, pEnd: int) -> None: @@ -278,7 +274,7 @@ cdef class Section(Object): """ cdef PetscInt cStart = asInt(pStart) cdef PetscInt cEnd = asInt(pEnd) - CHKERR( PetscSectionSetChart(self.sec, cStart, cEnd) ) + CHKERR(PetscSectionSetChart(self.sec, cStart, cEnd)) def getPermutation(self) -> IS: """Return the permutation that was set with `setPermutation`. 
@@ -291,8 +287,8 @@ cdef class Section(Object): """ cdef IS perm = IS() - CHKERR( PetscSectionGetPermutation(self.sec, &perm.iset)) - CHKERR( PetscINCREF(perm.obj) ) + CHKERR(PetscSectionGetPermutation(self.sec, &perm.iset)) + CHKERR(PetscINCREF(perm.obj)) return perm def setPermutation(self, IS perm) -> None: @@ -310,7 +306,7 @@ cdef class Section(Object): getPermutation, petsc.PetscSectionSetPermutation """ - CHKERR( PetscSectionSetPermutation(self.sec, perm.iset)) + CHKERR(PetscSectionSetPermutation(self.sec, perm.iset)) def getDof(self, point: int) -> int: """Return the number of degrees of freedom for a given point. @@ -331,7 +327,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point), cnumDof = 0 - CHKERR( PetscSectionGetDof(self.sec,cpoint,&cnumDof) ) + CHKERR(PetscSectionGetDof(self.sec, cpoint, &cnumDof)) return toInt(cnumDof) def setDof(self, point: int, numDof: int) -> None: @@ -353,7 +349,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionSetDof(self.sec,cpoint,cnumDof) ) + CHKERR(PetscSectionSetDof(self.sec, cpoint, cnumDof)) def addDof(self, point: int, numDof: int) -> None: """Add ``numDof`` degrees of freedom associated with a given point. @@ -374,7 +370,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionAddDof(self.sec,cpoint,cnumDof) ) + CHKERR(PetscSectionAddDof(self.sec, cpoint, cnumDof)) def getFieldDof(self, point: int, field: int) -> int: """Return the number of DOFs associated with a field on a given point. @@ -395,7 +391,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point), cnumDof = 0 cdef PetscInt cfield = asInt(field) - CHKERR( PetscSectionGetFieldDof(self.sec,cpoint,cfield,&cnumDof) ) + CHKERR(PetscSectionGetFieldDof(self.sec, cpoint, cfield, &cnumDof)) return toInt(cnumDof) def setFieldDof(self, point: int, field: int, numDof: int) -> None: @@ -420,7 +416,7 @@ cdef class Section(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionSetFieldDof(self.sec,cpoint,cfield,cnumDof) ) + CHKERR(PetscSectionSetFieldDof(self.sec, cpoint, cfield, cnumDof)) def addFieldDof(self, point: int, field: int, numDof: int) -> None: """Add ``numDof`` DOFs associated with a field on a given point. @@ -444,7 +440,7 @@ cdef class Section(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionAddFieldDof(self.sec,cpoint,cfield,cnumDof) ) + CHKERR(PetscSectionAddFieldDof(self.sec, cpoint, cfield, cnumDof)) def getConstraintDof(self, point: int) -> int: """Return the number of constrained DOFs associated with a given point. @@ -462,7 +458,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point), cnumDof = 0 - CHKERR( PetscSectionGetConstraintDof(self.sec,cpoint,&cnumDof) ) + CHKERR(PetscSectionGetConstraintDof(self.sec, cpoint, &cnumDof)) return toInt(cnumDof) def setConstraintDof(self, point: int, numDof: int) -> None: @@ -484,7 +480,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionSetConstraintDof(self.sec,cpoint,cnumDof) ) + CHKERR(PetscSectionSetConstraintDof(self.sec, cpoint, cnumDof)) def addConstraintDof(self, point: int, numDof: int) -> None: """Increment the number of constrained DOFs for a given point. 
@@ -505,7 +501,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionAddConstraintDof(self.sec,cpoint,cnumDof) ) + CHKERR(PetscSectionAddConstraintDof(self.sec, cpoint, cnumDof)) def getFieldConstraintDof(self, point: int, field: int) -> int: """Return the number of constrained DOFs for a given field on a point. @@ -526,15 +522,14 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point), cnumDof = 0 cdef PetscInt cfield = asInt(field) - CHKERR( PetscSectionGetFieldConstraintDof(self.sec,cpoint,cfield,&cnumDof) ) + CHKERR(PetscSectionGetFieldConstraintDof(self.sec, cpoint, cfield, &cnumDof)) return toInt(cnumDof) def setFieldConstraintDof( self, point: int, field: int, - numDof: int, - ) -> None: + numDof: int) -> None: """Set the number of constrained DOFs for a given field on a point. Not collective. @@ -557,14 +552,13 @@ cdef class Section(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionSetFieldConstraintDof(self.sec,cpoint,cfield,cnumDof) ) + CHKERR(PetscSectionSetFieldConstraintDof(self.sec, cpoint, cfield, cnumDof)) def addFieldConstraintDof( self, point: int, field: int, - numDof: int, - ) -> None: + numDof: int) -> None: """Add ``numDof`` constrained DOFs for a given field on a point. Not collective. @@ -587,7 +581,7 @@ cdef class Section(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) cdef PetscInt cnumDof = asInt(numDof) - CHKERR( PetscSectionAddFieldConstraintDof(self.sec,cpoint,cfield,cnumDof) ) + CHKERR(PetscSectionAddFieldConstraintDof(self.sec, cpoint, cfield, cnumDof)) def getConstraintIndices(self, point: int) -> ArrayInt: """Return the point DOFs numbers which are constrained for a given point. @@ -609,8 +603,8 @@ cdef class Section(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt nindex = 0 cdef const PetscInt *indices = NULL - CHKERR( PetscSectionGetConstraintDof(self.sec, cpoint, &nindex) ) - CHKERR( PetscSectionGetConstraintIndices(self.sec, cpoint, &indices) ) + CHKERR(PetscSectionGetConstraintDof(self.sec, cpoint, &nindex)) + CHKERR(PetscSectionGetConstraintIndices(self.sec, cpoint, &indices)) return array_i(nindex, indices) def setConstraintIndices(self, point: int, indices: Sequence[int]) -> None: @@ -634,8 +628,8 @@ cdef class Section(Object): cdef PetscInt nindex = 0 cdef PetscInt *cindices = NULL indices = iarray_i(indices, &nindex, &cindices) - CHKERR( PetscSectionSetConstraintDof(self.sec,cpoint,nindex) ) - CHKERR( PetscSectionSetConstraintIndices(self.sec,cpoint,cindices) ) + CHKERR(PetscSectionSetConstraintDof(self.sec, cpoint, nindex)) + CHKERR(PetscSectionSetConstraintIndices(self.sec, cpoint, cindices)) def getFieldConstraintIndices(self, point: int, field: int) -> ArrayInt: """Return the field DOFs numbers, in [0, DOFs), which are constrained. 
@@ -660,16 +654,15 @@ cdef class Section(Object): cdef PetscInt cfield = asInt(field) cdef PetscInt nindex = 0 cdef const PetscInt *indices = NULL - CHKERR( PetscSectionGetFieldConstraintDof(self.sec,cpoint,cfield,&nindex) ) - CHKERR( PetscSectionGetFieldConstraintIndices(self.sec,cpoint,cfield,&indices) ) + CHKERR(PetscSectionGetFieldConstraintDof(self.sec, cpoint, cfield, &nindex)) + CHKERR(PetscSectionGetFieldConstraintIndices(self.sec, cpoint, cfield, &indices)) return array_i(nindex, indices) def setFieldConstraintIndices( self, point: int, field: int, - indices: Sequence[int], - ) -> None: + indices: Sequence[int]) -> None: """Set the field DOFs numbers, in [0, DOFs), which are constrained. Not collective. @@ -693,8 +686,8 @@ cdef class Section(Object): cdef PetscInt nindex = 0 cdef PetscInt *cindices = NULL indices = iarray_i(indices, &nindex, &cindices) - CHKERR( PetscSectionSetFieldConstraintDof(self.sec,cpoint,cfield,nindex) ) - CHKERR( PetscSectionSetFieldConstraintIndices(self.sec,cpoint,cfield,cindices) ) + CHKERR(PetscSectionSetFieldConstraintDof(self.sec, cpoint, cfield, nindex)) + CHKERR(PetscSectionSetFieldConstraintIndices(self.sec, cpoint, cfield, cindices)) def getMaxDof(self) -> int: """Return the maximum number of DOFs for any point in the section. @@ -707,7 +700,7 @@ cdef class Section(Object): """ cdef PetscInt maxDof = 0 - CHKERR( PetscSectionGetMaxDof(self.sec,&maxDof) ) + CHKERR(PetscSectionGetMaxDof(self.sec, &maxDof)) return toInt(maxDof) def getStorageSize(self) -> int: @@ -721,7 +714,7 @@ cdef class Section(Object): """ cdef PetscInt size = 0 - CHKERR( PetscSectionGetStorageSize(self.sec,&size) ) + CHKERR(PetscSectionGetStorageSize(self.sec, &size)) return toInt(size) def getConstrainedStorageSize(self) -> int: @@ -735,7 +728,7 @@ cdef class Section(Object): """ cdef PetscInt size = 0 - CHKERR( PetscSectionGetConstrainedStorageSize(self.sec,&size) ) + CHKERR(PetscSectionGetConstrainedStorageSize(self.sec, &size)) return toInt(size) def getOffset(self, point: int) -> int: @@ -757,7 +750,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point), offset = 0 - CHKERR( PetscSectionGetOffset(self.sec,cpoint,&offset) ) + CHKERR(PetscSectionGetOffset(self.sec, cpoint, &offset)) return toInt(offset) def setOffset(self, point: int, offset: int) -> None: @@ -781,7 +774,7 @@ cdef class Section(Object): """ cdef PetscInt cpoint = asInt(point) cdef PetscInt coffset = asInt(offset) - CHKERR( PetscSectionSetOffset(self.sec,cpoint,coffset) ) + CHKERR(PetscSectionSetOffset(self.sec, cpoint, coffset)) def getFieldOffset(self, point: int, field: int) -> int: """Return the offset for the field DOFs on the given point. @@ -806,7 +799,7 @@ cdef class Section(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) cdef PetscInt offset = 0 - CHKERR( PetscSectionGetFieldOffset(self.sec,cpoint,cfield,&offset) ) + CHKERR(PetscSectionGetFieldOffset(self.sec, cpoint, cfield, &offset)) return toInt(offset) def setFieldOffset(self, point: int, field: int, offset: int) -> None: @@ -833,9 +826,9 @@ cdef class Section(Object): cdef PetscInt cpoint = asInt(point) cdef PetscInt cfield = asInt(field) cdef PetscInt coffset = asInt(offset) - CHKERR( PetscSectionSetFieldOffset(self.sec,cpoint,cfield,coffset) ) + CHKERR(PetscSectionSetFieldOffset(self.sec, cpoint, cfield, coffset)) - def getOffsetRange(self) -> tuple[int,int]: + def getOffsetRange(self) -> tuple[int, int]: """Return the full range of offsets, [start, end), for a section. Not collective. 
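Taken together, the Section hunks above follow the standard build sequence: set the chart, assign DOFs per point, call `setUp`, then query offsets and sizes. A minimal sketch using only calls that appear in this file:

    from petsc4py import PETSc

    sec = PETSc.Section().create()
    sec.setChart(0, 4)             # points 0, 1, 2, 3
    for point in range(4):
        sec.setDof(point, 2)       # two DOFs on each point
    sec.setUp()
    assert sec.getOffset(2) == 4       # offsets accumulate: 2 points * 2 DOFs
    assert sec.getStorageSize() == 8   # 4 points * 2 DOFs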
@@ -846,8 +839,8 @@ cdef class Section(Object): """ cdef PetscInt oStart = 0, oEnd = 0 - CHKERR( PetscSectionGetOffsetRange(self.sec,&oStart,&oEnd) ) - return toInt(oStart),toInt(oEnd) + CHKERR(PetscSectionGetOffsetRange(self.sec, &oStart, &oEnd)) + return toInt(oStart), toInt(oEnd) # FIXME: Hardcoded PETSC_FALSE parameters def createGlobalSection(self, SF sf) -> Section: @@ -881,5 +874,5 @@ cdef class Section(Object): """ cdef Section gsec = Section() - CHKERR( PetscSectionCreateGlobalSection(self.sec,sf.sf,PETSC_TRUE,PETSC_FALSE,PETSC_FALSE,&gsec.sec) ) + CHKERR(PetscSectionCreateGlobalSection(self.sec, sf.sf, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &gsec.sec)) return gsec diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Space.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Space.pyx index 501cfc36e68..a909bff90bb 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Space.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Space.pyx @@ -1,6 +1,7 @@ # -------------------------------------------------------------------- class SpaceType(object): + """The function space types.""" POLYNOMIAL = S_(PETSCSPACEPOLYNOMIAL) PTRIMMED = S_(PETSCSPACEPTRIMMED) TENSOR = S_(PETSCSPACETENSOR) @@ -11,8 +12,9 @@ class SpaceType(object): # -------------------------------------------------------------------- + cdef class Space(Object): - """Linear space object.""" + """Function space object.""" Type = SpaceType def __cinit__(self): @@ -29,7 +31,7 @@ cdef class Space(Object): petsc.PetscSpaceSetUp """ - CHKERR( PetscSpaceSetUp(self.space) ) + CHKERR(PetscSpaceSetUp(self.space)) def create(self, comm: Comm | None = None) -> Self: """Create an empty `Space` object. @@ -50,8 +52,8 @@ cdef class Space(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscSpace newsp = NULL - CHKERR( PetscSpaceCreate(ccomm, &newsp) ) - CHKERR( PetscCLEAR(self.obj) ); self.space = newsp + CHKERR(PetscSpaceCreate(ccomm, &newsp)) + CHKERR(PetscCLEAR(self.obj)); self.space = newsp return self def destroy(self) -> Self: @@ -64,7 +66,7 @@ cdef class Space(Object): petsc.PetscSpaceDestroy """ - CHKERR( PetscSpaceDestroy(&self.space) ) + CHKERR(PetscSpaceDestroy(&self.space)) return self def view(self, Viewer viewer=None) -> None: @@ -84,7 +86,7 @@ cdef class Space(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscSpaceView(self.space, vwr) ) + CHKERR(PetscSpaceView(self.space, vwr)) def setFromOptions(self) -> None: """Set parameters in `Space` from the options database. @@ -96,23 +98,27 @@ cdef class Space(Object): petsc_options, petsc.PetscSpaceSetFromOptions """ - CHKERR( PetscSpaceSetFromOptions(self.space) ) + CHKERR(PetscSpaceSetFromOptions(self.space)) def getDimension(self) -> int: """Return the number of basis vectors. + Not collective. + See Also -------- petsc.PetscSpaceGetDimension """ - cdef PetscInt cdim - CHKERR( PetscSpaceGetDimension(self.space, &cdim)) + cdef PetscInt cdim = 0 + CHKERR(PetscSpaceGetDimension(self.space, &cdim)) return toInt(cdim) def getDegree(self) -> tuple[int, int]: """Return the polynomial degrees that characterize this space. + Not collective. 
+ Returns ------- minDegree : int @@ -125,13 +131,15 @@ cdef class Space(Object): setDegree, petsc.PetscSpaceGetDegree """ - cdef PetscInt cdegmax, cdegmin - CHKERR( PetscSpaceGetDegree(self.space, &cdegmin, &cdegmax)) + cdef PetscInt cdegmax = 0, cdegmin = 0 + CHKERR(PetscSpaceGetDegree(self.space, &cdegmin, &cdegmax)) return toInt(cdegmin), toInt(cdegmax) def setDegree(self, degree: int | None, maxDegree: int | None) -> None: """Set the degree of approximation for this space. + Logically collective. + One of ``degree`` and ``maxDegree`` can be `None`. Parameters @@ -146,28 +154,31 @@ cdef class Space(Object): getDegree, petsc.PetscSpaceSetDegree """ - assert( (degree != None) & (maxDegree != None)) cdef PetscInt cdegree = PETSC_DETERMINE if degree is not None: cdegree = asInt(degree) cdef PetscInt cmaxdegree = PETSC_DETERMINE if maxDegree is not None: cmaxdegree = asInt(maxDegree) - CHKERR( PetscSpaceSetDegree(self.space, cdegree, cmaxdegree) ) + CHKERR(PetscSpaceSetDegree(self.space, cdegree, cmaxdegree)) def getNumVariables(self) -> int: """Return the number of variables for this space. + Not collective. + See Also -------- setNumVariables, petsc.PetscSpaceGetNumVariables """ - cdef PetscInt cnvars - CHKERR( PetscSpaceGetNumVariables(self.space, &cnvars)) + cdef PetscInt cnvars = 0 + CHKERR(PetscSpaceGetNumVariables(self.space, &cnvars)) return toInt(cnvars) def setNumVariables(self, n: int) -> None: """Set the number of variables for this space. + Logically collective. + Parameters ---------- n @@ -179,23 +190,27 @@ cdef class Space(Object): """ cdef PetscInt cn = asInt(n) - CHKERR( PetscSpaceSetNumVariables(self.space, cn) ) + CHKERR(PetscSpaceSetNumVariables(self.space, cn)) def getNumComponents(self) -> int: """Return the number of components for this space. + Not collective. + See Also -------- setNumComponents, petsc.PetscSpaceGetNumComponents """ - cdef PetscInt cncomps - CHKERR( PetscSpaceGetNumComponents(self.space, &cncomps)) + cdef PetscInt cncomps = 0 + CHKERR(PetscSpaceGetNumComponents(self.space, &cncomps)) return toInt(cncomps) def setNumComponents(self, nc: int) -> None: """Set the number of components for this space. + Logically collective. + Parameters ---------- nc @@ -207,18 +222,7 @@ cdef class Space(Object): """ cdef PetscInt cnc = asInt(nc) - CHKERR( PetscSpaceSetNumComponents(self.space, cnc) ) - - #def evaluate(self, points): - # cdef PetscInt cnpoints = 0, cdim=0, cnfuncs=0 - # cdef PetscReal *cpoints = NULL - # cdef PetscReal *B = NULL, *D = NULL, *H = NULL - # points = iarray_r(points, &cnpoints, &cpoints) - # # Get the dimension of the space - # CHKERR( PetscSpaceGetDimension( self.space, &cnfuncs) ) - # CHKERR( PetscSpace) - # CHKERR( PetscSpaceEvaluate(self.space, cnpoints, &cpoints, &B, &D, &H) ) - # return array_r(cnpoints*cdim, B), array_r(cnpoints*cnc, D), array_r(, H) + CHKERR(PetscSpaceSetNumComponents(self.space, cnc)) def getType(self) -> str: """Return the type of the space object. @@ -231,7 +235,7 @@ cdef class Space(Object): """ cdef PetscSpaceType cval = NULL - CHKERR( PetscSpaceGetType(self.space, &cval) ) + CHKERR(PetscSpaceGetType(self.space, &cval)) return bytes2str(cval) def setType(self, space_type: Type | str) -> Self: @@ -251,12 +255,14 @@ cdef class Space(Object): """ cdef PetscSpaceType cval = NULL space_type = str2bytes(space_type, &cval) - CHKERR( PetscSpaceSetType(self.space, cval) ) + CHKERR(PetscSpaceSetType(self.space, cval)) return self def getSumConcatenate(self) -> bool: """Return the concatenate flag for this space. 
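Note that the `assert` removed from `setDegree` above used to require both arguments to be non-`None`, contradicting the docstring; with it gone, either argument may be `None` and falls back to `PETSC_DETERMINE`. A sketch of a total-degree polynomial space:

    from petsc4py import PETSc

    sp = PETSc.Space().create()
    sp.setType(PETSc.Space.Type.POLYNOMIAL)
    sp.setNumVariables(2)
    sp.setNumComponents(1)
    sp.setDegree(2, None)     # maxDegree left to PETSC_DETERMINE
    sp.setUp()
    sp.getDimension()         # 6 for total degree <= 2 in two variables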
+ Not collective. + A concatenated sum space will have the number of components equal to the sum of the number of components of all subspaces. A non-concatenated, or direct sum space will have the same number of @@ -267,13 +273,15 @@ cdef class Space(Object): setSumConcatenate, petsc.PetscSpaceSumGetConcatenate """ - cdef PetscBool concatenate - CHKERR( PetscSpaceSumGetConcatenate(self.space, &concatenate)) + cdef PetscBool concatenate = PETSC_FALSE + CHKERR(PetscSpaceSumGetConcatenate(self.space, &concatenate)) return toBool(concatenate) def setSumConcatenate(self, concatenate: bool) -> None: """Set the concatenate flag for this space. + Logically collective. + A concatenated sum space will have the number of components equal to the sum of the number of components of all subspaces. A non-concatenated, or direct sum space will have the same number of @@ -291,23 +299,27 @@ cdef class Space(Object): """ cdef PetscBool cconcatenate = asBool(concatenate) - CHKERR( PetscSpaceSumSetConcatenate(self.space, concatenate)) + CHKERR(PetscSpaceSumSetConcatenate(self.space, cconcatenate)) def getSumNumSubspaces(self) -> int: """Return the number of spaces in the sum. + Not collective. + See Also -------- setSumNumSubspaces, petsc.PetscSpaceSumGetNumSubspaces """ - cdef PetscInt numSumSpaces - CHKERR( PetscSpaceSumGetNumSubspaces(self.space, &numSumSpaces)) + cdef PetscInt numSumSpaces = 0 + CHKERR(PetscSpaceSumGetNumSubspaces(self.space, &numSumSpaces)) return toInt(numSumSpaces) def getSumSubspace(self, s: int) -> Space: """Return a space in the sum. + Not collective. + Parameters ---------- s @@ -320,12 +332,14 @@ cdef class Space(Object): """ cdef Space subsp = Space() cdef PetscInt cs = asInt(s) - CHKERR( PetscSpaceSumGetSubspace(self.space, s, &subsp.space) ) + CHKERR(PetscSpaceSumGetSubspace(self.space, cs, &subsp.space)) return subsp def setSumSubspace(self, s: int, Space subsp) -> None: """Set a space in the sum. + Logically collective. + Parameters ---------- s @@ -339,11 +353,13 @@ cdef class Space(Object): """ cdef PetscInt cs = asInt(s) - CHKERR( PetscSpaceSumSetSubspace(self.space, cs, subsp.space) ) + CHKERR(PetscSpaceSumSetSubspace(self.space, cs, subsp.space)) def setSumNumSubspaces(self, numSumSpaces: int) -> None: """Set the number of spaces in the sum. + Logically collective. + Parameters ---------- numSumSpaces @@ -355,23 +371,27 @@ cdef class Space(Object): """ cdef PetscInt cnumSumSpaces = asInt(numSumSpaces) - CHKERR( PetscSpaceSumSetNumSubspaces(self.space, cnumSumSpaces) ) + CHKERR(PetscSpaceSumSetNumSubspaces(self.space, cnumSumSpaces)) def getTensorNumSubspaces(self) -> int: """Return the number of spaces in the tensor product. + Not collective. + See Also -------- setTensorNumSubspaces, petsc.PetscSpaceTensorGetNumSubspaces """ cdef PetscInt cnumTensSpaces = 0 - CHKERR( PetscSpaceTensorGetNumSubspaces(self.space, &cnumTensSpaces) ) + CHKERR(PetscSpaceTensorGetNumSubspaces(self.space, &cnumTensSpaces)) return toInt(cnumTensSpaces) def setTensorSubspace(self, s: int, Space subsp) -> None: """Set a space in the tensor product. + Logically collective. + Parameters ---------- s @@ -385,11 +405,13 @@ cdef class Space(Object): """ cdef PetscInt cs = asInt(s) - CHKERR( PetscSpaceTensorSetSubspace(self.space, cs, subsp.space) ) + CHKERR(PetscSpaceTensorSetSubspace(self.space, cs, subsp.space)) def getTensorSubspace(self, s: int) -> Space: """Return a space in the tensor product. + Not collective. 
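The concatenate flag documented above decides how component counts combine when subspaces are summed. A hedged sketch, assuming `Type.SUM` names the PETSc sum space as in the C library, in which two scalar subspaces concatenate into a two-component space:

    from petsc4py import PETSc

    sub = PETSc.Space().create()
    sub.setType(PETSc.Space.Type.POLYNOMIAL)
    sub.setNumVariables(2)
    sub.setDegree(1, None)

    sp = PETSc.Space().create()
    sp.setType(PETSc.Space.Type.SUM)
    sp.setNumVariables(2)
    sp.setSumNumSubspaces(2)       # must precede the setSumSubspace calls
    sp.setSumSubspace(0, sub)
    sp.setSumSubspace(1, sub)
    sp.setSumConcatenate(True)     # components add: 1 + 1 -> 2
    sp.setUp()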
+ Parameters ---------- s @@ -402,12 +424,14 @@ cdef class Space(Object): """ cdef PetscInt cs = asInt(s) cdef Space subsp = Space() - CHKERR( PetscSpaceTensorGetSubspace(self.space, cs, &subsp.space) ) + CHKERR(PetscSpaceTensorGetSubspace(self.space, cs, &subsp.space)) return subsp def setTensorNumSubspaces(self, numTensSpaces: int) -> None: """Set the number of spaces in the tensor product. + Logically collective. + Parameters ---------- numTensSpaces @@ -419,11 +443,13 @@ cdef class Space(Object): """ cdef PetscInt cnumTensSpaces = asInt(numTensSpaces) - CHKERR( PetscSpaceTensorSetNumSubspaces(self.space, cnumTensSpaces) ) + CHKERR(PetscSpaceTensorSetNumSubspaces(self.space, cnumTensSpaces)) def getPolynomialTensor(self) -> bool: """Return whether a function space is a space of tensor polynomials. + Not collective. + Return `True` if a function space is a space of tensor polynomials (the space is spanned by polynomials whose degree in each variable is bounded by the given order), as opposed to polynomials (the space is @@ -435,13 +461,15 @@ cdef class Space(Object): setPolynomialTensor, petsc.PetscSpacePolynomialGetTensor """ - cdef PetscBool ctensor - CHKERR( PetscSpacePolynomialGetTensor(self.space, &ctensor) ) + cdef PetscBool ctensor = PETSC_FALSE + CHKERR(PetscSpacePolynomialGetTensor(self.space, &ctensor)) return toBool(ctensor) def setPolynomialTensor(self, tensor: bool) -> None: """Set whether a function space is a space of tensor polynomials. + Logically collective. + Set to `True` for a function space which is a space of tensor polynomials (the space is spanned by polynomials whose degree in each variable is bounded by the given order), as opposed to polynomials @@ -460,7 +488,7 @@ cdef class Space(Object): """ cdef PetscBool ctensor = asBool(tensor) - CHKERR( PetscSpacePolynomialSetTensor(self.space, ctensor) ) + CHKERR(PetscSpacePolynomialSetTensor(self.space, ctensor)) def setPointPoints(self, Quad quad) -> None: """Set the evaluation points for the space to be based on a quad. @@ -480,7 +508,7 @@ cdef class Space(Object): getPointPoints, petsc.PetscSpacePointSetPoints """ - CHKERR( PetscSpacePointSetPoints(self.space, quad.quad)) + CHKERR(PetscSpacePointSetPoints(self.space, quad.quad)) def getPointPoints(self) -> Quad: """Return the evaluation points for the space as the points of a quad. @@ -493,12 +521,14 @@ cdef class Space(Object): """ cdef Quad quad = Quad() - CHKERR( PetscSpacePointGetPoints(self.space, &quad.quad)) + CHKERR(PetscSpacePointGetPoints(self.space, &quad.quad)) return quad def setPTrimmedFormDegree(self, formDegree: int) -> None: """Set the form degree of the trimmed polynomials. + Logically collective. + Parameters ---------- formDegree @@ -510,46 +540,27 @@ cdef class Space(Object): """ cdef PetscInt cformDegree = asInt(formDegree) - CHKERR( PetscSpacePTrimmedSetFormDegree(self.space, cformDegree) ) + CHKERR(PetscSpacePTrimmedSetFormDegree(self.space, cformDegree)) def getPTrimmedFormDegree(self) -> int: """Return the form degree of the trimmed polynomials. + Not collective. + See Also -------- setPTrimmedFormDegree, petsc.PetscSpacePTrimmedGetFormDegree """ cdef PetscInt cformDegree = 0 - CHKERR( PetscSpacePTrimmedGetFormDegree(self.space, &cformDegree) ) + CHKERR(PetscSpacePTrimmedGetFormDegree(self.space, &cformDegree)) return toInt(cformDegree) - def viewFromOptions(self, name: str, Object obj=None) -> None: - """View a `Space` based on values in the options database. - - Collective. - - Parameters - ---------- - name - Command line option name. 
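The tensor versus total-degree distinction in `getPolynomialTensor` is easiest to see by counting: at degree k in d variables, tensor-product polynomials span (k+1)^d basis functions, while total-degree polynomials span binomial(k+d, d). An illustrative sketch:

    from petsc4py import PETSc

    sp = PETSc.Space().create()
    sp.setType(PETSc.Space.Type.POLYNOMIAL)
    sp.setNumVariables(2)
    sp.setDegree(2, None)
    sp.setPolynomialTensor(True)   # bound the degree in each variable separately
    sp.setUp()
    sp.getDimension()              # (2+1)**2 = 9, versus 6 for the total-degree space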
- obj - Optional object that provides the options prefix. - - See Also - -------- - petsc_options, petsc.PetscSpaceViewFromOptions - - """ - cdef const char *cname = NULL - _ = str2bytes(name, &cname) - cdef PetscObject cobj = NULL - if obj is not None: cobj = obj.obj[0] - CHKERR( PetscSpaceViewFromOptions(self.space, cobj, cname) ) - # -------------------------------------------------------------------- + class DualSpaceType(object): + """The dual space types.""" LAGRANGE = S_(PETSCDUALSPACELAGRANGE) SIMPLE = S_(PETSCDUALSPACESIMPLE) REFINED = S_(PETSCDUALSPACEREFINED) @@ -557,6 +568,7 @@ class DualSpaceType(object): # -------------------------------------------------------------------- + cdef class DualSpace(Object): """Dual space to a linear space.""" @@ -576,7 +588,7 @@ cdef class DualSpace(Object): petsc.PetscDualSpaceSetUp """ - CHKERR( PetscDualSpaceSetUp(self.dualspace) ) + CHKERR(PetscDualSpaceSetUp(self.dualspace)) def create(self, comm: Comm | None = None) -> Self: """Create an empty `DualSpace` object. @@ -597,8 +609,8 @@ cdef class DualSpace(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscDualSpace newdsp = NULL - CHKERR( PetscDualSpaceCreate(ccomm, &newdsp) ) - CHKERR( PetscCLEAR(self.obj) ); self.dualspace = newdsp + CHKERR(PetscDualSpaceCreate(ccomm, &newdsp)) + CHKERR(PetscCLEAR(self.obj)); self.dualspace = newdsp return self def view(self, Viewer viewer=None) -> None: @@ -618,7 +630,7 @@ cdef class DualSpace(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( PetscDualSpaceView(self.dualspace, vwr) ) + CHKERR(PetscDualSpaceView(self.dualspace, vwr)) def destroy(self) -> Self: """Destroy the `DualSpace` object. @@ -630,7 +642,7 @@ cdef class DualSpace(Object): petsc.PetscDualSpaceDestroy """ - CHKERR( PetscDualSpaceDestroy(&self.dualspace) ) + CHKERR(PetscDualSpaceDestroy(&self.dualspace)) return self def duplicate(self) -> DualSpace: @@ -644,7 +656,7 @@ cdef class DualSpace(Object): """ cdef DualSpace spNew = DualSpace() - CHKERR( PetscDualSpaceDuplicate(self.dualspace, &spNew.dualspace) ) + CHKERR(PetscDualSpaceDuplicate(self.dualspace, &spNew.dualspace)) def getDM(self) -> DM: """Return the `DM` representing the reference cell of a `DualSpace`. @@ -657,7 +669,7 @@ cdef class DualSpace(Object): """ cdef DM dm = DM() - CHKERR( PetscDualSpaceGetDM(self.dualspace, &dm.dm) ) + CHKERR(PetscDualSpaceGetDM(self.dualspace, &dm.dm)) return dm def setDM(self, DM dm) -> None: @@ -675,7 +687,7 @@ cdef class DualSpace(Object): getDM, petsc.PetscDualSpaceSetDM """ - CHKERR( PetscDualSpaceSetDM(self.dualspace, dm.dm) ) + CHKERR(PetscDualSpaceSetDM(self.dualspace, dm.dm)) def getDimension(self) -> int: """Return the dimension of the dual space. @@ -689,25 +701,29 @@ cdef class DualSpace(Object): petsc.PetscDualSpaceGetDimension """ - cdef PetscInt cdim - CHKERR( PetscDualSpaceGetDimension(self.dualspace, &cdim)) + cdef PetscInt cdim = 0 + CHKERR(PetscDualSpaceGetDimension(self.dualspace, &cdim)) return toInt(cdim) def getNumComponents(self) -> int: """Return the number of components for this space. + Not collective. + See Also -------- setNumComponents, petsc.PetscDualSpaceGetNumComponents """ - cdef PetscInt cncomps - CHKERR( PetscDualSpaceGetNumComponents(self.dualspace, &cncomps)) + cdef PetscInt cncomps = 0 + CHKERR(PetscDualSpaceGetNumComponents(self.dualspace, &cncomps)) return toInt(cncomps) def setNumComponents(self, nc: int) -> None: """Set the number of components for this space. + Logically collective. 
+ Parameters ---------- nc @@ -719,7 +735,7 @@ cdef class DualSpace(Object): """ cdef PetscInt cnc = asInt(nc) - CHKERR( PetscDualSpaceSetNumComponents(self.dualspace, cnc) ) + CHKERR(PetscDualSpaceSetNumComponents(self.dualspace, cnc)) def getType(self) -> str: """Return the type of the dual space object. @@ -732,7 +748,7 @@ cdef class DualSpace(Object): """ cdef PetscDualSpaceType cval = NULL - CHKERR( PetscDualSpaceGetType(self.dualspace, &cval) ) + CHKERR(PetscDualSpaceGetType(self.dualspace, &cval)) return bytes2str(cval) def setType(self, dualspace_type: Type | str) -> Self: @@ -751,8 +767,8 @@ cdef class DualSpace(Object): """ cdef PetscDualSpaceType cval = NULL - space_type = str2bytes(dualspace_type, &cval) - CHKERR( PetscDualSpaceSetType(self.dualspace, cval) ) + dualspace_type = str2bytes(dualspace_type, &cval) + CHKERR(PetscDualSpaceSetType(self.dualspace, cval)) return self def getOrder(self) -> int: @@ -765,8 +781,8 @@ cdef class DualSpace(Object): setOrder, petsc.PetscDualSpaceGetOrder """ - cdef PetscInt corder - CHKERR( PetscDualSpaceGetOrder(self.dualspace, &corder)) + cdef PetscInt corder = 0 + CHKERR(PetscDualSpaceGetOrder(self.dualspace, &corder)) return toInt(corder) def setOrder(self, order: int) -> None: @@ -785,7 +801,7 @@ cdef class DualSpace(Object): """ cdef PetscInt corder = asInt(order) - CHKERR( PetscDualSpaceSetOrder(self.dualspace, corder) ) + CHKERR(PetscDualSpaceSetOrder(self.dualspace, corder)) def getNumDof(self) -> ArrayInt: """Return the number of degrees of freedom for each spatial dimension. @@ -799,8 +815,8 @@ cdef class DualSpace(Object): """ cdef const PetscInt *cndof = NULL cdef PetscInt cdim = 0 - CHKERR( PetscDualSpaceGetDimension(self.dualspace, &cdim) ) - CHKERR( PetscDualSpaceGetNumDof(self.dualspace, &cndof) ) + CHKERR(PetscDualSpaceGetDimension(self.dualspace, &cdim)) + CHKERR(PetscDualSpaceGetNumDof(self.dualspace, &cndof)) return array_i(cdim + 1, cndof) def getFunctional(self, i: int) -> Quad: @@ -820,7 +836,7 @@ cdef class DualSpace(Object): """ cdef PetscInt ci = asInt(i) cdef Quad functional = Quad() - CHKERR( PetscDualSpaceGetFunctional( self.dualspace, ci, &functional.quad) ) + CHKERR(PetscDualSpaceGetFunctional(self.dualspace, ci, &functional.quad)) return functional def getInteriorDimension(self) -> int: @@ -837,7 +853,7 @@ cdef class DualSpace(Object): """ cdef PetscInt cintdim = 0 - CHKERR( PetscDualSpaceGetInteriorDimension(self.dualspace, &cintdim) ) + CHKERR(PetscDualSpaceGetInteriorDimension(self.dualspace, &cintdim)) return toInt(cintdim) def getLagrangeContinuity(self) -> bool: @@ -851,7 +867,7 @@ cdef class DualSpace(Object): """ cdef PetscBool ccontinuous = PETSC_FALSE - CHKERR( PetscDualSpaceLagrangeGetContinuity(self.dualspace, &ccontinuous)) + CHKERR(PetscDualSpaceLagrangeGetContinuity(self.dualspace, &ccontinuous)) return toBool(ccontinuous) def setLagrangeContinuity(self, continuous: bool) -> None: @@ -870,7 +886,7 @@ cdef class DualSpace(Object): """ cdef PetscBool ccontinuous = asBool(continuous) - CHKERR( PetscDualSpaceLagrangeSetContinuity(self.dualspace, ccontinuous)) + CHKERR(PetscDualSpaceLagrangeSetContinuity(self.dualspace, ccontinuous)) def getLagrangeTensor(self) -> bool: """Return the tensor nature of the dual space. 
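The Lagrange accessors above configure where the dual basis functionals live. A minimal configuration sketch (a reference-cell `DM` still has to be attached with `setDM` before `setUp`):

    from petsc4py import PETSc

    dsp = PETSc.DualSpace().create()
    dsp.setType(PETSc.DualSpace.Type.LAGRANGE)
    dsp.setOrder(1)                   # nodes of a P1 element
    dsp.setNumComponents(1)
    dsp.setLagrangeContinuity(True)   # C0: nodes shared across cell boundaries
    dsp.setLagrangeTensor(False)      # simplex rather than tensor-product layout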
@@ -883,7 +899,7 @@ cdef class DualSpace(Object): """ cdef PetscBool ctensor = PETSC_FALSE - CHKERR( PetscDualSpaceLagrangeGetTensor(self.dualspace, &ctensor)) + CHKERR(PetscDualSpaceLagrangeGetTensor(self.dualspace, &ctensor)) return toBool(ctensor) def setLagrangeTensor(self, tensor: bool) -> None: @@ -902,7 +918,7 @@ cdef class DualSpace(Object): """ cdef PetscBool ctensor = asBool(tensor) - CHKERR( PetscDualSpaceLagrangeSetTensor(self.dualspace, ctensor)) + CHKERR(PetscDualSpaceLagrangeSetTensor(self.dualspace, ctensor)) def getLagrangeTrimmed(self) -> bool: """Return the trimmed nature of the dual space. @@ -915,7 +931,7 @@ cdef class DualSpace(Object): """ cdef PetscBool ctrimmed = PETSC_FALSE - CHKERR( PetscDualSpaceLagrangeGetTrimmed(self.dualspace, &ctrimmed)) + CHKERR(PetscDualSpaceLagrangeGetTrimmed(self.dualspace, &ctrimmed)) return toBool(ctrimmed) def setLagrangeTrimmed(self, trimmed: bool) -> None: @@ -936,30 +952,7 @@ cdef class DualSpace(Object): """ cdef PetscBool ctrimmed = asBool(trimmed) - CHKERR( PetscDualSpaceLagrangeSetTrimmed(self.dualspace, ctrimmed)) - - def viewFromOptions(self, name: str, Object obj=None) -> None: - """View a `DualSpace` based on values in the options database. - - Collective. - - Parameters - ---------- - name - Command line option name. - obj - Optional object that provides the options prefix. - - See Also - -------- - petsc_options, petsc.PetscSpaceViewFromOptions - - """ - cdef const char *cname = NULL - _ = str2bytes(name, &cname) - cdef PetscObject cobj = NULL - if obj is not None: cobj = obj.obj[0] - CHKERR( PetscDualSpaceViewFromOptions(self.dualspace, cobj, cname) ) + CHKERR(PetscDualSpaceLagrangeSetTrimmed(self.dualspace, ctrimmed)) def setSimpleDimension(self, dim: int) -> None: """Set the number of functionals in the dual space basis. @@ -977,7 +970,7 @@ cdef class DualSpace(Object): """ cdef PetscInt cdim = asInt(dim) - CHKERR( PetscDualSpaceSimpleSetDimension(self.dualspace, cdim) ) + CHKERR(PetscDualSpaceSimpleSetDimension(self.dualspace, cdim)) def setSimpleFunctional(self, func: int, Quad functional) -> None: """Set the given basis element for this dual space. @@ -997,7 +990,7 @@ cdef class DualSpace(Object): """ cdef PetscInt cfunc = asInt(func) - CHKERR( PetscDualSpaceSimpleSetFunctional(self.dualspace, cfunc, functional.quad) ) + CHKERR(PetscDualSpaceSimpleSetFunctional(self.dualspace, cfunc, functional.quad)) del SpaceType del DualSpaceType diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Sys.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Sys.pyx index 20630e13502..d75eb1065cf 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Sys.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Sys.pyx @@ -9,8 +9,7 @@ cdef class Sys: cls, devel: bool = False, date: bool = False, - author: bool = False, - ) -> tuple[int, int, int]: + author: bool = False) -> tuple[int, int, int]: """Return PETSc version information. Not collective. 
@@ -40,8 +39,8 @@ cdef class Sys: """ cdef char cversion[256] cdef PetscInt major=0, minor=0, micro=0, release=0 - CHKERR( PetscGetVersion(cversion, sizeof(cversion)) ) - CHKERR( PetscGetVersionNumber(&major, &minor, &micro, &release) ) + CHKERR(PetscGetVersion(cversion, sizeof(cversion))) + CHKERR(PetscGetVersionNumber(&major, &minor, &micro, &release)) out = version = (toInt(major), toInt(minor), toInt(micro)) if devel or date or author: out = [version] @@ -160,8 +159,7 @@ cdef class Sys: sep: str = ' ', end: str = '\n', comm: Comm | None = None, - **kwargs: Any, - ) -> None: + **kwargs: Any) -> None: # noqa: E129 """Print output from the first processor of a communicator. Collective. @@ -194,7 +192,7 @@ cdef class Sys: message = '' cdef const char *m = NULL message = str2bytes(message, &m) - CHKERR( PetscPrintf(ccomm, '%s', m) ) + CHKERR(PetscPrintf(ccomm, '%s', m)) @classmethod def syncPrint( @@ -204,8 +202,7 @@ cdef class Sys: sep: str = ' ', end: str = '\n', flush: bool = False, comm: Comm | None = None, - **kwargs: Any, - ) -> None: + **kwargs: Any) -> None: # noqa: E129 """Print synchronized output from several processors of a communicator. Not collective. @@ -237,8 +234,8 @@ cdef class Sys: message = ''.join(format) % args cdef const char *m = NULL message = str2bytes(message, &m) - CHKERR( PetscSynchronizedPrintf(ccomm, '%s', m) ) - if flush: CHKERR( PetscSynchronizedFlush(ccomm, PETSC_STDOUT) ) + CHKERR(PetscSynchronizedPrintf(ccomm, '%s', m)) + if flush: CHKERR(PetscSynchronizedFlush(ccomm, PETSC_STDOUT)) @classmethod def syncFlush(cls, comm: Comm | None = None) -> None: @@ -257,7 +254,7 @@ cdef class Sys: """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) - CHKERR( PetscSynchronizedFlush(ccomm, PETSC_STDOUT) ) + CHKERR(PetscSynchronizedFlush(ccomm, PETSC_STDOUT)) # --- xxx --- @classmethod def splitOwnership( cls, size: int | tuple[int, int], bsize: int | None = None, - comm: Comm | None = None - ) -> tuple[int, int]: + comm: Comm | None = None) -> tuple[int, int]: """Given a global (or local) size determines a local (or global) size. Collective. @@ -306,7 +302,7 @@ cdef class Sys: if bs == PETSC_DECIDE: bs = 1 if n > 0: n = n // bs if N > 0: N = N // bs - CHKERR( PetscSplitOwnership(ccomm, &n, &N) ) + CHKERR(PetscSplitOwnership(ccomm, &n, &N)) n = n * bs N = N * bs return (toInt(n), toInt(N)) @@ -328,7 +324,7 @@ cdef class Sys: """ cdef PetscReal s = asReal(seconds) - CHKERR( PetscSleep(s) ) + CHKERR(PetscSleep(s)) # --- xxx --- @@ -365,8 +361,7 @@ cdef class Sys: handler = PetscAbortErrorHandler else: raise ValueError(f"unknown error handler: {errhandler!r}") - CHKERR( PetscPushErrorHandler(handler, NULL) ) - + CHKERR(PetscPushErrorHandler(handler, NULL)) @classmethod def popErrorHandler(cls) -> None: @@ -379,7 +374,7 @@ cdef class Sys: petsc.PetscPopErrorHandler """ - CHKERR( PetscPopErrorHandler() ) + CHKERR(PetscPopErrorHandler()) @classmethod def popSignalHandler(cls) -> None: @@ -392,15 +387,14 @@ cdef class Sys: petsc.PetscPopSignalHandler """ - CHKERR( PetscPopSignalHandler() ) + CHKERR(PetscPopSignalHandler()) @classmethod def infoAllow( cls, flag: bool, filename: str | None = None, - mode: str = "w", - ) -> None: + mode: str = "w") -> None: """Enables or disables PETSc info messages. Not collective.
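`splitOwnership` above divides a global length among ranks (honoring the block size, as the `n // bs` arithmetic in the hunk shows), and pairs naturally with `syncPrint`/`syncFlush` for an ownership report. A sketch, assuming the `(local, global)` tuple form of the size argument with `None` meaning let PETSc decide, as elsewhere in the bindings:

    from petsc4py import PETSc

    n, N = PETSc.Sys.splitOwnership((None, 100))
    rank = PETSc.COMM_WORLD.getRank()
    PETSc.Sys.syncPrint(f"rank {rank}: owns {n} of {N} entries")
    PETSc.Sys.syncFlush()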
@@ -423,11 +417,11 @@ cdef class Sys: cdef const char *cfilename = NULL cdef const char *cmode = NULL if flag: tval = PETSC_TRUE - CHKERR( PetscInfoAllow(tval) ) + CHKERR(PetscInfoAllow(tval)) if filename is not None: filename = str2bytes(filename, &cfilename) mode = str2bytes(mode, &cmode) - CHKERR( PetscInfoSetFile(cfilename, cmode) ) + CHKERR(PetscInfoSetFile(cfilename, cmode)) @classmethod def registerCitation(cls, citation: str) -> None: @@ -449,7 +443,7 @@ cdef class Sys: cdef const char *cit = NULL citation = str2bytes(citation, &cit) cdef PetscBool flag = get_citation(citation) - CHKERR( PetscCitationsRegister(cit, &flag) ) + CHKERR(PetscCitationsRegister(cit, &flag)) set_citation(citation, toBool(flag)) @classmethod @@ -471,11 +465,11 @@ cdef class Sys: cdef const char *cpackage = NULL package = str2bytes(package, &cpackage) cdef PetscBool has = PETSC_FALSE - CHKERR( PetscHasExternalPackage(cpackage, &has) ) + CHKERR(PetscHasExternalPackage(cpackage, &has)) return toBool(has) -cdef dict citations_registry = { } +cdef dict citations_registry = {} cdef PetscBool get_citation(object citation): cdef bint is_set = citations_registry.get(citation) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/TAO.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/TAO.pyx index 226371368fa..5db409bd3e0 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/TAO.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/TAO.pyx @@ -42,6 +42,7 @@ class TAOType: ALMM = S_(TAOALMM) PYTHON = S_(TAOPYTHON) + class TAOConvergedReason: """TAO solver termination reason. @@ -69,6 +70,7 @@ class TAOConvergedReason: DIVERGED_TR_REDUCTION = TAO_DIVERGED_TR_REDUCTION # DIVERGED_USER = TAO_DIVERGED_USER # user defined + class TAOBNCGType: """TAO Bound Constrained Conjugate Gradient (BNCG) Update Type.""" GD = TAO_BNCG_GD @@ -86,6 +88,7 @@ class TAOBNCGType: SSML_BRDN = TAO_BNCG_SSML_BRDN # -------------------------------------------------------------------- + cdef class TAO(Object): """Optimization solver. @@ -124,7 +127,7 @@ cdef class TAO(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( TaoView(self.tao, vwr) ) + CHKERR(TaoView(self.tao, vwr)) def destroy(self) -> Self: """Destroy the solver. @@ -136,7 +139,7 @@ cdef class TAO(Object): petsc.TaoDestroy """ - CHKERR( TaoDestroy(&self.tao) ) + CHKERR(TaoDestroy(&self.tao)) return self def create(self, comm: Comm | None = None) -> Self: @@ -156,8 +159,8 @@ cdef class TAO(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscTAO newtao = NULL - CHKERR( TaoCreate(ccomm, &newtao) ) - CHKERR( PetscCLEAR(self.obj) ); self.tao = newtao + CHKERR(TaoCreate(ccomm, &newtao)) + CHKERR(PetscCLEAR(self.obj)); self.tao = newtao return self def setType(self, tao_type: Type | str) -> None: @@ -177,7 +180,7 @@ cdef class TAO(Object): """ cdef PetscTAOType ctype = NULL tao_type = str2bytes(tao_type, &ctype) - CHKERR( TaoSetType(self.tao, ctype) ) + CHKERR(TaoSetType(self.tao, ctype)) def getType(self) -> str: """Return the type of the solver. @@ -190,10 +193,10 @@ cdef class TAO(Object): """ cdef PetscTAOType ctype = NULL - CHKERR( TaoGetType(self.tao, &ctype) ) + CHKERR(TaoGetType(self.tao, &ctype)) return bytes2str(ctype) - def setOptionsPrefix(self, prefix: str) -> None: + def setOptionsPrefix(self, prefix: str | None) -> None: """Set the prefix used for searching for options in the database. Logically collective. 
@@ -205,9 +208,9 @@ cdef class TAO(Object): """ cdef const char *cprefix = NULL prefix = str2bytes(prefix, &cprefix) - CHKERR( TaoSetOptionsPrefix(self.tao, cprefix) ) + CHKERR(TaoSetOptionsPrefix(self.tao, cprefix)) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None) -> None: """Append to the prefix used for searching for options in the database. Logically collective. @@ -219,7 +222,7 @@ cdef class TAO(Object): """ cdef const char *cprefix = NULL prefix = str2bytes(prefix, &cprefix) - CHKERR( TaoAppendOptionsPrefix(self.tao, cprefix) ) + CHKERR(TaoAppendOptionsPrefix(self.tao, cprefix)) def getOptionsPrefix(self) -> str: """Return the prefix used for searching for options in the database. @@ -232,7 +235,7 @@ cdef class TAO(Object): """ cdef const char *prefix = NULL - CHKERR( TaoGetOptionsPrefix(self.tao, &prefix) ) + CHKERR(TaoGetOptionsPrefix(self.tao, &prefix)) return bytes2str(prefix) def setFromOptions(self) -> None: @@ -245,7 +248,7 @@ cdef class TAO(Object): petsc_options, petsc.TaoSetFromOptions """ - CHKERR( TaoSetFromOptions(self.tao) ) + CHKERR(TaoSetFromOptions(self.tao)) def setUp(self) -> None: """Set up the internal data structures for using the solver. @@ -257,7 +260,7 @@ cdef class TAO(Object): petsc.TaoSetUp """ - CHKERR( TaoSetUp(self.tao) ) + CHKERR(TaoSetUp(self.tao)) # @@ -272,7 +275,7 @@ cdef class TAO(Object): """ cdef PetscReal cradius = asReal(radius) - CHKERR( TaoSetInitialTrustRegionRadius(self.tao, cradius) ) + CHKERR(TaoSetInitialTrustRegionRadius(self.tao, cradius)) # -------------- @@ -294,7 +297,7 @@ cdef class TAO(Object): getSolution, petsc.TaoSetSolution """ - CHKERR( TaoSetSolution(self.tao, x.vec) ) + CHKERR(TaoSetSolution(self.tao, x.vec)) def setObjective(self, objective: TAOObjectiveFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the objective function evaluation callback. @@ -319,9 +322,9 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (objective, args, kargs) self.set_attr("__objective__", context) - CHKERR( TaoSetObjective(self.tao, TAO_Objective, context) ) + CHKERR(TaoSetObjective(self.tao, TAO_Objective, context)) - def setResidual(self, residual: TAOResidualFunction, Vec R=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: + def setResidual(self, residual: TAOResidualFunction, Vec R, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the residual evaluation callback for least-squares applications. Logically collective. @@ -342,13 +345,11 @@ cdef class TAO(Object): setJacobianResidual, petsc.TaoSetResidualRoutine """ - cdef PetscVec Rvec = NULL - if R is not None: Rvec = R.vec if args is None: args = () if kargs is None: kargs = {} context = (residual, args, kargs) self.set_attr("__residual__", context) - CHKERR( TaoSetResidualRoutine(self.tao, Rvec, TAO_Residual, context) ) + CHKERR(TaoSetResidualRoutine(self.tao, R.vec, TAO_Residual, context)) def setJacobianResidual(self, jacobian: TAOJacobianResidualFunction, Mat J=None, Mat P=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute the least-squares residual Jacobian. 
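The TAO callback plumbing above stores `(callback, args, kargs)` tuples on the object and installs C-level trampolines. A serial sketch of the implied signatures, minimizing ||x - 1||^2 (names illustrative, not from the patch):

    from petsc4py import PETSc

    def objective(tao, x):
        return float(((x.getArray() - 1.0) ** 2).sum())

    def gradient(tao, x, g):
        g.setArray(2.0 * (x.getArray() - 1.0))

    tao = PETSc.TAO().create(comm=PETSc.COMM_SELF)
    tao.setType(PETSc.TAO.Type.LMVM)
    x = PETSc.Vec().createSeq(5)
    tao.setSolution(x)
    tao.setObjective(objective)
    tao.setGradient(gradient, x.duplicate())
    tao.setFromOptions()
    tao.solve()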
@@ -381,7 +382,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr("__jacobian_residual__", context) - CHKERR( TaoSetJacobianResidualRoutine(self.tao, Jmat, Pmat, TAO_JacobianResidual, context) ) + CHKERR(TaoSetJacobianResidualRoutine(self.tao, Jmat, Pmat, TAO_JacobianResidual, context)) def setGradient(self, gradient: TAOGradientFunction, Vec g=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the gradient evaluation callback. @@ -410,7 +411,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (gradient, args, kargs) self.set_attr("__gradient__", context) - CHKERR( TaoSetGradient(self.tao, gvec, TAO_Gradient, context) ) + CHKERR(TaoSetGradient(self.tao, gvec, TAO_Gradient, context)) def getGradient(self) -> tuple[Vec, TAOGradientFunction]: """Return the vector used to store the gradient and the evaluation callback. @@ -423,8 +424,8 @@ cdef class TAO(Object): """ cdef Vec vec = Vec() - CHKERR( TaoGetGradient(self.tao, &vec.vec, NULL, NULL) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(TaoGetGradient(self.tao, &vec.vec, NULL, NULL)) + CHKERR(PetscINCREF(vec.obj)) cdef object gradient = self.get_attr("__gradient__") return (vec, gradient) @@ -456,7 +457,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (objgrad, args, kargs) self.set_attr("__objgrad__", context) - CHKERR( TaoSetObjectiveAndGradient(self.tao, gvec, TAO_ObjGrad, context) ) + CHKERR(TaoSetObjectiveAndGradient(self.tao, gvec, TAO_ObjGrad, context)) def getObjectiveAndGradient(self) -> tuple[Vec, TAOObjectiveGradientFunction]: """Return the vector used to store the gradient and the evaluation callback. @@ -469,8 +470,8 @@ cdef class TAO(Object): """ cdef Vec vec = Vec() - CHKERR( TaoGetObjectiveAndGradient(self.tao, &vec.vec, NULL, NULL) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(TaoGetObjectiveAndGradient(self.tao, &vec.vec, NULL, NULL)) + CHKERR(PetscINCREF(vec.obj)) cdef object objgrad = self.get_attr("__objgrad__") return (vec, objgrad) @@ -497,18 +498,18 @@ cdef class TAO(Object): if (isinstance(varbounds, list) or isinstance(varbounds, tuple)): ol, ou = varbounds xl = ol; xu = ou - CHKERR( TaoSetVariableBounds(self.tao, xl.vec, xu.vec) ) + CHKERR(TaoSetVariableBounds(self.tao, xl.vec, xu.vec)) return - if isinstance(varbounds, Vec): #FIXME + if isinstance(varbounds, Vec): # FIXME ol = varbounds; ou = args xl = ol; xu = ou - CHKERR( TaoSetVariableBounds(self.tao, xl.vec, xu.vec) ) + CHKERR(TaoSetVariableBounds(self.tao, xl.vec, xu.vec)) return if args is None: args = () if kargs is None: kargs = {} context = (varbounds, args, kargs) self.set_attr("__varbounds__", context) - CHKERR( TaoSetVariableBoundsRoutine(self.tao, TAO_VarBounds, context) ) + CHKERR(TaoSetVariableBoundsRoutine(self.tao, TAO_VarBounds, context)) def setConstraints(self, constraints: TAOConstraintsFunction, Vec C=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute constraints. @@ -537,7 +538,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (constraints, args, kargs) self.set_attr("__constraints__", context) - CHKERR( TaoSetConstraintsRoutine(self.tao, Cvec, TAO_Constraints, context) ) + CHKERR(TaoSetConstraintsRoutine(self.tao, Cvec, TAO_Constraints, context)) def setHessian(self, hessian: TAOHessianFunction, Mat H=None, Mat P=None, args: tuple[Any, ...] 
| None = None, kargs: dict[str, Any] | None = None) -> None: @@ -572,7 +573,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (hessian, args, kargs) self.set_attr("__hessian__", context) - CHKERR( TaoSetHessian(self.tao, Hmat, Pmat, TAO_Hessian, context) ) + CHKERR(TaoSetHessian(self.tao, Hmat, Pmat, TAO_Hessian, context)) def getHessian(self) -> tuple[Mat, Mat, TAOHessianFunction]: """Return the matrices used to store the Hessian and the evaluation callback. @@ -586,9 +587,9 @@ cdef class TAO(Object): """ cdef Mat J = Mat() cdef Mat P = Mat() - CHKERR( TaoGetHessian(self.tao, &J.mat, &P.mat, NULL, NULL) ) - CHKERR( PetscINCREF(J.obj) ) - CHKERR( PetscINCREF(P.obj) ) + CHKERR(TaoGetHessian(self.tao, &J.mat, &P.mat, NULL, NULL)) + CHKERR(PetscINCREF(J.obj)) + CHKERR(PetscINCREF(P.obj)) cdef object hessian = self.get_attr("__hessian__") return (J, P, hessian) @@ -624,7 +625,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr("__jacobian__", context) - CHKERR( TaoSetJacobianRoutine(self.tao, Jmat, Pmat, TAO_Jacobian, context) ) + CHKERR(TaoSetJacobianRoutine(self.tao, Jmat, Pmat, TAO_Jacobian, context)) def setStateDesignIS(self, IS state=None, IS design=None) -> None: """Set the index sets indicating state and design variables. @@ -639,7 +640,7 @@ cdef class TAO(Object): cdef PetscIS s_is = NULL, d_is = NULL if state is not None: s_is = state.iset if design is not None: d_is = design.iset - CHKERR( TaoSetStateDesignIS(self.tao, s_is, d_is) ) + CHKERR(TaoSetStateDesignIS(self.tao, s_is, d_is)) def setJacobianState(self, jacobian_state, Mat J=None, Mat P=None, Mat I=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -662,8 +663,8 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (jacobian_state, args, kargs) self.set_attr("__jacobian_state__", context) - CHKERR( TaoSetJacobianStateRoutine(self.tao, Jmat, Pmat, Imat, - TAO_JacobianState, context) ) + CHKERR(TaoSetJacobianStateRoutine(self.tao, Jmat, Pmat, Imat, + TAO_JacobianState, context)) def setJacobianDesign(self, jacobian_design, Mat J=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -682,8 +683,8 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (jacobian_design, args, kargs) self.set_attr("__jacobian_design__", context) - CHKERR( TaoSetJacobianDesignRoutine(self.tao, Jmat, - TAO_JacobianDesign, context) ) + CHKERR(TaoSetJacobianDesignRoutine(self.tao, Jmat, + TAO_JacobianDesign, context)) def setEqualityConstraints(self, equality_constraints, Vec c, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -700,8 +701,8 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (equality_constraints, args, kargs) self.set_attr("__equality_constraints__", context) - CHKERR( TaoSetEqualityConstraintsRoutine(self.tao, c.vec, - TAO_EqualityConstraints, context) ) + CHKERR(TaoSetEqualityConstraintsRoutine(self.tao, c.vec, + TAO_EqualityConstraints, context)) def setJacobianEquality(self, jacobian_equality, Mat J=None, Mat P=None, args: tuple[Any, ...] 
| None = None, kargs: dict[str, Any] | None = None) -> None: @@ -722,8 +723,8 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (jacobian_equality, args, kargs) self.set_attr("__jacobian_equality__", context) - CHKERR( TaoSetJacobianEqualityRoutine(self.tao, Jmat, Pmat, - TAO_JacobianEquality, context) ) + CHKERR(TaoSetJacobianEqualityRoutine(self.tao, Jmat, Pmat, + TAO_JacobianEquality, context)) def setUpdate(self, update: TAOUpdateFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute update at each optimization step. @@ -749,12 +750,12 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (update, args, kargs) self.set_attr('__update__', context) - CHKERR( TaoSetUpdate(self.tao, TAO_Update, context) ) + CHKERR(TaoSetUpdate(self.tao, TAO_Update, context)) else: self.set_attr('__update__', None) - CHKERR( TaoSetUpdate(self.tao, NULL, NULL) ) + CHKERR(TaoSetUpdate(self.tao, NULL, NULL)) - def getUpdate(self) -> tuple[TAOUpdateFunction, tuple[Any,...], dict[str, Any]]: + def getUpdate(self) -> tuple[TAOUpdateFunction, tuple[Any, ...], dict[str, Any]]: """Return the callback to compute the update. Not collective. @@ -784,7 +785,7 @@ cdef class TAO(Object): """ cdef PetscReal f = 0 - CHKERR( TaoComputeObjective(self.tao, x.vec, &f) ) + CHKERR(TaoComputeObjective(self.tao, x.vec, &f)) return toReal(f) def computeResidual(self, Vec x, Vec f) -> None: @@ -804,7 +805,7 @@ cdef class TAO(Object): setResidual, petsc.TaoComputeResidual """ - CHKERR( TaoComputeResidual(self.tao, x.vec, f.vec) ) + CHKERR(TaoComputeResidual(self.tao, x.vec, f.vec)) def computeGradient(self, Vec x, Vec g) -> None: """Compute the gradient of the objective function. @@ -823,7 +824,7 @@ cdef class TAO(Object): setGradient, petsc.TaoComputeGradient """ - CHKERR( TaoComputeGradient(self.tao, x.vec, g.vec) ) + CHKERR(TaoComputeGradient(self.tao, x.vec, g.vec)) def computeObjectiveGradient(self, Vec x, Vec g) -> float: """Compute the gradient of the objective function and its value. @@ -844,7 +845,7 @@ cdef class TAO(Object): """ cdef PetscReal f = 0 - CHKERR( TaoComputeObjectiveAndGradient(self.tao, x.vec, &f, g.vec) ) + CHKERR(TaoComputeObjectiveAndGradient(self.tao, x.vec, &f, g.vec)) return toReal(f) def computeDualVariables(self, Vec xl, Vec xu) -> None: @@ -857,7 +858,7 @@ cdef class TAO(Object): petsc.TaoComputeDualVariables """ - CHKERR( TaoComputeDualVariables(self.tao, xl.vec, xu.vec) ) + CHKERR(TaoComputeDualVariables(self.tao, xl.vec, xu.vec)) def computeVariableBounds(self, Vec xl, Vec xu) -> None: """Compute the vectors corresponding to variables' bounds. @@ -869,19 +870,19 @@ cdef class TAO(Object): setVariableBounds, petsc.TaoComputeVariableBounds """ - CHKERR( TaoComputeVariableBounds(self.tao) ) + CHKERR(TaoComputeVariableBounds(self.tao)) cdef PetscVec Lvec = NULL, Uvec = NULL - CHKERR( TaoGetVariableBounds(self.tao, &Lvec, &Uvec) ) + CHKERR(TaoGetVariableBounds(self.tao, &Lvec, &Uvec)) if xl.vec != NULL: if Lvec != NULL: - CHKERR( VecCopy(Lvec, xl.vec) ) + CHKERR(VecCopy(Lvec, xl.vec)) else: - CHKERR( VecSet(xl.vec, PETSC_NINFINITY) ) + CHKERR(VecSet(xl.vec, PETSC_NINFINITY)) if xu.vec != NULL: if Uvec != NULL: - CHKERR( VecCopy(Uvec, xu.vec) ) + CHKERR(VecCopy(Uvec, xu.vec)) else: - CHKERR( VecSet(xu.vec, PETSC_INFINITY) ) + CHKERR(VecSet(xu.vec, PETSC_INFINITY)) def computeConstraints(self, Vec x, Vec c) -> None: """Compute the vector corresponding to the constraints. 
@@ -900,7 +901,7 @@ cdef class TAO(Object): setVariableBounds, petsc.TaoComputeVariableBounds """ - CHKERR( TaoComputeConstraints(self.tao, x.vec, c.vec) ) + CHKERR(TaoComputeConstraints(self.tao, x.vec, c.vec)) def computeHessian(self, Vec x, Mat H, Mat P=None) -> None: """Compute the Hessian of the objective function. @@ -923,7 +924,7 @@ cdef class TAO(Object): """ cdef PetscMat hmat = H.mat, pmat = H.mat if P is not None: pmat = P.mat - CHKERR( TaoComputeHessian(self.tao, x.vec, hmat, pmat) ) + CHKERR(TaoComputeHessian(self.tao, x.vec, hmat, pmat)) def computeJacobian(self, Vec x, Mat J, Mat P=None) -> None: """Compute the Jacobian. @@ -946,7 +947,7 @@ cdef class TAO(Object): """ cdef PetscMat jmat = J.mat, pmat = J.mat if P is not None: pmat = P.mat - CHKERR( TaoComputeJacobian(self.tao, x.vec, jmat, pmat) ) + CHKERR(TaoComputeJacobian(self.tao, x.vec, jmat, pmat)) # -------------- @@ -975,7 +976,7 @@ cdef class TAO(Object): if gatol is not None: _gatol = asReal(gatol) if grtol is not None: _grtol = asReal(grtol) if gttol is not None: _gttol = asReal(gttol) - CHKERR( TaoSetTolerances(self.tao, _gatol, _grtol, _gttol) ) + CHKERR(TaoSetTolerances(self.tao, _gatol, _grtol, _gttol)) def getTolerances(self) -> tuple[float, float, float]: """Return the tolerance parameters used in the solver convergence tests. @@ -999,7 +1000,7 @@ cdef class TAO(Object): """ cdef PetscReal _gatol=PETSC_DEFAULT, _grtol=PETSC_DEFAULT, _gttol=PETSC_DEFAULT - CHKERR( TaoGetTolerances(self.tao, &_gatol, &_grtol, &_gttol) ) + CHKERR(TaoGetTolerances(self.tao, &_gatol, &_grtol, &_gttol)) return (toReal(_gatol), toReal(_grtol), toReal(_gttol)) def setMaximumIterations(self, mit: int) -> float: @@ -1013,7 +1014,7 @@ cdef class TAO(Object): """ cdef PetscInt _mit = asInt(mit) - CHKERR( TaoSetMaximumIterations(self.tao, _mit) ) + CHKERR(TaoSetMaximumIterations(self.tao, _mit)) def getMaximumIterations(self) -> int: """Return the maximum number of solver iterations. @@ -1026,7 +1027,7 @@ cdef class TAO(Object): """ cdef PetscInt _mit = PETSC_DEFAULT - CHKERR( TaoGetMaximumIterations(self.tao, &_mit) ) + CHKERR(TaoGetMaximumIterations(self.tao, &_mit)) return toInt(_mit) def setMaximumFunctionEvaluations(self, mit: int) -> None: @@ -1040,7 +1041,7 @@ cdef class TAO(Object): """ cdef PetscInt _mit = asInt(mit) - CHKERR( TaoSetMaximumFunctionEvaluations(self.tao, _mit) ) + CHKERR(TaoSetMaximumFunctionEvaluations(self.tao, _mit)) def getMaximumFunctionEvaluations(self) -> int: """Return the maximum number of objective evaluations within the solver. @@ -1053,7 +1054,7 @@ cdef class TAO(Object): """ cdef PetscInt _mit = PETSC_DEFAULT - CHKERR( TaoGetMaximumFunctionEvaluations(self.tao, &_mit) ) + CHKERR(TaoGetMaximumFunctionEvaluations(self.tao, &_mit)) return toInt(_mit) def setConstraintTolerances(self, catol: float = None, crtol: float = None) -> None: @@ -1076,7 +1077,7 @@ cdef class TAO(Object): cdef PetscReal _catol=PETSC_DEFAULT, _crtol=PETSC_DEFAULT if catol is not None: _catol = asReal(catol) if crtol is not None: _crtol = asReal(crtol) - CHKERR( TaoSetConstraintTolerances(self.tao, _catol, _crtol) ) + CHKERR(TaoSetConstraintTolerances(self.tao, _catol, _crtol)) def getConstraintTolerances(self) -> tuple[float, float]: """Return the constraints tolerance parameters used in the convergence tests. 
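The tolerance and iteration-limit setters wrapped above take optional keyword arguments on the Python side, so any subset can be changed. A small usage sketch (the values are arbitrary):

```python
from petsc4py import PETSc

tao = PETSc.TAO().create()
tao.setType(PETSc.TAO.Type.NM)
tao.setTolerances(gatol=1e-8, grtol=1e-6)   # omitted gttol keeps its default
tao.setMaximumIterations(200)
tao.setMaximumFunctionEvaluations(1000)

gatol, grtol, gttol = tao.getTolerances()
```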
@@ -1096,7 +1097,7 @@ cdef class TAO(Object): """ cdef PetscReal _catol=PETSC_DEFAULT, _crtol=PETSC_DEFAULT - CHKERR( TaoGetConstraintTolerances(self.tao, &_catol, &_crtol) ) + CHKERR(TaoGetConstraintTolerances(self.tao, &_catol, &_crtol)) return (toReal(_catol), toReal(_crtol)) def setConvergenceTest(self, converged: TAOConvergedFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -1119,13 +1120,13 @@ cdef class TAO(Object): """ if converged is None: - CHKERR( TaoSetConvergenceTest(self.tao, TaoDefaultConvergenceTest, NULL) ) + CHKERR(TaoSetConvergenceTest(self.tao, TaoDefaultConvergenceTest, NULL)) self.set_attr('__converged__', None) else: if args is None: args = () if kargs is None: kargs = {} self.set_attr('__converged__', (converged, args, kargs)) - CHKERR( TaoSetConvergenceTest(self.tao, TAO_Converged, NULL) ) + CHKERR(TaoSetConvergenceTest(self.tao, TAO_Converged, NULL)) def getConvergenceTest(self) -> tuple[TAOConvergedFunction, tuple[Any, ...], dict[str, Any]]: """Return the callback used to test for solver convergence. @@ -1150,7 +1151,7 @@ cdef class TAO(Object): """ cdef PetscTAOConvergedReason creason = reason - CHKERR( TaoSetConvergedReason(self.tao, creason) ) + CHKERR(TaoSetConvergedReason(self.tao, creason)) def getConvergedReason(self) -> ConvergedReason: """Return the termination flag. @@ -1163,7 +1164,7 @@ cdef class TAO(Object): """ cdef PetscTAOConvergedReason creason = TAO_CONTINUE_ITERATING - CHKERR( TaoGetConvergedReason(self.tao, &creason) ) + CHKERR(TaoGetConvergedReason(self.tao, &creason)) return creason def setMonitor(self, monitor: TAOMonitorFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -1190,7 +1191,7 @@ cdef class TAO(Object): if args is None: args = () if kargs is None: kargs = {} if monitorlist is None: - CHKERR( TaoMonitorSet(self.tao, TAO_Monitor, NULL, NULL) ) + CHKERR(TaoMonitorSet(self.tao, TAO_Monitor, NULL, NULL)) self.set_attr('__monitor__', [(monitor, args, kargs)]) else: monitorlist.append((monitor, args, kargs)) @@ -1217,7 +1218,7 @@ cdef class TAO(Object): setMonitor, petsc.TaoMonitorCancel """ - CHKERR( TaoMonitorCancel(self.tao) ) + CHKERR(TaoMonitorCancel(self.tao)) self.set_attr('__monitor__', None) # Tao overwrites these statistics. Copy user defined only if present @@ -1232,19 +1233,19 @@ cdef class TAO(Object): Parameters ---------- its - Current number of iterations + Current number of iterations or `None` to use the value stored internally by the solver. f - Current value of the objective function + Current value of the objective function or `None` to use the value stored internally by the solver. res - Current value of the residual norm + Current value of the residual norm or `None` to use the value stored internally by the solver. cnorm - Current value of the constrains norm + Current value of the constraints norm or `None` to use the value stored internally by the solver. step - Current value of the step + Current value of the step or `None` to use the value stored internally by the solver.
See Also @@ -1257,7 +1258,7 @@ cdef class TAO(Object): cdef PetscReal cres = 0.0 cdef PetscReal ccnorm = 0.0 cdef PetscReal cstep = 0.0 - CHKERR( TaoGetSolutionStatus(self.tao, &cits, &cf, &cres, &ccnorm, &cstep, NULL) ) + CHKERR(TaoGetSolutionStatus(self.tao, &cits, &cf, &cres, &ccnorm, &cstep, NULL)) if its is not None: cits = asInt(its) if f is not None: @@ -1268,7 +1269,7 @@ cdef class TAO(Object): ccnorm = asReal(cnorm) if step is not None: cstep = asReal(step) - CHKERR( TaoMonitor(self.tao, cits, cf, cres, ccnorm, cstep) ) + CHKERR(TaoMonitor(self.tao, cits, cf, cres, ccnorm, cstep)) # @@ -1288,8 +1289,8 @@ cdef class TAO(Object): """ if x is not None: - CHKERR( TaoSetSolution(self.tao, x.vec) ) - CHKERR( TaoSolve(self.tao) ) + CHKERR(TaoSetSolution(self.tao, x.vec)) + CHKERR(TaoSolve(self.tao)) def getSolution(self) -> Vec: """Return the vector holding the solution. @@ -1302,8 +1303,8 @@ cdef class TAO(Object): """ cdef Vec vec = Vec() - CHKERR( TaoGetSolution(self.tao, &vec.vec) ) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(TaoGetSolution(self.tao, &vec.vec)) + CHKERR(PetscINCREF(vec.obj)) return vec def setGradientNorm(self, Mat mat) -> None: @@ -1316,7 +1317,7 @@ cdef class TAO(Object): getGradientNorm, petsc.TaoSetGradientNorm """ - CHKERR( TaoSetGradientNorm(self.tao, mat.mat) ) + CHKERR(TaoSetGradientNorm(self.tao, mat.mat)) def getGradientNorm(self) -> Mat: """Return the matrix used to compute inner products. @@ -1329,8 +1330,8 @@ cdef class TAO(Object): """ cdef Mat mat = Mat() - CHKERR( TaoGetGradientNorm(self.tao, &mat.mat) ) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(TaoGetGradientNorm(self.tao, &mat.mat)) + CHKERR(PetscINCREF(mat.obj)) return mat def setLMVMH0(self, Mat mat) -> None: @@ -1343,7 +1344,7 @@ cdef class TAO(Object): getLMVMH0, petsc.TaoLMVMSetH0 """ - CHKERR( TaoLMVMSetH0(self.tao, mat.mat) ) + CHKERR(TaoLMVMSetH0(self.tao, mat.mat)) def getLMVMH0(self) -> Mat: """Return the initial Hessian for the quasi-Newton approximation. @@ -1356,8 +1357,8 @@ cdef class TAO(Object): """ cdef Mat mat = Mat() - CHKERR( TaoLMVMGetH0(self.tao, &mat.mat) ) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(TaoLMVMGetH0(self.tao, &mat.mat)) + CHKERR(PetscINCREF(mat.obj)) return mat def getLMVMH0KSP(self) -> KSP: @@ -1371,8 +1372,8 @@ cdef class TAO(Object): """ cdef KSP ksp = KSP() - CHKERR( TaoLMVMGetH0KSP(self.tao, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(TaoLMVMGetH0KSP(self.tao, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp def getVariableBounds(self) -> tuple[Vec, Vec]: @@ -1386,8 +1387,8 @@ cdef class TAO(Object): """ cdef Vec xl = Vec(), xu = Vec() - CHKERR( TaoGetVariableBounds(self.tao, &xl.vec, &xu.vec) ) - CHKERR( PetscINCREF(xl.obj) ); CHKERR( PetscINCREF(xu.obj) ) + CHKERR(TaoGetVariableBounds(self.tao, &xl.vec, &xu.vec)) + CHKERR(PetscINCREF(xl.obj)); CHKERR(PetscINCREF(xu.obj)) return (xl, xu) def setBNCGType(self, cg_type: BNCGType) -> None: @@ -1401,7 +1402,7 @@ cdef class TAO(Object): """ cdef PetscTAOBNCGType ctype = cg_type - CHKERR( TaoBNCGSetType(self.tao, ctype) ) + CHKERR(TaoBNCGSetType(self.tao, ctype)) def getBNCGType(self) -> BNCGType: """Return the type of the BNCG solver. 
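`solve` accepts an optional initial-guess vector, and monitors installed with `setMonitor` are invoked through the `TAO_Monitor` shim seen above. A sketch combining them (the quadratic callbacks are the invented example from the earlier sketch):

```python
from petsc4py import PETSc

def monitor(tao):
    its, f, gnorm, _, _, _ = tao.getSolutionStatus()
    PETSc.Sys.Print(f"it {its}: f = {f:.3e}, |g| = {gnorm:.3e}")

tao = PETSc.TAO().create(comm=PETSc.COMM_SELF)
tao.setType(PETSc.TAO.Type.LMVM)
tao.setObjective(lambda tao, x: 0.5 * x.dot(x))
tao.setGradient(lambda tao, x, g: x.copy(g))
tao.setMonitor(monitor)

x = PETSc.Vec().createSeq(4); x.set(0.5)
tao.solve(x)             # x is the initial guess and, on return, the minimizer
sol = tao.getSolution()  # the solver's solution vector, reference-counted by the wrapper
```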
@@ -1414,7 +1415,7 @@ cdef class TAO(Object): """ cdef PetscTAOBNCGType cg_type = TAO_BNCG_SSML_BFGS - CHKERR( TaoBNCGGetType(self.tao, &cg_type) ) + CHKERR(TaoBNCGGetType(self.tao, &cg_type)) return cg_type def setIterationNumber(self, its: int) -> None: @@ -1428,7 +1429,7 @@ cdef class TAO(Object): """ cdef PetscInt ival = asInt(its) - CHKERR( TaoSetIterationNumber(self.tao, ival) ) + CHKERR(TaoSetIterationNumber(self.tao, ival)) def getIterationNumber(self) -> int: """Return the current iteration number. @@ -1441,7 +1442,7 @@ cdef class TAO(Object): """ cdef PetscInt its=0 - CHKERR( TaoGetIterationNumber(self.tao, &its) ) + CHKERR(TaoGetIterationNumber(self.tao, &its)) return toInt(its) def getObjectiveValue(self) -> float: @@ -1455,7 +1456,7 @@ cdef class TAO(Object): """ cdef PetscReal fval=0 - CHKERR( TaoGetSolutionStatus(self.tao, NULL, &fval, NULL, NULL, NULL, NULL) ) + CHKERR(TaoGetSolutionStatus(self.tao, NULL, &fval, NULL, NULL, NULL, NULL)) return toReal(fval) getFunctionValue = getObjectiveValue @@ -1471,7 +1472,7 @@ cdef class TAO(Object): """ cdef PetscTAOConvergedReason reason = TAO_CONTINUE_ITERATING - CHKERR( TaoGetConvergedReason(self.tao, &reason) ) + CHKERR(TaoGetConvergedReason(self.tao, &reason)) return reason def getSolutionNorm(self) -> tuple[float, float, float]: @@ -1496,7 +1497,7 @@ cdef class TAO(Object): cdef PetscReal gnorm=0 cdef PetscReal cnorm=0 cdef PetscReal fval=0 - CHKERR( TaoGetSolutionStatus(self.tao, NULL, &fval, &gnorm, &cnorm, NULL, NULL) ) + CHKERR(TaoGetSolutionStatus(self.tao, NULL, &fval, &gnorm, &cnorm, NULL, NULL)) return (toReal(fval), toReal(gnorm), toReal(cnorm)) def getSolutionStatus(self) -> tuple[int, float, float, float, float, ConvergedReason]: @@ -1527,9 +1528,9 @@ cdef class TAO(Object): cdef PetscInt its=0 cdef PetscReal fval=0, gnorm=0, cnorm=0, xdiff=0 cdef PetscTAOConvergedReason reason = TAO_CONTINUE_ITERATING - CHKERR( TaoGetSolutionStatus(self.tao, &its, - &fval, &gnorm, &cnorm, &xdiff, - &reason) ) + CHKERR(TaoGetSolutionStatus(self.tao, &its, + &fval, &gnorm, &cnorm, &xdiff, + &reason)) return (toInt(its), toReal(fval), toReal(gnorm), toReal(cnorm), toReal(xdiff), reason) @@ -1545,8 +1546,8 @@ cdef class TAO(Object): """ cdef KSP ksp = KSP() - CHKERR( TaoGetKSP(self.tao, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(TaoGetKSP(self.tao, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp # BRGN routines @@ -1562,8 +1563,8 @@ cdef class TAO(Object): """ cdef TAO subsolver = TAO() - CHKERR( TaoBRGNGetSubsolver(self.tao, &subsolver.tao) ) - CHKERR( PetscINCREF(subsolver.obj) ) + CHKERR(TaoBRGNGetSubsolver(self.tao, &subsolver.tao)) + CHKERR(PetscINCREF(subsolver.obj)) return subsolver def setBRGNRegularizerObjectiveGradient(self, objgrad, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -1580,7 +1581,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (objgrad, args, kargs) self.set_attr("__brgnregobjgrad__", context) - CHKERR( TaoBRGNSetRegularizerObjectiveAndGradientRoutine(self.tao, TAO_BRGNRegObjGrad, context) ) + CHKERR(TaoBRGNSetRegularizerObjectiveAndGradientRoutine(self.tao, TAO_BRGNRegObjGrad, context)) def setBRGNRegularizerHessian(self, hessian, Mat H=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the callback to compute the regularizer Hessian. 
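After `solve`, the status getters above give a compact picture of how the iteration went; `getConvergedReason` is positive on convergence and negative on divergence. Continuing the previous sketch:

```python
its, f, gnorm, cnorm, xdiff, reason = tao.getSolutionStatus()
if reason > 0:
    PETSc.Sys.Print(f"converged after {its} its, f = {tao.getObjectiveValue():.3e}")
else:
    PETSc.Sys.Print(f"not converged (reason {reason})")
```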
@@ -1598,7 +1599,7 @@ cdef class TAO(Object): if kargs is None: kargs = {} context = (hessian, args, kargs) self.set_attr("__brgnreghessian__", context) - CHKERR( TaoBRGNSetRegularizerHessianRoutine(self.tao, Hmat, TAO_BRGNRegHessian, context) ) + CHKERR(TaoBRGNSetRegularizerHessianRoutine(self.tao, Hmat, TAO_BRGNRegHessian, context)) def setBRGNRegularizerWeight(self, weight: float) -> None: """Set the regularizer weight. @@ -1607,7 +1608,7 @@ cdef class TAO(Object): """ cdef PetscReal cweight = asReal(weight) - CHKERR( TaoBRGNSetRegularizerWeight(self.tao, cweight) ) + CHKERR(TaoBRGNSetRegularizerWeight(self.tao, cweight)) def setBRGNSmoothL1Epsilon(self, epsilon: float) -> None: """Set the smooth L1 epsilon. @@ -1620,7 +1621,7 @@ cdef class TAO(Object): """ cdef PetscReal ceps = asReal(epsilon) - CHKERR( TaoBRGNSetL1SmoothEpsilon(self.tao, ceps) ) + CHKERR(TaoBRGNSetL1SmoothEpsilon(self.tao, ceps)) def setBRGNDictionaryMatrix(self, Mat D) -> None: """Set the dictionary matrix. @@ -1632,7 +1633,7 @@ cdef class TAO(Object): petsc.TaoBRGNSetDictionaryMatrix """ - CHKERR( TaoBRGNSetDictionaryMatrix(self.tao, D.mat) ) + CHKERR(TaoBRGNSetDictionaryMatrix(self.tao, D.mat)) def getBRGNDampingVector(self) -> Vec: """Return the damping vector. @@ -1640,13 +1641,13 @@ cdef class TAO(Object): Not collective. """ - #FIXME - #See Also - #-------- - #petsc.TaoBRGNGetDampingVector + # FIXME + # See Also + # -------- + # petsc.TaoBRGNGetDampingVector cdef Vec damp = Vec() - CHKERR( TaoBRGNGetDampingVector(self.tao, &damp.vec) ) - CHKERR( PetscINCREF(damp.obj) ) + CHKERR(TaoBRGNGetDampingVector(self.tao, &damp.vec)) + CHKERR(PetscINCREF(damp.obj)) return damp def createPython(self, context: Any = None, comm: Comm | None = None) -> Self: @@ -1668,10 +1669,10 @@ cdef class TAO(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscTAO tao = NULL - CHKERR( TaoCreate(ccomm, &tao) ) - CHKERR( PetscCLEAR(self.obj) ); self.tao = tao - CHKERR( TaoSetType(self.tao, TAOPYTHON) ) - CHKERR( TaoPythonSetContext(self.tao, context) ) + CHKERR(TaoCreate(ccomm, &tao)) + CHKERR(PetscCLEAR(self.obj)); self.tao = tao + CHKERR(TaoSetType(self.tao, TAOPYTHON)) + CHKERR(TaoPythonSetContext(self.tao, context)) return self def setPythonContext(self, context: Any) -> None: @@ -1684,7 +1685,7 @@ cdef class TAO(Object): petsc_python_tao, getPythonContext """ - CHKERR( TaoPythonSetContext(self.tao, context) ) + CHKERR(TaoPythonSetContext(self.tao, context)) def getPythonContext(self) -> Any: """Return the instance of the class implementing the required Python methods. @@ -1697,7 +1698,7 @@ cdef class TAO(Object): """ cdef void *context = NULL - CHKERR( TaoPythonGetContext(self.tao, &context) ) + CHKERR(TaoPythonGetContext(self.tao, &context)) if context == NULL: return None else: return context @@ -1714,7 +1715,7 @@ cdef class TAO(Object): """ cdef const char *cval = NULL py_type = str2bytes(py_type, &cval) - CHKERR( TaoPythonSetType(self.tao, cval) ) + CHKERR(TaoPythonSetType(self.tao, cval)) def getPythonType(self) -> str: """Return the fully qualified Python name of the class used by the solver. 
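The BRGN-specific setters above configure the regularized Gauss-Newton solver. A configuration sketch (sizes and values invented; the residual/Jacobian callbacks needed for the least-squares part are elided):

```python
from petsc4py import PETSc

tao = PETSc.TAO().create()
tao.setType(PETSc.TAO.Type.BRGN)
tao.setBRGNRegularizerWeight(1e-2)
tao.setBRGNSmoothL1Epsilon(1e-6)

# optional dictionary matrix D: the regularizer then acts on D @ x
n = 8
D = PETSc.Mat().createAIJ([n, n]); D.setUp()
for i in range(n):
    D[i, i] = 1.0   # identity, just to keep the sketch self-contained
D.assemble()
tao.setBRGNDictionaryMatrix(D)
```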
@@ -1728,7 +1729,7 @@ cdef class TAO(Object): """ cdef const char *cval = NULL - CHKERR( TaoPythonGetType(self.tao, &cval) ) + CHKERR(TaoPythonGetType(self.tao, &cval)) return bytes2str(cval) def getLineSearch(self) -> TAOLineSearch: @@ -1742,8 +1743,8 @@ cdef class TAO(Object): """ cdef TAOLineSearch ls = TAOLineSearch() - CHKERR( TaoGetLineSearch(self.tao, &ls.taols) ) - CHKERR( PetscINCREF(ls.obj) ) + CHKERR(TaoGetLineSearch(self.tao, &ls.taols)) + CHKERR(PetscINCREF(ls.obj)) return ls # --- backward compatibility --- @@ -1756,6 +1757,7 @@ cdef class TAO(Object): """Application context.""" def __get__(self) -> Any: return self.getAppCtx() + def __set__(self, value: Any): self.setAppCtx(value) @@ -1773,6 +1775,7 @@ cdef class TAO(Object): """Broken.""" def __get__(self) -> Any: return self.getFunctionTolerances() + def __set__(self, value): if isinstance(value, (tuple, list)): self.setFunctionTolerances(*value) @@ -1785,6 +1788,7 @@ cdef class TAO(Object): """Broken.""" def __get__(self) -> Any: return self.getGradientTolerances() + def __set__(self, value): if isinstance(value, (tuple, list)): self.getGradientTolerances(*value) @@ -1797,6 +1801,7 @@ cdef class TAO(Object): """Broken.""" def __get__(self) -> Any: return self.getConstraintTolerances() + def __set__(self, value): if isinstance(value, (tuple, list)): self.getConstraintTolerances(*value) @@ -1872,6 +1877,7 @@ del TAOBNCGType # -------------------------------------------------------------------- + class TAOLineSearchType: """TAO Line Search Types.""" UNIT = S_(TAOLINESEARCHUNIT) @@ -1881,6 +1887,7 @@ class TAOLineSearchType: OWARMIJO = S_(TAOLINESEARCHOWARMIJO) GPCG = S_(TAOLINESEARCHGPCG) + class TAOLineSearchConvergedReason: """TAO Line Search Termination Reasons.""" # iterating @@ -1902,6 +1909,7 @@ class TAOLineSearchConvergedReason: # -------------------------------------------------------------------- + cdef class TAOLineSearch(Object): """TAO Line Search.""" @@ -1909,8 +1917,8 @@ cdef class TAOLineSearch(Object): Reason = TAOLineSearchConvergedReason def __cinit__(self): - self.obj = &self.taols - self.taols = NULL + self.obj = &self.taols + self.taols = NULL def view(self, Viewer viewer=None) -> None: """View the linesearch object. @@ -1929,7 +1937,7 @@ cdef class TAOLineSearch(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( TaoLineSearchView(self.taols, vwr) ) + CHKERR(TaoLineSearchView(self.taols, vwr)) def destroy(self) -> Self: """Destroy the linesearch object. @@ -1941,7 +1949,7 @@ cdef class TAOLineSearch(Object): petsc.TaoLineSearchDestroy """ - CHKERR( TaoLineSearchDestroy(&self.taols) ) + CHKERR(TaoLineSearchDestroy(&self.taols)) return self def create(self, comm=None) -> Self: @@ -1961,8 +1969,8 @@ cdef class TAOLineSearch(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscTAOLineSearch newtaols = NULL - CHKERR( TaoLineSearchCreate(ccomm, &newtaols) ) - CHKERR( PetscCLEAR(self.obj) ); self.taols = newtaols + CHKERR(TaoLineSearchCreate(ccomm, &newtaols)) + CHKERR(PetscCLEAR(self.obj)); self.taols = newtaols return self def setType(self, ls_type: Type | str) -> None: @@ -1982,7 +1990,7 @@ cdef class TAOLineSearch(Object): """ cdef PetscTAOLineSearchType ctype = NULL ls_type = str2bytes(ls_type, &ctype) - CHKERR( TaoLineSearchSetType(self.taols, ctype) ) + CHKERR(TaoLineSearchSetType(self.taols, ctype)) def getType(self) -> str: """Return the type of the linesearch. 
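`TAOLineSearch` is a first-class object with the usual create/type/options lifecycle shown above. A sketch, assuming the class is exported as `PETSc.TAOLineSearch` and that `ARMIJO` is among the registered type constants (the hunk elides part of the `TAOLineSearchType` list):

```python
from petsc4py import PETSc

ls = PETSc.TAOLineSearch().create(comm=PETSc.COMM_SELF)
ls.setType(PETSc.TAOLineSearch.Type.ARMIJO)
ls.setFromOptions()   # honours -tao_ls_type and related options
ls.view()
ls.destroy()
```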
@@ -1995,7 +2003,7 @@ cdef class TAOLineSearch(Object): """ cdef PetscTAOLineSearchType ctype = NULL - CHKERR( TaoLineSearchGetType(self.taols, &ctype) ) + CHKERR(TaoLineSearchGetType(self.taols, &ctype)) return bytes2str(ctype) def setFromOptions(self) -> None: @@ -2008,7 +2016,7 @@ cdef class TAOLineSearch(Object): petsc_options, petsc.TaoLineSearchSetFromOptions """ - CHKERR( TaoLineSearchSetFromOptions(self.taols) ) + CHKERR(TaoLineSearchSetFromOptions(self.taols)) def setUp(self) -> None: """Set up the internal data structures for using the linesearch. @@ -2020,9 +2028,9 @@ cdef class TAOLineSearch(Object): petsc.TaoLineSearchSetUp """ - CHKERR( TaoLineSearchSetUp(self.taols) ) + CHKERR(TaoLineSearchSetUp(self.taols)) - def setOptionsPrefix(self, prefix) -> None: + def setOptionsPrefix(self, prefix: str | None = None) -> None: """Set the prefix used for searching for options in the database. Logically collective. @@ -2034,7 +2042,7 @@ cdef class TAOLineSearch(Object): """ cdef const char *cprefix = NULL prefix = str2bytes(prefix, &cprefix) - CHKERR( TaoLineSearchSetOptionsPrefix(self.taols, cprefix) ) + CHKERR(TaoLineSearchSetOptionsPrefix(self.taols, cprefix)) def getOptionsPrefix(self) -> str: """Return the prefix used for searching for options in the database. @@ -2047,7 +2055,7 @@ cdef class TAOLineSearch(Object): """ cdef const char *prefix = NULL - CHKERR( TaoLineSearchGetOptionsPrefix(self.taols, &prefix) ) + CHKERR(TaoLineSearchGetOptionsPrefix(self.taols, &prefix)) return bytes2str(prefix) def setObjective(self, objective : TAOLSObjectiveFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -2070,7 +2078,7 @@ cdef class TAOLineSearch(Object): petsc.TaoLineSearchSetObjectiveRoutine """ - CHKERR( TaoLineSearchSetObjectiveRoutine(self.taols, TAOLS_Objective, NULL) ) + CHKERR(TaoLineSearchSetObjectiveRoutine(self.taols, TAOLS_Objective, NULL)) if args is None: args = () if kargs is None: kargs = {} self.set_attr("__objective__", (objective, args, kargs)) @@ -2097,7 +2105,7 @@ cdef class TAOLineSearch(Object): petsc.TaoLineSearchSetGradientRoutine """ - CHKERR( TaoLineSearchSetGradientRoutine(self.taols, TAOLS_Gradient, NULL) ) + CHKERR(TaoLineSearchSetGradientRoutine(self.taols, TAOLS_Gradient, NULL)) if args is None: args = () if kargs is None: kargs = {} self.set_attr("__gradient__", (gradient, args, kargs)) @@ -2124,7 +2132,7 @@ cdef class TAOLineSearch(Object): petsc.TaoLineSearchSetObjectiveAndGradientRoutine """ - CHKERR( TaoLineSearchSetObjectiveAndGradientRoutine(self.taols, TAOLS_ObjGrad, NULL) ) + CHKERR(TaoLineSearchSetObjectiveAndGradientRoutine(self.taols, TAOLS_ObjGrad, NULL)) if args is None: args = () if kargs is None: kargs = {} self.set_attr("__objgrad__", (objgrad, args, kargs)) @@ -2139,7 +2147,7 @@ cdef class TAOLineSearch(Object): petsc.TaoLineSearchUseTaoRoutines """ - CHKERR( TaoLineSearchUseTaoRoutines(self.taols, tao.tao) ) + CHKERR(TaoLineSearchUseTaoRoutines(self.taols, tao.tao)) def apply(self, Vec x, Vec g, Vec s) -> tuple[float, float, str]: """Performs a line-search in a given step direction. 
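`apply` returns the new objective value, the step length, and a termination reason, matching the tuple built in the hunk above. A sketch that borrows the callbacks already registered on a `TAO`; the `useTAORoutines` spelling is an assumption on my part, the underlying call being `TaoLineSearchUseTaoRoutines`:

```python
from petsc4py import PETSc

# `tao` with objective/gradient callbacks as in the earlier sketches
ls = tao.getLineSearch()
ls.useTAORoutines(tao)

x = PETSc.Vec().createSeq(4); x.set(1.0)
g = x.duplicate(); s = x.duplicate()
tao.computeGradient(x, g)
g.copy(s); s.scale(-1.0)                # steepest-descent direction
f, steplen, reason = ls.apply(x, g, s)
```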
@@ -2154,7 +2162,7 @@ cdef class TAOLineSearch(Object): cdef PetscReal f = 0 cdef PetscReal steplen = 0 cdef PetscTAOLineSearchConvergedReason reason = TAOLINESEARCH_CONTINUE_ITERATING - CHKERR( TaoLineSearchApply(self.taols,x.vec,&f,g.vec,s.vec,&steplen,&reason)) + CHKERR(TaoLineSearchApply(self.taols, x.vec, &f, g.vec, s.vec, &steplen, &reason)) return (toReal(f), toReal(steplen), reason) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/TS.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/TS.pyx index be252e77939..fa544770043 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/TS.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/TS.pyx @@ -33,6 +33,7 @@ class TSType(object): CRANK_NICOLSON = CN RUNGE_KUTTA = RK + class TSRKType(object): """The *RK* subtype.""" RK1FE = S_(TSRK1FE) @@ -48,6 +49,7 @@ class TSRKType(object): RK7VR = S_(TSRK7VR) RK8VR = S_(TSRK8VR) + class TSARKIMEXType(object): """The *ARKIMEX* subtype.""" ARKIMEX1BEE = S_(TSARKIMEX1BEE) @@ -64,6 +66,7 @@ class TSARKIMEXType(object): ARKIMEX4 = S_(TSARKIMEX4) ARKIMEX5 = S_(TSARKIMEX5) + class TSDIRKType(object): """The *DIRK* subtype.""" DIRKS212 = S_(TSDIRKS212) @@ -83,11 +86,13 @@ class TSDIRKType(object): DIRK8616SAL = S_(TSDIRK8616SAL) DIRKES8516SAL = S_(TSDIRKES8516SAL) + class TSProblemType(object): """Distinguishes linear and nonlinear problems.""" LINEAR = TS_LINEAR NONLINEAR = TS_NONLINEAR + class TSEquationType(object): """Distinguishes among types of explicit and implicit equations.""" UNSPECIFIED = TS_EQ_UNSPECIFIED @@ -104,6 +109,7 @@ class TSEquationType(object): DAE_IMPLICIT_INDEX3 = TS_EQ_DAE_IMPLICIT_INDEX3 DAE_IMPLICIT_INDEXHI = TS_EQ_DAE_IMPLICIT_INDEXHI + class TSExactFinalTime(object): """The method for ending time stepping.""" UNSPECIFIED = TS_EXACTFINALTIME_UNSPECIFIED @@ -111,6 +117,7 @@ class TSExactFinalTime(object): INTERPOLATE = TS_EXACTFINALTIME_INTERPOLATE MATCHSTEP = TS_EXACTFINALTIME_MATCHSTEP + class TSConvergedReason(object): """The reason the time step is converging.""" # iterating @@ -127,6 +134,7 @@ class TSConvergedReason(object): # ----------------------------------------------------------------------------- + cdef class TS(Object): """ODE integrator. @@ -176,11 +184,13 @@ cdef class TS(Object): """ cdef PetscViewer cviewer = NULL if viewer is not None: cviewer = viewer.vwr - CHKERR( TSView(self.ts, cviewer) ) + CHKERR(TSView(self.ts, cviewer)) def load(self, Viewer viewer) -> None: """Load a `TS` that has been stored in binary with `view`. + Collective. + Parameters ---------- viewer @@ -191,22 +201,26 @@ cdef class TS(Object): petsc.TSLoad """ - CHKERR( TSLoad(self.ts, viewer.vwr) ) + CHKERR(TSLoad(self.ts, viewer.vwr)) def destroy(self) -> Self: """Destroy the `TS` that was created with `create`. + Collective. + See Also -------- petsc.TSDestroy """ - CHKERR( TSDestroy(&self.ts) ) + CHKERR(TSDestroy(&self.ts)) return self def create(self, comm: Comm | None = None) -> Self: """Create an empty `TS`. + Collective. + The problem type can then be set with `setProblemType` and the type of solver can then be set with `setType`. 
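On the Python side, the type classes added above hang off `PETSc.TS`, so a solver is assembled like this (the RK subtype is chosen arbitrarily):

```python
from petsc4py import PETSc

ts = PETSc.TS().create(comm=PETSc.COMM_SELF)
ts.setProblemType(PETSc.TS.ProblemType.NONLINEAR)
ts.setType(PETSc.TS.Type.RK)
ts.setRKType(PETSc.TS.RKType.RK5F)
PETSc.Sys.Print(ts.getType(), ts.getRKType())
```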
@@ -222,8 +236,8 @@ cdef class TS(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscTS newts = NULL - CHKERR( TSCreate(ccomm, &newts) ) - CHKERR( PetscCLEAR(self.obj) ); self.ts = newts + CHKERR(TSCreate(ccomm, &newts)) + CHKERR(PetscCLEAR(self.obj)); self.ts = newts return self def clone(self) -> TS: @@ -237,12 +251,14 @@ cdef class TS(Object): """ cdef TS ts = TS() - CHKERR( TSClone(self.ts, &ts.ts) ) + CHKERR(TSClone(self.ts, &ts.ts)) return ts def setType(self, ts_type: Type | str) -> None: """Set the method to be used as the `TS` solver. + Collective. + Parameters ---------- ts_type @@ -259,11 +275,13 @@ cdef class TS(Object): """ cdef PetscTSType cval = NULL ts_type = str2bytes(ts_type, &cval) - CHKERR( TSSetType(self.ts, cval) ) + CHKERR(TSSetType(self.ts, cval)) def setRKType(self, ts_type: RKType | str) -> None: """Set the type of the *Runge-Kutta* scheme. + Logically collective. + Parameters ---------- ts_type @@ -280,11 +298,13 @@ cdef class TS(Object): """ cdef PetscTSRKType cval = NULL ts_type = str2bytes(ts_type, &cval) - CHKERR( TSRKSetType(self.ts, cval) ) + CHKERR(TSRKSetType(self.ts, cval)) def setARKIMEXType(self, ts_type: ARKIMEXType | str) -> None: """Set the type of `Type.ARKIMEX` scheme. + Logically collective. + Parameters ---------- ts_type @@ -301,11 +321,13 @@ cdef class TS(Object): """ cdef PetscTSARKIMEXType cval = NULL ts_type = str2bytes(ts_type, &cval) - CHKERR( TSARKIMEXSetType(self.ts, cval) ) + CHKERR(TSARKIMEXSetType(self.ts, cval)) def setARKIMEXFullyImplicit(self, flag: bool) -> None: """Solve both parts of the equation implicitly. + Logically collective. + Parameters ---------- flag @@ -317,47 +339,55 @@ cdef class TS(Object): """ cdef PetscBool bval = asBool(flag) - CHKERR( TSARKIMEXSetFullyImplicit(self.ts, bval) ) + CHKERR(TSARKIMEXSetFullyImplicit(self.ts, bval)) def getType(self) -> str: """Return the `TS` type. + Not collective. + See Also -------- petsc.TSGetType """ cdef PetscTSType cval = NULL - CHKERR( TSGetType(self.ts, &cval) ) + CHKERR(TSGetType(self.ts, &cval)) return bytes2str(cval) def getRKType(self) -> str: """Return the `Type.RK` scheme. + Not collective. + See Also -------- petsc.TSRKGetType """ cdef PetscTSRKType cval = NULL - CHKERR( TSRKGetType(self.ts, &cval) ) + CHKERR(TSRKGetType(self.ts, &cval)) return bytes2str(cval) def getARKIMEXType(self) -> str: """Return the `Type.ARKIMEX` scheme. + Not collective. + See Also -------- petsc.TSARKIMEXGetType """ cdef PetscTSARKIMEXType cval = NULL - CHKERR( TSARKIMEXGetType(self.ts, &cval) ) + CHKERR(TSARKIMEXGetType(self.ts, &cval)) return bytes2str(cval) def setDIRKType(self, ts_type: DIRKType | str) -> None: """Set the type of `Type.DIRK` scheme. + Logically collective. + Parameters ---------- ts_type @@ -374,23 +404,27 @@ cdef class TS(Object): """ cdef PetscTSDIRKType cval = NULL ts_type = str2bytes(ts_type, &cval) - CHKERR( TSDIRKSetType(self.ts, cval) ) + CHKERR(TSDIRKSetType(self.ts, cval)) def getDIRKType(self) -> str: """Return the `Type.DIRK` scheme. + Not collective. + See Also -------- setDIRKType, petsc.TSDIRKGetType """ cdef PetscTSDIRKType cval = NULL - CHKERR( TSDIRKGetType(self.ts, &cval) ) + CHKERR(TSDIRKGetType(self.ts, &cval)) return bytes2str(cval) def setProblemType(self, ptype: ProblemType) -> None: """Set the type of problem to be solved. + Logically collective. 
+ Parameters ---------- ptype @@ -401,24 +435,26 @@ cdef class TS(Object): petsc.TSSetProblemType """ - CHKERR( TSSetProblemType(self.ts, ptype) ) + CHKERR(TSSetProblemType(self.ts, ptype)) def getProblemType(self) -> ProblemType: """Return the type of problem to be solved. + Not collective. + See Also -------- petsc.TSGetProblemType """ cdef PetscTSProblemType ptype = TS_NONLINEAR - CHKERR( TSGetProblemType(self.ts, &ptype) ) + CHKERR(TSGetProblemType(self.ts, &ptype)) return ptype def setEquationType(self, eqtype: EquationType) -> None: """Set the type of the equation that `TS` is solving. - Not collective. + Logically collective. Parameters ---------- @@ -430,7 +466,7 @@ cdef class TS(Object): petsc.TSSetEquationType """ - CHKERR( TSSetEquationType(self.ts, eqtype) ) + CHKERR(TSSetEquationType(self.ts, eqtype)) def getEquationType(self) -> EquationType: """Get the type of the equation that `TS` is solving. @@ -443,10 +479,10 @@ cdef class TS(Object): """ cdef PetscTSEquationType eqtype = TS_EQ_UNSPECIFIED - CHKERR( TSGetEquationType(self.ts, &eqtype) ) + CHKERR(TSGetEquationType(self.ts, &eqtype)) return eqtype - def setOptionsPrefix(self, prefix : str) -> None: + def setOptionsPrefix(self, prefix : str | None) -> None: """Set the prefix used for all the `TS` options. Logically collective. @@ -467,7 +503,7 @@ cdef class TS(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( TSSetOptionsPrefix(self.ts, cval) ) + CHKERR(TSSetOptionsPrefix(self.ts, cval)) def getOptionsPrefix(self) -> str: """Return the prefix used for all the `TS` options. @@ -480,10 +516,10 @@ cdef class TS(Object): """ cdef const char *cval = NULL - CHKERR( TSGetOptionsPrefix(self.ts, &cval) ) + CHKERR(TSGetOptionsPrefix(self.ts, &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None) -> None: """Append to the prefix used for all the `TS` options. Logically collective. @@ -504,7 +540,7 @@ cdef class TS(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( TSAppendOptionsPrefix(self.ts, cval) ) + CHKERR(TSAppendOptionsPrefix(self.ts, cval)) def setFromOptions(self) -> None: """Set various `TS` parameters from user options. @@ -516,7 +552,7 @@ cdef class TS(Object): petsc_options, petsc.TSSetFromOptions """ - CHKERR( TSSetFromOptions(self.ts) ) + CHKERR(TSSetFromOptions(self.ts)) # --- application context --- @@ -541,11 +577,13 @@ cdef class TS(Object): def setRHSFunction( self, - function: TSRHSFunction, + function: TSRHSFunction | None, Vec f=None, args : tuple[Any, ...] | None = None, kargs : dict[str, Any] | None = None) -> None: - """Set the routine for evaluating the function ``G`` in ``U_t = G(t,u)``. + """Set the routine for evaluating the function ``G`` in ``U_t = G(t, u)``. + + Logically collective. Parameters ---------- @@ -570,18 +608,18 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr('__rhsfunction__', context) - CHKERR( TSSetRHSFunction(self.ts, fvec, TS_RHSFunction, context) ) + CHKERR(TSSetRHSFunction(self.ts, fvec, TS_RHSFunction, context)) else: - CHKERR( TSSetRHSFunction(self.ts, fvec, NULL, NULL) ) + CHKERR(TSSetRHSFunction(self.ts, fvec, NULL, NULL)) def setRHSJacobian( self, - jacobian: TSRHSJacobian, + jacobian: TSRHSJacobian | None, Mat J=None, Mat P=None, args : tuple[Any, ...] 
| None = None, kargs : dict[str, Any] | None = None) -> None: - """Set the function to compute the Jacobian of ``G`` in ``U_t = G(U,t)``. + """Set the function to compute the Jacobian of ``G`` in ``U_t = G(U, t)``. Logically collective. @@ -612,13 +650,15 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr('__rhsjacobian__', context) - CHKERR( TSSetRHSJacobian(self.ts, Jmat, Pmat, TS_RHSJacobian, context) ) + CHKERR(TSSetRHSJacobian(self.ts, Jmat, Pmat, TS_RHSJacobian, context)) else: - CHKERR( TSSetRHSJacobian(self.ts, Jmat, Pmat, NULL, NULL) ) + CHKERR(TSSetRHSJacobian(self.ts, Jmat, Pmat, NULL, NULL)) def computeRHSFunction(self, t: float, Vec x, Vec f) -> None: """Evaluate the right-hand side function. + Collective. + Parameters ---------- t @@ -634,11 +674,13 @@ cdef class TS(Object): """ cdef PetscReal time = asReal(t) - CHKERR( TSComputeRHSFunction(self.ts, time, x.vec, f.vec) ) + CHKERR(TSComputeRHSFunction(self.ts, time, x.vec, f.vec)) def computeRHSFunctionLinear(self, t: float, Vec x, Vec f) -> None: """Evaluate the right-hand side via the user-provided Jacobian. + Collective. + Parameters ---------- t @@ -654,7 +696,7 @@ cdef class TS(Object): """ cdef PetscReal time = asReal(t) - CHKERR( TSComputeRHSFunctionLinear(self.ts, time, x.vec, f.vec, NULL) ) + CHKERR(TSComputeRHSFunctionLinear(self.ts, time, x.vec, f.vec, NULL)) def computeRHSJacobian(self, t: float, Vec x, Mat J, Mat P=None) -> None: """Compute the Jacobian matrix that has been set with `setRHSJacobian`. @@ -680,7 +722,7 @@ cdef class TS(Object): cdef PetscReal time = asReal(t) cdef PetscMat jmat = J.mat, pmat = J.mat if P is not None: pmat = P.mat - CHKERR( TSComputeRHSJacobian(self.ts, time, x.vec, jmat, pmat) ) + CHKERR(TSComputeRHSJacobian(self.ts, time, x.vec, jmat, pmat)) def computeRHSJacobianConstant(self, t: float, Vec x, Mat J, Mat P=None) -> None: """Reuse a Jacobian that is time-independent. @@ -706,7 +748,7 @@ cdef class TS(Object): cdef PetscReal time = asReal(t) cdef PetscMat jmat = J.mat, pmat = J.mat if P is not None: pmat = P.mat - CHKERR( TSComputeRHSJacobianConstant(self.ts, time, x.vec, jmat, pmat, NULL) ) + CHKERR(TSComputeRHSJacobianConstant(self.ts, time, x.vec, jmat, pmat, NULL)) def getRHSFunction(self) -> tuple[Vec, TSRHSFunction]: """Return the vector where the rhs is stored and the function used to compute it. @@ -719,8 +761,8 @@ cdef class TS(Object): """ cdef Vec f = Vec() - CHKERR( TSGetRHSFunction(self.ts, &f.vec, NULL, NULL) ) - CHKERR( PetscINCREF(f.obj) ) + CHKERR(TSGetRHSFunction(self.ts, &f.vec, NULL, NULL)) + CHKERR(PetscINCREF(f.obj)) cdef object function = self.get_attr('__rhsfunction__') return (f, function) @@ -735,8 +777,8 @@ cdef class TS(Object): """ cdef Mat J = Mat(), P = Mat() - CHKERR( TSGetRHSJacobian(self.ts, &J.mat, &P.mat, NULL, NULL) ) - CHKERR( PetscINCREF(J.obj) ); CHKERR( PetscINCREF(P.obj) ) + CHKERR(TSGetRHSJacobian(self.ts, &J.mat, &P.mat, NULL, NULL)) + CHKERR(PetscINCREF(J.obj)); CHKERR(PetscINCREF(P.obj)) cdef object jacobian = self.get_attr('__rhsjacobian__') return (J, P, jacobian) @@ -744,7 +786,7 @@ cdef class TS(Object): def setIFunction( self, - function: TSIFunction, + function: TSIFunction | None, Vec f=None, args : tuple[Any, ...] 
| None = None, kargs : dict[str, Any] | None = None) -> None: @@ -775,13 +817,13 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr('__ifunction__', context) - CHKERR( TSSetIFunction(self.ts, fvec, TS_IFunction, context) ) + CHKERR(TSSetIFunction(self.ts, fvec, TS_IFunction, context)) else: - CHKERR( TSSetIFunction(self.ts, fvec, NULL, NULL) ) + CHKERR(TSSetIFunction(self.ts, fvec, NULL, NULL)) def setIJacobian( self, - jacobian: TSIJacobian, + jacobian: TSIJacobian | None, Mat J=None, Mat P=None, args : tuple[Any, ...] | None = None, @@ -791,7 +833,7 @@ cdef class TS(Object): Logically collective. Set the function to compute the matrix ``dF/dU + a*dF/dU_t`` where - ``F(t,U,U_t)`` is the function provided with `setIFunction`. + ``F(t, U, U_t)`` is the function provided with `setIFunction`. Parameters ---------- @@ -820,9 +862,9 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr('__ijacobian__', context) - CHKERR( TSSetIJacobian(self.ts, Jmat, Pmat, TS_IJacobian, context) ) + CHKERR(TSSetIJacobian(self.ts, Jmat, Pmat, TS_IJacobian, context)) else: - CHKERR( TSSetIJacobian(self.ts, Jmat, Pmat, NULL, NULL) ) + CHKERR(TSSetIJacobian(self.ts, Jmat, Pmat, NULL, NULL)) def setIJacobianP( self, @@ -835,7 +877,7 @@ cdef class TS(Object): Logically collective. Set the function that computes the Jacobian of ``F`` with respect to - the parameters ``P`` where ``F(Udot,U,t) = G(U,P,t)``, as well as the + the parameters ``P`` where ``F(Udot, U, t) = G(U, P, t)``, as well as the location to store the matrix. Parameters @@ -861,13 +903,13 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr('__ijacobianp__', context) - CHKERR( TSSetIJacobianP(self.ts, Jmat, TS_IJacobianP, context) ) + CHKERR(TSSetIJacobianP(self.ts, Jmat, TS_IJacobianP, context)) else: - CHKERR( TSSetIJacobianP(self.ts, Jmat, NULL, NULL) ) + CHKERR(TSSetIJacobianP(self.ts, Jmat, NULL, NULL)) def computeIFunction(self, t: float, Vec x, Vec xdot, - Vec f, imex: bool=False) -> None: + Vec f, imex: bool = False) -> None: """Evaluate the DAE residual written in implicit form. Collective. @@ -892,17 +934,17 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(t) cdef PetscBool bval = imex - CHKERR( TSComputeIFunction(self.ts, rval, x.vec, xdot.vec, - f.vec, bval) ) + CHKERR(TSComputeIFunction(self.ts, rval, x.vec, xdot.vec, + f.vec, bval)) def computeIJacobian(self, t: float, Vec x, Vec xdot, a: float, - Mat J, Mat P=None, imex: bool=False) -> None: + Mat J, Mat P = None, imex: bool = False) -> None: """Evaluate the Jacobian of the DAE. Collective. - If ``F(t,U,Udot)=0`` is the DAE, the required Jacobian is + If ``F(t, U, Udot)=0`` is the DAE, the required Jacobian is ``dF/dU + shift*dF/dUdot`` Parameters @@ -932,12 +974,12 @@ cdef class TS(Object): cdef PetscBool bval = imex cdef PetscMat jmat = J.mat, pmat = J.mat if P is not None: pmat = P.mat - CHKERR( TSComputeIJacobian(self.ts, rval1, x.vec, xdot.vec, rval2, - jmat, pmat, bval) ) + CHKERR(TSComputeIJacobian(self.ts, rval1, x.vec, xdot.vec, rval2, + jmat, pmat, bval)) def computeIJacobianP(self, - t: float, Vec x, Vec xdot, a: float, - Mat J, imex: bool=False) -> None: + t: float, Vec x, Vec xdot, a: float, + Mat J, imex: bool = False) -> None: """Evaluate the Jacobian with respect to parameters. Collective. 
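The implicit-form callbacks above follow the `F(t, u, u_t) = 0` convention, with the IJacobian receiving the shift `a` so it can assemble `dF/dU + a*dF/dU_t`. A self-contained scalar sketch (`u' = -u` written implicitly; backward Euler chosen arbitrarily):

```python
from petsc4py import PETSc

def ifunction(ts, t, u, udot, F):
    # F(t, u, u_t) = u_t + u
    F[0] = udot[0] + u[0]
    F.assemble()

def ijacobian(ts, t, u, udot, a, J, P):
    # dF/dU + a*dF/dU_t = 1 + a
    P[0, 0] = 1.0 + a
    P.assemble()
    if J != P:
        J.assemble()

u = PETSc.Vec().createSeq(1); u[0] = 1.0; u.assemble()
J = PETSc.Mat().createDense([1, 1]); J.setUp()

ts = PETSc.TS().create(comm=PETSc.COMM_SELF)
ts.setType(PETSc.TS.Type.BEULER)
ts.setIFunction(ifunction, u.duplicate())
ts.setIJacobian(ijacobian, J)
ts.setTimeStep(0.1)
ts.setMaxTime(1.0)
ts.setExactFinalTime(PETSc.TS.ExactFinalTime.MATCHSTEP)
ts.solve(u)
```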
@@ -966,8 +1008,8 @@ cdef class TS(Object): cdef PetscReal rval2 = asReal(a) cdef PetscBool bval = asBool(imex) cdef PetscMat jmat = J.mat - CHKERR( TSComputeIJacobianP(self.ts, rval1, x.vec, xdot.vec, rval2, - jmat, bval) ) + CHKERR(TSComputeIJacobianP(self.ts, rval1, x.vec, xdot.vec, rval2, + jmat, bval)) def getIFunction(self) -> tuple[Vec, TSIFunction]: """Return the vector and function which computes the implicit residual. @@ -980,8 +1022,8 @@ cdef class TS(Object): """ cdef Vec f = Vec() - CHKERR( TSGetIFunction(self.ts, &f.vec, NULL, NULL) ) - CHKERR( PetscINCREF(f.obj) ) + CHKERR(TSGetIFunction(self.ts, &f.vec, NULL, NULL)) + CHKERR(PetscINCREF(f.obj)) cdef object function = self.get_attr('__ifunction__') return (f, function) @@ -996,14 +1038,14 @@ cdef class TS(Object): """ cdef Mat J = Mat(), P = Mat() - CHKERR( TSGetIJacobian(self.ts, &J.mat, &P.mat, NULL, NULL) ) - CHKERR( PetscINCREF(J.obj) ); CHKERR( PetscINCREF(P.obj) ) + CHKERR(TSGetIJacobian(self.ts, &J.mat, &P.mat, NULL, NULL)) + CHKERR(PetscINCREF(J.obj)); CHKERR(PetscINCREF(P.obj)) cdef object jacobian = self.get_attr('__ijacobian__') return (J, P, jacobian) def setI2Function( self, - function: TSI2Function, + function: TSI2Function | None, Vec f=None, args : tuple[Any, ...] | None = None, kargs : dict[str, Any] | None = None) -> None: @@ -1034,13 +1076,13 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (function, args, kargs) self.set_attr('__i2function__', context) - CHKERR( TSSetI2Function(self.ts, fvec, TS_I2Function, context) ) + CHKERR(TSSetI2Function(self.ts, fvec, TS_I2Function, context)) else: - CHKERR( TSSetI2Function(self.ts, fvec, NULL, NULL) ) + CHKERR(TSSetI2Function(self.ts, fvec, NULL, NULL)) def setI2Jacobian( self, - jacobian: TSI2Jacobian, + jacobian: TSI2Jacobian | None, Mat J=None, Mat P=None, args=None, @@ -1076,9 +1118,9 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (jacobian, args, kargs) self.set_attr('__i2jacobian__', context) - CHKERR( TSSetI2Jacobian(self.ts, Jmat, Pmat, TS_I2Jacobian, context) ) + CHKERR(TSSetI2Jacobian(self.ts, Jmat, Pmat, TS_I2Jacobian, context)) else: - CHKERR( TSSetI2Jacobian(self.ts, Jmat, Pmat, NULL, NULL) ) + CHKERR(TSSetI2Jacobian(self.ts, Jmat, Pmat, NULL, NULL)) def computeI2Function(self, t: float, Vec x, Vec xdot, Vec xdotdot, Vec f) -> None: """Evaluate the DAE residual in implicit form. @@ -1104,8 +1146,8 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(t) - CHKERR( TSComputeI2Function(self.ts, rval, x.vec, xdot.vec, xdotdot.vec, - f.vec) ) + CHKERR(TSComputeI2Function(self.ts, rval, x.vec, xdot.vec, xdotdot.vec, + f.vec)) def computeI2Jacobian( self, @@ -1121,7 +1163,7 @@ cdef class TS(Object): Collective. - If ``F(t,U,V,A)=0`` is the DAE, + If ``F(t, U, V, A)=0`` is the DAE, the required Jacobian is ``dF/dU + v dF/dV + a dF/dA``. Parameters @@ -1153,8 +1195,8 @@ cdef class TS(Object): cdef PetscReal rval3 = asReal(a) cdef PetscMat jmat = J.mat, pmat = J.mat if P is not None: pmat = P.mat - CHKERR( TSComputeI2Jacobian(self.ts, rval1, x.vec, xdot.vec, xdotdot.vec, rval2, rval3, - jmat, pmat) ) + CHKERR(TSComputeI2Jacobian(self.ts, rval1, x.vec, xdot.vec, xdotdot.vec, rval2, rval3, + jmat, pmat)) def getI2Function(self) -> tuple[Vec, TSI2Function]: """Return the vector and function which computes the residual. 
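The second-order (`I2`) interface generalizes this to `F(t, U, U_t, U_tt) = 0`, with two shifts `v` and `a` in the Jacobian. A harmonic-oscillator sketch; the callback argument order is inferred from `computeI2Function`/`computeI2Jacobian` above and should be double-checked against the generated stubs:

```python
from petsc4py import PETSc

def i2function(ts, t, u, udot, udotdot, F):
    # F = u'' + u
    F[0] = udotdot[0] + u[0]
    F.assemble()

def i2jacobian(ts, t, u, udot, udotdot, v, a, J, P):
    # dF/dU + v*dF/dU_t + a*dF/dU_tt = 1 + a
    P[0, 0] = 1.0 + a
    P.assemble()
    if J != P:
        J.assemble()

ts = PETSc.TS().create(comm=PETSc.COMM_SELF)
ts.setType(PETSc.TS.Type.ALPHA2)
ts.setI2Function(i2function, PETSc.Vec().createSeq(1))
J = PETSc.Mat().createDense([1, 1]); J.setUp()
ts.setI2Jacobian(i2jacobian, J)

u = PETSc.Vec().createSeq(1); u[0] = 1.0; u.assemble()
v = u.duplicate()                  # zero initial velocity
ts.setSolution2(u, v)
ts.setTimeStep(0.01); ts.setMaxTime(1.0)
ts.solve(u)
```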
@@ -1167,8 +1209,8 @@ cdef class TS(Object): """ cdef Vec f = Vec() - CHKERR( TSGetI2Function(self.ts, &f.vec, NULL, NULL) ) - CHKERR( PetscINCREF(f.obj) ) + CHKERR(TSGetI2Function(self.ts, &f.vec, NULL, NULL)) + CHKERR(PetscINCREF(f.obj)) cdef object function = self.get_attr('__i2function__') return (f, function) @@ -1183,8 +1225,8 @@ cdef class TS(Object): """ cdef Mat J = Mat(), P = Mat() - CHKERR( TSGetI2Jacobian(self.ts, &J.mat, &P.mat, NULL, NULL) ) - CHKERR( PetscINCREF(J.obj) ); CHKERR( PetscINCREF(P.obj) ) + CHKERR(TSGetI2Jacobian(self.ts, &J.mat, &P.mat, NULL, NULL)) + CHKERR(PetscINCREF(J.obj)); CHKERR(PetscINCREF(P.obj)) cdef object jacobian = self.get_attr('__i2jacobian__') return (J, P, jacobian) @@ -1205,7 +1247,7 @@ cdef class TS(Object): petsc.TSSetSolution """ - CHKERR( TSSetSolution(self.ts, u.vec) ) + CHKERR(TSSetSolution(self.ts, u.vec)) def getSolution(self) -> Vec: """Return the solution at the present timestep. @@ -1222,8 +1264,8 @@ cdef class TS(Object): """ cdef Vec u = Vec() - CHKERR( TSGetSolution(self.ts, &u.vec) ) - CHKERR( PetscINCREF(u.obj) ) + CHKERR(TSGetSolution(self.ts, &u.vec)) + CHKERR(PetscINCREF(u.obj)) return u def setSolution2(self, Vec u, Vec v) -> None: @@ -1243,7 +1285,7 @@ cdef class TS(Object): petsc.TS2SetSolution """ - CHKERR( TS2SetSolution(self.ts, u.vec, v.vec) ) + CHKERR(TS2SetSolution(self.ts, u.vec, v.vec)) def getSolution2(self) -> tuple[Vec, Vec]: """Return the solution and time derivative at the present timestep. @@ -1261,9 +1303,9 @@ cdef class TS(Object): """ cdef Vec u = Vec() cdef Vec v = Vec() - CHKERR( TS2GetSolution(self.ts, &u.vec, &v.vec) ) - CHKERR( PetscINCREF(u.obj) ) - CHKERR( PetscINCREF(v.obj) ) + CHKERR(TS2GetSolution(self.ts, &u.vec, &v.vec)) + CHKERR(PetscINCREF(u.obj)) + CHKERR(PetscINCREF(v.obj)) return (u, v) # --- time span --- @@ -1288,7 +1330,7 @@ cdef class TS(Object): Notes ----- - ``-ts_time_span `` sets the time span from the commandline + ``-ts_time_span `` sets the time span from the commandline See Also -------- @@ -1297,8 +1339,8 @@ cdef class TS(Object): """ cdef PetscInt nt = 0 cdef PetscReal *rtspan = NULL - cdef object tmp = oarray_r(tspan, &nt, &rtspan) - CHKERR( TSSetTimeSpan(self.ts, nt, rtspan) ) + cdef unused = oarray_r(tspan, &nt, &rtspan) + CHKERR(TSSetTimeSpan(self.ts, nt, rtspan)) def getTimeSpan(self) -> ArrayReal: """Return the time span. @@ -1312,13 +1354,15 @@ cdef class TS(Object): """ cdef const PetscReal *rtspan = NULL cdef PetscInt nt = 0 - CHKERR( TSGetTimeSpan(self.ts, &nt, &rtspan) ) + CHKERR(TSGetTimeSpan(self.ts, &nt, &rtspan)) cdef object tspan = array_r(nt, rtspan) return tspan def getTimeSpanSolutions(self) -> list[Vec]: """Return the solutions at the times in the time span. + Not collective. 
+ See Also -------- setTimeSpan, petsc.TSGetTimeSpanSolutions @@ -1326,7 +1370,7 @@ cdef class TS(Object): """ cdef PetscInt nt = 0 cdef PetscVec *sols = NULL - CHKERR( TSGetTimeSpanSolutions(self.ts, &nt, &sols) ) + CHKERR(TSGetTimeSpanSolutions(self.ts, &nt, &sols)) cdef object sollist = None if sols != NULL: sollist = [ref_Vec(sols[i]) for i from 0 <= i < nt] @@ -1345,8 +1389,8 @@ cdef class TS(Object): """ cdef SNES snes = SNES() - CHKERR( TSGetSNES(self.ts, &snes.snes) ) - CHKERR( PetscINCREF(snes.obj) ) + CHKERR(TSGetSNES(self.ts, &snes.snes)) + CHKERR(PetscINCREF(snes.obj)) return snes def getKSP(self) -> KSP: @@ -1360,8 +1404,8 @@ cdef class TS(Object): """ cdef KSP ksp = KSP() - CHKERR( TSGetKSP(self.ts, &ksp.ksp) ) - CHKERR( PetscINCREF(ksp.obj) ) + CHKERR(TSGetKSP(self.ts, &ksp.ksp)) + CHKERR(PetscINCREF(ksp.obj)) return ksp # --- discretization space --- @@ -1380,10 +1424,10 @@ cdef class TS(Object): """ cdef PetscDM newdm = NULL - CHKERR( TSGetDM(self.ts, &newdm) ) + CHKERR(TSGetDM(self.ts, &newdm)) cdef DM dm = subtype_DM(newdm)() dm.dm = newdm - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) return dm def setDM(self, DM dm) -> None: @@ -1401,7 +1445,7 @@ cdef class TS(Object): petsc.TSSetDM """ - CHKERR( TSSetDM(self.ts, dm.dm) ) + CHKERR(TSSetDM(self.ts, dm.dm)) # --- customization --- @@ -1421,7 +1465,7 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(t) - CHKERR( TSSetTime(self.ts, rval) ) + CHKERR(TSSetTime(self.ts, rval)) def getTime(self) -> float: """Return the time of the most recently completed step. @@ -1438,7 +1482,7 @@ cdef class TS(Object): """ cdef PetscReal rval = 0 - CHKERR( TSGetTime(self.ts, &rval) ) + CHKERR(TSGetTime(self.ts, &rval)) return toReal(rval) def getPrevTime(self) -> float: @@ -1452,7 +1496,7 @@ cdef class TS(Object): """ cdef PetscReal rval = 0 - CHKERR( TSGetPrevTime(self.ts, &rval) ) + CHKERR(TSGetPrevTime(self.ts, &rval)) return toReal(rval) def getSolveTime(self) -> float: @@ -1469,7 +1513,7 @@ cdef class TS(Object): """ cdef PetscReal rval = 0 - CHKERR( TSGetSolveTime(self.ts, &rval) ) + CHKERR(TSGetSolveTime(self.ts, &rval)) return toReal(rval) def setTimeStep(self, time_step: float) -> None: @@ -1488,7 +1532,7 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(time_step) - CHKERR( TSSetTimeStep(self.ts, rval) ) + CHKERR(TSSetTimeStep(self.ts, rval)) def getTimeStep(self) -> float: """Return the duration of the current timestep. @@ -1501,7 +1545,7 @@ cdef class TS(Object): """ cdef PetscReal tstep = 0 - CHKERR( TSGetTimeStep(self.ts, &tstep) ) + CHKERR(TSGetTimeStep(self.ts, &tstep)) return toReal(tstep) def setStepNumber(self, step_number: int) -> None: @@ -1529,7 +1573,7 @@ cdef class TS(Object): """ cdef PetscInt ival = asInt(step_number) - CHKERR( TSSetStepNumber(self.ts, ival) ) + CHKERR(TSSetStepNumber(self.ts, ival)) def getStepNumber(self) -> int: """Return the number of time steps completed. @@ -1542,7 +1586,7 @@ cdef class TS(Object): """ cdef PetscInt ival = 0 - CHKERR( TSGetStepNumber(self.ts, &ival) ) + CHKERR(TSGetStepNumber(self.ts, &ival)) return toInt(ival) def setMaxTime(self, max_time: float) -> None: @@ -1565,7 +1609,7 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(max_time) - CHKERR( TSSetMaxTime(self.ts, rval) ) + CHKERR(TSSetMaxTime(self.ts, rval)) def getMaxTime(self) -> float: """Return the maximum (final) time. 
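Typical step-control setup using the setters above (values arbitrary; `setFromOptions` lets `-ts_dt`, `-ts_max_time`, and related options override them):

```python
from petsc4py import PETSc

ts = PETSc.TS().create()
ts.setTime(0.0)        # start time
ts.setTimeStep(1e-2)   # initial step
ts.setMaxTime(1.0)
ts.setMaxSteps(1000)
ts.setExactFinalTime(PETSc.TS.ExactFinalTime.MATCHSTEP)
ts.setFromOptions()
```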
@@ -1580,7 +1624,7 @@ cdef class TS(Object): """ cdef PetscReal rval = 0 - CHKERR( TSGetMaxTime(self.ts, &rval) ) + CHKERR(TSGetMaxTime(self.ts, &rval)) return toReal(rval) def setMaxSteps(self, max_steps: int) -> None: @@ -1601,7 +1645,7 @@ cdef class TS(Object): """ cdef PetscInt ival = asInt(max_steps) - CHKERR( TSSetMaxSteps(self.ts, ival) ) + CHKERR(TSSetMaxSteps(self.ts, ival)) def getMaxSteps(self) -> int: """Return the maximum number of steps to use. @@ -1614,7 +1658,7 @@ cdef class TS(Object): """ cdef PetscInt ival = 0 - CHKERR( TSGetMaxSteps(self.ts, &ival) ) + CHKERR(TSGetMaxSteps(self.ts, &ival)) return toInt(ival) def getSNESIterations(self) -> int: @@ -1631,7 +1675,7 @@ cdef class TS(Object): """ cdef PetscInt n = 0 - CHKERR( TSGetSNESIterations(self.ts, &n) ) + CHKERR(TSGetSNESIterations(self.ts, &n)) return toInt(n) def getKSPIterations(self) -> int: @@ -1648,7 +1692,7 @@ cdef class TS(Object): """ cdef PetscInt n = 0 - CHKERR( TSGetKSPIterations(self.ts, &n) ) + CHKERR(TSGetKSPIterations(self.ts, &n)) return toInt(n) def setMaxStepRejections(self, n: int) -> None: @@ -1671,12 +1715,7 @@ cdef class TS(Object): """ cdef PetscInt rej = asInt(n) - CHKERR( TSSetMaxStepRejections(self.ts, rej)) - - #def getMaxStepRejections(self): - # cdef PetscInt n = 0 - # CHKERR( TSGetMaxStepRejections(self.ts, &n)) - # return toInt(n) + CHKERR(TSSetMaxStepRejections(self.ts, rej)) def getStepRejections(self) -> int: """Return the total number of rejected steps. @@ -1692,7 +1731,7 @@ cdef class TS(Object): """ cdef PetscInt n = 0 - CHKERR( TSGetStepRejections(self.ts, &n) ) + CHKERR(TSGetStepRejections(self.ts, &n)) return toInt(n) def setMaxSNESFailures(self, n: int) -> None: @@ -1711,12 +1750,7 @@ cdef class TS(Object): """ cdef PetscInt fails = asInt(n) - CHKERR( TSSetMaxSNESFailures(self.ts, fails)) - - #def getMaxSNESFailures(self, n): - # cdef PetscInt n = 0 - # CHKERR( TSGetMaxSNESFailures(self.ts, &n)) - # return toInt(n) + CHKERR(TSSetMaxSNESFailures(self.ts, fails)) def getSNESFailures(self) -> int: """Return the total number of failed `SNES` solves in the `TS`. @@ -1732,10 +1766,10 @@ cdef class TS(Object): """ cdef PetscInt n = 0 - CHKERR( TSGetSNESFailures(self.ts, &n) ) + CHKERR(TSGetSNESFailures(self.ts, &n)) return toInt(n) - def setErrorIfStepFails(self, flag: bool=True) -> None: + def setErrorIfStepFails(self, flag: bool = True) -> None: """Immediately error if no step succeeds. Not collective. @@ -1755,9 +1789,9 @@ cdef class TS(Object): """ cdef PetscBool bval = flag - CHKERR( TSSetErrorIfStepFails(self.ts, bval)) + CHKERR(TSSetErrorIfStepFails(self.ts, bval)) - def setTolerances(self, rtol: float=None, atol: float=None) -> None: + def setTolerances(self, rtol: float = None, atol: float = None) -> None: """Set tolerances for local truncation error when using an adaptive controller. Logically collective. @@ -1794,9 +1828,9 @@ cdef class TS(Object): vatol = (atol).vec else: ratol = asReal(atol) - CHKERR( TSSetTolerances(self.ts, ratol, vatol, rrtol, vrtol) ) + CHKERR(TSSetTolerances(self.ts, ratol, vatol, rrtol, vrtol)) - def getTolerances(self) ->tuple[float,float]: + def getTolerances(self) -> tuple[float, float]: """Return the tolerances for local truncation error. Logically collective.
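The failure and tolerance knobs above combine as follows; passing ``-1`` to `setMaxSNESFailures` means unlimited, per the PETSc convention, and a `Vec` may replace either scalar in `setTolerances` for per-component control. Continuing the previous sketch:

```python
ts.setTolerances(rtol=1e-6, atol=1e-8)
rtol, atol = ts.getTolerances()

ts.setErrorIfStepFails(True)   # abort instead of retrying a failed step
ts.setMaxStepRejections(10)
ts.setMaxSNESFailures(-1)      # unlimited nonlinear-solver failures
```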
@@ -1817,7 +1851,7 @@ cdef class TS(Object): cdef PetscReal ratol = PETSC_DEFAULT cdef PetscVec vrtol = NULL cdef PetscVec vatol = NULL - CHKERR( TSGetTolerances(self.ts, &ratol, &vatol, &rrtol, &vrtol) ) + CHKERR(TSGetTolerances(self.ts, &ratol, &vatol, &rrtol, &vrtol)) cdef object rtol = None if vrtol != NULL: rtol = ref_Vec(vrtol) @@ -1850,7 +1884,7 @@ cdef class TS(Object): """ cdef PetscTSExactFinalTimeOption oval = option - CHKERR( TSSetExactFinalTime(self.ts, oval) ) + CHKERR(TSSetExactFinalTime(self.ts, oval)) def setConvergedReason(self, reason: ConvergedReason) -> None: """Set the reason for handling the convergence of `solve`. @@ -1871,7 +1905,7 @@ cdef class TS(Object): """ cdef PetscTSConvergedReason cval = reason - CHKERR( TSSetConvergedReason(self.ts, cval) ) + CHKERR(TSSetConvergedReason(self.ts, cval)) def getConvergedReason(self) -> ConvergedReason: """Return the reason the `TS` step was stopped. @@ -1886,14 +1920,14 @@ cdef class TS(Object): """ cdef PetscTSConvergedReason reason = TS_CONVERGED_ITERATING - CHKERR( TSGetConvergedReason(self.ts, &reason) ) + CHKERR(TSGetConvergedReason(self.ts, &reason)) return reason # --- monitoring --- def setMonitor( self, - monitor: TSMonitorFunction, + monitor: TSMonitorFunction | None, args : tuple[Any, ...] | None = None, kargs : dict[str, Any] | None = None) -> None: """Set an additional monitor to the `TS`. @@ -1919,15 +1953,17 @@ cdef class TS(Object): if monitorlist is None: monitorlist = [] self.set_attr('__monitor__', monitorlist) - CHKERR( TSMonitorSet(self.ts, TS_Monitor, NULL, NULL) ) + CHKERR(TSMonitorSet(self.ts, TS_Monitor, NULL, NULL)) if args is None: args = () if kargs is None: kargs = {} context = (monitor, args, kargs) monitorlist.append(context) - def getMonitor(self) -> list[tuple[TSMonitorFunction,tuple[Any, ...],dict[str, Any]]]: + def getMonitor(self) -> list[tuple[TSMonitorFunction, tuple[Any, ...], dict[str, Any]]]: """Return the monitor. + Not collective. + See Also -------- setMonitor @@ -1946,13 +1982,15 @@ cdef class TS(Object): """ self.set_attr('__monitor__', None) - CHKERR( TSMonitorCancel(self.ts) ) + CHKERR(TSMonitorCancel(self.ts)) cancelMonitor = monitorCancel def monitor(self, step: int, time: float, Vec u=None) -> None: """Monitor the solve. + Collective. + Parameters ---------- step @@ -1972,8 +2010,8 @@ cdef class TS(Object): cdef PetscVec uvec = NULL if u is not None: uvec = u.vec if uvec == NULL: - CHKERR( TSGetSolution(self.ts, &uvec) ) - CHKERR( TSMonitor(self.ts, ival, rval, uvec) ) + CHKERR(TSGetSolution(self.ts, &uvec)) + CHKERR(TSMonitor(self.ts, ival, rval, uvec)) # --- event handling --- @@ -1981,8 +2019,8 @@ cdef class TS(Object): self, direction: Sequence[int], terminate: Sequence[bool], - indicator: TSIndicatorFunction, - postevent: TSPostEventFunction=None, + indicator: TSIndicatorFunction | None, + postevent: TSPostEventFunction = None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set a function used for detecting events. @@ -1992,7 +2030,7 @@ cdef class TS(Object): Parameters ---------- direction - Direction of zero crossing to be detected {-1,0,+1}. + Direction of zero crossing to be detected {-1, 0, +1}. terminate Flags for each event to indicate stepping should be terminated. 
indicator @@ -2025,14 +2063,14 @@ cdef class TS(Object): self.set_attr('__indicator__', (indicator, args, kargs)) if postevent is not None: self.set_attr('__postevent__', (postevent, args, kargs)) - CHKERR( TSSetEventHandler(self.ts, nevents, idirs, iterm, TS_Indicator, TS_PostEvent, NULL) ) + CHKERR(TSSetEventHandler(self.ts, nevents, idirs, iterm, TS_Indicator, TS_PostEvent, NULL)) else: self.set_attr('__postevent__', None) - CHKERR( TSSetEventHandler(self.ts, nevents, idirs, iterm, TS_Indicator, NULL, NULL) ) + CHKERR(TSSetEventHandler(self.ts, nevents, idirs, iterm, TS_Indicator, NULL, NULL)) else: - CHKERR( TSSetEventHandler(self.ts, nevents, idirs, iterm, NULL, NULL, NULL) ) + CHKERR(TSSetEventHandler(self.ts, nevents, idirs, iterm, NULL, NULL, NULL)) - def setEventTolerances(self, tol: float=None, vtol: Sequence[float]=None) -> None: + def setEventTolerances(self, tol: float = None, vtol: Sequence[float] = None) -> None: """Set tolerances for event zero crossings when using event handler. Logically collective. @@ -2063,10 +2101,10 @@ cdef class TS(Object): if tol is not None: tolr = asReal(tol) if vtol is not None: - CHKERR( TSGetNumEvents(self.ts, &nevents) ) + CHKERR(TSGetNumEvents(self.ts, &nevents)) vtol = iarray_r(vtol, &ntolr, &vtolr) assert ntolr == nevents - CHKERR( TSSetEventTolerances(self.ts, tolr, vtolr) ) + CHKERR(TSSetEventTolerances(self.ts, tolr, vtolr)) def getNumEvents(self) -> int: """Return the number of events. @@ -2079,14 +2117,14 @@ cdef class TS(Object): """ cdef PetscInt nevents = 0 - CHKERR( TSGetNumEvents(self.ts, &nevents) ) + CHKERR(TSGetNumEvents(self.ts, &nevents)) return toInt(nevents) # --- solving --- def setPreStep( self, - prestep: TSPreStepFunction, + prestep: TSPreStepFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set a function to be called at the beginning of each time step. @@ -2112,14 +2150,16 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (prestep, args, kargs) self.set_attr('__prestep__', context) - CHKERR( TSSetPreStep(self.ts, TS_PreStep) ) + CHKERR(TSSetPreStep(self.ts, TS_PreStep)) else: self.set_attr('__prestep__', None) - CHKERR( TSSetPreStep(self.ts, NULL) ) + CHKERR(TSSetPreStep(self.ts, NULL)) - def getPreStep(self) -> tuple[TSPreStepFunction,tuple[Any, ...] | None,dict[str, Any] | None]: + def getPreStep(self) -> tuple[TSPreStepFunction, tuple[Any, ...] | None, dict[str, Any] | None]: """Return the prestep function. + Not collective. + See Also -------- setPreStep @@ -2128,9 +2168,9 @@ cdef class TS(Object): return self.get_attr('__prestep__') def setPostStep(self, - poststep: TSPostStepFunction, - args: tuple[Any, ...] | None = None, - kargs: dict[str, Any] | None = None) -> None: + poststep: TSPostStepFunction | None, + args: tuple[Any, ...] | None = None, + kargs: dict[str, Any] | None = None) -> None: """Set a function to be called at the end of each time step. Logically collective. @@ -2154,12 +2194,12 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (poststep, args, kargs) self.set_attr('__poststep__', context) - CHKERR( TSSetPostStep(self.ts, TS_PostStep) ) + CHKERR(TSSetPostStep(self.ts, TS_PostStep)) else: self.set_attr('__poststep__', None) - CHKERR( TSSetPostStep(self.ts, NULL) ) + CHKERR(TSSetPostStep(self.ts, NULL)) - def getPostStep(self) -> tuple[TSPostStepFunction,tuple[Any, ...] | None,dict[str, Any] | None]: + def getPostStep(self) -> tuple[TSPostStepFunction, tuple[Any, ...] 
| None, dict[str, Any] | None]: """Return the poststep function.""" return self.get_attr('__poststep__') @@ -2173,7 +2213,7 @@ cdef class TS(Object): petsc.TSSetUp """ - CHKERR( TSSetUp(self.ts) ) + CHKERR(TSSetUp(self.ts)) def reset(self) -> None: """Reset the `TS`, removing any allocated vectors and matrices. @@ -2185,7 +2225,7 @@ cdef class TS(Object): petsc.TSReset """ - CHKERR( TSReset(self.ts) ) + CHKERR(TSReset(self.ts)) def step(self) -> None: """Take one step. @@ -2201,7 +2241,7 @@ cdef class TS(Object): petsc.TSStep """ - CHKERR( TSStep(self.ts) ) + CHKERR(TSStep(self.ts)) def restartStep(self) -> None: """Flag the solver to restart the next step. @@ -2224,19 +2264,21 @@ cdef class TS(Object): petsc.TSRestartStep """ - CHKERR( TSRestartStep(self.ts) ) + CHKERR(TSRestartStep(self.ts)) def rollBack(self) -> None: """Roll back one time step. + Collective. + See Also -------- petsc.TSRollBack """ - CHKERR( TSRollBack(self.ts) ) + CHKERR(TSRollBack(self.ts)) - def solve(self, Vec u) -> None: + def solve(self, Vec u=None) -> None: """Step the requested number of timesteps. Collective. @@ -2244,17 +2286,16 @@ cdef class TS(Object): Parameters ---------- u - The solution vector. Can be `None` if `setSolution` was used and - `setExactFinalTime` is not set as ``TS_EXACTFINALTIME_MATCHSTEP``. - Otherwise this vector must contain the initial conditions and will - contain the solution at the final requested time. + The solution vector. Can be `None`. See Also -------- petsc.TSSolve """ - CHKERR( TSSolve(self.ts, u.vec) ) + cdef PetscVec uvec=NULL + if u is not None: uvec = u.vec + CHKERR(TSSolve(self.ts, uvec)) def interpolate(self, t: float, Vec u) -> None: """Interpolate the solution to a given time. @@ -2274,7 +2315,7 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(t) - CHKERR( TSInterpolate(self.ts, rval, u.vec) ) + CHKERR(TSInterpolate(self.ts, rval, u.vec)) def setStepLimits(self, hmin: float, hmax: float) -> None: """Set the minimum and maximum allowed step sizes. @@ -2297,11 +2338,13 @@ cdef class TS(Object): cdef PetscReal hminr = toReal(hmin) cdef PetscReal hmaxr = toReal(hmax) TSGetAdapt(self.ts, &tsadapt) - CHKERR( TSAdaptSetStepLimits(tsadapt, hminr, hmaxr) ) + CHKERR(TSAdaptSetStepLimits(tsadapt, hminr, hmaxr)) - def getStepLimits(self) -> tuple[float,float]: + def getStepLimits(self) -> tuple[float, float]: """Return the minimum and maximum allowed time step sizes. + Not collective. + See Also -------- petsc.TSAdaptGetStepLimits @@ -2311,7 +2354,7 @@ cdef class TS(Object): cdef PetscReal hminr = 0. cdef PetscReal hmaxr = 0. TSGetAdapt(self.ts, &tsadapt) - CHKERR( TSAdaptGetStepLimits(tsadapt, &hminr, &hmaxr) ) + CHKERR(TSAdaptGetStepLimits(tsadapt, &hminr, &hmaxr)) return (asReal(hminr), asReal(hmaxr)) # --- Adjoint methods --- @@ -2350,14 +2393,16 @@ cdef class TS(Object): def getCostIntegral(self) -> Vec: """Return a vector of values of the integral term in the cost functions. + Not collective. 
+ See Also -------- petsc.TSGetCostIntegral """ cdef Vec cost = Vec() - CHKERR( TSGetCostIntegral(self.ts, &cost.vec) ) - CHKERR( PetscINCREF(cost.obj) ) + CHKERR(TSGetCostIntegral(self.ts, &cost.vec)) + CHKERR(PetscINCREF(cost.obj)) return cost def setCostGradients( @@ -2383,7 +2428,7 @@ cdef class TS(Object): petsc.TSSetCostGradients """ - cdef PetscInt n = 0; + cdef PetscInt n = 0 cdef PetscVec *vecl = NULL cdef PetscVec *vecm = NULL cdef mem1 = None, mem2 = None @@ -2404,20 +2449,22 @@ cdef class TS(Object): for i from 0 <= i < n: vecm[i] = (vm[i]).vec self.set_attr('__costgradients_memory', (mem1, mem2)) - CHKERR( TSSetCostGradients(self.ts, n, vecl, vecm) ) + CHKERR(TSSetCostGradients(self.ts, n, vecl, vecm)) - def getCostGradients(self) -> tuple[list[Vec],list[Vec]]: + def getCostGradients(self) -> tuple[list[Vec], list[Vec]]: """Return the cost gradients. + Not collective. + See Also -------- setCostGradients, petsc.TSGetCostGradients """ - cdef PetscInt i = 0, n = 0 + cdef PetscInt n = 0 cdef PetscVec *vecl = NULL cdef PetscVec *vecm = NULL - CHKERR( TSGetCostGradients(self.ts, &n, &vecl, &vecm) ) + CHKERR(TSGetCostGradients(self.ts, &n, &vecl, &vecm)) cdef object vl = None, vm = None if vecl != NULL: vl = [ref_Vec(vecl[i]) for i from 0 <= i < n] @@ -2427,7 +2474,7 @@ cdef class TS(Object): def setRHSJacobianP( self, - jacobianp: TSRHSJacobianP, + jacobianp: TSRHSJacobianP | None, Mat A=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: @@ -2458,13 +2505,15 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (jacobianp, args, kargs) self.set_attr('__rhsjacobianp__', context) - CHKERR( TSSetRHSJacobianP(self.ts, Amat, TS_RHSJacobianP, context) ) + CHKERR(TSSetRHSJacobianP(self.ts, Amat, TS_RHSJacobianP, context)) else: - CHKERR( TSSetRHSJacobianP(self.ts, Amat, NULL, NULL) ) + CHKERR(TSSetRHSJacobianP(self.ts, Amat, NULL, NULL)) - def createQuadratureTS(self, forward: bool=True) -> TS: + def createQuadratureTS(self, forward: bool = True) -> TS: """Create a sub `TS` that evaluates integrals over time. + Collective. + Parameters ---------- forward @@ -2477,13 +2526,15 @@ cdef class TS(Object): """ cdef TS qts = TS() cdef PetscBool fwd = forward - CHKERR( TSCreateQuadratureTS(self.ts, fwd, &qts.ts) ) - CHKERR( PetscINCREF(qts.obj) ) + CHKERR(TSCreateQuadratureTS(self.ts, fwd, &qts.ts)) + CHKERR(PetscINCREF(qts.obj)) return qts def getQuadratureTS(self) -> tuple[bool, TS]: """Return the sub `TS` that evaluates integrals over time. + Not collective. + Returns ------- forward : bool @@ -2498,18 +2549,20 @@ cdef class TS(Object): """ cdef TS qts = TS() cdef PetscBool fwd = PETSC_FALSE - CHKERR( TSGetQuadratureTS(self.ts, &fwd, &qts.ts) ) - CHKERR( PetscINCREF(qts.obj) ) + CHKERR(TSGetQuadratureTS(self.ts, &fwd, &qts.ts)) + CHKERR(PetscINCREF(qts.obj)) return (toBool(fwd), qts) def setRHSJacobianP( self, - rhsjacobianp: TSRHSJacobianP, + rhsjacobianp: TSRHSJacobianP | None, Mat A=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None: """Set the function that computes the Jacobian with respect to the parameters. + Collective. 
+ Parameters ---------- rhsjacobianp @@ -2533,13 +2586,15 @@ cdef class TS(Object): if kargs is None: kargs = {} context = (rhsjacobianp, args, kargs) self.set_attr('__rhsjacobianp__', context) - CHKERR( TSSetRHSJacobianP(self.ts, Amat, TS_RHSJacobianP, context) ) + CHKERR(TSSetRHSJacobianP(self.ts, Amat, TS_RHSJacobianP, context)) else: - CHKERR( TSSetRHSJacobianP(self.ts, Amat, NULL, NULL) ) + CHKERR(TSSetRHSJacobianP(self.ts, Amat, NULL, NULL)) def computeRHSJacobianP(self, t: float, Vec x, Mat J) -> None: """Run the user-defined JacobianP function. + Collective. + Parameters ---------- t @@ -2555,11 +2610,13 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(t) - CHKERR( TSComputeRHSJacobianP(self.ts, rval, x.vec, J.mat) ) + CHKERR(TSComputeRHSJacobianP(self.ts, rval, x.vec, J.mat)) def adjointSetSteps(self, adjoint_steps: int) -> None: """Set the number of steps the adjoint solver should take backward in time. + Logically collective. + Parameters ---------- adjoint_steps @@ -2571,7 +2628,7 @@ cdef class TS(Object): """ cdef PetscInt ival = asInt(adjoint_steps) - CHKERR( TSAdjointSetSteps(self.ts, ival) ) + CHKERR(TSAdjointSetSteps(self.ts, ival)) def adjointSetUp(self) -> None: """Set up the internal data structures for the later use of an adjoint solver. @@ -2595,7 +2652,7 @@ cdef class TS(Object): petsc.TSAdjointSolve """ - CHKERR( TSAdjointSolve(self.ts) ) + CHKERR(TSAdjointSolve(self.ts)) def adjointStep(self) -> None: """Step one time step backward in the adjoint run. @@ -2642,10 +2699,10 @@ cdef class TS(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscTS newts = NULL - CHKERR( TSCreate(ccomm, &newts) ) - CHKERR( PetscCLEAR(self.obj) ); self.ts = newts - CHKERR( TSSetType(self.ts, TSPYTHON) ) - CHKERR( TSPythonSetContext(self.ts, context) ) + CHKERR(TSCreate(ccomm, &newts)) + CHKERR(PetscCLEAR(self.obj)); self.ts = newts + CHKERR(TSSetType(self.ts, TSPYTHON)) + CHKERR(TSPythonSetContext(self.ts, context)) return self def setPythonContext(self, context: Any) -> None: @@ -2658,7 +2715,7 @@ cdef class TS(Object): petsc_python_ts, getPythonContext """ - CHKERR( TSPythonSetContext(self.ts, context) ) + CHKERR(TSPythonSetContext(self.ts, context)) def getPythonContext(self) -> Any: """Return the instance of the class implementing the required Python methods. @@ -2671,7 +2728,7 @@ cdef class TS(Object): """ cdef void *context = NULL - CHKERR( TSPythonGetContext(self.ts, &context) ) + CHKERR(TSPythonGetContext(self.ts, &context)) if context == NULL: return None else: return context @@ -2687,7 +2744,7 @@ cdef class TS(Object): """ cdef const char *cval = NULL py_type = str2bytes(py_type, &cval) - CHKERR( TSPythonSetType(self.ts, cval) ) + CHKERR(TSPythonSetType(self.ts, cval)) def getPythonType(self) -> str: """Return the fully qualified Python name of the class used by the solver. @@ -2700,13 +2757,15 @@ cdef class TS(Object): """ cdef const char *cval = NULL - CHKERR( TSPythonGetType(self.ts, &cval) ) + CHKERR(TSPythonGetType(self.ts, &cval)) return bytes2str(cval) # --- Theta --- def setTheta(self, theta: float) -> None: - """Set the abscissa of the stage in ``(0,1]`` for `Type.THETA`. + """Set the abscissa of the stage in ``(0, 1]`` for `Type.THETA`. + + Logically collective. 
Parameters ---------- @@ -2723,10 +2782,10 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(theta) - CHKERR( TSThetaSetTheta(self.ts, rval) ) + CHKERR(TSThetaSetTheta(self.ts, rval)) def getTheta(self) -> float: - """Return the abscissa of the stage in ``(0,1]`` for `Type.THETA`. + """Return the abscissa of the stage in ``(0, 1]`` for `Type.THETA`. Not collective. @@ -2736,12 +2795,14 @@ cdef class TS(Object): """ cdef PetscReal rval = 0 - CHKERR( TSThetaGetTheta(self.ts, &rval) ) + CHKERR(TSThetaGetTheta(self.ts, &rval)) return toReal(rval) def setThetaEndpoint(self, flag=True) -> None: """Set to use the endpoint variant of `Type.THETA`. + Logically collective. + Parameters ---------- flag @@ -2753,18 +2814,20 @@ cdef class TS(Object): """ cdef PetscBool bval = flag - CHKERR( TSThetaSetEndpoint(self.ts, bval) ) + CHKERR(TSThetaSetEndpoint(self.ts, bval)) def getThetaEndpoint(self) -> bool: """Return whether the endpoint variable of `Type.THETA` is used. + Not collective. + See Also -------- petsc.TSThetaGetEndpoint """ cdef PetscBool flag = PETSC_FALSE - CHKERR( TSThetaGetEndpoint(self.ts, &flag) ) + CHKERR(TSThetaGetEndpoint(self.ts, &flag)) return toBool(flag) # --- Alpha --- @@ -2789,13 +2852,13 @@ cdef class TS(Object): """ cdef PetscReal rval = asReal(radius) - CHKERR( TSAlphaSetRadius(self.ts, rval) ) + CHKERR(TSAlphaSetRadius(self.ts, rval)) def setAlphaParams( self, - alpha_m: float | None=None, - alpha_f: float | None=None, - gamma: float | None=None) -> None: + alpha_m: float | None = None, + alpha_f: float | None = None, + gamma: float | None = None) -> None: """Set the algorithmic parameters for `Type.ALPHA`. Logically collective. @@ -2817,23 +2880,25 @@ cdef class TS(Object): """ cdef PetscReal rval1 = 0, rval2 = 0, rval3 = 0 - try: CHKERR( TSAlphaGetParams(self.ts, &rval1, &rval2, &rval3) ) + try: CHKERR(TSAlphaGetParams(self.ts, &rval1, &rval2, &rval3)) except PetscError: pass if alpha_m is not None: rval1 = asReal(alpha_m) if alpha_f is not None: rval2 = asReal(alpha_f) if gamma is not None: rval3 = asReal(gamma) - CHKERR( TSAlphaSetParams(self.ts, rval1, rval2, rval3) ) + CHKERR(TSAlphaSetParams(self.ts, rval1, rval2, rval3)) def getAlphaParams(self) -> tuple[float, float, float]: """Return the algorithmic parameters for `Type.ALPHA`. + Not collective. 
+ See Also -------- petsc.TSAlphaGetParams """ cdef PetscReal rval1 = 0, rval2 = 0, rval3 = 0 - CHKERR( TSAlphaGetParams(self.ts, &rval1, &rval2, &rval3) ) + CHKERR(TSAlphaGetParams(self.ts, &rval1, &rval2, &rval3)) return (toReal(rval1), toReal(rval2), toReal(rval3)) # --- application context --- @@ -2842,6 +2907,7 @@ cdef class TS(Object): """Application context.""" def __get__(self) -> Any: return self.getAppCtx() + def __set__(self, value) -> None: self.setAppCtx(value) @@ -2851,6 +2917,7 @@ cdef class TS(Object): """The `DM`.""" def __get__(self) -> DM: return self.getDM() + def __set__(self, value) -> None: self.setDM(value) @@ -2860,6 +2927,7 @@ cdef class TS(Object): """The problem type.""" def __get__(self) -> ProblemType: return self.getProblemType() + def __set__(self, value) -> None: self.setProblemType(value) @@ -2867,6 +2935,7 @@ cdef class TS(Object): """The equation type.""" def __get__(self) -> EquationType: return self.getEquationType() + def __set__(self, value) -> None: self.setEquationType(value) @@ -2891,6 +2960,7 @@ cdef class TS(Object): """The current time.""" def __get__(self) -> float: return self.getTime() + def __set__(self, value) -> None: self.setTime(value) @@ -2898,6 +2968,7 @@ cdef class TS(Object): """The current time step size.""" def __get__(self) -> None: return self.getTimeStep() + def __set__(self, value): self.setTimeStep(value) @@ -2905,6 +2976,7 @@ cdef class TS(Object): """The current step number.""" def __get__(self) -> int: return self.getStepNumber() + def __set__(self, value) -> None: self.setStepNumber(value) @@ -2912,6 +2984,7 @@ cdef class TS(Object): """The maximum time.""" def __get__(self) -> float: return self.getMaxTime() + def __set__(self, value) -> None: self.setMaxTime(value) @@ -2919,6 +2992,7 @@ cdef class TS(Object): """The maximum number of steps.""" def __get__(self) -> int: return self.getMaxSteps() + def __set__(self, value) -> None: self.setMaxSteps(value) @@ -2928,6 +3002,7 @@ cdef class TS(Object): """The relative tolerance.""" def __get__(self) -> float: return self.getTolerances()[0] + def __set__(self, value) -> None: self.setTolerances(rtol=value) @@ -2935,6 +3010,7 @@ cdef class TS(Object): """The absolute tolerance.""" def __get__(self) -> float: return self.getTolerances()[1] + def __set__(self, value) -> None: self.setTolerances(atol=value) @@ -2942,6 +3018,7 @@ cdef class TS(Object): """The converged reason.""" def __get__(self) -> TSConvergedReason: return self.getConvergedReason() + def __set__(self, value) -> None: self.setConvergedReason(value) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Vec.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Vec.pyx index 672e585a911..0e234c43547 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Vec.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Vec.pyx @@ -20,12 +20,15 @@ class VecType(object): MPIKOKKOS = S_(VECMPIKOKKOS) KOKKOS = S_(VECKOKKOS) + class VecOption(object): """Vector assembly option.""" IGNORE_OFF_PROC_ENTRIES = VEC_IGNORE_OFF_PROC_ENTRIES IGNORE_NEGATIVE_INDICES = VEC_IGNORE_NEGATIVE_INDICES # -------------------------------------------------------------------- + + cdef class Vec(Object): """A vector object. 
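As a quick orientation for the `Vec` hunks that follow, a minimal usage sketch (not part of this diff; the size and options are illustrative):

    from petsc4py import PETSc

    v = PETSc.Vec().create(comm=PETSc.COMM_WORLD)
    v.setSizes(8)            # global size; local sizes decided by PETSc
    v.setFromOptions()       # honors e.g. -vec_type on the command line
    v.set(1.0)
    print(v.getSize(), v.norm())
    v.destroy()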
@@ -106,12 +109,6 @@ cdef class Vec(Object): def __matmul__(self, other): return vec_matmul(self, other) - # - - #def __len__(self): - # cdef PetscInt size = 0 - # CHKERR( VecGetSize(self.vec, &size) ) - # return size def __getitem__(self, i): return vec_getitem(self, i) @@ -161,7 +158,7 @@ cdef class Vec(Object): """ cdef PetscViewer vwr = NULL if viewer is not None: vwr = viewer.vwr - CHKERR( VecView(self.vec, vwr) ) + CHKERR(VecView(self.vec, vwr)) def destroy(self) -> Self: """Destroy the vector. @@ -173,7 +170,7 @@ cdef class Vec(Object): create, petsc.VecDestroy """ - CHKERR( VecDestroy(&self.vec) ) + CHKERR(VecDestroy(&self.vec)) return self def create(self, comm: Comm | None = None) -> Self: @@ -195,8 +192,8 @@ cdef class Vec(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscVec newvec = NULL - CHKERR( VecCreate(ccomm, &newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreate(ccomm, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec return self def setType(self, vec_type: Type | str) -> None: @@ -216,13 +213,12 @@ cdef class Vec(Object): """ cdef PetscVecType cval = NULL vec_type = str2bytes(vec_type, &cval) - CHKERR( VecSetType(self.vec, cval) ) + CHKERR(VecSetType(self.vec, cval)) def setSizes( self, size: LayoutSizeSpec, - bsize: int | None = None, - ) -> None: + bsize: int | None = None) -> None: """Set the local and global sizes of the vector. Collective. @@ -241,9 +237,9 @@ cdef class Vec(Object): """ cdef PetscInt bs=0, n=0, N=0 Vec_Sizes(size, bsize, &bs, &n, &N) - CHKERR( VecSetSizes(self.vec, n, N) ) + CHKERR(VecSetSizes(self.vec, n, N)) if bs != PETSC_DECIDE: - CHKERR( VecSetBlockSize(self.vec, bs) ) + CHKERR(VecSetBlockSize(self.vec, bs)) # @@ -252,8 +248,7 @@ cdef class Vec(Object): self, size: LayoutSizeSpec, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a sequential `Type.SEQ` vector. Collective. @@ -278,19 +273,18 @@ cdef class Vec(Object): Sys_Layout(ccomm, bs, &n, &N) if bs == PETSC_DECIDE: bs = 1 cdef PetscVec newvec = NULL - CHKERR( VecCreate(ccomm,&newvec) ) - CHKERR( VecSetSizes(newvec, n, N) ) - CHKERR( VecSetBlockSize(newvec, bs) ) - CHKERR( VecSetType(newvec, VECSEQ) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreate(ccomm, &newvec)) + CHKERR(VecSetSizes(newvec, n, N)) + CHKERR(VecSetBlockSize(newvec, bs)) + CHKERR(VecSetType(newvec, VECSEQ)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec return self def createMPI( self, size: LayoutSizeSpec, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a parallel `Type.MPI` vector. Collective. @@ -315,11 +309,11 @@ cdef class Vec(Object): Sys_Layout(ccomm, bs, &n, &N) if bs == PETSC_DECIDE: bs = 1 cdef PetscVec newvec = NULL - CHKERR( VecCreate(ccomm, &newvec) ) - CHKERR( VecSetSizes(newvec, n, N) ) - CHKERR( VecSetBlockSize(newvec, bs) ) - CHKERR( VecSetType(newvec, VECMPI) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreate(ccomm, &newvec)) + CHKERR(VecSetSizes(newvec, n, N)) + CHKERR(VecSetBlockSize(newvec, bs)) + CHKERR(VecSetType(newvec, VECMPI)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec return self def createWithArray( @@ -327,8 +321,7 @@ cdef class Vec(Object): array: Sequence[Scalar], size: LayoutSizeSpec | None = None, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a vector using a provided array. 
Collective. @@ -367,10 +360,10 @@ cdef class Vec(Object): (toInt(na), toInt(n), toInt(bs))) cdef PetscVec newvec = NULL if comm_size(ccomm) == 1: - CHKERR( VecCreateSeqWithArray(ccomm,bs,N,sa,&newvec) ) + CHKERR(VecCreateSeqWithArray(ccomm, bs, N, sa, &newvec)) else: - CHKERR( VecCreateMPIWithArray(ccomm,bs,n,N,sa,&newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateMPIWithArray(ccomm, bs, n, N, sa, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec self.set_attr('__array__', array) return self @@ -380,8 +373,7 @@ cdef class Vec(Object): cudahandle: Any | None = None, # FIXME What type is appropriate here? size: LayoutSizeSpec | None = None, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.CUDA` vector with optional arrays. Collective. @@ -424,10 +416,10 @@ cdef class Vec(Object): (toInt(na), toInt(n), toInt(bs))) cdef PetscVec newvec = NULL if comm_size(ccomm) == 1: - CHKERR( VecCreateSeqCUDAWithArrays(ccomm,bs,N,sa,gpuarray,&newvec) ) + CHKERR(VecCreateSeqCUDAWithArrays(ccomm, bs, N, sa, gpuarray, &newvec)) else: - CHKERR( VecCreateMPICUDAWithArrays(ccomm,bs,n,N,sa,gpuarray,&newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateMPICUDAWithArrays(ccomm, bs, n, N, sa, gpuarray, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec if cpuarray is not None: self.set_attr('__array__', cpuarray) @@ -439,8 +431,7 @@ cdef class Vec(Object): hiphandle: Any | None = None, # FIXME What type is appropriate here? size: LayoutSizeSpec | None = None, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.HIP` vector with optional arrays. Collective. @@ -483,10 +474,10 @@ cdef class Vec(Object): (toInt(na), toInt(n), toInt(bs))) cdef PetscVec newvec = NULL if comm_size(ccomm) == 1: - CHKERR( VecCreateSeqHIPWithArrays(ccomm,bs,N,sa,gpuarray,&newvec) ) + CHKERR(VecCreateSeqHIPWithArrays(ccomm, bs, N, sa, gpuarray, &newvec)) else: - CHKERR( VecCreateMPIHIPWithArrays(ccomm,bs,n,N,sa,gpuarray,&newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateMPIHIPWithArrays(ccomm, bs, n, N, sa, gpuarray, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec if cpuarray is not None: self.set_attr('__array__', cpuarray) @@ -498,8 +489,7 @@ cdef class Vec(Object): viennaclvechandle: Any | None = None, # FIXME What type is appropriate here? size: LayoutSizeSpec | None = None, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.VIENNACL` vector with optional arrays. Collective. 
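The `createWithArray`-style constructors above wrap user-provided storage instead of allocating their own. A minimal sketch, assuming a NumPy buffer of the matching scalar type:

    import numpy as np
    from petsc4py import PETSc

    a = np.arange(4, dtype=PETSc.ScalarType)
    v = PETSc.Vec().createWithArray(a, comm=PETSc.COMM_SELF)
    v.scale(2.0)
    print(a)    # memory is shared, so `a` is now [0, 2, 4, 6]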
@@ -539,13 +529,13 @@ cdef class Vec(Object): Sys_Layout(ccomm, bs, &n, &N) if bs == PETSC_DECIDE: bs = 1 if na < n: - raise ValueError( "array size %d and vector local size %d block size %d" % (toInt(na), toInt(n), toInt(bs))) + raise ValueError("array size %d and vector local size %d block size %d" % (toInt(na), toInt(n), toInt(bs))) cdef PetscVec newvec = NULL if comm_size(ccomm) == 1: - CHKERR( VecCreateSeqViennaCLWithArrays(ccomm,bs,N,sa,vclvec,&newvec) ) + CHKERR(VecCreateSeqViennaCLWithArrays(ccomm, bs, N, sa, vclvec, &newvec)) else: - CHKERR( VecCreateMPIViennaCLWithArrays(ccomm,bs,n,N,sa,vclvec,&newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateMPIViennaCLWithArrays(ccomm, bs, n, N, sa, vclvec, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec if cpuarray is not None: self.set_attr('__array__', cpuarray) @@ -557,8 +547,7 @@ cdef class Vec(Object): object dltensor, size: LayoutSizeSpec | None = None, bsize: int | None = None, - comm: Comm | None = None - ) -> Self: + comm: Comm | None = None) -> Self: """Create a vector wrapping a DLPack object, sharing the same memory. Collective. @@ -592,8 +581,7 @@ cdef class Vec(Object): cdef int64_t* shape = NULL cdef int64_t* strides = NULL cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) - cdef PetscInt bs = 0,n = 0,N = 0 - cdef DLContext* ctx = NULL + cdef PetscInt bs = 0, n = 0, N = 0 if not PyCapsule_CheckExact(dltensor): dltensor = dltensor.__dlpack__() @@ -620,25 +608,25 @@ cdef class Vec(Object): (toInt(nz), toInt(n), toInt(bs))) cdef PetscVec newvec = NULL cdef PetscDLDeviceType dltype = ptr.dl_tensor.ctx.device_type - if dltype in [kDLCUDA,kDLCUDAManaged]: + if dltype in [kDLCUDA, kDLCUDAManaged]: if comm_size(ccomm) == 1: - CHKERR( VecCreateSeqCUDAWithArray(ccomm,bs,N,(ptr.dl_tensor.data),&newvec) ) + CHKERR(VecCreateSeqCUDAWithArray(ccomm, bs, N, (ptr.dl_tensor.data), &newvec)) else: - CHKERR( VecCreateMPICUDAWithArray(ccomm,bs,n,N,(ptr.dl_tensor.data),&newvec) ) - elif dltype in [kDLCPU,kDLCUDAHost,kDLROCMHost]: + CHKERR(VecCreateMPICUDAWithArray(ccomm, bs, n, N, (ptr.dl_tensor.data), &newvec)) + elif dltype in [kDLCPU, kDLCUDAHost, kDLROCMHost]: if comm_size(ccomm) == 1: - CHKERR( VecCreateSeqWithArray(ccomm,bs,N,(ptr.dl_tensor.data),&newvec) ) + CHKERR(VecCreateSeqWithArray(ccomm, bs, N, (ptr.dl_tensor.data), &newvec)) else: - CHKERR( VecCreateMPIWithArray(ccomm,bs,n,N,(ptr.dl_tensor.data),&newvec) ) + CHKERR(VecCreateMPIWithArray(ccomm, bs, n, N, (ptr.dl_tensor.data), &newvec)) elif dltype == kDLROCM: if comm_size(ccomm) == 1: - CHKERR( VecCreateSeqHIPWithArray(ccomm,bs,N,(ptr.dl_tensor.data),&newvec) ) + CHKERR(VecCreateSeqHIPWithArray(ccomm, bs, N, (ptr.dl_tensor.data), &newvec)) else: - CHKERR( VecCreateMPIHIPWithArray(ccomm,bs,n,N,(ptr.dl_tensor.data),&newvec) ) + CHKERR(VecCreateMPIHIPWithArray(ccomm, bs, n, N, (ptr.dl_tensor.data), &newvec)) else: raise TypeError("Device type {} not supported".format(dltype)) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec self.set_attr('__array__', dltensor) cdef int64_t* shape_arr = NULL cdef int64_t* strides_arr = NULL @@ -655,8 +643,7 @@ cdef class Vec(Object): def attachDLPackInfo( self, Vec vec=None, - object dltensor=None - ) -> Self: + object dltensor=None) -> Self: """Attach tensor information from another vector or DLPack tensor. Logically collective. 
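`createWithDLPack` accepts either a DLPack capsule or any object exposing `__dlpack__`, as the implementation above shows. A sketch assuming a NumPy recent enough to speak DLPack (1.22+); a CUDA- or ROCm-backed producer would map to the corresponding device vector instead:

    import numpy as np
    from petsc4py import PETSc

    a = np.ones(4, dtype=PETSc.ScalarType)
    v = PETSc.Vec().createWithDLPack(a, comm=PETSc.COMM_SELF)
    v.shift(1.0)
    print(a)    # storage is shared with the producer: [2, 2, 2, 2]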
@@ -681,7 +668,7 @@ cdef class Vec(Object): clearDLPackInfo, createWithDLPack """ - cdef object ctx0 = self.get_attr('__dltensor_ctx__'), ctx = None + cdef object ctx = None cdef DLManagedTensor* ptr = NULL cdef int64_t* shape_arr = NULL cdef int64_t* strides_arr = NULL @@ -781,25 +768,25 @@ cdef class Vec(Object): # By restoring now, we guarantee the sanity of the ObjectState if mode == 'w': if hostmem: - CHKERR( VecGetArrayWrite(self.vec, &a) ) - CHKERR( VecRestoreArrayWrite(self.vec, NULL) ) + CHKERR(VecGetArrayWrite(self.vec, &a)) + CHKERR(VecRestoreArrayWrite(self.vec, NULL)) else: - CHKERR( VecGetArrayWriteAndMemType(self.vec, &a, NULL) ) - CHKERR( VecRestoreArrayWriteAndMemType(self.vec, NULL) ) + CHKERR(VecGetArrayWriteAndMemType(self.vec, &a, NULL)) + CHKERR(VecRestoreArrayWriteAndMemType(self.vec, NULL)) elif mode == 'r': if hostmem: - CHKERR( VecGetArrayRead(self.vec, &a) ) - CHKERR( VecRestoreArrayRead(self.vec, NULL) ) + CHKERR(VecGetArrayRead(self.vec, &a)) + CHKERR(VecRestoreArrayRead(self.vec, NULL)) else: - CHKERR( VecGetArrayReadAndMemType(self.vec, &a, NULL) ) - CHKERR( VecRestoreArrayReadAndMemType(self.vec, NULL) ) + CHKERR(VecGetArrayReadAndMemType(self.vec, &a, NULL)) + CHKERR(VecRestoreArrayReadAndMemType(self.vec, NULL)) else: if hostmem: - CHKERR( VecGetArray(self.vec, &a) ) - CHKERR( VecRestoreArray(self.vec, NULL) ) + CHKERR(VecGetArray(self.vec, &a)) + CHKERR(VecRestoreArray(self.vec, NULL)) else: - CHKERR( VecGetArrayAndMemType(self.vec, &a, NULL) ) - CHKERR( VecRestoreArrayAndMemType(self.vec, NULL) ) + CHKERR(VecGetArrayAndMemType(self.vec, &a, NULL)) + CHKERR(VecRestoreArrayAndMemType(self.vec, NULL)) dl_tensor.data = a cdef DLContext* ctx = &dl_tensor.ctx @@ -824,7 +811,7 @@ cdef class Vec(Object): raise ValueError('Unsupported PetscScalar type') dtype.lanes = 1 dlm_tensor.manager_ctx = self.vec - CHKERR( PetscObjectReference(self.vec) ) + CHKERR(PetscObjectReference(self.vec)) dlm_tensor.manager_deleter = manager_deleter dlm_tensor.del_obj = PetscDEALLOC return PyCapsule_New(dlm_tensor, 'dltensor', pycapsule_deleter) @@ -834,8 +821,7 @@ cdef class Vec(Object): ghosts: Sequence[int], size: LayoutSizeSpec, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a parallel vector with ghost padding on each processor. Collective. @@ -864,12 +850,12 @@ cdef class Vec(Object): Sys_Layout(ccomm, bs, &n, &N) cdef PetscVec newvec = NULL if bs == PETSC_DECIDE: - CHKERR( VecCreateGhost( - ccomm, n, N, ng, ig, &newvec) ) + CHKERR(VecCreateGhost( + ccomm, n, N, ng, ig, &newvec)) else: - CHKERR( VecCreateGhostBlock( - ccomm, bs, n, N, ng, ig, &newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateGhostBlock( + ccomm, bs, n, N, ng, ig, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec return self def createGhostWithArray( @@ -878,8 +864,7 @@ cdef class Vec(Object): array: Sequence[Scalar], size: LayoutSizeSpec | None = None, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a parallel vector with ghost padding and provided arrays. Collective. 
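Ghosted vectors keep local copies of selected off-process entries next to the owned ones. A sketch of `createGhost` plus a ghost update; the indices are illustrative and assume at least two ranks:

    from petsc4py import PETSc

    comm = PETSc.COMM_WORLD
    rank, nproc = comm.getRank(), comm.getSize()
    ghosts = [((rank + 1) % nproc) * 4]     # first entry of the next rank
    v = PETSc.Vec().createGhost(ghosts, size=(4, None), comm=comm)
    v.set(float(rank))
    v.ghostUpdate(addv=PETSc.InsertMode.INSERT,
                  mode=PETSc.ScatterMode.FORWARD)
    with v.localForm() as lf:               # owned entries followed by ghosts
        print(rank, lf.getArray())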
@@ -921,12 +906,12 @@ cdef class Vec(Object): (toInt(ng), toInt(na), toInt(n), toInt(b))) cdef PetscVec newvec = NULL if bs == PETSC_DECIDE: - CHKERR( VecCreateGhostWithArray( - ccomm, n, N, ng, ig, sa, &newvec) ) + CHKERR(VecCreateGhostWithArray( + ccomm, n, N, ng, ig, sa, &newvec)) else: - CHKERR( VecCreateGhostBlockWithArray( - ccomm, bs, n, N, ng, ig, sa, &newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateGhostBlockWithArray( + ccomm, bs, n, N, ng, ig, sa, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec self.set_attr('__array__', array) return self @@ -934,8 +919,7 @@ cdef class Vec(Object): self, size: LayoutSizeSpec, bsize: int | None = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.SHARED` vector that uses shared memory. Collective. @@ -959,18 +943,17 @@ cdef class Vec(Object): Vec_Sizes(size, bsize, &bs, &n, &N) Sys_Layout(ccomm, bs, &n, &N) cdef PetscVec newvec = NULL - CHKERR( VecCreateShared(ccomm, n, N, &newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateShared(ccomm, n, N, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec if bs != PETSC_DECIDE: - CHKERR( VecSetBlockSize(self.vec, bs) ) + CHKERR(VecSetBlockSize(self.vec, bs)) return self def createNest( self, vecs: Sequence[Vec], isets: Sequence[IS] = None, - comm: Comm | None = None, - ) -> Self: + comm: Comm | None = None) -> Self: """Create a `Type.NEST` vector containing multiple nested subvectors. Collective. @@ -1001,20 +984,20 @@ cdef class Vec(Object): cdef PetscInt n = m cdef PetscVec *cvecs = NULL cdef PetscIS *cisets = NULL - cdef object tmp1, tmp2 - tmp1 = oarray_p(empty_p(n), NULL, &cvecs) + cdef object unused1, unused2 + unused1 = oarray_p(empty_p(n), NULL, &cvecs) for i from 0 <= i < m: cvecs[i] = (vecs[i]).vec if isets is not None: - tmp2 = oarray_p(empty_p(n), NULL, &cisets) + unused2 = oarray_p(empty_p(n), NULL, &cisets) for i from 0 <= i < m: cisets[i] = (isets[i]).iset cdef PetscVec newvec = NULL - CHKERR( VecCreateNest(ccomm, n, cisets, cvecs,&newvec) ) - CHKERR( PetscCLEAR(self.obj) ); self.vec = newvec + CHKERR(VecCreateNest(ccomm, n, cisets, cvecs, &newvec)) + CHKERR(PetscCLEAR(self.obj)); self.vec = newvec return self # - def setOptionsPrefix(self, prefix: str) -> None: + def setOptionsPrefix(self, prefix: str | None) -> None: """Set the prefix used for searching for options in the database. Logically collective. @@ -1026,7 +1009,7 @@ cdef class Vec(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( VecSetOptionsPrefix(self.vec, cval) ) + CHKERR(VecSetOptionsPrefix(self.vec, cval)) def getOptionsPrefix(self) -> str: """Return the prefix used for searching for options in the database. @@ -1039,10 +1022,10 @@ cdef class Vec(Object): """ cdef const char *cval = NULL - CHKERR( VecGetOptionsPrefix(self.vec, &cval) ) + CHKERR(VecGetOptionsPrefix(self.vec, &cval)) return bytes2str(cval) - def appendOptionsPrefix(self, prefix: str) -> None: + def appendOptionsPrefix(self, prefix: str | None) -> None: """Append to the prefix used for searching for options in the database. Logically collective. @@ -1054,7 +1037,7 @@ cdef class Vec(Object): """ cdef const char *cval = NULL prefix = str2bytes(prefix, &cval) - CHKERR( VecAppendOptionsPrefix(self.vec, cval) ) + CHKERR(VecAppendOptionsPrefix(self.vec, cval)) def setFromOptions(self) -> None: """Configure the vector from the options database. 
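Options prefixes let several vectors be configured independently from the same options database. A minimal sketch (the `rhs_` prefix is made up for illustration):

    from petsc4py import PETSc

    v = PETSc.Vec().create()
    v.setSizes(10)
    v.setOptionsPrefix('rhs_')    # now reads -rhs_vec_* options
    v.setFromOptions()            # e.g. `python app.py -rhs_vec_type mpi`
    print(v.getOptionsPrefix())   # 'rhs_'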
@@ -1066,7 +1049,7 @@ cdef class Vec(Object): petsc_options, petsc.VecSetFromOptions """ - CHKERR( VecSetFromOptions(self.vec) ) + CHKERR(VecSetFromOptions(self.vec)) def setUp(self) -> Self: """Set up the internal data structures for using the vector. @@ -1078,7 +1061,7 @@ cdef class Vec(Object): create, destroy, petsc.VecSetUp """ - CHKERR( VecSetUp(self.vec) ) + CHKERR(VecSetUp(self.vec)) return self def setOption(self, option: Option, flag: bool) -> None: @@ -1091,7 +1074,7 @@ cdef class Vec(Object): petsc.VecSetOption """ - CHKERR( VecSetOption(self.vec, option, flag) ) + CHKERR(VecSetOption(self.vec, option, flag)) def getType(self) -> str: """Return the type of the vector. @@ -1104,7 +1087,7 @@ cdef class Vec(Object): """ cdef PetscVecType cval = NULL - CHKERR( VecGetType(self.vec, &cval) ) + CHKERR(VecGetType(self.vec, &cval)) return bytes2str(cval) def getSize(self) -> int: @@ -1118,7 +1101,7 @@ cdef class Vec(Object): """ cdef PetscInt N = 0 - CHKERR( VecGetSize(self.vec, &N) ) + CHKERR(VecGetSize(self.vec, &N)) return toInt(N) def getLocalSize(self) -> int: @@ -1132,7 +1115,7 @@ cdef class Vec(Object): """ cdef PetscInt n = 0 - CHKERR( VecGetLocalSize(self.vec, &n) ) + CHKERR(VecGetLocalSize(self.vec, &n)) return toInt(n) def getSizes(self) -> LayoutSizeSpec: @@ -1146,8 +1129,8 @@ cdef class Vec(Object): """ cdef PetscInt n = 0, N = 0 - CHKERR( VecGetLocalSize(self.vec, &n) ) - CHKERR( VecGetSize(self.vec, &N) ) + CHKERR(VecGetLocalSize(self.vec, &n)) + CHKERR(VecGetSize(self.vec, &N)) return (toInt(n), toInt(N)) def setBlockSize(self, bsize: int) -> None: @@ -1161,7 +1144,7 @@ cdef class Vec(Object): """ cdef PetscInt bs = asInt(bsize) - CHKERR( VecSetBlockSize(self.vec, bs) ) + CHKERR(VecSetBlockSize(self.vec, bs)) def getBlockSize(self) -> int: """Return the block size of the vector. @@ -1174,7 +1157,7 @@ cdef class Vec(Object): """ cdef PetscInt bs=0 - CHKERR( VecGetBlockSize(self.vec, &bs) ) + CHKERR(VecGetBlockSize(self.vec, &bs)) return toInt(bs) def getOwnershipRange(self) -> tuple[int, int]: @@ -1195,7 +1178,7 @@ cdef class Vec(Object): """ cdef PetscInt low=0, high=0 - CHKERR( VecGetOwnershipRange(self.vec, &low, &high) ) + CHKERR(VecGetOwnershipRange(self.vec, &low, &high)) return (toInt(low), toInt(high)) def getOwnershipRanges(self) -> ArrayInt: @@ -1211,11 +1194,11 @@ cdef class Vec(Object): """ cdef const PetscInt *rng = NULL - CHKERR( VecGetOwnershipRanges(self.vec, &rng) ) + CHKERR(VecGetOwnershipRanges(self.vec, &rng)) cdef MPI_Comm comm = MPI_COMM_NULL - CHKERR( PetscObjectGetComm(self.vec, &comm) ) + CHKERR(PetscObjectGetComm(self.vec, &comm)) cdef int size = -1 - CHKERR( MPI_Comm_size(comm, &size) ) + CHKERR(MPI_Comm_size(comm, &size)) return array_i(size+1, rng) def createLocalVector(self) -> Vec: @@ -1234,7 +1217,7 @@ cdef class Vec(Object): """ lvec = Vec() - CHKERR( VecCreateLocalVector(self.vec, &lvec.vec) ) + CHKERR(VecCreateLocalVector(self.vec, &lvec.vec)) return lvec def getLocalVector(self, Vec lvec, readonly: bool = False) -> None: @@ -1256,9 +1239,9 @@ cdef class Vec(Object): """ if readonly: - CHKERR( VecGetLocalVectorRead(self.vec, lvec.vec) ) + CHKERR(VecGetLocalVectorRead(self.vec, lvec.vec)) else: - CHKERR( VecGetLocalVector(self.vec, lvec.vec) ) + CHKERR(VecGetLocalVector(self.vec, lvec.vec)) def restoreLocalVector(self, Vec lvec, readonly: bool = False) -> None: """Unmap a local access obtained with `getLocalVector`. 
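The ownership queries above describe the contiguous block of global indices stored on each rank. A short sketch:

    from petsc4py import PETSc

    v = PETSc.Vec().createMPI(100)
    lo, hi = v.getOwnershipRange()     # this rank owns [lo, hi)
    assert v.getLocalSize() == hi - lo
    print(v.getOwnershipRanges())      # all ranks' offsets, length size+1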
@@ -1279,9 +1262,9 @@ cdef class Vec(Object): """ if readonly: - CHKERR( VecRestoreLocalVectorRead(self.vec, lvec.vec) ) + CHKERR(VecRestoreLocalVectorRead(self.vec, lvec.vec)) else: - CHKERR( VecRestoreLocalVector(self.vec, lvec.vec) ) + CHKERR(VecRestoreLocalVector(self.vec, lvec.vec)) # FIXME Return type should be more specific def getBuffer(self, readonly: bool = False) -> Any: @@ -1327,7 +1310,7 @@ cdef class Vec(Object): else: return vec_getbuffer_w(self) - def getArray(self, readonly: bool=False) -> ArrayScalar: + def getArray(self, readonly: bool = False) -> ArrayScalar: """Return local portion of the vector as an `ndarray`. Logically collective. @@ -1372,12 +1355,12 @@ cdef class Vec(Object): cdef PetscInt nv=0 cdef PetscInt na=0 cdef PetscScalar *a = NULL - CHKERR( VecGetLocalSize(self.vec, &nv) ) + CHKERR(VecGetLocalSize(self.vec, &nv)) array = oarray_s(array, &na, &a) if (na != nv): raise ValueError( "cannot place input array size %d, vector size %d" % (toInt(na), toInt(nv))) - CHKERR( VecPlaceArray(self.vec, a) ) + CHKERR(VecPlaceArray(self.vec, a)) self.set_attr('__placed_array__', array) def resetArray(self, force: bool = False) -> ArrayScalar | None: @@ -1406,7 +1389,7 @@ cdef class Vec(Object): cdef object array = None array = self.get_attr('__placed_array__') if array is None and not force: return None - CHKERR( VecResetArray(self.vec) ) + CHKERR(VecResetArray(self.vec)) self.set_attr('__placed_array__', None) return array @@ -1421,7 +1404,7 @@ cdef class Vec(Object): """ cdef PetscBool bindFlg = asBool(flg) - CHKERR( VecBindToCPU(self.vec, bindFlg) ) + CHKERR(VecBindToCPU(self.vec, bindFlg)) def boundToCPU(self) -> bool: """Return whether the vector has been bound to the CPU. @@ -1434,13 +1417,12 @@ cdef class Vec(Object): """ cdef PetscBool flg = PETSC_TRUE - CHKERR( VecBoundToCPU(self.vec, &flg) ) + CHKERR(VecBoundToCPU(self.vec, &flg)) return toBool(flg) def getCUDAHandle( self, - mode: AccessModeSpec = 'rw', - ) -> Any: # FIXME What is the right return type? + mode: AccessModeSpec = 'rw') -> Any: # FIXME What is the right return type? """Return a pointer to the device buffer. Not collective. @@ -1468,11 +1450,11 @@ cdef class Vec(Object): cdef const char *m = NULL if mode is not None: mode = str2bytes(mode, &m) if m == NULL or (m[0] == c'r' and m[1] == c'w'): - CHKERR( VecCUDAGetArray(self.vec, &hdl) ) + CHKERR(VecCUDAGetArray(self.vec, &hdl)) elif m[0] == c'r': - CHKERR( VecCUDAGetArrayRead(self.vec, &hdl) ) + CHKERR(VecCUDAGetArrayRead(self.vec, &hdl)) elif m[0] == c'w': - CHKERR( VecCUDAGetArrayWrite(self.vec, &hdl) ) + CHKERR(VecCUDAGetArrayWrite(self.vec, &hdl)) else: raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'") return hdl @@ -1480,8 +1462,7 @@ cdef class Vec(Object): def restoreCUDAHandle( self, handle: Any, # FIXME What type hint is appropriate? - mode: AccessModeSpec = 'rw', - ) -> None: + mode: AccessModeSpec = 'rw') -> None: """Restore a pointer to the device buffer obtained with `getCUDAHandle`. Not collective. 
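`getArray` returns a NumPy view of the local storage, while `placeArray`/`resetArray` temporarily substitute user storage. A sketch:

    import numpy as np
    from petsc4py import PETSc

    v = PETSc.Vec().createSeq(4)
    v.set(3.0)
    a = v.getArray()                   # writable view of the local part
    a[0] = -1.0                        # updates the vector in place

    tmp = np.zeros(4, dtype=PETSc.ScalarType)
    v.placeArray(tmp)                  # writes now target `tmp`
    v.set(7.0)
    v.resetArray()                     # back to original storage; tmp is all 7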
@@ -1503,18 +1484,17 @@ cdef class Vec(Object): cdef const char *m = NULL if mode is not None: mode = str2bytes(mode, &m) if m == NULL or (m[0] == c'r' and m[1] == c'w'): - CHKERR( VecCUDARestoreArray(self.vec, &hdl) ) + CHKERR(VecCUDARestoreArray(self.vec, &hdl)) elif m[0] == c'r': - CHKERR( VecCUDARestoreArrayRead(self.vec, &hdl) ) + CHKERR(VecCUDARestoreArrayRead(self.vec, &hdl)) elif m[0] == c'w': - CHKERR( VecCUDARestoreArrayWrite(self.vec, &hdl) ) + CHKERR(VecCUDARestoreArrayWrite(self.vec, &hdl)) else: raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'") def getHIPHandle( self, - mode: AccessModeSpec = 'rw', - ) -> Any: # FIXME What is the right return type? + mode: AccessModeSpec = 'rw') -> Any: # FIXME What is the right return type? """Return a pointer to the device buffer. Not collective. @@ -1542,11 +1522,11 @@ cdef class Vec(Object): cdef const char *m = NULL if mode is not None: mode = str2bytes(mode, &m) if m == NULL or (m[0] == c'r' and m[1] == c'w'): - CHKERR( VecHIPGetArray(self.vec, &hdl) ) + CHKERR(VecHIPGetArray(self.vec, &hdl)) elif m[0] == c'r': - CHKERR( VecHIPGetArrayRead(self.vec, &hdl) ) + CHKERR(VecHIPGetArrayRead(self.vec, &hdl)) elif m[0] == c'w': - CHKERR( VecHIPGetArrayWrite(self.vec, &hdl) ) + CHKERR(VecHIPGetArrayWrite(self.vec, &hdl)) else: raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'") return hdl @@ -1554,8 +1534,7 @@ cdef class Vec(Object): def restoreHIPHandle( self, handle: Any, # FIXME What type hint is appropriate? - mode: AccessModeSpec = 'rw', - ) -> None: + mode: AccessModeSpec = 'rw') -> None: """Restore a pointer to the device buffer obtained with `getHIPHandle`. Not collective. @@ -1577,11 +1556,11 @@ cdef class Vec(Object): cdef const char *m = NULL if mode is not None: mode = str2bytes(mode, &m) if m == NULL or (m[0] == c'r' and m[1] == c'w'): - CHKERR( VecHIPRestoreArray(self.vec, &hdl) ) + CHKERR(VecHIPRestoreArray(self.vec, &hdl)) elif m[0] == c'r': - CHKERR( VecHIPRestoreArrayRead(self.vec, &hdl) ) + CHKERR(VecHIPRestoreArrayRead(self.vec, &hdl)) elif m[0] == c'w': - CHKERR( VecHIPRestoreArrayWrite(self.vec, &hdl) ) + CHKERR(VecHIPRestoreArrayWrite(self.vec, &hdl)) else: raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'") @@ -1607,8 +1586,8 @@ cdef class Vec(Object): petsc.VecGetOffloadMask, petsc.PetscOffloadMask """ - cdef PetscOffloadMask mask - CHKERR( VecGetOffloadMask(self.vec, &mask) ) + cdef PetscOffloadMask mask = PETSC_OFFLOAD_UNALLOCATED + CHKERR(VecGetOffloadMask(self.vec, &mask)) return mask def getCLContextHandle(self) -> int: @@ -1628,7 +1607,7 @@ cdef class Vec(Object): """ cdef Py_uintptr_t ctxhdl = 0 - CHKERR( VecViennaCLGetCLContext(self.vec, &ctxhdl) ) + CHKERR(VecViennaCLGetCLContext(self.vec, &ctxhdl)) return ctxhdl def getCLQueueHandle(self) -> int: @@ -1648,13 +1627,12 @@ cdef class Vec(Object): """ cdef Py_uintptr_t queuehdl = 0 - CHKERR( VecViennaCLGetCLQueue(self.vec, &queuehdl) ) + CHKERR(VecViennaCLGetCLQueue(self.vec, &queuehdl)) return queuehdl def getCLMemHandle( self, - mode: AccessModeSpec = 'rw', - ) -> int: + mode: AccessModeSpec = 'rw') -> int: """Return the OpenCL buffer associated with the vector. Not collective. 
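The device-handle accessors follow a strict get/restore discipline. A sketch, meaningful only when PETSc was configured with CUDA (the HIP and ViennaCL variants are analogous):

    from petsc4py import PETSc

    v = PETSc.Vec().create()
    v.setSizes(8)
    v.setType(PETSc.Vec.Type.CUDA)
    v.set(1.0)
    hdl = v.getCUDAHandle('r')         # raw device pointer, read-only access
    # ... hand `hdl` to a kernel or an external GPU library here ...
    v.restoreCUDAHandle(hdl, 'r')      # always pair get with restore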
@@ -1680,11 +1658,11 @@ cdef class Vec(Object): cdef const char *m = NULL mode = str2bytes(mode, &m) if m == NULL or (m[0] == c'r' and m[1] == c'w'): - CHKERR( VecViennaCLGetCLMem(self.vec, &memhdl) ) + CHKERR(VecViennaCLGetCLMem(self.vec, &memhdl)) elif m[0] == c'r': - CHKERR( VecViennaCLGetCLMemRead(self.vec, &memhdl) ) + CHKERR(VecViennaCLGetCLMemRead(self.vec, &memhdl)) elif m[0] == c'w': - CHKERR( VecViennaCLGetCLMemWrite(self.vec, &memhdl) ) + CHKERR(VecViennaCLGetCLMemWrite(self.vec, &memhdl)) else: raise ValueError("Invalid mode: expected 'r', 'w' or 'rw'") return memhdl @@ -1699,7 +1677,7 @@ cdef class Vec(Object): getCLMemHandle, petsc.VecViennaCLRestoreCLMemWrite """ - CHKERR( VecViennaCLRestoreCLMemWrite(self.vec) ) + CHKERR(VecViennaCLRestoreCLMemWrite(self.vec)) def duplicate(self, array: Sequence[Scalar] | None = None) -> Vec: """Create a new vector with the same type, optionally with data. @@ -1717,7 +1695,7 @@ cdef class Vec(Object): """ cdef Vec vec = type(self)() - CHKERR( VecDuplicate(self.vec, &vec.vec) ) + CHKERR(VecDuplicate(self.vec, &vec.vec)) # duplicate tensor context cdef object ctx0 = self.get_attr('__dltensor_ctx__') if ctx0 is not None: @@ -1747,8 +1725,8 @@ cdef class Vec(Object): if result is None: result = type(self)() if result.vec == NULL: - CHKERR( VecDuplicate(self.vec, &result.vec) ) - CHKERR( VecCopy(self.vec, result.vec) ) + CHKERR(VecDuplicate(self.vec, &result.vec)) + CHKERR(VecCopy(self.vec, result.vec)) return result def chop(self, tol: float) -> None: @@ -1767,7 +1745,7 @@ cdef class Vec(Object): """ cdef PetscReal rval = asReal(tol) - CHKERR( VecFilter(self.vec, rval) ) + CHKERR(VecFilter(self.vec, rval)) def load(self, Viewer viewer) -> Self: """Load a vector. @@ -1782,9 +1760,9 @@ cdef class Vec(Object): cdef MPI_Comm comm = MPI_COMM_NULL cdef PetscObject obj = (viewer.vwr) if self.vec == NULL: - CHKERR( PetscObjectGetComm(obj, &comm) ) - CHKERR( VecCreate(comm, &self.vec) ) - CHKERR( VecLoad(self.vec, viewer.vwr) ) + CHKERR(PetscObjectGetComm(obj, &comm)) + CHKERR(VecCreate(comm, &self.vec)) + CHKERR(VecLoad(self.vec, viewer.vwr)) return self def equal(self, Vec vec) -> bool: @@ -1803,7 +1781,7 @@ cdef class Vec(Object): """ cdef PetscBool flag = PETSC_FALSE - CHKERR( VecEqual(self.vec, vec.vec, &flag) ) + CHKERR(VecEqual(self.vec, vec.vec, &flag)) return toBool(flag) def dot(self, Vec vec) -> Scalar: @@ -1828,7 +1806,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 - CHKERR( VecDot(self.vec, vec.vec, &sval) ) + CHKERR(VecDot(self.vec, vec.vec, &sval)) return toScalar(sval) def dotBegin(self, Vec vec) -> None: @@ -1849,7 +1827,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 - CHKERR( VecDotBegin(self.vec, vec.vec, &sval) ) + CHKERR(VecDotBegin(self.vec, vec.vec, &sval)) def dotEnd(self, Vec vec) -> Scalar: """Finish computing the dot product initiated with `dotBegin`. 
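The split-phase `dotBegin`/`dotEnd` pair lets the global reduction overlap with local work. A sketch:

    from petsc4py import PETSc

    x = PETSc.Vec().createMPI(100); x.set(1.0)
    y = PETSc.Vec().createMPI(100); y.set(2.0)
    x.dotBegin(y)          # start the non-blocking reduction
    # ... unrelated local computation can overlap here ...
    print(x.dotEnd(y))     # 200.0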
@@ -1862,7 +1840,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 - CHKERR( VecDotEnd(self.vec, vec.vec, &sval) ) + CHKERR(VecDotEnd(self.vec, vec.vec, &sval)) return toScalar(sval) def tDot(self, Vec vec) -> Scalar: @@ -1884,7 +1862,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 - CHKERR( VecTDot(self.vec, vec.vec, &sval) ) + CHKERR(VecTDot(self.vec, vec.vec, &sval)) return toScalar(sval) def tDotBegin(self, Vec vec) -> None: @@ -1905,7 +1883,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 - CHKERR( VecTDotBegin(self.vec, vec.vec, &sval) ) + CHKERR(VecTDotBegin(self.vec, vec.vec, &sval)) def tDotEnd(self, Vec vec) -> Scalar: """Finish computing the indefinite dot product initiated with `tDotBegin`. @@ -1918,37 +1896,200 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 - CHKERR( VecTDotEnd(self.vec, vec.vec, &sval) ) + CHKERR(VecTDotEnd(self.vec, vec.vec, &sval)) return toScalar(sval) - def mDot(self, vecs, out=None) -> None: - """Not implemented.""" - raise NotImplementedError + def mDot(self, vecs: Sequence[Vec], out: ArrayScalar | None = None) -> ArrayScalar: + """Compute Xᴴ·y with X an array of vectors. - def mDotBegin(self, vecs, out=None) -> None: - """Not implemented.""" - raise NotImplementedError + Collective. - def mDotEnd(self, vecs, out=None) -> None: - """Not implemented.""" - raise NotImplementedError + Parameters + ---------- + vecs + Array of vectors. + out + Optional placeholder for the result. - def mtDot(self, vecs, out=None) -> None: - """Not implemented.""" - raise NotImplementedError + See Also + -------- + dot, tDot, mDotBegin, mDotEnd, petsc.VecMDot - def mtDotBegin(self, vecs, out=None) -> None: - """Not implemented.""" - raise NotImplementedError + """ + cdef PetscInt nv=len(vecs), no=0 + cdef PetscVec *v=NULL + cdef PetscScalar *val=NULL + cdef Py_ssize_t i=0 + cdef object unused = oarray_p(empty_p(nv), NULL, &v) + for i from 0 <= i < nv: + v[i] = ((vecs[i])).vec + if out is None: + out = empty_s(nv) + out = oarray_s(out, &no, &val) + if (nv != no): raise ValueError( + ("incompatible array sizes: " + "nv=%d, no=%d") % (toInt(nv), toInt(no))) + CHKERR(VecMDot(self.vec, nv, v, val)) + return out + + def mDotBegin(self, vecs: Sequence[Vec], out: ArrayScalar) -> None: + """Starts a split phase multiple dot product computation. - def mtDotEnd(self, vecs, out=None) -> None: - """Not implemented.""" - raise NotImplementedError + Collective. + + Parameters + ---------- + vecs + Array of vectors. + out + Placeholder for the result. + + See Also + -------- + mDot, mDotEnd, petsc.VecMDotBegin + + """ + cdef PetscInt nv=len(vecs), no=0 + cdef PetscVec *v=NULL + cdef PetscScalar *val=NULL + cdef Py_ssize_t i=0 + cdef object unused = oarray_p(empty_p(nv), NULL, &v) + for i from 0 <= i < nv: + v[i] = ((vecs[i])).vec + out = oarray_s(out, &no, &val) + if (nv != no): raise ValueError( + ("incompatible array sizes: " + "nv=%d, no=%d") % (toInt(nv), toInt(no))) + CHKERR(VecMDotBegin(self.vec, nv, v, val)) + + def mDotEnd(self, vecs: Sequence[Vec], out: ArrayScalar) -> ArrayScalar: + """Ends a split phase multiple dot product computation. + + Collective. + + Parameters + ---------- + vecs + Array of vectors. + out + Placeholder for the result. 
+ + See Also + -------- + mDot, mDotBegin, petsc.VecMDotEnd + + """ + cdef PetscInt nv=len(vecs), no=0 + cdef PetscVec *v=NULL + cdef PetscScalar *val=NULL + cdef Py_ssize_t i=0 + cdef object unused = oarray_p(empty_p(nv), NULL, &v) + for i from 0 <= i < nv: + v[i] = ((vecs[i])).vec + out = oarray_s(out, &no, &val) + if (nv != no): raise ValueError( + ("incompatible array sizes: " + "nv=%d, no=%d") % (toInt(nv), toInt(no))) + CHKERR(VecMDotEnd(self.vec, nv, v, val)) + return out + + def mtDot(self, vecs: Sequence[Vec], out: ArrayScalar | None = None) -> ArrayScalar: + """Compute Xᵀ·y with X an array of vectors. + + Collective. + + Parameters + ---------- + vecs + Array of vectors. + out + Optional placeholder for the result. + + See Also + -------- + tDot, mDot, mtDotBegin, mtDotEnd, petsc.VecMTDot + + """ + cdef PetscInt nv=len(vecs), no=0 + cdef PetscVec *v=NULL + cdef PetscScalar *val=NULL + cdef Py_ssize_t i=0 + cdef object unused = oarray_p(empty_p(nv), NULL, &v) + for i from 0 <= i < nv: + v[i] = ((vecs[i])).vec + if out is None: + out = empty_s(nv) + out = oarray_s(out, &no, &val) + if (nv != no): raise ValueError( + ("incompatible array sizes: " + "nv=%d, no=%d") % (toInt(nv), toInt(no))) + CHKERR(VecMTDot(self.vec, nv, v, val)) + return out + + def mtDotBegin(self, vecs: Sequence[Vec], out: ArrayScalar) -> None: + """Starts a split phase transpose multiple dot product computation. + + Collective. + + Parameters + ---------- + vecs + Array of vectors. + out + Placeholder for the result. + + See Also + -------- + mtDot, mtDotEnd, petsc.VecMTDotBegin + + """ + cdef PetscInt nv=len(vecs), no=0 + cdef PetscVec *v=NULL + cdef PetscScalar *val=NULL + cdef Py_ssize_t i=0 + cdef object unused = oarray_p(empty_p(nv), NULL, &v) + for i from 0 <= i < nv: + v[i] = ((vecs[i])).vec + out = oarray_s(out, &no, &val) + if (nv != no): raise ValueError( + ("incompatible array sizes: " + "nv=%d, no=%d") % (toInt(nv), toInt(no))) + CHKERR(VecMTDotBegin(self.vec, nv, v, val)) + + def mtDotEnd(self, vecs: Sequence[Vec], out: ArrayScalar) -> ArrayScalar: + """Ends a split phase transpose multiple dot product computation. + + Collective. + + Parameters + ---------- + vecs + Array of vectors. + out + Placeholder for the result. + + See Also + -------- + mtDot, mtDotBegin, petsc.VecMTDotEnd + + """ + cdef PetscInt nv=len(vecs), no=0 + cdef PetscVec *v=NULL + cdef PetscScalar *val=NULL + cdef Py_ssize_t i=0 + cdef object unused = oarray_p(empty_p(nv), NULL, &v) + for i from 0 <= i < nv: + v[i] = ((vecs[i])).vec + out = oarray_s(out, &no, &val) + if (nv != no): raise ValueError( + ("incompatible array sizes: " + "nv=%d, no=%d") % (toInt(nv), toInt(no))) + CHKERR(VecMTDotEnd(self.vec, nv, v, val)) + return out def norm( self, - norm_type: NormTypeSpec = None, - ) -> float | tuple[float, float]: + norm_type: NormTypeSpec = None) -> float | tuple[float, float]: """Compute the vector norm. Collective. @@ -1964,14 +2105,13 @@ cdef class Vec(Object): cdef PetscNormType ntype = PETSC_NORM_2 if norm_type is not None: ntype = norm_type cdef PetscReal rval[2] - CHKERR( VecNorm(self.vec, ntype, rval) ) + CHKERR(VecNorm(self.vec, ntype, rval)) if ntype != norm_1_2: return toReal(rval[0]) else: return (toReal(rval[0]), toReal(rval[1])) def normBegin( self, - norm_type: NormTypeSpec = None, - ) -> None: + norm_type: NormTypeSpec = None) -> None: """Begin computing the vector norm. Collective. 
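A sketch of the newly implemented multiple-dot-product methods; `mDot` allocates the result unless a preallocated array is passed, and the begin/end pair mirrors `dotBegin`/`dotEnd`:

    import numpy as np
    from petsc4py import PETSc

    y = PETSc.Vec().createSeq(4); y.set(1.0)
    xs = [y.duplicate() for _ in range(3)]
    for k, x in enumerate(xs):
        x.set(k + 1.0)
    print(y.mDot(xs))                  # [4. 8. 12.]
    buf = np.empty(3, dtype=PETSc.ScalarType)
    y.mDotBegin(xs, buf)               # split-phase variant
    y.mDotEnd(xs, buf)                 # result lands in `buf`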
@@ -1986,12 +2126,11 @@ cdef class Vec(Object): cdef PetscNormType ntype = PETSC_NORM_2 if norm_type is not None: ntype = norm_type cdef PetscReal dummy[2] - CHKERR( VecNormBegin(self.vec, ntype, dummy) ) + CHKERR(VecNormBegin(self.vec, ntype, dummy)) def normEnd( self, - norm_type: NormTypeSpec = None, - ) -> float | tuple[float, float]: + norm_type: NormTypeSpec = None) -> float | tuple[float, float]: """Finish computations initiated with `normBegin`. Collective. @@ -2005,7 +2144,7 @@ cdef class Vec(Object): cdef PetscNormType ntype = PETSC_NORM_2 if norm_type is not None: ntype = norm_type cdef PetscReal rval[2] - CHKERR( VecNormEnd(self.vec, ntype, rval) ) + CHKERR(VecNormEnd(self.vec, ntype, rval)) if ntype != norm_1_2: return toReal(rval[0]) else: return (toReal(rval[0]), toReal(rval[1])) @@ -2021,7 +2160,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 cdef PetscReal rval = 0 - CHKERR( VecDotNorm2(self.vec, vec.vec, &sval, &rval) ) + CHKERR(VecDotNorm2(self.vec, vec.vec, &sval, &rval)) return toScalar(sval), toReal(float) def sum(self) -> Scalar: @@ -2035,7 +2174,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = 0 - CHKERR( VecSum(self.vec, &sval) ) + CHKERR(VecSum(self.vec, &sval)) return toScalar(sval) def min(self) -> tuple[int, float]: @@ -2058,7 +2197,7 @@ cdef class Vec(Object): """ cdef PetscInt ival = 0 cdef PetscReal rval = 0 - CHKERR( VecMin(self.vec, &ival, &rval) ) + CHKERR(VecMin(self.vec, &ival, &rval)) return (toInt(ival), toReal(rval)) def max(self) -> tuple[int, float]: @@ -2081,7 +2220,7 @@ cdef class Vec(Object): """ cdef PetscInt ival = 0 cdef PetscReal rval = 0 - CHKERR( VecMax(self.vec, &ival, &rval) ) + CHKERR(VecMax(self.vec, &ival, &rval)) return (toInt(ival), toReal(rval)) def normalize(self) -> float: @@ -2100,7 +2239,7 @@ cdef class Vec(Object): """ cdef PetscReal rval = 0 - CHKERR( VecNormalize(self.vec, &rval) ) + CHKERR(VecNormalize(self.vec, &rval)) return toReal(rval) def reciprocal(self) -> None: @@ -2113,7 +2252,7 @@ cdef class Vec(Object): petsc.VecReciprocal """ - CHKERR( VecReciprocal(self.vec) ) + CHKERR(VecReciprocal(self.vec)) def exp(self) -> None: """Replace each entry (xₙ) in the vector by exp(xₙ). @@ -2125,7 +2264,7 @@ cdef class Vec(Object): log, petsc.VecExp """ - CHKERR( VecExp(self.vec) ) + CHKERR(VecExp(self.vec)) def log(self) -> None: """Replace each entry in the vector by its natural logarithm. @@ -2137,7 +2276,7 @@ cdef class Vec(Object): exp, petsc.VecLog """ - CHKERR( VecLog(self.vec) ) + CHKERR(VecLog(self.vec)) def sqrtabs(self) -> None: """Replace each entry (xₙ) in the vector by √|xₙ|. @@ -2149,7 +2288,7 @@ cdef class Vec(Object): petsc.VecSqrtAbs """ - CHKERR( VecSqrtAbs(self.vec) ) + CHKERR(VecSqrtAbs(self.vec)) def abs(self) -> None: """Replace each entry (xₙ) in the vector by abs|xₙ|. @@ -2161,9 +2300,9 @@ cdef class Vec(Object): petsc.VecAbs """ - CHKERR( VecAbs(self.vec) ) + CHKERR(VecAbs(self.vec)) - def conjugate(self): + def conjugate(self) -> None: """Conjugate the vector. Logically collective. @@ -2173,7 +2312,7 @@ cdef class Vec(Object): petsc.VecConjugate """ - CHKERR( VecConjugate(self.vec) ) + CHKERR(VecConjugate(self.vec)) def setRandom(self, Random random=None) -> None: """Set all components of the vector to random numbers. 
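`norm` defaults to the 2-norm and can return two norms from a single reduction. A sketch:

    from petsc4py import PETSc

    v = PETSc.Vec().createSeq(3)
    v.setValues([0, 1, 2], [3.0, -4.0, 0.0]); v.assemble()
    print(v.norm())                               # 5.0 (2-norm)
    print(v.norm(PETSc.NormType.NORM_1))          # 7.0
    print(v.norm(PETSc.NormType.NORM_1_AND_2))    # (7.0, 5.0), one reduction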
@@ -2193,7 +2332,7 @@ cdef class Vec(Object): """ cdef PetscRandom rnd = NULL if random is not None: rnd = random.rnd - CHKERR( VecSetRandom(self.vec, rnd) ) + CHKERR(VecSetRandom(self.vec, rnd)) def permute(self, IS order, invert: bool = False) -> None: """Permute the vector in-place with a provided ordering. @@ -2214,7 +2353,7 @@ cdef class Vec(Object): """ cdef PetscBool cinvert = PETSC_FALSE if invert: cinvert = PETSC_TRUE - CHKERR( VecPermute(self.vec, order.iset, cinvert) ) + CHKERR(VecPermute(self.vec, order.iset, cinvert)) def zeroEntries(self) -> None: """Set all entries in the vector to zero. @@ -2226,7 +2365,7 @@ cdef class Vec(Object): set, petsc.VecZeroEntries """ - CHKERR( VecZeroEntries(self.vec) ) + CHKERR(VecZeroEntries(self.vec)) def set(self, alpha: Scalar) -> None: """Set all components of the vector to the same value. @@ -2239,7 +2378,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecSet(self.vec, sval) ) + CHKERR(VecSet(self.vec, sval)) def isset(self, IS idx, alpha: Scalar) -> None: """Set specific elements of the vector to the same value. @@ -2259,7 +2398,7 @@ cdef class Vec(Object): """ cdef PetscScalar aval = asScalar(alpha) - CHKERR( VecISSet(self.vec, idx.iset, aval) ) + CHKERR(VecISSet(self.vec, idx.iset, aval)) def scale(self, alpha: Scalar) -> None: """Scale all entries of the vector. @@ -2279,7 +2418,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecScale(self.vec, sval) ) + CHKERR(VecScale(self.vec, sval)) def shift(self, alpha: Scalar) -> None: """Shift all entries in the vector. @@ -2299,7 +2438,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecShift(self.vec, sval) ) + CHKERR(VecShift(self.vec, sval)) def swap(self, Vec vec) -> None: """Swap the content of two vectors. @@ -2316,7 +2455,7 @@ cdef class Vec(Object): petsc.VecSwap """ - CHKERR( VecSwap(self.vec, vec.vec) ) + CHKERR(VecSwap(self.vec, vec.vec)) def axpy(self, alpha: Scalar, Vec x) -> None: """Compute and store y = ɑ·x + y. @@ -2336,7 +2475,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecAXPY(self.vec, sval, x.vec) ) + CHKERR(VecAXPY(self.vec, sval, x.vec)) def isaxpy(self, IS idx, alpha: Scalar, Vec x) -> None: """Add a scaled reduced-space vector to a subset of the vector. @@ -2360,7 +2499,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecISAXPY(self.vec, idx.iset, sval, x.vec) ) + CHKERR(VecISAXPY(self.vec, idx.iset, sval, x.vec)) def aypx(self, alpha: Scalar, Vec x) -> None: """Compute and store y = x + ɑ·y. @@ -2380,7 +2519,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecAYPX(self.vec, sval, x.vec) ) + CHKERR(VecAYPX(self.vec, sval, x.vec)) def axpby(self, alpha: Scalar, beta: Scalar, Vec x) -> None: """Compute and store y = ɑ·x + β·y. @@ -2403,7 +2542,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval1 = asScalar(alpha) cdef PetscScalar sval2 = asScalar(beta) - CHKERR( VecAXPBY(self.vec, sval1, sval2, x.vec) ) + CHKERR(VecAXPBY(self.vec, sval1, sval2, x.vec)) def waxpy(self, alpha: Scalar, Vec x, Vec y) -> None: """Compute and store w = ɑ·x + y. @@ -2425,7 +2564,7 @@ cdef class Vec(Object): """ cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecWAXPY(self.vec, sval, x.vec, y.vec) ) + CHKERR(VecWAXPY(self.vec, sval, x.vec, y.vec)) def maxpy(self, alphas: Sequence[Scalar], vecs: Sequence[Vec]) -> None: """Compute and store y = Σₙ(ɑₙ·Xₙ) + y with X an array of vectors. 
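The BLAS-style updates above differ only in which operand is scaled and where the result lands. A sketch:

    from petsc4py import PETSc

    x = PETSc.Vec().createSeq(4); x.set(1.0)
    y = PETSc.Vec().createSeq(4); y.set(2.0)
    w = y.duplicate()

    y.axpy(3.0, x)         # y <- 3*x + y      -> all 5.0
    y.aypx(0.5, x)         # y <- x + 0.5*y    -> all 3.5
    w.waxpy(2.0, x, y)     # w <- 2*x + y      -> all 5.5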
@@ -2449,13 +2588,13 @@ cdef class Vec(Object): cdef PetscInt n = 0 cdef PetscScalar *a = NULL cdef PetscVec *v = NULL - cdef object tmp1 = iarray_s(alphas, &n, &a) - cdef object tmp2 = oarray_p(empty_p(n),NULL, &v) + cdef object unused1 = iarray_s(alphas, &n, &a) + cdef object unused2 = oarray_p(empty_p(n), NULL, &v) assert n == len(vecs) cdef Py_ssize_t i=0 for i from 0 <= i < n: v[i] = ((vecs[i])).vec - CHKERR( VecMAXPY(self.vec, n, a, v) ) + CHKERR(VecMAXPY(self.vec, n, a, v)) def pointwiseMult(self, Vec x, Vec y) -> None: """Compute and store the component-wise multiplication of two vectors. @@ -2474,7 +2613,7 @@ cdef class Vec(Object): pointwiseDivide, petsc.VecPointwiseMult """ - CHKERR( VecPointwiseMult(self.vec, x.vec, y.vec) ) + CHKERR(VecPointwiseMult(self.vec, x.vec, y.vec)) def pointwiseDivide(self, Vec x, Vec y) -> None: """Compute and store the component-wise division of two vectors. @@ -2495,7 +2634,7 @@ cdef class Vec(Object): pointwiseMult, petsc.VecPointwiseDivide """ - CHKERR( VecPointwiseDivide(self.vec, x.vec, y.vec) ) + CHKERR(VecPointwiseDivide(self.vec, x.vec, y.vec)) def pointwiseMin(self, Vec x, Vec y) -> None: """Compute and store the component-wise minimum of two vectors. @@ -2514,7 +2653,7 @@ cdef class Vec(Object): pointwiseMax, pointwiseMaxAbs, petsc.VecPointwiseMin """ - CHKERR( VecPointwiseMin(self.vec, x.vec, y.vec) ) + CHKERR(VecPointwiseMin(self.vec, x.vec, y.vec)) def pointwiseMax(self, Vec x, Vec y) -> None: """Compute and store the component-wise maximum of two vectors. @@ -2533,7 +2672,7 @@ cdef class Vec(Object): pointwiseMin, pointwiseMaxAbs, petsc.VecPointwiseMax """ - CHKERR( VecPointwiseMax(self.vec, x.vec, y.vec) ) + CHKERR(VecPointwiseMax(self.vec, x.vec, y.vec)) def pointwiseMaxAbs(self, Vec x, Vec y) -> None: """Compute and store the component-wise maximum absolute values. @@ -2552,7 +2691,7 @@ cdef class Vec(Object): pointwiseMin, pointwiseMax, petsc.VecPointwiseMaxAbs """ - CHKERR( VecPointwiseMaxAbs(self.vec, x.vec, y.vec) ) + CHKERR(VecPointwiseMaxAbs(self.vec, x.vec, y.vec)) def maxPointwiseDivide(self, Vec vec) -> float: """Return the maximum of the component-wise absolute value division. @@ -2575,7 +2714,7 @@ cdef class Vec(Object): """ cdef PetscReal rval = 0 - CHKERR( VecMaxPointwiseDivide(self.vec, vec.vec, &rval) ) + CHKERR(VecMaxPointwiseDivide(self.vec, vec.vec, &rval)) return toReal(rval) def getValue(self, index: int) -> Scalar: @@ -2597,14 +2736,13 @@ cdef class Vec(Object): """ cdef PetscInt ival = asInt(index) cdef PetscScalar sval = 0 - CHKERR( VecGetValues(self.vec, 1, &ival, &sval) ) + CHKERR(VecGetValues(self.vec, 1, &ival, &sval)) return toScalar(sval) def getValues( self, indices: Sequence[int], - values: Sequence[Scalar] | None = None, - ) -> ArrayScalar: + values: Sequence[Scalar] | None = None) -> ArrayScalar: """Return values from certain locations in the vector. Not collective. @@ -2634,8 +2772,7 @@ cdef class Vec(Object): self, index: int, value: Scalar, - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Insert or add a single value in the vector. Not collective. 
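The same reflow covers the pointwise kernels and the value getters touched above. A short sketch of how they combine, again illustrative and outside the diff:

    from petsc4py import PETSc

    x = PETSc.Vec().createSeq(3)
    y = x.duplicate()
    z = x.duplicate()
    x.setValues([0, 1, 2], [1.0, 2.0, 3.0])
    y.setValues([0, 1, 2], [4.0, 5.0, 6.0])
    x.assemble()
    y.assemble()
    z.pointwiseMult(x, y)           # z_i = x_i * y_i  -> [4, 10, 18]
    print(z.getValues([0, 1, 2]))   # gather those entries
    z.pointwiseDivide(z, y)         # z_i = z_i / y_i  -> back to [1, 2, 3]
    print(z.maxPointwiseDivide(y))  # max_i |z_i / y_i| -> 0.5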
@@ -2666,14 +2803,13 @@ cdef class Vec(Object): cdef PetscInt ival = asInt(index) cdef PetscScalar sval = asScalar(value) cdef PetscInsertMode caddv = insertmode(addv) - CHKERR( VecSetValues(self.vec, 1, &ival, &sval, caddv) ) + CHKERR(VecSetValues(self.vec, 1, &ival, &sval, caddv)) def setValues( self, indices: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Insert or add multiple values in the vector. Not collective. @@ -2707,8 +2843,7 @@ cdef class Vec(Object): self, indices: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Insert or add blocks of values in the vector. Not collective. @@ -2759,7 +2894,7 @@ cdef class Vec(Object): setValues, setValuesLocal, getLGMap, petsc.VecSetLocalToGlobalMapping """ - CHKERR( VecSetLocalToGlobalMapping(self.vec, lgmap.lgm) ) + CHKERR(VecSetLocalToGlobalMapping(self.vec, lgmap.lgm)) def getLGMap(self) -> LGMap: """Return the local-to-global mapping. @@ -2772,16 +2907,15 @@ cdef class Vec(Object): """ cdef LGMap cmap = LGMap() - CHKERR( VecGetLocalToGlobalMapping(self.vec, &cmap.lgm) ) - CHKERR( PetscINCREF(cmap.obj) ) + CHKERR(VecGetLocalToGlobalMapping(self.vec, &cmap.lgm)) + CHKERR(PetscINCREF(cmap.obj)) return cmap def setValueLocal( self, index: int, value: Scalar, - addv: InsertModeSpec = None, - ): + addv: InsertModeSpec = None) -> None: """Insert or add a single value in the vector using a local numbering. Not collective. @@ -2812,14 +2946,13 @@ cdef class Vec(Object): cdef PetscInt ival = asInt(index) cdef PetscScalar sval = asScalar(value) cdef PetscInsertMode caddv = insertmode(addv) - CHKERR( VecSetValuesLocal(self.vec, 1, &ival, &sval, caddv) ) + CHKERR(VecSetValuesLocal(self.vec, 1, &ival, &sval, caddv)) def setValuesLocal( self, indices: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Insert or add multiple values in the vector with a local numbering. Not collective. @@ -2853,8 +2986,7 @@ cdef class Vec(Object): self, indices: Sequence[int], values: Sequence[Scalar], - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Insert or add blocks of values in the vector with a local numbering. Not collective. @@ -2898,7 +3030,7 @@ cdef class Vec(Object): assemblyEnd, petsc.VecAssemblyBegin """ - CHKERR( VecAssemblyBegin(self.vec) ) + CHKERR(VecAssemblyBegin(self.vec)) def assemblyEnd(self) -> None: """Finish the assembling stage initiated with `assemblyBegin`. @@ -2910,7 +3042,7 @@ cdef class Vec(Object): assemblyBegin, petsc.VecAssemblyEnd """ - CHKERR( VecAssemblyEnd(self.vec) ) + CHKERR(VecAssemblyEnd(self.vec)) def assemble(self) -> None: """Assemble the vector. @@ -2922,8 +3054,8 @@ cdef class Vec(Object): assemblyBegin, assemblyEnd """ - CHKERR( VecAssemblyBegin(self.vec) ) - CHKERR( VecAssemblyEnd(self.vec) ) + CHKERR(VecAssemblyBegin(self.vec)) + CHKERR(VecAssemblyEnd(self.vec)) # --- methods for strided vectors --- @@ -2946,7 +3078,7 @@ cdef class Vec(Object): """ cdef PetscInt ival = asInt(field) cdef PetscScalar sval = asScalar(alpha) - CHKERR( VecStrideScale(self.vec, ival, sval) ) + CHKERR(VecStrideScale(self.vec, ival, sval)) def strideSum(self, field: int) -> Scalar: """Sum subvector entries. 
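These hunks touch the insertion and assembly entry points. One PETSc rule worth keeping in mind while reading them: insertions and additions may not be interleaved without an assembly in between. A hypothetical sketch, not taken from the commit:

    from petsc4py import PETSc

    v = PETSc.Vec().createSeq(4)
    v.set(0.0)
    v.setValue(0, 10.0)                 # insert a single entry
    v.setValues([1, 2], [20.0, 30.0])   # insert several at once
    v.assemble()                        # = assemblyBegin + assemblyEnd
    # Switching from inserting to accumulating requires the assembly above.
    v.setValue(2, 5.0, addv=PETSc.InsertMode.ADD_VALUES)
    v.assemble()
    print(v.getArray())                 # [10. 20. 35.  0.]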
@@ -2968,7 +3100,7 @@ cdef class Vec(Object): """ cdef PetscInt ival = asInt(field) cdef PetscScalar sval = 0 - CHKERR( VecStrideSum(self.vec, ival, &sval) ) + CHKERR(VecStrideSum(self.vec, ival, &sval)) return toScalar(sval) def strideMin(self, field: int) -> tuple[int, float]: @@ -2999,7 +3131,7 @@ cdef class Vec(Object): cdef PetscInt ival1 = asInt(field) cdef PetscInt ival2 = 0 cdef PetscReal rval = 0 - CHKERR( VecStrideMin(self.vec, ival1, &ival2, &rval) ) + CHKERR(VecStrideMin(self.vec, ival1, &ival2, &rval)) return (toInt(ival2), toReal(rval)) def strideMax(self, field: int) -> tuple[int, float]: @@ -3030,14 +3162,13 @@ cdef class Vec(Object): cdef PetscInt ival1 = asInt(field) cdef PetscInt ival2 = 0 cdef PetscReal rval = 0 - CHKERR( VecStrideMax(self.vec, ival1, &ival2, &rval) ) + CHKERR(VecStrideMax(self.vec, ival1, &ival2, &rval)) return (toInt(ival2), toReal(rval)) def strideNorm( self, field: int, - norm_type: NormTypeSpec = None, - ) -> float | tuple[float, float]: + norm_type: NormTypeSpec = None) -> float | tuple[float, float]: """Return the norm of entries in a subvector. Collective. @@ -3062,7 +3193,7 @@ cdef class Vec(Object): cdef PetscNormType ntype = PETSC_NORM_2 if norm_type is not None: ntype = norm_type cdef PetscReal rval[2] - CHKERR( VecStrideNorm(self.vec, ival, ntype, rval) ) + CHKERR(VecStrideNorm(self.vec, ival, ntype, rval)) if ntype != norm_1_2: return toReal(rval[0]) else: return (toReal(rval[0]), toReal(rval[1])) @@ -3070,8 +3201,7 @@ cdef class Vec(Object): self, field: int, Vec vec, - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Scatter entries into a component of another vector. Collective. @@ -3096,14 +3226,13 @@ cdef class Vec(Object): """ cdef PetscInt ival = asInt(field) cdef PetscInsertMode caddv = insertmode(addv) - CHKERR( VecStrideScatter(self.vec, ival, vec.vec, caddv) ) + CHKERR(VecStrideScatter(self.vec, ival, vec.vec, caddv)) def strideGather( self, field: int, Vec vec, - addv: InsertModeSpec = None, - ) -> None: + addv: InsertModeSpec = None) -> None: """Insert component values into a single-component vector. Collective. @@ -3128,7 +3257,7 @@ cdef class Vec(Object): """ cdef PetscInt ival = asInt(field) cdef PetscInsertMode caddv = insertmode(addv) - CHKERR( VecStrideGather(self.vec, ival, vec.vec, caddv) ) + CHKERR(VecStrideGather(self.vec, ival, vec.vec, caddv)) # --- methods for vectors with ghost values --- @@ -3168,8 +3297,7 @@ cdef class Vec(Object): def ghostUpdateBegin( self, addv: InsertModeSpec = None, - mode: ScatterModeSpec = None, - ) -> None: + mode: ScatterModeSpec = None) -> None: """Begin updating ghosted vector entries. Neighborwise collective. @@ -3181,13 +3309,12 @@ cdef class Vec(Object): """ cdef PetscInsertMode caddv = insertmode(addv) cdef PetscScatterMode csctm = scattermode(mode) - CHKERR( VecGhostUpdateBegin(self.vec, caddv, csctm) ) + CHKERR(VecGhostUpdateBegin(self.vec, caddv, csctm)) def ghostUpdateEnd( self, addv: InsertModeSpec = None, - mode: ScatterModeSpec = None, - ) -> None: + mode: ScatterModeSpec = None) -> None: """Finish updating ghosted vector entries initiated with `ghostUpdateBegin`. Neighborwise collective. 
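The stride methods above act on a single component of an interleaved (blocked) vector. A sketch, assuming the vector is created with a block size via the `bsize` argument of `createSeq`:

    from petsc4py import PETSc

    # Interleaved two-component vector: [u0, p0, u1, p1, u2, p2]
    v = PETSc.Vec().createSeq(6, bsize=2)
    v.setValues(range(6), [1.0, 10.0, 2.0, 20.0, 3.0, 30.0])
    v.assemble()
    print(v.strideSum(0))    # sum of the u-component -> 6.0
    print(v.strideMax(1))    # (location, value) of the p-component max
    v.strideScale(1, 0.1)    # scale only the p-component
    print(v.strideNorm(1, PETSc.NormType.NORM_INFINITY))  # -> 3.0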
@@ -3199,13 +3326,12 @@ cdef class Vec(Object): """ cdef PetscInsertMode caddv = insertmode(addv) cdef PetscScatterMode csctm = scattermode(mode) - CHKERR( VecGhostUpdateEnd(self.vec, caddv, csctm) ) + CHKERR(VecGhostUpdateEnd(self.vec, caddv, csctm)) def ghostUpdate( self, addv: InsertModeSpec = None, - mode: ScatterModeSpec = None, - ) -> None: + mode: ScatterModeSpec = None) -> None: """Update ghosted vector entries. Neighborwise collective. @@ -3234,8 +3360,8 @@ cdef class Vec(Object): """ cdef PetscInsertMode caddv = insertmode(addv) cdef PetscScatterMode csctm = scattermode(mode) - CHKERR( VecGhostUpdateBegin(self.vec, caddv, csctm) ) - CHKERR( VecGhostUpdateEnd(self.vec, caddv, csctm) ) + CHKERR(VecGhostUpdateBegin(self.vec, caddv, csctm)) + CHKERR(VecGhostUpdateEnd(self.vec, caddv, csctm)) def setMPIGhost(self, ghosts: Sequence[int]) -> None: """Set the ghost points for a ghosted vector. @@ -3254,7 +3380,7 @@ cdef class Vec(Object): """ cdef PetscInt ng=0, *ig=NULL ghosts = iarray_i(ghosts, &ng, &ig) - CHKERR( VecMPISetGhost(self.vec, ng, ig) ) + CHKERR(VecMPISetGhost(self.vec, ng, ig)) # @@ -3280,8 +3406,8 @@ cdef class Vec(Object): """ if subvec is None: subvec = Vec() - else: CHKERR( VecDestroy(&subvec.vec) ) - CHKERR( VecGetSubVector(self.vec, iset.iset, &subvec.vec) ) + else: CHKERR(VecDestroy(&subvec.vec)) + CHKERR(VecGetSubVector(self.vec, iset.iset, &subvec.vec)) return subvec def restoreSubVector(self, IS iset, Vec subvec) -> None: @@ -3301,7 +3427,7 @@ cdef class Vec(Object): getSubVector, petsc.VecRestoreSubVector """ - CHKERR( VecRestoreSubVector(self.vec, iset.iset, &subvec.vec) ) + CHKERR(VecRestoreSubVector(self.vec, iset.iset, &subvec.vec)) def getNestSubVecs(self) -> list[Vec]: """Return all the vectors contained in the nested vector. @@ -3315,21 +3441,20 @@ cdef class Vec(Object): """ cdef PetscInt N=0 cdef PetscVec* sx=NULL - CHKERR( VecNestGetSubVecs(self.vec, &N, &sx) ) + CHKERR(VecNestGetSubVecs(self.vec, &N, &sx)) output = [] for i in range(N): - pyvec = Vec() - pyvec.vec = sx[i] - CHKERR( PetscObjectReference( pyvec.vec) ) - output.append(pyvec) + pyvec = Vec() + pyvec.vec = sx[i] + CHKERR(PetscObjectReference( pyvec.vec)) + output.append(pyvec) return output def setNestSubVecs( self, sx: Sequence[Vec], - idxm: Sequence[int] | None = None, - ) -> None: + idxm: Sequence[int] | None = None) -> None: """Set the component vectors at specified indices in the nested vector. Not collective. @@ -3353,10 +3478,10 @@ cdef class Vec(Object): idxm = iarray_i(idxm, &N, &cidxm) cdef PetscVec* csx = NULL - tmp = oarray_p(empty_p(N), NULL, &csx) + cdef object unused = oarray_p(empty_p(N), NULL, &csx) for i from 0 <= i < N: csx[i] = (sx[i]).vec - CHKERR( VecNestSetSubVecs(self.vec, N, cidxm, csx) ) + CHKERR(VecNestSetSubVecs(self.vec, N, cidxm, csx)) # @@ -3370,7 +3495,7 @@ cdef class Vec(Object): getDM, petsc.VecSetDM """ - CHKERR( VecSetDM(self.vec, dm.dm) ) + CHKERR(VecSetDM(self.vec, dm.dm)) def getDM(self) -> DM: """Return the `DM` associated to the vector. 
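`ghostUpdate` and `setMPIGhost` only make sense for ghosted vectors. A parallel sketch, meaningful with at least two ranks (run e.g. under `mpiexec -n 2`); with a single rank the ghost index wraps back to the owner and the example degenerates:

    from petsc4py import PETSc

    comm = PETSc.COMM_WORLD
    rank, size = comm.getRank(), comm.getSize()
    nlocal = 3
    N = nlocal * size
    ghosts = [(nlocal * (rank + 1)) % N]   # first entry of the next rank
    v = PETSc.Vec().createGhost(ghosts, (nlocal, N), comm=comm)
    v.set(float(rank))                     # set the owned entries
    v.ghostUpdate(addv=PETSc.InsertMode.INSERT,
                  mode=PETSc.ScatterMode.FORWARD)   # owners -> ghosts
    with v.localForm() as loc:             # owned entries plus ghost slots
        print(rank, loc.getArray())        # last slot: next rank's value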
@@ -3383,7 +3508,7 @@ cdef class Vec(Object): """ cdef DM dm = DM() - CHKERR( VecGetDM(self.vec, &dm.dm) ) + CHKERR(VecGetDM(self.vec, &dm.dm)) return dm # @@ -3392,6 +3517,7 @@ cdef class Vec(Object): """The local and global vector sizes.""" def __get__(self) -> LayoutSizeSpec: return self.getSizes() + def __set__(self, value): self.setSizes(value) @@ -3434,6 +3560,7 @@ cdef class Vec(Object): """Writeable `ndarray` containing the local portion of the vector.""" def __get__(self) -> ArrayScalar: return self.getArray() + def __set__(self, value): cdef buf = self.getBuffer() with buf as array: array[:] = value @@ -3452,6 +3579,7 @@ cdef class Vec(Object): """Alias for `array_w`.""" def __get__(self) -> ArrayScalar: return self.array_w + def __set__(self, value): self.array_w = value diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/Viewer.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/Viewer.pyx index 63a2791e426..552e7393c19 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/Viewer.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/Viewer.pyx @@ -17,6 +17,7 @@ class ViewerType(object): ADIOS = S_(PETSCVIEWERADIOS) EXODUSII = S_(PETSCVIEWEREXODUSII) + class ViewerFormat(object): """Viewer format.""" DEFAULT = PETSC_VIEWER_DEFAULT @@ -58,6 +59,7 @@ class ViewerFormat(object): LOAD_BALANCE = PETSC_VIEWER_LOAD_BALANCE FAILED = PETSC_VIEWER_FAILED + class ViewerFileMode(object): """Viewer file mode.""" # native @@ -70,6 +72,7 @@ class ViewerFileMode(object): R, W, A, U = READ, WRITE, APPEND, UPDATE AU = UA = APPEND_UPDATE + class ViewerDrawSize(object): """Window size.""" # native @@ -85,12 +88,14 @@ class ViewerDrawSize(object): # -------------------------------------------------------------------- + cdef class Viewer(Object): """Viewer object. Viewer is described in the `PETSc manual `. - Viewers can be called as functions where the argument specified is the PETSc object to be viewed. See the example below. + Viewers can be called as functions where the argument specified + is the PETSc object to be viewed. See the example below. Examples -------- @@ -127,7 +132,7 @@ cdef class Viewer(Object): def __call__(self, Object obj) -> None: """View a generic object.""" assert obj.obj != NULL - CHKERR( PetscObjectView(obj.obj[0], self.vwr) ) + CHKERR(PetscObjectView(obj.obj[0], self.vwr)) # @@ -152,12 +157,12 @@ cdef class Viewer(Object): """ if obj is None: - CHKERR( PetscViewerView(self.vwr, NULL) ) + CHKERR(PetscViewerView(self.vwr, NULL)) elif isinstance(obj, Viewer): - CHKERR( PetscViewerView(self.vwr, (obj).vwr) ) + CHKERR(PetscViewerView(self.vwr, (obj).vwr)) else: assert (obj).obj != NULL - CHKERR( PetscObjectView((obj).obj[0], self.vwr) ) + CHKERR(PetscObjectView((obj).obj[0], self.vwr)) def destroy(self) -> Self: """Destroy the viewer. 
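This is the feature the reflowed class docstring advertises: a `Viewer` instance is callable, and calling it on any PETSc object dispatches `PetscObjectView`. A tiny sketch, outside the diff:

    from petsc4py import PETSc

    vec = PETSc.Vec().createSeq(3)
    vec.set(1.0)
    viewer = PETSc.Viewer.STDOUT()
    viewer(vec)         # same effect as vec.view(viewer)
    viewer.view(vec)    # view() also accepts an object (or another viewer)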
@@ -169,7 +174,7 @@ cdef class Viewer(Object): petsc.PetscViewerDestroy """ - CHKERR( PetscViewerDestroy(&self.vwr) ) + CHKERR(PetscViewerDestroy(&self.vwr)) return self def create(self, comm: Comm | None = None) -> Self: @@ -189,8 +194,8 @@ cdef class Viewer(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerCreate(ccomm, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr + CHKERR(PetscViewerCreate(ccomm, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr return self def createASCII( @@ -198,7 +203,7 @@ cdef class Viewer(Object): name: str, mode: FileMode | str | None = None, comm: Comm | None = None, - ) -> Self: + ) -> Self: """Create a viewer of type `Type.ASCII`. Collective. @@ -224,11 +229,11 @@ cdef class Viewer(Object): cdef PetscFileMode cmode = PETSC_FILE_MODE_WRITE if mode is not None: cmode = filemode(mode) cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerCreate(ccomm, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr - CHKERR( PetscViewerSetType(self.vwr, PETSCVIEWERASCII) ) - CHKERR( PetscViewerFileSetMode(self.vwr, cmode) ) - CHKERR( PetscViewerFileSetName(self.vwr, cname) ) + CHKERR(PetscViewerCreate(ccomm, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr + CHKERR(PetscViewerSetType(self.vwr, PETSCVIEWERASCII)) + CHKERR(PetscViewerFileSetMode(self.vwr, cmode)) + CHKERR(PetscViewerFileSetName(self.vwr, cname)) return self def createBinary( @@ -236,7 +241,7 @@ cdef class Viewer(Object): name: str, mode: FileMode | str | None = None, comm: Comm | None = None, - ) -> Self: + ) -> Self: """Create a viewer of type `Type.BINARY`. Collective. @@ -260,8 +265,8 @@ cdef class Viewer(Object): name = str2bytes(name, &cname) cdef PetscFileMode cmode = filemode(mode) cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerBinaryOpen(ccomm, cname, cmode, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr + CHKERR(PetscViewerBinaryOpen(ccomm, cname, cmode, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr return self def createMPIIO( @@ -269,7 +274,7 @@ cdef class Viewer(Object): name: str, mode: FileMode | str | None = None, comm: Comm | None = None, - ) -> Self: + ) -> Self: """Create a viewer of type `Type.BINARY` supporting MPI-IO. Collective. @@ -293,12 +298,12 @@ cdef class Viewer(Object): name = str2bytes(name, &cname) cdef PetscFileMode cmode = filemode(mode) cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerCreate(ccomm, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr - CHKERR( PetscViewerSetType(self.vwr, PETSCVIEWERBINARY) ) - CHKERR( PetscViewerBinarySetUseMPIIO(self.vwr, PETSC_TRUE) ) - CHKERR( PetscViewerFileSetMode(self.vwr, cmode) ) - CHKERR( PetscViewerFileSetName(self.vwr, cname) ) + CHKERR(PetscViewerCreate(ccomm, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr + CHKERR(PetscViewerSetType(self.vwr, PETSCVIEWERBINARY)) + CHKERR(PetscViewerBinarySetUseMPIIO(self.vwr, PETSC_TRUE)) + CHKERR(PetscViewerFileSetMode(self.vwr, cmode)) + CHKERR(PetscViewerFileSetName(self.vwr, cname)) return self def createVTK( @@ -306,7 +311,7 @@ cdef class Viewer(Object): name: str, mode: FileMode | str | None = None, comm: Comm | None = None, - ) -> Self: + ) -> Self: """Create a viewer of type `Type.VTK`. Collective. 
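`createBinary` is the usual round-trip companion of `Vec.view`. A sketch of a write/read cycle; note that `Vec.load` does not appear in this diff, it is the standard petsc4py read-side counterpart:

    from petsc4py import PETSc

    x = PETSc.Vec().createSeq(5)
    x.set(3.14)
    w = PETSc.Viewer().createBinary('x.dat', mode='w')
    x.view(w)                        # write the vector
    w.destroy()

    r = PETSc.Viewer().createBinary('x.dat', mode='r')
    y = PETSc.Vec().create()
    y.load(r)                        # read it back
    r.destroy()
    assert y.equal(x)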
@@ -330,11 +335,11 @@ cdef class Viewer(Object): name = str2bytes(name, &cname) cdef PetscFileMode cmode = filemode(mode) cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerCreate(ccomm, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr - CHKERR( PetscViewerSetType(self.vwr, PETSCVIEWERVTK) ) - CHKERR( PetscViewerFileSetMode(self.vwr, cmode) ) - CHKERR( PetscViewerFileSetName(self.vwr, cname) ) + CHKERR(PetscViewerCreate(ccomm, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr + CHKERR(PetscViewerSetType(self.vwr, PETSCVIEWERVTK)) + CHKERR(PetscViewerFileSetMode(self.vwr, cmode)) + CHKERR(PetscViewerFileSetName(self.vwr, cname)) return self def createHDF5( @@ -342,7 +347,7 @@ cdef class Viewer(Object): name: str, mode: FileMode | str | None = None, comm: Comm | None = None, - ) -> Self: + ) -> Self: """Create a viewer of type `Type.HDF5`. Collective. @@ -366,11 +371,11 @@ cdef class Viewer(Object): name = str2bytes(name, &cname) cdef PetscFileMode cmode = filemode(mode) cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerCreate(ccomm, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr - CHKERR( PetscViewerSetType(self.vwr, PETSCVIEWERHDF5) ) - CHKERR( PetscViewerFileSetMode(self.vwr, cmode) ) - CHKERR( PetscViewerFileSetName(self.vwr, cname) ) + CHKERR(PetscViewerCreate(ccomm, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr + CHKERR(PetscViewerSetType(self.vwr, PETSCVIEWERHDF5)) + CHKERR(PetscViewerFileSetMode(self.vwr, cmode)) + CHKERR(PetscViewerFileSetName(self.vwr, cname)) return self def createDraw( @@ -380,7 +385,7 @@ cdef class Viewer(Object): position: tuple[int, int] | None = None, size: tuple[int, int] | int | None = None, comm: Comm | None = None, - ) -> Self: + ) -> Self: """Create a `Type.DRAW` viewer. Collective. @@ -418,9 +423,9 @@ cdef class Viewer(Object): except TypeError: w = h = size cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerDrawOpen(ccomm, cdisplay, ctitle, - x, y, w, h, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr + CHKERR(PetscViewerDrawOpen(ccomm, cdisplay, ctitle, + x, y, w, h, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr return self def setType(self, vwr_type: Type | str) -> None: @@ -440,7 +445,7 @@ cdef class Viewer(Object): """ cdef PetscViewerType cval = NULL vwr_type = str2bytes(vwr_type, &cval) - CHKERR( PetscViewerSetType(self.vwr, cval) ) + CHKERR(PetscViewerSetType(self.vwr, cval)) def getType(self) -> str: """Return the type of the viewer. @@ -453,7 +458,7 @@ cdef class Viewer(Object): """ cdef PetscViewerType cval = NULL - CHKERR( PetscViewerGetType(self.vwr, &cval) ) + CHKERR(PetscViewerGetType(self.vwr, &cval)) return bytes2str(cval) def getFormat(self) -> Format: @@ -467,7 +472,7 @@ cdef class Viewer(Object): """ cdef PetscViewerFormat format = PETSC_VIEWER_DEFAULT - CHKERR( PetscViewerGetFormat(self.vwr, &format) ) + CHKERR(PetscViewerGetFormat(self.vwr, &format)) return format def pushFormat(self, format: Format) -> None: @@ -480,7 +485,7 @@ cdef class Viewer(Object): popFormat, petsc.PetscViewerPushFormat """ - CHKERR( PetscViewerPushFormat(self.vwr, format) ) + CHKERR(PetscViewerPushFormat(self.vwr, format)) def popFormat(self) -> None: """Pop format from the viewer. @@ -492,7 +497,7 @@ cdef class Viewer(Object): pushFormat, petsc.PetscViewerPopFormat """ - CHKERR( PetscViewerPopFormat(self.vwr) ) + CHKERR(PetscViewerPopFormat(self.vwr)) def getSubViewer(self, comm: Comm | None = None) -> Viewer: """Return a viewer defined on a subcommunicator. 
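`pushFormat`/`popFormat` maintain a stack, so a format can be changed temporarily and then restored. A sketch (`ASCII_MATLAB` is one of the `ViewerFormat` values listed earlier in this file):

    from petsc4py import PETSc

    v = PETSc.Vec().createSeq(3)
    v.set(2.0)
    viewer = PETSc.Viewer.STDOUT()
    viewer.pushFormat(PETSc.Viewer.Format.ASCII_MATLAB)
    viewer(v)            # rendered as a MATLAB assignment
    viewer.popFormat()   # back to the previous (default) format
    viewer(v)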
@@ -515,7 +520,7 @@ cdef class Viewer(Object): """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_SELF) cdef Viewer sub = Viewer() - CHKERR( PetscViewerGetSubViewer(self.vwr, ccomm, &sub.vwr) ) + CHKERR(PetscViewerGetSubViewer(self.vwr, ccomm, &sub.vwr)) return sub def restoreSubViewer(self, Viewer sub) -> None: @@ -534,7 +539,7 @@ cdef class Viewer(Object): """ cdef MPI_Comm ccomm = def_Comm(sub.getComm(), PETSC_COMM_SELF) - CHKERR( PetscViewerRestoreSubViewer(self.vwr, ccomm, &sub.vwr) ) + CHKERR(PetscViewerRestoreSubViewer(self.vwr, ccomm, &sub.vwr)) @classmethod def STDOUT(cls, comm: Comm | None = None) -> Viewer: @@ -551,7 +556,7 @@ cdef class Viewer(Object): cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef Viewer viewer = Viewer() viewer.vwr = PETSC_VIEWER_STDOUT_(ccomm) - CHKERR( PetscINCREF(viewer.obj) ) + CHKERR(PetscINCREF(viewer.obj)) return viewer @classmethod @@ -569,7 +574,7 @@ cdef class Viewer(Object): cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef Viewer viewer = Viewer() viewer.vwr = PETSC_VIEWER_STDERR_(ccomm) - CHKERR( PetscINCREF(viewer.obj) ) + CHKERR(PetscINCREF(viewer.obj)) return viewer @classmethod @@ -590,7 +595,7 @@ cdef class Viewer(Object): cdef const char *cname = NULL name = str2bytes(name, &cname) cdef Viewer viewer = Viewer() - CHKERR( PetscViewerASCIIOpen(ccomm, cname, &viewer.vwr) ) + CHKERR(PetscViewerASCIIOpen(ccomm, cname, &viewer.vwr)) return viewer @classmethod @@ -608,7 +613,7 @@ cdef class Viewer(Object): cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef Viewer viewer = Viewer() viewer.vwr = PETSC_VIEWER_BINARY_(ccomm) - CHKERR( PetscINCREF(viewer.obj) ) + CHKERR(PetscINCREF(viewer.obj)) return viewer @classmethod @@ -626,7 +631,7 @@ cdef class Viewer(Object): cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef Viewer viewer = Viewer() viewer.vwr = PETSC_VIEWER_DRAW_(ccomm) - CHKERR( PetscINCREF(viewer.obj) ) + CHKERR(PetscINCREF(viewer.obj)) return viewer # --- ASCII viewers --- @@ -642,7 +647,7 @@ cdef class Viewer(Object): """ cdef PetscInt ctabs = asInt(tabs) - CHKERR( PetscViewerASCIISetTab(self.vwr, ctabs) ) + CHKERR(PetscViewerASCIISetTab(self.vwr, ctabs)) def getASCIITab(self) -> int: """Return the ASCII tab level. @@ -655,10 +660,10 @@ cdef class Viewer(Object): """ cdef PetscInt tabs = 0 - CHKERR( PetscViewerASCIIGetTab(self.vwr, &tabs) ) + CHKERR(PetscViewerASCIIGetTab(self.vwr, &tabs)) return toInt(tabs) - def addASCIITab(self, tabs: int): + def addASCIITab(self, tabs: int) -> None: """Increment the ASCII tab level. Collective. @@ -669,7 +674,7 @@ cdef class Viewer(Object): """ cdef PetscInt ctabs = asInt(tabs) - CHKERR( PetscViewerASCIIAddTab(self.vwr, ctabs) ) + CHKERR(PetscViewerASCIIAddTab(self.vwr, ctabs)) def subtractASCIITab(self, tabs: int) -> None: """Decrement the ASCII tab level. @@ -682,7 +687,7 @@ cdef class Viewer(Object): """ cdef PetscInt ctabs = asInt(tabs) - CHKERR( PetscViewerASCIISubtractTab(self.vwr, ctabs) ) + CHKERR(PetscViewerASCIISubtractTab(self.vwr, ctabs)) def pushASCIISynchronized(self) -> None: """Allow ASCII synchronized calls. @@ -695,7 +700,7 @@ cdef class Viewer(Object): petsc.PetscViewerASCIIPushSynchronized """ - CHKERR( PetscViewerASCIIPushSynchronized(self.vwr) ) + CHKERR(PetscViewerASCIIPushSynchronized(self.vwr)) def popASCIISynchronized(self) -> None: """Disallow ASCII synchronized calls. 
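The ASCII tab calls adjust an indentation level kept inside the viewer. A small sketch using the `ASCII` constructor from the hunk above:

    from petsc4py import PETSc

    viewer = PETSc.Viewer.ASCII('log.txt')   # ASCII file viewer
    print(viewer.getASCIITab())              # initial tab level: 0
    viewer.addASCIITab(2)                    # indent subsequent output
    viewer.subtractASCIITab(1)               # and partially undo it
    print(viewer.getASCIITab())              # now 1
    viewer.destroy()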
@@ -708,7 +713,7 @@ cdef class Viewer(Object): petsc.PetscViewerASCIIPopSynchronized """ - CHKERR( PetscViewerASCIIPopSynchronized(self.vwr) ) + CHKERR(PetscViewerASCIIPopSynchronized(self.vwr)) def pushASCIITab(self) -> None: """Push an additional tab level. @@ -720,7 +725,7 @@ cdef class Viewer(Object): popASCIITab, petsc.PetscViewerASCIIPushTab """ - CHKERR( PetscViewerASCIIPushTab(self.vwr) ) + CHKERR(PetscViewerASCIIPushTab(self.vwr)) def popASCIITab(self) -> None: """Pop an additional tab level pushed via `pushASCIITab`. @@ -732,7 +737,7 @@ cdef class Viewer(Object): pushASCIITab, petsc.PetscViewerASCIIPopTab """ - CHKERR( PetscViewerASCIIPopTab(self.vwr) ) + CHKERR(PetscViewerASCIIPopTab(self.vwr)) def useASCIITabs(self, flag: bool) -> None: """Enable/disable the use of ASCII tabs. @@ -745,7 +750,7 @@ cdef class Viewer(Object): """ cdef PetscBool flg = asBool(flag) - CHKERR( PetscViewerASCIIUseTabs(self.vwr, flg) ) + CHKERR(PetscViewerASCIIUseTabs(self.vwr, flg)) def printfASCII(self, msg: str) -> None: """Print a message. @@ -759,7 +764,7 @@ cdef class Viewer(Object): """ cdef const char *cmsg = NULL msg = str2bytes(msg, &cmsg) - CHKERR( PetscViewerASCIIPrintf(self.vwr, '%s', cmsg) ) + CHKERR(PetscViewerASCIIPrintf(self.vwr, '%s', cmsg)) def printfASCIISynchronized(self, msg: str) -> None: """Print a synchronized message. @@ -773,7 +778,7 @@ cdef class Viewer(Object): """ cdef const char *cmsg = NULL msg = str2bytes(msg, &cmsg) - CHKERR( PetscViewerASCIISynchronizedPrintf(self.vwr, '%s', cmsg) ) + CHKERR(PetscViewerASCIISynchronizedPrintf(self.vwr, '%s', cmsg)) # --- methods specific to file viewers --- @@ -787,7 +792,7 @@ cdef class Viewer(Object): petsc.PetscViewerFlush """ - CHKERR( PetscViewerFlush(self.vwr) ) + CHKERR(PetscViewerFlush(self.vwr)) def setFileMode(self, mode: FileMode | str) -> None: """Set file mode. @@ -799,7 +804,7 @@ cdef class Viewer(Object): getFileMode, petsc.PetscViewerFileSetMode """ - CHKERR( PetscViewerFileSetMode(self.vwr, filemode(mode)) ) + CHKERR(PetscViewerFileSetMode(self.vwr, filemode(mode))) def getFileMode(self) -> FileMode: """Return the file mode. @@ -812,7 +817,7 @@ cdef class Viewer(Object): """ cdef PetscFileMode mode = PETSC_FILE_MODE_READ - CHKERR( PetscViewerFileGetMode(self.vwr, &mode) ) + CHKERR(PetscViewerFileGetMode(self.vwr, &mode)) return mode def setFileName(self, name: str) -> None: @@ -827,7 +832,7 @@ cdef class Viewer(Object): """ cdef const char *cval = NULL name = str2bytes(name, &cval) - CHKERR( PetscViewerFileSetName(self.vwr, cval) ) + CHKERR(PetscViewerFileSetName(self.vwr, cval)) def getFileName(self) -> str: """Return file name. @@ -840,7 +845,7 @@ cdef class Viewer(Object): """ cdef const char *cval = NULL - CHKERR( PetscViewerFileGetName(self.vwr, &cval) ) + CHKERR(PetscViewerFileGetName(self.vwr, &cval)) return bytes2str(cval) # --- methods specific to draw viewers --- @@ -851,7 +856,7 @@ cdef class Viewer(Object): title: str | None = None, position: tuple[int, int] | None = None, size: tuple[int, int] | int | None = None, - ) -> None: + ) -> None: """Set window information for a `Type.DRAW` viewer. Collective. @@ -885,9 +890,9 @@ cdef class Viewer(Object): w, h = size except TypeError: w = h = size - CHKERR( PetscViewerDrawSetInfo(self.vwr, - cdisplay, ctitle, - x, y, w, h) ) + CHKERR(PetscViewerDrawSetInfo(self.vwr, + cdisplay, ctitle, + x, y, w, h)) def clearDraw(self) -> None: """Reset graphics. 
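`printfASCII` honors the tab stack, which is what `pushASCIITab`/`popASCIITab` are for. A sketch, again purely illustrative:

    from petsc4py import PETSc

    viewer = PETSc.Viewer.STDOUT()
    viewer.printfASCII('solver summary\n')
    viewer.pushASCIITab()                    # one extra indent level
    viewer.printfASCII('iterations: 7\n')    # printed indented
    viewer.popASCIITab()
    viewer.flush()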
@@ -899,55 +904,162 @@ cdef class Viewer(Object): petsc.PetscViewerDrawClear """ - CHKERR( PetscViewerDrawClear(self.vwr) ) + CHKERR(PetscViewerDrawClear(self.vwr)) # -------------------------------------------------------------------- cdef class ViewerHDF5(Viewer): + """Viewer object for HDF5 file formats. + + Viewer is described in the `PETSc manual `. + + See Also + -------- + Viewer + + """ + + def create( + self, + name: str, + mode: Viewer.FileMode | str | None = None, + comm: Comm | None = None, + ) -> Self: + """Create a viewer of type `Type.HDF5`. + + Collective. + + Parameters + ---------- + name + The filename associated with the viewer. + mode + The mode type. + comm + MPI communicator, defaults to `Sys.getDefaultComm`. - def create(self, name, mode=None, comm=None): + See Also + -------- + Viewer.createHDF5 + + """ cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) cdef const char *cname = NULL name = str2bytes(name, &cname) cdef PetscFileMode cmode = filemode(mode) cdef PetscViewer newvwr = NULL - CHKERR( PetscViewerCreate(ccomm, &newvwr) ) - CHKERR( PetscCLEAR(self.obj) ); self.vwr = newvwr - CHKERR( PetscViewerSetType(self.vwr, PETSCVIEWERHDF5) ) - CHKERR( PetscViewerFileSetMode(self.vwr, cmode) ) - CHKERR( PetscViewerFileSetName(self.vwr, cname) ) + CHKERR(PetscViewerCreate(ccomm, &newvwr)) + CHKERR(PetscCLEAR(self.obj)); self.vwr = newvwr + CHKERR(PetscViewerSetType(self.vwr, PETSCVIEWERHDF5)) + CHKERR(PetscViewerFileSetMode(self.vwr, cmode)) + CHKERR(PetscViewerFileSetName(self.vwr, cname)) return self - def pushTimestepping(self): - CHKERR( PetscViewerHDF5PushTimestepping(self.vwr) ) + def pushTimestepping(self) -> None: + """Activate the timestepping mode. + + Logically collective. + + See Also + -------- + popTimestepping, petsc.PetscViewerHDF5PushTimestepping + + """ + CHKERR(PetscViewerHDF5PushTimestepping(self.vwr)) - def popTimestepping(self): - CHKERR( PetscViewerHDF5PopTimestepping(self.vwr) ) + def popTimestepping(self) -> None: + """Deactivate the timestepping mode. - def getTimestep(self): + Logically collective. + + See Also + -------- + pushTimestepping, petsc.PetscViewerHDF5PopTimestepping + + """ + CHKERR(PetscViewerHDF5PopTimestepping(self.vwr)) + + def getTimestep(self) -> int: + """Return the current time step. + + Not collective. + + See Also + -------- + pushTimestepping, setTimestep, incrementTimestep + petsc.PetscViewerHDF5GetTimestep + + """ cdef PetscInt ctimestep = 0 - CHKERR( PetscViewerHDF5GetTimestep(self.vwr, &ctimestep) ) + CHKERR(PetscViewerHDF5GetTimestep(self.vwr, &ctimestep)) return toInt(ctimestep) - def setTimestep(self, timestep): - CHKERR( PetscViewerHDF5SetTimestep(self.vwr, asInt(timestep)) ) + def setTimestep(self, timestep: int) -> None: + """Set the current time step. + + Logically collective. + + See Also + -------- + pushTimestepping, getTimestep, incrementTimestep + petsc.PetscViewerHDF5SetTimestep + + """ + CHKERR(PetscViewerHDF5SetTimestep(self.vwr, asInt(timestep))) + + def incrementTimestep(self) -> None: + """Increment the time step. - def incrementTimestep(self): - CHKERR( PetscViewerHDF5IncrementTimestep(self.vwr) ) + Logically collective. + + See Also + -------- + pushTimestepping, setTimestep, getTimestep + petsc.PetscViewerHDF5IncrementTimestep + + """ + CHKERR(PetscViewerHDF5IncrementTimestep(self.vwr)) - def pushGroup(self, group): + def pushGroup(self, group: str) -> None: + """Set the current group. + + Logically collective. 
+ + See Also + -------- + popGroup, getGroup, petsc.PetscViewerHDF5PushGroup + + """ cdef const char *cgroup = NULL group = str2bytes(group, &cgroup) - CHKERR( PetscViewerHDF5PushGroup(self.vwr, cgroup) ) + CHKERR(PetscViewerHDF5PushGroup(self.vwr, cgroup)) + + def popGroup(self) -> None: + """Pop the current group from the stack. - def popGroup(self): - CHKERR( PetscViewerHDF5PopGroup(self.vwr) ) + Logically collective. - def getGroup(self): + See Also + -------- + pushGroup, getGroup, petsc.PetscViewerHDF5PopGroup + + """ + CHKERR(PetscViewerHDF5PopGroup(self.vwr)) + + def getGroup(self) -> str: + """Return the current group. + + Not collective. + + See Also + -------- + pushGroup, popGroup, petsc.PetscViewerHDF5GetGroup + + """ cdef char *cgroup = NULL - CHKERR( PetscViewerHDF5GetGroup(self.vwr, NULL, &cgroup) ) + CHKERR(PetscViewerHDF5GetGroup(self.vwr, NULL, &cgroup)) group = bytes2str(cgroup) - CHKERR( PetscFree(cgroup) ) + CHKERR(PetscFree(cgroup)) return group # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/arraynpy.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/arraynpy.pxi index 8770ea961b9..238db99cc8a 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/arraynpy.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/arraynpy.pxi @@ -12,12 +12,12 @@ cdef extern from "": ctypedef extern class numpy.ndarray [object PyArrayObject]: pass - void* PyArray_DATA(ndarray) - npy_intp PyArray_SIZE(ndarray) - int PyArray_NDIM(ndarray) + void* PyArray_DATA(ndarray) + npy_intp PyArray_SIZE(ndarray) + int PyArray_NDIM(ndarray) npy_intp* PyArray_DIMS(ndarray) - npy_intp PyArray_DIM(ndarray, int) - npy_intp PyArray_MultiplyList(const npy_intp*, int) + npy_intp PyArray_DIM(ndarray, int) + npy_intp PyArray_MultiplyList(const npy_intp*, int) enum: NPY_INTP dtype PyArray_DescrFromType(int) @@ -30,13 +30,13 @@ cdef extern from "": enum: NPY_ARRAY_FARRAY ndarray PyArray_FROM_O(object) - ndarray PyArray_FROM_OT(object,int) - ndarray PyArray_FROM_OTF(object,int,int) + ndarray PyArray_FROM_OT(object, int) + ndarray PyArray_FROM_OTF(object, int, int) ndarray PyArray_Copy(ndarray) - ndarray PyArray_ArangeObj(object,object,object,dtype) - ndarray PyArray_EMPTY(int,npy_intp[],int,int) - ndarray PyArray_ZEROS(int,npy_intp[],int,int) + ndarray PyArray_ArangeObj(object, object, object, dtype) + ndarray PyArray_EMPTY(int, npy_intp[], int, int) + ndarray PyArray_ZEROS(int, npy_intp[], int, int) bint PyArray_ISCONTIGUOUS(ndarray) bint PyArray_ISFORTRAN(ndarray) @@ -44,12 +44,12 @@ cdef extern from "": NPY_ANYORDER NPY_CORDER NPY_FORTRANORDER - ndarray PyArray_NewCopy(ndarray,NPY_ORDER) + ndarray PyArray_NewCopy(ndarray, NPY_ORDER) ctypedef struct PyObject ctypedef struct PyTypeObject - ndarray PyArray_New(PyTypeObject*,int,npy_intp[],int,npy_intp[],void*,int,int,PyObject*) - ndarray PyArray_SimpleNewFromData(int,npy_intp[],int,void*) + ndarray PyArray_New(PyTypeObject*, int, npy_intp[], int, npy_intp[], void*, int, int, PyObject*) + ndarray PyArray_SimpleNewFromData(int, npy_intp[], int, void*) cdef extern from "": @@ -140,25 +140,25 @@ cdef inline ndarray iarray(object ob, int typenum): cdef inline ndarray iarray_i(object ob, PetscInt* size, PetscInt** data): cdef ndarray ary = iarray(ob, NPY_PETSC_INT) - if size != NULL: size[0] = PyArray_SIZE(ary) + if size != NULL: size[0] = PyArray_SIZE(ary) if data != NULL: data[0] = PyArray_DATA(ary) return ary cdef inline ndarray iarray_r(object ob, PetscInt* size, PetscReal** data): cdef ndarray ary = 
iarray(ob, NPY_PETSC_REAL) - if size != NULL: size[0] = PyArray_SIZE(ary) + if size != NULL: size[0] = PyArray_SIZE(ary) if data != NULL: data[0] = PyArray_DATA(ary) return ary cdef inline ndarray iarray_b(object ob, PetscInt* size, PetscBool** data): cdef ndarray ary = iarray(ob, NPY_PETSC_BOOL) - if size != NULL: size[0] = PyArray_SIZE(ary) + if size != NULL: size[0] = PyArray_SIZE(ary) if data != NULL: data[0] = PyArray_DATA(ary) return ary cdef inline ndarray iarray_s(object ob, PetscInt* size, PetscScalar** data): cdef ndarray ary = iarray(ob, NPY_PETSC_SCALAR) - if size != NULL: size[0] = PyArray_SIZE(ary) + if size != NULL: size[0] = PyArray_SIZE(ary) if data != NULL: data[0] = PyArray_DATA(ary) return ary @@ -173,32 +173,42 @@ cdef inline ndarray oarray(object ob, int typenum): cdef inline ndarray oarray_b(object ob, PetscInt* size, PetscBool** data): cdef ndarray ary = oarray(ob, NPY_PETSC_BOOL) - if size != NULL: size[0] = PyArray_SIZE(ary) + cdef Py_ssize_t ssize = PyArray_SIZE(ary) + if size != NULL: size[0] = ssize if data != NULL: data[0] = PyArray_DATA(ary) + if ssize == 0 and data != NULL: data[0] = NULL return ary cdef inline ndarray oarray_i(object ob, PetscInt* size, PetscInt** data): cdef ndarray ary = oarray(ob, NPY_PETSC_INT) - if size != NULL: size[0] = PyArray_SIZE(ary) + cdef Py_ssize_t ssize = PyArray_SIZE(ary) + if size != NULL: size[0] = ssize if data != NULL: data[0] = PyArray_DATA(ary) + if ssize == 0 and data != NULL: data[0] = NULL return ary cdef inline ndarray oarray_r(object ob, PetscInt* size, PetscReal** data): cdef ndarray ary = oarray(ob, NPY_PETSC_REAL) - if size != NULL: size[0] = PyArray_SIZE(ary) + cdef Py_ssize_t ssize = PyArray_SIZE(ary) + if size != NULL: size[0] = ssize if data != NULL: data[0] = PyArray_DATA(ary) + if ssize == 0 and data != NULL: data[0] = NULL return ary cdef inline ndarray oarray_s(object ob, PetscInt* size, PetscScalar** data): cdef ndarray ary = oarray(ob, NPY_PETSC_SCALAR) - if size != NULL: size[0] = PyArray_SIZE(ary) + cdef Py_ssize_t ssize = PyArray_SIZE(ary) + if size != NULL: size[0] = ssize if data != NULL: data[0] = PyArray_DATA(ary) + if ssize == 0 and data != NULL: data[0] = NULL return ary cdef inline ndarray oarray_p(object ob, PetscInt* size, void** data): cdef ndarray ary = oarray(ob, NPY_INTP) - if size != NULL: size[0] = PyArray_SIZE(ary) - if data != NULL: data[0] = PyArray_DATA(ary) + cdef Py_ssize_t ssize = PyArray_SIZE(ary) + if size != NULL: size[0] = ssize + if data != NULL: data[0] = PyArray_DATA(ary) + if ssize == 0 and data != NULL: data[0] = NULL return ary # -------------------------------------------------------------------- @@ -206,14 +216,16 @@ cdef inline ndarray oarray_p(object ob, PetscInt* size, void** data): cdef inline ndarray ocarray_s(object ob, PetscInt* size, PetscScalar** data): cdef ndarray ary = PyArray_FROM_OTF( ob, NPY_PETSC_SCALAR, NPY_ARRAY_CARRAY|NPY_ARRAY_NOTSWAPPED) - if size != NULL: size[0] = PyArray_SIZE(ary) + cdef Py_ssize_t ssize = PyArray_SIZE(ary) + if size != NULL: size[0] = ssize if data != NULL: data[0] = PyArray_DATA(ary) return ary cdef inline ndarray ofarray_s(object ob, PetscInt* size, PetscScalar** data): cdef ndarray ary = PyArray_FROM_OTF( ob, NPY_PETSC_SCALAR, NPY_ARRAY_FARRAY|NPY_ARRAY_NOTSWAPPED) - if size != NULL: size[0] = PyArray_SIZE(ary) + cdef Py_ssize_t ssize = PyArray_SIZE(ary) + if size != NULL: size[0] = ssize if data != NULL: data[0] = PyArray_DATA(ary) return ary diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/cyclicgc.pxi 
b/src/binding/petsc4py/src/petsc4py/PETSc/cyclicgc.pxi index 163967dc61b..ed9b1570776 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/cyclicgc.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/cyclicgc.pxi @@ -10,17 +10,16 @@ cdef extern from "Python.h": ctypedef int traverseproc(PyObject *, visitproc, void *) noexcept ctypedef int inquiry(PyObject *) noexcept ctypedef struct PyTypeObject: - char* tp_name - traverseproc tp_traverse - inquiry tp_clear + char *tp_name + traverseproc tp_traverse + inquiry tp_clear PyTypeObject *Py_TYPE(PyObject *) cdef extern from "" nogil: PetscErrorCode PetscGarbageCleanup(MPI_Comm) - PetscErrorCode PetscGarbageView(MPI_Comm,PetscViewer); + PetscErrorCode PetscGarbageView(MPI_Comm, PetscViewer) cdef int tp_traverse(PyObject *o, visitproc visit, void *arg) noexcept: - ## printf("%s.tp_traverse(%p)\n", Py_TYPE(o).tp_name, o) cdef PetscObject p = (o).obj[0] if p == NULL: return 0 cdef PyObject *d = p.python_context @@ -28,17 +27,16 @@ cdef int tp_traverse(PyObject *o, visitproc visit, void *arg) noexcept: return visit(d, arg) cdef int tp_clear(PyObject *o) noexcept: - ## printf("%s.tp_clear(%p)\n", Py_TYPE(o).tp_name, o) cdef PetscObject *p = (o).obj PetscDEALLOC(p) return 0 cdef inline void TypeEnableGC(PyTypeObject *t) noexcept: - ## printf("%s: enforcing GC support\n", t.tp_name) t.tp_traverse = tp_traverse t.tp_clear = tp_clear -def garbage_cleanup(comm=None): + +def garbage_cleanup(comm: Comm | None = None) -> None: """Clean up unused PETSc objects. Collective. @@ -50,18 +48,19 @@ def garbage_cleanup(comm=None): """ if not (PetscInitializeCalled): return - if (PetscFinalizeCalled): return + if (PetscFinalizeCalled): return cdef MPI_Comm ccomm = MPI_COMM_NULL if comm is None: ccomm = GetComm(COMM_WORLD, MPI_COMM_NULL) - CHKERR( PetscGarbageCleanup(ccomm) ) + CHKERR(PetscGarbageCleanup(ccomm)) else: ccomm = GetComm(comm, MPI_COMM_NULL) if ccomm == MPI_COMM_NULL: raise ValueError("null communicator") - CHKERR( PetscGarbageCleanup(ccomm) ) + CHKERR(PetscGarbageCleanup(ccomm)) + -def garbage_view(comm=None): +def garbage_view(comm: Comm | None = None) -> None: """Print summary of the garbage PETSc objects. Collective. 
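`garbage_cleanup` and `garbage_view`, annotated above, are module-level helpers in petsc4py. In parallel runs, PETSc objects released out of sync across ranks are staged rather than destroyed immediately; these calls inspect and flush that staging list. Sketch:

    from petsc4py import PETSc

    # Inspect, then collectively free, objects awaiting destruction on
    # PETSC_COMM_WORLD (the default when no communicator is passed).
    PETSc.garbage_view()
    PETSc.garbage_cleanup()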
@@ -73,13 +72,13 @@ def garbage_view(comm=None): """ if not (PetscInitializeCalled): return - if (PetscFinalizeCalled): return + if (PetscFinalizeCalled): return cdef MPI_Comm ccomm = MPI_COMM_NULL if comm is None: comm = COMM_WORLD ccomm = GetComm(comm, MPI_COMM_NULL) if ccomm == MPI_COMM_NULL: raise ValueError("null communicator") - CHKERR( PetscGarbageView(ccomm, NULL) ) + CHKERR(PetscGarbageView(ccomm, NULL)) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/dlpack.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/dlpack.pxi index e93bf950f83..24204b2d7e4 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/dlpack.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/dlpack.pxi @@ -25,15 +25,15 @@ cdef enum PetscDLDeviceType: kDLCPU = 1 kDLCUDA = 2 kDLCUDAHost = 3 - #kDLOpenCL = 4 - #kDLVulkan = 7 - #kDLMetal = 8 - #kDLVPI = 9 + # kDLOpenCL = 4 + # kDLVulkan = 7 + # kDLMetal = 8 + # kDLVPI = 9 kDLROCM = 10 kDLROCMHost = 11 - #kDLExtDev = 12 + # kDLExtDev = 12 kDLCUDAManaged = 13 - #kDLOneAPI = 14 + # kDLOneAPI = 14 ctypedef struct DLContext: PetscDLDeviceType device_type diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/libpetsc4py.pyx b/src/binding/petsc4py/src/petsc4py/PETSc/libpetsc4py.pyx index ac7bce04f08..750c8cfc06f 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/libpetsc4py.pyx +++ b/src/binding/petsc4py/src/petsc4py/PETSc/libpetsc4py.pyx @@ -1,8 +1,8 @@ -#cython: cdivision=True -#cython: binding=False -#cython: auto_pickle=False -#cython: autotestdict=False -#cython: warn.multiple_declarators=False +# cython: cdivision=True +# cython: binding=False +# cython: auto_pickle=False +# cython: autotestdict=False +# cython: warn.multiple_declarators=False # -------------------------------------------------------------------- @@ -20,12 +20,12 @@ cdef extern from "Python.h": cdef extern from * nogil: ctypedef struct _p_PetscOptionItems ctypedef _p_PetscOptionItems* PetscOptionItems - PetscErrorCode PetscOptionsString(char[],char[],char[],char[],char[],size_t,PetscBool*) + PetscErrorCode PetscOptionsString(char[], char[], char[], char[], char[], size_t, PetscBool*) cdef extern from * nogil: # custom.h PetscErrorCode PetscObjectComposedDataRegisterPy(PetscInt*) - PetscErrorCode PetscObjectComposedDataGetIntPy(PetscObject,PetscInt,PetscInt*,PetscBool*) - PetscErrorCode PetscObjectComposedDataSetIntPy(PetscObject,PetscInt,PetscInt) + PetscErrorCode PetscObjectComposedDataGetIntPy(PetscObject, PetscInt, PetscInt*, PetscBool*) + PetscErrorCode PetscObjectComposedDataSetIntPy(PetscObject, PetscInt, PetscInt) # -------------------------------------------------------------------- @@ -51,16 +51,16 @@ cdef inline PetscErrorCode FunctionEnd() noexcept nogil: FUNCT = fstack[istack] return PETSC_SUCCESS -cdef PetscErrorCode PetscSETERR(PetscErrorCode ierr,char msg[]) noexcept nogil: +cdef PetscErrorCode PetscSETERR(PetscErrorCode ierr, char msg[]) noexcept nogil: global istack, fstack istack = 0 - fstack[istack] = NULL; - return PetscERROR(PETSC_COMM_SELF,FUNCT,ierr, + fstack[istack] = NULL + return PetscERROR(PETSC_COMM_SELF, FUNCT, ierr, PETSC_ERROR_INITIAL, msg, NULL) cdef PetscErrorCode UNSUPPORTED(char msg[]) noexcept nogil: - return PetscERROR(PETSC_COMM_SELF,FUNCT,PETSC_ERR_USER, - PETSC_ERROR_INITIAL,b"method %s()",msg) + return PetscERROR(PETSC_COMM_SELF, FUNCT, PETSC_ERR_USER, + PETSC_ERROR_INITIAL, b"method %s()", msg) # -------------------------------------------------------------------- @@ -157,17 +157,19 @@ cdef 
object load_module(object path): module.__file__ = path module.__package__ = None module_cache[path] = module + cdef object code = None try: with open(path, 'r') as source: code = compile(source.read(), path, 'exec') exec(code, module.__dict__) - except: + except Exception: del module_cache[path] raise return module # ----------------------------------------------------------------------------- + @cython.internal cdef class _PyObj: @@ -208,7 +210,7 @@ cdef class _PyObj: ctx[0] = NULL return 0 - cdef int setname(self, char name[]) except -1: + cdef int setname(self, const char name[]) except -1: if name != NULL and name[0] != 0: self.name = name else: @@ -244,11 +246,15 @@ cdef class _PyObj: return self.name return NULL -cdef createcontext(char name_p[]): +cdef createcontext(const char name_p[]): if name_p == NULL: return None cdef name = bytes2str(name_p) - cdef mod, path, modname=None - cdef cls, attr, clsname=None + cdef mod + cdef path + cdef modname=None + cdef cls + cdef attr + cdef clsname=None # path/to/filename.py:{function|class} if ':' in name: path, attr = parse_url(name) @@ -272,15 +278,15 @@ cdef createcontext(char name_p[]): cdef int viewcontext(_PyObj ctx, PetscViewer viewer) except -1: cdef PetscBool isascii = PETSC_FALSE, isstring = PETSC_FALSE - CHKERR( PetscObjectTypeCompare(viewer, PETSCVIEWERASCII, &isascii) ) - CHKERR( PetscObjectTypeCompare(viewer, PETSCVIEWERSTRING, &isstring) ) + CHKERR(PetscObjectTypeCompare(viewer, PETSCVIEWERASCII, &isascii)) + CHKERR(PetscObjectTypeCompare(viewer, PETSCVIEWERSTRING, &isstring)) cdef char *name = ctx.getname() if isascii: if name == NULL: name = b"unknown/no yet set" - CHKERR( PetscViewerASCIIPrintf(viewer, b" Python: %s\n", name) ) + CHKERR(PetscViewerASCIIPrintf(viewer, b" Python: %s\n", name)) if isstring: if name == NULL: name = b"" - CHKERR( PetscViewerStringSPrintf(viewer, "%s", name) ) + CHKERR(PetscViewerStringSPrintf(viewer, "%s", name)) return 0 # -------------------------------------------------------------------- @@ -288,45 +294,45 @@ cdef int viewcontext(_PyObj ctx, PetscViewer viewer) except -1: cdef extern from * nogil: struct _MatOps: PetscErrorCode (*destroy)(PetscMat) except PETSC_ERR_PYTHON - PetscErrorCode (*setfromoptions)(PetscMat,PetscOptionItems*) except PETSC_ERR_PYTHON - PetscErrorCode (*view)(PetscMat,PetscViewer) except PETSC_ERR_PYTHON - PetscErrorCode (*duplicate)(PetscMat,PetscMatDuplicateOption,PetscMat*) except PETSC_ERR_PYTHON - PetscErrorCode (*copy)(PetscMat,PetscMat,PetscMatStructure) except PETSC_ERR_PYTHON - PetscErrorCode (*createsubmatrix)(PetscMat,PetscIS,PetscIS,PetscMatReuse,PetscMat*) except PETSC_ERR_PYTHON - PetscErrorCode (*setoption)(PetscMat,PetscMatOption,PetscBool) except PETSC_ERR_PYTHON + PetscErrorCode (*setfromoptions)(PetscMat, PetscOptionItems*) except PETSC_ERR_PYTHON + PetscErrorCode (*view)(PetscMat, PetscViewer) except PETSC_ERR_PYTHON + PetscErrorCode (*duplicate)(PetscMat, PetscMatDuplicateOption, PetscMat*) except PETSC_ERR_PYTHON + PetscErrorCode (*copy)(PetscMat, PetscMat, PetscMatStructure) except PETSC_ERR_PYTHON + PetscErrorCode (*createsubmatrix)(PetscMat, PetscIS, PetscIS, PetscMatReuse, PetscMat*) except PETSC_ERR_PYTHON + PetscErrorCode (*setoption)(PetscMat, PetscMatOption, PetscBool) except PETSC_ERR_PYTHON PetscErrorCode (*setup)(PetscMat) except PETSC_ERR_PYTHON - PetscErrorCode (*assemblybegin)(PetscMat,PetscMatAssemblyType) except PETSC_ERR_PYTHON - PetscErrorCode (*assemblyend)(PetscMat,PetscMatAssemblyType) except PETSC_ERR_PYTHON + PetscErrorCode 
(*assemblybegin)(PetscMat, PetscMatAssemblyType) except PETSC_ERR_PYTHON + PetscErrorCode (*assemblyend)(PetscMat, PetscMatAssemblyType) except PETSC_ERR_PYTHON PetscErrorCode (*zeroentries)(PetscMat) except PETSC_ERR_PYTHON - PetscErrorCode (*zerorowscolumns)(PetscMat,PetscInt,PetscInt*,PetscScalar,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*scale)(PetscMat,PetscScalar) except PETSC_ERR_PYTHON - PetscErrorCode (*shift)(PetscMat,PetscScalar) except PETSC_ERR_PYTHON - PetscErrorCode (*sor)(PetscMat,PetscVec,PetscReal,PetscMatSORType,PetscReal,PetscInt,PetscInt,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*getvecs)(PetscMat,PetscVec*,PetscVec*) except PETSC_ERR_PYTHON - PetscErrorCode (*mult)(PetscMat,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*multtranspose)(PetscMat,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*multhermitian"multhermitiantranspose")(PetscMat,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*multadd)(PetscMat,PetscVec,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*multtransposeadd)(PetscMat,PetscVec,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*multhermitianadd"multhermitiantransposeadd")(PetscMat,PetscVec,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*multdiagonalblock)(PetscMat,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*solve)(PetscMat,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*solvetranspose)(PetscMat,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*solveadd)(PetscMat,PetscVec,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*solvetransposeadd)(PetscMat,PetscVec,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*getdiagonal)(PetscMat,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*setdiagonal"diagonalset")(PetscMat,PetscVec,PetscInsertMode) except PETSC_ERR_PYTHON - PetscErrorCode (*diagonalscale)(PetscMat,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*missingdiagonal)(PetscMat,PetscBool*,PetscInt*) except PETSC_ERR_PYTHON - PetscErrorCode (*norm)(PetscMat,PetscNormType,PetscReal*) except PETSC_ERR_PYTHON + PetscErrorCode (*zerorowscolumns)(PetscMat, PetscInt, PetscInt*, PetscScalar, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*scale)(PetscMat, PetscScalar) except PETSC_ERR_PYTHON + PetscErrorCode (*shift)(PetscMat, PetscScalar) except PETSC_ERR_PYTHON + PetscErrorCode (*sor)(PetscMat, PetscVec, PetscReal, PetscMatSORType, PetscReal, PetscInt, PetscInt, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*getvecs)(PetscMat, PetscVec*, PetscVec*) except PETSC_ERR_PYTHON + PetscErrorCode (*mult)(PetscMat, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*multtranspose)(PetscMat, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*multhermitian"multhermitiantranspose")(PetscMat, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*multadd)(PetscMat, PetscVec, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*multtransposeadd)(PetscMat, PetscVec, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*multhermitianadd"multhermitiantransposeadd")(PetscMat, PetscVec, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*multdiagonalblock)(PetscMat, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*solve)(PetscMat, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*solvetranspose)(PetscMat, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode 
(*solveadd)(PetscMat, PetscVec, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*solvetransposeadd)(PetscMat, PetscVec, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*getdiagonal)(PetscMat, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*setdiagonal"diagonalset")(PetscMat, PetscVec, PetscInsertMode) except PETSC_ERR_PYTHON + PetscErrorCode (*diagonalscale)(PetscMat, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*missingdiagonal)(PetscMat, PetscBool*, PetscInt*) except PETSC_ERR_PYTHON + PetscErrorCode (*norm)(PetscMat, PetscNormType, PetscReal*) except PETSC_ERR_PYTHON PetscErrorCode (*realpart)(PetscMat) except PETSC_ERR_PYTHON PetscErrorCode (*imagpart"imaginarypart")(PetscMat) except PETSC_ERR_PYTHON PetscErrorCode (*conjugate)(PetscMat) except PETSC_ERR_PYTHON - PetscErrorCode (*getdiagonalblock)(PetscMat,PetscMat*) except PETSC_ERR_PYTHON + PetscErrorCode (*getdiagonalblock)(PetscMat, PetscMat*) except PETSC_ERR_PYTHON PetscErrorCode (*productsetfromoptions)(PetscMat) except PETSC_ERR_PYTHON PetscErrorCode (*productsymbolic)(PetscMat) except PETSC_ERR_PYTHON PetscErrorCode (*productnumeric)(PetscMat) except PETSC_ERR_PYTHON - PetscErrorCode (*hasoperation)(PetscMat,PetscMatOperation,PetscBool*) except PETSC_ERR_PYTHON + PetscErrorCode (*hasoperation)(PetscMat, PetscMatOperation, PetscBool*) except PETSC_ERR_PYTHON ctypedef _MatOps *MatOps ctypedef struct Mat_Product: void *data @@ -357,9 +363,10 @@ cdef public PetscErrorCode MatPythonSetContext(PetscMat mat, void *ctx) \ except PETSC_ERR_PYTHON: FunctionBegin(b"MatPythonSetContext") PyMat(mat).setcontext(ctx, Mat_(mat)) + mat.preallocated = PETSC_FALSE return FunctionEnd() -cdef PetscErrorCode MatPythonSetType_PYTHON(PetscMat mat, char name[]) \ +cdef PetscErrorCode MatPythonSetType_PYTHON(PetscMat mat, const char *name) \ except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatPythonSetType_PYTHON") if name == NULL: return FunctionEnd() # XXX @@ -374,29 +381,30 @@ cdef PetscErrorCode MatPythonGetType_PYTHON(PetscMat mat, const char *name[]) \ name[0] = PyMat(mat).getname() return FunctionEnd() -#FIXME: view and setFromOptions? -cdef dict dMatOps = { 3 : 'mult', - 4 : 'multAdd', - 5 : 'multTranspose', - 6 : 'multTransposeAdd', - 7 : 'solve', - 8 : 'solveAdd', - 9 : 'solveTranspose', - 10 : 'solveTransposeAdd', - 13 : 'SOR', - 17 : 'getDiagonal', - 18 : 'diagonalScale', - 19 : 'norm', - 23 : 'zeroEntries', - 32 : 'getDiagonalBlock', - 34 : 'duplicate', - 43 : 'copy', - 45 : 'scale', - 46 : 'shift', - 47 : 'setDiagonal', - 48 : 'zeroRowsColumns', - 59 : 'createSubMatrix', - 88 : 'getVecs', #FIXME -> createVecs +# FIXME: view and setFromOptions? 
+cdef dict dMatOps = { + 3 : 'mult', + 4 : 'multAdd', + 5 : 'multTranspose', + 6 : 'multTransposeAdd', + 7 : 'solve', + 8 : 'solveAdd', + 9 : 'solveTranspose', + 10 : 'solveTransposeAdd', + 13 : 'SOR', + 17 : 'getDiagonal', + 18 : 'diagonalScale', + 19 : 'norm', + 23 : 'zeroEntries', + 32 : 'getDiagonalBlock', + 34 : 'duplicate', + 43 : 'copy', + 45 : 'scale', + 46 : 'shift', + 47 : 'setDiagonal', + 48 : 'zeroRowsColumns', + 59 : 'createSubMatrix', + 88 : 'getVecs', # FIXME -> createVecs 102 : 'conjugate', 105 : 'realPart', 106 : 'imagPart', @@ -408,10 +416,8 @@ cdef dict dMatOps = { 3 : 'mult', cdef PetscErrorCode MatCreate_Python( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatCreate_Python") - # cdef MatOps ops = mat.ops ops.destroy = MatDestroy_Python ops.setfromoptions = MatSetFromOptions_Python @@ -457,17 +463,17 @@ cdef PetscErrorCode MatCreate_Python( mat.assembled = PETSC_TRUE # XXX mat.preallocated = PETSC_FALSE # XXX # - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( mat, b"MatPythonSetType_C", - MatPythonSetType_PYTHON) ) - CHKERR( PetscObjectComposeFunction( + MatPythonSetType_PYTHON)) + CHKERR(PetscObjectComposeFunction( mat, b"MatPythonGetType_C", - MatPythonGetType_PYTHON) ) - CHKERR( PetscObjectComposeFunction( + MatPythonGetType_PYTHON)) + CHKERR(PetscObjectComposeFunction( mat, b"MatProductSetFromOptions_anytype_C", - MatProductSetFromOptions_Python) ) - CHKERR( PetscObjectChangeTypeName( - mat, MATPYTHON) ) + MatProductSetFromOptions_Python)) + CHKERR(PetscObjectChangeTypeName( + mat, MATPYTHON)) # cdef ctx = PyMat(NULL) mat.data = ctx @@ -476,8 +482,7 @@ cdef PetscErrorCode MatCreate_Python( cdef inline PetscErrorCode MatDestroy_Python_inner( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: try: addRef(mat) MatPythonSetContext(mat, NULL) @@ -489,21 +494,19 @@ cdef inline PetscErrorCode MatDestroy_Python_inner( cdef PetscErrorCode MatDestroy_Python( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON nogil: - + ) except PETSC_ERR_PYTHON nogil: FunctionBegin(b"MatDestroy_Python") - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( mat, b"MatPythonSetType_C", - NULL) ) - CHKERR( PetscObjectComposeFunction( + NULL)) + CHKERR(PetscObjectComposeFunction( mat, b"MatPythonGetType_C", - NULL) ) - CHKERR( PetscObjectComposeFunction( + NULL)) + CHKERR(PetscObjectComposeFunction( mat, b"MatProductSetFromOptions_anytype_C", - NULL) ) - CHKERR( PetscObjectChangeTypeName( - mat, NULL) ) + NULL)) + CHKERR(PetscObjectChangeTypeName( + mat, NULL)) if Py_IsInitialized(): MatDestroy_Python_inner(mat) return FunctionEnd() @@ -511,18 +514,16 @@ cdef PetscErrorCode MatDestroy_Python( cdef PetscErrorCode MatSetFromOptions_Python( PetscMat mat, PetscOptionItems *PetscOptionsObject, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSetFromOptions_Python") - # cdef char name[2048], *defval = PyMat(mat).getname() cdef PetscBool found = PETSC_FALSE cdef PetscOptionItems *opts "PetscOptionsObject" = PetscOptionsObject - CHKERR( PetscOptionsString( + CHKERR(PetscOptionsString( b"-mat_python_type", b"Python [package.]module[.{class|function}]", - b"MatPythonSetType", defval, name, sizeof(name), &found) ); opts; + b"MatPythonSetType", defval, name, sizeof(name), &found)); opts if found and name[0]: - CHKERR( MatPythonSetType_PYTHON(mat, name) ) + CHKERR(MatPythonSetType_PYTHON(mat, name)) # cdef 
setFromOptions = PyMat(mat).setFromOptions if setFromOptions is not None: @@ -532,8 +533,7 @@ cdef PetscErrorCode MatSetFromOptions_Python( cdef PetscErrorCode MatView_Python( PetscMat mat, PetscViewer vwr, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatView_Python") viewcontext(PyMat(mat), vwr) cdef view = PyMat(mat).view @@ -545,8 +545,7 @@ cdef PetscErrorCode MatDuplicate_Python( PetscMat mat, PetscMatDuplicateOption op, PetscMat* out, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatDuplicate_Python") cdef duplicate = PyMat(mat).duplicate if duplicate is None: return UNSUPPORTED(b"duplicate") @@ -558,8 +557,7 @@ cdef PetscErrorCode MatCopy_Python( PetscMat mat, PetscMat out, PetscMatStructure op, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatCopy_Python") cdef copy = PyMat(mat).copy if copy is None: return UNSUPPORTED(b"copy") @@ -569,14 +567,13 @@ cdef PetscErrorCode MatCopy_Python( cdef PetscErrorCode MatGetDiagonalBlock_Python( PetscMat mat, PetscMat *out - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatGetDiagonalBlock_Python") cdef getDiagonalBlock = PyMat(mat).getDiagonalBlock if getDiagonalBlock is None: try: mat.ops.getdiagonalblock = NULL - CHKERR( MatGetDiagonalBlock(mat, out) ) + CHKERR(MatGetDiagonalBlock(mat, out)) finally: mat.ops.getdiagonalblock = MatGetDiagonalBlock_Python return FunctionEnd() @@ -590,17 +587,16 @@ cdef PetscErrorCode MatCreateSubMatrix_Python( PetscIS col, PetscMatReuse op, PetscMat *out, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatCreateSubMatrix_Python") cdef createSubMatrix = PyMat(mat).createSubMatrix if createSubMatrix is None: - try: - mat.ops.createsubmatrix = NULL - CHKERR( MatCreateSubMatrix(mat, row, col, op, out) ) - finally: - mat.ops.createsubmatrix = MatCreateSubMatrix_Python - return FunctionEnd() + try: + mat.ops.createsubmatrix = NULL + CHKERR(MatCreateSubMatrix(mat, row, col, op, out)) + finally: + mat.ops.createsubmatrix = MatCreateSubMatrix_Python + return FunctionEnd() cdef Mat sub = None if op == MAT_IGNORE_MATRIX: sub = None @@ -618,8 +614,7 @@ cdef PetscErrorCode MatSetOption_Python( PetscMat mat, PetscMatOption op, PetscBool flag, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSetOption_Python") cdef setOption = PyMat(mat).setOption if setOption is not None: @@ -628,34 +623,33 @@ cdef PetscErrorCode MatSetOption_Python( cdef PetscErrorCode MatSetUp_Python( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSetUp_Python") cdef PetscInt rbs = -1, cbs = -1 - CHKERR( PetscLayoutGetBlockSize(mat.rmap, &rbs) ) - CHKERR( PetscLayoutGetBlockSize(mat.cmap, &cbs) ) + CHKERR(PetscLayoutGetBlockSize(mat.rmap, &rbs)) + CHKERR(PetscLayoutGetBlockSize(mat.cmap, &cbs)) if rbs == -1: rbs = 1 if cbs == -1: cbs = rbs - CHKERR( PetscLayoutSetBlockSize(mat.rmap, rbs) ) - CHKERR( PetscLayoutSetBlockSize(mat.cmap, cbs) ) - CHKERR( PetscLayoutSetUp(mat.rmap) ) - CHKERR( PetscLayoutSetUp(mat.cmap) ) + CHKERR(PetscLayoutSetBlockSize(mat.rmap, rbs)) + CHKERR(PetscLayoutSetBlockSize(mat.cmap, cbs)) + CHKERR(PetscLayoutSetUp(mat.rmap)) + CHKERR(PetscLayoutSetUp(mat.cmap)) mat.preallocated = PETSC_TRUE # cdef char name[2048] cdef PetscBool found = PETSC_FALSE if PyMat(mat).self is None: - 
CHKERR( PetscOptionsGetString(NULL, - getPrefix(mat), b"-mat_python_type", - name, sizeof(name), &found) ) + CHKERR(PetscOptionsGetString(NULL, + getPrefix(mat), b"-mat_python_type", + name, sizeof(name), &found)) if found and name[0]: - CHKERR( MatPythonSetType_PYTHON(mat, name) ) + CHKERR(MatPythonSetType_PYTHON(mat, name)) if PyMat(mat).self is None: return PetscSETERR(PETSC_ERR_USER, - "Python context not set, call one of \n" - " * MatPythonSetType(mat, \"[package.]module.class\")\n" - " * MatSetFromOptions(mat) and pass option " - "-mat_python_type [package.]module.class") + "Python context not set, call one of \n" + " * MatPythonSetType(mat, \"[package.]module.class\")\n" + " * MatSetFromOptions(mat) and pass option " + "-mat_python_type [package.]module.class") # cdef setUp = PyMat(mat).setUp if setUp is not None: @@ -665,8 +659,7 @@ cdef PetscErrorCode MatSetUp_Python( cdef PetscErrorCode MatAssemblyBegin_Python( PetscMat mat, PetscMatAssemblyType at, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatAssemblyBegin_Python") cdef assembly = PyMat(mat).assemblyBegin if assembly is not None: @@ -676,8 +669,7 @@ cdef PetscErrorCode MatAssemblyBegin_Python( cdef PetscErrorCode MatAssemblyEnd_Python( PetscMat mat, PetscMatAssemblyType at, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatAssemblyEnd_Python") cdef assembly = PyMat(mat).assemblyEnd if assembly is None: @@ -688,8 +680,7 @@ cdef PetscErrorCode MatAssemblyEnd_Python( cdef PetscErrorCode MatZeroEntries_Python( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatZeroEntries_Python") cdef zeroEntries = PyMat(mat).zeroEntries if zeroEntries is None: return UNSUPPORTED(b"zeroEntries") @@ -703,8 +694,7 @@ cdef PetscErrorCode MatZeroRowsColumns_Python( PetscScalar diag, PetscVec x, PetscVec b, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatZeroRowsColumns_Python") cdef zeroRowsColumns = PyMat(mat).zeroRowsColumns if zeroRowsColumns is None: return UNSUPPORTED(b"zeroRowsColumns") @@ -715,8 +705,7 @@ cdef PetscErrorCode MatZeroRowsColumns_Python( cdef PetscErrorCode MatScale_Python( PetscMat mat, PetscScalar s, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatScale_Python") cdef scale = PyMat(mat).scale if scale is None: return UNSUPPORTED(b"scale") @@ -726,8 +715,7 @@ cdef PetscErrorCode MatScale_Python( cdef PetscErrorCode MatShift_Python( PetscMat mat, PetscScalar s, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatShift_Python") cdef shift = PyMat(mat).shift if shift is None: return UNSUPPORTED(b"shift") @@ -738,14 +726,13 @@ cdef PetscErrorCode MatCreateVecs_Python( PetscMat mat, PetscVec *x, PetscVec *y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatCreateVecs_Python") cdef createVecs = PyMat(mat).createVecs if createVecs is None: try: mat.ops.getvecs = NULL - CHKERR( MatCreateVecs(mat, x, y) ) + CHKERR(MatCreateVecs(mat, x, y)) finally: mat.ops.getvecs = MatCreateVecs_Python return FunctionEnd() @@ -763,8 +750,7 @@ cdef PetscErrorCode MatMult_Python( PetscMat mat, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMult_Python") cdef mult = PyMat(mat).mult if mult is None: return UNSUPPORTED(b"mult") 
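mult is the one operation a MATPYTHON context must supply; most of the optional handlers below (multTranspose, multHermitian, createVecs, solveTranspose, ...) instead fall back to the built-in PETSc code path by temporarily clearing their own ops slot and restoring it in a finally block, which keeps the dispatch from recursing back into the Python wrapper. A schematic of that trick in plain Python (every name here is illustrative, not petsc4py API):

    def dispatch_with_fallback(mat, slot, default, *args):
        handler = getattr(mat.ctx, slot, None)
        if handler is not None:
            handler(mat, *args)
            return
        saved = mat.ops[slot]
        try:
            mat.ops[slot] = None   # let PETSc's default implementation run
            default(mat, *args)
        finally:
            mat.ops[slot] = saved  # restore the Python wrapper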
@@ -775,14 +761,13 @@ cdef PetscErrorCode MatMultTranspose_Python( PetscMat mat, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMultTranspose_Python") cdef multTranspose = PyMat(mat).multTranspose if multTranspose is None: try: mat.ops.multtranspose = NULL - CHKERR( MatMultTranspose(mat, x, y) ) + CHKERR(MatMultTranspose(mat, x, y)) finally: mat.ops.multtranspose = MatMultTranspose_Python return FunctionEnd() @@ -793,14 +778,13 @@ cdef PetscErrorCode MatMultHermitian_Python( PetscMat mat, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMultHermitian_Python") cdef multHermitian = PyMat(mat).multHermitian if multHermitian is None: try: mat.ops.multhermitian = NULL - CHKERR( MatMultHermitian(mat, x, y) ) + CHKERR(MatMultHermitian(mat, x, y)) finally: mat.ops.multhermitian = MatMultHermitian_Python return FunctionEnd() @@ -812,20 +796,19 @@ cdef PetscErrorCode MatMultAdd_Python( PetscVec x, PetscVec v, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMultAdd_Python") cdef multAdd = PyMat(mat).multAdd cdef PetscVec t = NULL if multAdd is None: if v == y: - CHKERR( VecDuplicate(y, &t) ) - CHKERR( MatMult(mat, x, t) ) - CHKERR( VecAXPY(y, 1.0, t) ) - CHKERR( VecDestroy(&t) ) + CHKERR(VecDuplicate(y, &t)) + CHKERR(MatMult(mat, x, t)) + CHKERR(VecAXPY(y, 1.0, t)) + CHKERR(VecDestroy(&t)) else: - CHKERR( MatMult(mat, x, y) ) - CHKERR( VecAXPY(y, 1.0, v) ) + CHKERR(MatMult(mat, x, y)) + CHKERR(VecAXPY(y, 1.0, v)) return FunctionEnd() if multAdd is None: return UNSUPPORTED(b"multAdd") multAdd(Mat_(mat), Vec_(x), Vec_(v), Vec_(y)) @@ -836,20 +819,19 @@ cdef PetscErrorCode MatMultTransposeAdd_Python( PetscVec x, PetscVec v, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMultTransposeAdd_Python") cdef multTransposeAdd = PyMat(mat).multTransposeAdd cdef PetscVec t = NULL if multTransposeAdd is None: if v == y: - CHKERR( VecDuplicate(y, &t) ) - CHKERR( MatMultTranspose(mat, x, t) ) - CHKERR( VecAXPY(y, 1.0, t) ) - CHKERR( VecDestroy(&t) ) + CHKERR(VecDuplicate(y, &t)) + CHKERR(MatMultTranspose(mat, x, t)) + CHKERR(VecAXPY(y, 1.0, t)) + CHKERR(VecDestroy(&t)) else: - CHKERR( MatMultTranspose(mat, x, y) ) - CHKERR( VecAXPY(y, 1.0, v) ) + CHKERR(MatMultTranspose(mat, x, y)) + CHKERR(VecAXPY(y, 1.0, v)) return FunctionEnd() if multTransposeAdd is None: return UNSUPPORTED(b"multTransposeAdd") multTransposeAdd(Mat_(mat), Vec_(x), Vec_(v), Vec_(y)) @@ -860,14 +842,13 @@ cdef PetscErrorCode MatMultHermitianAdd_Python( PetscVec x, PetscVec v, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMultHermitianAdd_Python") cdef multHermitianAdd = PyMat(mat).multHermitianAdd if multHermitianAdd is None: try: mat.ops.multhermitianadd = NULL - CHKERR( MatMultHermitianAdd(mat, x, v, y) ) + CHKERR(MatMultHermitianAdd(mat, x, v, y)) finally: mat.ops.multhermitianadd = MatMultHermitianAdd_Python return FunctionEnd() @@ -878,8 +859,7 @@ cdef PetscErrorCode MatMultDiagonalBlock_Python( PetscMat mat, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMultDiagonalBlock_Python") cdef multDiagonalBlock = PyMat(mat).multDiagonalBlock if multDiagonalBlock is None: return UNSUPPORTED(b"multDiagonalBlock") @@ -890,8 +870,7 @@ cdef 
PetscErrorCode MatSolve_Python( PetscMat mat, PetscVec b, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSolve_Python") cdef solve = PyMat(mat).solve if solve is None: return UNSUPPORTED(b"solve") @@ -902,14 +881,13 @@ cdef PetscErrorCode MatSolveTranspose_Python( PetscMat mat, PetscVec b, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSolveTranspose_Python") cdef solveTranspose = PyMat(mat).solveTranspose if solveTranspose is None: try: mat.ops.solvetranspose = NULL - CHKERR( MatSolveTranspose(mat, b, x) ) + CHKERR(MatSolveTranspose(mat, b, x)) finally: mat.ops.solvetranspose = MatSolveTranspose_Python solveTranspose(Mat_(mat), Vec_(b), Vec_(x)) @@ -920,14 +898,13 @@ cdef PetscErrorCode MatSolveAdd_Python( PetscVec b, PetscVec y, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSolveAdd_Python") cdef solveAdd = PyMat(mat).solveAdd if solveAdd is None: try: mat.ops.solveadd = NULL - CHKERR( MatSolveAdd(mat, b, y, x) ) + CHKERR(MatSolveAdd(mat, b, y, x)) finally: mat.ops.solveadd = MatSolveAdd_Python return FunctionEnd() @@ -939,14 +916,13 @@ cdef PetscErrorCode MatSolveTransposeAdd_Python( PetscVec b, PetscVec y, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSolveTransposeAdd_Python") cdef solveTransposeAdd = PyMat(mat).solveTransposeAdd if solveTransposeAdd is None: try: mat.ops.solvetransposeadd = NULL - CHKERR( MatSolveTransposeAdd(mat, b, y, x) ) + CHKERR(MatSolveTransposeAdd(mat, b, y, x)) finally: mat.ops.solvetransposeadd = MatSolveTransposeAdd_Python return FunctionEnd() @@ -962,8 +938,7 @@ cdef PetscErrorCode MatSOR_Python( PetscInt its, PetscInt lits, PetscVec x - )\ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSOR_Python") cdef SOR = PyMat(mat).SOR if SOR is None: return UNSUPPORTED(b"SOR") @@ -973,8 +948,7 @@ cdef PetscErrorCode MatSOR_Python( cdef PetscErrorCode MatGetDiagonal_Python( PetscMat mat, PetscVec v, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatGetDiagonal_Python") cdef getDiagonal = PyMat(mat).getDiagonal if getDiagonal is None: return UNSUPPORTED(b"getDiagonal") @@ -985,8 +959,7 @@ cdef PetscErrorCode MatSetDiagonal_Python( PetscMat mat, PetscVec v, PetscInsertMode im, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatSetDiagonal_Python") cdef setDiagonal = PyMat(mat).setDiagonal cdef bint addv = True if im == PETSC_ADD_VALUES else False @@ -998,8 +971,7 @@ cdef PetscErrorCode MatDiagonalScale_Python( PetscMat mat, PetscVec l, PetscVec r, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatDiagonalScale_Python") cdef diagonalScale = PyMat(mat).diagonalScale if diagonalScale is None: return UNSUPPORTED(b"diagonalScale") @@ -1010,8 +982,7 @@ cdef PetscErrorCode MatMissingDiagonal_Python( PetscMat mat, PetscBool *missing, PetscInt *loc - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatMissingDiagonal_Python") cdef missingDiagonal = PyMat(mat).missingDiagonal if missingDiagonal is None: return UNSUPPORTED(b"missingDiagonal") @@ -1025,8 +996,7 @@ cdef PetscErrorCode MatNorm_Python( PetscMat mat, PetscNormType ntype, PetscReal *nrm, - ) \ - except PETSC_ERR_PYTHON with gil: + ) 
except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatNorm_Python") cdef norm = PyMat(mat).norm if norm is None: return UNSUPPORTED(b"norm") @@ -1036,8 +1006,7 @@ cdef PetscErrorCode MatNorm_Python( cdef PetscErrorCode MatRealPart_Python( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatRealPart_Python") cdef realPart = PyMat(mat).realPart if realPart is None: return UNSUPPORTED(b"realPart") @@ -1046,8 +1015,7 @@ cdef PetscErrorCode MatRealPart_Python( cdef PetscErrorCode MatImagPart_Python( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatImagPart_Python") cdef imagPart = PyMat(mat).imagPart if imagPart is None: return UNSUPPORTED(b"imagPart") @@ -1056,8 +1024,7 @@ cdef PetscErrorCode MatImagPart_Python( cdef PetscErrorCode MatConjugate_Python( PetscMat mat, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatConjugate_Python") cdef conjugate = PyMat(mat).conjugate if conjugate is None: return UNSUPPORTED(b"conjugate") @@ -1068,8 +1035,7 @@ cdef PetscErrorCode MatHasOperation_Python( PetscMat mat, PetscMatOperation op, PetscBool *flag - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatHasOperation_Python") flag[0] = PETSC_FALSE cdef long i = op @@ -1085,32 +1051,31 @@ cdef PetscErrorCode MatHasOperation_Python( cdef PetscErrorCode MatProductNumeric_Python( PetscMat mat - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatProductNumeric_Python") cdef PetscMat A = NULL cdef PetscMat B = NULL cdef PetscMat C = NULL cdef PetscMatProductType mtype = MATPRODUCT_UNSPECIFIED - CHKERR( MatProductGetMats(mat, &A, &B, &C) ) - CHKERR( MatProductGetType(mat, &mtype) ) + CHKERR(MatProductGetMats(mat, &A, &B, &C)) + CHKERR(MatProductGetType(mat, &mtype)) mtypes = {MATPRODUCT_AB : 'AB', MATPRODUCT_ABt : 'ABt', MATPRODUCT_AtB : 'AtB', MATPRODUCT_PtAP : 'PtAP', MATPRODUCT_RARt: 'RARt', MATPRODUCT_ABC: 'ABC'} cdef Mat_Product *product = mat.product cdef PetscInt i = product.data if i < 0 or i > 2: - return PetscSETERR(PETSC_ERR_PLIB, - "Corrupted composed id") + return PetscSETERR(PETSC_ERR_PLIB, + "Corrupted composed id") cdef PetscMat pM = C if i == 2 else B if i == 1 else A cdef Mat PyA = Mat_(A) cdef Mat PyB = Mat_(B) cdef Mat PyC = Mat_(C) if mtype == MATPRODUCT_ABC: - mats = (PyA, PyB, PyC) + mats = (PyA, PyB, PyC) else: - mats = (PyA, PyB, None) + mats = (PyA, PyB, None) cdef productNumeric = PyMat(pM).productNumeric if productNumeric is None: return UNSUPPORTED(b"productNumeric") @@ -1122,37 +1087,36 @@ cdef PetscInt matmatid = -1 cdef PetscErrorCode MatProductSymbolic_Python( PetscMat mat - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatProductSymbolic_Python") cdef PetscMat A = NULL cdef PetscMat B = NULL cdef PetscMat C = NULL cdef PetscMatProductType mtype = MATPRODUCT_UNSPECIFIED - CHKERR( MatProductGetMats(mat, &A, &B, &C) ) - CHKERR( MatProductGetType(mat, &mtype) ) + CHKERR(MatProductGetMats(mat, &A, &B, &C)) + CHKERR(MatProductGetType(mat, &mtype)) mtypes = {MATPRODUCT_AB : 'AB', MATPRODUCT_ABt : 'ABt', MATPRODUCT_AtB : 'AtB', MATPRODUCT_PtAP : 'PtAP', MATPRODUCT_RARt: 'RARt', MATPRODUCT_ABC: 'ABC'} global matmatid cdef PetscInt i = -1 cdef PetscBool flg = PETSC_FALSE - CHKERR( PetscObjectComposedDataGetIntPy(mat, matmatid, &i, &flg) ) + 
CHKERR(PetscObjectComposedDataGetIntPy(mat, matmatid, &i, &flg)) if flg is not PETSC_TRUE: - return PetscSETERR(PETSC_ERR_PLIB, - "Missing composed id") + return PetscSETERR(PETSC_ERR_PLIB, + "Missing composed id") if i < 0 or i > 2: - return PetscSETERR(PETSC_ERR_PLIB, - "Corrupted composed id") + return PetscSETERR(PETSC_ERR_PLIB, + "Corrupted composed id") cdef PetscMat pM = C if i == 2 else B if i == 1 else A cdef Mat PyA = Mat_(A) cdef Mat PyB = Mat_(B) cdef Mat PyC = Mat_(C) if mtype == MATPRODUCT_ABC: - mats = (PyA, PyB, PyC) + mats = (PyA, PyB, PyC) else: - mats = (PyA, PyB, None) + mats = (PyA, PyB, None) cdef productSymbolic = PyMat(pM).productSymbolic if productSymbolic is None: return UNSUPPORTED(b"productSymbolic") @@ -1168,22 +1132,21 @@ cdef PetscErrorCode MatProductSymbolic_Python( cdef PetscErrorCode MatProductSetFromOptions_Python( PetscMat mat - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"MatProductSetFromOptions_Python") cdef PetscMat A = NULL cdef PetscMat B = NULL cdef PetscMat C = NULL - CHKERR( MatProductGetMats(mat, &A, &B, &C) ) + CHKERR(MatProductGetMats(mat, &A, &B, &C)) if A == NULL or B == NULL: - return PetscSETERR(PETSC_ERR_PLIB, - "Missing matrices") + return PetscSETERR(PETSC_ERR_PLIB, + "Missing matrices") cdef PetscMatProductType mtype = MATPRODUCT_UNSPECIFIED - CHKERR( MatProductGetType(mat, &mtype) ) + CHKERR(MatProductGetType(mat, &mtype)) if mtype == MATPRODUCT_UNSPECIFIED: - return PetscSETERR(PETSC_ERR_PLIB, - "Unknown product type") + return PetscSETERR(PETSC_ERR_PLIB, + "Unknown product type") mtypes = {MATPRODUCT_AB : 'AB', MATPRODUCT_ABt : 'ABt', MATPRODUCT_AtB : 'AtB', MATPRODUCT_PtAP : 'PtAP', MATPRODUCT_RARt: 'RARt', MATPRODUCT_ABC: 'ABC'} @@ -1191,9 +1154,9 @@ cdef PetscErrorCode MatProductSetFromOptions_Python( cdef Mat PyB = Mat_(B) cdef Mat PyC = Mat_(C) if mtype == MATPRODUCT_ABC: - mats = (PyA, PyB, PyC) + mats = (PyA, PyB, PyC) else: - mats = (PyA, PyB, None) + mats = (PyA, PyB, None) # Find Python matrix in mats able to perform the product found = False @@ -1202,16 +1165,16 @@ cdef PetscErrorCode MatProductSetFromOptions_Python( cdef Mat mm cdef PetscInt i = -1 for i in range(len(mats)): - if mats[i] is None: continue - mm = mats[i] - pM = mm.mat - CHKERR( PetscObjectTypeCompare(pM, MATPYTHON, &mispy) ) - if mispy: - if PyMat(pM).productSetFromOptions is not None: - found = PyMat(pM).productSetFromOptions(PyC if C == pM else PyB if B == pM else PyA, mtypes[mtype], *mats) - if found: break + if mats[i] is None: continue + mm = mats[i] + pM = mm.mat + CHKERR(PetscObjectTypeCompare(pM, MATPYTHON, &mispy)) + if mispy: + if PyMat(pM).productSetFromOptions is not None: + found = PyMat(pM).productSetFromOptions(PyC if C == pM else PyB if B == pM else PyA, mtypes[mtype], *mats) + if found: break if not found: - return FunctionEnd() + return FunctionEnd() cdef MatOps ops = mat.ops ops.productsymbolic = MatProductSymbolic_Python @@ -1221,8 +1184,8 @@ cdef PetscErrorCode MatProductSetFromOptions_Python( # Symbolic operation will get this index and store it in the product data global matmatid if matmatid < 0: - CHKERR( PetscObjectComposedDataRegisterPy(&matmatid) ) - CHKERR( PetscObjectComposedDataSetIntPy(mat, matmatid, i) ) + CHKERR(PetscObjectComposedDataRegisterPy(&matmatid)) + CHKERR(PetscObjectComposedDataSetIntPy(mat, matmatid, i)) return FunctionEnd() @@ -1230,18 +1193,18 @@ cdef PetscErrorCode MatProductSetFromOptions_Python( cdef extern from * nogil: struct _PCOps: - PetscErrorCode 
(*destroy)(PetscPC) except PETSC_ERR_PYTHON - PetscErrorCode (*setup)(PetscPC) except PETSC_ERR_PYTHON - PetscErrorCode (*reset)(PetscPC) except PETSC_ERR_PYTHON - PetscErrorCode (*setfromoptions)(PetscPC,PetscOptionItems*) except PETSC_ERR_PYTHON - PetscErrorCode (*view)(PetscPC,PetscViewer) except PETSC_ERR_PYTHON - PetscErrorCode (*presolve)(PetscPC,PetscKSP,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*postsolve)(PetscPC,PetscKSP,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*apply)(PetscPC,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*matapply)(PetscPC,PetscMat,PetscMat) except PETSC_ERR_PYTHON - PetscErrorCode (*applytranspose)(PetscPC,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*applysymmetricleft)(PetscPC,PetscVec,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*applysymmetricright)(PetscPC,PetscVec,PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*destroy)(PetscPC) except PETSC_ERR_PYTHON + PetscErrorCode (*setup)(PetscPC) except PETSC_ERR_PYTHON + PetscErrorCode (*reset)(PetscPC) except PETSC_ERR_PYTHON + PetscErrorCode (*setfromoptions)(PetscPC, PetscOptionItems*) except PETSC_ERR_PYTHON + PetscErrorCode (*view)(PetscPC, PetscViewer) except PETSC_ERR_PYTHON + PetscErrorCode (*presolve)(PetscPC, PetscKSP, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*postsolve)(PetscPC, PetscKSP, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*apply)(PetscPC, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*matapply)(PetscPC, PetscMat, PetscMat) except PETSC_ERR_PYTHON + PetscErrorCode (*applytranspose)(PetscPC, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*applysymmetricleft)(PetscPC, PetscVec, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*applysymmetricright)(PetscPC, PetscVec, PetscVec) except PETSC_ERR_PYTHON ctypedef _PCOps *PCOps struct _p_PC: void *data @@ -1268,7 +1231,7 @@ cdef public PetscErrorCode PCPythonSetContext(PetscPC pc, void *ctx) \ PyPC(pc).setcontext(ctx, PC_(pc)) return FunctionEnd() -cdef PetscErrorCode PCPythonSetType_PYTHON(PetscPC pc, char name[]) \ +cdef PetscErrorCode PCPythonSetType_PYTHON(PetscPC pc, const char *name) \ except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCPythonSetType_PYTHON") if name == NULL: return FunctionEnd() # XXX @@ -1285,10 +1248,8 @@ cdef PetscErrorCode PCPythonGetType_PYTHON(PetscPC pc, const char *name[]) \ cdef PetscErrorCode PCCreate_Python( PetscPC pc, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCCreate_Python") - # cdef PCOps ops = pc.ops ops.reset = PCReset_Python ops.destroy = PCDestroy_Python @@ -1303,12 +1264,12 @@ cdef PetscErrorCode PCCreate_Python( ops.applysymmetricleft = PCApplySymmetricLeft_Python ops.applysymmetricright = PCApplySymmetricRight_Python # - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( pc, b"PCPythonSetType_C", - PCPythonSetType_PYTHON) ) - CHKERR( PetscObjectComposeFunction( + PCPythonSetType_PYTHON)) + CHKERR(PetscObjectComposeFunction( pc, b"PCPythonGetType_C", - PCPythonGetType_PYTHON) ) + PCPythonGetType_PYTHON)) # cdef ctx = PyPC(NULL) pc.data = ctx @@ -1317,8 +1278,7 @@ cdef PetscErrorCode PCCreate_Python( cdef inline PetscErrorCode PCDestroy_Python_inner( PetscPC pc, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: try: addRef(pc) PCPythonSetContext(pc, NULL) @@ -1330,39 +1290,36 @@ cdef inline PetscErrorCode PCDestroy_Python_inner( cdef PetscErrorCode 
PCDestroy_Python( PetscPC pc, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: FunctionBegin(b"PCDestroy_Python") - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( pc, b"PCPythonSetType_C", - NULL) ) - CHKERR( PetscObjectComposeFunction( + NULL)) + CHKERR(PetscObjectComposeFunction( pc, b"PCPythonGetType_C", - NULL) ) + NULL)) # if Py_IsInitialized(): PCDestroy_Python_inner(pc) return FunctionEnd() cdef PetscErrorCode PCSetUp_Python( PetscPC pc, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCSetUp_Python") - # cdef char name[2048] cdef PetscBool found = PETSC_FALSE if PyPC(pc).self is None: - CHKERR( PetscOptionsGetString(NULL, - getPrefix(pc), b"-pc_python_type", - name, sizeof(name), &found) ) + CHKERR(PetscOptionsGetString(NULL, + getPrefix(pc), b"-pc_python_type", + name, sizeof(name), &found)) if found and name[0]: - CHKERR( PCPythonSetType_PYTHON(pc, name) ) + CHKERR(PCPythonSetType_PYTHON(pc, name)) if PyPC(pc).self is None: return PetscSETERR(PETSC_ERR_USER, - "Python context not set, call one of \n" - " * PCPythonSetType(pc, \"[package.]module.class\")\n" - " * PCSetFromOptions(pc) and pass option " - "-pc_python_type [package.]module.class") + "Python context not set, call one of \n" + " * PCPythonSetType(pc, \"[package.]module.class\")\n" + " * PCSetFromOptions(pc) and pass option " + "-pc_python_type [package.]module.class") # cdef setUp = PyPC(pc).setUp if setUp is not None: @@ -1381,8 +1338,7 @@ cdef PetscErrorCode PCSetUp_Python( cdef inline PetscErrorCode PCReset_Python_inner( PetscPC pc, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: cdef reset = PyPC(pc).reset if reset is not None: reset(PC_(pc)) @@ -1390,8 +1346,7 @@ cdef inline PetscErrorCode PCReset_Python_inner( cdef PetscErrorCode PCReset_Python( PetscPC pc, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: if getRef(pc) == 0: return PETSC_SUCCESS FunctionBegin(b"PCReset_Python") if Py_IsInitialized(): PCReset_Python_inner(pc) @@ -1400,18 +1355,16 @@ cdef PetscErrorCode PCReset_Python( cdef PetscErrorCode PCSetFromOptions_Python( PetscPC pc, PetscOptionItems *PetscOptionsObject, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCSetFromOptions_Python") - # cdef char name[2048], *defval = PyPC(pc).getname() cdef PetscBool found = PETSC_FALSE cdef PetscOptionItems *opts "PetscOptionsObject" = PetscOptionsObject - CHKERR( PetscOptionsString( + CHKERR(PetscOptionsString( b"-pc_python_type", b"Python [package.]module[.{class|function}]", - b"PCPythonSetType", defval, name, sizeof(name), &found) ); opts; + b"PCPythonSetType", defval, name, sizeof(name), &found)); opts if found and name[0]: - CHKERR( PCPythonSetType_PYTHON(pc, name) ) + CHKERR(PCPythonSetType_PYTHON(pc, name)) # cdef setFromOptions = PyPC(pc).setFromOptions if setFromOptions is not None: @@ -1421,8 +1374,7 @@ cdef PetscErrorCode PCSetFromOptions_Python( cdef PetscErrorCode PCView_Python( PetscPC pc, PetscViewer vwr, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCView_Python") viewcontext(PyPC(pc), vwr) cdef view = PyPC(pc).view @@ -1435,8 +1387,7 @@ cdef PetscErrorCode PCPreSolve_Python( PetscKSP ksp, PetscVec b, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCPreSolve_Python") cdef preSolve = PyPC(pc).preSolve if preSolve is not None: @@ 
-1448,8 +1399,7 @@ cdef PetscErrorCode PCPostSolve_Python( PetscKSP ksp, PetscVec b, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCPostSolve_Python") cdef postSolve = PyPC(pc).postSolve if postSolve is not None: @@ -1460,8 +1410,7 @@ cdef PetscErrorCode PCApply_Python( PetscPC pc, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCApply_Python") cdef apply = PyPC(pc).apply apply(PC_(pc), Vec_(x), Vec_(y)) @@ -1471,8 +1420,7 @@ cdef PetscErrorCode PCApplyTranspose_Python( PetscPC pc, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCApplyTranspose_Python") cdef applyTranspose = PyPC(pc).applyTranspose applyTranspose(PC_(pc), Vec_(x), Vec_(y)) @@ -1482,8 +1430,7 @@ cdef PetscErrorCode PCApplySymmetricLeft_Python( PetscPC pc, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCApplySymmetricLeft_Python") cdef applySymmetricLeft = PyPC(pc).applySymmetricLeft applySymmetricLeft(PC_(pc), Vec_(x), Vec_(y)) @@ -1493,8 +1440,7 @@ cdef PetscErrorCode PCApplySymmetricRight_Python( PetscPC pc, PetscVec x, PetscVec y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCApplySymmetricRight_Python") cdef applySymmetricRight = PyPC(pc).applySymmetricRight applySymmetricRight(PC_(pc), Vec_(x), Vec_(y)) @@ -1504,14 +1450,13 @@ cdef PetscErrorCode PCMatApply_Python( PetscPC pc, PetscMat X, PetscMat Y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PCMatApply_Python") cdef matApply = PyPC(pc).matApply if matApply is None: try: pc.ops.matapply = NULL - CHKERR( PCMatApply(pc, X, Y) ) + CHKERR(PCMatApply(pc, X, Y)) finally: pc.ops.matapply = PCMatApply_Python return FunctionEnd() @@ -1523,26 +1468,26 @@ cdef PetscErrorCode PCMatApply_Python( cdef extern from * nogil: struct _KSPOps: - PetscErrorCode (*destroy)(PetscKSP) except PETSC_ERR_PYTHON - PetscErrorCode (*setup)(PetscKSP) except PETSC_ERR_PYTHON - PetscErrorCode (*reset)(PetscKSP) except PETSC_ERR_PYTHON - PetscErrorCode (*setfromoptions)(PetscKSP,PetscOptionItems*) except PETSC_ERR_PYTHON - PetscErrorCode (*view)(PetscKSP,PetscViewer) except PETSC_ERR_PYTHON - PetscErrorCode (*solve)(PetscKSP) except PETSC_ERR_PYTHON - PetscErrorCode (*buildsolution)(PetscKSP,PetscVec,PetscVec*) except PETSC_ERR_PYTHON - PetscErrorCode (*buildresidual)(PetscKSP,PetscVec,PetscVec,PetscVec*) except PETSC_ERR_PYTHON + PetscErrorCode (*destroy)(PetscKSP) except PETSC_ERR_PYTHON + PetscErrorCode (*setup)(PetscKSP) except PETSC_ERR_PYTHON + PetscErrorCode (*reset)(PetscKSP) except PETSC_ERR_PYTHON + PetscErrorCode (*setfromoptions)(PetscKSP, PetscOptionItems*) except PETSC_ERR_PYTHON + PetscErrorCode (*view)(PetscKSP, PetscViewer) except PETSC_ERR_PYTHON + PetscErrorCode (*solve)(PetscKSP) except PETSC_ERR_PYTHON + PetscErrorCode (*buildsolution)(PetscKSP, PetscVec, PetscVec*) except PETSC_ERR_PYTHON + PetscErrorCode (*buildresidual)(PetscKSP, PetscVec, PetscVec, PetscVec*) except PETSC_ERR_PYTHON ctypedef _KSPOps *KSPOps struct _p_KSP: void *data KSPOps ops PetscBool transpose_solve - PetscInt iter"its",max_its"max_it" + PetscInt iter"its", max_its"max_it" PetscReal norm"rnorm" PetscKSPConvergedReason reason cdef extern from * nogil: # custom.h - PetscErrorCode 
KSPConverged(PetscKSP,PetscInt,PetscReal,PetscKSPConvergedReason*) - PetscErrorCode KSPLogHistory(PetscKSP,PetscReal) + PetscErrorCode KSPConverged(PetscKSP, PetscInt, PetscReal, PetscKSPConvergedReason*) + PetscErrorCode KSPLogHistory(PetscKSP, PetscReal) @cython.internal @@ -1565,7 +1510,7 @@ cdef public PetscErrorCode KSPPythonSetContext(PetscKSP ksp, void *ctx) \ PyKSP(ksp).setcontext(ctx, KSP_(ksp)) return FunctionEnd() -cdef PetscErrorCode KSPPythonSetType_PYTHON(PetscKSP ksp, char name[]) \ +cdef PetscErrorCode KSPPythonSetType_PYTHON(PetscKSP ksp, const char *name) \ except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPPythonSetType_PYTHON") if name == NULL: return FunctionEnd() # XXX @@ -1582,10 +1527,8 @@ cdef PetscErrorCode KSPPythonGetType_PYTHON(PetscKSP ksp, const char *name[]) \ cdef PetscErrorCode KSPCreate_Python( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPCreate_Python") - # cdef KSPOps ops = ksp.ops ops.reset = KSPReset_Python ops.destroy = KSPDestroy_Python @@ -1596,35 +1539,34 @@ cdef PetscErrorCode KSPCreate_Python( ops.buildsolution = KSPBuildSolution_Python ops.buildresidual = KSPBuildResidual_Python # - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( ksp, b"KSPPythonSetType_C", - KSPPythonSetType_PYTHON) ) - CHKERR( PetscObjectComposeFunction( + KSPPythonSetType_PYTHON)) + CHKERR(PetscObjectComposeFunction( ksp, b"KSPPythonGetType_C", - KSPPythonGetType_PYTHON) ) + KSPPythonGetType_PYTHON)) # cdef ctx = PyKSP(NULL) ksp.data = ctx Py_INCREF(ksp.data) # - CHKERR( KSPSetSupportedNorm( - ksp, KSP_NORM_PRECONDITIONED, PC_LEFT, 3) ) - CHKERR( KSPSetSupportedNorm( - ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 3) ) - CHKERR( KSPSetSupportedNorm( - ksp, KSP_NORM_UNPRECONDITIONED, PC_LEFT, 2) ) - CHKERR( KSPSetSupportedNorm( - ksp, KSP_NORM_PRECONDITIONED, PC_RIGHT, 2) ) - CHKERR( KSPSetSupportedNorm( - ksp, KSP_NORM_PRECONDITIONED, PC_SYMMETRIC, 1) ) - CHKERR( KSPSetSupportedNorm( - ksp, KSP_NORM_UNPRECONDITIONED, PC_SYMMETRIC, 1) ) + CHKERR(KSPSetSupportedNorm( + ksp, KSP_NORM_PRECONDITIONED, PC_LEFT, 3)) + CHKERR(KSPSetSupportedNorm( + ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 3)) + CHKERR(KSPSetSupportedNorm( + ksp, KSP_NORM_UNPRECONDITIONED, PC_LEFT, 2)) + CHKERR(KSPSetSupportedNorm( + ksp, KSP_NORM_PRECONDITIONED, PC_RIGHT, 2)) + CHKERR(KSPSetSupportedNorm( + ksp, KSP_NORM_PRECONDITIONED, PC_SYMMETRIC, 1)) + CHKERR(KSPSetSupportedNorm( + ksp, KSP_NORM_UNPRECONDITIONED, PC_SYMMETRIC, 1)) return FunctionEnd() cdef inline PetscErrorCode KSPDestroy_Python_inner( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: try: addRef(ksp) KSPPythonSetContext(ksp, NULL) @@ -1636,13 +1578,12 @@ cdef inline PetscErrorCode KSPDestroy_Python_inner( cdef PetscErrorCode KSPDestroy_Python( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: FunctionBegin(b"KSPDestroy_Python") - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( ksp, b"KSPPythonSetType_C", NULL)) - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( ksp, b"KSPPythonGetType_C", NULL)) # @@ -1651,24 +1592,22 @@ cdef PetscErrorCode KSPDestroy_Python( cdef PetscErrorCode KSPSetUp_Python( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPSetUp_Python") - # cdef char name[2048] cdef PetscBool found = PETSC_FALSE if PyKSP(ksp).self is None: - CHKERR( 
PetscOptionsGetString(NULL, - getPrefix(ksp), b"-ksp_python_type", - name, sizeof(name), &found) ) + CHKERR(PetscOptionsGetString(NULL, + getPrefix(ksp), b"-ksp_python_type", + name, sizeof(name), &found)) if found and name[0]: - CHKERR( KSPPythonSetType_PYTHON(ksp, name) ) + CHKERR(KSPPythonSetType_PYTHON(ksp, name)) if PyKSP(ksp).self is None: return PetscSETERR(PETSC_ERR_USER, - "Python context not set, call one of \n" - " * KSPPythonSetType(ksp, \"[package.]module.class\")\n" - " * KSPSetFromOptions(ksp) and pass option " - "-ksp_python_type [package.]module.class") + "Python context not set, call one of \n" + " * KSPPythonSetType(ksp, \"[package.]module.class\")\n" + " * KSPSetFromOptions(ksp) and pass option " + "-ksp_python_type [package.]module.class") # cdef setUp = PyKSP(ksp).setUp if setUp is not None: @@ -1677,8 +1616,7 @@ cdef PetscErrorCode KSPSetUp_Python( cdef inline PetscErrorCode KSPReset_Python_inner( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: cdef reset = PyKSP(ksp).reset if reset is not None: reset(KSP_(ksp)) @@ -1686,30 +1624,27 @@ cdef inline PetscErrorCode KSPReset_Python_inner( cdef PetscErrorCode KSPReset_Python( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: if getRef(ksp) == 0: return PETSC_SUCCESS FunctionBegin(b"KSPReset_Python") - CHKERR( PetscObjectCompose(ksp, b"@ksp.vec_work_sol", NULL) ) - CHKERR( PetscObjectCompose(ksp, b"@ksp.vec_work_res", NULL) ) + CHKERR(PetscObjectCompose(ksp, b"@ksp.vec_work_sol", NULL)) + CHKERR(PetscObjectCompose(ksp, b"@ksp.vec_work_res", NULL)) if Py_IsInitialized(): KSPReset_Python_inner(ksp) return FunctionEnd() cdef PetscErrorCode KSPSetFromOptions_Python( PetscKSP ksp, PetscOptionItems *PetscOptionsObject - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPSetFromOptions_Python") - # cdef char name[2048], *defval = PyKSP(ksp).getname() cdef PetscBool found = PETSC_FALSE cdef PetscOptionItems *opts "PetscOptionsObject" = PetscOptionsObject - CHKERR( PetscOptionsString( + CHKERR(PetscOptionsString( b"-ksp_python_type", b"Python [package.]module[.{class|function}]", - b"KSPPythonSetType", defval, name, sizeof(name), &found) ); opts; + b"KSPPythonSetType", defval, name, sizeof(name), &found)); opts if found and name[0]: - CHKERR( KSPPythonSetType_PYTHON(ksp, name) ) + CHKERR(KSPPythonSetType_PYTHON(ksp, name)) # cdef setFromOptions = PyKSP(ksp).setFromOptions if setFromOptions is not None: @@ -1719,8 +1654,7 @@ cdef PetscErrorCode KSPSetFromOptions_Python( cdef PetscErrorCode KSPView_Python( PetscKSP ksp, PetscViewer vwr, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPView_Python") viewcontext(PyKSP(ksp), vwr) cdef view = PyKSP(ksp).view @@ -1732,8 +1666,7 @@ cdef PetscErrorCode KSPBuildSolution_Python( PetscKSP ksp, PetscVec v, PetscVec *V, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPBuildSolution_Python") cdef PetscVec x = v cdef buildSolution = PyKSP(ksp).buildSolution @@ -1742,7 +1675,7 @@ cdef PetscErrorCode KSPBuildSolution_Python( buildSolution(KSP_(ksp), Vec_(x)) if V != NULL: V[0] = x else: - CHKERR( KSPBuildSolutionDefault(ksp, v, V) ) + CHKERR(KSPBuildSolutionDefault(ksp, v, V)) return FunctionEnd() cdef PetscErrorCode KSPBuildResidual_Python( @@ -1750,25 +1683,23 @@ cdef PetscErrorCode KSPBuildResidual_Python( PetscVec t, PetscVec v, PetscVec *V, - ) \ - except 
PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPBuildResidual_Python") cdef buildResidual = PyKSP(ksp).buildResidual if buildResidual is not None: buildResidual(KSP_(ksp), Vec_(t), Vec_(v)) if V != NULL: V[0] = v else: - CHKERR( KSPBuildResidualDefault(ksp, t, v, V) ) + CHKERR(KSPBuildResidualDefault(ksp, t, v, V)) return FunctionEnd() cdef PetscErrorCode KSPSolve_Python( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPSolve_Python") cdef PetscVec B = NULL, X = NULL - CHKERR( KSPGetRhs(ksp, &B) ) - CHKERR( KSPGetSolution(ksp, &X) ) + CHKERR(KSPGetRhs(ksp, &B)) + CHKERR(KSPGetSolution(ksp, &X)) # ksp.iter = 0 ksp.reason = KSP_CONVERGED_ITERATING @@ -1788,56 +1719,54 @@ cdef PetscErrorCode KSPSolve_Python_default( PetscKSP ksp, PetscVec B, PetscVec X, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPSolve_Python_default") - # cdef PetscVec t = NULL - CHKERR( PetscObjectQuery( + CHKERR(PetscObjectQuery( ksp, - b"@ksp.vec_work_sol", - &t) ) + b"@ksp.vec_work_sol", + &t)) if t == NULL: - CHKERR( VecDuplicate(X, &t) ) - CHKERR( PetscObjectCompose( + CHKERR(VecDuplicate(X, &t)) + CHKERR(PetscObjectCompose( ksp, - b"@ksp.vec_work_sol", - t) ) + b"@ksp.vec_work_sol", + t)) cdef PetscVec v = NULL - CHKERR( PetscObjectQuery( + CHKERR(PetscObjectQuery( ksp, - b"@ksp.vec_work_res", - &v) ) + b"@ksp.vec_work_res", + &v)) if v == NULL: - CHKERR( VecDuplicate(B, &v) ) - CHKERR( PetscObjectCompose( + CHKERR(VecDuplicate(B, &v)) + CHKERR(PetscObjectCompose( ksp, - b"@ksp.vec_work_res", - v) ) + b"@ksp.vec_work_res", + v)) # - cdef PetscInt its = 0 cdef PetscVec R = NULL cdef PetscReal rnorm = 0 # - CHKERR( KSPBuildResidual(ksp, t, v, &R) ) - CHKERR( VecNorm(R, PETSC_NORM_2, &rnorm) ) + CHKERR(KSPBuildResidual(ksp, t, v, &R)) + CHKERR(VecNorm(R, PETSC_NORM_2, &rnorm)) # - CHKERR( KSPConverged(ksp, ksp.iter, rnorm, &ksp.reason) ) - CHKERR( KSPLogHistory(ksp, ksp.norm) ) - CHKERR( KSPMonitor(ksp, ksp.iter, ksp.norm) ) + CHKERR(KSPConverged(ksp, ksp.iter, rnorm, &ksp.reason)) + CHKERR(KSPLogHistory(ksp, ksp.norm)) + CHKERR(KSPMonitor(ksp, ksp.iter, ksp.norm)) for its from 0 <= its < ksp.max_its: + its # unused if ksp.reason: break KSPPreStep_Python(ksp) # KSPStep_Python(ksp, B, X) # FIXME? B? 
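# KSPSolve_Python_default is a plain defect-correction loop: build the
# residual, test convergence, then preStep / step / postStep until
# max_its. A KSPPYTHON context therefore only needs a step() method
# (or may replace solve() outright). A minimal unpreconditioned
# Richardson sketch, assuming the step(ksp, b, x) calling sequence the
# shim forwards to; module and class names are illustrative:
#
#     # richardson.py
#     class Richardson:
#         def step(self, ksp, b, x):
#             A, P = ksp.getOperators()
#             r = b.duplicate()
#             A.mult(x, r)        # r = A x
#             r.aypx(-1.0, b)     # r = b - A x
#             x.axpy(1.0, r)      # x += r
#
# selected at run time with:
#     -ksp_type python -ksp_python_type richardson.Richardson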
- CHKERR( KSPBuildResidual(ksp, t, v, &R) ) - CHKERR( VecNorm(R, PETSC_NORM_2, &rnorm) ) + CHKERR(KSPBuildResidual(ksp, t, v, &R)) + CHKERR(VecNorm(R, PETSC_NORM_2, &rnorm)) ksp.iter += 1 # KSPPostStep_Python(ksp) - CHKERR( KSPConverged(ksp, ksp.iter, rnorm, &ksp.reason) ) - CHKERR( KSPLogHistory(ksp, ksp.norm) ) - CHKERR( KSPMonitor(ksp, ksp.iter, ksp.norm) ) + CHKERR(KSPConverged(ksp, ksp.iter, rnorm, &ksp.reason)) + CHKERR(KSPLogHistory(ksp, ksp.norm)) + CHKERR(KSPMonitor(ksp, ksp.iter, ksp.norm)) if ksp.iter == ksp.max_its: if ksp.reason == KSP_CONVERGED_ITERATING: ksp.reason = KSP_DIVERGED_MAX_IT @@ -1846,8 +1775,7 @@ cdef PetscErrorCode KSPSolve_Python_default( cdef PetscErrorCode KSPPreStep_Python( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPPreStep_Python") cdef preStep = PyKSP(ksp).preStep if preStep is not None: @@ -1856,8 +1784,7 @@ cdef PetscErrorCode KSPPreStep_Python( cdef PetscErrorCode KSPPostStep_Python( PetscKSP ksp, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPPostStep_Python") cdef postStep = PyKSP(ksp).postStep if postStep is not None: @@ -1868,8 +1795,7 @@ cdef PetscErrorCode KSPStep_Python( PetscKSP ksp, PetscVec B, PetscVec X, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"KSPStep_Python") cdef step = None if ksp.transpose_solve: @@ -1885,25 +1811,26 @@ cdef PetscErrorCode KSPStep_Python( cdef extern from * nogil: struct _SNESOps: - PetscErrorCode (*destroy)(PetscSNES) except PETSC_ERR_PYTHON - PetscErrorCode (*setup)(PetscSNES) except PETSC_ERR_PYTHON - PetscErrorCode (*reset)(PetscSNES) except PETSC_ERR_PYTHON - PetscErrorCode (*setfromoptions)(PetscSNES,PetscOptionItems*) except PETSC_ERR_PYTHON - PetscErrorCode (*view)(PetscSNES,PetscViewer) except PETSC_ERR_PYTHON - PetscErrorCode (*solve)(PetscSNES) except PETSC_ERR_PYTHON + PetscErrorCode (*destroy)(PetscSNES) except PETSC_ERR_PYTHON + PetscErrorCode (*setup)(PetscSNES) except PETSC_ERR_PYTHON + PetscErrorCode (*reset)(PetscSNES) except PETSC_ERR_PYTHON + PetscErrorCode (*setfromoptions)(PetscSNES, PetscOptionItems*) except PETSC_ERR_PYTHON + PetscErrorCode (*view)(PetscSNES, PetscViewer) except PETSC_ERR_PYTHON + PetscErrorCode (*solve)(PetscSNES) except PETSC_ERR_PYTHON ctypedef _SNESOps *SNESOps struct _p_SNES: void *data SNESOps ops - PetscInt iter,max_its,linear_its - PetscReal norm,rtol,ttol + PetscInt iter, max_its, linear_its + PetscReal norm, xnorm, ynorm, rtol, ttol PetscSNESConvergedReason reason - PetscVec vec_sol,vec_sol_update,vec_func - PetscMat jacobian,jacobian_pre + PetscVec vec_sol, vec_sol_update, vec_func + PetscMat jacobian, jacobian_pre PetscKSP ksp cdef extern from * nogil: # custom.h - PetscErrorCode SNESLogHistory(PetscSNES,PetscReal,PetscInt) + PetscErrorCode SNESLogHistory(PetscSNES, PetscReal, PetscInt) + PetscErrorCode SNESComputeUpdate(PetscSNES) @cython.internal @@ -1926,7 +1853,7 @@ cdef public PetscErrorCode SNESPythonSetContext(PetscSNES snes, void *ctx) \ PySNES(snes).setcontext(ctx, SNES_(snes)) return FunctionEnd() -cdef PetscErrorCode SNESPythonSetType_PYTHON(PetscSNES snes, char name[]) \ +cdef PetscErrorCode SNESPythonSetType_PYTHON(PetscSNES snes, const char *name) \ except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESPythonSetType_PYTHON") if name == NULL: return FunctionEnd() # XXX @@ -1943,10 +1870,8 @@ cdef PetscErrorCode SNESPythonGetType_PYTHON(PetscSNES snes, const char *name[]) cdef 
PetscErrorCode SNESCreate_Python( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESCreate_Python") - # cdef SNESOps ops = snes.ops cdef PetscSNESLineSearch ls = NULL ops.reset = SNESReset_Python @@ -1956,26 +1881,25 @@ cdef PetscErrorCode SNESCreate_Python( ops.view = SNESView_Python ops.solve = SNESSolve_Python # - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( snes, b"SNESPythonSetType_C", - SNESPythonSetType_PYTHON) ) - CHKERR( PetscObjectComposeFunction( + SNESPythonSetType_PYTHON)) + CHKERR(PetscObjectComposeFunction( snes, b"SNESPythonGetType_C", - SNESPythonGetType_PYTHON) ) + SNESPythonGetType_PYTHON)) # cdef ctx = PySNES(NULL) snes.data = ctx # Ensure that the SNES has a linesearch object early enough that # it gets setFromOptions. - CHKERR( SNESGetLineSearch(snes, &ls) ) + CHKERR(SNESGetLineSearch(snes, &ls)) Py_INCREF(snes.data) return FunctionEnd() cdef inline PetscErrorCode SNESDestroy_Python_inner( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: try: addRef(snes) SNESPythonSetContext(snes, NULL) @@ -1987,41 +1911,36 @@ cdef inline PetscErrorCode SNESDestroy_Python_inner( cdef PetscErrorCode SNESDestroy_Python( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: FunctionBegin(b"SNESDestroy_Python") - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( snes, b"SNESPythonSetType_C", - NULL) ) - CHKERR( PetscObjectComposeFunction( + NULL)) + CHKERR(PetscObjectComposeFunction( snes, b"SNESPythonGetType_C", - NULL) ) + NULL)) # if Py_IsInitialized(): SNESDestroy_Python_inner(snes) return FunctionEnd() cdef PetscErrorCode SNESSetUp_Python( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESSetUp_Python") - # - #SNESGetKSP(snes,&snes.ksp) - # cdef char name[2048] cdef PetscBool found = PETSC_FALSE if PySNES(snes).self is None: - CHKERR( PetscOptionsGetString(NULL, - getPrefix(snes), b"-snes_python_type", - name, sizeof(name), &found) ) + CHKERR(PetscOptionsGetString(NULL, + getPrefix(snes), b"-snes_python_type", + name, sizeof(name), &found)) if found and name[0]: - CHKERR( SNESPythonSetType_PYTHON(snes, name) ) + CHKERR(SNESPythonSetType_PYTHON(snes, name)) if PySNES(snes).self is None: return PetscSETERR(PETSC_ERR_USER, - "Python context not set, call one of \n" - " * SNESPythonSetType(snes, \"[package.]module.class\")\n" - " * SNESSetFromOptions(snes) and pass option " - "-snes_python_type [package.]module.class") + "Python context not set, call one of \n" + " * SNESPythonSetType(snes, \"[package.]module.class\")\n" + " * SNESSetFromOptions(snes) and pass option " + "-snes_python_type [package.]module.class") # cdef setUp = PySNES(snes).setUp if setUp is not None: @@ -2030,8 +1949,7 @@ cdef PetscErrorCode SNESSetUp_Python( cdef inline PetscErrorCode SNESReset_Python_inner( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: cdef reset = PySNES(snes).reset if reset is not None: reset(SNES_(snes)) @@ -2039,8 +1957,7 @@ cdef inline PetscErrorCode SNESReset_Python_inner( cdef PetscErrorCode SNESReset_Python( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: if getRef(snes) == 0: return PETSC_SUCCESS FunctionBegin(b"SNESReset_Python") if Py_IsInitialized(): SNESReset_Python_inner(snes) @@ -2049,18 +1966,16 @@ cdef PetscErrorCode 
SNESReset_Python( cdef PetscErrorCode SNESSetFromOptions_Python( PetscSNES snes, PetscOptionItems *PetscOptionsObject, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESSetFromOptions_Python") - # cdef char name[2048], *defval = PySNES(snes).getname() cdef PetscBool found = PETSC_FALSE cdef PetscOptionItems *opts "PetscOptionsObject" = PetscOptionsObject - CHKERR( PetscOptionsString( + CHKERR(PetscOptionsString( b"-snes_python_type", b"Python [package.]module[.{class|function}]", - b"SNESPythonSetType", defval, name, sizeof(name), &found) ); opts; + b"SNESPythonSetType", defval, name, sizeof(name), &found)); opts if found and name[0]: - CHKERR( SNESPythonSetType_PYTHON(snes, name) ) + CHKERR(SNESPythonSetType_PYTHON(snes, name)) # cdef setFromOptions = PySNES(snes).setFromOptions if setFromOptions is not None: @@ -2070,8 +1985,7 @@ cdef PetscErrorCode SNESSetFromOptions_Python( cdef PetscErrorCode SNESView_Python( PetscSNES snes, PetscViewer vwr, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESView_Python") viewcontext(PySNES(snes), vwr) cdef view = PySNES(snes).view @@ -2081,12 +1995,11 @@ cdef PetscErrorCode SNESView_Python( cdef PetscErrorCode SNESSolve_Python( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESSolve_Python") cdef PetscVec b = NULL, x = NULL - CHKERR( SNESGetRhs(snes, &b) ) - CHKERR( SNESGetSolution(snes, &x) ) + CHKERR(SNESGetRhs(snes, &b)) + CHKERR(SNESGetSolution(snes, &x)) # snes.iter = 0 # @@ -2100,62 +2013,52 @@ cdef PetscErrorCode SNESSolve_Python( cdef PetscErrorCode SNESSolve_Python_default( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESSolve_Python_default") - # cdef PetscVec X=NULL, F=NULL, Y=NULL - cdef PetscSNESLineSearch ls - CHKERR( SNESGetSolution(snes, &X) ) - CHKERR( SNESGetFunction(snes, &F, NULL, NULL) ) - CHKERR( SNESGetSolutionUpdate(snes, &Y) ) - CHKERR( SNESGetLineSearch(snes, &ls) ) - cdef PetscInt its=0, lits=0 - cdef PetscReal xnorm = 0.0 - cdef PetscReal fnorm = 0.0 - cdef PetscReal ynorm = 0.0 - # - CHKERR( VecSet(Y, 0.0) ) - CHKERR( SNESComputeFunction(snes, X, F) ) - CHKERR( VecNorm(X, PETSC_NORM_2, &xnorm) ) - CHKERR( VecNorm(F, PETSC_NORM_2, &fnorm) ) - # - CHKERR( SNESLogHistory(snes, snes.norm, lits) ) - CHKERR( SNESConverged(snes, snes.iter, xnorm, ynorm, fnorm) ) - CHKERR( SNESMonitor(snes, snes.iter, snes.norm) ) + cdef PetscSNESLineSearch ls=NULL + CHKERR(SNESGetSolution(snes, &X)) + CHKERR(SNESGetFunction(snes, &F, NULL, NULL)) + CHKERR(SNESGetSolutionUpdate(snes, &Y)) + CHKERR(SNESGetLineSearch(snes, &ls)) + # + CHKERR(VecSet(Y, 0.0)) + snes.ynorm = 0.0 + CHKERR(SNESComputeFunction(snes, X, F)) + CHKERR(VecNorm(X, PETSC_NORM_2, &snes.xnorm)) + CHKERR(VecNorm(F, PETSC_NORM_2, &snes.norm)) + # + cdef PetscInt lits=0 + CHKERR(SNESLogHistory(snes, snes.norm, lits)) + CHKERR(SNESConverged(snes, snes.iter, snes.xnorm, snes.ynorm, snes.norm)) + CHKERR(SNESMonitor(snes, snes.iter, snes.norm)) if snes.reason: return FunctionEnd() - cdef PetscObjectState ostate = -1 - cdef PetscObjectState nstate = -1 for its from 0 <= its < snes.max_its: - CHKERR( PetscObjectStateGet(X, &ostate) ) + its # unused + SNESComputeUpdate(snes) SNESPreStep_Python(snes) - CHKERR( PetscObjectStateGet(X, &nstate) ) - if ostate != nstate: - CHKERR( SNESComputeFunction(snes, X, F) ) - CHKERR( VecNorm(F, PETSC_NORM_2, &fnorm) ) # lits 
= -snes.linear_its SNESStep_Python(snes, X, F, Y) lits += snes.linear_its # - CHKERR( SNESLineSearchApply(ls, X, F, NULL, Y) ) - CHKERR( SNESLineSearchGetNorms(ls, &xnorm, &fnorm, &ynorm) ) + CHKERR(SNESLineSearchApply(ls, X, F, NULL, Y)) + CHKERR(SNESLineSearchGetNorms(ls, &snes.xnorm, &snes.norm, &snes.ynorm)) snes.iter += 1 # SNESPostStep_Python(snes) - CHKERR( SNESLogHistory(snes, snes.norm, lits) ) - CHKERR( SNESConverged(snes, snes.iter, xnorm, ynorm, fnorm) ) - CHKERR( SNESMonitor(snes, snes.iter, snes.norm) ) + CHKERR(SNESLogHistory(snes, snes.norm, lits)) + CHKERR(SNESConverged(snes, snes.iter, snes.xnorm, snes.ynorm, snes.norm)) + CHKERR(SNESMonitor(snes, snes.iter, snes.norm)) if snes.reason: break # return FunctionEnd() cdef PetscErrorCode SNESPreStep_Python( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESPreStep_Python") cdef preStep = PySNES(snes).preStep if preStep is not None: @@ -2164,8 +2067,7 @@ cdef PetscErrorCode SNESPreStep_Python( cdef PetscErrorCode SNESPostStep_Python( PetscSNES snes, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESPostStep_Python") cdef postStep = PySNES(snes).postStep if postStep is not None: @@ -2177,8 +2079,7 @@ cdef PetscErrorCode SNESStep_Python( PetscVec X, PetscVec F, PetscVec Y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESStep_Python") cdef step = PySNES(snes).step if step is not None: @@ -2192,16 +2093,15 @@ cdef PetscErrorCode SNESStep_Python_default( PetscVec X, PetscVec F, PetscVec Y, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"SNESStep_Python_default") cdef PetscMat J = NULL, P = NULL cdef PetscInt lits = 0 - CHKERR( SNESGetJacobian(snes, &J, &P, NULL, NULL) ) - CHKERR( SNESComputeJacobian(snes, X, J, P) ) - CHKERR( KSPSetOperators(snes.ksp, J, P) ) - CHKERR( KSPSolve(snes.ksp, F, Y) ) - CHKERR( KSPGetIterationNumber(snes.ksp, &lits) ) + CHKERR(SNESGetJacobian(snes, &J, &P, NULL, NULL)) + CHKERR(SNESComputeJacobian(snes, X, J, P)) + CHKERR(KSPSetOperators(snes.ksp, J, P)) + CHKERR(KSPSolve(snes.ksp, F, Y)) + CHKERR(KSPGetIterationNumber(snes.ksp, &lits)) snes.linear_its += lits return FunctionEnd() @@ -2210,28 +2110,28 @@ cdef PetscErrorCode SNESStep_Python_default( cdef extern from * nogil: struct _TSOps: - PetscErrorCode (*destroy)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*setup)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*reset)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*setfromoptions)(PetscTS,PetscOptionItems*) except PETSC_ERR_PYTHON - PetscErrorCode (*view)(PetscTS,PetscViewer) except PETSC_ERR_PYTHON - PetscErrorCode (*step)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*rollback)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*interpolate)(PetscTS,PetscReal,PetscVec) except PETSC_ERR_PYTHON - PetscErrorCode (*evaluatestep)(PetscTS,PetscInt,PetscVec,PetscBool*) except PETSC_ERR_PYTHON - PetscErrorCode (*solve)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*snesfunction)(PetscSNES,PetscVec,PetscVec,PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*snesjacobian)(PetscSNES,PetscVec,PetscMat,PetscMat,PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*destroy)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*setup)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*reset)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*setfromoptions)(PetscTS, 
PetscOptionItems*) except PETSC_ERR_PYTHON + PetscErrorCode (*view)(PetscTS, PetscViewer) except PETSC_ERR_PYTHON + PetscErrorCode (*step)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*rollback)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*interpolate)(PetscTS, PetscReal, PetscVec) except PETSC_ERR_PYTHON + PetscErrorCode (*evaluatestep)(PetscTS, PetscInt, PetscVec, PetscBool*) except PETSC_ERR_PYTHON + PetscErrorCode (*solve)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*snesfunction)(PetscSNES, PetscVec, PetscVec, PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*snesjacobian)(PetscSNES, PetscVec, PetscMat, PetscMat, PetscTS) except PETSC_ERR_PYTHON ctypedef _TSOps *TSOps struct _TSUserOps: - PetscErrorCode (*prestep)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*prestage)(PetscTS,PetscReal) except PETSC_ERR_PYTHON - PetscErrorCode (*poststage)(PetscTS,PetscReal,PetscInt,PetscVec*) except PETSC_ERR_PYTHON - PetscErrorCode (*poststep)(PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode (*rhsfunction)(PetscTS,PetscReal,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - PetscErrorCode (*ifunction) (PetscTS,PetscReal,PetscVec,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - PetscErrorCode (*rhsjacobian)(PetscTS,PetscReal,PetscVec,PetscMat,PetscMat,void*) except PETSC_ERR_PYTHON - PetscErrorCode (*ijacobian) (PetscTS,PetscReal,PetscVec,PetscVec,PetscReal,PetscMat,PetscMat,void*) except PETSC_ERR_PYTHON + PetscErrorCode (*prestep)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*prestage)(PetscTS, PetscReal) except PETSC_ERR_PYTHON + PetscErrorCode (*poststage)(PetscTS, PetscReal, PetscInt, PetscVec*) except PETSC_ERR_PYTHON + PetscErrorCode (*poststep)(PetscTS) except PETSC_ERR_PYTHON + PetscErrorCode (*rhsfunction)(PetscTS, PetscReal, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + PetscErrorCode (*ifunction) (PetscTS, PetscReal, PetscVec, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + PetscErrorCode (*rhsjacobian)(PetscTS, PetscReal, PetscVec, PetscMat, PetscMat, void*) except PETSC_ERR_PYTHON + PetscErrorCode (*ijacobian) (PetscTS, PetscReal, PetscVec, PetscVec, PetscReal, PetscMat, PetscMat, void*) except PETSC_ERR_PYTHON ctypedef _TSUserOps *TSUserOps struct _p_TS: void *data @@ -2275,7 +2175,7 @@ cdef public PetscErrorCode TSPythonSetContext(PetscTS ts, void *ctx) \ PyTS(ts).setcontext(ctx, TS_(ts)) return FunctionEnd() -cdef PetscErrorCode TSPythonSetType_PYTHON(PetscTS ts, char name[]) \ +cdef PetscErrorCode TSPythonSetType_PYTHON(PetscTS ts, const char *name) \ except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSPythonSetType_PYTHON") if name == NULL: return FunctionEnd() # XXX @@ -2292,10 +2192,8 @@ cdef PetscErrorCode TSPythonGetType_PYTHON(PetscTS ts, const char *name[]) \ cdef PetscErrorCode TSCreate_Python( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSCreate_Python") - # cdef TSOps ops = ts.ops ops.reset = TSReset_Python ops.destroy = TSDestroy_Python @@ -2309,12 +2207,12 @@ cdef PetscErrorCode TSCreate_Python( ops.snesfunction = SNESTSFormFunction_Python ops.snesjacobian = SNESTSFormJacobian_Python # - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( ts, b"TSPythonSetType_C", - TSPythonSetType_PYTHON) ) - CHKERR( PetscObjectComposeFunction( + TSPythonSetType_PYTHON)) + CHKERR(PetscObjectComposeFunction( ts, b"TSPythonGetType_C", - TSPythonGetType_PYTHON) ) + TSPythonGetType_PYTHON)) # ts.usessnes = PETSC_TRUE # @@ -2325,8 +2223,7 @@ cdef 
PetscErrorCode TSCreate_Python( cdef inline PetscErrorCode TSDestroy_Python_inner( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: try: addRef(ts) TSPythonSetContext(ts, NULL) @@ -2338,52 +2235,49 @@ cdef inline PetscErrorCode TSDestroy_Python_inner( cdef PetscErrorCode TSDestroy_Python( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: FunctionBegin(b"TSDestroy_Python") - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( ts, b"TSPythonSetType_C", - NULL) ) - CHKERR( PetscObjectComposeFunction( + NULL)) + CHKERR(PetscObjectComposeFunction( ts, b"TSPythonGetType_C", - NULL) ) + NULL)) # if Py_IsInitialized(): TSDestroy_Python_inner(ts) return FunctionEnd() cdef PetscErrorCode TSSetUp_Python( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSSetUp_Python") - # cdef PetscVec vec_update = NULL - CHKERR( VecDuplicate(ts.vec_sol, &vec_update) ) - CHKERR( PetscObjectCompose(ts, - b"@ts.vec_update", - vec_update) ) - CHKERR( VecDestroy(&vec_update) ) + CHKERR(VecDuplicate(ts.vec_sol, &vec_update)) + CHKERR(PetscObjectCompose(ts, + b"@ts.vec_update", + vec_update)) + CHKERR(VecDestroy(&vec_update)) cdef PetscVec vec_dot = NULL - CHKERR( VecDuplicate(ts.vec_sol, &vec_dot) ) - CHKERR( PetscObjectCompose(ts, - b"@ts.vec_dot", - vec_dot) ) - CHKERR( VecDestroy(&vec_dot) ) + CHKERR(VecDuplicate(ts.vec_sol, &vec_dot)) + CHKERR(PetscObjectCompose(ts, + b"@ts.vec_dot", + vec_dot)) + CHKERR(VecDestroy(&vec_dot)) # cdef char name[2048] cdef PetscBool found = PETSC_FALSE if PyTS(ts).self is None: - CHKERR( PetscOptionsGetString(NULL, - getPrefix(ts), b"-ts_python_type", - name, sizeof(name), &found) ) + CHKERR(PetscOptionsGetString(NULL, + getPrefix(ts), b"-ts_python_type", + name, sizeof(name), &found)) if found and name[0]: - CHKERR( TSPythonSetType_PYTHON(ts, name) ) + CHKERR(TSPythonSetType_PYTHON(ts, name)) if PyTS(ts).self is None: return PetscSETERR(PETSC_ERR_USER, - "Python context not set, call one of \n" - " * TSPythonSetType(ts, \"[package.]module.class\")\n" - " * TSSetFromOptions(ts) and pass option " - "-ts_python_type [package.]module.class") + "Python context not set, call one of \n" + " * TSPythonSetType(ts, \"[package.]module.class\")\n" + " * TSSetFromOptions(ts) and pass option " + "-ts_python_type [package.]module.class") # cdef setUp = PyTS(ts).setUp if setUp is not None: @@ -2392,8 +2286,7 @@ cdef PetscErrorCode TSSetUp_Python( cdef inline PetscErrorCode TSReset_Python_inner( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: cdef reset = PyTS(ts).reset if reset is not None: reset(TS_(ts)) @@ -2401,29 +2294,27 @@ cdef inline PetscErrorCode TSReset_Python_inner( cdef PetscErrorCode TSReset_Python( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: if getRef(ts) == 0: return PETSC_SUCCESS FunctionBegin(b"TSReset_Python") - CHKERR( PetscObjectCompose(ts, b"@ts.vec_update", NULL) ) - CHKERR( PetscObjectCompose(ts, b"@ts.vec_dot", NULL) ) + CHKERR(PetscObjectCompose(ts, b"@ts.vec_update", NULL)) + CHKERR(PetscObjectCompose(ts, b"@ts.vec_dot", NULL)) if Py_IsInitialized(): TSReset_Python_inner(ts) return FunctionEnd() cdef PetscErrorCode TSSetFromOptions_Python( PetscTS ts, PetscOptionItems *PetscOptionsObject, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSSetFromOptions_Python") cdef char name[2048], 
*defval = PyTS(ts).getname() cdef PetscBool found = PETSC_FALSE cdef PetscOptionItems *opts "PetscOptionsObject" = PetscOptionsObject - CHKERR( PetscOptionsString( + CHKERR(PetscOptionsString( b"-ts_python_type", b"Python [package.]module[.{class|function}]", - b"TSPythonSetType", defval, name, sizeof(name), &found) ); opts; + b"TSPythonSetType", defval, name, sizeof(name), &found)); opts if found and name[0]: - CHKERR( TSPythonSetType_PYTHON(ts, name) ) + CHKERR(TSPythonSetType_PYTHON(ts, name)) # cdef setFromOptions = PyTS(ts).setFromOptions if setFromOptions is not None: @@ -2433,8 +2324,7 @@ cdef PetscErrorCode TSSetFromOptions_Python( cdef PetscErrorCode TSView_Python( PetscTS ts, PetscViewer vwr, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSView_Python") viewcontext(PyTS(ts), vwr) cdef view = PyTS(ts).view @@ -2444,8 +2334,7 @@ cdef PetscErrorCode TSView_Python( cdef PetscErrorCode TSStep_Python( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSStep_Python") cdef step = PyTS(ts).step if step is not None: @@ -2456,8 +2345,7 @@ cdef PetscErrorCode TSStep_Python( cdef PetscErrorCode TSRollBack_Python( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSRollBack_Python") cdef rollback = PyTS(ts).rollback if rollback is None: return UNSUPPORTED(b"rollback") @@ -2468,8 +2356,7 @@ cdef PetscErrorCode TSInterpolate_Python( PetscTS ts, PetscReal t, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSInterpolate _Python") cdef interpolate = PyTS(ts).interpolate if interpolate is None: return UNSUPPORTED(b"interpolate") @@ -2481,8 +2368,7 @@ cdef PetscErrorCode TSEvaluateStep_Python( PetscInt o, PetscVec x, PetscBool *flag, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSEvaluateStep _Python") cdef evaluatestep = PyTS(ts).evaluatestep if evaluatestep is None: return UNSUPPORTED(b"evaluatestep") @@ -2498,9 +2384,8 @@ cdef PetscErrorCode SNESTSFormFunction_Python( PetscVec x, PetscVec f, PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: - # + ) except PETSC_ERR_PYTHON with gil: + FunctionBegin(b"SNESTSFormFunction _Python") cdef formSNESFunction = PyTS(ts).formSNESFunction if formSNESFunction is not None: args = (SNES_(snes), Vec_(x), Vec_(f), TS_(ts)) @@ -2508,16 +2393,16 @@ cdef PetscErrorCode SNESTSFormFunction_Python( return FunctionEnd() # cdef PetscVec dx = NULL - CHKERR( PetscObjectQuery( + CHKERR(PetscObjectQuery( ts, - b"@ts.vec_dot", - &dx) ) + b"@ts.vec_dot", + &dx)) # cdef PetscReal t = ts.ptime + ts.time_step cdef PetscReal a = 1.0/ts.time_step - CHKERR( VecCopy(ts.vec_sol, dx) ) - CHKERR( VecAXPBY(dx, +a, -a, x) ) - CHKERR( TSComputeIFunction(ts, t, x, dx, f, PETSC_FALSE) ) + CHKERR(VecCopy(ts.vec_sol, dx)) + CHKERR(VecAXPBY(dx, +a, -a, x)) + CHKERR(TSComputeIFunction(ts, t, x, dx, f, PETSC_FALSE)) return FunctionEnd() cdef PetscErrorCode SNESTSFormJacobian_Python( @@ -2526,9 +2411,8 @@ cdef PetscErrorCode SNESTSFormJacobian_Python( PetscMat A, PetscMat B, PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: - # + ) except PETSC_ERR_PYTHON with gil: + FunctionBegin(b"SNESTSFormJacobian _Python") cdef formSNESJacobian = PyTS(ts).formSNESJacobian if formSNESJacobian is not None: args = (SNES_(snes), Vec_(x), Mat_(A), Mat_(B), TS_(ts)) @@ -2536,35 +2420,33 @@ cdef PetscErrorCode SNESTSFormJacobian_Python( 
return FunctionEnd() # cdef PetscVec dx = NULL - CHKERR( PetscObjectQuery( + CHKERR(PetscObjectQuery( ts, - b"@ts.vec_dot", - &dx) ) + b"@ts.vec_dot", + &dx)) # cdef PetscReal t = ts.ptime + ts.time_step cdef PetscReal a = 1.0/ts.time_step - CHKERR( VecCopy(ts.vec_sol, dx) ) - CHKERR( VecAXPBY(dx, +a, -a, x) ) - CHKERR( TSComputeIJacobian(ts, t, x, dx, a, A, B, PETSC_FALSE) ) + CHKERR(VecCopy(ts.vec_sol, dx)) + CHKERR(VecAXPBY(dx, +a, -a, x)) + CHKERR(TSComputeIJacobian(ts, t, x, dx, a, A, B, PETSC_FALSE)) return FunctionEnd() cdef PetscErrorCode TSSolveStep_Python( PetscTS ts, PetscReal t, PetscVec x, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSSolveStep_Python") - # cdef solveStep = PyTS(ts).solveStep if solveStep is not None: solveStep(TS_(ts), t, Vec_(x)) return FunctionEnd() # cdef PetscInt nits = 0, lits = 0 - CHKERR( SNESSolve(ts.snes, NULL, x) ) - CHKERR( SNESGetIterationNumber(ts.snes, &nits) ) - CHKERR( SNESGetLinearSolveIterations(ts.snes, &lits) ) + CHKERR(SNESSolve(ts.snes, NULL, x)) + CHKERR(SNESGetIterationNumber(ts.snes, &nits)) + CHKERR(SNESGetLinearSolveIterations(ts.snes, &lits)) ts.snes_its += nits ts.ksp_its += lits return FunctionEnd() @@ -2575,8 +2457,7 @@ cdef PetscErrorCode TSAdaptStep_Python( PetscVec x, PetscReal *nextdt, PetscBool *stepok, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSAdaptStep_Python") nextdt[0] = ts.time_step stepok[0] = PETSC_TRUE @@ -2604,27 +2485,26 @@ cdef PetscErrorCode TSAdaptStep_Python( cdef PetscErrorCode TSStep_Python_default( PetscTS ts, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TSStep_Python_default") cdef PetscVec vec_update = NULL - CHKERR( PetscObjectQuery( + CHKERR(PetscObjectQuery( ts, - b"@ts.vec_update", - &vec_update) ) + b"@ts.vec_update", + &vec_update)) # - cdef PetscInt r = 0 cdef PetscReal tt = ts.ptime cdef PetscReal dt = ts.time_step cdef PetscBool accept = PETSC_TRUE cdef PetscBool stageok = PETSC_TRUE for r from 0 <= r < ts.max_reject: + r # unused tt = ts.ptime + ts.time_step - CHKERR( VecCopy(ts.vec_sol, vec_update) ) - CHKERR( TSPreStage(ts, tt+dt) ) + CHKERR(VecCopy(ts.vec_sol, vec_update)) + CHKERR(TSPreStage(ts, tt+dt)) TSSolveStep_Python(ts, tt, vec_update) - CHKERR( TSPostStage(ts, tt+dt, 0, &vec_update) ); - CHKERR( TSAdaptCheckStage(ts.adapt, ts, tt+dt, vec_update, &stageok) ); + CHKERR(TSPostStage(ts, tt+dt, 0, &vec_update)) + CHKERR(TSAdaptCheckStage(ts.adapt, ts, tt+dt, vec_update, &stageok)) if not stageok: ts.reject += 1 continue @@ -2633,7 +2513,7 @@ cdef PetscErrorCode TSStep_Python_default( ts.time_step = dt ts.reject += 1 continue - CHKERR( VecCopy(vec_update, ts.vec_sol) ) + CHKERR(VecCopy(vec_update, ts.vec_sol)) ts.ptime += ts.time_step ts.time_step = dt break @@ -2646,11 +2526,11 @@ cdef PetscErrorCode TSStep_Python_default( cdef extern from * nogil: struct _TaoOps: - PetscErrorCode (*destroy)(PetscTAO) except PETSC_ERR_PYTHON - PetscErrorCode (*setup)(PetscTAO) except PETSC_ERR_PYTHON - PetscErrorCode (*solve)(PetscTAO) except PETSC_ERR_PYTHON - PetscErrorCode (*setfromoptions)(PetscTAO,PetscOptionItems*) except PETSC_ERR_PYTHON - PetscErrorCode (*view)(PetscTAO,PetscViewer) except PETSC_ERR_PYTHON + PetscErrorCode (*destroy)(PetscTAO) except PETSC_ERR_PYTHON + PetscErrorCode (*setup)(PetscTAO) except PETSC_ERR_PYTHON + PetscErrorCode (*solve)(PetscTAO) except PETSC_ERR_PYTHON + PetscErrorCode (*setfromoptions)(PetscTAO, 
PetscOptionItems*) except PETSC_ERR_PYTHON + PetscErrorCode (*view)(PetscTAO, PetscViewer) except PETSC_ERR_PYTHON ctypedef _TaoOps *TaoOps struct _p_TAO: void *data @@ -2664,15 +2544,15 @@ cdef extern from * nogil: PetscTAOLineSearch linesearch cdef extern from * nogil: # custom.h - PetscErrorCode TaoConverged(PetscTAO,PetscTAOConvergedReason*) + PetscErrorCode TaoConverged(PetscTAO, PetscTAOConvergedReason*) cdef extern from * nogil: # custom.h - PetscErrorCode TaoGetVecs(PetscTAO,PetscVec*,PetscVec*,PetscVec*) - PetscErrorCode TaoCheckReals(PetscTAO,PetscReal,PetscReal) - PetscErrorCode TaoComputeUpdate(PetscTAO) + PetscErrorCode TaoGetVecs(PetscTAO, PetscVec*, PetscVec*, PetscVec*) + PetscErrorCode TaoCheckReals(PetscTAO, PetscReal, PetscReal) + PetscErrorCode TaoComputeUpdate(PetscTAO, PetscReal*) PetscErrorCode TaoCreateDefaultLineSearch(PetscTAO) PetscErrorCode TaoCreateDefaultKSP(PetscTAO) - PetscErrorCode TaoApplyLineSearch(PetscTAO,PetscReal*,PetscReal*,PetscTAOLineSearchConvergedReason*) + PetscErrorCode TaoApplyLineSearch(PetscTAO, PetscReal*, PetscReal*, PetscTAOLineSearchConvergedReason*) @cython.internal @@ -2695,7 +2575,7 @@ cdef public PetscErrorCode TaoPythonSetContext(PetscTAO tao, void *ctx) \ PyTao(tao).setcontext(ctx, TAO_(tao)) return FunctionEnd() -cdef PetscErrorCode TaoPythonSetType_PYTHON(PetscTAO tao, char name[]) \ +cdef PetscErrorCode TaoPythonSetType_PYTHON(PetscTAO tao, const char *name) \ except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoPythonSetType_PYTHON") if name == NULL: return FunctionEnd() # XXX @@ -2712,10 +2592,8 @@ cdef PetscErrorCode TaoPythonGetType_PYTHON(PetscTAO tao, const char *name[]) \ cdef PetscErrorCode TaoCreate_Python( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoCreate_Python") - # cdef TaoOps ops = tao.ops ops.destroy = TaoDestroy_Python ops.view = TaoView_Python @@ -2723,15 +2601,15 @@ cdef PetscErrorCode TaoCreate_Python( ops.setup = TaoSetUp_Python ops.setfromoptions = TaoSetFromOptions_Python # - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( tao, b"TaoPythonSetType_C", - TaoPythonSetType_PYTHON) ) - CHKERR( PetscObjectComposeFunction( + TaoPythonSetType_PYTHON)) + CHKERR(PetscObjectComposeFunction( tao, b"TaoPythonGetType_C", - TaoPythonGetType_PYTHON) ) + TaoPythonGetType_PYTHON)) # - CHKERR( TaoCreateDefaultLineSearch(tao) ) - CHKERR( TaoCreateDefaultKSP(tao) ) + CHKERR(TaoCreateDefaultLineSearch(tao)) + CHKERR(TaoCreateDefaultKSP(tao)) # cdef ctx = PyTao(NULL) tao.data = ctx @@ -2740,8 +2618,7 @@ cdef PetscErrorCode TaoCreate_Python( cdef inline PetscErrorCode TaoDestroy_Python_inner( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: try: addRef(tao) TaoPythonSetContext(tao, NULL) @@ -2753,38 +2630,36 @@ cdef inline PetscErrorCode TaoDestroy_Python_inner( cdef PetscErrorCode TaoDestroy_Python( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON nogil: + ) except PETSC_ERR_PYTHON nogil: FunctionBegin(b"TaoDestroy_Python") - CHKERR( PetscObjectComposeFunction( + CHKERR(PetscObjectComposeFunction( tao, b"TaoPythonSetType_C", - NULL) ) - CHKERR( PetscObjectComposeFunction( + NULL)) + CHKERR(PetscObjectComposeFunction( tao, b"TaoPythonGetType_C", - NULL) ) + NULL)) # if Py_IsInitialized(): TaoDestroy_Python_inner(tao) return FunctionEnd() cdef PetscErrorCode TaoSetUp_Python( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: 
FunctionBegin(b"TaoSetUp_Python") cdef char name[2048] cdef PetscBool found = PETSC_FALSE if PyTao(tao).self is None: - CHKERR( PetscOptionsGetString(NULL, - getPrefix(tao), b"-tao_python_type", - name, sizeof(name), &found) ) + CHKERR(PetscOptionsGetString(NULL, + getPrefix(tao), b"-tao_python_type", + name, sizeof(name), &found)) if found and name[0]: - CHKERR( TaoPythonSetType_PYTHON(tao, name) ) + CHKERR(TaoPythonSetType_PYTHON(tao, name)) if PyTao(tao).self is None: return PetscSETERR(PETSC_ERR_USER, - "Python context not set, call one of \n" - " * TaoPythonSetType(tao, \"[package.]module.class\")\n" - " * TaoSetFromOptions(tao) and pass option " - "-tao_python_type [package.]module.class") + "Python context not set, call one of \n" + " * TaoPythonSetType(tao, \"[package.]module.class\")\n" + " * TaoSetFromOptions(tao) and pass option " + "-tao_python_type [package.]module.class") # cdef setUp = PyTao(tao).setUp if setUp is not None: @@ -2794,30 +2669,27 @@ cdef PetscErrorCode TaoSetUp_Python( cdef PetscErrorCode TaoSetFromOptions_Python( PetscTAO tao, PetscOptionItems *PetscOptionsObject, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoSetFromOptions_Python") - # cdef char name[2048], *defval = PyTao(tao).getname() cdef PetscBool found = PETSC_FALSE cdef PetscOptionItems *opts "PetscOptionsObject" = PetscOptionsObject - CHKERR( PetscOptionsString( + CHKERR(PetscOptionsString( b"-tao_python_type", b"Python [package.]module[.{class|function}]", - b"TaoPythonSetType", defval, name, sizeof(name), &found) ); opts; + b"TaoPythonSetType", defval, name, sizeof(name), &found)); opts if found and name[0]: - CHKERR( TaoPythonSetType_PYTHON(tao, name) ) + CHKERR(TaoPythonSetType_PYTHON(tao, name)) # cdef setFromOptions = PyTao(tao).setFromOptions if setFromOptions is not None: setFromOptions(TAO_(tao)) - CHKERR( KSPSetFromOptions(tao.ksp) ) + CHKERR(KSPSetFromOptions(tao.ksp)) return FunctionEnd() cdef PetscErrorCode TaoView_Python( PetscTAO tao, PetscViewer vwr, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoView_Python") viewcontext(PyTao(tao), vwr) cdef view = PyTao(tao).view @@ -2827,10 +2699,8 @@ cdef PetscErrorCode TaoView_Python( cdef PetscErrorCode TaoSolve_Python( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoSolve_Python") - # tao.niter = 0 tao.ksp_its = 0 tao.reason = TAO_CONTINUE_ITERATING @@ -2845,66 +2715,54 @@ cdef PetscErrorCode TaoSolve_Python( cdef PetscErrorCode TaoSolve_Python_default( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoSolve_Python_default") - # cdef PetscVec X = NULL, G = NULL, S = NULL - CHKERR( TaoGetVecs(tao, &X, &G, &S) ) + CHKERR(TaoGetVecs(tao, &X, &G, &S)) # cdef PetscReal f = 0.0 cdef PetscReal gnorm = 0.0 cdef PetscReal step = 1.0 # if G != NULL: - CHKERR( TaoComputeObjectiveAndGradient(tao, X, &f, G) ) - CHKERR( VecNorm(G, PETSC_NORM_2, &gnorm) ) + CHKERR(TaoComputeObjectiveAndGradient(tao, X, &f, G)) + CHKERR(VecNorm(G, PETSC_NORM_2, &gnorm)) else: - CHKERR( TaoComputeObjective(tao, X, &f) ) - CHKERR( TaoCheckReals(tao, f, gnorm) ) + CHKERR(TaoComputeObjective(tao, X, &f)) + CHKERR(TaoCheckReals(tao, f, gnorm)) - CHKERR( TaoLogConvergenceHistory(tao, f, gnorm, 0.0, tao.ksp_its) ) - CHKERR( TaoMonitor(tao, tao.niter, f, gnorm, 0.0, step) ) - CHKERR( TaoConverged(tao, &tao.reason) ) + CHKERR(TaoLogConvergenceHistory(tao, 
f, gnorm, 0.0, tao.ksp_its)) + CHKERR(TaoMonitor(tao, tao.niter, f, gnorm, 0.0, step)) + CHKERR(TaoConverged(tao, &tao.reason)) - cdef PetscObjectState ostate = -1 - cdef PetscObjectState nstate = -1 - cdef PetscInt its = 0 cdef PetscTAOLineSearchConvergedReason lsr = TAOLINESEARCH_SUCCESS for its from 0 <= its < tao.max_it: + its # unused if tao.reason: break - CHKERR( PetscObjectStateGet(X, &ostate) ) - CHKERR( TaoComputeUpdate(tao) ) + CHKERR(TaoComputeUpdate(tao, &f)) TaoPreStep_Python(tao) - CHKERR( PetscObjectStateGet(X, &nstate) ) - if ostate != nstate: - if G != NULL: - CHKERR( TaoComputeObjectiveAndGradient(tao, X, &f, G) ) - CHKERR( VecNorm(G, PETSC_NORM_2, &gnorm) ) - else: - CHKERR( TaoComputeObjective(tao, X, &f) ) # tao.ksp_its = 0 TaoStep_Python(tao, X, G, S) - CHKERR( KSPGetIterationNumber(tao.ksp, &tao.ksp_its) ) + CHKERR(KSPGetIterationNumber(tao.ksp, &tao.ksp_its)) tao.ksp_tot_its += tao.ksp_its # if G != NULL: - CHKERR( TaoApplyLineSearch(tao, &f, &step, &lsr) ) - CHKERR( VecNorm(G, PETSC_NORM_2, &gnorm) ) + CHKERR(TaoApplyLineSearch(tao, &f, &step, &lsr)) + CHKERR(VecNorm(G, PETSC_NORM_2, &gnorm)) if lsr < TAOLINESEARCH_CONTINUE_ITERATING: tao.reason = TAO_DIVERGED_LS_FAILURE else: - CHKERR( TaoComputeObjective(tao, X, &f) ) - CHKERR( TaoCheckReals(tao, f, gnorm) ) + CHKERR(TaoComputeObjective(tao, X, &f)) + CHKERR(TaoCheckReals(tao, f, gnorm)) tao.niter += 1 # TaoPostStep_Python(tao) - CHKERR( TaoLogConvergenceHistory(tao, f, gnorm, 0.0, tao.ksp_its) ) - CHKERR( TaoMonitor(tao, tao.niter, f, gnorm, 0.0, step) ) - CHKERR( TaoConverged(tao, &tao.reason) ) + CHKERR(TaoLogConvergenceHistory(tao, f, gnorm, 0.0, tao.ksp_its)) + CHKERR(TaoMonitor(tao, tao.niter, f, gnorm, 0.0, step)) + CHKERR(TaoConverged(tao, &tao.reason)) if tao.niter == tao.max_it: if tao.reason <= 0: @@ -2917,23 +2775,20 @@ cdef PetscErrorCode TaoStep_Python( PetscVec X, PetscVec G, PetscVec S, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoStep_Python") cdef step = PyTao(tao).step if step is not None: step(TAO_(tao), Vec_(X), Vec_(G) if G != NULL else None, Vec_(S) if S != NULL else None) else: - # TaoStep_Python_default(tao,X,G,S) - CHKERR( TaoComputeGradient(tao, X, S) ) - CHKERR( VecCopy(G, S) ) - CHKERR( VecScale(S, -1.0) ) + # TaoStep_Python_default(tao, X, G, S) + CHKERR(TaoComputeGradient(tao, X, G)) + CHKERR(VecAXPBY(S, -1.0, 0.0, G)) return FunctionEnd() cdef PetscErrorCode TaoPreStep_Python( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoPreStep_Python") cdef preStep = PyTao(tao).preStep if preStep is not None: @@ -2942,8 +2797,7 @@ cdef PetscErrorCode TaoPreStep_Python( cdef PetscErrorCode TaoPostStep_Python( PetscTAO tao, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"TaoPostStep_Python") cdef postStep = PyTao(tao).postStep if postStep is not None: @@ -2955,15 +2809,14 @@ cdef PetscErrorCode TaoPostStep_Python( cdef PetscErrorCode PetscPythonMonitorSet_Python( PetscObject obj_p, const char *url_p, - ) \ - except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: FunctionBegin(b"PetscPythonMonitorSet_Python") assert obj_p != NULL assert url_p != NULL assert url_p[0] != 0 # cdef PetscClassId classid = 0 - CHKERR( PetscObjectGetClassId(obj_p, &classid) ) + CHKERR(PetscObjectGetClassId(obj_p, &classid)) cdef type klass = PyPetscType_Lookup(classid) cdef Object ob = klass() ob.obj[0] = newRef(obj_p) @@ -2974,7 +2827,7 
@@ cdef PetscErrorCode PetscPythonMonitorSet_Python(
     else:
         path, names = url, 'monitor'
     module = load_module(path)
     for attr in names.split(','):
         monitor = getattr(module, attr)
         if isinstance(monitor, type):
             monitor = monitor(ob)
@@ -2986,34 +2839,34 @@ cdef PetscErrorCode PetscPythonMonitorSet_Python(

 cdef extern from * nogil:

-    ctypedef PetscErrorCode MatCreateFunction  (PetscMat)  except PETSC_ERR_PYTHON
-    ctypedef PetscErrorCode PCCreateFunction   (PetscPC)   except PETSC_ERR_PYTHON
-    ctypedef PetscErrorCode KSPCreateFunction  (PetscKSP)  except PETSC_ERR_PYTHON
-    ctypedef PetscErrorCode SNESCreateFunction (PetscSNES) except PETSC_ERR_PYTHON
-    ctypedef PetscErrorCode TSCreateFunction   (PetscTS)   except PETSC_ERR_PYTHON
-    ctypedef PetscErrorCode TaoCreateFunction  (PetscTAO)  except PETSC_ERR_PYTHON
+    ctypedef PetscErrorCode MatCreateFunction  (PetscMat)  except PETSC_ERR_PYTHON
+    ctypedef PetscErrorCode PCCreateFunction   (PetscPC)   except PETSC_ERR_PYTHON
+    ctypedef PetscErrorCode KSPCreateFunction  (PetscKSP)  except PETSC_ERR_PYTHON
+    ctypedef PetscErrorCode SNESCreateFunction (PetscSNES) except PETSC_ERR_PYTHON
+    ctypedef PetscErrorCode TSCreateFunction   (PetscTS)   except PETSC_ERR_PYTHON
+    ctypedef PetscErrorCode TaoCreateFunction  (PetscTAO)  except PETSC_ERR_PYTHON

-    PetscErrorCode MatRegister  (const char[],MatCreateFunction* )
-    PetscErrorCode PCRegister   (const char[],PCCreateFunction*  )
-    PetscErrorCode KSPRegister  (const char[],KSPCreateFunction* )
-    PetscErrorCode SNESRegister (const char[],SNESCreateFunction*)
-    PetscErrorCode TSRegister   (const char[],TSCreateFunction*  )
-    PetscErrorCode TaoRegister  (const char[],TaoCreateFunction* )
+    PetscErrorCode MatRegister  (const char[], MatCreateFunction*)
+    PetscErrorCode PCRegister   (const char[], PCCreateFunction*)
+    PetscErrorCode KSPRegister  (const char[], KSPCreateFunction*)
+    PetscErrorCode SNESRegister (const char[], SNESCreateFunction*)
+    PetscErrorCode TSRegister   (const char[], TSCreateFunction*)
+    PetscErrorCode TaoRegister  (const char[], TaoCreateFunction*)

-    PetscErrorCode (*PetscPythonMonitorSet_C) \
-        (PetscObject, const char[]) except PETSC_ERR_PYTHON
+    PetscErrorCode (*PetscPythonMonitorSet_C) \
+        (PetscObject, const char[]) except PETSC_ERR_PYTHON

 cdef public PetscErrorCode PetscPythonRegisterAll() except PETSC_ERR_PYTHON:
     FunctionBegin(b"PetscPythonRegisterAll")

     # Python subtypes
-    CHKERR( MatRegister ( MATPYTHON,  MatCreate_Python  ) )
-    CHKERR( PCRegister  ( PCPYTHON,   PCCreate_Python   ) )
-    CHKERR( KSPRegister ( KSPPYTHON,  KSPCreate_Python  ) )
-    CHKERR( SNESRegister( SNESPYTHON, SNESCreate_Python ) )
-    CHKERR( TSRegister  ( TSPYTHON,   TSCreate_Python   ) )
-    CHKERR( TaoRegister ( TAOPYTHON,  TaoCreate_Python  ) )
+    CHKERR(MatRegister(MATPYTHON, MatCreate_Python))
+    CHKERR(PCRegister(PCPYTHON, PCCreate_Python))
+    CHKERR(KSPRegister(KSPPYTHON, KSPCreate_Python))
+    CHKERR(SNESRegister(SNESPYTHON, SNESCreate_Python))
+    CHKERR(TSRegister(TSPYTHON, TSCreate_Python))
+    CHKERR(TaoRegister(TAOPYTHON, TaoCreate_Python))

     # Python monitors
     global PetscPythonMonitorSet_C
diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscao.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscao.pxi
index 5abe543cc0c..4394111f4af 100644
--- a/src/binding/petsc4py/src/petsc4py/PETSc/petscao.pxi
+++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscao.pxi
@@ -6,17 +6,17 @@ cdef extern from * nogil:
     PetscAOType AOMAPPING
     PetscAOType AOMEMORYSCALABLE

-    PetscErrorCode AOView(PetscAO,PetscViewer)
+    PetscErrorCode AOView(PetscAO, PetscViewer)
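
For reference on the loader above: PetscPythonMonitorSet_Python resolves a
monitor URL of the form "[path/]module[:name1,name2,...]", imports the module,
and installs each named attribute as a monitor, constructing it with the
wrapped object once when the attribute is a class (the isinstance(monitor,
type) branch). The names are comma-separated with no embedded spaces, hence
split(','). A minimal sketch of a module this loader could resolve, assuming
the monitored object is a SNES; the file and function names here are
hypothetical:

    # mymonitors.py -- hypothetical module resolvable by the loader above
    def resnorm(snes, its, fnorm):
        # a petsc4py SNES monitor receives (snes, its, fnorm)
        print("it %d: ||F|| = %.6e" % (its, fnorm))
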
PetscErrorCode AODestroy(PetscAO*) - PetscErrorCode AOCreateBasic(MPI_Comm,PetscInt,const PetscInt[],const PetscInt[],PetscAO*) - PetscErrorCode AOCreateBasicIS(PetscIS,PetscIS,PetscAO*) - PetscErrorCode AOCreateMemoryScalable(MPI_Comm,PetscInt,const PetscInt[],const PetscInt[],PetscAO*) - PetscErrorCode AOCreateMemoryScalableIS(PetscIS,PetscIS,PetscAO*) - PetscErrorCode AOCreateMapping(MPI_Comm,PetscInt,const PetscInt[],const PetscInt[],PetscAO*) - PetscErrorCode AOCreateMappingIS(PetscIS,PetscIS,PetscAO*) - PetscErrorCode AOGetType(PetscAO,PetscAOType*) + PetscErrorCode AOCreateBasic(MPI_Comm, PetscInt, const PetscInt[], const PetscInt[], PetscAO*) + PetscErrorCode AOCreateBasicIS(PetscIS, PetscIS, PetscAO*) + PetscErrorCode AOCreateMemoryScalable(MPI_Comm, PetscInt, const PetscInt[], const PetscInt[], PetscAO*) + PetscErrorCode AOCreateMemoryScalableIS(PetscIS, PetscIS, PetscAO*) + PetscErrorCode AOCreateMapping(MPI_Comm, PetscInt, const PetscInt[], const PetscInt[], PetscAO*) + PetscErrorCode AOCreateMappingIS(PetscIS, PetscIS, PetscAO*) + PetscErrorCode AOGetType(PetscAO, PetscAOType*) - PetscErrorCode AOApplicationToPetsc(PetscAO,PetscInt,PetscInt[]) - PetscErrorCode AOApplicationToPetscIS(PetscAO,PetscIS) - PetscErrorCode AOPetscToApplication(PetscAO,PetscInt,PetscInt[]) - PetscErrorCode AOPetscToApplicationIS(PetscAO,PetscIS) + PetscErrorCode AOApplicationToPetsc(PetscAO, PetscInt, PetscInt[]) + PetscErrorCode AOApplicationToPetscIS(PetscAO, PetscIS) + PetscErrorCode AOPetscToApplication(PetscAO, PetscInt, PetscInt[]) + PetscErrorCode AOPetscToApplicationIS(PetscAO, PetscIS) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdef.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdef.pxi index 722015999c8..9c19779779c 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdef.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdef.pxi @@ -57,4 +57,3 @@ cdef inline PetscScatterMode scattermode(object mode) \ if mode == 'reverse': return PETSC_SCATTER_REVERSE else: raise ValueError("unknown scatter mode: %s" % mode) return mode - diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdevice.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdevice.pxi index 8d51acb2a60..42a51f335aa 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdevice.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdevice.pxi @@ -64,47 +64,50 @@ cdef extern from * nogil: PetscErrorCode PetscDeviceContextGetCurrentContext(PetscDeviceContext *) PetscErrorCode PetscDeviceContextSetCurrentContext(PetscDeviceContext) +cdef extern from * nogil: # custom.h + PetscErrorCode PetscDeviceReference(PetscDevice) + cdef inline PetscDeviceType asDeviceType(object dtype) except (-1): - if isinstance(dtype, str): - dtype = dtype.upper() - try: - return getattr(Device.Type, dtype) - except AttributeError: - raise ValueError("unknown device type: %s" % dtype) - return dtype + if isinstance(dtype, str): + dtype = dtype.upper() + try: + return getattr(Device.Type, dtype) + except AttributeError: + raise ValueError("unknown device type: %s" % dtype) + return dtype cdef inline str toDeviceType(PetscDeviceType dtype): - try: - return Device.Type.__enum2str[dtype] - except KeyError: - raise NotImplementedError("unhandled PetscDeviceType %d" % dtype) + try: + return Device.Type.__enum2str[dtype] + except KeyError: + raise NotImplementedError("unhandled PetscDeviceType %d" % dtype) cdef inline PetscStreamType asStreamType(object stype) except (-1): - if isinstance(stype, str): - stype = stype.upper() - try: - 
return getattr(DeviceContext.StreamType, stype) - except AttributeError: - raise ValueError("unknown stream type: %s" % stype) - return stype + if isinstance(stype, str): + stype = stype.upper() + try: + return getattr(DeviceContext.StreamType, stype) + except AttributeError: + raise ValueError("unknown stream type: %s" % stype) + return stype cdef inline str toStreamType(PetscStreamType stype): - try: - return DeviceContext.StreamType.__enum2str[stype] - except KeyError: - raise NotImplementedError("unhandled PetscStreamType %d" % stype) + try: + return DeviceContext.StreamType.__enum2str[stype] + except KeyError: + raise NotImplementedError("unhandled PetscStreamType %d" % stype) cdef inline PetscDeviceContextJoinMode asJoinMode(object jmode) except (-1): - if isinstance(jmode, str): - jmode = jmode.upper() - try: - return getattr(DeviceContext.JoinMode, jmode) - except AttributeError: - raise ValueError("unknown join mode: %s" % jmode) - return jmode + if isinstance(jmode, str): + jmode = jmode.upper() + try: + return getattr(DeviceContext.JoinMode, jmode) + except AttributeError: + raise ValueError("unknown join mode: %s" % jmode) + return jmode cdef inline str toJoinMode(PetscDeviceContextJoinMode jmode): - try: - return DeviceContext.JoinMode.__enum2str[jmode] - except KeyError: - raise NotImplementedError("unhandled PetscDeviceContextJoinMode %d" % jmode) + try: + return DeviceContext.JoinMode.__enum2str[jmode] + except KeyError: + raise NotImplementedError("unhandled PetscDeviceContextJoinMode %d" % jmode) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdm.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdm.pxi index b9e5b2a80ef..b80627081a3 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdm.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdm.pxi @@ -52,156 +52,154 @@ cdef extern from * nogil: DM_REORDER_DEFAULT_TRUE ctypedef PetscErrorCode (*PetscDMCoarsenHook)(PetscDM, - PetscDM, - void*) except PETSC_ERR_PYTHON + PetscDM, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMRestrictHook)(PetscDM, - PetscMat, - PetscVec, - PetscMat, - PetscDM, - void*) except PETSC_ERR_PYTHON - - PetscErrorCode DMCreate(MPI_Comm,PetscDM*) - PetscErrorCode DMClone(PetscDM,PetscDM*) + PetscMat, + PetscVec, + PetscMat, + PetscDM, + void*) except PETSC_ERR_PYTHON + + PetscErrorCode DMCreate(MPI_Comm, PetscDM*) + PetscErrorCode DMClone(PetscDM, PetscDM*) PetscErrorCode DMDestroy(PetscDM*) - PetscErrorCode DMView(PetscDM,PetscViewer) - PetscErrorCode DMLoad(PetscDM,PetscViewer) - PetscErrorCode DMSetType(PetscDM,PetscDMType) - PetscErrorCode DMGetType(PetscDM,PetscDMType*) - PetscErrorCode DMGetDimension(PetscDM,PetscInt*) - PetscErrorCode DMSetDimension(PetscDM,PetscInt) - PetscErrorCode DMSetOptionsPrefix(PetscDM,char[]) - PetscErrorCode DMGetOptionsPrefix(PetscDM,char*[]) - PetscErrorCode DMAppendOptionsPrefix(PetscDM,char[]) + PetscErrorCode DMView(PetscDM, PetscViewer) + PetscErrorCode DMLoad(PetscDM, PetscViewer) + PetscErrorCode DMSetType(PetscDM, PetscDMType) + PetscErrorCode DMGetType(PetscDM, PetscDMType*) + PetscErrorCode DMGetDimension(PetscDM, PetscInt*) + PetscErrorCode DMSetDimension(PetscDM, PetscInt) + PetscErrorCode DMSetOptionsPrefix(PetscDM, char[]) + PetscErrorCode DMGetOptionsPrefix(PetscDM, char*[]) + PetscErrorCode DMAppendOptionsPrefix(PetscDM, char[]) PetscErrorCode DMSetFromOptions(PetscDM) - PetscErrorCode DMViewFromOptions(PetscDM,PetscObject,char[]) PetscErrorCode DMSetUp(PetscDM) - PetscErrorCode 
DMGetAdjacency(PetscDM,PetscInt,PetscBool*,PetscBool*) - PetscErrorCode DMSetAdjacency(PetscDM,PetscInt,PetscBool,PetscBool) - PetscErrorCode DMGetBasicAdjacency(PetscDM,PetscBool*,PetscBool*) - PetscErrorCode DMSetBasicAdjacency(PetscDM,PetscBool,PetscBool) + PetscErrorCode DMGetAdjacency(PetscDM, PetscInt, PetscBool*, PetscBool*) + PetscErrorCode DMSetAdjacency(PetscDM, PetscInt, PetscBool, PetscBool) + PetscErrorCode DMGetBasicAdjacency(PetscDM, PetscBool*, PetscBool*) + PetscErrorCode DMSetBasicAdjacency(PetscDM, PetscBool, PetscBool) - PetscErrorCode DMSetNumFields(PetscDM,PetscInt) - PetscErrorCode DMGetNumFields(PetscDM,PetscInt*) - PetscErrorCode DMSetField(PetscDM,PetscInt,PetscDMLabel,PetscObject) - PetscErrorCode DMAddField(PetscDM,PetscDMLabel,PetscObject) - PetscErrorCode DMGetField(PetscDM,PetscInt,PetscDMLabel*,PetscObject*) + PetscErrorCode DMSetNumFields(PetscDM, PetscInt) + PetscErrorCode DMGetNumFields(PetscDM, PetscInt*) + PetscErrorCode DMSetField(PetscDM, PetscInt, PetscDMLabel, PetscObject) + PetscErrorCode DMAddField(PetscDM, PetscDMLabel, PetscObject) + PetscErrorCode DMGetField(PetscDM, PetscInt, PetscDMLabel*, PetscObject*) PetscErrorCode DMClearFields(PetscDM) - PetscErrorCode DMCopyFields(PetscDM,PetscDM) + PetscErrorCode DMCopyFields(PetscDM, PetscDM) PetscErrorCode DMCreateDS(PetscDM) PetscErrorCode DMClearDS(PetscDM) - PetscErrorCode DMGetDS(PetscDM,PetscDS*) - PetscErrorCode DMCopyDS(PetscDM,PetscDM) - PetscErrorCode DMCopyDisc(PetscDM,PetscDM) - - PetscErrorCode DMGetBlockSize(PetscDM,PetscInt*) - PetscErrorCode DMSetVecType(PetscDM,PetscVecType) - PetscErrorCode DMCreateLocalVector(PetscDM,PetscVec*) - PetscErrorCode DMCreateGlobalVector(PetscDM,PetscVec*) - PetscErrorCode DMGetLocalVector(PetscDM,PetscVec*) - PetscErrorCode DMRestoreLocalVector(PetscDM,PetscVec*) - PetscErrorCode DMGetGlobalVector(PetscDM,PetscVec*) - PetscErrorCode DMRestoreGlobalVector(PetscDM,PetscVec*) - PetscErrorCode DMSetMatType(PetscDM,PetscMatType) - PetscErrorCode DMCreateMatrix(PetscDM,PetscMat*) - PetscErrorCode DMCreateMassMatrix(PetscDM,PetscDM,PetscMat*) - - PetscErrorCode DMGetCoordinateDM(PetscDM,PetscDM*) - PetscErrorCode DMGetCoordinateSection(PetscDM,PetscSection*) - PetscErrorCode DMSetCoordinates(PetscDM,PetscVec) - PetscErrorCode DMGetCoordinates(PetscDM,PetscVec*) - PetscErrorCode DMSetCoordinatesLocal(PetscDM,PetscVec) - PetscErrorCode DMGetCoordinatesLocal(PetscDM,PetscVec*) - PetscErrorCode DMGetCoordinateDim(PetscDM,PetscInt*) - PetscErrorCode DMSetCoordinateDim(PetscDM,PetscInt) + PetscErrorCode DMGetDS(PetscDM, PetscDS*) + PetscErrorCode DMCopyDS(PetscDM, PetscDM) + PetscErrorCode DMCopyDisc(PetscDM, PetscDM) + + PetscErrorCode DMGetBlockSize(PetscDM, PetscInt*) + PetscErrorCode DMSetVecType(PetscDM, PetscVecType) + PetscErrorCode DMCreateLocalVector(PetscDM, PetscVec*) + PetscErrorCode DMCreateGlobalVector(PetscDM, PetscVec*) + PetscErrorCode DMGetLocalVector(PetscDM, PetscVec*) + PetscErrorCode DMRestoreLocalVector(PetscDM, PetscVec*) + PetscErrorCode DMGetGlobalVector(PetscDM, PetscVec*) + PetscErrorCode DMRestoreGlobalVector(PetscDM, PetscVec*) + PetscErrorCode DMSetMatType(PetscDM, PetscMatType) + PetscErrorCode DMCreateMatrix(PetscDM, PetscMat*) + PetscErrorCode DMCreateMassMatrix(PetscDM, PetscDM, PetscMat*) + + PetscErrorCode DMGetCoordinateDM(PetscDM, PetscDM*) + PetscErrorCode DMGetCoordinateSection(PetscDM, PetscSection*) + PetscErrorCode DMSetCoordinates(PetscDM, PetscVec) + PetscErrorCode DMGetCoordinates(PetscDM, PetscVec*) + PetscErrorCode 
DMSetCoordinatesLocal(PetscDM, PetscVec) + PetscErrorCode DMGetCoordinatesLocal(PetscDM, PetscVec*) + PetscErrorCode DMGetCoordinateDim(PetscDM, PetscInt*) + PetscErrorCode DMSetCoordinateDim(PetscDM, PetscInt) PetscErrorCode DMLocalizeCoordinates(PetscDM) - PetscErrorCode DMSetCoordinateDisc(PetscDM,PetscFE,PetscBool) - PetscErrorCode DMSetCellCoordinateDM(PetscDM,PetscDM) - PetscErrorCode DMGetCellCoordinateDM(PetscDM,PetscDM*) - PetscErrorCode DMSetCellCoordinateSection(PetscDM,PetscInt,PetscSection) - PetscErrorCode DMGetCellCoordinateSection(PetscDM,PetscSection*) - PetscErrorCode DMSetCellCoordinates(PetscDM,PetscVec) - PetscErrorCode DMGetCellCoordinates(PetscDM,PetscVec*) - PetscErrorCode DMSetCellCoordinatesLocal(PetscDM,PetscVec) - PetscErrorCode DMGetCellCoordinatesLocal(PetscDM,PetscVec*) - PetscErrorCode DMGetCoordinatesLocalized(PetscDM,PetscBool*) - - PetscErrorCode DMCreateInterpolation(PetscDM,PetscDM,PetscMat*,PetscVec*) - PetscErrorCode DMCreateInjection(PetscDM,PetscDM,PetscMat*) - PetscErrorCode DMCreateRestriction(PetscDM,PetscDM,PetscMat*) - - PetscErrorCode DMConvert(PetscDM,PetscDMType,PetscDM*) - PetscErrorCode DMRefine(PetscDM,MPI_Comm,PetscDM*) - PetscErrorCode DMCoarsen(PetscDM,MPI_Comm,PetscDM*) - PetscErrorCode DMRefineHierarchy(PetscDM,PetscInt,PetscDM[]) - PetscErrorCode DMCoarsenHierarchy(PetscDM,PetscInt,PetscDM[]) - PetscErrorCode DMGetRefineLevel(PetscDM,PetscInt*) - PetscErrorCode DMSetRefineLevel(PetscDM,PetscInt) - PetscErrorCode DMGetCoarsenLevel(PetscDM,PetscInt*) - PetscErrorCode DMGetCoarseDM(PetscDM,PetscDM*) - PetscErrorCode DMSetCoarseDM(PetscDM,PetscDM) - - PetscErrorCode DMAdaptLabel(PetscDM,PetscDMLabel,PetscDM*) - PetscErrorCode DMAdaptMetric(PetscDM,PetscVec,PetscDMLabel,PetscDMLabel,PetscDM*) - - PetscErrorCode DMGlobalToLocalBegin(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode DMGlobalToLocalEnd(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode DMLocalToGlobalBegin(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode DMLocalToGlobalEnd(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode DMLocalToLocalBegin(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode DMLocalToLocalEnd(PetscDM,PetscVec,PetscInsertMode,PetscVec) - - PetscErrorCode DMGetLocalToGlobalMapping(PetscDM,PetscLGMap*) - - PetscErrorCode DMSetSection(PetscDM,PetscSection) - PetscErrorCode DMGetSection(PetscDM,PetscSection*) - PetscErrorCode DMSetLocalSection(PetscDM,PetscSection) - PetscErrorCode DMGetLocalSection(PetscDM,PetscSection*) - PetscErrorCode DMSetGlobalSection(PetscDM,PetscSection) - PetscErrorCode DMGetGlobalSection(PetscDM,PetscSection*) - PetscErrorCode DMCreateSectionSF(PetscDM,PetscSection,PetscSection) - PetscErrorCode DMGetSectionSF(PetscDM,PetscSF*) - PetscErrorCode DMSetSectionSF(PetscDM,PetscSF) - PetscErrorCode DMGetPointSF(PetscDM,PetscSF*) - PetscErrorCode DMSetPointSF(PetscDM,PetscSF) + PetscErrorCode DMSetCoordinateDisc(PetscDM, PetscFE, PetscBool) + PetscErrorCode DMSetCellCoordinateDM(PetscDM, PetscDM) + PetscErrorCode DMGetCellCoordinateDM(PetscDM, PetscDM*) + PetscErrorCode DMSetCellCoordinateSection(PetscDM, PetscInt, PetscSection) + PetscErrorCode DMGetCellCoordinateSection(PetscDM, PetscSection*) + PetscErrorCode DMSetCellCoordinates(PetscDM, PetscVec) + PetscErrorCode DMGetCellCoordinates(PetscDM, PetscVec*) + PetscErrorCode DMSetCellCoordinatesLocal(PetscDM, PetscVec) + PetscErrorCode DMGetCellCoordinatesLocal(PetscDM, PetscVec*) + PetscErrorCode DMGetCoordinatesLocalized(PetscDM, PetscBool*) 
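
The DM prototypes in this block change only by comma spacing; they are the C
declarations behind petsc4py's coordinate handling (DMGetCoordinateDM,
DMSetCoordinates/DMGetCoordinates, and friends). A minimal usage sketch of
the Python-level calls they support, using a small DMDA for concreteness
(the sizes and extents are illustrative):

    from petsc4py import PETSc

    da = PETSc.DMDA().create(dim=2, sizes=(8, 8))
    da.setUniformCoordinates(0.0, 1.0, 0.0, 1.0)  # fill the coordinate Vec
    xy = da.getCoordinates()     # global coordinate Vec (DMGetCoordinates)
    cdm = da.getCoordinateDM()   # coordinate DM (DMGetCoordinateDM)
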
+ + PetscErrorCode DMCreateInterpolation(PetscDM, PetscDM, PetscMat*, PetscVec*) + PetscErrorCode DMCreateInjection(PetscDM, PetscDM, PetscMat*) + PetscErrorCode DMCreateRestriction(PetscDM, PetscDM, PetscMat*) + + PetscErrorCode DMConvert(PetscDM, PetscDMType, PetscDM*) + PetscErrorCode DMRefine(PetscDM, MPI_Comm, PetscDM*) + PetscErrorCode DMCoarsen(PetscDM, MPI_Comm, PetscDM*) + PetscErrorCode DMRefineHierarchy(PetscDM, PetscInt, PetscDM[]) + PetscErrorCode DMCoarsenHierarchy(PetscDM, PetscInt, PetscDM[]) + PetscErrorCode DMGetRefineLevel(PetscDM, PetscInt*) + PetscErrorCode DMSetRefineLevel(PetscDM, PetscInt) + PetscErrorCode DMGetCoarsenLevel(PetscDM, PetscInt*) + PetscErrorCode DMGetCoarseDM(PetscDM, PetscDM*) + PetscErrorCode DMSetCoarseDM(PetscDM, PetscDM) + + PetscErrorCode DMAdaptLabel(PetscDM, PetscDMLabel, PetscDM*) + PetscErrorCode DMAdaptMetric(PetscDM, PetscVec, PetscDMLabel, PetscDMLabel, PetscDM*) + + PetscErrorCode DMGlobalToLocalBegin(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMGlobalToLocalEnd(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMLocalToGlobalBegin(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMLocalToGlobalEnd(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMLocalToLocalBegin(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMLocalToLocalEnd(PetscDM, PetscVec, PetscInsertMode, PetscVec) + + PetscErrorCode DMGetLocalToGlobalMapping(PetscDM, PetscLGMap*) + + PetscErrorCode DMSetSection(PetscDM, PetscSection) + PetscErrorCode DMGetSection(PetscDM, PetscSection*) + PetscErrorCode DMSetLocalSection(PetscDM, PetscSection) + PetscErrorCode DMGetLocalSection(PetscDM, PetscSection*) + PetscErrorCode DMSetGlobalSection(PetscDM, PetscSection) + PetscErrorCode DMGetGlobalSection(PetscDM, PetscSection*) + PetscErrorCode DMCreateSectionSF(PetscDM, PetscSection, PetscSection) + PetscErrorCode DMGetSectionSF(PetscDM, PetscSF*) + PetscErrorCode DMSetSectionSF(PetscDM, PetscSF) + PetscErrorCode DMGetPointSF(PetscDM, PetscSF*) + PetscErrorCode DMSetPointSF(PetscDM, PetscSF) PetscErrorCode DMCreateSubDM(PetscDM, PetscInt, const PetscInt[], PetscIS*, PetscDM*) PetscErrorCode DMSetAuxiliaryVec(PetscDM, PetscDMLabel, PetscInt, PetscInt, PetscVec) PetscErrorCode DMGetAuxiliaryVec(PetscDM, PetscDMLabel, PetscInt, PetscInt, PetscVec*) - PetscErrorCode DMCreateLabel(PetscDM,const char[]) - PetscErrorCode DMGetLabelValue(PetscDM,const char[],PetscInt,PetscInt*) - PetscErrorCode DMSetLabelValue(PetscDM,const char[],PetscInt,PetscInt) - PetscErrorCode DMHasLabel(PetscDM,const char[],PetscBool*) - PetscErrorCode DMClearLabelValue(PetscDM,const char[],PetscInt,PetscInt) - PetscErrorCode DMGetLabelSize(PetscDM,const char[],PetscInt*) - PetscErrorCode DMGetLabelIdIS(PetscDM,const char[],PetscIS*) - PetscErrorCode DMGetStratumSize(PetscDM,const char[],PetscInt,PetscInt*) - PetscErrorCode DMGetStratumIS(PetscDM,const char[],PetscInt,PetscIS*) - PetscErrorCode DMClearLabelStratum(PetscDM,const char[],PetscInt) - PetscErrorCode DMSetLabelOutput(PetscDM,const char[],PetscBool) - PetscErrorCode DMGetLabelOutput(PetscDM,const char[],PetscBool*) - PetscErrorCode DMGetNumLabels(PetscDM,PetscInt*) - PetscErrorCode DMGetLabelName(PetscDM,PetscInt,const char**) - PetscErrorCode DMHasLabel(PetscDM,const char[],PetscBool*) - PetscErrorCode DMGetLabel(PetscDM,const char*,PetscDMLabel*) - PetscErrorCode DMAddLabel(PetscDM,PetscDMLabel) - PetscErrorCode DMRemoveLabel(PetscDM,const char[],PetscDMLabel*) + PetscErrorCode 
DMCreateLabel(PetscDM, const char[]) + PetscErrorCode DMGetLabelValue(PetscDM, const char[], PetscInt, PetscInt*) + PetscErrorCode DMSetLabelValue(PetscDM, const char[], PetscInt, PetscInt) + PetscErrorCode DMHasLabel(PetscDM, const char[], PetscBool*) + PetscErrorCode DMClearLabelValue(PetscDM, const char[], PetscInt, PetscInt) + PetscErrorCode DMGetLabelSize(PetscDM, const char[], PetscInt*) + PetscErrorCode DMGetLabelIdIS(PetscDM, const char[], PetscIS*) + PetscErrorCode DMGetStratumSize(PetscDM, const char[], PetscInt, PetscInt*) + PetscErrorCode DMGetStratumIS(PetscDM, const char[], PetscInt, PetscIS*) + PetscErrorCode DMClearLabelStratum(PetscDM, const char[], PetscInt) + PetscErrorCode DMSetLabelOutput(PetscDM, const char[], PetscBool) + PetscErrorCode DMGetLabelOutput(PetscDM, const char[], PetscBool*) + PetscErrorCode DMGetNumLabels(PetscDM, PetscInt*) + PetscErrorCode DMGetLabelName(PetscDM, PetscInt, const char**) + PetscErrorCode DMHasLabel(PetscDM, const char[], PetscBool*) + PetscErrorCode DMGetLabel(PetscDM, const char*, PetscDMLabel*) + PetscErrorCode DMAddLabel(PetscDM, PetscDMLabel) + PetscErrorCode DMRemoveLabel(PetscDM, const char[], PetscDMLabel*) PetscErrorCode DMLabelDestroy(PetscDMLabel *) - #int DMCopyLabels(PetscDM,PetscDM) - PetscErrorCode DMShellSetGlobalVector(PetscDM,PetscVec) - PetscErrorCode DMShellSetLocalVector(PetscDM,PetscVec) + PetscErrorCode DMShellSetGlobalVector(PetscDM, PetscVec) + PetscErrorCode DMShellSetLocalVector(PetscDM, PetscVec) - PetscErrorCode DMKSPSetComputeOperators(PetscDM,PetscKSPComputeOpsFunction,void*) + PetscErrorCode DMKSPSetComputeOperators(PetscDM, PetscKSPComputeOpsFunction, void*) - PetscErrorCode DMCreateFieldDecomposition(PetscDM,PetscInt*,char***,PetscIS**,PetscDM**) + PetscErrorCode DMCreateFieldDecomposition(PetscDM, PetscInt*, char***, PetscIS**, PetscDM**) - PetscErrorCode DMSNESSetFunction(PetscDM,PetscSNESFunctionFunction,void*) - PetscErrorCode DMSNESSetJacobian(PetscDM,PetscSNESJacobianFunction,void*) + PetscErrorCode DMSNESSetFunction(PetscDM, PetscSNESFunctionFunction, void*) + PetscErrorCode DMSNESSetJacobian(PetscDM, PetscSNESJacobianFunction, void*) - PetscErrorCode DMCoarsenHookAdd(PetscDM,PetscDMCoarsenHook,PetscDMRestrictHook,void*) + PetscErrorCode DMCoarsenHookAdd(PetscDM, PetscDMCoarsenHook, PetscDMRestrictHook, void*) # -------------------------------------------------------------------- @@ -234,9 +232,9 @@ cdef inline PetscInt asBoundary(object boundary, PetscDMBoundaryType *_z) except -1: cdef PetscInt dim = 0 cdef object x=None, y=None, z=None - if (boundary is None or - isinstance(boundary, str) or - isinstance(boundary, int)): + if boundary is None or \ + isinstance(boundary, str) or \ + isinstance(boundary, int): _x[0] = _y[0] = _z[0] = asBoundaryType(boundary) else: _x[0] = _y[0] = _z[0] = DM_BOUNDARY_NONE @@ -265,7 +263,7 @@ cdef inline object toBoundary(PetscInt dim, cdef inline DM ref_DM(PetscDM dm): cdef DM ob = DM() ob.dm = dm - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # -------------------------------------------------------------------- @@ -273,8 +271,8 @@ cdef inline DM ref_DM(PetscDM dm): cdef PetscErrorCode DM_PyCoarsenHook( PetscDM fine, PetscDM coarse, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef DM Fine = ref_DM(fine) cdef DM Coarse = ref_DM(coarse) @@ -286,13 +284,13 @@ cdef PetscErrorCode DM_PyCoarsenHook( return PETSC_SUCCESS cdef PetscErrorCode DM_PyRestrictHook( - PetscDM fine, + PetscDM 
fine, PetscMat mrestrict, PetscVec rscale, PetscMat inject, - PetscDM coarse, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + PetscDM coarse, + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef DM Fine = ref_DM(fine) cdef Mat Mrestrict = ref_Mat(mrestrict) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmcomposite.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmcomposite.pxi index 97a2014ade7..2be535cae43 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmcomposite.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmcomposite.pxi @@ -2,17 +2,17 @@ cdef extern from * nogil: - PetscErrorCode DMCompositeCreate(MPI_Comm,PetscDM*) - PetscErrorCode DMCompositeAddDM(PetscDM,PetscDM) - PetscErrorCode DMCompositeGetNumberDM(PetscDM,PetscInt*) - PetscErrorCode DMCompositeScatterArray(PetscDM,PetscVec,PetscVec*) - PetscErrorCode DMCompositeGatherArray(PetscDM,PetscInsertMode,PetscVec,PetscVec*) - PetscErrorCode DMCompositeGetEntriesArray(PetscDM,PetscDM*) - PetscErrorCode DMCompositeGetAccessArray(PetscDM,PetscVec,PetscInt,const PetscInt*,PetscVec*) - PetscErrorCode DMCompositeRestoreAccessArray(PetscDM,PetscVec,PetscInt,const PetscInt*,PetscVec*) - PetscErrorCode DMCompositeGetGlobalISs(PetscDM,PetscIS**) - PetscErrorCode DMCompositeGetLocalISs(PetscDM,PetscIS**) - PetscErrorCode DMCompositeGetISLocalToGlobalMappings(PetscDM,PetscLGMap**) + PetscErrorCode DMCompositeCreate(MPI_Comm, PetscDM*) + PetscErrorCode DMCompositeAddDM(PetscDM, PetscDM) + PetscErrorCode DMCompositeGetNumberDM(PetscDM, PetscInt*) + PetscErrorCode DMCompositeScatterArray(PetscDM, PetscVec, PetscVec*) + PetscErrorCode DMCompositeGatherArray(PetscDM, PetscInsertMode, PetscVec, PetscVec*) + PetscErrorCode DMCompositeGetEntriesArray(PetscDM, PetscDM*) + PetscErrorCode DMCompositeGetAccessArray(PetscDM, PetscVec, PetscInt, const PetscInt*, PetscVec*) + PetscErrorCode DMCompositeRestoreAccessArray(PetscDM, PetscVec, PetscInt, const PetscInt*, PetscVec*) + PetscErrorCode DMCompositeGetGlobalISs(PetscDM, PetscIS**) + PetscErrorCode DMCompositeGetLocalISs(PetscDM, PetscIS**) + PetscErrorCode DMCompositeGetISLocalToGlobalMappings(PetscDM, PetscLGMap**) cdef class _DMComposite_access: cdef PetscDM dm @@ -26,28 +26,28 @@ cdef class _DMComposite_access: def __cinit__(self, DM dm, Vec gvec, locs=None): self.dm = dm.dm - CHKERR( PetscINCREF(&self.dm) ) + CHKERR(PetscINCREF(&self.dm)) self.gvec = gvec.vec - CHKERR( PetscINCREF(&self.gvec) ) + CHKERR(PetscINCREF(&self.gvec)) if locs is None: - CHKERR( DMCompositeGetNumberDM(self.dm, &self.nlocs) ) + CHKERR(DMCompositeGetNumberDM(self.dm, &self.nlocs)) locs = arange(0, self.nlocs, 1) self.locs_mem = iarray_i(locs, &self.nlocs, &self.locs) self.vecs_mem = oarray_p(empty_p(self.nlocs), NULL, &self.vecs) self.access = None def __dealloc__(self): - CHKERR( DMDestroy(&self.dm) ) - CHKERR( VecDestroy(&self.gvec) ) + CHKERR(DMDestroy(&self.dm)) + CHKERR(VecDestroy(&self.gvec)) def __enter__(self): - cdef Py_ssize_t i, n = self.nlocs - CHKERR( DMCompositeGetAccessArray(self.dm, self.gvec, self.nlocs, self.locs, self.vecs) ) + cdef Py_ssize_t n = self.nlocs + CHKERR(DMCompositeGetAccessArray(self.dm, self.gvec, self.nlocs, self.locs, self.vecs)) self.access = [ref_Vec(self.vecs[i]) for i from 0 <= i < n] return tuple(self.access) def __exit__(self, *exc): cdef Py_ssize_t i, n = self.nlocs for i from 0 <= i < n: (self.access[i]).vec = NULL - CHKERR( DMCompositeRestoreAccessArray(self.dm, self.gvec, self.nlocs, self.locs, self.vecs) ) + 
CHKERR(DMCompositeRestoreAccessArray(self.dm, self.gvec, self.nlocs, self.locs, self.vecs)) self.access = None diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmda.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmda.pxi index fff95d243b3..9af9303052e 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmda.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmda.pxi @@ -15,73 +15,73 @@ cdef extern from * nogil: DMDA_ELEMENT_Q1 PetscErrorCode DMDACreateND(MPI_Comm, - PetscInt,PetscInt, # dim, dof - PetscInt,PetscInt,PetscInt, # M, N, P - PetscInt,PetscInt,PetscInt, # m, n, p - PetscInt[],PetscInt[],PetscInt[], # lx, ly, lz - PetscDMBoundaryType, # bx - PetscDMBoundaryType, # by - PetscDMBoundaryType, # bz - PetscDMDAStencilType, # stencil type - PetscInt, # stencil width - PetscDM*) - - PetscErrorCode DMDASetDof(PetscDM,PetscInt) - PetscErrorCode DMDASetSizes(PetscDM,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMDASetNumProcs(PetscDM,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMDASetBoundaryType(PetscDM,PetscDMBoundaryType,PetscDMBoundaryType,PetscDMBoundaryType) - PetscErrorCode DMDASetStencilType(PetscDM,PetscDMDAStencilType) - PetscErrorCode DMDASetStencilWidth(PetscDM,PetscInt) + PetscInt, PetscInt, # dim, dof + PetscInt, PetscInt, PetscInt, # M, N, P + PetscInt, PetscInt, PetscInt, # m, n, p + PetscInt[], PetscInt[], PetscInt[], # lx, ly, lz + PetscDMBoundaryType, # bx + PetscDMBoundaryType, # by + PetscDMBoundaryType, # bz + PetscDMDAStencilType, # stencil type + PetscInt, # stencil width + PetscDM*) + + PetscErrorCode DMDASetDof(PetscDM, PetscInt) + PetscErrorCode DMDASetSizes(PetscDM, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMDASetNumProcs(PetscDM, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMDASetBoundaryType(PetscDM, PetscDMBoundaryType, PetscDMBoundaryType, PetscDMBoundaryType) + PetscErrorCode DMDASetStencilType(PetscDM, PetscDMDAStencilType) + PetscErrorCode DMDASetStencilWidth(PetscDM, PetscInt) PetscErrorCode DMDAGetInfo(PetscDM, - PetscInt*, - PetscInt*,PetscInt*,PetscInt*, - PetscInt*,PetscInt*,PetscInt*, - PetscInt*,PetscInt*, - PetscDMBoundaryType*, - PetscDMBoundaryType*, - PetscDMBoundaryType*, - PetscDMDAStencilType*) + PetscInt*, + PetscInt*, PetscInt*, PetscInt*, + PetscInt*, PetscInt*, PetscInt*, + PetscInt*, PetscInt*, + PetscDMBoundaryType*, + PetscDMBoundaryType*, + PetscDMBoundaryType*, + PetscDMDAStencilType*) PetscErrorCode DMDAGetCorners(PetscDM, - PetscInt*,PetscInt*,PetscInt*, - PetscInt*,PetscInt*,PetscInt*) + PetscInt*, PetscInt*, PetscInt*, + PetscInt*, PetscInt*, PetscInt*) PetscErrorCode DMDAGetGhostCorners(PetscDM, - PetscInt*,PetscInt*,PetscInt*, - PetscInt*,PetscInt*,PetscInt*) + PetscInt*, PetscInt*, PetscInt*, + PetscInt*, PetscInt*, PetscInt*) PetscErrorCode DMDAGetOwnershipRanges(PetscDM, - const PetscInt*[], - const PetscInt*[], - const PetscInt*[]) + const PetscInt*[], + const PetscInt*[], + const PetscInt*[]) PetscErrorCode DMDASetUniformCoordinates(PetscDM, - PetscReal,PetscReal, - PetscReal,PetscReal, - PetscReal,PetscReal) - PetscErrorCode DMGetBoundingBox(PetscDM,PetscReal[],PetscReal[]) - PetscErrorCode DMGetLocalBoundingBox(PetscDM,PetscReal[],PetscReal[]) - - PetscErrorCode DMDACreateNaturalVector(PetscDM,PetscVec*) - PetscErrorCode DMDAGlobalToNaturalBegin(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode DMDAGlobalToNaturalEnd(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode DMDANaturalToGlobalBegin(PetscDM,PetscVec,PetscInsertMode,PetscVec) - PetscErrorCode 
DMDANaturalToGlobalEnd(PetscDM,PetscVec,PetscInsertMode,PetscVec) - - PetscErrorCode DMDAGetAO(PetscDM,PetscAO*) - PetscErrorCode DMDAGetScatter(PetscDM,PetscScatter*,PetscScatter*) - - PetscErrorCode DMDASetRefinementFactor(PetscDM,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMDAGetRefinementFactor(PetscDM,PetscInt*,PetscInt*,PetscInt*) - PetscErrorCode DMDASetInterpolationType(PetscDM,PetscDMDAInterpolationType) - PetscErrorCode DMDAGetInterpolationType(PetscDM,PetscDMDAInterpolationType*) - PetscErrorCode DMDASetElementType(PetscDM,PetscDMDAElementType) - PetscErrorCode DMDAGetElementType(PetscDM,PetscDMDAElementType*) - PetscErrorCode DMDAGetElements(PetscDM,PetscInt*,PetscInt*,const PetscInt**) - PetscErrorCode DMDARestoreElements(PetscDM,PetscInt*,PetscInt*,const PetscInt**) - - PetscErrorCode DMDASetFieldName(PetscDM,PetscInt,const char[]) - PetscErrorCode DMDAGetFieldName(PetscDM,PetscInt,const char*[]) - PetscErrorCode DMDASetCoordinateName(PetscDM,PetscInt,const char[]) - PetscErrorCode DMDAGetCoordinateName(PetscDM,PetscInt,const char*[]) + PetscReal, PetscReal, + PetscReal, PetscReal, + PetscReal, PetscReal) + PetscErrorCode DMGetBoundingBox(PetscDM, PetscReal[], PetscReal[]) + PetscErrorCode DMGetLocalBoundingBox(PetscDM, PetscReal[], PetscReal[]) + + PetscErrorCode DMDACreateNaturalVector(PetscDM, PetscVec*) + PetscErrorCode DMDAGlobalToNaturalBegin(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMDAGlobalToNaturalEnd(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMDANaturalToGlobalBegin(PetscDM, PetscVec, PetscInsertMode, PetscVec) + PetscErrorCode DMDANaturalToGlobalEnd(PetscDM, PetscVec, PetscInsertMode, PetscVec) + + PetscErrorCode DMDAGetAO(PetscDM, PetscAO*) + PetscErrorCode DMDAGetScatter(PetscDM, PetscScatter*, PetscScatter*) + + PetscErrorCode DMDASetRefinementFactor(PetscDM, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMDAGetRefinementFactor(PetscDM, PetscInt*, PetscInt*, PetscInt*) + PetscErrorCode DMDASetInterpolationType(PetscDM, PetscDMDAInterpolationType) + PetscErrorCode DMDAGetInterpolationType(PetscDM, PetscDMDAInterpolationType*) + PetscErrorCode DMDASetElementType(PetscDM, PetscDMDAElementType) + PetscErrorCode DMDAGetElementType(PetscDM, PetscDMDAElementType*) + PetscErrorCode DMDAGetElements(PetscDM, PetscInt*, PetscInt*, const PetscInt**) + PetscErrorCode DMDARestoreElements(PetscDM, PetscInt*, PetscInt*, const PetscInt**) + + PetscErrorCode DMDASetFieldName(PetscDM, PetscInt, const char[]) + PetscErrorCode DMDAGetFieldName(PetscDM, PetscInt, const char*[]) + PetscErrorCode DMDASetCoordinateName(PetscDM, PetscInt, const char[]) + PetscErrorCode DMDAGetCoordinateName(PetscDM, PetscInt, const char*[]) # -------------------------------------------------------------------- @@ -114,12 +114,12 @@ cdef inline PetscDMDAElementType daelementtype(object etype) \ return etype cdef inline PetscErrorCode DMDAGetDim(PetscDM da, PetscInt *dim) noexcept nogil: - return DMDAGetInfo(da, dim, - NULL, NULL, NULL, - NULL, NULL, NULL, - NULL, NULL, - NULL, NULL, NULL, - NULL) + return DMDAGetInfo(da, dim, + NULL, NULL, NULL, + NULL, NULL, NULL, + NULL, NULL, + NULL, NULL, NULL, + NULL) cdef inline PetscInt asDims(dims, PetscInt *_M, @@ -177,7 +177,7 @@ cdef inline tuple asOwnershipRanges(object ownership_ranges, if p[0] == PETSC_DECIDE: p[0] = nlz elif p[0] != nlz: raise ValueError( "ownership range size %d and number or processors %d" % - (toInt(nlz), toInt(p[0]))) + (toInt(nlz), toInt(p[0]))) return tuple(ranges) cdef inline tuple 
toOwnershipRanges(PetscInt dim, @@ -205,22 +205,22 @@ cdef class _DMDA_Vec_array(object): def __cinit__(self, DMDA da, Vec vec, bint DOF=False): # cdef PetscInt dim=0, dof=0 - CHKERR( DMDAGetInfo(da.dm, - &dim, NULL, NULL, NULL, NULL, NULL, NULL, - &dof, NULL, NULL, NULL, NULL, NULL) ) + CHKERR(DMDAGetInfo(da.dm, + &dim, NULL, NULL, NULL, NULL, NULL, NULL, + &dof, NULL, NULL, NULL, NULL, NULL)) cdef PetscInt lxs=0, lys=0, lzs=0 cdef PetscInt lxm=0, lym=0, lzm=0 - CHKERR( DMDAGetCorners(da.dm, - &lxs, &lys, &lzs, - &lxm, &lym, &lzm) ) + CHKERR(DMDAGetCorners(da.dm, + &lxs, &lys, &lzs, + &lxm, &lym, &lzm)) cdef PetscInt gxs=0, gys=0, gzs=0 cdef PetscInt gxm=0, gym=0, gzm=0 - CHKERR( DMDAGetGhostCorners(da.dm, - &gxs, &gys, &gzs, - &gxm, &gym, &gzm) ) + CHKERR(DMDAGetGhostCorners(da.dm, + &gxs, &gys, &gzs, + &gxm, &gym, &gzm)) # cdef PetscInt n=0 - CHKERR( VecGetLocalSize(vec.vec, &n) ) + CHKERR(VecGetLocalSize(vec.vec, &n)) cdef PetscInt xs, ys, zs, xm, ym, zm if (n == lxm*lym*lzm*dof): xs, ys, zs = lxs, lys, lzs @@ -287,13 +287,13 @@ cdef class _DMDA_Vec_array(object): cdef object adjust_index_exp(object starts, object index): - if not isinstance(index, tuple): - return adjust_index(starts[0], index) - index = list(index) - for i, start in enumerate(starts): - index[i] = adjust_index(start, index[i]) - index = tuple(index) - return index + if not isinstance(index, tuple): + return adjust_index(starts[0], index) + index = list(index) + for i, start in enumerate(starts): + index[i] = adjust_index(start, index[i]) + index = tuple(index) + return index cdef object adjust_index(object lbound, object index): if index is None: diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmlabel.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmlabel.pxi index 8d6bb32eb90..74b2d0daf02 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmlabel.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmlabel.pxi @@ -2,40 +2,40 @@ cdef extern from* nogil: - PetscErrorCode DMLabelCreate(MPI_Comm,char[],PetscDMLabel*) - PetscErrorCode DMLabelView(PetscDMLabel,PetscViewer) + PetscErrorCode DMLabelCreate(MPI_Comm, char[], PetscDMLabel*) + PetscErrorCode DMLabelView(PetscDMLabel, PetscViewer) PetscErrorCode DMLabelReset(PetscDMLabel) PetscErrorCode DMLabelDestroy(PetscDMLabel*) - PetscErrorCode DMLabelGetDefaultValue(PetscDMLabel,PetscInt*) - PetscErrorCode DMLabelSetDefaultValue(PetscDMLabel,PetscInt) - PetscErrorCode DMLabelDuplicate(PetscDMLabel,PetscDMLabel*) - PetscErrorCode DMLabelGetValue(PetscDMLabel,PetscInt,PetscInt*) - PetscErrorCode DMLabelSetValue(PetscDMLabel,PetscInt,PetscInt) - PetscErrorCode DMLabelClearValue(PetscDMLabel,PetscInt,PetscInt) - PetscErrorCode DMLabelAddStratum(PetscDMLabel,PetscInt) - PetscErrorCode DMLabelAddStrata(PetscDMLabel,PetscInt,const PetscInt[]) - PetscErrorCode DMLabelAddStrataIS(PetscDMLabel,PetscIS) - PetscErrorCode DMLabelInsertIS(PetscDMLabel,PetscIS,PetscInt) - PetscErrorCode DMLabelGetNumValues(PetscDMLabel,PetscInt*) + PetscErrorCode DMLabelGetDefaultValue(PetscDMLabel, PetscInt*) + PetscErrorCode DMLabelSetDefaultValue(PetscDMLabel, PetscInt) + PetscErrorCode DMLabelDuplicate(PetscDMLabel, PetscDMLabel*) + PetscErrorCode DMLabelGetValue(PetscDMLabel, PetscInt, PetscInt*) + PetscErrorCode DMLabelSetValue(PetscDMLabel, PetscInt, PetscInt) + PetscErrorCode DMLabelClearValue(PetscDMLabel, PetscInt, PetscInt) + PetscErrorCode DMLabelAddStratum(PetscDMLabel, PetscInt) + PetscErrorCode DMLabelAddStrata(PetscDMLabel, PetscInt, const PetscInt[]) + 
PetscErrorCode DMLabelAddStrataIS(PetscDMLabel, PetscIS) + PetscErrorCode DMLabelInsertIS(PetscDMLabel, PetscIS, PetscInt) + PetscErrorCode DMLabelGetNumValues(PetscDMLabel, PetscInt*) - PetscErrorCode DMLabelGetStratumBounds(PetscDMLabel,PetscInt,PetscInt*,PetscInt*) - PetscErrorCode DMLabelGetValueIS(PetscDMLabel,PetscIS*) - PetscErrorCode DMLabelStratumHasPoint(PetscDMLabel,PetscInt,PetscInt,PetscBool*) - PetscErrorCode DMLabelHasStratum(PetscDMLabel,PetscInt,PetscBool*) - PetscErrorCode DMLabelGetStratumSize(PetscDMLabel,PetscInt,PetscInt*) - PetscErrorCode DMLabelGetStratumIS(PetscDMLabel,PetscInt,PetscIS*) - PetscErrorCode DMLabelSetStratumIS(PetscDMLabel,PetscInt,PetscIS) - PetscErrorCode DMLabelClearStratum(PetscDMLabel,PetscInt) + PetscErrorCode DMLabelGetStratumBounds(PetscDMLabel, PetscInt, PetscInt*, PetscInt*) + PetscErrorCode DMLabelGetValueIS(PetscDMLabel, PetscIS*) + PetscErrorCode DMLabelStratumHasPoint(PetscDMLabel, PetscInt, PetscInt, PetscBool*) + PetscErrorCode DMLabelHasStratum(PetscDMLabel, PetscInt, PetscBool*) + PetscErrorCode DMLabelGetStratumSize(PetscDMLabel, PetscInt, PetscInt*) + PetscErrorCode DMLabelGetStratumIS(PetscDMLabel, PetscInt, PetscIS*) + PetscErrorCode DMLabelSetStratumIS(PetscDMLabel, PetscInt, PetscIS) + PetscErrorCode DMLabelClearStratum(PetscDMLabel, PetscInt) PetscErrorCode DMLabelComputeIndex(PetscDMLabel) - PetscErrorCode DMLabelCreateIndex(PetscDMLabel,PetscInt,PetscInt) + PetscErrorCode DMLabelCreateIndex(PetscDMLabel, PetscInt, PetscInt) PetscErrorCode DMLabelDestroyIndex(PetscDMLabel) - PetscErrorCode DMLabelHasValue(PetscDMLabel,PetscInt,PetscBool*) - PetscErrorCode DMLabelHasPoint(PetscDMLabel,PetscInt,PetscBool*) - PetscErrorCode DMLabelGetBounds(PetscDMLabel,PetscInt*,PetscInt*) - PetscErrorCode DMLabelFilter(PetscDMLabel,PetscInt,PetscInt) - PetscErrorCode DMLabelPermute(PetscDMLabel,PetscIS,PetscDMLabel*) - PetscErrorCode DMLabelDistribute(PetscDMLabel,PetscSF,PetscDMLabel*) - PetscErrorCode DMLabelGather(PetscDMLabel,PetscSF,PetscDMLabel*) - PetscErrorCode DMLabelConvertToSection(PetscDMLabel,PetscSection*,PetscIS*) + PetscErrorCode DMLabelHasValue(PetscDMLabel, PetscInt, PetscBool*) + PetscErrorCode DMLabelHasPoint(PetscDMLabel, PetscInt, PetscBool*) + PetscErrorCode DMLabelGetBounds(PetscDMLabel, PetscInt*, PetscInt*) + PetscErrorCode DMLabelFilter(PetscDMLabel, PetscInt, PetscInt) + PetscErrorCode DMLabelPermute(PetscDMLabel, PetscIS, PetscDMLabel*) + PetscErrorCode DMLabelDistribute(PetscDMLabel, PetscSF, PetscDMLabel*) + PetscErrorCode DMLabelGather(PetscDMLabel, PetscSF, PetscDMLabel*) + PetscErrorCode DMLabelConvertToSection(PetscDMLabel, PetscSection*, PetscIS*) PetscErrorCode DMLabelGetNonEmptyStratumValuesIS(PetscDMLabel, PetscIS*) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmplex.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmplex.pxi index 3798849a99e..b137a85f1ac 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmplex.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmplex.pxi @@ -14,212 +14,180 @@ cdef extern from * nogil: PetscDMPlexTransformType DMPLEXEXTRUDE PetscDMPlexTransformType DMPLEXTRANSFORMFILTER - PetscErrorCode DMPlexCreate(MPI_Comm,PetscDM*) - PetscErrorCode DMPlexCreateCohesiveSubmesh(PetscDM,PetscBool,const char[],PetscInt,PetscDM*) - PetscErrorCode DMPlexCreateFromCellListPetsc(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscBool,PetscInt[],PetscInt,PetscReal[],PetscDM*) - #int DMPlexCreateFromDAG(PetscDM,PetscInt,const PetscInt[],const PetscInt[],const 
PetscInt[],const PetscInt[],const PetscScalar[]) - - PetscErrorCode DMPlexGetChart(PetscDM,PetscInt*,PetscInt*) - PetscErrorCode DMPlexSetChart(PetscDM,PetscInt,PetscInt) - PetscErrorCode DMPlexGetConeSize(PetscDM,PetscInt,PetscInt*) - PetscErrorCode DMPlexSetConeSize(PetscDM,PetscInt,PetscInt) - PetscErrorCode DMPlexGetCone(PetscDM,PetscInt,const PetscInt*[]) - PetscErrorCode DMPlexSetCone(PetscDM,PetscInt,const PetscInt[]) - PetscErrorCode DMPlexInsertCone(PetscDM,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMPlexInsertConeOrientation(PetscDM,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMPlexGetConeOrientation(PetscDM,PetscInt,const PetscInt*[]) - PetscErrorCode DMPlexSetConeOrientation(PetscDM,PetscInt,const PetscInt[]) - PetscErrorCode DMPlexSetCellType(PetscDM,PetscInt,PetscDMPolytopeType) - PetscErrorCode DMPlexGetCellType(PetscDM,PetscInt,PetscDMPolytopeType*) - PetscErrorCode DMPlexGetCellTypeLabel(PetscDM,PetscDMLabel*) - PetscErrorCode DMPlexGetSupportSize(PetscDM,PetscInt,PetscInt*) - PetscErrorCode DMPlexSetSupportSize(PetscDM,PetscInt,PetscInt) - PetscErrorCode DMPlexGetSupport(PetscDM,PetscInt,const PetscInt*[]) - PetscErrorCode DMPlexSetSupport(PetscDM,PetscInt,const PetscInt[]) - #int DMPlexInsertSupport(PetscDM,PetscInt,PetscInt,PetscInt) - #int DMPlexGetConeSection(PetscDM,PetscSection*) - #int DMPlexGetSupportSection(PetscDM,PetscSection*) - #int DMPlexGetCones(PetscDM,PetscInt*[]) - #int DMPlexGetConeOrientations(PetscDM,PetscInt*[]) - PetscErrorCode DMPlexGetMaxSizes(PetscDM,PetscInt*,PetscInt*) + PetscErrorCode DMPlexCreate(MPI_Comm, PetscDM*) + PetscErrorCode DMPlexCreateCohesiveSubmesh(PetscDM, PetscBool, const char[], PetscInt, PetscDM*) + PetscErrorCode DMPlexCreateFromCellListPetsc(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscBool, PetscInt[], PetscInt, PetscReal[], PetscDM*) + + PetscErrorCode DMPlexGetChart(PetscDM, PetscInt*, PetscInt*) + PetscErrorCode DMPlexSetChart(PetscDM, PetscInt, PetscInt) + PetscErrorCode DMPlexGetConeSize(PetscDM, PetscInt, PetscInt*) + PetscErrorCode DMPlexSetConeSize(PetscDM, PetscInt, PetscInt) + PetscErrorCode DMPlexGetCone(PetscDM, PetscInt, const PetscInt*[]) + PetscErrorCode DMPlexSetCone(PetscDM, PetscInt, const PetscInt[]) + PetscErrorCode DMPlexInsertCone(PetscDM, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMPlexInsertConeOrientation(PetscDM, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMPlexGetConeOrientation(PetscDM, PetscInt, const PetscInt*[]) + PetscErrorCode DMPlexSetConeOrientation(PetscDM, PetscInt, const PetscInt[]) + PetscErrorCode DMPlexSetCellType(PetscDM, PetscInt, PetscDMPolytopeType) + PetscErrorCode DMPlexGetCellType(PetscDM, PetscInt, PetscDMPolytopeType*) + PetscErrorCode DMPlexGetCellTypeLabel(PetscDM, PetscDMLabel*) + PetscErrorCode DMPlexGetSupportSize(PetscDM, PetscInt, PetscInt*) + PetscErrorCode DMPlexSetSupportSize(PetscDM, PetscInt, PetscInt) + PetscErrorCode DMPlexGetSupport(PetscDM, PetscInt, const PetscInt*[]) + PetscErrorCode DMPlexSetSupport(PetscDM, PetscInt, const PetscInt[]) + PetscErrorCode DMPlexGetMaxSizes(PetscDM, PetscInt*, PetscInt*) PetscErrorCode DMPlexSymmetrize(PetscDM) PetscErrorCode DMPlexStratify(PetscDM) - #int DMPlexEqual(PetscDM,PetscDM,PetscBool*) PetscErrorCode DMPlexOrient(PetscDM) - PetscErrorCode DMPlexInterpolate(PetscDM,PetscDM*) - PetscErrorCode DMPlexUninterpolate(PetscDM,PetscDM*) - #int DMPlexLoad(PetscViewer,PetscDM) - #int DMPlexSetPreallocationCenterDimension(PetscDM,PetscInt) - #int DMPlexGetPreallocationCenterDimension(PetscDM,PetscInt*) - 
#int DMPlexPreallocateOperator(PetscDM,PetscInt,PetscSection,PetscSection,PetscInt[],PetscInt[],PetscInt[],PetscInt[],Mat,PetscBool) - PetscErrorCode DMPlexGetPointLocal(PetscDM,PetscInt,PetscInt*,PetscInt*) - #int DMPlexPointLocalRef(PetscDM,PetscInt,PetscScalar*,void*) - #int DMPlexPointLocalRead(PetscDM,PetscInt,const PetscScalar*,const void*) - PetscErrorCode DMPlexGetPointGlobal(PetscDM,PetscInt,PetscInt*,PetscInt*) - #int DMPlexPointGlobalRef(PetscDM,PetscInt,PetscScalar*,void*) - #int DMPlexPointGlobalRead(PetscDM,PetscInt,const PetscScalar*,const void*) - PetscErrorCode DMPlexGetPointLocalField(PetscDM,PetscInt,PetscInt,PetscInt*,PetscInt*) - PetscErrorCode DMPlexGetPointGlobalField(PetscDM,PetscInt,PetscInt,PetscInt*,PetscInt*) - PetscErrorCode DMPlexCreateClosureIndex(PetscDM,PetscSection) - #int PetscSectionCreateGlobalSectionLabel(PetscSection,PetscSF,PetscBool,PetscBool,PetscDMLabel,PetscInt,PetscSection*) - - PetscErrorCode DMPlexGetCellNumbering(PetscDM,PetscIS*) - PetscErrorCode DMPlexGetVertexNumbering(PetscDM,PetscIS*) - PetscErrorCode DMPlexCreatePointNumbering(PetscDM,PetscIS*) - - PetscErrorCode DMPlexGetDepth(PetscDM,PetscInt*) - #int DMPlexGetDepthLabel(PetscDM,PetscDMLabel*) - PetscErrorCode DMPlexGetDepthStratum(PetscDM,PetscInt,PetscInt*,PetscInt*) - PetscErrorCode DMPlexGetHeightStratum(PetscDM,PetscInt,PetscInt*,PetscInt*) - PetscErrorCode DMPlexGetPointDepth(PetscDM,PetscInt,PetscInt*) - PetscErrorCode DMPlexGetPointHeight(PetscDM,PetscInt,PetscInt*) - - PetscErrorCode DMPlexGetMeet(PetscDM,PetscInt,const PetscInt[],PetscInt*,const PetscInt**) - #int DMPlexGetFullMeet(PetscDM,PetscInt,const PetscInt[],PetscInt*,const PetscInt**) - PetscErrorCode DMPlexRestoreMeet(PetscDM,PetscInt,const PetscInt[],PetscInt*,const PetscInt**) - PetscErrorCode DMPlexGetJoin(PetscDM,PetscInt,const PetscInt[],PetscInt*,const PetscInt**) - PetscErrorCode DMPlexGetFullJoin(PetscDM,PetscInt,const PetscInt[],PetscInt*,const PetscInt**) - PetscErrorCode DMPlexRestoreJoin(PetscDM,PetscInt,const PetscInt[],PetscInt*,const PetscInt**) - PetscErrorCode DMPlexGetTransitiveClosure(PetscDM,PetscInt,PetscBool,PetscInt*,PetscInt*[]) - PetscErrorCode DMPlexRestoreTransitiveClosure(PetscDM,PetscInt,PetscBool,PetscInt*,PetscInt*[]) - PetscErrorCode DMPlexVecGetClosure(PetscDM,PetscSection,PetscVec,PetscInt,PetscInt*,PetscScalar*[]) - PetscErrorCode DMPlexVecRestoreClosure(PetscDM,PetscSection,PetscVec,PetscInt,PetscInt*,PetscScalar*[]) - PetscErrorCode DMPlexVecSetClosure(PetscDM,PetscSection,PetscVec,PetscInt,PetscScalar[],PetscInsertMode) - PetscErrorCode DMPlexMatSetClosure(PetscDM,PetscSection,PetscSection,PetscMat,PetscInt,PetscScalar[],PetscInsertMode) - - PetscErrorCode DMPlexGenerate(PetscDM,const char[],PetscBool ,PetscDM*) - PetscErrorCode DMPlexTriangleSetOptions(PetscDM,const char*) - PetscErrorCode DMPlexTetgenSetOptions(PetscDM,const char*) - #int DMPlexCopyCoordinates(PetscDM,PetscDM) - #int DMPlexCreateDoublet(MPI_Comm,PetscInt,PetscBool,PetscBool,PetscBool,PetscReal,PetscDM*) - PetscErrorCode DMPlexCreateBoxMesh(MPI_Comm,PetscInt,PetscBool,PetscInt[],PetscReal[],PetscReal[],PetscDMBoundaryType[],PetscBool,PetscDM*) - PetscErrorCode DMPlexCreateBoxSurfaceMesh(MPI_Comm,PetscInt,PetscInt[],PetscReal[],PetscReal[],PetscBool,PetscDM*) - PetscErrorCode DMPlexCreateFromFile(MPI_Comm,const char[],const char[],PetscBool,PetscDM*) - PetscErrorCode DMPlexCreateCGNS(MPI_Comm,PetscInt,PetscBool,PetscDM*) - PetscErrorCode DMPlexCreateCGNSFromFile(MPI_Comm,const char[],PetscBool,PetscDM*) - 
PetscErrorCode DMPlexCreateExodus(MPI_Comm,PetscInt,PetscBool,PetscDM*) - PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm,const char[],PetscBool,PetscDM*) - PetscErrorCode DMPlexCreateGmsh(MPI_Comm,PetscViewer,PetscBool,PetscDM*) - - #int DMPlexInvertCell(PetscInt,PetscInt,int[]) - #int DMPlexCheckSymmetry(PetscDM) - #int DMPlexCheckSkeleton(PetscDM,PetscBool,PetscInt) - #int DMPlexCheckFaces(PetscDM,PetscBool,PetscInt) - - PetscErrorCode DMPlexSetAdjacencyUseAnchors(PetscDM,PetscBool) - PetscErrorCode DMPlexGetAdjacencyUseAnchors(PetscDM,PetscBool*) - PetscErrorCode DMPlexGetAdjacency(PetscDM,PetscInt,PetscInt*,PetscInt*[]) - #int DMPlexCreateNeighborCSR(PetscDM,PetscInt,PetscInt*,PetscInt**,PetscInt**) - PetscErrorCode DMPlexRebalanceSharedPoints(PetscDM,PetscInt,PetscBool,PetscBool,PetscBool*) - PetscErrorCode DMPlexDistribute(PetscDM,PetscInt,PetscSF*,PetscDM*) - PetscErrorCode DMPlexDistributeOverlap(PetscDM,PetscInt,PetscSF*,PetscDM*) - PetscErrorCode DMPlexDistributeGetDefault(PetscDM,PetscBool*) - PetscErrorCode DMPlexDistributeSetDefault(PetscDM,PetscBool) - PetscErrorCode DMPlexSetPartitioner(PetscDM,PetscPartitioner) - PetscErrorCode DMPlexGetPartitioner(PetscDM,PetscPartitioner*) - PetscErrorCode DMPlexDistributeField(PetscDM,PetscSF,PetscSection,PetscVec,PetscSection,PetscVec) - #int DMPlexDistributeData(PetscDM,PetscSF,PetscSection,MPI_Datatype,void*,PetscSection,void**) - PetscErrorCode DMPlexIsDistributed(PetscDM,PetscBool*) - PetscErrorCode DMPlexIsSimplex(PetscDM,PetscBool*) - PetscErrorCode DMPlexDistributionSetName(PetscDM,const char[]) - PetscErrorCode DMPlexDistributionGetName(PetscDM,const char*[]) - - PetscErrorCode DMPlexGetOrdering(PetscDM,PetscMatOrderingType,PetscDMLabel,PetscIS*) - PetscErrorCode DMPlexPermute(PetscDM,PetscIS,PetscDM*) - PetscErrorCode DMPlexReorderGetDefault(PetscDM,PetscDMReorderDefaultFlag*) - PetscErrorCode DMPlexReorderSetDefault(PetscDM,PetscDMReorderDefaultFlag) - - #int DMPlexCreateSubmesh(PetscDM,PetscDMLabel,PetscInt,PetscDM*) - #int DMPlexCreateHybridMesh(PetscDM,PetscDMLabel,PetscDMLabel,PetscInt,PetscDMLabel*,PetscDMLabel*,PetscDM *,PetscDM *) - PetscErrorCode DMPlexGetSubpointMap(PetscDM,PetscDMLabel*) - #int DMPlexSetSubpointMap(PetscDM,PetscDMLabel) - PetscErrorCode DMPlexGetSubpointIS(PetscDM,PetscIS*) - - PetscErrorCode DMPlexCreateCoarsePointIS(PetscDM,PetscIS*) - PetscErrorCode DMPlexMarkBoundaryFaces(PetscDM,PetscInt,PetscDMLabel) - PetscErrorCode DMPlexLabelComplete(PetscDM,PetscDMLabel) - PetscErrorCode DMPlexLabelCohesiveComplete(PetscDM,PetscDMLabel,PetscDMLabel,PetscInt,PetscBool,PetscDM) - - PetscErrorCode DMPlexGetRefinementLimit(PetscDM,PetscReal*) - PetscErrorCode DMPlexSetRefinementLimit(PetscDM,PetscReal) - PetscErrorCode DMPlexGetRefinementUniform(PetscDM,PetscBool*) - PetscErrorCode DMPlexSetRefinementUniform(PetscDM,PetscBool) + PetscErrorCode DMPlexInterpolate(PetscDM, PetscDM*) + PetscErrorCode DMPlexUninterpolate(PetscDM, PetscDM*) + PetscErrorCode DMPlexGetPointLocal(PetscDM, PetscInt, PetscInt*, PetscInt*) + PetscErrorCode DMPlexGetPointGlobal(PetscDM, PetscInt, PetscInt*, PetscInt*) + PetscErrorCode DMPlexGetPointLocalField(PetscDM, PetscInt, PetscInt, PetscInt*, PetscInt*) + PetscErrorCode DMPlexGetPointGlobalField(PetscDM, PetscInt, PetscInt, PetscInt*, PetscInt*) + PetscErrorCode DMPlexCreateClosureIndex(PetscDM, PetscSection) + + PetscErrorCode DMPlexGetCellNumbering(PetscDM, PetscIS*) + PetscErrorCode DMPlexGetVertexNumbering(PetscDM, PetscIS*) + PetscErrorCode DMPlexCreatePointNumbering(PetscDM, 
PetscIS*) + + PetscErrorCode DMPlexGetDepth(PetscDM, PetscInt*) + PetscErrorCode DMPlexGetDepthStratum(PetscDM, PetscInt, PetscInt*, PetscInt*) + PetscErrorCode DMPlexGetHeightStratum(PetscDM, PetscInt, PetscInt*, PetscInt*) + PetscErrorCode DMPlexGetPointDepth(PetscDM, PetscInt, PetscInt*) + PetscErrorCode DMPlexGetPointHeight(PetscDM, PetscInt, PetscInt*) + + PetscErrorCode DMPlexGetMeet(PetscDM, PetscInt, const PetscInt[], PetscInt*, const PetscInt**) + PetscErrorCode DMPlexRestoreMeet(PetscDM, PetscInt, const PetscInt[], PetscInt*, const PetscInt**) + PetscErrorCode DMPlexGetJoin(PetscDM, PetscInt, const PetscInt[], PetscInt*, const PetscInt**) + PetscErrorCode DMPlexGetFullJoin(PetscDM, PetscInt, const PetscInt[], PetscInt*, const PetscInt**) + PetscErrorCode DMPlexRestoreJoin(PetscDM, PetscInt, const PetscInt[], PetscInt*, const PetscInt**) + PetscErrorCode DMPlexGetTransitiveClosure(PetscDM, PetscInt, PetscBool, PetscInt*, PetscInt*[]) + PetscErrorCode DMPlexRestoreTransitiveClosure(PetscDM, PetscInt, PetscBool, PetscInt*, PetscInt*[]) + PetscErrorCode DMPlexVecGetClosure(PetscDM, PetscSection, PetscVec, PetscInt, PetscInt*, PetscScalar*[]) + PetscErrorCode DMPlexVecRestoreClosure(PetscDM, PetscSection, PetscVec, PetscInt, PetscInt*, PetscScalar*[]) + PetscErrorCode DMPlexVecSetClosure(PetscDM, PetscSection, PetscVec, PetscInt, PetscScalar[], PetscInsertMode) + PetscErrorCode DMPlexMatSetClosure(PetscDM, PetscSection, PetscSection, PetscMat, PetscInt, PetscScalar[], PetscInsertMode) + + PetscErrorCode DMPlexGenerate(PetscDM, const char[], PetscBool , PetscDM*) + PetscErrorCode DMPlexTriangleSetOptions(PetscDM, const char*) + PetscErrorCode DMPlexTetgenSetOptions(PetscDM, const char*) + PetscErrorCode DMPlexCreateBoxMesh(MPI_Comm, PetscInt, PetscBool, PetscInt[], PetscReal[], PetscReal[], PetscDMBoundaryType[], PetscBool, PetscDM*) + PetscErrorCode DMPlexCreateBoxSurfaceMesh(MPI_Comm, PetscInt, PetscInt[], PetscReal[], PetscReal[], PetscBool, PetscDM*) + PetscErrorCode DMPlexCreateFromFile(MPI_Comm, const char[], const char[], PetscBool, PetscDM*) + PetscErrorCode DMPlexCreateCGNS(MPI_Comm, PetscInt, PetscBool, PetscDM*) + PetscErrorCode DMPlexCreateCGNSFromFile(MPI_Comm, const char[], PetscBool, PetscDM*) + PetscErrorCode DMPlexCreateExodus(MPI_Comm, PetscInt, PetscBool, PetscDM*) + PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm, const char[], PetscBool, PetscDM*) + PetscErrorCode DMPlexCreateGmsh(MPI_Comm, PetscViewer, PetscBool, PetscDM*) + + PetscErrorCode DMPlexSetAdjacencyUseAnchors(PetscDM, PetscBool) + PetscErrorCode DMPlexGetAdjacencyUseAnchors(PetscDM, PetscBool*) + PetscErrorCode DMPlexGetAdjacency(PetscDM, PetscInt, PetscInt*, PetscInt*[]) + PetscErrorCode DMPlexRebalanceSharedPoints(PetscDM, PetscInt, PetscBool, PetscBool, PetscBool*) + PetscErrorCode DMPlexDistribute(PetscDM, PetscInt, PetscSF*, PetscDM*) + PetscErrorCode DMPlexDistributeOverlap(PetscDM, PetscInt, PetscSF*, PetscDM*) + PetscErrorCode DMPlexDistributeGetDefault(PetscDM, PetscBool*) + PetscErrorCode DMPlexDistributeSetDefault(PetscDM, PetscBool) + PetscErrorCode DMPlexSetPartitioner(PetscDM, PetscPartitioner) + PetscErrorCode DMPlexGetPartitioner(PetscDM, PetscPartitioner*) + PetscErrorCode DMPlexDistributeField(PetscDM, PetscSF, PetscSection, PetscVec, PetscSection, PetscVec) + PetscErrorCode DMPlexIsDistributed(PetscDM, PetscBool*) + PetscErrorCode DMPlexIsSimplex(PetscDM, PetscBool*) + PetscErrorCode DMPlexDistributionSetName(PetscDM, const char[]) + PetscErrorCode 
DMPlexDistributionGetName(PetscDM, const char*[]) + + PetscErrorCode DMPlexGetOrdering(PetscDM, PetscMatOrderingType, PetscDMLabel, PetscIS*) + PetscErrorCode DMPlexPermute(PetscDM, PetscIS, PetscDM*) + PetscErrorCode DMPlexReorderGetDefault(PetscDM, PetscDMReorderDefaultFlag*) + PetscErrorCode DMPlexReorderSetDefault(PetscDM, PetscDMReorderDefaultFlag) + + PetscErrorCode DMPlexGetSubpointMap(PetscDM, PetscDMLabel*) + PetscErrorCode DMPlexGetSubpointIS(PetscDM, PetscIS*) + + PetscErrorCode DMPlexCreateCoarsePointIS(PetscDM, PetscIS*) + PetscErrorCode DMPlexMarkBoundaryFaces(PetscDM, PetscInt, PetscDMLabel) + PetscErrorCode DMPlexLabelComplete(PetscDM, PetscDMLabel) + PetscErrorCode DMPlexLabelCohesiveComplete(PetscDM, PetscDMLabel, PetscDMLabel, PetscInt, PetscBool, PetscBool, PetscDM) + + PetscErrorCode DMPlexGetRefinementLimit(PetscDM, PetscReal*) + PetscErrorCode DMPlexSetRefinementLimit(PetscDM, PetscReal) + PetscErrorCode DMPlexGetRefinementUniform(PetscDM, PetscBool*) + PetscErrorCode DMPlexSetRefinementUniform(PetscDM, PetscBool) PetscErrorCode DMPlexGetMinRadius(PetscDM, PetscReal*) - #int DMPlexGetNumFaceVertices(PetscDM,PetscInt,PetscInt,PetscInt*) - #int DMPlexGetOrientedFace(PetscDM,PetscInt,PetscInt,const PetscInt[],PetscInt,PetscInt[],PetscInt[],PetscInt[],PetscBool*) - PetscErrorCode DMPlexCreateSection(PetscDM,PetscDMLabel[],const PetscInt[],const PetscInt[],PetscInt,const PetscInt[],const PetscIS[],const PetscIS[],PetscIS,PetscSection*) + PetscErrorCode DMPlexCreateSection(PetscDM, PetscDMLabel[], const PetscInt[], const PetscInt[], PetscInt, const PetscInt[], const PetscIS[], const PetscIS[], PetscIS, PetscSection*) - PetscErrorCode DMPlexComputeCellGeometryFVM(PetscDM,PetscInt,PetscReal*,PetscReal[],PetscReal[]) - PetscErrorCode DMPlexConstructGhostCells(PetscDM,const char[],PetscInt*,PetscDM*) + PetscErrorCode DMPlexComputeCellGeometryFVM(PetscDM, PetscInt, PetscReal*, PetscReal[], PetscReal[]) + PetscErrorCode DMPlexConstructGhostCells(PetscDM, const char[], PetscInt*, PetscDM*) PetscErrorCode DMPlexMetricSetFromOptions(PetscDM) - PetscErrorCode DMPlexMetricSetUniform(PetscDM,PetscBool) - PetscErrorCode DMPlexMetricIsUniform(PetscDM,PetscBool*) - PetscErrorCode DMPlexMetricSetIsotropic(PetscDM,PetscBool) - PetscErrorCode DMPlexMetricIsIsotropic(PetscDM,PetscBool*) - PetscErrorCode DMPlexMetricSetRestrictAnisotropyFirst(PetscDM,PetscBool) - PetscErrorCode DMPlexMetricRestrictAnisotropyFirst(PetscDM,PetscBool*) - PetscErrorCode DMPlexMetricSetNoInsertion(PetscDM,PetscBool) - PetscErrorCode DMPlexMetricNoInsertion(PetscDM,PetscBool*) - PetscErrorCode DMPlexMetricSetNoSwapping(PetscDM,PetscBool) - PetscErrorCode DMPlexMetricNoSwapping(PetscDM,PetscBool*) - PetscErrorCode DMPlexMetricSetNoMovement(PetscDM,PetscBool) - PetscErrorCode DMPlexMetricNoMovement(PetscDM,PetscBool*) - PetscErrorCode DMPlexMetricSetNoSurf(PetscDM,PetscBool) - PetscErrorCode DMPlexMetricNoSurf(PetscDM,PetscBool*) - PetscErrorCode DMPlexMetricSetVerbosity(PetscDM,PetscInt) - PetscErrorCode DMPlexMetricGetVerbosity(PetscDM,PetscInt*) - PetscErrorCode DMPlexMetricSetNumIterations(PetscDM,PetscInt) - PetscErrorCode DMPlexMetricGetNumIterations(PetscDM,PetscInt*) - PetscErrorCode DMPlexMetricSetMinimumMagnitude(PetscDM,PetscReal) - PetscErrorCode DMPlexMetricGetMinimumMagnitude(PetscDM,PetscReal*) - PetscErrorCode DMPlexMetricSetMaximumMagnitude(PetscDM,PetscReal) - PetscErrorCode DMPlexMetricGetMaximumMagnitude(PetscDM,PetscReal*) - PetscErrorCode DMPlexMetricSetMaximumAnisotropy(PetscDM,PetscReal) - 
PetscErrorCode DMPlexMetricGetMaximumAnisotropy(PetscDM,PetscReal*) - PetscErrorCode DMPlexMetricSetTargetComplexity(PetscDM,PetscReal) - PetscErrorCode DMPlexMetricGetTargetComplexity(PetscDM,PetscReal*) - PetscErrorCode DMPlexMetricSetNormalizationOrder(PetscDM,PetscReal) - PetscErrorCode DMPlexMetricGetNormalizationOrder(PetscDM,PetscReal*) - PetscErrorCode DMPlexMetricSetGradationFactor(PetscDM,PetscReal) - PetscErrorCode DMPlexMetricGetGradationFactor(PetscDM,PetscReal*) - PetscErrorCode DMPlexMetricSetHausdorffNumber(PetscDM,PetscReal) - PetscErrorCode DMPlexMetricGetHausdorffNumber(PetscDM,PetscReal*) - PetscErrorCode DMPlexMetricCreate(PetscDM,PetscInt,PetscVec*) - PetscErrorCode DMPlexMetricCreateUniform(PetscDM,PetscInt,PetscReal,PetscVec*) - PetscErrorCode DMPlexMetricCreateIsotropic(PetscDM,PetscInt,PetscVec,PetscVec*) - PetscErrorCode DMPlexMetricDeterminantCreate(PetscDM,PetscInt,PetscVec*,PetscDM*) - PetscErrorCode DMPlexMetricEnforceSPD(PetscDM,PetscVec,PetscBool,PetscBool,PetscVec,PetscVec) - PetscErrorCode DMPlexMetricNormalize(PetscDM,PetscVec,PetscBool,PetscBool,PetscVec,PetscVec) - PetscErrorCode DMPlexMetricAverage2(PetscDM,PetscVec,PetscVec,PetscVec) - PetscErrorCode DMPlexMetricAverage3(PetscDM,PetscVec,PetscVec,PetscVec,PetscVec) - PetscErrorCode DMPlexMetricIntersection2(PetscDM,PetscVec,PetscVec,PetscVec) - PetscErrorCode DMPlexMetricIntersection3(PetscDM,PetscVec,PetscVec,PetscVec,PetscVec) - - PetscErrorCode DMPlexComputeGradientClementInterpolant(PetscDM,PetscVec,PetscVec) - - PetscErrorCode DMPlexTopologyView(PetscDM,PetscViewer) - PetscErrorCode DMPlexCoordinatesView(PetscDM,PetscViewer) - PetscErrorCode DMPlexLabelsView(PetscDM,PetscViewer) - PetscErrorCode DMPlexSectionView(PetscDM,PetscViewer,PetscDM) - PetscErrorCode DMPlexGlobalVectorView(PetscDM,PetscViewer,PetscDM,PetscVec) - PetscErrorCode DMPlexLocalVectorView(PetscDM,PetscViewer,PetscDM,PetscVec) - - PetscErrorCode DMPlexTopologyLoad(PetscDM,PetscViewer,PetscSF*) - PetscErrorCode DMPlexCoordinatesLoad(PetscDM,PetscViewer,PetscSF) - PetscErrorCode DMPlexLabelsLoad(PetscDM,PetscViewer,PetscSF) - PetscErrorCode DMPlexSectionLoad(PetscDM,PetscViewer,PetscDM,PetscSF,PetscSF*,PetscSF*) - PetscErrorCode DMPlexGlobalVectorLoad(PetscDM,PetscViewer,PetscDM,PetscSF,PetscVec) - PetscErrorCode DMPlexLocalVectorLoad(PetscDM,PetscViewer,PetscDM,PetscSF,PetscVec) - - PetscErrorCode DMPlexTransformApply(PetscDMPlexTransform, PetscDM, PetscDM *); - PetscErrorCode DMPlexTransformCreate(MPI_Comm, PetscDMPlexTransform *); - PetscErrorCode DMPlexTransformDestroy(PetscDMPlexTransform*); - PetscErrorCode DMPlexTransformGetType(PetscDMPlexTransform, PetscDMPlexTransformType *); - PetscErrorCode DMPlexTransformSetType(PetscDMPlexTransform tr, PetscDMPlexTransformType method); - PetscErrorCode DMPlexTransformSetFromOptions(PetscDMPlexTransform); - PetscErrorCode DMPlexTransformSetDM(PetscDMPlexTransform, PetscDM); - PetscErrorCode DMPlexTransformSetUp(PetscDMPlexTransform); - PetscErrorCode DMPlexTransformView(PetscDMPlexTransform tr, PetscViewer v); + PetscErrorCode DMPlexMetricSetUniform(PetscDM, PetscBool) + PetscErrorCode DMPlexMetricIsUniform(PetscDM, PetscBool*) + PetscErrorCode DMPlexMetricSetIsotropic(PetscDM, PetscBool) + PetscErrorCode DMPlexMetricIsIsotropic(PetscDM, PetscBool*) + PetscErrorCode DMPlexMetricSetRestrictAnisotropyFirst(PetscDM, PetscBool) + PetscErrorCode DMPlexMetricRestrictAnisotropyFirst(PetscDM, PetscBool*) + PetscErrorCode DMPlexMetricSetNoInsertion(PetscDM, PetscBool) + PetscErrorCode 
DMPlexMetricNoInsertion(PetscDM, PetscBool*) + PetscErrorCode DMPlexMetricSetNoSwapping(PetscDM, PetscBool) + PetscErrorCode DMPlexMetricNoSwapping(PetscDM, PetscBool*) + PetscErrorCode DMPlexMetricSetNoMovement(PetscDM, PetscBool) + PetscErrorCode DMPlexMetricNoMovement(PetscDM, PetscBool*) + PetscErrorCode DMPlexMetricSetNoSurf(PetscDM, PetscBool) + PetscErrorCode DMPlexMetricNoSurf(PetscDM, PetscBool*) + PetscErrorCode DMPlexMetricSetVerbosity(PetscDM, PetscInt) + PetscErrorCode DMPlexMetricGetVerbosity(PetscDM, PetscInt*) + PetscErrorCode DMPlexMetricSetNumIterations(PetscDM, PetscInt) + PetscErrorCode DMPlexMetricGetNumIterations(PetscDM, PetscInt*) + PetscErrorCode DMPlexMetricSetMinimumMagnitude(PetscDM, PetscReal) + PetscErrorCode DMPlexMetricGetMinimumMagnitude(PetscDM, PetscReal*) + PetscErrorCode DMPlexMetricSetMaximumMagnitude(PetscDM, PetscReal) + PetscErrorCode DMPlexMetricGetMaximumMagnitude(PetscDM, PetscReal*) + PetscErrorCode DMPlexMetricSetMaximumAnisotropy(PetscDM, PetscReal) + PetscErrorCode DMPlexMetricGetMaximumAnisotropy(PetscDM, PetscReal*) + PetscErrorCode DMPlexMetricSetTargetComplexity(PetscDM, PetscReal) + PetscErrorCode DMPlexMetricGetTargetComplexity(PetscDM, PetscReal*) + PetscErrorCode DMPlexMetricSetNormalizationOrder(PetscDM, PetscReal) + PetscErrorCode DMPlexMetricGetNormalizationOrder(PetscDM, PetscReal*) + PetscErrorCode DMPlexMetricSetGradationFactor(PetscDM, PetscReal) + PetscErrorCode DMPlexMetricGetGradationFactor(PetscDM, PetscReal*) + PetscErrorCode DMPlexMetricSetHausdorffNumber(PetscDM, PetscReal) + PetscErrorCode DMPlexMetricGetHausdorffNumber(PetscDM, PetscReal*) + PetscErrorCode DMPlexMetricCreate(PetscDM, PetscInt, PetscVec*) + PetscErrorCode DMPlexMetricCreateUniform(PetscDM, PetscInt, PetscReal, PetscVec*) + PetscErrorCode DMPlexMetricCreateIsotropic(PetscDM, PetscInt, PetscVec, PetscVec*) + PetscErrorCode DMPlexMetricDeterminantCreate(PetscDM, PetscInt, PetscVec*, PetscDM*) + PetscErrorCode DMPlexMetricEnforceSPD(PetscDM, PetscVec, PetscBool, PetscBool, PetscVec, PetscVec) + PetscErrorCode DMPlexMetricNormalize(PetscDM, PetscVec, PetscBool, PetscBool, PetscVec, PetscVec) + PetscErrorCode DMPlexMetricAverage2(PetscDM, PetscVec, PetscVec, PetscVec) + PetscErrorCode DMPlexMetricAverage3(PetscDM, PetscVec, PetscVec, PetscVec, PetscVec) + PetscErrorCode DMPlexMetricIntersection2(PetscDM, PetscVec, PetscVec, PetscVec) + PetscErrorCode DMPlexMetricIntersection3(PetscDM, PetscVec, PetscVec, PetscVec, PetscVec) + + PetscErrorCode DMPlexComputeGradientClementInterpolant(PetscDM, PetscVec, PetscVec) + + PetscErrorCode DMPlexTopologyView(PetscDM, PetscViewer) + PetscErrorCode DMPlexCoordinatesView(PetscDM, PetscViewer) + PetscErrorCode DMPlexLabelsView(PetscDM, PetscViewer) + PetscErrorCode DMPlexSectionView(PetscDM, PetscViewer, PetscDM) + PetscErrorCode DMPlexGlobalVectorView(PetscDM, PetscViewer, PetscDM, PetscVec) + PetscErrorCode DMPlexLocalVectorView(PetscDM, PetscViewer, PetscDM, PetscVec) + + PetscErrorCode DMPlexTopologyLoad(PetscDM, PetscViewer, PetscSF*) + PetscErrorCode DMPlexCoordinatesLoad(PetscDM, PetscViewer, PetscSF) + PetscErrorCode DMPlexLabelsLoad(PetscDM, PetscViewer, PetscSF) + PetscErrorCode DMPlexSectionLoad(PetscDM, PetscViewer, PetscDM, PetscSF, PetscSF*, PetscSF*) + PetscErrorCode DMPlexGlobalVectorLoad(PetscDM, PetscViewer, PetscDM, PetscSF, PetscVec) + PetscErrorCode DMPlexLocalVectorLoad(PetscDM, PetscViewer, PetscDM, PetscSF, PetscVec) + + PetscErrorCode DMPlexTransformApply(PetscDMPlexTransform, PetscDM, PetscDM *) + 
PetscErrorCode DMPlexTransformCreate(MPI_Comm, PetscDMPlexTransform *) + PetscErrorCode DMPlexTransformDestroy(PetscDMPlexTransform*) + PetscErrorCode DMPlexTransformGetType(PetscDMPlexTransform, PetscDMPlexTransformType *) + PetscErrorCode DMPlexTransformSetType(PetscDMPlexTransform tr, PetscDMPlexTransformType method) + PetscErrorCode DMPlexTransformSetFromOptions(PetscDMPlexTransform) + PetscErrorCode DMPlexTransformSetDM(PetscDMPlexTransform, PetscDM) + PetscErrorCode DMPlexTransformSetUp(PetscDMPlexTransform) + PetscErrorCode DMPlexTransformView(PetscDMPlexTransform tr, PetscViewer v) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmshell.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmshell.pxi index 1bb1459e6dc..2fc55d76ebd 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmshell.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmshell.pxi @@ -1,74 +1,74 @@ ctypedef PetscErrorCode (*PetscDMShellXToYFunction)(PetscDM, - PetscVec, - PetscInsertMode, - PetscVec) except PETSC_ERR_PYTHON + PetscVec, + PetscInsertMode, + PetscVec) except PETSC_ERR_PYTHON cdef extern from * nogil: ctypedef PetscErrorCode (*PetscDMShellCreateVectorFunction)(PetscDM, - PetscVec*) except PETSC_ERR_PYTHON + PetscVec*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateMatrixFunction)(PetscDM, - PetscMat*) except PETSC_ERR_PYTHON + PetscMat*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellTransferFunction)(PetscDM, - MPI_Comm, - PetscDM*) except PETSC_ERR_PYTHON + MPI_Comm, + PetscDM*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateInterpolationFunction)(PetscDM, - PetscDM, - PetscMat*, - PetscVec*) except PETSC_ERR_PYTHON + PetscDM, + PetscMat*, + PetscVec*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateInjectionFunction)(PetscDM, - PetscDM, - PetscMat*) except PETSC_ERR_PYTHON + PetscDM, + PetscMat*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateRestrictionFunction)(PetscDM, - PetscDM, - PetscMat*) except PETSC_ERR_PYTHON + PetscDM, + PetscMat*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateFieldDecompositionFunction)(PetscDM, - PetscInt*, - char***, - PetscIS**, - PetscDM**) except PETSC_ERR_PYTHON + PetscInt*, + char***, + PetscIS**, + PetscDM**) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateDomainDecompositionFunction)(PetscDM, - PetscInt*, - char***, - PetscIS**, - PetscIS**, - PetscDM**) except PETSC_ERR_PYTHON + PetscInt*, + char***, + PetscIS**, + PetscIS**, + PetscDM**) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateDomainDecompositionScattersFunction)(PetscDM, - PetscInt, - PetscDM*, - PetscScatter**, - PetscScatter**, - PetscScatter**) except PETSC_ERR_PYTHON + PetscInt, + PetscDM*, + PetscScatter**, + PetscScatter**, + PetscScatter**) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscDMShellCreateSubDM)(PetscDM, - PetscInt, - PetscInt[], - PetscIS*, - PetscDM*) except PETSC_ERR_PYTHON - PetscErrorCode DMShellCreate(MPI_Comm,PetscDM*) - PetscErrorCode DMShellSetMatrix(PetscDM,PetscMat) - PetscErrorCode DMShellSetGlobalVector(PetscDM,PetscVec) - PetscErrorCode DMShellSetLocalVector(PetscDM,PetscVec) - PetscErrorCode DMShellSetCreateGlobalVector(PetscDM,PetscDMShellCreateVectorFunction) - PetscErrorCode DMShellSetCreateLocalVector(PetscDM,PetscDMShellCreateVectorFunction) - PetscErrorCode DMShellSetGlobalToLocal(PetscDM,PetscDMShellXToYFunction,PetscDMShellXToYFunction) - PetscErrorCode 
DMShellSetGlobalToLocalVecScatter(PetscDM,PetscScatter) - PetscErrorCode DMShellSetLocalToGlobal(PetscDM,PetscDMShellXToYFunction,PetscDMShellXToYFunction) - PetscErrorCode DMShellSetLocalToGlobalVecScatter(PetscDM,PetscScatter) - PetscErrorCode DMShellSetLocalToLocal(PetscDM,PetscDMShellXToYFunction,PetscDMShellXToYFunction) - PetscErrorCode DMShellSetLocalToLocalVecScatter(PetscDM,PetscScatter) - PetscErrorCode DMShellSetCreateMatrix(PetscDM,PetscDMShellCreateMatrixFunction) - PetscErrorCode DMShellSetCoarsen(PetscDM,PetscDMShellTransferFunction) - PetscErrorCode DMShellSetRefine(PetscDM,PetscDMShellTransferFunction) - PetscErrorCode DMShellSetCreateInterpolation(PetscDM,PetscDMShellCreateInterpolationFunction) - PetscErrorCode DMShellSetCreateInjection(PetscDM,PetscDMShellCreateInjectionFunction) - PetscErrorCode DMShellSetCreateRestriction(PetscDM,PetscDMShellCreateRestrictionFunction) - PetscErrorCode DMShellSetCreateFieldDecomposition(PetscDM,PetscDMShellCreateFieldDecompositionFunction) - PetscErrorCode DMShellSetCreateDomainDecomposition(PetscDM,PetscDMShellCreateDomainDecompositionFunction) - PetscErrorCode DMShellSetCreateDomainDecompositionScatters(PetscDM,PetscDMShellCreateDomainDecompositionScattersFunction) - PetscErrorCode DMShellSetCreateSubDM(PetscDM,PetscDMShellCreateSubDM) - - PetscErrorCode VecGetDM(PetscVec,PetscDM*) - PetscErrorCode VecSetDM(PetscVec,PetscDM) - PetscErrorCode MatGetDM(PetscMat,PetscDM*) - PetscErrorCode MatSetDM(PetscMat,PetscDM) + PetscInt, + PetscInt[], + PetscIS*, + PetscDM*) except PETSC_ERR_PYTHON + PetscErrorCode DMShellCreate(MPI_Comm, PetscDM*) + PetscErrorCode DMShellSetMatrix(PetscDM, PetscMat) + PetscErrorCode DMShellSetGlobalVector(PetscDM, PetscVec) + PetscErrorCode DMShellSetLocalVector(PetscDM, PetscVec) + PetscErrorCode DMShellSetCreateGlobalVector(PetscDM, PetscDMShellCreateVectorFunction) + PetscErrorCode DMShellSetCreateLocalVector(PetscDM, PetscDMShellCreateVectorFunction) + PetscErrorCode DMShellSetGlobalToLocal(PetscDM, PetscDMShellXToYFunction, PetscDMShellXToYFunction) + PetscErrorCode DMShellSetGlobalToLocalVecScatter(PetscDM, PetscScatter) + PetscErrorCode DMShellSetLocalToGlobal(PetscDM, PetscDMShellXToYFunction, PetscDMShellXToYFunction) + PetscErrorCode DMShellSetLocalToGlobalVecScatter(PetscDM, PetscScatter) + PetscErrorCode DMShellSetLocalToLocal(PetscDM, PetscDMShellXToYFunction, PetscDMShellXToYFunction) + PetscErrorCode DMShellSetLocalToLocalVecScatter(PetscDM, PetscScatter) + PetscErrorCode DMShellSetCreateMatrix(PetscDM, PetscDMShellCreateMatrixFunction) + PetscErrorCode DMShellSetCoarsen(PetscDM, PetscDMShellTransferFunction) + PetscErrorCode DMShellSetRefine(PetscDM, PetscDMShellTransferFunction) + PetscErrorCode DMShellSetCreateInterpolation(PetscDM, PetscDMShellCreateInterpolationFunction) + PetscErrorCode DMShellSetCreateInjection(PetscDM, PetscDMShellCreateInjectionFunction) + PetscErrorCode DMShellSetCreateRestriction(PetscDM, PetscDMShellCreateRestrictionFunction) + PetscErrorCode DMShellSetCreateFieldDecomposition(PetscDM, PetscDMShellCreateFieldDecompositionFunction) + PetscErrorCode DMShellSetCreateDomainDecomposition(PetscDM, PetscDMShellCreateDomainDecompositionFunction) + PetscErrorCode DMShellSetCreateDomainDecompositionScatters(PetscDM, PetscDMShellCreateDomainDecompositionScattersFunction) + PetscErrorCode DMShellSetCreateSubDM(PetscDM, PetscDMShellCreateSubDM) + + PetscErrorCode VecGetDM(PetscVec, PetscDM*) + PetscErrorCode VecSetDM(PetscVec, PetscDM) + PetscErrorCode MatGetDM(PetscMat, PetscDM*) + 
PetscErrorCode MatSetDM(PetscMat, PetscDM) cdef PetscErrorCode DMSHELL_CreateGlobalVector( @@ -77,17 +77,17 @@ cdef PetscErrorCode DMSHELL_CreateGlobalVector( cdef DM Dm = subtype_DM(dm)() cdef Vec vec Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__create_global_vector__') assert context is not None and type(context) is tuple (create_gvec, args, kargs) = context vec = create_gvec(Dm, *args, **kargs) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(PetscINCREF(vec.obj)) v[0] = vec.vec cdef PetscDM odm = NULL - CHKERR( VecGetDM(v[0], &odm) ) + CHKERR(VecGetDM(v[0], &odm)) if odm == NULL: - CHKERR( VecSetDM(v[0], dm) ) + CHKERR(VecSetDM(v[0], dm)) return PETSC_SUCCESS cdef PetscErrorCode DMSHELL_CreateLocalVector( @@ -96,17 +96,17 @@ cdef PetscErrorCode DMSHELL_CreateLocalVector( cdef DM Dm = subtype_DM(dm)() cdef Vec vec Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__create_local_vector__') assert context is not None and type(context) is tuple (create_lvec, args, kargs) = context vec = create_lvec(Dm, *args, **kargs) - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(PetscINCREF(vec.obj)) v[0] = vec.vec cdef PetscDM odm = NULL - CHKERR( VecGetDM(v[0], &odm) ) + CHKERR(VecGetDM(v[0], &odm)) if odm == NULL: - CHKERR( VecSetDM(v[0], dm) ) + CHKERR(VecSetDM(v[0], dm)) return PETSC_SUCCESS cdef PetscErrorCode DMSHELL_GlobalToLocalBegin( @@ -118,7 +118,7 @@ cdef PetscErrorCode DMSHELL_GlobalToLocalBegin( cdef Vec gvec = ref_Vec(g) cdef Vec lvec = ref_Vec(l) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__g2l_begin__') assert context is not None and type(context) is tuple (begin, args, kargs) = context @@ -134,7 +134,7 @@ cdef PetscErrorCode DMSHELL_GlobalToLocalEnd( cdef Vec gvec = ref_Vec(g) cdef Vec lvec = ref_Vec(l) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__g2l_end__') assert context is not None and type(context) is tuple (end, args, kargs) = context @@ -150,7 +150,7 @@ cdef PetscErrorCode DMSHELL_LocalToGlobalBegin( cdef Vec gvec = ref_Vec(g) cdef Vec lvec = ref_Vec(l) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__l2g_begin__') assert context is not None and type(context) is tuple (begin, args, kargs) = context @@ -166,7 +166,7 @@ cdef PetscErrorCode DMSHELL_LocalToGlobalEnd( cdef Vec gvec = ref_Vec(g) cdef Vec lvec = ref_Vec(l) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__l2g_end__') assert context is not None and type(context) is tuple (end, args, kargs) = context @@ -182,7 +182,7 @@ cdef PetscErrorCode DMSHELL_LocalToLocalBegin( cdef Vec gvec = ref_Vec(g) cdef Vec lvec = ref_Vec(l) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__l2l_begin__') assert context is not None and type(context) is tuple (begin, args, kargs) = context @@ -198,7 +198,7 @@ cdef PetscErrorCode DMSHELL_LocalToLocalEnd( cdef Vec gvec = ref_Vec(g) cdef Vec lvec = ref_Vec(l) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__l2l_end__') assert context is not None and type(context) is tuple (end, args, kargs) = context @@ -211,17 +211,17 @@ cdef PetscErrorCode DMSHELL_CreateMatrix( cdef DM Dm = subtype_DM(dm)() cdef Mat mat Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__create_matrix__') 
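For orientation, the DMSHELL_CreateMatrix glue in this hunk looks up the '__create_matrix__' attribute, which petsc4py stores as a (callable, args, kargs) tuple, calls it, and takes a reference on the returned Mat. A minimal sketch of how such a callback is installed from the Python side, assuming the petsc4py DMShell.setCreateMatrix and DM.createMat methods; this is illustrative only and not part of the patch:

    from petsc4py import PETSc

    def create_matrix(dm):
        # Invoked through DMSHELL_CreateMatrix; must return a petsc4py Mat.
        A = PETSc.Mat().createAIJ(10, comm=dm.comm)
        A.setUp()
        return A

    dm = PETSc.DMShell().create(comm=PETSc.COMM_WORLD)
    dm.setCreateMatrix(create_matrix)  # saved as the '__create_matrix__' context tuple
    A = dm.createMat()                 # DMCreateMatrix dispatches to create_matrix
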
assert context is not None and type(context) is tuple (matrix, args, kargs) = context mat = matrix(Dm, *args, **kargs) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(PetscINCREF(mat.obj)) cmat[0] = mat.mat cdef PetscDM odm = NULL - CHKERR( MatGetDM(cmat[0], &odm) ) + CHKERR(MatGetDM(cmat[0], &odm)) if odm == NULL: - CHKERR( MatSetDM(cmat[0], dm) ) + CHKERR(MatSetDM(cmat[0], dm)) return PETSC_SUCCESS cdef PetscErrorCode DMSHELL_Coarsen( @@ -232,12 +232,12 @@ cdef PetscErrorCode DMSHELL_Coarsen( cdef DM Dmc cdef Comm Comm = new_Comm(comm) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__coarsen__') assert context is not None and type(context) is tuple (coarsen, args, kargs) = context Dmc = coarsen(Dm, Comm, *args, **kargs) - CHKERR( PetscINCREF(Dmc.obj) ) + CHKERR(PetscINCREF(Dmc.obj)) dmc[0] = Dmc.dm return PETSC_SUCCESS @@ -249,12 +249,12 @@ cdef PetscErrorCode DMSHELL_Refine( cdef DM Dmf cdef Comm Comm = new_Comm(comm) Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__refine__') assert context is not None and type(context) is tuple (refine, args, kargs) = context Dmf = refine(Dm, Comm, *args, **kargs) - CHKERR( PetscINCREF(Dmf.obj) ) + CHKERR(PetscINCREF(Dmf.obj)) dmf[0] = Dmf.dm return PETSC_SUCCESS @@ -268,21 +268,21 @@ cdef PetscErrorCode DMSHELL_CreateInterpolation( cdef Mat mat cdef Vec vec Dmc.dm = dmc - CHKERR( PetscINCREF(Dmc.obj) ) + CHKERR(PetscINCREF(Dmc.obj)) Dmf.dm = dmf - CHKERR( PetscINCREF(Dmf.obj) ) + CHKERR(PetscINCREF(Dmf.obj)) context = Dmc.get_attr('__create_interpolation__') assert context is not None and type(context) is tuple (interpolation, args, kargs) = context mat, vec = interpolation(Dmc, Dmf, *args, **kargs) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(PetscINCREF(mat.obj)) cmat[0] = mat.mat if cvec == NULL: return PETSC_SUCCESS if vec is None: cvec[0] = NULL else: - CHKERR( PetscINCREF(vec.obj) ) + CHKERR(PetscINCREF(vec.obj)) cvec[0] = vec.vec return PETSC_SUCCESS @@ -294,14 +294,14 @@ cdef PetscErrorCode DMSHELL_CreateInjection( cdef DM Dmf = subtype_DM(dmf)() cdef Mat mat Dmc.dm = dmc - CHKERR( PetscINCREF(Dmc.obj) ) + CHKERR(PetscINCREF(Dmc.obj)) Dmf.dm = dmf - CHKERR( PetscINCREF(Dmf.obj) ) + CHKERR(PetscINCREF(Dmf.obj)) context = Dmc.get_attr('__create_injection__') assert context is not None and type(context) is tuple (injection, args, kargs) = context mat = injection(Dmc, Dmf, *args, **kargs) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(PetscINCREF(mat.obj)) cmat[0] = mat.mat return PETSC_SUCCESS @@ -313,14 +313,14 @@ cdef PetscErrorCode DMSHELL_CreateRestriction( cdef DM Dmc = subtype_DM(dmc)() cdef Mat mat Dmf.dm = dmf - CHKERR( PetscINCREF(Dmf.obj) ) + CHKERR(PetscINCREF(Dmf.obj)) Dmc.dm = dmc - CHKERR( PetscINCREF(Dmc.obj) ) + CHKERR(PetscINCREF(Dmc.obj)) context = Dmf.get_attr('__create_restriction__') assert context is not None and type(context) is tuple (restriction, args, kargs) = context mat = restriction(Dmf, Dmc, *args, **kargs) - CHKERR( PetscINCREF(mat.obj) ) + CHKERR(PetscINCREF(mat.obj)) cmat[0] = mat.mat return PETSC_SUCCESS @@ -334,7 +334,7 @@ cdef PetscErrorCode DMSHELL_CreateFieldDecomposition( cdef int i cdef const char *cname = NULL Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__create_field_decomp__') assert context is not None and type(context) is tuple (decomp, args, kargs) = context @@ -351,22 +351,22 @@ cdef PetscErrorCode DMSHELL_CreateFieldDecomposition( clen[0] = 0 if namelist != NULL 
and names is not None: - CHKERR( PetscMalloc(len(names)*sizeof(char**), namelist) ) + CHKERR(PetscMalloc(len(names)*sizeof(char**), namelist)) for i in range(len(names)): names[i] = str2bytes(names[i], &cname) - CHKERR( PetscStrallocpy(cname, &(namelist[0][i])) ) + CHKERR(PetscStrallocpy(cname, &(namelist[0][i]))) if islist != NULL and ises is not None: - CHKERR( PetscMalloc(len(ises)*sizeof(PetscIS), islist) ) + CHKERR(PetscMalloc(len(ises)*sizeof(PetscIS), islist)) for i in range(len(ises)): islist[0][i] = (ises[i]).iset - CHKERR( PetscINCREF((ises[i]).obj) ) + CHKERR(PetscINCREF((ises[i]).obj)) if dmlist != NULL and dms is not None: - CHKERR( PetscMalloc(len(dms)*sizeof(PetscDM), dmlist) ) + CHKERR(PetscMalloc(len(dms)*sizeof(PetscDM), dmlist)) for i in range(len(dms)): dmlist[0][i] = (dms[i]).dm - CHKERR( PetscINCREF((dms[i]).obj) ) + CHKERR(PetscINCREF((dms[i]).obj)) return PETSC_SUCCESS cdef PetscErrorCode DMSHELL_CreateDomainDecomposition( @@ -380,7 +380,7 @@ cdef PetscErrorCode DMSHELL_CreateDomainDecomposition( cdef int i cdef const char *cname = NULL Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) context = Dm.get_attr('__create_domain_decomp__') assert context is not None and type(context) is tuple (decomp, args, kargs) = context @@ -399,28 +399,28 @@ cdef PetscErrorCode DMSHELL_CreateDomainDecomposition( clen[0] = 0 if namelist != NULL and names is not None: - CHKERR( PetscMalloc(len(names)*sizeof(char**), namelist) ) + CHKERR(PetscMalloc(len(names)*sizeof(char**), namelist)) for i in range(len(names)): names[i] = str2bytes(names[i], &cname) - CHKERR( PetscStrallocpy(cname, &(namelist[0][i])) ) + CHKERR(PetscStrallocpy(cname, &(namelist[0][i]))) if innerislist != NULL and innerises is not None: - CHKERR( PetscMalloc(len(innerises)*sizeof(PetscIS), innerislist) ) + CHKERR(PetscMalloc(len(innerises)*sizeof(PetscIS), innerislist)) for i in range(len(innerises)): innerislist[0][i] = (innerises[i]).iset - CHKERR( PetscINCREF((innerises[i]).obj) ) + CHKERR(PetscINCREF((innerises[i]).obj)) if outerislist != NULL and outerises is not None: - CHKERR( PetscMalloc(len(outerises)*sizeof(PetscIS), outerislist) ) + CHKERR(PetscMalloc(len(outerises)*sizeof(PetscIS), outerislist)) for i in range(len(outerises)): outerislist[0][i] = (outerises[i]).iset - CHKERR( PetscINCREF((outerises[i]).obj) ) + CHKERR(PetscINCREF((outerises[i]).obj)) if dmlist != NULL and dms is not None: - CHKERR( PetscMalloc(len(dms)*sizeof(PetscDM), dmlist) ) + CHKERR(PetscMalloc(len(dms)*sizeof(PetscDM), dmlist)) for i in range(len(dms)): dmlist[0][i] = (dms[i]).dm - CHKERR( PetscINCREF((dms[i]).obj) ) + CHKERR(PetscINCREF((dms[i]).obj)) return PETSC_SUCCESS cdef PetscErrorCode DMSHELL_CreateDomainDecompositionScatters( @@ -433,17 +433,16 @@ cdef PetscErrorCode DMSHELL_CreateDomainDecompositionScatters( cdef DM Dm = subtype_DM(dm)() cdef int i - cdef const char *cname = NULL cdef DM subdm = None Dm.dm = dm - CHKERR( PetscINCREF(Dm.obj) ) + CHKERR(PetscINCREF(Dm.obj)) psubdms = [] for i from 0 <= i < clen: subdm = subtype_DM(subdms[i])() subdm.dm = subdms[i] - CHKERR( PetscINCREF(subdm.obj) ) + CHKERR(PetscINCREF(subdm.obj)) psubdms.append(subdm) context = Dm.get_attr('__create_domain_decomp_scatters__') @@ -455,19 +454,19 @@ cdef PetscErrorCode DMSHELL_CreateDomainDecompositionScatters( assert len(oscatter) == clen assert len(gscatter) == clen - CHKERR ( PetscMalloc(clen*sizeof(PetscScatter), iscat) ) - CHKERR ( PetscMalloc(clen*sizeof(PetscScatter), oscat) ) - CHKERR ( 
PetscMalloc(clen*sizeof(PetscScatter), gscat) ) + CHKERR (PetscMalloc(clen*sizeof(PetscScatter), iscat)) + CHKERR (PetscMalloc(clen*sizeof(PetscScatter), oscat)) + CHKERR (PetscMalloc(clen*sizeof(PetscScatter), gscat)) for i in range(clen): iscat[0][i] = (iscatter[i]).sct - CHKERR( PetscINCREF((iscatter[i]).obj) ) + CHKERR(PetscINCREF((iscatter[i]).obj)) oscat[0][i] = (oscatter[i]).sct - CHKERR( PetscINCREF((oscatter[i]).obj) ) + CHKERR(PetscINCREF((oscatter[i]).obj)) gscat[0][i] = (gscatter[i]).sct - CHKERR( PetscINCREF((gscatter[i]).obj) ) + CHKERR(PetscINCREF((gscatter[i]).obj)) return PETSC_SUCCESS @@ -481,7 +480,7 @@ cdef PetscErrorCode DMSHELL_CreateSubDM( cdef IS iset cdef DM subdm dm.dm = cdm - CHKERR( PetscINCREF(dm.obj) ) + CHKERR(PetscINCREF(dm.obj)) context = dm.get_attr('__create_subdm__') assert context is not None and type(context) is tuple (create_subdm, args, kargs) = context @@ -490,8 +489,8 @@ cdef PetscErrorCode DMSHELL_CreateSubDM( iset, subdm = create_subdm(dm, fields, *args, **kargs) - CHKERR( PetscINCREF(iset.obj) ) - CHKERR( PetscINCREF(subdm.obj) ) + CHKERR(PetscINCREF(iset.obj)) + CHKERR(PetscINCREF(subdm.obj)) ciset[0] = iset.iset csubdm[0] = subdm.dm return PETSC_SUCCESS diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmstag.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmstag.pxi index c4a56dde5d6..7f393622424 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmstag.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmstag.pxi @@ -37,48 +37,46 @@ cdef extern from * nogil: DMSTAG_FRONT_UP DMSTAG_FRONT_UP_RIGHT - - PetscErrorCode DMStagCreate1d(MPI_Comm,PetscDMBoundaryType,PetscInt,PetscInt,PetscInt,PetscDMStagStencilType,PetscInt,const PetscInt[],PetscDM*) - PetscErrorCode DMStagCreate2d(MPI_Comm,PetscDMBoundaryType,PetscDMBoundaryType,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscDMStagStencilType,PetscInt,const PetscInt[],const PetscInt[],PetscDM*) - PetscErrorCode DMStagCreate3d(MPI_Comm,PetscDMBoundaryType,PetscDMBoundaryType,PetscDMBoundaryType,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscDMStagStencilType,PetscInt,const PetscInt[],const PetscInt[],const PetscInt[],PetscDM*) - - - PetscErrorCode DMStagGetCorners(PetscDM,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*) - PetscErrorCode DMStagGetGhostCorners(PetscDM,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*) - PetscErrorCode DMStagGetLocalSizes(PetscDM,PetscInt*,PetscInt*,PetscInt*) - PetscErrorCode DMStagGetEntriesPerElement(PetscDM,PetscInt*) - - PetscErrorCode DMStagGetDOF(PetscDM,PetscInt*,PetscInt*,PetscInt*,PetscInt*) - PetscErrorCode DMStagGetNumRanks(PetscDM,PetscInt*,PetscInt*,PetscInt*) - PetscErrorCode DMStagGetGlobalSizes(PetscDM,PetscInt*,PetscInt*,PetscInt*) - PetscErrorCode DMStagGetBoundaryTypes(PetscDM,PetscDMBoundaryType*,PetscDMBoundaryType*,PetscDMBoundaryType*) - PetscErrorCode DMStagGetStencilWidth(PetscDM,PetscInt*) - PetscErrorCode DMStagGetStencilType(PetscDM,PetscDMStagStencilType*) - PetscErrorCode DMStagGetOwnershipRanges(PetscDM,const PetscInt*[],const PetscInt*[],const PetscInt*[]) - - PetscErrorCode DMStagSetDOF(PetscDM,PetscInt,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMStagSetNumRanks(PetscDM,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMStagSetGlobalSizes(PetscDM,PetscInt,PetscInt,PetscInt) - PetscErrorCode DMStagSetBoundaryTypes(PetscDM,PetscDMBoundaryType,PetscDMBoundaryType,PetscDMBoundaryType) - PetscErrorCode 
DMStagSetStencilWidth(PetscDM,PetscInt) - PetscErrorCode DMStagSetStencilType(PetscDM,PetscDMStagStencilType) - PetscErrorCode DMStagSetOwnershipRanges(PetscDM,const PetscInt[],const PetscInt[],const PetscInt[]) - - PetscErrorCode DMStagGetLocationSlot(PetscDM,PetscDMStagStencilLocation,PetscInt,PetscInt*) - PetscErrorCode DMStagGetLocationDOF(PetscDM,PetscDMStagStencilLocation,PetscInt*) - PetscErrorCode DMStagGetProductCoordinateLocationSlot(PetscDM,PetscDMStagStencilLocation,PetscInt*) - - PetscErrorCode DMStagGetIsFirstRank(PetscDM,PetscBool*,PetscBool*,PetscBool*) - PetscErrorCode DMStagGetIsLastRank(PetscDM,PetscBool*,PetscBool*,PetscBool*) - - PetscErrorCode DMStagSetUniformCoordinatesExplicit(PetscDM,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal) - PetscErrorCode DMStagSetUniformCoordinatesProduct(PetscDM,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal) - PetscErrorCode DMStagSetCoordinateDMType(PetscDM,PetscDMType) - PetscErrorCode DMStagSetUniformCoordinates(PetscDM,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal) - - PetscErrorCode DMStagCreateCompatibleDMStag(PetscDM,PetscInt,PetscInt,PetscInt,PetscInt,PetscDM*) - PetscErrorCode DMStagVecSplitToDMDA(PetscDM,PetscVec,PetscDMStagStencilLocation,PetscInt,PetscDM*,PetscVec*) - PetscErrorCode DMStagMigrateVec(PetscDM,PetscVec,PetscDM,PetscVec) + PetscErrorCode DMStagCreate1d(MPI_Comm, PetscDMBoundaryType, PetscInt, PetscInt, PetscInt, PetscDMStagStencilType, PetscInt, const PetscInt[], PetscDM*) + PetscErrorCode DMStagCreate2d(MPI_Comm, PetscDMBoundaryType, PetscDMBoundaryType, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscDMStagStencilType, PetscInt, const PetscInt[], const PetscInt[], PetscDM*) + PetscErrorCode DMStagCreate3d(MPI_Comm, PetscDMBoundaryType, PetscDMBoundaryType, PetscDMBoundaryType, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscDMStagStencilType, PetscInt, const PetscInt[], const PetscInt[], const PetscInt[], PetscDM*) + + PetscErrorCode DMStagGetCorners(PetscDM, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscInt*) + PetscErrorCode DMStagGetGhostCorners(PetscDM, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscInt*) + PetscErrorCode DMStagGetLocalSizes(PetscDM, PetscInt*, PetscInt*, PetscInt*) + PetscErrorCode DMStagGetEntriesPerElement(PetscDM, PetscInt*) + + PetscErrorCode DMStagGetDOF(PetscDM, PetscInt*, PetscInt*, PetscInt*, PetscInt*) + PetscErrorCode DMStagGetNumRanks(PetscDM, PetscInt*, PetscInt*, PetscInt*) + PetscErrorCode DMStagGetGlobalSizes(PetscDM, PetscInt*, PetscInt*, PetscInt*) + PetscErrorCode DMStagGetBoundaryTypes(PetscDM, PetscDMBoundaryType*, PetscDMBoundaryType*, PetscDMBoundaryType*) + PetscErrorCode DMStagGetStencilWidth(PetscDM, PetscInt*) + PetscErrorCode DMStagGetStencilType(PetscDM, PetscDMStagStencilType*) + PetscErrorCode DMStagGetOwnershipRanges(PetscDM, const PetscInt*[], const PetscInt*[], const PetscInt*[]) + + PetscErrorCode DMStagSetDOF(PetscDM, PetscInt, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMStagSetNumRanks(PetscDM, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMStagSetGlobalSizes(PetscDM, PetscInt, PetscInt, PetscInt) + PetscErrorCode DMStagSetBoundaryTypes(PetscDM, PetscDMBoundaryType, PetscDMBoundaryType, PetscDMBoundaryType) + PetscErrorCode DMStagSetStencilWidth(PetscDM, PetscInt) + PetscErrorCode DMStagSetStencilType(PetscDM, PetscDMStagStencilType) + PetscErrorCode 
DMStagSetOwnershipRanges(PetscDM, const PetscInt[], const PetscInt[], const PetscInt[]) + + PetscErrorCode DMStagGetLocationSlot(PetscDM, PetscDMStagStencilLocation, PetscInt, PetscInt*) + PetscErrorCode DMStagGetLocationDOF(PetscDM, PetscDMStagStencilLocation, PetscInt*) + PetscErrorCode DMStagGetProductCoordinateLocationSlot(PetscDM, PetscDMStagStencilLocation, PetscInt*) + + PetscErrorCode DMStagGetIsFirstRank(PetscDM, PetscBool*, PetscBool*, PetscBool*) + PetscErrorCode DMStagGetIsLastRank(PetscDM, PetscBool*, PetscBool*, PetscBool*) + + PetscErrorCode DMStagSetUniformCoordinatesExplicit(PetscDM, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal) + PetscErrorCode DMStagSetUniformCoordinatesProduct(PetscDM, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal) + PetscErrorCode DMStagSetCoordinateDMType(PetscDM, PetscDMType) + PetscErrorCode DMStagSetUniformCoordinates(PetscDM, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal, PetscReal) + + PetscErrorCode DMStagCreateCompatibleDMStag(PetscDM, PetscInt, PetscInt, PetscInt, PetscInt, PetscDM*) + PetscErrorCode DMStagVecSplitToDMDA(PetscDM, PetscVec, PetscDMStagStencilLocation, PetscInt, PetscDM*, PetscVec*) + PetscErrorCode DMStagMigrateVec(PetscDM, PetscVec, PetscDM, PetscVec) # -------------------------------------------------------------------- @@ -132,9 +130,9 @@ cdef inline PetscDMStagStencilLocation asStagStencilLocation(object stencil_loca cdef inline PetscInt asStagDims(dims, - PetscInt *_M, - PetscInt *_N, - PetscInt *_P) except? -1: + PetscInt *_M, + PetscInt *_N, + PetscInt *_P) except? -1: cdef PetscInt dim = PETSC_DECIDE cdef object M=None, N=None, P=None dims = tuple(dims) @@ -149,9 +147,9 @@ cdef inline PetscInt asStagDims(dims, return dim cdef inline tuple toStagDims(PetscInt dim, - PetscInt M, - PetscInt N, - PetscInt P): + PetscInt M, + PetscInt N, + PetscInt P): if dim == 0: return () elif dim == 1: return (toInt(M),) elif dim == 2: return (toInt(M), toInt(N)) @@ -185,11 +183,11 @@ cdef inline tuple toDofs(PetscInt ndofs, elif ndofs == 4: return (toInt(dof0), toInt(dof1), toInt(dof2), toInt(dof3)) cdef inline tuple asStagOwnershipRanges(object ownership_ranges, - PetscInt dim, - PetscInt *m, PetscInt *n, PetscInt *p, - PetscInt **_x, - PetscInt **_y, - PetscInt **_z): + PetscInt dim, + PetscInt *m, PetscInt *n, PetscInt *p, + PetscInt **_x, + PetscInt **_y, + PetscInt **_z): cdef object ranges = list(ownership_ranges) cdef PetscInt rdim = len(ranges) cdef PetscInt nlx=0, nly=0, nlz=0 @@ -214,15 +212,15 @@ cdef inline tuple asStagOwnershipRanges(object ownership_ranges, if p[0] == PETSC_DECIDE: p[0] = nlz elif p[0] != nlz: raise ValueError( "ownership range size %d and number or processors %d" % - (toInt(nlz), toInt(p[0]))) + (toInt(nlz), toInt(p[0]))) return tuple(ranges) cdef inline tuple toStagOwnershipRanges(PetscInt dim, - PetscInt m, PetscInt n, PetscInt p, - const PetscInt *lx, - const PetscInt *ly, - const PetscInt *lz): + PetscInt m, PetscInt n, PetscInt p, + const PetscInt *lx, + const PetscInt *ly, + const PetscInt *lz): # Returns tuple of arrays containing ownership ranges as Python arrays ranges = [array_i(m, lx)] if dim > 1: @@ -237,7 +235,7 @@ cdef inline object toStagBoundary(PetscDMBoundaryType btype): elif btype == DM_BOUNDARY_GHOSTED: return "ghosted" cdef inline tuple toStagBoundaryTypes(PetscInt dim, PetscDMBoundaryType btx, PetscDMBoundaryType bty, PetscDMBoundaryType btz): - if dim == 1: return (toStagBoundary(btx), ) + if dim == 1: return (toStagBoundary(btx),) if 
dim == 2: return (toStagBoundary(btx), toStagBoundary(bty)) if dim == 3: return (toStagBoundary(btx), toStagBoundary(bty), toStagBoundary(btz)) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmswarm.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmswarm.pxi index f1d3161d686..71599bc45f3 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmswarm.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmswarm.pxi @@ -23,51 +23,51 @@ cdef extern from * nogil: DMSWARMPIC_LAYOUT_GAUSS DMSWARMPIC_LAYOUT_SUBDIVISION - PetscErrorCode DMSwarmCreateGlobalVectorFromField(PetscDM,const char[],PetscVec*) - PetscErrorCode DMSwarmDestroyGlobalVectorFromField(PetscDM,const char[],PetscVec*) - PetscErrorCode DMSwarmCreateLocalVectorFromField(PetscDM,const char[],PetscVec*) - PetscErrorCode DMSwarmDestroyLocalVectorFromField(PetscDM,const char[],PetscVec*) + PetscErrorCode DMSwarmCreateGlobalVectorFromField(PetscDM, const char[], PetscVec*) + PetscErrorCode DMSwarmDestroyGlobalVectorFromField(PetscDM, const char[], PetscVec*) + PetscErrorCode DMSwarmCreateLocalVectorFromField(PetscDM, const char[], PetscVec*) + PetscErrorCode DMSwarmDestroyLocalVectorFromField(PetscDM, const char[], PetscVec*) PetscErrorCode DMSwarmInitializeFieldRegister(PetscDM) PetscErrorCode DMSwarmFinalizeFieldRegister(PetscDM) - PetscErrorCode DMSwarmSetLocalSizes(PetscDM,PetscInt,PetscInt) - PetscErrorCode DMSwarmRegisterPetscDatatypeField(PetscDM,const char[],PetscInt,PetscDataType) -# PetscErrorCode DMSwarmRegisterUserStructField(PetscDM,const char[],size_t) -# PetscErrorCode DMSwarmRegisterUserDatatypeField(PetscDM,const char[],size_t,PetscInt) - PetscErrorCode DMSwarmGetField(PetscDM,const char[],PetscInt*,PetscDataType*,void**) - PetscErrorCode DMSwarmRestoreField(PetscDM,const char[],PetscInt*,PetscDataType*,void**) + PetscErrorCode DMSwarmSetLocalSizes(PetscDM, PetscInt, PetscInt) + PetscErrorCode DMSwarmRegisterPetscDatatypeField(PetscDM, const char[], PetscInt, PetscDataType) +# PetscErrorCode DMSwarmRegisterUserStructField(PetscDM, const char[], size_t) +# PetscErrorCode DMSwarmRegisterUserDatatypeField(PetscDM, const char[], size_t, PetscInt) + PetscErrorCode DMSwarmGetField(PetscDM, const char[], PetscInt*, PetscDataType*, void**) + PetscErrorCode DMSwarmRestoreField(PetscDM, const char[], PetscInt*, PetscDataType*, void**) - PetscErrorCode DMSwarmVectorDefineField(PetscDM,const char[]) + PetscErrorCode DMSwarmVectorDefineField(PetscDM, const char[]) PetscErrorCode DMSwarmAddPoint(PetscDM) - PetscErrorCode DMSwarmAddNPoints(PetscDM,PetscInt) + PetscErrorCode DMSwarmAddNPoints(PetscDM, PetscInt) PetscErrorCode DMSwarmRemovePoint(PetscDM) - PetscErrorCode DMSwarmRemovePointAtIndex(PetscDM,PetscInt) - PetscErrorCode DMSwarmCopyPoint(PetscDM,PetscInt,PetscInt) + PetscErrorCode DMSwarmRemovePointAtIndex(PetscDM, PetscInt) + PetscErrorCode DMSwarmCopyPoint(PetscDM, PetscInt, PetscInt) - PetscErrorCode DMSwarmGetLocalSize(PetscDM,PetscInt*) - PetscErrorCode DMSwarmGetSize(PetscDM,PetscInt*) - PetscErrorCode DMSwarmMigrate(PetscDM,PetscBool) + PetscErrorCode DMSwarmGetLocalSize(PetscDM, PetscInt*) + PetscErrorCode DMSwarmGetSize(PetscDM, PetscInt*) + PetscErrorCode DMSwarmMigrate(PetscDM, PetscBool) PetscErrorCode DMSwarmCollectViewCreate(PetscDM) PetscErrorCode DMSwarmCollectViewDestroy(PetscDM) - PetscErrorCode DMSwarmSetCellDM(PetscDM,PetscDM) - PetscErrorCode DMSwarmGetCellDM(PetscDM,PetscDM*) + PetscErrorCode DMSwarmSetCellDM(PetscDM, PetscDM) + PetscErrorCode DMSwarmGetCellDM(PetscDM, PetscDM*) 
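The DMSwarm declarations being reformatted here map directly onto petsc4py methods. A brief usage sketch of the field-registration and cell-DM calls touched in this hunk, assuming the petsc4py DMSwarm API (setType, setCellDM, registerField, getField); the names follow petsc4py conventions, not anything introduced by this patch:

    from petsc4py import PETSc

    cell_dm = PETSc.DMDA().create(dim=2, sizes=(8, 8))      # background mesh for the PIC swarm
    swarm = PETSc.DMSwarm().create(comm=cell_dm.comm)
    swarm.setDimension(2)
    swarm.setType(PETSc.DMSwarm.Type.PIC)                   # DMSwarmSetType
    swarm.setCellDM(cell_dm)                                # DMSwarmSetCellDM
    swarm.registerField('weight', 1, dtype=PETSc.RealType)  # DMSwarmRegisterPetscDatatypeField
    swarm.finalizeFieldRegister()                           # DMSwarmFinalizeFieldRegister
    swarm.setLocalSizes(16, 4)                              # DMSwarmSetLocalSizes
    w = swarm.getField('weight')                            # DMSwarmGetField (numpy view)
    w[...] = 1.0
    swarm.restoreField('weight')                            # DMSwarmRestoreField
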
PetscErrorCode DMSwarmSetType(PetscDM, PetscDMSwarmType) - PetscErrorCode DMSwarmSetPointsUniformCoordinates(PetscDM,PetscReal[],PetscReal[],PetscInt[],PetscInsertMode) - PetscErrorCode DMSwarmSetPointCoordinates(PetscDM,PetscInt,PetscReal*,PetscBool,PetscInsertMode) - PetscErrorCode DMSwarmInsertPointsUsingCellDM(PetscDM,PetscDMSwarmPICLayoutType,PetscInt) - PetscErrorCode DMSwarmSetPointCoordinatesCellwise(PetscDM,PetscInt,PetscReal*) - PetscErrorCode DMSwarmViewFieldsXDMF(PetscDM,const char*,PetscInt,const char**) - PetscErrorCode DMSwarmViewXDMF(PetscDM,const char*) + PetscErrorCode DMSwarmSetPointsUniformCoordinates(PetscDM, PetscReal[], PetscReal[], PetscInt[], PetscInsertMode) + PetscErrorCode DMSwarmSetPointCoordinates(PetscDM, PetscInt, PetscReal*, PetscBool, PetscInsertMode) + PetscErrorCode DMSwarmInsertPointsUsingCellDM(PetscDM, PetscDMSwarmPICLayoutType, PetscInt) + PetscErrorCode DMSwarmSetPointCoordinatesCellwise(PetscDM, PetscInt, PetscReal*) + PetscErrorCode DMSwarmViewFieldsXDMF(PetscDM, const char*, PetscInt, const char**) + PetscErrorCode DMSwarmViewXDMF(PetscDM, const char*) PetscErrorCode DMSwarmSortGetAccess(PetscDM) PetscErrorCode DMSwarmSortRestoreAccess(PetscDM) - PetscErrorCode DMSwarmSortGetPointsPerCell(PetscDM,PetscInt,PetscInt*,PetscInt**) - PetscErrorCode DMSwarmSortGetNumberOfPointsPerCell(PetscDM,PetscInt,PetscInt*) - PetscErrorCode DMSwarmSortGetIsValid(PetscDM,PetscBool*) - PetscErrorCode DMSwarmSortGetSizes(PetscDM,PetscInt*,PetscInt*) + PetscErrorCode DMSwarmSortGetPointsPerCell(PetscDM, PetscInt, PetscInt*, PetscInt**) + PetscErrorCode DMSwarmSortGetNumberOfPointsPerCell(PetscDM, PetscInt, PetscInt*) + PetscErrorCode DMSwarmSortGetIsValid(PetscDM, PetscBool*) + PetscErrorCode DMSwarmSortGetSizes(PetscDM, PetscInt*, PetscInt*) - PetscErrorCode DMSwarmProjectFields(PetscDM,PetscDM,PetscInt,const char**,PetscVec*,PetscScatterMode) + PetscErrorCode DMSwarmProjectFields(PetscDM, PetscDM, PetscInt, const char**, PetscVec*, PetscScatterMode) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmutils.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmutils.pxi index 72d78661c43..e1eb88b05f0 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdmutils.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdmutils.pxi @@ -15,8 +15,3 @@ cdef extern from * nogil: PetscErrorCode DMInterpolationSetDim(PetscDMInterpolation, PetscInt) PetscErrorCode DMInterpolationSetDof(PetscDMInterpolation, PetscInt) PetscErrorCode DMInterpolationSetUp(PetscDMInterpolation, PetscDM, PetscBool, PetscBool) - - - - - diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscds.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscds.pxi index 00ed4e64246..a6ea913e326 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscds.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscds.pxi @@ -3,32 +3,32 @@ cdef extern from * nogil: ctypedef const char* PetscDSType PetscDSType PETSCDSBASIC - PetscErrorCode PetscDSCreate(MPI_Comm,PetscDS*) + PetscErrorCode PetscDSCreate(MPI_Comm, PetscDS*) PetscErrorCode PetscDSDestroy(PetscDS*) - PetscErrorCode PetscDSView(PetscDS,PetscViewer) - PetscErrorCode PetscDSSetType(PetscDS,PetscDSType) - PetscErrorCode PetscDSGetType(PetscDS,PetscDSType*) + PetscErrorCode PetscDSView(PetscDS, PetscViewer) + PetscErrorCode PetscDSSetType(PetscDS, PetscDSType) + PetscErrorCode PetscDSGetType(PetscDS, PetscDSType*) PetscErrorCode PetscDSSetFromOptions(PetscDS) PetscErrorCode PetscDSSetUp(PetscDS) - PetscErrorCode 
PetscDSGetHeightSubspace(PetscDS,PetscInt,PetscDS*) - PetscErrorCode PetscDSGetSpatialDimension(PetscDS,PetscInt*) - PetscErrorCode PetscDSGetCoordinateDimension(PetscDS,PetscInt*) - PetscErrorCode PetscDSSetCoordinateDimension(PetscDS,PetscInt) - PetscErrorCode PetscDSGetNumFields(PetscDS,PetscInt*) - PetscErrorCode PetscDSGetTotalDimension(PetscDS,PetscInt*) - PetscErrorCode PetscDSGetTotalComponents(PetscDS,PetscInt*) - PetscErrorCode PetscDSGetFieldIndex(PetscDS,PetscObject,PetscInt*) - PetscErrorCode PetscDSGetFieldSize(PetscDS,PetscInt,PetscInt*) - PetscErrorCode PetscDSGetFieldOffset(PetscDS,PetscInt,PetscInt*) - PetscErrorCode PetscDSGetDimensions(PetscDS,PetscInt*[]) - PetscErrorCode PetscDSGetComponents(PetscDS,PetscInt*[]) - PetscErrorCode PetscDSGetComponentOffset(PetscDS,PetscInt,PetscInt*) - PetscErrorCode PetscDSGetComponentOffsets(PetscDS,PetscInt*[]) - PetscErrorCode PetscDSGetComponentDerivativeOffsets(PetscDS,PetscInt*[]) + PetscErrorCode PetscDSGetHeightSubspace(PetscDS, PetscInt, PetscDS*) + PetscErrorCode PetscDSGetSpatialDimension(PetscDS, PetscInt*) + PetscErrorCode PetscDSGetCoordinateDimension(PetscDS, PetscInt*) + PetscErrorCode PetscDSSetCoordinateDimension(PetscDS, PetscInt) + PetscErrorCode PetscDSGetNumFields(PetscDS, PetscInt*) + PetscErrorCode PetscDSGetTotalDimension(PetscDS, PetscInt*) + PetscErrorCode PetscDSGetTotalComponents(PetscDS, PetscInt*) + PetscErrorCode PetscDSGetFieldIndex(PetscDS, PetscObject, PetscInt*) + PetscErrorCode PetscDSGetFieldSize(PetscDS, PetscInt, PetscInt*) + PetscErrorCode PetscDSGetFieldOffset(PetscDS, PetscInt, PetscInt*) + PetscErrorCode PetscDSGetDimensions(PetscDS, PetscInt*[]) + PetscErrorCode PetscDSGetComponents(PetscDS, PetscInt*[]) + PetscErrorCode PetscDSGetComponentOffset(PetscDS, PetscInt, PetscInt*) + PetscErrorCode PetscDSGetComponentOffsets(PetscDS, PetscInt*[]) + PetscErrorCode PetscDSGetComponentDerivativeOffsets(PetscDS, PetscInt*[]) - PetscErrorCode PetscDSGetDiscretization(PetscDS,PetscInt,PetscObject*) - PetscErrorCode PetscDSSetDiscretization(PetscDS,PetscInt,PetscObject) - PetscErrorCode PetscDSAddDiscretization(PetscDS,PetscObject) - PetscErrorCode PetscDSGetImplicit(PetscDS,PetscInt,PetscBool*) - PetscErrorCode PetscDSSetImplicit(PetscDS,PetscInt,PetscBool) + PetscErrorCode PetscDSGetDiscretization(PetscDS, PetscInt, PetscObject*) + PetscErrorCode PetscDSSetDiscretization(PetscDS, PetscInt, PetscObject) + PetscErrorCode PetscDSAddDiscretization(PetscDS, PetscObject) + PetscErrorCode PetscDSGetImplicit(PetscDS, PetscInt, PetscBool*) + PetscErrorCode PetscDSSetImplicit(PetscDS, PetscInt, PetscBool) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscdt.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscdt.pxi index d88d30a3d43..9cb0db8a6d0 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscdt.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscdt.pxi @@ -16,18 +16,17 @@ cdef extern from * nogil: PetscErrorCode PetscQuadratureGetData(PetscQuadrature, PetscInt*, PetscInt*, PetscInt*, const PetscReal *[], const PetscReal *[]) PetscErrorCode PetscQuadratureSetData(PetscQuadrature, PetscInt, PetscInt, PetscInt, const PetscReal [], const PetscReal []) - PetscErrorCode PetscQuadratureView(PetscQuadrature, PetscViewer) PetscErrorCode PetscQuadratureDestroy(PetscQuadrature *) PetscErrorCode PetscQuadratureExpandComposite(PetscQuadrature, PetscInt, const PetscReal[], const PetscReal[], PetscQuadrature *) - PetscErrorCode PetscDTLegendreEval(PetscInt,const PetscReal*,PetscInt,const 
PetscInt*,PetscReal*,PetscReal*,PetscReal*) - PetscErrorCode PetscDTGaussQuadrature(PetscInt,PetscReal,PetscReal,PetscReal*,PetscReal*) - PetscErrorCode PetscDTGaussLobattoLegendreQuadrature(PetscInt,PetscGaussLobattoLegendreCreateType,PetscReal*,PetscReal*) - PetscErrorCode PetscDTReconstructPoly(PetscInt,PetscInt,const PetscReal*,PetscInt,const PetscReal*,PetscReal*) - PetscErrorCode PetscDTGaussTensorQuadrature(PetscInt,PetscInt,PetscInt,PetscReal,PetscReal,PetscQuadrature*) - PetscErrorCode PetscDTGaussJacobiQuadrature(PetscInt,PetscInt,PetscInt,PetscReal,PetscReal,PetscQuadrature*) + PetscErrorCode PetscDTLegendreEval(PetscInt, const PetscReal*, PetscInt, const PetscInt*, PetscReal*, PetscReal*, PetscReal*) + PetscErrorCode PetscDTGaussQuadrature(PetscInt, PetscReal, PetscReal, PetscReal*, PetscReal*) + PetscErrorCode PetscDTGaussLobattoLegendreQuadrature(PetscInt, PetscGaussLobattoLegendreCreateType, PetscReal*, PetscReal*) + PetscErrorCode PetscDTReconstructPoly(PetscInt, PetscInt, const PetscReal*, PetscInt, const PetscReal*, PetscReal*) + PetscErrorCode PetscDTGaussTensorQuadrature(PetscInt, PetscInt, PetscInt, PetscReal, PetscReal, PetscQuadrature*) + PetscErrorCode PetscDTGaussJacobiQuadrature(PetscInt, PetscInt, PetscInt, PetscReal, PetscReal, PetscQuadrature*) PetscErrorCode PetscDTTanhSinhTensorQuadrature(PetscInt, PetscInt, PetscReal, PetscReal, PetscQuadrature *) PetscErrorCode PetscDTTanhSinhIntegrate(void (*)(PetscReal *, void *, PetscReal *), PetscReal, PetscReal, PetscInt, PetscReal *) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscfe.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscfe.pxi index eb475765d0b..4793bc6bdb3 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscfe.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscfe.pxi @@ -31,5 +31,3 @@ cdef extern from * nogil: PetscErrorCode PetscFESetUp(PetscFE) PetscErrorCode PetscFEView(PetscFE, PetscViewer) - PetscErrorCode PetscFEViewFromOptions(PetscFE, PetscObject, char[]) - diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscis.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscis.pxi index 5b032215459..9da2ba88dd7 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscis.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscis.pxi @@ -5,57 +5,57 @@ cdef extern from * nogil: PetscISType ISSTRIDE PetscISType ISBLOCK - PetscErrorCode ISView(PetscIS,PetscViewer) + PetscErrorCode ISView(PetscIS, PetscViewer) PetscErrorCode ISDestroy(PetscIS*) - PetscErrorCode ISCreate(MPI_Comm,PetscIS*) - PetscErrorCode ISSetType(PetscIS,PetscISType) - PetscErrorCode ISGetType(PetscIS,PetscISType*) + PetscErrorCode ISCreate(MPI_Comm, PetscIS*) + PetscErrorCode ISSetType(PetscIS, PetscISType) + PetscErrorCode ISGetType(PetscIS, PetscISType*) - PetscErrorCode ISCreateGeneral(MPI_Comm,PetscInt,PetscInt[],PetscCopyMode,PetscIS*) - PetscErrorCode ISCreateBlock(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscCopyMode,PetscIS*) - PetscErrorCode ISCreateStride(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscIS*) + PetscErrorCode ISCreateGeneral(MPI_Comm, PetscInt, PetscInt[], PetscCopyMode, PetscIS*) + PetscErrorCode ISCreateBlock(MPI_Comm, PetscInt, PetscInt, PetscInt[], PetscCopyMode, PetscIS*) + PetscErrorCode ISCreateStride(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscIS*) - PetscErrorCode ISLoad(PetscIS,PetscViewer) - PetscErrorCode ISDuplicate(PetscIS,PetscIS*) - PetscErrorCode ISCopy(PetscIS,PetscIS) - PetscErrorCode ISAllGather(PetscIS,PetscIS*) - PetscErrorCode ISInvertPermutation(PetscIS,PetscInt,PetscIS*) + 
PetscErrorCode ISLoad(PetscIS, PetscViewer) + PetscErrorCode ISDuplicate(PetscIS, PetscIS*) + PetscErrorCode ISCopy(PetscIS, PetscIS) + PetscErrorCode ISAllGather(PetscIS, PetscIS*) + PetscErrorCode ISInvertPermutation(PetscIS, PetscInt, PetscIS*) - PetscErrorCode ISGetSize(PetscIS,PetscInt*) - PetscErrorCode ISGetLocalSize(PetscIS,PetscInt*) - PetscErrorCode ISGetBlockSize(PetscIS,PetscInt*) - PetscErrorCode ISSetBlockSize(PetscIS,PetscInt) - PetscErrorCode ISGetIndices(PetscIS,const PetscInt*[]) - PetscErrorCode ISRestoreIndices(PetscIS,const PetscInt*[]) + PetscErrorCode ISGetSize(PetscIS, PetscInt*) + PetscErrorCode ISGetLocalSize(PetscIS, PetscInt*) + PetscErrorCode ISGetBlockSize(PetscIS, PetscInt*) + PetscErrorCode ISSetBlockSize(PetscIS, PetscInt) + PetscErrorCode ISGetIndices(PetscIS, const PetscInt*[]) + PetscErrorCode ISRestoreIndices(PetscIS, const PetscInt*[]) - PetscErrorCode ISEqual(PetscIS,PetscIS,PetscBool*) + PetscErrorCode ISEqual(PetscIS, PetscIS, PetscBool*) PetscErrorCode ISSetPermutation(PetscIS) - PetscErrorCode ISPermutation(PetscIS,PetscBool*) + PetscErrorCode ISPermutation(PetscIS, PetscBool*) PetscErrorCode ISSetIdentity(PetscIS) - PetscErrorCode ISIdentity(PetscIS,PetscBool*) + PetscErrorCode ISIdentity(PetscIS, PetscBool*) PetscErrorCode ISSort(PetscIS) - PetscErrorCode ISSorted(PetscIS,PetscBool*) + PetscErrorCode ISSorted(PetscIS, PetscBool*) - PetscErrorCode ISSum(PetscIS,PetscIS,PetscIS*) - PetscErrorCode ISExpand(PetscIS,PetscIS,PetscIS*) - PetscErrorCode ISDifference(PetscIS,PetscIS,PetscIS*) - PetscErrorCode ISComplement(PetscIS,PetscInt,PetscInt,PetscIS*) - PetscErrorCode ISEmbed(PetscIS,PetscIS,PetscBool,PetscIS*) - PetscErrorCode ISRenumber(PetscIS,PetscIS,PetscInt*,PetscIS*) + PetscErrorCode ISSum(PetscIS, PetscIS, PetscIS*) + PetscErrorCode ISExpand(PetscIS, PetscIS, PetscIS*) + PetscErrorCode ISDifference(PetscIS, PetscIS, PetscIS*) + PetscErrorCode ISComplement(PetscIS, PetscInt, PetscInt, PetscIS*) + PetscErrorCode ISEmbed(PetscIS, PetscIS, PetscBool, PetscIS*) + PetscErrorCode ISRenumber(PetscIS, PetscIS, PetscInt*, PetscIS*) - PetscErrorCode ISGeneralSetIndices(PetscIS,PetscInt,PetscInt[],PetscCopyMode) + PetscErrorCode ISGeneralSetIndices(PetscIS, PetscInt, PetscInt[], PetscCopyMode) - PetscErrorCode ISBlockSetIndices(PetscIS,PetscInt,PetscInt,PetscInt[],PetscCopyMode) - PetscErrorCode ISBlockGetIndices(PetscIS,const PetscInt*[]) - PetscErrorCode ISBlockRestoreIndices(PetscIS,const PetscInt*[]) + PetscErrorCode ISBlockSetIndices(PetscIS, PetscInt, PetscInt, PetscInt[], PetscCopyMode) + PetscErrorCode ISBlockGetIndices(PetscIS, const PetscInt*[]) + PetscErrorCode ISBlockRestoreIndices(PetscIS, const PetscInt*[]) - PetscErrorCode ISStrideSetStride(PetscIS,PetscInt,PetscInt,PetscInt) - PetscErrorCode ISStrideGetInfo(PetscIS,PetscInt*,PetscInt*) + PetscErrorCode ISStrideSetStride(PetscIS, PetscInt, PetscInt, PetscInt) + PetscErrorCode ISStrideGetInfo(PetscIS, PetscInt*, PetscInt*) PetscErrorCode ISToGeneral(PetscIS) - PetscErrorCode ISBuildTwoSided(PetscIS,PetscIS,PetscIS*) + PetscErrorCode ISBuildTwoSided(PetscIS, PetscIS, PetscIS*) cdef extern from * nogil: @@ -68,28 +68,28 @@ cdef extern from * nogil: PETSC_IS_GTOLM_MASK "IS_GTOLM_MASK" PETSC_IS_GTOLM_DROP "IS_GTOLM_DROP" - PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscCopyMode,PetscLGMap*) - PetscErrorCode ISLocalToGlobalMappingCreateIS(PetscIS,PetscLGMap*) - PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF,PetscInt,PetscLGMap*) - PetscErrorCode 
ISLocalToGlobalMappingSetType(PetscLGMap,PetscISLocalToGlobalMappingType) + PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm, PetscInt, PetscInt, PetscInt[], PetscCopyMode, PetscLGMap*) + PetscErrorCode ISLocalToGlobalMappingCreateIS(PetscIS, PetscLGMap*) + PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF, PetscInt, PetscLGMap*) + PetscErrorCode ISLocalToGlobalMappingSetType(PetscLGMap, PetscISLocalToGlobalMappingType) PetscErrorCode ISLocalToGlobalMappingSetFromOptions(PetscLGMap) - PetscErrorCode ISLocalToGlobalMappingView(PetscLGMap,PetscViewer) + PetscErrorCode ISLocalToGlobalMappingView(PetscLGMap, PetscViewer) PetscErrorCode ISLocalToGlobalMappingDestroy(PetscLGMap*) - PetscErrorCode ISLocalToGlobalMappingGetSize(PetscLGMap,PetscInt*) - PetscErrorCode ISLocalToGlobalMappingGetBlockSize(PetscLGMap,PetscInt*) - PetscErrorCode ISLocalToGlobalMappingGetIndices(PetscLGMap,const PetscInt*[]) - PetscErrorCode ISLocalToGlobalMappingRestoreIndices(PetscLGMap,const PetscInt*[]) - PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(PetscLGMap,const PetscInt*[]) - PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(PetscLGMap,const PetscInt*[]) - PetscErrorCode ISLocalToGlobalMappingGetInfo(PetscLGMap,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]) - PetscErrorCode ISLocalToGlobalMappingRestoreInfo(PetscLGMap,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]) - PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(PetscLGMap,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]) - PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(PetscLGMap,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]) - PetscErrorCode ISLocalToGlobalMappingApply(PetscLGMap,PetscInt,PetscInt[],PetscInt[]) - PetscErrorCode ISLocalToGlobalMappingApplyBlock(PetscLGMap,PetscInt,PetscInt[],PetscInt[]) - PetscErrorCode ISLocalToGlobalMappingApplyIS(PetscLGMap,PetscIS,PetscIS*) - PetscErrorCode ISGlobalToLocalMappingApply(PetscLGMap,PetscGLMapMode,PetscInt,PetscInt[],PetscInt*,PetscInt[]) - PetscErrorCode ISGlobalToLocalMappingApplyBlock(PetscLGMap,PetscGLMapMode,PetscInt,PetscInt[],PetscInt*,PetscInt[]) + PetscErrorCode ISLocalToGlobalMappingGetSize(PetscLGMap, PetscInt*) + PetscErrorCode ISLocalToGlobalMappingGetBlockSize(PetscLGMap, PetscInt*) + PetscErrorCode ISLocalToGlobalMappingGetIndices(PetscLGMap, const PetscInt*[]) + PetscErrorCode ISLocalToGlobalMappingRestoreIndices(PetscLGMap, const PetscInt*[]) + PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(PetscLGMap, const PetscInt*[]) + PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(PetscLGMap, const PetscInt*[]) + PetscErrorCode ISLocalToGlobalMappingGetInfo(PetscLGMap, PetscInt*, PetscInt*[], PetscInt*[], PetscInt**[]) + PetscErrorCode ISLocalToGlobalMappingRestoreInfo(PetscLGMap, PetscInt*, PetscInt*[], PetscInt*[], PetscInt**[]) + PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(PetscLGMap, PetscInt*, PetscInt*[], PetscInt*[], PetscInt**[]) + PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(PetscLGMap, PetscInt*, PetscInt*[], PetscInt*[], PetscInt**[]) + PetscErrorCode ISLocalToGlobalMappingApply(PetscLGMap, PetscInt, PetscInt[], PetscInt[]) + PetscErrorCode ISLocalToGlobalMappingApplyBlock(PetscLGMap, PetscInt, PetscInt[], PetscInt[]) + PetscErrorCode ISLocalToGlobalMappingApplyIS(PetscLGMap, PetscIS, PetscIS*) + PetscErrorCode ISGlobalToLocalMappingApply(PetscLGMap, PetscGLMapMode, PetscInt, PetscInt[], PetscInt*, PetscInt[]) + PetscErrorCode ISGlobalToLocalMappingApplyBlock(PetscLGMap, PetscGLMapMode, PetscInt, PetscInt[], PetscInt*, PetscInt[]) # 
-------------------------------------------------------------------- @@ -97,13 +97,13 @@ cdef extern from * nogil: cdef inline IS ref_IS(PetscIS iset): cdef IS ob = IS() ob.iset = iset - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob cdef inline LGMap ref_LGMap(PetscLGMap lgm): cdef LGMap ob = LGMap() ob.lgm = lgm - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # -------------------------------------------------------------------- @@ -117,7 +117,7 @@ cdef class _IS_buffer: def __cinit__(self, IS iset): cdef PetscIS i = iset.iset - CHKERR( PetscINCREF(&i) ) + CHKERR(PetscINCREF(&i)) self.iset = i self.size = 0 self.data = NULL @@ -125,22 +125,22 @@ cdef class _IS_buffer: def __dealloc__(self): if self.hasarray and self.iset != NULL: - CHKERR( ISRestoreIndices(self.iset, &self.data) ) - CHKERR( ISDestroy(&self.iset) ) + CHKERR(ISRestoreIndices(self.iset, &self.data)) + CHKERR(ISDestroy(&self.iset)) # cdef int acquire(self) except -1: if not self.hasarray and self.iset != NULL: - CHKERR( ISGetLocalSize(self.iset, &self.size) ) - CHKERR( ISGetIndices(self.iset, &self.data) ) + CHKERR(ISGetLocalSize(self.iset, &self.size)) + CHKERR(ISGetIndices(self.iset, &self.data)) self.hasarray = 1 return 0 cdef int release(self) except -1: if self.hasarray and self.iset != NULL: self.size = 0 - CHKERR( ISRestoreIndices(self.iset, &self.data) ) + CHKERR(ISRestoreIndices(self.iset, &self.data)) self.hasarray = 0 self.data = NULL return 0 @@ -190,7 +190,7 @@ cdef class _IS_buffer: p[0] = self.data n = self.size elif self.iset != NULL: - CHKERR( ISGetLocalSize(self.iset, &n) ) + CHKERR(ISGetLocalSize(self.iset, &n)) return (n*sizeof(PetscInt)) def __getsegcount__(self, Py_ssize_t *lenp): @@ -209,7 +209,7 @@ cdef class _IS_buffer: def __get__(self): cdef PetscInt n = 0 if self.iset != NULL: - CHKERR( ISGetLocalSize(self.iset, &n) ) + CHKERR(ISGetLocalSize(self.iset, &n)) cdef object size = toInt(n) cdef dtype descr = PyArray_DescrFromType(NPY_PETSC_INT) cdef str typestr = "=%c%d" % (descr.kind, descr.itemsize) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscksp.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscksp.pxi index 4863385dca6..ce43114e8d8 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscksp.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscksp.pxi @@ -94,136 +94,148 @@ cdef extern from * nogil: ctypedef PetscErrorCode (*PetscKSPCtxDel)(void*) ctypedef PetscErrorCode (*PetscKSPConvergedFunction)(PetscKSP, - PetscInt, - PetscReal, - PetscKSPConvergedReason*, - void*) except PETSC_ERR_PYTHON + PetscInt, + PetscReal, + PetscKSPConvergedReason*, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscKSPMonitorFunction)(PetscKSP, - PetscInt, - PetscReal, - void*) except PETSC_ERR_PYTHON + PetscInt, + PetscReal, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscKSPComputeRHSFunction)(PetscKSP, - PetscVec, - void*) except PETSC_ERR_PYTHON + PetscVec, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscKSPComputeOpsFunction)(PetscKSP, - PetscMat, - PetscMat, - void*) except PETSC_ERR_PYTHON + PetscMat, + PetscMat, + void*) except PETSC_ERR_PYTHON - PetscErrorCode KSPCreate(MPI_Comm,PetscKSP*) + ctypedef PetscErrorCode (*PetscKSPPreSolveFunction)(PetscKSP, + PetscVec, + PetscVec, + void*) except PETSC_ERR_PYTHON + + ctypedef PetscErrorCode (*PetscKSPPostSolveFunction)(PetscKSP, + PetscVec, + PetscVec, + void*) except PETSC_ERR_PYTHON + + PetscErrorCode KSPCreate(MPI_Comm, PetscKSP*) PetscErrorCode 
KSPDestroy(PetscKSP*) - PetscErrorCode KSPView(PetscKSP,PetscViewer) + PetscErrorCode KSPView(PetscKSP, PetscViewer) - PetscErrorCode KSPSetType(PetscKSP,PetscKSPType) - PetscErrorCode KSPGetType(PetscKSP,PetscKSPType*) + PetscErrorCode KSPSetType(PetscKSP, PetscKSPType) + PetscErrorCode KSPGetType(PetscKSP, PetscKSPType*) - PetscErrorCode KSPSetOptionsPrefix(PetscKSP,char[]) - PetscErrorCode KSPAppendOptionsPrefix(PetscKSP,char[]) - PetscErrorCode KSPGetOptionsPrefix(PetscKSP,char*[]) + PetscErrorCode KSPSetOptionsPrefix(PetscKSP, char[]) + PetscErrorCode KSPAppendOptionsPrefix(PetscKSP, char[]) + PetscErrorCode KSPGetOptionsPrefix(PetscKSP, char*[]) PetscErrorCode KSPSetFromOptions(PetscKSP) - PetscErrorCode KSPSetTolerances(PetscKSP,PetscReal,PetscReal,PetscReal,PetscInt) - PetscErrorCode KSPGetTolerances(PetscKSP,PetscReal*,PetscReal*,PetscReal*,PetscInt*) - PetscErrorCode KSPSetNormType(PetscKSP,PetscKSPNormType) - PetscErrorCode KSPGetNormType(PetscKSP,PetscKSPNormType*) - PetscErrorCode KSPSetPCSide(PetscKSP,PetscPCSide) - PetscErrorCode KSPGetPCSide(PetscKSP,PetscPCSide*) - PetscErrorCode KSPSetSupportedNorm(PetscKSP,PetscKSPNormType,PetscPCSide,PetscInt) - - PetscErrorCode KSPSetConvergenceTest(PetscKSP,PetscKSPConvergedFunction,void*,PetscKSPCtxDel) - PetscErrorCode KSPSetResidualHistory(PetscKSP,PetscReal[],PetscInt,PetscBool) - PetscErrorCode KSPGetResidualHistory(PetscKSP,PetscReal*[],PetscInt*) - PetscErrorCode KSPLogResidualHistory(PetscKSP,PetscReal) + PetscErrorCode KSPSetTolerances(PetscKSP, PetscReal, PetscReal, PetscReal, PetscInt) + PetscErrorCode KSPGetTolerances(PetscKSP, PetscReal*, PetscReal*, PetscReal*, PetscInt*) + PetscErrorCode KSPSetNormType(PetscKSP, PetscKSPNormType) + PetscErrorCode KSPGetNormType(PetscKSP, PetscKSPNormType*) + PetscErrorCode KSPSetPCSide(PetscKSP, PetscPCSide) + PetscErrorCode KSPGetPCSide(PetscKSP, PetscPCSide*) + PetscErrorCode KSPSetSupportedNorm(PetscKSP, PetscKSPNormType, PetscPCSide, PetscInt) + + PetscErrorCode KSPSetConvergenceTest(PetscKSP, PetscKSPConvergedFunction, void*, PetscKSPCtxDel) + PetscErrorCode KSPSetResidualHistory(PetscKSP, PetscReal[], PetscInt, PetscBool) + PetscErrorCode KSPGetResidualHistory(PetscKSP, PetscReal*[], PetscInt*) + PetscErrorCode KSPLogResidualHistory(PetscKSP, PetscReal) PetscErrorCode KSPConvergedDefaultCreate(void**) PetscErrorCode KSPConvergedDefaultDestroy(void*) - PetscErrorCode KSPConvergedDefault(PetscKSP,PetscInt,PetscReal,PetscKSPConvergedReason*,void*) except PETSC_ERR_PYTHON - PetscErrorCode KSPLSQRConvergedDefault(PetscKSP,PetscInt,PetscReal,PetscKSPConvergedReason*,void*) except PETSC_ERR_PYTHON - PetscErrorCode KSPConvergedSkip(PetscKSP,PetscInt,PetscReal,PetscKSPConvergedReason*,void*) except PETSC_ERR_PYTHON + PetscErrorCode KSPConvergedDefault(PetscKSP, PetscInt, PetscReal, PetscKSPConvergedReason*, void*) except PETSC_ERR_PYTHON + PetscErrorCode KSPLSQRConvergedDefault(PetscKSP, PetscInt, PetscReal, PetscKSPConvergedReason*, void*) except PETSC_ERR_PYTHON + PetscErrorCode KSPConvergedSkip(PetscKSP, PetscInt, PetscReal, PetscKSPConvergedReason*, void*) except PETSC_ERR_PYTHON - PetscErrorCode KSPMonitorSet(PetscKSP,PetscKSPMonitorFunction,void*,PetscKSPCtxDel) + PetscErrorCode KSPMonitorSet(PetscKSP, PetscKSPMonitorFunction, void*, PetscKSPCtxDel) PetscErrorCode KSPMonitorCancel(PetscKSP) - PetscErrorCode KSPMonitor(PetscKSP,PetscInt,PetscReal) + PetscErrorCode KSPMonitor(PetscKSP, PetscInt, PetscReal) - PetscErrorCode KSPSetInitialGuessNonzero(PetscKSP,PetscBool) - PetscErrorCode 
KSPGetInitialGuessNonzero(PetscKSP,PetscBool*) - PetscErrorCode KSPSetInitialGuessKnoll(PetscKSP,PetscBool) - PetscErrorCode KSPGetInitialGuessKnoll(PetscKSP,PetscBool*) - PetscErrorCode KSPSetUseFischerGuess(PetscKSP,PetscInt,PetscInt) + PetscErrorCode KSPSetInitialGuessNonzero(PetscKSP, PetscBool) + PetscErrorCode KSPGetInitialGuessNonzero(PetscKSP, PetscBool*) + PetscErrorCode KSPSetInitialGuessKnoll(PetscKSP, PetscBool) + PetscErrorCode KSPGetInitialGuessKnoll(PetscKSP, PetscBool*) + PetscErrorCode KSPSetUseFischerGuess(PetscKSP, PetscInt, PetscInt) - PetscErrorCode KSPGetComputeEigenvalues(PetscKSP,PetscBool*) - PetscErrorCode KSPSetComputeEigenvalues(PetscKSP,PetscBool) - PetscErrorCode KSPGetComputeSingularValues(PetscKSP,PetscBool*) - PetscErrorCode KSPSetComputeSingularValues(PetscKSP,PetscBool) + PetscErrorCode KSPGetComputeEigenvalues(PetscKSP, PetscBool*) + PetscErrorCode KSPSetComputeEigenvalues(PetscKSP, PetscBool) + PetscErrorCode KSPGetComputeSingularValues(PetscKSP, PetscBool*) + PetscErrorCode KSPSetComputeSingularValues(PetscKSP, PetscBool) - PetscErrorCode KSPSetComputeRHS(PetscKSP,PetscKSPComputeRHSFunction,void*) - PetscErrorCode KSPSetComputeOperators(PetscKSP,PetscKSPComputeOpsFunction,void*) - PetscErrorCode KSPSetOperators(PetscKSP,PetscMat,PetscMat) - PetscErrorCode KSPGetOperators(PetscKSP,PetscMat*,PetscMat*) - PetscErrorCode KSPGetOperatorsSet(PetscKSP,PetscBool*,PetscBool*) + PetscErrorCode KSPSetComputeRHS(PetscKSP, PetscKSPComputeRHSFunction, void*) + PetscErrorCode KSPSetComputeOperators(PetscKSP, PetscKSPComputeOpsFunction, void*) + PetscErrorCode KSPSetOperators(PetscKSP, PetscMat, PetscMat) + PetscErrorCode KSPGetOperators(PetscKSP, PetscMat*, PetscMat*) + PetscErrorCode KSPGetOperatorsSet(PetscKSP, PetscBool*, PetscBool*) - PetscErrorCode KSPSetPC(PetscKSP,PetscPC) - PetscErrorCode KSPGetPC(PetscKSP,PetscPC*) + PetscErrorCode KSPSetPC(PetscKSP, PetscPC) + PetscErrorCode KSPGetPC(PetscKSP, PetscPC*) - PetscErrorCode KSPGetDM(PetscKSP,PetscDM*) - PetscErrorCode KSPSetDM(PetscKSP,PetscDM) - PetscErrorCode KSPSetDMActive(PetscKSP,PetscBool) + PetscErrorCode KSPGetDM(PetscKSP, PetscDM*) + PetscErrorCode KSPSetDM(PetscKSP, PetscDM) + PetscErrorCode KSPSetDMActive(PetscKSP, PetscBool) PetscErrorCode KSPSetUp(PetscKSP) PetscErrorCode KSPReset(PetscKSP) PetscErrorCode KSPSetUpOnBlocks(PetscKSP) - PetscErrorCode KSPSolve(PetscKSP,PetscVec,PetscVec) - PetscErrorCode KSPSolveTranspose(PetscKSP,PetscVec,PetscVec) - PetscErrorCode KSPMatSolve(PetscKSP,PetscMat,PetscMat) - PetscErrorCode KSPMatSolveTranspose(PetscKSP,PetscMat,PetscMat) + PetscErrorCode KSPSetPreSolve(PetscKSP, PetscKSPPreSolveFunction, void*) + PetscErrorCode KSPSetPostSolve(PetscKSP, PetscKSPPostSolveFunction, void*) + PetscErrorCode KSPSolve(PetscKSP, PetscVec, PetscVec) + PetscErrorCode KSPSolveTranspose(PetscKSP, PetscVec, PetscVec) + PetscErrorCode KSPMatSolve(PetscKSP, PetscMat, PetscMat) + PetscErrorCode KSPMatSolveTranspose(PetscKSP, PetscMat, PetscMat) - PetscErrorCode KSPGetRhs(PetscKSP,PetscVec*) - PetscErrorCode KSPGetSolution(PetscKSP,PetscVec*) - PetscErrorCode KSPGetConvergedReason(PetscKSP,PetscKSPConvergedReason*) - PetscErrorCode KSPGetIterationNumber(PetscKSP,PetscInt*) - PetscErrorCode KSPGetResidualNorm(PetscKSP,PetscReal*) - PetscErrorCode KSPSetErrorIfNotConverged(PetscKSP,PetscBool); - PetscErrorCode KSPGetErrorIfNotConverged(PetscKSP,PetscBool*); + PetscErrorCode KSPGetRhs(PetscKSP, PetscVec*) + PetscErrorCode KSPGetSolution(PetscKSP, PetscVec*) + PetscErrorCode 
KSPGetConvergedReason(PetscKSP, PetscKSPConvergedReason*) + PetscErrorCode KSPGetIterationNumber(PetscKSP, PetscInt*) + PetscErrorCode KSPGetResidualNorm(PetscKSP, PetscReal*) + PetscErrorCode KSPSetErrorIfNotConverged(PetscKSP, PetscBool) + PetscErrorCode KSPGetErrorIfNotConverged(PetscKSP, PetscBool*) - PetscErrorCode KSPBuildSolution(PetscKSP,PetscVec,PetscVec*) - PetscErrorCode KSPBuildSolutionDefault(PetscKSP,PetscVec,PetscVec*) - PetscErrorCode KSPBuildResidual(PetscKSP,PetscVec,PetscVec,PetscVec*) - PetscErrorCode KSPBuildResidualDefault(PetscKSP,PetscVec,PetscVec,PetscVec*) + PetscErrorCode KSPBuildSolution(PetscKSP, PetscVec, PetscVec*) + PetscErrorCode KSPBuildSolutionDefault(PetscKSP, PetscVec, PetscVec*) + PetscErrorCode KSPBuildResidual(PetscKSP, PetscVec, PetscVec, PetscVec*) + PetscErrorCode KSPBuildResidualDefault(PetscKSP, PetscVec, PetscVec, PetscVec*) - PetscErrorCode KSPSetDiagonalScale(PetscKSP,PetscBool) - PetscErrorCode KSPGetDiagonalScale(PetscKSP,PetscBool*) - PetscErrorCode KSPSetDiagonalScaleFix(PetscKSP,PetscBool) - PetscErrorCode KSPGetDiagonalScaleFix(PetscKSP,PetscBool*) + PetscErrorCode KSPSetDiagonalScale(PetscKSP, PetscBool) + PetscErrorCode KSPGetDiagonalScale(PetscKSP, PetscBool*) + PetscErrorCode KSPSetDiagonalScaleFix(PetscKSP, PetscBool) + PetscErrorCode KSPGetDiagonalScaleFix(PetscKSP, PetscBool*) - PetscErrorCode KSPComputeExplicitOperator(PetscKSP,PetscMat*) - PetscErrorCode KSPComputeEigenvalues(PetscKSP,PetscInt,PetscReal[],PetscReal[],PetscInt*) - PetscErrorCode KSPComputeExtremeSingularValues(PetscKSP,PetscReal*,PetscReal*) + PetscErrorCode KSPComputeExplicitOperator(PetscKSP, PetscMat*) + PetscErrorCode KSPComputeEigenvalues(PetscKSP, PetscInt, PetscReal[], PetscReal[], PetscInt*) + PetscErrorCode KSPComputeExtremeSingularValues(PetscKSP, PetscReal*, PetscReal*) - PetscErrorCode KSPCreateVecs(PetscKSP,PetscInt,PetscVec**,PetscInt,PetscVec**) + PetscErrorCode KSPCreateVecs(PetscKSP, PetscInt, PetscVec**, PetscInt, PetscVec**) - PetscErrorCode KSPGMRESSetRestart(PetscKSP,PetscInt) + PetscErrorCode KSPGMRESSetRestart(PetscKSP, PetscInt) - PetscErrorCode KSPPythonSetType(PetscKSP,char[]) - PetscErrorCode KSPPythonGetType(PetscKSP,char*[]) + PetscErrorCode KSPPythonSetType(PetscKSP, char[]) + PetscErrorCode KSPPythonGetType(PetscKSP, char*[]) - PetscErrorCode KSPHPDDMSetType(PetscKSP,PetscKSPHPDDMType) - PetscErrorCode KSPHPDDMGetType(PetscKSP,PetscKSPHPDDMType*) + PetscErrorCode KSPHPDDMSetType(PetscKSP, PetscKSPHPDDMType) + PetscErrorCode KSPHPDDMGetType(PetscKSP, PetscKSPHPDDMType*) cdef extern from * nogil: # custom.h - PetscErrorCode KSPSetIterationNumber(PetscKSP,PetscInt) - PetscErrorCode KSPSetResidualNorm(PetscKSP,PetscReal) - PetscErrorCode KSPConvergenceTestCall(PetscKSP,PetscInt,PetscReal,PetscKSPConvergedReason*) - PetscErrorCode KSPSetConvergedReason(PetscKSP,PetscKSPConvergedReason) - PetscErrorCode KSPAddConvergenceTest(PetscKSP,PetscKSPConvergedFunction,PetscBool) + PetscErrorCode KSPSetIterationNumber(PetscKSP, PetscInt) + PetscErrorCode KSPSetResidualNorm(PetscKSP, PetscReal) + PetscErrorCode KSPConvergenceTestCall(PetscKSP, PetscInt, PetscReal, PetscKSPConvergedReason*) + PetscErrorCode KSPSetConvergedReason(PetscKSP, PetscKSPConvergedReason) + PetscErrorCode KSPAddConvergenceTest(PetscKSP, PetscKSPConvergedFunction, PetscBool) # ----------------------------------------------------------------------------- cdef inline KSP ref_KSP(PetscKSP ksp): cdef KSP ob = KSP() ob.ksp = ksp - CHKERR( PetscINCREF(ob.obj) ) + 
CHKERR(PetscINCREF(ob.obj)) return ob # ----------------------------------------------------------------------------- @@ -233,8 +245,8 @@ cdef PetscErrorCode KSP_Converged( PetscInt its, PetscReal rnm, PetscKSPConvergedReason *r, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef KSP Ksp = ref_KSP(ksp) (converged, args, kargs) = Ksp.get_attr('__converged__') reason = converged(Ksp, toInt(its), toReal(rnm), *args, **kargs) @@ -250,8 +262,8 @@ cdef PetscErrorCode KSP_Monitor( PetscKSP ksp, PetscInt its, PetscReal rnm, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef KSP Ksp = ref_KSP(ksp) cdef object monitorlist = Ksp.get_attr('__monitor__') if monitorlist is None: return PETSC_SUCCESS @@ -264,8 +276,8 @@ cdef PetscErrorCode KSP_Monitor( cdef PetscErrorCode KSP_ComputeRHS( PetscKSP ksp, PetscVec rhs, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef KSP Ksp = ref_KSP(ksp) cdef Vec Rhs = ref_Vec(rhs) cdef object context = Ksp.get_attr('__rhs__') @@ -279,8 +291,8 @@ cdef PetscErrorCode KSP_ComputeOps( PetscKSP ksp, PetscMat A, PetscMat B, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef KSP Ksp = ref_KSP(ksp) cdef Mat Amat = ref_Mat(A) cdef Mat Bmat = ref_Mat(B) @@ -292,3 +304,37 @@ cdef PetscErrorCode KSP_ComputeOps( return PETSC_SUCCESS # ----------------------------------------------------------------------------- + +cdef PetscErrorCode KSP_PreSolve( + PetscKSP ksp, + PetscVec rhs, + PetscVec x, + void* ctx, + ) except PETSC_ERR_PYTHON with gil: + cdef KSP Ksp = ref_KSP(ksp) + cdef Vec Rhs = ref_Vec(rhs) + cdef Vec X = ref_Vec(x) + cdef object context = Ksp.get_attr('__presolve__') + if context is None and ctx != NULL: context = ctx + assert context is not None and type(context) is tuple + (presolve, args, kargs) = context + presolve(Ksp, Rhs, X, *args, **kargs) + return PETSC_SUCCESS + +cdef PetscErrorCode KSP_PostSolve( + PetscKSP ksp, + PetscVec rhs, + PetscVec x, + void* ctx, + ) except PETSC_ERR_PYTHON with gil: + cdef KSP Ksp = ref_KSP(ksp) + cdef Vec Rhs = ref_Vec(rhs) + cdef Vec X = ref_Vec(x) + cdef object context = Ksp.get_attr('__postsolve__') + if context is None and ctx != NULL: context = ctx + assert context is not None and type(context) is tuple + (postsolve, args, kargs) = context + postsolve(Ksp, Rhs, X, *args, **kargs) + return PETSC_SUCCESS + +# ----------------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petsclayout.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petsclayout.pxi index ff2519635e2..1398c49c3c3 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petsclayout.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petsclayout.pxi @@ -4,10 +4,10 @@ cdef extern from * nogil: struct _n_PetscLayout ctypedef _n_PetscLayout* PetscLayout - PetscErrorCode PetscLayoutSetLocalSize(PetscLayout,PetscInt) - PetscErrorCode PetscLayoutSetSize(PetscLayout,PetscInt) - PetscErrorCode PetscLayoutGetBlockSize(PetscLayout,PetscInt*) - PetscErrorCode PetscLayoutSetBlockSize(PetscLayout,PetscInt) + PetscErrorCode PetscLayoutSetLocalSize(PetscLayout, PetscInt) + PetscErrorCode PetscLayoutSetSize(PetscLayout, PetscInt) + PetscErrorCode PetscLayoutGetBlockSize(PetscLayout, PetscInt*) + PetscErrorCode PetscLayoutSetBlockSize(PetscLayout, PetscInt) PetscErrorCode PetscLayoutSetUp(PetscLayout) # 
-------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petsclog.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petsclog.pxi index 3a18329a2b2..3f6cde87744 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petsclog.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petsclog.pxi @@ -23,46 +23,46 @@ cdef extern from * nogil: PetscErrorCode PetscTimeAdd(PetscLogDouble*) ctypedef int PetscLogStage - PetscErrorCode PetscLogStageRegister(char[],PetscLogStage*) + PetscErrorCode PetscLogStageRegister(char[], PetscLogStage*) PetscErrorCode PetscLogStagePush(PetscLogStage) PetscErrorCode PetscLogStagePop() - PetscErrorCode PetscLogStageSetActive(PetscLogStage,PetscBool) - PetscErrorCode PetscLogStageGetActive(PetscLogStage,PetscBool*) - PetscErrorCode PetscLogStageSetVisible(PetscLogStage,PetscBool) - PetscErrorCode PetscLogStageGetVisible(PetscLogStage,PetscBool*) - PetscErrorCode PetscLogStageGetId(char[],PetscLogStage*) + PetscErrorCode PetscLogStageSetActive(PetscLogStage, PetscBool) + PetscErrorCode PetscLogStageGetActive(PetscLogStage, PetscBool*) + PetscErrorCode PetscLogStageSetVisible(PetscLogStage, PetscBool) + PetscErrorCode PetscLogStageGetVisible(PetscLogStage, PetscBool*) + PetscErrorCode PetscLogStageGetId(char[], PetscLogStage*) ctypedef int PetscLogClass "PetscClassId" - PetscErrorCode PetscLogClassRegister"PetscClassIdRegister"(char[],PetscLogClass*) + PetscErrorCode PetscLogClassRegister"PetscClassIdRegister"(char[], PetscLogClass*) PetscErrorCode PetscLogClassActivate"PetscLogEventActivateClass"(PetscLogClass) PetscErrorCode PetscLogClassDeactivate"PetscLogEventDeactivateClass"(PetscLogClass) ctypedef int PetscLogEvent - PetscErrorCode PetscLogEventRegister(char[],PetscLogClass,PetscLogEvent*) - PetscErrorCode PetscLogEventBegin(PetscLogEvent,PetscObject,PetscObject,PetscObject,PetscObject) - PetscErrorCode PetscLogEventEnd(PetscLogEvent,PetscObject,PetscObject,PetscObject,PetscObject) + PetscErrorCode PetscLogEventRegister(char[], PetscLogClass, PetscLogEvent*) + PetscErrorCode PetscLogEventBegin(PetscLogEvent, PetscObject, PetscObject, PetscObject, PetscObject) + PetscErrorCode PetscLogEventEnd(PetscLogEvent, PetscObject, PetscObject, PetscObject, PetscObject) PetscErrorCode PetscLogEventActivate(PetscLogEvent) PetscErrorCode PetscLogEventDeactivate(PetscLogEvent) - PetscErrorCode PetscLogEventSetActiveAll(PetscLogEvent,PetscBool) - PetscErrorCode PetscLogEventGetPerfInfo(PetscLogStage,PetscLogEvent,PetscEventPerfInfo*) + PetscErrorCode PetscLogEventSetActiveAll(PetscLogEvent, PetscBool) + PetscErrorCode PetscLogEventGetPerfInfo(PetscLogStage, PetscLogEvent, PetscEventPerfInfo*) cdef extern from * nogil: # custom.h - PetscErrorCode PetscLogStageFindId(char[],PetscLogStage*) - PetscErrorCode PetscLogClassFindId(char[],PetscLogClass*) - PetscErrorCode PetscLogEventFindId(char[],PetscLogEvent*) - PetscErrorCode PetscLogStageFindName(PetscLogStage,char*[]) - PetscErrorCode PetscLogClassFindName(PetscLogClass,char*[]) - PetscErrorCode PetscLogEventFindName(PetscLogEvent,char*[]) + PetscErrorCode PetscLogStageFindId(char[], PetscLogStage*) + PetscErrorCode PetscLogClassFindId(char[], PetscLogClass*) + PetscErrorCode PetscLogEventFindId(char[], PetscLogEvent*) + PetscErrorCode PetscLogStageFindName(PetscLogStage, char*[]) + PetscErrorCode PetscLogClassFindName(PetscLogClass, char*[]) + PetscErrorCode PetscLogEventFindName(PetscLogEvent, char*[]) cdef inline int event_args2objs(object args, PetscObject o[4]) except -1: 
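These PetscLogStage/PetscLogEvent declarations are the backbone of petsc4py's profiling interface. A minimal sketch of the Python-level usage, assuming petsc4py's Log wrappers; the stage and event names are arbitrary:

    from petsc4py import PETSc

    PETSc.Log.begin()                    # start PETSc's default logging

    stage = PETSc.Log.Stage("Assembly")  # PetscLogStageRegister
    event = PETSc.Log.Event("MyKernel")  # PetscLogEventRegister

    stage.push()                         # PetscLogStagePush
    event.begin()                        # PetscLogEventBegin
    # ... work to be profiled ...
    event.end()                          # PetscLogEventEnd
    stage.pop()                          # PetscLogStagePop

    info = event.getPerfInfo()           # PetscLogEventGetPerfInfo

The `event_args2objs` helper whose body is re-indented just below is what lets `event.begin(...)` accept up to four petsc4py objects, packing them into the PetscObject array that PetscLogEventBegin/PetscLogEventEnd expect.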
- o[0] = o[1] = o[2] = o[3] = NULL - cdef Py_ssize_t i=0, n = len(args) - cdef Object tmp = None - if n > 4: n = 4 - for 0 <= i < n: - tmp = args[i] - if tmp is not None: - o[i] = tmp.obj[0] - return 0 + o[0] = o[1] = o[2] = o[3] = NULL + cdef Py_ssize_t i=0, n = len(args) + cdef Object tmp = None + if n > 4: n = 4 + for 0 <= i < n: + tmp = args[i] + if tmp is not None: + o[i] = tmp.obj[0] + return 0 diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscmat.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscmat.pxi index 04845bd8daa..4ab2ca8ae20 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscmat.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscmat.pxi @@ -75,7 +75,10 @@ cdef extern from * nogil: PetscMatType MATDUMMY PetscMatType MATLMVM PetscMatType MATLMVMDFP + PetscMatType MATLMVMDDFP PetscMatType MATLMVMBFGS + PetscMatType MATLMVMDBFGS + PetscMatType MATLMVMDQN PetscMatType MATLMVMSR1 PetscMatType MATLMVMBROYDEN PetscMatType MATLMVMBADBROYDEN @@ -199,218 +202,219 @@ cdef extern from * nogil: ctypedef enum PetscMatOperation "MatOperation": pass - PetscErrorCode MatView(PetscMat,PetscViewer) + PetscErrorCode MatView(PetscMat, PetscViewer) PetscErrorCode MatDestroy(PetscMat*) - PetscErrorCode MatCreate(MPI_Comm,PetscMat*) - PetscErrorCode MatCreateDenseCUDA(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],PetscMat*) - - PetscErrorCode MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscLGMap,PetscLGMap,PetscMat*) - PetscErrorCode MatISGetLocalMat(PetscMat,PetscMat*) - - PetscErrorCode MatCreateScatter(MPI_Comm,PetscScatter,PetscMat*) - PetscErrorCode MatScatterSetVecScatter(PetscMat,PetscScatter) - PetscErrorCode MatScatterGetVecScatter(PetscMat,PetscScatter*) - - PetscErrorCode MatCreateNormal(PetscMat,PetscMat*) - PetscErrorCode MatCreateTranspose(PetscMat,PetscMat*) - PetscErrorCode MatCreateNormalHermitian(PetscMat,PetscMat*) - PetscErrorCode MatCreateHermitianTranspose(PetscMat,PetscMat*) - PetscErrorCode MatCreateLRC(PetscMat,PetscMat,PetscVec,PetscMat,PetscMat*) - PetscErrorCode MatCreateSubMatrixVirtual(PetscMat,PetscIS,PetscIS,PetscMat*) - PetscErrorCode MatCreateRedundantMatrix(PetscMat,PetscInt,MPI_Comm,PetscMatReuse,PetscMat*) - PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,PetscIS[],PetscInt,PetscIS[],PetscMat[],PetscMat*) - PetscErrorCode MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void*,PetscMat*) - PetscErrorCode MatCreateH2OpusFromMat(PetscMat,PetscInt,const PetscReal[],PetscBool,PetscReal,PetscInt,PetscInt,PetscInt,PetscReal,PetscMat*) - PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscMat*) - PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscMat*) - PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],PetscMat*) - PetscErrorCode MatCreateDiagonal(PetscVec,PetscMat*) - PetscErrorCode MatCreateConstantDiagonal(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar,PetscMat*) - - PetscErrorCode MatSetSizes(PetscMat,PetscInt,PetscInt,PetscInt,PetscInt) - PetscErrorCode MatSetBlockSize(PetscMat,PetscInt) - PetscErrorCode MatSetBlockSizes(PetscMat,PetscInt,PetscInt) - PetscErrorCode MatSetType(PetscMat,PetscMatType) - PetscErrorCode MatSetVecType(PetscMat,PetscVecType) - PetscErrorCode MatGetVecType(PetscMat,PetscVecType*) - PetscErrorCode 
MatSetOption(PetscMat,PetscMatOption,PetscBool) - PetscErrorCode MatGetOption(PetscMat,PetscMatOption,PetscBool*) + PetscErrorCode MatCreate(MPI_Comm, PetscMat*) + PetscErrorCode MatCreateDenseCUDA(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscScalar[], PetscMat*) + + PetscErrorCode MatCreateIS(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt, PetscLGMap, PetscLGMap, PetscMat*) + PetscErrorCode MatISGetLocalMat(PetscMat, PetscMat*) + + PetscErrorCode MatCreateScatter(MPI_Comm, PetscScatter, PetscMat*) + PetscErrorCode MatScatterSetVecScatter(PetscMat, PetscScatter) + PetscErrorCode MatScatterGetVecScatter(PetscMat, PetscScatter*) + + PetscErrorCode MatCreateNormal(PetscMat, PetscMat*) + PetscErrorCode MatCreateTranspose(PetscMat, PetscMat*) + PetscErrorCode MatCreateNormalHermitian(PetscMat, PetscMat*) + PetscErrorCode MatCreateHermitianTranspose(PetscMat, PetscMat*) + PetscErrorCode MatCreateLRC(PetscMat, PetscMat, PetscVec, PetscMat, PetscMat*) + PetscErrorCode MatCreateSubMatrixVirtual(PetscMat, PetscIS, PetscIS, PetscMat*) + PetscErrorCode MatCreateRedundantMatrix(PetscMat, PetscInt, MPI_Comm, PetscMatReuse, PetscMat*) + PetscErrorCode MatCreateNest(MPI_Comm, PetscInt, PetscIS[], PetscInt, PetscIS[], PetscMat[], PetscMat*) + PetscErrorCode MatCreateShell(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, void*, PetscMat*) + PetscErrorCode MatCreateH2OpusFromMat(PetscMat, PetscInt, const PetscReal[], PetscBool, PetscReal, PetscInt, PetscInt, PetscInt, PetscReal, PetscMat*) + PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscScalar[], PetscMat*) + PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscScalar[], PetscMat*) + PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscScalar[], PetscInt[], PetscInt[], PetscScalar[], PetscMat*) + PetscErrorCode MatCreateDiagonal(PetscVec, PetscMat*) + PetscErrorCode MatCreateConstantDiagonal(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscScalar, PetscMat*) + + PetscErrorCode MatSetSizes(PetscMat, PetscInt, PetscInt, PetscInt, PetscInt) + PetscErrorCode MatSetBlockSize(PetscMat, PetscInt) + PetscErrorCode MatSetBlockSizes(PetscMat, PetscInt, PetscInt) + PetscErrorCode MatSetVariableBlockSizes(PetscMat, PetscInt, PetscInt[]) + PetscErrorCode MatSetType(PetscMat, PetscMatType) + PetscErrorCode MatSetVecType(PetscMat, PetscVecType) + PetscErrorCode MatGetVecType(PetscMat, PetscVecType*) + PetscErrorCode MatSetOption(PetscMat, PetscMatOption, PetscBool) + PetscErrorCode MatGetOption(PetscMat, PetscMatOption, PetscBool*) enum: MAT_SKIP_ALLOCATION - PetscErrorCode MatSeqAIJSetPreallocation (PetscMat,PetscInt,PetscInt[]) - PetscErrorCode MatMPIAIJSetPreallocation (PetscMat,PetscInt,PetscInt[],PetscInt,PetscInt[]) - PetscErrorCode MatSeqBAIJSetPreallocation (PetscMat,PetscInt,PetscInt,PetscInt[]) - PetscErrorCode MatMPIBAIJSetPreallocation (PetscMat,PetscInt,PetscInt,PetscInt[],PetscInt,PetscInt[]) - PetscErrorCode MatSeqSBAIJSetPreallocation(PetscMat,PetscInt,PetscInt,PetscInt[]) - PetscErrorCode MatMPISBAIJSetPreallocation(PetscMat,PetscInt,PetscInt,PetscInt[],PetscInt,PetscInt[]) - PetscErrorCode MatSeqAIJSetPreallocationCSR (PetscMat, PetscInt[],PetscInt[],PetscScalar[]) - PetscErrorCode MatMPIAIJSetPreallocationCSR (PetscMat, PetscInt[],PetscInt[],PetscScalar[]) - PetscErrorCode MatSeqBAIJSetPreallocationCSR 
(PetscMat,PetscInt,PetscInt[],PetscInt[],PetscScalar[]) - PetscErrorCode MatMPIBAIJSetPreallocationCSR (PetscMat,PetscInt,PetscInt[],PetscInt[],PetscScalar[]) - PetscErrorCode MatSeqSBAIJSetPreallocationCSR(PetscMat,PetscInt,PetscInt[],PetscInt[],PetscScalar[]) - PetscErrorCode MatMPISBAIJSetPreallocationCSR(PetscMat,PetscInt,PetscInt[],PetscInt[],PetscScalar[]) - PetscErrorCode MatSeqDenseSetPreallocation(PetscMat,PetscScalar[]) - PetscErrorCode MatMPIDenseSetPreallocation(PetscMat,PetscScalar[]) - PetscErrorCode MatISSetPreallocation(PetscMat,PetscInt,PetscInt[],PetscInt,PetscInt[]) - - PetscErrorCode MatSetOptionsPrefix(PetscMat,char[]) - PetscErrorCode MatAppendOptionsPrefix(PetscMat,char[]) - PetscErrorCode MatGetOptionsPrefix(PetscMat,char*[]) + PetscErrorCode MatSeqAIJSetPreallocation (PetscMat, PetscInt, PetscInt[]) + PetscErrorCode MatMPIAIJSetPreallocation (PetscMat, PetscInt, PetscInt[], PetscInt, PetscInt[]) + PetscErrorCode MatSeqBAIJSetPreallocation (PetscMat, PetscInt, PetscInt, PetscInt[]) + PetscErrorCode MatMPIBAIJSetPreallocation (PetscMat, PetscInt, PetscInt, PetscInt[], PetscInt, PetscInt[]) + PetscErrorCode MatSeqSBAIJSetPreallocation(PetscMat, PetscInt, PetscInt, PetscInt[]) + PetscErrorCode MatMPISBAIJSetPreallocation(PetscMat, PetscInt, PetscInt, PetscInt[], PetscInt, PetscInt[]) + PetscErrorCode MatSeqAIJSetPreallocationCSR (PetscMat, PetscInt[], PetscInt[], PetscScalar[]) + PetscErrorCode MatMPIAIJSetPreallocationCSR (PetscMat, PetscInt[], PetscInt[], PetscScalar[]) + PetscErrorCode MatSeqBAIJSetPreallocationCSR (PetscMat, PetscInt, PetscInt[], PetscInt[], PetscScalar[]) + PetscErrorCode MatMPIBAIJSetPreallocationCSR (PetscMat, PetscInt, PetscInt[], PetscInt[], PetscScalar[]) + PetscErrorCode MatSeqSBAIJSetPreallocationCSR(PetscMat, PetscInt, PetscInt[], PetscInt[], PetscScalar[]) + PetscErrorCode MatMPISBAIJSetPreallocationCSR(PetscMat, PetscInt, PetscInt[], PetscInt[], PetscScalar[]) + PetscErrorCode MatSeqDenseSetPreallocation(PetscMat, PetscScalar[]) + PetscErrorCode MatMPIDenseSetPreallocation(PetscMat, PetscScalar[]) + PetscErrorCode MatISSetPreallocation(PetscMat, PetscInt, PetscInt[], PetscInt, PetscInt[]) + + PetscErrorCode MatSetOptionsPrefix(PetscMat, char[]) + PetscErrorCode MatAppendOptionsPrefix(PetscMat, char[]) + PetscErrorCode MatGetOptionsPrefix(PetscMat, char*[]) PetscErrorCode MatSetFromOptions(PetscMat) PetscErrorCode MatSetUp(PetscMat) - PetscErrorCode MatGetType(PetscMat,PetscMatType*) - PetscErrorCode MatGetSize(PetscMat,PetscInt*,PetscInt*) - PetscErrorCode MatGetLocalSize(PetscMat,PetscInt*,PetscInt*) - PetscErrorCode MatGetBlockSize(PetscMat,PetscInt*) - PetscErrorCode MatGetBlockSizes(PetscMat,PetscInt*,PetscInt*) - PetscErrorCode MatGetOwnershipRange(PetscMat,PetscInt*,PetscInt*) - PetscErrorCode MatGetOwnershipRanges(PetscMat,const PetscInt*[]) - PetscErrorCode MatGetOwnershipRangeColumn(PetscMat,PetscInt*,PetscInt*) - PetscErrorCode MatGetOwnershipRangesColumn(PetscMat,const PetscInt*[]) - PetscErrorCode MatGetOwnershipIS(PetscMat,PetscIS*,PetscIS*) - PetscErrorCode MatNestGetISs(PetscMat,PetscIS*,PetscIS*) - PetscErrorCode MatNestGetLocalISs(PetscMat,PetscIS*,PetscIS*) - PetscErrorCode MatNestGetSize(PetscMat,PetscInt*,PetscInt*) - PetscErrorCode MatNestGetSubMat(PetscMat,PetscInt,PetscInt,PetscMat*) - PetscErrorCode MatNestSetVecType(PetscMat,PetscVecType) - - PetscErrorCode MatEqual(PetscMat,PetscMat,PetscBool*) - PetscErrorCode MatLoad(PetscMat,PetscViewer) - PetscErrorCode 
MatDuplicate(PetscMat,PetscMatDuplicateOption,PetscMat*) - PetscErrorCode MatCopy(PetscMat,PetscMat,PetscMatStructure) - PetscErrorCode MatTranspose(PetscMat,PetscMatReuse,PetscMat*) - PetscErrorCode MatTransposeSetPrecursor(PetscMat,PetscMat) - PetscErrorCode MatHermitianTranspose(PetscMat,PetscMatReuse,PetscMat*) - PetscErrorCode MatConvert(PetscMat,PetscMatType,PetscMatReuse,PetscMat*) - - PetscErrorCode MatIsSymmetric(PetscMat,PetscReal,PetscBool*) - PetscErrorCode MatIsStructurallySymmetric(PetscMat,PetscBool*) - PetscErrorCode MatIsHermitian(PetscMat,PetscReal,PetscBool*) - PetscErrorCode MatIsSymmetricKnown(PetscMat,PetscBool*,PetscBool*) - PetscErrorCode MatIsHermitianKnown(PetscMat,PetscBool*,PetscBool*) - PetscErrorCode MatIsTranspose(PetscMat,PetscMat,PetscReal,PetscBool*) - - PetscErrorCode MatCreateVecs(PetscMat,PetscVec*,PetscVec*) - - PetscErrorCode MatSetValue(PetscMat,PetscInt,PetscInt,PetscScalar,PetscInsertMode) - PetscErrorCode MatSetValues(PetscMat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - PetscErrorCode MatSetValuesBlocked(PetscMat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - - PetscErrorCode MatSetLocalToGlobalMapping(PetscMat,PetscLGMap,PetscLGMap) - PetscErrorCode MatGetLocalToGlobalMapping(PetscMat,PetscLGMap*,PetscLGMap*) - PetscErrorCode MatSetValueLocal(PetscMat,PetscInt,PetscInt,PetscScalar,PetscInsertMode) - PetscErrorCode MatSetValuesLocal(PetscMat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - PetscErrorCode MatSetValuesBlockedLocal(PetscMat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - - PetscErrorCode MatSetStencil(PetscMat,PetscInt,const PetscInt[],const PetscInt[],PetscInt) + PetscErrorCode MatGetType(PetscMat, PetscMatType*) + PetscErrorCode MatGetSize(PetscMat, PetscInt*, PetscInt*) + PetscErrorCode MatGetLocalSize(PetscMat, PetscInt*, PetscInt*) + PetscErrorCode MatGetBlockSize(PetscMat, PetscInt*) + PetscErrorCode MatGetBlockSizes(PetscMat, PetscInt*, PetscInt*) + PetscErrorCode MatGetOwnershipRange(PetscMat, PetscInt*, PetscInt*) + PetscErrorCode MatGetOwnershipRanges(PetscMat, const PetscInt*[]) + PetscErrorCode MatGetOwnershipRangeColumn(PetscMat, PetscInt*, PetscInt*) + PetscErrorCode MatGetOwnershipRangesColumn(PetscMat, const PetscInt*[]) + PetscErrorCode MatGetOwnershipIS(PetscMat, PetscIS*, PetscIS*) + PetscErrorCode MatNestGetISs(PetscMat, PetscIS*, PetscIS*) + PetscErrorCode MatNestGetLocalISs(PetscMat, PetscIS*, PetscIS*) + PetscErrorCode MatNestGetSize(PetscMat, PetscInt*, PetscInt*) + PetscErrorCode MatNestGetSubMat(PetscMat, PetscInt, PetscInt, PetscMat*) + PetscErrorCode MatNestSetVecType(PetscMat, PetscVecType) + + PetscErrorCode MatEqual(PetscMat, PetscMat, PetscBool*) + PetscErrorCode MatLoad(PetscMat, PetscViewer) + PetscErrorCode MatDuplicate(PetscMat, PetscMatDuplicateOption, PetscMat*) + PetscErrorCode MatCopy(PetscMat, PetscMat, PetscMatStructure) + PetscErrorCode MatTranspose(PetscMat, PetscMatReuse, PetscMat*) + PetscErrorCode MatTransposeSetPrecursor(PetscMat, PetscMat) + PetscErrorCode MatHermitianTranspose(PetscMat, PetscMatReuse, PetscMat*) + PetscErrorCode MatConvert(PetscMat, PetscMatType, PetscMatReuse, PetscMat*) + + PetscErrorCode MatIsSymmetric(PetscMat, PetscReal, PetscBool*) + PetscErrorCode MatIsStructurallySymmetric(PetscMat, PetscBool*) + PetscErrorCode MatIsHermitian(PetscMat, PetscReal, PetscBool*) + PetscErrorCode 
MatIsSymmetricKnown(PetscMat, PetscBool*, PetscBool*) + PetscErrorCode MatIsHermitianKnown(PetscMat, PetscBool*, PetscBool*) + PetscErrorCode MatIsTranspose(PetscMat, PetscMat, PetscReal, PetscBool*) + + PetscErrorCode MatCreateVecs(PetscMat, PetscVec*, PetscVec*) + + PetscErrorCode MatSetValue(PetscMat, PetscInt, PetscInt, PetscScalar, PetscInsertMode) + PetscErrorCode MatSetValues(PetscMat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], PetscInsertMode) + PetscErrorCode MatSetValuesBlocked(PetscMat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], PetscInsertMode) + + PetscErrorCode MatSetLocalToGlobalMapping(PetscMat, PetscLGMap, PetscLGMap) + PetscErrorCode MatGetLocalToGlobalMapping(PetscMat, PetscLGMap*, PetscLGMap*) + PetscErrorCode MatSetValueLocal(PetscMat, PetscInt, PetscInt, PetscScalar, PetscInsertMode) + PetscErrorCode MatSetValuesLocal(PetscMat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], PetscInsertMode) + PetscErrorCode MatSetValuesBlockedLocal(PetscMat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], PetscInsertMode) + + PetscErrorCode MatSetStencil(PetscMat, PetscInt, const PetscInt[], const PetscInt[], PetscInt) ctypedef struct PetscMatStencil "MatStencil": - PetscInt k,j,i,c - PetscErrorCode MatSetValuesStencil(PetscMat,PetscInt,const PetscMatStencil[],PetscInt,const PetscMatStencil[],const PetscScalar[],PetscInsertMode) - PetscErrorCode MatSetValuesBlockedStencil(PetscMat,PetscInt,const PetscMatStencil[],PetscInt,const PetscMatStencil[],const PetscScalar[],PetscInsertMode) - - PetscErrorCode MatGetValues(PetscMat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]) - PetscErrorCode MatGetRow(PetscMat,PetscInt,PetscInt*,const PetscInt*[],const PetscScalar*[]) - PetscErrorCode MatRestoreRow(PetscMat,PetscInt,PetscInt*,const PetscInt*[],const PetscScalar*[]) - PetscErrorCode MatGetRowIJ(PetscMat,PetscInt,PetscBool,PetscBool,PetscInt*,const PetscInt*[],const PetscInt*[],PetscBool*) - PetscErrorCode MatRestoreRowIJ(PetscMat,PetscInt,PetscBool,PetscBool,PetscInt*,const PetscInt*[],const PetscInt*[],PetscBool*) - PetscErrorCode MatGetColumnIJ(PetscMat,PetscInt,PetscBool,PetscBool,PetscInt*,const PetscInt*[],const PetscInt*[],PetscBool*) - PetscErrorCode MatRestoreColumnIJ(PetscMat,PetscInt,PetscBool,PetscBool,PetscInt*,const PetscInt*[],const PetscInt*[],PetscBool*) + PetscInt k, j, i, c + PetscErrorCode MatSetValuesStencil(PetscMat, PetscInt, const PetscMatStencil[], PetscInt, const PetscMatStencil[], const PetscScalar[], PetscInsertMode) + PetscErrorCode MatSetValuesBlockedStencil(PetscMat, PetscInt, const PetscMatStencil[], PetscInt, const PetscMatStencil[], const PetscScalar[], PetscInsertMode) + + PetscErrorCode MatGetValues(PetscMat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], PetscScalar[]) + PetscErrorCode MatGetRow(PetscMat, PetscInt, PetscInt*, const PetscInt*[], const PetscScalar*[]) + PetscErrorCode MatRestoreRow(PetscMat, PetscInt, PetscInt*, const PetscInt*[], const PetscScalar*[]) + PetscErrorCode MatGetRowIJ(PetscMat, PetscInt, PetscBool, PetscBool, PetscInt*, const PetscInt*[], const PetscInt*[], PetscBool*) + PetscErrorCode MatRestoreRowIJ(PetscMat, PetscInt, PetscBool, PetscBool, PetscInt*, const PetscInt*[], const PetscInt*[], PetscBool*) + PetscErrorCode MatGetColumnIJ(PetscMat, PetscInt, PetscBool, PetscBool, PetscInt*, const PetscInt*[], const PetscInt*[], PetscBool*) + PetscErrorCode 
MatRestoreColumnIJ(PetscMat, PetscInt, PetscBool, PetscBool, PetscInt*, const PetscInt*[], const PetscInt*[], PetscBool*) PetscErrorCode MatZeroEntries(PetscMat) PetscErrorCode MatStoreValues(PetscMat) PetscErrorCode MatRetrieveValues(PetscMat) - PetscErrorCode MatAssemblyBegin(PetscMat,PetscMatAssemblyType) - PetscErrorCode MatAssemblyEnd(PetscMat,PetscMatAssemblyType) - PetscErrorCode MatAssembled(PetscMat,PetscBool*) - - PetscErrorCode MatDiagonalSet(PetscMat,PetscVec,PetscInsertMode) - PetscErrorCode MatDiagonalScale(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatScale(PetscMat,PetscScalar) - PetscErrorCode MatShift(PetscMat,PetscScalar) - PetscErrorCode MatFilter(PetscMat,PetscReal,PetscBool,PetscBool) - PetscErrorCode MatSetRandom(PetscMat,PetscRandom) - PetscErrorCode MatAXPY(PetscMat,PetscScalar,PetscMat,PetscMatStructure) - PetscErrorCode MatAYPX(PetscMat,PetscScalar,PetscMat,PetscMatStructure) - PetscErrorCode MatMatMult(PetscMat,PetscMat,PetscMatReuse,PetscReal,PetscMat*) - PetscErrorCode MatMatTransposeMult(PetscMat,PetscMat,PetscMatReuse,PetscReal,PetscMat*) - PetscErrorCode MatTransposeMatMult(PetscMat,PetscMat,PetscMatReuse,PetscReal,PetscMat*) - - PetscErrorCode MatPtAP(PetscMat,PetscMat,PetscMatReuse,PetscReal,PetscMat*) - PetscErrorCode MatRARt(PetscMat,PetscMat,PetscMatReuse,PetscReal,PetscMat*) - PetscErrorCode MatMatMatMult(PetscMat,PetscMat,PetscMat,PetscMatReuse,PetscReal,PetscMat*) - PetscErrorCode MatSeqAIJKron(PetscMat,PetscMat,PetscMatReuse,PetscMat*) - - PetscErrorCode MatInterpolate(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatInterpolateAdd(PetscMat,PetscVec,PetscVec,PetscVec) - PetscErrorCode MatRestrict(PetscMat,PetscVec,PetscVec) - - PetscErrorCode MatPermute(PetscMat,PetscIS,PetscIS,PetscMat*) - PetscErrorCode MatPermuteSparsify(PetscMat,PetscInt,PetscReal,PetscReal,PetscIS,PetscIS,PetscMat*) - - PetscErrorCode MatMerge(MPI_Comm,PetscMat,PetscInt,PetscMatReuse,PetscMat*) - PetscErrorCode MatCreateSubMatrix(PetscMat,PetscIS,PetscIS,PetscMatReuse,PetscMat*) - PetscErrorCode MatCreateSubMatrices(PetscMat,PetscInt,PetscIS[],PetscIS[],PetscMatReuse,PetscMat*[]) - PetscErrorCode MatIncreaseOverlap(PetscMat,PetscInt,PetscIS[],PetscInt) - PetscErrorCode MatGetDiagonalBlock(PetscMat,PetscMat*) - PetscErrorCode MatGetLocalSubMatrix(PetscMat,PetscIS,PetscIS,PetscMat*) - PetscErrorCode MatRestoreLocalSubMatrix(PetscMat,PetscIS,PetscIS,PetscMat*) - PetscErrorCode MatDestroyMatrices(PetscInt,PetscMat*[]) + PetscErrorCode MatAssemblyBegin(PetscMat, PetscMatAssemblyType) + PetscErrorCode MatAssemblyEnd(PetscMat, PetscMatAssemblyType) + PetscErrorCode MatAssembled(PetscMat, PetscBool*) + + PetscErrorCode MatDiagonalSet(PetscMat, PetscVec, PetscInsertMode) + PetscErrorCode MatDiagonalScale(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatScale(PetscMat, PetscScalar) + PetscErrorCode MatShift(PetscMat, PetscScalar) + PetscErrorCode MatFilter(PetscMat, PetscReal, PetscBool, PetscBool) + PetscErrorCode MatSetRandom(PetscMat, PetscRandom) + PetscErrorCode MatAXPY(PetscMat, PetscScalar, PetscMat, PetscMatStructure) + PetscErrorCode MatAYPX(PetscMat, PetscScalar, PetscMat, PetscMatStructure) + PetscErrorCode MatMatMult(PetscMat, PetscMat, PetscMatReuse, PetscReal, PetscMat*) + PetscErrorCode MatMatTransposeMult(PetscMat, PetscMat, PetscMatReuse, PetscReal, PetscMat*) + PetscErrorCode MatTransposeMatMult(PetscMat, PetscMat, PetscMatReuse, PetscReal, PetscMat*) + + PetscErrorCode MatPtAP(PetscMat, PetscMat, PetscMatReuse, PetscReal, PetscMat*) + PetscErrorCode 
MatRARt(PetscMat, PetscMat, PetscMatReuse, PetscReal, PetscMat*) + PetscErrorCode MatMatMatMult(PetscMat, PetscMat, PetscMat, PetscMatReuse, PetscReal, PetscMat*) + PetscErrorCode MatSeqAIJKron(PetscMat, PetscMat, PetscMatReuse, PetscMat*) + + PetscErrorCode MatInterpolate(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatInterpolateAdd(PetscMat, PetscVec, PetscVec, PetscVec) + PetscErrorCode MatRestrict(PetscMat, PetscVec, PetscVec) + + PetscErrorCode MatPermute(PetscMat, PetscIS, PetscIS, PetscMat*) + PetscErrorCode MatPermuteSparsify(PetscMat, PetscInt, PetscReal, PetscReal, PetscIS, PetscIS, PetscMat*) + + PetscErrorCode MatMerge(MPI_Comm, PetscMat, PetscInt, PetscMatReuse, PetscMat*) + PetscErrorCode MatCreateSubMatrix(PetscMat, PetscIS, PetscIS, PetscMatReuse, PetscMat*) + PetscErrorCode MatCreateSubMatrices(PetscMat, PetscInt, PetscIS[], PetscIS[], PetscMatReuse, PetscMat*[]) + PetscErrorCode MatIncreaseOverlap(PetscMat, PetscInt, PetscIS[], PetscInt) + PetscErrorCode MatGetDiagonalBlock(PetscMat, PetscMat*) + PetscErrorCode MatGetLocalSubMatrix(PetscMat, PetscIS, PetscIS, PetscMat*) + PetscErrorCode MatRestoreLocalSubMatrix(PetscMat, PetscIS, PetscIS, PetscMat*) + PetscErrorCode MatDestroyMatrices(PetscInt, PetscMat*[]) PetscErrorCode MatConjugate(PetscMat) PetscErrorCode MatRealPart(PetscMat) PetscErrorCode MatImaginaryPart(PetscMat) - PetscErrorCode MatZeroRows(PetscMat,PetscInt,PetscInt[],PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatZeroRowsLocal(PetscMat,PetscInt,PetscInt[],PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatZeroRowsIS(PetscMat,PetscIS,PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatZeroRowsLocalIS(PetscMat,PetscIS,PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatFindZeroRows(PetscMat,PetscIS*) + PetscErrorCode MatZeroRows(PetscMat, PetscInt, PetscInt[], PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatZeroRowsLocal(PetscMat, PetscInt, PetscInt[], PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatZeroRowsIS(PetscMat, PetscIS, PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatZeroRowsLocalIS(PetscMat, PetscIS, PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatFindZeroRows(PetscMat, PetscIS*) - PetscErrorCode MatZeroRowsColumns(PetscMat,PetscInt,PetscInt[],PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatZeroRowsColumnsLocal(PetscMat,PetscInt,PetscInt[],PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatZeroRowsColumnsIS(PetscMat,PetscIS,PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatZeroRowsColumnsLocalIS(PetscMat,PetscIS,PetscScalar,PetscVec,PetscVec) - PetscErrorCode MatZeroRowsColumnsStencil(PetscMat,PetscInt,const PetscMatStencil[],PetscScalar,PetscVec,PetscVec) + PetscErrorCode MatZeroRowsColumns(PetscMat, PetscInt, PetscInt[], PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatZeroRowsColumnsLocal(PetscMat, PetscInt, PetscInt[], PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatZeroRowsColumnsIS(PetscMat, PetscIS, PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatZeroRowsColumnsLocalIS(PetscMat, PetscIS, PetscScalar, PetscVec, PetscVec) + PetscErrorCode MatZeroRowsColumnsStencil(PetscMat, PetscInt, const PetscMatStencil[], PetscScalar, PetscVec, PetscVec) - PetscErrorCode MatGetDiagonal(PetscMat,PetscVec) - PetscErrorCode MatGetRowSum(PetscMat,PetscVec) - PetscErrorCode MatInvertBlockDiagonal(PetscMat,const PetscScalar**) - PetscErrorCode MatGetRowMax(PetscMat,PetscVec,PetscInt[]) - PetscErrorCode MatGetRowMaxAbs(PetscMat,PetscVec,PetscInt[]) - PetscErrorCode MatGetColumnVector(PetscMat,PetscVec,PetscInt) + 
PetscErrorCode MatGetDiagonal(PetscMat, PetscVec) + PetscErrorCode MatGetRowSum(PetscMat, PetscVec) + PetscErrorCode MatInvertBlockDiagonal(PetscMat, const PetscScalar**) + PetscErrorCode MatGetRowMax(PetscMat, PetscVec, PetscInt[]) + PetscErrorCode MatGetRowMaxAbs(PetscMat, PetscVec, PetscInt[]) + PetscErrorCode MatGetColumnVector(PetscMat, PetscVec, PetscInt) - PetscErrorCode MatNorm(PetscMat,PetscNormType,PetscReal*) + PetscErrorCode MatNorm(PetscMat, PetscNormType, PetscReal*) - PetscErrorCode MatMult(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatMultAdd(PetscMat,PetscVec,PetscVec,PetscVec) - PetscErrorCode MatMultTranspose(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatMultTransposeAdd(PetscMat,PetscVec,PetscVec,PetscVec) + PetscErrorCode MatMult(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatMultAdd(PetscMat, PetscVec, PetscVec, PetscVec) + PetscErrorCode MatMultTranspose(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatMultTransposeAdd(PetscMat, PetscVec, PetscVec, PetscVec) # FIXME: Why? - PetscErrorCode MatMultHermitian"MatMultHermitianTranspose"(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatMultHermitianAdd"MatMultHermitianTransposeAdd"(PetscMat,PetscVec,PetscVec,PetscVec) + PetscErrorCode MatMultHermitian"MatMultHermitianTranspose"(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatMultHermitianAdd"MatMultHermitianTransposeAdd"(PetscMat, PetscVec, PetscVec, PetscVec) - PetscErrorCode MatBindToCPU(PetscMat,PetscBool) - PetscErrorCode MatBoundToCPU(PetscMat,PetscBool*) + PetscErrorCode MatBindToCPU(PetscMat, PetscBool) + PetscErrorCode MatBoundToCPU(PetscMat, PetscBool*) - PetscErrorCode MatSOR(PetscMat,PetscVec,PetscReal,PetscMatSORType,PetscReal,PetscInt,PetscInt,PetscVec) + PetscErrorCode MatSOR(PetscMat, PetscVec, PetscReal, PetscMatSORType, PetscReal, PetscInt, PetscInt, PetscVec) - PetscErrorCode MatGetOrdering(PetscMat,PetscMatOrderingType,PetscIS*,PetscIS*) - PetscErrorCode MatReorderForNonzeroDiagonal(PetscMat,PetscReal,PetscIS,PetscIS) + PetscErrorCode MatGetOrdering(PetscMat, PetscMatOrderingType, PetscIS*, PetscIS*) + PetscErrorCode MatReorderForNonzeroDiagonal(PetscMat, PetscReal, PetscIS, PetscIS) - PetscErrorCode MatISSetAllowRepeated(PetscMat,PetscBool) - PetscErrorCode MatISGetAllowRepeated(PetscMat,PetscBool*) - PetscErrorCode MatISFixLocalEmpty(PetscMat,PetscBool) - PetscErrorCode MatISGetLocalMat(PetscMat,PetscMat*) - PetscErrorCode MatISRestoreLocalMat(PetscMat,PetscMat*) - PetscErrorCode MatISSetLocalMat(PetscMat,PetscMat) + PetscErrorCode MatISSetAllowRepeated(PetscMat, PetscBool) + PetscErrorCode MatISGetAllowRepeated(PetscMat, PetscBool*) + PetscErrorCode MatISFixLocalEmpty(PetscMat, PetscBool) + PetscErrorCode MatISGetLocalMat(PetscMat, PetscMat*) + PetscErrorCode MatISRestoreLocalMat(PetscMat, PetscMat*) + PetscErrorCode MatISSetLocalMat(PetscMat, PetscMat) PetscErrorCode MatH2OpusOrthogonalize(PetscMat) - PetscErrorCode MatH2OpusCompress(PetscMat,PetscReal) - PetscErrorCode MatH2OpusLowRankUpdate(PetscMat,PetscMat,PetscMat,PetscScalar) + PetscErrorCode MatH2OpusCompress(PetscMat, PetscReal) + PetscErrorCode MatH2OpusLowRankUpdate(PetscMat, PetscMat, PetscMat, PetscScalar) - PetscErrorCode MatMissingDiagonal(Mat,PetscBool*,PetscInt*) + PetscErrorCode MatMissingDiagonal(Mat, PetscBool*, PetscInt*) ctypedef enum PetscMatFactorShiftType "MatFactorShiftType": MAT_SHIFT_NONE @@ -434,117 +438,117 @@ cdef extern from * nogil: PetscLogDouble fill_ratio_given, fill_ratio_needed PetscLogDouble factor_mallocs - PetscErrorCode 
MatGetInfo(PetscMat,PetscMatInfoType,PetscMatInfo*) + PetscErrorCode MatGetInfo(PetscMat, PetscMatInfoType, PetscMatInfo*) PetscErrorCode MatFactorInfoInitialize(PetscMatFactorInfo*) - PetscErrorCode MatCholeskyFactor(PetscMat,PetscIS,PetscMatFactorInfo*) - PetscErrorCode MatCholeskyFactorSymbolic(PetscMat,PetscIS,PetscMatFactorInfo*,PetscMat*) - PetscErrorCode MatCholeskyFactorNumeric(PetscMat,PetscMatFactorInfo*,PetscMat*) - PetscErrorCode MatLUFactor(PetscMat,PetscIS,PetscIS,PetscMatFactorInfo*) - PetscErrorCode MatILUFactor(PetscMat,PetscIS,PetscIS,PetscMatFactorInfo*) - PetscErrorCode MatICCFactor(PetscMat,PetscIS,PetscMatFactorInfo*) - PetscErrorCode MatLUFactorSymbolic(PetscMat,PetscIS,PetscIS,PetscMatFactorInfo*,PetscMat*) - PetscErrorCode MatILUFactorSymbolic(PetscMat,PetscIS,PetscIS,PetscMatFactorInfo*,PetscMat*) - PetscErrorCode MatICCFactorSymbolic(PetscMat,PetscIS,PetscMatFactorInfo*,PetscMat*) - PetscErrorCode MatLUFactorNumeric(PetscMat,PetscMatFactorInfo*,PetscMat*) - PetscErrorCode MatILUDTFactor(PetscMat,PetscIS,PetscIS,PetscMatFactorInfo*,PetscMat*) - PetscErrorCode MatGetInertia(PetscMat,PetscInt*,PetscInt*,PetscInt*) + PetscErrorCode MatCholeskyFactor(PetscMat, PetscIS, PetscMatFactorInfo*) + PetscErrorCode MatCholeskyFactorSymbolic(PetscMat, PetscIS, PetscMatFactorInfo*, PetscMat*) + PetscErrorCode MatCholeskyFactorNumeric(PetscMat, PetscMatFactorInfo*, PetscMat*) + PetscErrorCode MatLUFactor(PetscMat, PetscIS, PetscIS, PetscMatFactorInfo*) + PetscErrorCode MatILUFactor(PetscMat, PetscIS, PetscIS, PetscMatFactorInfo*) + PetscErrorCode MatICCFactor(PetscMat, PetscIS, PetscMatFactorInfo*) + PetscErrorCode MatLUFactorSymbolic(PetscMat, PetscIS, PetscIS, PetscMatFactorInfo*, PetscMat*) + PetscErrorCode MatILUFactorSymbolic(PetscMat, PetscIS, PetscIS, PetscMatFactorInfo*, PetscMat*) + PetscErrorCode MatICCFactorSymbolic(PetscMat, PetscIS, PetscMatFactorInfo*, PetscMat*) + PetscErrorCode MatLUFactorNumeric(PetscMat, PetscMatFactorInfo*, PetscMat*) + PetscErrorCode MatILUDTFactor(PetscMat, PetscIS, PetscIS, PetscMatFactorInfo*, PetscMat*) + PetscErrorCode MatGetInertia(PetscMat, PetscInt*, PetscInt*, PetscInt*) PetscErrorCode MatSetUnfactored(PetscMat) - PetscErrorCode MatLRCGetMats(PetscMat,PetscMat*,PetscMat*,PetscVec*,PetscMat*) - PetscErrorCode MatLRCSetMats(PetscMat,PetscMat,PetscMat,PetscVec,PetscMat) - - PetscErrorCode MatMumpsSetIcntl(PetscMat,PetscInt,PetscInt) - PetscErrorCode MatMumpsGetIcntl(PetscMat,PetscInt,PetscInt*) - PetscErrorCode MatMumpsSetCntl(PetscMat,PetscInt,PetscReal) - PetscErrorCode MatMumpsGetCntl(PetscMat,PetscInt,PetscReal*) - PetscErrorCode MatMumpsGetInfo(PetscMat,PetscInt,PetscInt*) - PetscErrorCode MatMumpsGetInfog(PetscMat,PetscInt,PetscInt*) - PetscErrorCode MatMumpsGetRinfo(PetscMat,PetscInt,PetscReal*) - PetscErrorCode MatMumpsGetRinfog(PetscMat,PetscInt,PetscReal*) - - PetscErrorCode MatForwardSolve(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatBackwardSolve(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatSolve(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatSolveTranspose(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatSolveAdd(PetscMat,PetscVec,PetscVec,PetscVec) - PetscErrorCode MatSolveTransposeAdd(PetscMat,PetscVec,PetscVec,PetscVec) - PetscErrorCode MatMatSolve(PetscMat,PetscMat,PetscMat) - - PetscErrorCode MatComputeExplicitOperator(PetscMat,PetscMat*) - PetscErrorCode MatUseScaledForm(PetscMat,PetscBool) - PetscErrorCode MatScaleSystem(PetscMat,PetscVec,PetscVec) - PetscErrorCode MatUnScaleSystem(PetscMat,PetscVec,PetscVec) - 
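For orientation, the factor/solve declarations above (MatLUFactor*, MatCholeskyFactor*, MatSolve*) are what petsc4py's high-level solve path ends up calling. A minimal sketch, assuming a hypothetical 4x4 diagonal system on a single process (illustrative only, not part of this patch):

from petsc4py import PETSc

# Hypothetical 4x4 diagonal operator.
A = PETSc.Mat().createAIJ([4, 4], comm=PETSc.COMM_SELF)
A.setUp()
for i in range(4):
    A.setValue(i, i, 2.0)
A.assemble()

b = A.createVecLeft(); b.set(1.0)
x = A.createVecRight()

# KSP(PREONLY) + PC(LU) drives MatLUFactorSymbolic/MatLUFactorNumeric,
# and the solve then applies MatSolve with the stored factors.
ksp = PETSc.KSP().create(PETSc.COMM_SELF)
ksp.setOperators(A)
ksp.setType(PETSc.KSP.Type.PREONLY)
ksp.getPC().setType(PETSc.PC.Type.LU)
ksp.solve(b, x)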
- PetscErrorCode MatDenseSetLDA(PetscMat,PetscInt) - PetscErrorCode MatDenseGetLDA(PetscMat,PetscInt*) - PetscErrorCode MatDenseGetLocalMatrix(PetscMat,PetscMat*) - PetscErrorCode MatDenseGetArray(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseRestoreArray(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseGetArrayWrite(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseRestoreArrayWrite(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseGetArrayRead(PetscMat,const PetscScalar*[]) - PetscErrorCode MatDenseRestoreArrayRead(PetscMat,const PetscScalar*[]) - PetscErrorCode MatDenseGetColumnVec(PetscMat,PetscInt,PetscVec*) - PetscErrorCode MatDenseRestoreColumnVec(PetscMat,PetscInt,PetscVec*) - PetscErrorCode MatDenseGetColumnVecRead(PetscMat,PetscInt,PetscVec*) - PetscErrorCode MatDenseRestoreColumnVecRead(PetscMat,PetscInt,PetscVec*) - PetscErrorCode MatDenseGetColumnVecWrite(PetscMat,PetscInt,PetscVec*) - PetscErrorCode MatDenseRestoreColumnVecWrite(PetscMat,PetscInt,PetscVec*) - PetscErrorCode MatDenseCUDAGetArray(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseCUDARestoreArray(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseCUDAGetArrayWrite(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseCUDARestoreArrayWrite(PetscMat,PetscScalar*[]) - PetscErrorCode MatDenseCUDAGetArrayRead(PetscMat,const PetscScalar*[]) - PetscErrorCode MatDenseCUDARestoreArrayRead(PetscMat,const PetscScalar*[]) - - PetscErrorCode MatProductGetType(PetscMat,PetscMatProductType*) - PetscErrorCode MatProductGetMats(PetscMat,PetscMat*,PetscMat*,PetscMat*) - - PetscErrorCode MatPythonSetType(PetscMat,char[]) - PetscErrorCode MatPythonGetType(PetscMat,char*[]) + PetscErrorCode MatLRCGetMats(PetscMat, PetscMat*, PetscMat*, PetscVec*, PetscMat*) + PetscErrorCode MatLRCSetMats(PetscMat, PetscMat, PetscMat, PetscVec, PetscMat) + + PetscErrorCode MatMumpsSetIcntl(PetscMat, PetscInt, PetscInt) + PetscErrorCode MatMumpsGetIcntl(PetscMat, PetscInt, PetscInt*) + PetscErrorCode MatMumpsSetCntl(PetscMat, PetscInt, PetscReal) + PetscErrorCode MatMumpsGetCntl(PetscMat, PetscInt, PetscReal*) + PetscErrorCode MatMumpsGetInfo(PetscMat, PetscInt, PetscInt*) + PetscErrorCode MatMumpsGetInfog(PetscMat, PetscInt, PetscInt*) + PetscErrorCode MatMumpsGetRinfo(PetscMat, PetscInt, PetscReal*) + PetscErrorCode MatMumpsGetRinfog(PetscMat, PetscInt, PetscReal*) + + PetscErrorCode MatForwardSolve(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatBackwardSolve(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatSolve(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatSolveTranspose(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatSolveAdd(PetscMat, PetscVec, PetscVec, PetscVec) + PetscErrorCode MatSolveTransposeAdd(PetscMat, PetscVec, PetscVec, PetscVec) + PetscErrorCode MatMatSolve(PetscMat, PetscMat, PetscMat) + + PetscErrorCode MatComputeExplicitOperator(PetscMat, PetscMat*) + PetscErrorCode MatUseScaledForm(PetscMat, PetscBool) + PetscErrorCode MatScaleSystem(PetscMat, PetscVec, PetscVec) + PetscErrorCode MatUnScaleSystem(PetscMat, PetscVec, PetscVec) + + PetscErrorCode MatDenseSetLDA(PetscMat, PetscInt) + PetscErrorCode MatDenseGetLDA(PetscMat, PetscInt*) + PetscErrorCode MatDenseGetLocalMatrix(PetscMat, PetscMat*) + PetscErrorCode MatDenseGetArray(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseRestoreArray(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseGetArrayWrite(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseRestoreArrayWrite(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseGetArrayRead(PetscMat, const PetscScalar*[]) + 
PetscErrorCode MatDenseRestoreArrayRead(PetscMat, const PetscScalar*[]) + PetscErrorCode MatDenseGetColumnVec(PetscMat, PetscInt, PetscVec*) + PetscErrorCode MatDenseRestoreColumnVec(PetscMat, PetscInt, PetscVec*) + PetscErrorCode MatDenseGetColumnVecRead(PetscMat, PetscInt, PetscVec*) + PetscErrorCode MatDenseRestoreColumnVecRead(PetscMat, PetscInt, PetscVec*) + PetscErrorCode MatDenseGetColumnVecWrite(PetscMat, PetscInt, PetscVec*) + PetscErrorCode MatDenseRestoreColumnVecWrite(PetscMat, PetscInt, PetscVec*) + PetscErrorCode MatDenseCUDAGetArray(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseCUDARestoreArray(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseCUDAGetArrayWrite(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseCUDARestoreArrayWrite(PetscMat, PetscScalar*[]) + PetscErrorCode MatDenseCUDAGetArrayRead(PetscMat, const PetscScalar*[]) + PetscErrorCode MatDenseCUDARestoreArrayRead(PetscMat, const PetscScalar*[]) + + PetscErrorCode MatProductGetType(PetscMat, PetscMatProductType*) + PetscErrorCode MatProductGetMats(PetscMat, PetscMat*, PetscMat*, PetscMat*) + + PetscErrorCode MatPythonSetType(PetscMat, char[]) + PetscErrorCode MatPythonGetType(PetscMat, char*[]) cdef extern from * nogil: # custom.h - PetscErrorCode MatGetCurrentMemType(PetscMat,PetscMemType*) - PetscErrorCode MatIsPreallocated(PetscMat,PetscBool*) - PetscErrorCode MatHasPreallocationAIJ(PetscMat,PetscBool*,PetscBool*,PetscBool*,PetscBool*) + PetscErrorCode MatGetCurrentMemType(PetscMat, PetscMemType*) + PetscErrorCode MatIsPreallocated(PetscMat, PetscBool*) + PetscErrorCode MatHasPreallocationAIJ(PetscMat, PetscBool*, PetscBool*, PetscBool*, PetscBool*) # ----------------------------------------------------------------------------- cdef extern from * nogil: PetscErrorCode MatNullSpaceDestroy(PetscNullSpace*) - PetscErrorCode MatNullSpaceView(PetscNullSpace,PetscViewer) - PetscErrorCode MatNullSpaceCreate(MPI_Comm,PetscBool,PetscInt,PetscVec[], - PetscNullSpace*) - PetscErrorCode MatNullSpaceCreateRigidBody(PetscVec,PetscNullSpace*) - PetscErrorCode MatNullSpaceGetVecs(PetscNullSpace,PetscBool*,PetscInt*,const PetscVec*[]) - PetscErrorCode MatNullSpaceRemove(PetscNullSpace,PetscVec) - PetscErrorCode MatNullSpaceTest(PetscNullSpace,PetscMat,PetscBool*) + PetscErrorCode MatNullSpaceView(PetscNullSpace, PetscViewer) + PetscErrorCode MatNullSpaceCreate(MPI_Comm, PetscBool, PetscInt, PetscVec[], + PetscNullSpace*) + PetscErrorCode MatNullSpaceCreateRigidBody(PetscVec, PetscNullSpace*) + PetscErrorCode MatNullSpaceGetVecs(PetscNullSpace, PetscBool*, PetscInt*, const PetscVec*[]) + PetscErrorCode MatNullSpaceRemove(PetscNullSpace, PetscVec) + PetscErrorCode MatNullSpaceTest(PetscNullSpace, PetscMat, PetscBool*) ctypedef PetscErrorCode MatNullSpaceFunction(PetscNullSpace, - PetscVec, - void*) except PETSC_ERR_PYTHON - PetscErrorCode MatNullSpaceSetFunction(PetscNullSpace,MatNullSpaceFunction*,void*) + PetscVec, + void*) except PETSC_ERR_PYTHON + PetscErrorCode MatNullSpaceSetFunction(PetscNullSpace, MatNullSpaceFunction*, void*) - PetscErrorCode MatSetNullSpace(PetscMat,PetscNullSpace) - PetscErrorCode MatGetNullSpace(PetscMat,PetscNullSpace*) - PetscErrorCode MatSetTransposeNullSpace(PetscMat,PetscNullSpace) - PetscErrorCode MatGetTransposeNullSpace(PetscMat,PetscNullSpace*) - PetscErrorCode MatSetNearNullSpace(PetscMat,PetscNullSpace) - PetscErrorCode MatGetNearNullSpace(PetscMat,PetscNullSpace*) + PetscErrorCode MatSetNullSpace(PetscMat, PetscNullSpace) + PetscErrorCode MatGetNullSpace(PetscMat, PetscNullSpace*) + 
PetscErrorCode MatSetTransposeNullSpace(PetscMat, PetscNullSpace) + PetscErrorCode MatGetTransposeNullSpace(PetscMat, PetscNullSpace*) + PetscErrorCode MatSetNearNullSpace(PetscMat, PetscNullSpace) + PetscErrorCode MatGetNearNullSpace(PetscMat, PetscNullSpace*) cdef inline NullSpace ref_NullSpace(PetscNullSpace nsp): cdef NullSpace ob = NullSpace() ob.nsp = nsp - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob cdef PetscErrorCode NullSpace_Function( PetscNullSpace n, PetscVec v, - void * ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef NullSpace nsp = ref_NullSpace(n) cdef Vec vec = ref_Vec(v) (function, args, kargs) = nsp.get_attr('__function__') @@ -556,7 +560,7 @@ cdef PetscErrorCode NullSpace_Function( cdef inline Mat ref_Mat(PetscMat mat): cdef Mat ob = Mat() ob.mat = mat - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # ----------------------------------------------------------------------------- @@ -565,12 +569,12 @@ cdef inline Mat ref_Mat(PetscMat mat): cdef Mat mat_pos(Mat self): cdef Mat mat = type(self)() - CHKERR( MatDuplicate(self.mat, MAT_COPY_VALUES, &mat.mat) ) + CHKERR(MatDuplicate(self.mat, MAT_COPY_VALUES, &mat.mat)) return mat cdef Mat mat_neg(Mat self): cdef Mat mat = mat_pos(self) - CHKERR( MatScale(mat.mat, -1) ) + CHKERR(MatScale(mat.mat, -1)) return mat # inplace binary operations @@ -634,7 +638,6 @@ cdef Mat mat_sub(Mat self, other): return mat_isub(mat_pos(self), other) cdef Vec mat_mul_vec(Mat self, Vec other): - #CHKERR( MatMult(self.mat, other.vec, result.vec) ) cdef Vec result = self.createVecLeft() self.mult(other, result) return result @@ -674,7 +677,7 @@ cdef Mat mat_rmul(Mat self, other): return mat_mul(self, other) cdef Mat mat_rdiv(Mat self, other): - self; other; # unused + self; other # unused return NotImplemented # ----------------------------------------------------------------------------- @@ -705,7 +708,7 @@ cdef inline PetscErrorCode Mat_Sizes( PetscInt *r, PetscInt *c, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, - ) except PETSC_ERR_PYTHON: + ) except PETSC_ERR_PYTHON: # unpack row and column sizes cdef object rsize, csize try: @@ -729,7 +732,7 @@ cdef inline PetscErrorCode Mat_Create( object size, object bsize, PetscMat *A, - ) except PETSC_ERR_PYTHON: + ) except PETSC_ERR_PYTHON: # communicator cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT) # sizes and block sizes @@ -741,22 +744,22 @@ cdef inline PetscErrorCode Mat_Create( Sys_Layout(ccomm, cbs, &n, &N) # create matrix and set sizes cdef PetscMat mat = NULL - CHKERR( MatCreate(ccomm, &mat) ) - CHKERR( MatSetSizes(mat, m, n, M, N) ) - CHKERR( MatSetBlockSizes(mat, rbs, cbs) ) - CHKERR( MatSetType(mat, mtype) ) + CHKERR(MatCreate(ccomm, &mat)) + CHKERR(MatSetSizes(mat, m, n, M, N)) + CHKERR(MatSetBlockSizes(mat, rbs, cbs)) + CHKERR(MatSetType(mat, mtype)) A[0] = mat return PETSC_SUCCESS -cdef inline PetscErrorCode Mat_AllocAIJ_NNZ( PetscMat A, object NNZ) except PETSC_ERR_PYTHON: +cdef inline PetscErrorCode Mat_AllocAIJ_NNZ(PetscMat A, object NNZ) except PETSC_ERR_PYTHON: # cdef PetscBool aij=PETSC_FALSE, baij=PETSC_FALSE, sbaij=PETSC_FALSE, aijis=PETSC_FALSE - CHKERR( MatHasPreallocationAIJ(A, &aij, &baij, &sbaij, &aijis)) + CHKERR(MatHasPreallocationAIJ(A, &aij, &baij, &sbaij, &aijis)) # local row size and block size cdef PetscInt m=0, bs=1 - CHKERR( MatGetLocalSize(A, &m, NULL) ) + CHKERR(MatGetLocalSize(A, &m, NULL)) if baij == PETSC_TRUE or sbaij == PETSC_TRUE: - CHKERR( 
MatGetBlockSize(A, &bs) ) + CHKERR(MatGetBlockSize(A, &bs)) assert bs > 0, "block size not set" # unpack NNZ argument cdef object od_nnz, oo_nnz @@ -781,33 +784,33 @@ cdef inline PetscErrorCode Mat_AllocAIJ_NNZ( PetscMat A, object NNZ) except PETS # check array sizes if d_n > 1 and d_n*bs != m: raise ValueError( "size(d_nnz) is %d, expected %d" % - (toInt(d_n), toInt(m//bs)) ) + (toInt(d_n), toInt(m//bs))) if o_n > 1 and o_n*bs != m: raise ValueError( "size(o_nnz) is %d, expected %d" % - (toInt(o_n), toInt(m//bs)) ) + (toInt(o_n), toInt(m//bs))) # preallocate if aij == PETSC_TRUE: - CHKERR( MatSeqAIJSetPreallocation(A, d_nz, d_nnz) ) - CHKERR( MatMPIAIJSetPreallocation(A, d_nz, d_nnz, o_nz, o_nnz) ) + CHKERR(MatSeqAIJSetPreallocation(A, d_nz, d_nnz)) + CHKERR(MatMPIAIJSetPreallocation(A, d_nz, d_nnz, o_nz, o_nnz)) if baij == PETSC_TRUE: - CHKERR( MatSeqBAIJSetPreallocation(A, bs, d_nz, d_nnz) ) - CHKERR( MatMPIBAIJSetPreallocation(A, bs, d_nz, d_nnz, o_nz, o_nnz) ) + CHKERR(MatSeqBAIJSetPreallocation(A, bs, d_nz, d_nnz)) + CHKERR(MatMPIBAIJSetPreallocation(A, bs, d_nz, d_nnz, o_nz, o_nnz)) if sbaij == PETSC_TRUE: - CHKERR( MatSeqSBAIJSetPreallocation(A, bs, d_nz, d_nnz) ) - CHKERR( MatMPISBAIJSetPreallocation(A, bs, d_nz, d_nnz, o_nz, o_nnz) ) + CHKERR(MatSeqSBAIJSetPreallocation(A, bs, d_nz, d_nnz)) + CHKERR(MatMPISBAIJSetPreallocation(A, bs, d_nz, d_nnz, o_nz, o_nnz)) if aijis == PETSC_TRUE: - CHKERR( MatISSetPreallocation(A, d_nz, d_nnz, o_nz, o_nnz) ) + CHKERR(MatISSetPreallocation(A, d_nz, d_nnz, o_nz, o_nnz)) return PETSC_SUCCESS cdef inline PetscErrorCode Mat_AllocAIJ_CSR(PetscMat A, object CSR) except PETSC_ERR_PYTHON: # cdef PetscBool aij=PETSC_FALSE, baij=PETSC_FALSE, sbaij=PETSC_FALSE, aijis=PETSC_FALSE - CHKERR( MatHasPreallocationAIJ(A, &aij, &baij, &sbaij, &aijis)) + CHKERR(MatHasPreallocationAIJ(A, &aij, &baij, &sbaij, &aijis)) # local row size and block size cdef PetscInt m=0, bs = 1 - CHKERR( MatGetLocalSize(A, &m, NULL) ) + CHKERR(MatGetLocalSize(A, &m, NULL)) if baij == PETSC_TRUE or sbaij == PETSC_TRUE: - CHKERR( MatGetBlockSize(A, &bs) ) + CHKERR(MatGetBlockSize(A, &bs)) assert bs > 0, "block size not set" # unpack CSR argument cdef object oi, oj, ov @@ -828,29 +831,29 @@ cdef inline PetscErrorCode Mat_AllocAIJ_CSR(PetscMat A, object CSR) except PETSC # check array sizes if ((ni-1)*bs != m): raise ValueError("size(I) is %d, expected %d" % - (toInt(ni), toInt(m//bs+1)) ) + (toInt(ni), toInt(m//bs+1))) if (i[0] != 0): raise ValueError("I[0] is %d, expected %d" % - (toInt(i[0]), toInt(0)) ) + (toInt(i[0]), toInt(0))) if (i[ni-1] != nj): raise ValueError("size(J) is %d, expected %d" % - (toInt(nj), toInt(i[ni-1])) ) + (toInt(nj), toInt(i[ni-1]))) if v != NULL and (nj*bs*bs != nv): raise ValueError("size(V) is %d, expected %d" % - (toInt(nv), toInt(nj*bs*bs)) ) + (toInt(nv), toInt(nj*bs*bs))) # preallocate if aij == PETSC_TRUE: - CHKERR( MatSeqAIJSetPreallocationCSR(A, i, j, v) ) - CHKERR( MatMPIAIJSetPreallocationCSR(A, i, j, v) ) + CHKERR(MatSeqAIJSetPreallocationCSR(A, i, j, v)) + CHKERR(MatMPIAIJSetPreallocationCSR(A, i, j, v)) if baij == PETSC_TRUE: - CHKERR( MatSeqBAIJSetPreallocationCSR(A, bs, i, j, v) ) - CHKERR( MatMPIBAIJSetPreallocationCSR(A, bs, i, j, v) ) + CHKERR(MatSeqBAIJSetPreallocationCSR(A, bs, i, j, v)) + CHKERR(MatMPIBAIJSetPreallocationCSR(A, bs, i, j, v)) if sbaij == PETSC_TRUE: - CHKERR( MatSeqSBAIJSetPreallocationCSR(A, bs, i, j, v) ) - CHKERR( MatMPISBAIJSetPreallocationCSR(A, bs, i, j, v) ) + CHKERR(MatSeqSBAIJSetPreallocationCSR(A, bs, i, j, v)) 
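The Mat_AllocAIJ_NNZ and Mat_AllocAIJ_CSR helpers above validate the two preallocation forms a user can pass from Python. A sketch of both call forms, with hypothetical sizes (illustrative only, not part of this patch):

from petsc4py import PETSc

# NNZ branch: per-row nonzero counts -> Mat_AllocAIJ_NNZ.
A = PETSc.Mat().createAIJ([4, 4], nnz=[1, 1, 1, 1], comm=PETSc.COMM_SELF)

# CSR branch: (I, J, V) must satisfy I[0] == 0 and I[-1] == len(J),
# exactly the invariants checked above -> Mat_AllocAIJ_CSR.
I = [0, 1, 2, 3, 4]
J = [0, 1, 2, 3]
V = [2.0, 2.0, 2.0, 2.0]
B = PETSc.Mat().createAIJ([4, 4], csr=(I, J, V), comm=PETSc.COMM_SELF)
B.assemble()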
+ CHKERR(MatMPISBAIJSetPreallocationCSR(A, bs, i, j, v)) return PETSC_SUCCESS -cdef inline PetscErrorCode Mat_AllocAIJ(PetscMat A,object NNZ, object CSR) except PETSC_ERR_PYTHON: +cdef inline PetscErrorCode Mat_AllocAIJ(PetscMat A, object NNZ, object CSR) except PETSC_ERR_PYTHON: if CSR is not None: return Mat_AllocAIJ_CSR(A, CSR) if NNZ is not None: @@ -859,25 +862,25 @@ cdef inline PetscErrorCode Mat_AllocAIJ(PetscMat A,object NNZ, object CSR) excep cdef inline object Mat_AllocDense(PetscMat A, object array): cdef PetscInt m=0, N=0 - CHKERR( MatGetLocalSize(A, &m, NULL) ) - CHKERR( MatGetSize(A, NULL, &N) ) + CHKERR(MatGetLocalSize(A, &m, NULL)) + CHKERR(MatGetSize(A, NULL, &N)) cdef PetscInt size=0 cdef PetscScalar *data=NULL if array is not None: array = ofarray_s(array, &size, &data) if m*N != size: raise ValueError( "size(array) is %d, expected %dx%d=%d" % - (toInt(size), toInt(m), toInt(N), toInt(m*N)) ) - CHKERR( MatSeqDenseSetPreallocation(A, data) ) - CHKERR( MatMPIDenseSetPreallocation(A, data) ) + (toInt(size), toInt(m), toInt(N), toInt(m*N))) + CHKERR(MatSeqDenseSetPreallocation(A, data)) + CHKERR(MatMPIDenseSetPreallocation(A, data)) return array # ----------------------------------------------------------------------------- ctypedef PetscErrorCode MatSetValuesFcn(PetscMat, - PetscInt,const PetscInt*, - PetscInt,const PetscInt*, - const PetscScalar*,PetscInsertMode) + PetscInt, const PetscInt*, + PetscInt, const PetscInt*, + const PetscScalar*, PetscInsertMode) cdef inline MatSetValuesFcn* matsetvalues_fcn(int blocked, int local): cdef MatSetValuesFcn *setvalues = NULL @@ -888,11 +891,11 @@ cdef inline MatSetValuesFcn* matsetvalues_fcn(int blocked, int local): return setvalues cdef inline PetscErrorCode matsetvalues(PetscMat A, - object oi, object oj, object ov, - object oaddv, int blocked, int local) except PETSC_ERR_PYTHON: + object oi, object oj, object ov, + object oaddv, int blocked, int local) except PETSC_ERR_PYTHON: # block size cdef PetscInt rbs=1, cbs=1 - if blocked: CHKERR( MatGetBlockSizes(A, &rbs, &cbs) ) + if blocked: CHKERR(MatGetBlockSizes(A, &rbs, &cbs)) if rbs < 1: rbs = 1 if cbs < 1: cbs = 1 # rows, cols, and values @@ -905,21 +908,21 @@ cdef inline PetscErrorCode matsetvalues(PetscMat A, ov = iarray_s(ov, &nv, &v) if ni*nj*rbs*cbs != nv: raise ValueError( "incompatible array sizes: ni=%d, nj=%d, nv=%d" % - (toInt(ni), toInt(nj), toInt(nv)) ) + (toInt(ni), toInt(nj), toInt(nv))) # MatSetValuesXXX function and insert mode cdef MatSetValuesFcn *setvalues = matsetvalues_fcn(blocked, local) cdef PetscInsertMode addv = insertmode(oaddv) # actual call - CHKERR( setvalues(A, ni, i, nj, j, v, addv) ) + CHKERR(setvalues(A, ni, i, nj, j, v, addv)) return PETSC_SUCCESS cdef inline PetscErrorCode matsetvalues_rcv(PetscMat A, - object oi, object oj, object ov, - object oaddv, - int blocked, int local) except PETSC_ERR_PYTHON: + object oi, object oj, object ov, + object oaddv, + int blocked, int local) except PETSC_ERR_PYTHON: # block size cdef PetscInt rbs=1, cbs=1 - if blocked: CHKERR( MatGetBlockSizes(A, &rbs, &cbs) ) + if blocked: CHKERR(MatGetBlockSizes(A, &rbs, &cbs)) if rbs < 1: rbs = 1 if cbs < 1: cbs = 1 # rows, cols, and values @@ -933,17 +936,17 @@ cdef inline PetscErrorCode matsetvalues_rcv(PetscMat A, # check various dimensions if PyArray_NDIM(ai) != 2: raise ValueError( ("row indices must have two dimensions: " - "rows.ndim=%d") % (PyArray_NDIM(ai)) ) + "rows.ndim=%d") % (PyArray_NDIM(ai))) elif not PyArray_ISCONTIGUOUS(ai): raise ValueError( "expecting a 
C-contiguous array") if PyArray_NDIM(aj) != 2: raise ValueError( ("column indices must have two dimensions: " - "cols.ndim=%d") % (PyArray_NDIM(aj)) ) + "cols.ndim=%d") % (PyArray_NDIM(aj))) elif not PyArray_ISCONTIGUOUS(aj): raise ValueError( "expecting a C-contiguous array") if PyArray_NDIM(av) < 2: raise ValueError( ("values must have two or more dimensions: " - "vals.ndim=%d") % (PyArray_NDIM(av)) ) + "vals.ndim=%d") % (PyArray_NDIM(av))) elif not PyArray_ISCONTIGUOUS(av): raise ValueError( "expecting a C-contiguous array") # check various shapes @@ -959,25 +962,25 @@ cdef inline PetscErrorCode matsetvalues_rcv(PetscMat A, (ai.shape, aj.shape, av.shape)) # MatSetValuesXXX function and insert mode cdef MatSetValuesFcn *setvalues = \ - matsetvalues_fcn(blocked, local) + matsetvalues_fcn(blocked, local) cdef PetscInsertMode addv = insertmode(oaddv) # actual calls cdef Py_ssize_t k=0 for k from 0 <= k < nm: - CHKERR( setvalues(A, - si, &i[k*si], - sj, &j[k*sj], - &v[k*sv], addv) ) + CHKERR(setvalues(A, + si, &i[k*si], + sj, &j[k*sj], + &v[k*sv], addv)) return PETSC_SUCCESS cdef inline PetscErrorCode matsetvalues_ijv(PetscMat A, - object oi, object oj, object ov, - object oaddv, - object om, - int blocked, int local) except PETSC_ERR_PYTHON: + object oi, object oj, object ov, + object oaddv, + object om, + int blocked, int local) except PETSC_ERR_PYTHON: # block size cdef PetscInt rbs=1, cbs=1 - if blocked: CHKERR( MatGetBlockSizes(A, &rbs, &cbs) ) + if blocked: CHKERR(MatGetBlockSizes(A, &rbs, &cbs)) if rbs < 1: rbs = 1 if cbs < 1: cbs = 1 # column pointers, column indices, and values @@ -995,28 +998,28 @@ cdef inline PetscErrorCode matsetvalues_ijv(PetscMat A, om = iarray_i(om, &nm, &m) else: if not local: - CHKERR( MatGetOwnershipRange(A, &rs, &re) ) + CHKERR(MatGetOwnershipRange(A, &rs, &re)) rs //= rbs; re //= rbs nm = re - rs # check various sizes if (ni-1 != nm): raise ValueError( "size(I) is %d, expected %d" % - (toInt(ni), toInt(nm+1)) ) - if (i[0] != 0):raise ValueError( + (toInt(ni), toInt(nm+1))) + if (i[0] != 0): raise ValueError( "I[0] is %d, expected %d" % - (toInt(i[0]), 0) ) + (toInt(i[0]), 0)) if (i[ni-1] != nj): raise ValueError( "size(J) is %d, expected %d" % - (toInt(nj), toInt(i[ni-1])) ) + (toInt(nj), toInt(i[ni-1]))) if (nj*rbs*cbs != nv): raise ValueError( "size(V) is %d, expected %d" % - (toInt(nv), toInt(nj*rbs*cbs)) ) + (toInt(nv), toInt(nj*rbs*cbs))) # MatSetValuesXXX function and insert mode cdef MatSetValuesFcn *setvalues = \ - matsetvalues_fcn(blocked, local) + matsetvalues_fcn(blocked, local) cdef PetscInsertMode addv = insertmode(oaddv) # actual call - cdef PetscInt k=0, l=0 + cdef PetscInt k=0, c=0 cdef PetscInt irow=0, ncol=0, *icol=NULL cdef PetscScalar *sval=NULL for k from 0 <= k < nm: @@ -1025,18 +1028,18 @@ cdef inline PetscErrorCode matsetvalues_ijv(PetscMat A, icol = j + i[k] if blocked: sval = v + i[k]*rbs*cbs - for l from 0 <= l < ncol: - CHKERR( setvalues(A, 1, &irow, 1, &icol[l], - &sval[l*rbs*cbs], addv) ) + for c from 0 <= c < ncol: + CHKERR(setvalues(A, 1, &irow, 1, &icol[c], + &sval[c*rbs*cbs], addv)) else: sval = v + i[k] - CHKERR( setvalues(A, 1, &irow, ncol, icol, sval, addv) ) + CHKERR(setvalues(A, 1, &irow, ncol, icol, sval, addv)) return PETSC_SUCCESS cdef inline PetscErrorCode matsetvalues_csr(PetscMat A, - object oi, object oj, object ov, - object oaddv, - int blocked, int local) except PETSC_ERR_PYTHON: + object oi, object oj, object ov, + object oaddv, + int blocked, int local) except PETSC_ERR_PYTHON: matsetvalues_ijv(A, oi, oj, 
ov, oaddv, None, blocked, local) return PETSC_SUCCESS @@ -1054,13 +1057,13 @@ cdef inline matgetvalues(PetscMat mat, if (ni*nj != nv): raise ValueError( "incompatible array sizes: ni=%d, nj=%d, nv=%d" % (toInt(ni), toInt(nj), toInt(nv))) - CHKERR( MatGetValues(mat, ni, i, nj, j, v) ) + CHKERR(MatGetValues(mat, ni, i, nj, j, v)) return values # ----------------------------------------------------------------------------- cdef extern from * nogil: # custom.h - PetscErrorCode MatFactorInfoDefaults(PetscBool,PetscBool,PetscMatFactorInfo*) + PetscErrorCode MatFactorInfoDefaults(PetscBool, PetscBool, PetscMatFactorInfo*) cdef inline PetscMatFactorShiftType matfactorshifttype(object st) \ except (-1): @@ -1075,8 +1078,8 @@ cdef inline PetscMatFactorShiftType matfactorshifttype(object st) \ return st cdef PetscErrorCode matfactorinfo(PetscBool inc, PetscBool chol, object opts, - PetscMatFactorInfo *info) except PETSC_ERR_PYTHON: - CHKERR( MatFactorInfoDefaults(inc,chol,info) ) + PetscMatFactorInfo *info) except PETSC_ERR_PYTHON: + CHKERR(MatFactorInfoDefaults(inc, chol, info)) if opts is None: return PETSC_SUCCESS cdef dict options = dict(opts) # @@ -1097,17 +1100,16 @@ cdef PetscErrorCode matfactorinfo(PetscBool inc, PetscBool chol, object opts, # cdef dt = options.pop('dt', None) if dt is not None: + info.usedt = PETSC_TRUE info.dt = asReal(dt) cdef dtcol = options.pop('dtcol', None) if dtcol is not None: + info.usedt = PETSC_TRUE info.dtcol = asReal(dtcol) cdef dtcount = options.pop('dtcount', None) if dtcount is not None: - info.dtcount = asInt(dtcount) - if ((dt is not None) or - (dtcol is not None) or - (dtcount is not None)): info.usedt = PETSC_TRUE + info.dtcount = asInt(dtcount) # cdef shifttype = options.pop('shifttype', None) if shifttype is not None: @@ -1127,11 +1129,11 @@ cdef object mat_getitem(Mat self, object ij): cdef PetscInt M=0, N=0 rows, cols = ij if isinstance(rows, slice): - CHKERR( MatGetSize(self.mat, &M, NULL) ) + CHKERR(MatGetSize(self.mat, &M, NULL)) start, stop, stride = rows.indices(toInt(M)) rows = arange(start, stop, stride) if isinstance(cols, slice): - CHKERR( MatGetSize(self.mat, NULL, &N) ) + CHKERR(MatGetSize(self.mat, NULL, &N)) start, stop, stride = cols.indices(toInt(N)) cols = arange(start, stop, stride) return matgetvalues(self.mat, rows, cols, None) @@ -1141,11 +1143,11 @@ cdef PetscErrorCode mat_setitem(Mat self, object ij, object v) except PETSC_ERR_ cdef PetscInt M=0, N=0 rows, cols = ij if isinstance(rows, slice): - CHKERR( MatGetSize(self.mat, &M, NULL) ) + CHKERR(MatGetSize(self.mat, &M, NULL)) start, stop, stride = rows.indices(toInt(M)) rows = arange(start, stop, stride) if isinstance(cols, slice): - CHKERR( MatGetSize(self.mat, NULL, &N) ) + CHKERR(MatGetSize(self.mat, NULL, &N)) start, stop, stride = cols.indices(toInt(N)) cols = arange(start, stop, stride) matsetvalues(self.mat, rows, cols, v, None, 0, 0) @@ -1158,7 +1160,7 @@ cdef matsetvaluestencil(PetscMat A, PetscInsertMode im, int blocked): # block size cdef PetscInt rbs=1, cbs=1 - if blocked: CHKERR( MatGetBlockSizes(A, &rbs, &cbs) ) + if blocked: CHKERR(MatGetBlockSizes(A, &rbs, &cbs)) if rbs < 1: rbs = 1 if cbs < 1: cbs = 1 # values @@ -1166,17 +1168,17 @@ cdef matsetvaluestencil(PetscMat A, cdef PetscScalar *v = NULL value = iarray_s(value, &nv, &v) if rbs*cbs != nv: raise ValueError( - "incompatible array sizes: nv=%d" % toInt(nv) ) + "incompatible array sizes: nv=%d" % toInt(nv)) if blocked: - CHKERR( MatSetValuesBlockedStencil(A, - 1, &r.stencil, - 1, &c.stencil, - v, im) ) + 
CHKERR(MatSetValuesBlockedStencil(A, + 1, &r.stencil, + 1, &c.stencil, + v, im)) else: - CHKERR( MatSetValuesStencil(A, - 1, &r.stencil, - 1, &c.stencil, - v, im) ) + CHKERR(MatSetValuesStencil(A, + 1, &r.stencil, + 1, &c.stencil, + v, im)) return 0 cdef mat_get_dlpack_ctx(Mat self): @@ -1194,9 +1196,9 @@ cdef mat_get_dlpack_ctx(Mat self): if ctx0 is None: # First time in, create a linear memory view s1 = oarray_p(empty_p(ndim), NULL, &shape_arr) s2 = oarray_p(empty_p(ndim), NULL, &strides_arr) - CHKERR( MatGetSize(self.mat, NULL, &n) ) - CHKERR( MatGetLocalSize(self.mat, &m, NULL) ) - CHKERR( MatDenseGetLDA(self.mat, &lda) ) + CHKERR(MatGetSize(self.mat, NULL, &n)) + CHKERR(MatGetLocalSize(self.mat, &m, NULL)) + CHKERR(MatDenseGetLDA(self.mat, &lda)) shape_arr[0] = m shape_arr[1] = n strides_arr[0] = 1 @@ -1204,11 +1206,11 @@ cdef mat_get_dlpack_ctx(Mat self): else: (_, _, ndim, s1, s2) = ctx0 - devType_ = { PETSC_MEMTYPE_HOST : kDLCPU, PETSC_MEMTYPE_CUDA : kDLCUDA } - CHKERR( MatGetCurrentMemType(self.mat, &mtype) ) + devType_ = {PETSC_MEMTYPE_HOST : kDLCPU, PETSC_MEMTYPE_CUDA : kDLCUDA} + CHKERR(MatGetCurrentMemType(self.mat, &mtype)) dtype = devType_.get(mtype, kDLCPU) if dtype != kDLCPU: - CHKERR( PetscObjectGetDeviceId(self.mat, &devId) ) + CHKERR(PetscObjectGetDeviceId(self.mat, &devId)) ctx0 = (dtype, devId, ndim, s1, s2) self.set_attr('__dltensor_ctx__', ctx0) return ctx0 diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscmatpartitioning.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscmatpartitioning.pxi index e2de0730563..88149929115 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscmatpartitioning.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscmatpartitioning.pxi @@ -10,13 +10,13 @@ cdef extern from * nogil: PetscMatPartitioningType MATPARTITIONINGPTSCOTCH PetscMatPartitioningType MATPARTITIONINGHIERARCH - PetscErrorCode MatPartitioningCreate(MPI_Comm,PetscMatPartitioning*) + PetscErrorCode MatPartitioningCreate(MPI_Comm, PetscMatPartitioning*) PetscErrorCode MatPartitioningDestroy(PetscMatPartitioning*) - PetscErrorCode MatPartitioningView(PetscMatPartitioning,PetscViewer) + PetscErrorCode MatPartitioningView(PetscMatPartitioning, PetscViewer) - PetscErrorCode MatPartitioningSetType(PetscMatPartitioning,PetscMatPartitioningType) - PetscErrorCode MatPartitioningGetType(PetscMatPartitioning,PetscMatPartitioningType*) + PetscErrorCode MatPartitioningSetType(PetscMatPartitioning, PetscMatPartitioningType) + PetscErrorCode MatPartitioningGetType(PetscMatPartitioning, PetscMatPartitioningType*) PetscErrorCode MatPartitioningSetFromOptions(PetscMatPartitioning) - PetscErrorCode MatPartitioningSetAdjacency(PetscMatPartitioning,PetscMat) - PetscErrorCode MatPartitioningApply(PetscMatPartitioning,PetscIS*) + PetscErrorCode MatPartitioningSetAdjacency(PetscMatPartitioning, PetscMat) + PetscErrorCode MatPartitioningApply(PetscMatPartitioning, PetscIS*) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscmem.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscmem.pxi index 27da64ebd99..5ae94a2ff5b 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscmem.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscmem.pxi @@ -1,9 +1,8 @@ cdef extern from * nogil: - PetscErrorCode PetscMalloc(size_t,void*) + PetscErrorCode PetscMalloc(size_t, void*) PetscErrorCode PetscFree(void*) - PetscErrorCode PetscMemcpy(void*,void*,size_t) - PetscErrorCode PetscMemmove(void*,void*,size_t) - PetscErrorCode PetscMemzero(void*,size_t) - PetscErrorCode 
PetscMemcmp(void*,void*,size_t,PetscBool*) - PetscErrorCode PetscStrallocpy(const char[],char*[]) - + PetscErrorCode PetscMemcpy(void*, void*, size_t) + PetscErrorCode PetscMemmove(void*, void*, size_t) + PetscErrorCode PetscMemzero(void*, size_t) + PetscErrorCode PetscMemcmp(void*, void*, size_t, PetscBool*) + PetscErrorCode PetscStrallocpy(const char[], char*[]) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscmpi.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscmpi.pxi index 92072d75622..2221c1739c6 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscmpi.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscmpi.pxi @@ -11,10 +11,10 @@ cdef extern from * nogil: enum: MPI_IDENT enum: MPI_CONGRUENT - int MPI_Comm_compare(MPI_Comm,MPI_Comm,int*) + int MPI_Comm_compare(MPI_Comm, MPI_Comm, int*) - int MPI_Comm_size(MPI_Comm,int*) - int MPI_Comm_rank(MPI_Comm,int*) + int MPI_Comm_size(MPI_Comm, int*) + int MPI_Comm_rank(MPI_Comm, int*) int MPI_Barrier(MPI_Comm) int MPI_Initialized(int*) @@ -28,7 +28,7 @@ cdef extern from * nogil: MPI_Comm PETSC_COMM_SELF MPI_Comm PETSC_COMM_WORLD - PetscErrorCode PetscCommDuplicate(MPI_Comm,MPI_Comm*,int*) + PetscErrorCode PetscCommDuplicate(MPI_Comm, MPI_Comm*, int*) PetscErrorCode PetscCommDestroy(MPI_Comm*) # -------------------------------------------------------------------- @@ -36,18 +36,17 @@ cdef extern from * nogil: cdef extern from "cython.h": void *Cython_ImportFunction(object, char[], char[]) except? NULL -ctypedef MPI_Comm* PyMPICommGet(object) except NULL -ctypedef object PyMPICommNew(MPI_Comm) +ctypedef MPI_Comm* PyMPICommGet(object) except NULL +ctypedef object PyMPICommNew(MPI_Comm) ctypedef MPI_Datatype* PyMPIDatatypeGet(object) except NULL -ctypedef MPI_Op* PyMPIOpGet(object) except NULL +ctypedef MPI_Op* PyMPIOpGet(object) except NULL cdef inline MPI_Comm mpi4py_Comm_Get( object comm, ) except? MPI_COMM_NULL: from mpi4py import MPI cdef PyMPICommGet *commget = \ - Cython_ImportFunction( - MPI, b"PyMPIComm_Get", b"MPI_Comm *(PyObject *)") + Cython_ImportFunction(MPI, b"PyMPIComm_Get", b"MPI_Comm *(PyObject *)") if commget == NULL: return MPI_COMM_NULL cdef MPI_Comm *ptr = commget(comm) if ptr == NULL: return MPI_COMM_NULL @@ -56,8 +55,7 @@ cdef inline MPI_Comm mpi4py_Comm_Get( cdef inline object mpi4py_Comm_New(MPI_Comm comm): from mpi4py import MPI cdef PyMPICommNew *commnew = \ - Cython_ImportFunction( - MPI, b"PyMPIComm_New", b"PyObject *(MPI_Comm)") + Cython_ImportFunction(MPI, b"PyMPIComm_New", b"PyObject *(MPI_Comm)") if commnew == NULL: return None return commnew(comm) @@ -66,8 +64,7 @@ cdef inline MPI_Datatype mpi4py_Datatype_Get( ) except? MPI_DATATYPE_NULL: from mpi4py import MPI cdef PyMPIDatatypeGet *datatypeget = \ - Cython_ImportFunction( - MPI, b"PyMPIDatatype_Get", b"MPI_Datatype *(PyObject *)") + Cython_ImportFunction(MPI, b"PyMPIDatatype_Get", b"MPI_Datatype *(PyObject *)") if datatypeget == NULL: return MPI_DATATYPE_NULL cdef MPI_Datatype *ptr = datatypeget(datatype) if ptr == NULL: return MPI_DATATYPE_NULL @@ -78,8 +75,7 @@ cdef inline MPI_Op mpi4py_Op_Get( ) except? MPI_OP_NULL: from mpi4py import MPI cdef PyMPIOpGet *opget = \ - Cython_ImportFunction( - MPI, b"PyMPIOp_Get", b"MPI_Op *(PyObject *)") + Cython_ImportFunction(MPI, b"PyMPIOp_Get", b"MPI_Op *(PyObject *)") if opget == NULL: return MPI_OP_NULL cdef MPI_Op *ptr = opget(op) if ptr == NULL: return MPI_OP_NULL @@ -120,13 +116,13 @@ cdef inline Comm new_Comm(MPI_Comm comm): cdef inline int comm_size(MPI_Comm comm) except ? 
-1: if comm == MPI_COMM_NULL: raise ValueError("null communicator") cdef int size = 0 - CHKERR( MPI_Comm_size(comm, &size) ) + CHKERR(MPI_Comm_size(comm, &size)) return size cdef inline int comm_rank(MPI_Comm comm) except ? -1: if comm == MPI_COMM_NULL: raise ValueError("null communicator") cdef int rank = 0 - CHKERR( MPI_Comm_rank(comm, &rank) ) + CHKERR(MPI_Comm_rank(comm, &rank)) return rank # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscobj.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscobj.pxi index c36db31742a..655b42b3433 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscobj.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscobj.pxi @@ -4,43 +4,43 @@ cdef extern from * nogil: ctypedef int PetscClassId ctypedef int PetscObjectState - PetscErrorCode PetscObjectView(PetscObject,PetscViewer) + PetscErrorCode PetscObjectView(PetscObject, PetscViewer) PetscErrorCode PetscObjectDestroy(PetscObject*) - PetscErrorCode PetscObjectGetReference(PetscObject,PetscInt*) + PetscErrorCode PetscObjectGetReference(PetscObject, PetscInt*) PetscErrorCode PetscObjectReference(PetscObject) PetscErrorCode PetscObjectDereference(PetscObject) - PetscErrorCode PetscObjectSetOptionsPrefix(PetscObject,char[]) - PetscErrorCode PetscObjectAppendOptionsPrefix(PetscObject,char[]) - PetscErrorCode PetscObjectGetOptionsPrefix(PetscObject,char*[]) + PetscErrorCode PetscObjectSetOptionsPrefix(PetscObject, char[]) + PetscErrorCode PetscObjectAppendOptionsPrefix(PetscObject, char[]) + PetscErrorCode PetscObjectGetOptionsPrefix(PetscObject, char*[]) PetscErrorCode PetscObjectSetFromOptions(PetscObject) - PetscErrorCode PetscObjectViewFromOptions(PetscObject,PetscObject,char[]) + PetscErrorCode PetscObjectViewFromOptions(PetscObject, PetscObject, char[]) - PetscErrorCode PetscObjectGetComm(PetscObject,MPI_Comm*) - PetscErrorCode PetscObjectGetClassId(PetscObject,PetscClassId*) - PetscErrorCode PetscObjectGetType(PetscObject,char*[]) - PetscErrorCode PetscObjectGetClassName(PetscObject,char*[]) - PetscErrorCode PetscObjectSetName(PetscObject,char[]) - PetscErrorCode PetscObjectGetName(PetscObject,char*[]) + PetscErrorCode PetscObjectGetComm(PetscObject, MPI_Comm*) + PetscErrorCode PetscObjectGetClassId(PetscObject, PetscClassId*) + PetscErrorCode PetscObjectGetType(PetscObject, char*[]) + PetscErrorCode PetscObjectGetClassName(PetscObject, char*[]) + PetscErrorCode PetscObjectSetName(PetscObject, char[]) + PetscErrorCode PetscObjectGetName(PetscObject, char*[]) PetscErrorCode PetscObjectStateIncrease(PetscObject) - PetscErrorCode PetscObjectStateSet(PetscObject,PetscObjectState) - PetscErrorCode PetscObjectStateGet(PetscObject,PetscObjectState*) - PetscErrorCode PetscObjectTypeCompare(PetscObject,char[],PetscBool*) - PetscErrorCode PetscObjectChangeTypeName(PetscObject,char[]) - PetscErrorCode PetscObjectCompose(PetscObject,char[],PetscObject) - PetscErrorCode PetscObjectQuery(PetscObject,char[],PetscObject*) + PetscErrorCode PetscObjectStateSet(PetscObject, PetscObjectState) + PetscErrorCode PetscObjectStateGet(PetscObject, PetscObjectState*) + PetscErrorCode PetscObjectTypeCompare(PetscObject, char[], PetscBool*) + PetscErrorCode PetscObjectChangeTypeName(PetscObject, char[]) + PetscErrorCode PetscObjectCompose(PetscObject, char[], PetscObject) + PetscErrorCode PetscObjectQuery(PetscObject, char[], PetscObject*) ctypedef void (*PetscVoidFunction)() - PetscErrorCode 
PetscObjectComposeFunction(PetscObject,char[],PetscVoidFunction) - PetscErrorCode PetscObjectQueryFunction(PetscObject,char[],PetscVoidFunction*) + PetscErrorCode PetscObjectComposeFunction(PetscObject, char[], PetscVoidFunction) + PetscErrorCode PetscObjectQueryFunction(PetscObject, char[], PetscVoidFunction*) - PetscErrorCode PetscObjectIncrementTabLevel(PetscObject,PetscObject,PetscInt) - PetscErrorCode PetscObjectGetTabLevel(PetscObject,PetscInt*) - PetscErrorCode PetscObjectSetTabLevel(PetscObject,PetscInt) + PetscErrorCode PetscObjectIncrementTabLevel(PetscObject, PetscObject, PetscInt) + PetscErrorCode PetscObjectGetTabLevel(PetscObject, PetscInt*) + PetscErrorCode PetscObjectSetTabLevel(PetscObject, PetscInt) cdef extern from * nogil: # custom.h - PetscErrorCode PetscObjectGetDeviceId(PetscObject,PetscInt*) + PetscErrorCode PetscObjectGetDeviceId(PetscObject, PetscInt*) cdef extern from "" nogil: PetscErrorCode PetscObjectDelayedDestroy(PetscObject*) @@ -150,17 +150,17 @@ cdef inline type subtype_DM(PetscDM dm): if obj == NULL: return DM # --- cdef PetscBool match = PETSC_FALSE - CHKERR( PetscObjectTypeCompare(obj, b"da", &match) ) + CHKERR(PetscObjectTypeCompare(obj, b"da", &match)) if match == PETSC_TRUE: return DMDA - CHKERR( PetscObjectTypeCompare(obj, b"plex", &match) ) + CHKERR(PetscObjectTypeCompare(obj, b"plex", &match)) if match == PETSC_TRUE: return DMPlex - CHKERR( PetscObjectTypeCompare(obj, b"composite", &match) ) + CHKERR(PetscObjectTypeCompare(obj, b"composite", &match)) if match == PETSC_TRUE: return DMComposite - CHKERR( PetscObjectTypeCompare(obj, b"shell", &match) ) + CHKERR(PetscObjectTypeCompare(obj, b"shell", &match)) if match == PETSC_TRUE: return DMShell - CHKERR( PetscObjectTypeCompare(obj, b"stag", &match) ) + CHKERR(PetscObjectTypeCompare(obj, b"stag", &match)) if match == PETSC_TRUE: return DMStag - CHKERR( PetscObjectTypeCompare(obj, b"swarm", &match) ) + CHKERR(PetscObjectTypeCompare(obj, b"swarm", &match)) if match == PETSC_TRUE: return DMSwarm # --- return DM @@ -169,7 +169,7 @@ cdef inline type subtype_Object(PetscObject obj): cdef type klass = Object if obj == NULL: return klass cdef PetscClassId classid = 0 - CHKERR( PetscObjectGetClassId(obj,&classid) ) + CHKERR(PetscObjectGetClassId(obj, &classid)) if classid == PETSC_DM_CLASSID: klass = subtype_DM(obj) else: diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscopt.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscopt.pxi index 12d5ee05385..45970685abe 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscopt.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscopt.pxi @@ -5,37 +5,37 @@ cdef extern from * nogil: PetscErrorCode PetscOptionsCreate(PetscOptions*) PetscErrorCode PetscOptionsDestroy(PetscOptions*) - PetscErrorCode PetscOptionsView(PetscOptions,PetscViewer) + PetscErrorCode PetscOptionsView(PetscOptions, PetscViewer) PetscErrorCode PetscOptionsClear(PetscOptions) - PetscErrorCode PetscOptionsPrefixPush(PetscOptions,char[]) + PetscErrorCode PetscOptionsPrefixPush(PetscOptions, char[]) PetscErrorCode PetscOptionsPrefixPop(PetscOptions) - PetscErrorCode PetscOptionsHasName(PetscOptions,char[],char[],PetscBool*) - PetscErrorCode PetscOptionsSetAlias(PetscOptions,char[],char[]) - PetscErrorCode PetscOptionsSetValue(PetscOptions,char[],char[]) - PetscErrorCode PetscOptionsClearValue(PetscOptions,char[]) + PetscErrorCode PetscOptionsHasName(PetscOptions, char[], char[], PetscBool*) + PetscErrorCode PetscOptionsSetAlias(PetscOptions, char[], char[]) + PetscErrorCode 
PetscOptionsSetValue(PetscOptions, char[], char[]) + PetscErrorCode PetscOptionsClearValue(PetscOptions, char[]) - PetscErrorCode PetscOptionsInsertString(PetscOptions,char[]) - PetscErrorCode PetscOptionsInsertFile(PetscOptions,char[]) - PetscErrorCode PetscOptionsGetAll(PetscOptions,char*[]) + PetscErrorCode PetscOptionsInsertString(PetscOptions, char[]) + PetscErrorCode PetscOptionsInsertFile(PetscOptions, char[]) + PetscErrorCode PetscOptionsGetAll(PetscOptions, char*[]) - PetscErrorCode PetscOptionsGetBool(PetscOptions,char[],char[],PetscBool*,PetscBool*) - PetscErrorCode PetscOptionsGetBoolArray(PetscOptions,char[],char[],PetscBool[],PetscInt*,PetscBool*) - PetscErrorCode PetscOptionsGetInt(PetscOptions,char[],char[],PetscInt*,PetscBool*) - PetscErrorCode PetscOptionsGetIntArray(PetscOptions,char[],char[],PetscInt[],PetscInt*,PetscBool*) - PetscErrorCode PetscOptionsGetReal(PetscOptions,char[],char[],PetscReal*,PetscBool*) - PetscErrorCode PetscOptionsGetRealArray(PetscOptions,char[],char[],PetscReal[],PetscInt*,PetscBool*) - PetscErrorCode PetscOptionsGetScalar(PetscOptions,char[],char[],PetscScalar*,PetscBool*) - PetscErrorCode PetscOptionsGetScalarArray(PetscOptions,char[],char[],PetscScalar[],PetscInt*,PetscBool*) - PetscErrorCode PetscOptionsGetString(PetscOptions,char[],char[],char[],size_t,PetscBool*) + PetscErrorCode PetscOptionsGetBool(PetscOptions, char[], char[], PetscBool*, PetscBool*) + PetscErrorCode PetscOptionsGetBoolArray(PetscOptions, char[], char[], PetscBool[], PetscInt*, PetscBool*) + PetscErrorCode PetscOptionsGetInt(PetscOptions, char[], char[], PetscInt*, PetscBool*) + PetscErrorCode PetscOptionsGetIntArray(PetscOptions, char[], char[], PetscInt[], PetscInt*, PetscBool*) + PetscErrorCode PetscOptionsGetReal(PetscOptions, char[], char[], PetscReal*, PetscBool*) + PetscErrorCode PetscOptionsGetRealArray(PetscOptions, char[], char[], PetscReal[], PetscInt*, PetscBool*) + PetscErrorCode PetscOptionsGetScalar(PetscOptions, char[], char[], PetscScalar*, PetscBool*) + PetscErrorCode PetscOptionsGetScalarArray(PetscOptions, char[], char[], PetscScalar[], PetscInt*, PetscBool*) + PetscErrorCode PetscOptionsGetString(PetscOptions, char[], char[], char[], size_t, PetscBool*) ctypedef struct _p_PetscToken ctypedef _p_PetscToken* PetscToken - PetscErrorCode PetscTokenCreate(char[],char,PetscToken*) + PetscErrorCode PetscTokenCreate(char[], char, PetscToken*) PetscErrorCode PetscTokenDestroy(PetscToken*) - PetscErrorCode PetscTokenFind(PetscToken,char*[]) - PetscErrorCode PetscOptionsValidKey(char[],PetscBool*) + PetscErrorCode PetscTokenFind(PetscToken, char*[]) + PetscErrorCode PetscOptionsValidKey(char[], PetscBool*) # @@ -66,45 +66,46 @@ cdef opt2str(const char *pre, const char *name): cdef getopt_Bool(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscBool value = PETSC_FALSE cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscOptionsGetBool(opt, pre, name, &value, &flag) ) + CHKERR(PetscOptionsGetBool(opt, pre, name, &value, &flag)) if flag==PETSC_TRUE: return toBool(value) - if deft is not None: return deft + if deft is not None: return toBool(asBool(deft)) raise KeyError(opt2str(pre, name)) cdef getopt_BoolArray(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscBool value[1024], *ivalue = value, *ivaluedeft = NULL cdef PetscInt nmax = 1024, ndeft = 0 cdef PetscBool flag = PETSC_FALSE - cdef object dummy + cdef object unused if deft is not None: + deft = [toBool(asBool(d)) for d in deft] deft = iarray_b(deft, &ndeft, 
&ivaluedeft) if ndeft > nmax: - dummy = oarray_b(empty_b(ndeft), &nmax, &ivalue) + unused = oarray_b(empty_b(ndeft), &nmax, &ivalue) memcpy(ivalue, ivaluedeft, ndeft*sizeof(PetscBool)) - CHKERR( PetscOptionsGetBoolArray(opt, pre, name, ivalue, &nmax, &flag) ) - if flag==PETSC_TRUE: return array_b(nmax, ivalue) - if deft is not None: return deft + CHKERR(PetscOptionsGetBoolArray(opt, pre, name, ivalue, &nmax, &flag)) + if flag==PETSC_TRUE: return array_b(nmax, ivalue).astype('bool') + if deft is not None: return deft.astype('bool') raise KeyError(opt2str(pre, name)) cdef getopt_Int(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscInt value = 0 cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscOptionsGetInt(opt, pre, name, &value, &flag) ) + CHKERR(PetscOptionsGetInt(opt, pre, name, &value, &flag)) if flag==PETSC_TRUE: return toInt(value) - if deft is not None: return deft + if deft is not None: return toInt(asInt(deft)) raise KeyError(opt2str(pre, name)) cdef getopt_IntArray(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscInt value[1024], *ivalue = value, *ivaluedeft = NULL cdef PetscInt nmax = 1024, ndeft = 0 cdef PetscBool flag = PETSC_FALSE - cdef object dummy + cdef object unused if deft is not None: deft = iarray_i(deft, &ndeft, &ivaluedeft) if ndeft > nmax: - dummy = oarray_i(empty_i(ndeft), &nmax, &ivalue) + unused = oarray_i(empty_i(ndeft), &nmax, &ivalue) memcpy(ivalue, ivaluedeft, ndeft*sizeof(PetscInt)) - CHKERR( PetscOptionsGetIntArray(opt, pre, name, ivalue, &nmax, &flag) ) + CHKERR(PetscOptionsGetIntArray(opt, pre, name, ivalue, &nmax, &flag)) if flag==PETSC_TRUE: return array_i(nmax, ivalue) if deft is not None: return deft raise KeyError(opt2str(pre, name)) @@ -112,22 +113,22 @@ cdef getopt_IntArray(PetscOptions opt, const char *pre, const char *name, object cdef getopt_Real(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscReal value = 0 cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscOptionsGetReal(opt, pre, name, &value, &flag) ) + CHKERR(PetscOptionsGetReal(opt, pre, name, &value, &flag)) if flag==PETSC_TRUE: return toReal(value) - if deft is not None: return deft + if deft is not None: return toReal(asReal(deft)) raise KeyError(opt2str(pre, name)) cdef getopt_RealArray(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscReal value[1024], *ivalue = value, *ivaluedeft = NULL cdef PetscInt nmax = 1024, ndeft = 0 cdef PetscBool flag = PETSC_FALSE - cdef object dummy + cdef object unused if deft is not None: deft = iarray_r(deft, &ndeft, &ivaluedeft) if ndeft > nmax: - dummy = oarray_r(empty_r(ndeft), &nmax, &ivalue) + unused = oarray_r(empty_r(ndeft), &nmax, &ivalue) memcpy(ivalue, ivaluedeft, ndeft*sizeof(PetscReal)) - CHKERR( PetscOptionsGetRealArray(opt, pre, name, ivalue, &nmax, &flag) ) + CHKERR(PetscOptionsGetRealArray(opt, pre, name, ivalue, &nmax, &flag)) if flag==PETSC_TRUE: return array_r(nmax, ivalue) if deft is not None: return deft raise KeyError(opt2str(pre, name)) @@ -135,22 +136,22 @@ cdef getopt_RealArray(PetscOptions opt, const char *pre, const char *name, objec cdef getopt_Scalar(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscScalar value = 0 cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscOptionsGetScalar(opt, pre, name, &value, &flag) ) + CHKERR(PetscOptionsGetScalar(opt, pre, name, &value, &flag)) if flag==PETSC_TRUE: return toScalar(value) - if deft is not None: return deft + if deft is not None: return 
toScalar(asScalar(deft)) raise KeyError(opt2str(pre, name)) cdef getopt_ScalarArray(PetscOptions opt, const char *pre, const char *name, object deft): cdef PetscScalar value[1024], *ivalue = value, *ivaluedeft = NULL cdef PetscInt nmax = 1024, ndeft = 0 cdef PetscBool flag = PETSC_FALSE - cdef object dummy + cdef object unused if deft is not None: deft = iarray_s(deft, &ndeft, &ivaluedeft) if ndeft > nmax: - dummy = oarray_s(empty_s(ndeft), &nmax, &ivalue) + unused = oarray_s(empty_s(ndeft), &nmax, &ivalue) memcpy(ivalue, ivaluedeft, ndeft*sizeof(PetscScalar)) - CHKERR( PetscOptionsGetScalarArray(opt, pre, name, ivalue, &nmax, &flag) ) + CHKERR(PetscOptionsGetScalarArray(opt, pre, name, ivalue, &nmax, &flag)) if flag==PETSC_TRUE: return array_s(nmax, ivalue) if deft is not None: return deft raise KeyError(opt2str(pre, name)) @@ -158,9 +159,9 @@ cdef getopt_ScalarArray(PetscOptions opt, const char *pre, const char *name, obj cdef getopt_String(PetscOptions opt, const char *pre, const char *name, object deft): cdef char value[1024+1] cdef PetscBool flag = PETSC_FALSE - CHKERR( PetscOptionsGetString(opt, pre, name, value, 1024, &flag) ) + CHKERR(PetscOptionsGetString(opt, pre, name, value, 1024, &flag)) if flag==PETSC_TRUE: return bytes2str(value) - if deft is not None: return deft + if deft is not None: return str(deft) raise KeyError(opt2str(pre, name)) cdef enum PetscOptType: @@ -194,7 +195,7 @@ cdef getpair(prefix, name, const char **pr, const char **nm): cdef getopt(PetscOptions opt, PetscOptType otype, prefix, name, deft): cdef const char *pr = NULL cdef const char *nm = NULL - tmp = getpair(prefix, name, &pr, &nm) + cdef object unused = getpair(prefix, name, &pr, &nm) if otype == OPT_BOOL : return getopt_Bool (opt, pr, nm, deft) if otype == OPT_BOOLARRAY : return getopt_BoolArray (opt, pr, nm, deft) if otype == OPT_INT : return getopt_Int (opt, pr, nm, deft) @@ -209,27 +210,27 @@ cdef getopt(PetscOptions opt, PetscOptType otype, prefix, name, deft): # simple minded options parser cdef tokenize(options): - cdef PetscToken t = NULL - cdef const char *s = NULL - cdef const char *p = NULL - options = str2bytes(options, &s) - cdef list tokens = [] - CHKERR( PetscTokenCreate(s, c' ', &t) ) - try: - CHKERR( PetscTokenFind(t, &p) ) - while p != NULL: - tokens.append(bytes2str(p)) - CHKERR( PetscTokenFind(t, &p) ) - finally: - CHKERR( PetscTokenDestroy(&t) ) - return tokens + cdef PetscToken t = NULL + cdef const char *s = NULL + cdef const char *p = NULL + options = str2bytes(options, &s) + cdef list tokens = [] + CHKERR(PetscTokenCreate(s, c' ', &t)) + try: + CHKERR(PetscTokenFind(t, &p)) + while p != NULL: + tokens.append(bytes2str(p)) + CHKERR(PetscTokenFind(t, &p)) + finally: + CHKERR(PetscTokenDestroy(&t)) + return tokens cdef bint iskey(key): cdef const char *k = NULL cdef PetscBool b = PETSC_FALSE if key: key = str2bytes(key, &k) - CHKERR( PetscOptionsValidKey(k, &b) ) + CHKERR(PetscOptionsValidKey(k, &b)) if b == PETSC_TRUE: return True return False @@ -274,5 +275,3 @@ cdef parseopt(options, prefix): opts[key] = value # we are done return opts - -# diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscpartitioner.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscpartitioner.pxi index 7929c308d9c..fda8e0f794a 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscpartitioner.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscpartitioner.pxi @@ -9,13 +9,13 @@ cdef extern from * nogil: PetscPartitionerType PETSCPARTITIONERGATHER PetscPartitionerType 
PETSCPARTITIONERMATPARTITIONING - PetscErrorCode PetscPartitionerCreate(MPI_Comm,PetscPartitioner*) + PetscErrorCode PetscPartitionerCreate(MPI_Comm, PetscPartitioner*) PetscErrorCode PetscPartitionerDestroy(PetscPartitioner*) - PetscErrorCode PetscPartitionerView(PetscPartitioner,PetscViewer) - PetscErrorCode PetscPartitionerSetType(PetscPartitioner,PetscPartitionerType) - PetscErrorCode PetscPartitionerGetType(PetscPartitioner,PetscPartitionerType*) + PetscErrorCode PetscPartitionerView(PetscPartitioner, PetscViewer) + PetscErrorCode PetscPartitionerSetType(PetscPartitioner, PetscPartitionerType) + PetscErrorCode PetscPartitionerGetType(PetscPartitioner, PetscPartitionerType*) PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner) PetscErrorCode PetscPartitionerSetUp(PetscPartitioner) PetscErrorCode PetscPartitionerReset(PetscPartitioner) - PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner,PetscInt,PetscInt*,PetscInt*) + PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner, PetscInt, PetscInt*, PetscInt*) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscpc.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscpc.pxi index c0da485f511..76b56b6c28c 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscpc.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscpc.pxi @@ -140,178 +140,174 @@ cdef extern from * nogil: PC_FACTOR_OTHER PC_SUBPC_ERROR - PetscErrorCode PCCreate(MPI_Comm,PetscPC*) + PetscErrorCode PCCreate(MPI_Comm, PetscPC*) PetscErrorCode PCDestroy(PetscPC*) - PetscErrorCode PCView(PetscPC,PetscViewer) + PetscErrorCode PCView(PetscPC, PetscViewer) - PetscErrorCode PCSetType(PetscPC,PetscPCType) - PetscErrorCode PCGetType(PetscPC,PetscPCType*) + PetscErrorCode PCSetType(PetscPC, PetscPCType) + PetscErrorCode PCGetType(PetscPC, PetscPCType*) - PetscErrorCode PCSetOptionsPrefix(PetscPC,char[]) - PetscErrorCode PCAppendOptionsPrefix(PetscPC,char[]) - PetscErrorCode PCGetOptionsPrefix(PetscPC,char*[]) + PetscErrorCode PCSetOptionsPrefix(PetscPC, char[]) + PetscErrorCode PCAppendOptionsPrefix(PetscPC, char[]) + PetscErrorCode PCGetOptionsPrefix(PetscPC, char*[]) PetscErrorCode PCSetFromOptions(PetscPC) - PetscErrorCode PCSetFailedReason(PetscPC,PetscPCFailedReason) - PetscErrorCode PCGetFailedReason(PetscPC,PetscPCFailedReason*) - PetscErrorCode PCGetFailedReasonRank(PetscPC,PetscPCFailedReason*) + PetscErrorCode PCSetFailedReason(PetscPC, PetscPCFailedReason) + PetscErrorCode PCGetFailedReason(PetscPC, PetscPCFailedReason*) + PetscErrorCode PCGetFailedReasonRank(PetscPC, PetscPCFailedReason*) PetscErrorCode PCSetUp(PetscPC) PetscErrorCode PCReset(PetscPC) PetscErrorCode PCSetUpOnBlocks(PetscPC) - PetscErrorCode PCApply(PetscPC,PetscVec,PetscVec) - PetscErrorCode PCMatApply(PetscPC,PetscMat,PetscMat) - PetscErrorCode PCApplyTranspose(PetscPC,PetscVec,PetscVec) - PetscErrorCode PCApplySymmetricLeft(PetscPC,PetscVec,PetscVec) - PetscErrorCode PCApplySymmetricRight(PetscPC,PetscVec,PetscVec) - PetscErrorCode PCApplyRichardson(PetscPC,PetscVec,PetscVec,PetscVec,PetscReal,PetscReal,PetscReal,PetscInt) - PetscErrorCode PCApplyBAorAB(PetscPC,PetscPCSide,PetscVec,PetscVec,PetscVec) - PetscErrorCode PCApplyBAorABTranspose(PetscPC,PetscPCSide,PetscVec,PetscVec,PetscVec) - - #int PCApplyTransposeExists(PetscPC,PetscBool*) - #int PCApplyRichardsonExists(PetscPC,PetscBool*) - - PetscErrorCode PCGetDM(PetscPC,PetscDM*) - PetscErrorCode PCSetDM(PetscPC,PetscDM) - - PetscErrorCode PCSetOperators(PetscPC,PetscMat,PetscMat) - PetscErrorCode 
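The PetscPartitioner prototypes in this hunk only gain a space after each comma; at the Python level they surface as PETSc.Partitioner. A short sketch under that assumption (GATHER is one of the type constants declared above; comm choice is illustrative):

    from petsc4py import PETSc

    # PetscPartitionerCreate / SetType / SetFromOptions / SetUp / View
    part = PETSc.Partitioner().create(comm=PETSc.COMM_SELF)
    part.setType(PETSc.Partitioner.Type.GATHER)   # PETSCPARTITIONERGATHER above
    part.setFromOptions()
    part.setUp()
    part.view()
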
PCGetOperators(PetscPC,PetscMat*,PetscMat*) - PetscErrorCode PCGetOperatorsSet(PetscPC,PetscBool*,PetscBool*) - PetscErrorCode PCSetCoordinates(PetscPC,PetscInt,PetscInt,PetscReal[]) - PetscErrorCode PCSetUseAmat(PetscPC,PetscBool) - PetscErrorCode PCGetUseAmat(PetscPC,PetscBool*) - - PetscErrorCode PCComputeExplicitOperator(PetscPC,PetscMat*) - - PetscErrorCode PCDiagonalScale(PetscPC,PetscBool*) - PetscErrorCode PCDiagonalScaleLeft(PetscPC,PetscVec,PetscVec) - PetscErrorCode PCDiagonalScaleRight(PetscPC,PetscVec,PetscVec) - PetscErrorCode PCDiagonalScaleSet(PetscPC,PetscVec) - - PetscErrorCode PCASMSetType(PetscPC,PetscPCASMType) - PetscErrorCode PCASMSetOverlap(PetscPC,PetscInt) - PetscErrorCode PCASMSetLocalSubdomains(PetscPC,PetscInt,PetscIS[],PetscIS[]) - PetscErrorCode PCASMSetTotalSubdomains(PetscPC,PetscInt,PetscIS[],PetscIS[]) - PetscErrorCode PCASMGetSubKSP(PetscPC,PetscInt*,PetscInt*,PetscKSP*[]) - PetscErrorCode PCASMSetSortIndices(PetscPC,PetscBool) - - PetscErrorCode PCGASMSetType(PetscPC,PetscPCGASMType) - PetscErrorCode PCGASMSetOverlap(PetscPC,PetscInt) - - PetscErrorCode PCGAMGSetType(PetscPC,PetscPCGAMGType) - PetscErrorCode PCGAMGSetNlevels(PetscPC,PetscInt) - PetscErrorCode PCGAMGSetNSmooths(PetscPC,PetscInt) - - PetscErrorCode PCHYPREGetType(PetscPC,PetscPCHYPREType*) - PetscErrorCode PCHYPRESetType(PetscPC,PetscPCHYPREType) - PetscErrorCode PCHYPRESetDiscreteCurl(PetscPC,PetscMat); - PetscErrorCode PCHYPRESetDiscreteGradient(PetscPC,PetscMat); - PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PetscPC,PetscMat); - PetscErrorCode PCHYPRESetBetaPoissonMatrix(PetscPC,PetscMat); - PetscErrorCode PCHYPRESetEdgeConstantVectors(PetscPC,PetscVec,PetscVec,PetscVec); - PetscErrorCode PCHYPRESetInterpolations(PetscPC, PetscInt, PetscMat, PetscMat[], PetscMat, PetscMat[]); - PetscErrorCode PCHYPREAMSSetInteriorNodes(PetscPC, PetscVec); - - PetscErrorCode PCFactorGetMatrix(PetscPC,PetscMat*) - PetscErrorCode PCFactorSetZeroPivot(PetscPC,PetscReal) - PetscErrorCode PCFactorSetShiftType(PetscPC,PetscMatFactorShiftType) - PetscErrorCode PCFactorSetShiftAmount(PetscPC,PetscReal) - PetscErrorCode PCFactorSetMatSolverType(PetscPC,PetscMatSolverType) - PetscErrorCode PCFactorGetMatSolverType(PetscPC,PetscMatSolverType*) + PetscErrorCode PCApply(PetscPC, PetscVec, PetscVec) + PetscErrorCode PCMatApply(PetscPC, PetscMat, PetscMat) + PetscErrorCode PCApplyTranspose(PetscPC, PetscVec, PetscVec) + PetscErrorCode PCApplySymmetricLeft(PetscPC, PetscVec, PetscVec) + PetscErrorCode PCApplySymmetricRight(PetscPC, PetscVec, PetscVec) + PetscErrorCode PCApplyRichardson(PetscPC, PetscVec, PetscVec, PetscVec, PetscReal, PetscReal, PetscReal, PetscInt) + PetscErrorCode PCApplyBAorAB(PetscPC, PetscPCSide, PetscVec, PetscVec, PetscVec) + PetscErrorCode PCApplyBAorABTranspose(PetscPC, PetscPCSide, PetscVec, PetscVec, PetscVec) + + PetscErrorCode PCGetDM(PetscPC, PetscDM*) + PetscErrorCode PCSetDM(PetscPC, PetscDM) + + PetscErrorCode PCSetOperators(PetscPC, PetscMat, PetscMat) + PetscErrorCode PCGetOperators(PetscPC, PetscMat*, PetscMat*) + PetscErrorCode PCGetOperatorsSet(PetscPC, PetscBool*, PetscBool*) + PetscErrorCode PCSetCoordinates(PetscPC, PetscInt, PetscInt, PetscReal[]) + PetscErrorCode PCSetUseAmat(PetscPC, PetscBool) + PetscErrorCode PCGetUseAmat(PetscPC, PetscBool*) + + PetscErrorCode PCComputeExplicitOperator(PetscPC, PetscMat*) + + PetscErrorCode PCDiagonalScale(PetscPC, PetscBool*) + PetscErrorCode PCDiagonalScaleLeft(PetscPC, PetscVec, PetscVec) + PetscErrorCode PCDiagonalScaleRight(PetscPC, 
PetscVec, PetscVec) + PetscErrorCode PCDiagonalScaleSet(PetscPC, PetscVec) + + PetscErrorCode PCASMSetType(PetscPC, PetscPCASMType) + PetscErrorCode PCASMSetOverlap(PetscPC, PetscInt) + PetscErrorCode PCASMSetLocalSubdomains(PetscPC, PetscInt, PetscIS[], PetscIS[]) + PetscErrorCode PCASMSetTotalSubdomains(PetscPC, PetscInt, PetscIS[], PetscIS[]) + PetscErrorCode PCASMGetSubKSP(PetscPC, PetscInt*, PetscInt*, PetscKSP*[]) + PetscErrorCode PCASMSetSortIndices(PetscPC, PetscBool) + + PetscErrorCode PCGASMSetType(PetscPC, PetscPCGASMType) + PetscErrorCode PCGASMSetOverlap(PetscPC, PetscInt) + + PetscErrorCode PCGAMGSetType(PetscPC, PetscPCGAMGType) + PetscErrorCode PCGAMGSetNlevels(PetscPC, PetscInt) + PetscErrorCode PCGAMGSetNSmooths(PetscPC, PetscInt) + + PetscErrorCode PCHYPREGetType(PetscPC, PetscPCHYPREType*) + PetscErrorCode PCHYPRESetType(PetscPC, PetscPCHYPREType) + PetscErrorCode PCHYPRESetDiscreteCurl(PetscPC, PetscMat) + PetscErrorCode PCHYPRESetDiscreteGradient(PetscPC, PetscMat) + PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PetscPC, PetscMat) + PetscErrorCode PCHYPRESetBetaPoissonMatrix(PetscPC, PetscMat) + PetscErrorCode PCHYPRESetEdgeConstantVectors(PetscPC, PetscVec, PetscVec, PetscVec) + PetscErrorCode PCHYPRESetInterpolations(PetscPC, PetscInt, PetscMat, PetscMat[], PetscMat, PetscMat[]) + PetscErrorCode PCHYPREAMSSetInteriorNodes(PetscPC, PetscVec) + + PetscErrorCode PCFactorGetMatrix(PetscPC, PetscMat*) + PetscErrorCode PCFactorSetZeroPivot(PetscPC, PetscReal) + PetscErrorCode PCFactorSetShiftType(PetscPC, PetscMatFactorShiftType) + PetscErrorCode PCFactorSetShiftAmount(PetscPC, PetscReal) + PetscErrorCode PCFactorSetMatSolverType(PetscPC, PetscMatSolverType) + PetscErrorCode PCFactorGetMatSolverType(PetscPC, PetscMatSolverType*) PetscErrorCode PCFactorSetUpMatSolverType(PetscPC) - PetscErrorCode PCFactorSetFill(PetscPC,PetscReal) - PetscErrorCode PCFactorSetColumnPivot(PetscPC,PetscReal) - PetscErrorCode PCFactorReorderForNonzeroDiagonal(PetscPC,PetscReal) - PetscErrorCode PCFactorSetMatOrderingType(PetscPC,PetscMatOrderingType) - PetscErrorCode PCFactorSetReuseOrdering(PetscPC,PetscBool ) - PetscErrorCode PCFactorSetReuseFill(PetscPC,PetscBool ) + PetscErrorCode PCFactorSetFill(PetscPC, PetscReal) + PetscErrorCode PCFactorSetColumnPivot(PetscPC, PetscReal) + PetscErrorCode PCFactorReorderForNonzeroDiagonal(PetscPC, PetscReal) + PetscErrorCode PCFactorSetMatOrderingType(PetscPC, PetscMatOrderingType) + PetscErrorCode PCFactorSetReuseOrdering(PetscPC, PetscBool) + PetscErrorCode PCFactorSetReuseFill(PetscPC, PetscBool) PetscErrorCode PCFactorSetUseInPlace(PetscPC) PetscErrorCode PCFactorSetAllowDiagonalFill(PetscPC) - PetscErrorCode PCFactorSetPivotInBlocks(PetscPC,PetscBool ) - PetscErrorCode PCFactorSetLevels(PetscPC,PetscInt) - PetscErrorCode PCFactorSetDropTolerance(PetscPC,PetscReal,PetscReal,PetscInt) + PetscErrorCode PCFactorSetPivotInBlocks(PetscPC, PetscBool) + PetscErrorCode PCFactorSetLevels(PetscPC, PetscInt) + PetscErrorCode PCFactorSetDropTolerance(PetscPC, PetscReal, PetscReal, PetscInt) - PetscErrorCode PCFieldSplitSetType(PetscPC,PetscPCCompositeType) - PetscErrorCode PCFieldSplitSetBlockSize(PetscPC,PetscInt) - PetscErrorCode PCFieldSplitSetFields(PetscPC,char[],PetscInt,PetscInt*,PetscInt*) - PetscErrorCode PCFieldSplitSetIS(PetscPC,char[],PetscIS) - PetscErrorCode PCFieldSplitGetSubKSP(PetscPC,PetscInt*,PetscKSP*[]) - PetscErrorCode PCFieldSplitSchurGetSubKSP(PetscPC,PetscInt*,PetscKSP*[]) - PetscErrorCode 
PCFieldSplitSetSchurPre(PetscPC,PetscPCFieldSplitSchurPreType,PetscMat) - PetscErrorCode PCFieldSplitSetSchurFactType(PetscPC,PetscPCFieldSplitSchurFactType) - #int PCFieldSplitGetSchurBlocks(PetscPC,PetscMat*,PetscMat*,PetscMat*,PetscMat*) + PetscErrorCode PCFieldSplitSetType(PetscPC, PetscPCCompositeType) + PetscErrorCode PCFieldSplitSetBlockSize(PetscPC, PetscInt) + PetscErrorCode PCFieldSplitSetFields(PetscPC, char[], PetscInt, PetscInt*, PetscInt*) + PetscErrorCode PCFieldSplitSetIS(PetscPC, char[], PetscIS) + PetscErrorCode PCFieldSplitGetSubKSP(PetscPC, PetscInt*, PetscKSP*[]) + PetscErrorCode PCFieldSplitSchurGetSubKSP(PetscPC, PetscInt*, PetscKSP*[]) + PetscErrorCode PCFieldSplitSetSchurPre(PetscPC, PetscPCFieldSplitSchurPreType, PetscMat) + PetscErrorCode PCFieldSplitSetSchurFactType(PetscPC, PetscPCFieldSplitSchurFactType) - PetscErrorCode PCCompositeSetType(PetscPC,PetscPCCompositeType) - PetscErrorCode PCCompositeGetPC(PetscPC,PetscInt,PetscPC*) - PetscErrorCode PCCompositeAddPCType(PetscPC,PetscPCType) - PetscErrorCode PCCompositeAddPC(PetscPC,PetscPC) + PetscErrorCode PCCompositeSetType(PetscPC, PetscPCCompositeType) + PetscErrorCode PCCompositeGetPC(PetscPC, PetscInt, PetscPC*) + PetscErrorCode PCCompositeAddPCType(PetscPC, PetscPCType) + PetscErrorCode PCCompositeAddPC(PetscPC, PetscPC) - PetscErrorCode PCKSPGetKSP(PetscPC,PetscKSP*) + PetscErrorCode PCKSPGetKSP(PetscPC, PetscKSP*) - PetscErrorCode PCSetReusePreconditioner(PetscPC,PetscBool) + PetscErrorCode PCSetReusePreconditioner(PetscPC, PetscBool) # --- MG --- - PetscErrorCode PCMGSetType(PetscPC,PetscPCMGType) - PetscErrorCode PCMGGetType(PetscPC,PetscPCMGType*) - PetscErrorCode PCMGSetInterpolation(PetscPC,PetscInt,PetscMat) - PetscErrorCode PCMGGetInterpolation(PetscPC,PetscInt,PetscMat*) - PetscErrorCode PCMGSetRestriction(PetscPC,PetscInt,PetscMat) - PetscErrorCode PCMGGetRestriction(PetscPC,PetscInt,PetscMat*) - PetscErrorCode PCMGSetRScale(PetscPC,PetscInt,PetscVec) - PetscErrorCode PCMGGetRScale(PetscPC,PetscInt,PetscVec*) - PetscErrorCode PCMGGetSmoother(PetscPC,PetscInt,PetscKSP*) - PetscErrorCode PCMGGetSmootherUp(PetscPC,PetscInt,PetscKSP*) - PetscErrorCode PCMGGetSmootherDown(PetscPC,PetscInt,PetscKSP*) - PetscErrorCode PCMGGetCoarseSolve(PetscPC,PetscKSP*) - PetscErrorCode PCMGSetRhs(PetscPC,PetscInt,PetscVec) - PetscErrorCode PCMGSetX(PetscPC,PetscInt,PetscVec) - PetscErrorCode PCMGSetR(PetscPC,PetscInt,PetscVec) - PetscErrorCode PCMGSetLevels(PetscPC,PetscInt,MPI_Comm*) - PetscErrorCode PCMGGetLevels(PetscPC,PetscInt*) - PetscErrorCode PCMGSetCycleType(PetscPC,PetscPCMGCycleType) - PetscErrorCode PCMGSetCycleTypeOnLevel(PetscPC,PetscInt,PetscPCMGCycleType) + PetscErrorCode PCMGSetType(PetscPC, PetscPCMGType) + PetscErrorCode PCMGGetType(PetscPC, PetscPCMGType*) + PetscErrorCode PCMGSetInterpolation(PetscPC, PetscInt, PetscMat) + PetscErrorCode PCMGGetInterpolation(PetscPC, PetscInt, PetscMat*) + PetscErrorCode PCMGSetRestriction(PetscPC, PetscInt, PetscMat) + PetscErrorCode PCMGGetRestriction(PetscPC, PetscInt, PetscMat*) + PetscErrorCode PCMGSetRScale(PetscPC, PetscInt, PetscVec) + PetscErrorCode PCMGGetRScale(PetscPC, PetscInt, PetscVec*) + PetscErrorCode PCMGGetSmoother(PetscPC, PetscInt, PetscKSP*) + PetscErrorCode PCMGGetSmootherUp(PetscPC, PetscInt, PetscKSP*) + PetscErrorCode PCMGGetSmootherDown(PetscPC, PetscInt, PetscKSP*) + PetscErrorCode PCMGGetCoarseSolve(PetscPC, PetscKSP*) + PetscErrorCode PCMGSetRhs(PetscPC, PetscInt, PetscVec) + PetscErrorCode PCMGSetX(PetscPC, PetscInt, PetscVec) + 
PetscErrorCode PCMGSetR(PetscPC, PetscInt, PetscVec) + PetscErrorCode PCMGSetLevels(PetscPC, PetscInt, MPI_Comm*) + PetscErrorCode PCMGGetLevels(PetscPC, PetscInt*) + PetscErrorCode PCMGSetCycleType(PetscPC, PetscPCMGCycleType) + PetscErrorCode PCMGSetCycleTypeOnLevel(PetscPC, PetscInt, PetscPCMGCycleType) # --- BDDC --- - PetscErrorCode PCBDDCSetDiscreteGradient(PetscPC,PetscMat,PetscInt,PetscInt,PetscBool,PetscBool) - PetscErrorCode PCBDDCSetDivergenceMat(PetscPC,PetscMat,PetscBool,PetscIS) - PetscErrorCode PCBDDCSetChangeOfBasisMat(PetscPC,PetscMat,PetscBool) - PetscErrorCode PCBDDCSetPrimalVerticesIS(PetscPC,PetscIS) - PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PetscPC,PetscIS) - PetscErrorCode PCBDDCSetCoarseningRatio(PetscPC,PetscInt) - PetscErrorCode PCBDDCSetLevels(PetscPC,PetscInt) - PetscErrorCode PCBDDCSetDirichletBoundaries(PetscPC,PetscIS) - PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PetscPC,PetscIS) - PetscErrorCode PCBDDCSetNeumannBoundaries(PetscPC,PetscIS) - PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PetscPC,PetscIS) - PetscErrorCode PCBDDCSetDofsSplitting(PetscPC,PetscInt,PetscIS[]) - PetscErrorCode PCBDDCSetDofsSplittingLocal(PetscPC,PetscInt,PetscIS[]) - PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PetscPC,PetscInt,const PetscInt*,const PetscInt*,PetscCopyMode) + PetscErrorCode PCBDDCSetDiscreteGradient(PetscPC, PetscMat, PetscInt, PetscInt, PetscBool, PetscBool) + PetscErrorCode PCBDDCSetDivergenceMat(PetscPC, PetscMat, PetscBool, PetscIS) + PetscErrorCode PCBDDCSetChangeOfBasisMat(PetscPC, PetscMat, PetscBool) + PetscErrorCode PCBDDCSetPrimalVerticesIS(PetscPC, PetscIS) + PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PetscPC, PetscIS) + PetscErrorCode PCBDDCSetCoarseningRatio(PetscPC, PetscInt) + PetscErrorCode PCBDDCSetLevels(PetscPC, PetscInt) + PetscErrorCode PCBDDCSetDirichletBoundaries(PetscPC, PetscIS) + PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PetscPC, PetscIS) + PetscErrorCode PCBDDCSetNeumannBoundaries(PetscPC, PetscIS) + PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PetscPC, PetscIS) + PetscErrorCode PCBDDCSetDofsSplitting(PetscPC, PetscInt, PetscIS[]) + PetscErrorCode PCBDDCSetDofsSplittingLocal(PetscPC, PetscInt, PetscIS[]) + PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PetscPC, PetscInt, const PetscInt*, const PetscInt*, PetscCopyMode) # --- Patch --- ctypedef PetscErrorCode (*PetscPCPatchComputeOperator)(PetscPC, - PetscInt, - PetscVec, - PetscMat, - PetscIS, - PetscInt, - const PetscInt*, - const PetscInt*, - void*) except PETSC_ERR_PYTHON + PetscInt, + PetscVec, + PetscMat, + PetscIS, + PetscInt, + const PetscInt*, + const PetscInt*, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscPCPatchComputeFunction)(PetscPC, - PetscInt, - PetscVec, - PetscVec, - PetscIS, - PetscInt, - const PetscInt*, - const PetscInt*, - void*) except PETSC_ERR_PYTHON + PetscInt, + PetscVec, + PetscVec, + PetscIS, + PetscInt, + const PetscInt*, + const PetscInt*, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscPCPatchConstructOperator)(PetscPC, - PetscInt*, - PetscIS**, - PetscIS*, - void*) except PETSC_ERR_PYTHON + PetscInt*, + PetscIS**, + PetscIS*, + void*) except PETSC_ERR_PYTHON PetscErrorCode PCPatchSetCellNumbering(PetscPC, PetscSection) PetscErrorCode PCPatchSetDiscretisationInfo(PetscPC, PetscInt, PetscDM*, PetscInt*, PetscInt*, const PetscInt**, const PetscInt*, PetscInt, const PetscInt*, PetscInt, const PetscInt*) PetscErrorCode PCPatchSetComputeOperator(PetscPC, PetscPCPatchComputeOperator, void*) @@ -322,52 +318,52 
@@ cdef extern from * nogil: # --- HPDDM --- ctypedef PetscErrorCode (*PetscPCHPDDMAuxiliaryMat)(PetscMat, - PetscReal, - PetscVec, - PetscVec, - PetscReal, - PetscIS, - void*) except PETSC_ERR_PYTHON - PetscErrorCode PCHPDDMSetAuxiliaryMat(PetscPC,PetscIS,PetscMat,PetscPCHPDDMAuxiliaryMat,void*) - PetscErrorCode PCHPDDMSetRHSMat(PetscPC,PetscMat) - PetscErrorCode PCHPDDMHasNeumannMat(PetscPC,PetscBool) - PetscErrorCode PCHPDDMSetCoarseCorrectionType(PetscPC,PetscPCHPDDMCoarseCorrectionType) - PetscErrorCode PCHPDDMGetCoarseCorrectionType(PetscPC,PetscPCHPDDMCoarseCorrectionType*) - PetscErrorCode PCHPDDMGetSTShareSubKSP(PetscPC,PetscBool*) - PetscErrorCode PCHPDDMSetDeflationMat(PetscPC,PetscIS,PetscMat) + PetscReal, + PetscVec, + PetscVec, + PetscReal, + PetscIS, + void*) except PETSC_ERR_PYTHON + PetscErrorCode PCHPDDMSetAuxiliaryMat(PetscPC, PetscIS, PetscMat, PetscPCHPDDMAuxiliaryMat, void*) + PetscErrorCode PCHPDDMSetRHSMat(PetscPC, PetscMat) + PetscErrorCode PCHPDDMHasNeumannMat(PetscPC, PetscBool) + PetscErrorCode PCHPDDMSetCoarseCorrectionType(PetscPC, PetscPCHPDDMCoarseCorrectionType) + PetscErrorCode PCHPDDMGetCoarseCorrectionType(PetscPC, PetscPCHPDDMCoarseCorrectionType*) + PetscErrorCode PCHPDDMGetSTShareSubKSP(PetscPC, PetscBool*) + PetscErrorCode PCHPDDMSetDeflationMat(PetscPC, PetscIS, PetscMat) # --- SPAI --- - PetscErrorCode PCSPAISetEpsilon(PetscPC,PetscReal) - PetscErrorCode PCSPAISetNBSteps(PetscPC,PetscInt) - PetscErrorCode PCSPAISetMax(PetscPC,PetscInt) - PetscErrorCode PCSPAISetMaxNew(PetscPC,PetscInt) - PetscErrorCode PCSPAISetBlockSize(PetscPC,PetscInt) - PetscErrorCode PCSPAISetCacheSize(PetscPC,PetscInt) - PetscErrorCode PCSPAISetVerbose(PetscPC,PetscInt) - PetscErrorCode PCSPAISetSp(PetscPC,PetscInt) + PetscErrorCode PCSPAISetEpsilon(PetscPC, PetscReal) + PetscErrorCode PCSPAISetNBSteps(PetscPC, PetscInt) + PetscErrorCode PCSPAISetMax(PetscPC, PetscInt) + PetscErrorCode PCSPAISetMaxNew(PetscPC, PetscInt) + PetscErrorCode PCSPAISetBlockSize(PetscPC, PetscInt) + PetscErrorCode PCSPAISetCacheSize(PetscPC, PetscInt) + PetscErrorCode PCSPAISetVerbose(PetscPC, PetscInt) + PetscErrorCode PCSPAISetSp(PetscPC, PetscInt) # --- DEFLATION --- - PetscErrorCode PCDeflationSetInitOnly(PetscPC,PetscBool) - PetscErrorCode PCDeflationSetLevels(PetscPC,PetscInt) - PetscErrorCode PCDeflationSetReductionFactor(PetscPC,PetscInt) - PetscErrorCode PCDeflationSetCorrectionFactor(PetscPC,PetscScalar) - PetscErrorCode PCDeflationSetSpaceToCompute(PetscPC,PetscPCDeflationSpaceType,PetscInt) - PetscErrorCode PCDeflationSetSpace(PetscPC,PetscMat,PetscBool) - PetscErrorCode PCDeflationSetProjectionNullSpaceMat(PetscPC,PetscMat) - PetscErrorCode PCDeflationSetCoarseMat(PetscPC,PetscMat) - PetscErrorCode PCDeflationGetCoarseKSP(PetscPC,PetscKSP*) - PetscErrorCode PCDeflationGetPC(PetscPC,PetscPC*) + PetscErrorCode PCDeflationSetInitOnly(PetscPC, PetscBool) + PetscErrorCode PCDeflationSetLevels(PetscPC, PetscInt) + PetscErrorCode PCDeflationSetReductionFactor(PetscPC, PetscInt) + PetscErrorCode PCDeflationSetCorrectionFactor(PetscPC, PetscScalar) + PetscErrorCode PCDeflationSetSpaceToCompute(PetscPC, PetscPCDeflationSpaceType, PetscInt) + PetscErrorCode PCDeflationSetSpace(PetscPC, PetscMat, PetscBool) + PetscErrorCode PCDeflationSetProjectionNullSpaceMat(PetscPC, PetscMat) + PetscErrorCode PCDeflationSetCoarseMat(PetscPC, PetscMat) + PetscErrorCode PCDeflationGetCoarseKSP(PetscPC, PetscKSP*) + PetscErrorCode PCDeflationGetPC(PetscPC, PetscPC*) # --- PYTHON --- - PetscErrorCode 
PCPythonSetType(PetscPC,char[]) - PetscErrorCode PCPythonGetType(PetscPC,char*[]) + PetscErrorCode PCPythonSetType(PetscPC, char[]) + PetscErrorCode PCPythonGetType(PetscPC, char*[]) # -------------------------------------------------------------------- cdef inline PC ref_PC(PetscPC pc): cdef PC ob = PC() ob.pc = pc - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob cdef PetscErrorCode PCPatch_ComputeOperator( @@ -489,7 +485,7 @@ cdef PetscErrorCode PCPatch_UserConstructOperator( CHKERR(PetscMalloc(n[0]*sizeof(PetscIS), userIS)) for i in range(n[0]): userIS[0][i] = (patches[i]).iset - CHKERR( PetscINCREF(&(userIS[0][i])) ) + CHKERR(PetscINCREF(&(userIS[0][i]))) userIterationSet[0] = (iterationSet).iset - CHKERR( PetscINCREF(&(userIterationSet[0])) ) + CHKERR(PetscINCREF(&(userIterationSet[0]))) return PETSC_SUCCESS diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscrand.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscrand.pxi index 34e5f5f7a9e..6500197ae89 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscrand.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscrand.pxi @@ -7,19 +7,19 @@ cdef extern from * nogil: PetscRandomType PETSCRANDER48 PetscRandomType PETSCRANDOM123 - PetscErrorCode PetscRandomCreate(MPI_Comm,PetscRandom*) + PetscErrorCode PetscRandomCreate(MPI_Comm, PetscRandom*) PetscErrorCode PetscRandomDestroy(PetscRandom*) - PetscErrorCode PetscRandomView(PetscRandom,PetscViewer) + PetscErrorCode PetscRandomView(PetscRandom, PetscViewer) - PetscErrorCode PetscRandomSetType(PetscRandom,PetscRandomType) - PetscErrorCode PetscRandomGetType(PetscRandom,PetscRandomType*) + PetscErrorCode PetscRandomSetType(PetscRandom, PetscRandomType) + PetscErrorCode PetscRandomGetType(PetscRandom, PetscRandomType*) PetscErrorCode PetscRandomSetFromOptions(PetscRandom) - PetscErrorCode PetscRandomGetValue(PetscRandom,PetscScalar*) - PetscErrorCode PetscRandomGetValueReal(PetscRandom,PetscReal*) - PetscErrorCode PetscRandomGetValueImaginary(PetscRandom,PetscScalar*) - PetscErrorCode PetscRandomGetInterval(PetscRandom,PetscScalar*,PetscScalar*) - PetscErrorCode PetscRandomSetInterval(PetscRandom,PetscScalar,PetscScalar) - PetscErrorCode PetscRandomSetSeed(PetscRandom,unsigned long) - PetscErrorCode PetscRandomGetSeed(PetscRandom,unsigned long*) + PetscErrorCode PetscRandomGetValue(PetscRandom, PetscScalar*) + PetscErrorCode PetscRandomGetValueReal(PetscRandom, PetscReal*) + PetscErrorCode PetscRandomGetValueImaginary(PetscRandom, PetscScalar*) + PetscErrorCode PetscRandomGetInterval(PetscRandom, PetscScalar*, PetscScalar*) + PetscErrorCode PetscRandomSetInterval(PetscRandom, PetscScalar, PetscScalar) + PetscErrorCode PetscRandomSetSeed(PetscRandom, unsigned long) + PetscErrorCode PetscRandomGetSeed(PetscRandom, unsigned long*) PetscErrorCode PetscRandomSeed(PetscRandom) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscsct.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscsct.pxi index a61ee0d3c86..e8e0e22ac92 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscsct.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscsct.pxi @@ -4,17 +4,17 @@ cdef extern from * nogil: ctypedef PetscSFType PetscScatterType "VecScatterType" - PetscErrorCode VecScatterView(PetscScatter,PetscViewer) + PetscErrorCode VecScatterView(PetscScatter, PetscViewer) PetscErrorCode VecScatterDestroy(PetscScatter*) PetscErrorCode VecScatterSetUp(PetscScatter) - PetscErrorCode VecScatterCreate(PetscVec,PetscIS,PetscVec,PetscIS,PetscScatter*) + PetscErrorCode 
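The PC and PetscRandom prototypes above are likewise comma-spacing cleanups with no API change. A minimal sketch of the call sequence they expose through petsc4py (a 4x4 diagonal system; sizes and values are illustrative):

    from petsc4py import PETSc

    # PCCreate / PCSetType / PCSetOperators / PCSetUp / PCApply
    A = PETSc.Mat().createAIJ(4, comm=PETSc.COMM_SELF)
    A.setPreallocationNNZ(1)
    for i in range(4):
        A.setValue(i, i, 2.0)
    A.assemble()
    x, y = A.createVecs()
    x.set(1.0)

    pc = PETSc.PC().create(comm=PETSc.COMM_SELF)
    pc.setType(PETSc.PC.Type.JACOBI)
    pc.setOperators(A)
    pc.setUp()
    pc.apply(x, y)                   # y = diag(A)^-1 x = 0.5 everywhere

    # PetscRandomCreate / SetInterval / GetValue
    rnd = PETSc.Random().create(comm=PETSc.COMM_SELF)
    rnd.setInterval((0.0, 1.0))
    val = rnd.getValue()
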
VecScatterCreate(PetscVec, PetscIS, PetscVec, PetscIS, PetscScatter*) PetscErrorCode VecScatterSetFromOptions(PetscScatter) - PetscErrorCode VecScatterSetType(PetscScatter,PetscScatterType) - PetscErrorCode VecScatterGetType(PetscScatter,PetscScatterType*) + PetscErrorCode VecScatterSetType(PetscScatter, PetscScatterType) + PetscErrorCode VecScatterGetType(PetscScatter, PetscScatterType*) PetscErrorCode VecScatterCopy(PetscScatter, PetscScatter*) - PetscErrorCode VecScatterCreateToAll(PetscVec,PetscScatter*,PetscVec*) - PetscErrorCode VecScatterCreateToZero(PetscVec,PetscScatter*,PetscVec*) - PetscErrorCode VecScatterBegin(PetscScatter,PetscVec,PetscVec,PetscInsertMode,PetscScatterMode) - PetscErrorCode VecScatterEnd(PetscScatter,PetscVec,PetscVec,PetscInsertMode,PetscScatterMode) + PetscErrorCode VecScatterCreateToAll(PetscVec, PetscScatter*, PetscVec*) + PetscErrorCode VecScatterCreateToZero(PetscVec, PetscScatter*, PetscVec*) + PetscErrorCode VecScatterBegin(PetscScatter, PetscVec, PetscVec, PetscInsertMode, PetscScatterMode) + PetscErrorCode VecScatterEnd(PetscScatter, PetscVec, PetscVec, PetscInsertMode, PetscScatterMode) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscsec.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscsec.pxi index bf325541163..eae61cad49d 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscsec.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscsec.pxi @@ -2,51 +2,48 @@ cdef extern from * nogil: - PetscErrorCode PetscSectionCreate(MPI_Comm,PetscSection*) - PetscErrorCode PetscSectionClone(PetscSection,PetscSection*) + PetscErrorCode PetscSectionCreate(MPI_Comm, PetscSection*) + PetscErrorCode PetscSectionClone(PetscSection, PetscSection*) PetscErrorCode PetscSectionSetUp(PetscSection) PetscErrorCode PetscSectionSetUpBC(PetscSection) - PetscErrorCode PetscSectionView(PetscSection,PetscViewer) + PetscErrorCode PetscSectionView(PetscSection, PetscViewer) PetscErrorCode PetscSectionReset(PetscSection) PetscErrorCode PetscSectionDestroy(PetscSection*) - PetscErrorCode PetscSectionGetNumFields(PetscSection,PetscInt*) - PetscErrorCode PetscSectionSetNumFields(PetscSection,PetscInt) - PetscErrorCode PetscSectionGetFieldName(PetscSection,PetscInt,const char*[]) - PetscErrorCode PetscSectionSetFieldName(PetscSection,PetscInt,const char[]) - PetscErrorCode PetscSectionGetFieldComponents(PetscSection,PetscInt,PetscInt*) - PetscErrorCode PetscSectionSetFieldComponents(PetscSection,PetscInt,PetscInt) - PetscErrorCode PetscSectionGetChart(PetscSection,PetscInt*,PetscInt*) - PetscErrorCode PetscSectionSetChart(PetscSection,PetscInt,PetscInt) - PetscErrorCode PetscSectionGetPermutation(PetscSection,PetscIS*) - PetscErrorCode PetscSectionSetPermutation(PetscSection,PetscIS) - PetscErrorCode PetscSectionGetDof(PetscSection,PetscInt,PetscInt*) - PetscErrorCode PetscSectionSetDof(PetscSection,PetscInt,PetscInt) - PetscErrorCode PetscSectionAddDof(PetscSection,PetscInt,PetscInt) - PetscErrorCode PetscSectionGetFieldDof(PetscSection,PetscInt,PetscInt,PetscInt*) - PetscErrorCode PetscSectionSetFieldDof(PetscSection,PetscInt,PetscInt,PetscInt) - PetscErrorCode PetscSectionAddFieldDof(PetscSection,PetscInt,PetscInt,PetscInt) - PetscErrorCode PetscSectionGetConstraintDof(PetscSection,PetscInt,PetscInt*) - PetscErrorCode PetscSectionSetConstraintDof(PetscSection,PetscInt,PetscInt) - PetscErrorCode PetscSectionAddConstraintDof(PetscSection,PetscInt,PetscInt) - PetscErrorCode 
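The VecScatter declarations above map onto PETSc.Scatter; VecScatterCreateToAll, for instance, surfaces as the classmethod Scatter.toAll. A sketch (the vector size is illustrative):

    from petsc4py import PETSc

    v = PETSc.Vec().createMPI(8)
    v.set(1.0)
    sct, w = PETSc.Scatter.toAll(v)   # VecScatterCreateToAll
    sct.begin(v, w, addv=PETSc.InsertMode.INSERT,
              mode=PETSc.ScatterMode.FORWARD)
    sct.end(v, w, addv=PETSc.InsertMode.INSERT,
            mode=PETSc.ScatterMode.FORWARD)
    # w now holds the full vector contents on every rank
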
PetscSectionGetFieldConstraintDof(PetscSection,PetscInt,PetscInt,PetscInt*) - PetscErrorCode PetscSectionSetFieldConstraintDof(PetscSection,PetscInt,PetscInt,PetscInt) - PetscErrorCode PetscSectionAddFieldConstraintDof(PetscSection,PetscInt,PetscInt,PetscInt) - PetscErrorCode PetscSectionGetConstraintIndices(PetscSection,PetscInt,const PetscInt**) - PetscErrorCode PetscSectionSetConstraintIndices(PetscSection,PetscInt,const PetscInt*) - PetscErrorCode PetscSectionGetFieldConstraintIndices(PetscSection,PetscInt,PetscInt,const PetscInt**) - PetscErrorCode PetscSectionSetFieldConstraintIndices(PetscSection,PetscInt,PetscInt,const PetscInt*) - PetscErrorCode PetscSectionGetMaxDof(PetscSection,PetscInt*) - PetscErrorCode PetscSectionGetStorageSize(PetscSection,PetscInt*) - PetscErrorCode PetscSectionGetConstrainedStorageSize(PetscSection,PetscInt*) - PetscErrorCode PetscSectionGetOffset(PetscSection,PetscInt,PetscInt*) - PetscErrorCode PetscSectionSetOffset(PetscSection,PetscInt,PetscInt) - PetscErrorCode PetscSectionGetFieldOffset(PetscSection,PetscInt,PetscInt,PetscInt*) - PetscErrorCode PetscSectionSetFieldOffset(PetscSection,PetscInt,PetscInt,PetscInt) - PetscErrorCode PetscSectionGetOffsetRange(PetscSection,PetscInt*,PetscInt*) - PetscErrorCode PetscSectionCreateGlobalSection(PetscSection,PetscSF,PetscBool,PetscBool,PetscBool,PetscSection*) - #int PetscSectionCreateGlobalSectionCensored(PetscSection,PetscSF,PetscBool,PetscInt,const PetscInt[],PetscSection*) - PetscErrorCode PetscSectionCreateSubsection(PetscSection,PetscInt,PetscInt[],PetscSection*) - PetscErrorCode PetscSectionCreateSubmeshSection(PetscSection,IS,PetscSection*) - #int PetscSectionGetPointLayout(MPI_Comm,PetscSection,PetscLayout*) - #int PetscSectionGetValueLayout(MPI_Comm,PetscSection,PetscLayout*) + PetscErrorCode PetscSectionGetNumFields(PetscSection, PetscInt*) + PetscErrorCode PetscSectionSetNumFields(PetscSection, PetscInt) + PetscErrorCode PetscSectionGetFieldName(PetscSection, PetscInt, const char*[]) + PetscErrorCode PetscSectionSetFieldName(PetscSection, PetscInt, const char[]) + PetscErrorCode PetscSectionGetFieldComponents(PetscSection, PetscInt, PetscInt*) + PetscErrorCode PetscSectionSetFieldComponents(PetscSection, PetscInt, PetscInt) + PetscErrorCode PetscSectionGetChart(PetscSection, PetscInt*, PetscInt*) + PetscErrorCode PetscSectionSetChart(PetscSection, PetscInt, PetscInt) + PetscErrorCode PetscSectionGetPermutation(PetscSection, PetscIS*) + PetscErrorCode PetscSectionSetPermutation(PetscSection, PetscIS) + PetscErrorCode PetscSectionGetDof(PetscSection, PetscInt, PetscInt*) + PetscErrorCode PetscSectionSetDof(PetscSection, PetscInt, PetscInt) + PetscErrorCode PetscSectionAddDof(PetscSection, PetscInt, PetscInt) + PetscErrorCode PetscSectionGetFieldDof(PetscSection, PetscInt, PetscInt, PetscInt*) + PetscErrorCode PetscSectionSetFieldDof(PetscSection, PetscInt, PetscInt, PetscInt) + PetscErrorCode PetscSectionAddFieldDof(PetscSection, PetscInt, PetscInt, PetscInt) + PetscErrorCode PetscSectionGetConstraintDof(PetscSection, PetscInt, PetscInt*) + PetscErrorCode PetscSectionSetConstraintDof(PetscSection, PetscInt, PetscInt) + PetscErrorCode PetscSectionAddConstraintDof(PetscSection, PetscInt, PetscInt) + PetscErrorCode PetscSectionGetFieldConstraintDof(PetscSection, PetscInt, PetscInt, PetscInt*) + PetscErrorCode PetscSectionSetFieldConstraintDof(PetscSection, PetscInt, PetscInt, PetscInt) + PetscErrorCode PetscSectionAddFieldConstraintDof(PetscSection, PetscInt, PetscInt, PetscInt) + PetscErrorCode 
PetscSectionGetConstraintIndices(PetscSection, PetscInt, const PetscInt**) + PetscErrorCode PetscSectionSetConstraintIndices(PetscSection, PetscInt, const PetscInt*) + PetscErrorCode PetscSectionGetFieldConstraintIndices(PetscSection, PetscInt, PetscInt, const PetscInt**) + PetscErrorCode PetscSectionSetFieldConstraintIndices(PetscSection, PetscInt, PetscInt, const PetscInt*) + PetscErrorCode PetscSectionGetMaxDof(PetscSection, PetscInt*) + PetscErrorCode PetscSectionGetStorageSize(PetscSection, PetscInt*) + PetscErrorCode PetscSectionGetConstrainedStorageSize(PetscSection, PetscInt*) + PetscErrorCode PetscSectionGetOffset(PetscSection, PetscInt, PetscInt*) + PetscErrorCode PetscSectionSetOffset(PetscSection, PetscInt, PetscInt) + PetscErrorCode PetscSectionGetFieldOffset(PetscSection, PetscInt, PetscInt, PetscInt*) + PetscErrorCode PetscSectionSetFieldOffset(PetscSection, PetscInt, PetscInt, PetscInt) + PetscErrorCode PetscSectionGetOffsetRange(PetscSection, PetscInt*, PetscInt*) + PetscErrorCode PetscSectionCreateGlobalSection(PetscSection, PetscSF, PetscBool, PetscBool, PetscBool, PetscSection*) + PetscErrorCode PetscSectionCreateSubsection(PetscSection, PetscInt, PetscInt[], PetscSection*) + PetscErrorCode PetscSectionCreateSubmeshSection(PetscSection, IS, PetscSection*) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscsf.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscsf.pxi index 83c95d7d13d..c0cf04da2e9 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscsf.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscsf.pxi @@ -12,42 +12,42 @@ cdef extern from * nogil: PetscSFType PETSCSFALLTOALL PetscSFType PETSCSFWINDOW - PetscErrorCode PetscSFCreate(MPI_Comm,PetscSF*) - PetscErrorCode PetscSFSetType(PetscSF,PetscSFType) - PetscErrorCode PetscSFGetType(PetscSF,PetscSFType*) + PetscErrorCode PetscSFCreate(MPI_Comm, PetscSF*) + PetscErrorCode PetscSFSetType(PetscSF, PetscSFType) + PetscErrorCode PetscSFGetType(PetscSF, PetscSFType*) PetscErrorCode PetscSFSetFromOptions(PetscSF) PetscErrorCode PetscSFSetUp(PetscSF) - PetscErrorCode PetscSFView(PetscSF,PetscViewer) + PetscErrorCode PetscSFView(PetscSF, PetscViewer) PetscErrorCode PetscSFReset(PetscSF) PetscErrorCode PetscSFDestroy(PetscSF*) ctypedef struct PetscSFNode: PetscInt rank PetscInt index - PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt*,PetscInt*,const PetscInt**,const PetscSFNode**) - PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,PetscSFNode*,PetscCopyMode) - PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool) - - PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt**) - PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt**) - PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*) - PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*) - - PetscErrorCode PetscSFCreateEmbeddedRootSF(PetscSF,PetscInt,const PetscInt*,PetscSF*) - PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt*,PetscSF*) - - PetscErrorCode PetscSFDistributeSection(PetscSF,PetscSection,PetscInt**,PetscSection) - PetscErrorCode PetscSFCreateSectionSF(PetscSF,PetscSection,PetscInt*,PetscSection, PetscSF*) - - PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*) - - PetscErrorCode PetscSFBcastBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op) - PetscErrorCode PetscSFBcastEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op) - PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op) - PetscErrorCode 
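The PetscSection declarations above (chart, per-point dof counts, offsets) surface as PETSc.Section. The canonical create/chart/dof/setUp sequence, with illustrative sizes:

    from petsc4py import PETSc

    # PetscSectionCreate / SetChart / SetDof / SetUp / GetStorageSize
    sec = PETSc.Section().create(comm=PETSc.COMM_SELF)
    sec.setChart(0, 4)               # points 0..3
    for p in range(4):
        sec.setDof(p, 1)             # one dof per point
    sec.setUp()
    assert sec.getStorageSize() == 4
    off = sec.getOffset(2)           # offset of point 2 in the flat storage
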
PetscSFReduceEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op) - PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void*,void*) - PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void*,void*) - PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void*,void*) - PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void*,void*) - PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op) - PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op) + PetscErrorCode PetscSFGetGraph(PetscSF, PetscInt*, PetscInt*, const PetscInt**, const PetscSFNode**) + PetscErrorCode PetscSFSetGraph(PetscSF, PetscInt, PetscInt, const PetscInt*, PetscCopyMode, PetscSFNode*, PetscCopyMode) + PetscErrorCode PetscSFSetRankOrder(PetscSF, PetscBool) + + PetscErrorCode PetscSFComputeDegreeBegin(PetscSF, const PetscInt**) + PetscErrorCode PetscSFComputeDegreeEnd(PetscSF, const PetscInt**) + PetscErrorCode PetscSFGetMultiSF(PetscSF, PetscSF*) + PetscErrorCode PetscSFCreateInverseSF(PetscSF, PetscSF*) + + PetscErrorCode PetscSFCreateEmbeddedRootSF(PetscSF, PetscInt, const PetscInt*, PetscSF*) + PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF, PetscInt, const PetscInt*, PetscSF*) + + PetscErrorCode PetscSFDistributeSection(PetscSF, PetscSection, PetscInt**, PetscSection) + PetscErrorCode PetscSFCreateSectionSF(PetscSF, PetscSection, PetscInt*, PetscSection, PetscSF*) + + PetscErrorCode PetscSFCompose(PetscSF, PetscSF, PetscSF*) + + PetscErrorCode PetscSFBcastBegin(PetscSF, MPI_Datatype, const void*, void*, MPI_Op) + PetscErrorCode PetscSFBcastEnd(PetscSF, MPI_Datatype, const void*, void*, MPI_Op) + PetscErrorCode PetscSFReduceBegin(PetscSF, MPI_Datatype, const void*, void*, MPI_Op) + PetscErrorCode PetscSFReduceEnd(PetscSF, MPI_Datatype, const void*, void*, MPI_Op) + PetscErrorCode PetscSFScatterBegin(PetscSF, MPI_Datatype, const void*, void*) + PetscErrorCode PetscSFScatterEnd(PetscSF, MPI_Datatype, const void*, void*) + PetscErrorCode PetscSFGatherBegin(PetscSF, MPI_Datatype, const void*, void*) + PetscErrorCode PetscSFGatherEnd(PetscSF, MPI_Datatype, const void*, void*) + PetscErrorCode PetscSFFetchAndOpBegin(PetscSF, MPI_Datatype, void*, const void*, void*, MPI_Op) + PetscErrorCode PetscSFFetchAndOpEnd(PetscSF, MPI_Datatype, void*, const void*, void*, MPI_Op) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscsnes.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscsnes.pxi index 897592a6d70..5fddd794f71 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscsnes.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscsnes.pxi @@ -23,198 +23,196 @@ cdef extern from * nogil: PetscSNESType SNESPATCH ctypedef enum PetscSNESNormSchedule "SNESNormSchedule": - SNES_NORM_DEFAULT - SNES_NORM_NONE - SNES_NORM_ALWAYS - SNES_NORM_INITIAL_ONLY - SNES_NORM_FINAL_ONLY - SNES_NORM_INITIAL_FINAL_ONLY + SNES_NORM_DEFAULT + SNES_NORM_NONE + SNES_NORM_ALWAYS + SNES_NORM_INITIAL_ONLY + SNES_NORM_FINAL_ONLY + SNES_NORM_INITIAL_FINAL_ONLY ctypedef enum PetscSNESConvergedReason "SNESConvergedReason": - # iterating - SNES_CONVERGED_ITERATING - # converged - SNES_CONVERGED_FNORM_ABS - SNES_CONVERGED_FNORM_RELATIVE - SNES_CONVERGED_SNORM_RELATIVE - SNES_CONVERGED_ITS - # diverged - SNES_DIVERGED_FUNCTION_DOMAIN - SNES_DIVERGED_FUNCTION_COUNT - SNES_DIVERGED_LINEAR_SOLVE - SNES_DIVERGED_FNORM_NAN - SNES_DIVERGED_MAX_IT - SNES_DIVERGED_LINE_SEARCH - SNES_DIVERGED_INNER - SNES_DIVERGED_LOCAL_MIN - SNES_DIVERGED_DTOL - 
SNES_DIVERGED_JACOBIAN_DOMAIN - SNES_DIVERGED_TR_DELTA + # iterating + SNES_CONVERGED_ITERATING + # converged + SNES_CONVERGED_FNORM_ABS + SNES_CONVERGED_FNORM_RELATIVE + SNES_CONVERGED_SNORM_RELATIVE + SNES_CONVERGED_ITS + # diverged + SNES_DIVERGED_FUNCTION_DOMAIN + SNES_DIVERGED_FUNCTION_COUNT + SNES_DIVERGED_LINEAR_SOLVE + SNES_DIVERGED_FNORM_NAN + SNES_DIVERGED_MAX_IT + SNES_DIVERGED_LINE_SEARCH + SNES_DIVERGED_INNER + SNES_DIVERGED_LOCAL_MIN + SNES_DIVERGED_DTOL + SNES_DIVERGED_JACOBIAN_DOMAIN + SNES_DIVERGED_TR_DELTA ctypedef PetscErrorCode (*PetscSNESCtxDel)(void*) ctypedef PetscErrorCode (*PetscSNESInitialGuessFunction)(PetscSNES, - PetscVec, - void*) except PETSC_ERR_PYTHON + PetscVec, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscSNESFunctionFunction)(PetscSNES, - PetscVec, - PetscVec, - void*) except PETSC_ERR_PYTHON + PetscVec, + PetscVec, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscSNESUpdateFunction)(PetscSNES, - PetscInt) except PETSC_ERR_PYTHON + PetscInt) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscSNESJacobianFunction)(PetscSNES, - PetscVec, - PetscMat, - PetscMat, - void*) except PETSC_ERR_PYTHON + PetscVec, + PetscMat, + PetscMat, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscSNESObjectiveFunction)(PetscSNES, - PetscVec, - PetscReal*, - void*) except PETSC_ERR_PYTHON + PetscVec, + PetscReal*, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscSNESConvergedFunction)(PetscSNES, - PetscInt, - PetscReal, - PetscReal, - PetscReal, - PetscSNESConvergedReason*, - void*) except PETSC_ERR_PYTHON + PetscInt, + PetscReal, + PetscReal, + PetscReal, + PetscSNESConvergedReason*, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscSNESMonitorFunction)(PetscSNES, - PetscInt, - PetscReal, - void*) except PETSC_ERR_PYTHON + PetscInt, + PetscReal, + void*) except PETSC_ERR_PYTHON - PetscErrorCode SNESCreate(MPI_Comm,PetscSNES*) + PetscErrorCode SNESCreate(MPI_Comm, PetscSNES*) PetscErrorCode SNESDestroy(PetscSNES*) - PetscErrorCode SNESView(PetscSNES,PetscViewer) + PetscErrorCode SNESView(PetscSNES, PetscViewer) - PetscErrorCode SNESSetType(PetscSNES,PetscSNESType) - PetscErrorCode SNESGetType(PetscSNES,PetscSNESType*) - PetscErrorCode SNESSetOptionsPrefix(PetscSNES,char[]) - PetscErrorCode SNESAppendOptionsPrefix(PetscSNES,char[]) - PetscErrorCode SNESGetOptionsPrefix(PetscSNES,char*[]) + PetscErrorCode SNESSetType(PetscSNES, PetscSNESType) + PetscErrorCode SNESGetType(PetscSNES, PetscSNESType*) + PetscErrorCode SNESSetOptionsPrefix(PetscSNES, char[]) + PetscErrorCode SNESAppendOptionsPrefix(PetscSNES, char[]) + PetscErrorCode SNESGetOptionsPrefix(PetscSNES, char*[]) PetscErrorCode SNESSetFromOptions(PetscSNES) - PetscErrorCode SNESSetApplicationContext(PetscSNES,void*) - PetscErrorCode SNESGetApplicationContext(PetscSNES,void*) - - PetscErrorCode SNESGetKSP(PetscSNES,PetscKSP*) - PetscErrorCode SNESSetKSP(PetscSNES,PetscKSP) - - PetscErrorCode SNESGetDM(PetscSNES,PetscDM*) - PetscErrorCode SNESSetDM(PetscSNES,PetscDM) - - PetscErrorCode SNESFASSetInterpolation(PetscSNES,PetscInt,PetscMat) - PetscErrorCode SNESFASGetInterpolation(PetscSNES,PetscInt,PetscMat*) - PetscErrorCode SNESFASSetRestriction(PetscSNES,PetscInt,PetscMat) - PetscErrorCode SNESFASGetRestriction(PetscSNES,PetscInt,PetscMat*) - PetscErrorCode SNESFASSetInjection(PetscSNES,PetscInt,PetscMat) - PetscErrorCode SNESFASGetInjection(PetscSNES,PetscInt,PetscMat*) - PetscErrorCode SNESFASSetRScale(PetscSNES,PetscInt,PetscVec) - 
PetscErrorCode SNESFASSetLevels(PetscSNES,PetscInt,MPI_Comm[]) - PetscErrorCode SNESFASGetLevels(PetscSNES,PetscInt*) - PetscErrorCode SNESFASGetCycleSNES(PetscSNES,PetscInt,PetscSNES*) - PetscErrorCode SNESFASGetCoarseSolve(PetscSNES,PetscSNES*) - PetscErrorCode SNESFASGetSmoother(PetscSNES,PetscInt,PetscSNES*) - PetscErrorCode SNESFASGetSmootherDown(PetscSNES,PetscInt,PetscSNES*) - PetscErrorCode SNESFASGetSmootherUp(PetscSNES,PetscInt,PetscSNES*) - - PetscErrorCode SNESGetNPC(PetscSNES,PetscSNES*) - PetscErrorCode SNESHasNPC(PetscSNES,PetscBool*) - PetscErrorCode SNESSetNPC(PetscSNES,PetscSNES) - PetscErrorCode SNESSetNPCSide(PetscSNES,PetscPCSide) - PetscErrorCode SNESGetNPCSide(PetscSNES,PetscPCSide*) - - PetscErrorCode SNESGetRhs(PetscSNES,PetscVec*) - PetscErrorCode SNESGetSolution(PetscSNES,PetscVec*) - PetscErrorCode SNESSetSolution(PetscSNES,PetscVec) - PetscErrorCode SNESGetSolutionUpdate(PetscSNES,PetscVec*) - - PetscErrorCode SNESSetComputeInitialGuess(PetscSNES,PetscSNESInitialGuessFunction,void*) - PetscErrorCode SNESSetFunction(PetscSNES,PetscVec,PetscSNESFunctionFunction,void*) - PetscErrorCode SNESGetFunction(PetscSNES,PetscVec*,void*,void**) - PetscErrorCode SNESSetUpdate(PetscSNES,PetscSNESUpdateFunction) - PetscErrorCode SNESSetJacobian(PetscSNES,PetscMat,PetscMat,PetscSNESJacobianFunction,void*) - PetscErrorCode SNESGetJacobian(PetscSNES,PetscMat*,PetscMat*,PetscSNESJacobianFunction*,void**) - PetscErrorCode SNESSetObjective(PetscSNES,PetscSNESObjectiveFunction,void*) - PetscErrorCode SNESGetObjective(PetscSNES,PetscSNESObjectiveFunction*,void**) - - PetscErrorCode SNESComputeFunction(PetscSNES,PetscVec,PetscVec) - PetscErrorCode SNESComputeJacobian(PetscSNES,PetscVec,PetscMat,PetscMat) - PetscErrorCode SNESComputeObjective(PetscSNES,PetscVec,PetscReal*) + PetscErrorCode SNESSetApplicationContext(PetscSNES, void*) + PetscErrorCode SNESGetApplicationContext(PetscSNES, void*) + + PetscErrorCode SNESGetKSP(PetscSNES, PetscKSP*) + PetscErrorCode SNESSetKSP(PetscSNES, PetscKSP) + + PetscErrorCode SNESGetDM(PetscSNES, PetscDM*) + PetscErrorCode SNESSetDM(PetscSNES, PetscDM) + + PetscErrorCode SNESFASSetInterpolation(PetscSNES, PetscInt, PetscMat) + PetscErrorCode SNESFASGetInterpolation(PetscSNES, PetscInt, PetscMat*) + PetscErrorCode SNESFASSetRestriction(PetscSNES, PetscInt, PetscMat) + PetscErrorCode SNESFASGetRestriction(PetscSNES, PetscInt, PetscMat*) + PetscErrorCode SNESFASSetInjection(PetscSNES, PetscInt, PetscMat) + PetscErrorCode SNESFASGetInjection(PetscSNES, PetscInt, PetscMat*) + PetscErrorCode SNESFASSetRScale(PetscSNES, PetscInt, PetscVec) + PetscErrorCode SNESFASSetLevels(PetscSNES, PetscInt, MPI_Comm[]) + PetscErrorCode SNESFASGetLevels(PetscSNES, PetscInt*) + PetscErrorCode SNESFASGetCycleSNES(PetscSNES, PetscInt, PetscSNES*) + PetscErrorCode SNESFASGetCoarseSolve(PetscSNES, PetscSNES*) + PetscErrorCode SNESFASGetSmoother(PetscSNES, PetscInt, PetscSNES*) + PetscErrorCode SNESFASGetSmootherDown(PetscSNES, PetscInt, PetscSNES*) + PetscErrorCode SNESFASGetSmootherUp(PetscSNES, PetscInt, PetscSNES*) + + PetscErrorCode SNESGetNPC(PetscSNES, PetscSNES*) + PetscErrorCode SNESHasNPC(PetscSNES, PetscBool*) + PetscErrorCode SNESSetNPC(PetscSNES, PetscSNES) + PetscErrorCode SNESSetNPCSide(PetscSNES, PetscPCSide) + PetscErrorCode SNESGetNPCSide(PetscSNES, PetscPCSide*) + + PetscErrorCode SNESGetRhs(PetscSNES, PetscVec*) + PetscErrorCode SNESGetSolution(PetscSNES, PetscVec*) + PetscErrorCode SNESSetSolution(PetscSNES, PetscVec) + PetscErrorCode 
SNESGetSolutionUpdate(PetscSNES, PetscVec*) + + PetscErrorCode SNESSetComputeInitialGuess(PetscSNES, PetscSNESInitialGuessFunction, void*) + PetscErrorCode SNESSetFunction(PetscSNES, PetscVec, PetscSNESFunctionFunction, void*) + PetscErrorCode SNESGetFunction(PetscSNES, PetscVec*, void*, void**) + PetscErrorCode SNESSetUpdate(PetscSNES, PetscSNESUpdateFunction) + PetscErrorCode SNESSetJacobian(PetscSNES, PetscMat, PetscMat, PetscSNESJacobianFunction, void*) + PetscErrorCode SNESGetJacobian(PetscSNES, PetscMat*, PetscMat*, PetscSNESJacobianFunction*, void**) + PetscErrorCode SNESSetObjective(PetscSNES, PetscSNESObjectiveFunction, void*) + PetscErrorCode SNESGetObjective(PetscSNES, PetscSNESObjectiveFunction*, void**) + + PetscErrorCode SNESComputeFunction(PetscSNES, PetscVec, PetscVec) + PetscErrorCode SNESComputeJacobian(PetscSNES, PetscVec, PetscMat, PetscMat) + PetscErrorCode SNESComputeObjective(PetscSNES, PetscVec, PetscReal*) ctypedef PetscErrorCode (*PetscSNESNGSFunction)(PetscSNES, - PetscVec, - PetscVec, - void*) except PETSC_ERR_PYTHON - PetscErrorCode SNESSetNGS(PetscSNES,PetscSNESNGSFunction,void*) - PetscErrorCode SNESGetNGS(PetscSNES,PetscSNESNGSFunction*,void**) - PetscErrorCode SNESComputeNGS(PetscSNES,PetscVec,PetscVec) - - PetscErrorCode SNESSetNormSchedule(PetscSNES,PetscSNESNormSchedule) - PetscErrorCode SNESGetNormSchedule(PetscSNES,PetscSNESNormSchedule*) - - PetscErrorCode SNESSetTolerances(PetscSNES,PetscReal,PetscReal,PetscReal,PetscInt,PetscInt) - PetscErrorCode SNESGetTolerances(PetscSNES,PetscReal*,PetscReal*,PetscReal*,PetscInt*,PetscInt*) - - PetscErrorCode SNESConverged(PetscSNES,PetscInt,PetscReal,PetscReal,PetscReal) - PetscErrorCode SNESSetConvergenceTest(PetscSNES,PetscSNESConvergedFunction,void*,PetscSNESCtxDel*) - PetscErrorCode SNESConvergedDefault(PetscSNES,PetscInt,PetscReal,PetscReal,PetscReal, - PetscSNESConvergedReason*,void*) except PETSC_ERR_PYTHON - PetscErrorCode SNESConvergedSkip(PetscSNES,PetscInt,PetscReal,PetscReal,PetscReal, - PetscSNESConvergedReason*,void*) except PETSC_ERR_PYTHON - PetscErrorCode SNESSetConvergenceHistory(PetscSNES,PetscReal[],PetscInt[],PetscInt,PetscBool) - PetscErrorCode SNESGetConvergenceHistory(PetscSNES,PetscReal*[],PetscInt*[],PetscInt*) - PetscErrorCode SNESLogConvergenceHistory(PetscSNES,PetscReal,PetscInt) - - PetscErrorCode SNESMonitorSet(PetscSNES,PetscSNESMonitorFunction,void*,PetscSNESCtxDel) + PetscVec, + PetscVec, + void*) except PETSC_ERR_PYTHON + PetscErrorCode SNESSetNGS(PetscSNES, PetscSNESNGSFunction, void*) + PetscErrorCode SNESGetNGS(PetscSNES, PetscSNESNGSFunction*, void**) + PetscErrorCode SNESComputeNGS(PetscSNES, PetscVec, PetscVec) + + PetscErrorCode SNESSetNormSchedule(PetscSNES, PetscSNESNormSchedule) + PetscErrorCode SNESGetNormSchedule(PetscSNES, PetscSNESNormSchedule*) + + PetscErrorCode SNESSetTolerances(PetscSNES, PetscReal, PetscReal, PetscReal, PetscInt, PetscInt) + PetscErrorCode SNESGetTolerances(PetscSNES, PetscReal*, PetscReal*, PetscReal*, PetscInt*, PetscInt*) + + PetscErrorCode SNESConverged(PetscSNES, PetscInt, PetscReal, PetscReal, PetscReal) + PetscErrorCode SNESSetConvergenceTest(PetscSNES, PetscSNESConvergedFunction, void*, PetscSNESCtxDel*) + PetscErrorCode SNESConvergedDefault(PetscSNES, PetscInt, PetscReal, PetscReal, PetscReal, + PetscSNESConvergedReason*, void*) except PETSC_ERR_PYTHON + PetscErrorCode SNESConvergedSkip(PetscSNES, PetscInt, PetscReal, PetscReal, PetscReal, + PetscSNESConvergedReason*, void*) except PETSC_ERR_PYTHON + PetscErrorCode 
SNESSetConvergenceHistory(PetscSNES, PetscReal[], PetscInt[], PetscInt, PetscBool) + PetscErrorCode SNESGetConvergenceHistory(PetscSNES, PetscReal*[], PetscInt*[], PetscInt*) + PetscErrorCode SNESLogConvergenceHistory(PetscSNES, PetscReal, PetscInt) + + PetscErrorCode SNESMonitorSet(PetscSNES, PetscSNESMonitorFunction, void*, PetscSNESCtxDel) PetscErrorCode SNESMonitorCancel(PetscSNES) - PetscErrorCode SNESMonitor(PetscSNES,PetscInt,PetscReal) + PetscErrorCode SNESMonitor(PetscSNES, PetscInt, PetscReal) PetscErrorCode SNESSetUp(PetscSNES) PetscErrorCode SNESSetUpMatrices(PetscSNES) PetscErrorCode SNESReset(PetscSNES) - PetscErrorCode SNESSolve(PetscSNES,PetscVec,PetscVec) - - PetscErrorCode SNESSetConvergedReason(PetscSNES,PetscSNESConvergedReason) - PetscErrorCode SNESGetConvergedReason(PetscSNES,PetscSNESConvergedReason*) - PetscErrorCode SNESSetErrorIfNotConverged(PetscSNES,PetscBool); - PetscErrorCode SNESGetErrorIfNotConverged(PetscSNES,PetscBool*); - PetscErrorCode SNESSetIterationNumber(PetscSNES,PetscInt) - PetscErrorCode SNESGetIterationNumber(PetscSNES,PetscInt*) - PetscErrorCode SNESSetForceIteration(PetscSNES,PetscBool) - PetscErrorCode SNESSetFunctionNorm(PetscSNES,PetscReal) - PetscErrorCode SNESGetFunctionNorm(PetscSNES,PetscReal*) - PetscErrorCode SNESGetLinearSolveIterations(PetscSNES,PetscInt*) - PetscErrorCode SNESSetCountersReset(PetscSNES,PetscBool) - - PetscErrorCode SNESGetNumberFunctionEvals(PetscSNES,PetscInt*) - PetscErrorCode SNESSetMaxNonlinearStepFailures(PetscSNES,PetscInt) - PetscErrorCode SNESGetMaxNonlinearStepFailures(PetscSNES,PetscInt*) - PetscErrorCode SNESGetNonlinearStepFailures(PetscSNES,PetscInt*) - PetscErrorCode SNESSetMaxLinearSolveFailures(PetscSNES,PetscInt) - PetscErrorCode SNESGetMaxLinearSolveFailures(PetscSNES,PetscInt*) - PetscErrorCode SNESGetLinearSolveFailures(PetscSNES,PetscInt*) - - PetscErrorCode SNESKSPSetUseEW(PetscSNES,PetscBool) - PetscErrorCode SNESKSPGetUseEW(PetscSNES,PetscBool*) - PetscErrorCode SNESKSPSetParametersEW(PetscSNES,PetscInt,PetscReal,PetscReal, - PetscReal,PetscReal,PetscReal,PetscReal) - PetscErrorCode SNESKSPGetParametersEW(PetscSNES,PetscInt*,PetscReal*,PetscReal*, - PetscReal*,PetscReal*,PetscReal*,PetscReal*) - - PetscErrorCode SNESVISetVariableBounds(PetscSNES,PetscVec,PetscVec) - #ctypedef PetscErrorCode (*PetscSNESVariableBoundsFunction)(PetscSNES,PetscVec,PetscVec) - #int SNESVISetComputeVariableBounds(PetscSNES,PetscSNESVariableBoundsFunction) + PetscErrorCode SNESSolve(PetscSNES, PetscVec, PetscVec) + + PetscErrorCode SNESSetConvergedReason(PetscSNES, PetscSNESConvergedReason) + PetscErrorCode SNESGetConvergedReason(PetscSNES, PetscSNESConvergedReason*) + PetscErrorCode SNESSetErrorIfNotConverged(PetscSNES, PetscBool) + PetscErrorCode SNESGetErrorIfNotConverged(PetscSNES, PetscBool*) + PetscErrorCode SNESSetIterationNumber(PetscSNES, PetscInt) + PetscErrorCode SNESGetIterationNumber(PetscSNES, PetscInt*) + PetscErrorCode SNESSetForceIteration(PetscSNES, PetscBool) + PetscErrorCode SNESSetFunctionNorm(PetscSNES, PetscReal) + PetscErrorCode SNESGetFunctionNorm(PetscSNES, PetscReal*) + PetscErrorCode SNESGetLinearSolveIterations(PetscSNES, PetscInt*) + PetscErrorCode SNESSetCountersReset(PetscSNES, PetscBool) + + PetscErrorCode SNESGetNumberFunctionEvals(PetscSNES, PetscInt*) + PetscErrorCode SNESSetMaxNonlinearStepFailures(PetscSNES, PetscInt) + PetscErrorCode SNESGetMaxNonlinearStepFailures(PetscSNES, PetscInt*) + PetscErrorCode SNESGetNonlinearStepFailures(PetscSNES, PetscInt*) + PetscErrorCode 
SNESSetMaxLinearSolveFailures(PetscSNES, PetscInt) + PetscErrorCode SNESGetMaxLinearSolveFailures(PetscSNES, PetscInt*) + PetscErrorCode SNESGetLinearSolveFailures(PetscSNES, PetscInt*) + + PetscErrorCode SNESKSPSetUseEW(PetscSNES, PetscBool) + PetscErrorCode SNESKSPGetUseEW(PetscSNES, PetscBool*) + PetscErrorCode SNESKSPSetParametersEW(PetscSNES, PetscInt, PetscReal, PetscReal, + PetscReal, PetscReal, PetscReal, PetscReal) + PetscErrorCode SNESKSPGetParametersEW(PetscSNES, PetscInt*, PetscReal*, PetscReal*, + PetscReal*, PetscReal*, PetscReal*, PetscReal*) + + PetscErrorCode SNESVISetVariableBounds(PetscSNES, PetscVec, PetscVec) PetscErrorCode SNESVIGetInactiveSet(PetscSNES, PetscIS*) - PetscErrorCode SNESCompositeGetSNES(PetscSNES,PetscInt,PetscSNES*) - PetscErrorCode SNESCompositeGetNumber(PetscSNES,PetscInt*) - PetscErrorCode SNESNASMGetSNES(PetscSNES,PetscInt,PetscSNES*) - PetscErrorCode SNESNASMGetNumber(PetscSNES,PetscInt*) + PetscErrorCode SNESCompositeGetSNES(PetscSNES, PetscInt, PetscSNES*) + PetscErrorCode SNESCompositeGetNumber(PetscSNES, PetscInt*) + PetscErrorCode SNESNASMGetSNES(PetscSNES, PetscInt, PetscSNES*) + PetscErrorCode SNESNASMGetNumber(PetscSNES, PetscInt*) PetscErrorCode SNESPatchSetCellNumbering(PetscSNES, PetscSection) PetscErrorCode SNESPatchSetDiscretisationInfo(PetscSNES, PetscInt, PetscDM*, PetscInt*, PetscInt*, const PetscInt**, const PetscInt*, PetscInt, const PetscInt*, PetscInt, const PetscInt*) @@ -222,19 +220,19 @@ cdef extern from * nogil: PetscErrorCode SNESPatchSetComputeFunction(PetscSNES, PetscPCPatchComputeFunction, void*) PetscErrorCode SNESPatchSetConstructType(PetscSNES, PetscPCPatchConstructType, PetscPCPatchConstructOperator, void*) - PetscErrorCode SNESPythonSetType(PetscSNES,char[]) - PetscErrorCode SNESPythonGetType(PetscSNES,char*[]) + PetscErrorCode SNESPythonSetType(PetscSNES, char[]) + PetscErrorCode SNESPythonGetType(PetscSNES, char*[]) cdef extern from * nogil: # custom.h - PetscErrorCode SNESSetUseMFFD(PetscSNES,PetscBool) - PetscErrorCode SNESGetUseMFFD(PetscSNES,PetscBool*) + PetscErrorCode SNESSetUseMFFD(PetscSNES, PetscBool) + PetscErrorCode SNESGetUseMFFD(PetscSNES, PetscBool*) - PetscErrorCode SNESSetUseFDColoring(PetscSNES,PetscBool) - PetscErrorCode SNESGetUseFDColoring(PetscSNES,PetscBool*) + PetscErrorCode SNESSetUseFDColoring(PetscSNES, PetscBool) + PetscErrorCode SNESGetUseFDColoring(PetscSNES, PetscBool*) - PetscErrorCode SNESConvergenceTestCall(PetscSNES,PetscInt, - PetscReal,PetscReal,PetscReal, - PetscSNESConvergedReason*) + PetscErrorCode SNESConvergenceTestCall(PetscSNES, PetscInt, + PetscReal, PetscReal, PetscReal, + PetscSNESConvergedReason*) ctypedef const char* PetscSNESLineSearchType "SNESLineSearchType" PetscSNESLineSearchType SNESLINESEARCHBT @@ -246,25 +244,25 @@ cdef extern from * nogil: # custom.h PetscSNESLineSearchType SNESLINESEARCHSHELL PetscSNESLineSearchType SNESLINESEARCHNCGLINEAR - PetscErrorCode SNESGetLineSearch(PetscSNES,PetscSNESLineSearch*) + PetscErrorCode SNESGetLineSearch(PetscSNES, PetscSNESLineSearch*) PetscErrorCode SNESLineSearchSetFromOptions(PetscSNESLineSearch) - PetscErrorCode SNESLineSearchApply(PetscSNESLineSearch,PetscVec,PetscVec,PetscReal*,PetscVec) - PetscErrorCode SNESLineSearchGetNorms(PetscSNESLineSearch,PetscReal*,PetscReal*,PetscReal*) + PetscErrorCode SNESLineSearchApply(PetscSNESLineSearch, PetscVec, PetscVec, PetscReal*, PetscVec) + PetscErrorCode SNESLineSearchGetNorms(PetscSNESLineSearch, PetscReal*, PetscReal*, PetscReal*) PetscErrorCode 
SNESLineSearchDestroy(PetscSNESLineSearch*) ctypedef PetscErrorCode (*PetscSNESPreCheckFunction)(PetscSNESLineSearch, - PetscVec,PetscVec, - PetscBool*, - void*) except PETSC_ERR_PYTHON - PetscErrorCode SNESLineSearchSetPreCheck(PetscSNESLineSearch,PetscSNESPreCheckFunction,void*) - PetscErrorCode SNESLineSearchGetSNES(PetscSNESLineSearch,PetscSNES*) + PetscVec, PetscVec, + PetscBool*, + void*) except PETSC_ERR_PYTHON + PetscErrorCode SNESLineSearchSetPreCheck(PetscSNESLineSearch, PetscSNESPreCheckFunction, void*) + PetscErrorCode SNESLineSearchGetSNES(PetscSNESLineSearch, PetscSNES*) # ----------------------------------------------------------------------------- cdef inline SNES ref_SNES(PetscSNES snes): cdef SNES ob = SNES() ob.snes = snes - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # ----------------------------------------------------------------------------- @@ -272,8 +270,8 @@ cdef inline SNES ref_SNES(PetscSNES snes): cdef PetscErrorCode SNES_InitialGuess( PetscSNES snes, PetscVec x, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef Vec Xvec = ref_Vec(x) cdef object context = Snes.get_attr('__initialguess__') @@ -290,10 +288,10 @@ cdef PetscErrorCode SNES_PreCheck( PetscVec x, PetscVec y, PetscBool *changed, - void* ctx - ) except PETSC_ERR_PYTHON with gil: - cdef PetscSNES snes = NULL; - CHKERR( SNESLineSearchGetSNES(linesearch, &snes) ); + void *ctx + ) except PETSC_ERR_PYTHON with gil: + cdef PetscSNES snes = NULL + CHKERR(SNESLineSearchGetSNES(linesearch, &snes)) cdef object b = False cdef SNES Snes = ref_SNES(snes) cdef Vec Xvec = ref_Vec(x) @@ -313,8 +311,8 @@ cdef PetscErrorCode SNES_Function( PetscSNES snes, PetscVec x, PetscVec f, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef Vec Xvec = ref_Vec(x) cdef Vec Fvec = ref_Vec(f) @@ -330,7 +328,7 @@ cdef PetscErrorCode SNES_Function( cdef PetscErrorCode SNES_Update( PetscSNES snes, PetscInt its, - ) except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef object context = Snes.get_attr('__update__') assert context is not None and type(context) is tuple # sanity check @@ -345,8 +343,8 @@ cdef PetscErrorCode SNES_Jacobian( PetscVec x, PetscMat J, PetscMat P, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef Vec Xvec = ref_Vec(x) cdef Mat Jmat = ref_Mat(J) @@ -361,11 +359,11 @@ cdef PetscErrorCode SNES_Jacobian( # ----------------------------------------------------------------------------- cdef PetscErrorCode SNES_Objective( - PetscSNES snes, - PetscVec x, + PetscSNES snes, + PetscVec x, PetscReal *o, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef Vec Xvec = ref_Vec(x) cdef object context = Snes.get_attr('__objective__') @@ -382,8 +380,8 @@ cdef PetscErrorCode SNES_NGS( PetscSNES snes, PetscVec x, PetscVec b, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef Vec Xvec = ref_Vec(x) cdef Vec Bvec = ref_Vec(b) @@ -403,8 +401,8 @@ cdef PetscErrorCode SNES_Converged( PetscReal gnorm, PetscReal fnorm, PetscSNESConvergedReason *r, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except 
PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef object it = toInt(iters) cdef object xn = toReal(xnorm) @@ -424,11 +422,11 @@ cdef PetscErrorCode SNES_Converged( # ----------------------------------------------------------------------------- cdef PetscErrorCode SNES_Monitor( - PetscSNES snes, - PetscInt iters, - PetscReal rnorm, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + PetscSNES snes, + PetscInt iters, + PetscReal rnorm, + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef SNES Snes = ref_SNES(snes) cdef object monitorlist = Snes.get_attr('__monitor__') if monitorlist is None: return PETSC_SUCCESS diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscspace.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscspace.pxi index 6efc8403d02..2389d7ce112 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscspace.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscspace.pxi @@ -17,7 +17,6 @@ cdef extern from * nogil: PetscErrorCode PetscSpaceView(PetscSpace, PetscViewer) PetscErrorCode PetscSpaceSetType(PetscSpace, PetscSpaceType) PetscErrorCode PetscSpaceGetType(PetscSpace, PetscSpaceType*) - #int PetscSpaceEvaluate(PetscSpace, PetscInt, const PetscReal [], PetscReal [], PetscReal []) PetscErrorCode PetscSpaceGetDimension(PetscSpace, PetscInt*) PetscErrorCode PetscSpaceGetDegree(PetscSpace, PetscInt*, PetscInt*) PetscErrorCode PetscSpaceGetNumVariables(PetscSpace, PetscInt*) @@ -31,12 +30,11 @@ cdef extern from * nogil: PetscErrorCode PetscSpaceSumGetNumSubspaces(PetscSpace, PetscInt*) PetscErrorCode PetscSpaceSumGetSubspace(PetscSpace, PetscInt, PetscSpace*) PetscErrorCode PetscSpaceSumSetNumSubspaces(PetscSpace, PetscInt) - PetscErrorCode PetscSpaceSumSetSubspace(PetscSpace,PetscInt, PetscSpace) + PetscErrorCode PetscSpaceSumSetSubspace(PetscSpace, PetscInt, PetscSpace) PetscErrorCode PetscSpaceTensorGetNumSubspaces(PetscSpace, PetscInt*) PetscErrorCode PetscSpaceTensorGetSubspace(PetscSpace, PetscInt, PetscSpace*) PetscErrorCode PetscSpaceTensorSetNumSubspaces(PetscSpace, PetscInt) PetscErrorCode PetscSpaceTensorSetSubspace(PetscSpace, PetscInt, PetscSpace) - PetscErrorCode PetscSpaceViewFromOptions(PetscSpace, PetscObject, char []) PetscErrorCode PetscSpacePolynomialSetTensor(PetscSpace, PetscBool) PetscErrorCode PetscSpacePolynomialGetTensor(PetscSpace, PetscBool*) @@ -70,7 +68,6 @@ cdef extern from * nogil: PetscErrorCode PetscDualSpaceSetOrder(PetscDualSpace, PetscInt) PetscErrorCode PetscDualSpaceGetNumDof(PetscDualSpace, const PetscInt**) PetscErrorCode PetscDualSpaceSetUp(PetscDualSpace) - PetscErrorCode PetscDualSpaceViewFromOptions(PetscDualSpace,PetscObject, char[]) PetscErrorCode PetscDualSpaceGetFunctional(PetscDualSpace, PetscInt, PetscQuadrature*) PetscErrorCode PetscDualSpaceGetInteriorDimension(PetscDualSpace, PetscInt*) @@ -84,14 +81,3 @@ cdef extern from * nogil: PetscErrorCode PetscDualSpaceSimpleSetFunctional(PetscDualSpace, PetscInt, PetscQuadrature) PetscErrorCode PetscDualSpaceGetType(PetscDualSpace, PetscDualSpaceType*) PetscErrorCode PetscDualSpaceSetType(PetscDualSpace, PetscDualSpaceType) - - #int PetscDualSpaceSetFromOptions(PetscDualSpace) - - - #int PetscDualSpaceRefinedSetCellSpaces(PetscDualSpace, const PetscDualSpace []) - - # Advanced - #int PetscDualSpaceCreateAllDataDefault(PetscDualSpace, PetscQuadrature*, PetscMat*) - #int PetscDualSpaceCreateInteriorDataDefault(PetscDualSpace, PetscQuadrature*, PetscMat*) - #int PetscDualSpaceEqual(PetscDualSpace, PetscDualSpace, PetscBool*) - #int 
PetscDualSpaceGetAllData(PetscDualSpace, PetscQuadrature*, PetscMat*) \ No newline at end of file diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscsys.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscsys.pxi index 8a5f72cb802..7d13448c4e4 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscsys.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscsys.pxi @@ -8,50 +8,50 @@ cdef extern from * nogil: PETSC_DATATYPE_UNKNOWN const char PETSC_AUTHOR_INFO[] - PetscErrorCode PetscGetVersion(char[],size_t) - PetscErrorCode PetscGetVersionNumber(PetscInt*,PetscInt*,PetscInt*,PetscInt*) + PetscErrorCode PetscGetVersion(char[], size_t) + PetscErrorCode PetscGetVersionNumber(PetscInt*, PetscInt*, PetscInt*, PetscInt*) - PetscErrorCode PetscInitialize(int*,char***,char[],char[]) + PetscErrorCode PetscInitialize(int*, char***, char[], char[]) PetscErrorCode PetscInitializeNoArguments() PetscErrorCode PetscFinalize() PetscBool PetscInitializeCalled PetscBool PetscFinalizeCalled ctypedef PetscErrorCode (*PetscErrorHandlerFunction)( - MPI_Comm,int,char*,char*,int,PetscErrorType,char*,void*) + MPI_Comm, int, char*, char*, int, PetscErrorType, char*, void*) PetscErrorHandlerFunction PetscAttachDebuggerErrorHandler PetscErrorHandlerFunction PetscEmacsClientErrorHandler PetscErrorHandlerFunction PetscTraceBackErrorHandler PetscErrorHandlerFunction PetscMPIAbortErrorHandler PetscErrorHandlerFunction PetscAbortErrorHandler PetscErrorHandlerFunction PetscIgnoreErrorHandler - PetscErrorCode PetscPushErrorHandler(PetscErrorHandlerFunction,void*) + PetscErrorCode PetscPushErrorHandler(PetscErrorHandlerFunction, void*) PetscErrorCode PetscPopErrorHandler() PetscErrorCode PetscPopSignalHandler() PetscErrorCode PetscInfoAllow(PetscBool) - PetscErrorCode PetscInfoSetFile(char*,char*) + PetscErrorCode PetscInfoSetFile(char*, char*) - PetscErrorCode PetscErrorMessage(int,char*[],char**) + PetscErrorCode PetscErrorMessage(int, char*[], char**) - PetscErrorCode PetscSplitOwnership(MPI_Comm,PetscInt*,PetscInt*) - PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm,PetscInt,PetscInt*,PetscInt*) + PetscErrorCode PetscSplitOwnership(MPI_Comm, PetscInt*, PetscInt*) + PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm, PetscInt, PetscInt*, PetscInt*) FILE *PETSC_STDOUT FILE *PETSC_STDERR - PetscErrorCode PetscPrintf(MPI_Comm,char[],...) - PetscErrorCode PetscVSNPrintf(char*,size_t,const char[],size_t *,va_list) - PetscErrorCode PetscVFPrintfDefault(FILE*,const char[],va_list) - PetscErrorCode PetscSynchronizedPrintf(MPI_Comm,char[],...) - PetscErrorCode PetscSynchronizedFlush(MPI_Comm,FILE*) + PetscErrorCode PetscPrintf(MPI_Comm, char[], ...) + PetscErrorCode PetscVSNPrintf(char*, size_t, const char[], size_t *, va_list) + PetscErrorCode PetscVFPrintfDefault(FILE*, const char[], va_list) + PetscErrorCode PetscSynchronizedPrintf(MPI_Comm, char[], ...) 
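[Illustrative usage note, not part of the patch.] The petscsys.pxi declarations normalized in this hunk (PetscPrintf, PetscSynchronizedPrintf/PetscSynchronizedFlush, PetscSplitOwnership) back the public PETSc.Sys helpers in petsc4py. A minimal sketch of how they surface in Python, assuming a working petsc4py build:

    from petsc4py import PETSc

    comm = PETSc.COMM_WORLD
    # PetscPrintf: only rank 0 writes
    PETSc.Sys.Print('running on', comm.getSize(), 'ranks', comm=comm)
    # PetscSynchronizedPrintf buffers per-rank output in rank order ...
    PETSc.Sys.syncPrint('hello from rank', comm.getRank(), comm=comm)
    # ... and PetscSynchronizedFlush emits the buffered lines
    PETSc.Sys.syncFlush(comm=comm)
    # PetscSplitOwnership: let PETSc decide the local size for global size 10
    n, N = PETSc.Sys.splitOwnership((None, 10), comm=comm)

Sys.splitOwnership also goes through the Sys_Sizes helper below, which is where the "local and global sizes cannot be both 'DECIDE'" and block-size divisibility errors originate.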
+ PetscErrorCode PetscSynchronizedFlush(MPI_Comm, FILE*) - PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm,int) - PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm,int) + PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm, int) + PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm, int) PetscErrorCode PetscSleep(PetscReal) - PetscErrorCode PetscCitationsRegister(const char[],PetscBool*) + PetscErrorCode PetscCitationsRegister(const char[], PetscBool*) - PetscErrorCode PetscHasExternalPackage(const char[],PetscBool*) + PetscErrorCode PetscHasExternalPackage(const char[], PetscBool*) cdef inline PetscErrorCode Sys_Sizes( @@ -59,7 +59,7 @@ cdef inline PetscErrorCode Sys_Sizes( PetscInt *_b, PetscInt *_n, PetscInt *_N, - ) except PETSC_ERR_PYTHON: + ) except PETSC_ERR_PYTHON: # get block size cdef PetscInt bs=PETSC_DECIDE, b=PETSC_DECIDE if bsize is not None: bs = b = asInt(bsize) @@ -80,10 +80,10 @@ cdef inline PetscErrorCode Sys_Sizes( "local and global sizes cannot be both 'DECIDE'") if (n > 0) and (n % bs): raise ValueError( "local size %d not divisible by block size %d" % - (toInt(n), toInt(bs)) ) + (toInt(n), toInt(bs))) if (N > 0) and (N % bs): raise ValueError( "global size %d not divisible by block size %d" % - (toInt(N), toInt(bs)) ) + (toInt(N), toInt(bs))) # return result to the caller if _b != NULL: _b[0] = b if _n != NULL: _n[0] = n @@ -95,13 +95,13 @@ cdef inline PetscErrorCode Sys_Layout( PetscInt bs, PetscInt *_n, PetscInt *_N, - ) except PETSC_ERR_PYTHON: + ) except PETSC_ERR_PYTHON: cdef PetscInt n = _n[0] cdef PetscInt N = _N[0] if bs < 0: bs = 1 if n > 0: n = n // bs if N > 0: N = N // bs - CHKERR( PetscSplitOwnership(comm, &n, &N) ) + CHKERR(PetscSplitOwnership(comm, &n, &N)) _n[0] = n * bs _N[0] = N * bs return PETSC_SUCCESS diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petsctao.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petsctao.pxi index 7c5e5eb67eb..8f7bb461264 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petsctao.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petsctao.pxi @@ -36,7 +36,7 @@ cdef extern from * nogil: PetscTAOType TAOPYTHON ctypedef enum PetscTAOConvergedReason "TaoConvergedReason": - #iterating + # iterating TAO_CONTINUE_ITERATING # converged TAO_CONVERGED_GATOL @@ -54,24 +54,24 @@ cdef extern from * nogil: TAO_DIVERGED_USER ctypedef PetscErrorCode (*PetscTaoMonitorDestroy)(void**) - ctypedef PetscErrorCode PetscTaoConvergenceTest(PetscTAO,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoMonitor(PetscTAO,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoObjective(PetscTAO,PetscVec,PetscReal*,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoResidual(PetscTAO,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoGradient(PetscTAO,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoObjGrad(PetscTAO,PetscVec,PetscReal*,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoRegularizerObjGrad(PetscTAO,PetscVec,PetscReal*,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoVarBounds(PetscTAO,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoConstraints(PetscTAO,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoEqualityConstraints(PetscTAO,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoHessian(PetscTAO,PetscVec,PetscMat,PetscMat,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode 
PetscTaoRegularizerHessian(PetscTAO,PetscVec,PetscMat,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoJacobian(PetscTAO,PetscVec,PetscMat,PetscMat,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoJacobianResidual(PetscTAO,PetscVec,PetscMat,PetscMat,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoJacobianState(PetscTAO,PetscVec,PetscMat,PetscMat,PetscMat,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoJacobianDesign(PetscTAO,PetscVec,PetscMat,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoJacobianEquality(PetscTAO,PetscVec,PetscMat,PetscMat,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoUpdateFunction(PetscTAO,PetscInt,void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoConvergenceTest(PetscTAO, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoMonitor(PetscTAO, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoObjective(PetscTAO, PetscVec, PetscReal*, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoResidual(PetscTAO, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoGradient(PetscTAO, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoObjGrad(PetscTAO, PetscVec, PetscReal*, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoRegularizerObjGrad(PetscTAO, PetscVec, PetscReal*, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoVarBounds(PetscTAO, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoConstraints(PetscTAO, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoEqualityConstraints(PetscTAO, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoHessian(PetscTAO, PetscVec, PetscMat, PetscMat, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoRegularizerHessian(PetscTAO, PetscVec, PetscMat, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoJacobian(PetscTAO, PetscVec, PetscMat, PetscMat, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoJacobianResidual(PetscTAO, PetscVec, PetscMat, PetscMat, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoJacobianState(PetscTAO, PetscVec, PetscMat, PetscMat, PetscMat, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoJacobianDesign(PetscTAO, PetscVec, PetscMat, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoJacobianEquality(PetscTAO, PetscVec, PetscMat, PetscMat, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoUpdateFunction(PetscTAO, PetscInt, void*) except PETSC_ERR_PYTHON ctypedef enum PetscTAOBNCGType "TaoBNCGType": TAO_BNCG_GD @@ -88,111 +88,111 @@ cdef extern from * nogil: TAO_BNCG_SSML_DFP TAO_BNCG_SSML_BRDN - PetscErrorCode TaoMonitor(PetscTAO,PetscInt,PetscReal,PetscReal,PetscReal,PetscReal) - PetscErrorCode TaoView(PetscTAO,PetscViewer) + PetscErrorCode TaoMonitor(PetscTAO, PetscInt, PetscReal, PetscReal, PetscReal, PetscReal) + PetscErrorCode TaoView(PetscTAO, PetscViewer) PetscErrorCode TaoDestroy(PetscTAO*) - PetscErrorCode TaoCreate(MPI_Comm,PetscTAO*) - PetscErrorCode TaoSetOptionsPrefix(PetscTAO,char[]) - PetscErrorCode TaoAppendOptionsPrefix(PetscTAO,char[]) - PetscErrorCode TaoGetOptionsPrefix(PetscTAO,char*[]) + PetscErrorCode TaoCreate(MPI_Comm, PetscTAO*) + PetscErrorCode TaoSetOptionsPrefix(PetscTAO, char[]) + PetscErrorCode 
TaoAppendOptionsPrefix(PetscTAO, char[]) + PetscErrorCode TaoGetOptionsPrefix(PetscTAO, char*[]) PetscErrorCode TaoSetFromOptions(PetscTAO) - PetscErrorCode TaoSetType(PetscTAO,PetscTAOType) - PetscErrorCode TaoGetType(PetscTAO,PetscTAOType*) + PetscErrorCode TaoSetType(PetscTAO, PetscTAOType) + PetscErrorCode TaoGetType(PetscTAO, PetscTAOType*) PetscErrorCode TaoSetUp(PetscTAO) PetscErrorCode TaoSolve(PetscTAO) - PetscErrorCode TaoSetTolerances(PetscTAO,PetscReal,PetscReal,PetscReal) - PetscErrorCode TaoGetTolerances(PetscTAO,PetscReal*,PetscReal*,PetscReal*) - PetscErrorCode TaoSetConstraintTolerances(PetscTAO,PetscReal,PetscReal) - PetscErrorCode TaoGetConstraintTolerances(PetscTAO,PetscReal*,PetscReal*) - - PetscErrorCode TaoSetFunctionLowerBound(PetscTAO,PetscReal) - PetscErrorCode TaoSetMaximumIterations(PetscTAO,PetscInt) - PetscErrorCode TaoGetMaximumIterations(PetscTAO,PetscInt*) - PetscErrorCode TaoSetMaximumFunctionEvaluations(PetscTAO,PetscInt) - PetscErrorCode TaoGetMaximumFunctionEvaluations(PetscTAO,PetscInt*) - PetscErrorCode TaoSetIterationNumber(PetscTAO,PetscInt) - PetscErrorCode TaoGetIterationNumber(PetscTAO,PetscInt*) - - PetscErrorCode TaoSetTrustRegionTolerance(PetscTAO,PetscReal) - PetscErrorCode TaoGetInitialTrustRegionRadius(PetscTAO,PetscReal*) - PetscErrorCode TaoGetTrustRegionRadius(PetscTAO,PetscReal*) - PetscErrorCode TaoSetTrustRegionRadius(PetscTAO,PetscReal) - - PetscErrorCode TaoDefaultConvergenceTest(PetscTAO,void*) except PETSC_ERR_PYTHON - PetscErrorCode TaoSetConvergenceTest(PetscTAO,PetscTaoConvergenceTest*, void*) - PetscErrorCode TaoSetConvergedReason(PetscTAO,PetscTAOConvergedReason) - PetscErrorCode TaoGetConvergedReason(PetscTAO,PetscTAOConvergedReason*) - PetscErrorCode TaoLogConvergenceHistory(PetscTAO,PetscReal,PetscReal,PetscReal,PetscInt) - PetscErrorCode TaoGetSolutionStatus(PetscTAO,PetscInt*, - PetscReal*,PetscReal*, - PetscReal*,PetscReal*, + PetscErrorCode TaoSetTolerances(PetscTAO, PetscReal, PetscReal, PetscReal) + PetscErrorCode TaoGetTolerances(PetscTAO, PetscReal*, PetscReal*, PetscReal*) + PetscErrorCode TaoSetConstraintTolerances(PetscTAO, PetscReal, PetscReal) + PetscErrorCode TaoGetConstraintTolerances(PetscTAO, PetscReal*, PetscReal*) + + PetscErrorCode TaoSetFunctionLowerBound(PetscTAO, PetscReal) + PetscErrorCode TaoSetMaximumIterations(PetscTAO, PetscInt) + PetscErrorCode TaoGetMaximumIterations(PetscTAO, PetscInt*) + PetscErrorCode TaoSetMaximumFunctionEvaluations(PetscTAO, PetscInt) + PetscErrorCode TaoGetMaximumFunctionEvaluations(PetscTAO, PetscInt*) + PetscErrorCode TaoSetIterationNumber(PetscTAO, PetscInt) + PetscErrorCode TaoGetIterationNumber(PetscTAO, PetscInt*) + + PetscErrorCode TaoSetTrustRegionTolerance(PetscTAO, PetscReal) + PetscErrorCode TaoGetInitialTrustRegionRadius(PetscTAO, PetscReal*) + PetscErrorCode TaoGetTrustRegionRadius(PetscTAO, PetscReal*) + PetscErrorCode TaoSetTrustRegionRadius(PetscTAO, PetscReal) + + PetscErrorCode TaoDefaultConvergenceTest(PetscTAO, void*) except PETSC_ERR_PYTHON + PetscErrorCode TaoSetConvergenceTest(PetscTAO, PetscTaoConvergenceTest*, void*) + PetscErrorCode TaoSetConvergedReason(PetscTAO, PetscTAOConvergedReason) + PetscErrorCode TaoGetConvergedReason(PetscTAO, PetscTAOConvergedReason*) + PetscErrorCode TaoLogConvergenceHistory(PetscTAO, PetscReal, PetscReal, PetscReal, PetscInt) + PetscErrorCode TaoGetSolutionStatus(PetscTAO, PetscInt*, + PetscReal*, PetscReal*, + PetscReal*, PetscReal*, PetscTAOConvergedReason*) - PetscErrorCode 
TaoMonitorSet(PetscTAO,PetscTaoMonitor,void*,PetscTaoMonitorDestroy) + PetscErrorCode TaoMonitorSet(PetscTAO, PetscTaoMonitor, void*, PetscTaoMonitorDestroy) PetscErrorCode TaoMonitorCancel(PetscTAO) - PetscErrorCode TaoComputeObjective(PetscTAO,PetscVec,PetscReal*) - PetscErrorCode TaoComputeResidual(PetscTAO,PetscVec,PetscVec) - PetscErrorCode TaoComputeGradient(PetscTAO,PetscVec,PetscVec) - PetscErrorCode TaoComputeObjectiveAndGradient(PetscTAO,PetscVec,PetscReal*,PetscVec) - PetscErrorCode TaoComputeConstraints(PetscTAO,PetscVec,PetscVec) - PetscErrorCode TaoComputeDualVariables(PetscTAO,PetscVec,PetscVec) + PetscErrorCode TaoComputeObjective(PetscTAO, PetscVec, PetscReal*) + PetscErrorCode TaoComputeResidual(PetscTAO, PetscVec, PetscVec) + PetscErrorCode TaoComputeGradient(PetscTAO, PetscVec, PetscVec) + PetscErrorCode TaoComputeObjectiveAndGradient(PetscTAO, PetscVec, PetscReal*, PetscVec) + PetscErrorCode TaoComputeConstraints(PetscTAO, PetscVec, PetscVec) + PetscErrorCode TaoComputeDualVariables(PetscTAO, PetscVec, PetscVec) PetscErrorCode TaoComputeVariableBounds(PetscTAO) - PetscErrorCode TaoComputeHessian(PetscTAO,PetscVec,PetscMat,PetscMat) - PetscErrorCode TaoComputeJacobian(PetscTAO,PetscVec,PetscMat,PetscMat) - - PetscErrorCode TaoSetSolution(PetscTAO,PetscVec) - PetscErrorCode TaoSetConstraintsVec(PetscTAO,PetscVec) - PetscErrorCode TaoSetVariableBounds(PetscTAO,PetscVec,PetscVec) - - PetscErrorCode TaoGetSolution(PetscTAO,PetscVec*) - PetscErrorCode TaoSetGradientNorm(PetscTAO,PetscMat) - PetscErrorCode TaoGetGradientNorm(PetscTAO,PetscMat*) - PetscErrorCode TaoLMVMSetH0(PetscTAO,PetscMat) - PetscErrorCode TaoLMVMGetH0(PetscTAO,PetscMat*) - PetscErrorCode TaoLMVMGetH0KSP(PetscTAO,PetscKSP*) - PetscErrorCode TaoBNCGGetType(PetscTAO,PetscTAOBNCGType*) - PetscErrorCode TaoBNCGSetType(PetscTAO,PetscTAOBNCGType) - PetscErrorCode TaoGetVariableBounds(PetscTAO,PetscVec*,PetscVec*) - - PetscErrorCode TaoSetObjective(PetscTAO,PetscTaoObjective*,void*) - PetscErrorCode TaoSetGradient(PetscTAO,PetscVec,PetscTaoGradient*,void*) - PetscErrorCode TaoSetObjectiveAndGradient(PetscTAO,PetscVec,PetscTaoObjGrad*,void*) - PetscErrorCode TaoSetHessian(PetscTAO,PetscMat,PetscMat,PetscTaoHessian*,void*) - PetscErrorCode TaoGetObjective(PetscTAO,PetscTaoObjective**,void**) - PetscErrorCode TaoGetGradient(PetscTAO,PetscVec*,PetscTaoGradient**,void**) - PetscErrorCode TaoGetObjectiveAndGradient(PetscTAO,PetscVec*,PetscTaoObjGrad**,void**) - PetscErrorCode TaoGetHessian(PetscTAO,PetscMat*,PetscMat*,PetscTaoHessian**,void**) - PetscErrorCode TaoSetResidualRoutine(PetscTAO,PetscVec,PetscTaoResidual,void*) - PetscErrorCode TaoSetVariableBoundsRoutine(PetscTAO,PetscTaoVarBounds*,void*) - PetscErrorCode TaoSetConstraintsRoutine(PetscTAO,PetscVec,PetscTaoConstraints*,void*) - PetscErrorCode TaoSetJacobianRoutine(PetscTAO,PetscMat,PetscMat,PetscTaoJacobian*,void*) - PetscErrorCode TaoSetJacobianResidualRoutine(PetscTAO,PetscMat,PetscMat,PetscTaoJacobianResidual*,void*) - PetscErrorCode TaoSetStateDesignIS(PetscTAO,PetscIS,PetscIS) - PetscErrorCode TaoSetJacobianStateRoutine(PetscTAO,PetscMat,PetscMat,PetscMat,PetscTaoJacobianState*,void*) - PetscErrorCode TaoSetJacobianDesignRoutine(PetscTAO,PetscMat,PetscTaoJacobianDesign*,void*) - - PetscErrorCode TaoSetEqualityConstraintsRoutine(PetscTAO,PetscVec,PetscTaoEqualityConstraints*,void*) - PetscErrorCode TaoSetJacobianEqualityRoutine(PetscTAO,PetscMat,PetscMat,PetscTaoJacobianEquality*,void*) - PetscErrorCode 
TaoSetUpdate(PetscTAO,PetscTaoUpdateFunction*,void*) - - PetscErrorCode TaoSetInitialTrustRegionRadius(PetscTAO,PetscReal) - - PetscErrorCode TaoGetKSP(PetscTAO,PetscKSP*) - PetscErrorCode TaoGetLineSearch(PetscTAO,PetscTAOLineSearch*) - - PetscErrorCode TaoBRGNGetSubsolver(PetscTAO,PetscTAO*) - PetscErrorCode TaoBRGNSetRegularizerObjectiveAndGradientRoutine(PetscTAO,PetscTaoRegularizerObjGrad*,void*) - PetscErrorCode TaoBRGNSetRegularizerHessianRoutine(PetscTAO,PetscMat,PetscTaoRegularizerHessian*,void*) - PetscErrorCode TaoBRGNSetRegularizerWeight(PetscTAO,PetscReal) - PetscErrorCode TaoBRGNSetL1SmoothEpsilon(PetscTAO,PetscReal) - PetscErrorCode TaoBRGNSetDictionaryMatrix(PetscTAO,PetscMat) - PetscErrorCode TaoBRGNGetDampingVector(PetscTAO,PetscVec*) - - PetscErrorCode TaoPythonSetType(PetscTAO,char[]) - PetscErrorCode TaoPythonGetType(PetscTAO,char*[]) + PetscErrorCode TaoComputeHessian(PetscTAO, PetscVec, PetscMat, PetscMat) + PetscErrorCode TaoComputeJacobian(PetscTAO, PetscVec, PetscMat, PetscMat) + + PetscErrorCode TaoSetSolution(PetscTAO, PetscVec) + PetscErrorCode TaoSetConstraintsVec(PetscTAO, PetscVec) + PetscErrorCode TaoSetVariableBounds(PetscTAO, PetscVec, PetscVec) + + PetscErrorCode TaoGetSolution(PetscTAO, PetscVec*) + PetscErrorCode TaoSetGradientNorm(PetscTAO, PetscMat) + PetscErrorCode TaoGetGradientNorm(PetscTAO, PetscMat*) + PetscErrorCode TaoLMVMSetH0(PetscTAO, PetscMat) + PetscErrorCode TaoLMVMGetH0(PetscTAO, PetscMat*) + PetscErrorCode TaoLMVMGetH0KSP(PetscTAO, PetscKSP*) + PetscErrorCode TaoBNCGGetType(PetscTAO, PetscTAOBNCGType*) + PetscErrorCode TaoBNCGSetType(PetscTAO, PetscTAOBNCGType) + PetscErrorCode TaoGetVariableBounds(PetscTAO, PetscVec*, PetscVec*) + + PetscErrorCode TaoSetObjective(PetscTAO, PetscTaoObjective*, void*) + PetscErrorCode TaoSetGradient(PetscTAO, PetscVec, PetscTaoGradient*, void*) + PetscErrorCode TaoSetObjectiveAndGradient(PetscTAO, PetscVec, PetscTaoObjGrad*, void*) + PetscErrorCode TaoSetHessian(PetscTAO, PetscMat, PetscMat, PetscTaoHessian*, void*) + PetscErrorCode TaoGetObjective(PetscTAO, PetscTaoObjective**, void**) + PetscErrorCode TaoGetGradient(PetscTAO, PetscVec*, PetscTaoGradient**, void**) + PetscErrorCode TaoGetObjectiveAndGradient(PetscTAO, PetscVec*, PetscTaoObjGrad**, void**) + PetscErrorCode TaoGetHessian(PetscTAO, PetscMat*, PetscMat*, PetscTaoHessian**, void**) + PetscErrorCode TaoSetResidualRoutine(PetscTAO, PetscVec, PetscTaoResidual, void*) + PetscErrorCode TaoSetVariableBoundsRoutine(PetscTAO, PetscTaoVarBounds*, void*) + PetscErrorCode TaoSetConstraintsRoutine(PetscTAO, PetscVec, PetscTaoConstraints*, void*) + PetscErrorCode TaoSetJacobianRoutine(PetscTAO, PetscMat, PetscMat, PetscTaoJacobian*, void*) + PetscErrorCode TaoSetJacobianResidualRoutine(PetscTAO, PetscMat, PetscMat, PetscTaoJacobianResidual*, void*) + PetscErrorCode TaoSetStateDesignIS(PetscTAO, PetscIS, PetscIS) + PetscErrorCode TaoSetJacobianStateRoutine(PetscTAO, PetscMat, PetscMat, PetscMat, PetscTaoJacobianState*, void*) + PetscErrorCode TaoSetJacobianDesignRoutine(PetscTAO, PetscMat, PetscTaoJacobianDesign*, void*) + + PetscErrorCode TaoSetEqualityConstraintsRoutine(PetscTAO, PetscVec, PetscTaoEqualityConstraints*, void*) + PetscErrorCode TaoSetJacobianEqualityRoutine(PetscTAO, PetscMat, PetscMat, PetscTaoJacobianEquality*, void*) + PetscErrorCode TaoSetUpdate(PetscTAO, PetscTaoUpdateFunction*, void*) + + PetscErrorCode TaoSetInitialTrustRegionRadius(PetscTAO, PetscReal) + + PetscErrorCode TaoGetKSP(PetscTAO, PetscKSP*) + PetscErrorCode 
TaoGetLineSearch(PetscTAO, PetscTAOLineSearch*) + + PetscErrorCode TaoBRGNGetSubsolver(PetscTAO, PetscTAO*) + PetscErrorCode TaoBRGNSetRegularizerObjectiveAndGradientRoutine(PetscTAO, PetscTaoRegularizerObjGrad*, void*) + PetscErrorCode TaoBRGNSetRegularizerHessianRoutine(PetscTAO, PetscMat, PetscTaoRegularizerHessian*, void*) + PetscErrorCode TaoBRGNSetRegularizerWeight(PetscTAO, PetscReal) + PetscErrorCode TaoBRGNSetL1SmoothEpsilon(PetscTAO, PetscReal) + PetscErrorCode TaoBRGNSetDictionaryMatrix(PetscTAO, PetscMat) + PetscErrorCode TaoBRGNGetDampingVector(PetscTAO, PetscVec*) + + PetscErrorCode TaoPythonSetType(PetscTAO, char[]) + PetscErrorCode TaoPythonGetType(PetscTAO, char*[]) ctypedef const char* PetscTAOLineSearchType "TaoLineSearchType" PetscTAOLineSearchType TAOLINESEARCHUNIT @@ -220,39 +220,39 @@ cdef extern from * nogil: TAOLINESEARCH_HALTED_RTOL TAOLINESEARCH_HALTED_USER - ctypedef PetscErrorCode PetscTaoLineSearchObjective(PetscTAOLineSearch,PetscVec,PetscReal*,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoLineSearchGradient(PetscTAOLineSearch,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoLineSearchObjGrad(PetscTAOLineSearch,PetscVec,PetscReal*,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode PetscTaoLineSearchObjGTS(PetscTaoLineSearch,PetscVec,PetscVec,PetscReal*,PetscReal*,void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoLineSearchObjective(PetscTAOLineSearch, PetscVec, PetscReal*, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoLineSearchGradient(PetscTAOLineSearch, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoLineSearchObjGrad(PetscTAOLineSearch, PetscVec, PetscReal*, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode PetscTaoLineSearchObjGTS(PetscTAOLineSearch, PetscVec, PetscVec, PetscReal*, PetscReal*, void*) except PETSC_ERR_PYTHON - PetscErrorCode TaoLineSearchCreate(MPI_Comm,PetscTAOLineSearch*) + PetscErrorCode TaoLineSearchCreate(MPI_Comm, PetscTAOLineSearch*) PetscErrorCode TaoLineSearchDestroy(PetscTAOLineSearch*) - PetscErrorCode TaoLineSearchView(PetscTAOLineSearch,PetscViewer) - PetscErrorCode TaoLineSearchSetType(PetscTAOLineSearch,PetscTAOLineSearchType) - PetscErrorCode TaoLineSearchGetType(PetscTAOLineSearch,PetscTAOLineSearchType*) - PetscErrorCode TaoLineSearchSetOptionsPrefix(PetscTAOLineSearch,char[]) - PetscErrorCode TaoLineSearchGetOptionsPrefix(PetscTAOLineSearch,char*[]) + PetscErrorCode TaoLineSearchView(PetscTAOLineSearch, PetscViewer) + PetscErrorCode TaoLineSearchSetType(PetscTAOLineSearch, PetscTAOLineSearchType) + PetscErrorCode TaoLineSearchGetType(PetscTAOLineSearch, PetscTAOLineSearchType*) + PetscErrorCode TaoLineSearchSetOptionsPrefix(PetscTAOLineSearch, char[]) + PetscErrorCode TaoLineSearchGetOptionsPrefix(PetscTAOLineSearch, char*[]) PetscErrorCode TaoLineSearchSetFromOptions(PetscTAOLineSearch) PetscErrorCode TaoLineSearchSetUp(PetscTAOLineSearch) - PetscErrorCode TaoLineSearchUseTaoRoutines(PetscTAOLineSearch,PetscTAO) - PetscErrorCode TaoLineSearchSetObjectiveRoutine(PetscTAOLineSearch,PetscTaoLineSearchObjective,void*) - PetscErrorCode TaoLineSearchSetGradientRoutine(PetscTAOLineSearch,PetscTaoLineSearchGradient,void*) - PetscErrorCode TaoLineSearchSetObjectiveAndGradientRoutine(PetscTAOLineSearch,PetscTaoLineSearchObjGrad,void*) - PetscErrorCode TaoLineSearchApply(PetscTAOLineSearch,PetscVec,PetscReal*,PetscVec,PetscVec,PetscReal*,PetscTAOLineSearchConvergedReason*) +
PetscErrorCode TaoLineSearchUseTaoRoutines(PetscTAOLineSearch, PetscTAO) + PetscErrorCode TaoLineSearchSetObjectiveRoutine(PetscTAOLineSearch, PetscTaoLineSearchObjective, void*) + PetscErrorCode TaoLineSearchSetGradientRoutine(PetscTAOLineSearch, PetscTaoLineSearchGradient, void*) + PetscErrorCode TaoLineSearchSetObjectiveAndGradientRoutine(PetscTAOLineSearch, PetscTaoLineSearchObjGrad, void*) + PetscErrorCode TaoLineSearchApply(PetscTAOLineSearch, PetscVec, PetscReal*, PetscVec, PetscVec, PetscReal*, PetscTAOLineSearchConvergedReason*) # -------------------------------------------------------------------- cdef inline TAO ref_TAO(PetscTAO tao): cdef TAO ob = TAO() ob.tao = tao - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # -------------------------------------------------------------------- cdef PetscErrorCode TAO_Objective(PetscTAO _tao, - PetscVec _x, PetscReal *_f, - void *ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, PetscReal *_f, + void *ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) @@ -265,8 +265,8 @@ cdef PetscErrorCode TAO_Objective(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_Residual(PetscTAO _tao, - PetscVec _x, PetscVec _r, - void *ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, PetscVec _r, + void *ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) @@ -279,8 +279,8 @@ cdef PetscErrorCode TAO_Residual(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_Gradient(PetscTAO _tao, - PetscVec _x, PetscVec _g, - void *ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, PetscVec _g, + void *ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) @@ -293,8 +293,8 @@ cdef PetscErrorCode TAO_Gradient(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_ObjGrad(PetscTAO _tao, - PetscVec _x, PetscReal *_f, PetscVec _g, - void *ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, PetscReal *_f, PetscVec _g, + void *ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) @@ -309,8 +309,8 @@ cdef PetscErrorCode TAO_ObjGrad(PetscTAO _tao, cdef PetscErrorCode TAO_BRGNRegObjGrad(PetscTAO _tao, - PetscVec _x, PetscReal *_f, PetscVec _g, - void *ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, PetscReal *_f, PetscVec _g, + void *ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) @@ -324,8 +324,8 @@ cdef PetscErrorCode TAO_BRGNRegObjGrad(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_Constraints(PetscTAO _tao, - PetscVec _x, PetscVec _r, - void *ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, PetscVec _r, + void *ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) @@ -338,8 +338,8 @@ cdef PetscErrorCode TAO_Constraints(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_VarBounds(PetscTAO _tao, - PetscVec _xl, PetscVec _xu, - void *ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _xl, PetscVec _xu, + void *ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec xl = ref_Vec(_xl) @@ -352,10 +352,10 @@ cdef PetscErrorCode TAO_VarBounds(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_Hessian(PetscTAO _tao, - PetscVec _x, - PetscMat _H, - PetscMat _P, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscMat _H, + PetscMat _P, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef 
Vec x = ref_Vec(_x) cdef Mat H = ref_Mat(_H) @@ -368,9 +368,9 @@ cdef PetscErrorCode TAO_Hessian(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_BRGNRegHessian(PetscTAO _tao, - PetscVec _x, - PetscMat _H, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscMat _H, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) cdef Mat H = ref_Mat(_H) @@ -382,10 +382,10 @@ cdef PetscErrorCode TAO_BRGNRegHessian(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_Jacobian(PetscTAO _tao, - PetscVec _x, - PetscMat _J, - PetscMat _P, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscMat _J, + PetscMat _P, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) cdef Mat J = ref_Mat(_J) @@ -398,10 +398,10 @@ cdef PetscErrorCode TAO_Jacobian(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_JacobianResidual(PetscTAO _tao, - PetscVec _x, - PetscMat _J, - PetscMat _P, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscMat _J, + PetscMat _P, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) cdef Mat J = ref_Mat(_J) @@ -414,27 +414,27 @@ cdef PetscErrorCode TAO_JacobianResidual(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_JacobianState(PetscTAO _tao, - PetscVec _x, - PetscMat _J, - PetscMat _P, - PetscMat _I, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscMat _J, + PetscMat _Jp, + PetscMat _Ji, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) cdef Mat J = ref_Mat(_J) - cdef Mat P = ref_Mat(_P) - cdef Mat I = ref_Mat(_I) + cdef Mat Jp = ref_Mat(_Jp) + cdef Mat Ji = ref_Mat(_Ji) context = tao.get_attr("__jacobian_state__") if context is None and ctx != NULL: context = ctx assert context is not None and type(context) is tuple # sanity check (jacobian, args, kargs) = context - jacobian(tao, x, J, P, I, *args, **kargs) + jacobian(tao, x, J, Jp, Ji, *args, **kargs) return PETSC_SUCCESS cdef PetscErrorCode TAO_JacobianDesign(PetscTAO _tao, - PetscVec _x, - PetscMat _J, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscMat _J, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) cdef Mat J = ref_Mat(_J) @@ -446,9 +446,9 @@ cdef PetscErrorCode TAO_JacobianDesign(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_EqualityConstraints(PetscTAO _tao, - PetscVec _x, - PetscVec _c, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscVec _c, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) cdef Vec c = ref_Vec(_c) @@ -460,10 +460,10 @@ cdef PetscErrorCode TAO_EqualityConstraints(PetscTAO _tao, return PETSC_SUCCESS cdef PetscErrorCode TAO_JacobianEquality(PetscTAO _tao, - PetscVec _x, - PetscMat _J, - PetscMat _P, - void* ctx) except PETSC_ERR_PYTHON with gil: + PetscVec _x, + PetscMat _J, + PetscMat _P, + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef Vec x = ref_Vec(_x) cdef Mat J = ref_Mat(_J) @@ -486,9 +486,9 @@ cdef PetscErrorCode TAO_Update( return PETSC_SUCCESS cdef PetscErrorCode TAO_Converged(PetscTAO _tao, - void* ctx) except PETSC_ERR_PYTHON with gil: + void* ctx) except PETSC_ERR_PYTHON with gil: # call first the default convergence test - CHKERR( TaoDefaultConvergenceTest(_tao, NULL) ) + CHKERR(TaoDefaultConvergenceTest(_tao, 
NULL)) # call next the user-provided convergence test cdef TAO tao = ref_TAO(_tao) (converged, args, kargs) = tao.get_attr('__converged__') @@ -504,11 +504,11 @@ cdef PetscErrorCode TAO_Converged(PetscTAO _tao, creason = reason assert creason >= TAO_DIVERGED_USER assert creason <= TAO_CONVERGED_USER - CHKERR( TaoSetConvergedReason(_tao, creason) ) + CHKERR(TaoSetConvergedReason(_tao, creason)) return PETSC_SUCCESS cdef PetscErrorCode TAO_Monitor(PetscTAO _tao, - void* ctx) except PETSC_ERR_PYTHON with gil: + void* ctx) except PETSC_ERR_PYTHON with gil: cdef TAO tao = ref_TAO(_tao) cdef object monitorlist = tao.get_attr('__monitor__') if monitorlist is None: return PETSC_SUCCESS @@ -521,7 +521,7 @@ cdef PetscErrorCode TAO_Monitor(PetscTAO _tao, cdef inline TAOLineSearch ref_TAOLS(PetscTAOLineSearch taols): cdef TAOLineSearch ob = TAOLineSearch() ob.taols = taols - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscts.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscts.pxi index efd71dec881..1963ca78551 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscts.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscts.pxi @@ -26,231 +26,231 @@ cdef extern from * nogil: PetscTSType TSDISCGRAD ctypedef enum PetscTSProblemType "TSProblemType": - TS_LINEAR - TS_NONLINEAR + TS_LINEAR + TS_NONLINEAR ctypedef enum PetscTSEquationType "TSEquationType": - TS_EQ_UNSPECIFIED - TS_EQ_EXPLICIT - TS_EQ_ODE_EXPLICIT - TS_EQ_DAE_SEMI_EXPLICIT_INDEX1 - TS_EQ_DAE_SEMI_EXPLICIT_INDEX2 - TS_EQ_DAE_SEMI_EXPLICIT_INDEX3 - TS_EQ_DAE_SEMI_EXPLICIT_INDEXHI - TS_EQ_IMPLICIT - TS_EQ_ODE_IMPLICIT - TS_EQ_DAE_IMPLICIT_INDEX1 - TS_EQ_DAE_IMPLICIT_INDEX2 - TS_EQ_DAE_IMPLICIT_INDEX3 - TS_EQ_DAE_IMPLICIT_INDEXHI + TS_EQ_UNSPECIFIED + TS_EQ_EXPLICIT + TS_EQ_ODE_EXPLICIT + TS_EQ_DAE_SEMI_EXPLICIT_INDEX1 + TS_EQ_DAE_SEMI_EXPLICIT_INDEX2 + TS_EQ_DAE_SEMI_EXPLICIT_INDEX3 + TS_EQ_DAE_SEMI_EXPLICIT_INDEXHI + TS_EQ_IMPLICIT + TS_EQ_ODE_IMPLICIT + TS_EQ_DAE_IMPLICIT_INDEX1 + TS_EQ_DAE_IMPLICIT_INDEX2 + TS_EQ_DAE_IMPLICIT_INDEX3 + TS_EQ_DAE_IMPLICIT_INDEXHI ctypedef enum PetscTSConvergedReason "TSConvergedReason": - # iterating - TS_CONVERGED_ITERATING - # converged - TS_CONVERGED_TIME - TS_CONVERGED_ITS - TS_CONVERGED_USER - TS_CONVERGED_EVENT - # diverged - TS_DIVERGED_NONLINEAR_SOLVE - TS_DIVERGED_STEP_REJECTED + # iterating + TS_CONVERGED_ITERATING + # converged + TS_CONVERGED_TIME + TS_CONVERGED_ITS + TS_CONVERGED_USER + TS_CONVERGED_EVENT + # diverged + TS_DIVERGED_NONLINEAR_SOLVE + TS_DIVERGED_STEP_REJECTED ctypedef enum PetscTSExactFinalTimeOption "TSExactFinalTimeOption": - TS_EXACTFINALTIME_UNSPECIFIED - TS_EXACTFINALTIME_STEPOVER - TS_EXACTFINALTIME_INTERPOLATE - TS_EXACTFINALTIME_MATCHSTEP + TS_EXACTFINALTIME_UNSPECIFIED + TS_EXACTFINALTIME_STEPOVER + TS_EXACTFINALTIME_INTERPOLATE + TS_EXACTFINALTIME_MATCHSTEP ctypedef PetscErrorCode PetscTSCtxDel(void*) ctypedef PetscErrorCode (*PetscTSFunctionFunction)(PetscTS, - PetscReal, - PetscVec, - PetscVec, - void*) except PETSC_ERR_PYTHON + PetscReal, + PetscVec, + PetscVec, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSJacobianFunction)(PetscTS, - PetscReal, - PetscVec, - PetscMat, - PetscMat, - void*) except PETSC_ERR_PYTHON + PetscReal, + PetscVec, + PetscMat, + PetscMat, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSIFunctionFunction)(PetscTS, - PetscReal, - PetscVec, - PetscVec, - 
PetscVec, - void*) except PETSC_ERR_PYTHON + PetscReal, + PetscVec, + PetscVec, + PetscVec, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSIJacobianFunction)(PetscTS, - PetscReal, - PetscVec, - PetscVec, - PetscReal, - PetscMat, - PetscMat, - void*) except PETSC_ERR_PYTHON + PetscReal, + PetscVec, + PetscVec, + PetscReal, + PetscMat, + PetscMat, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSIJacobianPFunction)(PetscTS, - PetscReal, - PetscVec, - PetscVec, - PetscReal, - PetscMat, - void*) except PETSC_ERR_PYTHON + PetscReal, + PetscVec, + PetscVec, + PetscReal, + PetscMat, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSI2FunctionFunction)(PetscTS, - PetscReal, - PetscVec, - PetscVec, - PetscVec, - PetscVec, - void*) except PETSC_ERR_PYTHON + PetscReal, + PetscVec, + PetscVec, + PetscVec, + PetscVec, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSI2JacobianFunction)(PetscTS, - PetscReal, - PetscVec, - PetscVec, - PetscVec, - PetscReal, - PetscReal, - PetscMat, - PetscMat, - void*) except PETSC_ERR_PYTHON + PetscReal, + PetscVec, + PetscVec, + PetscVec, + PetscReal, + PetscReal, + PetscMat, + PetscMat, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSMonitorFunction)(PetscTS, - PetscInt, - PetscReal, - PetscVec, - void*) except PETSC_ERR_PYTHON + PetscInt, + PetscReal, + PetscVec, + void*) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSPreStepFunction) (PetscTS) except PETSC_ERR_PYTHON ctypedef PetscErrorCode (*PetscTSPostStepFunction) (PetscTS) except PETSC_ERR_PYTHON - PetscErrorCode TSCreate(MPI_Comm comm,PetscTS*) - PetscErrorCode TSClone(PetscTS,PetscTS*) + PetscErrorCode TSCreate(MPI_Comm comm, PetscTS*) + PetscErrorCode TSClone(PetscTS, PetscTS*) PetscErrorCode TSDestroy(PetscTS*) - PetscErrorCode TSView(PetscTS,PetscViewer) - PetscErrorCode TSLoad(PetscTS,PetscViewer) - - PetscErrorCode TSSetProblemType(PetscTS,PetscTSProblemType) - PetscErrorCode TSGetProblemType(PetscTS,PetscTSProblemType*) - PetscErrorCode TSSetEquationType(PetscTS,PetscTSEquationType) - PetscErrorCode TSGetEquationType(PetscTS,PetscTSEquationType*) - PetscErrorCode TSSetType(PetscTS,PetscTSType) - PetscErrorCode TSGetType(PetscTS,PetscTSType*) - - PetscErrorCode TSSetOptionsPrefix(PetscTS,char[]) - PetscErrorCode TSAppendOptionsPrefix(PetscTS,char[]) - PetscErrorCode TSGetOptionsPrefix(PetscTS,char*[]) + PetscErrorCode TSView(PetscTS, PetscViewer) + PetscErrorCode TSLoad(PetscTS, PetscViewer) + + PetscErrorCode TSSetProblemType(PetscTS, PetscTSProblemType) + PetscErrorCode TSGetProblemType(PetscTS, PetscTSProblemType*) + PetscErrorCode TSSetEquationType(PetscTS, PetscTSEquationType) + PetscErrorCode TSGetEquationType(PetscTS, PetscTSEquationType*) + PetscErrorCode TSSetType(PetscTS, PetscTSType) + PetscErrorCode TSGetType(PetscTS, PetscTSType*) + + PetscErrorCode TSSetOptionsPrefix(PetscTS, char[]) + PetscErrorCode TSAppendOptionsPrefix(PetscTS, char[]) + PetscErrorCode TSGetOptionsPrefix(PetscTS, char*[]) PetscErrorCode TSSetFromOptions(PetscTS) - PetscErrorCode TSSetSolution(PetscTS,PetscVec) - PetscErrorCode TSGetSolution(PetscTS,PetscVec*) - PetscErrorCode TS2SetSolution(PetscTS,PetscVec,PetscVec) - PetscErrorCode TS2GetSolution(PetscTS,PetscVec*,PetscVec*) - - PetscErrorCode TSGetRHSFunction(PetscTS,PetscVec*,PetscTSFunctionFunction*,void*) - PetscErrorCode TSGetRHSJacobian(PetscTS,PetscMat*,PetscMat*,PetscTSJacobianFunction*,void**) - PetscErrorCode 
TSSetRHSFunction(PetscTS,PetscVec,PetscTSFunctionFunction,void*) - PetscErrorCode TSSetRHSJacobian(PetscTS,PetscMat,PetscMat,PetscTSJacobianFunction,void*) - PetscErrorCode TSSetIFunction(PetscTS,PetscVec,PetscTSIFunctionFunction,void*) - PetscErrorCode TSSetIJacobian(PetscTS,PetscMat,PetscMat,PetscTSIJacobianFunction,void*) - PetscErrorCode TSSetIJacobianP(PetscTS,PetscMat,PetscTSIJacobianPFunction,void*) - PetscErrorCode TSGetIFunction(PetscTS,PetscVec*,PetscTSIFunctionFunction*,void*) - PetscErrorCode TSGetIJacobian(PetscTS,PetscMat*,PetscMat*,PetscTSIJacobianFunction*,void**) - PetscErrorCode TSSetI2Function(PetscTS,PetscVec,PetscTSI2FunctionFunction,void*) - PetscErrorCode TSSetI2Jacobian(PetscTS,PetscMat,PetscMat,PetscTSI2JacobianFunction,void*) - PetscErrorCode TSGetI2Function(PetscTS,PetscVec*,PetscTSI2FunctionFunction*,void**) - PetscErrorCode TSGetI2Jacobian(PetscTS,PetscMat*,PetscMat*,PetscTSI2JacobianFunction*,void**) - - PetscErrorCode TSGetKSP(PetscTS,PetscKSP*) - PetscErrorCode TSGetSNES(PetscTS,PetscSNES*) - - PetscErrorCode TSGetDM(PetscTS,PetscDM*) - PetscErrorCode TSSetDM(PetscTS,PetscDM) - - PetscErrorCode TSComputeRHSFunction(PetscTS,PetscReal,PetscVec,PetscVec) - PetscErrorCode TSComputeRHSFunctionLinear(PetscTS,PetscReal,PetscVec,PetscVec,void*) - PetscErrorCode TSComputeRHSJacobian(PetscTS,PetscReal,PetscVec,PetscMat,PetscMat) - PetscErrorCode TSComputeRHSJacobianConstant(PetscTS,PetscReal,PetscVec,PetscMat,PetscMat,void*) - PetscErrorCode TSComputeIFunction(PetscTS,PetscReal,PetscVec,PetscVec,PetscVec,PetscBool) - PetscErrorCode TSComputeIJacobian(PetscTS,PetscReal,PetscVec,PetscVec,PetscReal,PetscMat,PetscMat,PetscBool) - PetscErrorCode TSComputeIJacobianP(PetscTS,PetscReal,PetscVec,PetscVec,PetscReal,PetscMat,PetscBool) - PetscErrorCode TSComputeI2Function(PetscTS,PetscReal,PetscVec,PetscVec,PetscVec,PetscVec) - PetscErrorCode TSComputeI2Jacobian(PetscTS,PetscReal,PetscVec,PetscVec,PetscVec,PetscReal,PetscReal,PetscMat,PetscMat) - - PetscErrorCode TSSetTime(PetscTS,PetscReal) - PetscErrorCode TSGetTime(PetscTS,PetscReal*) - PetscErrorCode TSGetPrevTime(PetscTS,PetscReal*) - PetscErrorCode TSGetSolveTime(PetscTS,PetscReal*) - PetscErrorCode TSSetTimeStep(PetscTS,PetscReal) - PetscErrorCode TSGetTimeStep(PetscTS,PetscReal*) - PetscErrorCode TSSetStepNumber(PetscTS,PetscInt) - PetscErrorCode TSGetStepNumber(PetscTS,PetscInt*) - PetscErrorCode TSSetMaxSteps(PetscTS,PetscInt) - PetscErrorCode TSGetMaxSteps(PetscTS,PetscInt*) - PetscErrorCode TSSetMaxTime(PetscTS,PetscReal) - PetscErrorCode TSGetMaxTime(PetscTS,PetscReal*) - PetscErrorCode TSSetExactFinalTime(PetscTS,PetscTSExactFinalTimeOption) - PetscErrorCode TSSetTimeSpan(PetscTS,PetscInt,PetscReal*) - PetscErrorCode TSGetTimeSpan(PetscTS,PetscInt*,const PetscReal**) - PetscErrorCode TSGetTimeSpanSolutions(PetscTS,PetscInt*,PetscVec**) - - PetscErrorCode TSSetConvergedReason(PetscTS,PetscTSConvergedReason) - PetscErrorCode TSGetConvergedReason(PetscTS,PetscTSConvergedReason*) - PetscErrorCode TSGetSNESIterations(PetscTS,PetscInt*) - PetscErrorCode TSGetKSPIterations(PetscTS,PetscInt*) - PetscErrorCode TSGetStepRejections(PetscTS,PetscInt*) - PetscErrorCode TSSetMaxStepRejections(PetscTS,PetscInt) - PetscErrorCode TSGetSNESFailures(PetscTS,PetscInt*) - PetscErrorCode TSSetMaxSNESFailures(PetscTS,PetscInt) - PetscErrorCode TSSetErrorIfStepFails(PetscTS,PetscBool) - PetscErrorCode TSSetTolerances(PetscTS,PetscReal,PetscVec,PetscReal,PetscVec) - PetscErrorCode 
TSGetTolerances(PetscTS,PetscReal*,PetscVec*,PetscReal*,PetscVec*) - - PetscErrorCode TSMonitorSet(PetscTS,PetscTSMonitorFunction,void*,PetscTSCtxDel*) + PetscErrorCode TSSetSolution(PetscTS, PetscVec) + PetscErrorCode TSGetSolution(PetscTS, PetscVec*) + PetscErrorCode TS2SetSolution(PetscTS, PetscVec, PetscVec) + PetscErrorCode TS2GetSolution(PetscTS, PetscVec*, PetscVec*) + + PetscErrorCode TSGetRHSFunction(PetscTS, PetscVec*, PetscTSFunctionFunction*, void*) + PetscErrorCode TSGetRHSJacobian(PetscTS, PetscMat*, PetscMat*, PetscTSJacobianFunction*, void**) + PetscErrorCode TSSetRHSFunction(PetscTS, PetscVec, PetscTSFunctionFunction, void*) + PetscErrorCode TSSetRHSJacobian(PetscTS, PetscMat, PetscMat, PetscTSJacobianFunction, void*) + PetscErrorCode TSSetIFunction(PetscTS, PetscVec, PetscTSIFunctionFunction, void*) + PetscErrorCode TSSetIJacobian(PetscTS, PetscMat, PetscMat, PetscTSIJacobianFunction, void*) + PetscErrorCode TSSetIJacobianP(PetscTS, PetscMat, PetscTSIJacobianPFunction, void*) + PetscErrorCode TSGetIFunction(PetscTS, PetscVec*, PetscTSIFunctionFunction*, void*) + PetscErrorCode TSGetIJacobian(PetscTS, PetscMat*, PetscMat*, PetscTSIJacobianFunction*, void**) + PetscErrorCode TSSetI2Function(PetscTS, PetscVec, PetscTSI2FunctionFunction, void*) + PetscErrorCode TSSetI2Jacobian(PetscTS, PetscMat, PetscMat, PetscTSI2JacobianFunction, void*) + PetscErrorCode TSGetI2Function(PetscTS, PetscVec*, PetscTSI2FunctionFunction*, void**) + PetscErrorCode TSGetI2Jacobian(PetscTS, PetscMat*, PetscMat*, PetscTSI2JacobianFunction*, void**) + + PetscErrorCode TSGetKSP(PetscTS, PetscKSP*) + PetscErrorCode TSGetSNES(PetscTS, PetscSNES*) + + PetscErrorCode TSGetDM(PetscTS, PetscDM*) + PetscErrorCode TSSetDM(PetscTS, PetscDM) + + PetscErrorCode TSComputeRHSFunction(PetscTS, PetscReal, PetscVec, PetscVec) + PetscErrorCode TSComputeRHSFunctionLinear(PetscTS, PetscReal, PetscVec, PetscVec, void*) + PetscErrorCode TSComputeRHSJacobian(PetscTS, PetscReal, PetscVec, PetscMat, PetscMat) + PetscErrorCode TSComputeRHSJacobianConstant(PetscTS, PetscReal, PetscVec, PetscMat, PetscMat, void*) + PetscErrorCode TSComputeIFunction(PetscTS, PetscReal, PetscVec, PetscVec, PetscVec, PetscBool) + PetscErrorCode TSComputeIJacobian(PetscTS, PetscReal, PetscVec, PetscVec, PetscReal, PetscMat, PetscMat, PetscBool) + PetscErrorCode TSComputeIJacobianP(PetscTS, PetscReal, PetscVec, PetscVec, PetscReal, PetscMat, PetscBool) + PetscErrorCode TSComputeI2Function(PetscTS, PetscReal, PetscVec, PetscVec, PetscVec, PetscVec) + PetscErrorCode TSComputeI2Jacobian(PetscTS, PetscReal, PetscVec, PetscVec, PetscVec, PetscReal, PetscReal, PetscMat, PetscMat) + + PetscErrorCode TSSetTime(PetscTS, PetscReal) + PetscErrorCode TSGetTime(PetscTS, PetscReal*) + PetscErrorCode TSGetPrevTime(PetscTS, PetscReal*) + PetscErrorCode TSGetSolveTime(PetscTS, PetscReal*) + PetscErrorCode TSSetTimeStep(PetscTS, PetscReal) + PetscErrorCode TSGetTimeStep(PetscTS, PetscReal*) + PetscErrorCode TSSetStepNumber(PetscTS, PetscInt) + PetscErrorCode TSGetStepNumber(PetscTS, PetscInt*) + PetscErrorCode TSSetMaxSteps(PetscTS, PetscInt) + PetscErrorCode TSGetMaxSteps(PetscTS, PetscInt*) + PetscErrorCode TSSetMaxTime(PetscTS, PetscReal) + PetscErrorCode TSGetMaxTime(PetscTS, PetscReal*) + PetscErrorCode TSSetExactFinalTime(PetscTS, PetscTSExactFinalTimeOption) + PetscErrorCode TSSetTimeSpan(PetscTS, PetscInt, PetscReal*) + PetscErrorCode TSGetTimeSpan(PetscTS, PetscInt*, const PetscReal**) + PetscErrorCode TSGetTimeSpanSolutions(PetscTS, PetscInt*, PetscVec**) + 
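[Illustrative usage note, not part of the patch.] The TS declarations normalized above are reachable from Python through PETSc.TS. A minimal sketch, assuming a working petsc4py build, exercising TSSetRHSFunction and the step-control calls in this hunk (TSSetTimeStep, TSSetMaxTime, TSSetMaxSteps, TSSetExactFinalTime) on the scalar ODE du/dt = -u:

    from petsc4py import PETSc

    def rhs(ts, t, u, f):
        # PetscTSFunctionFunction callback: f(t, u) = -u
        f[0] = -u[0]
        f.assemble()

    u = PETSc.Vec().createSeq(1)
    u[0] = 1.0
    u.assemble()

    ts = PETSc.TS().create(comm=PETSc.COMM_SELF)
    ts.setType(PETSc.TS.Type.RK)                             # TSSetType
    ts.setRHSFunction(rhs, u.duplicate())                    # TSSetRHSFunction
    ts.setTimeStep(0.01)                                     # TSSetTimeStep
    ts.setMaxTime(1.0)                                       # TSSetMaxTime
    ts.setMaxSteps(1000)                                     # TSSetMaxSteps
    ts.setExactFinalTime(PETSc.TS.ExactFinalTime.MATCHSTEP)  # TSSetExactFinalTime
    ts.setFromOptions()
    ts.solve(u)
    print(ts.getTime(), u[0])  # final time 1.0, u close to exp(-1)

MATCHSTEP shortens the last step so integration lands exactly on the requested final time; the getters paired with each setter in this hunk (TSGetTimeStep, TSGetMaxTime, TSGetMaxSteps) read the same state back.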
+ PetscErrorCode TSSetConvergedReason(PetscTS, PetscTSConvergedReason) + PetscErrorCode TSGetConvergedReason(PetscTS, PetscTSConvergedReason*) + PetscErrorCode TSGetSNESIterations(PetscTS, PetscInt*) + PetscErrorCode TSGetKSPIterations(PetscTS, PetscInt*) + PetscErrorCode TSGetStepRejections(PetscTS, PetscInt*) + PetscErrorCode TSSetMaxStepRejections(PetscTS, PetscInt) + PetscErrorCode TSGetSNESFailures(PetscTS, PetscInt*) + PetscErrorCode TSSetMaxSNESFailures(PetscTS, PetscInt) + PetscErrorCode TSSetErrorIfStepFails(PetscTS, PetscBool) + PetscErrorCode TSSetTolerances(PetscTS, PetscReal, PetscVec, PetscReal, PetscVec) + PetscErrorCode TSGetTolerances(PetscTS, PetscReal*, PetscVec*, PetscReal*, PetscVec*) + + PetscErrorCode TSMonitorSet(PetscTS, PetscTSMonitorFunction, void*, PetscTSCtxDel*) PetscErrorCode TSMonitorCancel(PetscTS) - PetscErrorCode TSMonitor(PetscTS,PetscInt,PetscReal,PetscVec) + PetscErrorCode TSMonitor(PetscTS, PetscInt, PetscReal, PetscVec) - ctypedef PetscErrorCode (*PetscTSIndicator)(PetscTS,PetscReal,PetscVec,PetscReal[],void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode (*PetscTSPostEvent)(PetscTS,PetscInt,PetscInt[],PetscReal,PetscVec, PetscBool, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode (*PetscTSIndicator)(PetscTS, PetscReal, PetscVec, PetscReal[], void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode (*PetscTSPostEvent)(PetscTS, PetscInt, PetscInt[], PetscReal, PetscVec, PetscBool, void*) except PETSC_ERR_PYTHON PetscErrorCode TSSetEventHandler(PetscTS, PetscInt, PetscInt[], PetscBool[], PetscTSIndicator, PetscTSPostEvent, void*) PetscErrorCode TSSetEventTolerances(PetscTS, PetscReal, PetscReal[]) PetscErrorCode TSGetNumEvents(PetscTS, PetscInt*) - ctypedef PetscErrorCode (*PetscTSAdjointR)(PetscTS,PetscReal,PetscVec,PetscVec,void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode (*PetscTSAdjointDRDY)(PetscTS,PetscReal,PetscVec,PetscVec[],void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode (*PetscTSAdjointDRDP)(PetscTS,PetscReal,PetscVec,PetscVec[],void*) except PETSC_ERR_PYTHON - ctypedef PetscErrorCode (*PetscTSRHSJacobianP)(PetscTS,PetscReal,PetscVec,PetscMat,void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode (*PetscTSAdjointR)(PetscTS, PetscReal, PetscVec, PetscVec, void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode (*PetscTSAdjointDRDY)(PetscTS, PetscReal, PetscVec, PetscVec[], void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode (*PetscTSAdjointDRDP)(PetscTS, PetscReal, PetscVec, PetscVec[], void*) except PETSC_ERR_PYTHON + ctypedef PetscErrorCode (*PetscTSRHSJacobianP)(PetscTS, PetscReal, PetscVec, PetscMat, void*) except PETSC_ERR_PYTHON PetscErrorCode TSSetSaveTrajectory(PetscTS) PetscErrorCode TSRemoveTrajectory(PetscTS) - PetscErrorCode TSSetCostGradients(PetscTS,PetscInt,PetscVec*,PetscVec*) - PetscErrorCode TSGetCostGradients(PetscTS,PetscInt*,PetscVec**,PetscVec**) - PetscErrorCode TSCreateQuadratureTS(PetscTS,PetscBool,PetscTS*) - PetscErrorCode TSGetQuadratureTS(PetscTS,PetscBool*,PetscTS*) - PetscErrorCode TSGetCostIntegral(PetscTS,PetscVec*) + PetscErrorCode TSSetCostGradients(PetscTS, PetscInt, PetscVec*, PetscVec*) + PetscErrorCode TSGetCostGradients(PetscTS, PetscInt*, PetscVec**, PetscVec**) + PetscErrorCode TSCreateQuadratureTS(PetscTS, PetscBool, PetscTS*) + PetscErrorCode TSGetQuadratureTS(PetscTS, PetscBool*, PetscTS*) + PetscErrorCode TSGetCostIntegral(PetscTS, PetscVec*) - PetscErrorCode TSSetRHSJacobianP(PetscTS,PetscMat,PetscTSRHSJacobianP,void*) - PetscErrorCode 
TSComputeRHSJacobianP(PetscTS,PetscReal,PetscVec,PetscMat) + PetscErrorCode TSSetRHSJacobianP(PetscTS, PetscMat, PetscTSRHSJacobianP, void*) + PetscErrorCode TSComputeRHSJacobianP(PetscTS, PetscReal, PetscVec, PetscMat) PetscErrorCode TSAdjointSolve(PetscTS) - PetscErrorCode TSAdjointSetSteps(PetscTS,PetscInt) + PetscErrorCode TSAdjointSetSteps(PetscTS, PetscInt) PetscErrorCode TSAdjointStep(PetscTS) PetscErrorCode TSAdjointSetUp(PetscTS) PetscErrorCode TSAdjointReset(PetscTS) - PetscErrorCode TSAdjointComputeDRDPFunction(PetscTS,PetscReal,PetscVec,PetscVec*) - PetscErrorCode TSAdjointComputeDRDYFunction(PetscTS,PetscReal,PetscVec,PetscVec*) + PetscErrorCode TSAdjointComputeDRDPFunction(PetscTS, PetscReal, PetscVec, PetscVec*) + PetscErrorCode TSAdjointComputeDRDYFunction(PetscTS, PetscReal, PetscVec, PetscVec*) PetscErrorCode TSAdjointCostIntegral(PetscTS) - PetscErrorCode TSForwardSetSensitivities(PetscTS,PetscInt,PetscVec*,PetscInt,PetscVec*) - PetscErrorCode TSForwardGetSensitivities(PetscTS,PetscInt*,PetscVec**,PetscInt*,PetscVec**) - PetscErrorCode TSForwardSetIntegralGradients(PetscTS,PetscInt,PetscVec *,PetscVec *) - PetscErrorCode TSForwardGetIntegralGradients(PetscTS,PetscInt*,PetscVec **,PetscVec **) - PetscErrorCode TSForwardSetRHSJacobianP(PetscTS,PetscVec*,PetscTSCostIntegrandFunction,void*) - PetscErrorCode TSForwardComputeRHSJacobianP(PetscTS,PetscReal,PetscVec,PetscVec*) + PetscErrorCode TSForwardSetSensitivities(PetscTS, PetscInt, PetscVec*, PetscInt, PetscVec*) + PetscErrorCode TSForwardGetSensitivities(PetscTS, PetscInt*, PetscVec**, PetscInt*, PetscVec**) + PetscErrorCode TSForwardSetIntegralGradients(PetscTS, PetscInt, PetscVec *, PetscVec *) + PetscErrorCode TSForwardGetIntegralGradients(PetscTS, PetscInt*, PetscVec **, PetscVec **) + PetscErrorCode TSForwardSetRHSJacobianP(PetscTS, PetscVec*, PetscTSCostIntegrandFunction, void*) + PetscErrorCode TSForwardComputeRHSJacobianP(PetscTS, PetscReal, PetscVec, PetscVec*) PetscErrorCode TSForwardSetUp(PetscTS) PetscErrorCode TSForwardCostIntegral(PetscTS) PetscErrorCode TSForwardStep(PetscTS) @@ -263,19 +263,19 @@ cdef extern from * nogil: PetscErrorCode TSStep(PetscTS) PetscErrorCode TSRestartStep(PetscTS) PetscErrorCode TSRollBack(PetscTS) - PetscErrorCode TSSolve(PetscTS,PetscVec) - PetscErrorCode TSInterpolate(PetscTS,PetscReal,PetscVec) - PetscErrorCode TSPreStage(PetscTS,PetscReal) - PetscErrorCode TSPostStage(PetscTS,PetscReal,PetscInt,PetscVec*) + PetscErrorCode TSSolve(PetscTS, PetscVec) + PetscErrorCode TSInterpolate(PetscTS, PetscReal, PetscVec) + PetscErrorCode TSPreStage(PetscTS, PetscReal) + PetscErrorCode TSPostStage(PetscTS, PetscReal, PetscInt, PetscVec*) - PetscErrorCode TSThetaSetTheta(PetscTS,PetscReal) - PetscErrorCode TSThetaGetTheta(PetscTS,PetscReal*) - PetscErrorCode TSThetaSetEndpoint(PetscTS,PetscBool) - PetscErrorCode TSThetaGetEndpoint(PetscTS,PetscBool*) + PetscErrorCode TSThetaSetTheta(PetscTS, PetscReal) + PetscErrorCode TSThetaGetTheta(PetscTS, PetscReal*) + PetscErrorCode TSThetaSetEndpoint(PetscTS, PetscBool) + PetscErrorCode TSThetaGetEndpoint(PetscTS, PetscBool*) - PetscErrorCode TSAlphaSetRadius(PetscTS,PetscReal) - PetscErrorCode TSAlphaSetParams(PetscTS,PetscReal,PetscReal,PetscReal) - PetscErrorCode TSAlphaGetParams(PetscTS,PetscReal*,PetscReal*,PetscReal*) + PetscErrorCode TSAlphaSetRadius(PetscTS, PetscReal) + PetscErrorCode TSAlphaSetParams(PetscTS, PetscReal, PetscReal, PetscReal) + PetscErrorCode TSAlphaGetParams(PetscTS, PetscReal*, PetscReal*, PetscReal*) ctypedef const char* 
PetscTSRKType "TSRKType" PetscTSRKType TSRK1FE @@ -291,8 +291,8 @@ cdef extern from * nogil: PetscTSRKType TSRK7VR PetscTSRKType TSRK8VR - PetscErrorCode TSRKGetType(PetscTS,PetscTSRKType*) - PetscErrorCode TSRKSetType(PetscTS,PetscTSRKType) + PetscErrorCode TSRKGetType(PetscTS, PetscTSRKType*) + PetscErrorCode TSRKSetType(PetscTS, PetscTSRKType) ctypedef const char* PetscTSARKIMEXType "TSARKIMEXType" PetscTSARKIMEXType TSARKIMEX1BEE @@ -309,9 +309,9 @@ cdef extern from * nogil: PetscTSARKIMEXType TSARKIMEX4 PetscTSARKIMEXType TSARKIMEX5 - PetscErrorCode TSARKIMEXGetType(PetscTS,PetscTSRKType*) - PetscErrorCode TSARKIMEXSetType(PetscTS,PetscTSRKType) - PetscErrorCode TSARKIMEXSetFullyImplicit(PetscTS,PetscBool) + PetscErrorCode TSARKIMEXGetType(PetscTS, PetscTSRKType*) + PetscErrorCode TSARKIMEXSetType(PetscTS, PetscTSRKType) + PetscErrorCode TSARKIMEXSetFullyImplicit(PetscTS, PetscBool) ctypedef const char* PetscTSDIRKType "TSDIRKType" PetscTSDIRKType TSDIRKS212 @@ -331,29 +331,29 @@ cdef extern from * nogil: PetscTSDIRKType TSDIRK8616SAL PetscTSDIRKType TSDIRKES8516SAL - PetscErrorCode TSDIRKGetType(PetscTS,PetscTSDIRKType*) - PetscErrorCode TSDIRKSetType(PetscTS,PetscTSDIRKType) + PetscErrorCode TSDIRKGetType(PetscTS, PetscTSDIRKType*) + PetscErrorCode TSDIRKSetType(PetscTS, PetscTSDIRKType) - PetscErrorCode TSPythonSetType(PetscTS,char[]) - PetscErrorCode TSPythonGetType(PetscTS,char*[]) + PetscErrorCode TSPythonSetType(PetscTS, char[]) + PetscErrorCode TSPythonGetType(PetscTS, char*[]) cdef extern from * nogil: struct _p_TSAdapt ctypedef _p_TSAdapt *PetscTSAdapt "TSAdapt" - PetscErrorCode TSGetAdapt(PetscTS,PetscTSAdapt*) - PetscErrorCode TSAdaptGetStepLimits(PetscTSAdapt,PetscReal*,PetscReal*) - PetscErrorCode TSAdaptSetStepLimits(PetscTSAdapt,PetscReal,PetscReal) - PetscErrorCode TSAdaptCheckStage(PetscTSAdapt,PetscTS,PetscReal,PetscVec,PetscBool*) + PetscErrorCode TSGetAdapt(PetscTS, PetscTSAdapt*) + PetscErrorCode TSAdaptGetStepLimits(PetscTSAdapt, PetscReal*, PetscReal*) + PetscErrorCode TSAdaptSetStepLimits(PetscTSAdapt, PetscReal, PetscReal) + PetscErrorCode TSAdaptCheckStage(PetscTSAdapt, PetscTS, PetscReal, PetscVec, PetscBool*) cdef extern from * nogil: # custom.h - PetscErrorCode TSSetTimeStepNumber(PetscTS,PetscInt) + PetscErrorCode TSSetTimeStepNumber(PetscTS, PetscInt) # ----------------------------------------------------------------------------- cdef inline TS ref_TS(PetscTS ts): cdef TS ob = TS() ob.ts = ts - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # ----------------------------------------------------------------------------- @@ -363,8 +363,8 @@ cdef PetscErrorCode TS_RHSFunction( PetscReal t, PetscVec x, PetscVec f, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Vec Fvec = ref_Vec(f) @@ -381,8 +381,8 @@ cdef PetscErrorCode TS_RHSJacobian( PetscVec x, PetscMat J, PetscMat P, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Mat Jmat = ref_Mat(J) @@ -399,8 +399,8 @@ cdef PetscErrorCode TS_RHSJacobianP( PetscReal t, PetscVec x, PetscMat J, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Mat Jmat = ref_Mat(J) @@ -419,8 +419,8 @@ cdef PetscErrorCode TS_IFunction( PetscVec x, PetscVec xdot, PetscVec f, - void* ctx, - ) 
except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Vec XDvec = ref_Vec(xdot) @@ -440,8 +440,8 @@ cdef PetscErrorCode TS_IJacobian( PetscReal a, PetscMat J, PetscMat P, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Vec XDvec = ref_Vec(xdot) @@ -461,8 +461,8 @@ cdef PetscErrorCode TS_IJacobianP( PetscVec xdot, PetscReal a, PetscMat J, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Vec XDvec = ref_Vec(xdot) @@ -481,8 +481,8 @@ cdef PetscErrorCode TS_I2Function( PetscVec xdot, PetscVec xdotdot, PetscVec f, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Vec XDvec = ref_Vec(xdot) @@ -505,8 +505,8 @@ cdef PetscErrorCode TS_I2Jacobian( PetscReal a, PetscMat J, PetscMat P, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Xvec = ref_Vec(x) cdef Vec XDvec = ref_Vec(xdot) @@ -527,8 +527,8 @@ cdef PetscErrorCode TS_Monitor( PetscInt step, PetscReal time, PetscVec u, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Vu = ref_Vec(u) cdef object monitorlist = Ts.get_attr('__monitor__') @@ -540,19 +540,19 @@ cdef PetscErrorCode TS_Monitor( # ----------------------------------------------------------------------------- cdef PetscErrorCode TS_Indicator( - PetscTS ts, - PetscReal time, - PetscVec u, - PetscReal fvalue[], - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + PetscTS ts, + PetscReal time, + PetscVec u, + PetscReal fvalue[], + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Vu = ref_Vec(u) cdef object context = Ts.get_attr('__indicator__') if context is None: return PETSC_SUCCESS (indicator, args, kargs) = context cdef PetscInt nevents = 0 - CHKERR( TSGetNumEvents(ts, &nevents) ) + CHKERR(TSGetNumEvents(ts, &nevents)) cdef npy_intp s = nevents fvalue_array = PyArray_SimpleNewFromData(1, &s, NPY_PETSC_REAL, fvalue) indicator(Ts, toReal(time), Vu, fvalue_array, *args, **kargs) @@ -565,8 +565,8 @@ cdef PetscErrorCode TS_PostEvent( PetscReal time, PetscVec u, PetscBool forward, - void* ctx, - ) except PETSC_ERR_PYTHON with gil: + void *ctx, + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) cdef Vec Vu = ref_Vec(u) cdef object context = Ts.get_attr('__postevent__') @@ -579,7 +579,7 @@ cdef PetscErrorCode TS_PostEvent( cdef PetscErrorCode TS_PreStep( PetscTS ts, - ) except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) (prestep, args, kargs) = Ts.get_attr('__prestep__') prestep(Ts, *args, **kargs) @@ -587,7 +587,7 @@ cdef PetscErrorCode TS_PreStep( cdef PetscErrorCode TS_PostStep( PetscTS ts, - ) except PETSC_ERR_PYTHON with gil: + ) except PETSC_ERR_PYTHON with gil: cdef TS Ts = ref_TS(ts) (poststep, args, kargs) = Ts.get_attr('__poststep__') poststep(Ts, *args, **kargs) diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscvec.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscvec.pxi index f2095c92f72..f2e73082a44 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscvec.pxi +++ 
b/src/binding/petsc4py/src/petsc4py/PETSc/petscvec.pxi @@ -25,201 +25,201 @@ cdef extern from * nogil: VEC_IGNORE_OFF_PROC_ENTRIES VEC_IGNORE_NEGATIVE_INDICES - PetscErrorCode VecView(PetscVec,PetscViewer) + PetscErrorCode VecView(PetscVec, PetscViewer) PetscErrorCode VecDestroy(PetscVec*) - PetscErrorCode VecCreate(MPI_Comm,PetscVec*) + PetscErrorCode VecCreate(MPI_Comm, PetscVec*) - PetscErrorCode VecSetOptionsPrefix(PetscVec,char[]) - PetscErrorCode VecAppendOptionsPrefix(PetscVec,char[]) - PetscErrorCode VecGetOptionsPrefix(PetscVec,char*[]) + PetscErrorCode VecSetOptionsPrefix(PetscVec, char[]) + PetscErrorCode VecAppendOptionsPrefix(PetscVec, char[]) + PetscErrorCode VecGetOptionsPrefix(PetscVec, char*[]) PetscErrorCode VecSetFromOptions(PetscVec) PetscErrorCode VecSetUp(PetscVec) - PetscErrorCode VecCreateSeq(MPI_Comm,PetscInt,PetscVec*) - PetscErrorCode VecCreateSeqWithArray(MPI_Comm,PetscInt,PetscInt,PetscScalar[],PetscVec*) - PetscErrorCode VecCreateSeqCUDAWithArrays(MPI_Comm,PetscInt,PetscInt,PetscScalar[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateSeqHIPWithArrays(MPI_Comm,PetscInt,PetscInt,PetscScalar[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateSeqViennaCLWithArrays(MPI_Comm,PetscInt,PetscInt,PetscScalar[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateMPI(MPI_Comm,PetscInt,PetscInt,PetscVec*) - PetscErrorCode VecCreateMPIWithArray(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscScalar[],PetscVec*) - PetscErrorCode VecCreateMPICUDAWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscScalar[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateMPIHIPWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscScalar[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateMPIViennaCLWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscScalar[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateGhost(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscVec*) - PetscErrorCode VecCreateGhostWithArray(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateGhostBlock(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscVec*) - PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscScalar[],PetscVec*) - PetscErrorCode VecCreateShared(MPI_Comm,PetscInt,PetscInt,PetscVec*) - PetscErrorCode VecCreateNest(MPI_Comm,PetscInt,PetscIS[],PetscVec[],PetscVec*) - PetscErrorCode VecGetType(PetscVec,PetscVecType*) - PetscErrorCode VecSetType(PetscVec,PetscVecType) - PetscErrorCode VecSetOption(PetscVec,PetscVecOption,PetscBool) - PetscErrorCode VecSetSizes(PetscVec,PetscInt,PetscInt) - PetscErrorCode VecGetSize(PetscVec,PetscInt*) - PetscErrorCode VecGetLocalSize(PetscVec,PetscInt*) - PetscErrorCode VecSetBlockSize(PetscVec,PetscInt) - PetscErrorCode VecGetBlockSize(PetscVec,PetscInt*) - PetscErrorCode VecGetOwnershipRange(PetscVec,PetscInt*,PetscInt*) - PetscErrorCode VecGetOwnershipRanges(PetscVec,const PetscInt*[]) - - PetscErrorCode VecCreateLocalVector(PetscVec,PetscVec*) - PetscErrorCode VecGetLocalVector(PetscVec,PetscVec) - PetscErrorCode VecRestoreLocalVector(PetscVec,PetscVec) - PetscErrorCode VecGetLocalVectorRead(PetscVec,PetscVec) - PetscErrorCode VecRestoreLocalVectorRead(PetscVec,PetscVec) - - PetscErrorCode VecGetArrayWrite(PetscVec,PetscScalar*[]) - PetscErrorCode VecRestoreArrayWrite(PetscVec,PetscScalar*[]) - PetscErrorCode VecGetArrayRead(PetscVec,const PetscScalar*[]) - PetscErrorCode VecRestoreArrayRead(PetscVec,const PetscScalar*[]) - PetscErrorCode 
VecGetArray(PetscVec,PetscScalar*[]) - PetscErrorCode VecRestoreArray(PetscVec,PetscScalar*[]) - PetscErrorCode VecPlaceArray(PetscVec,PetscScalar[]) + PetscErrorCode VecCreateSeq(MPI_Comm, PetscInt, PetscVec*) + PetscErrorCode VecCreateSeqWithArray(MPI_Comm, PetscInt, PetscInt, PetscScalar[], PetscVec*) + PetscErrorCode VecCreateSeqCUDAWithArrays(MPI_Comm, PetscInt, PetscInt, PetscScalar[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateSeqHIPWithArrays(MPI_Comm, PetscInt, PetscInt, PetscScalar[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateSeqViennaCLWithArrays(MPI_Comm, PetscInt, PetscInt, PetscScalar[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateMPI(MPI_Comm, PetscInt, PetscInt, PetscVec*) + PetscErrorCode VecCreateMPIWithArray(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscScalar[], PetscVec*) + PetscErrorCode VecCreateMPICUDAWithArrays(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscScalar[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateMPIHIPWithArrays(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscScalar[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateMPIViennaCLWithArrays(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscScalar[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateGhost(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt[], PetscVec*) + PetscErrorCode VecCreateGhostWithArray(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateGhostBlock(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt[], PetscVec*) + PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscInt[], PetscScalar[], PetscVec*) + PetscErrorCode VecCreateShared(MPI_Comm, PetscInt, PetscInt, PetscVec*) + PetscErrorCode VecCreateNest(MPI_Comm, PetscInt, PetscIS[], PetscVec[], PetscVec*) + PetscErrorCode VecGetType(PetscVec, PetscVecType*) + PetscErrorCode VecSetType(PetscVec, PetscVecType) + PetscErrorCode VecSetOption(PetscVec, PetscVecOption, PetscBool) + PetscErrorCode VecSetSizes(PetscVec, PetscInt, PetscInt) + PetscErrorCode VecGetSize(PetscVec, PetscInt*) + PetscErrorCode VecGetLocalSize(PetscVec, PetscInt*) + PetscErrorCode VecSetBlockSize(PetscVec, PetscInt) + PetscErrorCode VecGetBlockSize(PetscVec, PetscInt*) + PetscErrorCode VecGetOwnershipRange(PetscVec, PetscInt*, PetscInt*) + PetscErrorCode VecGetOwnershipRanges(PetscVec, const PetscInt*[]) + + PetscErrorCode VecCreateLocalVector(PetscVec, PetscVec*) + PetscErrorCode VecGetLocalVector(PetscVec, PetscVec) + PetscErrorCode VecRestoreLocalVector(PetscVec, PetscVec) + PetscErrorCode VecGetLocalVectorRead(PetscVec, PetscVec) + PetscErrorCode VecRestoreLocalVectorRead(PetscVec, PetscVec) + + PetscErrorCode VecGetArrayWrite(PetscVec, PetscScalar*[]) + PetscErrorCode VecRestoreArrayWrite(PetscVec, PetscScalar*[]) + PetscErrorCode VecGetArrayRead(PetscVec, const PetscScalar*[]) + PetscErrorCode VecRestoreArrayRead(PetscVec, const PetscScalar*[]) + PetscErrorCode VecGetArray(PetscVec, PetscScalar*[]) + PetscErrorCode VecRestoreArray(PetscVec, PetscScalar*[]) + PetscErrorCode VecPlaceArray(PetscVec, PetscScalar[]) PetscErrorCode VecResetArray(PetscVec) - PetscErrorCode VecGetArrayWriteAndMemType(PetscVec,PetscScalar*[],PetscMemType*) - PetscErrorCode VecRestoreArrayWriteAndMemType(PetscVec,PetscScalar*[]) - PetscErrorCode VecGetArrayReadAndMemType(PetscVec,const PetscScalar*[],PetscMemType*) - PetscErrorCode VecRestoreArrayReadAndMemType(PetscVec,const PetscScalar*[]) - PetscErrorCode 
VecGetArrayAndMemType(PetscVec,PetscScalar*[],PetscMemType*) - PetscErrorCode VecRestoreArrayAndMemType(PetscVec,PetscScalar*[]) - - PetscErrorCode VecEqual(PetscVec,PetscVec,PetscBool*) - PetscErrorCode VecLoad(PetscVec,PetscViewer) - - PetscErrorCode VecDuplicate(PetscVec,PetscVec*) - PetscErrorCode VecCopy(PetscVec,PetscVec) - PetscErrorCode VecFilter(PetscVec,PetscReal) - - PetscErrorCode VecDuplicateVecs(PetscVec,PetscInt,PetscVec*[]) - PetscErrorCode VecDestroyVecs(PetscInt,PetscVec*[]) - - PetscErrorCode VecGetValues(PetscVec,PetscInt,PetscInt[],PetscScalar[]) - - PetscErrorCode VecSetValue(PetscVec,PetscInt,PetscScalar,PetscInsertMode) - PetscErrorCode VecSetValues(PetscVec,PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - PetscErrorCode VecSetValuesBlocked(PetscVec,PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - - PetscErrorCode VecSetLocalToGlobalMapping(PetscVec,PetscLGMap) - PetscErrorCode VecGetLocalToGlobalMapping(PetscVec,PetscLGMap*) - PetscErrorCode VecSetValueLocal(PetscVec,PetscInt,PetscScalar,PetscInsertMode) - PetscErrorCode VecSetValuesLocal(PetscVec,PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - PetscErrorCode VecSetValuesBlockedLocal(PetscVec,PetscInt,const PetscInt[],const PetscScalar[],PetscInsertMode) - - PetscErrorCode VecDot(PetscVec,PetscVec,PetscScalar*) - PetscErrorCode VecDotBegin(PetscVec,PetscVec,PetscScalar*) - PetscErrorCode VecDotEnd(PetscVec,PetscVec,PetscScalar*) - PetscErrorCode VecTDot(PetscVec,PetscVec,PetscScalar*) - PetscErrorCode VecTDotBegin(PetscVec,PetscVec,PetscScalar*) - PetscErrorCode VecTDotEnd(PetscVec,PetscVec,PetscScalar*) - PetscErrorCode VecMDot(PetscVec,PetscInt,PetscVec[],PetscScalar*) - PetscErrorCode VecMDotBegin(PetscVec,PetscInt,PetscVec[],PetscScalar*) - PetscErrorCode VecMDotEnd(PetscVec,PetscInt,PetscVec[],PetscScalar*) - PetscErrorCode VecMTDot(PetscVec,PetscInt,PetscVec[],PetscScalar*) - PetscErrorCode VecMTDotBegin(PetscVec,PetscInt,PetscVec[],PetscScalar*) - PetscErrorCode VecMTDotEnd(PetscVec,PetscInt,PetscVec[],PetscScalar*) - - PetscErrorCode VecNorm(PetscVec,PetscNormType,PetscReal*) - PetscErrorCode VecNormBegin(PetscVec,PetscNormType,PetscReal*) - PetscErrorCode VecNormEnd(PetscVec,PetscNormType,PetscReal*) - PetscErrorCode VecDotNorm2(PetscVec,PetscVec,PetscScalar*,PetscReal*) + PetscErrorCode VecGetArrayWriteAndMemType(PetscVec, PetscScalar*[], PetscMemType*) + PetscErrorCode VecRestoreArrayWriteAndMemType(PetscVec, PetscScalar*[]) + PetscErrorCode VecGetArrayReadAndMemType(PetscVec, const PetscScalar*[], PetscMemType*) + PetscErrorCode VecRestoreArrayReadAndMemType(PetscVec, const PetscScalar*[]) + PetscErrorCode VecGetArrayAndMemType(PetscVec, PetscScalar*[], PetscMemType*) + PetscErrorCode VecRestoreArrayAndMemType(PetscVec, PetscScalar*[]) + + PetscErrorCode VecEqual(PetscVec, PetscVec, PetscBool*) + PetscErrorCode VecLoad(PetscVec, PetscViewer) + + PetscErrorCode VecDuplicate(PetscVec, PetscVec*) + PetscErrorCode VecCopy(PetscVec, PetscVec) + PetscErrorCode VecFilter(PetscVec, PetscReal) + + PetscErrorCode VecDuplicateVecs(PetscVec, PetscInt, PetscVec*[]) + PetscErrorCode VecDestroyVecs(PetscInt, PetscVec*[]) + + PetscErrorCode VecGetValues(PetscVec, PetscInt, PetscInt[], PetscScalar[]) + + PetscErrorCode VecSetValue(PetscVec, PetscInt, PetscScalar, PetscInsertMode) + PetscErrorCode VecSetValues(PetscVec, PetscInt, const PetscInt[], const PetscScalar[], PetscInsertMode) + PetscErrorCode VecSetValuesBlocked(PetscVec, PetscInt, const PetscInt[], const 
PetscScalar[], PetscInsertMode) + + PetscErrorCode VecSetLocalToGlobalMapping(PetscVec, PetscLGMap) + PetscErrorCode VecGetLocalToGlobalMapping(PetscVec, PetscLGMap*) + PetscErrorCode VecSetValueLocal(PetscVec, PetscInt, PetscScalar, PetscInsertMode) + PetscErrorCode VecSetValuesLocal(PetscVec, PetscInt, const PetscInt[], const PetscScalar[], PetscInsertMode) + PetscErrorCode VecSetValuesBlockedLocal(PetscVec, PetscInt, const PetscInt[], const PetscScalar[], PetscInsertMode) + + PetscErrorCode VecDot(PetscVec, PetscVec, PetscScalar*) + PetscErrorCode VecDotBegin(PetscVec, PetscVec, PetscScalar*) + PetscErrorCode VecDotEnd(PetscVec, PetscVec, PetscScalar*) + PetscErrorCode VecTDot(PetscVec, PetscVec, PetscScalar*) + PetscErrorCode VecTDotBegin(PetscVec, PetscVec, PetscScalar*) + PetscErrorCode VecTDotEnd(PetscVec, PetscVec, PetscScalar*) + PetscErrorCode VecMDot(PetscVec, PetscInt, PetscVec[], PetscScalar*) + PetscErrorCode VecMDotBegin(PetscVec, PetscInt, PetscVec[], PetscScalar*) + PetscErrorCode VecMDotEnd(PetscVec, PetscInt, PetscVec[], PetscScalar*) + PetscErrorCode VecMTDot(PetscVec, PetscInt, PetscVec[], PetscScalar*) + PetscErrorCode VecMTDotBegin(PetscVec, PetscInt, PetscVec[], PetscScalar*) + PetscErrorCode VecMTDotEnd(PetscVec, PetscInt, PetscVec[], PetscScalar*) + + PetscErrorCode VecNorm(PetscVec, PetscNormType, PetscReal*) + PetscErrorCode VecNormBegin(PetscVec, PetscNormType, PetscReal*) + PetscErrorCode VecNormEnd(PetscVec, PetscNormType, PetscReal*) + PetscErrorCode VecDotNorm2(PetscVec, PetscVec, PetscScalar*, PetscReal*) PetscErrorCode VecAssemblyBegin(PetscVec) PetscErrorCode VecAssemblyEnd(PetscVec) PetscErrorCode VecZeroEntries(PetscVec) PetscErrorCode VecConjugate(PetscVec) - PetscErrorCode VecNormalize(PetscVec,PetscReal*) - PetscErrorCode VecSum(PetscVec,PetscScalar*) - PetscErrorCode VecMax(PetscVec,PetscInt*,PetscReal*) - PetscErrorCode VecMin(PetscVec,PetscInt*,PetscReal*) - PetscErrorCode VecScale(PetscVec,PetscScalar) - PetscErrorCode VecCopy(PetscVec,PetscVec) - PetscErrorCode VecSetRandom(PetscVec,PetscRandom) - PetscErrorCode VecSet(PetscVec,PetscScalar) - PetscErrorCode VecSwap(PetscVec,PetscVec) - PetscErrorCode VecAXPY(PetscVec,PetscScalar,PetscVec) - PetscErrorCode VecAXPBY(PetscVec,PetscScalar,PetscScalar,PetscVec) - PetscErrorCode VecAYPX(PetscVec,PetscScalar,PetscVec) - PetscErrorCode VecWAXPY(PetscVec,PetscScalar,PetscVec,PetscVec) - PetscErrorCode VecMAXPY(PetscVec,PetscInt,PetscScalar[],PetscVec[]) - PetscErrorCode VecPointwiseMax(PetscVec,PetscVec,PetscVec) - PetscErrorCode VecPointwiseMaxAbs(PetscVec,PetscVec,PetscVec) - PetscErrorCode VecPointwiseMin(PetscVec,PetscVec,PetscVec) - PetscErrorCode VecPointwiseMult(PetscVec,PetscVec,PetscVec) - PetscErrorCode VecPointwiseDivide(PetscVec,PetscVec,PetscVec) - PetscErrorCode VecMaxPointwiseDivide(PetscVec,PetscVec,PetscReal*) - PetscErrorCode VecShift(PetscVec,PetscScalar) - PetscErrorCode VecFilter(PetscVec,PetscReal) + PetscErrorCode VecNormalize(PetscVec, PetscReal*) + PetscErrorCode VecSum(PetscVec, PetscScalar*) + PetscErrorCode VecMax(PetscVec, PetscInt*, PetscReal*) + PetscErrorCode VecMin(PetscVec, PetscInt*, PetscReal*) + PetscErrorCode VecScale(PetscVec, PetscScalar) + PetscErrorCode VecCopy(PetscVec, PetscVec) + PetscErrorCode VecSetRandom(PetscVec, PetscRandom) + PetscErrorCode VecSet(PetscVec, PetscScalar) + PetscErrorCode VecSwap(PetscVec, PetscVec) + PetscErrorCode VecAXPY(PetscVec, PetscScalar, PetscVec) + PetscErrorCode VecAXPBY(PetscVec, PetscScalar, PetscScalar, PetscVec) + 
PetscErrorCode VecAYPX(PetscVec, PetscScalar, PetscVec) + PetscErrorCode VecWAXPY(PetscVec, PetscScalar, PetscVec, PetscVec) + PetscErrorCode VecMAXPY(PetscVec, PetscInt, PetscScalar[], PetscVec[]) + PetscErrorCode VecPointwiseMax(PetscVec, PetscVec, PetscVec) + PetscErrorCode VecPointwiseMaxAbs(PetscVec, PetscVec, PetscVec) + PetscErrorCode VecPointwiseMin(PetscVec, PetscVec, PetscVec) + PetscErrorCode VecPointwiseMult(PetscVec, PetscVec, PetscVec) + PetscErrorCode VecPointwiseDivide(PetscVec, PetscVec, PetscVec) + PetscErrorCode VecMaxPointwiseDivide(PetscVec, PetscVec, PetscReal*) + PetscErrorCode VecShift(PetscVec, PetscScalar) + PetscErrorCode VecFilter(PetscVec, PetscReal) PetscErrorCode VecReciprocal(PetscVec) - PetscErrorCode VecPermute(PetscVec,PetscIS,PetscBool) + PetscErrorCode VecPermute(PetscVec, PetscIS, PetscBool) PetscErrorCode VecExp(PetscVec) PetscErrorCode VecLog(PetscVec) PetscErrorCode VecSqrtAbs(PetscVec) PetscErrorCode VecAbs(PetscVec) - PetscErrorCode VecStrideMin(PetscVec,PetscInt,PetscInt*,PetscReal*) - PetscErrorCode VecStrideMax(PetscVec,PetscInt,PetscInt*,PetscReal*) - PetscErrorCode VecStrideScale(PetscVec,PetscInt,PetscScalar) - PetscErrorCode VecStrideGather(PetscVec,PetscInt,PetscVec,PetscInsertMode) - PetscErrorCode VecStrideScatter(PetscVec,PetscInt,PetscVec,PetscInsertMode) - PetscErrorCode VecStrideNorm(PetscVec,PetscInt,PetscNormType,PetscReal*) - - PetscErrorCode VecGhostGetLocalForm(PetscVec,PetscVec*) - PetscErrorCode VecGhostRestoreLocalForm(PetscVec,PetscVec*) - PetscErrorCode VecGhostUpdateBegin(PetscVec,PetscInsertMode,PetscScatterMode) - PetscErrorCode VecGhostUpdateEnd(PetscVec,PetscInsertMode,PetscScatterMode) - PetscErrorCode VecMPISetGhost(PetscVec,PetscInt,const PetscInt*) - - PetscErrorCode VecGetSubVector(PetscVec,PetscIS,PetscVec*) - PetscErrorCode VecRestoreSubVector(PetscVec,PetscIS,PetscVec*) - - PetscErrorCode VecNestGetSubVecs(PetscVec,PetscInt*,PetscVec**) - PetscErrorCode VecNestSetSubVecs(PetscVec,PetscInt,PetscInt*,PetscVec*) - - PetscErrorCode VecISAXPY(PetscVec,PetscIS,PetscScalar,PetscVec) - PetscErrorCode VecISSet(PetscVec,PetscIS,PetscScalar) - - PetscErrorCode VecCUDAGetArrayRead(PetscVec,const PetscScalar*[]) - PetscErrorCode VecCUDAGetArrayWrite(PetscVec,PetscScalar*[]) - PetscErrorCode VecCUDAGetArray(PetscVec,PetscScalar*[]) - PetscErrorCode VecCUDARestoreArrayRead(PetscVec,const PetscScalar*[]) - PetscErrorCode VecCUDARestoreArrayWrite(PetscVec,PetscScalar*[]) - PetscErrorCode VecCUDARestoreArray(PetscVec,PetscScalar*[]) - - PetscErrorCode VecHIPGetArrayRead(PetscVec,const PetscScalar*[]) - PetscErrorCode VecHIPGetArrayWrite(PetscVec,PetscScalar*[]) - PetscErrorCode VecHIPGetArray(PetscVec,PetscScalar*[]) - PetscErrorCode VecHIPRestoreArrayRead(PetscVec,const PetscScalar*[]) - PetscErrorCode VecHIPRestoreArrayWrite(PetscVec,PetscScalar*[]) - PetscErrorCode VecHIPRestoreArray(PetscVec,PetscScalar*[]) - - PetscErrorCode VecBindToCPU(PetscVec,PetscBool) - PetscErrorCode VecBoundToCPU(PetscVec,PetscBool*) - PetscErrorCode VecGetOffloadMask(PetscVec,PetscOffloadMask*) - - PetscErrorCode VecViennaCLGetCLContext(PetscVec,Py_uintptr_t*) - PetscErrorCode VecViennaCLGetCLQueue(PetscVec,Py_uintptr_t*) - PetscErrorCode VecViennaCLGetCLMemRead(PetscVec,Py_uintptr_t*) - PetscErrorCode VecViennaCLGetCLMemWrite(PetscVec,Py_uintptr_t*) + PetscErrorCode VecStrideMin(PetscVec, PetscInt, PetscInt*, PetscReal*) + PetscErrorCode VecStrideMax(PetscVec, PetscInt, PetscInt*, PetscReal*) + PetscErrorCode VecStrideScale(PetscVec, PetscInt, 
PetscScalar) + PetscErrorCode VecStrideGather(PetscVec, PetscInt, PetscVec, PetscInsertMode) + PetscErrorCode VecStrideScatter(PetscVec, PetscInt, PetscVec, PetscInsertMode) + PetscErrorCode VecStrideNorm(PetscVec, PetscInt, PetscNormType, PetscReal*) + + PetscErrorCode VecGhostGetLocalForm(PetscVec, PetscVec*) + PetscErrorCode VecGhostRestoreLocalForm(PetscVec, PetscVec*) + PetscErrorCode VecGhostUpdateBegin(PetscVec, PetscInsertMode, PetscScatterMode) + PetscErrorCode VecGhostUpdateEnd(PetscVec, PetscInsertMode, PetscScatterMode) + PetscErrorCode VecMPISetGhost(PetscVec, PetscInt, const PetscInt*) + + PetscErrorCode VecGetSubVector(PetscVec, PetscIS, PetscVec*) + PetscErrorCode VecRestoreSubVector(PetscVec, PetscIS, PetscVec*) + + PetscErrorCode VecNestGetSubVecs(PetscVec, PetscInt*, PetscVec**) + PetscErrorCode VecNestSetSubVecs(PetscVec, PetscInt, PetscInt*, PetscVec*) + + PetscErrorCode VecISAXPY(PetscVec, PetscIS, PetscScalar, PetscVec) + PetscErrorCode VecISSet(PetscVec, PetscIS, PetscScalar) + + PetscErrorCode VecCUDAGetArrayRead(PetscVec, const PetscScalar*[]) + PetscErrorCode VecCUDAGetArrayWrite(PetscVec, PetscScalar*[]) + PetscErrorCode VecCUDAGetArray(PetscVec, PetscScalar*[]) + PetscErrorCode VecCUDARestoreArrayRead(PetscVec, const PetscScalar*[]) + PetscErrorCode VecCUDARestoreArrayWrite(PetscVec, PetscScalar*[]) + PetscErrorCode VecCUDARestoreArray(PetscVec, PetscScalar*[]) + + PetscErrorCode VecHIPGetArrayRead(PetscVec, const PetscScalar*[]) + PetscErrorCode VecHIPGetArrayWrite(PetscVec, PetscScalar*[]) + PetscErrorCode VecHIPGetArray(PetscVec, PetscScalar*[]) + PetscErrorCode VecHIPRestoreArrayRead(PetscVec, const PetscScalar*[]) + PetscErrorCode VecHIPRestoreArrayWrite(PetscVec, PetscScalar*[]) + PetscErrorCode VecHIPRestoreArray(PetscVec, PetscScalar*[]) + + PetscErrorCode VecBindToCPU(PetscVec, PetscBool) + PetscErrorCode VecBoundToCPU(PetscVec, PetscBool*) + PetscErrorCode VecGetOffloadMask(PetscVec, PetscOffloadMask*) + + PetscErrorCode VecViennaCLGetCLContext(PetscVec, Py_uintptr_t*) + PetscErrorCode VecViennaCLGetCLQueue(PetscVec, Py_uintptr_t*) + PetscErrorCode VecViennaCLGetCLMemRead(PetscVec, Py_uintptr_t*) + PetscErrorCode VecViennaCLGetCLMemWrite(PetscVec, Py_uintptr_t*) PetscErrorCode VecViennaCLRestoreCLMemWrite(PetscVec) - PetscErrorCode VecViennaCLGetCLMem(PetscVec,Py_uintptr_t*) + PetscErrorCode VecViennaCLGetCLMem(PetscVec, Py_uintptr_t*) PetscErrorCode VecViennaCLRestoreCLMem(PetscVec) - PetscErrorCode VecCreateSeqCUDAWithArray(MPI_Comm,PetscInt,PetscInt,const PetscScalar*,PetscVec*) - PetscErrorCode VecCreateMPICUDAWithArray(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscScalar*,PetscVec*) - PetscErrorCode VecCreateSeqHIPWithArray(MPI_Comm,PetscInt,PetscInt,const PetscScalar*,PetscVec*) - PetscErrorCode VecCreateMPIHIPWithArray(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscScalar*,PetscVec*) + PetscErrorCode VecCreateSeqCUDAWithArray(MPI_Comm, PetscInt, PetscInt, const PetscScalar*, PetscVec*) + PetscErrorCode VecCreateMPICUDAWithArray(MPI_Comm, PetscInt, PetscInt, PetscInt, const PetscScalar*, PetscVec*) + PetscErrorCode VecCreateSeqHIPWithArray(MPI_Comm, PetscInt, PetscInt, const PetscScalar*, PetscVec*) + PetscErrorCode VecCreateMPIHIPWithArray(MPI_Comm, PetscInt, PetscInt, PetscInt, const PetscScalar*, PetscVec*) cdef extern from * nogil: # custom.h - PetscErrorCode VecStrideSum(PetscVec,PetscInt,PetscScalar*) - PetscErrorCode VecGetCurrentMemType(PetscVec,PetscMemType*) + PetscErrorCode VecStrideSum(PetscVec, PetscInt, PetscScalar*) + 
PetscErrorCode VecGetCurrentMemType(PetscVec, PetscMemType*) # -------------------------------------------------------------------- cdef inline Vec ref_Vec(PetscVec vec): cdef Vec ob = Vec() ob.vec = vec - CHKERR( PetscINCREF(ob.obj) ) + CHKERR(PetscINCREF(ob.obj)) return ob # -------------------------------------------------------------------- @@ -228,18 +228,18 @@ cdef inline Vec ref_Vec(PetscVec vec): cdef Vec vec_pos(Vec self): cdef Vec vec = type(self)() - CHKERR( VecDuplicate(self.vec, &vec.vec) ) - CHKERR( VecCopy(self.vec, vec.vec) ) + CHKERR(VecDuplicate(self.vec, &vec.vec)) + CHKERR(VecCopy(self.vec, vec.vec)) return vec cdef Vec vec_neg(Vec self): cdef Vec vec = vec_pos(self) - CHKERR( VecScale(vec.vec, -1) ) + CHKERR(VecScale(vec.vec, -1)) return vec cdef Vec vec_abs(Vec self): cdef Vec vec = vec_pos(self) - CHKERR( VecAbs(vec.vec) ) + CHKERR(VecAbs(vec.vec)) return vec # inplace binary operations @@ -249,14 +249,14 @@ cdef Vec vec_iadd(Vec self, other): cdef Vec vec if isinstance(other, Vec): alpha = 1; vec = other - CHKERR( VecAXPY(self.vec, alpha, vec.vec) ) + CHKERR(VecAXPY(self.vec, alpha, vec.vec)) elif isinstance(other, (tuple, list)): other, vec = other alpha = asScalar(other) - CHKERR( VecAXPY(self.vec, alpha, vec.vec) ) + CHKERR(VecAXPY(self.vec, alpha, vec.vec)) else: alpha = asScalar(other) - CHKERR( VecShift(self.vec, alpha) ) + CHKERR(VecShift(self.vec, alpha)) return self cdef Vec vec_isub(Vec self, other): @@ -264,14 +264,14 @@ cdef Vec vec_isub(Vec self, other): cdef Vec vec if isinstance(other, Vec): alpha = 1; vec = other - CHKERR( VecAXPY(self.vec, -alpha, vec.vec) ) + CHKERR(VecAXPY(self.vec, -alpha, vec.vec)) elif isinstance(other, (tuple, list)): other, vec = other alpha = asScalar(other) - CHKERR( VecAXPY(self.vec, -alpha, vec.vec) ) + CHKERR(VecAXPY(self.vec, -alpha, vec.vec)) else: alpha = asScalar(other) - CHKERR( VecShift(self.vec, -alpha) ) + CHKERR(VecShift(self.vec, -alpha)) return self cdef Vec vec_imul(Vec self, other): @@ -279,10 +279,10 @@ cdef Vec vec_imul(Vec self, other): cdef Vec vec if isinstance(other, Vec): vec = other - CHKERR( VecPointwiseMult(self.vec, self.vec, vec.vec) ) + CHKERR(VecPointwiseMult(self.vec, self.vec, vec.vec)) else: alpha = asScalar(other) - CHKERR( VecScale(self.vec, alpha) ) + CHKERR(VecScale(self.vec, alpha)) return self cdef Vec vec_idiv(Vec self, other): @@ -291,10 +291,10 @@ cdef Vec vec_idiv(Vec self, other): cdef Vec vec if isinstance(other, Vec): vec = other - CHKERR( VecPointwiseDivide(self.vec, self.vec, vec.vec) ) + CHKERR(VecPointwiseDivide(self.vec, self.vec, vec.vec)) else: alpha = asScalar(other) - CHKERR( VecScale(self.vec, one/alpha) ) + CHKERR(VecScale(self.vec, one/alpha)) return self # binary operations @@ -327,7 +327,7 @@ cdef Vec vec_radd(Vec self, other): cdef Vec vec_rsub(Vec self, other): cdef Vec vec = vec_sub(self, other) - CHKERR( VecScale(vec.vec, -1) ) + CHKERR(VecScale(vec.vec, -1)) return vec cdef Vec vec_rmul(Vec self, other): @@ -335,7 +335,7 @@ cdef Vec vec_rmul(Vec self, other): cdef Vec vec_rdiv(Vec self, other): cdef Vec vec = vec_div(self, other) - CHKERR( VecReciprocal(vec.vec) ) + CHKERR(VecReciprocal(vec.vec)) return vec # -------------------------------------------------------------------- @@ -348,8 +348,8 @@ cdef inline int Vec_Sizes(object size, object bsize, # -------------------------------------------------------------------- ctypedef PetscErrorCode VecSetValuesFcn(PetscVec, - PetscInt,const PetscInt*, - const PetscScalar*,PetscInsertMode) + PetscInt, const PetscInt*, 
+ const PetscScalar*, PetscInsertMode) cdef inline VecSetValuesFcn* vecsetvalues_fcn(int blocked, int local): cdef VecSetValuesFcn *setvalues = NULL @@ -365,27 +365,27 @@ cdef inline int vecsetvalues(PetscVec V, # block size cdef PetscInt bs=1 if blocked: - CHKERR( VecGetBlockSize(V, &bs) ) + CHKERR(VecGetBlockSize(V, &bs)) if bs < 1: bs = 1 # indices and values cdef PetscInt ni=0, nv=0 - cdef PetscInt *i=NULL + cdef PetscInt *i=NULL cdef PetscScalar *v=NULL - cdef object tmp1 = iarray_i(oi, &ni, &i) - cdef object tmp2 = iarray_s(ov, &nv, &v) + cdef object unused1 = iarray_i(oi, &ni, &i) + cdef object unused2 = iarray_s(ov, &nv, &v) if ni*bs != nv: raise ValueError( "incompatible array sizes: ni=%d, nv=%d, bs=%d" % - (toInt(ni), toInt(nv), toInt(bs)) ) + (toInt(ni), toInt(nv), toInt(bs))) # VecSetValuesXXX function and insert mode cdef VecSetValuesFcn *setvalues = vecsetvalues_fcn(blocked, local) cdef PetscInsertMode addv = insertmode(oim) # actual call - CHKERR( setvalues(V, ni, i, v, addv) ) + CHKERR(setvalues(V, ni, i, v, addv)) return 0 cdef object vecgetvalues(PetscVec vec, object oindices, object values): cdef PetscInt ni=0, nv=0 - cdef PetscInt *i=NULL + cdef PetscInt *i=NULL cdef PetscScalar *v=NULL cdef object indices = iarray_i(oindices, &ni, &i) if values is None: @@ -395,7 +395,7 @@ cdef object vecgetvalues(PetscVec vec, object oindices, object values): if (ni != nv): raise ValueError( ("incompatible array sizes: " "ni=%d, nv=%d") % (toInt(ni), toInt(nv))) - CHKERR( VecGetValues(vec, ni, i, v) ) + CHKERR(VecGetValues(vec, ni, i, v)) return values # -------------------------------------------------------------------- @@ -420,19 +420,19 @@ cdef inline int vec_setarray(Vec self, object o) except -1: cdef PetscInt na=0, nv=0, i=0 cdef PetscScalar *va=NULL, *vv=NULL cdef ndarray ary = iarray_s(o, &na, &va) - CHKERR( VecGetLocalSize(self.vec, &nv) ) + CHKERR(VecGetLocalSize(self.vec, &nv)) if (na != nv) and PyArray_NDIM(ary) > 0: raise ValueError( "array size %d incompatible with vector local size %d" % - (toInt(na), toInt(nv)) ) - CHKERR( VecGetArray(self.vec, &vv) ) + (toInt(na), toInt(nv))) + CHKERR(VecGetArray(self.vec, &vv)) try: if PyArray_NDIM(ary) == 0: for i from 0 <= i < nv: vv[i] = va[0] else: - CHKERR( PetscMemcpy(vv, va, nv*sizeof(PetscScalar)) ) + CHKERR(PetscMemcpy(vv, va, nv*sizeof(PetscScalar))) finally: - CHKERR( VecRestoreArray(self.vec, &vv) ) + CHKERR(VecRestoreArray(self.vec, &vv)) return 0 cdef object vec_getitem(Vec self, object i): @@ -440,7 +440,7 @@ cdef object vec_getitem(Vec self, object i): if i is Ellipsis: return asarray(self) if isinstance(i, slice): - CHKERR( VecGetSize(self.vec, &N) ) + CHKERR(VecGetSize(self.vec, &N)) start, stop, stride = i.indices(toInt(N)) i = arange(start, stop, stride) return vecgetvalues(self.vec, i, None) @@ -450,7 +450,7 @@ cdef int vec_setitem(Vec self, object i, object v) except -1: if i is Ellipsis: return vec_setarray(self, v) if isinstance(i, slice): - CHKERR( VecGetSize(self.vec, &N) ) + CHKERR(VecGetSize(self.vec, &N)) start, stop, stride = i.indices(toInt(N)) i = arange(start, stop, stride) vecsetvalues(self.vec, i, v, None, 0, 0) @@ -469,17 +469,17 @@ cdef vec_get_dlpack_ctx(Vec self): if ctx0 is None: # First time in, create a linear memory view s1 = oarray_p(empty_p(ndim), NULL, &shape_arr) s2 = oarray_p(empty_p(ndim), NULL, &strides_arr) - CHKERR( VecGetLocalSize(self.vec, &n) ) + CHKERR(VecGetLocalSize(self.vec, &n)) shape_arr[0] = n strides_arr[0] = 1 else: (_, _, ndim, s1, s2) = ctx0 - devType_ = { 
PETSC_MEMTYPE_HOST : kDLCPU, PETSC_MEMTYPE_CUDA : kDLCUDA, PETSC_MEMTYPE_HIP : kDLROCM } - CHKERR( VecGetCurrentMemType(self.vec, &mtype) ) + devType_ = {PETSC_MEMTYPE_HOST : kDLCPU, PETSC_MEMTYPE_CUDA : kDLCUDA, PETSC_MEMTYPE_HIP : kDLROCM} + CHKERR(VecGetCurrentMemType(self.vec, &mtype)) dtype = devType_.get(mtype, kDLCPU) if dtype != kDLCPU: - CHKERR( PetscObjectGetDeviceId(self.vec, &devId) ) + CHKERR(PetscObjectGetDeviceId(self.vec, &devId)) ctx0 = (dtype, devId, ndim, s1, s2) self.set_attr('__dltensor_ctx__', ctx0) return ctx0 @@ -487,13 +487,13 @@ cdef vec_get_dlpack_ctx(Vec self): # -------------------------------------------------------------------- cdef int Vec_AcquireArray(PetscVec v, PetscScalar *a[], int ro) except -1 nogil: - if ro: CHKERR( VecGetArrayRead(v, a) ) - else: CHKERR( VecGetArray(v, a) ) + if ro: CHKERR(VecGetArrayRead(v, a)) + else: CHKERR(VecGetArray(v, a)) return 0 cdef int Vec_ReleaseArray(PetscVec v, PetscScalar *a[], int ro) except -1 nogil: - if ro: CHKERR( VecRestoreArrayRead(v, a) ) - else: CHKERR( VecRestoreArray(v, a) ) + if ro: CHKERR(VecRestoreArrayRead(v, a)) + else: CHKERR(VecRestoreArray(v, a)) return 0 cdef class _Vec_buffer: @@ -506,7 +506,7 @@ cdef class _Vec_buffer: def __cinit__(self, Vec vec, bint readonly=0): cdef PetscVec v = vec.vec - CHKERR( PetscINCREF(&v) ) + CHKERR(PetscINCREF(&v)) self.vec = v self.size = 0 self.data = NULL @@ -516,13 +516,13 @@ cdef class _Vec_buffer: def __dealloc__(self): if self.hasarray and self.vec != NULL: Vec_ReleaseArray(self.vec, &self.data, self.readonly) - CHKERR( VecDestroy(&self.vec) ) + CHKERR(VecDestroy(&self.vec)) # cdef int acquire(self) except -1 nogil: if not self.hasarray and self.vec != NULL: - CHKERR( VecGetLocalSize(self.vec, &self.size) ) + CHKERR(VecGetLocalSize(self.vec, &self.size)) Vec_AcquireArray(self.vec, &self.data, self.readonly) self.hasarray = 1 return 0 @@ -579,7 +579,7 @@ cdef class _Vec_buffer: p[0] = self.data n = self.size elif self.vec != NULL: - CHKERR( VecGetLocalSize(self.vec, &n) ) + CHKERR(VecGetLocalSize(self.vec, &n)) return (n*sizeof(PetscScalar)) def __getsegcount__(self, Py_ssize_t *lenp): @@ -605,7 +605,7 @@ cdef class _Vec_buffer: def __get__(self): cdef PetscInt n = 0 if self.vec != NULL: - CHKERR( VecGetLocalSize(self.vec, &n) ) + CHKERR(VecGetLocalSize(self.vec, &n)) cdef object size = toInt(n) cdef dtype descr = PyArray_DescrFromType(NPY_PETSC_SCALAR) cdef str typestr = "=%c%d" % (descr.kind, descr.itemsize) @@ -618,8 +618,6 @@ cdef class _Vec_buffer: cdef class _Vec_LocalForm: - "Context manager for `Vec` local form" - cdef Vec gvec cdef Vec lvec @@ -629,10 +627,10 @@ cdef class _Vec_LocalForm: def __enter__(self): cdef PetscVec gvec = self.gvec.vec - CHKERR( VecGhostGetLocalForm(gvec, &self.lvec.vec) ) + CHKERR(VecGhostGetLocalForm(gvec, &self.lvec.vec)) return self.lvec def __exit__(self, *exc): cdef PetscVec gvec = self.gvec.vec - CHKERR( VecGhostRestoreLocalForm(gvec, &self.lvec.vec) ) + CHKERR(VecGhostRestoreLocalForm(gvec, &self.lvec.vec)) self.lvec.vec = NULL diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/petscvwr.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/petscvwr.pxi index 61f2e0d793d..ad7be8f90fb 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/petscvwr.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/petscvwr.pxi @@ -68,63 +68,63 @@ cdef extern from * nogil: enum: PETSC_DRAW_THIRD_SIZE enum: PETSC_DRAW_QUARTER_SIZE - PetscErrorCode PetscViewerView(PetscViewer,PetscViewer) + PetscErrorCode PetscViewerView(PetscViewer, PetscViewer) 
PetscErrorCode PetscViewerDestroy(PetscViewer*) - PetscErrorCode PetscViewerCreate(MPI_Comm,PetscViewer*) - PetscErrorCode PetscViewerSetType(PetscViewer,PetscViewerType) - PetscErrorCode PetscViewerGetType(PetscViewer,PetscViewerType*) + PetscErrorCode PetscViewerCreate(MPI_Comm, PetscViewer*) + PetscErrorCode PetscViewerSetType(PetscViewer, PetscViewerType) + PetscErrorCode PetscViewerGetType(PetscViewer, PetscViewerType*) - PetscErrorCode PetscViewerSetOptionsPrefix(PetscViewer,char[]) - PetscErrorCode PetscViewerAppendOptionsPrefix(PetscViewer,char[]) - PetscErrorCode PetscViewerGetOptionsPrefix(PetscViewer,char*[]) + PetscErrorCode PetscViewerSetOptionsPrefix(PetscViewer, char[]) + PetscErrorCode PetscViewerAppendOptionsPrefix(PetscViewer, char[]) + PetscErrorCode PetscViewerGetOptionsPrefix(PetscViewer, char*[]) PetscErrorCode PetscViewerSetFromOptions(PetscViewer) PetscErrorCode PetscViewerSetUp(PetscViewer) - PetscErrorCode PetscViewerASCIIOpen(MPI_Comm,char[],PetscViewer*) - PetscErrorCode PetscViewerBinaryCreate(MPI_Comm comm,PetscViewer*) - PetscErrorCode PetscViewerBinaryOpen(MPI_Comm,char[],PetscFileMode,PetscViewer*) - PetscErrorCode PetscViewerDrawOpen(MPI_Comm,char[],char[],int,int,int,int,PetscViewer*) + PetscErrorCode PetscViewerASCIIOpen(MPI_Comm, char[], PetscViewer*) + PetscErrorCode PetscViewerBinaryCreate(MPI_Comm comm, PetscViewer*) + PetscErrorCode PetscViewerBinaryOpen(MPI_Comm, char[], PetscFileMode, PetscViewer*) + PetscErrorCode PetscViewerDrawOpen(MPI_Comm, char[], char[], int, int, int, int, PetscViewer*) - PetscErrorCode PetscViewerBinarySetUseMPIIO(PetscViewer,PetscBool) + PetscErrorCode PetscViewerBinarySetUseMPIIO(PetscViewer, PetscBool) - PetscErrorCode PetscViewerSetFormat(PetscViewer,PetscViewerFormat) - PetscErrorCode PetscViewerGetFormat(PetscViewer,PetscViewerFormat*) - PetscErrorCode PetscViewerPushFormat(PetscViewer,PetscViewerFormat) + PetscErrorCode PetscViewerSetFormat(PetscViewer, PetscViewerFormat) + PetscErrorCode PetscViewerGetFormat(PetscViewer, PetscViewerFormat*) + PetscErrorCode PetscViewerPushFormat(PetscViewer, PetscViewerFormat) PetscErrorCode PetscViewerPopFormat(PetscViewer) - PetscErrorCode PetscViewerGetSubViewer(PetscViewer,MPI_Comm,PetscViewer*) - PetscErrorCode PetscViewerRestoreSubViewer(PetscViewer,MPI_Comm,PetscViewer*) + PetscErrorCode PetscViewerGetSubViewer(PetscViewer, MPI_Comm, PetscViewer*) + PetscErrorCode PetscViewerRestoreSubViewer(PetscViewer, MPI_Comm, PetscViewer*) - PetscErrorCode PetscViewerASCIISetTab(PetscViewer,PetscInt) - PetscErrorCode PetscViewerASCIIGetTab(PetscViewer,PetscInt*) - PetscErrorCode PetscViewerASCIIAddTab(PetscViewer,PetscInt) - PetscErrorCode PetscViewerASCIISubtractTab(PetscViewer,PetscInt) + PetscErrorCode PetscViewerASCIISetTab(PetscViewer, PetscInt) + PetscErrorCode PetscViewerASCIIGetTab(PetscViewer, PetscInt*) + PetscErrorCode PetscViewerASCIIAddTab(PetscViewer, PetscInt) + PetscErrorCode PetscViewerASCIISubtractTab(PetscViewer, PetscInt) PetscErrorCode PetscViewerASCIIPushSynchronized(PetscViewer) PetscErrorCode PetscViewerASCIIPopSynchronized(PetscViewer) PetscErrorCode PetscViewerASCIIPushTab(PetscViewer) PetscErrorCode PetscViewerASCIIPopTab(PetscViewer) - PetscErrorCode PetscViewerASCIIUseTabs(PetscViewer,PetscBool) - PetscErrorCode PetscViewerASCIIPrintf(PetscViewer,const char[],...) - PetscErrorCode PetscViewerStringSPrintf(PetscViewer,char[],...) - PetscErrorCode PetscViewerASCIISynchronizedPrintf(PetscViewer,const char[],...) 
- - PetscErrorCode PetscViewerFileGetName(PetscViewer,char*[]) - PetscErrorCode PetscViewerFileSetName(PetscViewer,char[]) - PetscErrorCode PetscViewerFileGetMode(PetscViewer,PetscFileMode*) - PetscErrorCode PetscViewerFileSetMode(PetscViewer,PetscFileMode) + PetscErrorCode PetscViewerASCIIUseTabs(PetscViewer, PetscBool) + PetscErrorCode PetscViewerASCIIPrintf(PetscViewer, const char[], ...) + PetscErrorCode PetscViewerStringSPrintf(PetscViewer, char[], ...) + PetscErrorCode PetscViewerASCIISynchronizedPrintf(PetscViewer, const char[], ...) + + PetscErrorCode PetscViewerFileGetName(PetscViewer, char*[]) + PetscErrorCode PetscViewerFileSetName(PetscViewer, char[]) + PetscErrorCode PetscViewerFileGetMode(PetscViewer, PetscFileMode*) + PetscErrorCode PetscViewerFileSetMode(PetscViewer, PetscFileMode) PetscErrorCode PetscViewerFlush(PetscViewer) PetscErrorCode PetscViewerDrawClear(PetscViewer) - PetscErrorCode PetscViewerDrawSetInfo(PetscViewer,char[],char[],int,int,int,int) + PetscErrorCode PetscViewerDrawSetInfo(PetscViewer, char[], char[], int, int, int, int) PetscErrorCode PetscViewerHDF5PushTimestepping(PetscViewer) PetscErrorCode PetscViewerHDF5PopTimestepping(PetscViewer) - PetscErrorCode PetscViewerHDF5GetTimestep(PetscViewer,PetscInt*) - PetscErrorCode PetscViewerHDF5SetTimestep(PetscViewer,PetscInt) + PetscErrorCode PetscViewerHDF5GetTimestep(PetscViewer, PetscInt*) + PetscErrorCode PetscViewerHDF5SetTimestep(PetscViewer, PetscInt) PetscErrorCode PetscViewerHDF5IncrementTimestep(PetscViewer) - PetscErrorCode PetscViewerHDF5PushGroup(PetscViewer,char[]) + PetscErrorCode PetscViewerHDF5PushGroup(PetscViewer, char[]) PetscErrorCode PetscViewerHDF5PopGroup(PetscViewer) - PetscErrorCode PetscViewerHDF5GetGroup(PetscViewer,char[],char*[]) + PetscErrorCode PetscViewerHDF5GetGroup(PetscViewer, char[], char*[]) PetscViewer PETSC_VIEWER_STDOUT_(MPI_Comm) except? NULL PetscViewer PETSC_VIEWER_STDOUT_SELF diff --git a/src/binding/petsc4py/src/petsc4py/PETSc/typing.pxi b/src/binding/petsc4py/src/petsc4py/PETSc/typing.pxi index 839d9bd4705..3846975958f 100644 --- a/src/binding/petsc4py/src/petsc4py/PETSc/typing.pxi +++ b/src/binding/petsc4py/src/petsc4py/PETSc/typing.pxi @@ -15,6 +15,7 @@ cdef Mapping cdef PathLike cdef Scalar +cdef ArrayBool cdef ArrayInt cdef ArrayReal cdef ArrayComplex @@ -51,6 +52,8 @@ cdef KSPRHSFunction cdef KSPOperatorsFunction cdef KSPConvergenceTestFunction cdef KSPMonitorFunction +cdef KSPPreSolveFunction +cdef KSPPostSolveFunction # --- TS --- @@ -103,4 +106,3 @@ cdef TAOLSObjectiveGradientFunction cdef Intracomm cdef Datatype cdef Op - diff --git a/src/binding/petsc4py/src/petsc4py/__init__.py b/src/binding/petsc4py/src/petsc4py/__init__.py index 45643213c99..7984ce4a3f0 100644 --- a/src/binding/petsc4py/src/petsc4py/__init__.py +++ b/src/binding/petsc4py/src/petsc4py/__init__.py @@ -15,8 +15,8 @@ """ -__author__ = 'Lisandro Dalcin' -__version__ = '3.20.5' +__author__ = 'Lisandro Dalcin' +__version__ = '3.21.2' __credits__ = 'PETSc Team ' @@ -38,8 +38,9 @@ def init(args=None, arch=None, comm=None): beginning of the bootstrap script of an application. 
""" import petsc4py.lib + PETSc = petsc4py.lib.ImportPETSc(arch) - args = petsc4py.lib.getInitArgs(args) + args = petsc4py.lib.getInitArgs(args) PETSc._initialize(args, comm) @@ -59,6 +60,7 @@ def get_include(): """ from os.path import dirname, join + return join(dirname(__file__), 'include') @@ -66,6 +68,7 @@ def get_config(): """Return a dictionary with information about PETSc.""" import os import sys + if sys.version_info[0] >= 3: from io import StringIO from configparser import ConfigParser @@ -75,7 +78,7 @@ def get_config(): pgkdir = os.path.dirname(__file__) filename = os.path.join(pgkdir, 'lib', 'petsc.cfg') with open(filename) as fp: - stream = StringIO("[petsc]\n"+fp.read()) + stream = StringIO('[petsc]\n' + fp.read()) parser = ConfigParser() parser.optionxform = str if sys.version_info[0] >= 3: diff --git a/src/binding/petsc4py/src/petsc4py/__init__.pyi b/src/binding/petsc4py/src/petsc4py/__init__.pyi index f2522eaae66..46859304e24 100644 --- a/src/binding/petsc4py/src/petsc4py/__init__.pyi +++ b/src/binding/petsc4py/src/petsc4py/__init__.pyi @@ -2,6 +2,6 @@ from typing import Union from mpi4py.MPI import Intracomm __version__: str = ... -def init(args: Union[str,list[str]] | None = ..., arch: str | None = ..., comm: Intracomm | None = ...) -> None: ... +def init(args: Union[str, list[str]] | None = ..., arch: str | None = ..., comm: Intracomm | None = ...) -> None: ... def get_include() -> str: ... def get_config() -> dict[str, str]: ... diff --git a/src/binding/petsc4py/src/petsc4py/__main__.py b/src/binding/petsc4py/src/petsc4py/__main__.py index c5829541d39..c7e26b29666 100644 --- a/src/binding/petsc4py/src/petsc4py/__main__.py +++ b/src/binding/petsc4py/src/petsc4py/__main__.py @@ -1,6 +1,7 @@ # Author: Lisandro Dalcin # Contact: dalcinl@gmail.com + """ Command line access to the PETSc Options Database. @@ -13,9 +14,11 @@ """ -def help(args=None): + +def help(args=None): # noqa: A001 import shlex import sys + # program name try: prog = sys.argv[0] @@ -28,10 +31,14 @@ def help(args=None): args = shlex.split(args) else: args = [str(a) for a in args] + if '-help' not in args: + args.append('-help') # import and initialize import petsc4py - petsc4py.init([prog, '-help'] + args) + + petsc4py.init([prog] + args) from petsc4py import PETSc + # help dispatcher COMM = PETSc.COMM_SELF if 'vec' in args: @@ -73,5 +80,6 @@ def help(args=None): dmplex.setFromOptions() dmplex.destroy() + if __name__ == '__main__': help() diff --git a/src/binding/petsc4py/src/petsc4py/__main__.pyi b/src/binding/petsc4py/src/petsc4py/__main__.pyi index bf554a9971f..549cb077686 100644 --- a/src/binding/petsc4py/src/petsc4py/__main__.pyi +++ b/src/binding/petsc4py/src/petsc4py/__main__.pyi @@ -1,3 +1,4 @@ from typing import Union -def help(args: Union[str,list[str]] | None = ...) -> None: ... + +def help(args: Union[str, list[str]] | None = ...) -> None: ... diff --git a/src/binding/petsc4py/src/petsc4py/lib/__init__.py b/src/binding/petsc4py/src/petsc4py/lib/__init__.py index 3ba9a66c4a3..128604397e8 100644 --- a/src/binding/petsc4py/src/petsc4py/lib/__init__.py +++ b/src/binding/petsc4py/src/petsc4py/lib/__init__.py @@ -21,6 +21,7 @@ # -------------------------------------------------------------------- + def ImportPETSc(arch=None): """ Import the PETSc extension module for a given configuration name. @@ -34,13 +35,16 @@ def getPathArchPETSc(arch=None): Undocumented. 
""" import os + path = os.path.abspath(os.path.dirname(__file__)) - rcvar, rcfile = 'PETSC_ARCH', 'petsc.cfg' + rcvar, rcfile = 'PETSC_ARCH', 'petsc.cfg' path, arch = getPathArch(path, arch, rcvar, rcfile) return (path, arch) + # -------------------------------------------------------------------- + def Import(pkg, name, path, arch): """ Import helper for PETSc-based extension modules. @@ -59,11 +63,10 @@ def Import(pkg, name, path, arch): def get_ext_suffix(): if importlib: return importlib.machinery.EXTENSION_SUFFIXES[0] - else: - return imp.get_suffixes()[0][0] + return imp.get_suffixes()[0][0] def import_module(pkg, name, path, arch): - fullname = '{}.{}'.format(pkg, name) + fullname = f'{pkg}.{name}' pathlist = [os.path.join(path, arch)] if importlib: finder = importlib.machinery.PathFinder() @@ -72,25 +75,24 @@ def import_module(pkg, name, path, arch): sys.modules[fullname] = module spec.loader.exec_module(module) return module - else: - f, fn, info = imp.find_module(name, pathlist) - with f: - return imp.load_module(fullname, f, fn, info) + f, fn, info = imp.find_module(name, pathlist) + with f: + return imp.load_module(fullname, f, fn, info) # test if extension module was already imported - module = sys.modules.get('{}.{}'.format(pkg, name)) + module = sys.modules.get(f'{pkg}.{name}') filename = getattr(module, '__file__', '') if filename.endswith(get_ext_suffix()): # if 'arch' is None, do nothing; otherwise this # call may be invalid if extension module for # other 'arch' has been already imported. if arch is not None and arch != module.__arch__: - raise ImportError("%s already imported" % module) + raise ImportError('%s already imported' % module) return module # silence annoying Cython warning - warnings.filterwarnings("ignore", message="numpy.dtype size changed") - warnings.filterwarnings("ignore", message="numpy.ndarray size changed") + warnings.filterwarnings('ignore', message='numpy.dtype size changed') + warnings.filterwarnings('ignore', message='numpy.ndarray size changed') # import extension module from 'path/arch' directory module = import_module(pkg, name, path, arch) module.__arch__ = arch # save arch value @@ -104,6 +106,7 @@ def getPathArch(path, arch, rcvar='PETSC_ARCH', rcfile='petsc.cfg'): """ import os import warnings + # path if not path: path = '.' 
@@ -114,16 +117,17 @@ def getPathArch(path, arch, rcvar='PETSC_ARCH', rcfile='petsc.cfg'): # arch if arch is not None: if not isinstance(arch, str): - raise TypeError( "arch argument must be string") + raise TypeError('arch argument must be string') if not os.path.isdir(os.path.join(path, arch)): raise TypeError("invalid arch value: '%s'" % arch) return (path, arch) + # helper function def arch_list(arch): arch = arch.strip().split(os.path.pathsep) arch = [a.strip() for a in arch if a] - arch = [a for a in arch if a] - return arch + return [a for a in arch if a] + # try to get arch from the environment arch_env = arch_list(os.environ.get(rcvar, '')) for arch in arch_env: @@ -135,6 +139,7 @@ if not os.path.isfile(rcfile): # no point to continue return (path, '') + # helper function def parse_rc(rcfile): with open(rcfile) as f: @@ -144,6 +149,7 @@ entries = [ln.split('=') for ln in lines if ln] entries = [(k.strip(), v.strip()) for k, v in entries] return dict(entries) + # try to get arch from data in config file configrc = parse_rc(rcfile) arch_cfg = arch_list(configrc.get(rcvar, '')) @@ -153,8 +159,9 @@ if os.path.isdir(os.path.join(path, arch)): if arch_env: warnings.warn( - "ignored arch: '%s', using: '%s'" % \ - (os.path.pathsep.join(arch_env), arch)) + f"ignored arch: '{os.path.pathsep.join(arch_env)}', using: '{arch}'", + stacklevel=2, + ) return (path, arch) # nothing good found return (path, '') @@ -166,6 +173,7 @@ """ import shlex import sys + if args is None: args = [] elif isinstance(args, str): @@ -176,13 +184,12 @@ if args and args[0].startswith('-'): sys_argv = getattr(sys, 'argv', None) sys_exec = getattr(sys, 'executable', 'python') - if (sys_argv and - sys_argv[0] and - sys_argv[0] != '-c'): + if sys_argv and sys_argv[0] and sys_argv[0] != '-c': prog_name = sys_argv[0] else: prog_name = sys_exec args.insert(0, prog_name) return args + # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/src/petsc4py/lib/__init__.pyi b/src/binding/petsc4py/src/petsc4py/lib/__init__.pyi index 40f12444a37..d683658e7d2 100644 --- a/src/binding/petsc4py/src/petsc4py/lib/__init__.pyi +++ b/src/binding/petsc4py/src/petsc4py/lib/__init__.pyi @@ -1,8 +1,10 @@ from types import ModuleType + def ImportPETSc(arch: str | None = ...) -> ModuleType: ... def getPathArchPETSc(arch: str | None = ...) -> tuple[str, str]: ... + def Import(pkg: str, name: str, path: str, arch: str) -> ModuleType: ... def getPathArch(path: str, arch: str, rcvar: str = ..., rcfile: str = ...) -> tuple[str, str]: ... def getInitArgs(args: str | list[str] | None) -> list[str]: ...
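A minimal usage sketch of the bootstrap contract the two files above implement: petsc4py.init() must run before the first import of petsc4py.PETSc, and getInitArgs() supplies the program name when only options are passed. The vector size and the -vec_type remark below are illustrative assumptions, not part of this changeset.

    import sys
    import petsc4py
    petsc4py.init(sys.argv)       # must precede the PETSc import;
                                  # getInitArgs() prepends the program name
    from petsc4py import PETSc

    x = PETSc.Vec().create(comm=PETSc.COMM_WORLD)  # collective on COMM_WORLD
    x.setSizes(8)                 # global size 8, local sizes decided by PETSc
    x.setFromOptions()            # honors command-line options such as -vec_type
    x.set(1.0)
    PETSc.Sys.Print(x.sum())      # prints 8.0
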
diff --git a/src/binding/petsc4py/src/petsc4py/typing.py b/src/binding/petsc4py/src/petsc4py/typing.py index 72d03f1356e..944d502ba8b 100644 --- a/src/binding/petsc4py/src/petsc4py/typing.py +++ b/src/binding/petsc4py/src/petsc4py/typing.py @@ -27,61 +27,60 @@ ) __all__ = [ - "Scalar", - "ArrayInt", - "ArrayReal", - "ArrayComplex", - "ArrayScalar", - "DimsSpec", - "AccessModeSpec", - "InsertModeSpec", - "ScatterModeSpec", - "LayoutSizeSpec", - "NormTypeSpec", - "MatAssemblySpec", - "MatSizeSpec", - "MatBlockSizeSpec", - "CSRIndicesSpec", - "CSRSpec", - "NNZSpec", - "MatNullFunction", - "DMCoarsenHookFunction", - "DMRestrictHookFunction", - "KSPRHSFunction", - "KSPOperatorsFunction", - "KSPConvergenceTestFunction", - "KSPMonitorFunction", - "TSRHSFunction", - "TSRHSJacobian", - "TSRHSJacobianP", - "TSIFunction", - "TSIJacobian", - "TSIJacobianP", - "TSI2Function", - "TSI2Jacobian", - "TSI2JacobianP", - "TSMonitorFunction", - "TSPreStepFunction", - "TSPostStepFunction", - "TSIndicatorFunction", - "TSPostEventFunction", - "TSPreStepFunction", - "TSPostStepFunction", - "TAOObjectiveFunction", - "TAOGradientFunction", - "TAOObjectiveGradientFunction", - "TAOHessianFunction", - "TAOUpdateFunction", - "TAOMonitorFunction", - "TAOConvergedFunction", - "TAOJacobianFunction", - "TAOResidualFunction", - "TAOJacobianResidualFunction", - "TAOVariableBoundsFunction", - "TAOConstraintsFunction", - "TAOLSObjectiveFunction", - "TAOLSGradientFunction", - "TAOLSObjectiveGradientFunction", + 'Scalar', + 'ArrayBool', + 'ArrayInt', + 'ArrayReal', + 'ArrayComplex', + 'ArrayScalar', + 'DimsSpec', + 'AccessModeSpec', + 'InsertModeSpec', + 'ScatterModeSpec', + 'LayoutSizeSpec', + 'NormTypeSpec', + 'MatAssemblySpec', + 'MatSizeSpec', + 'MatBlockSizeSpec', + 'CSRIndicesSpec', + 'CSRSpec', + 'NNZSpec', + 'MatNullFunction', + 'DMCoarsenHookFunction', + 'DMRestrictHookFunction', + 'KSPRHSFunction', + 'KSPOperatorsFunction', + 'KSPConvergenceTestFunction', + 'KSPMonitorFunction', + 'TSRHSFunction', + 'TSRHSJacobian', + 'TSRHSJacobianP', + 'TSIFunction', + 'TSIJacobian', + 'TSIJacobianP', + 'TSI2Function', + 'TSI2Jacobian', + 'TSI2JacobianP', + 'TSMonitorFunction', + 'TSPreStepFunction', + 'TSPostStepFunction', + 'TSIndicatorFunction', + 'TSPostEventFunction', + 'TAOObjectiveFunction', + 'TAOGradientFunction', + 'TAOObjectiveGradientFunction', + 'TAOHessianFunction', + 'TAOUpdateFunction', + 'TAOMonitorFunction', + 'TAOConvergedFunction', + 'TAOJacobianFunction', + 'TAOResidualFunction', + 'TAOJacobianResidualFunction', + 'TAOVariableBoundsFunction', + 'TAOConstraintsFunction', + 'TAOLSObjectiveFunction', + 'TAOLSGradientFunction', + 'TAOLSObjectiveGradientFunction', ] # --- Sys --- @@ -94,6 +95,9 @@ """ +ArrayBool = NDArray[bool] +"""Array of `bool`.""" + ArrayInt = NDArray[int] """Array of `int`.""" @@ -281,6 +285,12 @@ KSPMonitorFunction = Callable[[KSP, int, float], None] """`PETSc.KSP` monitor callback.""" +KSPPreSolveFunction = Callable[[KSP, Vec, Vec], None] +"""`PETSc.KSP` pre solve callback.""" + +KSPPostSolveFunction = Callable[[KSP, Vec, Vec], None] +"""`PETSc.KSP` post solve callback.""" + # --- SNES --- SNESMonitorFunction = Callable[[SNES, int, float], None] @@ -307,7 +317,9 @@ SNESNGSFunction = Callable[[SNES, Vec, Vec], None] """`SNES` nonlinear Gauss-Seidel callback.""" -SNESConvergedFunction = Callable[[SNES, int, tuple[float, float, float]], SNES.ConvergedReason] +SNESConvergedFunction = Callable[ + [SNES, int, tuple[float, float, float]],
SNES.ConvergedReason +] """`SNES` convergence test callback.""" # --- TS --- @@ -362,7 +374,7 @@ TAOGradientFunction = Callable[[TAO, Vec, Vec], None] """`TAO` objective gradient callback.""" -TAOObjectiveGradientFunction = Callable[[TAO, Vec, Vec], float] +TAOObjectiveGradientFunction = Callable[[TAO, Vec, Vec], float] """`TAO` objective function and gradient callback.""" TAOHessianFunction = Callable[[TAO, Vec, Mat, Mat], None] @@ -400,4 +412,3 @@ TAOLSObjectiveGradientFunction = Callable[[TAOLineSearch, Vec, Vec], float] """`TAOLineSearch` objective function and gradient callback.""" - diff --git a/src/binding/petsc4py/test/runtests.py b/src/binding/petsc4py/test/runtests.py index d04a0cd3c67..8b23eed2d08 100644 --- a/src/binding/petsc4py/test/runtests.py +++ b/src/binding/petsc4py/test/runtests.py @@ -15,40 +15,101 @@ def getoptionparser(): parser = optparse.OptionParser() - parser.add_option("-q", "--quiet", - action="store_const", const=0, dest="verbose", default=1, - help="do not print status messages to stdout") - parser.add_option("-v", "--verbose", - action="store_const", const=2, dest="verbose", default=1, - help="print status messages to stdout") - parser.add_option("-i", "--include", type="string", - action="append", dest="include", default=[], - help="include tests matching PATTERN", metavar="PATTERN") - parser.add_option("-e", "--exclude", type="string", - action="append", dest="exclude", default=[], - help="exclude tests matching PATTERN", metavar="PATTERN") - parser.add_option("-k", "--pattern", type="string", - action="append", dest="patterns", default=[], - help="only run tests which match the given substring") - parser.add_option("-f", "--failfast", - action="store_true", dest="failfast", default=False, - help="Stop on first failure") - parser.add_option("--no-builddir", - action="store_false", dest="builddir", default=True, - help="disable testing from build directory") - parser.add_option("--path", type="string", - action="append", dest="path", default=[], - help="prepend PATH to sys.path", metavar="PATH") - parser.add_option("--arch", type="string", - action="store", dest="arch", default=None, - help="use PETSC_ARCH", - metavar="PETSC_ARCH") - parser.add_option("-s","--summary", - action="store_true", dest="summary", default=0, - help="print PETSc log summary") - parser.add_option("--no-memdebug", - action="store_false", dest="memdebug", default=True, - help="Do not use PETSc memory debugging") + parser.add_option( + '-q', + '--quiet', + action='store_const', + const=0, + dest='verbose', + default=1, + help='do not print status messages to stdout', + ) + parser.add_option( + '-v', + '--verbose', + action='store_const', + const=2, + dest='verbose', + default=1, + help='print status messages to stdout', + ) + parser.add_option( + '-i', + '--include', + type='string', + action='append', + dest='include', + default=[], + help='include tests matching PATTERN', + metavar='PATTERN', + ) + parser.add_option( + '-e', + '--exclude', + type='string', + action='append', + dest='exclude', + default=[], + help='exclude tests matching PATTERN', + metavar='PATTERN', + ) + parser.add_option( + '-k', + '--pattern', + type='string', + action='append', + dest='patterns', + default=[], + help='only run tests which match the given substring', + ) + parser.add_option( + '-f', + '--failfast', + action='store_true', + dest='failfast', + default=False, + help='Stop on first failure', + ) + parser.add_option( + '--no-builddir', + action='store_false', + dest='builddir', + default=True, + 
help='disable testing from build directory', + ) + parser.add_option( + '--path', + type='string', + action='append', + dest='path', + default=[], + help='prepend PATH to sys.path', + metavar='PATH', + ) + parser.add_option( + '--arch', + type='string', + action='store', + dest='arch', + default=None, + help='use PETSC_ARCH', + metavar='PETSC_ARCH', + ) + parser.add_option( + '-s', + '--summary', + action='store_true', + dest='summary', + default=0, + help='print PETSc log summary', + ) + parser.add_option( + '--no-memdebug', + action='store_false', + dest='memdebug', + default=True, + help='Do not use PETSc memory debugging', + ) return parser @@ -72,31 +133,32 @@ def getbuilddir(): def getprocessorinfo(): try: name = os.uname()[1] - except: + except Exception: import platform + name = platform.uname()[1] from petsc4py.PETSc import COMM_WORLD + rank = COMM_WORLD.getRank() return (rank, name) def getlibraryinfo(name): - modname = "%s4py.%s" % (name.lower(), name) + modname = f'{name.lower()}4py.{name}' module = __import__(modname, fromlist=[name]) (major, minor, micro), devel = module.Sys.getVersion(devel=True) r = not devel - if r: release = 'release' - else: release = 'development' + if r: + release = 'release' + else: + release = 'development' arch = module.__arch__ - return ( - "%s %d.%d.%d %s (conf: '%s')" % - (name, major, minor, micro, release, arch) - ) + return "%s %d.%d.%d %s (conf: '%s')" % (name, major, minor, micro, release, arch) def getpythoninfo(): x, y, z = sys.version_info[:3] - return ("Python %d.%d.%d (%s)" % (x, y, z, sys.executable)) + return 'Python %d.%d.%d (%s)' % (x, y, z, sys.executable) def getpackageinfo(pkg): @@ -107,7 +169,7 @@ def getpackageinfo(pkg): name = pkg.__name__ version = pkg.__version__ path = pkg.__path__[0] - return ("%s %s (%s)" % (name, version, path)) + return f'{name} {version} ({path})' def setup_python(options): @@ -123,19 +185,24 @@ def setup_python(options): def setup_unittest(options): - from unittest import TestSuite try: from unittest.runner import _WritelnDecorator except ImportError: from unittest import _WritelnDecorator # writeln_orig = _WritelnDecorator.writeln + def writeln(self, message=''): - try: self.stream.flush() - except: pass + try: + self.stream.flush() + except Exception: + pass writeln_orig(self, message) - try: self.stream.flush() - except: pass + try: + self.stream.flush() + except Exception: + pass + _WritelnDecorator.writeln = writeln @@ -152,13 +219,14 @@ def import_package(options, pkgname): def print_banner(options): r, n = getprocessorinfo() - prefix = "[%d@%s]" % (r, n) + prefix = '[%d@%s]' % (r, n) def writeln(message='', endl='\n'): if message is None: return from petsc4py.PETSc import Sys - message = "%s %s" % (prefix, message) + + message = f'{prefix} {message}' Sys.syncPrint(message, endl=endl, flush=True) if options.verbose: @@ -172,6 +240,7 @@ def writeln(message='', endl='\n'): def load_tests(options, args): from glob import glob import re + testsuitedir = os.path.dirname(__file__) sys.path.insert(0, testsuitedir) pattern = 'test_*.py' @@ -182,8 +251,7 @@ def load_tests(options, args): testloader = unittest.TestLoader() if options.patterns: testloader.testNamePatterns = [ - ('*%s*' % p) if ('*' not in p) else p - for p in options.patterns + ('*%s*' % p) if ('*' not in p) else p for p in options.patterns ] include = exclude = None if options.include: @@ -193,8 +261,7 @@ def load_tests(options, args): for testfile in testfiles: filename = os.path.basename(testfile) testname = 
os.path.splitext(filename)[0] - if ((exclude and exclude(testname)) or - (include and not include(testname))): + if (exclude and exclude(testname)) or (include and not include(testname)): continue module = __import__(testname) for arg in args: @@ -217,7 +284,6 @@ def run_tests(options, testsuite, runner=None): return result.wasSuccessful() - def abort(code=1): os.abort() @@ -236,12 +302,14 @@ def main(args=None): print_banner(options) testsuite = load_tests(options, args) success = run_tests(options, testsuite) - if not success and options.failfast: abort() + if not success and options.failfast: + abort() shutdown(success) return not success if __name__ == '__main__': import sys + sys.dont_write_bytecode = True sys.exit(main()) diff --git a/src/binding/petsc4py/test/test_comm.py b/src/binding/petsc4py/test/test_comm.py index 5a50851d031..05d7abbd3fa 100644 --- a/src/binding/petsc4py/test/test_comm.py +++ b/src/binding/petsc4py/test/test_comm.py @@ -3,17 +3,17 @@ # -------------------------------------------------------------------- -class TestComm(unittest.TestCase): +class TestComm(unittest.TestCase): def testInit(self): - comm_null1 = PETSc.Comm() - comm_null2 = PETSc.Comm(PETSc.COMM_NULL) + comm_null1 = PETSc.Comm() + comm_null2 = PETSc.Comm(PETSc.COMM_NULL) comm_world = PETSc.Comm(PETSc.COMM_WORLD) - comm_self = PETSc.Comm(PETSc.COMM_SELF) + comm_self = PETSc.Comm(PETSc.COMM_SELF) self.assertEqual(comm_null1, PETSc.COMM_NULL) self.assertEqual(comm_null2, PETSc.COMM_NULL) self.assertEqual(comm_world, PETSc.COMM_WORLD) - self.assertEqual(comm_self, PETSc.COMM_SELF) + self.assertEqual(comm_self, PETSc.COMM_SELF) def testDupDestr(self): self.assertRaises(ValueError, PETSc.COMM_NULL.duplicate) @@ -42,14 +42,10 @@ def testRank(self): self.assertTrue(PETSc.COMM_WORLD.getRank() >= 0) def testProperties(self): - self.assertEqual(PETSc.COMM_SELF.getSize(), - PETSc.COMM_SELF.size) - self.assertEqual(PETSc.COMM_SELF.getRank(), - PETSc.COMM_SELF.rank) - self.assertEqual(PETSc.COMM_WORLD.getSize(), - PETSc.COMM_WORLD.size) - self.assertEqual(PETSc.COMM_WORLD.getRank(), - PETSc.COMM_WORLD.rank) + self.assertEqual(PETSc.COMM_SELF.getSize(), PETSc.COMM_SELF.size) + self.assertEqual(PETSc.COMM_SELF.getRank(), PETSc.COMM_SELF.rank) + self.assertEqual(PETSc.COMM_WORLD.getSize(), PETSc.COMM_WORLD.size) + self.assertEqual(PETSc.COMM_WORLD.getRank(), PETSc.COMM_WORLD.rank) def testCompatMPI4PY(self): try: @@ -75,7 +71,7 @@ def testCompatMPI4PY(self): self.assertTrue(isinstance(cw, MPI.Intracomm)) self.assertEqual(cw.Get_size(), PETSc.COMM_WORLD.getSize()) self.assertEqual(cw.Get_rank(), PETSc.COMM_WORLD.getRank()) - + # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/test/test_device.py b/src/binding/petsc4py/test/test_device.py new file mode 100644 index 00000000000..3c52009f592 --- /dev/null +++ b/src/binding/petsc4py/test/test_device.py @@ -0,0 +1,101 @@ +from petsc4py import PETSc +import unittest + +# -------------------------------------------------------------------- + + +class TestDevice(unittest.TestCase): + def testCurrent(self): + dctx = PETSc.DeviceContext().getCurrent() + self.assertEqual(dctx.getRefCount(), 2) + device = dctx.getDevice() + del device + del dctx + dctx = PETSc.DeviceContext().getCurrent() + self.assertEqual(dctx.getRefCount(), 2) + device = dctx.getDevice() + del device + del dctx + + def testDevice(self): + device = PETSc.Device.create() + device.configure() + _ = device.getDeviceType() + _ = device.getDeviceId() + del 
device + + def testDeviceContext(self): + dctx = PETSc.DeviceContext().create() + self.assertEqual(dctx.getRefCount(), 1) + dctx.setUp() + self.assertTrue(dctx.idle()) + dctx.destroy() + self.assertEqual(dctx.getRefCount(), 0) + + def testStream(self): + dctx = PETSc.DeviceContext().getCurrent() + self.assertEqual(dctx.getRefCount(), 2) + stype = dctx.getStreamType() + dctx.setStreamType(stype) + dctx.destroy() + self.assertEqual(dctx.getRefCount(), 0) + + def testSetFromOptions(self): + dctx = PETSc.DeviceContext().create() + self.assertEqual(dctx.getRefCount(), 1) + dctx.setFromOptions() + dctx.setUp() + dctx.destroy() + self.assertEqual(dctx.getRefCount(), 0) + + def testDuplicate(self): + dctx = PETSc.DeviceContext().getCurrent() + self.assertEqual(dctx.getRefCount(), 2) + dctx2 = dctx.duplicate() + self.assertEqual(dctx2.getRefCount(), 1) + dctx.destroy() + self.assertEqual(dctx.getRefCount(), 0) + dctx2.destroy() + self.assertEqual(dctx2.getRefCount(), 0) + + def testWaitFor(self): + dctx = PETSc.DeviceContext().create() + self.assertEqual(dctx.getRefCount(), 1) + dctx.setUp() + dctx2 = PETSc.DeviceContext().create() + self.assertEqual(dctx2.getRefCount(), 1) + dctx2.setUp() + dctx.waitFor(dctx2) + dctx.destroy() + self.assertEqual(dctx.getRefCount(), 0) + dctx2.destroy() + dctx2.destroy() + self.assertEqual(dctx2.getRefCount(), 0) + + def testForkJoin(self): + dctx = PETSc.DeviceContext().getCurrent() + self.assertEqual(dctx.getRefCount(), 2) + jdestroy = PETSc.DeviceContext.JoinMode.DESTROY + jtypes = [ + PETSc.DeviceContext.JoinMode.SYNC, + PETSc.DeviceContext.JoinMode.NO_SYNC, + ] + for j in jtypes: + dctxs = dctx.fork(4) + for ctx in dctxs: + self.assertEqual(ctx.getRefCount(), 1) + dctx.join(j, dctxs[0::2]) + dctx.join(j, dctxs[3::-2]) + for ctx in dctxs: + self.assertEqual(ctx.getRefCount(), 1) + dctx.join(jdestroy, dctxs) + for ctx in dctxs: + self.assertEqual(ctx.getRefCount(), 0) + dctx.destroy() + self.assertEqual(dctx.getRefCount(), 0) + + +# -------------------------------------------------------------------- + +if __name__ == '__main__': + unittest.main() diff --git a/src/binding/petsc4py/test/test_dmda.py b/src/binding/petsc4py/test/test_dmda.py index fc94c217084..04e76f41194 100644 --- a/src/binding/petsc4py/test/test_dmda.py +++ b/src/binding/petsc4py/test/test_dmda.py @@ -3,8 +3,8 @@ # -------------------------------------------------------------------- -class BaseTestDA(object): +class BaseTestDA: COMM = PETSc.COMM_WORLD SIZES = None BOUNDARY = None @@ -13,13 +13,15 @@ class BaseTestDA(object): SWIDTH = 1 def setUp(self): - self.da = PETSc.DMDA().create(dim=len(self.SIZES), - dof=self.DOF, - sizes=self.SIZES, - boundary_type=self.BOUNDARY, - stencil_type=self.STENCIL, - stencil_width=self.SWIDTH, - comm=self.COMM) + self.da = PETSc.DMDA().create( + dim=len(self.SIZES), + dof=self.DOF, + sizes=self.SIZES, + boundary_type=self.BOUNDARY, + stencil_type=self.STENCIL, + stencil_width=self.SWIDTH, + comm=self.COMM, + ) def tearDown(self): self.da = None @@ -29,58 +31,56 @@ def testGetInfo(self): dim = self.da.getDim() dof = self.da.getDof() sizes = self.da.getSizes() - psizes = self.da.getProcSizes() boundary = self.da.getBoundaryType() stencil_type = self.da.getStencilType() stencil_width = self.da.getStencilWidth() self.assertEqual(dim, len(self.SIZES)) self.assertEqual(dof, self.DOF) self.assertEqual(sizes, tuple(self.SIZES)) - self.assertEqual(boundary, self.BOUNDARY or (0,)*dim) + self.assertEqual(boundary, self.BOUNDARY or (0,) * dim) self.assertEqual(stencil_type, 
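
The fork/join tests above all follow one pattern: fork child contexts off a parent, queue independent work on each, then join them back. Stripped of the refcount assertions, a sketch (assuming a default-configured PETSc device; the work site is marked by a comment):

from petsc4py import PETSc

dctx = PETSc.DeviceContext().getCurrent()
children = dctx.fork(4)
# ... enqueue independent work on each child context here ...
dctx.join(PETSc.DeviceContext.JoinMode.SYNC, children)     # parent waits on children
dctx.join(PETSc.DeviceContext.JoinMode.DESTROY, children)  # reclaim the children
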
self.STENCIL) self.assertEqual(stencil_width, self.SWIDTH) def testRangesCorners(self): dim = self.da.getDim() ranges = self.da.getRanges() - starts, lsizes = self.da.getCorners() + starts, lsizes = self.da.getCorners() self.assertEqual(dim, len(ranges)) self.assertEqual(dim, len(starts)) self.assertEqual(dim, len(lsizes)) for i in range(dim): s, e = ranges[i] self.assertEqual(s, starts[i]) - self.assertEqual(e-s, lsizes[i]) + self.assertEqual(e - s, lsizes[i]) def testGhostRangesCorners(self): dim = self.da.getDim() ranges = self.da.getGhostRanges() - starts, lsizes = self.da.getGhostCorners() + starts, lsizes = self.da.getGhostCorners() self.assertEqual(dim, len(ranges)) self.assertEqual(dim, len(starts)) self.assertEqual(dim, len(lsizes)) for i in range(dim): s, e = ranges[i] self.assertEqual(s, starts[i]) - self.assertEqual(e-s, lsizes[i]) + self.assertEqual(e - s, lsizes[i]) def testOwnershipRanges(self): - dim = self.da.getDim() ownership_ranges = self.da.getOwnershipRanges() procsizes = self.da.getProcSizes() self.assertEqual(len(procsizes), len(ownership_ranges)) - for i,m in enumerate(procsizes): + for i, m in enumerate(procsizes): self.assertEqual(m, len(ownership_ranges[i])) def testFieldName(self): for i in range(self.da.getDof()): - self.da.setFieldName(i, "field%d" % i) + self.da.setFieldName(i, 'field%d' % i) for i in range(self.da.getDof()): name = self.da.getFieldName(i) - self.assertEqual(name, "field%d" % i) + self.assertEqual(name, 'field%d' % i) def testCoordinates(self): - self.da.setUniformCoordinates(0,1,0,1,0,1) + self.da.setUniformCoordinates(0, 1, 0, 1, 0, 1) # c = self.da.getCoordinates() self.da.setCoordinates(c) @@ -101,28 +101,28 @@ def testCreateVecMat(self): mat = self.da.createMat() self.assertTrue(mat.getType() in ('aij', 'seqaij', 'mpiaij')) vn.set(1.0) - self.da.naturalToGlobal(vn,vg) + self.da.naturalToGlobal(vn, vg) self.assertEqual(vg.max()[1], 1.0) self.assertEqual(vg.min()[1], 1.0) - self.da.globalToLocal(vg,vl) + self.da.globalToLocal(vg, vl) self.assertEqual(vl.max()[1], 1.0) - self.assertTrue (vl.min()[1] in (1.0, 0.0)) + self.assertTrue(vl.min()[1] in (1.0, 0.0)) vn.set(0.0) - self.da.globalToNatural(vg,vn) + self.da.globalToNatural(vg, vn) self.assertEqual(vn.max()[1], 1.0) self.assertEqual(vn.min()[1], 1.0) vl2 = self.da.createLocalVec() - self.da.localToLocal(vl,vl2) + self.da.localToLocal(vl, vl2) self.assertEqual(vl2.max()[1], 1.0) - self.assertTrue (vl2.min()[1] in (1.0, 0.0)) + self.assertTrue(vl2.min()[1] in (1.0, 0.0)) NONE = PETSc.DM.BoundaryType.NONE - s = self.da.stencil_width btype = self.da.boundary_type psize = self.da.proc_sizes for b, p in zip(btype, psize): - if b != NONE and p == 1: return + if b != NONE and p == 1: + return vg2 = self.da.createGlobalVec() - self.da.localToGlobal(vl2,vg2) + self.da.localToGlobal(vl2, vg2) def testGetVec(self): vg = self.da.getGlobalVec() @@ -131,27 +131,27 @@ def testGetVec(self): vg.set(1.0) self.assertEqual(vg.max()[1], 1.0) self.assertEqual(vg.min()[1], 1.0) - self.da.globalToLocal(vg,vl) + self.da.globalToLocal(vg, vl) self.assertEqual(vl.max()[1], 1.0) - self.assertTrue (vl.min()[1] in (1.0, 0.0)) + self.assertTrue(vl.min()[1] in (1.0, 0.0)) vl.set(2.0) NONE = PETSc.DM.BoundaryType.NONE - s = self.da.stencil_width btype = self.da.boundary_type psize = self.da.proc_sizes for b, p in zip(btype, psize): - if b != NONE and p == 1: return - self.da.localToGlobal(vl,vg) + if b != NONE and p == 1: + return + self.da.localToGlobal(vl, vg) self.assertEqual(vg.max()[1], 2.0) - self.assertTrue 
(vg.min()[1] in (2.0, 0.0)) + self.assertTrue(vg.min()[1] in (2.0, 0.0)) finally: self.da.restoreGlobalVec(vg) self.da.restoreLocalVec(vl) def testGetOther(self): - ao = self.da.getAO() - lgmap = self.da.getLGMap() - l2g, g2l = self.da.getScatter() + _ = self.da.getAO() + _ = self.da.getLGMap() + _, _ = self.da.getScatter() def testRefineCoarsen(self): da = self.da @@ -159,25 +159,27 @@ self.assertEqual(da.getDim(), rda.getDim()) self.assertEqual(da.getDof(), rda.getDof()) if da.dim != 1: - self.assertEqual(da.getStencilType(), rda.getStencilType()) + self.assertEqual(da.getStencilType(), rda.getStencilType()) self.assertEqual(da.getStencilWidth(), rda.getStencilWidth()) cda = rda.coarsen() self.assertEqual(rda.getDim(), cda.getDim()) self.assertEqual(rda.getDof(), cda.getDof()) for n1, n2 in zip(self.da.getSizes(), cda.getSizes()): - self.assertTrue(abs(n1-n2)<=1) + self.assertTrue(abs(n1 - n2) <= 1) def testCoarsenRefine(self): + if PETSc.COMM_WORLD.getSize() > 6: + return da = self.da cda = self.da.coarsen() self.assertEqual(da.getDim(), cda.getDim()) self.assertEqual(da.getDof(), cda.getDof()) if da.dim != 1: - self.assertEqual(da.getStencilType(), cda.getStencilType()) + self.assertEqual(da.getStencilType(), cda.getStencilType()) self.assertEqual(da.getStencilWidth(), cda.getStencilWidth()) rda = cda.refine() for n1, n2 in zip(self.da.getSizes(), rda.getSizes()): - self.assertTrue(abs(n1-n2)<=1) + self.assertTrue(abs(n1 - n2) <= 1) def testRefineHierarchy(self): levels = self.da.refineHierarchy(2) @@ -187,6 +189,8 @@ self.assertTrue(isinstance(item, PETSc.DM)) def testCoarsenHierarchy(self): + if PETSc.COMM_WORLD.getSize() > 6: + return levels = self.da.coarsenHierarchy(2) self.assertTrue(isinstance(levels, list)) self.assertEqual(len(levels), 2) @@ -195,15 +199,19 @@ def testCreateInterpolation(self): da = self.da - if da.dim == 1: return + if da.dim == 1: + return rda = da.refine() - mat, vec = da.createInterpolation(rda) + _, _ = da.createInterpolation(rda) def testCreateInjection(self): + if PETSc.COMM_WORLD.getSize() > 6: + return da = self.da - if da.dim == 1: return + if da.dim == 1: + return rda = da.refine() - scatter = da.createInjection(rda) + _ = da.createInjection(rda) def testzeroRowsColumnsStencil(self): da = self.da @@ -213,15 +221,15 @@ A.setDiagonal(x) diag1 = x.duplicate() A.getDiagonal(diag1) - if self.SIZES != 2: #only coded test for 2D case - return - istart,iend, jstart, jend = da.getRanges() + if len(self.SIZES) != 2: # only coded test for 2D case + return + (istart, iend), (jstart, jend) = da.getRanges() self.assertTrue(x.equal(diag1)) zeroidx = [] - for i in range(istart,iend): - for j in range(jstart,jend): + for i in range(istart, iend): + for j in range(jstart, jend): row = PETSc.Mat.Stencil() - row.index = (i,j) + row.index = (i, j) zeroidx = zeroidx + [row] diag2 = x.duplicate() diag2.set(1.0) @@ -231,110 +239,172 @@ self.assertTrue(ans.equal(diag2)) -MIRROR = PETSc.DMDA.BoundaryType.MIRROR -GHOSTED = PETSc.DMDA.BoundaryType.GHOSTED +MIRROR = PETSc.DMDA.BoundaryType.MIRROR +GHOSTED = PETSc.DMDA.BoundaryType.GHOSTED PERIODIC = PETSc.DMDA.BoundaryType.PERIODIC -TWIST = PETSc.DMDA.BoundaryType.TWIST +TWIST = PETSc.DMDA.BoundaryType.TWIST SCALE = 4 + class BaseTestDA_1D(BaseTestDA): - SIZES = [100*SCALE] + SIZES = [100 * SCALE] + class BaseTestDA_2D(BaseTestDA): - SIZES = [9*SCALE,11*SCALE] + SIZES = 
[9 * SCALE, 11 * SCALE] + class BaseTestDA_3D(BaseTestDA): - SIZES = [6*SCALE,7*SCALE,8*SCALE] + SIZES = [6 * SCALE, 7 * SCALE, 8 * SCALE] + # -------------------------------------------------------------------- + class TestDA_1D(BaseTestDA_1D, unittest.TestCase): pass + + class TestDA_1D_W0(TestDA_1D): SWIDTH = 0 + + class TestDA_1D_W2(TestDA_1D): SWIDTH = 2 + class TestDA_2D(BaseTestDA_2D, unittest.TestCase): pass + + class TestDA_2D_W0(TestDA_2D): SWIDTH = 0 + + class TestDA_2D_W0_N2(TestDA_2D): DOF = 2 SWIDTH = 0 + + class TestDA_2D_W2(TestDA_2D): SWIDTH = 2 + + class TestDA_2D_W2_N2(TestDA_2D): DOF = 2 SWIDTH = 2 + + class TestDA_2D_PXY(TestDA_2D): - SIZES = [13*SCALE,17*SCALE] + SIZES = [13 * SCALE, 17 * SCALE] DOF = 2 SWIDTH = 5 - BOUNDARY = (PERIODIC,)*2 + BOUNDARY = (PERIODIC,) * 2 + + class TestDA_2D_GXY(TestDA_2D): - SIZES = [13*SCALE,17*SCALE] + SIZES = [13 * SCALE, 17 * SCALE] DOF = 2 SWIDTH = 5 - BOUNDARY = (GHOSTED,)*2 + BOUNDARY = (GHOSTED,) * 2 + + class TestDA_2D_TXY(TestDA_2D): - SIZES = [13*SCALE,17*SCALE] + SIZES = [13 * SCALE, 17 * SCALE] DOF = 2 SWIDTH = 5 - BOUNDARY = (TWIST,)*2 + BOUNDARY = (TWIST,) * 2 + class TestDA_3D(BaseTestDA_3D, unittest.TestCase): pass + + class TestDA_3D_W0(TestDA_3D): SWIDTH = 0 + + class TestDA_3D_W0_N2(TestDA_3D): DOF = 2 SWIDTH = 0 + + class TestDA_3D_W2(TestDA_3D): SWIDTH = 2 + + class TestDA_3D_W2_N2(TestDA_3D): DOF = 2 SWIDTH = 2 + + class TestDA_3D_PXYZ(TestDA_3D): - SIZES = [11*SCALE,13*SCALE,17*SCALE] + SIZES = [11 * SCALE, 13 * SCALE, 17 * SCALE] DOF = 2 SWIDTH = 3 - BOUNDARY = (PERIODIC,)*3 + BOUNDARY = (PERIODIC,) * 3 + + class TestDA_3D_GXYZ(TestDA_3D): - SIZES = [11*SCALE,13*SCALE,17*SCALE] + SIZES = [11 * SCALE, 13 * SCALE, 17 * SCALE] DOF = 2 SWIDTH = 3 - BOUNDARY = (GHOSTED,)*3 + BOUNDARY = (GHOSTED,) * 3 + + class TestDA_3D_TXYZ(TestDA_3D): - SIZES = [11*SCALE,13*SCALE,17*SCALE] + SIZES = [11 * SCALE, 13 * SCALE, 17 * SCALE] DOF = 2 SWIDTH = 3 - BOUNDARY = (TWIST,)*3 + BOUNDARY = (TWIST,) * 3 + # -------------------------------------------------------------------- -DIM = (1,2,3,) -DOF = (None,1,2,3,4,5,) +DIM = ( + 1, + 2, + 3, +) +DOF = ( + None, + 1, + 2, + 3, + 4, + 5, +) BOUNDARY_TYPE = ( None, - "none", (0,)*3, 0, - "ghosted", (GHOSTED,)*3, GHOSTED, - "periodic", (PERIODIC,)*3, PERIODIC, - "twist", (TWIST,)*3, TWIST, - ) -STENCIL_TYPE = (None,"star","box") -STENCIL_WIDTH = (None,0,1,2,3) - + 'none', + (0,) * 3, + 0, + 'ghosted', + (GHOSTED,) * 3, + GHOSTED, + 'periodic', + (PERIODIC,) * 3, + PERIODIC, + 'twist', + (TWIST,) * 3, + TWIST, +) +STENCIL_TYPE = (None, 'star', 'box') +STENCIL_WIDTH = (None, 0, 1, 2, 3) + + +DIM = (1, 2, 3) +DOF = (None, 2, 5) +BOUNDARY_TYPE = (None, 'none', 'periodic', 'ghosted', 'twist') +STENCIL_TYPE = (None, 'box') +STENCIL_WIDTH = (None, 1, 2) -DIM = (1,2,3) -DOF = (None,2,5) -BOUNDARY_TYPE = (None,"none","periodic","ghosted","twist") -STENCIL_TYPE = (None,"box") -STENCIL_WIDTH = (None,1,2) class TestDACreate(unittest.TestCase): pass + + counter = 0 for dim in DIM: for dof in DOF: @@ -343,24 +413,30 @@ class TestDACreate(unittest.TestCase): boundary = boundary[:dim] for stencil in STENCIL_TYPE: for width in STENCIL_WIDTH: - kargs = dict(sizes=[8*SCALE]*dim, - dim=dim, dof=dof, - boundary_type=boundary, - stencil_type=stencil, - stencil_width=width) + kargs = { + 'sizes': [8 * SCALE] * dim, + 'dim': dim, + 'dof': dof, + 'boundary_type': boundary, + 'stencil_type': stencil, + 'stencil_width': width, + } + def testCreate(self, kargs=kargs): kargs = dict(kargs) da = 
PETSc.DMDA().create(**kargs) da.destroy() - setattr(TestDACreate, - "testCreate%04d"%counter, - testCreate) + + setattr(TestDACreate, 'testCreate%04d' % counter, testCreate) del testCreate, kargs counter += 1 del counter, dim, dof, boundary, stencil, width + class TestDADuplicate(unittest.TestCase): pass + + counter = 0 for dim in DIM: for dof in DOF: @@ -369,10 +445,14 @@ class TestDADuplicate(unittest.TestCase): boundary = boundary[:dim] for stencil in STENCIL_TYPE: for width in STENCIL_WIDTH: - kargs = dict(dim=dim, dof=dof, - boundary_type=boundary, - stencil_type=stencil, - stencil_width=width) + kargs = { + 'dim': dim, + 'dof': dof, + 'boundary_type': boundary, + 'stencil_type': stencil, + 'stencil_width': width, + } + def testDuplicate(self, kargs=kargs): kargs = dict(kargs) dim = kargs.pop('dim') @@ -380,34 +460,32 @@ def testDuplicate(self, kargs=kargs): boundary = kargs['boundary_type'] stencil = kargs['stencil_type'] width = kargs['stencil_width'] - da = PETSc.DMDA().create([8*SCALE]*dim) + da = PETSc.DMDA().create([8 * SCALE] * dim) newda = da.duplicate(**kargs) self.assertEqual(newda.dim, da.dim) self.assertEqual(newda.sizes, da.sizes) - self.assertEqual(newda.proc_sizes, - da.proc_sizes) + self.assertEqual(newda.proc_sizes, da.proc_sizes) self.assertEqual(newda.ranges, da.ranges) self.assertEqual(newda.corners, da.corners) - if (newda.boundary_type == da.boundary_type - and - newda.stencil_width == da.stencil_width): - self.assertEqual(newda.ghost_ranges, - da.ghost_ranges) - self.assertEqual(newda.ghost_corners, - da.ghost_corners) + if ( + newda.boundary_type == da.boundary_type + and newda.stencil_width == da.stencil_width + ): + self.assertEqual(newda.ghost_ranges, da.ghost_ranges) + self.assertEqual(newda.ghost_corners, da.ghost_corners) if dof is None: dof = da.dof if boundary is None: boundary = da.boundary_type - elif boundary == "none": + elif boundary == 'none': boundary = (0,) * dim - elif boundary == "mirror": + elif boundary == 'mirror': boundary = (MIRROR,) * dim - elif boundary == "ghosted": + elif boundary == 'ghosted': boundary = (GHOSTED,) * dim - elif boundary == "periodic": + elif boundary == 'periodic': boundary = (PERIODIC,) * dim - elif boundary == "twist": + elif boundary == 'twist': boundary = (TWIST,) * dim elif isinstance(boundary, int): boundary = (boundary,) * dim @@ -416,16 +494,15 @@ def testDuplicate(self, kargs=kargs): if width is None: width = da.stencil_width self.assertEqual(newda.dof, dof) - self.assertEqual(newda.boundary_type, - boundary) + self.assertEqual(newda.boundary_type, boundary) if dim == 1: - self.assertEqual(newda.stencil, - (stencil, width)) + self.assertEqual(newda.stencil, (stencil, width)) newda.destroy() da.destroy() - setattr(TestDADuplicate, - "testDuplicate%04d"%counter, - testDuplicate) + + setattr( + TestDADuplicate, 'testDuplicate%04d' % counter, testDuplicate + ) del testDuplicate, kargs counter += 1 del counter, dim, dof, boundary, stencil, width diff --git a/src/binding/petsc4py/test/test_dmplex.py b/src/binding/petsc4py/test/test_dmplex.py index 331f6a0a738..ddaeb4965d9 100644 --- a/src/binding/petsc4py/test/test_dmplex.py +++ b/src/binding/petsc4py/test/test_dmplex.py @@ -4,25 +4,25 @@ import os import filecmp import numpy as np +import importlib # -------------------------------------------------------------------- ERR_ARG_OUTOFRANGE = 63 -class BaseTestPlex(object): +class BaseTestPlex: COMM = PETSc.COMM_WORLD DIM = 1 CELLS = [[0, 1], [1, 2]] - COORDS = [[0.], [0.5], [1.]] + COORDS = [[0.0], [0.5], [1.0]] 
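
In isolation, the mesh this fixture describes is tiny: two segment cells over three vertices. A sketch of the same construction together with the chart queries these tests rely on (petsc4py API as exercised in this file; COMM_SELF so the single-process sizes hold):

from petsc4py import PETSc

plex = PETSc.DMPlex().createFromCellList(
    1, [[0, 1], [1, 2]], [[0.0], [0.5], [1.0]], comm=PETSc.COMM_SELF
)
cStart, cEnd = plex.getHeightStratum(0)  # cells live at height 0
vStart, vEnd = plex.getDepthStratum(0)   # vertices live at depth 0
assert (cEnd - cStart, vEnd - vStart) == (2, 3)
plex.destroy()
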
COMP = 1 DOFS = [1, 0] def setUp(self): - self.plex = PETSc.DMPlex().createFromCellList(self.DIM, - self.CELLS, - self.COORDS, - comm=self.COMM) + self.plex = PETSc.DMPlex().createFromCellList( + self.DIM, self.CELLS, self.COORDS, comm=self.COMM + ) def tearDown(self): self.plex.destroy() @@ -35,15 +35,15 @@ def testTopology(self): pStart, pEnd = self.plex.getChart() cStart, cEnd = self.plex.getHeightStratum(0) vStart, vEnd = self.plex.getDepthStratum(0) - numDepths = self.plex.getLabelSize("depth") + numDepths = self.plex.getLabelSize('depth') coords_raw = self.plex.getCoordinates().getArray() coords = np.reshape(coords_raw, (vEnd - vStart, dim)) self.assertEqual(dim, self.DIM) - self.assertEqual(numDepths, self.DIM+1) + self.assertEqual(numDepths, self.DIM + 1) if rank == 0 and self.CELLS is not None: - self.assertEqual(cEnd-cStart, len(self.CELLS)) + self.assertEqual(cEnd - cStart, len(self.CELLS)) if rank == 0 and self.COORDS is not None: - self.assertEqual(vEnd-vStart, len(self.COORDS)) + self.assertEqual(vEnd - vStart, len(self.COORDS)) self.assertTrue((coords == self.COORDS).all()) def testClosure(self): @@ -87,8 +87,10 @@ def testSectionDofs(self): self.plex.setNumFields(1) section = self.plex.createSection([self.COMP], [self.DOFS]) size = section.getStorageSize() - entity_dofs = [self.plex.getStratumSize("depth", d) * - self.DOFS[d] for d in range(self.DIM+1)] + entity_dofs = [ + self.plex.getStratumSize('depth', d) * self.DOFS[d] + for d in range(self.DIM + 1) + ] self.assertEqual(sum(entity_dofs), size) def testSectionClosure(self): @@ -99,7 +101,7 @@ def testSectionClosure(self): for p in range(pStart, pEnd): for i in range(section.getDof(p)): off = section.getOffset(p) - vec.setValue(off+i, p) + vec.setValue(off + i, p) for p in range(pStart, pEnd): point_closure = self.plex.getTransitiveClosure(p)[0] @@ -109,37 +111,40 @@ def testSectionClosure(self): def testBoundaryLabel(self): pStart, pEnd = self.plex.getChart() - if (pEnd - pStart == 0): return + if pEnd - pStart == 0: + return - self.assertFalse(self.plex.hasLabel("boundary")) - self.plex.markBoundaryFaces("boundary") - self.assertTrue(self.plex.hasLabel("boundary")) + self.assertFalse(self.plex.hasLabel('boundary')) + self.plex.markBoundaryFaces('boundary') + self.assertTrue(self.plex.hasLabel('boundary')) - faces = self.plex.getStratumIS("boundary", 1) + faces = self.plex.getStratumIS('boundary', 1) for f in faces.getIndices(): points, orient = self.plex.getTransitiveClosure(f, useCone=True) for p in points: - self.plex.setLabelValue("boundary", p, 1) + self.plex.setLabelValue('boundary', p, 1) for p in range(pStart, pEnd): - if self.plex.getLabelValue("boundary", p) != 1: - self.plex.setLabelValue("boundary", p, 2) + if self.plex.getLabelValue('boundary', p) != 1: + self.plex.setLabelValue('boundary', p, 2) - numBoundary = self.plex.getStratumSize("boundary", 1) - numInterior = self.plex.getStratumSize("boundary", 2) + numBoundary = self.plex.getStratumSize('boundary', 1) + numInterior = self.plex.getStratumSize('boundary', 2) self.assertNotEqual(numBoundary, pEnd - pStart) self.assertNotEqual(numInterior, pEnd - pStart) self.assertEqual(numBoundary + numInterior, pEnd - pStart) def testMetric(self): - if self.DIM == 1: return + if self.DIM == 1: + return self.plex.distribute() - if self.CELLS is None and not self.plex.isSimplex(): return + if self.CELLS is None and not self.plex.isSimplex(): + return self.plex.orient() h_min = 1.0e-30 - h_max = 1.0e+30 - a_max = 1.0e+10 + h_max = 1.0e30 + a_max = 1.0e10 target = 8.0 
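
The normalization assertion further down rests on a special case worth spelling out: for the uniform metric M = I on a unit-volume box, det M = 1, so L^p normalization to target complexity C reduces to scaling by C^(2/d), independently of p. A quick arithmetic check with the 2-D values used here:

dim = 2
target = 8.0
scale = target ** (2.0 / dim)  # uniform scaling factor; 8 ** (2/2) == 8.0
assert abs(scale - 8.0) < 1e-12
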
p = 1.0 beta = 1.3 @@ -168,15 +173,15 @@ def testMetric(self): self.assertFalse(self.plex.metricNoSwapping()) self.assertFalse(self.plex.metricNoMovement()) self.assertFalse(self.plex.metricNoSurf()) - assert self.plex.metricGetVerbosity() == -1 - assert self.plex.metricGetNumIterations() == 3 - assert np.isclose(self.plex.metricGetMinimumMagnitude(), h_min) - assert np.isclose(self.plex.metricGetMaximumMagnitude(), h_max) - assert np.isclose(self.plex.metricGetMaximumAnisotropy(), a_max) - assert np.isclose(self.plex.metricGetTargetComplexity(), target) - assert np.isclose(self.plex.metricGetNormalizationOrder(), p) - assert np.isclose(self.plex.metricGetGradationFactor(), beta) - assert np.isclose(self.plex.metricGetHausdorffNumber(), hausd) + self.assertTrue(self.plex.metricGetVerbosity() == -1) + self.assertTrue(self.plex.metricGetNumIterations() == 3) + self.assertTrue(np.isclose(self.plex.metricGetMinimumMagnitude(), h_min)) + self.assertTrue(np.isclose(self.plex.metricGetMaximumMagnitude(), h_max)) + self.assertTrue(np.isclose(self.plex.metricGetMaximumAnisotropy(), a_max)) + self.assertTrue(np.isclose(self.plex.metricGetTargetComplexity(), target)) + self.assertTrue(np.isclose(self.plex.metricGetNormalizationOrder(), p)) + self.assertTrue(np.isclose(self.plex.metricGetGradationFactor(), beta)) + self.assertTrue(np.isclose(self.plex.metricGetHausdorffNumber(), hausd)) metric1 = self.plex.metricCreateUniform(0.5) metric2 = self.plex.metricCreateUniform(1.0) @@ -184,206 +189,284 @@ def testMetric(self): det = self.plex.metricDeterminantCreate() self.plex.metricAverage2(metric1, metric2, metric) metric1.array[:] *= 1.5 - assert np.allclose(metric.array, metric1.array) + self.assertTrue(np.allclose(metric.array, metric1.array)) self.plex.metricIntersection2(metric1, metric2, metric) - assert np.allclose(metric.array, metric2.array) + self.assertTrue(np.allclose(metric.array, metric2.array)) self.plex.metricEnforceSPD(metric, metric1, det[0]) - assert np.allclose(metric.array, metric1.array) - self.plex.metricNormalize(metric, metric1, det[0], restrictSizes=False, restrictAnisotropy=False) - metric2.scale(pow(target, 2.0/self.DIM)) - assert np.allclose(metric1.array, metric2.array) + self.assertTrue(np.allclose(metric.array, metric1.array)) + + if self.DIM == 2 and PETSc.COMM_WORLD.getSize() > 6: + # Error with 7 processes in 2D: normalization factor is -1 + return + + self.plex.metricNormalize( + metric, metric1, det[0], restrictSizes=False, restrictAnisotropy=False + ) + metric2.scale(pow(target, 2.0 / self.DIM)) + self.assertTrue(np.allclose(metric1.array, metric2.array)) def testAdapt(self): - if self.DIM == 1: return + if self.DIM == 1: + return + if self.DIM == 3 and PETSc.COMM_WORLD.getSize() > 4: + # Error with 5 processes in 3D + # ---------------------------- + # Warning: MMG5_mmgIntextmet: Unable to diagonalize at least 1 metric. + # Error: MMG3D_defsiz_ani: unable to intersect metrics at point 8. + # Metric undefined. Exit program. + # MMG remeshing problem. Exit program. 
+ return self.plex.orient() plex = self.plex.refine() plex.distribute() - if self.CELLS is None and not plex.isSimplex(): return - if sum(self.DOFS) > 1: return + if self.CELLS is None and not plex.isSimplex(): + return + if sum(self.DOFS) > 1: + return metric = plex.metricCreateUniform(9.0) try: - newplex = plex.adaptMetric(metric,"") + newplex = plex.adaptMetric(metric, '') + plex.destroy() + newplex.destroy() except PETSc.Error as exc: - if exc.ierr != ERR_ARG_OUTOFRANGE: raise + plex.destroy() + if exc.ierr != ERR_ARG_OUTOFRANGE: + raise # -------------------------------------------------------------------- + class BaseTestPlex_2D(BaseTestPlex): DIM = 2 - CELLS = [[0, 1, 3], [1, 3, 4], [1, 2, 4], [2, 4, 5], - [3, 4, 6], [4, 6, 7], [4, 5, 7], [5, 7, 8]] - COORDS = [[0.0, 0.0], [0.5, 0.0], [1.0, 0.0], - [0.0, 0.5], [0.5, 0.5], [1.0, 0.5], - [0.0, 1.0], [0.5, 1.0], [1.0, 1.0]] + CELLS = [ + [0, 1, 3], + [1, 3, 4], + [1, 2, 4], + [2, 4, 5], + [3, 4, 6], + [4, 6, 7], + [4, 5, 7], + [5, 7, 8], + ] + COORDS = [ + [0.0, 0.0], + [0.5, 0.0], + [1.0, 0.0], + [0.0, 0.5], + [0.5, 0.5], + [1.0, 0.5], + [0.0, 1.0], + [0.5, 1.0], + [1.0, 1.0], + ] DOFS = [1, 0, 0] + class BaseTestPlex_3D(BaseTestPlex): DIM = 3 - CELLS = [[0, 2, 3, 7], [0, 2, 6, 7], [0, 4, 6, 7], - [0, 1, 3, 7], [0, 1, 5, 7], [0, 4, 5, 7]] - COORDS = [[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.], - [0., 0., 1.], [1., 0., 1.], [0., 1., 1.], [1., 1., 1.]] + CELLS = [ + [0, 2, 3, 7], + [0, 2, 6, 7], + [0, 4, 6, 7], + [0, 1, 3, 7], + [0, 1, 5, 7], + [0, 4, 5, 7], + ] + COORDS = [ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [1.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + [1.0, 0.0, 1.0], + [0.0, 1.0, 1.0], + [1.0, 1.0, 1.0], + ] DOFS = [1, 0, 0, 0] + # -------------------------------------------------------------------- + class TestPlex_1D(BaseTestPlex, unittest.TestCase): pass -class TestPlex_2D(BaseTestPlex_2D, unittest.TestCase): +class TestPlex_2D(BaseTestPlex_2D, unittest.TestCase): def testTransform(self): - plex = self.plex - cstart, cend = plex.getHeightStratum(0) - tr = PETSc.DMPlexTransform().create(comm=PETSc.COMM_WORLD) - tr.setType(PETSc.DMPlexTransformType.REFINEALFELD) - tr.setDM(plex) - tr.setUp() - newplex = tr.apply(plex) - tr.destroy() - newcstart, newcend = newplex.getHeightStratum(0) - newplex.destroy() - self.assertTrue((newcend-newcstart) == 3*(cend-cstart)) + plex = self.plex + cstart, cend = plex.getHeightStratum(0) + tr = PETSc.DMPlexTransform().create(comm=PETSc.COMM_WORLD) + tr.setType(PETSc.DMPlexTransformType.REFINEALFELD) + tr.setDM(plex) + tr.setUp() + newplex = tr.apply(plex) + tr.destroy() + newcstart, newcend = newplex.getHeightStratum(0) + newplex.destroy() + self.assertTrue((newcend - newcstart) == 3 * (cend - cstart)) + class TestPlex_3D(BaseTestPlex_3D, unittest.TestCase): pass + class TestPlex_2D_P3(BaseTestPlex_2D, unittest.TestCase): DOFS = [1, 2, 1] + class TestPlex_3D_P3(BaseTestPlex_3D, unittest.TestCase): DOFS = [1, 2, 1, 0] + class TestPlex_3D_P4(BaseTestPlex_3D, unittest.TestCase): DOFS = [1, 3, 3, 1] + class TestPlex_2D_BoxTensor(BaseTestPlex_2D, unittest.TestCase): CELLS = None COORDS = None + def setUp(self): - self.plex = PETSc.DMPlex().createBoxMesh([3,3], simplex=False) + self.plex = PETSc.DMPlex().createBoxMesh([3, 3], simplex=False) + class TestPlex_3D_BoxTensor(BaseTestPlex_3D, unittest.TestCase): CELLS = None COORDS = None + def setUp(self): - self.plex = PETSc.DMPlex().createBoxMesh([3,3,3], simplex=False) + self.plex = PETSc.DMPlex().createBoxMesh([3, 3, 3], 
simplex=False) + +# FIXME try: raise PETSc.Error - PETSc.DMPlex().createBoxMesh([2,2], simplex=True, comm=PETSc.COMM_SELF).destroy() + PETSc.DMPlex().createBoxMesh([2, 2], simplex=True, comm=PETSc.COMM_SELF).destroy() except PETSc.Error: pass else: + class TestPlex_2D_Box(BaseTestPlex_2D, unittest.TestCase): CELLS = None COORDS = None + def setUp(self): - self.plex = PETSc.DMPlex().createBoxMesh([1,1], simplex=True) + self.plex = PETSc.DMPlex().createBoxMesh([1, 1], simplex=True) class TestPlex_2D_Boundary(BaseTestPlex_2D, unittest.TestCase): CELLS = None COORDS = None + def setUp(self): boundary = PETSc.DMPlex().create(self.COMM) - boundary.createSquareBoundary([0., 0.], [1., 1.], [2, 2]) - boundary.setDimension(self.DIM-1) + boundary.createSquareBoundary([0.0, 0.0], [1.0, 1.0], [2, 2]) + boundary.setDimension(self.DIM - 1) self.plex = PETSc.DMPlex().generate(boundary) class TestPlex_3D_Box(BaseTestPlex_3D, unittest.TestCase): CELLS = None COORDS = None + def setUp(self): - self.plex = PETSc.DMPlex().createBoxMesh([1,1,1], simplex=True) + self.plex = PETSc.DMPlex().createBoxMesh([1, 1, 1], simplex=True) class TestPlex_3D_Boundary(BaseTestPlex_3D, unittest.TestCase): CELLS = None COORDS = None + def setUp(self): boundary = PETSc.DMPlex().create(self.COMM) - boundary.createCubeBoundary([0., 0., 0.], [1., 1., 1.], [1, 1, 1]) - boundary.setDimension(self.DIM-1) + boundary.createCubeBoundary([0.0, 0.0, 0.0], [1.0, 1.0, 1.0], [1, 1, 1]) + boundary.setDimension(self.DIM - 1) self.plex = PETSc.DMPlex().generate(boundary) # -------------------------------------------------------------------- PETSC_DIR = petsc4py.get_config()['PETSC_DIR'] + def check_dtype(method): def wrapper(self, *args, **kwargs): if PETSc.ScalarType is PETSc.ComplexType: - return - else: - return method(self, *args, **kwargs) + return None + return method(self, *args, **kwargs) + return wrapper + def check_package(method): def wrapper(self, *args, **kwargs): - if not PETSc.Sys.hasExternalPackage("hdf5"): - return - elif self.PARTITIONERTYPE != "simple" and \ - not PETSc.Sys.hasExternalPackage(self.PARTITIONERTYPE): - return - else: - return method(self, *args, **kwargs) + if not PETSc.Sys.hasExternalPackage('hdf5'): + return None + if self.PARTITIONERTYPE != 'simple' and not PETSc.Sys.hasExternalPackage( + self.PARTITIONERTYPE + ): + return None + return method(self, *args, **kwargs) + return wrapper + def check_nsize(method): def wrapper(self, *args, **kwargs): if PETSc.COMM_WORLD.size != self.NSIZE: - return - else: - return method(self, *args, **kwargs) + return None + return method(self, *args, **kwargs) + return wrapper -class BaseTestPlexHDF5(object): + +class BaseTestPlexHDF5: NSIZE = 4 NTIMES = 3 - def setUp(self): - self.txtvwr = PETSc.Viewer() - def tearDown(self): if not PETSc.COMM_WORLD.rank: if os.path.exists(self.outfile()): os.remove(self.outfile()) if os.path.exists(self.tmp_output_file()): os.remove(self.tmp_output_file()) - self.txtvwr = None def _name(self): - return "%s_outformat-%s_%s" % (self.SUFFIX, - self.OUTFORMAT, - self.PARTITIONERTYPE) + return f'{self.SUFFIX}_outformat-{self.OUTFORMAT}_{self.PARTITIONERTYPE}' def infile(self): - return os.path.join(PETSC_DIR, "share/petsc/datafiles/", - "meshes/blockcylinder-50.h5") + return os.path.join( + PETSC_DIR, 'share/petsc/datafiles/', 'meshes/blockcylinder-50.h5' + ) def outfile(self): - return os.path.join("./temp_test_dmplex_%s.h5" % self._name()) + return os.path.join('./temp_test_dmplex_%s.h5' % self._name()) def informat(self): return 
PETSc.Viewer.Format.HDF5_XDMF def outformat(self): - d = {"hdf5_petsc": PETSc.Viewer.Format.HDF5_PETSC, - "hdf5_xdmf": PETSc.Viewer.Format.HDF5_XDMF} + d = { + 'hdf5_petsc': PETSc.Viewer.Format.HDF5_PETSC, + 'hdf5_xdmf': PETSc.Viewer.Format.HDF5_XDMF, + } return d[self.OUTFORMAT] def partitionerType(self): - d = {"simple": PETSc.Partitioner.Type.SIMPLE, - "ptscotch": PETSc.Partitioner.Type.PTSCOTCH, - "parmetis": PETSc.Partitioner.Type.PARMETIS} + d = { + 'simple': PETSc.Partitioner.Type.SIMPLE, + 'ptscotch': PETSc.Partitioner.Type.PTSCOTCH, + 'parmetis': PETSc.Partitioner.Type.PARMETIS, + } return d[self.PARTITIONERTYPE] def ref_output_file(self): - return os.path.join(PETSC_DIR, "src/dm/impls/plex/tutorials/", - "output/ex5_%s.out" % self._name()) + return os.path.join( + PETSC_DIR, + 'src/dm/impls/plex/tutorials/', + 'output/ex5_%s.out' % self._name(), + ) def tmp_output_file(self): - return os.path.join("./temp_test_dmplex_%s.out" % self._name()) + return os.path.join('./temp_test_dmplex_%s.out' % self._name()) def outputText(self, msg, comm): if not comm.rank: @@ -391,15 +474,18 @@ def outputText(self, msg, comm): f.write(msg) def outputPlex(self, plex): - self.txtvwr.createASCII(self.tmp_output_file(), - mode='a', comm=plex.comm) - plex.view(viewer=self.txtvwr) - self.txtvwr.destroy() + txtvwr = PETSc.Viewer().createASCII( + self.tmp_output_file(), mode='a', comm=plex.comm + ) + plex.view(viewer=txtvwr) + txtvwr.destroy() @check_dtype @check_package @check_nsize def testViewLoadCycle(self): + if importlib.util.find_spec('mpi4py') is None: + self.skipTest('mpi4py') # throws special exception to signal test skip grank = PETSc.COMM_WORLD.rank for i in range(self.NTIMES): if i == 0: @@ -409,27 +495,23 @@ def testViewLoadCycle(self): infname = self.outfile() informt = self.outformat() if self.HETEROGENEOUS: - mycolor = (grank > self.NTIMES - i) + mycolor = grank > self.NTIMES - i else: mycolor = 0 - try: - import mpi4py - except ImportError: - self.skipTest('mpi4py') # throws special exception to signal test skip mpicomm = PETSc.COMM_WORLD.tompi4py() comm = PETSc.Comm(comm=mpicomm.Split(color=mycolor, key=grank)) if mycolor == 0: - self.outputText("Begin cycle %d\n" % i, comm) + self.outputText('Begin cycle %d\n' % i, comm) plex = PETSc.DMPlex() vwr = PETSc.ViewerHDF5() # Create plex plex.create(comm=comm) - plex.setName("DMPlex Object") + plex.setName('DMPlex Object') # Load data from XDMF into dm in parallel vwr.create(infname, mode='r', comm=comm) vwr.pushFormat(format=informt) plex.load(viewer=vwr) - plex.setOptionsPrefix("loaded_") + plex.setOptionsPrefix('loaded_') plex.distributeSetDefault(False) plex.setFromOptions() vwr.popFormat() @@ -437,52 +519,65 @@ def testViewLoadCycle(self): self.outputPlex(plex) # Test DM is indeed distributed flg = plex.isDistributed() - self.outputText("Loaded mesh distributed? %s\n" % - str(flg).upper(), comm) + self.outputText( + 'Loaded mesh distributed? 
%s\n' % str(flg).upper(), comm + ) # Interpolate plex.interpolate() - plex.setOptionsPrefix("interpolated_") + plex.setOptionsPrefix('interpolated_') plex.setFromOptions() self.outputPlex(plex) # Redistribute part = plex.getPartitioner() part.setType(self.partitionerType()) - _ = plex.distribute(overlap=0) - plex.setName("DMPlex Object") - plex.setOptionsPrefix("redistributed_") + sf = plex.distribute(overlap=0) + if sf: + sf.destroy() + part.destroy() + plex.setName('DMPlex Object') + plex.setOptionsPrefix('redistributed_') plex.setFromOptions() self.outputPlex(plex) # Save redistributed dm to XDMF in parallel vwr.create(self.outfile(), mode='w', comm=comm) vwr.pushFormat(format=self.outformat()) - plex.setName("DMPlex Object") + plex.setName('DMPlex Object') plex.view(viewer=vwr) vwr.popFormat() vwr.destroy() # Destroy plex plex.destroy() - self.outputText("End cycle %d\n--------\n" % i, comm) + self.outputText('End cycle %d\n--------\n' % i, comm) + comm.tompi4py().Free() PETSc.COMM_WORLD.Barrier() # Check that the output is identical to that of plex/tutorial/ex5.c. - self.assertTrue(filecmp.cmp(self.tmp_output_file(), - self.ref_output_file(), shallow=False), - 'Contents of the files not the same.') + self.assertTrue( + filecmp.cmp(self.tmp_output_file(), self.ref_output_file(), shallow=False), + 'Contents of the files not the same.', + ) PETSc.COMM_WORLD.Barrier() + class BaseTestPlexHDF5Homogeneous(BaseTestPlexHDF5): """Test save on N / load on N.""" + SUFFIX = 0 HETEROGENEOUS = False + class BaseTestPlexHDF5Heterogeneous(BaseTestPlexHDF5): """Test save on N / load on M.""" + SUFFIX = 1 HETEROGENEOUS = True -class TestPlexHDF5PETSCSimpleHomogeneous(BaseTestPlexHDF5Homogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_petsc" - PARTITIONERTYPE = "simple" + +class TestPlexHDF5PETSCSimpleHomogeneous( + BaseTestPlexHDF5Homogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_petsc' + PARTITIONERTYPE = 'simple' + """ Skipping. PTScotch produces different distributions when run @@ -494,15 +589,18 @@ class TestPlexHDF5PETSCPTScotchHomogeneous(BaseTestPlexHDF5Homogeneous, PARTITIONERTYPE = "ptscotch" """ -class TestPlexHDF5PETSCParmetisHomogeneous(BaseTestPlexHDF5Homogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_petsc" - PARTITIONERTYPE = "parmetis" -class TestPlexHDF5XDMFSimpleHomogeneous(BaseTestPlexHDF5Homogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_xdmf" - PARTITIONERTYPE = "simple" +class TestPlexHDF5PETSCParmetisHomogeneous( + BaseTestPlexHDF5Homogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_petsc' + PARTITIONERTYPE = 'parmetis' + + +class TestPlexHDF5XDMFSimpleHomogeneous(BaseTestPlexHDF5Homogeneous, unittest.TestCase): + OUTFORMAT = 'hdf5_xdmf' + PARTITIONERTYPE = 'simple' + """ Skipping. 
PTScotch produces different distributions when run @@ -514,15 +612,20 @@ class TestPlexHDF5XDMFPTScotchHomogeneous(BaseTestPlexHDF5Homogeneous, PARTITIONERTYPE = "ptscotch" """ -class TestPlexHDF5XDMFParmetisHomogeneous(BaseTestPlexHDF5Homogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_xdmf" - PARTITIONERTYPE = "parmetis" -class TestPlexHDF5PETSCSimpleHeterogeneous(BaseTestPlexHDF5Heterogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_petsc" - PARTITIONERTYPE = "simple" +class TestPlexHDF5XDMFParmetisHomogeneous( + BaseTestPlexHDF5Homogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_xdmf' + PARTITIONERTYPE = 'parmetis' + + +class TestPlexHDF5PETSCSimpleHeterogeneous( + BaseTestPlexHDF5Heterogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_petsc' + PARTITIONERTYPE = 'simple' + """ Skipping. PTScotch produces different distributions when run @@ -534,25 +637,34 @@ class TestPlexHDF5PETSCPTScotchHeterogeneous(BaseTestPlexHDF5Heterogeneous, PARTITIONERTYPE = "ptscotch" """ -class TestPlexHDF5PETSCParmetisHeterogeneous(BaseTestPlexHDF5Heterogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_petsc" - PARTITIONERTYPE = "parmetis" -class TestPlexHDF5XDMFSimpleHeterogeneous(BaseTestPlexHDF5Heterogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_xdmf" - PARTITIONERTYPE = "simple" +class TestPlexHDF5PETSCParmetisHeterogeneous( + BaseTestPlexHDF5Heterogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_petsc' + PARTITIONERTYPE = 'parmetis' -class TestPlexHDF5XDMFPTScotchHeterogeneous(BaseTestPlexHDF5Heterogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_xdmf" - PARTITIONERTYPE = "ptscotch" -class TestPlexHDF5XDMFParmetisHeterogeneous(BaseTestPlexHDF5Heterogeneous, - unittest.TestCase): - OUTFORMAT = "hdf5_xdmf" - PARTITIONERTYPE = "parmetis" +class TestPlexHDF5XDMFSimpleHeterogeneous( + BaseTestPlexHDF5Heterogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_xdmf' + PARTITIONERTYPE = 'simple' + + +class TestPlexHDF5XDMFPTScotchHeterogeneous( + BaseTestPlexHDF5Heterogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_xdmf' + PARTITIONERTYPE = 'ptscotch' + + +class TestPlexHDF5XDMFParmetisHeterogeneous( + BaseTestPlexHDF5Heterogeneous, unittest.TestCase +): + OUTFORMAT = 'hdf5_xdmf' + PARTITIONERTYPE = 'parmetis' + # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/test/test_dmshell.py b/src/binding/petsc4py/test/test_dmshell.py index 76dcef6d1d4..a6be03ad398 100644 --- a/src/binding/petsc4py/test/test_dmshell.py +++ b/src/binding/petsc4py/test/test_dmshell.py @@ -4,7 +4,6 @@ class TestDMShell(unittest.TestCase): - COMM = PETSc.COMM_WORLD def setUp(self): @@ -30,6 +29,7 @@ def create_vec(dm): v.setSizes((10, None)) v.setUp() return v + self.dm.setCreateGlobalVector(create_vec) gvec = self.dm.createGlobalVector() self.assertEqual(gvec.comm, self.dm.comm) @@ -37,7 +37,7 @@ def create_vec(dm): def testSetLocalVector(self): vec = PETSc.Vec().create(comm=PETSc.COMM_SELF) - vec.setSizes((1 + 10*self.COMM.rank, None)) + vec.setSizes((1 + 10 * self.COMM.rank, None)) vec.setUp() self.dm.setLocalVector(vec) lvec = self.dm.createLocalVector() @@ -49,14 +49,15 @@ def testSetLocalVector(self): def testSetCreateLocalVector(self): def create_vec(dm): v = PETSc.Vec().create(comm=PETSc.COMM_SELF) - v.setSizes((1 + 10*dm.comm.rank, None)) + v.setSizes((1 + 10 * dm.comm.rank, None)) v.setUp() return v + self.dm.setCreateLocalVector(create_vec) lvec = self.dm.createLocalVector() lsize, gsize = lvec.getSizes() self.assertEqual(lsize, gsize) - 
self.assertEqual(lsize, 1 + 10*self.dm.comm.rank) + self.assertEqual(lsize, 1 + 10 * self.dm.comm.rank) self.assertEqual(lvec.comm, PETSc.COMM_SELF) def testSetMatrix(self): @@ -74,6 +75,7 @@ def create_mat(dm): mat.setSizes(((10, None), (2, None))) mat.setUp() return mat + self.dm.setCreateMatrix(create_mat) nmat = self.dm.createMatrix() self.assertEqual(nmat.getSizes(), create_mat(self.dm).getSizes()) @@ -84,8 +86,10 @@ def begin(dm, ivec, mode, ovec): ovec[...] = ivec[...] elif mode == PETSc.InsertMode.ADD_VALUES: ovec[...] += ivec[...] + def end(dm, ivec, mode, ovec): pass + vec = PETSc.Vec().create(comm=self.COMM) vec.setSizes((10, None)) vec.setUp() @@ -97,7 +101,7 @@ def end(dm, ivec, mode, ovec): self.dm.globalToLocal(vec, ovec, addv=PETSc.InsertMode.INSERT_VALUES) self.assertTrue(np.allclose(vec.getArray(), ovec.getArray())) self.dm.globalToLocal(vec, ovec, addv=PETSc.InsertMode.ADD_VALUES) - self.assertTrue(np.allclose(2*vec.getArray(), ovec.getArray())) + self.assertTrue(np.allclose(2 * vec.getArray(), ovec.getArray())) def testLocalToGlobal(self): def begin(dm, ivec, mode, ovec): @@ -105,8 +109,10 @@ def begin(dm, ivec, mode, ovec): ovec[...] = ivec[...] elif mode == PETSc.InsertMode.ADD_VALUES: ovec[...] += ivec[...] + def end(dm, ivec, mode, ovec): pass + vec = PETSc.Vec().create(comm=PETSc.COMM_SELF) vec.setSizes((10, None)) vec.setUp() @@ -118,7 +124,7 @@ def end(dm, ivec, mode, ovec): self.dm.localToGlobal(vec, ovec, addv=PETSc.InsertMode.INSERT_VALUES) self.assertTrue(np.allclose(vec.getArray(), ovec.getArray())) self.dm.localToGlobal(vec, ovec, addv=PETSc.InsertMode.ADD_VALUES) - self.assertTrue(np.allclose(2*vec.getArray(), ovec.getArray())) + self.assertTrue(np.allclose(2 * vec.getArray(), ovec.getArray())) def testLocalToLocal(self): def begin(dm, ivec, mode, ovec): @@ -126,8 +132,10 @@ def begin(dm, ivec, mode, ovec): ovec[...] = ivec[...] elif mode == PETSc.InsertMode.ADD_VALUES: ovec[...] += ivec[...] 
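
Each of these transfer tests wires the same kind of begin/end hook pair into a DMShell. Stripped of the harness, the pattern is (a sketch; the identity copy is purely illustrative):

from petsc4py import PETSc

def begin(dm, ivec, mode, ovec):
    # Identity transfer: copy or accumulate depending on the insert mode.
    if mode == PETSc.InsertMode.INSERT_VALUES:
        ovec[...] = ivec[...]
    elif mode == PETSc.InsertMode.ADD_VALUES:
        ovec[...] += ivec[...]

def end(dm, ivec, mode, ovec):
    pass  # nothing to finalize for a synchronous copy

dm = PETSc.DMShell().create(comm=PETSc.COMM_WORLD)
dm.setGlobalToLocal(begin, end)
# dm.globalToLocal(gvec, lvec, addv=...) now dispatches to the hooks above
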
+ def end(dm, ivec, mode, ovec): pass + vec = PETSc.Vec().create(comm=PETSc.COMM_SELF) vec.setSizes((10, None)) vec.setUp() @@ -137,18 +145,7 @@ def end(dm, ivec, mode, ovec): self.dm.localToLocal(vec, ovec, addv=PETSc.InsertMode.INSERT_VALUES) self.assertTrue(np.allclose(vec.getArray(), ovec.getArray())) self.dm.localToLocal(vec, ovec, addv=PETSc.InsertMode.ADD_VALUES) - self.assertTrue(np.allclose(2*vec.getArray(), ovec.getArray())) - - def testGlobalToLocalVecScatter(self): - vec = PETSc.Vec().create() - vec.setSizes((10, None)) - vec.setUp() - sct, ovec = PETSc.Scatter.toAll(vec) - self.dm.setGlobalToLocalVecScatter(sct) - - self.dm.globalToLocal(vec, ovec, addv=PETSc.InsertMode.INSERT_VALUES) - - self.assertTrue(np.allclose(vec.getArray(), ovec.getArray())) + self.assertTrue(np.allclose(2 * vec.getArray(), ovec.getArray())) def testGlobalToLocalVecScatter(self): vec = PETSc.Vec().create() @@ -176,10 +173,13 @@ def testLocalToLocalVecScatter(self): def testCoarsenRefine(self): cdm = PETSc.DMShell().create(comm=self.COMM) + def coarsen(dm, comm): return cdm + def refine(dm, comm): return self.dm + cdm.setRefine(refine) self.dm.setCoarsen(coarsen) coarsened = self.dm.coarsen() @@ -194,8 +194,10 @@ def testCreateInterpolation(self): vec = PETSc.Vec().create() vec.setSizes((10, None)) vec.setUp() + def create_interp(dm, dmf): return mat, vec + self.dm.setCreateInterpolation(create_interp) m, v = self.dm.createInterpolation(self.dm) self.assertEqual(m, mat) @@ -205,8 +207,10 @@ def testCreateInjection(self): mat = PETSc.Mat().create() mat.setSizes(((10, None), (10, None))) mat.setUp() + def create_inject(dm, dmf): return mat + self.dm.setCreateInjection(create_inject) m = self.dm.createInjection(self.dm) self.assertEqual(m, mat) diff --git a/src/binding/petsc4py/test/test_dmstag.py b/src/binding/petsc4py/test/test_dmstag.py index 1a2b59ca6f3..4ac20405a21 100644 --- a/src/binding/petsc4py/test/test_dmstag.py +++ b/src/binding/petsc4py/test/test_dmstag.py @@ -3,8 +3,8 @@ # -------------------------------------------------------------------- -class BaseTestDMStag(object): +class BaseTestDMStag: COMM = PETSc.COMM_WORLD STENCIL = PETSc.DMStag.StencilType.BOX SWIDTH = 1 @@ -13,10 +13,18 @@ class BaseTestDMStag(object): def setUp(self): dim = len(self.SIZES) - self.da = PETSc.DMStag().create(dim, - dofs=self.DOFS, sizes=self.SIZES, boundary_types=self.BOUNDARY, - stencil_type=self.STENCIL, stencil_width=self.SWIDTH, - comm=self.COMM, proc_sizes=self.PROC_SIZES, ownership_ranges=self.OWNERSHIP_RANGES, setUp=True) + self.da = PETSc.DMStag().create( + dim, + dofs=self.DOFS, + sizes=self.SIZES, + boundary_types=self.BOUNDARY, + stencil_type=self.STENCIL, + stencil_width=self.SWIDTH, + comm=self.COMM, + proc_sizes=self.PROC_SIZES, + ownership_ranges=self.OWNERSHIP_RANGES, + setUp=True, + ) self.directda = PETSc.DMStag().create(dim) self.directda.setStencilType(self.STENCIL) @@ -37,11 +45,11 @@ def tearDown(self): def testCoordinates(self): self.da.setCoordinateDMType('stag') - self.da.setUniformCoordinates(0,1,0,1,0,1) - self.da.setUniformCoordinatesExplicit(0,1,0,1,0,1) + self.da.setUniformCoordinates(0, 1, 0, 1, 0, 1) + self.da.setUniformCoordinatesExplicit(0, 1, 0, 1, 0, 1) cda = self.da.getCoordinateDM() datype = cda.getType() - self.assertEqual(datype,'stag') + self.assertEqual(datype, 'stag') cda.destroy() c = self.da.getCoordinatesLocal() @@ -57,11 +65,11 @@ def testCoordinates(self): self.assertEqual(c.min()[1], gc.min()[1]) self.directda.setCoordinateDMType('product') - 
self.directda.setUniformCoordinates(0,1,0,1,0,1) - self.directda.setUniformCoordinatesProduct(0,1,0,1,0,1) + self.directda.setUniformCoordinates(0, 1, 0, 1, 0, 1) + self.directda.setUniformCoordinatesProduct(0, 1, 0, 1, 0, 1) cda = self.directda.getCoordinateDM() datype = cda.getType() - self.assertEqual(datype,'product') + self.assertEqual(datype, 'product') cda.destroy() def testGetVec(self): @@ -71,21 +79,21 @@ def testGetVec(self): vg.set(1.0) self.assertEqual(vg.max()[1], 1.0) self.assertEqual(vg.min()[1], 1.0) - self.da.globalToLocal(vg,vl) + self.da.globalToLocal(vg, vl) self.assertEqual(vl.max()[1], 1.0) - self.assertTrue (vl.min()[1] in (1.0, 0.0)) + self.assertTrue(vl.min()[1] in (1.0, 0.0)) vl.set(2.0) - self.da.localToGlobal(vl,vg) + self.da.localToGlobal(vl, vg) self.assertEqual(vg.max()[1], 2.0) - self.assertTrue (vg.min()[1] in (2.0, 0.0)) + self.assertTrue(vg.min()[1] in (2.0, 0.0)) self.da.restoreGlobalVec(vg) self.da.restoreLocalVec(vl) def testGetOther(self): - lgmap = self.da.getLGMap() - dlgmap = self.directda.getLGMap() + _ = self.da.getLGMap() + _ = self.directda.getLGMap() def testDof(self): dim = self.da.getDim() @@ -93,24 +101,24 @@ def testDof(self): if dim == 1: dof0 = self.da.getLocationDof('left') dof1 = self.da.getLocationDof('element') - self.assertEqual(dofs[0],dof0) - self.assertEqual(dofs[1],dof1) + self.assertEqual(dofs[0], dof0) + self.assertEqual(dofs[1], dof1) if dim == 2: dof0 = self.da.getLocationDof('down_left') dof1 = self.da.getLocationDof('left') dof2 = self.da.getLocationDof('element') - self.assertEqual(dofs[0],dof0) - self.assertEqual(dofs[1],dof1) - self.assertEqual(dofs[2],dof2) + self.assertEqual(dofs[0], dof0) + self.assertEqual(dofs[1], dof1) + self.assertEqual(dofs[2], dof2) if dim == 3: dof0 = self.da.getLocationDof('back_down_right') dof1 = self.da.getLocationDof('down_left') dof2 = self.da.getLocationDof('left') dof3 = self.da.getLocationDof('element') - self.assertEqual(dofs[0],dof0) - self.assertEqual(dofs[1],dof1) - self.assertEqual(dofs[2],dof2) - self.assertEqual(dofs[3],dof3) + self.assertEqual(dofs[0], dof0) + self.assertEqual(dofs[1], dof1) + self.assertEqual(dofs[2], dof2) + self.assertEqual(dofs[3], dof3) def testMigrateVec(self): vec = self.da.createGlobalVec() @@ -121,159 +129,227 @@ def testMigrateVec(self): def testDMDAInterface(self): return self.da.setCoordinateDMType('stag') - self.da.setUniformCoordinates(0,1,0,1,0,1) + self.da.setUniformCoordinates(0, 1, 0, 1, 0, 1) dim = self.da.getDim() dofs = self.da.getDof() vec = self.da.createGlobalVec() if dim == 1: - da,davec = self.da.VecSplitToDMDA(vec,'left',-dofs[0]) - da,davec = self.da.VecSplitToDMDA(vec,'element',-dofs[1]) + da, davec = self.da.VecSplitToDMDA(vec, 'left', -dofs[0]) + da, davec = self.da.VecSplitToDMDA(vec, 'element', -dofs[1]) if dim == 2: - da,davec = self.da.VecSplitToDMDA(vec,'down_left',-dofs[0]) - da,davec = self.da.VecSplitToDMDA(vec,'down_left',-dofs[1]) - da,davec = self.da.VecSplitToDMDA(vec,'down_left',-dofs[2]) + da, davec = self.da.VecSplitToDMDA(vec, 'down_left', -dofs[0]) + da, davec = self.da.VecSplitToDMDA(vec, 'down_left', -dofs[1]) + da, davec = self.da.VecSplitToDMDA(vec, 'down_left', -dofs[2]) if dim == 3: - da,davec = self.da.VecSplitToDMDA(vec,'back_down_right',-dofs[0]) - da,davec = self.da.VecSplitToDMDA(vec,'down_left',-dofs[1]) - da,davec = self.da.VecSplitToDMDA(vec,'left',-dofs[2]) - da,davec = self.da.VecSplitToDMDA(vec,'element',-dofs[3]) + da, davec = self.da.VecSplitToDMDA(vec, 'back_down_right', -dofs[0]) + da, davec 
= self.da.VecSplitToDMDA(vec, 'down_left', -dofs[1]) + da, davec = self.da.VecSplitToDMDA(vec, 'left', -dofs[2]) + da, davec = self.da.VecSplitToDMDA(vec, 'element', -dofs[3]) -GHOSTED = PETSc.DM.BoundaryType.GHOSTED + +GHOSTED = PETSc.DM.BoundaryType.GHOSTED PERIODIC = PETSc.DM.BoundaryType.PERIODIC NONE = PETSc.DM.BoundaryType.NONE SCALE = 4 + class BaseTestDMStag_1D(BaseTestDMStag): - SIZES = [100*SCALE,] - BOUNDARY = [NONE,] + SIZES = [ + 100 * SCALE, + ] + BOUNDARY = [ + NONE, + ] + class BaseTestDMStag_2D(BaseTestDMStag): - SIZES = [9*SCALE, 11*SCALE] + SIZES = [9 * SCALE, 11 * SCALE] BOUNDARY = [NONE, NONE] + class BaseTestDMStag_3D(BaseTestDMStag): - SIZES = [6*SCALE, 7*SCALE, 8*SCALE] + SIZES = [6 * SCALE, 7 * SCALE, 8 * SCALE] BOUNDARY = [NONE, NONE, NONE] + # -------------------------------------------------------------------- + class TestDMStag_1D_W0_N11(BaseTestDMStag_1D, unittest.TestCase): SWIDTH = 0 - DOFS = (1,1) - NEWDOF = (2,1) + DOFS = (1, 1) + NEWDOF = (2, 1) + + class TestDMStag_1D_W0_N21(BaseTestDMStag_1D, unittest.TestCase): SWIDTH = 0 - DOFS = (2,1) - NEWDOF = (2,2) + DOFS = (2, 1) + NEWDOF = (2, 2) + + class TestDMStag_1D_W0_N12(BaseTestDMStag_1D, unittest.TestCase): SWIDTH = 0 - DOFS = (1,2) - NEWDOF = (2,2) + DOFS = (1, 2) + NEWDOF = (2, 2) + + class TestDMStag_1D_W2_N11(BaseTestDMStag_1D, unittest.TestCase): SWIDTH = 2 - DOFS = (1,1) - NEWDOF = (2,1) + DOFS = (1, 1) + NEWDOF = (2, 1) + + class TestDMStag_1D_W2_N21(BaseTestDMStag_1D, unittest.TestCase): SWIDTH = 2 - DOFS = (2,1) - NEWDOF = (2,2) + DOFS = (2, 1) + NEWDOF = (2, 2) + + class TestDMStag_1D_W2_N12(BaseTestDMStag_1D, unittest.TestCase): SWIDTH = 2 - DOFS = (1,2) - NEWDOF = (2,2) + DOFS = (1, 2) + NEWDOF = (2, 2) + class TestDMStag_2D_W0_N112(BaseTestDMStag_2D, unittest.TestCase): - DOFS = (1,1,2) + DOFS = (1, 1, 2) SWIDTH = 0 - NEWDOF = (2,2,2) + NEWDOF = (2, 2, 2) + + class TestDMStag_2D_W2_N112(BaseTestDMStag_2D, unittest.TestCase): - DOFS = (1,1,2) + DOFS = (1, 1, 2) SWIDTH = 2 - NEWDOF = (2,2,2) + NEWDOF = (2, 2, 2) + + class TestDMStag_2D_PXY(BaseTestDMStag_2D, unittest.TestCase): - SIZES = [13*SCALE,17*SCALE] - DOFS = (1,1,2) + SIZES = [13 * SCALE, 17 * SCALE] + DOFS = (1, 1, 2) SWIDTH = 5 - BOUNDARY = (PERIODIC,)*2 - NEWDOF = (2,2,2) + BOUNDARY = (PERIODIC,) * 2 + NEWDOF = (2, 2, 2) + + class TestDMStag_2D_GXY(BaseTestDMStag_2D, unittest.TestCase): - SIZES = [13*SCALE,17*SCALE] - DOFS = (1,1,2) + SIZES = [13 * SCALE, 17 * SCALE] + DOFS = (1, 1, 2) SWIDTH = 5 - BOUNDARY = (GHOSTED,)*2 - NEWDOF = (2,2,2) + BOUNDARY = (GHOSTED,) * 2 + NEWDOF = (2, 2, 2) + class TestDMStag_3D_W0_N1123(BaseTestDMStag_3D, unittest.TestCase): - DOFS = (1,1,2,3) + DOFS = (1, 1, 2, 3) SWIDTH = 0 - NEWDOF = (2,2,3,3) + NEWDOF = (2, 2, 3, 3) + + class TestDMStag_3D_W2_N1123(BaseTestDMStag_3D, unittest.TestCase): - DOFS = (1,1,2,3) + DOFS = (1, 1, 2, 3) SWIDTH = 2 - NEWDOF = (2,2,3,3) + NEWDOF = (2, 2, 3, 3) + + class TestDMStag_3D_PXYZ(BaseTestDMStag_3D, unittest.TestCase): - SIZES = [11*SCALE,13*SCALE,17*SCALE] - DOFS = (1,1,2,3) - NEWDOF = (2,2,3,3) + SIZES = [11 * SCALE, 13 * SCALE, 17 * SCALE] + DOFS = (1, 1, 2, 3) + NEWDOF = (2, 2, 3, 3) SWIDTH = 3 - BOUNDARY = (PERIODIC,)*3 + BOUNDARY = (PERIODIC,) * 3 + + class TestDMStag_3D_GXYZ(BaseTestDMStag_3D, unittest.TestCase): - SIZES = [11*SCALE,13*SCALE,17*SCALE] - DOFS = (1,1,2,3) - NEWDOF = (2,2,3,3) + SIZES = [11 * SCALE, 13 * SCALE, 17 * SCALE] + DOFS = (1, 1, 2, 3) + NEWDOF = (2, 2, 3, 3) SWIDTH = 3 - BOUNDARY = (GHOSTED,)*3 + BOUNDARY = (GHOSTED,) * 3 + # 
-------------------------------------------------------------------- -DIM = (1,2,3) -DOF0 = (0,1,2) -DOF1 = (0,1,2) -DOF2 = (0,1,2) -DOF3 = (0,1,2) +DIM = (1, 2, 3) +DOF0 = (0, 1, 2) +DOF1 = (0, 1, 2) +DOF2 = (0, 1, 2) +DOF3 = (0, 1, 2) BOUNDARY_TYPE = ('none', 'ghosted', 'periodic') -STENCIL_TYPE = ('none', 'star', 'box') -STENCIL_WIDTH = (0,1,2,3) +STENCIL_TYPE = ('none', 'star', 'box') +STENCIL_WIDTH = (0, 1, 2, 3) + class TestDMStagCreate(unittest.TestCase): pass + + counter = 0 for dim in DIM: for dof0 in DOF0: for dof1 in DOF1: for dof2 in DOF2: - if dim == 1 and dof2 > 0: continue + if dim == 1 and dof2 > 0: + continue for dof3 in DOF3: - if dim == 2 and dof3 > 0: continue - if dof0==0 and dof1==0 and dof2==0 and dof3==0: continue - dofs = [dof0,dof1,dof2,dof3][:dim+1] + if dim == 2 and dof3 > 0: + continue + if dof0 == 0 and dof1 == 0 and dof2 == 0 and dof3 == 0: + continue + dofs = [dof0, dof1, dof2, dof3][: dim + 1] for boundary in BOUNDARY_TYPE: - if boundary == "periodic": continue # XXX broken + if boundary == 'periodic': + continue # XXX broken for stencil in STENCIL_TYPE: - if stencil == 'none' and boundary != 'none': continue + if stencil == 'none' and boundary != 'none': + continue for width in STENCIL_WIDTH: - if stencil == 'none' and width > 0: continue - if stencil in ['star','box'] and width == 0: continue - kargs = dict(dim=dim, dofs=dofs, boundary_type=boundary, - stencil_type=stencil, stencil_width=width) - - def testCreate(self,kargs=kargs): + if stencil == 'none' and width > 0: + continue + if stencil in ['star', 'box'] and width == 0: + continue + kargs = { + 'dim': dim, + 'dofs': dofs, + 'boundary_type': boundary, + 'stencil_type': stencil, + 'stencil_width': width, + } + + def testCreate(self, kargs=kargs): kargs = dict(kargs) - cda = PETSc.DMStag().create(kargs['dim'], - dofs = kargs['dofs'], - sizes = [8*SCALE,]*kargs['dim'], - boundary_types = [kargs['boundary_type'],]*kargs['dim'], - stencil_type = kargs['stencil_type'], - stencil_width = kargs['stencil_width'], - setUp=True) + cda = PETSc.DMStag().create( + kargs['dim'], + dofs=kargs['dofs'], + sizes=[ + 8 * SCALE, + ] + * kargs['dim'], + boundary_types=[ + kargs['boundary_type'], + ] + * kargs['dim'], + stencil_type=kargs['stencil_type'], + stencil_width=kargs['stencil_width'], + setUp=True, + ) dda = PETSc.DMStag().create(kargs['dim']) dda.setStencilType(kargs['stencil_type']) dda.setStencilWidth(kargs['stencil_width']) - dda.setBoundaryTypes([kargs['boundary_type'],]*kargs['dim']) + dda.setBoundaryTypes( + [ + kargs['boundary_type'], + ] + * kargs['dim'] + ) dda.setDof(kargs['dofs']) - dda.setGlobalSizes([8*SCALE,]*kargs['dim']) + dda.setGlobalSizes( + [ + 8 * SCALE, + ] + * kargs['dim'] + ) dda.setUp() cdim = cda.getDim() @@ -284,7 +360,7 @@ def testCreate(self,kargs=kargs): cstencil_type = cda.getStencilType() cstencil_width = cda.getStencilWidth() centries_per_element = cda.getEntriesPerElement() - cstarts, csizes, cnextra = cda.getCorners() + cstarts, csizes, cnextra = cda.getCorners() cisLastRank = cda.getIsLastRank() cisFirstRank = cda.getIsFirstRank() cownershipranges = cda.getOwnershipRanges() @@ -298,64 +374,109 @@ def testCreate(self,kargs=kargs): dstencil_type = dda.getStencilType() dstencil_width = dda.getStencilWidth() dentries_per_element = dda.getEntriesPerElement() - dstarts, dsizes, dnextra = dda.getCorners() + dstarts, dsizes, dnextra = dda.getCorners() disLastRank = dda.getIsLastRank() disFirstRank = dda.getIsFirstRank() downershipranges = dda.getOwnershipRanges() dprocsizes = 
dda.getProcSizes() - self.assertEqual(cdim,kargs['dim']) - self.assertEqual(cdof,tuple(kargs['dofs'])) - self.assertEqual(cboundary,tuple([kargs['boundary_type'],]*kargs['dim'])) - self.assertEqual(cstencil_type,kargs['stencil_type']) - self.assertEqual(cstencil_width,kargs['stencil_width']) - self.assertEqual(cgsizes,tuple([8*SCALE,]*kargs['dim'])) - - self.assertEqual(cdim,ddim) - self.assertEqual(cdof,ddof) - self.assertEqual(cgsizes,dgsizes) - self.assertEqual(clsizes,dlsizes) - self.assertEqual(cboundary,dboundary) - self.assertEqual(cstencil_type,dstencil_type) - self.assertEqual(cstencil_width,dstencil_width) - self.assertEqual(centries_per_element,dentries_per_element) - self.assertEqual(cstarts,dstarts) - self.assertEqual(csizes,dsizes) - self.assertEqual(cnextra,dnextra) - self.assertEqual(cisLastRank,disLastRank) - self.assertEqual(cisFirstRank,disFirstRank) + self.assertEqual(cdim, kargs['dim']) + self.assertEqual(cdof, tuple(kargs['dofs'])) + self.assertEqual( + cboundary, + tuple( + [ + kargs['boundary_type'], + ] + * kargs['dim'] + ), + ) + self.assertEqual( + cstencil_type, kargs['stencil_type'] + ) + self.assertEqual( + cstencil_width, kargs['stencil_width'] + ) + self.assertEqual( + cgsizes, + tuple( + [ + 8 * SCALE, + ] + * kargs['dim'] + ), + ) + + self.assertEqual(cdim, ddim) + self.assertEqual(cdof, ddof) + self.assertEqual(cgsizes, dgsizes) + self.assertEqual(clsizes, dlsizes) + self.assertEqual(cboundary, dboundary) + self.assertEqual(cstencil_type, dstencil_type) + self.assertEqual(cstencil_width, dstencil_width) + self.assertEqual( + centries_per_element, dentries_per_element + ) + self.assertEqual(cstarts, dstarts) + self.assertEqual(csizes, dsizes) + self.assertEqual(cnextra, dnextra) + self.assertEqual(cisLastRank, disLastRank) + self.assertEqual(cisFirstRank, disFirstRank) self.assertEqual(cprocsizes, dprocsizes) - for co,do in zip(cownershipranges, downershipranges): - for i,j in zip(co,do): - self.assertEqual(i,j) - - self.assertEqual(cdim+1,len(cdof)) - self.assertEqual(cdim,len(cgsizes)) - self.assertEqual(cdim,len(clsizes)) - self.assertEqual(cdim,len(cboundary)) - self.assertEqual(cdim,len(cstarts)) - self.assertEqual(cdim,len(csizes)) - self.assertEqual(cdim,len(cnextra)) - self.assertEqual(cdim,len(cisLastRank)) - self.assertEqual(cdim,len(cisLastRank)) - if cdim == 1: self.assertEqual(centries_per_element, cdof[0] + cdof[1]) - if cdim == 2: self.assertEqual(centries_per_element, cdof[0] + 2*cdof[1] + cdof[2]) - if cdim == 3: self.assertEqual(centries_per_element, cdof[0] + 3*cdof[1] + 3*cdof[2] + cdof[3]) + for co, do in zip( + cownershipranges, downershipranges + ): + for i, j in zip(co, do): + self.assertEqual(i, j) + + self.assertEqual(cdim + 1, len(cdof)) + self.assertEqual(cdim, len(cgsizes)) + self.assertEqual(cdim, len(clsizes)) + self.assertEqual(cdim, len(cboundary)) + self.assertEqual(cdim, len(cstarts)) + self.assertEqual(cdim, len(csizes)) + self.assertEqual(cdim, len(cnextra)) + self.assertEqual(cdim, len(cisLastRank)) + self.assertEqual(cdim, len(cisLastRank)) + if cdim == 1: + self.assertEqual( + centries_per_element, cdof[0] + cdof[1] + ) + if cdim == 2: + self.assertEqual( + centries_per_element, + cdof[0] + 2 * cdof[1] + cdof[2], + ) + if cdim == 3: + self.assertEqual( + centries_per_element, + cdof[0] + + 3 * cdof[1] + + 3 * cdof[2] + + cdof[3], + ) for i in range(cdim): self.assertEqual(csizes[i], clsizes[i]) - if cisLastRank[i]: self.assertEqual(cnextra[i],1) - if (cnextra[i]==1): self.assertTrue(cisLastRank[i]) - if 
(cisFirstRank[i]): self.assertEqual(cstarts[i],0) - self.assertEqual(len(cprocsizes), len(cownershipranges)) + if cisLastRank[i]: + self.assertEqual(cnextra[i], 1) + if cnextra[i] == 1: + self.assertTrue(cisLastRank[i]) + if cisFirstRank[i]: + self.assertEqual(cstarts[i], 0) + self.assertEqual( + len(cprocsizes), len(cownershipranges) + ) self.assertEqual(len(cprocsizes), cdim) - for i,m in enumerate(cprocsizes): + for i, m in enumerate(cprocsizes): self.assertEqual(m, len(cownershipranges[i])) dda.destroy() cda.destroy() - setattr(TestDMStagCreate, - "testCreate%05d"%counter, - testCreate) + setattr( + TestDMStagCreate, + 'testCreate%05d' % counter, + testCreate, + ) del testCreate counter += 1 diff --git a/src/binding/petsc4py/test/test_gc.py b/src/binding/petsc4py/test/test_gc.py index a13ca2b4e9d..366e29da979 100644 --- a/src/binding/petsc4py/test/test_gc.py +++ b/src/binding/petsc4py/test/test_gc.py @@ -1,7 +1,7 @@ from petsc4py import PETSc import unittest -import gc, weakref -import warnings +import gc +import weakref # -------------------------------------------------------------------- @@ -11,8 +11,8 @@ # -------------------------------------------------------------------- -class BaseTestGC(object): +class BaseTestGC: def setUp(self): self.obj = self.CLASS().create(comm=PETSc.COMM_SELF) @@ -25,8 +25,7 @@ def tearDown(self): PETSc.garbage_cleanup() def make_weakref(self): - wref = weakref.ref(self.obj) - return wref + return weakref.ref(self.obj) def testCycleInSelf(self): self.obj.setAttr('myself', self.obj) @@ -35,7 +34,9 @@ def testCycleInMethod(self): self.obj.setAttr('mymeth', self.obj.view) def testCycleInInstance(self): - class A: pass + class A: + pass + a = A() a.obj = self.obj self.obj.setAttr('myinst', a) @@ -45,50 +46,67 @@ def testCycleInAllWays(self): self.testCycleInMethod() self.testCycleInInstance() + # -------------------------------------------------------------------- + class TestGCVec(BaseTestGC, unittest.TestCase): CLASS = PETSc.Vec + class TestGCVecSubType(TestGCVec): CLASS = type('_Vec', (PETSc.Vec,), {}) + class TestGCMat(BaseTestGC, unittest.TestCase): CLASS = PETSc.Mat + class TestGCMatSubType(TestGCMat): CLASS = type('_Mat', (PETSc.Mat,), {}) + class TestGCPC(BaseTestGC, unittest.TestCase): CLASS = PETSc.PC + class TestGCPCSubType(TestGCPC): CLASS = type('_PC', (PETSc.PC,), {}) + class TestGCKSP(BaseTestGC, unittest.TestCase): CLASS = PETSc.KSP + class TestGCKSPSubType(TestGCKSP): CLASS = type('_KSP', (PETSc.KSP,), {}) + class TestGCSNES(BaseTestGC, unittest.TestCase): CLASS = PETSc.SNES + def testCycleInAppCtx(self): self.obj.setAppCtx(self.obj) + class TestGCSNESSubType(TestGCSNES): CLASS = type('_SNES', (PETSc.SNES,), {}) + class TestGCTS(BaseTestGC, unittest.TestCase): CLASS = PETSc.TS + def testCycleInAppCtx(self): self.obj.setAppCtx(self.obj) + class TestGCTSSubType(TestGCTS): CLASS = type('_TS', (PETSc.TS,), {}) + def testCycleInAppCtx(self): self.obj.setAppCtx(self.obj) + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_is.py b/src/binding/petsc4py/test/test_is.py index 16f9a3f7a93..52608d28fe6 100644 --- a/src/binding/petsc4py/test/test_is.py +++ b/src/binding/petsc4py/test/test_is.py @@ -4,8 +4,8 @@ # -------------------------------------------------------------------- -class BaseTestIS(object): +class BaseTestIS: TYPE = None def tearDown(self): @@ -50,8 +50,8 @@ def testComplement(self): self.iset.sort() nmin = self.iset.getIndices().min() nmax = 
self.iset.getIndices().max() - iset = self.iset.complement(nmin, nmax+1) - iset.complement(nmin, nmax+1) + iset = self.iset.complement(nmin, nmax + 1) + iset.complement(nmin, nmax + 1) del iset def testSum(self): @@ -70,35 +70,43 @@ def testExpand(self): del iset def testRenumber(self): - (n1,is1) = self.iset.renumber() - (n2,is2) = self.iset.renumber(self.iset) + (n1, is1) = self.iset.renumber() + (n2, is2) = self.iset.renumber(self.iset) del is1 del is2 def testProperties(self): - proplist = ['sizes', 'size', 'local_size', 'indices', - 'permutation', 'identity', 'sorted'] + proplist = [ + 'sizes', + 'size', + 'local_size', + 'indices', + 'permutation', + 'identity', + 'sorted', + ] for prop in proplist: self.assertTrue(hasattr(self.iset, prop)) def testArray(self): import numpy + refs = self.iset.getRefCount() arr1 = numpy.asarray(self.iset) - self.assertEqual(self.iset.getRefCount(), refs+1) + self.assertEqual(self.iset.getRefCount(), refs + 1) arr2 = self.iset.array - self.assertEqual(self.iset.getRefCount(), refs+2) + self.assertEqual(self.iset.getRefCount(), refs + 2) self.assertTrue((arr1 == arr2).all()) del arr2 - self.assertEqual(self.iset.getRefCount(), refs+1) + self.assertEqual(self.iset.getRefCount(), refs + 1) del arr1 self.assertEqual(self.iset.getRefCount(), refs) # -------------------------------------------------------------------- -class TestISGeneral(BaseTestIS, unittest.TestCase): +class TestISGeneral(BaseTestIS, unittest.TestCase): TYPE = PETSc.IS.Type.GENERAL def setUp(self): @@ -112,7 +120,6 @@ def testGetIndices(self): class TestISStride(BaseTestIS, unittest.TestCase): - TYPE = PETSc.IS.Type.STRIDE def setUp(self): @@ -122,7 +129,7 @@ def setUp(self): def testGetIndices(self): size, start, step = self.info - indices = [start+i*step for i in range(size)] + indices = [start + i * step for i in range(size)] self.assertEqual(list(self.iset.getIndices()), indices) def testToGeneral(self): @@ -131,19 +138,18 @@ def testToGeneral(self): class TestISBlock(BaseTestIS, unittest.TestCase): - TYPE = PETSc.IS.Type.BLOCK def setUp(self): self.bsize = 3 - self.index = list(range(0,10,2)) + self.index = list(range(0, 10, 2)) random.shuffle(self.index) self.iset = PETSc.IS().createBlock(self.bsize, self.index) self.assertEqual(self.iset.getType(), PETSc.IS.Type.BLOCK) def testGetSize(self): lsize = self.iset.getLocalSize() - self.assertEqual(lsize/self.bsize, len(self.index)) + self.assertEqual(lsize / self.bsize, len(self.index)) def testGetBlockSize(self): bs = self.iset.getBlockSize() @@ -158,9 +164,9 @@ def testGetIndices(self): idx = [] for i in self.iset.getBlockIndices(): for j in range(bs): - idx.append(i*bs+j) + idx.append(i * bs + j) index = list(self.iset.getIndices()) - #self.assertEqual(index, idx) + self.assertEqual(index, idx) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/test/test_ksp.py b/src/binding/petsc4py/test/test_ksp.py index 2433aed4422..e68f68d99a9 100644 --- a/src/binding/petsc4py/test/test_ksp.py +++ b/src/binding/petsc4py/test/test_ksp.py @@ -6,10 +6,10 @@ # -------------------------------------------------------------------- -class BaseTestKSP(object): +class BaseTestKSP: KSP_TYPE = None - PC_TYPE = None + PC_TYPE = None def setUp(self): ksp = PETSc.KSP() @@ -34,14 +34,14 @@ def testTols(self): tols = self.ksp.getTolerances() self.ksp.setTolerances(*tols) tnames = ('rtol', 'atol', 'divtol', 'max_it') - tolvals = [getattr(self.ksp, t) for t in tnames] + tolvals = [getattr(self.ksp, t) for t in 
tnames] self.assertEqual(tuple(tols), tuple(tolvals)) def testProperties(self): ksp = self.ksp # - ksp.appctx = (1,2,3) - self.assertEqual(ksp.appctx, (1,2,3)) + ksp.appctx = (1, 2, 3) + self.assertEqual(ksp.appctx, (1, 2, 3)) ksp.appctx = None self.assertEqual(ksp.appctx, None) # @@ -64,7 +64,7 @@ def testProperties(self): self.assertEqual(ksp.norm, 0) # rh = ksp.history - self.assertTrue(len(rh)==0) + self.assertTrue(len(rh) == 0) # reason = PETSc.KSP.ConvergedReason.CONVERGED_ITS ksp.reason = reason @@ -106,11 +106,11 @@ def testGetSetPC(self): def testSolve(self): A = PETSc.Mat().create(PETSc.COMM_SELF) - A.setSizes([3,3]) + A.setSizes([3, 3]) A.setType(PETSc.Mat.Type.SEQAIJ) A.setPreallocationNNZ(1) for i in range(3): - A.setValue(i, i, 0.9/(i+1)) + A.setValue(i, i, 0.9 / (i + 1)) A.assemble() A.shift(1) x, b = A.createVecs() @@ -119,7 +119,6 @@ def testSolve(self): self.ksp.setOperators(A) self.ksp.setConvergenceHistory() self.ksp.solve(b, x) - r = b.duplicate() u = x.duplicate() self.ksp.buildSolution(u) self.ksp.buildResidual(u) @@ -138,8 +137,10 @@ def testResetAndSolve(self): def testSetMonitor(self): reshist = {} + def monitor(ksp, its, rnorm): reshist[its] = rnorm + refcnt = getrefcount(monitor) self.ksp.setMonitor(monitor) self.assertEqual(getrefcount(monitor), refcnt + 1) @@ -157,8 +158,10 @@ def monitor(ksp, its, rnorm): def testSetConvergenceTest(self): def converged(ksp, its, rnorm): - if its > 10: return True + if its > 10: + return True return False + refcnt = getrefcount(converged) self.ksp.setConvergenceTest(converged) self.assertEqual(getrefcount(converged), refcnt + 1) @@ -168,68 +171,113 @@ def converged(ksp, its, rnorm): def testAddConvergenceTest(self): def converged(ksp, its, rnorm): return True + refcnt = getrefcount(converged) - self.ksp.addConvergenceTest(converged,prepend=True) + self.ksp.addConvergenceTest(converged, prepend=True) self.assertEqual(getrefcount(converged), refcnt + 1) self.testSolve() self.ksp.setConvergenceTest(None) self.assertEqual(getrefcount(converged), refcnt) self.testSolve() - self.ksp.addConvergenceTest(converged,prepend=False) + self.ksp.addConvergenceTest(converged, prepend=False) self.assertEqual(getrefcount(converged), refcnt + 1) self.testSolve() self.ksp.setConvergenceTest(None) self.assertEqual(getrefcount(converged), refcnt) + def testSetPreSolveTest(self): + check = {'val': 0} + + def presolve(ksp, rhs, x): + check['val'] = 1 + + refcnt = getrefcount(presolve) + self.ksp.setPreSolve(presolve) + self.assertEqual(getrefcount(presolve), refcnt + 1) + self.testSolve() + self.assertEqual(check['val'], 1) + self.ksp.setPreSolve(None) + self.assertEqual(getrefcount(presolve), refcnt) + + def testSetPostSolveTest(self): + check = {'val': 0} + + def postsolve(ksp, rhs, x): + check['val'] = 1 + + refcnt = getrefcount(postsolve) + self.ksp.setPostSolve(postsolve) + self.assertEqual(getrefcount(postsolve), refcnt + 1) + self.testSolve() + self.assertEqual(check['val'], 1) + self.ksp.setPostSolve(None) + self.assertEqual(getrefcount(postsolve), refcnt) + + # -------------------------------------------------------------------- + class TestKSPPREONLY(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.PREONLY PC_TYPE = PETSc.PC.Type.LU + class TestKSPRICHARDSON(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.RICHARDSON + class TestKSPCHEBYCHEV(BaseTestKSP, unittest.TestCase): try: KSP_TYPE = PETSc.KSP.Type.CHEBYSHEV except AttributeError: KSP_TYPE = PETSc.KSP.Type.CHEBYCHEV + class TestKSPCG(BaseTestKSP, 
unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.CG + class TestKSPCGNE(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.CGNE + class TestKSPSTCG(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.STCG + class TestKSPBCGS(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.BCGS + class TestKSPBCGSL(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.BCGSL + class TestKSPCGS(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.CGS + class TestKSPQCG(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.QCG - PC_TYPE = PETSc.PC.Type.JACOBI + PC_TYPE = PETSc.PC.Type.JACOBI + class TestKSPBICG(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.BICG + class TestKSPGMRES(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.GMRES + class TestKSPFGMRES(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.FGMRES + class TestKSPLSQR(BaseTestKSP, unittest.TestCase): KSP_TYPE = PETSc.KSP.Type.LSQR + # -------------------------------------------------------------------- if PETSc.ScalarType().dtype.char in 'FDG': diff --git a/src/binding/petsc4py/test/test_ksp_py.py b/src/binding/petsc4py/test/test_ksp_py.py index ed758240659..a39f54b9db3 100644 --- a/src/binding/petsc4py/test/test_ksp_py.py +++ b/src/binding/petsc4py/test/test_ksp_py.py @@ -2,12 +2,12 @@ from petsc4py import PETSc import unittest -from sys import getrefcount +from test_ksp import BaseTestKSP # -------------------------------------------------------------------- -class MyKSP(object): +class MyKSP: def __init__(self): pass @@ -34,13 +34,13 @@ def loop(self, ksp, r): ksp.monitor(its, rnorm) reason = ksp.callConvergenceTest(its, rnorm) if not reason: - ksp.setIterationNumber(its+1) + ksp.setIterationNumber(its + 1) else: ksp.setConvergedReason(reason) return reason -class MyRichardson(MyKSP): +class MyRichardson(MyKSP): def solve(self, ksp, b, x): A, B = ksp.getOperators() P = ksp.getPC() @@ -56,17 +56,17 @@ def solve(self, ksp, b, x): P.apply(r, z) x.axpy(1, z) -class MyCG(MyKSP): +class MyCG(MyKSP): def setUp(self, ksp): - super(MyCG, self).setUp(ksp) + super().setUp(ksp) d = self.work[0].duplicate() q = d.duplicate() self.work += [d, q] def solve(self, ksp, b, x): A, B = ksp.getOperators() - P = ksp.getPC() + # P = ksp.getPC() r, z, d, q = self.work # A.mult(x, r) @@ -84,37 +84,39 @@ def solve(self, ksp, b, x): beta = delta / delta_old d.aypx(beta, r) + # -------------------------------------------------------------------- -from test_ksp import BaseTestKSP class BaseTestKSPPYTHON(BaseTestKSP): - KSP_TYPE = PETSc.KSP.Type.PYTHON ContextClass = None def setUp(self): - super(BaseTestKSPPYTHON, self).setUp() + super().setUp() ctx = self.ContextClass() self.ksp.setPythonContext(ctx) def testGetType(self): ctx = self.ksp.getPythonContext() - pytype = "{0}.{1}".format(ctx.__module__, type(ctx).__name__) + pytype = f'{ctx.__module__}.{type(ctx).__name__}' self.assertTrue(self.ksp.getPythonType() == pytype) def tearDown(self): self.ksp.destroy() PETSc.garbage_cleanup() + class TestKSPPYTHON_RICH(BaseTestKSPPYTHON, unittest.TestCase): - PC_TYPE = PETSc.PC.Type.JACOBI + PC_TYPE = PETSc.PC.Type.JACOBI ContextClass = MyRichardson + class TestKSPPYTHON_CG(BaseTestKSPPYTHON, unittest.TestCase): - PC_TYPE = PETSc.PC.Type.NONE + PC_TYPE = PETSc.PC.Type.NONE ContextClass = MyCG + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_lgmap.py b/src/binding/petsc4py/test/test_lgmap.py index 
42f6e3bc049..994657aba8d 100644 --- a/src/binding/petsc4py/test/test_lgmap.py +++ b/src/binding/petsc4py/test/test_lgmap.py @@ -3,17 +3,17 @@ # -------------------------------------------------------------------- -class BaseTestLGMap(object): +class BaseTestLGMap: def _mk_idx(self, comm): comm_size = comm.getSize() comm_rank = comm.getRank() lsize = 10 first = lsize * comm_rank - last = first + lsize + last = first + lsize if comm_rank > 0: first -= 1 - if comm_rank < (comm_size-1): + if comm_rank < (comm_size - 1): last += 1 return list(range(first, last)) @@ -27,7 +27,7 @@ def testGetSize(self): def testGetIndices(self): size = self.lgmap.getSize() - idx = self.lgmap.getIndices() + idx = self.lgmap.getIndices() self.assertEqual(len(idx), size) for i, val in enumerate(self.idx): self.assertEqual(idx[i], val) @@ -36,43 +36,43 @@ def testGetInfo(self): info = self.lgmap.getInfo() self.assertEqual(type(info), dict) if self.lgmap.getComm().getSize() == 1: - self.assertEqual(info, {}) + self.assertTrue(len(info) == 1) else: self.assertTrue(len(info) > 1) self.assertTrue(len(info) < 4) def testApply(self): - idxin = list(range(self.lgmap.getSize())) + idxin = list(range(self.lgmap.getSize())) idxout = self.lgmap.apply(idxin) self.lgmap.apply(idxin, idxout) - invmap = self.lgmap.applyInverse(idxout) - + _ = self.lgmap.applyInverse(idxout) def testApplyIS(self): - is_in = PETSc.IS().createStride(self.lgmap.getSize()) - is_out = self.lgmap.apply(is_in) + is_in = PETSc.IS().createStride(self.lgmap.getSize()) + _ = self.lgmap.apply(is_in) def testProperties(self): for prop in ('size', 'indices', 'info'): self.assertTrue(hasattr(self.lgmap, prop)) + # -------------------------------------------------------------------- -class TestLGMap(BaseTestLGMap, unittest.TestCase): +class TestLGMap(BaseTestLGMap, unittest.TestCase): def setUp(self): - self.idx = self._mk_idx(PETSc.COMM_WORLD) + self.idx = self._mk_idx(PETSc.COMM_WORLD) self.lgmap = PETSc.LGMap().create(self.idx, comm=PETSc.COMM_WORLD) -class TestLGMapIS(BaseTestLGMap, unittest.TestCase): +class TestLGMapIS(BaseTestLGMap, unittest.TestCase): def setUp(self): - self.idx = self._mk_idx(PETSc.COMM_WORLD) - self.iset = PETSc.IS().createGeneral(self.idx, comm=PETSc.COMM_WORLD) + self.idx = self._mk_idx(PETSc.COMM_WORLD) + self.iset = PETSc.IS().createGeneral(self.idx, comm=PETSc.COMM_WORLD) self.lgmap = PETSc.LGMap().create(self.iset) def tearDown(self): - self.iset = None + self.iset = None self.lgmap = None def testSameComm(self): @@ -80,10 +80,11 @@ def testSameComm(self): comm2 = self.iset.getComm() self.assertEqual(comm1, comm2) + # -------------------------------------------------------------------- -class TestLGMapBlock(unittest.TestCase): +class TestLGMapBlock(unittest.TestCase): BS = 3 def setUp(self): @@ -92,10 +93,10 @@ def setUp(self): comm_rank = comm.getRank() lsize = 10 first = lsize * comm_rank - last = first + lsize + last = first + lsize if comm_rank > 0: first -= 1 - if comm_rank < (comm_size-1): + if comm_rank < (comm_size - 1): last += 1 self.idx = list(range(first, last)) bs = self.BS @@ -116,7 +117,7 @@ def testGetBlockIndices(self): size = self.lgmap.getSize() bs = self.lgmap.getBlockSize() idx = self.lgmap.getBlockIndices() - self.assertEqual(len(idx), size//bs) + self.assertEqual(len(idx), size // bs) for i, val in enumerate(self.idx): self.assertEqual(idx[i], val) @@ -127,13 +128,13 @@ def testGetIndices(self): self.assertEqual(len(idx), size) for i, val in enumerate(self.idx): for j in range(bs): - 
self.assertEqual(idx[i*bs+j], val*bs+j) + self.assertEqual(idx[i * bs + j], val * bs + j) def testGetBlockInfo(self): info = self.lgmap.getBlockInfo() self.assertEqual(type(info), dict) if self.lgmap.getComm().getSize() == 1: - self.assertEqual(info, {}) + self.assertTrue(len(info) == 1) else: self.assertTrue(len(info) > 1) self.assertTrue(len(info) < 4) @@ -142,11 +143,12 @@ def testGetInfo(self): info = self.lgmap.getInfo() self.assertEqual(type(info), dict) if self.lgmap.getComm().getSize() == 1: - self.assertEqual(info, {}) + self.assertTrue(len(info) == 1) else: self.assertTrue(len(info) > 1) self.assertTrue(len(info) < 4) + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_log.py b/src/binding/petsc4py/test/test_log.py index 5c7a3f2ddf5..44aec4a9401 100644 --- a/src/binding/petsc4py/test/test_log.py +++ b/src/binding/petsc4py/test/test_log.py @@ -1,8 +1,10 @@ # -------------------------------------------------------------------- -if __name__ == "__main__": - import sys, petsc4py - petsc4py.init(sys.argv+['-log_view']) +if __name__ == '__main__': + import sys + import petsc4py + + petsc4py.init(sys.argv + ['-log_view']) # -------------------------------------------------------------------- @@ -11,10 +13,10 @@ # -------------------------------------------------------------------- -class TestLog(unittest.TestCase): +class TestLog(unittest.TestCase): def setUp(self): - #PETSc.Log.begin() + # PETSc.Log.begin() # register stages self.stage1 = PETSc.Log.Stage('Stage 1') self.stage2 = PETSc.Log.Stage('Stage 2') @@ -22,8 +24,8 @@ def setUp(self): self.klassA = PETSc.Log.Class('Class A') self.klassB = PETSc.Log.Class('Class B') # register events - self.event1 = PETSc.Log.Event('Event 1') # no class - self.event2 = PETSc.Log.Event('Event 2') # no class + self.event1 = PETSc.Log.Event('Event 1') # no class + self.event2 = PETSc.Log.Event('Event 2') # no class self.eventA = PETSc.Log.Event('Event A', self.klassA) self.eventB = PETSc.Log.Event('Event B', self.klassB) @@ -39,14 +41,14 @@ def testGetName(self): def testLogBeginEnd(self): # ----- - self._run_events() # in main stage - self._run_stages() # in user stages + self._run_events() # in main stage + self._run_stages() # in user stages # ----- for event in self._get_events(): event.deactivate() event.setActive(False) event.active = False - self._run_events() # should not be logged + self._run_events() # should not be logged for event in self._get_events(): event.activate() event.setActive(True) @@ -56,7 +58,7 @@ def testLogBeginEnd(self): klass.deactivate() klass.setActive(False) klass.active = False - self._run_events() # A and B should not be logged + self._run_events() # A and B should not be logged for klass in self._get_classes(): klass.activate() klass.setActive(True) @@ -70,7 +72,7 @@ def testLogBeginEnd(self): active = stage.getActive() self.assertFalse(active) self.assertFalse(stage.active) - self._run_stages() # should not be logged + self._run_stages() # should not be logged for stage in self._get_stages(): stage.setActive(True) stage.active = True @@ -92,8 +94,7 @@ def _get_classes(self): return (self.klassA, self.klassB) def _get_events(self): - return (self.event1, self.event2, - self.eventA, self.eventB) + return (self.event1, self.event2, self.eventA, self.eventB) def _run_events(self, stage=None): if stage is not None: @@ -116,4 +117,3 @@ def _events_end(self): if __name__ == '__main__': unittest.main() - diff --git 
a/src/binding/petsc4py/test/test_mat_aij.py b/src/binding/petsc4py/test/test_mat_aij.py index 8e16cfa00e4..c80e560a197 100644 --- a/src/binding/petsc4py/test/test_mat_aij.py +++ b/src/binding/petsc4py/test/test_mat_aij.py @@ -4,40 +4,51 @@ import numpy as N import numpy as np + def mkgraph(comm, m, n): - start = m*n * comm.rank - end = start + m*n + start = m * n * comm.rank + end = start + m * n idt = PETSc.IntType rows = [] - for I in range(start, end) : + for gridI in range(start, end): rows.append([]) adj = rows[-1] - i = I//n; j = I - i*n - if i> 0 : J = I-n; adj.append(J) - if j> 0 : J = I-1; adj.append(J) - adj.append(I) - if j< n-1: J = I+1; adj.append(J) - if i< m-1: J = I+n; adj.append(J) + i = gridI // n + j = gridI - i * n + if i > 0: + gridJ = gridI - n + adj.append(gridJ) + if j > 0: + gridJ = gridI - 1 + adj.append(gridJ) + adj.append(gridI) + if j < n - 1: + gridJ = gridI + 1 + adj.append(gridJ) + if i < m - 1: + gridJ = gridI + n + adj.append(gridJ) nods = N.array(range(start, end), dtype=idt) - xadj = N.array([0]*(len(rows)+1), dtype=idt) + xadj = N.array([0] * (len(rows) + 1), dtype=idt) xadj[0] = 0 xadj[1:] = N.cumsum([len(r) for r in rows], dtype=idt) - if not rows: adjy = N.array([],dtype=idt) - else: adjy = N.concatenate(rows).astype(idt) + if not rows: + adjy = N.array([], dtype=idt) + else: + adjy = N.concatenate(rows).astype(idt) return nods, xadj, adjy -class BaseTestMatAnyAIJ(object): - - COMM = PETSc.COMM_NULL - TYPE = None - GRID = 0, 0 +class BaseTestMatAnyAIJ: + COMM = PETSc.COMM_NULL + TYPE = None + GRID = 0, 0 BSIZE = None def setUp(self): - COMM = self.COMM + COMM = self.COMM GM, GN = self.GRID - BS = self.BSIZE + BS = self.BSIZE # try: rbs, cbs = BS @@ -45,14 +56,14 @@ def setUp(self): cbs = cbs or 1 except (TypeError, ValueError): rbs = cbs = BS or 1 - sdt = dtype = PETSc.ScalarType + sdt = PETSc.ScalarType self.rows, self.xadj, self.adjy = mkgraph(COMM, GM, GN) - self.vals = N.array(range(1, 1 + len(self.adjy)*rbs*cbs), dtype=sdt) + self.vals = N.array(range(1, 1 + len(self.adjy) * rbs * cbs), dtype=sdt) self.vals.shape = (-1, rbs, cbs) # m, n = GM, GN - rowsz = (m*n*rbs, None) - colsz = (m*n*cbs, None) + rowsz = (m * n * rbs, None) + colsz = (m * n * cbs, None) A = self.A = PETSc.Mat().create(comm=COMM) A.setType(self.TYPE) A.setSizes([rowsz, colsz], BS) @@ -78,7 +89,7 @@ def testSetPreallocNNZ(self): self._chk_aij(self.A, ai, aj) def testSetPreallocNNZ_2(self): - _, ai, _, _ =self._get_aijv() + _, ai, _, _ = self._get_aijv() d_nnz = N.diff(ai) nnz = [d_nnz, 3] self.A.setPreallocationNNZ(nnz) @@ -90,12 +101,13 @@ def testSetPreallocNNZ_2(self): self._chk_aij(self.A, ai, aj) opt = PETSc.Mat.Option.NEW_NONZERO_LOCATION_ERR self.A.setOption(opt, True) - ai, aj, av =self._set_values_ijv() + ai, aj, av = self._set_values_ijv() self.A.assemble() self._chk_aij(self.A, ai, aj) def testSetPreallocCSR(self): - if 'is' in self.A.getType(): return # XXX + if 'is' in self.A.getType(): + return # XXX _, ai, aj, _ = self._get_aijv() csr = [ai, aj] self.A.setPreallocationCSR(csr) @@ -111,8 +123,9 @@ def testSetPreallocCSR(self): self._chk_aij(self.A, ai, aj) def testSetPreallocCSR_2(self): - if 'is' in self.A.getType(): return # XXX - _, ai, aj, av =self._get_aijv() + if 'is' in self.A.getType(): + return # XXX + _, ai, aj, av = self._get_aijv() csr = [ai, aj, av] self.A.setPreallocationCSR(csr) self._chk_bs(self.A, self.BSIZE) @@ -153,7 +166,8 @@ def testSetValuesIJV(self): self._chk_aij(self.A, ai, aj) def testGetValuesCSR(self): - if 'is' in self.A.getType(): 
return # XXX + if 'is' in self.A.getType(): + return # XXX self._preallocate() self._set_values_ijv() A = self.A @@ -166,8 +180,8 @@ def testGetValuesCSR(self): for row in range(rstart, rend): cols, vals = A.getRow(row) i = row - rstart - self.assertTrue(N.allclose(aj[ai[i]:ai[i+1]], cols)) - self.assertTrue(N.allclose(av[ai[i]:ai[i+1]], vals)) + self.assertTrue(N.allclose(aj[ai[i] : ai[i + 1]], cols)) + self.assertTrue(N.allclose(av[ai[i] : ai[i + 1]], vals)) def testConvertToSAME(self): self._preallocate() @@ -185,7 +199,8 @@ def testConvertToDENSE(self): x.setRandom() z = y.duplicate() A.mult(x, y) - if A.type.endswith('sbaij'): return + if A.type.endswith('sbaij'): + return B = PETSc.Mat() A.convert('dense', B) # initial B.mult(x, z) @@ -193,7 +208,7 @@ def testConvertToDENSE(self): A.convert('dense', B) # reuse B.mult(x, z) self.assertTrue(np.allclose(y.array, z.array)) - A.convert('dense') # inplace + A.convert('dense') # inplace A.mult(x, z) self.assertTrue(np.allclose(y.array, z.array)) @@ -206,7 +221,8 @@ def testConvertToAIJ(self): x.setRandom() z = y.duplicate() A.mult(x, y) - if A.type.endswith('sbaij'): return + if A.type.endswith('sbaij'): + return B = PETSc.Mat() A.convert('aij', B) # initial B.mult(x, z) @@ -214,12 +230,13 @@ def testConvertToAIJ(self): A.convert('aij', B) # reuse B.mult(x, z) self.assertTrue(np.allclose(y.array, z.array)) - A.convert('aij') # inplace + A.convert('aij') # inplace A.mult(x, z) self.assertTrue(np.allclose(y.array, z.array)) def testGetDiagonalBlock(self): - if 'is' in self.A.getType(): return # XXX + if 'is' in self.A.getType(): + return # XXX self._preallocate() self._set_values_ijv() self.A.assemble() @@ -228,7 +245,8 @@ def testGetDiagonalBlock(self): B.destroy() def testInvertBlockDiagonal(self): - if 'is' in self.A.getType(): return # XXX + if 'is' in self.A.getType(): + return # XXX try: _ = len(self.BSIZE) return @@ -236,30 +254,31 @@ def testInvertBlockDiagonal(self): pass self._preallocate() rbs, cbs = self.A.getBlockSizes() - if rbs != cbs: return + if rbs != cbs: + return self._set_values_ijv() self.A.assemble() - self.A.shift(1000) # Make nonsingular + self.A.shift(1000) # Make nonsingular ibdiag = self.A.invertBlockDiagonal() bs = self.A.getBlockSize() m, _ = self.A.getLocalSize() - self.assertEqual(ibdiag.shape, (m//bs, bs, bs)) - tmp = N.empty((m//bs, bs, bs), dtype=PETSc.ScalarType) + self.assertEqual(ibdiag.shape, (m // bs, bs, bs)) + tmp = N.empty((m // bs, bs, bs), dtype=PETSc.ScalarType) rstart, rend = self.A.getOwnershipRange() - s, e = rstart//bs, rend//bs + s, e = rstart // bs, rend // bs for i in range(s, e): - rows = cols = N.arange(i*bs,(i+1)*bs, dtype=PETSc.IntType) - vals = self.A.getValues(rows,cols) - tmp[i-s,:,:] = N.linalg.inv(vals) + rows = cols = N.arange(i * bs, (i + 1) * bs, dtype=PETSc.IntType) + vals = self.A.getValues(rows, cols) + tmp[i - s, :, :] = N.linalg.inv(vals) self.assertTrue(N.allclose(ibdiag, tmp)) def testCreateSubMatrix(self): - if 'baij' in self.A.getType(): return # XXX + if 'baij' in self.A.getType(): + return # XXX self._preallocate() self._set_values_ijv() self.A.assemble() # - rank = self.A.getComm().getRank() rs, re = self.A.getOwnershipRange() cs, ce = self.A.getOwnershipRangeColumn() rows = N.array(range(rs, re), dtype=PETSc.IntType) @@ -278,8 +297,10 @@ def testCreateSubMatrix(self): S.destroy() def testCreateSubMatrices(self): - if 'baij' in self.A.getType(): return # XXX - if 'is' in self.A.getType(): return # XXX + if 'baij' in self.A.getType(): + return # XXX + if 'is' in 
self.A.getType(): + return # XXX self._preallocate() self._set_values_ijv() self.A.assemble() @@ -305,7 +326,8 @@ def testCreateSubMatrices(self): S1.destroy() S2.destroy() # - if 'seq' not in self.A.getType(): return # XXX + if 'seq' not in self.A.getType(): + return # XXX S1, S2 = self.A.createSubMatrices([rows, rows], [cols, cols]) self.assertTrue(S1.equal(S2)) S1.zeroEntries() @@ -316,13 +338,16 @@ def testCreateSubMatrices(self): S2.destroy() def testGetRedundantMatrix(self): - if 'aijcrl' in self.A.getType(): return # duplicate not supported - if 'mpisbaij' in self.A.getType(): return # not working - if 'is' in self.A.getType(): return # XXX + if 'aijcrl' in self.A.getType(): + return # duplicate not supported + if 'mpisbaij' in self.A.getType(): + return # not working + if 'is' in self.A.getType(): + return # XXX self._preallocate() self._set_values_ijv() self.A.assemble() - #Test the most simple case + # Test the most simple case sizecommA = self.A.getComm().getSize() Ared = self.A.getRedundantMatrix(sizecommA) sizecommAred = Ared.getComm().getSize() @@ -351,28 +376,34 @@ def testCreateTranspose(self): self.assertTrue(xt.equal(y)) def _get_aijv(self): - return (self.rows, self.xadj, self.adjy, self.vals,) + return ( + self.rows, + self.xadj, + self.adjy, + self.vals, + ) def _preallocate(self): self.A.setPreallocationNNZ([5, 2]) def _set_values(self): import sys + if hasattr(sys, 'gettotalrefcount'): return self._set_values_ijv() # XXX Why the code below leak refs as a beast ??? - row, ai, aj, av =self._get_aijv() + row, ai, aj, av = self._get_aijv() if not self.BSIZE: setvalues = self.A.setValues else: setvalues = self.A.setValuesBlocked for i, r in enumerate(row): - s, e = ai[i], ai[i+1] + s, e = ai[i], ai[i + 1] setvalues(r, aj[s:e], av[s:e]) return ai, aj, av def _set_values_ijv(self): - row, ai, aj, av =self._get_aijv() + row, ai, aj, av = self._get_aijv() if not self.BSIZE: setvalues = self.A.setValuesIJV else: @@ -395,252 +426,412 @@ def _chk_aij(self, A, i, j): compressed = bool(self.BSIZE) ai, aj = A.getRowIJ(compressed=compressed) if ai is not None and aj is not None: - self.assertTrue(N.all(i==ai)) - self.assertTrue(N.all(j==aj)) + self.assertTrue(N.all(i == ai)) + self.assertTrue(N.all(j == aj)) ai, aj = A.getColumnIJ(compressed=compressed) if ai is not None and aj is not None: - self.assertTrue(N.all(i==ai)) - self.assertTrue(N.all(j==aj)) + self.assertTrue(N.all(i == ai)) + self.assertTrue(N.all(j == aj)) + # -- AIJ --------------------- + class BaseTestMatAIJ(BaseTestMatAnyAIJ, unittest.TestCase): - COMM = PETSc.COMM_WORLD - TYPE = PETSc.Mat.Type.AIJ - GRID = 0, 0 + COMM = PETSc.COMM_WORLD + TYPE = PETSc.Mat.Type.AIJ + GRID = 0, 0 BSIZE = None + # -- Seq AIJ -- + class TestMatSeqAIJ(BaseTestMatAIJ): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQAIJ + + class TestMatSeqAIJ_G23(TestMatSeqAIJ): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqAIJ_G45(TestMatSeqAIJ): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqAIJ_G89(TestMatSeqAIJ): - GRID = 8, 9 + GRID = 8, 9 + # -- MPI AIJ -- + class TestMatMPIAIJ(BaseTestMatAIJ): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.MPIAIJ + + class TestMatMPIAIJ_G23(TestMatMPIAIJ): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPIAIJ_G45(TestMatMPIAIJ): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPIAIJ_G89(TestMatMPIAIJ): - GRID = 8, 9 + GRID = 8, 9 # -- Block AIJ --------------- + class BaseTestMatBAIJ(BaseTestMatAnyAIJ, unittest.TestCase): - COMM = PETSc.COMM_WORLD - TYPE = PETSc.Mat.Type.BAIJ - GRID = 0, 0 + COMM = PETSc.COMM_WORLD 
+ TYPE = PETSc.Mat.Type.BAIJ + GRID = 0, 0 BSIZE = 1 + # -- Seq Block AIJ -- + class TestMatSeqBAIJ(BaseTestMatBAIJ): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQBAIJ + + # bs = 1 class TestMatSeqBAIJ_G23(TestMatSeqBAIJ): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqBAIJ_G45(TestMatSeqBAIJ): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqBAIJ_G89(TestMatSeqBAIJ): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatSeqBAIJ_G23_B2(TestMatSeqBAIJ_G23): BSIZE = 2 + + class TestMatSeqBAIJ_G45_B2(TestMatSeqBAIJ_G45): BSIZE = 2 + + class TestMatSeqBAIJ_G89_B2(TestMatSeqBAIJ_G89): BSIZE = 2 + + # bs = 3 class TestMatSeqBAIJ_G23_B3(TestMatSeqBAIJ_G23): BSIZE = 3 + + class TestMatSeqBAIJ_G45_B3(TestMatSeqBAIJ_G45): BSIZE = 3 + + class TestMatSeqBAIJ_G89_B3(TestMatSeqBAIJ_G89): BSIZE = 3 + + # bs = 4 class TestMatSeqBAIJ_G23_B4(TestMatSeqBAIJ_G23): BSIZE = 4 + + class TestMatSeqBAIJ_G45_B4(TestMatSeqBAIJ_G45): BSIZE = 4 + + class TestMatSeqBAIJ_G89_B4(TestMatSeqBAIJ_G89): BSIZE = 4 + + # bs = 5 class TestMatSeqBAIJ_G23_B5(TestMatSeqBAIJ_G23): BSIZE = 5 + + class TestMatSeqBAIJ_G45_B5(TestMatSeqBAIJ_G45): BSIZE = 5 + + class TestMatSeqBAIJ_G89_B5(TestMatSeqBAIJ_G89): BSIZE = 5 # -- MPI Block AIJ -- + class TestMatMPIBAIJ(BaseTestMatBAIJ): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.MPIBAIJ + + # bs = 1 class TestMatMPIBAIJ_G23(TestMatMPIBAIJ): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPIBAIJ_G45(TestMatMPIBAIJ): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPIBAIJ_G89(TestMatMPIBAIJ): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatMPIBAIJ_G23_B2(TestMatMPIBAIJ_G23): BSIZE = 2 + + class TestMatMPIBAIJ_G45_B2(TestMatMPIBAIJ_G45): BSIZE = 2 + + class TestMatMPIBAIJ_G89_B2(TestMatMPIBAIJ_G89): BSIZE = 2 + + # bs = 3 class TestMatMPIBAIJ_G23_B3(TestMatMPIBAIJ_G23): BSIZE = 3 + + class TestMatMPIBAIJ_G45_B3(TestMatMPIBAIJ_G45): BSIZE = 3 + + class TestMatMPIBAIJ_G89_B3(TestMatMPIBAIJ_G89): BSIZE = 3 + + # bs = 4 class TestMatMPIBAIJ_G23_B4(TestMatMPIBAIJ_G23): BSIZE = 4 + + class TestMatMPIBAIJ_G45_B4(TestMatMPIBAIJ_G45): BSIZE = 4 + + class TestMatMPIBAIJ_G89_B4(TestMatMPIBAIJ_G89): BSIZE = 4 + + # bs = 5 class TestMatMPIBAIJ_G23_B5(TestMatMPIBAIJ_G23): BSIZE = 5 + + class TestMatMPIBAIJ_G45_B5(TestMatMPIBAIJ_G45): BSIZE = 5 + + class TestMatMPIBAIJ_G89_B5(TestMatMPIBAIJ_G89): BSIZE = 5 + # -- SymmBlock AIJ --------------- + class BaseTestMatSBAIJ(BaseTestMatAnyAIJ, unittest.TestCase): - COMM = PETSc.COMM_WORLD - TYPE = PETSc.Mat.Type.SBAIJ - GRID = 0, 0 + COMM = PETSc.COMM_WORLD + TYPE = PETSc.Mat.Type.SBAIJ + GRID = 0, 0 BSIZE = 1 - def testInvertBlockDiagonal(self): pass + + def testInvertBlockDiagonal(self): + pass + def _chk_aij(self, A, i, j): ai, aj = A.getRowIJ(compressed=True) if ai is not None and aj is not None: - if 0: # XXX Implement - self.assertTrue(N.all(i==ai)) - self.assertTrue(N.all(j==aj)) + if 0: # XXX Implement + self.assertTrue(N.all(i == ai)) + self.assertTrue(N.all(j == aj)) ai, aj = A.getColumnIJ(compressed=True) if ai is not None and aj is not None: - if 0: # XXX Implement - self.assertTrue(N.all(i==ai)) - self.assertTrue(N.all(j==aj)) + if 0: # XXX Implement + self.assertTrue(N.all(i == ai)) + self.assertTrue(N.all(j == aj)) + # -- Seq SymmBlock AIJ -- + class TestMatSeqSBAIJ(BaseTestMatSBAIJ): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQSBAIJ + + # bs = 1 class TestMatSeqSBAIJ_G23(TestMatSeqSBAIJ): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqSBAIJ_G45(TestMatSeqSBAIJ): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqSBAIJ_G89(TestMatSeqSBAIJ): - 
GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatSeqSBAIJ_G23_B2(TestMatSeqSBAIJ_G23): BSIZE = 2 + + class TestMatSeqSBAIJ_G45_B2(TestMatSeqSBAIJ_G45): BSIZE = 2 + + class TestMatSeqSBAIJ_G89_B2(TestMatSeqSBAIJ_G89): BSIZE = 2 + + # bs = 3 class TestMatSeqSBAIJ_G23_B3(TestMatSeqSBAIJ_G23): BSIZE = 3 + + class TestMatSeqSBAIJ_G45_B3(TestMatSeqSBAIJ_G45): BSIZE = 3 + + class TestMatSeqSBAIJ_G89_B3(TestMatSeqSBAIJ_G89): BSIZE = 3 + + # bs = 4 class TestMatSeqSBAIJ_G23_B4(TestMatSeqSBAIJ_G23): BSIZE = 4 + + class TestMatSeqSBAIJ_G45_B4(TestMatSeqSBAIJ_G45): BSIZE = 4 + + class TestMatSeqSBAIJ_G89_B4(TestMatSeqSBAIJ_G89): BSIZE = 4 + + # bs = 5 class TestMatSeqSBAIJ_G23_B5(TestMatSeqSBAIJ_G23): BSIZE = 5 + + class TestMatSeqSBAIJ_G45_B5(TestMatSeqSBAIJ_G45): BSIZE = 5 + + class TestMatSeqSBAIJ_G89_B5(TestMatSeqSBAIJ_G89): BSIZE = 5 # -- MPI SymmBlock AIJ -- + class TestMatMPISBAIJ(BaseTestMatSBAIJ): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.MPISBAIJ + + # bs = 1 class TestMatMPISBAIJ_G23(TestMatMPISBAIJ): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPISBAIJ_G45(TestMatMPISBAIJ): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPISBAIJ_G89(TestMatMPISBAIJ): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatMPISBAIJ_G23_B2(TestMatMPISBAIJ_G23): BSIZE = 2 + + class TestMatMPISBAIJ_G45_B2(TestMatMPISBAIJ_G45): BSIZE = 2 + + class TestMatMPISBAIJ_G89_B2(TestMatMPISBAIJ_G89): BSIZE = 2 + + # bs = 3 class TestMatMPISBAIJ_G23_B3(TestMatMPISBAIJ_G23): BSIZE = 3 + + class TestMatMPISBAIJ_G45_B3(TestMatMPISBAIJ_G45): BSIZE = 3 + + class TestMatMPISBAIJ_G89_B3(TestMatMPISBAIJ_G89): BSIZE = 3 + + # bs = 4 class TestMatMPISBAIJ_G23_B4(TestMatMPISBAIJ_G23): BSIZE = 4 + + class TestMatMPISBAIJ_G45_B4(TestMatMPISBAIJ_G45): BSIZE = 4 + + class TestMatMPISBAIJ_G89_B4(TestMatMPISBAIJ_G89): BSIZE = 4 + + # bs = 5 class TestMatMPISBAIJ_G23_B5(TestMatMPISBAIJ_G23): BSIZE = 5 + + class TestMatMPISBAIJ_G45_B5(TestMatMPISBAIJ_G45): BSIZE = 5 + + class TestMatMPISBAIJ_G89_B5(TestMatMPISBAIJ_G89): BSIZE = 5 + # -- AIJ + Block --------------- + class BaseTestMatAIJ_B(BaseTestMatAnyAIJ, unittest.TestCase): - COMM = PETSc.COMM_WORLD - TYPE = PETSc.Mat.Type.AIJ - GRID = 0, 0 + COMM = PETSc.COMM_WORLD + TYPE = PETSc.Mat.Type.AIJ + GRID = 0, 0 BSIZE = 1 - def testSetPreallocNNZ(self):pass - def testSetPreallocNNZ_2(self):pass - def testSetPreallocCSR(self):pass - def testSetPreallocCSR_2(self):pass + def testSetPreallocNNZ(self): + pass + + def testSetPreallocNNZ_2(self): + pass + + def testSetPreallocCSR(self): + pass + + def testSetPreallocCSR_2(self): + pass + def testSetValues(self): self._preallocate() opt = PETSc.Mat.Option.NEW_NONZERO_ALLOCATION_ERR @@ -653,6 +844,7 @@ def testSetValues(self): ai, aj, av = self._set_values() self.A.assemble() self._chk_aij(self.A, ai, aj) + def testSetValuesIJV(self): self._preallocate() opt = PETSc.Mat.Option.NEW_NONZERO_ALLOCATION_ERR @@ -665,110 +857,175 @@ def testSetValuesIJV(self): ai, aj, av = self._set_values_ijv() self.A.assemble() self._chk_aij(self.A, ai, aj) + def _preallocate(self): - self.A.setPreallocationNNZ([5*self.BSIZE, 3*self.BSIZE]) + self.A.setPreallocationNNZ([5 * self.BSIZE, 3 * self.BSIZE]) self._chk_bs(self.A, self.BSIZE) + def _chk_aij(self, A, i, j): - bs = self.BSIZE or 1 ai, aj = A.getRowIJ() if ai is not None and aj is not None: ## XXX map and check !! 
- #self.assertTrue(N.all(i==ai)) - #self.assertTrue(N.all(j==aj)) + # self.assertTrue(N.all(i==ai)) + # self.assertTrue(N.all(j==aj)) pass ai, aj = A.getColumnIJ(compressed=bool(self.BSIZE)) - if ai is not None and aj is not None: ## XXX map and check !! - #self.assertTrue(N.all(i==ai)) - #self.assertTrue(N.all(j==aj)) + if ai is not None and aj is not None: ## XXX map and check !! + # self.assertTrue(N.all(i==ai)) + # self.assertTrue(N.all(j==aj)) pass + # -- Seq AIJ + Block -- + class TestMatSeqAIJ_B(BaseTestMatAIJ_B): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQAIJ + + # bs = 1 class TestMatSeqAIJ_B_G23(TestMatSeqAIJ_B): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqAIJ_B_G45(TestMatSeqAIJ_B): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqAIJ_B_G89(TestMatSeqAIJ_B): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatSeqAIJ_B_G23_B2(TestMatSeqAIJ_B_G23): BSIZE = 2 + + class TestMatSeqAIJ_B_G45_B2(TestMatSeqAIJ_B_G45): BSIZE = 2 + + class TestMatSeqAIJ_B_G89_B2(TestMatSeqAIJ_B_G89): BSIZE = 2 + + # bs = 3 class TestMatSeqAIJ_B_G23_B3(TestMatSeqAIJ_B_G23): BSIZE = 3 + + class TestMatSeqAIJ_B_G45_B3(TestMatSeqAIJ_B_G45): BSIZE = 3 + + class TestMatSeqAIJ_B_G89_B3(TestMatSeqAIJ_B_G89): BSIZE = 3 + + # bs = 4 class TestMatSeqAIJ_B_G23_B4(TestMatSeqAIJ_B_G23): BSIZE = 4 + + class TestMatSeqAIJ_B_G45_B4(TestMatSeqAIJ_B_G45): BSIZE = 4 + + class TestMatSeqAIJ_B_G89_B4(TestMatSeqAIJ_B_G89): BSIZE = 4 + + # bs = 5 class TestMatSeqAIJ_B_G23_B5(TestMatSeqAIJ_B_G23): BSIZE = 5 + + class TestMatSeqAIJ_B_G45_B5(TestMatSeqAIJ_B_G45): BSIZE = 5 + + class TestMatSeqAIJ_B_G89_B5(TestMatSeqAIJ_B_G89): BSIZE = 5 # -- MPI AIJ + Block -- + class TestMatMPIAIJ_B(BaseTestMatAIJ_B): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.MPIAIJ + + # bs = 1 class TestMatMPIAIJ_B_G23(TestMatMPIAIJ_B): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPIAIJ_B_G45(TestMatMPIAIJ_B): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPIAIJ_B_G89(TestMatMPIAIJ_B): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatMPIAIJ_B_G23_B2(TestMatMPIAIJ_B_G23): BSIZE = 2 + + class TestMatMPIAIJ_B_G45_B2(TestMatMPIAIJ_B_G45): BSIZE = 2 + + class TestMatMPIAIJ_B_G89_B2(TestMatMPIAIJ_B_G89): BSIZE = 2 + + # bs = 3 class TestMatMPIAIJ_B_G23_B3(TestMatMPIAIJ_B_G23): BSIZE = 3 + + class TestMatMPIAIJ_B_G45_B3(TestMatMPIAIJ_B_G45): BSIZE = 3 + + class TestMatMPIAIJ_B_G89_B3(TestMatMPIAIJ_B_G89): BSIZE = 3 + + # bs = 4 class TestMatMPIAIJ_B_G23_B4(TestMatMPIAIJ_B_G23): BSIZE = 4 + + class TestMatMPIAIJ_B_G45_B4(TestMatMPIAIJ_B_G45): BSIZE = 4 + + class TestMatMPIAIJ_B_G89_B4(TestMatMPIAIJ_B_G89): BSIZE = 4 + + # bs = 5 class TestMatMPIAIJ_B_G23_B5(TestMatMPIAIJ_B_G23): BSIZE = 5 + + class TestMatMPIAIJ_B_G45_B5(TestMatMPIAIJ_B_G45): BSIZE = 5 + + class TestMatMPIAIJ_B_G89_B5(TestMatMPIAIJ_B_G89): BSIZE = 5 + # -- Non-square blocks -- class BaseTestMatAIJ_B(BaseTestMatAnyAIJ, unittest.TestCase): - COMM = PETSc.COMM_WORLD - TYPE = PETSc.Mat.Type.AIJ - GRID = 0, 0 + COMM = PETSc.COMM_WORLD + TYPE = PETSc.Mat.Type.AIJ + GRID = 0, 0 BSIZE = 4, 2 def _preallocate(self): @@ -776,12 +1033,21 @@ def _preallocate(self): rbs, cbs = self.BSIZE except (TypeError, ValueError): rbs = cbs = self.BSIZE - self.A.setPreallocationNNZ([5*rbs, 3*cbs]) + self.A.setPreallocationNNZ([5 * rbs, 3 * cbs]) self._chk_bsizes(self.A, self.BSIZE) - def testSetPreallocNNZ(self):pass - def testSetPreallocNNZ_2(self):pass - def testSetPreallocCSR(self):pass - def testSetPreallocCSR_2(self):pass + + def testSetPreallocNNZ(self): + pass + + def testSetPreallocNNZ_2(self): + pass 
+ + def testSetPreallocCSR(self): + pass + + def testSetPreallocCSR_2(self): + pass + def testSetValues(self): self._preallocate() opt = PETSc.Mat.Option.NEW_NONZERO_ALLOCATION_ERR @@ -794,6 +1060,7 @@ def testSetValues(self): ai, aj, av = self._set_values() self.A.assemble() self._chk_aij(self.A, ai, aj) + def testSetValuesIJV(self): self._preallocate() opt = PETSc.Mat.Option.NEW_NONZERO_ALLOCATION_ERR @@ -806,150 +1073,241 @@ def testSetValuesIJV(self): ai, aj, av = self._set_values_ijv() self.A.assemble() self._chk_aij(self.A, ai, aj) + def _chk_aij(self, A, i, j): - bs = self.BSIZE or 1 ai, aj = A.getRowIJ() if ai is not None and aj is not None: ## XXX map and check !! - #self.assertTrue(N.all(i==ai)) - #self.assertTrue(N.all(j==aj)) + # self.assertTrue(N.all(i==ai)) + # self.assertTrue(N.all(j==aj)) pass ai, aj = A.getColumnIJ() - if ai is not None and aj is not None: ## XXX map and check !! - #self.assertTrue(N.all(i==ai)) - #self.assertTrue(N.all(j==aj)) + if ai is not None and aj is not None: ## XXX map and check !! + # self.assertTrue(N.all(i==ai)) + # self.assertTrue(N.all(j==aj)) pass + # -- AIJCRL --------------------- + class BaseTestMatAIJCRL(BaseTestMatAIJ, unittest.TestCase): - TYPE = PETSc.Mat.Type.AIJCRL + TYPE = PETSc.Mat.Type.AIJCRL + # -- Seq AIJCRL -- + class TestMatSeqAIJCRL(BaseTestMatAIJCRL): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQAIJCRL + + class TestMatSeqAIJCRL_G23(TestMatSeqAIJCRL): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqAIJCRL_G45(TestMatSeqAIJCRL): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqAIJCRL_G89(TestMatSeqAIJCRL): - GRID = 8, 9 + GRID = 8, 9 + # -- MPI AIJCRL -- + class TestMatMPIAIJCRL(BaseTestMatAIJCRL): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.MPIAIJCRL + + class TestMatMPIAIJCRL_G23(TestMatMPIAIJCRL): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPIAIJCRL_G45(TestMatMPIAIJCRL): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPIAIJCRL_G89(TestMatMPIAIJCRL): - GRID = 8, 9 + GRID = 8, 9 + # -- AIJCRL + Block ------------- + class BaseTestMatAIJCRL_B(BaseTestMatAIJ_B, unittest.TestCase): - TYPE = PETSc.Mat.Type.AIJCRL + TYPE = PETSc.Mat.Type.AIJCRL + # -- Seq AIJCRL + Block -- + class TestMatSeqAIJCRL_B(BaseTestMatAIJCRL_B): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQAIJCRL + + # bs = 1 class TestMatSeqAIJCRL_B_G23(TestMatSeqAIJCRL_B): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqAIJCRL_B_G45(TestMatSeqAIJCRL_B): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqAIJCRL_B_G89(TestMatSeqAIJCRL_B): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatSeqAIJCRL_B_G23_B2(TestMatSeqAIJCRL_B_G23): BSIZE = 2 + + class TestMatSeqAIJCRL_B_G45_B2(TestMatSeqAIJCRL_B_G45): BSIZE = 2 + + class TestMatSeqAIJCRL_B_G89_B2(TestMatSeqAIJCRL_B_G89): BSIZE = 2 + + # bs = 3 class TestMatSeqAIJCRL_B_G23_B3(TestMatSeqAIJCRL_B_G23): BSIZE = 3 + + class TestMatSeqAIJCRL_B_G45_B3(TestMatSeqAIJCRL_B_G45): BSIZE = 3 + + class TestMatSeqAIJCRL_B_G89_B3(TestMatSeqAIJCRL_B_G89): BSIZE = 3 + + # bs = 4 class TestMatSeqAIJCRL_B_G23_B4(TestMatSeqAIJCRL_B_G23): BSIZE = 4 + + class TestMatSeqAIJCRL_B_G45_B4(TestMatSeqAIJCRL_B_G45): BSIZE = 4 + + class TestMatSeqAIJCRL_B_G89_B4(TestMatSeqAIJCRL_B_G89): BSIZE = 4 + + # bs = 5 class TestMatSeqAIJCRL_B_G23_B5(TestMatSeqAIJCRL_B_G23): BSIZE = 5 + + class TestMatSeqAIJCRL_B_G45_B5(TestMatSeqAIJCRL_B_G45): BSIZE = 5 + + class TestMatSeqAIJCRL_B_G89_B5(TestMatSeqAIJCRL_B_G89): BSIZE = 5 # -- MPI AIJCRL + Block -- + class TestMatMPIAIJCRL_B(BaseTestMatAIJCRL_B): COMM = PETSc.COMM_WORLD TYPE = 
PETSc.Mat.Type.MPIAIJCRL + + # bs = 1 class TestMatMPIAIJCRL_B_G23(TestMatMPIAIJCRL_B): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPIAIJCRL_B_G45(TestMatMPIAIJCRL_B): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPIAIJCRL_B_G89(TestMatMPIAIJCRL_B): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatMPIAIJCRL_B_G23_B2(TestMatMPIAIJCRL_B_G23): BSIZE = 2 + + class TestMatMPIAIJCRL_B_G45_B2(TestMatMPIAIJCRL_B_G45): BSIZE = 2 + + class TestMatMPIAIJCRL_B_G89_B2(TestMatMPIAIJCRL_B_G89): BSIZE = 2 + + # bs = 3 class TestMatMPIAIJCRL_B_G23_B3(TestMatMPIAIJCRL_B_G23): BSIZE = 3 + + class TestMatMPIAIJCRL_B_G45_B3(TestMatMPIAIJCRL_B_G45): BSIZE = 3 + + class TestMatMPIAIJCRL_B_G89_B3(TestMatMPIAIJCRL_B_G89): BSIZE = 3 + + # bs = 4 class TestMatMPIAIJCRL_B_G23_B4(TestMatMPIAIJCRL_B_G23): BSIZE = 4 + + class TestMatMPIAIJCRL_B_G45_B4(TestMatMPIAIJCRL_B_G45): BSIZE = 4 + + class TestMatMPIAIJCRL_B_G89_B4(TestMatMPIAIJCRL_B_G89): BSIZE = 4 + + # bs = 5 class TestMatMPIAIJCRL_B_G23_B5(TestMatMPIAIJCRL_B_G23): BSIZE = 5 + + class TestMatMPIAIJCRL_B_G45_B5(TestMatMPIAIJCRL_B_G45): BSIZE = 5 + + class TestMatMPIAIJCRL_B_G89_B5(TestMatMPIAIJCRL_B_G89): BSIZE = 5 + # -- MATIS -- + class TestMatIS(BaseTestMatAIJ): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.IS + + class TestMatIS_G23(TestMatIS): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatIS_G45(TestMatIS): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatIS_G89(TestMatIS): - GRID = 8, 9 + GRID = 8, 9 -# ----- +# ----- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_mat_dense.py b/src/binding/petsc4py/test/test_mat_dense.py index 58560ff63da..58a87f669f6 100644 --- a/src/binding/petsc4py/test/test_mat_dense.py +++ b/src/binding/petsc4py/test/test_mat_dense.py @@ -3,34 +3,35 @@ import numpy as np + def mkdata(comm, m, N, bs): start = m * comm.rank - end = start + m + end = start + m idt = PETSc.IntType sdt = PETSc.ScalarType rows = np.array(range(start, end), dtype=idt) - cols = np.array(range(0, N), dtype=idt) - vals = np.array(range(0, m*N*bs*bs), dtype=sdt) + cols = np.array(range(N), dtype=idt) + vals = np.array(range(m * N * bs * bs), dtype=sdt) vals.shape = (-1, bs, bs) return rows, cols, vals -class BaseTestMatAnyDense(object): - - COMM = PETSc.COMM_NULL - GRID = 0, 0 +class BaseTestMatAnyDense: + COMM = PETSc.COMM_NULL + GRID = 0, 0 BSIZE = None - TYPE = PETSc.Mat.Type.DENSE + TYPE = PETSc.Mat.Type.DENSE def setUp(self): - COMM = self.COMM + COMM = self.COMM GM, GN = self.GRID - BS = self.BSIZE #or 1 + BS = self.BSIZE # or 1 # self.A = PETSc.Mat().create(comm=COMM) - bs = BS or 1; m, N = GM, GN; - rowsz = (m*bs, None) - colsz = (None, N*bs) + bs = BS or 1 + m, N = GM, GN + rowsz = (m * bs, None) + colsz = (None, N * bs) self.A.setSizes([rowsz, colsz], BS) self.A.setType(self.TYPE) @@ -51,7 +52,8 @@ def testSetValues(self): def testGetDiagonalBlock(self): M, N = self.A.getSize() # only for square matrices - if M != N: return + if M != N: + return self._preallocate() self._set_values() self.A.assemble() @@ -84,9 +86,9 @@ def _preallocate(self): self.A.setPreallocationDense(None) def _set_values(self): - COMM = self.COMM + COMM = self.COMM GM, GN = self.GRID - BS = self.BSIZE or 1 + BS = self.BSIZE or 1 rows, cols, vals = mkdata(COMM, GM, GN, BS) if not self.BSIZE: setvalues = self.A.setValues @@ -99,7 +101,7 @@ def _chk_bs(self, A, bs): self.assertEqual(A.getBlockSize(), bs or 1) def _chk_array(self, A, r, c, v): - return # XXX + return # XXX vals = self.A.getValues(r, c) vals.shape = v.shape self.assertTrue(np.allclose(vals, 
v)) @@ -107,145 +109,242 @@ def _chk_array(self, A, r, c, v): # -- Dense --------------------- + class BaseTestMatDense(BaseTestMatAnyDense, unittest.TestCase): - COMM = PETSc.COMM_WORLD - GRID = 0, 0 + COMM = PETSc.COMM_WORLD + GRID = 0, 0 BSIZE = None + # -- Seq Dense -- + class TestMatSeqDense(BaseTestMatDense): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQDENSE + + class TestMatSeqDense_G23(TestMatSeqDense): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqDense_G45(TestMatSeqDense): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqDense_G77(TestMatSeqDense): - GRID = 7, 7 + GRID = 7, 7 + + class TestMatSeqDense_G89(TestMatSeqDense): - GRID = 8, 9 + GRID = 8, 9 + # -- MPI Dense -- + class TestMatMPIDense(BaseTestMatDense): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.MPIDENSE + + class TestMatMPIDense_G23(TestMatMPIDense): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPIDense_G45(TestMatMPIDense): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPIDense_G77(TestMatMPIDense): - GRID = 7, 7 + GRID = 7, 7 + + class TestMatMPIDense_G89(TestMatMPIDense): - GRID = 8, 9 + GRID = 8, 9 # -- Dense + Block --------------- + class BaseTestMatDense_B(BaseTestMatAnyDense, unittest.TestCase): - COMM = PETSc.COMM_WORLD - GRID = 0, 0 + COMM = PETSc.COMM_WORLD + GRID = 0, 0 BSIZE = 1 + def _preallocate(self): - #self.A.setBlockSize(self.BSIZE) + # self.A.setBlockSize(self.BSIZE) self.A.setPreallocationDense(None) - #self.A.setBlockSize(self.BSIZE) + # self.A.setBlockSize(self.BSIZE) self._chk_bs(self.A, self.BSIZE) + # -- Seq Dense + Block -- + class TestMatSeqDense_B(BaseTestMatDense_B): COMM = PETSc.COMM_SELF TYPE = PETSc.Mat.Type.SEQDENSE + + # bs = 1 class TestMatSeqDense_B_G23(TestMatSeqDense_B): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatSeqDense_B_G45(TestMatSeqDense_B): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatSeqDense_B_G89(TestMatSeqDense_B): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatSeqDense_B_G23_B2(TestMatSeqDense_B_G23): BSIZE = 2 + + class TestMatSeqDense_B_G45_B2(TestMatSeqDense_B_G45): BSIZE = 2 + + class TestMatSeqDense_B_G89_B2(TestMatSeqDense_B_G89): BSIZE = 2 + + # bs = 3 class TestMatSeqDense_B_G23_B3(TestMatSeqDense_B_G23): BSIZE = 3 + + class TestMatSeqDense_B_G45_B3(TestMatSeqDense_B_G45): BSIZE = 3 + + class TestMatSeqDense_B_G89_B3(TestMatSeqDense_B_G89): BSIZE = 3 + + # bs = 4 class TestMatSeqDense_B_G23_B4(TestMatSeqDense_B_G23): BSIZE = 4 + + class TestMatSeqDense_B_G45_B4(TestMatSeqDense_B_G45): BSIZE = 4 + + class TestMatSeqDense_B_G89_B4(TestMatSeqDense_B_G89): BSIZE = 4 + + # bs = 5 class TestMatSeqDense_B_G23_B5(TestMatSeqDense_B_G23): BSIZE = 5 + + class TestMatSeqDense_B_G45_B5(TestMatSeqDense_B_G45): BSIZE = 5 + + class TestMatSeqDense_B_G89_B5(TestMatSeqDense_B_G89): BSIZE = 5 # -- MPI Dense + Block -- + class TestMatMPIDense_B(BaseTestMatDense_B): COMM = PETSc.COMM_WORLD TYPE = PETSc.Mat.Type.MPIDENSE + + # bs = 1 class TestMatMPIDense_B_G23(TestMatMPIDense_B): - GRID = 2, 3 + GRID = 2, 3 + + class TestMatMPIDense_B_G45(TestMatMPIDense_B): - GRID = 4, 5 + GRID = 4, 5 + + class TestMatMPIDense_B_G77(TestMatMPIDense_B): - GRID = 7, 7 + GRID = 7, 7 + + class TestMatMPIDense_B_G89(TestMatMPIDense_B): - GRID = 8, 9 + GRID = 8, 9 + + # bs = 2 class TestMatMPIDense_B_G23_B2(TestMatMPIDense_B_G23): BSIZE = 2 + + class TestMatMPIDense_B_G45_B2(TestMatMPIDense_B_G45): BSIZE = 2 + + class TestMatMPIDense_B_G77_B2(TestMatMPIDense_B_G77): BSIZE = 2 + + class TestMatMPIDense_B_G89_B2(TestMatMPIDense_B_G89): BSIZE = 2 + + # bs = 3 class 
TestMatMPIDense_B_G23_B3(TestMatMPIDense_B_G23): BSIZE = 3 + + class TestMatMPIDense_B_G45_B3(TestMatMPIDense_B_G45): BSIZE = 3 + + class TestMatMPIDense_B_G77_B3(TestMatMPIDense_B_G77): BSIZE = 3 + + class TestMatMPIDense_B_G89_B3(TestMatMPIDense_B_G89): BSIZE = 3 + + # bs = 4 class TestMatMPIDense_B_G23_B4(TestMatMPIDense_B_G23): BSIZE = 4 + + class TestMatMPIDense_B_G45_B4(TestMatMPIDense_B_G45): BSIZE = 4 + + class TestMatMPIDense_B_G77_B4(TestMatMPIDense_B_G77): BSIZE = 4 + + class TestMatMPIDense_B_G89_B4(TestMatMPIDense_B_G89): BSIZE = 4 + + # bs = 5 class TestMatMPIDense_B_G23_B5(TestMatMPIDense_B_G23): BSIZE = 5 + + class TestMatMPIDense_B_G45_B5(TestMatMPIDense_B_G45): BSIZE = 5 + + class TestMatMPIDense_B_G77_B5(TestMatMPIDense_B_G77): BSIZE = 5 + + class TestMatMPIDense_B_G89_B5(TestMatMPIDense_B_G89): BSIZE = 5 + # ----- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_mat_fact.py b/src/binding/petsc4py/test/test_mat_fact.py index 622ba3e0235..b5ccc0712f4 100644 --- a/src/binding/petsc4py/test/test_mat_fact.py +++ b/src/binding/petsc4py/test/test_mat_fact.py @@ -1,53 +1,54 @@ from petsc4py import PETSc import unittest -import numpy as N def mkmat(n, mtype, opts): A = PETSc.Mat().create(PETSc.COMM_SELF) - A.setSizes([n,n]) + A.setSizes([n, n]) A.setType(mtype) A.setUp() for o in opts: A.setOption(o, True) return A + def mksys_diag(n, mtype, opts): A = mkmat(n, mtype, opts) x, b = A.createVecs() for i in range(n): - A[i,i] = i+1 - x[i] = 1.0/(i+1) - b[i] = 1 + A[i, i] = i + 1 + x[i] = 1.0 / (i + 1) + b[i] = 1 A.assemble() x.assemble() b.assemble() return A, x, b + def mksys_poi2(n, mtype, opts): A = mkmat(n, mtype, opts) x, b = A.createVecs() for i in range(n): if i == 0: - cols = [i, i+1] + cols = [i, i + 1] vals = [2, -1] - elif i == n-1: - cols = [i-1, i] - vals = [-1, 2] + elif i == n - 1: + cols = [i - 1, i] + vals = [-1, 2] else: - cols = [i-1, i, i+1] - vals = [-1, 2, -1] - A[i,cols] = vals - x[i] = i+1 - b[i] = 0 + cols = [i - 1, i, i + 1] + vals = [-1, 2, -1] + A[i, cols] = vals + x[i] = i + 1 + b[i] = 0 A.assemble() x.assemble() b.assemble() - A.mult(x,b) + A.mult(x, b) return A, x, b -class BaseTestMatFactor(object): +class BaseTestMatFactor: MKSYS = None MTYPE = None MOPTS = () @@ -60,32 +61,36 @@ def setUp(self): def tearDown(self): self.A.setUnfactored() - self.A.destroy(); self.A = None - self.x.destroy(); self.x = None - self.b.destroy(); self.b = None + self.A.destroy() + self.A = None + self.x.destroy() + self.x = None + self.b.destroy() + self.b = None PETSc.garbage_cleanup() -class BaseTestMatFactorLU(BaseTestMatFactor): +class BaseTestMatFactorLU(BaseTestMatFactor): def testFactorLU(self): - r, c = self.A.getOrdering("nd") + r, c = self.A.getOrdering('nd') self.A.reorderForNonzeroDiagonal(r, c) - self.A.factorLU(r,c,{'zeropivot':1e-5}) + self.A.factorLU(r, c, {'zeropivot': 1e-5}) x = self.x.duplicate() self.A.solve(self.b, x) x.axpy(-1, self.x) self.assertTrue(x.norm() < 1e-3) -class BaseTestMatFactorILU(BaseTestMatFactor): +class BaseTestMatFactorILU(BaseTestMatFactor): def testFactorILU(self): - r, c = self.A.getOrdering("natural") - self.A.factorILU(r,c,{'levels':0}) + r, c = self.A.getOrdering('natural') + self.A.factorILU(r, c, {'levels': 0}) x = self.x.duplicate() self.A.solve(self.b, x) x.axpy(-1, self.x) self.assertTrue(x.norm() < 1e-3) + ## class BaseTestMatFactorILUDT(BaseTestMatFactor): ## ## def testFactorILUDT(self): @@ -97,19 +102,18 @@ def testFactorILU(self): ## self.assertTrue(x.norm() < 1e-3) ## class 
BaseTestMatFactorChol(BaseTestMatFactor): - def testFactorChol(self): - r, c = self.A.getOrdering("natural") + r, c = self.A.getOrdering('natural') self.A.factorCholesky(r) x = self.x.duplicate() self.A.solve(self.b, x) x.axpy(-1, self.x) self.assertTrue(x.norm() < 1e-3) -class BaseTestMatFactorICC(BaseTestMatFactor): +class BaseTestMatFactorICC(BaseTestMatFactor): def testFactorICC(self): - r, c = self.A.getOrdering("natural") + r, c = self.A.getOrdering('natural') self.A.factorICC(r) x = self.x.duplicate() self.A.solve(self.b, x) @@ -119,64 +123,68 @@ def testFactorICC(self): # -------------------------------------------------------------------- -class TestMatFactorA1(BaseTestMatFactorLU, - BaseTestMatFactorChol, - unittest.TestCase): + +class TestMatFactorA1(BaseTestMatFactorLU, BaseTestMatFactorChol, unittest.TestCase): MKSYS = staticmethod(mksys_diag) MTYPE = PETSc.Mat.Type.SEQDENSE -class TestMatFactorA2(BaseTestMatFactorLU, - BaseTestMatFactorChol, - unittest.TestCase): + +class TestMatFactorA2(BaseTestMatFactorLU, BaseTestMatFactorChol, unittest.TestCase): MKSYS = staticmethod(mksys_poi2) MTYPE = PETSc.Mat.Type.SEQDENSE + # --- -class TestMatFactorB1(BaseTestMatFactorLU, - BaseTestMatFactorILU, - ## BaseTestMatFactorILUDT, - unittest.TestCase): + +class TestMatFactorB1( + BaseTestMatFactorLU, + BaseTestMatFactorILU, + ## BaseTestMatFactorILUDT, + unittest.TestCase, +): MKSYS = staticmethod(mksys_diag) MTYPE = PETSc.Mat.Type.SEQAIJ -class TestMatFactorB2(BaseTestMatFactorLU, - BaseTestMatFactorILU, - ## BaseTestMatFactorILUDT, - unittest.TestCase): + +class TestMatFactorB2( + BaseTestMatFactorLU, + BaseTestMatFactorILU, + ## BaseTestMatFactorILUDT, + unittest.TestCase, +): MKSYS = staticmethod(mksys_poi2) MTYPE = PETSc.Mat.Type.SEQAIJ + # --- -class TestMatFactorC1(BaseTestMatFactorLU, - BaseTestMatFactorILU, - unittest.TestCase): + +class TestMatFactorC1(BaseTestMatFactorLU, BaseTestMatFactorILU, unittest.TestCase): MKSYS = staticmethod(mksys_diag) MTYPE = PETSc.Mat.Type.SEQBAIJ -class TestMatFactorC2(BaseTestMatFactorLU, - BaseTestMatFactorILU, - unittest.TestCase): + +class TestMatFactorC2(BaseTestMatFactorLU, BaseTestMatFactorILU, unittest.TestCase): MKSYS = staticmethod(mksys_poi2) MTYPE = PETSc.Mat.Type.SEQBAIJ + # --- -class TestMatFactorD1(BaseTestMatFactorChol, - BaseTestMatFactorICC, - unittest.TestCase): + +class TestMatFactorD1(BaseTestMatFactorChol, BaseTestMatFactorICC, unittest.TestCase): MKSYS = staticmethod(mksys_diag) MTYPE = PETSc.Mat.Type.SEQSBAIJ MOPTS = [PETSc.Mat.Option.IGNORE_LOWER_TRIANGULAR] -class TestMatFactorD2(BaseTestMatFactorChol, - BaseTestMatFactorICC, - unittest.TestCase): + +class TestMatFactorD2(BaseTestMatFactorChol, BaseTestMatFactorICC, unittest.TestCase): MKSYS = staticmethod(mksys_poi2) MTYPE = PETSc.Mat.Type.SEQSBAIJ MOPTS = [PETSc.Mat.Option.IGNORE_LOWER_TRIANGULAR] + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_mat_py.py b/src/binding/petsc4py/test/test_mat_py.py index 32232805f45..ca52f2e3776 100644 --- a/src/binding/petsc4py/test/test_mat_py.py +++ b/src/binding/petsc4py/test/test_mat_py.py @@ -1,9 +1,12 @@ from petsc4py import PETSc -import unittest, numpy +import unittest +import numpy from sys import getrefcount # -------------------------------------------------------------------- -class Matrix(object): + +class Matrix: + setupcalled = 0 def __init__(self): pass @@ -14,8 +17,10 @@ def create(self, mat): def destroy(self, mat): pass 
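Note: the factorization tests reformatted above all follow the same petsc4py flow: obtain row/column orderings, optionally reorder to avoid zero pivots, factor in place, then solve with the factored matrix. A minimal sketch of that flow, using an illustrative 5x5 diagonal system rather than the suite's mksys_* helpers:

    from petsc4py import PETSc

    n = 5
    A = PETSc.Mat().create(PETSc.COMM_SELF)
    A.setSizes([n, n])
    A.setType(PETSc.Mat.Type.SEQAIJ)
    A.setUp()
    for i in range(n):
        A[i, i] = i + 1              # simple nonsingular diagonal
    A.assemble()

    b = A.createVecLeft()
    b.set(1.0)
    x = b.duplicate()

    r, c = A.getOrdering('nd')             # nested-dissection orderings
    A.reorderForNonzeroDiagonal(r, c)      # guard against zero pivots
    A.factorLU(r, c, {'zeropivot': 1e-5})  # in-place LU factorization
    A.solve(b, x)                          # triangular solves with the factors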
-class ScaledIdentity(Matrix): + def setUp(self, mat): + self.setupcalled += 1 +class ScaledIdentity(Matrix): s = 2.0 def scale(self, mat, s): @@ -33,8 +38,8 @@ def duplicate(self, mat, op): dctx = ScaledIdentity() dmat.createPython(mat.getSizes(), dctx, comm=mat.getComm()) if op == PETSc.Mat.DuplicateOption.COPY_VALUES: - dctx.s = self.s - dmat.setUp() + dctx.s = self.s + dmat.setUp() return dmat def getDiagonal(self, mat, vd): @@ -45,13 +50,13 @@ def productSetFromOptions(self, mat, producttype, A, B, C): def productSymbolic(self, mat, product, producttype, A, B, C): if producttype == 'AB': - if mat is A: # product = identity * B + if mat is A: # product = identity * B product.setType(B.getType()) product.setSizes(B.getSizes()) product.setUp() product.assemble() B.copy(product) - elif mat is B: # product = A * identity + elif mat is B: # product = A * identity product.setType(A.getType()) product.setSizes(A.getSizes()) product.setUp() @@ -60,13 +65,13 @@ def productSymbolic(self, mat, product, producttype, A, B, C): else: raise RuntimeError('wrong configuration') elif producttype == 'AtB': - if mat is A: # product = identity^T * B + if mat is A: # product = identity^T * B product.setType(B.getType()) product.setSizes(B.getSizes()) product.setUp() product.assemble() B.copy(product) - elif mat is B: # product = A^T * identity + elif mat is B: # product = A^T * identity tmp = PETSc.Mat() A.transpose(tmp) product.setType(tmp.getType()) @@ -77,7 +82,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): else: raise RuntimeError('wrong configuration') elif producttype == 'ABt': - if mat is A: # product = identity * B^T + if mat is A: # product = identity * B^T tmp = PETSc.Mat() B.transpose(tmp) product.setType(tmp.getType()) @@ -85,7 +90,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): product.setUp() product.assemble() tmp.copy(product) - elif mat is B: # product = A * identity^T + elif mat is B: # product = A * identity^T product.setType(A.getType()) product.setSizes(A.getSizes()) product.setUp() @@ -94,7 +99,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): else: raise RuntimeError('wrong configuration') elif producttype == 'PtAP': - if mat is A: # product = P^T * identity * P + if mat is A: # product = P^T * identity * P self.tmp = PETSc.Mat() B.transposeMatMult(B, self.tmp) product.setType(self.tmp.getType()) @@ -102,7 +107,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): product.setUp() product.assemble() self.tmp.copy(product) - elif mat is B: # product = identity^T * A * identity + elif mat is B: # product = identity^T * A * identity product.setType(A.getType()) product.setSizes(A.getSizes()) product.setUp() @@ -111,7 +116,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): else: raise RuntimeError('wrong configuration') elif producttype == 'RARt': - if mat is A: # product = R * identity * R^t + if mat is A: # product = R * identity * R^t self.tmp = PETSc.Mat() B.matTransposeMult(B, self.tmp) product.setType(self.tmp.getType()) @@ -119,7 +124,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): product.setUp() product.assemble() self.tmp.copy(product) - elif mat is B: # product = identity * A * identity^T + elif mat is B: # product = identity * A * identity^T product.setType(A.getType()) product.setSizes(A.getSizes()) product.setUp() @@ -128,7 +133,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): else: raise RuntimeError('wrong configuration') elif producttype == 
'ABC': - if mat is A: # product = identity * B * C + if mat is A: # product = identity * B * C self.tmp = PETSc.Mat() B.matMult(C, self.tmp) product.setType(self.tmp.getType()) @@ -136,7 +141,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): product.setUp() product.assemble() self.tmp.copy(product) - elif mat is B: # product = A * identity * C + elif mat is B: # product = A * identity * C self.tmp = PETSc.Mat() A.matMult(C, self.tmp) product.setType(self.tmp.getType()) @@ -144,7 +149,7 @@ def productSymbolic(self, mat, product, producttype, A, B, C): product.setUp() product.assemble() self.tmp.copy(product) - elif mat is C: # product = A * B * identity + elif mat is C: # product = A * B * identity self.tmp = PETSc.Mat() A.matMult(B, self.tmp) product.setType(self.tmp.getType()) @@ -155,82 +160,82 @@ def productSymbolic(self, mat, product, producttype, A, B, C): else: raise RuntimeError('wrong configuration') else: - raise RuntimeError('Product {} not implemented'.format(producttype)) + raise RuntimeError(f'Product {producttype} not implemented') product.zeroEntries() def productNumeric(self, mat, product, producttype, A, B, C): if producttype == 'AB': - if mat is A: # product = identity * B + if mat is A: # product = identity * B B.copy(product, structure=True) - elif mat is B: # product = A * identity + elif mat is B: # product = A * identity A.copy(product, structure=True) else: raise RuntimeError('wrong configuration') product.scale(self.s) elif producttype == 'AtB': - if mat is A: # product = identity^T * B + if mat is A: # product = identity^T * B B.copy(product, structure=True) - elif mat is B: # product = A^T * identity + elif mat is B: # product = A^T * identity A.setTransposePrecursor(product) A.transpose(product) else: raise RuntimeError('wrong configuration') product.scale(self.s) elif producttype == 'ABt': - if mat is A: # product = identity * B^T + if mat is A: # product = identity * B^T B.setTransposePrecursor(product) B.transpose(product) - elif mat is B: # product = A * identity^T + elif mat is B: # product = A * identity^T A.copy(product, structure=True) else: raise RuntimeError('wrong configuration') product.scale(self.s) elif producttype == 'PtAP': - if mat is A: # product = P^T * identity * P + if mat is A: # product = P^T * identity * P B.transposeMatMult(B, self.tmp) self.tmp.copy(product, structure=True) product.scale(self.s) - elif mat is B: # product = identity^T * A * identity + elif mat is B: # product = identity^T * A * identity A.copy(product, structure=True) product.scale(self.s**2) else: raise RuntimeError('wrong configuration') elif producttype == 'RARt': - if mat is A: # product = R * identity * R^t + if mat is A: # product = R * identity * R^t B.matTransposeMult(B, self.tmp) self.tmp.copy(product, structure=True) product.scale(self.s) - elif mat is B: # product = identity * A * identity^T + elif mat is B: # product = identity * A * identity^T A.copy(product, structure=True) product.scale(self.s**2) else: raise RuntimeError('wrong configuration') elif producttype == 'ABC': - if mat is A: # product = identity * B * C + if mat is A: # product = identity * B * C B.matMult(C, self.tmp) self.tmp.copy(product, structure=True) - elif mat is B: # product = A * identity * C + elif mat is B: # product = A * identity * C A.matMult(C, self.tmp) self.tmp.copy(product, structure=True) - elif mat is C: # product = A * B * identity + elif mat is C: # product = A * B * identity A.matMult(B, self.tmp) self.tmp.copy(product, structure=True) else: raise 
RuntimeError('wrong configuration') product.scale(self.s) else: - raise RuntimeError('Product {} not implemented'.format(producttype)) + raise RuntimeError(f'Product {producttype} not implemented') -class Diagonal(Matrix): +class Diagonal(Matrix): def create(self, mat): - super(Diagonal,self).create(mat) + super().create(mat) mat.setUp() self.D = mat.createVecLeft() def destroy(self, mat): self.D.destroy() - super(Diagonal,self).destroy(mat) + super().destroy(mat) def scale(self, mat, a): self.D.scale(a) @@ -250,15 +255,15 @@ def duplicate(self, mat, op): dmat.createPython(mat.getSizes(), dctx, comm=mat.getComm()) dctx.D = self.D.duplicate() if op == PETSc.Mat.DuplicateOption.COPY_VALUES: - self.D.copy(dctx.D) - dmat.setUp() + self.D.copy(dctx.D) + dmat.setUp() return dmat def getDiagonal(self, mat, vd): self.D.copy(vd) def setDiagonal(self, mat, vd, im): - if isinstance (im, bool): + if isinstance(im, bool): addv = im if addv: self.D.axpy(1, vd) @@ -269,19 +274,23 @@ def setDiagonal(self, mat, vd, im): elif im == PETSc.InsertMode.ADD_VALUES: self.D.axpy(1, vd) else: - raise ValueError('wrong InsertMode %d'% im) + raise ValueError('wrong InsertMode %d' % im) def diagonalScale(self, mat, vl, vr): - if vl: self.D.pointwiseMult(self.D, vl) - if vr: self.D.pointwiseMult(self.D, vr) + if vl: + self.D.pointwiseMult(self.D, vl) + if vr: + self.D.pointwiseMult(self.D, vr) + # -------------------------------------------------------------------- -class TestMatrix(unittest.TestCase): +class TestMatrix(unittest.TestCase): COMM = PETSc.COMM_WORLD PYMOD = __name__ PYCLS = 'Matrix' + CREATE_WITH_NONE = False def _getCtx(self): return self.A.getPythonContext() @@ -289,20 +298,23 @@ def _getCtx(self): def setUp(self): N = self.N = 13 self.A = PETSc.Mat() - if 0: # command line way + if 0: # command line way self.A.create(self.COMM) - self.A.setSizes([N,N]) + self.A.setSizes([N, N]) self.A.setType('python') OptDB = PETSc.Options(self.A) - OptDB['mat_python_type'] = '%s.%s' % (self.PYMOD,self.PYCLS) + OptDB['mat_python_type'] = f'{self.PYMOD}.{self.PYCLS}' self.A.setFromOptions() - self.A.setUp() del OptDB['mat_python_type'] self.assertTrue(self._getCtx() is not None) - else: # python way + else: # python way context = globals()[self.PYCLS]() - self.A.createPython([N,N], context, comm=self.COMM) - self.A.setUp() + if self.CREATE_WITH_NONE: # test passing None as context + self.A.createPython([N, N], None, comm=self.COMM) + self.A.setPythonContext(context) + self.A.setUp() + else: + self.A.createPython([N, N], context, comm=self.COMM) self.assertTrue(self._getCtx() is context) self.assertEqual(getrefcount(context), 3) del context @@ -311,49 +323,57 @@ def setUp(self): def tearDown(self): ctx = self.A.getPythonContext() self.assertEqual(getrefcount(ctx), 3) - self.A.destroy() # XXX + self.A.destroy() # XXX self.A = None PETSc.garbage_cleanup() self.assertEqual(getrefcount(ctx), 2) - #import gc,pprint; pprint.pprint(gc.get_referrers(ctx)) def testBasic(self): ctx = self.A.getPythonContext() self.assertTrue(self._getCtx() is ctx) self.assertEqual(getrefcount(ctx), 3) + def testSetUp(self): + ctx = self.A.getPythonContext() + setupcalled = ctx.setupcalled + self.A.setUp() + self.assertEqual(setupcalled, ctx.setupcalled) + self.A.setPythonContext(ctx) + self.A.setUp() + self.assertEqual(setupcalled + 1, ctx.setupcalled) + def testZeroEntries(self): - f = lambda : self.A.zeroEntries() + f = lambda: self.A.zeroEntries() self.assertRaises(Exception, f) def testMult(self): x, y = self.A.createVecs() - f = lambda : 
self.A.mult(x, y) + f = lambda: self.A.mult(x, y) self.assertRaises(Exception, f) def testMultTranspose(self): x, y = self.A.createVecs() - f = lambda : self.A.multTranspose(x, y) + f = lambda: self.A.multTranspose(x, y) self.assertRaises(Exception, f) def testGetDiagonal(self): d = self.A.createVecLeft() - f = lambda : self.A.getDiagonal(d) + f = lambda: self.A.getDiagonal(d) self.assertRaises(Exception, f) def testSetDiagonal(self): d = self.A.createVecLeft() - f = lambda : self.A.setDiagonal(d) + f = lambda: self.A.setDiagonal(d) self.assertRaises(Exception, f) def testDiagonalScale(self): x, y = self.A.createVecs() - f = lambda : self.A.diagonalScale(x, y) + f = lambda: self.A.diagonalScale(x, y) self.assertRaises(Exception, f) def testDuplicate(self): - f1 = lambda : self.A.duplicate(x, True) - f2 = lambda : self.A.duplicate(x, False) + f1 = lambda: self.A.duplicate(True) + f2 = lambda: self.A.duplicate(False) self.assertRaises(Exception, f1) self.assertRaises(Exception, f2) @@ -362,7 +382,7 @@ def testSetVecType(self): self.assertTrue('mpi' == self.A.getVecType()) def testH2Opus(self): - if not PETSc.Sys.hasExternalPackage("h2opus"): + if not PETSc.Sys.hasExternalPackage('h2opus'): return if self.A.getComm().Get_size() > 1: return @@ -370,67 +390,68 @@ def testH2Opus(self): # need matrix vector and its transpose for norm estimation AA = self.A.getPythonContext() - if not hasattr(AA,'mult'): + if not hasattr(AA, 'mult'): return AA.multTranspose = AA.mult # without coordinates - h.createH2OpusFromMat(self.A,leafsize=2) + h.createH2OpusFromMat(self.A, leafsize=2) h.assemble() h.destroy() # with coordinates - coords = numpy.linspace((1,2,3),(10,20,30),self.A.getSize()[0],dtype=PETSc.RealType) - h.createH2OpusFromMat(self.A,coords,leafsize=2) + coords = numpy.linspace( + (1, 2, 3), (10, 20, 30), self.A.getSize()[0], dtype=PETSc.RealType + ) + h.createH2OpusFromMat(self.A, coords, leafsize=2) h.assemble() # test API h.H2OpusOrthogonalize() - h.H2OpusCompress(1.e-1) + h.H2OpusCompress(1.0e-1) # Low-rank update U = PETSc.Mat() - U.createDense([h.getSizes()[0],3],comm=h.getComm()) + U.createDense([h.getSizes()[0], 3], comm=h.getComm()) U.setUp() U.setRandom() he = PETSc.Mat() - h.convert('dense',he) + h.convert('dense', he) he.axpy(1.0, U.matTransposeMult(U)) h.H2OpusLowRankUpdate(U) self.assertTrue(he.equal(h)) - h.destroy() del AA.multTranspose def testGetType(self): ctx = self.A.getPythonContext() - pytype = "{0}.{1}".format(ctx.__module__, type(ctx).__name__) + pytype = f'{ctx.__module__}.{type(ctx).__name__}' self.assertTrue(self.A.getPythonType() == pytype) -class TestScaledIdentity(TestMatrix): +class TestScaledIdentity(TestMatrix): PYCLS = 'ScaledIdentity' def testMult(self): s = self._getCtx().s x, y = self.A.createVecs() x.setRandom() - self.A.mult(x,y) - self.assertTrue(y.equal(s*x)) + self.A.mult(x, y) + self.assertTrue(y.equal(s * x)) def testMultTransposeSymmKnown(self): s = self._getCtx().s x, y = self.A.createVecs() x.setRandom() self.A.setOption(PETSc.Mat.Option.SYMMETRIC, True) - self.A.multTranspose(x,y) - self.assertTrue(y.equal(s*x)) + self.A.multTranspose(x, y) + self.assertTrue(y.equal(s * x)) self.A.setOption(PETSc.Mat.Option.SYMMETRIC, False) - f = lambda : self.A.multTranspose(x, y) + f = lambda: self.A.multTranspose(x, y) self.assertRaises(Exception, f) def testMultTransposeNewMeth(self): @@ -439,9 +460,9 @@ def testMultTransposeNewMeth(self): x.setRandom() AA = self.A.getPythonContext() AA.multTranspose = AA.mult - self.A.multTranspose(x,y) + 
self.A.multTranspose(x, y) del AA.multTranspose - self.assertTrue(y.equal(s*x)) + self.assertTrue(y.equal(s * x)) def testGetDiagonal(self): s = self._getCtx().s @@ -471,28 +492,34 @@ def testMatMat(self): B.setType(PETSc.Mat.Type.AIJ) B.setPreallocationNNZ(None) B.setRandom(R) - I = PETSc.Mat().create(self.COMM) - I.setSizes(self.A.getSizes()) - I.setType(PETSc.Mat.Type.AIJ) - I.setUp() - I.assemble() - I.shift(s) - - self.assertTrue(self.A.matMult(A).equal(I.matMult(A))) - self.assertTrue(A.matMult(self.A).equal(A.matMult(I))) + Id = PETSc.Mat().create(self.COMM) + Id.setSizes(self.A.getSizes()) + Id.setType(PETSc.Mat.Type.AIJ) + Id.setUp() + Id.assemble() + Id.shift(s) + + self.assertTrue(self.A.matMult(A).equal(Id.matMult(A))) + self.assertTrue(A.matMult(self.A).equal(A.matMult(Id))) if self.A.getComm().Get_size() == 1: - self.assertTrue(self.A.matTransposeMult(A).equal(I.matTransposeMult(A))) - self.assertTrue(A.matTransposeMult(self.A).equal(A.matTransposeMult(I))) - self.assertTrue(self.A.transposeMatMult(A).equal(I.transposeMatMult(A))) - self.assertTrue(A.transposeMatMult(self.A).equal(A.transposeMatMult(I))) - self.assertAlmostEqual((self.A.ptap(A) - I.ptap(A)).norm(), 0.0, places=5) - self.assertAlmostEqual((A.ptap(self.A) - A.ptap(I)).norm(), 0.0, places=5) + self.assertTrue(self.A.matTransposeMult(A).equal(Id.matTransposeMult(A))) + self.assertTrue(A.matTransposeMult(self.A).equal(A.matTransposeMult(Id))) + self.assertTrue(self.A.transposeMatMult(A).equal(Id.transposeMatMult(A))) + self.assertTrue(A.transposeMatMult(self.A).equal(A.transposeMatMult(Id))) + self.assertAlmostEqual((self.A.ptap(A) - Id.ptap(A)).norm(), 0.0, places=5) + self.assertAlmostEqual((A.ptap(self.A) - A.ptap(Id)).norm(), 0.0, places=5) if self.A.getComm().Get_size() == 1: - self.assertAlmostEqual((self.A.rart(A) - I.rart(A)).norm(), 0.0, places=5) - self.assertAlmostEqual((A.rart(self.A) - A.rart(I)).norm(), 0.0, places=5) - self.assertAlmostEqual((self.A.matMatMult(A,B)-I.matMatMult(A,B)).norm(), 0.0, places=5) - self.assertAlmostEqual((A.matMatMult(self.A,B)-A.matMatMult(I,B)).norm(), 0.0, places=5) - self.assertAlmostEqual((A.matMatMult(B,self.A)-A.matMatMult(B,I)).norm(), 0.0, places=5) + self.assertAlmostEqual((self.A.rart(A) - Id.rart(A)).norm(), 0.0, places=5) + self.assertAlmostEqual((A.rart(self.A) - A.rart(Id)).norm(), 0.0, places=5) + self.assertAlmostEqual( + (self.A.matMatMult(A, B) - Id.matMatMult(A, B)).norm(), 0.0, places=5 + ) + self.assertAlmostEqual( + (A.matMatMult(self.A, B) - A.matMatMult(Id, B)).norm(), 0.0, places=5 + ) + self.assertAlmostEqual( + (A.matMatMult(B, self.A) - A.matMatMult(B, Id)).norm(), 0.0, places=5 + ) def testShift(self): sold = self._getCtx().s @@ -508,19 +535,22 @@ def testScale(self): def testDiagonalMat(self): s = self._getCtx().s - B = PETSc.Mat().createConstantDiagonal(self.A.getSizes(), s, comm=self.A.getComm()) + B = PETSc.Mat().createConstantDiagonal( + self.A.getSizes(), s, comm=self.A.getComm() + ) self.assertTrue(self.A.equal(B)) -class TestDiagonal(TestMatrix): +class TestDiagonal(TestMatrix): PYCLS = 'Diagonal' + CREATE_WITH_NONE = True def setUp(self): - super(TestDiagonal, self).setUp() + super().setUp() D = self.A.createVecLeft() s, e = D.getOwnershipRange() for i in range(s, e): - D[i] = i+1 + D[i] = i + 1 D.assemble() self.A.setDiagonal(D) @@ -532,17 +562,17 @@ def testZeroEntries(self): def testMult(self): x, y = self.A.createVecs() x.set(1) - self.A.mult(x,y) + self.A.mult(x, y) self.assertTrue(y.equal(self._getCtx().D)) def 
testMultTransposeSymmKnown(self): x, y = self.A.createVecs() x.set(1) self.A.setOption(PETSc.Mat.Option.SYMMETRIC, True) - self.A.multTranspose(x,y) + self.A.multTranspose(x, y) self.assertTrue(y.equal(self._getCtx().D)) self.A.setOption(PETSc.Mat.Option.SYMMETRIC, False) - f = lambda : self.A.multTranspose(x, y) + f = lambda: self.A.multTranspose(x, y) self.assertRaises(Exception, f) def testMultTransposeNewMeth(self): @@ -550,7 +580,7 @@ def testMultTransposeNewMeth(self): x.set(1) AA = self.A.getPythonContext() AA.multTranspose = AA.mult - self.A.multTranspose(x,y) + self.A.multTranspose(x, y) del AA.multTranspose self.assertTrue(y.equal(self._getCtx().D)) @@ -577,7 +607,7 @@ def testDiagonalScale(self): old = self._getCtx().D.copy() self.A.diagonalScale(x, y) D = self._getCtx().D - self.assertTrue(D.equal(old*6)) + self.assertTrue(D.equal(old * 6)) def testCreateTranspose(self): A = self.A @@ -600,22 +630,22 @@ def testCreateTranspose(self): del A def testConvert(self): - self.assertTrue(self.A.convert(PETSc.Mat.Type.AIJ,PETSc.Mat()).equal(self.A)) - self.assertTrue(self.A.convert(PETSc.Mat.Type.BAIJ,PETSc.Mat()).equal(self.A)) - self.assertTrue(self.A.convert(PETSc.Mat.Type.SBAIJ,PETSc.Mat()).equal(self.A)) - self.assertTrue(self.A.convert(PETSc.Mat.Type.DENSE,PETSc.Mat()).equal(self.A)) + self.assertTrue(self.A.convert(PETSc.Mat.Type.AIJ, PETSc.Mat()).equal(self.A)) + self.assertTrue(self.A.convert(PETSc.Mat.Type.BAIJ, PETSc.Mat()).equal(self.A)) + self.assertTrue(self.A.convert(PETSc.Mat.Type.SBAIJ, PETSc.Mat()).equal(self.A)) + self.assertTrue(self.A.convert(PETSc.Mat.Type.DENSE, PETSc.Mat()).equal(self.A)) def testShift(self): old = self._getCtx().D.copy() self.A.shift(-0.5) D = self._getCtx().D - self.assertTrue(D.equal(old-0.5)) + self.assertTrue(D.equal(old - 0.5)) def testScale(self): old = self._getCtx().D.copy() self.A.scale(-0.5) D = self._getCtx().D - self.assertTrue(D.equal(-0.5*old)) + self.assertTrue(D.equal(-0.5 * old)) def testDiagonalMat(self): D = self._getCtx().D.copy() diff --git a/src/binding/petsc4py/test/test_nsp.py b/src/binding/petsc4py/test/test_nsp.py index 298f7075d6e..d5c478a8118 100644 --- a/src/binding/petsc4py/test/test_nsp.py +++ b/src/binding/petsc4py/test/test_nsp.py @@ -1,24 +1,25 @@ import unittest from petsc4py import PETSc -import numpy as N from sys import getrefcount # -------------------------------------------------------------------- + def allclose(seq1, seq2): for v1, v2 in zip(seq1, seq2): - if abs(v1-v2) > 1e-5: + if abs(v1 - v2) > 1e-5: return False return True class TestNullSpace(unittest.TestCase): - def setUp(self): u1 = PETSc.Vec().createSeq(3) u2 = PETSc.Vec().createSeq(3) - u1[0], u1[1], u1[2] = [1, 2, 0]; u1.normalize() - u2[0], u2[1], u2[2] = [2, -1, 0]; u2.normalize() + u1[0], u1[1], u1[2] = [1, 2, 0] + u1.normalize() + u2[0], u2[1], u2[2] = [2, -1, 0] + u2.normalize() basis = [u1, u2] nullsp = PETSc.NullSpace().create(False, basis, comm=PETSc.COMM_SELF) self.basis = basis @@ -30,17 +31,16 @@ def tearDown(self): PETSc.garbage_cleanup() def _remove(self): - v = PETSc.Vec().createSeq(3); - v[0], v[1], v[2] = [7, 8, 9] + v = PETSc.Vec().createSeq(3) + v[0], v[1], v[2] = [7, 8, 9] w = v.copy() self.nullsp.remove(w) return (v, w) def testRemove(self): v, w = self._remove() - tols = (0, 1e-5) - self.assertTrue(allclose(v.array, [7, 8, 9])) - self.assertTrue(allclose(w.array, [0, 0, 9])) + self.assertTrue(allclose(v.array, [7, 8, 9])) + self.assertTrue(allclose(w.array, [0, 0, 9])) del v, w def testRemoveInplace(self): @@ -51,31 
+51,34 @@ def testRemoveInplace(self): def testRemoveWithFunction(self): def myremove(nsp, vec): - vec.setArray([1,2,3]) + vec.setArray([1, 2, 3]) + self.nullsp.setFunction(myremove) v, w = self._remove() - self.assertTrue(allclose(v.array, [7, 8, 9])) - self.assertTrue(allclose(w.array, [1, 2, 3])) + self.assertTrue(allclose(v.array, [7, 8, 9])) + self.assertTrue(allclose(w.array, [1, 2, 3])) self.nullsp.remove(v) - self.assertTrue(allclose(v.array, [1, 2, 3])) + self.assertTrue(allclose(v.array, [1, 2, 3])) self.nullsp.setFunction(None) self.testRemove() def testGetSetFunction(self): def rem(nsp, vec): vec.set(0) + self.nullsp.setFunction(rem) - self.assertEqual(getrefcount(rem)-1, 2) + self.assertEqual(getrefcount(rem) - 1, 2) dct = self.nullsp.getDict() self.assertTrue(dct is not None) - self.assertEqual(getrefcount(dct)-1, 2) + self.assertEqual(getrefcount(dct) - 1, 2) fun, a, kw = dct['__function__'] self.assertTrue(fun is rem) self.nullsp.setFunction(None) fun = dct.get('__function__') - self.assertEqual(getrefcount(rem)-1, 1) + self.assertEqual(getrefcount(rem) - 1, 1) self.assertTrue(fun is None) + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_object.py b/src/binding/petsc4py/test/test_object.py index 7883fca1acd..8da95db33a0 100644 --- a/src/binding/petsc4py/test/test_object.py +++ b/src/binding/petsc4py/test/test_object.py @@ -1,18 +1,21 @@ import unittest from petsc4py import PETSc +import numpy # -------------------------------------------------------------------- -class BaseTestObject(object): +class BaseTestObject: CLASS, FACTORY = None, None TARGS, KARGS = (), {} BUILD = None + def setUp(self): self.obj = self.CLASS() - getattr(self.obj,self.FACTORY)(*self.TARGS, **self.KARGS) - if not self.obj: self.obj.create() + getattr(self.obj, self.FACTORY)(*self.TARGS, **self.KARGS) + if not self.obj: + self.obj.create() def tearDown(self): self.obj = None @@ -24,11 +27,12 @@ def testTypeRegistry(self): typeobj = self.CLASS if isinstance(self.obj, PETSc.DMDA): typeobj = PETSc.DM - self.assertTrue(type_reg[classid] is typeobj ) + self.assertTrue(type_reg[classid] is typeobj) def testLogClass(self): name = self.CLASS.__name__ - if name == 'DMDA': name = 'DM' + if name == 'DMDA': + name = 'DM' logcls = PETSc.Log.Class(name) classid = self.obj.getClassId() self.assertEqual(logcls.id, classid) @@ -55,17 +59,15 @@ def testOptions(self): prefix2 = 'opt_' self.obj.setOptionsPrefix(prefix2) self.assertEqual(self.obj.getOptionsPrefix(), prefix2) - ## self.obj.appendOptionsPrefix(prefix1) - ## self.assertEqual(self.obj.getOptionsPrefix(), - ## prefix2 + prefix1) - ## self.obj.prependOptionsPrefix(prefix1) - ## self.assertEqual(self.obj.getOptionsPrefix(), - ## prefix1 + prefix2 + prefix1) + self.obj.appendOptionsPrefix(prefix1) + self.assertEqual(self.obj.getOptionsPrefix(), prefix2 + prefix1) + self.obj.setOptionsPrefix(None) + self.assertEqual(self.obj.getOptionsPrefix(), None) self.obj.setFromOptions() def testName(self): oldname = self.obj.getName() - newname = '%s-%s' %(oldname, oldname) + newname = f'{oldname}-{oldname}' self.obj.setName(newname) self.assertEqual(self.obj.getName(), newname) self.obj.setName(oldname) @@ -93,7 +95,7 @@ def testHandle(self): self.assertTrue(self.obj.handle) self.assertTrue(self.obj.fortran) h, f = self.obj.handle, self.obj.fortran - if (h>0 and f>0) or (h<0 and f<0): + if (h > 0 and f > 0) or (h < 0 and f < 0): self.assertEqual(h, f) self.obj.destroy() 
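Note: the re-enabled prefix assertions in testOptions above pin down how prefixes compose: setOptionsPrefix replaces the current prefix, appendOptionsPrefix concatenates onto it, and passing None clears it. A minimal sketch, using a KSP object and illustrative prefix strings (the test's prefix1 is defined earlier in that method):

    from petsc4py import PETSc

    ksp = PETSc.KSP().create(PETSc.COMM_SELF)
    ksp.setOptionsPrefix('opt_')        # replaces any existing prefix
    ksp.appendOptionsPrefix('my_')      # concatenates to 'opt_my_'
    assert ksp.getOptionsPrefix() == 'opt_my_'
    ksp.setOptionsPrefix(None)          # clears the prefix
    assert ksp.getOptionsPrefix() is None
    ksp.destroy()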
self.assertFalse(self.obj.handle) @@ -101,6 +103,7 @@ def testHandle(self): def testComposeQuery(self): import copy + try: myobj = copy.deepcopy(self.obj) except NotImplementedError: @@ -116,26 +119,28 @@ def testComposeQuery(self): myobj.destroy() def testProperties(self): - self.assertEqual(self.obj.getClassId(), self.obj.classid) + self.assertEqual(self.obj.getClassId(), self.obj.classid) self.assertEqual(self.obj.getClassName(), self.obj.klass) - self.assertEqual(self.obj.getType(), self.obj.type) - self.assertEqual(self.obj.getName(), self.obj.name) - self.assertEqual(self.obj.getComm(), self.obj.comm) - self.assertEqual(self.obj.getRefCount(), self.obj.refcount) + self.assertEqual(self.obj.getType(), self.obj.type) + self.assertEqual(self.obj.getName(), self.obj.name) + self.assertEqual(self.obj.getComm(), self.obj.comm) + self.assertEqual(self.obj.getRefCount(), self.obj.refcount) def testShallowCopy(self): import copy + rc = self.obj.getRefCount() obj = copy.copy(self.obj) self.assertTrue(obj is not self.obj) self.assertTrue(obj == self.obj) - self.assertTrue(type(obj) is type(self.obj)) - self.assertEqual(obj.getRefCount(), rc+1) + self.assertTrue(isinstance(obj, type(self.obj))) + self.assertEqual(obj.getRefCount(), rc + 1) del obj self.assertEqual(self.obj.getRefCount(), rc) def testDeepCopy(self): import copy + rc = self.obj.getRefCount() try: obj = copy.deepcopy(self.obj) @@ -143,7 +148,7 @@ def testDeepCopy(self): return self.assertTrue(obj is not self.obj) self.assertTrue(obj != self.obj) - self.assertTrue(type(obj) is type(self.obj)) + self.assertTrue(isinstance(obj, type(self.obj))) self.assertEqual(self.obj.getRefCount(), rc) self.assertEqual(obj.getRefCount(), 1) del obj @@ -160,100 +165,120 @@ def testStateInspection(self): # -------------------------------------------------------------------- + class TestObjectRandom(BaseTestObject, unittest.TestCase): CLASS = PETSc.Random FACTORY = 'create' + class TestObjectViewer(BaseTestObject, unittest.TestCase): CLASS = PETSc.Viewer FACTORY = 'create' + class TestObjectIS(BaseTestObject, unittest.TestCase): - CLASS = PETSc.IS + CLASS = PETSc.IS FACTORY = 'createGeneral' TARGS = ([],) + class TestObjectLGMap(BaseTestObject, unittest.TestCase): CLASS = PETSc.LGMap FACTORY = 'create' TARGS = ([],) + class TestObjectAO(BaseTestObject, unittest.TestCase): - CLASS = PETSc.AO + CLASS = PETSc.AO FACTORY = 'createMapping' TARGS = ([], []) + class TestObjectDMDA(BaseTestObject, unittest.TestCase): - CLASS = PETSc.DMDA + CLASS = PETSc.DMDA FACTORY = 'create' - TARGS = ([3,3,3],) + TARGS = ([3, 3, 3],) + class TestObjectDS(BaseTestObject, unittest.TestCase): - CLASS = PETSc.DS + CLASS = PETSc.DS FACTORY = 'create' + class TestObjectVec(BaseTestObject, unittest.TestCase): - CLASS = PETSc.Vec + CLASS = PETSc.Vec FACTORY = 'createSeq' - TARGS = (0,) + TARGS = (0,) def setUp(self): BaseTestObject.setUp(self) self.obj.assemble() + class TestObjectMat(BaseTestObject, unittest.TestCase): - CLASS = PETSc.Mat + CLASS = PETSc.Mat FACTORY = 'createAIJ' TARGS = (0,) - KARGS = {'nnz':0, 'comm': PETSc.COMM_SELF} + KARGS = {'nnz': 0, 'comm': PETSc.COMM_SELF} def setUp(self): BaseTestObject.setUp(self) self.obj.assemble() + class TestObjectMatPartitioning(BaseTestObject, unittest.TestCase): - CLASS = PETSc.MatPartitioning + CLASS = PETSc.MatPartitioning FACTORY = 'create' + class TestObjectNullSpace(BaseTestObject, unittest.TestCase): - CLASS = PETSc.NullSpace + CLASS = PETSc.NullSpace FACTORY = 'create' TARGS = (True, []) + class 
TestObjectKSP(BaseTestObject, unittest.TestCase): CLASS = PETSc.KSP FACTORY = 'create' + class TestObjectPC(BaseTestObject, unittest.TestCase): CLASS = PETSc.PC FACTORY = 'create' + class TestObjectSNES(BaseTestObject, unittest.TestCase): CLASS = PETSc.SNES FACTORY = 'create' + class TestObjectTS(BaseTestObject, unittest.TestCase): - CLASS = PETSc.TS + CLASS = PETSc.TS FACTORY = 'create' + def setUp(self): - super(TestObjectTS, self).setUp() + super().setUp() self.obj.setProblemType(PETSc.TS.ProblemType.NONLINEAR) self.obj.setType(PETSc.TS.Type.BEULER) + class TestObjectTAO(BaseTestObject, unittest.TestCase): - CLASS = PETSc.TAO + CLASS = PETSc.TAO FACTORY = 'create' + class TestObjectAOBasic(BaseTestObject, unittest.TestCase): - CLASS = PETSc.AO + CLASS = PETSc.AO FACTORY = 'createBasic' TARGS = ([], []) + class TestObjectAOMapping(BaseTestObject, unittest.TestCase): - CLASS = PETSc.AO + CLASS = PETSc.AO FACTORY = 'createMapping' TARGS = ([], []) + # class TestObjectFE(BaseTestObject, unittest.TestCase): # CLASS = PETSc.FE # FACTORY = 'create' @@ -262,22 +287,24 @@ class TestObjectAOMapping(BaseTestObject, unittest.TestCase): # CLASS = PETSc.Quad # FACTORY = 'create' + class TestObjectDMLabel(BaseTestObject, unittest.TestCase): - CLASS = PETSc.DMLabel + CLASS = PETSc.DMLabel FACTORY = 'create' - TARGS = ("test",) + TARGS = ('test',) + class TestObjectSpace(BaseTestObject, unittest.TestCase): - CLASS = PETSc.Space + CLASS = PETSc.Space FACTORY = 'create' + class TestObjectDualSpace(BaseTestObject, unittest.TestCase): - CLASS = PETSc.DualSpace + CLASS = PETSc.DualSpace FACTORY = 'create' -# -------------------------------------------------------------------- -import numpy +# -------------------------------------------------------------------- if numpy.iscomplexobj(PETSc.ScalarType()): del TestObjectTAO diff --git a/src/binding/petsc4py/test/test_optdb.py b/src/binding/petsc4py/test/test_optdb.py index 0b5167479cc..189841746d0 100644 --- a/src/binding/petsc4py/test/test_optdb.py +++ b/src/binding/petsc4py/test/test_optdb.py @@ -1,32 +1,35 @@ import unittest from petsc4py import PETSc from sys import getrefcount +import numpy as np # -------------------------------------------------------------------- -class TestOptions(unittest.TestCase): - PREFIX = 'myopts-' - OPTLIST = [('bool', True), - ('int', -7), - ('real', 5), - ('scalar', 3), - ('string', 'petsc4py'), - ] +class TestOptions(unittest.TestCase): + PREFIX = 'myopts-' + OPTLIST = [ + ('bool', True), + ('int', -7), + ('real', 5), + ('scalar', 3), + ('string', 'petsc4py'), + ] def _putopts(self, opts=None, OPTLIST=None): if opts is None: opts = self.opts if OPTLIST is None: OPTLIST = self.OPTLIST - for k,v in OPTLIST: + for k, v in OPTLIST: opts[k] = v + def _delopts(self, opts=None, OPTLIST=None): if opts is None: opts = self.opts if OPTLIST is None: OPTLIST = self.OPTLIST - for k,v in OPTLIST: + for k, _ in OPTLIST: del opts[k] def setUp(self): @@ -38,10 +41,10 @@ def tearDown(self): def testHasOpts(self): self._putopts() - for k, v in self.OPTLIST: + for k, _ in self.OPTLIST: self.assertTrue(self.opts.hasName(k)) self.assertTrue(k in self.opts) - missing = k+'-missing' + missing = k + '-missing' self.assertFalse(self.opts.hasName(missing)) self.assertFalse(missing in self.opts) self._delopts() @@ -49,47 +52,119 @@ def testHasOpts(self): def testGetOpts(self): self._putopts() for k, v in self.OPTLIST: - getopt = getattr(self.opts, 'get'+k.title()) + getopt = getattr(self.opts, 'get' + k.title()) self.assertEqual(getopt(k), v) 
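Note: the testType case added below exercises the typed accessors of PETSc.Options: a value can be set from a Python scalar, a list, or a NumPy array, and read back with the matching typed getter or its array variant, with an optional default when the key is missing. A minimal sketch (the 'sketch-' prefix and key names are illustrative):

    import numpy as np
    from petsc4py import PETSc

    opts = PETSc.Options('sketch-')
    opts.setValue('sv', 1)
    opts.setValue('av', np.array([1, 0, 1]))

    i = opts.getInt('sv')                          # 1, as a Python int
    r = opts.getReal('sv', 0.0)                    # default unused: key exists
    a = opts.getIntArray('av')                     # NumPy array, PETSc.IntType
    d = opts.getRealArray('missing', (0.0, 0.5))   # falls back to the default

    opts.delValue('sv')
    opts.delValue('av')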
self._delopts() def testGetAll(self): self._putopts() allopts = self.opts.getAll() - self.assertTrue(type(allopts) is dict) - optlist = [(k, str(v).lower()) - for (k,v) in self.OPTLIST] - for k,v in allopts.items(): + self.assertTrue(isinstance(allopts, dict)) + optlist = [(k, str(v).lower()) for (k, v) in self.OPTLIST] + for k, v in allopts.items(): self.assertTrue((k, v) in optlist) self._delopts() def testGetAllQuoted(self): - dct = {'o0' : '"0 1 2"', - 'o1' : '"a b c"', - 'o2' : '"x y z"',} + dct = { + 'o0': '"0 1 2"', + 'o1': '"a b c"', + 'o2': '"x y z"', + } for k in dct: - self.opts[k] = dct[k] + self.opts[k] = dct[k] allopts = self.opts.getAll() for k in dct: self.assertEqual(allopts[k], dct[k][1:-1]) del self.opts[k] + def testType(self): + types = [ + (bool, bool, self.opts.getBool, self.opts.getBoolArray), + (int, PETSc.IntType, self.opts.getInt, self.opts.getIntArray), + (float, PETSc.RealType, self.opts.getReal, self.opts.getRealArray), + ] + if PETSc.ScalarType is PETSc.ComplexType: + types.append( + ( + complex, + PETSc.ScalarType, + self.opts.getScalar, + self.opts.getScalarArray, + ) + ) + else: + types.append( + ( + float, + PETSc.ScalarType, + self.opts.getScalar, + self.opts.getScalarArray, + ) + ) + toval = (lambda x: x, lambda x: np.array(x).tolist(), lambda x: np.array(x)) + sv = 1 + av = (1, 0, 1) + defv = 0 + defarrayv = (0, 0, 1, 0) + for pyt, pat, pget, pgetarray in types: + for tov in toval: + self.opts.setValue('sv', tov(sv)) + self.opts.setValue('av', tov(av)) + + v = pget('sv') + self.assertTrue(isinstance(v, pyt)) + self.assertEqual(v, pyt(sv)) + + v = pget('sv', defv) + self.assertTrue(isinstance(v, pyt)) + self.assertEqual(v, pyt(sv)) + + v = pget('missing', defv) + self.assertTrue(isinstance(v, pyt)) + self.assertEqual(v, pyt(defv)) + + if pgetarray is not None: + arrayv = pgetarray('av') + self.assertEqual(arrayv.dtype, pat) + self.assertEqual(len(arrayv), len(av)) + for v1, v2 in zip(arrayv, av): + self.assertTrue(isinstance(v1.item(), pyt)) + self.assertEqual(v1.item(), pyt(v2)) + + arrayv = pgetarray('av', defarrayv) + self.assertEqual(arrayv.dtype, pat) + self.assertEqual(len(arrayv), len(av)) + for v1, v2 in zip(arrayv, av): + self.assertTrue(isinstance(v1.item(), pyt)) + self.assertEqual(v1.item(), pyt(v2)) + + arrayv = pgetarray('missing', defarrayv) + self.assertEqual(arrayv.dtype, pat) + self.assertEqual(len(arrayv), len(defarrayv)) + for v1, v2 in zip(arrayv, defarrayv): + self.assertTrue(isinstance(v1.item(), pyt)) + self.assertEqual(v1.item(), pyt(v2)) + + self.opts.delValue('sv') + self.opts.delValue('av') + def testMonitor(self): optlist = [] - mon = lambda n,v: optlist.append((n,v)) + mon = lambda n, v: optlist.append((n, v)) self.opts.setMonitor(mon) - self.assertEqual(getrefcount(mon)-1, 2) + self.assertEqual(getrefcount(mon) - 1, 2) self._putopts() - target = [(self.PREFIX+k, str(v).lower()) - for k, v in self.OPTLIST] + target = [(self.PREFIX + k, str(v).lower()) for k, v in self.OPTLIST] self.assertEqual(optlist, target) self.opts.cancelMonitor() - self.assertEqual(getrefcount(mon)-1, 1) + self.assertEqual(getrefcount(mon) - 1, 1) self._delopts() + # -------------------------------------------------------------------- -del TestOptions.testMonitor # XXX +del TestOptions.testMonitor # XXX if __name__ == '__main__': unittest.main() diff --git a/src/binding/petsc4py/test/test_pc_py.py b/src/binding/petsc4py/test/test_pc_py.py index ab72e093332..755988f239b 100644 --- a/src/binding/petsc4py/test/test_pc_py.py +++ 
b/src/binding/petsc4py/test/test_pc_py.py @@ -6,128 +6,132 @@ # -------------------------------------------------------------------- -class BaseMyPC(object): + +class BaseMyPC: def setup(self, pc): pass + def reset(self, pc): pass + def apply(self, pc, x, y): raise NotImplementedError + def applyT(self, pc, x, y): self.apply(pc, x, y) + def applyS(self, pc, x, y): self.apply(pc, x, y) + def applySL(self, pc, x, y): self.applyS(pc, x, y) + def applySR(self, pc, x, y): self.applyS(pc, x, y) + def applyRich(self, pc, x, y, w, tols): self.apply(pc, x, y) + def applyM(self, pc, x, y): raise NotImplementedError + class MyPCNone(BaseMyPC): def apply(self, pc, x, y): x.copy(y) + def applyM(self, pc, x, y): x.copy(y) + class MyPCJacobi(BaseMyPC): def setup(self, pc): A, P = pc.getOperators() self.diag = P.getDiagonal() self.diag.reciprocal() + def reset(self, pc): self.diag.destroy() del self.diag + def apply(self, pc, x, y): y.pointwiseMult(self.diag, x) + def applyS(self, pc, x, y): self.diag.copy(y) y.sqrtabs() y.pointwiseMult(y, x) + def applyM(self, pc, x, y): x.copy(y) y.diagonalScale(L=self.diag) -class PC_PYTHON_CLASS(object): +class PC_PYTHON_CLASS: def __init__(self): self.impl = None self.log = {} + def _log(self, method, *args): self.log.setdefault(method, 0) self.log[method] += 1 + def create(self, pc): self._log('create', pc) + def destroy(self, pc): self._log('destroy') self.impl = None + def reset(self, pc): self._log('reset', pc) + def view(self, pc, vw): self._log('view', pc, vw) - assert isinstance(pc, PETSc.PC) - assert isinstance(vw, PETSc.Viewer) - pass + def setFromOptions(self, pc): self._log('setFromOptions', pc) - assert isinstance(pc, PETSc.PC) OptDB = PETSc.Options(pc) - impl = OptDB.getString('impl','MyPCNone') + impl = OptDB.getString('impl', 'MyPCNone') klass = globals()[impl] self.impl = klass() + def setUp(self, pc): self._log('setUp', pc) - assert isinstance(pc, PETSc.PC) self.impl.setup(pc) + def preSolve(self, pc, ksp, b, x): self._log('preSolve', pc, ksp, b, x) + def postSolve(self, pc, ksp, b, x): self._log('postSolve', pc, ksp, b, x) + def apply(self, pc, x, y): self._log('apply', pc, x, y) - assert isinstance(pc, PETSc.PC) - assert isinstance(x, PETSc.Vec) - assert isinstance(y, PETSc.Vec) self.impl.apply(pc, x, y) + def applySymmetricLeft(self, pc, x, y): self._log('applySymmetricLeft', pc, x, y) - assert isinstance(pc, PETSc.PC) - assert isinstance(x, PETSc.Vec) - assert isinstance(y, PETSc.Vec) self.impl.applySL(pc, x, y) + def applySymmetricRight(self, pc, x, y): self._log('applySymmetricRight', pc, x, y) - assert isinstance(pc, PETSc.PC) - assert isinstance(x, PETSc.Vec) - assert isinstance(y, PETSc.Vec) self.impl.applySR(pc, x, y) + def applyTranspose(self, pc, x, y): self._log('applyTranspose', pc, x, y) - assert isinstance(pc, PETSc.PC) - assert isinstance(x, PETSc.Vec) - assert isinstance(y, PETSc.Vec) self.impl.applyT(pc, x, y) + def matApply(self, pc, x, y): self._log('matApply', pc, x, y) - assert isinstance(pc, PETSc.PC) - assert isinstance(x, PETSc.Mat) - assert isinstance(y, PETSc.Mat) self.impl.applyM(pc, x, y) + def applyRichardson(self, pc, x, y, w, tols): self._log('applyRichardson', pc, x, y, w, tols) - assert isinstance(pc, PETSc.PC) - assert isinstance(x, PETSc.Vec) - assert isinstance(y, PETSc.Vec) - assert isinstance(w, PETSc.Vec) - assert isinstance(tols, tuple) - assert len(tols) == 4 self.impl.applyRich(pc, x, y, w, tols) class TestPCPYTHON(unittest.TestCase): - PC_TYPE = PETSc.PC.Type.PYTHON PC_PREFIX = 'test-' @@ -139,41 +143,41 
@@ def setUp(self): factory = 'PC_PYTHON_CLASS' self.pc.prefix = self.PC_PREFIX OptDB = PETSc.Options(self.pc) - assert OptDB.prefix == self.pc.prefix - OptDB['pc_python_type'] = '%s.%s' % (module, factory) + self.assertTrue(OptDB.prefix == self.pc.prefix) + OptDB['pc_python_type'] = f'{module}.{factory}' self.pc.setFromOptions() del OptDB['pc_python_type'] - assert self._getCtx().log['create'] == 1 - assert self._getCtx().log['setFromOptions'] == 1 + self.assertTrue(self._getCtx().log['create'] == 1) + self.assertTrue(self._getCtx().log['setFromOptions'] == 1) ctx = self._getCtx() self.assertEqual(getrefcount(ctx), 3) def testGetType(self): ctx = self.pc.getPythonContext() - pytype = "{0}.{1}".format(ctx.__module__, type(ctx).__name__) + pytype = f'{ctx.__module__}.{type(ctx).__name__}' self.assertTrue(self.pc.getPythonType() == pytype) def tearDown(self): ctx = self._getCtx() - self.pc.destroy() # XXX + self.pc.destroy() # XXX self.pc = None PETSc.garbage_cleanup() - assert ctx.log['destroy'] == 1 + self.assertTrue(ctx.log['destroy'] == 1) self.assertEqual(getrefcount(ctx), 2) def _prepare(self): - A = PETSc.Mat().createAIJ([3,3], comm=PETSc.COMM_SELF) + A = PETSc.Mat().createAIJ([3, 3], comm=PETSc.COMM_SELF) A.setUp() A.assemble() A.shift(10) x, y = A.createVecs() x.setRandom() self.pc.setOperators(A, A) - X = PETSc.Mat().createDense([3,5], comm=PETSc.COMM_SELF).setUp() + X = PETSc.Mat().createDense([3, 5], comm=PETSc.COMM_SELF).setUp() X.assemble() - Y = PETSc.Mat().createDense([3,5], comm=PETSc.COMM_SELF).setUp() + Y = PETSc.Mat().createDense([3, 5], comm=PETSc.COMM_SELF).setUp() Y.assemble() - assert (A,A) == self.pc.getOperators() + self.assertTrue((A, A) == self.pc.getOperators()) return A, x, y, X, Y def _getCtx(self): @@ -182,33 +186,39 @@ def _getCtx(self): def _applyMeth(self, meth): A, x, y, X, Y = self._prepare() if meth == 'matApply': - getattr(self.pc, meth)(X,Y) + getattr(self.pc, meth)(X, Y) x.copy(y) else: - getattr(self.pc, meth)(x,y) + getattr(self.pc, meth)(x, y) X.copy(Y) if 'reset' not in self._getCtx().log: - assert self._getCtx().log['setUp'] == 1 - assert self._getCtx().log[meth] == 1 + self.assertTrue(self._getCtx().log['setUp'] == 1) + self.assertTrue(self._getCtx().log[meth] == 1) else: nreset = self._getCtx().log['reset'] nsetup = self._getCtx().log['setUp'] - nmeth = self._getCtx().log[meth] - assert (nreset == nsetup) - assert (nreset == nmeth) + nmeth = self._getCtx().log[meth] + self.assertTrue(nreset == nsetup) + self.assertTrue(nreset == nmeth) if isinstance(self._getCtx().impl, MyPCNone): self.assertTrue(y.equal(x)) self.assertTrue(Y.equal(X)) + def testApply(self): self._applyMeth('apply') + def testApplySymmetricLeft(self): self._applyMeth('applySymmetricLeft') + def testApplySymmetricRight(self): self._applyMeth('applySymmetricRight') + def testApplyTranspose(self): self._applyMeth('applyTranspose') + def testApplyMat(self): self._applyMeth('matApply') + ## def testApplyRichardson(self): ## x, y = self._prepare() ## w = x.duplicate() @@ -237,28 +247,28 @@ def testKSPSolve(self): A, x, y, _, _ = self._prepare() ksp = PETSc.KSP().create(self.pc.comm) ksp.setType(PETSc.KSP.Type.PREONLY) - assert self.pc.getRefCount() == 1 + self.assertTrue(self.pc.getRefCount() == 1) ksp.setPC(self.pc) - assert self.pc.getRefCount() == 2 + self.assertTrue(self.pc.getRefCount() == 2) # normal ksp solve, twice - ksp.solve(x,y) - assert self._getCtx().log['setUp' ] == 1 - assert self._getCtx().log['apply' ] == 1 - assert self._getCtx().log['preSolve' ] == 1 - assert 
self._getCtx().log['postSolve'] == 1 - ksp.solve(x,y) - assert self._getCtx().log['setUp' ] == 1 - assert self._getCtx().log['apply' ] == 2 - assert self._getCtx().log['preSolve' ] == 2 - assert self._getCtx().log['postSolve'] == 2 + ksp.solve(x, y) + self.assertTrue(self._getCtx().log['setUp'] == 1) + self.assertTrue(self._getCtx().log['apply'] == 1) + self.assertTrue(self._getCtx().log['preSolve'] == 1) + self.assertTrue(self._getCtx().log['postSolve'] == 1) + ksp.solve(x, y) + self.assertTrue(self._getCtx().log['setUp'] == 1) + self.assertTrue(self._getCtx().log['apply'] == 2) + self.assertTrue(self._getCtx().log['preSolve'] == 2) + self.assertTrue(self._getCtx().log['postSolve'] == 2) # transpose ksp solve, twice - ksp.solveTranspose(x,y) - assert self._getCtx().log['setUp' ] == 1 - assert self._getCtx().log['applyTranspose'] == 1 - ksp.solveTranspose(x,y) - assert self._getCtx().log['setUp' ] == 1 - assert self._getCtx().log['applyTranspose'] == 2 - del ksp # ksp.destroy() + ksp.solveTranspose(x, y) + self.assertTrue(self._getCtx().log['setUp'] == 1) + self.assertTrue(self._getCtx().log['applyTranspose'] == 1) + ksp.solveTranspose(x, y) + self.assertTrue(self._getCtx().log['setUp'] == 1) + self.assertTrue(self._getCtx().log['applyTranspose'] == 2) + del ksp # ksp.destroy() PETSc.garbage_cleanup() self.assertEqual(self.pc.getRefCount(), 1) @@ -273,11 +283,12 @@ class TestPCPYTHON2(TestPCPYTHON): def setUp(self): OptDB = PETSc.Options(self.PC_PREFIX) OptDB['impl'] = 'MyPCJacobi' - super(TestPCPYTHON2, self).setUp() + super().setUp() clsname = type(self._getCtx().impl).__name__ - assert clsname == OptDB['impl'] + self.assertTrue(clsname == OptDB['impl']) del OptDB['impl'] + class TestPCPYTHON3(TestPCPYTHON): def setUp(self): pc = self.pc = PETSc.PC() @@ -285,18 +296,20 @@ def setUp(self): pc.createPython(ctx, comm=PETSc.COMM_SELF) self.pc.prefix = self.PC_PREFIX self.pc.setFromOptions() - assert self._getCtx().log['create'] == 1 - assert self._getCtx().log['setFromOptions'] == 1 + self.assertTrue(self._getCtx().log['create'] == 1) + self.assertTrue(self._getCtx().log['setFromOptions'] == 1) + class TestPCPYTHON4(TestPCPYTHON3): def setUp(self): OptDB = PETSc.Options(self.PC_PREFIX) OptDB['impl'] = 'MyPCJacobi' - super(TestPCPYTHON4, self).setUp() + super().setUp() clsname = type(self._getCtx().impl).__name__ - assert clsname == OptDB['impl'] + self.assertTrue(clsname == OptDB['impl']) del OptDB['impl'] + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_snes.py b/src/binding/petsc4py/test/test_snes.py index 3c7ccfee6bf..e66bc277a90 100644 --- a/src/binding/petsc4py/test/test_snes.py +++ b/src/binding/petsc4py/test/test_snes.py @@ -3,29 +3,34 @@ from petsc4py import PETSc import unittest from sys import getrefcount -import numpy +import numpy as np # -------------------------------------------------------------------- + class Function: def __call__(self, snes, x, f): - f[0] = (x[0]*x[0] + x[0]*x[1] - 3.0).item() - f[1] = (x[0]*x[1] + x[1]*x[1] - 6.0).item() + f[0] = (x[0] * x[0] + x[0] * x[1] - 3.0).item() + f[1] = (x[0] * x[1] + x[1] * x[1] - 6.0).item() f.assemble() + class Jacobian: def __call__(self, snes, x, J, P): - P[0,0] = (2.0*x[0] + x[1]).item() - P[0,1] = (x[0]).item() - P[1,0] = (x[1]).item() - P[1,1] = (x[0] + 2.0*x[1]).item() + P[0, 0] = (2.0 * x[0] + x[1]).item() + P[0, 1] = (x[0]).item() + P[1, 0] = (x[1]).item() + P[1, 1] = (x[0] + 2.0 * x[1]).item() P.assemble() - if J != P: 
J.assemble() + if J != P: + J.assemble() + + # -------------------------------------------------------------------- -class BaseTestSNES(object): +class BaseTestSNES: SNES_TYPE = None def setUp(self): @@ -47,15 +52,15 @@ def testGetSetType(self): def testTols(self): tols = self.snes.getTolerances() self.snes.setTolerances(*tols) - tnames = ('rtol', 'atol','stol', 'max_it') - tolvals = [getattr(self.snes, t) for t in tnames] + tnames = ('rtol', 'atol', 'stol', 'max_it') + tolvals = [getattr(self.snes, t) for t in tnames] self.assertEqual(tuple(tols), tuple(tolvals)) def testProperties(self): snes = self.snes # - snes.appctx = (1,2,3) - self.assertEqual(snes.appctx, (1,2,3)) + snes.appctx = (1, 2, 3) + self.assertEqual(snes.appctx, (1, 2, 3)) snes.appctx = None self.assertEqual(snes.appctx, None) # @@ -70,8 +75,8 @@ def testProperties(self): self.assertEqual(snes.norm, 0) # rh, ih = snes.history - self.assertTrue(len(rh)==0) - self.assertTrue(len(ih)==0) + self.assertTrue(len(rh) == 0) + self.assertTrue(len(ih) == 0) # reason = PETSc.SNES.ConvergedReason.CONVERGED_ITS snes.reason = reason @@ -131,7 +136,7 @@ def testGetSetJac(self): self.assertFalse(P) self.assertTrue(jac is None) J = PETSc.Mat().create(PETSc.COMM_SELF) - J.setSizes([2,2]) + J.setSizes([2, 2]) J.setType(PETSc.Mat.Type.SEQAIJ) J.setUp() jac = Jacobian() @@ -152,7 +157,7 @@ def testGetSetJac(self): def testCompJac(self): J = PETSc.Mat().create(PETSc.COMM_SELF) - J.setSizes([2,2]) + J.setSizes([2, 2]) J.setType(PETSc.Mat.Type.SEQAIJ) J.setUp() jac = Jacobian() @@ -178,7 +183,7 @@ def testGetSetUpd(self): upd2 = lambda snes, it: None refcnt2 = getrefcount(upd2) self.snes.setUpdate(upd2) - self.assertEqual(getrefcount(upd), refcnt) + self.assertEqual(getrefcount(upd), refcnt) self.assertEqual(getrefcount(upd2), refcnt2 + 1) tmp = self.snes.getUpdate()[0] self.assertTrue(tmp is upd2) @@ -194,7 +199,7 @@ def testGetKSP(self): def testSolve(self): J = PETSc.Mat().create(PETSc.COMM_SELF) - J.setSizes([2,2]) + J.setSizes([2, 2]) J.setType(PETSc.Mat.Type.SEQAIJ) J.setUp() r = PETSc.Vec().createSeq(2) @@ -202,11 +207,18 @@ def testSolve(self): b = PETSc.Vec().createSeq(2) self.snes.setFunction(Function(), r) self.snes.setJacobian(Jacobian(), J) - x.setArray([2,3]) + + def _update(snes, it, cnt): + cnt += 1 + cnt_up = np.array(0) + self.snes.setUpdate(_update, (cnt_up,) ) + + x.setArray([2, 3]) b.set(0) self.snes.setConvergenceHistory() self.snes.setFromOptions() self.snes.solve(b, x) + self.snes.setUpdate(None) rh, ih = self.snes.getConvergenceHistory() self.snes.setConvergenceHistory(0, reset=True) rh, ih = self.snes.getConvergenceHistory() @@ -214,13 +226,14 @@ def testSolve(self): self.assertEqual(len(ih), 0) self.assertAlmostEqual(abs(x[0]), 1.0, places=5) self.assertAlmostEqual(abs(x[1]), 2.0, places=5) + self.assertEqual(self.snes.getIterationNumber(), cnt_up) # XXX this test should not be here ! 
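Note: the solve test above now registers an update callback. setUpdate forwards the extra argument tuple to the callback on each nonlinear iteration, so a mutable 0-d NumPy array can count the calls and be compared against getIterationNumber after the solve, as the test asserts. A minimal sketch of the pattern:

    import numpy as np
    from petsc4py import PETSc

    def update(snes, its, counter):
        counter += 1          # in-place add on the shared 0-d array

    snes = PETSc.SNES().create(PETSc.COMM_SELF)
    counter = np.array(0)
    snes.setUpdate(update, (counter,))
    # ... set the function/Jacobian and call snes.solve(b, x) as in the test ...
    snes.setUpdate(None)      # deregisters the callback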
reason = self.snes.callConvergenceTest(1, 0, 0, 0) self.assertTrue(reason > 0) # test interface x = self.snes.getSolution() - x.setArray([2,3]) + x.setArray([2, 3]) self.snes.solve() self.assertAlmostEqual(abs(x[0]), 1.0, places=5) self.assertAlmostEqual(abs(x[1]), 2.0, places=5) @@ -234,8 +247,10 @@ def testResetAndSolve(self): def testSetMonitor(self): reshist = {} + def monitor(snes, its, fgnorm): reshist[its] = fgnorm + refcnt = getrefcount(monitor) self.snes.setMonitor(monitor) self.assertEqual(getrefcount(monitor), refcnt + 1) @@ -300,14 +315,14 @@ def testEW(self): self.assertEqual(params['version'], 1) def testMF(self): - #self.snes.setOptionsPrefix('MF-') - #opts = PETSc.Options(self.snes) - #opts['mat_mffd_type'] = 'ds' - #opts['snes_monitor'] = 'stdout' - #opts['ksp_monitor'] = 'stdout' - #opts['snes_view'] = 'stdout' + # self.snes.setOptionsPrefix('MF-') + # opts = PETSc.Options(self.snes) + # opts['mat_mffd_type'] = 'ds' + # opts['snes_monitor'] = 'stdout' + # opts['ksp_monitor'] = 'stdout' + # opts['snes_view'] = 'stdout' J = PETSc.Mat().create(PETSc.COMM_SELF) - J.setSizes([2,2]) + J.setSizes([2, 2]) J.setType(PETSc.Mat.Type.SEQAIJ) J.setUp() r = PETSc.Vec().createSeq(2) @@ -324,7 +339,7 @@ def testMF(self): self.assertTrue(self.snes.getUseMF()) self.snes.setFromOptions() if self.snes.getType() != PETSc.SNES.Type.NEWTONTR: - x.setArray([2,3]) + x.setArray([2, 3]) b.set(0) self.snes.solve(b, x) self.assertAlmostEqual(abs(x[0]), 1.0, places=5) @@ -332,7 +347,7 @@ def testMF(self): def testFDColor(self): J = PETSc.Mat().create(PETSc.COMM_SELF) - J.setSizes([2,2]) + J.setSizes([2, 2]) J.setType(PETSc.Mat.Type.SEQAIJ) J.setUp() r = PETSc.Vec().createSeq(2) @@ -349,25 +364,29 @@ def testFDColor(self): self.snes.setUseFD(True) self.assertTrue(self.snes.getUseFD()) self.snes.setFromOptions() - x.setArray([2,3]) + x.setArray([2, 3]) b.set(0) self.snes.solve(b, x) self.assertAlmostEqual(abs(x[0]), 1.0, places=4) self.assertAlmostEqual(abs(x[1]), 2.0, places=4) def testNPC(self): - self.snes.appctx = (1,2,3) + self.snes.appctx = (1, 2, 3) npc = self.snes.getNPC() - self.assertEqual(npc.appctx, (1,2,3)) + self.assertEqual(npc.appctx, (1, 2, 3)) + # -------------------------------------------------------------------- + class TestSNESLS(BaseTestSNES, unittest.TestCase): SNES_TYPE = PETSc.SNES.Type.NEWTONLS + class TestSNESTR(BaseTestSNES, unittest.TestCase): SNES_TYPE = PETSc.SNES.Type.NEWTONTR + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_snes_py.py b/src/binding/petsc4py/test/test_snes_py.py index 65f3305befc..e3ce402a3a2 100644 --- a/src/binding/petsc4py/test/test_snes_py.py +++ b/src/binding/petsc4py/test/test_snes_py.py @@ -2,12 +2,12 @@ from petsc4py import PETSc import unittest -from sys import getrefcount +from test_snes import BaseTestSNES # -------------------------------------------------------------------- -class MySNES(object): +class MySNES: def __init__(self): self.trace = False self.call_log = {} @@ -15,7 +15,8 @@ def __init__(self): def _log(self, method, *args): self.call_log.setdefault(method, 0) self.call_log[method] += 1 - if not self.trace: return + if not self.trace: + return clsname = self.__class__.__name__ pargs = [] for a in args: @@ -23,24 +24,25 @@ def _log(self, method, *args): if isinstance(a, PETSc.Object): pargs[-1] = type(a).__name__ pargs = tuple(pargs) - print ('%-20s' % ('%s.%s%s'% (clsname, method, pargs))) + print(f'{clsname}.{method}{pargs}') - def 
create(self,*args): + def create(self, *args): self._log('create', *args) - def destroy(self,*args): + def destroy(self, *args): self._log('destroy', *args) - if not self.trace: return + if not self.trace: + return for k, v in self.call_log.items(): - print ('%-20s %2d' % (k, v)) + print(f'{k} {v}') def view(self, snes, viewer): self._log('view', snes, viewer) def setFromOptions(self, snes): OptDB = PETSc.Options(snes) - self.trace = OptDB.getBool('trace',self.trace) - self._log('setFromOptions',snes) + self.trace = OptDB.getBool('trace', self.trace) + self._log('setFromOptions', snes) def setUp(self, snes): self._log('setUp', snes) @@ -48,10 +50,10 @@ def setUp(self, snes): def reset(self, snes): self._log('reset', snes) - #def preSolve(self, snes): + # def preSolve(self, snes): # self._log('preSolve', snes) # - #def postSolve(self, snes): + # def postSolve(self, snes): # self._log('postSolve', snes) def preStep(self, snes): @@ -60,16 +62,16 @@ def preStep(self, snes): def postStep(self, snes): self._log('postStep', snes) - #def computeFunction(self, snes, x, F): + # def computeFunction(self, snes, x, F): # self._log('computeFunction', snes, x, F) # snes.computeFunction(x, F) # - #def computeJacobian(self, snes, x, A, B): + # def computeJacobian(self, snes, x, A, B): # self._log('computeJacobian', snes, x, A, B) # flag = snes.computeJacobian(x, A, B) # return flag # - #def linearSolve(self, snes, b, x): + # def linearSolve(self, snes, b, x): # self._log('linearSolve', snes, b, x) # snes.ksp.solve(b,x) # ## return False # not succeed @@ -77,7 +79,7 @@ def postStep(self, snes): # return False # not succeed # return True # succeed # - #def lineSearch(self, snes, x, y, F): + # def lineSearch(self, snes, x, y, F): # self._log('lineSearch', snes, x, y, F) # x.axpy(-1,y) # snes.computeFunction(x, F) @@ -85,21 +87,19 @@ def postStep(self, snes): # return True # succeed -from test_snes import BaseTestSNES - class TestSNESPython(BaseTestSNES, unittest.TestCase): - SNES_TYPE = PETSc.SNES.Type.PYTHON def setUp(self): - super(TestSNESPython, self).setUp() + super().setUp() self.snes.setPythonContext(MySNES()) def testGetType(self): ctx = self.snes.getPythonContext() - pytype = "{0}.{1}".format(ctx.__module__, type(ctx).__name__) + pytype = f'{ctx.__module__}.{type(ctx).__name__}' self.assertTrue(self.snes.getPythonType() == pytype) + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_stdout.py b/src/binding/petsc4py/test/test_stdout.py index f1885deaed4..5936f7793de 100644 --- a/src/binding/petsc4py/test/test_stdout.py +++ b/src/binding/petsc4py/test/test_stdout.py @@ -7,6 +7,7 @@ class TestStdout(unittest.TestCase): def testStdoutRedirect(self): from io import StringIO import sys + prevstdout = sys.stdout prevstderr = sys.stderr sys.stdout = StringIO() @@ -18,11 +19,11 @@ def testStdoutRedirect(self): if not (__name__ == '__main__'): PETSc._push_python_vfprintf() - a = np.array([0.,0.,0.],dtype=PETSc.ScalarType) - a_vec = PETSc.Vec().createWithArray(a,comm=PETSc.COMM_SELF) + a = np.array([0.0, 0.0, 0.0], dtype=PETSc.ScalarType) + a_vec = PETSc.Vec().createWithArray(a, comm=PETSc.COMM_SELF) a_vec.view() v = PETSc.Viewer.STDERR(PETSc.COMM_SELF) - v.printfASCII("Error message") + v.printfASCII('Error message') newstdout = sys.stdout newstderr = sys.stderr @@ -33,20 +34,18 @@ def testStdoutRedirect(self): error = newstderr.getvalue() if not (__name__ == '__main__'): PETSc._pop_python_vfprintf() - stdoutshouldbe = \ 
-"""Vec Object: 1 MPI process + stdoutshouldbe = """Vec Object: 1 MPI process type: seq 0. 0. 0. """ - stderrshouldbe = "Error message" + stderrshouldbe = 'Error message' if PETSc._stdout_is_stderr(): stdoutshouldbe = stdoutshouldbe + stderrshouldbe - stderrshouldbe = "" - self.assertEqual(output,stdoutshouldbe) - self.assertEqual(error,stderrshouldbe) - + stderrshouldbe = '' + self.assertEqual(output, stdoutshouldbe) + self.assertEqual(error, stderrshouldbe) # -------------------------------------------------------------------- diff --git a/src/binding/petsc4py/test/test_sys.py b/src/binding/petsc4py/test/test_sys.py index b736638621f..66d14997008 100644 --- a/src/binding/petsc4py/test/test_sys.py +++ b/src/binding/petsc4py/test/test_sys.py @@ -3,8 +3,8 @@ # -------------------------------------------------------------------- -class TestVersion(unittest.TestCase): +class TestVersion(unittest.TestCase): def testGetVersion(self): version = PETSc.Sys.getVersion() self.assertTrue(version > (0, 0, 0)) @@ -13,15 +13,19 @@ def testGetVersion(self): self.assertTrue(isinstance(date, str)) v, author = PETSc.Sys.getVersion(author=True) self.assertTrue(version == v) - self.assertTrue(isinstance(author, (list,tuple))) + self.assertTrue(isinstance(author, (list, tuple))) def testGetVersionInfo(self): version = PETSc.Sys.getVersion() info = PETSc.Sys.getVersionInfo() - self.assertEqual(version, - (info['major'], - info['minor'], - info['subminor'],)) + self.assertEqual( + version, + ( + info['major'], + info['minor'], + info['subminor'], + ), + ) self.assertTrue(isinstance(info['release'], bool)) v, date = PETSc.Sys.getVersion(date=True) self.assertEqual(date, info['date']) @@ -35,9 +39,10 @@ def testGetSetDefaultComm(self): PETSc.Sys.setDefaultComm(PETSc.COMM_WORLD) c = PETSc.Sys.getDefaultComm() self.assertEqual(c, PETSc.COMM_WORLD) - f = lambda : PETSc.Sys.setDefaultComm(PETSc.COMM_NULL) + f = lambda: PETSc.Sys.setDefaultComm(PETSc.COMM_NULL) self.assertRaises(ValueError, f) + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_tao.py b/src/binding/petsc4py/test/test_tao.py index 5b5654fd8ec..4f5beb25385 100644 --- a/src/binding/petsc4py/test/test_tao.py +++ b/src/binding/petsc4py/test/test_tao.py @@ -2,32 +2,38 @@ from petsc4py import PETSc import unittest +import numpy + # -------------------------------------------------------------------- class Objective: def __call__(self, tao, x): - return (x[0] - 2.0)**2 + (x[1] - 2.0)**2 - 2.0*(x[0] + x[1]) + return (x[0] - 2.0) ** 2 + (x[1] - 2.0) ** 2 - 2.0 * (x[0] + x[1]) + class Gradient: def __call__(self, tao, x, g): - g[0] = 2.0*(x[0] - 2.0) - 2.0 - g[1] = 2.0*(x[1] - 2.0) - 2.0 + g[0] = 2.0 * (x[0] - 2.0) - 2.0 + g[1] = 2.0 * (x[1] - 2.0) - 2.0 g.assemble() + class EqConstraints: def __call__(self, tao, x, c): - c[0] = x[0]**2 + x[1] - 2.0 + c[0] = x[0] ** 2 + x[1] - 2.0 c.assemble() + class EqJacobian: def __call__(self, tao, x, J, P): - P[0,0] = 2.0*x[0] - P[0,1] = 1.0 + P[0, 0] = 2.0 * x[0] + P[0, 1] = 1.0 P.assemble() - if J != P: J.assemble() + if J != P: + J.assemble() -class BaseTestTAO(object): +class BaseTestTAO: COMM = None def setUp(self): @@ -43,9 +49,9 @@ def testSetRoutinesToNone(self): constraint, varbounds = None, None hessian, jacobian = None, None tao.setObjective(objective) - tao.setGradient(gradient,None) + tao.setGradient(gradient, None) tao.setVariableBounds(varbounds) - tao.setObjectiveGradient(objgrad,None) + 
tao.setObjectiveGradient(objgrad, None) tao.setConstraints(constraint) tao.setHessian(hessian) tao.setJacobian(jacobian) @@ -54,11 +60,21 @@ def testGetVecsAndMats(self): tao = self.tao x = tao.getSolution() (g, _) = tao.getGradient() - l, u = tao.getVariableBounds() - r = None#tao.getConstraintVec() - H, HP = None,None#tao.getHessianMat() - J, JP = None,None#tao.getJacobianMat() - for o in [x, g, r, l, u ,H, HP, J, JP,]: + low, up = tao.getVariableBounds() + r = None # tao.getConstraintVec() + H, HP = None, None # tao.getHessianMat() + J, JP = None, None # tao.getJacobianMat() + for o in [ + x, + g, + r, + low, + up, + H, + HP, + J, + JP, + ]: self.assertFalse(o) def testGetKSP(self): @@ -82,15 +98,15 @@ def testEqualityConstraints(self): J.setUp() tao.setObjective(Objective()) - tao.setGradient(Gradient(),None) - tao.setEqualityConstraints(EqConstraints(),c) - tao.setJacobianEquality(EqJacobian(),J,J) + tao.setGradient(Gradient(), None) + tao.setEqualityConstraints(EqConstraints(), c) + tao.setJacobianEquality(EqJacobian(), J, J) tao.setSolution(x) tao.setType(PETSc.TAO.Type.ALMM) - tao.setTolerances(gatol=1.e-4) + tao.setTolerances(gatol=1.0e-4) tao.setFromOptions() tao.solve() - self.assertAlmostEqual(abs(x[0]**2 + x[1] - 2.0), 0.0, places=4) + self.assertAlmostEqual(abs(x[0] ** 2 + x[1] - 2.0), 0.0, places=4) def testBNCG(self): if self.tao.getComm().Get_size() > 1: @@ -108,12 +124,12 @@ def testBNCG(self): xu.setType('standard') xu.setSizes(2) xu.set(2.0) - tao.setVariableBounds((xl,xu)) + tao.setVariableBounds((xl, xu)) tao.setObjective(Objective()) - tao.setGradient(Gradient(),None) + tao.setGradient(Gradient(), None) tao.setSolution(x) tao.setType(PETSc.TAO.Type.BNCG) - tao.setTolerances(gatol=1.e-4) + tao.setTolerances(gatol=1.0e-4) ls = tao.getLineSearch() ls.setType(PETSc.TAOLineSearch.Type.UNIT) tao.setFromOptions() @@ -121,17 +137,21 @@ def testBNCG(self): self.assertAlmostEqual(x[0], 2.0, places=4) self.assertAlmostEqual(x[1], 2.0, places=4) + # -------------------------------------------------------------------- + class TestTAOSelf(BaseTestTAO, unittest.TestCase): COMM = PETSc.COMM_SELF + class TestTAOWorld(BaseTestTAO, unittest.TestCase): COMM = PETSc.COMM_WORLD + # -------------------------------------------------------------------- -import numpy + if numpy.iscomplexobj(PETSc.ScalarType()): del BaseTestTAO del TestTAOSelf diff --git a/src/binding/petsc4py/test/test_tao_py.py b/src/binding/petsc4py/test/test_tao_py.py index 54257f66c9d..d5f8b4ba984 100644 --- a/src/binding/petsc4py/test/test_tao_py.py +++ b/src/binding/petsc4py/test/test_tao_py.py @@ -1,18 +1,22 @@ import unittest from petsc4py import PETSc from sys import getrefcount +import numpy + # -------------------------------------------------------------------- class Objective: def __call__(self, tao, x): - return (x[0] - 1.0)**2 + (x[1] - 2.0)**2 + return (x[0] - 1.0) ** 2 + (x[1] - 2.0) ** 2 + class Gradient: def __call__(self, tao, x, g): - g[0] = 2.0*(x[0] - 1.0) - g[1] = 2.0*(x[1] - 2.0) + g[0] = 2.0 * (x[0] - 1.0) + g[1] = 2.0 * (x[1] - 2.0) g.assemble() + class MyTao: def __init__(self): self.log = {} @@ -41,7 +45,7 @@ def solve(self, tao): def step(self, tao, x, g, s): self._log('step') - tao.computeGradient(x,g) + tao.computeGradient(x, g) g.copy(s) s.scale(-1.0) @@ -54,13 +58,13 @@ def postStep(self, tao): def monitor(self, tao): self._log('monitor') -class TestTaoPython(unittest.TestCase): +class TestTaoPython(unittest.TestCase): def setUp(self): self.tao = PETSc.TAO() self.tao.createPython(MyTao(), 
comm=PETSc.COMM_SELF) ctx = self.tao.getPythonContext() - self.assertEqual(getrefcount(ctx), 3) + self.assertEqual(getrefcount(ctx), 3) self.assertEqual(ctx.log['create'], 1) self.nsolve = 0 @@ -72,11 +76,11 @@ def tearDown(self): self.tao = None PETSc.garbage_cleanup() self.assertEqual(ctx.log['destroy'], 1) - self.assertEqual(getrefcount(ctx), 2) + self.assertEqual(getrefcount(ctx), 2) def testGetType(self): ctx = self.tao.getPythonContext() - pytype = "{0}.{1}".format(ctx.__module__, type(ctx).__name__) + pytype = f'{ctx.__module__}.{type(ctx).__name__}' self.assertTrue(self.tao.getPythonType() == pytype) def testSolve(self): @@ -88,10 +92,15 @@ def testSolve(self): y1 = x.duplicate() y2 = x.duplicate() tao.setObjective(Objective()) - tao.setGradient(Gradient(),None) + tao.setGradient(Gradient(), None) tao.setMonitor(ctx.monitor) tao.setFromOptions() tao.setMaximumIterations(3) + + def _update(tao, it, cnt): + cnt += 1 + cnt_up = numpy.array(0) + tao.setUpdate(_update, (cnt_up,)) tao.setSolution(x) # Call the solve method of MyTAO @@ -117,18 +126,19 @@ def testSolve(self): x.copy(y2) self.assertTrue(y1.equal(y2)) - self.assertTrue(ctx.log['monitor'] == 2*(n+1)) - self.assertTrue(ctx.log['preStep'] == 2*n) - self.assertTrue(ctx.log['postStep'] == 2*n) + self.assertTrue(ctx.log['monitor'] == 2 * (n + 1)) + self.assertTrue(ctx.log['preStep'] == 2 * n) + self.assertTrue(ctx.log['postStep'] == 2 * n) self.assertTrue(ctx.log['solve'] == 1) self.assertTrue(ctx.log['setUp'] == 1) self.assertTrue(ctx.log['setFromOptions'] == 1) self.assertTrue(ctx.log['step'] == n) + self.assertEqual(cnt_up, 2 * n) tao.cancelMonitor() + # -------------------------------------------------------------------- -import numpy if numpy.iscomplexobj(PETSc.ScalarType()): del TestTaoPython diff --git a/src/binding/petsc4py/test/test_ts.py b/src/binding/petsc4py/test/test_ts.py index f87a3f7c8f2..2ae3927b502 100644 --- a/src/binding/petsc4py/test/test_ts.py +++ b/src/binding/petsc4py/test/test_ts.py @@ -1,14 +1,15 @@ import unittest from petsc4py import PETSc -from sys import getrefcount # -------------------------------------------------------------------- + class MyODE: """ du/dt + u**2 = 0; u0,u1,u2 = 1,2,3 """ + def __init__(self): self.rhsfunction_calls = 0 self.rhsjacobian_calls = 0 @@ -19,48 +20,49 @@ def __init__(self): self.postsolve_calls = 0 self.monitor_calls = 0 - def rhsfunction(self,ts,t,u,F): + def rhsfunction(self, ts, t, u, F): # print ('MyODE.rhsfunction()') self.rhsfunction_calls += 1 f = -(u * u) f.copy(F) - def rhsjacobian(self,ts,t,u,J,P): + def rhsjacobian(self, ts, t, u, J, P): # print ('MyODE.rhsjacobian()') self.rhsjacobian_calls += 1 P.zeroEntries() diag = -2 * u P.setDiagonal(diag) P.assemble() - if J != P: J.assemble() - return True # same_nz + if J != P: + J.assemble() + return True # same_nz - def ifunction(self,ts,t,u,du,F): + def ifunction(self, ts, t, u, du, F): # print ('MyODE.ifunction()') self.ifunction_calls += 1 f = du + u * u f.copy(F) - def ijacobian(self,ts,t,u,du,a,J,P): + def ijacobian(self, ts, t, u, du, a, J, P): # print ('MyODE.ijacobian()') self.ijacobian_calls += 1 P.zeroEntries() diag = a + 2 * u P.setDiagonal(diag) P.assemble() - if J != P: J.assemble() - return True # same_nz + if J != P: + J.assemble() + return True # same_nz def monitor(self, ts, s, t, u): self.monitor_calls += 1 - dt = ts.time_step - ut = ts.vec_sol.norm() - #prn = PETSc.Sys.Print - #prn('TS: step %2d, T:%f, dT:%f, u:%f' % (s,t,dt,ut)) + # dt = ts.time_step + # ut = ts.vec_sol.norm() + # prn =
PETSc.Sys.Print + # prn('TS: step %2d, T:%f, dT:%f, u:%f' % (s,t,dt,ut)) -class BaseTestTSNonlinear(object): - +class BaseTestTSNonlinear: TYPE = None def setUp(self): @@ -80,16 +82,15 @@ def tearDown(self): class BaseTestTSNonlinearRHS(BaseTestTSNonlinear): - - def testSolveRHS(self): + def testSolveRHS(self, nullsol=False): ts = self.ts dct = self.ts.getDict() self.assertTrue(dct is not None) - self.assertTrue(type(dct) is dict) + self.assertTrue(isinstance(dct, dict)) ode = MyODE() J = PETSc.Mat().create(ts.comm) - J.setSizes(3); + J.setSizes(3) J.setFromOptions() J.setUp() u, f = J.createVecs() @@ -102,27 +103,31 @@ def testSolveRHS(self): ts.snes.ksp.pc.setType('none') T0, dT, nT = 0.00, 0.1, 10 - T = T0 + nT*dT + T = T0 + nT * dT ts.setTime(T0) ts.setTimeStep(dT) ts.setMaxTime(T) ts.setMaxSteps(nT) ts.setFromOptions() u[0], u[1], u[2] = 1, 2, 3 - ts.solve(u) + if nullsol: + ts.setSolution(u) + ts.solve() + else: + ts.solve(u) self.assertTrue(ode.rhsfunction_calls > 0) self.assertTrue(ode.rhsjacobian_calls > 0) dct = self.ts.getDict() - self.assertTrue('__appctx__' in dct) + self.assertTrue('__appctx__' in dct) self.assertTrue('__rhsfunction__' in dct) self.assertTrue('__rhsjacobian__' in dct) - self.assertTrue('__monitor__' in dct) + self.assertTrue('__monitor__' in dct) n = ode.monitor_calls ts.monitor(ts.step_number, ts.time) - self.assertEqual(ode.monitor_calls, n+1) + self.assertEqual(ode.monitor_calls, n + 1) n = ode.monitor_calls ts.monitorCancel() ts.monitor(ts.step_number, ts.time) @@ -132,7 +137,8 @@ def testFDColorRHS(self): ts = self.ts ode = MyODE() J = PETSc.Mat().create(ts.comm) - J.setSizes(5); J.setType('aij') + J.setSizes(5) + J.setType('aij') J.setPreallocationNNZ(nnz=1) u, f = J.createVecs() @@ -142,7 +148,7 @@ def testFDColorRHS(self): ts.setMonitor(ode.monitor) T0, dT, nT = 0.00, 0.1, 10 - T = T0 + nT*dT + T = T0 + nT * dT ts.setTime(T0) ts.setTimeStep(dT) ts.setMaxTime(T) @@ -151,7 +157,7 @@ def testFDColorRHS(self): u[0], u[1], u[2] = 1, 2, 3 ts.setSolution(u) - ode.rhsjacobian(ts,0,u,J,J) + ode.rhsjacobian(ts, 0, u, J, J) ts.setUp() ts.snes.setUseFD(True) ts.solve(u) @@ -164,18 +170,21 @@ def testResetAndSolveRHS(self): self.ts.setStepNumber(0) self.testSolveRHS() self.ts.reset() + self.ts.setStepNumber(0) + self.testSolveRHS(nullsol=True) + self.ts.reset() -class BaseTestTSNonlinearI(BaseTestTSNonlinear): +class BaseTestTSNonlinearI(BaseTestTSNonlinear): def testSolveI(self): ts = self.ts dct = self.ts.getDict() self.assertTrue(dct is not None) - self.assertTrue(type(dct) is dict) + self.assertTrue(isinstance(dct, dict)) ode = MyODE() J = PETSc.Mat().create(ts.comm) - J.setSizes(3); + J.setSizes(3) J.setFromOptions() J.setUp() u, f = J.createVecs() @@ -188,7 +197,7 @@ def testSolveI(self): ts.snes.ksp.pc.setType('none') T0, dT, nT = 0.00, 0.1, 10 - T = T0 + nT*dT + T = T0 + nT * dT ts.setTime(T0) ts.setTimeStep(dT) ts.setMaxTime(T) @@ -201,14 +210,14 @@ def testSolveI(self): self.assertTrue(ode.ijacobian_calls > 0) dct = self.ts.getDict() - self.assertTrue('__appctx__' in dct) + self.assertTrue('__appctx__' in dct) self.assertTrue('__ifunction__' in dct) self.assertTrue('__ijacobian__' in dct) - self.assertTrue('__monitor__' in dct) + self.assertTrue('__monitor__' in dct) n = ode.monitor_calls ts.monitor(ts.step_number, ts.time) - self.assertEqual(ode.monitor_calls, n+1) + self.assertEqual(ode.monitor_calls, n + 1) n = ode.monitor_calls ts.monitorCancel() ts.monitor(ts.step_number, ts.time) @@ -218,7 +227,8 @@ def testFDColorI(self): ts = self.ts ode = 
MyODE() J = PETSc.Mat().create(ts.comm) - J.setSizes(5); J.setType('aij') + J.setSizes(5) + J.setType('aij') J.setPreallocationNNZ(nnz=1) J.setFromOptions() u, f = J.createVecs() @@ -229,7 +239,7 @@ def testFDColorI(self): ts.setMonitor(ode.monitor) T0, dT, nT = 0.00, 0.1, 10 - T = T0 + nT*dT + T = T0 + nT * dT ts.setTime(T0) ts.setTimeStep(dT) ts.setMaxTime(T) @@ -238,7 +248,7 @@ def testFDColorI(self): u[0], u[1], u[2] = 1, 2, 3 ts.setSolution(u) - ode.ijacobian(ts,0,u,0*u,1,J,J) + ode.ijacobian(ts, 0, u, 0 * u, 1, J, J) ts.setUp() ts.snes.setUseFD(True) ts.solve(u) @@ -252,22 +262,23 @@ def testResetAndSolveI(self): self.testSolveI() self.ts.reset() -class TestTSBeuler(BaseTestTSNonlinearRHS,BaseTestTSNonlinearI, - unittest.TestCase): + +class TestTSBeuler(BaseTestTSNonlinearRHS, BaseTestTSNonlinearI, unittest.TestCase): TYPE = PETSc.TS.Type.BEULER -class TestTSCN(BaseTestTSNonlinearRHS,BaseTestTSNonlinearI, - unittest.TestCase): + +class TestTSCN(BaseTestTSNonlinearRHS, BaseTestTSNonlinearI, unittest.TestCase): TYPE = PETSc.TS.Type.CN -class TestTSTheta(BaseTestTSNonlinearRHS, BaseTestTSNonlinearI, - unittest.TestCase): + +class TestTSTheta(BaseTestTSNonlinearRHS, BaseTestTSNonlinearI, unittest.TestCase): TYPE = PETSc.TS.Type.THETA -class TestTSAlpha(BaseTestTSNonlinearRHS, BaseTestTSNonlinearI, - unittest.TestCase): + +class TestTSAlpha(BaseTestTSNonlinearRHS, BaseTestTSNonlinearI, unittest.TestCase): TYPE = PETSc.TS.Type.ALPHA + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_ts_py.py b/src/binding/petsc4py/test/test_ts_py.py index 3c9d8594697..08c5380fe1e 100644 --- a/src/binding/petsc4py/test/test_ts_py.py +++ b/src/binding/petsc4py/test/test_ts_py.py @@ -4,6 +4,7 @@ # -------------------------------------------------------------------- + class MyODE: """ du/dt + u**2 = 0; @@ -14,24 +15,25 @@ def __init__(self): self.function_calls = 0 self.jacobian_calls = 0 - def function(self,ts,t,u,du,F): - #print 'MyODE.function()' + def function(self, ts, t, u, du, F): + # print 'MyODE.function()' self.function_calls += 1 f = du + u * u f.copy(F) - def jacobian(self,ts,t,u,du,a,J,P): - #print 'MyODE.jacobian()' + def jacobian(self, ts, t, u, du, a, J, P): + # print 'MyODE.jacobian()' self.jacobian_calls += 1 P.zeroEntries() diag = a + 2 * u P.setDiagonal(diag) P.assemble() - if J != P: J.assemble() - return False # same_nz + if J != P: + J.assemble() + return False # same_nz -class MyTS(object): +class MyTS: def __init__(self): self.log = {} @@ -67,14 +69,13 @@ def adaptStep(self, ts, t, u, *args): class TestTSPython(unittest.TestCase): - def setUp(self): self.ts = PETSc.TS() self.ts.createPython(MyTS(), comm=PETSc.COMM_SELF) eft = PETSc.TS.ExactFinalTime.STEPOVER self.ts.setExactFinalTime(eft) ctx = self.ts.getPythonContext() - self.assertEqual(getrefcount(ctx), 3) + self.assertEqual(getrefcount(ctx), 3) self.assertEqual(ctx.log['create'], 1) self.nsolve = 0 @@ -82,15 +83,15 @@ def tearDown(self): ctx = self.ts.getPythonContext() self.assertEqual(getrefcount(ctx), 3) self.assertTrue('destroy' not in ctx.log) - self.ts.destroy() # XXX + self.ts.destroy() # XXX self.ts = None PETSc.garbage_cleanup() self.assertEqual(ctx.log['destroy'], 1) - self.assertEqual(getrefcount(ctx), 2) + self.assertEqual(getrefcount(ctx), 2) def testGetType(self): ctx = self.ts.getPythonContext() - pytype = "{0}.{1}".format(ctx.__module__, type(ctx).__name__) + pytype = f'{ctx.__module__}.{type(ctx).__name__}' 
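# --------------------------------------------------------------------
# [Editor's aside] A self-contained sketch of the implicit-form setup
# these TS tests exercise: du/dt + u**2 = 0 registered through
# setIFunction/setIJacobian, mirroring MyODE above. The BEULER type,
# sizes, and step counts below are illustrative choices, not values
# taken from the tests:

from petsc4py import PETSc

def ifunction(ts, t, u, du, F):
    (du + u * u).copy(F)  # residual F(t, u, u') = u' + u**2

def ijacobian(ts, t, u, du, a, J, P):
    # dF/du + a*dF/du' = 2u + a on the diagonal of this decoupled system
    P.zeroEntries()
    P.setDiagonal(a + 2 * u)
    P.assemble()
    if J != P:
        J.assemble()

J = PETSc.Mat().create(PETSc.COMM_SELF)
J.setSizes(3)
J.setFromOptions()
J.setUp()
u, f = J.createVecs()

ts = PETSc.TS().create(comm=PETSc.COMM_SELF)
ts.setType(PETSc.TS.Type.BEULER)
ts.setIFunction(ifunction, f)
ts.setIJacobian(ijacobian, J, J)
ts.setTime(0.0)
ts.setTimeStep(0.1)
ts.setMaxTime(1.0)
ts.setMaxSteps(10)
u[0], u[1], u[2] = 1, 2, 3
ts.solve(u)
# --------------------------------------------------------------------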
self.assertTrue(self.ts.getPythonType() == pytype) def testSolve(self): @@ -98,7 +99,7 @@ def testSolve(self): ts.setProblemType(ts.ProblemType.NONLINEAR) ode = MyODE() J = PETSc.Mat().create(ts.comm) - J.setSizes(3); + J.setSizes(3) J.setFromOptions() J.setUp() u, f = J.createVecs() @@ -109,7 +110,7 @@ def testSolve(self): ts.snes.ksp.pc.setType('none') T0, dT, nT = 0.0, 0.1, 10 - T = T0 + nT*dT + T = T0 + nT * dT ts.setTime(T0) ts.setTimeStep(dT) ts.setMaxTime(T) @@ -117,7 +118,7 @@ def testSolve(self): ts.setFromOptions() u[0], u[1], u[2] = 1, 2, 3 ts.solve(u) - self.nsolve +=1 + self.nsolve += 1 self.assertTrue(ode.function_calls > 0) self.assertTrue(ode.jacobian_calls > 0) @@ -129,7 +130,7 @@ def testSolve(self): del ctx dct = self.ts.getDict() - self.assertTrue('__appctx__' in dct) + self.assertTrue('__appctx__' in dct) self.assertTrue('__ifunction__' in dct) self.assertTrue('__ijacobian__' in dct) @@ -139,7 +140,8 @@ def testFDColor(self): ts.setProblemType(ts.ProblemType.NONLINEAR) ode = MyODE() J = PETSc.Mat().create(ts.comm) - J.setSizes(5); J.setType('aij'); + J.setSizes(5) + J.setType('aij') J.setPreallocationNNZ(1) J.setFromOptions() u, f = J.createVecs() @@ -149,7 +151,7 @@ def testFDColor(self): ts.setIJacobian(ode.jacobian, J, J) T0, dT, nT = 0.00, 0.1, 10 - T = T0 + nT*dT + T = T0 + nT * dT ts.setTime(T0) ts.setTimeStep(dT) ts.setMaxTime(T) @@ -158,10 +160,10 @@ def testFDColor(self): u[:] = 1, 2, 3, 4, 5 ts.setSolution(u) - ode.jacobian(ts,0.0,u,u,1.0,J,J) + ode.jacobian(ts, 0.0, u, u, 1.0, J, J) ts.snes.setUseFD(True) ts.solve(u) - self.nsolve +=1 + self.nsolve += 1 def testResetAndSolve(self): self.ts.reset() @@ -181,6 +183,7 @@ def testSetAdaptLimits(self): self.assertEqual(1.0, hmin) self.assertEqual(2.0, hmax) + # -------------------------------------------------------------------- if __name__ == '__main__': diff --git a/src/binding/petsc4py/test/test_vec.py b/src/binding/petsc4py/test/test_vec.py index 21e13c37335..a7df652ba5c 100644 --- a/src/binding/petsc4py/test/test_vec.py +++ b/src/binding/petsc4py/test/test_vec.py @@ -3,8 +3,8 @@ # -------------------------------------------------------------------- -class BaseTestVec(object): +class BaseTestVec: COMM = None TYPE = None @@ -44,6 +44,7 @@ def testDot(self): def testNorm(self): from math import sqrt + self.vec.set(1) n1 = self.vec.norm(PETSc.NormType.NORM_1) n2 = self.vec.norm(PETSc.NormType.NORM_2) @@ -64,6 +65,7 @@ def testNorm(self): def testNormalize(self): from math import sqrt + self.vec.set(1) n2 = self.vec.normalize() self.assertAlmostEqual(n2, sqrt(self.vec.getSize())) @@ -95,33 +97,38 @@ def testBsize(self): def testGetSetVals(self): start, end = self.vec.getOwnershipRange() self.vec[start] = -7 - self.vec[end-1] = -7 + self.vec[end - 1] = -7 self.vec.assemble() self.assertEqual(self.vec[start], -7) - self.assertEqual(self.vec[end-1], -7) - for i in range(start, end): self.vec[i] = i + self.assertEqual(self.vec[end - 1], -7) + for i in range(start, end): + self.vec[i] = i self.vec.assemble() values = [self.vec[i] for i in range(start, end)] self.assertEqual(values, list(range(start, end))) sz = self.vec.getSize() - self.assertEqual(self.vec.sum(), (sz-1)/2.0*sz) + self.assertEqual(self.vec.sum(), (sz - 1) / 2.0 * sz) def testGetSetValsBlocked(self): return lsize, gsize = self.vec.getSizes() start, end = self.vec.getOwnershipRange() - bsizes = list(range(1, lsize+1)) - nblocks = list(range(1, lsize+1)) - compat = [(bs, nb) - for bs in bsizes if not (gsize%bs or lsize % bs) - for nb in nblocks if bs*nb 
<= lsize] + bsizes = list(range(1, lsize + 1)) + nblocks = list(range(1, lsize + 1)) + compat = [ + (bs, nb) + for bs in bsizes + if not (gsize % bs or lsize % bs) + for nb in nblocks + if bs * nb <= lsize + ] for bsize, nblock in compat: self.vec.setBlockSize(bsize) - bindex = [start//bsize+i for i in range(nblock)] - bvalue = [float(i) for i in range(nblock*bsize)] + bindex = [start // bsize + i for i in range(nblock)] + bvalue = [float(i) for i in range(nblock * bsize)] self.vec.setValuesBlocked(bindex, bvalue) self.vec.assemble() - index = [start+i for i in range(nblock*bsize)] + index = [start + i for i in range(nblock * bsize)] value = self.vec.getValues(index) self.assertEqual(bvalue, list(value)) @@ -137,9 +144,10 @@ def testGetSetArray(self): arr2 = self.vec.getArray().copy() self.assertTrue((arr1 == arr2).all()) import numpy + refs = self.vec.getRefCount() arr3 = numpy.asarray(self.vec) - self.assertEqual(self.vec.getRefCount(), refs+1) + self.assertEqual(self.vec.getRefCount(), refs + 1) self.assertTrue((arr1 == arr3).all()) arr3[:] = 0 self.assertAlmostEqual(abs(self.vec.sum()), 0) @@ -157,7 +165,7 @@ def testPlaceArray(self): array = self.vec.getArray().copy() self.vec.placeArray(array) array[:] = 2 - self.assertAlmostEqual(abs(self.vec.sum()), 2*self.vec.getSize()) + self.assertAlmostEqual(abs(self.vec.sum()), 2 * self.vec.getSize()) self.vec.resetArray() self.assertAlmostEqual(abs(self.vec.sum()), self.vec.getSize()) @@ -167,19 +175,19 @@ def testLocalVector(self): ln = self.vec.getLocalSize() lvec = self.vec.createLocalVector() self.vec.getLocalVector(lvec) - self.assertEqual(abs(lvec.sum()), (rank+1)*ln) + self.assertEqual(abs(lvec.sum()), (rank + 1) * ln) self.vec.restoreLocalVector(lvec) - self.vec.getLocalVector(lvec,readonly=True) - self.assertEqual(abs(lvec.sum()), (rank+1)*ln) - self.vec.restoreLocalVector(lvec,readonly=True) + self.vec.getLocalVector(lvec, readonly=True) + self.assertEqual(abs(lvec.sum()), (rank + 1) * ln) + self.vec.restoreLocalVector(lvec, readonly=True) lvec.destroy() def testSetOption(self): opt1 = PETSc.Vec.Option.IGNORE_OFF_PROC_ENTRIES opt2 = PETSc.Vec.Option.IGNORE_NEGATIVE_INDICES - for opt in [opt1, opt2]*2: - for flag in [True,False]*2: - self.vec.setOption(opt,flag) + for opt in [opt1, opt2] * 2: + for flag in [True, False] * 2: + self.vec.setOption(opt, flag) def testGetSetItem(self): v = self.vec @@ -202,8 +210,8 @@ def testGetSetItem(self): w[s:e] = v.getArray().copy() w.assemble() self.assertTrue(w.equal(v)) - w1, v1 = w[s], v[s] - w2, v2 = w[e-1], v[e-1] + w1, v1 = w[s], v[s] + w2, v2 = w[e - 1], v[e - 1] self.assertEqual(w1, v1) self.assertEqual(w2, v2) @@ -211,29 +219,29 @@ def testMAXPY(self): y = self.vec y.set(1) x = [y.copy() for _ in range(3)] - a = [1]*len(x) + a = [1] * len(x) y.maxpy(a, x) z = y.duplicate() - z.set(len(x)+1) - assert (y.equal(z)) + z.set(len(x) + 1) + self.assertTrue(y.equal(z)) def testBinOp(self): x = self.vec x.set(1) n = x.getSize() - y = 2 + 2*x + 1 - x*3 - 1 + y = 2 + 2 * x + 1 - x * 3 - 1 self.assertEqual(y.min()[1], 1) self.assertEqual(y.max()[1], 1) - z = (4*x)/(2*y) + z = (4 * x) / (2 * y) self.assertEqual(z.min()[1], 2) self.assertEqual(z.max()[1], 2) - z = z/2 + z = z / 2 self.assertEqual(z.min()[1], 1) self.assertEqual(z.max()[1], 1) s = (+x) @ (-y) self.assertEqual(s, -n) # - M, N = n, 2*n + M, N = n, 2 * n A = PETSc.Mat().createDense((M, N), comm=self.COMM) A.setUp() rs, re = A.getOwnershipRange() @@ -241,49 +249,55 @@ def testBinOp(self): a, b = 3, 5 for i in range(rs, re): for j in 
range(N): - A[i, j] = a*i + b*j + A[i, j] = a * i + b * j A.assemble() y = x @ A self.assertEqual(y.getSize(), N) for i in range(cs, ce): - self.assertEqual(y[i], a*M*(M-1)/2 + b*i*M) + self.assertEqual(y[i], a * M * (M - 1) / 2 + b * i * M) y.set(1) z = A @ y self.assertEqual(z.getSize(), M) for i in range(rs, re): - self.assertEqual(z[i], b*N*(N-1)/2 + a*i*N) + self.assertEqual(z[i], b * N * (N - 1) / 2 + a * i * N) + # -------------------------------------------------------------------- + class TestVecSeq(BaseTestVec, unittest.TestCase): COMM = PETSc.COMM_SELF TYPE = PETSc.Vec.Type.SEQ + class TestVecMPI(BaseTestVec, unittest.TestCase): - COMM = PETSc.COMM_WORLD + COMM = PETSc.COMM_WORLD TYPE = PETSc.Vec.Type.MPI + class TestVecShared(BaseTestVec, unittest.TestCase): if PETSc.COMM_WORLD.getSize() == 1: TYPE = PETSc.Vec.Type.SHARED else: TYPE = PETSc.Vec.Type.MPI - COMM = PETSc.COMM_WORLD + COMM = PETSc.COMM_WORLD -#class TestVecSieve(BaseTestVec, unittest.TestCase): + +# class TestVecSieve(BaseTestVec, unittest.TestCase): # CLASS = PETSc.VecSieve # TARGS = ([],) -#class TestVecGhost(BaseTestVec, unittest.TestCase): +# class TestVecGhost(BaseTestVec, unittest.TestCase): # CLASS = PETSc.VecGhost # TARGS = ([],) # -------------------------------------------------------------------- -class TestVecWithArray(unittest.TestCase): +class TestVecWithArray(unittest.TestCase): def testCreateSeq(self): import numpy + a = numpy.zeros(5, dtype=PETSc.ScalarType) v1 = PETSc.Vec().createWithArray(a, comm=PETSc.COMM_SELF) @@ -294,38 +308,47 @@ def testCreateSeq(self): self.assertTrue(v2.size == 5) self.assertTrue(v3.size == 3) - a1 = v1.getDict()['__array__']; self.assertTrue(a is a1) - a2 = v2.getDict()['__array__']; self.assertTrue(a is a2) - a3 = v3.getDict()['__array__']; self.assertTrue(a is a2) + a1 = v1.getDict()['__array__'] + self.assertTrue(a is a1) + a2 = v2.getDict()['__array__'] + self.assertTrue(a is a2) + a3 = v3.getDict()['__array__'] + self.assertTrue(a is a3) def testCreateMPI(self): import numpy + a = numpy.zeros(5, dtype=PETSc.ScalarType) v1 = PETSc.Vec().createWithArray(a, comm=PETSc.COMM_WORLD) - v2 = PETSc.Vec().createWithArray(a, size=(5,None), comm=PETSc.COMM_WORLD) - v3 = PETSc.Vec().createWithArray(a, size=(3,None), comm=PETSc.COMM_WORLD) + v2 = PETSc.Vec().createWithArray(a, size=(5, None), comm=PETSc.COMM_WORLD) + v3 = PETSc.Vec().createWithArray(a, size=(3, None), comm=PETSc.COMM_WORLD) self.assertTrue(v1.local_size == 5) self.assertTrue(v2.local_size == 5) self.assertTrue(v3.local_size == 3) - a1 = v1.getDict()['__array__']; self.assertTrue(a is a1) - a2 = v2.getDict()['__array__']; self.assertTrue(a is a2) - a3 = v3.getDict()['__array__']; self.assertTrue(a is a2) + a1 = v1.getDict()['__array__'] + self.assertTrue(a is a1) + a2 = v2.getDict()['__array__'] + self.assertTrue(a is a2) + a3 = v3.getDict()['__array__'] + self.assertTrue(a is a3) def testSetMPIGhost(self): import numpy + v = PETSc.Vec().create() v.setType(PETSc.Vec.Type.MPI) - v.setSizes((5,None)) - ghosts = [i % v.size for i in range(v.owner_range[1],v.owner_range[1]+3)] + v.setSizes((5, None)) + ghosts = [i % v.size for i in range(v.owner_range[1], v.owner_range[1] + 3)] v.setMPIGhost(ghosts) - v.setArray(numpy.array(range(*v.owner_range),dtype=PETSc.ScalarType)) + v.setArray(numpy.array(range(*v.owner_range), dtype=PETSc.ScalarType)) v.ghostUpdate() with v.localForm() as loc: - self.assertTrue((loc[0:v.local_size] == range(*v.owner_range)).all()) - self.assertTrue((loc[v.local_size:] == ghosts).all()) + 
self.assertTrue((loc[0 : v.local_size] == range(*v.owner_range)).all()) + self.assertTrue((loc[v.local_size :] == ghosts).all()) + # -------------------------------------------------------------------- diff --git a/src/dm/dt/dualspace/interface/dualspace.c b/src/dm/dt/dualspace/interface/dualspace.c index 3015229da65..7b74f03d4c9 100644 --- a/src/dm/dt/dualspace/interface/dualspace.c +++ b/src/dm/dt/dualspace/interface/dualspace.c @@ -75,7 +75,7 @@ PetscErrorCode PetscDualSpaceTensorPointLexicographic_Internal(PetscInt len, Pet /*@C PetscDualSpaceRegister - Adds a new `PetscDualSpaceType` - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine @@ -110,7 +110,7 @@ PetscErrorCode PetscDualSpaceRegister(const char sname[], PetscErrorCode (*funct PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceSetType - Builds a particular `PetscDualSpace` based on its `PetscDualSpaceType` Collective @@ -148,7 +148,7 @@ PetscErrorCode PetscDualSpaceSetType(PetscDualSpace sp, PetscDualSpaceType name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceGetType - Gets the `PetscDualSpaceType` name (as a string) from the object. Not Collective @@ -203,7 +203,7 @@ static PetscErrorCode PetscDualSpaceView_ASCII(PetscDualSpace sp, PetscViewer v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceViewFromOptions - View a `PetscDualSpace` based on values in the options database Collective @@ -447,11 +447,9 @@ PetscErrorCode PetscDualSpaceCreate(MPI_Comm comm, PetscDualSpace *sp) PetscFunctionBegin; PetscAssertPointer(sp, 2); PetscCall(PetscCitationsRegister(FECitation, &FEcite)); - *sp = NULL; PetscCall(PetscFEInitializePackage()); PetscCall(PetscHeaderCreate(s, PETSCDUALSPACE_CLASSID, "PetscDualSpace", "Dual Space", "PetscDualSpace", comm, PetscDualSpaceDestroy, PetscDualSpaceView)); - s->order = 0; s->Nc = 1; s->k = 0; @@ -459,8 +457,7 @@ PetscErrorCode PetscDualSpaceCreate(MPI_Comm comm, PetscDualSpace *sp) s->spintdim = -1; s->uniform = PETSC_TRUE; s->setupcalled = PETSC_FALSE; - - *sp = s; + *sp = s; PetscFunctionReturn(PETSC_SUCCESS); } @@ -780,9 +777,12 @@ PetscErrorCode PetscDualSpaceGetUniform(PetscDualSpace sp, PetscBool *uniform) Level: intermediate + Note: + Do not free `numDof` + .seealso: `PetscDualSpace`, `PetscDualSpaceGetFunctional()`, `PetscDualSpaceCreate()` @*/ -PetscErrorCode PetscDualSpaceGetNumDof(PetscDualSpace sp, const PetscInt **numDof) +PetscErrorCode PetscDualSpaceGetNumDof(PetscDualSpace sp, const PetscInt *numDof[]) { PetscFunctionBegin; PetscValidHeaderSpecific(sp, PETSCDUALSPACE_CLASSID, 1); @@ -1065,7 +1065,7 @@ PetscErrorCode PetscDualSpaceApply(PetscDualSpace sp, PetscInt f, PetscReal time PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceApplyAll - Apply all functionals from the dual space basis to the result of an evaluation at the points returned by `PetscDualSpaceGetAllData()` Input Parameters: @@ -1087,7 +1087,7 @@ PetscErrorCode PetscDualSpaceApplyAll(PetscDualSpace sp, const PetscScalar *poin PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceApplyInterior - Apply interior functionals from the dual space basis to the result of an evaluation at the points returned by `PetscDualSpaceGetInteriorData()` Input Parameters: @@ -1171,7 +1171,7 @@ PetscErrorCode PetscDualSpaceApplyDefault(PetscDualSpace sp, PetscInt f, PetscRe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceApplyAllDefault - Apply all functionals from the dual space 
basis to the result of an evaluation at the points returned by `PetscDualSpaceGetAllData()` Input Parameters: @@ -1207,7 +1207,7 @@ PetscErrorCode PetscDualSpaceApplyAllDefault(PetscDualSpace sp, const PetscScala PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceApplyInteriorDefault - Apply interior functionals from the dual space basis to the result of an evaluation at the points returned by `PetscDualSpaceGetInteriorData()` Input Parameters: @@ -1859,7 +1859,7 @@ PetscErrorCode PetscDualSpaceGetDeRahm(PetscDualSpace dsp, PetscInt *k) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceTransform - Transform the function values Input Parameters: @@ -1915,7 +1915,7 @@ PetscErrorCode PetscDualSpaceTransform(PetscDualSpace dsp, PetscDualSpaceTransfo PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceTransformGradient - Transform the function gradient values Input Parameters: @@ -2048,7 +2048,7 @@ PetscErrorCode PetscDualSpaceTransformGradient(PetscDualSpace dsp, PetscDualSpac PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpaceTransformHessian - Transform the function Hessian values Input Parameters: @@ -2117,7 +2117,7 @@ PetscErrorCode PetscDualSpaceTransformHessian(PetscDualSpace dsp, PetscDualSpace PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpacePullback - Transform the given functional so that it operates on real space, rather than the reference element. Operationally, this means that we map the function evaluations depending on continuity requirements of our finite element method. Input Parameters: @@ -2169,7 +2169,7 @@ PetscErrorCode PetscDualSpacePullback(PetscDualSpace dsp, PetscFEGeom *fegeom, P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpacePushforward - Transform the given function so that it operates on real space, rather than the reference element. Operationally, this means that we map the function evaluations depending on continuity requirements of our finite element method. Input Parameters: @@ -2221,7 +2221,7 @@ PetscErrorCode PetscDualSpacePushforward(PetscDualSpace dsp, PetscFEGeom *fegeom PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpacePushforwardGradient - Transform the given function gradient so that it operates on real space, rather than the reference element. Operationally, this means that we map the function evaluations depending on continuity requirements of our finite element method. Input Parameters: @@ -2273,7 +2273,7 @@ PetscErrorCode PetscDualSpacePushforwardGradient(PetscDualSpace dsp, PetscFEGeom PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDualSpacePushforwardHessian - Transform the given function Hessian so that it operates on real space, rather than the reference element. Operationally, this means that we map the function evaluations depending on continuity requirements of our finite element method. 
Input Parameters: diff --git a/src/dm/dt/fe/impls/basic/febasic.c b/src/dm/dt/fe/impls/basic/febasic.c index a7059416e8b..2a42ebad1b9 100644 --- a/src/dm/dt/fe/impls/basic/febasic.c +++ b/src/dm/dt/fe/impls/basic/febasic.c @@ -161,7 +161,7 @@ PETSC_INTERN PetscErrorCode PetscFECreateTabulation_Basic(PetscFE fem, PetscInt PETSC_INTERN PetscErrorCode PetscFEIntegrate_Basic(PetscDS ds, PetscInt field, PetscInt Ne, PetscFEGeom *cgeom, const PetscScalar coefficients[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscScalar integral[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; PetscFE fe; PetscPointFunc obj_func; PetscQuadrature quad; @@ -250,7 +250,7 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrate_Basic(PetscDS ds, PetscInt field, P PETSC_INTERN PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field, PetscBdPointFunc obj_func, PetscInt Ne, PetscFEGeom *fgeom, const PetscScalar coefficients[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscScalar integral[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; PetscFE fe; PetscQuadrature quad; PetscTabulation *Tf, *TfAux = NULL; @@ -290,6 +290,7 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field, } PetscCall(PetscQuadratureGetData(quad, NULL, &qNc, &Nq, &quadPoints, &quadWeights)); PetscCheck(qNc == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supports scalar quadrature, not %" PetscInt_FMT " components", qNc); + if (debug > 1) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Field: %" PetscInt_FMT " Nface: %" PetscInt_FMT " Nq: %" PetscInt_FMT "\n", field, Ne, Nq)); Np = fgeom->numPoints; dE = fgeom->dimEmbed; isAffine = fgeom->isAffine; @@ -299,6 +300,7 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field, fegeom.n = NULL; fegeom.v = NULL; fegeom.J = NULL; + fegeom.invJ = NULL; fegeom.detJ = NULL; fegeom.dim = fgeom->dim; fegeom.dimEmbed = fgeom->dimEmbed; @@ -317,7 +319,7 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field, cgeom.detJ = &fgeom->suppDetJ[0][e * Np]; } for (q = 0; q < Nq; ++q) { - PetscScalar integrand; + PetscScalar integrand = 0.; PetscReal w; if (isAffine) { @@ -341,12 +343,34 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field, #endif } if (debug > 1) PetscCall(PetscPrintf(PETSC_COMM_SELF, " quad point %" PetscInt_FMT "\n", q)); + if (debug > 3) { + PetscCall(PetscPrintf(PETSC_COMM_SELF, " x_q (")); + for (PetscInt d = 0; d < dE; ++d) { + if (d) PetscCall(PetscPrintf(PETSC_COMM_SELF, ", ")); + PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g", (double)fegeom.v[d])); + } + PetscCall(PetscPrintf(PETSC_COMM_SELF, ")\n")); + PetscCall(PetscPrintf(PETSC_COMM_SELF, " n_q (")); + for (PetscInt d = 0; d < dE; ++d) { + if (d) PetscCall(PetscPrintf(PETSC_COMM_SELF, ", ")); + PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g", (double)fegeom.n[d])); + } + PetscCall(PetscPrintf(PETSC_COMM_SELF, ")\n")); + for (PetscInt f = 0; f < Nf; ++f) { + PetscCall(PetscPrintf(PETSC_COMM_SELF, " u_%" PetscInt_FMT " (", f)); + for (PetscInt c = 0; c < uOff[f + 1] - uOff[f]; ++c) { + if (c) PetscCall(PetscPrintf(PETSC_COMM_SELF, ", ")); + PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g", (double)PetscRealPart(u[uOff[f] + c]))); + } + PetscCall(PetscPrintf(PETSC_COMM_SELF, ")\n")); + } + } PetscCall(PetscFEEvaluateFieldJets_Internal(ds, Nf, face, q, Tf, &cgeom, &coefficients[cOffset], NULL, u, u_x, NULL)); if (dsAux) PetscCall(PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, 
face, q, TfAux, &cgeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL)); obj_func(dim, Nf, NfAux, uOff, uOff_x, u, NULL, u_x, aOff, aOff_x, a, NULL, a_x, 0.0, fegeom.v, fegeom.n, numConstants, constants, &integrand); integrand *= w; integral[e * Nf + field] += integrand; - if (debug > 1) PetscCall(PetscPrintf(PETSC_COMM_SELF, " int: %g %g\n", (double)PetscRealPart(integrand), (double)PetscRealPart(integral[e * Nf + field]))); + if (debug > 1) PetscCall(PetscPrintf(PETSC_COMM_SELF, " int: %g tot: %g\n", (double)PetscRealPart(integrand), (double)PetscRealPart(integral[e * Nf + field]))); } cOffset += totDim; cOffsetAux += totDimAux; @@ -356,7 +380,7 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field, PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS ds, PetscFormKey key, PetscInt Ne, PetscFEGeom *cgeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscReal t, PetscScalar elemVec[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; const PetscInt field = key.field; PetscFE fe; PetscWeakForm wf; @@ -460,7 +484,7 @@ PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS ds, PetscFormKey key, Pets PetscErrorCode PetscFEIntegrateBdResidual_Basic(PetscDS ds, PetscWeakForm wf, PetscFormKey key, PetscInt Ne, PetscFEGeom *fgeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscReal t, PetscScalar elemVec[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; const PetscInt field = key.field; PetscFE fe; PetscInt n0, n1, i; @@ -570,7 +594,7 @@ PetscErrorCode PetscFEIntegrateBdResidual_Basic(PetscDS ds, PetscWeakForm wf, Pe */ PETSC_INTERN PetscErrorCode PetscFEIntegrateHybridResidual_Basic(PetscDS ds, PetscDS dsIn, PetscFormKey key, PetscInt s, PetscInt Ne, PetscFEGeom *fgeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscReal t, PetscScalar elemVec[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; const PetscInt field = key.field; PetscFE fe; PetscWeakForm wf; @@ -681,7 +705,7 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrateHybridResidual_Basic(PetscDS ds, Pet PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jtype, PetscFormKey key, PetscInt Ne, PetscFEGeom *cgeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscReal t, PetscReal u_tshift, PetscScalar elemMat[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; PetscFE feI, feJ; PetscWeakForm wf; PetscPointJac *g0_func, *g1_func, *g2_func, *g3_func; @@ -833,7 +857,7 @@ PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jt PETSC_INTERN PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscWeakForm wf, PetscFEJacobianType jtype, PetscFormKey key, PetscInt Ne, PetscFEGeom *fgeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscReal t, PetscReal u_tshift, PetscScalar elemMat[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; PetscFE feI, feJ; PetscBdPointJac *g0_func, *g1_func, *g2_func, *g3_func; PetscInt n0, n1, n2, n3, i; @@ -995,7 +1019,7 @@ PETSC_INTERN PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscWe PETSC_INTERN 
PetscErrorCode PetscFEIntegrateHybridJacobian_Basic(PetscDS ds, PetscDS dsIn, PetscFEJacobianType jtype, PetscFormKey key, PetscInt s, PetscInt Ne, PetscFEGeom *fgeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscDS dsAux, const PetscScalar coefficientsAux[], PetscReal t, PetscReal u_tshift, PetscScalar elemMat[]) { - const PetscInt debug = 0; + const PetscInt debug = ds->printIntegrate; PetscFE feI, feJ; PetscWeakForm wf; PetscBdPointJac *g0_func, *g1_func, *g2_func, *g3_func; diff --git a/src/dm/dt/fe/impls/composite/fecomposite.c b/src/dm/dt/fe/impls/composite/fecomposite.c index 7cc64e72d77..fc92e0c42a0 100644 --- a/src/dm/dt/fe/impls/composite/fecomposite.c +++ b/src/dm/dt/fe/impls/composite/fecomposite.c @@ -242,12 +242,15 @@ PETSC_EXTERN PetscErrorCode PetscFECreate_Composite(PetscFE fem) Output Parameters: + numSubelements - The number of sub elements -. v0 - The affine transformation for each element -. jac - The Jacobian for each element -- invjac - The inverse of the Jacobian +. v0 - The affine transformation for each element, an array of length $dim * Nc$. Pass `NULL` to ignore. +. jac - The Jacobian for each element, an array of length $dim^2 * Nc$. Pass `NULL` to ignore. +- invjac - The inverse of the Jacobian, an array of length $dim^2 * Nc$. Pass `NULL` to ignore. Level: intermediate + Note: + Do not free the output arrays. + .seealso: `PetscFE`, `PetscFECreate()` @*/ PetscErrorCode PetscFECompositeGetMapping(PetscFE fem, PetscInt *numSubelements, const PetscReal *v0[], const PetscReal *jac[], const PetscReal *invjac[]) diff --git a/src/dm/dt/fe/interface/fe.c b/src/dm/dt/fe/interface/fe.c index 765eb04d301..dd3d890ee9a 100644 --- a/src/dm/dt/fe/interface/fe.c +++ b/src/dm/dt/fe/interface/fe.c @@ -58,7 +58,7 @@ PetscBool PetscFERegisterAllCalled = PETSC_FALSE; /*@C PetscFERegister - Adds a new `PetscFEType` - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine @@ -93,7 +93,7 @@ PetscErrorCode PetscFERegister(const char sname[], PetscErrorCode (*function)(Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFESetType - Builds a particular `PetscFE` Collective @@ -131,7 +131,7 @@ PetscErrorCode PetscFESetType(PetscFE fem, PetscFEType name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEGetType - Gets the `PetscFEType` (as a string) from the `PetscFE` object. Not Collective @@ -156,7 +156,7 @@ PetscErrorCode PetscFEGetType(PetscFE fem, PetscFEType *name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEViewFromOptions - View from a `PetscFE` based on values in the options database Collective @@ -178,7 +178,7 @@ PetscErrorCode PetscFEViewFromOptions(PetscFE A, PetscObject obj, const char nam PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEView - Views a `PetscFE` Collective @@ -253,7 +253,7 @@ PetscErrorCode PetscFESetFromOptions(PetscFE fem) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFESetUp - Construct data structures for the `PetscFE` after the `PetscFEType` has been set Collective @@ -348,7 +348,6 @@ PetscErrorCode PetscFECreate(MPI_Comm comm, PetscFE *fem) PetscFunctionBegin; PetscAssertPointer(fem, 2); PetscCall(PetscCitationsRegister(FECitation, &FEcite)); - *fem = NULL; PetscCall(PetscFEInitializePackage()); PetscCall(PetscHeaderCreate(f, PETSCFE_CLASSID, "PetscFE", "Finite Element", "PetscFE", comm, PetscFEDestroy, PetscFEView)); @@ -751,13 +750,13 @@ PetscErrorCode PetscFECopyQuadrature(PetscFE sfe, PetscFE tfe) . 
fem - The `PetscFE` object Output Parameter: -. numDof - Array with the number of dofs per dimension +. numDof - Array of length `dim` with the number of dofs in each dimension Level: intermediate .seealso: `PetscFE`, `PetscSpace`, `PetscDualSpace`, `PetscFECreate()` @*/ -PetscErrorCode PetscFEGetNumDof(PetscFE fem, const PetscInt **numDof) +PetscErrorCode PetscFEGetNumDof(PetscFE fem, const PetscInt *numDof[]) { PetscFunctionBegin; PetscValidHeaderSpecific(fem, PETSCFE_CLASSID, 1); @@ -1029,7 +1028,7 @@ PetscErrorCode PetscFEComputeTabulation(PetscFE fem, PetscInt npoints, const Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscTabulationDestroy - Frees memory from the associated tabulation. Not Collective @@ -1160,7 +1159,7 @@ PetscErrorCode PetscFEGetDimension(PetscFE fem, PetscInt *dim) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEPushforward - Map the reference element function to real space Input Parameters: @@ -1188,7 +1187,7 @@ PetscErrorCode PetscFEPushforward(PetscFE fe, PetscFEGeom *fegeom, PetscInt Nv, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEPushforwardGradient - Map the reference element function gradient to real space Input Parameters: @@ -1216,7 +1215,7 @@ PetscErrorCode PetscFEPushforwardGradient(PetscFE fe, PetscFEGeom *fegeom, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEPushforwardHessian - Map the reference element function Hessian to real space Input Parameters: @@ -1335,7 +1334,7 @@ PETSC_EXTERN PetscErrorCode IntegrateElementBatchGPU(PetscInt spatial_dim, Petsc __kernel void integrateElementQuadrature(int N_cb, __global float *coefficients, __global float *jacobianInverses, __global float *jacobianDeterminants, __global float *elemVec) */ -/*@C +/*@ PetscFEIntegrate - Produce the integral for the given field for a chunk of elements by quadrature integration Not Collective @@ -1406,7 +1405,7 @@ PetscErrorCode PetscFEIntegrateBd(PetscDS prob, PetscInt field, void (*obj_func) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEIntegrateResidual - Produce the element residual vector for a chunk of elements by quadrature integration Not Collective @@ -1450,7 +1449,7 @@ PetscErrorCode PetscFEIntegrateResidual(PetscDS ds, PetscFormKey key, PetscInt N PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEIntegrateBdResidual - Produce the element residual vector for a chunk of elements by quadrature integration over a boundary Not Collective @@ -1485,7 +1484,7 @@ PetscErrorCode PetscFEIntegrateBdResidual(PetscDS ds, PetscWeakForm wf, PetscFor PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEIntegrateHybridResidual - Produce the element residual vector for a chunk of hybrid element faces by quadrature integration Not Collective @@ -1522,7 +1521,7 @@ PetscErrorCode PetscFEIntegrateHybridResidual(PetscDS ds, PetscDS dsIn, PetscFor PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEIntegrateJacobian - Produce the element Jacobian for a chunk of elements by quadrature integration Not Collective @@ -1572,7 +1571,7 @@ PetscErrorCode PetscFEIntegrateJacobian(PetscDS ds, PetscFEJacobianType jtype, P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEIntegrateBdJacobian - Produce the boundary element Jacobian for a chunk of elements by quadrature integration Not Collective @@ -1623,7 +1622,7 @@ PetscErrorCode PetscFEIntegrateBdJacobian(PetscDS ds, PetscWeakForm wf, PetscFEJ PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFEIntegrateHybridJacobian - Produce the boundary element Jacobian for a chunk of 
hybrid elements by quadrature integration Not Collective @@ -2053,7 +2052,7 @@ static PetscErrorCode PetscFECreate_Internal(MPI_Comm comm, PetscInt dim, PetscI PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFECreateDefault - Create a `PetscFE` for basic FEM computation Collective @@ -2083,7 +2082,7 @@ PetscErrorCode PetscFECreateDefault(MPI_Comm comm, PetscInt dim, PetscInt Nc, Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFECreateByCell - Create a `PetscFE` for basic FEM computation Collective @@ -2173,7 +2172,7 @@ PetscErrorCode PetscFECreateLagrangeByCell(MPI_Comm comm, PetscInt dim, PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFESetName - Names the `PetscFE` and its subobjects Not Collective diff --git a/src/dm/dt/fe/interface/fegeom.c b/src/dm/dt/fe/interface/fegeom.c index 1153bf8dd32..ded08353f6b 100644 --- a/src/dm/dt/fe/interface/fegeom.c +++ b/src/dm/dt/fe/interface/fegeom.c @@ -10,7 +10,7 @@ - faceData - Flag to construct geometry data for the faces Output Parameter: -. geom - The `PetscFEGeom` object +. geom - The `PetscFEGeom` object, which is a struct not a `PetscObject` Level: beginner @@ -73,7 +73,7 @@ PetscErrorCode PetscFEGeomDestroy(PetscFEGeom **geom) - cEnd - The first cell not in the chunk Output Parameter: -. chunkGeom - The chunk of cells +. chunkGeom - an array of cells of length `cEnd` - `cStart` Level: intermediate @@ -82,7 +82,7 @@ PetscErrorCode PetscFEGeomDestroy(PetscFEGeom **geom) .seealso: `PetscFEGeom`, `PetscFEGeomRestoreChunk()`, `PetscFEGeomCreate()` @*/ -PetscErrorCode PetscFEGeomGetChunk(PetscFEGeom *geom, PetscInt cStart, PetscInt cEnd, PetscFEGeom **chunkGeom) +PetscErrorCode PetscFEGeomGetChunk(PetscFEGeom *geom, PetscInt cStart, PetscInt cEnd, PetscFEGeom *chunkGeom[]) { PetscInt Nq; PetscInt dE; @@ -135,24 +135,24 @@ PetscErrorCode PetscFEGeomRestoreChunk(PetscFEGeom *geom, PetscInt cStart, Petsc } /*@C - PetscFEGeomGetPoint - Get the geometry for cell c at point p as a `PetscFEGeom` + PetscFEGeomGetPoint - Get the geometry for cell `c` at point `p` as a `PetscFEGeom` Input Parameters: + geom - `PetscFEGeom` object . c - The cell . p - The point -- pcoords - The reference coordinates of point p, or NULL +- pcoords - The reference coordinates of point `p`, or `NULL` Output Parameter: -. pgeom - The geometry of cell c at point p +. pgeom - The geometry of cell `c` at point `p` Level: intermediate Notes: - For affine geometries, this only copies to pgeom at point 0. Since we copy pointers into pgeom, + For affine geometries, this only copies to `pgeom` at point 0. Since we copy pointers into `pgeom`, nothing needs to be done with it afterwards. - In the affine case, pgeom must have storage for the integration point coordinates in pgeom->v if pcoords is passed in. + In the affine case, `pgeom` must have storage for the integration point coordinates in pgeom->v if `pcoords` is passed in. .seealso: `PetscFEGeom`, `PetscFEGeomRestoreChunk()`, `PetscFEGeomCreate()` @*/ @@ -186,7 +186,7 @@ PetscErrorCode PetscFEGeomGetPoint(PetscFEGeom *geom, PetscInt c, PetscInt p, co } /*@C - PetscFEGeomGetCellPoint - Get the cell geometry for face f at point p as a `PetscFEGeom` + PetscFEGeomGetCellPoint - Get the cell geometry for face `c` at point `p` as a `PetscFEGeom` Input Parameters: + geom - `PetscFEGeom` object @@ -194,12 +194,12 @@ PetscErrorCode PetscFEGeomGetPoint(PetscFEGeom *geom, PetscInt c, PetscInt p, co - p - The point Output Parameter: -. pgeom - The cell geometry of face f at point p +. 
pgeom - The cell geometry of face `c` at point `p` Level: intermediate Note: - For affine geometries, this only copies to pgeom at point 0. Since we copy pointers into pgeom, + For affine geometries, this only copies to pgeom at point 0. Since we copy pointers into `pgeom`, nothing needs to be done with it afterwards. .seealso: `PetscFEGeom()`, `PetscFEGeomRestoreChunk()`, `PetscFEGeomCreate()` @@ -241,8 +241,8 @@ PetscErrorCode PetscFEGeomGetCellPoint(PetscFEGeom *geom, PetscInt c, PetscInt p PetscFunctionReturn(PETSC_SUCCESS); } -/*@ - PetscFEGeomComplete - Calculate derived quantities from base geometry specification +/*@C + PetscFEGeomComplete - Calculate derived quantities from a base geometry specification Input Parameter: . geom - `PetscFEGeom` object diff --git a/src/dm/dt/fe/interface/ftn-custom/zfef.c b/src/dm/dt/fe/interface/ftn-custom/zfef.c deleted file mode 100644 index fc0e2b8ff3c..00000000000 --- a/src/dm/dt/fe/interface/ftn-custom/zfef.c +++ /dev/null @@ -1,46 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscspaceviewfromoptions_ PETSCSPACEVIEWFROMOPTIONS - #define petscdualspaceviewfromoptions_ PETSCDUALSPACEVIEWFROMOPTIONS - #define petscfeviewfromoptions_ PETSCFEVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscspaceviewfromoptions_ petscspaceviewfromoptions - #define petscdualspaceviewfromoptions_ petscdualspaceviewfromoptions - #define petscfeviewfromoptions_ petscfeviewfromoptions -#endif - -PETSC_EXTERN void petscspaceviewfromoptions_(PetscSpace *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscSpaceViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petscdualspaceviewfromoptions_(PetscDualSpace *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscDualSpaceViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petscfeviewfromoptions_(PetscFE *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscFEViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/dm/dt/fv/interface/ftn-custom/makefile b/src/dm/dt/fv/interface/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/dm/dt/fv/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/dm/dt/fv/interface/ftn-custom/zfvf.c b/src/dm/dt/fv/interface/ftn-custom/zfvf.c deleted file mode 100644 index 11937e4a716..00000000000 --- a/src/dm/dt/fv/interface/ftn-custom/zfvf.c +++ /dev/null @@ -1,78 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscfvsetcomponentname_ PETSCFVSETCOMPONENTNAME - #define petscfvview_ PETSCFVVIEW - #define petscfvsettype_ PETSCFVSETTYPE - #define petscfvviewfromoptions_ PETSCFVVIEWFROMOPTIONS - #define petsclimiterviewfromoptions_ PETSCLIMITERVIEWFROMOPTIONS - #define petsclimitersettype_ PETSCLIMITERSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscfvsetcomponentname_ 
petscfvsetcomponentname - #define petscfvview_ petscfvview - #define petscfvsettype_ petscfvsettype - #define petscfvviewfromoptions_ petscfvviewfromoptions - #define petsclimiterviewfromoptions_ petsclimiterviewfromoptions - #define petsclimitersettype_ petsclimitersettype -#endif - -PETSC_EXTERN void petscfvsetcomponentname_(PetscFV *fvm, PetscInt *comp, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *compname; - - FIXCHAR(name, len, compname); - *ierr = PetscFVSetComponentName(*fvm, *comp, compname); - if (*ierr) return; - FREECHAR(name, compname); -} - -PETSC_EXTERN void petscfvview_(PetscFV *fvm, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscFVView(*fvm, v); -} - -PETSC_EXTERN void petscfvsettype_(PetscFV *fvm, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = PetscFVSetType(*fvm, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void petscfvviewfromoptions_(PetscFV *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscFVViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petsclimiterviewfromoptions_(PetscLimiter *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscLimiterViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petsclimitersettype_(PetscLimiter *lim, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T namelen) -{ - char *newname; - - FIXCHAR(name, namelen, newname); - *ierr = PetscLimiterSetType(*lim, newname); - if (*ierr) return; - FREECHAR(name, newname); -} diff --git a/src/dm/dt/fv/interface/fv.c b/src/dm/dt/fv/interface/fv.c index ee6e2ce087c..c3bec6e0e9d 100644 --- a/src/dm/dt/fv/interface/fv.c +++ b/src/dm/dt/fv/interface/fv.c @@ -19,7 +19,7 @@ const char LimiterCitation[] = "@article{BergerAftosmisMurman2005,\n" /*@C PetscLimiterRegister - Adds a new `PetscLimiter` implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine @@ -54,7 +54,7 @@ PetscErrorCode PetscLimiterRegister(const char sname[], PetscErrorCode (*functio PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLimiterSetType - Builds a `PetscLimiter` for a given `PetscLimiterType` Collective @@ -92,7 +92,7 @@ PetscErrorCode PetscLimiterSetType(PetscLimiter lim, PetscLimiterType name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLimiterGetType - Gets the `PetscLimiterType` name (as a string) from the `PetscLimiter`. 
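The limiter manual pages above all describe one create/configure/destroy cycle; a minimal sketch in C, with `PETSCLIMITERMINMOD` standing in for any registered `PetscLimiterType`:
.vb
  PetscLimiter lim;

  PetscCall(PetscLimiterCreate(comm, &lim));
  PetscCall(PetscLimiterSetType(lim, PETSCLIMITERMINMOD));
  PetscCall(PetscLimiterSetFromOptions(lim)); /* honors -petsclimiter_type */
  PetscCall(PetscLimiterSetUp(lim));
  PetscCall(PetscLimiterDestroy(&lim));
.ve
Note that even with the `*lim = NULL;` line removed from `PetscLimiterCreate()`, the output handle is still set before a successful return.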
Not Collective @@ -117,7 +117,7 @@ PetscErrorCode PetscLimiterGetType(PetscLimiter lim, PetscLimiterType *name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLimiterViewFromOptions - View a `PetscLimiter` based on values in the options database Collective @@ -139,7 +139,7 @@ PetscErrorCode PetscLimiterViewFromOptions(PetscLimiter A, PetscObject obj, cons PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLimiterView - Views a `PetscLimiter` Collective @@ -171,7 +171,7 @@ PetscErrorCode PetscLimiterView(PetscLimiter lim, PetscViewer v) Level: intermediate -.seealso: `PetscLimiter`, ``PetscLimiterView()` +.seealso: `PetscLimiter`, `PetscLimiterView()` @*/ PetscErrorCode PetscLimiterSetFromOptions(PetscLimiter lim) { @@ -200,7 +200,7 @@ PetscErrorCode PetscLimiterSetFromOptions(PetscLimiter lim) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLimiterSetUp - Construct data structures for the `PetscLimiter` Collective @@ -210,7 +210,7 @@ PetscErrorCode PetscLimiterSetFromOptions(PetscLimiter lim) Level: intermediate -.seealso: `PetscLimiter`, ``PetscLimiterView()`, `PetscLimiterDestroy()` +.seealso: `PetscLimiter`, `PetscLimiterView()`, `PetscLimiterDestroy()` @*/ PetscErrorCode PetscLimiterSetUp(PetscLimiter lim) { @@ -271,7 +271,6 @@ PetscErrorCode PetscLimiterCreate(MPI_Comm comm, PetscLimiter *lim) PetscFunctionBegin; PetscAssertPointer(lim, 2); PetscCall(PetscCitationsRegister(LimiterCitation, &Limitercite)); - *lim = NULL; PetscCall(PetscFVInitializePackage()); PetscCall(PetscHeaderCreate(l, PETSCLIMITER_CLASSID, "PetscLimiter", "Finite Volume Slope Limiter", "PetscLimiter", comm, PetscLimiterDestroy, PetscLimiterView)); @@ -893,7 +892,7 @@ PetscBool PetscFVRegisterAllCalled = PETSC_FALSE; /*@C PetscFVRegister - Adds a new `PetscFV` implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine @@ -928,7 +927,7 @@ PetscErrorCode PetscFVRegister(const char sname[], PetscErrorCode (*function)(Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFVSetType - Builds a particular `PetscFV` Collective @@ -966,7 +965,7 @@ PetscErrorCode PetscFVSetType(PetscFV fvm, PetscFVType name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFVGetType - Gets the `PetscFVType` (as a string) from a `PetscFV`. 
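The analogous `PetscFV` setup sequence, sketched under the assumption of a 3-component field in 2D (`PETSCFVLEASTSQUARES` is one of the built-in types):
.vb
  PetscFV fvm;

  PetscCall(PetscFVCreate(comm, &fvm));
  PetscCall(PetscFVSetType(fvm, PETSCFVLEASTSQUARES));
  PetscCall(PetscFVSetNumComponents(fvm, 3));
  PetscCall(PetscFVSetSpatialDimension(fvm, 2));
  PetscCall(PetscFVSetComponentName(fvm, 0, "density"));
  PetscCall(PetscFVSetFromOptions(fvm));
  PetscCall(PetscFVDestroy(&fvm));
.ve
With the eager `PetscLimiterCreate()` call removed from `PetscFVCreate()`, the limiter is presumably created lazily on first access (e.g. via `PetscFVGetLimiter()`).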
Not Collective @@ -991,7 +990,7 @@ PetscErrorCode PetscFVGetType(PetscFV fvm, PetscFVType *name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFVViewFromOptions - View a `PetscFV` based on values in the options database Collective @@ -1013,7 +1012,7 @@ PetscErrorCode PetscFVViewFromOptions(PetscFV A, PetscObject obj, const char nam PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFVView - Views a `PetscFV` Collective @@ -1160,12 +1159,10 @@ PetscErrorCode PetscFVCreate(MPI_Comm comm, PetscFV *fvm) PetscFunctionBegin; PetscAssertPointer(fvm, 2); - *fvm = NULL; PetscCall(PetscFVInitializePackage()); PetscCall(PetscHeaderCreate(f, PETSCFV_CLASSID, "PetscFV", "Finite Volume", "PetscFV", comm, PetscFVDestroy, PetscFVView)); PetscCall(PetscMemzero(f->ops, sizeof(struct _PetscFVOps))); - PetscCall(PetscLimiterCreate(comm, &f->limiter)); f->numComponents = 1; f->dim = 0; @@ -1279,7 +1276,7 @@ PetscErrorCode PetscFVGetNumComponents(PetscFV fvm, PetscInt *comp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFVSetComponentName - Set the name of a component (used in output and viewing) in a `PetscFV` Logically Collective @@ -1301,7 +1298,7 @@ PetscErrorCode PetscFVSetComponentName(PetscFV fvm, PetscInt comp, const char *n PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFVGetComponentName - Get the name of a component (used in output and viewing) in a `PetscFV` Logically Collective @@ -1317,7 +1314,7 @@ PetscErrorCode PetscFVSetComponentName(PetscFV fvm, PetscInt comp, const char *n .seealso: `PetscFV`, `PetscFVSetComponentName()` @*/ -PetscErrorCode PetscFVGetComponentName(PetscFV fvm, PetscInt comp, const char **name) +PetscErrorCode PetscFVGetComponentName(PetscFV fvm, PetscInt comp, const char *name[]) { PetscFunctionBegin; *name = fvm->componentNames[comp]; @@ -1335,7 +1332,7 @@ PetscErrorCode PetscFVGetComponentName(PetscFV fvm, PetscInt comp, const char ** Level: intermediate -.seealso: `PetscFV`, ``PetscFVGetSpatialDimension()` +.seealso: `PetscFV`, `PetscFVGetSpatialDimension()` @*/ PetscErrorCode PetscFVSetSpatialDimension(PetscFV fvm, PetscInt dim) { @@ -1476,7 +1473,7 @@ PetscErrorCode PetscFVGetQuadrature(PetscFV fvm, PetscQuadrature *q) Not Collective - Input Parameter: + Input Parameters: + fvm - The `PetscFV` object - ct - The `DMPolytopeType` for the cell @@ -1685,7 +1682,7 @@ PetscErrorCode PetscFVCreateTabulation(PetscFV fvm, PetscInt nrepl, PetscInt npo PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscFVComputeGradient - Compute the gradient reconstruction matrix for a given cell Input Parameters: @@ -1740,7 +1737,7 @@ PetscErrorCode PetscFVIntegrateRHSFunction(PetscFV fvm, PetscDS prob, PetscInt f } /*@ - PetscFVClone - Create a shallow copy of a `PetscFV` object that jsut references the internal objects. + PetscFVClone - Create a shallow copy of a `PetscFV` object that just references the internal objects. Input Parameter: . 
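The dt.c hunks just below change `PetscQuadratureView()` from `/*@C` to `/*@`, reflecting that its Fortran stub is now generated automatically; a quick C sketch of creating and viewing a quadrature (a 3-point 1D Gauss rule, chosen arbitrarily):
.vb
  PetscQuadrature q;

  PetscCall(PetscDTGaussTensorQuadrature(1, 1, 3, -1.0, 1.0, &q));
  PetscCall(PetscQuadratureView(q, PETSC_VIEWER_STDOUT_SELF));
  PetscCall(PetscQuadratureDestroy(&q));
.ve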
fv - The initial `PetscFV` diff --git a/src/dm/dt/interface/dt.c b/src/dm/dt/interface/dt.c index 82a34254298..33d58adc9c4 100644 --- a/src/dm/dt/interface/dt.c +++ b/src/dm/dt/interface/dt.c @@ -609,7 +609,7 @@ static PetscErrorCode PetscQuadratureView_Ascii(PetscQuadrature quad, PetscViewe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscQuadratureView - View a `PetscQuadrature` object Collective @@ -1852,7 +1852,7 @@ PetscErrorCode PetscDTGaussQuadrature(PetscInt npoints, PetscReal a, PetscReal b PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDTGaussLobattoLegendreQuadrature - creates a set of the locations and weights of the Gauss-Lobatto-Legendre nodes of a given size on the domain $[-1,1]$ @@ -1863,8 +1863,8 @@ PetscErrorCode PetscDTGaussQuadrature(PetscInt npoints, PetscReal a, PetscReal b - type - `PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA` or `PETSCGAUSSLOBATTOLEGENDRE_VIA_NEWTON` Output Parameters: -+ x - quadrature points -- w - quadrature weights ++ x - quadrature points, pass in an array of length `npoints` +- w - quadrature weights, pass in an array of length `npoints` Level: intermediate @@ -1879,7 +1879,7 @@ PetscErrorCode PetscDTGaussQuadrature(PetscInt npoints, PetscReal a, PetscReal b .seealso: `PetscDTGaussQuadrature()`, `PetscGaussLobattoLegendreCreateType` @*/ -PetscErrorCode PetscDTGaussLobattoLegendreQuadrature(PetscInt npoints, PetscGaussLobattoLegendreCreateType type, PetscReal *x, PetscReal *w) +PetscErrorCode PetscDTGaussLobattoLegendreQuadrature(PetscInt npoints, PetscGaussLobattoLegendreCreateType type, PetscReal x[], PetscReal w[]) { PetscBool newton; @@ -2591,6 +2591,7 @@ PetscErrorCode PetscDTTensorQuadratureCreate(PetscQuadrature q1, PetscQuadrature PetscValidHeaderSpecific(q1, PETSCQUADRATURE_CLASSID, 1); PetscValidHeaderSpecific(q2, PETSCQUADRATURE_CLASSID, 2); PetscAssertPointer(q, 3); + PetscCall(PetscQuadratureGetOrder(q1, &order1)); PetscCall(PetscQuadratureGetOrder(q2, &order2)); PetscCheck(order1 == order2, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Order1 %" PetscInt_FMT " != %" PetscInt_FMT " Order2", order1, order2); @@ -2846,7 +2847,7 @@ static PetscErrorCode PetscDTLegendreIntegrate(PetscInt ninterval, const PetscRe .seealso: `PetscDTLegendreEval()` @*/ -PetscErrorCode PetscDTReconstructPoly(PetscInt degree, PetscInt nsource, const PetscReal *sourcex, PetscInt ntarget, const PetscReal *targetx, PetscReal *R) +PetscErrorCode PetscDTReconstructPoly(PetscInt degree, PetscInt nsource, const PetscReal sourcex[], PetscInt ntarget, const PetscReal targetx[], PetscReal R[]) { PetscInt i, j, k, *bdegrees, worksize; PetscReal xmin, xmax, center, hscale, *sourcey, *targety, *Bsource, *Bsinv, *Btarget; @@ -2889,7 +2890,7 @@ PetscErrorCode PetscDTReconstructPoly(PetscInt degree, PetscInt nsource, const P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscGaussLobattoLegendreIntegrate - Compute the L2 integral of a function on the GLL points Not Collective @@ -2907,7 +2908,7 @@ PetscErrorCode PetscDTReconstructPoly(PetscInt degree, PetscInt nsource, const P .seealso: `PetscDTGaussLobattoLegendreQuadrature()` @*/ -PetscErrorCode PetscGaussLobattoLegendreIntegrate(PetscInt n, PetscReal *nodes, PetscReal *weights, const PetscReal *f, PetscReal *in) +PetscErrorCode PetscGaussLobattoLegendreIntegrate(PetscInt n, PetscReal nodes[], PetscReal weights[], const PetscReal f[], PetscReal *in) { PetscInt i; @@ -2924,11 +2925,11 @@ PetscErrorCode PetscGaussLobattoLegendreIntegrate(PetscInt n, PetscReal *nodes, Input Parameters: + n - the number of GLL 
nodes -. nodes - the GLL nodes -- weights - the GLL weights +. nodes - the GLL nodes, of length `n` +- weights - the GLL weights, of length `n` Output Parameter: -. AA - the stiffness element +. AA - the stiffness element, of size `n` by `n` Level: beginner @@ -2939,7 +2940,7 @@ PetscErrorCode PetscGaussLobattoLegendreIntegrate(PetscInt n, PetscReal *nodes, .seealso: `PetscDTGaussLobattoLegendreQuadrature()`, `PetscGaussLobattoLegendreElementLaplacianDestroy()` @*/ -PetscErrorCode PetscGaussLobattoLegendreElementLaplacianCreate(PetscInt n, PetscReal *nodes, PetscReal *weights, PetscReal ***AA) +PetscErrorCode PetscGaussLobattoLegendreElementLaplacianCreate(PetscInt n, PetscReal nodes[], PetscReal weights[], PetscReal ***AA) { PetscReal **A; const PetscReal *gllnodes = nodes; @@ -3019,15 +3020,15 @@ PetscErrorCode PetscGaussLobattoLegendreElementLaplacianCreate(PetscInt n, Petsc Input Parameters: + n - the number of GLL nodes -. nodes - the GLL nodes -. weights - the GLL weightss -- AA - the stiffness element +. nodes - the GLL nodes, ignored +. weights - the GLL weights, ignored +- AA - the stiffness element from `PetscGaussLobattoLegendreElementLaplacianCreate()` Level: beginner .seealso: `PetscDTGaussLobattoLegendreQuadrature()`, `PetscGaussLobattoLegendreElementLaplacianCreate()` @*/ -PetscErrorCode PetscGaussLobattoLegendreElementLaplacianDestroy(PetscInt n, PetscReal *nodes, PetscReal *weights, PetscReal ***AA) +PetscErrorCode PetscGaussLobattoLegendreElementLaplacianDestroy(PetscInt n, PetscReal nodes[], PetscReal weights[], PetscReal ***AA) { PetscFunctionBegin; PetscCall(PetscFree((*AA)[0])); @@ -3043,12 +3044,12 @@ PetscErrorCode PetscGaussLobattoLegendreElementLaplacianDestroy(PetscInt n, Pets Input Parameters: + n - the number of GLL nodes -. nodes - the GLL nodes -- weights - the GLL weights +. nodes - the GLL nodes, of length `n` +- weights - the GLL weights, of length `n` Output Parameters: -+ AA - the stiffness element -- AAT - the transpose of AA (pass in `NULL` if you do not need this array) ++ AA - the stiffness element, of dimension `n` by `n` +- AAT - the transpose of `AA` (pass in `NULL` if you do not need this array), of dimension `n` by `n` Level: beginner @@ -3059,7 +3060,7 @@ PetscErrorCode PetscGaussLobattoLegendreElementLaplacianDestroy(PetscInt n, Pets .seealso: `PetscDTGaussLobattoLegendreQuadrature()`, `PetscGaussLobattoLegendreElementLaplacianDestroy()`, `PetscGaussLobattoLegendreElementGradientDestroy()` @*/ -PetscErrorCode PetscGaussLobattoLegendreElementGradientCreate(PetscInt n, PetscReal *nodes, PetscReal *weights, PetscReal ***AA, PetscReal ***AAT) +PetscErrorCode PetscGaussLobattoLegendreElementGradientCreate(PetscInt n, PetscReal nodes[], PetscReal weights[], PetscReal ***AA, PetscReal ***AAT) { PetscReal **A, **AT = NULL; const PetscReal *gllnodes = nodes; @@ -3103,16 +3104,16 @@ PetscErrorCode PetscGaussLobattoLegendreElementGradientCreate(PetscInt n, PetscR Input Parameters: + n - the number of GLL nodes -. nodes - the GLL nodes -. weights - the GLL weights -. AA - the stiffness element -- AAT - the transpose of the element +. nodes - the GLL nodes, ignored +. weights - the GLL weights, ignored +. 
AA - the stiffness element obtained with `PetscGaussLobattoLegendreElementGradientCreate()` +- AAT - the transpose of the element obtained with `PetscGaussLobattoLegendreElementGradientCreate()` Level: beginner .seealso: `PetscDTGaussLobattoLegendreQuadrature()`, `PetscGaussLobattoLegendreElementLaplacianCreate()`, `PetscGaussLobattoLegendreElementAdvectionCreate()` @*/ -PetscErrorCode PetscGaussLobattoLegendreElementGradientDestroy(PetscInt n, PetscReal *nodes, PetscReal *weights, PetscReal ***AA, PetscReal ***AAT) +PetscErrorCode PetscGaussLobattoLegendreElementGradientDestroy(PetscInt n, PetscReal nodes[], PetscReal weights[], PetscReal ***AA, PetscReal ***AAT) { PetscFunctionBegin; PetscCall(PetscFree((*AA)[0])); @@ -3133,11 +3134,11 @@ PetscErrorCode PetscGaussLobattoLegendreElementGradientDestroy(PetscInt n, Petsc Input Parameters: + n - the number of GLL nodes -. nodes - the GLL nodes -- weights - the GLL weightss +. nodes - the GLL nodes, of length `n` +- weights - the GLL weights, of length `n` Output Parameter: -. AA - the stiffness element +. AA - the advection element, of dimension `n` by `n` Level: beginner @@ -3150,7 +3151,7 @@ PetscErrorCode PetscGaussLobattoLegendreElementGradientDestroy(PetscInt n, Petsc .seealso: `PetscDTGaussLobattoLegendreQuadrature()`, `PetscGaussLobattoLegendreElementLaplacianCreate()`, `PetscGaussLobattoLegendreElementAdvectionDestroy()` @*/ -PetscErrorCode PetscGaussLobattoLegendreElementAdvectionCreate(PetscInt n, PetscReal *nodes, PetscReal *weights, PetscReal ***AA) +PetscErrorCode PetscGaussLobattoLegendreElementAdvectionCreate(PetscInt n, PetscReal nodes[], PetscReal weights[], PetscReal ***AA) { PetscReal **D; const PetscReal *gllweights = weights; @@ -3173,15 +3174,15 @@ PetscErrorCode PetscGaussLobattoLegendreElementAdvectionCreate(PetscInt n, Petsc Input Parameters: + n - the number of GLL nodes -. nodes - the GLL nodes -. weights - the GLL weights -- AA - advection +. nodes - the GLL nodes, ignored +. weights - the GLL weights, ignored +- AA - the advection element obtained with `PetscGaussLobattoLegendreElementAdvectionCreate()` Level: beginner .seealso: `PetscDTGaussLobattoLegendreQuadrature()`, `PetscGaussLobattoLegendreElementAdvectionCreate()` @*/ -PetscErrorCode PetscGaussLobattoLegendreElementAdvectionDestroy(PetscInt n, PetscReal *nodes, PetscReal *weights, PetscReal ***AA) +PetscErrorCode PetscGaussLobattoLegendreElementAdvectionDestroy(PetscInt n, PetscReal nodes[], PetscReal weights[], PetscReal ***AA) { PetscFunctionBegin; PetscCall(PetscFree((*AA)[0])); @@ -3230,7 +3231,7 @@ PetscErrorCode PetscGaussLobattoLegendreElementMassDestroy(PetscInt n, PetscReal - index - the index to convert: should be >= 0 and < Binomial(len - 1 + sum, sum) Output Parameter: -. coord - will be filled with the barycentric coordinate +. coord - will be filled with the barycentric coordinate, of length `len` Level: beginner @@ -3284,7 +3285,7 @@ PetscErrorCode PetscDTIndexToBary(PetscInt len, PetscInt sum, PetscInt index, Pe Input Parameters: + len - the desired length of the barycentric tuple (usually 1 more than the dimension it represents, so a barycentric coordinate in a triangle has length 3) . sum - the value that the sum of the barycentric coordinates (which will be non-negative integers) should sum to -- coord - a barycentric coordinate with the given length and sum +- coord - a barycentric coordinate with the given length `len` and `sum` Output Parameter: . 
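Taken together, the GLL helpers above are used in create/destroy pairs around one set of nodes and weights; a compact sketch (`n` is the number of GLL points, assumed >= 2):
.vb
  PetscReal *nodes, *weights, **AA;

  PetscCall(PetscMalloc2(n, &nodes, n, &weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(n, PETSCGAUSSLOBATTOLEGENDRE_VIA_NEWTON, nodes, weights));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(n, nodes, weights, &AA));
  /* AA[i][j] holds the n x n element stiffness matrix */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(n, nodes, weights, &AA));
  PetscCall(PetscFree2(nodes, weights));
.ve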
index - the unique index for the coordinate, >= 0 and < Binomial(len - 1 + sum, sum) diff --git a/src/dm/dt/interface/dtds.c b/src/dm/dt/interface/dtds.c index 5ffbf88a6b1..14282e62459 100644 --- a/src/dm/dt/interface/dtds.c +++ b/src/dm/dt/interface/dtds.c @@ -56,7 +56,7 @@ PetscErrorCode PetscDSRegister(const char sname[], PetscErrorCode (*function)(Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSSetType - Builds a particular `PetscDS` Collective; No Fortran Support @@ -94,7 +94,7 @@ PetscErrorCode PetscDSSetType(PetscDS prob, PetscDSType name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSGetType - Gets the `PetscDSType` name (as a string) from the `PetscDS` Not Collective; No Fortran Support @@ -231,7 +231,7 @@ static PetscErrorCode PetscDSView_Ascii(PetscDS ds, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSViewFromOptions - View a `PetscDS` based on values in the options database Collective @@ -253,7 +253,7 @@ PetscErrorCode PetscDSViewFromOptions(PetscDS A, PetscObject obj, const char nam PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSView - Views a `PetscDS` Collective @@ -350,6 +350,7 @@ PetscErrorCode PetscDSSetFromOptions(PetscDS prob) } PetscCall(PetscOptionsBool("-petscds_jac_pre", "Discrete System", "PetscDSUseJacobianPreconditioner", prob->useJacPre, &prob->useJacPre, &flg)); PetscCall(PetscOptionsBool("-petscds_force_quad", "Discrete System", "PetscDSSetForceQuad", prob->forceQuad, &prob->forceQuad, &flg)); + PetscCall(PetscOptionsInt("-petscds_print_integrate", "Discrete System", "", prob->printIntegrate, &prob->printIntegrate, NULL)); PetscTryTypeMethod(prob, setfromoptions); /* process any options handlers added with PetscObjectAddOptionsHandler() */ PetscCall(PetscObjectProcessOptionsHandlers((PetscObject)prob, PetscOptionsObject)); @@ -358,7 +359,7 @@ PetscErrorCode PetscDSSetFromOptions(PetscDS prob) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSSetUp - Construct data structures for the `PetscDS` Collective @@ -680,11 +681,9 @@ PetscErrorCode PetscDSCreate(MPI_Comm comm, PetscDS *ds) PetscFunctionBegin; PetscAssertPointer(ds, 2); - *ds = NULL; PetscCall(PetscDSInitializePackage()); PetscCall(PetscHeaderCreate(p, PETSCDS_CLASSID, "PetscDS", "Discrete System", "PetscDS", comm, PetscDSDestroy, PetscDSView)); - p->Nf = 0; p->setup = PETSC_FALSE; p->numConstants = 0; @@ -1256,7 +1255,7 @@ PetscErrorCode PetscDSGetJetDegree(PetscDS ds, PetscInt f, PetscInt *k) Level: developer -.seealso: ``PetscDS`, `PetscDSGetJetDegree()`, `PetscDSSetDiscretization()`, `PetscDSAddDiscretization()`, `PetscDSGetNumFields()`, `PetscDSCreate()` +.seealso: `PetscDS`, `PetscDSGetJetDegree()`, `PetscDSSetDiscretization()`, `PetscDSAddDiscretization()`, `PetscDSGetNumFields()`, `PetscDSCreate()` @*/ PetscErrorCode PetscDSSetJetDegree(PetscDS ds, PetscInt f, PetscInt k) { @@ -1581,7 +1580,7 @@ PetscErrorCode PetscDSSetRHSResidual(PetscDS ds, PetscInt f, void (*f0)(PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSHasJacobian - Checks that the Jacobian functions have been set Not Collective @@ -1730,7 +1729,7 @@ PetscErrorCode PetscDSSetJacobian(PetscDS ds, PetscInt f, PetscInt g, void (*g0) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSUseJacobianPreconditioner - Set whether to construct a Jacobian preconditioner Not Collective @@ -1741,6 +1740,9 @@ PetscErrorCode PetscDSSetJacobian(PetscDS ds, PetscInt f, PetscInt g, void (*g0) Level: intermediate + Developer Note: + Should be called 
`PetscDSSetUseJacobianPreconditioner()` + .seealso: `PetscDS`, `PetscDSGetJacobianPreconditioner()`, `PetscDSSetJacobianPreconditioner()`, `PetscDSGetJacobian()` @*/ PetscErrorCode PetscDSUseJacobianPreconditioner(PetscDS prob, PetscBool useJacPre) @@ -1751,7 +1753,7 @@ PetscErrorCode PetscDSUseJacobianPreconditioner(PetscDS prob, PetscBool useJacPr PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSHasJacobianPreconditioner - Checks if a Jacobian preconditioner matrix has been set Not Collective @@ -1900,7 +1902,7 @@ PetscErrorCode PetscDSSetJacobianPreconditioner(PetscDS ds, PetscInt f, PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDSHasDynamicJacobian - Signals that a dynamic Jacobian, dF/du_t, has been set Not Collective @@ -2845,7 +2847,7 @@ PetscErrorCode PetscDSGetConstants(PetscDS prob, PetscInt *numConstants, const P Input Parameters: + prob - The `PetscDS` object . numConstants - The number of constants -- constants - The array of constants, NULL if there are none +- constants - The array of constants, `NULL` if there are none Level: intermediate diff --git a/src/dm/dt/interface/dtprob.c b/src/dm/dt/interface/dtprob.c index 310df5029df..c6d79370f7b 100644 --- a/src/dm/dt/interface/dtprob.c +++ b/src/dm/dt/interface/dtprob.c @@ -517,12 +517,12 @@ PetscErrorCode PetscPDFSampleConstant3D(const PetscReal p[], const PetscReal dum Input Parameters: + dim - The dimension of sample points . prefix - The options prefix, or `NULL` -- name - The option name for the probability distribution type +- name - The options database name for the probability distribution type Output Parameters: -+ pdf - The PDF of this type -. cdf - The CDF of this type -- sampler - The PDF sampler of this type ++ pdf - The PDF of this type, or `NULL` +. 
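A sketch of the corresponding lookup in C; the `-my_density` option name is hypothetical, and the outputs may be requested selectively by passing `NULL`:
.vb
  PetscProbFunc pdf, cdf;

  /* dim = 2: dimension of the sample points; the sampler is not requested */
  PetscCall(PetscProbCreateFromOptions(2, NULL, "-my_density", &pdf, &cdf, NULL));
.ve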
cdf - The CDF of this type, or `NULL` +- sampler - The PDF sampler of this type, or `NULL` Level: intermediate diff --git a/src/dm/dt/interface/dtweakform.c b/src/dm/dt/interface/dtweakform.c index 213c0b514c9..a7c35b38fa8 100644 --- a/src/dm/dt/interface/dtweakform.c +++ b/src/dm/dt/interface/dtweakform.c @@ -329,7 +329,7 @@ static PetscErrorCode PetscWeakFormRewriteKeys_Internal(PetscWeakForm wf, PetscH PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscWeakFormRewriteKeys - Change any key on the given label to use the new set of label values Not Collective @@ -407,7 +407,7 @@ static PetscErrorCode PetscWeakFormReplaceLabel_Internal(PetscWeakForm wf, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscWeakFormReplaceLabel - Change any key on a label of the same name to use the new label Not Collective @@ -1055,7 +1055,7 @@ static PetscErrorCode PetscWeakFormView_Ascii(PetscWeakForm wf, PetscViewer view PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscWeakFormView - Views a `PetscWeakForm` Collective @@ -1104,11 +1104,9 @@ PetscErrorCode PetscWeakFormCreate(MPI_Comm comm, PetscWeakForm *wf) PetscFunctionBegin; PetscAssertPointer(wf, 2); - *wf = NULL; PetscCall(PetscDSInitializePackage()); PetscCall(PetscHeaderCreate(p, PETSCWEAKFORM_CLASSID, "PetscWeakForm", "Weak Form System", "PetscWeakForm", comm, PetscWeakFormDestroy, PetscWeakFormView)); - p->Nf = 0; PetscCall(PetscChunkBufferCreate(sizeof(&PetscWeakFormCreate), 2, &p->funcs)); PetscCall(PetscMalloc1(PETSC_NUM_WF, &p->form)); diff --git a/src/dm/dt/interface/f90-custom/zdtdsf90.c b/src/dm/dt/interface/f90-custom/zdtdsf90.c index 9cec65e7582..8a9413c1bc5 100644 --- a/src/dm/dt/interface/f90-custom/zdtdsf90.c +++ b/src/dm/dt/interface/f90-custom/zdtdsf90.c @@ -3,15 +3,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdsgettabulation_ PETSCDSGETTABULATION - #define petscdsrestoretabulation_ PETSCDSRESTORETABULATION - #define petscdsgetbdtabulation_ PETSCDSGETBDTABULATION - #define petscdsrestorebdtabulation_ PETSCDSRESTOREBDTABULATION + #define petscdsgettabulation_ PETSCDSGETTABULATION + #define petscdsrestoretabulation_ PETSCDSRESTORETABULATION #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdsgettabulation_ petscdsgettabulation - #define petscdsrestoretabulation_ petscdsrestoretabulation - #define petscdsgetbdtabulation_ petscdsgetbdtabulation - #define petscdsrestorebdtabulation_ petscdsrestorebdtabulation + #define petscdsgettabulation_ petscdsgettabulation + #define petscdsrestoretabulation_ petscdsrestoretabulation #endif PETSC_EXTERN void petscdsgettabulation_(PetscDS *prob, PetscInt *f, F90Array1d *ptrB, F90Array1d *ptrD, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(ptrb) PETSC_F90_2PTR_PROTO(ptrd)) diff --git a/src/dm/dt/interface/ftn-custom/zdsf.c b/src/dm/dt/interface/ftn-custom/zdsf.c index ce3bfb4b6fc..e1abd2d7a21 100644 --- a/src/dm/dt/interface/ftn-custom/zdsf.c +++ b/src/dm/dt/interface/ftn-custom/zdsf.c @@ -3,13 +3,9 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdsviewfromoptions_ PETSCDSVIEWFROMOPTIONS - #define petscdsview_ PETSCDSVIEW #define petscdssetcontext_ PETSCDSSETCONTEXT #define petscdssetriemannsolver_ PETSCDSSETRIEMANNSOLVER #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdsviewfromoptions_ petscdsviewfromoptions - #define petscdsview_ petscdsview #define petscdssetcontext_ petscdssetcontext #define petscdssetriemannsolver_ petscdssetriemannsolver #endif @@ -25,28 +21,9 @@ static void ourriemannsolver(PetscInt dim, PetscInt Nf, const 
PetscReal x[], con if (func) { (*func)(&dim, &Nf, x, n, uL, uR, &numConstants, constants, flux, _ctx); } } -PETSC_EXTERN void petscdsviewfromoptions_(PetscDS *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscDSViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petscdsview_(PetscDS *prob, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscDSView(*prob, v); - if (*ierr) return; -} - PETSC_EXTERN void petscdssetcontext_(PetscDS *prob, PetscInt *f, void *ctx, PetscErrorCode *ierr) { - *ierr = PetscDSSetContext(*prob, *f, *prob); + *ierr = PetscDSSetContext(*prob, *f, ctx); if (*ierr) return; } diff --git a/src/dm/dt/interface/ftn-custom/zdtf.c b/src/dm/dt/interface/ftn-custom/zdtf.c deleted file mode 100644 index 60bbd21bb1a..00000000000 --- a/src/dm/dt/interface/ftn-custom/zdtf.c +++ /dev/null @@ -1,16 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscquadratureview_ PETSCQUADRATUREVIEW -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscquadratureview_ petscquadratureview -#endif - -PETSC_EXTERN void petscquadratureview_(PetscQuadrature *q, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscQuadratureView(*q, v); -} diff --git a/src/dm/dt/interface/ftn-custom/zdtfef.c b/src/dm/dt/interface/ftn-custom/zdtfef.c deleted file mode 100644 index 32beec530a5..00000000000 --- a/src/dm/dt/interface/ftn-custom/zdtfef.c +++ /dev/null @@ -1,28 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscfeview_ PETSCFEVIEW - #define petscfecreatedefault_ PETSCFECREATEDEFAULT -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscfeview_ petscfeview - #define petscfecreatedefault_ petscfecreatedefault -#endif - -PETSC_EXTERN void petscfeview_(PetscFE *fe, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscFEView(*fe, v); -} - -PETSC_EXTERN void petscfecreatedefault_(MPI_Fint *comm, PetscInt *dim, PetscInt *Nc, PetscBool *isSimplex, char *prefix, PetscInt *qorder, PetscFE *fe, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *tprefix; - - FIXCHAR(prefix, len, tprefix); - *ierr = PetscFECreateDefault(MPI_Comm_f2c(*comm), *dim, *Nc, *isSimplex, tprefix, *qorder, fe); - if (*ierr) return; - FREECHAR(prefix, tprefix); -} diff --git a/src/dm/dt/space/impls/ptrimmed/spaceptrimmed.c b/src/dm/dt/space/impls/ptrimmed/spaceptrimmed.c index 88b4865a21d..f9c6e621ee0 100644 --- a/src/dm/dt/space/impls/ptrimmed/spaceptrimmed.c +++ b/src/dm/dt/space/impls/ptrimmed/spaceptrimmed.c @@ -383,7 +383,7 @@ static PetscErrorCode PetscSpaceInitialize_Ptrimmed(PetscSpace sp) [P_{r-1}(\mathbb{R}^n)]^n \oplus \bf{x} H_{r-1}(\mathbb{R}^n). $ - In $L_2$, ($\sim k=n$), trimmed polynomial spaces are identical to the standar polynomial spaces of one degree less, $\mathcal{P}_r^- \sim P_{r-1}$. + In $L_2$, ($\sim k=n$), trimmed polynomial spaces are identical to the standard polynomial spaces of one degree less, $\mathcal{P}_r^- \sim P_{r-1}$. 
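For example, with $r = 1$ and $n = 2$ the decomposition above gives $[P_0(\mathbb{R}^2)]^2 \oplus \mathbf{x} H_0(\mathbb{R}^2) = \{ (a + c x,\ b + c y) : a, b, c \in \mathbb{R} \}$, the lowest-order Raviart-Thomas space.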
.seealso: `PetscSpace`, `PetscSpaceType`, `PetscSpaceCreate()`, `PetscSpaceSetType()`, `PetscDTPTrimmedEvalJet()` M*/ diff --git a/src/dm/dt/space/interface/space.c b/src/dm/dt/space/interface/space.c index 30e5f95609e..16d6a5ebd26 100644 --- a/src/dm/dt/space/interface/space.c +++ b/src/dm/dt/space/interface/space.c @@ -9,7 +9,7 @@ PetscBool PetscSpaceRegisterAllCalled = PETSC_FALSE; /*@C PetscSpaceRegister - Adds a new `PetscSpace` implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine @@ -45,7 +45,7 @@ PetscErrorCode PetscSpaceRegister(const char sname[], PetscErrorCode (*function) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSpaceSetType - Builds a particular `PetscSpace` Collective @@ -84,7 +84,7 @@ PetscErrorCode PetscSpaceSetType(PetscSpace sp, PetscSpaceType name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSpaceGetType - Gets the `PetscSpaceType` (as a string) from the object. Not Collective @@ -109,7 +109,7 @@ PetscErrorCode PetscSpaceGetType(PetscSpace sp, PetscSpaceType *name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSpaceViewFromOptions - View a `PetscSpace` based on values in the options database Collective @@ -131,7 +131,7 @@ PetscErrorCode PetscSpaceViewFromOptions(PetscSpace A, PetscObject obj, const ch PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSpaceView - Views a `PetscSpace` Collective @@ -207,7 +207,7 @@ PetscErrorCode PetscSpaceSetFromOptions(PetscSpace sp) } PetscCall(PetscOptionsBoundedInt("-petscspace_degree", "The (maximally included) polynomial degree", "PetscSpaceSetDegree", sp->degree, &sp->degree, NULL, 0)); PetscCall(PetscOptionsBoundedInt("-petscspace_variables", "The number of different variables, e.g. x and y", "PetscSpaceSetNumVariables", sp->Nv, &sp->Nv, NULL, 0)); - PetscCall(PetscOptionsBoundedInt("-petscspace_components", "The number of components", "PetscSpaceSetNumComponents", sp->Nc, &sp->Nc, NULL, 0)); + PetscCall(PetscOptionsBoundedInt("-petscspace_components", "The number of components", "PetscSpaceSetNumComponents", sp->Nc, &sp->Nc, NULL, -1)); PetscTryTypeMethod(sp, setfromoptions, PetscOptionsObject); /* process any options handlers added with PetscObjectAddOptionsHandler() */ PetscCall(PetscObjectProcessOptionsHandlers((PetscObject)sp, PetscOptionsObject)); @@ -216,7 +216,7 @@ PetscErrorCode PetscSpaceSetFromOptions(PetscSpace sp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSpaceSetUp - Construct data structures for the `PetscSpace` Collective @@ -288,11 +288,9 @@ PetscErrorCode PetscSpaceCreate(MPI_Comm comm, PetscSpace *sp) PetscFunctionBegin; PetscAssertPointer(sp, 2); PetscCall(PetscCitationsRegister(FECitation, &FEcite)); - *sp = NULL; PetscCall(PetscFEInitializePackage()); PetscCall(PetscHeaderCreate(s, PETSCSPACE_CLASSID, "PetscSpace", "Linear Space", "PetscSpace", comm, PetscSpaceDestroy, PetscSpaceView)); - s->degree = 0; s->maxDegree = PETSC_DETERMINE; s->Nc = 1; @@ -459,7 +457,7 @@ PetscErrorCode PetscSpaceGetNumVariables(PetscSpace sp, PetscInt *n) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSpaceEvaluate - Evaluate the basis functions and their derivatives (jet) at each point Input Parameters: @@ -468,14 +466,14 @@ PetscErrorCode PetscSpaceGetNumVariables(PetscSpace sp, PetscInt *n) - points - The point coordinates Output Parameters: -+ B - The function evaluations in a npoints x nfuncs array -. 
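A sketch of evaluating a space at a single point in C; the degree and variable counts are illustrative, and `nfuncs` below is the space dimension:
.vb
  PetscSpace sp;
  PetscInt   pdim;
  PetscReal  point[2] = {0.5, 0.5}, *B;

  PetscCall(PetscSpaceCreate(PETSC_COMM_SELF, &sp));
  PetscCall(PetscSpaceSetType(sp, PETSCSPACEPOLYNOMIAL));
  PetscCall(PetscSpaceSetNumVariables(sp, 2));
  PetscCall(PetscSpaceSetDegree(sp, 2, PETSC_DETERMINE));
  PetscCall(PetscSpaceSetUp(sp));
  PetscCall(PetscSpaceGetDimension(sp, &pdim)); /* nfuncs */
  PetscCall(PetscMalloc1(pdim, &B));            /* npoints = 1 */
  PetscCall(PetscSpaceEvaluate(sp, 1, point, B, NULL, NULL));
  PetscCall(PetscFree(B));
  PetscCall(PetscSpaceDestroy(&sp));
.ve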
D - The derivative evaluations in a npoints x nfuncs x dim array -- H - The second derivative evaluations in a npoints x nfuncs x dim x dim array ++ B - The function evaluations in a `npoints` x `nfuncs` array +. D - The derivative evaluations in a `npoints` x `nfuncs` x `dim` array +- H - The second derivative evaluations in a `npoints` x `nfuncs` x `dim` x `dim` array Level: beginner Note: - Above nfuncs is the dimension of the space, and dim is the spatial dimension. The coordinates are given + Above `nfuncs` is the dimension of the space, and `dim` is the spatial dimension. The coordinates are given on the reference cell, not in real space. .seealso: `PetscSpace`, `PetscFECreateTabulation()`, `PetscFEGetCellTabulation()`, `PetscSpaceCreate()` diff --git a/src/dm/f90-mod/petscdm.h b/src/dm/f90-mod/petscdm.h index e27522c1d79..14bfa8c4ab5 100644 --- a/src/dm/f90-mod/petscdm.h +++ b/src/dm/f90-mod/petscdm.h @@ -3,12 +3,89 @@ ! #include "petsc/finclude/petscdm.h" - type tDM - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tDM end type tDM - DM, parameter :: PETSC_NULL_DM = tDM(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DM +#endif + + type, extends(tPetscObject) :: tDMAdaptor + end type tDMAdaptor + DMAdaptor, parameter :: PETSC_NULL_DM_ADAPTOR = tDMAdaptor(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DM_ADAPTOR +#endif + + type, extends(tPetscObject) :: tDMField + end type tDMField + DMField, parameter :: PETSC_NULL_DM_FIELD = tDMField(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DM_FIELD +#endif + + type, extends(tPetscObject) :: tPetscQuadrature + end type tPetscQuadrature + PetscQuadrature, parameter :: PETSC_NULL_QUADRATURE = tPetscQuadrature(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_QUADRATURE +#endif + + type, extends(tPetscObject) :: tPetscWeakForm + end type tPetscWeakForm + PetscWeakForm, parameter :: PETSC_NULL_WEAKFORM = tPetscWeakForm(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_WEAKFORM +#endif + + type, extends(tPetscObject) :: tPetscDS + end type tPetscDS + PetscDS, parameter :: PETSC_NULL_DS = tPetscDS(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DS +#endif + + type, extends(tPetscObject) :: tPetscFE + end type tPetscFE + PetscFE, parameter :: PETSC_NULL_FE = tPetscFE(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_FE +#endif + + type, extends(tPetscObject) :: tPetscSpace + end type tPetscSpace + PetscSpace, parameter :: PETSC_NULL_SPACE = tPetscSpace(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SPACE +#endif + + type, extends(tPetscObject) :: tPetscDualSpace + end type tPetscDualSpace + PetscDualSpace, parameter :: PETSC_NULL_DUAL_SPACE = tPetscDualSpace(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DUAL_SPACE +#endif + + type, extends(tPetscObject) :: tPetscFV + end type tPetscFV + PetscFV, parameter :: PETSC_NULL_FV = tPetscFV(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_FV +#endif + + type, extends(tPetscObject) :: tPetscLimiter + end type tPetscLimiter 
+ PetscLimiter, parameter :: PETSC_NULL_LIMITER = tPetscLimiter(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_LIMITER +#endif + + type, extends(tPetscObject) :: tPetscPartitioner + end type tPetscPartitioner + PetscPartitioner, parameter :: PETSC_NULL_PARTITIONER = tPetscPartitioner(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_PARTITIONER +#endif ! ! Types of periodicity ! @@ -69,7 +146,15 @@ PetscEnum, parameter :: DM_REORDER_DEFAULT_NOTSET = -1 PetscEnum, parameter :: DM_REORDER_DEFAULT_FALSE = 0 PetscEnum, parameter :: DM_REORDER_DEFAULT_TRUE = 1 - -#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DM -#endif +! +! PetscDTNodeType +! + PetscEnum, parameter :: PETSCDTNODES_DEFAULT = -1 + PetscEnum, parameter :: PETSCDTNODES_GAUSSJACOBI = 0 + PetscEnum, parameter :: PETSCDTNODES_EQUISPACED = 1 + PetscEnum, parameter :: PETSCDTNODES_TANHSINH = 2 +! +! PetscGaussLobattoLegendreCreateType +! + PetscEnum, parameter :: PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBR = 0 + PetscEnum, parameter :: PETSCGAUSSLOBATTOLEGENDRE_VIA_NEWTON = 1 diff --git a/src/dm/f90-mod/petscdm.h90 b/src/dm/f90-mod/petscdm.h90 index 8f8197c4c5f..e69de29bb2d 100644 --- a/src/dm/f90-mod/petscdm.h90 +++ b/src/dm/f90-mod/petscdm.h90 @@ -1,35 +0,0 @@ - Interface - subroutine DMGetStratumIS(d,str,v,i,ierr) - import tDM,tIS - type(tDM), intent(in) :: d - character(*), intent(in) :: str - PetscInt, intent(in) :: v - type(tIS), intent(out) :: i - PetscErrorCode, intent(out) :: ierr - end Subroutine DMGetStratumIS - subroutine DMGetStratumSize(d,str,v,s,ierr) - import tDM - type(tDM), intent(in) :: d - character(*), intent(in) :: str - PetscInt, intent(in) :: v - PetscInt, intent(out) :: s - PetscErrorCode, intent(out) :: ierr - end Subroutine DMGetStratumSize - subroutine DMDestroy(a,ierr) - import tDM - DM a - PetscErrorCode ierr - end subroutine - subroutine DMView(a,b,z) - import tDM,tPetscViewer - DM a - PetscViewer b - PetscErrorCode z - end subroutine - subroutine DMLoad(a,b,z) - import tDM,tPetscViewer - DM a - PetscViewer b - PetscErrorCode z - end subroutine - end Interface diff --git a/src/dm/f90-mod/petscdmcomposite.h90 b/src/dm/f90-mod/petscdmcomposite.h90 index de2ea1a04bd..9501f5b5687 100644 --- a/src/dm/f90-mod/petscdmcomposite.h90 +++ b/src/dm/f90-mod/petscdmcomposite.h90 @@ -60,50 +60,6 @@ End Subroutine End Interface - Interface DMCompositeGetAccessArray - Subroutine DMCompositeGetAccessArray(a,b,c,d,e,z) - import tDM,tVec - DM a - Vec b - PetscInt c,d(*) - Vec e(*) - PetscErrorCode z - End Subroutine - End Interface - - Interface DMCompositeRestoreAccessArray - Subroutine DMCompositeRestoreAccessArray(a,b,c,d,e,z) - import tDM,tVec - DM a - Vec b - PetscInt c,d(*) - Vec e(*) - PetscErrorCode z - End Subroutine - End Interface - - Interface DMCompositeGetLocalAccessArray - Subroutine DMCompositeGetLocalAccessArray(a,b,c,d,e,z) - import tDM,tVec - DM a - Vec b - PetscInt c,d(*) - Vec e(*) - PetscErrorCode z - End Subroutine - End Interface - - Interface DMCompositeRestoreLocalAccessArray - Subroutine DMCompositeRestoreLocalAccessArray(a,b,c,d,e,z) - import tDM,tVec - DM a - Vec b - PetscInt c,d(*) - Vec e(*) - PetscErrorCode z - End Subroutine - End Interface - Interface DMCompositeGetGlobalISs Subroutine DMCompositeGetGlobalISs(a,b,z) import tDM,tIS diff --git a/src/dm/f90-mod/petscdmda.h90 b/src/dm/f90-mod/petscdmda.h90 index 18678e23a59..bae60ab57da 
100644 --- a/src/dm/f90-mod/petscdmda.h90 +++ b/src/dm/f90-mod/petscdmda.h90 @@ -156,42 +156,6 @@ End Subroutine End Interface DMDAVecRestoreArrayReadF90 - Interface DMDACreate1d - Subroutine DMDACreate1d(a,b,c,d,e,f,g,z) - import tDM - MPI_Comm a - DMBoundaryType b - PetscInt c,d,e,f(*) - DM g - PetscErrorCode z - End Subroutine - End Interface DMDACreate1d - - Interface DMDACreate2d - Subroutine DMDACreate2d(a,b,c,d,e,f,g,h,i,j,k,l,m,z) - import tDM - MPI_Comm a - DMBoundaryType b,c - DMDAStencilType d - PetscInt e,f,g,h,i,j,k(*),l(*) - DM m - PetscErrorCode z - End Subroutine - End Interface DMDACreate2d - - Interface DMDACreate3d - Subroutine DMDACreate3d(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,z) - import tDM - MPI_Comm a - DMBoundaryType b,c,d - DMDAStencilType e - PetscInt f,g,h,i,j,k,l,m,n(*) - PetscInt o(*),p(*) - DM q - PetscErrorCode z - End Subroutine - End Interface DMDACreate3d - Interface DMDAGetNeighbors Subroutine DMDAGetNeighbors(a,b,z) import tDM @@ -200,117 +164,3 @@ PetscErrorCode z End Subroutine End Interface DMDAGetNeighbors - - Interface DMDAGetOwnershipRange - Subroutine DMDAGetOwnershipRange(a,b,c,d,z) - import tDM - DM a - PetscInt b(*), c(*), d(*) - PetscErrorCode z - End Subroutine - End Interface DMDAGetOwnershipRange - - Interface DMDAGetRefinementFactor - Subroutine DMDAGetRefinementFactor(a,b,c,d,z) - import tDM - DM a - PetscInt b(*), c(*), d(*) - PetscErrorCode z - End Subroutine - End Interface DMDAGetRefinementFactor - - Interface DMDASetFieldName - Subroutine DMDASetFieldName(a,b,c,z) - import tDM - DM a - PetscInt b - character(*) c - PetscErrorCode z - End Subroutine - End Interface DMDASetFieldName - - Interface DMDAGetFieldName - Subroutine DMDAGetFieldName(a,b,c,z) - import tDM - DM a - PetscInt b - character(*) c - PetscErrorCode z - End Subroutine - End Interface DMDAGetFieldName - - Interface DMDASetAOType - Subroutine DMDASetAOType(a,b,z) - import tDM - DM a - character(*) b - PetscErrorCode z - End Subroutine - End Interface DMDASetAOType - - Interface DMDAGetScatter - Subroutine DMDAGetScatter(a,b,c,z) - import tDM,tVecScatter - DM a - VecScatter b,c - PetscErrorCode z - End Subroutine - End Interface DMDAGetScatter - - Interface DMDAGetCorners - Subroutine DMDAGetCorners000000(a,x,y,z,m,n,p,ierr) - import tDM - DM a - PetscInt, intent(out) :: x,y,z - PetscInt, intent(out) :: m,n,p - PetscErrorCode, intent(out) :: ierr - End Subroutine DMDAGetCorners000000 - - Subroutine DMDAGetCorners001001(a,x,y,z,m,n,p,ierr) - import tDM - DM a - PetscInt, intent(out) :: x,y - PetscInt :: m,n - PetscInt :: z(*),p(*) - PetscErrorCode, intent(out) :: ierr - End Subroutine DMDAGetCorners001001 - - Subroutine DMDAGetCorners011011(a,x,y,z,m,n,p,ierr) - import tDM - DM a - PetscInt, intent(out) :: x - PetscInt, intent(out) :: m - PetscInt :: z(*),y(*) - PetscInt :: p(*),n(*) - PetscErrorCode, intent(out) :: ierr - End Subroutine DMDAGetCorners011011 - End Interface DMDAGetCorners - - Interface DMDAGetGhostCorners - Subroutine DMDAGetGhostCorners000000(a,x,y,z,m,n,p,ierr) - import tDM - DM a - PetscInt, intent(out) :: x,y,z - PetscInt, intent(out) :: m,n,p - PetscErrorCode, intent(out) :: ierr - End Subroutine DMDAGetGhostCorners000000 - - Subroutine DMDAGetGhostCorners001001(a,x,y,z,m,n,p,ierr) - import tDM - DM a - PetscInt, intent(out) :: x,y - PetscInt :: m,n - PetscInt :: z(*),p(*) - PetscErrorCode, intent(out) :: ierr - End Subroutine DMDAGetGhostCorners001001 - - Subroutine DMDAGetGhostCorners011011(a,x,y,z,m,n,p,ierr) - import tDM - DM a - PetscInt, 
intent(out) :: x - PetscInt, intent(out) :: m - PetscInt :: z(*),y(*) - PetscInt :: p(*),n(*) - PetscErrorCode, intent(out) :: ierr - End Subroutine DMDAGetGhostCorners011011 - End Interface DMDAGetGhostCorners diff --git a/src/dm/f90-mod/petscdmlabel.h b/src/dm/f90-mod/petscdmlabel.h index b98522941e8..6e02cfdc750 100644 --- a/src/dm/f90-mod/petscdmlabel.h +++ b/src/dm/f90-mod/petscdmlabel.h @@ -3,13 +3,9 @@ ! #include "petsc/finclude/petscdmlabel.h" - type tDMLabel - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tDMLabel end type tDMLabel - DMLabel, parameter :: PETSC_NULL_DMLABEL = tDMLabel(0) - #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DMLABEL #endif diff --git a/src/dm/f90-mod/petscdmlabel.h90 b/src/dm/f90-mod/petscdmlabel.h90 deleted file mode 100644 index 95d92c45604..00000000000 --- a/src/dm/f90-mod/petscdmlabel.h90 +++ /dev/null @@ -1,46 +0,0 @@ - Interface - subroutine DMGetLabel(d,str,l,ierr) - import tDM,tDMLabel - DM, intent(in) :: d - character(*), intent(in) :: str - DMLabel, intent(out) :: l - PetscErrorCode, intent(out) :: ierr - end Subroutine DMGetLabel - end Interface - Interface - subroutine DMGetLabelSize(d,str,l,ierr) - import tDM - DM, intent(in) :: d - character(*), intent(in) :: str - PetscInt, intent(out) :: l - PetscErrorCode, intent(out) :: ierr - end Subroutine DMGetLabelSize - end Interface - Interface - subroutine DMGetLabelValue(d,str,p,v,ierr) - import tDM - DM, intent(in) :: d - character(*), intent(in) :: str - PetscInt, intent(in) :: p - PetscInt, intent(out) :: v - PetscErrorCode, intent(out) :: ierr - end Subroutine DMGetLabelValue - end Interface - Interface - subroutine DMGetLabelIdIS(d,str,i,ierr) - import tDM,tIS - DM, intent(in) :: d - character(*), intent(in) :: str - IS, intent(out) :: i - PetscErrorCode, intent(out) :: ierr - end Subroutine DMGetLabelIdIS - end Interface - Interface - subroutine DMSetLabelValue(d,str,p,v,ierr) - import tDM - DM, intent(in) :: d - character(*), intent(in) :: str - PetscInt, intent(in) :: p,v - PetscErrorCode, intent(out) :: ierr - end Subroutine DMSetLabelValue - end Interface diff --git a/src/dm/f90-mod/petscdmmod.F90 b/src/dm/f90-mod/petscdmmod.F90 index 4b04a3b5335..e1d8b2d7c0e 100644 --- a/src/dm/f90-mod/petscdmmod.F90 +++ b/src/dm/f90-mod/petscdmmod.F90 @@ -56,7 +56,6 @@ module petscdmforestdef module petscdmlabel use petscdmlabeldef use petscdmdef -#include <../src/dm/f90-mod/petscdmlabel.h90> interface #include <../src/dm/f90-mod/ftn-auto-interfaces/petscdmlabel.h90> end interface diff --git a/src/dm/f90-mod/petscdmplex.h b/src/dm/f90-mod/petscdmplex.h index 32ddde1f788..f1c85016eb8 100644 --- a/src/dm/f90-mod/petscdmplex.h +++ b/src/dm/f90-mod/petscdmplex.h @@ -3,6 +3,12 @@ ! #include "petsc/finclude/petscdmplex.h" + type, extends(tPetscObject) :: tDMPlexTransform + end type tDMPlexTransform + DMPlexTransform, parameter :: PETSC_NULL_DMPLEXTRANSFORM = tDMPlexTransform(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DMPLEXTRANSFORM +#endif ! ! DMPlexInterpolatedFlag ! 
@@ -17,9 +23,3 @@ PetscEnum, parameter :: DMPLEX_TPS_SCHWARZ_P = 0 PetscEnum, parameter :: DMPLEX_TPS_GYROID = 1 - type tDMPlexTransform - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE - end type tDMPlexTransform - - DMPlexTransform, parameter :: PETSC_NULL_DMPLEXTRANSFORM = tDMPlexTransform(0) diff --git a/src/dm/f90-mod/petscdmplex.h90 b/src/dm/f90-mod/petscdmplex.h90 index 0b0a7fb53ce..82049777adc 100644 --- a/src/dm/f90-mod/petscdmplex.h90 +++ b/src/dm/f90-mod/petscdmplex.h90 @@ -230,7 +230,7 @@ Interface Subroutine DMPlexComputeCellGeometryFEM(m,c,fe,v0,J,iJ,dJ,er) - import tDM + import tDM, tPetscFE PetscInt c PetscReal, pointer :: v0(:) PetscReal, pointer :: J(:) @@ -328,29 +328,6 @@ End Subroutine End Interface - Interface - Subroutine DMPlexCreateFromFile(c,str,pstr,i,m,ierr) - import tDM - MPI_Comm :: c - character(len=*) :: str - character(len=*) :: pstr - PetscBool, intent(in) :: i - DM, intent(out) :: m - PetscErrorCode, intent(out):: ierr - End Subroutine - End Interface - - Interface - Subroutine DMPlexDistribute(m,o,sf,mp,ierr) - import tDM,tPetscSF - DM, intent(in) :: m - PetscInt, intent(in) :: o - PetscSF :: sf - DM, intent(out) :: mp - PetscErrorCode, intent(out):: ierr - End Subroutine - End Interface - Interface Subroutine DMPlexCreateDefault(m,ierr) import tDM @@ -358,14 +335,3 @@ PetscErrorCode, intent(out):: ierr End Subroutine End Interface - - Interface - Subroutine PetscViewerExodusIIOpen(c,str,mode,v,ierr) - import tPetscViewer - MPI_Comm :: c - character(len=*) :: str - PetscFileMode :: mode - PetscViewer :: v - PetscErrorCode, intent(out) :: ierr - End Subroutine - End Interface diff --git a/src/dm/f90-mod/petscdmswarm.h90 b/src/dm/f90-mod/petscdmswarm.h90 index eb9e9fec4fc..69b52a4deee 100644 --- a/src/dm/f90-mod/petscdmswarm.h90 +++ b/src/dm/f90-mod/petscdmswarm.h90 @@ -21,23 +21,3 @@ PetscErrorCode ierr End Subroutine End Interface - - Interface - Subroutine DMSwarmCreateGlobalVectorFromField(dm,fieldname,vec,ierr) - import tDM,tVec - DM dm - character(len=*) :: fieldname - Vec vec - PetscErrorCode ierr - End Subroutine - End Interface - - Interface - Subroutine DMSwarmDestroyGlobalVectorFromField(dm,fieldname,vec,ierr) - import tDM,tVec - DM dm - character(len=*) :: fieldname - Vec vec - PetscErrorCode ierr - End Subroutine - End Interface diff --git a/src/dm/f90-mod/petscdt.h90 b/src/dm/f90-mod/petscdt.h90 index 6f8ed139038..229e53be47c 100644 --- a/src/dm/f90-mod/petscdt.h90 +++ b/src/dm/f90-mod/petscdt.h90 @@ -1,6 +1,7 @@ Interface Subroutine PetscQuadratureGetData(q,dim,nc,np,p,w,ierr) + import tPetscQuadrature PetscInt dim, nc, np PetscReal, pointer :: p(:) PetscReal, pointer :: w(:) @@ -11,6 +12,7 @@ Interface Subroutine PetscQuadratureRestoreData(q,dim,nc,np,p,w,ierr) + import tPetscQuadrature PetscInt dim, nc, np PetscReal, pointer :: p(:) PetscReal, pointer :: w(:) @@ -21,6 +23,7 @@ Interface Subroutine PetscQuadratureSetData(q,dim,nc,np,p,w,ierr) + import tPetscQuadrature PetscInt dim, nc, np PetscReal, pointer :: p(:) PetscReal, pointer :: w(:) @@ -31,6 +34,7 @@ Interface Subroutine PetscDSGetTabulation(prob,f,b,bDer,ierr) + import tPetscDS PetscInt f PetscReal, pointer :: b(:) PetscReal, pointer :: bDer(:) @@ -41,6 +45,7 @@ Interface Subroutine PetscDSRestoreTabulation(prob,f,b,bDer,ierr) + import tPetscDS PetscInt f PetscReal, pointer :: b(:) PetscReal, pointer :: bDer(:) @@ -51,6 +56,7 @@ Interface Subroutine PetscFECreateDefault(c,d,n,i,str,o,f,ierr) + import tPetscFE MPI_Comm, intent(in) :: c PetscInt, intent(in) :: d,n 
PetscBool, intent(in) :: i diff --git a/src/dm/field/impls/ds/dmfieldds.c b/src/dm/field/impls/ds/dmfieldds.c index 0a5cb09b8f3..5380642c1ce 100644 --- a/src/dm/field/impls/ds/dmfieldds.c +++ b/src/dm/field/impls/ds/dmfieldds.c @@ -57,6 +57,7 @@ static PetscErrorCode DMFieldView_DS(DMField field, PetscViewer viewer) static PetscErrorCode DMFieldDSGetHeightDisc(DMField field, PetscInt height, PetscObject discList[], PetscObject *disc) { PetscFunctionBegin; + PetscCheck(height >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Height %" PetscInt_FMT " must be non-negative", height); if (!discList[height]) { PetscClassId id; @@ -794,10 +795,32 @@ static PetscErrorCode DMFieldComputeFaceData_DS(DMField field, IS pointIS, Petsc } } if (maxDegree <= 1) { - PetscInt numCells, offset, *cells; - PetscFEGeom *cellGeom; - IS suppIS; - + PetscQuadrature cellQuad = NULL; + PetscInt numCells, offset, *cells; + PetscFEGeom *cellGeom; + IS suppIS; + + if (quad) { + DM dm; + PetscReal *points, *weights; + PetscInt tdim, Nc, Np; + + PetscCall(DMFieldGetDM(field, &dm)); + PetscCall(DMGetDimension(dm, &tdim)); + if (tdim > dim) { + // Make a compatible cell quadrature (points don't matter since it's affine) + PetscCall(PetscQuadratureCreate(PETSC_COMM_SELF, &cellQuad)); + PetscCall(PetscQuadratureGetData(quad, NULL, &Nc, &Np, NULL, NULL)); + PetscCall(PetscCalloc1((dim + 1) * Np, &points)); + PetscCall(PetscCalloc1(Nc * Np, &weights)); + PetscCall(PetscQuadratureSetData(cellQuad, dim + 1, Nc, Np, points, weights)); + } else { + // TODO J will be wrong here, but other things need to be fixed + // This path comes from calling DMProjectBdFieldLabelLocal() in Plex ex5 + PetscCall(PetscObjectReference((PetscObject)quad)); + cellQuad = quad; + } + } for (p = 0, numCells = 0; p < numFaces; p++) { PetscInt point = points[p]; PetscInt numSupp, numChildren; @@ -819,7 +842,7 @@ static PetscErrorCode DMFieldComputeFaceData_DS(DMField field, IS pointIS, Petsc for (s = 0; s < numSupp; s++, offset++) cells[offset] = supp[s]; } PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numCells, cells, PETSC_USE_POINTER, &suppIS)); - PetscCall(DMFieldCreateFEGeom(field, suppIS, quad, PETSC_FALSE, &cellGeom)); + PetscCall(DMFieldCreateFEGeom(field, suppIS, cellQuad, PETSC_FALSE, &cellGeom)); for (p = 0, offset = 0; p < numFaces; p++) { PetscInt point = points[p]; PetscInt numSupp, s, q; @@ -836,6 +859,7 @@ static PetscErrorCode DMFieldComputeFaceData_DS(DMField field, IS pointIS, Petsc } } PetscCall(PetscFEGeomDestroy(&cellGeom)); + PetscCall(PetscQuadratureDestroy(&cellQuad)); PetscCall(ISDestroy(&suppIS)); PetscCall(PetscFree(cells)); } else { diff --git a/src/dm/field/interface/dmfield.c b/src/dm/field/interface/dmfield.c index aefb22aa0e3..085e0e982e3 100644 --- a/src/dm/field/interface/dmfield.c +++ b/src/dm/field/interface/dmfield.c @@ -49,7 +49,7 @@ PetscErrorCode DMFieldDestroy(DMField *field) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMFieldView - view a `DMField` Collective @@ -86,7 +86,7 @@ PetscErrorCode DMFieldView(DMField field, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMFieldSetType - set the `DMField` implementation Collective @@ -129,7 +129,7 @@ PetscErrorCode DMFieldSetType(DMField field, DMFieldType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMFieldGetType - Gets the `DMFieldType` name (as a string) from the `DMField`. 
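For orientation, the usual consumer-side pattern for a `DMField` is a borrowed handle plus an evaluation call; a sketch, assuming `dm` carries coordinates, `points` packs the evaluation points in a `Vec`, and `B` has room for one scalar per coordinate component per point:
.vb
  DMField coordField;

  PetscCall(DMGetCoordinateField(dm, &coordField)); /* borrowed reference, do not destroy */
  PetscCall(DMFieldEvaluate(coordField, points, PETSC_SCALAR, B, NULL, NULL));
.ve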
Not Collective diff --git a/src/dm/field/interface/dmfieldregi.c b/src/dm/field/interface/dmfieldregi.c index 2dd0214397b..a2d003e1af6 100644 --- a/src/dm/field/interface/dmfieldregi.c +++ b/src/dm/field/interface/dmfieldregi.c @@ -29,7 +29,7 @@ PetscErrorCode DMFieldRegisterAll(void) /*@C DMFieldRegister - Adds an implementation of the `DMField` object. - Not collective + Not collective, No Fortran Support Input Parameters: + sname - name of a new user-defined implementation diff --git a/src/dm/impls/composite/f90-custom/zfddaf90.c b/src/dm/impls/composite/f90-custom/zfddaf90.c index 348eb36a67e..b8390d0bd80 100644 --- a/src/dm/impls/composite/f90-custom/zfddaf90.c +++ b/src/dm/impls/composite/f90-custom/zfddaf90.c @@ -4,11 +4,9 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define dmcompositegetaccessvpvp_ DMCOMPOSITEGETACCESSVPVP #define dmcompositerestoreaccessvpvp_ DMCOMPOSITERESTOREACCESSVPVP - #define dmcompositegetentriesarray_ DMCOMPOSITEGETENTRIESARRAY #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define dmcompositegetaccessvpvp_ dmcompositegetaccessvpvp #define dmcompositerestoreaccessvpvp_ dmcompositerestoreaccessvpvp - #define dmcompositegetentriesarray_ dmcompositegetentriesarray #endif PETSC_EXTERN void dmcompositegetaccessvpvp_(DM *dm, Vec *v, Vec *v1, F90Array1d *p1, Vec *v2, F90Array1d *p2, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(ptrd1) PETSC_F90_2PTR_PROTO(ptrd2)) @@ -27,8 +25,3 @@ PETSC_EXTERN void dmcompositerestoreaccessvpvp_(DM *dm, Vec *v, Vec *v1, F90Arra *ierr = F90Array1dDestroy(p1, MPIU_SCALAR PETSC_F90_2PTR_PARAM(ptrd1)); *ierr = F90Array1dDestroy(p2, MPIU_SCALAR PETSC_F90_2PTR_PARAM(ptrd2)); } - -PETSC_EXTERN void dmcompositegetentriesarray_(DM *dm, DM *dmarray, PetscErrorCode *ierr) -{ - *ierr = DMCompositeGetEntriesArray(*dm, dmarray); -} diff --git a/src/dm/impls/composite/ftn-custom/zfddaf.c b/src/dm/impls/composite/ftn-custom/zfddaf.c index 3424ea01c94..f437d1a77fe 100644 --- a/src/dm/impls/composite/ftn-custom/zfddaf.c +++ b/src/dm/impls/composite/ftn-custom/zfddaf.c @@ -2,43 +2,31 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmcompositegetentries1_ DMCOMPOSITEGETENTRIES1 - #define dmcompositegetentries2_ DMCOMPOSITEGETENTRIES2 - #define dmcompositegetentries3_ DMCOMPOSITEGETENTRIES3 - #define dmcompositegetentries4_ DMCOMPOSITEGETENTRIES4 - #define dmcompositegetentries5_ DMCOMPOSITEGETENTRIES5 - #define dmcompositeadddm_ DMCOMPOSITEADDDM - #define dmcompositedestroy_ DMCOMPOSITEDESTROY - #define dmcompositegetaccess4_ DMCOMPOSITEGETACCESS4 - #define dmcompositescatter4_ DMCOMPOSITESCATTER4 - #define dmcompositerestoreaccess4_ DMCOMPOSITERESTOREACCESS4 - #define dmcompositegetlocalvectors4_ DMCOMPOSITEGETLOCALVECTORS4 - #define dmcompositerestorelocalvectors4_ DMCOMPOSITERESTORELOCALVECTORS4 - #define dmcompositegetglobaliss_ DMCOMPOSITEGETGLOBALISS - #define dmcompositegetlocaliss_ DMCOMPOSITEGETLOCALISS - #define dmcompositegetaccessarray_ DMCOMPOSITEGETACCESSARRAY - #define dmcompositerestoreaccessarray_ DMCOMPOSITERESTOREACCESSARRAY - #define dmcompositegetlocalaccessarray_ DMCOMPOSITEGETLOCALACCESSARRAY - #define dmcompositerestorelocalaccessarray_ DMCOMPOSITERESTORELOCALACCESSARRAY + #define dmcompositegetentries1_ DMCOMPOSITEGETENTRIES1 + #define dmcompositegetentries2_ DMCOMPOSITEGETENTRIES2 + #define dmcompositegetentries3_ DMCOMPOSITEGETENTRIES3 + #define dmcompositegetentries4_ DMCOMPOSITEGETENTRIES4 + #define dmcompositegetentries5_ DMCOMPOSITEGETENTRIES5 + #define dmcompositegetaccess4_ DMCOMPOSITEGETACCESS4 + #define 
dmcompositescatter4_ DMCOMPOSITESCATTER4 + #define dmcompositerestoreaccess4_ DMCOMPOSITERESTOREACCESS4 + #define dmcompositegetlocalvectors4_ DMCOMPOSITEGETLOCALVECTORS4 + #define dmcompositerestorelocalvectors4_ DMCOMPOSITERESTORELOCALVECTORS4 + #define dmcompositegetglobaliss_ DMCOMPOSITEGETGLOBALISS + #define dmcompositegetlocaliss_ DMCOMPOSITEGETLOCALISS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmcompositegetentries1_ dmcompositegetentries1 - #define dmcompositegetentries2_ dmcompositegetentries2 - #define dmcompositegetentries3_ dmcompositegetentries3 - #define dmcompositegetentries4_ dmcompositegetentries4 - #define dmcompositegetentries5_ dmcompositegetentries5 - #define dmcompositeadddm_ dmcompositeadddm - #define dmcompositedestroy_ dmcompositedestroy - #define dmcompositegetaccess4_ dmcompositegetaccess4 - #define dmcompositescatter4_ dmcompositescatter4 - #define dmcompositerestoreaccess4_ dmcompositerestoreaccess4 - #define dmcompositegetlocalvectors4_ dmcompositegetlocalvectors4 - #define dmcompositerestorelocalvectors4_ dmcompositerestorelocalvectors4 - #define dmcompositegetglobaliss_ dmcompositegetglobaliss - #define dmcompositegetlocaliss_ dmcompositegetlocaliss - #define dmcompositegetaccessarray_ dmcompositegetaccessarray - #define dmcompositerestoreaccessarray_ dmcompositerestoreaccessarray - #define dmcompositegetlocalaccessarray_ dmcompositegetlocalaccessarray - #define dmcompositerestorelocalaccessarray_ dmcompositerestorelocalaccessarray + #define dmcompositegetentries1_ dmcompositegetentries1 + #define dmcompositegetentries2_ dmcompositegetentries2 + #define dmcompositegetentries3_ dmcompositegetentries3 + #define dmcompositegetentries4_ dmcompositegetentries4 + #define dmcompositegetentries5_ dmcompositegetentries5 + #define dmcompositegetaccess4_ dmcompositegetaccess4 + #define dmcompositescatter4_ dmcompositescatter4 + #define dmcompositerestoreaccess4_ dmcompositerestoreaccess4 + #define dmcompositegetlocalvectors4_ dmcompositegetlocalvectors4 + #define dmcompositerestorelocalvectors4_ dmcompositerestorelocalvectors4 + #define dmcompositegetglobaliss_ dmcompositegetglobaliss + #define dmcompositegetlocaliss_ dmcompositegetlocaliss #endif PETSC_EXTERN void dmcompositegetentries1_(DM *dm, DM *da1, PetscErrorCode *ierr) @@ -118,27 +106,3 @@ PETSC_EXTERN void dmcompositegetlocaliss_(DM *dm, IS *iss, PetscErrorCode *ierr) for (i = 0; i < ndm; i++) iss[i] = ais[i]; *ierr = PetscFree(ais); } - -PETSC_EXTERN void dmcompositegetaccessarray_(DM *dm, Vec *gvec, PetscInt *n, const PetscInt *wanted, Vec *vecs, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(wanted); - *ierr = DMCompositeGetAccessArray(*dm, *gvec, *n, wanted, vecs); -} - -PETSC_EXTERN void dmcompositerestoreaccessarray_(DM *dm, Vec *gvec, PetscInt *n, const PetscInt *wanted, Vec *vecs, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(wanted); - *ierr = DMCompositeRestoreAccessArray(*dm, *gvec, *n, wanted, vecs); -} - -PETSC_EXTERN void dmcompositegetlocalaccessarray_(DM *dm, Vec *lvec, PetscInt *n, const PetscInt *wanted, Vec *vecs, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(wanted); - *ierr = DMCompositeGetLocalAccessArray(*dm, *lvec, *n, wanted, vecs); -} - -PETSC_EXTERN void dmcompositerestorelocalaccessarray_(DM *dm, Vec *lvec, PetscInt *n, const PetscInt *wanted, Vec *vecs, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(wanted); - *ierr = DMCompositeRestoreLocalAccessArray(*dm, *lvec, *n, wanted, vecs); -} diff --git a/src/dm/impls/composite/pack.c 
b/src/dm/impls/composite/pack.c index 1581db6b113..345fa3d729a 100644 --- a/src/dm/impls/composite/pack.c +++ b/src/dm/impls/composite/pack.c @@ -113,8 +113,6 @@ static PetscErrorCode DMSetUp_Composite(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/* ----------------------------------------------------------------------------------*/ - /*@ DMCompositeGetNumberDM - Gets the number of `DM` objects in the `DMCOMPOSITE` representation. @@ -211,7 +209,7 @@ PetscErrorCode DMCompositeGetAccess(DM dm, Vec gvec, ...) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCompositeGetAccessArray - Allows one to access the individual packed vectors in their global representation. @@ -221,10 +219,10 @@ PetscErrorCode DMCompositeGetAccess(DM dm, Vec gvec, ...) + dm - the `DMCOMPOSITE` . pvec - packed vector . nwanted - number of vectors wanted -- wanted - sorted array of vectors wanted, or NULL to get all vectors +- wanted - sorted array of vectors wanted, or `NULL` to get all vectors, length `nwanted` Output Parameter: -. vecs - array of requested global vectors (must be allocated) +. vecs - array of requested global vectors (must be previously allocated and of length `nwanted`) Level: advanced @@ -233,7 +231,7 @@ PetscErrorCode DMCompositeGetAccess(DM dm, Vec gvec, ...) .seealso: `DMCOMPOSITE`, `DM`, `DMCompositeGetAccess()`, `DMCompositeGetEntries()`, `DMCompositeScatter()`, `DMCompositeGather()` @*/ -PetscErrorCode DMCompositeGetAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt *wanted, Vec *vecs) +PetscErrorCode DMCompositeGetAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt wanted[], Vec vecs[]) { struct DMCompositeLink *link; PetscInt i, wnum; @@ -271,7 +269,7 @@ PetscErrorCode DMCompositeGetAccessArray(DM dm, Vec pvec, PetscInt nwanted, cons PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCompositeGetLocalAccessArray - Allows one to access the individual packed vectors in their local representation. @@ -281,10 +279,10 @@ PetscErrorCode DMCompositeGetAccessArray(DM dm, Vec pvec, PetscInt nwanted, cons + dm - the `DMCOMPOSITE` . pvec - packed vector . nwanted - number of vectors wanted -- wanted - sorted array of vectors wanted, or NULL to get all vectors +- wanted - sorted array of vectors wanted, or `NULL` to get all vectors, length `nwanted` Output Parameter: -. vecs - array of requested local vectors (must be allocated) +. vecs - array of requested local vectors (must be allocated and of length `nwanted`) Level: advanced @@ -295,7 +293,7 @@ PetscErrorCode DMCompositeGetAccessArray(DM dm, Vec pvec, PetscInt nwanted, cons .seealso: `DMCOMPOSITE`, `DM`, `DMCompositeRestoreLocalAccessArray()`, `DMCompositeGetAccess()`, `DMCompositeGetEntries()`, `DMCompositeScatter()`, `DMCompositeGather()` @*/ -PetscErrorCode DMCompositeGetLocalAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt *wanted, Vec *vecs) +PetscErrorCode DMCompositeGetLocalAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt wanted[], Vec vecs[]) { struct DMCompositeLink *link; PetscInt i, wnum; @@ -387,7 +385,7 @@ PetscErrorCode DMCompositeRestoreAccess(DM dm, Vec gvec, ...) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCompositeRestoreAccessArray - Returns the vectors obtained with `DMCompositeGetAccessArray()` Collective @@ -396,14 +394,14 @@ PetscErrorCode DMCompositeRestoreAccess(DM dm, Vec gvec, ...) + dm - the `DMCOMPOSITE` object . pvec - packed vector . nwanted - number of vectors wanted -. 
wanted - sorted array of vectors wanted, or NULL to get all vectors -- vecs - array of global vectors to return +. wanted - sorted array of vectors wanted, or `NULL` to restore all vectors +- vecs - array of global vectors Level: advanced .seealso: `DMCOMPOSITE`, `DM`, `DMCompositeRestoreAccess()`, `DMCompositeRestoreEntries()`, `DMCompositeScatter()`, `DMCompositeGather()` @*/ -PetscErrorCode DMCompositeRestoreAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt *wanted, Vec *vecs) +PetscErrorCode DMCompositeRestoreAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt wanted[], Vec vecs[]) { struct DMCompositeLink *link; PetscInt i, wnum; @@ -430,7 +428,7 @@ PetscErrorCode DMCompositeRestoreAccessArray(DM dm, Vec pvec, PetscInt nwanted, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCompositeRestoreLocalAccessArray - Returns the vectors obtained with `DMCompositeGetLocalAccessArray()`. Collective @@ -439,20 +437,20 @@ PetscErrorCode DMCompositeRestoreAccessArray(DM dm, Vec pvec, PetscInt nwanted, + dm - the `DMCOMPOSITE` object . pvec - packed vector . nwanted - number of vectors wanted -. wanted - sorted array of vectors wanted, or NULL to restore all vectors -- vecs - array of local vectors to return +. wanted - sorted array of vectors wanted, or `NULL` to restore all vectors +- vecs - array of local vectors Level: advanced Note: - nwanted and wanted must match the values given to `DMCompositeGetLocalAccessArray()` + `nwanted` and `wanted` must match the values given to `DMCompositeGetLocalAccessArray()` otherwise the call will fail. .seealso: `DMCOMPOSITE`, `DM`, `DMCompositeGetLocalAccessArray()`, `DMCompositeRestoreAccessArray()`, `DMCompositeRestoreAccess()`, `DMCompositeRestoreEntries()`, `DMCompositeScatter()`, `DMCompositeGather()` @*/ -PetscErrorCode DMCompositeRestoreLocalAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt *wanted, Vec *vecs) +PetscErrorCode DMCompositeRestoreLocalAccessArray(DM dm, Vec pvec, PetscInt nwanted, const PetscInt wanted[], Vec *vecs) { struct DMCompositeLink *link; PetscInt i, wnum; @@ -854,13 +852,13 @@ static PetscErrorCode DMCreateLocalVector_Composite(DM dm, Vec *lvec) Level: advanced Note: - Each entry of ltogs should be destroyed with `ISLocalToGlobalMappingDestroy()`, the ltogs array should be freed with `PetscFree()`. + Each entry of `ltogs` should be destroyed with `ISLocalToGlobalMappingDestroy()`, `ltogs` should be freed with `PetscFree()`. .seealso: `DMCOMPOSITE`, `DM`, `DMDestroy()`, `DMCompositeAddDM()`, `DMCreateGlobalVector()`, `DMCompositeGather()`, `DMCompositeCreate()`, `DMCompositeGetAccess()`, `DMCompositeScatter()`, `DMCompositeGetLocalVectors()`, `DMCompositeRestoreLocalVectors()`, `DMCompositeGetEntries()` @*/ -PetscErrorCode DMCompositeGetISLocalToGlobalMappings(DM dm, ISLocalToGlobalMapping **ltogs) +PetscErrorCode DMCompositeGetISLocalToGlobalMappings(DM dm, ISLocalToGlobalMapping *ltogs[]) { PetscInt i, *idx, n, cnt; struct DMCompositeLink *next; @@ -946,9 +944,13 @@ PetscErrorCode DMCompositeGetISLocalToGlobalMappings(DM dm, ISLocalToGlobalMappi Each returned `IS` should be destroyed with `ISDestroy()`, the array should be freed with `PetscFree()`. 
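Since the ownership rules spelled out in the Notes above are easy to get wrong, here is a minimal caller-side sketch of the destroy-then-free pattern for DMCompositeGetISLocalToGlobalMappings(); the wrapper function name is hypothetical, only the PETSc calls come from the manual pages:

#include <petscdmcomposite.h>

/* Sketch: obtain one local-to-global mapping per sub-DM, use them, then
   destroy each entry individually and free the array itself. */
static PetscErrorCode UseSubDMMappings(DM pack)
{
  ISLocalToGlobalMapping *ltogs;
  PetscInt                nDM;

  PetscFunctionBegin;
  PetscCall(DMCompositeGetNumberDM(pack, &nDM));
  PetscCall(DMCompositeGetISLocalToGlobalMappings(pack, &ltogs));
  /* ... use ltogs[0] through ltogs[nDM - 1] ... */
  for (PetscInt i = 0; i < nDM; i++) PetscCall(ISLocalToGlobalMappingDestroy(&ltogs[i]));
  PetscCall(PetscFree(ltogs));
  PetscFunctionReturn(PETSC_SUCCESS);
}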
-.seealso: `DMCOMPOSITE`, `DM`, `DMCompositeGetGlobalISs()`, `DMCompositeGetISLocalToGlobalMappings()`, `MatGetLocalSubMatrix()`, `MatCreateLocalRef()` + Fortran Note: + Pass in an array long enough to hold all the `IS`, see `DMCompositeGetNumberDM()` + +.seealso: `DMCOMPOSITE`, `DM`, `DMCompositeGetGlobalISs()`, `DMCompositeGetISLocalToGlobalMappings()`, `MatGetLocalSubMatrix()`, + `MatCreateLocalRef()`, `DMCompositeGetNumberDM()` @*/ -PetscErrorCode DMCompositeGetLocalISs(DM dm, IS **is) +PetscErrorCode DMCompositeGetLocalISs(DM dm, IS *is[]) { DM_Composite *com = (DM_Composite *)dm->data; struct DMCompositeLink *link; @@ -984,7 +986,7 @@ PetscErrorCode DMCompositeGetLocalISs(DM dm, IS **is) Level: advanced Notes: - The is entries should be destroyed with `ISDestroy()`, the is array should be freed with `PetscFree()` + The `is` entries should be destroyed with `ISDestroy()`, `is` should be freed with `PetscFree()` These could be used to extract a subset of vector entries for a "multi-physics" preconditioner @@ -1108,7 +1110,6 @@ static PetscErrorCode DMCreateFieldDecomposition_Composite(DM dm, PetscInt *len, PetscFunctionReturn(PETSC_SUCCESS); } -/* -------------------------------------------------------------------------------------*/ /*@C DMCompositeGetLocalVectors - Gets local vectors for each part of a `DMCOMPOSITE` Use `DMCompositeRestoreLocalVectors()` to return them. @@ -1192,7 +1193,6 @@ PetscErrorCode DMCompositeRestoreLocalVectors(DM dm, ...) PetscFunctionReturn(PETSC_SUCCESS); } -/* -------------------------------------------------------------------------------------*/ /*@C DMCompositeGetEntries - Gets the `DM` for each entry in a `DMCOMPOSITE`. @@ -1207,7 +1207,7 @@ PetscErrorCode DMCompositeRestoreLocalVectors(DM dm, ...) Level: advanced Fortran Notes: - Available as `DMCompositeGetEntries()` for one output `DM`, DMCompositeGetEntries2() for 2, etc + Use `DMCompositeGetEntriesArray()` .seealso: `DMCOMPOSITE`, `DM`, `DMDestroy()`, `DMCompositeAddDM()`, `DMCreateGlobalVector()`, `DMCompositeGetEntriesArray()`, `DMCompositeGather()`, `DMCompositeCreate()`, `DMCompositeGetISLocalToGlobalMappings()`, `DMCompositeGetAccess()`, @@ -1237,7 +1237,7 @@ PetscErrorCode DMCompositeGetEntries(DM dm, ...) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCompositeGetEntriesArray - Gets the DM for each entry in a `DMCOMPOSITE` Not Collective diff --git a/src/dm/impls/da/da.c b/src/dm/impls/da/da.c index 670487aefa4..e851d1025b9 100644 --- a/src/dm/impls/da/da.c +++ b/src/dm/impls/da/da.c @@ -689,7 +689,7 @@ PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype) Level: intermediate Notes: - In 2d the array is of length 9, in 3d of length 27 + In 2d `ranks` is of length 9, in 3d of length 27 Not supported in 1d @@ -791,7 +791,7 @@ PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDAGetRefinementFactor - Gets the ratios that the `DMDA` grid is refined Not Collective diff --git a/src/dm/impls/da/da1.c b/src/dm/impls/da/da1.c index 2fef593c51d..f1ec2cfd6b9 100644 --- a/src/dm/impls/da/da1.c +++ b/src/dm/impls/da/da1.c @@ -312,7 +312,7 @@ PetscErrorCode DMSetUp_DA_1D(DM da) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDACreate1d - Creates an object that will manage the communication of one-dimensional regular array data that is distributed across one or more MPI processes.
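As a usage reminder for the DMDACreate1d() manual page touched above: the DMDACreateXd() routines only create the object, so DMSetUp() must be called before the DMDA is used. A minimal sketch with illustrative sizes (the helper name is hypothetical):

#include <petscdmda.h>

/* Sketch: a 1d DMDA with 128 grid points, 1 dof per point, stencil width 1,
   and default ownership ranges (NULL for lx). */
static PetscErrorCode CreateGrid1d(MPI_Comm comm, DM *da)
{
  PetscFunctionBegin;
  PetscCall(DMDACreate1d(comm, DM_BOUNDARY_NONE, 128, 1, 1, NULL, da));
  PetscCall(DMSetFromOptions(*da));
  PetscCall(DMSetUp(*da));
  PetscFunctionReturn(PETSC_SUCCESS);
}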
diff --git a/src/dm/impls/da/da2.c b/src/dm/impls/da/da2.c index 93bc5e27c43..85bf6a21251 100644 --- a/src/dm/impls/da/da2.c +++ b/src/dm/impls/da/da2.c @@ -746,7 +746,7 @@ PetscErrorCode DMSetUp_DA_2D(DM da) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDACreate2d - Creates an object that will manage the communication of two-dimensional regular array data that is distributed across one or more MPI processes. diff --git a/src/dm/impls/da/da3.c b/src/dm/impls/da/da3.c index f7c18330d42..a28dc378b4d 100644 --- a/src/dm/impls/da/da3.c +++ b/src/dm/impls/da/da3.c @@ -1437,7 +1437,7 @@ PetscErrorCode DMSetUp_DA_3D(DM da) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDACreate3d - Creates an object that will manage the communication of three-dimensional regular array data that is distributed across one or more MPI processes. diff --git a/src/dm/impls/da/dacorn.c b/src/dm/impls/da/dacorn.c index 93e8b032724..5e68d7cb315 100644 --- a/src/dm/impls/da/dacorn.c +++ b/src/dm/impls/da/dacorn.c @@ -17,7 +17,7 @@ PetscErrorCode DMCreateCoordinateDM_DA(DM dm, DM *cdm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDASetFieldName - Sets the names of individual field components in multicomponent vectors associated with a `DMDA`. @@ -111,7 +111,7 @@ PetscErrorCode DMDASetFieldNames(DM da, const char *const *names) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDAGetFieldName - Gets the names of individual field components in multicomponent vectors associated with a `DMDA`. @@ -132,7 +132,7 @@ PetscErrorCode DMDASetFieldNames(DM da, const char *const *names) .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetFieldName()`, `DMDASetCoordinateName()`, `DMDAGetCoordinateName()`, `DMSetUp()` @*/ -PetscErrorCode DMDAGetFieldName(DM da, PetscInt nf, const char **name) +PetscErrorCode DMDAGetFieldName(DM da, PetscInt nf, const char *name[]) { DM_DA *dd = (DM_DA *)da->data; @@ -145,7 +145,7 @@ PetscErrorCode DMDAGetFieldName(DM da, PetscInt nf, const char **name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDASetCoordinateName - Sets the name of the coordinate directions associated with a `DMDA`, for example "x" or "y" Logically Collective; name must contain a common value; No Fortran Support @@ -175,7 +175,7 @@ PetscErrorCode DMDASetCoordinateName(DM dm, PetscInt nf, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDAGetCoordinateName - Gets the name of a coordinate direction associated with a `DMDA`. Not Collective; name will contain a common value; No Fortran Support @@ -194,7 +194,7 @@ PetscErrorCode DMDASetCoordinateName(DM dm, PetscInt nf, const char name[]) .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetCoordinateName()`, `DMDASetFieldName()`, `DMDAGetFieldName()`, `DMSetUp()` @*/ -PetscErrorCode DMDAGetCoordinateName(DM dm, PetscInt nf, const char **name) +PetscErrorCode DMDAGetCoordinateName(DM dm, PetscInt nf, const char *name[]) { DM_DA *dd = (DM_DA *)dm->data; @@ -207,7 +207,7 @@ PetscErrorCode DMDAGetCoordinateName(DM dm, PetscInt nf, const char **name) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDAGetCorners - Returns the global (`x`,`y`,`z`) indices of the lower left corner and size of the local region, excluding ghost points. @@ -226,12 +226,13 @@ PetscErrorCode DMDAGetCoordinateName(DM dm, PetscInt nf, const char **name) Level: beginner - Note: + Notes: + Any of `y`, `z`, `n`, and `p` can be passed in as `NULL` if not needed. + The corner information is independent of the number of degrees of freedom per node set with the `DMDACreateXX()` routine. 
Thus the `x`, `y`, and `z` can be thought of as the lower left coordinates of the patch of values on a process on a logical grid and `m`, `n`, and `p` as the extent of the patch, where each grid point has (potentially) several degrees of freedom. - Any of `y`, `z`, `n`, and `p` can be passed in as `NULL` if not needed. .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetGhostCorners()`, `DMDAGetOwnershipRanges()`, `DMStagGetCorners()`, `DMSTAG` @*/ diff --git a/src/dm/impls/da/dagetelem.c b/src/dm/impls/da/dagetelem.c index 1508f49d209..025be979249 100644 --- a/src/dm/impls/da/dagetelem.c +++ b/src/dm/impls/da/dagetelem.c @@ -362,7 +362,7 @@ PetscErrorCode DMDAGetElementType(DM da, DMDAElementType *etype) + nel - number of local elements . nen - number of nodes in each element (for example in one dimension it is 2, in two dimensions it is 3 (for `DMDA_ELEMENT_P1`) and 4 (for `DMDA_ELEMENT_Q1`)) -- e - the local indices of the elements' vertices +- e - the local indices of the elements' vertices, of length `nel` * `nen` Level: intermediate diff --git a/src/dm/impls/da/daghost.c b/src/dm/impls/da/daghost.c index cc1fa7748d8..43558737e67 100644 --- a/src/dm/impls/da/daghost.c +++ b/src/dm/impls/da/daghost.c @@ -4,7 +4,7 @@ #include /*I "petscdmda.h" I*/ -/*@C +/*@ DMDAGetGhostCorners - Returns the global (`i`,`j`,`k`) indices of the lower left corner and size of the local region, including ghost points. @@ -23,13 +23,14 @@ Level: beginner - Note: + Notes: + Any of `y`, `z`, `n`, and `p` can be passed in as `NULL` if not needed. + The corner information is independent of the number of degrees of freedom per node set with the `DMDACreateXX()` routine. Thus the `x`, `y`, and `z` can be thought of as the lower left coordinates of the patch of values on a process on a logical grid and `m`, `n`, and `p` as the extent of the patch, where each grid point has (potentially) several degrees of freedom. - Any of `y`, `z`, `n`, and `p` can be passed in as `NULL` if not needed. .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetCorners()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDAGetOwnershipRanges()`, `DMStagGetGhostCorners()`, `DMSTAG` @*/ diff --git a/src/dm/impls/da/daindex.c b/src/dm/impls/da/daindex.c index 2ae067ee2d5..b940ac50ae6 100644 --- a/src/dm/impls/da/daindex.c +++ b/src/dm/impls/da/daindex.c @@ -45,7 +45,7 @@ PetscErrorCode DMDAGetNatural_Private(DM da, PetscInt *outNlocal, IS *isnatural) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDASetAOType - Sets the type of application ordering to create with `DMDAGetAO()`, for a distributed array. Collective diff --git a/src/dm/impls/da/dascatter.c b/src/dm/impls/da/dascatter.c index ff4a530cff5..f2972c226e1 100644 --- a/src/dm/impls/da/dascatter.c +++ b/src/dm/impls/da/dascatter.c @@ -4,7 +4,7 @@ #include /*I "petscdmda.h" I*/ -/*@C +/*@ DMDAGetScatter - Gets the global-to-local, and local-to-local vector scatter contexts for a `DMDA` distributed array. diff --git a/src/dm/impls/da/daview.c b/src/dm/impls/da/daview.c index 9e1666abab9..fd242b98170 100644 --- a/src/dm/impls/da/daview.c +++ b/src/dm/impls/da/daview.c @@ -108,7 +108,7 @@ PetscErrorCode DMView_DA_VTK(DM da, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDAGetInfo - Gets information about a given distributed array.
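The `NULL`-argument convention promoted to the top of the Notes in both hunks above is worth a concrete illustration. A minimal sketch for a 1d DMDA, with a hypothetical helper name:

#include <petscdmda.h>

/* Sketch: in 1d only the x extent matters, so the unused y/z outputs of
   DMDAGetCorners() are simply passed as NULL. */
static PetscErrorCode LoopOwnedPoints(DM da)
{
  PetscInt xs, xm;

  PetscFunctionBegin;
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
  for (PetscInt i = xs; i < xs + xm; i++) {
    /* operate on locally owned (non-ghost) grid point i */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}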
Not Collective @@ -169,7 +169,7 @@ PetscErrorCode DMDAGetInfo(DM da, PetscInt *dim, PetscInt *M, PetscInt *N, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDAGetLocalInfo - Gets information about a given `DMDA` and this MPI process's location in it Not Collective @@ -185,6 +185,9 @@ PetscErrorCode DMDAGetInfo(DM da, PetscInt *dim, PetscInt *M, PetscInt *N, Petsc Note: See `DMDALocalInfo` for the information that is returned + Fortran Note: + Pass in an array of type `DMDALocalInfo` of length `DMDA_LOCAL_INFO_SIZE` + .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetInfo()`, `DMDAGetCorners()`, `DMDALocalInfo` @*/ PetscErrorCode DMDAGetLocalInfo(DM da, DMDALocalInfo *info) diff --git a/src/dm/impls/da/ftn-custom/zda1f.c b/src/dm/impls/da/ftn-custom/zda1f.c deleted file mode 100644 index e539b38b5d7..00000000000 --- a/src/dm/impls/da/ftn-custom/zda1f.c +++ /dev/null @@ -1,14 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdacreate1d_ DMDACREATE1D -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdacreate1d_ dmdacreate1d -#endif - -PETSC_EXTERN void dmdacreate1d_(MPI_Comm *comm, DMBoundaryType *bx, PetscInt *M, PetscInt *w, PetscInt *s, PetscInt *lc, DM *inra, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(lc); - *ierr = DMDACreate1d(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bx, *M, *w, *s, lc, inra); -} diff --git a/src/dm/impls/da/ftn-custom/zda2f.c b/src/dm/impls/da/ftn-custom/zda2f.c deleted file mode 100644 index 665fa56c3ce..00000000000 --- a/src/dm/impls/da/ftn-custom/zda2f.c +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdacreate2d_ DMDACREATE2D -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdacreate2d_ dmdacreate2d -#endif - -PETSC_EXTERN void dmdacreate2d_(MPI_Comm *comm, DMBoundaryType *bx, DMBoundaryType *by, DMDAStencilType *stencil_type, PetscInt *M, PetscInt *N, PetscInt *m, PetscInt *n, PetscInt *w, PetscInt *s, PetscInt *lx, PetscInt *ly, DM *inra, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(lx); - CHKFORTRANNULLINTEGER(ly); - *ierr = DMDACreate2d(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bx, *by, *stencil_type, *M, *N, *m, *n, *w, *s, lx, ly, inra); -} diff --git a/src/dm/impls/da/ftn-custom/zda3f.c b/src/dm/impls/da/ftn-custom/zda3f.c deleted file mode 100644 index 7d15816b857..00000000000 --- a/src/dm/impls/da/ftn-custom/zda3f.c +++ /dev/null @@ -1,16 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdacreate3d_ DMDACREATE3D -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdacreate3d_ dmdacreate3d -#endif - -PETSC_EXTERN void dmdacreate3d_(MPI_Comm *comm, DMBoundaryType *bx, DMBoundaryType *by, DMBoundaryType *bz, DMDAStencilType *stencil_type, PetscInt *M, PetscInt *N, PetscInt *P, PetscInt *m, PetscInt *n, PetscInt *p, PetscInt *w, PetscInt *s, PetscInt *lx, PetscInt *ly, PetscInt *lz, DM *inra, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(lx); - CHKFORTRANNULLINTEGER(ly); - CHKFORTRANNULLINTEGER(lz); - *ierr = DMDACreate3d(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bx, *by, *bz, *stencil_type, *M, *N, *P, *m, *n, *p, *w, *s, lx, ly, lz, inra); -} diff --git a/src/dm/impls/da/ftn-custom/zdacornf.c b/src/dm/impls/da/ftn-custom/zdacornf.c deleted file mode 100644 index bd749ff2566..00000000000 --- a/src/dm/impls/da/ftn-custom/zdacornf.c +++ /dev/null @@ -1,63 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdasetfieldname_ DMDASETFIELDNAME - #define dmdagetfieldname_ DMDAGETFIELDNAME - #define 
dmdagetcorners_ DMDAGETCORNERS - #define dmdagetcorners000000_ DMDAGETCORNERS000000 - #define dmdagetcorners001001_ DMDAGETCORNERS001001 - #define dmdagetcorners011011_ DMDAGETCORNERS011011 -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdasetfieldname_ dmdasetfieldname - #define dmdagetfieldname_ dmdagetfieldname - #define dmdagetcorners_ dmdagetcorners - #define dmdagetcorners000000_ dmdagetcorners000000 - #define dmdagetcorners001001_ dmdagetcorners001001 - #define dmdagetcorners011011_ dmdagetcorners011011 -#endif - -PETSC_EXTERN void dmdasetfieldname_(DM *da, PetscInt *nf, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(name, len, t); - *ierr = DMDASetFieldName(*da, *nf, t); - if (*ierr) return; - FREECHAR(name, t); -} - -PETSC_EXTERN void dmdagetfieldname_(DM *da, PetscInt *nf, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = DMDAGetFieldName(*da, *nf, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void dmdagetcorners_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - CHKFORTRANNULLINTEGER(y); - CHKFORTRANNULLINTEGER(z); - CHKFORTRANNULLINTEGER(n); - CHKFORTRANNULLINTEGER(p); - - *ierr = DMDAGetCorners(*da, x, y, z, m, n, p); -} - -PETSC_EXTERN void dmdagetcorners000000_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - dmdagetcorners_(da, x, y, z, m, n, p, ierr); -} - -PETSC_EXTERN void dmdagetcorners001001_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - dmdagetcorners_(da, x, y, z, m, n, p, ierr); -} - -PETSC_EXTERN void dmdagetcorners011011_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - dmdagetcorners_(da, x, y, z, m, n, p, ierr); -} diff --git a/src/dm/impls/da/ftn-custom/zdaf.c b/src/dm/impls/da/ftn-custom/zdaf.c index 762feb89821..645fbcaeca6 100644 --- a/src/dm/impls/da/ftn-custom/zdaf.c +++ b/src/dm/impls/da/ftn-custom/zdaf.c @@ -2,13 +2,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdagetownershipranges_ DMDAGETOWNERSHIPRANGES - #define dmdagetneighbors_ DMDAGETNEIGHBORS - #define dmdagetrefinementfactor_ DMDAGETREFINEMENTFACTOR + #define dmdagetownershipranges_ DMDAGETOWNERSHIPRANGES + #define dmdagetneighbors_ DMDAGETNEIGHBORS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdagetownershipranges_ dmdagetownershipranges - #define dmdagetneighbors_ dmdagetneighbors - #define dmdagetrefinementfactor_ dmdagetrefinementfactor + #define dmdagetownershipranges_ dmdagetownershipranges + #define dmdagetneighbors_ dmdagetneighbors #endif PETSC_EXTERN void dmdagetneighbors_(DM *da, PetscMPIInt *ranks, PetscErrorCode *ierr) @@ -47,11 +45,3 @@ PETSC_EXTERN void dmdagetownershipranges_(DM *da, PetscInt lx[], PetscInt ly[], for (i = 0; i < P; i++) lz[i] = gz[i]; } } - -PETSC_EXTERN void dmdagetrefinementfactor_(DM *da, PetscInt *lx, PetscInt *ly, PetscInt *lz, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(lx); - CHKFORTRANNULLINTEGER(ly); - CHKFORTRANNULLINTEGER(lz); - *ierr = DMDAGetRefinementFactor(*da, lx, ly, lz); -} diff --git a/src/dm/impls/da/ftn-custom/zdagetscatterf.c b/src/dm/impls/da/ftn-custom/zdagetscatterf.c deleted file mode 100644 index c68314bfb7f..00000000000 --- a/src/dm/impls/da/ftn-custom/zdagetscatterf.c +++ /dev/null @@ 
-1,15 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdagetscatter_ DMDAGETSCATTER -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdagetscatter_ dmdagetscatter -#endif - -PETSC_EXTERN void dmdagetscatter_(DM *da, VecScatter *gtol, VecScatter *ltol, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(gtol); - CHKFORTRANNULLOBJECT(ltol); - *ierr = DMDAGetScatter(*da, gtol, ltol); -} diff --git a/src/dm/impls/da/ftn-custom/zdaghostf.c b/src/dm/impls/da/ftn-custom/zdaghostf.c deleted file mode 100644 index cfcf226b05f..00000000000 --- a/src/dm/impls/da/ftn-custom/zdaghostf.c +++ /dev/null @@ -1,39 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdagetghostcorners_ DMDAGETGHOSTCORNERS - #define dmdagetghostcorners000000_ DMDAGETGHOSTCORNERS000000 - #define dmdagetghostcorners001001_ DMDAGETGHOSTCORNERS001001 - #define dmdagetghostcorners011011_ DMDAGETGHOSTCORNERS011011 -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdagetghostcorners_ dmdagetghostcorners - #define dmdagetghostcorners000000_ dmdagetghostcorners000000 - #define dmdagetghostcorners001001_ dmdagetghostcorners001001 - #define dmdagetghostcorners011011_ dmdagetghostcorners011011 -#endif - -PETSC_EXTERN void dmdagetghostcorners_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - CHKFORTRANNULLINTEGER(y); - CHKFORTRANNULLINTEGER(z); - CHKFORTRANNULLINTEGER(n); - CHKFORTRANNULLINTEGER(p); - - *ierr = DMDAGetGhostCorners(*da, x, y, z, m, n, p); -} - -PETSC_EXTERN void dmdagetghostcorners000000_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - dmdagetghostcorners_(da, x, y, z, m, n, p, ierr); -} - -PETSC_EXTERN void dmdagetghostcorners001001_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - dmdagetghostcorners_(da, x, y, z, m, n, p, ierr); -} - -PETSC_EXTERN void dmdagetghostcorners011011_(DM *da, PetscInt *x, PetscInt *y, PetscInt *z, PetscInt *m, PetscInt *n, PetscInt *p, int *ierr) -{ - dmdagetghostcorners_(da, x, y, z, m, n, p, ierr); -} diff --git a/src/dm/impls/da/ftn-custom/zdaindexf.c b/src/dm/impls/da/ftn-custom/zdaindexf.c deleted file mode 100644 index f3e1d9603c7..00000000000 --- a/src/dm/impls/da/ftn-custom/zdaindexf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdasetaotype_ DMDASETAOTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdagetglobalindices_ dmdagetglobalindices -#endif - -PETSC_EXTERN void dmdasetaotype_(DM *da, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(type, len, t); - *ierr = DMDASetAOType(*da, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/dm/impls/da/ftn-custom/zdaviewf.c b/src/dm/impls/da/ftn-custom/zdaviewf.c deleted file mode 100644 index 7751e074369..00000000000 --- a/src/dm/impls/da/ftn-custom/zdaviewf.c +++ /dev/null @@ -1,26 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmdagetinfo_ DMDAGETINFO -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmdagetinfo_ dmdagetinfo -#endif - -PETSC_EXTERN void dmdagetinfo_(DM *da, PetscInt *dim, PetscInt *M, PetscInt *N, PetscInt *P, PetscInt *m, PetscInt *n, PetscInt *p, PetscInt *w, PetscInt *s, DMBoundaryType *wrapx, DMBoundaryType *wrapy, DMBoundaryType *wrapz, DMDAStencilType *st, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(dim); - 
CHKFORTRANNULLINTEGER(M); - CHKFORTRANNULLINTEGER(N); - CHKFORTRANNULLINTEGER(P); - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - CHKFORTRANNULLINTEGER(p); - CHKFORTRANNULLINTEGER(w); - CHKFORTRANNULLINTEGER(s); - CHKFORTRANNULLINTEGER(wrapx); - CHKFORTRANNULLINTEGER(wrapy); - CHKFORTRANNULLINTEGER(wrapz); - CHKFORTRANNULLINTEGER(st); - *ierr = DMDAGetInfo(*da, dim, M, N, P, m, n, p, w, s, wrapx, wrapy, wrapz, st); -} diff --git a/src/dm/impls/da/grvtk.c b/src/dm/impls/da/grvtk.c index 047cc07793d..1be9f575632 100644 --- a/src/dm/impls/da/grvtk.c +++ b/src/dm/impls/da/grvtk.c @@ -1,4 +1,4 @@ -#include +#include /*I "petscdmda.h" I*/ /* Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires including the private vtkvimpl.h file. The code should be refactored. @@ -482,7 +482,7 @@ static PetscErrorCode DMDAVTKWriteAll_VTR(DM da, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer Collective diff --git a/src/dm/impls/da/usfft/matusfft.c b/src/dm/impls/da/usfft/matusfft.c index f44c20612a7..0e09dd07e76 100644 --- a/src/dm/impls/da/usfft/matusfft.c +++ b/src/dm/impls/da/usfft/matusfft.c @@ -130,25 +130,28 @@ PetscErrorCode MatDestroy_SeqUSFFT(Mat A) } /*@C - MatCreateSeqUSFFT - Creates a matrix object that provides sequential USFFT + MatCreateSeqUSFFT - Creates a matrix object that provides sequential USFFT via the external package FFTW - Collective + Collective - Input Parameter: -. da - geometry of the domain encoded by a `DMDA` + Input Parameter: +. da - geometry of the domain encoded by a `DMDA` - Output Parameter: -. A - the matrix + Output Parameter: +. A - the matrix Options Database Key: . -mat_usfft_plannerflags - set the FFTW planner flags - Level: intermediate + Level: intermediate + + Note: + This does not currently exist. There is some code in place but apparently unfinished and commented out with #if 0 .seealso: `Mat`, `Vec`, `DMDA`, `DM` @*/ -PetscErrorCode MatCreateSeqUSFFT(Vec sampleCoords, DMDA freqDA, Mat *A) +PetscErrorCode MatCreateSeqUSFFT(Vec sampleCoords, DM freqDA, Mat *A) { Mat_USFFT *usfft; PetscInt m,n,M,N,i; diff --git a/src/dm/impls/forest/forest.c b/src/dm/impls/forest/forest.c index ee2db9d83d5..ea5026c99b2 100644 --- a/src/dm/impls/forest/forest.c +++ b/src/dm/impls/forest/forest.c @@ -214,7 +214,7 @@ static PetscErrorCode DMDestroy_Forest(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMForestSetTopology - Set the topology of a `DMFOREST` during the pre-setup phase. The topology is a string (e.g. "cube", "shell") and can be interpreted by subtypes of `DMFOREST` to construct the base DM of a forest during `DMSetUp()`. @@ -241,7 +241,7 @@ PetscErrorCode DMForestSetTopology(DM dm, DMForestTopology topology) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMForestGetTopology - Get a string describing the topology of a `DMFOREST`. Not Collective @@ -874,7 +874,7 @@ PetscErrorCode DMForestGetMaximumRefinement(DM dm, PetscInt *maxRefinement) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMForestSetAdaptivityStrategy - During the pre-setup phase, set the strategy for combining adaptivity labels from multiple processes. Logically Collective @@ -903,7 +903,7 @@ PetscErrorCode DMForestSetAdaptivityStrategy(DM dm, DMForestAdaptivityStrategy a PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMForestGetAdaptivityStrategy - Get the strategy for combining adaptivity labels from multiple processes.
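Because DMForestSetTopology() takes a free-form string that is interpreted only later, during DMSetUp(), a small sketch may help. This assumes a PETSc build with p4est (providing the DMP4EST type); "brick" is one illustrative topology value:

#include <petscdmforest.h>

/* Sketch: create a forest, set its topology in the pre-setup phase, then
   let DMSetUp() construct the base DM from the topology string. */
static PetscErrorCode CreateForest(MPI_Comm comm, DM *forest)
{
  PetscFunctionBegin;
  PetscCall(DMCreate(comm, forest));
  PetscCall(DMSetType(*forest, DMP4EST)); /* assumes --download-p4est */
  PetscCall(DMForestSetTopology(*forest, "brick"));
  PetscCall(DMSetUp(*forest));
  PetscFunctionReturn(PETSC_SUCCESS);
}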
Not Collective @@ -1271,7 +1271,7 @@ PetscErrorCode DMForestGetCellSF(DM dm, PetscSF *cellSF) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMForestSetAdaptivityLabel - During the pre-setup phase, set the label of the pre-adaptation forest (see `DMForestGetAdaptivityForest()`) that holds the adaptation flags (refinement, coarsening, or some combination). @@ -1303,7 +1303,7 @@ PetscErrorCode DMForestSetAdaptivityLabel(DM dm, DMLabel adaptLabel) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMForestGetAdaptivityLabel - Get the label of the pre-adaptation forest (see `DMForestGetAdaptivityForest()`) that holds the adaptation flags (refinement, coarsening, or some combination). diff --git a/src/dm/impls/moab/dmmbfem.cxx b/src/dm/impls/moab/dmmbfem.cxx index b23eba74921..d9b61ee0b20 100644 --- a/src/dm/impls/moab/dmmbfem.cxx +++ b/src/dm/impls/moab/dmmbfem.cxx @@ -49,7 +49,7 @@ static inline PetscReal DMatrix_Determinant_4x4_Internal(PetscReal inmat[4 * 4]) return inmat[0 + 0 * 4] * (inmat[1 + 1 * 4] * (inmat[2 + 2 * 4] * inmat[3 + 3 * 4] - inmat[2 + 3 * 4] * inmat[3 + 2 * 4]) - inmat[1 + 2 * 4] * (inmat[2 + 1 * 4] * inmat[3 + 3 * 4] - inmat[2 + 3 * 4] * inmat[3 + 1 * 4]) + inmat[1 + 3 * 4] * (inmat[2 + 1 * 4] * inmat[3 + 2 * 4] - inmat[2 + 2 * 4] * inmat[3 + 1 * 4])) - inmat[0 + 1 * 4] * (inmat[1 + 0 * 4] * (inmat[2 + 2 * 4] * inmat[3 + 3 * 4] - inmat[2 + 3 * 4] * inmat[3 + 2 * 4]) - inmat[1 + 2 * 4] * (inmat[2 + 0 * 4] * inmat[3 + 3 * 4] - inmat[2 + 3 * 4] * inmat[3 + 0 * 4]) + inmat[1 + 3 * 4] * (inmat[2 + 0 * 4] * inmat[3 + 2 * 4] - inmat[2 + 2 * 4] * inmat[3 + 0 * 4])) + inmat[0 + 2 * 4] * (inmat[1 + 0 * 4] * (inmat[2 + 1 * 4] * inmat[3 + 3 * 4] - inmat[2 + 3 * 4] * inmat[3 + 1 * 4]) - inmat[1 + 1 * 4] * (inmat[2 + 0 * 4] * inmat[3 + 3 * 4] - inmat[2 + 3 * 4] * inmat[3 + 0 * 4]) + inmat[1 + 3 * 4] * (inmat[2 + 0 * 4] * inmat[3 + 1 * 4] - inmat[2 + 1 * 4] * inmat[3 + 0 * 4])) - inmat[0 + 3 * 4] * (inmat[1 + 0 * 4] * (inmat[2 + 1 * 4] * inmat[3 + 2 * 4] - inmat[2 + 2 * 4] * inmat[3 + 1 * 4]) - inmat[1 + 1 * 4] * (inmat[2 + 0 * 4] * inmat[3 + 2 * 4] - inmat[2 + 2 * 4] * inmat[3 + 0 * 4]) + inmat[1 + 2 * 4] * (inmat[2 + 0 * 4] * inmat[3 + 1 * 4] - inmat[2 + 1 * 4] * inmat[3 + 0 * 4])); } -static inline PetscErrorCode DMatrix_Invert_4x4_Internal(PetscReal *inmat, PetscReal *outmat, PetscScalar *determinant) +static inline PETSC_UNUSED PetscErrorCode DMatrix_Invert_4x4_Internal(PetscReal *inmat, PetscReal *outmat, PetscScalar *determinant) { PetscReal det = DMatrix_Determinant_4x4_Internal(inmat); if (outmat) { diff --git a/src/dm/impls/moab/dmmbfield.cxx b/src/dm/impls/moab/dmmbfield.cxx index 956f5aac797..aa4fbaffa92 100644 --- a/src/dm/impls/moab/dmmbfield.cxx +++ b/src/dm/impls/moab/dmmbfield.cxx @@ -198,7 +198,7 @@ PetscErrorCode DMMoabSetFieldNames(DM dm, PetscInt numFields, const char *fields .seealso: `DMMoabSetFieldName()`, `DMMoabSetFields()` @*/ -PetscErrorCode DMMoabGetFieldName(DM dm, PetscInt field, const char **fieldName) +PetscErrorCode DMMoabGetFieldName(DM dm, PetscInt field, const char *fieldName[]) { DM_Moab *dmmoab; diff --git a/src/dm/impls/network/network.c b/src/dm/impls/network/network.c index fe506552958..2163ecf7181 100644 --- a/src/dm/impls/network/network.c +++ b/src/dm/impls/network/network.c @@ -131,7 +131,7 @@ PetscErrorCode DMNetworkSetNumSubNetworks(DM dm, PetscInt nsubnet, PetscInt Nsub PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMNetworkAddSubnetwork - Add a subnetwork Collective @@ -150,12 +150,12 @@ PetscErrorCode DMNetworkSetNumSubNetworks(DM 
dm, PetscInt nsubnet, PetscInt Nsub PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMNetworkAddSubnetwork - Add a subnetwork Collective @@ -150,12 +150,12 @@ PetscErrorCode DMNetworkSetNumSubNetworks(DM dm, PetscInt nsubnet, PetscInt Nsub Level: beginner Notes: - There is no copy involved in this operation, only the pointer is referenced. The edgelist should + There is no copy involved in this operation, only the pointer is referenced. The `edgelist` should not be destroyed before the call to `DMNetworkLayoutSetUp()` A network can comprise a single subnetwork OR multiple subnetworks. For a single subnetwork, the subnetwork can be read either in serial or parallel. For multiple subnetworks, - each subnetwork topology needs to be set on a unique rank and the communicator size needs to be at least equal to the number of subnetworks. + each subnetwork topology needs to be set on a unique MPI process and the communicator size needs to be at least equal to the number of subnetworks. Example usage: Consider the following networks\: @@ -302,7 +302,7 @@ PetscErrorCode DMNetworkAddSubnetwork(DM dm, const char *name, PetscInt ne, Pets .seealso: `DM`, `DMNETWORK`, `DMNetworkGetSharedVertices()` @*/ -PetscErrorCode DMNetworkSharedVertexGetInfo(DM dm, PetscInt v, PetscInt *gidx, PetscInt *n, const PetscInt **sv) +PetscErrorCode DMNetworkSharedVertexGetInfo(DM dm, PetscInt v, PetscInt *gidx, PetscInt *n, const PetscInt *sv[]) { DM_Network *network = (DM_Network *)dm->data; SVtx *svtx = network->cloneshared->svtx; @@ -813,7 +813,7 @@ PetscErrorCode DMNetworkLayoutSetUp(DM dm) .seealso: `DM`, `DMNETWORK`, `DMNetworkCreate()`, `DMNetworkAddSubnetwork()`, `DMNetworkLayoutSetUp()` @*/ -PetscErrorCode DMNetworkGetSubnetwork(DM dm, PetscInt netnum, PetscInt *nv, PetscInt *ne, const PetscInt **vtx, const PetscInt **edge) +PetscErrorCode DMNetworkGetSubnetwork(DM dm, PetscInt netnum, PetscInt *nv, PetscInt *ne, const PetscInt *vtx[], const PetscInt *edge[]) { DM_Network *network = (DM_Network *)dm->data; @@ -888,7 +888,7 @@ PetscErrorCode DMNetworkAddSharedVertices(DM dm, PetscInt anetnum, PetscInt bnet .seealso: `DM`, `DMNETWORK`, `DMNetworkGetSubnetwork()`, `DMNetworkLayoutSetUp()`, `DMNetworkAddSharedVertices()` @*/ -PetscErrorCode DMNetworkGetSharedVertices(DM dm, PetscInt *nsv, const PetscInt **svtx) +PetscErrorCode DMNetworkGetSharedVertices(DM dm, PetscInt *nsv, const PetscInt *svtx[]) { DM_Network *net = (DM_Network *)dm->data; @@ -1957,7 +1957,7 @@ PetscErrorCode DMNetworkDistribute(DM *dm, PetscInt overlap) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFGetSubSF - Returns a `PetscSF` for a specific subset of points.
Leaves are re-numbered to reflect the new ordering Collective diff --git a/src/dm/impls/network/networkmonitor.c b/src/dm/impls/network/networkmonitor.c index bdf4f75d861..a80445c8e04 100644 --- a/src/dm/impls/network/networkmonitor.c +++ b/src/dm/impls/network/networkmonitor.c @@ -166,10 +166,10 @@ PetscErrorCode DMNetworkMonitorAdd(DMNetworkMonitor monitor, const char *name, P PetscFunctionReturn(PETSC_SUCCESS); } -/*@ - DMNetworkMonitorView - Monitor function for `TSMonitorSet()` +/*@C + DMNetworkMonitorView - A `DMNETWORK` specific monitor function for `TSMonitorSet()` - Collective + Collective, No Fortran support Input Parameters: + monitor - `DMNetworkMonitor` object @@ -177,7 +177,7 @@ PetscErrorCode DMNetworkMonitorAdd(DMNetworkMonitor monitor, const char *name, P Level: intermediate -.seealso: `DM`, `DMNETWORK`, `DMNetworkMonitorCreate()`, `DMNetworkMonitorDestroy()`, `DMNetworkMonitorAdd()` +.seealso: `DM`, `DMNETWORK`, `DMNetworkMonitor`, `DMNetworkMonitorCreate()`, `DMNetworkMonitorDestroy()`, `DMNetworkMonitorAdd()` @*/ PetscErrorCode DMNetworkMonitorView(DMNetworkMonitor monitor, Vec x) { diff --git a/src/dm/impls/patch/patch.c b/src/dm/impls/patch/patch.c index fa533253698..c6c1190941f 100644 --- a/src/dm/impls/patch/patch.c +++ b/src/dm/impls/patch/patch.c @@ -18,7 +18,7 @@ Solver loop to update \tau: TauCoarse = Rcoarse - Rcoarse_restricted */ -/*@C +/*@ DMPatchZoom - Create patches of a `DMDA` on subsets of processes, indicated by `commz` Collective diff --git a/src/dm/impls/plex/cgns/plexcgns2.c b/src/dm/impls/plex/cgns/plexcgns2.c index aed7e993f98..6d8938b2587 100644 --- a/src/dm/impls/plex/cgns/plexcgns2.c +++ b/src/dm/impls/plex/cgns/plexcgns2.c @@ -712,10 +712,11 @@ PetscErrorCode DMView_PlexCGNS(DM dm, PetscViewer viewer) PetscInt quadrature_order = field_order; PetscCall(DMClone(dm, &colloc_dm)); { // Inform the new colloc_dm that it is a coordinate DM so isoperiodic affine corrections can be applied - PetscSF face_sf; - PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &face_sf)); - PetscCall(DMPlexSetIsoperiodicFaceSF(colloc_dm, face_sf)); - if (face_sf) colloc_dm->periodic.setup = DMPeriodicCoordinateSetUp_Internal; + const PetscSF *face_sfs; + PetscInt num_face_sfs; + PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &num_face_sfs, &face_sfs)); + PetscCall(DMPlexSetIsoperiodicFaceSF(colloc_dm, num_face_sfs, (PetscSF *)face_sfs)); + if (face_sfs) colloc_dm->periodic.setup = DMPeriodicCoordinateSetUp_Internal; } PetscCall(DMPlexIsSimplex(dm, &is_simplex)); PetscCall(PetscFECreateLagrange(PetscObjectComm((PetscObject)dm), topo_dim, coord_dim, is_simplex, field_order, quadrature_order, &fe)); @@ -784,10 +785,10 @@ PetscErrorCode DMView_PlexCGNS(DM dm, PetscViewer viewer) PetscCall(DMPlexRestoreClosureIndices(cdm, cdm->localSection, cdm->localSection, i, PETSC_FALSE, &closure_dof, &closure_indices, NULL, NULL)); } e_owned = cEnd - cStart; - PetscCall(MPIU_Allreduce(&e_owned, &e_global, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)dm))); - PetscCheck(e_global == num_global_elems, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected number of elements %" PetscInt64_FMT "vs %" PetscInt_FMT, e_global, num_global_elems); + PetscCall(MPIU_Allreduce(&e_owned, &e_global, 1, MPIU_CGSIZE, MPI_SUM, PetscObjectComm((PetscObject)dm))); + PetscCheck(e_global == num_global_elems, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected number of elements %" PRIdCGSIZE " vs %" PetscInt_FMT, e_global, num_global_elems); e_start = 0; - PetscCallMPI(MPI_Exscan(&e_owned, &e_start, 1, MPIU_INT64, MPI_SUM, 
PetscObjectComm((PetscObject)dm))); + PetscCallMPI(MPI_Exscan(&e_owned, &e_start, 1, MPIU_CGSIZE, MPI_SUM, PetscObjectComm((PetscObject)dm))); PetscCallCGNS(cgp_section_write(cgv->file_num, base, zone, "Elem", element_type, 1, e_global, 0, §ion)); PetscCallCGNS(cgp_elements_write_data(cgv->file_num, base, zone, section, e_start + 1, e_start + e_owned, conn)); PetscCall(PetscFree(conn)); diff --git a/src/dm/impls/plex/f90-custom/zplexf90.c b/src/dm/impls/plex/f90-custom/zplexf90.c index ff4f80f50e6..79b838628b8 100644 --- a/src/dm/impls/plex/f90-custom/zplexf90.c +++ b/src/dm/impls/plex/f90-custom/zplexf90.c @@ -42,8 +42,6 @@ #define dmplexrestoremeet_ dmplexrestoremeet #endif -/* Definitions of Fortran Wrapper routines */ - PETSC_EXTERN void dmplexgetcone_(DM *dm, PetscInt *p, F90Array1d *ptr, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) { const PetscInt *v; diff --git a/src/dm/impls/plex/ftn-custom/zplexcreate.c b/src/dm/impls/plex/ftn-custom/zplexcreate.c deleted file mode 100644 index 6608ebdddaf..00000000000 --- a/src/dm/impls/plex/ftn-custom/zplexcreate.c +++ /dev/null @@ -1,34 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmplexcreateboxmesh_ DMPLEXCREATEBOXMESH - #define dmplexcreatefromfile_ DMPLEXCREATEFROMFILE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmplexcreateboxmesh_ dmplexcreateboxmesh - #define dmplexcreatefromfile_ dmplexcreatefromfile -#endif - -/* Definitions of Fortran Wrapper routines */ - -PETSC_EXTERN void dmplexcreateboxmesh_(MPI_Fint *comm, PetscInt *dim, PetscBool *simplex, PetscInt faces[], PetscReal lower[], PetscReal upper[], DMBoundaryType periodicity[], PetscBool *interpolate, DM *dm, int *ierr) -{ - CHKFORTRANNULLINTEGER(faces); - CHKFORTRANNULLREAL(lower); - CHKFORTRANNULLREAL(upper); - CHKFORTRANNULLINTEGER(periodicity); - *ierr = DMPlexCreateBoxMesh(MPI_Comm_f2c(*(comm)), *dim, *simplex, faces, lower, upper, periodicity, *interpolate, dm); -} - -PETSC_EXTERN void dmplexcreatefromfile_(MPI_Fint *comm, char *fname, char *pname, PetscBool *interpolate, DM *dm, int *ierr, PETSC_FORTRAN_CHARLEN_T lenfilename, PETSC_FORTRAN_CHARLEN_T lenplexname) -{ - char *filename; - char *plexname; - - FIXCHAR(fname, lenfilename, filename); - FIXCHAR(pname, lenplexname, plexname); - *ierr = DMPlexCreateFromFile(MPI_Comm_f2c(*(comm)), filename, plexname, *interpolate, dm); - if (*ierr) return; - FREECHAR(fname, filename); - FREECHAR(pname, plexname); -} diff --git a/src/dm/impls/plex/ftn-custom/zplexdistribute.c b/src/dm/impls/plex/ftn-custom/zplexdistribute.c deleted file mode 100644 index d63dc44809c..00000000000 --- a/src/dm/impls/plex/ftn-custom/zplexdistribute.c +++ /dev/null @@ -1,23 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmplexdistribute_ DMPLEXDISTRIBUTE - #define dmplexdistributeoverlap_ DMPLEXDISTRIBUTEOVERLAP -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmplexdistribute_ dmplexdistribute - #define dmplexdistributeoverlap_ dmplexdistributeoverlap -#endif - -/* Definitions of Fortran Wrapper routines */ -PETSC_EXTERN void dmplexdistribute_(DM *dm, PetscInt *overlap, PetscSF *sf, DM *dmParallel, int *ierr) -{ - CHKFORTRANNULLOBJECT(sf); - *ierr = DMPlexDistribute(*dm, *overlap, sf, dmParallel); -} - -PETSC_EXTERN void dmplexdistributeoverlap_(DM *dm, PetscInt *overlap, PetscSF *sf, DM *dmParallel, int *ierr) -{ - CHKFORTRANNULLOBJECT(sf); - *ierr = DMPlexDistributeOverlap(*dm, *overlap, sf, dmParallel); 
-} diff --git a/src/dm/impls/plex/ftn-custom/zplexexodusii.c b/src/dm/impls/plex/ftn-custom/zplexexodusii.c deleted file mode 100644 index 4a7c1359157..00000000000 --- a/src/dm/impls/plex/ftn-custom/zplexexodusii.c +++ /dev/null @@ -1,32 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmplexcreateexodusfromfile_ DMPLEXCREATEEXODUSFROMFILE - #define petscviewerexodusiiopen_ PETSCVIEWEREXODUSIIOPEN -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmplexcreateexodusfromfile_ dmplexcreateexodusfromfile - #define petscviewerexodusiiopen_ petscviewerexodusiiopen -#endif - -/* Definitions of Fortran Wrapper routines */ - -PETSC_EXTERN void dmplexcreateexodusfromfile_(MPI_Fint *comm, char *name, PetscBool *interpolate, DM *dm, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *filename; - - FIXCHAR(name, lenN, filename); - *ierr = DMPlexCreateExodusFromFile(MPI_Comm_f2c(*(comm)), filename, *interpolate, dm); - if (*ierr) return; - FREECHAR(name, filename); -} - -PETSC_EXTERN void petscviewerexodusiiopen_(MPI_Comm *comm, char *name, PetscFileMode *type, PetscViewer *binv, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - - FIXCHAR(name, len, c1); - *ierr = PetscViewerExodusIIOpen(MPI_Comm_f2c(*(MPI_Fint *)&*comm), c1, *type, binv); - if (*ierr) return; - FREECHAR(name, c1); -} diff --git a/src/dm/impls/plex/ftn-custom/zplexextrude.c b/src/dm/impls/plex/ftn-custom/zplexextrude.c deleted file mode 100644 index 95fe90cf257..00000000000 --- a/src/dm/impls/plex/ftn-custom/zplexextrude.c +++ /dev/null @@ -1,16 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmplexextrude_ DMPLEXEXTRUDE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmplexextrude_ dmplexextrude -#endif - -/* Definitions of Fortran Wrapper routines */ -PETSC_EXTERN void dmplexextrude_(DM *dm, PetscInt *layers, PetscReal *thickness, PetscBool *tensor, PetscBool *symmetric, PetscBool *periodic, PetscReal normal[], PetscReal thicknesses[], DM *edm, int *ierr) -{ - CHKFORTRANNULLREAL(normal); - CHKFORTRANNULLREAL(thicknesses); - *ierr = DMPlexExtrude(*dm, *layers, *thickness, *tensor, *symmetric, *periodic, normal, thicknesses, edm); -} diff --git a/src/dm/impls/plex/ftn-custom/zplexfluent.c b/src/dm/impls/plex/ftn-custom/zplexfluent.c deleted file mode 100644 index 0e75f12264b..00000000000 --- a/src/dm/impls/plex/ftn-custom/zplexfluent.c +++ /dev/null @@ -1,20 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmplexcreatefluentfromfile_ DMPLEXCREATEFLUENTFROMFILE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmplexcreatefluentfromfile_ dmplexcreatefluentfromfile -#endif - -/* Definitions of Fortran Wrapper routines */ - -PETSC_EXTERN void dmplexcreatefluentfromfile_(MPI_Fint *comm, char *name, PetscBool *interpolate, DM *dm, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *filename; - - FIXCHAR(name, lenN, filename); - *ierr = DMPlexCreateFluentFromFile(MPI_Comm_f2c(*(comm)), filename, *interpolate, dm); - if (*ierr) return; - FREECHAR(name, filename); -} diff --git a/src/dm/impls/plex/ftn-custom/zplexgmsh.c b/src/dm/impls/plex/ftn-custom/zplexgmsh.c deleted file mode 100644 index bc4dab4d2df..00000000000 --- a/src/dm/impls/plex/ftn-custom/zplexgmsh.c +++ /dev/null @@ -1,20 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmplexcreategmshfromfile_ 
DMPLEXCREATEGMSHFROMFILE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmplexcreategmshfromfile_ dmplexcreategmshfromfile -#endif - -/* Definitions of Fortran Wrapper routines */ - -PETSC_EXTERN void dmplexcreategmshfromfile_(MPI_Fint *comm, char *name, PetscBool *interpolate, DM *dm, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *filename; - - FIXCHAR(name, lenN, filename); - *ierr = DMPlexCreateGmshFromFile(MPI_Comm_f2c(*(comm)), filename, *interpolate, dm); - if (*ierr) return; - FREECHAR(name, filename); -} diff --git a/src/dm/impls/plex/ftn-custom/zplexsubmesh.c b/src/dm/impls/plex/ftn-custom/zplexsubmesh.c deleted file mode 100644 index b1c5892385f..00000000000 --- a/src/dm/impls/plex/ftn-custom/zplexsubmesh.c +++ /dev/null @@ -1,19 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmplexconstructghostcells_ DMPLEXCONSTRUCTGHOSTCELLS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmplexconstructghostcells_ dmplexconstructghostcells -#endif - -/* Definitions of Fortran Wrapper routines */ -PETSC_EXTERN void dmplexconstructghostcells_(DM *dm, char *name, PetscInt *numGhostCells, DM *dmGhosted, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *labelname; - - FIXCHAR(name, lenN, labelname); - *ierr = DMPlexConstructGhostCells(*dm, labelname, numGhostCells, dmGhosted); - if (*ierr) return; - FREECHAR(name, labelname); -} diff --git a/src/dm/impls/plex/plex.c b/src/dm/impls/plex/plex.c index b95806e754d..8358f2da67d 100644 --- a/src/dm/impls/plex/plex.c +++ b/src/dm/impls/plex/plex.c @@ -1043,7 +1043,8 @@ static PetscErrorCode DMPlexView_Ascii(DM dm, PetscViewer viewer) PetscCall(DMGetPointSF(dm, &sf)); PetscCall(PetscSFView(sf, viewer)); } - if (mesh->periodic.face_sf) PetscCall(PetscSFView(mesh->periodic.face_sf, viewer)); + if (mesh->periodic.face_sfs) + for (PetscInt i = 0; i < mesh->periodic.num_face_sfs; i++) PetscCall(PetscSFView(mesh->periodic.face_sfs[i], viewer)); PetscCall(PetscViewerFlush(viewer)); } else if (format == PETSC_VIEWER_ASCII_LATEX) { const char *name, *color; @@ -1055,7 +1056,7 @@ static PetscErrorCode DMPlexView_Ascii(DM dm, PetscViewer viewer) PetscBool useNumbers = PETSC_TRUE, drawNumbers[4], drawColors[4], useLabels, useColors, plotEdges, drawHasse = PETSC_FALSE; double tcoords[3]; PetscScalar *coords; - PetscInt numLabels, l, numColors, numLColors, dim, d, depth, cStart, cEnd, c, vStart, vEnd, v, eStart = 0, eEnd = 0, e, p, n; + PetscInt numLabels, l, numColors, numLColors, dim, d, depth, cStart, cEnd, c, vStart, vEnd, v, eStart = 0, eEnd = 0, fStart = 0, fEnd = 0, e, p, n; PetscMPIInt rank, size; char **names, **colors, **lcolors; PetscBool flg, lflg; @@ -1110,6 +1111,7 @@ static PetscErrorCode DMPlexView_Ascii(DM dm, PetscViewer viewer) PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd)); PetscCall(DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd)); PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); + PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd)); if (lflg) { DMLabel lbl; @@ -1151,7 +1153,7 @@ static PetscErrorCode DMPlexView_Ascii(DM dm, PetscViewer viewer) PetscCall(PetscViewerASCIIPrintf(viewer, ".\n\n\n")); } if (drawHasse) { - PetscInt maxStratum = PetscMax(vEnd - vStart, PetscMax(eEnd - eStart, cEnd - cStart)); + PetscInt maxStratum = PetscMax(vEnd - vStart, PetscMax(eEnd - eStart, PetscMax(fEnd - fStart, cEnd - cStart))); PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\vStart}{%" PetscInt_FMT 
"}\n", vStart)); PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\vEnd}{%" PetscInt_FMT "}\n", vEnd - 1)); @@ -1161,6 +1163,10 @@ static PetscErrorCode DMPlexView_Ascii(DM dm, PetscViewer viewer) PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\eEnd}{%" PetscInt_FMT "}\n", eEnd - 1)); PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\eShift}{%.2f}\n", 3 + (maxStratum - (eEnd - eStart)) / 2.)); PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\numEdges}{%" PetscInt_FMT "}\n", eEnd - eStart)); + PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\fStart}{%" PetscInt_FMT "}\n", fStart)); + PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\fEnd}{%" PetscInt_FMT "}\n", fEnd - 1)); + PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\fShift}{%.2f}\n", 3 + (maxStratum - (fEnd - fStart)) / 2.)); + PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\numFaces}{%" PetscInt_FMT "}\n", fEnd - fStart)); PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\cStart}{%" PetscInt_FMT "}\n", cStart)); PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\cEnd}{%" PetscInt_FMT "}\n", cEnd - 1)); PetscCall(PetscViewerASCIIPrintf(viewer, "\\newcommand{\\numCells}{%" PetscInt_FMT "}\n", cEnd - cStart)); @@ -1380,25 +1386,36 @@ static PetscErrorCode DMPlexView_Ascii(DM dm, PetscViewer viewer) } else PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, ") node(%" PetscInt_FMT "_%d) [] {};\n", c, rank)); } if (drawHasse) { + int height = 0; + color = colors[depth % numColors]; PetscCall(PetscViewerASCIIPrintf(viewer, "%% Cells\n")); PetscCall(PetscViewerASCIIPrintf(viewer, "\\foreach \\c in {\\cStart,...,\\cEnd}\n")); PetscCall(PetscViewerASCIIPrintf(viewer, "{\n")); - PetscCall(PetscViewerASCIIPrintf(viewer, " \\node(\\c_%d) [draw,shape=circle,color=%s,minimum size = 6mm] at (\\cShift+\\c-\\cStart,0) {\\c};\n", rank, color)); + PetscCall(PetscViewerASCIIPrintf(viewer, " \\node(\\c_%d) [draw,shape=circle,color=%s,minimum size = 6mm] at (\\cShift+\\c-\\cStart,%d) {\\c};\n", rank, color, height++)); PetscCall(PetscViewerASCIIPrintf(viewer, "}\n")); + if (depth > 2) { + color = colors[1 % numColors]; + PetscCall(PetscViewerASCIIPrintf(viewer, "%% Faces\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\\foreach \\f in {\\fStart,...,\\fEnd}\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "{\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " \\node(\\f_%d) [draw,shape=circle,color=%s,minimum size = 6mm] at (\\fShift+\\f-\\fStart,%d) {\\f};\n", rank, color, height++)); + PetscCall(PetscViewerASCIIPrintf(viewer, "}\n")); + } + color = colors[1 % numColors]; PetscCall(PetscViewerASCIIPrintf(viewer, "%% Edges\n")); PetscCall(PetscViewerASCIIPrintf(viewer, "\\foreach \\e in {\\eStart,...,\\eEnd}\n")); PetscCall(PetscViewerASCIIPrintf(viewer, "{\n")); - PetscCall(PetscViewerASCIIPrintf(viewer, " \\node(\\e_%d) [draw,shape=circle,color=%s,minimum size = 6mm] at (\\eShift+\\e-\\eStart,1) {\\e};\n", rank, color)); + PetscCall(PetscViewerASCIIPrintf(viewer, " \\node(\\e_%d) [draw,shape=circle,color=%s,minimum size = 6mm] at (\\eShift+\\e-\\eStart,%d) {\\e};\n", rank, color, height++)); PetscCall(PetscViewerASCIIPrintf(viewer, "}\n")); color = colors[0 % numColors]; PetscCall(PetscViewerASCIIPrintf(viewer, "%% Vertices\n")); PetscCall(PetscViewerASCIIPrintf(viewer, "\\foreach \\v in {\\vStart,...,\\vEnd}\n")); PetscCall(PetscViewerASCIIPrintf(viewer, "{\n")); - PetscCall(PetscViewerASCIIPrintf(viewer, " \\node(\\v_%d) [draw,shape=circle,color=%s,minimum size = 6mm] at 
(\\vShift+\\v-\\vStart,2) {\\v};\n", rank, color)); + PetscCall(PetscViewerASCIIPrintf(viewer, " \\node(\\v_%d) [draw,shape=circle,color=%s,minimum size = 6mm] at (\\vShift+\\v-\\vStart,%d) {\\v};\n", rank, color, height++)); PetscCall(PetscViewerASCIIPrintf(viewer, "}\n")); for (p = pStart; p < pEnd; ++p) { @@ -2730,9 +2747,16 @@ PetscErrorCode DMDestroy_Plex(DM dm) PetscCall(ISDestroy(&mesh->subpointIS)); PetscCall(ISDestroy(&mesh->globalVertexNumbers)); PetscCall(ISDestroy(&mesh->globalCellNumbers)); - PetscCall(PetscSFDestroy(&mesh->periodic.face_sf)); + if (mesh->periodic.face_sfs) { + for (PetscInt i = 0; i < mesh->periodic.num_face_sfs; i++) PetscCall(PetscSFDestroy(&mesh->periodic.face_sfs[i])); + PetscCall(PetscFree(mesh->periodic.face_sfs)); + } PetscCall(PetscSFDestroy(&mesh->periodic.composed_sf)); - PetscCall(ISDestroy(&mesh->periodic.periodic_points)); + if (mesh->periodic.periodic_points) { + for (PetscInt i = 0; i < mesh->periodic.num_face_sfs; i++) PetscCall(ISDestroy(&mesh->periodic.periodic_points[i])); + PetscCall(PetscFree(mesh->periodic.periodic_points)); + } + if (mesh->periodic.transform) PetscCall(PetscFree(mesh->periodic.transform)); PetscCall(PetscSectionDestroy(&mesh->anchorSection)); PetscCall(ISDestroy(&mesh->anchorIS)); PetscCall(PetscSectionDestroy(&mesh->parentSection)); @@ -2781,16 +2805,28 @@ PetscErrorCode DMCreateMatrix_Plex(DM dm, Mat *J) PetscCall(PetscStrcmp(mtype, MATMPISBAIJ, &isSymMPIBlock)); PetscCall(PetscStrcmp(mtype, MATIS, &isMatIS)); if (!isShell) { - PetscBool fillMatrix = (PetscBool)(!dm->prealloc_only && !isMatIS); - PetscInt *dnz, *onz, *dnzu, *onzu, bsLocal[2], bsMinMax[2], *pblocks; - PetscInt pStart, pEnd, p, dof, cdof, num_fields; + // There are three states with pblocks, since block starts can have no dofs: + // UNKNOWN) New Block: An open block has been signalled by pblocks[p] == 1 + // TRUE) Block Start: The first entry in a block has been added + // FALSE) Block Add: An additional block entry has been added, since pblocks[p] == 0 + PetscBT blst; + PetscBool3 bstate = PETSC_BOOL3_UNKNOWN; + PetscBool fillMatrix = (PetscBool)(!dm->prealloc_only && !isMatIS); + const PetscInt *perm = NULL; + PetscInt *dnz, *onz, *dnzu, *onzu, bsLocal[2], bsMinMax[2], *pblocks; + PetscInt pStart, pEnd, dof, cdof, num_fields; PetscCall(DMGetLocalToGlobalMapping(dm, <og)); + PetscCall(PetscSectionGetBlockStarts(sectionLocal, &blst)); + if (sectionLocal->perm) PetscCall(ISGetIndices(sectionLocal->perm, &perm)); PetscCall(PetscCalloc1(localSize, &pblocks)); PetscCall(PetscSectionGetChart(sectionGlobal, &pStart, &pEnd)); PetscCall(PetscSectionGetNumFields(sectionGlobal, &num_fields)); - for (p = pStart; p < pEnd; ++p) { + // We need to process in the permuted order to get block sizes right + for (PetscInt point = pStart; point < pEnd; ++point) { + const PetscInt p = perm ? 
perm[point] : point; + switch (dm->blocking_type) { case DM_BLOCKING_TOPOLOGICAL_POINT: { // One block per topological point PetscInt bdof, offset; @@ -2798,9 +2834,16 @@ PetscErrorCode DMCreateMatrix_Plex(DM dm, Mat *J) PetscCall(PetscSectionGetDof(sectionGlobal, p, &dof)); PetscCall(PetscSectionGetOffset(sectionGlobal, p, &offset)); PetscCall(PetscSectionGetConstraintDof(sectionGlobal, p, &cdof)); - for (PetscInt i = 0; i < dof - cdof; ++i) pblocks[offset - localStart + i] = dof - cdof; - // Signal block concatenation - if (dof - cdof && sectionLocal->blockStarts && !PetscBTLookup(sectionLocal->blockStarts, p)) pblocks[offset - localStart] = -(dof - cdof); + if (blst && PetscBTLookup(blst, p)) bstate = PETSC_BOOL3_UNKNOWN; + if (dof > 0) { + // State change + if (bstate == PETSC_BOOL3_UNKNOWN) bstate = PETSC_BOOL3_TRUE; + else if (bstate == PETSC_BOOL3_TRUE && blst && !PetscBTLookup(blst, p)) bstate = PETSC_BOOL3_FALSE; + + for (PetscInt i = 0; i < dof - cdof; ++i) pblocks[offset - localStart + i] = dof - cdof; + // Signal block concatenation + if (bstate == PETSC_BOOL3_FALSE && dof - cdof) pblocks[offset - localStart] = -(dof - cdof); + } dof = dof < 0 ? -(dof + 1) : dof; bdof = cdof && (dof - cdof) ? 1 : dof; if (dof) { @@ -2835,6 +2878,7 @@ PetscErrorCode DMCreateMatrix_Plex(DM dm, Mat *J) } break; } } + if (sectionLocal->perm) PetscCall(ISRestoreIndices(sectionLocal->perm, &perm)); /* Must have same blocksize on all procs (some might have no points) */ bsLocal[0] = bs < 0 ? PETSC_MAX_INT : bs; bsLocal[1] = bs; @@ -2851,8 +2895,9 @@ PetscErrorCode DMCreateMatrix_Plex(DM dm, Mat *J) PetscCall(DMPlexPreallocateOperator(dm, bs, dnz, onz, dnzu, onzu, *J, fillMatrix)); PetscCall(PetscFree4(dnz, onz, dnzu, onzu)); } - { // Consolidate blocks + if (pblocks) { // Consolidate blocks PetscInt nblocks = 0; + pblocks[0] = PetscAbs(pblocks[0]); for (PetscInt i = 0; i < localSize; i += PetscMax(1, pblocks[i])) { if (pblocks[i] == 0) continue; // Negative block size indicates the blocks should be concatenated @@ -2862,7 +2907,8 @@ PetscErrorCode DMCreateMatrix_Plex(DM dm, Mat *J) } else { pblocks[nblocks++] = pblocks[i]; // nblocks always <= i } - for (PetscInt j = 1; j < pblocks[i]; j++) PetscCheck(pblocks[i + j] == pblocks[i], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Block of size %" PetscInt_FMT " mismatches entry %" PetscInt_FMT, pblocks[i], pblocks[i + j]); + for (PetscInt j = 1; j < pblocks[i]; j++) + PetscCheck(pblocks[i + j] == pblocks[i], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Block of size %" PetscInt_FMT " at %" PetscInt_FMT " mismatches entry %" PetscInt_FMT " at %" PetscInt_FMT, pblocks[i], i, pblocks[i + j], i + j); } PetscCall(MatSetVariableBlockSizes(*J, nblocks, pblocks)); } @@ -3048,7 +3094,7 @@ PetscErrorCode DMPlexGetCone(DM dm, PetscInt p, const PetscInt *cone[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexGetConeTuple - Return the points on the in-edges of several points in the DAG Not Collective @@ -4068,6 +4114,11 @@ PetscErrorCode DMPlexGetTransitiveClosure(DM dm, PetscInt p, PetscBool useCone, PetscValidHeaderSpecific(dm, DM_CLASSID, 1); if (numPoints) PetscAssertPointer(numPoints, 4); if (points) PetscAssertPointer(points, 5); + if (PetscDefined(USE_DEBUG)) { + PetscInt pStart, pEnd; + PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); + PetscCheck(p >= pStart && p < pEnd, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %" PetscInt_FMT " is not in [%" PetscInt_FMT ", %" PetscInt_FMT ")", p, pStart, pEnd); + } PetscCall(DMPlexGetTransitiveClosure_Internal(dm, p, 0, useCone, 
numPoints, points)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -4623,7 +4674,7 @@ PetscErrorCode DMPlexComputeCellTypes(DM dm) PetscCall(DMPlexGetPointDepth(dm, p, &pdepth)); PetscCall(DMPlexComputeCellType_Internal(dm, p, pdepth, &ct)); - PetscCheck(ct != DM_POLYTOPE_UNKNOWN && ct != DM_POLYTOPE_UNKNOWN_CELL && ct != DM_POLYTOPE_UNKNOWN_FACE, PETSC_COMM_SELF, PETSC_ERR_SUP, "Point %" PetscInt_FMT " is screwed up", p); + PetscCheck(ct != DM_POLYTOPE_UNKNOWN && ct != DM_POLYTOPE_UNKNOWN_CELL && ct != DM_POLYTOPE_UNKNOWN_FACE, PETSC_COMM_SELF, PETSC_ERR_SUP, "Point %" PetscInt_FMT " has invalid celltype (%s)", p, DMPolytopeTypes[ct]); PetscCall(DMLabelSetValue(ctLabel, p, ct)); mesh->cellTypes[p - pStart].value_as_uint8 = ct; } @@ -4656,7 +4707,7 @@ PetscErrorCode DMPlexComputeCellTypes(DM dm) .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexRestoreJoin()`, `DMPlexGetMeet()` @*/ -PetscErrorCode DMPlexGetJoin(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt **coveredPoints) +PetscErrorCode DMPlexGetJoin(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt *coveredPoints[]) { DM_Plex *mesh = (DM_Plex *)dm->data; PetscInt *join[2]; @@ -4755,7 +4806,7 @@ PetscErrorCode DMPlexRestoreJoin(DM dm, PetscInt numPoints, const PetscInt point .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexGetJoin()`, `DMPlexRestoreJoin()`, `DMPlexGetMeet()` @*/ -PetscErrorCode DMPlexGetFullJoin(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt **coveredPoints) +PetscErrorCode DMPlexGetFullJoin(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt *coveredPoints[]) { PetscInt *offsets, **closures; PetscInt *join[2]; @@ -4855,7 +4906,7 @@ PetscErrorCode DMPlexGetFullJoin(DM dm, PetscInt numPoints, const PetscInt point .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexRestoreMeet()`, `DMPlexGetJoin()` @*/ -PetscErrorCode DMPlexGetMeet(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveringPoints, const PetscInt **coveringPoints) +PetscErrorCode DMPlexGetMeet(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveringPoints, const PetscInt *coveringPoints[]) { DM_Plex *mesh = (DM_Plex *)dm->data; PetscInt *meet[2]; @@ -4921,7 +4972,7 @@ PetscErrorCode DMPlexGetMeet(DM dm, PetscInt numPoints, const PetscInt points[], .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexGetMeet()`, `DMPlexGetFullMeet()`, `DMPlexGetJoin()` @*/ -PetscErrorCode DMPlexRestoreMeet(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt **coveredPoints) +PetscErrorCode DMPlexRestoreMeet(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt *coveredPoints[]) { PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); @@ -4954,7 +5005,7 @@ PetscErrorCode DMPlexRestoreMeet(DM dm, PetscInt numPoints, const PetscInt point .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexGetMeet()`, `DMPlexRestoreMeet()`, `DMPlexGetJoin()` @*/ -PetscErrorCode DMPlexGetFullMeet(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt **coveredPoints) +PetscErrorCode DMPlexGetFullMeet(DM dm, PetscInt numPoints, const PetscInt points[], PetscInt *numCoveredPoints, const PetscInt *coveredPoints[]) { PetscInt *offsets, **closures; PetscInt *meet[2]; @@ -5030,7 +5081,7 @@ PetscErrorCode DMPlexGetFullMeet(DM dm, PetscInt numPoints, 
const PetscInt point PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexEqual - Determine if two `DM` have the same topology Not Collective @@ -5093,7 +5144,7 @@ PetscErrorCode DMPlexEqual(DM dmA, DM dmB, PetscBool *equal) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexGetNumFaceVertices - Returns the number of vertices on a face Not Collective @@ -5231,7 +5282,7 @@ PetscErrorCode DMPlexGetDepth(DM dm, PetscInt *depth) { DM_Plex *mesh = (DM_Plex *)dm->data; DMLabel label; - PetscInt d = 0; + PetscInt d = -1; PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); @@ -5240,8 +5291,9 @@ PetscErrorCode DMPlexGetDepth(DM dm, PetscInt *depth) PetscCall(DMPlexTransformGetDepth(mesh->tr, depth)); } else { PetscCall(DMPlexGetDepthLabel(dm, &label)); - if (label) PetscCall(DMLabelGetNumValues(label, &d)); - *depth = d - 1; + // Allow missing depths + if (label) PetscCall(DMLabelGetValueBounds(label, NULL, &d)); + *depth = d; } PetscFunctionReturn(PETSC_SUCCESS); } @@ -5565,7 +5617,7 @@ PetscErrorCode DMCreateCoordinateField_Plex(DM dm, DMField *field) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexGetConeSection - Return a section which describes the layout of cone data Not Collective @@ -5590,7 +5642,7 @@ PetscErrorCode DMPlexGetConeSection(DM dm, PetscSection *section) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexGetSupportSection - Return a section which describes the layout of support data Not Collective @@ -5753,7 +5805,6 @@ static PetscErrorCode GetFieldSize_Private(PetscInt dim, PetscInt k, PetscBool t } /*@ - DMPlexSetClosurePermutationTensor - Create a permutation from the default (BFS) point ordering in the closure, to a lexicographic ordering over the tensor product cell (i.e., line, quad, hex, etc.), and set this permutation in the section provided (or the section of the `DM`). @@ -6447,6 +6498,12 @@ PetscErrorCode DMPlexVecGetOrientedClosure_Internal(DM dm, PetscSection section, Fortran Notes: The `csize` argument is not present in the Fortran binding since it is internal to the array. + `values` must be declared with +.vb + PetscScalar,dimension(:),pointer :: values +.ve + and it will be allocated internally by PETSc to hold the values returned + .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexVecRestoreClosure()`, `DMPlexVecSetClosure()`, `DMPlexMatSetClosure()` @*/ PetscErrorCode DMPlexVecGetClosure(DM dm, PetscSection section, Vec v, PetscInt point, PetscInt *csize, PetscScalar *values[]) @@ -6533,7 +6590,7 @@ PetscErrorCode DMPlexVecGetClosureAtDepth_Internal(DM dm, PetscSection section, } /*@C - DMPlexVecRestoreClosure - Restore the array of the values on the closure of 'point' + DMPlexVecRestoreClosure - Restore the array of the values on the closure of 'point' obtained with `DMPlexVecGetClosure()` Not collective @@ -6543,14 +6600,14 @@ PetscErrorCode DMPlexVecGetClosureAtDepth_Internal(DM dm, PetscSection section, . v - The local vector . point - The point in the `DM` . csize - The number of values in the closure, or `NULL` -- values - The array of values, which is a borrowed array and should not be freed +- values - The array of values Level: intermediate Note: The array values are discarded and not copied back into `v`. In order to copy values back to `v`, use `DMPlexVecSetClosure()` - Fortran Notes: + Fortran Note: The `csize` argument is not present in the Fortran binding since it is internal to the array. 
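+ Example:
+ A minimal sketch of the intended get/restore pairing; `locX` and `point` are illustrative placeholders for a local vector and a mesh point, and passing `NULL` for the section uses the default local section of the `DM`:
+.vb
+  PetscScalar *vals = NULL;
+  PetscInt     csize;
+
+  PetscCall(DMPlexVecGetClosure(dm, NULL, locX, point, &csize, &vals));
+  // use the csize values gathered over the closure of point
+  PetscCall(DMPlexVecRestoreClosure(dm, NULL, locX, point, &csize, &vals));
+.ve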
.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexVecGetClosure()`, `DMPlexVecSetClosure()`, `DMPlexMatSetClosure()` @@ -6952,10 +7009,19 @@ static inline PetscErrorCode DMPlexVecSetClosure_Depth1_Static(DM dm, PetscSecti . point - The point in the `DM` . values - The array of values - mode - The insert mode. One of `INSERT_ALL_VALUES`, `ADD_ALL_VALUES`, `INSERT_VALUES`, `ADD_VALUES`, `INSERT_BC_VALUES`, and `ADD_BC_VALUES`, - where `INSERT_ALL_VALUES` and `ADD_ALL_VALUES` also overwrite boundary conditions. + where `INSERT_ALL_VALUES` and `ADD_ALL_VALUES` also overwrite boundary conditions. Level: intermediate + Note: + Usually the input arrays were obtained with `DMPlexVecGetClosure()`. + + Fortran Note: + `values` must be declared with +.vb + PetscScalar,dimension(:),pointer :: values +.ve + .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexVecGetClosure()`, `DMPlexMatSetClosure()` @*/ PetscErrorCode DMPlexVecSetClosure(DM dm, PetscSection section, Vec v, PetscInt point, const PetscScalar values[], InsertMode mode) @@ -8528,7 +8594,7 @@ PetscErrorCode DMPlexMatGetClosureIndicesRefined(DM dmf, PetscSection fsection, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexGetVTKCellHeight - Returns the height in the DAG used to determine which points are cells (normally 0) Input Parameter: @@ -8552,7 +8618,7 @@ PetscErrorCode DMPlexGetVTKCellHeight(DM dm, PetscInt *cellHeight) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexSetVTKCellHeight - Sets the height in the DAG used to determine which points are cells (normally 0) Input Parameters: @@ -8801,6 +8867,37 @@ PetscErrorCode DMPlexCreatePointNumbering(DM dm, IS *globalPointNumbers) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMPlexCreateEdgeNumbering - Create a global numbering for edges. + + Collective + + Input Parameter: +. dm - The `DMPLEX` object + + Output Parameter: +. globalEdgeNumbers - Global numbers for all edges on this process + + Level: developer + + Notes: + The edge numbering `IS` is parallel, with the local portion indexed by local points (see `DMGetLocalSection()`). In the `IS`, owned edges carry their non-negative global number, while edges owned by other ranks are stored involuted as -(idx+1). + +.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexGetCellNumbering()`, `DMPlexGetVertexNumbering()`, `DMPlexCreatePointNumbering()` +@*/ +PetscErrorCode DMPlexCreateEdgeNumbering(DM dm, IS *globalEdgeNumbers) +{ + PetscSF sf; + PetscInt eStart, eEnd; + + PetscFunctionBegin; + PetscValidHeaderSpecific(dm, DM_CLASSID, 1); + PetscCall(DMGetPointSF(dm, &sf)); + PetscCall(DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd)); + PetscCall(DMPlexCreateNumbering_Plex(dm, eStart, eEnd, 0, NULL, sf, globalEdgeNumbers)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ DMPlexCreateRankField - Create a cell field whose value is the rank of the owner @@ -9400,6 +9497,42 @@ PetscErrorCode DMPlexCheckPointSF(DM dm, PetscSF pointSF, PetscBool allowExtraRo PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMPlexCheckOrphanVertices - Check that no vertices are disconnected from the mesh, unless the mesh only consists of disconnected vertices. + + Collective + + Input Parameter: +. dm - The `DMPLEX` object + + Level: developer + + Notes: + This is mainly intended for debugging/testing purposes. + + Disconnected points of other cell types would be caught by the symmetry and face checks. + + For the complete list of DMPlexCheck* functions, see `DMSetFromOptions()`.
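+ A minimal usage sketch (the check is also run for you as part of `DMPlexCheck()`):
+.vb
+  PetscCall(DMPlexCheckOrphanVertices(dm));
+.ve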
+ +.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexCheck()`, `DMSetFromOptions()` +@*/ +PetscErrorCode DMPlexCheckOrphanVertices(DM dm) +{ + PetscInt pStart, pEnd, vStart, vEnd; + + PetscFunctionBegin; + PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); + PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd)); + if (pStart == vStart && pEnd == vEnd) PetscFunctionReturn(PETSC_SUCCESS); + for (PetscInt v = vStart; v < vEnd; ++v) { + PetscInt suppSize; + + PetscCall(DMPlexGetSupportSize(dm, v, &suppSize)); + PetscCheck(suppSize, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Vertex %" PetscInt_FMT " is disconnected from the mesh", v); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ DMPlexCheck - Perform various checks of `DMPLEX` sanity @@ -9429,6 +9562,7 @@ PetscErrorCode DMPlexCheck(DM dm) PetscCall(DMPlexCheckGeometry(dm)); PetscCall(DMPlexCheckPointSF(dm, NULL, PETSC_FALSE)); PetscCall(DMPlexCheckInterfaceCones(dm)); + PetscCall(DMPlexCheckOrphanVertices(dm)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/dm/impls/plex/plexcreate.c b/src/dm/impls/plex/plexcreate.c index 9b6f4217ced..5967d70d7e3 100644 --- a/src/dm/impls/plex/plexcreate.c +++ b/src/dm/impls/plex/plexcreate.c @@ -1,12 +1,17 @@ #define PETSCDM_DLL #include /*I "petscdmplex.h" I*/ -#include /*I "petscdmplex.h" I*/ +#include #include -#include +#include /*I "petscdmplextransform.h" I*/ #include #include #include +#ifdef PETSC_HAVE_UNISTD_H + #include +#endif +#include + PetscLogEvent DMPLEX_CreateFromFile, DMPLEX_CreateFromOptions, DMPLEX_BuildFromCellList, DMPLEX_BuildCoordinatesFromCellList; /* External function declarations here */ @@ -29,6 +34,7 @@ PetscErrorCode DMPlexCopy_Internal(DM dmin, PetscBool copyPeriodicity, PetscBool if (copyPeriodicity) { PetscCall(DMGetPeriodicity(dmin, &maxCell, &Lstart, &L)); PetscCall(DMSetPeriodicity(dmout, maxCell, Lstart, L)); + PetscCall(DMLocalizeCoordinates(dmout)); } PetscCall(DMPlexDistributeGetDefault(dmin, &dist)); PetscCall(DMPlexDistributeSetDefault(dmout, dist)); @@ -66,6 +72,12 @@ PetscErrorCode DMPlexReplace_Internal(DM dm, DM *ndm) PetscFunctionReturn(PETSC_SUCCESS); } dm->setupcalled = dmNew->setupcalled; + if (!dm->hdr.name) { + const char *name; + + PetscCall(PetscObjectGetName((PetscObject)*ndm, &name)); + PetscCall(PetscObjectSetName((PetscObject)dm, name)); + } PetscCall(DMGetDimension(dmNew, &dim)); PetscCall(DMSetDimension(dm, dim)); PetscCall(DMGetCoordinateDim(dmNew, &cdim)); @@ -92,8 +104,12 @@ PetscErrorCode DMPlexReplace_Internal(DM dm, DM *ndm) PetscCall(DMInitialize_Plex(dm)); dm->data = dmNew->data; ((DM_Plex *)dmNew->data)->refct++; - PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &sf)); - PetscCall(DMPlexSetIsoperiodicFaceSF(dm, sf)); // for the compose function effect on dm + { + PetscInt num_face_sfs; + const PetscSF *sfs; + PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &num_face_sfs, &sfs)); + PetscCall(DMPlexSetIsoperiodicFaceSF(dm, num_face_sfs, (PetscSF *)sfs)); // for the compose function effect on dm + } PetscCall(DMDestroyLabelLinkList_Internal(dm)); PetscCall(DMCopyLabels(dmNew, dm, PETSC_OWN_POINTER, PETSC_TRUE, DM_COPY_LABELS_FAIL)); PetscCall(DMGetCoarseDM(dmNew, &coarseDM)); @@ -1318,7 +1334,7 @@ static PetscErrorCode DMPlexCreateBoxMesh_Internal(DM dm, DMPlexShape shape, Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateBoxMesh - Creates a mesh on the tensor product of unit intervals (box) using simplices or tensor cells (hexahedra). 
Collective @@ -1432,7 +1448,7 @@ static PetscErrorCode DMPlexCreateWedgeBoxMesh_Internal(DM dm, const PetscInt fa } /*@ - DMPlexCreateWedgeBoxMesh - Creates a 3-D mesh tesselating the (x,y) plane and extruding in the third direction using wedge cells. + DMPlexCreateWedgeBoxMesh - Creates a 3-D mesh tessellating the (x,y) plane and extruding in the third direction using wedge cells. Collective @@ -1710,7 +1726,7 @@ PetscErrorCode DMPlexCreateHypercubicMesh(MPI_Comm comm, PetscInt dim, const Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexSetOptionsPrefix - Sets the prefix used for searching for all `DM` options in the database. Logically Collective @@ -1789,7 +1805,7 @@ static void snapToCylinder(PetscInt dim, PetscInt Nf, PetscInt NfAux, const Pets f0[2] = u[2]; } -static PetscErrorCode DMPlexCreateHexCylinderMesh_Internal(DM dm, DMBoundaryType periodicZ) +static PetscErrorCode DMPlexCreateHexCylinderMesh_Internal(DM dm, DMBoundaryType periodicZ, PetscInt Nr) { const PetscInt dim = 3; PetscInt numCells, numVertices; @@ -2125,7 +2141,7 @@ static PetscErrorCode DMPlexCreateHexCylinderMesh_Internal(DM dm, DMBoundaryType PetscDS cds; PetscScalar c[2] = {1.0, 1.0}; - PetscCall(DMPlexCreateCoordinateSpace(dm, 1, PETSC_TRUE, snapToCylinder)); + PetscCall(DMPlexCreateCoordinateSpace(dm, 1, PETSC_TRUE, NULL)); PetscCall(DMGetCoordinateDM(dm, &cdm)); PetscCall(DMGetDS(cdm, &cds)); PetscCall(PetscDSSetConstants(cds, 2, c)); @@ -2134,6 +2150,48 @@ static PetscErrorCode DMPlexCreateHexCylinderMesh_Internal(DM dm, DMBoundaryType /* Wait for coordinate creation before doing in-place modification */ PetscCall(DMPlexInterpolateInPlace_Internal(dm)); + + char oldprefix[PETSC_MAX_PATH_LEN]; + const char *prefix; + + PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix)); + PetscCall(PetscStrncpy(oldprefix, prefix, PETSC_MAX_PATH_LEN)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, "petsc_cyl_ref_")); + for (PetscInt r = 0; r < PetscMax(0, Nr); ++r) { + DM rdm; + + PetscCall(DMRefine(dm, PetscObjectComm((PetscObject)dm), &rdm)); + PetscCall(DMPlexReplace_Internal(dm, &rdm)); + } + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldprefix)); + PetscCall(DMPlexRemapGeometry(dm, 0.0, snapToCylinder)); + + DMLabel bdlabel, edgelabel; + IS faceIS; + const PetscInt *faces; + PetscInt Nf; + + PetscCall(DMCreateLabel(dm, "marker")); + PetscCall(DMGetLabel(dm, "marker", &bdlabel)); + PetscCall(DMCreateLabel(dm, "generatrix")); + PetscCall(DMGetLabel(dm, "generatrix", &edgelabel)); + PetscCall(DMPlexMarkBoundaryFaces(dm, PETSC_DETERMINE, bdlabel)); + // Remove faces on top and bottom + PetscCall(DMLabelGetStratumIS(bdlabel, 1, &faceIS)); + if (faceIS) { + PetscCall(ISGetLocalSize(faceIS, &Nf)); + PetscCall(ISGetIndices(faceIS, &faces)); + for (PetscInt f = 0; f < Nf; ++f) { + PetscReal vol, normal[3]; + + PetscCall(DMPlexComputeCellGeometryFVM(dm, faces[f], &vol, NULL, normal)); + if (PetscAbsReal(normal[2]) < PETSC_SMALL) PetscCall(DMLabelSetValue(edgelabel, faces[f], 1)); + } + PetscCall(ISRestoreIndices(faceIS, &faces)); + PetscCall(ISDestroy(&faceIS)); + } + PetscCall(DMPlexLabelComplete(dm, bdlabel)); + PetscCall(DMPlexLabelComplete(dm, edgelabel)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -2144,7 +2202,8 @@ static PetscErrorCode DMPlexCreateHexCylinderMesh_Internal(DM dm, DMBoundaryType Input Parameters: + comm - The communicator for the `DM` object -- periodicZ - The boundary type for the Z direction +. 
periodicZ - The boundary type for the Z direction +- Nr - The number of refinements to carry out Output Parameter: . dm - The `DM` object @@ -2187,13 +2246,13 @@ static PetscErrorCode DMPlexCreateHexCylinderMesh_Internal(DM dm, DMBoundaryType .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexCreateBoxMesh()`, `DMSetType()`, `DMCreate()` @*/ -PetscErrorCode DMPlexCreateHexCylinderMesh(MPI_Comm comm, DMBoundaryType periodicZ, DM *dm) +PetscErrorCode DMPlexCreateHexCylinderMesh(MPI_Comm comm, DMBoundaryType periodicZ, PetscInt Nr, DM *dm) { PetscFunctionBegin; - PetscAssertPointer(dm, 3); + PetscAssertPointer(dm, 4); PetscCall(DMCreate(comm, dm)); PetscCall(DMSetType(*dm, DMPLEX)); - PetscCall(DMPlexCreateHexCylinderMesh_Internal(*dm, periodicZ)); + PetscCall(DMPlexCreateHexCylinderMesh_Internal(*dm, periodicZ, Nr)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -3855,6 +3914,35 @@ static void boxToAnnulus(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscI f0[1] = r * PetscSinReal(th); } +// Insert vertices and their joins, marked by depth +static PetscErrorCode ProcessCohesiveLabel_Vertices(DM dm, DMLabel label, DMLabel vlabel, PetscInt val, PetscInt n, const PetscInt vertices[]) +{ + PetscFunctionBegin; + PetscCall(DMPlexMarkSubmesh_Interpolated(dm, vlabel, val, PETSC_FALSE, PETSC_FALSE, label, NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +// Insert faces and their closures, marked by depth +static PetscErrorCode ProcessCohesiveLabel_Faces(DM dm, DMLabel label, PetscInt n, const PetscInt faces[]) +{ + PetscFunctionBegin; + for (PetscInt p = 0; p < n; ++p) { + const PetscInt point = faces[p]; + PetscInt *closure = NULL; + PetscInt clSize, pdepth; + + PetscCall(DMPlexGetPointDepth(dm, point, &pdepth)); + PetscCall(DMLabelSetValue(label, point, pdepth)); + PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &clSize, &closure)); + for (PetscInt cl = 0; cl < clSize * 2; cl += 2) { + PetscCall(DMPlexGetPointDepth(dm, closure[cl], &pdepth)); + PetscCall(DMLabelSetValue(label, closure[cl], pdepth)); + } + PetscCall(DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &clSize, &closure)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg); const char *const DMPlexShapes[] = {"box", "box_surface", "ball", "sphere", "cylinder", "schwarz_p", "gyroid", "doublet", "annulus", "hypercubic", "zbox", "unknown", "DMPlexShape", "DM_SHAPE_", NULL}; @@ -3865,7 +3953,7 @@ static PetscErrorCode DMPlexCreateFromOptions_Internal(PetscOptionItems *PetscOp DMPolytopeType cell = DM_POLYTOPE_TRIANGLE; PetscInt dim = 2; PetscBool simplex = PETSC_TRUE, interpolate = PETSC_TRUE, adjCone = PETSC_FALSE, adjClosure = PETSC_TRUE, refDomain = PETSC_FALSE; - PetscBool flg, flg2, fflg, bdfflg, nameflg; + PetscBool flg, flg2, fflg, strflg, bdfflg, nameflg; MPI_Comm comm; char filename[PETSC_MAX_PATH_LEN] = ""; char bdFilename[PETSC_MAX_PATH_LEN] = ""; @@ -3877,6 +3965,7 @@ static PetscErrorCode DMPlexCreateFromOptions_Internal(PetscOptionItems *PetscOp PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); /* TODO Turn this into a registration interface */ PetscCall(PetscOptionsString("-dm_plex_filename", "File containing a mesh", "DMPlexCreateFromFile", filename, filename, sizeof(filename), &fflg)); + PetscCall(PetscOptionsString("-dm_plex_file_contents", "Contents of a file format in a string", "DMPlexCreateFromFile", filename, filename, 
sizeof(filename), &strflg)); PetscCall(PetscOptionsString("-dm_plex_boundary_filename", "File containing a mesh boundary", "DMPlexCreateFromFile", bdFilename, bdFilename, sizeof(bdFilename), &bdfflg)); PetscCall(PetscOptionsString("-dm_plex_name", "Name of the mesh in the file", "DMPlexCreateFromFile", plexname, plexname, sizeof(plexname), &nameflg)); PetscCall(PetscOptionsEnum("-dm_plex_cell", "Cell shape", "", DMPolytopeTypes, (PetscEnum)cell, (PetscEnum *)&cell, NULL)); @@ -3905,23 +3994,59 @@ static PetscErrorCode DMPlexCreateFromOptions_Internal(PetscOptionItems *PetscOp } if (fflg) { - DM dmnew; + DM dmnew; + const char *name; - PetscCall(DMPlexCreateFromFile(PetscObjectComm((PetscObject)dm), filename, plexname, interpolate, &dmnew)); + PetscCall(PetscObjectGetName((PetscObject)dm, &name)); + PetscCall(DMPlexCreateFromFile(PetscObjectComm((PetscObject)dm), filename, nameflg ? plexname : name, interpolate, &dmnew)); PetscCall(DMPlexCopy_Internal(dm, PETSC_FALSE, PETSC_FALSE, dmnew)); PetscCall(DMPlexReplace_Internal(dm, &dmnew)); } else if (refDomain) { PetscCall(DMPlexCreateReferenceCell_Internal(dm, cell)); } else if (bdfflg) { - DM bdm, dmnew; + DM bdm, dmnew; + const char *name; - PetscCall(DMPlexCreateFromFile(PetscObjectComm((PetscObject)dm), bdFilename, plexname, interpolate, &bdm)); + PetscCall(PetscObjectGetName((PetscObject)dm, &name)); + PetscCall(DMPlexCreateFromFile(PetscObjectComm((PetscObject)dm), bdFilename, nameflg ? plexname : name, interpolate, &bdm)); PetscCall(PetscObjectSetOptionsPrefix((PetscObject)bdm, "bd_")); PetscCall(DMSetFromOptions(bdm)); PetscCall(DMPlexGenerate(bdm, NULL, interpolate, &dmnew)); PetscCall(DMDestroy(&bdm)); PetscCall(DMPlexCopy_Internal(dm, PETSC_FALSE, PETSC_FALSE, dmnew)); PetscCall(DMPlexReplace_Internal(dm, &dmnew)); + } else if (strflg) { + DM dmnew; + PetscViewer viewer; + const char *contents; + char *strname; + char tmpdir[PETSC_MAX_PATH_LEN]; + char tmpfilename[PETSC_MAX_PATH_LEN]; + char name[PETSC_MAX_PATH_LEN]; + MPI_Comm comm; + PetscMPIInt rank; + + PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); + PetscCallMPI(MPI_Comm_rank(comm, &rank)); + PetscCall(PetscStrchr(filename, ':', &strname)); + PetscCheck(strname, comm, PETSC_ERR_ARG_WRONG, "File contents must have the form \"ext:string_name\", not %s", filename); + strname[0] = '\0'; + ++strname; + PetscCall(PetscDLSym(NULL, strname, (void **)&contents)); + PetscCheck(contents, comm, PETSC_ERR_ARG_WRONG, "Could not locate mesh string %s", strname); + PetscCall(PetscGetTmp(comm, tmpdir, PETSC_MAX_PATH_LEN)); + PetscCall(PetscStrlcat(tmpdir, "/meshXXXXXX", PETSC_MAX_PATH_LEN)); + PetscCall(PetscMkdtemp(tmpdir)); + PetscCall(PetscSNPrintf(tmpfilename, PETSC_MAX_PATH_LEN, "%s/mesh.%s", tmpdir, filename)); + PetscCall(PetscViewerASCIIOpen(comm, tmpfilename, &viewer)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%s\n", contents)); + PetscCall(PetscViewerDestroy(&viewer)); + PetscCall(DMPlexCreateFromFile(PetscObjectComm((PetscObject)dm), tmpfilename, plexname, interpolate, &dmnew)); + PetscCall(PetscRMTree(tmpdir)); + PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%s Mesh", strname)); + PetscCall(PetscObjectSetName((PetscObject)dm, name)); + PetscCall(DMPlexCopy_Internal(dm, PETSC_FALSE, PETSC_FALSE, dmnew)); + PetscCall(DMPlexReplace_Internal(dm, &dmnew)); } else { PetscCall(PetscObjectSetName((PetscObject)dm, DMPlexShapes[shape])); switch (shape) { @@ -4014,15 +4139,17 @@ static PetscErrorCode DMPlexCreateFromOptions_Internal(PetscOptionItems *PetscOp case 
DM_SHAPE_CYLINDER: { DMBoundaryType bdt = DM_BOUNDARY_NONE; PetscInt Nw = 6; + PetscInt Nr = 0; PetscCall(PetscOptionsEnum("-dm_plex_cylinder_bd", "Boundary type in the z direction", "", DMBoundaryTypes, (PetscEnum)bdt, (PetscEnum *)&bdt, NULL)); PetscCall(PetscOptionsInt("-dm_plex_cylinder_num_wedges", "Number of wedges around the cylinder", "", Nw, &Nw, NULL)); + PetscCall(PetscOptionsInt("-dm_plex_cylinder_num_refine", "Number of refinements before projection", "", Nr, &Nr, NULL)); switch (cell) { case DM_POLYTOPE_TRI_PRISM_TENSOR: PetscCall(DMPlexCreateWedgeCylinderMesh_Internal(dm, Nw, interpolate)); break; default: - PetscCall(DMPlexCreateHexCylinderMesh_Internal(dm, bdt)); + PetscCall(DMPlexCreateHexCylinderMesh_Internal(dm, bdt, Nr)); break; } } break; @@ -4101,6 +4228,61 @@ static PetscErrorCode DMPlexCreateFromOptions_Internal(PetscOptionItems *PetscOp PetscCall(PetscOptionsGetIntArray(NULL, ((PetscObject)dm)->prefix, fulloption, points, &n, NULL)); for (PetscInt p = 0; p < n; ++p) PetscCall(DMLabelSetValue(label, points[p], 1)); } + // Allow cohesive label creation + // Faces are input, completed, and all points are marked with their depth + PetscCall(PetscOptionsFindPairPrefix_Private(NULL, ((PetscObject)dm)->prefix, "-dm_plex_cohesive_label_", &option, NULL, &flg)); + if (flg) { + DMLabel label; + PetscInt points[1024], n, pStart, pEnd, Nl = 1; + PetscBool noCreate = PETSC_FALSE; + char fulloption[PETSC_MAX_PATH_LEN]; + char name[PETSC_MAX_PATH_LEN]; + size_t len; + + PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); + PetscCall(PetscStrncpy(name, &option[23], PETSC_MAX_PATH_LEN)); + PetscCall(PetscStrlen(name, &len)); + if (name[len - 1] == '0') Nl = 10; + for (PetscInt l = 0; l < Nl; ++l) { + if (l > 0) name[len - 1] = '0' + l; + fulloption[0] = 0; + PetscCall(PetscStrlcat(fulloption, "-dm_plex_cohesive_label_", 32)); + PetscCall(PetscStrlcat(fulloption, name, PETSC_MAX_PATH_LEN - 32)); + n = 1024; + PetscCall(PetscOptionsGetIntArray(NULL, ((PetscObject)dm)->prefix, fulloption, points, &n, &flg)); + if (!flg) break; + PetscCall(DMHasLabel(dm, name, &noCreate)); + if (noCreate) { + DMLabel inlabel; + IS pointIS; + const PetscInt *lpoints; + PetscInt pdep, ln, inval = points[0]; + char newname[PETSC_MAX_PATH_LEN]; + + PetscCheck(n == 1, comm, PETSC_ERR_ARG_WRONG, "Must specify a label value with this option"); + PetscCall(DMGetLabel(dm, name, &inlabel)); + PetscCall(DMLabelGetStratumIS(inlabel, inval, &pointIS)); + PetscCall(ISGetLocalSize(pointIS, &ln)); + PetscCall(ISGetIndices(pointIS, &lpoints)); + PetscCall(DMPlexGetPointDepth(dm, lpoints[0], &pdep)); + PetscCall(PetscSNPrintf(newname, PETSC_MAX_PATH_LEN, "%s%" PetscInt_FMT, name, points[0])); + PetscCall(DMCreateLabel(dm, newname)); + PetscCall(DMGetLabel(dm, newname, &label)); + if (!pdep) PetscCall(ProcessCohesiveLabel_Vertices(dm, label, inlabel, inval, ln, lpoints)); + else PetscCall(ProcessCohesiveLabel_Faces(dm, label, ln, lpoints)); + PetscCall(ISRestoreIndices(pointIS, &lpoints)); + PetscCall(ISDestroy(&pointIS)); + } else { + PetscCall(DMCreateLabel(dm, name)); + PetscCall(DMGetLabel(dm, name, &label)); + if (pStart >= pEnd) n = 0; + PetscCall(ProcessCohesiveLabel_Faces(dm, label, n, points)); + } + PetscCall(DMPlexOrientLabel(dm, label)); + PetscCall(DMPlexLabelCohesiveComplete(dm, label, NULL, 1, PETSC_FALSE, PETSC_FALSE, NULL)); + } + } + PetscCall(DMViewFromOptions(dm, NULL, "-created_dm_view")); PetscCall(PetscLogEventEnd(DMPLEX_CreateFromOptions, dm, 0, 0, 0)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -4225,7 
+4407,7 @@ static PetscErrorCode DMSetFromOptions_Plex(DM dm, PetscOptionItems *PetscOption DMReorderDefaultFlag reorder; PetscReal volume = -1.0; PetscInt prerefine = 0, refine = 0, r, coarsen = 0, overlap = 0, extLayers = 0, dim; - PetscBool uniformOrig = PETSC_FALSE, created = PETSC_FALSE, uniform = PETSC_TRUE, distribute, saveSF = PETSC_FALSE, interpolate = PETSC_TRUE, coordSpace = PETSC_TRUE, remap = PETSC_TRUE, ghostCells = PETSC_FALSE, isHierarchy, ignoreModel = PETSC_FALSE, flg; + PetscBool uniformOrig = PETSC_FALSE, created = PETSC_FALSE, uniform = PETSC_TRUE, distribute, saveSF = PETSC_FALSE, interpolate = PETSC_TRUE, coordSpace = PETSC_TRUE, remap = PETSC_TRUE, ghostCells = PETSC_FALSE, isHierarchy, flg; PetscFunctionBegin; PetscOptionsHeadBegin(PetscOptionsObject, "DMPlex Options"); @@ -4292,8 +4474,6 @@ static PetscErrorCode DMSetFromOptions_Plex(DM dm, PetscOptionItems *PetscOption PetscCall(DMSetFromOptions_NonRefinement_Plex(dm, PetscOptionsObject)); } /* Handle DMPlex refinement before distribution */ - PetscCall(PetscOptionsBool("-dm_refine_ignore_model", "Flag to ignore the geometry model when refining", "DMCreate", ignoreModel, &ignoreModel, &flg)); - if (flg) ((DM_Plex *)dm->data)->ignoreModel = ignoreModel; PetscCall(DMPlexGetRefinementUniform(dm, &uniformOrig)); PetscCall(PetscOptionsBoundedInt("-dm_refine_pre", "The number of refinements before distribution", "DMCreate", prerefine, &prerefine, NULL, 0)); PetscCall(PetscOptionsBool("-dm_refine_remap_pre", "Flag to control coordinate remapping", "DMCreate", remap, &remap, NULL)); @@ -4377,7 +4557,7 @@ static PetscErrorCode DMSetFromOptions_Plex(DM dm, PetscOptionItems *PetscOption PetscInt degree = 1, deg; PetscInt height = 0; DM cdm; - PetscBool flg; + PetscBool flg, localize = PETSC_TRUE, sparseLocalize = PETSC_TRUE; PetscCall(PetscOptionsBool("-dm_coord_space", "Use an FEM space for coordinates", "", coordSpace, &coordSpace, &flg)); PetscCall(PetscOptionsInt("-dm_coord_petscspace_degree", "FEM degree for coordinate space", "", degree, °ree, NULL)); @@ -4403,10 +4583,12 @@ static PetscErrorCode DMSetFromOptions_Plex(DM dm, PetscOptionItems *PetscOption } mesh->coordFunc = NULL; } - PetscCall(PetscOptionsBool("-dm_sparse_localize", "Localize only necessary cells", "", dm->sparseLocalize, &dm->sparseLocalize, &flg)); + PetscCall(PetscOptionsBool("-dm_localize", "Localize mesh coordinates", "", localize, &localize, NULL)); + PetscCall(PetscOptionsBool("-dm_sparse_localize", "Localize only necessary cells", "DMSetSparseLocalize", sparseLocalize, &sparseLocalize, &flg)); + if (flg) PetscCall(DMSetSparseLocalize(dm, sparseLocalize)); PetscCall(PetscOptionsInt("-dm_localize_height", "Localize edges and faces in addition to cells", "", height, &height, &flg)); if (flg) PetscCall(DMPlexSetMaxProjectionHeight(cdm, height)); - PetscCall(DMLocalizeCoordinates(dm)); + if (localize) PetscCall(DMLocalizeCoordinates(dm)); } /* Handle DMPlex refinement */ remap = PETSC_TRUE; @@ -4743,14 +4925,15 @@ static PetscErrorCode DMInitialize_Plex(DM dm) PETSC_INTERN PetscErrorCode DMClone_Plex(DM dm, DM *newdm) { - DM_Plex *mesh = (DM_Plex *)dm->data; - PetscSF face_sf; + DM_Plex *mesh = (DM_Plex *)dm->data; + const PetscSF *face_sfs; + PetscInt num_face_sfs; PetscFunctionBegin; mesh->refct++; (*newdm)->data = mesh; - PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &face_sf)); - PetscCall(DMPlexSetIsoperiodicFaceSF(*newdm, face_sf)); + PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &num_face_sfs, &face_sfs)); + 
PetscCall(DMPlexSetIsoperiodicFaceSF(*newdm, num_face_sfs, (PetscSF *)face_sfs)); PetscCall(PetscObjectChangeTypeName((PetscObject)*newdm, DMPLEX)); PetscCall(DMInitialize_Plex(*newdm)); PetscFunctionReturn(PETSC_SUCCESS); @@ -4769,6 +4952,8 @@ PETSC_INTERN PetscErrorCode DMClone_Plex(DM dm, DM *newdm) . -dm_distribute - Distribute mesh across processes . -dm_distribute_overlap - Number of cells to overlap for distribution . -dm_refine - Refine mesh after distribution +. -dm_localize - Whether to localize coordinates for periodic meshes +. -dm_sparse_localize - Whether to only localize cells on the periodic boundary . -dm_plex_hash_location - Use grid hashing for point location . -dm_plex_hash_box_faces - The number of divisions in each direction of the grid hash . -dm_plex_partition_balance - Attempt to evenly divide points on partition boundary between processes @@ -4998,7 +5183,7 @@ PetscErrorCode DMPlexBuildFromCellListParallel(DM dm, PetscInt numCells, PetscIn PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexBuildCoordinatesFromCellListParallel - Build `DM` coordinates from a list of coordinates for each owned vertex (common mesh generator output) Collective; No Fortran Support @@ -5130,7 +5315,7 @@ PetscErrorCode DMPlexCreateFromCellListParallelPetsc(MPI_Comm comm, PetscInt dim PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexBuildFromCellList - Build `DMPLEX` topology from a list of vertices for each cell (common mesh generator output) Collective; No Fortran Support @@ -5140,7 +5325,7 @@ PetscErrorCode DMPlexCreateFromCellListParallelPetsc(MPI_Comm comm, PetscInt dim . numCells - The number of cells owned by this process . numVertices - The number of vertices owned by this process, or `PETSC_DETERMINE` . numCorners - The number of vertices for each cell -- cells - An array of numCells*numCorners numbers, the global vertex numbers for each cell +- cells - An array of `numCells` x `numCorners` numbers, the global vertex numbers for each cell Level: advanced @@ -5216,10 +5401,10 @@ PetscErrorCode DMPlexBuildFromCellList(DM dm, PetscInt numCells, PetscInt numVer PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexBuildCoordinatesFromCellList - Build `DM` coordinates from a list of coordinates for each owned vertex (common mesh generator output) - Collective; No Fortran Support + Collective Input Parameters: + dm - The `DM` @@ -5436,10 +5621,10 @@ PetscErrorCode DMPlexCreateFromDAG(DM dm, PetscInt depth, const PetscInt numPoin Note: The format is the simplest possible: .vb - Ne - v0 v1 ... vk - Nv - x y z marker + dim Ne Nv Nc Nl + v_1 v_2 ... v_Nc + ... + x y z marker_1 ... 
marker_Nl .ve Developer Note: @@ -5455,9 +5640,9 @@ static PetscErrorCode DMPlexCreateCellVertexFromFile(MPI_Comm comm, const char f PetscSection coordSection; PetscScalar *coords; char line[PETSC_MAX_PATH_LEN]; - PetscInt dim = 3, cdim = 3, coordSize, v, c, d; + PetscInt cdim, coordSize, v, c, d; PetscMPIInt rank; - int snum, Nv, Nc, Ncn, Nl; + int snum, dim, Nv, Nc, Ncn, Nl; PetscFunctionBegin; PetscCallMPI(MPI_Comm_rank(comm, &rank)); @@ -5466,16 +5651,18 @@ static PetscErrorCode DMPlexCreateCellVertexFromFile(MPI_Comm comm, const char f PetscCall(PetscViewerFileSetMode(viewer, FILE_MODE_READ)); PetscCall(PetscViewerFileSetName(viewer, filename)); if (rank == 0) { - PetscCall(PetscViewerRead(viewer, line, 4, NULL, PETSC_STRING)); - snum = sscanf(line, "%d %d %d %d", &Nc, &Nv, &Ncn, &Nl); - PetscCheck(snum == 4, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unable to parse cell-vertex file: %s", line); + PetscCall(PetscViewerRead(viewer, line, 5, NULL, PETSC_STRING)); + snum = sscanf(line, "%d %d %d %d %d", &dim, &Nc, &Nv, &Ncn, &Nl); + PetscCheck(snum == 5, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unable to parse cell-vertex file: %s", line); } else { Nc = Nv = Ncn = Nl = 0; } + PetscCallMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm)); + cdim = (PetscInt)dim; PetscCall(DMCreate(comm, dm)); PetscCall(DMSetType(*dm, DMPLEX)); PetscCall(DMPlexSetChart(*dm, 0, Nc + Nv)); - PetscCall(DMSetDimension(*dm, dim)); + PetscCall(DMSetDimension(*dm, (PetscInt)dim)); PetscCall(DMSetCoordinateDim(*dm, cdim)); /* Read topology */ if (rank == 0) { @@ -5604,7 +5791,7 @@ static PetscErrorCode DMPlexCreateCellVertexFromFile(MPI_Comm comm, const char f PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateFromFile - This takes a filename and produces a `DM` Collective @@ -5754,7 +5941,7 @@ PetscErrorCode DMPlexCreateFromFile(MPI_Comm comm, const char filename[], const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateEphemeral - This takes a `DMPlexTransform` and a base `DMPlex` and produces an ephemeral `DM`, meaning one that is created on the fly in response to queries. 
Input Parameters: @@ -5776,7 +5963,7 @@ PetscErrorCode DMPlexCreateEphemeral(DMPlexTransform tr, const char prefix[], DM DM bdm, bcdm, cdm; Vec coordinates, coordinatesNew; PetscSection cs; - PetscInt dim, cdim, Nl; + PetscInt cdim, Nl; PetscFunctionBegin; PetscCall(DMCreate(PetscObjectComm((PetscObject)tr), dm)); @@ -5784,10 +5971,8 @@ PetscErrorCode DMPlexCreateEphemeral(DMPlexTransform tr, const char prefix[], DM ((DM_Plex *)(*dm)->data)->interpolated = DMPLEX_INTERPOLATED_FULL; // Handle coordinates PetscCall(DMPlexTransformGetDM(tr, &bdm)); - PetscCall(DMGetCoordinateDim(bdm, &cdim)); - PetscCall(DMSetCoordinateDim(*dm, cdim)); - PetscCall(DMGetDimension(bdm, &dim)); - PetscCall(DMSetDimension(*dm, dim)); + PetscCall(DMPlexTransformSetDimensions(tr, bdm, *dm)); + PetscCall(DMGetCoordinateDim(*dm, &cdim)); PetscCall(DMGetCoordinateDM(bdm, &bcdm)); PetscCall(DMGetCoordinateDM(*dm, &cdm)); PetscCall(DMCopyDisc(bcdm, cdm)); diff --git a/src/dm/impls/plex/plexdistribute.c b/src/dm/impls/plex/plexdistribute.c index 06036159f6a..b6fd18f6657 100644 --- a/src/dm/impls/plex/plexdistribute.c +++ b/src/dm/impls/plex/plexdistribute.c @@ -732,7 +732,7 @@ PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, con PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateOverlapMigrationSF - Create a `PetscSF` describing the new mesh distribution to make the overlap described by the input `PetscSF` Collective @@ -1470,7 +1470,7 @@ static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_D } } -/*@C +/*@ DMPlexCreatePointSF - Build a point `PetscSF` from an `PetscSF` describing a point migration Input Parameters: @@ -1602,7 +1602,7 @@ PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownersh PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexMigrate - Migrates internal `DM` data over the supplied star forest Collective @@ -1720,7 +1720,7 @@ PetscErrorCode DMPlexRemapMigrationSF(PetscSF sfOverlap, PetscSF sfMigration, Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexDistribute - Distributes the mesh and any associated sections. Collective @@ -2008,7 +2008,7 @@ PetscErrorCode DMPlexDistributeOverlap_Internal(DM dm, PetscInt overlap, MPI_Com PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping `DM`. 
Collective @@ -2018,8 +2018,8 @@ PetscErrorCode DMPlexDistributeOverlap_Internal(DM dm, PetscInt overlap, MPI_Com - overlap - The overlap of partitions (the same on all ranks) Output Parameters: -+ sf - The `PetscSF` used for point distribution -- dmOverlap - The overlapping distributed `DMPLEX` object, or `NULL` ++ sf - The `PetscSF` used for point distribution, or pass `NULL` if not needed +- dmOverlap - The overlapping distributed `DMPLEX` object Options Database Keys: + -dm_plex_overlap_labels - List of overlap label names @@ -2352,7 +2352,7 @@ PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexDistributionSetName - Set the name of the specific parallel distribution Input Parameters: @@ -2381,7 +2381,7 @@ PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution Input Parameter: diff --git a/src/dm/impls/plex/plexegads.c b/src/dm/impls/plex/plexegads.c index 84dd6c0d2b7..5721bb749be 100644 --- a/src/dm/impls/plex/plexegads.c +++ b/src/dm/impls/plex/plexegads.c @@ -19,10 +19,10 @@ */ #ifdef PETSC_HAVE_EGADS -PETSC_INTERN PetscErrorCode DMPlexSnapToGeomModel_EGADS_Internal(DM, PetscInt, ego, PetscInt, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); -PETSC_INTERN PetscErrorCode DMPlexSnapToGeomModel_EGADSLite_Internal(DM, PetscInt, ego, PetscInt, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); +PETSC_INTERN PetscErrorCode DMSnapToGeomModel_EGADS_Internal(DM, PetscInt, ego, PetscInt, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); +PETSC_INTERN PetscErrorCode DMSnapToGeomModel_EGADSLite_Internal(DM, PetscInt, ego, PetscInt, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); -PetscErrorCode DMPlexSnapToGeomModel_EGADS_Internal(DM dm, PetscInt p, ego model, PetscInt bodyID, PetscInt faceID, PetscInt edgeID, const PetscScalar mcoords[], PetscScalar gcoords[]) +PetscErrorCode DMSnapToGeomModel_EGADS_Internal(DM dm, PetscInt p, ego model, PetscInt bodyID, PetscInt faceID, PetscInt edgeID, const PetscScalar mcoords[], PetscScalar gcoords[]) { DM cdm; ego *bodies; @@ -101,73 +101,62 @@ PetscErrorCode DMPlexSnapToGeomModel_EGADS_Internal(DM dm, PetscInt p, ego model } #endif -/*@ - DMPlexSnapToGeomModel - Given a coordinate point 'mcoords' on the mesh point 'p', return the closest coordinate point 'gcoords' on the geometry model associated with that point. - - Not Collective - - Input Parameters: -+ dm - The `DMPLEX` object -. p - The mesh point -. dE - The coordinate dimension -- mcoords - A coordinate point lying on the mesh point - - Output Parameter: -. gcoords - The closest coordinate point on the geometry model associated with 'p' to the given point - - Level: intermediate +PetscErrorCode DMSnapToGeomModel_EGADSLite(DM dm, PetscInt p, PetscInt dE, const PetscScalar mcoords[], PetscScalar gcoords[]) +{ + PetscFunctionBeginHot; +#ifdef PETSC_HAVE_EGADS + DMLabel bodyLabel, faceLabel, edgeLabel; + PetscInt bodyID, faceID, edgeID; + PetscContainer modelObj; + ego model; - Note: - Returns the original coordinates if no geometry model is found. Right now the only supported geometry model is EGADS. 
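+  // Look up the EGADS topology labels attached to the DM; the PetscCheck below requires the body, face, and edge labels to all be present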
+ PetscCall(DMGetLabel(dm, "EGADS Body ID", &bodyLabel)); + PetscCall(DMGetLabel(dm, "EGADS Face ID", &faceLabel)); + PetscCall(DMGetLabel(dm, "EGADS Edge ID", &edgeLabel)); + PetscCheck(bodyLabel && faceLabel && edgeLabel, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "EGADSLite meshes must have body, face, and edge labels defined"); + PetscCall(PetscObjectQuery((PetscObject)dm, "EGADSLite Model", (PetscObject *)&modelObj)); + PetscCheck(modelObj, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "EGADSLite mesh missing model object"); - The coordinate dimension may be different from the coordinate dimension of the `dm`, for example if the transformation is extrusion. + PetscCall(PetscContainerGetPointer(modelObj, (void **)&model)); + PetscCall(DMLabelGetValue(bodyLabel, p, &bodyID)); + PetscCall(DMLabelGetValue(faceLabel, p, &faceID)); + PetscCall(DMLabelGetValue(edgeLabel, p, &edgeID)); + /* Allows for "Connective" Plex Edges present in models with multiple non-touching Entities */ + if (bodyID < 0) { + for (PetscInt d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; + PetscFunctionReturn(PETSC_SUCCESS); + } + PetscCall(DMSnapToGeomModel_EGADSLite_Internal(dm, p, model, bodyID, faceID, edgeID, mcoords, gcoords)); +#endif + PetscFunctionReturn(PETSC_SUCCESS); +} -.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMRefine()`, `DMPlexCreate()`, `DMPlexSetRefinementUniform()` -@*/ -PetscErrorCode DMPlexSnapToGeomModel(DM dm, PetscInt p, PetscInt dE, const PetscScalar mcoords[], PetscScalar gcoords[]) +PetscErrorCode DMSnapToGeomModel_EGADS(DM dm, PetscInt p, PetscInt dE, const PetscScalar mcoords[], PetscScalar gcoords[]) { - PetscInt d; - PetscFunctionBeginHot; #ifdef PETSC_HAVE_EGADS - { - DM_Plex *plex = (DM_Plex *)dm->data; - DMLabel bodyLabel, faceLabel, edgeLabel; - PetscInt bodyID, faceID, edgeID; - PetscContainer modelObj; - ego model; - PetscBool islite = PETSC_FALSE; - - PetscCall(DMGetLabel(dm, "EGADS Body ID", &bodyLabel)); - PetscCall(DMGetLabel(dm, "EGADS Face ID", &faceLabel)); - PetscCall(DMGetLabel(dm, "EGADS Edge ID", &edgeLabel)); - if (!bodyLabel || !faceLabel || !edgeLabel || plex->ignoreModel) { - for (d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; - PetscFunctionReturn(PETSC_SUCCESS); - } - PetscCall(PetscObjectQuery((PetscObject)dm, "EGADS Model", (PetscObject *)&modelObj)); - if (!modelObj) { - PetscCall(PetscObjectQuery((PetscObject)dm, "EGADSLite Model", (PetscObject *)&modelObj)); - islite = PETSC_TRUE; - } - if (!modelObj) { - for (d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; - PetscFunctionReturn(PETSC_SUCCESS); - } - PetscCall(PetscContainerGetPointer(modelObj, (void **)&model)); - PetscCall(DMLabelGetValue(bodyLabel, p, &bodyID)); - PetscCall(DMLabelGetValue(faceLabel, p, &faceID)); - PetscCall(DMLabelGetValue(edgeLabel, p, &edgeID)); - /* Allows for "Connective" Plex Edges present in models with multiple non-touching Entities */ - if (bodyID < 0) { - for (d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; - PetscFunctionReturn(PETSC_SUCCESS); - } - if (islite) PetscCall(DMPlexSnapToGeomModel_EGADSLite_Internal(dm, p, model, bodyID, faceID, edgeID, mcoords, gcoords)); - else PetscCall(DMPlexSnapToGeomModel_EGADS_Internal(dm, p, model, bodyID, faceID, edgeID, mcoords, gcoords)); + DMLabel bodyLabel, faceLabel, edgeLabel; + PetscInt bodyID, faceID, edgeID; + PetscContainer modelObj; + ego model; + + PetscCall(DMGetLabel(dm, "EGADS Body ID", &bodyLabel)); + PetscCall(DMGetLabel(dm, "EGADS Face ID", &faceLabel)); + PetscCall(DMGetLabel(dm, "EGADS Edge ID", &edgeLabel)); + PetscCheck(bodyLabel && 
faceLabel && edgeLabel, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "EGADS meshes must have body, face, and edge labels defined"); + PetscCall(PetscObjectQuery((PetscObject)dm, "EGADS Model", (PetscObject *)&modelObj)); + PetscCheck(modelObj, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "EGADS mesh missing model object"); + + PetscCall(PetscContainerGetPointer(modelObj, (void **)&model)); + PetscCall(DMLabelGetValue(bodyLabel, p, &bodyID)); + PetscCall(DMLabelGetValue(faceLabel, p, &faceID)); + PetscCall(DMLabelGetValue(edgeLabel, p, &edgeID)); + /* Allows for "Connective" Plex Edges present in models with multiple non-touching Entities */ + if (bodyID < 0) { + for (PetscInt d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; + PetscFunctionReturn(PETSC_SUCCESS); } -#else - for (d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; + PetscCall(DMSnapToGeomModel_EGADS_Internal(dm, p, model, bodyID, faceID, edgeID, mcoords, gcoords)); #endif PetscFunctionReturn(PETSC_SUCCESS); } @@ -1508,7 +1497,7 @@ PetscErrorCode DMPlexInflateToGeomModel(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateEGADSFromFile - Create a `DMPLEX` mesh from an EGADS, IGES, or STEP file. Collective diff --git a/src/dm/impls/plex/plexegadslite.c b/src/dm/impls/plex/plexegadslite.c index 5dab7bc0304..85071fb11b2 100644 --- a/src/dm/impls/plex/plexegadslite.c +++ b/src/dm/impls/plex/plexegadslite.c @@ -4,7 +4,7 @@ #ifdef PETSC_HAVE_EGADS #include -PetscErrorCode DMPlexSnapToGeomModel_EGADSLite_Internal(DM dm, PetscInt p, PetscInt dE, ego model, PetscInt bodyID, PetscInt faceID, PetscInt edgeID, const PetscScalar mcoords[], PetscScalar gcoords[]) +PetscErrorCode DMSnapToGeomModel_EGADSLite_Internal(DM dm, PetscInt p, PetscInt dE, ego model, PetscInt bodyID, PetscInt faceID, PetscInt edgeID, const PetscScalar mcoords[], PetscScalar gcoords[]) { DM cdm; ego *bodies; @@ -618,7 +618,7 @@ static PetscErrorCode DMPlexEGADSLitePrintModel_Internal(ego model) } #endif -/*@C +/*@ DMPlexCreateEGADSLiteFromFile - Create a DMPlex mesh from an EGADSLite file. Collective diff --git a/src/dm/impls/plex/plexexodusii.c b/src/dm/impls/plex/plexexodusii.c index e9da805d4c7..9def15990a8 100644 --- a/src/dm/impls/plex/plexexodusii.c +++ b/src/dm/impls/plex/plexexodusii.c @@ -1320,7 +1320,7 @@ PetscErrorCode PetscViewerExodusIIGetOrder(PetscViewer viewer, PetscInt *order) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerExodusIIOpen - Opens a file for ExodusII input/output. Collective @@ -1354,7 +1354,7 @@ PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFi PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateExodusFromFile - Create a `DMPLEX` mesh from an ExodusII file. Collective @@ -1713,6 +1713,7 @@ PetscErrorCode DMPlexCreateExodus(MPI_Comm comm, PetscInt exoid, PetscBool inter for (v = 0; v < faceSize; ++v, ++voff) faceVertices[v] = fs_vertex_list[voff] + numCells - 1; PetscCall(DMPlexGetFullJoin(*dm, faceSize, faceVertices, &numFaces, &faces)); PetscCheck(numFaces == 1, comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to %" PetscInt_FMT " faces", f, fs, numFaces); + PetscCheck(dim == 1 || faces[0] >= numCells + numVertices, comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to point %" PetscInt_FMT " which is not a face", f, fs, faces[0]); PetscCall(DMSetLabelValue_Fast(*dm, &faceSets, "Face Sets", faces[0], fs_id[fs])); /* Only add the label if one has been detected for this side set. 
*/ if (!fs_name_err) PetscCall(DMSetLabelValue(*dm, fs_name, faces[0], fs_id[fs])); diff --git a/src/dm/impls/plex/plexextrude.c b/src/dm/impls/plex/plexextrude.c index 93fd7bbd58c..1b119a4b385 100644 --- a/src/dm/impls/plex/plexextrude.c +++ b/src/dm/impls/plex/plexextrude.c @@ -1,7 +1,7 @@ #include /*I "petscdmplex.h" I*/ #include -/*@C +/*@ DMPlexExtrude - Extrude a volumetric mesh from the input surface mesh Input Parameters: @@ -11,8 +11,8 @@ . tensor - Flag to create tensor produt cells . symmetric - Flag to extrude symmetrically about the surface . periodic - Flag to extrude periodically -. normal - Surface normal vector, or NULL -- thicknesses - Thickness of each layer, or NULL +. normal - Surface normal vector, or `NULL` +- thicknesses - Thickness of each layer, or `NULL` Output Parameter: . edm - The volumetric mesh diff --git a/src/dm/impls/plex/plexfem.c b/src/dm/impls/plex/plexfem.c index 9e3108b0cdd..36309155b30 100644 --- a/src/dm/impls/plex/plexfem.c +++ b/src/dm/impls/plex/plexfem.c @@ -2482,7 +2482,7 @@ PetscErrorCode DMPlexComputeCellwiseIntegralFEM(DM dm, Vec X, Vec F, void *user) PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS pointIS, void (*func)(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]), PetscScalar *fintegral, void *user) +static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS pointIS, void (**funcs)(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]), PetscScalar *fintegral, void *user) { DM plex = NULL, plexA = NULL; DMEnclosureType encAux; @@ -2543,6 +2543,22 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS point PetscCall(ISGetIndices(pointIS, &points)); PetscCall(PetscCalloc2(numFaces * totDim, &u, locA ? 
numFaces * totDimAux : 0, &a)); PetscCall(DMFieldGetDegree(coordField, pointIS, NULL, &maxDegree)); + for (face = 0; face < numFaces; ++face) { + const PetscInt point = points[face], *support; + PetscScalar *x = NULL; + + PetscCall(DMPlexGetSupport(dm, point, &support)); + PetscCall(DMPlexVecGetClosure(plex, section, locX, support[0], NULL, &x)); + for (PetscInt i = 0; i < totDim; ++i) u[face * totDim + i] = x[i]; + PetscCall(DMPlexVecRestoreClosure(plex, section, locX, support[0], NULL, &x)); + if (locA) { + PetscInt subp; + PetscCall(DMGetEnclosurePoint(plexA, dm, encAux, support[0], &subp)); + PetscCall(DMPlexVecGetClosure(plexA, sectionAux, locA, subp, NULL, &x)); + for (PetscInt i = 0; i < totDimAux; ++i) a[face * totDimAux + i] = x[i]; + PetscCall(DMPlexVecRestoreClosure(plexA, sectionAux, locA, subp, NULL, &x)); + } + } for (field = 0; field < Nf; ++field) { PetscFE fe; @@ -2554,23 +2570,6 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS point } PetscCall(PetscQuadratureGetData(qGeom, NULL, NULL, &Nq, NULL, NULL)); PetscCall(DMPlexGetFEGeom(coordField, pointIS, qGeom, PETSC_TRUE, &fgeom)); - for (face = 0; face < numFaces; ++face) { - const PetscInt point = points[face], *support; - PetscScalar *x = NULL; - PetscInt i; - - PetscCall(DMPlexGetSupport(dm, point, &support)); - PetscCall(DMPlexVecGetClosure(plex, section, locX, support[0], NULL, &x)); - for (i = 0; i < totDim; ++i) u[face * totDim + i] = x[i]; - PetscCall(DMPlexVecRestoreClosure(plex, section, locX, support[0], NULL, &x)); - if (locA) { - PetscInt subp; - PetscCall(DMGetEnclosurePoint(plexA, dm, encAux, support[0], &subp)); - PetscCall(DMPlexVecGetClosure(plexA, sectionAux, locA, subp, NULL, &x)); - for (i = 0; i < totDimAux; ++i) a[f * totDimAux + i] = x[i]; - PetscCall(DMPlexVecRestoreClosure(plexA, sectionAux, locA, subp, NULL, &x)); - } - } /* Get blocking */ { PetscQuadrature q; @@ -2592,18 +2591,18 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS point /* Do integration for each field */ for (chunk = 0; chunk < numChunks; ++chunk) { PetscCall(PetscFEGeomGetChunk(fgeom, chunk * chunkSize, (chunk + 1) * chunkSize, &chunkGeom)); - PetscCall(PetscFEIntegrateBd(prob, field, func, chunkSize, chunkGeom, u, probAux, a, fintegral)); + PetscCall(PetscFEIntegrateBd(prob, field, funcs[field], chunkSize, chunkGeom, &u[chunk * chunkSize * totDim], probAux, PetscSafePointerPlusOffset(a, chunk * chunkSize * totDimAux), &fintegral[chunk * chunkSize * Nf])); PetscCall(PetscFEGeomRestoreChunk(fgeom, 0, offset, &chunkGeom)); } PetscCall(PetscFEGeomGetChunk(fgeom, offset, numFaces, &chunkGeom)); - PetscCall(PetscFEIntegrateBd(prob, field, func, Nr, chunkGeom, &u[offset * totDim], probAux, PetscSafePointerPlusOffset(a, offset * totDimAux), &fintegral[offset * Nf])); + PetscCall(PetscFEIntegrateBd(prob, field, funcs[field], Nr, chunkGeom, &u[offset * totDim], probAux, PetscSafePointerPlusOffset(a, offset * totDimAux), &fintegral[offset * Nf])); PetscCall(PetscFEGeomRestoreChunk(fgeom, offset, numFaces, &chunkGeom)); /* Cleanup data arrays */ PetscCall(DMPlexRestoreFEGeom(coordField, pointIS, qGeom, PETSC_TRUE, &fgeom)); PetscCall(PetscQuadratureDestroy(&qGeom)); - PetscCall(PetscFree2(u, a)); - PetscCall(ISRestoreIndices(pointIS, &points)); } + PetscCall(PetscFree2(u, a)); + PetscCall(ISRestoreIndices(pointIS, &points)); } if (plex) PetscCall(DMDestroy(&plex)); if (plexA) PetscCall(DMDestroy(&plexA)); @@ -2619,7 +2618,7 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM
dm, Vec locX, IS point . label - The boundary `DMLabel` . numVals - The number of label values to use, or `PETSC_DETERMINE` for all values . vals - The label values to use, or NULL for all values -. func - The function to integrate along the boundary +. funcs - The functions to integrate along the boundary for each field - user - The user context Output Parameter: @@ -2629,7 +2628,7 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS point .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexComputeIntegralFEM()`, `DMPlexComputeBdResidualFEM()` @*/ -PetscErrorCode DMPlexComputeBdIntegral(DM dm, Vec X, DMLabel label, PetscInt numVals, const PetscInt vals[], void (*func)(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]), PetscScalar *integral, void *user) +PetscErrorCode DMPlexComputeBdIntegral(DM dm, Vec X, DMLabel label, PetscInt numVals, const PetscInt vals[], void (**funcs)(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]), PetscScalar *integral, void *user) { Vec locX; PetscSection section; @@ -2673,7 +2672,7 @@ PetscErrorCode DMPlexComputeBdIntegral(DM dm, Vec X, DMLabel label, PetscInt num } PetscCall(ISGetLocalSize(pointIS, &numFaces)); PetscCall(PetscCalloc1(numFaces * Nf, &fintegral)); - PetscCall(DMPlexComputeBdIntegral_Internal(dm, locX, pointIS, func, fintegral, user)); + PetscCall(DMPlexComputeBdIntegral_Internal(dm, locX, pointIS, funcs, fintegral, user)); /* Sum point contributions into integral */ for (f = 0; f < Nf; ++f) for (face = 0; face < numFaces; ++face) integral[f] += fintegral[face * Nf + f]; diff --git a/src/dm/impls/plex/plexfluent.c b/src/dm/impls/plex/plexfluent.c index 009a3d7c701..edcfb62a782 100644 --- a/src/dm/impls/plex/plexfluent.c +++ b/src/dm/impls/plex/plexfluent.c @@ -2,7 +2,7 @@ #define PETSCDM_DLL #include /*I "petscdmplex.h" I*/ -/*@C +/*@ DMPlexCreateFluentFromFile - Create a `DMPLEX` mesh from a Fluent mesh file Collective diff --git a/src/dm/impls/plex/plexgenerate.c b/src/dm/impls/plex/plexgenerate.c index 0dfb15378b5..5e624fd8b4c 100644 --- a/src/dm/impls/plex/plexgenerate.c +++ b/src/dm/impls/plex/plexgenerate.c @@ -1,6 +1,6 @@ #include /*I "petscdmplex.h" I*/ -/*@C +/*@ DMPlexInvertCell - Flips cell orientations since `DMPLEX` stores some of them internally with outward normals. Input Parameters: @@ -62,7 +62,7 @@ PetscErrorCode DMPlexInvertCell(DMPolytopeType cellType, PetscInt cone[]) #undef SWAPCONE } -/*@C +/*@ DMPlexReorderCell - Flips cell orientations since `DMPLEX` stores some of them internally with outward normals. Input Parameters: @@ -137,7 +137,7 @@ PetscErrorCode DMPlexTetgenSetOptions(DM dm, const char *opts) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexGenerate - Generates a mesh. 
Not Collective diff --git a/src/dm/impls/plex/plexgeometry.c b/src/dm/impls/plex/plexgeometry.c index 06e7e360ecc..9913b587590 100644 --- a/src/dm/impls/plex/plexgeometry.c +++ b/src/dm/impls/plex/plexgeometry.c @@ -620,21 +620,27 @@ PetscErrorCode PetscGridHashEnlarge(PetscGridHash box, const PetscScalar point[] static PetscErrorCode DMPlexCreateGridHash(DM dm, PetscGridHash *box) { Vec coordinates; - const PetscScalar *coords; - PetscInt cdim, N, bs; + const PetscScalar *a; + PetscInt cdim, cStart, cEnd; PetscFunctionBegin; PetscCall(DMGetCoordinateDim(dm, &cdim)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); PetscCall(DMGetCoordinatesLocal(dm, &coordinates)); - PetscCall(VecGetArrayRead(coordinates, &coords)); - PetscCall(VecGetLocalSize(coordinates, &N)); - PetscCall(VecGetBlockSize(coordinates, &bs)); - PetscCheck(bs == cdim, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Coordinate block size %" PetscInt_FMT " != %" PetscInt_FMT " coordinate dimension", bs, cdim); - PetscCall(PetscGridHashCreate(PetscObjectComm((PetscObject)dm), cdim, coords, box)); - for (PetscInt i = 0; i < N; i += cdim) PetscCall(PetscGridHashEnlarge(*box, &coords[i])); + PetscCall(VecGetArrayRead(coordinates, &a)); + PetscCall(PetscGridHashCreate(PetscObjectComm((PetscObject)dm), cdim, a, box)); + PetscCall(VecRestoreArrayRead(coordinates, &a)); + for (PetscInt c = cStart; c < cEnd; ++c) { + const PetscScalar *array; + PetscScalar *coords = NULL; + PetscInt numCoords; + PetscBool isDG; - PetscCall(VecRestoreArrayRead(coordinates, &coords)); + PetscCall(DMPlexGetCellCoordinates(dm, c, &isDG, &numCoords, &array, &coords)); + for (PetscInt i = 0; i < numCoords / cdim; ++i) PetscCall(PetscGridHashEnlarge(*box, &coords[i * cdim])); + PetscCall(DMPlexRestoreCellCoordinates(dm, c, &isDG, &numCoords, &array, &coords)); + } PetscFunctionReturn(PETSC_SUCCESS); } @@ -645,8 +651,8 @@ static PetscErrorCode DMPlexCreateGridHash(DM dm, PetscGridHash *box) Input Parameters: + box - The grid hash object -. n - The number of boxes in each dimension, or `PETSC_DETERMINE` -- h - The box size in each dimension, only used if n[d] == `PETSC_DETERMINE` +. n - The number of boxes in each dimension, may use `PETSC_DETERMINE` for the entries +- h - The box size in each dimension, only used if n[d] == `PETSC_DETERMINE`; pass `NULL` if it is not needed Level: developer @@ -684,8 +690,8 @@ PetscErrorCode PetscGridHashSetGrid(PetscGridHash box, const PetscInt n[], const - points - The input point coordinates Output Parameters: -+ dboxes - An array of numPoints*dim integers expressing the enclosing box as (i_0, i_1, ..., i_dim) -- boxes - An array of numPoints integers expressing the enclosing box as single number, or NULL ++ dboxes - An array of `numPoints` x `dim` integers expressing the enclosing box as (i_0, i_1, ..., i_dim) +- boxes - An array of `numPoints` integers expressing the enclosing box as a single number, or `NULL` Level: developer @@ -710,7 +716,7 @@ PetscErrorCode PetscGridHashGetEnclosingBox(PetscGridHash box, PetscInt numPoint if (dbox == n[d] && PetscAbsReal(PetscRealPart(points[p * dim + d]) - upper[d]) < 1.0e-9) dbox = n[d] - 1; if (dbox == -1 && PetscAbsReal(PetscRealPart(points[p * dim + d]) - lower[d]) < 1.0e-9) dbox = 0; - PetscCheck(dbox >= 0 && dbox < n[d], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Input point %" PetscInt_FMT " (%g, %g, %g) is outside of our bounding box", p, (double)PetscRealPart(points[p * dim + 0]), dim > 1 ? (double)PetscRealPart(points[p * dim + 1]) : 0.0, dim > 2 ?
(double)PetscRealPart(points[p * dim + 2]) : 0.0); + PetscCheck(dbox >= 0 && dbox < n[d], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Input point %" PetscInt_FMT " (%g, %g, %g) is outside of our bounding box (%g, %g, %g) - (%g, %g, %g)", p, (double)PetscRealPart(points[p * dim + 0]), dim > 1 ? (double)PetscRealPart(points[p * dim + 1]) : 0.0, dim > 2 ? (double)PetscRealPart(points[p * dim + 2]) : 0.0, (double)lower[0], (double)lower[1], (double)lower[2], (double)upper[0], (double)upper[1], (double)upper[2]); dboxes[p * dim + d] = dbox; } if (boxes) @@ -1338,16 +1344,16 @@ PetscErrorCode DMLocatePoints_Plex(DM dm, Vec v, DMPointLocationType ltype, Pets PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexComputeProjection2Dto1D - Rewrite coordinates to be the 1D projection of the 2D coordinates Not Collective Input/Output Parameter: -. coords - The coordinates of a segment, on output the new y-coordinate, and 0 for x +. coords - The coordinates of a segment, an array of size 4; on output, the new y-coordinate, and 0 for x; the last two entries are unchanged Output Parameter: -. R - The rotation which accomplishes the projection +. R - The rotation which accomplishes the projection, an array of size 4 Level: developer @@ -1369,16 +1375,16 @@ PetscErrorCode DMPlexComputeProjection2Dto1D(PetscScalar coords[], PetscReal R[] PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexComputeProjection3Dto1D - Rewrite coordinates to be the 1D projection of the 3D coordinates Not Collective Input/Output Parameter: -. coords - The coordinates of a segment; on output, the new y-coordinate, and 0 for x and z +. coords - The coordinates of a segment, an array of size 6; on output, the new y-coordinate, and 0 for x and z; the other entries are unchanged Output Parameter: -. R - The rotation which accomplishes the projection +. R - The rotation which accomplishes the projection, an array of size 9 Level: developer @@ -1426,6 +1432,7 @@ PetscErrorCode DMPlexComputeProjection3Dto1D(PetscScalar coords[], PetscReal R[] } coords[0] = 0.0; coords[1] = r; + coords[2] = 0.0; PetscFunctionReturn(PETSC_SUCCESS); } @@ -2983,7 +2990,7 @@ PetscErrorCode DMPlexComputeGeometryFVM(DM dm, Vec *cellgeom, Vec *facegeom) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexGetMinRadius - Returns the minimum distance from any cell centroid to a face Not Collective @@ -3007,7 +3014,7 @@ PetscErrorCode DMPlexGetMinRadius(DM dm, PetscReal *minradius) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexSetMinRadius - Sets the minimum distance from the cell centroid to a face Logically Collective @@ -3972,19 +3979,19 @@ PetscErrorCode DMPlexRemapGeometry(DM dm, PetscReal time, void (*func)(PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexShearGeometry - This shears the domain, meaning it adds a multiple of the shear coordinate to all other coordinates. Not Collective Input Parameters: + dm - The `DMPLEX` -. direction - The shear coordinate direction, e.g.
`DM_X` is the x-axis - multipliers - The multiplier m for each direction which is not the shear direction Level: intermediate -.seealso: `DMPLEX`, `DMPlexRemapGeometry()` +.seealso: `DMPLEX`, `DMPlexRemapGeometry()`, `DMDirection`, `DM_X`, `DM_Y`, `DM_Z` @*/ PetscErrorCode DMPlexShearGeometry(DM dm, DMDirection direction, PetscReal multipliers[]) { diff --git a/src/dm/impls/plex/plexgmsh.c b/src/dm/impls/plex/plexgmsh.c index 0006719075b..d51c34458b2 100644 --- a/src/dm/impls/plex/plexgmsh.c +++ b/src/dm/impls/plex/plexgmsh.c @@ -117,10 +117,9 @@ typedef struct { int *(*lexorder)(void); } GmshCellInfo; -#define GmshCellEntry(cellType, polytope, dim, order) \ - { \ - cellType, GMSH_##polytope, dim, order, GmshNumNodes_##polytope(1), GmshNumNodes_##polytope(order), GmshLexOrder_##polytope##_##order \ - } +// clang-format off +#define GmshCellEntry(cellType, polytope, dim, order) {cellType, GMSH_##polytope, dim, order, GmshNumNodes_##polytope(1), GmshNumNodes_##polytope(order), GmshLexOrder_##polytope##_##order} +// clang-format on static const GmshCellInfo GmshCellTable[] = { GmshCellEntry(15, VTX, 0, 0), @@ -928,13 +927,14 @@ static PetscErrorCode GmshReadNodes_v41(GmshFile *gmsh, GmshMesh *mesh) PetscCall(GmshNodesCreate(numNodes, &nodes)); mesh->numNodes = numNodes; mesh->nodelist = nodes; + if (numEntityBlocks && !mesh->entities) PetscCall(PetscInfo(NULL, "File specifies %" PetscInt_FMT " entity blocks, but was missing the $Entities section\n", numEntityBlocks)); for (block = 0, node = 0; block < numEntityBlocks; ++block, node += numNodesBlock) { PetscCall(GmshReadInt(gmsh, info, 3)); dim = info[0]; eid = info[1]; parametric = info[2]; - PetscCall(GmshEntitiesGet(mesh->entities, dim, eid, &entity)); - numTags = entity->numTags; + if (mesh->entities) PetscCall(GmshEntitiesGet(mesh->entities, dim, eid, &entity)); + numTags = entity ? entity->numTags : 0; PetscCheck(!parametric, PETSC_COMM_SELF, PETSC_ERR_SUP, "Parametric coordinates not supported"); PetscCall(GmshReadSize(gmsh, &numNodesBlock, 1)); PetscCall(GmshReadSize(gmsh, nodes->id + node, numNodesBlock)); @@ -976,16 +976,17 @@ static PetscErrorCode GmshReadElements_v41(GmshFile *gmsh, GmshMesh *mesh) PetscCall(GmshElementsCreate(numElements, &elements)); mesh->numElems = numElements; mesh->elements = elements; + if (numEntityBlocks && !mesh->entities) PetscCall(PetscInfo(NULL, "File specifies %" PetscInt_FMT " entity blocks, but was missing the $Entities section\n", numEntityBlocks)); for (c = 0, block = 0; block < numEntityBlocks; ++block) { PetscCall(GmshReadInt(gmsh, info, 3)); dim = info[0]; eid = info[1]; cellType = info[2]; - PetscCall(GmshEntitiesGet(mesh->entities, dim, eid, &entity)); + if (mesh->entities) PetscCall(GmshEntitiesGet(mesh->entities, dim, eid, &entity)); PetscCall(GmshCellTypeCheck(cellType)); numVerts = GmshCellMap[cellType].numVerts; numNodes = GmshCellMap[cellType].numNodes; - numTags = entity->numTags; + numTags = entity ? 
entity->numTags : 0; PetscCall(GmshReadSize(gmsh, &numBlockElements, 1)); PetscCall(GmshBufferGet(gmsh, (1 + numNodes) * numBlockElements, sizeof(PetscInt), &ibuf)); PetscCall(GmshReadSize(gmsh, ibuf, (1 + numNodes) * numBlockElements)); @@ -1408,7 +1409,7 @@ static PetscErrorCode GmshCreateFE(MPI_Comm comm, const char prefix[], PetscBool PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateGmshFromFile - Create a `DMPLEX` mesh from a Gmsh file Input Parameters: @@ -1603,13 +1604,16 @@ PetscErrorCode DMPlexCreateGmsh(MPI_Comm comm, PetscViewer viewer, PetscBool int PetscCall(GmshReadSection(gmsh, line)); } - /* Read entities */ + /* OPTIONAL Read entities */ if (gmsh->fileFormat >= 40) { - PetscCall(GmshExpect(gmsh, "$Entities", line)); - PetscCall(GmshReadEntities(gmsh, mesh)); - PetscCall(GmshReadEndSection(gmsh, "$EndEntities", line)); - /* Initial read for nodes section */ - PetscCall(GmshReadSection(gmsh, line)); + PetscCall(GmshMatch(gmsh, "$Entities", line, &match)); + if (match) { + PetscCall(GmshExpect(gmsh, "$Entities", line)); + PetscCall(GmshReadEntities(gmsh, mesh)); + PetscCall(GmshReadEndSection(gmsh, "$EndEntities", line)); + /* Initial read for nodes section */ + PetscCall(GmshReadSection(gmsh, line)); + } } /* Read nodes */ diff --git a/src/dm/impls/plex/plexhdf5.c b/src/dm/impls/plex/plexhdf5.c index 550f2ef3ba0..f9c485fda2a 100644 --- a/src/dm/impls/plex/plexhdf5.c +++ b/src/dm/impls/plex/plexhdf5.c @@ -15,6 +15,14 @@ static PetscErrorCode PetscViewerGetAttachedVersion_Private(PetscViewer, const c PETSC_EXTERN PetscErrorCode VecView_MPI(Vec, PetscViewer); +static PetscErrorCode PetscViewerPrintVersion_Private(PetscViewer viewer, DMPlexStorageVersion version, char str[], size_t len) +{ + PetscFunctionBegin; + PetscCall(PetscViewerCheckVersion_Private(viewer, version)); + PetscCall(PetscSNPrintf(str, len, "%d.%d.%d", version->major, version->minor, version->subminor)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode PetscViewerParseVersion_Private(PetscViewer viewer, const char str[], DMPlexStorageVersion *version) { PetscToken t; @@ -128,6 +136,84 @@ static inline PetscBool DMPlexStorageVersionGE(DMPlexStorageVersion version, int return (PetscBool)((version->major == major && version->minor == minor && version->subminor >= subminor) || (version->major == major && version->minor > minor) || (version->major > major)); } +/*@C + PetscViewerHDF5SetDMPlexStorageVersionWriting - Set the storage version for writing + + Logically collective + + Input Parameters: ++ viewer - The `PetscViewer` +- version - The storage format version + + Level: advanced + + Note: + The version has major, minor, and subminor integers. Parallel operations are only available for version 3.0.0. 
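+
+  Example Usage:
+  An illustrative sketch only; it assumes `viewer` is an open `PETSCVIEWERHDF5` viewer and fills in the major/minor/subminor fields that the code below copies:
+.vb
+  DMPlexStorageVersion v;
+
+  PetscCall(PetscNew(&v));
+  v->major    = 3;
+  v->minor    = 0;
+  v->subminor = 0; /* 3.0.0 enables the parallel operations noted above */
+  PetscCall(PetscViewerHDF5SetDMPlexStorageVersionWriting(viewer, v));
+  PetscCall(PetscFree(v)); /* the viewer attaches its own copy of the version */
+.ve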
+ +.seealso: [](ch_dmbase), `DM`, `PetscViewerHDF5GetDMPlexStorageVersionWriting()`, `PetscViewerHDF5GetDMPlexStorageVersionReading()`, `PetscViewerHDF5SetDMPlexStorageVersionReading()` +@*/ +PetscErrorCode PetscViewerHDF5SetDMPlexStorageVersionWriting(PetscViewer viewer, DMPlexStorageVersion version) +{ + const char ATTR_NAME[] = "dmplex_storage_version"; + DMPlexStorageVersion viewerVersion; + PetscBool fileHasVersion; + char fileVersion[16], versionStr[16], viewerVersionStr[16]; + + PetscFunctionBegin; + PetscValidHeaderSpecificType(viewer, PETSC_VIEWER_CLASSID, 1, PETSCVIEWERHDF5); + PetscAssertPointer(version, 2); + PetscCall(PetscViewerPrintVersion_Private(viewer, version, versionStr, 16)); + PetscCall(PetscViewerGetAttachedVersion_Private(viewer, DMPLEX_STORAGE_VERSION_WRITING_KEY, &viewerVersion)); + if (viewerVersion) { + PetscBool flg; + + PetscCall(PetscViewerPrintVersion_Private(viewer, viewerVersion, viewerVersionStr, 16)); + PetscCall(PetscStrcmp(versionStr, viewerVersionStr, &flg)); + PetscCheck(flg, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "User requested DMPlex storage version %s but viewer already has version %s - cannot mix versions", versionStr, viewerVersionStr); + } + + PetscCall(PetscViewerHDF5HasAttribute(viewer, NULL, ATTR_NAME, &fileHasVersion)); + if (fileHasVersion) { + PetscBool flg; + char *tmp; + + PetscCall(PetscViewerHDF5ReadAttribute(viewer, "/", ATTR_NAME, PETSC_STRING, NULL, &tmp)); + PetscCall(PetscStrncpy(fileVersion, tmp, sizeof(fileVersion))); + PetscCall(PetscFree(tmp)); + PetscCall(PetscStrcmp(fileVersion, versionStr, &flg)); + PetscCheck(flg, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "User requested DMPlex storage version %s but file already has version %s - cannot mix versions", versionStr, fileVersion); + } else { + PetscCall(PetscViewerHDF5WriteAttribute(viewer, "/", ATTR_NAME, PETSC_STRING, versionStr)); + } + PetscCall(PetscNew(&viewerVersion)); + viewerVersion->major = version->major; + viewerVersion->minor = version->minor; + viewerVersion->subminor = version->subminor; + PetscCall(PetscViewerAttachVersion_Private(viewer, DMPLEX_STORAGE_VERSION_WRITING_KEY, viewerVersion)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@C + PetscViewerHDF5GetDMPlexStorageVersionWriting - Get the storage version for writing + + Logically collective + + Input Parameter: +. viewer - The `PetscViewer` + + Output Parameter: +. version - The storage format version + + Options Database Keys: +. -dm_plex_view_hdf5_storage_version - Overrides the storage format version + + Level: advanced + + Note: + The version has major, minor, and subminor integers. Parallel operations are only available for version 3.0.0. + +.seealso: [](ch_dmbase), `DM`, `PetscViewerHDF5SetDMPlexStorageVersionWriting()`, `PetscViewerHDF5GetDMPlexStorageVersionReading()`, `PetscViewerHDF5SetDMPlexStorageVersionReading()` +@*/ PetscErrorCode PetscViewerHDF5GetDMPlexStorageVersionWriting(PetscViewer viewer, DMPlexStorageVersion *version) { const char ATTR_NAME[] = "dmplex_storage_version"; @@ -167,6 +253,81 @@ PetscErrorCode PetscViewerHDF5GetDMPlexStorageVersionWriting(PetscViewer viewer, PetscFunctionReturn(PETSC_SUCCESS); } +/*@C + PetscViewerHDF5SetDMPlexStorageVersionReading - Set the storage version for reading + + Logically collective + + Input Parameters: ++ viewer - The `PetscViewer` +- version - The storage format version + + Level: advanced + + Note: + The version has major, minor, and subminor integers. 
Parallel operations are only available for version 3.0.0. + +.seealso: [](ch_dmbase), `DM`, `PetscViewerHDF5GetDMPlexStorageVersionReading()`, `PetscViewerHDF5GetDMPlexStorageVersionWriting()`, `PetscViewerHDF5SetDMPlexStorageVersionWriting()` +@*/ +PetscErrorCode PetscViewerHDF5SetDMPlexStorageVersionReading(PetscViewer viewer, DMPlexStorageVersion version) +{ + const char ATTR_NAME[] = "dmplex_storage_version"; + DMPlexStorageVersion viewerVersion; + PetscBool fileHasVersion; + char versionStr[16], viewerVersionStr[16]; + + PetscFunctionBegin; + PetscValidHeaderSpecificType(viewer, PETSC_VIEWER_CLASSID, 1, PETSCVIEWERHDF5); + PetscAssertPointer(version, 2); + PetscCall(PetscViewerPrintVersion_Private(viewer, version, versionStr, 16)); + PetscCall(PetscViewerGetAttachedVersion_Private(viewer, DMPLEX_STORAGE_VERSION_READING_KEY, &viewerVersion)); + if (viewerVersion) { + PetscBool flg; + + PetscCall(PetscViewerPrintVersion_Private(viewer, viewerVersion, viewerVersionStr, 16)); + PetscCall(PetscStrcmp(versionStr, viewerVersionStr, &flg)); + PetscCheck(flg, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "User requested DMPlex storage version %s but viewer already has version %s - cannot mix versions", versionStr, viewerVersionStr); + } + + PetscCall(PetscViewerHDF5HasAttribute(viewer, NULL, ATTR_NAME, &fileHasVersion)); + if (fileHasVersion) { + char *fileVersion; + PetscBool flg; + + PetscCall(PetscViewerHDF5ReadAttribute(viewer, "/", ATTR_NAME, PETSC_STRING, NULL, &fileVersion)); + PetscCall(PetscStrcmp(fileVersion, versionStr, &flg)); + PetscCheck(flg, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "User requested DMPlex storage version %s but file already has version %s - cannot mix versions", versionStr, fileVersion); + PetscCall(PetscFree(fileVersion)); + } + PetscCall(PetscNew(&viewerVersion)); + viewerVersion->major = version->major; + viewerVersion->minor = version->minor; + viewerVersion->subminor = version->subminor; + PetscCall(PetscViewerAttachVersion_Private(viewer, DMPLEX_STORAGE_VERSION_READING_KEY, viewerVersion)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@C + PetscViewerHDF5GetDMPlexStorageVersionReading - Get the storage version for reading + + Logically collective + + Input Parameter: +. viewer - The `PetscViewer` + + Output Parameter: +. version - The storage format version + + Options Database Keys: +. -dm_plex_view_hdf5_storage_version - Overrides the storage format version + + Level: advanced + + Note: + The version has major, minor, and subminor integers. Parallel operations are only available for version 3.0.0. 
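+
+  Example Usage:
+  An illustrative sketch only; it assumes the returned version is owned by the viewer (the attach/get pattern used in this file), so the caller does not free it:
+.vb
+  DMPlexStorageVersion v;
+
+  PetscCall(PetscViewerHDF5GetDMPlexStorageVersionReading(viewer, &v));
+  PetscCall(PetscPrintf(PETSC_COMM_SELF, "Reading with storage version %d.%d.%d\n", v->major, v->minor, v->subminor));
+.ve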
+ +.seealso: [](ch_dmbase), `DM`, `PetscViewerHDF5SetDMPlexStorageVersionReading()`, `PetscViewerHDF5GetDMPlexStorageVersionWriting()`, `PetscViewerHDF5SetDMPlexStorageVersionWriting()` +@*/ PetscErrorCode PetscViewerHDF5GetDMPlexStorageVersionReading(PetscViewer viewer, DMPlexStorageVersion *version) { const char ATTR_NAME[] = "dmplex_storage_version"; @@ -200,7 +361,32 @@ static PetscErrorCode DMPlexGetHDF5Name_Private(DM dm, const char *name[]) PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode DMSequenceView_HDF5(DM dm, const char *seqname, PetscInt seqnum, PetscScalar value, PetscViewer viewer) +PetscErrorCode DMSequenceGetLength_HDF5_Internal(DM dm, const char seqname[], PetscInt *seqlen, PetscViewer viewer) +{ + hid_t file, group, dset, dspace; + hsize_t rdim, *dims; + char *groupname; + PetscBool has; + + PetscFunctionBegin; + PetscCall(PetscViewerHDF5GetGroup(viewer, NULL, &groupname)); + PetscCall(PetscViewerHDF5HasDataset(viewer, seqname, &has)); + PetscCheck(has, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Object (dataset) \"%s\" not stored in group %s", seqname, groupname); + + PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file, &group)); + PetscCallHDF5Return(dset, H5Dopen2, (group, seqname, H5P_DEFAULT)); + PetscCallHDF5Return(dspace, H5Dget_space, (dset)); + PetscCallHDF5Return(rdim, H5Sget_simple_extent_dims, (dspace, NULL, NULL)); + PetscCall(PetscMalloc1(rdim, &dims)); + PetscCallHDF5Return(rdim, H5Sget_simple_extent_dims, (dspace, dims, NULL)); + *seqlen = dims[0]; + PetscCall(PetscFree(dims)); + PetscCallHDF5(H5Dclose, (dset)); + PetscCallHDF5(H5Gclose, (group)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode DMSequenceView_HDF5(DM dm, const char seqname[], PetscInt seqnum, PetscScalar value, PetscViewer viewer) { Vec stamp; PetscMPIInt rank; @@ -234,7 +420,7 @@ static PetscErrorCode DMSequenceView_HDF5(DM dm, const char *seqname, PetscInt s PetscFunctionReturn(PETSC_SUCCESS); } -PetscErrorCode DMSequenceLoad_HDF5_Internal(DM dm, const char *seqname, PetscInt seqnum, PetscScalar *value, PetscViewer viewer) +PetscErrorCode DMSequenceLoad_HDF5_Internal(DM dm, const char seqname[], PetscInt seqnum, PetscScalar *value, PetscViewer viewer) { Vec stamp; PetscMPIInt rank; @@ -812,6 +998,7 @@ PetscErrorCode DMPlexTopologyView_HDF5_Internal(DM dm, IS globalPointNumbers, Pe PetscFunctionBegin; PetscCall(PetscViewerHDF5GetDMPlexStorageVersionWriting(viewer, &version)); + PetscCall(PetscInfo(dm, "Writing DM %s storage version %d.%d.%d\n", dm->hdr.name, version->major, version->minor, version->subminor)); PetscCall(DMPlexGetHDF5Name_Private(dm, &topologydm_name)); if (DMPlexStorageVersionGE(version, 2, 0, 0)) { PetscCall(PetscSNPrintf(group, sizeof(group), "topologies/%s", topologydm_name)); @@ -1851,6 +2038,7 @@ static PetscErrorCode DMPlexDistributionLoad_HDF5_Private(DM dm, PetscViewer vie PetscFunctionReturn(PETSC_SUCCESS); } +// Serial load of topology static PetscErrorCode DMPlexTopologyLoad_HDF5_Legacy_Private(DM dm, PetscViewer viewer, PetscSF *sf) { MPI_Comm comm; @@ -1881,6 +2069,7 @@ static PetscErrorCode DMPlexTopologyLoad_HDF5_Legacy_Private(DM dm, PetscViewer PetscCall(DMSetDimension(dm, dim)); { /* Force serial load */ + PetscCall(PetscInfo(dm, "Loading DM %s in serial\n", dm->hdr.name)); PetscCall(PetscViewerHDF5ReadSizes(viewer, pointsName, NULL, &Np)); PetscCall(PetscLayoutSetLocalSize(pointsIS->map, rank == 0 ? 
Np : 0)); PetscCall(PetscLayoutSetSize(pointsIS->map, Np)); @@ -1989,6 +2178,7 @@ static PetscErrorCode PlexLayerCreate_Private(PlexLayer *layer) PetscFunctionReturn(PETSC_SUCCESS); } +// Parallel load of a depth stratum static PetscErrorCode PlexLayerLoad_Private(PlexLayer layer, PetscViewer viewer, PetscInt d, PetscLayout pointsLayout) { char path[128]; @@ -2343,6 +2533,7 @@ static PetscErrorCode PlexLayerConcatenateSFs_Private(MPI_Comm comm, PetscInt de PetscFunctionReturn(PETSC_SUCCESS); } +// Parallel load of topology static PetscErrorCode DMPlexTopologyLoad_HDF5_Private(DM dm, PetscViewer viewer, PetscSF *sfXC) { PlexLayer *layers; @@ -2362,6 +2553,7 @@ static PetscErrorCode DMPlexTopologyLoad_HDF5_Private(DM dm, PetscViewer viewer, } PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); + PetscCall(PetscInfo(dm, "Loading DM %s in parallel\n", dm->hdr.name)); { IS spOnComm; @@ -2567,6 +2759,7 @@ PetscErrorCode DMPlexLoad_HDF5_Internal(DM dm, PetscViewer viewer) PetscFunctionBegin; PetscCall(PetscViewerHDF5GetDMPlexStorageVersionReading(viewer, &version)); + PetscCall(PetscInfo(dm, "Loading DM %s storage version %d.%d.%d\n", dm->hdr.name, version->major, version->minor, version->subminor)); if (!DMPlexStorageVersionGE(version, 2, 0, 0)) { PetscCall(DMPlexTopologyLoad_HDF5_Internal(dm, viewer, NULL)); PetscCall(DMPlexLabelsLoad_HDF5_Internal(dm, viewer, NULL)); diff --git a/src/dm/impls/plex/plexinterpolate.c b/src/dm/impls/plex/plexinterpolate.c index 156b6f40bdc..72df0fa6d5f 100644 --- a/src/dm/impls/plex/plexinterpolate.c +++ b/src/dm/impls/plex/plexinterpolate.c @@ -495,27 +495,26 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, DMLabel ctLabel; PetscHMapIJKL faceTable; PetscInt faceTypeNum[DM_NUM_POLYTOPES]; - PetscInt depth, d, pStart, Np, cStart, cEnd, c, fStart, fEnd; + PetscInt depth, pStart, Np, cStart, cEnd, fStart, fEnd, vStart, vEnd; PetscInt cntFaces, *facesId, minCone; PetscFunctionBegin; PetscCall(DMPlexGetDepth(dm, &depth)); PetscCall(PetscHMapIJKLCreate(&faceTable)); PetscCall(PetscArrayzero(faceTypeNum, DM_NUM_POLYTOPES)); + PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd)); PetscCall(DMPlexGetDepthStratum(dm, cellDepth, &cStart, &cEnd)); - /* Number new faces and save face vertices in hash table */ + // If the range incorporates the vertices, it means we have a non-manifold topology, so choose just cells + if (cStart <= vStart && cEnd >= vEnd) cEnd = vStart; + // Number new faces and save face vertices in hash table + // If depth > cellDepth, meaning we are interpolating faces, put the new (d-1)-faces after them + // otherwise, we are interpolating cells, so put the faces after the vertices PetscCall(DMPlexGetDepthStratum(dm, depth > cellDepth ? 
cellDepth : 0, NULL, &fStart)); - { - PetscInt opEnd; - - // We need to account for existing faces in non-manifold meshes - PetscCall(DMPlexGetChart(dm, NULL, &opEnd)); - fStart = PetscMax(opEnd, fStart); - } fEnd = fStart; - minCone = PETSC_MAX_INT; - for (c = cStart, cntFaces = 0; c < cEnd; ++c) { + minCone = PETSC_MAX_INT; + cntFaces = 0; + for (PetscInt c = cStart; c < cEnd; ++c) { const PetscInt *cone; DMPolytopeType ct; PetscInt numFaces = 0, coneSize; @@ -533,11 +532,12 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, PetscCall(PetscMalloc1(cntFaces, &facesId)); - for (c = cStart, cntFaces = 0; c < cEnd; ++c) { + cntFaces = 0; + for (PetscInt c = cStart; c < cEnd; ++c) { const PetscInt *cone, *faceSizes, *faces; const DMPolytopeType *faceTypes; DMPolytopeType ct; - PetscInt numFaces, cf, foff = 0; + PetscInt numFaces, foff = 0; PetscCall(DMPlexGetCellType(dm, c, &ct)); PetscCall(DMPlexGetCone(dm, c, &cone)); @@ -547,7 +547,7 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, } else { numFaces = 0; } - for (cf = 0; cf < numFaces; foff += faceSizes[cf], ++cf) { + for (PetscInt cf = 0; cf < numFaces; foff += faceSizes[cf], ++cf) { const PetscInt faceSize = faceSizes[cf]; const DMPolytopeType faceType = faceTypes[cf]; const PetscInt *face = &faces[foff]; @@ -583,11 +583,12 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, PetscCall(PetscHMapIJKLClear(faceTable)); faceTypeStart[0] = fStart; for (ct = 1; ct < DM_NUM_POLYTOPES; ++ct) faceTypeStart[ct] = faceTypeStart[ct - 1] + faceTypeNum[ct - 1]; - for (c = cStart, cntFaces = 0; c < cEnd; ++c) { + cntFaces = 0; + for (PetscInt c = cStart; c < cEnd; ++c) { const PetscInt *cone, *faceSizes, *faces; const DMPolytopeType *faceTypes; DMPolytopeType ct; - PetscInt numFaces, cf, foff = 0; + PetscInt numFaces, foff = 0; PetscCall(DMPlexGetCellType(dm, c, &ct)); PetscCall(DMPlexGetCone(dm, c, &cone)); @@ -596,7 +597,7 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, } else { numFaces = 0; } - for (cf = 0; cf < numFaces; foff += faceSizes[cf], ++cf) { + for (PetscInt cf = 0; cf < numFaces; foff += faceSizes[cf], ++cf) { const PetscInt faceSize = faceSizes[cf]; const DMPolytopeType faceType = faceTypes[cf]; const PetscInt *face = &faces[foff]; @@ -625,43 +626,51 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, } PetscCall(PetscHMapIJKLDestroy(&faceTable)); - /* Add new points, always at the end of the numbering */ + // Add new points, perhaps inserting into the numbering PetscCall(DMPlexGetChart(dm, &pStart, &Np)); PetscCall(DMPlexSetChart(idm, pStart, Np + (fEnd - fStart))); - /* Set cone sizes */ - /* Must create the celltype label here so that we do not automatically try to compute the types */ + // Set cone sizes + // Must create the celltype label here so that we do not automatically try to compute the types PetscCall(DMCreateLabel(idm, "celltype")); PetscCall(DMPlexGetCellTypeLabel(idm, &ctLabel)); - for (d = 0; d <= depth; ++d) { + for (PetscInt d = 0; d <= depth; ++d) { DMPolytopeType ct; - PetscInt coneSize, pStart, pEnd, p; + PetscInt coneSize, pStart, pEnd, poff = 0; - if (d == cellDepth) continue; PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd)); - for (p = pStart; p < pEnd; ++p) { + // Check for non-manifold condition + if (d == cellDepth) { + if (pEnd == cEnd) continue; + else pStart = vEnd; + } + // Account for insertion + if (pStart >= fStart) poff = fEnd - 
fStart; + for (PetscInt p = pStart; p < pEnd; ++p) { PetscCall(DMPlexGetConeSize(dm, p, &coneSize)); - PetscCall(DMPlexSetConeSize(idm, p, coneSize)); + PetscCall(DMPlexSetConeSize(idm, p + poff, coneSize)); PetscCall(DMPlexGetCellType(dm, p, &ct)); - PetscCall(DMPlexSetCellType(idm, p, ct)); + PetscCall(DMPlexSetCellType(idm, p + poff, ct)); } } - for (c = cStart, cntFaces = 0; c < cEnd; ++c) { + cntFaces = 0; + for (PetscInt c = cStart; c < cEnd; ++c) { const PetscInt *cone, *faceSizes; const DMPolytopeType *faceTypes; DMPolytopeType ct; - PetscInt numFaces, cf; + PetscInt numFaces, poff = 0; PetscCall(DMPlexGetCellType(dm, c, &ct)); PetscCall(DMPlexGetCone(dm, c, &cone)); + if (c >= fStart) poff = fEnd - fStart; if (ct == DM_POLYTOPE_SEGMENT || ct == DM_POLYTOPE_POINT_PRISM_TENSOR) { - PetscCall(DMPlexSetCellType(idm, c, ct)); - PetscCall(DMPlexSetConeSize(idm, c, 2)); + PetscCall(DMPlexSetCellType(idm, c + poff, ct)); + PetscCall(DMPlexSetConeSize(idm, c + poff, 2)); continue; } PetscCall(DMPlexGetRawFaces_Internal(dm, ct, cone, &numFaces, &faceTypes, &faceSizes, NULL)); - PetscCall(DMPlexSetCellType(idm, c, ct)); - PetscCall(DMPlexSetConeSize(idm, c, numFaces)); - for (cf = 0; cf < numFaces; ++cf) { + PetscCall(DMPlexSetCellType(idm, c + poff, ct)); + PetscCall(DMPlexSetConeSize(idm, c + poff, numFaces)); + for (PetscInt cf = 0; cf < numFaces; ++cf) { const PetscInt f = facesId[cntFaces]; DMPolytopeType faceType = faceTypes[cf]; const PetscInt faceSize = faceSizes[cf]; @@ -672,7 +681,7 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, PetscCall(DMPlexRestoreRawFaces_Internal(dm, ct, cone, &numFaces, &faceTypes, &faceSizes, NULL)); } PetscCall(DMSetUp(idm)); - /* Initialize cones so we do not need the bash table to tell us that a cone has been set */ + // Initialize cones so we do not need the hash table to tell us that a cone has been set { PetscSection cs; PetscInt *cones, csize; @@ -680,61 +689,81 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth, PetscCall(DMPlexGetConeSection(idm, &cs)); PetscCall(DMPlexGetCones(idm, &cones)); PetscCall(PetscSectionGetStorageSize(cs, &csize)); - for (c = 0; c < csize; ++c) cones[c] = -1; - } - /* Set cones */ - for (d = 0; d <= depth; ++d) { - const PetscInt *cone; - PetscInt pStart, pEnd, p; - - if (d == cellDepth) continue; - PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd)); - for (p = pStart; p < pEnd; ++p) { - PetscCall(DMPlexGetCone(dm, p, &cone)); - PetscCall(DMPlexSetCone(idm, p, cone)); - PetscCall(DMPlexGetConeOrientation(dm, p, &cone)); - PetscCall(DMPlexSetConeOrientation(idm, p, cone)); - } + for (PetscInt c = 0; c < csize; ++c) cones[c] = -1; } - for (c = cStart, cntFaces = 0; c < cEnd; ++c) { - const PetscInt *cone, *faceSizes, *faces; - const DMPolytopeType *faceTypes; - DMPolytopeType ct; - PetscInt numFaces, cf, foff = 0; - - PetscCall(DMPlexGetCellType(dm, c, &ct)); - PetscCall(DMPlexGetCone(dm, c, &cone)); - if (ct == DM_POLYTOPE_SEGMENT || ct == DM_POLYTOPE_POINT_PRISM_TENSOR) { - PetscCall(DMPlexSetCone(idm, c, cone)); - PetscCall(DMPlexGetConeOrientation(dm, c, &cone)); - PetscCall(DMPlexSetConeOrientation(idm, c, cone)); - continue; + // Set cones + { + PetscInt *icone; + PetscInt maxConeSize; + + PetscCall(DMPlexGetMaxSizes(dm, &maxConeSize, NULL)); + PetscCall(PetscMalloc1(maxConeSize, &icone)); + for (PetscInt d = 0; d <= depth; ++d) { + const PetscInt *cone; + PetscInt pStart, pEnd, poff = 0, coneSize; + + PetscCall(DMPlexGetDepthStratum(dm, d,
&pStart, &pEnd)); + // Check for non-manifold condition + if (d == cellDepth) { + if (pEnd == cEnd) continue; + else pStart = vEnd; + } + // Account for insertion + if (pStart >= fStart) poff = fEnd - fStart; + for (PetscInt p = pStart; p < pEnd; ++p) { + PetscCall(DMPlexGetCone(dm, p, &cone)); + PetscCall(DMPlexGetConeSize(dm, p, &coneSize)); + for (PetscInt cp = 0; cp < coneSize; ++cp) icone[cp] = cone[cp] + (cone[cp] >= fStart ? fEnd - fStart : 0); + PetscCall(DMPlexSetCone(idm, p + poff, icone)); + PetscCall(DMPlexGetConeOrientation(dm, p, &cone)); + PetscCall(DMPlexSetConeOrientation(idm, p + poff, cone)); + } } - PetscCall(DMPlexGetRawFaces_Internal(dm, ct, cone, &numFaces, &faceTypes, &faceSizes, &faces)); - for (cf = 0; cf < numFaces; foff += faceSizes[cf], ++cf) { - DMPolytopeType faceType = faceTypes[cf]; - const PetscInt faceSize = faceSizes[cf]; - const PetscInt f = facesId[cntFaces]; - const PetscInt *face = &faces[foff]; - const PetscInt *fcone; - - PetscCall(DMPlexInsertCone(idm, c, cf, f)); - PetscCall(DMPlexGetCone(idm, f, &fcone)); - if (fcone[0] < 0) PetscCall(DMPlexSetCone(idm, f, face)); - { - const PetscInt *cone; - PetscInt coneSize, ornt; - - PetscCall(DMPlexGetConeSize(idm, f, &coneSize)); - PetscCall(DMPlexGetCone(idm, f, &cone)); - PetscCheck(coneSize == faceSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of face vertices %" PetscInt_FMT " for face %" PetscInt_FMT " should be %" PetscInt_FMT, coneSize, f, faceSize); - /* Notice that we have to use vertices here because the lower dimensional faces have not been created yet */ - PetscCall(DMPolytopeGetVertexOrientation(faceType, cone, face, &ornt)); - PetscCall(DMPlexInsertConeOrientation(idm, c, cf, ornt)); + cntFaces = 0; + for (PetscInt c = cStart; c < cEnd; ++c) { + const PetscInt *cone, *faceSizes, *faces; + const DMPolytopeType *faceTypes; + DMPolytopeType ct; + PetscInt coneSize, numFaces, foff = 0, poff = 0; + + PetscCall(DMPlexGetCellType(dm, c, &ct)); + PetscCall(DMPlexGetCone(dm, c, &cone)); + PetscCall(DMPlexGetConeSize(dm, c, &coneSize)); + if (c >= fStart) poff = fEnd - fStart; + if (ct == DM_POLYTOPE_SEGMENT || ct == DM_POLYTOPE_POINT_PRISM_TENSOR) { + for (PetscInt cp = 0; cp < coneSize; ++cp) icone[cp] = cone[cp] + (cone[cp] >= fStart ? 
fEnd - fStart : 0); + PetscCall(DMPlexSetCone(idm, c + poff, icone)); + PetscCall(DMPlexGetConeOrientation(dm, c, &cone)); + PetscCall(DMPlexSetConeOrientation(idm, c + poff, cone)); + continue; } - cntFaces++; + PetscCall(DMPlexGetRawFaces_Internal(dm, ct, cone, &numFaces, &faceTypes, &faceSizes, &faces)); + for (PetscInt cf = 0; cf < numFaces; foff += faceSizes[cf], ++cf) { + DMPolytopeType faceType = faceTypes[cf]; + const PetscInt faceSize = faceSizes[cf]; + const PetscInt f = facesId[cntFaces]; + const PetscInt *face = &faces[foff]; + const PetscInt *fcone; + + PetscCall(DMPlexInsertCone(idm, c, cf, f)); + PetscCall(DMPlexGetCone(idm, f, &fcone)); + if (fcone[0] < 0) PetscCall(DMPlexSetCone(idm, f, face)); + { + const PetscInt *fcone2; + PetscInt ornt; + + PetscCall(DMPlexGetConeSize(idm, f, &coneSize)); + PetscCall(DMPlexGetCone(idm, f, &fcone2)); + PetscCheck(coneSize == faceSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of face vertices %" PetscInt_FMT " for face %" PetscInt_FMT " should be %" PetscInt_FMT, coneSize, f, faceSize); + /* Notice that we have to use vertices here because the lower dimensional faces have not been created yet */ + PetscCall(DMPolytopeGetVertexOrientation(faceType, fcone2, face, &ornt)); + PetscCall(DMPlexInsertConeOrientation(idm, c + poff, cf, ornt)); + } + cntFaces++; + } + PetscCall(DMPlexRestoreRawFaces_Internal(dm, ct, cone, &numFaces, &faceTypes, &faceSizes, &faces)); } - PetscCall(DMPlexRestoreRawFaces_Internal(dm, ct, cone, &numFaces, &faceTypes, &faceSizes, &faces)); + PetscCall(PetscFree(icone)); } PetscCall(PetscFree(facesId)); PetscCall(DMPlexSymmetrize(idm)); @@ -1520,28 +1549,86 @@ PetscErrorCode DMPlexInterpolate(DM dm, DM *dmInt) PetscCall(PetscObjectReference((PetscObject)dm)); idm = dm; } else { - for (d = 1; d < dim; ++d) { - const char *prefix; - - /* Create interpolated mesh */ - PetscCall(DMCreate(PetscObjectComm((PetscObject)dm), &idm)); - PetscCall(DMSetType(idm, DMPLEX)); - PetscCall(DMSetDimension(idm, dim)); - PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix)); - PetscCall(PetscObjectSetOptionsPrefix((PetscObject)idm, prefix)); - if (depth > 0) { - PetscCall(DMPlexInterpolateFaces_Internal(odm, 1, idm)); - PetscCall(DMGetPointSF(odm, &sfPoint)); - if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(odm, sfPoint, PETSC_FALSE)); - { - /* TODO: We need to systematically fix cases of distributed Plexes with no graph set */ - PetscInt nroots; - PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL)); - if (nroots >= 0) PetscCall(DMPlexInterpolatePointSF(idm, sfPoint)); + PetscBool nonmanifold = PETSC_FALSE; + + PetscCall(PetscOptionsGetBool(NULL, dm->hdr.prefix, "-dm_plex_stratify_celltype", &nonmanifold, NULL)); + if (nonmanifold) { + do { + const char *prefix; + PetscInt pStart, pEnd, pdepth; + PetscBool done = PETSC_TRUE; + + // Find a point which is not correctly interpolated + PetscCall(DMPlexGetChart(odm, &pStart, &pEnd)); + for (PetscInt p = pStart; p < pEnd; ++p) { + DMPolytopeType ct; + const PetscInt *cone; + PetscInt coneSize, cdepth; + + PetscCall(DMPlexGetPointDepth(odm, p, &pdepth)); + PetscCall(DMPlexGetCellType(odm, p, &ct)); + // Check against celltype + if (pdepth != DMPolytopeTypeGetDim(ct)) { + done = PETSC_FALSE; + break; + } + // Check against boundary + PetscCall(DMPlexGetCone(odm, p, &cone)); + PetscCall(DMPlexGetConeSize(odm, p, &coneSize)); + for (PetscInt c = 0; c < coneSize; ++c) { + PetscCall(DMPlexGetPointDepth(odm, cone[c], &cdepth)); + if (cdepth != pdepth - 1) { + 
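+              // This cone point is not exactly one dimension below p, so p is not fully interpolated; pdepth records where to interpolate and p = pEnd aborts the outer scan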
done = PETSC_FALSE; + p = pEnd; + break; + } + } + } + if (done) break; + /* Create interpolated mesh */ + PetscCall(DMCreate(PetscObjectComm((PetscObject)dm), &idm)); + PetscCall(DMSetType(idm, DMPLEX)); + PetscCall(DMSetDimension(idm, dim)); + PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)idm, prefix)); + if (depth > 0) { + PetscCall(DMPlexInterpolateFaces_Internal(odm, pdepth, idm)); + PetscCall(DMGetPointSF(odm, &sfPoint)); + if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(odm, sfPoint, PETSC_FALSE)); + { + /* TODO: We need to systematically fix cases of distributed Plexes with no graph set */ + PetscInt nroots; + PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL)); + if (nroots >= 0) PetscCall(DMPlexInterpolatePointSF(idm, sfPoint)); + } + } + if (odm != dm) PetscCall(DMDestroy(&odm)); + odm = idm; + } while (1); + } else { + for (d = 1; d < dim; ++d) { + const char *prefix; + + /* Create interpolated mesh */ + PetscCall(DMCreate(PetscObjectComm((PetscObject)dm), &idm)); + PetscCall(DMSetType(idm, DMPLEX)); + PetscCall(DMSetDimension(idm, dim)); + PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)idm, prefix)); + if (depth > 0) { + PetscCall(DMPlexInterpolateFaces_Internal(odm, 1, idm)); + PetscCall(DMGetPointSF(odm, &sfPoint)); + if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(odm, sfPoint, PETSC_FALSE)); + { + /* TODO: We need to systematically fix cases of distributed Plexes with no graph set */ + PetscInt nroots; + PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL)); + if (nroots >= 0) PetscCall(DMPlexInterpolatePointSF(idm, sfPoint)); + } } + if (odm != dm) PetscCall(DMDestroy(&odm)); + odm = idm; } - if (odm != dm) PetscCall(DMDestroy(&odm)); - odm = idm; } PetscCall(PetscObjectGetName((PetscObject)dm, &name)); PetscCall(PetscObjectSetName((PetscObject)idm, name)); diff --git a/src/dm/impls/plex/plexorient.c b/src/dm/impls/plex/plexorient.c index bc5aad6645b..d8aa10d4b0c 100644 --- a/src/dm/impls/plex/plexorient.c +++ b/src/dm/impls/plex/plexorient.c @@ -26,6 +26,7 @@ PetscErrorCode DMPlexOrientPoint(DM dm, PetscInt p, PetscInt o) PetscValidHeaderSpecific(dm, DM_CLASSID, 1); PetscCall(DMPlexGetCellType(dm, p, &ct)); arr = DMPolytopeTypeGetArrangement(ct, o); + if (!arr) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(DMPlexGetConeSize(dm, p, &coneSize)); PetscCall(DMPlexGetCone(dm, p, &cone)); PetscCall(DMPlexGetConeOrientation(dm, p, &ornt)); @@ -64,12 +65,107 @@ PetscErrorCode DMPlexOrientPoint(DM dm, PetscInt p, PetscInt o) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscInt GetPointIndex(PetscInt point, PetscInt pStart, PetscInt pEnd, const PetscInt points[]) +{ + if (points) { + PetscInt loc; + + PetscCall(PetscFindInt(point, pEnd - pStart, points, &loc)); + if (loc >= 0) return loc; + } else { + if (point >= pStart && point < pEnd) return point - pStart; + } + return -1; +} + /* - Checks face match - Flips non-matching - Inserts faces of support cells in FIFO */ -static PetscErrorCode DMPlexCheckFace_Internal(DM dm, PetscInt *faceFIFO, PetscInt *fTop, PetscInt *fBottom, PetscInt cStart, PetscInt fStart, PetscInt fEnd, PetscBT seenCells, PetscBT flippedCells, PetscBT seenFaces) +static PetscErrorCode DMPlexCheckFace_Internal(DM dm, PetscInt *faceFIFO, PetscInt *fTop, PetscInt *fBottom, IS cellIS, IS faceIS, PetscBT seenCells, PetscBT flippedCells, PetscBT seenFaces) +{ + const PetscInt *supp, 
*coneA, *coneB, *coneOA, *coneOB; + PetscInt suppSize, Ns = 0, coneSizeA, coneSizeB, posA = -1, posB = -1; + PetscInt face, dim, indC[3], indS[3], seenA, flippedA, seenB, flippedB, mismatch; + const PetscInt *cells, *faces; + PetscInt cStart, cEnd, fStart, fEnd; + + PetscFunctionBegin; + face = faceFIFO[(*fTop)++]; + PetscCall(ISGetPointRange(cellIS, &cStart, &cEnd, &cells)); + PetscCall(ISGetPointRange(faceIS, &fStart, &fEnd, &faces)); + PetscCall(DMPlexGetPointDepth(dm, cells ? cells[cStart] : cStart, &dim)); + PetscCall(DMPlexGetSupportSize(dm, face, &suppSize)); + PetscCall(DMPlexGetSupport(dm, face, &supp)); + // Filter the support + for (PetscInt s = 0; s < suppSize; ++s) { + // Filter support + indC[Ns] = GetPointIndex(supp[s], cStart, cEnd, cells); + indS[Ns] = s; + if (indC[Ns] >= 0) ++Ns; + } + if (Ns < 2) PetscFunctionReturn(PETSC_SUCCESS); + PetscCheck(Ns == 2, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Faces should separate only two cells, not %" PetscInt_FMT, Ns); + PetscCheck(indC[0] >= 0 && indC[1] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Support cells %" PetscInt_FMT " (%" PetscInt_FMT ") and %" PetscInt_FMT " (%" PetscInt_FMT ") are not both valid", supp[0], indC[0], supp[1], indC[1]); + seenA = PetscBTLookup(seenCells, indC[0]); + flippedA = PetscBTLookup(flippedCells, indC[0]) ? 1 : 0; + seenB = PetscBTLookup(seenCells, indC[1]); + flippedB = PetscBTLookup(flippedCells, indC[1]) ? 1 : 0; + + PetscCall(DMPlexGetConeSize(dm, supp[indS[0]], &coneSizeA)); + PetscCall(DMPlexGetConeSize(dm, supp[indS[1]], &coneSizeB)); + PetscCall(DMPlexGetCone(dm, supp[indS[0]], &coneA)); + PetscCall(DMPlexGetCone(dm, supp[indS[1]], &coneB)); + PetscCall(DMPlexGetConeOrientation(dm, supp[indS[0]], &coneOA)); + PetscCall(DMPlexGetConeOrientation(dm, supp[indS[1]], &coneOB)); + for (PetscInt c = 0; c < coneSizeA; ++c) { + const PetscInt indF = GetPointIndex(coneA[c], fStart, fEnd, faces); + + // Filter cone + if (indF < 0) continue; + if (!PetscBTLookup(seenFaces, indF)) { + faceFIFO[(*fBottom)++] = coneA[c]; + PetscCall(PetscBTSet(seenFaces, indF)); + } + if (coneA[c] == face) posA = c; + PetscCheck(*fBottom <= fEnd - fStart, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Face %" PetscInt_FMT " was pushed exceeding capacity %" PetscInt_FMT " > %" PetscInt_FMT, coneA[c], *fBottom, fEnd - fStart); + } + PetscCheck(posA >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Face %" PetscInt_FMT " could not be located in cell %" PetscInt_FMT, face, supp[indS[0]]); + for (PetscInt c = 0; c < coneSizeB; ++c) { + const PetscInt indF = GetPointIndex(coneB[c], fStart, fEnd, faces); + + // Filter cone + if (indF < 0) continue; + if (!PetscBTLookup(seenFaces, indF)) { + faceFIFO[(*fBottom)++] = coneB[c]; + PetscCall(PetscBTSet(seenFaces, indF)); + } + if (coneB[c] == face) posB = c; + PetscCheck(*fBottom <= fEnd - fStart, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Face %" PetscInt_FMT " was pushed exceeding capacity %" PetscInt_FMT " > %" PetscInt_FMT, coneA[c], *fBottom, fEnd - fStart); + } + PetscCheck(posB >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Face %" PetscInt_FMT " could not be located in cell %" PetscInt_FMT, face, supp[indS[1]]); + + if (dim == 1) { + mismatch = posA == posB; + } else { + mismatch = coneOA[posA] == coneOB[posB]; + } + + if (mismatch ^ (flippedA ^ flippedB)) { + PetscCheck(!seenA || !seenB, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Previously seen cells %" PetscInt_FMT " and %" PetscInt_FMT " do not match: Fault mesh is non-orientable", supp[indS[0]], supp[indS[1]]); + if (!seenA && !flippedA) { + 
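+    // Cell A has not been seen or flipped yet, so flip it to agree with B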
PetscCall(PetscBTSet(flippedCells, indC[0])); + } else if (!seenB && !flippedB) { + PetscCall(PetscBTSet(flippedCells, indC[1])); + } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent mesh orientation: Fault mesh is non-orientable"); + } else PetscCheck(!mismatch || !flippedA || !flippedB, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Attempt to flip already flipped cell: Fault mesh is non-orientable"); + PetscCall(PetscBTSet(seenCells, indC[0])); + PetscCall(PetscBTSet(seenCells, indC[1])); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode DMPlexCheckFace_Old_Internal(DM dm, PetscInt *faceFIFO, PetscInt *fTop, PetscInt *fBottom, PetscInt cStart, PetscInt fStart, PetscInt fEnd, PetscBT seenCells, PetscBT flippedCells, PetscBT seenFaces) { const PetscInt *support, *coneA, *coneB, *coneOA, *coneOB; PetscInt supportSize, coneSizeA, coneSizeB, posA = -1, posB = -1; @@ -131,6 +227,85 @@ static PetscErrorCode DMPlexCheckFace_Internal(DM dm, PetscInt *faceFIFO, PetscI PetscFunctionReturn(PETSC_SUCCESS); } +/* + DMPlexOrient_Serial - Compute valid orientation for local connected components + + Not Collective + + Input Parameters: + + dm - The `DM` + . cellIS - The `IS` of cells to be oriented + - faceIS - The `IS` of faces separating the cells + + Output Parameters: + + Ncomp - The number of connected components + . cellComp - The connected component for each local cell + . faceComp - The connected component for each local face + - flippedCells - Marked cells should be inverted + + Level: developer + +.seealso: `DMPlexOrient()` +*/ +static PetscErrorCode DMPlexOrient_Serial(DM dm, IS cellIS, IS faceIS, PetscInt *Ncomp, PetscInt cellComp[], PetscInt faceComp[], PetscBT flippedCells) +{ + PetscBT seenCells, seenFaces; + PetscInt *faceFIFO; + const PetscInt *cells = NULL, *faces = NULL; + PetscInt cStart = 0, cEnd = 0, fStart = 0, fEnd = 0; + + PetscFunctionBegin; + if (cellIS) PetscCall(ISGetPointRange(cellIS, &cStart, &cEnd, &cells)); + if (faceIS) PetscCall(ISGetPointRange(faceIS, &fStart, &fEnd, &faces)); + PetscCall(PetscBTCreate(cEnd - cStart, &seenCells)); + PetscCall(PetscBTMemzero(cEnd - cStart, seenCells)); + PetscCall(PetscBTCreate(fEnd - fStart, &seenFaces)); + PetscCall(PetscBTMemzero(fEnd - fStart, seenFaces)); + PetscCall(PetscMalloc1(fEnd - fStart, &faceFIFO)); + *Ncomp = 0; + for (PetscInt c = 0; c < cEnd - cStart; ++c) cellComp[c] = -1; + do { + PetscInt cc, fTop, fBottom; + + // Look for first unmarked cell + for (cc = cStart; cc < cEnd; ++cc) + if (cellComp[cc - cStart] < 0) break; + if (cc >= cEnd) break; + // Initialize FIFO with first cell in component + { + const PetscInt cell = cells ?
cells[cc] : cc; + const PetscInt *cone; + PetscInt coneSize; + + fTop = fBottom = 0; + PetscCall(DMPlexGetConeSize(dm, cell, &coneSize)); + PetscCall(DMPlexGetCone(dm, cell, &cone)); + for (PetscInt c = 0; c < coneSize; ++c) { + // Cell faces are guaranteed to be in the face set + faceFIFO[fBottom++] = cone[c]; + PetscCall(PetscBTSet(seenFaces, GetPointIndex(cone[c], fStart, fEnd, faces))); + } + PetscCall(PetscBTSet(seenCells, cc - cStart)); + } + // Consider each face in FIFO + while (fTop < fBottom) PetscCall(DMPlexCheckFace_Internal(dm, faceFIFO, &fTop, &fBottom, cellIS, faceIS, seenCells, flippedCells, seenFaces)); + // Set component for cells and faces + for (PetscInt c = 0; c < cEnd - cStart; ++c) { + if (PetscBTLookup(seenCells, c)) cellComp[c] = *Ncomp; + } + for (PetscInt f = 0; f < fEnd - fStart; ++f) { + if (PetscBTLookup(seenFaces, f)) faceComp[f] = *Ncomp; + } + // Wipe seenCells and seenFaces for next component + PetscCall(PetscBTMemzero(fEnd - fStart, seenFaces)); + PetscCall(PetscBTMemzero(cEnd - cStart, seenCells)); + ++(*Ncomp); + } while (1); + PetscCall(PetscBTDestroy(&seenCells)); + PetscCall(PetscBTDestroy(&seenFaces)); + PetscCall(PetscFree(faceFIFO)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ DMPlexOrient - Give a consistent orientation to the input mesh @@ -148,6 +323,17 @@ static PetscErrorCode DMPlexCheckFace_Internal(DM dm, PetscInt *faceFIFO, PetscI @*/ PetscErrorCode DMPlexOrient(DM dm) { +#if 0 + IS cellIS, faceIS; + + PetscFunctionBegin; + PetscCall(DMPlexGetAllCells_Internal(dm, &cellIS)); + PetscCall(DMPlexGetAllFaces_Internal(dm, &faceIS)); + PetscCall(DMPlexOrientCells_Internal(dm, cellIS, faceIS)); + PetscCall(ISDestroy(&cellIS)); + PetscCall(ISDestroy(&faceIS)); + PetscFunctionReturn(PETSC_SUCCESS); +#else MPI_Comm comm; PetscSF sf; const PetscInt *lpoints; @@ -232,7 +418,7 @@ PetscErrorCode DMPlexOrient(DM dm) PetscCall(PetscBTSet(seenCells, cell - cStart)); } /* Consider each face in FIFO */ - while (fTop < fBottom) PetscCall(DMPlexCheckFace_Internal(dm, faceFIFO, &fTop, &fBottom, cStart, fStart, fEnd, seenCells, flippedCells, seenFaces)); + while (fTop < fBottom) PetscCall(DMPlexCheckFace_Old_Internal(dm, faceFIFO, &fTop, &fBottom, cStart, fStart, fEnd, seenCells, flippedCells, seenFaces)); /* Set component for cells and faces */ for (cell = 0; cell < cEnd - cStart; ++cell) { if (PetscBTLookup(seenCells, cell)) cellComp[cell] = comp; @@ -501,4 +687,411 @@ PetscErrorCode DMPlexOrient(DM dm) PetscCall(PetscFree3(rorntComp, lorntComp, locSupport)); PetscCall(PetscFree3(faceFIFO, cellComp, faceComp)); PetscFunctionReturn(PETSC_SUCCESS); +#endif +} + +static PetscErrorCode CreateCellAndFaceIS_Private(DM dm, DMLabel label, IS *cellIS, IS *faceIS) +{ + IS valueIS; + const PetscInt *values; + PetscInt Nv, depth = 0; + + PetscFunctionBegin; + PetscCall(DMLabelGetValueIS(label, &valueIS)); + PetscCall(ISGetLocalSize(valueIS, &Nv)); + PetscCall(ISGetIndices(valueIS, &values)); + for (PetscInt v = 0; v < Nv; ++v) { + const PetscInt val = values[v] < 0 || values[v] >= 100 ? 
0 : values[v]; + + depth = PetscMax(val, depth); + } + PetscCall(ISDestroy(&valueIS)); + PetscCheck(depth >= 1 || !Nv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Depth for interface must be at least 1, not %" PetscInt_FMT, depth); + PetscCall(DMLabelGetStratumIS(label, depth, cellIS)); + PetscCall(DMLabelGetStratumIS(label, depth - 1, faceIS)); + if (!(*cellIS)) PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, cellIS)); + if (!(*faceIS)) PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, faceIS)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode DMPlexOrientLabel(DM dm, DMLabel label) +{ + IS cellIS, faceIS; + + PetscFunctionBegin; + PetscCall(CreateCellAndFaceIS_Private(dm, label, &cellIS, &faceIS)); + PetscCall(DMPlexOrientCells_Internal(dm, cellIS, faceIS)); + PetscCall(ISDestroy(&cellIS)); + PetscCall(ISDestroy(&faceIS)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode DMPlexOrientCells_Internal(DM dm, IS cellIS, IS faceIS) +{ + MPI_Comm comm; + PetscSF sf; + const PetscInt *lpoints; + const PetscSFNode *rpoints; + PetscSFNode *rorntComp = NULL, *lorntComp = NULL; + PetscInt *numNeighbors, **neighbors, *locSupp = NULL; + PetscSFNode *nrankComp; + PetscBool *match, *flipped; + PetscBT flippedCells; + PetscInt *cellComp, *faceComp; + const PetscInt *cells = NULL, *faces = NULL; + PetscInt cStart = 0, cEnd = 0, fStart = 0, fEnd = 0; + PetscInt numLeaves, numRoots, dim, Ncomp, totNeighbors = 0; + PetscMPIInt rank, size; + PetscBool view, viewSync; + PetscViewer viewer = NULL, selfviewer = NULL; + + PetscFunctionBegin; + PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); + PetscCallMPI(MPI_Comm_rank(comm, &rank)); + PetscCallMPI(MPI_Comm_size(comm, &size)); + PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-orientation_view", &view)); + PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-orientation_view_synchronized", &viewSync)); + + if (cellIS) PetscCall(ISGetPointRange(cellIS, &cStart, &cEnd, &cells)); + if (faceIS) PetscCall(ISGetPointRange(faceIS, &fStart, &fEnd, &faces)); + PetscCall(DMGetPointSF(dm, &sf)); + PetscCall(PetscSFGetGraph(sf, &numRoots, &numLeaves, &lpoints, &rpoints)); + /* Truth Table + mismatch flips do action mismatch flipA ^ flipB action + F 0 flips no F F F + F 1 flip yes F T T + F 2 flips no T F T + T 0 flips yes T T F + T 1 flip no + T 2 flips yes + */ + PetscCall(DMGetDimension(dm, &dim)); + PetscCall(PetscBTCreate(cEnd - cStart, &flippedCells)); + PetscCall(PetscBTMemzero(cEnd - cStart, flippedCells)); + PetscCall(PetscCalloc2(cEnd - cStart, &cellComp, fEnd - fStart, &faceComp)); + /* + OLD STYLE + - Add an integer array over cells and faces (component) for connected component number + Foreach component + - Mark the initial cell as seen + - Process component as usual + - Set component for all seenCells + - Wipe seenCells and seenFaces (flippedCells can stay) + - Generate parallel adjacency for component using SF and seenFaces + - Collect Ncomp adj data from each proc to 0 + - Build same serial graph + - Use same solver + - Use Scatterv to send back flipped flags for each component + - Negate flippedCells by component + + NEW STYLE + - Create the adj on each process + - Bootstrap to complete graph on proc 0 + */ + PetscCall(DMPlexOrient_Serial(dm, cellIS, faceIS, &Ncomp, cellComp, faceComp, flippedCells)); + if (view) { + PetscViewer v; + + PetscCall(PetscViewerASCIIGetStdout(comm, &v)); + PetscCall(PetscViewerASCIIPushSynchronized(v)); + 
PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d]BT for serial flipped cells:\n", rank)); + PetscCall(PetscBTView(cEnd - cStart, flippedCells, v)); + PetscCall(PetscViewerFlush(v)); + PetscCall(PetscViewerASCIIPopSynchronized(v)); + } + /* Now all subdomains are oriented, but we need a consistent parallel orientation */ + // TODO: This all has to be rewritten to filter cones/supports to the ISes + if (numLeaves >= 0) { + PetscInt maxSuppSize, neighbor; + + // Store orientations of boundary faces + PetscCall(DMPlexGetMaxSizes(dm, NULL, &maxSuppSize)); + PetscCall(PetscCalloc3(numRoots, &rorntComp, numRoots, &lorntComp, maxSuppSize, &locSupp)); + for (PetscInt f = fStart; f < fEnd; ++f) { + const PetscInt face = faces ? faces[f] : f; + const PetscInt *cone, *supp, *ornt; + PetscInt coneSize, suppSize, nind, c, Ns = 0; + + PetscCall(DMPlexGetSupportSize(dm, face, &suppSize)); + PetscCall(DMPlexGetSupport(dm, face, &supp)); + for (PetscInt s = 0; s < suppSize; ++s) { + PetscInt ind, l; + + // Filter support + ind = GetPointIndex(supp[s], cStart, cEnd, cells); + if (ind < 0) continue; + // Ignore overlapping cells + PetscCall(PetscFindInt(supp[s], numLeaves, lpoints, &l)); + if (l >= 0) continue; + locSupp[Ns++] = supp[s]; + } + PetscCheck(Ns < maxSuppSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Index %" PetscInt_FMT " exceeds array size %" PetscInt_FMT, Ns, maxSuppSize); + if (Ns != 1) continue; + neighbor = locSupp[0]; + nind = GetPointIndex(neighbor, cStart, cEnd, cells); + PetscCall(DMPlexGetCone(dm, neighbor, &cone)); + PetscCall(DMPlexGetConeSize(dm, neighbor, &coneSize)); + PetscCall(DMPlexGetConeOrientation(dm, neighbor, &ornt)); + for (c = 0; c < coneSize; ++c) + if (cone[c] == face) break; + if (dim == 1) { + /* Use cone position instead, shifted to -1 or 1 */ + if (PetscBTLookup(flippedCells, nind)) rorntComp[face].rank = 1 - c * 2; + else rorntComp[face].rank = c * 2 - 1; + } else { + if (PetscBTLookup(flippedCells, nind)) rorntComp[face].rank = ornt[c] < 0 ? -1 : 1; + else rorntComp[face].rank = ornt[c] < 0 ? 
1 : -1; + } + rorntComp[face].index = faceComp[GetPointIndex(face, fStart, fEnd, faces)]; + } + // Communicate boundary edge orientations + PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, rorntComp, lorntComp, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, rorntComp, lorntComp, MPI_REPLACE)); + } + /* Get process adjacency */ + PetscCall(PetscMalloc2(Ncomp, &numNeighbors, Ncomp, &neighbors)); + viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)dm)); + if (viewSync) PetscCall(PetscViewerASCIIPushSynchronized(viewer)); + PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &selfviewer)); + for (PetscInt comp = 0; comp < Ncomp; ++comp) { + PetscInt n; + + numNeighbors[comp] = 0; + PetscCall(PetscMalloc1(PetscMax(numLeaves, 0), &neighbors[comp])); + /* I know this is p^2 time in general, but for bounded degree its alright */ + for (PetscInt l = 0; l < numLeaves; ++l) { + const PetscInt face = lpoints[l]; + PetscInt find; + + /* Find a representative face (edge) separating pairs of procs */ + find = GetPointIndex(face, fStart, fEnd, faces); + if ((find >= 0) && (faceComp[find] == comp) && rorntComp[face].rank) { + const PetscInt rrank = rpoints[l].rank; + const PetscInt rcomp = lorntComp[face].index; + + for (n = 0; n < numNeighbors[comp]; ++n) + if ((rrank == rpoints[neighbors[comp][n]].rank) && (rcomp == lorntComp[lpoints[neighbors[comp][n]]].index)) break; + if (n >= numNeighbors[comp]) { + const PetscInt *supp; + PetscInt suppSize, Ns = 0; + + PetscCall(DMPlexGetSupport(dm, face, &supp)); + PetscCall(DMPlexGetSupportSize(dm, face, &suppSize)); + for (PetscInt s = 0; s < suppSize; ++s) { + // Filter support + if (GetPointIndex(supp[s], cStart, cEnd, cells) >= 0) ++Ns; + } + PetscCheck(Ns == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Boundary face %" PetscInt_FMT " should see one cell, not %" PetscInt_FMT, face, Ns); + if (view) + PetscCall(PetscViewerASCIIPrintf(selfviewer, "[%d]: component %" PetscInt_FMT ", Found representative leaf %" PetscInt_FMT " (face %" PetscInt_FMT ") connecting to face %" PetscInt_FMT " on (%" PetscInt_FMT ", %" PetscInt_FMT ") with orientation %" PetscInt_FMT "\n", rank, comp, l, face, + rpoints[l].index, rrank, rcomp, lorntComp[face].rank)); + neighbors[comp][numNeighbors[comp]++] = l; + } + } + } + totNeighbors += numNeighbors[comp]; + } + PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &selfviewer)); + if (viewSync) PetscCall(PetscViewerASCIIPopSynchronized(viewer)); + PetscCall(PetscMalloc2(totNeighbors, &nrankComp, totNeighbors, &match)); + for (PetscInt comp = 0, off = 0; comp < Ncomp; ++comp) { + for (PetscInt n = 0; n < numNeighbors[comp]; ++n, ++off) { + const PetscInt face = lpoints[neighbors[comp][n]]; + const PetscInt o = rorntComp[face].rank * lorntComp[face].rank; + + if (o < 0) match[off] = PETSC_TRUE; + else if (o > 0) match[off] = PETSC_FALSE; + else + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid face %" PetscInt_FMT " (%" PetscInt_FMT ", %" PetscInt_FMT ") neighbor: %" PetscInt_FMT " comp: %" PetscInt_FMT, face, rorntComp[face].rank, lorntComp[face].rank, neighbors[comp][n], comp); + nrankComp[off].rank = rpoints[neighbors[comp][n]].rank; + nrankComp[off].index = lorntComp[lpoints[neighbors[comp][n]]].index; + } + PetscCall(PetscFree(neighbors[comp])); + } + /* Collect the graph on 0 */ + if (numLeaves >= 0) { + Mat G; + PetscBT seenProcs, flippedProcs; + PetscInt *procFIFO, pTop, pBottom; + PetscInt *N = NULL, *Noff; + PetscSFNode *adj = NULL; + PetscBool *val = NULL; + PetscMPIInt *recvcounts = NULL, 
*displs = NULL, *Nc; + PetscMPIInt size = 0; + + PetscCall(PetscCalloc1(Ncomp, &flipped)); + if (rank == 0) PetscCallMPI(MPI_Comm_size(comm, &size)); + PetscCall(PetscCalloc4(size, &recvcounts, size + 1, &displs, size, &Nc, size + 1, &Noff)); + PetscCallMPI(MPI_Gather(&Ncomp, 1, MPI_INT, Nc, 1, MPI_INT, 0, comm)); + for (PetscInt p = 0; p < size; ++p) displs[p + 1] = displs[p] + Nc[p]; + if (rank == 0) PetscCall(PetscMalloc1(displs[size], &N)); + PetscCallMPI(MPI_Gatherv(numNeighbors, Ncomp, MPIU_INT, N, Nc, displs, MPIU_INT, 0, comm)); + for (PetscInt p = 0, o = 0; p < size; ++p) { + recvcounts[p] = 0; + for (PetscInt c = 0; c < Nc[p]; ++c, ++o) recvcounts[p] += N[o]; + displs[p + 1] = displs[p] + recvcounts[p]; + } + if (rank == 0) PetscCall(PetscMalloc2(displs[size], &adj, displs[size], &val)); + PetscCallMPI(MPI_Gatherv(nrankComp, totNeighbors, MPIU_2INT, adj, recvcounts, displs, MPIU_2INT, 0, comm)); + PetscCallMPI(MPI_Gatherv(match, totNeighbors, MPIU_BOOL, val, recvcounts, displs, MPIU_BOOL, 0, comm)); + PetscCall(PetscFree2(numNeighbors, neighbors)); + if (rank == 0) { + for (PetscInt p = 1; p <= size; ++p) Noff[p] = Noff[p - 1] + Nc[p - 1]; + if (view) { + for (PetscInt p = 0, off = 0; p < size; ++p) { + for (PetscInt c = 0; c < Nc[p]; ++c) { + PetscCall(PetscPrintf(PETSC_COMM_SELF, "Proc %" PetscInt_FMT " Comp %" PetscInt_FMT ":\n", p, c)); + for (PetscInt n = 0; n < N[Noff[p] + c]; ++n, ++off) PetscCall(PetscPrintf(PETSC_COMM_SELF, " edge (%" PetscInt_FMT ", %" PetscInt_FMT ") (%s):\n", adj[off].rank, adj[off].index, PetscBools[val[off]])); + } + } + } + /* Symmetrize the graph */ + PetscCall(MatCreate(PETSC_COMM_SELF, &G)); + PetscCall(MatSetSizes(G, Noff[size], Noff[size], Noff[size], Noff[size])); + PetscCall(MatSetUp(G)); + for (PetscInt p = 0, off = 0; p < size; ++p) { + for (PetscInt c = 0; c < Nc[p]; ++c) { + const PetscInt r = Noff[p] + c; + + for (PetscInt n = 0; n < N[r]; ++n, ++off) { + const PetscInt q = Noff[adj[off].rank] + adj[off].index; + const PetscScalar o = val[off] ? 1.0 : 0.0; + + PetscCall(MatSetValues(G, 1, &r, 1, &q, &o, INSERT_VALUES)); + PetscCall(MatSetValues(G, 1, &q, 1, &r, &o, INSERT_VALUES)); + } + } + } + PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY)); + + PetscCall(PetscBTCreate(Noff[size], &seenProcs)); + PetscCall(PetscBTMemzero(Noff[size], seenProcs)); + PetscCall(PetscBTCreate(Noff[size], &flippedProcs)); + PetscCall(PetscBTMemzero(Noff[size], flippedProcs)); + PetscCall(PetscMalloc1(Noff[size], &procFIFO)); + pTop = pBottom = 0; + for (PetscInt p = 0; p < Noff[size]; ++p) { + if (PetscBTLookup(seenProcs, p)) continue; + /* Initialize FIFO with next proc */ + procFIFO[pBottom++] = p; + PetscCall(PetscBTSet(seenProcs, p)); + /* Consider each proc in FIFO */ + while (pTop < pBottom) { + const PetscScalar *ornt; + const PetscInt *neighbors; + PetscInt proc, nproc, seen, flippedA, flippedB, mismatch, numNeighbors; + + proc = procFIFO[pTop++]; + flippedA = PetscBTLookup(flippedProcs, proc) ? 1 : 0; + PetscCall(MatGetRow(G, proc, &numNeighbors, &neighbors, &ornt)); + /* Loop over neighboring procs */ + for (PetscInt n = 0; n < numNeighbors; ++n) { + nproc = neighbors[n]; + mismatch = PetscRealPart(ornt[n]) > 0.5 ? 0 : 1; + seen = PetscBTLookup(seenProcs, nproc); + flippedB = PetscBTLookup(flippedProcs, nproc) ? 
1 : 0; + + if (mismatch ^ (flippedA ^ flippedB)) { + PetscCheck(!seen, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Previously seen procs %" PetscInt_FMT " and %" PetscInt_FMT " do not match: Fault mesh is non-orientable", proc, nproc); + if (!flippedB) { + PetscCall(PetscBTSet(flippedProcs, nproc)); + } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent mesh orientation: Fault mesh is non-orientable"); + } else PetscCheck(!mismatch || !flippedA || !flippedB, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Attempt to flip already flipped cell: Fault mesh is non-orientable"); + if (!seen) { + procFIFO[pBottom++] = nproc; + PetscCall(PetscBTSet(seenProcs, nproc)); + } + } + } + } + PetscCall(PetscFree(procFIFO)); + PetscCall(MatDestroy(&G)); + PetscCall(PetscFree2(adj, val)); + PetscCall(PetscBTDestroy(&seenProcs)); + } + /* Scatter flip flags */ + { + PetscBool *flips = NULL; + + if (rank == 0) { + PetscCall(PetscMalloc1(Noff[size], &flips)); + for (PetscInt p = 0; p < Noff[size]; ++p) { + flips[p] = PetscBTLookup(flippedProcs, p) ? PETSC_TRUE : PETSC_FALSE; + if (view && flips[p]) PetscCall(PetscPrintf(comm, "Flipping Proc+Comp %" PetscInt_FMT ":\n", p)); + } + for (PetscInt p = 0; p < size; ++p) displs[p + 1] = displs[p] + Nc[p]; + } + PetscCallMPI(MPI_Scatterv(flips, Nc, displs, MPIU_BOOL, flipped, Ncomp, MPIU_BOOL, 0, comm)); + PetscCall(PetscFree(flips)); + } + if (rank == 0) PetscCall(PetscBTDestroy(&flippedProcs)); + PetscCall(PetscFree(N)); + PetscCall(PetscFree4(recvcounts, displs, Nc, Noff)); + PetscCall(PetscFree2(nrankComp, match)); + + /* Decide whether to flip cells in each component */ + for (PetscInt c = 0; c < cEnd - cStart; ++c) { + if (flipped[cellComp[c]]) PetscCall(PetscBTNegate(flippedCells, c)); + } + PetscCall(PetscFree(flipped)); + } + if (view) { + PetscViewer v; + + PetscCall(PetscViewerASCIIGetStdout(comm, &v)); + PetscCall(PetscViewerASCIIPushSynchronized(v)); + PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d]BT for parallel flipped cells:\n", rank)); + PetscCall(PetscBTView(cEnd - cStart, flippedCells, v)); + PetscCall(PetscViewerFlush(v)); + PetscCall(PetscViewerASCIIPopSynchronized(v)); + } + // Reverse flipped cells in the mesh + PetscViewer v; + const PetscInt *degree = NULL; + PetscInt *points; + PetscInt pStart, pEnd; + + if (view) { + PetscCall(PetscViewerASCIIGetStdout(comm, &v)); + PetscCall(PetscViewerASCIIPushSynchronized(v)); + } + PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); + if (numRoots >= 0) { + PetscCall(PetscSFComputeDegreeBegin(sf, °ree)); + PetscCall(PetscSFComputeDegreeEnd(sf, °ree)); + } + PetscCall(PetscCalloc1(pEnd - pStart, &points)); + for (PetscInt c = cStart; c < cEnd; ++c) { + if (PetscBTLookup(flippedCells, c - cStart)) { + const PetscInt cell = cells ? cells[c] : c; + + PetscCall(DMPlexOrientPoint(dm, cell, -1)); + if (degree && degree[cell]) points[cell] = 1; + if (view) PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d]Flipping cell %" PetscInt_FMT "%s\n", rank, cell, degree && degree[cell] ? " and sending to overlap" : "")); + } + } + // Must propagate flips for cells in the overlap + if (numRoots >= 0) { + PetscCall(PetscSFBcastBegin(sf, MPIU_INT, points, points, MPI_SUM)); + PetscCall(PetscSFBcastEnd(sf, MPIU_INT, points, points, MPI_SUM)); + } + for (PetscInt c = cStart; c < cEnd; ++c) { + const PetscInt cell = cells ? 
cells[c] : c; + + if (points[cell] && !PetscBTLookup(flippedCells, c - cStart)) { + PetscCall(DMPlexOrientPoint(dm, cell, -1)); + if (view) PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d]Flipping cell %" PetscInt_FMT " through overlap\n", rank, cell)); + } + } + if (view) { + PetscCall(PetscViewerFlush(v)); + PetscCall(PetscViewerASCIIPopSynchronized(v)); + } + PetscCall(PetscFree(points)); + PetscCall(PetscBTDestroy(&flippedCells)); + PetscCall(PetscFree2(numNeighbors, neighbors)); + PetscCall(PetscFree3(rorntComp, lorntComp, locSupp)); + PetscCall(PetscFree2(cellComp, faceComp)); + PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/dm/impls/plex/plexpartition.c b/src/dm/impls/plex/plexpartition.c index 3a040a918af..b54e19b2227 100644 --- a/src/dm/impls/plex/plexpartition.c +++ b/src/dm/impls/plex/plexpartition.c @@ -746,7 +746,7 @@ PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, Pet { PetscMPIInt size; PetscBool isplex; - PetscSection vertSection = NULL; + PetscSection vertSection = NULL, edgeSection = NULL; PetscFunctionBegin; PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); @@ -836,7 +836,24 @@ PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, Pet if (clPoints) PetscCall(ISRestoreIndices(clPoints, &clIdx)); PetscCall(PetscSectionSetUp(vertSection)); } - PetscCall(PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition)); + if (part->useewgt) { + const PetscInt numEdges = start[numVertices]; + + PetscCall(PetscSectionCreate(PETSC_COMM_SELF, &edgeSection)); + PetscCall(PetscSectionSetChart(edgeSection, 0, numEdges)); + for (PetscInt e = 0; e < start[numVertices]; ++e) PetscCall(PetscSectionSetDof(edgeSection, e, 1)); + for (PetscInt v = 0; v < numVertices; ++v) { + DMPolytopeType ct; + + // Assume v is the cell number + PetscCall(DMPlexGetCellType(dm, v, &ct)); + if (ct != DM_POLYTOPE_POINT_PRISM_TENSOR && ct != DM_POLYTOPE_SEG_PRISM_TENSOR && ct != DM_POLYTOPE_TRI_PRISM_TENSOR && ct != DM_POLYTOPE_QUAD_PRISM_TENSOR) continue; + + for (PetscInt e = start[v]; e < start[v + 1]; ++e) PetscCall(PetscSectionSetDof(edgeSection, e, 3)); + } + PetscCall(PetscSectionSetUp(edgeSection)); + } + PetscCall(PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, edgeSection, targetSection, partSection, partition)); PetscCall(PetscFree(start)); PetscCall(PetscFree(adjacency)); if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */ @@ -866,6 +883,7 @@ PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, Pet } } else SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %" PetscInt_FMT " for points to partition", part->height); PetscCall(PetscSectionDestroy(&vertSection)); + PetscCall(PetscSectionDestroy(&edgeSection)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/dm/impls/plex/plexply.c b/src/dm/impls/plex/plexply.c index 408df9a1d7f..7488a8c708f 100644 --- a/src/dm/impls/plex/plexply.c +++ b/src/dm/impls/plex/plexply.c @@ -1,7 +1,7 @@ #define PETSCDM_DLL #include /*I "petscdmplex.h" I*/ -/*@C +/*@ DMPlexCreatePLYFromFile - Create a `DMPLEX` mesh from a PLY file. 
Input Parameters: diff --git a/src/dm/impls/plex/plexpreallocate.c b/src/dm/impls/plex/plexpreallocate.c index 0a286c05095..b71e09cc6cf 100644 --- a/src/dm/impls/plex/plexpreallocate.c +++ b/src/dm/impls/plex/plexpreallocate.c @@ -662,7 +662,7 @@ static PetscErrorCode DMPlexFillMatrix_Static(DM dm, PetscLayout rLayout, PetscI PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexPreallocateOperator - Calculate the matrix nonzero pattern based upon the information in the `DM`, the `PetscDS` it contains, and the default `PetscSection`. diff --git a/src/dm/impls/plex/plexproject.c b/src/dm/impls/plex/plexproject.c index 8c2d103dda0..eb2a3b4d6b5 100644 --- a/src/dm/impls/plex/plexproject.c +++ b/src/dm/impls/plex/plexproject.c @@ -274,7 +274,9 @@ static PetscErrorCode DMProjectPoint_Field_Private(DM dm, PetscDS ds, DM dmIn, D } continue; } + const PetscInt ***perms; PetscCall(PetscDualSpaceGetDM(sp[f], &dm)); + PetscCall(PetscDualSpaceGetSymmetries(sp[f], &perms, NULL)); PetscCall(PetscDualSpaceGetAllData(sp[f], &allPoints, NULL)); PetscCall(PetscQuadratureGetData(allPoints, &dim, NULL, &numPoints, &points, NULL)); PetscCall(DMGetWorkArray(dm, numPoints * Nc[f], MPIU_SCALAR, &pointEval)); @@ -282,8 +284,8 @@ static PetscErrorCode DMProjectPoint_Field_Private(DM dm, PetscDS ds, DM dmIn, D PetscInt qpt[2]; if (isCohesiveIn) { - PetscCall(PetscDSPermuteQuadPoint(dsIn, ornt[0], f, q, &qpt[0])); - PetscCall(PetscDSPermuteQuadPoint(dsIn, DMPolytopeTypeComposeOrientationInv(qct, ornt[1], 0), f, q, &qpt[1])); + qpt[0] = perms ? perms[0][ornt[0]][q] : q; + qpt[1] = perms ? perms[0][DMPolytopeTypeComposeOrientationInv(qct, ornt[1], 0)][q] : q; } if (isAffine) { CoordinatesRefToReal(dE, cgeom->dim, fegeom.xi, cgeom->v, fegeom.J, &points[q * dim], x); @@ -427,7 +429,7 @@ static PetscErrorCode DMProjectPoint_BdField_Private(DM dm, PetscDS ds, DM dmIn, if (isCohesiveIn) { // These points are not integration quadratures, but dual space quadratures - // If they had multiple points we should match them from both sides, simmilar to hybrid residual eval + // If they had multiple points we should match them from both sides, similar to hybrid residual eval qpt[0] = qpt[1] = q; } if (isAffine) { @@ -547,7 +549,7 @@ static PetscErrorCode PetscDualSpaceGetAllPointsUnion(PetscInt Nf, PetscDualSpac Output Parameters: + point - the first labeled point -- ds - the ds corresponding to the first labeled point +- ds - the `PetscDS` corresponding to the first labeled point Level: developer diff --git a/src/dm/impls/plex/plexrefine.c b/src/dm/impls/plex/plexrefine.c index bdc051111d2..0621cc43a22 100644 --- a/src/dm/impls/plex/plexrefine.c +++ b/src/dm/impls/plex/plexrefine.c @@ -1,7 +1,7 @@ #include /*I "petscdmplex.h" I*/ #include /* For PetscFEInterpolate_Static() */ -#include +#include /*I "petscdmplextransform.h" I*/ #include /*@ diff --git a/src/dm/impls/plex/plexreorder.c b/src/dm/impls/plex/plexreorder.c index fea8a890a24..caddd5d918a 100644 --- a/src/dm/impls/plex/plexreorder.c +++ b/src/dm/impls/plex/plexreorder.c @@ -51,7 +51,7 @@ static PetscErrorCode DMPlexCreateOrderingClosure_Static(DM dm, PetscInt numPoin Collective Input Parameters: -+ dm - The DMPlex object ++ dm - The `DMPLEX` object . 
otype - type of reordering, see `MatOrderingType` - label - [Optional] Label used to segregate ordering into sets, or `NULL` @@ -446,7 +446,7 @@ static PetscErrorCode DMCreateSectionPermutation_Plex_Reverse(DM dm, IS *permuta } // Reorder to group split nodes -static PetscErrorCode DMCreateSectionPermutation_Plex_Cohesive(DM dm, IS *permutation, PetscBT *blockStarts) +static PetscErrorCode DMCreateSectionPermutation_Plex_Cohesive_Old(DM dm, IS *permutation, PetscBT *blockStarts) { IS permIS; PetscBT bt, blst; @@ -489,6 +489,35 @@ static PetscErrorCode DMCreateSectionPermutation_Plex_Cohesive(DM dm, IS *permut if (!PetscBTLookupSet(bt, q)) { perm[i++] = q; s = suppSize; + // At T-junctions, we can have an unsplit point at the other end, so also order that loop + { + const PetscInt *qsupp, *qcone; + PetscInt qsuppSize; + + PetscCall(DMPlexGetSupport(dm, q, &qsupp)); + PetscCall(DMPlexGetSupportSize(dm, q, &qsuppSize)); + for (PetscInt qs = 0; qs < qsuppSize; ++qs) { + DMPolytopeType qsct; + + PetscCall(DMPlexGetCellType(dm, qsupp[qs], &qsct)); + switch (qsct) { + case DM_POLYTOPE_POINT_PRISM_TENSOR: + case DM_POLYTOPE_SEG_PRISM_TENSOR: + case DM_POLYTOPE_TRI_PRISM_TENSOR: + case DM_POLYTOPE_QUAD_PRISM_TENSOR: + PetscCall(DMPlexGetCone(dm, qsupp[qs], &qcone)); + if (qcone[0] == qcone[1]) { + if (!PetscBTLookupSet(bt, qsupp[qs])) { + perm[i++] = qsupp[qs]; + qs = qsuppSize; + } + } + break; + default: + break; + } + } + } } if (!PetscBTLookupSet(bt, qq)) { perm[i++] = qq; @@ -500,6 +529,78 @@ static PetscErrorCode DMCreateSectionPermutation_Plex_Cohesive(DM dm, IS *permut } } } + if (PetscDefined(USE_DEBUG)) { + for (PetscInt p = pStart; p < pEnd; ++p) PetscCheck(PetscBTLookup(bt, p), PETSC_COMM_SELF, PETSC_ERR_PLIB, "Index %" PetscInt_FMT " missed in permutation of [%" PetscInt_FMT ", %" PetscInt_FMT ")", p, pStart, pEnd); + } + PetscCall(PetscBTDestroy(&bt)); + PetscCheck(i == pEnd - pStart, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of points in permutation %" PetscInt_FMT " does not match chart size %" PetscInt_FMT, i, pEnd - pStart); + PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pEnd - pStart, perm, PETSC_OWN_POINTER, &permIS)); + PetscCall(ISSetPermutation(permIS)); + *permutation = permIS; + *blockStarts = blst; + PetscFunctionReturn(PETSC_SUCCESS); +} + +// Mark the block associated with a cohesive cell p +static PetscErrorCode InsertCohesiveBlock_Private(DM dm, PetscBT bt, PetscBT blst, PetscInt p, PetscInt *idx, PetscInt perm[]) +{ + const PetscInt *cone; + PetscInt cS; + + PetscFunctionBegin; + if (PetscBTLookupSet(bt, p)) PetscFunctionReturn(PETSC_SUCCESS); + // Order the endcaps + PetscCall(DMPlexGetCone(dm, p, &cone)); + PetscCall(DMPlexGetConeSize(dm, p, &cS)); + if (blst) PetscCall(PetscBTSet(blst, cone[0])); + if (!PetscBTLookupSet(bt, cone[0])) perm[(*idx)++] = cone[0]; + if (!PetscBTLookupSet(bt, cone[1])) perm[(*idx)++] = cone[1]; + // Order sides + for (PetscInt c = 2; c < cS; ++c) PetscCall(InsertCohesiveBlock_Private(dm, bt, NULL, cone[c], idx, perm)); + // Order cell + perm[(*idx)++] = p; + PetscFunctionReturn(PETSC_SUCCESS); +} + +// Reorder to group split nodes +static PetscErrorCode DMCreateSectionPermutation_Plex_Cohesive(DM dm, IS *permutation, PetscBT *blockStarts) +{ + IS permIS; + PetscBT bt, blst; + PetscInt *perm; + PetscInt dim, pStart, pEnd, i = 0; + + PetscFunctionBegin; + PetscCall(DMGetDimension(dm, &dim)); + PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); + PetscCall(PetscMalloc1(pEnd - pStart, &perm)); + PetscCall(PetscBTCreate(pEnd - pStart, &bt)); + 
PetscCall(PetscBTCreate(pEnd - pStart, &blst)); + // Add cohesive blocks + for (PetscInt p = pStart; p < pEnd; ++p) { + DMPolytopeType ct; + + PetscCall(DMPlexGetCellType(dm, p, &ct)); + switch (dim) { + case 2: + if (ct == DM_POLYTOPE_SEG_PRISM_TENSOR) PetscCall(InsertCohesiveBlock_Private(dm, bt, blst, p, &i, perm)); + break; + case 3: + if (ct == DM_POLYTOPE_TRI_PRISM_TENSOR || ct == DM_POLYTOPE_QUAD_PRISM_TENSOR) PetscCall(InsertCohesiveBlock_Private(dm, bt, blst, p, &i, perm)); + break; + default: + break; + } + } + // Add normal blocks + for (PetscInt p = pStart; p < pEnd; ++p) { + if (PetscBTLookupSet(bt, p)) continue; + PetscCall(PetscBTSet(blst, p)); + perm[i++] = p; + } + if (PetscDefined(USE_DEBUG)) { + for (PetscInt p = pStart; p < pEnd; ++p) PetscCheck(PetscBTLookup(bt, p), PETSC_COMM_SELF, PETSC_ERR_PLIB, "Index %" PetscInt_FMT " missed in permutation of [%" PetscInt_FMT ", %" PetscInt_FMT ")", p, pStart, pEnd); + } PetscCall(PetscBTDestroy(&bt)); PetscCheck(i == pEnd - pStart, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of points in permutation %" PetscInt_FMT " does not match chart size %" PetscInt_FMT, i, pEnd - pStart); PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pEnd - pStart, perm, PETSC_OWN_POINTER, &permIS)); @@ -513,17 +614,20 @@ PetscErrorCode DMCreateSectionPermutation_Plex(DM dm, IS *perm, PetscBT *blockSt { DMReorderDefaultFlag reorder; MatOrderingType otype; - PetscBool iscohesive, isreverse; + PetscBool iscohesive, iscohesiveOld, isreverse; PetscFunctionBegin; PetscCall(DMReorderSectionGetDefault(dm, &reorder)); if (reorder != DM_REORDER_DEFAULT_TRUE) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(DMReorderSectionGetType(dm, &otype)); if (!otype) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(PetscStrncmp(otype, "cohesive_old", 1024, &iscohesiveOld)); PetscCall(PetscStrncmp(otype, "cohesive", 1024, &iscohesive)); PetscCall(PetscStrncmp(otype, "reverse", 1024, &isreverse)); if (iscohesive) { PetscCall(DMCreateSectionPermutation_Plex_Cohesive(dm, perm, blockStarts)); + } else if (iscohesiveOld) { + PetscCall(DMCreateSectionPermutation_Plex_Cohesive_Old(dm, perm, blockStarts)); } else if (isreverse) { PetscCall(DMCreateSectionPermutation_Plex_Reverse(dm, perm, blockStarts)); } diff --git a/src/dm/impls/plex/plexsfc.c b/src/dm/impls/plex/plexsfc.c index 55af1739a7f..d4e99f20945 100644 --- a/src/dm/impls/plex/plexsfc.c +++ b/src/dm/impls/plex/plexsfc.c @@ -179,14 +179,13 @@ static PetscInt ZCodeFind(ZCode key, PetscInt n, const ZCode X[]) return key == X[lo] ? 
lo : -(lo + (key > X[lo]) + 1); } -static PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Periodicity_Private(DM dm, const ZLayout *layout, const ZCode *vert_z, PetscSegBuffer per_faces, const PetscReal *lower, const PetscReal *upper, const DMBoundaryType *periodicity, PetscSegBuffer donor_face_closure, PetscSegBuffer my_donor_faces) +static PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Periodicity_Private(DM dm, const ZLayout *layout, const ZCode *vert_z, PetscSegBuffer per_faces[3], const PetscReal *lower, const PetscReal *upper, const DMBoundaryType *periodicity, PetscSegBuffer donor_face_closure[3], PetscSegBuffer my_donor_faces[3]) { - MPI_Comm comm; - size_t num_faces; - PetscInt dim, *faces, vStart, vEnd; - PetscMPIInt size; - ZCode *donor_verts, *donor_minz; - PetscSFNode *leaf; + MPI_Comm comm; + PetscInt dim, vStart, vEnd; + PetscMPIInt size; + PetscSF face_sfs[3]; + PetscScalar transforms[3][4][4] = {{{0}}}; PetscFunctionBegin; PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); @@ -194,93 +193,108 @@ static PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Periodicity_Private(DM dm, PetscCall(DMGetDimension(dm, &dim)); const PetscInt csize = PetscPowInt(2, dim - 1); PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd)); - PetscCall(PetscSegBufferGetSize(per_faces, &num_faces)); - PetscCall(PetscSegBufferExtractInPlace(per_faces, &faces)); - PetscCall(PetscSegBufferExtractInPlace(donor_face_closure, &donor_verts)); - PetscCall(PetscMalloc1(num_faces, &donor_minz)); - PetscCall(PetscMalloc1(num_faces, &leaf)); - for (PetscInt i = 0; i < (PetscInt)num_faces; i++) { - ZCode minz = donor_verts[i * csize]; - for (PetscInt j = 1; j < csize; j++) minz = PetscMin(minz, donor_verts[i * csize + j]); - donor_minz[i] = minz; - } - { - PetscBool sorted; - PetscCall(PetscSortedInt64(num_faces, (const PetscInt64 *)donor_minz, &sorted)); - PetscCheck(sorted, PETSC_COMM_SELF, PETSC_ERR_PLIB, "minz not sorted; periodicity in multiple dimensions not yet supported"); - } - for (PetscInt i = 0; i < (PetscInt)num_faces;) { - ZCode z = donor_minz[i]; - PetscInt remote_rank = ZCodeFind(z, size + 1, layout->zstarts), remote_count = 0; - if (remote_rank < 0) remote_rank = -(remote_rank + 1) - 1; - // Process all the vertices on this rank - for (ZCode rz = layout->zstarts[remote_rank]; rz < layout->zstarts[remote_rank + 1]; rz++) { - Ijk loc = ZCodeSplit(rz); - if (rz == z) { - leaf[i].rank = remote_rank; - leaf[i].index = remote_count; - i++; - if (i == (PetscInt)num_faces) break; - z = donor_minz[i]; + + PetscInt num_directions = 0; + for (PetscInt direction = 0; direction < dim; direction++) { + size_t num_faces; + PetscInt *faces; + ZCode *donor_verts, *donor_minz; + PetscSFNode *leaf; + + if (periodicity[direction] != DM_BOUNDARY_PERIODIC) continue; + PetscCall(PetscSegBufferGetSize(per_faces[direction], &num_faces)); + PetscCall(PetscSegBufferExtractInPlace(per_faces[direction], &faces)); + PetscCall(PetscSegBufferExtractInPlace(donor_face_closure[direction], &donor_verts)); + PetscCall(PetscMalloc1(num_faces, &donor_minz)); + PetscCall(PetscMalloc1(num_faces, &leaf)); + for (PetscInt i = 0; i < (PetscInt)num_faces; i++) { + ZCode minz = donor_verts[i * csize]; + for (PetscInt j = 1; j < csize; j++) minz = PetscMin(minz, donor_verts[i * csize + j]); + donor_minz[i] = minz; + } + { + PetscBool sorted; + PetscCall(PetscSortedInt64(num_faces, (const PetscInt64 *)donor_minz, &sorted)); + // If a donor vertex were chosen to broker multiple faces, we would have a logic error. 
+ // Checking for sorting is a cheap check that there are no duplicates. + PetscCheck(sorted, PETSC_COMM_SELF, PETSC_ERR_PLIB, "minz not sorted; possible duplicates not checked"); + } + for (PetscInt i = 0; i < (PetscInt)num_faces;) { + ZCode z = donor_minz[i]; + PetscInt remote_rank = ZCodeFind(z, size + 1, layout->zstarts), remote_count = 0; + if (remote_rank < 0) remote_rank = -(remote_rank + 1) - 1; + // Process all the vertices on this rank + for (ZCode rz = layout->zstarts[remote_rank]; rz < layout->zstarts[remote_rank + 1]; rz++) { + Ijk loc = ZCodeSplit(rz); + if (rz == z) { + leaf[i].rank = remote_rank; + leaf[i].index = remote_count; + i++; + if (i == (PetscInt)num_faces) break; + z = donor_minz[i]; + } + if (IjkActive(layout->vextent, loc)) remote_count++; } - if (IjkActive(layout->vextent, loc)) remote_count++; } + PetscCall(PetscFree(donor_minz)); + PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), &face_sfs[num_directions])); + PetscCall(PetscSFSetGraph(face_sfs[num_directions], vEnd - vStart, num_faces, NULL, PETSC_USE_POINTER, leaf, PETSC_USE_POINTER)); + const PetscInt *my_donor_degree; + PetscCall(PetscSFComputeDegreeBegin(face_sfs[num_directions], &my_donor_degree)); + PetscCall(PetscSFComputeDegreeEnd(face_sfs[num_directions], &my_donor_degree)); + PetscInt num_multiroots = 0; + for (PetscInt i = 0; i < vEnd - vStart; i++) { + num_multiroots += my_donor_degree[i]; + if (my_donor_degree[i] == 0) continue; + PetscAssert(my_donor_degree[i] == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Local vertex has multiple faces"); + } + PetscInt *my_donors, *donor_indices, *my_donor_indices; + size_t num_my_donors; + PetscCall(PetscSegBufferGetSize(my_donor_faces[direction], &num_my_donors)); + PetscCheck((PetscInt)num_my_donors == num_multiroots, PETSC_COMM_SELF, PETSC_ERR_SUP, "Donor request does not match expected donors"); + PetscCall(PetscSegBufferExtractInPlace(my_donor_faces[direction], &my_donors)); + PetscCall(PetscMalloc1(vEnd - vStart, &my_donor_indices)); + for (PetscInt i = 0; i < (PetscInt)num_my_donors; i++) { + PetscInt f = my_donors[i]; + PetscInt num_points, *points = NULL, minv = PETSC_MAX_INT; + PetscCall(DMPlexGetTransitiveClosure(dm, f, PETSC_TRUE, &num_points, &points)); + for (PetscInt j = 0; j < num_points; j++) { + PetscInt p = points[2 * j]; + if (p < vStart || vEnd <= p) continue; + minv = PetscMin(minv, p); + } + PetscCall(DMPlexRestoreTransitiveClosure(dm, f, PETSC_TRUE, &num_points, &points)); + PetscAssert(my_donor_degree[minv - vStart] == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Local vertex not requested"); + my_donor_indices[minv - vStart] = f; + } + PetscCall(PetscMalloc1(num_faces, &donor_indices)); + PetscCall(PetscSFBcastBegin(face_sfs[num_directions], MPIU_INT, my_donor_indices, donor_indices, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(face_sfs[num_directions], MPIU_INT, my_donor_indices, donor_indices, MPI_REPLACE)); + PetscCall(PetscFree(my_donor_indices)); + // Modify our leafs so they point to donor faces instead of donor minz. Additionally, give them indices as faces. 
+ for (PetscInt i = 0; i < (PetscInt)num_faces; i++) leaf[i].index = donor_indices[i]; + PetscCall(PetscFree(donor_indices)); + PetscInt pStart, pEnd; + PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); + PetscCall(PetscSFSetGraph(face_sfs[num_directions], pEnd - pStart, num_faces, faces, PETSC_COPY_VALUES, leaf, PETSC_OWN_POINTER)); + { + char face_sf_name[PETSC_MAX_PATH_LEN]; + PetscCall(PetscSNPrintf(face_sf_name, sizeof face_sf_name, "Z-order Isoperiodic Faces #%" PetscInt_FMT, num_directions)); + PetscCall(PetscObjectSetName((PetscObject)face_sfs[num_directions], face_sf_name)); + } + + transforms[num_directions][0][0] = 1; + transforms[num_directions][1][1] = 1; + transforms[num_directions][2][2] = 1; + transforms[num_directions][3][3] = 1; + transforms[num_directions][direction][3] = upper[direction] - lower[direction]; + num_directions++; } - PetscCall(PetscFree(donor_minz)); - PetscSF sfper; - PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), &sfper)); - PetscCall(PetscSFSetGraph(sfper, vEnd - vStart, num_faces, NULL, PETSC_USE_POINTER, leaf, PETSC_USE_POINTER)); - const PetscInt *my_donor_degree; - PetscCall(PetscSFComputeDegreeBegin(sfper, &my_donor_degree)); - PetscCall(PetscSFComputeDegreeEnd(sfper, &my_donor_degree)); - PetscInt num_multiroots = 0; - for (PetscInt i = 0; i < vEnd - vStart; i++) { - num_multiroots += my_donor_degree[i]; - if (my_donor_degree[i] == 0) continue; - PetscAssert(my_donor_degree[i] == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Local vertex has multiple faces"); - } - PetscInt *my_donors, *donor_indices, *my_donor_indices; - size_t num_my_donors; - PetscCall(PetscSegBufferGetSize(my_donor_faces, &num_my_donors)); - PetscCheck((PetscInt)num_my_donors == num_multiroots, PETSC_COMM_SELF, PETSC_ERR_SUP, "Donor request does not match expected donors"); - PetscCall(PetscSegBufferExtractInPlace(my_donor_faces, &my_donors)); - PetscCall(PetscMalloc1(vEnd - vStart, &my_donor_indices)); - for (PetscInt i = 0; i < (PetscInt)num_my_donors; i++) { - PetscInt f = my_donors[i]; - PetscInt num_points, *points = NULL, minv = PETSC_MAX_INT; - PetscCall(DMPlexGetTransitiveClosure(dm, f, PETSC_TRUE, &num_points, &points)); - for (PetscInt j = 0; j < num_points; j++) { - PetscInt p = points[2 * j]; - if (p < vStart || vEnd <= p) continue; - minv = PetscMin(minv, p); - } - PetscCall(DMPlexRestoreTransitiveClosure(dm, f, PETSC_TRUE, &num_points, &points)); - PetscAssert(my_donor_degree[minv - vStart] == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Local vertex not requested"); - my_donor_indices[minv - vStart] = f; - } - PetscCall(PetscMalloc1(num_faces, &donor_indices)); - PetscCall(PetscSFBcastBegin(sfper, MPIU_INT, my_donor_indices, donor_indices, MPI_REPLACE)); - PetscCall(PetscSFBcastEnd(sfper, MPIU_INT, my_donor_indices, donor_indices, MPI_REPLACE)); - PetscCall(PetscFree(my_donor_indices)); - // Modify our leafs so they point to donor faces instead of donor minz. Additionally, give them indices as faces. 
- for (PetscInt i = 0; i < (PetscInt)num_faces; i++) leaf[i].index = donor_indices[i]; - PetscCall(PetscFree(donor_indices)); - PetscInt pStart, pEnd; - PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); - PetscCall(PetscSFSetGraph(sfper, pEnd - pStart, num_faces, faces, PETSC_COPY_VALUES, leaf, PETSC_OWN_POINTER)); - PetscCall(PetscObjectSetName((PetscObject)sfper, "Z-order Isoperiodic Faces")); - - PetscCall(DMPlexSetIsoperiodicFaceSF(dm, sfper)); - - PetscScalar t[4][4] = {{0}}; - t[0][0] = 1; - t[1][1] = 1; - t[2][2] = 1; - t[3][3] = 1; - for (PetscInt i = 0; i < dim; i++) - if (periodicity[i] == DM_BOUNDARY_PERIODIC) t[i][3] = upper[i] - lower[i]; - PetscCall(DMPlexSetIsoperiodicFaceTransform(dm, &t[0][0])); - PetscCall(PetscSFDestroy(&sfper)); + + PetscCall(DMPlexSetIsoperiodicFaceSF(dm, num_directions, face_sfs)); + PetscCall(DMPlexSetIsoperiodicFaceTransform(dm, num_directions, (PetscScalar *)transforms)); + + for (PetscInt i = 0; i < num_directions; i++) PetscCall(PetscSFDestroy(&face_sfs[i])); PetscFunctionReturn(PETSC_SUCCESS); } @@ -290,8 +304,11 @@ static PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Periodicity_Private(DM dm, static PetscErrorCode DMCoordAddPeriodicOffsets_Private(DM dm, Vec g, InsertMode mode, Vec l, void *ctx) { PetscFunctionBegin; - PetscCall(VecScatterBegin(dm->periodic.affine_to_local, dm->periodic.affine, l, ADD_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(dm->periodic.affine_to_local, dm->periodic.affine, l, ADD_VALUES, SCATTER_FORWARD)); + // These `VecScatter`s should be merged to improve efficiency; the scatters cannot be overlapped. + for (PetscInt i = 0; i < dm->periodic.num_affines; i++) { + PetscCall(VecScatterBegin(dm->periodic.affine_to_local[i], dm->periodic.affine[i], l, ADD_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(dm->periodic.affine_to_local[i], dm->periodic.affine[i], l, ADD_VALUES, SCATTER_FORWARD)); + } PetscFunctionReturn(PETSC_SUCCESS); } @@ -303,160 +320,175 @@ static PetscErrorCode DMCoordAddPeriodicOffsets_Private(DM dm, Vec g, InsertMode // // + closure_sf - augmented point SF (see `DMGetPointSF()`) that includes the faces and all points in its closure. This // can be used to create a global section and section SF. -// - is_points - index set for just the points in the closure of `face_sf`. These may be used to apply an affine +// - is_points - array of index sets for just the points in the closure of `face_sf`. These may be used to apply an affine // transformation to periodic dofs; see DMPeriodicCoordinateSetUp_Internal(). 
// -static PetscErrorCode DMPlexCreateIsoperiodicPointSF_Private(DM dm, PetscSF face_sf, PetscSF *closure_sf, IS *is_points) +static PetscErrorCode DMPlexCreateIsoperiodicPointSF_Private(DM dm, PetscInt num_face_sfs, PetscSF *face_sfs, PetscSF *closure_sf, IS **is_points) { MPI_Comm comm; - PetscInt nroots, nleaves, npoints; - const PetscInt *filocal, *pilocal; - const PetscSFNode *firemote, *piremote; PetscMPIInt rank; PetscSF point_sf; + PetscInt nroots, nleaves; + const PetscInt *filocal; + const PetscSFNode *firemote; PetscFunctionBegin; PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); PetscCallMPI(MPI_Comm_rank(comm, &rank)); - PetscCall(PetscSFGetGraph(face_sf, &nroots, &nleaves, &filocal, &firemote)); PetscCall(DMGetPointSF(dm, &point_sf)); // Point SF has remote points - PetscCall(PetscSFGetGraph(point_sf, NULL, &npoints, &pilocal, &piremote)); - PetscInt *rootdata, *leafdata; - PetscCall(PetscCalloc2(2 * nroots, &rootdata, 2 * nroots, &leafdata)); - for (PetscInt i = 0; i < nleaves; i++) { - PetscInt point = filocal[i], cl_size, *closure = NULL; - PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); - leafdata[point] = cl_size - 1; - PetscCall(DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); - } - PetscCall(PetscSFReduceBegin(face_sf, MPIU_INT, leafdata, rootdata + nroots, MPIU_SUM)); - PetscCall(PetscSFReduceEnd(face_sf, MPIU_INT, leafdata, rootdata + nroots, MPIU_SUM)); - - PetscInt root_offset = 0; - for (PetscInt p = 0; p < nroots; p++) { - const PetscInt *donor_dof = rootdata + nroots; - if (donor_dof[p] == 0) { - rootdata[2 * p] = -1; - rootdata[2 * p + 1] = -1; - continue; + PetscCall(PetscMalloc1(num_face_sfs, is_points)); + + for (PetscInt f = 0; f < num_face_sfs; f++) { + PetscSF face_sf = face_sfs[f]; + PetscInt *rootdata, *leafdata; + + PetscCall(PetscSFGetGraph(face_sf, &nroots, &nleaves, &filocal, &firemote)); + PetscCall(PetscCalloc2(2 * nroots, &rootdata, 2 * nroots, &leafdata)); + for (PetscInt i = 0; i < nleaves; i++) { + PetscInt point = filocal[i], cl_size, *closure = NULL; + PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); + leafdata[point] = cl_size - 1; + PetscCall(DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); + } + PetscCall(PetscSFReduceBegin(face_sf, MPIU_INT, leafdata, rootdata + nroots, MPIU_SUM)); + PetscCall(PetscSFReduceEnd(face_sf, MPIU_INT, leafdata, rootdata + nroots, MPIU_SUM)); + + PetscInt root_offset = 0; + for (PetscInt p = 0; p < nroots; p++) { + const PetscInt *donor_dof = rootdata + nroots; + if (donor_dof[p] == 0) { + rootdata[2 * p] = -1; + rootdata[2 * p + 1] = -1; + continue; + } + PetscInt cl_size; + PetscInt *closure = NULL; + PetscCall(DMPlexGetTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); + // cl_size - 1 = points not including self + PetscAssert(donor_dof[p] == cl_size - 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Reduced leaf cone sizes do not match root cone sizes"); + rootdata[2 * p] = root_offset; + rootdata[2 * p + 1] = cl_size - 1; + root_offset += cl_size - 1; + PetscCall(DMPlexRestoreTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); + } + PetscCall(PetscSFBcastBegin(face_sf, MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(face_sf, MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); + // Count how many leaves we need to communicate the closures + PetscInt leaf_offset = 0; + for (PetscInt i = 0; i < nleaves; i++) { + PetscInt point = filocal[i]; + if (leafdata[2 * 
point + 1] < 0) continue; + leaf_offset += leafdata[2 * point + 1]; } - PetscInt cl_size; - PetscInt *closure = NULL; - PetscCall(DMPlexGetTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); - // cl_size - 1 = points not including self - PetscAssert(donor_dof[p] == cl_size - 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Reduced leaf cone sizes do not match root cone sizes"); - rootdata[2 * p] = root_offset; - rootdata[2 * p + 1] = cl_size - 1; - root_offset += cl_size - 1; - PetscCall(DMPlexRestoreTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); - } - PetscCall(PetscSFBcastBegin(face_sf, MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); - PetscCall(PetscSFBcastEnd(face_sf, MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); - // Count how many leaves we need to communicate the closures - PetscInt leaf_offset = 0; - for (PetscInt i = 0; i < nleaves; i++) { - PetscInt point = filocal[i]; - if (leafdata[2 * point + 1] < 0) continue; - leaf_offset += leafdata[2 * point + 1]; - } - PetscSFNode *closure_leaf; - PetscCall(PetscMalloc1(leaf_offset, &closure_leaf)); - leaf_offset = 0; - for (PetscInt i = 0; i < nleaves; i++) { - PetscInt point = filocal[i]; - PetscInt cl_size = leafdata[2 * point + 1]; - if (cl_size < 0) continue; - for (PetscInt j = 0; j < cl_size; j++) { - closure_leaf[leaf_offset].rank = firemote[i].rank; - closure_leaf[leaf_offset].index = leafdata[2 * point] + j; - leaf_offset++; + PetscSFNode *closure_leaf; + PetscCall(PetscMalloc1(leaf_offset, &closure_leaf)); + leaf_offset = 0; + for (PetscInt i = 0; i < nleaves; i++) { + PetscInt point = filocal[i]; + PetscInt cl_size = leafdata[2 * point + 1]; + if (cl_size < 0) continue; + for (PetscInt j = 0; j < cl_size; j++) { + closure_leaf[leaf_offset].rank = firemote[i].rank; + closure_leaf[leaf_offset].index = leafdata[2 * point] + j; + leaf_offset++; + } } - } - PetscSF sf_closure; - PetscCall(PetscSFCreate(comm, &sf_closure)); - PetscCall(PetscSFSetGraph(sf_closure, root_offset, leaf_offset, NULL, PETSC_USE_POINTER, closure_leaf, PETSC_OWN_POINTER)); - - // Pack root buffer with owner for every point in the root cones - PetscSFNode *donor_closure; - PetscCall(PetscCalloc1(root_offset, &donor_closure)); - root_offset = 0; - for (PetscInt p = 0; p < nroots; p++) { - if (rootdata[2 * p] < 0) continue; - PetscInt cl_size; - PetscInt *closure = NULL; - PetscCall(DMPlexGetTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); - for (PetscInt j = 1; j < cl_size; j++) { - PetscInt c = closure[2 * j]; - if (pilocal) { - PetscInt found = -1; - if (npoints > 0) PetscCall(PetscFindInt(c, npoints, pilocal, &found)); - if (found >= 0) { - donor_closure[root_offset++] = piremote[found]; - continue; + PetscSF sf_closure; + PetscCall(PetscSFCreate(comm, &sf_closure)); + PetscCall(PetscSFSetGraph(sf_closure, root_offset, leaf_offset, NULL, PETSC_USE_POINTER, closure_leaf, PETSC_OWN_POINTER)); + + PetscSFNode *leaf_donor_closure; + { // Pack root buffer with owner for every point in the root cones + PetscSFNode *donor_closure; + const PetscInt *pilocal; + const PetscSFNode *piremote; + PetscInt npoints; + + PetscCall(PetscSFGetGraph(point_sf, NULL, &npoints, &pilocal, &piremote)); + PetscCall(PetscCalloc1(root_offset, &donor_closure)); + root_offset = 0; + for (PetscInt p = 0; p < nroots; p++) { + if (rootdata[2 * p] < 0) continue; + PetscInt cl_size; + PetscInt *closure = NULL; + PetscCall(DMPlexGetTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); + for (PetscInt j = 1; j < cl_size; j++) { + PetscInt c = closure[2 * j]; + if (pilocal) 
{ + PetscInt found = -1; + if (npoints > 0) PetscCall(PetscFindInt(c, npoints, pilocal, &found)); + if (found >= 0) { + donor_closure[root_offset++] = piremote[found]; + continue; + } + } + // we own c + donor_closure[root_offset].rank = rank; + donor_closure[root_offset].index = c; + root_offset++; } + PetscCall(DMPlexRestoreTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); } - // we own c - donor_closure[root_offset].rank = rank; - donor_closure[root_offset].index = c; - root_offset++; + + PetscCall(PetscMalloc1(leaf_offset, &leaf_donor_closure)); + PetscCall(PetscSFBcastBegin(sf_closure, MPIU_2INT, donor_closure, leaf_donor_closure, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(sf_closure, MPIU_2INT, donor_closure, leaf_donor_closure, MPI_REPLACE)); + PetscCall(PetscSFDestroy(&sf_closure)); + PetscCall(PetscFree(donor_closure)); } - PetscCall(DMPlexRestoreTransitiveClosure(dm, p, PETSC_TRUE, &cl_size, &closure)); - } - PetscSFNode *leaf_donor_closure; - PetscCall(PetscMalloc1(leaf_offset, &leaf_donor_closure)); - PetscCall(PetscSFBcastBegin(sf_closure, MPIU_2INT, donor_closure, leaf_donor_closure, MPI_REPLACE)); - PetscCall(PetscSFBcastEnd(sf_closure, MPIU_2INT, donor_closure, leaf_donor_closure, MPI_REPLACE)); - PetscCall(PetscSFDestroy(&sf_closure)); - PetscCall(PetscFree(donor_closure)); - - PetscSFNode *new_iremote; - PetscCall(PetscCalloc1(nroots, &new_iremote)); - for (PetscInt i = 0; i < nroots; i++) new_iremote[i].rank = -1; - // Walk leaves and match vertices - leaf_offset = 0; - for (PetscInt i = 0; i < nleaves; i++) { - PetscInt point = filocal[i], cl_size; - PetscInt *closure = NULL; - PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); - for (PetscInt j = 1; j < cl_size; j++) { // TODO: should we send donor edge orientations so we can flip for consistency? 
- PetscInt c = closure[2 * j]; - PetscSFNode lc = leaf_donor_closure[leaf_offset]; - // printf("[%d] face %d.%d: %d ?-- (%d,%d)\n", rank, point, j, c, lc.rank, lc.index); - if (new_iremote[c].rank == -1) { - new_iremote[c] = lc; - } else PetscCheck(new_iremote[c].rank == lc.rank && new_iremote[c].index == lc.index, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Mismatched cone ordering between faces"); - leaf_offset++; - } - PetscCall(DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); - } - PetscCall(PetscFree(leaf_donor_closure)); - - // Include face points in closure SF - for (PetscInt i = 0; i < nleaves; i++) new_iremote[filocal[i]] = firemote[i]; - // consolidate leaves - PetscInt num_new_leaves = 0; - for (PetscInt i = 0; i < nroots; i++) { - if (new_iremote[i].rank == -1) continue; - new_iremote[num_new_leaves] = new_iremote[i]; - leafdata[num_new_leaves] = i; - num_new_leaves++; - } - PetscCall(ISCreateGeneral(PETSC_COMM_SELF, num_new_leaves, leafdata, PETSC_COPY_VALUES, is_points)); - - PetscSF csf; - PetscCall(PetscSFCreate(comm, &csf)); - PetscCall(PetscSFSetGraph(csf, nroots, num_new_leaves, leafdata, PETSC_COPY_VALUES, new_iremote, PETSC_COPY_VALUES)); - PetscCall(PetscFree(new_iremote)); // copy and delete because new_iremote is longer than it needs to be - PetscCall(PetscFree2(rootdata, leafdata)); - - if (npoints < 0) { // empty point_sf - *closure_sf = csf; - } else { - PetscCall(PetscSFMerge(point_sf, csf, closure_sf)); - PetscCall(PetscSFDestroy(&csf)); + PetscSFNode *new_iremote; + PetscCall(PetscCalloc1(nroots, &new_iremote)); + for (PetscInt i = 0; i < nroots; i++) new_iremote[i].rank = -1; + // Walk leaves and match vertices + leaf_offset = 0; + for (PetscInt i = 0; i < nleaves; i++) { + PetscInt point = filocal[i], cl_size; + PetscInt *closure = NULL; + PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); + for (PetscInt j = 1; j < cl_size; j++) { // TODO: should we send donor edge orientations so we can flip for consistency? 
+ PetscInt c = closure[2 * j]; + PetscSFNode lc = leaf_donor_closure[leaf_offset]; + // printf("[%d] face %d.%d: %d ?-- (%d,%d)\n", rank, point, j, c, lc.rank, lc.index); + if (new_iremote[c].rank == -1) { + new_iremote[c] = lc; + } else PetscCheck(new_iremote[c].rank == lc.rank && new_iremote[c].index == lc.index, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Mismatched cone ordering between faces"); + leaf_offset++; + } + PetscCall(DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &cl_size, &closure)); + } + PetscCall(PetscFree(leaf_donor_closure)); + + // Include face points in closure SF + for (PetscInt i = 0; i < nleaves; i++) new_iremote[filocal[i]] = firemote[i]; + // consolidate leaves + PetscInt num_new_leaves = 0; + for (PetscInt i = 0; i < nroots; i++) { + if (new_iremote[i].rank == -1) continue; + new_iremote[num_new_leaves] = new_iremote[i]; + leafdata[num_new_leaves] = i; + num_new_leaves++; + } + PetscCall(ISCreateGeneral(PETSC_COMM_SELF, num_new_leaves, leafdata, PETSC_COPY_VALUES, &(*is_points)[f])); + + PetscSF csf; + PetscCall(PetscSFCreate(comm, &csf)); + PetscCall(PetscSFSetGraph(csf, nroots, num_new_leaves, leafdata, PETSC_COPY_VALUES, new_iremote, PETSC_COPY_VALUES)); + PetscCall(PetscFree(new_iremote)); // copy and delete because new_iremote is longer than it needs to be + PetscCall(PetscFree2(rootdata, leafdata)); + + PetscInt npoints; + PetscCall(PetscSFGetGraph(point_sf, NULL, &npoints, NULL, NULL)); + if (npoints < 0) { // empty point_sf + *closure_sf = csf; + } else { + PetscCall(PetscSFMerge(point_sf, csf, closure_sf)); + PetscCall(PetscSFDestroy(&csf)); + } + if (f > 0) PetscCall(PetscSFDestroy(&point_sf)); // Only destroy if point_sf is from previous calls to PetscSFMerge + point_sf = *closure_sf; // Use combined point + isoperiodic SF to define point ownership for further face_sf } PetscCall(PetscObjectSetName((PetscObject)*closure_sf, "Composed Periodic Points")); PetscFunctionReturn(PETSC_SUCCESS); @@ -467,11 +499,7 @@ static PetscErrorCode DMGetIsoperiodicPointSF_Plex(DM dm, PetscSF *sf) DM_Plex *plex = (DM_Plex *)dm->data; PetscFunctionBegin; - if (!plex->periodic.composed_sf) { - PetscSF face_sf = plex->periodic.face_sf; - - PetscCall(DMPlexCreateIsoperiodicPointSF_Private(dm, face_sf, &plex->periodic.composed_sf, &plex->periodic.periodic_points)); - } + if (!plex->periodic.composed_sf) PetscCall(DMPlexCreateIsoperiodicPointSF_Private(dm, plex->periodic.num_face_sfs, plex->periodic.face_sfs, &plex->periodic.composed_sf, &plex->periodic.periodic_points)); if (sf) *sf = plex->periodic.composed_sf; PetscFunctionReturn(PETSC_SUCCESS); } @@ -479,135 +507,149 @@ static PetscErrorCode DMGetIsoperiodicPointSF_Plex(DM dm, PetscSF *sf) PetscErrorCode DMPlexMigrateIsoperiodicFaceSF_Internal(DM old_dm, DM dm, PetscSF sf_migration) { DM_Plex *plex = (DM_Plex *)old_dm->data; - PetscSF sf_point; + PetscSF sf_point, *new_face_sfs; PetscMPIInt rank; PetscFunctionBegin; - if (!plex->periodic.face_sf) PetscFunctionReturn(PETSC_SUCCESS); + if (!plex->periodic.face_sfs) PetscFunctionReturn(PETSC_SUCCESS); PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank)); PetscCall(DMGetPointSF(dm, &sf_point)); - PetscInt old_npoints, new_npoints, old_nleaf, new_nleaf, point_nleaf; - PetscSFNode *new_leafdata, *rootdata, *leafdata; - const PetscInt *old_local, *point_local; - const PetscSFNode *old_remote, *point_remote; - PetscCall(PetscSFGetGraph(plex->periodic.face_sf, &old_npoints, &old_nleaf, &old_local, &old_remote)); - PetscCall(PetscSFGetGraph(sf_migration, NULL, 
&new_nleaf, NULL, NULL)); - PetscCall(PetscSFGetGraph(sf_point, &new_npoints, &point_nleaf, &point_local, &point_remote)); - PetscAssert(new_nleaf == new_npoints, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Expected migration leaf space to match new point root space"); - PetscCall(PetscMalloc3(old_npoints, &rootdata, old_npoints, &leafdata, new_npoints, &new_leafdata)); - - // Fill new_leafdata with new owners of all points - for (PetscInt i = 0; i < new_npoints; i++) { - new_leafdata[i].rank = rank; - new_leafdata[i].index = i; - } - for (PetscInt i = 0; i < point_nleaf; i++) { - PetscInt j = point_local[i]; - new_leafdata[j] = point_remote[i]; - } - // REPLACE is okay because every leaf agrees about the new owners - PetscCall(PetscSFReduceBegin(sf_migration, MPIU_2INT, new_leafdata, rootdata, MPI_REPLACE)); - PetscCall(PetscSFReduceEnd(sf_migration, MPIU_2INT, new_leafdata, rootdata, MPI_REPLACE)); - // rootdata now contains the new owners - - // Send to leaves of old space - for (PetscInt i = 0; i < old_npoints; i++) { - leafdata[i].rank = -1; - leafdata[i].index = -1; - } - PetscCall(PetscSFBcastBegin(plex->periodic.face_sf, MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); - PetscCall(PetscSFBcastEnd(plex->periodic.face_sf, MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); - - // Send to new leaf space - PetscCall(PetscSFBcastBegin(sf_migration, MPIU_2INT, leafdata, new_leafdata, MPI_REPLACE)); - PetscCall(PetscSFBcastEnd(sf_migration, MPIU_2INT, leafdata, new_leafdata, MPI_REPLACE)); - - PetscInt nface = 0, *new_local; - PetscSFNode *new_remote; - for (PetscInt i = 0; i < new_npoints; i++) nface += (new_leafdata[i].rank >= 0); - PetscCall(PetscMalloc1(nface, &new_local)); - PetscCall(PetscMalloc1(nface, &new_remote)); - nface = 0; - for (PetscInt i = 0; i < new_npoints; i++) { - if (new_leafdata[i].rank == -1) continue; - new_local[nface] = i; - new_remote[nface] = new_leafdata[i]; - nface++; + PetscCall(PetscMalloc1(plex->periodic.num_face_sfs, &new_face_sfs)); + + for (PetscInt f = 0; f < plex->periodic.num_face_sfs; f++) { + PetscInt old_npoints, new_npoints, old_nleaf, new_nleaf, point_nleaf; + PetscSFNode *new_leafdata, *rootdata, *leafdata; + const PetscInt *old_local, *point_local; + const PetscSFNode *old_remote, *point_remote; + PetscCall(PetscSFGetGraph(plex->periodic.face_sfs[f], &old_npoints, &old_nleaf, &old_local, &old_remote)); + PetscCall(PetscSFGetGraph(sf_migration, NULL, &new_nleaf, NULL, NULL)); + PetscCall(PetscSFGetGraph(sf_point, &new_npoints, &point_nleaf, &point_local, &point_remote)); + PetscAssert(new_nleaf == new_npoints, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Expected migration leaf space to match new point root space"); + PetscCall(PetscMalloc3(old_npoints, &rootdata, old_npoints, &leafdata, new_npoints, &new_leafdata)); + + // Fill new_leafdata with new owners of all points + for (PetscInt i = 0; i < new_npoints; i++) { + new_leafdata[i].rank = rank; + new_leafdata[i].index = i; + } + for (PetscInt i = 0; i < point_nleaf; i++) { + PetscInt j = point_local[i]; + new_leafdata[j] = point_remote[i]; + } + // REPLACE is okay because every leaf agrees about the new owners + PetscCall(PetscSFReduceBegin(sf_migration, MPIU_2INT, new_leafdata, rootdata, MPI_REPLACE)); + PetscCall(PetscSFReduceEnd(sf_migration, MPIU_2INT, new_leafdata, rootdata, MPI_REPLACE)); + // rootdata now contains the new owners + + // Send to leaves of old space + for (PetscInt i = 0; i < old_npoints; i++) { + leafdata[i].rank = -1; + leafdata[i].index = -1; + } + 
PetscCall(PetscSFBcastBegin(plex->periodic.face_sfs[f], MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(plex->periodic.face_sfs[f], MPIU_2INT, rootdata, leafdata, MPI_REPLACE)); + + // Send to new leaf space + PetscCall(PetscSFBcastBegin(sf_migration, MPIU_2INT, leafdata, new_leafdata, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(sf_migration, MPIU_2INT, leafdata, new_leafdata, MPI_REPLACE)); + + PetscInt nface = 0, *new_local; + PetscSFNode *new_remote; + for (PetscInt i = 0; i < new_npoints; i++) nface += (new_leafdata[i].rank >= 0); + PetscCall(PetscMalloc1(nface, &new_local)); + PetscCall(PetscMalloc1(nface, &new_remote)); + nface = 0; + for (PetscInt i = 0; i < new_npoints; i++) { + if (new_leafdata[i].rank == -1) continue; + new_local[nface] = i; + new_remote[nface] = new_leafdata[i]; + nface++; + } + PetscCall(PetscFree3(rootdata, leafdata, new_leafdata)); + PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), &new_face_sfs[f])); + PetscCall(PetscSFSetGraph(new_face_sfs[f], new_npoints, nface, new_local, PETSC_OWN_POINTER, new_remote, PETSC_OWN_POINTER)); + { + char new_face_sf_name[PETSC_MAX_PATH_LEN]; + PetscCall(PetscSNPrintf(new_face_sf_name, sizeof new_face_sf_name, "Migrated Isoperiodic Faces #%" PetscInt_FMT, f)); + PetscCall(PetscObjectSetName((PetscObject)new_face_sfs[f], new_face_sf_name)); + } } - PetscCall(PetscFree3(rootdata, leafdata, new_leafdata)); - PetscSF sf_face; - PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), &sf_face)); - PetscCall(PetscSFSetGraph(sf_face, new_npoints, nface, new_local, PETSC_OWN_POINTER, new_remote, PETSC_OWN_POINTER)); - PetscCall(PetscObjectSetName((PetscObject)sf_face, "Migrated Isoperiodic Faces")); - PetscCall(DMPlexSetIsoperiodicFaceSF(dm, sf_face)); - PetscCall(DMPlexSetIsoperiodicFaceTransform(dm, &plex->periodic.transform[0][0])); - PetscCall(PetscSFDestroy(&sf_face)); + + PetscCall(DMPlexSetIsoperiodicFaceSF(dm, plex->periodic.num_face_sfs, new_face_sfs)); + PetscCall(DMPlexSetIsoperiodicFaceTransform(dm, plex->periodic.num_face_sfs, (PetscScalar *)plex->periodic.transform)); + for (PetscInt f = 0; f < plex->periodic.num_face_sfs; f++) PetscCall(PetscSFDestroy(&new_face_sfs[f])); + PetscCall(PetscFree(new_face_sfs)); PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode DMPeriodicCoordinateSetUp_Internal(DM dm) { DM_Plex *plex = (DM_Plex *)dm->data; + size_t count; + IS isdof; + PetscInt dim; PetscFunctionBegin; - if (!plex->periodic.face_sf) PetscFunctionReturn(PETSC_SUCCESS); + if (!plex->periodic.face_sfs) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(DMGetIsoperiodicPointSF_Plex(dm, NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMGetIsoperiodicPointSF_C", DMGetIsoperiodicPointSF_Plex)); - PetscInt dim; PetscCall(DMGetDimension(dm, &dim)); - size_t count; - IS isdof; - { - PetscInt npoints; - const PetscInt *points; - IS is = plex->periodic.periodic_points; - PetscSegBuffer seg; - PetscSection section; - PetscCall(DMGetLocalSection(dm, §ion)); - PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 32, &seg)); - PetscCall(ISGetSize(is, &npoints)); - PetscCall(ISGetIndices(is, &points)); - for (PetscInt i = 0; i < npoints; i++) { - PetscInt point = points[i], off, dof; - PetscCall(PetscSectionGetOffset(section, point, &off)); - PetscCall(PetscSectionGetDof(section, point, &dof)); - PetscAssert(dof % dim == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected dof %" PetscInt_FMT " not divisible by dimension %" PetscInt_FMT, dof, dim); - for (PetscInt j = 0; j < dof / dim; j++) { - 
PetscInt *slot; - PetscCall(PetscSegBufferGetInts(seg, 1, &slot)); - *slot = off / dim + j; + dm->periodic.num_affines = plex->periodic.num_face_sfs; + PetscCall(PetscMalloc2(dm->periodic.num_affines, &dm->periodic.affine_to_local, dm->periodic.num_affines, &dm->periodic.affine)); + + for (PetscInt f = 0; f < plex->periodic.num_face_sfs; f++) { + { + PetscInt npoints; + const PetscInt *points; + IS is = plex->periodic.periodic_points[f]; + PetscSegBuffer seg; + PetscSection section; + PetscCall(DMGetLocalSection(dm, §ion)); + PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 32, &seg)); + PetscCall(ISGetSize(is, &npoints)); + PetscCall(ISGetIndices(is, &points)); + for (PetscInt i = 0; i < npoints; i++) { + PetscInt point = points[i], off, dof; + PetscCall(PetscSectionGetOffset(section, point, &off)); + PetscCall(PetscSectionGetDof(section, point, &dof)); + PetscAssert(dof % dim == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected dof %" PetscInt_FMT " not divisible by dimension %" PetscInt_FMT, dof, dim); + for (PetscInt j = 0; j < dof / dim; j++) { + PetscInt *slot; + PetscCall(PetscSegBufferGetInts(seg, 1, &slot)); + *slot = off / dim + j; + } } + PetscInt *ind; + PetscCall(PetscSegBufferGetSize(seg, &count)); + PetscCall(PetscSegBufferExtractAlloc(seg, &ind)); + PetscCall(PetscSegBufferDestroy(&seg)); + PetscCall(ISCreateBlock(PETSC_COMM_SELF, dim, count, ind, PETSC_OWN_POINTER, &isdof)); } - PetscInt *ind; - PetscCall(PetscSegBufferGetSize(seg, &count)); - PetscCall(PetscSegBufferExtractAlloc(seg, &ind)); - PetscCall(PetscSegBufferDestroy(&seg)); - PetscCall(ISCreateBlock(PETSC_COMM_SELF, dim, count, ind, PETSC_OWN_POINTER, &isdof)); - } - Vec L, P; - VecType vec_type; - VecScatter scatter; - PetscCall(DMGetLocalVector(dm, &L)); - PetscCall(VecCreate(PETSC_COMM_SELF, &P)); - PetscCall(VecSetSizes(P, count * dim, count * dim)); - PetscCall(VecGetType(L, &vec_type)); - PetscCall(VecSetType(P, vec_type)); - PetscCall(VecScatterCreate(P, NULL, L, isdof, &scatter)); - PetscCall(DMRestoreLocalVector(dm, &L)); - PetscCall(ISDestroy(&isdof)); - - { - PetscScalar *x; - PetscCall(VecGetArrayWrite(P, &x)); - for (PetscInt i = 0; i < (PetscInt)count; i++) { - for (PetscInt j = 0; j < dim; j++) x[i * dim + j] = plex->periodic.transform[j][3]; + Vec L, P; + VecType vec_type; + VecScatter scatter; + PetscCall(DMGetLocalVector(dm, &L)); + PetscCall(VecCreate(PETSC_COMM_SELF, &P)); + PetscCall(VecSetSizes(P, count * dim, count * dim)); + PetscCall(VecGetType(L, &vec_type)); + PetscCall(VecSetType(P, vec_type)); + PetscCall(VecScatterCreate(P, NULL, L, isdof, &scatter)); + PetscCall(DMRestoreLocalVector(dm, &L)); + PetscCall(ISDestroy(&isdof)); + + { + PetscScalar *x; + PetscCall(VecGetArrayWrite(P, &x)); + for (PetscInt i = 0; i < (PetscInt)count; i++) { + for (PetscInt j = 0; j < dim; j++) x[i * dim + j] = plex->periodic.transform[f][j][3]; + } + PetscCall(VecRestoreArrayWrite(P, &x)); } - PetscCall(VecRestoreArrayWrite(P, &x)); - } - dm->periodic.affine_to_local = scatter; - dm->periodic.affine = P; + dm->periodic.affine_to_local[f] = scatter; + dm->periodic.affine[f] = P; + } PetscCall(DMGlobalToLocalHookAdd(dm, NULL, DMCoordAddPeriodicOffsets_Private, NULL)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -820,10 +862,12 @@ PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Internal(DM dm, PetscInt dim, cons DMLabel label; PetscCall(DMCreateLabel(dm, "Face Sets")); PetscCall(DMGetLabel(dm, "Face Sets", &label)); - PetscSegBuffer per_faces, donor_face_closure, my_donor_faces; - 
PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 64, &per_faces)); - PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 64, &my_donor_faces)); - PetscCall(PetscSegBufferCreate(sizeof(ZCode), 64 * PetscPowInt(2, dim), &donor_face_closure)); + PetscSegBuffer per_faces[3], donor_face_closure[3], my_donor_faces[3]; + for (PetscInt i = 0; i < 3; i++) { + PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 64, &per_faces[i])); + PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 64, &my_donor_faces[i])); + PetscCall(PetscSegBufferCreate(sizeof(ZCode), 64 * PetscPowInt(2, dim), &donor_face_closure[i])); + } PetscInt fStart, fEnd, vStart, vEnd; PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd)); PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd)); @@ -851,13 +895,13 @@ PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Internal(DM dm, PetscInt dim, cons if (periodicity[bc / 2] == DM_BOUNDARY_PERIODIC) { PetscInt *put; if (bc % 2 == 0) { // donor face; no label - PetscCall(PetscSegBufferGet(my_donor_faces, 1, &put)); + PetscCall(PetscSegBufferGet(my_donor_faces[bc / 2], 1, &put)); *put = f; } else { // periodic face - PetscCall(PetscSegBufferGet(per_faces, 1, &put)); + PetscCall(PetscSegBufferGet(per_faces[bc / 2], 1, &put)); *put = f; ZCode *zput; - PetscCall(PetscSegBufferGet(donor_face_closure, num_fverts, &zput)); + PetscCall(PetscSegBufferGet(donor_face_closure[bc / 2], num_fverts, &zput)); for (PetscInt i = 0; i < num_fverts; i++) { Ijk loc = ZCodeSplit(vert_z[fverts[i] - vStart]); switch (bc / 2) { @@ -889,9 +933,11 @@ PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Internal(DM dm, PetscInt dim, cons if (periodicity[0] == DM_BOUNDARY_PERIODIC || (dim > 1 && periodicity[1] == DM_BOUNDARY_PERIODIC) || (dim > 2 && periodicity[2] == DM_BOUNDARY_PERIODIC)) { PetscCall(DMPlexCreateBoxMesh_Tensor_SFC_Periodicity_Private(dm, &layout, vert_z, per_faces, lower, upper, periodicity, donor_face_closure, my_donor_faces)); } - PetscCall(PetscSegBufferDestroy(&per_faces)); - PetscCall(PetscSegBufferDestroy(&donor_face_closure)); - PetscCall(PetscSegBufferDestroy(&my_donor_faces)); + for (PetscInt i = 0; i < 3; i++) { + PetscCall(PetscSegBufferDestroy(&per_faces[i])); + PetscCall(PetscSegBufferDestroy(&donor_face_closure[i])); + PetscCall(PetscSegBufferDestroy(&my_donor_faces[i])); + } } PetscCall(PetscFree(layout.zstarts)); PetscCall(PetscFree(vert_z)); @@ -904,8 +950,9 @@ PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Internal(DM dm, PetscInt dim, cons Logically Collective Input Parameters: -+ dm - The `DMPLEX` on which to set periodicity -- face_sf - `PetscSF` in which roots are (owned) donor faces and leaves are faces that must be matched to a (possibly remote) donor face. ++ dm - The `DMPLEX` on which to set periodicity +. num_face_sfs - Number of `PetscSF`s in `face_sfs` +- face_sfs - Array of `PetscSF` in which roots are (owned) donor faces and leaves are faces that must be matched to a (possibly remote) donor face. 
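+
+ Example:
+ A minimal sketch of configuring two independent periodic pairings; the face SFs `sfx` and `sfy` and the box lengths `Lx` and `Ly` are hypothetical, assumed to have been built by the caller. The matching transforms are supplied afterwards through `DMPlexSetIsoperiodicFaceTransform()`, which expects one flattened 4x4 matrix per SF.
+.vb
+  PetscSF     face_sfs[2] = {sfx, sfy};
+  PetscScalar t[2 * 16]   = {0};
+
+  for (PetscInt i = 0; i < 2; i++) {
+    for (PetscInt j = 0; j < 4; j++) t[i * 16 + j * 4 + j] = 1; // diagonals must be 1 (no rotations)
+  }
+  t[0 * 16 + 0 * 4 + 3] = Lx; // x-translation for the first pairing
+  t[1 * 16 + 1 * 4 + 3] = Ly; // y-translation for the second pairing
+  PetscCall(DMPlexSetIsoperiodicFaceSF(dm, 2, face_sfs));
+  PetscCall(DMPlexSetIsoperiodicFaceTransform(dm, 2, t));
+.ve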
Level: advanced @@ -915,26 +962,35 @@ PetscErrorCode DMPlexCreateBoxMesh_Tensor_SFC_Internal(DM dm, PetscInt dim, cons .seealso: [](ch_unstructured), `DMPLEX`, `DMGetGlobalSection()`, `DMPlexGetIsoperiodicFaceSF()` @*/ -PetscErrorCode DMPlexSetIsoperiodicFaceSF(DM dm, PetscSF face_sf) +PetscErrorCode DMPlexSetIsoperiodicFaceSF(DM dm, PetscInt num_face_sfs, PetscSF *face_sfs) { DM_Plex *plex = (DM_Plex *)dm->data; PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); - PetscCall(PetscObjectReference((PetscObject)face_sf)); - PetscCall(PetscSFDestroy(&plex->periodic.face_sf)); - plex->periodic.face_sf = face_sf; - if (face_sf) PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMGetIsoperiodicPointSF_C", DMGetIsoperiodicPointSF_Plex)); + if (face_sfs) PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMGetIsoperiodicPointSF_C", DMGetIsoperiodicPointSF_Plex)); + if (face_sfs == plex->periodic.face_sfs && num_face_sfs == plex->periodic.num_face_sfs) PetscFunctionReturn(PETSC_SUCCESS); + + for (PetscInt i = 0; i < num_face_sfs; i++) PetscCall(PetscObjectReference((PetscObject)face_sfs[i])); + + if (plex->periodic.num_face_sfs > 0) { + for (PetscInt i = 0; i < plex->periodic.num_face_sfs; i++) PetscCall(PetscSFDestroy(&plex->periodic.face_sfs[i])); + PetscCall(PetscFree(plex->periodic.face_sfs)); + } + + plex->periodic.num_face_sfs = num_face_sfs; + PetscCall(PetscCalloc1(num_face_sfs, &plex->periodic.face_sfs)); + for (PetscInt i = 0; i < num_face_sfs; i++) plex->periodic.face_sfs[i] = face_sfs[i]; DM cdm = dm->coordinates[0].dm; // Can't DMGetCoordinateDM because it automatically creates one if (cdm) { - PetscCall(DMPlexSetIsoperiodicFaceSF(cdm, face_sf)); - if (face_sf) cdm->periodic.setup = DMPeriodicCoordinateSetUp_Internal; + PetscCall(DMPlexSetIsoperiodicFaceSF(cdm, num_face_sfs, face_sfs)); + if (face_sfs) cdm->periodic.setup = DMPeriodicCoordinateSetUp_Internal; } PetscFunctionReturn(PETSC_SUCCESS); } -/*@ +/*@C DMPlexGetIsoperiodicFaceSF - Obtain periodicity for a mesh Logically Collective @@ -942,20 +998,22 @@ PetscErrorCode DMPlexSetIsoperiodicFaceSF(DM dm, PetscSF face_sf) Input Parameter: . dm - The `DMPLEX` for which to obtain periodic relation - Output Parameter: -. face_sf - `PetscSF` in which roots are (owned) donor faces and leaves are faces that must be matched to a (possibly remote) donor face. + Output Parameters: ++ num_face_sfs - Number of `PetscSF`s in the array +- face_sfs - Array of `PetscSF` in which roots are (owned) donor faces and leaves are faces that must be matched to a (possibly remote) donor face. Level: advanced .seealso: [](ch_unstructured), `DMPLEX`, `DMGetGlobalSection()`, `DMPlexSetIsoperiodicFaceSF()` @*/ -PetscErrorCode DMPlexGetIsoperiodicFaceSF(DM dm, PetscSF *face_sf) +PetscErrorCode DMPlexGetIsoperiodicFaceSF(DM dm, PetscInt *num_face_sfs, const PetscSF **face_sfs) { DM_Plex *plex = (DM_Plex *)dm->data; PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); - *face_sf = plex->periodic.face_sf; + *face_sfs = plex->periodic.face_sfs; + *num_face_sfs = plex->periodic.num_face_sfs; PetscFunctionReturn(PETSC_SUCCESS); } @@ -966,7 +1024,8 @@ PetscErrorCode DMPlexGetIsoperiodicFaceSF(DM dm, PetscSF *face_sf) Input Parameters: + dm - `DMPLEX` that has been configured with `DMPlexSetIsoperiodicFaceSF()` -- t - 4x4 affine transformation basis. +. n - Number of transforms in array +- t - Array of 4x4 affine transformation basis. 
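+
+ Note:
+ Each transform is a dense 4x4 matrix stored in row-major order, so entry (j,k) of transform i is read from t[i * 16 + j * 4 + k]. Only translations are supported for now: the diagonal of every matrix must be 1, and the offset added to coordinate j is taken from column 3 of row j. A sketch of one transform translating by a hypothetical length `L` in x:
+.vb
+  const PetscScalar t[16] = {1, 0, 0, L,
+                             0, 1, 0, 0,
+                             0, 0, 1, 0,
+                             0, 0, 0, 1};
+.ve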
Level: advanced

@@ -984,16 +1043,21 @@ PetscErrorCode DMPlexGetIsoperiodicFaceSF(DM dm, PetscSF *face_sf)

.seealso: [](ch_unstructured), `DMPLEX`, `DMGetGlobalSection()`, `DMPlexSetIsoperiodicFaceSF()`
@*/
-PetscErrorCode DMPlexSetIsoperiodicFaceTransform(DM dm, const PetscScalar t[])
+PetscErrorCode DMPlexSetIsoperiodicFaceTransform(DM dm, PetscInt n, const PetscScalar t[])
 {
 DM_Plex *plex = (DM_Plex *)dm->data;

 PetscFunctionBegin;
 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
- for (PetscInt i = 0; i < 4; i++) {
+ PetscCheck(n == plex->periodic.num_face_sfs, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Number of transforms (%" PetscInt_FMT ") must equal number of isoperiodic face SFs (%" PetscInt_FMT ")", n, plex->periodic.num_face_sfs);
+
+ PetscCall(PetscMalloc1(n, &plex->periodic.transform));
+ for (PetscInt i = 0; i < n; i++) {
 for (PetscInt j = 0; j < 4; j++) {
- PetscCheck(i != j || t[i * 4 + j] == 1., PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Rotated transforms not supported");
- plex->periodic.transform[i][j] = t[i * 4 + j];
+ for (PetscInt k = 0; k < 4; k++) {
+ PetscCheck(j != k || t[i * 16 + j * 4 + k] == 1., PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Rotated transforms not supported");
+ plex->periodic.transform[i][j][k] = t[i * 16 + j * 4 + k];
+ }
 }
 }
 PetscFunctionReturn(PETSC_SUCCESS);
diff --git a/src/dm/impls/plex/plexsubmesh.c b/src/dm/impls/plex/plexsubmesh.c
index a1d816ccc75..0d5cb6cb1d9 100644
--- a/src/dm/impls/plex/plexsubmesh.c
+++ b/src/dm/impls/plex/plexsubmesh.c
@@ -1032,7 +1032,7 @@ static PetscErrorCode DMPlexConstructGhostCells_Internal(DM dm, DMLabel label, P
 PetscFunctionReturn(PETSC_SUCCESS);
 }

-/*@C
+/*@
 DMPlexConstructGhostCells - Construct ghost cells which connect to every boundary face

 Collective
@@ -2011,15 +2011,17 @@ static PetscErrorCode GetSurfaceSide_Static(DM dm, DM subdm, PetscInt numSubpoin
 PetscFunctionReturn(PETSC_SUCCESS);
 }

-static PetscErrorCode CheckFaultEdge_Private(DM dm, DMLabel label)
+static PetscErrorCode CheckFaultEdge_Private(DM dm, DMLabel label, PetscBool split)
 {
 IS facePosIS, faceNegIS, dimIS;
 const PetscInt *points;
- PetscInt dim, numPoints, p, shift = 100, shift2 = 200;
+ PetscInt *closure = NULL, *inclosure = NULL;
+ PetscInt dim, numPoints, shift = 100, shift2 = 200, debug = 0;

 PetscFunctionBegin;
 PetscCall(DMGetDimension(dm, &dim));
- /* If any faces touching the fault divide cells on either side, split them */
+ // If any faces touching the fault divide cells on either side,
+ // either split them, or unsplit the connection
 PetscCall(DMLabelGetStratumIS(label, shift + dim - 1, &facePosIS));
 PetscCall(DMLabelGetStratumIS(label, -(shift + dim - 1), &faceNegIS));
 if (!facePosIS || !faceNegIS) {
@@ -2032,7 +2034,7 @@ static PetscErrorCode CheckFaultEdge_Private(DM dm, DMLabel label)
 PetscCall(ISDestroy(&faceNegIS));
 PetscCall(ISGetLocalSize(dimIS, &numPoints));
 PetscCall(ISGetIndices(dimIS, &points));
- for (p = 0; p < numPoints; ++p) {
+ for (PetscInt p = 0; p < numPoints; ++p) {
 const PetscInt point = points[p];
 const PetscInt *support;
 PetscInt supportSize, valA, valB;
@@ -2044,51 +2046,80 @@ static PetscErrorCode CheckFaultEdge_Private(DM dm, DMLabel label)
 PetscCall(DMLabelGetValue(label, support[1], &valB));
 if ((valA == -1) || (valB == -1)) continue;
 if (valA * valB > 0) continue;
- /* Check that this face is not incident on only unsplit faces, meaning has at least one split face */
+ // Check that this face is not incident on only unsplit faces,
+ // meaning has at least one split
face { - PetscInt *closure = NULL; - PetscBool split = PETSC_FALSE; - PetscInt closureSize, cl; + PetscBool split = PETSC_FALSE; + PetscInt Ncl, val; - PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure)); - for (cl = 0; cl < closureSize * 2; cl += 2) { - PetscCall(DMLabelGetValue(label, closure[cl], &valA)); - if ((valA >= 0) && (valA <= dim)) { + PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &Ncl, &closure)); + for (PetscInt cl = 0; cl < Ncl * 2; cl += 2) { + PetscCall(DMLabelGetValue(label, closure[cl], &val)); + if ((val >= 0) && (val <= dim)) { split = PETSC_TRUE; break; } } - PetscCall(DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure)); if (!split) continue; } - /* Split the face */ - PetscCall(DMLabelGetValue(label, point, &valA)); - PetscCall(DMLabelClearValue(label, point, valA)); - PetscCall(DMLabelSetValue(label, point, dim - 1)); - /* Label its closure: - unmarked: label as unsplit - incident: relabel as split - split: do nothing - */ - { - PetscInt *closure = NULL; - PetscInt closureSize, cl, dep; + if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " is impinging (%" PetscInt_FMT ":%" PetscInt_FMT ", %" PetscInt_FMT ":%" PetscInt_FMT ")\n", point, support[0], valA, support[1], valB)); + if (split) { + // Split the face + PetscCall(DMLabelGetValue(label, point, &valA)); + PetscCall(DMLabelClearValue(label, point, valA)); + PetscCall(DMLabelSetValue(label, point, dim - 1)); + /* Label its closure: + unmarked: label as unsplit + incident: relabel as split + split: do nothing */ + { + PetscInt closureSize, cl, dep; + + PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure)); + for (cl = 0; cl < closureSize * 2; cl += 2) { + PetscCall(DMLabelGetValue(label, closure[cl], &valA)); + if (valA == -1) { /* Mark as unsplit */ + PetscCall(DMPlexGetPointDepth(dm, closure[cl], &dep)); + PetscCall(DMLabelSetValue(label, closure[cl], shift2 + dep)); + } else if (((valA >= shift) && (valA < shift2)) || ((valA <= -shift) && (valA > -shift2))) { + PetscCall(DMPlexGetPointDepth(dm, closure[cl], &dep)); + PetscCall(DMLabelClearValue(label, closure[cl], valA)); + PetscCall(DMLabelSetValue(label, closure[cl], dep)); + } + } + } + } else { + // Unsplit the incident faces and their closures + PetscInt Ncl, dep, val; - PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure)); - for (cl = 0; cl < closureSize * 2; cl += 2) { - PetscCall(DMLabelGetValue(label, closure[cl], &valA)); - if (valA == -1) { /* Mark as unsplit */ + PetscCall(DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &Ncl, &closure)); + for (PetscInt cl = 0; cl < Ncl * 2; cl += 2) { + PetscCall(DMLabelGetValue(label, closure[cl], &val)); + if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, " Point %" PetscInt_FMT ":%" PetscInt_FMT "\n", closure[cl], val)); + if ((val >= 0) && (val <= dim)) { + PetscInt Nincl, inval, indep; + + if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, " Point %" PetscInt_FMT " is being unsplit\n", closure[cl])); PetscCall(DMPlexGetPointDepth(dm, closure[cl], &dep)); + PetscCall(DMLabelClearValue(label, closure[cl], val)); PetscCall(DMLabelSetValue(label, closure[cl], shift2 + dep)); - } else if (((valA >= shift) && (valA < shift2)) || ((valA <= -shift) && (valA > -shift2))) { - PetscCall(DMPlexGetPointDepth(dm, closure[cl], &dep)); - PetscCall(DMLabelClearValue(label, closure[cl], valA)); - PetscCall(DMLabelSetValue(label, closure[cl], dep)); + + 
PetscCall(DMPlexGetTransitiveClosure(dm, closure[cl], PETSC_TRUE, &Nincl, &inclosure));
+ for (PetscInt incl = 0; incl < Nincl * 2; incl += 2) {
+ PetscCall(DMLabelGetValue(label, inclosure[incl], &inval));
+ if ((inval >= 0) && (inval <= dim)) {
+ if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, " Point %" PetscInt_FMT " is being unsplit\n", inclosure[incl]));
+ PetscCall(DMPlexGetPointDepth(dm, inclosure[incl], &indep));
+ PetscCall(DMLabelClearValue(label, inclosure[incl], inval));
+ PetscCall(DMLabelSetValue(label, inclosure[incl], shift2 + indep));
+ }
+ }
+ }
+ }
- PetscCall(DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure));
 }
 }
+ PetscCall(DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &inclosure));
+ PetscCall(DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure));
 PetscCall(ISRestoreIndices(dimIS, &points));
 PetscCall(ISDestroy(&dimIS));
 PetscFunctionReturn(PETSC_SUCCESS);
@@ -2104,6 +2135,7 @@ static PetscErrorCode CheckFaultEdge_Private(DM dm, DMLabel label)
. blabel - A `DMLabel` marking the vertices on the boundary which will not be duplicated, or `NULL` to find them automatically
. bvalue - Value of `DMLabel` marking the vertices on the boundary
. flip - Flag to flip the submesh normal and replace points on the other side
+. split - Split faces impinging on the surface, rather than clamping the surface boundary
- subdm - The `DM` associated with the label, or `NULL`

 Output Parameter:
@@ -2116,13 +2148,13 @@ static PetscErrorCode CheckFaultEdge_Private(DM dm, DMLabel label)

.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexConstructCohesiveCells()`, `DMPlexLabelComplete()`
@*/
-PetscErrorCode DMPlexLabelCohesiveComplete(DM dm, DMLabel label, DMLabel blabel, PetscInt bvalue, PetscBool flip, DM subdm)
+PetscErrorCode DMPlexLabelCohesiveComplete(DM dm, DMLabel label, DMLabel blabel, PetscInt bvalue, PetscBool flip, PetscBool split, DM subdm)
 {
 DMLabel depthLabel;
 IS dimIS, subpointIS = NULL;
 const PetscInt *points, *subpoints;
 const PetscInt rev = flip ? -1 : 1;
- PetscInt shift = 100, shift2 = 200, shift3 = 300, dim, depth, numPoints, numSubpoints, p, val;
+ PetscInt shift = 100, shift2 = 200, shift3 = split ?
300 : 0, dim, depth, numPoints, numSubpoints, p, val; PetscFunctionBegin; PetscCall(DMPlexGetDepth(dm, &depth)); @@ -2295,7 +2327,7 @@ PetscErrorCode DMPlexLabelCohesiveComplete(DM dm, DMLabel label, DMLabel blabel, divide: if (subpointIS) PetscCall(ISRestoreIndices(subpointIS, &subpoints)); PetscCall(DMPlexLabelFaultHalo(dm, label)); - PetscCall(CheckFaultEdge_Private(dm, label)); + PetscCall(CheckFaultEdge_Private(dm, label, split)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -2407,7 +2439,7 @@ PetscErrorCode DMPlexCreateHybridMesh(DM dm, DMLabel label, DMLabel bdlabel, Pet PetscCall(PetscStrlcat(sname, " split", sizeof(sname))); PetscCall(DMLabelCreate(PETSC_COMM_SELF, sname, &slabel)); } - PetscCall(DMPlexLabelCohesiveComplete(dm, hlabel, bdlabel, bdvalue, PETSC_FALSE, idm)); + PetscCall(DMPlexLabelCohesiveComplete(dm, hlabel, bdlabel, bdvalue, PETSC_FALSE, PETSC_TRUE, idm)); if (dmInterface) { *dmInterface = idm; } else PetscCall(DMDestroy(&idm)); @@ -2505,7 +2537,7 @@ static PetscErrorCode DMPlexMarkSubmesh_Uninterpolated(DM dm, DMLabel vertexLabe PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode DMPlexMarkSubmesh_Interpolated(DM dm, DMLabel vertexLabel, PetscInt value, PetscBool markedFaces, DMLabel subpointMap, DM subdm) +PetscErrorCode DMPlexMarkSubmesh_Interpolated(DM dm, DMLabel vertexLabel, PetscInt value, PetscBool markedFaces, PetscBool addCells, DMLabel subpointMap, DM subdm) { IS subvertexIS = NULL; const PetscInt *subvertices; @@ -2577,7 +2609,8 @@ static PetscErrorCode DMPlexMarkSubmesh_Interpolated(DM dm, DMLabel vertexLabel, } PetscCall(DMPlexGetSupportSize(dm, face, &supportSize)); PetscCall(DMPlexGetSupport(dm, face, &support)); - for (s = 0; s < supportSize; ++s) PetscCall(DMLabelSetValue(subpointMap, support[s], dim)); + if (addCells) + for (s = 0; s < supportSize; ++s) PetscCall(DMLabelSetValue(subpointMap, support[s], dim)); } PetscCall(DMPlexRestoreTransitiveClosure(dm, face, PETSC_TRUE, &closureSize, &closure)); } @@ -3356,7 +3389,7 @@ static PetscErrorCode DMPlexCreateSubmeshGeneric_Interpolated(DM dm, DMLabel lab PetscCall(DMPlexSetSubpointMap(subdm, subpointMap)); if (cellHeight) { if (isCohesive) PetscCall(DMPlexMarkCohesiveSubmesh_Interpolated(dm, label, value, subpointMap, subdm)); - else PetscCall(DMPlexMarkSubmesh_Interpolated(dm, label, value, markedFaces, subpointMap, subdm)); + else PetscCall(DMPlexMarkSubmesh_Interpolated(dm, label, value, markedFaces, PETSC_TRUE, subpointMap, subdm)); } else { DMLabel depth; IS pointIS; @@ -4046,7 +4079,7 @@ static PetscErrorCode DMPlexCreateCohesiveSubmesh_Interpolated(DM dm, const char PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexCreateCohesiveSubmesh - Extract from a mesh with cohesive cells the hypersurface defined by one face of the cells. Optionally, a label can be given to restrict the cells. Input Parameters: @@ -4151,7 +4184,7 @@ PetscErrorCode DMPlexReorderCohesiveSupports(DM dm) . ignoreLabelHalo - The flag indicating if labeled points that are in the halo are ignored - sanitizeSubmesh - The flag indicating if a subpoint is forced to be owned by a rank that owns a subcell that contains that point in its closure - Output Parameter: + Output Parameters: + ownershipTransferSF - The `PetscSF` representing the ownership transfers between parent local meshes due to submeshing. 
- subdm - The new mesh diff --git a/src/dm/impls/plex/plexvtk.c b/src/dm/impls/plex/plexvtk.c index 1adcf346175..788cebd7b02 100644 --- a/src/dm/impls/plex/plexvtk.c +++ b/src/dm/impls/plex/plexvtk.c @@ -630,7 +630,7 @@ static PetscErrorCode DMPlexVTKWriteAll_ASCII(DM dm, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexVTKWriteAll - Write a file containing all the fields that have been provided to the viewer Collective diff --git a/src/dm/impls/plex/tests/ex1.c b/src/dm/impls/plex/tests/ex1.c index a45e69f7261..d2f7a09615e 100644 --- a/src/dm/impls/plex/tests/ex1.c +++ b/src/dm/impls/plex/tests/ex1.c @@ -754,7 +754,7 @@ int main(int argc, char **argv) args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash test: suffix: p4est_periodic_3d - args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash + args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,3,2 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash test: suffix: p4est_gmsh_periodic args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh @@ -832,10 +832,13 @@ int main(int argc, char **argv) test: suffix: p4est_par_ovl_periodic args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash - #TODO Mesh cell 201 is inverted, vol = 0. (FVM Volume. Is it correct? 
-> Diagnostics disabled) + # Problem for -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_maximum_refinement 2 test: suffix: p4est_par_ovl_periodic_3d - args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0 + args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none \ + -dm_plex_box_faces 3,5,2 \ + -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 \ + -conv_par_1_dm_p4est_refine_pattern hash test: suffix: p4est_par_ovl_gmsh_periodic args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh @@ -855,9 +858,10 @@ int main(int argc, char **argv) test: suffix: p4est_par_ovl_hyb_2d args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh + # -conv_par_1_dm_forest_maximum_refinement 1 was too expensive test: suffix: p4est_par_ovl_hyb_3d - args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh + args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 0 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh test: TODO: broken diff --git a/src/dm/impls/plex/tests/ex3.c b/src/dm/impls/plex/tests/ex3.c index a233a7da93b..ee553bd84e1 100644 --- a/src/dm/impls/plex/tests/ex3.c +++ b/src/dm/impls/plex/tests/ex3.c @@ -20,6 +20,7 @@ typedef struct { PetscFE fe; /* The finite element */ /* Testing space */ PetscInt porder; /* Order of polynomials to test */ + PetscBool RT; /* Test for Raviart-Thomas elements */ PetscBool convergence; /* Test for order of convergence */ PetscBool convRefine; /* Test for convergence using refinement, otherwise use coarsening */ PetscBool constraints; /* Test local constraints */ @@ -31,6 +32,10 @@ typedef struct { PetscReal constants[3]; /* Constant values for each dimension */ } AppCtx; +/* +Derivatives are set as n_i \partial u_j / \partial x_i +*/ + /* u = 1 */ PetscErrorCode constant(PetscInt dim, PetscReal time, const PetscReal coords[], PetscInt Nf, PetscScalar *u, void *ctx) { @@ -46,11 +51,29 @@ PetscErrorCode constantDer(PetscInt dim, PetscReal time, const PetscReal coords[ return PETSC_SUCCESS; } -/* u = x */ +/* RT_0: u = (1 + x, 1 + y) or (1 + x, 1 + y, 1 + z) */ +PetscErrorCode rt0(PetscInt dim, PetscReal time, const PetscReal coords[], PetscInt Nf, PetscScalar *u, void *ctx) +{ + PetscInt d; + for (d = 0; d < dim; ++d) u[d] = 1.0 + coords[d]; + return PETSC_SUCCESS; +} + +PetscErrorCode rt0Der(PetscInt dim, PetscReal time, const PetscReal coords[], const PetscReal n[], PetscInt Nf, PetscScalar *u, void *ctx) +{ + PetscInt d, e; + for (d = 0; d < dim; ++d) { + u[d] = 0.0; + for (e = 0; e < dim; ++e) u[d] += (d == e ? 
1.0 : 0.0) * n[e]; + } + return PETSC_SUCCESS; +} + +/* u = (x + y, y + x) or (x + z, 2y, z + x) */ PetscErrorCode linear(PetscInt dim, PetscReal time, const PetscReal coords[], PetscInt Nf, PetscScalar *u, void *ctx) { PetscInt d; - for (d = 0; d < dim; ++d) u[d] = coords[d]; + for (d = 0; d < dim; ++d) u[d] = coords[d] + coords[dim - d - 1]; return PETSC_SUCCESS; } PetscErrorCode linearDer(PetscInt dim, PetscReal time, const PetscReal coords[], const PetscReal n[], PetscInt Nf, PetscScalar *u, void *ctx) @@ -58,7 +81,34 @@ PetscErrorCode linearDer(PetscInt dim, PetscReal time, const PetscReal coords[], PetscInt d, e; for (d = 0; d < dim; ++d) { u[d] = 0.0; - for (e = 0; e < dim; ++e) u[d] += (d == e ? 1.0 : 0.0) * n[e]; + for (e = 0; e < dim; ++e) u[d] += ((d == e ? 1. : 0.) + (d == (dim - e - 1) ? 1. : 0.)) * n[e]; + } + return PETSC_SUCCESS; +} + +/* RT_1: u = (1 + x + y + x^2 + xy, 1 + x + y + xy + y^2) or (1 + x + y + z + x^2 + xy + xz, 1 + x + y + z + xy + y^2 + yz, 1 + x + y + z + xz + yz + z^2) */ +PetscErrorCode rt1(PetscInt dim, PetscReal time, const PetscReal coords[], PetscInt Nf, PetscScalar *u, void *ctx) +{ + if (dim > 2) { + u[0] = 1.0 + coords[0] + coords[1] + coords[2] + coords[0] * coords[0] + coords[0] * coords[1] + coords[0] * coords[2]; + u[1] = 1.0 + coords[0] + coords[1] + coords[2] + coords[0] * coords[1] + coords[1] * coords[1] + coords[1] * coords[2]; + u[2] = 1.0 + coords[0] + coords[1] + coords[2] + coords[0] * coords[2] + coords[1] * coords[2] + coords[2] * coords[2]; + } else if (dim > 1) { + u[0] = 1.0 + coords[0] + coords[1] + coords[0] * coords[0] + coords[0] * coords[1]; + u[1] = 1.0 + coords[0] + coords[1] + coords[0] * coords[1] + coords[1] * coords[1]; + } + return PETSC_SUCCESS; +} + +PetscErrorCode rt1Der(PetscInt dim, PetscReal time, const PetscReal coords[], const PetscReal n[], PetscInt Nf, PetscScalar *u, void *ctx) +{ + if (dim > 2) { + u[0] = (1.0 + 2.0 * coords[0] + coords[1] + coords[2]) * n[0] + (1.0 + coords[0]) * n[1] + (1.0 + coords[0]) * n[2]; + u[1] = (1.0 + coords[1]) * n[0] + (1.0 + coords[0] + 2.0 * coords[1] + coords[2]) * n[1] + (1.0 + coords[1]) * n[2]; + u[2] = (1.0 + coords[2]) * n[0] + (1.0 + coords[2]) * n[1] + (1.0 + coords[0] + coords[1] + 2.0 * coords[2]) * n[2]; + } else if (dim > 1) { + u[0] = (1.0 + 2.0 * coords[0] + coords[1]) * n[0] + (1.0 + coords[0]) * n[1]; + u[1] = (1.0 + coords[1]) * n[0] + (1.0 + coords[0] + 2.0 * coords[1]) * n[1]; } return PETSC_SUCCESS; } @@ -148,6 +198,7 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) options->qorder = 0; options->numComponents = PETSC_DEFAULT; options->porder = 0; + options->RT = PETSC_FALSE; options->convergence = PETSC_FALSE; options->convRefine = PETSC_TRUE; options->constraints = PETSC_FALSE; @@ -167,6 +218,7 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) PetscCall(PetscOptionsBoundedInt("-qorder", "The quadrature order", "ex3.c", options->qorder, &options->qorder, NULL, 0)); PetscCall(PetscOptionsBoundedInt("-num_comp", "The number of field components", "ex3.c", options->numComponents, &options->numComponents, NULL, PETSC_DEFAULT)); PetscCall(PetscOptionsBoundedInt("-porder", "The order of polynomials to test", "ex3.c", options->porder, &options->porder, NULL, 0)); + PetscCall(PetscOptionsBool("-RT", "Use the Raviart-Thomas elements", "ex3.c", options->RT, &options->RT, NULL)); PetscCall(PetscOptionsBool("-convergence", "Check the convergence rate", "ex3.c", options->convergence, &options->convergence, NULL)); 
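+ // -conv_refine selects how the convergence study changes resolution: refinement when true, coarsening when false (see the convRefine field above)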
PetscCall(PetscOptionsBool("-conv_refine", "Use refinement for the convergence rate", "ex3.c", options->convRefine, &options->convRefine, NULL)); PetscCall(PetscOptionsBool("-constraints", "Test local constraints (serial only)", "ex3.c", options->constraints, &options->constraints, NULL)); @@ -703,12 +755,22 @@ static PetscErrorCode CheckFunctions(DM dm, PetscInt order, AppCtx *user) /* Setup functions to approximate */ switch (order) { case 0: - exactFuncs[0] = constant; - exactFuncDers[0] = constantDer; + if (user->RT) { + exactFuncs[0] = rt0; + exactFuncDers[0] = rt0Der; + } else { + exactFuncs[0] = constant; + exactFuncDers[0] = constantDer; + } break; case 1: - exactFuncs[0] = linear; - exactFuncDers[0] = linearDer; + if (user->RT) { + exactFuncs[0] = rt1; + exactFuncDers[0] = rt1Der; + } else { + exactFuncs[0] = linear; + exactFuncDers[0] = linearDer; + } break; case 2: exactFuncs[0] = quadratic; @@ -1319,6 +1381,45 @@ int main(int argc, char **argv) -petscdualspace_order 1 \ -petscdualspace_components 3 + # 2D RT_0 on a triangle + test: + suffix: rt0_2d_tri + requires: triangle + args: -qorder 1 -porder 0 -RT \ + -petscspace_type ptrimmed \ + -petscspace_components 2 \ + -petscspace_ptrimmed_form_degree -1 \ + -petscdualspace_order 1 \ + -petscdualspace_form_degree -1 \ + -petscdualspace_lagrange_trimmed true + + # 2D RT_0 on a quadrilateral + test: + suffix: rt0_2d_quad + requires: triangle + args: -dm_plex_simplex 0 -qorder 1 -porder 0 -RT \ + -petscspace_degree 1 \ + -petscspace_type sum \ + -petscspace_variables 2 \ + -petscspace_components 2 \ + -petscspace_sum_spaces 2 \ + -petscspace_sum_concatenate true \ + -sumcomp_0_petscspace_variables 2 \ + -sumcomp_0_petscspace_type tensor \ + -sumcomp_0_petscspace_tensor_spaces 2 \ + -sumcomp_0_petscspace_tensor_uniform false \ + -sumcomp_0_tensorcomp_0_petscspace_degree 1 \ + -sumcomp_0_tensorcomp_1_petscspace_degree 0 \ + -sumcomp_1_petscspace_variables 2 \ + -sumcomp_1_petscspace_type tensor \ + -sumcomp_1_petscspace_tensor_spaces 2 \ + -sumcomp_1_petscspace_tensor_uniform false \ + -sumcomp_1_tensorcomp_0_petscspace_degree 0 \ + -sumcomp_1_tensorcomp_1_petscspace_degree 1 \ + -petscdualspace_form_degree -1 \ + -petscdualspace_order 1 \ + -petscdualspace_lagrange_trimmed true + TEST*/ /* diff --git a/src/dm/impls/plex/tests/ex49.c b/src/dm/impls/plex/tests/ex49.c index 5628c363a8f..6223addde04 100644 --- a/src/dm/impls/plex/tests/ex49.c +++ b/src/dm/impls/plex/tests/ex49.c @@ -183,6 +183,36 @@ static PetscErrorCode CheckOffsets(DM dm, AppCtx *user, const char *domain_name, PetscCall(PetscFree(offsets)); PetscCall(DMGetLocalToGlobalMapping(cdm, <og)); PetscCall(ISLocalToGlobalMappingViewFromOptions(ltog, NULL, "-coord_ltog_view")); + { + DM clonedm; + Vec cloneX, X; + PetscInt clone_num_x, num_x; + const PetscScalar *clonex, *x; + + PetscCall(DMClone(dm, &clonedm)); + { // Force recreation of local coordinate vector + Vec X_global; + + PetscCall(DMGetCoordinates(dm, &X_global)); + PetscCall(DMSetCoordinates(clonedm, X_global)); + } + PetscCall(DMGetCoordinatesLocal(dm, &X)); + PetscCall(DMGetCoordinatesLocal(clonedm, &cloneX)); + PetscCall(VecGetLocalSize(X, &num_x)); + PetscCall(VecGetLocalSize(cloneX, &clone_num_x)); + PetscCheck(num_x == clone_num_x, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "Cloned DM coordinate size (%" PetscInt_FMT ") different from original DM coordinate size (%" PetscInt_FMT ")", clone_num_x, num_x); + + PetscCall(VecGetArrayRead(X, &x)); + PetscCall(VecGetArrayRead(cloneX, &clonex)); + + for (PetscInt i = 0; i < 
num_x; i++) { + PetscCheck(PetscIsCloseAtTolScalar(x[i], clonex[i], 1e-13, 1e-13), PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Original coordinate (%4.2f) and cloned coordinate (%4.2f) are different", (double)PetscRealPart(x[i]), (double)PetscRealPart(clonex[i])); + } + + PetscCall(VecRestoreArrayRead(X, &x)); + PetscCall(VecRestoreArrayRead(cloneX, &clonex)); + PetscCall(DMDestroy(&clonedm)); + } } PetscFunctionReturn(PETSC_SUCCESS); } @@ -238,14 +268,20 @@ int main(int argc, char **argv) args: -dm_plex_simplex 0 -dm_plex_dim 2 -dm_plex_shape zbox -dm_plex_box_faces 4,3 -dm_distribute 0 -petscspace_degree 1 -dm_plex_box_bd periodic,none -dm_view ::ascii_info_detail testset: - args: -dm_plex_simplex 0 -dm_plex_dim 2 -dm_plex_shape zbox -dm_plex_box_faces 3,2 -petscspace_degree 1 -dm_plex_box_bd none,periodic -dm_view ::ascii_info_detail -closure_tensor + args: -dm_plex_simplex 0 -dm_plex_dim 2 -dm_plex_shape zbox -dm_plex_box_faces 3,2 -petscspace_degree 1 -dm_view ::ascii_info_detail -closure_tensor nsize: 2 test: suffix: 2d_sfc_periodic_stranded - args: -dm_distribute 0 + args: -dm_distribute 0 -dm_plex_box_bd none,periodic test: suffix: 2d_sfc_periodic_stranded_dist - args: -dm_distribute 1 -petscpartitioner_type simple + args: -dm_distribute 1 -petscpartitioner_type simple -dm_plex_box_bd none,periodic + test: + suffix: 2d_sfc_biperiodic_stranded + args: -dm_distribute 0 -dm_plex_box_bd periodic,periodic + test: + suffix: 2d_sfc_biperiodic_stranded_dist + args: -dm_distribute 1 -petscpartitioner_type simple -dm_plex_box_bd periodic,periodic test: suffix: fv_0 diff --git a/src/dm/impls/plex/tests/ex57.c b/src/dm/impls/plex/tests/ex57.c index cf0934bcaaa..5ba5cc42b24 100644 --- a/src/dm/impls/plex/tests/ex57.c +++ b/src/dm/impls/plex/tests/ex57.c @@ -238,4 +238,12 @@ int main(int argc, char *argv[]) -first_dm_plex_transform_type refine_boundary_layer -first_dm_plex_transform_bl_splits 4 \ -ref_dm_view + # Tests for extrusion + test: + suffix: sphere_extruded + args: -dm_plex_shape sphere \ + -first_dm_plex_transform_type extrude \ + -first_dm_plex_transform_extrude_layers 3 \ + -first_dm_plex_transform_extrude_use_tensor 0 + TEST*/ diff --git a/src/dm/impls/plex/tests/ex66.c b/src/dm/impls/plex/tests/ex66.c index 315db1447bb..1425dd4ca27 100644 --- a/src/dm/impls/plex/tests/ex66.c +++ b/src/dm/impls/plex/tests/ex66.c @@ -3,6 +3,8 @@ static const char help[] = "Test for non-manifold interpolation"; #include /* +Test 0: + 3-------------7 /| /| / | / | @@ -17,33 +19,34 @@ static const char help[] = "Test for non-manifold interpolation"; | y | / |/ |/ 2--->-x-------6-------------9 + +Test 1: + + 3-------------7 + /| /| + / | / | + / | / | + 1-------------5 | + | | | | + | | | | + | | | | + | | | | + z 4---------|---8 + ^ / | / \ + | y | / \ + |/ |/ \ + 2--->-x-------6-------9 */ int main(int argc, char **argv) { - DM dm, idm; - DMLabel ctLabel; - PetscBool has_vtk = PETSC_FALSE; - - // 9 vertices - // 1 edge - // 0 faces - // 1 volume - PetscInt num_points[4] = {9, 1, 0, 1}; - - // point 0 = hexahedron (defined by 8 vertices) - // points 1-9 = vertices - // point 10 = edged (defined by 2 vertices) - PetscInt cone_size[11] = {8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2}; - - // hexahedron defined by points - PetscInt cones[11] = {3, 4, 2, 1, 7, 5, 6, 8, 6, 9}; - PetscInt cone_orientations[11] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; - PetscScalar vertex_coords[3 * 9] = {0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 2, 0, 0}; + DM dm, idm; + DMLabel ctLabel; + PetscInt testNum = 0; 
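+
+ // testNum 0 attaches a dangling edge (cone {6, 9}) to the hexahedron, while
+ // testNum 1 attaches a dangling triangle (cone {6, 9, 8}) touching the
+ // hexahedron at vertices 6 and 8, so interpolation is exercised across both
+ // edge- and face-type non-manifold junctions; see the diagrams above.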
PetscFunctionBeginUser;
 PetscCall(PetscInitialize(&argc, &argv, NULL, help));
- PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Output VTK?", "ex66");
- PetscCall(PetscOptionsGetBool(NULL, NULL, "-vtk", &has_vtk, NULL));
+ PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Non-Manifold Options", "ex66");
+ PetscCall(PetscOptionsInt("-test_num", "Test number", "", testNum, &testNum, NULL));
 PetscOptionsEnd();

 PetscCall(DMCreate(PETSC_COMM_WORLD, &dm));
@@ -51,7 +54,56 @@ int main(int argc, char **argv)
 PetscCall(DMSetType(dm, DMPLEX));
 PetscCall(DMSetDimension(dm, 3));

- PetscCall(DMPlexCreateFromDAG(dm, 3, num_points, cone_size, cones, cone_orientations, vertex_coords));
+ switch (testNum) {
+ case 0: {
+ // 9 vertices
+ // 1 edge
+ // 0 faces
+ // 1 volume
+ PetscInt num_points[4] = {9, 1, 0, 1};
+
+ // point 0 = hexahedron (defined by 8 vertices)
+ // points 1-9 = vertices
+ // point 10 = edge (defined by 2 vertices)
+ PetscInt cone_size[11] = {8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2};
+
+ // hexahedron defined by points
+ PetscInt cones[11] = {3, 4, 2, 1, 7, 5, 6, 8, 6, 9};
+ PetscInt cone_orientations[11] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
+ PetscScalar vertex_coords[3 * 9] = {0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 2, 0, 0};
+
+ PetscCall(DMPlexCreateFromDAG(dm, 3, num_points, cone_size, cones, cone_orientations, vertex_coords));
+ } break;
+ case 1: {
+ // 9 vertices
+ // 0 edges
+ // 1 face
+ // 1 volume
+ PetscInt num_points[4] = {9, 0, 1, 1};
+
+ // point 0 = hexahedron (defined by 8 vertices)
+ // points 1-9 = vertices
+ // point 10 = triangle (defined by 3 vertices)
+ PetscInt cone_size[11] = {8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3};
+
+ // hexahedron defined by 8 points (point 0)
+ PetscInt cones[11] = {3, 4, 2, 1, 7, 5, 6, 8, 6, 9, 8};
+ PetscInt cone_orientations[11] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
+ PetscScalar vertex_coords[3 * 9] = {0, 0, 1, // point 1
+ 0, 0, 0, // point 2
+ 0, 1, 1, // point 3
+ 0, 1, 0, // point 4
+ 1, 0, 1, // point 5
+ 1, 0, 0, // point 6
+ 1, 1, 1, // point 7
+ 1, 1, 0, // point 8
+ 2, 0, 0}; // point 9
+
+ PetscCall(DMPlexCreateFromDAG(dm, 3, num_points, cone_size, cones, cone_orientations, vertex_coords));
+ } break;
+ default:
+ SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Invalid test number %" PetscInt_FMT, testNum);
+ }
 PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

 // TODO: make it work with a DM made from a msh file
@@ -60,17 +112,36 @@ int main(int argc, char **argv)

 // Must set cell types
 PetscCall(DMPlexGetCellTypeLabel(dm, &ctLabel));
- PetscCall(DMLabelSetValue(ctLabel, 0, DM_POLYTOPE_HEXAHEDRON));
- PetscCall(DMLabelSetValue(ctLabel, 1, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 2, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 3, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 4, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 5, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 6, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 7, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 8, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 9, DM_POLYTOPE_POINT));
- PetscCall(DMLabelSetValue(ctLabel, 10, DM_POLYTOPE_SEGMENT));
+ switch (testNum) {
+ case 0:
+ PetscCall(DMLabelSetValue(ctLabel, 0, DM_POLYTOPE_HEXAHEDRON));
+ PetscCall(DMLabelSetValue(ctLabel, 1, DM_POLYTOPE_POINT));
+ PetscCall(DMLabelSetValue(ctLabel, 2, DM_POLYTOPE_POINT));
+ PetscCall(DMLabelSetValue(ctLabel, 3, DM_POLYTOPE_POINT));
+ PetscCall(DMLabelSetValue(ctLabel, 4,
DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 5, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 6, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 7, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 8, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 9, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 10, DM_POLYTOPE_SEGMENT)); + break; + case 1: + PetscCall(DMLabelSetValue(ctLabel, 0, DM_POLYTOPE_HEXAHEDRON)); + PetscCall(DMLabelSetValue(ctLabel, 1, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 2, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 3, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 4, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 5, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 6, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 7, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 8, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 9, DM_POLYTOPE_POINT)); + PetscCall(DMLabelSetValue(ctLabel, 10, DM_POLYTOPE_TRIANGLE)); + break; + default: + SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Invalid test number %" PetscInt_FMT, testNum); + } // interpolate (make sure to use -interp_dm_plex_stratify_celltype) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, "interp_")); @@ -81,23 +152,20 @@ int main(int argc, char **argv) PetscCall(DMSetFromOptions(dm)); PetscCall(DMViewFromOptions(dm, NULL, "-dm_view")); - if (has_vtk) { - PetscViewer viewer; - PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &viewer)); - PetscCall(PetscViewerSetType(viewer, PETSCVIEWERVTK)); - PetscCall(PetscViewerFileSetName(viewer, "ex66.vtk")); - PetscCall(DMView(dm, viewer)); - PetscCall(PetscViewerDestroy(&viewer)); - } - PetscCall(DMDestroy(&dm)); PetscCall(PetscFinalize()); return 0; } /*TEST - test: - suffix: 0 + + testset: args: -interp_dm_plex_stratify_celltype -dm_view ::ascii_info_detail -interp_dm_view ::ascii_info_detail + test: + suffix: 0 + test: + suffix: 1 + args: -test_num 1 + TEST*/ diff --git a/src/dm/impls/plex/tests/ex69.c b/src/dm/impls/plex/tests/ex69.c new file mode 100644 index 00000000000..17e4edf9b65 --- /dev/null +++ b/src/dm/impls/plex/tests/ex69.c @@ -0,0 +1,1146 @@ +static char help[] = "Tests for creation of cohesive meshes by transforms\n\n"; + +#include +#include + +#include + +PETSC_EXTERN char tri_2_cv[]; +char tri_2_cv[] = "\ +2 4 6 3 1\n\ +0 2 1\n\ +1 2 3\n\ +4 1 5\n\ +4 0 1\n\ +-1.0 0.0 0.0 1\n\ + 0.0 1.0 0.0 -1\n\ + 0.0 -1.0 0.0 1\n\ + 1.0 0.0 0.0 -1\n\ +-2.0 1.0 0.0 1\n\ +-1.0 2.0 0.0 -1"; + +/* List of test meshes + +Test tri_0: triangle + + 4-10--5 8-16--7-14--4 + |\ 1 | |\ \ 1 | + | \ | | \ \ | + 6 8 9 -> 9 12 2 11 13 + | \ | | \ \ | + | 0 \| | 0 \ \| + 2--7--3 3-10--6-15--5 + +Test tri_1: triangle, not tensor + + 4-10--5 8-10--7-16--4 + |\ 1 | |\ \ 1 | + | \ | | \ \ | + 6 8 9 -> 11 14 2 13 15 + | \ | | \ \ | + | 0 \| | 0 \ \| + 2--7--3 3-12--6--9--5 + +Test tri_2: 4 triangles, non-oriented surface + + 9 + / \ + / \ + 17 2 16 + / \ + / \ + 8-----15----5 + \ /|\ + \ / | \ + 18 3 12 | 14 + \ / | \ + \ / | \ + 4 0 11 1 7 + \ | / + \ | / + 10 | 13 + \ | / + \|/ + 6 + becomes + 8 + / \ + / \ + / \ + 25 2 24 + / \ + / \ + 13-----18------9 +28 | 5 26/ \ + 14----19----10 \ + \ /| |\ + \ / | | \ + 21 3 20 | | 23 + \ / | | \ + \ / | | \ + 6 0 17 4 16 1 7 + \ | | / + \ | | / + 15 | | 22 + \ | | / + \| |/ + 12---11 + 27 + +Test tri_3: tri_2, in parallel + + 6 + / \ + / \ + / \ + 12 1 11 + / \ + / \ + 5-----10------2 + \ + 5-----9-----3 
2 + \ /| |\ + \ / | | \ + 10 1 8 | | 9 + \ / | | \ + \ / | | \ + 2 0 7 7 0 4 + \ | | / + \ | | / + 6 | | 8 + \ | | / + \| |/ + 4 3 + becomes + 11 + / \ + / \ + / \ + 19 1 18 + / \ + / \ + 8-----14------4 + 22 \ 3 | + 9------15 |\ + \ | \ + 9------14-----5 \ 20 | + 20\ 3 18/ \ \/ | + 10----15-----6 | 5 | + \ /| | | |\ + \ / | | | | \ + 17 1 16 | | | | 17 + \ / | 2 | | 2 | \ + \ / | | | | \ + 4 0 13 12 13 12 0 10 + \ | | | | / + \ | | | | / + 11 | | | | 16 + \ | | | | / + \| | | |/ + 8---7 7---6 + 19 21 + +Test quad_0: quadrilateral + + 5-10--6-11--7 5-12-10-20--9-14--6 + | | | | | | | +12 0 13 1 14 --> 15 0 18 2 17 1 16 + | | | | | | | + 2--8--3--9--4 3-11--8-19--7-13--4 + +Test quad_1: quadrilateral, not tensor + + 5-10--6-11--7 5-14-10-12--9-16--6 + | | | | | | | +12 0 13 1 14 --> 17 0 20 2 19 1 18 + | | | | | | | + 2--8--3--9--4 3-13--8-11--7-15--4 + +Test quad_2: quadrilateral, 2 processes + + 3--6--4 3--6--4 3--9--7-14--6 5-14--4--9--7 + | | | | | | | | | | + 7 0 8 7 0 8 --> 10 0 12 1 11 12 1 11 0 10 + | | | | | | | | | | + 1--5--2 1--5--2 2--8--5-13--4 3-13--2--8--6 + +Test quad_3: quadrilateral, 4 processes, non-oriented surface + + 3--6--4 3--6--4 3--9--7-14--6 5-14--4--9--7 + | | | | | | | | | | + 7 0 8 7 0 8 10 0 12 1 11 12 1 11 0 10 + | | | | | | | | | | + 1--5--2 1--5--2 2--8--5-13--4 3-13--2--8--6 + --> + 3--6--4 3--6--4 3--9--7-14--6 5-14--4--9--7 + | | | | | | | | | | + 7 0 8 7 0 8 10 0 12 1 11 12 1 11 0 10 + | | | | | | | | | | + 1--5--2 1--5--2 2--8--5-13--4 3-13--2--8--6 + +Test quad_4: embedded fault + +14-24-15-25-16-26--17 + | | | | +28 3 30 4 32 5 34 + | | | | +10-21-11-22-12-23--13 + | | | | +27 0 29 1 31 2 33 + | | | | + 6-18--7-19--8-20--9 + +becomes + + 13-26-14-27-15-28--16 + | | | | + 30 3 32 4 39 5 40 + | | | | + 12-25-17-36-19-38--21 + | | | + 41 6 42 7 43 + | | | + 12-25-17-35-18-37--20 + | | | | + 29 0 31 1 33 2 34 + | | | | + 8-22--9-23-10-24--11 + +Test quad_5: two faults + +14-24-15-25-16-26--17 + | | | | +28 3 30 4 32 5 34 + | | | | +10-21-11-22-12-23--13 + | | | | +27 0 29 1 31 2 33 + | | | | + 6-18--7-19--8-20--9 + +becomes + +12-26-13-27-14-28--15 + | | | | +37 4 31 3 33 5 40 + | | | | +17-36-18-25-19-39--21 + | | | | +43 6 44 41 7 42 + | | | | +16-35-18-25-19-38--20 + | | | | +29 0 30 1 32 2 34 + | | | | + 8-22--9-23-10-24--11 + +Test quad_6: T-junction + +14-24-15-25-16-26--17 + | | | | +28 3 30 4 32 5 34 + | | | | +10-21-11-22-12-23--13 + | | | | +27 0 29 1 31 2 33 + | | | | + 6-18--7-19--8-20--9 + +becomes + + 13-26-14-27-15-28--16 + | | | | + 30 3 32 4 39 5 40 + | | | | + 12-25-17-36-19-38--21 + | | | + 41 6 42 7 43 + | | | + 12-25-17-35-18-37--20 + | | | | + 29 0 31 1 33 2 34 + | | | | + 8-22--9-23-10-24--11 + +becomes + + 14-28-15-41-21-44--20-29-16 + | | | | | + 31 3 33 5 43 8 42 4 40 + | | | | | + 13-27-17-37-23-46--23-39-19 + | | | | + 47 6 48 48 7 49 + | | | | + 13-27-17-36-22-45--22-38-18 + | | | | | + 30 0 32 1 34 34 2 35 + | | | | | + 9-24-10-25-11-----11-26-12 + +Test tet_0: Two tets sharing a face + + cell 5 _______ cell + 0 / | \ \ 1 + 19 | 16 20 + / 15 \ \ + 2-17------4--22--6 + \ | / / + 18 | 14 21 + \ | / / + 3------- + +becomes + + cell 10 ___36____9______ cell + 0 / | \ |\ \ 1 + 29 | 27 | 26 31 + / 25 \ 24 \ \ + 3-28------8--35-----7--33--4 + \ | / | / / + 30 | 23 | 22 32 + \ | / |/ / + 6----34----5------ + cell 2 + +Test tet_1: Two tets sharing a face in parallel + + cell 4 3______ cell + 0 / | \ |\ \ 0 + 14 | 11 | 11 12 + / 10 \ 10 \ \ + 1-12------3 | 2--14--4 + \ | / | / / + 13 | 9 | 9 13 + \ | / |/ / + 2 1------ + +becomes + 
cell 1 cell 1 + cell 8---28---7 7---28---6______ cell + 0 / | \ |\ |\ |\ \ 0 + 24 | 22 | 21 | 22 | 21 23 + / 20 \ | \ | \ 19 \ \ + 2-23------6---27---5 20 5---27---4--25--8 + \ | / 19 / | / | / / + 25 | 18 | 17 | 18 | 17 24 + \ | / |/ |/ |/ / + 4---26---3 3---26---2------ + +Test hex_0: Two hexes sharing a face + +cell 11-----31-----12-----32------13 cell +0 /| /| /| 1 + 36 | 22 37| 24 38| + / | / | / | + 8-----29------9-----30------10 | + | | 18 | | 20 | | + | 42 | 43 | 44 + |14 | |15 | |16 | + 39 | 17 40 | 19 41 | + | 5-----27--|---6-----28--|---7 + | / | / | / + | 33 21 | 34 23 | 35 + |/ |/ |/ + 2-----25------3-----26------4 + +becomes + + cell 2 +cell 9-----38-----18-----62------17----42------10 cell +0 /| /| /| /| 1 + 45 | 30 54| 32 53| 24 46| + / | / | / | / | + 7-----37-----16-----61------15--|-41------8 | + | | 28 | | | | 22 | | + | 49 | 58 | 57 | 50 + |19 | |26 | |25 | |20 | + 47 | 27 56 | 55 | 21 48 | + | 5-----36--|--14-----60--|---13----40--|---6 + | / | / | / | / + | 43 29 | 52 31 | 51 23 | 44 + |/ |/ |/ |/ + 3-----35-----12-----59------11----39------4 + +Test hex_1: Two hexes sharing a face, in parallel + +cell 7-----18------8 7-----18------8 cell +0 /| /| /| /| 0 + 21 | 14 22| 21| 14 22| + / | / | / | / | + 5-----17------6 | 5---|-17------6 | + | | 12 | | | | 12 | | + | 25 | 26 | 25 | 26 + | 9 | |10 | | 9 | |10 | + 23 | 11 24 | 23 | 11 24 | + | 3-----16--|---4 | 3-----16--|---4 + | / | / | / | / + | 19 13 | 20 | 19 13 | 20 + |/ |/ |/ |/ + 1-----15------2 1-----15------2 + +becomes + cell 1 cell 1 +cell 5-----28-----13-----44-----12 9-----44-----8-----28------13 cell +0 /| /| /| /| /| /| 0 + 30 | 20 36| 22 35| 36| 22 35| 20 30| + / | / | / | / | / | / | + 4-----27-----11-----43-----10 | 7-----43-----6-----27------12 | + | | 18 | | | | | | | | 18 | | + | 32 | 40 | 39 | 40 | 39 | 32 + |14 | |16 | | 15| |15 | |14 | |16 | + 31 | 17 38 | 37 | 38 | 37 | 17 31 | + | 3-----26--|---9-----42-|---8 | 5----42--|---4-----26--|---11 + | / | / | / | / | / | / + | 29 19 | 34 21 | 33 | 34 21 | 33 19 | 29 + |/ |/ |/ |/ |/ |/ + 2-----25------7-----41-----6 3-----41-----2-----25------10 + +Test hex_2: hexahedra, 4 processes, non-oriented surface + + cell 0 cell 0 + 7-----18------8 7-----18------8 + /| /| /| /| + 21 | 14 22| 21 | 14 22| + / | / | / | / | + 5-----17------6 | 5-----17------6 | + | | 12 | | | | 12 | | + | 25 | 26 | 25 | 26 + |9 | |10 | |9 | |10 | + 23 | 11 24 | 23 | 11 24 | + | 3-----16--|---4 | 3-----16--|---4 + | / | / | / | / + | 19 13 | 20 | 19 13 | 20 + |/ |/ |/ |/ + 1-----15------2 1-----15------2 + + 7-----18------8 7-----18------8 + /| /| /| /| + 21 | 14 22| 21 | 14 22| + / | / | / | / | + 5-----17------6 | 5-----17------6 | + | | 12 | | | | 12 | | + | 25 | 26 | 25 | 26 + |9 | |10 | |9 | |10 | + 23 | 11 24 | 23 | 11 24 | + | 3-----16--|---4 | 3-----16--|---4 + | / | / | / | / + | 19 13 | 20 | 19 13 | 20 + |/ |/ |/ |/ + 1-----15------2 1-----15------2 + cell 0 cell 0 + +becomes + + cell 0 cell 1 cell 1 cell 0 + 5-----28------13----44------12 9-----44------8-----28------13 + /| /| /| /| /| /| + 30 | 20 36| 22 35| 36| 22 35 | 20 30| + / | / | / | / | / | / | + 4-----27------11----43------10 | 7-----43------6-----27------12 | + | | 18 | | | | | | | | 18 | | + | 32 | 40 | 39 | 40 | 39 | 32 + |14 | |16 | |15 | |15 | |14 | |16 | + 31 | 17 38 | 37 | 38 | 37 | 17 31 | + | 3-----26--|---9-----42--|---8 | 5-----42--|---4-----26--|---11 + | / | / | / | / | / | / + | 29 19 | 34 21 |33 | 34 21 | 33 19 | 29 + |/ |/ |/ |/ |/ |/ + 2-----25------7-----41------6 3-----41------2-----25------10 
+ + 5-----28------13----44------12 9-----44------8-----28------13 + /| /| /| /| /| /| + 30 | 20 36| 22 35| 36| 22 35| 20 30| + / | / | / | / | / | / | + 4-----27------11----43------10 | 7-----43------6-----27------12 | + | | 18 | | | | | | | | 18 | | + | 32 | 40 | 39 | 40 | 39 | 32 + |14 | |16 | |15 | |15 | |14 | |16 | + 31 | 17 38 | 37 | 38 | 37 | 17 31 | + | 3-----26--|---9-----42--|---8 | 5-----42--|---4-----26--|---11 + | / | / | / | / | / | / + | 29 19 | 34 21 |33 | 34 21 | 33 19 | 29 + |/ |/ |/ |/ |/ |/ + 2-----25------7-----41------6 3-----41------2-----25------10 + cell 0 cell 1 cell 1 cell 0 + +Test hex_3: T-junction + + 19-----52-----20-----53------21 + /| /| /| + 60 | 38 61| 41 62| + / | / | / | + 16-----50-----17-----51------18 | + | | 33 | | 35 | | + | 70 | 72 | 74 + |25 | |26 | |27 | + 64 | 32 66 | 34 68 | + | 13-----48--|--14-----49--|---15 + | /| | /| | /| + |57 | 37 | 58| 40 | 59| + |/ | |/ | |/ | + 10-----46-----11-----47------12 | + | | 29 | | 31 | | + | 69 | 71 | 73 + |22 | |23 | |24 | + 63 | 28 65 | 30 67 | + | 7-----44--|---8-----45--|---9 + | / | / | / + | 54 36 | 55 39 | 56 + |/ |/ |/ + 4-----42------5-----43------6 + cell 0 cell 1 + +becomes + + 15----102-----28---112----___27-----73------16 + /| /| / / /| + 77 | 55 104| --- 103 46 78| + / | / | / / / | + 13----101-----26---111--/----25-----72------14 | + | | 54 | | 107 / 43 | | + | 81 | 108 / 51 / | 82 + |40 | |52 | / 105 |41 | + 79 | 53 106 |/ / 42 80 | + | 21-----87--|--31---/-89------23-------/----/ + | /| | /| / /| / + |91 | 47 |109|-- 49 93| ----- + |/ | |/ /| / | / + 17-----83-----29-----85------19---- + | | | | | | + | 120 | 121 | 122 + | | |26 | | | + 117 | 118 | 119 | + | 22-----88--|--32-----90--|---24 + | /| | /| | /| + |92 | 48 |110| 50 | 94| + |/ | |/ | |/ | + 18-----84-----30-----86------20 | + | | 37 | | 39 | | + | 98 | 99 | 100 + |33 | |34 | |35 | + 95 | 36 96 | 38 97 | + | 10-----70--|--11-----71--|---12 + | / | / | / + | 74 44 | 75 45 | 76 + |/ |/ |/ + 7-----68------8-----69------9 + cell 0 cell 1 + +Test hex_4: Two non-intersecting faults + + cell 4 cell 5 cell 6 cell 7 + 33-----96-----34-----97-----35-----98-----36-----99------37 + /| /| /| /| /| + 110| 66 111| 69 112| 72 113| 75 114| + / | / | / | / | / | + 28-----92-----29-----93-----30-----94-----31-----95------32 | + | | 57 | | 59 | | 61 | | 63 | | + | 126 | 128 | 130 | 132 | 134 + |43 | |44 | |45 | |46 | |47 | + 116 | 56 118 | 58 120 | 60 122 | 62 124 | + | 23-----88--|--24-----89--|--25-----90--|--26-----91--|---27 + | /| | /| | /| | /| | /| + |105| 65 |106| 68 |107| 71 |108| 74 |109| + |/ | |/ | |/ | |/ | |/ | + 18-----84-----19-----95-----20-----86-----21-----87------22 | + | | 49 | | 51 | | 53 | | 55 | | + | 125 | 127 | 129 | 131 | 133 + |38 | |39 | |40 | |41 | |42 | + 115 | 48 117 | 50 119 | 52 121 | 54 123 | + | 13-----80--|--14-----81--|--15-----82--|--16-----83--|---17 + | / | / | / | / | / + |100 64 |101 67 |102 70 |103 73 |104 + |/ |/ |/ |/ |/ + 8-----76------9-----77-----10-----78-----11-----79------12 + cell 0 cell 1 cell 2 cell 3 + +becomes + + cell 4 cell 5 cell 7 cell 10 cell 6 + 27-----114----28-----115----29-----159----46-----170----45------116----30 + /| /| /| /| /| /| + 123| 71 124| 73 125| 87 162| 161| 78 126| + / | / | / | / | / | / | + 23-----111----24-----112----25-----158----44-----169----43-----113-----26 | + | | 65 | | 67 | | 86 | | | | 69 | | + | 134 | 135 | 137 | 166 | 165 | 140 + |56 | |57 | |58 | |84 | |83 | |59 | + 127 | 64 128 | 66 130 | 85 164 | 163 | 68 133 | + | 
35-----143-|--37-----151-|--40-----109-|--42-----168-|--42-----110-|---22 + | /| | /| | /| | / | / | / + |145| 79 |147| 81 |153| 75 |160 |160 77 |122 + |/ 173 |/ 174 |/ 176 |/ |/ |/ + 31-----141----33-----149----39-----107----41-----167----41-----108-----21 +cell | | | | | cell 9 +8 | 36-----144-|--38-----152-|--40-----109----42-----110-----22 + 171 /| 172 /| 175 /| /| /| + |146| 80 |148| 82 |153| 75 160| 77 122| + |/ | |/ | |/ | / | / | + 32-----142----34-----150----39-----107----41-----108-----21 | + | | 50 | | 52 | | 61 | | 63 | | + | 156 | 157 | 136 | 138 | 139 + |47 | |48 | |53 | |54 | |55 | + 154 | 49 155 | 51 129 | 60 131 | 62 132 | + | 16-----103-|--17-----104-|--18-----105-|--19-----106-|---20 + | / | / | / | / | / + |117 70 |118 72 |119 74 |120 76 |121 + |/ |/ |/ |/ |/ + 11-----99-----12-----100----13-----101----14-----102-----15 + cell 0 cell 1 cell 2 cell 3 + +*/ + +typedef struct { + PetscInt testNum; // The mesh to test +} AppCtx; + +static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) +{ + PetscFunctionBegin; + options->testNum = 0; + + PetscOptionsBegin(comm, "", "Cohesive Meshing Options", "DMPLEX"); + PetscCall(PetscOptionsBoundedInt("-test_num", "The particular mesh to test", "ex5.c", options->testNum, &options->testNum, NULL, 0)); + PetscOptionsEnd(); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateQuadMesh1(MPI_Comm comm, AppCtx *user, DM *dm) +{ + const PetscInt faces[2] = {1, 1}; + PetscReal lower[2], upper[2]; + DMLabel label; + PetscMPIInt rank; + void *get_tmp; + PetscInt64 *cidx; + PetscMPIInt flg; + + PetscFunctionBeginUser; + PetscCallMPI(MPI_Comm_rank(comm, &rank)); + // Create serial mesh + lower[0] = (PetscReal)(rank % 2); + lower[1] = (PetscReal)(rank / 2); + upper[0] = (PetscReal)(rank % 2) + 1.; + upper[1] = (PetscReal)(rank / 2) + 1.; + PetscCall(DMPlexCreateBoxMesh(PETSC_COMM_SELF, 2, PETSC_FALSE, faces, lower, upper, NULL, PETSC_TRUE, dm)); + PetscCall(PetscObjectSetName((PetscObject)*dm, "box")); + // Flip edges to make fault non-oriented + switch (rank) { + case 2: + PetscCall(DMPlexOrientPoint(*dm, 8, -1)); + break; + case 3: + PetscCall(DMPlexOrientPoint(*dm, 7, -1)); + break; + default: + break; + } + // Need this so that all procs create the cell types + PetscCall(DMPlexGetCellTypeLabel(*dm, &label)); + // Replace comm in object (copied from PetscHeaderCreate/Destroy()) + PetscCall(PetscCommDestroy(&(*dm)->hdr.comm)); + PetscCall(PetscCommDuplicate(comm, &(*dm)->hdr.comm, &(*dm)->hdr.tag)); + PetscCallMPI(MPI_Comm_get_attr((*dm)->hdr.comm, Petsc_CreationIdx_keyval, &get_tmp, &flg)); + PetscCheck(flg, (*dm)->hdr.comm, PETSC_ERR_ARG_CORRUPT, "MPI_Comm does not have an object creation index"); + cidx = (PetscInt64 *)get_tmp; + (*dm)->hdr.cidx = (*cidx)++; + // Create new pointSF + { + PetscSF sf; + PetscInt *local = NULL; + PetscSFNode *remote = NULL; + PetscInt Nl; + + PetscCall(PetscSFCreate(comm, &sf)); + switch (rank) { + case 0: + Nl = 5; + PetscCall(PetscMalloc1(Nl, &local)); + PetscCall(PetscMalloc1(Nl, &remote)); + local[0] = 2; + remote[0].index = 1; + remote[0].rank = 1; + local[1] = 3; + remote[1].index = 1; + remote[1].rank = 2; + local[2] = 4; + remote[2].index = 1; + remote[2].rank = 3; + local[3] = 6; + remote[3].index = 5; + remote[3].rank = 2; + local[4] = 8; + remote[4].index = 7; + remote[4].rank = 1; + break; + case 1: + Nl = 3; + PetscCall(PetscMalloc1(Nl, &local)); + PetscCall(PetscMalloc1(Nl, &remote)); + local[0] = 3; + remote[0].index = 1; + remote[0].rank = 3; + local[1] = 4; + 
remote[1].index = 2; + remote[1].rank = 3; + local[2] = 6; + remote[2].index = 5; + remote[2].rank = 3; + break; + case 2: + Nl = 3; + PetscCall(PetscMalloc1(Nl, &local)); + PetscCall(PetscMalloc1(Nl, &remote)); + local[0] = 2; + remote[0].index = 1; + remote[0].rank = 3; + local[1] = 4; + remote[1].index = 3; + remote[1].rank = 3; + local[2] = 8; + remote[2].index = 7; + remote[2].rank = 3; + break; + case 3: + Nl = 0; + break; + default: + SETERRQ(comm, PETSC_ERR_SUP, "This example only supports 4 ranks"); + } + PetscCall(PetscSFSetGraph(sf, 9, Nl, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER)); + PetscCall(DMSetPointSF(*dm, sf)); + PetscCall(PetscSFDestroy(&sf)); + } + // Create fault label + PetscCall(DMCreateLabel(*dm, "fault")); + PetscCall(DMGetLabel(*dm, "fault", &label)); + switch (rank) { + case 0: + case 2: + PetscCall(DMLabelSetValue(label, 8, 1)); + PetscCall(DMLabelSetValue(label, 2, 0)); + PetscCall(DMLabelSetValue(label, 4, 0)); + break; + case 1: + case 3: + PetscCall(DMLabelSetValue(label, 7, 1)); + PetscCall(DMLabelSetValue(label, 1, 0)); + PetscCall(DMLabelSetValue(label, 3, 0)); + break; + default: + break; + } + PetscCall(DMPlexOrientLabel(*dm, label)); + PetscCall(DMPlexLabelCohesiveComplete(*dm, label, NULL, 1, PETSC_FALSE, PETSC_FALSE, NULL)); + PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateHexMesh1(MPI_Comm comm, AppCtx *user, DM *dm) +{ + const PetscInt faces[3] = {1, 1, 1}; + PetscReal lower[3], upper[3]; + DMLabel label; + PetscMPIInt rank; + void *get_tmp; + PetscInt64 *cidx; + PetscMPIInt flg; + + PetscFunctionBeginUser; + PetscCallMPI(MPI_Comm_rank(comm, &rank)); + // Create serial mesh + lower[0] = (PetscReal)(rank % 2); + lower[1] = 0.; + lower[2] = (PetscReal)(rank / 2); + upper[0] = (PetscReal)(rank % 2) + 1.; + upper[1] = 1.; + upper[2] = (PetscReal)(rank / 2) + 1.; + PetscCall(DMPlexCreateBoxMesh(PETSC_COMM_SELF, 3, PETSC_FALSE, faces, lower, upper, NULL, PETSC_TRUE, dm)); + PetscCall(PetscObjectSetName((PetscObject)*dm, "box")); + // Flip edges to make fault non-oriented + switch (rank) { + case 2: + PetscCall(DMPlexOrientPoint(*dm, 10, -1)); + break; + case 3: + PetscCall(DMPlexOrientPoint(*dm, 9, -1)); + break; + default: + break; + } + // Need this so that all procs create the cell types + PetscCall(DMPlexGetCellTypeLabel(*dm, &label)); + // Replace comm in object (copied from PetscHeaderCreate/Destroy()) + PetscCall(PetscCommDestroy(&(*dm)->hdr.comm)); + PetscCall(PetscCommDuplicate(comm, &(*dm)->hdr.comm, &(*dm)->hdr.tag)); + PetscCallMPI(MPI_Comm_get_attr((*dm)->hdr.comm, Petsc_CreationIdx_keyval, &get_tmp, &flg)); + PetscCheck(flg, (*dm)->hdr.comm, PETSC_ERR_ARG_CORRUPT, "MPI_Comm does not have an object creation index"); + cidx = (PetscInt64 *)get_tmp; + (*dm)->hdr.cidx = (*cidx)++; + // Create new pointSF + { + PetscSF sf; + PetscInt *local = NULL; + PetscSFNode *remote = NULL; + PetscInt Nl; + + PetscCall(PetscSFCreate(comm, &sf)); + switch (rank) { + case 0: + Nl = 15; + PetscCall(PetscMalloc1(Nl, &local)); + PetscCall(PetscMalloc1(Nl, &remote)); + local[0] = 2; + remote[0].index = 1; + remote[0].rank = 1; + local[1] = 4; + remote[1].index = 3; + remote[1].rank = 1; + local[2] = 5; + remote[2].index = 1; + remote[2].rank = 2; + local[3] = 6; + remote[3].index = 1; + remote[3].rank = 3; + local[4] = 7; + remote[4].index = 3; + remote[4].rank = 2; + local[5] = 8; + remote[5].index = 3; + remote[5].rank = 3; + local[6] = 17; + remote[6].index = 15; + 
remote[6].rank = 2; + local[7] = 18; + remote[7].index = 16; + remote[7].rank = 2; + local[8] = 20; + remote[8].index = 19; + remote[8].rank = 1; + local[9] = 21; + remote[9].index = 19; + remote[9].rank = 2; + local[10] = 22; + remote[10].index = 19; + remote[10].rank = 3; + local[11] = 24; + remote[11].index = 23; + remote[11].rank = 1; + local[12] = 26; + remote[12].index = 25; + remote[12].rank = 1; + local[13] = 10; + remote[13].index = 9; + remote[13].rank = 1; + local[14] = 14; + remote[14].index = 13; + remote[14].rank = 2; + break; + case 1: + Nl = 9; + PetscCall(PetscMalloc1(Nl, &local)); + PetscCall(PetscMalloc1(Nl, &remote)); + local[0] = 5; + remote[0].index = 1; + remote[0].rank = 3; + local[1] = 6; + remote[1].index = 2; + remote[1].rank = 3; + local[2] = 7; + remote[2].index = 3; + remote[2].rank = 3; + local[3] = 8; + remote[3].index = 4; + remote[3].rank = 3; + local[4] = 17; + remote[4].index = 15; + remote[4].rank = 3; + local[5] = 18; + remote[5].index = 16; + remote[5].rank = 3; + local[6] = 21; + remote[6].index = 19; + remote[6].rank = 3; + local[7] = 22; + remote[7].index = 20; + remote[7].rank = 3; + local[8] = 14; + remote[8].index = 13; + remote[8].rank = 3; + break; + case 2: + Nl = 9; + PetscCall(PetscMalloc1(Nl, &local)); + PetscCall(PetscMalloc1(Nl, &remote)); + local[0] = 2; + remote[0].index = 1; + remote[0].rank = 3; + local[1] = 4; + remote[1].index = 3; + remote[1].rank = 3; + local[2] = 6; + remote[2].index = 5; + remote[2].rank = 3; + local[3] = 8; + remote[3].index = 7; + remote[3].rank = 3; + local[4] = 20; + remote[4].index = 19; + remote[4].rank = 3; + local[5] = 22; + remote[5].index = 21; + remote[5].rank = 3; + local[6] = 24; + remote[6].index = 23; + remote[6].rank = 3; + local[7] = 26; + remote[7].index = 25; + remote[7].rank = 3; + local[8] = 10; + remote[8].index = 9; + remote[8].rank = 3; + break; + case 3: + Nl = 0; + break; + default: + SETERRQ(comm, PETSC_ERR_SUP, "This example only supports 4 ranks"); + } + PetscCall(PetscSFSetGraph(sf, 27, Nl, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER)); + PetscCall(DMSetPointSF(*dm, sf)); + PetscCall(PetscSFDestroy(&sf)); + } + // Create fault label + PetscCall(DMCreateLabel(*dm, "fault")); + PetscCall(DMGetLabel(*dm, "fault", &label)); + switch (rank) { + case 0: + case 2: + PetscCall(DMLabelSetValue(label, 10, 2)); + PetscCall(DMLabelSetValue(label, 20, 1)); + PetscCall(DMLabelSetValue(label, 22, 1)); + PetscCall(DMLabelSetValue(label, 24, 1)); + PetscCall(DMLabelSetValue(label, 26, 1)); + PetscCall(DMLabelSetValue(label, 2, 0)); + PetscCall(DMLabelSetValue(label, 4, 0)); + PetscCall(DMLabelSetValue(label, 6, 0)); + PetscCall(DMLabelSetValue(label, 8, 0)); + break; + case 1: + case 3: + PetscCall(DMLabelSetValue(label, 9, 2)); + PetscCall(DMLabelSetValue(label, 19, 1)); + PetscCall(DMLabelSetValue(label, 21, 1)); + PetscCall(DMLabelSetValue(label, 23, 1)); + PetscCall(DMLabelSetValue(label, 25, 1)); + PetscCall(DMLabelSetValue(label, 1, 0)); + PetscCall(DMLabelSetValue(label, 3, 0)); + PetscCall(DMLabelSetValue(label, 5, 0)); + PetscCall(DMLabelSetValue(label, 7, 0)); + break; + default: + break; + } + PetscCall(DMPlexOrientLabel(*dm, label)); + PetscCall(DMPlexLabelCohesiveComplete(*dm, label, NULL, 1, PETSC_FALSE, PETSC_FALSE, NULL)); + PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) +{ + PetscFunctionBegin; + switch (user->testNum) { + case 1: + 
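+    // test_num 1: hand-assembled 4-rank quad mesh with a non-oriented fault (exercised by test quad_3)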
PetscCall(CreateQuadMesh1(comm, user, dm)); + break; + case 2: + PetscCall(CreateHexMesh1(comm, user, dm)); + break; + default: + PetscCall(DMCreate(comm, dm)); + PetscCall(DMSetType(*dm, DMPLEX)); + break; + } + PetscCall(DMSetFromOptions(*dm)); + { + const char *prefix; + + // We cannot redistribute with cohesive cells in the SF + PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE)); + PetscCall(PetscObjectGetOptionsPrefix((PetscObject)*dm, &prefix)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "f0_")); + PetscCall(DMSetFromOptions(*dm)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "f1_")); + PetscCall(DMSetFromOptions(*dm)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, prefix)); + } + PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view")); + PetscFunctionReturn(PETSC_SUCCESS); +} + +int main(int argc, char **argv) +{ + DM dm; + AppCtx user; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user)); + PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm)); + PetscCall(DMDestroy(&dm)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + testset: + requires: triangle + args: -dm_refine 1 -dm_plex_transform_type cohesive_extrude \ + -dm_plex_transform_active fault \ + -dm_view ::ascii_info_detail -coarse_dm_view ::ascii_info_detail + + test: + suffix: tri_0 + args: -dm_plex_box_faces 1,1 -dm_plex_cohesive_label_fault 8 + test: + suffix: tri_1 + args: -dm_plex_box_faces 1,1 -dm_plex_cohesive_label_fault 8 \ + -dm_plex_transform_extrude_use_tensor 0 + test: + suffix: tri_2 + args: -dm_plex_file_contents dat:tri_2_cv -dm_plex_cohesive_label_fault 11,15 + test: + suffix: tri_3 + nsize: 2 + args: -dm_plex_file_contents dat:tri_2_cv -dm_plex_cohesive_label_fault 11,15 \ + -petscpartitioner_type shell -petscpartitioner_shell_sizes 2,2 \ + -petscpartitioner_shell_points 0,3,1,2 + + testset: + args: -dm_plex_simplex 0 -dm_plex_box_faces 2,1 \ + -dm_refine 1 -dm_plex_transform_type cohesive_extrude \ + -dm_plex_transform_active fault -dm_plex_cohesive_label_fault 13 \ + -dm_view ::ascii_info_detail -coarse_dm_view ::ascii_info_detail + + test: + suffix: quad_0 + test: + suffix: quad_1 + args: -dm_plex_transform_extrude_use_tensor 0 + test: + suffix: quad_2 + nsize: 2 + args: -petscpartitioner_type simple + + test: + suffix: quad_3 + nsize: 4 + args: -test_num 1 \ + -dm_refine 1 -dm_plex_transform_type cohesive_extrude \ + -dm_plex_transform_active fault \ + -dm_view ::ascii_info_detail -coarse_dm_view ::ascii_info_detail \ + -orientation_view -orientation_view_synchronized + + test: + suffix: quad_4 + args: -dm_plex_simplex 0 -dm_plex_box_faces 3,2 \ + -dm_refine 1 -dm_plex_transform_type cohesive_extrude \ + -dm_plex_transform_active fault -dm_plex_cohesive_label_fault 22,23 \ + -dm_view ::ascii_info_detail -coarse_dm_view ::ascii_info_detail + + test: + suffix: quad_5 + args: -dm_plex_simplex 0 -dm_plex_box_faces 3,2 \ + -dm_plex_cohesive_label_fault0 21 \ + -dm_plex_cohesive_label_fault1 23 \ + -f0_dm_refine 1 -f0_dm_plex_transform_type cohesive_extrude \ + -f0_dm_plex_transform_active fault0 -f0_coarse_dm_view ::ascii_info_detail \ + -f1_dm_refine 1 -f1_dm_plex_transform_type cohesive_extrude \ + -f1_dm_plex_transform_active fault1 -f1_coarse_dm_view ::ascii_info_detail \ + -dm_view ::ascii_info_detail + + test: + suffix: quad_6 + args: -dm_plex_simplex 0 -dm_plex_box_faces 3,2 \ + -dm_plex_cohesive_label_fault0 22,23 \ + -dm_plex_cohesive_label_fault1 32 \ + -f0_dm_refine 1 
-f0_dm_plex_transform_type cohesive_extrude \ + -f0_dm_plex_transform_active fault0 -f0_coarse_dm_view ::ascii_info_detail \ + -f1_dm_refine 1 -f1_dm_plex_transform_type cohesive_extrude \ + -f1_dm_plex_transform_active fault1 -f1_coarse_dm_view ::ascii_info_detail \ + -dm_view ::ascii_info_detail + + test: + suffix: quad_6w + args: -dm_plex_simplex 0 -dm_plex_box_faces 3,2 \ + -dm_plex_cohesive_label_fault0 22,23 \ + -dm_plex_cohesive_label_fault1 32 \ + -f0_dm_refine 1 -f0_dm_plex_transform_type cohesive_extrude \ + -f0_dm_plex_transform_active fault0 -f0_coarse_dm_view ::ascii_info_detail \ + -f0_dm_plex_transform_cohesive_width 0.05 \ + -f1_dm_refine 1 -f1_dm_plex_transform_type cohesive_extrude \ + -f1_dm_plex_transform_active fault1 -f1_coarse_dm_view ::ascii_info_detail \ + -f1_dm_plex_transform_cohesive_width 0.05 \ + -dm_view ::ascii_info_detail + + testset: + args: -dm_plex_dim 3 -dm_plex_shape doublet \ + -dm_refine 1 -dm_plex_transform_type cohesive_extrude \ + -dm_plex_transform_active fault -dm_plex_cohesive_label_fault 7 \ + -dm_view ::ascii_info_detail -coarse_dm_view ::ascii_info_detail + + test: + suffix: tet_0 + test: + suffix: tet_1 + nsize: 2 + args: -petscpartitioner_type simple + + testset: + args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,1,1 -dm_plex_box_upper 2,1,1 \ + -dm_refine 1 -dm_plex_transform_type cohesive_extrude \ + -dm_plex_transform_active fault -dm_plex_cohesive_label_fault 15 \ + -dm_view ::ascii_info_detail -coarse_dm_view ::ascii_info_detail + + test: + suffix: hex_0 + test: + suffix: hex_1 + nsize: 2 + args: -petscpartitioner_type simple + + test: + suffix: hex_2 + nsize: 4 + args: -test_num 2 \ + -dm_refine 1 -dm_plex_transform_type cohesive_extrude \ + -dm_plex_transform_active fault \ + -dm_view ::ascii_info_detail -coarse_dm_view ::ascii_info_detail \ + -orientation_view -orientation_view_synchronized + + test: + suffix: hex_3 + args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,1,2 -dm_plex_box_upper 2.,1.,2. \ + -dm_plex_cohesive_label_fault0 37,40 \ + -dm_plex_cohesive_label_fault1 26 \ + -f0_dm_refine 1 -f0_dm_plex_transform_type cohesive_extrude \ + -f0_dm_plex_transform_active fault0 -f0_coarse_dm_view ::ascii_info_detail \ + -f1_dm_refine 1 -f1_dm_plex_transform_type cohesive_extrude \ + -f1_dm_plex_transform_active fault1 -f1_coarse_dm_view ::ascii_info_detail \ + -dm_view ::ascii_info_detail + + test: + suffix: hex_4 + args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 4,1,2 -dm_plex_box_upper 4.,1.,2. 
\ + -dm_plex_cohesive_label_fault0 65,68 \ + -dm_plex_cohesive_label_fault1 46 \ + -f0_dm_refine 1 -f0_dm_plex_transform_type cohesive_extrude \ + -f0_dm_plex_transform_active fault0 -f0_coarse_dm_view ::ascii_info_detail \ + -f1_dm_refine 1 -f1_dm_plex_transform_type cohesive_extrude \ + -f1_dm_plex_transform_active fault1 -f1_coarse_dm_view ::ascii_info_detail \ + -dm_view ::ascii_info_detail + +TEST*/ diff --git a/src/dm/impls/plex/tests/ex72.c b/src/dm/impls/plex/tests/ex72.c new file mode 100644 index 00000000000..697fa6c769c --- /dev/null +++ b/src/dm/impls/plex/tests/ex72.c @@ -0,0 +1,95 @@ +static char help[] = "Tests for geometry models\n\n"; + +#include <petscdmplex.h> +#include <petscds.h> + +typedef struct { + PetscReal exactVol; // The exact volume of the shape + PetscReal volTol; // The relative tolerance for checking the volume +} AppCtx; + +static void identity(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + f0[0] = 1.0; +} + +static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm) +{ + PetscFunctionBegin; + PetscCall(DMCreate(comm, dm)); + PetscCall(DMSetType(*dm, DMPLEX)); + PetscCall(DMSetFromOptions(*dm)); + PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view")); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) +{ + options->exactVol = 4. * PETSC_PI / 3.; + options->volTol = PETSC_SMALL; + + PetscFunctionBeginUser; + PetscOptionsBegin(comm, "", "Geometry Model Test Options", "DMPLEX"); + PetscCall(PetscOptionsReal("-exact_vol", "Exact volume of the shape", __FILE__, options->exactVol, &options->exactVol, NULL)); + PetscCall(PetscOptionsReal("-vol_tol", "Relative tolerance for checking the volume", __FILE__, options->volTol, &options->volTol, NULL)); + PetscOptionsEnd(); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateDiscretization(DM dm) +{ + PetscFE fe; + PetscDS ds; + DMPolytopeType ct; + PetscInt dim, cStart; + + PetscFunctionBeginUser; + PetscCall(DMGetDimension(dm, &dim)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, NULL)); + PetscCall(DMPlexGetCellType(dm, cStart, &ct)); + PetscCall(PetscFECreateByCell(PETSC_COMM_SELF, dim, 1, ct, NULL, -1, &fe)); + PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fe)); + PetscCall(DMCreateDS(dm)); + PetscCall(DMGetDS(dm, &ds)); + PetscCall(PetscDSSetObjective(ds, 0, identity)); + PetscCall(PetscFEDestroy(&fe)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +int main(int argc, char **argv) +{ + DM dm; + Vec u; + PetscScalar volume; + AppCtx user; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user)); + PetscCall(CreateMesh(PETSC_COMM_WORLD, &dm)); + PetscCall(CreateDiscretization(dm)); + PetscCall(DMGetGlobalVector(dm, &u)); + PetscCall(VecSet(u, 0.)); + PetscCall(DMPlexComputeIntegralFEM(dm, u, &volume, NULL)); + PetscCall(DMRestoreGlobalVector(dm, &u)); + PetscCheck(PetscAbsScalar((volume - user.exactVol) / user.exactVol) < user.volTol, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Invalid volume %g != %g", (double)PetscRealPart(volume), (double)user.exactVol); + PetscCall(DMDestroy(&dm)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + #
-dm_refine 6 -vol_tol 6e-4 works + test: + suffix: ball_0 + requires: ctetgen + args: -dm_plex_dim 3 -dm_plex_shape ball -dm_refine 3 -dm_geom_model ball -vol_tol 5e-2 + + # -dm_refine 4 -vol_tol 2e-3 works + test: + suffix: cylinder_0 + args: -dm_plex_dim 3 -dm_plex_shape cylinder -dm_plex_cylinder_num_refine 1 \ + -dm_refine 1 -dm_geom_model cylinder -exact_vol 3.141592653589 -vol_tol 1e-1 + +TEST*/ diff --git a/src/dm/impls/plex/tests/ex73.c b/src/dm/impls/plex/tests/ex73.c new file mode 100644 index 00000000000..c7d5f806233 --- /dev/null +++ b/src/dm/impls/plex/tests/ex73.c @@ -0,0 +1,334 @@ +static char help[] = "Tests for Gauss' Law\n\n"; + +/* We want to check the weak version of Gauss' Law, namely that + + \int_\Omega v div q - \int_\Gamma v (q \cdot n) = 0 + +*/ + +#include <petscdmplex.h> +#include <petscsnes.h> +#include <petscds.h> + +typedef struct { + PetscInt degree; // The degree of the discretization + PetscBool divFree; // True if the solution is divergence-free +} AppCtx; + +static PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx) +{ + u[0] = 0.0; + return PETSC_SUCCESS; +} + +// div = 0 +static void solenoidal_2d(PetscInt n, const PetscReal x[], PetscScalar u[]) +{ + u[0] = PetscPowRealInt(x[0], n) * PetscPowRealInt(x[1], n - 1); + u[1] = -PetscPowRealInt(x[0], n - 1) * PetscPowRealInt(x[1], n); +} +// div = 0 +static void solenoidal_3d(PetscInt n, const PetscReal x[], PetscScalar u[]) +{ + u[0] = PetscPowRealInt(x[0], n) * PetscPowRealInt(x[1], n - 1) * PetscPowRealInt(x[2], n - 1); + u[1] = -2. * PetscPowRealInt(x[0], n - 1) * PetscPowRealInt(x[1], n) * PetscPowRealInt(x[2], n - 1); + u[2] = PetscPowRealInt(x[0], n - 1) * PetscPowRealInt(x[1], n - 1) * PetscPowRealInt(x[2], n); +} + +static PetscErrorCode solenoidal_totaldeg_2d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx) +{ + const PetscInt deg = *(PetscInt *)ctx; + const PetscInt n = deg / 2 + deg % 2; + + solenoidal_2d(n, x, u); + return PETSC_SUCCESS; +} + +static PetscErrorCode solenoidal_totaldeg_3d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx) +{ + const PetscInt deg = *(PetscInt *)ctx; + const PetscInt n = deg / 3 + (deg % 3 ?
1 : 0); + + solenoidal_3d(n, x, u); + return PETSC_SUCCESS; +} + +// This is in P_n^{-} +static PetscErrorCode source_totaldeg(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx) +{ + const PetscInt n = *(PetscInt *)ctx; + + for (PetscInt d = 0; d < dim; ++d) u[d] = PetscPowRealInt(x[d], n + 1); + return PETSC_SUCCESS; +} + +static void identity(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + const PetscInt deg = (PetscInt)PetscRealPart(constants[0]); + PetscScalar p[3]; + + if (dim == 2) PetscCallVoid(solenoidal_totaldeg_2d(dim, t, x, uOff[1] - uOff[0], p, (void *)&deg)); + else PetscCallVoid(solenoidal_totaldeg_3d(dim, t, x, uOff[1] - uOff[0], p, (void *)&deg)); + for (PetscInt c = 0; c < dim; ++c) f0[c] = -u[c] + p[c]; +} + +static void zero_bd(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], const PetscReal n[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + for (PetscInt d = 0; d < dim; ++d) f0[0] = 0.; +} + +static void flux(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], const PetscReal n[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + for (PetscInt d = 0; d < dim; ++d) f0[0] -= u[d] * n[d]; +} + +static void divergence(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + for (PetscInt d = 0; d < dim; ++d) f0[0] += u_x[d * dim + d]; +} + +static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm) +{ + PetscFunctionBegin; + PetscCall(DMCreate(comm, dm)); + PetscCall(DMSetType(*dm, DMPLEX)); + PetscCall(DMSetFromOptions(*dm)); + PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view")); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) +{ + options->degree = -1; + + PetscFunctionBeginUser; + PetscOptionsBegin(comm, "", "Gauss' Law Test Options", "DMPLEX"); + PetscOptionsEnd(); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateDiscretization(DM dm, AppCtx *user) +{ + PetscFE feq, fep; + PetscSpace sp; + PetscQuadrature quad, fquad; + PetscDS ds; + DMLabel label; + DMPolytopeType ct; + PetscInt dim, cStart, minDeg, maxDeg; + PetscBool isTrimmed, isSum; + + PetscFunctionBeginUser; + PetscCall(DMGetDimension(dm, &dim)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, NULL)); + PetscCall(DMPlexGetCellType(dm, 
cStart, &ct)); + PetscCall(PetscFECreateByCell(PETSC_COMM_SELF, dim, dim, ct, "field_", -1, &feq)); + PetscCall(DMSetField(dm, 0, NULL, (PetscObject)feq)); + PetscCall(PetscFEGetQuadrature(feq, &quad)); + PetscCall(PetscFEGetFaceQuadrature(feq, &fquad)); + PetscCall(PetscFEGetBasisSpace(feq, &sp)); + PetscCall(PetscSpaceGetDegree(sp, &minDeg, &maxDeg)); + PetscCall(PetscObjectTypeCompare((PetscObject)sp, PETSCSPACEPTRIMMED, &isTrimmed)); + PetscCall(PetscObjectTypeCompare((PetscObject)sp, PETSCSPACESUM, &isSum)); + if (isSum) { + PetscSpace subsp, xsp, ysp; + PetscInt xdeg, ydeg; + PetscBool isTensor; + + PetscCall(PetscSpaceSumGetSubspace(sp, 0, &subsp)); + PetscCall(PetscObjectTypeCompare((PetscObject)subsp, PETSCSPACETENSOR, &isTensor)); + if (isTensor) { + PetscCall(PetscSpaceTensorGetSubspace(subsp, 0, &xsp)); + PetscCall(PetscSpaceTensorGetSubspace(subsp, 1, &ysp)); + PetscCall(PetscSpaceGetDegree(xsp, &xdeg, NULL)); + PetscCall(PetscSpaceGetDegree(ysp, &ydeg, NULL)); + isTrimmed = xdeg != ydeg ? PETSC_TRUE : PETSC_FALSE; + } + } + user->degree = minDeg; + if (isTrimmed) user->divFree = PETSC_FALSE; + else user->divFree = PETSC_TRUE; + PetscCheck(!user->divFree || user->degree, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Degree 0 solution not available"); + PetscCall(PetscFEDestroy(&feq)); + PetscCall(PetscFECreateByCell(PETSC_COMM_SELF, dim, 1, ct, "pot_", -1, &fep)); + PetscCall(DMSetField(dm, 1, NULL, (PetscObject)fep)); + PetscCall(PetscFESetQuadrature(fep, quad)); + PetscCall(PetscFESetFaceQuadrature(fep, fquad)); + PetscCall(PetscFEDestroy(&fep)); + PetscCall(DMCreateDS(dm)); + + PetscCall(DMGetDS(dm, &ds)); + PetscCall(PetscDSSetResidual(ds, 0, identity, NULL)); + PetscCall(PetscDSSetResidual(ds, 1, divergence, NULL)); + if (user->divFree) { + if (dim == 2) PetscCall(PetscDSSetExactSolution(ds, 0, solenoidal_totaldeg_2d, &user->degree)); + else PetscCall(PetscDSSetExactSolution(ds, 0, solenoidal_totaldeg_3d, &user->degree)); + } else { + PetscCall(PetscDSSetExactSolution(ds, 0, source_totaldeg, &user->degree)); + } + PetscCall(PetscDSSetExactSolution(ds, 1, zero, &user->degree)); + PetscCall(DMGetLabel(dm, "marker", &label)); + + // TODO Can we also test the boundary residual integration? 
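+  // One possible sketch, kept disabled below: add a natural BC on the marker label and register flux() as its boundary residual through the weak form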
+ //PetscWeakForm wf; + //PetscInt bd, id = 1; + //PetscCall(DMAddBoundary(dm, DM_BC_NATURAL, "boundary", label, 1, &id, 1, 0, NULL, NULL, NULL, user, &bd)); + //PetscCall(PetscDSGetBoundary(ds, bd, &wf, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)); + //PetscCall(PetscWeakFormSetIndexBdResidual(wf, label, id, 1, 0, 0, flux, 0, NULL)); + + { + PetscScalar constants[1]; + + constants[0] = user->degree; + PetscCall(PetscDSSetConstants(ds, 1, constants)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +int main(int argc, char **argv) +{ + DM dm; + SNES snes; + Vec u; + PetscReal error[2], residual; + PetscScalar source[2], outflow[2]; + AppCtx user; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user)); + PetscCall(CreateMesh(PETSC_COMM_WORLD, &dm)); + PetscCall(CreateDiscretization(dm, &user)); + PetscCall(DMGetGlobalVector(dm, &u)); + PetscCall(PetscObjectSetName((PetscObject)u, "solution")); + PetscCall(DMComputeExactSolution(dm, 0., u, NULL)); + + PetscCall(SNESCreate(PetscObjectComm((PetscObject)dm), &snes)); + PetscCall(SNESSetDM(snes, dm)); + PetscCall(DMPlexSetSNESLocalFEM(dm, PETSC_FALSE, &user)); + PetscCall(SNESSetFromOptions(snes)); + PetscCall(DMSNESCheckDiscretization(snes, dm, 0., u, PETSC_DETERMINE, error)); + PetscCheck(PetscAbsReal(error[0]) < PETSC_SMALL, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Exact solution does not fit into FEM space: %g should be zero", (double)error[0]); + if (user.divFree) { + PetscCall(DMSNESCheckResidual(snes, dm, u, PETSC_DETERMINE, &residual)); + PetscCheck(PetscAbsReal(residual) < PETSC_SMALL, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Exact solution is not divergence-free: %g should be zero", (double)residual); + } else { + PetscDS ds; + + PetscCall(DMGetDS(dm, &ds)); + PetscCall(PetscDSSetObjective(ds, 1, divergence)); + PetscCall(DMPlexComputeIntegralFEM(dm, u, source, &user)); + } + PetscCall(SNESDestroy(&snes)); + + PetscBdPointFunc funcs[] = {zero_bd, flux}; + DMLabel label; + PetscInt id = 1; + + PetscCall(DMGetLabel(dm, "marker", &label)); + PetscCall(DMPlexComputeBdIntegral(dm, u, label, 1, &id, funcs, outflow, &user)); + if (user.divFree) PetscCheck(PetscAbsScalar(outflow[1]) < PETSC_SMALL, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Outflow %g should be zero for a divergence-free field", (double)PetscRealPart(outflow[1])); + else PetscCheck(PetscAbsScalar(source[1] + outflow[1]) < PETSC_SMALL, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Outflow %g should oppose source %g", (double)PetscRealPart(outflow[1]), (double)PetscRealPart(source[1])); + + PetscCall(DMRestoreGlobalVector(dm, &u)); + PetscCall(DMDestroy(&dm)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + testset: + suffix: p + requires: triangle ctetgen + args: -dm_plex_dim {{2 3}} -dm_plex_box_faces 2,2,2 + + test: + suffix: 1 + args: -field_petscspace_degree 1 -pot_petscspace_degree 1 + test: + suffix: 2 + args: -field_petscspace_degree 2 -pot_petscspace_degree 2 + test: + suffix: 3 + args: -field_petscspace_degree 3 -pot_petscspace_degree 3 + test: + suffix: 4 + args: -field_petscspace_degree 4 -pot_petscspace_degree 4 + + testset: + suffix: q + args: -dm_plex_dim {{2 3}} -dm_plex_simplex 0 -dm_plex_box_faces 2,2 + + test: + suffix: 1 + args: -field_petscspace_degree 1 -pot_petscspace_degree 1 + test: + suffix: 2 + args: -field_petscspace_degree 2 -pot_petscspace_degree 2 + test: + suffix: 3 + args: -field_petscspace_degree 3 -pot_petscspace_degree 3 + test: + suffix: 4 + args: 
-field_petscspace_degree 4 -pot_petscspace_degree 4 + + testset: + suffix: bdm + requires: triangle ctetgen + args: -dm_plex_dim 2 -dm_plex_box_faces 2,2 + + test: + suffix: 1 + args: -pot_petscspace_degree 0 -pot_petscdualspace_lagrange_continuity 0 \ + -field_petscspace_degree 1 -field_petscdualspace_type bdm \ + -field_petscfe_default_quadrature_order 2 + + testset: + suffix: rt + requires: triangle ctetgen + args: -dm_plex_dim 2 -dm_plex_box_faces 2,2 + + test: + suffix: 1 + args: -pot_petscspace_degree 0 -pot_petscdualspace_lagrange_continuity 0 \ + -field_petscspace_type ptrimmed \ + -field_petscspace_components 2 \ + -field_petscspace_ptrimmed_form_degree -1 \ + -field_petscdualspace_order 1 \ + -field_petscdualspace_form_degree -1 \ + -field_petscdualspace_lagrange_trimmed true \ + -field_petscfe_default_quadrature_order 2 + + testset: + suffix: rtq + requires: triangle ctetgen + args: -dm_plex_dim 2 -dm_plex_simplex 0 -dm_plex_box_faces 2,2 + + test: + suffix: 1 + args: -pot_petscspace_degree 0 -pot_petscdualspace_lagrange_continuity 0 \ + -field_petscspace_degree 1 \ + -field_petscspace_type sum \ + -field_petscspace_variables 2 \ + -field_petscspace_components 2 \ + -field_petscspace_sum_spaces 2 \ + -field_petscspace_sum_concatenate true \ + -field_sumcomp_0_petscspace_variables 2 \ + -field_sumcomp_0_petscspace_type tensor \ + -field_sumcomp_0_petscspace_tensor_spaces 2 \ + -field_sumcomp_0_petscspace_tensor_uniform false \ + -field_sumcomp_0_tensorcomp_0_petscspace_degree 1 \ + -field_sumcomp_0_tensorcomp_1_petscspace_degree 0 \ + -field_sumcomp_1_petscspace_variables 2 \ + -field_sumcomp_1_petscspace_type tensor \ + -field_sumcomp_1_petscspace_tensor_spaces 2 \ + -field_sumcomp_1_petscspace_tensor_uniform false \ + -field_sumcomp_1_tensorcomp_0_petscspace_degree 0 \ + -field_sumcomp_1_tensorcomp_1_petscspace_degree 1 \ + -field_petscdualspace_order 1 \ + -field_petscdualspace_form_degree -1 \ + -field_petscdualspace_lagrange_trimmed true \ + -field_petscfe_default_quadrature_order 2 + +TEST*/ diff --git a/src/dm/impls/plex/tests/ex9.c b/src/dm/impls/plex/tests/ex9.c index b606d30d674..1db440afa26 100644 --- a/src/dm/impls/plex/tests/ex9.c +++ b/src/dm/impls/plex/tests/ex9.c @@ -433,7 +433,7 @@ int main(int argc, char **argv) test: suffix: correctness_2 requires: triangle - args: -interpolate -refinement_limit 1.0e-5 + args: -interpolate -dm_refine 5 test: suffix: 0 TODO: Only for performance testing diff --git a/src/dm/impls/plex/tests/output/ex12_lb_0.out b/src/dm/impls/plex/tests/output/ex12_lb_0.out index f82c636e799..0ef5cb8ad56 100644 --- a/src/dm/impls/plex/tests/output/ex12_lb_0.out +++ b/src/dm/impls/plex/tests/output/ex12_lb_0.out @@ -7,6 +7,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 8 balance: 0 use vertex weights: 1 + use edge weights: 0 ParMetis type: kway load imbalance ratio 1.05 debug flag 0 diff --git a/src/dm/impls/plex/tests/output/ex12_lb_1.out b/src/dm/impls/plex/tests/output/ex12_lb_1.out index f82c636e799..0ef5cb8ad56 100644 --- a/src/dm/impls/plex/tests/output/ex12_lb_1.out +++ b/src/dm/impls/plex/tests/output/ex12_lb_1.out @@ -7,6 +7,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 8 balance: 0 use vertex weights: 1 + use edge weights: 0 ParMetis type: kway load imbalance ratio 1.05 debug flag 0 diff --git a/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-2.out b/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-2.out index c98b66c347b..5ca54c3bd00 100644 --- a/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-2.out 
+++ b/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-2.out @@ -5,6 +5,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 MatPartitioning Graph Partitioner: MatPartitioning Object: 2 MPI processes type: hierarch @@ -18,9 +19,9 @@ MatPartitioning Graph Partitioner: Load imbalance ratio=0.01 Number of fine parts: 2 Fine partitioner: ptscotch - MatPartitioning Object: (hierarch_fine_) 1 MPI process - type: ptscotch - Using vertex weights - Strategy=Default behavior - Load imbalance ratio=0.01 + MatPartitioning Object: (hierarch_fine_) 1 MPI process + type: ptscotch + Using vertex weights + Strategy=Default behavior + Load imbalance ratio=0.01 DMPlexCheckPointSFHeavy PASSED diff --git a/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-3.out b/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-3.out index 6c24d9a75ba..60d4f7de4b3 100644 --- a/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-3.out +++ b/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-3.out @@ -5,6 +5,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 MatPartitioning Graph Partitioner: MatPartitioning Object: 3 MPI processes type: hierarch @@ -18,9 +19,9 @@ MatPartitioning Graph Partitioner: Load imbalance ratio=0.01 Number of fine parts: 2 Fine partitioner: ptscotch - MatPartitioning Object: (hierarch_fine_) 1 MPI process - type: ptscotch - Using vertex weights - Strategy=Default behavior - Load imbalance ratio=0.01 + MatPartitioning Object: (hierarch_fine_) 1 MPI process + type: ptscotch + Using vertex weights + Strategy=Default behavior + Load imbalance ratio=0.01 DMPlexCheckPointSFHeavy PASSED diff --git a/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-4.out b/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-4.out index 381cf35a86c..349dca6b543 100644 --- a/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-4.out +++ b/src/dm/impls/plex/tests/output/ex18_7_hdf5_hierarch_nsize-4.out @@ -5,6 +5,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 MatPartitioning Graph Partitioner: MatPartitioning Object: 4 MPI processes type: hierarch @@ -18,9 +19,9 @@ MatPartitioning Graph Partitioner: Load imbalance ratio=0.01 Number of fine parts: 2 Fine partitioner: ptscotch - MatPartitioning Object: (hierarch_fine_) 1 MPI process - type: ptscotch - Using vertex weights - Strategy=Default behavior - Load imbalance ratio=0.01 + MatPartitioning Object: (hierarch_fine_) 1 MPI process + type: ptscotch + Using vertex weights + Strategy=Default behavior + Load imbalance ratio=0.01 DMPlexCheckPointSFHeavy PASSED diff --git a/src/dm/impls/plex/tests/output/ex1_cylinder.out b/src/dm/impls/plex/tests/output/ex1_cylinder.out index 231bf7560fd..5d49c0232fe 100644 --- a/src/dm/impls/plex/tests/output/ex1_cylinder.out +++ b/src/dm/impls/plex/tests/output/ex1_cylinder.out @@ -8,3 +8,5 @@ Generated Mesh in 3 dimensions: Labels: celltype: 4 strata with value/size (0 (75), 1 (182), 4 (148), 7 (40)) depth: 4 strata with value/size (0 (75), 1 (182), 2 (148), 3 (40)) + marker: 1 strata with value/size (1 (226)) + generatrix: 1 strata with value/size (1 (80)) diff --git a/src/dm/impls/plex/tests/output/ex1_cylinder_per.out b/src/dm/impls/plex/tests/output/ex1_cylinder_per.out index eef2fb088b6..49af2ad94a4 100644 --- a/src/dm/impls/plex/tests/output/ex1_cylinder_per.out +++ b/src/dm/impls/plex/tests/output/ex1_cylinder_per.out 
@@ -9,3 +9,5 @@ Periodic mesh (NONE, NONE, PERIODIC) coordinates localized Labels: celltype: 4 strata with value/size (0 (150), 1 (414), 4 (384), 7 (120)) depth: 4 strata with value/size (0 (150), 1 (414), 2 (384), 3 (120)) + marker: 1 strata with value/size (1 (192)) + generatrix: 1 strata with value/size (1 (192)) diff --git a/src/dm/impls/plex/tests/output/ex1_p4est_periodic_3d.out b/src/dm/impls/plex/tests/output/ex1_p4est_periodic_3d.out index 00d120cbbe5..b1f881d02c8 100644 --- a/src/dm/impls/plex/tests/output/ex1_p4est_periodic_3d.out +++ b/src/dm/impls/plex/tests/output/ex1_p4est_periodic_3d.out @@ -1,13 +1,13 @@ DM Object: Generated Mesh 1 MPI process type: plex Generated Mesh in 3 dimensions: - Number of 0-cells per rank: 10825 - Number of 1-cells per rank: 31957 - Number of 2-cells per rank: 26165 - Number of 3-cells per rank: 6941 + Number of 0-cells per rank: 874 + Number of 1-cells per rank: 2561 + Number of 2-cells per rank: 1990 + Number of 3-cells per rank: 487 Periodic mesh coordinates localized Labels: - depth: 4 strata with value/size (0 (10825), 1 (31957), 2 (26165), 3 (6941)) - celltype: 4 strata with value/size (7 (6941), 4 (26165), 1 (31957), 0 (10825)) - marker: 1 strata with value/size (1 (3160)) - Face Sets: 2 strata with value/size (1 (1353), 2 (1168)) + depth: 4 strata with value/size (0 (874), 1 (2561), 2 (1990), 3 (487)) + celltype: 4 strata with value/size (7 (487), 4 (1990), 1 (2561), 0 (874)) + marker: 1 strata with value/size (1 (824)) + Face Sets: 2 strata with value/size (1 (315), 2 (243)) diff --git a/src/dm/impls/plex/tests/output/ex1_part_parmetis_0.out b/src/dm/impls/plex/tests/output/ex1_part_parmetis_0.out index b335cc4682c..fca09906181 100644 --- a/src/dm/impls/plex/tests/output/ex1_part_parmetis_0.out +++ b/src/dm/impls/plex/tests/output/ex1_part_parmetis_0.out @@ -9,6 +9,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 2 balance: 0 use vertex weights: 1 + use edge weights: 0 ParMetis type: kway load imbalance ratio 1.05 debug flag 0 @@ -16,7 +17,7 @@ Graph Partitioner: 2 MPI Processes DM Object: box 2 MPI processes type: plex Cell balance: 1.00 (max 8, min 8, empty 0) - Edge Cut: 4 (on node 1.000) + Edge Cut: 4 (on node 0.000) [0]Nv: 8 [0] 1 3 9 [0-3) [0] 0 2 [3-5) @@ -40,6 +41,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 4 balance: 0 use vertex weights: 1 + use edge weights: 0 ParMetis type: kway load imbalance ratio 1.05 debug flag 0 @@ -48,7 +50,7 @@ Minimum number of neighbors: 1 DM Object: Parallel Mesh 2 MPI processes type: plex Cell balance: 1.00 (max 8, min 8, empty 0) - Edge Cut: 4 (on node 1.000) + Edge Cut: 4 (on node 0.000) DM Object: Generated Mesh 2 MPI processes type: plex Generated Mesh in 2 dimensions: diff --git a/src/dm/impls/plex/tests/output/ex1_part_ptscotch_0.out b/src/dm/impls/plex/tests/output/ex1_part_ptscotch_0.out index a75a45ede96..799c7d3a1af 100644 --- a/src/dm/impls/plex/tests/output/ex1_part_ptscotch_0.out +++ b/src/dm/impls/plex/tests/output/ex1_part_ptscotch_0.out @@ -9,6 +9,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 using partitioning strategy QUALITY using load imbalance ratio 0.01 DM Object: box 2 MPI processes @@ -26,6 +27,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 using partitioning strategy QUALITY using load imbalance ratio 0.01 Minimum number of neighbors: 1 diff --git a/src/dm/impls/plex/tests/output/ex1_part_ptscotch_1.out 
b/src/dm/impls/plex/tests/output/ex1_part_ptscotch_1.out index 56000e1f475..6b0e99482b6 100644 --- a/src/dm/impls/plex/tests/output/ex1_part_ptscotch_1.out +++ b/src/dm/impls/plex/tests/output/ex1_part_ptscotch_1.out @@ -3,5 +3,6 @@ Graph Partitioner: 8 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 using partitioning strategy DEFAULT using load imbalance ratio 0.1 diff --git a/src/dm/impls/plex/tests/output/ex24_3.out b/src/dm/impls/plex/tests/output/ex24_3.out index 2978ea85d2a..fbee8d95e0d 100644 --- a/src/dm/impls/plex/tests/output/ex24_3.out +++ b/src/dm/impls/plex/tests/output/ex24_3.out @@ -3,6 +3,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 using partitioning strategy DEFAULT using load imbalance ratio 0.01 Graph Partitioner: 4 MPI Processes @@ -10,6 +11,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 MatPartitioning Graph Partitioner: MatPartitioning Object: (p2_) 4 MPI processes type: ptscotch @@ -21,6 +23,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 using partitioning strategy DEFAULT using load imbalance ratio 0.01 Graph Partitioner: 4 MPI Processes @@ -28,6 +31,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 MatPartitioning Graph Partitioner: MatPartitioning Object: (p2_) 4 MPI processes type: ptscotch @@ -39,6 +43,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 using partitioning strategy DEFAULT using load imbalance ratio 0.01 Graph Partitioner: 4 MPI Processes @@ -46,6 +51,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 MatPartitioning Graph Partitioner: MatPartitioning Object: (dp2_) 4 MPI processes type: ptscotch @@ -57,6 +63,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 using partitioning strategy DEFAULT using load imbalance ratio 0.01 Graph Partitioner: 4 MPI Processes @@ -64,6 +71,7 @@ Graph Partitioner: 4 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 MatPartitioning Graph Partitioner: MatPartitioning Object: (dp2_) 4 MPI processes type: ptscotch diff --git a/src/dm/impls/plex/tests/output/ex3_rt0_2d_quad.out b/src/dm/impls/plex/tests/output/ex3_rt0_2d_quad.out new file mode 100644 index 00000000000..60a51bbaec1 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex3_rt0_2d_quad.out @@ -0,0 +1,2 @@ +Function tests pass for order 0 at tolerance 1e-10 +Function tests pass for order 0 derivatives at tolerance 1e-10 diff --git a/src/dm/impls/plex/tests/output/ex3_rt0_2d_tri.out b/src/dm/impls/plex/tests/output/ex3_rt0_2d_tri.out new file mode 100644 index 00000000000..60a51bbaec1 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex3_rt0_2d_tri.out @@ -0,0 +1,2 @@ +Function tests pass for order 0 at tolerance 1e-10 +Function tests pass for order 0 derivatives at tolerance 1e-10 diff --git a/src/dm/impls/plex/tests/output/ex49_2d_sfc_biperiodic_stranded.out b/src/dm/impls/plex/tests/output/ex49_2d_sfc_biperiodic_stranded.out new file mode 100644 index 00000000000..6223f568c65 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex49_2d_sfc_biperiodic_stranded.out @@ -0,0 +1,284 @@ +DM Object: zbox 2 MPI processes + type: plex +zbox in 2 dimensions: +Supports: +[0] Max support size: 4 +[0]: 3 
----> 11 +[0]: 3 ----> 14 +[0]: 4 ----> 13 +[0]: 4 ----> 14 +[0]: 4 ----> 17 +[0]: 5 ----> 11 +[0]: 5 ----> 12 +[0]: 5 ----> 18 +[0]: 6 ----> 12 +[0]: 6 ----> 13 +[0]: 6 ----> 15 +[0]: 6 ----> 20 +[0]: 7 ----> 16 +[0]: 7 ----> 17 +[0]: 8 ----> 15 +[0]: 8 ----> 16 +[0]: 9 ----> 18 +[0]: 9 ----> 19 +[0]: 10 ----> 19 +[0]: 10 ----> 20 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 12 ----> 2 +[0]: 13 ----> 0 +[0]: 13 ----> 1 +[0]: 14 ----> 0 +[0]: 15 ----> 1 +[0]: 16 ----> 1 +[0]: 17 ----> 1 +[0]: 18 ----> 2 +[0]: 19 ----> 2 +[0]: 20 ----> 2 +[1] Max support size: 4 +[1]: 3 ----> 12 +[1]: 3 ----> 15 +[1]: 5 ----> 14 +[1]: 5 ----> 15 +[1]: 6 ----> 16 +[1]: 6 ----> 19 +[1]: 7 ----> 12 +[1]: 7 ----> 13 +[1]: 7 ----> 18 +[1]: 7 ----> 19 +[1]: 8 ----> 16 +[1]: 8 ----> 17 +[1]: 9 ----> 17 +[1]: 9 ----> 18 +[1]: 9 ----> 20 +[1]: 10 ----> 13 +[1]: 10 ----> 14 +[1]: 10 ----> 21 +[1]: 11 ----> 20 +[1]: 11 ----> 21 +[1]: 12 ----> 0 +[1]: 13 ----> 0 +[1]: 13 ----> 2 +[1]: 14 ----> 0 +[1]: 15 ----> 0 +[1]: 16 ----> 1 +[1]: 17 ----> 1 +[1]: 18 ----> 1 +[1]: 18 ----> 2 +[1]: 19 ----> 1 +[1]: 20 ----> 2 +[1]: 21 ----> 2 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (0) +[0]: 0 <---- 13 (0) +[0]: 0 <---- 14 (-1) +[0]: 1 <---- 13 (-1) +[0]: 1 <---- 15 (-1) +[0]: 1 <---- 16 (-1) +[0]: 1 <---- 17 (-1) +[0]: 2 <---- 18 (0) +[0]: 2 <---- 19 (-1) +[0]: 2 <---- 20 (-1) +[0]: 2 <---- 12 (-1) +[0]: 11 <---- 3 (0) +[0]: 11 <---- 5 (0) +[0]: 12 <---- 5 (0) +[0]: 12 <---- 6 (0) +[0]: 13 <---- 6 (0) +[0]: 13 <---- 4 (0) +[0]: 14 <---- 3 (0) +[0]: 14 <---- 4 (0) +[0]: 15 <---- 8 (0) +[0]: 15 <---- 6 (0) +[0]: 16 <---- 7 (0) +[0]: 16 <---- 8 (0) +[0]: 17 <---- 4 (0) +[0]: 17 <---- 7 (0) +[0]: 18 <---- 5 (0) +[0]: 18 <---- 9 (0) +[0]: 19 <---- 10 (0) +[0]: 19 <---- 9 (0) +[0]: 20 <---- 6 (0) +[0]: 20 <---- 10 (0) +[1] Max cone size: 4 +[1]: 0 <---- 12 (0) +[1]: 0 <---- 13 (0) +[1]: 0 <---- 14 (-1) +[1]: 0 <---- 15 (0) +[1]: 1 <---- 16 (0) +[1]: 1 <---- 17 (0) +[1]: 1 <---- 18 (0) +[1]: 1 <---- 19 (0) +[1]: 2 <---- 18 (-1) +[1]: 2 <---- 20 (0) +[1]: 2 <---- 21 (-1) +[1]: 2 <---- 13 (-1) +[1]: 12 <---- 3 (0) +[1]: 12 <---- 7 (0) +[1]: 13 <---- 7 (0) +[1]: 13 <---- 10 (0) +[1]: 14 <---- 5 (0) +[1]: 14 <---- 10 (0) +[1]: 15 <---- 5 (0) +[1]: 15 <---- 3 (0) +[1]: 16 <---- 6 (0) +[1]: 16 <---- 8 (0) +[1]: 17 <---- 8 (0) +[1]: 17 <---- 9 (0) +[1]: 18 <---- 9 (0) +[1]: 18 <---- 7 (0) +[1]: 19 <---- 7 (0) +[1]: 19 <---- 6 (0) +[1]: 20 <---- 9 (0) +[1]: 20 <---- 11 (0) +[1]: 21 <---- 10 (0) +[1]: 21 <---- 11 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 3) dim 2 offset 0 0. 0. + ( 4) dim 2 offset 2 0. 0.5 + ( 5) dim 2 offset 4 0.333333 0. + ( 6) dim 2 offset 6 0.333333 0.5 + ( 7) dim 2 offset 8 0. 1. + ( 8) dim 2 offset 10 0.333333 1. + ( 9) dim 2 offset 12 0.666667 0. + ( 10) dim 2 offset 14 0.666667 0.5 +Process 1: + ( 3) dim 2 offset 0 0.333333 0.5 + ( 4) dim 2 offset 2 0. 1. + ( 5) dim 2 offset 4 0.333333 1. + ( 6) dim 2 offset 6 0.666667 0. + ( 7) dim 2 offset 8 0.666667 0.5 + ( 8) dim 2 offset 10 1. 0. + ( 9) dim 2 offset 12 1. 0.5 + ( 10) dim 2 offset 14 0.666667 1. + ( 11) dim 2 offset 16 1. 1. 
+Labels: +Label 'celltype': +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 10 (0) +[1]: 11 (0) +[1]: 0 (4) +[1]: 1 (4) +[1]: 2 (4) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +Label 'Face Sets': +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=21, leaves=8, remote ranks=1 + [0] 6 <- (1,3) + [0] 7 <- (1,4) + [0] 8 <- (1,5) + [0] 9 <- (1,6) + [0] 10 <- (1,7) + [0] 15 <- (1,15) + [0] 19 <- (1,19) + [0] 20 <- (1,12) + [1] Number of roots=22, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 8 edges + [0] 6 <- 3 + [0] 7 <- 4 + [0] 8 <- 5 + [0] 9 <- 6 + [0] 10 <- 7 + [0] 15 <- 15 + [0] 19 <- 19 + [0] 20 <- 12 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +PetscSF Object: Z-order Isoperiodic Faces #0 2 MPI processes + type: basic + [0] Number of roots=21, leaves=0, remote ranks=0 + [1] Number of roots=22, leaves=2, remote ranks=1 + [1] 17 <- (0,14) + [1] 20 <- (0,17) + [0] Roots referenced by my leaves, by rank + [1] Roots referenced by my leaves, by rank + [1] 0: 2 edges + [1] 17 <- 14 + [1] 20 <- 17 + MultiSF sort=rank-order +PetscSF Object: Z-order Isoperiodic Faces #1 2 MPI processes + type: basic + [0] Number of roots=21, leaves=1, remote ranks=1 + [0] 16 <- (0,11) + [1] Number of roots=22, leaves=2, remote ranks=2 + [1] 14 <- (0,18) + [1] 21 <- (1,16) + [0] Roots referenced by my leaves, by rank + [0] 0: 1 edges + [0] 16 <- 11 + [1] Roots referenced by my leaves, by rank + [1] 0: 1 edges + [1] 14 <- 18 + [1] 1: 1 edges + [1] 21 <- 16 + MultiSF sort=rank-order +## cells: 'default' {0} +Coordinates by element in tensor order +[0] 0 <-- 0 ( 0.00, 0.00) +[0] 1 <-- 4 ( 0.33, 0.00) +[0] 2 <-- 2 ( 0.00, 0.50) +[0] 3 <-- 6 ( 0.33, 0.50) +[1] 0 <-- 2 ( 0.00, 0.50) +[1] 1 <-- 6 ( 0.33, 0.50) +[1] 2 <-- 8 ( 0.00, 1.00) +[1] 3 <-- 10 ( 0.33, 1.00) +[2] 0 <-- 4 ( 0.33, 0.00) +[2] 1 <-- 12 ( 0.67, 0.00) +[2] 2 <-- 6 ( 0.33, 0.50) +[2] 3 <-- 14 ( 0.67, 0.50) +[0] 0 <-- 0 ( 0.33, 0.50) +[0] 1 <-- 8 ( 0.67, 0.50) +[0] 2 <-- 4 ( 0.33, 1.00) +[0] 3 <-- 14 ( 0.67, 1.00) +[1] 0 <-- 6 ( 0.67, 0.00) +[1] 1 <-- 10 ( 1.00, 0.00) +[1] 2 <-- 8 ( 0.67, 0.50) +[1] 3 <-- 12 ( 1.00, 0.50) +[2] 0 <-- 8 ( 0.67, 0.50) +[2] 1 <-- 12 ( 1.00, 0.50) +[2] 2 <-- 14 ( 0.67, 1.00) +[2] 3 <-- 16 ( 1.00, 1.00) +## faces: 'Face Sets' {1} +Coordinates by element in tensor order diff --git a/src/dm/impls/plex/tests/output/ex49_2d_sfc_biperiodic_stranded_dist.out b/src/dm/impls/plex/tests/output/ex49_2d_sfc_biperiodic_stranded_dist.out new file mode 100644 index 00000000000..5929b78f8c3 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex49_2d_sfc_biperiodic_stranded_dist.out @@ -0,0 +1,280 @@ +DM Object: zbox 2 MPI processes + type: plex +zbox in 2 dimensions: +Supports: +[0] Max support size: 4 +[0]: 3 ----> 11 +[0]: 3 ----> 14 +[0]: 4 ----> 13 +[0]: 4 ----> 14 +[0]: 4 ----> 16 +[0]: 5 ----> 11 +[0]: 5 ----> 12 +[0]: 5 ----> 17 +[0]: 6 ----> 12 +[0]: 6 ----> 13 +[0]: 6 ----> 18 +[0]: 6 ----> 19 +[0]: 7 ----> 15 +[0]: 7 ----> 16 +[0]: 8 ----> 15 +[0]: 8 ----> 19 +[0]: 9 ----> 17 +[0]: 9 ----> 20 +[0]: 10 ----> 18 +[0]: 10 ----> 20 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 12 ----> 
2 +[0]: 13 ----> 0 +[0]: 13 ----> 1 +[0]: 14 ----> 0 +[0]: 15 ----> 1 +[0]: 16 ----> 1 +[0]: 17 ----> 2 +[0]: 18 ----> 2 +[0]: 19 ----> 1 +[0]: 20 ----> 2 +[1] Max support size: 4 +[1]: 3 ----> 11 +[1]: 3 ----> 14 +[1]: 4 ----> 13 +[1]: 4 ----> 14 +[1]: 5 ----> 15 +[1]: 5 ----> 18 +[1]: 6 ----> 11 +[1]: 6 ----> 12 +[1]: 6 ----> 17 +[1]: 6 ----> 18 +[1]: 7 ----> 15 +[1]: 7 ----> 16 +[1]: 8 ----> 16 +[1]: 8 ----> 17 +[1]: 8 ----> 19 +[1]: 9 ----> 12 +[1]: 9 ----> 13 +[1]: 9 ----> 20 +[1]: 10 ----> 19 +[1]: 10 ----> 20 +[1]: 11 ----> 0 +[1]: 12 ----> 0 +[1]: 12 ----> 2 +[1]: 13 ----> 0 +[1]: 14 ----> 0 +[1]: 15 ----> 1 +[1]: 16 ----> 1 +[1]: 17 ----> 1 +[1]: 17 ----> 2 +[1]: 18 ----> 1 +[1]: 19 ----> 2 +[1]: 20 ----> 2 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (0) +[0]: 0 <---- 13 (0) +[0]: 0 <---- 14 (-1) +[0]: 1 <---- 13 (-1) +[0]: 1 <---- 19 (-1) +[0]: 1 <---- 15 (-1) +[0]: 1 <---- 16 (-1) +[0]: 2 <---- 17 (0) +[0]: 2 <---- 20 (-1) +[0]: 2 <---- 18 (-1) +[0]: 2 <---- 12 (-1) +[0]: 11 <---- 3 (0) +[0]: 11 <---- 5 (0) +[0]: 12 <---- 5 (0) +[0]: 12 <---- 6 (0) +[0]: 13 <---- 6 (0) +[0]: 13 <---- 4 (0) +[0]: 14 <---- 3 (0) +[0]: 14 <---- 4 (0) +[0]: 15 <---- 7 (0) +[0]: 15 <---- 8 (0) +[0]: 16 <---- 4 (0) +[0]: 16 <---- 7 (0) +[0]: 17 <---- 5 (0) +[0]: 17 <---- 9 (0) +[0]: 18 <---- 6 (0) +[0]: 18 <---- 10 (0) +[0]: 19 <---- 8 (0) +[0]: 19 <---- 6 (0) +[0]: 20 <---- 10 (0) +[0]: 20 <---- 9 (0) +[1] Max cone size: 4 +[1]: 0 <---- 11 (0) +[1]: 0 <---- 12 (0) +[1]: 0 <---- 13 (-1) +[1]: 0 <---- 14 (0) +[1]: 1 <---- 15 (0) +[1]: 1 <---- 16 (0) +[1]: 1 <---- 17 (0) +[1]: 1 <---- 18 (0) +[1]: 2 <---- 17 (-1) +[1]: 2 <---- 19 (0) +[1]: 2 <---- 20 (-1) +[1]: 2 <---- 12 (-1) +[1]: 11 <---- 3 (0) +[1]: 11 <---- 6 (0) +[1]: 12 <---- 6 (0) +[1]: 12 <---- 9 (0) +[1]: 13 <---- 4 (0) +[1]: 13 <---- 9 (0) +[1]: 14 <---- 4 (0) +[1]: 14 <---- 3 (0) +[1]: 15 <---- 5 (0) +[1]: 15 <---- 7 (0) +[1]: 16 <---- 7 (0) +[1]: 16 <---- 8 (0) +[1]: 17 <---- 8 (0) +[1]: 17 <---- 6 (0) +[1]: 18 <---- 6 (0) +[1]: 18 <---- 5 (0) +[1]: 19 <---- 8 (0) +[1]: 19 <---- 10 (0) +[1]: 20 <---- 9 (0) +[1]: 20 <---- 10 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 3) dim 2 offset 0 0. 0. + ( 4) dim 2 offset 2 0. 0.5 + ( 5) dim 2 offset 4 0.333333 0. + ( 6) dim 2 offset 6 0.333333 0.5 + ( 7) dim 2 offset 8 0. 1. + ( 8) dim 2 offset 10 0.333333 1. + ( 9) dim 2 offset 12 0.666667 0. + ( 10) dim 2 offset 14 0.666667 0.5 +Process 1: + ( 3) dim 2 offset 0 0.333333 0.5 + ( 4) dim 2 offset 2 0.333333 1. + ( 5) dim 2 offset 4 0.666667 0. + ( 6) dim 2 offset 6 0.666667 0.5 + ( 7) dim 2 offset 8 1. 0. + ( 8) dim 2 offset 10 1. 0.5 + ( 9) dim 2 offset 12 0.666667 1. + ( 10) dim 2 offset 14 1. 1. 
+Labels: +Label 'celltype': +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 10 (0) +[1]: 11 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 0 (4) +[1]: 1 (4) +[1]: 2 (4) +Label 'Face Sets': +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=21, leaves=7, remote ranks=1 + [0] 6 <- (1,3) + [0] 8 <- (1,4) + [0] 9 <- (1,5) + [0] 10 <- (1,6) + [0] 18 <- (1,11) + [0] 19 <- (1,14) + [0] 20 <- (1,18) + [1] Number of roots=21, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 7 edges + [0] 6 <- 3 + [0] 8 <- 4 + [0] 9 <- 5 + [0] 10 <- 6 + [0] 18 <- 11 + [0] 19 <- 14 + [0] 20 <- 18 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +PetscSF Object: Migrated Isoperiodic Faces #0 2 MPI processes + type: basic + [0] Number of roots=21, leaves=0, remote ranks=0 + [1] Number of roots=21, leaves=2, remote ranks=1 + [1] 16 <- (0,14) + [1] 19 <- (0,16) + [0] Roots referenced by my leaves, by rank + [1] Roots referenced by my leaves, by rank + [1] 0: 2 edges + [1] 16 <- 14 + [1] 19 <- 16 + MultiSF sort=rank-order +PetscSF Object: Migrated Isoperiodic Faces #1 2 MPI processes + type: basic + [0] Number of roots=21, leaves=1, remote ranks=1 + [0] 15 <- (0,11) + [1] Number of roots=21, leaves=2, remote ranks=2 + [1] 13 <- (0,17) + [1] 20 <- (1,15) + [0] Roots referenced by my leaves, by rank + [0] 0: 1 edges + [0] 15 <- 11 + [1] Roots referenced by my leaves, by rank + [1] 0: 1 edges + [1] 13 <- 17 + [1] 1: 1 edges + [1] 20 <- 15 + MultiSF sort=rank-order +## cells: 'default' {0} +Coordinates by element in tensor order +[0] 0 <-- 0 ( 0.00, 0.00) +[0] 1 <-- 4 ( 0.33, 0.00) +[0] 2 <-- 2 ( 0.00, 0.50) +[0] 3 <-- 6 ( 0.33, 0.50) +[1] 0 <-- 2 ( 0.00, 0.50) +[1] 1 <-- 6 ( 0.33, 0.50) +[1] 2 <-- 8 ( 0.00, 1.00) +[1] 3 <-- 10 ( 0.33, 1.00) +[2] 0 <-- 4 ( 0.33, 0.00) +[2] 1 <-- 12 ( 0.67, 0.00) +[2] 2 <-- 6 ( 0.33, 0.50) +[2] 3 <-- 14 ( 0.67, 0.50) +[0] 0 <-- 0 ( 0.33, 0.50) +[0] 1 <-- 6 ( 0.67, 0.50) +[0] 2 <-- 2 ( 0.33, 1.00) +[0] 3 <-- 12 ( 0.67, 1.00) +[1] 0 <-- 4 ( 0.67, 0.00) +[1] 1 <-- 8 ( 1.00, 0.00) +[1] 2 <-- 6 ( 0.67, 0.50) +[1] 3 <-- 10 ( 1.00, 0.50) +[2] 0 <-- 6 ( 0.67, 0.50) +[2] 1 <-- 10 ( 1.00, 0.50) +[2] 2 <-- 12 ( 0.67, 1.00) +[2] 3 <-- 14 ( 1.00, 1.00) +## faces: 'Face Sets' {1} +Coordinates by element in tensor order diff --git a/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic.out b/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic.out index 5ad1f3380ed..7b03eea6fca 100644 --- a/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic.out +++ b/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic.out @@ -370,7 +370,7 @@ PetscSF Object: 2 MPI processes [0] 33 <- 32 [1] Roots referenced by my leaves, by rank MultiSF sort=rank-order -PetscSF Object: Z-order Isoperiodic Faces 2 MPI processes +PetscSF Object: Z-order Isoperiodic Faces #0 2 MPI processes type: basic [0] Number of roots=35, leaves=0, remote ranks=0 [1] Number of roots=35, leaves=3, remote ranks=1 diff --git a/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded.out b/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded.out index 02b4827ba22..6205f8a129a 100644 --- 
a/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded.out +++ b/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded.out @@ -230,7 +230,7 @@ PetscSF Object: 2 MPI processes [0] 20 <- 12 [1] Roots referenced by my leaves, by rank MultiSF sort=rank-order -PetscSF Object: Z-order Isoperiodic Faces 2 MPI processes +PetscSF Object: Z-order Isoperiodic Faces #0 2 MPI processes type: basic [0] Number of roots=21, leaves=1, remote ranks=1 [0] 16 <- (0,11) diff --git a/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded_dist.out b/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded_dist.out index 7040e8e6807..23855dbe69f 100644 --- a/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded_dist.out +++ b/src/dm/impls/plex/tests/output/ex49_2d_sfc_periodic_stranded_dist.out @@ -226,7 +226,7 @@ PetscSF Object: 2 MPI processes [0] 20 <- 18 [1] Roots referenced by my leaves, by rank MultiSF sort=rank-order -PetscSF Object: Migrated Isoperiodic Faces 2 MPI processes +PetscSF Object: Migrated Isoperiodic Faces #0 2 MPI processes type: basic [0] Number of roots=21, leaves=1, remote ranks=1 [0] 15 <- (0,11) diff --git a/src/dm/impls/plex/tests/output/ex57_sphere_extruded.out b/src/dm/impls/plex/tests/output/ex57_sphere_extruded.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex5_tri_0_perm.out b/src/dm/impls/plex/tests/output/ex5_tri_0_perm.out index e3c6dd7cd10..459db031bf1 100644 --- a/src/dm/impls/plex/tests/output/ex5_tri_0_perm.out +++ b/src/dm/impls/plex/tests/output/ex5_tri_0_perm.out @@ -24,46 +24,44 @@ PetscSection Object: 1 MPI process 2 fields field 0 "displacement" with 2 components Process 0: - ( 0) dim 0 offset 0 - ( 1) dim 0 offset 0 - ( 2) dim 0 offset 16 - ( 3) dim 2 offset 0 - ( 4) dim 2 offset 2 - ( 5) dim 2 offset 8 + ( 0) dim 0 offset 12 + ( 1) dim 0 offset 12 + ( 2) dim 0 offset 12 + ( 3) dim 2 offset 12 + ( 4) dim 2 offset 0 + ( 5) dim 2 offset 6 ( 6) dim 2 offset 14 - ( 7) dim 2 offset 4 - ( 8) dim 2 offset 10 + ( 7) dim 2 offset 2 + ( 8) dim 2 offset 8 ( 9) dim 0 offset 16 - ( 10) dim 0 offset 16 + ( 10) dim 0 offset 0 ( 11) dim 0 offset 16 ( 12) dim 0 offset 16 ( 13) dim 0 offset 16 - ( 14) dim 0 offset 16 - ( 15) dim 0 offset 6 - ( 16) dim 0 offset 12 + ( 14) dim 0 offset 0 + ( 15) dim 0 offset 4 + ( 16) dim 0 offset 10 field 1 "fault traction" with 2 components Process 0: - ( 0) dim 0 offset 0 - ( 1) dim 0 offset 0 - ( 2) dim 0 offset 16 - ( 3) dim 0 offset 2 - ( 4) dim 0 offset 4 - ( 5) dim 0 offset 10 + ( 0) dim 0 offset 12 + ( 1) dim 0 offset 12 + ( 2) dim 0 offset 12 + ( 3) dim 0 offset 14 + ( 4) dim 0 offset 2 + ( 5) dim 0 offset 8 ( 6) dim 0 offset 16 - ( 7) dim 0 offset 6 - ( 8) dim 0 offset 12 + ( 7) dim 0 offset 4 + ( 8) dim 0 offset 10 ( 9) dim 0 offset 16 - ( 10) dim 0 offset 16 + ( 10) dim 0 offset 0 ( 11) dim 0 offset 16 ( 12) dim 0 offset 16 ( 13) dim 0 offset 16 - ( 14) dim 0 offset 16 - ( 15) dim 2 offset 6 - ( 16) dim 2 offset 12 + ( 14) dim 0 offset 0 + ( 15) dim 2 offset 4 + ( 16) dim 2 offset 10 Vec Object: Local Solution 1 MPI process type: seq --0.5 -0.5 -2.77556e-17 2.77556e-17 0. @@ -76,6 +74,8 @@ Vec Object: Local Solution 1 MPI process 2. -1. 0. +-0.5 +0.5 0.5 1.5 Discrete System with 2 fields @@ -134,8 +134,6 @@ Discrete System with 2 fields (material, 2) (0, 1) Vec Object: Local Residual 1 MPI process type: seq -0. -0. 0.166667 0. -0.166667 @@ -150,21 +148,23 @@ Vec Object: Local Residual 1 MPI process 0. 0. 0. +0. +0. 
Mat Object: Jacobian 1 MPI process type: seqaij -row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) -row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) -row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, -0.333333) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, -0.166667) (13, 0.) -row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, -0.333333) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, -0.166667) -row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.333333) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.166667) (13, 0.) (14, 0.) (15, 0.) -row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.333333) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.166667) (14, 0.) (15, 0.) -row 6: (2, -0.333333) (3, 0.) (4, 0.333333) (5, 0.) (6, 0.) (7, 0.) (8, -0.166667) (9, 0.) (10, 0.166667) (11, 0.) (12, 0.) (13, 0.) -row 7: (2, 0.) (3, -0.333333) (4, 0.) (5, 0.333333) (6, 0.) (7, 0.) (8, 0.) (9, -0.166667) (10, 0.) (11, 0.166667) (12, 0.) (13, 0.) -row 8: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, -0.166667) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, -0.333333) (13, 0.) -row 9: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, -0.166667) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, -0.333333) -row 10: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.166667) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.333333) (13, 0.) (14, 0.) (15, 0.) -row 11: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.166667) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.333333) (14, 0.) (15, 0.) -row 12: (2, -0.166667) (3, 0.) (4, 0.166667) (5, 0.) (6, 0.) (7, 0.) (8, -0.333333) (9, 0.) (10, 0.333333) (11, 0.) (12, 0.) (13, 0.) -row 13: (2, 0.) (3, -0.166667) (4, 0.) (5, 0.166667) (6, 0.) (7, 0.) (8, 0.) (9, -0.333333) (10, 0.) (11, 0.333333) (12, 0.) (13, 0.) -row 14: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (14, 0.) (15, 0.) -row 15: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (14, 0.) (15, 0.) +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, -0.333333) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, -0.166667) (11, 0.) (12, 0.) (13, 0.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, -0.333333) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, -0.166667) (12, 0.) (13, 0.) +row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.333333) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.166667) (11, 0.) (14, 0.) (15, 0.) +row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.333333) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.166667) (14, 0.) (15, 0.) +row 4: (0, -0.333333) (1, 0.) (2, 0.333333) (3, 0.) (4, 0.) (5, 0.) (6, -0.166667) (7, 0.) (8, 0.166667) (9, 0.) (10, 0.) (11, 0.) +row 5: (0, 0.) (1, -0.333333) (2, 0.) (3, 0.333333) (4, 0.) (5, 0.) (6, 0.) (7, -0.166667) (8, 0.) (9, 0.166667) (10, 0.) (11, 0.) +row 6: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, -0.166667) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, -0.333333) (11, 0.) (12, 0.) (13, 0.) +row 7: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, -0.166667) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, -0.333333) (12, 0.) (13, 0.) +row 8: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.166667) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.333333) (11, 0.) (14, 0.) (15, 0.) +row 9: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.166667) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.333333) (14, 0.) (15, 0.) +row 10: (0, -0.166667) (1, 0.) (2, 0.166667) (3, 0.) (4, 0.) (5, 0.) (6, -0.333333) (7, 0.) (8, 0.333333) (9, 0.) (10, 0.) (11, 0.) +row 11: (0, 0.) (1, -0.166667) (2, 0.) (3, 0.166667) (4, 0.) (5, 0.) (6, 0.) (7, -0.333333) (8, 0.) (9, 0.333333) (10, 0.) (11, 0.) 
+row 12: (0, 0.) (1, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 13: (0, 0.) (1, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 14: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (14, 0.) (15, 0.) +row 15: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (14, 0.) (15, 0.) diff --git a/src/dm/impls/plex/tests/output/ex5_tri_t1_0_perm.out b/src/dm/impls/plex/tests/output/ex5_tri_t1_0_perm.out index befbf5dd5ba..afd20d2f9d0 100644 --- a/src/dm/impls/plex/tests/output/ex5_tri_t1_0_perm.out +++ b/src/dm/impls/plex/tests/output/ex5_tri_t1_0_perm.out @@ -37,84 +37,80 @@ PetscSection Object: 1 MPI process 2 fields field 0 "displacement" with 2 components Process 0: - ( 0) dim 0 offset 0 - ( 1) dim 0 offset 0 - ( 2) dim 0 offset 0 - ( 3) dim 0 offset 0 - ( 4) dim 0 offset 24 - ( 5) dim 0 offset 24 - ( 6) dim 2 offset 0 - ( 7) dim 2 offset 2 - ( 8) dim 2 offset 8 - ( 9) dim 2 offset 14 - ( 10) dim 2 offset 16 + ( 0) dim 0 offset 18 + ( 1) dim 0 offset 18 + ( 2) dim 0 offset 18 + ( 3) dim 0 offset 18 + ( 4) dim 0 offset 12 + ( 5) dim 0 offset 18 + ( 6) dim 2 offset 18 + ( 7) dim 2 offset 6 + ( 8) dim 2 offset 0 + ( 9) dim 2 offset 20 + ( 10) dim 2 offset 12 ( 11) dim 2 offset 22 - ( 12) dim 2 offset 4 - ( 13) dim 2 offset 10 - ( 14) dim 2 offset 18 + ( 12) dim 2 offset 8 + ( 13) dim 2 offset 2 + ( 14) dim 2 offset 14 ( 15) dim 0 offset 24 - ( 16) dim 0 offset 24 + ( 16) dim 0 offset 0 ( 17) dim 0 offset 24 ( 18) dim 0 offset 24 ( 19) dim 0 offset 24 - ( 20) dim 0 offset 24 + ( 20) dim 0 offset 12 ( 21) dim 0 offset 24 ( 22) dim 0 offset 24 ( 23) dim 0 offset 24 - ( 24) dim 0 offset 24 - ( 25) dim 0 offset 24 - ( 26) dim 0 offset 6 - ( 27) dim 0 offset 12 - ( 28) dim 0 offset 20 + ( 24) dim 0 offset 0 + ( 25) dim 0 offset 12 + ( 26) dim 0 offset 10 + ( 27) dim 0 offset 4 + ( 28) dim 0 offset 16 field 1 "fault traction" with 2 components Process 0: - ( 0) dim 0 offset 0 - ( 1) dim 0 offset 0 - ( 2) dim 0 offset 0 - ( 3) dim 0 offset 0 - ( 4) dim 0 offset 24 - ( 5) dim 0 offset 24 - ( 6) dim 0 offset 2 - ( 7) dim 0 offset 4 - ( 8) dim 0 offset 10 - ( 9) dim 0 offset 16 - ( 10) dim 0 offset 18 + ( 0) dim 0 offset 18 + ( 1) dim 0 offset 18 + ( 2) dim 0 offset 18 + ( 3) dim 0 offset 18 + ( 4) dim 0 offset 12 + ( 5) dim 0 offset 18 + ( 6) dim 0 offset 20 + ( 7) dim 0 offset 8 + ( 8) dim 0 offset 2 + ( 9) dim 0 offset 22 + ( 10) dim 0 offset 14 ( 11) dim 0 offset 24 - ( 12) dim 0 offset 6 - ( 13) dim 0 offset 12 - ( 14) dim 0 offset 20 + ( 12) dim 0 offset 10 + ( 13) dim 0 offset 4 + ( 14) dim 0 offset 16 ( 15) dim 0 offset 24 - ( 16) dim 0 offset 24 + ( 16) dim 0 offset 0 ( 17) dim 0 offset 24 ( 18) dim 0 offset 24 ( 19) dim 0 offset 24 - ( 20) dim 0 offset 24 + ( 20) dim 0 offset 12 ( 21) dim 0 offset 24 ( 22) dim 0 offset 24 ( 23) dim 0 offset 24 - ( 24) dim 0 offset 24 - ( 25) dim 0 offset 24 - ( 26) dim 2 offset 6 - ( 27) dim 2 offset 12 - ( 28) dim 2 offset 20 + ( 24) dim 0 offset 0 + ( 25) dim 0 offset 12 + ( 26) dim 2 offset 10 + ( 27) dim 2 offset 4 + ( 28) dim 2 offset 16 Vec Object: Local Solution 1 MPI process type: seq +-5.55112e-17 -1. 0. +2.22045e-16 +1. +0. -2.22045e-16 1. -1.66533e-16 2. -1. 0. --5.55112e-17 --1. -0. -2.22045e-16 -1. -0. -1. -1. -2. 1. -2. @@ -122,6 +118,10 @@ Vec Object: Local Solution 1 MPI process -1. -2. -1. +0. +1. +1. +-1. 3. Discrete System with 2 fields cell total dim 12 total comp 4 @@ -179,22 +179,18 @@ Discrete System with 2 fields (material, 2) (0, 1) Vec Object: Local Residual 1 MPI process type: seq +-0.333333 0. +0.333333 0. 
+-5.55112e-17 +-4.0637e-17 1.33333 0.666667 -1.33333 -0.666667 -9.92914e-17 3.73704e-16 --0.333333 -0. -0.333333 -0. --5.55112e-17 --4.0637e-17 -0. -0. 1. 1.33333 -1. @@ -203,29 +199,33 @@ Vec Object: Local Residual 1 MPI process 2.22045e-16 0. 0. +0. +0. +0. +0. Mat Object: Jacobian 1 MPI process type: seqaij -row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 0.) (17, 0.) -row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 0.) (17, 0.) -row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, -1.33333) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, -0.333333) (13, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, -0.333333) (21, 0.) -row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, -1.33333) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, -0.333333) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, -0.333333) -row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 1.33333) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.333333) (13, 0.) (14, 0.) (15, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.333333) (21, 0.) (22, 0.) (23, 0.) -row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 1.33333) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.333333) (14, 0.) (15, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.333333) (22, 0.) (23, 0.) -row 6: (2, -1.33333) (3, 0.) (4, 1.33333) (5, 0.) (6, 0.) (7, 0.) (8, -0.333333) (9, 0.) (10, 0.333333) (11, 0.) (12, 0.) (13, 0.) (16, -0.333333) (17, 0.) (18, 0.333333) (19, 0.) (20, 0.) (21, 0.) -row 7: (2, 0.) (3, -1.33333) (4, 0.) (5, 1.33333) (6, 0.) (7, 0.) (8, 0.) (9, -0.333333) (10, 0.) (11, 0.333333) (12, 0.) (13, 0.) (16, 0.) (17, -0.333333) (18, 0.) (19, 0.333333) (20, 0.) (21, 0.) -row 8: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, -0.333333) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, -0.666667) (13, 0.) -row 9: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, -0.333333) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, -0.666667) -row 10: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.333333) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.666667) (13, 0.) (14, 0.) (15, 0.) -row 11: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.333333) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.666667) (14, 0.) (15, 0.) -row 12: (2, -0.333333) (3, 0.) (4, 0.333333) (5, 0.) (6, 0.) (7, 0.) (8, -0.666667) (9, 0.) (10, 0.666667) (11, 0.) (12, 0.) (13, 0.) -row 13: (2, 0.) (3, -0.333333) (4, 0.) (5, 0.333333) (6, 0.) (7, 0.) (8, 0.) (9, -0.666667) (10, 0.) (11, 0.666667) (12, 0.) (13, 0.) -row 14: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (14, 0.) (15, 0.) -row 15: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (14, 0.) (15, 0.) -row 16: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, -0.333333) (7, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, -0.666667) (21, 0.) -row 17: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, -0.333333) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, -0.666667) -row 18: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.333333) (7, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.666667) (21, 0.) (22, 0.) (23, 0.) -row 19: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.333333) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.666667) (22, 0.) (23, 0.) -row 20: (2, -0.333333) (3, 0.) (4, 0.333333) (5, 0.) (6, 0.) (7, 0.) (16, -0.666667) (17, 0.) (18, 0.666667) (19, 0.) (20, 0.) (21, 0.) -row 21: (2, 0.) (3, -0.333333) (4, 0.) (5, 0.333333) (6, 0.) (7, 0.) (16, 0.) (17, -0.666667) (18, 0.) (19, 0.666667) (20, 0.) (21, 0.) -row 22: (4, 0.) (5, 0.) (18, 0.) (19, 0.) (22, 0.) (23, 0.) -row 23: (4, 0.) (5, 0.) 
(18, 0.) (19, 0.) (22, 0.) (23, 0.) +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, -0.666667) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, -0.333333) (11, 0.) (18, 0.) (19, 0.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, -0.666667) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, -0.333333) (18, 0.) (19, 0.) +row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.666667) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.333333) (11, 0.) (20, 0.) (21, 0.) +row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.666667) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.333333) (20, 0.) (21, 0.) +row 4: (0, -0.666667) (1, 0.) (2, 0.666667) (3, 0.) (4, 0.) (5, 0.) (6, -0.333333) (7, 0.) (8, 0.333333) (9, 0.) (10, 0.) (11, 0.) +row 5: (0, 0.) (1, -0.666667) (2, 0.) (3, 0.666667) (4, 0.) (5, 0.) (6, 0.) (7, -0.333333) (8, 0.) (9, 0.333333) (10, 0.) (11, 0.) +row 6: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, -0.333333) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, -1.33333) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, -0.333333) (17, 0.) (18, 0.) (19, 0.) +row 7: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, -0.333333) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, -1.33333) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, 0.) (17, -0.333333) (18, 0.) (19, 0.) +row 8: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.333333) (5, 0.) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 1.33333) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, 0.333333) (17, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) +row 9: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.333333) (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 1.33333) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, 0.) (17, 0.333333) (20, 0.) (21, 0.) (22, 0.) (23, 0.) +row 10: (0, -0.333333) (1, 0.) (2, 0.333333) (3, 0.) (4, 0.) (5, 0.) (6, -1.33333) (7, 0.) (8, 1.33333) (9, 0.) (10, 0.) (11, 0.) (12, -0.333333) (13, 0.) (14, 0.333333) (15, 0.) (16, 0.) (17, 0.) +row 11: (0, 0.) (1, -0.333333) (2, 0.) (3, 0.333333) (4, 0.) (5, 0.) (6, 0.) (7, -1.33333) (8, 0.) (9, 1.33333) (10, 0.) (11, 0.) (12, 0.) (13, -0.333333) (14, 0.) (15, 0.333333) (16, 0.) (17, 0.) +row 12: (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, -0.333333) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, -0.666667) (17, 0.) (18, 0.) (19, 0.) +row 13: (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, -0.333333) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, 0.) (17, -0.666667) (18, 0.) (19, 0.) +row 14: (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.333333) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, 0.666667) (17, 0.) (22, 0.) (23, 0.) +row 15: (6, 0.) (7, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.333333) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (16, 0.) (17, 0.666667) (22, 0.) (23, 0.) +row 16: (6, -0.333333) (7, 0.) (8, 0.333333) (9, 0.) (10, 0.) (11, 0.) (12, -0.666667) (13, 0.) (14, 0.666667) (15, 0.) (16, 0.) (17, 0.) +row 17: (6, 0.) (7, -0.333333) (8, 0.) (9, 0.333333) (10, 0.) (11, 0.) (12, 0.) (13, -0.666667) (14, 0.) (15, 0.666667) (16, 0.) (17, 0.) +row 18: (0, 0.) (1, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 19: (0, 0.) (1, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 20: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (20, 0.) (21, 0.) +row 21: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (20, 0.) (21, 0.) +row 22: (8, 0.) (9, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 23: (8, 0.) (9, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) 
diff --git a/src/dm/impls/plex/tests/output/ex66_0.out b/src/dm/impls/plex/tests/output/ex66_0.out index f5e662f9594..f19717f0a19 100644 --- a/src/dm/impls/plex/tests/output/ex66_0.out +++ b/src/dm/impls/plex/tests/output/ex66_0.out @@ -43,72 +43,120 @@ DM Object: cubeline-fromdag (interp_) 1 MPI process cubeline-fromdag in 3 dimensions: Supports: [0] Max support size: 4 -[0]: 1 ----> 11 -[0]: 1 ----> 13 -[0]: 1 ----> 15 -[0]: 2 ----> 11 -[0]: 2 ----> 14 -[0]: 2 ----> 15 -[0]: 3 ----> 11 -[0]: 3 ----> 13 +[0]: 1 ----> 18 +[0]: 1 ----> 19 +[0]: 1 ----> 24 +[0]: 2 ----> 17 +[0]: 2 ----> 18 +[0]: 2 ----> 27 [0]: 3 ----> 16 -[0]: 4 ----> 11 -[0]: 4 ----> 14 +[0]: 3 ----> 19 +[0]: 3 ----> 25 [0]: 4 ----> 16 -[0]: 5 ----> 12 -[0]: 5 ----> 13 -[0]: 5 ----> 15 -[0]: 6 ----> 10 -[0]: 6 ----> 12 -[0]: 6 ----> 14 -[0]: 6 ----> 15 -[0]: 7 ----> 12 -[0]: 7 ----> 13 -[0]: 7 ----> 16 -[0]: 8 ----> 12 -[0]: 8 ----> 14 -[0]: 8 ----> 16 -[0]: 9 ----> 10 +[0]: 4 ----> 17 +[0]: 4 ----> 26 +[0]: 5 ----> 20 +[0]: 5 ----> 21 +[0]: 5 ----> 24 +[0]: 6 ----> 21 +[0]: 6 ----> 22 +[0]: 6 ----> 27 +[0]: 6 ----> 28 +[0]: 7 ----> 20 +[0]: 7 ----> 23 +[0]: 7 ----> 25 +[0]: 8 ----> 22 +[0]: 8 ----> 23 +[0]: 8 ----> 26 +[0]: 9 ----> 28 +[0]: 10 ----> 0 [0]: 11 ----> 0 [0]: 12 ----> 0 [0]: 13 ----> 0 [0]: 14 ----> 0 [0]: 15 ----> 0 -[0]: 16 ----> 0 +[0]: 16 ----> 10 +[0]: 16 ----> 15 +[0]: 17 ----> 10 +[0]: 17 ----> 13 +[0]: 18 ----> 10 +[0]: 18 ----> 14 +[0]: 19 ----> 10 +[0]: 19 ----> 12 +[0]: 20 ----> 11 +[0]: 20 ----> 12 +[0]: 21 ----> 11 +[0]: 21 ----> 14 +[0]: 22 ----> 11 +[0]: 22 ----> 13 +[0]: 23 ----> 11 +[0]: 23 ----> 15 +[0]: 24 ----> 12 +[0]: 24 ----> 14 +[0]: 25 ----> 12 +[0]: 25 ----> 15 +[0]: 26 ----> 13 +[0]: 26 ----> 15 +[0]: 27 ----> 13 +[0]: 27 ----> 14 Cones: [0] Max cone size: 6 +[0]: 0 <---- 10 (0) [0]: 0 <---- 11 (0) [0]: 0 <---- 12 (0) [0]: 0 <---- 13 (0) [0]: 0 <---- 14 (0) [0]: 0 <---- 15 (0) -[0]: 0 <---- 16 (0) -[0]: 10 <---- 6 (0) -[0]: 10 <---- 9 (0) -[0]: 11 <---- 3 (0) -[0]: 11 <---- 4 (0) -[0]: 11 <---- 2 (0) -[0]: 11 <---- 1 (0) -[0]: 12 <---- 7 (0) -[0]: 12 <---- 5 (0) -[0]: 12 <---- 6 (0) -[0]: 12 <---- 8 (0) -[0]: 13 <---- 3 (0) -[0]: 13 <---- 1 (0) -[0]: 13 <---- 5 (0) -[0]: 13 <---- 7 (0) -[0]: 14 <---- 2 (0) -[0]: 14 <---- 4 (0) -[0]: 14 <---- 8 (0) -[0]: 14 <---- 6 (0) -[0]: 15 <---- 1 (0) -[0]: 15 <---- 2 (0) -[0]: 15 <---- 6 (0) -[0]: 15 <---- 5 (0) +[0]: 10 <---- 16 (0) +[0]: 10 <---- 17 (0) +[0]: 10 <---- 18 (0) +[0]: 10 <---- 19 (0) +[0]: 11 <---- 20 (0) +[0]: 11 <---- 21 (0) +[0]: 11 <---- 22 (0) +[0]: 11 <---- 23 (0) +[0]: 12 <---- 19 (-1) +[0]: 12 <---- 24 (0) +[0]: 12 <---- 20 (-1) +[0]: 12 <---- 25 (0) +[0]: 13 <---- 17 (-1) +[0]: 13 <---- 26 (0) +[0]: 13 <---- 22 (-1) +[0]: 13 <---- 27 (0) +[0]: 14 <---- 18 (-1) +[0]: 14 <---- 27 (-1) +[0]: 14 <---- 21 (-1) +[0]: 14 <---- 24 (-1) +[0]: 15 <---- 25 (-1) +[0]: 15 <---- 23 (-1) +[0]: 15 <---- 26 (-1) +[0]: 15 <---- 16 (-1) [0]: 16 <---- 3 (0) -[0]: 16 <---- 7 (0) -[0]: 16 <---- 8 (0) [0]: 16 <---- 4 (0) +[0]: 17 <---- 4 (0) +[0]: 17 <---- 2 (0) +[0]: 18 <---- 2 (0) +[0]: 18 <---- 1 (0) +[0]: 19 <---- 1 (0) +[0]: 19 <---- 3 (0) +[0]: 20 <---- 7 (0) +[0]: 20 <---- 5 (0) +[0]: 21 <---- 5 (0) +[0]: 21 <---- 6 (0) +[0]: 22 <---- 6 (0) +[0]: 22 <---- 8 (0) +[0]: 23 <---- 8 (0) +[0]: 23 <---- 7 (0) +[0]: 24 <---- 1 (0) +[0]: 24 <---- 5 (0) +[0]: 25 <---- 7 (0) +[0]: 25 <---- 3 (0) +[0]: 26 <---- 4 (0) +[0]: 26 <---- 8 (0) +[0]: 27 <---- 6 (0) +[0]: 27 <---- 2 (0) +[0]: 28 <---- 6 (0) +[0]: 28 <---- 9 (0) coordinates with 1 fields 
field 0 with 3 components Process 0: @@ -132,11 +180,23 @@ Label 'celltype': [0]: 7 (0) [0]: 8 (0) [0]: 9 (0) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 0 (7) +[0]: 10 (4) [0]: 11 (4) [0]: 12 (4) [0]: 13 (4) [0]: 14 (4) [0]: 15 (4) -[0]: 16 (4) -[0]: 0 (7) -[0]: 10 (1) diff --git a/src/dm/impls/plex/tests/output/ex66_1.out b/src/dm/impls/plex/tests/output/ex66_1.out new file mode 100644 index 00000000000..ad2463ba0c5 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex66_1.out @@ -0,0 +1,216 @@ +DM Object: cubeline-fromdag 1 MPI process + type: plex +cubeline-fromdag in 3 dimensions: +Supports: +[0] Max support size: 2 +[0]: 1 ----> 0 +[0]: 2 ----> 0 +[0]: 3 ----> 0 +[0]: 4 ----> 0 +[0]: 5 ----> 0 +[0]: 6 ----> 0 +[0]: 6 ----> 10 +[0]: 7 ----> 0 +[0]: 8 ----> 0 +[0]: 8 ----> 10 +[0]: 9 ----> 10 +Cones: +[0] Max cone size: 8 +[0]: 0 <---- 3 (0) +[0]: 0 <---- 4 (0) +[0]: 0 <---- 2 (0) +[0]: 0 <---- 1 (0) +[0]: 0 <---- 7 (0) +[0]: 0 <---- 5 (0) +[0]: 0 <---- 6 (0) +[0]: 0 <---- 8 (0) +[0]: 10 <---- 6 (0) +[0]: 10 <---- 9 (0) +[0]: 10 <---- 8 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 1) dim 3 offset 0 0. 0. 1. + ( 2) dim 3 offset 3 0. 0. 0. + ( 3) dim 3 offset 6 0. 1. 1. + ( 4) dim 3 offset 9 0. 1. 0. + ( 5) dim 3 offset 12 1. 0. 1. + ( 6) dim 3 offset 15 1. 0. 0. + ( 7) dim 3 offset 18 1. 1. 1. + ( 8) dim 3 offset 21 1. 1. 0. + ( 9) dim 3 offset 24 2. 0. 0. +Labels: +DM Object: cubeline-fromdag (interp_) 1 MPI process + type: plex +cubeline-fromdag in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 1 ----> 19 +[0]: 1 ----> 20 +[0]: 1 ----> 25 +[0]: 2 ----> 18 +[0]: 2 ----> 19 +[0]: 2 ----> 28 +[0]: 3 ----> 17 +[0]: 3 ----> 20 +[0]: 3 ----> 26 +[0]: 4 ----> 17 +[0]: 4 ----> 18 +[0]: 4 ----> 27 +[0]: 5 ----> 21 +[0]: 5 ----> 22 +[0]: 5 ----> 25 +[0]: 6 ----> 22 +[0]: 6 ----> 23 +[0]: 6 ----> 28 +[0]: 6 ----> 29 +[0]: 7 ----> 21 +[0]: 7 ----> 24 +[0]: 7 ----> 26 +[0]: 8 ----> 23 +[0]: 8 ----> 24 +[0]: 8 ----> 27 +[0]: 8 ----> 30 +[0]: 9 ----> 29 +[0]: 9 ----> 30 +[0]: 10 ----> 0 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 13 ----> 0 +[0]: 14 ----> 0 +[0]: 15 ----> 0 +[0]: 17 ----> 10 +[0]: 17 ----> 15 +[0]: 18 ----> 10 +[0]: 18 ----> 13 +[0]: 19 ----> 10 +[0]: 19 ----> 14 +[0]: 20 ----> 10 +[0]: 20 ----> 12 +[0]: 21 ----> 11 +[0]: 21 ----> 12 +[0]: 22 ----> 11 +[0]: 22 ----> 14 +[0]: 23 ----> 11 +[0]: 23 ----> 13 +[0]: 23 ----> 16 +[0]: 24 ----> 11 +[0]: 24 ----> 15 +[0]: 25 ----> 12 +[0]: 25 ----> 14 +[0]: 26 ----> 12 +[0]: 26 ----> 15 +[0]: 27 ----> 13 +[0]: 27 ----> 15 +[0]: 28 ----> 13 +[0]: 28 ----> 14 +[0]: 29 ----> 16 +[0]: 30 ----> 16 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 10 (0) +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (0) +[0]: 0 <---- 13 (0) +[0]: 0 <---- 14 (0) +[0]: 0 <---- 15 (0) +[0]: 10 <---- 17 (0) +[0]: 10 <---- 18 (0) +[0]: 10 <---- 19 (0) +[0]: 10 <---- 20 (0) +[0]: 11 <---- 21 (0) +[0]: 11 <---- 22 (0) +[0]: 11 <---- 23 (0) +[0]: 11 <---- 24 (0) +[0]: 12 <---- 20 (-1) +[0]: 12 <---- 25 (0) +[0]: 12 <---- 21 (-1) +[0]: 12 <---- 26 (0) +[0]: 13 <---- 18 (-1) +[0]: 13 <---- 27 (0) +[0]: 13 <---- 23 (-1) +[0]: 13 <---- 28 (0) +[0]: 14 <---- 19 (-1) +[0]: 14 <---- 28 (-1) +[0]: 14 <---- 22 (-1) +[0]: 14 <---- 25 (-1) +[0]: 15 <---- 26 (-1) +[0]: 15 <---- 24 (-1) +[0]: 15 <---- 27 (-1) +[0]: 15 <---- 17 (-1) +[0]: 16 <---- 29 (0) +[0]: 16 <---- 30 (0) +[0]: 16 <---- 23 (-1) +[0]: 17 <---- 3 (0) +[0]: 17 <---- 4 
(0) +[0]: 18 <---- 4 (0) +[0]: 18 <---- 2 (0) +[0]: 19 <---- 2 (0) +[0]: 19 <---- 1 (0) +[0]: 20 <---- 1 (0) +[0]: 20 <---- 3 (0) +[0]: 21 <---- 7 (0) +[0]: 21 <---- 5 (0) +[0]: 22 <---- 5 (0) +[0]: 22 <---- 6 (0) +[0]: 23 <---- 6 (0) +[0]: 23 <---- 8 (0) +[0]: 24 <---- 8 (0) +[0]: 24 <---- 7 (0) +[0]: 25 <---- 1 (0) +[0]: 25 <---- 5 (0) +[0]: 26 <---- 7 (0) +[0]: 26 <---- 3 (0) +[0]: 27 <---- 4 (0) +[0]: 27 <---- 8 (0) +[0]: 28 <---- 6 (0) +[0]: 28 <---- 2 (0) +[0]: 29 <---- 6 (0) +[0]: 29 <---- 9 (0) +[0]: 30 <---- 9 (0) +[0]: 30 <---- 8 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 1) dim 3 offset 0 0. 0. 1. + ( 2) dim 3 offset 3 0. 0. 0. + ( 3) dim 3 offset 6 0. 1. 1. + ( 4) dim 3 offset 9 0. 1. 0. + ( 5) dim 3 offset 12 1. 0. 1. + ( 6) dim 3 offset 15 1. 0. 0. + ( 7) dim 3 offset 18 1. 1. 1. + ( 8) dim 3 offset 21 1. 1. 0. + ( 9) dim 3 offset 24 2. 0. 0. +Labels: +Label 'celltype': +[0]: 1 (0) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 0 (7) +[0]: 10 (4) +[0]: 11 (4) +[0]: 12 (4) +[0]: 13 (4) +[0]: 14 (4) +[0]: 15 (4) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 16 (3) diff --git a/src/dm/impls/plex/tests/output/ex69_hex_0.out b/src/dm/impls/plex/tests/output/ex69_hex_0.out new file mode 100644 index 00000000000..e208f53af6e --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_hex_0.out @@ -0,0 +1,826 @@ +DM Object: box 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 2 ----> 25 +[0]: 2 ----> 33 +[0]: 2 ----> 39 +[0]: 3 ----> 25 +[0]: 3 ----> 26 +[0]: 3 ----> 34 +[0]: 3 ----> 40 +[0]: 4 ----> 26 +[0]: 4 ----> 35 +[0]: 4 ----> 41 +[0]: 5 ----> 27 +[0]: 5 ----> 33 +[0]: 5 ----> 42 +[0]: 6 ----> 27 +[0]: 6 ----> 28 +[0]: 6 ----> 34 +[0]: 6 ----> 43 +[0]: 7 ----> 28 +[0]: 7 ----> 35 +[0]: 7 ----> 44 +[0]: 8 ----> 29 +[0]: 8 ----> 36 +[0]: 8 ----> 39 +[0]: 9 ----> 29 +[0]: 9 ----> 30 +[0]: 9 ----> 37 +[0]: 9 ----> 40 +[0]: 10 ----> 30 +[0]: 10 ----> 38 +[0]: 10 ----> 41 +[0]: 11 ----> 31 +[0]: 11 ----> 36 +[0]: 11 ----> 42 +[0]: 12 ----> 31 +[0]: 12 ----> 32 +[0]: 12 ----> 37 +[0]: 12 ----> 43 +[0]: 13 ----> 32 +[0]: 13 ----> 38 +[0]: 13 ----> 44 +[0]: 14 ----> 0 +[0]: 15 ----> 0 +[0]: 15 ----> 1 +[0]: 16 ----> 1 +[0]: 17 ----> 0 +[0]: 18 ----> 0 +[0]: 19 ----> 1 +[0]: 20 ----> 1 +[0]: 21 ----> 0 +[0]: 22 ----> 0 +[0]: 23 ----> 1 +[0]: 24 ----> 1 +[0]: 25 ----> 17 +[0]: 25 ----> 21 +[0]: 26 ----> 19 +[0]: 26 ----> 23 +[0]: 27 ----> 18 +[0]: 27 ----> 21 +[0]: 28 ----> 20 +[0]: 28 ----> 23 +[0]: 29 ----> 17 +[0]: 29 ----> 22 +[0]: 30 ----> 19 +[0]: 30 ----> 24 +[0]: 31 ----> 18 +[0]: 31 ----> 22 +[0]: 32 ----> 20 +[0]: 32 ----> 24 +[0]: 33 ----> 14 +[0]: 33 ----> 21 +[0]: 34 ----> 15 +[0]: 34 ----> 21 +[0]: 34 ----> 23 +[0]: 35 ----> 16 +[0]: 35 ----> 23 +[0]: 36 ----> 14 +[0]: 36 ----> 22 +[0]: 37 ----> 15 +[0]: 37 ----> 22 +[0]: 37 ----> 24 +[0]: 38 ----> 16 +[0]: 38 ----> 24 +[0]: 39 ----> 14 +[0]: 39 ----> 17 +[0]: 40 ----> 15 +[0]: 40 ----> 17 +[0]: 40 ----> 19 +[0]: 41 ----> 16 +[0]: 41 ----> 19 +[0]: 42 ----> 14 +[0]: 42 ----> 18 +[0]: 43 ----> 15 +[0]: 43 ----> 18 +[0]: 43 ----> 20 +[0]: 44 ----> 16 +[0]: 44 ----> 20 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 21 (-2) +[0]: 0 <---- 22 (0) +[0]: 0 <---- 17 (0) +[0]: 0 <---- 18 (-3) +[0]: 0 <---- 15 (0) +[0]: 0 <---- 14 (-2) +[0]: 1 <---- 23 (-2) +[0]: 1 <---- 24 
(0) +[0]: 1 <---- 19 (0) +[0]: 1 <---- 20 (-3) +[0]: 1 <---- 16 (0) +[0]: 1 <---- 15 (-2) +[0]: 14 <---- 33 (0) +[0]: 14 <---- 42 (0) +[0]: 14 <---- 36 (-1) +[0]: 14 <---- 39 (-1) +[0]: 15 <---- 34 (0) +[0]: 15 <---- 43 (0) +[0]: 15 <---- 37 (-1) +[0]: 15 <---- 40 (-1) +[0]: 16 <---- 35 (0) +[0]: 16 <---- 44 (0) +[0]: 16 <---- 38 (-1) +[0]: 16 <---- 41 (-1) +[0]: 17 <---- 25 (0) +[0]: 17 <---- 40 (0) +[0]: 17 <---- 29 (-1) +[0]: 17 <---- 39 (-1) +[0]: 18 <---- 27 (0) +[0]: 18 <---- 43 (0) +[0]: 18 <---- 31 (-1) +[0]: 18 <---- 42 (-1) +[0]: 19 <---- 26 (0) +[0]: 19 <---- 41 (0) +[0]: 19 <---- 30 (-1) +[0]: 19 <---- 40 (-1) +[0]: 20 <---- 28 (0) +[0]: 20 <---- 44 (0) +[0]: 20 <---- 32 (-1) +[0]: 20 <---- 43 (-1) +[0]: 21 <---- 25 (0) +[0]: 21 <---- 34 (0) +[0]: 21 <---- 27 (-1) +[0]: 21 <---- 33 (-1) +[0]: 22 <---- 29 (0) +[0]: 22 <---- 37 (0) +[0]: 22 <---- 31 (-1) +[0]: 22 <---- 36 (-1) +[0]: 23 <---- 26 (0) +[0]: 23 <---- 35 (0) +[0]: 23 <---- 28 (-1) +[0]: 23 <---- 34 (-1) +[0]: 24 <---- 30 (0) +[0]: 24 <---- 38 (0) +[0]: 24 <---- 32 (-1) +[0]: 24 <---- 37 (-1) +[0]: 25 <---- 2 (0) +[0]: 25 <---- 3 (0) +[0]: 26 <---- 3 (0) +[0]: 26 <---- 4 (0) +[0]: 27 <---- 5 (0) +[0]: 27 <---- 6 (0) +[0]: 28 <---- 6 (0) +[0]: 28 <---- 7 (0) +[0]: 29 <---- 8 (0) +[0]: 29 <---- 9 (0) +[0]: 30 <---- 9 (0) +[0]: 30 <---- 10 (0) +[0]: 31 <---- 11 (0) +[0]: 31 <---- 12 (0) +[0]: 32 <---- 12 (0) +[0]: 32 <---- 13 (0) +[0]: 33 <---- 2 (0) +[0]: 33 <---- 5 (0) +[0]: 34 <---- 3 (0) +[0]: 34 <---- 6 (0) +[0]: 35 <---- 4 (0) +[0]: 35 <---- 7 (0) +[0]: 36 <---- 8 (0) +[0]: 36 <---- 11 (0) +[0]: 37 <---- 9 (0) +[0]: 37 <---- 12 (0) +[0]: 38 <---- 10 (0) +[0]: 38 <---- 13 (0) +[0]: 39 <---- 2 (0) +[0]: 39 <---- 8 (0) +[0]: 40 <---- 3 (0) +[0]: 40 <---- 9 (0) +[0]: 41 <---- 4 (0) +[0]: 41 <---- 10 (0) +[0]: 42 <---- 5 (0) +[0]: 42 <---- 11 (0) +[0]: 43 <---- 6 (0) +[0]: 43 <---- 12 (0) +[0]: 44 <---- 7 (0) +[0]: 44 <---- 13 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 2) dim 3 offset 0 0. 0. 0. + ( 3) dim 3 offset 3 1. 0. 0. + ( 4) dim 3 offset 6 2. 0. 0. + ( 5) dim 3 offset 9 0. 1. 0. + ( 6) dim 3 offset 12 1. 1. 0. + ( 7) dim 3 offset 15 2. 1. 0. + ( 8) dim 3 offset 18 0. 0. 1. + ( 9) dim 3 offset 21 1. 0. 1. + ( 10) dim 3 offset 24 2. 0. 1. + ( 11) dim 3 offset 27 0. 1. 1. + ( 12) dim 3 offset 30 1. 1. 1. + ( 13) dim 3 offset 33 2. 1. 1. 
+Labels: +Label 'marker': +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +Label 'Face Sets': +[0]: 14 (6) +[0]: 16 (5) +[0]: 17 (3) +[0]: 19 (3) +[0]: 18 (4) +[0]: 20 (4) +[0]: 21 (1) +[0]: 23 (1) +[0]: 22 (2) +[0]: 24 (2) +Label 'fault': +[0]: 15 (2) +[0]: 34 (1) +[0]: 37 (1) +[0]: 40 (1) +[0]: 43 (1) +[0]: 3 (0) +[0]: 6 (0) +[0]: 9 (0) +[0]: 12 (0) +[0]: 0 (103) +[0]: 17 (102) +[0]: 18 (102) +[0]: 21 (102) +[0]: 22 (102) +[0]: 1 (-103) +[0]: 19 (-102) +[0]: 20 (-102) +[0]: 23 (-102) +[0]: 24 (-102) +[0]: 25 (101) +[0]: 27 (101) +[0]: 29 (101) +[0]: 31 (101) +[0]: 26 (-101) +[0]: 28 (-101) +[0]: 30 (-101) +[0]: 32 (-101) +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (4) +[0]: 15 (4) +[0]: 16 (4) +[0]: 17 (4) +[0]: 18 (4) +[0]: 19 (4) +[0]: 20 (4) +[0]: 21 (4) +[0]: 22 (4) +[0]: 23 (4) +[0]: 24 (4) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +DM Object: box 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 3 ----> 35 +[0]: 3 ----> 43 +[0]: 3 ----> 47 +[0]: 4 ----> 39 +[0]: 4 ----> 44 +[0]: 4 ----> 48 +[0]: 5 ----> 36 +[0]: 5 ----> 43 +[0]: 5 ----> 49 +[0]: 6 ----> 40 +[0]: 6 ----> 44 +[0]: 6 ----> 50 +[0]: 7 ----> 37 +[0]: 7 ----> 45 +[0]: 7 ----> 47 +[0]: 8 ----> 41 +[0]: 8 ----> 46 +[0]: 8 ----> 48 +[0]: 9 ----> 38 +[0]: 9 ----> 45 +[0]: 9 ----> 49 +[0]: 10 ----> 42 +[0]: 10 ----> 46 +[0]: 10 ----> 50 +[0]: 11 ----> 39 +[0]: 11 ----> 51 +[0]: 11 ----> 55 +[0]: 11 ----> 59 +[0]: 12 ----> 35 +[0]: 12 ----> 52 +[0]: 12 ----> 56 +[0]: 12 ----> 59 +[0]: 13 ----> 40 +[0]: 13 ----> 51 +[0]: 13 ----> 57 +[0]: 13 ----> 60 +[0]: 14 ----> 36 +[0]: 14 ----> 52 +[0]: 14 ----> 58 +[0]: 14 ----> 60 +[0]: 15 ----> 41 +[0]: 15 ----> 53 +[0]: 15 ----> 55 +[0]: 15 ----> 61 +[0]: 16 ----> 37 +[0]: 16 ----> 54 +[0]: 16 ----> 56 +[0]: 16 ----> 61 +[0]: 17 ----> 42 +[0]: 17 ----> 53 +[0]: 17 ----> 57 +[0]: 17 ----> 62 +[0]: 18 ----> 38 +[0]: 18 ----> 54 +[0]: 18 ----> 58 +[0]: 18 ----> 62 +[0]: 19 ----> 0 +[0]: 20 ----> 1 +[0]: 21 ----> 1 +[0]: 22 ----> 1 +[0]: 23 ----> 1 +[0]: 24 ----> 1 +[0]: 25 ----> 1 +[0]: 25 ----> 2 +[0]: 26 ----> 0 +[0]: 26 ----> 2 +[0]: 27 ----> 0 +[0]: 28 ----> 0 +[0]: 29 ----> 0 +[0]: 30 ----> 0 +[0]: 31 ----> 2 +[0]: 32 ----> 2 +[0]: 33 ----> 2 +[0]: 34 ----> 2 +[0]: 35 ----> 27 +[0]: 35 ----> 29 +[0]: 36 ----> 28 +[0]: 36 ----> 29 +[0]: 37 ----> 27 +[0]: 37 ----> 30 +[0]: 38 ----> 28 +[0]: 38 ----> 30 +[0]: 39 ----> 21 +[0]: 39 ----> 23 +[0]: 40 ----> 22 +[0]: 40 ----> 23 +[0]: 41 ----> 21 +[0]: 41 ----> 24 +[0]: 42 ----> 22 +[0]: 42 ----> 24 +[0]: 43 ----> 19 +[0]: 43 ----> 29 +[0]: 44 ----> 20 +[0]: 44 ----> 23 +[0]: 45 ----> 19 +[0]: 45 ----> 30 +[0]: 46 ----> 20 +[0]: 46 ----> 24 +[0]: 47 
----> 19 +[0]: 47 ----> 27 +[0]: 48 ----> 20 +[0]: 48 ----> 21 +[0]: 49 ----> 19 +[0]: 49 ----> 28 +[0]: 50 ----> 20 +[0]: 50 ----> 22 +[0]: 51 ----> 23 +[0]: 51 ----> 25 +[0]: 51 ----> 31 +[0]: 52 ----> 26 +[0]: 52 ----> 29 +[0]: 52 ----> 31 +[0]: 53 ----> 24 +[0]: 53 ----> 25 +[0]: 53 ----> 32 +[0]: 54 ----> 26 +[0]: 54 ----> 30 +[0]: 54 ----> 32 +[0]: 55 ----> 21 +[0]: 55 ----> 25 +[0]: 55 ----> 33 +[0]: 56 ----> 26 +[0]: 56 ----> 27 +[0]: 56 ----> 33 +[0]: 57 ----> 22 +[0]: 57 ----> 25 +[0]: 57 ----> 34 +[0]: 58 ----> 26 +[0]: 58 ----> 28 +[0]: 58 ----> 34 +[0]: 59 ----> 31 +[0]: 59 ----> 33 +[0]: 60 ----> 31 +[0]: 60 ----> 34 +[0]: 61 ----> 32 +[0]: 61 ----> 33 +[0]: 62 ----> 32 +[0]: 62 ----> 34 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 29 (-2) +[0]: 0 <---- 30 (0) +[0]: 0 <---- 27 (0) +[0]: 0 <---- 28 (-3) +[0]: 0 <---- 26 (0) +[0]: 0 <---- 19 (-2) +[0]: 1 <---- 23 (-2) +[0]: 1 <---- 24 (0) +[0]: 1 <---- 21 (0) +[0]: 1 <---- 22 (-3) +[0]: 1 <---- 20 (0) +[0]: 1 <---- 25 (-2) +[0]: 2 <---- 25 (0) +[0]: 2 <---- 26 (0) +[0]: 2 <---- 31 (0) +[0]: 2 <---- 34 (0) +[0]: 2 <---- 32 (-1) +[0]: 2 <---- 33 (-1) +[0]: 19 <---- 43 (0) +[0]: 19 <---- 49 (0) +[0]: 19 <---- 45 (-1) +[0]: 19 <---- 47 (-1) +[0]: 20 <---- 44 (0) +[0]: 20 <---- 50 (0) +[0]: 20 <---- 46 (-1) +[0]: 20 <---- 48 (-1) +[0]: 21 <---- 39 (0) +[0]: 21 <---- 48 (0) +[0]: 21 <---- 41 (-1) +[0]: 21 <---- 55 (-1) +[0]: 22 <---- 40 (0) +[0]: 22 <---- 50 (0) +[0]: 22 <---- 42 (-1) +[0]: 22 <---- 57 (-1) +[0]: 23 <---- 39 (0) +[0]: 23 <---- 44 (0) +[0]: 23 <---- 40 (-1) +[0]: 23 <---- 51 (-1) +[0]: 24 <---- 41 (0) +[0]: 24 <---- 46 (0) +[0]: 24 <---- 42 (-1) +[0]: 24 <---- 53 (-1) +[0]: 25 <---- 51 (0) +[0]: 25 <---- 57 (0) +[0]: 25 <---- 53 (-1) +[0]: 25 <---- 55 (-1) +[0]: 26 <---- 52 (0) +[0]: 26 <---- 58 (0) +[0]: 26 <---- 54 (-1) +[0]: 26 <---- 56 (-1) +[0]: 27 <---- 35 (0) +[0]: 27 <---- 56 (0) +[0]: 27 <---- 37 (-1) +[0]: 27 <---- 47 (-1) +[0]: 28 <---- 36 (0) +[0]: 28 <---- 58 (0) +[0]: 28 <---- 38 (-1) +[0]: 28 <---- 49 (-1) +[0]: 29 <---- 35 (0) +[0]: 29 <---- 52 (0) +[0]: 29 <---- 36 (-1) +[0]: 29 <---- 43 (-1) +[0]: 30 <---- 37 (0) +[0]: 30 <---- 54 (0) +[0]: 30 <---- 38 (-1) +[0]: 30 <---- 45 (-1) +[0]: 31 <---- 51 (0) +[0]: 31 <---- 52 (0) +[0]: 31 <---- 59 (0) +[0]: 31 <---- 60 (0) +[0]: 32 <---- 53 (0) +[0]: 32 <---- 54 (0) +[0]: 32 <---- 61 (0) +[0]: 32 <---- 62 (0) +[0]: 33 <---- 55 (0) +[0]: 33 <---- 56 (0) +[0]: 33 <---- 59 (0) +[0]: 33 <---- 61 (0) +[0]: 34 <---- 57 (0) +[0]: 34 <---- 58 (0) +[0]: 34 <---- 60 (0) +[0]: 34 <---- 62 (0) +[0]: 35 <---- 3 (0) +[0]: 35 <---- 12 (0) +[0]: 36 <---- 5 (0) +[0]: 36 <---- 14 (0) +[0]: 37 <---- 7 (0) +[0]: 37 <---- 16 (0) +[0]: 38 <---- 9 (0) +[0]: 38 <---- 18 (0) +[0]: 39 <---- 11 (0) +[0]: 39 <---- 4 (0) +[0]: 40 <---- 13 (0) +[0]: 40 <---- 6 (0) +[0]: 41 <---- 15 (0) +[0]: 41 <---- 8 (0) +[0]: 42 <---- 17 (0) +[0]: 42 <---- 10 (0) +[0]: 43 <---- 3 (0) +[0]: 43 <---- 5 (0) +[0]: 44 <---- 4 (0) +[0]: 44 <---- 6 (0) +[0]: 45 <---- 7 (0) +[0]: 45 <---- 9 (0) +[0]: 46 <---- 8 (0) +[0]: 46 <---- 10 (0) +[0]: 47 <---- 3 (0) +[0]: 47 <---- 7 (0) +[0]: 48 <---- 4 (0) +[0]: 48 <---- 8 (0) +[0]: 49 <---- 5 (0) +[0]: 49 <---- 9 (0) +[0]: 50 <---- 6 (0) +[0]: 50 <---- 10 (0) +[0]: 51 <---- 11 (0) +[0]: 51 <---- 13 (0) +[0]: 52 <---- 12 (0) +[0]: 52 <---- 14 (0) +[0]: 53 <---- 15 (0) +[0]: 53 <---- 17 (0) +[0]: 54 <---- 16 (0) +[0]: 54 <---- 18 (0) +[0]: 55 <---- 11 (0) +[0]: 55 <---- 15 (0) +[0]: 56 <---- 12 (0) +[0]: 56 <---- 16 (0) +[0]: 57 <---- 13 (0) +[0]: 57 <---- 17 (0) 
+[0]: 58 <---- 14 (0) +[0]: 58 <---- 18 (0) +[0]: 59 <---- 11 (0) +[0]: 59 <---- 12 (0) +[0]: 60 <---- 13 (0) +[0]: 60 <---- 14 (0) +[0]: 61 <---- 15 (0) +[0]: 61 <---- 16 (0) +[0]: 62 <---- 17 (0) +[0]: 62 <---- 18 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 3) dim 3 offset 0 0. 0. 0. + ( 4) dim 3 offset 3 2. 0. 0. + ( 5) dim 3 offset 6 0. 1. 0. + ( 6) dim 3 offset 9 2. 1. 0. + ( 7) dim 3 offset 12 0. 0. 1. + ( 8) dim 3 offset 15 2. 0. 1. + ( 9) dim 3 offset 18 0. 1. 1. + ( 10) dim 3 offset 21 2. 1. 1. + ( 11) dim 3 offset 24 1. 0. 0. + ( 12) dim 3 offset 27 1. 0. 0. + ( 13) dim 3 offset 30 1. 1. 0. + ( 14) dim 3 offset 33 1. 1. 0. + ( 15) dim 3 offset 36 1. 0. 1. + ( 16) dim 3 offset 39 1. 0. 1. + ( 17) dim 3 offset 42 1. 1. 1. + ( 18) dim 3 offset 45 1. 1. 1. +Labels: +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 59 (2) +[0]: 60 (2) +[0]: 61 (2) +[0]: 62 (2) +[0]: 19 (4) +[0]: 20 (4) +[0]: 21 (4) +[0]: 22 (4) +[0]: 23 (4) +[0]: 24 (4) +[0]: 25 (4) +[0]: 26 (4) +[0]: 27 (4) +[0]: 28 (4) +[0]: 29 (4) +[0]: 30 (4) +[0]: 2 (10) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (1) +[0]: 47 (1) +[0]: 48 (1) +[0]: 49 (1) +[0]: 50 (1) +[0]: 51 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 31 (5) +[0]: 32 (5) +[0]: 33 (5) +[0]: 34 (5) +Label 'marker': +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (1) +[0]: 47 (1) +[0]: 48 (1) +[0]: 49 (1) +[0]: 50 (1) +[0]: 51 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +Label 'Face Sets': +[0]: 19 (6) +[0]: 20 (5) +[0]: 21 (3) +[0]: 27 (3) +[0]: 22 (4) +[0]: 28 (4) +[0]: 23 (1) +[0]: 29 (1) +[0]: 24 (2) +[0]: 30 (2) +Label 'fault': +[0]: 2 (2) +[0]: 25 (2) +[0]: 26 (2) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 51 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 59 (0) +[0]: 60 (0) +[0]: 61 (0) +[0]: 62 (0) +[0]: 0 (103) +[0]: 27 (102) +[0]: 28 (102) +[0]: 29 (102) +[0]: 30 (102) +[0]: 1 (-103) +[0]: 21 (-102) +[0]: 22 (-102) +[0]: 23 (-102) +[0]: 24 (-102) +[0]: 35 (101) +[0]: 36 (101) +[0]: 37 (101) +[0]: 38 (101) +[0]: 39 (-101) +[0]: 40 (-101) +[0]: 41 (-101) +[0]: 42 (-101) diff --git a/src/dm/impls/plex/tests/output/ex69_hex_1.out b/src/dm/impls/plex/tests/output/ex69_hex_1.out new file mode 100644 index 00000000000..cf842673b25 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_hex_1.out @@ -0,0 +1,1168 @@ +DM Object: box 2 MPI processes + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 3 +[0]: 1 
----> 15 +[0]: 1 ----> 19 +[0]: 1 ----> 23 +[0]: 2 ----> 15 +[0]: 2 ----> 20 +[0]: 2 ----> 24 +[0]: 3 ----> 16 +[0]: 3 ----> 19 +[0]: 3 ----> 25 +[0]: 4 ----> 16 +[0]: 4 ----> 20 +[0]: 4 ----> 26 +[0]: 5 ----> 17 +[0]: 5 ----> 21 +[0]: 5 ----> 23 +[0]: 6 ----> 17 +[0]: 6 ----> 22 +[0]: 6 ----> 24 +[0]: 7 ----> 18 +[0]: 7 ----> 21 +[0]: 7 ----> 25 +[0]: 8 ----> 18 +[0]: 8 ----> 22 +[0]: 8 ----> 26 +[0]: 9 ----> 0 +[0]: 10 ----> 0 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 13 ----> 0 +[0]: 14 ----> 0 +[0]: 15 ----> 11 +[0]: 15 ----> 13 +[0]: 16 ----> 12 +[0]: 16 ----> 13 +[0]: 17 ----> 11 +[0]: 17 ----> 14 +[0]: 18 ----> 12 +[0]: 18 ----> 14 +[0]: 19 ----> 9 +[0]: 19 ----> 13 +[0]: 20 ----> 10 +[0]: 20 ----> 13 +[0]: 21 ----> 9 +[0]: 21 ----> 14 +[0]: 22 ----> 10 +[0]: 22 ----> 14 +[0]: 23 ----> 9 +[0]: 23 ----> 11 +[0]: 24 ----> 10 +[0]: 24 ----> 11 +[0]: 25 ----> 9 +[0]: 25 ----> 12 +[0]: 26 ----> 10 +[0]: 26 ----> 12 +[1] Max support size: 3 +[1]: 1 ----> 15 +[1]: 1 ----> 19 +[1]: 1 ----> 23 +[1]: 2 ----> 15 +[1]: 2 ----> 20 +[1]: 2 ----> 24 +[1]: 3 ----> 16 +[1]: 3 ----> 19 +[1]: 3 ----> 25 +[1]: 4 ----> 16 +[1]: 4 ----> 20 +[1]: 4 ----> 26 +[1]: 5 ----> 17 +[1]: 5 ----> 21 +[1]: 5 ----> 23 +[1]: 6 ----> 17 +[1]: 6 ----> 22 +[1]: 6 ----> 24 +[1]: 7 ----> 18 +[1]: 7 ----> 21 +[1]: 7 ----> 25 +[1]: 8 ----> 18 +[1]: 8 ----> 22 +[1]: 8 ----> 26 +[1]: 9 ----> 0 +[1]: 10 ----> 0 +[1]: 11 ----> 0 +[1]: 12 ----> 0 +[1]: 13 ----> 0 +[1]: 14 ----> 0 +[1]: 15 ----> 11 +[1]: 15 ----> 13 +[1]: 16 ----> 12 +[1]: 16 ----> 13 +[1]: 17 ----> 11 +[1]: 17 ----> 14 +[1]: 18 ----> 12 +[1]: 18 ----> 14 +[1]: 19 ----> 9 +[1]: 19 ----> 13 +[1]: 20 ----> 10 +[1]: 20 ----> 13 +[1]: 21 ----> 9 +[1]: 21 ----> 14 +[1]: 22 ----> 10 +[1]: 22 ----> 14 +[1]: 23 ----> 9 +[1]: 23 ----> 11 +[1]: 24 ----> 10 +[1]: 24 ----> 11 +[1]: 25 ----> 9 +[1]: 25 ----> 12 +[1]: 26 ----> 10 +[1]: 26 ----> 12 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 13 (-2) +[0]: 0 <---- 14 (0) +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (-3) +[0]: 0 <---- 10 (0) +[0]: 0 <---- 9 (-2) +[0]: 9 <---- 19 (0) +[0]: 9 <---- 25 (0) +[0]: 9 <---- 21 (-1) +[0]: 9 <---- 23 (-1) +[0]: 10 <---- 20 (0) +[0]: 10 <---- 26 (0) +[0]: 10 <---- 22 (-1) +[0]: 10 <---- 24 (-1) +[0]: 11 <---- 15 (0) +[0]: 11 <---- 24 (0) +[0]: 11 <---- 17 (-1) +[0]: 11 <---- 23 (-1) +[0]: 12 <---- 16 (0) +[0]: 12 <---- 26 (0) +[0]: 12 <---- 18 (-1) +[0]: 12 <---- 25 (-1) +[0]: 13 <---- 15 (0) +[0]: 13 <---- 20 (0) +[0]: 13 <---- 16 (-1) +[0]: 13 <---- 19 (-1) +[0]: 14 <---- 17 (0) +[0]: 14 <---- 22 (0) +[0]: 14 <---- 18 (-1) +[0]: 14 <---- 21 (-1) +[0]: 15 <---- 1 (0) +[0]: 15 <---- 2 (0) +[0]: 16 <---- 3 (0) +[0]: 16 <---- 4 (0) +[0]: 17 <---- 5 (0) +[0]: 17 <---- 6 (0) +[0]: 18 <---- 7 (0) +[0]: 18 <---- 8 (0) +[0]: 19 <---- 1 (0) +[0]: 19 <---- 3 (0) +[0]: 20 <---- 2 (0) +[0]: 20 <---- 4 (0) +[0]: 21 <---- 5 (0) +[0]: 21 <---- 7 (0) +[0]: 22 <---- 6 (0) +[0]: 22 <---- 8 (0) +[0]: 23 <---- 1 (0) +[0]: 23 <---- 5 (0) +[0]: 24 <---- 2 (0) +[0]: 24 <---- 6 (0) +[0]: 25 <---- 3 (0) +[0]: 25 <---- 7 (0) +[0]: 26 <---- 4 (0) +[0]: 26 <---- 8 (0) +[1] Max cone size: 6 +[1]: 0 <---- 13 (-2) +[1]: 0 <---- 14 (0) +[1]: 0 <---- 11 (0) +[1]: 0 <---- 12 (-3) +[1]: 0 <---- 10 (0) +[1]: 0 <---- 9 (-2) +[1]: 9 <---- 19 (0) +[1]: 9 <---- 25 (0) +[1]: 9 <---- 21 (-1) +[1]: 9 <---- 23 (-1) +[1]: 10 <---- 20 (0) +[1]: 10 <---- 26 (0) +[1]: 10 <---- 22 (-1) +[1]: 10 <---- 24 (-1) +[1]: 11 <---- 15 (0) +[1]: 11 <---- 24 (0) +[1]: 11 <---- 17 (-1) +[1]: 11 <---- 23 (-1) +[1]: 12 <---- 16 (0) +[1]: 12 <---- 26 (0) +[1]: 
12 <---- 18 (-1) +[1]: 12 <---- 25 (-1) +[1]: 13 <---- 15 (0) +[1]: 13 <---- 20 (0) +[1]: 13 <---- 16 (-1) +[1]: 13 <---- 19 (-1) +[1]: 14 <---- 17 (0) +[1]: 14 <---- 22 (0) +[1]: 14 <---- 18 (-1) +[1]: 14 <---- 21 (-1) +[1]: 15 <---- 1 (0) +[1]: 15 <---- 2 (0) +[1]: 16 <---- 3 (0) +[1]: 16 <---- 4 (0) +[1]: 17 <---- 5 (0) +[1]: 17 <---- 6 (0) +[1]: 18 <---- 7 (0) +[1]: 18 <---- 8 (0) +[1]: 19 <---- 1 (0) +[1]: 19 <---- 3 (0) +[1]: 20 <---- 2 (0) +[1]: 20 <---- 4 (0) +[1]: 21 <---- 5 (0) +[1]: 21 <---- 7 (0) +[1]: 22 <---- 6 (0) +[1]: 22 <---- 8 (0) +[1]: 23 <---- 1 (0) +[1]: 23 <---- 5 (0) +[1]: 24 <---- 2 (0) +[1]: 24 <---- 6 (0) +[1]: 25 <---- 3 (0) +[1]: 25 <---- 7 (0) +[1]: 26 <---- 4 (0) +[1]: 26 <---- 8 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 1) dim 3 offset 0 0. 0. 0. + ( 2) dim 3 offset 3 1. 0. 0. + ( 3) dim 3 offset 6 0. 1. 0. + ( 4) dim 3 offset 9 1. 1. 0. + ( 5) dim 3 offset 12 0. 0. 1. + ( 6) dim 3 offset 15 1. 0. 1. + ( 7) dim 3 offset 18 0. 1. 1. + ( 8) dim 3 offset 21 1. 1. 1. +Process 1: + ( 1) dim 3 offset 0 1. 0. 0. + ( 2) dim 3 offset 3 2. 0. 0. + ( 3) dim 3 offset 6 1. 1. 0. + ( 4) dim 3 offset 9 2. 1. 0. + ( 5) dim 3 offset 12 1. 0. 1. + ( 6) dim 3 offset 15 2. 0. 1. + ( 7) dim 3 offset 18 1. 1. 1. + ( 8) dim 3 offset 21 2. 1. 1. +Labels: +Label 'marker': +[0]: 1 (1) +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[1]: 1 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 25 (1) +[1]: 26 (1) +Label 'Face Sets': +[0]: 13 (1) +[0]: 14 (2) +[0]: 11 (3) +[0]: 12 (4) +[0]: 9 (6) +[1]: 13 (1) +[1]: 14 (2) +[1]: 11 (3) +[1]: 12 (4) +[1]: 10 (5) +Label 'fault': +[0]: 2 (0) +[0]: 4 (0) +[0]: 6 (0) +[0]: 8 (0) +[0]: 20 (1) +[0]: 22 (1) +[0]: 24 (1) +[0]: 26 (1) +[0]: 10 (2) +[0]: 15 (101) +[0]: 16 (101) +[0]: 17 (101) +[0]: 18 (101) +[0]: 11 (102) +[0]: 12 (102) +[0]: 13 (102) +[0]: 14 (102) +[0]: 0 (103) +[1]: 0 (-103) +[1]: 11 (-102) +[1]: 12 (-102) +[1]: 13 (-102) +[1]: 14 (-102) +[1]: 15 (-101) +[1]: 16 (-101) +[1]: 17 (-101) +[1]: 18 (-101) +[1]: 1 (0) +[1]: 3 (0) +[1]: 5 (0) +[1]: 7 (0) +[1]: 19 (1) +[1]: 21 (1) +[1]: 23 (1) +[1]: 25 (1) +[1]: 9 (2) +Label 'celltype': +[0]: 1 (0) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 9 (4) +[0]: 10 (4) +[0]: 11 (4) +[0]: 12 (4) +[0]: 13 (4) +[0]: 14 (4) +[0]: 0 (7) +[1]: 1 (0) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 25 (1) +[1]: 26 (1) +[1]: 9 (4) +[1]: 10 (4) +[1]: 11 (4) +[1]: 12 (4) +[1]: 13 (4) +[1]: 14 (4) +[1]: 0 (7) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=27, leaves=9, remote ranks=1 + [0] 2 <- (1,1) + [0] 4 <- (1,3) + [0] 6 <- (1,5) + [0] 8 <- (1,7) + [0] 10 <- (1,9) + [0] 20 <- (1,19) + [0] 22 <- 
(1,21) + [0] 24 <- (1,23) + [0] 26 <- (1,25) + [1] Number of roots=27, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 9 edges + [0] 2 <- 1 + [0] 4 <- 3 + [0] 6 <- 5 + [0] 8 <- 7 + [0] 10 <- 9 + [0] 20 <- 19 + [0] 22 <- 21 + [0] 24 <- 23 + [0] 26 <- 25 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +DM Object: box 2 MPI processes + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 2 ----> 25 +[0]: 2 ----> 29 +[0]: 2 ----> 31 +[0]: 3 ----> 26 +[0]: 3 ----> 29 +[0]: 3 ----> 32 +[0]: 4 ----> 27 +[0]: 4 ----> 30 +[0]: 4 ----> 31 +[0]: 5 ----> 28 +[0]: 5 ----> 30 +[0]: 5 ----> 32 +[0]: 6 ----> 33 +[0]: 6 ----> 37 +[0]: 6 ----> 41 +[0]: 7 ----> 25 +[0]: 7 ----> 34 +[0]: 7 ----> 38 +[0]: 7 ----> 41 +[0]: 8 ----> 33 +[0]: 8 ----> 39 +[0]: 8 ----> 42 +[0]: 9 ----> 26 +[0]: 9 ----> 34 +[0]: 9 ----> 40 +[0]: 9 ----> 42 +[0]: 10 ----> 35 +[0]: 10 ----> 37 +[0]: 10 ----> 43 +[0]: 11 ----> 27 +[0]: 11 ----> 36 +[0]: 11 ----> 38 +[0]: 11 ----> 43 +[0]: 12 ----> 35 +[0]: 12 ----> 39 +[0]: 12 ----> 44 +[0]: 13 ----> 28 +[0]: 13 ----> 36 +[0]: 13 ----> 40 +[0]: 13 ----> 44 +[0]: 14 ----> 0 +[0]: 15 ----> 1 +[0]: 16 ----> 0 +[0]: 16 ----> 1 +[0]: 17 ----> 0 +[0]: 18 ----> 0 +[0]: 19 ----> 0 +[0]: 20 ----> 0 +[0]: 21 ----> 1 +[0]: 22 ----> 1 +[0]: 23 ----> 1 +[0]: 24 ----> 1 +[0]: 25 ----> 17 +[0]: 25 ----> 19 +[0]: 26 ----> 18 +[0]: 26 ----> 19 +[0]: 27 ----> 17 +[0]: 27 ----> 20 +[0]: 28 ----> 18 +[0]: 28 ----> 20 +[0]: 29 ----> 14 +[0]: 29 ----> 19 +[0]: 30 ----> 14 +[0]: 30 ----> 20 +[0]: 31 ----> 14 +[0]: 31 ----> 17 +[0]: 32 ----> 14 +[0]: 32 ----> 18 +[0]: 33 ----> 15 +[0]: 33 ----> 21 +[0]: 34 ----> 16 +[0]: 34 ----> 19 +[0]: 34 ----> 21 +[0]: 35 ----> 15 +[0]: 35 ----> 22 +[0]: 36 ----> 16 +[0]: 36 ----> 20 +[0]: 36 ----> 22 +[0]: 37 ----> 15 +[0]: 37 ----> 23 +[0]: 38 ----> 16 +[0]: 38 ----> 17 +[0]: 38 ----> 23 +[0]: 39 ----> 15 +[0]: 39 ----> 24 +[0]: 40 ----> 16 +[0]: 40 ----> 18 +[0]: 40 ----> 24 +[0]: 41 ----> 21 +[0]: 41 ----> 23 +[0]: 42 ----> 21 +[0]: 42 ----> 24 +[0]: 43 ----> 22 +[0]: 43 ----> 23 +[0]: 44 ----> 22 +[0]: 44 ----> 24 +[1] Max support size: 4 +[1]: 2 ----> 25 +[1]: 2 ----> 33 +[1]: 2 ----> 37 +[1]: 2 ----> 41 +[1]: 3 ----> 34 +[1]: 3 ----> 38 +[1]: 3 ----> 41 +[1]: 4 ----> 26 +[1]: 4 ----> 33 +[1]: 4 ----> 39 +[1]: 4 ----> 42 +[1]: 5 ----> 34 +[1]: 5 ----> 40 +[1]: 5 ----> 42 +[1]: 6 ----> 27 +[1]: 6 ----> 35 +[1]: 6 ----> 37 +[1]: 6 ----> 43 +[1]: 7 ----> 36 +[1]: 7 ----> 38 +[1]: 7 ----> 43 +[1]: 8 ----> 28 +[1]: 8 ----> 35 +[1]: 8 ----> 39 +[1]: 8 ----> 44 +[1]: 9 ----> 36 +[1]: 9 ----> 40 +[1]: 9 ----> 44 +[1]: 10 ----> 25 +[1]: 10 ----> 29 +[1]: 10 ----> 31 +[1]: 11 ----> 26 +[1]: 11 ----> 29 +[1]: 11 ----> 32 +[1]: 12 ----> 27 +[1]: 12 ----> 30 +[1]: 12 ----> 31 +[1]: 13 ----> 28 +[1]: 13 ----> 30 +[1]: 13 ----> 32 +[1]: 14 ----> 0 +[1]: 14 ----> 1 +[1]: 15 ----> 1 +[1]: 16 ----> 0 +[1]: 17 ----> 0 +[1]: 18 ----> 0 +[1]: 19 ----> 0 +[1]: 20 ----> 0 +[1]: 21 ----> 1 +[1]: 22 ----> 1 +[1]: 23 ----> 1 +[1]: 24 ----> 1 +[1]: 25 ----> 17 +[1]: 25 ----> 19 +[1]: 26 ----> 18 +[1]: 26 ----> 19 +[1]: 27 ----> 17 +[1]: 27 ----> 20 +[1]: 28 ----> 18 +[1]: 28 ----> 20 +[1]: 29 ----> 16 +[1]: 29 ----> 19 +[1]: 30 ----> 16 +[1]: 30 ----> 20 +[1]: 31 ----> 16 +[1]: 31 ----> 17 +[1]: 32 ----> 16 +[1]: 32 ----> 18 +[1]: 33 ----> 14 +[1]: 33 ----> 19 +[1]: 33 ----> 21 +[1]: 34 ----> 15 +[1]: 34 ----> 21 +[1]: 35 ----> 14 +[1]: 35 ----> 20 +[1]: 35 ----> 22 +[1]: 36 ----> 15 +[1]: 36 ----> 22 +[1]: 37 ----> 14 
+[1]: 37 ----> 17 +[1]: 37 ----> 23 +[1]: 38 ----> 15 +[1]: 38 ----> 23 +[1]: 39 ----> 14 +[1]: 39 ----> 18 +[1]: 39 ----> 24 +[1]: 40 ----> 15 +[1]: 40 ----> 24 +[1]: 41 ----> 21 +[1]: 41 ----> 23 +[1]: 42 ----> 21 +[1]: 42 ----> 24 +[1]: 43 ----> 22 +[1]: 43 ----> 23 +[1]: 44 ----> 22 +[1]: 44 ----> 24 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 19 (-2) +[0]: 0 <---- 20 (0) +[0]: 0 <---- 17 (0) +[0]: 0 <---- 18 (-3) +[0]: 0 <---- 16 (0) +[0]: 0 <---- 14 (-2) +[0]: 1 <---- 15 (0) +[0]: 1 <---- 16 (0) +[0]: 1 <---- 21 (0) +[0]: 1 <---- 24 (0) +[0]: 1 <---- 22 (-1) +[0]: 1 <---- 23 (-1) +[0]: 14 <---- 29 (0) +[0]: 14 <---- 32 (0) +[0]: 14 <---- 30 (-1) +[0]: 14 <---- 31 (-1) +[0]: 15 <---- 33 (0) +[0]: 15 <---- 39 (0) +[0]: 15 <---- 35 (-1) +[0]: 15 <---- 37 (-1) +[0]: 16 <---- 34 (0) +[0]: 16 <---- 40 (0) +[0]: 16 <---- 36 (-1) +[0]: 16 <---- 38 (-1) +[0]: 17 <---- 25 (0) +[0]: 17 <---- 38 (0) +[0]: 17 <---- 27 (-1) +[0]: 17 <---- 31 (-1) +[0]: 18 <---- 26 (0) +[0]: 18 <---- 40 (0) +[0]: 18 <---- 28 (-1) +[0]: 18 <---- 32 (-1) +[0]: 19 <---- 25 (0) +[0]: 19 <---- 34 (0) +[0]: 19 <---- 26 (-1) +[0]: 19 <---- 29 (-1) +[0]: 20 <---- 27 (0) +[0]: 20 <---- 36 (0) +[0]: 20 <---- 28 (-1) +[0]: 20 <---- 30 (-1) +[0]: 21 <---- 33 (0) +[0]: 21 <---- 34 (0) +[0]: 21 <---- 41 (0) +[0]: 21 <---- 42 (0) +[0]: 22 <---- 35 (0) +[0]: 22 <---- 36 (0) +[0]: 22 <---- 43 (0) +[0]: 22 <---- 44 (0) +[0]: 23 <---- 37 (0) +[0]: 23 <---- 38 (0) +[0]: 23 <---- 41 (0) +[0]: 23 <---- 43 (0) +[0]: 24 <---- 39 (0) +[0]: 24 <---- 40 (0) +[0]: 24 <---- 42 (0) +[0]: 24 <---- 44 (0) +[0]: 25 <---- 2 (0) +[0]: 25 <---- 7 (0) +[0]: 26 <---- 3 (0) +[0]: 26 <---- 9 (0) +[0]: 27 <---- 4 (0) +[0]: 27 <---- 11 (0) +[0]: 28 <---- 5 (0) +[0]: 28 <---- 13 (0) +[0]: 29 <---- 2 (0) +[0]: 29 <---- 3 (0) +[0]: 30 <---- 4 (0) +[0]: 30 <---- 5 (0) +[0]: 31 <---- 2 (0) +[0]: 31 <---- 4 (0) +[0]: 32 <---- 3 (0) +[0]: 32 <---- 5 (0) +[0]: 33 <---- 6 (0) +[0]: 33 <---- 8 (0) +[0]: 34 <---- 7 (0) +[0]: 34 <---- 9 (0) +[0]: 35 <---- 10 (0) +[0]: 35 <---- 12 (0) +[0]: 36 <---- 11 (0) +[0]: 36 <---- 13 (0) +[0]: 37 <---- 6 (0) +[0]: 37 <---- 10 (0) +[0]: 38 <---- 7 (0) +[0]: 38 <---- 11 (0) +[0]: 39 <---- 8 (0) +[0]: 39 <---- 12 (0) +[0]: 40 <---- 9 (0) +[0]: 40 <---- 13 (0) +[0]: 41 <---- 6 (0) +[0]: 41 <---- 7 (0) +[0]: 42 <---- 8 (0) +[0]: 42 <---- 9 (0) +[0]: 43 <---- 10 (0) +[0]: 43 <---- 11 (0) +[0]: 44 <---- 12 (0) +[0]: 44 <---- 13 (0) +[1] Max cone size: 6 +[1]: 0 <---- 19 (-2) +[1]: 0 <---- 20 (0) +[1]: 0 <---- 17 (0) +[1]: 0 <---- 18 (-3) +[1]: 0 <---- 16 (0) +[1]: 0 <---- 14 (-2) +[1]: 1 <---- 14 (0) +[1]: 1 <---- 15 (0) +[1]: 1 <---- 21 (0) +[1]: 1 <---- 24 (0) +[1]: 1 <---- 22 (-1) +[1]: 1 <---- 23 (-1) +[1]: 14 <---- 33 (0) +[1]: 14 <---- 39 (0) +[1]: 14 <---- 35 (-1) +[1]: 14 <---- 37 (-1) +[1]: 15 <---- 34 (0) +[1]: 15 <---- 40 (0) +[1]: 15 <---- 36 (-1) +[1]: 15 <---- 38 (-1) +[1]: 16 <---- 29 (0) +[1]: 16 <---- 32 (0) +[1]: 16 <---- 30 (-1) +[1]: 16 <---- 31 (-1) +[1]: 17 <---- 25 (0) +[1]: 17 <---- 31 (0) +[1]: 17 <---- 27 (-1) +[1]: 17 <---- 37 (-1) +[1]: 18 <---- 26 (0) +[1]: 18 <---- 32 (0) +[1]: 18 <---- 28 (-1) +[1]: 18 <---- 39 (-1) +[1]: 19 <---- 25 (0) +[1]: 19 <---- 29 (0) +[1]: 19 <---- 26 (-1) +[1]: 19 <---- 33 (-1) +[1]: 20 <---- 27 (0) +[1]: 20 <---- 30 (0) +[1]: 20 <---- 28 (-1) +[1]: 20 <---- 35 (-1) +[1]: 21 <---- 33 (0) +[1]: 21 <---- 34 (0) +[1]: 21 <---- 41 (0) +[1]: 21 <---- 42 (0) +[1]: 22 <---- 35 (0) +[1]: 22 <---- 36 (0) +[1]: 22 <---- 43 (0) +[1]: 22 <---- 44 (0) +[1]: 23 <---- 37 (0) +[1]: 23 <---- 38 
(0) +[1]: 23 <---- 41 (0) +[1]: 23 <---- 43 (0) +[1]: 24 <---- 39 (0) +[1]: 24 <---- 40 (0) +[1]: 24 <---- 42 (0) +[1]: 24 <---- 44 (0) +[1]: 25 <---- 2 (0) +[1]: 25 <---- 10 (0) +[1]: 26 <---- 4 (0) +[1]: 26 <---- 11 (0) +[1]: 27 <---- 6 (0) +[1]: 27 <---- 12 (0) +[1]: 28 <---- 8 (0) +[1]: 28 <---- 13 (0) +[1]: 29 <---- 10 (0) +[1]: 29 <---- 11 (0) +[1]: 30 <---- 12 (0) +[1]: 30 <---- 13 (0) +[1]: 31 <---- 10 (0) +[1]: 31 <---- 12 (0) +[1]: 32 <---- 11 (0) +[1]: 32 <---- 13 (0) +[1]: 33 <---- 2 (0) +[1]: 33 <---- 4 (0) +[1]: 34 <---- 3 (0) +[1]: 34 <---- 5 (0) +[1]: 35 <---- 6 (0) +[1]: 35 <---- 8 (0) +[1]: 36 <---- 7 (0) +[1]: 36 <---- 9 (0) +[1]: 37 <---- 2 (0) +[1]: 37 <---- 6 (0) +[1]: 38 <---- 3 (0) +[1]: 38 <---- 7 (0) +[1]: 39 <---- 4 (0) +[1]: 39 <---- 8 (0) +[1]: 40 <---- 5 (0) +[1]: 40 <---- 9 (0) +[1]: 41 <---- 2 (0) +[1]: 41 <---- 3 (0) +[1]: 42 <---- 4 (0) +[1]: 42 <---- 5 (0) +[1]: 43 <---- 6 (0) +[1]: 43 <---- 7 (0) +[1]: 44 <---- 8 (0) +[1]: 44 <---- 9 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 2) dim 3 offset 0 0. 0. 0. + ( 3) dim 3 offset 3 0. 1. 0. + ( 4) dim 3 offset 6 0. 0. 1. + ( 5) dim 3 offset 9 0. 1. 1. + ( 6) dim 3 offset 12 1. 0. 0. + ( 7) dim 3 offset 15 1. 0. 0. + ( 8) dim 3 offset 18 1. 1. 0. + ( 9) dim 3 offset 21 1. 1. 0. + ( 10) dim 3 offset 24 1. 0. 1. + ( 11) dim 3 offset 27 1. 0. 1. + ( 12) dim 3 offset 30 1. 1. 1. + ( 13) dim 3 offset 33 1. 1. 1. +Process 1: + ( 2) dim 3 offset 0 1. 0. 0. + ( 3) dim 3 offset 3 1. 0. 0. + ( 4) dim 3 offset 6 1. 1. 0. + ( 5) dim 3 offset 9 1. 1. 0. + ( 6) dim 3 offset 12 1. 0. 1. + ( 7) dim 3 offset 15 1. 0. 1. + ( 8) dim 3 offset 18 1. 1. 1. + ( 9) dim 3 offset 21 1. 1. 1. + ( 10) dim 3 offset 24 2. 0. 0. + ( 11) dim 3 offset 27 2. 1. 0. + ( 12) dim 3 offset 30 2. 0. 1. + ( 13) dim 3 offset 33 2. 1. 1. 
+Labels: +Label 'celltype': +[0]: 0 (7) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 41 (2) +[0]: 42 (2) +[0]: 43 (2) +[0]: 44 (2) +[0]: 14 (4) +[0]: 15 (4) +[0]: 16 (4) +[0]: 17 (4) +[0]: 18 (4) +[0]: 19 (4) +[0]: 20 (4) +[0]: 1 (10) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 21 (5) +[0]: 22 (5) +[0]: 23 (5) +[0]: 24 (5) +[1]: 0 (7) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 10 (0) +[1]: 11 (0) +[1]: 12 (0) +[1]: 13 (0) +[1]: 41 (2) +[1]: 42 (2) +[1]: 43 (2) +[1]: 44 (2) +[1]: 14 (4) +[1]: 15 (4) +[1]: 16 (4) +[1]: 17 (4) +[1]: 18 (4) +[1]: 19 (4) +[1]: 20 (4) +[1]: 1 (10) +[1]: 25 (1) +[1]: 26 (1) +[1]: 27 (1) +[1]: 28 (1) +[1]: 29 (1) +[1]: 30 (1) +[1]: 31 (1) +[1]: 32 (1) +[1]: 33 (1) +[1]: 34 (1) +[1]: 35 (1) +[1]: 36 (1) +[1]: 37 (1) +[1]: 38 (1) +[1]: 39 (1) +[1]: 40 (1) +[1]: 21 (5) +[1]: 22 (5) +[1]: 23 (5) +[1]: 24 (5) +Label 'marker': +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 25 (1) +[1]: 26 (1) +[1]: 27 (1) +[1]: 28 (1) +[1]: 29 (1) +[1]: 30 (1) +[1]: 31 (1) +[1]: 32 (1) +[1]: 33 (1) +[1]: 34 (1) +[1]: 35 (1) +[1]: 36 (1) +[1]: 37 (1) +[1]: 38 (1) +[1]: 39 (1) +[1]: 40 (1) +[1]: 41 (1) +[1]: 42 (1) +[1]: 43 (1) +[1]: 44 (1) +Label 'Face Sets': +[0]: 19 (1) +[0]: 20 (2) +[0]: 17 (3) +[0]: 18 (4) +[0]: 14 (6) +[1]: 19 (1) +[1]: 20 (2) +[1]: 17 (3) +[1]: 18 (4) +[1]: 16 (5) +Label 'fault': +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 41 (0) +[0]: 42 (0) +[0]: 43 (0) +[0]: 44 (0) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 1 (2) +[0]: 15 (2) +[0]: 16 (2) +[0]: 25 (101) +[0]: 26 (101) +[0]: 27 (101) +[0]: 28 (101) +[0]: 17 (102) +[0]: 18 (102) +[0]: 19 (102) +[0]: 20 (102) +[0]: 0 (103) +[1]: 0 (-103) +[1]: 17 (-102) +[1]: 18 (-102) +[1]: 19 (-102) +[1]: 20 (-102) +[1]: 25 (-101) +[1]: 26 (-101) +[1]: 27 (-101) +[1]: 28 (-101) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 41 (0) +[1]: 42 (0) +[1]: 43 (0) +[1]: 44 (0) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 33 (1) +[1]: 34 (1) +[1]: 35 (1) +[1]: 36 (1) +[1]: 37 (1) +[1]: 38 (1) +[1]: 39 (1) +[1]: 40 (1) +[1]: 1 (2) +[1]: 14 (2) +[1]: 15 (2) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=45, leaves=27, remote ranks=1 + [0] 1 <- (1,1) + [0] 6 <- (1,2) + [0] 7 <- (1,3) + [0] 8 <- (1,4) + [0] 9 
<- (1,5) + [0] 10 <- (1,6) + [0] 11 <- (1,7) + [0] 12 <- (1,8) + [0] 13 <- (1,9) + [0] 15 <- (1,14) + [0] 16 <- (1,15) + [0] 21 <- (1,21) + [0] 22 <- (1,22) + [0] 23 <- (1,23) + [0] 24 <- (1,24) + [0] 33 <- (1,33) + [0] 34 <- (1,34) + [0] 35 <- (1,35) + [0] 36 <- (1,36) + [0] 37 <- (1,37) + [0] 38 <- (1,38) + [0] 39 <- (1,39) + [0] 40 <- (1,40) + [0] 41 <- (1,41) + [0] 42 <- (1,42) + [0] 43 <- (1,43) + [0] 44 <- (1,44) + [1] Number of roots=45, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 27 edges + [0] 1 <- 1 + [0] 6 <- 2 + [0] 7 <- 3 + [0] 8 <- 4 + [0] 9 <- 5 + [0] 10 <- 6 + [0] 11 <- 7 + [0] 12 <- 8 + [0] 13 <- 9 + [0] 15 <- 14 + [0] 16 <- 15 + [0] 21 <- 21 + [0] 22 <- 22 + [0] 23 <- 23 + [0] 24 <- 24 + [0] 33 <- 33 + [0] 34 <- 34 + [0] 35 <- 35 + [0] 36 <- 36 + [0] 37 <- 37 + [0] 38 <- 38 + [0] 39 <- 39 + [0] 40 <- 40 + [0] 41 <- 41 + [0] 42 <- 42 + [0] 43 <- 43 + [0] 44 <- 44 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order diff --git a/src/dm/impls/plex/tests/output/ex69_hex_2.out b/src/dm/impls/plex/tests/output/ex69_hex_2.out new file mode 100644 index 00000000000..3203a398059 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_hex_2.out @@ -0,0 +1,2441 @@ +[0]BT for serial flipped cells: +0 0 +[1]BT for serial flipped cells: +0 0 +[2]BT for serial flipped cells: +0 0 +[3]BT for serial flipped cells: +0 0 +[1]: component 0, Found representative leaf 7 (face 21) connecting to face 19 on (3, 0) with orientation 1 +Proc 0 Comp 0: +Proc 1 Comp 0: + edge (3, 0) (FALSE): +Proc 2 Comp 0: +Proc 3 Comp 0: +Flipping Proc+Comp 3: +[0]BT for parallel flipped cells: +0 0 +[1]BT for parallel flipped cells: +0 0 +[2]BT for parallel flipped cells: +0 0 +[3]BT for parallel flipped cells: +0 1 +[2]Flipping cell 10 through overlap +[3]Flipping cell 9 and sending to overlap +DM Object: box 4 MPI processes + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 3 +[0]: 1 ----> 15 +[0]: 1 ----> 19 +[0]: 1 ----> 23 +[0]: 2 ----> 15 +[0]: 2 ----> 20 +[0]: 2 ----> 24 +[0]: 3 ----> 16 +[0]: 3 ----> 19 +[0]: 3 ----> 25 +[0]: 4 ----> 16 +[0]: 4 ----> 20 +[0]: 4 ----> 26 +[0]: 5 ----> 17 +[0]: 5 ----> 21 +[0]: 5 ----> 23 +[0]: 6 ----> 17 +[0]: 6 ----> 22 +[0]: 6 ----> 24 +[0]: 7 ----> 18 +[0]: 7 ----> 21 +[0]: 7 ----> 25 +[0]: 8 ----> 18 +[0]: 8 ----> 22 +[0]: 8 ----> 26 +[0]: 9 ----> 0 +[0]: 10 ----> 0 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 13 ----> 0 +[0]: 14 ----> 0 +[0]: 15 ----> 11 +[0]: 15 ----> 13 +[0]: 16 ----> 12 +[0]: 16 ----> 13 +[0]: 17 ----> 11 +[0]: 17 ----> 14 +[0]: 18 ----> 12 +[0]: 18 ----> 14 +[0]: 19 ----> 9 +[0]: 19 ----> 13 +[0]: 20 ----> 10 +[0]: 20 ----> 13 +[0]: 21 ----> 9 +[0]: 21 ----> 14 +[0]: 22 ----> 10 +[0]: 22 ----> 14 +[0]: 23 ----> 9 +[0]: 23 ----> 11 +[0]: 24 ----> 10 +[0]: 24 ----> 11 +[0]: 25 ----> 9 +[0]: 25 ----> 12 +[0]: 26 ----> 10 +[0]: 26 ----> 12 +[1] Max support size: 3 +[1]: 1 ----> 15 +[1]: 1 ----> 19 +[1]: 1 ----> 23 +[1]: 2 ----> 15 +[1]: 2 ----> 20 +[1]: 2 ----> 24 +[1]: 3 ----> 16 +[1]: 3 ----> 19 +[1]: 3 ----> 25 +[1]: 4 ----> 16 +[1]: 4 ----> 20 +[1]: 4 ----> 26 +[1]: 5 ----> 17 +[1]: 5 ----> 21 +[1]: 5 ----> 23 +[1]: 6 ----> 17 +[1]: 6 ----> 22 +[1]: 6 ----> 24 +[1]: 7 ----> 18 +[1]: 7 ----> 21 +[1]: 7 ----> 25 +[1]: 8 ----> 18 +[1]: 8 ----> 22 +[1]: 8 ----> 26 +[1]: 9 ----> 0 +[1]: 10 ----> 0 +[1]: 11 ----> 0 +[1]: 12 ----> 0 +[1]: 13 ----> 0 +[1]: 14 ----> 0 +[1]: 15 ----> 11 +[1]: 15 ----> 13 +[1]: 16 ----> 12 +[1]: 16 ----> 13 +[1]: 17 ----> 11 +[1]: 17 ----> 14 +[1]: 18 ----> 12 
+[1]: 18 ----> 14 +[1]: 19 ----> 9 +[1]: 19 ----> 13 +[1]: 20 ----> 10 +[1]: 20 ----> 13 +[1]: 21 ----> 9 +[1]: 21 ----> 14 +[1]: 22 ----> 10 +[1]: 22 ----> 14 +[1]: 23 ----> 9 +[1]: 23 ----> 11 +[1]: 24 ----> 10 +[1]: 24 ----> 11 +[1]: 25 ----> 9 +[1]: 25 ----> 12 +[1]: 26 ----> 10 +[1]: 26 ----> 12 +[2] Max support size: 3 +[2]: 1 ----> 15 +[2]: 1 ----> 19 +[2]: 1 ----> 23 +[2]: 2 ----> 15 +[2]: 2 ----> 20 +[2]: 2 ----> 24 +[2]: 3 ----> 16 +[2]: 3 ----> 19 +[2]: 3 ----> 25 +[2]: 4 ----> 16 +[2]: 4 ----> 20 +[2]: 4 ----> 26 +[2]: 5 ----> 17 +[2]: 5 ----> 21 +[2]: 5 ----> 23 +[2]: 6 ----> 17 +[2]: 6 ----> 22 +[2]: 6 ----> 24 +[2]: 7 ----> 18 +[2]: 7 ----> 21 +[2]: 7 ----> 25 +[2]: 8 ----> 18 +[2]: 8 ----> 22 +[2]: 8 ----> 26 +[2]: 9 ----> 0 +[2]: 10 ----> 0 +[2]: 11 ----> 0 +[2]: 12 ----> 0 +[2]: 13 ----> 0 +[2]: 14 ----> 0 +[2]: 15 ----> 11 +[2]: 15 ----> 13 +[2]: 16 ----> 12 +[2]: 16 ----> 13 +[2]: 17 ----> 11 +[2]: 17 ----> 14 +[2]: 18 ----> 12 +[2]: 18 ----> 14 +[2]: 19 ----> 9 +[2]: 19 ----> 13 +[2]: 20 ----> 10 +[2]: 20 ----> 13 +[2]: 21 ----> 9 +[2]: 21 ----> 14 +[2]: 22 ----> 10 +[2]: 22 ----> 14 +[2]: 23 ----> 9 +[2]: 23 ----> 11 +[2]: 24 ----> 10 +[2]: 24 ----> 11 +[2]: 25 ----> 9 +[2]: 25 ----> 12 +[2]: 26 ----> 10 +[2]: 26 ----> 12 +[3] Max support size: 3 +[3]: 1 ----> 15 +[3]: 1 ----> 19 +[3]: 1 ----> 23 +[3]: 2 ----> 15 +[3]: 2 ----> 20 +[3]: 2 ----> 24 +[3]: 3 ----> 16 +[3]: 3 ----> 19 +[3]: 3 ----> 25 +[3]: 4 ----> 16 +[3]: 4 ----> 20 +[3]: 4 ----> 26 +[3]: 5 ----> 17 +[3]: 5 ----> 21 +[3]: 5 ----> 23 +[3]: 6 ----> 17 +[3]: 6 ----> 22 +[3]: 6 ----> 24 +[3]: 7 ----> 18 +[3]: 7 ----> 21 +[3]: 7 ----> 25 +[3]: 8 ----> 18 +[3]: 8 ----> 22 +[3]: 8 ----> 26 +[3]: 9 ----> 0 +[3]: 10 ----> 0 +[3]: 11 ----> 0 +[3]: 12 ----> 0 +[3]: 13 ----> 0 +[3]: 14 ----> 0 +[3]: 15 ----> 11 +[3]: 15 ----> 13 +[3]: 16 ----> 12 +[3]: 16 ----> 13 +[3]: 17 ----> 11 +[3]: 17 ----> 14 +[3]: 18 ----> 12 +[3]: 18 ----> 14 +[3]: 19 ----> 9 +[3]: 19 ----> 13 +[3]: 20 ----> 10 +[3]: 20 ----> 13 +[3]: 21 ----> 9 +[3]: 21 ----> 14 +[3]: 22 ----> 10 +[3]: 22 ----> 14 +[3]: 23 ----> 9 +[3]: 23 ----> 11 +[3]: 24 ----> 10 +[3]: 24 ----> 11 +[3]: 25 ----> 9 +[3]: 25 ----> 12 +[3]: 26 ----> 10 +[3]: 26 ----> 12 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 13 (-2) +[0]: 0 <---- 14 (0) +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (-3) +[0]: 0 <---- 10 (0) +[0]: 0 <---- 9 (-2) +[0]: 9 <---- 19 (0) +[0]: 9 <---- 25 (0) +[0]: 9 <---- 21 (-1) +[0]: 9 <---- 23 (-1) +[0]: 10 <---- 20 (0) +[0]: 10 <---- 26 (0) +[0]: 10 <---- 22 (-1) +[0]: 10 <---- 24 (-1) +[0]: 11 <---- 15 (0) +[0]: 11 <---- 24 (0) +[0]: 11 <---- 17 (-1) +[0]: 11 <---- 23 (-1) +[0]: 12 <---- 16 (0) +[0]: 12 <---- 26 (0) +[0]: 12 <---- 18 (-1) +[0]: 12 <---- 25 (-1) +[0]: 13 <---- 15 (0) +[0]: 13 <---- 20 (0) +[0]: 13 <---- 16 (-1) +[0]: 13 <---- 19 (-1) +[0]: 14 <---- 17 (0) +[0]: 14 <---- 22 (0) +[0]: 14 <---- 18 (-1) +[0]: 14 <---- 21 (-1) +[0]: 15 <---- 1 (0) +[0]: 15 <---- 2 (0) +[0]: 16 <---- 3 (0) +[0]: 16 <---- 4 (0) +[0]: 17 <---- 5 (0) +[0]: 17 <---- 6 (0) +[0]: 18 <---- 7 (0) +[0]: 18 <---- 8 (0) +[0]: 19 <---- 1 (0) +[0]: 19 <---- 3 (0) +[0]: 20 <---- 2 (0) +[0]: 20 <---- 4 (0) +[0]: 21 <---- 5 (0) +[0]: 21 <---- 7 (0) +[0]: 22 <---- 6 (0) +[0]: 22 <---- 8 (0) +[0]: 23 <---- 1 (0) +[0]: 23 <---- 5 (0) +[0]: 24 <---- 2 (0) +[0]: 24 <---- 6 (0) +[0]: 25 <---- 3 (0) +[0]: 25 <---- 7 (0) +[0]: 26 <---- 4 (0) +[0]: 26 <---- 8 (0) +[1] Max cone size: 6 +[1]: 0 <---- 13 (-2) +[1]: 0 <---- 14 (0) +[1]: 0 <---- 11 (0) +[1]: 0 <---- 12 (-3) +[1]: 0 <---- 10 (0) 
+[1]: 0 <---- 9 (-2) +[1]: 9 <---- 19 (0) +[1]: 9 <---- 25 (0) +[1]: 9 <---- 21 (-1) +[1]: 9 <---- 23 (-1) +[1]: 10 <---- 20 (0) +[1]: 10 <---- 26 (0) +[1]: 10 <---- 22 (-1) +[1]: 10 <---- 24 (-1) +[1]: 11 <---- 15 (0) +[1]: 11 <---- 24 (0) +[1]: 11 <---- 17 (-1) +[1]: 11 <---- 23 (-1) +[1]: 12 <---- 16 (0) +[1]: 12 <---- 26 (0) +[1]: 12 <---- 18 (-1) +[1]: 12 <---- 25 (-1) +[1]: 13 <---- 15 (0) +[1]: 13 <---- 20 (0) +[1]: 13 <---- 16 (-1) +[1]: 13 <---- 19 (-1) +[1]: 14 <---- 17 (0) +[1]: 14 <---- 22 (0) +[1]: 14 <---- 18 (-1) +[1]: 14 <---- 21 (-1) +[1]: 15 <---- 1 (0) +[1]: 15 <---- 2 (0) +[1]: 16 <---- 3 (0) +[1]: 16 <---- 4 (0) +[1]: 17 <---- 5 (0) +[1]: 17 <---- 6 (0) +[1]: 18 <---- 7 (0) +[1]: 18 <---- 8 (0) +[1]: 19 <---- 1 (0) +[1]: 19 <---- 3 (0) +[1]: 20 <---- 2 (0) +[1]: 20 <---- 4 (0) +[1]: 21 <---- 5 (0) +[1]: 21 <---- 7 (0) +[1]: 22 <---- 6 (0) +[1]: 22 <---- 8 (0) +[1]: 23 <---- 1 (0) +[1]: 23 <---- 5 (0) +[1]: 24 <---- 2 (0) +[1]: 24 <---- 6 (0) +[1]: 25 <---- 3 (0) +[1]: 25 <---- 7 (0) +[1]: 26 <---- 4 (0) +[1]: 26 <---- 8 (0) +[2] Max cone size: 6 +[2]: 0 <---- 13 (-2) +[2]: 0 <---- 14 (0) +[2]: 0 <---- 11 (0) +[2]: 0 <---- 12 (-3) +[2]: 0 <---- 10 (0) +[2]: 0 <---- 9 (-2) +[2]: 9 <---- 19 (0) +[2]: 9 <---- 25 (0) +[2]: 9 <---- 21 (-1) +[2]: 9 <---- 23 (-1) +[2]: 10 <---- 20 (0) +[2]: 10 <---- 26 (0) +[2]: 10 <---- 22 (-1) +[2]: 10 <---- 24 (-1) +[2]: 11 <---- 15 (0) +[2]: 11 <---- 24 (0) +[2]: 11 <---- 17 (-1) +[2]: 11 <---- 23 (-1) +[2]: 12 <---- 16 (0) +[2]: 12 <---- 26 (0) +[2]: 12 <---- 18 (-1) +[2]: 12 <---- 25 (-1) +[2]: 13 <---- 15 (0) +[2]: 13 <---- 20 (0) +[2]: 13 <---- 16 (-1) +[2]: 13 <---- 19 (-1) +[2]: 14 <---- 17 (0) +[2]: 14 <---- 22 (0) +[2]: 14 <---- 18 (-1) +[2]: 14 <---- 21 (-1) +[2]: 15 <---- 1 (0) +[2]: 15 <---- 2 (0) +[2]: 16 <---- 3 (0) +[2]: 16 <---- 4 (0) +[2]: 17 <---- 5 (0) +[2]: 17 <---- 6 (0) +[2]: 18 <---- 7 (0) +[2]: 18 <---- 8 (0) +[2]: 19 <---- 1 (0) +[2]: 19 <---- 3 (0) +[2]: 20 <---- 2 (0) +[2]: 20 <---- 4 (0) +[2]: 21 <---- 5 (0) +[2]: 21 <---- 7 (0) +[2]: 22 <---- 6 (0) +[2]: 22 <---- 8 (0) +[2]: 23 <---- 1 (0) +[2]: 23 <---- 5 (0) +[2]: 24 <---- 2 (0) +[2]: 24 <---- 6 (0) +[2]: 25 <---- 3 (0) +[2]: 25 <---- 7 (0) +[2]: 26 <---- 4 (0) +[2]: 26 <---- 8 (0) +[3] Max cone size: 6 +[3]: 0 <---- 13 (-2) +[3]: 0 <---- 14 (0) +[3]: 0 <---- 11 (0) +[3]: 0 <---- 12 (-3) +[3]: 0 <---- 10 (0) +[3]: 0 <---- 9 (-2) +[3]: 9 <---- 19 (0) +[3]: 9 <---- 25 (0) +[3]: 9 <---- 21 (-1) +[3]: 9 <---- 23 (-1) +[3]: 10 <---- 20 (0) +[3]: 10 <---- 26 (0) +[3]: 10 <---- 22 (-1) +[3]: 10 <---- 24 (-1) +[3]: 11 <---- 15 (0) +[3]: 11 <---- 24 (0) +[3]: 11 <---- 17 (-1) +[3]: 11 <---- 23 (-1) +[3]: 12 <---- 16 (0) +[3]: 12 <---- 26 (0) +[3]: 12 <---- 18 (-1) +[3]: 12 <---- 25 (-1) +[3]: 13 <---- 15 (0) +[3]: 13 <---- 20 (0) +[3]: 13 <---- 16 (-1) +[3]: 13 <---- 19 (-1) +[3]: 14 <---- 17 (0) +[3]: 14 <---- 22 (0) +[3]: 14 <---- 18 (-1) +[3]: 14 <---- 21 (-1) +[3]: 15 <---- 1 (0) +[3]: 15 <---- 2 (0) +[3]: 16 <---- 3 (0) +[3]: 16 <---- 4 (0) +[3]: 17 <---- 5 (0) +[3]: 17 <---- 6 (0) +[3]: 18 <---- 7 (0) +[3]: 18 <---- 8 (0) +[3]: 19 <---- 1 (0) +[3]: 19 <---- 3 (0) +[3]: 20 <---- 2 (0) +[3]: 20 <---- 4 (0) +[3]: 21 <---- 5 (0) +[3]: 21 <---- 7 (0) +[3]: 22 <---- 6 (0) +[3]: 22 <---- 8 (0) +[3]: 23 <---- 1 (0) +[3]: 23 <---- 5 (0) +[3]: 24 <---- 2 (0) +[3]: 24 <---- 6 (0) +[3]: 25 <---- 3 (0) +[3]: 25 <---- 7 (0) +[3]: 26 <---- 4 (0) +[3]: 26 <---- 8 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 1) dim 3 offset 0 0. 0. 0. 
+ ( 2) dim 3 offset 3 1. 0. 0. + ( 3) dim 3 offset 6 0. 1. 0. + ( 4) dim 3 offset 9 1. 1. 0. + ( 5) dim 3 offset 12 0. 0. 1. + ( 6) dim 3 offset 15 1. 0. 1. + ( 7) dim 3 offset 18 0. 1. 1. + ( 8) dim 3 offset 21 1. 1. 1. +Process 1: + ( 1) dim 3 offset 0 1. 0. 0. + ( 2) dim 3 offset 3 2. 0. 0. + ( 3) dim 3 offset 6 1. 1. 0. + ( 4) dim 3 offset 9 2. 1. 0. + ( 5) dim 3 offset 12 1. 0. 1. + ( 6) dim 3 offset 15 2. 0. 1. + ( 7) dim 3 offset 18 1. 1. 1. + ( 8) dim 3 offset 21 2. 1. 1. +Process 2: + ( 1) dim 3 offset 0 0. 0. 1. + ( 2) dim 3 offset 3 1. 0. 1. + ( 3) dim 3 offset 6 0. 1. 1. + ( 4) dim 3 offset 9 1. 1. 1. + ( 5) dim 3 offset 12 0. 0. 2. + ( 6) dim 3 offset 15 1. 0. 2. + ( 7) dim 3 offset 18 0. 1. 2. + ( 8) dim 3 offset 21 1. 1. 2. +Process 3: + ( 1) dim 3 offset 0 1. 0. 1. + ( 2) dim 3 offset 3 2. 0. 1. + ( 3) dim 3 offset 6 1. 1. 1. + ( 4) dim 3 offset 9 2. 1. 1. + ( 5) dim 3 offset 12 1. 0. 2. + ( 6) dim 3 offset 15 2. 0. 2. + ( 7) dim 3 offset 18 1. 1. 2. + ( 8) dim 3 offset 21 2. 1. 2. +Labels: +Label 'marker': +[0]: 1 (1) +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[1]: 1 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 25 (1) +[1]: 26 (1) +[2]: 1 (1) +[2]: 2 (1) +[2]: 3 (1) +[2]: 4 (1) +[2]: 5 (1) +[2]: 6 (1) +[2]: 7 (1) +[2]: 8 (1) +[2]: 9 (1) +[2]: 10 (1) +[2]: 11 (1) +[2]: 12 (1) +[2]: 13 (1) +[2]: 14 (1) +[2]: 15 (1) +[2]: 16 (1) +[2]: 17 (1) +[2]: 18 (1) +[2]: 19 (1) +[2]: 20 (1) +[2]: 21 (1) +[2]: 22 (1) +[2]: 23 (1) +[2]: 24 (1) +[2]: 25 (1) +[2]: 26 (1) +[3]: 1 (1) +[3]: 2 (1) +[3]: 3 (1) +[3]: 4 (1) +[3]: 5 (1) +[3]: 6 (1) +[3]: 7 (1) +[3]: 8 (1) +[3]: 9 (1) +[3]: 10 (1) +[3]: 11 (1) +[3]: 12 (1) +[3]: 13 (1) +[3]: 14 (1) +[3]: 15 (1) +[3]: 16 (1) +[3]: 17 (1) +[3]: 18 (1) +[3]: 19 (1) +[3]: 20 (1) +[3]: 21 (1) +[3]: 22 (1) +[3]: 23 (1) +[3]: 24 (1) +[3]: 25 (1) +[3]: 26 (1) +Label 'Face Sets': +[0]: 9 (6) +[0]: 10 (5) +[0]: 11 (3) +[0]: 12 (4) +[0]: 13 (1) +[0]: 14 (2) +[1]: 9 (6) +[1]: 10 (5) +[1]: 11 (3) +[1]: 12 (4) +[1]: 13 (1) +[1]: 14 (2) +[2]: 9 (6) +[2]: 10 (5) +[2]: 11 (3) +[2]: 12 (4) +[2]: 13 (1) +[2]: 14 (2) +[3]: 9 (6) +[3]: 10 (5) +[3]: 11 (3) +[3]: 12 (4) +[3]: 13 (1) +[3]: 14 (2) +Label 'celltype': +[0]: 0 (7) +[0]: 1 (0) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (4) +[0]: 10 (4) +[0]: 11 (4) +[0]: 12 (4) +[0]: 13 (4) +[0]: 14 (4) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[1]: 0 (7) +[1]: 1 (0) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (4) +[1]: 10 (4) +[1]: 11 (4) +[1]: 12 (4) +[1]: 13 (4) +[1]: 14 (4) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 25 (1) +[1]: 26 (1) +[2]: 0 (7) +[2]: 1 (0) +[2]: 2 (0) +[2]: 3 (0) +[2]: 4 (0) +[2]: 5 (0) +[2]: 6 (0) +[2]: 7 (0) +[2]: 8 (0) +[2]: 9 (4) +[2]: 10 (4) +[2]: 11 (4) +[2]: 12 (4) +[2]: 13 
(4) +[2]: 14 (4) +[2]: 15 (1) +[2]: 16 (1) +[2]: 17 (1) +[2]: 18 (1) +[2]: 19 (1) +[2]: 20 (1) +[2]: 21 (1) +[2]: 22 (1) +[2]: 23 (1) +[2]: 24 (1) +[2]: 25 (1) +[2]: 26 (1) +[3]: 0 (7) +[3]: 1 (0) +[3]: 2 (0) +[3]: 3 (0) +[3]: 4 (0) +[3]: 5 (0) +[3]: 6 (0) +[3]: 7 (0) +[3]: 8 (0) +[3]: 9 (4) +[3]: 10 (4) +[3]: 11 (4) +[3]: 12 (4) +[3]: 13 (4) +[3]: 14 (4) +[3]: 15 (1) +[3]: 16 (1) +[3]: 17 (1) +[3]: 18 (1) +[3]: 19 (1) +[3]: 20 (1) +[3]: 21 (1) +[3]: 22 (1) +[3]: 23 (1) +[3]: 24 (1) +[3]: 25 (1) +[3]: 26 (1) +Label 'fault': +[0]: 10 (2) +[0]: 20 (1) +[0]: 22 (1) +[0]: 24 (1) +[0]: 26 (1) +[0]: 2 (0) +[0]: 4 (0) +[0]: 6 (0) +[0]: 8 (0) +[0]: 0 (103) +[0]: 11 (102) +[0]: 12 (102) +[0]: 13 (102) +[0]: 14 (102) +[0]: 15 (101) +[0]: 16 (101) +[0]: 17 (101) +[0]: 18 (101) +[1]: 9 (2) +[1]: 19 (1) +[1]: 21 (1) +[1]: 23 (1) +[1]: 25 (1) +[1]: 1 (0) +[1]: 3 (0) +[1]: 5 (0) +[1]: 7 (0) +[1]: 0 (-103) +[1]: 11 (-102) +[1]: 12 (-102) +[1]: 13 (-102) +[1]: 14 (-102) +[1]: 15 (-101) +[1]: 16 (-101) +[1]: 17 (-101) +[1]: 18 (-101) +[2]: 10 (2) +[2]: 20 (1) +[2]: 22 (1) +[2]: 24 (1) +[2]: 26 (1) +[2]: 2 (0) +[2]: 4 (0) +[2]: 6 (0) +[2]: 8 (0) +[2]: 0 (103) +[2]: 11 (102) +[2]: 12 (102) +[2]: 13 (102) +[2]: 14 (102) +[2]: 15 (101) +[2]: 16 (101) +[2]: 17 (101) +[2]: 18 (101) +[3]: 9 (2) +[3]: 19 (1) +[3]: 21 (1) +[3]: 23 (1) +[3]: 25 (1) +[3]: 1 (0) +[3]: 3 (0) +[3]: 5 (0) +[3]: 7 (0) +[3]: 0 (-103) +[3]: 11 (-102) +[3]: 12 (-102) +[3]: 13 (-102) +[3]: 14 (-102) +[3]: 15 (-101) +[3]: 16 (-101) +[3]: 17 (-101) +[3]: 18 (-101) +PetscSF Object: 4 MPI processes + type: basic + [0] Number of roots=27, leaves=15, remote ranks=3 + [0] 2 <- (1,1) + [0] 4 <- (1,3) + [0] 5 <- (2,1) + [0] 6 <- (3,1) + [0] 7 <- (2,3) + [0] 8 <- (3,3) + [0] 10 <- (1,9) + [0] 14 <- (2,13) + [0] 17 <- (2,15) + [0] 18 <- (2,16) + [0] 20 <- (1,19) + [0] 21 <- (2,19) + [0] 22 <- (3,19) + [0] 24 <- (1,23) + [0] 26 <- (1,25) + [1] Number of roots=27, leaves=9, remote ranks=1 + [1] 5 <- (3,1) + [1] 6 <- (3,2) + [1] 7 <- (3,3) + [1] 8 <- (3,4) + [1] 14 <- (3,13) + [1] 17 <- (3,15) + [1] 18 <- (3,16) + [1] 21 <- (3,19) + [1] 22 <- (3,20) + [2] Number of roots=27, leaves=9, remote ranks=1 + [2] 2 <- (3,1) + [2] 4 <- (3,3) + [2] 6 <- (3,5) + [2] 8 <- (3,7) + [2] 10 <- (3,9) + [2] 20 <- (3,19) + [2] 22 <- (3,21) + [2] 24 <- (3,23) + [2] 26 <- (3,25) + [3] Number of roots=27, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 6 edges + [0] 2 <- 1 + [0] 4 <- 3 + [0] 10 <- 9 + [0] 20 <- 19 + [0] 24 <- 23 + [0] 26 <- 25 + [0] 2: 6 edges + [0] 5 <- 1 + [0] 7 <- 3 + [0] 14 <- 13 + [0] 17 <- 15 + [0] 18 <- 16 + [0] 21 <- 19 + [0] 3: 3 edges + [0] 6 <- 1 + [0] 8 <- 3 + [0] 22 <- 19 + [1] Roots referenced by my leaves, by rank + [1] 3: 9 edges + [1] 5 <- 1 + [1] 6 <- 2 + [1] 7 <- 3 + [1] 8 <- 4 + [1] 14 <- 13 + [1] 17 <- 15 + [1] 18 <- 16 + [1] 21 <- 19 + [1] 22 <- 20 + [2] Roots referenced by my leaves, by rank + [2] 3: 9 edges + [2] 2 <- 1 + [2] 4 <- 3 + [2] 6 <- 5 + [2] 8 <- 7 + [2] 10 <- 9 + [2] 20 <- 19 + [2] 22 <- 21 + [2] 24 <- 23 + [2] 26 <- 25 + [3] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +DM Object: box 4 MPI processes + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 2 ----> 25 +[0]: 2 ----> 29 +[0]: 2 ----> 31 +[0]: 3 ----> 26 +[0]: 3 ----> 29 +[0]: 3 ----> 32 +[0]: 4 ----> 27 +[0]: 4 ----> 30 +[0]: 4 ----> 31 +[0]: 5 ----> 28 +[0]: 5 ----> 30 +[0]: 5 ----> 32 +[0]: 6 ----> 33 +[0]: 6 ----> 37 +[0]: 6 ----> 41 +[0]: 7 ----> 25 +[0]: 7 ----> 34 +[0]: 7 ----> 38 +[0]: 
7 ----> 41 +[0]: 8 ----> 33 +[0]: 8 ----> 39 +[0]: 8 ----> 42 +[0]: 9 ----> 26 +[0]: 9 ----> 34 +[0]: 9 ----> 40 +[0]: 9 ----> 42 +[0]: 10 ----> 35 +[0]: 10 ----> 37 +[0]: 10 ----> 43 +[0]: 11 ----> 27 +[0]: 11 ----> 36 +[0]: 11 ----> 38 +[0]: 11 ----> 43 +[0]: 12 ----> 35 +[0]: 12 ----> 39 +[0]: 12 ----> 44 +[0]: 13 ----> 28 +[0]: 13 ----> 36 +[0]: 13 ----> 40 +[0]: 13 ----> 44 +[0]: 14 ----> 0 +[0]: 15 ----> 1 +[0]: 16 ----> 0 +[0]: 16 ----> 1 +[0]: 17 ----> 0 +[0]: 18 ----> 0 +[0]: 19 ----> 0 +[0]: 20 ----> 0 +[0]: 21 ----> 1 +[0]: 22 ----> 1 +[0]: 23 ----> 1 +[0]: 24 ----> 1 +[0]: 25 ----> 17 +[0]: 25 ----> 19 +[0]: 26 ----> 18 +[0]: 26 ----> 19 +[0]: 27 ----> 17 +[0]: 27 ----> 20 +[0]: 28 ----> 18 +[0]: 28 ----> 20 +[0]: 29 ----> 14 +[0]: 29 ----> 19 +[0]: 30 ----> 14 +[0]: 30 ----> 20 +[0]: 31 ----> 14 +[0]: 31 ----> 17 +[0]: 32 ----> 14 +[0]: 32 ----> 18 +[0]: 33 ----> 15 +[0]: 33 ----> 21 +[0]: 34 ----> 16 +[0]: 34 ----> 19 +[0]: 34 ----> 21 +[0]: 35 ----> 15 +[0]: 35 ----> 22 +[0]: 36 ----> 16 +[0]: 36 ----> 20 +[0]: 36 ----> 22 +[0]: 37 ----> 15 +[0]: 37 ----> 23 +[0]: 38 ----> 16 +[0]: 38 ----> 17 +[0]: 38 ----> 23 +[0]: 39 ----> 15 +[0]: 39 ----> 24 +[0]: 40 ----> 16 +[0]: 40 ----> 18 +[0]: 40 ----> 24 +[0]: 41 ----> 21 +[0]: 41 ----> 23 +[0]: 42 ----> 21 +[0]: 42 ----> 24 +[0]: 43 ----> 22 +[0]: 43 ----> 23 +[0]: 44 ----> 22 +[0]: 44 ----> 24 +[1] Max support size: 4 +[1]: 2 ----> 25 +[1]: 2 ----> 33 +[1]: 2 ----> 37 +[1]: 2 ----> 41 +[1]: 3 ----> 34 +[1]: 3 ----> 38 +[1]: 3 ----> 41 +[1]: 4 ----> 26 +[1]: 4 ----> 33 +[1]: 4 ----> 39 +[1]: 4 ----> 42 +[1]: 5 ----> 34 +[1]: 5 ----> 40 +[1]: 5 ----> 42 +[1]: 6 ----> 27 +[1]: 6 ----> 35 +[1]: 6 ----> 37 +[1]: 6 ----> 43 +[1]: 7 ----> 36 +[1]: 7 ----> 38 +[1]: 7 ----> 43 +[1]: 8 ----> 28 +[1]: 8 ----> 35 +[1]: 8 ----> 39 +[1]: 8 ----> 44 +[1]: 9 ----> 36 +[1]: 9 ----> 40 +[1]: 9 ----> 44 +[1]: 10 ----> 25 +[1]: 10 ----> 29 +[1]: 10 ----> 31 +[1]: 11 ----> 26 +[1]: 11 ----> 29 +[1]: 11 ----> 32 +[1]: 12 ----> 27 +[1]: 12 ----> 30 +[1]: 12 ----> 31 +[1]: 13 ----> 28 +[1]: 13 ----> 30 +[1]: 13 ----> 32 +[1]: 14 ----> 0 +[1]: 14 ----> 1 +[1]: 15 ----> 1 +[1]: 16 ----> 0 +[1]: 17 ----> 0 +[1]: 18 ----> 0 +[1]: 19 ----> 0 +[1]: 20 ----> 0 +[1]: 21 ----> 1 +[1]: 22 ----> 1 +[1]: 23 ----> 1 +[1]: 24 ----> 1 +[1]: 25 ----> 17 +[1]: 25 ----> 19 +[1]: 26 ----> 18 +[1]: 26 ----> 19 +[1]: 27 ----> 17 +[1]: 27 ----> 20 +[1]: 28 ----> 18 +[1]: 28 ----> 20 +[1]: 29 ----> 16 +[1]: 29 ----> 19 +[1]: 30 ----> 16 +[1]: 30 ----> 20 +[1]: 31 ----> 16 +[1]: 31 ----> 17 +[1]: 32 ----> 16 +[1]: 32 ----> 18 +[1]: 33 ----> 14 +[1]: 33 ----> 19 +[1]: 33 ----> 21 +[1]: 34 ----> 15 +[1]: 34 ----> 21 +[1]: 35 ----> 14 +[1]: 35 ----> 20 +[1]: 35 ----> 22 +[1]: 36 ----> 15 +[1]: 36 ----> 22 +[1]: 37 ----> 14 +[1]: 37 ----> 17 +[1]: 37 ----> 23 +[1]: 38 ----> 15 +[1]: 38 ----> 23 +[1]: 39 ----> 14 +[1]: 39 ----> 18 +[1]: 39 ----> 24 +[1]: 40 ----> 15 +[1]: 40 ----> 24 +[1]: 41 ----> 21 +[1]: 41 ----> 23 +[1]: 42 ----> 21 +[1]: 42 ----> 24 +[1]: 43 ----> 22 +[1]: 43 ----> 23 +[1]: 44 ----> 22 +[1]: 44 ----> 24 +[2] Max support size: 4 +[2]: 2 ----> 25 +[2]: 2 ----> 29 +[2]: 2 ----> 31 +[2]: 3 ----> 26 +[2]: 3 ----> 29 +[2]: 3 ----> 32 +[2]: 4 ----> 27 +[2]: 4 ----> 30 +[2]: 4 ----> 31 +[2]: 5 ----> 28 +[2]: 5 ----> 30 +[2]: 5 ----> 32 +[2]: 6 ----> 33 +[2]: 6 ----> 37 +[2]: 6 ----> 41 +[2]: 7 ----> 25 +[2]: 7 ----> 34 +[2]: 7 ----> 38 +[2]: 7 ----> 41 +[2]: 8 ----> 33 +[2]: 8 ----> 39 +[2]: 8 ----> 42 +[2]: 9 ----> 26 +[2]: 9 ----> 34 +[2]: 9 ----> 40 +[2]: 9 ----> 
42 +[2]: 10 ----> 35 +[2]: 10 ----> 37 +[2]: 10 ----> 43 +[2]: 11 ----> 27 +[2]: 11 ----> 36 +[2]: 11 ----> 38 +[2]: 11 ----> 43 +[2]: 12 ----> 35 +[2]: 12 ----> 39 +[2]: 12 ----> 44 +[2]: 13 ----> 28 +[2]: 13 ----> 36 +[2]: 13 ----> 40 +[2]: 13 ----> 44 +[2]: 14 ----> 0 +[2]: 15 ----> 1 +[2]: 16 ----> 0 +[2]: 16 ----> 1 +[2]: 17 ----> 0 +[2]: 18 ----> 0 +[2]: 19 ----> 0 +[2]: 20 ----> 0 +[2]: 21 ----> 1 +[2]: 22 ----> 1 +[2]: 23 ----> 1 +[2]: 24 ----> 1 +[2]: 25 ----> 17 +[2]: 25 ----> 19 +[2]: 26 ----> 18 +[2]: 26 ----> 19 +[2]: 27 ----> 17 +[2]: 27 ----> 20 +[2]: 28 ----> 18 +[2]: 28 ----> 20 +[2]: 29 ----> 14 +[2]: 29 ----> 19 +[2]: 30 ----> 14 +[2]: 30 ----> 20 +[2]: 31 ----> 14 +[2]: 31 ----> 17 +[2]: 32 ----> 14 +[2]: 32 ----> 18 +[2]: 33 ----> 15 +[2]: 33 ----> 21 +[2]: 34 ----> 16 +[2]: 34 ----> 19 +[2]: 34 ----> 21 +[2]: 35 ----> 15 +[2]: 35 ----> 22 +[2]: 36 ----> 16 +[2]: 36 ----> 20 +[2]: 36 ----> 22 +[2]: 37 ----> 15 +[2]: 37 ----> 23 +[2]: 38 ----> 16 +[2]: 38 ----> 17 +[2]: 38 ----> 23 +[2]: 39 ----> 15 +[2]: 39 ----> 24 +[2]: 40 ----> 16 +[2]: 40 ----> 18 +[2]: 40 ----> 24 +[2]: 41 ----> 21 +[2]: 41 ----> 23 +[2]: 42 ----> 21 +[2]: 42 ----> 24 +[2]: 43 ----> 22 +[2]: 43 ----> 23 +[2]: 44 ----> 22 +[2]: 44 ----> 24 +[3] Max support size: 4 +[3]: 2 ----> 25 +[3]: 2 ----> 33 +[3]: 2 ----> 37 +[3]: 2 ----> 41 +[3]: 3 ----> 34 +[3]: 3 ----> 38 +[3]: 3 ----> 41 +[3]: 4 ----> 26 +[3]: 4 ----> 33 +[3]: 4 ----> 39 +[3]: 4 ----> 42 +[3]: 5 ----> 34 +[3]: 5 ----> 40 +[3]: 5 ----> 42 +[3]: 6 ----> 27 +[3]: 6 ----> 35 +[3]: 6 ----> 37 +[3]: 6 ----> 43 +[3]: 7 ----> 36 +[3]: 7 ----> 38 +[3]: 7 ----> 43 +[3]: 8 ----> 28 +[3]: 8 ----> 35 +[3]: 8 ----> 39 +[3]: 8 ----> 44 +[3]: 9 ----> 36 +[3]: 9 ----> 40 +[3]: 9 ----> 44 +[3]: 10 ----> 25 +[3]: 10 ----> 29 +[3]: 10 ----> 31 +[3]: 11 ----> 26 +[3]: 11 ----> 29 +[3]: 11 ----> 32 +[3]: 12 ----> 27 +[3]: 12 ----> 30 +[3]: 12 ----> 31 +[3]: 13 ----> 28 +[3]: 13 ----> 30 +[3]: 13 ----> 32 +[3]: 14 ----> 0 +[3]: 14 ----> 1 +[3]: 15 ----> 1 +[3]: 16 ----> 0 +[3]: 17 ----> 0 +[3]: 18 ----> 0 +[3]: 19 ----> 0 +[3]: 20 ----> 0 +[3]: 21 ----> 1 +[3]: 22 ----> 1 +[3]: 23 ----> 1 +[3]: 24 ----> 1 +[3]: 25 ----> 17 +[3]: 25 ----> 19 +[3]: 26 ----> 18 +[3]: 26 ----> 19 +[3]: 27 ----> 17 +[3]: 27 ----> 20 +[3]: 28 ----> 18 +[3]: 28 ----> 20 +[3]: 29 ----> 16 +[3]: 29 ----> 19 +[3]: 30 ----> 16 +[3]: 30 ----> 20 +[3]: 31 ----> 16 +[3]: 31 ----> 17 +[3]: 32 ----> 16 +[3]: 32 ----> 18 +[3]: 33 ----> 14 +[3]: 33 ----> 19 +[3]: 33 ----> 21 +[3]: 34 ----> 15 +[3]: 34 ----> 21 +[3]: 35 ----> 14 +[3]: 35 ----> 20 +[3]: 35 ----> 22 +[3]: 36 ----> 15 +[3]: 36 ----> 22 +[3]: 37 ----> 14 +[3]: 37 ----> 17 +[3]: 37 ----> 23 +[3]: 38 ----> 15 +[3]: 38 ----> 23 +[3]: 39 ----> 14 +[3]: 39 ----> 18 +[3]: 39 ----> 24 +[3]: 40 ----> 15 +[3]: 40 ----> 24 +[3]: 41 ----> 21 +[3]: 41 ----> 23 +[3]: 42 ----> 21 +[3]: 42 ----> 24 +[3]: 43 ----> 22 +[3]: 43 ----> 23 +[3]: 44 ----> 22 +[3]: 44 ----> 24 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 19 (-2) +[0]: 0 <---- 20 (0) +[0]: 0 <---- 17 (0) +[0]: 0 <---- 18 (-3) +[0]: 0 <---- 16 (0) +[0]: 0 <---- 14 (-2) +[0]: 1 <---- 15 (0) +[0]: 1 <---- 16 (0) +[0]: 1 <---- 21 (0) +[0]: 1 <---- 24 (0) +[0]: 1 <---- 22 (-1) +[0]: 1 <---- 23 (-1) +[0]: 14 <---- 29 (0) +[0]: 14 <---- 32 (0) +[0]: 14 <---- 30 (-1) +[0]: 14 <---- 31 (-1) +[0]: 15 <---- 33 (0) +[0]: 15 <---- 39 (0) +[0]: 15 <---- 35 (-1) +[0]: 15 <---- 37 (-1) +[0]: 16 <---- 34 (0) +[0]: 16 <---- 40 (0) +[0]: 16 <---- 36 (-1) +[0]: 16 <---- 38 (-1) +[0]: 17 <---- 25 (0) +[0]: 17 
<---- 38 (0) +[0]: 17 <---- 27 (-1) +[0]: 17 <---- 31 (-1) +[0]: 18 <---- 26 (0) +[0]: 18 <---- 40 (0) +[0]: 18 <---- 28 (-1) +[0]: 18 <---- 32 (-1) +[0]: 19 <---- 25 (0) +[0]: 19 <---- 34 (0) +[0]: 19 <---- 26 (-1) +[0]: 19 <---- 29 (-1) +[0]: 20 <---- 27 (0) +[0]: 20 <---- 36 (0) +[0]: 20 <---- 28 (-1) +[0]: 20 <---- 30 (-1) +[0]: 21 <---- 33 (0) +[0]: 21 <---- 34 (0) +[0]: 21 <---- 41 (0) +[0]: 21 <---- 42 (0) +[0]: 22 <---- 35 (0) +[0]: 22 <---- 36 (0) +[0]: 22 <---- 43 (0) +[0]: 22 <---- 44 (0) +[0]: 23 <---- 37 (0) +[0]: 23 <---- 38 (0) +[0]: 23 <---- 41 (0) +[0]: 23 <---- 43 (0) +[0]: 24 <---- 39 (0) +[0]: 24 <---- 40 (0) +[0]: 24 <---- 42 (0) +[0]: 24 <---- 44 (0) +[0]: 25 <---- 2 (0) +[0]: 25 <---- 7 (0) +[0]: 26 <---- 3 (0) +[0]: 26 <---- 9 (0) +[0]: 27 <---- 4 (0) +[0]: 27 <---- 11 (0) +[0]: 28 <---- 5 (0) +[0]: 28 <---- 13 (0) +[0]: 29 <---- 2 (0) +[0]: 29 <---- 3 (0) +[0]: 30 <---- 4 (0) +[0]: 30 <---- 5 (0) +[0]: 31 <---- 2 (0) +[0]: 31 <---- 4 (0) +[0]: 32 <---- 3 (0) +[0]: 32 <---- 5 (0) +[0]: 33 <---- 6 (0) +[0]: 33 <---- 8 (0) +[0]: 34 <---- 7 (0) +[0]: 34 <---- 9 (0) +[0]: 35 <---- 10 (0) +[0]: 35 <---- 12 (0) +[0]: 36 <---- 11 (0) +[0]: 36 <---- 13 (0) +[0]: 37 <---- 6 (0) +[0]: 37 <---- 10 (0) +[0]: 38 <---- 7 (0) +[0]: 38 <---- 11 (0) +[0]: 39 <---- 8 (0) +[0]: 39 <---- 12 (0) +[0]: 40 <---- 9 (0) +[0]: 40 <---- 13 (0) +[0]: 41 <---- 6 (0) +[0]: 41 <---- 7 (0) +[0]: 42 <---- 8 (0) +[0]: 42 <---- 9 (0) +[0]: 43 <---- 10 (0) +[0]: 43 <---- 11 (0) +[0]: 44 <---- 12 (0) +[0]: 44 <---- 13 (0) +[1] Max cone size: 6 +[1]: 0 <---- 19 (-2) +[1]: 0 <---- 20 (0) +[1]: 0 <---- 17 (0) +[1]: 0 <---- 18 (-3) +[1]: 0 <---- 16 (0) +[1]: 0 <---- 14 (-2) +[1]: 1 <---- 14 (0) +[1]: 1 <---- 15 (0) +[1]: 1 <---- 21 (0) +[1]: 1 <---- 24 (0) +[1]: 1 <---- 22 (-1) +[1]: 1 <---- 23 (-1) +[1]: 14 <---- 33 (0) +[1]: 14 <---- 39 (0) +[1]: 14 <---- 35 (-1) +[1]: 14 <---- 37 (-1) +[1]: 15 <---- 34 (0) +[1]: 15 <---- 40 (0) +[1]: 15 <---- 36 (-1) +[1]: 15 <---- 38 (-1) +[1]: 16 <---- 29 (0) +[1]: 16 <---- 32 (0) +[1]: 16 <---- 30 (-1) +[1]: 16 <---- 31 (-1) +[1]: 17 <---- 25 (0) +[1]: 17 <---- 31 (0) +[1]: 17 <---- 27 (-1) +[1]: 17 <---- 37 (-1) +[1]: 18 <---- 26 (0) +[1]: 18 <---- 32 (0) +[1]: 18 <---- 28 (-1) +[1]: 18 <---- 39 (-1) +[1]: 19 <---- 25 (0) +[1]: 19 <---- 29 (0) +[1]: 19 <---- 26 (-1) +[1]: 19 <---- 33 (-1) +[1]: 20 <---- 27 (0) +[1]: 20 <---- 30 (0) +[1]: 20 <---- 28 (-1) +[1]: 20 <---- 35 (-1) +[1]: 21 <---- 33 (0) +[1]: 21 <---- 34 (0) +[1]: 21 <---- 41 (0) +[1]: 21 <---- 42 (0) +[1]: 22 <---- 35 (0) +[1]: 22 <---- 36 (0) +[1]: 22 <---- 43 (0) +[1]: 22 <---- 44 (0) +[1]: 23 <---- 37 (0) +[1]: 23 <---- 38 (0) +[1]: 23 <---- 41 (0) +[1]: 23 <---- 43 (0) +[1]: 24 <---- 39 (0) +[1]: 24 <---- 40 (0) +[1]: 24 <---- 42 (0) +[1]: 24 <---- 44 (0) +[1]: 25 <---- 2 (0) +[1]: 25 <---- 10 (0) +[1]: 26 <---- 4 (0) +[1]: 26 <---- 11 (0) +[1]: 27 <---- 6 (0) +[1]: 27 <---- 12 (0) +[1]: 28 <---- 8 (0) +[1]: 28 <---- 13 (0) +[1]: 29 <---- 10 (0) +[1]: 29 <---- 11 (0) +[1]: 30 <---- 12 (0) +[1]: 30 <---- 13 (0) +[1]: 31 <---- 10 (0) +[1]: 31 <---- 12 (0) +[1]: 32 <---- 11 (0) +[1]: 32 <---- 13 (0) +[1]: 33 <---- 2 (0) +[1]: 33 <---- 4 (0) +[1]: 34 <---- 3 (0) +[1]: 34 <---- 5 (0) +[1]: 35 <---- 6 (0) +[1]: 35 <---- 8 (0) +[1]: 36 <---- 7 (0) +[1]: 36 <---- 9 (0) +[1]: 37 <---- 2 (0) +[1]: 37 <---- 6 (0) +[1]: 38 <---- 3 (0) +[1]: 38 <---- 7 (0) +[1]: 39 <---- 4 (0) +[1]: 39 <---- 8 (0) +[1]: 40 <---- 5 (0) +[1]: 40 <---- 9 (0) +[1]: 41 <---- 2 (0) +[1]: 41 <---- 3 (0) +[1]: 42 <---- 4 (0) +[1]: 42 
<---- 5 (0) +[1]: 43 <---- 6 (0) +[1]: 43 <---- 7 (0) +[1]: 44 <---- 8 (0) +[1]: 44 <---- 9 (0) +[2] Max cone size: 6 +[2]: 0 <---- 19 (-2) +[2]: 0 <---- 20 (0) +[2]: 0 <---- 17 (0) +[2]: 0 <---- 18 (-3) +[2]: 0 <---- 16 (0) +[2]: 0 <---- 14 (-2) +[2]: 1 <---- 15 (0) +[2]: 1 <---- 16 (0) +[2]: 1 <---- 21 (0) +[2]: 1 <---- 24 (0) +[2]: 1 <---- 22 (-1) +[2]: 1 <---- 23 (-1) +[2]: 14 <---- 29 (0) +[2]: 14 <---- 32 (0) +[2]: 14 <---- 30 (-1) +[2]: 14 <---- 31 (-1) +[2]: 15 <---- 33 (0) +[2]: 15 <---- 39 (0) +[2]: 15 <---- 35 (-1) +[2]: 15 <---- 37 (-1) +[2]: 16 <---- 34 (0) +[2]: 16 <---- 40 (0) +[2]: 16 <---- 36 (-1) +[2]: 16 <---- 38 (-1) +[2]: 17 <---- 25 (0) +[2]: 17 <---- 38 (0) +[2]: 17 <---- 27 (-1) +[2]: 17 <---- 31 (-1) +[2]: 18 <---- 26 (0) +[2]: 18 <---- 40 (0) +[2]: 18 <---- 28 (-1) +[2]: 18 <---- 32 (-1) +[2]: 19 <---- 25 (0) +[2]: 19 <---- 34 (0) +[2]: 19 <---- 26 (-1) +[2]: 19 <---- 29 (-1) +[2]: 20 <---- 27 (0) +[2]: 20 <---- 36 (0) +[2]: 20 <---- 28 (-1) +[2]: 20 <---- 30 (-1) +[2]: 21 <---- 33 (0) +[2]: 21 <---- 34 (0) +[2]: 21 <---- 41 (0) +[2]: 21 <---- 42 (0) +[2]: 22 <---- 35 (0) +[2]: 22 <---- 36 (0) +[2]: 22 <---- 43 (0) +[2]: 22 <---- 44 (0) +[2]: 23 <---- 37 (0) +[2]: 23 <---- 38 (0) +[2]: 23 <---- 41 (0) +[2]: 23 <---- 43 (0) +[2]: 24 <---- 39 (0) +[2]: 24 <---- 40 (0) +[2]: 24 <---- 42 (0) +[2]: 24 <---- 44 (0) +[2]: 25 <---- 2 (0) +[2]: 25 <---- 7 (0) +[2]: 26 <---- 3 (0) +[2]: 26 <---- 9 (0) +[2]: 27 <---- 4 (0) +[2]: 27 <---- 11 (0) +[2]: 28 <---- 5 (0) +[2]: 28 <---- 13 (0) +[2]: 29 <---- 2 (0) +[2]: 29 <---- 3 (0) +[2]: 30 <---- 4 (0) +[2]: 30 <---- 5 (0) +[2]: 31 <---- 2 (0) +[2]: 31 <---- 4 (0) +[2]: 32 <---- 3 (0) +[2]: 32 <---- 5 (0) +[2]: 33 <---- 6 (0) +[2]: 33 <---- 8 (0) +[2]: 34 <---- 7 (0) +[2]: 34 <---- 9 (0) +[2]: 35 <---- 10 (0) +[2]: 35 <---- 12 (0) +[2]: 36 <---- 11 (0) +[2]: 36 <---- 13 (0) +[2]: 37 <---- 6 (0) +[2]: 37 <---- 10 (0) +[2]: 38 <---- 7 (0) +[2]: 38 <---- 11 (0) +[2]: 39 <---- 8 (0) +[2]: 39 <---- 12 (0) +[2]: 40 <---- 9 (0) +[2]: 40 <---- 13 (0) +[2]: 41 <---- 6 (0) +[2]: 41 <---- 7 (0) +[2]: 42 <---- 8 (0) +[2]: 42 <---- 9 (0) +[2]: 43 <---- 10 (0) +[2]: 43 <---- 11 (0) +[2]: 44 <---- 12 (0) +[2]: 44 <---- 13 (0) +[3] Max cone size: 6 +[3]: 0 <---- 19 (-2) +[3]: 0 <---- 20 (0) +[3]: 0 <---- 17 (0) +[3]: 0 <---- 18 (-3) +[3]: 0 <---- 16 (0) +[3]: 0 <---- 14 (-2) +[3]: 1 <---- 14 (0) +[3]: 1 <---- 15 (0) +[3]: 1 <---- 21 (0) +[3]: 1 <---- 24 (0) +[3]: 1 <---- 22 (-1) +[3]: 1 <---- 23 (-1) +[3]: 14 <---- 33 (0) +[3]: 14 <---- 39 (0) +[3]: 14 <---- 35 (-1) +[3]: 14 <---- 37 (-1) +[3]: 15 <---- 34 (0) +[3]: 15 <---- 40 (0) +[3]: 15 <---- 36 (-1) +[3]: 15 <---- 38 (-1) +[3]: 16 <---- 29 (0) +[3]: 16 <---- 32 (0) +[3]: 16 <---- 30 (-1) +[3]: 16 <---- 31 (-1) +[3]: 17 <---- 25 (0) +[3]: 17 <---- 31 (0) +[3]: 17 <---- 27 (-1) +[3]: 17 <---- 37 (-1) +[3]: 18 <---- 26 (0) +[3]: 18 <---- 32 (0) +[3]: 18 <---- 28 (-1) +[3]: 18 <---- 39 (-1) +[3]: 19 <---- 25 (0) +[3]: 19 <---- 29 (0) +[3]: 19 <---- 26 (-1) +[3]: 19 <---- 33 (-1) +[3]: 20 <---- 27 (0) +[3]: 20 <---- 30 (0) +[3]: 20 <---- 28 (-1) +[3]: 20 <---- 35 (-1) +[3]: 21 <---- 33 (0) +[3]: 21 <---- 34 (0) +[3]: 21 <---- 41 (0) +[3]: 21 <---- 42 (0) +[3]: 22 <---- 35 (0) +[3]: 22 <---- 36 (0) +[3]: 22 <---- 43 (0) +[3]: 22 <---- 44 (0) +[3]: 23 <---- 37 (0) +[3]: 23 <---- 38 (0) +[3]: 23 <---- 41 (0) +[3]: 23 <---- 43 (0) +[3]: 24 <---- 39 (0) +[3]: 24 <---- 40 (0) +[3]: 24 <---- 42 (0) +[3]: 24 <---- 44 (0) +[3]: 25 <---- 2 (0) +[3]: 25 <---- 10 (0) +[3]: 26 <---- 4 (0) +[3]: 26 <---- 11 
(0) +[3]: 27 <---- 6 (0) +[3]: 27 <---- 12 (0) +[3]: 28 <---- 8 (0) +[3]: 28 <---- 13 (0) +[3]: 29 <---- 10 (0) +[3]: 29 <---- 11 (0) +[3]: 30 <---- 12 (0) +[3]: 30 <---- 13 (0) +[3]: 31 <---- 10 (0) +[3]: 31 <---- 12 (0) +[3]: 32 <---- 11 (0) +[3]: 32 <---- 13 (0) +[3]: 33 <---- 2 (0) +[3]: 33 <---- 4 (0) +[3]: 34 <---- 3 (0) +[3]: 34 <---- 5 (0) +[3]: 35 <---- 6 (0) +[3]: 35 <---- 8 (0) +[3]: 36 <---- 7 (0) +[3]: 36 <---- 9 (0) +[3]: 37 <---- 2 (0) +[3]: 37 <---- 6 (0) +[3]: 38 <---- 3 (0) +[3]: 38 <---- 7 (0) +[3]: 39 <---- 4 (0) +[3]: 39 <---- 8 (0) +[3]: 40 <---- 5 (0) +[3]: 40 <---- 9 (0) +[3]: 41 <---- 2 (0) +[3]: 41 <---- 3 (0) +[3]: 42 <---- 4 (0) +[3]: 42 <---- 5 (0) +[3]: 43 <---- 6 (0) +[3]: 43 <---- 7 (0) +[3]: 44 <---- 8 (0) +[3]: 44 <---- 9 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 2) dim 3 offset 0 0. 0. 0. + ( 3) dim 3 offset 3 0. 1. 0. + ( 4) dim 3 offset 6 0. 0. 1. + ( 5) dim 3 offset 9 0. 1. 1. + ( 6) dim 3 offset 12 1. 0. 0. + ( 7) dim 3 offset 15 1. 0. 0. + ( 8) dim 3 offset 18 1. 1. 0. + ( 9) dim 3 offset 21 1. 1. 0. + ( 10) dim 3 offset 24 1. 0. 1. + ( 11) dim 3 offset 27 1. 0. 1. + ( 12) dim 3 offset 30 1. 1. 1. + ( 13) dim 3 offset 33 1. 1. 1. +Process 1: + ( 2) dim 3 offset 0 1. 0. 0. + ( 3) dim 3 offset 3 1. 0. 0. + ( 4) dim 3 offset 6 1. 1. 0. + ( 5) dim 3 offset 9 1. 1. 0. + ( 6) dim 3 offset 12 1. 0. 1. + ( 7) dim 3 offset 15 1. 0. 1. + ( 8) dim 3 offset 18 1. 1. 1. + ( 9) dim 3 offset 21 1. 1. 1. + ( 10) dim 3 offset 24 2. 0. 0. + ( 11) dim 3 offset 27 2. 1. 0. + ( 12) dim 3 offset 30 2. 0. 1. + ( 13) dim 3 offset 33 2. 1. 1. +Process 2: + ( 2) dim 3 offset 0 0. 0. 1. + ( 3) dim 3 offset 3 0. 1. 1. + ( 4) dim 3 offset 6 0. 0. 2. + ( 5) dim 3 offset 9 0. 1. 2. + ( 6) dim 3 offset 12 1. 0. 1. + ( 7) dim 3 offset 15 1. 0. 1. + ( 8) dim 3 offset 18 1. 1. 1. + ( 9) dim 3 offset 21 1. 1. 1. + ( 10) dim 3 offset 24 1. 0. 2. + ( 11) dim 3 offset 27 1. 0. 2. + ( 12) dim 3 offset 30 1. 1. 2. + ( 13) dim 3 offset 33 1. 1. 2. +Process 3: + ( 2) dim 3 offset 0 1. 0. 1. + ( 3) dim 3 offset 3 1. 0. 1. + ( 4) dim 3 offset 6 1. 1. 1. + ( 5) dim 3 offset 9 1. 1. 1. + ( 6) dim 3 offset 12 1. 0. 2. + ( 7) dim 3 offset 15 1. 0. 2. + ( 8) dim 3 offset 18 1. 1. 2. + ( 9) dim 3 offset 21 1. 1. 2. + ( 10) dim 3 offset 24 2. 0. 1. + ( 11) dim 3 offset 27 2. 1. 1. + ( 12) dim 3 offset 30 2. 0. 2. + ( 13) dim 3 offset 33 2. 1. 2. 
+Labels: +Label 'celltype': +[0]: 0 (7) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 41 (2) +[0]: 42 (2) +[0]: 43 (2) +[0]: 44 (2) +[0]: 14 (4) +[0]: 15 (4) +[0]: 16 (4) +[0]: 17 (4) +[0]: 18 (4) +[0]: 19 (4) +[0]: 20 (4) +[0]: 1 (10) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 21 (5) +[0]: 22 (5) +[0]: 23 (5) +[0]: 24 (5) +[1]: 0 (7) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 10 (0) +[1]: 11 (0) +[1]: 12 (0) +[1]: 13 (0) +[1]: 41 (2) +[1]: 42 (2) +[1]: 43 (2) +[1]: 44 (2) +[1]: 14 (4) +[1]: 15 (4) +[1]: 16 (4) +[1]: 17 (4) +[1]: 18 (4) +[1]: 19 (4) +[1]: 20 (4) +[1]: 1 (10) +[1]: 25 (1) +[1]: 26 (1) +[1]: 27 (1) +[1]: 28 (1) +[1]: 29 (1) +[1]: 30 (1) +[1]: 31 (1) +[1]: 32 (1) +[1]: 33 (1) +[1]: 34 (1) +[1]: 35 (1) +[1]: 36 (1) +[1]: 37 (1) +[1]: 38 (1) +[1]: 39 (1) +[1]: 40 (1) +[1]: 21 (5) +[1]: 22 (5) +[1]: 23 (5) +[1]: 24 (5) +[2]: 0 (7) +[2]: 2 (0) +[2]: 3 (0) +[2]: 4 (0) +[2]: 5 (0) +[2]: 6 (0) +[2]: 7 (0) +[2]: 8 (0) +[2]: 9 (0) +[2]: 10 (0) +[2]: 11 (0) +[2]: 12 (0) +[2]: 13 (0) +[2]: 41 (2) +[2]: 42 (2) +[2]: 43 (2) +[2]: 44 (2) +[2]: 14 (4) +[2]: 15 (4) +[2]: 16 (4) +[2]: 17 (4) +[2]: 18 (4) +[2]: 19 (4) +[2]: 20 (4) +[2]: 1 (10) +[2]: 25 (1) +[2]: 26 (1) +[2]: 27 (1) +[2]: 28 (1) +[2]: 29 (1) +[2]: 30 (1) +[2]: 31 (1) +[2]: 32 (1) +[2]: 33 (1) +[2]: 34 (1) +[2]: 35 (1) +[2]: 36 (1) +[2]: 37 (1) +[2]: 38 (1) +[2]: 39 (1) +[2]: 40 (1) +[2]: 21 (5) +[2]: 22 (5) +[2]: 23 (5) +[2]: 24 (5) +[3]: 0 (7) +[3]: 2 (0) +[3]: 3 (0) +[3]: 4 (0) +[3]: 5 (0) +[3]: 6 (0) +[3]: 7 (0) +[3]: 8 (0) +[3]: 9 (0) +[3]: 10 (0) +[3]: 11 (0) +[3]: 12 (0) +[3]: 13 (0) +[3]: 41 (2) +[3]: 42 (2) +[3]: 43 (2) +[3]: 44 (2) +[3]: 14 (4) +[3]: 15 (4) +[3]: 16 (4) +[3]: 17 (4) +[3]: 18 (4) +[3]: 19 (4) +[3]: 20 (4) +[3]: 1 (10) +[3]: 25 (1) +[3]: 26 (1) +[3]: 27 (1) +[3]: 28 (1) +[3]: 29 (1) +[3]: 30 (1) +[3]: 31 (1) +[3]: 32 (1) +[3]: 33 (1) +[3]: 34 (1) +[3]: 35 (1) +[3]: 36 (1) +[3]: 37 (1) +[3]: 38 (1) +[3]: 39 (1) +[3]: 40 (1) +[3]: 21 (5) +[3]: 22 (5) +[3]: 23 (5) +[3]: 24 (5) +Label 'marker': +[0]: 1 (1) +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[1]: 1 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 25 (1) +[1]: 26 (1) +[1]: 27 (1) +[1]: 28 (1) +[1]: 29 (1) +[1]: 30 (1) +[1]: 31 (1) +[1]: 32 (1) +[1]: 33 (1) +[1]: 34 (1) +[1]: 35 (1) +[1]: 36 (1) +[1]: 37 (1) +[1]: 38 (1) +[1]: 39 (1) +[1]: 40 (1) +[1]: 41 (1) +[1]: 42 (1) +[1]: 43 (1) +[1]: 44 (1) +[2]: 1 (1) +[2]: 2 (1) +[2]: 3 (1) +[2]: 4 (1) +[2]: 5 (1) +[2]: 6 (1) +[2]: 7 
(1) +[2]: 8 (1) +[2]: 9 (1) +[2]: 10 (1) +[2]: 11 (1) +[2]: 12 (1) +[2]: 13 (1) +[2]: 14 (1) +[2]: 15 (1) +[2]: 16 (1) +[2]: 17 (1) +[2]: 18 (1) +[2]: 19 (1) +[2]: 20 (1) +[2]: 21 (1) +[2]: 22 (1) +[2]: 23 (1) +[2]: 24 (1) +[2]: 25 (1) +[2]: 26 (1) +[2]: 27 (1) +[2]: 28 (1) +[2]: 29 (1) +[2]: 30 (1) +[2]: 31 (1) +[2]: 32 (1) +[2]: 33 (1) +[2]: 34 (1) +[2]: 35 (1) +[2]: 36 (1) +[2]: 37 (1) +[2]: 38 (1) +[2]: 39 (1) +[2]: 40 (1) +[2]: 41 (1) +[2]: 42 (1) +[2]: 43 (1) +[2]: 44 (1) +[3]: 1 (1) +[3]: 2 (1) +[3]: 3 (1) +[3]: 4 (1) +[3]: 5 (1) +[3]: 6 (1) +[3]: 7 (1) +[3]: 8 (1) +[3]: 9 (1) +[3]: 10 (1) +[3]: 11 (1) +[3]: 12 (1) +[3]: 13 (1) +[3]: 14 (1) +[3]: 15 (1) +[3]: 16 (1) +[3]: 17 (1) +[3]: 18 (1) +[3]: 19 (1) +[3]: 20 (1) +[3]: 21 (1) +[3]: 22 (1) +[3]: 23 (1) +[3]: 24 (1) +[3]: 25 (1) +[3]: 26 (1) +[3]: 27 (1) +[3]: 28 (1) +[3]: 29 (1) +[3]: 30 (1) +[3]: 31 (1) +[3]: 32 (1) +[3]: 33 (1) +[3]: 34 (1) +[3]: 35 (1) +[3]: 36 (1) +[3]: 37 (1) +[3]: 38 (1) +[3]: 39 (1) +[3]: 40 (1) +[3]: 41 (1) +[3]: 42 (1) +[3]: 43 (1) +[3]: 44 (1) +Label 'Face Sets': +[0]: 14 (6) +[0]: 1 (5) +[0]: 15 (5) +[0]: 16 (5) +[0]: 17 (3) +[0]: 18 (4) +[0]: 19 (1) +[0]: 20 (2) +[1]: 1 (6) +[1]: 14 (6) +[1]: 15 (6) +[1]: 16 (5) +[1]: 17 (3) +[1]: 18 (4) +[1]: 19 (1) +[1]: 20 (2) +[2]: 14 (6) +[2]: 1 (5) +[2]: 15 (5) +[2]: 16 (5) +[2]: 17 (3) +[2]: 18 (4) +[2]: 19 (1) +[2]: 20 (2) +[3]: 1 (6) +[3]: 14 (6) +[3]: 15 (6) +[3]: 16 (5) +[3]: 17 (3) +[3]: 18 (4) +[3]: 19 (1) +[3]: 20 (2) +Label 'fault': +[0]: 1 (2) +[0]: 15 (2) +[0]: 16 (2) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 41 (0) +[0]: 42 (0) +[0]: 43 (0) +[0]: 44 (0) +[0]: 0 (103) +[0]: 17 (102) +[0]: 18 (102) +[0]: 19 (102) +[0]: 20 (102) +[0]: 25 (101) +[0]: 26 (101) +[0]: 27 (101) +[0]: 28 (101) +[1]: 1 (2) +[1]: 14 (2) +[1]: 15 (2) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 33 (1) +[1]: 34 (1) +[1]: 35 (1) +[1]: 36 (1) +[1]: 37 (1) +[1]: 38 (1) +[1]: 39 (1) +[1]: 40 (1) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 41 (0) +[1]: 42 (0) +[1]: 43 (0) +[1]: 44 (0) +[1]: 0 (-103) +[1]: 17 (-102) +[1]: 18 (-102) +[1]: 19 (-102) +[1]: 20 (-102) +[1]: 25 (-101) +[1]: 26 (-101) +[1]: 27 (-101) +[1]: 28 (-101) +[2]: 1 (2) +[2]: 15 (2) +[2]: 16 (2) +[2]: 21 (1) +[2]: 22 (1) +[2]: 23 (1) +[2]: 24 (1) +[2]: 33 (1) +[2]: 34 (1) +[2]: 35 (1) +[2]: 36 (1) +[2]: 37 (1) +[2]: 38 (1) +[2]: 39 (1) +[2]: 40 (1) +[2]: 6 (0) +[2]: 7 (0) +[2]: 8 (0) +[2]: 9 (0) +[2]: 10 (0) +[2]: 11 (0) +[2]: 12 (0) +[2]: 13 (0) +[2]: 41 (0) +[2]: 42 (0) +[2]: 43 (0) +[2]: 44 (0) +[2]: 0 (103) +[2]: 17 (102) +[2]: 18 (102) +[2]: 19 (102) +[2]: 20 (102) +[2]: 25 (101) +[2]: 26 (101) +[2]: 27 (101) +[2]: 28 (101) +[3]: 1 (2) +[3]: 14 (2) +[3]: 15 (2) +[3]: 21 (1) +[3]: 22 (1) +[3]: 23 (1) +[3]: 24 (1) +[3]: 33 (1) +[3]: 34 (1) +[3]: 35 (1) +[3]: 36 (1) +[3]: 37 (1) +[3]: 38 (1) +[3]: 39 (1) +[3]: 40 (1) +[3]: 2 (0) +[3]: 3 (0) +[3]: 4 (0) +[3]: 5 (0) +[3]: 6 (0) +[3]: 7 (0) +[3]: 8 (0) +[3]: 9 (0) +[3]: 41 (0) +[3]: 42 (0) +[3]: 43 (0) +[3]: 44 (0) +[3]: 0 (-103) +[3]: 17 (-102) +[3]: 18 (-102) +[3]: 19 (-102) +[3]: 20 (-102) +[3]: 25 (-101) +[3]: 26 (-101) +[3]: 27 (-101) +[3]: 28 (-101) +PetscSF Object: 4 MPI processes + type: basic + [0] Number of roots=45, leaves=33, remote ranks=3 + [0] 1 <- (1,1) + [0] 4 <- 
(2,2) + [0] 5 <- (2,3) + [0] 6 <- (1,2) + [0] 7 <- (1,3) + [0] 8 <- (1,4) + [0] 9 <- (1,5) + [0] 10 <- (3,2) + [0] 11 <- (3,3) + [0] 12 <- (3,4) + [0] 13 <- (3,5) + [0] 15 <- (1,14) + [0] 16 <- (1,15) + [0] 20 <- (2,19) + [0] 21 <- (1,21) + [0] 22 <- (3,21) + [0] 23 <- (1,23) + [0] 24 <- (1,24) + [0] 27 <- (2,25) + [0] 28 <- (2,26) + [0] 30 <- (2,29) + [0] 33 <- (1,33) + [0] 34 <- (1,34) + [0] 35 <- (3,33) + [0] 36 <- (3,34) + [0] 37 <- (1,37) + [0] 38 <- (1,38) + [0] 39 <- (1,39) + [0] 40 <- (1,40) + [0] 41 <- (1,41) + [0] 42 <- (1,42) + [0] 43 <- (3,41) + [0] 44 <- (3,42) + [1] Number of roots=45, leaves=15, remote ranks=1 + [1] 6 <- (3,2) + [1] 7 <- (3,3) + [1] 8 <- (3,4) + [1] 9 <- (3,5) + [1] 12 <- (3,10) + [1] 13 <- (3,11) + [1] 20 <- (3,19) + [1] 22 <- (3,21) + [1] 27 <- (3,25) + [1] 28 <- (3,26) + [1] 30 <- (3,29) + [1] 35 <- (3,33) + [1] 36 <- (3,34) + [1] 43 <- (3,41) + [1] 44 <- (3,42) + [2] Number of roots=45, leaves=27, remote ranks=1 + [2] 1 <- (3,1) + [2] 6 <- (3,2) + [2] 7 <- (3,3) + [2] 8 <- (3,4) + [2] 9 <- (3,5) + [2] 10 <- (3,6) + [2] 11 <- (3,7) + [2] 12 <- (3,8) + [2] 13 <- (3,9) + [2] 15 <- (3,14) + [2] 16 <- (3,15) + [2] 21 <- (3,21) + [2] 22 <- (3,22) + [2] 23 <- (3,23) + [2] 24 <- (3,24) + [2] 33 <- (3,33) + [2] 34 <- (3,34) + [2] 35 <- (3,35) + [2] 36 <- (3,36) + [2] 37 <- (3,37) + [2] 38 <- (3,38) + [2] 39 <- (3,39) + [2] 40 <- (3,40) + [2] 41 <- (3,41) + [2] 42 <- (3,42) + [2] 43 <- (3,43) + [2] 44 <- (3,44) + [3] Number of roots=45, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 18 edges + [0] 1 <- 1 + [0] 6 <- 2 + [0] 7 <- 3 + [0] 8 <- 4 + [0] 9 <- 5 + [0] 15 <- 14 + [0] 16 <- 15 + [0] 21 <- 21 + [0] 23 <- 23 + [0] 24 <- 24 + [0] 33 <- 33 + [0] 34 <- 34 + [0] 37 <- 37 + [0] 38 <- 38 + [0] 39 <- 39 + [0] 40 <- 40 + [0] 41 <- 41 + [0] 42 <- 42 + [0] 2: 6 edges + [0] 4 <- 2 + [0] 5 <- 3 + [0] 20 <- 19 + [0] 27 <- 25 + [0] 28 <- 26 + [0] 30 <- 29 + [0] 3: 9 edges + [0] 10 <- 2 + [0] 11 <- 3 + [0] 12 <- 4 + [0] 13 <- 5 + [0] 22 <- 21 + [0] 35 <- 33 + [0] 36 <- 34 + [0] 43 <- 41 + [0] 44 <- 42 + [1] Roots referenced by my leaves, by rank + [1] 3: 15 edges + [1] 6 <- 2 + [1] 7 <- 3 + [1] 8 <- 4 + [1] 9 <- 5 + [1] 12 <- 10 + [1] 13 <- 11 + [1] 20 <- 19 + [1] 22 <- 21 + [1] 27 <- 25 + [1] 28 <- 26 + [1] 30 <- 29 + [1] 35 <- 33 + [1] 36 <- 34 + [1] 43 <- 41 + [1] 44 <- 42 + [2] Roots referenced by my leaves, by rank + [2] 3: 27 edges + [2] 1 <- 1 + [2] 6 <- 2 + [2] 7 <- 3 + [2] 8 <- 4 + [2] 9 <- 5 + [2] 10 <- 6 + [2] 11 <- 7 + [2] 12 <- 8 + [2] 13 <- 9 + [2] 15 <- 14 + [2] 16 <- 15 + [2] 21 <- 21 + [2] 22 <- 22 + [2] 23 <- 23 + [2] 24 <- 24 + [2] 33 <- 33 + [2] 34 <- 34 + [2] 35 <- 35 + [2] 36 <- 36 + [2] 37 <- 37 + [2] 38 <- 38 + [2] 39 <- 39 + [2] 40 <- 40 + [2] 41 <- 41 + [2] 42 <- 42 + [2] 43 <- 43 + [2] 44 <- 44 + [3] Roots referenced by my leaves, by rank + MultiSF sort=rank-order diff --git a/src/dm/impls/plex/tests/output/ex69_hex_3.out b/src/dm/impls/plex/tests/output/ex69_hex_3.out new file mode 100644 index 00000000000..ad6323996bc --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_hex_3.out @@ -0,0 +1,2508 @@ +DM Object: box (f0_) 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 5 +[0]: 4 ----> 42 +[0]: 4 ----> 54 +[0]: 4 ----> 63 +[0]: 5 ----> 42 +[0]: 5 ----> 43 +[0]: 5 ----> 55 +[0]: 5 ----> 65 +[0]: 6 ----> 43 +[0]: 6 ----> 56 +[0]: 6 ----> 67 +[0]: 7 ----> 44 +[0]: 7 ----> 54 +[0]: 7 ----> 69 +[0]: 8 ----> 44 +[0]: 8 ----> 45 +[0]: 8 ----> 55 +[0]: 8 ----> 71 +[0]: 9 ----> 45 +[0]: 
9 ----> 56 +[0]: 9 ----> 73 +[0]: 10 ----> 46 +[0]: 10 ----> 57 +[0]: 10 ----> 63 +[0]: 10 ----> 64 +[0]: 11 ----> 46 +[0]: 11 ----> 47 +[0]: 11 ----> 58 +[0]: 11 ----> 65 +[0]: 11 ----> 66 +[0]: 12 ----> 47 +[0]: 12 ----> 59 +[0]: 12 ----> 67 +[0]: 12 ----> 68 +[0]: 13 ----> 48 +[0]: 13 ----> 57 +[0]: 13 ----> 69 +[0]: 13 ----> 70 +[0]: 14 ----> 48 +[0]: 14 ----> 49 +[0]: 14 ----> 58 +[0]: 14 ----> 71 +[0]: 14 ----> 72 +[0]: 15 ----> 49 +[0]: 15 ----> 59 +[0]: 15 ----> 73 +[0]: 15 ----> 74 +[0]: 16 ----> 50 +[0]: 16 ----> 60 +[0]: 16 ----> 64 +[0]: 17 ----> 50 +[0]: 17 ----> 51 +[0]: 17 ----> 61 +[0]: 17 ----> 66 +[0]: 18 ----> 51 +[0]: 18 ----> 62 +[0]: 18 ----> 68 +[0]: 19 ----> 52 +[0]: 19 ----> 60 +[0]: 19 ----> 70 +[0]: 20 ----> 52 +[0]: 20 ----> 53 +[0]: 20 ----> 61 +[0]: 20 ----> 72 +[0]: 21 ----> 53 +[0]: 21 ----> 62 +[0]: 21 ----> 74 +[0]: 22 ----> 0 +[0]: 23 ----> 0 +[0]: 23 ----> 1 +[0]: 24 ----> 1 +[0]: 25 ----> 2 +[0]: 26 ----> 2 +[0]: 26 ----> 3 +[0]: 27 ----> 3 +[0]: 28 ----> 0 +[0]: 29 ----> 0 +[0]: 30 ----> 1 +[0]: 31 ----> 1 +[0]: 32 ----> 2 +[0]: 33 ----> 2 +[0]: 34 ----> 3 +[0]: 35 ----> 3 +[0]: 36 ----> 0 +[0]: 37 ----> 0 +[0]: 37 ----> 2 +[0]: 38 ----> 2 +[0]: 39 ----> 1 +[0]: 40 ----> 1 +[0]: 40 ----> 3 +[0]: 41 ----> 3 +[0]: 42 ----> 28 +[0]: 42 ----> 36 +[0]: 43 ----> 30 +[0]: 43 ----> 39 +[0]: 44 ----> 29 +[0]: 44 ----> 36 +[0]: 45 ----> 31 +[0]: 45 ----> 39 +[0]: 46 ----> 28 +[0]: 46 ----> 32 +[0]: 46 ----> 37 +[0]: 47 ----> 30 +[0]: 47 ----> 34 +[0]: 47 ----> 40 +[0]: 48 ----> 29 +[0]: 48 ----> 33 +[0]: 48 ----> 37 +[0]: 49 ----> 31 +[0]: 49 ----> 35 +[0]: 49 ----> 40 +[0]: 50 ----> 32 +[0]: 50 ----> 38 +[0]: 51 ----> 34 +[0]: 51 ----> 41 +[0]: 52 ----> 33 +[0]: 52 ----> 38 +[0]: 53 ----> 35 +[0]: 53 ----> 41 +[0]: 54 ----> 22 +[0]: 54 ----> 36 +[0]: 55 ----> 23 +[0]: 55 ----> 36 +[0]: 55 ----> 39 +[0]: 56 ----> 24 +[0]: 56 ----> 39 +[0]: 57 ----> 22 +[0]: 57 ----> 25 +[0]: 57 ----> 37 +[0]: 58 ----> 23 +[0]: 58 ----> 26 +[0]: 58 ----> 37 +[0]: 58 ----> 40 +[0]: 59 ----> 24 +[0]: 59 ----> 27 +[0]: 59 ----> 40 +[0]: 60 ----> 25 +[0]: 60 ----> 38 +[0]: 61 ----> 26 +[0]: 61 ----> 38 +[0]: 61 ----> 41 +[0]: 62 ----> 27 +[0]: 62 ----> 41 +[0]: 63 ----> 22 +[0]: 63 ----> 28 +[0]: 64 ----> 25 +[0]: 64 ----> 32 +[0]: 65 ----> 23 +[0]: 65 ----> 28 +[0]: 65 ----> 30 +[0]: 66 ----> 26 +[0]: 66 ----> 32 +[0]: 66 ----> 34 +[0]: 67 ----> 24 +[0]: 67 ----> 30 +[0]: 68 ----> 27 +[0]: 68 ----> 34 +[0]: 69 ----> 22 +[0]: 69 ----> 29 +[0]: 70 ----> 25 +[0]: 70 ----> 33 +[0]: 71 ----> 23 +[0]: 71 ----> 29 +[0]: 71 ----> 31 +[0]: 72 ----> 26 +[0]: 72 ----> 33 +[0]: 72 ----> 35 +[0]: 73 ----> 24 +[0]: 73 ----> 31 +[0]: 74 ----> 27 +[0]: 74 ----> 35 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 36 (-2) +[0]: 0 <---- 37 (0) +[0]: 0 <---- 28 (0) +[0]: 0 <---- 29 (-3) +[0]: 0 <---- 23 (0) +[0]: 0 <---- 22 (-2) +[0]: 1 <---- 39 (-2) +[0]: 1 <---- 40 (0) +[0]: 1 <---- 30 (0) +[0]: 1 <---- 31 (-3) +[0]: 1 <---- 24 (0) +[0]: 1 <---- 23 (-2) +[0]: 2 <---- 37 (-2) +[0]: 2 <---- 38 (0) +[0]: 2 <---- 32 (0) +[0]: 2 <---- 33 (-3) +[0]: 2 <---- 26 (0) +[0]: 2 <---- 25 (-2) +[0]: 3 <---- 40 (-2) +[0]: 3 <---- 41 (0) +[0]: 3 <---- 34 (0) +[0]: 3 <---- 35 (-3) +[0]: 3 <---- 27 (0) +[0]: 3 <---- 26 (-2) +[0]: 22 <---- 54 (0) +[0]: 22 <---- 69 (0) +[0]: 22 <---- 57 (-1) +[0]: 22 <---- 63 (-1) +[0]: 23 <---- 55 (0) +[0]: 23 <---- 71 (0) +[0]: 23 <---- 58 (-1) +[0]: 23 <---- 65 (-1) +[0]: 24 <---- 56 (0) +[0]: 24 <---- 73 (0) +[0]: 24 <---- 59 (-1) +[0]: 24 <---- 67 (-1) +[0]: 25 <---- 57 (0) +[0]: 25 
<---- 70 (0) +[0]: 25 <---- 60 (-1) +[0]: 25 <---- 64 (-1) +[0]: 26 <---- 58 (0) +[0]: 26 <---- 72 (0) +[0]: 26 <---- 61 (-1) +[0]: 26 <---- 66 (-1) +[0]: 27 <---- 59 (0) +[0]: 27 <---- 74 (0) +[0]: 27 <---- 62 (-1) +[0]: 27 <---- 68 (-1) +[0]: 28 <---- 42 (0) +[0]: 28 <---- 65 (0) +[0]: 28 <---- 46 (-1) +[0]: 28 <---- 63 (-1) +[0]: 29 <---- 44 (0) +[0]: 29 <---- 71 (0) +[0]: 29 <---- 48 (-1) +[0]: 29 <---- 69 (-1) +[0]: 30 <---- 43 (0) +[0]: 30 <---- 67 (0) +[0]: 30 <---- 47 (-1) +[0]: 30 <---- 65 (-1) +[0]: 31 <---- 45 (0) +[0]: 31 <---- 73 (0) +[0]: 31 <---- 49 (-1) +[0]: 31 <---- 71 (-1) +[0]: 32 <---- 46 (0) +[0]: 32 <---- 66 (0) +[0]: 32 <---- 50 (-1) +[0]: 32 <---- 64 (-1) +[0]: 33 <---- 48 (0) +[0]: 33 <---- 72 (0) +[0]: 33 <---- 52 (-1) +[0]: 33 <---- 70 (-1) +[0]: 34 <---- 47 (0) +[0]: 34 <---- 68 (0) +[0]: 34 <---- 51 (-1) +[0]: 34 <---- 66 (-1) +[0]: 35 <---- 49 (0) +[0]: 35 <---- 74 (0) +[0]: 35 <---- 53 (-1) +[0]: 35 <---- 72 (-1) +[0]: 36 <---- 42 (0) +[0]: 36 <---- 55 (0) +[0]: 36 <---- 44 (-1) +[0]: 36 <---- 54 (-1) +[0]: 37 <---- 46 (0) +[0]: 37 <---- 58 (0) +[0]: 37 <---- 48 (-1) +[0]: 37 <---- 57 (-1) +[0]: 38 <---- 50 (0) +[0]: 38 <---- 61 (0) +[0]: 38 <---- 52 (-1) +[0]: 38 <---- 60 (-1) +[0]: 39 <---- 43 (0) +[0]: 39 <---- 56 (0) +[0]: 39 <---- 45 (-1) +[0]: 39 <---- 55 (-1) +[0]: 40 <---- 47 (0) +[0]: 40 <---- 59 (0) +[0]: 40 <---- 49 (-1) +[0]: 40 <---- 58 (-1) +[0]: 41 <---- 51 (0) +[0]: 41 <---- 62 (0) +[0]: 41 <---- 53 (-1) +[0]: 41 <---- 61 (-1) +[0]: 42 <---- 4 (0) +[0]: 42 <---- 5 (0) +[0]: 43 <---- 5 (0) +[0]: 43 <---- 6 (0) +[0]: 44 <---- 7 (0) +[0]: 44 <---- 8 (0) +[0]: 45 <---- 8 (0) +[0]: 45 <---- 9 (0) +[0]: 46 <---- 10 (0) +[0]: 46 <---- 11 (0) +[0]: 47 <---- 11 (0) +[0]: 47 <---- 12 (0) +[0]: 48 <---- 13 (0) +[0]: 48 <---- 14 (0) +[0]: 49 <---- 14 (0) +[0]: 49 <---- 15 (0) +[0]: 50 <---- 16 (0) +[0]: 50 <---- 17 (0) +[0]: 51 <---- 17 (0) +[0]: 51 <---- 18 (0) +[0]: 52 <---- 19 (0) +[0]: 52 <---- 20 (0) +[0]: 53 <---- 20 (0) +[0]: 53 <---- 21 (0) +[0]: 54 <---- 4 (0) +[0]: 54 <---- 7 (0) +[0]: 55 <---- 5 (0) +[0]: 55 <---- 8 (0) +[0]: 56 <---- 6 (0) +[0]: 56 <---- 9 (0) +[0]: 57 <---- 10 (0) +[0]: 57 <---- 13 (0) +[0]: 58 <---- 11 (0) +[0]: 58 <---- 14 (0) +[0]: 59 <---- 12 (0) +[0]: 59 <---- 15 (0) +[0]: 60 <---- 16 (0) +[0]: 60 <---- 19 (0) +[0]: 61 <---- 17 (0) +[0]: 61 <---- 20 (0) +[0]: 62 <---- 18 (0) +[0]: 62 <---- 21 (0) +[0]: 63 <---- 4 (0) +[0]: 63 <---- 10 (0) +[0]: 64 <---- 10 (0) +[0]: 64 <---- 16 (0) +[0]: 65 <---- 5 (0) +[0]: 65 <---- 11 (0) +[0]: 66 <---- 11 (0) +[0]: 66 <---- 17 (0) +[0]: 67 <---- 6 (0) +[0]: 67 <---- 12 (0) +[0]: 68 <---- 12 (0) +[0]: 68 <---- 18 (0) +[0]: 69 <---- 7 (0) +[0]: 69 <---- 13 (0) +[0]: 70 <---- 13 (0) +[0]: 70 <---- 19 (0) +[0]: 71 <---- 8 (0) +[0]: 71 <---- 14 (0) +[0]: 72 <---- 14 (0) +[0]: 72 <---- 20 (0) +[0]: 73 <---- 9 (0) +[0]: 73 <---- 15 (0) +[0]: 74 <---- 15 (0) +[0]: 74 <---- 21 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 4) dim 3 offset 0 0. 0. 0. + ( 5) dim 3 offset 3 1. 0. 0. + ( 6) dim 3 offset 6 2. 0. 0. + ( 7) dim 3 offset 9 0. 1. 0. + ( 8) dim 3 offset 12 1. 1. 0. + ( 9) dim 3 offset 15 2. 1. 0. + ( 10) dim 3 offset 18 0. 0. 1. + ( 11) dim 3 offset 21 1. 0. 1. + ( 12) dim 3 offset 24 2. 0. 1. + ( 13) dim 3 offset 27 0. 1. 1. + ( 14) dim 3 offset 30 1. 1. 1. + ( 15) dim 3 offset 33 2. 1. 1. + ( 16) dim 3 offset 36 0. 0. 2. + ( 17) dim 3 offset 39 1. 0. 2. + ( 18) dim 3 offset 42 2. 0. 2. + ( 19) dim 3 offset 45 0. 1. 2. + ( 20) dim 3 offset 48 1. 1. 2. 
+ ( 21) dim 3 offset 51 2. 1. 2. +Labels: +Label 'marker': +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (1) +[0]: 47 (1) +[0]: 48 (1) +[0]: 49 (1) +[0]: 50 (1) +[0]: 51 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +Label 'Face Sets': +[0]: 22 (6) +[0]: 25 (6) +[0]: 24 (5) +[0]: 27 (5) +[0]: 28 (3) +[0]: 30 (3) +[0]: 32 (3) +[0]: 34 (3) +[0]: 29 (4) +[0]: 31 (4) +[0]: 33 (4) +[0]: 35 (4) +[0]: 36 (1) +[0]: 39 (1) +[0]: 38 (2) +[0]: 41 (2) +Label 'fault0': +[0]: 37 (2) +[0]: 40 (2) +[0]: 46 (1) +[0]: 47 (1) +[0]: 48 (1) +[0]: 49 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 0 (103) +[0]: 1 (103) +[0]: 22 (102) +[0]: 23 (102) +[0]: 24 (102) +[0]: 28 (102) +[0]: 29 (102) +[0]: 30 (102) +[0]: 31 (102) +[0]: 2 (-103) +[0]: 3 (-103) +[0]: 25 (-102) +[0]: 26 (-102) +[0]: 27 (-102) +[0]: 32 (-102) +[0]: 33 (-102) +[0]: 34 (-102) +[0]: 35 (-102) +[0]: 63 (101) +[0]: 65 (101) +[0]: 67 (101) +[0]: 69 (101) +[0]: 71 (101) +[0]: 73 (101) +[0]: 64 (-101) +[0]: 66 (-101) +[0]: 68 (-101) +[0]: 70 (-101) +[0]: 72 (-101) +[0]: 74 (-101) +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 2 (7) +[0]: 3 (7) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (4) +[0]: 23 (4) +[0]: 24 (4) +[0]: 25 (4) +[0]: 26 (4) +[0]: 27 (4) +[0]: 28 (4) +[0]: 29 (4) +[0]: 30 (4) +[0]: 31 (4) +[0]: 32 (4) +[0]: 33 (4) +[0]: 34 (4) +[0]: 35 (4) +[0]: 36 (4) +[0]: 37 (4) +[0]: 38 (4) +[0]: 39 (4) +[0]: 40 (4) +[0]: 41 (4) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (1) +[0]: 47 (1) +[0]: 48 (1) +[0]: 49 (1) +[0]: 50 (1) +[0]: 51 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +Label 'fault1': +[0]: 26 (2) +[0]: 61 (1) +[0]: 66 (1) +[0]: 72 (1) +[0]: 17 (0) +[0]: 20 (0) +[0]: 0 (103) +[0]: 2 (103) +[0]: 23 (102) +[0]: 28 (102) +[0]: 29 (102) +[0]: 32 (102) +[0]: 33 (102) +[0]: 37 (102) +[0]: 38 (102) +[0]: 1 (-103) +[0]: 3 (-103) +[0]: 30 (-102) +[0]: 31 (-102) +[0]: 34 (-102) +[0]: 35 (-102) +[0]: 40 (-102) +[0]: 41 (-102) +[0]: 46 (101) +[0]: 48 (101) +[0]: 50 (101) +[0]: 52 (101) +[0]: 65 (101) +[0]: 71 (101) +[0]: 47 (-101) +[0]: 49 (-101) +[0]: 51 (-101) +[0]: 53 (-101) +[0]: 58 (201) +[0]: 11 (200) +[0]: 14 (200) +DM Object: box (f1_) 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 5 +[0]: 6 ----> 59 +[0]: 6 ----> 67 +[0]: 6 ----> 93 +[0]: 7 ----> 59 +[0]: 7 ----> 60 +[0]: 7 ----> 
68 +[0]: 7 ----> 94 +[0]: 8 ----> 60 +[0]: 8 ----> 69 +[0]: 8 ----> 95 +[0]: 9 ----> 61 +[0]: 9 ----> 67 +[0]: 9 ----> 96 +[0]: 10 ----> 61 +[0]: 10 ----> 62 +[0]: 10 ----> 68 +[0]: 10 ----> 97 +[0]: 11 ----> 62 +[0]: 11 ----> 69 +[0]: 11 ----> 98 +[0]: 12 ----> 63 +[0]: 12 ----> 70 +[0]: 12 ----> 73 +[0]: 13 ----> 63 +[0]: 13 ----> 64 +[0]: 13 ----> 71 +[0]: 13 ----> 74 +[0]: 14 ----> 64 +[0]: 14 ----> 72 +[0]: 14 ----> 75 +[0]: 15 ----> 65 +[0]: 15 ----> 70 +[0]: 15 ----> 76 +[0]: 16 ----> 65 +[0]: 16 ----> 66 +[0]: 16 ----> 71 +[0]: 16 ----> 77 +[0]: 17 ----> 66 +[0]: 17 ----> 72 +[0]: 17 ----> 78 +[0]: 18 ----> 73 +[0]: 18 ----> 79 +[0]: 18 ----> 87 +[0]: 18 ----> 99 +[0]: 19 ----> 80 +[0]: 19 ----> 88 +[0]: 19 ----> 93 +[0]: 19 ----> 99 +[0]: 20 ----> 74 +[0]: 20 ----> 79 +[0]: 20 ----> 81 +[0]: 20 ----> 89 +[0]: 20 ----> 100 +[0]: 21 ----> 80 +[0]: 21 ----> 82 +[0]: 21 ----> 90 +[0]: 21 ----> 94 +[0]: 21 ----> 100 +[0]: 22 ----> 75 +[0]: 22 ----> 81 +[0]: 22 ----> 91 +[0]: 22 ----> 101 +[0]: 23 ----> 82 +[0]: 23 ----> 92 +[0]: 23 ----> 95 +[0]: 23 ----> 101 +[0]: 24 ----> 76 +[0]: 24 ----> 83 +[0]: 24 ----> 87 +[0]: 24 ----> 102 +[0]: 25 ----> 84 +[0]: 25 ----> 88 +[0]: 25 ----> 96 +[0]: 25 ----> 102 +[0]: 26 ----> 77 +[0]: 26 ----> 83 +[0]: 26 ----> 85 +[0]: 26 ----> 89 +[0]: 26 ----> 103 +[0]: 27 ----> 84 +[0]: 27 ----> 86 +[0]: 27 ----> 90 +[0]: 27 ----> 97 +[0]: 27 ----> 103 +[0]: 28 ----> 78 +[0]: 28 ----> 85 +[0]: 28 ----> 91 +[0]: 28 ----> 104 +[0]: 29 ----> 86 +[0]: 29 ----> 92 +[0]: 29 ----> 98 +[0]: 29 ----> 104 +[0]: 30 ----> 0 +[0]: 31 ----> 0 +[0]: 31 ----> 1 +[0]: 32 ----> 1 +[0]: 33 ----> 0 +[0]: 34 ----> 0 +[0]: 35 ----> 1 +[0]: 36 ----> 1 +[0]: 37 ----> 2 +[0]: 38 ----> 2 +[0]: 38 ----> 3 +[0]: 39 ----> 3 +[0]: 40 ----> 2 +[0]: 41 ----> 2 +[0]: 42 ----> 3 +[0]: 43 ----> 3 +[0]: 44 ----> 0 +[0]: 45 ----> 2 +[0]: 46 ----> 1 +[0]: 47 ----> 3 +[0]: 48 ----> 2 +[0]: 48 ----> 4 +[0]: 49 ----> 0 +[0]: 49 ----> 4 +[0]: 50 ----> 3 +[0]: 50 ----> 5 +[0]: 51 ----> 1 +[0]: 51 ----> 5 +[0]: 52 ----> 4 +[0]: 53 ----> 5 +[0]: 54 ----> 4 +[0]: 55 ----> 5 +[0]: 56 ----> 4 +[0]: 57 ----> 4 +[0]: 57 ----> 5 +[0]: 58 ----> 5 +[0]: 59 ----> 33 +[0]: 59 ----> 44 +[0]: 60 ----> 35 +[0]: 60 ----> 46 +[0]: 61 ----> 34 +[0]: 61 ----> 44 +[0]: 62 ----> 36 +[0]: 62 ----> 46 +[0]: 63 ----> 40 +[0]: 63 ----> 45 +[0]: 64 ----> 42 +[0]: 64 ----> 47 +[0]: 65 ----> 41 +[0]: 65 ----> 45 +[0]: 66 ----> 43 +[0]: 66 ----> 47 +[0]: 67 ----> 30 +[0]: 67 ----> 44 +[0]: 68 ----> 31 +[0]: 68 ----> 44 +[0]: 68 ----> 46 +[0]: 69 ----> 32 +[0]: 69 ----> 46 +[0]: 70 ----> 37 +[0]: 70 ----> 45 +[0]: 71 ----> 38 +[0]: 71 ----> 45 +[0]: 71 ----> 47 +[0]: 72 ----> 39 +[0]: 72 ----> 47 +[0]: 73 ----> 37 +[0]: 73 ----> 40 +[0]: 74 ----> 38 +[0]: 74 ----> 40 +[0]: 74 ----> 42 +[0]: 75 ----> 39 +[0]: 75 ----> 42 +[0]: 76 ----> 37 +[0]: 76 ----> 41 +[0]: 77 ----> 38 +[0]: 77 ----> 41 +[0]: 77 ----> 43 +[0]: 78 ----> 39 +[0]: 78 ----> 43 +[0]: 79 ----> 40 +[0]: 79 ----> 48 +[0]: 79 ----> 52 +[0]: 80 ----> 33 +[0]: 80 ----> 49 +[0]: 80 ----> 52 +[0]: 81 ----> 42 +[0]: 81 ----> 50 +[0]: 81 ----> 53 +[0]: 82 ----> 35 +[0]: 82 ----> 51 +[0]: 82 ----> 53 +[0]: 83 ----> 41 +[0]: 83 ----> 48 +[0]: 83 ----> 54 +[0]: 84 ----> 34 +[0]: 84 ----> 49 +[0]: 84 ----> 54 +[0]: 85 ----> 43 +[0]: 85 ----> 50 +[0]: 85 ----> 55 +[0]: 86 ----> 36 +[0]: 86 ----> 51 +[0]: 86 ----> 55 +[0]: 87 ----> 37 +[0]: 87 ----> 48 +[0]: 87 ----> 56 +[0]: 88 ----> 30 +[0]: 88 ----> 49 +[0]: 88 ----> 56 +[0]: 89 ----> 38 +[0]: 89 ----> 48 +[0]: 89 ----> 50 
+[0]: 89 ----> 57 +[0]: 90 ----> 31 +[0]: 90 ----> 49 +[0]: 90 ----> 51 +[0]: 90 ----> 57 +[0]: 91 ----> 39 +[0]: 91 ----> 50 +[0]: 91 ----> 58 +[0]: 92 ----> 32 +[0]: 92 ----> 51 +[0]: 92 ----> 58 +[0]: 93 ----> 30 +[0]: 93 ----> 33 +[0]: 94 ----> 31 +[0]: 94 ----> 33 +[0]: 94 ----> 35 +[0]: 95 ----> 32 +[0]: 95 ----> 35 +[0]: 96 ----> 30 +[0]: 96 ----> 34 +[0]: 97 ----> 31 +[0]: 97 ----> 34 +[0]: 97 ----> 36 +[0]: 98 ----> 32 +[0]: 98 ----> 36 +[0]: 99 ----> 52 +[0]: 99 ----> 56 +[0]: 100 ----> 52 +[0]: 100 ----> 53 +[0]: 100 ----> 57 +[0]: 101 ----> 53 +[0]: 101 ----> 58 +[0]: 102 ----> 54 +[0]: 102 ----> 56 +[0]: 103 ----> 54 +[0]: 103 ----> 55 +[0]: 103 ----> 57 +[0]: 104 ----> 55 +[0]: 104 ----> 58 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 44 (-2) +[0]: 0 <---- 49 (0) +[0]: 0 <---- 33 (0) +[0]: 0 <---- 34 (-3) +[0]: 0 <---- 31 (0) +[0]: 0 <---- 30 (-2) +[0]: 1 <---- 46 (-2) +[0]: 1 <---- 51 (0) +[0]: 1 <---- 35 (0) +[0]: 1 <---- 36 (-3) +[0]: 1 <---- 32 (0) +[0]: 1 <---- 31 (-2) +[0]: 2 <---- 48 (-2) +[0]: 2 <---- 45 (0) +[0]: 2 <---- 40 (0) +[0]: 2 <---- 41 (-3) +[0]: 2 <---- 38 (0) +[0]: 2 <---- 37 (-2) +[0]: 3 <---- 50 (-2) +[0]: 3 <---- 47 (0) +[0]: 3 <---- 42 (0) +[0]: 3 <---- 43 (-3) +[0]: 3 <---- 39 (0) +[0]: 3 <---- 38 (-2) +[0]: 4 <---- 48 (0) +[0]: 4 <---- 49 (0) +[0]: 4 <---- 52 (0) +[0]: 4 <---- 57 (0) +[0]: 4 <---- 54 (-1) +[0]: 4 <---- 56 (-1) +[0]: 5 <---- 50 (0) +[0]: 5 <---- 51 (0) +[0]: 5 <---- 53 (0) +[0]: 5 <---- 58 (0) +[0]: 5 <---- 55 (-1) +[0]: 5 <---- 57 (-1) +[0]: 30 <---- 67 (0) +[0]: 30 <---- 96 (0) +[0]: 30 <---- 88 (-1) +[0]: 30 <---- 93 (-1) +[0]: 31 <---- 68 (0) +[0]: 31 <---- 97 (0) +[0]: 31 <---- 90 (-1) +[0]: 31 <---- 94 (-1) +[0]: 32 <---- 69 (0) +[0]: 32 <---- 98 (0) +[0]: 32 <---- 92 (-1) +[0]: 32 <---- 95 (-1) +[0]: 33 <---- 59 (0) +[0]: 33 <---- 94 (0) +[0]: 33 <---- 80 (-1) +[0]: 33 <---- 93 (-1) +[0]: 34 <---- 61 (0) +[0]: 34 <---- 97 (0) +[0]: 34 <---- 84 (-1) +[0]: 34 <---- 96 (-1) +[0]: 35 <---- 60 (0) +[0]: 35 <---- 95 (0) +[0]: 35 <---- 82 (-1) +[0]: 35 <---- 94 (-1) +[0]: 36 <---- 62 (0) +[0]: 36 <---- 98 (0) +[0]: 36 <---- 86 (-1) +[0]: 36 <---- 97 (-1) +[0]: 37 <---- 87 (0) +[0]: 37 <---- 76 (0) +[0]: 37 <---- 70 (-1) +[0]: 37 <---- 73 (-1) +[0]: 38 <---- 89 (0) +[0]: 38 <---- 77 (0) +[0]: 38 <---- 71 (-1) +[0]: 38 <---- 74 (-1) +[0]: 39 <---- 91 (0) +[0]: 39 <---- 78 (0) +[0]: 39 <---- 72 (-1) +[0]: 39 <---- 75 (-1) +[0]: 40 <---- 79 (0) +[0]: 40 <---- 74 (0) +[0]: 40 <---- 63 (-1) +[0]: 40 <---- 73 (-1) +[0]: 41 <---- 83 (0) +[0]: 41 <---- 77 (0) +[0]: 41 <---- 65 (-1) +[0]: 41 <---- 76 (-1) +[0]: 42 <---- 81 (0) +[0]: 42 <---- 75 (0) +[0]: 42 <---- 64 (-1) +[0]: 42 <---- 74 (-1) +[0]: 43 <---- 85 (0) +[0]: 43 <---- 78 (0) +[0]: 43 <---- 66 (-1) +[0]: 43 <---- 77 (-1) +[0]: 44 <---- 59 (0) +[0]: 44 <---- 68 (0) +[0]: 44 <---- 61 (-1) +[0]: 44 <---- 67 (-1) +[0]: 45 <---- 63 (0) +[0]: 45 <---- 71 (0) +[0]: 45 <---- 65 (-1) +[0]: 45 <---- 70 (-1) +[0]: 46 <---- 60 (0) +[0]: 46 <---- 69 (0) +[0]: 46 <---- 62 (-1) +[0]: 46 <---- 68 (-1) +[0]: 47 <---- 64 (0) +[0]: 47 <---- 72 (0) +[0]: 47 <---- 66 (-1) +[0]: 47 <---- 71 (-1) +[0]: 48 <---- 79 (0) +[0]: 48 <---- 89 (0) +[0]: 48 <---- 83 (-1) +[0]: 48 <---- 87 (-1) +[0]: 49 <---- 80 (0) +[0]: 49 <---- 90 (0) +[0]: 49 <---- 84 (-1) +[0]: 49 <---- 88 (-1) +[0]: 50 <---- 81 (0) +[0]: 50 <---- 91 (0) +[0]: 50 <---- 85 (-1) +[0]: 50 <---- 89 (-1) +[0]: 51 <---- 82 (0) +[0]: 51 <---- 92 (0) +[0]: 51 <---- 86 (-1) +[0]: 51 <---- 90 (-1) +[0]: 52 <---- 79 (0) +[0]: 52 <---- 80 (0) +[0]: 52 <---- 
99 (0) +[0]: 52 <---- 100 (0) +[0]: 53 <---- 81 (0) +[0]: 53 <---- 82 (0) +[0]: 53 <---- 100 (0) +[0]: 53 <---- 101 (0) +[0]: 54 <---- 83 (0) +[0]: 54 <---- 84 (0) +[0]: 54 <---- 102 (0) +[0]: 54 <---- 103 (0) +[0]: 55 <---- 85 (0) +[0]: 55 <---- 86 (0) +[0]: 55 <---- 103 (0) +[0]: 55 <---- 104 (0) +[0]: 56 <---- 87 (0) +[0]: 56 <---- 88 (0) +[0]: 56 <---- 99 (0) +[0]: 56 <---- 102 (0) +[0]: 57 <---- 89 (0) +[0]: 57 <---- 90 (0) +[0]: 57 <---- 100 (0) +[0]: 57 <---- 103 (0) +[0]: 58 <---- 91 (0) +[0]: 58 <---- 92 (0) +[0]: 58 <---- 101 (0) +[0]: 58 <---- 104 (0) +[0]: 59 <---- 6 (0) +[0]: 59 <---- 7 (0) +[0]: 60 <---- 7 (0) +[0]: 60 <---- 8 (0) +[0]: 61 <---- 9 (0) +[0]: 61 <---- 10 (0) +[0]: 62 <---- 10 (0) +[0]: 62 <---- 11 (0) +[0]: 63 <---- 12 (0) +[0]: 63 <---- 13 (0) +[0]: 64 <---- 13 (0) +[0]: 64 <---- 14 (0) +[0]: 65 <---- 15 (0) +[0]: 65 <---- 16 (0) +[0]: 66 <---- 16 (0) +[0]: 66 <---- 17 (0) +[0]: 67 <---- 6 (0) +[0]: 67 <---- 9 (0) +[0]: 68 <---- 7 (0) +[0]: 68 <---- 10 (0) +[0]: 69 <---- 8 (0) +[0]: 69 <---- 11 (0) +[0]: 70 <---- 12 (0) +[0]: 70 <---- 15 (0) +[0]: 71 <---- 13 (0) +[0]: 71 <---- 16 (0) +[0]: 72 <---- 14 (0) +[0]: 72 <---- 17 (0) +[0]: 73 <---- 18 (0) +[0]: 73 <---- 12 (0) +[0]: 74 <---- 20 (0) +[0]: 74 <---- 13 (0) +[0]: 75 <---- 22 (0) +[0]: 75 <---- 14 (0) +[0]: 76 <---- 24 (0) +[0]: 76 <---- 15 (0) +[0]: 77 <---- 26 (0) +[0]: 77 <---- 16 (0) +[0]: 78 <---- 28 (0) +[0]: 78 <---- 17 (0) +[0]: 79 <---- 18 (0) +[0]: 79 <---- 20 (0) +[0]: 80 <---- 19 (0) +[0]: 80 <---- 21 (0) +[0]: 81 <---- 20 (0) +[0]: 81 <---- 22 (0) +[0]: 82 <---- 21 (0) +[0]: 82 <---- 23 (0) +[0]: 83 <---- 24 (0) +[0]: 83 <---- 26 (0) +[0]: 84 <---- 25 (0) +[0]: 84 <---- 27 (0) +[0]: 85 <---- 26 (0) +[0]: 85 <---- 28 (0) +[0]: 86 <---- 27 (0) +[0]: 86 <---- 29 (0) +[0]: 87 <---- 18 (0) +[0]: 87 <---- 24 (0) +[0]: 88 <---- 19 (0) +[0]: 88 <---- 25 (0) +[0]: 89 <---- 20 (0) +[0]: 89 <---- 26 (0) +[0]: 90 <---- 21 (0) +[0]: 90 <---- 27 (0) +[0]: 91 <---- 22 (0) +[0]: 91 <---- 28 (0) +[0]: 92 <---- 23 (0) +[0]: 92 <---- 29 (0) +[0]: 93 <---- 6 (0) +[0]: 93 <---- 19 (0) +[0]: 94 <---- 7 (0) +[0]: 94 <---- 21 (0) +[0]: 95 <---- 8 (0) +[0]: 95 <---- 23 (0) +[0]: 96 <---- 9 (0) +[0]: 96 <---- 25 (0) +[0]: 97 <---- 10 (0) +[0]: 97 <---- 27 (0) +[0]: 98 <---- 11 (0) +[0]: 98 <---- 29 (0) +[0]: 99 <---- 18 (0) +[0]: 99 <---- 19 (0) +[0]: 100 <---- 20 (0) +[0]: 100 <---- 21 (0) +[0]: 101 <---- 22 (0) +[0]: 101 <---- 23 (0) +[0]: 102 <---- 24 (0) +[0]: 102 <---- 25 (0) +[0]: 103 <---- 26 (0) +[0]: 103 <---- 27 (0) +[0]: 104 <---- 28 (0) +[0]: 104 <---- 29 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 6) dim 3 offset 0 0. 0. 0. + ( 7) dim 3 offset 3 1. 0. 0. + ( 8) dim 3 offset 6 2. 0. 0. + ( 9) dim 3 offset 9 0. 1. 0. + ( 10) dim 3 offset 12 1. 1. 0. + ( 11) dim 3 offset 15 2. 1. 0. + ( 12) dim 3 offset 18 0. 0. 2. + ( 13) dim 3 offset 21 1. 0. 2. + ( 14) dim 3 offset 24 2. 0. 2. + ( 15) dim 3 offset 27 0. 1. 2. + ( 16) dim 3 offset 30 1. 1. 2. + ( 17) dim 3 offset 33 2. 1. 2. + ( 18) dim 3 offset 36 0. 0. 1. + ( 19) dim 3 offset 39 0. 0. 1. + ( 20) dim 3 offset 42 1. 0. 1. + ( 21) dim 3 offset 45 1. 0. 1. + ( 22) dim 3 offset 48 2. 0. 1. + ( 23) dim 3 offset 51 2. 0. 1. + ( 24) dim 3 offset 54 0. 1. 1. + ( 25) dim 3 offset 57 0. 1. 1. + ( 26) dim 3 offset 60 1. 1. 1. + ( 27) dim 3 offset 63 1. 1. 1. + ( 28) dim 3 offset 66 2. 1. 1. + ( 29) dim 3 offset 69 2. 1. 1. 
+Labels: +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 2 (7) +[0]: 3 (7) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 25 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 29 (0) +[0]: 99 (2) +[0]: 100 (2) +[0]: 101 (2) +[0]: 102 (2) +[0]: 103 (2) +[0]: 104 (2) +[0]: 30 (4) +[0]: 31 (4) +[0]: 32 (4) +[0]: 33 (4) +[0]: 34 (4) +[0]: 35 (4) +[0]: 36 (4) +[0]: 37 (4) +[0]: 38 (4) +[0]: 39 (4) +[0]: 40 (4) +[0]: 41 (4) +[0]: 42 (4) +[0]: 43 (4) +[0]: 44 (4) +[0]: 45 (4) +[0]: 46 (4) +[0]: 47 (4) +[0]: 48 (4) +[0]: 49 (4) +[0]: 50 (4) +[0]: 51 (4) +[0]: 4 (10) +[0]: 5 (10) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +[0]: 75 (1) +[0]: 76 (1) +[0]: 77 (1) +[0]: 78 (1) +[0]: 79 (1) +[0]: 80 (1) +[0]: 81 (1) +[0]: 82 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 52 (5) +[0]: 53 (5) +[0]: 54 (5) +[0]: 55 (5) +[0]: 56 (5) +[0]: 57 (5) +[0]: 58 (5) +Label 'marker': +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (1) +[0]: 47 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +[0]: 75 (1) +[0]: 76 (1) +[0]: 77 (1) +[0]: 78 (1) +[0]: 79 (1) +[0]: 80 (1) +[0]: 81 (1) +[0]: 82 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +Label 'Face Sets': +[0]: 30 (6) +[0]: 37 (6) +[0]: 32 (5) +[0]: 39 (5) +[0]: 33 (3) +[0]: 35 (3) +[0]: 40 (3) +[0]: 42 (3) +[0]: 34 (4) +[0]: 36 (4) +[0]: 41 (4) +[0]: 43 (4) +[0]: 44 (1) +[0]: 46 (1) +[0]: 45 (2) +[0]: 47 (2) +Label 'fault0': +[0]: 4 (2) +[0]: 5 (2) +[0]: 48 (2) +[0]: 49 (2) +[0]: 50 (2) +[0]: 51 (2) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 79 (1) +[0]: 80 (1) +[0]: 81 (1) +[0]: 82 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 25 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 29 (0) +[0]: 99 (0) +[0]: 100 (0) +[0]: 101 (0) +[0]: 102 (0) +[0]: 103 (0) +[0]: 104 (0) +[0]: 0 (103) +[0]: 1 (103) +[0]: 30 (102) +[0]: 31 (102) +[0]: 32 (102) +[0]: 33 (102) +[0]: 34 (102) +[0]: 35 (102) +[0]: 36 (102) +[0]: 2 
(-103) +[0]: 3 (-103) +[0]: 37 (-102) +[0]: 38 (-102) +[0]: 39 (-102) +[0]: 40 (-102) +[0]: 41 (-102) +[0]: 42 (-102) +[0]: 43 (-102) +[0]: 93 (101) +[0]: 94 (101) +[0]: 95 (101) +[0]: 96 (101) +[0]: 97 (101) +[0]: 98 (101) +[0]: 73 (-101) +[0]: 74 (-101) +[0]: 75 (-101) +[0]: 76 (-101) +[0]: 77 (-101) +[0]: 78 (-101) +Label 'fault1': +[0]: 38 (2) +[0]: 71 (1) +[0]: 74 (1) +[0]: 77 (1) +[0]: 13 (0) +[0]: 16 (0) +[0]: 0 (103) +[0]: 2 (103) +[0]: 4 (102) +[0]: 31 (102) +[0]: 33 (102) +[0]: 34 (102) +[0]: 40 (102) +[0]: 41 (102) +[0]: 45 (102) +[0]: 48 (102) +[0]: 49 (102) +[0]: 1 (-103) +[0]: 3 (-103) +[0]: 5 (-102) +[0]: 35 (-102) +[0]: 36 (-102) +[0]: 42 (-102) +[0]: 43 (-102) +[0]: 47 (-102) +[0]: 50 (-102) +[0]: 51 (-102) +[0]: 52 (101) +[0]: 54 (101) +[0]: 63 (101) +[0]: 65 (101) +[0]: 79 (101) +[0]: 80 (101) +[0]: 83 (101) +[0]: 84 (101) +[0]: 94 (101) +[0]: 97 (101) +[0]: 53 (-101) +[0]: 55 (-101) +[0]: 64 (-101) +[0]: 66 (-101) +[0]: 81 (-101) +[0]: 82 (-101) +[0]: 85 (-101) +[0]: 86 (-101) +[0]: 57 (201) +[0]: 89 (201) +[0]: 90 (201) +[0]: 20 (200) +[0]: 21 (200) +[0]: 26 (200) +[0]: 27 (200) +[0]: 100 (200) +[0]: 103 (200) +DM Object: box 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 8 +[0]: 7 ----> 68 +[0]: 7 ----> 74 +[0]: 7 ----> 95 +[0]: 8 ----> 68 +[0]: 8 ----> 69 +[0]: 8 ----> 75 +[0]: 8 ----> 96 +[0]: 9 ----> 69 +[0]: 9 ----> 76 +[0]: 9 ----> 97 +[0]: 10 ----> 70 +[0]: 10 ----> 74 +[0]: 10 ----> 98 +[0]: 11 ----> 70 +[0]: 11 ----> 71 +[0]: 11 ----> 75 +[0]: 11 ----> 99 +[0]: 12 ----> 71 +[0]: 12 ----> 76 +[0]: 12 ----> 100 +[0]: 13 ----> 77 +[0]: 13 ----> 79 +[0]: 13 ----> 101 +[0]: 14 ----> 72 +[0]: 14 ----> 78 +[0]: 14 ----> 80 +[0]: 15 ----> 77 +[0]: 15 ----> 81 +[0]: 15 ----> 102 +[0]: 16 ----> 73 +[0]: 16 ----> 78 +[0]: 16 ----> 82 +[0]: 17 ----> 79 +[0]: 17 ----> 83 +[0]: 17 ----> 91 +[0]: 17 ----> 117 +[0]: 18 ----> 84 +[0]: 18 ----> 92 +[0]: 18 ----> 95 +[0]: 18 ----> 117 +[0]: 19 ----> 80 +[0]: 19 ----> 85 +[0]: 19 ----> 93 +[0]: 19 ----> 119 +[0]: 20 ----> 86 +[0]: 20 ----> 94 +[0]: 20 ----> 97 +[0]: 20 ----> 119 +[0]: 21 ----> 81 +[0]: 21 ----> 87 +[0]: 21 ----> 91 +[0]: 21 ----> 120 +[0]: 22 ----> 88 +[0]: 22 ----> 92 +[0]: 22 ----> 98 +[0]: 22 ----> 120 +[0]: 23 ----> 82 +[0]: 23 ----> 89 +[0]: 23 ----> 93 +[0]: 23 ----> 122 +[0]: 24 ----> 90 +[0]: 24 ----> 94 +[0]: 24 ----> 100 +[0]: 24 ----> 122 +[0]: 25 ----> 72 +[0]: 25 ----> 103 +[0]: 25 ----> 105 +[0]: 25 ----> 111 +[0]: 26 ----> 101 +[0]: 26 ----> 104 +[0]: 26 ----> 106 +[0]: 26 ----> 111 +[0]: 27 ----> 73 +[0]: 27 ----> 103 +[0]: 27 ----> 107 +[0]: 27 ----> 112 +[0]: 28 ----> 102 +[0]: 28 ----> 104 +[0]: 28 ----> 108 +[0]: 28 ----> 112 +[0]: 29 ----> 83 +[0]: 29 ----> 85 +[0]: 29 ----> 105 +[0]: 29 ----> 106 +[0]: 29 ----> 109 +[0]: 29 ----> 113 +[0]: 29 ----> 113 +[0]: 29 ----> 118 +[0]: 30 ----> 84 +[0]: 30 ----> 86 +[0]: 30 ----> 96 +[0]: 30 ----> 110 +[0]: 30 ----> 114 +[0]: 30 ----> 114 +[0]: 30 ----> 118 +[0]: 31 ----> 87 +[0]: 31 ----> 89 +[0]: 31 ----> 107 +[0]: 31 ----> 108 +[0]: 31 ----> 109 +[0]: 31 ----> 115 +[0]: 31 ----> 115 +[0]: 31 ----> 121 +[0]: 32 ----> 88 +[0]: 32 ----> 90 +[0]: 32 ----> 99 +[0]: 32 ----> 110 +[0]: 32 ----> 116 +[0]: 32 ----> 116 +[0]: 32 ----> 121 +[0]: 33 ----> 0 +[0]: 34 ----> 0 +[0]: 34 ----> 1 +[0]: 35 ----> 1 +[0]: 36 ----> 0 +[0]: 37 ----> 0 +[0]: 38 ----> 1 +[0]: 39 ----> 1 +[0]: 40 ----> 3 +[0]: 41 ----> 2 +[0]: 42 ----> 2 +[0]: 43 ----> 2 +[0]: 44 ----> 0 +[0]: 45 ----> 1 +[0]: 46 ----> 2 +[0]: 47 ----> 3 +[0]: 47 ----> 4 
+[0]: 48 ----> 0 +[0]: 48 ----> 4 +[0]: 49 ----> 2 +[0]: 49 ----> 5 +[0]: 50 ----> 1 +[0]: 50 ----> 5 +[0]: 51 ----> 2 +[0]: 51 ----> 6 +[0]: 52 ----> 3 +[0]: 52 ----> 6 +[0]: 53 ----> 3 +[0]: 54 ----> 3 +[0]: 55 ----> 3 +[0]: 56 ----> 4 +[0]: 57 ----> 5 +[0]: 58 ----> 4 +[0]: 59 ----> 5 +[0]: 60 ----> 4 +[0]: 61 ----> 4 +[0]: 61 ----> 5 +[0]: 62 ----> 5 +[0]: 63 ----> 6 +[0]: 64 ----> 6 +[0]: 65 ----> 6 +[0]: 66 ----> 6 +[0]: 68 ----> 36 +[0]: 68 ----> 44 +[0]: 69 ----> 38 +[0]: 69 ----> 45 +[0]: 70 ----> 37 +[0]: 70 ----> 44 +[0]: 71 ----> 39 +[0]: 71 ----> 45 +[0]: 72 ----> 42 +[0]: 72 ----> 46 +[0]: 73 ----> 43 +[0]: 73 ----> 46 +[0]: 74 ----> 33 +[0]: 74 ----> 44 +[0]: 75 ----> 34 +[0]: 75 ----> 44 +[0]: 75 ----> 45 +[0]: 76 ----> 35 +[0]: 76 ----> 45 +[0]: 77 ----> 40 +[0]: 77 ----> 55 +[0]: 78 ----> 41 +[0]: 78 ----> 46 +[0]: 79 ----> 40 +[0]: 79 ----> 53 +[0]: 80 ----> 41 +[0]: 80 ----> 42 +[0]: 81 ----> 40 +[0]: 81 ----> 54 +[0]: 82 ----> 41 +[0]: 82 ----> 43 +[0]: 83 ----> 47 +[0]: 83 ----> 53 +[0]: 83 ----> 56 +[0]: 84 ----> 36 +[0]: 84 ----> 48 +[0]: 84 ----> 56 +[0]: 85 ----> 42 +[0]: 85 ----> 49 +[0]: 85 ----> 57 +[0]: 86 ----> 38 +[0]: 86 ----> 50 +[0]: 86 ----> 57 +[0]: 87 ----> 47 +[0]: 87 ----> 54 +[0]: 87 ----> 58 +[0]: 88 ----> 37 +[0]: 88 ----> 48 +[0]: 88 ----> 58 +[0]: 89 ----> 43 +[0]: 89 ----> 49 +[0]: 89 ----> 59 +[0]: 90 ----> 39 +[0]: 90 ----> 50 +[0]: 90 ----> 59 +[0]: 91 ----> 40 +[0]: 91 ----> 47 +[0]: 91 ----> 60 +[0]: 92 ----> 33 +[0]: 92 ----> 48 +[0]: 92 ----> 60 +[0]: 93 ----> 41 +[0]: 93 ----> 49 +[0]: 93 ----> 62 +[0]: 94 ----> 35 +[0]: 94 ----> 50 +[0]: 94 ----> 62 +[0]: 95 ----> 33 +[0]: 95 ----> 36 +[0]: 96 ----> 34 +[0]: 96 ----> 36 +[0]: 96 ----> 38 +[0]: 97 ----> 35 +[0]: 97 ----> 38 +[0]: 98 ----> 33 +[0]: 98 ----> 37 +[0]: 99 ----> 34 +[0]: 99 ----> 37 +[0]: 99 ----> 39 +[0]: 100 ----> 35 +[0]: 100 ----> 39 +[0]: 101 ----> 53 +[0]: 101 ----> 55 +[0]: 102 ----> 54 +[0]: 102 ----> 55 +[0]: 103 ----> 46 +[0]: 103 ----> 51 +[0]: 103 ----> 63 +[0]: 104 ----> 52 +[0]: 104 ----> 55 +[0]: 104 ----> 63 +[0]: 105 ----> 42 +[0]: 105 ----> 51 +[0]: 105 ----> 64 +[0]: 106 ----> 52 +[0]: 106 ----> 53 +[0]: 106 ----> 64 +[0]: 107 ----> 43 +[0]: 107 ----> 51 +[0]: 107 ----> 65 +[0]: 108 ----> 52 +[0]: 108 ----> 54 +[0]: 108 ----> 65 +[0]: 109 ----> 47 +[0]: 109 ----> 49 +[0]: 109 ----> 51 +[0]: 109 ----> 52 +[0]: 109 ----> 61 +[0]: 109 ----> 66 +[0]: 109 ----> 66 +[0]: 110 ----> 34 +[0]: 110 ----> 48 +[0]: 110 ----> 50 +[0]: 110 ----> 61 +[0]: 110 ----> 67 +[0]: 110 ----> 67 +[0]: 111 ----> 63 +[0]: 111 ----> 64 +[0]: 112 ----> 63 +[0]: 112 ----> 65 +[0]: 113 ----> 64 +[0]: 113 ----> 66 +[0]: 114 ----> 67 +[0]: 115 ----> 65 +[0]: 115 ----> 66 +[0]: 116 ----> 67 +[0]: 117 ----> 56 +[0]: 117 ----> 60 +[0]: 118 ----> 56 +[0]: 118 ----> 57 +[0]: 118 ----> 61 +[0]: 119 ----> 57 +[0]: 119 ----> 62 +[0]: 120 ----> 58 +[0]: 120 ----> 60 +[0]: 121 ----> 58 +[0]: 121 ----> 59 +[0]: 121 ----> 61 +[0]: 122 ----> 59 +[0]: 122 ----> 62 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 44 (-2) +[0]: 0 <---- 48 (0) +[0]: 0 <---- 36 (0) +[0]: 0 <---- 37 (-3) +[0]: 0 <---- 34 (0) +[0]: 0 <---- 33 (-2) +[0]: 1 <---- 45 (-2) +[0]: 1 <---- 50 (0) +[0]: 1 <---- 38 (0) +[0]: 1 <---- 39 (-3) +[0]: 1 <---- 35 (0) +[0]: 1 <---- 34 (-2) +[0]: 2 <---- 49 (-2) +[0]: 2 <---- 46 (0) +[0]: 2 <---- 42 (0) +[0]: 2 <---- 43 (-3) +[0]: 2 <---- 41 (0) +[0]: 2 <---- 51 (-2) +[0]: 3 <---- 47 (-2) +[0]: 3 <---- 55 (0) +[0]: 3 <---- 53 (0) +[0]: 3 <---- 54 (-3) +[0]: 3 <---- 52 (0) +[0]: 3 <---- 40 (-2) 
+[0]: 4 <---- 47 (0) +[0]: 4 <---- 48 (0) +[0]: 4 <---- 56 (0) +[0]: 4 <---- 61 (0) +[0]: 4 <---- 58 (-1) +[0]: 4 <---- 60 (-1) +[0]: 5 <---- 49 (0) +[0]: 5 <---- 50 (0) +[0]: 5 <---- 57 (0) +[0]: 5 <---- 62 (0) +[0]: 5 <---- 59 (-1) +[0]: 5 <---- 61 (-1) +[0]: 6 <---- 51 (0) +[0]: 6 <---- 52 (0) +[0]: 6 <---- 66 (0) +[0]: 6 <---- 65 (0) +[0]: 6 <---- 63 (-1) +[0]: 6 <---- 64 (-1) +[0]: 33 <---- 74 (0) +[0]: 33 <---- 98 (0) +[0]: 33 <---- 92 (-1) +[0]: 33 <---- 95 (-1) +[0]: 34 <---- 75 (0) +[0]: 34 <---- 99 (0) +[0]: 34 <---- 110 (-1) +[0]: 34 <---- 96 (-1) +[0]: 35 <---- 76 (0) +[0]: 35 <---- 100 (0) +[0]: 35 <---- 94 (-1) +[0]: 35 <---- 97 (-1) +[0]: 36 <---- 68 (0) +[0]: 36 <---- 96 (0) +[0]: 36 <---- 84 (-1) +[0]: 36 <---- 95 (-1) +[0]: 37 <---- 70 (0) +[0]: 37 <---- 99 (0) +[0]: 37 <---- 88 (-1) +[0]: 37 <---- 98 (-1) +[0]: 38 <---- 69 (0) +[0]: 38 <---- 97 (0) +[0]: 38 <---- 86 (-1) +[0]: 38 <---- 96 (-1) +[0]: 39 <---- 71 (0) +[0]: 39 <---- 100 (0) +[0]: 39 <---- 90 (-1) +[0]: 39 <---- 99 (-1) +[0]: 40 <---- 91 (0) +[0]: 40 <---- 81 (0) +[0]: 40 <---- 77 (-1) +[0]: 40 <---- 79 (-1) +[0]: 41 <---- 93 (0) +[0]: 41 <---- 82 (0) +[0]: 41 <---- 78 (-1) +[0]: 41 <---- 80 (-1) +[0]: 42 <---- 85 (0) +[0]: 42 <---- 80 (0) +[0]: 42 <---- 72 (-1) +[0]: 42 <---- 105 (-1) +[0]: 43 <---- 89 (0) +[0]: 43 <---- 82 (0) +[0]: 43 <---- 73 (-1) +[0]: 43 <---- 107 (-1) +[0]: 44 <---- 68 (0) +[0]: 44 <---- 75 (0) +[0]: 44 <---- 70 (-1) +[0]: 44 <---- 74 (-1) +[0]: 45 <---- 69 (0) +[0]: 45 <---- 76 (0) +[0]: 45 <---- 71 (-1) +[0]: 45 <---- 75 (-1) +[0]: 46 <---- 72 (0) +[0]: 46 <---- 78 (0) +[0]: 46 <---- 73 (-1) +[0]: 46 <---- 103 (-1) +[0]: 47 <---- 83 (0) +[0]: 47 <---- 109 (0) +[0]: 47 <---- 87 (-1) +[0]: 47 <---- 91 (-1) +[0]: 48 <---- 84 (0) +[0]: 48 <---- 110 (0) +[0]: 48 <---- 88 (-1) +[0]: 48 <---- 92 (-1) +[0]: 49 <---- 85 (0) +[0]: 49 <---- 93 (0) +[0]: 49 <---- 89 (-1) +[0]: 49 <---- 109 (-1) +[0]: 50 <---- 86 (0) +[0]: 50 <---- 94 (0) +[0]: 50 <---- 90 (-1) +[0]: 50 <---- 110 (-1) +[0]: 51 <---- 109 (0) +[0]: 51 <---- 107 (0) +[0]: 51 <---- 103 (-1) +[0]: 51 <---- 105 (-1) +[0]: 52 <---- 109 (0) +[0]: 52 <---- 108 (0) +[0]: 52 <---- 104 (-1) +[0]: 52 <---- 106 (-1) +[0]: 53 <---- 83 (0) +[0]: 53 <---- 106 (0) +[0]: 53 <---- 101 (-1) +[0]: 53 <---- 79 (-1) +[0]: 54 <---- 87 (0) +[0]: 54 <---- 108 (0) +[0]: 54 <---- 102 (-1) +[0]: 54 <---- 81 (-1) +[0]: 55 <---- 101 (0) +[0]: 55 <---- 104 (0) +[0]: 55 <---- 102 (-1) +[0]: 55 <---- 77 (-1) +[0]: 56 <---- 83 (0) +[0]: 56 <---- 84 (0) +[0]: 56 <---- 117 (0) +[0]: 56 <---- 118 (0) +[0]: 57 <---- 85 (0) +[0]: 57 <---- 86 (0) +[0]: 57 <---- 118 (0) +[0]: 57 <---- 119 (0) +[0]: 58 <---- 87 (0) +[0]: 58 <---- 88 (0) +[0]: 58 <---- 120 (0) +[0]: 58 <---- 121 (0) +[0]: 59 <---- 89 (0) +[0]: 59 <---- 90 (0) +[0]: 59 <---- 121 (0) +[0]: 59 <---- 122 (0) +[0]: 60 <---- 91 (0) +[0]: 60 <---- 92 (0) +[0]: 60 <---- 117 (0) +[0]: 60 <---- 120 (0) +[0]: 61 <---- 109 (0) +[0]: 61 <---- 110 (0) +[0]: 61 <---- 118 (0) +[0]: 61 <---- 121 (0) +[0]: 62 <---- 93 (0) +[0]: 62 <---- 94 (0) +[0]: 62 <---- 119 (0) +[0]: 62 <---- 122 (0) +[0]: 63 <---- 103 (0) +[0]: 63 <---- 104 (0) +[0]: 63 <---- 111 (0) +[0]: 63 <---- 112 (0) +[0]: 64 <---- 105 (0) +[0]: 64 <---- 106 (0) +[0]: 64 <---- 113 (0) +[0]: 64 <---- 111 (0) +[0]: 65 <---- 107 (0) +[0]: 65 <---- 108 (0) +[0]: 65 <---- 115 (0) +[0]: 65 <---- 112 (0) +[0]: 66 <---- 109 (0) +[0]: 66 <---- 109 (0) +[0]: 66 <---- 113 (0) +[0]: 66 <---- 115 (0) +[0]: 67 <---- 110 (0) +[0]: 67 <---- 110 (0) +[0]: 67 <---- 114 (0) +[0]: 
67 <---- 116 (0) +[0]: 68 <---- 7 (0) +[0]: 68 <---- 8 (0) +[0]: 69 <---- 8 (0) +[0]: 69 <---- 9 (0) +[0]: 70 <---- 10 (0) +[0]: 70 <---- 11 (0) +[0]: 71 <---- 11 (0) +[0]: 71 <---- 12 (0) +[0]: 72 <---- 25 (0) +[0]: 72 <---- 14 (0) +[0]: 73 <---- 27 (0) +[0]: 73 <---- 16 (0) +[0]: 74 <---- 7 (0) +[0]: 74 <---- 10 (0) +[0]: 75 <---- 8 (0) +[0]: 75 <---- 11 (0) +[0]: 76 <---- 9 (0) +[0]: 76 <---- 12 (0) +[0]: 77 <---- 13 (0) +[0]: 77 <---- 15 (0) +[0]: 78 <---- 14 (0) +[0]: 78 <---- 16 (0) +[0]: 79 <---- 17 (0) +[0]: 79 <---- 13 (0) +[0]: 80 <---- 19 (0) +[0]: 80 <---- 14 (0) +[0]: 81 <---- 21 (0) +[0]: 81 <---- 15 (0) +[0]: 82 <---- 23 (0) +[0]: 82 <---- 16 (0) +[0]: 83 <---- 17 (0) +[0]: 83 <---- 29 (0) +[0]: 84 <---- 18 (0) +[0]: 84 <---- 30 (0) +[0]: 85 <---- 29 (0) +[0]: 85 <---- 19 (0) +[0]: 86 <---- 30 (0) +[0]: 86 <---- 20 (0) +[0]: 87 <---- 21 (0) +[0]: 87 <---- 31 (0) +[0]: 88 <---- 22 (0) +[0]: 88 <---- 32 (0) +[0]: 89 <---- 31 (0) +[0]: 89 <---- 23 (0) +[0]: 90 <---- 32 (0) +[0]: 90 <---- 24 (0) +[0]: 91 <---- 17 (0) +[0]: 91 <---- 21 (0) +[0]: 92 <---- 18 (0) +[0]: 92 <---- 22 (0) +[0]: 93 <---- 19 (0) +[0]: 93 <---- 23 (0) +[0]: 94 <---- 20 (0) +[0]: 94 <---- 24 (0) +[0]: 95 <---- 7 (0) +[0]: 95 <---- 18 (0) +[0]: 96 <---- 8 (0) +[0]: 96 <---- 30 (0) +[0]: 97 <---- 9 (0) +[0]: 97 <---- 20 (0) +[0]: 98 <---- 10 (0) +[0]: 98 <---- 22 (0) +[0]: 99 <---- 11 (0) +[0]: 99 <---- 32 (0) +[0]: 100 <---- 12 (0) +[0]: 100 <---- 24 (0) +[0]: 101 <---- 13 (0) +[0]: 101 <---- 26 (0) +[0]: 102 <---- 15 (0) +[0]: 102 <---- 28 (0) +[0]: 103 <---- 25 (0) +[0]: 103 <---- 27 (0) +[0]: 104 <---- 26 (0) +[0]: 104 <---- 28 (0) +[0]: 105 <---- 29 (0) +[0]: 105 <---- 25 (0) +[0]: 106 <---- 29 (0) +[0]: 106 <---- 26 (0) +[0]: 107 <---- 31 (0) +[0]: 107 <---- 27 (0) +[0]: 108 <---- 31 (0) +[0]: 108 <---- 28 (0) +[0]: 109 <---- 29 (0) +[0]: 109 <---- 31 (0) +[0]: 110 <---- 30 (0) +[0]: 110 <---- 32 (0) +[0]: 111 <---- 25 (0) +[0]: 111 <---- 26 (0) +[0]: 112 <---- 27 (0) +[0]: 112 <---- 28 (0) +[0]: 113 <---- 29 (0) +[0]: 113 <---- 29 (0) +[0]: 114 <---- 30 (0) +[0]: 114 <---- 30 (0) +[0]: 115 <---- 31 (0) +[0]: 115 <---- 31 (0) +[0]: 116 <---- 32 (0) +[0]: 116 <---- 32 (0) +[0]: 117 <---- 17 (0) +[0]: 117 <---- 18 (0) +[0]: 118 <---- 29 (0) +[0]: 118 <---- 30 (0) +[0]: 119 <---- 19 (0) +[0]: 119 <---- 20 (0) +[0]: 120 <---- 21 (0) +[0]: 120 <---- 22 (0) +[0]: 121 <---- 31 (0) +[0]: 121 <---- 32 (0) +[0]: 122 <---- 23 (0) +[0]: 122 <---- 24 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 7) dim 3 offset 0 0. 0. 0. + ( 8) dim 3 offset 3 1. 0. 0. + ( 9) dim 3 offset 6 2. 0. 0. + ( 10) dim 3 offset 9 0. 1. 0. + ( 11) dim 3 offset 12 1. 1. 0. + ( 12) dim 3 offset 15 2. 1. 0. + ( 13) dim 3 offset 18 0. 0. 2. + ( 14) dim 3 offset 21 2. 0. 2. + ( 15) dim 3 offset 24 0. 1. 2. + ( 16) dim 3 offset 27 2. 1. 2. + ( 17) dim 3 offset 30 0. 0. 1. + ( 18) dim 3 offset 33 0. 0. 1. + ( 19) dim 3 offset 36 2. 0. 1. + ( 20) dim 3 offset 39 2. 0. 1. + ( 21) dim 3 offset 42 0. 1. 1. + ( 22) dim 3 offset 45 0. 1. 1. + ( 23) dim 3 offset 48 2. 1. 1. + ( 24) dim 3 offset 51 2. 1. 1. + ( 25) dim 3 offset 54 1. 0. 2. + ( 26) dim 3 offset 57 1. 0. 2. + ( 27) dim 3 offset 60 1. 1. 2. + ( 28) dim 3 offset 63 1. 1. 2. + ( 29) dim 3 offset 66 1. 0. 1. + ( 30) dim 3 offset 69 1. 0. 1. + ( 31) dim 3 offset 72 1. 1. 1. + ( 32) dim 3 offset 75 1. 1. 1. 
+Labels: +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 2 (7) +[0]: 3 (7) +[0]: 4 (10) +[0]: 5 (10) +[0]: 6 (10) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 25 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 29 (0) +[0]: 30 (0) +[0]: 31 (0) +[0]: 32 (0) +[0]: 111 (2) +[0]: 112 (2) +[0]: 113 (2) +[0]: 114 (2) +[0]: 115 (2) +[0]: 116 (2) +[0]: 117 (2) +[0]: 118 (2) +[0]: 119 (2) +[0]: 120 (2) +[0]: 121 (2) +[0]: 122 (2) +[0]: 33 (4) +[0]: 34 (4) +[0]: 35 (4) +[0]: 36 (4) +[0]: 37 (4) +[0]: 38 (4) +[0]: 39 (4) +[0]: 40 (4) +[0]: 41 (4) +[0]: 42 (4) +[0]: 43 (4) +[0]: 44 (4) +[0]: 45 (4) +[0]: 46 (4) +[0]: 47 (4) +[0]: 48 (4) +[0]: 49 (4) +[0]: 50 (4) +[0]: 51 (4) +[0]: 52 (4) +[0]: 53 (4) +[0]: 54 (4) +[0]: 55 (4) +[0]: 56 (5) +[0]: 57 (5) +[0]: 58 (5) +[0]: 59 (5) +[0]: 60 (5) +[0]: 61 (5) +[0]: 62 (5) +[0]: 63 (5) +[0]: 64 (5) +[0]: 65 (5) +[0]: 66 (5) +[0]: 67 (5) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +[0]: 75 (1) +[0]: 76 (1) +[0]: 77 (1) +[0]: 78 (1) +[0]: 79 (1) +[0]: 80 (1) +[0]: 81 (1) +[0]: 82 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 109 (1) +[0]: 110 (1) +Label 'marker': +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +[0]: 75 (1) +[0]: 76 (1) +[0]: 77 (1) +[0]: 78 (1) +[0]: 79 (1) +[0]: 80 (1) +[0]: 81 (1) +[0]: 82 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 111 (1) +[0]: 112 (1) +[0]: 113 (1) +[0]: 114 (1) +[0]: 115 (1) +[0]: 116 (1) +[0]: 117 (1) +[0]: 118 (1) +[0]: 119 (1) +[0]: 120 (1) +[0]: 121 (1) +[0]: 122 (1) +Label 'Face Sets': +[0]: 33 (6) +[0]: 40 (6) +[0]: 35 (5) +[0]: 41 (5) +[0]: 36 (3) +[0]: 38 (3) +[0]: 42 (3) +[0]: 53 (3) +[0]: 37 (4) +[0]: 39 (4) +[0]: 43 (4) +[0]: 54 (4) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (2) +[0]: 55 (2) +Label 'fault0': +[0]: 4 (2) +[0]: 5 (2) +[0]: 47 (2) +[0]: 48 (2) +[0]: 49 (2) +[0]: 50 (2) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 
(1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 109 (1) +[0]: 110 (1) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 29 (0) +[0]: 30 (0) +[0]: 31 (0) +[0]: 32 (0) +[0]: 113 (0) +[0]: 114 (0) +[0]: 115 (0) +[0]: 116 (0) +[0]: 117 (0) +[0]: 118 (0) +[0]: 119 (0) +[0]: 120 (0) +[0]: 121 (0) +[0]: 122 (0) +[0]: 0 (103) +[0]: 1 (103) +[0]: 33 (102) +[0]: 34 (102) +[0]: 35 (102) +[0]: 36 (102) +[0]: 37 (102) +[0]: 38 (102) +[0]: 39 (102) +[0]: 2 (-103) +[0]: 3 (-103) +[0]: 6 (-102) +[0]: 40 (-102) +[0]: 41 (-102) +[0]: 42 (-102) +[0]: 43 (-102) +[0]: 51 (-102) +[0]: 52 (-102) +[0]: 53 (-102) +[0]: 54 (-102) +[0]: 95 (101) +[0]: 96 (101) +[0]: 97 (101) +[0]: 98 (101) +[0]: 99 (101) +[0]: 100 (101) +[0]: 64 (-101) +[0]: 65 (-101) +[0]: 79 (-101) +[0]: 80 (-101) +[0]: 81 (-101) +[0]: 82 (-101) +[0]: 105 (-101) +[0]: 106 (-101) +[0]: 107 (-101) +[0]: 108 (-101) +Label 'fault1': +[0]: 6 (2) +[0]: 51 (2) +[0]: 52 (2) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 25 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 111 (0) +[0]: 112 (0) +[0]: 0 (103) +[0]: 3 (103) +[0]: 4 (102) +[0]: 34 (102) +[0]: 36 (102) +[0]: 37 (102) +[0]: 47 (102) +[0]: 48 (102) +[0]: 53 (102) +[0]: 54 (102) +[0]: 55 (102) +[0]: 1 (-103) +[0]: 2 (-103) +[0]: 5 (-102) +[0]: 38 (-102) +[0]: 39 (-102) +[0]: 42 (-102) +[0]: 43 (-102) +[0]: 46 (-102) +[0]: 49 (-102) +[0]: 50 (-102) +[0]: 56 (101) +[0]: 58 (101) +[0]: 83 (101) +[0]: 84 (101) +[0]: 87 (101) +[0]: 88 (101) +[0]: 96 (101) +[0]: 99 (101) +[0]: 101 (101) +[0]: 102 (101) +[0]: 57 (-101) +[0]: 59 (-101) +[0]: 72 (-101) +[0]: 73 (-101) +[0]: 85 (-101) +[0]: 86 (-101) +[0]: 89 (-101) +[0]: 90 (-101) +[0]: 61 (201) +[0]: 66 (201) +[0]: 67 (201) +[0]: 109 (201) +[0]: 110 (201) +[0]: 29 (200) +[0]: 30 (200) +[0]: 31 (200) +[0]: 32 (200) +[0]: 113 (200) +[0]: 114 (200) +[0]: 115 (200) +[0]: 116 (200) +[0]: 118 (200) +[0]: 121 (200) diff --git a/src/dm/impls/plex/tests/output/ex69_hex_4.out b/src/dm/impls/plex/tests/output/ex69_hex_4.out new file mode 100644 index 00000000000..ad10501b420 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_hex_4.out @@ -0,0 +1,3715 @@ +DM Object: box (f0_) 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 5 +[0]: 8 ----> 76 +[0]: 8 ----> 100 +[0]: 8 ----> 115 +[0]: 9 ----> 76 +[0]: 9 ----> 77 +[0]: 9 ----> 101 +[0]: 9 ----> 117 +[0]: 10 ----> 77 +[0]: 10 ----> 78 +[0]: 10 ----> 102 +[0]: 10 ----> 119 +[0]: 11 ----> 78 +[0]: 11 ----> 79 +[0]: 11 ----> 103 +[0]: 11 ----> 121 +[0]: 12 ----> 79 +[0]: 12 ----> 104 +[0]: 12 ----> 123 +[0]: 13 ----> 80 +[0]: 13 ----> 100 +[0]: 13 ----> 125 +[0]: 14 ----> 80 +[0]: 14 ----> 81 +[0]: 14 ----> 101 +[0]: 14 ----> 127 +[0]: 15 ----> 81 +[0]: 15 ----> 82 +[0]: 15 ----> 102 +[0]: 15 ----> 129 +[0]: 16 ----> 82 +[0]: 16 ----> 83 +[0]: 16 ----> 103 +[0]: 16 ----> 131 +[0]: 17 ----> 83 +[0]: 17 ----> 104 +[0]: 17 ----> 133 +[0]: 18 ----> 84 +[0]: 18 ----> 105 +[0]: 18 ----> 115 +[0]: 18 ----> 116 +[0]: 19 ----> 84 +[0]: 19 ----> 85 +[0]: 19 ----> 106 +[0]: 19 ----> 117 +[0]: 19 ----> 118 +[0]: 20 ----> 85 +[0]: 20 ----> 86 +[0]: 20 ----> 107 +[0]: 20 ----> 119 +[0]: 20 ----> 120 +[0]: 21 ----> 86 +[0]: 21 ----> 87 +[0]: 21 ----> 108 +[0]: 21 ----> 121 +[0]: 21 ----> 122 +[0]: 22 ----> 87 +[0]: 22 ----> 109 +[0]: 22 ----> 123 +[0]: 22 ----> 124 +[0]: 23 ----> 88 +[0]: 23 ----> 105 +[0]: 23 ----> 125 +[0]: 23 
----> 126 +[0]: 24 ----> 88 +[0]: 24 ----> 89 +[0]: 24 ----> 106 +[0]: 24 ----> 127 +[0]: 24 ----> 128 +[0]: 25 ----> 89 +[0]: 25 ----> 90 +[0]: 25 ----> 107 +[0]: 25 ----> 129 +[0]: 25 ----> 130 +[0]: 26 ----> 90 +[0]: 26 ----> 91 +[0]: 26 ----> 108 +[0]: 26 ----> 131 +[0]: 26 ----> 132 +[0]: 27 ----> 91 +[0]: 27 ----> 109 +[0]: 27 ----> 133 +[0]: 27 ----> 134 +[0]: 28 ----> 92 +[0]: 28 ----> 110 +[0]: 28 ----> 116 +[0]: 29 ----> 92 +[0]: 29 ----> 93 +[0]: 29 ----> 111 +[0]: 29 ----> 118 +[0]: 30 ----> 93 +[0]: 30 ----> 94 +[0]: 30 ----> 112 +[0]: 30 ----> 120 +[0]: 31 ----> 94 +[0]: 31 ----> 95 +[0]: 31 ----> 113 +[0]: 31 ----> 122 +[0]: 32 ----> 95 +[0]: 32 ----> 114 +[0]: 32 ----> 124 +[0]: 33 ----> 96 +[0]: 33 ----> 110 +[0]: 33 ----> 126 +[0]: 34 ----> 96 +[0]: 34 ----> 97 +[0]: 34 ----> 111 +[0]: 34 ----> 128 +[0]: 35 ----> 97 +[0]: 35 ----> 98 +[0]: 35 ----> 112 +[0]: 35 ----> 130 +[0]: 36 ----> 98 +[0]: 36 ----> 99 +[0]: 36 ----> 113 +[0]: 36 ----> 132 +[0]: 37 ----> 99 +[0]: 37 ----> 114 +[0]: 37 ----> 134 +[0]: 38 ----> 0 +[0]: 39 ----> 0 +[0]: 39 ----> 1 +[0]: 40 ----> 1 +[0]: 40 ----> 2 +[0]: 41 ----> 2 +[0]: 41 ----> 3 +[0]: 42 ----> 3 +[0]: 43 ----> 4 +[0]: 44 ----> 4 +[0]: 44 ----> 5 +[0]: 45 ----> 5 +[0]: 45 ----> 6 +[0]: 46 ----> 6 +[0]: 46 ----> 7 +[0]: 47 ----> 7 +[0]: 48 ----> 0 +[0]: 49 ----> 0 +[0]: 50 ----> 1 +[0]: 51 ----> 1 +[0]: 52 ----> 2 +[0]: 53 ----> 2 +[0]: 54 ----> 3 +[0]: 55 ----> 3 +[0]: 56 ----> 4 +[0]: 57 ----> 4 +[0]: 58 ----> 5 +[0]: 59 ----> 5 +[0]: 60 ----> 6 +[0]: 61 ----> 6 +[0]: 62 ----> 7 +[0]: 63 ----> 7 +[0]: 64 ----> 0 +[0]: 65 ----> 0 +[0]: 65 ----> 4 +[0]: 66 ----> 4 +[0]: 67 ----> 1 +[0]: 68 ----> 1 +[0]: 68 ----> 5 +[0]: 69 ----> 5 +[0]: 70 ----> 2 +[0]: 71 ----> 2 +[0]: 71 ----> 6 +[0]: 72 ----> 6 +[0]: 73 ----> 3 +[0]: 74 ----> 3 +[0]: 74 ----> 7 +[0]: 75 ----> 7 +[0]: 76 ----> 48 +[0]: 76 ----> 64 +[0]: 77 ----> 50 +[0]: 77 ----> 67 +[0]: 78 ----> 52 +[0]: 78 ----> 70 +[0]: 79 ----> 54 +[0]: 79 ----> 73 +[0]: 80 ----> 49 +[0]: 80 ----> 64 +[0]: 81 ----> 51 +[0]: 81 ----> 67 +[0]: 82 ----> 53 +[0]: 82 ----> 70 +[0]: 83 ----> 55 +[0]: 83 ----> 73 +[0]: 84 ----> 48 +[0]: 84 ----> 56 +[0]: 84 ----> 65 +[0]: 85 ----> 50 +[0]: 85 ----> 58 +[0]: 85 ----> 68 +[0]: 86 ----> 52 +[0]: 86 ----> 60 +[0]: 86 ----> 71 +[0]: 87 ----> 54 +[0]: 87 ----> 62 +[0]: 87 ----> 74 +[0]: 88 ----> 49 +[0]: 88 ----> 57 +[0]: 88 ----> 65 +[0]: 89 ----> 51 +[0]: 89 ----> 59 +[0]: 89 ----> 68 +[0]: 90 ----> 53 +[0]: 90 ----> 61 +[0]: 90 ----> 71 +[0]: 91 ----> 55 +[0]: 91 ----> 63 +[0]: 91 ----> 74 +[0]: 92 ----> 56 +[0]: 92 ----> 66 +[0]: 93 ----> 58 +[0]: 93 ----> 69 +[0]: 94 ----> 60 +[0]: 94 ----> 72 +[0]: 95 ----> 62 +[0]: 95 ----> 75 +[0]: 96 ----> 57 +[0]: 96 ----> 66 +[0]: 97 ----> 59 +[0]: 97 ----> 69 +[0]: 98 ----> 61 +[0]: 98 ----> 72 +[0]: 99 ----> 63 +[0]: 99 ----> 75 +[0]: 100 ----> 38 +[0]: 100 ----> 64 +[0]: 101 ----> 39 +[0]: 101 ----> 64 +[0]: 101 ----> 67 +[0]: 102 ----> 40 +[0]: 102 ----> 67 +[0]: 102 ----> 70 +[0]: 103 ----> 41 +[0]: 103 ----> 70 +[0]: 103 ----> 73 +[0]: 104 ----> 42 +[0]: 104 ----> 73 +[0]: 105 ----> 38 +[0]: 105 ----> 43 +[0]: 105 ----> 65 +[0]: 106 ----> 39 +[0]: 106 ----> 44 +[0]: 106 ----> 65 +[0]: 106 ----> 68 +[0]: 107 ----> 40 +[0]: 107 ----> 45 +[0]: 107 ----> 68 +[0]: 107 ----> 71 +[0]: 108 ----> 41 +[0]: 108 ----> 46 +[0]: 108 ----> 71 +[0]: 108 ----> 74 +[0]: 109 ----> 42 +[0]: 109 ----> 47 +[0]: 109 ----> 74 +[0]: 110 ----> 43 +[0]: 110 ----> 66 +[0]: 111 ----> 44 +[0]: 111 ----> 66 +[0]: 111 ----> 69 +[0]: 112 
----> 45 +[0]: 112 ----> 69 +[0]: 112 ----> 72 +[0]: 113 ----> 46 +[0]: 113 ----> 72 +[0]: 113 ----> 75 +[0]: 114 ----> 47 +[0]: 114 ----> 75 +[0]: 115 ----> 38 +[0]: 115 ----> 48 +[0]: 116 ----> 43 +[0]: 116 ----> 56 +[0]: 117 ----> 39 +[0]: 117 ----> 48 +[0]: 117 ----> 50 +[0]: 118 ----> 44 +[0]: 118 ----> 56 +[0]: 118 ----> 58 +[0]: 119 ----> 40 +[0]: 119 ----> 50 +[0]: 119 ----> 52 +[0]: 120 ----> 45 +[0]: 120 ----> 58 +[0]: 120 ----> 60 +[0]: 121 ----> 41 +[0]: 121 ----> 52 +[0]: 121 ----> 54 +[0]: 122 ----> 46 +[0]: 122 ----> 60 +[0]: 122 ----> 62 +[0]: 123 ----> 42 +[0]: 123 ----> 54 +[0]: 124 ----> 47 +[0]: 124 ----> 62 +[0]: 125 ----> 38 +[0]: 125 ----> 49 +[0]: 126 ----> 43 +[0]: 126 ----> 57 +[0]: 127 ----> 39 +[0]: 127 ----> 49 +[0]: 127 ----> 51 +[0]: 128 ----> 44 +[0]: 128 ----> 57 +[0]: 128 ----> 59 +[0]: 129 ----> 40 +[0]: 129 ----> 51 +[0]: 129 ----> 53 +[0]: 130 ----> 45 +[0]: 130 ----> 59 +[0]: 130 ----> 61 +[0]: 131 ----> 41 +[0]: 131 ----> 53 +[0]: 131 ----> 55 +[0]: 132 ----> 46 +[0]: 132 ----> 61 +[0]: 132 ----> 63 +[0]: 133 ----> 42 +[0]: 133 ----> 55 +[0]: 134 ----> 47 +[0]: 134 ----> 63 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 64 (-2) +[0]: 0 <---- 65 (0) +[0]: 0 <---- 48 (0) +[0]: 0 <---- 49 (-3) +[0]: 0 <---- 39 (0) +[0]: 0 <---- 38 (-2) +[0]: 1 <---- 67 (-2) +[0]: 1 <---- 68 (0) +[0]: 1 <---- 50 (0) +[0]: 1 <---- 51 (-3) +[0]: 1 <---- 40 (0) +[0]: 1 <---- 39 (-2) +[0]: 2 <---- 70 (-2) +[0]: 2 <---- 71 (0) +[0]: 2 <---- 52 (0) +[0]: 2 <---- 53 (-3) +[0]: 2 <---- 41 (0) +[0]: 2 <---- 40 (-2) +[0]: 3 <---- 73 (-2) +[0]: 3 <---- 74 (0) +[0]: 3 <---- 54 (0) +[0]: 3 <---- 55 (-3) +[0]: 3 <---- 42 (0) +[0]: 3 <---- 41 (-2) +[0]: 4 <---- 65 (-2) +[0]: 4 <---- 66 (0) +[0]: 4 <---- 56 (0) +[0]: 4 <---- 57 (-3) +[0]: 4 <---- 44 (0) +[0]: 4 <---- 43 (-2) +[0]: 5 <---- 68 (-2) +[0]: 5 <---- 69 (0) +[0]: 5 <---- 58 (0) +[0]: 5 <---- 59 (-3) +[0]: 5 <---- 45 (0) +[0]: 5 <---- 44 (-2) +[0]: 6 <---- 71 (-2) +[0]: 6 <---- 72 (0) +[0]: 6 <---- 60 (0) +[0]: 6 <---- 61 (-3) +[0]: 6 <---- 46 (0) +[0]: 6 <---- 45 (-2) +[0]: 7 <---- 74 (-2) +[0]: 7 <---- 75 (0) +[0]: 7 <---- 62 (0) +[0]: 7 <---- 63 (-3) +[0]: 7 <---- 47 (0) +[0]: 7 <---- 46 (-2) +[0]: 38 <---- 100 (0) +[0]: 38 <---- 125 (0) +[0]: 38 <---- 105 (-1) +[0]: 38 <---- 115 (-1) +[0]: 39 <---- 101 (0) +[0]: 39 <---- 127 (0) +[0]: 39 <---- 106 (-1) +[0]: 39 <---- 117 (-1) +[0]: 40 <---- 102 (0) +[0]: 40 <---- 129 (0) +[0]: 40 <---- 107 (-1) +[0]: 40 <---- 119 (-1) +[0]: 41 <---- 103 (0) +[0]: 41 <---- 131 (0) +[0]: 41 <---- 108 (-1) +[0]: 41 <---- 121 (-1) +[0]: 42 <---- 104 (0) +[0]: 42 <---- 133 (0) +[0]: 42 <---- 109 (-1) +[0]: 42 <---- 123 (-1) +[0]: 43 <---- 105 (0) +[0]: 43 <---- 126 (0) +[0]: 43 <---- 110 (-1) +[0]: 43 <---- 116 (-1) +[0]: 44 <---- 106 (0) +[0]: 44 <---- 128 (0) +[0]: 44 <---- 111 (-1) +[0]: 44 <---- 118 (-1) +[0]: 45 <---- 107 (0) +[0]: 45 <---- 130 (0) +[0]: 45 <---- 112 (-1) +[0]: 45 <---- 120 (-1) +[0]: 46 <---- 108 (0) +[0]: 46 <---- 132 (0) +[0]: 46 <---- 113 (-1) +[0]: 46 <---- 122 (-1) +[0]: 47 <---- 109 (0) +[0]: 47 <---- 134 (0) +[0]: 47 <---- 114 (-1) +[0]: 47 <---- 124 (-1) +[0]: 48 <---- 76 (0) +[0]: 48 <---- 117 (0) +[0]: 48 <---- 84 (-1) +[0]: 48 <---- 115 (-1) +[0]: 49 <---- 80 (0) +[0]: 49 <---- 127 (0) +[0]: 49 <---- 88 (-1) +[0]: 49 <---- 125 (-1) +[0]: 50 <---- 77 (0) +[0]: 50 <---- 119 (0) +[0]: 50 <---- 85 (-1) +[0]: 50 <---- 117 (-1) +[0]: 51 <---- 81 (0) +[0]: 51 <---- 129 (0) +[0]: 51 <---- 89 (-1) +[0]: 51 <---- 127 (-1) +[0]: 52 <---- 78 (0) +[0]: 52 <---- 121 (0) +[0]: 52 
<---- 86 (-1) +[0]: 52 <---- 119 (-1) +[0]: 53 <---- 82 (0) +[0]: 53 <---- 131 (0) +[0]: 53 <---- 90 (-1) +[0]: 53 <---- 129 (-1) +[0]: 54 <---- 79 (0) +[0]: 54 <---- 123 (0) +[0]: 54 <---- 87 (-1) +[0]: 54 <---- 121 (-1) +[0]: 55 <---- 83 (0) +[0]: 55 <---- 133 (0) +[0]: 55 <---- 91 (-1) +[0]: 55 <---- 131 (-1) +[0]: 56 <---- 84 (0) +[0]: 56 <---- 118 (0) +[0]: 56 <---- 92 (-1) +[0]: 56 <---- 116 (-1) +[0]: 57 <---- 88 (0) +[0]: 57 <---- 128 (0) +[0]: 57 <---- 96 (-1) +[0]: 57 <---- 126 (-1) +[0]: 58 <---- 85 (0) +[0]: 58 <---- 120 (0) +[0]: 58 <---- 93 (-1) +[0]: 58 <---- 118 (-1) +[0]: 59 <---- 89 (0) +[0]: 59 <---- 130 (0) +[0]: 59 <---- 97 (-1) +[0]: 59 <---- 128 (-1) +[0]: 60 <---- 86 (0) +[0]: 60 <---- 122 (0) +[0]: 60 <---- 94 (-1) +[0]: 60 <---- 120 (-1) +[0]: 61 <---- 90 (0) +[0]: 61 <---- 132 (0) +[0]: 61 <---- 98 (-1) +[0]: 61 <---- 130 (-1) +[0]: 62 <---- 87 (0) +[0]: 62 <---- 124 (0) +[0]: 62 <---- 95 (-1) +[0]: 62 <---- 122 (-1) +[0]: 63 <---- 91 (0) +[0]: 63 <---- 134 (0) +[0]: 63 <---- 99 (-1) +[0]: 63 <---- 132 (-1) +[0]: 64 <---- 76 (0) +[0]: 64 <---- 101 (0) +[0]: 64 <---- 80 (-1) +[0]: 64 <---- 100 (-1) +[0]: 65 <---- 84 (0) +[0]: 65 <---- 106 (0) +[0]: 65 <---- 88 (-1) +[0]: 65 <---- 105 (-1) +[0]: 66 <---- 92 (0) +[0]: 66 <---- 111 (0) +[0]: 66 <---- 96 (-1) +[0]: 66 <---- 110 (-1) +[0]: 67 <---- 77 (0) +[0]: 67 <---- 102 (0) +[0]: 67 <---- 81 (-1) +[0]: 67 <---- 101 (-1) +[0]: 68 <---- 85 (0) +[0]: 68 <---- 107 (0) +[0]: 68 <---- 89 (-1) +[0]: 68 <---- 106 (-1) +[0]: 69 <---- 93 (0) +[0]: 69 <---- 112 (0) +[0]: 69 <---- 97 (-1) +[0]: 69 <---- 111 (-1) +[0]: 70 <---- 78 (0) +[0]: 70 <---- 103 (0) +[0]: 70 <---- 82 (-1) +[0]: 70 <---- 102 (-1) +[0]: 71 <---- 86 (0) +[0]: 71 <---- 108 (0) +[0]: 71 <---- 90 (-1) +[0]: 71 <---- 107 (-1) +[0]: 72 <---- 94 (0) +[0]: 72 <---- 113 (0) +[0]: 72 <---- 98 (-1) +[0]: 72 <---- 112 (-1) +[0]: 73 <---- 79 (0) +[0]: 73 <---- 104 (0) +[0]: 73 <---- 83 (-1) +[0]: 73 <---- 103 (-1) +[0]: 74 <---- 87 (0) +[0]: 74 <---- 109 (0) +[0]: 74 <---- 91 (-1) +[0]: 74 <---- 108 (-1) +[0]: 75 <---- 95 (0) +[0]: 75 <---- 114 (0) +[0]: 75 <---- 99 (-1) +[0]: 75 <---- 113 (-1) +[0]: 76 <---- 8 (0) +[0]: 76 <---- 9 (0) +[0]: 77 <---- 9 (0) +[0]: 77 <---- 10 (0) +[0]: 78 <---- 10 (0) +[0]: 78 <---- 11 (0) +[0]: 79 <---- 11 (0) +[0]: 79 <---- 12 (0) +[0]: 80 <---- 13 (0) +[0]: 80 <---- 14 (0) +[0]: 81 <---- 14 (0) +[0]: 81 <---- 15 (0) +[0]: 82 <---- 15 (0) +[0]: 82 <---- 16 (0) +[0]: 83 <---- 16 (0) +[0]: 83 <---- 17 (0) +[0]: 84 <---- 18 (0) +[0]: 84 <---- 19 (0) +[0]: 85 <---- 19 (0) +[0]: 85 <---- 20 (0) +[0]: 86 <---- 20 (0) +[0]: 86 <---- 21 (0) +[0]: 87 <---- 21 (0) +[0]: 87 <---- 22 (0) +[0]: 88 <---- 23 (0) +[0]: 88 <---- 24 (0) +[0]: 89 <---- 24 (0) +[0]: 89 <---- 25 (0) +[0]: 90 <---- 25 (0) +[0]: 90 <---- 26 (0) +[0]: 91 <---- 26 (0) +[0]: 91 <---- 27 (0) +[0]: 92 <---- 28 (0) +[0]: 92 <---- 29 (0) +[0]: 93 <---- 29 (0) +[0]: 93 <---- 30 (0) +[0]: 94 <---- 30 (0) +[0]: 94 <---- 31 (0) +[0]: 95 <---- 31 (0) +[0]: 95 <---- 32 (0) +[0]: 96 <---- 33 (0) +[0]: 96 <---- 34 (0) +[0]: 97 <---- 34 (0) +[0]: 97 <---- 35 (0) +[0]: 98 <---- 35 (0) +[0]: 98 <---- 36 (0) +[0]: 99 <---- 36 (0) +[0]: 99 <---- 37 (0) +[0]: 100 <---- 8 (0) +[0]: 100 <---- 13 (0) +[0]: 101 <---- 9 (0) +[0]: 101 <---- 14 (0) +[0]: 102 <---- 10 (0) +[0]: 102 <---- 15 (0) +[0]: 103 <---- 11 (0) +[0]: 103 <---- 16 (0) +[0]: 104 <---- 12 (0) +[0]: 104 <---- 17 (0) +[0]: 105 <---- 18 (0) +[0]: 105 <---- 23 (0) +[0]: 106 <---- 19 (0) +[0]: 106 <---- 24 (0) +[0]: 107 <---- 20 (0) 
+[0]: 107 <---- 25 (0) +[0]: 108 <---- 21 (0) +[0]: 108 <---- 26 (0) +[0]: 109 <---- 22 (0) +[0]: 109 <---- 27 (0) +[0]: 110 <---- 28 (0) +[0]: 110 <---- 33 (0) +[0]: 111 <---- 29 (0) +[0]: 111 <---- 34 (0) +[0]: 112 <---- 30 (0) +[0]: 112 <---- 35 (0) +[0]: 113 <---- 31 (0) +[0]: 113 <---- 36 (0) +[0]: 114 <---- 32 (0) +[0]: 114 <---- 37 (0) +[0]: 115 <---- 8 (0) +[0]: 115 <---- 18 (0) +[0]: 116 <---- 18 (0) +[0]: 116 <---- 28 (0) +[0]: 117 <---- 9 (0) +[0]: 117 <---- 19 (0) +[0]: 118 <---- 19 (0) +[0]: 118 <---- 29 (0) +[0]: 119 <---- 10 (0) +[0]: 119 <---- 20 (0) +[0]: 120 <---- 20 (0) +[0]: 120 <---- 30 (0) +[0]: 121 <---- 11 (0) +[0]: 121 <---- 21 (0) +[0]: 122 <---- 21 (0) +[0]: 122 <---- 31 (0) +[0]: 123 <---- 12 (0) +[0]: 123 <---- 22 (0) +[0]: 124 <---- 22 (0) +[0]: 124 <---- 32 (0) +[0]: 125 <---- 13 (0) +[0]: 125 <---- 23 (0) +[0]: 126 <---- 23 (0) +[0]: 126 <---- 33 (0) +[0]: 127 <---- 14 (0) +[0]: 127 <---- 24 (0) +[0]: 128 <---- 24 (0) +[0]: 128 <---- 34 (0) +[0]: 129 <---- 15 (0) +[0]: 129 <---- 25 (0) +[0]: 130 <---- 25 (0) +[0]: 130 <---- 35 (0) +[0]: 131 <---- 16 (0) +[0]: 131 <---- 26 (0) +[0]: 132 <---- 26 (0) +[0]: 132 <---- 36 (0) +[0]: 133 <---- 17 (0) +[0]: 133 <---- 27 (0) +[0]: 134 <---- 27 (0) +[0]: 134 <---- 37 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 8) dim 3 offset 0 0. 0. 0. + ( 9) dim 3 offset 3 1. 0. 0. + ( 10) dim 3 offset 6 2. 0. 0. + ( 11) dim 3 offset 9 3. 0. 0. + ( 12) dim 3 offset 12 4. 0. 0. + ( 13) dim 3 offset 15 0. 1. 0. + ( 14) dim 3 offset 18 1. 1. 0. + ( 15) dim 3 offset 21 2. 1. 0. + ( 16) dim 3 offset 24 3. 1. 0. + ( 17) dim 3 offset 27 4. 1. 0. + ( 18) dim 3 offset 30 0. 0. 1. + ( 19) dim 3 offset 33 1. 0. 1. + ( 20) dim 3 offset 36 2. 0. 1. + ( 21) dim 3 offset 39 3. 0. 1. + ( 22) dim 3 offset 42 4. 0. 1. + ( 23) dim 3 offset 45 0. 1. 1. + ( 24) dim 3 offset 48 1. 1. 1. + ( 25) dim 3 offset 51 2. 1. 1. + ( 26) dim 3 offset 54 3. 1. 1. + ( 27) dim 3 offset 57 4. 1. 1. + ( 28) dim 3 offset 60 0. 0. 2. + ( 29) dim 3 offset 63 1. 0. 2. + ( 30) dim 3 offset 66 2. 0. 2. + ( 31) dim 3 offset 69 3. 0. 2. + ( 32) dim 3 offset 72 4. 0. 2. + ( 33) dim 3 offset 75 0. 1. 2. + ( 34) dim 3 offset 78 1. 1. 2. + ( 35) dim 3 offset 81 2. 1. 2. + ( 36) dim 3 offset 84 3. 1. 2. + ( 37) dim 3 offset 87 4. 1. 2. 
+Labels: +Label 'marker': +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 47 (1) +[0]: 48 (1) +[0]: 49 (1) +[0]: 50 (1) +[0]: 51 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 54 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 75 (1) +[0]: 76 (1) +[0]: 77 (1) +[0]: 78 (1) +[0]: 79 (1) +[0]: 80 (1) +[0]: 81 (1) +[0]: 82 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 109 (1) +[0]: 110 (1) +[0]: 111 (1) +[0]: 112 (1) +[0]: 113 (1) +[0]: 114 (1) +[0]: 115 (1) +[0]: 116 (1) +[0]: 117 (1) +[0]: 118 (1) +[0]: 119 (1) +[0]: 120 (1) +[0]: 121 (1) +[0]: 122 (1) +[0]: 123 (1) +[0]: 124 (1) +[0]: 125 (1) +[0]: 126 (1) +[0]: 127 (1) +[0]: 128 (1) +[0]: 129 (1) +[0]: 130 (1) +[0]: 131 (1) +[0]: 132 (1) +[0]: 133 (1) +[0]: 134 (1) +Label 'Face Sets': +[0]: 38 (6) +[0]: 43 (6) +[0]: 42 (5) +[0]: 47 (5) +[0]: 48 (3) +[0]: 50 (3) +[0]: 52 (3) +[0]: 54 (3) +[0]: 56 (3) +[0]: 58 (3) +[0]: 60 (3) +[0]: 62 (3) +[0]: 49 (4) +[0]: 51 (4) +[0]: 53 (4) +[0]: 55 (4) +[0]: 57 (4) +[0]: 59 (4) +[0]: 61 (4) +[0]: 63 (4) +[0]: 64 (1) +[0]: 67 (1) +[0]: 70 (1) +[0]: 73 (1) +[0]: 66 (2) +[0]: 69 (2) +[0]: 72 (2) +[0]: 75 (2) +Label 'fault0': +[0]: 65 (2) +[0]: 68 (2) +[0]: 84 (1) +[0]: 85 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 18 (0) +[0]: 19 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 0 (103) +[0]: 1 (103) +[0]: 2 (103) +[0]: 38 (102) +[0]: 39 (102) +[0]: 40 (102) +[0]: 48 (102) +[0]: 49 (102) +[0]: 50 (102) +[0]: 51 (102) +[0]: 52 (102) +[0]: 53 (102) +[0]: 71 (102) +[0]: 4 (-103) +[0]: 5 (-103) +[0]: 6 (-103) +[0]: 43 (-102) +[0]: 44 (-102) +[0]: 45 (-102) +[0]: 56 (-102) +[0]: 57 (-102) +[0]: 58 (-102) +[0]: 59 (-102) +[0]: 60 (-102) +[0]: 61 (-102) +[0]: 86 (101) +[0]: 90 (101) +[0]: 115 (101) +[0]: 117 (101) +[0]: 119 (101) +[0]: 125 (101) +[0]: 127 (101) +[0]: 129 (101) +[0]: 116 (-101) +[0]: 118 (-101) +[0]: 120 (-101) +[0]: 126 (-101) +[0]: 128 (-101) +[0]: 130 (-101) +[0]: 107 (201) +[0]: 20 (200) +[0]: 25 (200) +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 2 (7) +[0]: 3 (7) +[0]: 4 (7) +[0]: 5 (7) +[0]: 6 (7) +[0]: 7 (7) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 25 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 29 (0) +[0]: 30 (0) +[0]: 31 (0) +[0]: 32 (0) +[0]: 33 (0) +[0]: 34 (0) +[0]: 35 (0) +[0]: 36 (0) +[0]: 37 (0) +[0]: 38 (4) +[0]: 39 (4) +[0]: 40 (4) +[0]: 41 (4) +[0]: 42 (4) +[0]: 43 (4) +[0]: 44 (4) +[0]: 45 (4) +[0]: 46 (4) +[0]: 47 (4) +[0]: 48 (4) +[0]: 49 (4) +[0]: 50 (4) +[0]: 51 (4) +[0]: 52 (4) +[0]: 53 (4) +[0]: 54 (4) +[0]: 55 (4) +[0]: 56 (4) +[0]: 57 (4) +[0]: 58 (4) +[0]: 59 (4) +[0]: 60 (4) 
+[0]: 61 (4) +[0]: 62 (4) +[0]: 63 (4) +[0]: 64 (4) +[0]: 65 (4) +[0]: 66 (4) +[0]: 67 (4) +[0]: 68 (4) +[0]: 69 (4) +[0]: 70 (4) +[0]: 71 (4) +[0]: 72 (4) +[0]: 73 (4) +[0]: 74 (4) +[0]: 75 (4) +[0]: 76 (1) +[0]: 77 (1) +[0]: 78 (1) +[0]: 79 (1) +[0]: 80 (1) +[0]: 81 (1) +[0]: 82 (1) +[0]: 83 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 109 (1) +[0]: 110 (1) +[0]: 111 (1) +[0]: 112 (1) +[0]: 113 (1) +[0]: 114 (1) +[0]: 115 (1) +[0]: 116 (1) +[0]: 117 (1) +[0]: 118 (1) +[0]: 119 (1) +[0]: 120 (1) +[0]: 121 (1) +[0]: 122 (1) +[0]: 123 (1) +[0]: 124 (1) +[0]: 125 (1) +[0]: 126 (1) +[0]: 127 (1) +[0]: 128 (1) +[0]: 129 (1) +[0]: 130 (1) +[0]: 131 (1) +[0]: 132 (1) +[0]: 133 (1) +[0]: 134 (1) +Label 'fault1': +[0]: 46 (2) +[0]: 113 (1) +[0]: 122 (1) +[0]: 132 (1) +[0]: 31 (0) +[0]: 36 (0) +[0]: 2 (103) +[0]: 6 (103) +[0]: 41 (102) +[0]: 52 (102) +[0]: 53 (102) +[0]: 60 (102) +[0]: 61 (102) +[0]: 71 (102) +[0]: 72 (102) +[0]: 3 (-103) +[0]: 7 (-103) +[0]: 54 (-102) +[0]: 55 (-102) +[0]: 62 (-102) +[0]: 63 (-102) +[0]: 74 (-102) +[0]: 75 (-102) +[0]: 86 (101) +[0]: 90 (101) +[0]: 94 (101) +[0]: 98 (101) +[0]: 121 (101) +[0]: 131 (101) +[0]: 87 (-101) +[0]: 91 (-101) +[0]: 95 (-101) +[0]: 99 (-101) +[0]: 108 (201) +[0]: 21 (200) +[0]: 26 (200) +DM Object: box (f1_) 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 8 +[0]: 10 ----> 91 +[0]: 10 ----> 111 +[0]: 10 ----> 152 +[0]: 11 ----> 91 +[0]: 11 ----> 92 +[0]: 11 ----> 112 +[0]: 11 ----> 153 +[0]: 12 ----> 92 +[0]: 12 ----> 93 +[0]: 12 ----> 113 +[0]: 12 ----> 125 +[0]: 13 ----> 93 +[0]: 13 ----> 94 +[0]: 13 ----> 114 +[0]: 13 ----> 127 +[0]: 14 ----> 94 +[0]: 14 ----> 115 +[0]: 14 ----> 129 +[0]: 15 ----> 95 +[0]: 15 ----> 111 +[0]: 15 ----> 154 +[0]: 16 ----> 95 +[0]: 16 ----> 96 +[0]: 16 ----> 112 +[0]: 16 ----> 155 +[0]: 17 ----> 96 +[0]: 17 ----> 97 +[0]: 17 ----> 113 +[0]: 17 ----> 133 +[0]: 18 ----> 97 +[0]: 18 ----> 98 +[0]: 18 ----> 114 +[0]: 18 ----> 135 +[0]: 19 ----> 98 +[0]: 19 ----> 115 +[0]: 19 ----> 137 +[0]: 20 ----> 99 +[0]: 20 ----> 100 +[0]: 20 ----> 116 +[0]: 20 ----> 127 +[0]: 20 ----> 128 +[0]: 21 ----> 100 +[0]: 21 ----> 117 +[0]: 21 ----> 129 +[0]: 21 ----> 130 +[0]: 22 ----> 101 +[0]: 22 ----> 102 +[0]: 22 ----> 116 +[0]: 22 ----> 135 +[0]: 22 ----> 136 +[0]: 23 ----> 102 +[0]: 23 ----> 117 +[0]: 23 ----> 137 +[0]: 23 ----> 138 +[0]: 24 ----> 103 +[0]: 24 ----> 118 +[0]: 24 ----> 123 +[0]: 25 ----> 103 +[0]: 25 ----> 104 +[0]: 25 ----> 119 +[0]: 25 ----> 124 +[0]: 26 ----> 104 +[0]: 26 ----> 105 +[0]: 26 ----> 120 +[0]: 26 ----> 126 +[0]: 27 ----> 105 +[0]: 27 ----> 106 +[0]: 27 ----> 121 +[0]: 27 ----> 128 +[0]: 28 ----> 106 +[0]: 28 ----> 122 +[0]: 28 ----> 130 +[0]: 29 ----> 107 +[0]: 29 ----> 118 +[0]: 29 ----> 131 +[0]: 30 ----> 107 +[0]: 30 ----> 108 +[0]: 30 ----> 119 +[0]: 30 ----> 132 +[0]: 31 ----> 108 +[0]: 31 ----> 109 +[0]: 31 ----> 120 +[0]: 31 ----> 134 +[0]: 32 ----> 109 +[0]: 32 ----> 110 +[0]: 32 ----> 121 +[0]: 32 ----> 136 +[0]: 33 ----> 110 +[0]: 33 ----> 122 +[0]: 33 ----> 138 +[0]: 34 ----> 123 +[0]: 34 ----> 139 +[0]: 34 ----> 143 +[0]: 34 ----> 156 +[0]: 35 ----> 140 +[0]: 35 ----> 144 +[0]: 35 ----> 152 +[0]: 35 ----> 156 +[0]: 36 ----> 124 +[0]: 36 ----> 139 +[0]: 36 
----> 145 +[0]: 36 ----> 147 +[0]: 36 ----> 157 +[0]: 37 ----> 140 +[0]: 37 ----> 146 +[0]: 37 ----> 148 +[0]: 37 ----> 153 +[0]: 37 ----> 157 +[0]: 38 ----> 131 +[0]: 38 ----> 141 +[0]: 38 ----> 143 +[0]: 38 ----> 158 +[0]: 39 ----> 142 +[0]: 39 ----> 144 +[0]: 39 ----> 154 +[0]: 39 ----> 158 +[0]: 40 ----> 132 +[0]: 40 ----> 141 +[0]: 40 ----> 145 +[0]: 40 ----> 149 +[0]: 40 ----> 159 +[0]: 41 ----> 142 +[0]: 41 ----> 146 +[0]: 41 ----> 150 +[0]: 41 ----> 155 +[0]: 41 ----> 159 +[0]: 42 ----> 99 +[0]: 42 ----> 125 +[0]: 42 ----> 126 +[0]: 42 ----> 147 +[0]: 42 ----> 148 +[0]: 42 ----> 151 +[0]: 42 ----> 160 +[0]: 42 ----> 160 +[0]: 43 ----> 101 +[0]: 43 ----> 133 +[0]: 43 ----> 134 +[0]: 43 ----> 149 +[0]: 43 ----> 150 +[0]: 43 ----> 151 +[0]: 43 ----> 161 +[0]: 43 ----> 161 +[0]: 44 ----> 0 +[0]: 45 ----> 0 +[0]: 45 ----> 1 +[0]: 46 ----> 0 +[0]: 47 ----> 0 +[0]: 48 ----> 1 +[0]: 49 ----> 1 +[0]: 50 ----> 1 +[0]: 50 ----> 2 +[0]: 51 ----> 2 +[0]: 51 ----> 3 +[0]: 52 ----> 3 +[0]: 53 ----> 4 +[0]: 54 ----> 4 +[0]: 54 ----> 5 +[0]: 55 ----> 5 +[0]: 55 ----> 6 +[0]: 56 ----> 6 +[0]: 56 ----> 7 +[0]: 57 ----> 7 +[0]: 58 ----> 2 +[0]: 59 ----> 2 +[0]: 60 ----> 3 +[0]: 61 ----> 3 +[0]: 62 ----> 4 +[0]: 63 ----> 4 +[0]: 64 ----> 5 +[0]: 65 ----> 5 +[0]: 66 ----> 6 +[0]: 67 ----> 6 +[0]: 68 ----> 7 +[0]: 69 ----> 7 +[0]: 70 ----> 0 +[0]: 71 ----> 4 +[0]: 72 ----> 1 +[0]: 73 ----> 5 +[0]: 74 ----> 2 +[0]: 75 ----> 2 +[0]: 75 ----> 6 +[0]: 76 ----> 6 +[0]: 77 ----> 3 +[0]: 78 ----> 3 +[0]: 78 ----> 7 +[0]: 79 ----> 7 +[0]: 80 ----> 4 +[0]: 80 ----> 8 +[0]: 81 ----> 0 +[0]: 81 ----> 8 +[0]: 82 ----> 5 +[0]: 82 ----> 9 +[0]: 83 ----> 1 +[0]: 83 ----> 9 +[0]: 84 ----> 8 +[0]: 85 ----> 8 +[0]: 86 ----> 8 +[0]: 87 ----> 8 +[0]: 87 ----> 9 +[0]: 88 ----> 9 +[0]: 89 ----> 9 +[0]: 90 ----> 9 +[0]: 91 ----> 46 +[0]: 91 ----> 70 +[0]: 92 ----> 48 +[0]: 92 ----> 72 +[0]: 93 ----> 58 +[0]: 93 ----> 74 +[0]: 94 ----> 60 +[0]: 94 ----> 77 +[0]: 95 ----> 47 +[0]: 95 ----> 70 +[0]: 96 ----> 49 +[0]: 96 ----> 72 +[0]: 97 ----> 59 +[0]: 97 ----> 74 +[0]: 98 ----> 61 +[0]: 98 ----> 77 +[0]: 99 ----> 58 +[0]: 99 ----> 66 +[0]: 99 ----> 75 +[0]: 100 ----> 60 +[0]: 100 ----> 68 +[0]: 100 ----> 78 +[0]: 101 ----> 59 +[0]: 101 ----> 67 +[0]: 101 ----> 75 +[0]: 102 ----> 61 +[0]: 102 ----> 69 +[0]: 102 ----> 78 +[0]: 103 ----> 62 +[0]: 103 ----> 71 +[0]: 104 ----> 64 +[0]: 104 ----> 73 +[0]: 105 ----> 66 +[0]: 105 ----> 76 +[0]: 106 ----> 68 +[0]: 106 ----> 79 +[0]: 107 ----> 63 +[0]: 107 ----> 71 +[0]: 108 ----> 65 +[0]: 108 ----> 73 +[0]: 109 ----> 67 +[0]: 109 ----> 76 +[0]: 110 ----> 69 +[0]: 110 ----> 79 +[0]: 111 ----> 44 +[0]: 111 ----> 70 +[0]: 112 ----> 45 +[0]: 112 ----> 70 +[0]: 112 ----> 72 +[0]: 113 ----> 50 +[0]: 113 ----> 72 +[0]: 113 ----> 74 +[0]: 114 ----> 51 +[0]: 114 ----> 74 +[0]: 114 ----> 77 +[0]: 115 ----> 52 +[0]: 115 ----> 77 +[0]: 116 ----> 51 +[0]: 116 ----> 56 +[0]: 116 ----> 75 +[0]: 116 ----> 78 +[0]: 117 ----> 52 +[0]: 117 ----> 57 +[0]: 117 ----> 78 +[0]: 118 ----> 53 +[0]: 118 ----> 71 +[0]: 119 ----> 54 +[0]: 119 ----> 71 +[0]: 119 ----> 73 +[0]: 120 ----> 55 +[0]: 120 ----> 73 +[0]: 120 ----> 76 +[0]: 121 ----> 56 +[0]: 121 ----> 76 +[0]: 121 ----> 79 +[0]: 122 ----> 57 +[0]: 122 ----> 79 +[0]: 123 ----> 53 +[0]: 123 ----> 62 +[0]: 124 ----> 54 +[0]: 124 ----> 62 +[0]: 124 ----> 64 +[0]: 125 ----> 48 +[0]: 125 ----> 50 +[0]: 125 ----> 58 +[0]: 126 ----> 55 +[0]: 126 ----> 64 +[0]: 126 ----> 66 +[0]: 127 ----> 51 +[0]: 127 ----> 58 +[0]: 127 ----> 60 +[0]: 128 ----> 56 +[0]: 128 ----> 
66 +[0]: 128 ----> 68 +[0]: 129 ----> 52 +[0]: 129 ----> 60 +[0]: 130 ----> 57 +[0]: 130 ----> 68 +[0]: 131 ----> 53 +[0]: 131 ----> 63 +[0]: 132 ----> 54 +[0]: 132 ----> 63 +[0]: 132 ----> 65 +[0]: 133 ----> 49 +[0]: 133 ----> 50 +[0]: 133 ----> 59 +[0]: 134 ----> 55 +[0]: 134 ----> 65 +[0]: 134 ----> 67 +[0]: 135 ----> 51 +[0]: 135 ----> 59 +[0]: 135 ----> 61 +[0]: 136 ----> 56 +[0]: 136 ----> 67 +[0]: 136 ----> 69 +[0]: 137 ----> 52 +[0]: 137 ----> 61 +[0]: 138 ----> 57 +[0]: 138 ----> 69 +[0]: 139 ----> 62 +[0]: 139 ----> 80 +[0]: 139 ----> 84 +[0]: 140 ----> 46 +[0]: 140 ----> 81 +[0]: 140 ----> 84 +[0]: 141 ----> 63 +[0]: 141 ----> 80 +[0]: 141 ----> 85 +[0]: 142 ----> 47 +[0]: 142 ----> 81 +[0]: 142 ----> 85 +[0]: 143 ----> 53 +[0]: 143 ----> 80 +[0]: 143 ----> 86 +[0]: 144 ----> 44 +[0]: 144 ----> 81 +[0]: 144 ----> 86 +[0]: 145 ----> 54 +[0]: 145 ----> 80 +[0]: 145 ----> 82 +[0]: 145 ----> 87 +[0]: 146 ----> 45 +[0]: 146 ----> 81 +[0]: 146 ----> 83 +[0]: 146 ----> 87 +[0]: 147 ----> 64 +[0]: 147 ----> 82 +[0]: 147 ----> 88 +[0]: 148 ----> 48 +[0]: 148 ----> 83 +[0]: 148 ----> 88 +[0]: 149 ----> 65 +[0]: 149 ----> 82 +[0]: 149 ----> 89 +[0]: 150 ----> 49 +[0]: 150 ----> 83 +[0]: 150 ----> 89 +[0]: 151 ----> 50 +[0]: 151 ----> 55 +[0]: 151 ----> 75 +[0]: 151 ----> 82 +[0]: 151 ----> 83 +[0]: 151 ----> 90 +[0]: 151 ----> 90 +[0]: 152 ----> 44 +[0]: 152 ----> 46 +[0]: 153 ----> 45 +[0]: 153 ----> 46 +[0]: 153 ----> 48 +[0]: 154 ----> 44 +[0]: 154 ----> 47 +[0]: 155 ----> 45 +[0]: 155 ----> 47 +[0]: 155 ----> 49 +[0]: 156 ----> 84 +[0]: 156 ----> 86 +[0]: 157 ----> 84 +[0]: 157 ----> 87 +[0]: 157 ----> 88 +[0]: 158 ----> 85 +[0]: 158 ----> 86 +[0]: 159 ----> 85 +[0]: 159 ----> 87 +[0]: 159 ----> 89 +[0]: 160 ----> 88 +[0]: 160 ----> 90 +[0]: 161 ----> 89 +[0]: 161 ----> 90 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 70 (-2) +[0]: 0 <---- 81 (0) +[0]: 0 <---- 46 (0) +[0]: 0 <---- 47 (-3) +[0]: 0 <---- 45 (0) +[0]: 0 <---- 44 (-2) +[0]: 1 <---- 72 (-2) +[0]: 1 <---- 83 (0) +[0]: 1 <---- 48 (0) +[0]: 1 <---- 49 (-3) +[0]: 1 <---- 50 (0) +[0]: 1 <---- 45 (-2) +[0]: 2 <---- 74 (-2) +[0]: 2 <---- 75 (0) +[0]: 2 <---- 58 (0) +[0]: 2 <---- 59 (-3) +[0]: 2 <---- 51 (0) +[0]: 2 <---- 50 (-2) +[0]: 3 <---- 77 (-2) +[0]: 3 <---- 78 (0) +[0]: 3 <---- 60 (0) +[0]: 3 <---- 61 (-3) +[0]: 3 <---- 52 (0) +[0]: 3 <---- 51 (-2) +[0]: 4 <---- 80 (-2) +[0]: 4 <---- 71 (0) +[0]: 4 <---- 62 (0) +[0]: 4 <---- 63 (-3) +[0]: 4 <---- 54 (0) +[0]: 4 <---- 53 (-2) +[0]: 5 <---- 82 (-2) +[0]: 5 <---- 73 (0) +[0]: 5 <---- 64 (0) +[0]: 5 <---- 65 (-3) +[0]: 5 <---- 55 (0) +[0]: 5 <---- 54 (-2) +[0]: 6 <---- 75 (-2) +[0]: 6 <---- 76 (0) +[0]: 6 <---- 66 (0) +[0]: 6 <---- 67 (-3) +[0]: 6 <---- 56 (0) +[0]: 6 <---- 55 (-2) +[0]: 7 <---- 78 (-2) +[0]: 7 <---- 79 (0) +[0]: 7 <---- 68 (0) +[0]: 7 <---- 69 (-3) +[0]: 7 <---- 57 (0) +[0]: 7 <---- 56 (-2) +[0]: 8 <---- 80 (0) +[0]: 8 <---- 81 (0) +[0]: 8 <---- 84 (0) +[0]: 8 <---- 87 (0) +[0]: 8 <---- 85 (-1) +[0]: 8 <---- 86 (-1) +[0]: 9 <---- 82 (0) +[0]: 9 <---- 83 (0) +[0]: 9 <---- 88 (0) +[0]: 9 <---- 90 (0) +[0]: 9 <---- 89 (-1) +[0]: 9 <---- 87 (-1) +[0]: 44 <---- 111 (0) +[0]: 44 <---- 154 (0) +[0]: 44 <---- 144 (-1) +[0]: 44 <---- 152 (-1) +[0]: 45 <---- 112 (0) +[0]: 45 <---- 155 (0) +[0]: 45 <---- 146 (-1) +[0]: 45 <---- 153 (-1) +[0]: 46 <---- 91 (0) +[0]: 46 <---- 153 (0) +[0]: 46 <---- 140 (-1) +[0]: 46 <---- 152 (-1) +[0]: 47 <---- 95 (0) +[0]: 47 <---- 155 (0) +[0]: 47 <---- 142 (-1) +[0]: 47 <---- 154 (-1) +[0]: 48 <---- 92 (0) +[0]: 48 <---- 125 (0) +[0]: 48 
<---- 148 (-1) +[0]: 48 <---- 153 (-1) +[0]: 49 <---- 96 (0) +[0]: 49 <---- 133 (0) +[0]: 49 <---- 150 (-1) +[0]: 49 <---- 155 (-1) +[0]: 50 <---- 113 (0) +[0]: 50 <---- 133 (0) +[0]: 50 <---- 151 (-1) +[0]: 50 <---- 125 (-1) +[0]: 51 <---- 114 (0) +[0]: 51 <---- 135 (0) +[0]: 51 <---- 116 (-1) +[0]: 51 <---- 127 (-1) +[0]: 52 <---- 115 (0) +[0]: 52 <---- 137 (0) +[0]: 52 <---- 117 (-1) +[0]: 52 <---- 129 (-1) +[0]: 53 <---- 143 (0) +[0]: 53 <---- 131 (0) +[0]: 53 <---- 118 (-1) +[0]: 53 <---- 123 (-1) +[0]: 54 <---- 145 (0) +[0]: 54 <---- 132 (0) +[0]: 54 <---- 119 (-1) +[0]: 54 <---- 124 (-1) +[0]: 55 <---- 151 (0) +[0]: 55 <---- 134 (0) +[0]: 55 <---- 120 (-1) +[0]: 55 <---- 126 (-1) +[0]: 56 <---- 116 (0) +[0]: 56 <---- 136 (0) +[0]: 56 <---- 121 (-1) +[0]: 56 <---- 128 (-1) +[0]: 57 <---- 117 (0) +[0]: 57 <---- 138 (0) +[0]: 57 <---- 122 (-1) +[0]: 57 <---- 130 (-1) +[0]: 58 <---- 93 (0) +[0]: 58 <---- 127 (0) +[0]: 58 <---- 99 (-1) +[0]: 58 <---- 125 (-1) +[0]: 59 <---- 97 (0) +[0]: 59 <---- 135 (0) +[0]: 59 <---- 101 (-1) +[0]: 59 <---- 133 (-1) +[0]: 60 <---- 94 (0) +[0]: 60 <---- 129 (0) +[0]: 60 <---- 100 (-1) +[0]: 60 <---- 127 (-1) +[0]: 61 <---- 98 (0) +[0]: 61 <---- 137 (0) +[0]: 61 <---- 102 (-1) +[0]: 61 <---- 135 (-1) +[0]: 62 <---- 139 (0) +[0]: 62 <---- 124 (0) +[0]: 62 <---- 103 (-1) +[0]: 62 <---- 123 (-1) +[0]: 63 <---- 141 (0) +[0]: 63 <---- 132 (0) +[0]: 63 <---- 107 (-1) +[0]: 63 <---- 131 (-1) +[0]: 64 <---- 147 (0) +[0]: 64 <---- 126 (0) +[0]: 64 <---- 104 (-1) +[0]: 64 <---- 124 (-1) +[0]: 65 <---- 149 (0) +[0]: 65 <---- 134 (0) +[0]: 65 <---- 108 (-1) +[0]: 65 <---- 132 (-1) +[0]: 66 <---- 99 (0) +[0]: 66 <---- 128 (0) +[0]: 66 <---- 105 (-1) +[0]: 66 <---- 126 (-1) +[0]: 67 <---- 101 (0) +[0]: 67 <---- 136 (0) +[0]: 67 <---- 109 (-1) +[0]: 67 <---- 134 (-1) +[0]: 68 <---- 100 (0) +[0]: 68 <---- 130 (0) +[0]: 68 <---- 106 (-1) +[0]: 68 <---- 128 (-1) +[0]: 69 <---- 102 (0) +[0]: 69 <---- 138 (0) +[0]: 69 <---- 110 (-1) +[0]: 69 <---- 136 (-1) +[0]: 70 <---- 91 (0) +[0]: 70 <---- 112 (0) +[0]: 70 <---- 95 (-1) +[0]: 70 <---- 111 (-1) +[0]: 71 <---- 103 (0) +[0]: 71 <---- 119 (0) +[0]: 71 <---- 107 (-1) +[0]: 71 <---- 118 (-1) +[0]: 72 <---- 92 (0) +[0]: 72 <---- 113 (0) +[0]: 72 <---- 96 (-1) +[0]: 72 <---- 112 (-1) +[0]: 73 <---- 104 (0) +[0]: 73 <---- 120 (0) +[0]: 73 <---- 108 (-1) +[0]: 73 <---- 119 (-1) +[0]: 74 <---- 93 (0) +[0]: 74 <---- 114 (0) +[0]: 74 <---- 97 (-1) +[0]: 74 <---- 113 (-1) +[0]: 75 <---- 99 (0) +[0]: 75 <---- 116 (0) +[0]: 75 <---- 101 (-1) +[0]: 75 <---- 151 (-1) +[0]: 76 <---- 105 (0) +[0]: 76 <---- 121 (0) +[0]: 76 <---- 109 (-1) +[0]: 76 <---- 120 (-1) +[0]: 77 <---- 94 (0) +[0]: 77 <---- 115 (0) +[0]: 77 <---- 98 (-1) +[0]: 77 <---- 114 (-1) +[0]: 78 <---- 100 (0) +[0]: 78 <---- 117 (0) +[0]: 78 <---- 102 (-1) +[0]: 78 <---- 116 (-1) +[0]: 79 <---- 106 (0) +[0]: 79 <---- 122 (0) +[0]: 79 <---- 110 (-1) +[0]: 79 <---- 121 (-1) +[0]: 80 <---- 139 (0) +[0]: 80 <---- 145 (0) +[0]: 80 <---- 141 (-1) +[0]: 80 <---- 143 (-1) +[0]: 81 <---- 140 (0) +[0]: 81 <---- 146 (0) +[0]: 81 <---- 142 (-1) +[0]: 81 <---- 144 (-1) +[0]: 82 <---- 147 (0) +[0]: 82 <---- 151 (0) +[0]: 82 <---- 149 (-1) +[0]: 82 <---- 145 (-1) +[0]: 83 <---- 148 (0) +[0]: 83 <---- 151 (0) +[0]: 83 <---- 150 (-1) +[0]: 83 <---- 146 (-1) +[0]: 84 <---- 139 (0) +[0]: 84 <---- 140 (0) +[0]: 84 <---- 156 (0) +[0]: 84 <---- 157 (0) +[0]: 85 <---- 141 (0) +[0]: 85 <---- 142 (0) +[0]: 85 <---- 158 (0) +[0]: 85 <---- 159 (0) +[0]: 86 <---- 143 (0) +[0]: 86 <---- 144 (0) +[0]: 86 
<---- 156 (0) +[0]: 86 <---- 158 (0) +[0]: 87 <---- 145 (0) +[0]: 87 <---- 146 (0) +[0]: 87 <---- 157 (0) +[0]: 87 <---- 159 (0) +[0]: 88 <---- 147 (0) +[0]: 88 <---- 148 (0) +[0]: 88 <---- 157 (0) +[0]: 88 <---- 160 (0) +[0]: 89 <---- 149 (0) +[0]: 89 <---- 150 (0) +[0]: 89 <---- 159 (0) +[0]: 89 <---- 161 (0) +[0]: 90 <---- 151 (0) +[0]: 90 <---- 151 (0) +[0]: 90 <---- 160 (0) +[0]: 90 <---- 161 (0) +[0]: 91 <---- 10 (0) +[0]: 91 <---- 11 (0) +[0]: 92 <---- 11 (0) +[0]: 92 <---- 12 (0) +[0]: 93 <---- 12 (0) +[0]: 93 <---- 13 (0) +[0]: 94 <---- 13 (0) +[0]: 94 <---- 14 (0) +[0]: 95 <---- 15 (0) +[0]: 95 <---- 16 (0) +[0]: 96 <---- 16 (0) +[0]: 96 <---- 17 (0) +[0]: 97 <---- 17 (0) +[0]: 97 <---- 18 (0) +[0]: 98 <---- 18 (0) +[0]: 98 <---- 19 (0) +[0]: 99 <---- 42 (0) +[0]: 99 <---- 20 (0) +[0]: 100 <---- 20 (0) +[0]: 100 <---- 21 (0) +[0]: 101 <---- 43 (0) +[0]: 101 <---- 22 (0) +[0]: 102 <---- 22 (0) +[0]: 102 <---- 23 (0) +[0]: 103 <---- 24 (0) +[0]: 103 <---- 25 (0) +[0]: 104 <---- 25 (0) +[0]: 104 <---- 26 (0) +[0]: 105 <---- 26 (0) +[0]: 105 <---- 27 (0) +[0]: 106 <---- 27 (0) +[0]: 106 <---- 28 (0) +[0]: 107 <---- 29 (0) +[0]: 107 <---- 30 (0) +[0]: 108 <---- 30 (0) +[0]: 108 <---- 31 (0) +[0]: 109 <---- 31 (0) +[0]: 109 <---- 32 (0) +[0]: 110 <---- 32 (0) +[0]: 110 <---- 33 (0) +[0]: 111 <---- 10 (0) +[0]: 111 <---- 15 (0) +[0]: 112 <---- 11 (0) +[0]: 112 <---- 16 (0) +[0]: 113 <---- 12 (0) +[0]: 113 <---- 17 (0) +[0]: 114 <---- 13 (0) +[0]: 114 <---- 18 (0) +[0]: 115 <---- 14 (0) +[0]: 115 <---- 19 (0) +[0]: 116 <---- 20 (0) +[0]: 116 <---- 22 (0) +[0]: 117 <---- 21 (0) +[0]: 117 <---- 23 (0) +[0]: 118 <---- 24 (0) +[0]: 118 <---- 29 (0) +[0]: 119 <---- 25 (0) +[0]: 119 <---- 30 (0) +[0]: 120 <---- 26 (0) +[0]: 120 <---- 31 (0) +[0]: 121 <---- 27 (0) +[0]: 121 <---- 32 (0) +[0]: 122 <---- 28 (0) +[0]: 122 <---- 33 (0) +[0]: 123 <---- 34 (0) +[0]: 123 <---- 24 (0) +[0]: 124 <---- 36 (0) +[0]: 124 <---- 25 (0) +[0]: 125 <---- 12 (0) +[0]: 125 <---- 42 (0) +[0]: 126 <---- 42 (0) +[0]: 126 <---- 26 (0) +[0]: 127 <---- 13 (0) +[0]: 127 <---- 20 (0) +[0]: 128 <---- 20 (0) +[0]: 128 <---- 27 (0) +[0]: 129 <---- 14 (0) +[0]: 129 <---- 21 (0) +[0]: 130 <---- 21 (0) +[0]: 130 <---- 28 (0) +[0]: 131 <---- 38 (0) +[0]: 131 <---- 29 (0) +[0]: 132 <---- 40 (0) +[0]: 132 <---- 30 (0) +[0]: 133 <---- 17 (0) +[0]: 133 <---- 43 (0) +[0]: 134 <---- 43 (0) +[0]: 134 <---- 31 (0) +[0]: 135 <---- 18 (0) +[0]: 135 <---- 22 (0) +[0]: 136 <---- 22 (0) +[0]: 136 <---- 32 (0) +[0]: 137 <---- 19 (0) +[0]: 137 <---- 23 (0) +[0]: 138 <---- 23 (0) +[0]: 138 <---- 33 (0) +[0]: 139 <---- 34 (0) +[0]: 139 <---- 36 (0) +[0]: 140 <---- 35 (0) +[0]: 140 <---- 37 (0) +[0]: 141 <---- 38 (0) +[0]: 141 <---- 40 (0) +[0]: 142 <---- 39 (0) +[0]: 142 <---- 41 (0) +[0]: 143 <---- 34 (0) +[0]: 143 <---- 38 (0) +[0]: 144 <---- 35 (0) +[0]: 144 <---- 39 (0) +[0]: 145 <---- 36 (0) +[0]: 145 <---- 40 (0) +[0]: 146 <---- 37 (0) +[0]: 146 <---- 41 (0) +[0]: 147 <---- 36 (0) +[0]: 147 <---- 42 (0) +[0]: 148 <---- 37 (0) +[0]: 148 <---- 42 (0) +[0]: 149 <---- 40 (0) +[0]: 149 <---- 43 (0) +[0]: 150 <---- 41 (0) +[0]: 150 <---- 43 (0) +[0]: 151 <---- 42 (0) +[0]: 151 <---- 43 (0) +[0]: 152 <---- 10 (0) +[0]: 152 <---- 35 (0) +[0]: 153 <---- 11 (0) +[0]: 153 <---- 37 (0) +[0]: 154 <---- 15 (0) +[0]: 154 <---- 39 (0) +[0]: 155 <---- 16 (0) +[0]: 155 <---- 41 (0) +[0]: 156 <---- 34 (0) +[0]: 156 <---- 35 (0) +[0]: 157 <---- 36 (0) +[0]: 157 <---- 37 (0) +[0]: 158 <---- 38 (0) +[0]: 158 <---- 39 (0) +[0]: 159 <---- 40 (0) +[0]: 159 <---- 
41 (0) +[0]: 160 <---- 42 (0) +[0]: 160 <---- 42 (0) +[0]: 161 <---- 43 (0) +[0]: 161 <---- 43 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 10) dim 3 offset 0 0. 0. 0. + ( 11) dim 3 offset 3 1. 0. 0. + ( 12) dim 3 offset 6 2. 0. 0. + ( 13) dim 3 offset 9 3. 0. 0. + ( 14) dim 3 offset 12 4. 0. 0. + ( 15) dim 3 offset 15 0. 1. 0. + ( 16) dim 3 offset 18 1. 1. 0. + ( 17) dim 3 offset 21 2. 1. 0. + ( 18) dim 3 offset 24 3. 1. 0. + ( 19) dim 3 offset 27 4. 1. 0. + ( 20) dim 3 offset 30 3. 0. 1. + ( 21) dim 3 offset 33 4. 0. 1. + ( 22) dim 3 offset 36 3. 1. 1. + ( 23) dim 3 offset 39 4. 1. 1. + ( 24) dim 3 offset 42 0. 0. 2. + ( 25) dim 3 offset 45 1. 0. 2. + ( 26) dim 3 offset 48 2. 0. 2. + ( 27) dim 3 offset 51 3. 0. 2. + ( 28) dim 3 offset 54 4. 0. 2. + ( 29) dim 3 offset 57 0. 1. 2. + ( 30) dim 3 offset 60 1. 1. 2. + ( 31) dim 3 offset 63 2. 1. 2. + ( 32) dim 3 offset 66 3. 1. 2. + ( 33) dim 3 offset 69 4. 1. 2. + ( 34) dim 3 offset 72 0. 0. 1. + ( 35) dim 3 offset 75 0. 0. 1. + ( 36) dim 3 offset 78 1. 0. 1. + ( 37) dim 3 offset 81 1. 0. 1. + ( 38) dim 3 offset 84 0. 1. 1. + ( 39) dim 3 offset 87 0. 1. 1. + ( 40) dim 3 offset 90 1. 1. 1. + ( 41) dim 3 offset 93 1. 1. 1. + ( 42) dim 3 offset 96 2. 0. 1. + ( 43) dim 3 offset 99 2. 1. 1. +Labels: +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 2 (7) +[0]: 3 (7) +[0]: 4 (7) +[0]: 5 (7) +[0]: 6 (7) +[0]: 7 (7) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 25 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 29 (0) +[0]: 30 (0) +[0]: 31 (0) +[0]: 32 (0) +[0]: 33 (0) +[0]: 34 (0) +[0]: 35 (0) +[0]: 36 (0) +[0]: 37 (0) +[0]: 38 (0) +[0]: 39 (0) +[0]: 40 (0) +[0]: 41 (0) +[0]: 42 (0) +[0]: 43 (0) +[0]: 156 (2) +[0]: 157 (2) +[0]: 158 (2) +[0]: 159 (2) +[0]: 160 (2) +[0]: 161 (2) +[0]: 44 (4) +[0]: 45 (4) +[0]: 46 (4) +[0]: 47 (4) +[0]: 48 (4) +[0]: 49 (4) +[0]: 50 (4) +[0]: 51 (4) +[0]: 52 (4) +[0]: 53 (4) +[0]: 54 (4) +[0]: 55 (4) +[0]: 56 (4) +[0]: 57 (4) +[0]: 58 (4) +[0]: 59 (4) +[0]: 60 (4) +[0]: 61 (4) +[0]: 62 (4) +[0]: 63 (4) +[0]: 64 (4) +[0]: 65 (4) +[0]: 66 (4) +[0]: 67 (4) +[0]: 68 (4) +[0]: 69 (4) +[0]: 70 (4) +[0]: 71 (4) +[0]: 72 (4) +[0]: 73 (4) +[0]: 74 (4) +[0]: 75 (4) +[0]: 76 (4) +[0]: 77 (4) +[0]: 78 (4) +[0]: 79 (4) +[0]: 80 (4) +[0]: 81 (4) +[0]: 82 (4) +[0]: 83 (4) +[0]: 8 (10) +[0]: 9 (10) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 109 (1) +[0]: 110 (1) +[0]: 111 (1) +[0]: 112 (1) +[0]: 113 (1) +[0]: 114 (1) +[0]: 115 (1) +[0]: 116 (1) +[0]: 117 (1) +[0]: 118 (1) +[0]: 119 (1) +[0]: 120 (1) +[0]: 121 (1) +[0]: 122 (1) +[0]: 123 (1) +[0]: 124 (1) +[0]: 125 (1) +[0]: 126 (1) +[0]: 127 (1) +[0]: 128 (1) +[0]: 129 (1) +[0]: 130 (1) +[0]: 131 (1) +[0]: 132 (1) +[0]: 133 (1) +[0]: 134 (1) +[0]: 135 (1) +[0]: 136 (1) +[0]: 137 (1) +[0]: 138 (1) +[0]: 139 (1) +[0]: 140 (1) +[0]: 141 (1) +[0]: 142 (1) +[0]: 143 (1) +[0]: 144 (1) +[0]: 145 (1) +[0]: 146 (1) +[0]: 147 (1) +[0]: 148 (1) +[0]: 149 (1) +[0]: 150 (1) +[0]: 151 (1) +[0]: 152 (1) +[0]: 153 (1) +[0]: 154 (1) +[0]: 155 (1) +[0]: 84 (5) +[0]: 85 (5) +[0]: 86 (5) +[0]: 87 (5) +[0]: 88 (5) +[0]: 89 (5) +[0]: 90 (5) +Label 'marker': +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) 
+[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 46 (1) +[0]: 47 (1) +[0]: 48 (1) +[0]: 49 (1) +[0]: 52 (1) +[0]: 53 (1) +[0]: 57 (1) +[0]: 58 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +[0]: 76 (1) +[0]: 77 (1) +[0]: 79 (1) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 94 (1) +[0]: 95 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 109 (1) +[0]: 110 (1) +[0]: 111 (1) +[0]: 112 (1) +[0]: 113 (1) +[0]: 114 (1) +[0]: 115 (1) +[0]: 117 (1) +[0]: 118 (1) +[0]: 119 (1) +[0]: 120 (1) +[0]: 121 (1) +[0]: 122 (1) +[0]: 123 (1) +[0]: 124 (1) +[0]: 125 (1) +[0]: 126 (1) +[0]: 127 (1) +[0]: 128 (1) +[0]: 129 (1) +[0]: 130 (1) +[0]: 131 (1) +[0]: 132 (1) +[0]: 133 (1) +[0]: 134 (1) +[0]: 135 (1) +[0]: 136 (1) +[0]: 137 (1) +[0]: 138 (1) +[0]: 139 (1) +[0]: 140 (1) +[0]: 141 (1) +[0]: 142 (1) +[0]: 143 (1) +[0]: 144 (1) +[0]: 147 (1) +[0]: 148 (1) +[0]: 149 (1) +[0]: 150 (1) +[0]: 152 (1) +[0]: 153 (1) +[0]: 154 (1) +[0]: 155 (1) +[0]: 156 (1) +[0]: 157 (1) +[0]: 158 (1) +[0]: 159 (1) +[0]: 160 (1) +[0]: 161 (1) +Label 'Face Sets': +[0]: 44 (6) +[0]: 53 (6) +[0]: 52 (5) +[0]: 57 (5) +[0]: 46 (3) +[0]: 48 (3) +[0]: 58 (3) +[0]: 60 (3) +[0]: 62 (3) +[0]: 64 (3) +[0]: 66 (3) +[0]: 68 (3) +[0]: 47 (4) +[0]: 49 (4) +[0]: 59 (4) +[0]: 61 (4) +[0]: 63 (4) +[0]: 65 (4) +[0]: 67 (4) +[0]: 69 (4) +[0]: 70 (1) +[0]: 72 (1) +[0]: 74 (1) +[0]: 77 (1) +[0]: 71 (2) +[0]: 73 (2) +[0]: 76 (2) +[0]: 79 (2) +Label 'fault0': +[0]: 8 (2) +[0]: 9 (2) +[0]: 80 (2) +[0]: 81 (2) +[0]: 82 (2) +[0]: 83 (2) +[0]: 84 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 139 (1) +[0]: 140 (1) +[0]: 141 (1) +[0]: 142 (1) +[0]: 143 (1) +[0]: 144 (1) +[0]: 145 (1) +[0]: 146 (1) +[0]: 147 (1) +[0]: 148 (1) +[0]: 149 (1) +[0]: 150 (1) +[0]: 34 (0) +[0]: 35 (0) +[0]: 36 (0) +[0]: 37 (0) +[0]: 38 (0) +[0]: 39 (0) +[0]: 40 (0) +[0]: 41 (0) +[0]: 156 (0) +[0]: 157 (0) +[0]: 158 (0) +[0]: 159 (0) +[0]: 0 (103) +[0]: 1 (103) +[0]: 2 (103) +[0]: 44 (102) +[0]: 45 (102) +[0]: 46 (102) +[0]: 47 (102) +[0]: 48 (102) +[0]: 49 (102) +[0]: 50 (102) +[0]: 58 (102) +[0]: 59 (102) +[0]: 75 (102) +[0]: 4 (-103) +[0]: 5 (-103) +[0]: 6 (-103) +[0]: 53 (-102) +[0]: 54 (-102) +[0]: 55 (-102) +[0]: 62 (-102) +[0]: 63 (-102) +[0]: 64 (-102) +[0]: 65 (-102) +[0]: 66 (-102) +[0]: 67 (-102) +[0]: 99 (101) +[0]: 101 (101) +[0]: 125 (101) +[0]: 133 (101) +[0]: 152 (101) +[0]: 153 (101) +[0]: 154 (101) +[0]: 155 (101) +[0]: 123 (-101) +[0]: 124 (-101) +[0]: 126 (-101) +[0]: 131 (-101) +[0]: 132 (-101) +[0]: 134 (-101) +[0]: 90 (201) +[0]: 151 (201) +[0]: 42 (200) +[0]: 43 (200) +[0]: 160 (200) +[0]: 161 (200) +Label 'fault1': +[0]: 56 (2) +[0]: 121 (1) +[0]: 128 (1) +[0]: 136 (1) +[0]: 27 (0) +[0]: 32 (0) +[0]: 2 (103) +[0]: 6 (103) +[0]: 51 (102) +[0]: 58 (102) +[0]: 59 (102) +[0]: 66 (102) +[0]: 67 (102) +[0]: 75 (102) +[0]: 76 (102) 
+[0]: 3 (-103) +[0]: 7 (-103) +[0]: 60 (-102) +[0]: 61 (-102) +[0]: 68 (-102) +[0]: 69 (-102) +[0]: 78 (-102) +[0]: 79 (-102) +[0]: 99 (101) +[0]: 101 (101) +[0]: 105 (101) +[0]: 109 (101) +[0]: 127 (101) +[0]: 135 (101) +[0]: 100 (-101) +[0]: 102 (-101) +[0]: 106 (-101) +[0]: 110 (-101) +[0]: 116 (201) +[0]: 20 (200) +[0]: 22 (200) +DM Object: box 1 MPI process + type: plex +box in 3 dimensions: +Supports: +[0] Max support size: 8 +[0]: 11 ----> 99 +[0]: 11 ----> 117 +[0]: 11 ----> 154 +[0]: 12 ----> 99 +[0]: 12 ----> 100 +[0]: 12 ----> 118 +[0]: 12 ----> 155 +[0]: 13 ----> 100 +[0]: 13 ----> 101 +[0]: 13 ----> 119 +[0]: 13 ----> 129 +[0]: 14 ----> 101 +[0]: 14 ----> 102 +[0]: 14 ----> 120 +[0]: 14 ----> 131 +[0]: 15 ----> 102 +[0]: 15 ----> 121 +[0]: 15 ----> 132 +[0]: 16 ----> 103 +[0]: 16 ----> 117 +[0]: 16 ----> 156 +[0]: 17 ----> 103 +[0]: 17 ----> 104 +[0]: 17 ----> 118 +[0]: 17 ----> 157 +[0]: 18 ----> 104 +[0]: 18 ----> 105 +[0]: 18 ----> 119 +[0]: 18 ----> 136 +[0]: 19 ----> 105 +[0]: 19 ----> 106 +[0]: 19 ----> 120 +[0]: 19 ----> 138 +[0]: 20 ----> 106 +[0]: 20 ----> 121 +[0]: 20 ----> 139 +[0]: 21 ----> 108 +[0]: 21 ----> 122 +[0]: 21 ----> 132 +[0]: 21 ----> 133 +[0]: 22 ----> 110 +[0]: 22 ----> 122 +[0]: 22 ----> 139 +[0]: 22 ----> 140 +[0]: 23 ----> 111 +[0]: 23 ----> 123 +[0]: 23 ----> 127 +[0]: 24 ----> 111 +[0]: 24 ----> 112 +[0]: 24 ----> 124 +[0]: 24 ----> 128 +[0]: 25 ----> 112 +[0]: 25 ----> 125 +[0]: 25 ----> 130 +[0]: 25 ----> 158 +[0]: 26 ----> 113 +[0]: 26 ----> 126 +[0]: 26 ----> 133 +[0]: 27 ----> 114 +[0]: 27 ----> 123 +[0]: 27 ----> 134 +[0]: 28 ----> 114 +[0]: 28 ----> 115 +[0]: 28 ----> 124 +[0]: 28 ----> 135 +[0]: 29 ----> 115 +[0]: 29 ----> 125 +[0]: 29 ----> 137 +[0]: 29 ----> 159 +[0]: 30 ----> 116 +[0]: 30 ----> 126 +[0]: 30 ----> 140 +[0]: 31 ----> 127 +[0]: 31 ----> 141 +[0]: 31 ----> 145 +[0]: 31 ----> 171 +[0]: 32 ----> 142 +[0]: 32 ----> 146 +[0]: 32 ----> 154 +[0]: 32 ----> 171 +[0]: 33 ----> 128 +[0]: 33 ----> 141 +[0]: 33 ----> 147 +[0]: 33 ----> 149 +[0]: 33 ----> 172 +[0]: 34 ----> 142 +[0]: 34 ----> 148 +[0]: 34 ----> 150 +[0]: 34 ----> 155 +[0]: 34 ----> 172 +[0]: 35 ----> 134 +[0]: 35 ----> 143 +[0]: 35 ----> 145 +[0]: 35 ----> 173 +[0]: 36 ----> 144 +[0]: 36 ----> 146 +[0]: 36 ----> 156 +[0]: 36 ----> 173 +[0]: 37 ----> 135 +[0]: 37 ----> 143 +[0]: 37 ----> 147 +[0]: 37 ----> 151 +[0]: 37 ----> 174 +[0]: 38 ----> 144 +[0]: 38 ----> 148 +[0]: 38 ----> 152 +[0]: 38 ----> 157 +[0]: 38 ----> 174 +[0]: 39 ----> 107 +[0]: 39 ----> 129 +[0]: 39 ----> 130 +[0]: 39 ----> 149 +[0]: 39 ----> 150 +[0]: 39 ----> 153 +[0]: 39 ----> 175 +[0]: 39 ----> 175 +[0]: 40 ----> 109 +[0]: 40 ----> 136 +[0]: 40 ----> 137 +[0]: 40 ----> 151 +[0]: 40 ----> 152 +[0]: 40 ----> 153 +[0]: 40 ----> 176 +[0]: 40 ----> 176 +[0]: 41 ----> 107 +[0]: 41 ----> 108 +[0]: 41 ----> 131 +[0]: 41 ----> 160 +[0]: 41 ----> 163 +[0]: 41 ----> 164 +[0]: 41 ----> 167 +[0]: 41 ----> 167 +[0]: 42 ----> 109 +[0]: 42 ----> 110 +[0]: 42 ----> 138 +[0]: 42 ----> 160 +[0]: 42 ----> 165 +[0]: 42 ----> 166 +[0]: 42 ----> 168 +[0]: 42 ----> 168 +[0]: 43 ----> 113 +[0]: 43 ----> 161 +[0]: 43 ----> 163 +[0]: 43 ----> 169 +[0]: 44 ----> 158 +[0]: 44 ----> 162 +[0]: 44 ----> 164 +[0]: 44 ----> 169 +[0]: 45 ----> 116 +[0]: 45 ----> 161 +[0]: 45 ----> 165 +[0]: 45 ----> 170 +[0]: 46 ----> 159 +[0]: 46 ----> 162 +[0]: 46 ----> 166 +[0]: 46 ----> 170 +[0]: 47 ----> 0 +[0]: 48 ----> 0 +[0]: 48 ----> 1 +[0]: 49 ----> 0 +[0]: 50 ----> 0 +[0]: 51 ----> 1 +[0]: 52 ----> 1 +[0]: 53 ----> 1 +[0]: 53 ----> 2 
+[0]: 54 ----> 2 +[0]: 54 ----> 3 +[0]: 55 ----> 3 +[0]: 56 ----> 4 +[0]: 57 ----> 4 +[0]: 57 ----> 5 +[0]: 58 ----> 5 +[0]: 58 ----> 7 +[0]: 59 ----> 6 +[0]: 60 ----> 2 +[0]: 61 ----> 2 +[0]: 62 ----> 3 +[0]: 63 ----> 3 +[0]: 64 ----> 4 +[0]: 65 ----> 4 +[0]: 66 ----> 5 +[0]: 67 ----> 5 +[0]: 68 ----> 6 +[0]: 69 ----> 6 +[0]: 70 ----> 0 +[0]: 71 ----> 4 +[0]: 72 ----> 1 +[0]: 73 ----> 5 +[0]: 74 ----> 2 +[0]: 75 ----> 2 +[0]: 75 ----> 7 +[0]: 76 ----> 3 +[0]: 77 ----> 3 +[0]: 77 ----> 6 +[0]: 78 ----> 6 +[0]: 79 ----> 4 +[0]: 79 ----> 8 +[0]: 80 ----> 0 +[0]: 80 ----> 8 +[0]: 81 ----> 5 +[0]: 81 ----> 9 +[0]: 82 ----> 1 +[0]: 82 ----> 9 +[0]: 83 ----> 6 +[0]: 83 ----> 10 +[0]: 84 ----> 7 +[0]: 84 ----> 10 +[0]: 85 ----> 7 +[0]: 86 ----> 7 +[0]: 87 ----> 7 +[0]: 88 ----> 8 +[0]: 89 ----> 8 +[0]: 90 ----> 8 +[0]: 91 ----> 8 +[0]: 91 ----> 9 +[0]: 92 ----> 9 +[0]: 93 ----> 9 +[0]: 94 ----> 9 +[0]: 95 ----> 10 +[0]: 96 ----> 10 +[0]: 97 ----> 10 +[0]: 98 ----> 10 +[0]: 99 ----> 49 +[0]: 99 ----> 70 +[0]: 100 ----> 51 +[0]: 100 ----> 72 +[0]: 101 ----> 60 +[0]: 101 ----> 74 +[0]: 102 ----> 62 +[0]: 102 ----> 76 +[0]: 103 ----> 50 +[0]: 103 ----> 70 +[0]: 104 ----> 52 +[0]: 104 ----> 72 +[0]: 105 ----> 61 +[0]: 105 ----> 74 +[0]: 106 ----> 63 +[0]: 106 ----> 76 +[0]: 107 ----> 60 +[0]: 107 ----> 75 +[0]: 107 ----> 85 +[0]: 108 ----> 62 +[0]: 108 ----> 68 +[0]: 108 ----> 77 +[0]: 109 ----> 61 +[0]: 109 ----> 75 +[0]: 109 ----> 86 +[0]: 110 ----> 63 +[0]: 110 ----> 69 +[0]: 110 ----> 77 +[0]: 111 ----> 64 +[0]: 111 ----> 71 +[0]: 112 ----> 66 +[0]: 112 ----> 73 +[0]: 113 ----> 68 +[0]: 113 ----> 78 +[0]: 114 ----> 65 +[0]: 114 ----> 71 +[0]: 115 ----> 67 +[0]: 115 ----> 73 +[0]: 116 ----> 69 +[0]: 116 ----> 78 +[0]: 117 ----> 47 +[0]: 117 ----> 70 +[0]: 118 ----> 48 +[0]: 118 ----> 70 +[0]: 118 ----> 72 +[0]: 119 ----> 53 +[0]: 119 ----> 72 +[0]: 119 ----> 74 +[0]: 120 ----> 54 +[0]: 120 ----> 74 +[0]: 120 ----> 76 +[0]: 121 ----> 55 +[0]: 121 ----> 76 +[0]: 122 ----> 55 +[0]: 122 ----> 59 +[0]: 122 ----> 77 +[0]: 123 ----> 56 +[0]: 123 ----> 71 +[0]: 124 ----> 57 +[0]: 124 ----> 71 +[0]: 124 ----> 73 +[0]: 125 ----> 58 +[0]: 125 ----> 73 +[0]: 125 ----> 87 +[0]: 126 ----> 59 +[0]: 126 ----> 78 +[0]: 127 ----> 56 +[0]: 127 ----> 64 +[0]: 128 ----> 57 +[0]: 128 ----> 64 +[0]: 128 ----> 66 +[0]: 129 ----> 51 +[0]: 129 ----> 53 +[0]: 129 ----> 60 +[0]: 130 ----> 58 +[0]: 130 ----> 66 +[0]: 130 ----> 85 +[0]: 131 ----> 54 +[0]: 131 ----> 60 +[0]: 131 ----> 62 +[0]: 132 ----> 55 +[0]: 132 ----> 62 +[0]: 133 ----> 59 +[0]: 133 ----> 68 +[0]: 134 ----> 56 +[0]: 134 ----> 65 +[0]: 135 ----> 57 +[0]: 135 ----> 65 +[0]: 135 ----> 67 +[0]: 136 ----> 52 +[0]: 136 ----> 53 +[0]: 136 ----> 61 +[0]: 137 ----> 58 +[0]: 137 ----> 67 +[0]: 137 ----> 86 +[0]: 138 ----> 54 +[0]: 138 ----> 61 +[0]: 138 ----> 63 +[0]: 139 ----> 55 +[0]: 139 ----> 63 +[0]: 140 ----> 59 +[0]: 140 ----> 69 +[0]: 141 ----> 64 +[0]: 141 ----> 79 +[0]: 141 ----> 88 +[0]: 142 ----> 49 +[0]: 142 ----> 80 +[0]: 142 ----> 88 +[0]: 143 ----> 65 +[0]: 143 ----> 79 +[0]: 143 ----> 89 +[0]: 144 ----> 50 +[0]: 144 ----> 80 +[0]: 144 ----> 89 +[0]: 145 ----> 56 +[0]: 145 ----> 79 +[0]: 145 ----> 90 +[0]: 146 ----> 47 +[0]: 146 ----> 80 +[0]: 146 ----> 90 +[0]: 147 ----> 57 +[0]: 147 ----> 79 +[0]: 147 ----> 81 +[0]: 147 ----> 91 +[0]: 148 ----> 48 +[0]: 148 ----> 80 +[0]: 148 ----> 82 +[0]: 148 ----> 91 +[0]: 149 ----> 66 +[0]: 149 ----> 81 +[0]: 149 ----> 92 +[0]: 150 ----> 51 +[0]: 150 ----> 82 +[0]: 150 ----> 92 +[0]: 151 ----> 67 +[0]: 151 ----> 
81 +[0]: 151 ----> 93 +[0]: 152 ----> 52 +[0]: 152 ----> 82 +[0]: 152 ----> 93 +[0]: 153 ----> 53 +[0]: 153 ----> 58 +[0]: 153 ----> 75 +[0]: 153 ----> 81 +[0]: 153 ----> 82 +[0]: 153 ----> 94 +[0]: 153 ----> 94 +[0]: 154 ----> 47 +[0]: 154 ----> 49 +[0]: 155 ----> 48 +[0]: 155 ----> 49 +[0]: 155 ----> 51 +[0]: 156 ----> 47 +[0]: 156 ----> 50 +[0]: 157 ----> 48 +[0]: 157 ----> 50 +[0]: 157 ----> 52 +[0]: 158 ----> 85 +[0]: 158 ----> 87 +[0]: 159 ----> 86 +[0]: 159 ----> 87 +[0]: 160 ----> 54 +[0]: 160 ----> 75 +[0]: 160 ----> 77 +[0]: 160 ----> 83 +[0]: 160 ----> 84 +[0]: 160 ----> 95 +[0]: 160 ----> 95 +[0]: 161 ----> 78 +[0]: 161 ----> 83 +[0]: 161 ----> 96 +[0]: 162 ----> 84 +[0]: 162 ----> 87 +[0]: 162 ----> 96 +[0]: 163 ----> 68 +[0]: 163 ----> 83 +[0]: 163 ----> 97 +[0]: 164 ----> 84 +[0]: 164 ----> 85 +[0]: 164 ----> 97 +[0]: 165 ----> 69 +[0]: 165 ----> 83 +[0]: 165 ----> 98 +[0]: 166 ----> 84 +[0]: 166 ----> 86 +[0]: 166 ----> 98 +[0]: 167 ----> 95 +[0]: 167 ----> 97 +[0]: 168 ----> 95 +[0]: 168 ----> 98 +[0]: 169 ----> 96 +[0]: 169 ----> 97 +[0]: 170 ----> 96 +[0]: 170 ----> 98 +[0]: 171 ----> 88 +[0]: 171 ----> 90 +[0]: 172 ----> 88 +[0]: 172 ----> 91 +[0]: 172 ----> 92 +[0]: 173 ----> 89 +[0]: 173 ----> 90 +[0]: 174 ----> 89 +[0]: 174 ----> 91 +[0]: 174 ----> 93 +[0]: 175 ----> 92 +[0]: 175 ----> 94 +[0]: 176 ----> 93 +[0]: 176 ----> 94 +Cones: +[0] Max cone size: 6 +[0]: 0 <---- 70 (-2) +[0]: 0 <---- 80 (0) +[0]: 0 <---- 49 (0) +[0]: 0 <---- 50 (-3) +[0]: 0 <---- 48 (0) +[0]: 0 <---- 47 (-2) +[0]: 1 <---- 72 (-2) +[0]: 1 <---- 82 (0) +[0]: 1 <---- 51 (0) +[0]: 1 <---- 52 (-3) +[0]: 1 <---- 53 (0) +[0]: 1 <---- 48 (-2) +[0]: 2 <---- 74 (-2) +[0]: 2 <---- 75 (0) +[0]: 2 <---- 60 (0) +[0]: 2 <---- 61 (-3) +[0]: 2 <---- 54 (0) +[0]: 2 <---- 53 (-2) +[0]: 3 <---- 76 (-2) +[0]: 3 <---- 77 (0) +[0]: 3 <---- 62 (0) +[0]: 3 <---- 63 (-3) +[0]: 3 <---- 55 (0) +[0]: 3 <---- 54 (-2) +[0]: 4 <---- 79 (-2) +[0]: 4 <---- 71 (0) +[0]: 4 <---- 64 (0) +[0]: 4 <---- 65 (-3) +[0]: 4 <---- 57 (0) +[0]: 4 <---- 56 (-2) +[0]: 5 <---- 81 (-2) +[0]: 5 <---- 73 (0) +[0]: 5 <---- 66 (0) +[0]: 5 <---- 67 (-3) +[0]: 5 <---- 58 (0) +[0]: 5 <---- 57 (-2) +[0]: 6 <---- 77 (-2) +[0]: 6 <---- 78 (0) +[0]: 6 <---- 68 (0) +[0]: 6 <---- 69 (-3) +[0]: 6 <---- 59 (0) +[0]: 6 <---- 83 (-2) +[0]: 7 <---- 75 (-2) +[0]: 7 <---- 87 (0) +[0]: 7 <---- 85 (0) +[0]: 7 <---- 86 (-3) +[0]: 7 <---- 84 (0) +[0]: 7 <---- 58 (-2) +[0]: 8 <---- 79 (0) +[0]: 8 <---- 80 (0) +[0]: 8 <---- 88 (0) +[0]: 8 <---- 91 (0) +[0]: 8 <---- 89 (-1) +[0]: 8 <---- 90 (-1) +[0]: 9 <---- 81 (0) +[0]: 9 <---- 82 (0) +[0]: 9 <---- 92 (0) +[0]: 9 <---- 94 (0) +[0]: 9 <---- 93 (-1) +[0]: 9 <---- 91 (-1) +[0]: 10 <---- 83 (0) +[0]: 10 <---- 84 (0) +[0]: 10 <---- 95 (0) +[0]: 10 <---- 98 (0) +[0]: 10 <---- 96 (-1) +[0]: 10 <---- 97 (-1) +[0]: 47 <---- 117 (0) +[0]: 47 <---- 156 (0) +[0]: 47 <---- 146 (-1) +[0]: 47 <---- 154 (-1) +[0]: 48 <---- 118 (0) +[0]: 48 <---- 157 (0) +[0]: 48 <---- 148 (-1) +[0]: 48 <---- 155 (-1) +[0]: 49 <---- 99 (0) +[0]: 49 <---- 155 (0) +[0]: 49 <---- 142 (-1) +[0]: 49 <---- 154 (-1) +[0]: 50 <---- 103 (0) +[0]: 50 <---- 157 (0) +[0]: 50 <---- 144 (-1) +[0]: 50 <---- 156 (-1) +[0]: 51 <---- 100 (0) +[0]: 51 <---- 129 (0) +[0]: 51 <---- 150 (-1) +[0]: 51 <---- 155 (-1) +[0]: 52 <---- 104 (0) +[0]: 52 <---- 136 (0) +[0]: 52 <---- 152 (-1) +[0]: 52 <---- 157 (-1) +[0]: 53 <---- 119 (0) +[0]: 53 <---- 136 (0) +[0]: 53 <---- 153 (-1) +[0]: 53 <---- 129 (-1) +[0]: 54 <---- 120 (0) +[0]: 54 <---- 138 (0) +[0]: 54 <---- 160 (-1) 
+[0]: 54 <---- 131 (-1) +[0]: 55 <---- 121 (0) +[0]: 55 <---- 139 (0) +[0]: 55 <---- 122 (-1) +[0]: 55 <---- 132 (-1) +[0]: 56 <---- 145 (0) +[0]: 56 <---- 134 (0) +[0]: 56 <---- 123 (-1) +[0]: 56 <---- 127 (-1) +[0]: 57 <---- 147 (0) +[0]: 57 <---- 135 (0) +[0]: 57 <---- 124 (-1) +[0]: 57 <---- 128 (-1) +[0]: 58 <---- 153 (0) +[0]: 58 <---- 137 (0) +[0]: 58 <---- 125 (-1) +[0]: 58 <---- 130 (-1) +[0]: 59 <---- 122 (0) +[0]: 59 <---- 140 (0) +[0]: 59 <---- 126 (-1) +[0]: 59 <---- 133 (-1) +[0]: 60 <---- 101 (0) +[0]: 60 <---- 131 (0) +[0]: 60 <---- 107 (-1) +[0]: 60 <---- 129 (-1) +[0]: 61 <---- 105 (0) +[0]: 61 <---- 138 (0) +[0]: 61 <---- 109 (-1) +[0]: 61 <---- 136 (-1) +[0]: 62 <---- 102 (0) +[0]: 62 <---- 132 (0) +[0]: 62 <---- 108 (-1) +[0]: 62 <---- 131 (-1) +[0]: 63 <---- 106 (0) +[0]: 63 <---- 139 (0) +[0]: 63 <---- 110 (-1) +[0]: 63 <---- 138 (-1) +[0]: 64 <---- 141 (0) +[0]: 64 <---- 128 (0) +[0]: 64 <---- 111 (-1) +[0]: 64 <---- 127 (-1) +[0]: 65 <---- 143 (0) +[0]: 65 <---- 135 (0) +[0]: 65 <---- 114 (-1) +[0]: 65 <---- 134 (-1) +[0]: 66 <---- 149 (0) +[0]: 66 <---- 130 (0) +[0]: 66 <---- 112 (-1) +[0]: 66 <---- 128 (-1) +[0]: 67 <---- 151 (0) +[0]: 67 <---- 137 (0) +[0]: 67 <---- 115 (-1) +[0]: 67 <---- 135 (-1) +[0]: 68 <---- 108 (0) +[0]: 68 <---- 133 (0) +[0]: 68 <---- 113 (-1) +[0]: 68 <---- 163 (-1) +[0]: 69 <---- 110 (0) +[0]: 69 <---- 140 (0) +[0]: 69 <---- 116 (-1) +[0]: 69 <---- 165 (-1) +[0]: 70 <---- 99 (0) +[0]: 70 <---- 118 (0) +[0]: 70 <---- 103 (-1) +[0]: 70 <---- 117 (-1) +[0]: 71 <---- 111 (0) +[0]: 71 <---- 124 (0) +[0]: 71 <---- 114 (-1) +[0]: 71 <---- 123 (-1) +[0]: 72 <---- 100 (0) +[0]: 72 <---- 119 (0) +[0]: 72 <---- 104 (-1) +[0]: 72 <---- 118 (-1) +[0]: 73 <---- 112 (0) +[0]: 73 <---- 125 (0) +[0]: 73 <---- 115 (-1) +[0]: 73 <---- 124 (-1) +[0]: 74 <---- 101 (0) +[0]: 74 <---- 120 (0) +[0]: 74 <---- 105 (-1) +[0]: 74 <---- 119 (-1) +[0]: 75 <---- 107 (0) +[0]: 75 <---- 160 (0) +[0]: 75 <---- 109 (-1) +[0]: 75 <---- 153 (-1) +[0]: 76 <---- 102 (0) +[0]: 76 <---- 121 (0) +[0]: 76 <---- 106 (-1) +[0]: 76 <---- 120 (-1) +[0]: 77 <---- 108 (0) +[0]: 77 <---- 122 (0) +[0]: 77 <---- 110 (-1) +[0]: 77 <---- 160 (-1) +[0]: 78 <---- 113 (0) +[0]: 78 <---- 126 (0) +[0]: 78 <---- 116 (-1) +[0]: 78 <---- 161 (-1) +[0]: 79 <---- 141 (0) +[0]: 79 <---- 147 (0) +[0]: 79 <---- 143 (-1) +[0]: 79 <---- 145 (-1) +[0]: 80 <---- 142 (0) +[0]: 80 <---- 148 (0) +[0]: 80 <---- 144 (-1) +[0]: 80 <---- 146 (-1) +[0]: 81 <---- 149 (0) +[0]: 81 <---- 153 (0) +[0]: 81 <---- 151 (-1) +[0]: 81 <---- 147 (-1) +[0]: 82 <---- 150 (0) +[0]: 82 <---- 153 (0) +[0]: 82 <---- 152 (-1) +[0]: 82 <---- 148 (-1) +[0]: 83 <---- 160 (0) +[0]: 83 <---- 165 (0) +[0]: 83 <---- 161 (-1) +[0]: 83 <---- 163 (-1) +[0]: 84 <---- 160 (0) +[0]: 84 <---- 166 (0) +[0]: 84 <---- 162 (-1) +[0]: 84 <---- 164 (-1) +[0]: 85 <---- 107 (0) +[0]: 85 <---- 164 (0) +[0]: 85 <---- 158 (-1) +[0]: 85 <---- 130 (-1) +[0]: 86 <---- 109 (0) +[0]: 86 <---- 166 (0) +[0]: 86 <---- 159 (-1) +[0]: 86 <---- 137 (-1) +[0]: 87 <---- 158 (0) +[0]: 87 <---- 162 (0) +[0]: 87 <---- 159 (-1) +[0]: 87 <---- 125 (-1) +[0]: 88 <---- 141 (0) +[0]: 88 <---- 142 (0) +[0]: 88 <---- 171 (0) +[0]: 88 <---- 172 (0) +[0]: 89 <---- 143 (0) +[0]: 89 <---- 144 (0) +[0]: 89 <---- 173 (0) +[0]: 89 <---- 174 (0) +[0]: 90 <---- 145 (0) +[0]: 90 <---- 146 (0) +[0]: 90 <---- 171 (0) +[0]: 90 <---- 173 (0) +[0]: 91 <---- 147 (0) +[0]: 91 <---- 148 (0) +[0]: 91 <---- 172 (0) +[0]: 91 <---- 174 (0) +[0]: 92 <---- 149 (0) +[0]: 92 <---- 150 (0) +[0]: 92 <---- 
172 (0) +[0]: 92 <---- 175 (0) +[0]: 93 <---- 151 (0) +[0]: 93 <---- 152 (0) +[0]: 93 <---- 174 (0) +[0]: 93 <---- 176 (0) +[0]: 94 <---- 153 (0) +[0]: 94 <---- 153 (0) +[0]: 94 <---- 175 (0) +[0]: 94 <---- 176 (0) +[0]: 95 <---- 160 (0) +[0]: 95 <---- 160 (0) +[0]: 95 <---- 167 (0) +[0]: 95 <---- 168 (0) +[0]: 96 <---- 161 (0) +[0]: 96 <---- 162 (0) +[0]: 96 <---- 169 (0) +[0]: 96 <---- 170 (0) +[0]: 97 <---- 163 (0) +[0]: 97 <---- 164 (0) +[0]: 97 <---- 167 (0) +[0]: 97 <---- 169 (0) +[0]: 98 <---- 165 (0) +[0]: 98 <---- 166 (0) +[0]: 98 <---- 168 (0) +[0]: 98 <---- 170 (0) +[0]: 99 <---- 11 (0) +[0]: 99 <---- 12 (0) +[0]: 100 <---- 12 (0) +[0]: 100 <---- 13 (0) +[0]: 101 <---- 13 (0) +[0]: 101 <---- 14 (0) +[0]: 102 <---- 14 (0) +[0]: 102 <---- 15 (0) +[0]: 103 <---- 16 (0) +[0]: 103 <---- 17 (0) +[0]: 104 <---- 17 (0) +[0]: 104 <---- 18 (0) +[0]: 105 <---- 18 (0) +[0]: 105 <---- 19 (0) +[0]: 106 <---- 19 (0) +[0]: 106 <---- 20 (0) +[0]: 107 <---- 39 (0) +[0]: 107 <---- 41 (0) +[0]: 108 <---- 41 (0) +[0]: 108 <---- 21 (0) +[0]: 109 <---- 40 (0) +[0]: 109 <---- 42 (0) +[0]: 110 <---- 42 (0) +[0]: 110 <---- 22 (0) +[0]: 111 <---- 23 (0) +[0]: 111 <---- 24 (0) +[0]: 112 <---- 24 (0) +[0]: 112 <---- 25 (0) +[0]: 113 <---- 43 (0) +[0]: 113 <---- 26 (0) +[0]: 114 <---- 27 (0) +[0]: 114 <---- 28 (0) +[0]: 115 <---- 28 (0) +[0]: 115 <---- 29 (0) +[0]: 116 <---- 45 (0) +[0]: 116 <---- 30 (0) +[0]: 117 <---- 11 (0) +[0]: 117 <---- 16 (0) +[0]: 118 <---- 12 (0) +[0]: 118 <---- 17 (0) +[0]: 119 <---- 13 (0) +[0]: 119 <---- 18 (0) +[0]: 120 <---- 14 (0) +[0]: 120 <---- 19 (0) +[0]: 121 <---- 15 (0) +[0]: 121 <---- 20 (0) +[0]: 122 <---- 21 (0) +[0]: 122 <---- 22 (0) +[0]: 123 <---- 23 (0) +[0]: 123 <---- 27 (0) +[0]: 124 <---- 24 (0) +[0]: 124 <---- 28 (0) +[0]: 125 <---- 25 (0) +[0]: 125 <---- 29 (0) +[0]: 126 <---- 26 (0) +[0]: 126 <---- 30 (0) +[0]: 127 <---- 31 (0) +[0]: 127 <---- 23 (0) +[0]: 128 <---- 33 (0) +[0]: 128 <---- 24 (0) +[0]: 129 <---- 13 (0) +[0]: 129 <---- 39 (0) +[0]: 130 <---- 39 (0) +[0]: 130 <---- 25 (0) +[0]: 131 <---- 14 (0) +[0]: 131 <---- 41 (0) +[0]: 132 <---- 15 (0) +[0]: 132 <---- 21 (0) +[0]: 133 <---- 21 (0) +[0]: 133 <---- 26 (0) +[0]: 134 <---- 35 (0) +[0]: 134 <---- 27 (0) +[0]: 135 <---- 37 (0) +[0]: 135 <---- 28 (0) +[0]: 136 <---- 18 (0) +[0]: 136 <---- 40 (0) +[0]: 137 <---- 40 (0) +[0]: 137 <---- 29 (0) +[0]: 138 <---- 19 (0) +[0]: 138 <---- 42 (0) +[0]: 139 <---- 20 (0) +[0]: 139 <---- 22 (0) +[0]: 140 <---- 22 (0) +[0]: 140 <---- 30 (0) +[0]: 141 <---- 31 (0) +[0]: 141 <---- 33 (0) +[0]: 142 <---- 32 (0) +[0]: 142 <---- 34 (0) +[0]: 143 <---- 35 (0) +[0]: 143 <---- 37 (0) +[0]: 144 <---- 36 (0) +[0]: 144 <---- 38 (0) +[0]: 145 <---- 31 (0) +[0]: 145 <---- 35 (0) +[0]: 146 <---- 32 (0) +[0]: 146 <---- 36 (0) +[0]: 147 <---- 33 (0) +[0]: 147 <---- 37 (0) +[0]: 148 <---- 34 (0) +[0]: 148 <---- 38 (0) +[0]: 149 <---- 33 (0) +[0]: 149 <---- 39 (0) +[0]: 150 <---- 34 (0) +[0]: 150 <---- 39 (0) +[0]: 151 <---- 37 (0) +[0]: 151 <---- 40 (0) +[0]: 152 <---- 38 (0) +[0]: 152 <---- 40 (0) +[0]: 153 <---- 39 (0) +[0]: 153 <---- 40 (0) +[0]: 154 <---- 11 (0) +[0]: 154 <---- 32 (0) +[0]: 155 <---- 12 (0) +[0]: 155 <---- 34 (0) +[0]: 156 <---- 16 (0) +[0]: 156 <---- 36 (0) +[0]: 157 <---- 17 (0) +[0]: 157 <---- 38 (0) +[0]: 158 <---- 25 (0) +[0]: 158 <---- 44 (0) +[0]: 159 <---- 29 (0) +[0]: 159 <---- 46 (0) +[0]: 160 <---- 41 (0) +[0]: 160 <---- 42 (0) +[0]: 161 <---- 43 (0) +[0]: 161 <---- 45 (0) +[0]: 162 <---- 44 (0) +[0]: 162 <---- 46 (0) +[0]: 163 <---- 41 (0) +[0]: 
163 <---- 43 (0) +[0]: 164 <---- 41 (0) +[0]: 164 <---- 44 (0) +[0]: 165 <---- 42 (0) +[0]: 165 <---- 45 (0) +[0]: 166 <---- 42 (0) +[0]: 166 <---- 46 (0) +[0]: 167 <---- 41 (0) +[0]: 167 <---- 41 (0) +[0]: 168 <---- 42 (0) +[0]: 168 <---- 42 (0) +[0]: 169 <---- 43 (0) +[0]: 169 <---- 44 (0) +[0]: 170 <---- 45 (0) +[0]: 170 <---- 46 (0) +[0]: 171 <---- 31 (0) +[0]: 171 <---- 32 (0) +[0]: 172 <---- 33 (0) +[0]: 172 <---- 34 (0) +[0]: 173 <---- 35 (0) +[0]: 173 <---- 36 (0) +[0]: 174 <---- 37 (0) +[0]: 174 <---- 38 (0) +[0]: 175 <---- 39 (0) +[0]: 175 <---- 39 (0) +[0]: 176 <---- 40 (0) +[0]: 176 <---- 40 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 11) dim 3 offset 0 0. 0. 0. + ( 12) dim 3 offset 3 1. 0. 0. + ( 13) dim 3 offset 6 2. 0. 0. + ( 14) dim 3 offset 9 3. 0. 0. + ( 15) dim 3 offset 12 4. 0. 0. + ( 16) dim 3 offset 15 0. 1. 0. + ( 17) dim 3 offset 18 1. 1. 0. + ( 18) dim 3 offset 21 2. 1. 0. + ( 19) dim 3 offset 24 3. 1. 0. + ( 20) dim 3 offset 27 4. 1. 0. + ( 21) dim 3 offset 30 4. 0. 1. + ( 22) dim 3 offset 33 4. 1. 1. + ( 23) dim 3 offset 36 0. 0. 2. + ( 24) dim 3 offset 39 1. 0. 2. + ( 25) dim 3 offset 42 2. 0. 2. + ( 26) dim 3 offset 45 4. 0. 2. + ( 27) dim 3 offset 48 0. 1. 2. + ( 28) dim 3 offset 51 1. 1. 2. + ( 29) dim 3 offset 54 2. 1. 2. + ( 30) dim 3 offset 57 4. 1. 2. + ( 31) dim 3 offset 60 0. 0. 1. + ( 32) dim 3 offset 63 0. 0. 1. + ( 33) dim 3 offset 66 1. 0. 1. + ( 34) dim 3 offset 69 1. 0. 1. + ( 35) dim 3 offset 72 0. 1. 1. + ( 36) dim 3 offset 75 0. 1. 1. + ( 37) dim 3 offset 78 1. 1. 1. + ( 38) dim 3 offset 81 1. 1. 1. + ( 39) dim 3 offset 84 2. 0. 1. + ( 40) dim 3 offset 87 2. 1. 1. + ( 41) dim 3 offset 90 3. 0. 1. + ( 42) dim 3 offset 93 3. 1. 1. + ( 43) dim 3 offset 96 3. 0. 2. + ( 44) dim 3 offset 99 3. 0. 2. + ( 45) dim 3 offset 102 3. 1. 2. + ( 46) dim 3 offset 105 3. 1. 2. 
+Labels: +Label 'celltype': +[0]: 0 (7) +[0]: 1 (7) +[0]: 2 (7) +[0]: 3 (7) +[0]: 4 (7) +[0]: 5 (7) +[0]: 6 (7) +[0]: 7 (7) +[0]: 8 (10) +[0]: 9 (10) +[0]: 10 (10) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 24 (0) +[0]: 25 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 29 (0) +[0]: 30 (0) +[0]: 31 (0) +[0]: 32 (0) +[0]: 33 (0) +[0]: 34 (0) +[0]: 35 (0) +[0]: 36 (0) +[0]: 37 (0) +[0]: 38 (0) +[0]: 39 (0) +[0]: 40 (0) +[0]: 41 (0) +[0]: 42 (0) +[0]: 43 (0) +[0]: 44 (0) +[0]: 45 (0) +[0]: 46 (0) +[0]: 167 (2) +[0]: 168 (2) +[0]: 169 (2) +[0]: 170 (2) +[0]: 171 (2) +[0]: 172 (2) +[0]: 173 (2) +[0]: 174 (2) +[0]: 175 (2) +[0]: 176 (2) +[0]: 47 (4) +[0]: 48 (4) +[0]: 49 (4) +[0]: 50 (4) +[0]: 51 (4) +[0]: 52 (4) +[0]: 53 (4) +[0]: 54 (4) +[0]: 55 (4) +[0]: 56 (4) +[0]: 57 (4) +[0]: 58 (4) +[0]: 59 (4) +[0]: 60 (4) +[0]: 61 (4) +[0]: 62 (4) +[0]: 63 (4) +[0]: 64 (4) +[0]: 65 (4) +[0]: 66 (4) +[0]: 67 (4) +[0]: 68 (4) +[0]: 69 (4) +[0]: 70 (4) +[0]: 71 (4) +[0]: 72 (4) +[0]: 73 (4) +[0]: 74 (4) +[0]: 75 (4) +[0]: 76 (4) +[0]: 77 (4) +[0]: 78 (4) +[0]: 79 (4) +[0]: 80 (4) +[0]: 81 (4) +[0]: 82 (4) +[0]: 83 (4) +[0]: 84 (4) +[0]: 85 (4) +[0]: 86 (4) +[0]: 87 (4) +[0]: 88 (5) +[0]: 89 (5) +[0]: 90 (5) +[0]: 91 (5) +[0]: 92 (5) +[0]: 93 (5) +[0]: 94 (5) +[0]: 95 (5) +[0]: 96 (5) +[0]: 97 (5) +[0]: 98 (5) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 109 (1) +[0]: 110 (1) +[0]: 111 (1) +[0]: 112 (1) +[0]: 113 (1) +[0]: 114 (1) +[0]: 115 (1) +[0]: 116 (1) +[0]: 117 (1) +[0]: 118 (1) +[0]: 119 (1) +[0]: 120 (1) +[0]: 121 (1) +[0]: 122 (1) +[0]: 123 (1) +[0]: 124 (1) +[0]: 125 (1) +[0]: 126 (1) +[0]: 127 (1) +[0]: 128 (1) +[0]: 129 (1) +[0]: 130 (1) +[0]: 131 (1) +[0]: 132 (1) +[0]: 133 (1) +[0]: 134 (1) +[0]: 135 (1) +[0]: 136 (1) +[0]: 137 (1) +[0]: 138 (1) +[0]: 139 (1) +[0]: 140 (1) +[0]: 141 (1) +[0]: 142 (1) +[0]: 143 (1) +[0]: 144 (1) +[0]: 145 (1) +[0]: 146 (1) +[0]: 147 (1) +[0]: 148 (1) +[0]: 149 (1) +[0]: 150 (1) +[0]: 151 (1) +[0]: 152 (1) +[0]: 153 (1) +[0]: 154 (1) +[0]: 155 (1) +[0]: 156 (1) +[0]: 157 (1) +[0]: 158 (1) +[0]: 159 (1) +[0]: 160 (1) +[0]: 161 (1) +[0]: 162 (1) +[0]: 163 (1) +[0]: 164 (1) +[0]: 165 (1) +[0]: 166 (1) +Label 'marker': +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 44 (1) +[0]: 45 (1) +[0]: 46 (1) +[0]: 47 (1) +[0]: 49 (1) +[0]: 50 (1) +[0]: 51 (1) +[0]: 52 (1) +[0]: 55 (1) +[0]: 56 (1) +[0]: 59 (1) +[0]: 60 (1) +[0]: 61 (1) +[0]: 62 (1) +[0]: 63 (1) +[0]: 64 (1) +[0]: 65 (1) +[0]: 66 (1) +[0]: 67 (1) +[0]: 68 (1) +[0]: 69 (1) +[0]: 70 (1) +[0]: 71 (1) +[0]: 72 (1) +[0]: 73 (1) +[0]: 74 (1) +[0]: 76 (1) +[0]: 78 (1) +[0]: 85 (1) +[0]: 86 (1) +[0]: 87 (1) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 99 (1) +[0]: 100 (1) +[0]: 101 (1) +[0]: 102 (1) +[0]: 103 (1) +[0]: 104 (1) +[0]: 105 (1) +[0]: 106 (1) +[0]: 107 (1) +[0]: 108 (1) +[0]: 109 (1) +[0]: 110 (1) +[0]: 111 (1) +[0]: 112 (1) +[0]: 
113 (1) +[0]: 114 (1) +[0]: 115 (1) +[0]: 116 (1) +[0]: 117 (1) +[0]: 118 (1) +[0]: 119 (1) +[0]: 120 (1) +[0]: 121 (1) +[0]: 122 (1) +[0]: 123 (1) +[0]: 124 (1) +[0]: 125 (1) +[0]: 126 (1) +[0]: 127 (1) +[0]: 128 (1) +[0]: 129 (1) +[0]: 130 (1) +[0]: 131 (1) +[0]: 132 (1) +[0]: 133 (1) +[0]: 134 (1) +[0]: 135 (1) +[0]: 136 (1) +[0]: 137 (1) +[0]: 138 (1) +[0]: 139 (1) +[0]: 140 (1) +[0]: 141 (1) +[0]: 142 (1) +[0]: 143 (1) +[0]: 144 (1) +[0]: 145 (1) +[0]: 146 (1) +[0]: 149 (1) +[0]: 150 (1) +[0]: 151 (1) +[0]: 152 (1) +[0]: 154 (1) +[0]: 155 (1) +[0]: 156 (1) +[0]: 157 (1) +[0]: 158 (1) +[0]: 159 (1) +[0]: 161 (1) +[0]: 162 (1) +[0]: 163 (1) +[0]: 164 (1) +[0]: 165 (1) +[0]: 166 (1) +[0]: 167 (1) +[0]: 168 (1) +[0]: 169 (1) +[0]: 170 (1) +[0]: 171 (1) +[0]: 172 (1) +[0]: 173 (1) +[0]: 174 (1) +[0]: 175 (1) +[0]: 176 (1) +Label 'Face Sets': +[0]: 47 (6) +[0]: 56 (6) +[0]: 55 (5) +[0]: 59 (5) +[0]: 49 (3) +[0]: 51 (3) +[0]: 60 (3) +[0]: 62 (3) +[0]: 64 (3) +[0]: 66 (3) +[0]: 68 (3) +[0]: 85 (3) +[0]: 50 (4) +[0]: 52 (4) +[0]: 61 (4) +[0]: 63 (4) +[0]: 65 (4) +[0]: 67 (4) +[0]: 69 (4) +[0]: 86 (4) +[0]: 70 (1) +[0]: 72 (1) +[0]: 74 (1) +[0]: 76 (1) +[0]: 71 (2) +[0]: 73 (2) +[0]: 78 (2) +[0]: 87 (2) +Label 'fault0': +[0]: 8 (2) +[0]: 9 (2) +[0]: 79 (2) +[0]: 80 (2) +[0]: 81 (2) +[0]: 82 (2) +[0]: 88 (1) +[0]: 89 (1) +[0]: 90 (1) +[0]: 91 (1) +[0]: 92 (1) +[0]: 93 (1) +[0]: 141 (1) +[0]: 142 (1) +[0]: 143 (1) +[0]: 144 (1) +[0]: 145 (1) +[0]: 146 (1) +[0]: 147 (1) +[0]: 148 (1) +[0]: 149 (1) +[0]: 150 (1) +[0]: 151 (1) +[0]: 152 (1) +[0]: 31 (0) +[0]: 32 (0) +[0]: 33 (0) +[0]: 34 (0) +[0]: 35 (0) +[0]: 36 (0) +[0]: 37 (0) +[0]: 38 (0) +[0]: 171 (0) +[0]: 172 (0) +[0]: 173 (0) +[0]: 174 (0) +[0]: 0 (103) +[0]: 1 (103) +[0]: 2 (103) +[0]: 47 (102) +[0]: 48 (102) +[0]: 49 (102) +[0]: 50 (102) +[0]: 51 (102) +[0]: 52 (102) +[0]: 53 (102) +[0]: 60 (102) +[0]: 61 (102) +[0]: 75 (102) +[0]: 4 (-103) +[0]: 5 (-103) +[0]: 7 (-103) +[0]: 56 (-102) +[0]: 57 (-102) +[0]: 58 (-102) +[0]: 64 (-102) +[0]: 65 (-102) +[0]: 66 (-102) +[0]: 67 (-102) +[0]: 85 (-102) +[0]: 86 (-102) +[0]: 107 (101) +[0]: 109 (101) +[0]: 129 (101) +[0]: 136 (101) +[0]: 154 (101) +[0]: 155 (101) +[0]: 156 (101) +[0]: 157 (101) +[0]: 127 (-101) +[0]: 128 (-101) +[0]: 130 (-101) +[0]: 134 (-101) +[0]: 135 (-101) +[0]: 137 (-101) +[0]: 94 (201) +[0]: 153 (201) +[0]: 39 (200) +[0]: 40 (200) +[0]: 175 (200) +[0]: 176 (200) +Label 'fault1': +[0]: 10 (2) +[0]: 83 (2) +[0]: 84 (2) +[0]: 96 (1) +[0]: 97 (1) +[0]: 98 (1) +[0]: 161 (1) +[0]: 162 (1) +[0]: 163 (1) +[0]: 164 (1) +[0]: 165 (1) +[0]: 166 (1) +[0]: 43 (0) +[0]: 44 (0) +[0]: 45 (0) +[0]: 46 (0) +[0]: 169 (0) +[0]: 170 (0) +[0]: 2 (103) +[0]: 7 (103) +[0]: 54 (102) +[0]: 60 (102) +[0]: 61 (102) +[0]: 75 (102) +[0]: 85 (102) +[0]: 86 (102) +[0]: 87 (102) +[0]: 3 (-103) +[0]: 6 (-103) +[0]: 62 (-102) +[0]: 63 (-102) +[0]: 68 (-102) +[0]: 69 (-102) +[0]: 77 (-102) +[0]: 78 (-102) +[0]: 107 (101) +[0]: 109 (101) +[0]: 131 (101) +[0]: 138 (101) +[0]: 158 (101) +[0]: 159 (101) +[0]: 108 (-101) +[0]: 110 (-101) +[0]: 113 (-101) +[0]: 116 (-101) +[0]: 95 (201) +[0]: 160 (201) +[0]: 41 (200) +[0]: 42 (200) +[0]: 167 (200) +[0]: 168 (200) diff --git a/src/dm/impls/plex/tests/output/ex69_quad_0.out b/src/dm/impls/plex/tests/output/ex69_quad_0.out new file mode 100644 index 00000000000..e15bfbe4678 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_0.out @@ -0,0 +1,252 @@ +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 2 
----> 8 +[0]: 2 ----> 12 +[0]: 3 ----> 8 +[0]: 3 ----> 9 +[0]: 3 ----> 13 +[0]: 4 ----> 9 +[0]: 4 ----> 14 +[0]: 5 ----> 10 +[0]: 5 ----> 12 +[0]: 6 ----> 10 +[0]: 6 ----> 11 +[0]: 6 ----> 13 +[0]: 7 ----> 11 +[0]: 7 ----> 14 +[0]: 8 ----> 0 +[0]: 9 ----> 1 +[0]: 10 ----> 0 +[0]: 11 ----> 1 +[0]: 12 ----> 0 +[0]: 13 ----> 0 +[0]: 13 ----> 1 +[0]: 14 ----> 1 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 8 (0) +[0]: 0 <---- 13 (0) +[0]: 0 <---- 10 (-1) +[0]: 0 <---- 12 (-1) +[0]: 1 <---- 9 (0) +[0]: 1 <---- 14 (0) +[0]: 1 <---- 11 (-1) +[0]: 1 <---- 13 (-1) +[0]: 8 <---- 2 (0) +[0]: 8 <---- 3 (0) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 4 (0) +[0]: 10 <---- 5 (0) +[0]: 10 <---- 6 (0) +[0]: 11 <---- 6 (0) +[0]: 11 <---- 7 (0) +[0]: 12 <---- 2 (0) +[0]: 12 <---- 5 (0) +[0]: 13 <---- 3 (0) +[0]: 13 <---- 6 (0) +[0]: 14 <---- 4 (0) +[0]: 14 <---- 7 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 2) dim 2 offset 0 0. 0. + ( 3) dim 2 offset 2 0.5 0. + ( 4) dim 2 offset 4 1. 0. + ( 5) dim 2 offset 6 0. 1. + ( 6) dim 2 offset 8 0.5 1. + ( 7) dim 2 offset 10 1. 1. +Labels: +Label 'marker': +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 14 (1) +Label 'Face Sets': +[0]: 12 (4) +[0]: 14 (2) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (3) +[0]: 11 (3) +Label 'fault': +[0]: 13 (1) +[0]: 3 (0) +[0]: 6 (0) +[0]: 0 (102) +[0]: 8 (101) +[0]: 10 (101) +[0]: 1 (-102) +[0]: 9 (-101) +[0]: 11 (-101) +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 3 ----> 11 +[0]: 3 ----> 15 +[0]: 4 ----> 13 +[0]: 4 ----> 16 +[0]: 5 ----> 12 +[0]: 5 ----> 15 +[0]: 6 ----> 14 +[0]: 6 ----> 16 +[0]: 7 ----> 13 +[0]: 7 ----> 17 +[0]: 7 ----> 19 +[0]: 8 ----> 11 +[0]: 8 ----> 18 +[0]: 8 ----> 19 +[0]: 9 ----> 14 +[0]: 9 ----> 17 +[0]: 9 ----> 20 +[0]: 10 ----> 12 +[0]: 10 ----> 18 +[0]: 10 ----> 20 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 13 ----> 1 +[0]: 14 ----> 1 +[0]: 15 ----> 0 +[0]: 16 ----> 1 +[0]: 17 ----> 1 +[0]: 17 ----> 2 +[0]: 18 ----> 0 +[0]: 18 ----> 2 +[0]: 19 ----> 2 +[0]: 20 ----> 2 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 11 (0) +[0]: 0 <---- 18 (0) +[0]: 0 <---- 12 (-1) +[0]: 0 <---- 15 (-1) +[0]: 1 <---- 13 (0) +[0]: 1 <---- 16 (0) +[0]: 1 <---- 14 (-1) +[0]: 1 <---- 17 (-1) +[0]: 2 <---- 17 (0) +[0]: 2 <---- 18 (0) +[0]: 2 <---- 19 (0) +[0]: 2 <---- 20 (0) +[0]: 11 <---- 3 (0) +[0]: 11 <---- 8 (0) +[0]: 12 <---- 5 (0) +[0]: 12 <---- 10 (0) +[0]: 13 <---- 7 (0) +[0]: 13 <---- 4 (0) +[0]: 14 <---- 9 (0) +[0]: 14 <---- 6 (0) +[0]: 15 <---- 3 (0) +[0]: 15 <---- 5 (0) +[0]: 16 <---- 4 (0) +[0]: 16 <---- 6 (0) +[0]: 17 <---- 7 (0) +[0]: 17 <---- 9 (0) +[0]: 18 <---- 8 (0) +[0]: 18 <---- 10 (0) +[0]: 19 <---- 7 (0) +[0]: 19 <---- 8 (0) +[0]: 20 <---- 9 (0) +[0]: 20 <---- 10 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 3) dim 2 offset 0 0. 0. + ( 4) dim 2 offset 2 1. 0. + ( 5) dim 2 offset 4 0. 1. + ( 6) dim 2 offset 6 1. 1. + ( 7) dim 2 offset 8 0.5 0. + ( 8) dim 2 offset 10 0.5 0. + ( 9) dim 2 offset 12 0.5 1. + ( 10) dim 2 offset 14 0.5 1. 
+Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 19 (2) +[0]: 20 (2) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 2 (5) +Label 'marker': +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 19 (1) +[0]: 20 (1) +Label 'Face Sets': +[0]: 15 (4) +[0]: 16 (2) +[0]: 11 (1) +[0]: 13 (1) +[0]: 12 (3) +[0]: 14 (3) +Label 'fault': +[0]: 2 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 0 (102) +[0]: 11 (101) +[0]: 12 (101) +[0]: 1 (-102) +[0]: 13 (-101) +[0]: 14 (-101) diff --git a/src/dm/impls/plex/tests/output/ex69_quad_1.out b/src/dm/impls/plex/tests/output/ex69_quad_1.out new file mode 100644 index 00000000000..7c15041abb8 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_1.out @@ -0,0 +1,252 @@ +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 2 ----> 8 +[0]: 2 ----> 12 +[0]: 3 ----> 8 +[0]: 3 ----> 9 +[0]: 3 ----> 13 +[0]: 4 ----> 9 +[0]: 4 ----> 14 +[0]: 5 ----> 10 +[0]: 5 ----> 12 +[0]: 6 ----> 10 +[0]: 6 ----> 11 +[0]: 6 ----> 13 +[0]: 7 ----> 11 +[0]: 7 ----> 14 +[0]: 8 ----> 0 +[0]: 9 ----> 1 +[0]: 10 ----> 0 +[0]: 11 ----> 1 +[0]: 12 ----> 0 +[0]: 13 ----> 0 +[0]: 13 ----> 1 +[0]: 14 ----> 1 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 8 (0) +[0]: 0 <---- 13 (0) +[0]: 0 <---- 10 (-1) +[0]: 0 <---- 12 (-1) +[0]: 1 <---- 9 (0) +[0]: 1 <---- 14 (0) +[0]: 1 <---- 11 (-1) +[0]: 1 <---- 13 (-1) +[0]: 8 <---- 2 (0) +[0]: 8 <---- 3 (0) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 4 (0) +[0]: 10 <---- 5 (0) +[0]: 10 <---- 6 (0) +[0]: 11 <---- 6 (0) +[0]: 11 <---- 7 (0) +[0]: 12 <---- 2 (0) +[0]: 12 <---- 5 (0) +[0]: 13 <---- 3 (0) +[0]: 13 <---- 6 (0) +[0]: 14 <---- 4 (0) +[0]: 14 <---- 7 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 2) dim 2 offset 0 0. 0. + ( 3) dim 2 offset 2 0.5 0. + ( 4) dim 2 offset 4 1. 0. + ( 5) dim 2 offset 6 0. 1. + ( 6) dim 2 offset 8 0.5 1. + ( 7) dim 2 offset 10 1. 1. 
+Labels: +Label 'marker': +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 14 (1) +Label 'Face Sets': +[0]: 12 (4) +[0]: 14 (2) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (3) +[0]: 11 (3) +Label 'fault': +[0]: 13 (1) +[0]: 3 (0) +[0]: 6 (0) +[0]: 0 (102) +[0]: 8 (101) +[0]: 10 (101) +[0]: 1 (-102) +[0]: 9 (-101) +[0]: 11 (-101) +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 3 ----> 13 +[0]: 3 ----> 17 +[0]: 4 ----> 15 +[0]: 4 ----> 18 +[0]: 5 ----> 14 +[0]: 5 ----> 17 +[0]: 6 ----> 16 +[0]: 6 ----> 18 +[0]: 7 ----> 11 +[0]: 7 ----> 15 +[0]: 7 ----> 19 +[0]: 8 ----> 11 +[0]: 8 ----> 13 +[0]: 8 ----> 20 +[0]: 9 ----> 12 +[0]: 9 ----> 16 +[0]: 9 ----> 19 +[0]: 10 ----> 12 +[0]: 10 ----> 14 +[0]: 10 ----> 20 +[0]: 11 ----> 2 +[0]: 12 ----> 2 +[0]: 13 ----> 0 +[0]: 14 ----> 0 +[0]: 15 ----> 1 +[0]: 16 ----> 1 +[0]: 17 ----> 0 +[0]: 18 ----> 1 +[0]: 19 ----> 1 +[0]: 19 ----> 2 +[0]: 20 ----> 0 +[0]: 20 ----> 2 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 13 (0) +[0]: 0 <---- 20 (0) +[0]: 0 <---- 14 (-1) +[0]: 0 <---- 17 (-1) +[0]: 1 <---- 15 (0) +[0]: 1 <---- 18 (0) +[0]: 1 <---- 16 (-1) +[0]: 1 <---- 19 (-1) +[0]: 2 <---- 19 (0) +[0]: 2 <---- 12 (0) +[0]: 2 <---- 20 (-1) +[0]: 2 <---- 11 (-1) +[0]: 11 <---- 7 (0) +[0]: 11 <---- 8 (0) +[0]: 12 <---- 9 (0) +[0]: 12 <---- 10 (0) +[0]: 13 <---- 3 (0) +[0]: 13 <---- 8 (0) +[0]: 14 <---- 5 (0) +[0]: 14 <---- 10 (0) +[0]: 15 <---- 7 (0) +[0]: 15 <---- 4 (0) +[0]: 16 <---- 9 (0) +[0]: 16 <---- 6 (0) +[0]: 17 <---- 3 (0) +[0]: 17 <---- 5 (0) +[0]: 18 <---- 4 (0) +[0]: 18 <---- 6 (0) +[0]: 19 <---- 7 (0) +[0]: 19 <---- 9 (0) +[0]: 20 <---- 8 (0) +[0]: 20 <---- 10 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 3) dim 2 offset 0 0. 0. + ( 4) dim 2 offset 2 1. 0. + ( 5) dim 2 offset 4 0. 1. + ( 6) dim 2 offset 6 1. 1. + ( 7) dim 2 offset 8 0.5 0. + ( 8) dim 2 offset 10 0.5 0. + ( 9) dim 2 offset 12 0.5 1. + ( 10) dim 2 offset 14 0.5 1. 
+Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +Label 'marker': +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +Label 'Face Sets': +[0]: 17 (4) +[0]: 18 (2) +[0]: 13 (1) +[0]: 15 (1) +[0]: 14 (3) +[0]: 16 (3) +Label 'fault': +[0]: 2 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 0 (102) +[0]: 13 (101) +[0]: 14 (101) +[0]: 1 (-102) +[0]: 15 (-101) +[0]: 16 (-101) diff --git a/src/dm/impls/plex/tests/output/ex69_quad_2.out b/src/dm/impls/plex/tests/output/ex69_quad_2.out new file mode 100644 index 00000000000..e5154db42eb --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_2.out @@ -0,0 +1,364 @@ +DM Object: box 2 MPI processes + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 2 +[0]: 1 ----> 5 +[0]: 1 ----> 7 +[0]: 2 ----> 5 +[0]: 2 ----> 8 +[0]: 3 ----> 6 +[0]: 3 ----> 7 +[0]: 4 ----> 6 +[0]: 4 ----> 8 +[0]: 5 ----> 0 +[0]: 6 ----> 0 +[0]: 7 ----> 0 +[0]: 8 ----> 0 +[1] Max support size: 2 +[1]: 1 ----> 5 +[1]: 1 ----> 7 +[1]: 2 ----> 5 +[1]: 2 ----> 8 +[1]: 3 ----> 6 +[1]: 3 ----> 7 +[1]: 4 ----> 6 +[1]: 4 ----> 8 +[1]: 5 ----> 0 +[1]: 6 ----> 0 +[1]: 7 ----> 0 +[1]: 8 ----> 0 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 5 (0) +[0]: 0 <---- 8 (0) +[0]: 0 <---- 6 (-1) +[0]: 0 <---- 7 (-1) +[0]: 5 <---- 1 (0) +[0]: 5 <---- 2 (0) +[0]: 6 <---- 3 (0) +[0]: 6 <---- 4 (0) +[0]: 7 <---- 1 (0) +[0]: 7 <---- 3 (0) +[0]: 8 <---- 2 (0) +[0]: 8 <---- 4 (0) +[1] Max cone size: 4 +[1]: 0 <---- 5 (0) +[1]: 0 <---- 8 (0) +[1]: 0 <---- 6 (-1) +[1]: 0 <---- 7 (-1) +[1]: 5 <---- 1 (0) +[1]: 5 <---- 2 (0) +[1]: 6 <---- 3 (0) +[1]: 6 <---- 4 (0) +[1]: 7 <---- 1 (0) +[1]: 7 <---- 3 (0) +[1]: 8 <---- 2 (0) +[1]: 8 <---- 4 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 1) dim 2 offset 0 0. 0. + ( 2) dim 2 offset 2 0.5 0. + ( 3) dim 2 offset 4 0. 1. + ( 4) dim 2 offset 6 0.5 1. +Process 1: + ( 1) dim 2 offset 0 0.5 0. + ( 2) dim 2 offset 2 1. 0. + ( 3) dim 2 offset 4 0.5 1. + ( 4) dim 2 offset 6 1. 1. 
+Labels: +Label 'marker': +[0]: 1 (1) +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[1]: 1 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 8 (1) +Label 'Face Sets': +[0]: 5 (1) +[0]: 6 (3) +[0]: 7 (4) +[1]: 5 (1) +[1]: 8 (2) +[1]: 6 (3) +Label 'fault': +[0]: 2 (0) +[0]: 4 (0) +[0]: 8 (1) +[0]: 5 (101) +[0]: 6 (101) +[0]: 0 (102) +[1]: 0 (-102) +[1]: 5 (-101) +[1]: 6 (-101) +[1]: 1 (0) +[1]: 3 (0) +[1]: 7 (1) +Label 'celltype': +[0]: 1 (0) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 0 (4) +[1]: 1 (0) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[1]: 0 (4) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=9, leaves=3, remote ranks=1 + [0] 2 <- (1,1) + [0] 4 <- (1,3) + [0] 8 <- (1,7) + [1] Number of roots=9, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 3 edges + [0] 2 <- 1 + [0] 4 <- 3 + [0] 8 <- 7 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +DM Object: box 2 MPI processes + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 2 ----> 8 +[0]: 2 ----> 10 +[0]: 3 ----> 9 +[0]: 3 ----> 10 +[0]: 4 ----> 11 +[0]: 4 ----> 13 +[0]: 5 ----> 8 +[0]: 5 ----> 12 +[0]: 5 ----> 13 +[0]: 6 ----> 11 +[0]: 6 ----> 14 +[0]: 7 ----> 9 +[0]: 7 ----> 12 +[0]: 7 ----> 14 +[0]: 8 ----> 0 +[0]: 9 ----> 0 +[0]: 10 ----> 0 +[0]: 11 ----> 1 +[0]: 12 ----> 0 +[0]: 12 ----> 1 +[0]: 13 ----> 1 +[0]: 14 ----> 1 +[1] Max support size: 3 +[1]: 2 ----> 8 +[1]: 2 ----> 11 +[1]: 2 ----> 13 +[1]: 3 ----> 12 +[1]: 3 ----> 13 +[1]: 4 ----> 9 +[1]: 4 ----> 11 +[1]: 4 ----> 14 +[1]: 5 ----> 12 +[1]: 5 ----> 14 +[1]: 6 ----> 8 +[1]: 6 ----> 10 +[1]: 7 ----> 9 +[1]: 7 ----> 10 +[1]: 8 ----> 0 +[1]: 9 ----> 0 +[1]: 10 ----> 0 +[1]: 11 ----> 0 +[1]: 11 ----> 1 +[1]: 12 ----> 1 +[1]: 13 ----> 1 +[1]: 14 ----> 1 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 8 (0) +[0]: 0 <---- 12 (0) +[0]: 0 <---- 9 (-1) +[0]: 0 <---- 10 (-1) +[0]: 1 <---- 11 (0) +[0]: 1 <---- 12 (0) +[0]: 1 <---- 13 (0) +[0]: 1 <---- 14 (0) +[0]: 8 <---- 2 (0) +[0]: 8 <---- 5 (0) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 7 (0) +[0]: 10 <---- 2 (0) +[0]: 10 <---- 3 (0) +[0]: 11 <---- 4 (0) +[0]: 11 <---- 6 (0) +[0]: 12 <---- 5 (0) +[0]: 12 <---- 7 (0) +[0]: 13 <---- 4 (0) +[0]: 13 <---- 5 (0) +[0]: 14 <---- 6 (0) +[0]: 14 <---- 7 (0) +[1] Max cone size: 4 +[1]: 0 <---- 8 (0) +[1]: 0 <---- 10 (0) +[1]: 0 <---- 9 (-1) +[1]: 0 <---- 11 (-1) +[1]: 1 <---- 11 (0) +[1]: 1 <---- 12 (0) +[1]: 1 <---- 13 (0) +[1]: 1 <---- 14 (0) +[1]: 8 <---- 2 (0) +[1]: 8 <---- 6 (0) +[1]: 9 <---- 4 (0) +[1]: 9 <---- 7 (0) +[1]: 10 <---- 6 (0) +[1]: 10 <---- 7 (0) +[1]: 11 <---- 2 (0) +[1]: 11 <---- 4 (0) +[1]: 12 <---- 3 (0) +[1]: 12 <---- 5 (0) +[1]: 13 <---- 2 (0) +[1]: 13 <---- 3 (0) +[1]: 14 <---- 4 (0) +[1]: 14 <---- 5 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 2) dim 2 offset 0 0. 0. + ( 3) dim 2 offset 2 0. 1. + ( 4) dim 2 offset 4 0.5 0. + ( 5) dim 2 offset 6 0.5 0. + ( 6) dim 2 offset 8 0.5 1. + ( 7) dim 2 offset 10 0.5 1. +Process 1: + ( 2) dim 2 offset 0 0.5 0. + ( 3) dim 2 offset 2 0.5 0. + ( 4) dim 2 offset 4 0.5 1. + ( 5) dim 2 offset 6 0.5 1. + ( 6) dim 2 offset 8 1. 0. + ( 7) dim 2 offset 10 1. 1. 
+Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 13 (2) +[0]: 14 (2) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 1 (5) +[1]: 0 (4) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 13 (2) +[1]: 14 (2) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 1 (5) +Label 'marker': +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 13 (1) +[0]: 14 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 13 (1) +[1]: 14 (1) +Label 'Face Sets': +[0]: 8 (1) +[0]: 9 (3) +[0]: 10 (4) +[1]: 8 (1) +[1]: 10 (2) +[1]: 9 (3) +Label 'fault': +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 1 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 8 (101) +[0]: 9 (101) +[0]: 0 (102) +[1]: 0 (-102) +[1]: 8 (-101) +[1]: 9 (-101) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 13 (0) +[1]: 14 (0) +[1]: 1 (1) +[1]: 11 (1) +[1]: 12 (1) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=15, leaves=9, remote ranks=1 + [0] 1 <- (1,1) + [0] 4 <- (1,2) + [0] 5 <- (1,3) + [0] 6 <- (1,4) + [0] 7 <- (1,5) + [0] 11 <- (1,11) + [0] 12 <- (1,12) + [0] 13 <- (1,13) + [0] 14 <- (1,14) + [1] Number of roots=15, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 9 edges + [0] 1 <- 1 + [0] 4 <- 2 + [0] 5 <- 3 + [0] 6 <- 4 + [0] 7 <- 5 + [0] 11 <- 11 + [0] 12 <- 12 + [0] 13 <- 13 + [0] 14 <- 14 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order diff --git a/src/dm/impls/plex/tests/output/ex69_quad_3.out b/src/dm/impls/plex/tests/output/ex69_quad_3.out new file mode 100644 index 00000000000..a9b17b33f48 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_3.out @@ -0,0 +1,785 @@ +[0]BT for serial flipped cells: +0 0 +[1]BT for serial flipped cells: +0 0 +[2]BT for serial flipped cells: +0 0 +[3]BT for serial flipped cells: +0 0 +[1]: component 0, Found representative leaf 0 (face 3) connecting to face 1 on (3, 0) with orientation -1 +Proc 0 Comp 0: +Proc 1 Comp 0: + edge (3, 0) (FALSE): +Proc 2 Comp 0: +Proc 3 Comp 0: +Flipping Proc+Comp 3: +[0]BT for parallel flipped cells: +0 0 +[1]BT for parallel flipped cells: +0 0 +[2]BT for parallel flipped cells: +0 0 +[3]BT for parallel flipped cells: +0 1 +[2]Flipping cell 8 through overlap +[3]Flipping cell 7 and sending to overlap +DM Object: box 4 MPI processes + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 2 +[0]: 1 ----> 5 +[0]: 1 ----> 7 +[0]: 2 ----> 5 +[0]: 2 ----> 8 +[0]: 3 ----> 6 +[0]: 3 ----> 7 +[0]: 4 ----> 6 +[0]: 4 ----> 8 +[0]: 5 ----> 0 +[0]: 6 ----> 0 +[0]: 7 ----> 0 +[0]: 8 ----> 0 +[1] Max support size: 2 +[1]: 1 ----> 5 +[1]: 1 ----> 7 +[1]: 2 ----> 5 +[1]: 2 ----> 8 +[1]: 3 ----> 6 +[1]: 3 ----> 7 +[1]: 4 ----> 6 +[1]: 4 ----> 8 +[1]: 5 ----> 0 +[1]: 6 ----> 0 +[1]: 7 ----> 0 +[1]: 8 ----> 0 +[2] Max support size: 2 +[2]: 1 ----> 5 +[2]: 1 ----> 7 +[2]: 2 ----> 5 +[2]: 2 ----> 8 +[2]: 3 ----> 6 +[2]: 3 ----> 7 +[2]: 4 ----> 6 +[2]: 4 ----> 8 +[2]: 5 ----> 0 +[2]: 6 ----> 0 +[2]: 7 ----> 0 +[2]: 8 ----> 0 +[3] Max support size: 2 +[3]: 1 ----> 5 +[3]: 1 ----> 7 +[3]: 2 ----> 5 +[3]: 2 ----> 8 +[3]: 3 ----> 6 +[3]: 3 ----> 7 +[3]: 4 ----> 6 +[3]: 4 ----> 8 +[3]: 5 ----> 0 +[3]: 6 ----> 0 +[3]: 7 ----> 0 +[3]: 8 ----> 0 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 5 (0) +[0]: 0 <---- 8 (0) +[0]: 0 
<---- 6 (-1) +[0]: 0 <---- 7 (-1) +[0]: 5 <---- 1 (0) +[0]: 5 <---- 2 (0) +[0]: 6 <---- 3 (0) +[0]: 6 <---- 4 (0) +[0]: 7 <---- 1 (0) +[0]: 7 <---- 3 (0) +[0]: 8 <---- 2 (0) +[0]: 8 <---- 4 (0) +[1] Max cone size: 4 +[1]: 0 <---- 5 (0) +[1]: 0 <---- 8 (0) +[1]: 0 <---- 6 (-1) +[1]: 0 <---- 7 (-1) +[1]: 5 <---- 1 (0) +[1]: 5 <---- 2 (0) +[1]: 6 <---- 3 (0) +[1]: 6 <---- 4 (0) +[1]: 7 <---- 1 (0) +[1]: 7 <---- 3 (0) +[1]: 8 <---- 2 (0) +[1]: 8 <---- 4 (0) +[2] Max cone size: 4 +[2]: 0 <---- 5 (0) +[2]: 0 <---- 8 (0) +[2]: 0 <---- 6 (-1) +[2]: 0 <---- 7 (-1) +[2]: 5 <---- 1 (0) +[2]: 5 <---- 2 (0) +[2]: 6 <---- 3 (0) +[2]: 6 <---- 4 (0) +[2]: 7 <---- 1 (0) +[2]: 7 <---- 3 (0) +[2]: 8 <---- 2 (0) +[2]: 8 <---- 4 (0) +[3] Max cone size: 4 +[3]: 0 <---- 5 (0) +[3]: 0 <---- 8 (0) +[3]: 0 <---- 6 (-1) +[3]: 0 <---- 7 (-1) +[3]: 5 <---- 1 (0) +[3]: 5 <---- 2 (0) +[3]: 6 <---- 3 (0) +[3]: 6 <---- 4 (0) +[3]: 7 <---- 1 (0) +[3]: 7 <---- 3 (0) +[3]: 8 <---- 2 (0) +[3]: 8 <---- 4 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 1) dim 2 offset 0 0. 0. + ( 2) dim 2 offset 2 1. 0. + ( 3) dim 2 offset 4 0. 1. + ( 4) dim 2 offset 6 1. 1. +Process 1: + ( 1) dim 2 offset 0 1. 0. + ( 2) dim 2 offset 2 2. 0. + ( 3) dim 2 offset 4 1. 1. + ( 4) dim 2 offset 6 2. 1. +Process 2: + ( 1) dim 2 offset 0 0. 1. + ( 2) dim 2 offset 2 1. 1. + ( 3) dim 2 offset 4 0. 2. + ( 4) dim 2 offset 6 1. 2. +Process 3: + ( 1) dim 2 offset 0 1. 1. + ( 2) dim 2 offset 2 2. 1. + ( 3) dim 2 offset 4 1. 2. + ( 4) dim 2 offset 6 2. 2. +Labels: +Label 'marker': +[0]: 1 (1) +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[1]: 1 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[2]: 1 (1) +[2]: 2 (1) +[2]: 3 (1) +[2]: 4 (1) +[2]: 5 (1) +[2]: 6 (1) +[2]: 7 (1) +[2]: 8 (1) +[3]: 1 (1) +[3]: 2 (1) +[3]: 3 (1) +[3]: 4 (1) +[3]: 5 (1) +[3]: 6 (1) +[3]: 7 (1) +[3]: 8 (1) +Label 'Face Sets': +[0]: 7 (4) +[0]: 8 (2) +[0]: 5 (1) +[0]: 6 (3) +[1]: 7 (4) +[1]: 8 (2) +[1]: 5 (1) +[1]: 6 (3) +[2]: 7 (4) +[2]: 8 (2) +[2]: 5 (1) +[2]: 6 (3) +[3]: 7 (4) +[3]: 8 (2) +[3]: 5 (1) +[3]: 6 (3) +Label 'celltype': +[0]: 0 (4) +[0]: 1 (0) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[1]: 0 (4) +[1]: 1 (0) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[2]: 0 (4) +[2]: 1 (0) +[2]: 2 (0) +[2]: 3 (0) +[2]: 4 (0) +[2]: 5 (1) +[2]: 6 (1) +[2]: 7 (1) +[2]: 8 (1) +[3]: 0 (4) +[3]: 1 (0) +[3]: 2 (0) +[3]: 3 (0) +[3]: 4 (0) +[3]: 5 (1) +[3]: 6 (1) +[3]: 7 (1) +[3]: 8 (1) +Label 'fault': +[0]: 8 (1) +[0]: 2 (0) +[0]: 4 (0) +[0]: 0 (102) +[0]: 5 (101) +[0]: 6 (101) +[1]: 7 (1) +[1]: 1 (0) +[1]: 3 (0) +[1]: 0 (-102) +[1]: 5 (-101) +[1]: 6 (-101) +[2]: 8 (1) +[2]: 2 (0) +[2]: 4 (0) +[2]: 0 (102) +[2]: 5 (101) +[2]: 6 (101) +[3]: 7 (1) +[3]: 1 (0) +[3]: 3 (0) +[3]: 0 (-102) +[3]: 5 (-101) +[3]: 6 (-101) +PetscSF Object: 4 MPI processes + type: basic + [0] Number of roots=9, leaves=5, remote ranks=3 + [0] 2 <- (1,1) + [0] 3 <- (2,1) + [0] 4 <- (3,1) + [0] 6 <- (2,5) + [0] 8 <- (1,7) + [1] Number of roots=9, leaves=3, remote ranks=1 + [1] 3 <- (3,1) + [1] 4 <- (3,2) + [1] 6 <- (3,5) + [2] Number of roots=9, leaves=3, remote ranks=1 + [2] 2 <- (3,1) + [2] 4 <- (3,3) + [2] 8 <- (3,7) + [3] Number of roots=9, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 2 edges + [0] 2 <- 1 + [0] 8 <- 7 + [0] 2: 2 edges + [0] 3 <- 1 + [0] 6 <- 5 + [0] 3: 1 edges + [0] 4 <- 1 + [1] Roots 
referenced by my leaves, by rank + [1] 3: 3 edges + [1] 3 <- 1 + [1] 4 <- 2 + [1] 6 <- 5 + [2] Roots referenced by my leaves, by rank + [2] 3: 3 edges + [2] 2 <- 1 + [2] 4 <- 3 + [2] 8 <- 7 + [3] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +DM Object: box 4 MPI processes + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 2 ----> 8 +[0]: 2 ----> 10 +[0]: 3 ----> 9 +[0]: 3 ----> 10 +[0]: 4 ----> 11 +[0]: 4 ----> 13 +[0]: 5 ----> 8 +[0]: 5 ----> 12 +[0]: 5 ----> 13 +[0]: 6 ----> 11 +[0]: 6 ----> 14 +[0]: 7 ----> 9 +[0]: 7 ----> 12 +[0]: 7 ----> 14 +[0]: 8 ----> 0 +[0]: 9 ----> 0 +[0]: 10 ----> 0 +[0]: 11 ----> 1 +[0]: 12 ----> 0 +[0]: 12 ----> 1 +[0]: 13 ----> 1 +[0]: 14 ----> 1 +[1] Max support size: 3 +[1]: 2 ----> 8 +[1]: 2 ----> 11 +[1]: 2 ----> 13 +[1]: 3 ----> 12 +[1]: 3 ----> 13 +[1]: 4 ----> 9 +[1]: 4 ----> 11 +[1]: 4 ----> 14 +[1]: 5 ----> 12 +[1]: 5 ----> 14 +[1]: 6 ----> 8 +[1]: 6 ----> 10 +[1]: 7 ----> 9 +[1]: 7 ----> 10 +[1]: 8 ----> 0 +[1]: 9 ----> 0 +[1]: 10 ----> 0 +[1]: 11 ----> 0 +[1]: 11 ----> 1 +[1]: 12 ----> 1 +[1]: 13 ----> 1 +[1]: 14 ----> 1 +[2] Max support size: 3 +[2]: 2 ----> 8 +[2]: 2 ----> 10 +[2]: 3 ----> 9 +[2]: 3 ----> 10 +[2]: 4 ----> 11 +[2]: 4 ----> 13 +[2]: 5 ----> 8 +[2]: 5 ----> 12 +[2]: 5 ----> 13 +[2]: 6 ----> 11 +[2]: 6 ----> 14 +[2]: 7 ----> 9 +[2]: 7 ----> 12 +[2]: 7 ----> 14 +[2]: 8 ----> 0 +[2]: 9 ----> 0 +[2]: 10 ----> 0 +[2]: 11 ----> 1 +[2]: 12 ----> 0 +[2]: 12 ----> 1 +[2]: 13 ----> 1 +[2]: 14 ----> 1 +[3] Max support size: 3 +[3]: 2 ----> 8 +[3]: 2 ----> 11 +[3]: 2 ----> 13 +[3]: 3 ----> 12 +[3]: 3 ----> 13 +[3]: 4 ----> 9 +[3]: 4 ----> 11 +[3]: 4 ----> 14 +[3]: 5 ----> 12 +[3]: 5 ----> 14 +[3]: 6 ----> 8 +[3]: 6 ----> 10 +[3]: 7 ----> 9 +[3]: 7 ----> 10 +[3]: 8 ----> 0 +[3]: 9 ----> 0 +[3]: 10 ----> 0 +[3]: 11 ----> 0 +[3]: 11 ----> 1 +[3]: 12 ----> 1 +[3]: 13 ----> 1 +[3]: 14 ----> 1 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 8 (0) +[0]: 0 <---- 12 (0) +[0]: 0 <---- 9 (-1) +[0]: 0 <---- 10 (-1) +[0]: 1 <---- 11 (0) +[0]: 1 <---- 12 (0) +[0]: 1 <---- 13 (0) +[0]: 1 <---- 14 (0) +[0]: 8 <---- 2 (0) +[0]: 8 <---- 5 (0) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 7 (0) +[0]: 10 <---- 2 (0) +[0]: 10 <---- 3 (0) +[0]: 11 <---- 4 (0) +[0]: 11 <---- 6 (0) +[0]: 12 <---- 5 (0) +[0]: 12 <---- 7 (0) +[0]: 13 <---- 4 (0) +[0]: 13 <---- 5 (0) +[0]: 14 <---- 6 (0) +[0]: 14 <---- 7 (0) +[1] Max cone size: 4 +[1]: 0 <---- 8 (0) +[1]: 0 <---- 10 (0) +[1]: 0 <---- 9 (-1) +[1]: 0 <---- 11 (-1) +[1]: 1 <---- 11 (0) +[1]: 1 <---- 12 (0) +[1]: 1 <---- 13 (0) +[1]: 1 <---- 14 (0) +[1]: 8 <---- 2 (0) +[1]: 8 <---- 6 (0) +[1]: 9 <---- 4 (0) +[1]: 9 <---- 7 (0) +[1]: 10 <---- 6 (0) +[1]: 10 <---- 7 (0) +[1]: 11 <---- 2 (0) +[1]: 11 <---- 4 (0) +[1]: 12 <---- 3 (0) +[1]: 12 <---- 5 (0) +[1]: 13 <---- 2 (0) +[1]: 13 <---- 3 (0) +[1]: 14 <---- 4 (0) +[1]: 14 <---- 5 (0) +[2] Max cone size: 4 +[2]: 0 <---- 8 (0) +[2]: 0 <---- 12 (0) +[2]: 0 <---- 9 (-1) +[2]: 0 <---- 10 (-1) +[2]: 1 <---- 11 (0) +[2]: 1 <---- 12 (0) +[2]: 1 <---- 13 (0) +[2]: 1 <---- 14 (0) +[2]: 8 <---- 2 (0) +[2]: 8 <---- 5 (0) +[2]: 9 <---- 3 (0) +[2]: 9 <---- 7 (0) +[2]: 10 <---- 2 (0) +[2]: 10 <---- 3 (0) +[2]: 11 <---- 4 (0) +[2]: 11 <---- 6 (0) +[2]: 12 <---- 5 (0) +[2]: 12 <---- 7 (0) +[2]: 13 <---- 4 (0) +[2]: 13 <---- 5 (0) +[2]: 14 <---- 6 (0) +[2]: 14 <---- 7 (0) +[3] Max cone size: 4 +[3]: 0 <---- 8 (0) +[3]: 0 <---- 10 (0) +[3]: 0 <---- 9 (-1) +[3]: 0 <---- 11 (-1) +[3]: 1 <---- 11 (0) +[3]: 1 <---- 12 (0) +[3]: 1 <---- 13 (0) +[3]: 1 <---- 14 (0) +[3]: 
8 <---- 2 (0) +[3]: 8 <---- 6 (0) +[3]: 9 <---- 4 (0) +[3]: 9 <---- 7 (0) +[3]: 10 <---- 6 (0) +[3]: 10 <---- 7 (0) +[3]: 11 <---- 2 (0) +[3]: 11 <---- 4 (0) +[3]: 12 <---- 3 (0) +[3]: 12 <---- 5 (0) +[3]: 13 <---- 2 (0) +[3]: 13 <---- 3 (0) +[3]: 14 <---- 4 (0) +[3]: 14 <---- 5 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 2) dim 2 offset 0 0. 0. + ( 3) dim 2 offset 2 0. 1. + ( 4) dim 2 offset 4 1. 0. + ( 5) dim 2 offset 6 1. 0. + ( 6) dim 2 offset 8 1. 1. + ( 7) dim 2 offset 10 1. 1. +Process 1: + ( 2) dim 2 offset 0 1. 0. + ( 3) dim 2 offset 2 1. 0. + ( 4) dim 2 offset 4 1. 1. + ( 5) dim 2 offset 6 1. 1. + ( 6) dim 2 offset 8 2. 0. + ( 7) dim 2 offset 10 2. 1. +Process 2: + ( 2) dim 2 offset 0 0. 1. + ( 3) dim 2 offset 2 0. 2. + ( 4) dim 2 offset 4 1. 1. + ( 5) dim 2 offset 6 1. 1. + ( 6) dim 2 offset 8 1. 2. + ( 7) dim 2 offset 10 1. 2. +Process 3: + ( 2) dim 2 offset 0 1. 1. + ( 3) dim 2 offset 2 1. 1. + ( 4) dim 2 offset 4 1. 2. + ( 5) dim 2 offset 6 1. 2. + ( 6) dim 2 offset 8 2. 1. + ( 7) dim 2 offset 10 2. 2. +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 13 (2) +[0]: 14 (2) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 1 (5) +[1]: 0 (4) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 13 (2) +[1]: 14 (2) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 1 (5) +[2]: 0 (4) +[2]: 2 (0) +[2]: 3 (0) +[2]: 4 (0) +[2]: 5 (0) +[2]: 6 (0) +[2]: 7 (0) +[2]: 13 (2) +[2]: 14 (2) +[2]: 8 (1) +[2]: 9 (1) +[2]: 10 (1) +[2]: 11 (1) +[2]: 12 (1) +[2]: 1 (5) +[3]: 0 (4) +[3]: 2 (0) +[3]: 3 (0) +[3]: 4 (0) +[3]: 5 (0) +[3]: 6 (0) +[3]: 7 (0) +[3]: 13 (2) +[3]: 14 (2) +[3]: 8 (1) +[3]: 9 (1) +[3]: 10 (1) +[3]: 11 (1) +[3]: 12 (1) +[3]: 1 (5) +Label 'marker': +[0]: 1 (1) +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[1]: 1 (1) +[1]: 2 (1) +[1]: 3 (1) +[1]: 4 (1) +[1]: 5 (1) +[1]: 6 (1) +[1]: 7 (1) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[2]: 1 (1) +[2]: 2 (1) +[2]: 3 (1) +[2]: 4 (1) +[2]: 5 (1) +[2]: 6 (1) +[2]: 7 (1) +[2]: 8 (1) +[2]: 9 (1) +[2]: 10 (1) +[2]: 11 (1) +[2]: 12 (1) +[2]: 13 (1) +[2]: 14 (1) +[3]: 1 (1) +[3]: 2 (1) +[3]: 3 (1) +[3]: 4 (1) +[3]: 5 (1) +[3]: 6 (1) +[3]: 7 (1) +[3]: 8 (1) +[3]: 9 (1) +[3]: 10 (1) +[3]: 11 (1) +[3]: 12 (1) +[3]: 13 (1) +[3]: 14 (1) +Label 'Face Sets': +[0]: 10 (4) +[0]: 1 (2) +[0]: 11 (2) +[0]: 12 (2) +[0]: 8 (1) +[0]: 9 (3) +[1]: 1 (4) +[1]: 11 (4) +[1]: 12 (4) +[1]: 10 (2) +[1]: 8 (1) +[1]: 9 (3) +[2]: 10 (4) +[2]: 1 (2) +[2]: 11 (2) +[2]: 12 (2) +[2]: 8 (1) +[2]: 9 (3) +[3]: 1 (4) +[3]: 11 (4) +[3]: 12 (4) +[3]: 10 (2) +[3]: 8 (1) +[3]: 9 (3) +Label 'fault': +[0]: 1 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 0 (102) +[0]: 8 (101) +[0]: 9 (101) +[1]: 1 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 13 (0) +[1]: 14 (0) +[1]: 0 (-102) +[1]: 8 (-101) +[1]: 9 (-101) +[2]: 1 (1) +[2]: 11 (1) +[2]: 12 (1) +[2]: 4 (0) +[2]: 5 (0) +[2]: 6 (0) +[2]: 7 (0) +[2]: 13 (0) +[2]: 14 (0) +[2]: 0 (102) +[2]: 8 (101) +[2]: 9 (101) +[3]: 1 (1) +[3]: 11 (1) +[3]: 12 (1) +[3]: 2 (0) +[3]: 3 (0) +[3]: 4 (0) +[3]: 5 (0) +[3]: 13 (0) +[3]: 14 (0) +[3]: 0 (-102) +[3]: 8 (-101) +[3]: 9 (-101) +PetscSF Object: 4 MPI processes + type: basic + [0] 
Number of roots=15, leaves=11, remote ranks=3 + [0] 1 <- (1,1) + [0] 3 <- (2,2) + [0] 4 <- (1,2) + [0] 5 <- (1,3) + [0] 6 <- (3,2) + [0] 7 <- (3,3) + [0] 9 <- (2,8) + [0] 11 <- (1,11) + [0] 12 <- (1,12) + [0] 13 <- (1,13) + [0] 14 <- (3,13) + [1] Number of roots=15, leaves=5, remote ranks=1 + [1] 4 <- (3,2) + [1] 5 <- (3,3) + [1] 7 <- (3,6) + [1] 9 <- (3,8) + [1] 14 <- (3,13) + [2] Number of roots=15, leaves=9, remote ranks=1 + [2] 1 <- (3,1) + [2] 4 <- (3,2) + [2] 5 <- (3,3) + [2] 6 <- (3,4) + [2] 7 <- (3,5) + [2] 11 <- (3,11) + [2] 12 <- (3,12) + [2] 13 <- (3,13) + [2] 14 <- (3,14) + [3] Number of roots=15, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 6 edges + [0] 1 <- 1 + [0] 4 <- 2 + [0] 5 <- 3 + [0] 11 <- 11 + [0] 12 <- 12 + [0] 13 <- 13 + [0] 2: 2 edges + [0] 3 <- 2 + [0] 9 <- 8 + [0] 3: 3 edges + [0] 6 <- 2 + [0] 7 <- 3 + [0] 14 <- 13 + [1] Roots referenced by my leaves, by rank + [1] 3: 5 edges + [1] 4 <- 2 + [1] 5 <- 3 + [1] 7 <- 6 + [1] 9 <- 8 + [1] 14 <- 13 + [2] Roots referenced by my leaves, by rank + [2] 3: 9 edges + [2] 1 <- 1 + [2] 4 <- 2 + [2] 5 <- 3 + [2] 6 <- 4 + [2] 7 <- 5 + [2] 11 <- 11 + [2] 12 <- 12 + [2] 13 <- 13 + [2] 14 <- 14 + [3] Roots referenced by my leaves, by rank + MultiSF sort=rank-order diff --git a/src/dm/impls/plex/tests/output/ex69_quad_4.out b/src/dm/impls/plex/tests/output/ex69_quad_4.out new file mode 100644 index 00000000000..b6734c8286b --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_4.out @@ -0,0 +1,510 @@ +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 4 +[0]: 6 ----> 18 +[0]: 6 ----> 27 +[0]: 7 ----> 18 +[0]: 7 ----> 19 +[0]: 7 ----> 29 +[0]: 8 ----> 19 +[0]: 8 ----> 20 +[0]: 8 ----> 31 +[0]: 9 ----> 20 +[0]: 9 ----> 33 +[0]: 10 ----> 21 +[0]: 10 ----> 27 +[0]: 10 ----> 28 +[0]: 11 ----> 21 +[0]: 11 ----> 22 +[0]: 11 ----> 29 +[0]: 11 ----> 30 +[0]: 12 ----> 22 +[0]: 12 ----> 23 +[0]: 12 ----> 31 +[0]: 12 ----> 32 +[0]: 13 ----> 23 +[0]: 13 ----> 33 +[0]: 13 ----> 34 +[0]: 14 ----> 24 +[0]: 14 ----> 28 +[0]: 15 ----> 24 +[0]: 15 ----> 25 +[0]: 15 ----> 30 +[0]: 16 ----> 25 +[0]: 16 ----> 26 +[0]: 16 ----> 32 +[0]: 17 ----> 26 +[0]: 17 ----> 34 +[0]: 18 ----> 0 +[0]: 19 ----> 1 +[0]: 20 ----> 2 +[0]: 21 ----> 0 +[0]: 21 ----> 3 +[0]: 22 ----> 1 +[0]: 22 ----> 4 +[0]: 23 ----> 2 +[0]: 23 ----> 5 +[0]: 24 ----> 3 +[0]: 25 ----> 4 +[0]: 26 ----> 5 +[0]: 27 ----> 0 +[0]: 28 ----> 3 +[0]: 29 ----> 0 +[0]: 29 ----> 1 +[0]: 30 ----> 3 +[0]: 30 ----> 4 +[0]: 31 ----> 1 +[0]: 31 ----> 2 +[0]: 32 ----> 4 +[0]: 32 ----> 5 +[0]: 33 ----> 2 +[0]: 34 ----> 5 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 18 (0) +[0]: 0 <---- 29 (0) +[0]: 0 <---- 21 (-1) +[0]: 0 <---- 27 (-1) +[0]: 1 <---- 19 (0) +[0]: 1 <---- 31 (0) +[0]: 1 <---- 22 (-1) +[0]: 1 <---- 29 (-1) +[0]: 2 <---- 20 (0) +[0]: 2 <---- 33 (0) +[0]: 2 <---- 23 (-1) +[0]: 2 <---- 31 (-1) +[0]: 3 <---- 21 (0) +[0]: 3 <---- 30 (0) +[0]: 3 <---- 24 (-1) +[0]: 3 <---- 28 (-1) +[0]: 4 <---- 22 (0) +[0]: 4 <---- 32 (0) +[0]: 4 <---- 25 (-1) +[0]: 4 <---- 30 (-1) +[0]: 5 <---- 23 (0) +[0]: 5 <---- 34 (0) +[0]: 5 <---- 26 (-1) +[0]: 5 <---- 32 (-1) +[0]: 18 <---- 6 (0) +[0]: 18 <---- 7 (0) +[0]: 19 <---- 7 (0) +[0]: 19 <---- 8 (0) +[0]: 20 <---- 8 (0) +[0]: 20 <---- 9 (0) +[0]: 21 <---- 10 (0) +[0]: 21 <---- 11 (0) +[0]: 22 <---- 11 (0) +[0]: 22 <---- 12 (0) +[0]: 23 <---- 12 (0) +[0]: 23 <---- 13 (0) +[0]: 24 <---- 14 (0) +[0]: 24 <---- 15 (0) +[0]: 25 <---- 15 (0) +[0]: 25 <---- 16 (0) +[0]: 26 <---- 16 (0) +[0]: 
26 <---- 17 (0) +[0]: 27 <---- 6 (0) +[0]: 27 <---- 10 (0) +[0]: 28 <---- 10 (0) +[0]: 28 <---- 14 (0) +[0]: 29 <---- 7 (0) +[0]: 29 <---- 11 (0) +[0]: 30 <---- 11 (0) +[0]: 30 <---- 15 (0) +[0]: 31 <---- 8 (0) +[0]: 31 <---- 12 (0) +[0]: 32 <---- 12 (0) +[0]: 32 <---- 16 (0) +[0]: 33 <---- 9 (0) +[0]: 33 <---- 13 (0) +[0]: 34 <---- 13 (0) +[0]: 34 <---- 17 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 6) dim 2 offset 0 0. 0. + ( 7) dim 2 offset 2 0.333333 0. + ( 8) dim 2 offset 4 0.666667 0. + ( 9) dim 2 offset 6 1. 0. + ( 10) dim 2 offset 8 0. 0.5 + ( 11) dim 2 offset 10 0.333333 0.5 + ( 12) dim 2 offset 12 0.666667 0.5 + ( 13) dim 2 offset 14 1. 0.5 + ( 14) dim 2 offset 16 0. 1. + ( 15) dim 2 offset 18 0.333333 1. + ( 16) dim 2 offset 20 0.666667 1. + ( 17) dim 2 offset 22 1. 1. +Labels: +Label 'marker': +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 33 (1) +[0]: 34 (1) +Label 'Face Sets': +[0]: 27 (4) +[0]: 28 (4) +[0]: 33 (2) +[0]: 34 (2) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (3) +[0]: 25 (3) +[0]: 26 (3) +Label 'fault': +[0]: 22 (1) +[0]: 23 (1) +[0]: 12 (0) +[0]: 13 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 29 (-101) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 21 (101) +[0]: 30 (101) +[0]: 32 (101) +[0]: 34 (101) +[0]: 11 (200) +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 7 +[0]: 8 ----> 22 +[0]: 8 ----> 29 +[0]: 9 ----> 22 +[0]: 9 ----> 23 +[0]: 9 ----> 31 +[0]: 10 ----> 23 +[0]: 10 ----> 24 +[0]: 10 ----> 33 +[0]: 11 ----> 24 +[0]: 11 ----> 34 +[0]: 12 ----> 25 +[0]: 12 ----> 29 +[0]: 12 ----> 30 +[0]: 13 ----> 26 +[0]: 13 ----> 30 +[0]: 14 ----> 26 +[0]: 14 ----> 27 +[0]: 14 ----> 32 +[0]: 15 ----> 27 +[0]: 15 ----> 28 +[0]: 15 ----> 39 +[0]: 16 ----> 28 +[0]: 16 ----> 40 +[0]: 17 ----> 25 +[0]: 17 ----> 31 +[0]: 17 ----> 32 +[0]: 17 ----> 35 +[0]: 17 ----> 36 +[0]: 17 ----> 41 +[0]: 17 ----> 41 +[0]: 18 ----> 33 +[0]: 18 ----> 35 +[0]: 18 ----> 37 +[0]: 18 ----> 42 +[0]: 19 ----> 36 +[0]: 19 ----> 38 +[0]: 19 ----> 39 +[0]: 19 ----> 42 +[0]: 20 ----> 34 +[0]: 20 ----> 37 +[0]: 20 ----> 43 +[0]: 21 ----> 38 +[0]: 21 ----> 40 +[0]: 21 ----> 43 +[0]: 22 ----> 0 +[0]: 23 ----> 1 +[0]: 24 ----> 2 +[0]: 25 ----> 0 +[0]: 25 ----> 3 +[0]: 26 ----> 3 +[0]: 27 ----> 4 +[0]: 28 ----> 5 +[0]: 29 ----> 0 +[0]: 30 ----> 3 +[0]: 31 ----> 0 +[0]: 31 ----> 1 +[0]: 32 ----> 3 +[0]: 32 ----> 4 +[0]: 33 ----> 1 +[0]: 33 ----> 2 +[0]: 34 ----> 2 +[0]: 35 ----> 1 +[0]: 35 ----> 6 +[0]: 36 ----> 4 +[0]: 36 ----> 6 +[0]: 37 ----> 2 +[0]: 37 ----> 7 +[0]: 38 ----> 5 +[0]: 38 ----> 7 +[0]: 39 ----> 4 +[0]: 39 ----> 5 +[0]: 40 ----> 5 +[0]: 41 ----> 6 +[0]: 42 ----> 6 +[0]: 42 ----> 7 +[0]: 43 ----> 7 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 22 (0) +[0]: 0 <---- 31 (0) +[0]: 0 <---- 25 (-1) +[0]: 0 <---- 29 (-1) +[0]: 1 
<---- 23 (0) +[0]: 1 <---- 33 (0) +[0]: 1 <---- 35 (-1) +[0]: 1 <---- 31 (-1) +[0]: 2 <---- 24 (0) +[0]: 2 <---- 34 (0) +[0]: 2 <---- 37 (-1) +[0]: 2 <---- 33 (-1) +[0]: 3 <---- 25 (0) +[0]: 3 <---- 32 (0) +[0]: 3 <---- 26 (-1) +[0]: 3 <---- 30 (-1) +[0]: 4 <---- 36 (0) +[0]: 4 <---- 39 (0) +[0]: 4 <---- 27 (-1) +[0]: 4 <---- 32 (-1) +[0]: 5 <---- 38 (0) +[0]: 5 <---- 40 (0) +[0]: 5 <---- 28 (-1) +[0]: 5 <---- 39 (-1) +[0]: 6 <---- 35 (0) +[0]: 6 <---- 36 (0) +[0]: 6 <---- 41 (0) +[0]: 6 <---- 42 (0) +[0]: 7 <---- 37 (0) +[0]: 7 <---- 38 (0) +[0]: 7 <---- 42 (0) +[0]: 7 <---- 43 (0) +[0]: 22 <---- 8 (0) +[0]: 22 <---- 9 (0) +[0]: 23 <---- 9 (0) +[0]: 23 <---- 10 (0) +[0]: 24 <---- 10 (0) +[0]: 24 <---- 11 (0) +[0]: 25 <---- 12 (0) +[0]: 25 <---- 17 (0) +[0]: 26 <---- 13 (0) +[0]: 26 <---- 14 (0) +[0]: 27 <---- 14 (0) +[0]: 27 <---- 15 (0) +[0]: 28 <---- 15 (0) +[0]: 28 <---- 16 (0) +[0]: 29 <---- 8 (0) +[0]: 29 <---- 12 (0) +[0]: 30 <---- 12 (0) +[0]: 30 <---- 13 (0) +[0]: 31 <---- 9 (0) +[0]: 31 <---- 17 (0) +[0]: 32 <---- 17 (0) +[0]: 32 <---- 14 (0) +[0]: 33 <---- 10 (0) +[0]: 33 <---- 18 (0) +[0]: 34 <---- 11 (0) +[0]: 34 <---- 20 (0) +[0]: 35 <---- 17 (0) +[0]: 35 <---- 18 (0) +[0]: 36 <---- 17 (0) +[0]: 36 <---- 19 (0) +[0]: 37 <---- 18 (0) +[0]: 37 <---- 20 (0) +[0]: 38 <---- 19 (0) +[0]: 38 <---- 21 (0) +[0]: 39 <---- 19 (0) +[0]: 39 <---- 15 (0) +[0]: 40 <---- 21 (0) +[0]: 40 <---- 16 (0) +[0]: 41 <---- 17 (0) +[0]: 41 <---- 17 (0) +[0]: 42 <---- 18 (0) +[0]: 42 <---- 19 (0) +[0]: 43 <---- 20 (0) +[0]: 43 <---- 21 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 8) dim 2 offset 0 0. 0. + ( 9) dim 2 offset 2 0.333333 0. + ( 10) dim 2 offset 4 0.666667 0. + ( 11) dim 2 offset 6 1. 0. + ( 12) dim 2 offset 8 0. 0.5 + ( 13) dim 2 offset 10 0. 1. + ( 14) dim 2 offset 12 0.333333 1. + ( 15) dim 2 offset 14 0.666667 1. + ( 16) dim 2 offset 16 1. 1. + ( 17) dim 2 offset 18 0.333333 0.5 + ( 18) dim 2 offset 20 0.666667 0.5 + ( 19) dim 2 offset 22 0.666667 0.5 + ( 20) dim 2 offset 24 1. 0.5 + ( 21) dim 2 offset 26 1. 
0.5 +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 41 (2) +[0]: 42 (2) +[0]: 43 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 6 (5) +[0]: 7 (5) +Label 'marker': +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 34 (1) +[0]: 40 (1) +[0]: 43 (1) +Label 'Face Sets': +[0]: 29 (4) +[0]: 30 (4) +[0]: 34 (2) +[0]: 40 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (3) +[0]: 27 (3) +[0]: 28 (3) +Label 'fault': +[0]: 6 (1) +[0]: 7 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 42 (0) +[0]: 43 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 34 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 25 (101) +[0]: 32 (101) +[0]: 39 (101) +[0]: 40 (101) +[0]: 17 (200) +[0]: 41 (200) diff --git a/src/dm/impls/plex/tests/output/ex69_quad_5.out b/src/dm/impls/plex/tests/output/ex69_quad_5.out new file mode 100644 index 00000000000..fccd29efbaa --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_5.out @@ -0,0 +1,798 @@ +DM Object: box (f0_) 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 4 +[0]: 6 ----> 18 +[0]: 6 ----> 27 +[0]: 7 ----> 18 +[0]: 7 ----> 19 +[0]: 7 ----> 29 +[0]: 8 ----> 19 +[0]: 8 ----> 20 +[0]: 8 ----> 31 +[0]: 9 ----> 20 +[0]: 9 ----> 33 +[0]: 10 ----> 21 +[0]: 10 ----> 27 +[0]: 10 ----> 28 +[0]: 11 ----> 21 +[0]: 11 ----> 22 +[0]: 11 ----> 29 +[0]: 11 ----> 30 +[0]: 12 ----> 22 +[0]: 12 ----> 23 +[0]: 12 ----> 31 +[0]: 12 ----> 32 +[0]: 13 ----> 23 +[0]: 13 ----> 33 +[0]: 13 ----> 34 +[0]: 14 ----> 24 +[0]: 14 ----> 28 +[0]: 15 ----> 24 +[0]: 15 ----> 25 +[0]: 15 ----> 30 +[0]: 16 ----> 25 +[0]: 16 ----> 26 +[0]: 16 ----> 32 +[0]: 17 ----> 26 +[0]: 17 ----> 34 +[0]: 18 ----> 0 +[0]: 19 ----> 1 +[0]: 20 ----> 2 +[0]: 21 ----> 0 +[0]: 21 ----> 3 +[0]: 22 ----> 1 +[0]: 22 ----> 4 +[0]: 23 ----> 2 +[0]: 23 ----> 5 +[0]: 24 ----> 3 +[0]: 25 ----> 4 +[0]: 26 ----> 5 +[0]: 27 ----> 0 +[0]: 28 ----> 3 +[0]: 29 ----> 0 +[0]: 29 ----> 1 +[0]: 30 ----> 3 +[0]: 30 ----> 4 +[0]: 31 ----> 1 +[0]: 31 ----> 2 +[0]: 32 ----> 4 +[0]: 32 ----> 5 +[0]: 33 ----> 2 +[0]: 34 ----> 5 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 18 (0) +[0]: 0 <---- 29 (0) +[0]: 0 <---- 21 (-1) +[0]: 0 <---- 27 (-1) +[0]: 1 <---- 19 (0) +[0]: 1 <---- 31 (0) +[0]: 1 <---- 22 (-1) +[0]: 1 <---- 29 (-1) +[0]: 2 <---- 20 (0) +[0]: 2 <---- 33 (0) +[0]: 2 <---- 23 (-1) +[0]: 2 <---- 31 (-1) +[0]: 3 <---- 21 (0) +[0]: 3 <---- 30 (0) +[0]: 3 <---- 24 (-1) +[0]: 3 <---- 28 (-1) +[0]: 4 <---- 22 (0) +[0]: 4 <---- 32 (0) +[0]: 4 <---- 25 (-1) +[0]: 4 <---- 30 (-1) +[0]: 5 <---- 23 (0) +[0]: 5 <---- 34 (0) +[0]: 5 <---- 26 (-1) +[0]: 5 <---- 32 (-1) +[0]: 18 <---- 6 (0) +[0]: 18 <---- 7 (0) +[0]: 19 <---- 7 (0) +[0]: 19 <---- 8 (0) +[0]: 20 <---- 8 (0) +[0]: 20 <---- 9 (0) +[0]: 21 <---- 10 (0) +[0]: 21 <---- 11 (0) +[0]: 22 <---- 11 (0) +[0]: 22 
<---- 12 (0) +[0]: 23 <---- 12 (0) +[0]: 23 <---- 13 (0) +[0]: 24 <---- 14 (0) +[0]: 24 <---- 15 (0) +[0]: 25 <---- 15 (0) +[0]: 25 <---- 16 (0) +[0]: 26 <---- 16 (0) +[0]: 26 <---- 17 (0) +[0]: 27 <---- 6 (0) +[0]: 27 <---- 10 (0) +[0]: 28 <---- 10 (0) +[0]: 28 <---- 14 (0) +[0]: 29 <---- 7 (0) +[0]: 29 <---- 11 (0) +[0]: 30 <---- 11 (0) +[0]: 30 <---- 15 (0) +[0]: 31 <---- 8 (0) +[0]: 31 <---- 12 (0) +[0]: 32 <---- 12 (0) +[0]: 32 <---- 16 (0) +[0]: 33 <---- 9 (0) +[0]: 33 <---- 13 (0) +[0]: 34 <---- 13 (0) +[0]: 34 <---- 17 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 6) dim 2 offset 0 0. 0. + ( 7) dim 2 offset 2 0.333333 0. + ( 8) dim 2 offset 4 0.666667 0. + ( 9) dim 2 offset 6 1. 0. + ( 10) dim 2 offset 8 0. 0.5 + ( 11) dim 2 offset 10 0.333333 0.5 + ( 12) dim 2 offset 12 0.666667 0.5 + ( 13) dim 2 offset 14 1. 0.5 + ( 14) dim 2 offset 16 0. 1. + ( 15) dim 2 offset 18 0.333333 1. + ( 16) dim 2 offset 20 0.666667 1. + ( 17) dim 2 offset 22 1. 1. +Labels: +Label 'marker': +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 33 (1) +[0]: 34 (1) +Label 'Face Sets': +[0]: 27 (4) +[0]: 28 (4) +[0]: 33 (2) +[0]: 34 (2) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (3) +[0]: 25 (3) +[0]: 26 (3) +Label 'fault0': +[0]: 21 (1) +[0]: 10 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 27 (-101) +[0]: 29 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 22 (101) +[0]: 28 (101) +[0]: 30 (101) +[0]: 11 (200) +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +Label 'fault1': +[0]: 23 (1) +[0]: 13 (0) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 4 (102) +[0]: 5 (102) +[0]: 22 (101) +[0]: 32 (101) +[0]: 34 (101) +[0]: 12 (200) +DM Object: box (f1_) 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 7 +[0]: 7 ----> 20 +[0]: 7 ----> 28 +[0]: 8 ----> 20 +[0]: 8 ----> 21 +[0]: 8 ----> 29 +[0]: 9 ----> 21 +[0]: 9 ----> 22 +[0]: 9 ----> 31 +[0]: 10 ----> 22 +[0]: 10 ----> 33 +[0]: 11 ----> 23 +[0]: 11 ----> 24 +[0]: 11 ----> 31 +[0]: 11 ----> 32 +[0]: 12 ----> 24 +[0]: 12 ----> 33 +[0]: 12 ----> 34 +[0]: 13 ----> 25 +[0]: 13 ----> 37 +[0]: 14 ----> 25 +[0]: 14 ----> 26 +[0]: 14 ----> 30 +[0]: 15 ----> 26 +[0]: 15 ----> 27 +[0]: 15 ----> 32 +[0]: 16 ----> 27 +[0]: 16 ----> 34 +[0]: 17 ----> 28 +[0]: 17 ----> 35 +[0]: 17 ----> 38 +[0]: 18 ----> 36 +[0]: 18 ----> 37 +[0]: 18 ----> 38 +[0]: 19 ----> 23 +[0]: 19 ----> 29 +[0]: 19 ----> 30 +[0]: 19 ----> 35 +[0]: 19 ----> 36 +[0]: 19 ----> 39 +[0]: 19 ----> 39 +[0]: 20 ----> 0 +[0]: 21 ----> 1 +[0]: 22 ----> 2 +[0]: 23 ----> 1 +[0]: 23 ----> 3 +[0]: 24 ----> 2 +[0]: 24 ----> 4 +[0]: 25 ----> 5 +[0]: 26 ----> 3 +[0]: 27 ----> 4 +[0]: 28 ----> 0 +[0]: 29 ----> 0 +[0]: 29 ----> 1 +[0]: 30 ----> 3 +[0]: 30 ----> 5 +[0]: 31 ----> 1 +[0]: 31 ----> 2 +[0]: 32 ----> 3 +[0]: 32 ----> 4 +[0]: 33 ----> 2 +[0]: 34 ----> 4 +[0]: 35 ----> 0 +[0]: 35 ----> 6 +[0]: 36 ----> 5 +[0]: 36 ----> 6 +[0]: 37 ----> 5 +[0]: 38 ----> 6 
+[0]: 39 ----> 6 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 20 (0) +[0]: 0 <---- 29 (0) +[0]: 0 <---- 35 (-1) +[0]: 0 <---- 28 (-1) +[0]: 1 <---- 21 (0) +[0]: 1 <---- 31 (0) +[0]: 1 <---- 23 (-1) +[0]: 1 <---- 29 (-1) +[0]: 2 <---- 22 (0) +[0]: 2 <---- 33 (0) +[0]: 2 <---- 24 (-1) +[0]: 2 <---- 31 (-1) +[0]: 3 <---- 23 (0) +[0]: 3 <---- 32 (0) +[0]: 3 <---- 26 (-1) +[0]: 3 <---- 30 (-1) +[0]: 4 <---- 24 (0) +[0]: 4 <---- 34 (0) +[0]: 4 <---- 27 (-1) +[0]: 4 <---- 32 (-1) +[0]: 5 <---- 36 (0) +[0]: 5 <---- 30 (0) +[0]: 5 <---- 25 (-1) +[0]: 5 <---- 37 (-1) +[0]: 6 <---- 35 (0) +[0]: 6 <---- 36 (0) +[0]: 6 <---- 38 (0) +[0]: 6 <---- 39 (0) +[0]: 20 <---- 7 (0) +[0]: 20 <---- 8 (0) +[0]: 21 <---- 8 (0) +[0]: 21 <---- 9 (0) +[0]: 22 <---- 9 (0) +[0]: 22 <---- 10 (0) +[0]: 23 <---- 19 (0) +[0]: 23 <---- 11 (0) +[0]: 24 <---- 11 (0) +[0]: 24 <---- 12 (0) +[0]: 25 <---- 13 (0) +[0]: 25 <---- 14 (0) +[0]: 26 <---- 14 (0) +[0]: 26 <---- 15 (0) +[0]: 27 <---- 15 (0) +[0]: 27 <---- 16 (0) +[0]: 28 <---- 7 (0) +[0]: 28 <---- 17 (0) +[0]: 29 <---- 8 (0) +[0]: 29 <---- 19 (0) +[0]: 30 <---- 19 (0) +[0]: 30 <---- 14 (0) +[0]: 31 <---- 9 (0) +[0]: 31 <---- 11 (0) +[0]: 32 <---- 11 (0) +[0]: 32 <---- 15 (0) +[0]: 33 <---- 10 (0) +[0]: 33 <---- 12 (0) +[0]: 34 <---- 12 (0) +[0]: 34 <---- 16 (0) +[0]: 35 <---- 17 (0) +[0]: 35 <---- 19 (0) +[0]: 36 <---- 18 (0) +[0]: 36 <---- 19 (0) +[0]: 37 <---- 18 (0) +[0]: 37 <---- 13 (0) +[0]: 38 <---- 17 (0) +[0]: 38 <---- 18 (0) +[0]: 39 <---- 19 (0) +[0]: 39 <---- 19 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 7) dim 2 offset 0 0. 0. + ( 8) dim 2 offset 2 0.333333 0. + ( 9) dim 2 offset 4 0.666667 0. + ( 10) dim 2 offset 6 1. 0. + ( 11) dim 2 offset 8 0.666667 0.5 + ( 12) dim 2 offset 10 1. 0.5 + ( 13) dim 2 offset 12 0. 1. + ( 14) dim 2 offset 14 0.333333 1. + ( 15) dim 2 offset 16 0.666667 1. + ( 16) dim 2 offset 18 1. 1. + ( 17) dim 2 offset 20 0. 0.5 + ( 18) dim 2 offset 22 0. 
0.5 + ( 19) dim 2 offset 24 0.333333 0.5 +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 38 (2) +[0]: 39 (2) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 6 (5) +Label 'marker': +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 37 (1) +[0]: 38 (1) +Label 'Face Sets': +[0]: 28 (4) +[0]: 37 (4) +[0]: 33 (2) +[0]: 34 (2) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 25 (3) +[0]: 26 (3) +[0]: 27 (3) +Label 'fault0': +[0]: 6 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 17 (0) +[0]: 18 (0) +[0]: 38 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 28 (-101) +[0]: 29 (-101) +[0]: 3 (102) +[0]: 5 (102) +[0]: 23 (101) +[0]: 30 (101) +[0]: 37 (101) +[0]: 19 (200) +[0]: 39 (200) +Label 'fault1': +[0]: 24 (1) +[0]: 12 (0) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 23 (101) +[0]: 32 (101) +[0]: 34 (101) +[0]: 11 (200) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 7 +[0]: 8 ----> 22 +[0]: 8 ----> 29 +[0]: 9 ----> 22 +[0]: 9 ----> 23 +[0]: 9 ----> 30 +[0]: 10 ----> 23 +[0]: 10 ----> 24 +[0]: 10 ----> 32 +[0]: 11 ----> 24 +[0]: 11 ----> 34 +[0]: 12 ----> 26 +[0]: 12 ----> 37 +[0]: 13 ----> 26 +[0]: 13 ----> 27 +[0]: 13 ----> 31 +[0]: 14 ----> 27 +[0]: 14 ----> 28 +[0]: 14 ----> 33 +[0]: 15 ----> 28 +[0]: 15 ----> 40 +[0]: 16 ----> 29 +[0]: 16 ----> 35 +[0]: 16 ----> 43 +[0]: 17 ----> 36 +[0]: 17 ----> 37 +[0]: 17 ----> 43 +[0]: 18 ----> 25 +[0]: 18 ----> 30 +[0]: 18 ----> 31 +[0]: 18 ----> 35 +[0]: 18 ----> 36 +[0]: 18 ----> 44 +[0]: 18 ----> 44 +[0]: 19 ----> 25 +[0]: 19 ----> 32 +[0]: 19 ----> 33 +[0]: 19 ----> 38 +[0]: 19 ----> 39 +[0]: 19 ----> 41 +[0]: 19 ----> 41 +[0]: 20 ----> 34 +[0]: 20 ----> 38 +[0]: 20 ----> 42 +[0]: 21 ----> 39 +[0]: 21 ----> 40 +[0]: 21 ----> 42 +[0]: 22 ----> 0 +[0]: 23 ----> 1 +[0]: 24 ----> 2 +[0]: 25 ----> 1 +[0]: 25 ----> 3 +[0]: 26 ----> 4 +[0]: 27 ----> 3 +[0]: 28 ----> 5 +[0]: 29 ----> 0 +[0]: 30 ----> 0 +[0]: 30 ----> 1 +[0]: 31 ----> 3 +[0]: 31 ----> 4 +[0]: 32 ----> 1 +[0]: 32 ----> 2 +[0]: 33 ----> 3 +[0]: 33 ----> 5 +[0]: 34 ----> 2 +[0]: 35 ----> 0 +[0]: 35 ----> 6 +[0]: 36 ----> 4 +[0]: 36 ----> 6 +[0]: 37 ----> 4 +[0]: 38 ----> 2 +[0]: 38 ----> 7 +[0]: 39 ----> 5 +[0]: 39 ----> 7 +[0]: 40 ----> 5 +[0]: 41 ----> 7 +[0]: 42 ----> 7 +[0]: 43 ----> 6 +[0]: 44 ----> 6 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 22 (0) +[0]: 0 <---- 30 (0) +[0]: 0 <---- 35 (-1) +[0]: 0 <---- 29 (-1) +[0]: 1 <---- 23 (0) +[0]: 1 <---- 32 (0) +[0]: 1 <---- 25 (-1) +[0]: 1 <---- 30 (-1) +[0]: 2 <---- 24 (0) +[0]: 2 <---- 34 (0) +[0]: 2 <---- 38 (-1) +[0]: 2 <---- 32 (-1) +[0]: 3 <---- 25 (0) +[0]: 3 <---- 33 (0) +[0]: 3 <---- 27 (-1) +[0]: 3 <---- 31 (-1) +[0]: 4 <---- 36 (0) +[0]: 4 <---- 31 (0) +[0]: 4 <---- 26 (-1) +[0]: 4 <---- 37 (-1) +[0]: 5 <---- 39 (0) +[0]: 5 <---- 40 (0) +[0]: 5 <---- 28 (-1) +[0]: 5 <---- 33 (-1) +[0]: 6 <---- 35 (0) +[0]: 6 <---- 36 (0) +[0]: 6 <---- 43 
(0) +[0]: 6 <---- 44 (0) +[0]: 7 <---- 38 (0) +[0]: 7 <---- 39 (0) +[0]: 7 <---- 41 (0) +[0]: 7 <---- 42 (0) +[0]: 22 <---- 8 (0) +[0]: 22 <---- 9 (0) +[0]: 23 <---- 9 (0) +[0]: 23 <---- 10 (0) +[0]: 24 <---- 10 (0) +[0]: 24 <---- 11 (0) +[0]: 25 <---- 18 (0) +[0]: 25 <---- 19 (0) +[0]: 26 <---- 12 (0) +[0]: 26 <---- 13 (0) +[0]: 27 <---- 13 (0) +[0]: 27 <---- 14 (0) +[0]: 28 <---- 14 (0) +[0]: 28 <---- 15 (0) +[0]: 29 <---- 8 (0) +[0]: 29 <---- 16 (0) +[0]: 30 <---- 9 (0) +[0]: 30 <---- 18 (0) +[0]: 31 <---- 18 (0) +[0]: 31 <---- 13 (0) +[0]: 32 <---- 10 (0) +[0]: 32 <---- 19 (0) +[0]: 33 <---- 19 (0) +[0]: 33 <---- 14 (0) +[0]: 34 <---- 11 (0) +[0]: 34 <---- 20 (0) +[0]: 35 <---- 16 (0) +[0]: 35 <---- 18 (0) +[0]: 36 <---- 17 (0) +[0]: 36 <---- 18 (0) +[0]: 37 <---- 17 (0) +[0]: 37 <---- 12 (0) +[0]: 38 <---- 19 (0) +[0]: 38 <---- 20 (0) +[0]: 39 <---- 19 (0) +[0]: 39 <---- 21 (0) +[0]: 40 <---- 21 (0) +[0]: 40 <---- 15 (0) +[0]: 41 <---- 19 (0) +[0]: 41 <---- 19 (0) +[0]: 42 <---- 20 (0) +[0]: 42 <---- 21 (0) +[0]: 43 <---- 16 (0) +[0]: 43 <---- 17 (0) +[0]: 44 <---- 18 (0) +[0]: 44 <---- 18 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 8) dim 2 offset 0 0. 0. + ( 9) dim 2 offset 2 0.333333 0. + ( 10) dim 2 offset 4 0.666667 0. + ( 11) dim 2 offset 6 1. 0. + ( 12) dim 2 offset 8 0. 1. + ( 13) dim 2 offset 10 0.333333 1. + ( 14) dim 2 offset 12 0.666667 1. + ( 15) dim 2 offset 14 1. 1. + ( 16) dim 2 offset 16 0. 0.5 + ( 17) dim 2 offset 18 0. 0.5 + ( 18) dim 2 offset 20 0.333333 0.5 + ( 19) dim 2 offset 22 0.666667 0.5 + ( 20) dim 2 offset 24 1. 0.5 + ( 21) dim 2 offset 26 1. 0.5 +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 6 (5) +[0]: 7 (5) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 41 (2) +[0]: 42 (2) +[0]: 43 (2) +[0]: 44 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +Label 'marker': +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 34 (1) +[0]: 37 (1) +[0]: 40 (1) +[0]: 42 (1) +[0]: 43 (1) +Label 'Face Sets': +[0]: 29 (4) +[0]: 37 (4) +[0]: 34 (2) +[0]: 40 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (3) +[0]: 27 (3) +[0]: 28 (3) +Label 'fault0': +[0]: 6 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 16 (0) +[0]: 17 (0) +[0]: 43 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 29 (-101) +[0]: 30 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 25 (101) +[0]: 31 (101) +[0]: 37 (101) +[0]: 18 (200) +[0]: 44 (200) +Label 'fault1': +[0]: 7 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 20 (0) +[0]: 21 (0) +[0]: 42 (0) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 32 (-101) +[0]: 34 (-101) +[0]: 3 (102) +[0]: 5 (102) +[0]: 25 (101) +[0]: 33 (101) +[0]: 40 (101) +[0]: 19 (200) +[0]: 41 (200) diff --git a/src/dm/impls/plex/tests/output/ex69_quad_6.out b/src/dm/impls/plex/tests/output/ex69_quad_6.out new file mode 100644 index 00000000000..ad663280b4c --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_6.out @@ -0,0 +1,888 @@ +DM Object: box (f0_) 1 MPI process + type: plex +box in 2 dimensions: 
+Supports: +[0] Max support size: 4 +[0]: 6 ----> 18 +[0]: 6 ----> 27 +[0]: 7 ----> 18 +[0]: 7 ----> 19 +[0]: 7 ----> 29 +[0]: 8 ----> 19 +[0]: 8 ----> 20 +[0]: 8 ----> 31 +[0]: 9 ----> 20 +[0]: 9 ----> 33 +[0]: 10 ----> 21 +[0]: 10 ----> 27 +[0]: 10 ----> 28 +[0]: 11 ----> 21 +[0]: 11 ----> 22 +[0]: 11 ----> 29 +[0]: 11 ----> 30 +[0]: 12 ----> 22 +[0]: 12 ----> 23 +[0]: 12 ----> 31 +[0]: 12 ----> 32 +[0]: 13 ----> 23 +[0]: 13 ----> 33 +[0]: 13 ----> 34 +[0]: 14 ----> 24 +[0]: 14 ----> 28 +[0]: 15 ----> 24 +[0]: 15 ----> 25 +[0]: 15 ----> 30 +[0]: 16 ----> 25 +[0]: 16 ----> 26 +[0]: 16 ----> 32 +[0]: 17 ----> 26 +[0]: 17 ----> 34 +[0]: 18 ----> 0 +[0]: 19 ----> 1 +[0]: 20 ----> 2 +[0]: 21 ----> 0 +[0]: 21 ----> 3 +[0]: 22 ----> 1 +[0]: 22 ----> 4 +[0]: 23 ----> 2 +[0]: 23 ----> 5 +[0]: 24 ----> 3 +[0]: 25 ----> 4 +[0]: 26 ----> 5 +[0]: 27 ----> 0 +[0]: 28 ----> 3 +[0]: 29 ----> 0 +[0]: 29 ----> 1 +[0]: 30 ----> 3 +[0]: 30 ----> 4 +[0]: 31 ----> 1 +[0]: 31 ----> 2 +[0]: 32 ----> 4 +[0]: 32 ----> 5 +[0]: 33 ----> 2 +[0]: 34 ----> 5 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 18 (0) +[0]: 0 <---- 29 (0) +[0]: 0 <---- 21 (-1) +[0]: 0 <---- 27 (-1) +[0]: 1 <---- 19 (0) +[0]: 1 <---- 31 (0) +[0]: 1 <---- 22 (-1) +[0]: 1 <---- 29 (-1) +[0]: 2 <---- 20 (0) +[0]: 2 <---- 33 (0) +[0]: 2 <---- 23 (-1) +[0]: 2 <---- 31 (-1) +[0]: 3 <---- 21 (0) +[0]: 3 <---- 30 (0) +[0]: 3 <---- 24 (-1) +[0]: 3 <---- 28 (-1) +[0]: 4 <---- 22 (0) +[0]: 4 <---- 32 (0) +[0]: 4 <---- 25 (-1) +[0]: 4 <---- 30 (-1) +[0]: 5 <---- 23 (0) +[0]: 5 <---- 34 (0) +[0]: 5 <---- 26 (-1) +[0]: 5 <---- 32 (-1) +[0]: 18 <---- 6 (0) +[0]: 18 <---- 7 (0) +[0]: 19 <---- 7 (0) +[0]: 19 <---- 8 (0) +[0]: 20 <---- 8 (0) +[0]: 20 <---- 9 (0) +[0]: 21 <---- 10 (0) +[0]: 21 <---- 11 (0) +[0]: 22 <---- 11 (0) +[0]: 22 <---- 12 (0) +[0]: 23 <---- 12 (0) +[0]: 23 <---- 13 (0) +[0]: 24 <---- 14 (0) +[0]: 24 <---- 15 (0) +[0]: 25 <---- 15 (0) +[0]: 25 <---- 16 (0) +[0]: 26 <---- 16 (0) +[0]: 26 <---- 17 (0) +[0]: 27 <---- 6 (0) +[0]: 27 <---- 10 (0) +[0]: 28 <---- 10 (0) +[0]: 28 <---- 14 (0) +[0]: 29 <---- 7 (0) +[0]: 29 <---- 11 (0) +[0]: 30 <---- 11 (0) +[0]: 30 <---- 15 (0) +[0]: 31 <---- 8 (0) +[0]: 31 <---- 12 (0) +[0]: 32 <---- 12 (0) +[0]: 32 <---- 16 (0) +[0]: 33 <---- 9 (0) +[0]: 33 <---- 13 (0) +[0]: 34 <---- 13 (0) +[0]: 34 <---- 17 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 6) dim 2 offset 0 0. 0. + ( 7) dim 2 offset 2 0.333333 0. + ( 8) dim 2 offset 4 0.666667 0. + ( 9) dim 2 offset 6 1. 0. + ( 10) dim 2 offset 8 0. 0.5 + ( 11) dim 2 offset 10 0.333333 0.5 + ( 12) dim 2 offset 12 0.666667 0.5 + ( 13) dim 2 offset 14 1. 0.5 + ( 14) dim 2 offset 16 0. 1. + ( 15) dim 2 offset 18 0.333333 1. + ( 16) dim 2 offset 20 0.666667 1. + ( 17) dim 2 offset 22 1. 1. 
+Labels: +Label 'marker': +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 33 (1) +[0]: 34 (1) +Label 'Face Sets': +[0]: 27 (4) +[0]: 28 (4) +[0]: 33 (2) +[0]: 34 (2) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (3) +[0]: 25 (3) +[0]: 26 (3) +Label 'fault0': +[0]: 22 (1) +[0]: 23 (1) +[0]: 12 (0) +[0]: 13 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 29 (-101) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 21 (101) +[0]: 30 (101) +[0]: 32 (101) +[0]: 34 (101) +[0]: 11 (200) +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +Label 'fault1': +[0]: 32 (1) +[0]: 16 (0) +[0]: 1 (102) +[0]: 4 (102) +[0]: 22 (101) +[0]: 25 (101) +[0]: 31 (101) +[0]: 2 (-102) +[0]: 5 (-102) +[0]: 23 (-101) +[0]: 26 (-101) +[0]: 12 (200) +DM Object: box (f1_) 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 7 +[0]: 8 ----> 22 +[0]: 8 ----> 29 +[0]: 9 ----> 22 +[0]: 9 ----> 23 +[0]: 9 ----> 31 +[0]: 10 ----> 23 +[0]: 10 ----> 24 +[0]: 10 ----> 33 +[0]: 11 ----> 24 +[0]: 11 ----> 34 +[0]: 12 ----> 25 +[0]: 12 ----> 29 +[0]: 12 ----> 30 +[0]: 13 ----> 26 +[0]: 13 ----> 30 +[0]: 14 ----> 26 +[0]: 14 ----> 27 +[0]: 14 ----> 32 +[0]: 15 ----> 27 +[0]: 15 ----> 28 +[0]: 15 ----> 39 +[0]: 16 ----> 28 +[0]: 16 ----> 40 +[0]: 17 ----> 25 +[0]: 17 ----> 31 +[0]: 17 ----> 32 +[0]: 17 ----> 35 +[0]: 17 ----> 36 +[0]: 17 ----> 41 +[0]: 17 ----> 41 +[0]: 18 ----> 33 +[0]: 18 ----> 35 +[0]: 18 ----> 37 +[0]: 18 ----> 42 +[0]: 19 ----> 36 +[0]: 19 ----> 38 +[0]: 19 ----> 39 +[0]: 19 ----> 42 +[0]: 20 ----> 34 +[0]: 20 ----> 37 +[0]: 20 ----> 43 +[0]: 21 ----> 38 +[0]: 21 ----> 40 +[0]: 21 ----> 43 +[0]: 22 ----> 0 +[0]: 23 ----> 1 +[0]: 24 ----> 2 +[0]: 25 ----> 0 +[0]: 25 ----> 3 +[0]: 26 ----> 3 +[0]: 27 ----> 4 +[0]: 28 ----> 5 +[0]: 29 ----> 0 +[0]: 30 ----> 3 +[0]: 31 ----> 0 +[0]: 31 ----> 1 +[0]: 32 ----> 3 +[0]: 32 ----> 4 +[0]: 33 ----> 1 +[0]: 33 ----> 2 +[0]: 34 ----> 2 +[0]: 35 ----> 1 +[0]: 35 ----> 6 +[0]: 36 ----> 4 +[0]: 36 ----> 6 +[0]: 37 ----> 2 +[0]: 37 ----> 7 +[0]: 38 ----> 5 +[0]: 38 ----> 7 +[0]: 39 ----> 4 +[0]: 39 ----> 5 +[0]: 40 ----> 5 +[0]: 41 ----> 6 +[0]: 42 ----> 6 +[0]: 42 ----> 7 +[0]: 43 ----> 7 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 22 (0) +[0]: 0 <---- 31 (0) +[0]: 0 <---- 25 (-1) +[0]: 0 <---- 29 (-1) +[0]: 1 <---- 23 (0) +[0]: 1 <---- 33 (0) +[0]: 1 <---- 35 (-1) +[0]: 1 <---- 31 (-1) +[0]: 2 <---- 24 (0) +[0]: 2 <---- 34 (0) +[0]: 2 <---- 37 (-1) +[0]: 2 <---- 33 (-1) +[0]: 3 <---- 25 (0) +[0]: 3 <---- 32 (0) +[0]: 3 <---- 26 (-1) +[0]: 3 <---- 30 (-1) +[0]: 4 <---- 36 (0) +[0]: 4 <---- 39 (0) +[0]: 4 <---- 27 (-1) +[0]: 4 <---- 32 (-1) +[0]: 5 <---- 38 (0) +[0]: 5 <---- 40 (0) +[0]: 5 <---- 28 (-1) +[0]: 5 <---- 39 (-1) +[0]: 6 <---- 35 (0) +[0]: 6 <---- 36 (0) +[0]: 6 <---- 41 (0) +[0]: 6 <---- 42 (0) +[0]: 7 <---- 37 (0) +[0]: 7 <---- 38 (0) +[0]: 7 <---- 42 (0) +[0]: 7 <---- 43 (0) +[0]: 22 <---- 8 (0) +[0]: 22 
<---- 9 (0) +[0]: 23 <---- 9 (0) +[0]: 23 <---- 10 (0) +[0]: 24 <---- 10 (0) +[0]: 24 <---- 11 (0) +[0]: 25 <---- 12 (0) +[0]: 25 <---- 17 (0) +[0]: 26 <---- 13 (0) +[0]: 26 <---- 14 (0) +[0]: 27 <---- 14 (0) +[0]: 27 <---- 15 (0) +[0]: 28 <---- 15 (0) +[0]: 28 <---- 16 (0) +[0]: 29 <---- 8 (0) +[0]: 29 <---- 12 (0) +[0]: 30 <---- 12 (0) +[0]: 30 <---- 13 (0) +[0]: 31 <---- 9 (0) +[0]: 31 <---- 17 (0) +[0]: 32 <---- 17 (0) +[0]: 32 <---- 14 (0) +[0]: 33 <---- 10 (0) +[0]: 33 <---- 18 (0) +[0]: 34 <---- 11 (0) +[0]: 34 <---- 20 (0) +[0]: 35 <---- 17 (0) +[0]: 35 <---- 18 (0) +[0]: 36 <---- 17 (0) +[0]: 36 <---- 19 (0) +[0]: 37 <---- 18 (0) +[0]: 37 <---- 20 (0) +[0]: 38 <---- 19 (0) +[0]: 38 <---- 21 (0) +[0]: 39 <---- 19 (0) +[0]: 39 <---- 15 (0) +[0]: 40 <---- 21 (0) +[0]: 40 <---- 16 (0) +[0]: 41 <---- 17 (0) +[0]: 41 <---- 17 (0) +[0]: 42 <---- 18 (0) +[0]: 42 <---- 19 (0) +[0]: 43 <---- 20 (0) +[0]: 43 <---- 21 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 8) dim 2 offset 0 0. 0. + ( 9) dim 2 offset 2 0.333333 0. + ( 10) dim 2 offset 4 0.666667 0. + ( 11) dim 2 offset 6 1. 0. + ( 12) dim 2 offset 8 0. 0.5 + ( 13) dim 2 offset 10 0. 1. + ( 14) dim 2 offset 12 0.333333 1. + ( 15) dim 2 offset 14 0.666667 1. + ( 16) dim 2 offset 16 1. 1. + ( 17) dim 2 offset 18 0.333333 0.5 + ( 18) dim 2 offset 20 0.666667 0.5 + ( 19) dim 2 offset 22 0.666667 0.5 + ( 20) dim 2 offset 24 1. 0.5 + ( 21) dim 2 offset 26 1. 0.5 +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 41 (2) +[0]: 42 (2) +[0]: 43 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 6 (5) +[0]: 7 (5) +Label 'marker': +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 34 (1) +[0]: 40 (1) +[0]: 43 (1) +Label 'Face Sets': +[0]: 29 (4) +[0]: 30 (4) +[0]: 34 (2) +[0]: 40 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (3) +[0]: 27 (3) +[0]: 28 (3) +Label 'fault0': +[0]: 6 (1) +[0]: 7 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 42 (0) +[0]: 43 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 34 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 25 (101) +[0]: 32 (101) +[0]: 39 (101) +[0]: 40 (101) +[0]: 17 (200) +[0]: 41 (200) +Label 'fault1': +[0]: 39 (1) +[0]: 15 (0) +[0]: 1 (102) +[0]: 4 (102) +[0]: 6 (101) +[0]: 27 (101) +[0]: 33 (101) +[0]: 35 (101) +[0]: 36 (101) +[0]: 2 (-102) +[0]: 5 (-102) +[0]: 7 (-101) +[0]: 28 (-101) +[0]: 37 (-101) +[0]: 38 (-101) +[0]: 18 (200) +[0]: 19 (200) +[0]: 42 (200) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 7 +[0]: 9 ----> 24 +[0]: 9 ----> 30 +[0]: 10 ----> 24 +[0]: 10 ----> 25 +[0]: 10 ----> 32 +[0]: 11 ----> 25 +[0]: 11 ----> 26 +[0]: 11 ----> 34 +[0]: 12 ----> 26 +[0]: 12 ----> 35 +[0]: 13 ----> 27 +[0]: 13 ----> 30 +[0]: 13 ----> 31 +[0]: 14 ----> 28 +[0]: 14 ----> 31 
+[0]: 15 ----> 28 +[0]: 15 ----> 33 +[0]: 15 ----> 41 +[0]: 16 ----> 29 +[0]: 16 ----> 40 +[0]: 17 ----> 27 +[0]: 17 ----> 32 +[0]: 17 ----> 33 +[0]: 17 ----> 36 +[0]: 17 ----> 37 +[0]: 17 ----> 47 +[0]: 17 ----> 47 +[0]: 18 ----> 35 +[0]: 18 ----> 38 +[0]: 18 ----> 49 +[0]: 19 ----> 39 +[0]: 19 ----> 40 +[0]: 19 ----> 49 +[0]: 20 ----> 29 +[0]: 20 ----> 42 +[0]: 20 ----> 44 +[0]: 21 ----> 41 +[0]: 21 ----> 43 +[0]: 21 ----> 44 +[0]: 22 ----> 34 +[0]: 22 ----> 36 +[0]: 22 ----> 38 +[0]: 22 ----> 45 +[0]: 22 ----> 45 +[0]: 22 ----> 48 +[0]: 23 ----> 37 +[0]: 23 ----> 39 +[0]: 23 ----> 42 +[0]: 23 ----> 43 +[0]: 23 ----> 46 +[0]: 23 ----> 46 +[0]: 23 ----> 48 +[0]: 24 ----> 0 +[0]: 25 ----> 1 +[0]: 26 ----> 2 +[0]: 27 ----> 0 +[0]: 27 ----> 3 +[0]: 28 ----> 3 +[0]: 29 ----> 4 +[0]: 30 ----> 0 +[0]: 31 ----> 3 +[0]: 32 ----> 0 +[0]: 32 ----> 1 +[0]: 33 ----> 3 +[0]: 33 ----> 5 +[0]: 34 ----> 1 +[0]: 34 ----> 2 +[0]: 35 ----> 2 +[0]: 36 ----> 1 +[0]: 36 ----> 6 +[0]: 37 ----> 5 +[0]: 37 ----> 6 +[0]: 38 ----> 2 +[0]: 38 ----> 7 +[0]: 39 ----> 4 +[0]: 39 ----> 7 +[0]: 40 ----> 4 +[0]: 41 ----> 5 +[0]: 42 ----> 4 +[0]: 42 ----> 8 +[0]: 43 ----> 5 +[0]: 43 ----> 8 +[0]: 44 ----> 8 +[0]: 46 ----> 8 +[0]: 47 ----> 6 +[0]: 48 ----> 6 +[0]: 48 ----> 7 +[0]: 49 ----> 7 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 24 (0) +[0]: 0 <---- 32 (0) +[0]: 0 <---- 27 (-1) +[0]: 0 <---- 30 (-1) +[0]: 1 <---- 25 (0) +[0]: 1 <---- 34 (0) +[0]: 1 <---- 36 (-1) +[0]: 1 <---- 32 (-1) +[0]: 2 <---- 26 (0) +[0]: 2 <---- 35 (0) +[0]: 2 <---- 38 (-1) +[0]: 2 <---- 34 (-1) +[0]: 3 <---- 27 (0) +[0]: 3 <---- 33 (0) +[0]: 3 <---- 28 (-1) +[0]: 3 <---- 31 (-1) +[0]: 4 <---- 39 (0) +[0]: 4 <---- 40 (0) +[0]: 4 <---- 29 (-1) +[0]: 4 <---- 42 (-1) +[0]: 5 <---- 37 (0) +[0]: 5 <---- 43 (0) +[0]: 5 <---- 41 (-1) +[0]: 5 <---- 33 (-1) +[0]: 6 <---- 36 (0) +[0]: 6 <---- 37 (0) +[0]: 6 <---- 47 (0) +[0]: 6 <---- 48 (0) +[0]: 7 <---- 38 (0) +[0]: 7 <---- 39 (0) +[0]: 7 <---- 48 (0) +[0]: 7 <---- 49 (0) +[0]: 8 <---- 42 (0) +[0]: 8 <---- 43 (0) +[0]: 8 <---- 46 (0) +[0]: 8 <---- 44 (0) +[0]: 24 <---- 9 (0) +[0]: 24 <---- 10 (0) +[0]: 25 <---- 10 (0) +[0]: 25 <---- 11 (0) +[0]: 26 <---- 11 (0) +[0]: 26 <---- 12 (0) +[0]: 27 <---- 13 (0) +[0]: 27 <---- 17 (0) +[0]: 28 <---- 14 (0) +[0]: 28 <---- 15 (0) +[0]: 29 <---- 20 (0) +[0]: 29 <---- 16 (0) +[0]: 30 <---- 9 (0) +[0]: 30 <---- 13 (0) +[0]: 31 <---- 13 (0) +[0]: 31 <---- 14 (0) +[0]: 32 <---- 10 (0) +[0]: 32 <---- 17 (0) +[0]: 33 <---- 17 (0) +[0]: 33 <---- 15 (0) +[0]: 34 <---- 11 (0) +[0]: 34 <---- 22 (0) +[0]: 35 <---- 12 (0) +[0]: 35 <---- 18 (0) +[0]: 36 <---- 17 (0) +[0]: 36 <---- 22 (0) +[0]: 37 <---- 17 (0) +[0]: 37 <---- 23 (0) +[0]: 38 <---- 22 (0) +[0]: 38 <---- 18 (0) +[0]: 39 <---- 23 (0) +[0]: 39 <---- 19 (0) +[0]: 40 <---- 19 (0) +[0]: 40 <---- 16 (0) +[0]: 41 <---- 15 (0) +[0]: 41 <---- 21 (0) +[0]: 42 <---- 23 (0) +[0]: 42 <---- 20 (0) +[0]: 43 <---- 23 (0) +[0]: 43 <---- 21 (0) +[0]: 44 <---- 20 (0) +[0]: 44 <---- 21 (0) +[0]: 45 <---- 22 (0) +[0]: 45 <---- 22 (0) +[0]: 46 <---- 23 (0) +[0]: 46 <---- 23 (0) +[0]: 47 <---- 17 (0) +[0]: 47 <---- 17 (0) +[0]: 48 <---- 22 (0) +[0]: 48 <---- 23 (0) +[0]: 49 <---- 18 (0) +[0]: 49 <---- 19 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 9) dim 2 offset 0 0. 0. + ( 10) dim 2 offset 2 0.333333 0. + ( 11) dim 2 offset 4 0.666667 0. + ( 12) dim 2 offset 6 1. 0. + ( 13) dim 2 offset 8 0. 0.5 + ( 14) dim 2 offset 10 0. 1. + ( 15) dim 2 offset 12 0.333333 1. + ( 16) dim 2 offset 14 1. 1. 
+ ( 17) dim 2 offset 16 0.333333 0.5 + ( 18) dim 2 offset 18 1. 0.5 + ( 19) dim 2 offset 20 1. 0.5 + ( 20) dim 2 offset 22 0.666667 1. + ( 21) dim 2 offset 24 0.666667 1. + ( 22) dim 2 offset 26 0.666667 0.5 + ( 23) dim 2 offset 28 0.666667 0.5 +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 6 (5) +[0]: 7 (5) +[0]: 8 (5) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 44 (2) +[0]: 45 (2) +[0]: 46 (2) +[0]: 47 (2) +[0]: 48 (2) +[0]: 49 (2) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +Label 'marker': +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 35 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 44 (1) +[0]: 49 (1) +Label 'Face Sets': +[0]: 30 (4) +[0]: 31 (4) +[0]: 35 (2) +[0]: 40 (2) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 28 (3) +[0]: 29 (3) +[0]: 41 (3) +Label 'fault0': +[0]: 6 (1) +[0]: 7 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 18 (0) +[0]: 19 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 45 (0) +[0]: 46 (0) +[0]: 48 (0) +[0]: 49 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 32 (-101) +[0]: 34 (-101) +[0]: 35 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 8 (101) +[0]: 27 (101) +[0]: 33 (101) +[0]: 40 (101) +[0]: 42 (101) +[0]: 43 (101) +[0]: 17 (200) +[0]: 47 (200) +Label 'fault1': +[0]: 8 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 20 (0) +[0]: 21 (0) +[0]: 44 (0) +[0]: 1 (102) +[0]: 5 (102) +[0]: 6 (101) +[0]: 34 (101) +[0]: 36 (101) +[0]: 37 (101) +[0]: 41 (101) +[0]: 2 (-102) +[0]: 4 (-102) +[0]: 7 (-101) +[0]: 29 (-101) +[0]: 38 (-101) +[0]: 39 (-101) +[0]: 22 (200) +[0]: 23 (200) +[0]: 45 (200) +[0]: 46 (200) +[0]: 48 (200) diff --git a/src/dm/impls/plex/tests/output/ex69_quad_6w.out b/src/dm/impls/plex/tests/output/ex69_quad_6w.out new file mode 100644 index 00000000000..784a882dbae --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_quad_6w.out @@ -0,0 +1,888 @@ +DM Object: box (f0_) 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 4 +[0]: 6 ----> 18 +[0]: 6 ----> 27 +[0]: 7 ----> 18 +[0]: 7 ----> 19 +[0]: 7 ----> 29 +[0]: 8 ----> 19 +[0]: 8 ----> 20 +[0]: 8 ----> 31 +[0]: 9 ----> 20 +[0]: 9 ----> 33 +[0]: 10 ----> 21 +[0]: 10 ----> 27 +[0]: 10 ----> 28 +[0]: 11 ----> 21 +[0]: 11 ----> 22 +[0]: 11 ----> 29 +[0]: 11 ----> 30 +[0]: 12 ----> 22 +[0]: 12 ----> 23 +[0]: 12 ----> 31 +[0]: 12 ----> 32 +[0]: 13 ----> 23 +[0]: 13 ----> 33 +[0]: 13 ----> 34 +[0]: 14 ----> 24 +[0]: 14 ----> 28 +[0]: 15 ----> 24 +[0]: 15 ----> 25 +[0]: 15 ----> 30 +[0]: 16 ----> 25 +[0]: 16 ----> 26 +[0]: 16 ----> 32 +[0]: 17 ----> 26 +[0]: 17 ----> 34 +[0]: 18 ----> 0 +[0]: 19 ----> 1 +[0]: 20 ----> 2 +[0]: 21 ----> 0 +[0]: 21 ----> 3 +[0]: 22 ----> 1 +[0]: 22 ----> 4 +[0]: 23 ----> 2 +[0]: 23 ----> 5 +[0]: 24 ----> 3 +[0]: 25 ----> 4 +[0]: 26 ----> 5 +[0]: 27 ----> 0 +[0]: 28 ----> 3 +[0]: 29 ----> 0 +[0]: 29 ----> 1 +[0]: 30 ----> 3 +[0]: 30 ----> 4 +[0]: 31 ----> 1 +[0]: 31 ----> 2 +[0]: 32 ----> 4 +[0]: 32 ----> 5 +[0]: 33 ----> 2 
+[0]: 34 ----> 5 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 18 (0) +[0]: 0 <---- 29 (0) +[0]: 0 <---- 21 (-1) +[0]: 0 <---- 27 (-1) +[0]: 1 <---- 19 (0) +[0]: 1 <---- 31 (0) +[0]: 1 <---- 22 (-1) +[0]: 1 <---- 29 (-1) +[0]: 2 <---- 20 (0) +[0]: 2 <---- 33 (0) +[0]: 2 <---- 23 (-1) +[0]: 2 <---- 31 (-1) +[0]: 3 <---- 21 (0) +[0]: 3 <---- 30 (0) +[0]: 3 <---- 24 (-1) +[0]: 3 <---- 28 (-1) +[0]: 4 <---- 22 (0) +[0]: 4 <---- 32 (0) +[0]: 4 <---- 25 (-1) +[0]: 4 <---- 30 (-1) +[0]: 5 <---- 23 (0) +[0]: 5 <---- 34 (0) +[0]: 5 <---- 26 (-1) +[0]: 5 <---- 32 (-1) +[0]: 18 <---- 6 (0) +[0]: 18 <---- 7 (0) +[0]: 19 <---- 7 (0) +[0]: 19 <---- 8 (0) +[0]: 20 <---- 8 (0) +[0]: 20 <---- 9 (0) +[0]: 21 <---- 10 (0) +[0]: 21 <---- 11 (0) +[0]: 22 <---- 11 (0) +[0]: 22 <---- 12 (0) +[0]: 23 <---- 12 (0) +[0]: 23 <---- 13 (0) +[0]: 24 <---- 14 (0) +[0]: 24 <---- 15 (0) +[0]: 25 <---- 15 (0) +[0]: 25 <---- 16 (0) +[0]: 26 <---- 16 (0) +[0]: 26 <---- 17 (0) +[0]: 27 <---- 6 (0) +[0]: 27 <---- 10 (0) +[0]: 28 <---- 10 (0) +[0]: 28 <---- 14 (0) +[0]: 29 <---- 7 (0) +[0]: 29 <---- 11 (0) +[0]: 30 <---- 11 (0) +[0]: 30 <---- 15 (0) +[0]: 31 <---- 8 (0) +[0]: 31 <---- 12 (0) +[0]: 32 <---- 12 (0) +[0]: 32 <---- 16 (0) +[0]: 33 <---- 9 (0) +[0]: 33 <---- 13 (0) +[0]: 34 <---- 13 (0) +[0]: 34 <---- 17 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 6) dim 2 offset 0 0. 0. + ( 7) dim 2 offset 2 0.333333 0. + ( 8) dim 2 offset 4 0.666667 0. + ( 9) dim 2 offset 6 1. 0. + ( 10) dim 2 offset 8 0. 0.5 + ( 11) dim 2 offset 10 0.333333 0.5 + ( 12) dim 2 offset 12 0.666667 0.5 + ( 13) dim 2 offset 14 1. 0.5 + ( 14) dim 2 offset 16 0. 1. + ( 15) dim 2 offset 18 0.333333 1. + ( 16) dim 2 offset 20 0.666667 1. + ( 17) dim 2 offset 22 1. 1. +Labels: +Label 'marker': +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 33 (1) +[0]: 34 (1) +Label 'Face Sets': +[0]: 27 (4) +[0]: 28 (4) +[0]: 33 (2) +[0]: 34 (2) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 24 (3) +[0]: 25 (3) +[0]: 26 (3) +Label 'fault0': +[0]: 22 (1) +[0]: 23 (1) +[0]: 12 (0) +[0]: 13 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 29 (-101) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 21 (101) +[0]: 30 (101) +[0]: 32 (101) +[0]: 34 (101) +[0]: 11 (200) +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +Label 'fault1': +[0]: 32 (1) +[0]: 16 (0) +[0]: 1 (102) +[0]: 4 (102) +[0]: 22 (101) +[0]: 25 (101) +[0]: 31 (101) +[0]: 2 (-102) +[0]: 5 (-102) +[0]: 23 (-101) +[0]: 26 (-101) +[0]: 12 (200) +DM Object: box (f1_) 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 7 +[0]: 8 ----> 22 +[0]: 8 ----> 29 +[0]: 9 ----> 22 +[0]: 9 ----> 23 +[0]: 9 ----> 31 +[0]: 10 ----> 23 +[0]: 10 ----> 24 +[0]: 10 ----> 33 +[0]: 11 ----> 24 +[0]: 11 ----> 34 +[0]: 12 ----> 25 +[0]: 12 ----> 29 +[0]: 12 ----> 30 +[0]: 13 ----> 26 +[0]: 13 ----> 30 +[0]: 14 ----> 26 +[0]: 14 ----> 27 +[0]: 14 ----> 32 
+[0]: 15 ----> 27 +[0]: 15 ----> 28 +[0]: 15 ----> 39 +[0]: 16 ----> 28 +[0]: 16 ----> 40 +[0]: 17 ----> 25 +[0]: 17 ----> 31 +[0]: 17 ----> 32 +[0]: 17 ----> 35 +[0]: 17 ----> 36 +[0]: 17 ----> 41 +[0]: 17 ----> 41 +[0]: 18 ----> 33 +[0]: 18 ----> 35 +[0]: 18 ----> 37 +[0]: 18 ----> 42 +[0]: 19 ----> 36 +[0]: 19 ----> 38 +[0]: 19 ----> 39 +[0]: 19 ----> 42 +[0]: 20 ----> 34 +[0]: 20 ----> 37 +[0]: 20 ----> 43 +[0]: 21 ----> 38 +[0]: 21 ----> 40 +[0]: 21 ----> 43 +[0]: 22 ----> 0 +[0]: 23 ----> 1 +[0]: 24 ----> 2 +[0]: 25 ----> 0 +[0]: 25 ----> 3 +[0]: 26 ----> 3 +[0]: 27 ----> 4 +[0]: 28 ----> 5 +[0]: 29 ----> 0 +[0]: 30 ----> 3 +[0]: 31 ----> 0 +[0]: 31 ----> 1 +[0]: 32 ----> 3 +[0]: 32 ----> 4 +[0]: 33 ----> 1 +[0]: 33 ----> 2 +[0]: 34 ----> 2 +[0]: 35 ----> 1 +[0]: 35 ----> 6 +[0]: 36 ----> 4 +[0]: 36 ----> 6 +[0]: 37 ----> 2 +[0]: 37 ----> 7 +[0]: 38 ----> 5 +[0]: 38 ----> 7 +[0]: 39 ----> 4 +[0]: 39 ----> 5 +[0]: 40 ----> 5 +[0]: 41 ----> 6 +[0]: 42 ----> 6 +[0]: 42 ----> 7 +[0]: 43 ----> 7 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 22 (0) +[0]: 0 <---- 31 (0) +[0]: 0 <---- 25 (-1) +[0]: 0 <---- 29 (-1) +[0]: 1 <---- 23 (0) +[0]: 1 <---- 33 (0) +[0]: 1 <---- 35 (-1) +[0]: 1 <---- 31 (-1) +[0]: 2 <---- 24 (0) +[0]: 2 <---- 34 (0) +[0]: 2 <---- 37 (-1) +[0]: 2 <---- 33 (-1) +[0]: 3 <---- 25 (0) +[0]: 3 <---- 32 (0) +[0]: 3 <---- 26 (-1) +[0]: 3 <---- 30 (-1) +[0]: 4 <---- 36 (0) +[0]: 4 <---- 39 (0) +[0]: 4 <---- 27 (-1) +[0]: 4 <---- 32 (-1) +[0]: 5 <---- 38 (0) +[0]: 5 <---- 40 (0) +[0]: 5 <---- 28 (-1) +[0]: 5 <---- 39 (-1) +[0]: 6 <---- 35 (0) +[0]: 6 <---- 36 (0) +[0]: 6 <---- 41 (0) +[0]: 6 <---- 42 (0) +[0]: 7 <---- 37 (0) +[0]: 7 <---- 38 (0) +[0]: 7 <---- 42 (0) +[0]: 7 <---- 43 (0) +[0]: 22 <---- 8 (0) +[0]: 22 <---- 9 (0) +[0]: 23 <---- 9 (0) +[0]: 23 <---- 10 (0) +[0]: 24 <---- 10 (0) +[0]: 24 <---- 11 (0) +[0]: 25 <---- 12 (0) +[0]: 25 <---- 17 (0) +[0]: 26 <---- 13 (0) +[0]: 26 <---- 14 (0) +[0]: 27 <---- 14 (0) +[0]: 27 <---- 15 (0) +[0]: 28 <---- 15 (0) +[0]: 28 <---- 16 (0) +[0]: 29 <---- 8 (0) +[0]: 29 <---- 12 (0) +[0]: 30 <---- 12 (0) +[0]: 30 <---- 13 (0) +[0]: 31 <---- 9 (0) +[0]: 31 <---- 17 (0) +[0]: 32 <---- 17 (0) +[0]: 32 <---- 14 (0) +[0]: 33 <---- 10 (0) +[0]: 33 <---- 18 (0) +[0]: 34 <---- 11 (0) +[0]: 34 <---- 20 (0) +[0]: 35 <---- 17 (0) +[0]: 35 <---- 18 (0) +[0]: 36 <---- 17 (0) +[0]: 36 <---- 19 (0) +[0]: 37 <---- 18 (0) +[0]: 37 <---- 20 (0) +[0]: 38 <---- 19 (0) +[0]: 38 <---- 21 (0) +[0]: 39 <---- 19 (0) +[0]: 39 <---- 15 (0) +[0]: 40 <---- 21 (0) +[0]: 40 <---- 16 (0) +[0]: 41 <---- 17 (0) +[0]: 41 <---- 17 (0) +[0]: 42 <---- 18 (0) +[0]: 42 <---- 19 (0) +[0]: 43 <---- 20 (0) +[0]: 43 <---- 21 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 8) dim 2 offset 0 0. 0. + ( 9) dim 2 offset 2 0.333333 0. + ( 10) dim 2 offset 4 0.666667 0. + ( 11) dim 2 offset 6 1. 0. + ( 12) dim 2 offset 8 0. 0.5 + ( 13) dim 2 offset 10 0. 1. + ( 14) dim 2 offset 12 0.333333 1. + ( 15) dim 2 offset 14 0.666667 1. + ( 16) dim 2 offset 16 1. 1. + ( 17) dim 2 offset 18 0.333333 0.5 + ( 18) dim 2 offset 20 0.666667 0.475 + ( 19) dim 2 offset 22 0.666667 0.525 + ( 20) dim 2 offset 24 1. 0.475 + ( 21) dim 2 offset 26 1. 
0.525 +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 41 (2) +[0]: 42 (2) +[0]: 43 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 6 (5) +[0]: 7 (5) +Label 'marker': +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 34 (1) +[0]: 40 (1) +[0]: 43 (1) +Label 'Face Sets': +[0]: 29 (4) +[0]: 30 (4) +[0]: 34 (2) +[0]: 40 (2) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 26 (3) +[0]: 27 (3) +[0]: 28 (3) +Label 'fault0': +[0]: 6 (1) +[0]: 7 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 42 (0) +[0]: 43 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 31 (-101) +[0]: 33 (-101) +[0]: 34 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 25 (101) +[0]: 32 (101) +[0]: 39 (101) +[0]: 40 (101) +[0]: 17 (200) +[0]: 41 (200) +Label 'fault1': +[0]: 39 (1) +[0]: 15 (0) +[0]: 1 (102) +[0]: 4 (102) +[0]: 6 (101) +[0]: 27 (101) +[0]: 33 (101) +[0]: 35 (101) +[0]: 36 (101) +[0]: 2 (-102) +[0]: 5 (-102) +[0]: 7 (-101) +[0]: 28 (-101) +[0]: 37 (-101) +[0]: 38 (-101) +[0]: 18 (200) +[0]: 19 (200) +[0]: 42 (200) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 7 +[0]: 9 ----> 24 +[0]: 9 ----> 30 +[0]: 10 ----> 24 +[0]: 10 ----> 25 +[0]: 10 ----> 32 +[0]: 11 ----> 25 +[0]: 11 ----> 26 +[0]: 11 ----> 34 +[0]: 12 ----> 26 +[0]: 12 ----> 35 +[0]: 13 ----> 27 +[0]: 13 ----> 30 +[0]: 13 ----> 31 +[0]: 14 ----> 28 +[0]: 14 ----> 31 +[0]: 15 ----> 28 +[0]: 15 ----> 33 +[0]: 15 ----> 41 +[0]: 16 ----> 29 +[0]: 16 ----> 40 +[0]: 17 ----> 27 +[0]: 17 ----> 32 +[0]: 17 ----> 33 +[0]: 17 ----> 36 +[0]: 17 ----> 37 +[0]: 17 ----> 47 +[0]: 17 ----> 47 +[0]: 18 ----> 35 +[0]: 18 ----> 38 +[0]: 18 ----> 49 +[0]: 19 ----> 39 +[0]: 19 ----> 40 +[0]: 19 ----> 49 +[0]: 20 ----> 29 +[0]: 20 ----> 42 +[0]: 20 ----> 44 +[0]: 21 ----> 41 +[0]: 21 ----> 43 +[0]: 21 ----> 44 +[0]: 22 ----> 34 +[0]: 22 ----> 36 +[0]: 22 ----> 38 +[0]: 22 ----> 45 +[0]: 22 ----> 45 +[0]: 22 ----> 48 +[0]: 23 ----> 37 +[0]: 23 ----> 39 +[0]: 23 ----> 42 +[0]: 23 ----> 43 +[0]: 23 ----> 46 +[0]: 23 ----> 46 +[0]: 23 ----> 48 +[0]: 24 ----> 0 +[0]: 25 ----> 1 +[0]: 26 ----> 2 +[0]: 27 ----> 0 +[0]: 27 ----> 3 +[0]: 28 ----> 3 +[0]: 29 ----> 4 +[0]: 30 ----> 0 +[0]: 31 ----> 3 +[0]: 32 ----> 0 +[0]: 32 ----> 1 +[0]: 33 ----> 3 +[0]: 33 ----> 5 +[0]: 34 ----> 1 +[0]: 34 ----> 2 +[0]: 35 ----> 2 +[0]: 36 ----> 1 +[0]: 36 ----> 6 +[0]: 37 ----> 5 +[0]: 37 ----> 6 +[0]: 38 ----> 2 +[0]: 38 ----> 7 +[0]: 39 ----> 4 +[0]: 39 ----> 7 +[0]: 40 ----> 4 +[0]: 41 ----> 5 +[0]: 42 ----> 4 +[0]: 42 ----> 8 +[0]: 43 ----> 5 +[0]: 43 ----> 8 +[0]: 44 ----> 8 +[0]: 46 ----> 8 +[0]: 47 ----> 6 +[0]: 48 ----> 6 +[0]: 48 ----> 7 +[0]: 49 ----> 7 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 24 (0) +[0]: 0 <---- 32 (0) +[0]: 0 <---- 27 (-1) +[0]: 0 <---- 30 (-1) +[0]: 1 <---- 25 (0) +[0]: 1 <---- 34 (0) +[0]: 1 <---- 36 (-1) 
+[0]: 1 <---- 32 (-1) +[0]: 2 <---- 26 (0) +[0]: 2 <---- 35 (0) +[0]: 2 <---- 38 (-1) +[0]: 2 <---- 34 (-1) +[0]: 3 <---- 27 (0) +[0]: 3 <---- 33 (0) +[0]: 3 <---- 28 (-1) +[0]: 3 <---- 31 (-1) +[0]: 4 <---- 39 (0) +[0]: 4 <---- 40 (0) +[0]: 4 <---- 29 (-1) +[0]: 4 <---- 42 (-1) +[0]: 5 <---- 37 (0) +[0]: 5 <---- 43 (0) +[0]: 5 <---- 41 (-1) +[0]: 5 <---- 33 (-1) +[0]: 6 <---- 36 (0) +[0]: 6 <---- 37 (0) +[0]: 6 <---- 47 (0) +[0]: 6 <---- 48 (0) +[0]: 7 <---- 38 (0) +[0]: 7 <---- 39 (0) +[0]: 7 <---- 48 (0) +[0]: 7 <---- 49 (0) +[0]: 8 <---- 42 (0) +[0]: 8 <---- 43 (0) +[0]: 8 <---- 46 (0) +[0]: 8 <---- 44 (0) +[0]: 24 <---- 9 (0) +[0]: 24 <---- 10 (0) +[0]: 25 <---- 10 (0) +[0]: 25 <---- 11 (0) +[0]: 26 <---- 11 (0) +[0]: 26 <---- 12 (0) +[0]: 27 <---- 13 (0) +[0]: 27 <---- 17 (0) +[0]: 28 <---- 14 (0) +[0]: 28 <---- 15 (0) +[0]: 29 <---- 20 (0) +[0]: 29 <---- 16 (0) +[0]: 30 <---- 9 (0) +[0]: 30 <---- 13 (0) +[0]: 31 <---- 13 (0) +[0]: 31 <---- 14 (0) +[0]: 32 <---- 10 (0) +[0]: 32 <---- 17 (0) +[0]: 33 <---- 17 (0) +[0]: 33 <---- 15 (0) +[0]: 34 <---- 11 (0) +[0]: 34 <---- 22 (0) +[0]: 35 <---- 12 (0) +[0]: 35 <---- 18 (0) +[0]: 36 <---- 17 (0) +[0]: 36 <---- 22 (0) +[0]: 37 <---- 17 (0) +[0]: 37 <---- 23 (0) +[0]: 38 <---- 22 (0) +[0]: 38 <---- 18 (0) +[0]: 39 <---- 23 (0) +[0]: 39 <---- 19 (0) +[0]: 40 <---- 19 (0) +[0]: 40 <---- 16 (0) +[0]: 41 <---- 15 (0) +[0]: 41 <---- 21 (0) +[0]: 42 <---- 23 (0) +[0]: 42 <---- 20 (0) +[0]: 43 <---- 23 (0) +[0]: 43 <---- 21 (0) +[0]: 44 <---- 20 (0) +[0]: 44 <---- 21 (0) +[0]: 45 <---- 22 (0) +[0]: 45 <---- 22 (0) +[0]: 46 <---- 23 (0) +[0]: 46 <---- 23 (0) +[0]: 47 <---- 17 (0) +[0]: 47 <---- 17 (0) +[0]: 48 <---- 22 (0) +[0]: 48 <---- 23 (0) +[0]: 49 <---- 18 (0) +[0]: 49 <---- 19 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 9) dim 2 offset 0 0. 0. + ( 10) dim 2 offset 2 0.333333 0. + ( 11) dim 2 offset 4 0.666667 0. + ( 12) dim 2 offset 6 1. 0. + ( 13) dim 2 offset 8 0. 0.5 + ( 14) dim 2 offset 10 0. 1. + ( 15) dim 2 offset 12 0.333333 1. + ( 16) dim 2 offset 14 1. 1. + ( 17) dim 2 offset 16 0.333333 0.5 + ( 18) dim 2 offset 18 1. 0.475 + ( 19) dim 2 offset 20 1. 0.525 + ( 20) dim 2 offset 22 0.691667 1. + ( 21) dim 2 offset 24 0.641667 1. 
+ ( 22) dim 2 offset 26 0.666667 0.475 + ( 23) dim 2 offset 28 0.666667 0.525 +Labels: +Label 'celltype': +[0]: 0 (4) +[0]: 1 (4) +[0]: 2 (4) +[0]: 3 (4) +[0]: 4 (4) +[0]: 5 (4) +[0]: 6 (5) +[0]: 7 (5) +[0]: 8 (5) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 17 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 21 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 44 (2) +[0]: 45 (2) +[0]: 46 (2) +[0]: 47 (2) +[0]: 48 (2) +[0]: 49 (2) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 34 (1) +[0]: 35 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 42 (1) +[0]: 43 (1) +Label 'marker': +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 35 (1) +[0]: 40 (1) +[0]: 41 (1) +[0]: 44 (1) +[0]: 49 (1) +Label 'Face Sets': +[0]: 30 (4) +[0]: 31 (4) +[0]: 35 (2) +[0]: 40 (2) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 28 (3) +[0]: 29 (3) +[0]: 41 (3) +Label 'fault0': +[0]: 6 (1) +[0]: 7 (1) +[0]: 36 (1) +[0]: 37 (1) +[0]: 38 (1) +[0]: 39 (1) +[0]: 18 (0) +[0]: 19 (0) +[0]: 22 (0) +[0]: 23 (0) +[0]: 45 (0) +[0]: 46 (0) +[0]: 48 (0) +[0]: 49 (0) +[0]: 0 (-102) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 32 (-101) +[0]: 34 (-101) +[0]: 35 (-101) +[0]: 3 (102) +[0]: 4 (102) +[0]: 5 (102) +[0]: 8 (101) +[0]: 27 (101) +[0]: 33 (101) +[0]: 40 (101) +[0]: 42 (101) +[0]: 43 (101) +[0]: 17 (200) +[0]: 47 (200) +Label 'fault1': +[0]: 8 (1) +[0]: 42 (1) +[0]: 43 (1) +[0]: 20 (0) +[0]: 21 (0) +[0]: 44 (0) +[0]: 1 (102) +[0]: 5 (102) +[0]: 6 (101) +[0]: 34 (101) +[0]: 36 (101) +[0]: 37 (101) +[0]: 41 (101) +[0]: 2 (-102) +[0]: 4 (-102) +[0]: 7 (-101) +[0]: 29 (-101) +[0]: 38 (-101) +[0]: 39 (-101) +[0]: 22 (200) +[0]: 23 (200) +[0]: 45 (200) +[0]: 46 (200) +[0]: 48 (200) diff --git a/src/dm/impls/plex/tests/output/ex69_tet_0.out b/src/dm/impls/plex/tests/output/ex69_tet_0.out new file mode 100644 index 00000000000..6ae5e1aaeb8 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_tet_0.out @@ -0,0 +1,407 @@ +DM Object: doublet 1 MPI process + type: plex +doublet in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 2 ----> 17 +[0]: 2 ----> 18 +[0]: 2 ----> 19 +[0]: 3 ----> 14 +[0]: 3 ----> 15 +[0]: 3 ----> 18 +[0]: 3 ----> 21 +[0]: 4 ----> 14 +[0]: 4 ----> 16 +[0]: 4 ----> 17 +[0]: 4 ----> 22 +[0]: 5 ----> 15 +[0]: 5 ----> 16 +[0]: 5 ----> 19 +[0]: 5 ----> 20 +[0]: 6 ----> 20 +[0]: 6 ----> 21 +[0]: 6 ----> 22 +[0]: 7 ----> 0 +[0]: 7 ----> 1 +[0]: 8 ----> 0 +[0]: 9 ----> 0 +[0]: 10 ----> 0 +[0]: 11 ----> 1 +[0]: 12 ----> 1 +[0]: 13 ----> 1 +[0]: 14 ----> 7 +[0]: 14 ----> 8 +[0]: 14 ----> 13 +[0]: 15 ----> 7 +[0]: 15 ----> 10 +[0]: 15 ----> 11 +[0]: 16 ----> 7 +[0]: 16 ----> 9 +[0]: 16 ----> 12 +[0]: 17 ----> 8 +[0]: 17 ----> 9 +[0]: 18 ----> 8 +[0]: 18 ----> 10 +[0]: 19 ----> 9 +[0]: 19 ----> 10 +[0]: 20 ----> 11 +[0]: 20 ----> 12 +[0]: 21 ----> 11 +[0]: 21 ----> 13 +[0]: 22 ----> 12 +[0]: 22 ----> 13 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 7 (0) +[0]: 0 <---- 8 (0) +[0]: 0 <---- 9 (0) +[0]: 0 <---- 10 (0) +[0]: 1 <---- 7 (-1) +[0]: 1 <---- 11 (0) +[0]: 1 <---- 12 (0) +[0]: 1 <---- 13 (0) +[0]: 7 <---- 14 (0) +[0]: 7 <---- 15 (0) +[0]: 7 <---- 16 (0) +[0]: 8 <---- 17 (0) +[0]: 8 <---- 18 (0) +[0]: 8 <---- 14 (-1) +[0]: 9 <---- 16 (-1) +[0]: 9 
<---- 19 (0) +[0]: 9 <---- 17 (-1) +[0]: 10 <---- 15 (-1) +[0]: 10 <---- 18 (-1) +[0]: 10 <---- 19 (-1) +[0]: 11 <---- 20 (0) +[0]: 11 <---- 21 (0) +[0]: 11 <---- 15 (0) +[0]: 12 <---- 16 (0) +[0]: 12 <---- 22 (0) +[0]: 12 <---- 20 (-1) +[0]: 13 <---- 14 (0) +[0]: 13 <---- 21 (-1) +[0]: 13 <---- 22 (-1) +[0]: 14 <---- 4 (0) +[0]: 14 <---- 3 (0) +[0]: 15 <---- 3 (0) +[0]: 15 <---- 5 (0) +[0]: 16 <---- 5 (0) +[0]: 16 <---- 4 (0) +[0]: 17 <---- 4 (0) +[0]: 17 <---- 2 (0) +[0]: 18 <---- 2 (0) +[0]: 18 <---- 3 (0) +[0]: 19 <---- 5 (0) +[0]: 19 <---- 2 (0) +[0]: 20 <---- 5 (0) +[0]: 20 <---- 6 (0) +[0]: 21 <---- 6 (0) +[0]: 21 <---- 3 (0) +[0]: 22 <---- 4 (0) +[0]: 22 <---- 6 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 2) dim 3 offset 0 -1. 0. 0. + ( 3) dim 3 offset 3 0. -1. 0. + ( 4) dim 3 offset 6 0. 0. 1. + ( 5) dim 3 offset 9 0. 1. 0. + ( 6) dim 3 offset 12 1. 0. 0. +Labels: +Label 'celltype': +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 0 (6) +[0]: 1 (6) +[0]: 7 (3) +[0]: 8 (3) +[0]: 9 (3) +[0]: 10 (3) +[0]: 11 (3) +[0]: 12 (3) +[0]: 13 (3) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +Label 'fault': +[0]: 7 (2) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 0 (103) +[0]: 8 (102) +[0]: 9 (102) +[0]: 10 (102) +[0]: 1 (-103) +[0]: 11 (-102) +[0]: 12 (-102) +[0]: 13 (-102) +[0]: 17 (101) +[0]: 18 (101) +[0]: 19 (101) +[0]: 20 (-101) +[0]: 21 (-101) +[0]: 22 (-101) +DM Object: doublet 1 MPI process + type: plex +doublet in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 3 ----> 28 +[0]: 3 ----> 29 +[0]: 3 ----> 30 +[0]: 4 ----> 31 +[0]: 4 ----> 32 +[0]: 4 ----> 33 +[0]: 5 ----> 22 +[0]: 5 ----> 24 +[0]: 5 ----> 32 +[0]: 5 ----> 34 +[0]: 6 ----> 23 +[0]: 6 ----> 25 +[0]: 6 ----> 30 +[0]: 6 ----> 34 +[0]: 7 ----> 22 +[0]: 7 ----> 26 +[0]: 7 ----> 33 +[0]: 7 ----> 35 +[0]: 8 ----> 23 +[0]: 8 ----> 27 +[0]: 8 ----> 28 +[0]: 8 ----> 35 +[0]: 9 ----> 24 +[0]: 9 ----> 26 +[0]: 9 ----> 31 +[0]: 9 ----> 36 +[0]: 10 ----> 25 +[0]: 10 ----> 27 +[0]: 10 ----> 29 +[0]: 10 ----> 36 +[0]: 11 ----> 1 +[0]: 11 ----> 2 +[0]: 12 ----> 0 +[0]: 12 ----> 2 +[0]: 13 ----> 0 +[0]: 14 ----> 0 +[0]: 15 ----> 0 +[0]: 16 ----> 1 +[0]: 17 ----> 1 +[0]: 18 ----> 1 +[0]: 19 ----> 2 +[0]: 20 ----> 2 +[0]: 21 ----> 2 +[0]: 22 ----> 11 +[0]: 22 ----> 18 +[0]: 22 ----> 19 +[0]: 23 ----> 12 +[0]: 23 ----> 13 +[0]: 23 ----> 19 +[0]: 24 ----> 11 +[0]: 24 ----> 16 +[0]: 24 ----> 20 +[0]: 25 ----> 12 +[0]: 25 ----> 15 +[0]: 25 ----> 20 +[0]: 26 ----> 11 +[0]: 26 ----> 17 +[0]: 26 ----> 21 +[0]: 27 ----> 12 +[0]: 27 ----> 14 +[0]: 27 ----> 21 +[0]: 28 ----> 13 +[0]: 28 ----> 14 +[0]: 29 ----> 14 +[0]: 29 ----> 15 +[0]: 30 ----> 13 +[0]: 30 ----> 15 +[0]: 31 ----> 16 +[0]: 31 ----> 17 +[0]: 32 ----> 16 +[0]: 32 ----> 18 +[0]: 33 ----> 17 +[0]: 33 ----> 18 +[0]: 34 ----> 19 +[0]: 34 ----> 20 +[0]: 35 ----> 19 +[0]: 35 ----> 21 +[0]: 36 ----> 20 +[0]: 36 ----> 21 +Cones: +[0] Max cone size: 5 +[0]: 0 <---- 12 (0) +[0]: 0 <---- 13 (0) +[0]: 0 <---- 14 (0) +[0]: 0 <---- 15 (0) +[0]: 1 <---- 11 (-1) +[0]: 1 <---- 16 (0) +[0]: 1 <---- 17 (0) +[0]: 1 <---- 18 (0) +[0]: 2 <---- 11 (0) +[0]: 2 <---- 12 (0) +[0]: 2 <---- 19 (0) +[0]: 2 <---- 20 (0) +[0]: 2 <---- 21 (0) +[0]: 11 <---- 22 (0) +[0]: 11 <---- 24 (0) +[0]: 11 <---- 26 (0) +[0]: 12 <---- 23 (0) +[0]: 12 <---- 25 (0) +[0]: 12 <---- 27 (0) +[0]: 13 <---- 28 (0) +[0]: 13 <---- 30 (0) +[0]: 13 <---- 23 (-1) +[0]: 14 <---- 27 
(-1) +[0]: 14 <---- 29 (0) +[0]: 14 <---- 28 (-1) +[0]: 15 <---- 25 (-1) +[0]: 15 <---- 30 (-1) +[0]: 15 <---- 29 (-1) +[0]: 16 <---- 31 (0) +[0]: 16 <---- 32 (0) +[0]: 16 <---- 24 (0) +[0]: 17 <---- 26 (0) +[0]: 17 <---- 33 (0) +[0]: 17 <---- 31 (-1) +[0]: 18 <---- 22 (0) +[0]: 18 <---- 32 (-1) +[0]: 18 <---- 33 (-1) +[0]: 19 <---- 22 (0) +[0]: 19 <---- 23 (0) +[0]: 19 <---- 35 (0) +[0]: 19 <---- 34 (0) +[0]: 20 <---- 24 (0) +[0]: 20 <---- 25 (0) +[0]: 20 <---- 34 (0) +[0]: 20 <---- 36 (0) +[0]: 21 <---- 26 (0) +[0]: 21 <---- 27 (0) +[0]: 21 <---- 36 (0) +[0]: 21 <---- 35 (0) +[0]: 22 <---- 7 (0) +[0]: 22 <---- 5 (0) +[0]: 23 <---- 8 (0) +[0]: 23 <---- 6 (0) +[0]: 24 <---- 5 (0) +[0]: 24 <---- 9 (0) +[0]: 25 <---- 6 (0) +[0]: 25 <---- 10 (0) +[0]: 26 <---- 9 (0) +[0]: 26 <---- 7 (0) +[0]: 27 <---- 10 (0) +[0]: 27 <---- 8 (0) +[0]: 28 <---- 8 (0) +[0]: 28 <---- 3 (0) +[0]: 29 <---- 10 (0) +[0]: 29 <---- 3 (0) +[0]: 30 <---- 3 (0) +[0]: 30 <---- 6 (0) +[0]: 31 <---- 9 (0) +[0]: 31 <---- 4 (0) +[0]: 32 <---- 4 (0) +[0]: 32 <---- 5 (0) +[0]: 33 <---- 7 (0) +[0]: 33 <---- 4 (0) +[0]: 34 <---- 5 (0) +[0]: 34 <---- 6 (0) +[0]: 35 <---- 7 (0) +[0]: 35 <---- 8 (0) +[0]: 36 <---- 9 (0) +[0]: 36 <---- 10 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 3) dim 3 offset 0 -1. 0. 0. + ( 4) dim 3 offset 3 1. 0. 0. + ( 5) dim 3 offset 6 0. -1. 0. + ( 6) dim 3 offset 9 0. -1. 0. + ( 7) dim 3 offset 12 0. 0. 1. + ( 8) dim 3 offset 15 0. 0. 1. + ( 9) dim 3 offset 18 0. 1. 0. + ( 10) dim 3 offset 21 0. 1. 0. +Labels: +Label 'celltype': +[0]: 0 (6) +[0]: 1 (6) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 34 (2) +[0]: 35 (2) +[0]: 36 (2) +[0]: 11 (3) +[0]: 12 (3) +[0]: 13 (3) +[0]: 14 (3) +[0]: 15 (3) +[0]: 16 (3) +[0]: 17 (3) +[0]: 18 (3) +[0]: 2 (9) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 28 (1) +[0]: 29 (1) +[0]: 30 (1) +[0]: 31 (1) +[0]: 32 (1) +[0]: 33 (1) +[0]: 19 (5) +[0]: 20 (5) +[0]: 21 (5) +Label 'fault': +[0]: 2 (2) +[0]: 11 (2) +[0]: 12 (2) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 26 (1) +[0]: 27 (1) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 34 (0) +[0]: 35 (0) +[0]: 36 (0) +[0]: 0 (103) +[0]: 13 (102) +[0]: 14 (102) +[0]: 15 (102) +[0]: 1 (-103) +[0]: 16 (-102) +[0]: 17 (-102) +[0]: 18 (-102) +[0]: 28 (101) +[0]: 29 (101) +[0]: 30 (101) +[0]: 31 (-101) +[0]: 32 (-101) +[0]: 33 (-101) diff --git a/src/dm/impls/plex/tests/output/ex69_tet_1.out b/src/dm/impls/plex/tests/output/ex69_tet_1.out new file mode 100644 index 00000000000..549d205737f --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_tet_1.out @@ -0,0 +1,650 @@ +DM Object: doublet 2 MPI processes + type: plex +doublet in 3 dimensions: +Supports: +[0] Max support size: 3 +[0]: 1 ----> 12 +[0]: 1 ----> 13 +[0]: 1 ----> 14 +[0]: 2 ----> 9 +[0]: 2 ----> 10 +[0]: 2 ----> 13 +[0]: 3 ----> 9 +[0]: 3 ----> 11 +[0]: 3 ----> 12 +[0]: 4 ----> 10 +[0]: 4 ----> 11 +[0]: 4 ----> 14 +[0]: 5 ----> 0 +[0]: 6 ----> 0 +[0]: 7 ----> 0 +[0]: 8 ----> 0 +[0]: 9 ----> 5 +[0]: 9 ----> 6 +[0]: 10 ----> 5 +[0]: 10 ----> 8 +[0]: 11 ----> 5 +[0]: 11 ----> 7 +[0]: 12 ----> 6 +[0]: 12 ----> 7 +[0]: 13 ----> 6 +[0]: 13 ----> 8 +[0]: 14 ----> 7 +[0]: 14 ----> 8 +[1] Max support size: 3 +[1]: 1 ----> 9 +[1]: 1 ----> 10 +[1]: 1 ----> 13 +[1]: 2 ----> 9 +[1]: 2 ----> 11 +[1]: 2 ----> 14 +[1]: 3 ----> 10 +[1]: 3 ----> 11 +[1]: 3 ----> 12 +[1]: 4 ----> 12 
+[1]: 4 ----> 13 +[1]: 4 ----> 14 +[1]: 5 ----> 0 +[1]: 6 ----> 0 +[1]: 7 ----> 0 +[1]: 8 ----> 0 +[1]: 9 ----> 5 +[1]: 9 ----> 8 +[1]: 10 ----> 5 +[1]: 10 ----> 6 +[1]: 11 ----> 5 +[1]: 11 ----> 7 +[1]: 12 ----> 6 +[1]: 12 ----> 7 +[1]: 13 ----> 6 +[1]: 13 ----> 8 +[1]: 14 ----> 7 +[1]: 14 ----> 8 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 5 (0) +[0]: 0 <---- 6 (0) +[0]: 0 <---- 7 (0) +[0]: 0 <---- 8 (0) +[0]: 5 <---- 9 (0) +[0]: 5 <---- 10 (0) +[0]: 5 <---- 11 (0) +[0]: 6 <---- 12 (0) +[0]: 6 <---- 13 (0) +[0]: 6 <---- 9 (-1) +[0]: 7 <---- 11 (-1) +[0]: 7 <---- 14 (0) +[0]: 7 <---- 12 (-1) +[0]: 8 <---- 10 (-1) +[0]: 8 <---- 13 (-1) +[0]: 8 <---- 14 (-1) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 2 (0) +[0]: 10 <---- 2 (0) +[0]: 10 <---- 4 (0) +[0]: 11 <---- 4 (0) +[0]: 11 <---- 3 (0) +[0]: 12 <---- 3 (0) +[0]: 12 <---- 1 (0) +[0]: 13 <---- 1 (0) +[0]: 13 <---- 2 (0) +[0]: 14 <---- 4 (0) +[0]: 14 <---- 1 (0) +[1] Max cone size: 4 +[1]: 0 <---- 5 (-1) +[1]: 0 <---- 6 (0) +[1]: 0 <---- 7 (0) +[1]: 0 <---- 8 (0) +[1]: 5 <---- 9 (0) +[1]: 5 <---- 10 (0) +[1]: 5 <---- 11 (0) +[1]: 6 <---- 12 (0) +[1]: 6 <---- 13 (0) +[1]: 6 <---- 10 (0) +[1]: 7 <---- 11 (0) +[1]: 7 <---- 14 (0) +[1]: 7 <---- 12 (-1) +[1]: 8 <---- 9 (0) +[1]: 8 <---- 13 (-1) +[1]: 8 <---- 14 (-1) +[1]: 9 <---- 2 (0) +[1]: 9 <---- 1 (0) +[1]: 10 <---- 1 (0) +[1]: 10 <---- 3 (0) +[1]: 11 <---- 3 (0) +[1]: 11 <---- 2 (0) +[1]: 12 <---- 3 (0) +[1]: 12 <---- 4 (0) +[1]: 13 <---- 4 (0) +[1]: 13 <---- 1 (0) +[1]: 14 <---- 2 (0) +[1]: 14 <---- 4 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 1) dim 3 offset 0 -1. 0. 0. + ( 2) dim 3 offset 3 0. -1. 0. + ( 3) dim 3 offset 6 0. 0. 1. + ( 4) dim 3 offset 9 0. 1. 0. +Process 1: + ( 1) dim 3 offset 0 0. -1. 0. + ( 2) dim 3 offset 3 0. 0. 1. + ( 3) dim 3 offset 6 0. 1. 0. + ( 4) dim 3 offset 9 1. 0. 0. 
+Labels: +Label 'celltype': +[0]: 1 (0) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 5 (3) +[0]: 6 (3) +[0]: 7 (3) +[0]: 8 (3) +[0]: 0 (6) +[1]: 1 (0) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 5 (3) +[1]: 6 (3) +[1]: 7 (3) +[1]: 8 (3) +[1]: 0 (6) +Label 'fault': +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 5 (2) +[0]: 12 (101) +[0]: 13 (101) +[0]: 14 (101) +[0]: 6 (102) +[0]: 7 (102) +[0]: 8 (102) +[0]: 0 (103) +[1]: 0 (-103) +[1]: 6 (-102) +[1]: 7 (-102) +[1]: 8 (-102) +[1]: 12 (-101) +[1]: 13 (-101) +[1]: 14 (-101) +[1]: 1 (0) +[1]: 2 (0) +[1]: 3 (0) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 5 (2) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=15, leaves=7, remote ranks=1 + [0] 2 <- (1,1) + [0] 3 <- (1,2) + [0] 4 <- (1,3) + [0] 5 <- (1,5) + [0] 9 <- (1,9) + [0] 10 <- (1,10) + [0] 11 <- (1,11) + [1] Number of roots=15, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 7 edges + [0] 2 <- 1 + [0] 3 <- 2 + [0] 4 <- 3 + [0] 5 <- 5 + [0] 9 <- 9 + [0] 10 <- 10 + [0] 11 <- 11 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +DM Object: doublet 2 MPI processes + type: plex +doublet in 3 dimensions: +Supports: +[0] Max support size: 4 +[0]: 2 ----> 23 +[0]: 2 ----> 24 +[0]: 2 ----> 25 +[0]: 3 ----> 17 +[0]: 3 ----> 19 +[0]: 3 ----> 26 +[0]: 4 ----> 18 +[0]: 4 ----> 20 +[0]: 4 ----> 25 +[0]: 4 ----> 26 +[0]: 5 ----> 17 +[0]: 5 ----> 21 +[0]: 5 ----> 27 +[0]: 6 ----> 18 +[0]: 6 ----> 22 +[0]: 6 ----> 23 +[0]: 6 ----> 27 +[0]: 7 ----> 19 +[0]: 7 ----> 21 +[0]: 7 ----> 28 +[0]: 8 ----> 20 +[0]: 8 ----> 22 +[0]: 8 ----> 24 +[0]: 8 ----> 28 +[0]: 9 ----> 1 +[0]: 10 ----> 0 +[0]: 10 ----> 1 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 13 ----> 0 +[0]: 14 ----> 1 +[0]: 15 ----> 1 +[0]: 16 ----> 1 +[0]: 17 ----> 9 +[0]: 17 ----> 14 +[0]: 18 ----> 10 +[0]: 18 ----> 11 +[0]: 18 ----> 14 +[0]: 19 ----> 9 +[0]: 19 ----> 15 +[0]: 20 ----> 10 +[0]: 20 ----> 13 +[0]: 20 ----> 15 +[0]: 21 ----> 9 +[0]: 21 ----> 16 +[0]: 22 ----> 10 +[0]: 22 ----> 12 +[0]: 22 ----> 16 +[0]: 23 ----> 11 +[0]: 23 ----> 12 +[0]: 24 ----> 12 +[0]: 24 ----> 13 +[0]: 25 ----> 11 +[0]: 25 ----> 13 +[0]: 26 ----> 14 +[0]: 26 ----> 15 +[0]: 27 ----> 14 +[0]: 27 ----> 16 +[0]: 28 ----> 15 +[0]: 28 ----> 16 +[1] Max support size: 4 +[1]: 2 ----> 17 +[1]: 2 ----> 19 +[1]: 2 ----> 24 +[1]: 2 ----> 26 +[1]: 3 ----> 18 +[1]: 3 ----> 20 +[1]: 3 ----> 26 +[1]: 4 ----> 17 +[1]: 4 ----> 21 +[1]: 4 ----> 25 +[1]: 4 ----> 27 +[1]: 5 ----> 18 +[1]: 5 ----> 22 +[1]: 5 ----> 27 +[1]: 6 ----> 19 +[1]: 6 ----> 21 +[1]: 6 ----> 23 +[1]: 6 ----> 28 +[1]: 7 ----> 20 +[1]: 7 ----> 22 +[1]: 7 ----> 28 +[1]: 8 ----> 23 +[1]: 8 ----> 24 +[1]: 8 ----> 25 +[1]: 9 ----> 0 +[1]: 9 ----> 1 +[1]: 10 ----> 1 +[1]: 11 ----> 0 +[1]: 12 ----> 0 +[1]: 13 ----> 0 +[1]: 14 ----> 1 +[1]: 15 ----> 1 +[1]: 16 ----> 1 +[1]: 17 ----> 9 +[1]: 17 ----> 13 +[1]: 17 ----> 14 +[1]: 18 ----> 10 +[1]: 18 ----> 14 +[1]: 19 ----> 9 +[1]: 19 ----> 11 +[1]: 19 ----> 15 +[1]: 20 ----> 10 +[1]: 20 ----> 15 +[1]: 21 ----> 9 +[1]: 21 ----> 12 +[1]: 21 ----> 16 +[1]: 22 ----> 10 +[1]: 22 ----> 16 +[1]: 23 ----> 11 +[1]: 23 ----> 12 +[1]: 24 ----> 11 +[1]: 24 ----> 13 +[1]: 25 ----> 12 +[1]: 25 ----> 13 +[1]: 26 ----> 14 +[1]: 26 ----> 15 +[1]: 27 ----> 14 +[1]: 27 ----> 16 +[1]: 28 ----> 15 +[1]: 28 ----> 16 +Cones: +[0] Max cone size: 5 +[0]: 0 
<---- 10 (0) +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (0) +[0]: 0 <---- 13 (0) +[0]: 1 <---- 9 (0) +[0]: 1 <---- 10 (0) +[0]: 1 <---- 14 (0) +[0]: 1 <---- 15 (0) +[0]: 1 <---- 16 (0) +[0]: 9 <---- 17 (0) +[0]: 9 <---- 19 (0) +[0]: 9 <---- 21 (0) +[0]: 10 <---- 18 (0) +[0]: 10 <---- 20 (0) +[0]: 10 <---- 22 (0) +[0]: 11 <---- 23 (0) +[0]: 11 <---- 25 (0) +[0]: 11 <---- 18 (-1) +[0]: 12 <---- 22 (-1) +[0]: 12 <---- 24 (0) +[0]: 12 <---- 23 (-1) +[0]: 13 <---- 20 (-1) +[0]: 13 <---- 25 (-1) +[0]: 13 <---- 24 (-1) +[0]: 14 <---- 17 (0) +[0]: 14 <---- 18 (0) +[0]: 14 <---- 27 (0) +[0]: 14 <---- 26 (0) +[0]: 15 <---- 19 (0) +[0]: 15 <---- 20 (0) +[0]: 15 <---- 26 (0) +[0]: 15 <---- 28 (0) +[0]: 16 <---- 21 (0) +[0]: 16 <---- 22 (0) +[0]: 16 <---- 28 (0) +[0]: 16 <---- 27 (0) +[0]: 17 <---- 5 (0) +[0]: 17 <---- 3 (0) +[0]: 18 <---- 6 (0) +[0]: 18 <---- 4 (0) +[0]: 19 <---- 3 (0) +[0]: 19 <---- 7 (0) +[0]: 20 <---- 4 (0) +[0]: 20 <---- 8 (0) +[0]: 21 <---- 7 (0) +[0]: 21 <---- 5 (0) +[0]: 22 <---- 8 (0) +[0]: 22 <---- 6 (0) +[0]: 23 <---- 6 (0) +[0]: 23 <---- 2 (0) +[0]: 24 <---- 8 (0) +[0]: 24 <---- 2 (0) +[0]: 25 <---- 2 (0) +[0]: 25 <---- 4 (0) +[0]: 26 <---- 3 (0) +[0]: 26 <---- 4 (0) +[0]: 27 <---- 5 (0) +[0]: 27 <---- 6 (0) +[0]: 28 <---- 7 (0) +[0]: 28 <---- 8 (0) +[1] Max cone size: 5 +[1]: 0 <---- 9 (-1) +[1]: 0 <---- 11 (0) +[1]: 0 <---- 12 (0) +[1]: 0 <---- 13 (0) +[1]: 1 <---- 9 (0) +[1]: 1 <---- 10 (0) +[1]: 1 <---- 14 (0) +[1]: 1 <---- 15 (0) +[1]: 1 <---- 16 (0) +[1]: 9 <---- 17 (0) +[1]: 9 <---- 19 (0) +[1]: 9 <---- 21 (0) +[1]: 10 <---- 18 (0) +[1]: 10 <---- 20 (0) +[1]: 10 <---- 22 (0) +[1]: 11 <---- 23 (0) +[1]: 11 <---- 24 (0) +[1]: 11 <---- 19 (0) +[1]: 12 <---- 21 (0) +[1]: 12 <---- 25 (0) +[1]: 12 <---- 23 (-1) +[1]: 13 <---- 17 (0) +[1]: 13 <---- 24 (-1) +[1]: 13 <---- 25 (-1) +[1]: 14 <---- 17 (0) +[1]: 14 <---- 18 (0) +[1]: 14 <---- 27 (0) +[1]: 14 <---- 26 (0) +[1]: 15 <---- 19 (0) +[1]: 15 <---- 20 (0) +[1]: 15 <---- 26 (0) +[1]: 15 <---- 28 (0) +[1]: 16 <---- 21 (0) +[1]: 16 <---- 22 (0) +[1]: 16 <---- 28 (0) +[1]: 16 <---- 27 (0) +[1]: 17 <---- 4 (0) +[1]: 17 <---- 2 (0) +[1]: 18 <---- 5 (0) +[1]: 18 <---- 3 (0) +[1]: 19 <---- 2 (0) +[1]: 19 <---- 6 (0) +[1]: 20 <---- 3 (0) +[1]: 20 <---- 7 (0) +[1]: 21 <---- 6 (0) +[1]: 21 <---- 4 (0) +[1]: 22 <---- 7 (0) +[1]: 22 <---- 5 (0) +[1]: 23 <---- 6 (0) +[1]: 23 <---- 8 (0) +[1]: 24 <---- 8 (0) +[1]: 24 <---- 2 (0) +[1]: 25 <---- 4 (0) +[1]: 25 <---- 8 (0) +[1]: 26 <---- 2 (0) +[1]: 26 <---- 3 (0) +[1]: 27 <---- 4 (0) +[1]: 27 <---- 5 (0) +[1]: 28 <---- 6 (0) +[1]: 28 <---- 7 (0) +coordinates with 1 fields + field 0 with 3 components +Process 0: + ( 2) dim 3 offset 0 -1. 0. 0. + ( 3) dim 3 offset 3 0. -1. 0. + ( 4) dim 3 offset 6 0. -1. 0. + ( 5) dim 3 offset 9 0. 0. 1. + ( 6) dim 3 offset 12 0. 0. 1. + ( 7) dim 3 offset 15 0. 1. 0. + ( 8) dim 3 offset 18 0. 1. 0. +Process 1: + ( 2) dim 3 offset 0 0. -1. 0. + ( 3) dim 3 offset 3 0. -1. 0. + ( 4) dim 3 offset 6 0. 0. 1. + ( 5) dim 3 offset 9 0. 0. 1. + ( 6) dim 3 offset 12 0. 1. 0. + ( 7) dim 3 offset 15 0. 1. 0. + ( 8) dim 3 offset 18 1. 0. 0. 
+Labels: +Label 'celltype': +[0]: 0 (6) +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 26 (2) +[0]: 27 (2) +[0]: 28 (2) +[0]: 9 (3) +[0]: 10 (3) +[0]: 11 (3) +[0]: 12 (3) +[0]: 13 (3) +[0]: 1 (9) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 14 (5) +[0]: 15 (5) +[0]: 16 (5) +[1]: 0 (6) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 26 (2) +[1]: 27 (2) +[1]: 28 (2) +[1]: 9 (3) +[1]: 10 (3) +[1]: 11 (3) +[1]: 12 (3) +[1]: 13 (3) +[1]: 1 (9) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 23 (1) +[1]: 24 (1) +[1]: 25 (1) +[1]: 14 (5) +[1]: 15 (5) +[1]: 16 (5) +Label 'fault': +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 1 (2) +[0]: 9 (2) +[0]: 10 (2) +[0]: 23 (101) +[0]: 24 (101) +[0]: 25 (101) +[0]: 11 (102) +[0]: 12 (102) +[0]: 13 (102) +[0]: 0 (103) +[1]: 0 (-103) +[1]: 11 (-102) +[1]: 12 (-102) +[1]: 13 (-102) +[1]: 23 (-101) +[1]: 24 (-101) +[1]: 25 (-101) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 26 (0) +[1]: 27 (0) +[1]: 28 (0) +[1]: 14 (1) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 20 (1) +[1]: 21 (1) +[1]: 22 (1) +[1]: 1 (2) +[1]: 9 (2) +[1]: 10 (2) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=29, leaves=21, remote ranks=1 + [0] 1 <- (1,1) + [0] 3 <- (1,2) + [0] 4 <- (1,3) + [0] 5 <- (1,4) + [0] 6 <- (1,5) + [0] 7 <- (1,6) + [0] 8 <- (1,7) + [0] 9 <- (1,9) + [0] 10 <- (1,10) + [0] 14 <- (1,14) + [0] 15 <- (1,15) + [0] 16 <- (1,16) + [0] 17 <- (1,17) + [0] 18 <- (1,18) + [0] 19 <- (1,19) + [0] 20 <- (1,20) + [0] 21 <- (1,21) + [0] 22 <- (1,22) + [0] 26 <- (1,26) + [0] 27 <- (1,27) + [0] 28 <- (1,28) + [1] Number of roots=29, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 21 edges + [0] 1 <- 1 + [0] 3 <- 2 + [0] 4 <- 3 + [0] 5 <- 4 + [0] 6 <- 5 + [0] 7 <- 6 + [0] 8 <- 7 + [0] 9 <- 9 + [0] 10 <- 10 + [0] 14 <- 14 + [0] 15 <- 15 + [0] 16 <- 16 + [0] 17 <- 17 + [0] 18 <- 18 + [0] 19 <- 19 + [0] 20 <- 20 + [0] 21 <- 21 + [0] 22 <- 22 + [0] 26 <- 26 + [0] 27 <- 27 + [0] 28 <- 28 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order diff --git a/src/dm/impls/plex/tests/output/ex69_tri_0.out b/src/dm/impls/plex/tests/output/ex69_tri_0.out new file mode 100644 index 00000000000..0da61a22ef8 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_tri_0.out @@ -0,0 +1,204 @@ +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 2 ----> 6 +[0]: 2 ----> 7 +[0]: 3 ----> 7 +[0]: 3 ----> 8 +[0]: 3 ----> 9 +[0]: 4 ----> 6 +[0]: 4 ----> 8 +[0]: 4 ----> 10 +[0]: 5 ----> 9 +[0]: 5 ----> 10 +[0]: 6 ----> 0 +[0]: 7 ----> 0 +[0]: 8 ----> 0 +[0]: 8 ----> 1 +[0]: 9 ----> 1 +[0]: 10 ----> 1 +Cones: +[0] Max cone size: 3 +[0]: 0 <---- 6 (0) +[0]: 0 <---- 7 (0) +[0]: 0 <---- 8 (0) +[0]: 1 <---- 9 (0) +[0]: 1 <---- 10 (0) +[0]: 1 <---- 8 (-1) +[0]: 6 <---- 4 (0) +[0]: 6 <---- 2 (0) +[0]: 7 <---- 2 (0) +[0]: 7 <---- 3 (0) +[0]: 8 <---- 3 (0) +[0]: 8 <---- 4 (0) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 5 (0) +[0]: 10 <---- 5 (0) +[0]: 10 <---- 4 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 2) dim 2 offset 0 0. 0. 
+ ( 3) dim 2 offset 2 1. 0. + ( 4) dim 2 offset 4 0. 1. + ( 5) dim 2 offset 6 1. 1. +Labels: +Label 'celltype': +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 0 (3) +[0]: 1 (3) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +Label 'marker': +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 9 (1) +[0]: 10 (1) +Label 'Face Sets': +[0]: 6 (1) +[0]: 7 (1) +[0]: 9 (1) +[0]: 10 (1) +Label 'fault': +[0]: 8 (1) +[0]: 3 (0) +[0]: 4 (0) +[0]: 0 (102) +[0]: 6 (101) +[0]: 7 (101) +[0]: 1 (-102) +[0]: 9 (-101) +[0]: 10 (-101) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 3 ----> 9 +[0]: 3 ----> 10 +[0]: 4 ----> 13 +[0]: 4 ----> 14 +[0]: 5 ----> 11 +[0]: 5 ----> 13 +[0]: 5 ----> 15 +[0]: 6 ----> 10 +[0]: 6 ----> 12 +[0]: 6 ----> 15 +[0]: 7 ----> 11 +[0]: 7 ----> 14 +[0]: 7 ----> 16 +[0]: 8 ----> 9 +[0]: 8 ----> 12 +[0]: 8 ----> 16 +[0]: 9 ----> 0 +[0]: 10 ----> 0 +[0]: 11 ----> 1 +[0]: 11 ----> 2 +[0]: 12 ----> 0 +[0]: 12 ----> 2 +[0]: 13 ----> 1 +[0]: 14 ----> 1 +[0]: 15 ----> 2 +[0]: 16 ----> 2 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 9 (0) +[0]: 0 <---- 10 (0) +[0]: 0 <---- 12 (0) +[0]: 1 <---- 13 (0) +[0]: 1 <---- 14 (0) +[0]: 1 <---- 11 (-1) +[0]: 2 <---- 11 (0) +[0]: 2 <---- 12 (0) +[0]: 2 <---- 15 (0) +[0]: 2 <---- 16 (0) +[0]: 9 <---- 8 (0) +[0]: 9 <---- 3 (0) +[0]: 10 <---- 3 (0) +[0]: 10 <---- 6 (0) +[0]: 11 <---- 5 (0) +[0]: 11 <---- 7 (0) +[0]: 12 <---- 6 (0) +[0]: 12 <---- 8 (0) +[0]: 13 <---- 5 (0) +[0]: 13 <---- 4 (0) +[0]: 14 <---- 4 (0) +[0]: 14 <---- 7 (0) +[0]: 15 <---- 5 (0) +[0]: 15 <---- 6 (0) +[0]: 16 <---- 7 (0) +[0]: 16 <---- 8 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 3) dim 2 offset 0 0. 0. + ( 4) dim 2 offset 2 1. 1. + ( 5) dim 2 offset 4 1. 0. + ( 6) dim 2 offset 6 1. 0. + ( 7) dim 2 offset 8 0. 1. + ( 8) dim 2 offset 10 0. 1. 
+Labels: +Label 'celltype': +[0]: 0 (3) +[0]: 1 (3) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 15 (2) +[0]: 16 (2) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 2 (5) +Label 'marker': +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +Label 'Face Sets': +[0]: 9 (1) +[0]: 10 (1) +[0]: 13 (1) +[0]: 14 (1) +Label 'fault': +[0]: 2 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 15 (0) +[0]: 16 (0) +[0]: 0 (102) +[0]: 9 (101) +[0]: 10 (101) +[0]: 1 (-102) +[0]: 13 (-101) +[0]: 14 (-101) diff --git a/src/dm/impls/plex/tests/output/ex69_tri_1.out b/src/dm/impls/plex/tests/output/ex69_tri_1.out new file mode 100644 index 00000000000..ce59becee41 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_tri_1.out @@ -0,0 +1,204 @@ +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 2 ----> 6 +[0]: 2 ----> 7 +[0]: 3 ----> 7 +[0]: 3 ----> 8 +[0]: 3 ----> 9 +[0]: 4 ----> 6 +[0]: 4 ----> 8 +[0]: 4 ----> 10 +[0]: 5 ----> 9 +[0]: 5 ----> 10 +[0]: 6 ----> 0 +[0]: 7 ----> 0 +[0]: 8 ----> 0 +[0]: 8 ----> 1 +[0]: 9 ----> 1 +[0]: 10 ----> 1 +Cones: +[0] Max cone size: 3 +[0]: 0 <---- 6 (0) +[0]: 0 <---- 7 (0) +[0]: 0 <---- 8 (0) +[0]: 1 <---- 9 (0) +[0]: 1 <---- 10 (0) +[0]: 1 <---- 8 (-1) +[0]: 6 <---- 4 (0) +[0]: 6 <---- 2 (0) +[0]: 7 <---- 2 (0) +[0]: 7 <---- 3 (0) +[0]: 8 <---- 3 (0) +[0]: 8 <---- 4 (0) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 5 (0) +[0]: 10 <---- 5 (0) +[0]: 10 <---- 4 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 2) dim 2 offset 0 0. 0. + ( 3) dim 2 offset 2 1. 0. + ( 4) dim 2 offset 4 0. 1. + ( 5) dim 2 offset 6 1. 1. +Labels: +Label 'celltype': +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 0 (3) +[0]: 1 (3) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +Label 'marker': +[0]: 2 (1) +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 9 (1) +[0]: 10 (1) +Label 'Face Sets': +[0]: 6 (1) +[0]: 7 (1) +[0]: 9 (1) +[0]: 10 (1) +Label 'fault': +[0]: 8 (1) +[0]: 3 (0) +[0]: 4 (0) +[0]: 0 (102) +[0]: 6 (101) +[0]: 7 (101) +[0]: 1 (-102) +[0]: 9 (-101) +[0]: 10 (-101) +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 3 ----> 11 +[0]: 3 ----> 12 +[0]: 4 ----> 15 +[0]: 4 ----> 16 +[0]: 5 ----> 9 +[0]: 5 ----> 13 +[0]: 5 ----> 15 +[0]: 6 ----> 9 +[0]: 6 ----> 12 +[0]: 6 ----> 14 +[0]: 7 ----> 10 +[0]: 7 ----> 13 +[0]: 7 ----> 16 +[0]: 8 ----> 10 +[0]: 8 ----> 11 +[0]: 8 ----> 14 +[0]: 9 ----> 2 +[0]: 10 ----> 2 +[0]: 11 ----> 0 +[0]: 12 ----> 0 +[0]: 13 ----> 1 +[0]: 13 ----> 2 +[0]: 14 ----> 0 +[0]: 14 ----> 2 +[0]: 15 ----> 1 +[0]: 16 ----> 1 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (0) +[0]: 0 <---- 14 (0) +[0]: 1 <---- 15 (0) +[0]: 1 <---- 16 (0) +[0]: 1 <---- 13 (-1) +[0]: 2 <---- 13 (0) +[0]: 2 <---- 10 (0) +[0]: 2 <---- 14 (-1) +[0]: 2 <---- 9 (-1) +[0]: 9 <---- 5 (0) +[0]: 9 <---- 6 (0) +[0]: 10 <---- 7 (0) +[0]: 10 <---- 8 (0) +[0]: 11 <---- 8 (0) +[0]: 11 <---- 3 (0) +[0]: 12 <---- 3 (0) +[0]: 12 <---- 6 (0) +[0]: 13 <---- 5 (0) +[0]: 13 <---- 7 (0) +[0]: 14 <---- 6 (0) +[0]: 14 <---- 8 (0) +[0]: 15 <---- 5 (0) +[0]: 15 <---- 4 (0) +[0]: 16 <---- 4 (0) +[0]: 16 <---- 7 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 3) dim 2 offset 0 0. 0. + ( 4) dim 2 offset 2 1. 1. 
+ ( 5) dim 2 offset 4 1. 0. + ( 6) dim 2 offset 6 1. 0. + ( 7) dim 2 offset 8 0. 1. + ( 8) dim 2 offset 10 0. 1. +Labels: +Label 'celltype': +[0]: 0 (3) +[0]: 1 (3) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 2 (4) +Label 'marker': +[0]: 3 (1) +[0]: 4 (1) +[0]: 5 (1) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 15 (1) +[0]: 16 (1) +Label 'Face Sets': +[0]: 11 (1) +[0]: 12 (1) +[0]: 15 (1) +[0]: 16 (1) +Label 'fault': +[0]: 2 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 0 (102) +[0]: 11 (101) +[0]: 12 (101) +[0]: 1 (-102) +[0]: 15 (-101) +[0]: 16 (-101) diff --git a/src/dm/impls/plex/tests/output/ex69_tri_2.out b/src/dm/impls/plex/tests/output/ex69_tri_2.out new file mode 100644 index 00000000000..014b9253c37 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_tri_2.out @@ -0,0 +1,303 @@ +DM Object: tri_2_cv Mesh 1 MPI process + type: plex +tri_2_cv Mesh in 2 dimensions: +Supports: +[0] Max support size: 5 +[0]: 4 ----> 10 +[0]: 4 ----> 12 +[0]: 4 ----> 18 +[0]: 5 ----> 11 +[0]: 5 ----> 12 +[0]: 5 ----> 14 +[0]: 5 ----> 15 +[0]: 5 ----> 16 +[0]: 6 ----> 10 +[0]: 6 ----> 11 +[0]: 6 ----> 13 +[0]: 7 ----> 13 +[0]: 7 ----> 14 +[0]: 8 ----> 15 +[0]: 8 ----> 17 +[0]: 8 ----> 18 +[0]: 9 ----> 16 +[0]: 9 ----> 17 +[0]: 10 ----> 0 +[0]: 11 ----> 0 +[0]: 11 ----> 1 +[0]: 12 ----> 0 +[0]: 12 ----> 3 +[0]: 13 ----> 1 +[0]: 14 ----> 1 +[0]: 15 ----> 2 +[0]: 15 ----> 3 +[0]: 16 ----> 2 +[0]: 17 ----> 2 +[0]: 18 ----> 3 +Cones: +[0] Max cone size: 3 +[0]: 0 <---- 10 (0) +[0]: 0 <---- 11 (0) +[0]: 0 <---- 12 (0) +[0]: 1 <---- 11 (-1) +[0]: 1 <---- 13 (0) +[0]: 1 <---- 14 (0) +[0]: 2 <---- 15 (-1) +[0]: 2 <---- 16 (0) +[0]: 2 <---- 17 (0) +[0]: 3 <---- 18 (0) +[0]: 3 <---- 12 (-1) +[0]: 3 <---- 15 (0) +[0]: 10 <---- 4 (0) +[0]: 10 <---- 6 (0) +[0]: 11 <---- 6 (0) +[0]: 11 <---- 5 (0) +[0]: 12 <---- 5 (0) +[0]: 12 <---- 4 (0) +[0]: 13 <---- 6 (0) +[0]: 13 <---- 7 (0) +[0]: 14 <---- 7 (0) +[0]: 14 <---- 5 (0) +[0]: 15 <---- 5 (0) +[0]: 15 <---- 8 (0) +[0]: 16 <---- 5 (0) +[0]: 16 <---- 9 (0) +[0]: 17 <---- 9 (0) +[0]: 17 <---- 8 (0) +[0]: 18 <---- 8 (0) +[0]: 18 <---- 4 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 4) dim 2 offset 0 -1. 0. + ( 5) dim 2 offset 2 0. 1. + ( 6) dim 2 offset 4 0. -1. + ( 7) dim 2 offset 6 1. 0. + ( 8) dim 2 offset 8 -2. 1. + ( 9) dim 2 offset 10 -1. 2. 
+Labels: +Label 'celltype': +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 0 (3) +[0]: 1 (3) +[0]: 2 (3) +[0]: 3 (3) +[0]: 10 (1) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +Label 'marker': +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (1) +[0]: 8 (-2) +Label 'fault': +[0]: 11 (1) +[0]: 15 (1) +[0]: 5 (0) +[0]: 6 (0) +[0]: 8 (0) +[0]: 0 (102) +[0]: 3 (102) +[0]: 10 (101) +[0]: 12 (101) +[0]: 18 (101) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 13 (-101) +[0]: 14 (-101) +[0]: 16 (-101) +[0]: 17 (-101) +DM Object: tri_2_cv Mesh 1 MPI process + type: plex +tri_2_cv Mesh in 2 dimensions: +Supports: +[0] Max support size: 5 +[0]: 6 ----> 15 +[0]: 6 ----> 20 +[0]: 6 ----> 21 +[0]: 7 ----> 22 +[0]: 7 ----> 23 +[0]: 8 ----> 24 +[0]: 8 ----> 25 +[0]: 9 ----> 16 +[0]: 9 ----> 18 +[0]: 9 ----> 23 +[0]: 9 ----> 24 +[0]: 9 ----> 26 +[0]: 10 ----> 17 +[0]: 10 ----> 19 +[0]: 10 ----> 20 +[0]: 10 ----> 26 +[0]: 11 ----> 16 +[0]: 11 ----> 22 +[0]: 11 ----> 27 +[0]: 12 ----> 15 +[0]: 12 ----> 17 +[0]: 12 ----> 27 +[0]: 13 ----> 18 +[0]: 13 ----> 25 +[0]: 13 ----> 28 +[0]: 14 ----> 19 +[0]: 14 ----> 21 +[0]: 14 ----> 28 +[0]: 15 ----> 0 +[0]: 16 ----> 1 +[0]: 16 ----> 4 +[0]: 17 ----> 0 +[0]: 17 ----> 4 +[0]: 18 ----> 2 +[0]: 18 ----> 5 +[0]: 19 ----> 3 +[0]: 19 ----> 5 +[0]: 20 ----> 0 +[0]: 20 ----> 3 +[0]: 21 ----> 3 +[0]: 22 ----> 1 +[0]: 23 ----> 1 +[0]: 24 ----> 2 +[0]: 25 ----> 2 +[0]: 26 ----> 4 +[0]: 26 ----> 5 +[0]: 27 ----> 4 +[0]: 28 ----> 5 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 15 (0) +[0]: 0 <---- 17 (0) +[0]: 0 <---- 20 (0) +[0]: 1 <---- 16 (-1) +[0]: 1 <---- 22 (0) +[0]: 1 <---- 23 (0) +[0]: 2 <---- 18 (-1) +[0]: 2 <---- 24 (0) +[0]: 2 <---- 25 (0) +[0]: 3 <---- 21 (0) +[0]: 3 <---- 20 (-1) +[0]: 3 <---- 19 (0) +[0]: 4 <---- 16 (0) +[0]: 4 <---- 17 (0) +[0]: 4 <---- 27 (0) +[0]: 4 <---- 26 (0) +[0]: 5 <---- 18 (0) +[0]: 5 <---- 19 (0) +[0]: 5 <---- 26 (0) +[0]: 5 <---- 28 (0) +[0]: 15 <---- 6 (0) +[0]: 15 <---- 12 (0) +[0]: 16 <---- 11 (0) +[0]: 16 <---- 9 (0) +[0]: 17 <---- 12 (0) +[0]: 17 <---- 10 (0) +[0]: 18 <---- 9 (0) +[0]: 18 <---- 13 (0) +[0]: 19 <---- 10 (0) +[0]: 19 <---- 14 (0) +[0]: 20 <---- 10 (0) +[0]: 20 <---- 6 (0) +[0]: 21 <---- 14 (0) +[0]: 21 <---- 6 (0) +[0]: 22 <---- 11 (0) +[0]: 22 <---- 7 (0) +[0]: 23 <---- 7 (0) +[0]: 23 <---- 9 (0) +[0]: 24 <---- 9 (0) +[0]: 24 <---- 8 (0) +[0]: 25 <---- 8 (0) +[0]: 25 <---- 13 (0) +[0]: 26 <---- 9 (0) +[0]: 26 <---- 10 (0) +[0]: 27 <---- 11 (0) +[0]: 27 <---- 12 (0) +[0]: 28 <---- 13 (0) +[0]: 28 <---- 14 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 6) dim 2 offset 0 -1. 0. + ( 7) dim 2 offset 2 1. 0. + ( 8) dim 2 offset 4 -1. 2. + ( 9) dim 2 offset 6 0. 1. + ( 10) dim 2 offset 8 0. 1. + ( 11) dim 2 offset 10 0. -1. + ( 12) dim 2 offset 12 0. -1. + ( 13) dim 2 offset 14 -2. 1. + ( 14) dim 2 offset 16 -2. 1. 
+Labels: +Label 'celltype': +[0]: 0 (3) +[0]: 1 (3) +[0]: 2 (3) +[0]: 3 (3) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 26 (2) +[0]: 27 (2) +[0]: 28 (2) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 20 (1) +[0]: 21 (1) +[0]: 22 (1) +[0]: 23 (1) +[0]: 24 (1) +[0]: 25 (1) +[0]: 4 (5) +[0]: 5 (5) +Label 'marker': +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 7 (1) +[0]: 13 (-2) +[0]: 14 (-2) +[0]: 28 (-2) +Label 'fault': +[0]: 4 (1) +[0]: 5 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 18 (1) +[0]: 19 (1) +[0]: 9 (0) +[0]: 10 (0) +[0]: 11 (0) +[0]: 12 (0) +[0]: 13 (0) +[0]: 14 (0) +[0]: 26 (0) +[0]: 27 (0) +[0]: 28 (0) +[0]: 0 (102) +[0]: 3 (102) +[0]: 15 (101) +[0]: 20 (101) +[0]: 21 (101) +[0]: 1 (-102) +[0]: 2 (-102) +[0]: 22 (-101) +[0]: 23 (-101) +[0]: 24 (-101) +[0]: 25 (-101) diff --git a/src/dm/impls/plex/tests/output/ex69_tri_3.out b/src/dm/impls/plex/tests/output/ex69_tri_3.out new file mode 100644 index 00000000000..06047ea5fc2 --- /dev/null +++ b/src/dm/impls/plex/tests/output/ex69_tri_3.out @@ -0,0 +1,478 @@ +DM Object: tri_2_cv Mesh 2 MPI processes + type: plex +tri_2_cv Mesh in 2 dimensions: +Supports: +[0] Max support size: 3 +[0]: 2 ----> 6 +[0]: 2 ----> 8 +[0]: 2 ----> 10 +[0]: 3 ----> 7 +[0]: 3 ----> 8 +[0]: 3 ----> 9 +[0]: 4 ----> 6 +[0]: 4 ----> 7 +[0]: 5 ----> 9 +[0]: 5 ----> 10 +[0]: 6 ----> 0 +[0]: 7 ----> 0 +[0]: 8 ----> 0 +[0]: 8 ----> 1 +[0]: 9 ----> 1 +[0]: 10 ----> 1 +[1] Max support size: 4 +[1]: 2 ----> 7 +[1]: 2 ----> 9 +[1]: 2 ----> 10 +[1]: 2 ----> 11 +[1]: 3 ----> 7 +[1]: 3 ----> 8 +[1]: 4 ----> 8 +[1]: 4 ----> 9 +[1]: 5 ----> 10 +[1]: 5 ----> 12 +[1]: 6 ----> 11 +[1]: 6 ----> 12 +[1]: 7 ----> 0 +[1]: 8 ----> 0 +[1]: 9 ----> 0 +[1]: 10 ----> 1 +[1]: 11 ----> 1 +[1]: 12 ----> 1 +Cones: +[0] Max cone size: 3 +[0]: 0 <---- 6 (0) +[0]: 0 <---- 7 (0) +[0]: 0 <---- 8 (0) +[0]: 1 <---- 10 (0) +[0]: 1 <---- 8 (-1) +[0]: 1 <---- 9 (0) +[0]: 6 <---- 2 (0) +[0]: 6 <---- 4 (0) +[0]: 7 <---- 4 (0) +[0]: 7 <---- 3 (0) +[0]: 8 <---- 3 (0) +[0]: 8 <---- 2 (0) +[0]: 9 <---- 3 (0) +[0]: 9 <---- 5 (0) +[0]: 10 <---- 5 (0) +[0]: 10 <---- 2 (0) +[1] Max cone size: 3 +[1]: 0 <---- 7 (-1) +[1]: 0 <---- 8 (0) +[1]: 0 <---- 9 (0) +[1]: 1 <---- 10 (-1) +[1]: 1 <---- 11 (0) +[1]: 1 <---- 12 (0) +[1]: 7 <---- 3 (0) +[1]: 7 <---- 2 (0) +[1]: 8 <---- 3 (0) +[1]: 8 <---- 4 (0) +[1]: 9 <---- 4 (0) +[1]: 9 <---- 2 (0) +[1]: 10 <---- 2 (0) +[1]: 10 <---- 5 (0) +[1]: 11 <---- 2 (0) +[1]: 11 <---- 6 (0) +[1]: 12 <---- 6 (0) +[1]: 12 <---- 5 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 2) dim 2 offset 0 -1. 0. + ( 3) dim 2 offset 2 0. 1. + ( 4) dim 2 offset 4 0. -1. + ( 5) dim 2 offset 6 -2. 1. +Process 1: + ( 2) dim 2 offset 0 0. 1. + ( 3) dim 2 offset 2 0. -1. + ( 4) dim 2 offset 4 1. 0. + ( 5) dim 2 offset 6 -2. 1. + ( 6) dim 2 offset 8 -1. 2. 
+Labels: +Label 'celltype': +[0]: 2 (0) +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (1) +[0]: 7 (1) +[0]: 8 (1) +[0]: 9 (1) +[0]: 10 (1) +[0]: 0 (3) +[0]: 1 (3) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (1) +[1]: 8 (1) +[1]: 9 (1) +[1]: 10 (1) +[1]: 11 (1) +[1]: 12 (1) +[1]: 0 (3) +[1]: 1 (3) +Label 'marker': +[0]: 5 (-2) +[0]: 3 (0) +[0]: 4 (0) +[1]: 5 (-2) +[1]: 2 (0) +[1]: 3 (0) +[1]: 4 (1) +Label 'fault': +[0]: 3 (0) +[0]: 4 (0) +[0]: 5 (0) +[0]: 7 (1) +[0]: 9 (1) +[0]: 6 (101) +[0]: 8 (101) +[0]: 10 (101) +[0]: 0 (102) +[0]: 1 (102) +[1]: 0 (-102) +[1]: 1 (-102) +[1]: 8 (-101) +[1]: 9 (-101) +[1]: 11 (-101) +[1]: 12 (-101) +[1]: 2 (0) +[1]: 3 (0) +[1]: 5 (0) +[1]: 7 (1) +[1]: 10 (1) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=11, leaves=5, remote ranks=1 + [0] 3 <- (1,2) + [0] 4 <- (1,3) + [0] 5 <- (1,5) + [0] 7 <- (1,7) + [0] 9 <- (1,10) + [1] Number of roots=13, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 5 edges + [0] 3 <- 2 + [0] 4 <- 3 + [0] 5 <- 5 + [0] 7 <- 7 + [0] 9 <- 10 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order +DM Object: tri_2_cv Mesh 2 MPI processes + type: plex +tri_2_cv Mesh in 2 dimensions: +Supports: +[0] Max support size: 4 +[0]: 4 ----> 11 +[0]: 4 ----> 16 +[0]: 4 ----> 17 +[0]: 5 ----> 12 +[0]: 5 ----> 14 +[0]: 5 ----> 18 +[0]: 6 ----> 13 +[0]: 6 ----> 15 +[0]: 6 ----> 16 +[0]: 6 ----> 18 +[0]: 7 ----> 12 +[0]: 7 ----> 19 +[0]: 8 ----> 11 +[0]: 8 ----> 13 +[0]: 8 ----> 19 +[0]: 9 ----> 14 +[0]: 9 ----> 20 +[0]: 10 ----> 15 +[0]: 10 ----> 17 +[0]: 10 ----> 20 +[0]: 11 ----> 0 +[0]: 12 ----> 2 +[0]: 13 ----> 0 +[0]: 13 ----> 2 +[0]: 14 ----> 3 +[0]: 15 ----> 1 +[0]: 15 ----> 3 +[0]: 16 ----> 0 +[0]: 16 ----> 1 +[0]: 17 ----> 1 +[0]: 18 ----> 2 +[0]: 18 ----> 3 +[0]: 19 ----> 2 +[0]: 20 ----> 3 +[1] Max support size: 5 +[1]: 4 ----> 12 +[1]: 4 ----> 14 +[1]: 4 ----> 17 +[1]: 4 ----> 18 +[1]: 4 ----> 20 +[1]: 5 ----> 13 +[1]: 5 ----> 15 +[1]: 5 ----> 20 +[1]: 6 ----> 12 +[1]: 6 ----> 16 +[1]: 6 ----> 21 +[1]: 7 ----> 13 +[1]: 7 ----> 21 +[1]: 8 ----> 14 +[1]: 8 ----> 19 +[1]: 8 ----> 22 +[1]: 9 ----> 15 +[1]: 9 ----> 22 +[1]: 10 ----> 16 +[1]: 10 ----> 17 +[1]: 11 ----> 18 +[1]: 11 ----> 19 +[1]: 12 ----> 0 +[1]: 12 ----> 2 +[1]: 13 ----> 2 +[1]: 14 ----> 1 +[1]: 14 ----> 3 +[1]: 15 ----> 3 +[1]: 16 ----> 0 +[1]: 17 ----> 0 +[1]: 18 ----> 1 +[1]: 19 ----> 1 +[1]: 20 ----> 2 +[1]: 20 ----> 3 +[1]: 21 ----> 2 +[1]: 22 ----> 3 +Cones: +[0] Max cone size: 4 +[0]: 0 <---- 11 (0) +[0]: 0 <---- 13 (0) +[0]: 0 <---- 16 (0) +[0]: 1 <---- 17 (0) +[0]: 1 <---- 16 (-1) +[0]: 1 <---- 15 (0) +[0]: 2 <---- 12 (0) +[0]: 2 <---- 13 (0) +[0]: 2 <---- 19 (0) +[0]: 2 <---- 18 (0) +[0]: 3 <---- 14 (0) +[0]: 3 <---- 15 (0) +[0]: 3 <---- 18 (0) +[0]: 3 <---- 20 (0) +[0]: 11 <---- 4 (0) +[0]: 11 <---- 8 (0) +[0]: 12 <---- 7 (0) +[0]: 12 <---- 5 (0) +[0]: 13 <---- 8 (0) +[0]: 13 <---- 6 (0) +[0]: 14 <---- 5 (0) +[0]: 14 <---- 9 (0) +[0]: 15 <---- 6 (0) +[0]: 15 <---- 10 (0) +[0]: 16 <---- 6 (0) +[0]: 16 <---- 4 (0) +[0]: 17 <---- 10 (0) +[0]: 17 <---- 4 (0) +[0]: 18 <---- 5 (0) +[0]: 18 <---- 6 (0) +[0]: 19 <---- 7 (0) +[0]: 19 <---- 8 (0) +[0]: 20 <---- 9 (0) +[0]: 20 <---- 10 (0) +[1] Max cone size: 4 +[1]: 0 <---- 12 (-1) +[1]: 0 <---- 16 (0) +[1]: 0 <---- 17 (0) +[1]: 1 <---- 14 (-1) +[1]: 1 <---- 18 (0) +[1]: 1 <---- 19 (0) +[1]: 2 <---- 12 (0) +[1]: 2 <---- 13 (0) +[1]: 2 <---- 21 (0) +[1]: 2 <---- 20 (0) +[1]: 3 <---- 14 (0) +[1]: 3 <---- 15 (0) +[1]: 3 <---- 20 (0) +[1]: 3 
<---- 22 (0) +[1]: 12 <---- 6 (0) +[1]: 12 <---- 4 (0) +[1]: 13 <---- 7 (0) +[1]: 13 <---- 5 (0) +[1]: 14 <---- 4 (0) +[1]: 14 <---- 8 (0) +[1]: 15 <---- 5 (0) +[1]: 15 <---- 9 (0) +[1]: 16 <---- 6 (0) +[1]: 16 <---- 10 (0) +[1]: 17 <---- 10 (0) +[1]: 17 <---- 4 (0) +[1]: 18 <---- 4 (0) +[1]: 18 <---- 11 (0) +[1]: 19 <---- 11 (0) +[1]: 19 <---- 8 (0) +[1]: 20 <---- 4 (0) +[1]: 20 <---- 5 (0) +[1]: 21 <---- 6 (0) +[1]: 21 <---- 7 (0) +[1]: 22 <---- 8 (0) +[1]: 22 <---- 9 (0) +coordinates with 1 fields + field 0 with 2 components +Process 0: + ( 4) dim 2 offset 0 -1. 0. + ( 5) dim 2 offset 2 0. 1. + ( 6) dim 2 offset 4 0. 1. + ( 7) dim 2 offset 6 0. -1. + ( 8) dim 2 offset 8 0. -1. + ( 9) dim 2 offset 10 -2. 1. + ( 10) dim 2 offset 12 -2. 1. +Process 1: + ( 4) dim 2 offset 0 0. 1. + ( 5) dim 2 offset 2 0. 1. + ( 6) dim 2 offset 4 0. -1. + ( 7) dim 2 offset 6 0. -1. + ( 8) dim 2 offset 8 -2. 1. + ( 9) dim 2 offset 10 -2. 1. + ( 10) dim 2 offset 12 1. 0. + ( 11) dim 2 offset 14 -1. 2. +Labels: +Label 'celltype': +[0]: 0 (3) +[0]: 1 (3) +[0]: 4 (0) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 18 (2) +[0]: 19 (2) +[0]: 20 (2) +[0]: 11 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 16 (1) +[0]: 17 (1) +[0]: 2 (5) +[0]: 3 (5) +[1]: 0 (3) +[1]: 1 (3) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 10 (0) +[1]: 11 (0) +[1]: 20 (2) +[1]: 21 (2) +[1]: 22 (2) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 15 (1) +[1]: 16 (1) +[1]: 17 (1) +[1]: 18 (1) +[1]: 19 (1) +[1]: 2 (5) +[1]: 3 (5) +Label 'marker': +[0]: 9 (-2) +[0]: 10 (-2) +[0]: 20 (-2) +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 18 (0) +[0]: 19 (0) +[1]: 8 (-2) +[1]: 9 (-2) +[1]: 22 (-2) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 20 (0) +[1]: 21 (0) +[1]: 10 (1) +Label 'fault': +[0]: 5 (0) +[0]: 6 (0) +[0]: 7 (0) +[0]: 8 (0) +[0]: 9 (0) +[0]: 10 (0) +[0]: 18 (0) +[0]: 19 (0) +[0]: 20 (0) +[0]: 2 (1) +[0]: 3 (1) +[0]: 12 (1) +[0]: 13 (1) +[0]: 14 (1) +[0]: 15 (1) +[0]: 11 (101) +[0]: 16 (101) +[0]: 17 (101) +[0]: 0 (102) +[0]: 1 (102) +[1]: 0 (-102) +[1]: 1 (-102) +[1]: 16 (-101) +[1]: 17 (-101) +[1]: 18 (-101) +[1]: 19 (-101) +[1]: 4 (0) +[1]: 5 (0) +[1]: 6 (0) +[1]: 7 (0) +[1]: 8 (0) +[1]: 9 (0) +[1]: 20 (0) +[1]: 21 (0) +[1]: 22 (0) +[1]: 2 (1) +[1]: 3 (1) +[1]: 12 (1) +[1]: 13 (1) +[1]: 14 (1) +[1]: 15 (1) +PetscSF Object: 2 MPI processes + type: basic + [0] Number of roots=21, leaves=15, remote ranks=1 + [0] 2 <- (1,2) + [0] 3 <- (1,3) + [0] 5 <- (1,4) + [0] 6 <- (1,5) + [0] 7 <- (1,6) + [0] 8 <- (1,7) + [0] 9 <- (1,8) + [0] 10 <- (1,9) + [0] 12 <- (1,12) + [0] 13 <- (1,13) + [0] 14 <- (1,14) + [0] 15 <- (1,15) + [0] 18 <- (1,20) + [0] 19 <- (1,21) + [0] 20 <- (1,22) + [1] Number of roots=23, leaves=0, remote ranks=0 + [0] Roots referenced by my leaves, by rank + [0] 1: 15 edges + [0] 2 <- 2 + [0] 3 <- 3 + [0] 5 <- 4 + [0] 6 <- 5 + [0] 7 <- 6 + [0] 8 <- 7 + [0] 9 <- 8 + [0] 10 <- 9 + [0] 12 <- 12 + [0] 13 <- 13 + [0] 14 <- 14 + [0] 15 <- 15 + [0] 18 <- 20 + [0] 19 <- 21 + [0] 20 <- 22 + [1] Roots referenced by my leaves, by rank + MultiSF sort=rank-order diff --git a/src/dm/impls/plex/tests/output/ex72_ball_0.out b/src/dm/impls/plex/tests/output/ex72_ball_0.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex72_cylinder_0.out b/src/dm/impls/plex/tests/output/ex72_cylinder_0.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_bdm_1.out 
b/src/dm/impls/plex/tests/output/ex73_bdm_1.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_p_1.out b/src/dm/impls/plex/tests/output/ex73_p_1.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_p_2.out b/src/dm/impls/plex/tests/output/ex73_p_2.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_p_3.out b/src/dm/impls/plex/tests/output/ex73_p_3.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_p_4.out b/src/dm/impls/plex/tests/output/ex73_p_4.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_q_1.out b/src/dm/impls/plex/tests/output/ex73_q_1.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_q_2.out b/src/dm/impls/plex/tests/output/ex73_q_2.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_q_3.out b/src/dm/impls/plex/tests/output/ex73_q_3.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_q_4.out b/src/dm/impls/plex/tests/output/ex73_q_4.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_rt_1.out b/src/dm/impls/plex/tests/output/ex73_rt_1.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex73_rtq_1.out b/src/dm/impls/plex/tests/output/ex73_rtq_1.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dm/impls/plex/tests/output/ex9_correctness_2.out b/src/dm/impls/plex/tests/output/ex9_correctness_2.out index fbec199efe2..ec2a250fdce 100644 --- a/src/dm/impls/plex/tests/output/ex9_correctness_2.out +++ b/src/dm/impls/plex/tests/output/ex9_correctness_2.out @@ -1,4 +1,4 @@ -[0] Cones: 131072 Total time: 4.203e-04s Average time per cone: 3.207e-09s -[0] Closures: 131072 Total time: 2.625e-02s Average time per cone: 2.003e-07s -[0] VecClosures: 131072 Total time: 5.952e-02s Average time per vector closure: 4.541e-07s -[0] VecClosures with Index: 131072 Total time: 6.382e-03s Average time per vector closure: 4.869e-08s +[0] Cones: 4096 Total time: 1.502e-05s Average time per cone: 3.667e-09s +[0] Closures: 4096 Total time: 6.642e-04s Average time per cone: 1.622e-07s +[0] VecClosures: 4096 Total time: 1.391e-03s Average time per vector closure: 3.396e-07s +[0] VecClosures with Index: 4096 Total time: 2.031e-04s Average time per vector closure: 4.959e-08s diff --git a/src/dm/impls/plex/transform/impls/extrude/plextrcohesive.c b/src/dm/impls/plex/transform/impls/extrude/plextrcohesive.c new file mode 100644 index 00000000000..0993a7be06a --- /dev/null +++ b/src/dm/impls/plex/transform/impls/extrude/plextrcohesive.c @@ -0,0 +1,1131 @@ +#include <petscdmplextransform.h> /*I "petscdmplextransform.h" I*/ + +#include <petsc/private/dmlabelimpl.h> // For DMLabelMakeAllInvalid_Internal() + +/* + The cohesive transformation extrudes cells into a mesh from faces along an internal boundary. + + Orientation: + + We will say that a face has a positive and negative side. The positive side is defined by the cell which attaches the face with a positive orientation, and the negative side cell attaches it with a negative orientation (a reflection). However, this means that the positive side is in the opposite direction of the face normal, and the negative side is in the direction of the face normal, since all cells have outward facing normals. For clarity, in 2D the cross product of the normal and the edge is in the positive z direction. + + Labeling: + + We require an active label on input, which marks all points on the internal surface. Each point is + labeled with its depth. This label is passed to DMPlexLabelCohesiveComplete(), which adds all points + which ``impinge'' on the surface, meaning a point has a face on the surface. These points are labeled + with celltype + 100 on the positive side, and -(celltype + 100) on the negative side. + + Point Creation: + + We split points on the fault surface, creating a new partner point for each one. The negative side + receives the old point, while the positive side receives the new partner. In addition, points are + created with the two split points as boundaries. For example, split vertices have a segment between + them, split edges a quadrilateral, split triangles a prism, and split quads a hexahedron. By + default, these spanning points have tensor ordering, but the user can choose to have them use the + outward normal convention instead. + +*/
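/* A minimal editorial sketch, not part of this diff, of driving the transform described
   above by hand: it follows the Labeling requirement by setting the fault label as the
   active label before setup. The DMPlexTransformCreate/SetType/SetDM/SetActive/
   SetFromOptions/SetUp/Apply calls are standard DMPlexTransform API; the type name
   DMPLEXCOHESIVEEXTRUDE and the function and variable names are assumptions for
   illustration. */
static PetscErrorCode ExtrudeCohesiveCells(DM dm, DMLabel fault, DM *cdm)
{
  DMPlexTransform tr;

  PetscFunctionBegin;
  PetscCall(DMPlexTransformCreate(PetscObjectComm((PetscObject)dm), &tr));
  PetscCall(DMPlexTransformSetType(tr, DMPLEXCOHESIVEEXTRUDE));
  PetscCall(DMPlexTransformSetDM(tr, dm));
  PetscCall(DMPlexTransformSetActive(tr, fault)); // label marking the internal surface
  PetscCall(DMPlexTransformSetFromOptions(tr));   // picks up the cohesive options handled below
  PetscCall(DMPlexTransformSetUp(tr));
  PetscCall(DMPlexTransformApply(tr, dm, cdm));   // *cdm is the mesh with cohesive cells inserted
  PetscCall(DMPlexTransformDestroy(&tr));
  PetscFunctionReturn(PETSC_SUCCESS);
}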
+ +static PetscErrorCode DMPlexTransformView_Cohesive(DMPlexTransform tr, PetscViewer viewer) +{ + DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data; + PetscBool isascii; + + PetscFunctionBegin; + PetscValidHeaderSpecific(tr, DMPLEXTRANSFORM_CLASSID, 1); + PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); + PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); + if (isascii) { + const char *name; + + PetscCall(PetscObjectGetName((PetscObject)tr, &name)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Cohesive extrusion transformation %s\n", name ? name : "")); + PetscCall(PetscViewerASCIIPrintf(viewer, " create tensor cells: %s\n", ex->useTensor ? "YES" : "NO")); + } else { + SETERRQ(PetscObjectComm((PetscObject)tr), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMPlexTransform writing", ((PetscObject)viewer)->type_name); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode DMPlexTransformSetFromOptions_Cohesive(DMPlexTransform tr, PetscOptionItems *PetscOptionsObject) +{ + DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data; + PetscReal width; + PetscBool tensor, flg; + + PetscFunctionBegin; + PetscOptionsHeadBegin(PetscOptionsObject, "DMPlexTransform Cohesive Extrusion Options"); + PetscCall(PetscOptionsBool("-dm_plex_transform_extrude_use_tensor", "Create tensor cells", "", ex->useTensor, &tensor, &flg)); + if (flg) PetscCall(DMPlexTransformCohesiveExtrudeSetTensor(tr, tensor)); + PetscCall(PetscOptionsReal("-dm_plex_transform_cohesive_width", "Width of a cohesive cell", "", ex->width, &width, &flg)); + if (flg) PetscCall(DMPlexTransformCohesiveExtrudeSetWidth(tr, width)); + PetscCall(PetscOptionsInt("-dm_plex_transform_cohesive_debug", "Set debugging level", "", ex->debug, &ex->debug, NULL)); + PetscOptionsHeadEnd(); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* + ComputeSplitFaceNumber - Compute an encoding describing which faces of p are split by the surface + + Not collective + + Input Parameters: + + dm - The `DM` + . label - `DMLabel` marking the surface and adjacent points + - p - Impinging point, adjacent to the surface + + Output Parameter: + . fsplit - A number encoding the faces which are split by the surface + + Level: developer + + Note: We will use a bit encoding, where bit k is 1 if face k is split. + +.seealso: ComputeUnsplitFaceNumber() +*/ +static PetscErrorCode ComputeSplitFaceNumber(DM dm, DMLabel label, PetscInt p, PetscInt *fsplit) +{ + const PetscInt *cone; + PetscInt coneSize, val; + + PetscFunctionBegin; + *fsplit = 0; + PetscCall(DMPlexGetCone(dm, p, &cone)); + PetscCall(DMPlexGetConeSize(dm, p, &coneSize)); + PetscCheck(coneSize < (PetscInt)sizeof(*fsplit) * 8, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cone of size %" PetscInt_FMT " is too large to be contained in an integer", coneSize); + for (PetscInt c = 0; c < coneSize; ++c) { + PetscCall(DMLabelGetValue(label, cone[c], &val)); + if (val >= 0 && val < 100) *fsplit |= 1 << c; + } + PetscFunctionReturn(PETSC_SUCCESS); +}
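/* Editorial worked example of the bit encoding computed by ComputeSplitFaceNumber() above
   (the point and face names are hypothetical): for a triangular cell p with cone
   (e0, e1, e2), if e0 and e2 carry surface label values (0 <= val < 100) while e1 does
   not, the loop sets

     *fsplit = (1 << 0) | (1 << 2) = 5   (binary 101)

   ComputeUnsplitFaceNumber() below builds the analogous mask from label values >= 200. */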
fsplit - A number encoding the faces which are split by the surface
+
+  Level: developer
+
+  Note: We will use a bit encoding, where bit k is 1 if face k is split.
+
+.seealso: ComputeUnsplitFaceNumber()
+*/
+static PetscErrorCode ComputeSplitFaceNumber(DM dm, DMLabel label, PetscInt p, PetscInt *fsplit)
+{
+  const PetscInt *cone;
+  PetscInt coneSize, val;
+
+  PetscFunctionBegin;
+  *fsplit = 0;
+  PetscCall(DMPlexGetCone(dm, p, &cone));
+  PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
+  PetscCheck(coneSize < (PetscInt)sizeof(*fsplit) * 8, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cone of size %" PetscInt_FMT " is too large to be contained in an integer", coneSize);
+  for (PetscInt c = 0; c < coneSize; ++c) {
+    PetscCall(DMLabelGetValue(label, cone[c], &val));
+    if (val >= 0 && val < 100) *fsplit |= 1 << c;
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/*
+  ComputeUnsplitFaceNumber - Compute an encoding describing which faces of p are unsplit by the surface
+
+  Not Collective
+
+  Input Parameters:
++ dm - The `DM`
+. label - `DMLabel` marking the surface and adjacent points
+- p - Split point, on the surface
+
+  Output Parameter:
+. funsplit - A number encoding the faces which are unsplit by the surface
+
+  Level: developer
+
+  Note: We will use a bit encoding, where bit k is 1 if face k is unsplit.
+
+.seealso: ComputeSplitFaceNumber()
+*/
+static PetscErrorCode ComputeUnsplitFaceNumber(DM dm, DMLabel label, PetscInt p, PetscInt *funsplit)
+{
+  const PetscInt *cone;
+  PetscInt coneSize, val;
+
+  PetscFunctionBegin;
+  *funsplit = 0;
+  PetscCall(DMPlexGetCone(dm, p, &cone));
+  PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
+  PetscCheck(coneSize < (PetscInt)sizeof(*funsplit) * 8, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cone of size %" PetscInt_FMT " is too large to be contained in an integer", coneSize);
+  for (PetscInt c = 0; c < coneSize; ++c) {
+    PetscCall(DMLabelGetValue(label, cone[c], &val));
+    if (val >= 200) *funsplit |= 1 << c;
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/* DM_POLYTOPE_POINT produces
+  2 points when split, or 1 point when unsplit, and
+  1 segment, or tensor segment
+*/
+static PetscErrorCode DMPlexTransformCohesiveExtrudeSetUp_Point(DMPlexTransform_Cohesive *ex)
+{
+  PetscInt rt, Nc, No;
+
+  PetscFunctionBegin;
+  // Unsplit vertex
+  rt = DM_POLYTOPE_POINT * 2 + 1;
+  ex->Nt[rt] = 2;
+  Nc = 6;
+  No = 2;
+  PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt]));
+  ex->target[rt][0] = DM_POLYTOPE_POINT;
+  ex->target[rt][1] = ex->useTensor ? DM_POLYTOPE_POINT_PRISM_TENSOR : DM_POLYTOPE_SEGMENT;
+  ex->size[rt][0] = 1;
+  ex->size[rt][1] = 1;
+  // cone for segment/tensor segment
+  ex->cone[rt][0] = DM_POLYTOPE_POINT;
+  ex->cone[rt][1] = 0;
+  ex->cone[rt][2] = 0;
+  ex->cone[rt][3] = DM_POLYTOPE_POINT;
+  ex->cone[rt][4] = 0;
+  ex->cone[rt][5] = 0;
+  for (PetscInt i = 0; i < No; ++i) ex->ornt[rt][i] = 0;
+  // Split vertex
+  rt = (DM_POLYTOPE_POINT * 2 + 1) * 100 + 0;
+  ex->Nt[rt] = 2;
+  Nc = 6;
+  No = 2;
+  PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt]));
+  ex->target[rt][0] = DM_POLYTOPE_POINT;
+  ex->target[rt][1] = ex->useTensor ? 
DM_POLYTOPE_POINT_PRISM_TENSOR : DM_POLYTOPE_SEGMENT; + ex->size[rt][0] = 2; + ex->size[rt][1] = 1; + // cone for segment/tensor segment + ex->cone[rt][0] = DM_POLYTOPE_POINT; + ex->cone[rt][1] = 0; + ex->cone[rt][2] = 0; + ex->cone[rt][3] = DM_POLYTOPE_POINT; + ex->cone[rt][4] = 0; + ex->cone[rt][5] = 1; + for (PetscInt i = 0; i < No; ++i) ex->ornt[rt][i] = 0; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* DM_POLYTOPE_SEGMENT produces + 2 segments when split, or 1 segment when unsplit, and + 1 quad, or tensor quad +*/ +static PetscErrorCode DMPlexTransformCohesiveExtrudeSetUp_Segment(DMPlexTransform_Cohesive *ex) +{ + PetscInt rt, Nc, No, coff, ooff; + + PetscFunctionBegin; + // Unsplit segment + rt = DM_POLYTOPE_SEGMENT * 2 + 1; + ex->Nt[rt] = 2; + Nc = 8 + 14; + No = 2 + 4; + PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt])); + ex->target[rt][0] = DM_POLYTOPE_SEGMENT; + ex->target[rt][1] = ex->useTensor ? DM_POLYTOPE_SEG_PRISM_TENSOR : DM_POLYTOPE_QUADRILATERAL; + ex->size[rt][0] = 1; + ex->size[rt][1] = 1; + // cones for segment + ex->cone[rt][0] = DM_POLYTOPE_POINT; + ex->cone[rt][1] = 1; + ex->cone[rt][2] = 0; + ex->cone[rt][3] = 0; + ex->cone[rt][4] = DM_POLYTOPE_POINT; + ex->cone[rt][5] = 1; + ex->cone[rt][6] = 1; + ex->cone[rt][7] = 0; + for (PetscInt i = 0; i < 2; ++i) ex->ornt[rt][i] = 0; + // cone for quad/tensor quad + coff = 8; + ooff = 2; + if (ex->useTensor) { + ex->cone[rt][coff + 0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 4] = 0; + ex->cone[rt][coff + 5] = 0; + ex->cone[rt][coff + 6] = DM_POLYTOPE_POINT_PRISM_TENSOR; + ex->cone[rt][coff + 7] = 1; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 0; + ex->cone[rt][coff + 10] = DM_POLYTOPE_POINT_PRISM_TENSOR; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 1; + ex->cone[rt][coff + 13] = 0; + ex->ornt[rt][ooff + 0] = 0; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = 0; + ex->ornt[rt][ooff + 3] = 0; + } else { + ex->cone[rt][coff + 0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 4] = 1; + ex->cone[rt][coff + 5] = 1; + ex->cone[rt][coff + 6] = 0; + ex->cone[rt][coff + 7] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 0; + ex->cone[rt][coff + 10] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 0; + ex->cone[rt][coff + 13] = 0; + ex->ornt[rt][ooff + 0] = 0; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = -1; + ex->ornt[rt][ooff + 3] = -1; + } + // Split segment + // 0: no unsplit vertex + // 1: unsplit vertex 0 + // 2: unsplit vertex 1 + // 3: both vertices unsplit (impossible) + for (PetscInt s = 0; s < 3; ++s) { + rt = (DM_POLYTOPE_SEGMENT * 2 + 1) * 100 + s; + ex->Nt[rt] = 2; + Nc = 8 * 2 + 14; + No = 2 * 2 + 4; + PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt])); + ex->target[rt][0] = DM_POLYTOPE_SEGMENT; + ex->target[rt][1] = ex->useTensor ? DM_POLYTOPE_SEG_PRISM_TENSOR : DM_POLYTOPE_QUADRILATERAL; + ex->size[rt][0] = 2; + ex->size[rt][1] = 1; + // cones for segments + for (PetscInt i = 0; i < 2; ++i) { + ex->cone[rt][8 * i + 0] = DM_POLYTOPE_POINT; + ex->cone[rt][8 * i + 1] = 1; + ex->cone[rt][8 * i + 2] = 0; + ex->cone[rt][8 * i + 3] = s == 1 ? 
0 : i; + ex->cone[rt][8 * i + 4] = DM_POLYTOPE_POINT; + ex->cone[rt][8 * i + 5] = 1; + ex->cone[rt][8 * i + 6] = 1; + ex->cone[rt][8 * i + 7] = s == 2 ? 0 : i; + } + for (PetscInt i = 0; i < 2 * 2; ++i) ex->ornt[rt][i] = 0; + // cone for quad/tensor quad + coff = 8 * 2; + ooff = 2 * 2; + if (ex->useTensor) { + ex->cone[rt][coff + 0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 4] = 0; + ex->cone[rt][coff + 5] = 1; + ex->cone[rt][coff + 6] = DM_POLYTOPE_POINT_PRISM_TENSOR; + ex->cone[rt][coff + 7] = 1; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 0; + ex->cone[rt][coff + 10] = DM_POLYTOPE_POINT_PRISM_TENSOR; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 1; + ex->cone[rt][coff + 13] = 0; + ex->ornt[rt][ooff + 0] = 0; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = 0; + ex->ornt[rt][ooff + 3] = 0; + } else { + ex->cone[rt][coff + 0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 4] = 1; + ex->cone[rt][coff + 5] = 1; + ex->cone[rt][coff + 6] = 0; + ex->cone[rt][coff + 7] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 1; + ex->cone[rt][coff + 10] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 0; + ex->cone[rt][coff + 13] = 0; + ex->ornt[rt][ooff + 0] = 0; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = -1; + ex->ornt[rt][ooff + 3] = -1; + } + } + // Impinging segment + // 0: no splits (impossible) + // 1: split vertex 0 + // 2: split vertex 1 + // 3: split both vertices (impossible) + for (PetscInt s = 1; s < 3; ++s) { + rt = (DM_POLYTOPE_SEGMENT * 2 + 0) * 100 + s; + ex->Nt[rt] = 1; + Nc = 8; + No = 2; + PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt])); + ex->target[rt][0] = DM_POLYTOPE_SEGMENT; + ex->size[rt][0] = 1; + // cone for segment + ex->cone[rt][0] = DM_POLYTOPE_POINT; + ex->cone[rt][1] = 1; + ex->cone[rt][2] = 0; + ex->cone[rt][3] = s == 1 ? 1 : 0; + ex->cone[rt][4] = DM_POLYTOPE_POINT; + ex->cone[rt][5] = 1; + ex->cone[rt][6] = 1; + ex->cone[rt][7] = s == 2 ? 1 : 0; + for (PetscInt i = 0; i < 2; ++i) ex->ornt[rt][i] = 0; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* DM_POLYTOPE_TRIANGLE produces + 2 triangles, and + 1 triangular prism/tensor triangular prism +*/ +static PetscErrorCode DMPlexTransformCohesiveExtrudeSetUp_Triangle(DMPlexTransform_Cohesive *ex) +{ + PetscInt rt, Nc, No, coff, ooff; + + PetscFunctionBegin; + // No unsplit triangles + // Split triangles + // 0: no unsplit edge + // 1: unsplit edge 0 + // 2: unsplit edge 1 + // 3: unsplit edge 0 1 + // 4: unsplit edge 2 + // 5: unsplit edge 0 2 + // 6: unsplit edge 1 2 + // 7: all edges unsplit (impossible) + for (PetscInt s = 0; s < 7; ++s) { + rt = (DM_POLYTOPE_TRIANGLE * 2 + 1) * 100 + s; + ex->Nt[rt] = 2; + Nc = 12 * 2 + 18; + No = 3 * 2 + 5; + PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt])); + ex->target[rt][0] = DM_POLYTOPE_TRIANGLE; + ex->target[rt][1] = ex->useTensor ? 
DM_POLYTOPE_TRI_PRISM_TENSOR : DM_POLYTOPE_TRI_PRISM; + ex->size[rt][0] = 2; + ex->size[rt][1] = 1; + // cones for triangles + for (PetscInt i = 0; i < 2; ++i) { + ex->cone[rt][12 * i + 0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][12 * i + 1] = 1; + ex->cone[rt][12 * i + 2] = 0; + ex->cone[rt][12 * i + 3] = s & 1 ? 0 : i; + ex->cone[rt][12 * i + 4] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][12 * i + 5] = 1; + ex->cone[rt][12 * i + 6] = 1; + ex->cone[rt][12 * i + 7] = s & 2 ? 0 : i; + ex->cone[rt][12 * i + 8] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][12 * i + 9] = 1; + ex->cone[rt][12 * i + 10] = 2; + ex->cone[rt][12 * i + 11] = s & 4 ? 0 : i; + } + for (PetscInt i = 0; i < 3 * 2; ++i) ex->ornt[rt][i] = 0; + // cone for triangular prism/tensor triangular prism + coff = 12 * 2; + ooff = 3 * 2; + if (ex->useTensor) { + ex->cone[rt][coff + 0] = DM_POLYTOPE_TRIANGLE; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_TRIANGLE; + ex->cone[rt][coff + 4] = 0; + ex->cone[rt][coff + 5] = 1; + ex->cone[rt][coff + 6] = DM_POLYTOPE_SEG_PRISM_TENSOR; + ex->cone[rt][coff + 7] = 1; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 0; + ex->cone[rt][coff + 10] = DM_POLYTOPE_SEG_PRISM_TENSOR; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 1; + ex->cone[rt][coff + 13] = 0; + ex->cone[rt][coff + 14] = DM_POLYTOPE_SEG_PRISM_TENSOR; + ex->cone[rt][coff + 15] = 1; + ex->cone[rt][coff + 16] = 2; + ex->cone[rt][coff + 17] = 0; + ex->ornt[rt][ooff + 0] = 0; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = 0; + ex->ornt[rt][ooff + 3] = 0; + ex->ornt[rt][ooff + 4] = 0; + } else { + ex->cone[rt][coff + 0] = DM_POLYTOPE_TRIANGLE; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_TRIANGLE; + ex->cone[rt][coff + 4] = 0; + ex->cone[rt][coff + 5] = 1; + ex->cone[rt][coff + 6] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 7] = 1; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 0; + ex->cone[rt][coff + 10] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 1; + ex->cone[rt][coff + 13] = 0; + ex->cone[rt][coff + 14] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 15] = 1; + ex->cone[rt][coff + 16] = 2; + ex->cone[rt][coff + 17] = 0; + ex->ornt[rt][ooff + 0] = -2; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = 0; + ex->ornt[rt][ooff + 3] = 0; + ex->ornt[rt][ooff + 4] = 0; + } + } + // Impinging triangles + // 0: no splits (impossible) + // 1: split edge 0 + // 2: split edge 1 + // 3: split edges 0 and 1 + // 4: split edge 2 + // 5: split edges 0 and 2 + // 6: split edges 1 and 2 + // 7: split all edges (impossible) + for (PetscInt s = 1; s < 7; ++s) { + rt = (DM_POLYTOPE_TRIANGLE * 2 + 0) * 100 + s; + ex->Nt[rt] = 1; + Nc = 12; + No = 3; + PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt])); + ex->target[rt][0] = DM_POLYTOPE_TRIANGLE; + ex->size[rt][0] = 1; + // cone for triangle + ex->cone[rt][0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][1] = 1; + ex->cone[rt][2] = 0; + ex->cone[rt][3] = s & 1 ? 1 : 0; + ex->cone[rt][4] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][5] = 1; + ex->cone[rt][6] = 1; + ex->cone[rt][7] = s & 2 ? 1 : 0; + ex->cone[rt][8] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][9] = 1; + ex->cone[rt][10] = 2; + ex->cone[rt][11] = s & 4 ? 
1 : 0; + for (PetscInt i = 0; i < 3; ++i) ex->ornt[rt][i] = 0; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* DM_POLYTOPE_QUADRILATERAL produces + 2 quads, and + 1 hex/tensor hex +*/ +static PetscErrorCode DMPlexTransformCohesiveExtrudeSetUp_Quadrilateral(DMPlexTransform_Cohesive *ex) +{ + PetscInt rt, Nc, No, coff, ooff; + + PetscFunctionBegin; + // No unsplit quadrilaterals + // Split quadrilateral + // 0: no unsplit edge + // 1: unsplit edge 0 + // 2: unsplit edge 1 + // 3: unsplit edge 0 1 + // 4: unsplit edge 2 + // 5: unsplit edge 0 2 + // 6: unsplit edge 1 2 + // 7: unsplit edge 0 1 2 + // 8: unsplit edge 3 + // 9: unsplit edge 0 3 + // 10: unsplit edge 1 3 + // 11: unsplit edge 0 1 3 + // 12: unsplit edge 2 3 + // 13: unsplit edge 0 2 3 + // 14: unsplit edge 1 2 3 + // 15: all edges unsplit (impossible) + for (PetscInt s = 0; s < 15; ++s) { + rt = (DM_POLYTOPE_QUADRILATERAL * 2 + 1) * 100 + s; + ex->Nt[rt] = 2; + Nc = 16 * 2 + 22; + No = 4 * 2 + 6; + PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt])); + ex->target[rt][0] = DM_POLYTOPE_QUADRILATERAL; + ex->target[rt][1] = ex->useTensor ? DM_POLYTOPE_QUAD_PRISM_TENSOR : DM_POLYTOPE_HEXAHEDRON; + ex->size[rt][0] = 2; + ex->size[rt][1] = 1; + // cones for quads + for (PetscInt i = 0; i < 2; ++i) { + ex->cone[rt][16 * i + 0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][16 * i + 1] = 1; + ex->cone[rt][16 * i + 2] = 0; + ex->cone[rt][16 * i + 3] = s & 1 ? 0 : i; + ex->cone[rt][16 * i + 4] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][16 * i + 5] = 1; + ex->cone[rt][16 * i + 6] = 1; + ex->cone[rt][16 * i + 7] = s & 2 ? 0 : i; + ex->cone[rt][16 * i + 8] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][16 * i + 9] = 1; + ex->cone[rt][16 * i + 10] = 2; + ex->cone[rt][16 * i + 11] = s & 4 ? 0 : i; + ex->cone[rt][16 * i + 12] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][16 * i + 13] = 1; + ex->cone[rt][16 * i + 14] = 3; + ex->cone[rt][16 * i + 15] = s & 8 ? 
0 : i; + } + for (PetscInt i = 0; i < 4 * 2; ++i) ex->ornt[rt][i] = 0; + // cones for hexes/tensor hexes + coff = 16 * 2; + ooff = 4 * 2; + if (ex->useTensor) { + ex->cone[rt][coff + 0] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 4] = 0; + ex->cone[rt][coff + 5] = 1; + ex->cone[rt][coff + 6] = DM_POLYTOPE_SEG_PRISM_TENSOR; + ex->cone[rt][coff + 7] = 1; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 0; + ex->cone[rt][coff + 10] = DM_POLYTOPE_SEG_PRISM_TENSOR; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 1; + ex->cone[rt][coff + 13] = 0; + ex->cone[rt][coff + 14] = DM_POLYTOPE_SEG_PRISM_TENSOR; + ex->cone[rt][coff + 15] = 1; + ex->cone[rt][coff + 16] = 2; + ex->cone[rt][coff + 17] = 0; + ex->cone[rt][coff + 18] = DM_POLYTOPE_SEG_PRISM_TENSOR; + ex->cone[rt][coff + 19] = 1; + ex->cone[rt][coff + 20] = 3; + ex->cone[rt][coff + 21] = 0; + ex->ornt[rt][ooff + 0] = 0; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = 0; + ex->ornt[rt][ooff + 3] = 0; + ex->ornt[rt][ooff + 4] = 0; + ex->ornt[rt][ooff + 5] = 0; + } else { + ex->cone[rt][coff + 0] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 1] = 0; + ex->cone[rt][coff + 2] = 0; + ex->cone[rt][coff + 3] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 4] = 0; + ex->cone[rt][coff + 5] = 1; + ex->cone[rt][coff + 6] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 7] = 1; + ex->cone[rt][coff + 8] = 0; + ex->cone[rt][coff + 9] = 0; + ex->cone[rt][coff + 10] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 11] = 1; + ex->cone[rt][coff + 12] = 2; + ex->cone[rt][coff + 13] = 0; + ex->cone[rt][coff + 14] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 15] = 1; + ex->cone[rt][coff + 16] = 1; + ex->cone[rt][coff + 17] = 0; + ex->cone[rt][coff + 18] = DM_POLYTOPE_QUADRILATERAL; + ex->cone[rt][coff + 19] = 1; + ex->cone[rt][coff + 20] = 3; + ex->cone[rt][coff + 21] = 0; + ex->ornt[rt][ooff + 0] = -2; + ex->ornt[rt][ooff + 1] = 0; + ex->ornt[rt][ooff + 2] = 0; + ex->ornt[rt][ooff + 3] = 0; + ex->ornt[rt][ooff + 4] = 0; + ex->ornt[rt][ooff + 5] = 1; + } + } + // Impinging quadrilaterals + // 0: no splits (impossible) + // 1: split edge 0 + // 2: split edge 1 + // 3: split edges 0 and 1 + // 4: split edge 2 + // 5: split edges 0 and 2 + // 6: split edges 1 and 2 + // 7: split edges 0, 1, and 2 + // 8: split edge 3 + // 9: split edges 0 and 3 + // 10: split edges 1 and 3 + // 11: split edges 0, 1, and 3 + // 12: split edges 2 and 3 + // 13: split edges 0, 2, and 3 + // 14: split edges 1, 2, and 3 + // 15: split all edges (impossible) + for (PetscInt s = 1; s < 15; ++s) { + rt = (DM_POLYTOPE_QUADRILATERAL * 2 + 0) * 100 + s; + ex->Nt[rt] = 1; + Nc = 16; + No = 4; + PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt])); + ex->target[rt][0] = DM_POLYTOPE_QUADRILATERAL; + ex->size[rt][0] = 1; + // cone for quadrilateral + ex->cone[rt][0] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][1] = 1; + ex->cone[rt][2] = 0; + ex->cone[rt][3] = s & 1 ? 1 : 0; + ex->cone[rt][4] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][5] = 1; + ex->cone[rt][6] = 1; + ex->cone[rt][7] = s & 2 ? 1 : 0; + ex->cone[rt][8] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][9] = 1; + ex->cone[rt][10] = 2; + ex->cone[rt][11] = s & 4 ? 1 : 0; + ex->cone[rt][12] = DM_POLYTOPE_SEGMENT; + ex->cone[rt][13] = 1; + ex->cone[rt][14] = 3; + ex->cone[rt][15] = s & 8 ? 
1 : 0;
+    for (PetscInt i = 0; i < 4; ++i) ex->ornt[rt][i] = 0;
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode DMPlexTransformCohesiveExtrudeSetUp_Tetrahedron(DMPlexTransform_Cohesive *ex)
+{
+  PetscInt rt, Nc, No;
+
+  PetscFunctionBegin;
+  // Impinging tetrahedra
+  //  0: no splits (impossible)
+  //  1: split face 0
+  //  2: split face 1
+  //  3: split faces 0 and 1
+  //  4: split face 2
+  //  5: split faces 0 and 2
+  //  6: split faces 1 and 2
+  //  7: split faces 0, 1, and 2
+  //  8: split face 3
+  //  9: split faces 0 and 3
+  // 10: split faces 1 and 3
+  // 11: split faces 0, 1, and 3
+  // 12: split faces 2 and 3
+  // 13: split faces 0, 2, and 3
+  // 14: split faces 1, 2, and 3
+  // 15: split all faces (impossible)
+  for (PetscInt s = 1; s < 15; ++s) {
+    rt = (DM_POLYTOPE_TETRAHEDRON * 2 + 0) * 100 + s;
+    ex->Nt[rt] = 1;
+    Nc = 16;
+    No = 4;
+    PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt]));
+    ex->target[rt][0] = DM_POLYTOPE_TETRAHEDRON;
+    ex->size[rt][0] = 1;
+    // cone for tetrahedron
+    ex->cone[rt][0] = DM_POLYTOPE_TRIANGLE;
+    ex->cone[rt][1] = 1;
+    ex->cone[rt][2] = 0;
+    ex->cone[rt][3] = s & 1 ? 1 : 0;
+    ex->cone[rt][4] = DM_POLYTOPE_TRIANGLE;
+    ex->cone[rt][5] = 1;
+    ex->cone[rt][6] = 1;
+    ex->cone[rt][7] = s & 2 ? 1 : 0;
+    ex->cone[rt][8] = DM_POLYTOPE_TRIANGLE;
+    ex->cone[rt][9] = 1;
+    ex->cone[rt][10] = 2;
+    ex->cone[rt][11] = s & 4 ? 1 : 0;
+    ex->cone[rt][12] = DM_POLYTOPE_TRIANGLE;
+    ex->cone[rt][13] = 1;
+    ex->cone[rt][14] = 3;
+    ex->cone[rt][15] = s & 8 ? 1 : 0;
+    for (PetscInt i = 0; i < 4; ++i) ex->ornt[rt][i] = 0;
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode DMPlexTransformCohesiveExtrudeSetUp_Hexahedron(DMPlexTransform_Cohesive *ex)
+{
+  PetscInt rt, Nc, No;
+
+  PetscFunctionBegin;
+  // Impinging hexahedra
+  //  0: no splits (impossible)
+  //  bit is set if the face is split
+  // 63: split all faces (impossible)
+  for (PetscInt s = 1; s < 63; ++s) {
+    rt = (DM_POLYTOPE_HEXAHEDRON * 2 + 0) * 100 + s;
+    ex->Nt[rt] = 1;
+    Nc = 24;
+    No = 6;
+    PetscCall(PetscMalloc4(ex->Nt[rt], &ex->target[rt], ex->Nt[rt], &ex->size[rt], Nc, &ex->cone[rt], No, &ex->ornt[rt]));
+    ex->target[rt][0] = DM_POLYTOPE_HEXAHEDRON;
+    ex->size[rt][0] = 1;
+    // cone for hexahedron
+    ex->cone[rt][0] = DM_POLYTOPE_QUADRILATERAL;
+    ex->cone[rt][1] = 1;
+    ex->cone[rt][2] = 0;
+    ex->cone[rt][3] = s & 1 ? 1 : 0;
+    ex->cone[rt][4] = DM_POLYTOPE_QUADRILATERAL;
+    ex->cone[rt][5] = 1;
+    ex->cone[rt][6] = 1;
+    ex->cone[rt][7] = s & 2 ? 1 : 0;
+    ex->cone[rt][8] = DM_POLYTOPE_QUADRILATERAL;
+    ex->cone[rt][9] = 1;
+    ex->cone[rt][10] = 2;
+    ex->cone[rt][11] = s & 4 ? 1 : 0;
+    ex->cone[rt][12] = DM_POLYTOPE_QUADRILATERAL;
+    ex->cone[rt][13] = 1;
+    ex->cone[rt][14] = 3;
+    ex->cone[rt][15] = s & 8 ? 1 : 0;
+    ex->cone[rt][16] = DM_POLYTOPE_QUADRILATERAL;
+    ex->cone[rt][17] = 1;
+    ex->cone[rt][18] = 4;
+    ex->cone[rt][19] = s & 16 ? 1 : 0;
+    ex->cone[rt][20] = DM_POLYTOPE_QUADRILATERAL;
+    ex->cone[rt][21] = 1;
+    ex->cone[rt][22] = 5;
+    ex->cone[rt][23] = s & 32 ? 
1 : 0;
+    for (PetscInt i = 0; i < 6; ++i) ex->ornt[rt][i] = 0;
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/*
+  The refine types for cohesive extrusion are:
+
+  ct * 2 + 0: For any point which should just return itself
+  ct * 2 + 1: For unsplit points
+  (ct * 2 + 0) * 100 + fsplit: For impinging points, one type for each combination of split faces
+  (ct * 2 + 1) * 100 + funsplit: For split points, one type for each combination of unsplit faces
+*/
+static PetscErrorCode DMPlexTransformSetUp_Cohesive(DMPlexTransform tr)
+{
+  DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data;
+  DM dm;
+  DMLabel active, celltype;
+  PetscInt numRt, pStart, pEnd, ict;
+
+  PetscFunctionBegin;
+  PetscCall(DMPlexTransformGetDM(tr, &dm));
+  PetscCall(DMPlexTransformGetActive(tr, &active));
+  PetscCheck(active, PetscObjectComm((PetscObject)tr), PETSC_ERR_ARG_WRONG, "Cohesive extrusion requires an active label");
+  PetscCall(DMPlexGetCellTypeLabel(dm, &celltype));
+  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Refine Type", &tr->trType));
+  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
+  PetscCall(DMLabelMakeAllInvalid_Internal(active));
+  for (PetscInt p = pStart; p < pEnd; ++p) {
+    PetscInt ct, val;
+
+    PetscCall(DMLabelGetValue(celltype, p, &ct));
+    PetscCall(DMLabelGetValue(active, p, &val));
+    if (val < 0) {
+      // Also negative side impinging points
+      // ct * 2 + 0 is the identity transform
+      PetscCall(DMLabelSetValue(tr->trType, p, ct * 2 + 0));
+    } else {
+      PetscInt fsplit = -1, funsplit = -1;
+
+      // Unsplit points ct * 2 + 1
+      if (val >= 200) {
+        // Cohesive cells cannot be unsplit
+        // This is faulty inheritance through the label
+        if (ct == DM_POLYTOPE_POINT_PRISM_TENSOR || ct == DM_POLYTOPE_SEG_PRISM_TENSOR || ct == DM_POLYTOPE_TRI_PRISM_TENSOR || ct == DM_POLYTOPE_QUAD_PRISM_TENSOR) PetscCall(DMLabelSetValue(tr->trType, p, ct * 2 + 0));
+        else PetscCall(DMLabelSetValue(tr->trType, p, ct * 2 + 1));
+      } else if (val >= 100) {
+        // Impinging points: (ct * 2 + 0) * 100 + fsplit
+        PetscCall(ComputeSplitFaceNumber(dm, active, p, &fsplit));
+        if (!fsplit) PetscCall(DMLabelSetValue(tr->trType, p, ct * 2 + 0));
+        else PetscCall(DMLabelSetValue(tr->trType, p, (ct * 2 + 0) * 100 + fsplit));
+      } else {
+        // Split points: (ct * 2 + 1) * 100 + funsplit
+        PetscCall(ComputeUnsplitFaceNumber(dm, active, p, &funsplit));
+        PetscCall(DMLabelSetValue(tr->trType, p, (ct * 2 + 1) * 100 + funsplit));
+      }
+    }
+  }
+  if (ex->debug) {
+    PetscCall(DMLabelView(active, NULL));
+    PetscCall(DMLabelView(tr->trType, NULL));
+  }
+  numRt = DM_NUM_POLYTOPES * 2 * 100;
+  PetscCall(PetscMalloc5(numRt, &ex->Nt, numRt, &ex->target, numRt, &ex->size, numRt, &ex->cone, numRt, &ex->ornt));
+  for (ict = 0; ict < numRt; ++ict) {
+    ex->Nt[ict] = -1;
+    ex->target[ict] = NULL;
+    ex->size[ict] = NULL;
+    ex->cone[ict] = NULL;
+    ex->ornt[ict] = NULL;
+  }
+  PetscCall(DMPlexTransformCohesiveExtrudeSetUp_Point(ex));
+  PetscCall(DMPlexTransformCohesiveExtrudeSetUp_Segment(ex));
+  PetscCall(DMPlexTransformCohesiveExtrudeSetUp_Triangle(ex));
+  PetscCall(DMPlexTransformCohesiveExtrudeSetUp_Quadrilateral(ex));
+  PetscCall(DMPlexTransformCohesiveExtrudeSetUp_Tetrahedron(ex));
+  PetscCall(DMPlexTransformCohesiveExtrudeSetUp_Hexahedron(ex));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode DMPlexTransformDestroy_Cohesive(DMPlexTransform tr)
+{
+  DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data;
+  PetscInt ct;
+
+  PetscFunctionBegin;
+  if (ex->target) {
+    for (ct = 0; ct < DM_NUM_POLYTOPES * 2 * 100; ++ct) 
PetscCall(PetscFree4(ex->target[ct], ex->size[ct], ex->cone[ct], ex->ornt[ct])); + } + PetscCall(PetscFree5(ex->Nt, ex->target, ex->size, ex->cone, ex->ornt)); + PetscCall(PetscFree(ex)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode DMPlexTransformGetSubcellOrientation_Cohesive(DMPlexTransform tr, DMPolytopeType sct, PetscInt sp, PetscInt so, DMPolytopeType tct, PetscInt r, PetscInt o, PetscInt *rnew, PetscInt *onew) +{ + DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data; + DMLabel trType = tr->trType; + PetscInt rt; + + PetscFunctionBeginHot; + *rnew = r; + *onew = DMPolytopeTypeComposeOrientation(tct, o, so); + if (!so) PetscFunctionReturn(PETSC_SUCCESS); + if (trType) { + PetscCall(DMLabelGetValue(tr->trType, sp, &rt)); + if (rt < 100 && !(rt % 2)) PetscFunctionReturn(PETSC_SUCCESS); + } + if (ex->useTensor) { + switch (sct) { + case DM_POLYTOPE_POINT: + break; + case DM_POLYTOPE_SEGMENT: + switch (tct) { + case DM_POLYTOPE_SEGMENT: + break; + case DM_POLYTOPE_SEG_PRISM_TENSOR: + *onew = DMPolytopeTypeComposeOrientation(tct, o, so ? -1 : 0); + break; + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cell type %s is not produced by %s", DMPolytopeTypes[tct], DMPolytopeTypes[sct]); + } + break; + // We need to handle identity extrusions from volumes (TET, HEX, etc) when boundary faces are being extruded + case DM_POLYTOPE_TRIANGLE: + break; + case DM_POLYTOPE_QUADRILATERAL: + break; + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported cell type %s", DMPolytopeTypes[sct]); + } + } else { + switch (sct) { + case DM_POLYTOPE_POINT: + break; + case DM_POLYTOPE_SEGMENT: + switch (tct) { + case DM_POLYTOPE_SEGMENT: + break; + case DM_POLYTOPE_QUADRILATERAL: + *onew = DMPolytopeTypeComposeOrientation(tct, o, so ? -3 : 0); + break; + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cell type %s is not produced by %s", DMPolytopeTypes[tct], DMPolytopeTypes[sct]); + } + break; + // We need to handle identity extrusions from volumes (TET, HEX, etc) when boundary faces are being extruded + case DM_POLYTOPE_TRIANGLE: + break; + case DM_POLYTOPE_QUADRILATERAL: + break; + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported cell type %s", DMPolytopeTypes[sct]); + } + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode DMPlexTransformCellTransform_Cohesive(DMPlexTransform tr, DMPolytopeType source, PetscInt p, PetscInt *rt, PetscInt *Nt, DMPolytopeType *target[], PetscInt *size[], PetscInt *cone[], PetscInt *ornt[]) +{ + DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data; + DMLabel trType = tr->trType; + PetscBool identity = PETSC_FALSE; + PetscInt val = 0; + + PetscFunctionBegin; + PetscCheck(trType, PETSC_COMM_SELF, PETSC_ERR_SUP, "Missing transform type label"); + PetscCall(DMLabelGetValue(trType, p, &val)); + identity = val < 100 && !(val % 2) ? 
PETSC_TRUE : PETSC_FALSE; + if (rt) *rt = val; + if (identity) { + PetscCall(DMPlexTransformCellTransformIdentity(tr, source, p, NULL, Nt, target, size, cone, ornt)); + } else { + *Nt = ex->Nt[val]; + *target = ex->target[val]; + *size = ex->size[val]; + *cone = ex->cone[val]; + *ornt = ex->ornt[val]; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* New vertices have the same coordinates */ +static PetscErrorCode DMPlexTransformMapCoordinates_Cohesive(DMPlexTransform tr, DMPolytopeType pct, DMPolytopeType ct, PetscInt p, PetscInt r, PetscInt Nv, PetscInt dE, const PetscScalar in[], PetscScalar out[]) +{ + PetscReal width; + PetscInt pval; + + PetscFunctionBeginHot; + PetscCheck(pct == DM_POLYTOPE_POINT, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not for parent point type %s", DMPolytopeTypes[pct]); + PetscCheck(ct == DM_POLYTOPE_POINT, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not for refined point type %s", DMPolytopeTypes[ct]); + PetscCheck(Nv == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Vertices should be produced from a single vertex, not %" PetscInt_FMT, Nv); + PetscCheck(r < 2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Vertices should only have two replicas, not %" PetscInt_FMT, r); + + PetscCall(DMPlexTransformCohesiveExtrudeGetWidth(tr, &width)); + PetscCall(DMLabelGetValue(tr->trType, p, &pval)); + if (width == 0. || pval < 100) { + for (PetscInt d = 0; d < dE; ++d) out[d] = in[d]; + } else { + DM dm; + PetscReal avgNormal[3] = {0., 0., 0.}, norm = 0.; + PetscInt *star = NULL; + PetscInt Nst, fStart, fEnd, Nf = 0; + + PetscCall(DMPlexTransformGetDM(tr, &dm)); + PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd)); + PetscCall(DMPlexGetTransitiveClosure(dm, p, PETSC_FALSE, &Nst, &star)); + // Get support faces that are split, refine type (ct * 2 + 1) * 100 + fsplit + for (PetscInt st = 0; st < Nst * 2; st += 2) { + DMPolytopeType ct; + PetscInt val; + + if (star[st] < fStart || star[st] >= fEnd) continue; + PetscCall(DMPlexGetCellType(dm, star[st], &ct)); + PetscCall(DMLabelGetValue(tr->trType, star[st], &val)); + if (val < (PetscInt)(ct * 2 + 1) * 100) continue; + star[Nf++] = star[st]; + } + PetscCheck(Nf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Split vertex %" PetscInt_FMT " must be connected to at least one split face", p); + // Average normals + for (PetscInt f = 0; f < Nf; ++f) { + PetscReal normal[3], vol; + + PetscCall(DMPlexComputeCellGeometryFVM(dm, star[f], &vol, NULL, normal)); + for (PetscInt d = 0; d < dE; ++d) avgNormal[d] += normal[d]; + } + PetscCall(DMPlexRestoreTransitiveClosure(dm, p, PETSC_FALSE, &Nst, &star)); + // Normalize normal + for (PetscInt d = 0; d < dE; ++d) norm += PetscSqr(avgNormal[d]); + norm = PetscSqrtReal(norm); + for (PetscInt d = 0; d < dE; ++d) avgNormal[d] /= norm; + // Symmetrically push vertices along normal + for (PetscInt d = 0; d < dE; ++d) out[d] = in[d] + width * avgNormal[d] * (r ? 
-0.5 : 0.5);
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode DMPlexTransformInitialize_Cohesive(DMPlexTransform tr)
+{
+  PetscFunctionBegin;
+  tr->ops->view = DMPlexTransformView_Cohesive;
+  tr->ops->setfromoptions = DMPlexTransformSetFromOptions_Cohesive;
+  tr->ops->setup = DMPlexTransformSetUp_Cohesive;
+  tr->ops->destroy = DMPlexTransformDestroy_Cohesive;
+  tr->ops->setdimensions = DMPlexTransformSetDimensions_Internal;
+  tr->ops->celltransform = DMPlexTransformCellTransform_Cohesive;
+  tr->ops->getsubcellorientation = DMPlexTransformGetSubcellOrientation_Cohesive;
+  tr->ops->mapcoordinates = DMPlexTransformMapCoordinates_Cohesive;
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+PETSC_EXTERN PetscErrorCode DMPlexTransformCreate_Cohesive(DMPlexTransform tr)
+{
+  DMPlexTransform_Cohesive *ex;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(tr, DMPLEXTRANSFORM_CLASSID, 1);
+  PetscCall(PetscNew(&ex));
+  tr->data = ex;
+  ex->useTensor = PETSC_TRUE;
+  PetscCall(DMPlexTransformInitialize_Cohesive(tr));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/*@
+  DMPlexTransformCohesiveExtrudeGetTensor - Get the flag to use tensor cells
+
+  Not Collective
+
+  Input Parameter:
+. tr - The `DMPlexTransform`
+
+  Output Parameter:
+. useTensor - The flag to use tensor cells
+
+  Note:
+  This flag determines the orientation behavior of the created points.
+
+  For example, if tensor is `PETSC_TRUE`, then
+.vb
+  DM_POLYTOPE_POINT_PRISM_TENSOR is made instead of DM_POLYTOPE_SEGMENT,
+  DM_POLYTOPE_SEG_PRISM_TENSOR instead of DM_POLYTOPE_QUADRILATERAL,
+  DM_POLYTOPE_TRI_PRISM_TENSOR instead of DM_POLYTOPE_TRI_PRISM, and
+  DM_POLYTOPE_QUAD_PRISM_TENSOR instead of DM_POLYTOPE_HEXAHEDRON.
+.ve
+
+  Level: intermediate
+
+.seealso: `DMPlexTransform`, `DMPlexTransformCohesiveExtrudeSetTensor()`, `DMPlexTransformExtrudeGetTensor()`
+@*/
+PetscErrorCode DMPlexTransformCohesiveExtrudeGetTensor(DMPlexTransform tr, PetscBool *useTensor)
+{
+  DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(tr, DMPLEXTRANSFORM_CLASSID, 1);
+  PetscAssertPointer(useTensor, 2);
+  *useTensor = ex->useTensor;
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/*@
+  DMPlexTransformCohesiveExtrudeSetTensor - Set the flag to use tensor cells
+
+  Not Collective
+
+  Input Parameters:
++ tr - The `DMPlexTransform`
+- useTensor - The flag for tensor cells
+
+  Note:
+  This flag determines the orientation behavior of the created points.
+  For example, if tensor is `PETSC_TRUE`, then
+.vb
+  DM_POLYTOPE_POINT_PRISM_TENSOR is made instead of DM_POLYTOPE_SEGMENT,
+  DM_POLYTOPE_SEG_PRISM_TENSOR instead of DM_POLYTOPE_QUADRILATERAL,
+  DM_POLYTOPE_TRI_PRISM_TENSOR instead of DM_POLYTOPE_TRI_PRISM, and
+  DM_POLYTOPE_QUAD_PRISM_TENSOR instead of DM_POLYTOPE_HEXAHEDRON.
+.ve
+
+  Level: intermediate
+
+.seealso: `DMPlexTransform`, `DMPlexTransformCohesiveExtrudeGetTensor()`, `DMPlexTransformExtrudeSetTensor()`
+@*/
+PetscErrorCode DMPlexTransformCohesiveExtrudeSetTensor(DMPlexTransform tr, PetscBool useTensor)
+{
+  DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(tr, DMPLEXTRANSFORM_CLASSID, 1);
+  ex->useTensor = useTensor;
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/*@
+  DMPlexTransformCohesiveExtrudeGetWidth - Get the width of extruded cells
+
+  Not Collective
+
+  Input Parameter:
+. tr - The `DMPlexTransform`
+
+  Output Parameter:
+. width - The width of extruded cells; a width of 0 (the default) leaves the two copies of each split point coincident
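+
+  Note:
+  The width can also be set with the options database key `-dm_plex_transform_cohesive_width`, e.g.
+.vb
+  -dm_plex_transform_cohesive_width 0.1
+.ve
+  which displaces the two copies of each split vertex by width/2 in opposite directions along the average normal of the adjacent split faces.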
+
+  Level: intermediate
+
+.seealso: `DMPlexTransform`, `DMPlexTransformCohesiveExtrudeSetWidth()`
+@*/
+PetscErrorCode DMPlexTransformCohesiveExtrudeGetWidth(DMPlexTransform tr, PetscReal *width)
+{
+  DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(tr, DMPLEXTRANSFORM_CLASSID, 1);
+  PetscAssertPointer(width, 2);
+  *width = ex->width;
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/*@
+  DMPlexTransformCohesiveExtrudeSetWidth - Set the width of extruded cells
+
+  Not Collective
+
+  Input Parameters:
++ tr - The `DMPlexTransform`
+- width - The width of the extruded cells; use 0 (the default) to leave the two copies of each split point coincident
+
+  Level: intermediate
+
+.seealso: `DMPlexTransform`, `DMPlexTransformCohesiveExtrudeGetWidth()`
+@*/
+PetscErrorCode DMPlexTransformCohesiveExtrudeSetWidth(DMPlexTransform tr, PetscReal width)
+{
+  DMPlexTransform_Cohesive *ex = (DMPlexTransform_Cohesive *)tr->data;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(tr, DMPLEXTRANSFORM_CLASSID, 1);
+  ex->width = width;
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
diff --git a/src/dm/impls/plex/transform/impls/refine/regular/plexrefregular.c b/src/dm/impls/plex/transform/impls/refine/regular/plexrefregular.c index e250a7dcb01..292b1dd6b8a 100644
--- a/src/dm/impls/plex/transform/impls/refine/regular/plexrefregular.c
+++ b/src/dm/impls/plex/transform/impls/refine/regular/plexrefregular.c
@@ -119,12 +119,15 @@ PetscErrorCode DMPlexRefineRegularGetAffineFaceTransforms(DMPlexTransform tr, DM
   Output Parameters:
+ Nc - The number of subcells produced from this cell type
-. v0 - The translation of the first vertex for each subcell
-. J - The Jacobian for each subcell (map from reference cell to subcell)
-- invJ - The inverse Jacobian for each subcell
+. v0 - The translation of the first vertex for each subcell, an array of length $dim * Nc$. Pass `NULL` to ignore.
+. J - The Jacobian for each subcell (map from reference cell to subcell), an array of length $dim^2 * Nc$. Pass `NULL` to ignore.
+- invJ - The inverse Jacobian for each subcell, an array of length $dim^2 * Nc$. Pass `NULL` to ignore. 
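+
+  Example usage (a sketch; `tr` is assumed to be a `DMPLEXREFINEREGULAR` transform and `dim` the reference cell dimension):
+.vb
+  PetscInt   Nc;
+  PetscReal *v0, *J, *invJ;
+
+  PetscCall(DMPlexRefineRegularGetAffineTransforms(tr, DM_POLYTOPE_TETRAHEDRON, &Nc, &v0, &J, &invJ));
+  for (PetscInt c = 0; c < Nc; ++c) {
+    const PetscReal *v0c = &v0[c * dim];       // translation of the first vertex of subcell c
+    const PetscReal *Jc  = &J[c * dim * dim];  // reference-to-subcell Jacobian of subcell c
+  }
+.ve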
Level: developer + Note: + Do not free these output arrays + .seealso: `DMPLEX`, `DM`, `DMPlexTransform`, `DMPolytopeType`, `DMPlexRefineRegularGetAffineFaceTransforms()`, `DMPLEXREFINEREGULAR` @*/ PetscErrorCode DMPlexRefineRegularGetAffineTransforms(DMPlexTransform tr, DMPolytopeType ct, PetscInt *Nc, PetscReal *v0[], PetscReal *J[], PetscReal *invJ[]) @@ -159,6 +162,37 @@ PetscErrorCode DMPlexRefineRegularGetAffineTransforms(DMPlexTransform tr, DMPoly 2.0, 0.0, 0.0, 2.0, 2.0, 2.0, -2.0, 0.0}; + static PetscReal tet_v0[] = {-1.0, -1.0, -1.0, -1.0, 0.0, -1.0, 0.0, -1.0, -1.0, -1.0, -1.0, 0.0, 0.0, -1.0, -1.0, -1.0, 0.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0, 0.0}; + static PetscReal tet_J[] = {0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5, + + 0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5, + + 0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5, + + 0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5, + + -0.5, -0.5, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5, 0.5, + + 0.0, 0.5, 0.5, 0.0, 0.0, -0.5, -0.5, -0.5, 0.0, + + -0.5, 0.0, 0.0, 0.5, 0.0, 0.5, -0.5, -0.5, -0.5, + + -0.5, -0.5, -0.5, 0.5, 0.5, 0.0, 0.0, -0.5, 0.0}; + static PetscReal tet_invJ[] = {2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0, + + 2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0, + + 2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0, + + 2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0, + + 0.0, 2.0, 0.0, -2.0, -2.0, 0.0, 2.0, 2.0, 2.0, + + -2.0, -2.0, -2.0, 2.0, 2.0, 0.0, 0.0, -2.0, 0.0, + + -2.0, 0.0, 0.0, 0.0, -2.0, -2.0, 2.0, 2.0, 0.0, + + 0.0, 2.0, 2.0, 0.0, 0.0, -2.0, -2.0, -2.0, 0.0}; /* 3---------2---------2 | | | @@ -243,6 +277,12 @@ PetscErrorCode DMPlexRefineRegularGetAffineTransforms(DMPlexTransform tr, DMPoly if (J) *J = quad_J; if (invJ) *invJ = quad_invJ; break; + case DM_POLYTOPE_TETRAHEDRON: + if (Nc) *Nc = 8; + if (v0) *v0 = tet_v0; + if (J) *J = tet_J; + if (invJ) *invJ = tet_invJ; + break; case DM_POLYTOPE_HEXAHEDRON: if (Nc) *Nc = 8; if (v0) *v0 = hex_v0; diff --git a/src/dm/impls/plex/transform/interface/plextransform.c b/src/dm/impls/plex/transform/interface/plextransform.c index e6d01e5a36b..d730ece5097 100644 --- a/src/dm/impls/plex/transform/interface/plextransform.c +++ b/src/dm/impls/plex/transform/interface/plextransform.c @@ -91,6 +91,7 @@ PETSC_EXTERN PetscErrorCode DMPlexTransformCreate_SBR(DMPlexTransform); PETSC_EXTERN PetscErrorCode DMPlexTransformCreate_BL(DMPlexTransform); PETSC_EXTERN PetscErrorCode DMPlexTransformCreate_1D(DMPlexTransform); PETSC_EXTERN PetscErrorCode DMPlexTransformCreate_Extrude(DMPlexTransform); +PETSC_EXTERN PetscErrorCode DMPlexTransformCreate_Cohesive(DMPlexTransform); /*@C DMPlexTransformRegisterAll - Registers all of the transform components in the `DM` package. @@ -115,12 +116,15 @@ PetscErrorCode DMPlexTransformRegisterAll(void) PetscCall(DMPlexTransformRegister(DMPLEXREFINESBR, DMPlexTransformCreate_SBR)); PetscCall(DMPlexTransformRegister(DMPLEXREFINE1D, DMPlexTransformCreate_1D)); PetscCall(DMPlexTransformRegister(DMPLEXEXTRUDE, DMPlexTransformCreate_Extrude)); + PetscCall(DMPlexTransformRegister(DMPLEXCOHESIVEEXTRUDE, DMPlexTransformCreate_Cohesive)); PetscFunctionReturn(PETSC_SUCCESS); } /*@C DMPlexTransformRegisterDestroy - This function destroys the registered `DMPlexTransformType`. It is called from `PetscFinalize()`. 
+ Not collective + Level: developer .seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMRegisterAll()`, `DMPlexTransformType`, `PetscInitialize()` @@ -164,7 +168,7 @@ PetscErrorCode DMPlexTransformCreate(MPI_Comm comm, DMPlexTransform *tr) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexTransformSetType - Sets the particular implementation for a transform. Collective @@ -201,7 +205,7 @@ PetscErrorCode DMPlexTransformSetType(DMPlexTransform tr, DMPlexTransformType me PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexTransformGetType - Gets the type name (as a string) from the transform. Not Collective @@ -272,7 +276,7 @@ static PetscErrorCode DMPlexTransformView_Ascii(DMPlexTransform tr, PetscViewer PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexTransformView - Views a `DMPlexTransform` Collective @@ -340,7 +344,7 @@ PetscErrorCode DMPlexTransformSetFromOptions(DMPlexTransform tr) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPlexTransformDestroy - Destroys a `DMPlexTransform` Collective @@ -514,6 +518,16 @@ static PetscErrorCode DMPlexTransformCreateOffset_Internal(DMPlexTransform tr, P PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMPlexTransformSetUp - Create the tables that drive the transform + + Input Parameter: +. tr - The `DMPlexTransform` object + + Level: intermediate + +.seealso: [](plex_transform_table), [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexTransform`, `DMPlexTransformApply()`, `DMPlexTransformCreate()` +@*/ PetscErrorCode DMPlexTransformSetUp(DMPlexTransform tr) { DM dm; @@ -525,6 +539,7 @@ PetscErrorCode DMPlexTransformSetUp(DMPlexTransform tr) if (tr->setupcalled) PetscFunctionReturn(PETSC_SUCCESS); PetscTryTypeMethod(tr, setup); PetscCall(DMPlexTransformGetDM(tr, &dm)); + PetscCall(DMSetSnapToGeomModel(dm, NULL)); PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); if (pEnd > pStart) { PetscCall(DMPlexGetCellType(dm, 0, &ctCell)); @@ -614,6 +629,19 @@ PetscErrorCode DMPlexTransformSetUp(DMPlexTransform tr) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMPlexTransformGetDM - Get the base `DM` for the transform + + Input Parameter: +. tr - The `DMPlexTransform` object + + Output Parameter: +. dm - The original `DM` which will be transformed + + Level: intermediate + +.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexTransform`, `DMPlexTransformSetDM()`, `DMPlexTransformApply()`, `DMPlexTransformCreate()` +@*/ PetscErrorCode DMPlexTransformGetDM(DMPlexTransform tr, DM *dm) { PetscFunctionBegin; @@ -623,6 +651,20 @@ PetscErrorCode DMPlexTransformGetDM(DMPlexTransform tr, DM *dm) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMPlexTransformSetDM - Set the base `DM` for the transform + + Input Parameters: ++ tr - The `DMPlexTransform` object +- dm - The original `DM` which will be transformed + + Level: intermediate + + Note: + The user does not typically call this, as it is called by `DMPlexTransformApply()`. + +.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexTransform`, `DMPlexTransformGetDM()`, `DMPlexTransformApply()`, `DMPlexTransformCreate()` +@*/ PetscErrorCode DMPlexTransformSetDM(DMPlexTransform tr, DM dm) { PetscFunctionBegin; @@ -634,6 +676,19 @@ PetscErrorCode DMPlexTransformSetDM(DMPlexTransform tr, DM dm) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMPlexTransformGetActive - Get the `DMLabel` marking the active points for the transform + + Input Parameter: +. tr - The `DMPlexTransform` object + + Output Parameter: +. 
active - The `DMLabel` indicating which points will be transformed
+
+  Level: intermediate
+
+.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexTransform`, `DMPlexTransformSetActive()`, `DMPlexTransformApply()`, `DMPlexTransformCreate()`
+@*/
 PetscErrorCode DMPlexTransformGetActive(DMPlexTransform tr, DMLabel *active)
 {
   PetscFunctionBegin;
@@ -643,6 +698,20 @@ PetscErrorCode DMPlexTransformGetActive(DMPlexTransform tr, DMLabel *active)
   PetscFunctionReturn(PETSC_SUCCESS);
 }
 
+/*@
+  DMPlexTransformSetActive - Set the `DMLabel` marking the active points for the transform
+
+  Input Parameters:
++ tr - The `DMPlexTransform` object
+- active - The `DMLabel` indicating which points will be transformed
+
+  Level: intermediate
+
+  Note:
+  This only applies to transforms that can operate on a subset of the mesh, listed in [](plex_transform_table).
+
+.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexTransform`, `DMPlexTransformGetActive()`, `DMPlexTransformApply()`, `DMPlexTransformCreate()`
+@*/
 PetscErrorCode DMPlexTransformSetActive(DMPlexTransform tr, DMLabel active)
 {
   PetscFunctionBegin;
@@ -2073,7 +2142,7 @@ static PetscErrorCode DMPlexTransformSetCoordinates(DMPlexTransform tr, DM rdm)
   PetscCall(DMPlexTransformGetTargetPoint(tr, ct, rct[n], p, r, &vNew));
   PetscCall(PetscSectionGetOffset(coordSectionNew, vNew, &off));
   PetscCall(DMPlexTransformMapCoordinates(tr, ct, rct[n], p, r, Nv, dEo, icoords, vcoords));
-  PetscCall(DMPlexSnapToGeomModel(dm, p, dE, vcoords, &coordsNew[off]));
+  PetscCall(DMSnapToGeomModel(dm, p, dE, vcoords, &coordsNew[off]));
   }
   }
   PetscCall(DMPlexRestoreCellCoordinates(dm, p, &isDG, &Nc, &array, &pcoords));
@@ -2143,6 +2212,22 @@ static PetscErrorCode DMPlexTransformSetCoordinates(DMPlexTransform tr, DM rdm)
   PetscFunctionReturn(PETSC_SUCCESS);
 }
 
+/*@
+  DMPlexTransformApply - Execute the transformation, producing another `DM`
+
+  Collective
+
+  Input Parameters:
++ tr - The `DMPlexTransform` object
+- dm - The original `DM`
+
+  Output Parameter:
+. 
tdm - The transformed `DM` + + Level: intermediate + +.seealso: [](plex_transform_table), [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexTransform`, `DMPlexTransformCreate()`, `DMPlexTransformSetDM()` +@*/ PetscErrorCode DMPlexTransformApply(DMPlexTransform tr, DM dm, DM *tdm) { DM rdm; diff --git a/src/dm/impls/plex/tutorials/ex8.c b/src/dm/impls/plex/tutorials/ex8.c index 09623a66ce3..525b9006b49 100644 --- a/src/dm/impls/plex/tutorials/ex8.c +++ b/src/dm/impls/plex/tutorials/ex8.c @@ -128,15 +128,18 @@ int main(int argc, char **argv) PetscCall(ISDestroy(&valueIS)); } if (view_coord) { - DM cdm; - Vec X; - PetscInt cdim; + DM cdm, cell_dm; + Vec X; + PetscInt cdim; + PetscBool sparseLocalize; PetscCall(DMGetCoordinatesLocalSetUp(dm)); PetscCall(DMGetCoordinateDim(dm, &cdim)); PetscCall(DMGetCoordinateDM(dm, &cdm)); PetscCall(PetscObjectSetName((PetscObject)cdm, "coords")); if (tensor) PetscCall(DMPlexSetClosurePermutationTensor(cdm, PETSC_DETERMINE, NULL)); + PetscCall(DMLocalizeCoordinates(dm)); + PetscCall(DMGetSparseLocalize(dm, &sparseLocalize)); for (c = cStart; c < cEnd; ++c) { const PetscScalar *array; PetscScalar *x = NULL; @@ -152,6 +155,12 @@ int main(int argc, char **argv) PetscCall(ViewOffsets(dm, NULL)); PetscCall(DMGetCoordinatesLocal(dm, &X)); PetscCall(ViewOffsets(cdm, X)); + PetscCall(DMGetCellCoordinateDM(dm, &cell_dm)); + PetscCall(PetscObjectSetName((PetscObject)cell_dm, "cell coords")); + if (cell_dm && !sparseLocalize) { + PetscCall(DMGetCellCoordinatesLocal(dm, &X)); + PetscCall(ViewOffsets(cell_dm, X)); + } } PetscCall(DMDestroy(&dm)); PetscCall(PetscFinalize()); @@ -202,10 +211,17 @@ int main(int argc, char **argv) suffix: 1d_q1_periodic requires: !complex args: -dm_plex_dim 1 -petscspace_degree 1 -dm_plex_simplex 0 -dm_plex_box_faces 3 -dm_plex_box_bd periodic -dm_view -view_coord - test: + testset: suffix: 2d_q1_periodic requires: !complex - args: -dm_plex_dim 2 -petscspace_degree 1 -dm_plex_simplex 0 -dm_plex_box_faces 3,2 -dm_plex_box_bd periodic,none -dm_view -view_coord + args: -dm_plex_dim 2 -dm_plex_simplex 0 -dm_plex_box_faces 3,2 -dm_plex_box_bd periodic,none \ + -petscspace_degree 1 -dm_view -view_coord + + test: + test: + suffix: sparse + args: -dm_sparse_localize false -dm_localize 0 + test: suffix: 3d_q1_periodic requires: !complex diff --git a/src/dm/impls/plex/tutorials/output/ex5_0_outformat-hdf5_petsc_ptscotch.out b/src/dm/impls/plex/tutorials/output/ex5_0_outformat-hdf5_petsc_ptscotch.out index e0766760e7c..609a243dae9 100644 --- a/src/dm/impls/plex/tutorials/output/ex5_0_outformat-hdf5_petsc_ptscotch.out +++ b/src/dm/impls/plex/tutorials/output/ex5_0_outformat-hdf5_petsc_ptscotch.out @@ -55,13 +55,13 @@ Labels: DM Object: DMPlex Object (redistributed_) 4 MPI processes type: plex DMPlex Object in 3 dimensions: - Number of 0-cells per rank: 41 46 42 42 - Number of 1-cells per rank: 86 95 88 88 - Number of 2-cells per rank: 60 64 61 61 + Number of 0-cells per rank: 43 41 48 46 + Number of 1-cells per rank: 89 86 98 95 + Number of 2-cells per rank: 61 60 65 64 Number of 3-cells per rank: 14 14 14 14 Labels: - depth: 4 strata with value/size (0 (41), 1 (86), 2 (60), 3 (14)) - celltype: 4 strata with value/size (0 (41), 1 (86), 4 (60), 7 (14)) + depth: 4 strata with value/size (0 (43), 1 (89), 2 (61), 3 (14)) + celltype: 4 strata with value/size (0 (43), 1 (89), 4 (61), 7 (14)) End cycle 1 -------- Begin cycle 2 @@ -89,12 +89,12 @@ Labels: DM Object: DMPlex Object (redistributed_) 4 MPI processes type: plex DMPlex Object in 3 dimensions: - Number of 
0-cells per rank: 41 46 42 42 - Number of 1-cells per rank: 86 95 88 88 - Number of 2-cells per rank: 60 64 61 61 + Number of 0-cells per rank: 42 42 46 48 + Number of 1-cells per rank: 88 88 95 98 + Number of 2-cells per rank: 61 61 64 65 Number of 3-cells per rank: 14 14 14 14 Labels: - depth: 4 strata with value/size (0 (41), 1 (86), 2 (60), 3 (14)) - celltype: 4 strata with value/size (0 (41), 1 (86), 4 (60), 7 (14)) + depth: 4 strata with value/size (0 (42), 1 (88), 2 (61), 3 (14)) + celltype: 4 strata with value/size (0 (42), 1 (88), 4 (61), 7 (14)) End cycle 2 -------- diff --git a/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_petsc_ptscotch.out b/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_petsc_ptscotch.out index 2b93862ccac..c5f12643963 100644 --- a/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_petsc_ptscotch.out +++ b/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_petsc_ptscotch.out @@ -55,13 +55,13 @@ Labels: DM Object: DMPlex Object (redistributed_) 3 MPI processes type: plex DMPlex Object in 3 dimensions: - Number of 0-cells per rank: 51 49 60 - Number of 1-cells per rank: 109 107 126 - Number of 2-cells per rank: 77 78 86 + Number of 0-cells per rank: 53 49 62 + Number of 1-cells per rank: 112 107 129 + Number of 2-cells per rank: 78 78 87 Number of 3-cells per rank: 18 19 19 Labels: - depth: 4 strata with value/size (0 (51), 1 (109), 2 (77), 3 (18)) - celltype: 4 strata with value/size (0 (51), 1 (109), 4 (77), 7 (18)) + depth: 4 strata with value/size (0 (53), 1 (112), 2 (78), 3 (18)) + celltype: 4 strata with value/size (0 (53), 1 (112), 4 (78), 7 (18)) End cycle 1 -------- Begin cycle 2 @@ -89,12 +89,12 @@ Labels: DM Object: DMPlex Object (redistributed_) 2 MPI processes type: plex DMPlex Object in 3 dimensions: - Number of 0-cells per rank: 70 80 - Number of 1-cells per rank: 156 174 - Number of 2-cells per rank: 115 123 + Number of 0-cells per rank: 69 82 + Number of 1-cells per rank: 154 177 + Number of 2-cells per rank: 114 124 Number of 3-cells per rank: 28 28 Labels: - depth: 4 strata with value/size (0 (70), 1 (156), 2 (115), 3 (28)) - celltype: 4 strata with value/size (0 (70), 1 (156), 4 (115), 7 (28)) + depth: 4 strata with value/size (0 (69), 1 (154), 2 (114), 3 (28)) + celltype: 4 strata with value/size (0 (69), 1 (154), 4 (114), 7 (28)) End cycle 2 -------- diff --git a/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_xdmf_ptscotch.out b/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_xdmf_ptscotch.out index c7ad9233182..745d38eec74 100644 --- a/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_xdmf_ptscotch.out +++ b/src/dm/impls/plex/tutorials/output/ex5_1_outformat-hdf5_xdmf_ptscotch.out @@ -34,22 +34,22 @@ Begin cycle 1 DM Object: DMPlex Object (loaded_) 3 MPI processes type: plex DMPlex Object in 3 dimensions: - Number of 0-cells per rank: 65 70 58 + Number of 0-cells per rank: 69 71 54 Number of 3-cells per rank: 19 19 18 Labels: - depth: 2 strata with value/size (0 (65), 1 (19)) - celltype: 2 strata with value/size (7 (19), 0 (65)) + depth: 2 strata with value/size (0 (69), 1 (19)) + celltype: 2 strata with value/size (7 (19), 0 (69)) Loaded mesh distributed? 
TRUE DM Object: DMPlex Object (interpolated_) 3 MPI processes type: plex DMPlex Object in 3 dimensions: - Number of 0-cells per rank: 65 70 58 - Number of 1-cells per rank: 130 139 120 - Number of 2-cells per rank: 86 90 81 + Number of 0-cells per rank: 69 71 54 + Number of 1-cells per rank: 134 141 114 + Number of 2-cells per rank: 87 91 79 Number of 3-cells per rank: 19 19 18 Labels: - celltype: 4 strata with value/size (0 (65), 7 (19), 4 (86), 1 (130)) - depth: 4 strata with value/size (0 (65), 1 (130), 2 (86), 3 (19)) + celltype: 4 strata with value/size (0 (69), 7 (19), 4 (87), 1 (134)) + depth: 4 strata with value/size (0 (69), 1 (134), 2 (87), 3 (19)) DM Object: DMPlex Object (redistributed_) 3 MPI processes type: plex DMPlex Object in 3 dimensions: diff --git a/src/dm/impls/plex/tutorials/output/ex8_2d_q1_periodic_sparse.out b/src/dm/impls/plex/tutorials/output/ex8_2d_q1_periodic_sparse.out new file mode 100644 index 00000000000..b67241fc4b2 --- /dev/null +++ b/src/dm/impls/plex/tutorials/output/ex8_2d_q1_periodic_sparse.out @@ -0,0 +1,133 @@ +DM Object: box 1 MPI process + type: plex +box in 2 dimensions: + Number of 0-cells per rank: 9 + Number of 1-cells per rank: 15 + Number of 2-cells per rank: 6 +Periodic mesh (PERIODIC, NONE) coordinates not localized +Labels: + marker: 1 strata with value/size (1 (12)) + Face Sets: 2 strata with value/size (1 (3), 3 (3)) + depth: 3 strata with value/size (0 (9), 1 (15), 2 (6)) + celltype: 3 strata with value/size (4 (6), 0 (9), 1 (15)) +Element #0 +0: 0 1 3 4 +Element #1 +0: 1 2 4 5 +Element #2 +0: 2 0 5 3 +Element #3 +0: 3 4 6 7 +Element #4 +0: 4 5 7 8 +Element #5 +0: 5 3 8 6 +========= Face restriction; marker: 1 ======== +num_elem: 3, elem_size: 2, num_dof: 9 +0: 0 1 +0: 1 2 +0: 2 0 +========= Face restriction; marker: 3 ======== +num_elem: 3, elem_size: 2, num_dof: 9 +0: 7 6 +0: 8 7 +0: 6 8 +Element #0 coordinates + 0: 0.0000e+00 0.0000e+00 + 0: 3.3333e-01 0.0000e+00 + 0: 0.0000e+00 5.0000e-01 + 0: 3.3333e-01 5.0000e-01 +Element #1 coordinates + 0: 3.3333e-01 0.0000e+00 + 0: 6.6667e-01 0.0000e+00 + 0: 3.3333e-01 5.0000e-01 + 0: 6.6667e-01 5.0000e-01 +Element #2 coordinates + 0: 6.6667e-01 0.0000e+00 + 0: 1.0000e+00 0.0000e+00 + 0: 6.6667e-01 5.0000e-01 + 0: 1.0000e+00 5.0000e-01 +Element #3 coordinates + 0: 0.0000e+00 5.0000e-01 + 0: 3.3333e-01 5.0000e-01 + 0: 0.0000e+00 1.0000e+00 + 0: 3.3333e-01 1.0000e+00 +Element #4 coordinates + 0: 3.3333e-01 5.0000e-01 + 0: 6.6667e-01 5.0000e-01 + 0: 3.3333e-01 1.0000e+00 + 0: 6.6667e-01 1.0000e+00 +Element #5 coordinates + 0: 6.6667e-01 5.0000e-01 + 0: 1.0000e+00 5.0000e-01 + 0: 6.6667e-01 1.0000e+00 + 0: 1.0000e+00 1.0000e+00 +DM box offsets: num_elem 6, size 4, comp 1, dof 9 +0: 0 1 3 4 +0: 1 2 4 5 +0: 2 0 5 3 +0: 3 4 6 7 +0: 4 5 7 8 +0: 5 3 8 6 +DM coords offsets: num_elem 6, size 4, comp 2, dof 18 +0: 0 2 6 8 + 0: 0.0000e+00 0.0000e+00 + 0: 3.3333e-01 0.0000e+00 + 0: 0.0000e+00 5.0000e-01 + 0: 3.3333e-01 5.0000e-01 +0: 2 4 8 10 + 0: 3.3333e-01 0.0000e+00 + 0: 6.6667e-01 0.0000e+00 + 0: 3.3333e-01 5.0000e-01 + 0: 6.6667e-01 5.0000e-01 +0: 4 0 10 6 + 0: 6.6667e-01 0.0000e+00 + 0: 0.0000e+00 0.0000e+00 + 0: 6.6667e-01 5.0000e-01 + 0: 0.0000e+00 5.0000e-01 +0: 6 8 12 14 + 0: 0.0000e+00 5.0000e-01 + 0: 3.3333e-01 5.0000e-01 + 0: 0.0000e+00 1.0000e+00 + 0: 3.3333e-01 1.0000e+00 +0: 8 10 14 16 + 0: 3.3333e-01 5.0000e-01 + 0: 6.6667e-01 5.0000e-01 + 0: 3.3333e-01 1.0000e+00 + 0: 6.6667e-01 1.0000e+00 +0: 10 6 16 12 + 0: 6.6667e-01 5.0000e-01 + 0: 0.0000e+00 5.0000e-01 + 0: 6.6667e-01 1.0000e+00 + 0: 
0.0000e+00 1.0000e+00 +DM cell coords offsets: num_elem 6, size 4, comp 2, dof 48 +0: 0 2 4 6 + 0: 0.0000e+00 0.0000e+00 + 0: 3.3333e-01 0.0000e+00 + 0: 0.0000e+00 5.0000e-01 + 0: 3.3333e-01 5.0000e-01 +0: 8 10 12 14 + 0: 3.3333e-01 0.0000e+00 + 0: 6.6667e-01 0.0000e+00 + 0: 3.3333e-01 5.0000e-01 + 0: 6.6667e-01 5.0000e-01 +0: 16 18 20 22 + 0: 6.6667e-01 0.0000e+00 + 0: 1.0000e+00 0.0000e+00 + 0: 6.6667e-01 5.0000e-01 + 0: 1.0000e+00 5.0000e-01 +0: 24 26 28 30 + 0: 0.0000e+00 5.0000e-01 + 0: 3.3333e-01 5.0000e-01 + 0: 0.0000e+00 1.0000e+00 + 0: 3.3333e-01 1.0000e+00 +0: 32 34 36 38 + 0: 3.3333e-01 5.0000e-01 + 0: 6.6667e-01 5.0000e-01 + 0: 3.3333e-01 1.0000e+00 + 0: 6.6667e-01 1.0000e+00 +0: 40 42 44 46 + 0: 6.6667e-01 5.0000e-01 + 0: 1.0000e+00 5.0000e-01 + 0: 6.6667e-01 1.0000e+00 + 0: 1.0000e+00 1.0000e+00 diff --git a/src/dm/impls/product/productutils.c b/src/dm/impls/product/productutils.c index 59608873715..36d7015837f 100644 --- a/src/dm/impls/product/productutils.c +++ b/src/dm/impls/product/productutils.c @@ -1,7 +1,7 @@ /* Additional functions in the DMProduct API, which are not part of the general DM API. */ -#include +#include /*I "petsc/private/dmproductimpl.h" I*/ -/*@C +/*@ DMProductGetDM - Get sub-`DM` associated with a given slot of a `DMPRODUCT` Not Collective @@ -17,7 +17,7 @@ .seealso: `DMPRODUCT`, `DMProductSetDM()` @*/ -PETSC_EXTERN PetscErrorCode DMProductGetDM(DM dm, PetscInt slot, DM *subdm) +PetscErrorCode DMProductGetDM(DM dm, PetscInt slot, DM *subdm) { DM_Product *product = (DM_Product *)dm->data; PetscInt dim; @@ -30,7 +30,7 @@ PETSC_EXTERN PetscErrorCode DMProductGetDM(DM dm, PetscInt slot, DM *subdm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMProductSetDM - Set sub-`DM` associated with a given slot of `DMPRODUCT` Not Collective @@ -47,7 +47,7 @@ PETSC_EXTERN PetscErrorCode DMProductGetDM(DM dm, PetscInt slot, DM *subdm) .seealso: `DMPRODUCT`, `DMProductGetDM()`, `DMProductSetDimensionIndex()` @*/ -PETSC_EXTERN PetscErrorCode DMProductSetDM(DM dm, PetscInt slot, DM subdm) +PetscErrorCode DMProductSetDM(DM dm, PetscInt slot, DM subdm) { DM_Product *product = (DM_Product *)dm->data; PetscInt dim; @@ -62,7 +62,7 @@ PETSC_EXTERN PetscErrorCode DMProductSetDM(DM dm, PetscInt slot, DM subdm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMProductSetDimensionIndex - Set the dimension index associated with a given slot/sub-`DM` Not Collective @@ -76,7 +76,7 @@ PETSC_EXTERN PetscErrorCode DMProductSetDM(DM dm, PetscInt slot, DM subdm) .seealso: `DMPRODUCT` @*/ -PETSC_EXTERN PetscErrorCode DMProductSetDimensionIndex(DM dm, PetscInt slot, PetscInt idx) +PetscErrorCode DMProductSetDimensionIndex(DM dm, PetscInt slot, PetscInt idx) { DM_Product *product = (DM_Product *)dm->data; PetscInt dim; diff --git a/src/dm/impls/redundant/dmredundant.c b/src/dm/impls/redundant/dmredundant.c index 55c8a9ed921..d93672cdf28 100644 --- a/src/dm/impls/redundant/dmredundant.c +++ b/src/dm/impls/redundant/dmredundant.c @@ -378,7 +378,7 @@ PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMRedundantCreate - Creates a `DM` object, used to manage data for dense globally coupled variables Collective diff --git a/src/dm/impls/sliced/sliced.c b/src/dm/impls/sliced/sliced.c index 3467f8ebc21..399e7ea0258 100644 --- a/src/dm/impls/sliced/sliced.c +++ b/src/dm/impls/sliced/sliced.c @@ -65,7 +65,7 @@ static PetscErrorCode DMCreateMatrix_Sliced(DM dm, Mat *J) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSlicedSetGhosts - Sets the 
global indices of other processes elements that will be ghosts on this process @@ -97,7 +97,7 @@ PetscErrorCode DMSlicedSetGhosts(DM dm, PetscInt bs, PetscInt nlocal, PetscInt N PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by `DMSLICED` Not Collective @@ -105,15 +105,15 @@ PetscErrorCode DMSlicedSetGhosts(DM dm, PetscInt bs, PetscInt nlocal, PetscInt N Input Parameters: + dm - the `DM` object . d_nz - number of block nonzeros per block row in diagonal portion of local - submatrix (same for all local rows) + submatrix (same for all local rows) . d_nnz - array containing the number of block nonzeros in the various block rows - of the in diagonal portion of the local (possibly different for each block - row) or `NULL`. + of the in diagonal portion of the local (possibly different for each block + row) or `NULL`. . o_nz - number of block nonzeros per block row in the off-diagonal portion of local - submatrix (same for all local rows). + submatrix (same for all local rows). - o_nnz - array containing the number of nonzeros in the various block rows of the - off-diagonal portion of the local submatrix (possibly different for - each block row) or `NULL`. + off-diagonal portion of the local submatrix (possibly different for + each block row) or `NULL`. Level: advanced @@ -163,7 +163,7 @@ static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs, const PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem of the matrix returned by `DMSlicedGetMatrix()`. @@ -182,7 +182,7 @@ static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs, const PetscInt .seealso: `DM`, `DMSLICED`, `DMSlicedGetMatrix()`, `DMDASetBlockFills()` @*/ -PetscErrorCode DMSlicedSetBlockFills(DM dm, const PetscInt *dfill, const PetscInt *ofill) +PetscErrorCode DMSlicedSetBlockFills(DM dm, const PetscInt dfill[], const PetscInt ofill[]) { DM_Sliced *slice = (DM_Sliced *)dm->data; @@ -268,7 +268,7 @@ PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSlicedCreate - Creates a `DM` object, used to manage data for a unstructured problem Collective diff --git a/src/dm/impls/stag/stag.c b/src/dm/impls/stag/stag.c index c61dec8f099..a4378b63617 100644 --- a/src/dm/impls/stag/stag.c +++ b/src/dm/impls/stag/stag.c @@ -4,8 +4,8 @@ implementations of DM API functions, and other files here contain additional DMStag-specific API functions, as well as internal functions. */ -#include -#include +#include /*I "petscdmstag.h" I*/ +#include /*I "petscdsf.h" I*/ static PetscErrorCode DMCreateFieldDecomposition_Stag(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) { diff --git a/src/dm/impls/stag/stag1d.c b/src/dm/impls/stag/stag1d.c index 0b79cbf34b1..e3cbbfe4b98 100644 --- a/src/dm/impls/stag/stag1d.c +++ b/src/dm/impls/stag/stag1d.c @@ -1,9 +1,9 @@ /* Functions specific to the 1-dimensional implementation of DMStag */ -#include +#include /*I "petscdmstag.h" I*/ -/*@C +/*@ DMStagCreate1d - Create an object to manage data living on the elements and vertices of a parallelized regular 1D grid. Collective @@ -16,10 +16,10 @@ . dof1 - number of degrees of freedom per element/1-cell . stencilType - ghost/halo region type: `DMSTAG_STENCIL_BOX` or `DMSTAG_STENCIL_NONE` . 
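
For context on the DMSlicedSetPreallocation() arguments reflowed above, here is a minimal sketch of a DMSLICED setup; the sizes, ghost indices, and the SetupSliced() name are hypothetical, invented for illustration only:

#include <petscdmsliced.h>

/* Hypothetical sizes: 2 DOF per node, 100 owned nodes, 4 ghost nodes */
static PetscErrorCode SetupSliced(MPI_Comm comm, DM *slice)
{
  const PetscInt bs = 2, nlocal = 100, nghosts = 4;
  const PetscInt ghosts[4] = {200, 201, 202, 203}; /* global indices owned by other ranks (illustrative) */
  Mat            J;

  PetscFunctionBeginUser;
  PetscCall(DMSlicedCreate(comm, bs, nlocal, nghosts, ghosts, NULL, NULL, slice));
  /* constant preallocation: 5 diagonal-part and 2 off-diagonal-part block nonzeros per block row */
  PetscCall(DMSlicedSetPreallocation(*slice, 5, NULL, 2, NULL));
  PetscCall(DMCreateMatrix(*slice, &J)); /* returns a preallocated block matrix */
  PetscCall(MatDestroy(&J));
  PetscFunctionReturn(PETSC_SUCCESS);
}
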
stencilWidth - width, in elements, of halo/ghost region -- lx - array of local sizes, of length equal to the comm size, summing to M +- lx - array of local sizes, of length equal to the comm size, summing to `M` or `NULL` Output Parameter: -. dm - the new DMStag object +. dm - the new `DMSTAG` object Options Database Keys: + -dm_view - calls `DMViewFromOptions()` at the conclusion of `DMSetUp()` @@ -36,7 +36,7 @@ .seealso: [](ch_stag), `DMSTAG`, `DMStagCreate2d()`, `DMStagCreate3d()`, `DMDestroy()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMLocalToGlobalBegin()`, `DMDACreate1d()` @*/ -PETSC_EXTERN PetscErrorCode DMStagCreate1d(MPI_Comm comm, DMBoundaryType bndx, PetscInt M, PetscInt dof0, PetscInt dof1, DMStagStencilType stencilType, PetscInt stencilWidth, const PetscInt lx[], DM *dm) +PetscErrorCode DMStagCreate1d(MPI_Comm comm, DMBoundaryType bndx, PetscInt M, PetscInt dof0, PetscInt dof1, DMStagStencilType stencilType, PetscInt stencilWidth, const PetscInt lx[], DM *dm) { PetscMPIInt size; diff --git a/src/dm/impls/stag/stag2d.c b/src/dm/impls/stag/stag2d.c index a2aa27f2fc3..d00b53b1719 100644 --- a/src/dm/impls/stag/stag2d.c +++ b/src/dm/impls/stag/stag2d.c @@ -1,7 +1,7 @@ /* Functions specific to the 2-dimensional implementation of DMStag */ -#include +#include /*I "petscdmstag.h" I*/ -/*@C +/*@ DMStagCreate2d - Create an object to manage data living on the elements, faces, and vertices of a parallelized regular 2D grid. Collective @@ -20,8 +20,8 @@ . dof2 - number of degrees of freedom per element/2-cell . stencilType - ghost/halo region type: `DMSTAG_STENCIL_NONE`, `DMSTAG_STENCIL_BOX`, or `DMSTAG_STENCIL_STAR` . stencilWidth - width, in elements, of halo/ghost region -. lx - array of local x element counts, of length equal to `m`, summing to `M` -- ly - array of local y element counts, of length equal to `n`, summing to `N` +. lx - array of local x element counts, of length equal to `m`, summing to `M`, or `NULL` +- ly - array of local y element counts, of length equal to `n`, summing to `N`, or `NULL` Output Parameter: . dm - the new `DMSTAG` object @@ -45,7 +45,7 @@ .seealso: [](ch_stag), `DMSTAG`, `DMStagCreate1d()`, `DMStagCreate3d()`, `DMDestroy()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()` @*/ -PETSC_EXTERN PetscErrorCode DMStagCreate2d(MPI_Comm comm, DMBoundaryType bndx, DMBoundaryType bndy, PetscInt M, PetscInt N, PetscInt m, PetscInt n, PetscInt dof0, PetscInt dof1, PetscInt dof2, DMStagStencilType stencilType, PetscInt stencilWidth, const PetscInt lx[], const PetscInt ly[], DM *dm) +PetscErrorCode DMStagCreate2d(MPI_Comm comm, DMBoundaryType bndx, DMBoundaryType bndy, PetscInt M, PetscInt N, PetscInt m, PetscInt n, PetscInt dof0, PetscInt dof1, PetscInt dof2, DMStagStencilType stencilType, PetscInt stencilWidth, const PetscInt lx[], const PetscInt ly[], DM *dm) { PetscFunctionBegin; PetscCall(DMCreate(comm, dm)); diff --git a/src/dm/impls/stag/stag3d.c b/src/dm/impls/stag/stag3d.c index e59ca15eb67..862d46f7574 100644 --- a/src/dm/impls/stag/stag3d.c +++ b/src/dm/impls/stag/stag3d.c @@ -1,17 +1,15 @@ /* Functions specific to the 3-dimensional implementation of DMStag */ -#include +#include /*I "petscdmstag.h" I*/ -/*@C +/*@ DMStagCreate3d - Create an object to manage data living on the elements, faces, edges, and vertices of a parallelized regular 3D grid. Collective Input Parameters: + comm - MPI communicator -. 
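
The hunks above and below make the point that the ownership arrays may be `NULL`; a minimal sketch of DMStagCreate2d() relying on that (the 32 x 32 grid and the CreateStag2d() name are illustrative assumptions):

#include <petscdmstag.h>

static PetscErrorCode CreateStag2d(DM *dm)
{
  PetscFunctionBeginUser;
  /* 32 x 32 elements; 1 DOF on vertices, faces, and elements; NULL lx/ly lets PETSc split evenly */
  PetscCall(DMStagCreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, 32, 32, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 1, DMSTAG_STENCIL_BOX, 1, NULL, NULL, dm));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
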
bndx - x boundary type, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_PERIODIC`, or -`DM_BOUNDARY_GHOSTED` -. bndy - y boundary type, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_PERIODIC`, or -`DM_BOUNDARY_GHOSTED` +. bndx - x boundary type, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_PERIODIC`, or `DM_BOUNDARY_GHOSTED` +. bndy - y boundary type, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_PERIODIC`, or `DM_BOUNDARY_GHOSTED` . bndz - z boundary type, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_PERIODIC`, or `DM_BOUNDARY_GHOSTED` . M - global number of elements in x direction . N - global number of elements in y direction @@ -25,9 +23,9 @@ . dof3 - number of degrees of freedom per element/3-cell . stencilType - ghost/halo region type: `DMSTAG_STENCIL_NONE`, `DMSTAG_STENCIL_BOX`, or `DMSTAG_STENCIL_STAR` . stencilWidth - width, in elements, of halo/ghost region -. lx - array of local x element counts, of length equal to `m`, summing to `M` -. ly - arrays of local y element counts, of length equal to `n`, summing to `N` -- lz - arrays of local z element counts, of length equal to `p`, summing to `P` +. lx - array of local x element counts, of length equal to `m`, summing to `M`, or `NULL` +. ly - array of local y element counts, of length equal to `n`, summing to `N`, or `NULL` +- lz - array of local z element counts, of length equal to `p`, summing to `P`, or `NULL` Output Parameter: . dm - the new `DMSTAG` object @@ -54,7 +52,7 @@ .seealso: [](ch_stag), `DMSTAG`, `DMStagCreate1d()`, `DMStagCreate2d()`, `DMDestroy()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMLocalToGlobalBegin()`, `DMDACreate3d()` @*/ -PETSC_EXTERN PetscErrorCode DMStagCreate3d(MPI_Comm comm, DMBoundaryType bndx, DMBoundaryType bndy, DMBoundaryType bndz, PetscInt M, PetscInt N, PetscInt P, PetscInt m, PetscInt n, PetscInt p, PetscInt dof0, PetscInt dof1, PetscInt dof2, PetscInt dof3, DMStagStencilType stencilType, PetscInt stencilWidth, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[], DM *dm) +PetscErrorCode DMStagCreate3d(MPI_Comm comm, DMBoundaryType bndx, DMBoundaryType bndy, DMBoundaryType bndz, PetscInt M, PetscInt N, PetscInt P, PetscInt m, PetscInt n, PetscInt p, PetscInt dof0, PetscInt dof1, PetscInt dof2, PetscInt dof3, DMStagStencilType stencilType, PetscInt stencilWidth, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[], DM *dm) { PetscFunctionBegin; PetscCall(DMCreate(comm, dm)); diff --git a/src/dm/impls/stag/stagda.c b/src/dm/impls/stag/stagda.c index 41e961c5b0f..6e272d6d16e 100644 --- a/src/dm/impls/stag/stagda.c +++ b/src/dm/impls/stag/stagda.c @@ -1,7 +1,7 @@ /* Routines to convert between a (subset of) DMStag and DMDA */ -#include -#include +#include /*I "petscdmda.h" I*/ +#include /*I "petscdmstag.h" I*/ #include static PetscErrorCode DMStagCreateCompatibleDMDA(DM dm, DMStagStencilLocation loc, PetscInt c, DM *dmda) @@ -407,14 +407,14 @@ static PetscErrorCode DMStagTransferCoordinatesToDMDA(DM dmstag, DMStagStencilLo PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagVecSplitToDMDA - create a `DMDA` and `Vec` from a subgrid of a `DMSTAG` and its `Vec` Collective Input Parameters: + dm - the `DMSTAG` object -. vec - Vec object associated with `dm` +. vec - `Vec` object associated with `dm` .
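
A short sketch of the DMStagVecSplitToDMDA() call whose documentation is touched here; `stag` is assumed to be a set-up 2D `DMSTAG` with at least one element-centered DOF, `x` a vector obtained from it, and ViewElementComponent() an illustrative name:

static PetscErrorCode ViewElementComponent(DM stag, Vec x)
{
  DM  da;
  Vec xda;

  PetscFunctionBeginUser;
  /* pull out component 0 of the element-centered subgrid as a DMDA/Vec pair */
  PetscCall(DMStagVecSplitToDMDA(stag, x, DMSTAG_ELEMENT, 0, &da, &xda));
  PetscCall(VecView(xda, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&xda));
  PetscCall(DMDestroy(&da));
  PetscFunctionReturn(PETSC_SUCCESS);
}
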
loc - which subgrid to extract (see `DMStagStencilLocation`) - c - which component to extract (see note below) diff --git a/src/dm/impls/stag/stagintern.c b/src/dm/impls/stag/stagintern.c index 7f3014f14b5..24c0bf9eccf 100644 --- a/src/dm/impls/stag/stagintern.c +++ b/src/dm/impls/stag/stagintern.c @@ -1,7 +1,6 @@ -/* DMStag dimension-independent internal functions. If added to the public API, - these would move to stagutils.c */ +/* DMStag dimension-independent internal functions. If added to the public API, these would move to stagutils.c */ -#include +#include /*I "petscdmstag.h" I*/ /* DMStagDuplicateWithoutSetup - duplicate a `DMSTAG` object without setting it up diff --git a/src/dm/impls/stag/stagmulti.c b/src/dm/impls/stag/stagmulti.c index b56e188367f..8171e894b36 100644 --- a/src/dm/impls/stag/stagmulti.c +++ b/src/dm/impls/stag/stagmulti.c @@ -1,7 +1,7 @@ /* Internal and DMStag-specific functions related to multigrid */ -#include +#include /*I "petscdmstag.h" I*/ -/*@C +/*@ DMStagRestrictSimple - restricts data from a fine to a coarse `DMSTAG`, in the simplest way Values on coarse cells are averages of all fine cells that they cover. diff --git a/src/dm/impls/stag/stagstencil.c b/src/dm/impls/stag/stagstencil.c index 6b08d749f53..3067d6675cd 100644 --- a/src/dm/impls/stag/stagstencil.c +++ b/src/dm/impls/stag/stagstencil.c @@ -1,5 +1,5 @@ /* Functions concerning getting and setting Vec and Mat values with DMStagStencil */ -#include +#include /*I "petscdmstag.h" I*/ /* Strings corresponding to the types defined in $PETSC_DIR/include/petscdmstag.h */ const char *const DMStagStencilTypes[] = {"NONE", "STAR", "BOX", "DMStagStencilType", "DM_STAG_STENCIL_", NULL}; @@ -13,12 +13,12 @@ const char *const DMStagStencilLocations[] = {"NONE", "BACK_DOWN_LEFT", "BACK_DO Collective Input Parameters: -+ dm - the `DMStag` object ++ dm - the `DMSTAG` object . n_stencil - the number of stencils provided - stencils - an array of `DMStagStencil` objects (`i`, `j`, and `k` are ignored) Output Parameter: -. is - the global IS +. is - the global `IS` Note: Redundant entries in the stencils argument are ignored @@ -27,7 +27,7 @@ const char *const DMStagStencilLocations[] = {"NONE", "BACK_DOWN_LEFT", "BACK_DO .seealso: [](ch_stag), `DMSTAG`, `IS`, `DMStagStencil`, `DMCreateGlobalVector` @*/ -PetscErrorCode DMStagCreateISFromStencils(DM dm, PetscInt n_stencil, DMStagStencil *stencils, IS *is) +PetscErrorCode DMStagCreateISFromStencils(DM dm, PetscInt n_stencil, DMStagStencil stencils[], IS *is) { PetscInt *stencil_active; DMStagStencil *stencils_ordered_unique; diff --git a/src/dm/impls/stag/stagutils.c b/src/dm/impls/stag/stagutils.c index af13d778a6a..2c647469843 100644 --- a/src/dm/impls/stag/stagutils.c +++ b/src/dm/impls/stag/stagutils.c @@ -1,6 +1,6 @@ /* Additional functions in the DMStag API, which are not part of the general DM API. */ -#include -#include +#include /*I "petscdmstag.h" I*/ +#include /*I "petscdmproduct.h" I*/ PetscErrorCode DMRestrictHook_Coordinates(DM, DM, void *); @@ -228,7 +228,7 @@ PETSC_EXTERN PetscErrorCode DMStagGetProductCoordinateLocationSlot(DM dm, DMStag PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetCorners - return global element indices of the local region (excluding ghost points) Not Collective @@ -250,7 +250,7 @@ PETSC_EXTERN PetscErrorCode DMStagGetProductCoordinateLocationSlot(DM dm, DMStag Level: beginner Notes: - Arguments corresponding to higher dimensions are ignored for 1D and 2D grids. These arguments may be set to NULL in this case. 
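
The corner and slot queries documented here combine into the standard DMStag traversal idiom; a sketch for 2D, assuming `dm` has at least one element-centered DOF, `xlocal` is a local vector, and ZeroElementDOF() is an illustrative name:

static PetscErrorCode ZeroElementDOF(DM dm, Vec xlocal)
{
  PetscInt       xs, ys, nx, ny, ex, ey, slot;
  PetscScalar ***arr;

  PetscFunctionBeginUser;
  /* higher-dimensional arguments may be NULL on a 2D grid, as the Note above says */
  PetscCall(DMStagGetCorners(dm, &xs, &ys, NULL, &nx, &ny, NULL, NULL, NULL, NULL));
  PetscCall(DMStagGetLocationSlot(dm, DMSTAG_ELEMENT, 0, &slot));
  PetscCall(DMStagVecGetArray(dm, xlocal, &arr));
  for (ey = ys; ey < ys + ny; ++ey)
    for (ex = xs; ex < xs + nx; ++ex) arr[ey][ex][slot] = 0.0;
  PetscCall(DMStagVecRestoreArray(dm, xlocal, &arr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
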
+ Arguments corresponding to higher dimensions are ignored for 1D and 2D grids. These arguments may be set to `NULL` in this case. The number of extra partial elements is either 1 or 0. The value is 1 on right, top, and front non-periodic domain ("physical") boundaries, @@ -276,7 +276,7 @@ PetscErrorCode DMStagGetCorners(DM dm, PetscInt *x, PetscInt *y, PetscInt *z, Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetDOF - get number of DOF associated with each stratum of the grid Not Collective @@ -307,7 +307,7 @@ PetscErrorCode DMStagGetDOF(DM dm, PetscInt *dof0, PetscInt *dof1, PetscInt *dof PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetGhostCorners - return global element indices of the local region, including ghost points Not Collective @@ -345,7 +345,7 @@ PetscErrorCode DMStagGetGhostCorners(DM dm, PetscInt *x, PetscInt *y, PetscInt * PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetGlobalSizes - get global element counts Not Collective @@ -441,7 +441,7 @@ PetscErrorCode DMStagGetIsLastRank(DM dm, PetscBool *isLastRank0, PetscBool *isL PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetLocalSizes - get local elementwise sizes Not Collective @@ -473,7 +473,7 @@ PetscErrorCode DMStagGetLocalSizes(DM dm, PetscInt *m, PetscInt *n, PetscInt *p) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetNumRanks - get number of ranks in each direction in the global grid decomposition Not Collective @@ -502,7 +502,7 @@ PetscErrorCode DMStagGetNumRanks(DM dm, PetscInt *nRanks0, PetscInt *nRanks1, Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetEntries - get number of native entries in the global representation Not Collective @@ -533,7 +533,7 @@ PetscErrorCode DMStagGetEntries(DM dm, PetscInt *entries) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetEntriesLocal - get number of entries in the local representation Not Collective @@ -564,7 +564,7 @@ PetscErrorCode DMStagGetEntriesLocal(DM dm, PetscInt *entries) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetEntriesPerElement - get number of entries per element in the local representation Not Collective @@ -593,7 +593,7 @@ PetscErrorCode DMStagGetEntriesPerElement(DM dm, PetscInt *entriesPerElement) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetStencilType - get elementwise ghost/halo stencil type Not Collective @@ -618,7 +618,7 @@ PetscErrorCode DMStagGetStencilType(DM dm, DMStagStencilType *stencilType) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetStencilWidth - get elementwise stencil width Not Collective @@ -680,7 +680,7 @@ PetscErrorCode DMStagGetOwnershipRanges(DM dm, const PetscInt *lx[], const Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagCreateCompatibleDMStag - create a compatible `DMSTAG` with different dof/stratum Collective @@ -716,7 +716,7 @@ PetscErrorCode DMStagCreateCompatibleDMStag(DM dm, PetscInt dof0, PetscInt dof1, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetLocationSlot - get index to use in accessing raw local arrays Not Collective @@ -754,7 +754,7 @@ PetscErrorCode DMStagGetLocationSlot(DM dm, DMStagStencilLocation loc, PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagGetRefinementFactor - get refinement ratios in each direction Not Collective @@ -783,7 +783,7 @@ PetscErrorCode DMStagGetRefinementFactor(DM dm, PetscInt *refine_x, PetscInt *re PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagMigrateVec - transfer a vector associated with a `DMSTAG` to a vector associated with a 
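
DMStagCreateCompatibleDMStag() and DMStagMigrateVec(), both documented in this span, are meant to be used as a pair; a hedged sketch, assuming `dm` is a set-up 2D `DMSTAG` and `x` one of its global vectors:

static PetscErrorCode MigrateToCompatible(DM dm, Vec x)
{
  DM  dmTo;
  Vec xTo;

  PetscFunctionBeginUser;
  PetscCall(DMStagCreateCompatibleDMStag(dm, 0, 0, 1, 0, &dmTo)); /* one element-centered DOF only */
  PetscCall(DMCreateGlobalVector(dmTo, &xTo));
  PetscCall(DMStagMigrateVec(dm, x, dmTo, xTo)); /* transfers data on the common strata */
  PetscCall(VecDestroy(&xTo));
  PetscCall(DMDestroy(&dmTo));
  PetscFunctionReturn(PETSC_SUCCESS);
}
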
compatible `DMSTAG` Collective @@ -903,7 +903,7 @@ PetscErrorCode DMStagMigrateVec(DM dm, Vec vec, DM dmTo, Vec vecTo) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagPopulateLocalToGlobalInjective - populate an internal 1-to-1 local-to-global map Collective @@ -1056,7 +1056,7 @@ PetscErrorCode DMStagRestoreProductCoordinateArraysRead(DM dm, void *arrX, void PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetBoundaryTypes - set `DMSTAG` boundary types Logically Collective; boundaryType0, boundaryType1, and boundaryType2 must contain common values @@ -1099,7 +1099,7 @@ PetscErrorCode DMStagSetBoundaryTypes(DM dm, DMBoundaryType boundaryType0, DMBou Input Parameters: + dm - the `DMSTAG` object -- dmtype - DMtype for coordinates, either `DMSTAG` or `DMPRODUCT` +- dmtype - `DMType` for coordinates, either `DMSTAG` or `DMPRODUCT` Level: advanced @@ -1116,7 +1116,7 @@ PetscErrorCode DMStagSetCoordinateDMType(DM dm, DMType dmtype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetDOF - set dof/stratum Logically Collective; `dof0`, `dof1`, `dof2`, and `dof3` must contain common values @@ -1159,7 +1159,7 @@ PetscErrorCode DMStagSetDOF(DM dm, PetscInt dof0, PetscInt dof1, PetscInt dof2, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetNumRanks - set ranks in each direction in the global rank grid Logically Collective; `nRanks0`, `nRanks1`, and `nRanks2` must contain common values @@ -1198,7 +1198,7 @@ PetscErrorCode DMStagSetNumRanks(DM dm, PetscInt nRanks0, PetscInt nRanks1, Pets PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetStencilType - set elementwise ghost/halo stencil type Logically Collective; `stencilType` must contain common value @@ -1223,7 +1223,7 @@ PetscErrorCode DMStagSetStencilType(DM dm, DMStagStencilType stencilType) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetStencilWidth - set elementwise stencil width Logically Collective; `stencilWidth` must contain common value @@ -1252,7 +1252,7 @@ PetscErrorCode DMStagSetStencilWidth(DM dm, PetscInt stencilWidth) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetGlobalSizes - set global element counts in each direction Logically Collective; `N0`, `N1`, and `N2` must contain common values @@ -1288,16 +1288,16 @@ PetscErrorCode DMStagSetGlobalSizes(DM dm, PetscInt N0, PetscInt N1, PetscInt N2 PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetOwnershipRanges - set elements per rank in each direction Logically Collective; `lx`, `ly`, and `lz` must contain common values Input Parameters: + dm - the `DMSTAG` object -. lx - element counts for each rank in the x direction -. ly - element counts for each rank in the y direction -- lz - element counts for each rank in the z direction +. lx - element counts for each rank in the x direction, may be `NULL` +.
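
These setters allow a `DMSTAG` to be configured through the generic DMCreate()/DMSetType() path instead of DMStagCreate2d() and friends; a minimal sketch with illustrative sizes:

static PetscErrorCode CreateStagByHand(MPI_Comm comm, DM *dm)
{
  PetscFunctionBeginUser;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMSTAG));
  PetscCall(DMSetDimension(*dm, 2));
  PetscCall(DMStagSetGlobalSizes(*dm, 64, 64, 1)); /* third size unused in 2D */
  PetscCall(DMStagSetDOF(*dm, 0, 1, 1, 0));        /* face and element DOF only */
  PetscCall(DMStagSetStencilType(*dm, DMSTAG_STENCIL_BOX));
  PetscCall(DMStagSetStencilWidth(*dm, 1));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
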
ly - element counts for each rank in the y direction, may be `NULL` +- lz - element counts for each rank in the z direction, may be `NULL` Level: developer @@ -1306,7 +1306,7 @@ PetscErrorCode DMStagSetGlobalSizes(DM dm, PetscInt N0, PetscInt N1, PetscInt N2 .seealso: [](ch_stag), `DMSTAG`, `DMStagSetGlobalSizes()`, `DMStagGetOwnershipRanges()`, `DMDASetOwnershipRanges()` @*/ -PetscErrorCode DMStagSetOwnershipRanges(DM dm, PetscInt const *lx, PetscInt const *ly, PetscInt const *lz) +PetscErrorCode DMStagSetOwnershipRanges(DM dm, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[]) { DM_Stag *const stag = (DM_Stag *)dm->data; const PetscInt *lin[3]; @@ -1329,7 +1329,7 @@ PetscErrorCode DMStagSetOwnershipRanges(DM dm, PetscInt const *lx, PetscInt cons PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetRefinementFactor - set refinement ratios in each direction Logically Collective @@ -1359,7 +1359,7 @@ PetscErrorCode DMStagSetRefinementFactor(DM dm, PetscInt refine_x, PetscInt refi PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetUniformCoordinates - set `DMSTAG` coordinates to be a uniform grid Collective @@ -1408,7 +1408,7 @@ PetscErrorCode DMStagSetUniformCoordinates(DM dm, PetscReal xmin, PetscReal xmax PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetUniformCoordinatesExplicit - set `DMSTAG` coordinates to be a uniform grid, storing all values Collective @@ -1416,7 +1416,7 @@ PetscErrorCode DMStagSetUniformCoordinates(DM dm, PetscReal xmin, PetscReal xmax Input Parameters: + dm - the `DMSTAG` object . xmin - minimum global coordinate value in the x direction -. xmax - maximum global coordinate values in the x direction +. xmax - maximum global coordinate value in the x direction . ymin - minimum global coordinate value in the y direction . ymax - maximum global coordinate value in the y direction . zmin - minimum global coordinate value in the z direction @@ -1465,7 +1465,7 @@ PetscErrorCode DMStagSetUniformCoordinatesExplicit(DM dm, PetscReal xmin, PetscR PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMStagSetUniformCoordinatesProduct - create uniform coordinates, as a product of 1D arrays Set the coordinate `DM` to be a `DMPRODUCT` of 1D `DMSTAG` objects, each of which have a coordinate `DM` (also a 1d `DMSTAG`) holding uniform coordinates. @@ -1475,7 +1475,7 @@ PetscErrorCode DMStagSetUniformCoordinatesExplicit(DM dm, PetscReal xmin, PetscR Input Parameters: + dm - the `DMSTAG` object . xmin - minimum global coordinate value in the x direction -. xmax - maximum global coordinate values in the x direction +. xmax - maximum global coordinate value in the x direction . ymin - minimum global coordinate value in the y direction . ymax - maximum global coordinate value in the y direction . 
zmin - minimum global coordinate value in the z direction diff --git a/src/dm/impls/stag/tests/ex15.c b/src/dm/impls/stag/tests/ex15.c index 1e2985befa9..b035d3351cd 100644 --- a/src/dm/impls/stag/tests/ex15.c +++ b/src/dm/impls/stag/tests/ex15.c @@ -647,7 +647,7 @@ PetscErrorCode CreateSystem(DM dm, Mat *A, Vec *b) test: suffix: 1d_fssmooth_par nsize: 1 - requires: mumps + requires: mumps !single args: -dim 1 -stag_grid_x 256 -ksp_converged_reason -ksp_type fgmres -pc_type mg -pc_mg_levels 2 -pc_mg_galerkin -mg_coarse_pc_type lu -mg_coarse_pc_factor_mat_solver_type mumps -mg_levels_pc_type fieldsplit -mg_levels_pc_fieldsplit_detect_saddle_point test: diff --git a/src/dm/impls/stag/tutorials/ex4.c b/src/dm/impls/stag/tutorials/ex4.c index 6e52dfce899..9a1c6886ecc 100644 --- a/src/dm/impls/stag/tutorials/ex4.c +++ b/src/dm/impls/stag/tutorials/ex4.c @@ -2278,7 +2278,7 @@ static PetscErrorCode DumpSolution(Ctx ctx, PetscInt level, Vec x) test: suffix: direct_mumps - requires: mumps !complex + requires: mumps !complex !single nsize: 9 args: -dim 2 -coefficients layers -nondimensional 0 -stag_grid_x 13 -stag_grid_y 8 -pc_type lu -pc_factor_mat_solver_type mumps -ksp_converged_reason diff --git a/src/dm/impls/swarm/ftn-custom/makefile b/src/dm/impls/swarm/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/dm/impls/swarm/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/dm/impls/swarm/ftn-custom/zswarm.c b/src/dm/impls/swarm/ftn-custom/zswarm.c deleted file mode 100644 index f4aaa6861ca..00000000000 --- a/src/dm/impls/swarm/ftn-custom/zswarm.c +++ /dev/null @@ -1,41 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmswarmcreateglobalvectorfromfield_ DMSWARMCREATEGLOBALVECTORFROMFIELD - #define dmswarmdestroyglobalvectorfromfield_ DMSWARMDESTROYGLOBALVECTORFROMFIELD - #define dmswarmregisterpetscdatatypefield_ DMSWARMREGISTERPETSCDATATYPEFIELD -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmswarmcreateglobalvectorfromfield_ dmswarmcreateglobalvectorfromfield - #define dmswarmdestroyglobalvectorfromfield_ dmswarmdestroyglobalvectorfromfield - #define dmswarmregisterpetscdatatypefield_ dmswarmregisterpetscdatatypefield -#endif - -/* Definitions of Fortran Wrapper routines */ - -PETSC_EXTERN void dmswarmcreateglobalvectorfromfield_(DM *dm, char *name, Vec *vec, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *fieldname; - - FIXCHAR(name, lenN, fieldname); - *ierr = DMSwarmCreateGlobalVectorFromField(*dm, fieldname, vec); - FREECHAR(name, fieldname); -} - -PETSC_EXTERN void dmswarmdestroyglobalvectorfromfield_(DM *dm, char *name, Vec *vec, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *fieldname; - - FIXCHAR(name, lenN, fieldname); - *ierr = DMSwarmDestroyGlobalVectorFromField(*dm, fieldname, vec); - FREECHAR(name, fieldname); -} - -PETSC_EXTERN void dmswarmregisterpetscdatatypefield_(DM *dm, char *name, PetscInt *blocksize, PetscDataType *type, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *fieldname; - - FIXCHAR(name, lenN, fieldname); - *ierr = DMSwarmRegisterPetscDatatypeField(*dm, fieldname, *blocksize, *type); - FREECHAR(name, fieldname); -} diff --git a/src/dm/impls/swarm/swarm.c b/src/dm/impls/swarm/swarm.c index b79dc3cd536..77e1254053f 100644 --- 
a/src/dm/impls/swarm/swarm.c +++ b/src/dm/impls/swarm/swarm.c @@ -92,7 +92,7 @@ static PetscErrorCode VecView_Swarm(Vec v, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmVectorGetField - Gets the field from which to define a `Vec` object when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called @@ -117,7 +117,7 @@ PetscErrorCode DMSwarmVectorGetField(DM dm, const char *fieldname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmVectorDefineField - Sets the field from which to define a `Vec` object when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called @@ -666,7 +666,7 @@ static PetscErrorCode DMSwarmComputeMassMatrixSquare_Private(DM dmc, DM dmf, Mat PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmCreateMassMatrixSquare - Creates the block-diagonal of the square, M^T_p M_p, of the particle mass matrix M_p Collective @@ -703,7 +703,7 @@ PetscErrorCode DMSwarmCreateMassMatrixSquare(DM dmCoarse, DM dmFine, Mat *mass) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmCreateGlobalVectorFromField - Creates a `Vec` object sharing the array associated with a given field Collective @@ -717,7 +717,7 @@ PetscErrorCode DMSwarmCreateMassMatrixSquare(DM dmCoarse, DM dmFine, Mat *mass) Level: beginner - Notes: + Note: The vector must be returned using a matching call to `DMSwarmDestroyGlobalVectorFromField()`. .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyGlobalVectorFromField()` @@ -732,7 +732,7 @@ PetscErrorCode DMSwarmCreateGlobalVectorFromField(DM dm, const char fieldname[], PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmDestroyGlobalVectorFromField - Destroys the `Vec` object which share the array associated with a given field Collective @@ -756,7 +756,7 @@ PetscErrorCode DMSwarmDestroyGlobalVectorFromField(DM dm, const char fieldname[] PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmCreateLocalVectorFromField - Creates a `Vec` object sharing the array associated with a given field Collective @@ -784,7 +784,7 @@ PetscErrorCode DMSwarmCreateLocalVectorFromField(DM dm, const char fieldname[], PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmDestroyLocalVectorFromField - Destroys the `Vec` object which share the array associated with a given field Collective @@ -808,7 +808,7 @@ PetscErrorCode DMSwarmDestroyLocalVectorFromField(DM dm, const char fieldname[], PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmInitializeFieldRegister - Initiates the registration of fields to a `DMSWARM` Collective @@ -991,7 +991,7 @@ PetscErrorCode DMSwarmGetSize(DM dm, PetscInt *n) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmRegisterPetscDatatypeField - Register a field to a `DMSWARM` with a native PETSc data type Collective @@ -1101,7 +1101,7 @@ PetscErrorCode DMSwarmRegisterUserDatatypeField(DM dm, const char fieldname[], s /*@C DMSwarmGetField - Get access to the underlying array storing all entries associated with a registered field - Not Collective + Not Collective, No Fortran Support Input Parameters: + dm - a `DMSWARM` @@ -1138,7 +1138,7 @@ PetscErrorCode DMSwarmGetField(DM dm, const char fieldname[], PetscInt *blocksiz /*@C DMSwarmRestoreField - Restore access to the underlying array storing all entries associated with a registered field - Not Collective + Not Collective, No Fortran Support Input Parameters: + dm - a `DMSWARM` @@ -1386,8 +1386,13 @@ PetscErrorCode DMSwarmMigrate_GlobalToLocal_Basic(DM dm, PetscInt *globalsize); Notes: Users should call 
`DMSwarmCollectViewDestroy()` after they have finished computations associated with the collected points + Different collect methods are supported. See `DMSwarmSetCollectType()`. + Developer Note: + Create and Destroy routines create new objects that can get destroyed, they do not change the state + of the current object. + .seealso: `DM`, `DMSWARM`, `DMSwarmCollectViewDestroy()`, `DMSwarmSetCollectType()` @*/ PetscErrorCode DMSwarmCollectViewCreate(DM dm) @@ -1427,6 +1432,10 @@ PetscErrorCode DMSwarmCollectViewCreate(DM dm) Level: advanced + Developer Note: + Create and Destroy routines create new objects that can get destroyed, they do not change the state + of the current object. + .seealso: `DM`, `DMSWARM`, `DMSwarmCollectViewCreate()`, `DMSwarmSetCollectType()` @*/ PetscErrorCode DMSwarmCollectViewDestroy(DM dm) @@ -1724,7 +1733,7 @@ static PetscErrorCode DMView_Swarm(DM dm, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmGetCellSwarm - Extracts a single cell from the `DMSWARM` object, returns it as a single cell `DMSWARM`. The cell `DM` is filtered for fields of that cell, and the filtered `DM` is used as the cell `DM` of the new swarm object. @@ -1746,7 +1755,7 @@ static PetscErrorCode DMView_Swarm(DM dm, PetscViewer viewer) .seealso: `DM`, `DMSWARM`, `DMSwarmRestoreCellSwarm()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmGetCellSwarm(DM sw, PetscInt cellID, DM cellswarm) +PetscErrorCode DMSwarmGetCellSwarm(DM sw, PetscInt cellID, DM cellswarm) { DM_Swarm *original = (DM_Swarm *)sw->data; DMLabel label; @@ -1777,7 +1786,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmGetCellSwarm(DM sw, PetscInt cellID, DM cells PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmRestoreCellSwarm - Restores a `DMSWARM` object obtained with `DMSwarmGetCellSwarm()`. All fields are copied back into the parent swarm. Noncollective @@ -1794,7 +1803,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmGetCellSwarm(DM sw, PetscInt cellID, DM cells .seealso: `DM`, `DMSWARM`, `DMSwarmGetCellSwarm()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmRestoreCellSwarm(DM sw, PetscInt cellID, DM cellswarm) +PetscErrorCode DMSwarmRestoreCellSwarm(DM sw, PetscInt cellID, DM cellswarm) { DM dmc; PetscInt *pids, particles, p; @@ -1865,7 +1874,6 @@ PETSC_INTERN PetscErrorCode DMClone_Swarm(DM dm, DM *newdm) } /*MC - DMSWARM = "swarm" - A `DM` object used to represent arrays of data (fields) of arbitrary data type. This implementation was designed for particle methods in which the underlying data required to be represented is both (i) dynamic in length, (ii) and of arbitrary data type. @@ -1917,6 +1925,7 @@ PETSC_INTERN PetscErrorCode DMClone_Swarm(DM dm, DM *newdm) .seealso: `DM`, `DMSWARM`, `DMType`, `DMCreate()`, `DMSetType()` M*/ + PETSC_EXTERN PetscErrorCode DMCreate_Swarm(DM dm) { DM_Swarm *swarm; diff --git a/src/dm/impls/swarm/swarmpic.c b/src/dm/impls/swarm/swarmpic.c index 829441c7ff7..8b9fc44e49a 100644 --- a/src/dm/impls/swarm/swarmpic.c +++ b/src/dm/impls/swarm/swarmpic.c @@ -9,7 +9,7 @@ #include /* For CoordinatesRefToReal() */ /* Coordinate insertition/addition API */ -/*@C +/*@ DMSwarmSetPointsUniformCoordinates - Set point coordinates in a `DMSWARM` on a regular (ijk) grid Collective @@ -25,12 +25,12 @@ Notes: When using mode = `INSERT_VALUES`, this method will reset the number of particles in the `DMSWARM` - to be npoints[0]*npoints[1] (2D) or npoints[0]*npoints[1]*npoints[2] (3D). 
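
A sketch of the two insert modes this note describes: the first call sizes the swarm to 16 x 16 points, the second appends 4 x 4 more. It assumes `sw` is a set-up `DMSWARM` of type `DMSWARM_PIC` with a cell `DM` attached, and SeedParticles() is an illustrative name:

static PetscErrorCode SeedParticles(DM sw)
{
  PetscReal min[2] = {0.0, 0.0}, max[2] = {1.0, 1.0};
  PetscInt  npoints[2] = {16, 16}, nextra[2] = {4, 4};

  PetscFunctionBeginUser;
  PetscCall(DMSwarmSetPointsUniformCoordinates(sw, min, max, npoints, INSERT_VALUES)); /* resets the swarm */
  PetscCall(DMSwarmSetPointsUniformCoordinates(sw, min, max, nextra, ADD_VALUES));     /* appends points */
  PetscFunctionReturn(PETSC_SUCCESS);
}
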
When using mode = `ADD_VALUES`, + to be `npoints[0]` x `npoints[1]` (2D) or `npoints[0]` x `npoints[1]` x `npoints[2]` (3D). When using mode = `ADD_VALUES`, new points will be appended to any already existing in the `DMSWARM` .seealso: `DM`, `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSetPointsUniformCoordinates(DM dm, PetscReal min[], PetscReal max[], PetscInt npoints[], InsertMode mode) +PetscErrorCode DMSwarmSetPointsUniformCoordinates(DM dm, PetscReal min[], PetscReal max[], PetscInt npoints[], InsertMode mode) { PetscReal lmin[] = {PETSC_MAX_REAL, PETSC_MAX_REAL, PETSC_MAX_REAL}; PetscReal lmax[] = {PETSC_MIN_REAL, PETSC_MIN_REAL, PETSC_MIN_REAL}; @@ -156,7 +156,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSetPointsUniformCoordinates(DM dm, PetscReal PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmSetPointCoordinates - Set point coordinates in a `DMSWARM` from a user defined list Collective @@ -177,7 +177,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSetPointsUniformCoordinates(DM dm, PetscReal .seealso: `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType`, `DMSwarmSetPointsUniformCoordinates()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSetPointCoordinates(DM dm, PetscInt npoints, PetscReal coor[], PetscBool redundant, InsertMode mode) +PetscErrorCode DMSwarmSetPointCoordinates(DM dm, PetscInt npoints, PetscReal coor[], PetscBool redundant, InsertMode mode) { PetscReal gmin[] = {PETSC_MAX_REAL, PETSC_MAX_REAL, PETSC_MAX_REAL}; PetscReal gmax[] = {PETSC_MIN_REAL, PETSC_MIN_REAL, PETSC_MIN_REAL}; @@ -313,7 +313,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSetPointCoordinates(DM dm, PetscInt npoints, extern PetscErrorCode private_DMSwarmInsertPointsUsingCellDM_DA(DM, DM, DMSwarmPICLayoutType, PetscInt); extern PetscErrorCode private_DMSwarmInsertPointsUsingCellDM_PLEX(DM, DM, DMSwarmPICLayoutType, PetscInt); -/*@C +/*@ DMSwarmInsertPointsUsingCellDM - Insert point coordinates within each cell Not Collective @@ -339,7 +339,7 @@ extern PetscErrorCode private_DMSwarmInsertPointsUsingCellDM_PLEX(DM, DM, DMSwar .seealso: `DMSWARM`, `DMSwarmPICLayoutType`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmInsertPointsUsingCellDM(DM dm, DMSwarmPICLayoutType layout_type, PetscInt fill_param) +PetscErrorCode DMSwarmInsertPointsUsingCellDM(DM dm, DMSwarmPICLayoutType layout_type, PetscInt fill_param) { DM celldm; PetscBool isDA, isPLEX; @@ -384,7 +384,7 @@ extern PetscErrorCode private_DMSwarmSetPointCoordinatesCellwise_PLEX(DM, DM, Pe .seealso: `DMSWARM`, `DMSwarmSetCellDM()`, `DMSwarmInsertPointsUsingCellDM()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSetPointCoordinatesCellwise(DM dm, PetscInt npoints, PetscReal xi[]) +PetscErrorCode DMSwarmSetPointCoordinatesCellwise(DM dm, PetscInt npoints, PetscReal xi[]) { DM celldm; PetscBool isDA, isPLEX; @@ -401,7 +401,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSetPointCoordinatesCellwise(DM dm, PetscInt n PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmCreatePointPerCellCount - Count the number of points within all cells in the cell DM Not Collective @@ -420,7 +420,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSetPointCoordinatesCellwise(DM dm, PetscInt n .seealso: `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmCreatePointPerCellCount(DM dm, PetscInt *ncells, PetscInt **count) +PetscErrorCode DMSwarmCreatePointPerCellCount(DM dm, PetscInt *ncells, PetscInt **count) { PetscBool isvalid; 
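
DMSwarmInsertPointsUsingCellDM() and DMSwarmCreatePointPerCellCount(), whose documentation appears in this span, combine as in this sketch; `sw` is again an assumed, set-up `DMSWARM_PIC`, and releasing the count array with PetscFree() is an assumption of this sketch:

static PetscErrorCode FillAndCount(DM sw)
{
  PetscInt ncells, *count;

  PetscFunctionBeginUser;
  /* place a Gauss-style layout of 3 points per direction in every cell */
  PetscCall(DMSwarmInsertPointsUsingCellDM(sw, DMSWARMPIC_LAYOUT_GAUSS, 3));
  PetscCall(DMSwarmCreatePointPerCellCount(sw, &ncells, &count));
  if (ncells > 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, "cell 0 holds %" PetscInt_FMT " points\n", count[0]));
  PetscCall(PetscFree(count));
  PetscFunctionReturn(PETSC_SUCCESS);
}
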
PetscInt nel; @@ -675,7 +675,7 @@ PetscErrorCode DMSwarmComputeLocalSize(DM sw, PetscInt N, PetscProbFunc density) const PetscInt cell = c + cStart; PetscReal v0[3], J[9], invJ[9], detJ, detJp = 2. / (gmax[0] - gmin[0]), xr[3], den; - /*Have to transform quadrature points/weights to cell domain*/ + /* Have to transform quadrature points/weights to cell domain */ PetscCall(DMPlexComputeCellGeometryFEM(dm, cell, NULL, v0, J, invJ, &detJ)); PetscCall(PetscArrayzero(n_int, Ns)); for (q = 0; q < Nq; ++q) { diff --git a/src/dm/impls/swarm/swarmpic_da.c b/src/dm/impls/swarm/swarmpic_da.c index b25530b4c0a..bad549c500e 100644 --- a/src/dm/impls/swarm/swarmpic_da.c +++ b/src/dm/impls/swarm/swarmpic_da.c @@ -1,7 +1,6 @@ #include -#include -#include -#include +#include /*I "petscdmda.h" I*/ +#include /*I "petscdmswarm.h" I*/ #include "../src/dm/impls/swarm/data_bucket.h" static PetscErrorCode private_DMSwarmCreateCellLocalCoords_DA_Q1_Regular(PetscInt dim, PetscInt np[], PetscInt *_npoints, PetscReal **_xi) diff --git a/src/dm/impls/swarm/swarmpic_sort.c b/src/dm/impls/swarm/swarmpic_sort.c index d8265885d36..3bfec769f56 100644 --- a/src/dm/impls/swarm/swarmpic_sort.c +++ b/src/dm/impls/swarm/swarmpic_sort.c @@ -1,8 +1,6 @@ -#include -#include -#include -#include -#include +#include /*I "petscdmda.h" I*/ +#include /*I "petscdmplex.h" I*/ +#include /*I "petscdmswarm.h" I*/ static int sort_CompareSwarmPoint(const void *dataA, const void *dataB) { @@ -106,15 +104,17 @@ PetscErrorCode DMSwarmSortDestroy(DMSwarmSort *_ctx) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmSortGetNumberOfPointsPerCell - Returns the number of points in a cell Not Collective Input Parameters: -+ dm - a `DMSWARM` objects -. e - the index of the cell -- npoints - the number of points in the cell ++ dm - a `DMSWARM` object +- e - the index of the cell + + Output Parameter: +.
npoints - the number of points in the cell Level: advanced @@ -160,7 +160,7 @@ PetscErrorCode DMSwarmSortGetNumberOfPointsPerCell(DM dm, PetscInt e, PetscInt * .seealso: `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSortGetAccess()`, `DMSwarmSortGetNumberOfPointsPerCell()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSortGetPointsPerCell(DM dm, PetscInt e, PetscInt *npoints, PetscInt **pidlist) +PetscErrorCode DMSwarmSortGetPointsPerCell(DM dm, PetscInt e, PetscInt *npoints, PetscInt **pidlist) { DM_Swarm *swarm = (DM_Swarm *)dm->data; PetscInt points_per_cell; @@ -183,7 +183,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortGetPointsPerCell(DM dm, PetscInt e, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmSortGetAccess - Setups up a `DMSWARM` point sort context for efficient traversal of points within a cell Not Collective @@ -222,7 +222,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortGetPointsPerCell(DM dm, PetscInt e, Petsc .seealso: `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSortRestoreAccess()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSortGetAccess(DM dm) +PetscErrorCode DMSwarmSortGetAccess(DM dm) { DM_Swarm *swarm = (DM_Swarm *)dm->data; PetscInt ncells; @@ -265,8 +265,8 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortGetAccess(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - DMSwarmSortRestoreAccess - Invalidates the `DMSWARM` point sorting context +/*@ + DMSwarmSortRestoreAccess - Invalidates the `DMSWARM` point sorting context previously computed with `DMSwarmSortGetAccess()` Not Collective @@ -280,7 +280,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortGetAccess(DM dm) .seealso: `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSortGetAccess()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSortRestoreAccess(DM dm) +PetscErrorCode DMSwarmSortRestoreAccess(DM dm) { DM_Swarm *swarm = (DM_Swarm *)dm->data; @@ -291,7 +291,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortRestoreAccess(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmSortGetIsValid - Gets the isvalid flag associated with a `DMSWARM` point sorting context Not Collective @@ -306,7 +306,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortRestoreAccess(DM dm) .seealso: `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSortGetAccess()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSortGetIsValid(DM dm, PetscBool *isvalid) +PetscErrorCode DMSwarmSortGetIsValid(DM dm, PetscBool *isvalid) { DM_Swarm *swarm = (DM_Swarm *)dm->data; @@ -319,7 +319,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortGetIsValid(DM dm, PetscBool *isvalid) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmSortGetSizes - Gets the sizes associated with a `DMSWARM` point sorting context Not Collective @@ -335,7 +335,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmSortGetIsValid(DM dm, PetscBool *isvalid) .seealso: `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSortGetAccess()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmSortGetSizes(DM dm, PetscInt *ncells, PetscInt *npoints) +PetscErrorCode DMSwarmSortGetSizes(DM dm, PetscInt *ncells, PetscInt *npoints) { DM_Swarm *swarm = (DM_Swarm *)dm->data; diff --git a/src/dm/impls/swarm/swarmpic_view.c b/src/dm/impls/swarm/swarmpic_view.c index 5afe3d56f59..fafe8687851 100644 --- a/src/dm/impls/swarm/swarmpic_view.c +++ b/src/dm/impls/swarm/swarmpic_view.c @@ -1,7 +1,5 @@ -#include -#include -#include -#include +#include /*I "petscdmda.h" I*/ +#include /*I "petscdmswarm.h" I*/ #include "../src/dm/impls/swarm/data_bucket.h" static PetscErrorCode private_PetscViewerCreate_XDMF(MPI_Comm comm, const char filename[], PetscViewer *v) @@ -334,7 +332,7 @@ static PetscErrorCode 
private_ISView_Swarm_XDMF(IS is, PetscViewer viewer) .seealso: `DM`, `DMSWARM`, `DMSwarmViewXDMF()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmViewFieldsXDMF(DM dm, const char filename[], PetscInt nfields, const char *field_name_list[]) +PetscErrorCode DMSwarmViewFieldsXDMF(DM dm, const char filename[], PetscInt nfields, const char *field_name_list[]) { Vec dvec; PetscInt f, N; @@ -373,7 +371,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmViewFieldsXDMF(DM dm, const char filename[], PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSwarmViewXDMF - Write `DMSWARM` fields to an XDMF3 file Collective @@ -387,12 +385,12 @@ PETSC_EXTERN PetscErrorCode DMSwarmViewFieldsXDMF(DM dm, const char filename[], Note: Only fields user registered with data type `PETSC_DOUBLE` or `PETSC_INT` will be written into the file - Developer Notes: + Developer Note: This should be removed and replaced with the standard use of `PetscViewer` .seealso: `DM`, `DMSWARM`, `DMSwarmViewFieldsXDMF()` @*/ -PETSC_EXTERN PetscErrorCode DMSwarmViewXDMF(DM dm, const char filename[]) +PetscErrorCode DMSwarmViewXDMF(DM dm, const char filename[]) { DM_Swarm *swarm = (DM_Swarm *)dm->data; Vec dvec; diff --git a/src/dm/impls/swarm/tests/ex11.c b/src/dm/impls/swarm/tests/ex11.c index 9f483e7e54a..8fcb2200db4 100644 --- a/src/dm/impls/swarm/tests/ex11.c +++ b/src/dm/impls/swarm/tests/ex11.c @@ -88,7 +88,7 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) options->func = x2_x4; break; default: - PetscCheck(flg, comm, PETSC_ERR_ARG_WRONG, "Cannnot handle function \"%s\"", FunctionTypes[func]); + PetscCheck(flg, comm, PETSC_ERR_ARG_WRONG, "Cannot handle function \"%s\"", FunctionTypes[func]); } PetscOptionsEnd(); PetscFunctionReturn(PETSC_SUCCESS); diff --git a/src/dm/impls/swarm/tests/ex9.c b/src/dm/impls/swarm/tests/ex9.c deleted file mode 100644 index a6bd58d4799..00000000000 --- a/src/dm/impls/swarm/tests/ex9.c +++ /dev/null @@ -1,2160 +0,0 @@ -static char help[] = "Landau Damping test using Vlasov-Poisson equations\n"; - -/* - To run the code with particles sinusoidally perturbed in x space use the test "pp_poisson_bsi_1d_4" or "pp_poisson_bsi_2d_4" - According to Lukas, good damping results come at ~16k particles per cell - - To visualize the efield use - - -monitor_efield - - To visualize the swarm distribution use - - -ts_monitor_hg_swarm - - To visualize the particles, we can use - - -ts_monitor_sp_swarm -ts_monitor_sp_swarm_retain 0 -ts_monitor_sp_swarm_phase 1 -draw_size 500,500 - -*/ -#include -#include -#include -#include -#include -#include -#include -#include /* For norm and dot */ -#include /* For interpolation */ -#include -#include - -const char *EMTypes[] = {"primal", "mixed", "coulomb", "none", "EMType", "EM_", NULL}; -typedef enum { - EM_PRIMAL, - EM_MIXED, - EM_COULOMB, - EM_NONE -} EMType; - -typedef enum { - V0, - X0, - T0, - M0, - Q0, - PHI0, - POISSON, - VLASOV, - SIGMA, - NUM_CONSTANTS -} ConstantType; -typedef struct { - PetscScalar v0; /* Velocity scale, often the thermal velocity */ - PetscScalar t0; /* Time scale */ - PetscScalar x0; /* Space scale */ - PetscScalar m0; /* Mass scale */ - PetscScalar q0; /* Charge scale */ - PetscScalar kb; - PetscScalar epsi0; - PetscScalar phi0; /* Potential scale */ - PetscScalar poissonNumber; /* Non-Dimensional Poisson Number */ - PetscScalar vlasovNumber; /* Non-Dimensional Vlasov Number */ - PetscReal sigma; /* Nondimensional charge per length in x */ -} Parameter; - -typedef struct { - PetscBag bag; /* Problem parameters */ - PetscBool error; /* Flag for 
printing the error */ - PetscBool efield_monitor; /* Flag to show electric field monitor */ - PetscBool initial_monitor; - PetscBool periodic; /* Use periodic boundaries */ - PetscBool fake_1D; /* Run simulation in 2D but zeroing second dimension */ - PetscBool perturbed_weights; /* Uniformly sample x,v space with gaussian weights */ - PetscBool poisson_monitor; - PetscInt ostep; /* print the energy at each ostep time steps */ - PetscInt numParticles; - PetscReal timeScale; /* Nondimensionalizing time scale */ - PetscReal charges[2]; /* The charges of each species */ - PetscReal masses[2]; /* The masses of each species */ - PetscReal thermal_energy[2]; /* Thermal Energy (used to get other constants)*/ - PetscReal cosine_coefficients[2]; /*(alpha, k)*/ - PetscReal totalWeight; - PetscReal stepSize; - PetscInt steps; - PetscReal initVel; - EMType em; /* Type of electrostatic model */ - SNES snes; - PetscDraw drawef; - PetscDrawLG drawlg_ef; - PetscDraw drawic_x; - PetscDraw drawic_v; - PetscDraw drawic_w; - PetscDrawHG drawhgic_x; - PetscDrawHG drawhgic_v; - PetscDrawHG drawhgic_w; - PetscDraw EDraw; - PetscDraw RhoDraw; - PetscDraw PotDraw; - PetscDrawSP EDrawSP; - PetscDrawSP RhoDrawSP; - PetscDrawSP PotDrawSP; - PetscBool monitor_positions; /* Flag to show particle positins at each time step */ - PetscDraw positionDraw; - PetscDrawSP positionDrawSP; - -} AppCtx; - -static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) -{ - PetscFunctionBeginUser; - PetscInt d = 2; - options->error = PETSC_FALSE; - options->efield_monitor = PETSC_FALSE; - options->initial_monitor = PETSC_FALSE; - options->periodic = PETSC_FALSE; - options->fake_1D = PETSC_FALSE; - options->perturbed_weights = PETSC_FALSE; - options->poisson_monitor = PETSC_FALSE; - options->ostep = 100; - options->timeScale = 2.0e-14; - options->charges[0] = -1.0; - options->charges[1] = 1.0; - options->masses[0] = 1.0; - options->masses[1] = 1000.0; - options->thermal_energy[0] = 1.0; - options->thermal_energy[1] = 1.0; - options->cosine_coefficients[0] = 0.01; - options->cosine_coefficients[1] = 0.5; - options->initVel = 1; - options->totalWeight = 1.0; - options->drawef = NULL; - options->drawlg_ef = NULL; - options->drawic_x = NULL; - options->drawic_v = NULL; - options->drawic_w = NULL; - options->drawhgic_x = NULL; - options->drawhgic_v = NULL; - options->drawhgic_w = NULL; - options->EDraw = NULL; - options->RhoDraw = NULL; - options->PotDraw = NULL; - options->EDrawSP = NULL; - options->RhoDrawSP = NULL; - options->PotDrawSP = NULL; - options->em = EM_COULOMB; - options->numParticles = 32768; - options->monitor_positions = PETSC_FALSE; - options->positionDraw = NULL; - options->positionDrawSP = NULL; - - PetscOptionsBegin(comm, "", "Central Orbit Options", "DMSWARM"); - PetscCall(PetscOptionsBool("-error", "Flag to print the error", "ex9.c", options->error, &options->error, NULL)); - PetscCall(PetscOptionsBool("-monitor_efield", "Flag to show efield plot", "ex9.c", options->efield_monitor, &options->efield_monitor, NULL)); - PetscCall(PetscOptionsBool("-monitor_ics", "Flag to show initial condition histograms", "ex9.c", options->initial_monitor, &options->initial_monitor, NULL)); - PetscCall(PetscOptionsBool("-monitor_positions", "The flag to show particle positions", "ex9.c", options->monitor_positions, &options->monitor_positions, NULL)); - PetscCall(PetscOptionsBool("-monitor_poisson", "The flag to show charges, Efield and potential solve", "ex9.c", options->poisson_monitor, &options->poisson_monitor, NULL)); - 
PetscCall(PetscOptionsBool("-periodic", "Flag to use periodic particle boundaries", "ex9.c", options->periodic, &options->periodic, NULL)); - PetscCall(PetscOptionsBool("-fake_1D", "Flag to run a 1D simulation (but really in 2D)", "ex9.c", options->fake_1D, &options->fake_1D, NULL)); - PetscCall(PetscOptionsBool("-perturbed_weights", "Flag to run uniform sampling with perturbed weights", "ex9.c", options->perturbed_weights, &options->perturbed_weights, NULL)); - PetscCall(PetscOptionsInt("-output_step", "Number of time steps between output", "ex9.c", options->ostep, &options->ostep, NULL)); - PetscCall(PetscOptionsReal("-timeScale", "Nondimensionalizing time scale", "ex9.c", options->timeScale, &options->timeScale, NULL)); - PetscCall(PetscOptionsReal("-initial_velocity", "Initial velocity of perturbed particle", "ex9.c", options->initVel, &options->initVel, NULL)); - PetscCall(PetscOptionsReal("-total_weight", "Total weight of all particles", "ex9.c", options->totalWeight, &options->totalWeight, NULL)); - PetscCall(PetscOptionsRealArray("-cosine_coefficients", "Amplitude and frequency of cosine equation used in initialization", "ex9.c", options->cosine_coefficients, &d, NULL)); - PetscCall(PetscOptionsRealArray("-charges", "Species charges", "ex9.c", options->charges, &d, NULL)); - PetscCall(PetscOptionsEnum("-em_type", "Type of electrostatic solver", "ex9.c", EMTypes, (PetscEnum)options->em, (PetscEnum *)&options->em, NULL)); - PetscOptionsEnd(); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode SetupContext(DM dm, DM sw, AppCtx *user) -{ - PetscFunctionBeginUser; - if (user->efield_monitor) { - PetscDrawAxis axis_ef; - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_efield", 0, 300, 400, 300, &user->drawef)); - PetscCall(PetscDrawSetSave(user->drawef, "ex9_Efield.png")); - PetscCall(PetscDrawSetFromOptions(user->drawef)); - PetscCall(PetscDrawLGCreate(user->drawef, 1, &user->drawlg_ef)); - PetscCall(PetscDrawLGGetAxis(user->drawlg_ef, &axis_ef)); - PetscCall(PetscDrawAxisSetLabels(axis_ef, "Electron Electric Field", "time", "E_max")); - PetscCall(PetscDrawLGSetLimits(user->drawlg_ef, 0., user->steps * user->stepSize, -10., 0.)); - PetscCall(PetscDrawAxisSetLimits(axis_ef, 0., user->steps * user->stepSize, -10., 0.)); - } - if (user->initial_monitor) { - PetscDrawAxis axis1, axis2, axis3; - PetscReal dmboxlower[2], dmboxupper[2]; - PetscInt dim, cStart, cEnd; - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMGetBoundingBox(dm, dmboxlower, dmboxupper)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); - - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_initial_conditions_x", 0, 300, 400, 300, &user->drawic_x)); - PetscCall(PetscDrawSetSave(user->drawic_x, "ex9_ic_x.png")); - PetscCall(PetscDrawSetFromOptions(user->drawic_x)); - PetscCall(PetscDrawHGCreate(user->drawic_x, dim, &user->drawhgic_x)); - PetscCall(PetscDrawHGGetAxis(user->drawhgic_x, &axis1)); - PetscCall(PetscDrawHGSetNumberBins(user->drawhgic_x, cEnd - cStart)); - PetscCall(PetscDrawAxisSetLabels(axis1, "Initial X Distribution", "X", "counts")); - PetscCall(PetscDrawAxisSetLimits(axis1, dmboxlower[0], dmboxupper[0], 0, 1500)); - - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_initial_conditions_v", 400, 300, 400, 300, &user->drawic_v)); - PetscCall(PetscDrawSetSave(user->drawic_v, "ex9_ic_v.png")); - PetscCall(PetscDrawSetFromOptions(user->drawic_v)); - PetscCall(PetscDrawHGCreate(user->drawic_v, dim, &user->drawhgic_v)); - 
PetscCall(PetscDrawHGGetAxis(user->drawhgic_v, &axis2)); - PetscCall(PetscDrawHGSetNumberBins(user->drawhgic_v, 1000)); - PetscCall(PetscDrawAxisSetLabels(axis2, "Initial V_x Distribution", "V", "counts")); - PetscCall(PetscDrawAxisSetLimits(axis2, -1, 1, 0, 1500)); - - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_initial_conditions_w", 800, 300, 400, 300, &user->drawic_w)); - PetscCall(PetscDrawSetSave(user->drawic_w, "ex9_ic_w.png")); - PetscCall(PetscDrawSetFromOptions(user->drawic_w)); - PetscCall(PetscDrawHGCreate(user->drawic_w, dim, &user->drawhgic_w)); - PetscCall(PetscDrawHGGetAxis(user->drawhgic_w, &axis3)); - PetscCall(PetscDrawHGSetNumberBins(user->drawhgic_w, 10)); - PetscCall(PetscDrawAxisSetLabels(axis3, "Initial W Distribution", "weight", "counts")); - PetscCall(PetscDrawAxisSetLimits(axis3, 0, 0.01, 0, 5000)); - } - if (user->monitor_positions) { - PetscDrawAxis axis; - - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "position_monitor_species1", 0, 0, 400, 300, &user->positionDraw)); - PetscCall(PetscDrawSetFromOptions(user->positionDraw)); - PetscCall(PetscDrawSPCreate(user->positionDraw, 10, &user->positionDrawSP)); - PetscCall(PetscDrawSPSetDimension(user->positionDrawSP, 1)); - PetscCall(PetscDrawSPGetAxis(user->positionDrawSP, &axis)); - PetscCall(PetscDrawSPReset(user->positionDrawSP)); - PetscCall(PetscDrawAxisSetLabels(axis, "Particles", "x", "v")); - PetscCall(PetscDrawSetSave(user->positionDraw, "ex9_pos.png")); - } - if (user->poisson_monitor) { - PetscDrawAxis axis_E, axis_Rho, axis_Pot; - - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "Efield_monitor", 0, 0, 400, 300, &user->EDraw)); - PetscCall(PetscDrawSetFromOptions(user->EDraw)); - PetscCall(PetscDrawSPCreate(user->EDraw, 10, &user->EDrawSP)); - PetscCall(PetscDrawSPSetDimension(user->EDrawSP, 1)); - PetscCall(PetscDrawSPGetAxis(user->EDrawSP, &axis_E)); - PetscCall(PetscDrawSPReset(user->EDrawSP)); - PetscCall(PetscDrawAxisSetLabels(axis_E, "Particles", "x", "E")); - PetscCall(PetscDrawSetSave(user->EDraw, "ex9_E_spatial.png")); - - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "rho_monitor", 0, 0, 400, 300, &user->RhoDraw)); - PetscCall(PetscDrawSetFromOptions(user->RhoDraw)); - PetscCall(PetscDrawSPCreate(user->RhoDraw, 10, &user->RhoDrawSP)); - PetscCall(PetscDrawSPSetDimension(user->RhoDrawSP, 1)); - PetscCall(PetscDrawSPGetAxis(user->RhoDrawSP, &axis_Rho)); - PetscCall(PetscDrawSPReset(user->RhoDrawSP)); - PetscCall(PetscDrawAxisSetLabels(axis_Rho, "Particles", "x", "rho")); - PetscCall(PetscDrawSetSave(user->RhoDraw, "ex9_rho_spatial.png")); - - PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "potential_monitor", 0, 0, 400, 300, &user->PotDraw)); - PetscCall(PetscDrawSetFromOptions(user->PotDraw)); - PetscCall(PetscDrawSPCreate(user->PotDraw, 10, &user->PotDrawSP)); - PetscCall(PetscDrawSPSetDimension(user->PotDrawSP, 1)); - PetscCall(PetscDrawSPGetAxis(user->PotDrawSP, &axis_Pot)); - PetscCall(PetscDrawSPReset(user->PotDrawSP)); - PetscCall(PetscDrawAxisSetLabels(axis_Pot, "Particles", "x", "potential")); - PetscCall(PetscDrawSetSave(user->PotDraw, "ex9_phi_spatial.png")); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode DestroyContext(AppCtx *user) -{ - PetscFunctionBeginUser; - PetscCall(PetscDrawLGDestroy(&user->drawlg_ef)); - PetscCall(PetscDrawDestroy(&user->drawef)); - PetscCall(PetscDrawHGDestroy(&user->drawhgic_x)); - PetscCall(PetscDrawDestroy(&user->drawic_x)); - PetscCall(PetscDrawHGDestroy(&user->drawhgic_v)); - 
PetscCall(PetscDrawDestroy(&user->drawic_v)); - PetscCall(PetscDrawHGDestroy(&user->drawhgic_w)); - PetscCall(PetscDrawDestroy(&user->drawic_w)); - PetscCall(PetscDrawSPDestroy(&user->positionDrawSP)); - PetscCall(PetscDrawDestroy(&user->positionDraw)); - - PetscCall(PetscDrawSPDestroy(&user->EDrawSP)); - PetscCall(PetscDrawDestroy(&user->EDraw)); - PetscCall(PetscDrawSPDestroy(&user->RhoDrawSP)); - PetscCall(PetscDrawDestroy(&user->RhoDraw)); - PetscCall(PetscDrawSPDestroy(&user->PotDrawSP)); - PetscCall(PetscDrawDestroy(&user->PotDraw)); - - PetscCall(PetscBagDestroy(&user->bag)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode computeParticleMoments(DM sw, PetscReal moments[3], AppCtx *user) -{ - DM dm; - const PetscReal *coords; - const PetscScalar *w; - PetscReal mom[3] = {0.0, 0.0, 0.0}; - PetscInt cell, cStart, cEnd, dim; - - PetscFunctionBeginUser; - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetCellDM(sw, &dm)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); - PetscCall(DMSwarmSortGetAccess(sw)); - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&w)); - for (cell = cStart; cell < cEnd; ++cell) { - PetscInt *pidx; - PetscInt Np, p, d; - - PetscCall(DMSwarmSortGetPointsPerCell(sw, cell, &Np, &pidx)); - for (p = 0; p < Np; ++p) { - const PetscInt idx = pidx[p]; - const PetscReal *c = &coords[idx * dim]; - - mom[0] += PetscRealPart(w[idx]); - mom[1] += PetscRealPart(w[idx]) * c[0]; - for (d = 0; d < dim; ++d) mom[2] += PetscRealPart(w[idx]) * c[d] * c[d]; - } - PetscCall(PetscFree(pidx)); - } - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&w)); - PetscCall(DMSwarmSortRestoreAccess(sw)); - PetscCall(MPIU_Allreduce(mom, moments, 3, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)sw))); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static void f0_1(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) -{ - f0[0] = u[0]; -} - -static void f0_x(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) -{ - f0[0] = x[0] * u[0]; -} - -static void f0_r2(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) -{ - PetscInt d; - - f0[0] = 0.0; - for (d = 0; d < dim; ++d) f0[0] += PetscSqr(x[d]) * u[0]; -} - -static PetscErrorCode computeFEMMoments(DM dm, Vec u, PetscReal moments[3], AppCtx *user) -{ - PetscDS prob; - PetscScalar mom; - PetscInt field = 0; - - 
PetscFunctionBeginUser; - PetscCall(DMGetDS(dm, &prob)); - PetscCall(PetscDSSetObjective(prob, field, &f0_1)); - PetscCall(DMPlexComputeIntegralFEM(dm, u, &mom, user)); - moments[0] = PetscRealPart(mom); - PetscCall(PetscDSSetObjective(prob, field, &f0_x)); - PetscCall(DMPlexComputeIntegralFEM(dm, u, &mom, user)); - moments[1] = PetscRealPart(mom); - PetscCall(PetscDSSetObjective(prob, field, &f0_r2)); - PetscCall(DMPlexComputeIntegralFEM(dm, u, &mom, user)); - moments[2] = PetscRealPart(mom); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode MonitorEField(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx) -{ - AppCtx *user = (AppCtx *)ctx; - DM dm, sw; - PetscReal *E; - PetscReal Enorm = 0., lgEnorm, lgEmax, sum = 0., Emax = 0., temp = 0., *weight, chargesum = 0.; - PetscReal *x, *v; - PetscInt *species, dim, p, d, Np, cStart, cEnd; - PetscReal pmoments[3]; /* \int f, \int x f, \int r^2 f */ - PetscReal fmoments[3]; /* \int \hat f, \int x \hat f, \int r^2 \hat f */ - Vec rho; - - PetscFunctionBeginUser; - if (step < 0) PetscFunctionReturn(PETSC_SUCCESS); - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMSwarmGetCellDM(sw, &dm)); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(DMSwarmSortGetAccess(sw)); - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); - PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v)); - PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E)); - PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species)); - PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); - - for (p = 0; p < Np; ++p) { - for (d = 0; d < 1; ++d) { - temp = PetscAbsReal(E[p * dim + d]); - if (temp > Emax) Emax = temp; - } - Enorm += PetscSqrtReal(E[p * dim] * E[p * dim]); - sum += E[p * dim]; - chargesum += user->charges[0] * weight[p]; - } - lgEnorm = Enorm != 0 ? PetscLog10Real(Enorm) : -16.; - lgEmax = Emax != 0 ? 
PetscLog10Real(Emax) : -16.;
-
-  PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x));
-  PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v));
-  PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight));
-  PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E));
-  PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species));
-
-  Parameter *param;
-  PetscCall(PetscBagGetData(user->bag, (void **)&param));
-  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "charges", &rho));
-  if (user->em == EM_PRIMAL) {
-    PetscCall(computeParticleMoments(sw, pmoments, user));
-    PetscCall(computeFEMMoments(dm, rho, fmoments, user));
-  } else if (user->em == EM_MIXED) {
-    DM       potential_dm;
-    IS       potential_IS;
-    PetscInt fields = 1;
-    PetscCall(DMCreateSubDM(dm, 1, &fields, &potential_IS, &potential_dm));
-
-    PetscCall(computeParticleMoments(sw, pmoments, user));
-    PetscCall(computeFEMMoments(potential_dm, rho, fmoments, user));
-    PetscCall(DMDestroy(&potential_dm));
-    PetscCall(ISDestroy(&potential_IS));
-  }
-  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "charges", &rho));
-
-  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%f\t%+e\t%e\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\n", (double)t, (double)sum, (double)Enorm, (double)lgEnorm, (double)Emax, (double)lgEmax, (double)chargesum, (double)pmoments[0], (double)pmoments[1], (double)pmoments[2], (double)fmoments[0], (double)fmoments[1], (double)fmoments[2]));
-  PetscCall(PetscDrawLGAddPoint(user->drawlg_ef, &t, &lgEmax));
-  PetscCall(PetscDrawLGDraw(user->drawlg_ef));
-  PetscCall(PetscDrawSave(user->drawef));
-  PetscFunctionReturn(PETSC_SUCCESS);
-}
-
-PetscErrorCode MonitorInitialConditions(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx)
-{
-  AppCtx            *user = (AppCtx *)ctx;
-  DM                 dm, sw;
-  const PetscScalar *u;
-  PetscReal         *weight, *pos, *vel;
-  PetscInt           dim, p, Np, cStart, cEnd;
-
-  PetscFunctionBegin;
-  if (step < 0) PetscFunctionReturn(PETSC_SUCCESS); /* -1 indicates interpolated solution */
-  PetscCall(TSGetDM(ts, &sw));
-  PetscCall(DMSwarmGetCellDM(sw, &dm));
-  PetscCall(DMGetDimension(sw, &dim));
-  PetscCall(DMSwarmGetLocalSize(sw, &Np));
-  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
-
-  if (step == 0) {
-    PetscCall(PetscDrawHGReset(user->drawhgic_x));
-    PetscCall(PetscDrawHGGetDraw(user->drawhgic_x, &user->drawic_x));
-    PetscCall(PetscDrawClear(user->drawic_x));
-    PetscCall(PetscDrawFlush(user->drawic_x));
-
-    PetscCall(PetscDrawHGReset(user->drawhgic_v));
-    PetscCall(PetscDrawHGGetDraw(user->drawhgic_v, &user->drawic_v));
-    PetscCall(PetscDrawClear(user->drawic_v));
-    PetscCall(PetscDrawFlush(user->drawic_v));
-
-    PetscCall(PetscDrawHGReset(user->drawhgic_w));
-    PetscCall(PetscDrawHGGetDraw(user->drawhgic_w, &user->drawic_w));
-    PetscCall(PetscDrawClear(user->drawic_w));
-    PetscCall(PetscDrawFlush(user->drawic_w));
-
-    PetscCall(VecGetArrayRead(U, &u));
-    PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&vel));
-    PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight));
-    PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&pos));
-
-    PetscCall(VecGetLocalSize(U, &Np));
-    Np /= dim * 2;
-    for (p = 0; p < Np; ++p) {
-      PetscCall(PetscDrawHGAddValue(user->drawhgic_x, pos[p * dim]));
-      PetscCall(PetscDrawHGAddValue(user->drawhgic_v, vel[p * dim]));
-      PetscCall(PetscDrawHGAddValue(user->drawhgic_w, weight[p]));
-    }
-
-    PetscCall(VecRestoreArrayRead(U, &u));
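/* Layout assumed when recomputing Np above (a sketch, not authoritative):
   the TS solution U interleaves a position block and a velocity block per
   particle,
     U = [x_0, v_0, x_1, v_1, ...],  each block of length dim,
   so VecGetLocalSize returns 2*dim*Np and the blocks are recovered as
     x_p = &u[(p * 2 + 0) * dim],  v_p = &u[(p * 2 + 1) * dim],
   the same indexing used by RHSFunction and ComputeError below. */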
PetscCall(PetscDrawHGDraw(user->drawhgic_x)); - PetscCall(PetscDrawHGSave(user->drawhgic_x)); - - PetscCall(PetscDrawHGDraw(user->drawhgic_v)); - PetscCall(PetscDrawHGSave(user->drawhgic_v)); - - PetscCall(PetscDrawHGDraw(user->drawhgic_w)); - PetscCall(PetscDrawHGSave(user->drawhgic_w)); - - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&pos)); - PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&vel)); - PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode MonitorPositions_2D(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx) -{ - AppCtx *user = (AppCtx *)ctx; - DM dm, sw; - PetscScalar *x, *v, *weight; - PetscReal lower[3], upper[3], speed; - const PetscInt *s; - PetscInt dim, cStart, cEnd, c; - - PetscFunctionBeginUser; - if (step > 0 && step % user->ostep == 0) { - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMSwarmGetCellDM(sw, &dm)); - PetscCall(DMGetDimension(dm, &dim)); - PetscCall(DMGetBoundingBox(dm, lower, upper)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); - PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v)); - PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight)); - PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&s)); - PetscCall(DMSwarmSortGetAccess(sw)); - PetscCall(PetscDrawSPReset(user->positionDrawSP)); - PetscCall(PetscDrawSPSetLimits(user->positionDrawSP, lower[0], upper[0], lower[1], upper[1])); - PetscCall(PetscDrawSPSetLimits(user->positionDrawSP, lower[0], upper[0], -12, 12)); - for (c = 0; c < cEnd - cStart; ++c) { - PetscInt *pidx, Npc, q; - PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx)); - for (q = 0; q < Npc; ++q) { - const PetscInt p = pidx[q]; - if (s[p] == 0) { - speed = PetscSqrtReal(PetscSqr(v[p * dim]) + PetscSqr(v[p * dim + 1])); - if (dim == 1 || user->fake_1D) { - PetscCall(PetscDrawSPAddPointColorized(user->positionDrawSP, &x[p * dim], &x[p * dim + 1], &speed)); - } else { - PetscCall(PetscDrawSPAddPointColorized(user->positionDrawSP, &x[p * dim], &v[p * dim], &speed)); - } - } else if (s[p] == 1) { - PetscCall(PetscDrawSPAddPoint(user->positionDrawSP, &x[p * dim], &v[p * dim])); - } - } - PetscCall(PetscFree(pidx)); - } - PetscCall(PetscDrawSPDraw(user->positionDrawSP, PETSC_TRUE)); - PetscCall(PetscDrawSave(user->positionDraw)); - PetscCall(DMSwarmSortRestoreAccess(sw)); - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); - PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight)); - PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v)); - PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&s)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode MonitorPoisson(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx) -{ - AppCtx *user = (AppCtx *)ctx; - DM dm, sw; - PetscScalar *x, *E, *weight, *pot, *charges; - PetscReal lower[3], upper[3], xval; - PetscInt dim, cStart, cEnd, c; - - PetscFunctionBeginUser; - if (step > 0 && step % user->ostep == 0) { - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMSwarmGetCellDM(sw, &dm)); - PetscCall(DMGetDimension(dm, &dim)); - PetscCall(DMGetBoundingBox(dm, lower, upper)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); - - PetscCall(PetscDrawSPReset(user->RhoDrawSP)); - PetscCall(PetscDrawSPReset(user->EDrawSP)); - 
PetscCall(PetscDrawSPReset(user->PotDrawSP));
-    PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x));
-    PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E));
-    PetscCall(DMSwarmGetField(sw, "potential", NULL, NULL, (void **)&pot));
-    PetscCall(DMSwarmGetField(sw, "charges", NULL, NULL, (void **)&charges));
-    PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight));
-
-    PetscCall(DMSwarmSortGetAccess(sw));
-    for (c = 0; c < cEnd - cStart; ++c) {
-      PetscReal Esum = 0.0;
-      PetscInt *pidx, Npc, q;
-      PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx));
-      for (q = 0; q < Npc; ++q) {
-        const PetscInt p = pidx[q];
-        Esum += E[p * dim];
-      }
-      xval = (c + 0.5) * ((upper[0] - lower[0]) / (cEnd - cStart));
-      PetscCall(PetscDrawSPAddPoint(user->EDrawSP, &xval, &Esum));
-      PetscCall(PetscFree(pidx));
-    }
-    for (c = 0; c < (cEnd - cStart); ++c) {
-      xval = (c + 0.5) * ((upper[0] - lower[0]) / (cEnd - cStart));
-      PetscCall(PetscDrawSPAddPoint(user->RhoDrawSP, &xval, &charges[c]));
-      PetscCall(PetscDrawSPAddPoint(user->PotDrawSP, &xval, &pot[c]));
-    }
-    PetscCall(PetscDrawSPDraw(user->RhoDrawSP, PETSC_TRUE));
-    PetscCall(PetscDrawSave(user->RhoDraw));
-    PetscCall(PetscDrawSPDraw(user->EDrawSP, PETSC_TRUE));
-    PetscCall(PetscDrawSave(user->EDraw));
-    PetscCall(PetscDrawSPDraw(user->PotDrawSP, PETSC_TRUE));
-    PetscCall(PetscDrawSave(user->PotDraw));
-    PetscCall(DMSwarmSortRestoreAccess(sw));
-    PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x));
-    PetscCall(DMSwarmRestoreField(sw, "potential", NULL, NULL, (void **)&pot));
-    PetscCall(DMSwarmRestoreField(sw, "charges", NULL, NULL, (void **)&charges));
-    PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight));
-    PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E));
-  }
-  PetscFunctionReturn(PETSC_SUCCESS);
-}
-
-static PetscErrorCode SetupParameters(MPI_Comm comm, AppCtx *ctx)
-{
-  PetscBag   bag;
-  Parameter *p;
-
-  PetscFunctionBeginUser;
-  /* setup PETSc parameter bag */
-  PetscCall(PetscBagGetData(ctx->bag, (void **)&p));
-  PetscCall(PetscBagSetName(ctx->bag, "par", "Vlasov-Poisson Parameters"));
-  bag = ctx->bag;
-  PetscCall(PetscBagRegisterScalar(bag, &p->v0, 1.0, "v0", "Velocity scale, m/s"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->t0, 1.0, "t0", "Time scale, s"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->x0, 1.0, "x0", "Space scale, m"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->phi0, 1.0, "phi0", "Potential scale, kg*m^2/(A*s^3)"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->q0, 1.0, "q0", "Charge Scale, A*s"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->m0, 1.0, "m0", "Mass Scale, kg"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->epsi0, 1.0, "epsi0", "Permittivity of Free Space, A^2*s^4/(kg*m^3)"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->kb, 1.0, "kb", "Boltzmann Constant, m^2*kg/(s^2*K)"));
-
-  PetscCall(PetscBagRegisterScalar(bag, &p->sigma, 1.0, "sigma", "Charge per unit area, C/m^2"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->poissonNumber, 1.0, "poissonNumber", "Non-Dimensional Poisson Number"));
-  PetscCall(PetscBagRegisterScalar(bag, &p->vlasovNumber, 1.0, "vlasovNumber", "Non-Dimensional Vlasov Number"));
-  PetscCall(PetscBagSetFromOptions(bag));
-  {
-    PetscViewer       viewer;
-    PetscViewerFormat format;
-    PetscBool         flg;
-
-    PetscCall(PetscOptionsGetViewer(comm, NULL, NULL, "-param_view", &viewer, &format, &flg));
-    if (flg) {
-      PetscCall(PetscViewerPushFormat(viewer, format));
-      PetscCall(PetscBagView(bag, viewer));
PetscCall(PetscViewerFlush(viewer)); - PetscCall(PetscViewerPopFormat(viewer)); - PetscCall(PetscOptionsRestoreViewer(&viewer)); - } - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) -{ - PetscFunctionBeginUser; - PetscCall(DMCreate(comm, dm)); - PetscCall(DMSetType(*dm, DMPLEX)); - PetscCall(DMSetFromOptions(*dm)); - PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view")); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static void ion_f0(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) -{ - f0[0] = -constants[SIGMA]; -} - -static void laplacian_f1(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[]) -{ - PetscInt d; - for (d = 0; d < dim; ++d) f1[d] = u_x[d]; -} - -static void laplacian_g3(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[]) -{ - PetscInt d; - for (d = 0; d < dim; ++d) g3[d * dim + d] = 1.0; -} - -static PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx) -{ - *u = 0.0; - return PETSC_SUCCESS; -} - -/* - / I -grad\ / q \ = /0\ - \-div 0 / \phi/ \f/ -*/ -static void f0_q(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) -{ - for (PetscInt d = 0; d < dim; ++d) f0[d] += u[uOff[0] + d]; -} - -static void f1_q(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[]) -{ - for (PetscInt d = 0; d < dim; ++d) f1[d * dim + d] = u[uOff[1]]; -} - -static void f0_phi_backgroundCharge(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) -{ - f0[0] += 
constants[SIGMA]; - for (PetscInt d = 0; d < dim; ++d) f0[0] += u_x[uOff_x[0] + d * dim + d]; -} - -/* Boundary residual. Dirichlet boundary for u means u_bdy=p*n */ -static void g0_qq(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g0[]) -{ - for (PetscInt d = 0; d < dim; ++d) g0[d * dim + d] = 1.0; -} - -static void g2_qphi(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g2[]) -{ - for (PetscInt d = 0; d < dim; ++d) g2[d * dim + d] = 1.0; -} - -static void g1_phiq(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g1[]) -{ - for (PetscInt d = 0; d < dim; ++d) g1[d * dim + d] = 1.0; -} - -static PetscErrorCode CreateFEM(DM dm, AppCtx *user) -{ - PetscFE fephi, feq; - PetscDS ds; - PetscBool simplex; - PetscInt dim; - - PetscFunctionBeginUser; - PetscCall(DMGetDimension(dm, &dim)); - PetscCall(DMPlexIsSimplex(dm, &simplex)); - if (user->em == EM_MIXED) { - DMLabel label; - const PetscInt id = 1; - - PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, dim, simplex, "field_", PETSC_DETERMINE, &feq)); - PetscCall(PetscObjectSetName((PetscObject)feq, "field")); - PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, 1, simplex, "potential_", PETSC_DETERMINE, &fephi)); - PetscCall(PetscObjectSetName((PetscObject)fephi, "potential")); - PetscCall(PetscFECopyQuadrature(feq, fephi)); - PetscCall(DMSetField(dm, 0, NULL, (PetscObject)feq)); - PetscCall(DMSetField(dm, 1, NULL, (PetscObject)fephi)); - PetscCall(DMCreateDS(dm)); - PetscCall(PetscFEDestroy(&fephi)); - PetscCall(PetscFEDestroy(&feq)); - - PetscCall(DMGetLabel(dm, "marker", &label)); - PetscCall(DMGetDS(dm, &ds)); - - PetscCall(PetscDSSetResidual(ds, 0, f0_q, f1_q)); - PetscCall(PetscDSSetResidual(ds, 1, f0_phi_backgroundCharge, NULL)); - PetscCall(PetscDSSetJacobian(ds, 0, 0, g0_qq, NULL, NULL, NULL)); - PetscCall(PetscDSSetJacobian(ds, 0, 1, NULL, NULL, g2_qphi, NULL)); - PetscCall(PetscDSSetJacobian(ds, 1, 0, NULL, g1_phiq, NULL, NULL)); - - PetscCall(DMAddBoundary(dm, DM_BC_ESSENTIAL, "wall", label, 1, &id, 0, 0, NULL, (void (*)(void))zero, NULL, NULL, NULL)); - - } else if (user->em == EM_PRIMAL) { - MatNullSpace nullsp; - PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, 1, simplex, NULL, PETSC_DETERMINE, &fephi)); - PetscCall(PetscObjectSetName((PetscObject)fephi, "potential")); - PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fephi)); - PetscCall(DMCreateDS(dm)); - PetscCall(DMGetDS(dm, &ds)); - PetscCall(PetscDSSetResidual(ds, 0, ion_f0, laplacian_f1)); - 
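/* The EM_PRIMAL branch discretizes the Poisson problem in PETSc's pointwise
   weak form: find phi such that, for all test functions psi,
     \int_\Omega grad phi . grad psi = \int_\Omega sigma psi,
   implemented with f0 = -sigma (ion_f0) and f1 = grad phi (laplacian_f1);
   the only nonzero Jacobian term is the constant g3 = I, set just below. */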
PetscCall(PetscDSSetJacobian(ds, 0, 0, NULL, NULL, NULL, laplacian_g3)); - PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)dm), PETSC_TRUE, 0, NULL, &nullsp)); - PetscCall(PetscObjectCompose((PetscObject)fephi, "nullspace", (PetscObject)nullsp)); - PetscCall(MatNullSpaceDestroy(&nullsp)); - PetscCall(PetscFEDestroy(&fephi)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode CreatePoisson(DM dm, AppCtx *user) -{ - SNES snes; - Mat J; - MatNullSpace nullSpace; - - PetscFunctionBeginUser; - PetscCall(CreateFEM(dm, user)); - PetscCall(SNESCreate(PetscObjectComm((PetscObject)dm), &snes)); - PetscCall(SNESSetOptionsPrefix(snes, "em_")); - PetscCall(SNESSetDM(snes, dm)); - PetscCall(DMPlexSetSNESLocalFEM(dm, PETSC_FALSE, user)); - PetscCall(SNESSetFromOptions(snes)); - - PetscCall(DMCreateMatrix(dm, &J)); - PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)dm), PETSC_TRUE, 0, NULL, &nullSpace)); - PetscCall(MatSetNullSpace(J, nullSpace)); - PetscCall(MatNullSpaceDestroy(&nullSpace)); - PetscCall(SNESSetJacobian(snes, J, J, NULL, NULL)); - PetscCall(MatDestroy(&J)); - user->snes = snes; - PetscFunctionReturn(PETSC_SUCCESS); -} - -PetscErrorCode PetscPDFPertubedConstant2D(const PetscReal x[], const PetscReal dummy[], PetscReal p[]) -{ - p[0] = (1 + 0.01 * PetscCosReal(0.5 * x[0])) / (2 * PETSC_PI); - p[1] = (1 + 0.01 * PetscCosReal(0.5 * x[1])) / (2 * PETSC_PI); - return PETSC_SUCCESS; -} -PetscErrorCode PetscPDFPertubedConstant1D(const PetscReal x[], const PetscReal dummy[], PetscReal p[]) -{ - p[0] = (1. + 0.01 * PetscCosReal(0.5 * x[0])) / (2 * PETSC_PI); - return PETSC_SUCCESS; -} - -PetscErrorCode PetscPDFCosine1D(const PetscReal x[], const PetscReal scale[], PetscReal p[]) -{ - const PetscReal alpha = scale ? scale[0] : 0.0; - const PetscReal k = scale ? scale[1] : 1.; - p[0] = (1 + alpha * PetscCosReal(k * x[0])); - return PETSC_SUCCESS; -} - -PetscErrorCode PetscPDFCosine2D(const PetscReal x[], const PetscReal scale[], PetscReal p[]) -{ - const PetscReal alpha = scale ? scale[0] : 0.; - const PetscReal k = scale ? 
scale[1] : 1.;
-  p[0] = (1 + alpha * PetscCosReal(k * (x[0] + x[1])));
-  return PETSC_SUCCESS;
-}
-
-static PetscErrorCode InitializeParticles_PerturbedWeights(DM sw, AppCtx *user)
-{
-  DM           vdm, dm;
-  PetscScalar *weight;
-  PetscReal   *x, *v, vmin[3], vmax[3], gmin[3], gmax[3], xi0[3];
-  PetscInt    *N, Ns, dim, *cellid, *species, Np, cStart, cEnd, Npc, n;
-  PetscInt     p, q, s, c, d, cv;
-  PetscBool    flg;
-  PetscMPIInt  size, rank;
-  Parameter   *param;
-
-  PetscFunctionBegin;
-  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)sw), &size));
-  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)sw), &rank));
-  PetscOptionsBegin(PetscObjectComm((PetscObject)sw), "", "DMSwarm Options", "DMSWARM");
-  PetscCall(DMSwarmGetNumSpecies(sw, &Ns));
-  PetscCall(PetscOptionsInt("-dm_swarm_num_species", "The number of species", "DMSwarmSetNumSpecies", Ns, &Ns, &flg));
-  if (flg) PetscCall(DMSwarmSetNumSpecies(sw, Ns));
-  PetscCall(PetscCalloc1(Ns, &N));
-  n = Ns;
-  PetscCall(PetscOptionsIntArray("-dm_swarm_num_particles", "The target number of particles", "", N, &n, NULL));
-  PetscOptionsEnd();
-
-  Np = N[0];
-  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Np = %" PetscInt_FMT "\n", Np));
-  PetscCall(DMGetDimension(sw, &dim));
-  PetscCall(DMSwarmGetCellDM(sw, &dm));
-  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
-
-  PetscCall(DMCreate(PETSC_COMM_WORLD, &vdm));
-  PetscCall(DMSetType(vdm, DMPLEX));
-  PetscCall(DMPlexSetOptionsPrefix(vdm, "v"));
-  PetscCall(DMSetFromOptions(vdm));
-  PetscCall(DMViewFromOptions(vdm, NULL, "-vdm_view"));
-
-  PetscCall(DMGetBoundingBox(dm, gmin, gmax));
-  PetscCall(PetscBagGetData(user->bag, (void **)&param));
-  PetscCall(DMSwarmSetLocalSizes(sw, Np, 0));
-  Npc = Np / (cEnd - cStart);
-  PetscCall(DMSwarmGetField(sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
-  for (c = 0, p = 0; c < cEnd - cStart; ++c) {
-    for (s = 0; s < Ns; ++s) {
-      for (q = 0; q < Npc; ++q, ++p) cellid[p] = c;
-    }
-  }
-  PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
-  PetscCall(PetscFree(N));
-
-  PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x));
-  PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v));
-  PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight));
-  PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species));
-
-  PetscCall(DMSwarmSortGetAccess(sw));
-  PetscInt vStart, vEnd;
-  PetscCall(DMPlexGetHeightStratum(vdm, 0, &vStart, &vEnd));
-  PetscCall(DMGetBoundingBox(vdm, vmin, vmax));
-  for (c = 0; c < cEnd - cStart; ++c) {
-    const PetscInt cell = c + cStart;
-    PetscInt      *pidx, Npc;
-    PetscReal      centroid[3], volume;
-
-    PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx));
-    PetscCall(DMPlexComputeCellGeometryFVM(dm, cell, &volume, centroid, NULL));
-    for (q = 0; q < Npc; ++q) {
-      const PetscInt p = pidx[q];
-
-      for (d = 0; d < dim; ++d) {
-        x[p * dim + d] = centroid[d];
-        v[p * dim + d] = vmin[0] + (q + 0.5) * (vmax[0] - vmin[0]) / Npc;
-        if (user->fake_1D && d > 0) v[p * dim + d] = 0;
-      }
-    }
-    PetscCall(PetscFree(pidx));
-  }
-  PetscCall(DMGetCoordinatesLocalSetUp(vdm));
-
-  /* Setup quadrature for spatial and velocity weight calculations */
-  PetscQuadrature  quad_x;
-  PetscInt         Nq_x;
-  const PetscReal *wq_x, *xq_x;
-  PetscReal       *xq_x_extended;
-  PetscReal        weightsum = 0., totalcellweight = 0., *weight_x, *weight_v;
-  PetscReal        scale[2] = {user->cosine_coefficients[0], user->cosine_coefficients[1]};
-
-  PetscCall(PetscCalloc2(cEnd - cStart, &weight_x, Np, &weight_v));
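/* Weighting scheme used below (sketch): each particle ends up with
     w_p = W_total * w_v(p) * w_x(c),
   where the spatial factor integrates the perturbed density over cell c with
   the 5-point Gauss rule created next,
     w_x(c) = \int_c (1 + alpha cos(k x)) dx,
   (quadrature points mapped from the reference cell, weights scaled by
   |det J|), and the velocity factor is the exact Gaussian mass of the
   particle's velocity cell [v_a, v_b],
     w_v(p) = (erf(v_b/sqrt(2)) - erf(v_a/sqrt(2))) / 2. */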
if (user->fake_1D) PetscCall(PetscDTGaussTensorQuadrature(1, 1, 5, -1.0, 1.0, &quad_x)); - else PetscCall(PetscDTGaussTensorQuadrature(dim, 1, 5, -1.0, 1.0, &quad_x)); - PetscCall(PetscQuadratureGetData(quad_x, NULL, NULL, &Nq_x, &xq_x, &wq_x)); - if (user->fake_1D) { - PetscCall(PetscCalloc1(Nq_x * dim, &xq_x_extended)); - for (PetscInt i = 0; i < Nq_x; ++i) xq_x_extended[i * dim] = xq_x[i]; - } - /* Integrate the density function to get the weights of particles in each cell */ - for (d = 0; d < dim; ++d) xi0[d] = -1.0; - for (c = cStart; c < cEnd; ++c) { - PetscReal v0_x[3], J_x[9], invJ_x[9], detJ_x, xr_x[3], den_x; - PetscInt *pidx, Npc, q; - PetscInt Ncx; - const PetscScalar *array_x; - PetscScalar *coords_x = NULL; - PetscBool isDGx; - weight_x[c] = 0.; - - PetscCall(DMPlexGetCellCoordinates(dm, c, &isDGx, &Ncx, &array_x, &coords_x)); - PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx)); - PetscCall(DMPlexComputeCellGeometryFEM(dm, c, NULL, v0_x, J_x, invJ_x, &detJ_x)); - for (q = 0; q < Nq_x; ++q) { - /*Transform quadrature points from ref space to real space (0,12.5664)*/ - if (user->fake_1D) CoordinatesRefToReal(dim, dim, xi0, v0_x, J_x, &xq_x_extended[q * dim], xr_x); - else CoordinatesRefToReal(dim, dim, xi0, v0_x, J_x, &xq_x[q * dim], xr_x); - - /*Transform quadrature points from real space to ideal real space (0, 2PI/k)*/ - if (user->fake_1D) { - PetscCall(PetscPDFCosine1D(xr_x, scale, &den_x)); - detJ_x = J_x[0]; - } else PetscCall(PetscPDFCosine2D(xr_x, scale, &den_x)); - /*We have to transform the quadrature weights as well*/ - weight_x[c] += den_x * (wq_x[q] * detJ_x); - } - PetscCall(PetscPrintf(PETSC_COMM_WORLD, "c:%" PetscInt_FMT " [x_a,x_b] = %1.15f,%1.15f -> cell weight = %1.15f\n", c, (double)PetscRealPart(coords_x[0]), (double)PetscRealPart(coords_x[2]), (double)weight_x[c])); - totalcellweight += weight_x[c]; - PetscCheck(Npc / size == vEnd - vStart, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Number of particles %" PetscInt_FMT " in cell (rank %d/%d) != %" PetscInt_FMT " number of velocity vertices", Npc, rank, size, vEnd - vStart); - - /* Set weights to be gaussian in velocity cells (using exact solution) */ - for (cv = 0; cv < vEnd - vStart; ++cv) { - PetscInt Nc; - const PetscScalar *array_v; - PetscScalar *coords_v = NULL; - PetscBool isDG; - PetscCall(DMPlexGetCellCoordinates(vdm, cv, &isDG, &Nc, &array_v, &coords_v)); - - const PetscInt p = pidx[cv]; - - weight_v[p] = 0.5 * (PetscErfReal(coords_v[1] / PetscSqrtReal(2.)) - PetscErfReal(coords_v[0] / PetscSqrtReal(2.))); - - weight[p] = user->totalWeight * weight_v[p] * weight_x[c]; - weightsum += weight[p]; - - PetscCall(DMPlexRestoreCellCoordinates(vdm, cv, &isDG, &Nc, &array_v, &coords_v)); - } - PetscCall(DMPlexRestoreCellCoordinates(dm, c, &isDGx, &Ncx, &array_x, &coords_x)); - PetscCall(PetscFree(pidx)); - } - PetscCall(PetscPrintf(PETSC_COMM_WORLD, "particle weight sum = %1.10f cell weight sum = %1.10f\n", (double)totalcellweight, (double)weightsum)); - if (user->fake_1D) PetscCall(PetscFree(xq_x_extended)); - PetscCall(PetscFree2(weight_x, weight_v)); - PetscCall(PetscQuadratureDestroy(&quad_x)); - PetscCall(DMSwarmSortRestoreAccess(sw)); - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); - PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight)); - PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species)); - PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v)); - PetscCall(DMDestroy(&vdm)); - 
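/* A minimal consistency check one could enable here (an assumption, not part
   of the original code: the spatial mesh covers a whole number of cosine
   periods, so the density integrates to the domain measure); it reuses only
   quantities already in scope. */
#if 0
  {
    PetscReal lsum[2] = {weightsum, totalcellweight}, gsum[2];

    /* Sum the per-rank particle and cell weight totals over the communicator */
    PetscCall(MPIU_Allreduce(lsum, gsum, 2, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)sw)));
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)sw), "global particle/cell weight sums: %g / %g\n", (double)gsum[0], (double)gsum[1]));
  }
#endif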
PetscFunctionReturn(PETSC_SUCCESS);
-}
-
-static PetscErrorCode InitializeConstants(DM sw, AppCtx *user)
-{
-  DM         dm;
-  PetscInt  *species;
-  PetscReal *weight, totalCharge = 0., totalWeight = 0., gmin[3], gmax[3];
-  PetscInt   Np, p, dim;
-
-  PetscFunctionBegin;
-  PetscCall(DMSwarmGetCellDM(sw, &dm));
-  PetscCall(DMGetDimension(sw, &dim));
-  PetscCall(DMSwarmGetLocalSize(sw, &Np));
-  PetscCall(DMGetBoundingBox(dm, gmin, gmax));
-  PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight));
-  PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species));
-  for (p = 0; p < Np; ++p) {
-    totalWeight += weight[p];
-    totalCharge += user->charges[species[p]] * weight[p];
-  }
-  PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight));
-  PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species));
-  {
-    Parameter *param;
-    PetscReal  Area;
-
-    PetscCall(PetscBagGetData(user->bag, (void **)&param));
-    switch (dim) {
-    case 1:
-      Area = (gmax[0] - gmin[0]);
-      break;
-    case 2:
-      if (user->fake_1D) {
-        Area = (gmax[0] - gmin[0]);
-      } else {
-        Area = (gmax[0] - gmin[0]) * (gmax[1] - gmin[1]);
-      }
-      break;
-    case 3:
-      Area = (gmax[0] - gmin[0]) * (gmax[1] - gmin[1]) * (gmax[2] - gmin[2]);
-      break;
-    default:
-      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Dimension %" PetscInt_FMT " not supported", dim);
-    }
-    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "dim = %" PetscInt_FMT "\ttotalWeight = %f, user->charges[species[p]] = %f\ttotalCharge = %f, Total Area = %f\n", dim, (double)totalWeight, (double)user->charges[0], (double)totalCharge, (double)Area));
-    param->sigma = PetscAbsReal(totalCharge / (Area));
-
-    PetscCall(PetscPrintf(PETSC_COMM_SELF, "sigma: %g\n", (double)param->sigma));
-    PetscCall(PetscPrintf(PETSC_COMM_SELF, "(x0,v0,t0,m0,q0,phi0): (%e, %e, %e, %e, %e, %e) - (P, V) = (%e, %e)\n", (double)param->x0, (double)param->v0, (double)param->t0, (double)param->m0, (double)param->q0, (double)param->phi0, (double)param->poissonNumber, (double)param->vlasovNumber));
-  }
-  /* Setup Constants */
-  {
-    PetscDS    ds;
-    Parameter *param;
-    PetscCall(PetscBagGetData(user->bag, (void **)&param));
-    PetscScalar constants[NUM_CONSTANTS];
-    constants[SIGMA]   = param->sigma;
-    constants[V0]      = param->v0;
-    constants[T0]      = param->t0;
-    constants[X0]      = param->x0;
-    constants[M0]      = param->m0;
-    constants[Q0]      = param->q0;
-    constants[PHI0]    = param->phi0;
-    constants[POISSON] = param->poissonNumber;
-    constants[VLASOV]  = param->vlasovNumber;
-    PetscCall(DMGetDS(dm, &ds));
-    PetscCall(PetscDSSetConstants(ds, NUM_CONSTANTS, constants));
-  }
-  PetscFunctionReturn(PETSC_SUCCESS);
-}
-
-static PetscErrorCode InitializeVelocites_Fake1D(DM sw, AppCtx *user)
-{
-  DM         dm;
-  PetscReal *v;
-  PetscInt  *species, cStart, cEnd;
-  PetscInt   dim, Np, p;
-
-  PetscFunctionBegin;
-  PetscCall(DMGetDimension(sw, &dim));
-  PetscCall(DMSwarmGetLocalSize(sw, &Np));
-  PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v));
-  PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species));
-  PetscCall(DMSwarmGetCellDM(sw, &dm));
-  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
-  PetscRandom rnd;
-  PetscCall(PetscRandomCreate(PetscObjectComm((PetscObject)sw), &rnd));
-  PetscCall(PetscRandomSetInterval(rnd, 0, 1.));
-  PetscCall(PetscRandomSetFromOptions(rnd));
-
-  for (p = 0; p < Np; ++p) {
-    PetscReal a[3] = {0., 0., 0.}, vel[3] = {0., 0., 0.};
-
-    PetscCall(PetscRandomGetValueReal(rnd, &a[0]));
-    if (user->perturbed_weights) {
-      PetscCall(PetscPDFSampleConstant1D(a, NULL, vel));
- } else { - PetscCall(PetscPDFSampleGaussian1D(a, NULL, vel)); - } - v[p * dim] = vel[0]; - } - PetscCall(PetscRandomDestroy(&rnd)); - PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v)); - PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode CreateSwarm(DM dm, AppCtx *user, DM *sw) -{ - PetscReal v0[2] = {1., 0.}; - PetscInt dim; - - PetscFunctionBeginUser; - PetscCall(DMGetDimension(dm, &dim)); - PetscCall(DMCreate(PetscObjectComm((PetscObject)dm), sw)); - PetscCall(DMSetType(*sw, DMSWARM)); - PetscCall(DMSetDimension(*sw, dim)); - PetscCall(DMSwarmSetType(*sw, DMSWARM_PIC)); - PetscCall(DMSwarmSetCellDM(*sw, dm)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "w_q", 1, PETSC_SCALAR)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "velocity", dim, PETSC_REAL)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "species", 1, PETSC_INT)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "initCoordinates", dim, PETSC_REAL)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "initVelocity", dim, PETSC_REAL)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "E_field", dim, PETSC_REAL)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "potential", dim, PETSC_REAL)); - PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "charges", dim, PETSC_REAL)); - PetscCall(DMSwarmFinalizeFieldRegister(*sw)); - - if (user->perturbed_weights) { - PetscCall(InitializeParticles_PerturbedWeights(*sw, user)); - } else { - PetscCall(DMSwarmComputeLocalSizeFromOptions(*sw)); - PetscCall(DMSwarmInitializeCoordinates(*sw)); - if (user->fake_1D) { - PetscCall(InitializeVelocites_Fake1D(*sw, user)); - } else { - PetscCall(DMSwarmInitializeVelocitiesFromOptions(*sw, v0)); - } - } - PetscCall(DMSetFromOptions(*sw)); - PetscCall(DMSetApplicationContext(*sw, user)); - PetscCall(PetscObjectSetName((PetscObject)*sw, "Particles")); - PetscCall(DMViewFromOptions(*sw, NULL, "-sw_view")); - { - Vec gc, gc0, gv, gv0; - - PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, DMSwarmPICField_coor, &gc)); - PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, "initCoordinates", &gc0)); - PetscCall(VecCopy(gc, gc0)); - PetscCall(VecViewFromOptions(gc, NULL, "-ic_x_view")); - PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, DMSwarmPICField_coor, &gc)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, "initCoordinates", &gc0)); - PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, "velocity", &gv)); - PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, "initVelocity", &gv0)); - PetscCall(VecCopy(gv, gv0)); - PetscCall(VecViewFromOptions(gv, NULL, "-ic_v_view")); - PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, "velocity", &gv)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, "initVelocity", &gv0)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode ComputeFieldAtParticles_Coulomb(SNES snes, DM sw, PetscReal E[]) -{ - AppCtx *user; - PetscReal *coords; - PetscInt *species, dim, d, Np, p, q, Ns; - PetscMPIInt size; - - PetscFunctionBegin; - PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)snes), &size)); - PetscCheck(size == 1, PetscObjectComm((PetscObject)snes), PETSC_ERR_SUP, "Coulomb code only works in serial"); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(DMSwarmGetNumSpecies(sw, &Ns)); - PetscCall(DMGetApplicationContext(sw, (void *)&user)); - - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void 
**)&coords)); - PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species)); - for (p = 0; p < Np; ++p) { - PetscReal *pcoord = &coords[p * dim]; - PetscReal pE[3] = {0., 0., 0.}; - - /* Calculate field at particle p due to particle q */ - for (q = 0; q < Np; ++q) { - PetscReal *qcoord = &coords[q * dim]; - PetscReal rpq[3], r, r3, q_q; - - if (p == q) continue; - q_q = user->charges[species[q]] * 1.; - for (d = 0; d < dim; ++d) rpq[d] = pcoord[d] - qcoord[d]; - r = DMPlex_NormD_Internal(dim, rpq); - if (r < PETSC_SQRT_MACHINE_EPSILON) continue; - r3 = PetscPowRealInt(r, 3); - for (d = 0; d < dim; ++d) pE[d] += q_q * rpq[d] / r3; - } - for (d = 0; d < dim; ++d) E[p * dim + d] = pE[d]; - } - PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species)); - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode ComputeFieldAtParticles_Primal(SNES snes, DM sw, PetscReal E[]) -{ - DM dm; - AppCtx *user; - PetscDS ds; - PetscFE fe; - Mat M_p, M; - Vec phi, locPhi, rho, f; - PetscReal *coords; - PetscInt dim, d, cStart, cEnd, c, Np; - PetscQuadrature q; - - PetscFunctionBegin; - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(DMGetApplicationContext(sw, (void *)&user)); - - KSP ksp; - Vec rho0; - char oldField[PETSC_MAX_PATH_LEN]; - const char *tmp; - - /* Create the charges rho */ - PetscCall(SNESGetDM(snes, &dm)); - PetscCall(DMSwarmVectorGetField(sw, &tmp)); - PetscCall(PetscStrncpy(oldField, tmp, PETSC_MAX_PATH_LEN)); - PetscCall(DMSwarmVectorDefineField(sw, "w_q")); - PetscCall(DMCreateMassMatrix(sw, dm, &M_p)); - PetscCall(DMSwarmVectorDefineField(sw, oldField)); - - PetscCall(DMCreateMassMatrix(dm, dm, &M)); - PetscCall(DMGetGlobalVector(dm, &rho0)); - PetscCall(PetscObjectSetName((PetscObject)rho0, "Charge density (rho0) from Primal Compute")); - PetscCall(DMGetGlobalVector(dm, &rho)); - PetscCall(PetscObjectSetName((PetscObject)rho, "rho")); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &f)); - - PetscCall(PetscObjectSetName((PetscObject)f, "particle weight")); - PetscCall(MatMultTranspose(M_p, f, rho)); - PetscCall(MatViewFromOptions(M_p, NULL, "-mp_view")); - PetscCall(MatViewFromOptions(M, NULL, "-m_view")); - PetscCall(VecViewFromOptions(f, NULL, "-weights_view")); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &f)); - - PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp)); - PetscCall(KSPSetOptionsPrefix(ksp, "em_proj_")); - PetscCall(KSPSetOperators(ksp, M, M)); - PetscCall(KSPSetFromOptions(ksp)); - PetscCall(KSPSolve(ksp, rho, rho0)); - PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view")); - - PetscInt rhosize; - PetscReal *charges; - const PetscScalar *rho_vals; - PetscCall(DMSwarmGetField(sw, "charges", NULL, NULL, (void **)&charges)); - PetscCall(VecGetSize(rho0, &rhosize)); - PetscCall(VecGetArrayRead(rho0, &rho_vals)); - for (c = 0; c < rhosize; ++c) charges[c] = rho_vals[c]; - PetscCall(VecRestoreArrayRead(rho0, &rho_vals)); - PetscCall(DMSwarmRestoreField(sw, "charges", NULL, NULL, (void **)&charges)); - - PetscCall(VecScale(rho, -1.0)); - - PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view")); - PetscCall(VecViewFromOptions(rho, NULL, "-rho_view")); - PetscCall(DMRestoreGlobalVector(dm, &rho0)); - PetscCall(KSPDestroy(&ksp)); - PetscCall(MatDestroy(&M_p)); - PetscCall(MatDestroy(&M)); - - PetscCall(DMGetGlobalVector(dm, &phi)); - 
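/* Summary of the primal pipeline at this step (sketch): rho = M_p^T w holds
   the particle weights projected onto the FEM space (rho0 = M^{-1} rho,
   computed above, is the corresponding nodal density kept for monitoring).
   The SNES solve below produces the potential phi, and the closing cell loop
   evaluates E = -grad phi at each particle location through the
   finite-element tabulation. */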
PetscCall(PetscObjectSetName((PetscObject)phi, "potential")); - PetscCall(VecSet(phi, 0.0)); - PetscCall(SNESSolve(snes, rho, phi)); - PetscCall(DMRestoreGlobalVector(dm, &rho)); - PetscCall(VecViewFromOptions(phi, NULL, "-phi_view")); - - PetscInt phisize; - PetscReal *pot; - const PetscScalar *phi_vals; - PetscCall(DMSwarmGetField(sw, "potential", NULL, NULL, (void **)&pot)); - PetscCall(VecGetSize(phi, &phisize)); - PetscCall(VecGetArrayRead(phi, &phi_vals)); - for (c = 0; c < phisize; ++c) pot[c] = phi_vals[c]; - PetscCall(VecRestoreArrayRead(phi, &phi_vals)); - PetscCall(DMSwarmRestoreField(sw, "potential", NULL, NULL, (void **)&pot)); - - PetscCall(DMGetLocalVector(dm, &locPhi)); - PetscCall(DMGlobalToLocalBegin(dm, phi, INSERT_VALUES, locPhi)); - PetscCall(DMGlobalToLocalEnd(dm, phi, INSERT_VALUES, locPhi)); - PetscCall(DMRestoreGlobalVector(dm, &phi)); - - PetscCall(DMGetDS(dm, &ds)); - PetscCall(PetscDSGetDiscretization(ds, 0, (PetscObject *)&fe)); - PetscCall(DMSwarmSortGetAccess(sw)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - - for (c = cStart; c < cEnd; ++c) { - PetscTabulation tab; - PetscScalar *clPhi = NULL; - PetscReal *pcoord, *refcoord; - PetscReal v[3], J[9], invJ[9], detJ; - PetscInt *points; - PetscInt Ncp, cp; - - PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Ncp, &points)); - PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord)); - PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &refcoord)); - for (cp = 0; cp < Ncp; ++cp) - for (d = 0; d < dim; ++d) pcoord[cp * dim + d] = coords[points[cp] * dim + d]; - PetscCall(DMPlexCoordinatesToReference(dm, c, Ncp, pcoord, refcoord)); - PetscCall(PetscFECreateTabulation(fe, 1, Ncp, refcoord, 1, &tab)); - PetscCall(DMPlexComputeCellGeometryFEM(dm, c, NULL, v, J, invJ, &detJ)); - PetscCall(DMPlexVecGetClosure(dm, NULL, locPhi, c, NULL, &clPhi)); - for (cp = 0; cp < Ncp; ++cp) { - const PetscReal *basisDer = tab->T[1]; - const PetscInt p = points[cp]; - - for (d = 0; d < dim; ++d) E[p * dim + d] = 0.; - PetscCall(PetscFEGetQuadrature(fe, &q)); - PetscCall(PetscFEFreeInterpolateGradient_Static(fe, basisDer, clPhi, dim, invJ, NULL, cp, &E[p * dim])); - for (d = 0; d < dim; ++d) { - E[p * dim + d] *= -1.0; - if (user->fake_1D && d > 0) E[p * dim + d] = 0; - } - } - PetscCall(DMPlexVecRestoreClosure(dm, NULL, locPhi, c, NULL, &clPhi)); - PetscCall(DMRestoreWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord)); - PetscCall(DMRestoreWorkArray(dm, Ncp * dim, MPIU_REAL, &refcoord)); - PetscCall(PetscTabulationDestroy(&tab)); - PetscCall(PetscFree(points)); - } - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmSortRestoreAccess(sw)); - PetscCall(DMRestoreLocalVector(dm, &locPhi)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode ComputeFieldAtParticles_Mixed(SNES snes, DM sw, PetscReal E[]) -{ - AppCtx *user; - DM dm, potential_dm; - KSP ksp; - IS potential_IS; - PetscDS ds; - PetscFE fe; - PetscFEGeom feGeometry; - Mat M_p, M; - Vec phi, locPhi, rho, f, temp_rho, rho0; - PetscQuadrature q; - PetscReal *coords, *pot; - PetscInt dim, d, cStart, cEnd, c, Np, fields = 1; - char oldField[PETSC_MAX_PATH_LEN]; - const char *tmp; - - PetscFunctionBegin; - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(DMGetApplicationContext(sw, &user)); - - /* Create the charges rho */ - PetscCall(SNESGetDM(snes, &dm)); - 
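/* Mixed variant of the field solve (sketch): rather than the scalar Poisson
   problem, this path solves the first-order system pairing the flux q with
   the potential phi (q - grad phi = 0 weakly, with the background charge
   entering through f0_phi_backgroundCharge; compare the block form sketched
   above f0_q). The charge projection below therefore acts on the potential
   sub-DM extracted with DMCreateSubDM. */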
PetscCall(DMGetGlobalVector(dm, &rho));
-  PetscCall(PetscObjectSetName((PetscObject)rho, "rho"));
-
-  PetscCall(DMCreateSubDM(dm, 1, &fields, &potential_IS, &potential_dm));
-
-  PetscCall(DMSwarmVectorGetField(sw, &tmp));
-  PetscCall(PetscStrncpy(oldField, tmp, PETSC_MAX_PATH_LEN));
-  PetscCall(DMSwarmVectorDefineField(sw, "w_q"));
-  PetscCall(DMCreateMassMatrix(sw, potential_dm, &M_p));
-  PetscCall(DMSwarmVectorDefineField(sw, oldField));
-
-  PetscCall(DMCreateMassMatrix(potential_dm, potential_dm, &M));
-  PetscCall(MatViewFromOptions(M_p, NULL, "-mp_view"));
-  PetscCall(MatViewFromOptions(M, NULL, "-m_view"));
-  PetscCall(DMGetGlobalVector(potential_dm, &temp_rho));
-  PetscCall(PetscObjectSetName((PetscObject)temp_rho, "Mf"));
-  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &f));
-  PetscCall(PetscObjectSetName((PetscObject)f, "particle weight"));
-  PetscCall(VecViewFromOptions(f, NULL, "-weights_view"));
-  PetscCall(MatMultTranspose(M_p, f, temp_rho));
-  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &f));
-  PetscCall(DMGetGlobalVector(potential_dm, &rho0));
-  PetscCall(PetscObjectSetName((PetscObject)rho0, "Charge density (rho0) from Mixed Compute"));
-
-  PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp));
-  PetscCall(KSPSetOptionsPrefix(ksp, "em_proj_"));
-  PetscCall(KSPSetOperators(ksp, M, M));
-  PetscCall(KSPSetFromOptions(ksp));
-  PetscCall(KSPSolve(ksp, temp_rho, rho0));
-  PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view"));
-
-  PetscInt           rhosize;
-  PetscReal         *charges;
-  const PetscScalar *rho_vals;
-  Parameter         *param;
-  PetscCall(PetscBagGetData(user->bag, (void **)&param));
-  PetscCall(DMSwarmGetField(sw, "charges", NULL, NULL, (void **)&charges));
-  PetscCall(VecGetSize(rho0, &rhosize));
-
-  /* Integral over reference element is size 1. Reference element area is 4.
Scale rho0 by 1/4 because the basis function is 1/4 */ - PetscCall(VecScale(rho0, 0.25)); - PetscCall(VecGetArrayRead(rho0, &rho_vals)); - for (c = 0; c < rhosize; ++c) charges[c] = rho_vals[c]; - PetscCall(VecRestoreArrayRead(rho0, &rho_vals)); - PetscCall(DMSwarmRestoreField(sw, "charges", NULL, NULL, (void **)&charges)); - - PetscCall(VecISCopy(rho, potential_IS, SCATTER_FORWARD, temp_rho)); - PetscCall(VecScale(rho, 0.25)); - PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view")); - PetscCall(VecViewFromOptions(temp_rho, NULL, "-temprho_view")); - PetscCall(VecViewFromOptions(rho, NULL, "-rho_view")); - PetscCall(DMRestoreGlobalVector(potential_dm, &temp_rho)); - PetscCall(DMRestoreGlobalVector(potential_dm, &rho0)); - - PetscCall(MatDestroy(&M_p)); - PetscCall(MatDestroy(&M)); - PetscCall(KSPDestroy(&ksp)); - PetscCall(DMDestroy(&potential_dm)); - PetscCall(ISDestroy(&potential_IS)); - - PetscCall(DMGetGlobalVector(dm, &phi)); - PetscCall(PetscObjectSetName((PetscObject)phi, "potential")); - PetscCall(VecSet(phi, 0.0)); - PetscCall(SNESSolve(snes, rho, phi)); - PetscCall(DMRestoreGlobalVector(dm, &rho)); - - PetscInt phisize; - const PetscScalar *phi_vals; - PetscCall(DMSwarmGetField(sw, "potential", NULL, NULL, (void **)&pot)); - PetscCall(VecGetSize(phi, &phisize)); - PetscCall(VecViewFromOptions(phi, NULL, "-phi_view")); - PetscCall(VecGetArrayRead(phi, &phi_vals)); - for (c = 0; c < phisize; ++c) pot[c] = phi_vals[c]; - PetscCall(VecRestoreArrayRead(phi, &phi_vals)); - PetscCall(DMSwarmRestoreField(sw, "potential", NULL, NULL, (void **)&pot)); - - PetscCall(DMGetLocalVector(dm, &locPhi)); - PetscCall(DMGlobalToLocalBegin(dm, phi, INSERT_VALUES, locPhi)); - PetscCall(DMGlobalToLocalEnd(dm, phi, INSERT_VALUES, locPhi)); - PetscCall(DMRestoreGlobalVector(dm, &phi)); - - PetscCall(DMGetDS(dm, &ds)); - PetscCall(PetscDSGetDiscretization(ds, 0, (PetscObject *)&fe)); - PetscCall(DMSwarmSortGetAccess(sw)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(PetscFEGetQuadrature(fe, &q)); - PetscCall(PetscFECreateCellGeometry(fe, q, &feGeometry)); - for (c = cStart; c < cEnd; ++c) { - PetscTabulation tab; - PetscScalar *clPhi = NULL; - PetscReal *pcoord, *refcoord; - PetscInt *points; - PetscInt Ncp, cp; - - PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Ncp, &points)); - PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord)); - PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &refcoord)); - for (cp = 0; cp < Ncp; ++cp) - for (d = 0; d < dim; ++d) pcoord[cp * dim + d] = coords[points[cp] * dim + d]; - PetscCall(DMPlexCoordinatesToReference(dm, c, Ncp, pcoord, refcoord)); - PetscCall(PetscFECreateTabulation(fe, 1, Ncp, refcoord, 1, &tab)); - PetscCall(DMPlexComputeCellGeometryFEM(dm, c, q, feGeometry.v, feGeometry.J, feGeometry.invJ, feGeometry.detJ)); - PetscCall(DMPlexVecGetClosure(dm, NULL, locPhi, c, NULL, &clPhi)); - - for (cp = 0; cp < Ncp; ++cp) { - const PetscInt p = points[cp]; - - for (d = 0; d < dim; ++d) E[p * dim + d] = 0.; - PetscCall(PetscFEInterpolateAtPoints_Static(fe, tab, clPhi, &feGeometry, cp, &E[p * dim])); - PetscCall(PetscFEPushforward(fe, &feGeometry, 1, &E[p * dim])); - for (d = 0; d < dim; ++d) { - E[p * dim + d] *= -2.0; - if (user->fake_1D && d > 0) E[p * dim + d] = 0; - } - } - PetscCall(DMPlexVecRestoreClosure(dm, NULL, locPhi, c, NULL, &clPhi)); - PetscCall(DMRestoreWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord)); - PetscCall(DMRestoreWorkArray(dm, 
Ncp * dim, MPIU_REAL, &refcoord)); - PetscCall(PetscTabulationDestroy(&tab)); - PetscCall(PetscFree(points)); - } - PetscCall(PetscFEDestroyCellGeometry(fe, &feGeometry)); - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmSortRestoreAccess(sw)); - PetscCall(DMRestoreLocalVector(dm, &locPhi)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode ComputeFieldAtParticles(SNES snes, DM sw, PetscReal E[]) -{ - AppCtx *ctx; - PetscInt dim, Np; - - PetscFunctionBegin; - PetscValidHeaderSpecific(snes, SNES_CLASSID, 1); - PetscValidHeaderSpecific(sw, DM_CLASSID, 2); - PetscAssertPointer(E, 3); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(DMGetApplicationContext(sw, &ctx)); - PetscCall(PetscArrayzero(E, Np * dim)); - - switch (ctx->em) { - case EM_PRIMAL: - PetscCall(ComputeFieldAtParticles_Primal(snes, sw, E)); - break; - case EM_COULOMB: - PetscCall(ComputeFieldAtParticles_Coulomb(snes, sw, E)); - break; - case EM_MIXED: - PetscCall(ComputeFieldAtParticles_Mixed(snes, sw, E)); - break; - case EM_NONE: - break; - default: - SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No solver for electrostatic model %s", EMTypes[ctx->em]); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec U, Vec G, void *ctx) -{ - DM sw; - SNES snes = ((AppCtx *)ctx)->snes; - const PetscReal *coords, *vel; - const PetscScalar *u; - PetscScalar *g; - PetscReal *E, m_p = 1., q_p = -1.; - PetscInt dim, d, Np, p; - - PetscFunctionBeginUser; - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel)); - PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(VecGetArrayRead(U, &u)); - PetscCall(VecGetArray(G, &g)); - - PetscCall(ComputeFieldAtParticles(snes, sw, E)); - - Np /= 2 * dim; - for (p = 0; p < Np; ++p) { - for (d = 0; d < dim; ++d) { - g[(p * 2 + 0) * dim + d] = u[(p * 2 + 1) * dim + d]; - g[(p * 2 + 1) * dim + d] = q_p * E[p * dim + d] / m_p; - } - } - PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel)); - PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E)); - PetscCall(VecRestoreArrayRead(U, &u)); - PetscCall(VecRestoreArray(G, &g)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -/* J_{ij} = dF_i/dx_j - J_p = ( 0 1) - (-w^2 0) - TODO Now there is another term with w^2 from the electric field. I think we will need to invert the operator. 
- Perhaps we can approximate the Jacobian using only the cellwise P-P gradient from Coulomb
-*/
-static PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec U, Mat J, Mat P, void *ctx)
-{
-  DM               sw;
-  const PetscReal *coords, *vel;
-  PetscInt         dim, d, Np, p, rStart;
-
-  PetscFunctionBeginUser;
-  PetscCall(TSGetDM(ts, &sw));
-  PetscCall(DMGetDimension(sw, &dim));
-  PetscCall(DMSwarmGetLocalSize(sw, &Np));
-  PetscCall(MatGetOwnershipRange(J, &rStart, NULL));
-  PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords));
-  PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel));
-  Np /= 2 * dim;
-  for (p = 0; p < Np; ++p) {
-    const PetscReal x0    = coords[p * dim + 0];
-    const PetscReal vy0   = vel[p * dim + 1];
-    const PetscReal omega = vy0 / x0;
-    PetscScalar     vals[4] = {0., 1., -PetscSqr(omega), 0.};
-
-    for (d = 0; d < dim; ++d) {
-      const PetscInt rows[2] = {(p * 2 + 0) * dim + d + rStart, (p * 2 + 1) * dim + d + rStart};
-      PetscCall(MatSetValues(J, 2, rows, 2, rows, vals, INSERT_VALUES));
-    }
-  }
-  PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords));
-  PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel));
-  PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
-  PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
-  PetscFunctionReturn(PETSC_SUCCESS);
-}
-
-static PetscErrorCode RHSFunctionX(TS ts, PetscReal t, Vec V, Vec Xres, void *ctx)
-{
-  AppCtx            *user = (AppCtx *)ctx;
-  DM                 sw;
-  const PetscScalar *v;
-  PetscScalar       *xres;
-  PetscInt           Np, p, d, dim;
-
-  PetscFunctionBeginUser;
-  PetscCall(TSGetDM(ts, &sw));
-  PetscCall(DMGetDimension(sw, &dim));
-  PetscCall(VecGetLocalSize(Xres, &Np));
-  PetscCall(VecGetArrayRead(V, &v));
-  PetscCall(VecGetArray(Xres, &xres));
-  Np /= dim;
-  for (p = 0; p < Np; ++p) {
-    for (d = 0; d < dim; ++d) {
-      xres[p * dim + d] = v[p * dim + d];
-      if (user->fake_1D && d > 0) xres[p * dim + d] = 0;
-    }
-  }
-  PetscCall(VecRestoreArrayRead(V, &v));
-  PetscCall(VecRestoreArray(Xres, &xres));
-  PetscFunctionReturn(PETSC_SUCCESS);
-}
-
-static PetscErrorCode RHSFunctionV(TS ts, PetscReal t, Vec X, Vec Vres, void *ctx)
-{
-  DM                 sw;
-  AppCtx            *user = (AppCtx *)ctx;
-  SNES               snes = ((AppCtx *)ctx)->snes;
-  const PetscScalar *x;
-  const PetscReal   *coords, *vel;
-  PetscReal         *E, m_p, q_p;
-  PetscScalar       *vres;
-  PetscInt           Np, p, dim, d;
-  Parameter         *param;
-
-  PetscFunctionBeginUser;
-  PetscCall(TSGetDM(ts, &sw));
-  PetscCall(DMGetDimension(sw, &dim));
-  PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords));
-  PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel));
-  PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E));
-  PetscCall(PetscBagGetData(user->bag, (void **)&param));
-  m_p = user->masses[0] * param->m0;
-  q_p = user->charges[0] * param->q0;
-  PetscCall(VecGetLocalSize(Vres, &Np));
-  PetscCall(VecGetArrayRead(X, &x));
-  PetscCall(VecGetArray(Vres, &vres));
-  PetscCheck(dim == 2, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Dimension must be 2");
-  PetscCall(ComputeFieldAtParticles(snes, sw, E));
-
-  Np /= dim;
-  for (p = 0; p < Np; ++p) {
-    for (d = 0; d < dim; ++d) {
-      vres[p * dim + d] = q_p * E[p * dim + d] / m_p;
-      if (user->fake_1D && d > 0) vres[p * dim + d] = 0.;
-    }
-  }
-  PetscCall(VecRestoreArrayRead(X, &x));
-  PetscCall(VecRestoreArray(Vres, &vres));
-  PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords));
-  PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel));
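/* RHSFunctionX and RHSFunctionV together form the split right-hand side
     dx/dt = v,  dv/dt = (q_p/m_p) E(x),
   registered by SetProblem under the "position" and "momentum" TSRHSSplits so
   that split-aware integrators (e.g. the basic symplectic schemes) can
   advance the two halves separately. */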
PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode CreateSolution(TS ts) -{ - DM sw; - Vec u; - PetscInt dim, Np; - - PetscFunctionBegin; - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(VecCreate(PETSC_COMM_WORLD, &u)); - PetscCall(VecSetBlockSize(u, dim)); - PetscCall(VecSetSizes(u, 2 * Np * dim, PETSC_DECIDE)); - PetscCall(VecSetUp(u)); - PetscCall(TSSetSolution(ts, u)); - PetscCall(VecDestroy(&u)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode SetProblem(TS ts) -{ - AppCtx *user; - DM sw; - - PetscFunctionBegin; - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMGetApplicationContext(sw, (void **)&user)); - // Define unified system for (X, V) - { - Mat J; - PetscInt dim, Np; - - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(MatCreate(PETSC_COMM_WORLD, &J)); - PetscCall(MatSetSizes(J, 2 * Np * dim, 2 * Np * dim, PETSC_DECIDE, PETSC_DECIDE)); - PetscCall(MatSetBlockSize(J, 2 * dim)); - PetscCall(MatSetFromOptions(J)); - PetscCall(MatSetUp(J)); - PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, user)); - PetscCall(TSSetRHSJacobian(ts, J, J, RHSJacobian, user)); - PetscCall(MatDestroy(&J)); - } - /* Define split system for X and V */ - { - Vec u; - IS isx, isv, istmp; - const PetscInt *idx; - PetscInt dim, Np, rstart; - - PetscCall(TSGetSolution(ts, &u)); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(VecGetOwnershipRange(u, &rstart, NULL)); - PetscCall(ISCreateStride(PETSC_COMM_WORLD, Np, (rstart / dim) + 0, 2, &istmp)); - PetscCall(ISGetIndices(istmp, &idx)); - PetscCall(ISCreateBlock(PETSC_COMM_WORLD, dim, Np, idx, PETSC_COPY_VALUES, &isx)); - PetscCall(ISRestoreIndices(istmp, &idx)); - PetscCall(ISDestroy(&istmp)); - PetscCall(ISCreateStride(PETSC_COMM_WORLD, Np, (rstart / dim) + 1, 2, &istmp)); - PetscCall(ISGetIndices(istmp, &idx)); - PetscCall(ISCreateBlock(PETSC_COMM_WORLD, dim, Np, idx, PETSC_COPY_VALUES, &isv)); - PetscCall(ISRestoreIndices(istmp, &idx)); - PetscCall(ISDestroy(&istmp)); - PetscCall(TSRHSSplitSetIS(ts, "position", isx)); - PetscCall(TSRHSSplitSetIS(ts, "momentum", isv)); - PetscCall(ISDestroy(&isx)); - PetscCall(ISDestroy(&isv)); - PetscCall(TSRHSSplitSetRHSFunction(ts, "position", NULL, RHSFunctionX, user)); - PetscCall(TSRHSSplitSetRHSFunction(ts, "momentum", NULL, RHSFunctionV, user)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode DMSwarmTSRedistribute(TS ts) -{ - DM sw; - Vec u; - PetscReal t, maxt, dt; - PetscInt n, maxn; - - PetscFunctionBegin; - PetscCall(TSGetDM(ts, &sw)); - PetscCall(TSGetTime(ts, &t)); - PetscCall(TSGetMaxTime(ts, &maxt)); - PetscCall(TSGetTimeStep(ts, &dt)); - PetscCall(TSGetStepNumber(ts, &n)); - PetscCall(TSGetMaxSteps(ts, &maxn)); - - PetscCall(TSReset(ts)); - PetscCall(TSSetDM(ts, sw)); - PetscCall(TSSetFromOptions(ts)); - PetscCall(TSSetTime(ts, t)); - PetscCall(TSSetMaxTime(ts, maxt)); - PetscCall(TSSetTimeStep(ts, dt)); - PetscCall(TSSetStepNumber(ts, n)); - PetscCall(TSSetMaxSteps(ts, maxn)); - - PetscCall(CreateSolution(ts)); - PetscCall(SetProblem(ts)); - PetscCall(TSGetSolution(ts, &u)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -/* - InitializeSolveAndSwarm - Set the solution values to the swarm coordinates and velocities, and also possibly set the initial values. 
- - Input Parameters: -+ ts - The TS -- useInitial - Flag to also set the initial conditions to the current coordinates and velocities and setup the problem - - Output Parameter: -. u - The initialized solution vector - - Level: advanced - -.seealso: InitializeSolve() -*/ -static PetscErrorCode InitializeSolveAndSwarm(TS ts, PetscBool useInitial) -{ - DM sw; - Vec u, gc, gv, gc0, gv0; - IS isx, isv; - PetscInt dim; - AppCtx *user; - - PetscFunctionBeginUser; - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMGetApplicationContext(sw, &user)); - PetscCall(DMGetDimension(sw, &dim)); - if (useInitial) { - PetscReal v0[2] = {1., 0.}; - if (user->perturbed_weights) { - PetscCall(InitializeParticles_PerturbedWeights(sw, user)); - } else { - PetscCall(DMSwarmComputeLocalSizeFromOptions(sw)); - PetscCall(DMSwarmInitializeCoordinates(sw)); - if (user->fake_1D) { - PetscCall(InitializeVelocites_Fake1D(sw, user)); - } else { - PetscCall(DMSwarmInitializeVelocitiesFromOptions(sw, v0)); - } - } - PetscCall(DMSwarmMigrate(sw, PETSC_TRUE)); - PetscCall(DMSwarmTSRedistribute(ts)); - } - PetscCall(TSGetSolution(ts, &u)); - PetscCall(TSRHSSplitGetIS(ts, "position", &isx)); - PetscCall(TSRHSSplitGetIS(ts, "momentum", &isv)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "initCoordinates", &gc0)); - if (useInitial) PetscCall(VecCopy(gc, gc0)); - PetscCall(VecISCopy(u, isx, SCATTER_FORWARD, gc)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "initCoordinates", &gc0)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "velocity", &gv)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "initVelocity", &gv0)); - if (useInitial) PetscCall(VecCopy(gv, gv0)); - PetscCall(VecISCopy(u, isv, SCATTER_FORWARD, gv)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "velocity", &gv)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "initVelocity", &gv0)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode InitializeSolve(TS ts, Vec u) -{ - PetscFunctionBegin; - PetscCall(TSSetSolution(ts, u)); - PetscCall(InitializeSolveAndSwarm(ts, PETSC_TRUE)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode ComputeError(TS ts, Vec U, Vec E) -{ - MPI_Comm comm; - DM sw; - AppCtx *user; - const PetscScalar *u; - const PetscReal *coords, *vel; - PetscScalar *e; - PetscReal t; - PetscInt dim, Np, p; - - PetscFunctionBeginUser; - PetscCall(PetscObjectGetComm((PetscObject)ts, &comm)); - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMGetApplicationContext(sw, &user)); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(TSGetSolveTime(ts, &t)); - PetscCall(VecGetArray(E, &e)); - PetscCall(VecGetArrayRead(U, &u)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel)); - Np /= 2 * dim; - for (p = 0; p < Np; ++p) { - /* TODO generalize initial conditions and project into plane instead of assuming x-y */ - const PetscReal r0 = DMPlex_NormD_Internal(dim, &coords[p * dim]); - const PetscReal th0 = PetscAtan2Real(coords[p * dim + 1], coords[p * dim + 0]); - const PetscReal v0 = DMPlex_NormD_Internal(dim, &vel[p * dim]); - const PetscReal omega = v0 / r0; - const PetscReal ct = PetscCosReal(omega * t + th0); - const PetscReal st = PetscSinReal(omega * t + th0); - const PetscScalar *x = &u[(p * 2 + 0) * 
dim]; - const PetscScalar *v = &u[(p * 2 + 1) * dim]; - const PetscReal xe[3] = {r0 * ct, r0 * st, 0.0}; - const PetscReal ve[3] = {-v0 * st, v0 * ct, 0.0}; - PetscInt d; - - for (d = 0; d < dim; ++d) { - e[(p * 2 + 0) * dim + d] = x[d] - xe[d]; - e[(p * 2 + 1) * dim + d] = v[d] - ve[d]; - } - if (user->error) { - const PetscReal en = 0.5 * DMPlex_DotRealD_Internal(dim, v, v); - const PetscReal exen = 0.5 * PetscSqr(v0); - PetscCall(PetscPrintf(comm, "t %.4g: p%" PetscInt_FMT " error [%.2g %.2g] sol [(%.6lf %.6lf) (%.6lf %.6lf)] exact [(%.6lf %.6lf) (%.6lf %.6lf)] energy/exact energy %g / %g (%.10lf%%)\n", (double)t, p, (double)DMPlex_NormD_Internal(dim, &e[(p * 2 + 0) * dim]), (double)DMPlex_NormD_Internal(dim, &e[(p * 2 + 1) * dim]), (double)x[0], (double)x[1], (double)v[0], (double)v[1], (double)xe[0], (double)xe[1], (double)ve[0], (double)ve[1], (double)en, (double)exen, (double)(PetscAbsReal(exen - en) * 100. / exen))); - } - } - PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel)); - PetscCall(VecRestoreArrayRead(U, &u)); - PetscCall(VecRestoreArray(E, &e)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -#if 0 -static PetscErrorCode EnergyMonitor(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx) -{ - const PetscInt ostep = ((AppCtx *)ctx)->ostep; - const EMType em = ((AppCtx *)ctx)->em; - DM sw; - const PetscScalar *u; - PetscReal *coords, *E; - PetscReal enKin = 0., enEM = 0.; - PetscInt dim, d, Np, p, q; - - PetscFunctionBeginUser; - if (step % ostep == 0) { - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(VecGetArrayRead(U, &u)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - Np /= 2 * dim; - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E)); - if (!step) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ts), "Time Step Part Energy\n")); - for (p = 0; p < Np; ++p) { - const PetscReal v2 = DMPlex_DotRealD_Internal(dim, &u[(p * 2 + 1) * dim], &u[(p * 2 + 1) * dim]); - PetscReal *pcoord = &coords[p * dim]; - - PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)ts), "%.6lf %4D %5D %10.4lf\n", t, step, p, (double)0.5 * v2)); - enKin += 0.5 * v2; - if (em == EM_NONE) { - continue; - } else if (em == EM_COULOMB) { - for (q = p + 1; q < Np; ++q) { - PetscReal *qcoord = &coords[q * dim]; - PetscReal rpq[3], r; - for (d = 0; d < dim; ++d) rpq[d] = pcoord[d] - qcoord[d]; - r = DMPlex_NormD_Internal(dim, rpq); - enEM += 1. 
/ r; - } - } else if (em == EM_PRIMAL) { - for (d = 0; d < dim; ++d) enEM += E[p * dim + d]; - } - } - PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)ts), "%.6lf %4" PetscInt_FMT " 2\t %10.4lf\n", t, step, (double)enKin)); - PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)ts), "%.6lf %4" PetscInt_FMT " 3\t %10.4lf\n", t, step, (double)enEM)); - PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)ts), "%.6lf %4" PetscInt_FMT " 4\t %10.4lf\n", t, step, (double)enKin + enEM)); - PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E)); - PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)ts), NULL)); - PetscCall(VecRestoreArrayRead(U, &u)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} -#endif - -static PetscErrorCode MigrateParticles(TS ts) -{ - DM sw; - - PetscFunctionBeginUser; - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMViewFromOptions(sw, NULL, "-migrate_view_pre")); - { - Vec u, gc, gv; - IS isx, isv; - - PetscCall(TSGetSolution(ts, &u)); - PetscCall(TSRHSSplitGetIS(ts, "position", &isx)); - PetscCall(TSRHSSplitGetIS(ts, "momentum", &isv)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); - PetscCall(VecISCopy(u, isx, SCATTER_REVERSE, gc)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "velocity", &gv)); - PetscCall(VecISCopy(u, isv, SCATTER_REVERSE, gv)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "velocity", &gv)); - } - PetscCall(DMSwarmMigrate(sw, PETSC_TRUE)); - PetscCall(DMSwarmTSRedistribute(ts)); - PetscCall(InitializeSolveAndSwarm(ts, PETSC_FALSE)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode MigrateParticles_Periodic(TS ts) -{ - DM sw, dm; - PetscInt dim; - - PetscFunctionBeginUser; - PetscCall(TSGetDM(ts, &sw)); - PetscCall(DMGetDimension(sw, &dim)); - PetscCall(DMViewFromOptions(sw, NULL, "-migrate_view_pre")); - { - Vec u, position, momentum, gc, gv; - IS isx, isv; - PetscReal *pos, *mom, *x, *v; - PetscReal lower_bound[3], upper_bound[3]; - PetscInt p, d, Np; - - PetscCall(TSGetSolution(ts, &u)); - PetscCall(DMSwarmGetLocalSize(sw, &Np)); - PetscCall(DMSwarmGetCellDM(sw, &dm)); - PetscCall(DMGetBoundingBox(dm, lower_bound, upper_bound)); - PetscCall(TSRHSSplitGetIS(ts, "position", &isx)); - PetscCall(TSRHSSplitGetIS(ts, "momentum", &isv)); - PetscCall(VecGetSubVector(u, isx, &position)); - PetscCall(VecGetSubVector(u, isv, &momentum)); - PetscCall(VecGetArray(position, &pos)); - PetscCall(VecGetArray(momentum, &mom)); - - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); - PetscCall(VecISCopy(u, isx, SCATTER_REVERSE, gc)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "velocity", &gv)); - PetscCall(VecISCopy(u, isv, SCATTER_REVERSE, gv)); - - PetscCall(VecGetArray(gc, &x)); - PetscCall(VecGetArray(gv, &v)); - for (p = 0; p < Np; ++p) { - for (d = 0; d < dim; ++d) { - if (pos[p * dim + d] < lower_bound[d]) { - x[p * dim + d] = pos[p * dim + d] + (upper_bound[d] - lower_bound[d]); - } else if (pos[p * dim + d] > upper_bound[d]) { - x[p * dim + d] = pos[p * dim + d] - (upper_bound[d] - lower_bound[d]); - } else { - x[p * dim + d] = pos[p * dim + d]; - } - PetscCheck(x[p * dim + d] >= lower_bound[d] && x[p * dim + d] <= upper_bound[d], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "p: %" PetscInt_FMT "x[%" PetscInt_FMT "] %g", p, d, (double)x[p * dim + 
d]); - v[p * dim + d] = mom[p * dim + d]; - } - } - PetscCall(VecRestoreArray(gc, &x)); - PetscCall(VecRestoreArray(gv, &v)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "velocity", &gv)); - - PetscCall(VecRestoreArray(position, &pos)); - PetscCall(VecRestoreArray(momentum, &mom)); - PetscCall(VecRestoreSubVector(u, isx, &position)); - PetscCall(VecRestoreSubVector(u, isv, &momentum)); - } - PetscCall(DMSwarmMigrate(sw, PETSC_TRUE)); - PetscCall(DMSwarmTSRedistribute(ts)); - PetscCall(InitializeSolveAndSwarm(ts, PETSC_FALSE)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -int main(int argc, char **argv) -{ - DM dm, sw; - TS ts; - Vec u; - AppCtx user; - - PetscCall(PetscInitialize(&argc, &argv, NULL, help)); - PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user)); - PetscCall(PetscBagCreate(PETSC_COMM_SELF, sizeof(Parameter), &user.bag)); - PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm)); - PetscCall(CreatePoisson(dm, &user)); - PetscCall(CreateSwarm(dm, &user, &sw)); - PetscCall(SetupParameters(PETSC_COMM_WORLD, &user)); - PetscCall(InitializeConstants(sw, &user)); - PetscCall(DMSetApplicationContext(sw, &user)); - - PetscCall(TSCreate(PETSC_COMM_WORLD, &ts)); - PetscCall(TSSetProblemType(ts, TS_NONLINEAR)); - PetscCall(TSSetDM(ts, sw)); - PetscCall(TSSetMaxTime(ts, 0.1)); - PetscCall(TSSetTimeStep(ts, 0.00001)); - PetscCall(TSSetMaxSteps(ts, 100)); - PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP)); - - if (user.efield_monitor) PetscCall(TSMonitorSet(ts, MonitorEField, &user, NULL)); - if (user.initial_monitor) PetscCall(TSMonitorSet(ts, MonitorInitialConditions, &user, NULL)); - if (user.monitor_positions) PetscCall(TSMonitorSet(ts, MonitorPositions_2D, &user, NULL)); - if (user.poisson_monitor) PetscCall(TSMonitorSet(ts, MonitorPoisson, &user, NULL)); - - PetscCall(TSSetFromOptions(ts)); - PetscReal dt; - PetscInt maxn; - PetscCall(TSGetTimeStep(ts, &dt)); - PetscCall(TSGetMaxSteps(ts, &maxn)); - user.steps = maxn; - user.stepSize = dt; - PetscCall(SetupContext(dm, sw, &user)); - - PetscCall(DMSwarmVectorDefineField(sw, "velocity")); - PetscCall(TSSetComputeInitialCondition(ts, InitializeSolve)); - PetscCall(TSSetComputeExactError(ts, ComputeError)); - if (user.periodic) { - PetscCall(TSSetPostStep(ts, MigrateParticles_Periodic)); - } else { - PetscCall(TSSetPostStep(ts, MigrateParticles)); - } - PetscCall(CreateSolution(ts)); - PetscCall(TSGetSolution(ts, &u)); - PetscCall(TSComputeInitialCondition(ts, u)); - - PetscCall(TSSolve(ts, NULL)); - - PetscCall(SNESDestroy(&user.snes)); - PetscCall(TSDestroy(&ts)); - PetscCall(DMDestroy(&sw)); - PetscCall(DMDestroy(&dm)); - PetscCall(DestroyContext(&user)); - PetscCall(PetscFinalize()); - return 0; -} - -/*TEST - - build: - requires: double !complex - - # Recommend -draw_size 500,500 - testset: - args: -dm_plex_dim 2 -fake_1D -dm_plex_simplex 0 -dm_plex_box_faces 20,1 -dm_plex_box_lower 0,-1 -dm_plex_box_upper 12.5664,1 \ - -dm_swarm_coordinate_density constant -dm_swarm_num_particles 100 \ - -dm_plex_box_bd periodic,none -periodic -ts_type basicsymplectic -ts_basicsymplectic_type 1\ - -dm_view -output_step 50 -sigma 1.0e-8 -timeScale 2.0e-14\ - -ts_monitor_sp_swarm -ts_monitor_sp_swarm_retain 0 -ts_monitor_sp_swarm_phase 0 - test: - suffix: none_1d - args: -em_type none -error - test: - suffix: coulomb_1d - args: -em_type coulomb - - # For verification, we use - # -dm_plex_box_faces 100,1 -vdm_plex_box_faces 8000 -dm_swarm_num_particles 
800000 - # -ts_monitor_sp_swarm_multi_species 0 -ts_monitor_sp_swarm_retain 0 -ts_monitor_sp_swarm_phase 1 -draw_size 500,500 - testset: - args: -dm_plex_dim 2 -dm_plex_box_bd periodic,none -dm_plex_simplex 0 -dm_plex_box_faces 10,1 -dm_plex_box_lower 0,-0.5 -dm_plex_box_upper 12.5664,0.5\ - -ts_dt 0.03 -ts_max_time 500 -ts_max_steps 500 -ts_type basicsymplectic -ts_basicsymplectic_type 1\ - -em_snes_atol 1.e-12 -em_snes_error_if_not_converged -em_ksp_error_if_not_converged\ - -dm_swarm_num_species 1 -dm_swarm_num_particles 100 -dm_view\ - -vdm_plex_dim 1 -vdm_plex_box_lower -10 -vdm_plex_box_upper 10 -vdm_plex_simplex 0 -vdm_plex_box_faces 10\ - -output_step 1 -fake_1D -perturbed_weights -periodic -cosine_coefficients 0.01,0.5 -charges -1.0,1.0 -total_weight 1.0 - test: - suffix: uniform_equilibrium_1d - args: -cosine_coefficients 0.0,0.5 -em_type primal -petscspace_degree 1 -em_pc_type svd - test: - suffix: uniform_primal_1d - args: -em_type primal -petscspace_degree 1 -em_pc_type svd - test: - suffix: uniform_none_1d - args: -em_type none - test: - suffix: uniform_mixed_1d - args: -em_type mixed\ - -ksp_rtol 1e-10\ - -em_ksp_type preonly\ - -em_ksp_error_if_not_converged\ - -em_snes_error_if_not_converged\ - -em_pc_type fieldsplit\ - -em_fieldsplit_field_pc_type lu \ - -em_fieldsplit_potential_pc_type svd\ - -em_pc_fieldsplit_type schur\ - -em_pc_fieldsplit_schur_fact_type full\ - -em_pc_fieldsplit_schur_precondition full\ - -potential_petscspace_degree 0 \ - -potential_petscdualspace_lagrange_use_moments \ - -potential_petscdualspace_lagrange_moment_order 2 \ - -field_petscspace_degree 2\ - -field_petscfe_default_quadrature_order 1\ - -field_petscspace_type sum \ - -field_petscspace_variables 2 \ - -field_petscspace_components 2 \ - -field_petscspace_sum_spaces 2 \ - -field_petscspace_sum_concatenate true \ - -field_sumcomp_0_petscspace_variables 2 \ - -field_sumcomp_0_petscspace_type tensor \ - -field_sumcomp_0_petscspace_tensor_spaces 2 \ - -field_sumcomp_0_petscspace_tensor_uniform false \ - -field_sumcomp_0_tensorcomp_0_petscspace_degree 1 \ - -field_sumcomp_0_tensorcomp_1_petscspace_degree 0 \ - -field_sumcomp_1_petscspace_variables 2 \ - -field_sumcomp_1_petscspace_type tensor \ - -field_sumcomp_1_petscspace_tensor_spaces 2 \ - -field_sumcomp_1_petscspace_tensor_uniform false \ - -field_sumcomp_1_tensorcomp_0_petscspace_degree 0 \ - -field_sumcomp_1_tensorcomp_1_petscspace_degree 1 \ - -field_petscdualspace_form_degree -1 \ - -field_petscdualspace_order 1 \ - -field_petscdualspace_lagrange_trimmed true \ - -ksp_gmres_restart 500 - -TEST*/ diff --git a/src/dm/impls/swarm/tests/output/ex9_coulomb_1d.out b/src/dm/impls/swarm/tests/output/ex9_coulomb_1d.out deleted file mode 100644 index 4412cf32479..00000000000 --- a/src/dm/impls/swarm/tests/output/ex9_coulomb_1d.out +++ /dev/null @@ -1,15 +0,0 @@ -DM Object: box 1 MPI process - type: plex -box in 2 dimensions: - Number of 0-cells per rank: 40 - Number of 1-cells per rank: 60 - Number of 2-cells per rank: 20 -Periodic mesh (PERIODIC, NONE) coordinates localized -Labels: - marker: 1 strata with value/size (1 (80)) - Face Sets: 2 strata with value/size (1 (20), 3 (20)) - depth: 3 strata with value/size (0 (40), 1 (60), 2 (20)) - celltype: 3 strata with value/size (4 (20), 0 (40), 1 (60)) -dim = 2 totalWeight = 1.000000, user->charges[species[p]] = -1.000000 totalCharge = -1.000000, Total Area = 12.566400 -sigma: 0.0795773 -(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, 
V) = (1.000000e+00, 1.000000e+00) diff --git a/src/dm/impls/swarm/tests/output/ex9_none_1d.out b/src/dm/impls/swarm/tests/output/ex9_none_1d.out deleted file mode 100644 index 4412cf32479..00000000000 --- a/src/dm/impls/swarm/tests/output/ex9_none_1d.out +++ /dev/null @@ -1,15 +0,0 @@ -DM Object: box 1 MPI process - type: plex -box in 2 dimensions: - Number of 0-cells per rank: 40 - Number of 1-cells per rank: 60 - Number of 2-cells per rank: 20 -Periodic mesh (PERIODIC, NONE) coordinates localized -Labels: - marker: 1 strata with value/size (1 (80)) - Face Sets: 2 strata with value/size (1 (20), 3 (20)) - depth: 3 strata with value/size (0 (40), 1 (60), 2 (20)) - celltype: 3 strata with value/size (4 (20), 0 (40), 1 (60)) -dim = 2 totalWeight = 1.000000, user->charges[species[p]] = -1.000000 totalCharge = -1.000000, Total Area = 12.566400 -sigma: 0.0795773 -(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) diff --git a/src/dm/impls/swarm/tests/output/ex9_uniform_primal_1d.out b/src/dm/impls/swarm/tests/output/ex9_uniform_primal_1d.out deleted file mode 100644 index 5e3bfaf3beb..00000000000 --- a/src/dm/impls/swarm/tests/output/ex9_uniform_primal_1d.out +++ /dev/null @@ -1,39 +0,0 @@ -DM Object: box 1 MPI process - type: plex -box in 2 dimensions: - Number of 0-cells per rank: 20 - Number of 1-cells per rank: 30 - Number of 2-cells per rank: 10 -Periodic mesh (PERIODIC, NONE) coordinates localized -Labels: - marker: 1 strata with value/size (1 (40)) - Face Sets: 2 strata with value/size (1 (10), 3 (10)) - depth: 3 strata with value/size (0 (20), 1 (30), 2 (10)) - celltype: 3 strata with value/size (4 (10), 0 (20), 1 (30)) -Np = 100 -c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268216514538342 -c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263432611863840 -c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256054150597222 -c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.248899464902083 -c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244701413893048 -c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.245063517620130 -c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249847464141595 -c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.257225940223554 -c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.264380606043426 -c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268578610078297 -particle weight sum = 12.5664002939 cell weight sum = 12.5664002939 -dim = 2 totalWeight = 12.566400, user->charges[species[p]] = -1.000000 totalCharge = -12.566400, Total Area = 12.566400 -sigma: 1. 
-(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) -Np = 100 -c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268216514538342 -c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263432611863840 -c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256054150597222 -c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.248899464902083 -c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244701413893048 -c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.245063517620130 -c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249847464141595 -c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.257225940223554 -c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.264380606043426 -c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268578610078297 -particle weight sum = 12.5664002939 cell weight sum = 12.5664002939 diff --git a/src/dm/impls/swarm/tutorials/ex1f90.F90 b/src/dm/impls/swarm/tutorials/ex1f90.F90 index 41715e0ddd5..81677369dc1 100644 --- a/src/dm/impls/swarm/tutorials/ex1f90.F90 +++ b/src/dm/impls/swarm/tutorials/ex1f90.F90 @@ -96,7 +96,6 @@ program DMSwarmTestProjection print *, 'Total number density = ', norm ! Cleanup PetscCallA(DMSwarmDestroyGlobalVectorFromField(sw, 'w_q', f, ierr)) - PetscCallA(MatDestroy(M_p, ierr)) PetscCallA(VecDestroy(rho, ierr)) PetscCallA(DMDestroy(sw, ierr)) PetscCallA(DMDestroy(dm, ierr)) diff --git a/src/dm/interface/dlregisdmdm.c b/src/dm/interface/dlregisdmdm.c index f59632ec8b3..0af095e6417 100644 --- a/src/dm/interface/dlregisdmdm.c +++ b/src/dm/interface/dlregisdmdm.c @@ -175,6 +175,8 @@ PetscErrorCode DMInitializePackage(void) PetscCall(DMGenerateRegisterAll()); PetscCall(PetscRegisterFinalize(DMGenerateRegisterDestroy)); + PetscCall(DMGeomModelRegisterAll()); + PetscCall(PetscRegisterFinalize(DMGeomModelRegisterDestroy)); PetscCall(DMPlexTransformRegisterAll()); PetscCall(PetscRegisterFinalize(DMPlexTransformRegisterDestroy)); PetscCall(DMLabelRegisterAll()); @@ -330,8 +332,8 @@ PetscErrorCode PetscFVInitializePackage(void) static PetscBool PetscDSPackageInitialized = PETSC_FALSE; /*@C - PetscDSFinalizePackage - This function finalizes everything in the PetscDS package. It is called - from PetscFinalize(). + PetscDSFinalizePackage - This function finalizes everything in the `PetscDS` package. It is called + from `PetscFinalize()`. 
Level: developer @@ -405,6 +407,7 @@ PETSC_EXTERN PetscErrorCode PetscDLLibraryRegister_petscdm(void) PetscCall(PetscFEInitializePackage()); PetscCall(PetscFVInitializePackage()); PetscCall(DMFieldInitializePackage()); + PetscCall(PetscDSInitializePackage()); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/dm/interface/dm.c b/src/dm/interface/dm.c index 675019ebbc9..37a2762b0ee 100644 --- a/src/dm/interface/dm.c +++ b/src/dm/interface/dm.c @@ -57,19 +57,16 @@ PetscErrorCode DMCreate(MPI_Comm comm, DM *dm) PetscFunctionBegin; PetscAssertPointer(dm, 2); - *dm = NULL; - PetscCall(DMInitializePackage()); + PetscCall(DMInitializePackage()); PetscCall(PetscHeaderCreate(v, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView)); - ((PetscObject)v)->non_cyclic_references = &DMCountNonCyclicReferences; - - v->setupcalled = PETSC_FALSE; - v->setfromoptionscalled = PETSC_FALSE; - v->ltogmap = NULL; - v->bind_below = 0; - v->bs = 1; - v->coloringtype = IS_COLORING_GLOBAL; + v->setupcalled = PETSC_FALSE; + v->setfromoptionscalled = PETSC_FALSE; + v->ltogmap = NULL; + v->bind_below = 0; + v->bs = 1; + v->coloringtype = IS_COLORING_GLOBAL; PetscCall(PetscSFCreate(comm, &v->sf)); PetscCall(PetscSFCreate(comm, &v->sectionSF)); v->labels = NULL; @@ -179,6 +176,10 @@ PetscErrorCode DMClone(DM dm, DM *newdm) PetscCall(DMClone(dm->coordinates[i].dm, &ncdm)); PetscCall(DMCopyDisc(dm->coordinates[i].dm, ncdm)); PetscCall(DMSetLocalSection(ncdm, cs)); + if (dm->coordinates[i].dm->periodic.setup) { + ncdm->periodic.setup = dm->coordinates[i].dm->periodic.setup; + PetscCall(ncdm->periodic.setup(ncdm)); + } if (i) PetscCall(DMSetCellCoordinateDM(*newdm, ncdm)); else PetscCall(DMSetCoordinateDM(*newdm, ncdm)); PetscCall(DMDestroy(&ncdm)); @@ -216,8 +217,8 @@ PetscErrorCode DMClone(DM dm, DM *newdm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - DMSetVecType - Sets the type of vector created with `DMCreateLocalVector()` and `DMCreateGlobalVector()` +/*@ + DMSetVecType - Sets the type of vector to be created with `DMCreateLocalVector()` and `DMCreateGlobalVector()` Logically Collective @@ -246,7 +247,7 @@ PetscErrorCode DMSetVecType(DM dm, VecType ctype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetVecType - Gets the type of vector created with `DMCreateLocalVector()` and `DMCreateGlobalVector()` Logically Collective @@ -307,7 +308,7 @@ PetscErrorCode VecGetDM(Vec v, DM *dm) Level: developer - Note: + Notes: This is rarely used, generally one uses `DMGetLocalVector()` or `DMGetGlobalVector()` to create a vector associated with a given `DM` This is NOT the same as `DMCreateGlobalVector()` since it does not change the view methods or perform other customization, but merely sets the `DM` member. 
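As a quick illustration of the `DMSetVecType()` behavior documented above, a minimal sketch (the helper name is hypothetical; assumes `dm` is already configured):
.vb
static PetscErrorCode CreateSolutionVector(DM dm, Vec *u)
{
  PetscFunctionBeginUser;
  PetscCall(DMSetVecType(dm, VECSTANDARD)); /* e.g. VECCUDA in a CUDA build */
  PetscCall(DMCreateGlobalVector(dm, u));   /* u is created with that type and records dm, unlike a bare VecSetDM() */
  PetscFunctionReturn(PETSC_SUCCESS);
}
.ve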
@@ -323,7 +324,7 @@ PetscErrorCode VecSetDM(Vec v, DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSetISColoringType - Sets the type of coloring, `IS_COLORING_GLOBAL` or `IS_COLORING_LOCAL` that is created by the `DM` Logically Collective @@ -348,7 +349,7 @@ PetscErrorCode DMSetISColoringType(DM dm, ISColoringType ctype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetISColoringType - Gets the type of coloring, `IS_COLORING_GLOBAL` or `IS_COLORING_LOCAL` that is created by the `DM` Logically Collective @@ -375,7 +376,7 @@ PetscErrorCode DMGetISColoringType(DM dm, ISColoringType *ctype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSetMatType - Sets the type of matrix created with `DMCreateMatrix()` Logically Collective @@ -404,7 +405,7 @@ PetscErrorCode DMSetMatType(DM dm, MatType ctype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetMatType - Gets the type of matrix that would be created with `DMCreateMatrix()` Logically Collective @@ -443,7 +444,7 @@ PetscErrorCode DMGetMatType(DM dm, MatType *ctype) Note: A matrix may not have a `DM` associated with it - Developer Notes: + Developer Note: Since the `Mat` class doesn't know about the `DM` class the `DM` object is associated with the `Mat` through a `PetscObjectCompose()` operation .seealso: [](ch_dmbase), `DM`, `MatSetDM()`, `DMCreateMatrix()`, `DMSetMatType()` @@ -471,7 +472,7 @@ PetscErrorCode MatGetDM(Mat A, DM *dm) Note: This is rarely used in practice, rather `DMCreateMatrix()` is used to create a matrix associated with a particular `DM` - Developer Notes: + Developer Note: Since the `Mat` class doesn't know about the `DM` class the `DM` object is associated with the `Mat` through a `PetscObjectCompose()` operation @@ -486,7 +487,7 @@ PetscErrorCode MatSetDM(Mat A, DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSetOptionsPrefix - Sets the prefix prepended to all option names when searching through the options database Logically Collective @@ -513,7 +514,7 @@ PetscErrorCode DMSetOptionsPrefix(DM dm, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMAppendOptionsPrefix - Appends an additional string to an already existing prefix used for searching for `DM` options in the options database. @@ -540,7 +541,7 @@ PetscErrorCode DMAppendOptionsPrefix(DM dm, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetOptionsPrefix - Gets the prefix used for searching for all DM options in the options database. @@ -554,7 +555,7 @@ PetscErrorCode DMAppendOptionsPrefix(DM dm, const char prefix[]) Level: advanced - Fortran Notes: + Fortran Note: Pass in a string 'prefix' of sufficient length to hold the prefix. @@ -634,7 +635,7 @@ static PetscErrorCode DMDestroyCoordinates_Private(DMCoordinates *c) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMDestroy - Destroys a `DM`. 
Collective @@ -771,8 +772,11 @@ PetscErrorCode DMDestroy(DM *dm) if ((*dm)->transformDestroy) PetscCall((*(*dm)->transformDestroy)(*dm, (*dm)->transformCtx)); PetscCall(DMDestroy(&(*dm)->transformDM)); PetscCall(VecDestroy(&(*dm)->transform)); - PetscCall(VecScatterDestroy(&(*dm)->periodic.affine_to_local)); - PetscCall(VecDestroy(&(*dm)->periodic.affine)); + for (PetscInt i = 0; i < (*dm)->periodic.num_affines; i++) { + PetscCall(VecScatterDestroy(&(*dm)->periodic.affine_to_local[i])); + PetscCall(VecDestroy(&(*dm)->periodic.affine[i])); + } + if ((*dm)->periodic.num_affines > 0) PetscCall(PetscFree2((*dm)->periodic.affine_to_local, (*dm)->periodic.affine)); PetscCall(DMClearDS(*dm)); PetscCall(DMDestroy(&(*dm)->dmBC)); @@ -911,7 +915,7 @@ PetscErrorCode DMSetFromOptions(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMViewFromOptions - View a `DM` in a particular way based on a request in the options database Collective @@ -936,7 +940,7 @@ PetscErrorCode DMViewFromOptions(DM dm, PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMView - Views a `DM`. Depending on the `PetscViewer` and its `PetscViewerFormat` it may print some ASCII information about the `DM` to the screen or a file or save the `DM` in a binary file to be loaded later or create a visualization of the `DM` @@ -948,7 +952,7 @@ PetscErrorCode DMViewFromOptions(DM dm, PetscObject obj, const char name[]) Level: beginner - Notes: + Note: Using `PETSCVIEWERHDF5` type with `PETSC_VIEWER_HDF5_PETSC` as the `PetscViewerFormat` one can save multiple `DMPLEX` meshes in a single HDF5 file. This in turn requires one to name the `DMPLEX` object with `PetscObjectSetName()` before saving it with `DMView()` and before loading it with `DMLoad()` for identification of the mesh object. @@ -1161,7 +1165,7 @@ PetscErrorCode DMGetLocalToGlobalMapping(DM dm, ISLocalToGlobalMapping *ltog) Level: intermediate - Note: + Notes: This might be the number of degrees of freedom at each grid point for a structured grid. Complex `DM` that represent multiphysics or staggered grids or mixed-methods do not generally have a single inherent block size, but @@ -1179,7 +1183,7 @@ PetscErrorCode DMGetBlockSize(DM dm, PetscInt *bs) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCreateInterpolation - Gets the interpolation matrix between two `DM` objects. The resulting matrix map degrees of freedom in the vector obtained by `DMCreateGlobalVector()` on the coarse `DM` to similar vectors on the fine grid `DM`. @@ -1191,7 +1195,7 @@ PetscErrorCode DMGetBlockSize(DM dm, PetscInt *bs) Output Parameters: + mat - the interpolation -- vec - the scaling (optional), see `DMCreateInterpolationScale()` +- vec - the scaling (optional, pass `NULL` if not needed), see `DMCreateInterpolationScale()` Level: developer @@ -1230,11 +1234,11 @@ PetscErrorCode DMCreateInterpolation(DM dmc, DM dmf, Mat *mat, Vec *vec) Level: advanced - Notes: + Note: xcoarse = diag(L)*R*xfine preserves scale and is thus suitable for state (versus residual) restriction. In other words xcoarse is the coarse representation of xfine. - Developer Notes: + Developer Note: If the fine-scale `DMDA` has the -dm_bind_below option set to true, then `DMCreateInterpolationScale()` calls `MatSetBindingPropagates()` on the restriction/interpolation operator to set the bindingpropagates flag to true. 
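The scaled restriction xcoarse = diag(L)*R*xfine described in the `DMCreateInterpolation()`/`DMCreateInterpolationScale()` notes above can be written out directly; a minimal sketch, assuming `dmc` and `dmf` are compatible coarse/fine `DM`s and `RestrictState` is a hypothetical helper:
.vb
static PetscErrorCode RestrictState(DM dmc, DM dmf, Vec xfine, Vec xcoarse)
{
  Mat Interp;
  Vec L;

  PetscFunctionBeginUser;
  PetscCall(DMCreateInterpolation(dmc, dmf, &Interp, &L));
  PetscCall(MatRestrict(Interp, xfine, xcoarse));   /* xcoarse = R*xfine, with R the transpose of Interp */
  PetscCall(VecPointwiseMult(xcoarse, L, xcoarse)); /* xcoarse = diag(L)*R*xfine */
  PetscCall(MatDestroy(&Interp));
  PetscCall(VecDestroy(&L));
  PetscFunctionReturn(PETSC_SUCCESS);
}
.ve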
@@ -1521,7 +1525,7 @@ PetscErrorCode DMCreateMatrix(DM dm, Mat *mat) Level: developer - Notes: + Note: This is most useful to reduce initialization costs when `MatSetPreallocationCOO()` and `MatSetValuesCOO()` will be used. @@ -1645,7 +1649,7 @@ PetscErrorCode DMGetBlockingType(DM dm, DMBlockingType *btype) Level: developer - Note: + Notes: A `DM` may stash the array between instantiations so using this routine may be more efficient than calling `PetscMalloc()` The array may contain nonzero values @@ -1712,7 +1716,7 @@ PetscErrorCode DMGetWorkArray(DM dm, PetscInt count, MPI_Datatype dtype, void *m Level: developer - Developer Notes: + Developer Note: count and dtype are ignored, they are only needed for `DMGetWorkArray()` .seealso: [](ch_dmbase), `DM`, `DMDestroy()`, `DMCreate()`, `DMGetWorkArray()` @@ -1880,11 +1884,11 @@ PetscErrorCode DMGetNearNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCo Level: intermediate Note: - The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with - `PetscFree()`, every entry of fields should be destroyed with `ISDestroy()`, and both arrays should be freed with + The user is responsible for freeing all requested arrays. In particular, every entry of `fieldNames` should be freed with + `PetscFree()`, every entry of `fields` should be destroyed with `ISDestroy()`, and both arrays should be freed with `PetscFree()`. - Developer Notes: + Developer Note: It is not clear why both this function and `DMCreateFieldDecomposition()` exist. Having two seems redundant and confusing. This function should likely be removed. @@ -2011,14 +2015,17 @@ PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, I The same as `DMCreateFieldIS()` but also returns a `DM` for each field. - The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with - `PetscFree()`, every entry of is should be destroyed with `ISDestroy()`, every entry of dm should be destroyed with `DMDestroy()`, + The user is responsible for freeing all requested arrays. In particular, every entry of `namelist` should be freed with + `PetscFree()`, every entry of `islist` should be destroyed with `ISDestroy()`, every entry of `dmlist` should be destroyed with `DMDestroy()`, and all of the arrays should be freed with `PetscFree()`. Developer Notes: It is not clear why this function and `DMCreateFieldIS()` exist. Having two seems redundant and confusing. -.seealso: [](ch_dmbase), `DM`, `DMAddField()`, `DMCreateFieldIS()`, `DMCreateSubDM()`, `DMCreateDomainDecomposition()`, `DMDestroy()`, `DMView()`, `DMCreateInterpolation()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()` + Unlike `DMRefine()`, `DMCoarsen()`, and `DMCreateDomainDecomposition()`, this provides no mechanism for hooks that are called after the + decomposition is computed.
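The freeing discipline spelled out in the notes above looks like this in practice; a minimal sketch assuming all outputs are requested (`ListFields` is a hypothetical helper):
.vb
static PetscErrorCode ListFields(DM dm)
{
  PetscInt  nf;
  char    **names;
  IS       *fields;

  PetscFunctionBeginUser;
  PetscCall(DMCreateFieldIS(dm, &nf, &names, &fields));
  for (PetscInt f = 0; f < nf; ++f) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "field %" PetscInt_FMT ": %s\n", f, names[f]));
    PetscCall(PetscFree(names[f]));   /* free each name individually */
    PetscCall(ISDestroy(&fields[f])); /* destroy each IS individually */
  }
  PetscCall(PetscFree(names)); /* then free the arrays themselves */
  PetscCall(PetscFree(fields));
  PetscFunctionReturn(PETSC_SUCCESS);
}
.ve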
+ +.seealso: [](ch_dmbase), `DM`, `DMAddField()`, `DMCreateFieldIS()`, `DMCreateSubDM()`, `DMCreateDomainDecomposition()`, `DMDestroy()`, `DMView()`, `DMCreateInterpolation()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()`, `DMRefine()`, `DMCoarsen()` @*/ PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) { @@ -2075,7 +2082,7 @@ PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCreateSubDM - Returns an `IS` and `DM` encapsulating a subproblem defined by the fields passed in. The fields are defined by `DMCreateFieldIS()`. @@ -2087,8 +2094,8 @@ PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist - fields - The field numbers of the selected fields Output Parameters: -+ is - The global indices for all the degrees of freedom in the new sub `DM` -- subdm - The `DM` for the subproblem ++ is - The global indices for all the degrees of freedom in the new sub `DM`, use `NULL` if not needed +- subdm - The `DM` for the subproblem, use `NULL` if not needed Level: intermediate @@ -2126,9 +2133,9 @@ PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, const PetscInt fields[], Note: You need to call `DMPlexSetMigrationSF()` on the original `DM` if you want the Global-To-Natural map to be automatically constructed -.seealso: [](ch_dmbase), `DM`, `DMCreateSubDM()`, `DMPlexSetMigrationSF()`, `DMDestroy()`, `DMView()`, `DMCreateInterpolation()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()`, `DMCreateFieldIS()` +.seealso: [](ch_dmbase), `DM`, `DMCreateSubDM()`, `DMPlexSetMigrationSF()`, `DMDestroy()`, `DMView()`, `DMCreateInterpolation()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()`, `DMCreateFieldIS()`, `DMCreateDomainDecomposition()` @*/ -PetscErrorCode DMCreateSuperDM(DM dms[], PetscInt n, IS **is, DM *superdm) +PetscErrorCode DMCreateSuperDM(DM dms[], PetscInt n, IS *is[], DM *superdm) { PetscInt i; @@ -2158,27 +2165,30 @@ PetscErrorCode DMCreateSuperDM(DM dms[], PetscInt n, IS **is, DM *superdm) Output Parameters: + n - The number of subproblems in the domain decomposition (or `NULL` if not requested) . namelist - The name for each subdomain (or `NULL` if not requested) -. innerislist - The global indices for each inner subdomain (or NULL, if not requested) -. outerislist - The global indices for each outer subdomain (or NULL, if not requested) -- dmlist - The `DM`s for each subdomain subproblem (or NULL, if not requested; if `NULL` is returned, no `DM`s are defined) +. innerislist - The global indices for each inner subdomain (or `NULL`, if not requested) +. outerislist - The global indices for each outer subdomain (or `NULL`, if not requested) +- dmlist - The `DM`s for each subdomain subproblem (or `NULL`, if not requested; if `NULL` is returned, no `DM`s are defined) Level: intermediate - Note: + Notes: Each `IS` contains the global indices of the dofs of the corresponding subdomains with in the dofs of the original `DM`. The inner subdomains conceptually define a nonoverlapping covering, while outer subdomains can overlap. The optional list of `DM`s define a `DM` for each subproblem. - The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with - `PetscFree()`, every entry of is should be destroyed with `ISDestroy()`, every entry of dm should be destroyed with `DMDestroy()`, + The user is responsible for freeing all requested arrays. In particular, every entry of `namelist` should be freed with + `PetscFree()`, every entry of `innerislist` and `outerislist` should be destroyed with `ISDestroy()`, every entry of `dmlist` should be destroyed with `DMDestroy()`, and all of the arrays should be freed with `PetscFree()`. Developer Notes: The `dmlist` is for the inner subdomains or the outer subdomains or all subdomains? -.seealso: [](ch_dmbase), `DM`, `DMCreateFieldDecomposition()`, `DMDestroy()`, `DMCreateDomainDecompositionScatters()`, `DMView()`, `DMCreateInterpolation()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()` + The names are inconsistent: the hooks use `DMSubDomainHook`, which is nothing like `DMCreateDomainDecomposition()`, while `DMRefineHook` is used for `DMRefine()`. + +.seealso: [](ch_dmbase), `DM`, `DMCreateFieldDecomposition()`, `DMDestroy()`, `DMCreateDomainDecompositionScatters()`, `DMView()`, `DMCreateInterpolation()`, + `DMSubDomainHookAdd()`, `DMSubDomainHookRemove()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()`, `DMRefine()`, `DMCoarsen()` @*/ PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *n, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) { @@ -2230,13 +2240,14 @@ PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *n, char ***namelist, } /*@C - DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector + DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector for subdomains created with + `DMCreateDomainDecomposition()` Not Collective Input Parameters: + dm - the `DM` object -. n - the number of subdomain scatters +. n - the number of subdomains - subdms - the local subdomains Output Parameters: @@ -2252,13 +2263,13 @@ PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *n, char ***namelist, of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of solution and residual data. - Developer Notes: + Developer Note: Can the subdms input be anything or are they exactly the `DM` obtained from `DMCreateDomainDecomposition()`?
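The corresponding cleanup for `DMCreateDomainDecomposition()` follows the same pattern; a minimal sketch, assuming the implementation returns all of the requested arrays (`DestroyDecomposition` is a hypothetical helper):
.vb
static PetscErrorCode DestroyDecomposition(DM dm)
{
  PetscInt n;
  char   **names;
  IS      *inner, *outer;
  DM      *subdms;

  PetscFunctionBeginUser;
  PetscCall(DMCreateDomainDecomposition(dm, &n, &names, &inner, &outer, &subdms));
  for (PetscInt i = 0; i < n; ++i) {
    PetscCall(PetscFree(names[i]));
    PetscCall(ISDestroy(&inner[i]));
    PetscCall(ISDestroy(&outer[i]));
    PetscCall(DMDestroy(&subdms[i]));
  }
  PetscCall(PetscFree(names));
  PetscCall(PetscFree(inner));
  PetscCall(PetscFree(outer));
  PetscCall(PetscFree(subdms));
  PetscFunctionReturn(PETSC_SUCCESS);
}
.ve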
.seealso: [](ch_dmbase), `DM`, `DMCreateDomainDecomposition()`, `DMDestroy()`, `DMView()`, `DMCreateInterpolation()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()`, `DMCreateFieldIS()` @*/ -PetscErrorCode DMCreateDomainDecompositionScatters(DM dm, PetscInt n, DM *subdms, VecScatter **iscat, VecScatter **oscat, VecScatter **gscat) +PetscErrorCode DMCreateDomainDecompositionScatters(DM dm, PetscInt n, DM *subdms, VecScatter *iscat[], VecScatter *oscat[], VecScatter *gscat[]) { PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); @@ -2287,7 +2298,8 @@ PetscErrorCode DMCreateDomainDecompositionScatters(DM dm, PetscInt n, DM *subdms Note: If no refinement was done, the return value is `NULL` -.seealso: [](ch_dmbase), `DM`, `DMCoarsen()`, `DMDestroy()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateInterpolation()` +.seealso: [](ch_dmbase), `DM`, `DMCoarsen()`, `DMDestroy()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateInterpolation()`, `DMCreateDomainDecomposition()`, + `DMRefineHookAdd()`, `DMRefineHookRemove()` @*/ PetscErrorCode DMRefine(DM dm, MPI_Comm comm, DM *dmf) { @@ -2415,7 +2427,7 @@ PetscErrorCode DMRefineHookRemove(DM coarse, PetscErrorCode (*refinehook)(DM, DM Level: developer - Developer Notes: + Developer Note: This routine is called `DMInterpolate()` while the hook is called `DMRefineHookAdd()`. It would be better to have an an API with consistent terminology. @@ -2456,7 +2468,7 @@ PetscErrorCode DMInterpolate(DM coarse, Mat interp, DM fine) out of the solution vector. Or if interpolation is inherently a nonlinear operation, such as a method using slope-limiting reconstruction. - Developer Notes: + Developer Note: This doesn't just interpolate "solutions" so its API name is questionable. .seealso: [](ch_dmbase), `DM`, `DMInterpolate()`, `DMCreateInterpolation()` @@ -3226,7 +3238,7 @@ PetscErrorCode DMLocalToGlobalEnd(DM dm, Vec l, InsertMode mode, Vec g) Level: intermediate - Notes: + Note: Must be followed by `DMLocalToLocalEnd()`. .seealso: [](ch_dmbase), `DM`, `DMLocalToLocalEnd()`, `DMCoarsen()`, `DMDestroy()`, `DMView()`, `DMCreateLocalVector()`, `DMCreateGlobalVector()`, `DMCreateInterpolation()`, `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()` @@ -3283,7 +3295,8 @@ PetscErrorCode DMLocalToLocalEnd(DM dm, Vec g, InsertMode mode, Vec l) Level: developer -.seealso: [](ch_dmbase), `DM`, `DMRefine()`, `DMDestroy()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateInterpolation()` +.seealso: [](ch_dmbase), `DM`, `DMRefine()`, `DMDestroy()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateInterpolation()`, `DMCreateDomainDecomposition()`, + `DMCoarsenHookAdd()`, `DMCoarsenHookRemove()` @*/ PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) { @@ -3380,8 +3393,10 @@ PetscErrorCode DMCoarsenHookAdd(DM fine, PetscErrorCode (*coarsenhook)(DM fine, Level: advanced - Note: - This function does nothing if the hook is not in the list. + Notes: + This function does nothing if the `coarsenhook` is not in the list. 
+ + See `DMCoarsenHookAdd()` for the calling sequence of `coarsenhook` and `restricthook` .seealso: [](ch_dmbase), `DM`, `DMCoarsenHookAdd()`, `DMRefineHookAdd()`, `SNESFASGetInterpolation()`, `SNESFASGetInjection()`, `PetscObjectCompose()`, `PetscContainerCreate()` @*/ @@ -3416,7 +3431,7 @@ PetscErrorCode DMCoarsenHookRemove(DM fine, PetscErrorCode (*coarsenhook)(DM, DM Level: developer - Developer Notes: + Developer Note: Though this routine is called `DMRestrict()` the hooks are added with `DMCoarsenHookAdd()`, a consistent terminology would be better .seealso: [](ch_dmbase), `DM`, `DMCoarsenHookAdd()`, `MatRestrict()`, `DMInterpolate()`, `DMRefineHookAdd()` @@ -3433,7 +3448,7 @@ PetscErrorCode DMRestrict(DM fine, Mat restrct, Vec rscale, Mat inject, DM coars } /*@C - DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid + DMSubDomainHookAdd - adds a callback to be run when restricting a problem to subdomain `DM`s with `DMCreateDomainDecomposition()` Logically Collective; No Fortran Support @@ -3445,27 +3460,30 @@ PetscErrorCode DMRestrict(DM fine, Mat restrct, Vec rscale, Mat inject, DM coars Calling sequence of `ddhook`: + global - global `DM` -. block - block `DM` +. block - subdomain `DM` - ctx - optional user-defined function context Calling sequence of `restricthook`: + global - global `DM` -. out - scatter to the outer (with ghost and overlap points) block vector -. in - scatter to block vector values only owned locally -. block - block `DM` +. out - scatter to the outer (with ghost and overlap points) sub vector +. in - scatter to sub vector values only owned locally +. block - subdomain `DM` - ctx - optional user-defined function context Level: advanced Notes: - This function is only needed if auxiliary data needs to be set up on subdomain `DM`s. + This function can be used if auxiliary data needs to be set up on subdomain `DM`s. If this function is called multiple times, the hooks will be run in the order they are added. In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to extract the global information from its context (instead of from the `SNES`). 
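Hooks matching the `DMSubDomainHookAdd()` calling sequences documented above might look like the following; a minimal sketch (the hook names and `UserCtx` are hypothetical):
.vb
typedef struct {
  PetscReal nu; /* example problem parameter carried by the hooks */
} UserCtx;

static PetscErrorCode MySubDomainSetUp(DM global, DM block, void *ctx)
{
  PetscFunctionBeginUser;
  PetscCall(DMSetApplicationContext(block, ctx)); /* make the parameters visible on the subdomain */
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MySubDomainRestrict(DM global, VecScatter out, VecScatter in, DM block, void *ctx)
{
  PetscFunctionBeginUser;
  /* move any auxiliary data the subdomain solve needs onto block here */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* registration, e.g. after the decomposition is set up:
   PetscCall(DMSubDomainHookAdd(global, MySubDomainSetUp, MySubDomainRestrict, &user)); */
.ve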
-.seealso: [](ch_dmbase), `DM`, `DMSubDomainHookRemove()`, `DMRefineHookAdd()`, `SNESFASGetInterpolation()`, `SNESFASGetInjection()`, `PetscObjectCompose()`, `PetscContainerCreate()` + Developer Note: + It is unclear what "block solve" means within the definition of `restricthook` + +.seealso: [](ch_dmbase), `DM`, `DMSubDomainHookRemove()`, `DMRefineHookAdd()`, `SNESFASGetInterpolation()`, `SNESFASGetInjection()`, `PetscObjectCompose()`, `PetscContainerCreate()`, `DMCreateDomainDecomposition()` @*/ PetscErrorCode DMSubDomainHookAdd(DM global, PetscErrorCode (*ddhook)(DM global, DM block, void *ctx), PetscErrorCode (*restricthook)(DM global, VecScatter out, VecScatter in, DM block, void *ctx), void *ctx) { @@ -3486,7 +3504,7 @@ PetscErrorCode DMSubDomainHookAdd(DM global, PetscErrorCode (*ddhook)(DM global, } /*@C - DMSubDomainHookRemove - remove a callback from the list to be run when restricting a problem to the coarse grid + DMSubDomainHookRemove - remove a callback from the list to be run when restricting a problem to subdomain `DM`s with `DMCreateDomainDecomposition()` Logically Collective; No Fortran Support @@ -3498,7 +3516,11 @@ PetscErrorCode DMSubDomainHookAdd(DM global, PetscErrorCode (*ddhook)(DM global, Level: advanced -.seealso: [](ch_dmbase), `DM`, `DMSubDomainHookAdd()`, `SNESFASGetInterpolation()`, `SNESFASGetInjection()`, `PetscObjectCompose()`, `PetscContainerCreate()` + Note: + See `DMSubDomainHookAdd()` for the calling sequences of `ddhook` and `restricthook` + +.seealso: [](ch_dmbase), `DM`, `DMSubDomainHookAdd()`, `SNESFASGetInterpolation()`, `SNESFASGetInjection()`, `PetscObjectCompose()`, `PetscContainerCreate()`, + `DMCreateDomainDecomposition()` @*/ PetscErrorCode DMSubDomainHookRemove(DM global, PetscErrorCode (*ddhook)(DM, DM, void *), PetscErrorCode (*restricthook)(DM, VecScatter, VecScatter, DM, void *), void *ctx) { @@ -3518,7 +3540,7 @@ PetscErrorCode DMSubDomainHookRemove(DM global, PetscErrorCode (*ddhook)(DM, DM, } /*@ - DMSubDomainRestrict - restricts user-defined problem data to a block `DM` by running hooks registered by `DMSubDomainHookAdd()` + DMSubDomainRestrict - restricts user-defined problem data to a subdomain `DM` by running hooks registered by `DMSubDomainHookAdd()` Collective if any hooks are @@ -3530,7 +3552,7 @@ PetscErrorCode DMSubDomainHookRemove(DM global, PetscErrorCode (*ddhook)(DM, DM, Level: developer -.seealso: [](ch_dmbase), `DM`, `DMCoarsenHookAdd()`, `MatRestrict()` +.seealso: [](ch_dmbase), `DM`, `DMCoarsenHookAdd()`, `MatRestrict()`, `DMCreateDomainDecomposition()` @*/ PetscErrorCode DMSubDomainRestrict(DM global, VecScatter oscatter, VecScatter gscatter, DM subdm) { @@ -3591,7 +3613,7 @@ PetscErrorCode DMSetCoarsenLevel(DM dm, PetscInt level) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMRefineHierarchy - Refines a `DM` object, all levels at once Collective @@ -3623,7 +3645,7 @@ PetscErrorCode DMRefineHierarchy(DM dm, PetscInt nlevels, DM dmf[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCoarsenHierarchy - Coarsens a `DM` object, all levels at once Collective @@ -3687,8 +3709,10 @@ PetscErrorCode DMSetApplicationContextDestroy(DM dm, PetscErrorCode (*destroy)(v Level: intermediate - Note: + Notes: A user context is a way to pass problem specific information that is accessible whenever the `DM` is available + In a multilevel solver, the user context is shared by all the `DM` in the hierarchy; it is thus not advisable + to store objects that represent discretized quantities inside the context. 
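Following the advice above, the context would hold plain problem parameters only; a minimal sketch (the `AppParams` struct is hypothetical):
.vb
typedef struct {
  PetscReal kappa; /* plain parameters are safe to share across a refinement hierarchy */
  PetscInt  ncomp;
} AppParams;

/* e.g. in main() */
AppParams params = {1.0, 3};
PetscCall(DMSetApplicationContext(dm, &params));

/* later, wherever the DM (or a coarsened/refined companion) is available */
AppParams *ctx;
PetscCall(DMGetApplicationContext(dm, &ctx));
.ve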
.seealso: [](ch_dmbase), `DM`, `DMGetApplicationContext()`, `DMView()`, `DMCreateGlobalVector()`, `DMCreateInterpolation()`, `DMCreateColoring()`, `DMCreateMatrix()`, `DMCreateMassMatrix()` @*/ @@ -3772,7 +3796,7 @@ PetscErrorCode DMHasVariableBounds(DM dm, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMComputeVariableBounds - compute variable bounds used by `SNESVI`. Logically Collective @@ -3877,7 +3901,7 @@ PetscErrorCode DMHasCreateInjection(DM dm, PetscBool *flg) PetscFunctionList DMList = NULL; PetscBool DMRegisterAllCalled = PETSC_FALSE; -/*@C +/*@ DMSetType - Builds a `DM`, for a particular `DM` implementation. Collective @@ -3917,7 +3941,7 @@ PetscErrorCode DMSetType(DM dm, DMType method) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetType - Gets the `DM` type name (as a string) from the `DM`. Not Collective @@ -3942,7 +3966,7 @@ PetscErrorCode DMGetType(DM dm, DMType *type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMConvert - Converts a `DM` to another `DM`, either of the same or different type. Collective @@ -3956,7 +3980,7 @@ PetscErrorCode DMGetType(DM dm, DMType *type) Level: intermediate - Notes: + Note: Cannot be used to convert a sequential `DM` to a parallel or a parallel to sequential, the MPI communicator of the generated `DM` is always the same as the communicator of the input `DM`. @@ -4056,7 +4080,7 @@ PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) /*@C DMRegister - Adds a new `DM` type implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine @@ -4064,7 +4088,7 @@ PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) Level: advanced - Notes: + Note: `DMRegister()` may be called multiple times to add several user-defined `DM`s Example Usage: @@ -4092,7 +4116,7 @@ PetscErrorCode DMRegister(const char sname[], PetscErrorCode (*function)(DM)) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMLoad - Loads a DM that has been stored in binary with `DMView()`. Collective @@ -4372,14 +4396,14 @@ PetscErrorCode DMSetLocalSection(DM dm, PetscSection section) } /*@C - DMCreateSectionPermutation - Create a permutation of the `PetscSection` chart and optionally a blokc structure. + DMCreateSectionPermutation - Create a permutation of the `PetscSection` chart and optionally a block structure. Input Parameter: . dm - The `DM` - Output Parameter: + Output Parameters: + perm - A permutation of the mesh points in the chart -- blockStarts - A high bit is set for the point that begins every block, or NULL for default blocking +- blockStarts - A high bit is set for the point that begins every block, or `NULL` for default blocking Level: developer @@ -4711,7 +4735,7 @@ PetscErrorCode DMSetSectionSF(DM dm, PetscSF sf) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCreateSectionSF - Create the `PetscSF` encoding the parallel dof overlap for the `DM` based upon the `PetscSection`s describing the data layout. @@ -4725,10 +4749,11 @@ PetscErrorCode DMSetSectionSF(DM dm, PetscSF sf) Note: One usually uses `DMGetSectionSF()` to obtain the `PetscSF` - Developer Notes: + Developer Note: Since this routine has for arguments the two sections from the `DM` and puts the resulting `PetscSF` directly into the `DM`, perhaps this function should not take the local and global sections as - input and should just obtain them from the `DM`? + input and should just obtain them from the `DM`? 
Also, PETSc creation functions return the object + they create, while this returns nothing. .seealso: [](ch_dmbase), `DM`, `DMGetSectionSF()`, `DMSetSectionSF()`, `DMGetLocalSection()`, `DMGetGlobalSection()` @*/ @@ -4998,7 +5023,7 @@ PetscErrorCode DMSetField_Internal(DM dm, PetscInt f, DMLabel label, PetscObject PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSetField - Set the discretization object for a given `DM` field. Usually one would call `DMAddField()` which automatically handles the field numbering. @@ -5027,7 +5052,7 @@ PetscErrorCode DMSetField(DM dm, PetscInt f, DMLabel label, PetscObject disc) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMAddField - Add a field to a `DM` object. A field is a function space defined by of a set of discretization points (geometric entities) and a discretization object that defines the function space associated with those points. @@ -5780,7 +5805,7 @@ PetscErrorCode DMFindRegionNum(DM dm, PetscDS ds, PetscInt *num) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCreateFEDefault - Create a `PetscFE` based on the celltype for the mesh Not Collective @@ -6299,7 +6324,7 @@ PetscErrorCode DMCopyDS(DM dm, DM newdm) Level: advanced - Developer Notes: + Developer Note: Really ugly name, nothing in PETSc is called a `Disc` plus it is an ugly abbreviation .seealso: [](ch_dmbase), `DM`, `DMCopyFields()`, `DMCopyDS()` @@ -6475,7 +6500,7 @@ PetscErrorCode DMGetOutputDM(DM dm, DM *odm) This is intended for output that should appear in sequence, for instance a set of timesteps in an `PETSCVIEWERHDF5` file, or a set of realizations of a stochastic system. - Developer Notes: + Developer Note: The `DM` serves as a convenient place to store the current iteration value. The iteration is not not directly related to the `DM`. @@ -6521,12 +6546,12 @@ PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num, PetscReal val) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMOutputSequenceLoad - Retrieve the sequence value from a `PetscViewer` Input Parameters: + dm - The original `DM` -. viewer - The viewer to get it from +. viewer - The `PetscViewer` to get it from . name - The sequence name - num - The output sequence number @@ -6539,18 +6564,19 @@ PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num, PetscReal val) This is intended for output that should appear in sequence, for instance a set of timesteps in an `PETSCVIEWERHDF5` file, or a set of realizations of a stochastic system. - Developer Notes: + Developer Note: It is unclear at the user API level why a `DM` is needed as input .seealso: [](ch_dmbase), `DM`, `DMGetOutputSequenceNumber()`, `DMSetOutputSequenceNumber()`, `VecView()` @*/ -PetscErrorCode DMOutputSequenceLoad(DM dm, PetscViewer viewer, const char *name, PetscInt num, PetscReal *val) +PetscErrorCode DMOutputSequenceLoad(DM dm, PetscViewer viewer, const char name[], PetscInt num, PetscReal *val) { PetscBool ishdf5; PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); + PetscAssertPointer(name, 3); PetscAssertPointer(val, 5); PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5)); if (ishdf5) { @@ -6564,6 +6590,46 @@ PetscErrorCode DMOutputSequenceLoad(DM dm, PetscViewer viewer, const char *name, PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMGetOutputSequenceLength - Retrieve the number of sequence values from a `PetscViewer` + + Input Parameters: ++ dm - The original `DM` +.
viewer - The `PetscViewer` to get it from +- name - The sequence name + + Output Parameter: +. len - The length of the output sequence + + Level: intermediate + + Note: + This is intended for output that should appear in sequence, for instance + a set of timesteps in an `PETSCVIEWERHDF5` file, or a set of realizations of a stochastic system. + + Developer Note: + It is unclear at the user API level why a `DM` is needed as input + +.seealso: [](ch_dmbase), `DM`, `DMGetOutputSequenceNumber()`, `DMSetOutputSequenceNumber()`, `VecView()` +@*/ +PetscErrorCode DMGetOutputSequenceLength(DM dm, PetscViewer viewer, const char name[], PetscInt *len) +{ + PetscBool ishdf5; + + PetscFunctionBegin; + PetscValidHeaderSpecific(dm, DM_CLASSID, 1); + PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); + PetscAssertPointer(name, 3); + PetscAssertPointer(len, 4); + PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5)); + if (ishdf5) { +#if defined(PETSC_HAVE_HDF5) + PetscCall(DMSequenceGetLength_HDF5_Internal(dm, name, len, viewer)); +#endif + } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerHDF5Open()"); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ DMGetUseNatural - Get the flag for creating a mapping to the natural order when a `DM` is (re)distributed in parallel @@ -6597,11 +6663,11 @@ PetscErrorCode DMGetUseNatural(DM dm, PetscBool *useNatural) + dm - The `DM` - useNatural - `PETSC_TRUE` to build the mapping to a natural order during distribution + Level: beginner + Note: This also causes the map to be build after `DMCreateSubDM()` and `DMCreateSuperDM()` - Level: beginner - .seealso: [](ch_dmbase), `DM`, `DMGetUseNatural()`, `DMCreate()`, `DMPlexDistribute()`, `DMCreateSubDM()`, `DMCreateSuperDM()` @*/ PetscErrorCode DMSetUseNatural(DM dm, PetscBool useNatural) @@ -6613,7 +6679,7 @@ PetscErrorCode DMSetUseNatural(DM dm, PetscBool useNatural) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCreateLabel - Create a label of the given name if it does not already exist in the `DM` Not Collective @@ -6643,7 +6709,7 @@ PetscErrorCode DMCreateLabel(DM dm, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCreateLabelAtIndex - Create a label of the given name at the given index. If it already exists in the `DM`, move it to this index. 
Not Collective @@ -6695,7 +6761,7 @@ PetscErrorCode DMCreateLabelAtIndex(DM dm, PetscInt l, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetLabelValue - Get the value in a `DMLabel` for the given point, with -1 as the default Not Collective @@ -6725,7 +6791,7 @@ PetscErrorCode DMGetLabelValue(DM dm, const char name[], PetscInt point, PetscIn PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSetLabelValue - Add a point to a `DMLabel` with given value Not Collective @@ -6758,7 +6824,7 @@ PetscErrorCode DMSetLabelValue(DM dm, const char name[], PetscInt point, PetscIn PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMClearLabelValue - Remove a point from a `DMLabel` with given value Not Collective @@ -6786,7 +6852,7 @@ PetscErrorCode DMClearLabelValue(DM dm, const char name[], PetscInt point, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetLabelSize - Get the value of `DMLabelGetNumValues()` of a `DMLabel` in the `DM` Not Collective @@ -6800,7 +6866,7 @@ PetscErrorCode DMClearLabelValue(DM dm, const char name[], PetscInt point, Petsc Level: beginner - Developer Notes: + Developer Note: This should be renamed to something like `DMGetLabelNumValues()` or removed. .seealso: [](ch_dmbase), `DM`, `DMLabelGetNumValues()`, `DMSetLabelValue()`, `DMGetLabel()` @@ -6820,7 +6886,7 @@ PetscErrorCode DMGetLabelSize(DM dm, const char name[], PetscInt *size) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetLabelIdIS - Get the `DMLabelGetValueIS()` from a `DMLabel` in the `DM` Not Collective @@ -6855,14 +6921,14 @@ PetscErrorCode DMGetLabelIdIS(DM dm, const char name[], IS *ids) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetStratumSize - Get the number of points in a label stratum Not Collective Input Parameters: + dm - The `DM` object -. name - The label name +. name - The label name of the stratum - value - The stratum value Output Parameter: @@ -6887,7 +6953,7 @@ PetscErrorCode DMGetStratumSize(DM dm, const char name[], PetscInt value, PetscI PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetStratumIS - Get the points in a label stratum Not Collective @@ -6919,7 +6985,7 @@ PetscErrorCode DMGetStratumIS(DM dm, const char name[], PetscInt value, IS *poin PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSetStratumIS - Set the points in a label stratum Not Collective @@ -6948,7 +7014,7 @@ PetscErrorCode DMSetStratumIS(DM dm, const char name[], PetscInt value, IS point PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMClearLabelStratum - Remove all points from a stratum from a `DMLabel` Not Collective @@ -7008,7 +7074,7 @@ PetscErrorCode DMGetNumLabels(DM dm, PetscInt *numLabels) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetLabelName - Return the name of the nth label Not Collective @@ -7022,12 +7088,12 @@ PetscErrorCode DMGetNumLabels(DM dm, PetscInt *numLabels) Level: intermediate - Developer Notes: + Developer Note: Some of the functions that operate on labels using their number have the suffix ByNum, others do not. 
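Since the label accessors above are plain queries, a short sketch of how they chain together in C may help; the label name "boundary" and the point number are illustrative:

.vb
  IS       faces;
  PetscInt point = 17, n;

  PetscCall(DMCreateLabel(dm, "boundary"));
  PetscCall(DMSetLabelValue(dm, "boundary", point, 1));
  PetscCall(DMGetStratumSize(dm, "boundary", 1, &n));
  PetscCall(DMGetStratumIS(dm, "boundary", 1, &faces));
  PetscCall(ISDestroy(&faces));
.ve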
.seealso: [](ch_dmbase), `DM`, `DMLabel`, `DMGetLabelByNum()`, `DMGetLabel()`, `DMGetLabelValue()`, `DMSetLabelValue()`, `DMGetStratumIS()` @*/ -PetscErrorCode DMGetLabelName(DM dm, PetscInt n, const char **name) +PetscErrorCode DMGetLabelName(DM dm, PetscInt n, const char *name[]) { DMLabelLink next = dm->labels; PetscInt l = 0; @@ -7046,7 +7112,7 @@ PetscErrorCode DMGetLabelName(DM dm, PetscInt n, const char **name) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Label %" PetscInt_FMT " does not exist in this DM", n); } -/*@C +/*@ DMHasLabel - Determine whether the `DM` has a label of a given name Not Collective @@ -7082,7 +7148,7 @@ PetscErrorCode DMHasLabel(DM dm, const char name[], PetscBool *hasLabel) } // PetscClangLinter pragma ignore: -fdoc-section-header-unknown -/*@C +/*@ DMGetLabel - Return the label of a given name, or `NULL`, from a `DM` Not Collective @@ -7129,7 +7195,7 @@ PetscErrorCode DMGetLabel(DM dm, const char name[], DMLabel *label) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetLabelByNum - Return the nth label on a `DM` Not Collective @@ -7164,7 +7230,7 @@ PetscErrorCode DMGetLabelByNum(DM dm, PetscInt n, DMLabel *label) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Label %" PetscInt_FMT " does not exist in this DM", n); } -/*@C +/*@ DMAddLabel - Add the label to this `DM` Not Collective @@ -7203,7 +7269,7 @@ PetscErrorCode DMAddLabel(DM dm, DMLabel label) } // PetscClangLinter pragma ignore: -fdoc-section-header-unknown -/*@C +/*@ DMSetLabel - Replaces the label of a given name, or ignores it if the name is not present Not Collective @@ -7252,7 +7318,7 @@ PetscErrorCode DMSetLabel(DM dm, DMLabel label) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMRemoveLabel - Remove the label given by name from this `DM` Not Collective @@ -7346,7 +7412,7 @@ PetscErrorCode DMRemoveLabelBySelf(DM dm, DMLabel *label, PetscBool failNotFound PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetLabelOutput - Get the output flag for a given label Not Collective @@ -7385,7 +7451,7 @@ PetscErrorCode DMGetLabelOutput(DM dm, const char name[], PetscBool *output) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No label named %s was present in this dm", name); } -/*@C +/*@ DMSetLabelOutput - Set if a given label should be saved to a `PetscViewer` in calls to `DMView()` Not Collective @@ -7523,6 +7589,9 @@ PetscErrorCode DMCopyLabels(DM dmA, DM dmB, PetscCopyMode mode, PetscBool all, D Labels are matched by name. If the number of labels and their names are equal, `DMLabelCompare()` is used to compare each pair of labels with the same name. + Developer Note: + Cannot automatically generate the Fortran stub because `message` must be freed with `PetscFree()` + .seealso: [](ch_dmbase), `DM`, `DMLabel`, `DMAddLabel()`, `DMCopyLabelsMode`, `DMLabelCompare()` @*/ PetscErrorCode DMCompareLabels(DM dm0, DM dm1, PetscBool *equal, char **message) @@ -7896,8 +7965,9 @@ PetscErrorCode DMSetFineDM(DM dm, DM fdm) Notes: Both bcFunc and bcFunc_t will depend on the boundary condition type. If the type is `DM_BC_ESSENTIAL`, then the calling sequence is\: - -$ void bcFunc(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar bcval[]) +.vb + void bcFunc(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar bcval[]) +.ve If the type is `DM_BC_ESSENTIAL_FIELD` or other _FIELD value, then the calling sequence is\: @@ -8246,14 +8316,14 @@ PetscErrorCode DMProjectFunctionLabelLocal(DM dm, PetscReal time, DMLabel label, . 
constants - The value of each constant - f - The value of the function at this point in space + Level: intermediate + Note: There are three different `DM`s that potentially interact in this function. The output `DM`, dm, specifies the layout of the values calculated by funcs. The input `DM`, attached to U, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or a subdomain. You can also output a different number of fields than the input, with different discretizations. Last, the auxiliary `DM`, attached to the auxiliary field vector, which is attached to dm, can also be different. It can have a different topology, number of fields, and discretizations. - Level: intermediate - Developer Notes: This API is specific to only particular usage of `DM` @@ -8312,14 +8382,14 @@ PetscErrorCode DMProjectFieldLocal(DM dm, PetscReal time, Vec localU, void (**fu . constants - The value of each constant - f - The value of the function at this point in space + Level: intermediate + Note: There are three different `DM`s that potentially interact in this function. The output `DM`, dm, specifies the layout of the values calculated by funcs. The input `DM`, attached to localU, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or a subdomain. You can also output a different number of fields than the input, with different discretizations. Last, the auxiliary `DM`, attached to the auxiliary field vector, which is attached to dm, can also be different. It can have a different topology, number of fields, and discretizations. - Level: intermediate - Developer Notes: This API is specific to only particular usage of `DM` @@ -8377,14 +8447,14 @@ PetscErrorCode DMProjectFieldLabelLocal(DM dm, PetscReal time, DMLabel label, Pe . constants - The value of each constant - f - The value of the function at this point in space + Level: intermediate + Note: There are three different `DM`s that potentially interact in this function. The output `DM`, dm, specifies the layout of the values calculated by funcs. The input `DM`, attached to U, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or a subdomain. You can also output a different number of fields than the input, with different discretizations. Last, the auxiliary `DM`, attached to the auxiliary field vector, which is attached to dm, can also be different. It can have a different topology, number of fields, and discretizations. - Level: intermediate - Developer Notes: This API is specific to only particular usage of `DM` @@ -8454,14 +8524,14 @@ PetscErrorCode DMProjectFieldLabel(DM dm, PetscReal time, DMLabel label, PetscIn . constants - The value of each constant - f - The value of the function at this point in space + Level: intermediate + Note: There are three different `DM`s that potentially interact in this function. The output `DM`, dm, specifies the layout of the values calculated by funcs. The input `DM`, attached to U, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or a subdomain. You can also output a different number of fields than the input, with different discretizations. Last, the auxiliary `DM`, attached to the auxiliary field vector, which is attached to dm, can also be different. It can have a different topology, number of fields, and discretizations. 
- Level: intermediate - Developer Notes: This API is specific to only particular usage of `DM` @@ -8643,8 +8713,8 @@ static PetscErrorCode MatFDColoringApply_AIJDM(Mat J, MatFDColoring coloring, Ve Level: advanced - Developer Notes: - this routine exists because the PETSc `Mat` library does not know about the `DM` objects + Developer Note: + This routine exists because the PETSc `Mat` library does not know about the `DM` objects .seealso: [](ch_dmbase), `DM`, `MatFDColoring`, `MatFDColoringCreate()`, `ISColoringType` @*/ @@ -8722,7 +8792,7 @@ PetscErrorCode MatFDColoringUseDM(Mat coloring, MatFDColoring fdcoloring) is required on each rank. However, in `DM` implementations which store all this information locally, this function may be merely "Logically Collective". - Developer Notes: + Developer Note: Compatibility is assumed to be a symmetric concept; `DM` A is compatible with `DM` B iff B is compatible with A. Thus, this function checks the implementations of both dm and dmc (if they are of different types), attempting to determine @@ -8802,10 +8872,10 @@ PetscErrorCode DMGetCompatibility(DM dm1, DM dm2, PetscBool *compatible, PetscBo `DMMonitorSet()` multiple times or with `DMMonitorSetFromOptions()`; all will be called in the order in which they were set. - Fortran Notes: + Fortran Note: Only a single monitor function can be set for each `DM` object - Developer Notes: + Developer Note: This API has a generic name but seems specific to a very particular aspect of the use of `DM` .seealso: [](ch_dmbase), `DM`, `DMMonitorCancel()`, `DMMonitorSetFromOptions()`, `DMMonitor()` @@ -8917,7 +8987,7 @@ PetscErrorCode DMMonitorSetFromOptions(DM dm, const char name[], const char help Level: developer - Developer Notes: + Developer Note: Note should indicate when during the life of the `DM` the monitor is run. It appears to be related to the discretization process and seems rather specialized, since some `DM` have no concept of discretization. @@ -9121,7 +9191,7 @@ PetscErrorCode DMSetAuxiliaryVec(DM dm, DMLabel label, PetscInt value, PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetAuxiliaryLabels - Get the labels, values, and parts for all auxiliary vectors in this `DM` Not Collective @@ -9232,7 +9302,7 @@ PetscErrorCode DMClearAuxiliaryVec(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPolytopeMatchOrientation - Determine an orientation (transformation) that takes the source face arrangement to the target face arrangement Not Collective @@ -9284,7 +9354,7 @@ PetscErrorCode DMPolytopeMatchOrientation(DMPolytopeType ct, const PetscInt sour PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPolytopeGetOrientation - Determine an orientation (transformation) that takes the source face arrangement to the target face arrangement Not Collective @@ -9302,7 +9372,7 @@ PetscErrorCode DMPolytopeMatchOrientation(DMPolytopeType ct, const PetscInt sour Note: This function is the same as `DMPolytopeMatchOrientation()` except it will generate an error if no suitable orientation can be found. 
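A sketch of the orientation matching above, under the assumption that the full signature is DMPolytopeMatchOrientation(DMPolytopeType ct, const PetscInt sourceCone[], const PetscInt targetCone[], PetscInt *ornt, PetscBool *found) (only its prefix is visible in this hunk); the cone values are illustrative:

.vb
  const PetscInt source[3] = {1, 2, 3}; /* face cone as stored */
  const PetscInt target[3] = {2, 3, 1}; /* the same cone, rotated once */
  PetscInt       ornt;
  PetscBool      found;

  PetscCall(DMPolytopeMatchOrientation(DM_POLYTOPE_TRIANGLE, source, target, &ornt, &found));
  if (!found) { /* no transformation maps source onto target */ }
.ve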
- Developer Notes: + Developer Note: It is unclear why this function needs to exist since one can simply call `DMPolytopeMatchOrientation()` and error if none is found .seealso: [](ch_dmbase), `DM`, `DMPolytopeType`, `DMPolytopeMatchOrientation()`, `DMPolytopeGetVertexOrientation()`, `DMPolytopeMatchVertexOrientation()` @@ -9317,7 +9387,7 @@ PetscErrorCode DMPolytopeGetOrientation(DMPolytopeType ct, const PetscInt source PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPolytopeMatchVertexOrientation - Determine an orientation (transformation) that takes the source vertex arrangement to the target vertex arrangement Not Collective @@ -9333,7 +9403,7 @@ PetscErrorCode DMPolytopeGetOrientation(DMPolytopeType ct, const PetscInt source Level: advanced - Note: + Notes: An arrangement is a vertex order Each orientation (transformation) is labeled with an integer from negative `DMPolytopeTypeGetNumArrangements(ct)`/2 to `DMPolytopeTypeGetNumArrangements(ct)`/2 @@ -9369,7 +9439,7 @@ PetscErrorCode DMPolytopeMatchVertexOrientation(DMPolytopeType ct, const PetscIn PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPolytopeGetVertexOrientation - Determine an orientation (transformation) that takes the source vertex arrangement to the target vertex arrangement Not Collective @@ -9387,7 +9457,7 @@ PetscErrorCode DMPolytopeMatchVertexOrientation(DMPolytopeType ct, const PetscIn Note: This function is the same as `DMPolytopeMatchVertexOrientation()` except it errors if no orientation is possible. - Developer Notes: + Developer Note: It is unclear why this function needs to exist since one can simply call `DMPolytopeMatchVertexOrientation()` and error if none is found .seealso: [](ch_dmbase), `DM`, `DMPolytopeType`, `DMPolytopeMatchVertexOrientation()`, `DMPolytopeGetOrientation()` @@ -9402,7 +9472,7 @@ PetscErrorCode DMPolytopeGetVertexOrientation(DMPolytopeType ct, const PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMPolytopeInCellTest - Check whether a point lies inside the reference cell of given type Not Collective @@ -9500,7 +9570,7 @@ PetscErrorCode DMReorderSectionGetDefault(DM dm, DMReorderDefaultFlag *reorder) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMReorderSectionSetType - Set the type of local section reordering Logically collective @@ -9521,7 +9591,7 @@ PetscErrorCode DMReorderSectionSetType(DM dm, MatOrderingType reorder) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMReorderSectionGetType - Get the reordering type for the local section Not collective diff --git a/src/dm/interface/dmgenerate.c b/src/dm/interface/dmgenerate.c index d71c3ceec3d..a99a2aa3d18 100644 --- a/src/dm/interface/dmgenerate.c +++ b/src/dm/interface/dmgenerate.c @@ -69,7 +69,7 @@ PetscErrorCode DMGenerateRegisterAll(void) /*@C DMGenerateRegister - Adds a grid generator to `DM` - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined grid generator @@ -135,7 +135,7 @@ PetscErrorCode DMGenerateRegisterDestroy(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMAdaptLabel - Adapt a `DM` based on a `DMLabel` with values interpreted as coarsening and refining flags. Specific implementations of `DM` may have specialized flags, but all implementations should accept flag values `DM_ADAPT_DETERMINE`, `DM_ADAPT_KEEP`, `DM_ADAPT_REFINE`, and `DM_ADAPT_COARSEN`. 
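A minimal sketch of driving `DMAdaptLabel()` as declared above, assuming a `DM` dm and a cell number c chosen by some error indicator:

.vb
  DMLabel label;
  DM      dmAdapt;

  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "adapt", &label));
  PetscCall(DMLabelSetValue(label, c, DM_ADAPT_REFINE));
  PetscCall(DMAdaptLabel(dm, label, &dmAdapt));
  PetscCall(DMLabelDestroy(&label));
.ve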
@@ -192,7 +192,7 @@ PetscErrorCode DMAdaptLabel(DM dm, DMLabel label, DM *dmAdapt) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMAdaptMetric - Generates a mesh adapted to the specified metric field. Input Parameters: @@ -202,7 +202,7 @@ PetscErrorCode DMAdaptLabel(DM dm, DMLabel label, DM *dmAdapt) - rgLabel - Label for cell tags, which will be preserved in the output mesh. `rgLabel` should be `NULL` if there is no such label, and should be different from "_regions_". Output Parameter: -. dmAdapt - Pointer to the DM object containing the adapted mesh +. dmAdapt - Pointer to the `DM` object containing the adapted mesh Note: The label in the adapted mesh will be registered under the name of the input `DMLabel` object diff --git a/src/dm/interface/dmgeommodel.c b/src/dm/interface/dmgeommodel.c new file mode 100644 index 00000000000..2c018feb815 --- /dev/null +++ b/src/dm/interface/dmgeommodel.c @@ -0,0 +1,185 @@ +#include <petsc/private/dmimpl.h> /*I "petscdm.h" I*/ + +PetscFunctionList DMGeomModelList = NULL; +PetscBool DMGeomModelRegisterAllCalled = PETSC_FALSE; + +#if defined(PETSC_HAVE_EGADS) +PETSC_EXTERN PetscErrorCode DMSnapToGeomModel_EGADS(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); +PETSC_EXTERN PetscErrorCode DMSnapToGeomModel_EGADSLite(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); +#endif + +static PetscErrorCode DMSnapToGeomModelBall(DM dm, PetscInt p, PetscInt dE, const PetscScalar mcoords[], PetscScalar gcoords[]) +{ + PetscInt val; + + PetscFunctionBeginUser; + PetscCall(DMGetLabelValue(dm, "marker", p, &val)); + if (val >= 0) { + PetscReal norm = 0.; + + for (PetscInt d = 0; d < dE; ++d) norm += PetscSqr(PetscRealPart(mcoords[d])); + norm = PetscSqrtReal(norm); + for (PetscInt d = 0; d < dE; ++d) gcoords[d] = mcoords[d] / norm; + } else { + for (PetscInt d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode DMSnapToGeomModelCylinder(DM dm, PetscInt p, PetscInt dE, const PetscScalar mcoords[], PetscScalar gcoords[]) +{ + PetscReal gmin[3], gmax[3]; + PetscInt val; + + PetscFunctionBeginUser; + PetscCall(DMGetBoundingBox(dm, gmin, gmax)); + PetscCall(DMGetLabelValue(dm, "generatrix", p, &val)); + if (val >= 0) { + PetscReal norm = 0.; + + for (PetscInt d = 0; d < dE - 1; ++d) norm += PetscSqr(PetscRealPart(mcoords[d])); + norm = PetscSqrtReal(norm); + for (PetscInt d = 0; d < dE - 1; ++d) gcoords[d] = gmax[0] * mcoords[d] / norm; + gcoords[dE - 1] = mcoords[dE - 1]; + } else { + for (PetscInt d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@C + DMGeomModelRegisterAll - Registers all of the geometry model methods in the `DM` package. 
+ + Not Collective + + Level: advanced + +.seealso: `DM`, `DMGeomModelRegisterDestroy()` +@*/ +PetscErrorCode DMGeomModelRegisterAll(void) +{ + PetscFunctionBegin; + if (DMGeomModelRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS); + DMGeomModelRegisterAllCalled = PETSC_TRUE; + PetscCall(DMGeomModelRegister("ball", DMSnapToGeomModelBall)); + PetscCall(DMGeomModelRegister("cylinder", DMSnapToGeomModelCylinder)); +#if defined(PETSC_HAVE_EGADS) + PetscCall(DMGeomModelRegister("egads", DMSnapToGeomModel_EGADS)); + PetscCall(DMGeomModelRegister("egadslite", DMSnapToGeomModel_EGADSLite)); +#endif + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@C + DMGeomModelRegister - Adds a geometry model to `DM` + + Not Collective, No Fortran Support + + Input Parameters: ++ sname - name of a new user-defined geometry model +- fnc - geometry model function + + Example Usage: +.vb + DMGeomModelRegister("my_geom_model", MySnapToGeomModel); +.ve + + Then, your geometry model can be chosen with the procedural interface via +$ DMSetSnapToGeomModel(dm, "my_geom_model") + or at runtime via the option +$ -dm_geom_model my_geom_model + + Level: advanced + + Note: + `DMGeomModelRegister()` may be called multiple times to add several user-defined geometry models + +.seealso: `DM`, `DMGeomModelRegisterAll()`, `DMPlexGeomModel()`, `DMGeomModelRegisterDestroy()` +@*/ +PetscErrorCode DMGeomModelRegister(const char sname[], PetscErrorCode (*fnc)(DM, PetscInt, PetscInt, const PetscScalar[], PetscScalar[])) +{ + PetscFunctionBegin; + PetscCall(PetscFunctionListAdd(&DMGeomModelList, sname, (PetscVoidFn *)fnc)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +extern PetscBool DMGeomModelRegisterAllCalled; + +PetscErrorCode DMGeomModelRegisterDestroy(void) +{ + PetscFunctionBegin; + PetscCall(PetscFunctionListDestroy(&DMGeomModelList)); + DMGeomModelRegisterAllCalled = PETSC_FALSE; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + DMSetSnapToGeomModel - Choose a geometry model for this `DM`. + + Not Collective + + Input Parameters: ++ dm - The `DM` object +- name - A geometry model name, or `NULL` for the default + + Level: intermediate + +.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMRefine()`, `DMPlexCreate()`, `DMSnapToGeomModel()` +@*/ +PetscErrorCode DMSetSnapToGeomModel(DM dm, const char name[]) +{ + char geomname[PETSC_MAX_PATH_LEN]; + PetscBool flg; + + PetscFunctionBegin; + if (!name && dm->ops->snaptogeommodel) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(PetscOptionsGetString(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dm_geom_model", geomname, sizeof(geomname), &flg)); + if (flg) name = geomname; + if (!name) { + PetscObject modelObj; + + PetscCall(PetscObjectQuery((PetscObject)dm, "EGADS Model", (PetscObject *)&modelObj)); + if (modelObj) name = "egads"; + else { + PetscCall(PetscObjectQuery((PetscObject)dm, "EGADSLite Model", (PetscObject *)&modelObj)); + if (modelObj) name = "egadslite"; + } + } + if (!name) PetscFunctionReturn(PETSC_SUCCESS); + + PetscCall(PetscFunctionListFind(DMGeomModelList, name, &dm->ops->snaptogeommodel)); + PetscCheck(dm->ops->snaptogeommodel, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Geometry model %s not registered; you may need to add --download-%s to your ./configure options", name, name); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + DMSnapToGeomModel - Given a coordinate point 'mcoords' on the mesh point 'p', return the closest coordinate point 'gcoords' on the geometry model associated with that point. 
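Putting the registration machinery above together: a sketch of a user-defined model with the callback signature required by `DMGeomModelRegister()`; the no-op body and the name "my_geom_model" are illustrative:

.vb
  static PetscErrorCode MySnapToGeomModel(DM dm, PetscInt p, PetscInt dE, const PetscScalar mcoords[], PetscScalar gcoords[])
  {
    PetscFunctionBeginUser;
    for (PetscInt d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; /* identity: no snapping */
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(DMGeomModelRegister("my_geom_model", MySnapToGeomModel));
  PetscCall(DMSetSnapToGeomModel(dm, "my_geom_model"));
.ve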
+ + Not Collective + + Input Parameters: ++ dm - The `DMPLEX` object +. p - The mesh point +. dE - The coordinate dimension +- mcoords - A coordinate point lying on the mesh point + + Output Parameter: +. gcoords - The closest coordinate point on the geometry model associated with 'p' to the given point + + Level: intermediate + + Note: + Returns the original coordinates if no geometry model is found. + + The coordinate dimension may be different from the coordinate dimension of the `dm`, for example if the transformation is extrusion. + +.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMRefine()`, `DMPlexCreate()`, `DMPlexSetRefinementUniform()` +@*/ +PetscErrorCode DMSnapToGeomModel(DM dm, PetscInt p, PetscInt dE, const PetscScalar mcoords[], PetscScalar gcoords[]) +{ + PetscFunctionBegin; + if (!dm->ops->snaptogeommodel) + for (PetscInt d = 0; d < dE; ++d) gcoords[d] = mcoords[d]; + else PetscUseTypeMethod(dm, snaptogeommodel, p, dE, mcoords, gcoords); + PetscFunctionReturn(PETSC_SUCCESS); +} diff --git a/src/dm/interface/dmget.c b/src/dm/interface/dmget.c index 095fa188ed6..1dedcf62568 100644 --- a/src/dm/interface/dmget.c +++ b/src/dm/interface/dmget.c @@ -353,7 +353,7 @@ PetscErrorCode DMClearNamedLocalVectors(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMHasNamedGlobalVector - check for a named, persistent global vector created with `DMGetNamedGlobalVector()` Not Collective @@ -389,7 +389,7 @@ PetscErrorCode DMHasNamedGlobalVector(DM dm, const char *name, PetscBool *exists PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetNamedGlobalVector - get access to a named, persistent global vector Collective @@ -444,7 +444,7 @@ PetscErrorCode DMGetNamedGlobalVector(DM dm, const char *name, Vec *X) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMRestoreNamedGlobalVector - restore access to a named, persistent global vector Collective @@ -488,7 +488,7 @@ PetscErrorCode DMRestoreNamedGlobalVector(DM dm, const char *name, Vec *X) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_INCOMP, "Could not find Vec name '%s' to restore", name); } -/*@C +/*@ DMHasNamedLocalVector - check for a named, persistent local vector created with `DMGetNamedLocalVector()` Not Collective @@ -527,7 +527,7 @@ PetscErrorCode DMHasNamedLocalVector(DM dm, const char *name, PetscBool *exists) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMGetNamedLocalVector - get access to a named, persistent local vector Not Collective @@ -582,7 +582,7 @@ PetscErrorCode DMGetNamedLocalVector(DM dm, const char *name, Vec *X) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMRestoreNamedLocalVector - restore access to a named, persistent local vector obtained with `DMGetNamedLocalVector()` Not Collective diff --git a/src/dm/interface/dmi.c b/src/dm/interface/dmi.c index 7ce9c07dc74..c28003c5862 100644 --- a/src/dm/interface/dmi.c +++ b/src/dm/interface/dmi.c @@ -37,7 +37,8 @@ PetscErrorCode DMCreateGlobalVector_Section_Private(DM dm, Vec *vec) } } - in[0] = blockSize < 0 ? PETSC_MIN_INT : -blockSize; + // You cannot negate PETSC_MIN_INT + in[0] = blockSize < 0 ? 
-PETSC_MAX_INT : -blockSize; in[1] = blockSize; PetscCall(MPIU_Allreduce(in, out, 2, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)dm))); /* -out[0] = min(blockSize), out[1] = max(blockSize) */ @@ -376,7 +377,7 @@ static PetscErrorCode DMSelectFields_Private(DM dm, PetscSection section, PetscI PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCreateSectionSubDM - Returns an `IS` and `subDM` containing a `PetscSection` that encapsulates a subproblem defined by a subset of the fields in a `PetscSection` in the `DM`. Not Collective @@ -435,7 +436,7 @@ PetscErrorCode DMCreateSectionSubDM(DM dm, PetscInt numFields, const PetscInt fi .seealso: `DMCreateSuperDM()`, `DMGetLocalSection()`, `DMPlexSetMigrationSF()`, `DMView()` @*/ -PetscErrorCode DMCreateSectionSuperDM(DM dms[], PetscInt len, IS **is, DM *superdm) +PetscErrorCode DMCreateSectionSuperDM(DM dms[], PetscInt len, IS *is[], DM *superdm) { MPI_Comm comm; PetscSection supersection, *sections, *sectionGlobals; diff --git a/src/dm/interface/dmperiodicity.c b/src/dm/interface/dmperiodicity.c index e1a7ac000b7..d99ddd2fed7 100644 --- a/src/dm/interface/dmperiodicity.c +++ b/src/dm/interface/dmperiodicity.c @@ -17,7 +17,7 @@ .seealso: `DM` @*/ -PetscErrorCode DMGetPeriodicity(DM dm, const PetscReal **maxCell, const PetscReal **Lstart, const PetscReal **L) +PetscErrorCode DMGetPeriodicity(DM dm, const PetscReal *maxCell[], const PetscReal *Lstart[], const PetscReal *L[]) { PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); @@ -27,7 +27,7 @@ PetscErrorCode DMGetPeriodicity(DM dm, const PetscRea PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSetPeriodicity - Set the description of mesh periodicity Input Parameters: @@ -267,6 +267,52 @@ PetscErrorCode DMGetCoordinatesLocalized(DM dm, PetscBool *areLocalized) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMGetSparseLocalize - Check if the `DM` coordinates should be localized only for cells near the periodic boundary. + + Not collective + + Input Parameter: +. dm - The `DM` + + Output Parameter: +. sparse - `PETSC_TRUE` if only cells near the periodic boundary are localized + + Level: intermediate + +.seealso: `DMSetSparseLocalize()`, `DMLocalizeCoordinates()`, `DMSetPeriodicity()` +@*/ +PetscErrorCode DMGetSparseLocalize(DM dm, PetscBool *sparse) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(dm, DM_CLASSID, 1); + PetscAssertPointer(sparse, 2); + *sparse = dm->sparseLocalize; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + DMSetSparseLocalize - Set the flag indicating that `DM` coordinates should be localized only for cells near the periodic boundary. 
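The getter above and the setter below are meant to bracket `DMLocalizeCoordinates()`; a sketch, assuming periodicity was already established on dm with `DMSetPeriodicity()`:

.vb
  PetscCall(DMSetSparseLocalize(dm, PETSC_TRUE));
  PetscCall(DMLocalizeCoordinates(dm)); /* only cells near the periodic boundary get localized coordinates */
.ve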
+ + Logically collective + + Input Parameters: ++ dm - The `DM` +- sparse - `PETSC_TRUE` if only cells near the periodic boundary are localized + + Level: intermediate + +.seealso: `DMGetSparseLocalize()`, `DMLocalizeCoordinates()`, `DMSetPeriodicity()` +@*/ +PetscErrorCode DMSetSparseLocalize(DM dm, PetscBool sparse) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(dm, DM_CLASSID, 1); + PetscValidLogicalCollectiveBool(dm, sparse, 2); + dm->sparseLocalize = sparse; + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ DMLocalizeCoordinates - If a mesh is periodic, create local coordinates for cells having periodic faces @@ -287,7 +333,7 @@ PetscErrorCode DMLocalizeCoordinates(DM dm) PetscScalar *coordsDG, *anchor, *localized; const PetscReal *Lstart, *L; PetscInt Nc, vStart, vEnd, sStart, sEnd, newStart = PETSC_MAX_INT, newEnd = PETSC_MIN_INT, bs, coordSize; - PetscBool isLocalized, sparseLocalize = dm->sparseLocalize, useDG = PETSC_FALSE, useDGGlobal; + PetscBool isLocalized, sparseLocalize, useDG = PETSC_FALSE, useDGGlobal; PetscInt maxHeight = 0, h; PetscInt *pStart = NULL, *pEnd = NULL; MPI_Comm comm; @@ -295,6 +341,7 @@ PetscErrorCode DMLocalizeCoordinates(DM dm) PetscFunctionBegin; PetscValidHeaderSpecific(dm, DM_CLASSID, 1); PetscCall(DMGetPeriodicity(dm, NULL, &Lstart, &L)); + PetscCall(DMGetSparseLocalize(dm, &sparseLocalize)); /* Cannot automatically localize without L and maxCell right now */ if (!L) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); diff --git a/src/dm/interface/dmregall.c b/src/dm/interface/dmregall.c index 450ad81939d..a34ce84b621 100644 --- a/src/dm/interface/dmregall.c +++ b/src/dm/interface/dmregall.c @@ -135,7 +135,7 @@ PETSC_EXTERN PetscErrorCode PetscFECreate_OpenCL(PetscFE); PETSC_EXTERN PetscErrorCode PetscFECreate_Vector(PetscFE); /*@C - PetscFERegisterAll - Registers all of the PetscFE components in the PetscFE package. + PetscFERegisterAll - Registers all of the PetscFE components in the `PetscFE` package. Not Collective @@ -169,7 +169,7 @@ PETSC_EXTERN PetscErrorCode PetscLimiterCreate_Superbee(PetscLimiter); PETSC_EXTERN PetscErrorCode PetscLimiterCreate_MC(PetscLimiter); /*@C - PetscLimiterRegisterAll - Registers all of the PetscLimiter components in the PetscFV package. + PetscLimiterRegisterAll - Registers all of the `PetscLimiter` components in the `PetscFV` package. Not Collective @@ -198,7 +198,7 @@ PETSC_EXTERN PetscErrorCode PetscFVCreate_Upwind(PetscFV); PETSC_EXTERN PetscErrorCode PetscFVCreate_LeastSquares(PetscFV); /*@C - PetscFVRegisterAll - Registers all of the PetscFV components in the PetscFV package. + PetscFVRegisterAll - Registers all of the `PetscFV` components in the `PetscFV` package. Not Collective @@ -221,7 +221,7 @@ PetscErrorCode PetscFVRegisterAll(void) PETSC_EXTERN PetscErrorCode PetscDSCreate_Basic(PetscDS); /*@C - PetscDSRegisterAll - Registers all of the PetscDS components in the PetscDS package. + PetscDSRegisterAll - Registers all of the `PetscDS` components in the `PetscDS` package. 
Not Collective diff --git a/src/dm/interface/ftn-custom/zdmf.c b/src/dm/interface/ftn-custom/zdmf.c index 05e4dcaf799..ee850342c79 100644 --- a/src/dm/interface/ftn-custom/zdmf.c +++ b/src/dm/interface/ftn-custom/zdmf.c @@ -3,324 +3,12 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmcreateinterpolation_ DMCREATEINTERPOLATION - #define dmview_ DMVIEW - #define dmsetoptionsprefix_ DMSETOPTIONSPREFIX - #define dmsettype_ DMSETTYPE - #define dmgettype_ DMGETTYPE - #define dmsetmattype_ DMSETMATTYPE - #define dmsetvectype_ DMSETVECTYPE - #define dmgetmattype_ DMGETMATTYPE - #define dmgetvectype_ DMGETVECTYPE - #define dmlabelview_ DMLABELVIEW - #define dmcreatelabel_ DMCREATELABEL - #define dmhaslabel_ DMHASLABEL - #define dmgetlabelvalue_ DMGETLABELVALUE - #define dmsetlabelvalue_ DMSETLABELVALUE - #define dmgetlabelsize_ DMGETLABELSIZE - #define dmgetlabelidis_ DMGETLABELIDIS - #define dmgetlabelname_ DMGETLABELNAME - #define dmgetlabel_ DMGETLABEL - #define dmgetstratumsize_ DMGETSTRATUMSIZE - #define dmgetstratumis_ DMGETSTRATUMIS - #define dmsetstratumis_ DMSETSTRATUMIS - #define dmremovelabel_ DMREMOVELABEL - #define dmviewfromoptions_ DMVIEWFROMOPTIONS - #define dmcreatesuperdm_ DMCREATESUPERDM - #define dmcreatesubdm_ DMCREATESUBDM - #define dmdestroy_ DMDESTROY - #define dmload_ DMLOAD - #define dmsetfield_ DMSETFIELD - #define dmaddfield_ DMADDFIELD + #define dmcreatesuperdm_ DMCREATESUPERDM #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmcreateinterpolation_ dmcreateinterpolation - #define dmview_ dmview - #define dmsetoptionsprefix_ dmsetoptionsprefix - #define dmsettype_ dmsettype - #define dmgettype_ dmgettype - #define dmsetmattype_ dmsetmattype - #define dmsetvectype_ dmsetvectype - #define dmgetmattype_ dmgetmattype - #define dmgetvectype_ dmgetvectype - #define dmlabelview_ dmlabelview - #define dmcreatelabel_ dmcreatelabel - #define dmhaslabel_ dmhaslabel - #define dmgetlabelvalue_ dmgetlabelvalue - #define dmsetlabelvalue_ dmsetlabelvalue - #define dmgetlabelsize_ dmlabelsize - #define dmgetlabelidis_ dmlabelidis - #define dmgetlabelname_ dmgetlabelname - #define dmgetlabel_ dmgetlabel - #define dmgetstratumsize_ dmgetstratumsize - #define dmgetstratumis_ dmgetstratumis - #define dmsetstratumis_ dmsetstratumis - #define dmremovelabel_ dmremovelabel - #define dmviewfromoptions_ dmviewfromoptions - #define dmcreatesuperdm_ dmreatesuperdm - #define dmcreatesubdm_ dmreatesubdm - #define dmdestroy_ dmdestroy - #define dmload_ dmload - #define dmsetfield_ dmsetfield - #define dmaddfield_ dmaddfield + #define dmcreatesuperdm_ dmreatesuperdm #endif -PETSC_EXTERN void dmsetfield_(DM *dm, PetscInt *f, DMLabel label, PetscObject *disc, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(label); - *ierr = DMSetField(*dm, *f, label, *disc); -} - -PETSC_EXTERN void dmaddfield_(DM *dm, DMLabel label, PetscObject *disc, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(label); - *ierr = DMAddField(*dm, label, *disc); -} - -PETSC_EXTERN void dmload_(DM *dm, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = DMLoad(*dm, v); -} - -PETSC_EXTERN void dmgetmattype_(DM *mm, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = DMGetMatType(*mm, &tname); - if (*ierr) return; - if (name != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void dmgetvectype_(DM *mm, char 
*name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = DMGetVecType(*mm, &tname); - if (*ierr) return; - if (name != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void dmview_(DM *da, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = DMView(*da, v); -} - -PETSC_EXTERN void dmsetoptionsprefix_(DM *dm, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = DMSetOptionsPrefix(*dm, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void dmsettype_(DM *x, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = DMSetType(*x, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void dmgettype_(DM *mm, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = DMGetType(*mm, &tname); - if (*ierr) return; - if (name != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void dmsetmattype_(DM *dm, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = DMSetMatType(*dm, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void dmsetvectype_(DM *dm, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = DMSetVecType(*dm, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void dmcreatelabel_(DM *dm, char *name, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMCreateLabel(*dm, lname); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmhaslabel_(DM *dm, char *name, PetscBool *hasLabel, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMHasLabel(*dm, lname, hasLabel); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmgetlabelvalue_(DM *dm, char *name, PetscInt *point, PetscInt *value, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMGetLabelValue(*dm, lname, *point, value); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmsetlabelvalue_(DM *dm, char *name, PetscInt *point, PetscInt *value, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMSetLabelValue(*dm, lname, *point, *value); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmgetlabelsize_(DM *dm, char *name, PetscInt *size, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMGetLabelSize(*dm, lname, size); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmgetlabelidis_(DM *dm, char *name, IS *ids, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMGetLabelIdIS(*dm, lname, ids); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmgetlabelname_(DM *dm, PetscInt *n, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tmp; - *ierr = DMGetLabelName(*dm, *n, &tmp); - *ierr = PetscStrncpy(name, tmp, len); - if (*ierr) 
return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void dmgetlabel_(DM *dm, char *name, DMLabel *label, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMGetLabel(*dm, lname, label); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmgetstratumsize_(DM *dm, char *name, PetscInt *value, PetscInt *size, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMGetStratumSize(*dm, lname, *value, size); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmgetstratumis_(DM *dm, char *name, PetscInt *value, IS *is, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMGetStratumIS(*dm, lname, *value, is); - if (*ierr) return; - if (is && !*is) *is = (IS)0; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmsetstratumis_(DM *dm, char *name, PetscInt *value, IS *is, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMSetStratumIS(*dm, lname, *value, *is); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmremovelabel_(DM *dm, char *name, DMLabel *label, int *ierr, PETSC_FORTRAN_CHARLEN_T lenN) -{ - char *lname; - - FIXCHAR(name, lenN, lname); - *ierr = DMRemoveLabel(*dm, lname, label); - if (*ierr) return; - FREECHAR(name, lname); -} - -PETSC_EXTERN void dmviewfromoptions_(DM *dm, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = DMViewFromOptions(*dm, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void dmcreateinterpolation_(DM *dmc, DM *dmf, Mat *mat, Vec *vec, int *ierr) -{ - CHKFORTRANNULLOBJECT(vec); - *ierr = DMCreateInterpolation(*dmc, *dmf, mat, vec); -} - PETSC_EXTERN void dmcreatesuperdm_(DM dms[], PetscInt *len, IS ***is, DM *superdm, int *ierr) { *ierr = DMCreateSuperDM(dms, *len, *is, superdm); } - -PETSC_EXTERN void dmcreatesubdm_(DM *dm, PetscInt *numFields, PetscInt fields[], IS *is, DM *subdm, int *ierr) -{ - CHKFORTRANNULLOBJECT(is); - *ierr = DMCreateSubDM(*dm, *numFields, fields, is, subdm); -} - -PETSC_EXTERN void dmdestroy_(DM *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = DMDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/dm/interface/ftn-custom/zdmgetf.c b/src/dm/interface/ftn-custom/zdmgetf.c deleted file mode 100644 index 6a7500d4893..00000000000 --- a/src/dm/interface/ftn-custom/zdmgetf.c +++ /dev/null @@ -1,30 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmgetnamedglobalvector_ DMGETNAMEDGLOBALVECTOR - #define dmrestorenamedglobalvector_ DMRESTORENAMEDGLOBALVECTOR -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define dmgetnamedglobalvector_ dmgetnamedglobalvector - #define dmrestorenamedglobalvector_ dmrestorenamedglobalvector -#endif - -PETSC_EXTERN void dmgetnamedglobalvector_(DM *dm, char *name, Vec *X, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(name, len, t); - *ierr = DMGetNamedGlobalVector(*dm, t, X); - if (*ierr) return; - FREECHAR(name, t); -} - -PETSC_EXTERN void dmrestorenamedglobalvector_(DM *dm, char *name, Vec *X, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(name, len, t); - *ierr = DMRestoreNamedGlobalVector(*dm, t, X); - if (*ierr) return; - FREECHAR(name, t); -} diff --git 
a/src/dm/label/dmlabel.c b/src/dm/label/dmlabel.c index af2a9637e9f..c3ec6fe3689 100644 --- a/src/dm/label/dmlabel.c +++ b/src/dm/label/dmlabel.c @@ -7,7 +7,7 @@ PetscFunctionList DMLabelList = NULL; PetscBool DMLabelRegisterAllCalled = PETSC_FALSE; -/*@C +/*@ DMLabelCreate - Create a `DMLabel` object, which is a multimap Collective @@ -34,7 +34,6 @@ PetscErrorCode DMLabelCreate(MPI_Comm comm, const char name[], DMLabel *label) PetscCall(DMInitializePackage()); PetscCall(PetscHeaderCreate(*label, DMLABEL_CLASSID, "DMLabel", "DMLabel", "DM", comm, DMLabelDestroy, DMLabelView)); - (*label)->numStrata = 0; (*label)->defaultValue = -1; (*label)->stratumValues = NULL; @@ -51,7 +50,7 @@ PetscErrorCode DMLabelCreate(MPI_Comm comm, const char name[], DMLabel *label) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMLabelSetUp - SetUp a `DMLabel` object Collective @@ -454,7 +453,7 @@ static PetscErrorCode DMLabelView_Concrete(DMLabel label, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMLabelView - View the label Collective @@ -628,6 +627,9 @@ PetscErrorCode DMLabelDuplicate(DMLabel label, DMLabel *labelnew) The communicator needs to be specified because currently `DMLabel` can live on `PETSC_COMM_SELF` even if the underlying `DM` is parallel. + Developer Note: + Fortran stub cannot be generated automatically because `message` must be freed with `PetscFree()` + .seealso: `DMLabel`, `DM`, `DMCompareLabels()`, `DMLabelGetNumValues()`, `DMLabelGetDefaultValue()`, `DMLabelGetNonEmptyStratumValuesIS()`, `DMLabelGetStratumIS()` @*/ PetscErrorCode DMLabelCompare(MPI_Comm comm, DMLabel l0, DMLabel l1, PetscBool *equal, char **message) @@ -1213,6 +1215,43 @@ PetscErrorCode DMLabelGetValueIS(DMLabel label, IS *values) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + DMLabelGetValueBounds - Return the smallest and largest value in the label + + Not Collective + + Input Parameter: +. label - the `DMLabel` + + Output Parameters: ++ minValue - The smallest value +- maxValue - The largest value + + Level: intermediate + +.seealso: `DMLabel`, `DM`, `DMLabelGetBounds()`, `DMLabelGetValue()`, `DMLabelSetValue()` +@*/ +PetscErrorCode DMLabelGetValueBounds(DMLabel label, PetscInt *minValue, PetscInt *maxValue) +{ + PetscInt min = PETSC_MAX_INT, max = PETSC_MIN_INT; + + PetscFunctionBegin; + PetscValidHeaderSpecific(label, DMLABEL_CLASSID, 1); + for (PetscInt v = 0; v < label->numStrata; ++v) { + min = PetscMin(min, label->stratumValues[v]); + max = PetscMax(max, label->stratumValues[v]); + } + if (minValue) { + PetscAssertPointer(minValue, 2); + *minValue = min; + } + if (maxValue) { + PetscAssertPointer(maxValue, 3); + *maxValue = max; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ DMLabelGetNonEmptyStratumValuesIS - Get an `IS` of all values that the `DMlabel` takes @@ -2265,7 +2304,7 @@ PetscErrorCode DMLabelRegisterDestroy(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMLabelSetType - Sets the particular implementation for a label. Collective @@ -2302,7 +2341,7 @@ PetscErrorCode DMLabelSetType(DMLabel label, DMLabelType method) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMLabelGetType - Gets the type name (as a string) from the label. 
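A sketch of the new bounds query added above; per its body, either output pointer may be `NULL` when only one bound is needed:

.vb
  PetscInt minValue, maxValue;

  PetscCall(DMLabelGetValueBounds(label, &minValue, &maxValue));
  PetscCall(DMLabelGetValueBounds(label, NULL, &maxValue)); /* maximum only */
.ve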
Not Collective diff --git a/src/dm/label/ftn-custom/zdmlabel.c b/src/dm/label/ftn-custom/zdmlabel.c index 7b01ff6258a..24507398634 100644 --- a/src/dm/label/ftn-custom/zdmlabel.c +++ b/src/dm/label/ftn-custom/zdmlabel.c @@ -3,22 +3,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define dmlabelview_ DMLABELVIEW #define petscsectionsymlabelsetstratum_ PETSCSECTIONSYMLABELSETSTRATUM #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define dmlabelview_ dmlabelview #define petscsectionsymlabelsetstratum_ petscsectionsymlabelsetstratum #endif -/* Definitions of Fortran Wrapper routines */ - -PETSC_EXTERN void dmlabelview_(DMLabel *label, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = DMLabelView(*label, v); -} - PETSC_EXTERN void petscsectionsymlabelsetstratum_(PetscSectionSym *sym, PetscInt *stratum, PetscInt *size, PetscInt *minOrient, PetscInt *maxOrient, PetscCopyMode *mode, PetscInt **perms, PetscScalar **rots, int *__ierr) { *__ierr = PetscSectionSymLabelSetStratum(*sym, *stratum, *size, *minOrient, *maxOrient, *mode, (const PetscInt **)perms, (const PetscScalar **)rots); diff --git a/src/dm/label/tutorials/ex1f90.F90 b/src/dm/label/tutorials/ex1f90.F90 index cf69c6c2423..baa47f2f1fb 100644 --- a/src/dm/label/tutorials/ex1f90.F90 +++ b/src/dm/label/tutorials/ex1f90.F90 @@ -18,7 +18,7 @@ program ex1f90 PetscCallA(DMPlexCreateFromFile(PETSC_COMM_WORLD,filename,'ex1f90_plex',interpolate,dm,ierr)) PetscCallA(DMPlexDistribute(dm,izero,PETSC_NULL_SF,dmDist,ierr)) - if (dmDist /= PETSC_NULL_DM) then + if (.not. PetscObjectIsNull(dmDist)) then PetscCallA(DMDestroy(dm,ierr)) dm = dmDist end if diff --git a/src/dm/partitioner/impls/chaco/partchaco.c b/src/dm/partitioner/impls/chaco/partchaco.c index 2e2dd47a666..3a1f318fb52 100644 --- a/src/dm/partitioner/impls/chaco/partchaco.c +++ b/src/dm/partitioner/impls/chaco/partchaco.c @@ -57,7 +57,7 @@ PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts, fl extern int FREE_GRAPH; #endif -static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) +static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition) { #if defined(PETSC_HAVE_CHACO) enum { @@ -130,6 +130,7 @@ static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, Pet } #endif if (part->usevwgt) PetscCall(PetscInfo(part, "PETSCPARTITIONERCHACO ignores vertex weights\n")); + if (part->useewgt) PetscCall(PetscInfo(part, "PETSCPARTITIONERCHACO ignores edge weights\n")); err = interface(nvtxs, (int *)start, (int *)adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename, assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag, vmax, ndims, eigtol, seed); #if defined(PETSC_HAVE_UNISTD_H) { diff --git a/src/dm/partitioner/impls/gather/partgather.c b/src/dm/partitioner/impls/gather/partgather.c index 608036bac96..83e13ccc49f 100644 --- a/src/dm/partitioner/impls/gather/partgather.c +++ b/src/dm/partitioner/impls/gather/partgather.c @@ -29,7 +29,7 @@ static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner 
part, PetscVi PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) +static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition) { PetscInt np; diff --git a/src/dm/partitioner/impls/matpart/partmatpart.c b/src/dm/partitioner/impls/matpart/partmatpart.c index 8e15716443f..c430e06afac 100644 --- a/src/dm/partitioner/impls/matpart/partmatpart.c +++ b/src/dm/partitioner/impls/matpart/partmatpart.c @@ -14,16 +14,16 @@ static PetscErrorCode PetscPartitionerMatPartitioningGetMatPartitioning_MatParti PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - PetscPartitionerMatPartitioningGetMatPartitioning - Get a MatPartitioning instance wrapped by this PetscPartitioner. +/*@ + PetscPartitionerMatPartitioningGetMatPartitioning - Get a `MatPartitioning` instance wrapped by this `PetscPartitioner`. Not Collective Input Parameter: -. part - The PetscPartitioner +. part - The `PetscPartitioner` Output Parameter: -. mp - The MatPartitioning +. mp - The `MatPartitioning` Level: developer @@ -85,7 +85,7 @@ static PetscErrorCode PetscPartitionerSetFromOptions_MatPartitioning(PetscPartit PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscPartitionerPartition_MatPartitioning(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *is) +static PetscErrorCode PetscPartitionerPartition_MatPartitioning(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *is) { PetscPartitioner_MatPartitioning *p = (PetscPartitioner_MatPartitioning *)part->data; Mat matadj; diff --git a/src/dm/partitioner/impls/parmetis/partparmetis.c b/src/dm/partitioner/impls/parmetis/partparmetis.c index 3a7d0ad0bed..179639fa0d3 100644 --- a/src/dm/partitioner/impls/parmetis/partparmetis.c +++ b/src/dm/partitioner/impls/parmetis/partparmetis.c @@ -75,7 +75,7 @@ static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscPartitioner p PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) +static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition) { #if defined(PETSC_HAVE_PARMETIS) PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *)part->data; @@ -147,6 +147,12 @@ static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v])); wgtflag |= 2; /* have weights on graph vertices */ } + // Weight edges + if (edgeSection) { + 
PetscCall(PetscMalloc1(xadj[nvtxs], &adjwgt)); + for (PetscInt e = 0; e < xadj[nvtxs]; ++e) PetscCall(PetscSectionGetDof(edgeSection, e, &adjwgt[e])); + wgtflag |= 1; /* have weights on graph edges */ + } for (p = 0; !vtxdist[p + 1] && p < size; ++p); if (vtxdist[p + 1] == vtxdist[size]) { @@ -215,6 +221,7 @@ static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition)); PetscCall(PetscFree4(vtxdist, tpwgts, ubvec, assignment)); PetscCall(PetscFree(vwgt)); + PetscCall(PetscFree(adjwgt)); PetscFunctionReturn(PETSC_SUCCESS); #else SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis."); diff --git a/src/dm/partitioner/impls/ptscotch/partptscotch.c b/src/dm/partitioner/impls/ptscotch/partptscotch.c index f1063167e53..6e64d02067f 100644 --- a/src/dm/partitioner/impls/ptscotch/partptscotch.c +++ b/src/dm/partitioner/impls/ptscotch/partptscotch.c @@ -199,7 +199,7 @@ static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscPartitioner p PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) +static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition) { #if defined(PETSC_HAVE_PTSCOTCH) MPI_Comm comm; @@ -238,6 +238,11 @@ static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscCall(PetscMalloc1(nvtxs, &vwgt)); for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v])); } + // Weight edges + if (edgeSection) { + PetscCall(PetscMalloc1(xadj[nvtxs], &adjwgt)); + for (PetscInt e = 0; e < xadj[nvtxs]; ++e) PetscCall(PetscSectionGetDof(edgeSection, e, &adjwgt[e])); + } /* Calculate partition weights */ if (targetSection) { @@ -278,6 +283,7 @@ static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, } } PetscCall(PetscFree(vwgt)); + PetscCall(PetscFree(adjwgt)); PetscCall(PetscFree(tpwgts)); /* Convert to PetscSection+IS */ diff --git a/src/dm/partitioner/impls/shell/partshell.c b/src/dm/partitioner/impls/shell/partshell.c index d017d4c4885..55ccf0fbe3c 100644 --- a/src/dm/partitioner/impls/shell/partshell.c +++ b/src/dm/partitioner/impls/shell/partshell.c @@ -51,18 +51,36 @@ static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscVie static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscPartitioner part, PetscOptionItems *PetscOptionsObject) { - PetscBool random = PETSC_FALSE, set; + PetscInt sizes[16], points[1024]; + PetscInt Npart = 16, Npoints = 1024; + PetscBool random = PETSC_FALSE, set, flgSizes, flgPoints; + PetscMPIInt rank; PetscFunctionBegin; + PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank)); PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner Shell Options"); PetscCall(PetscPartitionerShellGetRandom(part, &random)); PetscCall(PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &random, &set)); if (set) 
PetscCall(PetscPartitionerShellSetRandom(part, random)); + PetscCall(PetscOptionsIntArray("-petscpartitioner_shell_sizes", "The size of each partition on rank 0", "PetscPartitionerShellSetPartition", sizes, &Npart, &flgSizes)); + PetscCall(PetscOptionsIntArray("-petscpartitioner_shell_points", "The points in each partition on rank 0", "PetscPartitionerShellSetPartition", points, &Npoints, &flgPoints)); + PetscCheck(!(flgSizes ^ flgPoints), PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_WRONG, "Must specify both the partition sizes and points"); + if (flgSizes) { + PetscInt Np = 0; + + for (PetscInt i = 0; i < Npart; ++i) Np += sizes[i]; + PetscCheck(Np == Npoints, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_WRONG, "Number of input points %" PetscInt_FMT " != %" PetscInt_FMT " sum of partition sizes", Npoints, Np); + if (!rank) PetscCall(PetscPartitionerShellSetPartition(part, Npart, sizes, points)); + else { + PetscCall(PetscArrayzero(sizes, Npart)); + PetscCall(PetscPartitionerShellSetPartition(part, Npart, sizes, points)); + } + } PetscOptionsHeadEnd(); PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) +static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition) { PetscPartitioner_Shell *p = (PetscPartitioner_Shell *)part->data; PetscInt np; @@ -143,7 +161,7 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscPartitionerShellSetPartition - Set an artificial partition for a mesh Collective diff --git a/src/dm/partitioner/impls/simple/partsimple.c b/src/dm/partitioner/impls/simple/partsimple.c index 4b23e314a82..a8d99c15b42 100644 --- a/src/dm/partitioner/impls/simple/partsimple.c +++ b/src/dm/partitioner/impls/simple/partsimple.c @@ -132,7 +132,7 @@ static PetscErrorCode PetscPartitionerPartition_Simple_Grid(PetscPartitioner par PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) +static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition) { PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data; MPI_Comm comm; diff --git a/src/dm/partitioner/interface/ftn-custom/makefile b/src/dm/partitioner/interface/ftn-custom/makefile deleted file mode 100644 index 0eea8ee1ec1..00000000000 --- a/src/dm/partitioner/interface/ftn-custom/makefile +++ /dev/null @@ -1,7 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - -MANSEC = DM - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/dm/partitioner/interface/ftn-custom/zpartitioner.c b/src/dm/partitioner/interface/ftn-custom/zpartitioner.c deleted file 
diff --git a/src/dm/partitioner/interface/ftn-custom/makefile b/src/dm/partitioner/interface/ftn-custom/makefile
deleted file mode 100644
index 0eea8ee1ec1..00000000000
--- a/src/dm/partitioner/interface/ftn-custom/makefile
+++ /dev/null
@@ -1,7 +0,0 @@
--include ../../../../../petscdir.mk
-#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS'
-
-MANSEC = DM
-
-include ${PETSC_DIR}/lib/petsc/conf/variables
-include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk
diff --git a/src/dm/partitioner/interface/ftn-custom/zpartitioner.c b/src/dm/partitioner/interface/ftn-custom/zpartitioner.c
deleted file mode 100644
index 269c73f3de0..00000000000
--- a/src/dm/partitioner/interface/ftn-custom/zpartitioner.c
+++ /dev/null
@@ -1,45 +0,0 @@
-#include <petsc/private/fortranimpl.h>
-#include <petscpartitioner.h>
-
-#if defined(PETSC_HAVE_FORTRAN_CAPS)
-  #define petscpartitionersettype_         PETSCPARTITIONERSETTYPE
-  #define petscpartitionergettype_         PETSCPARTITIONERGETTYPE
-  #define petscpartitionerviewfromoptions_ PETSCPARTITIONERVIEWFROMOPTIONS
-#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE)
-  #define petscpartitionersettype_         petscpartitionersettype
-  #define petscpartitionergettype_         petscpartitionergettype
-  #define petscpartitionerviewfromoptions_ petscpartitionerviewfromoptions
-#endif
-
-PETSC_EXTERN void petscpartitionergettype_(PetscPartitioner *mm, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len)
-{
-  const char *tname;
-
-  *ierr = PetscPartitionerGetType(*mm, &tname);
-  if (*ierr) return;
-  if (name != PETSC_NULL_CHARACTER_Fortran) {
-    *ierr = PetscStrncpy(name, tname, len);
-    if (*ierr) return;
-  }
-  FIXRETURNCHAR(PETSC_TRUE, name, len);
-}
-
-PETSC_EXTERN void petscpartitionerviewfromoptions_(PetscPartitioner *part, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len)
-{
-  char *t;
-
-  FIXCHAR(type, len, t);
-  CHKFORTRANNULLOBJECT(obj);
-  *ierr = PetscPartitionerViewFromOptions(*part, obj, t);
-  if (*ierr) return;
-  FREECHAR(type, t);
-}
-
-PETSC_EXTERN void petscpartitionersettype_(PetscPartitioner *x, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len)
-{
-  char *t;
-
-  FIXCHAR(type_name, len, t);
-  *ierr = PetscPartitionerSetType(*x, t);
-  FREECHAR(type_name, t);
-}
diff --git a/src/dm/partitioner/interface/partitioner.c b/src/dm/partitioner/interface/partitioner.c
index d6b18416c24..f60fd53ece6 100644
--- a/src/dm/partitioner/interface/partitioner.c
+++ b/src/dm/partitioner/interface/partitioner.c
@@ -1,6 +1,6 @@
 #include <petsc/private/partitionerimpl.h> /*I "petscpartitioner.h" I*/
 
-/*@C
+/*@
   PetscPartitionerSetType - Builds a particular `PetscPartitioner`
 
   Collective
@@ -47,7 +47,7 @@ PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerTy
   PetscFunctionReturn(PETSC_SUCCESS);
 }
 
-/*@C
+/*@
   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
 
   Not Collective
@@ -71,7 +71,7 @@ PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerTy
   PetscFunctionReturn(PETSC_SUCCESS);
 }
 
-/*@C
+/*@
   PetscPartitionerViewFromOptions - View a `PetscPartitioner` object based on options in the options database
 
   Collective
@@ -125,6 +125,7 @@ PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
     PetscCall(PetscViewerASCIIPrintf(v, "  edge cut: %" PetscInt_FMT "\n", part->edgeCut));
     PetscCall(PetscViewerASCIIPrintf(v, "  balance: %.2g\n", (double)part->balance));
     PetscCall(PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt));
+    PetscCall(PetscViewerASCIIPrintf(v, "  use edge weights: %d\n", part->useewgt));
   }
   PetscTryTypeMethod(part, view, v);
   PetscFunctionReturn(PETSC_SUCCESS);
@@ -182,6 +183,7 @@ PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
   PetscCall(PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, currentType, name, sizeof(name), &flg));
   if (flg) PetscCall(PetscPartitionerSetType(part, name));
   PetscCall(PetscOptionsBool("-petscpartitioner_use_vertex_weights", "Use vertex weights", "", part->usevwgt, &part->usevwgt, NULL));
+  PetscCall(PetscOptionsBool("-petscpartitioner_use_edge_weights", "Use edge weights", "", part->useewgt, &part->useewgt, NULL));
   PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
   PetscCall(PetscOptionsRestoreViewer(&part->viewer));
   PetscCall(PetscOptionsRestoreViewer(&part->viewerGraph));
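The new toggle defaults to off, which is why every updated expected output below prints "use edge weights: 0". A minimal sketch of flipping it from code rather than the command line, assuming an already-created PetscPartitioner `part` and using only the options-database route added in the hunk above:

    /* Sketch: enable edge weighting through the options database, then
       confirm via the viewer line added in PetscPartitionerView() above. */
    PetscCall(PetscOptionsSetValue(NULL, "-petscpartitioner_use_edge_weights", "1"));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(PetscPartitionerView(part, PETSC_VIEWER_STDOUT_WORLD)); /* prints "use edge weights: 1" */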
@@ -278,6 +280,7 @@ PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
 . start         - row pointers for the local part of the graph (CSR style)
 . adjacency     - adjacency list (CSR style)
 . vertexSection - PetscSection describing the absolute weight of each local vertex (can be `NULL`)
+. edgeSection   - PetscSection describing the absolute weight of each local edge (can be `NULL`)
 - targetSection - PetscSection describing the absolute weight of each partition (can be `NULL`)
 
   Output Parameters:
@@ -296,7 +299,7 @@ PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
 .seealso: `PetscPartitionerCreate()`, `PetscPartitionerSetType()`, `PetscSectionCreate()`, `PetscSectionSetChart()`, `PetscSectionSetDof()`
 @*/
-PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
+PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
   PetscFunctionBegin;
   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
@@ -315,22 +318,29 @@ PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts,
     PetscCall(PetscSectionGetChart(vertexSection, &s, &e));
     PetscCheck(s <= 0 && e >= numVertices, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid vertexSection chart [%" PetscInt_FMT ",%" PetscInt_FMT ")", s, e);
   }
+  if (edgeSection) {
+    PetscInt s, e;
+
+    PetscValidHeaderSpecific(edgeSection, PETSC_SECTION_CLASSID, 7);
+    PetscCall(PetscSectionGetChart(edgeSection, &s, &e));
+    PetscCheck(s <= 0 && e >= start[numVertices], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid edgeSection chart [%" PetscInt_FMT ",%" PetscInt_FMT ")", s, e);
+  }
   if (targetSection) {
     PetscInt s, e;
 
-    PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 7);
+    PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 8);
     PetscCall(PetscSectionGetChart(targetSection, &s, &e));
     PetscCheck(s <= 0 && e >= nparts, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid targetSection chart [%" PetscInt_FMT ",%" PetscInt_FMT ")", s, e);
   }
-  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 8);
-  PetscAssertPointer(partition, 9);
+  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 9);
+  PetscAssertPointer(partition, 10);
   PetscCall(PetscSectionReset(partSection));
   PetscCall(PetscSectionSetChart(partSection, 0, nparts));
   if (nparts == 1) { /* quick */
     PetscCall(PetscSectionSetDof(partSection, 0, numVertices));
     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), numVertices, 0, 1, partition));
-  } else PetscUseTypeMethod(part, partition, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);
+  } else PetscUseTypeMethod(part, partition, nparts, numVertices, start, adjacency, vertexSection, edgeSection, targetSection, partSection, partition);
   PetscCall(PetscSectionSetUp(partSection));
   if (part->viewerGraph) {
     PetscViewer viewer = part->viewerGraph;
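Callers now pass one more section. A sketch of supplying uniform edge weights under the chart convention the validation above enforces, [0, start[numVertices]); the variable names mirror the function's parameters and the weights are illustrative:

    /* Sketch: one weight per CSR adjacency entry; edge e's dof is its weight. */
    PetscSection edgeSection;

    PetscCall(PetscSectionCreate(PETSC_COMM_SELF, &edgeSection));
    PetscCall(PetscSectionSetChart(edgeSection, 0, start[numVertices]));
    for (PetscInt e = 0; e < start[numVertices]; ++e) PetscCall(PetscSectionSetDof(edgeSection, e, 1)); /* uniform weight 1 */
    PetscCall(PetscSectionSetUp(edgeSection));
    PetscCall(PetscPartitionerPartition(part, nparts, numVertices, start, adjacency, vertexSection, edgeSection, targetSection, partSection, &partition));
    PetscCall(PetscSectionDestroy(&edgeSection));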
diff --git a/src/dm/partitioner/interface/partitionerreg.c b/src/dm/partitioner/interface/partitionerreg.c
index 609ba6c6d2d..0a15884ab32 100644
--- a/src/dm/partitioner/interface/partitionerreg.c
+++ b/src/dm/partitioner/interface/partitionerreg.c
@@ -8,7 +8,7 @@ PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE;
 /*@C
   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
 
-  Not Collective
+  Not Collective, No Fortran Support
 
   Input Parameters:
+ sname - The name of a new user-defined creation routine
diff --git a/src/dm/partitioner/tests/ex33.c b/src/dm/partitioner/tests/ex33.c
index 0f821587627..94666997aca 100644
--- a/src/dm/partitioner/tests/ex33.c
+++ b/src/dm/partitioner/tests/ex33.c
@@ -70,7 +70,7 @@ int main(int argc, char **argv)
     PetscCall(PetscPartitionerReset(p));
 
     /* test partitioning an empty graph */
-    PetscCall(PetscPartitionerPartition(p, nparts, 0, NULL, NULL, vertexSection, targetSection, partSection, &partition));
+    PetscCall(PetscPartitionerPartition(p, nparts, 0, NULL, NULL, vertexSection, NULL, targetSection, partSection, &partition));
     PetscCall(PetscObjectSetName((PetscObject)partSection, "NULL SECTION"));
     PetscCall(PetscSectionView(partSection, NULL));
     PetscCall(ISOnComm(partition, PETSC_COMM_WORLD, PETSC_USE_POINTER, &is));
@@ -84,9 +84,9 @@ int main(int argc, char **argv)
 
     /* test partitioning a graph on one process only (not main) */
     if (rank == size - 1) {
-      PetscCall(PetscPartitionerPartition(p, nparts, nv, vv, vadj, vertexSection, targetSection, partSection, &partition));
+      PetscCall(PetscPartitionerPartition(p, nparts, nv, vv, vadj, vertexSection, NULL, targetSection, partSection, &partition));
     } else {
-      PetscCall(PetscPartitionerPartition(p, nparts, 0, NULL, NULL, vertexSection, targetSection, partSection, &partition));
+      PetscCall(PetscPartitionerPartition(p, nparts, 0, NULL, NULL, vertexSection, NULL, targetSection, partSection, &partition));
     }
     PetscCall(PetscObjectSetName((PetscObject)partSection, "SEQ SECTION"));
     PetscCall(PetscSectionView(partSection, NULL));
@@ -101,7 +101,7 @@ int main(int argc, char **argv)
 
     /* test partitioning a graph on a subset of the processes only */
     if (rank % 2) {
-      PetscCall(PetscPartitionerPartition(p, nparts, 0, NULL, NULL, NULL, targetSection, partSection, &partition));
+      PetscCall(PetscPartitionerPartition(p, nparts, 0, NULL, NULL, NULL, NULL, targetSection, partSection, &partition));
     } else {
       PetscInt i, totv = nv * ((size + 1) / 2), *pvadj;
 
@@ -110,7 +110,7 @@ int main(int argc, char **argv)
         pvadj[2 * i]     = (nv * (rank / 2) + totv + i - 1) % totv;
         pvadj[2 * i + 1] = (nv * (rank / 2) + totv + i + 1) % totv;
       }
-      PetscCall(PetscPartitionerPartition(p, nparts, nv, vv, pvadj, NULL, targetSection, partSection, &partition));
+      PetscCall(PetscPartitionerPartition(p, nparts, nv, vv, pvadj, NULL, NULL, targetSection, partSection, &partition));
       PetscCall(PetscFree(pvadj));
     }
     PetscCall(PetscObjectSetName((PetscObject)partSection, "PARVOID SECTION"));
diff --git a/src/dm/partitioner/tests/output/ex33_chaco_nsize-1_nparts-1.out b/src/dm/partitioner/tests/output/ex33_chaco_nsize-1_nparts-1.out
index 679270c0f2d..7e65cc35606 100644
--- a/src/dm/partitioner/tests/output/ex33_chaco_nsize-1_nparts-1.out
+++ b/src/dm/partitioner/tests/output/ex33_chaco_nsize-1_nparts-1.out
@@ -4,6 +4,7 @@ Graph Partitioner: 1 MPI Process
   edge cut: 0
   balance: 0
   use vertex weights: 1
+  use edge weights: 0
 PetscSection Object: NULL SECTION 1 MPI process
   type not yet set
 Process 0:
@@ -21,6 +22,7 @@ Graph Partitioner: 1 MPI Process
   edge cut: 0
   balance: 0
   use vertex weights: 1
+  use edge weights: 0
 PetscSection Object: SEQ SECTION 1 MPI process
   type not yet set
 Process 0:
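Migration note: as the ex33.c hunks above show, existing call sites that carry no edge weights simply pass NULL in the new seventh slot and keep their previous behavior, e.g. (sketch using the test's own variables):

    PetscCall(PetscPartitionerPartition(p, nparts, nv, vv, vadj, vertexSection, NULL, targetSection, partSection, &partition));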
[The remaining hunks apply the same mechanical update to every other expected-output file under src/dm/partitioner/tests/output/: each "Graph Partitioner" view block gains the single line

+  use edge weights: 0

immediately after its "use vertex weights: 1" line, with the corresponding hunk-offset shifts. Files covered here: ex33_chaco_nsize-{2,3}_nparts-1.out, ex33_gather_nsize-{1,2,3}_nparts-{1,2,3}.out, ex33_parmetis_nsize-{1,2,3}_nparts-{1,2,3}.out, ex33_ptscotch_nsize-{1,2,3}_nparts-{1,2,3}_pwgts-{false,true}.out, ex33_shell_nsize-{1,2,3}_nparts-{1,2,3}.out, ex33_simple_nsize-1_nparts-{1,2,3}_pwgts-{false,true}.out, and ex33_simple_nsize-2_nparts-1_pwgts-{false,true}.out. The unchanged context lines (edge cut, balance, ParMETIS/PT-Scotch settings, and the NULL/SEQ/PARVOID PetscSection listings) are elided here for brevity.]
diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-false.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-false.out
index e3288e08a10..a340f82074f 100644
--- a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-false.out
+++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-false.out
@@ -3,6 +3,7 @@ Graph Partitioner: 2 MPI 
Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 2 MPI processes type not yet set Process 0: @@ -20,6 +21,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 2 MPI processes type not yet set Process 0: @@ -41,6 +43,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 2 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-true.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-true.out index e3288e08a10..a340f82074f 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-true.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-2_pwgts-true.out @@ -3,6 +3,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 2 MPI processes type not yet set Process 0: @@ -20,6 +21,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 2 MPI processes type not yet set Process 0: @@ -41,6 +43,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 2 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-false.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-false.out index 7ff8ca48ffe..5ac4e2cd6cf 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-false.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-false.out @@ -3,6 +3,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 2 MPI processes type not yet set Process 0: @@ -22,6 +23,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 2 MPI processes type not yet set Process 0: @@ -45,6 +47,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 2 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-true.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-true.out index 7ff8ca48ffe..5ac4e2cd6cf 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-true.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-2_nparts-3_pwgts-true.out @@ -3,6 +3,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 2 MPI processes type not yet set Process 0: @@ -22,6 +23,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 2 MPI processes type not yet set Process 0: @@ -45,6 +47,7 @@ Graph Partitioner: 2 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 2 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-false.out 
b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-false.out index 99c4152f690..5e4bb0c0e1a 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-false.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-false.out @@ -3,6 +3,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 3 MPI processes type not yet set Process 0: @@ -21,6 +22,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 3 MPI processes type not yet set Process 0: @@ -43,6 +45,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 3 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-true.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-true.out index 99c4152f690..5e4bb0c0e1a 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-true.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-1_pwgts-true.out @@ -3,6 +3,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 3 MPI processes type not yet set Process 0: @@ -21,6 +22,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 3 MPI processes type not yet set Process 0: @@ -43,6 +45,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 3 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-false.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-false.out index 9a1168721a9..3ee52e97d64 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-false.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-false.out @@ -3,6 +3,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 3 MPI processes type not yet set Process 0: @@ -24,6 +25,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 3 MPI processes type not yet set Process 0: @@ -49,6 +51,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 3 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-true.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-true.out index 9a1168721a9..3ee52e97d64 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-true.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-2_pwgts-true.out @@ -3,6 +3,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 3 MPI processes type not yet set Process 0: @@ -24,6 +25,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 3 MPI processes type not yet 
set Process 0: @@ -49,6 +51,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 3 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-false.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-false.out index fe48c3e17a4..38a07f072f4 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-false.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-false.out @@ -3,6 +3,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 3 MPI processes type not yet set Process 0: @@ -27,6 +28,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 3 MPI processes type not yet set Process 0: @@ -55,6 +57,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 3 MPI processes type not yet set Process 0: diff --git a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-true.out b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-true.out index 4864dfd3f9b..16e6a9c51b4 100644 --- a/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-true.out +++ b/src/dm/partitioner/tests/output/ex33_simple_nsize-3_nparts-3_pwgts-true.out @@ -3,6 +3,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: NULL SECTION 3 MPI processes type not yet set Process 0: @@ -27,6 +28,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: SEQ SECTION 3 MPI processes type not yet set Process 0: @@ -55,6 +57,7 @@ Graph Partitioner: 3 MPI Processes edge cut: 0 balance: 0 use vertex weights: 1 + use edge weights: 0 PetscSection Object: PARVOID SECTION 3 MPI processes type not yet set Process 0: diff --git a/src/dm/tests/ex1f.F90 b/src/dm/tests/ex1f.F90 index 595f3004711..42846c04f60 100644 --- a/src/dm/tests/ex1f.F90 +++ b/src/dm/tests/ex1f.F90 @@ -21,7 +21,7 @@ program main PetscCallA(mpi_comm_rank(PETSC_COMM_WORLD, myid, ierr)) PetscCallA(mpi_comm_size(PETSC_COMM_WORLD, commsize, ierr)) - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,Nx, Ny, PETSC_DECIDE, PETSC_DECIDE, Ndof, stencil_size,PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, da, ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,Nx, Ny, PETSC_DECIDE, PETSC_DECIDE, Ndof, stencil_size,PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, da, ierr)) PetscCallA(DMSetup(da, ierr)) PetscCallA(DMSetFromOptions(da, ierr)) diff --git a/src/dm/tutorials/ex11f90.F90 b/src/dm/tutorials/ex11f90.F90 index 7c91baccdf6..947ab16aa6e 100644 --- a/src/dm/tutorials/ex11f90.F90 +++ b/src/dm/tutorials/ex11f90.F90 @@ -29,7 +29,7 @@ program main dof = 1 sw = 1 PetscCallA(PetscInitialize(ierr)) - PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,dof,sw,PETSC_NULL_INTEGER,ada,ierr)) + PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,dof,sw,PETSC_NULL_INTEGER_ARRAY,ada,ierr)) PetscCallA(DMSetUp(ada,ierr)) PetscCallA(DMGetGlobalVector(ada,g,ierr)) 
PetscCallA(DMDAGetCorners(ada,xs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,xl,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) @@ -44,7 +44,7 @@ program main PetscCallA(DMRestoreGlobalVector(ada,g,ierr)) PetscCallA(DMDestroy(ada,ierr)) - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,m,n,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ada,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,m,n,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,ada,ierr)) PetscCallA(DMSetUp(ada,ierr)) PetscCallA(DMGetGlobalVector(ada,g,ierr)) PetscCallA(DMDAGetCorners(ada,xs,ys,PETSC_NULL_INTEGER,xl,yl,PETSC_NULL_INTEGER,ierr)) @@ -67,7 +67,7 @@ program main PetscCallA(DMDARestoreElements(ada,nen,nel,elements,ierr)) PetscCallA(DMDestroy(ada,ierr)) - PetscCallA(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX, m,n,p,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ada,ierr)) + PetscCallA(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX, m,n,p,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,ada,ierr)) PetscCallA(DMSetUp(ada,ierr)) PetscCallA(DMGetGlobalVector(ada,g,ierr)) PetscCallA(DMDAGetCorners(ada,xs,ys,zs,xl,yl,zl,ierr)) @@ -90,7 +90,7 @@ program main ! Same tests but now with DOF > 1, so dimensions of array are one higher ! dof = 2 - PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,dof,sw,PETSC_NULL_INTEGER,ada,ierr)) + PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,dof,sw,PETSC_NULL_INTEGER_ARRAY,ada,ierr)) PetscCallA(DMSetUp(ada,ierr)) PetscCallA(DMGetGlobalVector(ada,g,ierr)) PetscCallA(DMDAGetCorners(ada,xs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,xl,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) @@ -107,7 +107,7 @@ program main PetscCallA(DMDestroy(ada,ierr)) dof = 2 - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,m,n,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ada,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,m,n,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,ada,ierr)) PetscCallA(DMSetUp(ada,ierr)) PetscCallA(DMGetGlobalVector(ada,g,ierr)) PetscCallA(DMDAGetCorners(ada,xs,ys,PETSC_NULL_INTEGER,xl,yl,PETSC_NULL_INTEGER,ierr)) @@ -126,7 +126,7 @@ program main PetscCallA(DMDestroy(ada,ierr)) dof = 3 - PetscCallA(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,m,n,p,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ada,ierr)) + PetscCallA(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,m,n,p,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,s,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,ada,ierr)) PetscCallA(DMSetUp(ada,ierr)) PetscCallA(DMGetGlobalVector(ada,g,ierr)) PetscCallA(DMDAGetCorners(ada,xs,ys,zs,xl,yl,zl,ierr)) diff --git a/src/dm/tutorials/ex13f90.F90 b/src/dm/tutorials/ex13f90.F90 index 7c025572778..95fee71fb30 100644 --- a/src/dm/tutorials/ex13f90.F90 +++ b/src/dm/tutorials/ex13f90.F90 @@ -65,7 +65,7 @@ program main ! 
Get the BCs and create the DMDA call get_boundary_cond(b_x,b_y,b_z) - PetscCallA(DMDACreate3d(comm,b_x,b_y,b_z,DMDA_STENCIL_STAR,igmax,jgmax,kgmax,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,stw,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,SolScal,ierr)) + PetscCallA(DMDACreate3d(comm,b_x,b_y,b_z,DMDA_STENCIL_STAR,igmax,jgmax,kgmax,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,stw,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,SolScal,ierr)) PetscCallA(DMSetFromOptions(SolScal,ierr)) PetscCallA(DMSetUp(SolScal,ierr)) diff --git a/src/ksp/f90-mod/petscksp.h b/src/ksp/f90-mod/petscksp.h index 2c2c39bf14a..74a33e8b6a5 100644 --- a/src/ksp/f90-mod/petscksp.h +++ b/src/ksp/f90-mod/petscksp.h @@ -3,17 +3,19 @@ ! #include "petsc/finclude/petscksp.h" - type tKSP - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tKSP end type tKSP - KSP, parameter :: PETSC_NULL_KSP = tKSP(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_KSP +#endif - type tKSPGuess - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tKSPGuess end type tKSPGuess - - KSPGuess, parameter :: PETSC_NULL_KSPGuess = tKSPGuess(0) + KSPGuess, parameter :: PETSC_NULL_KSP_GUESS = tKSPGuess(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_KSP_GUESS +#endif PetscEnum, parameter :: KSP_CG_SYMMETRIC=0 PetscEnum, parameter :: KSP_CG_HERMITIAN=1 @@ -80,6 +82,3 @@ PetscEnum, parameter :: KSP_GMRES_CGS_REFINE_IFNEEDED = 1 PetscEnum, parameter :: KSP_GMRES_CGS_REFINE_ALWAYS = 2 -#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_KSP -#endif diff --git a/src/ksp/f90-mod/petscksp.h90 b/src/ksp/f90-mod/petscksp.h90 index 22baab0ad71..5bfae8e63f6 100644 --- a/src/ksp/f90-mod/petscksp.h90 +++ b/src/ksp/f90-mod/petscksp.h90 @@ -1,23 +1,5 @@ - Interface - subroutine KSPSetType(a,b,z) - import tKSP - KSP a - character(*) b - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine KSPView(a,b,z) - import tKSP,tPetscViewer - KSP a - PetscViewer b - PetscErrorCode z - end subroutine - end Interface - Interface PCBJacobiGetSubKSP subroutine PCBJacobiGetSubKSP1(a,b,c,d,z) import tKSP,tPC @@ -170,11 +152,3 @@ PetscErrorCode z end subroutine end Interface - - Interface - subroutine KSPDestroy(a,z) - import tKSP - KSP a - PetscErrorCode z - end subroutine - end Interface diff --git a/src/ksp/f90-mod/petscpc.h b/src/ksp/f90-mod/petscpc.h index 5f3634d8dae..981d35a82d8 100644 --- a/src/ksp/f90-mod/petscpc.h +++ b/src/ksp/f90-mod/petscpc.h @@ -3,12 +3,12 @@ ! #include "petsc/finclude/petscpc.h" - type tPC - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tPC end type tPC - PC, parameter :: PETSC_NULL_PC = tPC(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_PC +#endif ! ! PCSide ! @@ -100,6 +100,14 @@ PetscEnum, parameter :: PC_EXOTIC_FACE=0 PetscEnum, parameter :: PC_EXOTIC_WIREBASKET=1 +! PCMGCoarseSpaceType + PetscEnum, parameter :: PCMG_ADAPT_NONE = 0 + PetscEnum, parameter :: PCMG_ADAPT_POLYNOMIAL = 1 + PetscEnum, parameter :: PCMG_ADAPT_HARMONIC = 2 + PetscEnum, parameter :: PCMG_ADAPT_EIGENVECTOR = 3 + PetscEnum, parameter :: PCMG_ADAPT_GENERALIZED_EIGENVECTOR = 4 + PetscEnum, parameter :: PCMG_ADAPT_GDSW = 5 + ! 
PCDeflationSpaceType PetscEnum, parameter :: PC_DEFLATION_SPACE_HAAR = 0 PetscEnum, parameter :: PC_DEFLATION_SPACE_DB2 = 1 @@ -129,4 +137,3 @@ PetscEnum, parameter :: PC_INCONSISTENT_RHS=5 PetscEnum, parameter :: PC_SUBPC_ERROR=6 - external PCMGRESIDUALDEFAULT diff --git a/src/ksp/f90-mod/petscpc.h90 b/src/ksp/f90-mod/petscpc.h90 index de46532818c..5d7336bf15a 100644 --- a/src/ksp/f90-mod/petscpc.h90 +++ b/src/ksp/f90-mod/petscpc.h90 @@ -14,18 +14,6 @@ end subroutine PCASMCreateSubdomains2D end Interface - Interface - subroutine PCGASMSetSubdomains(p,a,h,i,z) - import tIS - import tPC - PC p - PetscInt a - IS h(*) - IS i(*) - PetscErrorCode z - end subroutine PCGASMSetSubdomains - end Interface - Interface subroutine PCGASMDestroySubdomains(a,h,i,z) import tIS @@ -63,72 +51,3 @@ PetscErrorCode z end subroutine PCGASMCreateSubdomains2D end Interface - - Interface - subroutine PCSetType(a,b,z) - import tPC - PC a - character(*) b - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine PCView(a,b,z) - import tPC,tPetscViewer - PC a - PetscViewer b - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine PCFieldSplitSetIS(a,b,c,z) - import tPC,tIS - PC a - character(*) b - IS c - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine PCFieldSplitGetIS(a,b,c,z) - import tPC,tIS - PC a - character(*) b - IS c - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine PCFieldSplitSetFields(a,b,c,d,e,z) - import tPC,tIS - PC a - character(*) b - PetscInt c - PetscInt d(*),e(*) - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine PCDestroy(a,z) - import tPC - PC a - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine PCBDDCCreateFETIDPOperators(a,b,c,d,e,z) - import tMat,tPC - PC a - PetscBool b - character(*) c - Mat d - PC e - PetscErrorCode z - end subroutine - end Interface diff --git a/src/ksp/ksp/impls/gmres/borthog.c b/src/ksp/ksp/impls/gmres/borthog.c index 17393644675..c0dae85f4ff 100644 --- a/src/ksp/ksp/impls/gmres/borthog.c +++ b/src/ksp/ksp/impls/gmres/borthog.c @@ -11,7 +11,7 @@ KSPGMRESModifiedGramSchmidtOrthogonalization - This is the basic orthogonalization routine using modified Gram-Schmidt. 
- Collective + Collective, No Fortran Support Input Parameters: + ksp - `KSP` object, must be associated with `KSPGMRES`, `KSPFGMRES`, or `KSPLGMRES` Krylov method diff --git a/src/ksp/ksp/impls/gmres/borthog2.c b/src/ksp/ksp/impls/gmres/borthog2.c index 1e076e8e65e..4f7bf3db454 100644 --- a/src/ksp/ksp/impls/gmres/borthog2.c +++ b/src/ksp/ksp/impls/gmres/borthog2.c @@ -11,7 +11,7 @@ KSPGMRESClassicalGramSchmidtOrthogonalization - This is the basic orthogonalization routine using classical Gram-Schmidt with possible iterative refinement to improve the stability - Collective + Collective, No Fortran Support Input Parameters: + ksp - `KSP` object, must be associated with `KSPGMRES`, `KSPFGMRES`, or `KSPLGMRES` Krylov method diff --git a/src/ksp/ksp/impls/gmres/dgmres/dgmres.c b/src/ksp/ksp/impls/gmres/dgmres/dgmres.c index 08a365f7d33..374e9fa2c17 100644 --- a/src/ksp/ksp/impls/gmres/dgmres/dgmres.c +++ b/src/ksp/ksp/impls/gmres/dgmres/dgmres.c @@ -193,11 +193,6 @@ static PetscErrorCode KSPDGMRESCycle(PetscInt *itcount, KSP ksp) } } - /* Monitor if we know that we will not return for a restart */ - if (it && (ksp->reason || ksp->its >= ksp->max_it)) { - PetscCall(KSPLogResidualHistory(ksp, ksp->rnorm)); - PetscCall(KSPMonitor(ksp, ksp->its, ksp->rnorm)); - } if (itcount) *itcount = it; /* @@ -208,6 +203,12 @@ static PetscErrorCode KSPDGMRESCycle(PetscInt *itcount, KSP ksp) /* Form the solution (or the solution so far) */ PetscCall(KSPDGMRESBuildSoln(GRS(0), ksp->vec_sol, ksp->vec_sol, ksp, it - 1)); + /* Monitor if we know that we will not return for a restart */ + if (it && (ksp->reason || ksp->its >= ksp->max_it)) { + PetscCall(KSPLogResidualHistory(ksp, ksp->rnorm)); + PetscCall(KSPMonitor(ksp, ksp->its, ksp->rnorm)); + } + /* Compute data for the deflation to be used during the next restart */ if (!ksp->reason && ksp->its < ksp->max_it) { test = max_k * PetscLogReal(ksp->rtol / res) / PetscLogReal(res / res_old); diff --git a/src/ksp/ksp/impls/gmres/fgmres/fgmres.c b/src/ksp/ksp/impls/gmres/fgmres/fgmres.c index 83067efdb8b..d5077453dea 100644 --- a/src/ksp/ksp/impls/gmres/fgmres/fgmres.c +++ b/src/ksp/ksp/impls/gmres/fgmres/fgmres.c @@ -183,13 +183,6 @@ static PetscErrorCode KSPFGMRESCycle(PetscInt *itcount, KSP ksp) } /* END OF ITERATION LOOP */ - /* - Monitor if we know that we will not return for a restart */ - if (loc_it && (ksp->reason || ksp->its >= ksp->max_it)) { - PetscCall(KSPMonitor(ksp, ksp->its, res_norm)); - PetscCall(KSPLogResidualHistory(ksp, res_norm)); - } - if (itcount) *itcount = loc_it; /* @@ -203,6 +196,13 @@ static PetscErrorCode KSPFGMRESCycle(PetscInt *itcount, KSP ksp) properly navigates */ PetscCall(KSPFGMRESBuildSoln(RS(0), ksp->vec_sol, ksp->vec_sol, ksp, loc_it - 1)); + + /* + Monitor if we know that we will not return for a restart */ + if (loc_it && (ksp->reason || ksp->its >= ksp->max_it)) { + PetscCall(KSPMonitor(ksp, ksp->its, res_norm)); + PetscCall(KSPLogResidualHistory(ksp, res_norm)); + } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ksp/ksp/impls/gmres/gmres.c b/src/ksp/ksp/impls/gmres/gmres.c index 222465f8474..a3e5fe0f752 100644 --- a/src/ksp/ksp/impls/gmres/gmres.c +++ b/src/ksp/ksp/impls/gmres/gmres.c @@ -120,7 +120,6 @@ static PetscErrorCode KSPGMRESCycle(PetscInt *itcount, KSP ksp) } *GRS(0) = gmres->rnorm0 = res; - /* check for the convergence */ PetscCall(PetscObjectSAWsTakeAccess((PetscObject)ksp)); ksp->rnorm = res; PetscCall(PetscObjectSAWsGrantAccess((PetscObject)ksp)); @@ -134,6 +133,7 @@ static PetscErrorCode 
KSPGMRESCycle(PetscInt *itcount, KSP ksp) PetscFunctionReturn(PETSC_SUCCESS); } + /* check for the convergence */ PetscCall((*ksp->converged)(ksp, ksp->its, res, &ksp->reason, ksp->cnvP)); while (!ksp->reason && it < max_k && ksp->its < ksp->max_it) { if (it) { @@ -186,13 +186,6 @@ static PetscErrorCode KSPGMRESCycle(PetscInt *itcount, KSP ksp) } } - /* Monitor if we know that we will not return for a restart */ - if (it && (ksp->reason || ksp->its >= ksp->max_it)) { - PetscCall(KSPLogResidualHistory(ksp, res)); - PetscCall(KSPLogErrorHistory(ksp)); - PetscCall(KSPMonitor(ksp, ksp->its, res)); - } - if (itcount) *itcount = it; /* @@ -202,6 +195,13 @@ static PetscErrorCode KSPGMRESCycle(PetscInt *itcount, KSP ksp) */ /* Form the solution (or the solution so far) */ PetscCall(KSPGMRESBuildSoln(GRS(0), ksp->vec_sol, ksp->vec_sol, ksp, it - 1)); + + /* Monitor if we know that we will not return for a restart */ + if (it && (ksp->reason || ksp->its >= ksp->max_it)) { + PetscCall(KSPLogResidualHistory(ksp, res)); + PetscCall(KSPLogErrorHistory(ksp)); + PetscCall(KSPMonitor(ksp, ksp->its, res)); + } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ksp/ksp/impls/gmres/lgmres/lgmres.c b/src/ksp/ksp/impls/gmres/lgmres/lgmres.c index 429e407daf7..947b6c7746f 100644 --- a/src/ksp/ksp/impls/gmres/lgmres/lgmres.c +++ b/src/ksp/ksp/impls/gmres/lgmres/lgmres.c @@ -230,9 +230,6 @@ static PetscErrorCode KSPLGMRESCycle(PetscInt *itcount, KSP ksp) /* END OF ITERATION LOOP */ PetscCall(KSPLogResidualHistory(ksp, res)); - /* Monitor if we know that we will not return for a restart */ - if (ksp->reason || ksp->its >= max_it) PetscCall(KSPMonitor(ksp, ksp->its, res)); - if (itcount) *itcount = loc_it; /* @@ -246,6 +243,9 @@ static PetscErrorCode KSPLGMRESCycle(PetscInt *itcount, KSP ksp) PetscCall(KSPLGMRESBuildSoln(GRS(0), ksp->vec_sol, ksp->vec_sol, ksp, loc_it - 1)); + /* Monitor if we know that we will not return for a restart */ + if (ksp->reason || ksp->its >= max_it) PetscCall(KSPMonitor(ksp, ksp->its, res)); + /* LGMRES_MOD collect aug vector and A*augvector for future restarts - only if we will be restarting (i.e. this cycle performed it_total iterations) */ if (!ksp->reason && ksp->its < max_it && aug_dim > 0) { diff --git a/src/ksp/ksp/impls/gmres/pgmres/pgmres.c b/src/ksp/ksp/impls/gmres/pgmres/pgmres.c index 5ccf75ea993..920e1acf819 100644 --- a/src/ksp/ksp/impls/gmres/pgmres/pgmres.c +++ b/src/ksp/ksp/impls/gmres/pgmres/pgmres.c @@ -74,11 +74,11 @@ static PetscErrorCode KSPPGMRESCycle(PetscInt *itcount, KSP ksp) else ksp->rnorm = 0; PetscCall((*ksp->converged)(ksp, ksp->its, ksp->rnorm, &ksp->reason, ksp->cnvP)); - if (it < pgmres->max_k + 1 || ksp->reason || ksp->its == ksp->max_it) { /* Monitor if we are done or still iterating, but not before a restart. */ + if (ksp->reason) break; + if (it < pgmres->max_k + 1) { /* Monitor if we are not done or still iterating, but not before a restart. */ PetscCall(KSPLogResidualHistory(ksp, ksp->rnorm)); PetscCall(KSPMonitor(ksp, ksp->its, ksp->rnorm)); } - if (ksp->reason) break; /* Catch error in happy breakdown and signal convergence and break from loop */ if (hapend) { PetscCheck(!ksp->errorifnotconverged, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "Reached happy break down, but convergence was not indicated. 
Residual norm = %g", (double)res); @@ -152,6 +152,11 @@ static PetscErrorCode KSPPGMRESCycle(PetscInt *itcount, KSP ksp) */ /* Form the solution (or the solution so far) */ PetscCall(KSPPGMRESBuildSoln(RS(0), ksp->vec_sol, ksp->vec_sol, ksp, it - 2)); + + if (ksp->reason || ksp->its == ksp->max_it) { + PetscCall(KSPLogResidualHistory(ksp, ksp->rnorm)); + PetscCall(KSPMonitor(ksp, ksp->its, ksp->rnorm)); + } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ksp/ksp/impls/gmres/pipefgmres/pipefgmres.c b/src/ksp/ksp/impls/gmres/pipefgmres/pipefgmres.c index a9bee63f829..5a10dffa644 100644 --- a/src/ksp/ksp/impls/gmres/pipefgmres/pipefgmres.c +++ b/src/ksp/ksp/impls/gmres/pipefgmres/pipefgmres.c @@ -274,13 +274,6 @@ static PetscErrorCode KSPPIPEFGMRESCycle(PetscInt *itcount, KSP ksp) } /* END OF ITERATION LOOP */ - /* - Monitor if we know that we will not return for a restart */ - if (loc_it && (ksp->reason || ksp->its >= ksp->max_it)) { - PetscCall(KSPMonitor(ksp, ksp->its, ksp->rnorm)); - PetscCall(KSPLogResidualHistory(ksp, ksp->rnorm)); - } - if (itcount) *itcount = loc_it; /* @@ -293,6 +286,14 @@ static PetscErrorCode KSPPIPEFGMRESCycle(PetscInt *itcount, KSP ksp) /* Note: must pass in (loc_it-1) for iteration count so that KSPPIPEGMRESIIBuildSoln properly navigates */ PetscCall(KSPPIPEFGMRESBuildSoln(RS(0), ksp->vec_sol, ksp->vec_sol, ksp, loc_it - 1)); + + /* + Monitor if we know that we will not return for a restart + */ + if (loc_it && (ksp->reason || ksp->its >= ksp->max_it)) { + PetscCall(KSPMonitor(ksp, ksp->its, ksp->rnorm)); + PetscCall(KSPLogResidualHistory(ksp, ksp->rnorm)); + } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ksp/ksp/impls/python/ftn-custom/zpythonkspf.c b/src/ksp/ksp/impls/python/ftn-custom/zpythonkspf.c deleted file mode 100644 index 5be0f80415f..00000000000 --- a/src/ksp/ksp/impls/python/ftn-custom/zpythonkspf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define ksppythonsettype_ KSPPYTHONSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define ksppythonsettype_ ksppythonsettype -#endif - -PETSC_EXTERN void ksppythonsettype_(KSP *ksp, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(name, len, t); - *ierr = KSPPythonSetType(*ksp, t); - if (*ierr) return; - FREECHAR(name, t); -} diff --git a/src/ksp/ksp/impls/python/pythonksp.c b/src/ksp/ksp/impls/python/pythonksp.c index a5bbc74a3d0..e4902e9c179 100644 --- a/src/ksp/ksp/impls/python/pythonksp.c +++ b/src/ksp/ksp/impls/python/pythonksp.c @@ -1,6 +1,6 @@ #include /*I "petscksp.h" I*/ -/*@C +/*@ KSPPythonSetType - Initialize a `KSP` object to a type implemented in Python. Collective @@ -25,7 +25,7 @@ PetscErrorCode KSPPythonSetType(KSP ksp, const char pyname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPPythonGetType - Get the type of a `KSP` object implemented in Python. 
Not Collective diff --git a/src/ksp/ksp/impls/tsirm/tsirm.c b/src/ksp/ksp/impls/tsirm/tsirm.c index 576d1be3f66..6411dad23fc 100644 --- a/src/ksp/ksp/impls/tsirm/tsirm.c +++ b/src/ksp/ksp/impls/tsirm/tsirm.c @@ -12,23 +12,13 @@ static PetscErrorCode KSPSetUp_TSIRM(KSP ksp) KSP_TSIRM *tsirm = (KSP_TSIRM *)ksp->data; PetscFunctionBegin; - /* Initialization */ -#if defined(PETSC_USE_REAL_SINGLE) - tsirm->tol_ls = 1e-25; -#else - tsirm->tol_ls = 1e-50; -#endif - tsirm->size_ls = 12; - tsirm->maxiter_ls = 15; - tsirm->cgls = 0; - /* Matrix of the system */ PetscCall(KSPGetOperators(ksp, &tsirm->A, NULL)); /* Matrix of the system */ PetscCall(MatGetSize(tsirm->A, &tsirm->size, NULL)); /* Size of the system */ PetscCall(MatGetOwnershipRange(tsirm->A, &tsirm->Istart, &tsirm->Iend)); /* Matrix S of residuals */ - PetscCall(MatCreate(PETSC_COMM_WORLD, &tsirm->S)); + PetscCall(MatCreate(PetscObjectComm((PetscObject)ksp), &tsirm->S)); PetscCall(MatSetSizes(tsirm->S, tsirm->Iend - tsirm->Istart, PETSC_DECIDE, tsirm->size, tsirm->size_ls)); PetscCall(MatSetType(tsirm->S, MATDENSE)); PetscCall(MatSetUp(tsirm->S)); @@ -48,7 +38,6 @@ static PetscErrorCode KSPSolve_TSIRM(KSP ksp) PetscScalar *array; PetscReal norm = 20; PetscInt i, *ind_row, first_iteration = 1, its = 0, total = 0, col = 0; - PetscInt restart = 30; KSP ksp_min; /* KSP for minimization */ PC pc_min; /* PC for minimization */ PetscBool isksp; @@ -66,7 +55,6 @@ static PetscErrorCode KSPSolve_TSIRM(KSP ksp) PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCKSP, &isksp)); PetscCheck(isksp, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "PC must be of type PCKSP"); PetscCall(PCKSPGetKSP(pc, &sub_ksp)); - PetscCall(KSPSetTolerances(sub_ksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, restart)); /* previously it seemed good but with SNES it seems not good... */ PetscCall(KSP_MatMult(sub_ksp, tsirm->A, x, tsirm->r)); @@ -75,9 +63,10 @@ static PetscErrorCode KSPSolve_TSIRM(KSP ksp) KSPCheckNorm(ksp, norm); ksp->its = 0; PetscCall(KSPConvergedDefault(ksp, ksp->its, norm, &ksp->reason, ksp->cnvP)); + PetscCall(KSPMonitor(ksp, ksp->its, norm)); PetscCall(KSPSetInitialGuessNonzero(sub_ksp, PETSC_TRUE)); do { - for (col = 0; col < tsirm->size_ls && ksp->reason == 0; col++) { + for (col = 0; col < tsirm->size_ls && ksp->reason == KSP_CONVERGED_ITERATING; col++) { /* Solve (inner iteration) */ PetscCall(KSPSolve(sub_ksp, b, x)); PetscCall(KSPGetIterationNumber(sub_ksp, &its)); @@ -96,7 +85,7 @@ static PetscErrorCode KSPSolve_TSIRM(KSP ksp) } /* Minimization step */ - if (!ksp->reason) { + if (ksp->reason == KSP_CONVERGED_ITERATING) { PetscCall(MatAssemblyBegin(tsirm->S, MAT_FINAL_ASSEMBLY)); PetscCall(MatAssemblyEnd(tsirm->S, MAT_FINAL_ASSEMBLY)); if (first_iteration) { @@ -107,7 +96,7 @@ static PetscErrorCode KSPSolve_TSIRM(KSP ksp) } /* CGLS or LSQR method to minimize the residuals*/ - PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp_min)); + PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksp), &ksp_min)); if (tsirm->cgls) { PetscCall(KSPSetType(ksp_min, KSPCGLS)); } else { @@ -173,13 +162,15 @@ static PetscErrorCode KSPDestroy_TSIRM(KSP ksp) Notes: `KSPTSIRM` is a two-stage iteration method for solving large sparse linear systems of the form $Ax=b$. The main idea behind this new method is to use a least-squares residual minimization to improve the convergence of Krylov-based iterative methods, typically those of GMRES variants.
- The principle of TSIRM algorithm is to build an outer iteration over a Krylov method, called the inner solver, and to frequently store the current residual + The principle of the `TSIRM` algorithm is to build an outer iteration over a Krylov method, called the inner solver, and to frequently store the current residual computed by the given Krylov method in a matrix of residuals S. After a few outer iterations, a least-squares minimization step is applied to the matrix composed of the saved residuals, in order to compute a better solution and to make new iterations if required. The minimization step consists in solving the least-squares problem $\min||b-ASa||$ to find 'a' which minimizes the residuals $(b-AS)$. The minimization step is performed using two solvers of linear least-squares problems: `KSPCGLS` or `KSPLSQR`. A new solution x with a minimal residual is computed with $x=Sa$. + Defaults to 30 iterations for the inner solve; use the option `-ksp_ksp_max_it` to change it. + Contributed by: Lilia Ziane Khodja @@ -191,10 +182,20 @@ M*/ PETSC_EXTERN PetscErrorCode KSPCreate_TSIRM(KSP ksp) { KSP_TSIRM *tsirm; + PC pc; + KSP sub_ksp; PetscFunctionBegin; PetscCall(PetscNew(&tsirm)); ksp->data = (void *)tsirm; +#if defined(PETSC_USE_REAL_SINGLE) + tsirm->tol_ls = 1e-25; +#else + tsirm->tol_ls = 1e-50; +#endif + tsirm->size_ls = 12; + tsirm->maxiter_ls = 15; + tsirm->cgls = 0; PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_PRECONDITIONED, PC_LEFT, 2)); PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 1)); ksp->ops->setup = KSPSetUp_TSIRM; @@ -204,6 +205,11 @@ PETSC_EXTERN PetscErrorCode KSPCreate_TSIRM(KSP ksp) ksp->ops->buildresidual = KSPBuildResidualDefault; ksp->ops->setfromoptions = KSPSetFromOptions_TSIRM; ksp->ops->view = NULL; + + PetscCall(KSPGetPC(ksp, &pc)); + PetscCall(PCSetType(pc, PCKSP)); + PetscCall(PCKSPGetKSP(pc, &sub_ksp)); + PetscCall(KSPSetTolerances(sub_ksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 30)); #if defined(PETSC_USE_COMPLEX) SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "This is not supported for complex numbers"); #else diff --git a/src/ksp/ksp/interface/dmksp.c b/src/ksp/ksp/interface/dmksp.c index 03e2dbc30ee..36bc8a32b13 100644 --- a/src/ksp/ksp/interface/dmksp.c +++ b/src/ksp/ksp/interface/dmksp.c @@ -153,7 +153,7 @@ PetscErrorCode DMGetDMKSPWrite(DM dm, DMKSP *kspdm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCopyDMKSP - copies a `DM` `DMKSP` context to a new `DM` Logically Collective diff --git a/src/ksp/ksp/interface/eige.c b/src/ksp/ksp/interface/eige.c index 06c6e3c6128..b69ce01f5ce 100644 --- a/src/ksp/ksp/interface/eige.c +++ b/src/ksp/ksp/interface/eige.c @@ -29,7 +29,7 @@ static PetscErrorCode MatMult_KSP(Mat A, Vec X, Vec Y) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPComputeOperator - Computes the explicit preconditioned operator, including diagonal scaling and null space removal if applicable.
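The same reordering recurs across the dgmres, fgmres, gmres, lgmres, pgmres, and pipefgmres hunks above: the final KSPMonitor()/KSPLogResidualHistory() calls move from before the KSPXXXBuildSoln() call to after it, so the last monitor invocation now runs once the solution has actually been assembled. Below is a sketch of the kind of user monitor this enables; the monitor name and print format are invented, while KSPBuildSolution(), VecNorm(), and KSPMonitorSet() are the documented calls.

    #include <petscksp.h>

    /* Illustrative monitor: since monitoring now happens after the solution is
       built, KSPBuildSolution() on the final call returns the finished iterate
       even for methods (e.g. GMRES) that assemble it only on demand. */
    static PetscErrorCode SolutionNormMonitor(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx)
    {
      Vec       x;
      PetscReal xnorm;

      PetscFunctionBeginUser;
      PetscCall(KSPBuildSolution(ksp, NULL, &x)); /* owned by the KSP; do not VecDestroy() */
      PetscCall(VecNorm(x, NORM_2, &xnorm));
      PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "%3" PetscInt_FMT " KSP rnorm %g |x| %g\n", it, (double)rnorm, (double)xnorm));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* registered before the solve, e.g.
       PetscCall(KSPMonitorSet(ksp, SolutionNormMonitor, NULL, NULL)); */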
diff --git a/src/ksp/ksp/interface/f90-custom/zitfuncf90.c b/src/ksp/ksp/interface/f90-custom/zitfuncf90.c index 152b134a650..de305f71e84 100644 --- a/src/ksp/ksp/interface/f90-custom/zitfuncf90.c +++ b/src/ksp/ksp/interface/f90-custom/zitfuncf90.c @@ -3,10 +3,8 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define kspgetresidualhistoryf90_ KSPGETRESIDUALHISTORYF90 - #define kspdestroy_ KSPDESTROY #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define kspgetresidualhistoryf90_ kspgetresidualhistoryf90 - #define kspdestroy_ kspdestroy #endif PETSC_EXTERN void kspgetresidualhistoryf90_(KSP *ksp, F90Array1d *indices, PetscInt *n, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) @@ -16,11 +14,3 @@ PETSC_EXTERN void kspgetresidualhistoryf90_(KSP *ksp, F90Array1d *indices, Petsc if (*ierr) return; *ierr = F90Array1dCreate((void *)hist, MPIU_REAL, 1, *n, indices PETSC_F90_2PTR_PARAM(ptrd)); } - -PETSC_EXTERN void kspdestroy_(KSP *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = KSPDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/ksp/ksp/interface/ftn-custom/ziguess.c b/src/ksp/ksp/interface/ftn-custom/ziguess.c deleted file mode 100644 index f54452227da..00000000000 --- a/src/ksp/ksp/interface/ftn-custom/ziguess.c +++ /dev/null @@ -1,40 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define kspguessgettype_ KSPGUESSGETTYPE - #define kspguesssettype_ KSPGUESSSETTYPE - #define kspguessview_ KSPGUESSVIEW -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define kspguessgettype_ kspguessgettype - #define kspguesssettype_ kspguesssettype - #define kspguessview_ kspguessview -#endif - -PETSC_EXTERN void kspguessgettype_(KSPGuess *kspguess, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = KSPGuessGetType(*kspguess, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void kspguesssettype_(KSPGuess *kspguess, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = KSPGuessSetType(*kspguess, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void kspguessview_(KSPGuess *kspguess, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = KSPGuessView(*kspguess, v); -} diff --git a/src/ksp/ksp/interface/ftn-custom/zitclf.c b/src/ksp/ksp/interface/ftn-custom/zitclf.c deleted file mode 100644 index 29bd54744f7..00000000000 --- a/src/ksp/ksp/interface/ftn-custom/zitclf.c +++ /dev/null @@ -1,60 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define kspgetoptionsprefix_ KSPGETOPTIONSPREFIX - #define kspappendoptionsprefix_ KSPAPPENDOPTIONSPREFIX - #define kspsetoptionsprefix_ KSPSETOPTIONSPREFIX - #define kspbuildsolution_ KSPBUILDSOLUTION - #define kspbuildresidual_ KSPBUILDRESIDUAL - #define matcreateschurcomplement_ MATCREATESCHURCOMPLEMENT -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define kspgetoptionsprefix_ kspgetoptionsprefix - #define kspappendoptionsprefix_ kspappendoptionsprefix - #define kspsetoptionsprefix_ kspsetoptionsprefix - #define kspbuildsolution_ kspbuildsolution - #define kspbuildresidual_ kspbuildresidual - #define matcreateschurcomplement_ matcreateschurcomplement -#endif - -PETSC_EXTERN void kspbuildsolution_(KSP *ksp, Vec *v, Vec *V, int *ierr) -{ - CHKFORTRANNULLOBJECT(V); - *ierr = KSPBuildSolution(*ksp, 
*v, V); -} - -PETSC_EXTERN void kspbuildresidual_(KSP *ksp, Vec *t, Vec *v, Vec *V, int *ierr) -{ - CHKFORTRANNULLOBJECT(V); - *ierr = KSPBuildResidual(*ksp, *t, *v, V); -} - -PETSC_EXTERN void kspgetoptionsprefix_(KSP *ksp, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = KSPGetOptionsPrefix(*ksp, &tname); - *ierr = PetscStrncpy(prefix, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, prefix, len); -} - -PETSC_EXTERN void kspappendoptionsprefix_(KSP *ksp, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = KSPAppendOptionsPrefix(*ksp, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void kspsetoptionsprefix_(KSP *ksp, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = KSPSetOptionsPrefix(*ksp, t); - if (*ierr) return; - FREECHAR(prefix, t); -} diff --git a/src/ksp/ksp/interface/ftn-custom/zitcreatef.c b/src/ksp/ksp/interface/ftn-custom/zitcreatef.c deleted file mode 100644 index 23b3b6d9a76..00000000000 --- a/src/ksp/ksp/interface/ftn-custom/zitcreatef.c +++ /dev/null @@ -1,62 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define kspgettype_ KSPGETTYPE - #define kspsettype_ KSPSETTYPE - #define kspview_ KSPVIEW - #define kspviewfromoptions_ KSPVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define kspgettype_ kspgettype - #define kspsettype_ kspsettype - #define kspview_ kspview - #define kspviewfromoptions_ kspviewfromoptions -#endif - -PETSC_EXTERN void kspgettype_(KSP *ksp, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = KSPGetType(*ksp, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void kspsettype_(KSP *ksp, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = KSPSetType(*ksp, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void kspview_(KSP *ksp, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = KSPView(*ksp, v); -} - -PETSC_EXTERN void kspviewfromoptions_(KSP *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = KSPViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void kspgetconvergedreasonstring_(KSP *ksp, char *strreason, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tstrreason; - *ierr = KSPGetConvergedReasonString(*ksp, &tstrreason); - *ierr = PetscStrncpy(strreason, tstrreason, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, strreason, len); -} diff --git a/src/ksp/ksp/interface/ftn-custom/zitfuncf.c b/src/ksp/ksp/interface/ftn-custom/zitfuncf.c index 448b3cacac7..ec4add88bfd 100644 --- a/src/ksp/ksp/interface/ftn-custom/zitfuncf.c +++ b/src/ksp/ksp/interface/ftn-custom/zitfuncf.c @@ -2,8 +2,6 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define kspconvergedreasonview_ KSPCONVERGEDREASONVIEW - #define kspconvergedrateview_ KSPCONVERGEDRATEVIEW #define kspmonitorset_ KSPMONITORSET #define kspsetconvergencetest_ KSPSETCONVERGENCETEST #define kspgetresidualhistory_ KSPGETRESIDUALHISTORY @@ -23,8 +21,6 @@ #define dmkspsetcomputeinitialguess_ 
DMKSPSETCOMPUTEINITIALGUESS /* zdmkspf.c */ #define dmkspsetcomputeoperators_ DMKSPSETCOMPUTEOPERATORS /* zdmkspf.c */ #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define kspconvergedreasonview_ kspconvergedreasonview - #define kspconvergedrateview_ kspconvergedrateview #define kspmonitorset_ kspmonitorset #define kspsetconvergencetest_ kspsetconvergencetest #define kspgetresidualhistory_ kspgetresidualhistory @@ -197,17 +193,3 @@ PETSC_EXTERN void kspsetcomputeoperators_(KSP *ksp, void (*func)(KSP *, Vec *, v *ierr = KSPGetDM(*ksp, &dm); if (!*ierr) dmkspsetcomputeoperators_(&dm, func, ctx, ierr); } - -PETSC_EXTERN void kspconvergedreasonview_(KSP *ksp, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = KSPConvergedReasonView(*ksp, v); -} - -PETSC_EXTERN void kspconvergedrateview_(KSP *ksp, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = KSPConvergedRateView(*ksp, v); -} diff --git a/src/ksp/ksp/interface/iguess.c b/src/ksp/ksp/interface/iguess.c index 6e425f50479..554e95d93bf 100644 --- a/src/ksp/ksp/interface/iguess.c +++ b/src/ksp/ksp/interface/iguess.c @@ -6,7 +6,7 @@ static PetscBool KSPGuessRegisterAllCalled; /*@C KSPGuessRegister - Registers a method for initial guess computation in Krylov subspace solver package. - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined solver @@ -144,7 +144,7 @@ PetscErrorCode KSPGuessDestroy(KSPGuess *guess) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPGuessView - View the `KSPGuess` object Logically Collective @@ -216,15 +216,15 @@ PetscErrorCode KSPGuessCreate(MPI_Comm comm, KSPGuess *guess) PetscFunctionBegin; PetscAssertPointer(guess, 2); - *guess = NULL; PetscCall(KSPInitializePackage()); + PetscCall(PetscHeaderCreate(tguess, KSPGUESS_CLASSID, "KSPGuess", "Initial guess for Krylov Method", "KSPGuess", comm, KSPGuessDestroy, KSPGuessView)); tguess->omatstate = -1; *guess = tguess; PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPGuessSetType - Sets the type of a `KSPGuess` Logically Collective @@ -263,7 +263,7 @@ PetscErrorCode KSPGuessSetType(KSPGuess guess, KSPGuessType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPGuessGetType - Gets the `KSPGuessType` as a string from the `KSPGuess` object. Not Collective diff --git a/src/ksp/ksp/interface/itcl.c b/src/ksp/ksp/interface/itcl.c index 9524bff09a6..71c99b66e6c 100644 --- a/src/ksp/ksp/interface/itcl.c +++ b/src/ksp/ksp/interface/itcl.c @@ -5,7 +5,7 @@ #include /*I "petscksp.h" I*/ #include -/*@C +/*@ KSPSetOptionsPrefix - Sets the prefix used for searching for all `KSP` options in the database. @@ -64,7 +64,7 @@ PetscErrorCode KSPSetOptionsPrefix(KSP ksp, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPAppendOptionsPrefix - Appends to the prefix used for searching for all `KSP` options in the database. @@ -187,7 +187,7 @@ PetscErrorCode KSPGetGuess(KSP ksp, KSPGuess *guess) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPGetOptionsPrefix - Gets the prefix used for searching for all `KSP` options in the database. 
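Besides the /*@C-to-/*@ marker changes in itcl.c, the hunk that follows reorders KSPSetFromOptions(): PCSetFromOptions() now runs only after the KSP type has been established (defaulting to KSPGMRES), rather than before the KSP processes its own options. The sketch below shows how prefixed options reach both objects through these routines; the "outer_" and "fine_" prefixes are invented for illustration.

    #include <petscksp.h>

    /* Prefixes concatenate left to right, so this solver reads options such as
       -outer_fine_ksp_type, and the PC configured inside KSPSetFromOptions()
       reads -outer_fine_pc_type. */
    static PetscErrorCode ConfigureSolver(MPI_Comm comm, KSP *ksp)
    {
      PetscFunctionBeginUser;
      PetscCall(KSPCreate(comm, ksp));
      PetscCall(KSPSetOptionsPrefix(*ksp, "outer_"));
      PetscCall(KSPAppendOptionsPrefix(*ksp, "fine_"));
      PetscCall(KSPSetFromOptions(*ksp));
      PetscFunctionReturn(PETSC_SUCCESS);
    }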
@@ -351,10 +351,6 @@ PetscErrorCode KSPSetFromOptions(KSP ksp) PetscCall(PetscObjectGetComm((PetscObject)ksp, &comm)); PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ksp, &prefix)); - if (!ksp->skippcsetfromoptions) { - if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc)); - PetscCall(PCSetFromOptions(ksp->pc)); - } PetscCall(KSPRegisterAll()); PetscObjectOptionsBegin((PetscObject)ksp); @@ -365,6 +361,11 @@ PetscErrorCode KSPSetFromOptions(KSP ksp) */ if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES)); + if (!ksp->skippcsetfromoptions) { + if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc)); + PetscCall(PCSetFromOptions(ksp->pc)); + } + PetscCall(KSPResetViewers(ksp)); /* Cancels all monitors hardwired into code before call to KSPSetFromOptions() */ diff --git a/src/ksp/ksp/interface/itcreate.c b/src/ksp/ksp/interface/itcreate.c index 60770b4086b..68048709803 100644 --- a/src/ksp/ksp/interface/itcreate.c +++ b/src/ksp/ksp/interface/itcreate.c @@ -23,7 +23,7 @@ PetscFunctionList KSPMonitorCreateList = NULL; PetscFunctionList KSPMonitorDestroyList = NULL; PetscBool KSPMonitorRegisterAllCalled = PETSC_FALSE; -/*@C +/*@ KSPLoad - Loads a `KSP` that has been stored in a `PETSCVIEWERBINARY` with `KSPView()`. Collective @@ -67,7 +67,7 @@ PetscErrorCode KSPLoad(KSP newdm, PetscViewer viewer) #if defined(PETSC_HAVE_SAWS) #include #endif -/*@C +/*@ KSPView - Prints the `KSP` data structure. Collective @@ -212,7 +212,7 @@ PetscErrorCode KSPView(KSP ksp, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPViewFromOptions - View a `KSP` object based on values in the options database Collective @@ -546,7 +546,7 @@ PetscErrorCode KSPGetOperators(KSP ksp, Mat *Amat, Mat *Pmat) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPGetOperatorsSet - Determines if the matrix associated with the linear system and possibly a different one associated with the preconditioner have been set in the `KSP`. @@ -705,11 +705,9 @@ PetscErrorCode KSPCreate(MPI_Comm comm, KSP *inksp) PetscFunctionBegin; PetscAssertPointer(inksp, 2); - *inksp = NULL; PetscCall(KSPInitializePackage()); PetscCall(PetscHeaderCreate(ksp, KSP_CLASSID, "KSP", "Krylov Method", "KSP", comm, KSPDestroy, KSPView)); - ksp->max_it = 10000; ksp->pc_side = ksp->pc_side_set = PC_SIDE_DEFAULT; ksp->rtol = 1.e-5; @@ -761,7 +759,7 @@ PetscErrorCode KSPCreate(MPI_Comm comm, KSP *inksp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPSetType - Builds the `KSP` data structure for a particular `KSPType` Logically Collective @@ -828,7 +826,7 @@ PetscErrorCode KSPSetType(KSP ksp, KSPType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPGetType - Gets the `KSP` type as a string from the `KSP` object. Not Collective @@ -855,7 +853,7 @@ PetscErrorCode KSPGetType(KSP ksp, KSPType *type) /*@C KSPRegister - Adds a method, `KSPType`, to the Krylov subspace solver package. - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined solver diff --git a/src/ksp/ksp/interface/iterativ.c b/src/ksp/ksp/interface/iterativ.c index 59c96eb662f..391ab855c54 100644 --- a/src/ksp/ksp/interface/iterativ.c +++ b/src/ksp/ksp/interface/iterativ.c @@ -1737,7 +1737,7 @@ PetscErrorCode KSPBuildResidualDefault(KSP ksp, Vec t, Vec v, Vec *V) Input Parameters: + ksp - iterative context -. rightn - number of right work vectors +. 
rightn - number of right work vectors to allocate - leftn - number of left work vectors to allocate Output Parameters: @@ -1758,7 +1758,7 @@ PetscErrorCode KSPBuildResidualDefault(KSP ksp, Vec t, Vec v, Vec *V) .seealso: [](ch_ksp), `MatCreateVecs()`, `VecDestroyVecs()`, `KSPSetWorkVecs()` @*/ -PetscErrorCode KSPCreateVecs(KSP ksp, PetscInt rightn, Vec **right, PetscInt leftn, Vec **left) +PetscErrorCode KSPCreateVecs(KSP ksp, PetscInt rightn, Vec *right[], PetscInt leftn, Vec *left[]) { Vec vecr = NULL, vecl = NULL; PetscBool matset, pmatset, isshell, preferdm = PETSC_FALSE; @@ -1834,7 +1834,7 @@ PetscErrorCode KSPCreateVecs(KSP ksp, PetscInt rightn, Vec **right, PetscInt lef PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPSetWorkVecs - Sets a number of work vectors into a `KSP` object Collective @@ -1927,7 +1927,7 @@ PetscErrorCode KSPGetConvergedReason(KSP ksp, KSPConvergedReason *reason) .seealso: [](ch_ksp), `KSP`, `KSPGetConvergedReason()` @*/ -PetscErrorCode KSPGetConvergedReasonString(KSP ksp, const char **strreason) +PetscErrorCode KSPGetConvergedReasonString(KSP ksp, const char *strreason[]) { PetscFunctionBegin; PetscValidHeaderSpecific(ksp, KSP_CLASSID, 1); diff --git a/src/ksp/ksp/interface/itfunc.c b/src/ksp/ksp/interface/itfunc.c index b6c31475b6e..76c00b9c4f0 100644 --- a/src/ksp/ksp/interface/itfunc.c +++ b/src/ksp/ksp/interface/itfunc.c @@ -428,7 +428,7 @@ PetscErrorCode KSPSetUp(KSP ksp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged to a viewer Collective @@ -460,7 +460,7 @@ PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer) PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii)); if (isAscii) { PetscCall(PetscViewerGetFormat(viewer, &format)); - PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel)); + PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel + 1)); if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) { if (((PetscObject)ksp)->prefix) { PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its)); @@ -479,7 +479,7 @@ PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer) PetscCall(PetscViewerASCIIPrintf(viewer, " PC failed due to %s \n", PCFailedReasons[reason])); } } - PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel)); + PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel + 1)); } PetscFunctionReturn(PETSC_SUCCESS); } @@ -591,7 +591,7 @@ PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPConvergedRateView - Displays the convergence rate of `KSPSolve()` to a viewer Collective @@ -1430,7 +1430,7 @@ PetscErrorCode KSPReset(KSP ksp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ KSPDestroy - Destroys a `KSP` context. Collective @@ -2048,8 +2048,7 @@ PetscErrorCode KSPGetRhs(KSP ksp, Vec *r) /*@ KSPGetSolution - Gets the location of the solution for the - linear system to be solved. Note that this may not be where the solution - is stored during the iterative process; see `KSPBuildSolution()`. + linear system to be solved. Not Collective @@ -2061,6 +2060,10 @@ PetscErrorCode KSPGetRhs(KSP ksp, Vec *r) Level: developer + Note: + If this is called during a `KSPSolve()` the vector's values may not represent the solution + to the linear system. 
+
.seealso: [](ch_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
@*/
PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
@@ -2110,7 +2113,7 @@ PETSC_INTERN PetscErrorCode PCCreate_MPI(PC);
 /*@C
   KSPCheckPCMPI - Checks if `-mpi_linear_solver_server` is active and the `PC` should be changed to `PCMPI`
 
-  Collective
+  Collective, No Fortran Support
 
   Input Parameter:
 . ksp - iterative context obtained from `KSPCreate()`
@@ -2182,10 +2185,13 @@ PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
 
   Level: developer
 
-  Note:
+  Notes:
   This routine is called by the `KSP` implementations.
   It does not typically need to be called by the user.
 
+  For Krylov methods that do not keep a running value of the current solution (such as `KSPGMRES`), this
+  cannot be called after the `KSPConvergedReason` has been set but before the final solution has been computed.
+
 .seealso: [](ch_ksp), `KSPMonitorSet()`
 @*/
 PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
@@ -2726,7 +2732,7 @@ PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
   PetscFunctionReturn(PETSC_SUCCESS);
 }
 
-/*@C
+/*@
   KSPBuildSolution - Builds the approximate solution in a vector provided.
 
   Collective
@@ -2736,7 +2742,7 @@ PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
 
   Output Parameter:
   Provide exactly one of
-+ v - location to stash solution.
++ v - optional location to stash the solution; pass `NULL` to have it returned in `V`
- V - the solution is returned in this location. This vector is created internally. This vector should NOT be destroyed by the user with `VecDestroy()`.
 
   Level: developer
@@ -2754,8 +2760,8 @@ PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
   methods, such as `KSPCG`, the second case requires a copy of the solution,
   while in the first case the call is essentially free since it simply
   returns the vector where the solution already is stored. For some methods
-  like `KSPGMRES` this is a reasonably expensive operation and should only be
-  used in truly needed.
+  like `KSPGMRES`, this is a reasonably expensive operation during the solve and should only be
+  used if truly needed.
 
.seealso: [](ch_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
@*/
@@ -2765,11 +2771,16 @@ PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
   PetscValidHeaderSpecific(ksp, KSP_CLASSID, 1);
   PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
   if (!V) V = &v;
-  PetscUseTypeMethod(ksp, buildsolution, v, V);
+  if (ksp->reason != KSP_CONVERGED_ITERATING) {
+    if (!v) PetscCall(KSPGetSolution(ksp, V));
+    else PetscCall(VecCopy(ksp->vec_sol, v));
+  } else {
+    PetscUseTypeMethod(ksp, buildsolution, v, V);
+  }
   PetscFunctionReturn(PETSC_SUCCESS);
 }
 
-/*@C
+/*@
   KSPBuildResidual - Builds the residual in a vector provided.
 
   Collective
@@ -2778,8 +2789,7 @@ PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
 . ksp - iterative context obtained from `KSPCreate()`
 
   Output Parameters:
-+ v - optional location to stash residual. If `v` is not provided,
-      then a location is generated.
++ v - optional location to stash residual. If `v` is not provided, then a location is generated.
. t - work vector. If not provided then one is generated.
- V - the residual
@@ -2974,7 +2984,7 @@ PetscErrorCode KSPSetComputeOperators(KSP ksp, KSPComputeOperatorsFn *func, void
 
   Input Parameters:
+ ksp - the `KSP` context
-. func - function to compute the right-hand side, see `KSPComputeRHSFn` for the calling squence
+.
func - function to compute the right-hand side, see `KSPComputeRHSFn` for the calling sequence - ctx - optional context Level: beginner diff --git a/src/ksp/ksp/tests/ex11.c b/src/ksp/ksp/tests/ex11.c index 927c0dcd4b6..9446fe9c3a4 100644 --- a/src/ksp/ksp/tests/ex11.c +++ b/src/ksp/ksp/tests/ex11.c @@ -297,7 +297,7 @@ int main(int argc, char **argv) testset: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) nsize: 4 - args: -f ${DATAFILESPATH}/matrices/underworld32.gz -test_fs false -prefix_push fc_ -ksp_converged_reason -ksp_max_it 100 -ksp_pc_side right -pc_type hpddm -pc_hpddm_levels_1_svd_nsv 100 -pc_hpddm_levels_1_svd_relative_threshold 1e-6 -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_coarse_pc_type cholesky -pc_hpddm_levels_1_sub_pc_factor_shift_type inblocks -prefix_pop + args: -f ${DATAFILESPATH}/matrices/underworld32.gz -test_fs false -prefix_push fc_ -ksp_converged_reason -ksp_max_it 100 -ksp_pc_side right -pc_type hpddm -pc_hpddm_levels_1_svd_nsv 100 -pc_hpddm_levels_1_svd_relative_threshold 1e-6 -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_sub_pc_factor_shift_type inblocks -prefix_pop test: suffix: harmonic_overlap_1 filter: grep -v "WARNING! " | grep -v "There are 2 unused database options" | grep -v "Option left: name:-fc_pc_hpddm_levels_1_svd_pc_" diff --git a/src/ksp/ksp/tests/ex3.c b/src/ksp/ksp/tests/ex3.c index 64607bc0c95..29d891471eb 100644 --- a/src/ksp/ksp/tests/ex3.c +++ b/src/ksp/ksp/tests/ex3.c @@ -47,14 +47,15 @@ int main(int argc, char **args) PetscScalar val, Ke[16], r[4]; PetscReal x, y, h, norm; PetscInt idx[4], count, *rows; - Vec u, ustar, b; + Vec u, ustar, b, build_sol; KSP ksp; - PetscBool viewkspest = PETSC_FALSE; + PetscBool viewkspest = PETSC_FALSE, testbuildsolution = PETSC_FALSE; PetscFunctionBeginUser; PetscCall(PetscInitialize(&argc, &args, (char *)0, help)); PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL)); PetscCall(PetscOptionsGetBool(NULL, NULL, "-ksp_est_view", &viewkspest, NULL)); + PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_build_solution", &testbuildsolution, NULL)); N = (m + 1) * (m + 1); /* dimension of matrix */ M = m * m; /* number of elements */ h = 1.0 / m; /* mesh width */ @@ -148,6 +149,16 @@ int main(int argc, char **args) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE)); PetscCall(KSPSolve(ksp, b, u)); + if (testbuildsolution) { + PetscBool ok; + + PetscCall(VecDuplicate(u, &build_sol)); + PetscCall(KSPBuildSolution(ksp, build_sol, NULL)); + PetscCall(VecEqual(u, build_sol, &ok)); + PetscCheck(ok, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "KSPBuildSolution() returned incorrect solution"); + PetscCall(VecDestroy(&build_sol)); + } + if (viewkspest) { KSP kspest; @@ -214,4 +225,10 @@ int main(int argc, char **args) filter: grep -v "variant HERMITIAN" | sed -e "s/Iterations 4/Iterations 5/g" args: -pc_type gamg -mg_levels_pc_type sor -mg_levels_esteig_ksp_type cg -ksp_view + test: + suffix: build_solution + requires: !complex + filter: grep -v Norm + args: -ksp_type {{chebyshev cg groppcg pipecg pipecgrr pipelcg pipeprcg cgne nash stcg gltr fcg pipefcg gmres fgmres lgmres dgmres pgmres tcqmr bcgs ibcgs qmrcgs fbcgs fbcgsr bcgsl pipebcgs cgs tfqmr cr pipecr bicg minres lcd gcr cgls richardson}} -test_build_solution + TEST*/ diff --git a/src/ksp/ksp/tests/ex4.c b/src/ksp/ksp/tests/ex4.c index 98f9ef8cb51..c7da3b66b8c 100644 --- a/src/ksp/ksp/tests/ex4.c +++ b/src/ksp/ksp/tests/ex4.c @@ -169,6 
+169,23 @@ int main(int argc, char **args) PetscCall(KSPGetIterationNumber(ksp, &its)); PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g Iterations %" PetscInt_FMT "\n", (double)(norm * h), its)); + { // Test getting Jacobi diag + PC pc; + PetscBool is_pcjacobi; + + PetscCall(KSPGetPC(ksp, &pc)); + PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCJACOBI, &is_pcjacobi)); + if (is_pcjacobi) { + Vec diag; + + PetscCall(MatCreateVecs(C, &diag, NULL)); + PetscCall(PCJacobiGetDiagonal(pc, diag, NULL)); + PetscCall(VecNorm(diag, NORM_2, &norm)); + PetscCheck(norm > 0, PETSC_COMM_WORLD, PETSC_ERR_USER, "Jacobi preconditioner should have norm greater than 0"); + PetscCall(VecDestroy(&diag)); + } + } + PetscCall(KSPDestroy(&ksp)); PetscCall(VecDestroy(&ustar)); PetscCall(VecDestroy(&u)); diff --git a/src/ksp/ksp/tests/ex52f.F90 b/src/ksp/ksp/tests/ex52f.F90 index 9fc00a75f0e..3159d8b6039 100644 --- a/src/ksp/ksp/tests/ex52f.F90 +++ b/src/ksp/ksp/tests/ex52f.F90 @@ -30,11 +30,11 @@ program main col = 0 two = 2.0 one = 1 - PetscCallA(MatSetValues(A,one,row,one,col,two,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[row],one,[col],[two],INSERT_VALUES,ierr)) row = 1 col = 1 zero = 0.0 - PetscCallA(MatSetValues(A,one,row,one,col,zero,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[row],one,[col],[zero],INSERT_VALUES,ierr)) PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/ksp/ksp/tests/ex5f.F90 b/src/ksp/ksp/tests/ex5f.F90 index 7b20eef9163..aba8bc41421 100644 --- a/src/ksp/ksp/tests/ex5f.F90 +++ b/src/ksp/ksp/tests/ex5f.F90 @@ -54,7 +54,6 @@ subroutine mymatmult(A,x,y,ierr) PetscCallA(VecCopy(x,y,ierr)) - return end !/*TEST diff --git a/src/ksp/ksp/tests/ex62f.F90 b/src/ksp/ksp/tests/ex62f.F90 index 731ca5700e2..3ec4a912a6c 100644 --- a/src/ksp/ksp/tests/ex62f.F90 +++ b/src/ksp/ksp/tests/ex62f.F90 @@ -95,22 +95,22 @@ program main j = II - i*n if (i.gt.0) then JJ = II - n - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],ADD_VALUES,ierr)) endif if (i.lt.m-1) then JJ = II + n - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],ADD_VALUES,ierr)) endif if (j.gt.0) then JJ = II - 1 - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],ADD_VALUES,ierr)) endif if (j.lt.n-1) then JJ = II + 1 - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],ADD_VALUES,ierr)) endif v = 4.0 - PetscCallA( MatSetValues(A,ione,II,ione,II,v,ADD_VALUES,ierr)) + PetscCallA( MatSetValues(A,ione,[II],ione,[II],[v],ADD_VALUES,ierr)) 10 continue ! Assemble matrix, using the 2-step process: diff --git a/src/ksp/ksp/tests/ex86f.F90 b/src/ksp/ksp/tests/ex86f.F90 new file mode 100644 index 00000000000..3a72605f8e7 --- /dev/null +++ b/src/ksp/ksp/tests/ex86f.F90 @@ -0,0 +1,66 @@ +! +! Description: Demonstrates error handling with incorrect Fortran objects +! +! ----------------------------------------------------------------------- + + program main +#include + use petscksp + implicit none + PetscErrorCode ierr + PetscInt test + KSP ksp + +! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +! Beginning of program +! 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr)) + + test = 1 + PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-test', test, PETSC_NULL_BOOL, ierr)) + if (test == 1) then + PetscCallA(KSPSolve(PETSC_NULL_KSP,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)) + else if (test == 2) then + PetscCallA(KSPCreate(PETSC_COMM_WORLD,PETSC_NULL_KSP,ierr)) + else if (test == 3) then + PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr)) + PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr)) + else if (test == 4) then + PetscCallA(KSPDestroy(PETSC_NULL_KSP,ierr)) + endif + +! These should error but do not when ksp has not been created +! PetscCallA(KSPSolve(ksp,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)) +! PetscCallA(KSPDestroy(ksp,ierr)) + + PetscCallA(PetscFinalize(ierr)) + end + +!/*TEST +! +! test: +! requires: defined(PETSC_USE_DEBUG) !defined(PETSCTEST_VALGRIND) defined(PETSC_HAVE_FORTRAN_FREE_LINE_LENGTH_NONE) +! args: -petsc_ci_portable_error_output -error_output_stdout -test 1 +! filter: grep -E "(PETSC ERROR)" | sed s"?KSPCREATE?kspcreate_?" +! +! test: +! suffix: 2 +! requires: !defined(PETSCTEST_VALGRIND) defined(PETSC_HAVE_FORTRAN_FREE_LINE_LENGTH_NONE) +! args: -petsc_ci_portable_error_output -error_output_stdout -test 2 +! filter: grep -E "(PETSC ERROR)"| sed s"?KSPCREATE?kspcreate_?" +! +! test: +! suffix: 3 +! requires: !defined(PETSCTEST_VALGRIND) defined(PETSC_HAVE_FORTRAN_FREE_LINE_LENGTH_NONE) +! args: -petsc_ci_portable_error_output -error_output_stdout -test 3 +! filter: grep -E "(PETSC ERROR)" | sed s"?KSPCREATE?kspcreate_?" +! +! +! test: +! suffix: 4 +! requires: !defined(PETSCTEST_VALGRIND) defined(PETSC_HAVE_FORTRAN_FREE_LINE_LENGTH_NONE) +! args: -petsc_ci_portable_error_output -error_output_stdout -test 4 +! filter: grep -E "(PETSC ERROR)" | sed s"?KSPDESTROY?kspdestroy_?" +! +!TEST*/ diff --git a/src/ksp/ksp/tests/lostnullspace.c b/src/ksp/ksp/tests/lostnullspace.c index 51205a1813e..5ad721d7e75 100644 --- a/src/ksp/ksp/tests/lostnullspace.c +++ b/src/ksp/ksp/tests/lostnullspace.c @@ -1,4 +1,4 @@ -static char help[] = "Loosing nullspaces in PCFIELDSPLIT after zeroing rows.\n"; +static char help[] = "Losing nullspaces in PCFIELDSPLIT after zeroing rows.\n"; // Contributed by Jeremy Theler diff --git a/src/ksp/ksp/tests/output/ex3_build_solution.out b/src/ksp/ksp/tests/output/ex3_build_solution.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/ksp/ksp/tests/output/ex3_gamg_provided_not_ok.out b/src/ksp/ksp/tests/output/ex3_gamg_provided_not_ok.out index 3bbc8649501..c4d7c93f18f 100644 --- a/src/ksp/ksp/tests/output/ex3_gamg_provided_not_ok.out +++ b/src/ksp/ksp/tests/output/ex3_gamg_provided_not_ok.out @@ -12,12 +12,14 @@ PC Object: 1 MPI process Cycles per PCApply=1 Using externally compute Galerkin coarse grid matrices GAMG specific options - Threshold for dropping small values in graph on each level = -1. -1. + Threshold for dropping small values in graph on each level = -1. -1. Threshold scaling factor for each level not specified = 1. 
AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.11111 operator = 1.09756 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tests/output/ex86f_1.out b/src/ksp/ksp/tests/output/ex86f_1.out new file mode 100644 index 00000000000..e0962298f29 --- /dev/null +++ b/src/ksp/ksp/tests/output/ex86f_1.out @@ -0,0 +1,6 @@ +[0]PETSC ERROR: --------------------- Error Message -------------------------------------------------------------- +[0]PETSC ERROR: Null argument, when expecting valid pointer +[0]PETSC ERROR: Null Pointer: Parameter # 1 +[0]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting. +[0]PETSC ERROR: #1 KSPSolve() at itfunc.c:0 +[0]PETSC ERROR: #2 ex86f.F90:0 diff --git a/src/ksp/ksp/tests/output/ex86f_2.out b/src/ksp/ksp/tests/output/ex86f_2.out new file mode 100644 index 00000000000..78675a9fb01 --- /dev/null +++ b/src/ksp/ksp/tests/output/ex86f_2.out @@ -0,0 +1,6 @@ +[0]PETSC ERROR: --------------------- Error Message -------------------------------------------------------------- +[0]PETSC ERROR: Invalid argument +[0]PETSC ERROR: Cannot create PETSC_NULL_XXX object +[0]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting. +[0]PETSC ERROR: #1 kspcreate_() at itcreatef.c:0 +[0]PETSC ERROR: #2 ex86f.F90:0 diff --git a/src/ksp/ksp/tests/output/ex86f_3.out b/src/ksp/ksp/tests/output/ex86f_3.out new file mode 100644 index 00000000000..0faa5962eb4 --- /dev/null +++ b/src/ksp/ksp/tests/output/ex86f_3.out @@ -0,0 +1,6 @@ +[0]PETSC ERROR: --------------------- Error Message -------------------------------------------------------------- +[0]PETSC ERROR: Invalid argument +[0]PETSC ERROR: Cannot create already existing object +[0]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting. 
+[0]PETSC ERROR: #1 kspcreate_() at itcreatef.c:0 +[0]PETSC ERROR: #2 ex86f.F90:0 diff --git a/src/ksp/ksp/tests/output/ex86f_4.out b/src/ksp/ksp/tests/output/ex86f_4.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/ksp/ksp/tutorials/ex10.c b/src/ksp/ksp/tutorials/ex10.c index 67750bcb8b4..88a8fd74aae 100644 --- a/src/ksp/ksp/tutorials/ex10.c +++ b/src/ksp/ksp/tutorials/ex10.c @@ -60,8 +60,15 @@ PetscErrorCode CreateSystem(const char filename[PETSC_MAX_PATH_LEN], RHSType rhs /* load the matrix and vector; then destroy the viewer */ PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); - PetscCall(MatSetFromOptions(A)); PetscCall(MatLoad(A, viewer)); + if (permute) { + Mat Aperm; + PetscCall(MatGetOrdering(A, ordering, &rowperm, &colperm)); + PetscCall(MatPermute(A, rowperm, colperm, &Aperm)); + PetscCall(MatDestroy(&A)); + A = Aperm; /* Replace original operator with permuted version */ + } + PetscCall(MatSetFromOptions(A)); switch (rhstype) { case RHS_FILE: /* Vectors in the file might a different size than the matrix so we need a @@ -109,12 +116,7 @@ PetscErrorCode CreateSystem(const char filename[PETSC_MAX_PATH_LEN], RHSType rhs PetscCall(VecDuplicate(b, &x)); if (permute) { - Mat Aperm; - PetscCall(MatGetOrdering(A, ordering, &rowperm, &colperm)); - PetscCall(MatPermute(A, rowperm, colperm, &Aperm)); PetscCall(VecPermute(b, rowperm, PETSC_FALSE)); - PetscCall(MatDestroy(&A)); - A = Aperm; /* Replace original operator with permuted version */ PetscCall(ISDestroy(&rowperm)); } diff --git a/src/ksp/ksp/tutorials/ex100f.F90 b/src/ksp/ksp/tutorials/ex100f.F90 index 2f26f0571a1..7ca83e36cae 100644 --- a/src/ksp/ksp/tutorials/ex100f.F90 +++ b/src/ksp/ksp/tutorials/ex100f.F90 @@ -11,6 +11,7 @@ program main KSP ksp PC pc PetscErrorCode ierr + character*80 mattype N = 100 draw = .FALSE. 
@@ -28,6 +29,8 @@ program main PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,N,N,ierr)) PetscCallA(MatSetType(A,'python',ierr)) PetscCallA(MatPythonSetType(A,'example100.py:Laplace1D',ierr)) + PetscCallA(MatPythonGetType(A,mattype,ierr)) + PetscCheckA(mattype == 'example100.py:Laplace1D',PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Error') PetscCallA(MatSetUp(A,ierr)) PetscCallA(MatCreateVecs(A,x,b,ierr)) diff --git a/src/ksp/ksp/tutorials/ex11f.F90 b/src/ksp/ksp/tutorials/ex11f.F90 index 707afbde0a2..86b5a1aeb30 100644 --- a/src/ksp/ksp/tutorials/ex11f.F90 +++ b/src/ksp/ksp/tutorials/ex11f.F90 @@ -111,23 +111,23 @@ program main j = II - i*n if (i.gt.0) then JJ = II - n - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif if (i.lt.n-1) then JJ = II + n - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif if (j.gt.0) then JJ = II - 1 - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif if (j.lt.n-1) then JJ = II + 1 - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif if (use_random) PetscCallA(PetscRandomGetValue(rctx,sigma2,ierr)) v = 4.0 - sigma1*h2 + sigma2*h2 - PetscCallA( MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)) + PetscCallA( MatSetValues(A,one,[II],one,[II],[v],ADD_VALUES,ierr)) 10 continue if (use_random) PetscCallA(PetscRandomDestroy(rctx,ierr)) diff --git a/src/ksp/ksp/tutorials/ex13f90.F90 b/src/ksp/ksp/tutorials/ex13f90.F90 index da19c5663c8..f6558fd7b07 100644 --- a/src/ksp/ksp/tutorials/ex13f90.F90 +++ b/src/ksp/ksp/tutorials/ex13f90.F90 @@ -173,13 +173,13 @@ subroutine UserInitializeLinearSolver(m,n,userctx,ierr) ! Create the sparse matrix. Preallocate 5 nonzeros per row. - PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,Ntot,Ntot,five,PETSC_NULL_INTEGER,A,ierr)) + PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,Ntot,Ntot,five,PETSC_NULL_INTEGER_ARRAY,A,ierr)) ! ! Create vectors. Here we create vectors with no memory allocated. ! This way, we can use the data structures already in the program ! by using VecPlaceArray() subroutine at a later stage. ! - PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,one,Ntot,PETSC_NULL_SCALAR,b,ierr)) + PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,one,Ntot,PETSC_NULL_SCALAR_ARRAY,b,ierr)) PetscCall(VecDuplicate(b,x,ierr)) ! Create linear solver context. This will be used repeatedly for all @@ -194,7 +194,6 @@ subroutine UserInitializeLinearSolver(m,n,userctx,ierr) userctx%m = m userctx%n = n - return end ! ----------------------------------------------------------------------- @@ -247,25 +246,25 @@ subroutine UserDoLinearSolver(rho,userctx,userb,userx,ierr) if (j .gt. 1) then JJ = II - m v = -0.5*(rho(II+1) + rho(JJ+1))*hy2 - PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(A,one,[II],one,[JJ],[v],INSERT_VALUES,ierr)) endif if (j .lt. n) then JJ = II + m v = -0.5*(rho(II+1) + rho(JJ+1))*hy2 - PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(A,one,[II],one,[JJ],[v],INSERT_VALUES,ierr)) endif if (i .gt. 1) then JJ = II - 1 v = -0.5*(rho(II+1) + rho(JJ+1))*hx2 - PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(A,one,[II],one,[JJ],[v],INSERT_VALUES,ierr)) endif if (i .lt. 
m) then JJ = II + 1 v = -0.5*(rho(II+1) + rho(JJ+1))*hx2 - PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(A,one,[II],one,[JJ],[v],INSERT_VALUES,ierr)) endif v = 2*rho(II+1)*(hx2+hy2) - PetscCall(MatSetValues(A,one,II,one,II,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(A,one,[II],one,[II],[v],INSERT_VALUES,ierr)) II = II+1 100 continue 110 continue @@ -322,7 +321,6 @@ subroutine UserDoLinearSolver(rho,userctx,userb,userx,ierr) PetscCall(VecResetArray(x,ierr)) PetscCall(VecResetArray(b,ierr)) - return end ! ------------------------------------------------------------------------ @@ -343,7 +341,6 @@ subroutine UserFinalizeLinearSolver(userctx,ierr) PetscCall(VecDestroy(userctx%b,ierr)) PetscCall(MatDestroy(userctx%A,ierr)) PetscCall(KSPDestroy(userctx%ksp,ierr)) - return end ! diff --git a/src/ksp/ksp/tutorials/ex14f.F90 b/src/ksp/ksp/tutorials/ex14f.F90 index a9f1943428f..fb7d9f8cd99 100644 --- a/src/ksp/ksp/tutorials/ex14f.F90 +++ b/src/ksp/ksp/tutorials/ex14f.F90 @@ -114,7 +114,7 @@ program main Ny = PETSC_DECIDE PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-Nx',Nx,flg,ierr)) PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-Ny',Ny,flg,ierr)) - PetscCallA(DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,mx,my,Nx,Ny,one,one,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)) + PetscCallA(DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,mx,my,Nx,Ny,one,one,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,da,ierr)) PetscCallA(DMSetFromOptions(da,ierr)) PetscCallA(DMSetUp(da,ierr)) ! @@ -329,7 +329,6 @@ subroutine FormInitialGuess(X,ierr) ! Restore vector PetscCall(VecRestoreArrayF90(X,xx,ierr)) - return end ! ------------------------------------------------------------------- @@ -408,7 +407,6 @@ subroutine ComputeFunction(X,F,ierr) PetscCall(VecRestoreArrayReadF90(localX,xx,ierr)) PetscCall(VecRestoreArrayF90(F,ff,ierr)) - return end ! ------------------------------------------------------------------- @@ -498,7 +496,7 @@ subroutine ComputeJacobian(X,jac,ierr) row = row + 1 grow(1) = ltog(row) if (i .eq. 0 .or. j .eq. 0 .or. i .eq. (mx-1) .or. j .eq. (my-1)) then - PetscCall(MatSetValues(jac,ione,grow,ione,grow,one,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,ione,grow,ione,grow,[one],INSERT_VALUES,ierr)) go to 20 endif v(1) = -hxdhy @@ -525,7 +523,6 @@ subroutine ComputeJacobian(X,jac,ierr) PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(VecRestoreArrayReadF90(localX,xx,ierr)) PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)) - return end ! ------------------------------------------------------------------- @@ -550,7 +547,6 @@ subroutine MyMult(J,X,F,ierr) ! instead write their own matrix-vector product routine ! 
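!  For illustration only, such a hand-written product could access the vector
!  entries through the Fortran array interface (xx and ff are assumed
!  pointer declarations, not part of this example):
!      PetscCall(VecGetArrayReadF90(X,xx,ierr))
!      PetscCall(VecGetArrayF90(F,ff,ierr))
!      ! ... compute ff(:) from xx(:) ...
!      PetscCall(VecRestoreArrayReadF90(X,xx,ierr))
!      PetscCall(VecRestoreArrayF90(F,ff,ierr))
!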
PetscCall(MatMult(B,X,F,ierr)) - return end !/*TEST diff --git a/src/ksp/ksp/tutorials/ex15f.F90 b/src/ksp/ksp/tutorials/ex15f.F90 index e77393386bf..f1c12a40d7a 100644 --- a/src/ksp/ksp/tutorials/ex15f.F90 +++ b/src/ksp/ksp/tutorials/ex15f.F90 @@ -76,8 +76,8 @@ program main PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)) PetscCallA(MatSetType(A, MATAIJ,ierr)) PetscCallA(MatSetFromOptions(A,ierr)) - PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,five,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,ierr)) ! Currently, all PETSc parallel matrix formats are partitioned by ! contiguous chunks of rows across the processors. Determine which @@ -99,22 +99,22 @@ program main j = II - i*n if (i.gt.0) then JJ = II - n - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif if (i.lt.m-1) then JJ = II + n - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif if (j.gt.0) then JJ = II - 1 - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif if (j.lt.n-1) then JJ = II + 1 - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif v = 4.0 - PetscCallA( MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)) + PetscCallA( MatSetValues(A,i1,[II],i1,[II],[v],ADD_VALUES,ierr)) 10 continue ! Assemble matrix, using the 2-step process: @@ -259,7 +259,6 @@ program main ! subroutine SampleShellPCSetUp(pc,ierr) use ex15fmodule - use petscksp implicit none PC pc diff --git a/src/ksp/ksp/tutorials/ex1f.F90 b/src/ksp/ksp/tutorials/ex1f.F90 index 6d704ff2d14..ac68dbcc21d 100644 --- a/src/ksp/ksp/tutorials/ex1f.F90 +++ b/src/ksp/ksp/tutorials/ex1f.F90 @@ -18,7 +18,6 @@ subroutine MyKSPConverged(ksp,n,rnorm,flag,defaultctx,ierr) ! 
Must call default convergence test on the 0th iteration PetscCall(KSPConvergedDefault(ksp, n, rnorm, flag, defaultctx, ierr)) - return end subroutine MyKSPConverged program main @@ -96,18 +95,18 @@ program main col(1) = i-1 col(2) = i col(3) = i+1 - PetscCallA(MatSetValues(A,i1,i,i3,col,value,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[i],i3,col,value,INSERT_VALUES,ierr)) 50 continue i = n - 1 col(1) = n - 2 col(2) = n - 1 - PetscCallA(MatSetValues(A,i1,i,i2,col,value,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[i],i2,col,value,INSERT_VALUES,ierr)) i = 0 col(1) = 0 col(2) = 1 value(1) = 2.0 value(2) = -1.0 - PetscCallA(MatSetValues(A,i1,i,i2,col,value,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[i],i2,col,value,INSERT_VALUES,ierr)) PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/ksp/ksp/tutorials/ex22f.F90 b/src/ksp/ksp/tutorials/ex22f.F90 index f43a33cc043..932cbf4936b 100644 --- a/src/ksp/ksp/tutorials/ex22f.F90 +++ b/src/ksp/ksp/tutorials/ex22f.F90 @@ -29,7 +29,7 @@ program main i3 = 3 i1 = 1 PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr)) - PetscCallA(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,i3,i3,i3,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,i1,i1,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)) + PetscCallA(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,i3,i3,i3,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,i1,i1,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,da,ierr)) PetscCallA(DMSetFromOptions(da,ierr)) PetscCallA(DMSetUp(da,ierr)) PetscCallA(KSPSetDM(ksp,da,ierr)) @@ -62,7 +62,6 @@ subroutine ComputeRHS(ksp,b,ctx,ierr) h = 1.0/real((mx-1)*(my-1)*(mz-1)) PetscCall(VecSet(b,h,ierr)) - return end subroutine ComputeMatrix(ksp,JJ,jac,ctx,ierr) @@ -139,7 +138,6 @@ subroutine ComputeMatrix(ksp,JJ,jac,ctx,ierr) PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)) - return end !/*TEST diff --git a/src/ksp/ksp/tutorials/ex2f.F90 b/src/ksp/ksp/tutorials/ex2f.F90 index 48e7594cf1b..59376b8131a 100644 --- a/src/ksp/ksp/tutorials/ex2f.F90 +++ b/src/ksp/ksp/tutorials/ex2f.F90 @@ -110,22 +110,22 @@ program main j = II - i*n if (i.gt.0) then JJ = II - n - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif if (i.lt.m-1) then JJ = II + n - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif if (j.gt.0) then JJ = II - 1 - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif if (j.lt.n-1) then JJ = II + 1 - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif v = 4.0 - PetscCallA( MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)) + PetscCallA( MatSetValues(A,ione,[II],ione,[II],[v],INSERT_VALUES,ierr)) 10 continue ! 
Assemble matrix, using the 2-step process: diff --git a/src/ksp/ksp/tutorials/ex44f.F90 b/src/ksp/ksp/tutorials/ex44f.F90 index 07e1bd89ef4..427a499f9c5 100644 --- a/src/ksp/ksp/tutorials/ex44f.F90 +++ b/src/ksp/ksp/tutorials/ex44f.F90 @@ -61,7 +61,7 @@ subroutine ComputeRHS(da,x,ierr) PetscInt xs,xm,i,mx PetscScalar hx PetscScalar, pointer :: xx(:) - PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr)) PetscCall(DMDAGetCorners(da,xs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,xm,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) hx = 1.0_PETSC_REAL_KIND/(mx-1) PetscCall(DMDAVecGetArrayF90(da,x,xx,ierr)) @@ -70,7 +70,6 @@ subroutine ComputeRHS(da,x,ierr) xx(i) = i*hx enddo PetscCall(DMDAVecRestoreArrayF90(da,x,xx,ierr)) - return end subroutine ComputeMatrix(da,J,ierr) @@ -84,7 +83,7 @@ subroutine ComputeMatrix(da,J,ierr) PetscScalar hx,one one = 1.0 - PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr)) PetscCall(DMDAGetCorners(da,xs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,xm,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) hx = 1.0_PETSC_REAL_KIND/(mx-1) do i=xs,xs+xm-1 @@ -98,7 +97,6 @@ subroutine ComputeMatrix(da,J,ierr) enddo PetscCall(MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY,ierr)) - return end !/*TEST diff --git a/src/ksp/ksp/tutorials/ex45f.F90 b/src/ksp/ksp/tutorials/ex45f.F90 index c48a151d510..1c817e7e7d3 100644 --- a/src/ksp/ksp/tutorials/ex45f.F90 +++ b/src/ksp/ksp/tutorials/ex45f.F90 @@ -16,7 +16,7 @@ program main PetscCallA(PetscInitialize(ierr)) PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr)) - PetscCallA(DMDACreate2D(PETSC_COMM_WORLD, DM_BOUNDARY_NONE,DM_BOUNDARY_NONE, DMDA_STENCIL_STAR,three,three,PETSC_DECIDE,PETSC_DECIDE,one,one, PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, dm, ierr)) + PetscCallA(DMDACreate2D(PETSC_COMM_WORLD, DM_BOUNDARY_NONE,DM_BOUNDARY_NONE, DMDA_STENCIL_STAR,three,three,PETSC_DECIDE,PETSC_DECIDE,one,one, PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY, dm, ierr)) PetscCallA(DMSetFromOptions(dm,ierr)) PetscCallA(DMSetUp(dm,ierr)) PetscCallA(KSPSetDM(ksp,dm,ierr)) diff --git a/src/ksp/ksp/tutorials/ex52.c b/src/ksp/ksp/tutorials/ex52.c index 5a932f91208..deb1b14f855 100644 --- a/src/ksp/ksp/tutorials/ex52.c +++ b/src/ksp/ksp/tutorials/ex52.c @@ -496,7 +496,7 @@ int main(int argc, char **args) suffix: mumps_omp_3 nsize: 4 requires: mumps hwloc openmp pthread defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) - args: -use_mumps_ch -mat_mumps_use_omp_threads 3 + args: -use_mumps_ch -mat_mumps_use_omp_threads 3 -mat_mumps_icntl_48 0 # Ignore the warning since we are intentionally testing the imbalanced case filter: grep -v 
"Warning: number of OpenMP threads" output_file: output/ex52_1.out diff --git a/src/ksp/ksp/tutorials/ex52f.F90 b/src/ksp/ksp/tutorials/ex52f.F90 index a0d0d96a2ce..97a890b4f5e 100644 --- a/src/ksp/ksp/tutorials/ex52f.F90 +++ b/src/ksp/ksp/tutorials/ex52f.F90 @@ -50,8 +50,8 @@ program main PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)) PetscCallA(MatSetType(A, MATAIJ,ierr)) PetscCallA(MatSetFromOptions(A,ierr)) - PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,five,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr)) @@ -68,22 +68,22 @@ program main j = II - i*n if (i.gt.0) then JJ = II - n - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif if (i.lt.m-1) then JJ = II + n - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif if (j.gt.0) then JJ = II - 1 - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif if (j.lt.n-1) then JJ = II + 1 - PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[II],i1,[JJ],[v],ADD_VALUES,ierr)) endif v = 4.0 - PetscCallA( MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)) + PetscCallA( MatSetValues(A,i1,[II],i1,[II],[v],ADD_VALUES,ierr)) 10 continue ! Assemble matrix, using the 2-step process: diff --git a/src/ksp/ksp/tutorials/ex54.c b/src/ksp/ksp/tutorials/ex54.c index fe11cdb81fc..d0722523997 100644 --- a/src/ksp/ksp/tutorials/ex54.c +++ b/src/ksp/ksp/tutorials/ex54.c @@ -229,7 +229,7 @@ int main(int argc, char **args) test: suffix: Classical - args: -ne 49 -alpha 1.e-3 -ksp_type cg -pc_type gamg -mg_levels_ksp_max_it 2 -pc_gamg_type classical -ksp_monitor -ksp_converged_reason -mg_levels_esteig_ksp_type cg -mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.1 -mat_coarsen_type mis + args: -ne 49 -alpha 1.e-3 -ksp_type cg -pc_type gamg -mg_levels_ksp_max_it 2 -pc_gamg_type classical -ksp_monitor -ksp_converged_reason -mg_levels_esteig_ksp_type cg -mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.1 -pc_gamg_mat_coarsen_type mis output_file: output/ex54_classical.out test: @@ -244,5 +244,5 @@ int main(int argc, char **args) nsize: 4 filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 8/Linear solve converged due to CONVERGED_RTOL iterations 7/g" suffix: hem - args: -ne 39 -ksp_type cg -pc_type gamg -pc_gamg_type agg -ksp_rtol 1e-4 -ksp_norm_type unpreconditioned -mat_coarsen_type hem -ksp_converged_reason -ksp_norm_type unpreconditioned + args: -ne 39 -ksp_type cg -pc_type gamg -pc_gamg_type agg -ksp_rtol 1e-4 -ksp_norm_type unpreconditioned -pc_gamg_mat_coarsen_type hem -ksp_converged_reason -ksp_norm_type unpreconditioned TEST*/ diff --git a/src/ksp/ksp/tutorials/ex54f.F90 b/src/ksp/ksp/tutorials/ex54f.F90 index 07f2f74bc76..3e45123077a 100644 --- a/src/ksp/ksp/tutorials/ex54f.F90 +++ b/src/ksp/ksp/tutorials/ex54f.F90 @@ -92,7 +92,7 @@ program main else PetscCallA(MatSetType( Amat, MATMPIAIJ, ierr)) endif - PetscCallA(MatMPIAIJSetPreallocation(Amat,f9,PETSC_NULL_INTEGER,f6,PETSC_NULL_INTEGER, ierr)) + 
PetscCallA(MatMPIAIJSetPreallocation(Amat,f9,PETSC_NULL_INTEGER_ARRAY,f6,PETSC_NULL_INTEGER_ARRAY, ierr)) PetscCallA(MatSetFromOptions( Amat, ierr)) PetscCallA(MatSetUp( Amat, ierr)) PetscCallA(MatGetOwnershipRange( Amat, Istart, Iend, ierr)) @@ -160,7 +160,7 @@ program main endif ! add element if (qj > 0) then ! set rhs val = h*h*exp(-100*((x+h/2)-blb(1))**2)*exp(-100*((y+h/2)-blb(2))**2) - PetscCallA(VecSetValues(bvec,one,geq,val,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(bvec,one,[geq],[val],INSERT_VALUES,ierr)) endif enddo PetscCallA(MatAssemblyBegin(Amat,MAT_FINAL_ASSEMBLY,ierr)) @@ -436,13 +436,13 @@ PetscReal function ex54_psi(x,y) ! requires: !single ! test: ! suffix: misk -! args: -mat_coarsen_type misk -pc_gamg_aggressive_coarsening 0 -ksp_monitor_short +! args: -pc_gamg_mat_coarsen_type misk -pc_gamg_aggressive_coarsening 0 -ksp_monitor_short ! test: ! suffix: mis -! args: -mat_coarsen_type mis -ksp_monitor_short +! args: -pc_gamg_mat_coarsen_type mis -ksp_monitor_short ! test: ! suffix: hem -! args: -mat_coarsen_type hem -ksp_converged_reason +! args: -pc_gamg_mat_coarsen_type hem -ksp_converged_reason ! filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[2-3]/Linear solve converged due to CONVERGED_RTOL iterations 11/g" ! !TEST*/ diff --git a/src/ksp/ksp/tutorials/ex55.c b/src/ksp/ksp/tutorials/ex55.c index 34dd8f6924a..c12fa5278dc 100644 --- a/src/ksp/ksp/tutorials/ex55.c +++ b/src/ksp/ksp/tutorials/ex55.c @@ -270,7 +270,7 @@ int main(int argc, char **args) suffix: Classical nsize: 4 requires: !complex - args: -ne 29 -alpha 1.e-3 -ksp_type gmres -pc_type gamg -pc_gamg_type classical -mg_levels_ksp_max_it 5 -ksp_converged_reason -ksp_rtol 1e-3 -mat_coarsen_misk_distance 2 -pc_gamg_threshold 0 -pc_mg_levels 3 + args: -ne 29 -alpha 1.e-3 -ksp_type gmres -pc_type gamg -pc_gamg_type classical -mg_levels_ksp_max_it 5 -ksp_converged_reason -ksp_rtol 1e-3 -pc_gamg_mat_coarsen_misk_distance 2 -pc_gamg_threshold 0 -pc_mg_levels 3 output_file: output/ex55_classical.out filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 17/Linear solve converged due to CONVERGED_RTOL iterations 18/g" diff --git a/src/ksp/ksp/tutorials/ex56.c b/src/ksp/ksp/tutorials/ex56.c index 55b0e319e25..ea5687995cd 100644 --- a/src/ksp/ksp/tutorials/ex56.c +++ b/src/ksp/ksp/tutorials/ex56.c @@ -14,7 +14,7 @@ static PetscErrorCode MaybeLogStagePush(PetscLogStage stage) return log_stages ? PetscLogStagePush(stage) : PETSC_SUCCESS; } -static PetscErrorCode MaybeLogStagePop() +static PetscErrorCode MaybeLogStagePop(void) { return log_stages ? 
PetscLogStagePop() : PETSC_SUCCESS; } @@ -464,7 +464,7 @@ PetscErrorCode elem_3d_elast_v_25(PetscScalar *dd) testset: requires: !complex - args: -ne 11 -alpha 1.e-3 -ksp_type cg -pc_type gamg -pc_gamg_agg_nsmooths 1 -two_solves -ksp_converged_reason -use_mat_nearnullspace -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.2,0,1.05 -mg_levels_sub_pc_type lu -pc_gamg_asm_use_agg -mg_levels_pc_asm_overlap 0 -pc_gamg_parallel_coarse_grid_solver -mg_coarse_pc_type jacobi -mg_coarse_ksp_type cg -mat_coarsen_type hem -mat_coarsen_max_it 5 -ksp_rtol 1e-4 -ksp_norm_type unpreconditioned -pc_gamg_threshold .001 -mat_coarsen_strength_index 1,2 + args: -ne 11 -alpha 1.e-3 -ksp_type cg -pc_type gamg -pc_gamg_agg_nsmooths 1 -two_solves -ksp_converged_reason -use_mat_nearnullspace -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.2,0,1.05 -mg_levels_sub_pc_type lu -pc_gamg_asm_use_agg -mg_levels_pc_asm_overlap 0 -pc_gamg_parallel_coarse_grid_solver -mg_coarse_pc_type jacobi -mg_coarse_ksp_type cg -pc_gamg_mat_coarsen_type hem -pc_gamg_mat_coarsen_max_it 5 -ksp_rtol 1e-4 -ksp_norm_type unpreconditioned -pc_gamg_threshold .001 -pc_gamg_mat_coarsen_strength_index 1,2 test: suffix: 1 nsize: 1 @@ -485,10 +485,9 @@ PetscErrorCode elem_3d_elast_v_25(PetscScalar *dd) suffix: gamg args: -pc_type gamg -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -mg_levels_pc_jacobi_type rowl1 -mg_levels_pc_jacobi_rowl1_scale .5 -mg_levels_pc_jacobi_fixdiagonal test: - nsize: 1 suffix: baij filter: grep -v variant - args: -pc_type jacobi -pc_jacobi_type rowl1 -ksp_type cg -mat_type baij -ksp_view -ksp_rtol 1e-1 + args: -pc_type jacobi -pc_jacobi_type rowl1 -ksp_type cg -mat_type baij -ksp_view -ksp_rtol 1e-1 -two_solves false test: suffix: latebs diff --git a/src/ksp/ksp/tutorials/ex57f.F90 b/src/ksp/ksp/tutorials/ex57f.F90 index 0f6eba40af2..c24d70be21d 100644 --- a/src/ksp/ksp/tutorials/ex57f.F90 +++ b/src/ksp/ksp/tutorials/ex57f.F90 @@ -48,6 +48,7 @@ program main Mat A KSP ksp PetscRandom rctx + character*80 ksptype ! These variables are not currently used. ! 
PC pc
@@ -115,22 +116,22 @@ program main
          j = II - i*n
          if (i.gt.0) then
            JJ = II - n
-           PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
+           PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
          endif
          if (i.lt.m-1) then
            JJ = II + n
-           PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
+           PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
          endif
          if (j.gt.0) then
            JJ = II - 1
-           PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
+           PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
          endif
          if (j.lt.n-1) then
            JJ = II + 1
-           PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
+           PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
          endif
          v = 4.0
-        PetscCallA( MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr))
+        PetscCallA( MatSetValues(A,ione,[II],ione,[II],[v],INSERT_VALUES,ierr))
 10    continue
       PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
       PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))
@@ -195,6 +196,8 @@ program main
       PetscCallA(KSPSetOperators(ksp,A,A,ierr))
 
       PetscCallA(KSPSetType(ksp,KSPPREONLY,ierr))
+      PetscCallA(KSPGetType(ksp,ksptype,ierr))
+      PetscCheckA(ksptype == KSPPREONLY,PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Error')
       PetscCallA(KSPGetPC(ksp,pc,ierr))
       PetscCallA(PCSetType(pc,PCCHOLESKY,ierr))
 #ifdef PETSC_HAVE_MUMPS
diff --git a/src/ksp/ksp/tutorials/ex5f.F90 b/src/ksp/ksp/tutorials/ex5f.F90
index bcc660e79f4..e6fbc936eaa 100644
--- a/src/ksp/ksp/tutorials/ex5f.F90
+++ b/src/ksp/ksp/tutorials/ex5f.F90
@@ -82,26 +82,26 @@ program main
         v =-1.0; i = Ii/n; j = Ii - i*n
         if (i>0) then
           JJ = Ii - n
-          PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         if (i<m-1) then
           JJ = Ii + n
-          PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         if (j>0) then
           JJ = Ii - 1
-          PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         if (j<n-1) then
           JJ = Ii + 1
-          PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         if (i>1) then
           JJ=Ii-n-1
-          PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
         endif
        enddo
       else
@@ -153,7 +153,7 @@ program main
       do i=0,ldim-1
         iglobal = i + low
         v = real(i + 100*rank)
-        PetscCallA(VecSetValues(u,one,iglobal,v,INSERT_VALUES,ierr))
+        PetscCallA(VecSetValues(u,one,[iglobal],[v],INSERT_VALUES,ierr))
       enddo
 
!
Assemble vector, using the 2-step process:
@@ -235,26 +235,26 @@ program main
            v =-1.0; Ii=j + n*i
            if (i>0) then
              JJ = Ii - n
-             PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+             PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
            endif
            if (i<m-1) then
              JJ = Ii + n
-             PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+             PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
            endif
            if (j>0) then
              JJ = Ii - 1
-             PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+             PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
            endif
            if (j<n-1) then
              JJ = Ii + 1
-             PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+             PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
            endif
            if (i>1) then
              JJ=Ii-n-1
-             PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,ADD_VALUES,ierr))
+             PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
            endif
          enddo
       endif
@@ -283,10 +283,10 @@ program main
 
       if (rank /= 0) then
         v = 6.0*0.00001; Ii = 0; JJ = 0
-        PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,INSERT_VALUES,ierr))
+        PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],INSERT_VALUES,ierr))
       elseif (rank == size -1) then
         v = 6.0*0.00001; Ii = m*n-1; JJ = m*n-1
-        PetscCallA(MatSetValues(C,one,Ii,one,JJ,v,INSERT_VALUES,ierr))
+        PetscCallA(MatSetValues(C,one,[Ii],one,[JJ],[v],INSERT_VALUES,ierr))
       endif
diff --git a/src/ksp/ksp/tutorials/ex61f.F90 b/src/ksp/ksp/tutorials/ex61f.F90
index ab18fc836a1..6822fde18b4 100644
--- a/src/ksp/ksp/tutorials/ex61f.F90
+++ b/src/ksp/ksp/tutorials/ex61f.F90
@@ -115,8 +115,8 @@ program tpetsc
      col_f_vecx => Mcol_f_vecx(ith)
      col_f_ksp => Mcol_f_ksp(ith)
 
-     PetscCallA(MatCreateSeqAIJ( PETSC_COMM_SELF, nrow,ncol, nz_per_row,PETSC_NULL_INTEGER, col_f_mat, ierr))
-     PetscCallA(VecCreateSeqWithArray(PETSC_COMM_SELF,1,nrow,PETSC_NULL_SCALAR, col_f_vecb, ierr))
+     PetscCallA(MatCreateSeqAIJ( PETSC_COMM_SELF, nrow,ncol, nz_per_row,PETSC_NULL_INTEGER_ARRAY, col_f_mat, ierr))
+     PetscCallA(VecCreateSeqWithArray(PETSC_COMM_SELF,1,nrow,PETSC_NULL_SCALAR_ARRAY, col_f_vecb, ierr))
      PetscCallA(VecDuplicate(col_f_vecb, col_f_vecx,ierr))
      PetscCallA(KSPCreate(PETSC_COMM_SELF, col_f_ksp,ierr))
    enddo
diff --git a/src/ksp/ksp/tutorials/ex6f.F90 b/src/ksp/ksp/tutorials/ex6f.F90
index ac4b3e67c78..8509ed20d5f 100644
--- a/src/ksp/ksp/tutorials/ex6f.F90
+++ b/src/ksp/ksp/tutorials/ex6f.F90
@@ -64,22 +64,22 @@ program main
         j = II - i*n
         if (i.gt.0) then
           JJ = II - n
-          PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         if (i.lt.m-1) then
           JJ = II + n
-          PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         if (j.gt.0) then
           JJ = II - 1
-          PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         if (j.lt.n-1) then
           JJ = II + 1
-          PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr))
+          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
         endif
         v = 4.0
-        PetscCallA( MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr))
+        PetscCallA( MatSetValues(A,one,[II],one,[II],[v],ADD_VALUES,ierr))
 10   continue
 
!
Assemble matrix, using the 2-step process: @@ -175,7 +175,7 @@ subroutine solve1(ksp,A,x,b,u,count,nsteps,A2,ierr) PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr)) do 20, II=Istart,Iend-1 v = 2.0 - PetscCallA(MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[II],[v],ADD_VALUES,ierr)) 20 continue PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)) if (pflag) then diff --git a/src/ksp/ksp/tutorials/ex71.c b/src/ksp/ksp/tutorials/ex71.c index 02557bcebb3..97feb85107b 100644 --- a/src/ksp/ksp/tutorials/ex71.c +++ b/src/ksp/ksp/tutorials/ex71.c @@ -272,16 +272,33 @@ int main(int argc, char **args) if (user.multi_element) { ISLocalToGlobalMapping mapn; - PetscInt *e_glo = NULL; - - PetscCall(PetscMalloc1(nel * nen, &e_glo)); - PetscCall(ISLocalToGlobalMappingApplyBlock(map, nen * nel, e_loc, e_glo)); - PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)map), user.dof, nen * nel, e_glo, PETSC_OWN_POINTER, &mapn)); + PetscInt *el_glo = NULL, m, n, M, N, *el_sizes; + Mat lA; + + PetscCall(PetscMalloc1(nel * nen, &el_glo)); + PetscCall(ISLocalToGlobalMappingApplyBlock(map, nen * nel, e_loc, el_glo)); + PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)map), user.dof, nen * nel, el_glo, PETSC_OWN_POINTER, &mapn)); + PetscCall(MatGetLocalSize(A, &m, &n)); + PetscCall(MatGetSize(A, &M, &N)); + PetscCall(MatDestroy(&A)); + PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); + PetscCall(MatSetSizes(A, m, n, M, N)); + PetscCall(MatSetBlockSize(A, user.dof)); + PetscCall(MatSetType(A, MATIS)); PetscCall(MatISSetAllowRepeated(A, PETSC_TRUE)); PetscCall(MatSetLocalToGlobalMapping(A, mapn, mapn)); + PetscCall(MatISSetPreallocation(A, user.dof * nen, NULL, user.dof * nen, NULL)); PetscCall(ISLocalToGlobalMappingViewFromOptions(mapn, NULL, "-multi_view")); - PetscCall(MatSetDM(A, NULL)); PetscCall(ISLocalToGlobalMappingDestroy(&mapn)); + + /* The information set with MatSetVariableBlockSizes on the local mat + can be used to detect the local elements instead of having to analyze + the sparsity pattern of the local matrix */ + PetscCall(MatISGetLocalMat(A, &lA)); + PetscCall(PetscMalloc1(nel, &el_sizes)); + for (i = 0; i < nel; i++) el_sizes[i] = user.dof * nen; + PetscCall(MatSetVariableBlockSizes(lA, nel, el_sizes)); + PetscCall(PetscFree(el_sizes)); } /* we reorder the indices since the element matrices are given in lexicographic order, @@ -641,7 +658,7 @@ int main(int argc, char **args) output_file: output/ex71_dmda_matis_elast_3d.out test: nsize: 8 - filter: grep -v "variant HERMITIAN" + filter: grep -v "variant HERMITIAN" | sed -e "s/CONVERGED_RTOL iterations 1[0-9]/CONVERGED_RTOL iterations 13/g" suffix: bddc_elast_deluxe_layers_adapt requires: mumps !complex args: -pde_type Elasticity -cells 7,9,8 -dim 3 -ksp_converged_reason -pc_bddc_coarse_redundant_pc_type svd -ksp_error_if_not_converged -pc_bddc_monolithic -sub_schurs_mat_solver_type mumps -pc_bddc_use_deluxe_scaling -pc_bddc_adaptive_threshold 2.0 -pc_bddc_schur_layers {{1 10}separate_output} -pc_bddc_adaptive_userdefined {{0 1}separate output} -sub_schurs_schur_mat_type seqdense @@ -707,6 +724,7 @@ int main(int argc, char **args) args: -sub_0_pc_bddc_interface_ext_type dirichlet suffix: composite_bddc_dirichlet +# GDSW tests testset: nsize: 8 filter: grep -v "variant HERMITIAN" @@ -730,4 +748,15 @@ int main(int argc, char **args) suffix: gdsw_elast_adaptive args: -pde_type Elasticity -mg_levels_gdsw_tolerance 0.01 -ksp_monitor_singular_value -mg_levels_gdsw_userdefined {{0 
1}separate output} +# Multi-Element tests + test: + nsize: {{1 2 3}} + suffix: bddc_multi_element + args: -cells 3,3,3 -dim 3 -ksp_error_if_not_converged -multi_element -pde_type {{Poisson Elasticity}} -ksp_converged_reason + + test: + suffix: bddc_multi_square + output_file: output/ex71_bddc_multi_element.out + args: -cells 2,2 -dim 2 -ksp_error_if_not_converged -multi_element -pc_bddc_local_mat_graph_square 4 -ksp_converged_reason + TEST*/ diff --git a/src/ksp/ksp/tutorials/ex72.c b/src/ksp/ksp/tutorials/ex72.c index 6afc343391d..6eb32a0de1d 100644 --- a/src/ksp/ksp/tutorials/ex72.c +++ b/src/ksp/ksp/tutorials/ex72.c @@ -696,7 +696,7 @@ int main(int argc, char **args) testset: nsize: 4 - requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI_NUMVERSION) + requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI) args: -ksp_monitor_short -ksp_view test: suffix: xxt @@ -876,12 +876,64 @@ int main(int argc, char **args) suffix: hpddm_gen_non_hermitian output_file: output/ex72_2.out nsize: 4 - args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.7 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right + args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.7 -ksp_pc_side right test: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps !defined(PETSCTEST_VALGRIND) suffix: hpddm_gen_non_hermitian_baij output_file: output/ex72_10.out nsize: 4 timeoutfactor: 2 - args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.8 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2 -ksp_monitor_short + args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.8 -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2 -ksp_monitor_short + + # BDDC multiple subdomains per process tests + test: + requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) + suffix: matis_bddc_multisub_3d + nsize: {{1 2 3 4 5 6 7 8}} + args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMDA_9x9x9_3x3x3.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_use_faces + + test: + requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) + suffix: matis_bddc_multisub_2d + nsize: {{1 2 3 4 5 6 7 8}} + args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMDA_9x9_3x3.dat -pc_type bddc -ksp_type cg 
-ksp_norm_type natural -ksp_error_if_not_converged -mat_type is
+
+  test:
+    requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
+    suffix: matis_bddc_multisub_plex_2d
+    nsize: {{1 2 3 4 5 6 7 8}}
+    args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMPLEX_32x32_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is
+
+  test:
+    requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
+    suffix: matis_bddc_multisub_plex_3d
+    nsize: {{1 2 3 4 5 6 7 8}}
+    args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMPLEX_16x16x16_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is
+
+  test:
+    requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
+    suffix: matis_bddc_multisub_hcurl_2d
+    nsize: {{1 2 3 4 5 6 7 8}}
+    args: -f ${DATAFILESPATH}/matrices/matis/hcurl_mfem_amrquad2_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hcurl_mfem_amrquad2_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_local_mat_graph_square 1
+
+  test:
+    requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
+    suffix: matis_bddc_multisub_hdiv_3d
+    nsize: {{1 2 3 4 5 6 7 8}}
+    args: -f ${DATAFILESPATH}/matrices/matis/hdiv_mfem_inlinehex2_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hdiv_mfem_inlinehex2_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_use_local_mat_graph 0
+
+  # These two tests slightly depend on the number of processes since
+  # the Nedelec 3D support is not completely independent of the decomposition
+  test:
+    requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
+    suffix: matis_bddc_multisub_hcurl_3d
+    nsize: {{1 3 4 8}separate output}
+    args: -f ${DATAFILESPATH}/matrices/matis/hcurl_mfem_inlinehex_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hcurl_mfem_inlinehex_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_local_mat_graph_square 1
+
+  test:
+    requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
+    suffix: matis_bddc_multisub_hcurl_3d_amr
+    nsize: {{1 3 4 8}separate output}
+    args: -f ${DATAFILESPATH}/matrices/matis/hcurl_mfem_amrhex_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hcurl_mfem_amrhex_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_local_mat_graph_square 1
+
 TEST*/
diff --git a/src/ksp/ksp/tutorials/ex75f.F90 b/src/ksp/ksp/tutorials/ex75f.F90
index 839f7e861a7..2b10d0447c4 100644
--- a/src/ksp/ksp/tutorials/ex75f.F90
+++ b/src/ksp/ksp/tutorials/ex75f.F90
@@ -54,7 +54,7 @@ program main
       PetscCallA(KSPSetOperators(ksp,A,A,ierr))
       PetscCallA(KSPSetFromOptions(ksp,ierr))
       PetscCallA(KSPSetUp(ksp,ierr))
-      if (U .ne. PETSC_NULL_MAT) then
+      if (.not.
PetscObjectIsNull(U)) then PetscCallA(KSPHPDDMSetDeflationMat(ksp,U,ierr)) PetscCallA(MatDestroy(U,ierr)) endif diff --git a/src/ksp/ksp/tutorials/ex76.c b/src/ksp/ksp/tutorials/ex76.c index f226ca5859b..de7ba5ccfc5 100644 --- a/src/ksp/ksp/tutorials/ex76.c +++ b/src/ksp/ksp/tutorials/ex76.c @@ -366,6 +366,7 @@ int main(int argc, char **args) suffix: geneo_share_cholesky_matstructure output_file: output/ex76_geneo_share.out # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky + filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 14/Linear solve converged due to CONVERGED_RTOL iterations 15/g" args: -pc_hpddm_levels_1_sub_pc_type cholesky -mat_type {{baij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure same -set_rhs {{false true} shared output} test: requires: mumps @@ -429,7 +430,7 @@ int main(int argc, char **args) testset: # converge really poorly because of a tiny -pc_hpddm_levels_1_eps_threshold, but needed for proper code coverage where some subdomains don't call EPSSolve() requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) nsize: 4 - args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_threshold 0.005 -pc_hpddm_levels_1_eps_use_inertia -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_coarse_pc_type cholesky -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_define_subdomains -pc_hpddm_has_neumann -ksp_rtol 0.9 + args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_threshold 0.005 -pc_hpddm_levels_1_eps_use_inertia -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_define_subdomains -pc_hpddm_has_neumann -ksp_rtol 0.9 filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1/Linear solve converged due to CONVERGED_RTOL iterations 141/g" test: suffix: inertia_petsc diff --git a/src/ksp/ksp/tutorials/ex7f.F90 b/src/ksp/ksp/tutorials/ex7f.F90 index 6dde72606ff..328c7c3edcc 100644 --- a/src/ksp/ksp/tutorials/ex7f.F90 +++ b/src/ksp/ksp/tutorials/ex7f.F90 @@ -52,34 +52,34 @@ program main PetscCallA( MatCreate(PETSC_COMM_WORLD,A,ierr)) PetscCallA( MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)) PetscCallA( MatSetFromOptions(A,ierr)) - PetscCallA( MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)) - PetscCallA( MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)) + PetscCallA( MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,five,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA( MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA( MatGetOwnershipRange(A,Istart,Iend,ierr)) do Ii=Istart,Iend-1 v =-1.0; i = Ii/n; j = Ii - i*n if (i>0) then JJ = Ii - n - PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr)) endif if (i0) then JJ = Ii - 1 - PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr)) endif if (j + +static char help[] = "Solves a linear system with a MatNest and PCFIELDSPLIT with fields defined from the command line.\n\n"; +/* similar to ex81.c except the PCFIELDSPLIT fields are defined from the command line with fields instead of hardwired IS from 
the MATNEST */ + +#define Q 5 /* everything is hardwired for a 5x5 MatNest for now */ + +int main(int argc, char **args) +{ + KSP ksp; + PC pc; + Mat array[Q * Q], A, a; + Vec b, x, sub; + IS rows[Q]; + PetscInt i, M, N; + PetscMPIInt size; + PetscRandom rctx; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &args, NULL, help)); + PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size)); + PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx)); + PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp)); + size = PetscMax(3, size); + for (i = 0; i < Q * Q; ++i) array[i] = NULL; + for (i = 0; i < Q; ++i) { + if (i == 0) { + PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, size, size, 1, NULL, 0, NULL, array + (Q + 1) * i)); + } else if (i == 1 || i == 3) { + PetscCall(MatCreateSBAIJ(PETSC_COMM_WORLD, 2, PETSC_DECIDE, PETSC_DECIDE, size, size, 1, NULL, 0, NULL, array + (Q + 1) * i)); + } else if (i == 2 || i == 4) { + PetscCall(MatCreateBAIJ(PETSC_COMM_WORLD, 2, PETSC_DECIDE, PETSC_DECIDE, size, size, 1, NULL, 0, NULL, array + (Q + 1) * i)); + } + PetscCall(MatAssemblyBegin(array[(Q + 1) * i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(array[(Q + 1) * i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatShift(array[(Q + 1) * i], 100 + i + 1)); + if (i == 3) { + PetscCall(MatDuplicate(array[(Q + 1) * i], MAT_COPY_VALUES, &a)); + PetscCall(MatDestroy(array + (Q + 1) * i)); + PetscCall(MatCreateHermitianTranspose(a, array + (Q + 1) * i)); + PetscCall(MatDestroy(&a)); + } + size *= 2; + } + PetscCall(MatGetSize(array[0], &M, NULL)); + for (i = 2; i < Q; ++i) { + PetscCall(MatGetSize(array[(Q + 1) * i], NULL, &N)); + if (i != Q - 1) { + PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, i == 3 ? N : M, i == 3 ? M : N, 0, NULL, 0, NULL, array + i)); + } else { + PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, M, N, NULL, array + i)); + } + PetscCall(MatAssemblyBegin(array[i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(array[i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatSetRandom(array[i], rctx)); + if (i == 3) { + PetscCall(MatDuplicate(array[i], MAT_COPY_VALUES, &a)); + PetscCall(MatDestroy(array + i)); + PetscCall(MatCreateHermitianTranspose(a, array + i)); + PetscCall(MatDestroy(&a)); + } + } + PetscCall(MatGetSize(array[0], NULL, &N)); + for (i = 2; i < Q; i += 2) { + PetscCall(MatGetSize(array[(Q + 1) * i], &M, NULL)); + if (i != Q - 1) { + PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, M, N, 2, NULL, 2, NULL, array + Q * i)); + } else { + PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, N, M, NULL, array + Q * i)); + } + PetscCall(MatAssemblyBegin(array[Q * i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(array[Q * i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatSetRandom(array[Q * i], rctx)); + if (i == Q - 1) { + PetscCall(MatDuplicate(array[Q * i], MAT_COPY_VALUES, &a)); + PetscCall(MatDestroy(array + Q * i)); + PetscCall(MatCreateHermitianTranspose(a, array + Q * i)); + PetscCall(MatDestroy(&a)); + } + } + PetscCall(MatGetSize(array[(Q + 1) * 3], &M, NULL)); + for (i = 1; i < 3; ++i) { + PetscCall(MatGetSize(array[(Q + 1) * i], NULL, &N)); + PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, M, N, 2, NULL, 2, NULL, array + Q * 3 + i)); + PetscCall(MatAssemblyBegin(array[Q * 3 + i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(array[Q * 3 + i], MAT_FINAL_ASSEMBLY)); + PetscCall(MatSetRandom(array[Q * 3 + i], rctx)); + } + PetscCall(MatGetSize(array[(Q + 1) * 1], NULL, &N)); + 
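[Editor's note: the duplicate/destroy/wrap dance that ex81a.c repeats for several blocks is easy to misread, so here it is in isolation. This is a minimal sketch (helper name mine, not from the patch) of replacing a MatNest block with a lazy Hermitian transpose of itself, using exactly the calls the example uses.

#include <petscmat.h>

/* Replace *block by the Hermitian transpose of a copy of itself.
   MatCreateHermitianTranspose() never assembles A^H: it returns a
   wrapper whose MatMult applies A^H on the fly and which keeps its
   own reference to the copy, so the local handle can be destroyed. */
static PetscErrorCode ReplaceByHermitianTranspose(Mat *block)
{
  Mat a;

  PetscFunctionBeginUser;
  PetscCall(MatDuplicate(*block, MAT_COPY_VALUES, &a));
  PetscCall(MatDestroy(block));
  PetscCall(MatCreateHermitianTranspose(a, block));
  PetscCall(MatDestroy(&a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

End editor's note.]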
PetscCall(MatGetSize(array[(Q + 1) * (Q - 1)], &M, NULL)); + PetscCall(MatCreateBAIJ(PETSC_COMM_WORLD, 2, PETSC_DECIDE, PETSC_DECIDE, M, N, 0, NULL, 0, NULL, &a)); + PetscCall(MatAssemblyBegin(a, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(a, MAT_FINAL_ASSEMBLY)); + PetscCall(MatCreateHermitianTranspose(a, array + Q + Q - 1)); + PetscCall(MatDestroy(&a)); + PetscCall(MatDestroy(array + Q * Q - 1)); + PetscCall(MatCreateNest(PETSC_COMM_WORLD, Q, NULL, Q, NULL, array, &A)); + for (i = 0; i < Q; ++i) PetscCall(MatDestroy(array + (Q + 1) * i)); + for (i = 2; i < Q; ++i) { + PetscCall(MatDestroy(array + i)); + PetscCall(MatDestroy(array + Q * i)); + } + for (i = 1; i < 3; ++i) PetscCall(MatDestroy(array + Q * 3 + i)); + PetscCall(MatDestroy(array + Q + Q - 1)); + PetscCall(KSPSetOperators(ksp, A, A)); + PetscCall(MatNestGetISs(A, rows, NULL)); + PetscCall(KSPGetPC(ksp, &pc)); + PetscCall(PCSetType(pc, PCFIELDSPLIT)); + PetscCall(KSPSetFromOptions(ksp)); + PetscCall(MatCreateVecs(A, &b, &x)); + PetscCall(VecSetRandom(b, rctx)); + PetscCall(VecGetSubVector(b, rows[Q - 1], &sub)); + PetscCall(VecSet(sub, 0.0)); + PetscCall(VecRestoreSubVector(b, rows[Q - 1], &sub)); + PetscCall(KSPSolve(ksp, b, x)); + PetscCall(VecDestroy(&b)); + PetscCall(VecDestroy(&x)); + PetscCall(PetscRandomDestroy(&rctx)); + PetscCall(MatDestroy(&A)); + PetscCall(KSPDestroy(&ksp)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + nsize: 3 + suffix: 1 + requires: !complex !single + filter: sed -e "s/CONVERGED_ATOL/CONVERGED_RTOL/g" -e "s/iterations [4-5]/iterations 4/g" -e "s/hermitiantranspose/transpose/g" + args: -pc_fieldsplit_0_fields 0,1 -pc_fieldsplit_1_fields 2,3 -pc_fieldsplit_2_fields 4 -pc_type fieldsplit -ksp_converged_reason -fieldsplit_pc_type jacobi -ksp_view + + test: + suffix: 2 + nsize: 3 + requires: !complex !single + filter: sed -e "s/CONVERGED_ATOL/CONVERGED_RTOL/g" -e "s/iterations [4-5]/iterations 4/g" -e "s/hermitiantranspose/transpose/g" + args: -pc_type fieldsplit -ksp_converged_reason -fieldsplit_pc_type jacobi -ksp_view + +TEST*/ diff --git a/src/ksp/ksp/tutorials/ex83f.F90 b/src/ksp/ksp/tutorials/ex83f.F90 index cbcf14a373e..09a03299837 100644 --- a/src/ksp/ksp/tutorials/ex83f.F90 +++ b/src/ksp/ksp/tutorials/ex83f.F90 @@ -11,7 +11,7 @@ program main use petscksp implicit none - PetscInt i,n,nz + PetscInt i,n,nz,one PetscBool flg,equal PetscErrorCode ierr PetscInt,ALLOCATABLE :: ia(:) @@ -29,7 +29,7 @@ program main PC pc PetscCallA(PetscInitialize(ierr)) - + one = 1 n = 3 PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)) nz = 3*n - 4; @@ -83,8 +83,8 @@ program main PetscCallA(MatEqual(J,Jr,equal,ierr)) PetscCheckA(equal .eqv. 
PETSC_TRUE,PETSC_COMM_SELF,PETSC_ERR_PLIB,'Matrices J and Jr must be equal') - PetscCallA(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n,b,rhs,ierr)) - PetscCallA(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n,x,solution,ierr)) + PetscCallA(VecCreateSeqWithArray(PETSC_COMM_SELF,one,n,b,rhs,ierr)) + PetscCallA(VecCreateSeqWithArray(PETSC_COMM_SELF,one,n,x,solution,ierr)) PetscCallA(KSPCreate(PETSC_COMM_SELF,ksp,ierr)) PetscCallA(KSPSetErrorIfNotConverged(ksp,PETSC_TRUE,ierr)) diff --git a/src/ksp/ksp/tutorials/ex84.c b/src/ksp/ksp/tutorials/ex84.c index 8e9c724e532..7b42faa4724 100644 --- a/src/ksp/ksp/tutorials/ex84.c +++ b/src/ksp/ksp/tutorials/ex84.c @@ -34,7 +34,7 @@ int main(int argc, char **argv) PetscCall(MatSetValue(A, 4, 4, 6.0, INSERT_VALUES)); PetscCall(MatSetValue(A, 4, 5, -1.0, INSERT_VALUES)); PetscCall(MatSetValue(A, 5, 5, 7.0, INSERT_VALUES)); - PetscCall(MatSetValue(A, 5, 4, -1, INSERT_VALUES)); + PetscCall(MatSetValue(A, 5, 4, -0.5, INSERT_VALUES)); } PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); @@ -72,7 +72,13 @@ int main(int argc, char **argv) PetscCall(PCRedistributeGetKSP(pc, &kspred)); PetscCall(KSPSetInitialGuessNonzero(kspred, PETSC_TRUE)); PetscCall(KSPSolve(ksp, b, x)); - + PetscCall(PetscOptionsClearValue(NULL, "-ksp_view")); + if (rank == 0) PetscCall(MatSetValue(A, 1, 2, 0.0, INSERT_VALUES)); + PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, PETSC_TRUE)); + PetscCall(KSPSetFromOptions(ksp)); + PetscCall(KSPSolveTranspose(ksp, b, x)); PetscCall(KSPDestroy(&ksp)); PetscCall(VecDestroy(&b)); PetscCall(VecDestroy(&x)); diff --git a/src/ksp/ksp/tutorials/ex86.c b/src/ksp/ksp/tutorials/ex86.c index 8274c41643b..155f6425bd2 100644 --- a/src/ksp/ksp/tutorials/ex86.c +++ b/src/ksp/ksp/tutorials/ex86.c @@ -141,7 +141,7 @@ int main(int argc, char **args) /*TEST testset: - requires: hypre !complex !defined(PETSC_HAVE_HYPRE_DEVICE) + requires: hypre !single !complex !defined(PETSC_HAVE_HYPRE_DEVICE) args: -ksp_monitor -pc_type hypre -pc_hypre_type boomeramg -pc_hypre_boomeramg_grid_sweeps_down 0 -pc_hypre_boomeramg_grid_sweeps_up 1 -pc_hypre_boomeramg_grid_sweeps_coarse 2 -pc_hypre_boomeramg_max_levels 2 -ksp_rtol 1e-7 -pc_hypre_boomeramg_max_coarse_size 16 -n 33 -ksp_max_it 30 -pc_hypre_boomeramg_relax_type_all Jacobi test: suffix: hypre diff --git a/src/ksp/ksp/tutorials/ex87.c b/src/ksp/ksp/tutorials/ex87.c index cfd7752f4f7..879acf1ebbc 100644 --- a/src/ksp/ksp/tutorials/ex87.c +++ b/src/ksp/ksp/tutorials/ex87.c @@ -77,6 +77,13 @@ int main(int argc, char **args) else PetscCall(MatHermitianTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2])); } if (flg[0]) PetscCall(MatDestroy(A + 3)); + else { + PetscCall(PetscOptionsGetBool(NULL, NULL, "-diagonal_A11", flg, NULL)); + if (flg[0]) { + PetscCall(MatDestroy(A + 3)); + PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, m, m, M, M, PETSC_SMALL, A + 3)); + } + } /* global coefficient matrix */ PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, A, &S)); PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp)); @@ -215,5 +222,17 @@ PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, I suffix: threshold output_file: output/ex87_1_petsc_system-elasticity.out args: -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.2 -fieldsplit_1_pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -successive_solves + 
testset: + requires: datafilespath + nsize: 4 + args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2 -system stokes -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3 + test: + suffix: diagonal + output_file: output/ex87_1_petsc_system-stokes.out + args: -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_0_pc_hpddm_has_neumann -diagonal_A11 {{false true}shared output} + test: + suffix: harmonic_overlap_2 + output_file: output/ex87_1_petsc_system-stokes.out + args: -fieldsplit_0_pc_hpddm_harmonic_overlap 2 -fieldsplit_0_pc_hpddm_levels_1_svd_nsv 20 -diagonal_A11 TEST*/ diff --git a/src/ksp/ksp/tutorials/output/ex15_tsirm.out b/src/ksp/ksp/tutorials/output/ex15_tsirm.out index 2ead31bc135..8fa5152e01a 100644 --- a/src/ksp/ksp/tutorials/output/ex15_tsirm.out +++ b/src/ksp/ksp/tutorials/output/ex15_tsirm.out @@ -1,2 +1,3 @@ + 0 KSP Residual norm 15.748 1 KSP Residual norm 3.59191e-07 Norm of error 6.73028e-06 iterations 30 diff --git a/src/ksp/ksp/tutorials/output/ex27_4f.out b/src/ksp/ksp/tutorials/output/ex27_4f.out index f80c607a68c..219bde9ae63 100644 --- a/src/ksp/ksp/tutorials/output/ex27_4f.out +++ b/src/ksp/ksp/tutorials/output/ex27_4f.out @@ -12,8 +12,8 @@ PC Object: 4 MPI processes FieldSplit with Schur preconditioner, blocksize = 1, factorization FULL Preconditioner for the Schur complement formed from S itself Split info: - Split number 0 Defined by IS - Split number 1 Defined by IS + Split number 0 Fields 0 + Split number 1 Fields 1 KSP solver for A00 block KSP Object: (fieldsplit_0_) 4 MPI processes type: preonly diff --git a/src/ksp/ksp/tutorials/output/ex2_help.out b/src/ksp/ksp/tutorials/output/ex2_help.out index d641a0ce53a..37561ffa9e7 100644 --- a/src/ksp/ksp/tutorials/output/ex2_help.out +++ b/src/ksp/ksp/tutorials/output/ex2_help.out @@ -87,6 +87,8 @@ Viewer (-mat_view) options: Vector (Vec) options: -vec_type : Vector type (one of) shared standard mpi seq (VecSetType) -vec_bind_below: : Set the size threshold (in local entries) below which the Vec is bound to the CPU (VecBindToCPU) +Krylov Method (KSP) options: + -ksp_type : Krylov method (one of) fetidp pipefgmres stcg tsirm tcqmr groppcg nash fcg symmlq lcd minres cgs preonly lgmres pipecgrr fbcgs pipeprcg pipecg ibcgs fgmres qcg gcr cgne pipefcg pipecr pipebcgs bcgsl pipecg2 pipelcg gltr cg tfqmr pgmres lsqr pipegcr bicg cgls bcgs cr dgmres none qmrcgs gmres richardson chebyshev fbcgsr (KSPSetType) Preconditioner (PC) options: -pc_type : Preconditioner (one of) nn tfs hmg bddc composite ksp lu icc patch bjacobi eisenstat deflation vpbjacobi redistribute sor mg pbjacobi cholesky mat qr svd fieldsplit mpi kaczmarz jacobi telescope redundant cp shell galerkin ilu exotic gasm gamg none lmvm asm lsc (PCSetType) -pc_use_amat: use Amat (instead of Pmat) to define preconditioner in nested inner solves (PCSetUseAmat) @@ -107,8 +109,6 @@ Options for SEQSBAIJ matrix: -mat_inode_limit: : Do not use 
inodes larger then this value (None) -pc_factor_mat_ordering_type : Reordering to reduce nonzeros in factored matrix (one of) rowlength spectral nd qmd natural rcm 1wd (PCFactorSetMatOrderingType) -pc_factor_levels: : levels of fill (PCFactorSetLevels) -Krylov Method (KSP) options: - -ksp_type : Krylov method (one of) fetidp pipefgmres stcg tsirm tcqmr groppcg nash fcg symmlq lcd minres cgs preonly lgmres pipecgrr fbcgs pipeprcg pipecg ibcgs fgmres qcg gcr cgne pipefcg pipecr pipebcgs bcgsl pipecg2 pipelcg gltr cg tfqmr pgmres lsqr pipegcr bicg cgls bcgs cr dgmres none qmrcgs gmres richardson chebyshev fbcgsr (KSPSetType) -ksp_monitor_cancel: Remove any hardwired monitor routines (KSPMonitorCancel) Viewer (-ksp_monitor) options: -ksp_monitor ascii[:[filename][:[format][:append]]]: Prints object to stdout or ASCII file (PetscOptionsGetViewer) diff --git a/src/ksp/ksp/tutorials/output/ex56_baij.out b/src/ksp/ksp/tutorials/output/ex56_baij.out index 5582cf542a5..98f1331f926 100644 --- a/src/ksp/ksp/tutorials/output/ex56_baij.out +++ b/src/ksp/ksp/tutorials/output/ex56_baij.out @@ -1,55 +1,18 @@ Linear solve converged due to CONVERGED_RTOL iterations 50 -KSP Object: 1 MPI process +KSP Object: 8 MPI processes type: cg maximum iterations=10000, initial guess is zero tolerances: relative=0.1, absolute=1e-50, divergence=10000. left preconditioning using UNPRECONDITIONED norm type for convergence test -PC Object: 1 MPI process +PC Object: 8 MPI processes type: jacobi - type ROWL1 (l1-norm off-diagonal scaling 1.000000e+00) + type ROWL1 (l1-norm off-diagonal scaling 5.000000e-01) linear system matrix = precond matrix: - Mat Object: 1 MPI process - type: seqbaij + Mat Object: 8 MPI processes + type: mpibaij rows=12288, cols=12288, bs=3 total: nonzeros=876024, allocated nonzeros=876024 total number of mallocs used during MatSetValues calls=0 has attached near null space block size is 3 -Linear solve converged due to CONVERGED_RTOL iterations 50 -KSP Object: 1 MPI process - type: cg - maximum iterations=10000, initial guess is zero - tolerances: relative=0.1, absolute=1e-50, divergence=10000. - left preconditioning - using UNPRECONDITIONED norm type for convergence test -PC Object: 1 MPI process - type: jacobi - type ROWL1 (l1-norm off-diagonal scaling 1.000000e+00) - linear system matrix = precond matrix: - Mat Object: 1 MPI process - type: seqbaij - rows=12288, cols=12288, bs=3 - total: nonzeros=876024, allocated nonzeros=876024 - total number of mallocs used during MatSetValues calls=0 - has attached near null space - block size is 3 -Linear solve converged due to CONVERGED_RTOL iterations 50 -KSP Object: 1 MPI process - type: cg - maximum iterations=10000, initial guess is zero - tolerances: relative=0.1, absolute=1e-50, divergence=10000. 
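[Editor's note: the "type ROWL1" lines in this view come from the l1-scaled Jacobi variant, in which each diagonal entry is augmented by a scaled sum of off-diagonal row magnitudes; the 0.5 scaling in the new 8-process output is set separately at run time. A hedged sketch of selecting it programmatically, assuming the PCJacobiType value PC_JACOBI_ROWL1 of this release (helper name mine):

#include <petscksp.h>

/* l1-Jacobi: the diagonal becomes |a_ii| + scale * sum_{j!=i} |a_ij|,
   which tends to keep the smoother convergent where plain point Jacobi
   is not; equivalent to -pc_type jacobi -pc_jacobi_type rowl1 */
static PetscErrorCode UseRowL1Jacobi(KSP ksp)
{
  PC pc;

  PetscFunctionBeginUser;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCJACOBI));
  PetscCall(PCJacobiSetType(pc, PC_JACOBI_ROWL1));
  PetscFunctionReturn(PETSC_SUCCESS);
}

End editor's note.]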
- left preconditioning - using UNPRECONDITIONED norm type for convergence test -PC Object: 1 MPI process - type: jacobi - type ROWL1 (l1-norm off-diagonal scaling 1.000000e+00) - linear system matrix = precond matrix: - Mat Object: 1 MPI process - type: seqbaij - rows=12288, cols=12288, bs=3 - total: nonzeros=876024, allocated nonzeros=876024 - total number of mallocs used during MatSetValues calls=0 - has attached near null space - block size is 3 -[0]main |b-Ax|/|b|=8.165280e-02, |b|=4.351790e+00, emax=7.499773e-01 diff --git a/src/ksp/ksp/tutorials/output/ex56_latebs-2.out b/src/ksp/ksp/tutorials/output/ex56_latebs-2.out index 470e1fe61cc..bdee6c4e302 100644 --- a/src/ksp/ksp/tutorials/output/ex56_latebs-2.out +++ b/src/ksp/ksp/tutorials/output/ex56_latebs-2.out @@ -30,7 +30,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 8 MPI processes + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.054 operator = 1.07125 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 8 MPI processes @@ -111,7 +113,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 8 MPI processes + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.054 operator = 1.07125 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 8 MPI processes @@ -203,7 +207,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 8 MPI processes + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.054 operator = 1.07125 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 8 MPI processes diff --git a/src/ksp/ksp/tutorials/output/ex56_latebs.out b/src/ksp/ksp/tutorials/output/ex56_latebs.out index d7783361d4c..088ce88af42 100644 --- a/src/ksp/ksp/tutorials/output/ex56_latebs.out +++ b/src/ksp/ksp/tutorials/output/ex56_latebs.out @@ -28,7 +28,7 @@ 27 KSP Residual norm 0.0370298 28 KSP Residual norm 0.0115801 29 KSP Residual norm 0.00441017 -Linear solve converged due to CONVERGED_RTOL iterations 29 + Linear solve converged due to CONVERGED_RTOL iterations 29 KSP Object: 8 MPI processes type: cg maximum iterations=10000, initial guess is zero @@ -49,7 +49,9 @@ PC Object: 8 MPI processes Number of levels of aggressive coarsening 1 MIS-k aggressive coarsening MIS-2 coarsening on aggressive levels - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 8 MPI processes + type: misk + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.748 operator = 1.52855 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 8 MPI processes @@ -205,7 +207,9 @@ PC Object: 8 MPI processes Number of levels of aggressive coarsening 1 MIS-k aggressive coarsening MIS-2 coarsening on aggressive levels - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 8 MPI processes + type: misk + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.748 operator = 1.52855 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 8 MPI processes @@ -362,7 +366,9 @@ PC 
Object: 8 MPI processes Number of levels of aggressive coarsening 1 MIS-k aggressive coarsening MIS-2 coarsening on aggressive levels - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 8 MPI processes + type: misk + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.748 operator = 1.52855 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 8 MPI processes diff --git a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_both_approx.out b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_both_approx.out index ef828ee8da4..399389e5fe8 100644 --- a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_both_approx.out +++ b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_both_approx.out @@ -74,7 +74,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_dirichlet_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.125 operator = 1.05143 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_dirichlet_mg_coarse_) 1 MPI process @@ -170,7 +172,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_neumann_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.06452 operator = 1.02271 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_neumann_mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaij.out b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaij.out index 271a7baad7f..24b2c04fe3f 100644 --- a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaij.out +++ b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaij.out @@ -73,7 +73,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_dirichlet_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.125 operator = 1.05143 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_dirichlet_mg_coarse_) 1 MPI process @@ -164,7 +166,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_neumann_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.06452 operator = 1.02271 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_neumann_mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaijcusparse.out b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaijcusparse.out index 61f14459675..ff05bf82e86 100644 --- a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaijcusparse.out +++ 
b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_deluxe_layers_adapt_cuda_approx_mat_is_localmat_type-seqaijcusparse.out @@ -73,7 +73,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_dirichlet_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.125 operator = 1.05143 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_dirichlet_mg_coarse_) 1 MPI process @@ -164,7 +166,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_neumann_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.06452 operator = 1.02271 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_neumann_mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_dir_approx.out b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_dir_approx.out index 89b51de33c9..e5983d38e2d 100644 --- a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_dir_approx.out +++ b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_dir_approx.out @@ -74,7 +74,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_dirichlet_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.125 operator = 1.05143 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_dirichlet_mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_neu_approx.out b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_neu_approx.out index 2d716fc5bf1..3b4cec00670 100644 --- a/src/ksp/ksp/tutorials/output/ex71_bddc_elast_neu_approx.out +++ b/src/ksp/ksp/tutorials/output/ex71_bddc_elast_neu_approx.out @@ -101,7 +101,9 @@ PC Object: 8 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_bddc_neumann_pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.06452 operator = 1.02271 Coarse grid solver -- level 0 ------------------------------- KSP Object: (pc_bddc_neumann_mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tutorials/output/ex71_bddc_multi_element.out b/src/ksp/ksp/tutorials/output/ex71_bddc_multi_element.out new file mode 100644 index 00000000000..720fd94911b --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex71_bddc_multi_element.out @@ -0,0 +1 @@ +Linear solve converged due to CONVERGED_RTOL iterations 1 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_2d.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_2d.out new file mode 100644 index 00000000000..c5e055519fc --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_2d.out @@ -0,0 +1,2 @@ +Number of iterations = 3 +Residual norm 0.00010483 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_3d.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_3d.out new file mode 100644 index 00000000000..45f6f90eb00 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_3d.out @@ -0,0 +1,2 @@ +Number of 
iterations = 4 +Residual norm 6.14903e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_2d.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_2d.out new file mode 100644 index 00000000000..4e844b4bc27 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_2d.out @@ -0,0 +1,2 @@ +Number of iterations = 13 +Residual norm 5.38821e-07 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-1.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-1.out new file mode 100644 index 00000000000..1100153c129 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-1.out @@ -0,0 +1,2 @@ +Number of iterations = 14 +Residual norm 3.12972e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-3.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-3.out new file mode 100644 index 00000000000..1100153c129 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-3.out @@ -0,0 +1,2 @@ +Number of iterations = 14 +Residual norm 3.12972e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-4.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-4.out new file mode 100644 index 00000000000..1100153c129 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-4.out @@ -0,0 +1,2 @@ +Number of iterations = 14 +Residual norm 3.12972e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-8.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-8.out new file mode 100644 index 00000000000..1100153c129 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_amr_nsize-8.out @@ -0,0 +1,2 @@ +Number of iterations = 14 +Residual norm 3.12972e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-1.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-1.out new file mode 100644 index 00000000000..ceed7d3283d --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-1.out @@ -0,0 +1,2 @@ +Number of iterations = 12 +Residual norm 1.40125e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-3.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-3.out new file mode 100644 index 00000000000..e88a896ce64 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-3.out @@ -0,0 +1,2 @@ +Number of iterations = 12 +Residual norm 1.43151e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-4.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-4.out new file mode 100644 index 00000000000..2188a2f4a58 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-4.out @@ -0,0 +1,2 @@ +Number of iterations = 12 +Residual norm 1.41e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-8.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-8.out new file mode 100644 index 00000000000..7e0674fbd94 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hcurl_3d_nsize-8.out @@ -0,0 +1,2 @@ +Number of iterations = 12 +Residual norm 1.40035e-05 diff --git 
a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hdiv_3d.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hdiv_3d.out new file mode 100644 index 00000000000..66e85c5068c --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_hdiv_3d.out @@ -0,0 +1,2 @@ +Number of iterations = 14 +Residual norm 1.39157e-05 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_plex_2d.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_plex_2d.out new file mode 100644 index 00000000000..d8d09d2130d --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_plex_2d.out @@ -0,0 +1,2 @@ +Number of iterations = 6 +Residual norm 5.76769e-06 diff --git a/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_plex_3d.out b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_plex_3d.out new file mode 100644 index 00000000000..584925c4ca9 --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex72_matis_bddc_multisub_plex_3d.out @@ -0,0 +1,2 @@ +Number of iterations = 9 +Residual norm 9.22193e-06 diff --git a/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-1.out b/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-1.out index f207ae50ab2..f246bf41ba6 100644 --- a/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-1.out +++ b/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-1.out @@ -23,7 +23,9 @@ PC Object: 1 MPI process AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.25 operator = 1.3 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-2.out b/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-2.out index 259fb249dd7..198c829702e 100644 --- a/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-2.out +++ b/src/ksp/ksp/tutorials/output/ex7_gamg_cuda_nsize-2.out @@ -23,7 +23,9 @@ PC Object: 2 MPI processes AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 2 MPI processes + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.20833 operator = 1.21 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 2 MPI processes diff --git a/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-1.out b/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-1.out index 5ae44d6634f..784a7998f00 100644 --- a/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-1.out +++ b/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-1.out @@ -23,7 +23,9 @@ PC Object: 1 MPI process AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.25 operator = 1.3 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 1 MPI process diff --git a/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-2.out b/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-2.out index 07d00761805..10a22d27cdc 100644 --- a/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-2.out +++ b/src/ksp/ksp/tutorials/output/ex7_gamg_kokkos_nsize-2.out @@ -23,7 +23,9 @@ PC Object: 2 MPI processes AGG specific options Number 
of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 2 MPI processes + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.20833 operator = 1.21 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 2 MPI processes diff --git a/src/ksp/ksp/tutorials/output/ex81a_1.out b/src/ksp/ksp/tutorials/output/ex81a_1.out new file mode 100644 index 00000000000..2a5fe91bedf --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex81a_1.out @@ -0,0 +1,104 @@ +Linear solve converged due to CONVERGED_RTOL iterations 4 +KSP Object: 3 MPI processes + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 3 MPI processes + type: fieldsplit + FieldSplit with MULTIPLICATIVE composition: total splits = 3, blocksize = 1 + Solver info for each split is in the following KSP objects: + Split number 0 Fields 0, 1 + KSP Object: (fieldsplit_0_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_0_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_0_) 3 MPI processes + type: nest + rows=9, cols=9 + Matrix object: + type=nest, rows=2, cols=2 + MatNest structure: + (0,0) : type=mpiaij, rows=3, cols=3 + (0,1) : NULL + (1,0) : NULL + (1,1) : type=mpisbaij, rows=6, cols=6 + Split number 1 Fields 2, 3 + KSP Object: (fieldsplit_1_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_1_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_1_) 3 MPI processes + type: nest + rows=36, cols=36 + Matrix object: + type=nest, rows=2, cols=2 + MatNest structure: + (0,0) : type=mpibaij, rows=12, cols=12 + (0,1) : NULL + (1,0) : type=mpiaij, rows=24, cols=12 + (1,1) : type=transpose, rows=24, cols=24 + Split number 2 Fields 4 + KSP Object: (fieldsplit_2_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_2_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_2_) 3 MPI processes + type: mpiaij + rows=48, cols=48 + total: nonzeros=0, allocated nonzeros=0 + total number of mallocs used during MatSetValues calls=0 + not using I-node (on process 0) routines + linear system matrix = precond matrix: + Mat Object: 3 MPI processes + type: nest + rows=93, cols=93 + Matrix object: + type=nest, rows=5, cols=5 + MatNest structure: + (0,0) : type=mpiaij, rows=3, cols=3 + (0,1) : NULL + (0,2) : type=mpiaij, rows=3, cols=12 + (0,3) : type=transpose, rows=3, cols=24 + (0,4) : type=mpidense, rows=3, cols=48 + (1,0) : NULL + (1,1) : type=mpisbaij, rows=6, cols=6 + (1,2) : NULL + (1,3) : NULL + (1,4) : type=transpose, rows=6, cols=48 + (2,0) : type=mpiaij, rows=12, cols=3 + (2,1) : NULL + (2,2) : type=mpibaij, rows=12, cols=12 + (2,3) : NULL + (2,4) : NULL + (3,0) : NULL + (3,1) : type=mpiaij, rows=24, cols=6 + (3,2) : type=mpiaij, rows=24, cols=12 + (3,3) : type=transpose, rows=24, cols=24 + (3,4) : NULL + (4,0) : type=transpose, rows=48, cols=3 + (4,1) : NULL + (4,2) : NULL + (4,3) : NULL + (4,4) : prefix="fieldsplit_2_", type=mpiaij, rows=48, cols=48 diff --git a/src/ksp/ksp/tutorials/output/ex81a_2.out b/src/ksp/ksp/tutorials/output/ex81a_2.out new file mode 100644 index 00000000000..8e62522b30c --- /dev/null +++ b/src/ksp/ksp/tutorials/output/ex81a_2.out @@ -0,0 +1,129 @@ +Linear solve converged due to CONVERGED_RTOL iterations 4 +KSP Object: 3 MPI processes + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 3 MPI processes + type: fieldsplit + FieldSplit with MULTIPLICATIVE composition: total splits = 5, blocksize = 1 + Solver info for each split is in the following KSP objects: + Split number 0 Fields 0 + KSP Object: (fieldsplit_0_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_0_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_0_) 3 MPI processes + type: mpiaij + rows=3, cols=3 + total: nonzeros=3, allocated nonzeros=3 + total number of mallocs used during MatSetValues calls=0 + not using I-node (on process 0) routines + Split number 1 Fields 1 + KSP Object: (fieldsplit_1_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_1_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_1_) 3 MPI processes + type: mpisbaij + rows=6, cols=6, bs=2 + total: nonzeros=12, allocated nonzeros=12 + total number of mallocs used during MatSetValues calls=0 + block size is 2 + Split number 2 Fields 2 + KSP Object: (fieldsplit_2_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
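[Editor's note: of these two new reference outputs, the first (ex81a_1.out, complete above) groups the five nest blocks into three splits via -pc_fieldsplit_<i>_fields, while the second, still printing here, keeps one split per block. The same grouping can also be requested in code; a minimal sketch (split names arbitrary; for a MatNest operator the field indices are the nest block indices):

#include <petscksp.h>

/* Mirror -pc_fieldsplit_0_fields 0,1 -pc_fieldsplit_1_fields 2,3
   -pc_fieldsplit_2_fields 4 on a PCFIELDSPLIT whose operator is a
   5x5 MatNest: fields 0..4 are the nest blocks. */
static PetscErrorCode GroupNestFields(PC pc)
{
  const PetscInt g01[] = {0, 1}, g23[] = {2, 3}, g4[] = {4};

  PetscFunctionBeginUser;
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetFields(pc, "0", 2, g01, g01));
  PetscCall(PCFieldSplitSetFields(pc, "1", 2, g23, g23));
  PetscCall(PCFieldSplitSetFields(pc, "2", 1, g4, g4));
  PetscFunctionReturn(PETSC_SUCCESS);
}

End editor's note.]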
+ left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_2_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_2_) 3 MPI processes + type: mpibaij + rows=12, cols=12, bs=2 + total: nonzeros=24, allocated nonzeros=24 + total number of mallocs used during MatSetValues calls=0 + block size is 2 + Split number 3 Fields 3 + KSP Object: (fieldsplit_3_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_3_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_3_) 3 MPI processes + type: transpose + rows=24, cols=24, bs=2 + total: nonzeros=48, allocated nonzeros=48 + total number of mallocs used during MatSetValues calls=0 + Split number 4 Fields 4 + KSP Object: (fieldsplit_4_) 3 MPI processes + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (fieldsplit_4_) 3 MPI processes + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: (fieldsplit_4_) 3 MPI processes + type: mpiaij + rows=48, cols=48 + total: nonzeros=0, allocated nonzeros=0 + total number of mallocs used during MatSetValues calls=0 + not using I-node (on process 0) routines + linear system matrix = precond matrix: + Mat Object: 3 MPI processes + type: nest + rows=93, cols=93 + Matrix object: + type=nest, rows=5, cols=5 + MatNest structure: + (0,0) : prefix="fieldsplit_0_", type=mpiaij, rows=3, cols=3 + (0,1) : NULL + (0,2) : type=mpiaij, rows=3, cols=12 + (0,3) : type=transpose, rows=3, cols=24 + (0,4) : type=mpidense, rows=3, cols=48 + (1,0) : NULL + (1,1) : prefix="fieldsplit_1_", type=mpisbaij, rows=6, cols=6 + (1,2) : NULL + (1,3) : NULL + (1,4) : type=transpose, rows=6, cols=48 + (2,0) : type=mpiaij, rows=12, cols=3 + (2,1) : NULL + (2,2) : prefix="fieldsplit_2_", type=mpibaij, rows=12, cols=12 + (2,3) : NULL + (2,4) : NULL + (3,0) : NULL + (3,1) : type=mpiaij, rows=24, cols=6 + (3,2) : type=mpiaij, rows=24, cols=12 + (3,3) : prefix="fieldsplit_3_", type=transpose, rows=24, cols=24 + (3,4) : NULL + (4,0) : type=transpose, rows=48, cols=3 + (4,1) : NULL + (4,2) : NULL + (4,3) : NULL + (4,4) : prefix="fieldsplit_4_", type=mpiaij, rows=48, cols=48 diff --git a/src/ksp/ksp/tutorials/output/ex84_1.out b/src/ksp/ksp/tutorials/output/ex84_1.out index c35e7101d94..f255902cfea 100644 --- a/src/ksp/ksp/tutorials/output/ex84_1.out +++ b/src/ksp/ksp/tutorials/output/ex84_1.out @@ -1,11 +1,11 @@ Mat Object: 2 MPI processes type: mpiaij -row 0: (0, 2.) (1, -1.) -row 1: (1, 3.) (2, -1.) -row 2: (2, 4.) (3, -1.) -row 3: (3, 5.) (4, -1.) -row 4: (4, 6.) (5, -1.) -row 5: (4, -1.) (5, 7.) + row 0: (0, 2.) (1, -1.) + row 1: (1, 3.) (2, -1.) + row 2: (2, 4.) (3, -1.) + row 3: (3, 5.) (4, -1.) + row 4: (4, 6.) (5, -1.) + row 5: (4, -0.5) (5, 7.) IS Object: 2 MPI processes type: stride [0] Number of indices in (stride) set 1 @@ -28,18 +28,18 @@ IS Object: 2 MPI processes [1] 0 3 Mat Object: 2 MPI processes type: mpiaij -row 0: (0, 1.) (1, 0.) -row 1: (1, 3.) (2, -1.) -row 2: (2, 4.) (3, 0.) -row 3: (3, 1.) (4, 0.) -row 4: (4, 6.) (5, -1.) -row 5: (4, -1.) (5, 7.) + row 0: (0, 1.) (1, 0.) + row 1: (1, 3.) (2, -1.) 
+ row 2: (2, 4.) (3, 0.) + row 3: (3, 1.) (4, 0.) + row 4: (4, 6.) (5, -1.) + row 5: (4, -0.5) (5, 7.) Residual norms for redistribute_ solve. - 0 KSP Residual norm 4.787135538782e-01 - 1 KSP Residual norm 2.636082480062e-02 - 2 KSP Residual norm 1.449291115189e-16 + 0 KSP Residual norm 4.747000485090e-01 + 1 KSP Residual norm 2.445422843562e-02 + 2 KSP Residual norm 2.021518719930e-17 0 KSP Residual norm 2.449489742783e+00 - 1 KSP Residual norm 7.447602459742e-16 + 1 KSP Residual norm 3.330669073875e-16 KSP Object: 2 MPI processes type: preonly maximum iterations=10000, initial guess is zero @@ -74,33 +74,33 @@ PC Object: 2 MPI processes number of blocks = 2 Local solver information for first block is in the following KSP and PC objects on rank 0: Use -redistribute_fieldsplit_0_ksp_view ::ascii_info_detail to display information for all blocks - KSP Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: preonly - maximum iterations=10000, initial guess is zero - tolerances: relative=1e-05, absolute=1e-50, divergence=10000. - left preconditioning - using NONE norm type for convergence test - PC Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: ilu - out-of-place factorization - 0 levels of fill - tolerance for zero pivot 2.22045e-14 - matrix ordering: natural - factor fill ratio given 1., needed 1. - Factored matrix follows: - Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - package used to perform factorization: petsc - total: nonzeros=1, allocated nonzeros=1 - not using I-node routines - linear system matrix = precond matrix: - Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - total: nonzeros=1, allocated nonzeros=1 - total number of mallocs used during MatSetValues calls=0 - not using I-node routines + KSP Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: ilu + out-of-place factorization + 0 levels of fill + tolerance for zero pivot 2.22045e-14 + matrix ordering: natural + factor fill ratio given 1., needed 1. + Factored matrix follows: + Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + package used to perform factorization: petsc + total: nonzeros=1, allocated nonzeros=1 + not using I-node routines + linear system matrix = precond matrix: + Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + total: nonzeros=1, allocated nonzeros=1 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines linear system matrix = precond matrix: Mat Object: (redistribute_fieldsplit_0_) 2 MPI processes type: mpiaij @@ -120,33 +120,33 @@ PC Object: 2 MPI processes number of blocks = 2 Local solver information for first block is in the following KSP and PC objects on rank 0: Use -redistribute_fieldsplit_1_ksp_view ::ascii_info_detail to display information for all blocks - KSP Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: preonly - maximum iterations=10000, initial guess is zero - tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
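[Editor's note: the redistribute_ residual histories in this reference output come from the solver that PCREDISTRIBUTE runs on the reduced system, after rows with only a diagonal entry are stripped; the new trailing history corresponds to the KSPSolveTranspose() added to ex84.c, which is meaningful because the (5,4) entry is now -0.5, leaving the nonzero pattern symmetric but the values nonsymmetric. Reaching the inner solver looks roughly like this sketch (helper name mine, not the full example):

#include <petscksp.h>

/* Configure PCREDISTRIBUTE and allow a nonzero initial guess on the
   reduced system it solves; the outer KSP stays KSPPREONLY. */
static PetscErrorCode TuneRedistribute(KSP ksp)
{
  PC  pc;
  KSP inner;

  PetscFunctionBeginUser;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCREDISTRIBUTE));
  PetscCall(PCRedistributeGetKSP(pc, &inner));
  PetscCall(KSPSetInitialGuessNonzero(inner, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

End editor's note.]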
- left preconditioning - using NONE norm type for convergence test - PC Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: ilu - out-of-place factorization - 0 levels of fill - tolerance for zero pivot 2.22045e-14 - matrix ordering: natural - factor fill ratio given 1., needed 1. - Factored matrix follows: - Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - package used to perform factorization: petsc - total: nonzeros=1, allocated nonzeros=1 - not using I-node routines - linear system matrix = precond matrix: - Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - total: nonzeros=1, allocated nonzeros=1 - total number of mallocs used during MatSetValues calls=0 - not using I-node routines + KSP Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: ilu + out-of-place factorization + 0 levels of fill + tolerance for zero pivot 2.22045e-14 + matrix ordering: natural + factor fill ratio given 1., needed 1. + Factored matrix follows: + Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + package used to perform factorization: petsc + total: nonzeros=1, allocated nonzeros=1 + not using I-node routines + linear system matrix = precond matrix: + Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + total: nonzeros=1, allocated nonzeros=1 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines linear system matrix = precond matrix: Mat Object: (redistribute_fieldsplit_1_) 2 MPI processes type: mpiaij @@ -169,9 +169,9 @@ PC Object: 2 MPI processes total number of mallocs used during MatSetValues calls=0 not using I-node (on process 0) routines Residual norms for redistribute_ solve. - 0 KSP Residual norm 1.552188880697e-16 + 0 KSP Residual norm 8.479468414379e-17 0 KSP Residual norm 2.449489742783e+00 - 1 KSP Residual norm 7.447602459742e-16 + 1 KSP Residual norm 3.330669073875e-16 KSP Object: 2 MPI processes type: preonly maximum iterations=10000, nonzero initial guess @@ -206,33 +206,33 @@ PC Object: 2 MPI processes number of blocks = 2 Local solver information for first block is in the following KSP and PC objects on rank 0: Use -redistribute_fieldsplit_0_ksp_view ::ascii_info_detail to display information for all blocks - KSP Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: preonly - maximum iterations=10000, initial guess is zero - tolerances: relative=1e-05, absolute=1e-50, divergence=10000. - left preconditioning - using NONE norm type for convergence test - PC Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: ilu - out-of-place factorization - 0 levels of fill - tolerance for zero pivot 2.22045e-14 - matrix ordering: natural - factor fill ratio given 1., needed 1. 
- Factored matrix follows: - Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - package used to perform factorization: petsc - total: nonzeros=1, allocated nonzeros=1 - not using I-node routines - linear system matrix = precond matrix: - Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - total: nonzeros=1, allocated nonzeros=1 - total number of mallocs used during MatSetValues calls=0 - not using I-node routines + KSP Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: ilu + out-of-place factorization + 0 levels of fill + tolerance for zero pivot 2.22045e-14 + matrix ordering: natural + factor fill ratio given 1., needed 1. + Factored matrix follows: + Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + package used to perform factorization: petsc + total: nonzeros=1, allocated nonzeros=1 + not using I-node routines + linear system matrix = precond matrix: + Mat Object: (redistribute_fieldsplit_0_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + total: nonzeros=1, allocated nonzeros=1 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines linear system matrix = precond matrix: Mat Object: (redistribute_fieldsplit_0_) 2 MPI processes type: mpiaij @@ -252,33 +252,33 @@ PC Object: 2 MPI processes number of blocks = 2 Local solver information for first block is in the following KSP and PC objects on rank 0: Use -redistribute_fieldsplit_1_ksp_view ::ascii_info_detail to display information for all blocks - KSP Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: preonly - maximum iterations=10000, initial guess is zero - tolerances: relative=1e-05, absolute=1e-50, divergence=10000. - left preconditioning - using NONE norm type for convergence test - PC Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: ilu - out-of-place factorization - 0 levels of fill - tolerance for zero pivot 2.22045e-14 - matrix ordering: natural - factor fill ratio given 1., needed 1. - Factored matrix follows: - Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - package used to perform factorization: petsc - total: nonzeros=1, allocated nonzeros=1 - not using I-node routines - linear system matrix = precond matrix: - Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process - type: seqaij - rows=1, cols=1 - total: nonzeros=1, allocated nonzeros=1 - total number of mallocs used during MatSetValues calls=0 - not using I-node routines + KSP Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: ilu + out-of-place factorization + 0 levels of fill + tolerance for zero pivot 2.22045e-14 + matrix ordering: natural + factor fill ratio given 1., needed 1. 
+ Factored matrix follows: + Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + package used to perform factorization: petsc + total: nonzeros=1, allocated nonzeros=1 + not using I-node routines + linear system matrix = precond matrix: + Mat Object: (redistribute_fieldsplit_1_sub_) 1 MPI process + type: seqaij + rows=1, cols=1 + total: nonzeros=1, allocated nonzeros=1 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines linear system matrix = precond matrix: Mat Object: (redistribute_fieldsplit_1_) 2 MPI processes type: mpiaij @@ -300,3 +300,9 @@ PC Object: 2 MPI processes total: nonzeros=12, allocated nonzeros=12 total number of mallocs used during MatSetValues calls=0 not using I-node (on process 0) routines + Residual norms for redistribute_ solve. + 0 KSP Residual norm 8.529808416651e-02 + 1 KSP Residual norm 1.682101381914e-03 + 2 KSP Residual norm 2.122130848081e-17 + 0 KSP Residual norm 2.449489742783e+00 + 1 KSP Residual norm 2.482534153247e-16 diff --git a/src/ksp/ksp/utils/kspmatregi.c b/src/ksp/ksp/utils/kspmatregi.c index b97ed89e197..142bfe8da0a 100644 --- a/src/ksp/ksp/utils/kspmatregi.c +++ b/src/ksp/ksp/utils/kspmatregi.c @@ -20,7 +20,10 @@ PetscErrorCode KSPMatRegisterAll(void) KSPMatRegisterAllCalled = PETSC_TRUE; PetscCall(MatRegister(MATSCHURCOMPLEMENT, MatCreate_SchurComplement)); PetscCall(MatRegister(MATLMVMDFP, MatCreate_LMVMDFP)); + PetscCall(MatRegister(MATLMVMDDFP, MatCreate_LMVMDDFP)); PetscCall(MatRegister(MATLMVMBFGS, MatCreate_LMVMBFGS)); + PetscCall(MatRegister(MATLMVMDBFGS, MatCreate_LMVMDBFGS)); + PetscCall(MatRegister(MATLMVMDQN, MatCreate_LMVMDQN)); PetscCall(MatRegister(MATLMVMSR1, MatCreate_LMVMSR1)); PetscCall(MatRegister(MATLMVMBROYDEN, MatCreate_LMVMBrdn)); PetscCall(MatRegister(MATLMVMBADBROYDEN, MatCreate_LMVMBadBrdn)); diff --git a/src/ksp/ksp/utils/lmvm/bfgs/bfgs.c b/src/ksp/ksp/utils/lmvm/bfgs/bfgs.c index dc4b037dff1..9ac8b007e5c 100644 --- a/src/ksp/ksp/utils/lmvm/bfgs/bfgs.c +++ b/src/ksp/ksp/utils/lmvm/bfgs/bfgs.c @@ -1,5 +1,7 @@ #include <../src/ksp/ksp/utils/lmvm/symbrdn/symbrdn.h> /*I "petscksp.h" I*/ #include <../src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.h> +#include +#include /* Limited-memory Broyden-Fletcher-Goldfarb-Shano method for approximating both @@ -57,6 +59,7 @@ PetscErrorCode MatSolve_LMVMBFGS(Mat B, Vec F, Vec dX) /* Start the second loop */ for (i = 0; i <= lmvm->k; ++i) { + // dot product performed on default blocking stream, last write to lbfgs->work completes before dot product starts PetscCall(VecDot(lmvm->Y[i], dX, &ytx)); beta = PetscRealPart(ytx) / lbfgs->yts[i]; PetscCall(VecAXPY(dX, alpha[i] - beta, lmvm->S[i])); @@ -140,10 +143,10 @@ static PetscErrorCode MatUpdate_LMVMBFGS(Mat B, Vec X, Vec F) Mat_LMVM *lmvm = (Mat_LMVM *)B->data; Mat_SymBrdn *lbfgs = (Mat_SymBrdn *)lmvm->ctx; Mat_LMVM *dbase; - Mat_DiagBrdn *dctx; + Mat_DiagBrdn *diagctx; PetscInt old_k, i; - PetscReal curvtol, ststmp; - PetscScalar curvature, ytytmp; + PetscReal curvtol, ytytmp; + PetscScalar curvature, ststmp; PetscFunctionBegin; if (!lmvm->m) PetscFunctionReturn(PETSC_SUCCESS); @@ -153,9 +156,9 @@ static PetscErrorCode MatUpdate_LMVMBFGS(Mat B, Vec X, Vec F) PetscCall(VecAYPX(lmvm->Fprev, -1.0, F)); /* Test if the updates can be accepted */ - PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ststmp)); - if (ststmp < lmvm->eps) curvtol = 0.0; - else curvtol = lmvm->eps * ststmp; + PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ytytmp)); + if 
(ytytmp < lmvm->eps) curvtol = 0.0; + else curvtol = lmvm->eps * ytytmp; if (PetscRealPart(curvature) > curvtol) { /* Update is good, accept it */ @@ -172,12 +175,14 @@ static PetscErrorCode MatUpdate_LMVMBFGS(Mat B, Vec X, Vec F) } } /* Update history of useful scalars */ - PetscCall(VecDot(lmvm->Y[lmvm->k], lmvm->Y[lmvm->k], &ytytmp)); lbfgs->yts[lmvm->k] = PetscRealPart(curvature); - lbfgs->yty[lmvm->k] = PetscRealPart(ytytmp); - lbfgs->sts[lmvm->k] = ststmp; + lbfgs->yty[lmvm->k] = ytytmp; /* Compute the scalar scale if necessary */ - if (lbfgs->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_SCALAR) PetscCall(MatSymBrdnComputeJ0Scalar(B)); + if (lbfgs->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_SCALAR) { + PetscCall(VecDot(lmvm->S[lmvm->k], lmvm->S[lmvm->k], &ststmp)); + lbfgs->sts[lmvm->k] = PetscRealPart(ststmp); + PetscCall(MatSymBrdnComputeJ0Scalar(B)); + } } else { /* Update is bad, skip it */ ++lmvm->nrejects; @@ -186,9 +191,9 @@ static PetscErrorCode MatUpdate_LMVMBFGS(Mat B, Vec X, Vec F) } else { switch (lbfgs->scale_type) { case MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL: - dbase = (Mat_LMVM *)lbfgs->D->data; - dctx = (Mat_DiagBrdn *)dbase->ctx; - PetscCall(VecSet(dctx->invD, lbfgs->delta)); + dbase = (Mat_LMVM *)lbfgs->D->data; + diagctx = (Mat_DiagBrdn *)dbase->ctx; + PetscCall(VecSet(diagctx->invD, lbfgs->delta)); break; case MAT_LMVM_SYMBROYDEN_SCALE_SCALAR: lbfgs->sigma = lbfgs->delta; @@ -391,13 +396,13 @@ PetscErrorCode MatCreate_LMVMBFGS(Mat B) B->ops->setup = MatSetUp_LMVMBFGS; B->ops->destroy = MatDestroy_LMVMBFGS; B->ops->setfromoptions = MatSetFromOptions_LMVMBFGS; - B->ops->solve = MatSolve_LMVMBFGS; lmvm = (Mat_LMVM *)B->data; lmvm->ops->allocate = MatAllocate_LMVMBFGS; lmvm->ops->reset = MatReset_LMVMBFGS; lmvm->ops->update = MatUpdate_LMVMBFGS; lmvm->ops->mult = MatMult_LMVMBFGS; + lmvm->ops->solve = MatSolve_LMVMBFGS; lmvm->ops->copy = MatCopy_LMVMBFGS; lbfgs = (Mat_SymBrdn *)lmvm->ctx; diff --git a/src/ksp/ksp/utils/lmvm/brdn/badbrdn.c b/src/ksp/ksp/utils/lmvm/brdn/badbrdn.c index 850810cc006..3b18f55698e 100644 --- a/src/ksp/ksp/utils/lmvm/brdn/badbrdn.c +++ b/src/ksp/ksp/utils/lmvm/brdn/badbrdn.c @@ -245,6 +245,7 @@ PetscErrorCode MatCreate_LMVMBadBrdn(Mat B) lmvm->ops->allocate = MatAllocate_LMVMBadBrdn; lmvm->ops->reset = MatReset_LMVMBadBrdn; lmvm->ops->mult = MatMult_LMVMBadBrdn; + lmvm->ops->solve = MatSolve_LMVMBadBrdn; lmvm->ops->update = MatUpdate_LMVMBadBrdn; lmvm->ops->copy = MatCopy_LMVMBadBrdn; @@ -295,6 +296,7 @@ PetscErrorCode MatCreate_LMVMBadBrdn(Mat B) PetscErrorCode MatCreateLMVMBadBroyden(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) { PetscFunctionBegin; + PetscCall(KSPInitializePackage()); PetscCall(MatCreate(comm, B)); PetscCall(MatSetSizes(*B, n, n, N, N)); PetscCall(MatSetType(*B, MATLMVMBADBROYDEN)); diff --git a/src/ksp/ksp/utils/lmvm/brdn/brdn.c b/src/ksp/ksp/utils/lmvm/brdn/brdn.c index c3b471f3879..8aafbab7357 100644 --- a/src/ksp/ksp/utils/lmvm/brdn/brdn.c +++ b/src/ksp/ksp/utils/lmvm/brdn/brdn.c @@ -240,6 +240,7 @@ PetscErrorCode MatCreate_LMVMBrdn(Mat B) lmvm->ops->reset = MatReset_LMVMBrdn; lmvm->ops->mult = MatMult_LMVMBrdn; lmvm->ops->update = MatUpdate_LMVMBrdn; + lmvm->ops->solve = MatSolve_LMVMBrdn; lmvm->ops->copy = MatCopy_LMVMBrdn; PetscCall(PetscNew(&lbrdn)); @@ -281,6 +282,7 @@ PetscErrorCode MatCreate_LMVMBrdn(Mat B) PetscErrorCode MatCreateLMVMBroyden(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) { PetscFunctionBegin; + PetscCall(KSPInitializePackage()); PetscCall(MatCreate(comm, B)); PetscCall(MatSetSizes(*B, n, n, N, N)); 
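/*
 * Illustrative aside (not part of the patch): the KSPInitializePackage() calls
 * added to the convenience constructors above matter because the MATLMVM*
 * types are registered by KSPMatRegisterAll(), which runs when the KSP package
 * initializes. With the change, a program may build one of these matrices
 * before creating any KSP object. A minimal sketch using only routines shown
 * in this patch:
 */
#include <petscksp.h>

int main(int argc, char **argv)
{
  Mat B;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  /* no KSP exists yet; the constructor now initializes the package itself */
  PetscCall(MatCreateLMVMBroyden(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &B));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}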
   PetscCall(MatSetType(*B, MATLMVMBROYDEN));
diff --git a/src/ksp/ksp/utils/lmvm/dense/cd_cupm/cd_cupm.cxx b/src/ksp/ksp/utils/lmvm/dense/cd_cupm/cd_cupm.cxx
new file mode 100644
index 00000000000..8d28e6c5fae
--- /dev/null
+++ b/src/ksp/ksp/utils/lmvm/dense/cd_cupm/cd_cupm.cxx
@@ -0,0 +1,148 @@
+#include "../denseqn.h"
+#include
+#include
+
+namespace Petsc
+{
+
+namespace device
+{
+
+namespace cupm
+{
+
+namespace impl
+{
+
+template <DeviceType T>
+struct UpperTriangular : CUPMObject<T> {
+  PETSC_CUPMOBJECT_HEADER(T);
+
+  static PetscErrorCode SolveInPlace(PetscDeviceContext, PetscBool, PetscInt, const PetscScalar[], PetscInt, PetscScalar[], PetscInt) noexcept;
+  static PetscErrorCode SolveInPlaceCyclic(PetscDeviceContext, PetscBool, PetscInt, PetscInt, const PetscScalar[], PetscInt, PetscScalar[], PetscInt) noexcept;
+};
+
+template <DeviceType T>
+PetscErrorCode UpperTriangular<T>::SolveInPlace(PetscDeviceContext dctx, PetscBool hermitian_transpose, PetscInt N, const PetscScalar A[], PetscInt lda, PetscScalar x[], PetscInt stride) noexcept
+{
+  cupmBlasInt_t    n;
+  cupmBlasHandle_t handle;
+  auto             _A = cupmScalarPtrCast(A);
+  auto             _x = cupmScalarPtrCast(x);
+
+  PetscFunctionBegin;
+  if (!N) PetscFunctionReturn(PETSC_SUCCESS);
+  PetscCall(PetscCUPMBlasIntCast(N, &n));
+  PetscCall(GetHandlesFrom_(dctx, &handle));
+  PetscCall(PetscLogGpuTimeBegin());
+  PetscCallCUPMBLAS(cupmBlasXtrsv(handle, CUPMBLAS_FILL_MODE_UPPER, hermitian_transpose ? CUPMBLAS_OP_C : CUPMBLAS_OP_N, CUPMBLAS_DIAG_NON_UNIT, n, _A, lda, _x, stride));
+  PetscCall(PetscLogGpuTimeEnd());
+
+  PetscCall(PetscLogGpuFlops(1.0 * N * N));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+template <DeviceType T>
+PetscErrorCode UpperTriangular<T>::SolveInPlaceCyclic(PetscDeviceContext dctx, PetscBool hermitian_transpose, PetscInt N, PetscInt oldest_index, const PetscScalar A[], PetscInt lda, PetscScalar x[], PetscInt stride) noexcept
+{
+  cupmBlasInt_t         n_old, n_new;
+  cupmBlasPointerMode_t pointer_mode;
+  cupmBlasHandle_t      handle;
+  auto                  sone      = cupmScalarCast(1.0);
+  auto                  minus_one = cupmScalarCast(-1.0);
+  auto                  _A        = cupmScalarPtrCast(A);
+  auto                  _x        = cupmScalarPtrCast(x);
+
+  PetscFunctionBegin;
+  if (!N) PetscFunctionReturn(PETSC_SUCCESS);
+  PetscCall(PetscCUPMBlasIntCast(N - oldest_index, &n_old));
+  PetscCall(PetscCUPMBlasIntCast(oldest_index, &n_new));
+  PetscCall(GetHandlesFrom_(dctx, &handle));
+  PetscCall(PetscLogGpuTimeBegin());
+  PetscCallCUPMBLAS(cupmBlasGetPointerMode(handle, &pointer_mode));
+  PetscCallCUPMBLAS(cupmBlasSetPointerMode(handle, CUPMBLAS_POINTER_MODE_HOST));
+  if (!hermitian_transpose) {
+    PetscCallCUPMBLAS(cupmBlasXtrsv(handle, CUPMBLAS_FILL_MODE_UPPER, CUPMBLAS_OP_N, CUPMBLAS_DIAG_NON_UNIT, n_new, _A, lda, _x, stride));
+    PetscCallCUPMBLAS(cupmBlasXgemv(handle, CUPMBLAS_OP_N, n_old, n_new, &minus_one, &_A[oldest_index], lda, _x, stride, &sone, &_x[oldest_index], stride));
+    PetscCallCUPMBLAS(cupmBlasXtrsv(handle, CUPMBLAS_FILL_MODE_UPPER, CUPMBLAS_OP_N, CUPMBLAS_DIAG_NON_UNIT, n_old, &_A[oldest_index * (lda + 1)], lda, &_x[oldest_index], stride));
+  } else {
+    PetscCallCUPMBLAS(cupmBlasXtrsv(handle, CUPMBLAS_FILL_MODE_UPPER, CUPMBLAS_OP_C, CUPMBLAS_DIAG_NON_UNIT, n_old, &_A[oldest_index * (lda + 1)], lda, &_x[oldest_index], stride));
+    PetscCallCUPMBLAS(cupmBlasXgemv(handle, CUPMBLAS_OP_C, n_old, n_new, &minus_one, &_A[oldest_index], lda, &_x[oldest_index], stride, &sone, _x, stride));
+    PetscCallCUPMBLAS(cupmBlasXtrsv(handle, CUPMBLAS_FILL_MODE_UPPER, CUPMBLAS_OP_C, CUPMBLAS_DIAG_NON_UNIT, n_new, _A, lda, _x, stride));
+  }
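/*
 * Illustrative aside (not part of the patch): SolveInPlaceCyclic solves with
 * an upper triangular R whose rows/columns are stored in recycle (cyclic)
 * order: the newest updates wrap around to the leading o = oldest_index slots.
 * In that ordering the matrix is block *lower* triangular,
 *     [ R_nn  0    ]
 *     [ C     R_oo ]
 * with upper triangular diagonal blocks, so one forward block-substitution
 * (two trsv calls plus one gemv, exactly as above) solves it. A plain host
 * sketch for the non-transpose case, column-major A with leading dimension lda:
 */
static void solve_cyclic_upper(int N, int o, const double *A, int lda, double *x)
{
  /* solve R_nn x_new = b_new (leading o-by-o upper triangular block) */
  for (int j = o - 1; j >= 0; j--) {
    x[j] /= A[j + j * lda];
    for (int i = 0; i < j; i++) x[i] -= A[i + j * lda] * x[j];
  }
  /* b_old -= C x_new (coupling block C is (N-o)-by-o, starting at row o) */
  for (int j = 0; j < o; j++)
    for (int i = o; i < N; i++) x[i] -= A[i + j * lda] * x[j];
  /* solve R_oo x_old = b_old (trailing block, diagonal starts at A[o + o*lda]) */
  for (int j = N - 1; j >= o; j--) {
    x[j] /= A[j + j * lda];
    for (int i = o; i < j; i++) x[i] -= A[i + j * lda] * x[j];
  }
}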
+  PetscCallCUPMBLAS(cupmBlasSetPointerMode(handle, pointer_mode));
+  PetscCall(PetscLogGpuTimeEnd());
+
+  PetscCall(PetscLogGpuFlops(1.0 * N * N));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+#if PetscDefined(HAVE_CUDA)
+template struct UpperTriangular<DeviceType::CUDA>;
+#endif
+
+#if PetscDefined(HAVE_HIP)
+template struct UpperTriangular<DeviceType::HIP>;
+#endif
+
+} // namespace impl
+
+} // namespace cupm
+
+} // namespace device
+
+} // namespace Petsc
+
+PETSC_INTERN PetscErrorCode MatUpperTriangularSolveInPlace_CUPM(PetscBool hermitian_transpose, PetscInt n, const PetscScalar A[], PetscInt lda, PetscScalar x[], PetscInt stride)
+{
+  using ::Petsc::device::cupm::impl::UpperTriangular;
+  using ::Petsc::device::cupm::DeviceType;
+  PetscDeviceContext dctx;
+  PetscDeviceType    device_type;
+
+  PetscFunctionBegin;
+  PetscCall(PetscDeviceContextGetCurrentContext(&dctx));
+  PetscCall(PetscDeviceContextGetDeviceType(dctx, &device_type));
+  switch (device_type) {
+#if PetscDefined(HAVE_CUDA)
+  case PETSC_DEVICE_CUDA:
+    PetscCall(UpperTriangular<DeviceType::CUDA>::SolveInPlace(dctx, hermitian_transpose, n, A, lda, x, stride));
+    break;
+#endif
+#if PetscDefined(HAVE_HIP)
+  case PETSC_DEVICE_HIP:
+    PetscCall(UpperTriangular<DeviceType::HIP>::SolveInPlace(dctx, hermitian_transpose, n, A, lda, x, stride));
+    break;
+#endif
+  default:
+    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported device type %s", PetscDeviceTypes[device_type]);
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+PETSC_INTERN PetscErrorCode MatUpperTriangularSolveInPlaceCyclic_CUPM(PetscBool hermitian_transpose, PetscInt n, PetscInt oldest_index, const PetscScalar A[], PetscInt lda, PetscScalar x[], PetscInt stride)
+{
+  using ::Petsc::device::cupm::impl::UpperTriangular;
+  using ::Petsc::device::cupm::DeviceType;
+  PetscDeviceContext dctx;
+  PetscDeviceType    device_type;
+
+  PetscFunctionBegin;
+  PetscCall(PetscDeviceContextGetCurrentContext(&dctx));
+  PetscCall(PetscDeviceContextGetDeviceType(dctx, &device_type));
+  switch (device_type) {
+#if PetscDefined(HAVE_CUDA)
+  case PETSC_DEVICE_CUDA:
+    PetscCall(UpperTriangular<DeviceType::CUDA>::SolveInPlaceCyclic(dctx, hermitian_transpose, n, oldest_index, A, lda, x, stride));
+    break;
+#endif
+#if PetscDefined(HAVE_HIP)
+  case PETSC_DEVICE_HIP:
+    PetscCall(UpperTriangular<DeviceType::HIP>::SolveInPlaceCyclic(dctx, hermitian_transpose, n, oldest_index, A, lda, x, stride));
+    break;
+#endif
+  default:
+    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported device type %s", PetscDeviceTypes[device_type]);
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
diff --git a/src/ksp/ksp/utils/lmvm/dense/cd_cupm/makefile b/src/ksp/ksp/utils/lmvm/dense/cd_cupm/makefile
new file mode 100644
index 00000000000..bc029a9435b
--- /dev/null
+++ b/src/ksp/ksp/utils/lmvm/dense/cd_cupm/makefile
@@ -0,0 +1,8 @@
+-include ../../../../../../../petscdir.mk
+#requirespackage PETSC_HAVE_CUPM
+
+LIBBASE = libpetscksp
+MANSEC = KSP
+
+include ${PETSC_DIR}/lib/petsc/conf/variables
+include ${PETSC_DIR}/lib/petsc/conf/rules.doc
diff --git a/src/ksp/ksp/utils/lmvm/dense/cd_utils.c b/src/ksp/ksp/utils/lmvm/dense/cd_utils.c
new file mode 100644
index 00000000000..fc0b04f6cb7
--- /dev/null
+++ b/src/ksp/ksp/utils/lmvm/dense/cd_utils.c
@@ -0,0 +1,221 @@
+#include <../src/ksp/ksp/utils/lmvm/dense/denseqn.h> /*I "petscksp.h" I*/
+#include <../src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.h>
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+const char *const MatLMVMDenseTypes[] = {"reorder", "inplace", "MatLMVMDenseType", "MAT_LMVM_DENSE_", NULL};
+
+PETSC_INTERN PetscErrorCode MatMultAddColumnRange(Mat 
A, Vec xx, Vec zz, Vec yy, PetscInt c_start, PetscInt c_end) +{ + PetscFunctionBegin; + PetscCall(PetscLogEventBegin(MAT_MultAdd, (PetscObject)A, NULL, NULL, NULL)); + PetscUseMethod(A, "MatMultAddColumnRange_C", (Mat, Vec, Vec, Vec, PetscInt, PetscInt), (A, xx, zz, yy, c_start, c_end)); + PetscCall(PetscLogEventEnd(MAT_MultAdd, (PetscObject)A, NULL, NULL, NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatMultHermitianTransposeColumnRange(Mat A, Vec xx, Vec yy, PetscInt c_start, PetscInt c_end) +{ + PetscFunctionBegin; + PetscCall(PetscLogEventBegin(MAT_MultTranspose, (PetscObject)A, NULL, NULL, NULL)); + PetscUseMethod(A, "MatMultHermitianTransposeColumnRange_C", (Mat, Vec, Vec, PetscInt, PetscInt), (A, xx, yy, c_start, c_end)); + PetscCall(PetscLogEventEnd(MAT_MultTranspose, (PetscObject)A, NULL, NULL, NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatMultHermitianTransposeAddColumnRange(Mat A, Vec xx, Vec zz, Vec yy, PetscInt c_start, PetscInt c_end) +{ + PetscFunctionBegin; + PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, (PetscObject)A, NULL, NULL, NULL)); + PetscUseMethod(A, "MatMultHermitianTransposeAddColumnRange_C", (Mat, Vec, Vec, Vec, PetscInt, PetscInt), (A, xx, zz, yy, c_start, c_end)); + PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, (PetscObject)A, NULL, NULL, NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode VecCyclicShift(Mat B, Vec X, PetscInt d, Vec cyclic_work_vec) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + PetscInt m = lmvm->m; + PetscInt n; + const PetscScalar *src; + PetscScalar *dest; + PetscMemType src_memtype; + PetscMemType dest_memtype; + + PetscFunctionBegin; + PetscCall(VecGetLocalSize(X, &n)); + if (!cyclic_work_vec) PetscCall(VecDuplicate(X, &cyclic_work_vec)); + PetscCall(VecCopy(X, cyclic_work_vec)); + PetscCall(VecGetArrayReadAndMemType(cyclic_work_vec, &src, &src_memtype)); + PetscCall(VecGetArrayWriteAndMemType(X, &dest, &dest_memtype)); + if (n == 0) { /* no work on this process */ + PetscCall(VecRestoreArrayWriteAndMemType(X, &dest)); + PetscCall(VecRestoreArrayReadAndMemType(cyclic_work_vec, &src)); + PetscFunctionReturn(PETSC_SUCCESS); + } + PetscAssert(src_memtype == dest_memtype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "memtype of duplicate does not match"); + if (PetscMemTypeHost(src_memtype)) { + PetscCall(PetscArraycpy(dest, &src[d], m - d)); + PetscCall(PetscArraycpy(&dest[m - d], src, d)); + } else { + PetscDeviceContext dctx; + + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + PetscCall(PetscDeviceRegisterMemory(dest, dest_memtype, m * sizeof(*dest))); + PetscCall(PetscDeviceRegisterMemory(src, src_memtype, m * sizeof(*src))); + PetscCall(PetscDeviceArrayCopy(dctx, dest, &src[d], m - d)); + PetscCall(PetscDeviceArrayCopy(dctx, &dest[m - d], src, d)); + } + PetscCall(VecRestoreArrayWriteAndMemType(X, &dest)); + PetscCall(VecRestoreArrayReadAndMemType(cyclic_work_vec, &src)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static inline PetscInt recycle_index(PetscInt m, PetscInt idx) +{ + return idx % m; +} + +static inline PetscInt oldest_update(PetscInt m, PetscInt idx) +{ + return PetscMax(0, idx - m); +} + +PETSC_INTERN PetscErrorCode VecRecycleOrderToHistoryOrder(Mat B, Vec X, PetscInt num_updates, Vec cyclic_work_vec) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + PetscInt m = lmvm->m; + PetscInt oldest_index; + + PetscFunctionBegin; + oldest_index = recycle_index(m, oldest_update(m, num_updates)); + if (oldest_index == 0) 
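/*
 * Illustrative aside (not part of the patch): recycle_index() and
 * oldest_update() above implement a length-m circular buffer of updates:
 * update j lives in slot j % m, and once more than m updates have been taken
 * the oldest retained one is num_updates - m. With m = 5 and num_updates = 12,
 * updates 7..11 survive, the oldest (7) sits in slot 7 % 5 = 2, and rotating
 * left by 2 puts the slots into oldest-first (history) order, which is exactly
 * the copy pattern VecCyclicShift performs. Host sketch of that rotation:
 */
static void cyclic_shift_left(int m, int d, const double *src, double *dst)
{
  for (int i = 0; i < m - d; i++) dst[i] = src[i + d]; /* slots d..m-1 move to the front */
  for (int i = 0; i < d; i++) dst[m - d + i] = src[i]; /* slots 0..d-1 wrap to the back */
}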
PetscFunctionReturn(PETSC_SUCCESS); /* vector is already in history order */ + PetscCall(VecCyclicShift(B, X, oldest_index, cyclic_work_vec)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode VecHistoryOrderToRecycleOrder(Mat B, Vec X, PetscInt num_updates, Vec cyclic_work_vec) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + PetscInt m = lmvm->m; + PetscInt oldest_index; + + PetscFunctionBegin; + oldest_index = recycle_index(m, oldest_update(m, num_updates)); + if (oldest_index == 0) PetscFunctionReturn(PETSC_SUCCESS); /* vector is already in recycle order */ + PetscCall(VecCyclicShift(B, X, m - oldest_index, cyclic_work_vec)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatUpperTriangularSolveInPlace_Internal(MatLMVMDenseType type, PetscMemType memtype, PetscBool hermitian_transpose, PetscInt N, PetscInt oldest_index, const PetscScalar A[], PetscInt lda, PetscScalar x[], PetscInt stride) +{ + PetscFunctionBegin; + /* if oldest_index == 0, the two strategies are equivalent, redirect to the simpler one */ + if (oldest_index == 0) type = MAT_LMVM_DENSE_REORDER; + switch (type) { + case MAT_LMVM_DENSE_REORDER: + if (PetscMemTypeHost(memtype)) { + PetscBLASInt n, lda_blas, one = 1; + PetscCall(PetscBLASIntCast(N, &n)); + PetscCall(PetscBLASIntCast(lda, &lda_blas)); + PetscCallBLAS("BLAStrsv", BLAStrsv_("U", hermitian_transpose ? "C" : "N", "NotUnitTriangular", &n, A, &lda_blas, x, &one)); + PetscCall(PetscLogFlops(1.0 * n * n)); +#if defined(PETSC_HAVE_CUPM) + } else if (PetscMemTypeDevice(memtype)) { + PetscCall(MatUpperTriangularSolveInPlace_CUPM(hermitian_transpose, N, A, lda, x, 1)); +#endif + } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported memtype"); + break; + case MAT_LMVM_DENSE_INPLACE: + if (PetscMemTypeHost(memtype)) { + PetscBLASInt n_old, n_new, lda_blas, one = 1; + PetscScalar minus_one = -1.0; + PetscScalar sone = 1.0; + PetscCall(PetscBLASIntCast(N - oldest_index, &n_old)); + PetscCall(PetscBLASIntCast(oldest_index, &n_new)); + PetscCall(PetscBLASIntCast(lda, &lda_blas)); + if (!hermitian_transpose) { + PetscCallBLAS("BLAStrsv", BLAStrsv_("U", "N", "NotUnitTriangular", &n_new, A, &lda_blas, x, &one)); + PetscCallBLAS("BLASgemv", BLASgemv_("N", &n_old, &n_new, &minus_one, &A[oldest_index], &lda_blas, x, &one, &sone, &x[oldest_index], &one)); + PetscCallBLAS("BLAStrsv", BLAStrsv_("U", "N", "NotUnitTriangular", &n_old, &A[oldest_index * (lda + 1)], &lda_blas, &x[oldest_index], &one)); + } else { + PetscCallBLAS("BLAStrsv", BLAStrsv_("U", "C", "NotUnitTriangular", &n_old, &A[oldest_index * (lda + 1)], &lda_blas, &x[oldest_index], &one)); + PetscCallBLAS("BLASgemv", BLASgemv_("C", &n_old, &n_new, &minus_one, &A[oldest_index], &lda_blas, &x[oldest_index], &one, &sone, x, &one)); + PetscCallBLAS("BLAStrsv", BLAStrsv_("U", "C", "NotUnitTriangular", &n_new, A, &lda_blas, x, &one)); + } + PetscCall(PetscLogFlops(1.0 * N * N)); +#if defined(PETSC_HAVE_CUPM) + } else if (PetscMemTypeDevice(memtype)) { + PetscCall(MatUpperTriangularSolveInPlaceCyclic_CUPM(hermitian_transpose, N, oldest_index, A, lda, x, stride)); +#endif + } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported memtype"); + break; + default: + PetscUnreachable(); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatUpperTriangularSolveInPlace(Mat B, Mat Amat, Vec X, PetscBool hermitian_transpose, PetscInt num_updates, MatLMVMDenseType strategy) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + PetscInt m = lmvm->m; + PetscInt h, local_n; + 
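/*
 * Illustrative aside (not part of the patch): the two MatLMVMDenseType
 * strategies handled above compute the same triangular solve. REORDER keeps
 * the small matrix in history order and rotates the right-hand side before a
 * single plain trsv; INPLACE leaves everything in recycle order and uses the
 * blocked cyclic solve. Host sketch of the REORDER path (helper names are
 * illustrative; cyclic_shift_left and solve_cyclic_upper are sketched in the
 * earlier asides):
 */
static void upper_solve(int N, const double *A, int lda, double *x)
{
  for (int j = N - 1; j >= 0; j--) { /* standard back substitution, column-major */
    x[j] /= A[j + j * lda];
    for (int i = 0; i < j; i++) x[i] -= A[i + j * lda] * x[j];
  }
}
/*
 * REORDER: cyclic_shift_left(m, o, x, xh); upper_solve(m, A_history, lda, xh); then rotate back.
 * INPLACE: solve_cyclic_upper(m, o, A_recycle, lda, x); with no data movement at all.
 */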
PetscInt oldest_index; + PetscInt lda; + PetscScalar *x; + PetscMemType memtype_r, memtype_x; + const PetscScalar *A; + + PetscFunctionBegin; + h = num_updates - oldest_update(m, num_updates); + if (!h) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(VecGetLocalSize(X, &local_n)); + PetscCall(VecGetArrayAndMemType(X, &x, &memtype_x)); + PetscCall(MatDenseGetArrayReadAndMemType(Amat, &A, &memtype_r)); + if (!local_n) { + PetscCall(MatDenseRestoreArrayReadAndMemType(Amat, &A)); + PetscCall(VecRestoreArrayAndMemType(X, &x)); + PetscFunctionReturn(PETSC_SUCCESS); + } + PetscAssert(memtype_x == memtype_r, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incompatible device pointers"); + PetscCall(MatDenseGetLDA(Amat, &lda)); + oldest_index = recycle_index(m, oldest_update(m, num_updates)); + PetscCall(MatUpperTriangularSolveInPlace_Internal(strategy, memtype_x, hermitian_transpose, h, oldest_index, A, lda, x, 1)); + PetscCall(VecRestoreArrayWriteAndMemType(X, &x)); + PetscCall(MatDenseRestoreArrayReadAndMemType(Amat, &A)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* Shifts R[end-m_keep:end,end-m_keep:end] to R[0:m_keep, 0:m_keep] */ + +PETSC_INTERN PetscErrorCode MatMove_LR3(Mat B, Mat R, PetscInt m_keep) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + PetscInt M; + Mat mat_local, local_sub, local_temp, temp_sub; + + PetscFunctionBegin; + if (!lqn->temp_mat) PetscCall(MatDuplicate(R, MAT_SHARE_NONZERO_PATTERN, &lqn->temp_mat)); + PetscCall(MatGetLocalSize(R, &M, NULL)); + if (M == 0) PetscFunctionReturn(PETSC_SUCCESS); + + PetscCall(MatDenseGetLocalMatrix(R, &mat_local)); + PetscCall(MatDenseGetLocalMatrix(lqn->temp_mat, &local_temp)); + PetscCall(MatDenseGetSubMatrix(mat_local, lmvm->m - m_keep, lmvm->m, lmvm->m - m_keep, lmvm->m, &local_sub)); + PetscCall(MatDenseGetSubMatrix(local_temp, lmvm->m - m_keep, lmvm->m, lmvm->m - m_keep, lmvm->m, &temp_sub)); + PetscCall(MatCopy(local_sub, temp_sub, SAME_NONZERO_PATTERN)); + PetscCall(MatDenseRestoreSubMatrix(mat_local, &local_sub)); + PetscCall(MatDenseGetSubMatrix(mat_local, 0, m_keep, 0, m_keep, &local_sub)); + PetscCall(MatCopy(temp_sub, local_sub, SAME_NONZERO_PATTERN)); + PetscCall(MatDenseRestoreSubMatrix(mat_local, &local_sub)); + PetscCall(MatDenseRestoreSubMatrix(local_temp, &temp_sub)); + PetscFunctionReturn(PETSC_SUCCESS); +} diff --git a/src/ksp/ksp/utils/lmvm/dense/denseqn.c b/src/ksp/ksp/utils/lmvm/dense/denseqn.c new file mode 100644 index 00000000000..d07f04ff2ba --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/dense/denseqn.c @@ -0,0 +1,1670 @@ +#include <../src/ksp/ksp/utils/lmvm/dense/denseqn.h> /*I "petscksp.h" I*/ +#include <../src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.h> +#include +#include +#include +#include +#include +#include +#include +#include + +static PetscErrorCode MatMult_LMVMDQN(Mat, Vec, Vec); +static PetscErrorCode MatMult_LMVMDBFGS(Mat, Vec, Vec); +static PetscErrorCode MatMult_LMVMDDFP(Mat, Vec, Vec); +static PetscErrorCode MatSolve_LMVMDQN(Mat, Vec, Vec); +static PetscErrorCode MatSolve_LMVMDBFGS(Mat, Vec, Vec); +static PetscErrorCode MatSolve_LMVMDDFP(Mat, Vec, Vec); + +static inline PetscInt recycle_index(PetscInt m, PetscInt idx) +{ + return idx % m; +} + +static inline PetscInt history_index(PetscInt m, PetscInt num_updates, PetscInt idx) +{ + return (idx - num_updates) + PetscMin(m, num_updates); +} + +static inline PetscInt oldest_update(PetscInt m, PetscInt idx) +{ + return PetscMax(0, idx - m); +} + +static PetscErrorCode MatView_LMVMDQN(Mat B, PetscViewer pv) +{ + Mat_LMVM *lmvm = 
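/*
 * Illustrative aside (not part of the patch): MatMove_LR3 above copies the
 * trailing m_keep-by-m_keep principal block of an m-by-m matrix into its
 * leading position (used when history order evicts the oldest updates). The
 * implementation stages through temp_mat, which avoids overlapping
 * MatDenseGetSubMatrix regions; on a plain column-major array the copy can be
 * done in place, since every source index is strictly larger than its
 * destination when traversed in increasing order:
 */
static void move_lr3(int m, int m_keep, double *R, int lda)
{
  int off = m - m_keep;

  for (int j = 0; j < m_keep; j++)
    for (int i = 0; i < m_keep; i++) R[i + j * lda] = R[(i + off) + (j + off) * lda];
}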
(Mat_LMVM *)B->data; + Mat_DQN *ldfp = (Mat_DQN *)lmvm->ctx; + + PetscBool isascii; + + PetscFunctionBegin; + PetscCall(PetscObjectTypeCompare((PetscObject)pv, PETSCVIEWERASCII, &isascii)); + PetscCall(MatView_LMVM(B, pv)); + if (!(lmvm->J0 || lmvm->user_pc || lmvm->user_ksp || lmvm->user_scale)) { PetscCall(MatView(ldfp->diag_qn, pv)); } + if (isascii) { PetscCall(PetscViewerASCIIPrintf(pv, "Counts: S x : %" PetscInt_FMT ", S^T x : %" PetscInt_FMT ", Y x : %" PetscInt_FMT ", Y^T x: %" PetscInt_FMT "\n", ldfp->S_count, ldfp->St_count, ldfp->Y_count, ldfp->Yt_count)); } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatReset_LMVMDQN(Mat, PetscBool); +static PetscErrorCode MatAllocate_LMVMDQN(Mat B, Vec X, Vec F) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + PetscBool is_dbfgs, is_ddfp, is_dqn, same, allocate = PETSC_FALSE; + VecType vec_type; + PetscInt m, n, M, N; + MPI_Comm comm = PetscObjectComm((PetscObject)B); + + PetscFunctionBegin; + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDBFGS, &is_dbfgs)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDDFP, &is_ddfp)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDQN, &is_dqn)); + + if (lmvm->allocated) { + PetscCall(VecGetType(X, &vec_type)); + PetscCall(PetscObjectTypeCompare((PetscObject)lmvm->Xprev, vec_type, &same)); + if (!same) { + /* Given X vector has a different type than allocated X-type data structures. + We need to destroy all of this and duplicate again out of the given vector. */ + allocate = PETSC_TRUE; + PetscCall(MatReset_LMVMDQN(B, PETSC_TRUE)); + } else { + VecCheckMatCompatible(B, X, 2, F, 3); + } + } else { + allocate = PETSC_TRUE; + } + if (allocate) { + PetscCall(VecGetLocalSize(X, &n)); + PetscCall(VecGetSize(X, &N)); + PetscCall(VecGetLocalSize(F, &m)); + PetscCall(VecGetSize(F, &M)); + PetscCheck(N == M, comm, PETSC_ERR_ARG_SIZ, "Incorrect problem sizes! dim(X) not equal to dim(F)"); + PetscCall(MatSetSizes(B, m, n, M, N)); + PetscCall(PetscLayoutSetUp(B->rmap)); + PetscCall(PetscLayoutSetUp(B->cmap)); + PetscCall(VecDuplicate(X, &lmvm->Xprev)); + PetscCall(VecDuplicate(F, &lmvm->Fprev)); + if (lmvm->m > 0) { + PetscMPIInt rank; + PetscInt m, M; + + PetscCallMPI(MPI_Comm_rank(comm, &rank)); + M = lmvm->m; + m = (rank == 0) ? 
M : 0; + + /* For DBFGS: Create data needed for MatSolve() eagerly; data needed for MatMult() will be created on demand + * For DDFP : Create data needed for MatMult() eagerly; data needed for MatSolve() will be created on demand + * For DQN : Create all data eagerly */ + PetscCall(VecGetType(X, &vec_type)); + PetscCall(MatCreateDenseFromVecType(comm, vec_type, n, m, N, M, -1, NULL, &lqn->Sfull)); + PetscCall(MatDuplicate(lqn->Sfull, MAT_SHARE_NONZERO_PATTERN, &lqn->Yfull)); + if (is_dqn) { + PetscCall(MatCreateDenseFromVecType(comm, vec_type, m, m, M, M, -1, NULL, &lqn->StY_triu)); + PetscCall(MatCreateDenseFromVecType(comm, vec_type, m, m, M, M, -1, NULL, &lqn->YtS_triu)); + PetscCall(MatCreateVecs(lqn->StY_triu, &lqn->diag_vec, &lqn->rwork1)); + PetscCall(MatCreateVecs(lqn->StY_triu, &lqn->rwork2, &lqn->rwork3)); + } else if (is_ddfp) { + PetscCall(MatCreateDenseFromVecType(comm, vec_type, m, m, M, M, -1, NULL, &lqn->YtS_triu)); + PetscCall(MatDuplicate(lqn->Sfull, MAT_SHARE_NONZERO_PATTERN, &lqn->HY)); + PetscCall(MatCreateVecs(lqn->YtS_triu, &lqn->diag_vec, &lqn->rwork1)); + PetscCall(MatCreateVecs(lqn->YtS_triu, &lqn->rwork2, &lqn->rwork3)); + } else if (is_dbfgs) { + PetscCall(MatCreateDenseFromVecType(comm, vec_type, m, m, M, M, -1, NULL, &lqn->StY_triu)); + PetscCall(MatDuplicate(lqn->Sfull, MAT_SHARE_NONZERO_PATTERN, &lqn->BS)); + PetscCall(MatCreateVecs(lqn->StY_triu, &lqn->diag_vec, &lqn->rwork1)); + PetscCall(MatCreateVecs(lqn->StY_triu, &lqn->rwork2, &lqn->rwork3)); + } else { + SETERRQ(PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_INCOMP, "MatAllocate_LMVMDQN is only available for dense derived types. (DBFGS, DDFP, DQN"); + } + /* initialize StY_triu and YtS_triu to identity, if they exist, so it is invertible */ + if (lqn->StY_triu) { + PetscCall(MatZeroEntries(lqn->StY_triu)); + PetscCall(MatShift(lqn->StY_triu, 1.0)); + } + if (lqn->YtS_triu) { + PetscCall(MatZeroEntries(lqn->YtS_triu)); + PetscCall(MatShift(lqn->YtS_triu, 1.0)); + } + if (lqn->use_recursive && (is_dbfgs || is_ddfp)) { + PetscCall(VecDuplicateVecs(X, lmvm->m, &lqn->PQ)); + PetscCall(VecDuplicate(lmvm->Xprev, &lqn->column_work2)); + PetscCall(PetscMalloc1(lmvm->m, &lqn->yts)); + if (is_dbfgs) { + PetscCall(PetscMalloc1(lmvm->m, &lqn->stp)); + } else if (is_ddfp) { + PetscCall(PetscMalloc1(lmvm->m, &lqn->ytq)); + } + } + PetscCall(VecDuplicate(lqn->rwork2, &lqn->cyclic_work_vec)); + PetscCall(VecZeroEntries(lqn->rwork1)); + PetscCall(VecZeroEntries(lqn->rwork2)); + PetscCall(VecZeroEntries(lqn->rwork3)); + PetscCall(VecZeroEntries(lqn->diag_vec)); + } + PetscCall(VecDuplicate(lmvm->Xprev, &lqn->column_work)); + if (!(lmvm->J0 || lmvm->user_pc || lmvm->user_ksp || lmvm->user_scale)) { PetscCall(MatLMVMAllocate(lqn->diag_qn, X, F)); } + lmvm->allocated = PETSC_TRUE; + B->preallocated = PETSC_TRUE; + B->assembled = PETSC_TRUE; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSetUp_LMVMDQN(Mat B) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscInt M, N; + MPI_Comm comm = PetscObjectComm((PetscObject)B); + Vec Xtmp, Ftmp; + + PetscFunctionBegin; + PetscCall(MatGetSize(B, &M, &N)); + PetscCheck(M != 0 && N != 0, comm, PETSC_ERR_ORDER, "MatSetSizes() must be called before MatSetUp()"); + if (!lmvm->allocated) { + PetscCall(PetscLayoutSetUp(B->rmap)); + PetscCall(PetscLayoutSetUp(B->cmap)); + PetscCall(MatCreateVecs(B, &Xtmp, &Ftmp)); + if (lmvm->m > 0) PetscCall(PetscMalloc1(lmvm->m, &lqn->workscalar)); + PetscCall(MatAllocate_LMVMDQN(B, Xtmp, 
Ftmp)); + PetscCall(VecDestroy(&Xtmp)); + PetscCall(VecDestroy(&Ftmp)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSetFromOptions_LMVMDQN_Private(Mat B, PetscOptionItems *PetscOptionsObject) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + PetscBool is_dbfgs, is_ddfp, is_dqn; + + PetscFunctionBegin; + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDBFGS, &is_dbfgs)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDDFP, &is_ddfp)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDQN, &is_dqn)); + if (is_dqn) { + PetscCall(PetscOptionsEnum("-mat_lqn_type", "Implementation options for L-QN", "MatLMVMDenseType", MatLMVMDenseTypes, (PetscEnum)lqn->strategy, (PetscEnum *)&lqn->strategy, NULL)); + PetscCall(PetscOptionsEnum("-mat_lmvm_scale_type", "(developer) scaling type applied to J0", "MatLMVMSymBrdnScaleType", MatLMVMSymBroydenScaleTypes, (PetscEnum)lqn->scale_type, (PetscEnum *)&lqn->scale_type, NULL)); + } else if (is_dbfgs) { + PetscCall(PetscOptionsBool("-mat_lbfgs_recursive", "Use recursive formulation for MatMult_LMVMDBFGS, instead of Cholesky", "", lqn->use_recursive, &lqn->use_recursive, NULL)); + PetscCall(PetscOptionsEnum("-mat_lbfgs_type", "Implementation options for L-BFGS", "MatLMVMDenseType", MatLMVMDenseTypes, (PetscEnum)lqn->strategy, (PetscEnum *)&lqn->strategy, NULL)); + PetscCall(PetscOptionsEnum("-mat_lmvm_scale_type", "(developer) scaling type applied to J0", "MatLMVMSymBrdnScaleType", MatLMVMSymBroydenScaleTypes, (PetscEnum)lqn->scale_type, (PetscEnum *)&lqn->scale_type, NULL)); + } else if (is_ddfp) { + PetscCall(PetscOptionsBool("-mat_ldfp_recursive", "Use recursive formulation for MatSolve_LMVMDDFP, instead of Cholesky", "", lqn->use_recursive, &lqn->use_recursive, NULL)); + PetscCall(PetscOptionsEnum("-mat_ldfp_type", "Implementation options for L-DFP", "MatLMVMDenseType", MatLMVMDenseTypes, (PetscEnum)lqn->strategy, (PetscEnum *)&lqn->strategy, NULL)); + PetscCall(PetscOptionsEnum("-mat_lmvm_scale_type", "(developer) scaling type applied to J0", "MatLMVMSymBrdnScaleType", MatLMVMSymBroydenScaleTypes, (PetscEnum)lqn->scale_type, (PetscEnum *)&lqn->scale_type, NULL)); + } else { + SETERRQ(PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_INCOMP, "MatSetFromOptions_LMVMDQN is only available for dense derived types. 
(DBFGS, DDFP, DQN"); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSetFromOptions_LMVMDQN(Mat B, PetscOptionItems *PetscOptionsObject) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscFunctionBegin; + PetscCall(MatSetFromOptions_LMVM(B, PetscOptionsObject)); + PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "Dense QN method (MATLMVMDQN,MATLMVMDBFGS,MATLMVMDDFP)", NULL); + PetscCall(MatSetFromOptions_LMVMDQN_Private(B, PetscOptionsObject)); + PetscOptionsEnd(); + lqn->allocated = PETSC_FALSE; + if (lqn->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL) { + const char *prefix; + + PetscCall(MatGetOptionsPrefix(B, &prefix)); + PetscCall(MatSetOptionsPrefix(lqn->diag_qn, prefix)); + PetscCall(MatAppendOptionsPrefix(lqn->diag_qn, "J0_")); + PetscCall(MatSetFromOptions(lqn->diag_qn)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLMVMDQNResetDestructive(Mat B) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscFunctionBegin; + PetscCall(MatDestroy(&lqn->Sfull)); + PetscCall(MatDestroy(&lqn->Yfull)); + PetscCall(MatDestroy(&lqn->HY)); + PetscCall(MatDestroy(&lqn->BS)); + PetscCall(MatDestroy(&lqn->StY_triu)); + PetscCall(MatDestroy(&lqn->YtS_triu)); + PetscCall(VecDestroy(&lqn->StFprev)); + PetscCall(VecDestroy(&lqn->Fprev_ref)); + lqn->Fprev_state = 0; + PetscCall(MatDestroy(&lqn->YtS_triu_strict)); + PetscCall(MatDestroy(&lqn->StY_triu_strict)); + PetscCall(MatDestroy(&lqn->StBS)); + PetscCall(MatDestroy(&lqn->YtHY)); + PetscCall(MatDestroy(&lqn->J)); + PetscCall(MatDestroy(&lqn->temp_mat)); + PetscCall(VecDestroy(&lqn->diag_vec)); + PetscCall(VecDestroy(&lqn->diag_vec_recycle_order)); + PetscCall(VecDestroy(&lqn->inv_diag_vec)); + PetscCall(VecDestroy(&lqn->column_work)); + PetscCall(VecDestroy(&lqn->column_work2)); + PetscCall(VecDestroy(&lqn->rwork1)); + PetscCall(VecDestroy(&lqn->rwork2)); + PetscCall(VecDestroy(&lqn->rwork3)); + PetscCall(VecDestroy(&lqn->rwork2_local)); + PetscCall(VecDestroy(&lqn->rwork3_local)); + PetscCall(VecDestroy(&lqn->cyclic_work_vec)); + PetscCall(VecDestroyVecs(lmvm->m, &lqn->PQ)); + PetscCall(PetscFree(lqn->stp)); + PetscCall(PetscFree(lqn->yts)); + PetscCall(PetscFree(lqn->ytq)); + lqn->allocated = PETSC_FALSE; + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatDestroy_LMVMDQN(Mat B) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscFunctionBegin; + PetscCall(MatLMVMDQNResetDestructive(B)); + PetscCall(PetscFree(lqn->workscalar)); + PetscCall(MatDestroy(&lqn->diag_qn)); + PetscCall(PetscFree(lmvm->ctx)); + PetscCall(MatDestroy_LMVM(B)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatReset_LMVMDQN(Mat B, PetscBool destructive) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscFunctionBegin; + lqn->watchdog = 0; + lqn->needPQ = PETSC_TRUE; + if (lqn->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL) { + Mat_LMVM *dbase = (Mat_LMVM *)lqn->diag_qn->data; + Mat_DiagBrdn *diagctx = (Mat_DiagBrdn *)dbase->ctx; + if (!diagctx->allocated) PetscCall(MatLMVMAllocate(lqn->diag_qn, lmvm->Xprev, lmvm->Fprev)); + PetscCall(MatLMVMReset(lqn->diag_qn, destructive)); + } + if (lqn->Sfull) PetscCall(MatZeroEntries(lqn->Sfull)); + if (lqn->Yfull) PetscCall(MatZeroEntries(lqn->Yfull)); + if (lqn->BS) PetscCall(MatZeroEntries(lqn->BS)); + if (lqn->HY) PetscCall(MatZeroEntries(lqn->HY)); + if 
(lqn->StY_triu) { /* Set to identity by default so it is invertible */ + PetscCall(MatZeroEntries(lqn->StY_triu)); + PetscCall(MatShift(lqn->StY_triu, 1.0)); + } + if (lqn->YtS_triu) { + PetscCall(MatZeroEntries(lqn->YtS_triu)); + PetscCall(MatShift(lqn->YtS_triu, 1.0)); + } + if (lqn->YtS_triu_strict) PetscCall(MatZeroEntries(lqn->YtS_triu_strict)); + if (lqn->StY_triu_strict) PetscCall(MatZeroEntries(lqn->StY_triu_strict)); + if (lqn->StBS) { + PetscCall(MatZeroEntries(lqn->StBS)); + PetscCall(MatShift(lqn->StBS, 1.0)); + } + if (lqn->YtHY) { + PetscCall(MatZeroEntries(lqn->YtHY)); + PetscCall(MatShift(lqn->YtHY, 1.0)); + } + if (lqn->Fprev_ref) PetscCall(VecDestroy(&lqn->Fprev_ref)); + lqn->Fprev_state = 0; + if (lqn->StFprev) PetscCall(VecZeroEntries(lqn->StFprev)); + if (destructive) { PetscCall(MatLMVMDQNResetDestructive(B)); } + lqn->num_updates = 0; + lqn->num_mult_updates = 0; + PetscCall(MatReset_LMVM(B, destructive)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatUpdate_LMVMDQN(Mat B, Vec X, Vec F) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + Mat_LMVM *dbase = (Mat_LMVM *)lqn->diag_qn->data; + Mat_DiagBrdn *diagctx = (Mat_DiagBrdn *)dbase->ctx; + + PetscBool is_ddfp, is_dbfgs, is_dqn; + PetscDeviceContext dctx; + + PetscFunctionBegin; + if (!lmvm->m) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDBFGS, &is_dbfgs)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDDFP, &is_ddfp)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDQN, &is_dqn)); + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + if (lmvm->prev_set) { + Vec FX[2]; + PetscScalar dotFX[2]; + PetscScalar stFprev; + PetscScalar curvature, yTy; + PetscReal curvtol; + Vec workvec1; + + /* Compute the new (S = X - Xprev) and (Y = F - Fprev) vectors */ + PetscCall(VecAYPX(lmvm->Xprev, -1.0, X)); + /* Test if the updates can be accepted */ + FX[0] = lmvm->Fprev; /* dotFX[0] = s^T Fprev */ + FX[1] = F; /* dotFX[1] = s^T F */ + PetscCall(VecMDot(lmvm->Xprev, 2, FX, dotFX)); + PetscCall(VecAYPX(lmvm->Fprev, -1.0, F)); + PetscCall(VecDot(lmvm->Fprev, lmvm->Fprev, &yTy)); + stFprev = PetscConj(dotFX[0]); + curvature = PetscConj(dotFX[1] - dotFX[0]); /* s^T y */ + if (PetscRealPart(yTy) < lmvm->eps) { + curvtol = 0.0; + } else { + curvtol = lmvm->eps * PetscRealPart(yTy); + } + if (PetscRealPart(curvature) > curvtol) { + PetscInt m = lmvm->m; + PetscInt k = lqn->num_updates; + PetscInt h_new = k + 1 - oldest_update(m, k + 1); + PetscInt idx = recycle_index(m, k); + PetscInt i, old_k; + + /* Update is good, accept it */ + lmvm->nupdates++; + lqn->num_updates++; + lqn->watchdog = 0; + lqn->needPQ = PETSC_TRUE; + old_k = lmvm->k; + + if (lmvm->k != m - 1) { + lmvm->k++; + } else if (lqn->strategy == MAT_LMVM_DENSE_REORDER) { + if (is_dqn) { + PetscCall(MatMove_LR3(B, lqn->StY_triu, m - 1)); + PetscCall(MatMove_LR3(B, lqn->YtS_triu, m - 1)); + } else if (is_dbfgs) { + PetscCall(MatMove_LR3(B, lqn->StY_triu, m - 1)); + } else if (is_ddfp) { + PetscCall(MatMove_LR3(B, lqn->YtS_triu, m - 1)); + } else { + SETERRQ(PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_INCOMP, "MatUpdate_LMVMDQN is only available for dense derived types. 
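/*
 * Illustrative aside (not part of the patch): the VecMDot above fuses two
 * reductions into one: with s = X - Xprev, the curvature is
 * s^T y = s^T (F - Fprev) = dotFX[1] - dotFX[0], and dotFX[0] = s^T Fprev is
 * recycled as stFprev for the Byrd-Nocedal-Schnabel shortcut used later.
 * Plain-form sketch of the acceptance test (real arithmetic, eps standing in
 * for lmvm->eps):
 */
static int lmvm_accept_update(int n, const double *s, const double *y, double eps)
{
  double sty = 0.0, yty = 0.0;

  for (int i = 0; i < n; i++) {
    sty += s[i] * y[i];
    yty += y[i] * y[i];
  }
  /* reject unless the curvature s^T y is safely positive relative to |y|^2 */
  return sty > ((yty < eps) ? 0.0 : eps * yty);
}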
(DBFGS, DDFP, DQN"); + } + } + + if (lqn->use_recursive && (is_dbfgs || is_ddfp)) { + if (old_k == lmvm->k) { + for (i = 0; i <= lmvm->k - 1; ++i) { + lqn->yts[i] = lqn->yts[i + 1]; + if (is_dbfgs) { + lqn->stp[i] = lqn->stp[i + 1]; + } else if (is_ddfp) { + lqn->ytq[i] = lqn->ytq[i + 1]; + } + } + } + lqn->yts[lmvm->k] = PetscRealPart(curvature); + } + + /* First update the S^T matrix */ + PetscCall(MatDenseGetColumnVecWrite(lqn->Sfull, idx, &workvec1)); + PetscCall(VecCopy(lmvm->Xprev, workvec1)); + PetscCall(MatDenseRestoreColumnVecWrite(lqn->Sfull, idx, &workvec1)); + + /* Now repeat update for the Y^T matrix */ + PetscCall(MatDenseGetColumnVecWrite(lqn->Yfull, idx, &workvec1)); + PetscCall(VecCopy(lmvm->Fprev, workvec1)); + PetscCall(MatDenseRestoreColumnVecWrite(lqn->Yfull, idx, &workvec1)); + + if (is_dqn || is_dbfgs) { /* implement the scheme of Byrd, Nocedal, and Schnabel to save a MatMultTranspose call in the common case the * + * H_k is immediately applied to F after begin updated. The S^T y computation can be split up as S^T (F - F_prev) */ + PetscInt local_n; + PetscScalar *StFprev; + PetscMemType memtype; + PetscInt StYidx; + + StYidx = (lqn->strategy == MAT_LMVM_DENSE_REORDER) ? history_index(m, lqn->num_updates, k) : idx; + if (!lqn->StFprev) PetscCall(VecDuplicate(lqn->rwork1, &lqn->StFprev)); + PetscCall(VecGetLocalSize(lqn->StFprev, &local_n)); + PetscCall(VecGetArrayAndMemType(lqn->StFprev, &StFprev, &memtype)); + if (local_n) { + if (PetscMemTypeHost(memtype)) { + StFprev[idx] = stFprev; + } else { + PetscCall(PetscDeviceRegisterMemory(&stFprev, PETSC_MEMTYPE_HOST, 1 * sizeof(stFprev))); + PetscCall(PetscDeviceRegisterMemory(StFprev, memtype, local_n * sizeof(*StFprev))); + PetscCall(PetscDeviceArrayCopy(dctx, &StFprev[idx], &stFprev, 1)); + } + } + PetscCall(VecRestoreArrayAndMemType(lqn->StFprev, &StFprev)); + + { + Vec this_sy_col; + /* Now StFprev is updated for the new S vector. Write -StFprev into the appropriate row */ + PetscCall(MatDenseGetColumnVecWrite(lqn->StY_triu, StYidx, &this_sy_col)); + PetscCall(VecAXPBY(this_sy_col, -1.0, 0.0, lqn->StFprev)); + + /* Now compute the new StFprev */ + PetscCall(MatMultHermitianTransposeColumnRange(lqn->Sfull, F, lqn->StFprev, 0, h_new)); + lqn->St_count++; + + /* Now add StFprev: this_sy_col == S^T (F - Fprev) == S^T y */ + PetscCall(VecAXPY(this_sy_col, 1.0, lqn->StFprev)); + + if (lqn->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, this_sy_col, lqn->num_updates, lqn->cyclic_work_vec)); + PetscCall(MatDenseRestoreColumnVecWrite(lqn->StY_triu, StYidx, &this_sy_col)); + } + } + + if (is_ddfp || is_dqn) { + PetscInt YtSidx; + + YtSidx = (lqn->strategy == MAT_LMVM_DENSE_REORDER) ? history_index(m, lqn->num_updates, k) : idx; + + { + Vec this_ys_col; + + PetscCall(MatDenseGetColumnVecWrite(lqn->YtS_triu, YtSidx, &this_ys_col)); + PetscCall(MatMultHermitianTransposeColumnRange(lqn->Yfull, lmvm->Xprev, this_ys_col, 0, h_new)); + lqn->Yt_count++; + + if (lqn->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, this_ys_col, lqn->num_updates, lqn->cyclic_work_vec)); + PetscCall(MatDenseRestoreColumnVecWrite(lqn->YtS_triu, YtSidx, &this_ys_col)); + } + } + + if (is_dbfgs || is_dqn) { + PetscCall(MatGetDiagonal(lqn->StY_triu, lqn->diag_vec)); + } else if (is_ddfp) { + PetscCall(MatGetDiagonal(lqn->YtS_triu, lqn->diag_vec)); + } else { + SETERRQ(PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_INCOMP, "MatUpdate_LMVMDQN is only available for dense derived types. 
(DBFGS, DDFP, DQN"); + } + + if (lqn->strategy == MAT_LMVM_DENSE_REORDER) { + if (!lqn->diag_vec_recycle_order) PetscCall(VecDuplicate(lqn->diag_vec, &lqn->diag_vec_recycle_order)); + PetscCall(VecCopy(lqn->diag_vec, lqn->diag_vec_recycle_order)); + PetscCall(VecHistoryOrderToRecycleOrder(B, lqn->diag_vec_recycle_order, lqn->num_updates, lqn->cyclic_work_vec)); + } else { + if (!lqn->diag_vec_recycle_order) { + PetscCall(PetscObjectReference((PetscObject)lqn->diag_vec)); + lqn->diag_vec_recycle_order = lqn->diag_vec; + } + } + + if (lqn->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_SCALAR) { + PetscScalar sTy = curvature; + + diagctx->sigma = PetscRealPart(sTy) / PetscRealPart(yTy); + } else if (lqn->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL) { + PetscScalar sTy = curvature; + PetscScalar sTDs, yTDy; + + if (!diagctx->invD) { + PetscCall(VecDuplicate(lmvm->Fprev, &diagctx->invD)); + PetscCall(VecSet(diagctx->invD, PetscRealPart(sTy) / PetscRealPart(yTy))); + } + if (!diagctx->U) PetscCall(VecDuplicate(lmvm->Fprev, &diagctx->U)); + if (!diagctx->V) PetscCall(VecDuplicate(lmvm->Fprev, &diagctx->V)); + if (!diagctx->W) PetscCall(VecDuplicate(lmvm->Fprev, &diagctx->W)); + + /* diagonal Broyden */ + PetscCall(VecReciprocal(diagctx->invD)); + PetscCall(VecPointwiseMult(diagctx->V, diagctx->invD, lmvm->Xprev)); + PetscCall(VecPointwiseMult(diagctx->U, lmvm->Fprev, lmvm->Fprev)); + if (PetscDefined(USE_COMPLEX)) PetscCall(VecAbs(diagctx->U)); + PetscCall(VecAXPY(diagctx->invD, 1.0 / sTy, diagctx->U)); + PetscCall(VecDot(diagctx->V, lmvm->Xprev, &sTDs)); + if (PetscDefined(USE_COMPLEX)) PetscCall(VecAbs(diagctx->V)); + PetscCall(VecPointwiseMult(diagctx->V, diagctx->V, diagctx->V)); + PetscCall(VecAXPY(diagctx->invD, -1.0 / PetscMax(PetscRealPart(sTDs), diagctx->tol), diagctx->V)); + PetscCall(VecReciprocal(diagctx->invD)); + PetscCall(VecAbs(diagctx->invD)); + PetscCall(VecDot(diagctx->U, diagctx->invD, &yTDy)); + PetscCall(VecScale(diagctx->invD, PetscRealPart(sTy) / PetscRealPart(yTDy))); + } + } else { + /* Update is bad, skip it */ + ++lmvm->nrejects; + ++lqn->watchdog; + PetscInt m = lmvm->m; + PetscInt k = lqn->num_updates; + PetscInt h = k - oldest_update(m, k); + + /* we still have to maintain StFprev */ + if (!lqn->StFprev) PetscCall(VecDuplicate(lqn->rwork1, &lqn->StFprev)); + PetscCall(MatMultHermitianTransposeColumnRange(lqn->Sfull, F, lqn->StFprev, 0, h)); + lqn->St_count++; + } + } else { + switch (lqn->scale_type) { + case MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL: + PetscCall(VecSet(diagctx->invD, diagctx->delta)); + break; + case MAT_LMVM_SYMBROYDEN_SCALE_SCALAR: + diagctx->sigma = diagctx->delta; + break; + default: + diagctx->sigma = 1.0; + break; + } + } + + if (lqn->watchdog > lqn->max_seq_rejects) PetscCall(MatLMVMReset(B, PETSC_FALSE)); + + /* Save the solution and function to be used in the next update */ + PetscCall(VecCopy(X, lmvm->Xprev)); + PetscCall(VecCopy(F, lmvm->Fprev)); + PetscCall(PetscObjectReference((PetscObject)F)); + PetscCall(VecDestroy(&lqn->Fprev_ref)); + lqn->Fprev_ref = F; + PetscCall(PetscObjectStateGet((PetscObject)F, &lqn->Fprev_state)); + lmvm->prev_set = PETSC_TRUE; + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatDestroyThenCopy(Mat src, Mat *dst) +{ + PetscFunctionBegin; + PetscCall(MatDestroy(dst)); + if (src) { PetscCall(MatDuplicate(src, MAT_COPY_VALUES, dst)); } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode VecDestroyThenCopy(Vec src, Vec *dst) +{ + PetscFunctionBegin; + PetscCall(VecDestroy(dst)); + if (src) 
{
+    PetscCall(VecDuplicate(src, dst));
+    PetscCall(VecCopy(src, *dst));
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode MatCopy_LMVMDQN(Mat B, Mat M, MatStructure str)
+{
+  Mat_LMVM *bdata = (Mat_LMVM *)B->data;
+  Mat_DQN  *blqn  = (Mat_DQN *)bdata->ctx;
+  Mat_LMVM *mdata = (Mat_LMVM *)M->data;
+  Mat_DQN  *mlqn  = (Mat_DQN *)mdata->ctx;
+  PetscInt  i;
+  PetscBool is_dbfgs, is_ddfp, is_dqn;
+
+  PetscFunctionBegin;
+  mlqn->num_updates      = blqn->num_updates;
+  mlqn->num_mult_updates = blqn->num_mult_updates;
+  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDBFGS, &is_dbfgs));
+  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDDFP, &is_ddfp));
+  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDQN, &is_dqn));
+  PetscCall(MatDestroyThenCopy(blqn->Sfull, &mlqn->Sfull));
+  PetscCall(MatDestroyThenCopy(blqn->Yfull, &mlqn->Yfull));
+  PetscCall(MatDestroyThenCopy(blqn->HY, &mlqn->BS));
+  PetscCall(VecDestroyThenCopy(blqn->StFprev, &mlqn->StFprev));
+  PetscCall(MatDestroyThenCopy(blqn->StY_triu, &mlqn->StY_triu));
+  PetscCall(MatDestroyThenCopy(blqn->StY_triu_strict, &mlqn->StY_triu_strict));
+  PetscCall(MatDestroyThenCopy(blqn->YtS_triu, &mlqn->YtS_triu));
+  PetscCall(MatDestroyThenCopy(blqn->YtS_triu_strict, &mlqn->YtS_triu_strict));
+  PetscCall(MatDestroyThenCopy(blqn->YtHY, &mlqn->YtHY));
+  PetscCall(MatDestroyThenCopy(blqn->StBS, &mlqn->StBS));
+  PetscCall(MatDestroyThenCopy(blqn->J, &mlqn->J));
+  PetscCall(VecDestroyThenCopy(blqn->diag_vec, &mlqn->diag_vec));
+  PetscCall(VecDestroyThenCopy(blqn->diag_vec_recycle_order, &mlqn->diag_vec_recycle_order));
+  PetscCall(VecDestroyThenCopy(blqn->inv_diag_vec, &mlqn->inv_diag_vec));
+  if (blqn->use_recursive && (is_dbfgs || is_ddfp)) {
+    for (i = 0; i <= bdata->k; i++) {
+      PetscCall(VecDestroyThenCopy(blqn->PQ[i], &mlqn->PQ[i]));
+      mlqn->yts[i] = blqn->yts[i];
+      if (is_dbfgs) {
+        mlqn->stp[i] = blqn->stp[i];
+      } else if (is_ddfp) {
+        mlqn->ytq[i] = blqn->ytq[i];
+      }
+    }
+  }
+  mlqn->dense_type      = blqn->dense_type;
+  mlqn->strategy        = blqn->strategy;
+  mlqn->scale_type      = blqn->scale_type;
+  mlqn->S_count         = 0;
+  mlqn->St_count        = 0;
+  mlqn->Y_count         = 0;
+  mlqn->Yt_count        = 0;
+  mlqn->watchdog        = blqn->watchdog;
+  mlqn->max_seq_rejects = blqn->max_seq_rejects;
+  mlqn->allocated       = blqn->allocated;
+  mlqn->use_recursive   = blqn->use_recursive;
+  mlqn->needPQ          = blqn->needPQ;
+  PetscCall(PetscObjectReference((PetscObject)blqn->Fprev_ref));
+  PetscCall(VecDestroy(&mlqn->Fprev_ref));
+  mlqn->Fprev_ref   = blqn->Fprev_ref;
+  mlqn->Fprev_state = blqn->Fprev_state;
+  if (!(bdata->J0 || bdata->user_pc || bdata->user_ksp || bdata->user_scale)) { PetscCall(MatCopy(blqn->diag_qn, mlqn->diag_qn, SAME_NONZERO_PATTERN)); }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode MatMult_LMVMDQN(Mat B, Vec X, Vec Z)
+{
+  PetscFunctionBegin;
+  PetscCall(MatMult_LMVMDDFP(B, X, Z));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode MatSolve_LMVMDQN(Mat H, Vec F, Vec dX)
+{
+  PetscFunctionBegin;
+  PetscCall(MatSolve_LMVMDBFGS(H, F, dX));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+/*
+  This dense representation uses Davidon-Fletcher-Powell (DFP) for MatMult,
+  and Broyden-Fletcher-Goldfarb-Shanno (BFGS) for MatSolve. This implementation
+  avoids costly Cholesky factorization, at the cost of a duality gap: the
+  resulting MatMult and MatSolve are no longer exact inverses of one another.
+  Please refer to MatLMVMDDFP and MatLMVMDBFGS for more information.
+*/ +PetscErrorCode MatCreate_LMVMDQN(Mat B) +{ + Mat_LMVM *lmvm; + Mat_DQN *lqn; + + PetscFunctionBegin; + PetscCall(MatCreate_LMVM(B)); + PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATLMVMDQN)); + PetscCall(MatSetOption(B, MAT_SPD, PETSC_TRUE)); + PetscCall(MatSetOption(B, MAT_SPD_ETERNAL, PETSC_TRUE)); + B->ops->view = MatView_LMVMDQN; + B->ops->setup = MatSetUp_LMVMDQN; + B->ops->setfromoptions = MatSetFromOptions_LMVMDQN; + B->ops->destroy = MatDestroy_LMVMDQN; + + lmvm = (Mat_LMVM *)B->data; + lmvm->square = PETSC_TRUE; + lmvm->ops->allocate = MatAllocate_LMVMDQN; + lmvm->ops->reset = MatReset_LMVMDQN; + lmvm->ops->update = MatUpdate_LMVMDQN; + lmvm->ops->mult = MatMult_LMVMDQN; + lmvm->ops->solve = MatSolve_LMVMDQN; + lmvm->ops->copy = MatCopy_LMVMDQN; + + PetscCall(PetscNew(&lqn)); + lmvm->ctx = (void *)lqn; + lqn->allocated = PETSC_FALSE; + lqn->use_recursive = PETSC_FALSE; + lqn->needPQ = PETSC_FALSE; + lqn->watchdog = 0; + lqn->max_seq_rejects = lmvm->m / 2; + lqn->strategy = MAT_LMVM_DENSE_INPLACE; + lqn->scale_type = MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL; + + PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &lqn->diag_qn)); + PetscCall(MatSetType(lqn->diag_qn, MATLMVMDIAGBROYDEN)); + PetscCall(MatSetOptionsPrefix(lqn->diag_qn, "J0_")); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + MatCreateLMVMDQN - Creates a dense representation of the limited-memory + Quasi-Newton approximation to a Hessian. + + Collective + + Input Parameters: ++ comm - MPI communicator +. n - number of local rows for storage vectors +- N - global size of the storage vectors + + Output Parameter: +. B - the matrix + + Level: advanced + + Note: + It is recommended that one use the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()` + paradigm instead of this routine directly. 
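/*
 * Illustrative aside (not part of the patch): the creation paradigm the Note
 * above recommends, which also exposes the run-time options registered by this
 * type, such as -mat_lqn_type {reorder,inplace}:
 */
static PetscErrorCode CreateDQN(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, B));
  PetscCall(MatSetSizes(*B, n, n, N, N));
  PetscCall(MatSetType(*B, MATLMVMDQN));
  PetscCall(MatSetFromOptions(*B));
  PetscCall(MatSetUp(*B));
  PetscFunctionReturn(PETSC_SUCCESS);
}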
+ +.seealso: `MatCreate()`, `MATLMVM`, `MATLMVMDBFGS`, `MATLMVMDDFP`, `MatCreateLMVMDDFP()`, `MatCreateLMVMDBFGS()` +@*/ +PetscErrorCode MatCreateLMVMDQN(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) +{ + PetscFunctionBegin; + PetscCall(KSPInitializePackage()); + PetscCall(MatCreate(comm, B)); + PetscCall(MatSetSizes(*B, n, n, N, N)); + PetscCall(MatSetType(*B, MATLMVMDQN)); + PetscCall(MatSetUp(*B)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatDQNApplyJ0Fwd(Mat B, Vec X, Vec Z) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscFunctionBegin; + if (lmvm->J0 || lmvm->user_pc || lmvm->user_ksp || lmvm->user_scale) { + lqn->scale_type = MAT_LMVM_SYMBROYDEN_SCALE_USER; + PetscCall(MatLMVMApplyJ0Fwd(B, X, Z)); + } else { + Mat_LMVM *dbase = (Mat_LMVM *)lqn->diag_qn->data; + Mat_DiagBrdn *diagctx = (Mat_DiagBrdn *)dbase->ctx; + + switch (lqn->scale_type) { + case MAT_LMVM_SYMBROYDEN_SCALE_SCALAR: + PetscCall(VecAXPBY(Z, 1.0 / diagctx->sigma, 0.0, X)); + break; + case MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL: + PetscCall(VecPointwiseDivide(Z, X, diagctx->invD)); + break; + case MAT_LMVM_SYMBROYDEN_SCALE_NONE: + default: + PetscCall(VecCopy(X, Z)); + break; + } + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatDQNApplyJ0Inv(Mat B, Vec F, Vec dX) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscFunctionBegin; + if (lmvm->J0 || lmvm->user_pc || lmvm->user_ksp || lmvm->user_scale) { + lqn->scale_type = MAT_LMVM_SYMBROYDEN_SCALE_USER; + PetscCall(MatLMVMApplyJ0Inv(B, F, dX)); + } else { + Mat_LMVM *dbase = (Mat_LMVM *)lqn->diag_qn->data; + Mat_DiagBrdn *diagctx = (Mat_DiagBrdn *)dbase->ctx; + + switch (lqn->scale_type) { + case MAT_LMVM_SYMBROYDEN_SCALE_SCALAR: + PetscCall(VecAXPBY(dX, diagctx->sigma, 0.0, F)); + break; + case MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL: + PetscCall(VecPointwiseMult(dX, F, diagctx->invD)); + break; + case MAT_LMVM_SYMBROYDEN_SCALE_NONE: + default: + PetscCall(VecCopy(F, dX)); + break; + } + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* This is not Bunch-Kaufman LDLT: here L is strictly lower triangular part of STY */ +static PetscErrorCode MatGetLDLT(Mat B, Mat result) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lbfgs = (Mat_DQN *)lmvm->ctx; + PetscInt m_local; + + PetscFunctionBegin; + if (!lbfgs->temp_mat) PetscCall(MatDuplicate(lbfgs->YtS_triu_strict, MAT_SHARE_NONZERO_PATTERN, &lbfgs->temp_mat)); + PetscCall(MatCopy(lbfgs->YtS_triu_strict, lbfgs->temp_mat, SAME_NONZERO_PATTERN)); + PetscCall(MatDiagonalScale(lbfgs->temp_mat, lbfgs->inv_diag_vec, NULL)); + PetscCall(MatGetLocalSize(result, &m_local, NULL)); + // need to conjugate and conjugate again because we have MatTransposeMatMult but not MatHermitianTransposeMatMult() + PetscCall(MatConjugate(lbfgs->temp_mat)); + if (m_local) { + Mat temp_local, YtS_local, result_local; + PetscCall(MatDenseGetLocalMatrix(lbfgs->YtS_triu_strict, &YtS_local)); + PetscCall(MatDenseGetLocalMatrix(lbfgs->temp_mat, &temp_local)); + PetscCall(MatDenseGetLocalMatrix(result, &result_local)); + PetscCall(MatTransposeMatMult(YtS_local, temp_local, MAT_REUSE_MATRIX, PETSC_DEFAULT, &result_local)); + } + PetscCall(MatConjugate(result)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLMVMDBFGSUpdateMultData(Mat B) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lbfgs = (Mat_DQN *)lmvm->ctx; + PetscInt m = lmvm->m, m_local; + PetscInt k = lbfgs->num_updates; + PetscInt h = k - 
oldest_update(m, k); + PetscInt j_0; + PetscInt prev_oldest; + Mat J_local; + + PetscFunctionBegin; + if (!lbfgs->YtS_triu_strict) { + PetscCall(MatDuplicate(lbfgs->StY_triu, MAT_SHARE_NONZERO_PATTERN, &lbfgs->YtS_triu_strict)); + PetscCall(MatDestroy(&lbfgs->StBS)); + PetscCall(MatDuplicate(lbfgs->StY_triu, MAT_SHARE_NONZERO_PATTERN, &lbfgs->StBS)); + PetscCall(MatDestroy(&lbfgs->J)); + PetscCall(MatDuplicate(lbfgs->StY_triu, MAT_SHARE_NONZERO_PATTERN, &lbfgs->J)); + PetscCall(MatDestroy(&lbfgs->BS)); + PetscCall(MatDuplicate(lbfgs->Yfull, MAT_SHARE_NONZERO_PATTERN, &lbfgs->BS)); + PetscCall(MatShift(lbfgs->StBS, 1.0)); + lbfgs->num_mult_updates = oldest_update(m, k); + } + if (lbfgs->num_mult_updates == k) PetscFunctionReturn(PETSC_SUCCESS); + + /* B_0 may have been updated, we must recompute B_0 S and S^T B_0 S */ + for (PetscInt j = oldest_update(m, k); j < k; j++) { + Vec s_j; + Vec Bs_j; + Vec StBs_j; + PetscInt S_idx = recycle_index(m, j); + PetscInt StBS_idx = lbfgs->strategy == MAT_LMVM_DENSE_INPLACE ? S_idx : history_index(m, k, j); + + PetscCall(MatDenseGetColumnVecWrite(lbfgs->BS, S_idx, &Bs_j)); + PetscCall(MatDenseGetColumnVecRead(lbfgs->Sfull, S_idx, &s_j)); + PetscCall(MatDQNApplyJ0Fwd(B, s_j, Bs_j)); + PetscCall(MatDenseRestoreColumnVecRead(lbfgs->Sfull, S_idx, &s_j)); + PetscCall(MatDenseGetColumnVecWrite(lbfgs->StBS, StBS_idx, &StBs_j)); + PetscCall(MatMultHermitianTransposeColumnRange(lbfgs->Sfull, Bs_j, StBs_j, 0, h)); + lbfgs->St_count++; + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, StBs_j, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + PetscCall(MatDenseRestoreColumnVecWrite(lbfgs->StBS, StBS_idx, &StBs_j)); + PetscCall(MatDenseRestoreColumnVecWrite(lbfgs->BS, S_idx, &Bs_j)); + } + prev_oldest = oldest_update(m, lbfgs->num_mult_updates); + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER && prev_oldest < oldest_update(m, k)) { + /* move the YtS entries that have been computed and need to be kept back up */ + PetscInt m_keep = m - (oldest_update(m, k) - prev_oldest); + + PetscCall(MatMove_LR3(B, lbfgs->YtS_triu_strict, m_keep)); + } + PetscCall(MatGetLocalSize(lbfgs->YtS_triu_strict, &m_local, NULL)); + j_0 = PetscMax(lbfgs->num_mult_updates, oldest_update(m, k)); + for (PetscInt j = j_0; j < k; j++) { + PetscInt S_idx = recycle_index(m, j); + PetscInt YtS_idx = lbfgs->strategy == MAT_LMVM_DENSE_INPLACE ? 
S_idx : history_index(m, k, j); + Vec s_j, Yts_j; + + PetscCall(MatDenseGetColumnVecRead(lbfgs->Sfull, S_idx, &s_j)); + PetscCall(MatDenseGetColumnVecWrite(lbfgs->YtS_triu_strict, YtS_idx, &Yts_j)); + PetscCall(MatMultHermitianTransposeColumnRange(lbfgs->Yfull, s_j, Yts_j, 0, h)); + lbfgs->Yt_count++; + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, Yts_j, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + PetscCall(MatDenseRestoreColumnVecWrite(lbfgs->YtS_triu_strict, YtS_idx, &Yts_j)); + PetscCall(MatDenseRestoreColumnVecRead(lbfgs->Sfull, S_idx, &s_j)); + /* zero the corresponding row */ + if (m_local > 0) { + Mat YtS_local, YtS_row; + + PetscCall(MatDenseGetLocalMatrix(lbfgs->YtS_triu_strict, &YtS_local)); + PetscCall(MatDenseGetSubMatrix(YtS_local, YtS_idx, YtS_idx + 1, PETSC_DECIDE, PETSC_DECIDE, &YtS_row)); + PetscCall(MatZeroEntries(YtS_row)); + PetscCall(MatDenseRestoreSubMatrix(YtS_local, &YtS_row)); + } + } + if (!lbfgs->inv_diag_vec) PetscCall(VecDuplicate(lbfgs->diag_vec, &lbfgs->inv_diag_vec)); + PetscCall(VecCopy(lbfgs->diag_vec, lbfgs->inv_diag_vec)); + PetscCall(VecReciprocal(lbfgs->inv_diag_vec)); + PetscCall(MatDenseGetLocalMatrix(lbfgs->J, &J_local)); + PetscCall(MatSetFactorType(J_local, MAT_FACTOR_NONE)); + PetscCall(MatGetLDLT(B, lbfgs->J)); + PetscCall(MatAXPY(lbfgs->J, 1.0, lbfgs->StBS, SAME_NONZERO_PATTERN)); + if (m_local) { + PetscCall(MatSetOption(J_local, MAT_SPD, PETSC_TRUE)); + PetscCall(MatCholeskyFactor(J_local, NULL, NULL)); + } + lbfgs->num_mult_updates = lbfgs->num_updates; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* Solves for + * [ I | -S R^{-T} ] [ I | 0 ] [ H_0 | 0 ] [ I | Y ] [ I ] + * [-----+---] [-----+---] [---+---] [-------------] + * [ Y^T | I ] [ 0 | D ] [ 0 | I ] [ -R^{-1} S^T ] */ + +static PetscErrorCode MatSolve_LMVMDBFGS(Mat H, Vec F, Vec dX) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)H->data; + Mat_DQN *lbfgs = (Mat_DQN *)lmvm->ctx; + Vec rwork1 = lbfgs->rwork1; + PetscInt m = lmvm->m; + PetscInt k = lbfgs->num_updates; + PetscInt h = k - oldest_update(m, k); + PetscObjectState Fstate; + + PetscFunctionBegin; + VecCheckSameSize(F, 2, dX, 3); + VecCheckMatCompatible(H, dX, 3, F, 2); + + /* Block Version */ + if (!lbfgs->num_updates) { + PetscCall(MatDQNApplyJ0Inv(H, F, dX)); + PetscFunctionReturn(PETSC_SUCCESS); /* No updates stored yet */ + } + + PetscCall(PetscObjectStateGet((PetscObject)F, &Fstate)); + if (F == lbfgs->Fprev_ref && Fstate == lbfgs->Fprev_state) { + PetscCall(VecCopy(lbfgs->StFprev, rwork1)); + } else { + PetscCall(MatMultHermitianTransposeColumnRange(lbfgs->Sfull, F, rwork1, 0, h)); + lbfgs->St_count++; + } + + /* Reordering rwork1, as STY is in history order, while S is in recycled order */ + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(H, rwork1, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + PetscCall(MatUpperTriangularSolveInPlace(H, lbfgs->StY_triu, rwork1, PETSC_FALSE, lbfgs->num_updates, lbfgs->strategy)); + PetscCall(VecScale(rwork1, -1.0)); + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecHistoryOrderToRecycleOrder(H, rwork1, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + + PetscCall(VecCopy(F, lbfgs->column_work)); + PetscCall(MatMultAddColumnRange(lbfgs->Yfull, rwork1, lbfgs->column_work, lbfgs->column_work, 0, h)); + lbfgs->Y_count++; + + PetscCall(VecPointwiseMult(rwork1, lbfgs->diag_vec_recycle_order, rwork1)); + PetscCall(MatDQNApplyJ0Inv(H, lbfgs->column_work, dX)); + + 
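/*
 * Illustrative aside (not part of the patch): step by step, this MatSolve
 * realizes the factored form in the comment above. With q = -R^{-1} S^T f
 * (rwork1 after the first triangular solve and VecScale):
 *     w   = f + Y q                  (column_work)
 *     dx  = H_0 w                    (MatDQNApplyJ0Inv)
 *     r   = -R^{-T} (D q + Y^T dx)   (pointwise D, the transpose-add, second solve)
 *     dx += S r
 * Dense real-arithmetic sketch with H_0 = h0*I, S and Y stored n-by-k
 * column-major, R the upper triangle of S^T Y with leading dimension ldr, and
 * D its diagonal (helper names illustrative):
 */
static void utsolve(int k, const double *R, int ldr, double *x)
{ /* R x = b, back substitution */
  for (int j = k - 1; j >= 0; j--) {
    x[j] /= R[j + j * ldr];
    for (int i = 0; i < j; i++) x[i] -= R[i + j * ldr] * x[j];
  }
}

static void utsolve_trans(int k, const double *R, int ldr, double *x)
{ /* R^T x = b, forward substitution */
  for (int j = 0; j < k; j++) {
    for (int i = 0; i < j; i++) x[j] -= R[i + j * ldr] * x[i];
    x[j] /= R[j + j * ldr];
  }
}

static void compact_bfgs_solve(int n, int k, const double *S, const double *Y, const double *R, int ldr, const double *D, double h0, const double *f, double *dx, double *q)
{
  for (int j = 0; j < k; j++) { /* q = S^T f */
    q[j] = 0.0;
    for (int i = 0; i < n; i++) q[j] += S[i + j * n] * f[i];
  }
  utsolve(k, R, ldr, q);
  for (int j = 0; j < k; j++) q[j] = -q[j]; /* q = -R^{-1} S^T f */
  for (int i = 0; i < n; i++) { /* dx = H_0 (f + Y q) */
    double w = f[i];
    for (int j = 0; j < k; j++) w += Y[i + j * n] * q[j];
    dx[i] = h0 * w;
  }
  for (int j = 0; j < k; j++) { /* q = D q + Y^T dx */
    double ytdx = 0.0;
    for (int i = 0; i < n; i++) ytdx += Y[i + j * n] * dx[i];
    q[j] = D[j] * q[j] + ytdx;
  }
  utsolve_trans(k, R, ldr, q);
  for (int j = 0; j < k; j++) q[j] = -q[j]; /* r = -R^{-T} (D q + Y^T dx) */
  for (int i = 0; i < n; i++) /* dx += S r */
    for (int j = 0; j < k; j++) dx[i] += S[i + j * n] * q[j];
}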
PetscCall(MatMultHermitianTransposeAddColumnRange(lbfgs->Yfull, dX, rwork1, rwork1, 0, h)); + lbfgs->Yt_count++; + + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(H, rwork1, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + PetscCall(MatUpperTriangularSolveInPlace(H, lbfgs->StY_triu, rwork1, PETSC_TRUE, lbfgs->num_updates, lbfgs->strategy)); + PetscCall(VecScale(rwork1, -1.0)); + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecHistoryOrderToRecycleOrder(H, rwork1, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + + PetscCall(MatMultAddColumnRange(lbfgs->Sfull, rwork1, dX, dX, 0, h)); + lbfgs->S_count++; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* Solves for + B_0 - [ Y | B_0 S] [ -D | L^T ]^-1 [ Y^T ] + [-----+-----------] [---------] + [ L | S^T B_0 S ] [ S^T B_0 ] + + Above is equivalent to + + B_0 - [ Y | B_0 S] [[ I | 0 ][ -D | 0 ][ I | -D^{-1} L^T ]]^-1 [ Y^T ] + [[-----------+---][-----+---][---+-------------]] [---------] + [[ -L D^{-1} | I ][ 0 | J ][ 0 | I ]] [ S^T B_0 ] + + where J = S^T B_0 S + L D^{-1} L^T + + becomes + + B_0 - [ Y | B_0 S] [ I | D^{-1} L^T ][ -D^{-1} | 0 ][ I | 0 ] [ Y^T ] + [---+------------][----------+--------][----------+---] [---------] + [ 0 | I ][ 0 | J^{-1} ][ L D^{-1} | I ] [ S^T B_0 ] + + = + + B_0 + [ Y | B_0 S] [ D^{-1} | 0 ][ I | L^T ][ I | 0 ][ I | 0 ] [ Y^T ] + [--------+---][---+-----][---+---------][----------+---] [---------] + [ 0 | I ][ 0 | I ][ 0 | -J^{-1} ][ L D^{-1} | I ] [ S^T B_0 ] + + (Note that YtS_triu_strict is L^T) + Byrd, Nocedal, Schnabel 1994 + + Alternative approach: considering the fact that DFP is dual to BFGS, use MatMult of DPF: + (See ddfp.c's MatMult_LMVMDDFP) + +*/ +static PetscErrorCode MatMult_LMVMDBFGS(Mat B, Vec X, Vec Z) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lbfgs = (Mat_DQN *)lmvm->ctx; + Mat J_local; + PetscInt idx, i, j, m_local, local_n; + PetscInt m = lmvm->m; + PetscInt k = lbfgs->num_updates; + PetscInt h = k - oldest_update(m, k); + + PetscFunctionBegin; + VecCheckSameSize(X, 2, Z, 3); + VecCheckMatCompatible(B, X, 2, Z, 3); + + /* Cholesky Version */ + /* Start with the B0 term */ + PetscCall(MatDQNApplyJ0Fwd(B, X, Z)); + if (!lbfgs->num_updates) { PetscFunctionReturn(PETSC_SUCCESS); /* No updates stored yet */ } + + if (lbfgs->use_recursive) { + PetscDeviceContext dctx; + PetscMemType memtype; + PetscScalar stz, ytx, stp, sjtpi, yjtsi, *workscalar; + PetscInt oldest = oldest_update(m, k); + + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + /* Recursive formulation to avoid Cholesky. 
Not a dense formulation */ + PetscCall(MatMultHermitianTransposeColumnRange(lbfgs->Yfull, X, lbfgs->rwork1, 0, h)); + lbfgs->Yt_count++; + + PetscCall(VecGetLocalSize(lbfgs->rwork1, &local_n)); + + if (lbfgs->needPQ) { + for (i = 0; i <= lmvm->k; ++i) { + idx = recycle_index(m, i + oldest); + /* column_work = S[idx] */ + PetscCall(MatGetColumnVector(lbfgs->Sfull, lbfgs->column_work, idx)); + PetscCall(MatDQNApplyJ0Fwd(B, lbfgs->column_work, lbfgs->PQ[idx])); + PetscCall(MatMultHermitianTransposeColumnRange(lbfgs->Yfull, lbfgs->column_work, lbfgs->rwork3, 0, h)); + PetscCall(VecGetArrayAndMemType(lbfgs->rwork3, &workscalar, &memtype)); + for (j = 0; j < i; ++j) { + PetscInt idx_j = recycle_index(m, j + oldest); + /* Copy yjtsi in device-aware manner */ + if (local_n) { + if (PetscMemTypeHost(memtype)) { + yjtsi = workscalar[idx_j]; + } else { + PetscCall(PetscDeviceRegisterMemory(&yjtsi, PETSC_MEMTYPE_HOST, sizeof(yjtsi))); + PetscCall(PetscDeviceRegisterMemory(workscalar, memtype, local_n * sizeof(*workscalar))); + PetscCall(PetscDeviceArrayCopy(dctx, &yjtsi, &workscalar[idx_j], 1)); + } + } + PetscCallMPI(MPI_Bcast(&yjtsi, 1, MPIU_SCALAR, 0, PetscObjectComm((PetscObject)B))); + /* column_work2 = S[j] */ + PetscCall(MatGetColumnVector(lbfgs->Sfull, lbfgs->column_work2, idx_j)); + PetscCall(VecDot(lbfgs->column_work2, lbfgs->PQ[idx], &sjtpi)); + /* column_work2 = Y[j] */ + PetscCall(MatGetColumnVector(lbfgs->Yfull, lbfgs->column_work2, idx_j)); + /* Compute the pure BFGS component of the forward product */ + PetscCall(VecAXPBYPCZ(lbfgs->PQ[idx], -PetscRealPart(sjtpi) / lbfgs->stp[idx_j], PetscRealPart(yjtsi) / lbfgs->yts[j], 1.0, lbfgs->PQ[idx_j], lbfgs->column_work2)); + } + PetscCall(VecRestoreArrayAndMemType(lbfgs->rwork3, &workscalar)); /* release rwork3 before it is overwritten on the next pass */ + PetscCall(VecDot(lbfgs->column_work, lbfgs->PQ[idx], &stp)); + lbfgs->stp[idx] = PetscRealPart(stp); + } + lbfgs->needPQ = PETSC_FALSE; + } + + PetscCall(VecGetArrayAndMemType(lbfgs->rwork1, &workscalar, &memtype)); + for (i = 0; i <= lmvm->k; ++i) { + idx = recycle_index(m, i + oldest); + /* Copy ytx[i] in device-aware manner */ + if (local_n) { + if (PetscMemTypeHost(memtype)) { + ytx = workscalar[idx]; + } else { + PetscCall(PetscDeviceRegisterMemory(&ytx, PETSC_MEMTYPE_HOST, 1 * sizeof(ytx))); + PetscCall(PetscDeviceRegisterMemory(workscalar, memtype, local_n * sizeof(*workscalar))); + PetscCall(PetscDeviceArrayCopy(dctx, &ytx, &workscalar[idx], 1)); + } + } + PetscCallMPI(MPI_Bcast(&ytx, 1, MPIU_SCALAR, 0, PetscObjectComm((PetscObject)B))); + /* column_work : S[i], column_work2 : Y[i] */ + PetscCall(MatGetColumnVector(lbfgs->Sfull, lbfgs->column_work, idx)); + PetscCall(MatGetColumnVector(lbfgs->Yfull, lbfgs->column_work2, idx)); + PetscCall(VecDot(lbfgs->column_work, Z, &stz)); + PetscCall(VecAXPBYPCZ(Z, -PetscRealPart(stz) / lbfgs->stp[idx], PetscRealPart(ytx) / lbfgs->yts[i], 1.0, lbfgs->PQ[idx], lbfgs->column_work2)); + } + PetscCall(VecRestoreArrayAndMemType(lbfgs->rwork1, &workscalar)); + } else { + PetscCall(MatLMVMDBFGSUpdateMultData(B)); + PetscCall(MatMultHermitianTransposeColumnRange(lbfgs->Yfull, X, lbfgs->rwork1, 0, h)); + lbfgs->Yt_count++; + PetscCall(MatMultHermitianTransposeColumnRange(lbfgs->Sfull, Z, lbfgs->rwork2, 0, h)); + lbfgs->St_count++; + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) { + PetscCall(VecRecycleOrderToHistoryOrder(B, lbfgs->rwork1, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + PetscCall(VecRecycleOrderToHistoryOrder(B, lbfgs->rwork2, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + } + + 
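/* in the notation of the comment preceding this function (a = Y^T x, b = S^T B_0 x, L^T = YtS_triu_strict, J = S^T B_0 S + L D^{-1} L^T), the block below forms p = J^{-1}(b + L D^{-1} a) via the Cholesky factor of J and then accumulates Z = B_0 x + Y D^{-1}(a - L^T p) - (B_0 S) p; this summary is derived from the statements, not text from the original patch */ +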
PetscCall(VecPointwiseMult(lbfgs->rwork3, lbfgs->rwork1, lbfgs->inv_diag_vec)); + PetscCall(MatMultTransposeAdd(lbfgs->YtS_triu_strict, lbfgs->rwork3, lbfgs->rwork2, lbfgs->rwork2)); + + if (!lbfgs->rwork2_local) PetscCall(VecCreateLocalVector(lbfgs->rwork2, &lbfgs->rwork2_local)); + if (!lbfgs->rwork3_local) PetscCall(VecCreateLocalVector(lbfgs->rwork3, &lbfgs->rwork3_local)); + PetscCall(VecGetLocalVectorRead(lbfgs->rwork2, lbfgs->rwork2_local)); + PetscCall(VecGetLocalVector(lbfgs->rwork3, lbfgs->rwork3_local)); + PetscCall(MatDenseGetLocalMatrix(lbfgs->J, &J_local)); + PetscCall(VecGetSize(lbfgs->rwork2_local, &m_local)); + if (m_local) { + PetscCall(MatDenseGetLocalMatrix(lbfgs->J, &J_local)); + PetscCall(MatSolve(J_local, lbfgs->rwork2_local, lbfgs->rwork3_local)); + } + PetscCall(VecRestoreLocalVector(lbfgs->rwork3, lbfgs->rwork3_local)); + PetscCall(VecRestoreLocalVectorRead(lbfgs->rwork2, lbfgs->rwork2_local)); + PetscCall(VecScale(lbfgs->rwork3, -1.0)); + + PetscCall(MatMultAdd(lbfgs->YtS_triu_strict, lbfgs->rwork3, lbfgs->rwork1, lbfgs->rwork1)); + PetscCall(VecPointwiseMult(lbfgs->rwork1, lbfgs->rwork1, lbfgs->inv_diag_vec)); + + if (lbfgs->strategy == MAT_LMVM_DENSE_REORDER) { + PetscCall(VecHistoryOrderToRecycleOrder(B, lbfgs->rwork1, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + PetscCall(VecHistoryOrderToRecycleOrder(B, lbfgs->rwork3, lbfgs->num_updates, lbfgs->cyclic_work_vec)); + } + + PetscCall(MatMultAddColumnRange(lbfgs->Yfull, lbfgs->rwork1, Z, Z, 0, h)); + lbfgs->Y_count++; + PetscCall(MatMultAddColumnRange(lbfgs->BS, lbfgs->rwork3, Z, Z, 0, h)); + lbfgs->S_count++; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* + This dense representation reduces the L-BFGS update to a series of + matrix-vector products with dense matrices in lieu of the conventional matrix-free + two-loop algorithm. +*/ +PetscErrorCode MatCreate_LMVMDBFGS(Mat B) +{ + Mat_LMVM *lmvm; + Mat_DQN *lbfgs; + + PetscFunctionBegin; + PetscCall(MatCreate_LMVM(B)); + PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATLMVMDBFGS)); + PetscCall(MatSetOption(B, MAT_SPD, PETSC_TRUE)); + PetscCall(MatSetOption(B, MAT_SPD_ETERNAL, PETSC_TRUE)); + B->ops->view = MatView_LMVMDQN; + B->ops->setup = MatSetUp_LMVMDQN; + B->ops->setfromoptions = MatSetFromOptions_LMVMDQN; + B->ops->destroy = MatDestroy_LMVMDQN; + + lmvm = (Mat_LMVM *)B->data; + lmvm->square = PETSC_TRUE; + lmvm->ops->allocate = MatAllocate_LMVMDQN; + lmvm->ops->reset = MatReset_LMVMDQN; + lmvm->ops->update = MatUpdate_LMVMDQN; + lmvm->ops->mult = MatMult_LMVMDBFGS; + lmvm->ops->solve = MatSolve_LMVMDBFGS; + lmvm->ops->copy = MatCopy_LMVMDQN; + + PetscCall(PetscNew(&lbfgs)); + lmvm->ctx = (void *)lbfgs; + lbfgs->allocated = PETSC_FALSE; + lbfgs->use_recursive = PETSC_TRUE; + lbfgs->needPQ = PETSC_TRUE; + lbfgs->watchdog = 0; + lbfgs->max_seq_rejects = lmvm->m / 2; + lbfgs->strategy = MAT_LMVM_DENSE_INPLACE; + lbfgs->scale_type = MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL; + + PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &lbfgs->diag_qn)); + PetscCall(MatSetType(lbfgs->diag_qn, MATLMVMDIAGBROYDEN)); + PetscCall(MatSetOptionsPrefix(lbfgs->diag_qn, "J0_")); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + MatCreateLMVMDBFGS - Creates a dense representation of the limited-memory + Broyden-Fletcher-Goldfarb-Shanno (BFGS) approximation to a Hessian. + + Collective + + Input Parameters: ++ comm - MPI communicator +. n - number of local rows for storage vectors +- N - global size of the storage vectors + + Output Parameter: +. 
B - the matrix + + Level: advanced + + Note: + It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions() + paradigm instead of this routine directly. + +.seealso: `MatCreate()`, `MATLMVM`, `MATLMVMDBFGS`, `MatCreateLMVMBFGS()` +@*/ +PetscErrorCode MatCreateLMVMDBFGS(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) +{ + PetscFunctionBegin; + PetscCall(KSPInitializePackage()); + PetscCall(MatCreate(comm, B)); + PetscCall(MatSetSizes(*B, n, n, N, N)); + PetscCall(MatSetType(*B, MATLMVMDBFGS)); + PetscCall(MatSetUp(*B)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* here R is strictly upper triangular part of STY */ +static PetscErrorCode MatGetRTDR(Mat B, Mat result) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *ldfp = (Mat_DQN *)lmvm->ctx; + PetscInt m_local; + + PetscFunctionBegin; + if (!ldfp->temp_mat) PetscCall(MatDuplicate(ldfp->StY_triu_strict, MAT_SHARE_NONZERO_PATTERN, &ldfp->temp_mat)); + PetscCall(MatCopy(ldfp->StY_triu_strict, ldfp->temp_mat, SAME_NONZERO_PATTERN)); + PetscCall(MatDiagonalScale(ldfp->temp_mat, ldfp->inv_diag_vec, NULL)); + PetscCall(MatGetLocalSize(result, &m_local, NULL)); + // need to conjugate and conjugate again because we have MatTransposeMatMult but not MatHermitianTransposeMatMult() + PetscCall(MatConjugate(ldfp->temp_mat)); + if (m_local) { + Mat temp_local, StY_local, result_local; + PetscCall(MatDenseGetLocalMatrix(ldfp->StY_triu_strict, &StY_local)); + PetscCall(MatDenseGetLocalMatrix(ldfp->temp_mat, &temp_local)); + PetscCall(MatDenseGetLocalMatrix(result, &result_local)); + PetscCall(MatTransposeMatMult(StY_local, temp_local, MAT_REUSE_MATRIX, PETSC_DEFAULT, &result_local)); + } + PetscCall(MatConjugate(result)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLMVMDDFPUpdateSolveData(Mat B) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *ldfp = (Mat_DQN *)lmvm->ctx; + PetscInt m = lmvm->m, m_local; + PetscInt k = ldfp->num_updates; + PetscInt h = k - oldest_update(m, k); + PetscInt j_0; + PetscInt prev_oldest; + Mat J_local; + + PetscFunctionBegin; + if (!ldfp->StY_triu_strict) { + PetscCall(MatDuplicate(ldfp->YtS_triu, MAT_SHARE_NONZERO_PATTERN, &ldfp->StY_triu_strict)); + PetscCall(MatDestroy(&ldfp->YtHY)); + PetscCall(MatDuplicate(ldfp->YtS_triu, MAT_SHARE_NONZERO_PATTERN, &ldfp->YtHY)); + PetscCall(MatDestroy(&ldfp->J)); + PetscCall(MatDuplicate(ldfp->YtS_triu, MAT_SHARE_NONZERO_PATTERN, &ldfp->J)); + PetscCall(MatDestroy(&ldfp->HY)); + PetscCall(MatDuplicate(ldfp->Yfull, MAT_SHARE_NONZERO_PATTERN, &ldfp->HY)); + PetscCall(MatShift(ldfp->YtHY, 1.0)); + ldfp->num_mult_updates = oldest_update(m, k); + } + if (ldfp->num_mult_updates == k) PetscFunctionReturn(PETSC_SUCCESS); + + /* H_0 may have been updated, we must recompute H_0 Y and Y^T H_0 Y */ + for (PetscInt j = oldest_update(m, k); j < k; j++) { + Vec y_j; + Vec Hy_j; + Vec YtHy_j; + PetscInt Y_idx = recycle_index(m, j); + PetscInt YtHY_idx = ldfp->strategy == MAT_LMVM_DENSE_INPLACE ? 
Y_idx : history_index(m, k, j); + + PetscCall(MatDenseGetColumnVecWrite(ldfp->HY, Y_idx, &Hy_j)); + PetscCall(MatDenseGetColumnVecRead(ldfp->Yfull, Y_idx, &y_j)); + PetscCall(MatDQNApplyJ0Inv(B, y_j, Hy_j)); + PetscCall(MatDenseRestoreColumnVecRead(ldfp->Yfull, Y_idx, &y_j)); + PetscCall(MatDenseGetColumnVecWrite(ldfp->YtHY, YtHY_idx, &YtHy_j)); + PetscCall(MatMultHermitianTransposeColumnRange(ldfp->Yfull, Hy_j, YtHy_j, 0, h)); + ldfp->Yt_count++; + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, YtHy_j, ldfp->num_updates, ldfp->cyclic_work_vec)); + PetscCall(MatDenseRestoreColumnVecWrite(ldfp->YtHY, YtHY_idx, &YtHy_j)); + PetscCall(MatDenseRestoreColumnVecWrite(ldfp->HY, Y_idx, &Hy_j)); + } + prev_oldest = oldest_update(m, ldfp->num_mult_updates); + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER && prev_oldest < oldest_update(m, k)) { + /* move the StY entries that have been computed and need to be kept back up */ + PetscInt m_keep = m - (oldest_update(m, k) - prev_oldest); + + PetscCall(MatMove_LR3(B, ldfp->StY_triu_strict, m_keep)); + } + PetscCall(MatGetLocalSize(ldfp->StY_triu_strict, &m_local, NULL)); + j_0 = PetscMax(ldfp->num_mult_updates, oldest_update(m, k)); + for (PetscInt j = j_0; j < k; j++) { + PetscInt Y_idx = recycle_index(m, j); + PetscInt StY_idx = ldfp->strategy == MAT_LMVM_DENSE_INPLACE ? Y_idx : history_index(m, k, j); + Vec y_j, Sty_j; + + PetscCall(MatDenseGetColumnVecRead(ldfp->Yfull, Y_idx, &y_j)); + PetscCall(MatDenseGetColumnVecWrite(ldfp->StY_triu_strict, StY_idx, &Sty_j)); + PetscCall(MatMultHermitianTransposeColumnRange(ldfp->Sfull, y_j, Sty_j, 0, h)); + ldfp->St_count++; + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, Sty_j, ldfp->num_updates, ldfp->cyclic_work_vec)); + PetscCall(MatDenseRestoreColumnVecWrite(ldfp->StY_triu_strict, StY_idx, &Sty_j)); + PetscCall(MatDenseRestoreColumnVecRead(ldfp->Yfull, Y_idx, &y_j)); + /* zero the corresponding row */ + if (m_local > 0) { + Mat StY_local, StY_row; + + PetscCall(MatDenseGetLocalMatrix(ldfp->StY_triu_strict, &StY_local)); + PetscCall(MatDenseGetSubMatrix(StY_local, StY_idx, StY_idx + 1, PETSC_DECIDE, PETSC_DECIDE, &StY_row)); + PetscCall(MatZeroEntries(StY_row)); + PetscCall(MatDenseRestoreSubMatrix(StY_local, &StY_row)); + } + } + if (!ldfp->inv_diag_vec) PetscCall(VecDuplicate(ldfp->diag_vec, &ldfp->inv_diag_vec)); + PetscCall(VecCopy(ldfp->diag_vec, ldfp->inv_diag_vec)); + PetscCall(VecReciprocal(ldfp->inv_diag_vec)); + PetscCall(MatDenseGetLocalMatrix(ldfp->J, &J_local)); + PetscCall(MatSetFactorType(J_local, MAT_FACTOR_NONE)); + PetscCall(MatGetRTDR(B, ldfp->J)); + PetscCall(MatAXPY(ldfp->J, 1.0, ldfp->YtHY, SAME_NONZERO_PATTERN)); + if (m_local) { + PetscCall(MatSetOption(J_local, MAT_SPD, PETSC_TRUE)); + PetscCall(MatCholeskyFactor(J_local, NULL, NULL)); + } + ldfp->num_mult_updates = ldfp->num_updates; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* Solves for + + H_0 - [ S | H_0 Y] [ -D | R^T ]^-1 [ S^T ] + [-----+-----------] [---------] + [ R | Y^T H_0 Y ] [ Y^T H_0 ] + + Above is equivalent to + + H_0 - [ S | H_0 Y] [[ I | 0 ][ -D | 0 ][ I | -D^{-1} R^T ]]^-1 [ S^T ] + [[-----------+---][----+---][---+-------------]] [---------] + [[ -R D^{-1} | I ][ 0 | J ][ 0 | I ]] [ Y^T H_0 ] + + where J = Y^T H_0 Y + R D^{-1} R^T + + becomes + + H_0 - [ S | H_0 Y] [ I | D^{-1} R^T ][ -D^{-1} | 0 ][ I | 0 ] [ S^T ] + [---+------------][----------+--------][----------+---] [---------] + [ 0 | I ][ 0 | J^{-1} ][ R D^{-1} | I ] [ Y^T H_0 ] + + = + + H_0 + [ S | H_0 Y] [ D^{-1} | 0 ][ I | R^T ][ I | 0 ][ I | 0 ] [ S^T ] + [--------+---][---+-----][---+---------][----------+---] [---------] + [ 0 | I ][ 0 | I ][ 0 | -J^{-1} ][ R D^{-1} | I ] [ Y^T H_0 ] + + (Note that StY_triu_strict is R) + Byrd, Nocedal, Schnabel 1994 + +*/
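+/* reading aid for the routine below (derived from the code, not text from the original patch): rwork1 carries S^T F, rwork2 accumulates Y^T H_0 F plus the triangular coupling term, and rwork3 receives the J^{-1} solve; MatLMVMDDFPUpdateSolveData() maintains HY = H_0 Y, YtHY = Y^T H_0 Y, and the Cholesky factor of J */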
+static PetscErrorCode MatSolve_LMVMDDFP(Mat H, Vec F, Vec dX) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)H->data; + Mat_DQN *ldfp = (Mat_DQN *)lmvm->ctx; + PetscInt m = lmvm->m; + PetscInt k = ldfp->num_updates; + PetscInt h = k - oldest_update(m, k); + PetscInt idx, i, j, local_n; + PetscInt m_local; + Mat J_local; + + PetscFunctionBegin; + VecCheckSameSize(F, 2, dX, 3); + VecCheckMatCompatible(H, dX, 3, F, 2); + + /* Cholesky Version */ + /* Start with the H_0 term */ + PetscCall(MatDQNApplyJ0Inv(H, F, dX)); + if (!ldfp->num_updates) { PetscFunctionReturn(PETSC_SUCCESS); /* No updates stored yet */ } + + if (ldfp->use_recursive) { + PetscDeviceContext dctx; + PetscMemType memtype; + PetscScalar stf, ytx, ytq, yjtqi, sjtyi, *workscalar; + + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + /* Recursive formulation to avoid Cholesky. Not a dense formulation */ + PetscCall(MatMultHermitianTransposeColumnRange(ldfp->Sfull, F, ldfp->rwork1, 0, h)); + ldfp->St_count++; + + PetscCall(VecGetLocalSize(ldfp->rwork1, &local_n)); + + PetscInt oldest = oldest_update(m, k); + + if (ldfp->needPQ) { + for (i = 0; i <= lmvm->k; ++i) { + idx = recycle_index(m, i + oldest); + /* column_work = Y[idx] */ + PetscCall(MatGetColumnVector(ldfp->Yfull, ldfp->column_work, idx)); + PetscCall(MatDQNApplyJ0Inv(H, ldfp->column_work, ldfp->PQ[idx])); + PetscCall(MatMultHermitianTransposeColumnRange(ldfp->Sfull, ldfp->column_work, ldfp->rwork3, 0, h)); + PetscCall(VecGetArrayAndMemType(ldfp->rwork3, &workscalar, &memtype)); + for (j = 0; j < i; ++j) { + PetscInt idx_j = recycle_index(m, j + oldest); + /* Copy sjtyi in device-aware manner */ + if (local_n) { + if (PetscMemTypeHost(memtype)) { + sjtyi = workscalar[idx_j]; + } else { + PetscCall(PetscDeviceRegisterMemory(&sjtyi, PETSC_MEMTYPE_HOST, 1 * sizeof(sjtyi))); + PetscCall(PetscDeviceRegisterMemory(workscalar, memtype, local_n * sizeof(*workscalar))); + PetscCall(PetscDeviceArrayCopy(dctx, &sjtyi, &workscalar[idx_j], 1)); + } + } + PetscCallMPI(MPI_Bcast(&sjtyi, 1, MPIU_SCALAR, 0, PetscObjectComm((PetscObject)H))); + /* column_work2 = Y[j] */ + PetscCall(MatGetColumnVector(ldfp->Yfull, ldfp->column_work2, idx_j)); + PetscCall(VecDot(ldfp->column_work2, ldfp->PQ[idx], &yjtqi)); + /* column_work2 = S[j] */ + PetscCall(MatGetColumnVector(ldfp->Sfull, ldfp->column_work2, idx_j)); + /* Compute the pure DFP component of the inverse product */ + PetscCall(VecAXPBYPCZ(ldfp->PQ[idx], -PetscRealPart(yjtqi) / ldfp->ytq[idx_j], PetscRealPart(sjtyi) / ldfp->yts[j], 1.0, ldfp->PQ[idx_j], ldfp->column_work2)); + } + PetscCall(VecRestoreArrayAndMemType(ldfp->rwork3, &workscalar)); /* release rwork3 before it is overwritten on the next pass */ + PetscCall(VecDot(ldfp->column_work, ldfp->PQ[idx], &ytq)); + ldfp->ytq[idx] = PetscRealPart(ytq); + } + ldfp->needPQ = PETSC_FALSE; + } + + PetscCall(VecGetArrayAndMemType(ldfp->rwork1, &workscalar, &memtype)); + for (i = 0; i <= lmvm->k; ++i) { + idx = recycle_index(m, i + oldest); + /* Copy stf[i] in device-aware manner */ + if (local_n) { + if (PetscMemTypeHost(memtype)) { + stf = workscalar[idx]; + } else { + PetscCall(PetscDeviceRegisterMemory(&stf, PETSC_MEMTYPE_HOST, sizeof(stf))); + PetscCall(PetscDeviceRegisterMemory(workscalar, memtype, local_n * sizeof(*workscalar))); + PetscCall(PetscDeviceArrayCopy(dctx, &stf, 
&workscalar[idx], 1)); + } + } + PetscCallMPI(MPI_Bcast(&stf, 1, MPIU_SCALAR, 0, PetscObjectComm((PetscObject)H))); + /* column_work : S[i], column_work2 : Y[i] */ + PetscCall(MatGetColumnVector(ldfp->Sfull, ldfp->column_work, idx)); + PetscCall(MatGetColumnVector(ldfp->Yfull, ldfp->column_work2, idx)); + PetscCall(VecDot(ldfp->column_work2, dX, &ytx)); + PetscCall(VecAXPBYPCZ(dX, -PetscRealPart(ytx) / ldfp->ytq[idx], PetscRealPart(stf) / ldfp->yts[i], 1.0, ldfp->PQ[idx], ldfp->column_work)); + } + PetscCall(VecRestoreArrayAndMemType(ldfp->rwork1, &workscalar)); + } else { + PetscCall(MatLMVMDDFPUpdateSolveData(H)); + PetscCall(MatMultHermitianTransposeColumnRange(ldfp->Sfull, F, ldfp->rwork1, 0, h)); + ldfp->St_count++; + PetscCall(MatMultHermitianTransposeColumnRange(ldfp->Yfull, dX, ldfp->rwork2, 0, h)); + ldfp->Yt_count++; + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) { + PetscCall(VecRecycleOrderToHistoryOrder(H, ldfp->rwork1, ldfp->num_updates, ldfp->cyclic_work_vec)); + PetscCall(VecRecycleOrderToHistoryOrder(H, ldfp->rwork2, ldfp->num_updates, ldfp->cyclic_work_vec)); + } + + PetscCall(VecPointwiseMult(ldfp->rwork3, ldfp->rwork1, ldfp->inv_diag_vec)); + PetscCall(MatMultTransposeAdd(ldfp->StY_triu_strict, ldfp->rwork3, ldfp->rwork2, ldfp->rwork2)); + + if (!ldfp->rwork2_local) PetscCall(VecCreateLocalVector(ldfp->rwork2, &ldfp->rwork2_local)); + if (!ldfp->rwork3_local) PetscCall(VecCreateLocalVector(ldfp->rwork3, &ldfp->rwork3_local)); + PetscCall(VecGetLocalVectorRead(ldfp->rwork2, ldfp->rwork2_local)); + PetscCall(VecGetLocalVector(ldfp->rwork3, ldfp->rwork3_local)); + PetscCall(MatDenseGetLocalMatrix(ldfp->J, &J_local)); + PetscCall(VecGetSize(ldfp->rwork2_local, &m_local)); + if (m_local) { + Mat J_local; + + PetscCall(MatDenseGetLocalMatrix(ldfp->J, &J_local)); + PetscCall(MatSolve(J_local, ldfp->rwork2_local, ldfp->rwork3_local)); + } + PetscCall(VecRestoreLocalVector(ldfp->rwork3, ldfp->rwork3_local)); + PetscCall(VecRestoreLocalVectorRead(ldfp->rwork2, ldfp->rwork2_local)); + PetscCall(VecScale(ldfp->rwork3, -1.0)); + + PetscCall(MatMultAdd(ldfp->StY_triu_strict, ldfp->rwork3, ldfp->rwork1, ldfp->rwork1)); + PetscCall(VecPointwiseMult(ldfp->rwork1, ldfp->rwork1, ldfp->inv_diag_vec)); + + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) { + PetscCall(VecHistoryOrderToRecycleOrder(H, ldfp->rwork1, ldfp->num_updates, ldfp->cyclic_work_vec)); + PetscCall(VecHistoryOrderToRecycleOrder(H, ldfp->rwork3, ldfp->num_updates, ldfp->cyclic_work_vec)); + } + + PetscCall(MatMultAddColumnRange(ldfp->Sfull, ldfp->rwork1, dX, dX, 0, h)); + ldfp->S_count++; + PetscCall(MatMultAddColumnRange(ldfp->HY, ldfp->rwork3, dX, dX, 0, h)); + ldfp->Y_count++; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* Solves for + (Theorem 1, Erway, Jain, and Marcia, 2013) + + B_0 - [ Y | B_0 S] [ -R^{-T} (D + S^T B_0 S) R^{-1} | R^{-T} ] [ Y^T ] + ---------------------------------+--------] [---------] + [ R^{-1} | 0 ] [ S^T B_0 ] + + (Note: R above is right triangular part of YTS) + which becomes, + + [ I | -Y L^{-T} ] [ I | 0 ] [ B_0 | 0 ] [ I | S ] [ I ] + [-----+---] [-----+---] [---+---] [-------------] + [ S^T | I ] [ 0 | D ] [ 0 | I ] [ -L^{-1} Y^T ] + + (Note: L above is right triangular part of STY) + +*/ +static PetscErrorCode MatMult_LMVMDDFP(Mat B, Vec X, Vec Z) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *ldfp = (Mat_DQN *)lmvm->ctx; + Vec rwork1 = ldfp->rwork1; + PetscInt m = lmvm->m; + PetscInt k = ldfp->num_updates; + PetscInt h = k - oldest_update(m, k); + PetscObjectState Xstate; + + 
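/* closed form of the block version below (derived from the statement sequence, not text from the original patch), with Lt = triu(Y^T S) (YtS_triu) and D its diagonal: B = (I - Y Lt^{-T} S^T) B_0 (I - S Lt^{-1} Y^T) + Y Lt^{-T} D Lt^{-1} Y^T, the dual of the dense BFGS MatSolve() */ +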
PetscFunctionBegin; + VecCheckSameSize(X, 2, Z, 3); + VecCheckMatCompatible(B, X, 2, Z, 3); + + /* DFP Version. Erway, Jain, Marcia, 2013, Theorem 1 */ + /* Block Version */ + if (!ldfp->num_updates) { + PetscCall(MatDQNApplyJ0Fwd(B, X, Z)); + PetscFunctionReturn(PETSC_SUCCESS); /* No updates stored yet */ + } + + PetscCall(PetscObjectStateGet((PetscObject)X, &Xstate)); + PetscCall(MatMultHermitianTransposeColumnRange(ldfp->Yfull, X, rwork1, 0, h)); + + /* Reordering rwork1, as STY is in history order, while Y is in recycled order */ + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, rwork1, ldfp->num_updates, ldfp->cyclic_work_vec)); + PetscCall(MatUpperTriangularSolveInPlace(B, ldfp->YtS_triu, rwork1, PETSC_FALSE, ldfp->num_updates, ldfp->strategy)); + PetscCall(VecScale(rwork1, -1.0)); + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecHistoryOrderToRecycleOrder(B, rwork1, ldfp->num_updates, ldfp->cyclic_work_vec)); + + PetscCall(VecCopy(X, ldfp->column_work)); + PetscCall(MatMultAddColumnRange(ldfp->Sfull, rwork1, ldfp->column_work, ldfp->column_work, 0, h)); + ldfp->S_count++; + + PetscCall(VecPointwiseMult(rwork1, ldfp->diag_vec_recycle_order, rwork1)); + PetscCall(MatDQNApplyJ0Fwd(B, ldfp->column_work, Z)); + + PetscCall(MatMultHermitianTransposeAddColumnRange(ldfp->Sfull, Z, rwork1, rwork1, 0, h)); + ldfp->St_count++; + + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecRecycleOrderToHistoryOrder(B, rwork1, ldfp->num_updates, ldfp->cyclic_work_vec)); + PetscCall(MatUpperTriangularSolveInPlace(B, ldfp->YtS_triu, rwork1, PETSC_TRUE, ldfp->num_updates, ldfp->strategy)); + PetscCall(VecScale(rwork1, -1.0)); + if (ldfp->strategy == MAT_LMVM_DENSE_REORDER) PetscCall(VecHistoryOrderToRecycleOrder(B, rwork1, ldfp->num_updates, ldfp->cyclic_work_vec)); + + PetscCall(MatMultAddColumnRange(ldfp->Yfull, rwork1, Z, Z, 0, h)); + ldfp->Y_count++; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* + This dense representation reduces the L-DFP update to a series of + matrix-vector products with dense matrices in lieu of the conventional + matrix-free two-loop algorithm. 
+*/ +PetscErrorCode MatCreate_LMVMDDFP(Mat B) +{ + Mat_LMVM *lmvm; + Mat_DQN *ldfp; + + PetscFunctionBegin; + PetscCall(MatCreate_LMVM(B)); + PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATLMVMDDFP)); + PetscCall(MatSetOption(B, MAT_SPD, PETSC_TRUE)); + PetscCall(MatSetOption(B, MAT_SPD_ETERNAL, PETSC_TRUE)); + B->ops->view = MatView_LMVMDQN; + B->ops->setup = MatSetUp_LMVMDQN; + B->ops->setfromoptions = MatSetFromOptions_LMVMDQN; + B->ops->destroy = MatDestroy_LMVMDQN; + + lmvm = (Mat_LMVM *)B->data; + lmvm->square = PETSC_TRUE; + lmvm->ops->allocate = MatAllocate_LMVMDQN; + lmvm->ops->reset = MatReset_LMVMDQN; + lmvm->ops->update = MatUpdate_LMVMDQN; + lmvm->ops->mult = MatMult_LMVMDDFP; + lmvm->ops->solve = MatSolve_LMVMDDFP; + lmvm->ops->copy = MatCopy_LMVMDQN; + + PetscCall(PetscNew(&ldfp)); + lmvm->ctx = (void *)ldfp; + ldfp->allocated = PETSC_FALSE; + ldfp->watchdog = 0; + ldfp->max_seq_rejects = lmvm->m / 2; + ldfp->strategy = MAT_LMVM_DENSE_INPLACE; + ldfp->scale_type = MAT_LMVM_SYMBROYDEN_SCALE_DIAGONAL; + ldfp->use_recursive = PETSC_TRUE; + ldfp->needPQ = PETSC_TRUE; + + PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &ldfp->diag_qn)); + PetscCall(MatSetType(ldfp->diag_qn, MATLMVMDIAGBROYDEN)); + PetscCall(MatSetOptionsPrefix(ldfp->diag_qn, "J0_")); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + MatCreateLMVMDDFP - Creates a dense representation of the limited-memory + Davidon-Fletcher-Powell (DFP) approximation to a Hessian. + + Collective + + Input Parameters: ++ comm - MPI communicator +. n - number of local rows for storage vectors +- N - global size of the storage vectors + + Output Parameter: +. B - the matrix + + Level: advanced + + Note: + It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions() + paradigm instead of this routine directly. + +.seealso: `MatCreate()`, `MATLMVM`, `MATLMVMDDFP`, `MatCreateLMVMDFP()` +@*/ +PetscErrorCode MatCreateLMVMDDFP(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) +{ + PetscFunctionBegin; + PetscCall(KSPInitializePackage()); + PetscCall(MatCreate(comm, B)); + PetscCall(MatSetSizes(*B, n, n, N, N)); + PetscCall(MatSetType(*B, MATLMVMDDFP)); + PetscCall(MatSetUp(*B)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + MatLMVMDenseSetType - Sets the memory storage type for dense `MATLMVM` matrices + + Input Parameters: ++ B - the `MATLMVM` matrix +- type - dense storage type, see `MatLMVMDenseType` + + Options Database Keys: ++ -mat_lqn_type - set the strategy +. -mat_lbfgs_type - set the strategy +- -mat_ldfp_type - set the strategy + + Level: intermediate + + MatLMVMDenseTypes\: ++ `MAT_LMVM_DENSE_REORDER` - reorders memory to minimize the number of kernel launches +- `MAT_LMVM_DENSE_INPLACE` - applies kernels in place to minimize memory movement + +.seealso: [](ch_ksp), `MATLMVMDQN`, `MATLMVMDBFGS`, `MATLMVMDDFP`, `MatLMVMDenseType` +@*/ +PetscErrorCode MatLMVMDenseSetType(Mat B, MatLMVMDenseType type) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + + PetscFunctionBegin; + PetscValidHeaderSpecific(B, MAT_CLASSID, 1); + lqn->strategy = type; + PetscFunctionReturn(PETSC_SUCCESS); +}
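A minimal usage sketch of the new API (illustrative only, not part of the patch; `N` is a placeholder global size and the update/solve loop is elided -- see the tests added in src/ksp/ksp/utils/lmvm/tests/ below for complete programs):

```c
#include <petscksp.h>

int main(int argc, char **argv)
{
  Mat      B;
  PetscInt N = 100; /* hypothetical global problem size */

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  /* create a dense L-BFGS Hessian approximation and pick its storage strategy */
  PetscCall(MatCreateLMVMDBFGS(PETSC_COMM_WORLD, PETSC_DECIDE, N, &B));
  PetscCall(MatLMVMDenseSetType(B, MAT_LMVM_DENSE_REORDER)); /* or MAT_LMVM_DENSE_INPLACE */
  /* ... MatLMVMUpdate(B, x, g), MatMult(B, x, y), MatSolve(B, g, p) ... */
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}
```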
diff --git a/src/ksp/ksp/utils/lmvm/dense/denseqn.h b/src/ksp/ksp/utils/lmvm/dense/denseqn.h new file mode 100644 index 00000000000..d04c2f6abb2 --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/dense/denseqn.h @@ -0,0 +1,55 @@ +#include <../src/ksp/ksp/utils/lmvm/lmvm.h> + +/* + dense representation for the limited-memory BFGS/DFP methods. +*/ + +typedef struct { + Mat diag_qn; /* diagonalized Hessian init */ + + PetscInt num_updates; + PetscInt num_mult_updates; + Mat Sfull, Yfull, HY, BS; // Stored in recycled order + Vec StFprev; + Mat StY_triu; // triu(StY) is the R matrix + Mat StY_triu_strict; // strict_triu(StY) is the R matrix + Mat YtS_triu_strict; // strict_triu(YtS) is the L^T matrix + Mat YtS_triu; // triu(YtS) is the L matrix + Mat YtHY; + Mat StBS; + Mat J; + Mat temp_mat; + Vec *PQ; /* P for BFGS, Q for DFP */ + Vec diag_vec; + Vec diag_vec_recycle_order; + Vec inv_diag_vec; + Vec column_work, column_work2, rwork1, rwork2, rwork3; + Vec rwork2_local, rwork3_local; + Vec local_work_vec, local_work_vec_copy; + Vec cyclic_work_vec; + MatType dense_type; + MatLMVMDenseType strategy; + MatLMVMSymBroydenScaleType scale_type; + + PetscReal *ytq, *stp, *yts; + PetscScalar *workscalar; + PetscInt S_count, St_count, Y_count, Yt_count; + PetscInt watchdog, max_seq_rejects; /* tracker to reset after a certain # of consecutive rejects */ + PetscBool allocated, use_recursive, needPQ; /* P for BFGS, Q for DFP */ + Vec Fprev_ref; + PetscObjectState Fprev_state; +} Mat_DQN; + +PETSC_INTERN PetscErrorCode MatView_LMVMDDFP(Mat, PetscViewer); +PETSC_INTERN PetscErrorCode MatView_LMVMDBFGS(Mat, PetscViewer); + +PETSC_INTERN PetscErrorCode MatUpperTriangularSolveInPlace_CUPM(PetscBool, PetscInt, const PetscScalar[], PetscInt, PetscScalar[], PetscInt); +PETSC_INTERN PetscErrorCode MatUpperTriangularSolveInPlaceCyclic_CUPM(PetscBool, PetscInt, PetscInt, const PetscScalar[], PetscInt, PetscScalar[], PetscInt); +PETSC_INTERN PetscErrorCode MatMultAddColumnRange(Mat, Vec, Vec, Vec, PetscInt, PetscInt); +PETSC_INTERN PetscErrorCode MatMultHermitianTransposeColumnRange(Mat, Vec, Vec, PetscInt, PetscInt); +PETSC_INTERN PetscErrorCode MatMultHermitianTransposeAddColumnRange(Mat, Vec, Vec, Vec, PetscInt, PetscInt); +PETSC_INTERN PetscErrorCode VecCyclicShift(Mat, Vec, PetscInt, Vec); +PETSC_INTERN PetscErrorCode VecRecycleOrderToHistoryOrder(Mat, Vec, PetscInt, Vec); +PETSC_INTERN PetscErrorCode VecHistoryOrderToRecycleOrder(Mat, Vec, PetscInt, Vec); +PETSC_INTERN PetscErrorCode MatUpperTriangularSolveInPlace(Mat, Mat, Vec, PetscBool, PetscInt, MatLMVMDenseType); +PETSC_INTERN PetscErrorCode MatMove_LR3(Mat, Mat, PetscInt); diff --git a/src/ksp/ksp/utils/lmvm/dense/makefile b/src/ksp/ksp/utils/lmvm/dense/makefile new file mode 100644 index 00000000000..9a13d3ae113 --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/dense/makefile @@ -0,0 +1,7 @@ +-include ../../../../../../petscdir.mk + +LIBBASE = libpetscksp +MANSEC = KSP + +include ${PETSC_DIR}/lib/petsc/conf/variables +include ${PETSC_DIR}/lib/petsc/conf/rules.doc diff --git a/src/ksp/ksp/utils/lmvm/dfp/dfp.c b/src/ksp/ksp/utils/lmvm/dfp/dfp.c index e05994ee414..f716847743a 100644 --- a/src/ksp/ksp/utils/lmvm/dfp/dfp.c +++ b/src/ksp/ksp/utils/lmvm/dfp/dfp.c @@ -133,8 +133,8 @@ static PetscErrorCode MatUpdate_LMVMDFP(Mat B, Vec X, Vec F) Mat_LMVM *dbase; Mat_DiagBrdn *dctx; PetscInt old_k, i; - PetscReal curvtol, ststmp; - PetscScalar curvature, ytytmp; + PetscReal curvtol, ytytmp; + PetscScalar curvature, ststmp; PetscFunctionBegin; if (!lmvm->m) PetscFunctionReturn(PETSC_SUCCESS); @@ -144,9 +144,9 @@ static PetscErrorCode MatUpdate_LMVMDFP(Mat B, Vec X, Vec F) PetscCall(VecAYPX(lmvm->Fprev, -1.0, F)); /* Test if the updates can be accepted */ - PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ststmp)); - if (ststmp < lmvm->eps) curvtol = 0.0; - else curvtol = lmvm->eps * ststmp; + 
PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ytytmp)); + if (ytytmp < lmvm->eps) curvtol = 0.0; + else curvtol = lmvm->eps * ytytmp; if (PetscRealPart(curvature) > curvtol) { /* Update is good, accept it */ @@ -163,10 +163,10 @@ static PetscErrorCode MatUpdate_LMVMDFP(Mat B, Vec X, Vec F) } } /* Update history of useful scalars */ - PetscCall(VecDot(lmvm->Y[lmvm->k], lmvm->Y[lmvm->k], &ytytmp)); + PetscCall(VecDot(lmvm->S[lmvm->k], lmvm->S[lmvm->k], &ststmp)); ldfp->yts[lmvm->k] = PetscRealPart(curvature); - ldfp->yty[lmvm->k] = PetscRealPart(ytytmp); - ldfp->sts[lmvm->k] = ststmp; + ldfp->yty[lmvm->k] = ytytmp; + ldfp->sts[lmvm->k] = PetscRealPart(ststmp); /* Compute the scalar scale if necessary */ if (ldfp->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_SCALAR) PetscCall(MatSymBrdnComputeJ0Scalar(B)); } else { @@ -380,13 +380,13 @@ PetscErrorCode MatCreate_LMVMDFP(Mat B) B->ops->setup = MatSetUp_LMVMDFP; B->ops->destroy = MatDestroy_LMVMDFP; B->ops->setfromoptions = MatSetFromOptions_LMVMDFP; - B->ops->solve = MatSolve_LMVMDFP; lmvm = (Mat_LMVM *)B->data; lmvm->ops->allocate = MatAllocate_LMVMDFP; lmvm->ops->reset = MatReset_LMVMDFP; lmvm->ops->update = MatUpdate_LMVMDFP; lmvm->ops->mult = MatMult_LMVMDFP; + lmvm->ops->solve = MatSolve_LMVMDFP; lmvm->ops->copy = MatCopy_LMVMDFP; ldfp = (Mat_SymBrdn *)lmvm->ctx; @@ -435,6 +435,7 @@ PetscErrorCode MatCreate_LMVMDFP(Mat B) PetscErrorCode MatCreateLMVMDFP(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) { PetscFunctionBegin; + PetscCall(KSPInitializePackage()); PetscCall(MatCreate(comm, B)); PetscCall(MatSetSizes(*B, n, n, N, N)); PetscCall(MatSetType(*B, MATLMVMDFP)); diff --git a/src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.c b/src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.c index 2034a93c351..090a36538f6 100644 --- a/src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.c +++ b/src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.c @@ -1,3 +1,4 @@ +#include #include <../src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.h> /*I "petscksp.h" I*/ static PetscErrorCode MatSolve_DiagBrdn(Mat B, Vec F, Vec dX) @@ -25,8 +26,8 @@ static PetscErrorCode MatUpdate_DiagBrdn(Mat B, Vec X, Vec F) Mat_LMVM *lmvm = (Mat_LMVM *)B->data; Mat_DiagBrdn *ldb = (Mat_DiagBrdn *)lmvm->ctx; PetscInt old_k, i, start; - PetscScalar yty, curvature, ytDy, stDs, ytDs; - PetscReal curvtol, sigma, yy_sum, ss_sum, ys_sum, denom, ststmp; + PetscScalar curvature, ytDy, sts, stDs, ytDs; + PetscReal curvtol, sigma, yy_sum, ss_sum, ys_sum, denom, ytytmp; PetscReal stDsr, ytDyr; PetscFunctionBegin; @@ -37,9 +38,9 @@ static PetscErrorCode MatUpdate_DiagBrdn(Mat B, Vec X, Vec F) PetscCall(VecAYPX(lmvm->Fprev, -1.0, F)); /* Test if the updates can be accepted */ - PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ststmp)); - if (ststmp < lmvm->eps) curvtol = 0.0; - else curvtol = lmvm->eps * ststmp; + PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ytytmp)); + if (ytytmp < lmvm->eps) curvtol = 0.0; + else curvtol = lmvm->eps * ytytmp; /* Test the curvature for the update */ if (PetscRealPart(curvature) > curvtol) { @@ -55,10 +56,10 @@ static PetscErrorCode MatUpdate_DiagBrdn(Mat B, Vec X, Vec F) } } /* Accept dot products into the history */ - PetscCall(VecDot(lmvm->Y[lmvm->k], lmvm->Y[lmvm->k], &yty)); - ldb->yty[lmvm->k] = PetscRealPart(yty); + PetscCall(VecDot(lmvm->S[lmvm->k], lmvm->S[lmvm->k], &sts)); + ldb->yty[lmvm->k] = ytytmp; ldb->yts[lmvm->k] = PetscRealPart(curvature); - ldb->sts[lmvm->k] = ststmp; + ldb->sts[lmvm->k] = PetscRealPart(sts); if (ldb->forward) { /* We are doing 
diagonal scaling of the forward Hessian B */ /* BFGS = DFP = inv(D); */ @@ -366,12 +367,14 @@ static PetscErrorCode MatSetFromOptions_DiagBrdn(Mat B, PetscOptionItems *PetscO PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatSetUp_DiagBrdn(Mat); static PetscErrorCode MatReset_DiagBrdn(Mat B, PetscBool destructive) { Mat_LMVM *lmvm = (Mat_LMVM *)B->data; Mat_DiagBrdn *ldb = (Mat_DiagBrdn *)lmvm->ctx; PetscFunctionBegin; + if (!ldb->allocated) PetscCall(MatSetUp_DiagBrdn(B)); PetscCall(VecSet(ldb->invD, ldb->delta)); if (destructive && ldb->allocated) { PetscCall(PetscFree3(ldb->yty, ldb->yts, ldb->sts)); @@ -447,6 +450,7 @@ static PetscErrorCode MatSetUp_DiagBrdn(Mat B) PetscCall(VecDuplicate(lmvm->Xprev, &ldb->U)); PetscCall(VecDuplicate(lmvm->Xprev, &ldb->V)); PetscCall(VecDuplicate(lmvm->Xprev, &ldb->W)); + PetscCall(VecSet(ldb->invD, ldb->delta)); ldb->allocated = PETSC_TRUE; } PetscFunctionReturn(PETSC_SUCCESS); @@ -463,7 +467,6 @@ PetscErrorCode MatCreate_LMVMDiagBrdn(Mat B) B->ops->setup = MatSetUp_DiagBrdn; B->ops->setfromoptions = MatSetFromOptions_DiagBrdn; B->ops->destroy = MatDestroy_DiagBrdn; - B->ops->solve = MatSolve_DiagBrdn; B->ops->view = MatView_DiagBrdn; lmvm = (Mat_LMVM *)B->data; @@ -472,6 +475,7 @@ PetscErrorCode MatCreate_LMVMDiagBrdn(Mat B) lmvm->ops->allocate = MatAllocate_DiagBrdn; lmvm->ops->reset = MatReset_DiagBrdn; lmvm->ops->mult = MatMult_DiagBrdn; + lmvm->ops->solve = MatSolve_DiagBrdn; lmvm->ops->update = MatUpdate_DiagBrdn; lmvm->ops->copy = MatCopy_DiagBrdn; @@ -543,6 +547,7 @@ PetscErrorCode MatCreate_LMVMDiagBrdn(Mat B) PetscErrorCode MatCreateLMVMDiagBroyden(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) { PetscFunctionBegin; + PetscCall(KSPInitializePackage()); PetscCall(MatCreate(comm, B)); PetscCall(MatSetSizes(*B, n, n, N, N)); PetscCall(MatSetType(*B, MATLMVMDIAGBROYDEN)); diff --git a/src/ksp/ksp/utils/lmvm/lmvm.h b/src/ksp/ksp/utils/lmvm/lmvm.h index ab26e70bef2..7642946e0f1 100644 --- a/src/ksp/ksp/utils/lmvm/lmvm.h +++ b/src/ksp/ksp/utils/lmvm/lmvm.h @@ -20,6 +20,7 @@ struct _MatOps_LMVM { PetscErrorCode (*allocate)(Mat, Vec, Vec); PetscErrorCode (*reset)(Mat, PetscBool); PetscErrorCode (*mult)(Mat, Vec, Vec); + PetscErrorCode (*solve)(Mat, Vec, Vec); PetscErrorCode (*copy)(Mat, Mat, MatStructure); }; @@ -65,7 +66,10 @@ PETSC_INTERN PetscErrorCode MatCreate_LMVM(Mat); /* Create functions for derived LMVM types */ PETSC_EXTERN PetscErrorCode MatCreate_LMVMDFP(Mat); +PETSC_EXTERN PetscErrorCode MatCreate_LMVMDDFP(Mat); PETSC_EXTERN PetscErrorCode MatCreate_LMVMBFGS(Mat); +PETSC_EXTERN PetscErrorCode MatCreate_LMVMDBFGS(Mat); +PETSC_EXTERN PetscErrorCode MatCreate_LMVMDQN(Mat); PETSC_EXTERN PetscErrorCode MatCreate_LMVMSR1(Mat); PETSC_EXTERN PetscErrorCode MatCreate_LMVMBrdn(Mat); PETSC_EXTERN PetscErrorCode MatCreate_LMVMBadBrdn(Mat); diff --git a/src/ksp/ksp/utils/lmvm/lmvmimpl.c b/src/ksp/ksp/utils/lmvm/lmvmimpl.c index 1824722e3ae..8a6fecdec59 100644 --- a/src/ksp/ksp/utils/lmvm/lmvmimpl.c +++ b/src/ksp/ksp/utils/lmvm/lmvmimpl.c @@ -1,4 +1,6 @@ +#include #include <../src/ksp/ksp/utils/lmvm/lmvm.h> /*I "petscksp.h" I*/ +#include PetscErrorCode MatReset_LMVM(Mat B, PetscBool destructive) { @@ -128,7 +130,20 @@ static PetscErrorCode MatMult_LMVM(Mat B, Vec X, Vec Y) VecCheckMatCompatible(B, X, 2, Y, 3); PetscCheck(lmvm->allocated, PetscObjectComm((PetscObject)B), PETSC_ERR_ORDER, "LMVM matrix must be allocated first"); PetscCall((*lmvm->ops->mult)(B, X, Y)); - PetscCall(VecAXPY(Y, lmvm->shift, X)); + if (lmvm->shift) 
PetscCall(VecAXPY(Y, lmvm->shift, X)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolve_LMVM(Mat B, Vec F, Vec dX) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + + PetscFunctionBegin; + VecCheckSameSize(F, 2, dX, 3); + VecCheckMatCompatible(B, F, 2, dX, 3); + PetscCheck(lmvm->allocated, PetscObjectComm((PetscObject)B), PETSC_ERR_ORDER, "LMVM matrix must be allocated first"); + PetscCheck(*lmvm->ops->solve, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_INCOMP, "LMVM matrix does not have a solution or inversion implementation"); + PetscCall((*lmvm->ops->solve)(B, F, dX)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -167,8 +182,8 @@ static PetscErrorCode MatCopy_LMVM(Mat B, Mat M, MatStructure str) mctx->nrejects = bctx->nrejects; mctx->k = bctx->k; for (i = 0; i <= bctx->k; ++i) { - PetscCall(VecCopy(bctx->S[i], mctx->S[i])); - PetscCall(VecCopy(bctx->Y[i], mctx->Y[i])); + if (bctx->S) PetscCall(VecCopy(bctx->S[i], mctx->S[i])); + if (bctx->Y) PetscCall(VecCopy(bctx->Y[i], mctx->Y[i])); PetscCall(VecCopy(bctx->Xprev, mctx->Xprev)); PetscCall(VecCopy(bctx->Fprev, mctx->Fprev)); } @@ -239,19 +254,16 @@ PetscErrorCode MatView_LMVM(Mat B, PetscViewer pv) PetscErrorCode MatSetFromOptions_LMVM(Mat B, PetscOptionItems *PetscOptionsObject) { - Mat_LMVM *lmvm = (Mat_LMVM *)B->data; - PetscInt m = lmvm->m; - PetscBool allocated; + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + PetscInt m_new = lmvm->m; PetscFunctionBegin; - PetscCall(MatLMVMIsAllocated(B, &allocated)); PetscOptionsHeadBegin(PetscOptionsObject, "Limited-memory Variable Metric matrix for approximating Jacobians"); - PetscCall(PetscOptionsInt("-mat_lmvm_hist_size", "number of past updates kept in memory for the approximation", "", lmvm->m, &m, NULL)); - if (m != lmvm->m && allocated) PetscCall(MatLMVMReset(B, PETSC_TRUE)); - lmvm->m = m; + PetscCall(PetscOptionsInt("-mat_lmvm_hist_size", "number of past updates kept in memory for the approximation", "", m_new, &m_new, NULL)); PetscCall(PetscOptionsInt("-mat_lmvm_ksp_its", "(developer) fixed number of KSP iterations to take when inverting J0", "", lmvm->ksp_max_it, &lmvm->ksp_max_it, NULL)); PetscCall(PetscOptionsReal("-mat_lmvm_eps", "(developer) machine zero definition", "", lmvm->eps, &lmvm->eps, NULL)); PetscOptionsHeadEnd(); + if (m_new != lmvm->m) PetscCall(MatLMVMSetHistorySize(B, m_new)); PetscCall(KSPSetFromOptions(lmvm->J0ksp)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -341,6 +353,7 @@ PetscErrorCode MatCreate_LMVM(Mat B) B->ops->duplicate = MatDuplicate_LMVM; B->ops->mult = MatMult_LMVM; B->ops->multadd = MatMultAdd_LMVM; + B->ops->solve = MatSolve_LMVM; B->ops->copy = MatCopy_LMVM; lmvm->ops->update = MatUpdate_LMVM; diff --git a/src/ksp/ksp/utils/lmvm/lmvmutils.c b/src/ksp/ksp/utils/lmvm/lmvmutils.c index ec5e315b2cc..86f963b18ca 100644 --- a/src/ksp/ksp/utils/lmvm/lmvmutils.c +++ b/src/ksp/ksp/utils/lmvm/lmvmutils.c @@ -1,4 +1,6 @@ +#include #include <../src/ksp/ksp/utils/lmvm/lmvm.h> /*I "petscksp.h" I*/ +#include /*@ MatLMVMUpdate - Adds (X-Xprev) and (F-Fprev) updates to an `MATLMVM` matrix. 
@@ -362,7 +364,7 @@ PetscErrorCode MatLMVMGetJ0KSP(Mat B, KSP *J0ksp) @*/ PetscErrorCode MatLMVMApplyJ0Fwd(Mat B, Vec X, Vec Y) { - Mat_LMVM *lmvm; + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; PetscBool same, hasMult; Mat Amat, Pmat; @@ -372,7 +374,6 @@ PetscErrorCode MatLMVMApplyJ0Fwd(Mat B, Vec X, Vec Y) PetscValidHeaderSpecific(Y, VEC_CLASSID, 3); PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATLMVM, &same)); PetscCheck(same, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Matrix must be an LMVM-type."); - lmvm = (Mat_LMVM *)B->data; PetscCheck(lmvm->allocated, PetscObjectComm((PetscObject)B), PETSC_ERR_ORDER, "LMVM matrix must be allocated first"); VecCheckMatCompatible(B, X, 2, Y, 3); if (lmvm->user_pc || lmvm->user_ksp || lmvm->J0) { @@ -432,7 +433,7 @@ PetscErrorCode MatLMVMApplyJ0Fwd(Mat B, Vec X, Vec Y) @*/ PetscErrorCode MatLMVMApplyJ0Inv(Mat B, Vec X, Vec Y) { - Mat_LMVM *lmvm; + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; PetscBool same, hasSolve; PetscFunctionBegin; @@ -618,26 +619,44 @@ PetscErrorCode MatLMVMReset(Mat B, PetscBool destructive) @*/ PetscErrorCode MatLMVMSetHistorySize(Mat B, PetscInt hist_size) { - Mat_LMVM *lmvm; + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; PetscBool same; - Vec X, F; PetscFunctionBegin; PetscValidHeaderSpecific(B, MAT_CLASSID, 1); PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATLMVM, &same)); if (!same) PetscFunctionReturn(PETSC_SUCCESS); - lmvm = (Mat_LMVM *)B->data; - if (hist_size > 0) { - lmvm->m = hist_size; - if (lmvm->allocated && lmvm->m != lmvm->m_old) { + PetscCheck(hist_size >= 0, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "QN history size must be a non-negative integer."); + { + PetscBool reallocate = PETSC_FALSE; + Vec X = NULL, F = NULL; + //lmvm->m = hist_size; + if (lmvm->allocated && hist_size != lmvm->m) { PetscCall(VecDuplicate(lmvm->Xprev, &X)); PetscCall(VecDuplicate(lmvm->Fprev, &F)); PetscCall(MatLMVMReset(B, PETSC_TRUE)); + reallocate = PETSC_TRUE; + } + lmvm->m = hist_size; + if (reallocate) { PetscCall(MatLMVMAllocate(B, X, F)); PetscCall(VecDestroy(&X)); PetscCall(VecDestroy(&F)); } - } else PetscCheck(hist_size >= 0, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "QN history size must be a non-negative integer."); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode MatLMVMGetHistorySize(Mat B, PetscInt *hist_size) +{ + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + PetscBool same; + + PetscFunctionBegin; + PetscValidHeaderSpecific(B, MAT_CLASSID, 1); + PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATLMVM, &same)); + if (!same) PetscFunctionReturn(PETSC_SUCCESS); + *hist_size = lmvm->m; PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ksp/ksp/utils/lmvm/sr1/sr1.c b/src/ksp/ksp/utils/lmvm/sr1/sr1.c index 10a9fd84f8a..19b55dd0892 100644 --- a/src/ksp/ksp/utils/lmvm/sr1/sr1.c +++ b/src/ksp/ksp/utils/lmvm/sr1/sr1.c @@ -260,7 +260,6 @@ PetscErrorCode MatCreate_LMVMSR1(Mat B) PetscCall(MatSetOption(B, MAT_SYMMETRIC, PETSC_TRUE)); B->ops->setup = MatSetUp_LMVMSR1; B->ops->destroy = MatDestroy_LMVMSR1; - B->ops->solve = MatSolve_LMVMSR1; lmvm = (Mat_LMVM *)B->data; lmvm->square = PETSC_TRUE; @@ -269,6 +268,7 @@ PetscErrorCode MatCreate_LMVMSR1(Mat B) lmvm->ops->update = MatUpdate_LMVMSR1; lmvm->ops->mult = MatMult_LMVMSR1; lmvm->ops->copy = MatCopy_LMVMSR1; + lmvm->ops->solve = MatSolve_LMVMSR1; PetscCall(PetscNew(&lsr1)); lmvm->ctx = (void *)lsr1; @@ -309,6 +309,7 @@ PetscErrorCode MatCreate_LMVMSR1(Mat B) PetscErrorCode MatCreateLMVMSR1(MPI_Comm comm, PetscInt n, PetscInt N, Mat 
*B) { PetscFunctionBegin; + PetscCall(KSPInitializePackage()); PetscCall(MatCreate(comm, B)); PetscCall(MatSetSizes(*B, n, n, N, N)); PetscCall(MatSetType(*B, MATLMVMSR1)); diff --git a/src/ksp/ksp/utils/lmvm/symbrdn/symbadbrdn.c b/src/ksp/ksp/utils/lmvm/symbrdn/symbadbrdn.c index 9782690138c..099d13077b2 100644 --- a/src/ksp/ksp/utils/lmvm/symbrdn/symbadbrdn.c +++ b/src/ksp/ksp/utils/lmvm/symbrdn/symbadbrdn.c @@ -240,6 +240,7 @@ PetscErrorCode MatCreate_LMVMSymBadBrdn(Mat B) PetscErrorCode MatCreateLMVMSymBadBroyden(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) { PetscFunctionBegin; + PetscCall(KSPInitializePackage()); PetscCall(MatCreate(comm, B)); PetscCall(MatSetSizes(*B, n, n, N, N)); PetscCall(MatSetType(*B, MATLMVMSYMBADBROYDEN)); diff --git a/src/ksp/ksp/utils/lmvm/symbrdn/symbrdn.c b/src/ksp/ksp/utils/lmvm/symbrdn/symbrdn.c index 7dd2569df3b..9e3ceeb4ea0 100644 --- a/src/ksp/ksp/utils/lmvm/symbrdn/symbrdn.c +++ b/src/ksp/ksp/utils/lmvm/symbrdn/symbrdn.c @@ -1,5 +1,8 @@ #include <../src/ksp/ksp/utils/lmvm/symbrdn/symbrdn.h> /*I "petscksp.h" I*/ +#include <../src/ksp/ksp/utils/lmvm/dense/denseqn.h> #include <../src/ksp/ksp/utils/lmvm/diagbrdn/diagbrdn.h> +#include +#include const char *const MatLMVMSymBroydenScaleTypes[] = {"NONE", "SCALAR", "DIAGONAL", "USER", "MatLMVMSymBrdnScaleType", "MAT_LMVM_SYMBROYDEN_SCALING_", NULL}; @@ -215,8 +218,8 @@ static PetscErrorCode MatUpdate_LMVMSymBrdn(Mat B, Vec X, Vec F) Mat_LMVM *dbase; Mat_DiagBrdn *dctx; PetscInt old_k, i; - PetscReal curvtol, ststmp; - PetscScalar curvature, ytytmp; + PetscReal curvtol, ytytmp; + PetscScalar curvature, ststmp; PetscFunctionBegin; if (!lmvm->m) PetscFunctionReturn(PETSC_SUCCESS); @@ -226,9 +229,9 @@ static PetscErrorCode MatUpdate_LMVMSymBrdn(Mat B, Vec X, Vec F) PetscCall(VecAYPX(lmvm->Fprev, -1.0, F)); /* Test if the updates can be accepted */ - PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ststmp)); - if (ststmp < lmvm->eps) curvtol = 0.0; - else curvtol = lmvm->eps * ststmp; + PetscCall(VecDotNorm2(lmvm->Xprev, lmvm->Fprev, &curvature, &ytytmp)); + if (ytytmp < lmvm->eps) curvtol = 0.0; + else curvtol = lmvm->eps * ytytmp; if (PetscRealPart(curvature) > curvtol) { /* Update is good, accept it */ @@ -245,10 +248,10 @@ static PetscErrorCode MatUpdate_LMVMSymBrdn(Mat B, Vec X, Vec F) } } /* Update history of useful scalars */ - PetscCall(VecDot(lmvm->Y[lmvm->k], lmvm->Y[lmvm->k], &ytytmp)); + PetscCall(VecDot(lmvm->S[lmvm->k], lmvm->S[lmvm->k], &ststmp)); lsb->yts[lmvm->k] = PetscRealPart(curvature); - lsb->yty[lmvm->k] = PetscRealPart(ytytmp); - lsb->sts[lmvm->k] = ststmp; + lsb->yty[lmvm->k] = ytytmp; + lsb->sts[lmvm->k] = PetscRealPart(ststmp); /* Compute the scalar scale if necessary */ if (lsb->scale_type == MAT_LMVM_SYMBROYDEN_SCALE_SCALAR) PetscCall(MatSymBrdnComputeJ0Scalar(B)); } else { @@ -544,7 +547,6 @@ PetscErrorCode MatCreate_LMVMSymBrdn(Mat B) B->ops->setfromoptions = MatSetFromOptions_LMVMSymBrdn; B->ops->setup = MatSetUp_LMVMSymBrdn; B->ops->destroy = MatDestroy_LMVMSymBrdn; - B->ops->solve = MatSolve_LMVMSymBrdn; lmvm = (Mat_LMVM *)B->data; lmvm->square = PETSC_TRUE; @@ -552,6 +554,7 @@ PetscErrorCode MatCreate_LMVMSymBrdn(Mat B) lmvm->ops->reset = MatReset_LMVMSymBrdn; lmvm->ops->update = MatUpdate_LMVMSymBrdn; lmvm->ops->mult = MatMult_LMVMSymBrdn; + lmvm->ops->solve = MatSolve_LMVMSymBrdn; lmvm->ops->copy = MatCopy_LMVMSymBrdn; PetscCall(PetscNew(&lsb)); @@ -591,19 +594,51 @@ PetscErrorCode MatCreate_LMVMSymBrdn(Mat B) @*/ PetscErrorCode MatLMVMSymBroydenSetDelta(Mat B, 
PetscScalar delta) { - Mat_LMVM *lmvm = (Mat_LMVM *)B->data; - Mat_SymBrdn *lsb = (Mat_SymBrdn *)lmvm->ctx; - PetscBool is_bfgs, is_dfp, is_symbrdn, is_symbadbrdn; + Mat_LMVM *lmvm = (Mat_LMVM *)B->data; + PetscBool is_bfgs, is_dfp, is_symbrdn, is_symbadbrdn, is_dbfgs, is_ddfp, is_dqn; + PetscReal del_min, del_max, del_buf; PetscFunctionBegin; PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMBFGS, &is_bfgs)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDBFGS, &is_dbfgs)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDDFP, &is_ddfp)); + PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDQN, &is_dqn)); PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMDFP, &is_dfp)); PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMSYMBROYDEN, &is_symbrdn)); PetscCall(PetscObjectTypeCompare((PetscObject)B, MATLMVMSYMBADBROYDEN, &is_symbadbrdn)); - PetscCheck(is_bfgs || is_dfp || is_symbrdn || is_symbadbrdn, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_INCOMP, "diagonal scaling is only available for DFP, BFGS and SymBrdn matrices"); - lsb->delta = PetscAbsReal(PetscRealPart(delta)); - lsb->delta = PetscMin(lsb->delta, lsb->delta_max); - lsb->delta = PetscMax(lsb->delta, lsb->delta_min); + + if (is_bfgs || is_dfp || is_symbrdn || is_symbadbrdn) { + Mat_SymBrdn *lsb = (Mat_SymBrdn *)lmvm->ctx; + + lsb = (Mat_SymBrdn *)lmvm->ctx; + del_min = lsb->delta_min; + del_max = lsb->delta_max; + } else if (is_dbfgs || is_ddfp || is_dqn) { + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + Mat_LMVM *dbase = (Mat_LMVM *)lqn->diag_qn->data; + Mat_DiagBrdn *diagctx = (Mat_DiagBrdn *)dbase->ctx; + + del_min = diagctx->delta_min; + del_max = diagctx->delta_max; + } else { + SETERRQ(PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_INCOMP, "diagonal scaling only available for SymBrdn-derived types (DBFGS, BFGS, DDFP, DFP, SymBrdn, SymBadBrdn"); + } + + del_buf = PetscAbsReal(PetscRealPart(delta)); + del_buf = PetscMin(del_buf, del_max); + del_buf = PetscMax(del_buf, del_min); + if (is_dbfgs || is_ddfp || is_dqn) { + Mat_DQN *lqn = (Mat_DQN *)lmvm->ctx; + Mat_LMVM *dbase = (Mat_LMVM *)lqn->diag_qn->data; + Mat_DiagBrdn *diagctx = (Mat_DiagBrdn *)dbase->ctx; + + diagctx->delta = del_buf; + } else { + Mat_SymBrdn *lsb = (Mat_SymBrdn *)lmvm->ctx; + + lsb = (Mat_SymBrdn *)lmvm->ctx; + lsb->delta = del_buf; + } PetscFunctionReturn(PETSC_SUCCESS); } @@ -682,6 +717,7 @@ PetscErrorCode MatLMVMSymBroydenSetScaleType(Mat B, MatLMVMSymBroydenScaleType s PetscErrorCode MatCreateLMVMSymBroyden(MPI_Comm comm, PetscInt n, PetscInt N, Mat *B) { PetscFunctionBegin; + PetscCall(KSPInitializePackage()); PetscCall(MatCreate(comm, B)); PetscCall(MatSetSizes(*B, n, n, N, N)); PetscCall(MatSetType(*B, MATLMVMSYMBROYDEN)); diff --git a/src/ksp/ksp/utils/lmvm/tests/lmvm_copy_test.c b/src/ksp/ksp/utils/lmvm/tests/lmvm_copy_test.c new file mode 100644 index 00000000000..e96a04bbfb8 --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/tests/lmvm_copy_test.c @@ -0,0 +1,198 @@ +const char help[] = "Test that MatCopy() does not affect the copied LMVM matrix"; + +#include + +static PetscErrorCode positiveVectorUpdate(PetscRandom rand, Vec x, Vec f) +{ + Vec _x, _f; + PetscScalar dot; + + PetscFunctionBegin; + PetscCall(VecDuplicate(x, &_x)); + PetscCall(VecDuplicate(f, &_f)); + PetscCall(VecSetRandom(_x, rand)); + PetscCall(VecSetRandom(_f, rand)); + PetscCall(VecDot(_x, _f, &dot)); + PetscCall(VecAXPY(x, PetscAbsScalar(dot) / dot, _x)); + PetscCall(VecAXPY(f, 1.0, _f)); + PetscCall(VecDestroy(&_f)); + 
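/* the |dot|/dot sign flip applied to _x above guarantees that successive (x, f) increments satisfy s^T y = |_x^T _f| > 0, so every MatLMVMUpdate() in this test sees positive curvature and accepts the update */ +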
PetscCall(VecDestroy(&_x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode VecEqualToTolerance(Vec a, Vec b, NormType norm_type, PetscReal tol, PetscBool *flg) +{ + Vec diff; + PetscReal diff_norm; + + PetscFunctionBegin; + PetscCall(VecDuplicate(a, &diff)); + PetscCall(VecCopy(a, diff)); + PetscCall(VecAXPY(diff, -1.0, b)); + PetscCall(VecNorm(diff, norm_type, &diff_norm)); + PetscCall(VecDestroy(&diff)); + *flg = (diff_norm <= tol) ? PETSC_TRUE : PETSC_FALSE; + PetscFunctionReturn(PETSC_SUCCESS); +} + +// unlike MatTestEqual(), this test tests MatMult() and MatSolve() +static PetscErrorCode testMatEqual(PetscRandom rand, Mat A, Mat B, PetscBool *flg) +{ + Vec x, y_A, y_B; + + PetscFunctionBegin; + *flg = PETSC_TRUE; + PetscCall(MatCreateVecs(A, &x, &y_A)); + PetscCall(MatCreateVecs(B, NULL, &y_B)); + PetscCall(VecSetRandom(x, rand)); + PetscCall(MatMult(A, x, y_A)); + PetscCall(MatMult(B, x, y_B)); + PetscCall(VecEqualToTolerance(y_A, y_B, NORM_2, PETSC_SMALL, flg)); + if (*flg == PETSC_TRUE) { + PetscCall(MatSolve(A, x, y_A)); + PetscCall(MatSolve(B, x, y_B)); + PetscCall(VecEqualToTolerance(y_A, y_B, NORM_2, PETSC_SMALL, flg)); + if (*flg == PETSC_FALSE) { + PetscReal norm; + + PetscCall(VecAXPY(y_A, -1.0, y_B)); + PetscCall(VecNorm(y_A, NORM_INFINITY, &norm)); + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "MatSolve() norm error %g\n", (double)norm)); + } + } else { + PetscReal norm; + + PetscCall(VecAXPY(y_A, -1.0, y_B)); + PetscCall(VecNorm(y_A, NORM_INFINITY, &norm)); + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "MatMult() norm error %g\n", (double)norm)); + } + PetscCall(VecDestroy(&y_B)); + PetscCall(VecDestroy(&y_A)); + PetscCall(VecDestroy(&x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode testUnchangedBegin(PetscRandom rand, Mat A, Vec *x, Vec *y, Vec *z) +{ + Vec _x, _y, _z; + + PetscFunctionBegin; + PetscCall(MatCreateVecs(A, &_x, &_y)); + PetscCall(MatCreateVecs(A, NULL, &_z)); + PetscCall(VecSetRandom(_x, rand)); + PetscCall(MatMult(A, _x, _y)); + PetscCall(MatSolve(A, _x, _z)); + *x = _x; + *y = _y; + *z = _z; + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode testUnchangedEnd(PetscRandom rand, Mat A, Vec *x, Vec *y, Vec *z, PetscBool *unchanged) +{ + Vec _x, _y, _z, _y2, _z2; + + PetscFunctionBegin; + *unchanged = PETSC_TRUE; + _x = *x; + _y = *y; + _z = *z; + *x = NULL; + *y = NULL; + *z = NULL; + PetscCall(MatCreateVecs(A, NULL, &_y2)); + PetscCall(MatCreateVecs(A, NULL, &_z2)); + PetscCall(MatMult(A, _x, _y2)); + PetscCall(MatSolve(A, _x, _z2)); + PetscCall(VecEqual(_y, _y2, unchanged)); + if (*unchanged == PETSC_TRUE) PetscCall(VecEqual(_z, _z2, unchanged)); + PetscCall(VecDestroy(&_z2)); + PetscCall(VecDestroy(&_y2)); + PetscCall(VecDestroy(&_z)); + PetscCall(VecDestroy(&_y)); + PetscCall(VecDestroy(&_x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode testMatLMVMCopy(PetscRandom rand) +{ + PetscInt N = 10; + MPI_Comm comm = PetscObjectComm((PetscObject)rand); + PetscInt k_pre = 2; // number of updates before copy + Mat A, A_copy; + Vec u, x, f, x_copy, f_copy, v1, v2, v3; + PetscBool equal; + PetscLayout layout; + + PetscFunctionBegin; + PetscCall(VecCreateMPI(comm, PETSC_DECIDE, N, &u)); + PetscCall(VecSetFromOptions(u)); + PetscCall(VecSetUp(u)); + PetscCall(VecDuplicate(u, &x)); + PetscCall(VecDuplicate(u, &f)); + PetscCall(VecGetLayout(u, &layout)); + PetscCall(MatCreate(comm, &A)); + PetscCall(MatSetLayouts(A, layout, layout)); + PetscCall(MatSetType(A, 
MATLMVMBFGS)); + PetscCall(MatSetFromOptions(A)); + PetscCall(positiveVectorUpdate(rand, x, f)); + PetscCall(MatLMVMAllocate(A, x, f)); + PetscCall(MatSetUp(A)); + for (PetscInt k = 0; k <= k_pre; k++) { + PetscCall(positiveVectorUpdate(rand, x, f)); + PetscCall(MatLMVMUpdate(A, x, f)); + } + PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &A_copy)); + PetscCall(testMatEqual(rand, A, A_copy, &equal)); + PetscCheck(equal, comm, PETSC_ERR_PLIB, "MatCopy() not the same after initial copy"); + + PetscCall(VecDuplicate(x, &x_copy)); + PetscCall(VecCopy(x, x_copy)); + PetscCall(VecDuplicate(f, &f_copy)); + PetscCall(VecCopy(f, f_copy)); + + PetscCall(testUnchangedBegin(rand, A_copy, &v1, &v2, &v3)); + PetscCall(positiveVectorUpdate(rand, x, f)); + PetscCall(MatLMVMUpdate(A, x, f)); + PetscCall(testUnchangedEnd(rand, A_copy, &v1, &v2, &v3, &equal)); + PetscCheck(equal, comm, PETSC_ERR_PLIB, "MatLMVMUpdate() to original matrix affects copy"); + + PetscCall(testUnchangedBegin(rand, A, &v1, &v2, &v3)); + PetscCall(positiveVectorUpdate(rand, x_copy, f_copy)); + PetscCall(MatLMVMUpdate(A_copy, x_copy, f_copy)); + PetscCall(testUnchangedEnd(rand, A, &v1, &v2, &v3, &equal)); + PetscCheck(equal, comm, PETSC_ERR_PLIB, "MatLMVMUpdate() to copy matrix affects original"); + + PetscCall(VecDestroy(&f_copy)); + PetscCall(VecDestroy(&x_copy)); + PetscCall(MatDestroy(&A_copy)); + PetscCall(MatDestroy(&A)); + PetscCall(VecDestroy(&f)); + PetscCall(VecDestroy(&x)); + PetscCall(VecDestroy(&u)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +int main(int argc, char **argv) +{ + MPI_Comm comm; + PetscRandom rand; + + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + comm = PETSC_COMM_WORLD; + PetscCall(PetscRandomCreate(comm, &rand)); + PetscCall(PetscRandomSetFromOptions(rand)); + PetscCall(KSPInitializePackage()); + PetscCall(testMatLMVMCopy(rand)); + PetscCall(PetscRandomDestroy(&rand)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + suffix: 0 + args: -mat_type {{lmvmbfgs lmvmdfp lmvmsr1 lmvmbroyden lmvmbadbroyden lmvmsymbroyden lmvmsymbadbroyden lmvmdiagbroyden lmvmdbfgs lmvmddfp lmvmdqn}} + +TEST*/ diff --git a/src/ksp/ksp/utils/lmvm/tests/lmvm_test.c b/src/ksp/ksp/utils/lmvm/tests/lmvm_test.c new file mode 100644 index 00000000000..1c5081faec0 --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/tests/lmvm_test.c @@ -0,0 +1,41 @@ +const char help[] = "Coverage and edge case test for LMVM"; + +#include +#include + +int main(int argc, char **argv) +{ + PetscInt type = 0, n = 10; + Mat B; + + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + PetscOptionsBegin(PETSC_COMM_WORLD, NULL, help, "KSP"); + /* LMVM Types. 
0: LMVMDBFGS, 1: LMVMDDFP, 2: LMVMDQN */ + PetscCall(PetscOptionsInt("-type", "LMVM Type", __FILE__, type, &type, NULL)); + PetscOptionsEnd(); + if (type == 0) { + PetscCall(MatCreateLMVMDBFGS(PETSC_COMM_WORLD, PETSC_DECIDE, n, &B)); + } else if (type == 1) { + PetscCall(MatCreateLMVMDDFP(PETSC_COMM_WORLD, PETSC_DECIDE, n, &B)); + } else if (type == 2) { + PetscCall(MatCreateLMVMDQN(PETSC_COMM_WORLD, PETSC_DECIDE, n, &B)); + } else { + SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "Incompatible LMVM Type."); + } + PetscCall(MatSetFromOptions(B)); + PetscCall(MatSetUp(B)); + PetscCall(MatLMVMDenseSetType(B, MAT_LMVM_DENSE_INPLACE)); + PetscCall(MatDestroy(&B)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + suffix: 0 + output_file: output/lmvm_test.out + nsize: {{1 2}} + args: -mat_lmvm_scale_type {{none scalar diagonal}} -type {{0 1 2}} + +TEST*/ diff --git a/src/ksp/ksp/utils/lmvm/tests/makefile b/src/ksp/ksp/utils/lmvm/tests/makefile new file mode 100644 index 00000000000..b634018eec5 --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/tests/makefile @@ -0,0 +1,7 @@ +-include ../../../../../../petscdir.mk + +LIBBASE = libpetscksp +MANSEC = KSP + +include ${PETSC_DIR}/lib/petsc/conf/variables +include ${PETSC_DIR}/lib/petsc/conf/rules diff --git a/src/ksp/ksp/utils/lmvm/tests/output/lmvm_copy_test_0.out b/src/ksp/ksp/utils/lmvm/tests/output/lmvm_copy_test_0.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/ksp/ksp/utils/lmvm/tests/output/lmvm_test.out b/src/ksp/ksp/utils/lmvm/tests/output/lmvm_test.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/ksp/ksp/utils/lmvm/tests/output/solve_performance_0.out b/src/ksp/ksp/utils/lmvm/tests/output/solve_performance_0.out new file mode 100644 index 00000000000..bb455d6e1d2 --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/tests/output/solve_performance_0.out @@ -0,0 +1,11 @@ +Mat Object: 1 MPI process + type: lmvmbfgs + Scale type: NONE + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factors: phi=0., theta=0.125 + Max. 
storage: 5 + Used storage: 0 + Number of updates: 165 + Number of rejects: 0 + Number of resets: 12 diff --git a/src/ksp/ksp/utils/lmvm/tests/solve_performance.c b/src/ksp/ksp/utils/lmvm/tests/solve_performance.c new file mode 100644 index 00000000000..c72f08d0749 --- /dev/null +++ b/src/ksp/ksp/utils/lmvm/tests/solve_performance.c @@ -0,0 +1,87 @@ +const char help[] = "Profile the performance of MATLMVM MatSolve() in a loop"; + +#include +#include + +int main(int argc, char **argv) +{ + PetscInt n = 1000; + PetscInt n_epochs = 10; + PetscInt n_iters = 10; + Vec x, g, dx, df, p; + PetscRandom rand; + PetscLogStage matsolve_loop, main_stage; + Mat B; + + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + PetscOptionsBegin(PETSC_COMM_WORLD, NULL, help, "KSP"); + PetscCall(PetscOptionsInt("-n", "Vector size", __FILE__, n, &n, NULL)); + PetscCall(PetscOptionsInt("-epochs", "Number of epochs", __FILE__, n_epochs, &n_epochs, NULL)); + PetscCall(PetscOptionsInt("-iters", "Number of iterations per epoch", __FILE__, n_iters, &n_iters, NULL)); + PetscOptionsEnd(); + PetscCall(VecCreateMPI(PETSC_COMM_WORLD, PETSC_DETERMINE, n, &x)); + PetscCall(VecSetFromOptions(x)); + PetscCall(VecDuplicate(x, &g)); + PetscCall(VecDuplicate(x, &dx)); + PetscCall(VecDuplicate(x, &df)); + PetscCall(VecDuplicate(x, &p)); + PetscCall(MatCreateLMVMBFGS(PETSC_COMM_WORLD, PETSC_DETERMINE, n, &B)); + PetscCall(MatSetFromOptions(B)); + PetscCall(MatLMVMAllocate(B, x, g)); + PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand)); + PetscCall(PetscRandomSetInterval(rand, -1.0, 1.0)); + PetscCall(PetscRandomSetFromOptions(rand)); + PetscCall(PetscLogStageRegister("LMVM MatSolve Loop", &matsolve_loop)); + PetscCall(PetscLogStageGetId("Main Stage", &main_stage)); + PetscCall(PetscLogStageSetVisible(main_stage, PETSC_FALSE)); + for (PetscInt epoch = 0; epoch < n_epochs + 1; epoch++) { + PetscScalar dot; + PetscReal xscale, fscale, absdot; + PetscInt history_size; + + PetscCall(VecSetRandom(dx, rand)); + PetscCall(VecSetRandom(df, rand)); + PetscCall(VecDot(dx, df, &dot)); + absdot = PetscAbsScalar(dot); + PetscCall(VecSetRandom(x, rand)); + PetscCall(VecSetRandom(g, rand)); + xscale = 1.0; + fscale = absdot / PetscRealPart(dot); + PetscCall(MatLMVMGetHistorySize(B, &history_size)); + + PetscCall(MatLMVMUpdate(B, x, g)); + for (PetscInt iter = 0; iter < history_size; iter++, xscale *= -1.0, fscale *= -1.0) { + PetscCall(VecAXPY(x, xscale, dx)); + PetscCall(VecAXPY(g, fscale, df)); + PetscCall(MatLMVMUpdate(B, x, g)); + PetscCall(MatSolve(B, g, p)); + } + if (epoch > 0) PetscCall(PetscLogStagePush(matsolve_loop)); + for (PetscInt iter = 0; iter < n_iters; iter++, xscale *= -1.0, fscale *= -1.0) { + PetscCall(VecAXPY(x, xscale, dx)); + PetscCall(VecAXPY(g, fscale, df)); + PetscCall(MatLMVMUpdate(B, x, g)); + PetscCall(MatSolve(B, g, p)); + } + PetscCall(MatLMVMReset(B, PETSC_FALSE)); + if (epoch > 0) PetscCall(PetscLogStagePop()); + } + PetscCall(MatView(B, PETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD))); + PetscCall(PetscRandomDestroy(&rand)); + PetscCall(MatDestroy(&B)); + PetscCall(VecDestroy(&p)); + PetscCall(VecDestroy(&df)); + PetscCall(VecDestroy(&dx)); + PetscCall(VecDestroy(&g)); + PetscCall(VecDestroy(&x)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + suffix: 0 + args: -mat_lmvm_scale_type none + +TEST*/ diff --git a/src/ksp/ksp/utils/schurm/schurm.c b/src/ksp/ksp/utils/schurm/schurm.c index 5d34d820da5..1e25bb35c8d 100644 --- a/src/ksp/ksp/utils/schurm/schurm.c +++ b/src/ksp/ksp/utils/schurm/schurm.c @@ 
-405,7 +405,7 @@ PetscErrorCode MatSchurComplementUpdateSubMatrices(Mat S, Mat A00, Mat Ap00, Mat PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSchurComplementGetSubMatrices - Get the individual submatrices in the Schur complement Collective @@ -423,7 +423,7 @@ PetscErrorCode MatSchurComplementUpdateSubMatrices(Mat S, Mat A00, Mat Ap00, Mat Level: intermediate Note: - `A11` is optional, and thus can be `NULL`. + Use `NULL` for any unneeded output argument. The reference counts of the submatrices are not increased before they are returned and the matrices should not be modified or destroyed. diff --git a/src/ksp/pc/impls/amgx/amgx.cxx b/src/ksp/pc/impls/amgx/amgx.cxx index 31f615830fb..24f7c6a64a5 100644 --- a/src/ksp/pc/impls/amgx/amgx.cxx +++ b/src/ksp/pc/impls/amgx/amgx.cxx @@ -634,7 +634,7 @@ PETSC_EXTERN PetscErrorCode PCCreate_AMGX(PC pc) /*@C PCAmgXGetResources - get AMGx's internal resource object - Not Collective + Not Collective, No Fortran Support Input Parameter: . pc - the PC diff --git a/src/ksp/pc/impls/asm/asm.c b/src/ksp/pc/impls/asm/asm.c index 7c618d2775b..8b414687c2d 100644 --- a/src/ksp/pc/impls/asm/asm.c +++ b/src/ksp/pc/impls/asm/asm.c @@ -904,7 +904,7 @@ static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`. Collective @@ -912,10 +912,11 @@ static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type) Input Parameters: + pc - the preconditioner context . n - the number of subdomains for this processor (default value = 1) -. is - the index set that defines the subdomains for this processor - (or `NULL` for PETSc to determine subdomains) -- is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT - (or `NULL` to not provide these) +. is - the index set that defines the subdomains for this processor (or `NULL` for PETSc to determine subdomains) + the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call +- is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT` + (or `NULL` to not provide these). The values of the `is_local` array are copied so you can free the array + (not the `IS` in the array) after this call Options Database Key: . -pc_asm_local_blocks - Sets number of local blocks @@ -923,16 +924,16 @@ static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type) Level: advanced Notes: - The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local + The `IS` numbering is in the parallel, global numbering of the vector for both `is` and `is_local` By default the `PCASM` preconditioner uses 1 block per processor. Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors. 
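  Example Usage (an illustrative sketch, not part of this patch; `pc` is assumed to be a `PCASM` preconditioner and `is0`, `is1` previously created index sets in the global numbering), showing that only the array, not the `IS` objects in it, may be freed after the call:
.vb
  IS *is;

  PetscCall(PetscMalloc1(2, &is));
  is[0] = is0;
  is[1] = is1;
  PetscCall(PCASMSetLocalSubdomains(pc, 2, is, NULL));
  PetscCall(PetscFree(is)); /* safe: the array values were copied, but is0 and is1 themselves must remain valid */
.ve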
- If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated - back to form the global solution (this is the standard restricted additive Schwarz method) + If `is_local` is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the `is_local` region is interpolated + back to form the global solution (this is the standard restricted additive Schwarz method, RASM) - If the is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is + If `is_local` is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is no code to handle that case. .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`, @@ -946,7 +947,7 @@ PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[] PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCASMSetTotalSubdomains - Sets the subdomains for all processors for the additive Schwarz preconditioner, `PCASM`. @@ -955,10 +956,10 @@ PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[] Input Parameters: + pc - the preconditioner context . N - the number of subdomains for all processors -. is - the index sets that define the subdomains for all processors - (or `NULL` to ask PETSc to determine the subdomains) -- is_local - the index sets that define the local part of the subdomains for this processor - (or `NULL` to not provide this information) +. is - the index sets that define the subdomains for all processors (or `NULL` to ask PETSc to determine the subdomains) + the values of the `is` array are copied so you can free the array (not the `IS` in the array) after this call +- is_local - the index sets that define the local part of the subdomains for this processor (or `NULL` to not provide this information) + The values of the `is_local` array are copied so you can free the array (not the `IS` in the array) after this call Options Database Key: . -pc_asm_blocks - Sets total blocks @@ -966,7 +967,7 @@ PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[] Level: advanced Notes: - Currently you cannot use this to set the actual subdomains with the argument is or is_local. + Currently you cannot use this to set the actual subdomains with the argument `is` or `is_local`. By default the `PCASM` preconditioner uses 1 block per processor. @@ -1200,9 +1201,8 @@ PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort) . pc - the preconditioner context Output Parameters: -+ n_local - the number of blocks on this processor or NULL -. first_local - the global number of the first block on this processor or NULL, - all processors must request or all must pass NULL ++ n_local - the number of blocks on this processor or `NULL` +. first_local - the global number of the first block on this processor or `NULL`, all processors must request or all must pass `NULL` - ksp - the array of `KSP` contexts Level: advanced @@ -1234,24 +1234,27 @@ PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, K + -pc_asm_blocks - Sets total blocks. Defaults to one block per MPI process. . -pc_asm_overlap - Sets overlap . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()` +. 
-pc_asm_dm_subdomains - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()` - -pc_asm_local_type [additive, multiplicative] - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()` Level: beginner Notes: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you - will get a different convergence rate due to the default option of -pc_asm_type restrict. Use - -pc_asm_type basic to get the same convergence behavior + will get a different convergence rate due to the default option of `-pc_asm_type restrict`. Use + `-pc_asm_type basic` to get the same convergence behavior Each processor can have one or more blocks, but a block cannot be shared by more than one processor. Use `PCGASM` for subdomains shared by multiple processes. - To set options on the solvers for each block append -sub_ to all the `KSP`, and `PC` - options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly + To set options on the solvers for each block append `-sub_` to all the `KSP` and `PC` + options database keys. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly` To set the options on the solvers separately for each block call `PCASMGetSubKSP()` and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`) + If the `PC` has an associated `DM`, then, by default, `DMCreateDomainDecomposition()` is used to create the subdomains + .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`, `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMType`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()` `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`, `PCCompositeType` @@ -1330,7 +1333,7 @@ PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc) from these if you use `PCASMSetLocalSubdomains()` Fortran Notes: - You must provide the array outis[] already allocated of length n. + You must provide the array `outis`, already allocated with length `n`. .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()` @*/ @@ -1542,7 +1545,7 @@ PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[]) `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`. Fortran Notes: - The `IS` must be declared as an array of length long enough to hold `Nsub` entries + `is` must be declared as an array long enough to hold `Nsub` entries .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`, `PCASMSetOverlap()` @@ -1622,6 +1625,9 @@ PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, Petsc Note: The `IS` numbering is in the parallel, global numbering of the vector. + Fortran Note: + Pass in arrays `is` and `is_local` long enough to hold all the subdomains + .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`, `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()` @*/ @@ -1663,6 +1669,9 @@ PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_loca Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
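  For illustration, a sketch of the per-block option setting described in the `PCASM` notes above (an assumption, not part of this patch: `pc` is a `PCASM` preconditioner that has already been set up with `PCSetUp()`):
.vb
  KSP     *subksp;
  PetscInt n_local, first;

  PetscCall(PCASMGetSubKSP(pc, &n_local, &first, &subksp));
  for (PetscInt i = 0; i < n_local; i++) {
    PC subpc;

    PetscCall(KSPGetPC(subksp[i], &subpc)); /* each block has its own KSP and PC */
    PetscCall(PCSetType(subpc, PCILU));
    PetscCall(KSPSetType(subksp[i], KSPPREONLY));
  }
.ve
  This achieves programmatically what `-sub_pc_type ilu -sub_ksp_type preonly` does from the options database, while allowing different settings per block.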
+ Fortran Note: + Pass in an array `mat` long enough to hold all the matrices + .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`, `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()` @*/ @@ -1698,7 +1707,7 @@ PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[]) - flg - boolean indicating whether to use subdomains defined by the `DM` Options Database Key: -. -pc_asm_dm_subdomains - use subdomains defined by the `DM` +. -pc_asm_dm_subdomains - use subdomains defined by the `DM` with `DMCreateDomainDecomposition()` Level: intermediate @@ -1706,6 +1715,9 @@ PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[]) `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`, so setting either of the first two effectively turns the latter off. + Developer Note: + This should be `PCASMSetUseDMSubdomains()`, similarly for the options database key + .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()` `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()` @*/ @@ -1736,6 +1748,9 @@ PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg) Level: intermediate + Developer Note: + This should be `PCASMGetUseDMSubdomains()` + .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()` `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()` @*/ @@ -1753,7 +1768,7 @@ PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.
Not Collective @@ -1776,7 +1791,7 @@ PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves Collective diff --git a/src/ksp/pc/impls/asm/ftn-custom/zasmf.c b/src/ksp/pc/impls/asm/ftn-custom/zasmf.c index 4ec28200494..a63b50f463f 100644 --- a/src/ksp/pc/impls/asm/ftn-custom/zasmf.c +++ b/src/ksp/pc/impls/asm/ftn-custom/zasmf.c @@ -10,8 +10,6 @@ #define pcasmgetsubksp6_ PCASMGETSUBKSP6 #define pcasmgetsubksp7_ PCASMGETSUBKSP7 #define pcasmgetsubksp8_ PCASMGETSUBKSP8 - #define pcasmsetlocalsubdomains_ PCASMSETLOCALSUBDOMAINS - #define pcasmsetglobalsubdomains_ PCASMSETGLOBALSUBDOMAINS #define pcasmgetlocalsubmatrices_ PCASMGETLOCALSUBMATRICES #define pcasmgetlocalsubdomains_ PCASMGETLOCALSUBDOMAINS #define pcasmcreatesubdomains_ PCASMCREATESUBDOMAINS @@ -26,8 +24,6 @@ #define pcasmgetsubksp6_ pcasmgetsubksp6 #define pcasmgetsubksp7_ pcasmgetsubksp7 #define pcasmgetsubksp8_ pcasmgetsubksp8 - #define pcasmsetlocalsubdomains_ pcasmsetlocalsubdomains - #define pcasmsetglobalsubdomains_ pcasmsetglobalsubdomains #define pcasmgetlocalsubmatrices_ pcasmgetlocalsubmatrices #define pcasmgetlocalsubdomains_ pcasmgetlocalsubdomains #define pcasmcreatesubdomains_ pcasmcreatesubdomains @@ -121,20 +117,6 @@ PETSC_EXTERN void pcasmgetsubksp8_(PC *pc, PetscInt *n_local, PetscInt *first_lo pcasmgetsubksp1_(pc, n_local, first_local, ksp, ierr); } -PETSC_EXTERN void pcasmsetlocalsubdomains_(PC *pc, PetscInt *n, IS *is, IS *is_local, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(is); - CHKFORTRANNULLOBJECT(is_local); - *ierr = PCASMSetLocalSubdomains(*pc, *n, is, is_local); -} - -PETSC_EXTERN void pcasmsettotalsubdomains_(PC *pc, PetscInt *N, IS *is, IS *is_local, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(is); - CHKFORTRANNULLOBJECT(is_local); - *ierr = PCASMSetTotalSubdomains(*pc, *N, is, is_local); -} - PETSC_EXTERN void pcasmgetlocalsubmatrices_(PC *pc, PetscInt *n, Mat *mat, PetscErrorCode *ierr) { PetscInt nloc, i; diff --git a/src/ksp/pc/impls/bddc/bddc.c b/src/ksp/pc/impls/bddc/bddc.c index f294beeff83..2cf6d980b19 100644 --- a/src/ksp/pc/impls/bddc/bddc.c +++ b/src/ksp/pc/impls/bddc/bddc.c @@ -37,11 +37,21 @@ static PetscErrorCode PCApply_BDDC(PC, Vec, Vec); static PetscErrorCode PCSetFromOptions_BDDC(PC pc, PetscOptionItems *PetscOptionsObject) { - PC_BDDC *pcbddc = (PC_BDDC *)pc->data; - PetscInt nt, i; + PC_BDDC *pcbddc = (PC_BDDC *)pc->data; + PetscInt nt, i; + char load[PETSC_MAX_PATH_LEN] = {'\0'}; + PetscBool flg; PetscFunctionBegin; PetscOptionsHeadBegin(PetscOptionsObject, "BDDC options"); + /* Load customization from binary file (debugging) */ + PetscCall(PetscOptionsString("-pc_bddc_load", "Load customization from file (intended for debug)", "none", load, load, sizeof(load), &flg)); + if (flg) { + size_t len; + + PetscCall(PetscStrlen(load, &len)); + PetscCall(PCBDDCLoadOrViewCustomization(pc, PETSC_TRUE, len ? 
load : NULL)); + } /* Verbose debugging */ PetscCall(PetscOptionsInt("-pc_bddc_check_level", "Verbose output for PCBDDC (intended for debug)", "none", pcbddc->dbg_flag, &pcbddc->dbg_flag, NULL)); /* Approximate solvers */ @@ -57,6 +67,7 @@ static PetscErrorCode PCSetFromOptions_BDDC(PC pc, PetscOptionItems *PetscOption PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate_scale", "Inform PCBDDC that we need to scale the Neumann solve", "none", pcbddc->NullSpace_corr[3], &pcbddc->NullSpace_corr[3], NULL)); /* Primal space customization */ PetscCall(PetscOptionsBool("-pc_bddc_use_local_mat_graph", "Use or not adjacency graph of local mat for interface analysis", "none", pcbddc->use_local_adj, &pcbddc->use_local_adj, NULL)); + PetscCall(PetscOptionsInt("-pc_bddc_local_mat_graph_square", "Square adjacency graph of local mat for interface analysis", "none", pcbddc->local_adj_square, &pcbddc->local_adj_square, NULL)); PetscCall(PetscOptionsInt("-pc_bddc_graph_maxcount", "Maximum number of shared subdomains for a connected component", "none", pcbddc->graphmaxcount, &pcbddc->graphmaxcount, NULL)); PetscCall(PetscOptionsBool("-pc_bddc_corner_selection", "Activates face-based corner selection", "none", pcbddc->corner_selection, &pcbddc->corner_selection, NULL)); PetscCall(PetscOptionsBool("-pc_bddc_use_vertices", "Use or not corner dofs in coarse space", "none", pcbddc->use_vertices, &pcbddc->use_vertices, NULL)); @@ -1672,6 +1683,19 @@ static PetscErrorCode PCSetUp_BDDC(PC pc) PetscCall(PetscViewerASCIISubtractTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level)); PetscCall(PetscViewerASCIIPopSynchronized(pcbddc->dbg_viewer)); } + + { /* Dump customization */ + PetscBool flg; + char save[PETSC_MAX_PATH_LEN] = {'\0'}; + + PetscCall(PetscOptionsGetString(NULL, ((PetscObject)pc)->prefix, "-pc_bddc_save", save, sizeof(save), &flg)); + if (flg) { + size_t len; + + PetscCall(PetscStrlen(save, &len)); + PetscCall(PCBDDCLoadOrViewCustomization(pc, PETSC_FALSE, len ? save : NULL)); + } + } PetscFunctionReturn(PETSC_SUCCESS); } @@ -2192,6 +2216,9 @@ static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rh Level: developer + Note: + Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`. + .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetSolution()` @*/ PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs) @@ -2374,6 +2401,9 @@ static PetscErrorCode PCDestroy_BDDCIPC(PC pc) Level: developer + Note: + Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`. + .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetRHS()` @*/ PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol) @@ -2635,10 +2665,11 @@ static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, PetscBool fully_re Level: developer - Note: - Currently the only operations provided for FETI-DP matrix are `MatMult()` and `MatMultTranspose()` + Notes: + Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`. 
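  For example, a minimal sketch of that recommended route (an illustration, not part of this patch; assumes `A` is a `MATIS` matrix and `b`, `x` are compatible vectors):
.vb
  KSP ksp;

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)A), &ksp));
  PetscCall(KSPSetType(ksp, KSPFETIDP));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPDestroy(&ksp));
.ve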
+ Currently the only operations provided for the FETI-DP matrix are `MatMult()` and `MatMultTranspose()` -.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCMatFETIDPGetRHS()`, `PCBDDCMatFETIDPGetSolution()` +.seealso: [](ch_ksp), `KSPFETIDP`, `PCBDDC`, `PCBDDCMatFETIDPGetRHS()`, `PCBDDCMatFETIDPGetSolution()` @*/ PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc) { @@ -2677,10 +2708,6 @@ PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, con The PETSc implementation also supports multilevel `PCBDDC` {cite}`mandel2008multispace`. Coarse grids are partitioned using a `MatPartitioning` object. Adaptive selection of primal constraints is supported for SPD systems with high-contrast in the coefficients if MUMPS or MKL_PARDISO are present. - Future versions of the code will also consider using PASTIX. - - An experimental interface to the FETI-DP method is available. FETI-DP operators could be created using `PCBDDCCreateFETIDPOperators()`. - A stand-alone class for the FETI-DP method will be provided in the next releases. Options Database Keys: + -pc_bddc_use_vertices - use or not vertices in primal space @@ -2722,10 +2749,7 @@ PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, con Level: intermediate - Contributed by: - Stefano Zampini - -.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `MATIS`, `PCLU`, `PCGAMG`, `PC`, `PCBDDCSetLocalAdjacencyGraph()`, `PCBDDCSetDofsSplitting()`, +.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `MATIS`, `KSPFETIDP`, `PCLU`, `PCGAMG`, `PC`, `PCBDDCSetLocalAdjacencyGraph()`, `PCBDDCSetDofsSplitting()`, `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetPrimalVerticesIS()`, `MatNullSpace`, `MatSetNearNullSpace()`, `PCBDDCSetChangeOfBasisMat()`, `PCBDDCCreateFETIDPOperators()`, `PCNN` M*/ diff --git a/src/ksp/pc/impls/bddc/bddcfetidp.c b/src/ksp/pc/impls/bddc/bddcfetidp.c index 58ead734322..88d86a52940 100644 --- a/src/ksp/pc/impls/bddc/bddcfetidp.c +++ b/src/ksp/pc/impls/bddc/bddcfetidp.c @@ -136,6 +136,7 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) PetscScalar *array, *scaling_factors, *vals_B_delta; PetscScalar **all_factors; PetscInt *aux_local_numbering_2; + PetscInt *count, **neighbours_set; PetscLayout llay; /* saddlepoint */ @@ -223,10 +224,9 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) n_local_lambda = 0; partial_sum = 0; n_boundary_dofs = 0; - s = 0; /* Get Vertices used to define the BDDC */ - PetscCall(PCBDDCGraphGetCandidatesIS(pcbddc->mat_graph, NULL, NULL, NULL, NULL, &isvert)); + PetscCall(PCBDDCGraphGetCandidatesIS(mat_graph, NULL, NULL, NULL, NULL, &isvert)); PetscCall(ISGetLocalSize(isvert, &n_vertices)); PetscCall(ISGetIndices(isvert, &vertex_indices)); @@ -235,9 +235,30 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) PetscCall(PetscMalloc1(dual_size, &aux_local_numbering_1)); PetscCall(PetscMalloc1(dual_size, &aux_local_numbering_2)); - PetscCall(VecGetArray(pcis->vec1_N, &array)); + /* the code below does not support multiple subdomains per process + error out in this case + TODO: I guess I can use PetscSFGetMultiSF and the code will be easier and more general */ + PetscCall(PetscMalloc2(pcis->n, &count, pcis->n, &neighbours_set)); + for (i = 0, j = 0; i < pcis->n; i++) j += mat_graph->nodes[i].count; + if (pcis->n) PetscCall(PetscMalloc1(j, &neighbours_set[0])); for (i = 0; i < 
pcis->n; i++) { - j = mat_graph->count[i]; /* RECALL: mat_graph->count[i] does not count myself */ + PCBDDCGraphNode *node = &mat_graph->nodes[i]; + + count[i] = 0; + for (j = 0; j < node->count; j++) { + if (node->neighbours_set[j] == rank) continue; + neighbours_set[i][count[i]++] = node->neighbours_set[j]; + } + PetscCheck(count[i] == node->count - 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Multiple subdomains per process not supported"); + s = count[i]; + PetscCall(PetscSortRemoveDupsInt(count + i, neighbours_set[i])); + PetscCheck(s == count[i], PETSC_COMM_SELF, PETSC_ERR_SUP, "Multiple subdomains per process not supported"); + if (i != pcis->n - 1) neighbours_set[i + 1] = neighbours_set[i] + count[i]; + } + + PetscCall(VecGetArray(pcis->vec1_N, &array)); + for (i = 0, s = 0; i < pcis->n; i++) { + j = count[i]; /* RECALL: count[i] does not count myself */ if (j > 0) n_boundary_dofs++; skip_node = PETSC_FALSE; if (s < n_vertices && vertex_indices[s] == i) { /* it works for a sorted set of vertices */ @@ -245,7 +266,7 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) s++; } if (j < 1) skip_node = PETSC_TRUE; - if (mat_graph->special_dof[i] == PCBDDCGRAPH_DIRICHLET_MARK) skip_node = PETSC_TRUE; + if (mat_graph->nodes[i].special_dof == PCBDDCGRAPH_DIRICHLET_MARK) skip_node = PETSC_TRUE; if (!skip_node) { if (fully_redundant) { /* fully redundant set of lagrange multipliers */ @@ -265,7 +286,7 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) } PetscCall(VecRestoreArray(pcis->vec1_N, &array)); PetscCall(ISRestoreIndices(isvert, &vertex_indices)); - PetscCall(PCBDDCGraphRestoreCandidatesIS(pcbddc->mat_graph, NULL, NULL, NULL, NULL, &isvert)); + PetscCall(PCBDDCGraphRestoreCandidatesIS(mat_graph, NULL, NULL, NULL, NULL, &isvert)); dual_size = partial_sum; /* compute global ordering of lagrange multipliers and associate l2g map */ @@ -305,7 +326,7 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) PetscCall(PetscMalloc1(partial_sum, &recv_buffer)); PetscCall(PetscMalloc1(partial_sum, &all_factors[0])); for (i = 0; i < pcis->n - 1; i++) { - j = mat_graph->count[i]; + j = count[i]; all_factors[i + 1] = all_factors[i] + j; } @@ -328,7 +349,7 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) for (j = 0; j < pcis->n_shared[i]; j++) { k = pcis->shared[i][j]; neigh_position = 0; - while (mat_graph->neighbours_set[k][neigh_position] != pcis->neigh[i]) neigh_position++; + while (neighbours_set[k][neigh_position] != pcis->neigh[i]) { neigh_position++; } all_factors[k][neigh_position] = recv_buffer[ptrs_buffer[i - 1] + j]; } } @@ -356,12 +377,12 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) cum = 0; for (i = 0; i < dual_size; i++) { n_global_lambda = aux_global_numbering[cum]; - j = mat_graph->count[aux_local_numbering_1[i]]; + j = count[aux_local_numbering_1[i]]; aux_sums[0] = 0; for (s = 1; s < j; s++) aux_sums[s] = aux_sums[s - 1] + j - s + 1; if (all_factors) array = all_factors[aux_local_numbering_1[i]]; n_neg_values = 0; - while (n_neg_values < j && mat_graph->neighbours_set[aux_local_numbering_1[i]][n_neg_values] < rank) n_neg_values++; + while (n_neg_values < j && neighbours_set[aux_local_numbering_1[i]][n_neg_values] < rank) { n_neg_values++; } n_pos_values = j - n_neg_values; if (fully_redundant) { for (s = 0; s < n_neg_values; s++) { @@ -418,6 +439,8 @@ PetscErrorCode PCBDDCSetupFETIDPMatContext(FETIDPMat_ctx fetidpmat_ctx) PetscCall(PetscFree(all_factors[0])); 
PetscCall(PetscFree(all_factors)); } + if (pcis->n) PetscCall(PetscFree(neighbours_set[0])); + PetscCall(PetscFree2(count, neighbours_set)); /* Create local part of B_delta */ PetscCall(MatCreate(PETSC_COMM_SELF, &fetidpmat_ctx->B_delta)); diff --git a/src/ksp/pc/impls/bddc/bddcgraph.c b/src/ksp/pc/impls/bddc/bddcgraph.c index a8f83a7e23a..91aaaf39ec4 100644 --- a/src/ksp/pc/impls/bddc/bddcgraph.c +++ b/src/ksp/pc/impls/bddc/bddcgraph.c @@ -1,6 +1,8 @@ #include #include #include +#include +#include PetscErrorCode PCBDDCDestroyGraphCandidatesIS(void *ctx) { @@ -27,13 +29,13 @@ PetscErrorCode PCBDDCGraphGetDirichletDofsB(PCBDDCGraph graph, IS *dirdofs) size = 0; for (i = 0; i < graph->nvtxs; i++) { - if (graph->count[i] && graph->special_dof[i] == PCBDDCGRAPH_DIRICHLET_MARK) size++; + if (graph->nodes[i].count > 1 && graph->nodes[i].special_dof == PCBDDCGRAPH_DIRICHLET_MARK) size++; } PetscCall(PetscMalloc1(size, &dirdofs_idxs)); size = 0; for (i = 0; i < graph->nvtxs; i++) { - if (graph->count[i] && graph->special_dof[i] == PCBDDCGRAPH_DIRICHLET_MARK) dirdofs_idxs[size++] = i; + if (graph->nodes[i].count > 1 && graph->nodes[i].special_dof == PCBDDCGRAPH_DIRICHLET_MARK) dirdofs_idxs[size++] = i; } PetscCall(ISCreateGeneral(PETSC_COMM_SELF, size, dirdofs_idxs, PETSC_OWN_POINTER, &graph->dirdofsB)); PetscCall(PetscObjectReference((PetscObject)graph->dirdofsB)); @@ -53,13 +55,13 @@ PetscErrorCode PCBDDCGraphGetDirichletDofs(PCBDDCGraph graph, IS *dirdofs) size = 0; for (i = 0; i < graph->nvtxs; i++) { - if (graph->special_dof[i] == PCBDDCGRAPH_DIRICHLET_MARK) size++; + if (graph->nodes[i].special_dof == PCBDDCGRAPH_DIRICHLET_MARK) size++; } PetscCall(PetscMalloc1(size, &dirdofs_idxs)); size = 0; for (i = 0; i < graph->nvtxs; i++) { - if (graph->special_dof[i] == PCBDDCGRAPH_DIRICHLET_MARK) dirdofs_idxs[size++] = i; + if (graph->nodes[i].special_dof == PCBDDCGRAPH_DIRICHLET_MARK) dirdofs_idxs[size++] = i; } PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)graph->l2gmap), size, dirdofs_idxs, PETSC_OWN_POINTER, &graph->dirdofs)); PetscCall(PetscObjectReference((PetscObject)graph->dirdofs)); @@ -74,11 +76,12 @@ PetscErrorCode PCBDDCGraphASCIIView(PCBDDCGraph graph, PetscInt verbosity_level, PetscInt *queue_in_global_numbering; PetscFunctionBegin; + if (!viewer) PetscCall(PetscViewerASCIIGetStdout(graph->seq_graph ? PETSC_COMM_SELF : PetscObjectComm((PetscObject)graph->l2gmap), &viewer)); PetscCall(PetscViewerASCIIPushSynchronized(viewer)); PetscCall(PetscViewerASCIIGetTab(viewer, &tabs)); PetscCall(PetscViewerASCIIPrintf(viewer, "--------------------------------------------------\n")); PetscCall(PetscViewerFlush(viewer)); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Local BDDC graph for subdomain %04d\n", PetscGlobalRank)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Local BDDC graph for subdomain %04d (seq %d)\n", PetscGlobalRank, graph->seq_graph)); PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Number of vertices %" PetscInt_FMT "\n", graph->nvtxs)); PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Number of local subdomains %" PetscInt_FMT "\n", graph->n_local_subs ? 
graph->n_local_subs : 1)); PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Custom minimal size %" PetscInt_FMT "\n", graph->custom_minimal_size)); @@ -87,28 +90,27 @@ PetscErrorCode PCBDDCGraphASCIIView(PCBDDCGraph graph, PetscInt verbosity_level, if (verbosity_level > 2) { for (i = 0; i < graph->nvtxs; i++) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%" PetscInt_FMT ":\n", i)); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " which_dof: %" PetscInt_FMT "\n", graph->which_dof[i])); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " special_dof: %" PetscInt_FMT "\n", graph->special_dof[i])); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " neighbours: %" PetscInt_FMT "\n", graph->count[i])); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " which_dof: %" PetscInt_FMT "\n", graph->nodes[i].which_dof)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " special_dof: %" PetscInt_FMT "\n", graph->nodes[i].special_dof)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " shared by: %" PetscInt_FMT "\n", graph->nodes[i].count)); PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); - if (graph->count[i]) { + if (graph->nodes[i].count) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " set of neighbours:")); - for (j = 0; j < graph->count[i]; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, graph->neighbours_set[i][j])); + for (j = 0; j < graph->nodes[i].count; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, graph->nodes[i].neighbours_set[j])); PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n")); } PetscCall(PetscViewerASCIISetTab(viewer, tabs)); PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); - if (graph->mirrors) { - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " mirrors: %" PetscInt_FMT "\n", graph->mirrors[i])); - if (graph->mirrors[i]) { - PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " set of mirrors:")); - for (j = 0; j < graph->mirrors[i]; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, graph->mirrors_set[i][j])); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n")); - PetscCall(PetscViewerASCIISetTab(viewer, tabs)); - PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); - } + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " number of local groups: %" PetscInt_FMT "\n", graph->nodes[i].local_groups_count)); + PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); + if (graph->nodes[i].local_groups_count) { + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " groups:")); + for (j = 0; j < graph->nodes[i].local_groups_count; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, graph->nodes[i].local_groups[j])); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n")); } + PetscCall(PetscViewerASCIISetTab(viewer, tabs)); + PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); + if (verbosity_level > 3) { if (graph->xadj) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " local adj list:")); @@ -122,8 +124,8 @@ PetscErrorCode PCBDDCGraphASCIIView(PCBDDCGraph graph, PetscInt verbosity_level, } } if (graph->n_local_subs) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " local sub id: %" PetscInt_FMT "\n", graph->local_subs[i])); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " interface subset id: %" PetscInt_FMT "\n", graph->subset[i])); - if (graph->subset[i] && graph->subset_ncc) 
PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " ncc for subset: %" PetscInt_FMT "\n", graph->subset_ncc[graph->subset[i] - 1])); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " interface subset id: %" PetscInt_FMT "\n", graph->nodes[i].subset)); + if (graph->nodes[i].subset && graph->subset_ncc) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " ncc for subset: %" PetscInt_FMT "\n", graph->subset_ncc[graph->nodes[i].subset - 1])); } } PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Total number of connected components %" PetscInt_FMT "\n", graph->ncc)); @@ -132,12 +134,12 @@ PetscErrorCode PCBDDCGraphASCIIView(PCBDDCGraph graph, PetscInt verbosity_level, for (i = 0; i < graph->ncc; i++) { PetscInt node_num = graph->queue[graph->cptr[i]]; PetscBool printcc = PETSC_FALSE; - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " cc %" PetscInt_FMT " (size %" PetscInt_FMT ", fid %" PetscInt_FMT ", neighs:", i, graph->cptr[i + 1] - graph->cptr[i], graph->which_dof[node_num])); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " cc %" PetscInt_FMT " (size %" PetscInt_FMT ", fid %" PetscInt_FMT ", neighs:", i, graph->cptr[i + 1] - graph->cptr[i], graph->nodes[node_num].which_dof)); PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); - for (j = 0; j < graph->count[node_num]; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, graph->neighbours_set[node_num][j])); + for (j = 0; j < graph->nodes[node_num].count; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT, graph->nodes[node_num].neighbours_set[j])); if (verbosity_level > 1) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "):")); - if (verbosity_level > 2 || graph->twodim || graph->count[node_num] > 1 || (graph->count[node_num] == 1 && graph->special_dof[node_num] == PCBDDCGRAPH_NEUMANN_MARK)) printcc = PETSC_TRUE; + if (verbosity_level > 2 || graph->twodim || graph->nodes[node_num].count > 2 || (graph->nodes[node_num].count == 2 && graph->nodes[node_num].special_dof == PCBDDCGRAPH_NEUMANN_MARK)) { printcc = PETSC_TRUE; } if (printcc) { for (j = graph->cptr[i]; j < graph->cptr[i + 1]; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " (%" PetscInt_FMT ")", graph->queue[j], queue_in_global_numbering[j])); } @@ -211,8 +213,8 @@ PetscErrorCode PCBDDCGraphGetCandidatesIS(PCBDDCGraph graph, PetscInt *n_faces, nvc = 0; for (i = 0; i < graph->ncc; i++) { PetscInt repdof = graph->queue[graph->cptr[i]]; - if (graph->cptr[i + 1] - graph->cptr[i] > graph->custom_minimal_size && graph->count[repdof] < graph->maxcount) { - if (!graph->twodim && graph->count[repdof] == 1 && graph->special_dof[repdof] != PCBDDCGRAPH_NEUMANN_MARK) { + if (graph->cptr[i + 1] - graph->cptr[i] > graph->custom_minimal_size && graph->nodes[repdof].count <= graph->maxcount) { + if (!graph->twodim && graph->nodes[repdof].count == 2 && graph->nodes[repdof].special_dof != PCBDDCGRAPH_NEUMANN_MARK) { nfc++; mark[i] = 2; } else { @@ -281,19 +283,17 @@ PetscErrorCode PCBDDCGraphGetCandidatesIS(PCBDDCGraph graph, PetscInt *n_faces, PetscErrorCode PCBDDCGraphComputeConnectedComponents(PCBDDCGraph graph) { - PetscBool adapt_interface_reduced; - MPI_Comm interface_comm; - PetscMPIInt size; - PetscInt i; - PetscBT cornerp; + PetscBool adapt_interface; + MPI_Comm interface_comm; + PetscBT cornerp = NULL; PetscFunctionBegin; - /* compute connected components locally */ PetscCall(PetscObjectGetComm((PetscObject)graph->l2gmap, &interface_comm)); + /* compute connected components 
locally */ PetscCall(PCBDDCGraphComputeConnectedComponentsLocal(graph)); + if (graph->seq_graph) PetscFunctionReturn(PETSC_SUCCESS); - cornerp = NULL; - if (graph->active_coords) { /* face based corner selection */ + if (graph->active_coords && !graph->multi_element) { /* face based corner selection XXX multi_element */ PetscBT excluded; PetscReal *wdist; PetscInt n_neigh, *neigh, *n_shared, **shared; @@ -426,255 +426,136 @@ PetscErrorCode PCBDDCGraphComputeConnectedComponents(PCBDDCGraph graph) PetscCall(ISLocalToGlobalMappingRestoreInfo(graph->l2gmap, &n_neigh, &neigh, &n_shared, &shared)); } - /* check consistency of connected components among neighbouring subdomains -> it adapt them in case it is needed */ - PetscCallMPI(MPI_Comm_size(interface_comm, &size)); - adapt_interface_reduced = PETSC_FALSE; - if (size > 1) { - PetscInt i; - PetscBool adapt_interface = cornerp ? PETSC_TRUE : PETSC_FALSE; - for (i = 0; i < graph->n_subsets && !adapt_interface; i++) { - /* We are not sure that on a given subset of the local interface, - with two connected components, the latters be the same among sharing subdomains */ - if (graph->subset_ncc[i] > 1) adapt_interface = PETSC_TRUE; - } - PetscCall(MPIU_Allreduce(&adapt_interface, &adapt_interface_reduced, 1, MPIU_BOOL, MPI_LOR, interface_comm)); + /* Adapt connected components if needed */ + adapt_interface = (cornerp || graph->multi_element) ? PETSC_TRUE : PETSC_FALSE; + for (PetscInt i = 0; i < graph->n_subsets && !adapt_interface; i++) { + if (graph->subset_ncc[i] > 1) adapt_interface = PETSC_TRUE; } + PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &adapt_interface, 1, MPIU_BOOL, MPI_LOR, interface_comm)); + if (adapt_interface) { + PetscSF msf; + const PetscInt *n_ref_sharing; + PetscInt *labels, *rootlabels, *mrlabels; + PetscInt nr, nmr, nrs, ncc, cum_queue; - if (graph->n_subsets && adapt_interface_reduced) { - PetscBT subset_cc_adapt; - MPI_Request *send_requests, *recv_requests; - PetscInt *send_buffer, *recv_buffer; - PetscInt sum_requests, start_of_recv, start_of_send; - PetscInt *cum_recv_counts; - PetscInt *labels; - PetscInt ncc, cum_queue, mss, mns, j, k, s; - PetscInt **refine_buffer = NULL, *private_labels = NULL; - PetscBool *subset_has_corn, *recv_buffer_bool, *send_buffer_bool; - - PetscCall(PetscCalloc1(graph->n_subsets, &subset_has_corn)); - if (cornerp) { - for (i = 0; i < graph->n_subsets; i++) { - for (j = 0; j < graph->subset_size[i]; j++) { - if (PetscBTLookup(cornerp, graph->subset_idxs[i][j])) { - subset_has_corn[i] = PETSC_TRUE; - break; - } - } - } - } PetscCall(PetscMalloc1(graph->nvtxs, &labels)); PetscCall(PetscArrayzero(labels, graph->nvtxs)); - for (i = 0, k = 0; i < graph->ncc; i++) { + for (PetscInt i = 0, k = 0; i < graph->ncc; i++) { PetscInt s = 1; - for (j = graph->cptr[i]; j < graph->cptr[i + 1]; j++) { + for (PetscInt j = graph->cptr[i]; j < graph->cptr[i + 1]; j++) { if (cornerp && PetscBTLookup(cornerp, graph->queue[j])) { - labels[graph->queue[j]] = k + s; + labels[graph->queue[j]] = -(k + s + 1); s += 1; } else { - labels[graph->queue[j]] = k; + labels[graph->queue[j]] = -(k + 1); } } k += s; } - - /* allocate some space */ - PetscCall(PetscMalloc1(graph->n_subsets + 1, &cum_recv_counts)); - PetscCall(PetscArrayzero(cum_recv_counts, graph->n_subsets + 1)); - - /* first count how many neighbours per connected component I will receive from */ - cum_recv_counts[0] = 0; - for (i = 0; i < graph->n_subsets; i++) cum_recv_counts[i + 1] = cum_recv_counts[i] + graph->count[graph->subset_idxs[i][0]]; - 
PetscCall(PetscMalloc1(graph->n_subsets, &send_buffer_bool)); - PetscCall(PetscMalloc1(cum_recv_counts[graph->n_subsets], &recv_buffer_bool)); - PetscCall(PetscMalloc2(cum_recv_counts[graph->n_subsets], &send_requests, cum_recv_counts[graph->n_subsets], &recv_requests)); - for (i = 0; i < cum_recv_counts[graph->n_subsets]; i++) { - send_requests[i] = MPI_REQUEST_NULL; - recv_requests[i] = MPI_REQUEST_NULL; - } - - /* exchange with my neighbours the number of my connected components on the subset of interface */ - sum_requests = 0; - for (i = 0; i < graph->n_subsets; i++) send_buffer_bool[i] = (PetscBool)(graph->subset_ncc[i] > 1 || subset_has_corn[i]); - for (i = 0; i < graph->n_subsets; i++) { - PetscMPIInt neigh, tag; - PetscInt count, *neighs; - - count = graph->count[graph->subset_idxs[i][0]]; - neighs = graph->neighbours_set[graph->subset_idxs[i][0]]; - PetscCall(PetscMPIIntCast(2 * graph->subset_ref_node[i], &tag)); - for (k = 0; k < count; k++) { - PetscCall(PetscMPIIntCast(neighs[k], &neigh)); - PetscCallMPI(MPI_Isend(send_buffer_bool + i, 1, MPIU_BOOL, neigh, tag, interface_comm, &send_requests[sum_requests])); - PetscCallMPI(MPI_Irecv(recv_buffer_bool + sum_requests, 1, MPIU_BOOL, neigh, tag, interface_comm, &recv_requests[sum_requests])); - sum_requests++; - } - } - PetscCallMPI(MPI_Waitall(sum_requests, recv_requests, MPI_STATUSES_IGNORE)); - PetscCallMPI(MPI_Waitall(sum_requests, send_requests, MPI_STATUSES_IGNORE)); - - /* determine the subsets I have to adapt (those having more than 1 cc) */ - PetscCall(PetscBTCreate(graph->n_subsets, &subset_cc_adapt)); - PetscCall(PetscBTMemzero(graph->n_subsets, subset_cc_adapt)); - for (i = 0; i < graph->n_subsets; i++) { - if (graph->subset_ncc[i] > 1 || subset_has_corn[i]) { - PetscCall(PetscBTSet(subset_cc_adapt, i)); - continue; - } - for (j = cum_recv_counts[i]; j < cum_recv_counts[i + 1]; j++) { - if (recv_buffer_bool[j]) { - PetscCall(PetscBTSet(subset_cc_adapt, i)); - break; + PetscCall(PetscSFGetGraph(graph->interface_ref_sf, &nr, NULL, NULL, NULL)); + PetscCall(PetscSFGetGraph(graph->interface_subset_sf, &nrs, NULL, NULL, NULL)); + PetscCall(PetscSFGetMultiSF(graph->interface_subset_sf, &msf)); + PetscCall(PetscSFGetGraph(msf, &nmr, NULL, NULL, NULL)); + PetscCall(PetscCalloc2(nmr, &mrlabels, nrs, &rootlabels)); + + PetscCall(PetscSFComputeDegreeBegin(graph->interface_subset_sf, &n_ref_sharing)); + PetscCall(PetscSFComputeDegreeEnd(graph->interface_subset_sf, &n_ref_sharing)); + PetscCall(PetscSFGatherBegin(graph->interface_subset_sf, MPIU_INT, labels, mrlabels)); + PetscCall(PetscSFGatherEnd(graph->interface_subset_sf, MPIU_INT, labels, mrlabels)); + + /* analyze contributions from processes + The structure of mrlabels is suitable to find intersections of ccs. 
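  Each root node receives, through the multi-SF of interface_subset_sf, one label from every sharing process, so two root nodes end up in the same refined connected component exactly when their gathered label columns coincide. For example,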
+ supposing the root subset has dimension 5 and leaves with labels: + 0: [4 4 7 4 7], (2 connected components) + 1: [3 2 2 3 2], (2 connected components) + 2: [1 1 6 5 6], (3 connected components) + the multiroot data and the new labels corresponding to intersected connected components will be (column major) + + 4 4 7 4 7 + mrlabels 3 2 2 3 2 + 1 1 6 5 6 + --------- + rootlabels 0 1 2 3 2 + */ + for (PetscInt i = 0, rcumlabels = 0, mcumlabels = 0; i < nr; i++) { + const PetscInt subset_size = graph->interface_ref_rsize[i]; + const PetscInt *n_sharing = n_ref_sharing + rcumlabels; + const PetscInt *mrbuffer = mrlabels + mcumlabels; + PetscInt *rbuffer = rootlabels + rcumlabels; + PetscInt subset_counter = 0; + + for (PetscInt j = 0; j < subset_size; j++) { + if (!rbuffer[j]) { /* found a new cc */ + const PetscInt *jlabels = mrbuffer + j * n_sharing[0]; + rbuffer[j] = ++subset_counter; + + for (PetscInt k = j + 1; k < subset_size; k++) { /* check for other nodes in new cc */ + PetscBool same_set = PETSC_TRUE; + const PetscInt *klabels = mrbuffer + k * n_sharing[0]; + + for (PetscInt s = 0; s < n_sharing[0]; s++) { + if (jlabels[s] != klabels[s]) { + same_set = PETSC_FALSE; + break; + } + } + if (same_set) rbuffer[k] = subset_counter; + } } } - } - PetscCall(PetscFree(send_buffer_bool)); - PetscCall(PetscFree(recv_buffer_bool)); - PetscCall(PetscFree(subset_has_corn)); - - /* determine send/recv buffers sizes */ - j = 0; - mss = 0; - for (i = 0; i < graph->n_subsets; i++) { - if (PetscBTLookup(subset_cc_adapt, i)) { - j += graph->subset_size[i]; - mss = PetscMax(graph->subset_size[i], mss); - } - } - k = 0; - mns = 0; - for (i = 0; i < graph->n_subsets; i++) { - if (PetscBTLookup(subset_cc_adapt, i)) { - k += (cum_recv_counts[i + 1] - cum_recv_counts[i]) * graph->subset_size[i]; - mns = PetscMax(cum_recv_counts[i + 1] - cum_recv_counts[i], mns); + if (subset_size) { + rcumlabels += subset_size; + mcumlabels += n_sharing[0] * subset_size; } } - PetscCall(PetscMalloc2(j, &send_buffer, k, &recv_buffer)); - - /* fill send buffer (order matters: subset_idxs ordered by global ordering) */ - j = 0; - for (i = 0; i < graph->n_subsets; i++) - if (PetscBTLookup(subset_cc_adapt, i)) - for (k = 0; k < graph->subset_size[i]; k++) send_buffer[j++] = labels[graph->subset_idxs[i][k]]; - - /* now exchange the data */ - start_of_recv = 0; - start_of_send = 0; - sum_requests = 0; - for (i = 0; i < graph->n_subsets; i++) { - if (PetscBTLookup(subset_cc_adapt, i)) { - PetscMPIInt neigh, tag; - PetscInt size_of_send = graph->subset_size[i]; - - j = graph->subset_idxs[i][0]; - PetscCall(PetscMPIIntCast(2 * graph->subset_ref_node[i] + 1, &tag)); - for (k = 0; k < graph->count[j]; k++) { - PetscCall(PetscMPIIntCast(graph->neighbours_set[j][k], &neigh)); - PetscCallMPI(MPI_Isend(&send_buffer[start_of_send], size_of_send, MPIU_INT, neigh, tag, interface_comm, &send_requests[sum_requests])); - PetscCallMPI(MPI_Irecv(&recv_buffer[start_of_recv], size_of_send, MPIU_INT, neigh, tag, interface_comm, &recv_requests[sum_requests])); - start_of_recv += size_of_send; - sum_requests++; - } - start_of_send += size_of_send; - } - } - PetscCallMPI(MPI_Waitall(sum_requests, recv_requests, MPI_STATUSES_IGNORE)); - - /* refine connected components */ - start_of_recv = 0; - /* allocate some temporary space */ - if (mss) { - PetscCall(PetscMalloc1(mss, &refine_buffer)); - PetscCall(PetscMalloc2(mss * (mns + 1), &refine_buffer[0], mss, &private_labels)); - } - ncc = 0; - cum_queue = 0; - graph->cptr[0] = 0; - for (i = 0; i < 
graph->n_subsets; i++) { - if (PetscBTLookup(subset_cc_adapt, i)) { - PetscInt subset_counter = 0; - PetscInt sharingprocs = cum_recv_counts[i + 1] - cum_recv_counts[i] + 1; /* count myself */ - PetscInt buffer_size = graph->subset_size[i]; - - PetscCheck(buffer_size >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Expected buffer_size %" PetscInt_FMT " >= 0", buffer_size); - /* compute pointers */ - for (j = 1; j < buffer_size; j++) refine_buffer[j] = refine_buffer[j - 1] + sharingprocs; - /* analyze contributions from subdomains that share the i-th subset - The structure of refine_buffer is suitable to find intersections of ccs among sharingprocs. - supposing the current subset is shared by 3 processes and has dimension 5 with global dofs 0,1,2,3,4 (local 0,4,3,1,2) - sharing procs connected components: - neigh 0: [0 1 4], [2 3], labels [4,7] (2 connected components) - neigh 1: [0 1], [2 3 4], labels [3 2] (2 connected components) - neigh 2: [0 4], [1], [2 3], labels [1 5 6] (3 connected components) - refine_buffer will be filled as: - [ 4, 3, 1; - 4, 2, 1; - 7, 2, 6; - 4, 3, 5; - 7, 2, 6; ]; - The connected components in local ordering are [0], [1], [2 3], [4] */ - /* fill temp_buffer */ - for (k = 0; k < buffer_size; k++) refine_buffer[k][0] = labels[graph->subset_idxs[i][k]]; - for (j = 0; j < sharingprocs - 1; j++) { - for (k = 0; k < buffer_size; k++) refine_buffer[k][j + 1] = recv_buffer[start_of_recv + k]; - start_of_recv += buffer_size; - } - PetscCall(PetscArrayzero(private_labels, buffer_size)); - for (j = 0; j < buffer_size; j++) { - if (!private_labels[j]) { /* found a new cc */ - PetscBool same_set; - - graph->cptr[ncc] = cum_queue; - ncc++; - subset_counter++; - private_labels[j] = subset_counter; - graph->queue[cum_queue++] = graph->subset_idxs[i][j]; - for (k = j + 1; k < buffer_size; k++) { /* check for other nodes in new cc */ - same_set = PETSC_TRUE; - for (s = 0; s < sharingprocs; s++) { - if (refine_buffer[j][s] != refine_buffer[k][s]) { - same_set = PETSC_FALSE; - break; - } - } - if (same_set) { - private_labels[k] = subset_counter; - graph->queue[cum_queue++] = graph->subset_idxs[i][k]; - } + + /* Now communicate the intersected labels */ + PetscCall(PetscSFBcastBegin(graph->interface_subset_sf, MPIU_INT, rootlabels, labels, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(graph->interface_subset_sf, MPIU_INT, rootlabels, labels, MPI_REPLACE)); + PetscCall(PetscFree2(mrlabels, rootlabels)); + + /* and adapt local connected components */ + PetscInt *ocptr, *oqueue; + PetscBool *touched; + + PetscCall(PetscMalloc3(graph->ncc + 1, &ocptr, graph->cptr[graph->ncc], &oqueue, graph->cptr[graph->ncc], &touched)); + PetscCall(PetscArraycpy(ocptr, graph->cptr, graph->ncc + 1)); + PetscCall(PetscArraycpy(oqueue, graph->queue, graph->cptr[graph->ncc])); + PetscCall(PetscArrayzero(touched, graph->cptr[graph->ncc])); + + ncc = 0; + cum_queue = 0; + for (PetscInt i = 0; i < graph->ncc; i++) { + for (PetscInt j = ocptr[i]; j < ocptr[i + 1]; j++) { + const PetscInt jlabel = labels[oqueue[j]]; + + if (jlabel) { + graph->cptr[ncc] = cum_queue; + ncc++; + for (PetscInt k = j; k < ocptr[i + 1]; k++) { /* check for other nodes in new cc */ + if (labels[oqueue[k]] == jlabel) { + graph->queue[cum_queue++] = oqueue[k]; + labels[oqueue[k]] = 0; } } } - graph->cptr[ncc] = cum_queue; - graph->subset_ncc[i] = subset_counter; - graph->queue_sorted = PETSC_FALSE; - } else { /* this subset does not need to be adapted */ - PetscCall(PetscArraycpy(graph->queue + cum_queue, graph->subset_idxs[i], 
graph->subset_size[i])); - ncc++; - cum_queue += graph->subset_size[i]; - graph->cptr[ncc] = cum_queue; } } - graph->cptr[ncc] = cum_queue; - graph->ncc = ncc; - if (mss) { - PetscCall(PetscFree2(refine_buffer[0], private_labels)); - PetscCall(PetscFree(refine_buffer)); - } + PetscCall(PetscFree3(ocptr, oqueue, touched)); PetscCall(PetscFree(labels)); - PetscCallMPI(MPI_Waitall(sum_requests, send_requests, MPI_STATUSES_IGNORE)); - PetscCall(PetscFree2(send_requests, recv_requests)); - PetscCall(PetscFree2(send_buffer, recv_buffer)); - PetscCall(PetscFree(cum_recv_counts)); - PetscCall(PetscBTDestroy(&subset_cc_adapt)); + graph->cptr[ncc] = cum_queue; + graph->queue_sorted = PETSC_FALSE; + graph->ncc = ncc; } PetscCall(PetscBTDestroy(&cornerp)); /* Determine if we are in 2D or 3D */ if (!graph->twodimset) { PetscBool twodim = PETSC_TRUE; - for (i = 0; i < graph->ncc; i++) { + for (PetscInt i = 0; i < graph->ncc; i++) { PetscInt repdof = graph->queue[graph->cptr[i]]; PetscInt ccsize = graph->cptr[i + 1] - graph->cptr[i]; - if (graph->count[repdof] > 1 && ccsize > graph->custom_minimal_size) { + if (graph->nodes[repdof].count > 2 && ccsize > graph->custom_minimal_size) { twodim = PETSC_FALSE; break; } @@ -685,81 +566,97 @@ PetscErrorCode PCBDDCGraphComputeConnectedComponents(PCBDDCGraph graph) PetscFunctionReturn(PETSC_SUCCESS); } -static inline PetscErrorCode PCBDDCGraphComputeCC_Private(PCBDDCGraph graph, PetscInt pid, PetscInt *queue_tip, PetscInt n_prev, PetscInt *n_added) +static inline PetscErrorCode PCBDDCGraphComputeCC_Private(PCBDDCGraph graph, PetscInt pid, PetscInt *PETSC_RESTRICT queue_tip, PetscInt n_prev, PetscInt *n_added) { - PetscInt i, j, n; - PetscInt *xadj = graph->xadj, *adjncy = graph->adjncy; - PetscBT touched = graph->touched; - PetscBool havecsr = (PetscBool)(!!xadj); - PetscBool havesubs = (PetscBool)(!!graph->n_local_subs); + PetscInt i, j, n = 0; + + const PetscInt *PETSC_RESTRICT xadj = graph->xadj; + const PetscInt *PETSC_RESTRICT adjncy = graph->adjncy; + const PetscInt *PETSC_RESTRICT subset_idxs = graph->subset_idxs[pid - 1]; + const PetscInt *PETSC_RESTRICT local_subs = graph->local_subs; + const PetscInt subset_size = graph->subset_size[pid - 1]; + + PCBDDCGraphNode *PETSC_RESTRICT nodes = graph->nodes; + + const PetscBool havecsr = (PetscBool)(!!xadj); + const PetscBool havesubs = (PetscBool)(!!graph->n_local_subs); PetscFunctionBegin; - n = 0; if (havecsr && !havesubs) { for (i = -n_prev; i < 0; i++) { - PetscInt start_dof = queue_tip[i]; + const PetscInt start_dof = queue_tip[i]; + /* we assume that if a dof has a size 1 adjacency list and the corresponding entry is negative, it is connected to all dofs */ if (xadj[start_dof + 1] - xadj[start_dof] == 1 && adjncy[xadj[start_dof]] < 0) { - for (j = 0; j < graph->subset_size[pid - 1]; j++) { /* pid \in [1,graph->n_subsets] */ - PetscInt dof = graph->subset_idxs[pid - 1][j]; - if (!PetscBTLookup(touched, dof) && graph->subset[dof] == pid) { - PetscCall(PetscBTSet(touched, dof)); - queue_tip[n] = dof; + for (j = 0; j < subset_size; j++) { /* pid \in [1,graph->n_subsets] */ + const PetscInt dof = subset_idxs[j]; + + if (!nodes[dof].touched && nodes[dof].subset == pid) { + nodes[dof].touched = PETSC_TRUE; + queue_tip[n] = dof; n++; } } } else { for (j = xadj[start_dof]; j < xadj[start_dof + 1]; j++) { - PetscInt dof = adjncy[j]; - if (!PetscBTLookup(touched, dof) && graph->subset[dof] == pid) { - PetscCall(PetscBTSet(touched, dof)); - queue_tip[n] = dof; + const PetscInt dof = adjncy[j]; + + if 
(!nodes[dof].touched && nodes[dof].subset == pid) { + nodes[dof].touched = PETSC_TRUE; + queue_tip[n] = dof; n++; } } } } } else if (havecsr && havesubs) { - PetscInt sid = graph->local_subs[queue_tip[-n_prev]]; + const PetscInt sid = local_subs[queue_tip[-n_prev]]; + for (i = -n_prev; i < 0; i++) { - PetscInt start_dof = queue_tip[i]; + const PetscInt start_dof = queue_tip[i]; + /* we assume that if a dof has a size 1 adjacency list and the corresponding entry is negative, it is connected to all dofs belonging to the local sub */ if (xadj[start_dof + 1] - xadj[start_dof] == 1 && adjncy[xadj[start_dof]] < 0) { - for (j = 0; j < graph->subset_size[pid - 1]; j++) { /* pid \in [1,graph->n_subsets] */ - PetscInt dof = graph->subset_idxs[pid - 1][j]; - if (!PetscBTLookup(touched, dof) && graph->subset[dof] == pid && graph->local_subs[dof] == sid) { - PetscCall(PetscBTSet(touched, dof)); - queue_tip[n] = dof; + for (j = 0; j < subset_size; j++) { /* pid \in [1,graph->n_subsets] */ + const PetscInt dof = subset_idxs[j]; + + if (!nodes[dof].touched && nodes[dof].subset == pid && local_subs[dof] == sid) { + nodes[dof].touched = PETSC_TRUE; + queue_tip[n] = dof; n++; } } } else { for (j = xadj[start_dof]; j < xadj[start_dof + 1]; j++) { - PetscInt dof = adjncy[j]; - if (!PetscBTLookup(touched, dof) && graph->subset[dof] == pid && graph->local_subs[dof] == sid) { - PetscCall(PetscBTSet(touched, dof)); - queue_tip[n] = dof; + const PetscInt dof = adjncy[j]; + + if (!nodes[dof].touched && nodes[dof].subset == pid && local_subs[dof] == sid) { + nodes[dof].touched = PETSC_TRUE; + queue_tip[n] = dof; n++; } } } } } else if (havesubs) { /* sub info only */ - PetscInt sid = graph->local_subs[queue_tip[-n_prev]]; - for (j = 0; j < graph->subset_size[pid - 1]; j++) { /* pid \in [1,graph->n_subsets] */ - PetscInt dof = graph->subset_idxs[pid - 1][j]; - if (!PetscBTLookup(touched, dof) && graph->subset[dof] == pid && graph->local_subs[dof] == sid) { - PetscCall(PetscBTSet(touched, dof)); - queue_tip[n] = dof; + const PetscInt sid = local_subs[queue_tip[-n_prev]]; + + for (j = 0; j < subset_size; j++) { /* pid \in [1,graph->n_subsets] */ + const PetscInt dof = subset_idxs[j]; + + if (!nodes[dof].touched && nodes[dof].subset == pid && local_subs[dof] == sid) { + nodes[dof].touched = PETSC_TRUE; + queue_tip[n] = dof; n++; } } } else { - for (j = 0; j < graph->subset_size[pid - 1]; j++) { /* pid \in [1,graph->n_subsets] */ - PetscInt dof = graph->subset_idxs[pid - 1][j]; - if (!PetscBTLookup(touched, dof) && graph->subset[dof] == pid) { - PetscCall(PetscBTSet(touched, dof)); - queue_tip[n] = dof; + for (j = 0; j < subset_size; j++) { /* pid \in [1,graph->n_subsets] */ + const PetscInt dof = subset_idxs[j]; + + if (!nodes[dof].touched && nodes[dof].subset == pid) { + nodes[dof].touched = PETSC_TRUE; + queue_tip[n] = dof; n++; } } @@ -770,8 +667,7 @@ static inline PetscErrorCode PCBDDCGraphComputeCC_Private(PCBDDCGraph graph, Pet PetscErrorCode PCBDDCGraphComputeConnectedComponentsLocal(PCBDDCGraph graph) { - PetscInt ncc, cum_queue, n; - PetscMPIInt commsize; + PetscInt ncc, cum_queue; PetscFunctionBegin; PetscCheck(graph->setupcalled, PetscObjectComm((PetscObject)graph->l2gmap), PETSC_ERR_ORDER, "PCBDDCGraphSetUp should be called first"); @@ -779,29 +675,31 @@ PetscErrorCode PCBDDCGraphComputeConnectedComponentsLocal(PCBDDCGraph graph) if (!graph->xadj && !graph->n_local_subs) PetscFunctionReturn(PETSC_SUCCESS); /* reset any previous search of connected components */ - PetscCall(PetscBTMemzero(graph->nvtxs, 
graph->touched)); - PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)graph->l2gmap), &commsize)); - if (commsize > graph->commsizelimit) { - PetscInt i; - for (i = 0; i < graph->nvtxs; i++) { - if (graph->special_dof[i] == PCBDDCGRAPH_DIRICHLET_MARK || !graph->count[i]) PetscCall(PetscBTSet(graph->touched, i)); + for (PetscInt i = 0; i < graph->nvtxs; i++) graph->nodes[i].touched = PETSC_FALSE; + if (!graph->seq_graph) { + for (PetscInt i = 0; i < graph->nvtxs; i++) { + if (graph->nodes[i].special_dof == PCBDDCGRAPH_DIRICHLET_MARK || graph->nodes[i].count < 2) graph->nodes[i].touched = PETSC_TRUE; } } /* begin search for connected components */ cum_queue = 0; ncc = 0; - for (n = 0; n < graph->n_subsets; n++) { - PetscInt pid = n + 1; /* partition labeled by 0 is discarded */ + for (PetscInt n = 0; n < graph->n_subsets; n++) { + const PetscInt *subset_idxs = graph->subset_idxs[n]; + const PetscInt pid = n + 1; /* partition labeled by 0 is discarded */ + PetscInt found = 0, prev = 0, first = 0, ncc_pid = 0; + while (found != graph->subset_size[n]) { PetscInt added = 0; + if (!prev) { /* search for new starting dof */ - while (PetscBTLookup(graph->touched, graph->subset_idxs[n][first])) first++; - PetscCall(PetscBTSet(graph->touched, graph->subset_idxs[n][first])); - graph->queue[cum_queue] = graph->subset_idxs[n][first]; - graph->cptr[ncc] = cum_queue; - prev = 1; + while (graph->nodes[subset_idxs[first]].touched) first++; + graph->nodes[subset_idxs[first]].touched = PETSC_TRUE; + graph->queue[cum_queue] = subset_idxs[first]; + graph->cptr[ncc] = cum_queue; + prev = 1; cum_queue++; found++; ncc_pid++; @@ -828,13 +726,13 @@ PetscErrorCode PCBDDCGraphComputeConnectedComponentsLocal(PCBDDCGraph graph) PetscErrorCode PCBDDCGraphSetUp(PCBDDCGraph graph, PetscInt custom_minimal_size, IS neumann_is, IS dirichlet_is, PetscInt n_ISForDofs, IS ISForDofs[], IS custom_primal_vertices) { - IS subset, subset_n; + IS subset; MPI_Comm comm; const PetscInt *is_indices; - PetscInt n_neigh, *neigh, *n_shared, **shared, *queue_global; - PetscInt i, j, k, s, total_counts, nodes_touched, is_size; - PetscMPIInt commsize; - PetscBool same_set, mirrors_found; + PetscInt *queue_global, *nodecount, **nodeneighs; + PetscInt i, j, k, total_counts, nodes_touched, is_size, nvtxs = graph->nvtxs; + PetscMPIInt size, rank; + PetscBool same_set; PetscFunctionBegin; PetscValidLogicalCollectiveInt(graph->l2gmap, custom_minimal_size, 2); @@ -857,105 +755,88 @@ PetscErrorCode PCBDDCGraphSetUp(PCBDDCGraph graph, PetscInt custom_minimal_size, PetscValidHeaderSpecific(custom_primal_vertices, IS_CLASSID, 7); PetscCheckSameComm(graph->l2gmap, 1, custom_primal_vertices, 7); } + for (i = 0; i < nvtxs; i++) graph->nodes[i].touched = PETSC_FALSE; + PetscCall(PetscObjectGetComm((PetscObject)graph->l2gmap, &comm)); - PetscCallMPI(MPI_Comm_size(comm, &commsize)); + PetscCallMPI(MPI_Comm_size(comm, &size)); + PetscCallMPI(MPI_Comm_rank(comm, &rank)); /* custom_minimal_size */ graph->custom_minimal_size = custom_minimal_size; - /* get info l2gmap and allocate work vectors */ - PetscCall(ISLocalToGlobalMappingGetInfo(graph->l2gmap, &n_neigh, &neigh, &n_shared, &shared)); - /* check if we have any local periodic nodes (periodic BCs) */ - mirrors_found = PETSC_FALSE; - if (graph->nvtxs && n_neigh) { - for (i = 0; i < n_shared[0]; i++) graph->count[shared[0][i]] += 1; - for (i = 0; i < n_shared[0]; i++) { - if (graph->count[shared[0][i]] > 1) { - mirrors_found = PETSC_TRUE; - break; + + /* get node info from l2gmap */ + 
PetscCall(ISLocalToGlobalMappingGetNodeInfo(graph->l2gmap, NULL, &nodecount, &nodeneighs)); + + /* Allocate space for storing the set of neighbours for each node */ + graph->multi_element = PETSC_FALSE; + for (i = 0; i < nvtxs; i++) { + graph->nodes[i].count = nodecount[i]; + if (!graph->seq_graph) { + PetscCall(PetscMalloc1(nodecount[i], &graph->nodes[i].neighbours_set)); + PetscCall(PetscArraycpy(graph->nodes[i].neighbours_set, nodeneighs[i], nodecount[i])); + + if (!graph->multi_element) { + PetscInt nself; + for (j = 0, nself = 0; j < graph->nodes[i].count; j++) + if (graph->nodes[i].neighbours_set[j] == rank) nself++; + if (nself > 1) graph->multi_element = PETSC_TRUE; } + } else { + PetscCall(PetscCalloc1(nodecount[i], &graph->nodes[i].neighbours_set)); } } - /* compute local mirrors (if any) */ - if (mirrors_found) { - IS to, from; - PetscInt *local_indices, *global_indices; - - PetscCall(ISCreateStride(PETSC_COMM_SELF, graph->nvtxs, 0, 1, &to)); - PetscCall(ISLocalToGlobalMappingApplyIS(graph->l2gmap, to, &from)); - /* get arrays of local and global indices */ - PetscCall(PetscMalloc1(graph->nvtxs, &local_indices)); - PetscCall(ISGetIndices(to, (const PetscInt **)&is_indices)); - PetscCall(PetscArraycpy(local_indices, is_indices, graph->nvtxs)); - PetscCall(ISRestoreIndices(to, (const PetscInt **)&is_indices)); - PetscCall(PetscMalloc1(graph->nvtxs, &global_indices)); - PetscCall(ISGetIndices(from, (const PetscInt **)&is_indices)); - PetscCall(PetscArraycpy(global_indices, is_indices, graph->nvtxs)); - PetscCall(ISRestoreIndices(from, (const PetscInt **)&is_indices)); - /* allocate space for mirrors */ - PetscCall(PetscMalloc2(graph->nvtxs, &graph->mirrors, graph->nvtxs, &graph->mirrors_set)); - PetscCall(PetscArrayzero(graph->mirrors, graph->nvtxs)); - graph->mirrors_set[0] = NULL; - - k = 0; - for (i = 0; i < n_shared[0]; i++) { - j = shared[0][i]; - if (graph->count[j] > 1) { - graph->mirrors[j]++; - k++; + PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(graph->l2gmap, NULL, &nodecount, &nodeneighs)); + PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &graph->multi_element, 1, MPIU_BOOL, MPI_LOR, comm)); + + /* compute local groups */ + if (graph->multi_element) { + const PetscInt *idxs, *indegree; + IS is, lis; + PetscLayout layout; + PetscSF sf, multisf; + PetscInt n, nmulti, c, *multi_root_subs, *start; + + PetscCheck(!nvtxs || graph->local_subs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing local subdomain information"); + + PetscCall(ISLocalToGlobalMappingGetIndices(graph->l2gmap, &idxs)); + PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nvtxs, idxs, PETSC_USE_POINTER, &is)); + PetscCall(ISRenumber(is, NULL, &n, &lis)); + PetscCall(ISDestroy(&is)); + + PetscCall(ISLocalToGlobalMappingRestoreIndices(graph->l2gmap, &idxs)); + PetscCall(ISGetIndices(lis, &idxs)); + PetscCall(PetscLayoutCreate(PETSC_COMM_SELF, &layout)); + PetscCall(PetscLayoutSetSize(layout, n)); + PetscCall(PetscSFCreate(PETSC_COMM_SELF, &sf)); + PetscCall(PetscSFSetGraphLayout(sf, layout, nvtxs, NULL, PETSC_OWN_POINTER, idxs)); + PetscCall(PetscLayoutDestroy(&layout)); + PetscCall(PetscSFGetMultiSF(sf, &multisf)); + PetscCall(PetscSFComputeDegreeBegin(sf, &indegree)); + PetscCall(PetscSFComputeDegreeEnd(sf, &indegree)); + PetscCall(PetscSFGetGraph(multisf, &nmulti, NULL, NULL, NULL)); + PetscCall(PetscMalloc2(nmulti, &multi_root_subs, n + 1, &start)); + start[0] = 0; + for (i = 0; i < n; i++) start[i + 1] = start[i] + indegree[i]; + PetscCall(PetscSFGatherBegin(sf, MPIU_INT, graph->local_subs, multi_root_subs)); + 
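The gather in this hunk uses PetscSF's degree and multi-SF machinery to collect one value per sharing process into contiguous per-root segments. A minimal self-contained sketch of that idiom, using a toy layout (two global roots owned by rank 0) instead of the renumbered interface map used here:

/* sf_gather_sketch.c: every rank attaches two leaves to global roots 0 and 1
   (both owned by rank 0), then gathers one value per leaf into per-root
   segments, analogous to the gather of local subdomain ids above. */
#include <petscsf.h>

int main(int argc, char **argv)
{
  PetscSF         sf, multisf;
  PetscLayout     layout;
  const PetscInt *degree;
  PetscInt        remotes[2] = {0, 1}, leafdata[2], *multirootdata, nroots, nmulti;
  PetscMPIInt     rank;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscLayoutCreateFromSizes(PETSC_COMM_WORLD, rank == 0 ? 2 : 0, 2, 1, &layout));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetGraphLayout(sf, layout, 2, NULL, PETSC_OWN_POINTER, remotes));
  PetscCall(PetscLayoutDestroy(&layout));
  /* degree[i] = number of leaves attached to root i (here: one per rank) */
  PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
  PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
  /* the multi-SF exposes one root slot per incoming leaf */
  PetscCall(PetscSFGetMultiSF(sf, &multisf));
  PetscCall(PetscSFGetGraph(sf, &nroots, NULL, NULL, NULL));
  PetscCall(PetscSFGetGraph(multisf, &nmulti, NULL, NULL, NULL));
  PetscCall(PetscMalloc1(nmulti, &multirootdata));
  leafdata[0] = leafdata[1] = rank; /* ship the owning rank, like local_subs above */
  PetscCall(PetscSFGatherBegin(sf, MPIU_INT, leafdata, multirootdata));
  PetscCall(PetscSFGatherEnd(sf, MPIU_INT, leafdata, multirootdata));
  for (PetscInt i = 0, off = 0; i < nroots; i++) { /* per-root segments, offsets from degree */
    for (PetscInt k = 0; k < degree[i]; k++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "root %" PetscInt_FMT " <- %" PetscInt_FMT "\n", i, multirootdata[off + k]));
    off += degree[i];
  }
  PetscCall(PetscFree(multirootdata));
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(PetscFinalize());
  return 0;
}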
PetscCall(PetscSFGatherEnd(sf, MPIU_INT, graph->local_subs, multi_root_subs)); + for (i = 0; i < nvtxs; i++) { + PetscInt gid = idxs[i]; + + graph->nodes[i].local_sub = graph->local_subs[i]; + for (j = 0, c = 0; j < graph->nodes[i].count; j++) { + if (graph->nodes[i].neighbours_set[j] == rank) c++; } + PetscCheck(c == indegree[idxs[i]], PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT " != %" PetscInt_FMT, c, indegree[idxs[i]]); + PetscCall(PetscMalloc1(c, &graph->nodes[i].local_groups)); + for (j = 0; j < c; j++) graph->nodes[i].local_groups[j] = multi_root_subs[start[gid] + j]; + PetscCall(PetscSortInt(c, graph->nodes[i].local_groups)); + graph->nodes[i].local_groups_count = c; } - /* allocate space for set of mirrors */ - PetscCall(PetscMalloc1(k, &graph->mirrors_set[0])); - for (i = 1; i < graph->nvtxs; i++) graph->mirrors_set[i] = graph->mirrors_set[i - 1] + graph->mirrors[i - 1]; - - /* fill arrays */ - PetscCall(PetscArrayzero(graph->mirrors, graph->nvtxs)); - for (j = 0; j < n_shared[0]; j++) { - i = shared[0][j]; - if (graph->count[i] > 1) graph->mirrors_set[i][graph->mirrors[i]++] = global_indices[i]; - } - PetscCall(PetscSortIntWithArray(graph->nvtxs, global_indices, local_indices)); - for (i = 0; i < graph->nvtxs; i++) { - if (graph->mirrors[i] > 0) { - PetscCall(PetscFindInt(graph->mirrors_set[i][0], graph->nvtxs, global_indices, &k)); - j = global_indices[k]; - while (k > 0 && global_indices[k - 1] == j) k--; - for (j = 0; j < graph->mirrors[i]; j++) graph->mirrors_set[i][j] = local_indices[k + j]; - PetscCall(PetscSortInt(graph->mirrors[i], graph->mirrors_set[i])); - } - } - PetscCall(PetscFree(local_indices)); - PetscCall(PetscFree(global_indices)); - PetscCall(ISDestroy(&to)); - PetscCall(ISDestroy(&from)); - } - PetscCall(PetscArrayzero(graph->count, graph->nvtxs)); - - /* Count total number of neigh per node */ - k = 0; - for (i = 1; i < n_neigh; i++) { - k += n_shared[i]; - for (j = 0; j < n_shared[i]; j++) graph->count[shared[i][j]] += 1; - } - /* Allocate space for storing the set of neighbours for each node */ - if (graph->nvtxs) PetscCall(PetscMalloc1(k, &graph->neighbours_set[0])); - for (i = 1; i < graph->nvtxs; i++) { /* dont count myself */ - graph->neighbours_set[i] = PetscSafePointerPlusOffset(graph->neighbours_set[i - 1], graph->count[i - 1]); - } - /* Get information for sharing subdomains */ - PetscCall(PetscArrayzero(graph->count, graph->nvtxs)); - for (i = 1; i < n_neigh; i++) { /* dont count myself */ - s = n_shared[i]; - for (j = 0; j < s; j++) { - k = shared[i][j]; - graph->neighbours_set[k][graph->count[k]] = neigh[i]; - graph->count[k] += 1; - } + PetscCall(PetscFree2(multi_root_subs, start)); + PetscCall(ISRestoreIndices(lis, &idxs)); + PetscCall(ISDestroy(&lis)); + PetscCall(PetscSFDestroy(&sf)); } - /* sort set of sharing subdomains */ - for (i = 0; i < graph->nvtxs; i++) PetscCall(PetscSortRemoveDupsInt(&graph->count[i], graph->neighbours_set[i])); - /* free memory allocated by ISLocalToGlobalMappingGetInfo */ - PetscCall(ISLocalToGlobalMappingRestoreInfo(graph->l2gmap, &n_neigh, &neigh, &n_shared, &shared)); /* Get info for dofs splitting @@ -967,7 +848,7 @@ PetscErrorCode PCBDDCGraphSetUp(PCBDDCGraph graph, PetscInt custom_minimal_size, PetscCall(ISGetBlockSize(ISForDofs[i], &bs)); k += bs; } - for (i = 0; i < graph->nvtxs; i++) graph->which_dof[i] = k; /* by default a dof belongs to the complement set */ + for (i = 0; i < nvtxs; i++) graph->nodes[i].which_dof = k; /* by default a dof belongs to the complement set */ for (i = 0, k = 0; i < 
n_ISForDofs; i++) { PetscInt bs; @@ -980,8 +861,8 @@ PetscErrorCode PCBDDCGraphSetUp(PCBDDCGraph graph, PetscInt custom_minimal_size, for (b = 0; b < bs; b++) { PetscInt jj = bs * j + b; - if (is_indices[jj] > -1 && is_indices[jj] < graph->nvtxs) { /* out of bounds indices (if any) are skipped */ - graph->which_dof[is_indices[jj]] = k + b; + if (is_indices[jj] > -1 && is_indices[jj] < nvtxs) { /* out of bounds indices (if any) are skipped */ + graph->nodes[is_indices[jj]].which_dof = k + b; } } } @@ -994,127 +875,131 @@ PetscErrorCode PCBDDCGraphSetUp(PCBDDCGraph graph, PetscInt custom_minimal_size, PetscCall(ISGetLocalSize(neumann_is, &is_size)); PetscCall(ISGetIndices(neumann_is, (const PetscInt **)&is_indices)); for (i = 0; i < is_size; i++) { - if (is_indices[i] > -1 && is_indices[i] < graph->nvtxs) { /* out of bounds indices (if any) are skipped */ - graph->special_dof[is_indices[i]] = PCBDDCGRAPH_NEUMANN_MARK; + if (is_indices[i] > -1 && is_indices[i] < nvtxs) { /* out of bounds indices (if any) are skipped */ + graph->nodes[is_indices[i]].special_dof = PCBDDCGRAPH_NEUMANN_MARK; } } PetscCall(ISRestoreIndices(neumann_is, (const PetscInt **)&is_indices)); } - /* Take into account Dirichlet nodes (they overwrite any neumann boundary mark previously set) */ + + /* Take into account Dirichlet nodes (they overwrite any mark previously set) */ if (dirichlet_is) { PetscCall(ISGetLocalSize(dirichlet_is, &is_size)); PetscCall(ISGetIndices(dirichlet_is, (const PetscInt **)&is_indices)); for (i = 0; i < is_size; i++) { - if (is_indices[i] > -1 && is_indices[i] < graph->nvtxs) { /* out of bounds indices (if any) are skipped */ - if (commsize > graph->commsizelimit) { /* dirichlet nodes treated as internal */ - PetscCall(PetscBTSet(graph->touched, is_indices[i])); - graph->subset[is_indices[i]] = 0; + if (is_indices[i] > -1 && is_indices[i] < nvtxs) { /* out of bounds indices (if any) are skipped */ + if (!graph->seq_graph) { /* dirichlet nodes treated as internal */ + graph->nodes[is_indices[i]].touched = PETSC_TRUE; + graph->nodes[is_indices[i]].subset = 0; } - graph->special_dof[is_indices[i]] = PCBDDCGRAPH_DIRICHLET_MARK; + graph->nodes[is_indices[i]].special_dof = PCBDDCGRAPH_DIRICHLET_MARK; } } PetscCall(ISRestoreIndices(dirichlet_is, (const PetscInt **)&is_indices)); } - /* mark local periodic nodes (if any) and adapt CSR graph (if any) */ - if (graph->mirrors) { - for (i = 0; i < graph->nvtxs; i++) - if (graph->mirrors[i]) graph->special_dof[i] = PCBDDCGRAPH_LOCAL_PERIODIC_MARK; - - if (graph->xadj) { - PetscInt *new_xadj, *new_adjncy; - /* sort CSR graph */ - for (i = 0; i < graph->nvtxs; i++) PetscCall(PetscSortInt(graph->xadj[i + 1] - graph->xadj[i], &graph->adjncy[graph->xadj[i]])); - /* adapt local CSR graph in case of local periodicity */ - k = 0; - for (i = 0; i < graph->nvtxs; i++) - for (j = graph->xadj[i]; j < graph->xadj[i + 1]; j++) k += graph->mirrors[graph->adjncy[j]]; - - PetscCall(PetscMalloc1(graph->nvtxs + 1, &new_xadj)); - PetscCall(PetscMalloc1(k + graph->xadj[graph->nvtxs], &new_adjncy)); - new_xadj[0] = 0; - for (i = 0; i < graph->nvtxs; i++) { - k = graph->xadj[i + 1] - graph->xadj[i]; - PetscCall(PetscArraycpy(&new_adjncy[new_xadj[i]], &graph->adjncy[graph->xadj[i]], k)); - new_xadj[i + 1] = new_xadj[i] + k; - for (j = graph->xadj[i]; j < graph->xadj[i + 1]; j++) { - k = graph->mirrors[graph->adjncy[j]]; - PetscCall(PetscArraycpy(&new_adjncy[new_xadj[i + 1]], graph->mirrors_set[graph->adjncy[j]], k)); - new_xadj[i + 1] += k; - } - k = new_xadj[i + 1] - new_xadj[i]; 
- PetscCall(PetscSortRemoveDupsInt(&k, &new_adjncy[new_xadj[i]])); - new_xadj[i + 1] = new_xadj[i] + k; - } - /* set new CSR into graph */ - PetscCall(PetscFree(graph->xadj)); - PetscCall(PetscFree(graph->adjncy)); - graph->xadj = new_xadj; - graph->adjncy = new_adjncy; - } - } - /* mark special nodes (if any) -> each will become a single node equivalence class */ + /* mark special nodes (if any) -> each will become a single dof equivalence class (i.e. point constraint for BDDC) */ if (custom_primal_vertices) { PetscCall(ISGetLocalSize(custom_primal_vertices, &is_size)); PetscCall(ISGetIndices(custom_primal_vertices, (const PetscInt **)&is_indices)); for (i = 0, j = 0; i < is_size; i++) { - if (is_indices[i] > -1 && is_indices[i] < graph->nvtxs && graph->special_dof[is_indices[i]] != PCBDDCGRAPH_DIRICHLET_MARK) { /* out of bounds indices (if any) are skipped */ - graph->special_dof[is_indices[i]] = PCBDDCGRAPH_SPECIAL_MARK - j; + if (is_indices[i] > -1 && is_indices[i] < nvtxs && graph->nodes[is_indices[i]].special_dof != PCBDDCGRAPH_DIRICHLET_MARK) { /* out of bounds indices (if any) are skipped */ + graph->nodes[is_indices[i]].special_dof = PCBDDCGRAPH_SPECIAL_MARK - j; j++; } } PetscCall(ISRestoreIndices(custom_primal_vertices, (const PetscInt **)&is_indices)); } - /* mark interior nodes (if commsize > graph->commsizelimit) as touched and belonging to partition number 0 */ - if (commsize > graph->commsizelimit) { - for (i = 0; i < graph->nvtxs; i++) { - if (!graph->count[i]) { - PetscCall(PetscBTSet(graph->touched, i)); - graph->subset[i] = 0; + /* mark interior nodes as touched and belonging to partition number 0 */ + if (!graph->seq_graph) { + for (i = 0; i < nvtxs; i++) { + if (graph->nodes[i].count < 2) { + graph->nodes[i].touched = PETSC_TRUE; + graph->nodes[i].subset = 0; } } } /* init graph structure and compute default subsets */ nodes_touched = 0; - for (i = 0; i < graph->nvtxs; i++) { - if (PetscBTLookup(graph->touched, i)) nodes_touched++; - } + for (i = 0; i < nvtxs; i++) + if (graph->nodes[i].touched) nodes_touched++; + i = 0; graph->ncc = 0; total_counts = 0; /* allocated space for queues */ - if (commsize == graph->commsizelimit) { - PetscCall(PetscMalloc2(graph->nvtxs + 1, &graph->cptr, graph->nvtxs, &graph->queue)); + if (graph->seq_graph) { + PetscCall(PetscMalloc2(nvtxs + 1, &graph->cptr, nvtxs, &graph->queue)); } else { - PetscInt nused = graph->nvtxs - nodes_touched; + PetscInt nused = nvtxs - nodes_touched; PetscCall(PetscMalloc2(nused + 1, &graph->cptr, nused, &graph->queue)); } - while (nodes_touched < graph->nvtxs) { + while (nodes_touched < nvtxs) { /* find first untouched node in local ordering */ - while (PetscBTLookup(graph->touched, i)) i++; - PetscCall(PetscBTSet(graph->touched, i)); - graph->subset[i] = graph->ncc + 1; + while (graph->nodes[i].touched) i++; + graph->nodes[i].touched = PETSC_TRUE; + graph->nodes[i].subset = graph->ncc + 1; graph->cptr[graph->ncc] = total_counts; graph->queue[total_counts] = i; total_counts++; nodes_touched++; + /* now find all other nodes having the same set of sharing subdomains */ - for (j = i + 1; j < graph->nvtxs; j++) { + const PCBDDCGraphNode *nodei = &graph->nodes[i]; + const PetscInt icount = nodei->count; + const PetscInt iwhich_dof = nodei->which_dof; + const PetscInt ispecial_dof = nodei->special_dof; + const PetscInt ilocal_groups_count = nodei->local_groups_count; + const PetscInt *PETSC_RESTRICT ineighbours_set = nodei->neighbours_set; + const PetscInt *PETSC_RESTRICT ilocal_groups = nodei->local_groups; + 
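The loop that follows adds node j to node i's subset only when all the per-node metadata agree. Restated as a hypothetical stand-alone predicate over the same fields (the real code works on PCBDDCGraphNode and additionally special-cases MPI-shared dofs and local groups):

#include <petscsys.h>

typedef struct {
  PetscInt  count, which_dof, special_dof;
  PetscInt *neighbours_set; /* ranks sharing the node, sorted ascending */
} NodeSketch;

/* PETSC_TRUE iff the two nodes would land in the same initial subset */
static PetscBool NodesEquivalent(const NodeSketch *a, const NodeSketch *b)
{
  if (a->count != b->count || a->which_dof != b->which_dof || a->special_dof != b->special_dof) return PETSC_FALSE;
  for (PetscInt k = 0; k < a->count; k++)
    if (a->neighbours_set[k] != b->neighbours_set[k]) return PETSC_FALSE;
  return PETSC_TRUE;
}

int main(void)
{
  PetscInt   na[2] = {0, 2}, nb[2] = {0, 2};
  NodeSketch a = {2, 0, 0, na}, b = {2, 0, 0, nb};

  return NodesEquivalent(&a, &b) ? 0 : 1; /* exits 0: same subset */
}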
for (j = i + 1; j < nvtxs; j++) { + PCBDDCGraphNode *PETSC_RESTRICT nodej = &graph->nodes[j]; + + if (nodej->touched) continue; /* check for same number of sharing subdomains, dof number and same special mark */ - if (!PetscBTLookup(graph->touched, j) && graph->count[i] == graph->count[j] && graph->which_dof[i] == graph->which_dof[j] && graph->special_dof[i] == graph->special_dof[j]) { + if (icount == nodej->count && iwhich_dof == nodej->which_dof && ispecial_dof == nodej->special_dof) { + PetscBool mpi_shared = PETSC_TRUE; + /* check for same set of sharing subdomains */ same_set = PETSC_TRUE; - for (k = 0; k < graph->count[j]; k++) { - if (graph->neighbours_set[i][k] != graph->neighbours_set[j][k]) same_set = PETSC_FALSE; + for (k = 0; k < icount; k++) { + if (ineighbours_set[k] != nodej->neighbours_set[k]) { + same_set = PETSC_FALSE; + break; + } } - /* I have found a friend of mine */ + + if (graph->multi_element) { + mpi_shared = PETSC_FALSE; + for (k = 0; k < icount; k++) + if (ineighbours_set[k] != rank) { + mpi_shared = PETSC_TRUE; + break; + } + } + + /* check for same local groups + shared dofs at the process boundaries will be handled differently */ + if (same_set && !mpi_shared) { + if (ilocal_groups_count != nodej->local_groups_count) same_set = PETSC_FALSE; + else { + for (k = 0; k < ilocal_groups_count; k++) { + if (ilocal_groups[k] != nodej->local_groups[k]) { + same_set = PETSC_FALSE; + break; + } + } + } + } + + /* Add to subset */ if (same_set) { - PetscCall(PetscBTSet(graph->touched, j)); - graph->subset[j] = graph->ncc + 1; + nodej->touched = PETSC_TRUE; + nodej->subset = graph->ncc + 1; nodes_touched++; graph->queue[total_counts] = j; total_counts++; @@ -1123,50 +1008,166 @@ PetscErrorCode PCBDDCGraphSetUp(PCBDDCGraph graph, PetscInt custom_minimal_size, } graph->ncc++; } - /* set default number of subsets (at this point no info on csr and/or local_subs has been taken into account, so n_subsets = ncc */ + graph->cptr[graph->ncc] = total_counts; + + /* set default number of subsets */ graph->n_subsets = graph->ncc; PetscCall(PetscMalloc1(graph->n_subsets, &graph->subset_ncc)); for (i = 0; i < graph->n_subsets; i++) graph->subset_ncc[i] = 1; - /* final pointer */ - graph->cptr[graph->ncc] = total_counts; - /* For consistency reasons (among neighbours), I need to sort (by global ordering) each connected component */ - /* Get a reference node (min index in global ordering) for each subset for tagging messages */ PetscCall(PetscMalloc1(graph->ncc, &graph->subset_ref_node)); PetscCall(PetscMalloc1(graph->cptr[graph->ncc], &queue_global)); + PetscCall(PetscMalloc2(graph->ncc, &graph->subset_size, graph->ncc, &graph->subset_idxs)); + if (graph->multi_element) PetscCall(PetscMalloc1(graph->ncc, &graph->gsubset_size)); + else graph->gsubset_size = graph->subset_size; PetscCall(ISLocalToGlobalMappingApply(graph->l2gmap, graph->cptr[graph->ncc], graph->queue, queue_global)); + + PetscHMapI cnt_unique; + + PetscCall(PetscHMapICreate(&cnt_unique)); for (j = 0; j < graph->ncc; j++) { - PetscCall(PetscSortIntWithArray(graph->cptr[j + 1] - graph->cptr[j], &queue_global[graph->cptr[j]], &graph->queue[graph->cptr[j]])); - graph->subset_ref_node[j] = graph->queue[graph->cptr[j]]; + PetscInt c = 0, ref_node = PETSC_MAX_INT; + + for (k = graph->cptr[j]; k < graph->cptr[j + 1]; k++) { + ref_node = PetscMin(ref_node, queue_global[k]); + if (graph->multi_element) { + PetscBool missing; + PetscHashIter iter; + + PetscCall(PetscHMapIPut(cnt_unique, queue_global[k], &iter, &missing)); + if 
(missing) c++; + } + } + graph->gsubset_size[j] = c; + graph->subset_size[j] = graph->cptr[j + 1] - graph->cptr[j]; + graph->subset_ref_node[j] = ref_node; + if (graph->multi_element) PetscCall(PetscHMapIClear(cnt_unique)); } - PetscCall(PetscFree(queue_global)); - graph->queue_sorted = PETSC_TRUE; + PetscCall(PetscHMapIDestroy(&cnt_unique)); /* save information on subsets (needed when analyzing the connected components) */ if (graph->ncc) { - PetscCall(PetscMalloc2(graph->ncc, &graph->subset_size, graph->ncc, &graph->subset_idxs)); PetscCall(PetscMalloc1(graph->cptr[graph->ncc], &graph->subset_idxs[0])); PetscCall(PetscArrayzero(graph->subset_idxs[0], graph->cptr[graph->ncc])); - for (j = 1; j < graph->ncc; j++) { - graph->subset_size[j - 1] = graph->cptr[j] - graph->cptr[j - 1]; - graph->subset_idxs[j] = graph->subset_idxs[j - 1] + graph->subset_size[j - 1]; - } - graph->subset_size[graph->ncc - 1] = graph->cptr[graph->ncc] - graph->cptr[graph->ncc - 1]; + for (j = 1; j < graph->ncc; j++) { graph->subset_idxs[j] = graph->subset_idxs[j - 1] + graph->subset_size[j - 1]; } PetscCall(PetscArraycpy(graph->subset_idxs[0], graph->queue, graph->cptr[graph->ncc])); } - /* renumber reference nodes */ - PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)graph->l2gmap), graph->ncc, graph->subset_ref_node, PETSC_COPY_VALUES, &subset_n)); - PetscCall(ISLocalToGlobalMappingApplyIS(graph->l2gmap, subset_n, &subset)); - PetscCall(ISDestroy(&subset_n)); - PetscCall(ISRenumber(subset, NULL, NULL, &subset_n)); - PetscCall(ISDestroy(&subset)); - PetscCall(ISGetLocalSize(subset_n, &k)); - PetscCheck(k == graph->ncc, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid size of new subset! %" PetscInt_FMT " != %" PetscInt_FMT, k, graph->ncc); - PetscCall(ISGetIndices(subset_n, &is_indices)); - PetscCall(PetscArraycpy(graph->subset_ref_node, is_indices, graph->ncc)); - PetscCall(ISRestoreIndices(subset_n, &is_indices)); - PetscCall(ISDestroy(&subset_n)); + /* check consistency and create SF to analyze components on the interface between subdomains */ + if (!graph->seq_graph) { + PetscSF msf; + PetscLayout map; + const PetscInt *degree; + PetscInt nr, nmr, *rdata; + PetscBool valid = PETSC_TRUE; + PetscInt subset_N; + IS subset_n; + const PetscInt *idxs; + + PetscCall(ISCreateGeneral(comm, graph->n_subsets, graph->subset_ref_node, PETSC_USE_POINTER, &subset)); + PetscCall(ISRenumber(subset, NULL, &subset_N, &subset_n)); + PetscCall(ISDestroy(&subset)); + + PetscCall(PetscSFCreate(comm, &graph->interface_ref_sf)); + PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, subset_N, 1, &map)); + PetscCall(ISGetIndices(subset_n, &idxs)); + PetscCall(PetscSFSetGraphLayout(graph->interface_ref_sf, map, graph->n_subsets, NULL, PETSC_OWN_POINTER, idxs)); + PetscCall(ISRestoreIndices(subset_n, &idxs)); + PetscCall(ISDestroy(&subset_n)); + PetscCall(PetscLayoutDestroy(&map)); + + PetscCall(PetscSFComputeDegreeBegin(graph->interface_ref_sf, &degree)); + PetscCall(PetscSFComputeDegreeEnd(graph->interface_ref_sf, &degree)); + PetscCall(PetscSFGetMultiSF(graph->interface_ref_sf, &msf)); + PetscCall(PetscSFGetGraph(graph->interface_ref_sf, &nr, NULL, NULL, NULL)); + PetscCall(PetscSFGetGraph(msf, &nmr, NULL, NULL, NULL)); + PetscCall(PetscCalloc1(nmr, &rdata)); + PetscCall(PetscSFGatherBegin(graph->interface_ref_sf, MPIU_INT, graph->gsubset_size, rdata)); + PetscCall(PetscSFGatherEnd(graph->interface_ref_sf, MPIU_INT, graph->gsubset_size, rdata)); + for (PetscInt i = 0, c = 0; i < nr && valid; i++) { + for (PetscInt j = 0; j < degree[i];
j++) { + if (rdata[j + c] != rdata[c]) valid = PETSC_FALSE; + } + c += degree[i]; + } + PetscCall(PetscFree(rdata)); + PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_BOOL, MPI_LAND, comm)); + PetscCheck(valid, comm, PETSC_ERR_PLIB, "Initial local subsets are not consistent"); + + /* Now create SF with each root extended to gsubset_size roots */ + PetscInt mss = 0; + const PetscSFNode *subs_remote; + + PetscCall(PetscSFGetGraph(graph->interface_ref_sf, NULL, NULL, NULL, &subs_remote)); + for (PetscInt i = 0; i < graph->n_subsets; i++) mss = PetscMax(graph->subset_size[i], mss); + + PetscInt nri, nli, *start_rsize, *cum_rsize; + PetscCall(PetscCalloc1(graph->n_subsets + 1, &start_rsize)); + PetscCall(PetscCalloc1(nr, &graph->interface_ref_rsize)); + PetscCall(PetscMalloc1(nr + 1, &cum_rsize)); + PetscCall(PetscSFReduceBegin(graph->interface_ref_sf, MPIU_INT, graph->gsubset_size, graph->interface_ref_rsize, MPI_REPLACE)); + PetscCall(PetscSFReduceEnd(graph->interface_ref_sf, MPIU_INT, graph->gsubset_size, graph->interface_ref_rsize, MPI_REPLACE)); + + nri = 0; + cum_rsize[0] = 0; + for (PetscInt i = 0; i < nr; i++) { + nri += graph->interface_ref_rsize[i]; + cum_rsize[i + 1] = cum_rsize[i] + graph->interface_ref_rsize[i]; + } + nli = graph->cptr[graph->ncc]; + PetscCall(PetscSFBcastBegin(graph->interface_ref_sf, MPIU_INT, cum_rsize, start_rsize, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(graph->interface_ref_sf, MPIU_INT, cum_rsize, start_rsize, MPI_REPLACE)); + PetscCall(PetscFree(cum_rsize)); + + PetscInt *ilocal, *queue_global_uniq; + PetscSFNode *iremote; + PetscBool *touched; + + PetscCall(PetscSFCreate(comm, &graph->interface_subset_sf)); + PetscCall(PetscMalloc1(nli, &ilocal)); + PetscCall(PetscMalloc1(nli, &iremote)); + PetscCall(PetscMalloc2(mss, &queue_global_uniq, mss, &touched)); + for (PetscInt i = 0, nli = 0; i < graph->n_subsets; i++) { + const PetscMPIInt rr = subs_remote[i].rank; + const PetscInt start = start_rsize[i]; + const PetscInt subset_size = graph->subset_size[i]; + const PetscInt gsubset_size = graph->gsubset_size[i]; + const PetscInt *subset_idxs = graph->subset_idxs[i]; + const PetscInt *lsub_queue_global = queue_global + graph->cptr[i]; + + k = subset_size; + PetscCall(PetscArrayzero(touched, subset_size)); + PetscCall(PetscArraycpy(queue_global_uniq, lsub_queue_global, subset_size)); + PetscCall(PetscSortRemoveDupsInt(&k, queue_global_uniq)); + PetscCheck(k == gsubset_size, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid local subset %" PetscInt_FMT " size %" PetscInt_FMT " != %" PetscInt_FMT, i, k, gsubset_size); + + PetscInt t = 0, j = 0; + while (t < subset_size) { + while (j < subset_size && touched[j]) j++; + PetscCheck(j < subset_size, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %" PetscInt_FMT " >= %" PetscInt_FMT, j, subset_size); + const PetscInt ls = graph->nodes[subset_idxs[j]].local_sub; + + for (k = j; k < subset_size; k++) { + if (graph->nodes[subset_idxs[k]].local_sub == ls) { + PetscInt ig; + + PetscCall(PetscFindInt(lsub_queue_global[k], gsubset_size, queue_global_uniq, &ig)); + ilocal[nli] = subset_idxs[k]; + iremote[nli].rank = rr; + iremote[nli].index = start + ig; + touched[k] = PETSC_TRUE; + nli++; + t++; + } + } + } + } + PetscCheck(nli == graph->cptr[graph->ncc], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid ilocal size %" PetscInt_FMT " != %" PetscInt_FMT, nli, graph->cptr[graph->ncc]); + PetscCall(PetscSFSetGraph(graph->interface_subset_sf, nri, nli, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER)); + 
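Building ilocal/iremote above leans on a sort-and-search idiom: copy the per-subset global ids, deduplicate with PetscSortRemoveDupsInt, then locate each original entry in the unique list with PetscFindInt. A toy sketch of just that idiom (hypothetical helper name and data):

#include <petscsys.h>

static PetscErrorCode LocateInUnique(PetscInt n, const PetscInt vals[])
{
  PetscInt *uniq, nu = n;

  PetscFunctionBeginUser;
  PetscCall(PetscMalloc1(n, &uniq));
  PetscCall(PetscArraycpy(uniq, vals, n));
  PetscCall(PetscSortRemoveDupsInt(&nu, uniq)); /* nu <= n afterwards */
  for (PetscInt i = 0; i < n; i++) {
    PetscInt loc;

    /* binary search; loc >= 0 here since vals[i] is certainly present */
    PetscCall(PetscFindInt(vals[i], nu, uniq, &loc));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "%" PetscInt_FMT " -> slot %" PetscInt_FMT "\n", vals[i], loc));
  }
  PetscCall(PetscFree(uniq));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  const PetscInt vals[6] = {7, 3, 7, 1, 3, 9};

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(LocateInUnique(6, vals));
  PetscCall(PetscFinalize());
  return 0;
}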
PetscCall(PetscFree(start_rsize)); + PetscCall(PetscFree2(queue_global_uniq, touched)); + } + PetscCall(PetscFree(queue_global)); /* free workspace */ graph->setupcalled = PETSC_TRUE; @@ -1207,17 +1208,22 @@ PetscErrorCode PCBDDCGraphReset(PCBDDCGraph graph) PetscCall(ISLocalToGlobalMappingDestroy(&graph->l2gmap)); PetscCall(PetscFree(graph->subset_ncc)); PetscCall(PetscFree(graph->subset_ref_node)); - if (graph->nvtxs) PetscCall(PetscFree(graph->neighbours_set[0])); - PetscCall(PetscBTDestroy(&graph->touched)); - PetscCall(PetscFree5(graph->count, graph->neighbours_set, graph->subset, graph->which_dof, graph->special_dof)); + for (PetscInt i = 0; i < graph->nvtxs; i++) { + PetscCall(PetscFree(graph->nodes[i].neighbours_set)); + PetscCall(PetscFree(graph->nodes[i].local_groups)); + } + PetscCall(PetscFree(graph->nodes)); PetscCall(PetscFree2(graph->cptr, graph->queue)); - if (graph->mirrors) PetscCall(PetscFree(graph->mirrors_set[0])); - PetscCall(PetscFree2(graph->mirrors, graph->mirrors_set)); if (graph->subset_idxs) PetscCall(PetscFree(graph->subset_idxs[0])); PetscCall(PetscFree2(graph->subset_size, graph->subset_idxs)); + if (graph->multi_element) PetscCall(PetscFree(graph->gsubset_size)); + PetscCall(PetscFree(graph->interface_ref_rsize)); + PetscCall(PetscSFDestroy(&graph->interface_subset_sf)); + PetscCall(PetscSFDestroy(&graph->interface_ref_sf)); PetscCall(ISDestroy(&graph->dirdofs)); PetscCall(ISDestroy(&graph->dirdofsB)); if (graph->n_local_subs) PetscCall(PetscFree(graph->local_subs)); + graph->multi_element = PETSC_FALSE; graph->has_dirichlet = PETSC_FALSE; graph->twodimset = PETSC_FALSE; graph->twodim = PETSC_FALSE; @@ -1227,6 +1233,7 @@ PetscErrorCode PCBDDCGraphReset(PCBDDCGraph graph) graph->custom_minimal_size = 1; graph->n_local_subs = 0; graph->maxcount = PETSC_MAX_INT; + graph->seq_graph = PETSC_FALSE; graph->setupcalled = PETSC_FALSE; PetscFunctionReturn(PETSC_SUCCESS); } @@ -1249,16 +1256,10 @@ PetscErrorCode PCBDDCGraphInit(PCBDDCGraph graph, ISLocalToGlobalMapping l2gmap, graph->nvtxs = n; graph->nvtxs_global = N; /* allocate used space */ - PetscCall(PetscBTCreate(graph->nvtxs, &graph->touched)); - PetscCall(PetscMalloc5(graph->nvtxs, &graph->count, graph->nvtxs, &graph->neighbours_set, graph->nvtxs, &graph->subset, graph->nvtxs, &graph->which_dof, graph->nvtxs, &graph->special_dof)); - /* zeroes memory */ - PetscCall(PetscArrayzero(graph->count, graph->nvtxs)); - PetscCall(PetscArrayzero(graph->subset, graph->nvtxs)); + PetscCall(PetscCalloc1(graph->nvtxs, &graph->nodes)); /* use -1 as a default value for which_dof array */ - for (n = 0; n < graph->nvtxs; n++) graph->which_dof[n] = -1; - PetscCall(PetscArrayzero(graph->special_dof, graph->nvtxs)); - /* zeroes first pointer to neighbour set */ - if (graph->nvtxs) graph->neighbours_set[0] = NULL; + for (n = 0; n < graph->nvtxs; n++) graph->nodes[n].which_dof = -1; + /* zeroes workspace for values of ncc */ graph->subset_ncc = NULL; graph->subset_ref_node = NULL; @@ -1284,7 +1285,6 @@ PetscErrorCode PCBDDCGraphCreate(PCBDDCGraph *graph) PetscFunctionBegin; PetscCall(PetscNew(&new_graph)); new_graph->custom_minimal_size = 1; - new_graph->commsizelimit = 1; *graph = new_graph; PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ksp/pc/impls/bddc/bddcprivate.c b/src/ksp/pc/impls/bddc/bddcprivate.c index 6dd4d1a066c..33377ee0ac6 100644 --- a/src/ksp/pc/impls/bddc/bddcprivate.c +++ b/src/ksp/pc/impls/bddc/bddcprivate.c @@ -1,6 +1,7 @@ #include <../src/mat/impls/aij/seq/aij.h> #include #include +#include #include 
<../src/mat/impls/dense/seq/dense.h> #include #include @@ -147,30 +148,30 @@ static PetscErrorCode PCBDDCComputeNedelecChangeEdge(Mat lG, IS edge, IS extrow, PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatAIJExtractRows(Mat, IS, Mat *); + PetscErrorCode PCBDDCNedelecSupport(PC pc) { PC_BDDC *pcbddc = (PC_BDDC *)pc->data; Mat_IS *matis = (Mat_IS *)pc->pmat->data; Mat G, T, conn, lG, lGt, lGis, lGall, lGe, lGinit; - Vec tvec; PetscSF sfv; ISLocalToGlobalMapping el2g, vl2g, fl2g, al2g; MPI_Comm comm; - IS lned, primals, allprimals, nedfieldlocal; + IS lned, primals, allprimals, nedfieldlocal, elements_corners = NULL; IS *eedges, *extrows, *extcols, *alleedges; PetscBT btv, bte, btvc, btb, btbd, btvcand, btvi, btee, bter; PetscScalar *vals, *work; PetscReal *rwork; const PetscInt *idxs, *ii, *jj, *iit, *jjt; PetscInt ne, nv, Lv, order, n, field; - PetscInt n_neigh, *neigh, *n_shared, **shared; PetscInt i, j, extmem, cum, maxsize, nee; PetscInt *extrow, *extrowcum, *marks, *vmarks, *gidxs; PetscInt *sfvleaves, *sfvroots; PetscInt *corners, *cedges; PetscInt *ecount, **eneighs, *vcount, **vneighs; PetscInt *emarks; - PetscBool print, eerr, done, lrc[2], conforming, global, singular, setprimal; + PetscBool print, eerr, done, lrc[2], conforming, global, setprimal; PetscFunctionBegin; /* If the discrete gradient is defined for a subset of dofs and global is true, @@ -182,34 +183,30 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) global = pcbddc->nedglobal; setprimal = PETSC_FALSE; print = PETSC_FALSE; - singular = PETSC_FALSE; /* Command line customization */ PetscOptionsBegin(PetscObjectComm((PetscObject)pc), ((PetscObject)pc)->prefix, "BDDC Nedelec options", "PC"); PetscCall(PetscOptionsBool("-pc_bddc_nedelec_field_primal", "All edge dofs set as primals: Toselli's algorithm C", NULL, setprimal, &setprimal, NULL)); - PetscCall(PetscOptionsBool("-pc_bddc_nedelec_singular", "Infer nullspace from discrete gradient", NULL, singular, &singular, NULL)); + /* print debug info and adaptive order TODO: to be removed */ PetscCall(PetscOptionsInt("-pc_bddc_nedelec_order", "Test variable order code (to be removed)", NULL, order, &order, NULL)); - /* print debug info TODO: to be removed */ PetscCall(PetscOptionsBool("-pc_bddc_nedelec_print", "Print debug info", NULL, print, &print, NULL)); PetscOptionsEnd(); - /* Return if there are no edges in the decomposition and the problem is not singular */ + /* Return if there are no edges in the decomposition */ PetscCall(MatISGetLocalToGlobalMapping(pc->pmat, &al2g, NULL)); PetscCall(ISLocalToGlobalMappingGetSize(al2g, &n)); PetscCall(PetscObjectGetComm((PetscObject)pc, &comm)); - if (!singular) { - PetscCall(VecGetArrayRead(matis->counter, (const PetscScalar **)&vals)); - lrc[0] = PETSC_FALSE; - for (i = 0; i < n; i++) { - if (PetscRealPart(vals[i]) > 2.) { - lrc[0] = PETSC_TRUE; - break; - } + PetscCall(VecGetArrayRead(matis->counter, (const PetscScalar **)&vals)); + lrc[0] = PETSC_FALSE; + for (i = 0; i < n; i++) { + if (PetscRealPart(vals[i]) > 2.) 
{ + lrc[0] = PETSC_TRUE; + break; } - PetscCall(VecRestoreArrayRead(matis->counter, (const PetscScalar **)&vals)); - PetscCall(MPIU_Allreduce(&lrc[0], &lrc[1], 1, MPIU_BOOL, MPI_LOR, comm)); - if (!lrc[1]) PetscFunctionReturn(PETSC_SUCCESS); } + PetscCall(VecRestoreArrayRead(matis->counter, (const PetscScalar **)&vals)); + PetscCall(MPIU_Allreduce(&lrc[0], &lrc[1], 1, MPIU_BOOL, MPI_LOR, comm)); + if (!lrc[1]) PetscFunctionReturn(PETSC_SUCCESS); /* Get Nedelec field */ PetscCheck(!pcbddc->n_ISForDofsLocal || field < pcbddc->n_ISForDofsLocal, comm, PETSC_ERR_USER, "Invalid field for Nedelec %" PetscInt_FMT ": number of fields is %" PetscInt_FMT, field, pcbddc->n_ISForDofsLocal); @@ -298,8 +295,6 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) } PetscCall(ISDestroy(&is)); } else { - /* restore default */ - pcbddc->nedfield = -1; /* one ref for the destruction of al2g, one for el2g */ PetscCall(PetscObjectReference((PetscObject)al2g)); PetscCall(PetscObjectReference((PetscObject)al2g)); @@ -320,46 +315,110 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_SUM)); PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_SUM)); - if (!singular) { /* drop connections with interior edges to avoid unneeded communications and memory movements */ - PetscCall(MatDuplicate(pcbddc->discretegradient, MAT_COPY_VALUES, &G)); - PetscCall(MatSetOption(G, MAT_KEEP_NONZERO_PATTERN, PETSC_FALSE)); - if (global) { - PetscInt rst; + /* There's no way to detect all possible corner candidates in an element-by-element case in a pure algebraic setting. + Firedrake attaches an index set to identify them upfront. If it is present, we assume we are in such a case */ + if (matis->allow_repeated) PetscCall(PetscObjectQuery((PetscObject)pcbddc->discretegradient, "_elements_corners", (PetscObject *)&elements_corners)); - PetscCall(MatGetOwnershipRange(G, &rst, NULL)); - for (i = 0, cum = 0; i < pc->pmat->rmap->n; i++) { - if (matis->sf_rootdata[i] < 2) matis->sf_rootdata[cum++] = i + rst; - } - PetscCall(MatSetOption(G, MAT_NO_OFF_PROC_ZERO_ROWS, PETSC_TRUE)); - PetscCall(MatZeroRows(G, cum, matis->sf_rootdata, 0., NULL, NULL)); - } else { - PetscInt *tbz; + /* drop connections with interior edges to avoid unneeded communications and memory movements */ + PetscCall(MatViewFromOptions(pcbddc->discretegradient, (PetscObject)pc, "-pc_bddc_discrete_gradient_view")); + PetscCall(MatDuplicate(pcbddc->discretegradient, MAT_COPY_VALUES, &G)); + PetscCall(MatSetOption(G, MAT_KEEP_NONZERO_PATTERN, PETSC_FALSE)); + if (global) { + PetscInt rst; - PetscCall(PetscMalloc1(ne, &tbz)); - PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE)); - PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE)); - PetscCall(ISGetIndices(nedfieldlocal, &idxs)); - for (i = 0, cum = 0; i < ne; i++) - if (matis->sf_leafdata[idxs[i]] == 1) tbz[cum++] = i; - PetscCall(ISRestoreIndices(nedfieldlocal, &idxs)); - PetscCall(ISLocalToGlobalMappingApply(el2g, cum, tbz, tbz)); - PetscCall(MatZeroRows(G, cum, tbz, 0., NULL, NULL)); - PetscCall(PetscFree(tbz)); + PetscCall(MatGetOwnershipRange(G, &rst, NULL)); + for (i = 0, cum = 0; i < pc->pmat->rmap->n; i++) { + if (matis->sf_rootdata[i] < 2) matis->sf_rootdata[cum++] = i + rst; } - } else { /* we need the entire G to infer the nullspace */ -
PetscCall(PetscObjectReference((PetscObject)pcbddc->discretegradient)); - G = pcbddc->discretegradient; + PetscCall(MatSetOption(G, MAT_NO_OFF_PROC_ZERO_ROWS, PETSC_TRUE)); + PetscCall(MatZeroRows(G, cum, matis->sf_rootdata, 0., NULL, NULL)); + } else { + PetscInt *tbz; + + PetscCall(PetscMalloc1(ne, &tbz)); + PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE)); + PetscCall(ISGetIndices(nedfieldlocal, &idxs)); + for (i = 0, cum = 0; i < ne; i++) + if (matis->sf_leafdata[idxs[i]] == 1) tbz[cum++] = i; + PetscCall(ISRestoreIndices(nedfieldlocal, &idxs)); + PetscCall(ISLocalToGlobalMappingApply(el2g, cum, tbz, tbz)); + PetscCall(MatZeroRows(G, cum, tbz, 0., NULL, NULL)); + PetscCall(PetscFree(tbz)); } - /* Extract subdomain relevant rows of G */ + /* Extract subdomain relevant rows of G */ PetscCall(ISLocalToGlobalMappingGetIndices(el2g, &idxs)); PetscCall(ISCreateGeneral(comm, ne, idxs, PETSC_USE_POINTER, &lned)); - PetscCall(MatCreateSubMatrix(G, lned, NULL, MAT_INITIAL_MATRIX, &lGall)); + PetscCall(MatAIJExtractRows(G, lned, &lGall)); + /* PetscCall(MatCreateSubMatrix(G, lned, NULL, MAT_INITIAL_MATRIX, &lGall)); */ PetscCall(ISLocalToGlobalMappingRestoreIndices(el2g, &idxs)); PetscCall(ISDestroy(&lned)); PetscCall(MatConvert(lGall, MATIS, MAT_INITIAL_MATRIX, &lGis)); PetscCall(MatDestroy(&lGall)); PetscCall(MatISGetLocalMat(lGis, &lG)); + if (matis->allow_repeated) { /* multi-element support */ + Mat *lGn, B; + IS *is_rows, *tcols, tmap, nmap; + PetscInt subnv; + const PetscInt *subvidxs; + ISLocalToGlobalMapping mapn; + + PetscCall(PetscCalloc1(pcbddc->n_local_subs * pcbddc->n_local_subs, &lGn)); + PetscCall(PetscMalloc1(pcbddc->n_local_subs, &is_rows)); + PetscCall(PetscMalloc1(pcbddc->n_local_subs, &tcols)); + for (PetscInt i = 0; i < pcbddc->n_local_subs; i++) { + if (fl2g) { + PetscCall(ISGlobalToLocalMappingApplyIS(fl2g, IS_GTOLM_MASK, pcbddc->local_subs[i], &is_rows[i])); + } else { + PetscCall(PetscObjectReference((PetscObject)pcbddc->local_subs[i])); + is_rows[i] = pcbddc->local_subs[i]; + } + PetscCall(MatCreateSubMatrix(lG, is_rows[i], NULL, MAT_INITIAL_MATRIX, &lGn[i * (1 + pcbddc->n_local_subs)])); + PetscCall(MatSeqAIJCompactOutExtraColumns_SeqAIJ(lGn[i * (1 + pcbddc->n_local_subs)], &mapn)); + PetscCall(ISLocalToGlobalMappingGetSize(mapn, &subnv)); + PetscCall(ISLocalToGlobalMappingGetIndices(mapn, &subvidxs)); + PetscCall(ISCreateGeneral(PETSC_COMM_SELF, subnv, subvidxs, PETSC_COPY_VALUES, &tcols[i])); + PetscCall(ISLocalToGlobalMappingRestoreIndices(mapn, &subvidxs)); + PetscCall(ISLocalToGlobalMappingDestroy(&mapn)); + } + + /* Create new MATIS with repeated vertices */ + PetscCall(MatCreate(comm, &B)); + PetscCall(MatSetSizes(B, lGis->rmap->n, lGis->cmap->n, lGis->rmap->N, lGis->cmap->N)); + PetscCall(MatSetType(B, MATIS)); + PetscCall(MatISSetAllowRepeated(B, PETSC_TRUE)); + PetscCall(ISConcatenate(PETSC_COMM_SELF, pcbddc->n_local_subs, tcols, &tmap)); + PetscCall(ISLocalToGlobalMappingApplyIS(lGis->cmap->mapping, tmap, &nmap)); + PetscCall(ISDestroy(&tmap)); + PetscCall(ISGetLocalSize(nmap, &subnv)); + PetscCall(ISGetIndices(nmap, &subvidxs)); + PetscCall(ISCreateGeneral(comm, subnv, subvidxs, PETSC_USE_POINTER, &tmap)); + PetscCall(ISRestoreIndices(nmap, &subvidxs)); + PetscCall(ISLocalToGlobalMappingCreateIS(tmap, &mapn)); + PetscCall(ISDestroy(&tmap)); + PetscCall(ISDestroy(&nmap)); + 
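This block rebuilds lGis as a MATIS whose local-to-global map lists a shared vertex once per local subdomain. A minimal sketch of such a repeated-map MATIS on a single process (toy two-element mesh sharing global dof 1; relies on the MatISSetAllowRepeated API used above):

#include <petscmat.h>

static PetscErrorCode BuildRepeatedMATIS(Mat *A)
{
  ISLocalToGlobalMapping map;
  const PetscInt         l2g[4] = {0, 1, 1, 2}; /* global dof 1 listed twice */

  PetscFunctionBeginUser;
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 4, l2g, PETSC_COPY_VALUES, &map));
  PetscCall(MatCreate(PETSC_COMM_SELF, A));
  PetscCall(MatSetSizes(*A, 3, 3, 3, 3));
  PetscCall(MatSetType(*A, MATIS));
  PetscCall(MatISSetAllowRepeated(*A, PETSC_TRUE)); /* must precede the mapping */
  PetscCall(MatSetLocalToGlobalMapping(*A, map, map));
  PetscCall(ISLocalToGlobalMappingDestroy(&map));
  /* each element contributes a 2x2 block in its own local numbering */
  for (PetscInt e = 0; e < 2; e++) {
    const PetscInt    rows[2] = {2 * e, 2 * e + 1};
    const PetscScalar vals[4] = {1, -1, -1, 1};

    PetscCall(MatSetValuesLocal(*A, 2, rows, 2, rows, vals, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  Mat A;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(BuildRepeatedMATIS(&A));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_SELF));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}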
PetscCall(MatSetLocalToGlobalMapping(B, lGis->rmap->mapping, mapn)); + PetscCall(ISLocalToGlobalMappingDestroy(&mapn)); + PetscCall(MatCreateNest(PETSC_COMM_SELF, pcbddc->n_local_subs, is_rows, pcbddc->n_local_subs, NULL, lGn, &lG)); + for (PetscInt i = 0; i < pcbddc->n_local_subs; i++) { + PetscCall(MatDestroy(&lGn[i * (1 + pcbddc->n_local_subs)])); + PetscCall(ISDestroy(&is_rows[i])); + PetscCall(ISDestroy(&tcols[i])); + } + PetscCall(MatConvert(lG, MATSEQAIJ, MAT_INPLACE_MATRIX, &lG)); + PetscCall(PetscFree(lGn)); + PetscCall(PetscFree(is_rows)); + PetscCall(PetscFree(tcols)); + PetscCall(MatISSetLocalMat(B, lG)); + PetscCall(MatDestroy(&lG)); + + PetscCall(MatDestroy(&lGis)); + lGis = B; + + lGis->assembled = PETSC_TRUE; + } + PetscCall(MatViewFromOptions(lGis, (PetscObject)pc, "-pc_bddc_nedelec_init_G_view")); /* SF for nodal dofs communications */ PetscCall(MatGetLocalSize(G, NULL, &Lv)); @@ -370,13 +429,25 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(ISLocalToGlobalMappingGetIndices(vl2g, &idxs)); PetscCall(PetscSFSetGraphLayout(sfv, lGis->cmap, nv, NULL, PETSC_OWN_POINTER, idxs)); PetscCall(ISLocalToGlobalMappingRestoreIndices(vl2g, &idxs)); - i = singular ? 2 : 1; - PetscCall(PetscMalloc2(i * nv, &sfvleaves, i * Lv, &sfvroots)); - /* Destroy temporary G created in MATIS format and modified G */ + if (elements_corners) { + IS tmp; + Vec global, local; + Mat_IS *tGis = (Mat_IS *)lGis->data; + + PetscCall(MatCreateVecs(lGis, &global, NULL)); + PetscCall(MatCreateVecs(tGis->A, &local, NULL)); + PetscCall(PCBDDCGlobalToLocal(tGis->cctx, global, local, elements_corners, &tmp)); + PetscCall(VecDestroy(&global)); + PetscCall(VecDestroy(&local)); + elements_corners = tmp; + } + + /* Destroy temporary G */ + PetscCall(MatISGetLocalMat(lGis, &lG)); PetscCall(PetscObjectReference((PetscObject)lG)); - PetscCall(MatDestroy(&lGis)); PetscCall(MatDestroy(&G)); + PetscCall(MatDestroy(&lGis)); if (print) { PetscCall(PetscObjectSetName((PetscObject)lG, "initial_lG")); @@ -393,7 +464,6 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(PetscBTCreate(ne, &bte)); PetscCall(PetscBTCreate(ne, &btb)); PetscCall(PetscBTCreate(ne, &btbd)); - PetscCall(PetscBTCreate(nv, &btvcand)); /* need to import the boundary specification to ensure the proper detection of coarse edges' endpoints */ if (pcbddc->DirichletBoundariesLocal) { @@ -407,7 +477,7 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(ISGetLocalSize(is, &cum)); PetscCall(ISGetIndices(is, &idxs)); for (i = 0; i < cum; i++) { - if (idxs[i] >= 0) { + if (idxs[i] >= 0 && idxs[i] < ne) { PetscCall(PetscBTSet(btb, idxs[i])); PetscCall(PetscBTSet(btbd, idxs[i])); } @@ -426,15 +496,15 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(ISGetLocalSize(is, &cum)); PetscCall(ISGetIndices(is, &idxs)); for (i = 0; i < cum; i++) { - if (idxs[i] >= 0) PetscCall(PetscBTSet(btb, idxs[i])); + if (idxs[i] >= 0 && idxs[i] < ne) PetscCall(PetscBTSet(btb, idxs[i])); } PetscCall(ISRestoreIndices(is, &idxs)); if (fl2g) PetscCall(ISDestroy(&is)); } /* Count neighs per dof */ - PetscCall(ISLocalToGlobalMappingGetNodeInfo(el2g, NULL, &ecount, &eneighs)); - PetscCall(ISLocalToGlobalMappingGetNodeInfo(vl2g, NULL, &vcount, &vneighs)); + PetscCall(ISLocalToGlobalMappingGetNodeInfo(el2g, NULL, &ecount, NULL)); + PetscCall(ISLocalToGlobalMappingGetNodeInfo(vl2g, NULL, &vcount, NULL)); /* need to remove coarse faces' dofs and coarse edges' dirichlet dofs for proper detection of coarse edges' endpoints */ @@ -502,6 +572,87 @@ PetscErrorCode 
PCBDDCNedelecSupport(PC pc) PetscCall(MatZeroRows(lGe, cum, marks, 0., NULL, NULL)); /* identify splitpoints and corner candidates */ + PetscCall(PetscMalloc2(nv, &sfvleaves, Lv, &sfvroots)); + PetscCall(PetscBTCreate(nv, &btvcand)); + if (elements_corners) { + PetscCall(ISGetLocalSize(elements_corners, &cum)); + PetscCall(ISGetIndices(elements_corners, &idxs)); + for (i = 0; i < cum; i++) PetscCall(PetscBTSet(btvcand, idxs[i])); + PetscCall(ISRestoreIndices(elements_corners, &idxs)); + } + + if (matis->allow_repeated) { /* assign a unique global id to edge local subsets and communicate it with nodal space */ + PetscSF emlsf, vmlsf; + PetscInt *eleaves, *vleaves, *meleaves, *mvleaves; + PetscInt cum_subs = 0, n_subs = pcbddc->n_local_subs, bs, emnr, emnl, vmnr, vmnl; + + PetscCall(ISLocalToGlobalMappingGetBlockSize(el2g, &bs)); + PetscCheck(bs == 1, comm, PETSC_ERR_SUP, "Not coded"); + PetscCall(ISLocalToGlobalMappingGetBlockSize(vl2g, &bs)); + PetscCheck(bs == 1, comm, PETSC_ERR_SUP, "Not coded"); + + PetscCall(ISLocalToGlobalMappingGetBlockMultiLeavesSF(el2g, &emlsf)); + PetscCall(ISLocalToGlobalMappingGetBlockMultiLeavesSF(vl2g, &vmlsf)); + + PetscCall(PetscSFGetGraph(emlsf, &emnr, &emnl, NULL, NULL)); + for (i = 0, j = 0; i < ne; i++) j += ecount[i]; + PetscCheck(emnr == ne, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of roots in edge multi-leaves SF %" PetscInt_FMT " != %" PetscInt_FMT, emnr, ne); + PetscCheck(emnl == j, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of leaves in edge multi-leaves SF %" PetscInt_FMT " != %" PetscInt_FMT, emnl, j); + + PetscCall(PetscSFGetGraph(vmlsf, &vmnr, &vmnl, NULL, NULL)); + for (i = 0, j = 0; i < nv; i++) j += vcount[i]; + PetscCheck(vmnr == nv, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of roots in nodal multi-leaves SF %" PetscInt_FMT " != %" PetscInt_FMT, vmnr, nv); + PetscCheck(vmnl == j, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of leaves in nodal multi-leaves SF %" PetscInt_FMT " != %" PetscInt_FMT, vmnl, j); + + PetscCall(PetscMalloc1(ne, &eleaves)); + PetscCall(PetscMalloc1(nv, &vleaves)); + for (i = 0; i < ne; i++) eleaves[i] = PETSC_MAX_INT; + for (i = 0; i < nv; i++) vleaves[i] = PETSC_MAX_INT; + PetscCall(PetscMalloc1(emnl, &meleaves)); + PetscCall(PetscMalloc1(vmnl, &mvleaves)); + + PetscCallMPI(MPI_Exscan(&n_subs, &cum_subs, 1, MPIU_INT, MPI_SUM, comm)); + PetscCall(MatGetRowIJ(lGinit, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &done)); + for (i = 0; i < n_subs; i++) { + const PetscInt *idxs; + const PetscInt subid = cum_subs + i; + PetscInt ns; + + PetscCall(ISGetLocalSize(pcbddc->local_subs[i], &ns)); + PetscCall(ISGetIndices(pcbddc->local_subs[i], &idxs)); + for (j = 0; j < ns; j++) { + const PetscInt e = idxs[j]; + + eleaves[e] = subid; + for (PetscInt k = ii[e]; k < ii[e + 1]; k++) vleaves[jj[k]] = subid; + } + PetscCall(ISRestoreIndices(pcbddc->local_subs[i], &idxs)); + } + PetscCall(MatRestoreRowIJ(lGinit, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &done)); + PetscCall(PetscSFBcastBegin(emlsf, MPIU_INT, eleaves, meleaves, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(emlsf, MPIU_INT, eleaves, meleaves, MPI_REPLACE)); + PetscCall(PetscSFBcastBegin(vmlsf, MPIU_INT, vleaves, mvleaves, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(vmlsf, MPIU_INT, vleaves, mvleaves, MPI_REPLACE)); + PetscCall(PetscFree(eleaves)); + PetscCall(PetscFree(vleaves)); + + PetscCall(PetscMalloc1(ne + 1, &eneighs)); + eneighs[0] = meleaves; + for (i = 0; i < ne; i++) { + PetscCall(PetscSortInt(ecount[i], eneighs[i])); + eneighs[i + 1] =
eneighs[i] + ecount[i]; + } + PetscCall(PetscMalloc1(nv + 1, &vneighs)); + vneighs[0] = mvleaves; + for (i = 0; i < nv; i++) { + PetscCall(PetscSortInt(vcount[i], vneighs[i])); + vneighs[i + 1] = vneighs[i] + vcount[i]; + } + } else { + PetscCall(ISLocalToGlobalMappingGetNodeInfo(el2g, NULL, NULL, &eneighs)); + PetscCall(ISLocalToGlobalMappingGetNodeInfo(vl2g, NULL, NULL, &vneighs)); + } + PetscCall(MatTranspose(lGe, MAT_INITIAL_MATRIX, &lGt)); if (print) { PetscCall(PetscObjectSetName((PetscObject)lGe, "edgerestr_lG")); @@ -522,17 +673,19 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCheck(vorder - test <= PETSC_SQRT_MACHINE_EPSILON, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected value for vorder: %g (%" PetscInt_FMT ")", (double)vorder, test); ord = 1; } - PetscAssert(test % ord == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected number of edge dofs %" PetscInt_FMT " connected with nodal dof %" PetscInt_FMT " with order %" PetscInt_FMT, test, i, ord); for (j = ii[i]; j < ii[i + 1] && sneighs; j++) { - if (PetscBTLookup(btbd, jj[j])) { + const PetscInt e = jj[j]; + + if (PetscBTLookup(btbd, e)) { bdir = PETSC_TRUE; break; } - if (vc != ecount[jj[j]]) { + if (vc != ecount[e]) { sneighs = PETSC_FALSE; } else { - PetscInt k, *vn = vneighs[i], *en = eneighs[jj[j]]; - for (k = 0; k < vc; k++) { + const PetscInt *vn = vneighs[i], *en = eneighs[e]; + + for (PetscInt k = 0; k < vc; k++) { if (vn[k] != en[k]) { sneighs = PETSC_FALSE; break; @@ -540,6 +693,7 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) } } } + if (elements_corners) test = 0; if (!sneighs || test >= 3 * ord || bdir) { /* splitpoints */ if (print) PetscCall(PetscPrintf(PETSC_COMM_SELF, "SPLITPOINT %" PetscInt_FMT " (%s %s %s)\n", i, PetscBools[!sneighs], PetscBools[test >= 3 * ord], PetscBools[bdir])); PetscCall(PetscBTSet(btv, i)); @@ -547,14 +701,12 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) if (order == 1 || (!order && ii[i + 1] - ii[i] == 1)) { if (print) PetscCall(PetscPrintf(PETSC_COMM_SELF, "ENDPOINT %" PetscInt_FMT "\n", i)); PetscCall(PetscBTSet(btv, i)); - } else { + } else if (!elements_corners) { if (print) PetscCall(PetscPrintf(PETSC_COMM_SELF, "CORNER CANDIDATE %" PetscInt_FMT "\n", i)); PetscCall(PetscBTSet(btvcand, i)); } } } - PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(el2g, NULL, &ecount, &eneighs)); - PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(vl2g, NULL, &vcount, &vneighs)); PetscCall(PetscBTDestroy(&btbd)); /* a candidate is valid if it is connected to another candidate via a non-primal edge dof */ @@ -594,13 +746,20 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(MatTranspose(lG, MAT_INITIAL_MATRIX, &lGt)); PetscCall(MatSetOption(lGt, MAT_KEEP_NONZERO_PATTERN, PETSC_FALSE)); - /* Mark interior nodal dofs */ - PetscCall(ISLocalToGlobalMappingGetInfo(vl2g, &n_neigh, &neigh, &n_shared, &shared)); + /* Mark shared nodal dofs */ PetscCall(PetscBTCreate(nv, &btvi)); - for (i = 1; i < n_neigh; i++) { - for (j = 0; j < n_shared[i]; j++) PetscCall(PetscBTSet(btvi, shared[i][j])); + for (i = 0; i < nv; i++) { + if (vcount[i] > 1) PetscCall(PetscBTSet(btvi, i)); } - PetscCall(ISLocalToGlobalMappingRestoreInfo(vl2g, &n_neigh, &neigh, &n_shared, &shared)); + + if (matis->allow_repeated) { + PetscCall(PetscFree(eneighs[0])); + PetscCall(PetscFree(vneighs[0])); + PetscCall(PetscFree(eneighs)); + PetscCall(PetscFree(vneighs)); + } + PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(el2g, NULL, &ecount, &eneighs)); + PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(vl2g, NULL, &vcount, &vneighs)); /* 
communicate corners and splitpoints */ PetscCall(PetscMalloc1(nv, &vmarks)); @@ -634,8 +793,7 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) if (sfvleaves[i]) { vmarks[cum++] = i; PetscCall(PetscBTSet(btv, i)); - } - if (!PetscBTLookup(btvi, i)) vmarks[cum++] = i; + } else if (!PetscBTLookup(btvi, i)) vmarks[cum++] = i; } PetscCall(PetscBTDestroy(&btvi)); if (print) { @@ -681,75 +839,78 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)lG, "econn_")); /* Symbolic conn = lG*lGt */ - PetscCall(MatProductCreate(lG, lGt, NULL, &conn)); - PetscCall(MatProductSetType(conn, MATPRODUCT_AB)); - PetscCall(MatProductSetAlgorithm(conn, "default")); - PetscCall(MatProductSetFill(conn, PETSC_DEFAULT)); - PetscCall(PetscObjectSetOptionsPrefix((PetscObject)conn, "econn_")); - PetscCall(MatProductSetFromOptions(conn)); - PetscCall(MatProductSymbolic(conn)); - - PetscCall(MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &done)); - if (fl2g) { - PetscBT btf; - PetscInt *iia, *jja, *iiu, *jju; - PetscBool rest = PETSC_FALSE, free = PETSC_FALSE; - - /* create CSR for all local dofs */ - PetscCall(PetscMalloc1(n + 1, &iia)); - if (pcbddc->mat_graph->nvtxs_csr) { /* the user has passed in a CSR graph */ - PetscCheck(pcbddc->mat_graph->nvtxs_csr == n, PETSC_COMM_SELF, PETSC_ERR_USER, "Invalid size of CSR graph %" PetscInt_FMT ". Should be %" PetscInt_FMT, pcbddc->mat_graph->nvtxs_csr, n); - iiu = pcbddc->mat_graph->xadj; - jju = pcbddc->mat_graph->adjncy; - } else if (pcbddc->use_local_adj) { - rest = PETSC_TRUE; - PetscCall(MatGetRowIJ(matis->A, 0, PETSC_TRUE, PETSC_FALSE, &i, (const PetscInt **)&iiu, (const PetscInt **)&jju, &done)); - } else { - free = PETSC_TRUE; - PetscCall(PetscMalloc2(n + 1, &iiu, n, &jju)); - iiu[0] = 0; - for (i = 0; i < n; i++) { - iiu[i + 1] = i + 1; - jju[i] = -1; + if (!elements_corners) { /* if present, we assume we are in the element-by-element case and the CSR graph is not needed */ + PetscCall(MatProductCreate(lG, lGt, NULL, &conn)); + PetscCall(MatProductSetType(conn, MATPRODUCT_AB)); + PetscCall(MatProductSetAlgorithm(conn, "default")); + PetscCall(MatProductSetFill(conn, PETSC_DEFAULT)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)conn, "econn_")); + PetscCall(MatProductSetFromOptions(conn)); + PetscCall(MatProductSymbolic(conn)); + PetscCall(MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &done)); + if (fl2g) { + PetscBT btf; + PetscInt *iia, *jja, *iiu, *jju; + PetscBool rest = PETSC_FALSE, free = PETSC_FALSE; + + /* create CSR for all local dofs */ + PetscCall(PetscMalloc1(n + 1, &iia)); + if (pcbddc->mat_graph->nvtxs_csr) { /* the user has passed in a CSR graph */ + PetscCheck(pcbddc->mat_graph->nvtxs_csr == n, PETSC_COMM_SELF, PETSC_ERR_USER, "Invalid size of CSR graph %" PetscInt_FMT ". 
Should be %" PetscInt_FMT, pcbddc->mat_graph->nvtxs_csr, n); + iiu = pcbddc->mat_graph->xadj; + jju = pcbddc->mat_graph->adjncy; + } else if (pcbddc->use_local_adj) { + rest = PETSC_TRUE; + PetscCall(MatGetRowIJ(matis->A, 0, PETSC_TRUE, PETSC_FALSE, &i, (const PetscInt **)&iiu, (const PetscInt **)&jju, &done)); + } else { + free = PETSC_TRUE; + PetscCall(PetscMalloc2(n + 1, &iiu, n, &jju)); + iiu[0] = 0; + for (i = 0; i < n; i++) { + iiu[i + 1] = i + 1; + jju[i] = -1; + } } - } - /* import sizes of CSR */ - iia[0] = 0; - for (i = 0; i < n; i++) iia[i + 1] = iiu[i + 1] - iiu[i]; + /* import sizes of CSR */ + iia[0] = 0; + for (i = 0; i < n; i++) iia[i + 1] = iiu[i + 1] - iiu[i]; - /* overwrite entries corresponding to the Nedelec field */ - PetscCall(PetscBTCreate(n, &btf)); - PetscCall(ISGetIndices(nedfieldlocal, &idxs)); - for (i = 0; i < ne; i++) { - PetscCall(PetscBTSet(btf, idxs[i])); - iia[idxs[i] + 1] = ii[i + 1] - ii[i]; - } + /* overwrite entries corresponding to the Nedelec field */ + PetscCall(PetscBTCreate(n, &btf)); + PetscCall(ISGetIndices(nedfieldlocal, &idxs)); + for (i = 0; i < ne; i++) { + PetscCall(PetscBTSet(btf, idxs[i])); + iia[idxs[i] + 1] = ii[i + 1] - ii[i]; + } - /* iia in CSR */ - for (i = 0; i < n; i++) iia[i + 1] += iia[i]; + /* iia in CSR */ + for (i = 0; i < n; i++) iia[i + 1] += iia[i]; - /* jja in CSR */ - PetscCall(PetscMalloc1(iia[n], &jja)); - for (i = 0; i < n; i++) - if (!PetscBTLookup(btf, i)) - for (j = 0; j < iiu[i + 1] - iiu[i]; j++) jja[iia[i] + j] = jju[iiu[i] + j]; + /* jja in CSR */ + PetscCall(PetscMalloc1(iia[n], &jja)); + for (i = 0; i < n; i++) + if (!PetscBTLookup(btf, i)) + for (j = 0; j < iiu[i + 1] - iiu[i]; j++) jja[iia[i] + j] = jju[iiu[i] + j]; - /* map edge dofs connectivity */ - if (jj) { - PetscCall(ISLocalToGlobalMappingApply(fl2g, ii[ne], jj, (PetscInt *)jj)); - for (i = 0; i < ne; i++) { - PetscInt e = idxs[i]; - for (j = 0; j < ii[i + 1] - ii[i]; j++) jja[iia[e] + j] = jj[ii[i] + j]; + /* map edge dofs connectivity */ + if (jj) { + PetscCall(ISLocalToGlobalMappingApply(fl2g, ii[ne], jj, (PetscInt *)jj)); + for (i = 0; i < ne; i++) { + PetscInt e = idxs[i]; + for (j = 0; j < ii[i + 1] - ii[i]; j++) jja[iia[e] + j] = jj[ii[i] + j]; + } } + PetscCall(ISRestoreIndices(nedfieldlocal, &idxs)); + PetscCall(PCBDDCSetLocalAdjacencyGraph(pc, n, iia, jja, PETSC_COPY_VALUES)); + if (rest) PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_TRUE, PETSC_FALSE, &i, (const PetscInt **)&iiu, (const PetscInt **)&jju, &done)); + if (free) PetscCall(PetscFree2(iiu, jju)); + PetscCall(PetscBTDestroy(&btf)); + } else { + PetscCall(PCBDDCSetLocalAdjacencyGraph(pc, n, ii, jj, PETSC_COPY_VALUES)); } - PetscCall(ISRestoreIndices(nedfieldlocal, &idxs)); - PetscCall(PCBDDCSetLocalAdjacencyGraph(pc, n, iia, jja, PETSC_OWN_POINTER)); - if (rest) PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_TRUE, PETSC_FALSE, &i, (const PetscInt **)&iiu, (const PetscInt **)&jju, &done)); - if (free) PetscCall(PetscFree2(iiu, jju)); - PetscCall(PetscBTDestroy(&btf)); - } else { - PetscCall(PCBDDCSetLocalAdjacencyGraph(pc, n, ii, jj, PETSC_USE_POINTER)); + PetscCall(MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &done)); + PetscCall(MatDestroy(&conn)); } /* Analyze interface for edge dofs */ @@ -758,7 +919,6 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) /* Get coarse edges in the edge space */ PetscCall(PCBDDCGraphGetCandidatesIS(pcbddc->mat_graph, NULL, NULL, &nee, &alleedges, &allprimals)); - PetscCall(MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, 
&jj, &done)); if (fl2g) { PetscCall(ISGlobalToLocalMappingApplyIS(fl2g, IS_GTOLM_DROP, allprimals, &primals)); @@ -1168,20 +1328,17 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) /* Create change of basis matrix (preallocation can be improved) */ PetscCall(MatCreate(comm, &T)); - PetscCall(MatSetSizes(T, pc->pmat->rmap->n, pc->pmat->rmap->n, pc->pmat->rmap->N, pc->pmat->rmap->N)); + PetscCall(MatSetLayouts(T, pc->mat->rmap, pc->mat->cmap)); PetscCall(MatSetType(T, MATAIJ)); - PetscCall(MatSeqAIJSetPreallocation(T, 10, NULL)); - PetscCall(MatMPIAIJSetPreallocation(T, 10, NULL, 10, NULL)); + PetscCall(MatSeqAIJSetPreallocation(T, maxsize, NULL)); + PetscCall(MatMPIAIJSetPreallocation(T, maxsize, NULL, maxsize, NULL)); PetscCall(MatSetLocalToGlobalMapping(T, al2g, al2g)); PetscCall(MatSetOption(T, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE)); PetscCall(MatSetOption(T, MAT_ROW_ORIENTED, PETSC_FALSE)); PetscCall(ISLocalToGlobalMappingDestroy(&al2g)); /* Defaults to identity */ - PetscCall(MatCreateVecs(pc->pmat, &tvec, NULL)); - PetscCall(VecSet(tvec, 1.0)); - PetscCall(MatDiagonalSet(T, tvec, INSERT_VALUES)); - PetscCall(VecDestroy(&tvec)); + for (i = pc->mat->rmap->rstart; i < pc->mat->rmap->rend; i++) PetscCall(MatSetValue(T, i, i, 1.0, INSERT_VALUES)); /* Create discrete gradient for the coarser level if needed */ PetscCall(MatDestroy(&pcbddc->nedcG)); @@ -1275,6 +1432,33 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(MatDestroy(&Gins)); PetscCall(MatDestroy(&GKins)); } + + /* for FDM element-by-element: constrain only the first dof on each edge. Why? */ + if (elements_corners && pcbddc->mat_graph->multi_element) { + ISLocalToGlobalMapping map; + MatNullSpace nnsp; + Vec quad_vec; + + PetscCall(MatCreateVecs(pc->pmat, &quad_vec, NULL)); + PetscCall(PCBDDCNullSpaceCreate(PetscObjectComm((PetscObject)pc), PETSC_FALSE, 1, &quad_vec, &nnsp)); + PetscCall(VecLockReadPop(quad_vec)); + PetscCall(MatISGetLocalToGlobalMapping(pc->pmat, &map, NULL)); + PetscCall(VecSetLocalToGlobalMapping(quad_vec, map)); + for (i = 0; i < nee; i++) { + const PetscInt *idxs; + PetscScalar one = 1.0; + + PetscCall(ISGetLocalSize(alleedges[i], &cum)); + if (!cum) continue; + PetscCall(ISGetIndices(alleedges[i], &idxs)); + PetscCall(VecSetValuesLocal(quad_vec, 1, idxs, &one, INSERT_VALUES)); + PetscCall(ISRestoreIndices(alleedges[i], &idxs)); + } + PetscCall(VecLockReadPush(quad_vec)); + PetscCall(VecDestroy(&quad_vec)); + PetscCall(MatSetNearNullSpace(pc->pmat, nnsp)); + PetscCall(MatNullSpaceDestroy(&nnsp)); + } PetscCall(ISLocalToGlobalMappingDestroy(&el2g)); /* Start assembling */ @@ -1329,7 +1513,6 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) PetscCall(ISLocalToGlobalMappingDestroy(&fl2g)); PetscCall(PCBDDCGraphRestoreCandidatesIS(pcbddc->mat_graph, NULL, NULL, &nee, &alleedges, &allprimals)); PetscCall(PCBDDCGraphResetCSR(pcbddc->mat_graph)); - PetscCall(MatDestroy(&conn)); PetscCall(ISDestroy(&nedfieldlocal)); PetscCall(PetscFree(extrow)); @@ -1342,16 +1525,16 @@ PetscErrorCode PCBDDCNedelecSupport(PC pc) /* Complete assembling */ PetscCall(MatAssemblyEnd(T, MAT_FINAL_ASSEMBLY)); + PetscCall(MatViewFromOptions(T, (PetscObject)pc, "-pc_bddc_nedelec_change_view")); if (pcbddc->nedcG) { PetscCall(MatAssemblyEnd(pcbddc->nedcG, MAT_FINAL_ASSEMBLY)); -#if 0 - PetscCall(PetscObjectSetName((PetscObject)pcbddc->nedcG,"coarse_G")); - PetscCall(MatView(pcbddc->nedcG,NULL)); -#endif + PetscCall(MatViewFromOptions(pcbddc->nedcG, (PetscObject)pc, "-pc_bddc_nedelec_coarse_change_view")); } +
PetscCall(ISDestroy(&elements_corners)); + /* set change of basis */ - PetscCall(PCBDDCSetChangeOfBasisMat(pc, T, singular)); + PetscCall(PCBDDCSetChangeOfBasisMat(pc, T, PETSC_FALSE)); PetscCall(MatDestroy(&T)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1407,37 +1590,20 @@ PetscErrorCode PCBDDCNullSpaceCreate(MPI_Comm comm, PetscBool has_const, PetscIn PetscErrorCode PCBDDCComputeNoNetFlux(Mat A, Mat divudotp, PetscBool transpose, IS vl2l, PCBDDCGraph graph, MatNullSpace *nnsp) { Mat loc_divudotp; - Vec p, v, vins, quad_vec, *quad_vecs; + Vec p, v, quad_vec; ISLocalToGlobalMapping map; - PetscScalar *vals; - const PetscScalar *array; - PetscInt i, maxneighs = 0, maxsize, *gidxs; - PetscInt n_neigh, *neigh, *n_shared, **shared; - PetscMPIInt rank; + PetscScalar *array; PetscFunctionBegin; - PetscCall(ISLocalToGlobalMappingGetInfo(graph->l2gmap, &n_neigh, &neigh, &n_shared, &shared)); - for (i = 0; i < n_neigh; i++) maxneighs = PetscMax(graph->count[shared[i][0]] + 1, maxneighs); - PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &maxneighs, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)A))); - if (!maxneighs) { - PetscCall(ISLocalToGlobalMappingRestoreInfo(graph->l2gmap, &n_neigh, &neigh, &n_shared, &shared)); - *nnsp = NULL; - PetscFunctionReturn(PETSC_SUCCESS); - } - maxsize = 0; - for (i = 0; i < n_neigh; i++) maxsize = PetscMax(n_shared[i], maxsize); - PetscCall(PetscMalloc2(maxsize, &gidxs, maxsize, &vals)); - /* create vectors to hold quadrature weights */ PetscCall(MatCreateVecs(A, &quad_vec, NULL)); if (!transpose) { PetscCall(MatISGetLocalToGlobalMapping(A, &map, NULL)); } else { PetscCall(MatISGetLocalToGlobalMapping(A, NULL, &map)); } - PetscCall(VecDuplicateVecs(quad_vec, maxneighs, &quad_vecs)); - PetscCall(VecDestroy(&quad_vec)); - PetscCall(PCBDDCNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_FALSE, maxneighs, quad_vecs, nnsp)); - for (i = 0; i < maxneighs; i++) PetscCall(VecLockReadPop(quad_vecs[i])); + PetscCall(PCBDDCNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_FALSE, 1, &quad_vec, nnsp)); + PetscCall(VecLockReadPop(quad_vec)); + PetscCall(VecSetLocalToGlobalMapping(quad_vec, map)); /* compute local quad vec */ PetscCall(MatISGetLocalMat(divudotp, &loc_divudotp)); @@ -1446,15 +1612,18 @@ PetscErrorCode PCBDDCComputeNoNetFlux(Mat A, Mat divudotp, PetscBool transpose, } else { PetscCall(MatCreateVecs(loc_divudotp, &p, &v)); } + /* the assumption here is that the constant vector interpolates the constant on the L2 conforming space */ PetscCall(VecSet(p, 1.)); if (!transpose) { PetscCall(MatMultTranspose(loc_divudotp, p, v)); } else { PetscCall(MatMult(loc_divudotp, p, v)); } + PetscCall(VecDestroy(&p)); if (vl2l) { Mat lA; VecScatter sc; + Vec vins; PetscCall(MatISGetLocalMat(A, &lA)); PetscCall(MatCreateVecs(lA, &vins, NULL)); @@ -1462,41 +1631,42 @@ PetscErrorCode PCBDDCComputeNoNetFlux(Mat A, Mat divudotp, PetscBool transpose, PetscCall(VecScatterBegin(sc, v, vins, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(sc, v, vins, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterDestroy(&sc)); - } else { - vins = v; - } - PetscCall(VecGetArrayRead(vins, &array)); - PetscCall(VecDestroy(&p)); - - /* insert in global quadrature vecs */ - PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank)); - for (i = 1; i < n_neigh; i++) { - const PetscInt *idxs; - PetscInt idx, nn, j; - - idxs = shared[i]; - nn = n_shared[i]; - for (j = 0; j < nn; j++) vals[j] = array[idxs[j]]; - PetscCall(PetscFindInt(rank, graph->count[idxs[0]], 
graph->neighbours_set[idxs[0]], &idx)); - idx = -(idx + 1); - PetscCheck(idx >= 0 && idx < maxneighs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid index %" PetscInt_FMT " not in [0,%" PetscInt_FMT ")", idx, maxneighs); - PetscCall(ISLocalToGlobalMappingApply(map, nn, idxs, gidxs)); - PetscCall(VecSetValues(quad_vecs[idx], nn, gidxs, vals, INSERT_VALUES)); - } - PetscCall(ISLocalToGlobalMappingRestoreInfo(graph->l2gmap, &n_neigh, &neigh, &n_shared, &shared)); - PetscCall(VecRestoreArrayRead(vins, &array)); - if (vl2l) PetscCall(VecDestroy(&vins)); + PetscCall(VecDestroy(&v)); + v = vins; + } + + /* mask summation of interface values */ + PetscInt n, *mmask, *mask, *idxs, nmr, nr; + const PetscInt *degree; + PetscSF msf; + + PetscCall(VecGetLocalSize(v, &n)); + PetscCall(PetscSFGetGraph(graph->interface_subset_sf, &nr, NULL, NULL, NULL)); + PetscCall(PetscSFGetMultiSF(graph->interface_subset_sf, &msf)); + PetscCall(PetscSFGetGraph(msf, &nmr, NULL, NULL, NULL)); + PetscCall(PetscCalloc3(nmr, &mmask, n, &mask, n, &idxs)); + PetscCall(PetscSFComputeDegreeBegin(graph->interface_subset_sf, &degree)); + PetscCall(PetscSFComputeDegreeEnd(graph->interface_subset_sf, &degree)); + for (PetscInt i = 0, c = 0; i < nr; i++) { + mmask[c] = 1; + c += degree[i]; + } + PetscCall(PetscSFScatterBegin(graph->interface_subset_sf, MPIU_INT, mmask, mask)); + PetscCall(PetscSFScatterEnd(graph->interface_subset_sf, MPIU_INT, mmask, mask)); + PetscCall(VecGetArray(v, &array)); + for (PetscInt i = 0; i < n; i++) { + array[i] *= mask[i]; + idxs[i] = i; + } + PetscCall(VecSetValuesLocal(quad_vec, n, idxs, array, ADD_VALUES)); + PetscCall(VecRestoreArray(v, &array)); + PetscCall(PetscFree3(mmask, mask, idxs)); PetscCall(VecDestroy(&v)); - PetscCall(PetscFree2(gidxs, vals)); - - /* assemble near null space */ - for (i = 0; i < maxneighs; i++) PetscCall(VecAssemblyBegin(quad_vecs[i])); - for (i = 0; i < maxneighs; i++) { - PetscCall(VecAssemblyEnd(quad_vecs[i])); - PetscCall(VecViewFromOptions(quad_vecs[i], NULL, "-pc_bddc_quad_vecs_view")); - PetscCall(VecLockReadPush(quad_vecs[i])); - } - PetscCall(VecDestroyVecs(maxneighs, &quad_vecs)); + PetscCall(VecAssemblyBegin(quad_vec)); + PetscCall(VecAssemblyEnd(quad_vec)); + PetscCall(VecViewFromOptions(quad_vec, NULL, "-pc_bddc_quad_vec_view")); + PetscCall(VecLockReadPush(quad_vec)); + PetscCall(VecDestroy(&quad_vec)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1629,15 +1799,28 @@ PetscErrorCode PCBDDCComputeLocalTopologyInfo(PC pc) if (!pcbddc->user_primal_vertices_local && pcbddc->user_primal_vertices) PetscCall(PCBDDCGlobalToLocal(matis->rctx, global, local, pcbddc->user_primal_vertices, &pcbddc->user_primal_vertices_local)); PetscCall(VecDestroy(&global)); PetscCall(VecDestroy(&local)); - /* detect local disconnected subdomains if requested (use matis->A) */ - if (pcbddc->detect_disconnected) { + /* detect local disconnected subdomains if requested or needed */ + if (pcbddc->detect_disconnected || matis->allow_repeated) { IS primalv = NULL; - PetscInt i; + PetscInt nel; PetscBool filter = pcbddc->detect_disconnected_filter; - for (i = 0; i < pcbddc->n_local_subs; i++) PetscCall(ISDestroy(&pcbddc->local_subs[i])); + for (PetscInt i = 0; i < pcbddc->n_local_subs; i++) PetscCall(ISDestroy(&pcbddc->local_subs[i])); PetscCall(PetscFree(pcbddc->local_subs)); - PetscCall(PCBDDCDetectDisconnectedComponents(pc, filter, &pcbddc->n_local_subs, &pcbddc->local_subs, &primalv)); + PetscCall(MatGetVariableBlockSizes(matis->A, &nel, NULL)); + if (matis->allow_repeated && nel) { + const PetscInt
*elsizes; + + pcbddc->n_local_subs = nel; + PetscCall(MatGetVariableBlockSizes(matis->A, NULL, &elsizes)); + PetscCall(PetscMalloc1(nel, &pcbddc->local_subs)); + for (PetscInt i = 0, c = 0; i < nel; i++) { + PetscCall(ISCreateStride(PETSC_COMM_SELF, elsizes[i], c, 1, &pcbddc->local_subs[i])); + c += elsizes[i]; + } + } else { + PetscCall(PCBDDCDetectDisconnectedComponents(pc, filter, &pcbddc->n_local_subs, &pcbddc->local_subs, &primalv)); + } PetscCall(PCBDDCAddPrimalVerticesLocalIS(pc, primalv)); PetscCall(ISDestroy(&primalv)); } @@ -2243,6 +2426,7 @@ PetscErrorCode PCBDDCDetectDisconnectedComponents(PC pc, PetscBool filter, Petsc } } /* compute local connected components using PCBDDCGraph */ + graph->seq_graph = PETSC_TRUE; /* analyze local connected components (i.e. disconnected subdomains) irrespective of dofs count */ PetscCall(ISCreateStride(PETSC_COMM_SELF, n, 0, 1, &is_dummy)); PetscCall(ISLocalToGlobalMappingCreateIS(is_dummy, &l2gmap_dummy)); PetscCall(ISDestroy(&is_dummy)); @@ -2592,20 +2776,16 @@ PetscErrorCode PCBDDCBenignDetectSaddlePoint(PC pc, PetscBool reuse, IS *zerodia if (pcbddc->NeumannBoundariesLocal) PetscCall(ISGetLocalSize(pcbddc->NeumannBoundariesLocal, &nneu)); checkb = (PetscBool)(!pcbddc->NeumannBoundariesLocal || pcbddc->current_level); if (checkb) { /* need to compute interior nodes */ - PetscInt n, i, j; - PetscInt n_neigh, *neigh, *n_shared, **shared; - PetscInt *iwork; + PetscInt n, i; + PetscInt *count; + ISLocalToGlobalMapping mapping; - PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &n)); - PetscCall(ISLocalToGlobalMappingGetInfo(matis->rmapping, &n_neigh, &neigh, &n_shared, &shared)); - PetscCall(PetscCalloc1(n, &iwork)); + PetscCall(MatISGetLocalToGlobalMapping(pc->pmat, &mapping, NULL)); + PetscCall(ISLocalToGlobalMappingGetNodeInfo(mapping, &n, &count, NULL)); PetscCall(PetscMalloc1(n, &interior_dofs)); - for (i = 1; i < n_neigh; i++) - for (j = 0; j < n_shared[i]; j++) iwork[shared[i][j]] += 1; for (i = 0; i < n; i++) - if (!iwork[i]) interior_dofs[n_interior_dofs++] = i; - PetscCall(PetscFree(iwork)); - PetscCall(ISLocalToGlobalMappingRestoreInfo(matis->rmapping, &n_neigh, &neigh, &n_shared, &shared)); + if (count[i] < 2) interior_dofs[n_interior_dofs++] = i; + PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(mapping, &n, &count, NULL)); } if (has_null_pressures) { IS *subs; @@ -3283,11 +3463,11 @@ PetscErrorCode PCBDDCAdaptiveSelection(PC pc) if (nmin >= subset_size) compute_range = PETSC_FALSE; if (pcbddc->dbg_flag) { - PetscInt nc = 0; + PetscInt nc = 0, c = pcbddc->mat_graph->nodes[idxs[0]].count, w = pcbddc->mat_graph->nodes[idxs[0]].which_dof; if (sub_schurs->change_primal_sub) PetscCall(ISGetLocalSize(sub_schurs->change_primal_sub[i], &nc)); - PetscCall(PetscViewerASCIISynchronizedPrintf(pcbddc->dbg_viewer, "Computing for sub %" PetscInt_FMT "/%" PetscInt_FMT " size %" PetscInt_FMT " count %" PetscInt_FMT " fid %" PetscInt_FMT " (range %d) (change %" PetscInt_FMT ").\n", i, - sub_schurs->n_subs, subset_size, pcbddc->mat_graph->count[idxs[0]] + 1, pcbddc->mat_graph->which_dof[idxs[0]], compute_range, nc)); + PetscCall( + PetscViewerASCIISynchronizedPrintf(pcbddc->dbg_viewer, "Computing for sub %" PetscInt_FMT "/%" PetscInt_FMT " size %" PetscInt_FMT " count %" PetscInt_FMT " fid %" PetscInt_FMT " (range %d) (change %" PetscInt_FMT ").\n", i, sub_schurs->n_subs, subset_size, c, w, compute_range, nc)); } PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); @@ -3655,7 +3835,7 @@ PetscErrorCode PCBDDCAdaptiveSelection(PC pc) PetscErrorCode 
PCBDDCSetUpSolvers(PC pc) { - PetscScalar *coarse_submat_vals; + Mat coarse_submat; PetscFunctionBegin; /* Setup local scatters R_to_B and (optionally) R_to_D */ @@ -3670,13 +3850,11 @@ PetscErrorCode PCBDDCSetUpSolvers(PC pc) Setup local correction and local part of coarse basis. Gives back the dense local part of the coarse matrix in column major ordering */ - PetscCall(PCBDDCSetUpCorrection(pc, &coarse_submat_vals)); + PetscCall(PCBDDCSetUpCorrection(pc, &coarse_submat)); /* Compute total number of coarse nodes and setup coarse solver */ - PetscCall(PCBDDCSetUpCoarseSolver(pc, coarse_submat_vals)); - - /* free */ - PetscCall(PetscFree(coarse_submat_vals)); + PetscCall(PCBDDCSetUpCoarseSolver(pc, coarse_submat)); + PetscCall(MatDestroy(&coarse_submat)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -3731,11 +3909,6 @@ PetscErrorCode PCBDDCResetSolvers(PC pc) PetscFunctionBegin; PetscCall(VecDestroy(&pcbddc->coarse_vec)); - if (pcbddc->coarse_phi_B) { - PetscScalar *array; - PetscCall(MatDenseGetArray(pcbddc->coarse_phi_B, &array)); - PetscCall(PetscFree(array)); - } PetscCall(MatDestroy(&pcbddc->coarse_phi_B)); PetscCall(MatDestroy(&pcbddc->coarse_phi_D)); PetscCall(MatDestroy(&pcbddc->coarse_psi_B)); @@ -3816,29 +3989,228 @@ PetscErrorCode PCBDDCSetUpLocalWorkVectors(PC pc) PetscFunctionReturn(PETSC_SUCCESS); } -PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) +static PetscErrorCode MatSetValuesSubMat(Mat A, Mat S, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], InsertMode imode) +{ + PetscBool flg; + const PetscScalar *a; + + PetscFunctionBegin; + PetscCall(PetscObjectBaseTypeCompare((PetscObject)S, MATSEQDENSE, &flg)); + if (flg) { + PetscCall(MatDenseGetArrayRead(S, &a)); + PetscCall(MatSetOption(A, MAT_ROW_ORIENTED, PETSC_FALSE)); + PetscCall(MatSetValues(A, nr, rows, nc, cols, a, imode)); + PetscCall(MatSetOption(A, MAT_ROW_ORIENTED, PETSC_TRUE)); + PetscCall(MatDenseRestoreArrayRead(S, &a)); + } else { + const PetscInt *ii, *jj; + PetscInt n; + PetscInt buf[8192], *bufc = NULL; + PetscBool freeb = PETSC_FALSE; + Mat Sm = S; + + PetscCall(PetscObjectBaseTypeCompare((PetscObject)S, MATSEQAIJ, &flg)); + if (!flg) PetscCall(MatConvert(S, MATSEQAIJ, MAT_INITIAL_MATRIX, &Sm)); + else PetscCall(PetscObjectReference((PetscObject)S)); + PetscCall(MatSeqAIJGetArrayRead(Sm, &a)); + PetscCall(MatGetRowIJ(Sm, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &flg)); + PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot get IJ structure"); + if (nc <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) { + bufc = buf; + } else { + PetscCall(PetscMalloc1(nc, &bufc)); + freeb = PETSC_TRUE; + } + + for (PetscInt i = 0; i < n; i++) { + const PetscInt nci = ii[i + 1] - ii[i]; + + for (PetscInt j = 0; j < nci; j++) bufc[j] = cols[jj[ii[i] + j]]; + PetscCall(MatSetValues(A, 1, rows + i, nci, bufc, a + ii[i], imode)); + } + PetscCall(MatRestoreRowIJ(Sm, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &flg)); + PetscCall(MatSeqAIJRestoreArrayRead(Sm, &a)); + PetscCall(MatDestroy(&Sm)); + if (freeb) PetscCall(PetscFree(bufc)); + } + PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatCreateSeqAIJFromDenseExpand(Mat D, PetscInt n, const PetscInt j[], Mat *mat) +{ + Mat_SeqAIJ *aij; + PetscInt *ii, *jj; + PetscScalar *aa; + PetscInt nnz = 0, m, nc; + const PetscScalar *a; + const PetscScalar zero = 0.0; + + PetscFunctionBegin; + 
PetscCall(MatGetLocalSize(D, &m, &nc)); + PetscCall(MatDenseGetArrayRead(D, &a)); + PetscCall(PetscMalloc1(m + 1, &ii)); + PetscCall(PetscMalloc1(m * nc, &jj)); + PetscCall(PetscMalloc1(m * nc, &aa)); + ii[0] = 0; + for (PetscInt k = 0; k < m; k++) { + for (PetscInt s = 0; s < nc; s++) { + const PetscInt c = s + k * nc; + const PetscScalar v = a[k + s * m]; + + if (PetscUnlikely(j[c] < 0 || v == zero)) continue; + jj[nnz] = j[c]; + aa[nnz] = a[k + s * m]; + nnz++; + } + ii[k + 1] = nnz; + } + + PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)D), m, n, ii, jj, aa, mat)); + PetscCall(MatDenseRestoreArrayRead(D, &a)); + + aij = (Mat_SeqAIJ *)(*mat)->data; + aij->free_a = PETSC_TRUE; + aij->free_ij = PETSC_TRUE; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* adapted from MatInvertVariableBlockDiagonal_SeqAIJ */ +static PetscErrorCode MatSeqAIJInvertVariableBlockDiagonalMat(Mat A, PetscInt nblocks, const PetscInt *bsizes, Mat *B) +{ + PetscInt n = A->rmap->n, ncnt = 0, ncnt2 = 0, bsizemax = 0, *v_pivots = NULL; + const PetscBool allowzeropivot = PETSC_FALSE; + PetscBool zeropivotdetected = PETSC_FALSE; + const PetscReal shift = 0.0; + PetscInt ipvt[5], *ii, *jj, *indi, *indj; + PetscScalar work[25], *v_work = NULL, *aa, *diag; + PetscLogDouble flops = 0.0; + + PetscFunctionBegin; + PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Not for rectangular matrices"); + for (PetscInt i = 0; i < nblocks; i++) { + ncnt += bsizes[i]; + ncnt2 += PetscSqr(bsizes[i]); + } + PetscCheck(ncnt == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Total blocksizes %" PetscInt_FMT " doesn't match number matrix rows %" PetscInt_FMT, ncnt, n); + for (PetscInt i = 0; i < nblocks; i++) bsizemax = PetscMax(bsizemax, bsizes[i]); + if (bsizemax > 7) PetscCall(PetscMalloc2(bsizemax, &v_work, bsizemax, &v_pivots)); + + PetscCall(PetscMalloc1(n + 1, &ii)); + PetscCall(PetscMalloc1(ncnt2, &jj)); + PetscCall(PetscCalloc1(ncnt2, &aa)); + + ncnt = 0; + ii[0] = 0; + indi = ii; + indj = jj; + diag = aa; + for (PetscInt i = 0; i < nblocks; i++) { + const PetscInt bs = bsizes[i]; + + for (PetscInt k = 0; k < bs; k++) { + indi[k + 1] = indi[k] + bs; + for (PetscInt j = 0; j < bs; j++) indj[k * bs + j] = ncnt + j; + } + PetscCall(MatGetValues(A, bs, indj, bs, indj, diag)); + switch (bs) { + case 1: + *diag = 1.0 / (*diag); + break; + case 2: + PetscCall(PetscKernel_A_gets_inverse_A_2(diag, shift, allowzeropivot, &zeropivotdetected)); + break; + case 3: + PetscCall(PetscKernel_A_gets_inverse_A_3(diag, shift, allowzeropivot, &zeropivotdetected)); + break; + case 4: + PetscCall(PetscKernel_A_gets_inverse_A_4(diag, shift, allowzeropivot, &zeropivotdetected)); + break; + case 5: + PetscCall(PetscKernel_A_gets_inverse_A_5(diag, ipvt, work, shift, allowzeropivot, &zeropivotdetected)); + break; + case 6: + PetscCall(PetscKernel_A_gets_inverse_A_6(diag, shift, allowzeropivot, &zeropivotdetected)); + break; + case 7: + PetscCall(PetscKernel_A_gets_inverse_A_7(diag, shift, allowzeropivot, &zeropivotdetected)); + break; + default: + PetscCall(PetscKernel_A_gets_inverse_A(bs, diag, v_pivots, v_work, allowzeropivot, &zeropivotdetected)); + } + ncnt += bs; + flops += 2.0 * PetscPowInt(bs, 3) / 3.0; + diag += bs * bs; + indj += bs * bs; + indi += bs; + } + PetscCall(PetscLogFlops(flops)); + PetscCall(PetscFree2(v_work, v_pivots)); + PetscCall(MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A), n, n, ii, jj, aa, B)); + { + Mat_SeqAIJ *aij = (Mat_SeqAIJ *)(*B)->data; + aij->free_a = PETSC_TRUE; + aij->free_ij = 
PETSC_TRUE; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatDenseScatter(Mat A, PetscSF sf, Mat B) +{ + const PetscScalar *rarr; + PetscScalar *larr; + PetscSF vsf; + PetscInt n, rld, lld; + + PetscFunctionBegin; + PetscCall(MatGetSize(A, NULL, &n)); + PetscCall(MatDenseGetLDA(A, &rld)); + PetscCall(MatDenseGetLDA(B, &lld)); + PetscCall(MatDenseGetArrayRead(A, &rarr)); + PetscCall(MatDenseGetArrayWrite(B, &larr)); + PetscCall(PetscSFCreateStridedSF(sf, n, rld, lld, &vsf)); + PetscCall(PetscSFBcastBegin(vsf, MPIU_SCALAR, rarr, larr, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(vsf, MPIU_SCALAR, rarr, larr, MPI_REPLACE)); + PetscCall(MatDenseRestoreArrayRead(A, &rarr)); + PetscCall(MatDenseRestoreArrayWrite(B, &larr)); + PetscCall(PetscSFDestroy(&vsf)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode PCBDDCSetUpCorrection(PC pc, Mat *coarse_submat) { - /* pointers to pcis and pcbddc */ PC_IS *pcis = (PC_IS *)pc->data; PC_BDDC *pcbddc = (PC_BDDC *)pc->data; + PCBDDCGraph graph = pcbddc->mat_graph; PCBDDCSubSchurs sub_schurs = pcbddc->sub_schurs; /* submatrices of local problem */ - Mat A_RV, A_VR, A_VV, local_auxmat2_R; + Mat A_RV = NULL, A_VR, A_VV, local_auxmat2_R = NULL; /* submatrices of local coarse problem */ - Mat S_VV, S_CV, S_VC, S_CC; + Mat S_CV = NULL, S_VC = NULL, S_CC = NULL; /* working matrices */ Mat C_CR; + /* additional working stuff */ - PC pc_R; - Mat F, Brhs = NULL; - Vec dummy_vec; - PetscBool isLU, isCHOL, need_benign_correction, sparserhs; - PetscScalar *coarse_submat_vals; /* TODO: use a PETSc matrix */ - PetscScalar *work; - PetscInt *idx_V_B; - PetscInt lda_rhs, n, n_vertices, n_constraints, *p0_lidx_I; - PetscInt i, n_R, n_D, n_B; - PetscScalar one = 1.0, m_one = -1.0; + PC pc_R; + IS is_R, is_V, is_C; + const PetscInt *idx_V, *idx_C; + Mat F, Brhs = NULL; + Vec dummy_vec; + PetscBool isLU, isCHOL, need_benign_correction, sparserhs; + PetscInt *idx_V_B; + PetscInt lda_rhs, n_vertices, n_constraints, *p0_lidx_I; + PetscInt n_eff_vertices, n_eff_constraints; + PetscInt i, n_R, n_D, n_B; + PetscScalar one = 1.0, m_one = -1.0; + + /* Multi-element support */ + PetscBool multi_element = graph->multi_element; + PetscInt *V_to_eff_V = NULL, *C_to_eff_C = NULL; + PetscInt *B_eff_V_J = NULL, *R_eff_V_J = NULL, *B_eff_C_J = NULL, *R_eff_C_J = NULL; + IS is_C_perm = NULL; + PetscInt n_C_bss = 0, *C_bss = NULL; + Mat coarse_phi_multi; PetscFunctionBegin; PetscCheck(pcbddc->symmetric_primal || !pcbddc->benign_n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-symmetric primal basis computation with benign trick not yet implemented"); @@ -3856,16 +4228,136 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(ISGlobalToLocalMappingApply(pcis->BtoNmap, IS_GTOLM_DROP, n_vertices, pcbddc->local_primal_ref_node, &i, idx_V_B)); PetscCheck(i == n_vertices, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Error in boundary numbering for BDDC vertices! 
%" PetscInt_FMT " != %" PetscInt_FMT, n_vertices, i); + /* these two cases still need to be optimized */ + if (pcbddc->benign_saddle_point || !pcbddc->symmetric_primal) multi_element = PETSC_FALSE; + /* Subdomain contribution (Non-overlapping) to coarse matrix */ - PetscCall(PetscCalloc1(pcbddc->local_primal_size * pcbddc->local_primal_size, &coarse_submat_vals)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_vertices, n_vertices, coarse_submat_vals, &S_VV)); - PetscCall(MatDenseSetLDA(S_VV, pcbddc->local_primal_size)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_constraints, n_vertices, PetscSafePointerPlusOffset(coarse_submat_vals, n_vertices), &S_CV)); - PetscCall(MatDenseSetLDA(S_CV, pcbddc->local_primal_size)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_vertices, n_constraints, PetscSafePointerPlusOffset(coarse_submat_vals, pcbddc->local_primal_size * n_vertices), &S_VC)); - PetscCall(MatDenseSetLDA(S_VC, pcbddc->local_primal_size)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_constraints, n_constraints, PetscSafePointerPlusOffset(coarse_submat_vals, (pcbddc->local_primal_size + 1) * n_vertices), &S_CC)); - PetscCall(MatDenseSetLDA(S_CC, pcbddc->local_primal_size)); + if (multi_element) { + PetscCheck(!pcbddc->benign_n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not yet implemented"); + + PetscCall(MatCreate(PETSC_COMM_SELF, coarse_submat)); + PetscCall(MatSetSizes(*coarse_submat, pcbddc->local_primal_size, pcbddc->local_primal_size, pcbddc->local_primal_size, pcbddc->local_primal_size)); + PetscCall(MatSetType(*coarse_submat, MATSEQAIJ)); + PetscCall(MatSetOption(*coarse_submat, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE)); + PetscCall(MatSetOption(*coarse_submat, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE)); + + /* group vertices and constraints by subdomain id */ + const PetscInt *vidxs = pcbddc->primal_indices_local_idxs; + const PetscInt *cidxs = pcbddc->primal_indices_local_idxs + n_vertices; + PetscInt *count_eff, *V_eff_to_V, *C_eff_to_C, *nnz; + PetscInt n_el = PetscMax(graph->n_local_subs, 1); + + PetscCall(PetscCalloc1(2 * n_el, &count_eff)); + PetscCall(PetscMalloc1(n_vertices, &V_to_eff_V)); + PetscCall(PetscMalloc1(n_constraints, &C_to_eff_C)); + for (PetscInt i = 0; i < n_vertices; i++) { + PetscInt s = 2 * graph->nodes[vidxs[i]].local_sub; + + V_to_eff_V[i] = count_eff[s]; + count_eff[s] += 1; + } + for (PetscInt i = 0; i < n_constraints; i++) { + PetscInt s = 2 * graph->nodes[cidxs[i]].local_sub + 1; + + C_to_eff_C[i] = count_eff[s]; + count_eff[s] += 1; + } + + /* preallocation */ + PetscCall(PetscMalloc1(n_vertices + n_constraints, &nnz)); + for (PetscInt i = 0; i < n_vertices; i++) { + PetscInt s = 2 * graph->nodes[vidxs[i]].local_sub; + + nnz[i] = count_eff[s] + count_eff[s + 1]; + } + for (PetscInt i = 0; i < n_constraints; i++) { + PetscInt s = 2 * graph->nodes[cidxs[i]].local_sub; + + nnz[i + n_vertices] = count_eff[s] + count_eff[s + 1]; + } + PetscCall(MatSeqAIJSetPreallocation(*coarse_submat, 0, nnz)); + PetscCall(PetscFree(nnz)); + + n_eff_vertices = 0; + n_eff_constraints = 0; + for (PetscInt i = 0; i < n_el; i++) { + n_eff_vertices = PetscMax(n_eff_vertices, count_eff[2 * i]); + n_eff_constraints = PetscMax(n_eff_constraints, count_eff[2 * i + 1]); + count_eff[2 * i] = 0; + count_eff[2 * i + 1] = 0; + } + + const PetscInt *idx; + PetscCall(PetscMalloc2(n_el * n_eff_vertices, &V_eff_to_V, n_el * n_eff_constraints, &C_eff_to_C)); + + for (PetscInt i = 0; i < n_vertices; i++) { + const PetscInt e = graph->nodes[vidxs[i]].local_sub; + const PetscInt s = 2 * 
e; + + V_eff_to_V[e * n_eff_vertices + count_eff[s]] = i; + count_eff[s] += 1; + } + for (PetscInt i = 0; i < n_constraints; i++) { + const PetscInt e = graph->nodes[cidxs[i]].local_sub; + const PetscInt s = 2 * e + 1; + + C_eff_to_C[e * n_eff_constraints + count_eff[s]] = i; + count_eff[s] += 1; + } + + PetscCall(PetscMalloc1(n_R * n_eff_vertices, &R_eff_V_J)); + PetscCall(PetscMalloc1(n_R * n_eff_constraints, &R_eff_C_J)); + PetscCall(PetscMalloc1(n_B * n_eff_vertices, &B_eff_V_J)); + PetscCall(PetscMalloc1(n_B * n_eff_constraints, &B_eff_C_J)); + for (PetscInt i = 0; i < n_R * n_eff_vertices; i++) R_eff_V_J[i] = -1; + for (PetscInt i = 0; i < n_R * n_eff_constraints; i++) R_eff_C_J[i] = -1; + for (PetscInt i = 0; i < n_B * n_eff_vertices; i++) B_eff_V_J[i] = -1; + for (PetscInt i = 0; i < n_B * n_eff_constraints; i++) B_eff_C_J[i] = -1; + + PetscCall(ISGetIndices(pcbddc->is_R_local, &idx)); + for (PetscInt i = 0; i < n_R; i++) { + const PetscInt e = graph->nodes[idx[i]].local_sub; + const PetscInt s = 2 * e; + PetscInt j; + + for (j = 0; j < count_eff[s]; j++) R_eff_V_J[i * n_eff_vertices + j] = V_eff_to_V[e * n_eff_vertices + j]; + for (j = 0; j < count_eff[s + 1]; j++) R_eff_C_J[i * n_eff_constraints + j] = C_eff_to_C[e * n_eff_constraints + j]; + } + PetscCall(ISRestoreIndices(pcbddc->is_R_local, &idx)); + PetscCall(ISGetIndices(pcis->is_B_local, &idx)); + for (PetscInt i = 0; i < n_B; i++) { + const PetscInt e = graph->nodes[idx[i]].local_sub; + const PetscInt s = 2 * e; + PetscInt j; + + for (j = 0; j < count_eff[s]; j++) B_eff_V_J[i * n_eff_vertices + j] = V_eff_to_V[e * n_eff_vertices + j]; + for (j = 0; j < count_eff[s + 1]; j++) B_eff_C_J[i * n_eff_constraints + j] = C_eff_to_C[e * n_eff_constraints + j]; + } + PetscCall(ISRestoreIndices(pcis->is_B_local, &idx)); + + /* permutation and blocksizes for block invert of S_CC */ + PetscInt *idxp; + + PetscCall(PetscMalloc1(n_constraints, &idxp)); + PetscCall(PetscMalloc1(n_el, &C_bss)); + n_C_bss = 0; + for (PetscInt e = 0, cnt = 0; e < n_el; e++) { + const PetscInt nc = count_eff[2 * e + 1]; + + if (nc) C_bss[n_C_bss++] = nc; + for (PetscInt c = 0; c < nc; c++) { idxp[cnt + c] = C_eff_to_C[e * n_eff_constraints + c]; } + cnt += nc; + } + + PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_constraints, idxp, PETSC_OWN_POINTER, &is_C_perm)); + + PetscCall(PetscFree2(V_eff_to_V, C_eff_to_C)); + PetscCall(PetscFree(count_eff)); + } else { + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, pcbddc->local_primal_size, pcbddc->local_primal_size, NULL, coarse_submat)); + n_eff_constraints = n_constraints; + n_eff_vertices = n_vertices; + } /* determine if can use MatSolve routines instead of calling KSPSolve on ksp_R */ PetscCall(KSPGetPC(pcbddc->ksp_R, &pc_R)); @@ -3890,23 +4382,13 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) /* determine if we can use a sparse right-hand side */ sparserhs = PETSC_FALSE; - if (F) { + if (F && !multi_element) { MatSolverType solver; PetscCall(MatFactorGetSolverType(F, &solver)); PetscCall(PetscStrcmp(solver, MATSOLVERMUMPS, &sparserhs)); } - /* allocate workspace */ - n = 0; - if (n_constraints) n += lda_rhs * n_constraints; - if (n_vertices) { - n = PetscMax(2 * lda_rhs * n_vertices, n); - n = PetscMax((lda_rhs + n_B) * n_vertices, n); - } - if (!pcbddc->symmetric_primal) n = PetscMax(2 * lda_rhs * pcbddc->local_primal_size, n); - PetscCall(PetscMalloc1(n, &work)); - /* create dummy vector to modify rhs and sol of MatMatSolve (work array will never be used) */ dummy_vec = NULL; 
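/* [Editorial sketch, not part of the patch] The multi_element path above packs primal dofs element by element:
   count_eff[2*e] and count_eff[2*e+1] hold the number of vertices and constraints owned by element e, while
   V_to_eff_V[i] (resp. C_to_eff_C[i]) gives the compact slot of primal dof i within its element. Dense work
   matrices then need only n_eff_vertices = max_e count_eff[2*e] (resp. n_eff_constraints) columns instead of
   n_vertices (resp. n_constraints). The R_eff_*_J/B_eff_*_J arrays record, for each local row, the primal index
   that every compact column slot expands to (-1 marks an unused slot); for an R-dof i owned by element e the
   loop above amounts to

     for (j = 0; j < count_eff[2 * e]; j++) R_eff_V_J[i * n_eff_vertices + j] = V_eff_to_V[e * n_eff_vertices + j];

   so that MatCreateSeqAIJFromDenseExpand() can later scatter a compact lda_rhs x n_eff_vertices dense block
   into the full lda_rhs x n_vertices sparse layout, dropping -1 slots and explicit zeros. */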
if (need_benign_correction && lda_rhs != n_R && F) { @@ -3918,30 +4400,37 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatDestroy(&pcbddc->local_auxmat1)); PetscCall(MatDestroy(&pcbddc->local_auxmat2)); + PetscCall(ISCreateStride(PETSC_COMM_SELF, n_R, 0, 1, &is_R)); + PetscCall(ISCreateStride(PETSC_COMM_SELF, n_vertices, 0, 1, &is_V)); + PetscCall(ISCreateStride(PETSC_COMM_SELF, n_constraints, n_vertices, 1, &is_C)); + PetscCall(ISGetIndices(is_V, &idx_V)); + PetscCall(ISGetIndices(is_C, &idx_C)); + /* Precompute stuffs needed for preprocessing and application of BDDC*/ if (n_constraints) { - Mat M3, C_B; - IS is_aux; + Mat C_B; /* Extract constraints on R nodes: C_{CR} */ - PetscCall(ISCreateStride(PETSC_COMM_SELF, n_constraints, n_vertices, 1, &is_aux)); - PetscCall(MatCreateSubMatrix(pcbddc->ConstraintMatrix, is_aux, pcbddc->is_R_local, MAT_INITIAL_MATRIX, &C_CR)); - PetscCall(MatCreateSubMatrix(pcbddc->ConstraintMatrix, is_aux, pcis->is_B_local, MAT_INITIAL_MATRIX, &C_B)); + PetscCall(MatCreateSubMatrix(pcbddc->ConstraintMatrix, is_C, pcbddc->is_R_local, MAT_INITIAL_MATRIX, &C_CR)); + PetscCall(MatCreateSubMatrix(pcbddc->ConstraintMatrix, is_C, pcis->is_B_local, MAT_INITIAL_MATRIX, &C_B)); /* Assemble local_auxmat2_R = (- A_{RR}^{-1} C^T_{CR}) needed by BDDC setup */ /* Assemble pcbddc->local_auxmat2 = R_to_B (- A_{RR}^{-1} C^T_{CR}) needed by BDDC application */ if (!sparserhs) { - PetscCall(PetscArrayzero(work, lda_rhs * n_constraints)); + PetscScalar *marr; + + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_eff_constraints, NULL, &Brhs)); + PetscCall(MatDenseGetArrayWrite(Brhs, &marr)); for (i = 0; i < n_constraints; i++) { const PetscScalar *row_cmat_values; const PetscInt *row_cmat_indices; - PetscInt size_of_constraint, j; + PetscInt size_of_constraint, j, col = C_to_eff_C ? C_to_eff_C[i] : i; PetscCall(MatGetRow(C_CR, i, &size_of_constraint, &row_cmat_indices, &row_cmat_values)); - for (j = 0; j < size_of_constraint; j++) work[row_cmat_indices[j] + i * lda_rhs] = -row_cmat_values[j]; + for (j = 0; j < size_of_constraint; j++) marr[row_cmat_indices[j] + col * lda_rhs] = -row_cmat_values[j]; PetscCall(MatRestoreRow(C_CR, i, &size_of_constraint, &row_cmat_indices, &row_cmat_values)); } - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_constraints, work, &Brhs)); + PetscCall(MatDenseRestoreArrayWrite(Brhs, &marr)); } else { Mat tC_CR; @@ -3964,7 +4453,7 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatCreateTranspose(tC_CR, &Brhs)); PetscCall(MatDestroy(&tC_CR)); } - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_constraints, NULL, &local_auxmat2_R)); + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_eff_constraints, NULL, &local_auxmat2_R)); if (F) { if (need_benign_correction) { PCBDDCReuseSolvers reuse_solver = sub_schurs->reuse_solver; @@ -3977,15 +4466,16 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscScalar *marr; PCBDDCReuseSolvers reuse_solver = sub_schurs->reuse_solver; + /* XXX multi_element? 
*/ PetscCall(MatDenseGetArray(local_auxmat2_R, &marr)); if (lda_rhs != n_R) { - for (i = 0; i < n_constraints; i++) { + for (i = 0; i < n_eff_constraints; i++) { PetscCall(VecPlaceArray(dummy_vec, marr + i * lda_rhs)); PetscCall(PCBDDCReuseSolversBenignAdapt(reuse_solver, dummy_vec, NULL, PETSC_TRUE, PETSC_TRUE)); PetscCall(VecResetArray(dummy_vec)); } } else { - for (i = 0; i < n_constraints; i++) { + for (i = 0; i < n_eff_constraints; i++) { PetscCall(VecPlaceArray(pcbddc->vec1_R, marr + i * lda_rhs)); PetscCall(PCBDDCReuseSolversBenignAdapt(reuse_solver, pcbddc->vec1_R, NULL, PETSC_TRUE, PETSC_TRUE)); PetscCall(VecResetArray(pcbddc->vec1_R)); @@ -3994,24 +4484,28 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatDenseRestoreArray(local_auxmat2_R, &marr)); } } else { - PetscScalar *marr; + const PetscScalar *barr; + PetscScalar *marr; + PetscCall(MatDenseGetArrayRead(Brhs, &barr)); PetscCall(MatDenseGetArray(local_auxmat2_R, &marr)); - for (i = 0; i < n_constraints; i++) { - PetscCall(VecPlaceArray(pcbddc->vec1_R, work + i * lda_rhs)); + for (i = 0; i < n_eff_constraints; i++) { + PetscCall(VecPlaceArray(pcbddc->vec1_R, barr + i * lda_rhs)); PetscCall(VecPlaceArray(pcbddc->vec2_R, marr + i * lda_rhs)); PetscCall(KSPSolve(pcbddc->ksp_R, pcbddc->vec1_R, pcbddc->vec2_R)); PetscCall(KSPCheckSolve(pcbddc->ksp_R, pc, pcbddc->vec2_R)); PetscCall(VecResetArray(pcbddc->vec1_R)); PetscCall(VecResetArray(pcbddc->vec2_R)); } + PetscCall(MatDenseRestoreArrayRead(Brhs, &barr)); PetscCall(MatDenseRestoreArray(local_auxmat2_R, &marr)); } if (sparserhs) PetscCall(MatScale(C_CR, -1.0)); PetscCall(MatDestroy(&Brhs)); + /* Assemble explicitly S_CC = ( C_{CR} A_{RR}^{-1} C^T_{CR})^{-1} */ if (!pcbddc->switch_static) { - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, n_constraints, NULL, &pcbddc->local_auxmat2)); - for (i = 0; i < n_constraints; i++) { + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, n_eff_constraints, NULL, &pcbddc->local_auxmat2)); + for (i = 0; i < n_eff_constraints; i++) { Vec r, b; PetscCall(MatDenseGetColumnVecRead(local_auxmat2_R, i, &r)); PetscCall(MatDenseGetColumnVec(pcbddc->local_auxmat2, i, &b)); @@ -4020,34 +4514,60 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatDenseRestoreColumnVec(pcbddc->local_auxmat2, i, &b)); PetscCall(MatDenseRestoreColumnVecRead(local_auxmat2_R, i, &r)); } - PetscCall(MatMatMult(C_B, pcbddc->local_auxmat2, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &M3)); + if (multi_element) { + Mat T; + + PetscCall(MatCreateSeqAIJFromDenseExpand(local_auxmat2_R, n_constraints, R_eff_C_J, &T)); + PetscCall(MatDestroy(&local_auxmat2_R)); + local_auxmat2_R = T; + PetscCall(MatCreateSeqAIJFromDenseExpand(pcbddc->local_auxmat2, n_constraints, B_eff_C_J, &T)); + PetscCall(MatDestroy(&pcbddc->local_auxmat2)); + pcbddc->local_auxmat2 = T; + } + PetscCall(MatMatMult(C_B, pcbddc->local_auxmat2, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &S_CC)); } else { - if (lda_rhs != n_R) { - IS dummy; + if (multi_element) { + Mat T; - PetscCall(ISCreateStride(PETSC_COMM_SELF, n_R, 0, 1, &dummy)); - PetscCall(MatCreateSubMatrix(local_auxmat2_R, dummy, NULL, MAT_INITIAL_MATRIX, &pcbddc->local_auxmat2)); - PetscCall(ISDestroy(&dummy)); + PetscCall(MatCreateSeqAIJFromDenseExpand(local_auxmat2_R, n_constraints, R_eff_C_J, &T)); + PetscCall(MatDestroy(&local_auxmat2_R)); + local_auxmat2_R = T; + } + if (lda_rhs != n_R) { + PetscCall(MatCreateSubMatrix(local_auxmat2_R, is_R, NULL, MAT_INITIAL_MATRIX, 
&pcbddc->local_auxmat2)); } else { PetscCall(PetscObjectReference((PetscObject)local_auxmat2_R)); pcbddc->local_auxmat2 = local_auxmat2_R; } - PetscCall(MatMatMult(C_CR, pcbddc->local_auxmat2, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &M3)); + PetscCall(MatMatMult(C_CR, pcbddc->local_auxmat2, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &S_CC)); } - PetscCall(ISDestroy(&is_aux)); - /* Assemble explicitly S_CC = ( C_{CR} A_{RR}^{-1} C^T_{CR})^{-1} */ - PetscCall(MatScale(M3, m_one)); - if (isCHOL) { - PetscCall(MatCholeskyFactor(M3, NULL, NULL)); + PetscCall(MatScale(S_CC, m_one)); + if (multi_element) { + Mat T, T2; + IS isp, ispi; + + isp = is_C_perm; + + PetscCall(ISInvertPermutation(isp, PETSC_DECIDE, &ispi)); + PetscCall(MatPermute(S_CC, isp, isp, &T)); + PetscCall(MatSeqAIJInvertVariableBlockDiagonalMat(T, n_C_bss, C_bss, &T2)); + PetscCall(MatDestroy(&T)); + PetscCall(MatDestroy(&S_CC)); + PetscCall(MatPermute(T2, ispi, ispi, &S_CC)); + PetscCall(MatDestroy(&T2)); + PetscCall(ISDestroy(&ispi)); } else { - PetscCall(MatLUFactor(M3, NULL, NULL, NULL)); + if (isCHOL) { + PetscCall(MatCholeskyFactor(S_CC, NULL, NULL)); + } else { + PetscCall(MatLUFactor(S_CC, NULL, NULL, NULL)); + } + PetscCall(MatSeqDenseInvertFactors_Private(S_CC)); } - PetscCall(MatSeqDenseInvertFactors_Private(M3)); /* Assemble local_auxmat1 = S_CC*C_{CB} needed by BDDC application in KSP and in preproc */ - PetscCall(MatMatMult(M3, C_B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &pcbddc->local_auxmat1)); + PetscCall(MatMatMult(S_CC, C_B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &pcbddc->local_auxmat1)); PetscCall(MatDestroy(&C_B)); - PetscCall(MatCopy(M3, S_CC, SAME_NONZERO_PATTERN)); /* S_CC can have a different LDA, MatMatSolve doesn't support it */ - PetscCall(MatDestroy(&M3)); + PetscCall(MatSetValuesSubMat(*coarse_submat, S_CC, n_constraints, idx_C, n_constraints, idx_C, INSERT_VALUES)); } /* Get submatrices from subdomain matrix */ @@ -4055,8 +4575,7 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) PetscBool oldpin; #endif - PetscBool isaij; - IS is_aux; + IS is_aux; if (sub_schurs && sub_schurs->reuse_solver) { /* is_R_local is not sorted, ISComplement doesn't like it */ IS tis; @@ -4074,63 +4593,17 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatBindToCPU(pcbddc->local_mat, PETSC_TRUE)); PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcbddc->is_R_local, is_aux, MAT_INITIAL_MATRIX, &A_RV)); PetscCall(MatCreateSubMatrix(pcbddc->local_mat, is_aux, pcbddc->is_R_local, MAT_INITIAL_MATRIX, &A_VR)); - PetscCall(PetscObjectBaseTypeCompare((PetscObject)A_VR, MATSEQAIJ, &isaij)); - if (!isaij) { /* TODO REMOVE: MatMatMult(A_VR,A_RRmA_RV) below may raise an error */ - PetscCall(MatConvert(A_VR, MATSEQAIJ, MAT_INPLACE_MATRIX, &A_VR)); - } + /* TODO REMOVE: MatMatMult(A_VR,A_RRmA_RV) below may raise an error */ + PetscCall(MatConvert(A_VR, MATSEQAIJ, MAT_INPLACE_MATRIX, &A_VR)); PetscCall(MatCreateSubMatrix(pcbddc->local_mat, is_aux, is_aux, MAT_INITIAL_MATRIX, &A_VV)); #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) PetscCall(MatBindToCPU(pcbddc->local_mat, oldpin)); #endif PetscCall(ISDestroy(&is_aux)); } + PetscCall(ISDestroy(&is_C_perm)); + PetscCall(PetscFree(C_bss)); - /* Matrix of coarse basis functions (local) */ - if (pcbddc->coarse_phi_B) { - PetscInt on_B, on_primal, on_D = n_D; - if (pcbddc->coarse_phi_D) PetscCall(MatGetSize(pcbddc->coarse_phi_D, &on_D, NULL)); - 
PetscCall(MatGetSize(pcbddc->coarse_phi_B, &on_B, &on_primal)); - if (on_B != n_B || on_primal != pcbddc->local_primal_size || on_D != n_D) { - PetscScalar *marray; - - PetscCall(MatDenseGetArray(pcbddc->coarse_phi_B, &marray)); - PetscCall(PetscFree(marray)); - PetscCall(MatDestroy(&pcbddc->coarse_phi_B)); - PetscCall(MatDestroy(&pcbddc->coarse_psi_B)); - PetscCall(MatDestroy(&pcbddc->coarse_phi_D)); - PetscCall(MatDestroy(&pcbddc->coarse_psi_D)); - } - } - - if (!pcbddc->coarse_phi_B) { - PetscScalar *marr; - - /* memory size */ - n = n_B * pcbddc->local_primal_size; - if (pcbddc->switch_static || pcbddc->dbg_flag) n += n_D * pcbddc->local_primal_size; - if (!pcbddc->symmetric_primal) n *= 2; - PetscCall(PetscCalloc1(n, &marr)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, pcbddc->local_primal_size, marr, &pcbddc->coarse_phi_B)); - marr = PetscSafePointerPlusOffset(marr, n_B * pcbddc->local_primal_size); - if (pcbddc->switch_static || pcbddc->dbg_flag) { - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_D, pcbddc->local_primal_size, marr, &pcbddc->coarse_phi_D)); - marr += n_D * pcbddc->local_primal_size; - } - if (!pcbddc->symmetric_primal) { - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, pcbddc->local_primal_size, marr, &pcbddc->coarse_psi_B)); - marr += n_B * pcbddc->local_primal_size; - if (pcbddc->switch_static || pcbddc->dbg_flag) PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_D, pcbddc->local_primal_size, marr, &pcbddc->coarse_psi_D)); - } else { - PetscCall(PetscObjectReference((PetscObject)pcbddc->coarse_phi_B)); - pcbddc->coarse_psi_B = pcbddc->coarse_phi_B; - if (pcbddc->switch_static || pcbddc->dbg_flag) { - PetscCall(PetscObjectReference((PetscObject)pcbddc->coarse_phi_D)); - pcbddc->coarse_psi_D = pcbddc->coarse_phi_D; - } - } - } - - /* We are now ready to evaluate coarse basis functions and subdomain contribution to coarse problem */ p0_lidx_I = NULL; if (pcbddc->benign_n && (pcbddc->switch_static || pcbddc->dbg_flag)) { const PetscInt *idxs; @@ -4141,17 +4614,36 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(ISRestoreIndices(pcis->is_I_local, &idxs)); } + /* We are now ready to evaluate coarse basis functions and subdomain contribution to coarse problem */ + + /* Matrices of coarse basis functions (local) */ + PetscCall(MatDestroy(&pcbddc->coarse_phi_B)); + PetscCall(MatDestroy(&pcbddc->coarse_psi_B)); + PetscCall(MatDestroy(&pcbddc->coarse_phi_D)); + PetscCall(MatDestroy(&pcbddc->coarse_psi_D)); + if (!multi_element) { + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, pcbddc->local_primal_size, NULL, &pcbddc->coarse_phi_B)); + if (pcbddc->switch_static || pcbddc->dbg_flag) PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_D, pcbddc->local_primal_size, NULL, &pcbddc->coarse_phi_D)); + coarse_phi_multi = NULL; + } else { /* Create temporary NEST matrix to hold coarse basis functions blocks */ + IS is_rows[2] = {pcbddc->is_R_local, NULL}; + IS is_cols[2] = {is_V, is_C}; + + PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_vertices, pcbddc->local_primal_ref_node, PETSC_USE_POINTER, &is_rows[1])); + PetscCall(MatCreateNest(PETSC_COMM_SELF, 2, is_rows, 2, is_cols, NULL, &coarse_phi_multi)); + PetscCall(ISDestroy(&is_rows[1])); + } + /* vertices */ if (n_vertices) { PetscBool restoreavr = PETSC_FALSE; + Mat A_RRmA_RV = NULL; - PetscCall(MatConvert(A_VV, MATDENSE, MAT_INPLACE_MATRIX, &A_VV)); + PetscCall(MatSetValuesSubMat(*coarse_submat, A_VV, n_vertices, idx_V, n_vertices, idx_V, ADD_VALUES)); + PetscCall(MatDestroy(&A_VV)); 
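/* [Editorial note, not part of the patch] The vertex block of the local coarse matrix is the Schur complement
   S_VV = A_VV - A_VR A_RR^{-1} A_RV. A_VV has just been accumulated into *coarse_submat with ADD_VALUES; in the
   n_R branch below, A_RV is scaled by -1 so that the MatMatSolve/KSPSolve sweep yields
   A_RRmA_RV = -A_RR^{-1} A_RV, and the closing MatMatMult(A_VR, A_RRmA_RV) adds the remaining
   -A_VR A_RR^{-1} A_RV contribution. The same A_RRmA_RV columns are reused afterwards as the interior part of
   the vertex coarse basis functions (scattered through R_to_B/R_to_D, or hung on the MATNEST
   coarse_phi_multi in the multi_element case). */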
if (n_R) { - Mat A_RRmA_RV, A_RV_bcorr = NULL, S_VVt; /* S_VVt with LDA=N */ - PetscBLASInt B_N, B_one = 1; - const PetscScalar *x; - PetscScalar *y; + Mat A_RV_bcorr = NULL, S_VV; PetscCall(MatScale(A_RV, m_one)); if (need_benign_correction) { @@ -4169,33 +4661,36 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(ISDestroy(&is_p0)); } - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_vertices, work, &A_RRmA_RV)); + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_eff_vertices, NULL, &A_RRmA_RV)); if (!sparserhs || need_benign_correction) { - if (lda_rhs == n_R) { + if (lda_rhs == n_R && !multi_element) { PetscCall(MatConvert(A_RV, MATDENSE, MAT_INPLACE_MATRIX, &A_RV)); } else { + Mat T; PetscScalar *av, *array; const PetscInt *xadj, *adjncy; PetscInt n; PetscBool flg_row; - array = work + lda_rhs * n_vertices; - PetscCall(PetscArrayzero(array, lda_rhs * n_vertices)); + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_eff_vertices, NULL, &T)); + PetscCall(MatDenseGetArrayWrite(T, &array)); PetscCall(MatConvert(A_RV, MATSEQAIJ, MAT_INPLACE_MATRIX, &A_RV)); PetscCall(MatGetRowIJ(A_RV, 0, PETSC_FALSE, PETSC_FALSE, &n, &xadj, &adjncy, &flg_row)); PetscCall(MatSeqAIJGetArray(A_RV, &av)); for (i = 0; i < n; i++) { PetscInt j; - for (j = xadj[i]; j < xadj[i + 1]; j++) array[lda_rhs * adjncy[j] + i] = av[j]; + for (j = xadj[i]; j < xadj[i + 1]; j++) array[lda_rhs * (V_to_eff_V ? V_to_eff_V[adjncy[j]] : adjncy[j]) + i] = av[j]; } PetscCall(MatRestoreRowIJ(A_RV, 0, PETSC_FALSE, PETSC_FALSE, &n, &xadj, &adjncy, &flg_row)); + PetscCall(MatDenseRestoreArrayWrite(T, &array)); PetscCall(MatDestroy(&A_RV)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_vertices, array, &A_RV)); + A_RV = T; } if (need_benign_correction) { PCBDDCReuseSolvers reuse_solver = sub_schurs->reuse_solver; PetscScalar *marr; + /* XXX multi_element */ PetscCall(MatDenseGetArray(A_RV, &marr)); /* need \Phi^T A_RV = (I+L)A_RV, L given by @@ -4263,13 +4758,13 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatDenseGetArray(Brhs, &marr)); if (lda_rhs != n_R) { - for (i = 0; i < n_vertices; i++) { + for (i = 0; i < n_eff_vertices; i++) { PetscCall(VecPlaceArray(dummy_vec, marr + i * lda_rhs)); PetscCall(PCBDDCReuseSolversBenignAdapt(reuse_solver, dummy_vec, NULL, PETSC_FALSE, PETSC_TRUE)); PetscCall(VecResetArray(dummy_vec)); } } else { - for (i = 0; i < n_vertices; i++) { + for (i = 0; i < n_eff_vertices; i++) { PetscCall(VecPlaceArray(pcbddc->vec1_R, marr + i * lda_rhs)); PetscCall(PCBDDCReuseSolversBenignAdapt(reuse_solver, pcbddc->vec1_R, NULL, PETSC_FALSE, PETSC_TRUE)); PetscCall(VecResetArray(pcbddc->vec1_R)); @@ -4286,13 +4781,13 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatDenseGetArray(A_RRmA_RV, &marr)); if (lda_rhs != n_R) { - for (i = 0; i < n_vertices; i++) { + for (i = 0; i < n_eff_vertices; i++) { PetscCall(VecPlaceArray(dummy_vec, marr + i * lda_rhs)); PetscCall(PCBDDCReuseSolversBenignAdapt(reuse_solver, dummy_vec, NULL, PETSC_TRUE, PETSC_TRUE)); PetscCall(VecResetArray(dummy_vec)); } } else { - for (i = 0; i < n_vertices; i++) { + for (i = 0; i < n_eff_vertices; i++) { PetscCall(VecPlaceArray(pcbddc->vec1_R, marr + i * lda_rhs)); PetscCall(PCBDDCReuseSolversBenignAdapt(reuse_solver, pcbddc->vec1_R, NULL, PETSC_TRUE, PETSC_TRUE)); PetscCall(VecResetArray(pcbddc->vec1_R)); @@ -4301,16 +4796,21 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar 
**coarse_submat_vals_n) PetscCall(MatDenseRestoreArray(A_RRmA_RV, &marr)); } } else { - PetscCall(MatDenseGetArray(Brhs, &y)); - for (i = 0; i < n_vertices; i++) { - PetscCall(VecPlaceArray(pcbddc->vec1_R, y + i * lda_rhs)); - PetscCall(VecPlaceArray(pcbddc->vec2_R, work + i * lda_rhs)); + const PetscScalar *barr; + PetscScalar *marr; + + PetscCall(MatDenseGetArrayRead(Brhs, &barr)); + PetscCall(MatDenseGetArray(A_RRmA_RV, &marr)); + for (i = 0; i < n_eff_vertices; i++) { + PetscCall(VecPlaceArray(pcbddc->vec1_R, barr + i * lda_rhs)); + PetscCall(VecPlaceArray(pcbddc->vec2_R, marr + i * lda_rhs)); PetscCall(KSPSolve(pcbddc->ksp_R, pcbddc->vec1_R, pcbddc->vec2_R)); PetscCall(KSPCheckSolve(pcbddc->ksp_R, pc, pcbddc->vec2_R)); PetscCall(VecResetArray(pcbddc->vec1_R)); PetscCall(VecResetArray(pcbddc->vec2_R)); } - PetscCall(MatDenseRestoreArray(Brhs, &y)); + PetscCall(MatDenseRestoreArrayRead(Brhs, &barr)); + PetscCall(MatDenseRestoreArray(A_RRmA_RV, &marr)); } PetscCall(MatDestroy(&A_RV)); PetscCall(MatDestroy(&Brhs)); @@ -4318,51 +4818,68 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) if (n_constraints) { Mat B; - PetscCall(PetscArrayzero(work + lda_rhs * n_vertices, n_B * n_vertices)); - for (i = 0; i < n_vertices; i++) { - PetscCall(VecPlaceArray(pcbddc->vec1_R, work + i * lda_rhs)); - PetscCall(VecPlaceArray(pcis->vec1_B, work + lda_rhs * n_vertices + i * n_B)); - PetscCall(VecScatterBegin(pcbddc->R_to_B, pcbddc->vec1_R, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(pcbddc->R_to_B, pcbddc->vec1_R, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecResetArray(pcis->vec1_B)); - PetscCall(VecResetArray(pcbddc->vec1_R)); + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, n_eff_vertices, NULL, &B)); + PetscCall(MatDenseScatter(A_RRmA_RV, pcbddc->R_to_B, B)); + + /* S_CV = pcbddc->local_auxmat1 * B */ + if (multi_element) { + Mat T; + + PetscCall(MatCreateSeqAIJFromDenseExpand(B, n_vertices, B_eff_V_J, &T)); + PetscCall(MatDestroy(&B)); + B = T; } - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, n_vertices, work + lda_rhs * n_vertices, &B)); - /* Reuse dense S_C = pcbddc->local_auxmat1 * B */ - PetscCall(MatProductCreateWithMat(pcbddc->local_auxmat1, B, NULL, S_CV)); + PetscCall(MatProductCreate(pcbddc->local_auxmat1, B, NULL, &S_CV)); PetscCall(MatProductSetType(S_CV, MATPRODUCT_AB)); PetscCall(MatProductSetFromOptions(S_CV)); PetscCall(MatProductSymbolic(S_CV)); PetscCall(MatProductNumeric(S_CV)); PetscCall(MatProductClear(S_CV)); - PetscCall(MatDestroy(&B)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_vertices, work + lda_rhs * n_vertices, &B)); - /* Reuse B = local_auxmat2_R * S_CV */ - PetscCall(MatProductCreateWithMat(local_auxmat2_R, S_CV, NULL, B)); + + /* B = local_auxmat2_R * S_CV */ + PetscCall(MatProductCreate(local_auxmat2_R, S_CV, NULL, &B)); PetscCall(MatProductSetType(B, MATPRODUCT_AB)); PetscCall(MatProductSetFromOptions(B)); PetscCall(MatProductSymbolic(B)); PetscCall(MatProductNumeric(B)); PetscCall(MatScale(S_CV, m_one)); - PetscCall(PetscBLASIntCast(lda_rhs * n_vertices, &B_N)); - PetscCallBLAS("BLASaxpy", BLASaxpy_(&B_N, &one, work + lda_rhs * n_vertices, &B_one, work, &B_one)); + PetscCall(MatSetValuesSubMat(*coarse_submat, S_CV, n_constraints, idx_C, n_vertices, idx_V, INSERT_VALUES)); + + if (multi_element) { + Mat T; + + PetscCall(MatCreateSeqAIJFromDenseExpand(A_RRmA_RV, n_vertices, R_eff_V_J, &T)); + PetscCall(MatDestroy(&A_RRmA_RV)); + A_RRmA_RV = T; + } + 
PetscCall(MatAXPY(A_RRmA_RV, 1.0, B, UNKNOWN_NONZERO_PATTERN)); /* XXX ? */ PetscCall(MatDestroy(&B)); + } else if (multi_element) { + Mat T; + + PetscCall(MatCreateSeqAIJFromDenseExpand(A_RRmA_RV, n_vertices, R_eff_V_J, &T)); + PetscCall(MatDestroy(&A_RRmA_RV)); + A_RRmA_RV = T; } + if (lda_rhs != n_R) { + Mat T; + + PetscCall(MatCreateSubMatrix(A_RRmA_RV, is_R, NULL, MAT_INITIAL_MATRIX, &T)); PetscCall(MatDestroy(&A_RRmA_RV)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_R, n_vertices, work, &A_RRmA_RV)); - PetscCall(MatDenseSetLDA(A_RRmA_RV, lda_rhs)); + A_RRmA_RV = T; } - PetscCall(MatMatMult(A_VR, A_RRmA_RV, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &S_VVt)); + /* need A_VR * \Phi * A_RRmA_RV = A_VR * (I+L)^T * A_RRmA_RV, L given as before */ - if (need_benign_correction) { + if (need_benign_correction) { /* XXX SPARSE */ PCBDDCReuseSolvers reuse_solver = sub_schurs->reuse_solver; - PetscScalar *marr, *sums; + PetscScalar *sums; + const PetscScalar *marr; + PetscCall(MatDenseGetArrayRead(A_RRmA_RV, &marr)); PetscCall(PetscMalloc1(n_vertices, &sums)); - PetscCall(MatDenseGetArray(S_VVt, &marr)); for (i = 0; i < reuse_solver->benign_n; i++) { const PetscScalar *vals; const PetscInt *idxs, *idxs_zero; @@ -4371,133 +4888,111 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(ISGetLocalSize(reuse_solver->benign_zerodiag_subs[i], &nz)); PetscCall(ISGetIndices(reuse_solver->benign_zerodiag_subs[i], &idxs_zero)); for (j = 0; j < n_vertices; j++) { - PetscInt k; sums[j] = 0.; - for (k = 0; k < nz; k++) sums[j] += work[idxs_zero[k] + j * lda_rhs]; + for (PetscInt k = 0; k < nz; k++) sums[j] += marr[idxs_zero[k] + j * n_R]; } PetscCall(MatGetRow(A_RV_bcorr, i, &n, &idxs, &vals)); for (j = 0; j < n; j++) { PetscScalar val = vals[j]; - PetscInt k; - for (k = 0; k < n_vertices; k++) marr[idxs[j] + k * n_vertices] += val * sums[k]; + for (PetscInt k = 0; k < n_vertices; k++) PetscCall(MatSetValue(*coarse_submat, idx_V[idxs[j]], idx_V[k], val * sums[k], ADD_VALUES)); } PetscCall(MatRestoreRow(A_RV_bcorr, i, &n, &idxs, &vals)); PetscCall(ISRestoreIndices(reuse_solver->benign_zerodiag_subs[i], &idxs_zero)); } PetscCall(PetscFree(sums)); - PetscCall(MatDenseRestoreArray(S_VVt, &marr)); PetscCall(MatDestroy(&A_RV_bcorr)); + PetscCall(MatDenseRestoreArrayRead(A_RRmA_RV, &marr)); } - PetscCall(MatDestroy(&A_RRmA_RV)); - PetscCall(PetscBLASIntCast(n_vertices * n_vertices, &B_N)); - PetscCall(MatDenseGetArrayRead(A_VV, &x)); - PetscCall(MatDenseGetArray(S_VVt, &y)); - PetscCallBLAS("BLASaxpy", BLASaxpy_(&B_N, &one, x, &B_one, y, &B_one)); - PetscCall(MatDenseRestoreArrayRead(A_VV, &x)); - PetscCall(MatDenseRestoreArray(S_VVt, &y)); - PetscCall(MatCopy(S_VVt, S_VV, SAME_NONZERO_PATTERN)); - PetscCall(MatDestroy(&S_VVt)); - } else { - PetscCall(MatCopy(A_VV, S_VV, SAME_NONZERO_PATTERN)); + + PetscCall(MatMatMult(A_VR, A_RRmA_RV, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &S_VV)); + PetscCall(MatSetValuesSubMat(*coarse_submat, S_VV, n_vertices, idx_V, n_vertices, idx_V, ADD_VALUES)); + PetscCall(MatDestroy(&S_VV)); } - PetscCall(MatDestroy(&A_VV)); /* coarse basis functions */ - for (i = 0; i < n_vertices; i++) { - Vec v; - PetscScalar one = 1.0, zero = 0.0; - - PetscCall(VecPlaceArray(pcbddc->vec1_R, work + lda_rhs * i)); - PetscCall(MatDenseGetColumnVec(pcbddc->coarse_phi_B, i, &v)); - PetscCall(VecScatterBegin(pcbddc->R_to_B, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(pcbddc->R_to_B, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - if 
(PetscDefined(USE_DEBUG)) { /* The following VecSetValues() expects a sequential matrix */ - PetscMPIInt rank; - PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pcbddc->coarse_phi_B), &rank)); - PetscCheck(rank <= 1, PetscObjectComm((PetscObject)pcbddc->coarse_phi_B), PETSC_ERR_PLIB, "Expected a sequential dense matrix"); - } - PetscCall(VecSetValues(v, 1, &idx_V_B[i], &one, INSERT_VALUES)); - PetscCall(VecAssemblyBegin(v)); /* If v is on device, hope VecSetValues() eventually implemented by a host to device memcopy */ - PetscCall(VecAssemblyEnd(v)); - PetscCall(MatDenseRestoreColumnVec(pcbddc->coarse_phi_B, i, &v)); - - if (pcbddc->switch_static || pcbddc->dbg_flag) { - PetscInt j; - - PetscCall(MatDenseGetColumnVec(pcbddc->coarse_phi_D, i, &v)); - PetscCall(VecScatterBegin(pcbddc->R_to_D, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(pcbddc->R_to_D, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - if (PetscDefined(USE_DEBUG)) { /* The following VecSetValues() expects a sequential matrix */ - PetscMPIInt rank; - PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pcbddc->coarse_phi_D), &rank)); - PetscCheck(rank <= 1, PetscObjectComm((PetscObject)pcbddc->coarse_phi_D), PETSC_ERR_PLIB, "Expected a sequential dense matrix"); + if (coarse_phi_multi) { + Mat Vid; + + PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, n_vertices, n_vertices, 1, NULL, &Vid)); + PetscCall(MatShift_Basic(Vid, 1.0)); + PetscCall(MatNestSetSubMat(coarse_phi_multi, 0, 0, A_RRmA_RV)); + PetscCall(MatNestSetSubMat(coarse_phi_multi, 1, 0, Vid)); + PetscCall(MatDestroy(&Vid)); + } else { + if (A_RRmA_RV) { + PetscCall(MatDenseScatter(A_RRmA_RV, pcbddc->R_to_B, pcbddc->coarse_phi_B)); + if (pcbddc->switch_static || pcbddc->dbg_flag) { + PetscCall(MatDenseScatter(A_RRmA_RV, pcbddc->R_to_D, pcbddc->coarse_phi_D)); + if (pcbddc->benign_n) { + for (i = 0; i < n_vertices; i++) { PetscCall(MatSetValues(pcbddc->coarse_phi_D, pcbddc->benign_n, p0_lidx_I, 1, &i, NULL, INSERT_VALUES)); } + } } - for (j = 0; j < pcbddc->benign_n; j++) PetscCall(VecSetValues(v, 1, &p0_lidx_I[j], &zero, INSERT_VALUES)); - PetscCall(VecAssemblyBegin(v)); - PetscCall(VecAssemblyEnd(v)); - PetscCall(MatDenseRestoreColumnVec(pcbddc->coarse_phi_D, i, &v)); } - PetscCall(VecResetArray(pcbddc->vec1_R)); + for (i = 0; i < n_vertices; i++) PetscCall(MatSetValues(pcbddc->coarse_phi_B, 1, &idx_V_B[i], 1, &i, &one, INSERT_VALUES)); + PetscCall(MatAssemblyBegin(pcbddc->coarse_phi_B, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(pcbddc->coarse_phi_B, MAT_FINAL_ASSEMBLY)); } - /* if n_R == 0 the object is not destroyed */ - PetscCall(MatDestroy(&A_RV)); + PetscCall(MatDestroy(&A_RRmA_RV)); } + PetscCall(MatDestroy(&A_RV)); PetscCall(VecDestroy(&dummy_vec)); if (n_constraints) { - Mat B; + Mat B, B2; - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, lda_rhs, n_constraints, work, &B)); PetscCall(MatScale(S_CC, m_one)); - PetscCall(MatProductCreateWithMat(local_auxmat2_R, S_CC, NULL, B)); + PetscCall(MatProductCreate(local_auxmat2_R, S_CC, NULL, &B)); PetscCall(MatProductSetType(B, MATPRODUCT_AB)); PetscCall(MatProductSetFromOptions(B)); PetscCall(MatProductSymbolic(B)); PetscCall(MatProductNumeric(B)); - PetscCall(MatScale(S_CC, m_one)); if (n_vertices) { if (isCHOL || need_benign_correction) { /* if we can solve the interior problem with cholesky, we should also be fine with transposing here */ - PetscCall(MatTransposeSetPrecursor(S_CV, S_VC)); - PetscCall(MatTranspose(S_CV, MAT_REUSE_MATRIX, &S_VC)); + 
PetscCall(MatTranspose(S_CV, MAT_INITIAL_MATRIX, &S_VC)); } else { - Mat S_VCt; - if (lda_rhs != n_R) { + Mat tB; + + PetscCall(MatCreateSubMatrix(B, is_R, NULL, MAT_INITIAL_MATRIX, &tB)); PetscCall(MatDestroy(&B)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_R, n_constraints, work, &B)); - PetscCall(MatDenseSetLDA(B, lda_rhs)); + B = tB; } - PetscCall(MatMatMult(A_VR, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &S_VCt)); - PetscCall(MatCopy(S_VCt, S_VC, SAME_NONZERO_PATTERN)); - PetscCall(MatDestroy(&S_VCt)); + PetscCall(MatMatMult(A_VR, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &S_VC)); } + PetscCall(MatSetValuesSubMat(*coarse_submat, S_VC, n_vertices, idx_V, n_constraints, idx_C, INSERT_VALUES)); } - PetscCall(MatDestroy(&B)); - /* coarse basis functions */ - for (i = 0; i < n_constraints; i++) { - Vec v; - PetscCall(VecPlaceArray(pcbddc->vec1_R, work + lda_rhs * i)); - PetscCall(MatDenseGetColumnVec(pcbddc->coarse_phi_B, i + n_vertices, &v)); - PetscCall(VecScatterBegin(pcbddc->R_to_B, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(pcbddc->R_to_B, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(MatDenseRestoreColumnVec(pcbddc->coarse_phi_B, i + n_vertices, &v)); + /* coarse basis functions */ + if (coarse_phi_multi) { + PetscCall(MatNestSetSubMat(coarse_phi_multi, 0, 1, B)); + } else { + PetscCall(MatDenseGetSubMatrix(pcbddc->coarse_phi_B, PETSC_DECIDE, PETSC_DECIDE, n_vertices, n_vertices + n_constraints, &B2)); + PetscCall(MatDenseScatter(B, pcbddc->R_to_B, B2)); + PetscCall(MatDenseRestoreSubMatrix(pcbddc->coarse_phi_B, &B2)); if (pcbddc->switch_static || pcbddc->dbg_flag) { - PetscInt j; - PetscScalar zero = 0.0; - PetscCall(MatDenseGetColumnVec(pcbddc->coarse_phi_D, i + n_vertices, &v)); - PetscCall(VecScatterBegin(pcbddc->R_to_D, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(pcbddc->R_to_D, pcbddc->vec1_R, v, INSERT_VALUES, SCATTER_FORWARD)); - for (j = 0; j < pcbddc->benign_n; j++) PetscCall(VecSetValues(v, 1, &p0_lidx_I[j], &zero, INSERT_VALUES)); - PetscCall(VecAssemblyBegin(v)); - PetscCall(VecAssemblyEnd(v)); - PetscCall(MatDenseRestoreColumnVec(pcbddc->coarse_phi_D, i + n_vertices, &v)); + PetscCall(MatDenseGetSubMatrix(pcbddc->coarse_phi_D, PETSC_DECIDE, PETSC_DECIDE, n_vertices, n_vertices + n_constraints, &B2)); + PetscCall(MatDenseScatter(B, pcbddc->R_to_D, B2)); + if (pcbddc->benign_n) { + for (i = 0; i < n_constraints; i++) { PetscCall(MatSetValues(B2, pcbddc->benign_n, p0_lidx_I, 1, &i, NULL, INSERT_VALUES)); } + } + PetscCall(MatDenseRestoreSubMatrix(pcbddc->coarse_phi_D, &B2)); } - PetscCall(VecResetArray(pcbddc->vec1_R)); } + PetscCall(MatDestroy(&B)); + } + + /* assemble sparse coarse basis functions */ + if (coarse_phi_multi) { + Mat T; + + PetscCall(MatConvert(coarse_phi_multi, MATSEQAIJ, MAT_INITIAL_MATRIX, &T)); + PetscCall(MatDestroy(&coarse_phi_multi)); + PetscCall(MatCreateSubMatrix(T, pcis->is_B_local, NULL, MAT_INITIAL_MATRIX, &pcbddc->coarse_phi_B)); + if (pcbddc->switch_static || pcbddc->dbg_flag) { PetscCall(MatCreateSubMatrix(T, pcis->is_I_local, NULL, MAT_INITIAL_MATRIX, &pcbddc->coarse_phi_D)); } + PetscCall(MatDestroy(&T)); } - if (n_constraints) PetscCall(MatDestroy(&local_auxmat2_R)); + PetscCall(MatDestroy(&local_auxmat2_R)); PetscCall(PetscFree(p0_lidx_I)); /* coarse matrix entries relative to B_0 */ @@ -4516,8 +5011,8 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) for (j = 0; j < pcbddc->benign_n; j++) { PetscInt primal_idx = 
pcbddc->local_primal_size - pcbddc->benign_n + j; for (i = 0; i < pcbddc->local_primal_size; i++) { - coarse_submat_vals[primal_idx * pcbddc->local_primal_size + i] = data[i * pcbddc->benign_n + j]; - coarse_submat_vals[i * pcbddc->local_primal_size + primal_idx] = data[i * pcbddc->benign_n + j]; + PetscCall(MatSetValue(*coarse_submat, primal_idx, i, data[i * pcbddc->benign_n + j], INSERT_VALUES)); + PetscCall(MatSetValue(*coarse_submat, i, primal_idx, data[i * pcbddc->benign_n + j], INSERT_VALUES)); } } PetscCall(MatDenseRestoreArrayRead(B0_BPHI, &data)); @@ -4528,14 +5023,17 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) /* compute other basis functions for non-symmetric problems */ if (!pcbddc->symmetric_primal) { Mat B_V = NULL, B_C = NULL; - PetscScalar *marray; + PetscScalar *marray, *work; + /* TODO multi_element MatDenseScatter */ if (n_constraints) { Mat S_CCT, C_CRT; + PetscCall(MatScale(S_CC, m_one)); PetscCall(MatTranspose(C_CR, MAT_INITIAL_MATRIX, &C_CRT)); PetscCall(MatTranspose(S_CC, MAT_INITIAL_MATRIX, &S_CCT)); PetscCall(MatMatMult(C_CRT, S_CCT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &B_C)); + PetscCall(MatConvert(B_C, MATDENSE, MAT_INPLACE_MATRIX, &B_C)); PetscCall(MatDestroy(&S_CCT)); if (n_vertices) { Mat S_VCT; @@ -4543,6 +5041,7 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatTranspose(S_VC, MAT_INITIAL_MATRIX, &S_VCT)); PetscCall(MatMatMult(C_CRT, S_VCT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &B_V)); PetscCall(MatDestroy(&S_VCT)); + PetscCall(MatConvert(B_V, MATDENSE, MAT_INPLACE_MATRIX, &B_V)); } PetscCall(MatDestroy(&C_CRT)); } else { @@ -4569,6 +5068,7 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) } /* currently there's no support for MatTransposeMatSolve(F,B,X) */ + PetscCall(PetscMalloc1(n_R * pcbddc->local_primal_size, &work)); if (n_vertices) { PetscCall(MatDenseGetArray(B_V, &marray)); for (i = 0; i < n_vertices; i++) { @@ -4594,6 +5094,8 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatDenseRestoreArray(B_C, &marray)); } /* coarse basis functions */ + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_B, pcbddc->local_primal_size, NULL, &pcbddc->coarse_psi_B)); + if (pcbddc->switch_static || pcbddc->dbg_flag) PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, n_D, pcbddc->local_primal_size, NULL, &pcbddc->coarse_psi_D)); for (i = 0; i < pcbddc->local_primal_size; i++) { Vec v; @@ -4619,15 +5121,32 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) } PetscCall(MatDestroy(&B_V)); PetscCall(MatDestroy(&B_C)); + PetscCall(PetscFree(work)); + } else { + PetscCall(PetscObjectReference((PetscObject)pcbddc->coarse_phi_B)); + pcbddc->coarse_psi_B = pcbddc->coarse_phi_B; + PetscCall(PetscObjectReference((PetscObject)pcbddc->coarse_phi_D)); + pcbddc->coarse_psi_D = pcbddc->coarse_phi_D; } + PetscCall(MatAssemblyBegin(*coarse_submat, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(*coarse_submat, MAT_FINAL_ASSEMBLY)); /* free memory */ + PetscCall(PetscFree(V_to_eff_V)); + PetscCall(PetscFree(C_to_eff_C)); + PetscCall(PetscFree(R_eff_V_J)); + PetscCall(PetscFree(R_eff_C_J)); + PetscCall(PetscFree(B_eff_V_J)); + PetscCall(PetscFree(B_eff_C_J)); + PetscCall(ISDestroy(&is_R)); + PetscCall(ISRestoreIndices(is_V, &idx_V)); + PetscCall(ISRestoreIndices(is_C, &idx_C)); + PetscCall(ISDestroy(&is_V)); + PetscCall(ISDestroy(&is_C)); PetscCall(PetscFree(idx_V_B)); - PetscCall(MatDestroy(&S_VV)); 
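The column-window updates of coarse_phi_B and coarse_phi_D above go through MatDenseGetSubMatrix(), which exposes a contiguous block of a dense matrix as a Mat view without copying. A minimal sketch (Phi, c0, c1 are placeholders for a dense matrix and a column range):

  Mat sub;
  PetscCall(MatDenseGetSubMatrix(Phi, PETSC_DECIDE, PETSC_DECIDE, c0, c1, &sub));
  PetscCall(MatZeroEntries(sub)); /* writes through to the corresponding block of Phi */
  PetscCall(MatDenseRestoreSubMatrix(Phi, &sub));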
PetscCall(MatDestroy(&S_CV)); PetscCall(MatDestroy(&S_VC)); PetscCall(MatDestroy(&S_CC)); - PetscCall(PetscFree(work)); if (n_vertices) PetscCall(MatDestroy(&A_VR)); if (n_constraints) PetscCall(MatDestroy(&C_CR)); PetscCall(PetscLogEventEnd(PC_BDDC_CorrectionSetUp[pcbddc->current_level], pc, 0, 0, 0)); @@ -4636,7 +5155,6 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) /* Symmetric case : It should be \Phi^{(j)^T} A^{(j)} \Phi^{(j)}=coarse_sub_mat */ /* Non-symmetric case : It should be \Psi^{(j)^T} A^{(j)} \Phi^{(j)}=coarse_sub_mat */ if (pcbddc->dbg_flag) { - Mat coarse_sub_mat; Mat AUXMAT, TM1, TM2, TM3, TM4; Mat coarse_phi_D, coarse_phi_B; Mat coarse_psi_D, coarse_psi_B; @@ -4667,8 +5185,6 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatConvert(pcbddc->coarse_psi_D, checkmattype, MAT_INITIAL_MATRIX, &coarse_psi_D)); PetscCall(MatConvert(pcbddc->coarse_psi_B, checkmattype, MAT_INITIAL_MATRIX, &coarse_psi_B)); } - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, pcbddc->local_primal_size, pcbddc->local_primal_size, coarse_submat_vals, &coarse_sub_mat)); - PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "--------------------------------------------------\n")); PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Check coarse sub mat computation (symmetric %d)\n", pcbddc->symmetric_primal)); PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); @@ -4724,47 +5240,7 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(ISDestroy(&is_dummy)); PetscCall(MatDestroy(&B0_BPHI)); } -#if 0 - { - PetscViewer viewer; - char filename[256]; - PetscCall(PetscSNPrintf(filename, PETSC_STATIC_ARRAY_LENGTH(filename), "details_local_coarse_mat%d_level%d.m",PetscGlobalRank,pcbddc->current_level)); - PetscCall(PetscViewerASCIIOpen(PETSC_COMM_SELF,filename,&viewer)); - PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB)); - PetscCall(PetscObjectSetName((PetscObject)coarse_sub_mat,"computed")); - PetscCall(MatView(coarse_sub_mat,viewer)); - PetscCall(PetscObjectSetName((PetscObject)TM1,"projected")); - PetscCall(MatView(TM1,viewer)); - if (pcbddc->coarse_phi_B) { - PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_phi_B,"phi_B")); - PetscCall(MatView(pcbddc->coarse_phi_B,viewer)); - } - if (pcbddc->coarse_phi_D) { - PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_phi_D,"phi_D")); - PetscCall(MatView(pcbddc->coarse_phi_D,viewer)); - } - if (pcbddc->coarse_psi_B) { - PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_psi_B,"psi_B")); - PetscCall(MatView(pcbddc->coarse_psi_B,viewer)); - } - if (pcbddc->coarse_psi_D) { - PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_psi_D,"psi_D")); - PetscCall(MatView(pcbddc->coarse_psi_D,viewer)); - } - PetscCall(PetscObjectSetName((PetscObject)pcbddc->local_mat,"A")); - PetscCall(MatView(pcbddc->local_mat,viewer)); - PetscCall(PetscObjectSetName((PetscObject)pcbddc->ConstraintMatrix,"C")); - PetscCall(MatView(pcbddc->ConstraintMatrix,viewer)); - PetscCall(PetscObjectSetName((PetscObject)pcis->is_I_local,"I")); - PetscCall(ISView(pcis->is_I_local,viewer)); - PetscCall(PetscObjectSetName((PetscObject)pcis->is_B_local,"B")); - PetscCall(ISView(pcis->is_B_local,viewer)); - PetscCall(PetscObjectSetName((PetscObject)pcbddc->is_R_local,"R")); - PetscCall(ISView(pcbddc->is_R_local,viewer)); - PetscCall(PetscOptionsRestoreViewer(&viewer)); - } -#endif - PetscCall(MatAXPY(TM1, m_one, coarse_sub_mat, 
DIFFERENT_NONZERO_PATTERN)); + PetscCall(MatAXPY(TM1, m_one, *coarse_submat, DIFFERENT_NONZERO_PATTERN)); PetscCall(MatNorm(TM1, NORM_FROBENIUS, &real_value)); PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer)); PetscCall(PetscViewerASCIISynchronizedPrintf(pcbddc->dbg_viewer, "Subdomain %04d matrix error % 1.14e\n", PetscGlobalRank, (double)real_value)); @@ -4814,24 +5290,72 @@ PetscErrorCode PCBDDCSetUpCorrection(PC pc, PetscScalar **coarse_submat_vals_n) PetscCall(MatDestroy(&coarse_psi_D)); PetscCall(MatDestroy(&coarse_psi_B)); } - PetscCall(MatDestroy(&coarse_sub_mat)); } - /* FINAL CUDA support (we cannot currently mix viennacl and cuda vectors */ + +#if 0 { - PetscBool gpu; + PetscViewer viewer; + char filename[256]; - PetscCall(PetscObjectTypeCompare((PetscObject)pcis->vec1_N, VECSEQCUDA, &gpu)); - if (gpu) { - if (pcbddc->local_auxmat1) PetscCall(MatConvert(pcbddc->local_auxmat1, MATSEQDENSECUDA, MAT_INPLACE_MATRIX, &pcbddc->local_auxmat1)); - if (pcbddc->local_auxmat2) PetscCall(MatConvert(pcbddc->local_auxmat2, MATSEQDENSECUDA, MAT_INPLACE_MATRIX, &pcbddc->local_auxmat2)); - if (pcbddc->coarse_phi_B) PetscCall(MatConvert(pcbddc->coarse_phi_B, MATSEQDENSECUDA, MAT_INPLACE_MATRIX, &pcbddc->coarse_phi_B)); - if (pcbddc->coarse_phi_D) PetscCall(MatConvert(pcbddc->coarse_phi_D, MATSEQDENSECUDA, MAT_INPLACE_MATRIX, &pcbddc->coarse_phi_D)); - if (pcbddc->coarse_psi_B) PetscCall(MatConvert(pcbddc->coarse_psi_B, MATSEQDENSECUDA, MAT_INPLACE_MATRIX, &pcbddc->coarse_psi_B)); - if (pcbddc->coarse_psi_D) PetscCall(MatConvert(pcbddc->coarse_psi_D, MATSEQDENSECUDA, MAT_INPLACE_MATRIX, &pcbddc->coarse_psi_D)); + PetscCall(PetscSNPrintf(filename, PETSC_STATIC_ARRAY_LENGTH(filename), "details_local_coarse_mat%d_level%d.m",PetscGlobalRank,pcbddc->current_level)); + PetscCall(PetscViewerASCIIOpen(PETSC_COMM_SELF,filename,&viewer)); + PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB)); + PetscCall(PetscObjectSetName((PetscObject)*coarse_submat,"coarse submat")); + PetscCall(MatView(*coarse_submat,viewer)); + if (pcbddc->coarse_phi_B) { + PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_phi_B,"phi_B")); + PetscCall(MatView(pcbddc->coarse_phi_B,viewer)); + } + if (pcbddc->coarse_phi_D) { + PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_phi_D,"phi_D")); + PetscCall(MatView(pcbddc->coarse_phi_D,viewer)); + } + if (pcbddc->coarse_psi_B) { + PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_psi_B,"psi_B")); + PetscCall(MatView(pcbddc->coarse_psi_B,viewer)); + } + if (pcbddc->coarse_psi_D) { + PetscCall(PetscObjectSetName((PetscObject)pcbddc->coarse_psi_D,"psi_D")); + PetscCall(MatView(pcbddc->coarse_psi_D,viewer)); + } + PetscCall(PetscObjectSetName((PetscObject)pcbddc->local_mat,"A")); + PetscCall(MatView(pcbddc->local_mat,viewer)); + PetscCall(PetscObjectSetName((PetscObject)pcbddc->ConstraintMatrix,"C")); + PetscCall(MatView(pcbddc->ConstraintMatrix,viewer)); + PetscCall(PetscObjectSetName((PetscObject)pcis->is_I_local,"I")); + PetscCall(ISView(pcis->is_I_local,viewer)); + PetscCall(PetscObjectSetName((PetscObject)pcis->is_B_local,"B")); + PetscCall(ISView(pcis->is_B_local,viewer)); + PetscCall(PetscObjectSetName((PetscObject)pcbddc->is_R_local,"R")); + PetscCall(ISView(pcbddc->is_R_local,viewer)); + PetscCall(PetscOptionsRestoreViewer(&viewer)); + } +#endif + + /* device support */ + { + PetscBool iscuda, iship, iskokkos; + MatType mtype = NULL; + + PetscCall(PetscObjectTypeCompareAny((PetscObject)pcis->vec1_N, &iscuda, VECCUDA, VECMPICUDA, 
VECSEQCUDA, "")); + PetscCall(PetscObjectTypeCompareAny((PetscObject)pcis->vec1_N, &iship, VECHIP, VECMPIHIP, VECSEQHIP, "")); + PetscCall(PetscObjectTypeCompareAny((PetscObject)pcis->vec1_N, &iskokkos, VECKOKKOS, VECMPIKOKKOS, VECSEQKOKKOS, "")); + if (iskokkos) { + if (PetscDefined(HAVE_MACRO_KOKKOS_ENABLE_CUDA)) iscuda = PETSC_TRUE; + else if (PetscDefined(HAVE_MACRO_KOKKOS_ENABLE_HIP)) iship = PETSC_TRUE; + } + if (iskokkos) mtype = multi_element ? MATSEQAIJKOKKOS : (iscuda ? MATSEQDENSECUDA : MATSEQDENSEHIP); + else if (iship) mtype = multi_element ? MATSEQAIJHIPSPARSE : MATSEQDENSEHIP; + else if (iscuda) mtype = multi_element ? MATSEQAIJCUSPARSE : MATSEQDENSECUDA; + if (mtype) { + if (pcbddc->local_auxmat1) PetscCall(MatConvert(pcbddc->local_auxmat1, mtype, MAT_INPLACE_MATRIX, &pcbddc->local_auxmat1)); + if (pcbddc->local_auxmat2) PetscCall(MatConvert(pcbddc->local_auxmat2, mtype, MAT_INPLACE_MATRIX, &pcbddc->local_auxmat2)); + if (pcbddc->coarse_phi_B) PetscCall(MatConvert(pcbddc->coarse_phi_B, mtype, MAT_INPLACE_MATRIX, &pcbddc->coarse_phi_B)); + if (pcbddc->coarse_phi_D) PetscCall(MatConvert(pcbddc->coarse_phi_D, mtype, MAT_INPLACE_MATRIX, &pcbddc->coarse_phi_D)); + if (pcbddc->coarse_psi_B) PetscCall(MatConvert(pcbddc->coarse_psi_B, mtype, MAT_INPLACE_MATRIX, &pcbddc->coarse_psi_B)); + if (pcbddc->coarse_psi_D) PetscCall(MatConvert(pcbddc->coarse_psi_D, mtype, MAT_INPLACE_MATRIX, &pcbddc->coarse_psi_D)); } } - /* get back data */ - *coarse_submat_vals_n = coarse_submat_vals; PetscFunctionReturn(PETSC_SUCCESS); } @@ -4948,12 +5472,30 @@ PetscErrorCode PCBDDCComputeLocalMatrix(PC pc, Mat ChangeOfBasisMatrix) PetscFunctionBegin; PetscCall(MatDestroy(&pcbddc->local_mat)); PetscCall(MatGetSize(matis->A, &local_size, NULL)); - PetscCall(ISCreateStride(PetscObjectComm((PetscObject)matis->A), local_size, 0, 1, &is_local)); - PetscCall(ISLocalToGlobalMappingApplyIS(matis->rmapping, is_local, &is_global)); - PetscCall(ISDestroy(&is_local)); - PetscCall(MatCreateSubMatrixUnsorted(ChangeOfBasisMatrix, is_global, is_global, &new_mat)); - PetscCall(ISDestroy(&is_global)); - + if (pcbddc->mat_graph->multi_element) { + Mat *mats, *bdiags; + IS *gsubs; + PetscInt nsubs = pcbddc->n_local_subs; + + PetscCall(PetscCalloc1(nsubs * nsubs, &mats)); + PetscCall(PetscMalloc1(nsubs, &gsubs)); + for (PetscInt i = 0; i < nsubs; i++) PetscCall(ISLocalToGlobalMappingApplyIS(matis->rmapping, pcbddc->local_subs[i], &gsubs[i])); + PetscCall(MatCreateSubMatrices(ChangeOfBasisMatrix, nsubs, gsubs, gsubs, MAT_INITIAL_MATRIX, &bdiags)); + for (PetscInt i = 0; i < nsubs; i++) PetscCall(ISDestroy(&gsubs[i])); + PetscCall(PetscFree(gsubs)); + + for (PetscInt i = 0; i < nsubs; i++) mats[i * (1 + nsubs)] = bdiags[i]; + PetscCall(MatCreateNest(PETSC_COMM_SELF, nsubs, pcbddc->local_subs, nsubs, pcbddc->local_subs, mats, &new_mat)); + PetscCall(MatConvert(new_mat, MATSEQAIJ, MAT_INPLACE_MATRIX, &new_mat)); + PetscCall(MatDestroySubMatrices(nsubs, &bdiags)); + PetscCall(PetscFree(mats)); + } else { + PetscCall(ISCreateStride(PetscObjectComm((PetscObject)matis->A), local_size, 0, 1, &is_local)); + PetscCall(ISLocalToGlobalMappingApplyIS(matis->rmapping, is_local, &is_global)); + PetscCall(ISDestroy(&is_local)); + PetscCall(MatCreateSubMatrixUnsorted(ChangeOfBasisMatrix, is_global, is_global, &new_mat)); + PetscCall(ISDestroy(&is_global)); + } if (pcbddc->dbg_flag) { Vec x, x_change; PetscReal error; @@ -5357,6 +5899,7 @@ PetscErrorCode PCBDDCSetUpLocalSolvers(PC pc, PetscBool dirichlet, PetscBool neu 
PetscCall(KSPSetErrorIfNotConverged(pcbddc->ksp_D, pc->erroriffailure)); } PetscCall(MatSetOptionsPrefix(pcis->pA_II, ((PetscObject)pcbddc->ksp_D)->prefix)); + PetscCall(MatViewFromOptions(pcis->pA_II, NULL, "-mat_view")); PetscCall(KSPSetOperators(pcbddc->ksp_D, pcis->A_II, pcis->pA_II)); /* Allow user's customization */ if (opts) PetscCall(KSPSetFromOptions(pcbddc->ksp_D)); @@ -5502,8 +6045,9 @@ PetscErrorCode PCBDDCSetUpLocalSolvers(PC pc, PetscBool dirichlet, PetscBool neu } PetscCall(KSPSetErrorIfNotConverged(pcbddc->ksp_R, pc->erroriffailure)); } - PetscCall(KSPSetOperators(pcbddc->ksp_R, A_RR, A_RR)); PetscCall(MatSetOptionsPrefix(A_RR, ((PetscObject)pcbddc->ksp_R)->prefix)); + PetscCall(MatViewFromOptions(A_RR, NULL, "-mat_view")); + PetscCall(KSPSetOperators(pcbddc->ksp_R, A_RR, A_RR)); if (opts) { /* Allow user's customization once */ PetscCall(KSPSetFromOptions(pcbddc->ksp_R)); } @@ -6108,8 +6652,6 @@ PetscErrorCode PCBDDCConstraintsSetUp(PC pc) PetscCall(ISGetSize(used_is, &size_of_constraint)); if (!size_of_constraint) continue; PetscCall(ISGetIndices(used_is, (const PetscInt **)&is_indices)); - /* change of basis should not be performed on local periodic nodes */ - if (pcbddc->mat_graph->mirrors && pcbddc->mat_graph->mirrors[is_indices[0]]) boolforchange = PETSC_FALSE; if (nnsp_has_cnst) { PetscScalar quad_value; @@ -6919,15 +7461,33 @@ PetscErrorCode PCBDDCAnalyzeInterface(PC pc) if (!pcbddc->mat_graph->xadj && pcbddc->use_local_adj) { PetscInt *xadj, *adjncy; PetscInt nvtxs; - PetscBool flg_row = PETSC_FALSE; - - PetscCall(MatGetRowIJ(matis->A, 0, PETSC_TRUE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &flg_row)); + PetscBool flg_row; + Mat A; + + PetscCall(PetscObjectReference((PetscObject)matis->A)); + A = matis->A; + for (PetscInt i = 0; i < pcbddc->local_adj_square; i++) { + Mat AtA; + + PetscCall(MatProductCreate(A, A, NULL, &AtA)); + PetscCall(MatSetOptionsPrefix(AtA, "pc_bddc_graph_")); + PetscCall(MatProductSetType(AtA, MATPRODUCT_AtB)); + PetscCall(MatProductSetFromOptions(AtA)); + PetscCall(MatProductSymbolic(AtA)); + PetscCall(MatProductClear(AtA)); + /* we only need the sparsity, cheat and tell PETSc the matrix has been assembled */ + AtA->assembled = PETSC_TRUE; + PetscCall(MatDestroy(&A)); + A = AtA; + } + PetscCall(MatGetRowIJ(A, 0, PETSC_TRUE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &flg_row)); if (flg_row) { PetscCall(PCBDDCSetLocalAdjacencyGraph(pc, nvtxs, xadj, adjncy, PETSC_COPY_VALUES)); pcbddc->computed_rowadj = PETSC_TRUE; + PetscCall(MatRestoreRowIJ(A, 0, PETSC_TRUE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &flg_row)); + rcsr = PETSC_TRUE; } - PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_TRUE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &flg_row)); - rcsr = PETSC_TRUE; + PetscCall(MatDestroy(&A)); } if (pcbddc->dbg_flag) PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); @@ -6955,10 +7515,6 @@ PetscErrorCode PCBDDCAnalyzeInterface(PC pc) pcbddc->mat_graph->nvtxs); pcbddc->mat_graph->active_coords = (PetscBool)(pcbddc->corner_selection && pcbddc->mat_graph->cdim && !pcbddc->corner_selected); - /* Setup of Graph */ - pcbddc->mat_graph->commsizelimit = 0; /* don't use the COMM_SELF variant of the graph */ - PetscCall(PCBDDCGraphSetUp(pcbddc->mat_graph, pcbddc->vertex_size, pcbddc->NeumannBoundariesLocal, pcbddc->DirichletBoundariesLocal, pcbddc->n_ISForDofsLocal, pcbddc->ISForDofsLocal, pcbddc->user_primal_vertices_local)); - 
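PCBDDCAnalyzeInterface now optionally squares the local adjacency local_adj_square times, keeping only the sparsity of each symbolic product before handing the result to MatGetRowIJ(). The CSR access pattern used there is (sketch; A stands for the local SeqAIJ matrix):

  PetscInt        nvtxs;
  const PetscInt *ia, *ja;
  PetscBool       done;

  PetscCall(MatGetRowIJ(A, 0, PETSC_TRUE, PETSC_FALSE, &nvtxs, &ia, &ja, &done));
  if (done) {
    /* neighbors of vertex i are ja[ia[i]] .. ja[ia[i+1]-1] in the symmetrized graph */
  }
  PetscCall(MatRestoreRowIJ(A, 0, PETSC_TRUE, PETSC_FALSE, &nvtxs, &ia, &ja, &done));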
/* attach info on disconnected subdomains if present */ if (pcbddc->n_local_subs) { PetscInt *local_subs, n, totn; @@ -6979,6 +7535,9 @@ pcbddc->mat_graph->n_local_subs = totn + 1; pcbddc->mat_graph->local_subs = local_subs; } + + /* Setup of Graph */ + PetscCall(PCBDDCGraphSetUp(pcbddc->mat_graph, pcbddc->vertex_size, pcbddc->NeumannBoundariesLocal, pcbddc->DirichletBoundariesLocal, pcbddc->n_ISForDofsLocal, pcbddc->ISForDofsLocal, pcbddc->user_primal_vertices_local)); } if (!pcbddc->graphanalyzed) { @@ -7105,8 +7664,8 @@ static PetscErrorCode PCBDDCMatISGetSubassemblingPattern(Mat mat, PetscInt *n_su PetscCall(PetscFree(procs_candidates)); PetscFunctionReturn(PETSC_SUCCESS); } - PetscCall(PetscOptionsGetBool(NULL, NULL, "-matis_partitioning_use_vwgt", &use_vwgt, NULL)); - PetscCall(PetscOptionsGetInt(NULL, NULL, "-matis_partitioning_threshold", &threshold, NULL)); + PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)A)->prefix, "-mat_is_partitioning_use_vwgt", &use_vwgt, NULL)); + PetscCall(PetscOptionsGetInt(NULL, ((PetscObject)A)->prefix, "-mat_is_partitioning_threshold", &threshold, NULL)); threshold = PetscMax(threshold, 2); /* Get info on mapping */ @@ -7349,7 +7908,11 @@ static PetscErrorCode PCBDDCMatISSubassemble(Mat mat, IS is_sends, PetscInt n_su /* further checks */ PetscCall(MatISGetLocalMat(mat, &local_mat)); PetscCall(PetscObjectTypeCompare((PetscObject)local_mat, MATSEQDENSE, &isdense)); + /* XXX hack for multi_element */ + if (!isdense) PetscCall(MatConvert(local_mat, MATDENSE, MAT_INPLACE_MATRIX, &local_mat)); + PetscCall(PetscObjectTypeCompare((PetscObject)local_mat, MATSEQDENSE, &isdense)); PetscCheck(isdense, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Currently cannot subassemble MATIS when local matrix type is not of type SEQDENSE"); + PetscCall(MatGetSize(local_mat, &rows, &cols)); PetscCheck(rows == cols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Local MATIS matrices should be square"); if (reuse && *mat_n) { @@ -7829,11 +8392,12 @@ static PetscErrorCode PCBDDCMatISSubassemble(Mat mat, IS is_sends, PetscInt n_su /* temporary hack into ksp private data structure */ #include <petsc/private/kspimpl.h> -PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) +PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, Mat coarse_submat) { PC_BDDC *pcbddc = (PC_BDDC *)pc->data; PC_IS *pcis = (PC_IS *)pc->data; - Mat coarse_mat, coarse_mat_is, coarse_submat_dense; + PCBDDCGraph graph = pcbddc->mat_graph; + Mat coarse_mat, coarse_mat_is; Mat coarsedivudotp = NULL; Mat coarseG, t_coarse_mat_is; MatNullSpace CoarseNullSpace = NULL; @@ -7841,12 +8405,12 @@ PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) IS coarse_is, *isarray, corners; PetscInt i, im_active = -1, active_procs = -1; PetscInt nis, nisdofs, nisneu, nisvert; - PetscInt coarse_eqs_per_proc; + PetscInt coarse_eqs_per_proc, coarsening_ratio; PC pc_temp; PCType coarse_pc_type; KSPType coarse_ksp_type; PetscBool multilevel_requested, multilevel_allowed; - PetscBool coarse_reuse; + PetscBool coarse_reuse, multi_element = graph->multi_element; PetscInt ncoarse, nedcfield; PetscBool compute_vecs = PETSC_FALSE; PetscScalar *array; @@ -7896,12 +8460,15 @@ PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) PetscCall(ISLocalToGlobalMappingCreateIS(coarse_is, &coarse_islg)); /* creates temporary MATIS object for coarse matrix */ - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, pcbddc->local_primal_size, 
pcbddc->local_primal_size, coarse_submat_vals, &coarse_submat_dense)); - PetscCall(MatCreateIS(PetscObjectComm((PetscObject)pc), 1, PETSC_DECIDE, PETSC_DECIDE, pcbddc->coarse_size, pcbddc->coarse_size, coarse_islg, coarse_islg, &t_coarse_mat_is)); - PetscCall(MatISSetLocalMat(t_coarse_mat_is, coarse_submat_dense)); + PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &t_coarse_mat_is)); + PetscCall(MatSetType(t_coarse_mat_is, MATIS)); + PetscCall(MatSetSizes(t_coarse_mat_is, PETSC_DECIDE, PETSC_DECIDE, pcbddc->coarse_size, pcbddc->coarse_size)); + PetscCall(MatISSetAllowRepeated(t_coarse_mat_is, PETSC_TRUE)); + PetscCall(MatSetLocalToGlobalMapping(t_coarse_mat_is, coarse_islg, coarse_islg)); + PetscCall(MatISSetLocalMat(t_coarse_mat_is, coarse_submat)); PetscCall(MatAssemblyBegin(t_coarse_mat_is, MAT_FINAL_ASSEMBLY)); PetscCall(MatAssemblyEnd(t_coarse_mat_is, MAT_FINAL_ASSEMBLY)); - PetscCall(MatDestroy(&coarse_submat_dense)); + PetscCall(MatViewFromOptions(t_coarse_mat_is, (PetscObject)pc, "-pc_bddc_coarse_mat_is_view")); /* count "active" (i.e. with positive local size) and "void" processes */ im_active = !!pcis->n; @@ -7915,11 +8482,12 @@ PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) multilevel_allowed = PETSC_FALSE; multilevel_requested = PETSC_FALSE; coarse_eqs_per_proc = PetscMin(PetscMax(pcbddc->coarse_size, 1), pcbddc->coarse_eqs_per_proc); - if (coarse_eqs_per_proc < 0) coarse_eqs_per_proc = pcbddc->coarse_size; + if (coarse_eqs_per_proc < 0 || size == 1) coarse_eqs_per_proc = PetscMax(pcbddc->coarse_size, 1); if (pcbddc->current_level < pcbddc->max_levels) multilevel_requested = PETSC_TRUE; if (pcbddc->coarse_size <= pcbddc->coarse_eqs_limit) multilevel_requested = PETSC_FALSE; + coarsening_ratio = multi_element ? 
1 : pcbddc->coarsening_ratio; if (multilevel_requested) { - ncoarse = active_procs / pcbddc->coarsening_ratio; + ncoarse = active_procs / coarsening_ratio; restr = PETSC_FALSE; full_restr = PETSC_FALSE; } else { @@ -7930,7 +8498,7 @@ PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) if (!pcbddc->coarse_size || size == 1) multilevel_allowed = multilevel_requested = restr = full_restr = PETSC_FALSE; ncoarse = PetscMax(1, ncoarse); if (!pcbddc->coarse_subassembling) { - if (pcbddc->coarsening_ratio > 1) { + if (coarsening_ratio > 1) { if (multilevel_requested) { PetscCall(PCBDDCMatISGetSubassemblingPattern(pc->pmat, &ncoarse, pcbddc->coarse_adj_red, &pcbddc->coarse_subassembling, &have_void)); } else { @@ -8046,12 +8614,10 @@ PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) if (multilevel_allowed) { Vec vp[1]; PetscInt nvecs = 0; - PetscBool reuse, reuser; + PetscBool reuse; - if (coarse_mat) reuse = PETSC_TRUE; - else reuse = PETSC_FALSE; - PetscCall(MPIU_Allreduce(&reuse, &reuser, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc))); vp[0] = NULL; + /* XXX HDIV also */ if (pcbddc->benign_have_null) { /* propagate no-net-flux quadrature to coarser level */ PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &vp[0])); PetscCall(VecSetSizes(vp[0], pcbddc->local_primal_size, PETSC_DECIDE)); @@ -8082,28 +8648,42 @@ PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) PetscCall(VecDestroy(&v)); } } - if (reuser) { - PetscCall(PCBDDCMatISSubassemble(t_coarse_mat_is, pcbddc->coarse_subassembling, 0, restr, full_restr, PETSC_TRUE, &coarse_mat, nis, isarray, nvecs, vp)); - } else { - PetscCall(PCBDDCMatISSubassemble(t_coarse_mat_is, pcbddc->coarse_subassembling, 0, restr, full_restr, PETSC_FALSE, &coarse_mat_is, nis, isarray, nvecs, vp)); - } - if (vp[0]) { /* vp[0] could have been placed on a different set of processes */ - PetscScalar *arraym; - const PetscScalar *arrayv; - PetscInt nl; - PetscCall(VecGetLocalSize(vp[0], &nl)); - PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, 1, nl, NULL, &coarsedivudotp)); - PetscCall(MatDenseGetArray(coarsedivudotp, &arraym)); - PetscCall(VecGetArrayRead(vp[0], &arrayv)); - PetscCall(PetscArraycpy(arraym, arrayv, nl)); - PetscCall(VecRestoreArrayRead(vp[0], &arrayv)); - PetscCall(MatDenseRestoreArray(coarsedivudotp, &arraym)); - PetscCall(VecDestroy(&vp[0])); + if (coarse_mat) reuse = PETSC_TRUE; + else reuse = PETSC_FALSE; + if (multi_element) { + /* XXX divudotp */ + PetscCall(MatISSetAllowRepeated(t_coarse_mat_is, PETSC_FALSE)); + PetscCall(PetscObjectReference((PetscObject)t_coarse_mat_is)); + coarse_mat_is = t_coarse_mat_is; } else { - PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, 0, 0, 1, NULL, &coarsedivudotp)); + PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &reuse, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc))); + if (reuse) { + PetscCall(PCBDDCMatISSubassemble(t_coarse_mat_is, pcbddc->coarse_subassembling, 0, restr, full_restr, PETSC_TRUE, &coarse_mat, nis, isarray, nvecs, vp)); + } else { + PetscCall(PCBDDCMatISSubassemble(t_coarse_mat_is, pcbddc->coarse_subassembling, 0, restr, full_restr, PETSC_FALSE, &coarse_mat_is, nis, isarray, nvecs, vp)); + } + if (vp[0]) { /* vp[0] could have been placed on a different set of processes */ + PetscScalar *arraym; + const PetscScalar *arrayv; + PetscInt nl; + PetscCall(VecGetLocalSize(vp[0], &nl)); + PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, 1, nl, NULL, &coarsedivudotp)); + PetscCall(MatDenseGetArray(coarsedivudotp, 
&arraym)); + PetscCall(VecGetArrayRead(vp[0], &arrayv)); + PetscCall(PetscArraycpy(arraym, arrayv, nl)); + PetscCall(VecRestoreArrayRead(vp[0], &arrayv)); + PetscCall(MatDenseRestoreArray(coarsedivudotp, &arraym)); + PetscCall(VecDestroy(&vp[0])); + } else { + PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, 0, 0, 1, NULL, &coarsedivudotp)); + } } } else { - PetscCall(PCBDDCMatISSubassemble(t_coarse_mat_is, pcbddc->coarse_subassembling, 0, restr, full_restr, PETSC_FALSE, &coarse_mat_is, 0, NULL, 0, NULL)); + if (ncoarse != size) PetscCall(PCBDDCMatISSubassemble(t_coarse_mat_is, pcbddc->coarse_subassembling, 0, restr, full_restr, PETSC_FALSE, &coarse_mat_is, 0, NULL, 0, NULL)); + else { + PetscCall(PetscObjectReference((PetscObject)t_coarse_mat_is)); + coarse_mat_is = t_coarse_mat_is; + } } if (coarse_mat_is || coarse_mat) { if (!multilevel_allowed) { @@ -8151,7 +8731,7 @@ PetscErrorCode PCBDDCSetUpCoarseSolver(PC pc, PetscScalar *coarse_submat_vals) if (!multilevel_allowed) { PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "--------------------------------------------------\n")); if (multilevel_requested) { - PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Not enough active processes on level %" PetscInt_FMT " (active processes %" PetscInt_FMT ", coarsening ratio %" PetscInt_FMT ")\n", pcbddc->current_level, active_procs, pcbddc->coarsening_ratio)); + PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Not enough active processes on level %" PetscInt_FMT " (active processes %" PetscInt_FMT ", coarsening ratio %" PetscInt_FMT ")\n", pcbddc->current_level, active_procs, coarsening_ratio)); } else if (pcbddc->max_levels) { PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Maximum number of requested levels reached (%" PetscInt_FMT ")\n", pcbddc->max_levels)); } @@ -8585,7 +9165,6 @@ PetscErrorCode PCBDDCComputePrimalNumbering(PC pc, PetscInt *coarse_size_n, Pets { PC_BDDC *pcbddc = (PC_BDDC *)pc->data; PC_IS *pcis = (PC_IS *)pc->data; - Mat_IS *matis = (Mat_IS *)pc->pmat->data; IS subset, subset_mult, subset_n; PetscInt local_size, coarse_size = 0; PetscInt *local_primal_indices = NULL; @@ -8609,72 +9188,11 @@ PetscErrorCode PCBDDCComputePrimalNumbering(PC pc, PetscInt *coarse_size_n, Pets PetscCall(ISRestoreIndices(subset_n, &t_local_primal_indices)); PetscCall(ISDestroy(&subset_n)); - /* check numbering */ if (pcbddc->dbg_flag) { - PetscScalar coarsesum, *array, *array2; - PetscInt i; - PetscBool set_error = PETSC_FALSE, set_error_reduced = PETSC_FALSE; - PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "--------------------------------------------------\n")); - PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Check coarse indices\n")); - PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer)); - /* counter */ - PetscCall(VecSet(pcis->vec1_global, 0.0)); - PetscCall(VecSet(pcis->vec1_N, 1.0)); - PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecSet(pcis->vec1_N, 0.0)); - for (i = 0; i < pcbddc->local_primal_size; i++) PetscCall(VecSetValue(pcis->vec1_N, pcbddc->primal_indices_local_idxs[i], 1.0, INSERT_VALUES)); - 
PetscCall(VecAssemblyBegin(pcis->vec1_N)); - PetscCall(VecAssemblyEnd(pcis->vec1_N)); - PetscCall(VecSet(pcis->vec1_global, 0.0)); - PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecGetArray(pcis->vec1_N, &array)); - PetscCall(VecGetArray(pcis->vec2_N, &array2)); - for (i = 0; i < pcis->n; i++) { - if (array[i] != 0.0 && array[i] != array2[i]) { - PetscInt owned = (PetscInt)PetscRealPart(array[i]), gi; - PetscInt neigh = (PetscInt)PetscRealPart(array2[i]); - set_error = PETSC_TRUE; - PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, 1, &i, &gi)); - PetscCall(PetscViewerASCIISynchronizedPrintf(pcbddc->dbg_viewer, "Subdomain %04d: local index %" PetscInt_FMT " (gid %" PetscInt_FMT ") owned by %" PetscInt_FMT " processes instead of %" PetscInt_FMT "!\n", PetscGlobalRank, i, gi, owned, neigh)); - } - } - PetscCall(VecRestoreArray(pcis->vec2_N, &array2)); - PetscCall(MPIU_Allreduce(&set_error, &set_error_reduced, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc))); + PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Size of coarse problem is %" PetscInt_FMT "\n", coarse_size)); PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); - for (i = 0; i < pcis->n; i++) { - if (PetscRealPart(array[i]) > 0.0) array[i] = 1.0 / PetscRealPart(array[i]); - } - PetscCall(VecRestoreArray(pcis->vec1_N, &array)); - PetscCall(VecSet(pcis->vec1_global, 0.0)); - PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecSum(pcis->vec1_global, &coarsesum)); - PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Size of coarse problem is %" PetscInt_FMT " (%lf)\n", coarse_size, (double)PetscRealPart(coarsesum))); - if (pcbddc->dbg_flag > 1 || set_error_reduced) { - PetscInt *gidxs; - - PetscCall(PetscMalloc1(pcbddc->local_primal_size, &gidxs)); - PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcbddc->local_primal_size, pcbddc->primal_indices_local_idxs, gidxs)); - PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "Distribution of local primal indices\n")); - PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); - PetscCall(PetscViewerASCIISynchronizedPrintf(pcbddc->dbg_viewer, "Subdomain %04d\n", PetscGlobalRank)); - for (i = 0; i < pcbddc->local_primal_size; i++) { - PetscCall(PetscViewerASCIISynchronizedPrintf(pcbddc->dbg_viewer, "local_primal_indices[%" PetscInt_FMT "]=%" PetscInt_FMT " (%" PetscInt_FMT ",%" PetscInt_FMT ")\n", i, local_primal_indices[i], pcbddc->primal_indices_local_idxs[i], gidxs[i])); - } - PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); - PetscCall(PetscFree(gidxs)); - } - PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); - PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer)); - PetscCheck(!set_error_reduced, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "BDDC Numbering of coarse dofs failed"); } /* get back data */ @@ -8944,6 +9462,132 @@ PetscErrorCode PCBDDCInitSubSchurs(PC pc) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode PCBDDCViewGlobalIS(PC pc, IS is, PetscViewer viewer) +{ + Mat_IS *matis = (Mat_IS 
*)pc->pmat->data; + PetscInt n = pc->pmat->rmap->n, ln, ni, st; + const PetscInt *idxs; + IS gis; + + PetscFunctionBegin; + if (!is) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(MatGetOwnershipRange(pc->pmat, &st, NULL)); + PetscCall(MatGetLocalSize(matis->A, NULL, &ln)); + PetscCall(PetscArrayzero(matis->sf_leafdata, ln)); + PetscCall(PetscArrayzero(matis->sf_rootdata, n)); + PetscCall(ISGetLocalSize(is, &ni)); + PetscCall(ISGetIndices(is, &idxs)); + for (PetscInt i = 0; i < ni; i++) { + if (idxs[i] < 0 || idxs[i] >= ln) continue; + matis->sf_leafdata[idxs[i]] = 1; + } + PetscCall(ISRestoreIndices(is, &idxs)); + PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_SUM)); + PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_SUM)); + ln = 0; + for (PetscInt i = 0; i < n; i++) { + if (matis->sf_rootdata[i]) matis->sf_rootdata[ln++] = i + st; + } + PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), ln, matis->sf_rootdata, PETSC_USE_POINTER, &gis)); + PetscCall(ISView(gis, viewer)); + PetscCall(ISDestroy(&gis)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode PCBDDCLoadOrViewCustomization(PC pc, PetscBool load, const char *outfile) +{ + PetscInt header[11]; + PC_BDDC *pcbddc = (PC_BDDC *)pc->data; + PetscViewer viewer; + MPI_Comm comm = PetscObjectComm((PetscObject)pc); + + PetscFunctionBegin; + PetscCall(PetscViewerBinaryOpen(comm, outfile ? outfile : "bddc_dump.dat", load ? FILE_MODE_READ : FILE_MODE_WRITE, &viewer)); + if (load) { + IS is; + Mat A; + + PetscCall(PetscViewerBinaryRead(viewer, header, PETSC_STATIC_ARRAY_LENGTH(header), NULL, PETSC_INT)); + PetscCheck(header[0] == 0 || header[0] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[1] == 0 || header[1] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[2] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[3] == 0 || header[3] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[4] == 0 || header[4] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[5] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[7] == 0 || header[7] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[8] == 0 || header[8] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[9] == 0 || header[9] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + PetscCheck(header[10] == 0 || header[10] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a BDDC dump next in file"); + if (header[0]) { + PetscCall(ISCreate(comm, &is)); + PetscCall(ISLoad(is, viewer)); + PetscCall(PCBDDCSetDirichletBoundaries(pc, is)); + PetscCall(ISDestroy(&is)); + } + if (header[1]) { + PetscCall(ISCreate(comm, &is)); + PetscCall(ISLoad(is, viewer)); + PetscCall(PCBDDCSetNeumannBoundaries(pc, is)); + PetscCall(ISDestroy(&is)); + } + if (header[2]) { + IS *isarray; + + PetscCall(PetscMalloc1(header[2], &isarray)); + for (PetscInt i = 0; i < header[2]; i++) { + PetscCall(ISCreate(comm, &isarray[i])); + PetscCall(ISLoad(isarray[i], viewer)); + } + PetscCall(PCBDDCSetDofsSplitting(pc, header[2], isarray)); + for (PetscInt i = 0; i < header[2]; i++) 
PetscCall(ISDestroy(&isarray[i])); + PetscCall(PetscFree(isarray)); + } + if (header[3]) { + PetscCall(ISCreate(comm, &is)); + PetscCall(ISLoad(is, viewer)); + PetscCall(PCBDDCSetPrimalVerticesIS(pc, is)); + PetscCall(ISDestroy(&is)); + } + if (header[4]) { + PetscCall(MatCreate(comm, &A)); + PetscCall(MatSetType(A, MATAIJ)); + PetscCall(MatLoad(A, viewer)); + PetscCall(PCBDDCSetDiscreteGradient(pc, A, header[5], header[6], (PetscBool)header[7], (PetscBool)header[8])); + PetscCall(MatDestroy(&A)); + } + if (header[9]) { + PetscCall(MatCreate(comm, &A)); + PetscCall(MatSetType(A, MATIS)); + PetscCall(MatLoad(A, viewer)); + PetscCall(PCBDDCSetDivergenceMat(pc, A, (PetscBool)header[10], NULL)); + PetscCall(MatDestroy(&A)); + } + } else { + header[0] = (PetscInt) !!pcbddc->DirichletBoundariesLocal; + header[1] = (PetscInt) !!pcbddc->NeumannBoundariesLocal; + header[2] = pcbddc->n_ISForDofsLocal; + header[3] = (PetscInt) !!pcbddc->user_primal_vertices_local; + header[4] = (PetscInt) !!pcbddc->discretegradient; + header[5] = pcbddc->nedorder; + header[6] = pcbddc->nedfield; + header[7] = (PetscInt)pcbddc->nedglobal; + header[8] = (PetscInt)pcbddc->conforming; + header[9] = (PetscInt) !!pcbddc->divudotp; + header[10] = (PetscInt)pcbddc->divudotp_trans; + if (header[4]) header[3] = 0; + + PetscCall(PetscViewerBinaryWrite(viewer, header, PETSC_STATIC_ARRAY_LENGTH(header), PETSC_INT)); + PetscCall(PCBDDCViewGlobalIS(pc, pcbddc->DirichletBoundariesLocal, viewer)); + PetscCall(PCBDDCViewGlobalIS(pc, pcbddc->NeumannBoundariesLocal, viewer)); + for (PetscInt i = 0; i < header[2]; i++) PetscCall(PCBDDCViewGlobalIS(pc, pcbddc->ISForDofsLocal[i], viewer)); + if (header[3]) PetscCall(PCBDDCViewGlobalIS(pc, pcbddc->user_primal_vertices_local, viewer)); + if (header[4]) PetscCall(MatView(pcbddc->discretegradient, viewer)); + if (header[9]) PetscCall(MatView(pcbddc->divudotp, viewer)); + } + PetscCall(PetscViewerDestroy(&viewer)); + PetscFunctionReturn(PETSC_SUCCESS); +} + #include <../src/mat/impls/aij/mpi/mpiaij.h> static PetscErrorCode MatMPIAIJRestrict(Mat A, MPI_Comm ccomm, Mat *B) { @@ -9028,3 +9672,81 @@ static PetscErrorCode MatMPIAIJRestrict(Mat A, MPI_Comm ccomm, Mat *B) PetscCall(MatDestroy(&At)); PetscFunctionReturn(PETSC_SUCCESS); } + +/* same as MatCreateSubMatrix(A, rows, NULL,...) 
but allows repeated rows */ +static PetscErrorCode MatAIJExtractRows(Mat A, IS rows, Mat *sA) +{ + PetscBool isaij; + MPI_Comm comm; + + PetscFunctionBegin; + PetscCall(PetscObjectGetComm((PetscObject)A, &comm)); + PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)A, &isaij, MATSEQAIJ, MATMPIAIJ, "")); + PetscCheck(isaij, comm, PETSC_ERR_SUP, "Not implemented"); + PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isaij)); + if (isaij) { /* SeqAIJ supports repeated rows */ + PetscCall(MatCreateSubMatrix(A, rows, NULL, MAT_INITIAL_MATRIX, sA)); + } else { + Mat A_loc; + Mat_SeqAIJ *da; + PetscSF sf; + PetscInt ni, *di, *dj, m = A->rmap->n, c, *ldata, *rdata; + PetscScalar *daa; + const PetscInt *idxs; + const PetscSFNode *iremotes; + PetscSFNode *remotes; + + /* SF for incoming rows */ + PetscCall(PetscSFCreate(comm, &sf)); + PetscCall(ISGetLocalSize(rows, &ni)); + PetscCall(ISGetIndices(rows, &idxs)); + PetscCall(PetscSFSetGraphLayout(sf, A->rmap, ni, NULL, PETSC_USE_POINTER, idxs)); + PetscCall(ISRestoreIndices(rows, &idxs)); + + PetscCall(MatMPIAIJGetLocalMat(A, MAT_INITIAL_MATRIX, &A_loc)); + da = (Mat_SeqAIJ *)A_loc->data; + PetscCall(PetscMalloc2(2 * ni, &ldata, 2 * m, &rdata)); + for (PetscInt i = 0; i < m; i++) { + rdata[2 * i + 0] = da->i[i + 1] - da->i[i]; + rdata[2 * i + 1] = da->i[i]; + } + PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, rdata, ldata, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, rdata, ldata, MPI_REPLACE)); + PetscCall(PetscMalloc1(ni + 1, &di)); + di[0] = 0; + for (PetscInt i = 0; i < ni; i++) di[i + 1] = di[i] + ldata[2 * i + 0]; + PetscCall(PetscMalloc1(di[ni], &dj)); + PetscCall(PetscMalloc1(di[ni], &daa)); + PetscCall(PetscMalloc1(di[ni], &remotes)); + + PetscCall(PetscSFGetGraph(sf, NULL, NULL, NULL, &iremotes)); + + /* SF graph for nonzeros */ + c = 0; + for (PetscInt i = 0; i < ni; i++) { + const PetscInt rank = iremotes[i].rank; + const PetscInt rsize = ldata[2 * i]; + for (PetscInt j = 0; j < rsize; j++) { + remotes[c].rank = rank; + remotes[c].index = ldata[2 * i + 1] + j; + c++; + } + } + PetscCheck(c == di[ni], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of local nonzeros %" PetscInt_FMT " != %" PetscInt_FMT, c, di[ni]); + PetscCall(PetscSFSetGraph(sf, da->i[m], di[ni], NULL, PETSC_USE_POINTER, remotes, PETSC_USE_POINTER)); + PetscCall(PetscSFBcastBegin(sf, MPIU_INT, da->j, dj, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(sf, MPIU_INT, da->j, dj, MPI_REPLACE)); + PetscCall(PetscSFBcastBegin(sf, MPIU_SCALAR, da->a, daa, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(sf, MPIU_SCALAR, da->a, daa, MPI_REPLACE)); + + PetscCall(MatCreateMPIAIJWithArrays(comm, ni, A->cmap->n, PETSC_DECIDE, A->cmap->N, di, dj, daa, sA)); + PetscCall(MatDestroy(&A_loc)); + PetscCall(PetscSFDestroy(&sf)); + PetscCall(PetscFree(di)); + PetscCall(PetscFree(dj)); + PetscCall(PetscFree(daa)); + PetscCall(PetscFree(remotes)); + PetscCall(PetscFree2(ldata, rdata)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} diff --git a/src/ksp/pc/impls/bddc/ftn-custom/makefile b/src/ksp/pc/impls/bddc/ftn-custom/makefile deleted file mode 100644 index 5049eac97dd..00000000000 --- a/src/ksp/pc/impls/bddc/ftn-custom/makefile +++ /dev/null @@ -1,9 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - -MANSEC = KSP - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk - - diff --git a/src/ksp/pc/impls/bddc/ftn-custom/zbddcf.c b/src/ksp/pc/impls/bddc/ftn-custom/zbddcf.c deleted file mode 
100644 index e2e9ddac18b..00000000000 --- a/src/ksp/pc/impls/bddc/ftn-custom/zbddcf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscpc.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pcbddccreatefetidpoperators_ PCBDDCCREATEFETIDPOPERATORS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pcbddccreatefetidpoperators_ pcbddccreatefetidpoperators -#endif - -PETSC_EXTERN void pcbddccreatefetidpoperators_(PC *pc, PetscBool *fully_redundant, char *prefix, Mat *fetidp_mat, PC *fetidp_pc, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(prefix, len, t); - *ierr = PCBDDCCreateFETIDPOperators(*pc, *fully_redundant, t, fetidp_mat, fetidp_pc); - if (*ierr) return; - FREECHAR(prefix, t); -} diff --git a/src/ksp/pc/impls/bjacobi/bjkokkos/bjkokkos.kokkos.cxx b/src/ksp/pc/impls/bjacobi/bjkokkos/bjkokkos.kokkos.cxx index 7de7cee78f7..d631fda5616 100644 --- a/src/ksp/pc/impls/bjacobi/bjkokkos/bjkokkos.kokkos.cxx +++ b/src/ksp/pc/impls/bjacobi/bjkokkos/bjkokkos.kokkos.cxx @@ -573,7 +573,7 @@ static PetscErrorCode PCApply_BJKOKKOS(PC pc, Vec bin, Vec xout) // get x PetscCall(VecGetArrayAndMemType(xout, &glb_xdata, &mtype)); #if defined(PETSC_HAVE_CUDA) - PetscCheck(PetscMemTypeDevice(mtype), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "No GPU data for x %" PetscInt_FMT " != %" PetscInt_FMT, mtype, PETSC_MEMTYPE_DEVICE); + PetscCheck(PetscMemTypeDevice(mtype), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "No GPU data for x %d != %d", static_cast<int>(mtype), static_cast<int>(PETSC_MEMTYPE_DEVICE)); #endif PetscCall(VecGetArrayReadAndMemType(bvec, &glb_bdata, &mtype)); #if defined(PETSC_HAVE_CUDA) @@ -1078,7 +1078,7 @@ static PetscErrorCode PCBJKOKKOSSetKSP_BJKOKKOS(PC pc, KSP ksp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCBJKOKKOSSetKSP - Sets the `KSP` context for `PCBJKOKKOS` Collective @@ -1116,7 +1116,7 @@ static PetscErrorCode PCBJKOKKOSGetKSP_BJKOKKOS(PC pc, KSP *ksp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCBJKOKKOSGetKSP - Gets the `KSP` context for the `PCBJKOKKOS` preconditioner Not Collective but `KSP` returned is parallel if `PC` was parallel diff --git a/src/ksp/pc/impls/composite/composite.c b/src/ksp/pc/impls/composite/composite.c index 0665424d62f..2605fa8d76f 100644 --- a/src/ksp/pc/impls/composite/composite.c +++ b/src/ksp/pc/impls/composite/composite.c @@ -17,6 +17,7 @@ typedef struct { Vec work1; Vec work2; PetscScalar alpha; + Mat alpha_mat; } PC_Composite; static PetscErrorCode PCApply_Composite_Multiplicative(PC pc, Vec x, Vec y) @@ -113,7 +114,11 @@ static PetscErrorCode PCApply_Composite_Special(PC pc, Vec x, Vec y) PetscCall(PCSetReusePreconditioner(next->next->pc, pc->reusepreconditioner)); PetscCall(PCApply(next->pc, x, jac->work1)); - PetscCall(PCApply(next->next->pc, jac->work1, y)); + if (jac->alpha_mat) { + if (!jac->work2) PetscCall(VecDuplicate(jac->work1, &jac->work2)); + PetscCall(MatMult(jac->alpha_mat, jac->work1, jac->work2)); + PetscCall(PCApply(next->next->pc, jac->work2, y)); + } else PetscCall(PCApply(next->next->pc, jac->work1, y)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -202,6 +207,7 @@ static PetscErrorCode PCReset_Composite(PC pc) } PetscCall(VecDestroy(&jac->work1)); PetscCall(VecDestroy(&jac->work2)); + PetscCall(MatDestroy(&jac->alpha_mat)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -225,22 +231,24 @@ static PetscErrorCode PCDestroy_Composite(PC pc) PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeGetNumberPC_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, 
"PCCompositeGetPC_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeSpecialSetAlpha_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeSpecialSetAlphaMat_C", NULL)); PetscCall(PetscFree(pc->data)); PetscFunctionReturn(PETSC_SUCCESS); } static PetscErrorCode PCSetFromOptions_Composite(PC pc, PetscOptionItems *PetscOptionsObject) { - PC_Composite *jac = (PC_Composite *)pc->data; - PetscInt nmax = 8, i; + PC_Composite *jac = (PC_Composite *)pc->data; + PetscInt nmax, i; PC_CompositeLink next; - char *pcs[8]; + char *pcs[1024]; PetscBool flg; PetscFunctionBegin; PetscOptionsHeadBegin(PetscOptionsObject, "Composite preconditioner options"); PetscCall(PetscOptionsEnum("-pc_composite_type", "Type of composition", "PCCompositeSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&jac->type, &flg)); if (flg) PetscCall(PCCompositeSetType(pc, jac->type)); + nmax = (PetscInt)PETSC_STATIC_ARRAY_LENGTH(pcs); PetscCall(PetscOptionsStringArray("-pc_composite_pcs", "List of composite solvers", "PCCompositeAddPCType", pcs, &nmax, &flg)); if (flg) { for (i = 0; i < nmax; i++) { @@ -292,6 +300,20 @@ static PetscErrorCode PCCompositeSpecialSetAlpha_Composite(PC pc, PetscScalar al PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode PCCompositeSpecialSetAlphaMat_Composite(PC pc, Mat alpha_mat) +{ + PC_Composite *jac = (PC_Composite *)pc->data; + + PetscFunctionBegin; + if (alpha_mat) { + PetscValidHeaderSpecific(alpha_mat, MAT_CLASSID, 2); + PetscCall(PetscObjectReference((PetscObject)alpha_mat)); + } + PetscCall(MatDestroy(&jac->alpha_mat)); + jac->alpha_mat = alpha_mat; + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode PCCompositeSetType_Composite(PC pc, PCCompositeType type) { PC_Composite *jac = (PC_Composite *)pc->data; @@ -476,6 +498,14 @@ PetscErrorCode PCCompositeSpecialSetAlpha(PC pc, PetscScalar alpha) PetscFunctionReturn(PETSC_SUCCESS); } +PetscErrorCode PCCompositeSpecialSetAlphaMat(PC pc, Mat alpha_mat) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(pc, PC_CLASSID, 1); + PetscTryMethod(pc, "PCCompositeSpecialSetAlphaMat_C", (PC, Mat), (pc, alpha_mat)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@C PCCompositeAddPCType - Adds another `PC` of the given type to the composite `PC`. 
@@ -612,11 +642,12 @@ PETSC_EXTERN PetscErrorCode PCCreate_Composite(PC pc) pc->ops->view = PCView_Composite; pc->ops->applyrichardson = NULL; - pc->data = (void *)jac; - jac->type = PC_COMPOSITE_ADDITIVE; - jac->work1 = NULL; - jac->work2 = NULL; - jac->head = NULL; + pc->data = (void *)jac; + jac->type = PC_COMPOSITE_ADDITIVE; + jac->work1 = NULL; + jac->work2 = NULL; + jac->head = NULL; + jac->alpha_mat = NULL; PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeSetType_C", PCCompositeSetType_Composite)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeGetType_C", PCCompositeGetType_Composite)); @@ -625,5 +656,6 @@ PETSC_EXTERN PetscErrorCode PCCreate_Composite(PC pc) PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeGetNumberPC_C", PCCompositeGetNumberPC_Composite)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeGetPC_C", PCCompositeGetPC_Composite)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeSpecialSetAlpha_C", PCCompositeSpecialSetAlpha_Composite)); + PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCCompositeSpecialSetAlphaMat_C", PCCompositeSpecialSetAlphaMat_Composite)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ksp/pc/impls/composite/ftn-custom/makefile b/src/ksp/pc/impls/composite/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ksp/pc/impls/composite/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ksp/pc/impls/composite/ftn-custom/zcompositef.c b/src/ksp/pc/impls/composite/ftn-custom/zcompositef.c deleted file mode 100644 index 7f6b556c88e..00000000000 --- a/src/ksp/pc/impls/composite/ftn-custom/zcompositef.c +++ /dev/null @@ -1,18 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscpc.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pccompositeaddpctype_ PCCOMPOSITEADDPCTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pccompositeaddpctype_ pccompositeaddpctype -#endif - -PETSC_EXTERN void pccompositeaddpctype_(PC *pc, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = PCCompositeAddPCType(*pc, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/ksp/pc/impls/factor/factor.c b/src/ksp/pc/impls/factor/factor.c index c6b13eafd97..a73cb13407c 100644 --- a/src/ksp/pc/impls/factor/factor.c +++ b/src/ksp/pc/impls/factor/factor.c @@ -172,7 +172,7 @@ PetscErrorCode PCFactorSetShiftType(PC pc, MatFactorShiftType shifttype) Level: intermediate -.seealso: [](ch_ksp), `PCCHOLESKY`, `PCLU`, ``PCFactorSetZeroPivot()`, `PCFactorSetShiftType()` +.seealso: [](ch_ksp), `PCCHOLESKY`, `PCLU`, `PCFactorSetZeroPivot()`, `PCFactorSetShiftType()` @*/ PetscErrorCode PCFactorSetShiftAmount(PC pc, PetscReal shiftamount) { @@ -415,7 +415,7 @@ PetscErrorCode PCFactorReorderForNonzeroDiagonal(PC pc, PetscReal rtol) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFactorSetMatSolverType - sets the solver package that is used to perform the factorization Logically Collective @@ -445,7 +445,7 @@ PetscErrorCode PCFactorSetMatSolverType(PC pc, MatSolverType stype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFactorGetMatSolverType - gets the solver package that is used to perform the factorization Not Collective @@ -571,7 +571,7 @@ PetscErrorCode PCFactorGetUseInPlace(PC pc, PetscBool *flg) 
PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFactorSetMatOrderingType - Sets the ordering routine (to reduce fill) to be used in the `PCLU`, `PCCHOLESKY`, `PCILU`, or `PCICC` preconditioners diff --git a/src/ksp/pc/impls/factor/ftn-custom/makefile b/src/ksp/pc/impls/factor/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ksp/pc/impls/factor/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ksp/pc/impls/factor/ftn-custom/zluf.c b/src/ksp/pc/impls/factor/ftn-custom/zluf.c deleted file mode 100644 index fbe94bf9b8e..00000000000 --- a/src/ksp/pc/impls/factor/ftn-custom/zluf.c +++ /dev/null @@ -1,43 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pcfactorsetmatorderingtype_ PCFACTORSETMATORDERINGTYPE - #define pcfactorsetmatsolvertype_ PCFACTORSETMATSOLVERTYPE - #define pcfactorgetmatsolvertype_ PCFACTORGETMATSOLVERTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pcfactorsetmatorderingtype_ pcfactorsetmatorderingtype - #define pcfactorsetmatsolvertype_ pcfactorsetmatsolvertype - #define pcfactorgetmatsolvertype_ pcfactorgetmatsolvertype -#endif - -PETSC_EXTERN void pcfactorsetmatorderingtype_(PC *pc, char *ordering, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(ordering, len, t); - *ierr = PCFactorSetMatOrderingType(*pc, t); - if (*ierr) return; - FREECHAR(ordering, t); -} -PETSC_EXTERN void pcfactorsetmatsolvertype_(PC *pc, char *ordering, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(ordering, len, t); - *ierr = PCFactorSetMatSolverType(*pc, t); - if (*ierr) return; - FREECHAR(ordering, t); -} -PETSC_EXTERN void pcfactorgetmatsolvertype_(PC *mat, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PCFactorGetMatSolverType(*mat, &tname); - if (*ierr) return; - if (name != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ksp/pc/impls/fieldsplit/fieldsplit.c b/src/ksp/pc/impls/fieldsplit/fieldsplit.c index 9bbbcd97de1..ed156b08ea5 100644 --- a/src/ksp/pc/impls/fieldsplit/fieldsplit.c +++ b/src/ksp/pc/impls/fieldsplit/fieldsplit.c @@ -339,23 +339,28 @@ static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/* Precondition: jac->bs is set to a meaningful value */ +/* Precondition: jac->bs is set to a meaningful value or the matrix is of type MATNEST */ static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc) { PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; - PetscInt i, nfields, *ifields, nfields_col, *ifields_col; - PetscBool flg, flg_col; + PetscInt bs, i, nfields, *ifields, nfields_col, *ifields_col; + PetscBool flg, flg_col, mnest; char optionname[128], splitname[8], optionname_col[128]; PetscFunctionBegin; - PetscCall(PetscMalloc1(jac->bs, &ifields)); - PetscCall(PetscMalloc1(jac->bs, &ifields_col)); + PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &mnest)); + if (mnest) { + PetscCall(MatNestGetSize(pc->pmat, &bs, NULL)); + } else { + bs = jac->bs; + } + PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col)); for (i = 0, flg = PETSC_TRUE;; i++) { PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i)); PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i)); - nfields = jac->bs; - nfields_col = jac->bs; + nfields = bs; + nfields_col = bs; PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg)); PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col)); if (!flg) break; @@ -374,8 +379,7 @@ static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc) create new splits, which would probably not be what the user wanted. */ jac->splitdefined = PETSC_TRUE; } - PetscCall(PetscFree(ifields)); - PetscCall(PetscFree(ifields_col)); + PetscCall(PetscFree2(ifields, ifields_col)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -503,6 +507,7 @@ static PetscErrorCode PCFieldSplitSetDefaults(PC pc) if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) { Mat M = pc->pmat; PetscBool isnest; + PetscInt nf; PetscCall(PetscInfo(pc, "Using default splitting of fields\n")); PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest)); @@ -510,23 +515,15 @@ static PetscErrorCode PCFieldSplitSetDefaults(PC pc) M = pc->mat; PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest)); } - if (isnest) { - IS *fields; - PetscInt nf; - - PetscCall(MatNestGetSize(M, &nf, NULL)); - PetscCall(PetscMalloc1(nf, &fields)); - PetscCall(MatNestGetISs(M, fields, NULL)); - for (i = 0; i < nf; i++) PetscCall(PCFieldSplitSetIS(pc, NULL, fields[i])); - PetscCall(PetscFree(fields)); - } else { - for (i = 0; i < jac->bs; i++) { - char splitname[8]; - PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); - PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i)); - } - jac->defaultsplit = PETSC_TRUE; + if (!isnest) nf = jac->bs; + else PetscCall(MatNestGetSize(M, &nf, NULL)); + for (i = 0; i < nf; i++) { + char splitname[8]; + + PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i)); + PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i)); } + jac->defaultsplit = PETSC_TRUE; } } } @@ -575,13 +572,14 @@ static PetscErrorCode PCSetUp_FieldSplit(PC pc) PC_FieldSplit *jac = (PC_FieldSplit *)pc->data; PC_FieldSplitLink ilink; PetscInt i, nsplit; - PetscBool sorted, sorted_col; + PetscBool sorted, sorted_col, matnest = PETSC_FALSE; PetscFunctionBegin; pc->failedreason = PC_NOERROR; PetscCall(PCFieldSplitSetDefaults(pc)); nsplit = jac->nsplits; ilink = jac->head; + if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); /* get the matrices for each split */ if (!jac->issetup) { @@ -603,37 +601,66 @@ static PetscErrorCode PCSetUp_FieldSplit(PC pc) PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes"); } - bs = jac->bs; - PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); - nslots = (rend - rstart) / bs; - for (i = 0; i < nsplit; i++) { - if (jac->defaultsplit) { - PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is)); - PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); - } else if (!ilink->is) { - if (ilink->nfields > 1) { - PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col; - 
PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii)); - PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj)); - for (j = 0; j < nslots; j++) { - for (k = 0; k < nfields; k++) { - ii[nfields * j + k] = rstart + bs * j + fields[k]; - jj[nfields * j + k] = rstart + bs * j + fields_col[k]; + if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */ + bs = jac->bs; + PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); + nslots = (rend - rstart) / bs; + for (i = 0; i < nsplit; i++) { + if (jac->defaultsplit) { + PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is)); + PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); + } else if (!ilink->is) { + if (ilink->nfields > 1) { + PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col; + + PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii)); + PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj)); + for (j = 0; j < nslots; j++) { + for (k = 0; k < nfields; k++) { + ii[nfields * j + k] = rstart + bs * j + fields[k]; + jj[nfields * j + k] = rstart + bs * j + fields_col[k]; + } } + PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is)); + PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col)); + PetscCall(ISSetBlockSize(ilink->is, nfields)); + PetscCall(ISSetBlockSize(ilink->is_col, nfields)); + } else { + PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is)); + PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col)); } - PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is)); - PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col)); - PetscCall(ISSetBlockSize(ilink->is, nfields)); - PetscCall(ISSetBlockSize(ilink->is_col, nfields)); - } else { - PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is)); - PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col)); } + PetscCall(ISSorted(ilink->is, &sorted)); + if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col)); + PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split"); + ilink = ilink->next; } - PetscCall(ISSorted(ilink->is, &sorted)); - if (ilink->is_col) PetscCall(ISSorted(ilink->is_col, &sorted_col)); - PetscCheck(sorted && sorted_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Fields must be sorted when creating split"); - ilink = ilink->next; + } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */ + IS *rowis, *colis, *ises = NULL; + PetscInt mis, nis; + + PetscCall(MatNestGetSize(pc->pmat, &mis, &nis)); + PetscCall(PetscMalloc2(mis, &rowis, nis, &colis)); + PetscCall(MatNestGetISs(pc->pmat, rowis, colis)); + if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises)); + + for (i = 0; i < nsplit; i++) { + if (jac->defaultsplit) { + PetscCall(ISDuplicate(rowis[i], &ilink->is)); + PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); + } else if (!ilink->is) { + if (ilink->nfields > 1) { + for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = 
rowis[ilink->fields[j]]; + PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is)); + } else { + PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is)); + } + PetscCall(ISDuplicate(ilink->is, &ilink->is_col)); + } + ilink = ilink->next; + } + PetscCall(PetscFree2(rowis, colis)); + PetscCall(PetscFree(ises)); } } @@ -1129,8 +1156,10 @@ static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(VecScale(ilinkD->y, jac->schurscale)); PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); @@ -1151,7 +1180,9 @@ static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); @@ -1163,7 +1194,9 @@ static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x)); @@ -1193,7 +1226,9 @@ static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); @@ -1243,7 +1278,9 @@ static 
PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(VecScale(ilinkD->y, jac->schurscale)); @@ -1264,7 +1301,9 @@ static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE)); @@ -1275,7 +1314,9 @@ static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x)); @@ -1304,7 +1345,9 @@ static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y) PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD)); PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1)); PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1)); PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y)); PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL)); PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE)); @@ -1342,7 +1385,10 @@ static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y) PetscFunctionBegin; if (jac->type == PC_COMPOSITE_ADDITIVE) { - if (jac->defaultsplit) { + PetscBool matnest; + + PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); + if (jac->defaultsplit && !matnest) { PetscCall(VecGetBlockSize(x, &bs)); PetscCheck(jac->bs <= 0 || bs == jac->bs, 
PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); PetscCall(VecGetBlockSize(y, &bs)); @@ -1568,7 +1614,10 @@ static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y) PetscFunctionBegin; if (jac->type == PC_COMPOSITE_ADDITIVE) { - if (jac->defaultsplit) { + PetscBool matnest; + + PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest)); + if (jac->defaultsplit && !matnest) { PetscCall(VecGetBlockSize(x, &bs)); PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs); PetscCall(VecGetBlockSize(y, &bs)); @@ -1724,7 +1773,7 @@ static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *Petsc if (flg) PetscCall(PCFieldSplitSetType(pc, ctype)); /* Only setup fields once */ if ((jac->bs > 0) && (jac->nsplits == 0)) { - /* only allow user to set fields from command line if bs is already known. + /* only allow user to set fields from command line. otherwise user can set them in PCFieldSplitSetDefaults() */ PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc)); if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n")); @@ -1736,10 +1785,10 @@ static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems *Petsc PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL)); PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL)); } else if (jac->type == PC_COMPOSITE_GKB) { - PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitGKBTol", jac->gkbtol, &jac->gkbtol, NULL)); - PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL)); - PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0)); - PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL)); + PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL)); + PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL)); + PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0)); + PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL)); PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL)); } /* @@ -1782,10 +1831,7 @@ static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitna PetscCall(PetscInfo(pc, "Ignoring new 
split \"%s\" because the splits have already been defined\n", splitname)); PetscFunctionReturn(PETSC_SUCCESS); } - for (i = 0; i < n; i++) { - PetscCheck(fields[i] < jac->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", fields[i], jac->bs); - PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); - } + for (i = 0; i < n; i++) { PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]); } PetscCall(PetscNew(&ilink)); if (splitname) { PetscCall(PetscStrallocpy(splitname, &ilink->splitname)); @@ -1875,7 +1921,7 @@ static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP * PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`. Input Parameters: @@ -2006,7 +2052,7 @@ static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[] PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT` Logically Collective @@ -2016,15 +2062,22 @@ static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[] . splitname - name of this split, if `NULL` the number of the split is used . n - the number of fields in this split . fields - the fields in this split -- fields_col - generally the same as fields, if it does not match fields then the matrix block that is solved for this set of fields comes from an off-diagonal block - of the matrix and fields_col provides the column indices for that block +- fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block + of the matrix and `fields_col` provides the column indices for that block + + Options Database Key: +. -pc_fieldsplit_%d_fields - indicates the fields to be used in the `%d`'th split Level: intermediate Notes: Use `PCFieldSplitSetIS()` to set a general set of indices as a split. - `PCFieldSplitSetFields()` is for defining fields as strided blocks. For example, if the block + If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`. + + If the matrix used to construct the preconditioner is not `MATNEST` then + `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlocksize()` or + to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean 0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x1x3x5x7.. x12x45x78x.... where the numbered entries indicate what is in the split. @@ -2032,16 +2085,17 @@ static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[] This function is called once per split (it creates a new split each time). Solve options for this split will be available under the prefix `-fieldsplit_SPLITNAME_`. 
- `PCFieldSplitSetIS()` does not support having a fields_col different from fields + `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields` Developer Notes: This routine does not actually create the `IS` representing the split, that is delayed until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be available when this routine is called. -.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()` +.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`, + `MatSetBlockSize()`, `MatCreateNest()` @*/ -PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col) +PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[]) { PetscFunctionBegin; PetscValidHeaderSpecific(pc, PC_CLASSID, 1); @@ -2172,7 +2226,7 @@ PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT` Logically Collective @@ -2185,12 +2239,12 @@ PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg) Level: intermediate Notes: - Use `PCFieldSplitSetFields()`, for splits defined by strided types. + Use `PCFieldSplitSetFields()` for splits defined by strided `IS` based on the matrix block size or the `is_row[]` passed to `MatCreateNest()`. This function is called once per split (it creates a new split each time). Solve options for this split will be available under the prefix -fieldsplit_SPLITNAME_.
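By way of contrast with the strided interface, a sketch of supplying an arbitrary index set for a split; the extent of the `IS` (`nlocal0`, `rstart`) is hypothetical and would come from the application's own layout:

```c
/* Sketch: place nlocal0 locally owned rows, starting at rstart, in split "0". */
IS is0;

PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nlocal0, rstart, 1, &is0));
PetscCall(PCFieldSplitSetIS(pc, "0", is0));
PetscCall(ISDestroy(&is0)); /* the PC keeps its own reference */
```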
-.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()` +.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()` @*/ PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is) { @@ -2202,7 +2256,7 @@ PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFieldSplitGetIS - Retrieves the elements for a split as an `IS` Logically Collective @@ -2216,7 +2270,7 @@ PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is) Level: intermediate -.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()` +.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()` @*/ PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is) { @@ -2242,7 +2296,7 @@ PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS` Logically Collective @@ -2256,7 +2310,8 @@ PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is) Level: intermediate -.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()` +.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()` + @*/ PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is) { @@ -2281,7 +2336,7 @@ PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is) /*@ PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the - fieldsplit preconditioner when calling `PCFieldSplitSetIS()`. If not set the matrix block size is used. + fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set the matrix block size is used. Logically Collective @@ -2291,6 +2346,9 @@ PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is) Level: intermediate + Note: + If the matrix is a `MATNEST` then the `is_row[]` passed to `MatCreateNest()` determines the fields. + .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` @*/ PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs) @@ -2337,7 +2395,7 @@ PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs) Developer Notes: There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()` - The Fortran interface should be modernized to return directly the array of values. + The Fortran interface could be modernized to return directly the array of values. .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()` @*/ @@ -2424,11 +2482,10 @@ PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[]) + a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner matrix associated with the Schur complement (i.e. A11), not the Schur complement matrix .
self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix: - The only preconditioner that currently works with this symbolic representation matrix object is the `PCLSC` - preconditioner + The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM` . user - the preconditioner for the Schur complement is generated from the user provided matrix (pre argument to this function). -. selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation Sp = A11 - A10 inv(diag(A00)) A01 +. selfp - the preconditioning for the Schur complement is generated from an explicitly-assembled approximation $ S_p = A_{11} - A_{10} \text{inv}(\text{diag}(A_{00})) A_{01} $ This is only a good preconditioner when diag(A00) is a good preconditioner for A00. Optionally, A00 can be lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump` - full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation @@ -2436,12 +2493,14 @@ PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[]) useful mostly as a test that the Schur complement approach can work for your problem When solving a saddle point problem, where the A11 block is identically zero, using `a11` as the ptype only makes sense - with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a ptype of self and + with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and `-fieldsplit_1_pc_type lsc` which uses the least squares commutator to compute a preconditioner for the Schur complement. -.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, - `MatSchurComplementSetAinvType()`, `PCLSC`, + Developer Note: + The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere. +.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, + `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()` @*/ PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre) { @@ -2472,7 +2531,6 @@ PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType pty Level: intermediate .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC` - @*/ PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre) { @@ -2588,30 +2646,40 @@ static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchu Notes: The FULL factorization is -.vb - (A B) = (1 0) (A 0) (1 Ainv*B) = L D U - (C E) (C*Ainv 1) (0 S) (0 1) -.vb - where S = E - C*Ainv*B. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$, - and DIAG is the diagonal part with the sign of S flipped (because this makes the preconditioner positive definite for many formulations, - thus allowing the use of `KSPMINRES)`.
Sign flipping of S can be turned off with `PCFieldSplitSetSchurScale()`. - - If A and S are solved exactly -.vb - *) FULL factorization is a direct solver. - *) The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations. - *) With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations. -.ve + + ```{math} + \left(\begin{array}{cc} A & B \\ + C & E \\ + \end{array}\right) = + \left(\begin{array}{cc} I & 0 \\ + C*A^{-1} & I \\ + \end{array}\right) + \left(\begin{array}{cc} A & 0 \\ + 0 & S \\ + \end{array}\right) + \left(\begin{array}{cc} I & A^{-1}B \\ + 0 & I \\ + \end{array}\right) = L D U. + ``` + + where $ S = E - C*A^{-1}*B $. In practice, the full factorization is applied via block triangular solves with the grouping $L*(D*U)$. UPPER uses $D*U$, LOWER uses $L*D$, + and DIAG is the diagonal part with the sign of $ S $ flipped (because this makes the preconditioner positive definite for many formulations, + thus allowing the use of `KSPMINRES`). Sign flipping of $ S $ can be turned off with `PCFieldSplitSetSchurScale()`. + + If $A$ and $S$ are solved exactly ++ 1 - FULL factorization is a direct solver. +. 2 - The preconditioned operator with LOWER or UPPER has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations. +- 3 - With DIAG, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations. If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR` which can use one less preconditioner application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice. - For symmetric problems in which A is positive definite and S is negative definite, DIAG can be used with `KSPMINRES`. + For symmetric problems in which $A$ is positive definite and $S$ is negative definite, DIAG can be used with `KSPMINRES`. A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g. a few iterations of a Krylov method is used to solve with A or S).
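As a concrete illustration of the convergence notes above, a sketch selecting the `diag` factorization together with `KSPMINRES`; it assumes an SPD/saddle-point setup and an existing `ksp`:

```c
/* Sketch: DIAG Schur factorization; the sign of S is flipped so the
   preconditioner stays positive definite, allowing MINRES. */
PC pc;

PetscCall(KSPGetPC(ksp, &pc));
PetscCall(PCSetType(pc, PCFIELDSPLIT));
PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
PetscCall(PCFieldSplitSetSchurFactType(pc, PC_FIELDSPLIT_SCHUR_FACT_DIAG));
PetscCall(KSPSetType(ksp, KSPMINRES));
```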
.seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`, - [](sec_flexibleksp) + [](sec_flexibleksp), `PCFieldSplitSetSchurPre()` @*/ PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype) { @@ -2680,6 +2748,9 @@ static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar sc Level: advanced + Note: + Use `NULL` for any unneeded output arguments + .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()` @*/ PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11) @@ -2736,7 +2807,7 @@ static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal toleranc } /*@ - PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner in `PCFIELDSPLIT` + PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT` Collective @@ -2951,7 +3022,8 @@ static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt Input Parameters: + pc - the preconditioner context -- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR` +- type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, + `PC_COMPOSITE_GKB` Options Database Key: . -pc_fieldsplit_type - Sets fieldsplit preconditioner type Level: intermediate .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`, - `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR` + `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()` @*/ PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type) { @@ -3010,7 +3082,10 @@ PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type) Level: intermediate -.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()` + Developer Note: + The name should be `PCFieldSplitSetUseDMSplits()`, with a similar change to the options database key + +.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` @*/ PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg) { @@ -3038,7 +3113,10 @@ PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool *flg) Level: intermediate -.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `PCFieldSplitSetFields()`, `PCFieldsplitSetIS()` + Developer Note: + The name should be `PCFieldSplitGetUseDMSplits()` + +.seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()` @*/ PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg) { @@ -3114,102 +3192,103 @@ PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg) } /*MC -
PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual - collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details. + PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual + collections of variables (that may overlap) called splits. See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details. - Options Database Keys: + Options Database Keys: + -pc_fieldsplit_%d_fields - indicates the fields to be used in the `%d`'th split . -pc_fieldsplit_default - automatically add any fields to additional splits that have not - been supplied explicitly by `-pc_fieldsplit_%d_fields` + been supplied explicitly by `-pc_fieldsplit_%d_fields` . -pc_fieldsplit_block_size - size of block that defines fields (i.e. there are bs fields) + when the matrix is not of `MatType` `MATNEST` . -pc_fieldsplit_type - type of relaxation or factorization splitting . -pc_fieldsplit_schur_precondition - default is `a11`; see `PCFieldSplitSetSchurPre()` . -pc_fieldsplit_schur_fact_type - set factorization type when using `-pc_fieldsplit_type schur`; - see `PCFieldSplitSetSchurFactType()` + see `PCFieldSplitSetSchurFactType()` +. -pc_fieldsplit_dm_splits (default is true) - Whether to use `DMCreateFieldDecomposition()` for splits - -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver - Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_` . - The options prefix for the inner solver when using the Golub-Kahan biadiagonalization preconditioner is `-fieldsplit_0_` - For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields. + Options prefixes for inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_`. + The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`. + For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields. - To set options on the solvers for each block append `-fieldsplit_` to all the `PC` - options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1` + To set options on the solvers for each block append `-fieldsplit_` to all the `PC` + options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1` - To set the options on the solvers separate for each block call `PCFieldSplitGetSubKSP()` - and set the options directly on the resulting `KSP` object + To set the options on the solvers separately for each block call `PCFieldSplitGetSubKSP()` + and set the options directly on the resulting `KSP` object - Level: intermediate - - Notes: - Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries and `PCFieldSplitSetIS()` - to define a split by an arbitrary collection of entries. - - If no splits are set the default is used. The splits are defined by entries strided by bs, - beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`, - if this is not called the block size defaults to the blocksize of the second matrix passed - to `KSPSetOperators()`/`PCSetOperators()`.
- - For the Schur complement preconditioner if - - ```{math} - J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right] - ``` + Level: intermediate - the preconditioner using `full` factorization is logically - ```{math} - \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right] + Notes: + Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()` + to define a split by an arbitrary collection of entries. + + If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports + `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise if the matrix is not `MATNEST`, the splits are defined by entries strided by bs, + beginning at 0 then 1, etc to bs-1. The block size can be set with `PCFieldSplitSetBlockSize()`, + if this is not called the block size defaults to the blocksize of the second matrix passed + to `KSPSetOperators()`/`PCSetOperators()`. + + For the Schur complement preconditioner if + ```{math} + J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right] + ``` + + the preconditioner using `full` factorization is logically + ```{math} + \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right] ``` - where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement - ```{math} + where the action of $\text{inv}(A_{00})$ is applied using the KSP solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement + ```{math} S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01} - ``` - which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given - in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0, - it returns the KSP associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default - $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$. - - The factorization type is set using `-pc_fieldsplit_schur_fact_type `. `full` is shown above, - `diag` gives - ```{math} - \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right] - ``` - Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip - can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of - ```{math} - \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right] - ``` - where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of - ```{math} - \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right] - ``` - where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s. 
- - If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS` - is used automatically for a second block. - - The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1. - Generally it should be used with the `MATAIJ` format. - - The forms of these preconditioners are closely related if not identical to forms derived as "Distributive Iterations", see, - for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`. - One can also use `PCFIELDSPLIT` - inside a smoother resulting in "Distributive Smoothers". - - See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`. - - The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the - residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU like preconditioner on all the variables. - - The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape - ```{math} - \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right] - ``` - with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = _{A00} + \nu*A_{01}*A_{01}'$. - A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`. - - Developer Note: - The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their - user API. + ``` + which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the KSP solver with prefix `-fieldsplit_splitname_` (where `splitname` was given + in providing the SECOND split or 1 if not given). For `PCFieldSplitGetSubKSP()` when field number is 0, + it returns the `KSP` associated with `-fieldsplit_0_` while field number 1 gives `-fieldsplit_1_` KSP. By default + $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$. + + The factorization type is set using `-pc_fieldsplit_schur_fact_type `. `full` is shown above, + `diag` gives + ```{math} + \left[\begin{array}{cc} \text{inv}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right] + ``` + Note that, slightly counter intuitively, there is a negative in front of the $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip + can be turned off with `PCFieldSplitSetSchurScale()` or by command line `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of + ```{math} + \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right] + ``` + where the inverses of A_{00} and S are applied using KSPs. The upper factorization is the inverse of + ```{math} + \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right] + ``` + where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s. 
+ + If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS` + is used automatically for a second submatrix. + + The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1. + Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`. + + The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see, + for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`. + One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers". + + See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`. + + The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the + residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU-like preconditioner on all the variables. + + The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape + ```{math} + \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right] + ``` + with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := 1/\nu * I$ and the $(1,1)$-block of the matrix is modified to $H = A_{00} + \nu*A_{01}*A_{01}'$. + A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`. + + Developer Note: + The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their + user API.
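A sketch of the `MATNEST` path this merge request wires into `PCSetUp_FieldSplit()`; the block matrices `A00`, `A01`, `A10`, `A11` and the `ksp` are assumed to exist already:

```c
/* Sketch: a 2x2 MATNEST; with no explicit splits set, PCFIELDSPLIT now
   derives the default splits from the is_row[] of the nest, so no block
   size or explicit IS is needed. */
Mat sub[4] = {A00, A01, A10, A11}, J;
PC  pc;

PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, sub, &J));
PetscCall(KSPSetOperators(ksp, J, J));
PetscCall(KSPGetPC(ksp, &pc));
PetscCall(PCSetType(pc, PCFIELDSPLIT)); /* splits default to the nest ISs */
```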
.seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`, `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`, diff --git a/src/ksp/pc/impls/fieldsplit/ftn-custom/zfieldsplitf.c b/src/ksp/pc/impls/fieldsplit/ftn-custom/zfieldsplitf.c index 704b89e9daf..4a4e47f7c80 100644 --- a/src/ksp/pc/impls/fieldsplit/ftn-custom/zfieldsplitf.c +++ b/src/ksp/pc/impls/fieldsplit/ftn-custom/zfieldsplitf.c @@ -4,15 +4,9 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define pcfieldsplitgetsubksp_ PCFIELDSPLITGETSUBKSP #define pcfieldsplitschurgetsubksp_ PCFIELDSPLITSCHURGETSUBKSP - #define pcfieldsplitsetis_ PCFIELDSPLITSETIS - #define pcfieldsplitgetis_ PCFIELDSPLITGETIS - #define pcfieldsplitsetfields_ PCFIELDSPLITSETFIELDS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define pcfieldsplitgetsubksp_ pcfieldsplitgetsubksp #define pcfieldsplitschurgetsubksp_ pcfieldsplitschurgetsubksp - #define pcfieldsplitsetis_ pcfieldsplitsetis - #define pcfieldsplitgetis_ pcfieldsplitgetis - #define pcfieldsplitsetfields_ pcfieldsplitsetfields #endif PETSC_EXTERN void pcfieldsplitschurgetsubksp_(PC *pc, PetscInt *n_local, KSP *ksp, PetscErrorCode *ierr) @@ -44,30 +38,3 @@ PETSC_EXTERN void pcfieldsplitgetsubksp_(PC *pc, PetscInt *n_local, KSP *ksp, Pe } *ierr = PetscFree(tksp); } - -PETSC_EXTERN void pcfieldsplitsetis_(PC *pc, char *splitname, IS *is, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(splitname, len, t); - *ierr = PCFieldSplitSetIS(*pc, t, *is); - if (*ierr) return; - FREECHAR(splitname, t); -} - -PETSC_EXTERN void pcfieldsplitgetis_(PC *pc, char *splitname, IS *is, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(splitname, len, t); - *ierr = PCFieldSplitGetIS(*pc, t, is); - if (*ierr) return; - FREECHAR(splitname, t); -} - -PETSC_EXTERN void pcfieldsplitsetfields_(PC *pc, char *splitname, PetscInt *n, const PetscInt *fields, const PetscInt *fields_col, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(splitname, len, t); - *ierr = PCFieldSplitSetFields(*pc, t, *n, fields, fields_col); - if (*ierr) return; - FREECHAR(splitname, t); -} diff --git a/src/ksp/pc/impls/gamg/agg.c b/src/ksp/pc/impls/gamg/agg.c index b943ac40b46..b70af38a4cf 100644 --- a/src/ksp/pc/impls/gamg/agg.c +++ b/src/ksp/pc/impls/gamg/agg.c @@ -8,7 +8,7 @@ #include typedef struct { - PetscInt nsmooths; + PetscInt nsmooths; // number of smoothing steps to construct prolongation PetscInt aggressive_coarsening_levels; // number of aggressive coarsening levels (square or MISk) PetscInt aggressive_mis_k; // the k in MIS-k PetscBool use_aggressive_square_graph; @@ -18,7 +18,7 @@ typedef struct { } PC_GAMG_AGG; /*@ - PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical) used for multigrid on all the levels + PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical) used to construct the prolongation operator Logically Collective @@ -27,11 +27,18 @@ typedef struct { - n - the number of smooths Options Database Key: -. -pc_gamg_agg_nsmooths - number of smoothing steps to use with smooth aggregation +. -pc_gamg_agg_nsmooths - number of smoothing steps to use Level: intermediate + Note: + This is a different concept from the number of smoothing steps used during the linear solution process, which + can be set with `-mg_levels_ksp_max_it`. + + Developer Note: + This should be named `PCGAMGAGGSetNSmooths()`.
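To illustrate the distinction drawn in the note just added, a minimal sketch assuming an existing `pc`:

```c
/* Sketch: one prolongation-smoothing step (classical smoothed aggregation);
   solve-phase smoothing is controlled separately, e.g. -mg_levels_ksp_max_it 2. */
PetscCall(PCSetType(pc, PCGAMG));
PetscCall(PCGAMGSetNSmooths(pc, 1));
```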
+ +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCMG`, `PCGAMG` @*/ PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n) { @@ -63,11 +70,11 @@ static PetscErrorCode PCGAMGSetNSmooths_AGG(PC pc, PetscInt n) - n - 0, 1 or more Options Database Key: -. -pc_gamg_aggressive_coarsening - Number of levels to square the graph on before aggregating it +. -pc_gamg_aggressive_coarsening - Number of levels on which to square the graph before aggregating it Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()` @*/ PetscErrorCode PCGAMGSetAggressiveLevels(PC pc, PetscInt n) { @@ -92,7 +99,7 @@ PetscErrorCode PCGAMGSetAggressiveLevels(PC pc, PetscInt n) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()` @*/ PetscErrorCode PCGAMGMISkSetAggressive(PC pc, PetscInt n) { @@ -117,7 +124,7 @@ PetscErrorCode PCGAMGMISkSetAggressive(PC pc, PetscInt n) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()` @*/ PetscErrorCode PCGAMGSetAggressiveSquareGraph(PC pc, PetscBool b) { @@ -142,7 +149,7 @@ PetscErrorCode PCGAMGSetAggressiveSquareGraph(PC pc, PetscBool b) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGSetLowMemoryFilter()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGSetLowMemoryFilter()` @*/ PetscErrorCode PCGAMGMISkSetMinDegreeOrdering(PC pc, PetscBool b) { @@ -167,7 +174,8 @@ PetscErrorCode PCGAMGMISkSetMinDegreeOrdering(PC pc, PetscBool b) Level: intermediate -.seealso: `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, + `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()` @*/ PetscErrorCode
PCGAMGSetLowMemoryFilter(PC pc, PetscBool b) { @@ -243,7 +251,7 @@ static PetscErrorCode PCSetFromOptions_GAMG_AGG(PC pc, PetscOptionItems *PetscOp PetscFunctionBegin; PetscOptionsHeadBegin(PetscOptionsObject, "GAMG-AGG options"); - PetscCall(PetscOptionsInt("-pc_gamg_agg_nsmooths", "smoothing steps for smoothed aggregation, usually 1", "PCGAMGSetNSmooths", pc_gamg_agg->nsmooths, &pc_gamg_agg->nsmooths, NULL)); + PetscCall(PetscOptionsInt("-pc_gamg_agg_nsmooths", "number of smoothing steps to construct prolongation, usually 1", "PCGAMGSetNSmooths", pc_gamg_agg->nsmooths, &pc_gamg_agg->nsmooths, NULL)); // aggressive coarsening logic with deprecated -pc_gamg_square_graph PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &n_aggressive_flg)); if (!n_aggressive_flg) @@ -264,10 +272,12 @@ static PetscErrorCode PCSetFromOptions_GAMG_AGG(PC pc, PetscOptionItems *PetscOp static PetscErrorCode PCDestroy_GAMG_AGG(PC pc) { - PC_MG *mg = (PC_MG *)pc->data; - PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx; + PC_MG *mg = (PC_MG *)pc->data; + PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx; + PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx; PetscFunctionBegin; + PetscCall(MatCoarsenDestroy(&pc_gamg_agg->crs)); PetscCall(PetscFree(pc_gamg->subctx)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", NULL)); @@ -323,6 +333,7 @@ static PetscErrorCode PCSetCoordinates_AGG(PC pc, PetscInt ndm, PetscInt a_nloc, for (kk = 0; kk < nloc; kk++) { const PetscInt M = nloc * pc_gamg->data_cell_rows; /* stride into data */ PetscReal *data = &pc_gamg->data[kk * ndatarows]; /* start of cell */ + if (pc_gamg->data_cell_cols == 1) *data = 1.0; else { /* translational modes */ @@ -377,6 +388,7 @@ static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A) PetscCall(MatGetNearNullSpace(a_A, &mnull)); if (!mnull) { DM dm; + PetscCall(PCGetDM(pc, &dm)); if (!dm) PetscCall(MatGetDM(a_A, &dm)); if (dm) { @@ -394,6 +406,7 @@ static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A) if (!mnull) { PetscInt bs, NN, MM; + PetscCall(MatGetBlockSize(a_A, &bs)); PetscCall(MatGetLocalSize(a_A, &MM, &NN)); PetscCheck(MM % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MM %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, MM, bs); @@ -467,6 +480,7 @@ static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, Petsc /* count selected -- same as number of cols of P */ for (nSelected = mm = 0; mm < nloc; mm++) { PetscBool ise; + PetscCall(PetscCDIsEmptyAt(agg_llists, mm, &ise)); if (!ise) nSelected++; } @@ -504,6 +518,7 @@ static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, Petsc PetscCall(PetscCDGetHeadPos(agg_llists, lid, &pos)); while (pos) { PetscInt gid1; + PetscCall(PetscCDIntNdGetID(pos, &gid1)); PetscCall(PetscCDGetNextPos(agg_llists, lid, &pos)); @@ -516,7 +531,8 @@ static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, Petsc data = &data_in[flid * bs]; for (ii = 0; ii < bs; ii++) { for (jj = 0; jj < N; jj++) { - PetscReal d = data[jj * data_stride + ii]; + PetscReal d = data[jj * data_stride + ii]; + qqc[jj * Mdata + aggID * bs + ii] = d; } } @@ -538,6 +554,7 @@ static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, Petsc /* get R - column-oriented - output 
B_{i+1} */ { PetscReal *data = &out_data[clid * nSAvec]; + for (jj = 0; jj < nSAvec; jj++) { for (ii = 0; ii < nSAvec; ii++) { PetscCheck(data[jj * out_data_stride + ii] == PETSC_MAX_REAL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "data[jj*out_data_stride + ii] != %e", (double)PETSC_MAX_REAL); @@ -581,7 +598,17 @@ static PetscErrorCode PCView_GAMG_AGG(PC pc, PetscViewer viewer) PetscCall(PetscViewerASCIIPrintf(viewer, " %s aggressive coarsening\n", !pc_gamg_agg->use_aggressive_square_graph ? "MIS-k" : "Square graph")); if (!pc_gamg_agg->use_aggressive_square_graph) PetscCall(PetscViewerASCIIPrintf(viewer, " MIS-%d coarsening on aggressive levels\n", (int)pc_gamg_agg->aggressive_mis_k)); } - PetscCall(PetscViewerASCIIPrintf(viewer, " Number smoothing steps %d\n", (int)pc_gamg_agg->nsmooths)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + if (pc_gamg_agg->crs) PetscCall(MatCoarsenView(pc_gamg_agg->crs, viewer)); + else PetscCall(PetscViewerASCIIPrintf(viewer, "Coarsening algorithm not yet selected\n")); + PetscCall(PetscViewerASCIIPopTab(viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); + PetscCall(PetscViewerASCIIPrintf(viewer, " Number smoothing steps to construct prolongation %d\n", (int)pc_gamg_agg->nsmooths)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -600,9 +627,11 @@ static PetscErrorCode PCGAMGCreateGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat) PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0)); /* Note: depending on the algorithm that will be used for computing the coarse grid points this should pass PETSC_TRUE or PETSC_FALSE as the first argument */ /* MATCOARSENHEM requires numerical weights for edges so ensure they are computed */ + PetscCall(MatCoarsenDestroy(&pc_gamg_agg->crs)); PetscCall(MatCoarsenCreate(PetscObjectComm((PetscObject)pc), &pc_gamg_agg->crs)); PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix)); PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc_gamg_agg->crs, prefix)); + PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc_gamg_agg->crs, "pc_gamg_")); PetscCall(MatCoarsenSetFromOptions(pc_gamg_agg->crs)); PetscCall(MatGetBlockSize(Amat, &bs)); // check for valid indices wrt bs @@ -630,7 +659,7 @@ static PetscErrorCode PCGAMGCreateGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat) if (ishem || pc_gamg_agg->use_low_mem_filter) { PetscCall(MatCreateGraph(Amat, PETSC_TRUE, (vfilter >= 0 || ishem) ? 
PETSC_TRUE : PETSC_FALSE, vfilter, pc_gamg_agg->crs->strength_index_size, pc_gamg_agg->crs->strength_index, a_Gmat)); } else { - // make scalar graph, symetrize if not know to be symetric, scale, but do not filter (expensive) + // make scalar graph, symetrize if not know to be symmetric, scale, but do not filter (expensive) PetscCall(MatCreateGraph(Amat, PETSC_TRUE, PETSC_TRUE, -1, pc_gamg_agg->crs->strength_index_size, pc_gamg_agg->crs->strength_index, a_Gmat)); if (vfilter >= 0) { PetscInt Istart, Iend, ncols, nnz0, nnz1, NN, MM, nloc; @@ -664,9 +693,10 @@ static PetscErrorCode PCGAMGCreateGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat) b = NULL; } else { Mat_MPIAIJ *d = (Mat_MPIAIJ *)Gmat->data; - a = d->A; - b = d->B; - garray = d->garray; + + a = d->A; + b = d->B; + garray = d->garray; } /* Determine upper bound on non-zeros needed in new filtered matrix */ for (PetscInt row = 0; row < nloc; row++) { @@ -698,6 +728,7 @@ static PetscErrorCode PCGAMGCreateGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat) PetscScalar sv = PetscAbs(PetscRealPart(vals[jj])); if (PetscRealPart(sv) > vfilter) { PetscInt cid = idx[jj] + Istart; //diag + nnz1++; if (c != a) cid = garray[idx[jj]]; AA[ncol_row] = vals[jj]; @@ -777,11 +808,13 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet PetscCall(MatCheckCompressedRow(mpimat_1->B, matB_1->nonzerorowcnt, &matB_1->compressedrow, matB_1->i, Gmat_1->rmap->n, -1.0)); for (ix = 0; ix < matB_1->compressedrow.nrows; ix++) { PetscInt lid = matB_1->compressedrow.rindex[ix]; + PetscCheck(lid <= nloc && lid >= -1, PETSC_COMM_SELF, PETSC_ERR_USER, "lid %d out of range. nloc = %d", (int)lid, (int)nloc); if (lid != -1) lid_cprowID_1[lid] = ix; } } else { PetscBool isAIJ; + PetscCall(PetscStrbeginswith(((PetscObject)Gmat_1)->type_name, MATSEQAIJ, &isAIJ)); PetscCheck(isAIJ, PETSC_COMM_SELF, PETSC_ERR_USER, "Require AIJ matrix."); matA_1 = (Mat_SeqAIJ *)Gmat_1->data; @@ -796,6 +829,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet /* set lid_state */ for (lid = 0; lid < nloc; lid++) { PetscCDIntNd *pos; + PetscCall(PetscCDGetHeadPos(aggs_2, lid, &pos)); if (pos) { PetscInt gid1; @@ -811,9 +845,11 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet NState state = lid_state[lid]; if (IS_SELECTED(state)) { PetscCDIntNd *pos; + PetscCall(PetscCDGetHeadPos(aggs_2, lid, &pos)); while (pos) { PetscInt gid1; + PetscCall(PetscCDIntNdGetID(pos, &gid1)); PetscCall(PetscCDGetNextPos(aggs_2, lid, &pos)); if (gid1 >= my0 && gid1 < Iend) lid_parent_gid[gid1 - my0] = (PetscScalar)(lid + my0); @@ -827,6 +863,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet PetscCall(MatCreateVecs(Gmat_1, &tempVec, NULL)); for (kk = 0, j = my0; kk < nloc; kk++, j++) { PetscScalar v = (PetscScalar)lid_state[kk]; + PetscCall(VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES)); } PetscCall(VecAssemblyBegin(tempVec)); @@ -841,6 +878,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet /* get 'cpcol_2_par_orig' */ for (kk = 0, j = my0; kk < nloc; kk++, j++) { PetscScalar v = (PetscScalar)lid_parent_gid[kk]; + PetscCall(VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES)); } PetscCall(VecAssemblyBegin(tempVec)); @@ -854,6 +892,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet } /* ismpi */ for (lid = 0; lid < nloc; lid++) { NState state = lid_state[lid]; + if (IS_SELECTED(state)) { /* steal locals */ ii = matA_1->i; @@ -862,11 +901,13 
@@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet for (j = 0; j < n; j++) { PetscInt lidj = idx[j], sgid; NState statej = lid_state[lidj]; + if (statej == DELETED && (sgid = (PetscInt)PetscRealPart(lid_parent_gid[lidj])) != lid + my0) { /* steal local */ lid_parent_gid[lidj] = (PetscScalar)(lid + my0); /* send this if sgid is not local */ if (sgid >= my0 && sgid < Iend) { /* I'm stealing this local from a local sgid */ PetscInt hav = 0, slid = sgid - my0, gidj = lidj + my0; PetscCDIntNd *pos, *last = NULL; + /* looking for local from local so id_llist_2 works */ PetscCall(PetscCDGetHeadPos(aggs_2, slid, &pos)); while (pos) { @@ -894,6 +935,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet } /* local neighbors */ } else if (state == DELETED /* && lid_cprowID_1 */) { PetscInt sgidold = (PetscInt)PetscRealPart(lid_parent_gid[lid]); + /* see if I have a selected ghost neighbor that will steal me */ if ((ix = lid_cprowID_1[lid]) != -1) { ii = matB_1->compressedrow.i; @@ -902,15 +944,18 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet for (j = 0; j < n; j++) { PetscInt cpid = idx[j]; NState statej = (NState)PetscRealPart(cpcol_1_state[cpid]); + if (IS_SELECTED(statej) && sgidold != (PetscInt)statej) { /* ghost will steal this, remove from my list */ lid_parent_gid[lid] = (PetscScalar)statej; /* send who selected */ if (sgidold >= my0 && sgidold < Iend) { /* this was mine */ PetscInt hav = 0, oldslidj = sgidold - my0; PetscCDIntNd *pos, *last = NULL; + /* remove from 'oldslidj' list */ PetscCall(PetscCDGetHeadPos(aggs_2, oldslidj, &pos)); while (pos) { PetscInt gid; + PetscCall(PetscCDIntNdGetID(pos, &gid)); if (lid + my0 == gid) { /* id_llist_2[lastid] = id_llist_2[flid]; /\* remove lid from oldslidj list *\/ */ @@ -956,6 +1001,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet /* get 'cpcol_2_gid' */ for (kk = 0, j = my0; kk < nloc; kk++, j++) { PetscScalar v = (PetscScalar)j; + PetscCall(VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES)); } PetscCall(VecAssemblyBegin(tempVec)); @@ -970,11 +1016,14 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet PetscCall(PCGAMGHashTableCreate(2 * nghost_2 + 1, &gid_cpid)); for (cpid = 0; cpid < nghost_2; cpid++) { NState state = (NState)PetscRealPart(cpcol_2_state[cpid]); + if (state == DELETED) { PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]); PetscInt sgid_old = (PetscInt)PetscRealPart(cpcol_2_par_orig[cpid]); + if (sgid_old == -1 && sgid_new != -1) { PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]); + PetscCall(PCGAMGHashTableAdd(&gid_cpid, gid, cpid)); } } @@ -1009,6 +1058,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet /* look at ghosts, see if they changed - and it */ for (cpid = 0; cpid < nghost_2; cpid++) { PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]); + if (sgid_new >= my0 && sgid_new < Iend) { /* this is mine */ PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]); PetscInt slid_new = sgid_new - my0, hav = 0; @@ -1018,6 +1068,7 @@ static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, Pet PetscCall(PetscCDGetHeadPos(aggs_2, slid_new, &pos)); while (pos) { PetscInt gidj; + PetscCall(PetscCDIntNdGetID(pos, &gidj)); PetscCall(PetscCDGetNextPos(aggs_2, slid_new, &pos)); @@ -1088,6 +1139,7 @@ static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData 
PetscCall(MatGetOwnershipRange(Gmat1, &Istart, &Iend)); for (Ii = 0; Ii < nloc; Ii++) { PetscInt nc; + PetscCall(MatGetRow(Gmat1, Istart + Ii, &nc, NULL, NULL)); degree[Ii] = nc; PetscCall(MatRestoreRow(Gmat1, Istart + Ii, &nc, NULL, NULL)); @@ -1096,7 +1148,8 @@ static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData PetscCall(PetscRandomGetValueReal(random, &hashfact)); iSwapIndex = (PetscInt)(hashfact * nloc) % nloc; if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) { - PetscInt iTemp = permute[iSwapIndex]; + PetscInt iTemp = permute[iSwapIndex]; + permute[iSwapIndex] = permute[Ii]; permute[Ii] = iTemp; iTemp = degree[iSwapIndex]; @@ -1121,6 +1174,7 @@ static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData else PetscCall(MatCoarsenSetType(pc_gamg_agg->crs, MATCOARSENMIS)); // old MIS -- side effect } else if (pc_gamg_agg->use_aggressive_square_graph && pc_gamg_agg->aggressive_coarsening_levels > 0) { // we reset the MIS const char *prefix; + PetscCall(PetscObjectGetOptionsPrefix((PetscObject)a_pc, &prefix)); PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc_gamg_agg->crs, prefix)); PetscCall(MatCoarsenSetFromOptions(pc_gamg_agg->crs)); // get the default back on non-aggressive levels when square graph switched to old MIS @@ -1129,9 +1183,7 @@ static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData PetscCall(MatCoarsenSetStrictAggs(pc_gamg_agg->crs, PETSC_TRUE)); PetscCall(MatCoarsenSetGreedyOrdering(pc_gamg_agg->crs, perm)); PetscCall(MatCoarsenApply(pc_gamg_agg->crs)); - PetscCall(MatCoarsenViewFromOptions(pc_gamg_agg->crs, NULL, "-mat_coarsen_view")); PetscCall(MatCoarsenGetData(pc_gamg_agg->crs, agg_lists)); /* output */ - PetscCall(MatCoarsenDestroy(&pc_gamg_agg->crs)); PetscCall(ISDestroy(&perm)); PetscCall(PetscFree2(permute, degree)); @@ -1139,6 +1191,7 @@ static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData if (Gmat2 != Gmat1) { // square graph, we need ghosts for selected PetscCoarsenData *llist = *agg_lists; + PetscCall(fixAggregatesWithSquare(a_pc, Gmat2, Gmat1, *agg_lists)); PetscCall(MatDestroy(&Gmat1)); *a_Gmat1 = Gmat2; /* output */ @@ -1149,7 +1202,7 @@ static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData } /* - PCGAMGProlongator_AGG + PCGAMGConstructProlongator_AGG Input Parameter: . pc - this @@ -1159,7 +1212,7 @@ static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData Output Parameter: . a_P_out - prolongation operator to the next level */ -static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *agg_lists, Mat *a_P_out) +static PetscErrorCode PCGAMGConstructProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *agg_lists, Mat *a_P_out) { PC_MG *mg = (PC_MG *)pc->data; PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx; @@ -1182,11 +1235,12 @@ static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *a nloc = (Iend - Istart) / bs; my0 = Istart / bs; PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT ") not divisible by bs %" PetscInt_FMT, Iend, Istart, bs); - PetscCall(PetscCDGetMat(agg_lists, &Gmat)); // get auxilary matrix for ghost edges for size > 1 + PetscCall(PetscCDGetMat(agg_lists, &Gmat)); // get auxiliary matrix for ghost edges for size > 1 /* get 'nLocalSelected' */ for (ii = 0, nLocalSelected = 0; ii < nloc; ii++) { PetscBool ise; + /* filter out singletons 0 or 1? 
*/ PetscCall(PetscCDIsEmptyAt(agg_lists, ii, &ise)); if (!ise) nLocalSelected++; @@ -1227,11 +1281,13 @@ static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *a PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0)); if (size > 1) { /* get ghost null space data */ PetscReal *tmp_gdata, *tmp_ldata, *tp2; + PetscCall(PetscMalloc1(nloc, &tmp_ldata)); for (jj = 0; jj < col_bs; jj++) { for (kk = 0; kk < bs; kk++) { PetscInt ii, stride; const PetscReal *tp = PetscSafePointerPlusOffset(pc_gamg->data, jj * bs * nloc + kk); + for (ii = 0; ii < nloc; ii++, tp += bs) tmp_ldata[ii] = *tp; PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, tmp_ldata, &stride, &tmp_gdata)); @@ -1274,6 +1330,7 @@ static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *a PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0)); { PetscReal *data_out = NULL; + PetscCall(formProl0(agg_lists, bs, col_bs, myCrs0, nbnodes, data_w_ghost, flid_fgid, &data_out, Prol)); PetscCall(PetscFree(pc_gamg->data)); @@ -1286,14 +1343,14 @@ static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *a PetscCall(PetscFree(flid_fgid)); *a_P_out = Prol; /* out */ - PetscCall(MatViewFromOptions(Prol, NULL, "-view_P")); + PetscCall(MatViewFromOptions(Prol, NULL, "-pc_gamg_agg_view_initial_prolongation")); PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0)); PetscFunctionReturn(PETSC_SUCCESS); } /* - PCGAMGOptProlongator_AGG + PCGAMGOptimizeProlongator_AGG - given the initial prolongator optimizes it by smoothed aggregation pc_gamg_agg->nsmooths times Input Parameter: . pc - this @@ -1301,7 +1358,7 @@ static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *a In/Output Parameter: . 
a_P - prolongation operator to the next level */ -static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc, Mat Amat, Mat *a_P) +static PetscErrorCode PCGAMGOptimizeProlongator_AGG(PC pc, Mat Amat, Mat *a_P) { PC_MG *mg = (PC_MG *)pc->data; PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx; @@ -1338,6 +1395,7 @@ static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc, Mat Amat, Mat *a_P) PetscCall(KSPAppendOptionsPrefix(eksp, "pc_gamg_esteig_")); { PetscBool isset, sflg; + PetscCall(MatIsSPDKnown(Amat, &isset, &sflg)); if (isset && sflg) PetscCall(KSPSetType(eksp, KSPCG)); } @@ -1377,44 +1435,72 @@ static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc, Mat Amat, Mat *a_P) } /* smooth P0 */ - for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) { - Mat tMat; + if (pc_gamg_agg->nsmooths > 0) { Vec diag; - PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0)); + /* TODO: Set a PCFailedReason and exit the building of the AMG preconditioner */ + PetscCheck(emax != 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Computed maximum singular value as zero"); - /* smooth P1 := (I - omega/lam D^{-1}A)P0 */ - PetscCall(PetscLogEventBegin(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0)); - PetscCall(MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat)); - PetscCall(PetscLogEventEnd(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0)); - PetscCall(MatProductClear(tMat)); PetscCall(MatCreateVecs(Amat, &diag, NULL)); PetscCall(MatGetDiagonal(Amat, diag)); /* effectively PCJACOBI */ PetscCall(VecReciprocal(diag)); - PetscCall(MatDiagonalScale(tMat, diag, NULL)); - PetscCall(VecDestroy(&diag)); - /* TODO: Set a PCFailedReason and exit the building of the AMG preconditioner */ - PetscCheck(emax != 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Computed maximum singular value as zero"); - /* TODO: Document the 1.4 and don't hardwire it in this routine */ - alpha = -1.4 / emax; - - PetscCall(MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN)); - PetscCall(MatDestroy(&Prol)); - Prol = tMat; - PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0)); + for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) { + Mat tMat; + + PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0)); + /* + Smooth aggregation on the prolongator + + P_{i} := (I - 1.4/emax D^{-1}A) P_i\{i-1} + */ + PetscCall(PetscLogEventBegin(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0)); + PetscCall(MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat)); + PetscCall(PetscLogEventEnd(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0)); + PetscCall(MatProductClear(tMat)); + PetscCall(MatDiagonalScale(tMat, diag, NULL)); + + /* TODO: Document the 1.4 and don't hardwire it in this routine */ + alpha = -1.4 / emax; + PetscCall(MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN)); + PetscCall(MatDestroy(&Prol)); + Prol = tMat; + PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0)); + } + PetscCall(VecDestroy(&diag)); } PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0)); + PetscCall(MatViewFromOptions(Prol, NULL, "-pc_gamg_agg_view_prolongation")); *a_P = Prol; PetscFunctionReturn(PETSC_SUCCESS); } -/* - PCCreateGAMG_AGG +/*MC + PCGAMGAGG - Smooth aggregation, {cite}`vanek1996algebraic`, {cite}`vanek2001convergence`, variant of PETSc's algebraic multigrid (`PCGAMG`) preconditioner - Input Parameter: - . 
pc - -*/ + Options Database Keys: ++ -pc_gamg_agg_nsmooths - number of smoothing steps to use with smooth aggregation to construct prolongation +. -pc_gamg_aggressive_coarsening - number of aggressive coarsening (MIS-2) levels from finest. +. -pc_gamg_aggressive_square_graph - Use square graph (A'A) or MIS-k (k=2) for aggressive coarsening +. -pc_gamg_mis_k_minimum_degree_ordering - Use minimum degree ordering in greedy MIS algorithm +. -pc_gamg_pc_gamg_asm_hem_aggs - Number of HEM aggregation steps for ASM smoother +- -pc_gamg_aggressive_mis_k - Number (k) distance in MIS coarsening (>2 is 'aggressive') + + Level: intermediate + + Notes: + To obtain good performance for `PCGAMG` for vector valued problems you must + call `MatSetBlockSize()` to indicate the number of degrees of freedom per grid point. + Call `MatSetNearNullSpace()` (or `PCSetCoordinates()` if solving the equations of elasticity) to indicate the near null space of the operator + + The many options for `PCMG` and `PCGAMG` such as controlling the smoothers on each level etc. also work for `PCGAMGAGG` + +.seealso: `PCGAMG`, [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCCreate()`, `PCSetType()`, + `MatSetBlockSize()`, `PCMGType`, `PCSetCoordinates()`, `MatSetNearNullSpace()`, `PCGAMGSetType()`, + `PCGAMGAGG`, `PCGAMGGEO`, `PCGAMGCLASSICAL`, `PCGAMGSetProcEqLim()`, `PCGAMGSetCoarseEqLim()`, `PCGAMGSetRepartition()`, `PCGAMGRegister()`, + `PCGAMGSetReuseInterpolation()`, `PCGAMGASMSetUseAggs()`, `PCGAMGSetParallelCoarseGridSolve()`, `PCGAMGSetNlevels()`, `PCGAMGSetThreshold()`, + `PCGAMGGetType()`, `PCGAMGSetUseSAEstEig()` +M*/ PetscErrorCode PCCreateGAMG_AGG(PC pc) { PC_MG *mg = (PC_MG *)pc->data; @@ -1433,8 +1519,8 @@ PetscErrorCode PCCreateGAMG_AGG(PC pc) /* set internal function pointers */ pc_gamg->ops->creategraph = PCGAMGCreateGraph_AGG; pc_gamg->ops->coarsen = PCGAMGCoarsen_AGG; - pc_gamg->ops->prolongator = PCGAMGProlongator_AGG; - pc_gamg->ops->optprolongator = PCGAMGOptProlongator_AGG; + pc_gamg->ops->prolongator = PCGAMGConstructProlongator_AGG; + pc_gamg->ops->optprolongator = PCGAMGOptimizeProlongator_AGG; pc_gamg->ops->createdefaultdata = PCSetData_AGG; pc_gamg->ops->view = PCView_GAMG_AGG; diff --git a/src/ksp/pc/impls/gamg/classical.c b/src/ksp/pc/impls/gamg/classical.c index 8de71fda1d8..9e694b802e4 100644 --- a/src/ksp/pc/impls/gamg/classical.c +++ b/src/ksp/pc/impls/gamg/classical.c @@ -10,7 +10,7 @@ typedef struct { PetscInt nsmooths; /* number of jacobi smoothings on the prolongator */ } PC_GAMG_Classical; -/*@C +/*@ PCGAMGClassicalSetType - Sets the type of classical interpolation to use with `PCGAMG` Collective @@ -34,7 +34,7 @@ PetscErrorCode PCGAMGClassicalSetType(PC pc, PCGAMGClassicalType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGAMGClassicalGetType - Gets the type of classical interpolation to use with `PCGAMG` Collective @@ -162,13 +162,17 @@ static PetscErrorCode PCGAMGCreateGraph_Classical(PC pc, Mat A, Mat *G) static PetscErrorCode PCGAMGCoarsen_Classical(PC pc, Mat *G, PetscCoarsenData **agg_lists) { - MatCoarsen crs; - MPI_Comm fcomm = ((PetscObject)pc)->comm; + MatCoarsen crs; + MPI_Comm fcomm = ((PetscObject)pc)->comm; + const char *prefix; PetscFunctionBegin; PetscCheck(G, fcomm, PETSC_ERR_ARG_WRONGSTATE, "Must set Graph in PC in PCGAMG before coarsening"); PetscCall(MatCoarsenCreate(fcomm, &crs)); + PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix)); + 
PetscCall(PetscObjectSetOptionsPrefix((PetscObject)crs, prefix)); + PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)crs, "pc_gamg_")); PetscCall(MatCoarsenSetFromOptions(crs)); PetscCall(MatCoarsenSetAdjacency(crs, *G)); PetscCall(MatCoarsenSetStrictAggs(crs, PETSC_TRUE)); diff --git a/src/ksp/pc/impls/gamg/ftn-custom/makefile b/src/ksp/pc/impls/gamg/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ksp/pc/impls/gamg/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ksp/pc/impls/gamg/ftn-custom/zgamgf.c b/src/ksp/pc/impls/gamg/ftn-custom/zgamgf.c deleted file mode 100644 index 031423cfca2..00000000000 --- a/src/ksp/pc/impls/gamg/ftn-custom/zgamgf.c +++ /dev/null @@ -1,32 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pcgamggettype_ PCGAMGGETTYPE - #define pcgamgsettype_ PCGAMGSETTYPE - #define pcgamgsetesteigksptype_ PCGAMGSETESTEIGKSPTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pcgamggettype_ pcgamggettype - #define pcgamgsettype_ pcgamgsettype - #define pcgamgsetesteigksptype_ pcgamgsetesteigksptype -#endif - -PETSC_EXTERN void pcgamggettype_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PCGAMGGetType(*pc, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void pcgamgsettype_(PC *pc, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = PCGAMGSetType(*pc, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/ksp/pc/impls/gamg/gamg.c b/src/ksp/pc/impls/gamg/gamg.c index 3fc3dbaa393..5754091fbd0 100644 --- a/src/ksp/pc/impls/gamg/gamg.c +++ b/src/ksp/pc/impls/gamg/gamg.c @@ -39,6 +39,7 @@ static PetscErrorCode PCReset_GAMG(PC pc) pc_gamg->emin = 0; pc_gamg->emax = 0; PetscCall(PCReset_MG(pc)); + PetscCall(MatCoarsenDestroy(&pc_gamg->asm_crs)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -624,8 +625,7 @@ static PetscErrorCode PCSetUp_GAMG(PC pc) /* Get A_i and R_i */ for (level = 0, Aarr[0] = Pmat, nactivepe = size; level < (pc_gamg->Nlevels - 1) && (level == 0 || M > pc_gamg->coarse_eq_limit); level++) { pc_gamg->current_level = level; - PetscCheck(level < PETSC_MG_MAXLEVELS - 2, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many levels %" PetscInt_FMT, level + 1); - level1 = level + 1; + level1 = level + 1; #if defined(GAMG_STAGES) if (!gamg_stages[level]) { char str[32]; @@ -716,25 +716,28 @@ static PetscErrorCode PCSetUp_GAMG(PC pc) PetscCall(PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level])); PetscCall(PetscInfo(pc, "%d: %" PetscInt_FMT " ASM local domains, bs = %d\n", (int)level, nASMBlocksArr[level], (int)bs)); } else if (pc_gamg->asm_hem_aggs) { - MatCoarsen crs; const char *prefix; PetscInt bs; + + /* + Do not use aggs created for defining coarser problems, instead create aggs specifically to use + to define PCASM blocks. 
+ */ PetscCall(PetscCDGetMat(agg_lists, &mat)); if (mat == Gmat) PetscCall(PetscCDClearMat(agg_lists)); // take the Mat away from the list (yuck) PetscCall(PetscCDDestroy(agg_lists)); PetscCall(PetscInfo(pc, "HEM ASM passes = %d\n", (int)pc_gamg->asm_hem_aggs)); - PetscCall(MatCoarsenCreate(PetscObjectComm((PetscObject)pc), &crs)); + PetscCall(MatCoarsenDestroy(&pc_gamg->asm_crs)); + PetscCall(MatCoarsenCreate(PetscObjectComm((PetscObject)pc), &pc_gamg->asm_crs)); PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix)); - PetscCall(PetscObjectSetOptionsPrefix((PetscObject)crs, prefix)); - PetscCall(MatCoarsenSetFromOptions(crs)); // get strength args - PetscCall(MatCoarsenSetType(crs, MATCOARSENHEM)); - PetscCall(MatCoarsenSetMaximumIterations(crs, pc_gamg->asm_hem_aggs)); - PetscCall(MatCoarsenSetAdjacency(crs, Gmat)); - PetscCall(MatCoarsenSetStrictAggs(crs, PETSC_TRUE)); - PetscCall(MatCoarsenApply(crs)); - PetscCall(MatCoarsenViewFromOptions(crs, NULL, "-agg_hem_mat_coarsen_view")); - PetscCall(MatCoarsenGetData(crs, &agg_lists)); /* output */ - PetscCall(MatCoarsenDestroy(&crs)); + PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc_gamg->asm_crs, prefix)); + PetscCall(MatCoarsenSetFromOptions(pc_gamg->asm_crs)); // get strength args + PetscCall(MatCoarsenSetType(pc_gamg->asm_crs, MATCOARSENHEM)); + PetscCall(MatCoarsenSetMaximumIterations(pc_gamg->asm_crs, pc_gamg->asm_hem_aggs)); + PetscCall(MatCoarsenSetAdjacency(pc_gamg->asm_crs, Gmat)); + PetscCall(MatCoarsenSetStrictAggs(pc_gamg->asm_crs, PETSC_TRUE)); + PetscCall(MatCoarsenApply(pc_gamg->asm_crs)); + PetscCall(MatCoarsenGetData(pc_gamg->asm_crs, &agg_lists)); /* output */ // create aggregates PetscCall(MatGetBlockSizes(Aarr[level], &bs, NULL)); // row block size PetscCall(PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level])); @@ -766,6 +769,7 @@ static PetscErrorCode PCSetUp_GAMG(PC pc) PetscCheck(!is_last, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Is last ?"); if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE; if (level1 == pc_gamg->Nlevels - 1) is_last = PETSC_TRUE; + if (level == PETSC_MG_MAXLEVELS - 2) is_last = PETSC_TRUE; PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_LEVEL], 0, 0, 0, 0)); PetscCall(pc_gamg->ops->createlevel(pc, Aarr[level], cr_bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last)); PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_LEVEL], 0, 0, 0, 0)); @@ -785,6 +789,10 @@ static PetscErrorCode PCSetUp_GAMG(PC pc) PetscCall(PetscInfo(pc, "%s: HARD stop of coarsening on level %" PetscInt_FMT ". Grid too small: %" PetscInt_FMT " block nodes\n", ((PetscObject)pc)->prefix, level, M / bs)); level++; break; + } else if (level == PETSC_MG_MAXLEVELS - 2) { /* stop if we are limited by PC_MG_MAXLEVELS */ + PetscCall(PetscInfo(pc, "%s: HARD stop of coarsening on level %" PetscInt_FMT ". PC_MG_MAXLEVELS reached\n", ((PetscObject)pc)->prefix, level)); + level++; + break; } } /* levels */ PetscCall(PetscFree(pc_gamg->data)); @@ -979,7 +987,10 @@ PetscErrorCode PCDestroy_GAMG(PC pc) `PCGAMG` will reduce the number of MPI processes used directly on the coarse grids so that there are around equations on each process that has degrees of freedom -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetCoarseEqLim()`, `PCGAMGSetRankReductionFactors()`, `PCGAMGSetRepartition()` + Developer Note: + Should be named `PCGAMGSetProcessEquationLimit()`. 
+ +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetCoarseEqLim()`, `PCGAMGSetRankReductionFactors()`, `PCGAMGSetRepartition()` @*/ PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n) { @@ -1017,7 +1028,8 @@ static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n) For example -pc_gamg_coarse_eq_limit 1000 will stop coarsening once the coarse grid has less than 1000 unknowns. -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetProcEqLim()`, `PCGAMGSetRankReductionFactors()`, `PCGAMGSetRepartition()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetProcEqLim()`, `PCGAMGSetRankReductionFactors()`, `PCGAMGSetRepartition()`, + `PCGAMGSetParallelCoarseGridSolve()` @*/ PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n) { @@ -1052,9 +1064,10 @@ static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n) Level: intermediate Note: - This will generally improve the loading balancing of the work on each level + This will generally improve the loading balancing of the work on each level so the solves will be faster but it adds to the + preconditioner setup time. -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetProcEqLim()`, `PCGAMGSetRankReductionFactors()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetProcEqLim()`, `PCGAMGSetRankReductionFactors()` @*/ PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n) { @@ -1091,11 +1104,12 @@ static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n) Notes: Smoothed aggregation constructs the smoothed prolongator $P = (I - \omega D^{-1} A) T$ where $T$ is the tentative prolongator and $D$ is the diagonal of $A$. Eigenvalue estimates (based on a few `PCCG` or `PCGMRES` iterations) are computed to choose $\omega$ so that this is a stable smoothing operation. - If Chebyshev with Jacobi (diagonal) preconditioning is used for smoothing, then the eigenvalue estimates can be reused during the solution process - This option is only used when the smoother uses Jacobi, and should be turned off if a different `PCJacobiType` is used. + If `KSPCHEBYSHEV` with `PCJACOBI` (diagonal) preconditioning is used for smoothing on the finest level, then the eigenvalue estimates + can be reused during the solution process. + This option is only used when the smoother uses `PCJACOBI`, and should be turned off when a different `PCJacobiType` is used. It became default in PETSc 3.17. -.seealso: [](ch_ksp), `PCGAMG`, `KSPChebyshevSetEigenvalues()`, `KSPChebyshevEstEigSet()`, `PCGAMGSetRecomputeEstEig()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `KSPChebyshevSetEigenvalues()`, `KSPChebyshevEstEigSet()`, `PCGAMGSetRecomputeEstEig()` @*/ PetscErrorCode PCGAMGSetUseSAEstEig(PC pc, PetscBool b) { @@ -1116,20 +1130,24 @@ static PetscErrorCode PCGAMGSetUseSAEstEig_GAMG(PC pc, PetscBool b) } /*@ - PCGAMGSetRecomputeEstEig - Set flag for Chebyshev smoothers to recompute the eigen estimates + PCGAMGSetRecomputeEstEig - Set flag for Chebyshev smoothers to recompute the eigen estimates when a new matrix is used Collective Input Parameters: + pc - the preconditioner context -- b - flag +- b - flag, default is `PETSC_TRUE` Options Database Key: . 
-pc_gamg_recompute_esteig - use the eigen estimate Level: advanced -.seealso: [](ch_ksp), `PCGAMG`, `KSPChebyshevSetEigenvalues()`, `KSPChebyshevEstEigSet()` + Note: + If the matrix changes only slightly in a new solve using ``PETSC_FALSE`` will save time in the setting up of the preconditioner + and may not affect the solution time much. + +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `KSPChebyshevSetEigenvalues()`, `KSPChebyshevEstEigSet()` @*/ PetscErrorCode PCGAMGSetRecomputeEstEig(PC pc, PetscBool b) { @@ -1164,7 +1182,7 @@ static PetscErrorCode PCGAMGSetRecomputeEstEig_GAMG(PC pc, PetscBool b) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetUseSAEstEig()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetUseSAEstEig()` @*/ PetscErrorCode PCGAMGSetEigenvalues(PC pc, PetscReal emax, PetscReal emin) { @@ -1205,7 +1223,7 @@ static PetscErrorCode PCGAMGSetEigenvalues_GAMG(PC pc, PetscReal emax, PetscReal May negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows rebuilding the preconditioner quicker. -.seealso: [](ch_ksp), `PCGAMG` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG` @*/ PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n) { @@ -1226,8 +1244,8 @@ static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n) } /*@ - PCGAMGASMSetUseAggs - Have the `PCGAMG` smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner - used as the smoother + PCGAMGASMSetUseAggs - Have the `PCGAMG` smoother on each level use `PCASM` where the aggregates defined by the coarsening process are + the subdomains for the additive Schwarz preconditioner used as the smoother Collective @@ -1240,7 +1258,10 @@ static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCASM`, `PCSetType` + Note: + This option automatically sets the preconditioner on the levels to be `PCASM`. + +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCASM`, `PCSetType` @*/ PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg) { @@ -1270,11 +1291,11 @@ static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg) - flg - `PETSC_TRUE` to not force coarse grid onto one processor Options Database Key: -. -pc_gamg_parallel_coarse_grid_solver - use a parallel coarse grid direct solver +. 
-pc_gamg_parallel_coarse_grid_solver - use a parallel coarse grid solver Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetCoarseGridLayoutType()`, `PCGAMGSetCpuPinCoarseGrids()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetCoarseGridLayoutType()`, `PCGAMGSetCpuPinCoarseGrids()`, `PCGAMGSetRankReductionFactors()` @*/ PetscErrorCode PCGAMGSetParallelCoarseGridSolve(PC pc, PetscBool flg) { @@ -1308,7 +1329,7 @@ static PetscErrorCode PCGAMGSetParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetCoarseGridLayoutType()`, `PCGAMGSetParallelCoarseGridSolve()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetCoarseGridLayoutType()`, `PCGAMGSetParallelCoarseGridSolve()` @*/ PetscErrorCode PCGAMGSetCpuPinCoarseGrids(PC pc, PetscBool flg) { @@ -1342,7 +1363,7 @@ static PetscErrorCode PCGAMGSetCpuPinCoarseGrids_GAMG(PC pc, PetscBool flg) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetParallelCoarseGridSolve()`, `PCGAMGSetCpuPinCoarseGrids()`, `PCGAMGLayoutType`, `PCGAMG_LAYOUT_COMPACT`, `PCGAMG_LAYOUT_SPREAD` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetParallelCoarseGridSolve()`, `PCGAMGSetCpuPinCoarseGrids()`, `PCGAMGLayoutType`, `PCGAMG_LAYOUT_COMPACT`, `PCGAMG_LAYOUT_SPREAD` @*/ PetscErrorCode PCGAMGSetCoarseGridLayoutType(PC pc, PCGAMGLayoutType flg) { @@ -1379,7 +1400,7 @@ static PetscErrorCode PCGAMGSetCoarseGridLayoutType_GAMG(PC pc, PCGAMGLayoutType Developer Notes: Should be called `PCGAMGSetMaximumNumberlevels()` and possible be shared with `PCMG` -.seealso: [](ch_ksp), `PCGAMG` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG` @*/ PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n) { @@ -1416,7 +1437,7 @@ static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n) Developer Notes: Should be called `PCGAMGSetMaximumNumberlevels()` and possible be shared with `PCMG` -.seealso: [](ch_ksp), `PCGAMG` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG` @*/ PetscErrorCode PCGAMGASMSetHEM(PC pc, PetscInt n) { @@ -1459,7 +1480,7 @@ static PetscErrorCode PCGAMGASMSetHEM_GAMG(PC pc, PetscInt n) In this case, `PCGAMGSetThresholdScale()` must be called before `PCGAMGSetThreshold()`. If `n` is greater than the total number of levels, the excess entries in threshold will not be used. 
-.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetMinDegreeOrderingMISk()`, `PCGAMGSetThresholdScale()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetMinDegreeOrderingMISk()`, `PCGAMGSetThresholdScale()` @*/ PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n) { @@ -1497,7 +1518,7 @@ static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetProcEqLim()`, `PCGAMGSetCoarseEqLim()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetProcEqLim()`, `PCGAMGSetCoarseEqLim()`, `PCGAMGSetParallelCoarseGridSolve()` @*/ PetscErrorCode PCGAMGSetRankReductionFactors(PC pc, PetscInt v[], PetscInt n) { @@ -1538,7 +1559,7 @@ static PetscErrorCode PCGAMGSetRankReductionFactors_GAMG(PC pc, PetscInt v[], Pe The initial threshold (for an arbitrary number of levels starting from the finest) can be set with `PCGAMGSetThreshold()`. This scaling is used for each subsequent coarsening, but must be called before `PCGAMGSetThreshold()`. -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()` @*/ PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v) { @@ -1558,7 +1579,7 @@ static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGAMGSetType - Set the type of algorithm `PCGAMG` should use Collective @@ -1568,11 +1589,11 @@ static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v) - type - `PCGAMGAGG`, `PCGAMGGEO`, or `PCGAMGCLASSICAL` Options Database Key: -. -pc_gamg_type - type of algebraic multigrid to apply - only agg supported +. 
-pc_gamg_type - type of algebraic multigrid to apply - only agg is supported Level: intermediate -.seealso: [](ch_ksp), `PCGAMGGetType()`, `PCGAMG`, `PCGAMGType` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMGGetType()`, `PCGAMG`, `PCGAMGType` @*/ PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) { @@ -1582,7 +1603,7 @@ PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGAMGGetType - Get the type of algorithm `PCGAMG` will use Collective @@ -1595,7 +1616,7 @@ PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) Level: intermediate -.seealso: [](ch_ksp), `PCGAMG`, `PCGAMGSetType()`, `PCGAMGType` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetType()`, `PCGAMGType` @*/ PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type) { @@ -1655,8 +1676,18 @@ static PetscErrorCode PCView_GAMG(PC pc, PetscViewer viewer) for (PetscInt i = 0; i < mg->nlevels; i++) PetscCall(PetscViewerASCIIPrintf(viewer, " %g", (double)pc_gamg->threshold[i])); PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold scaling factor for each level not specified = %g\n", (double)pc_gamg->threshold_scale)); - if (pc_gamg->use_aggs_in_asm) PetscCall(PetscViewerASCIIPrintf(viewer, " Using aggregates from coarsening process to define subdomains for PCASM\n")); // this take presedence - else if (pc_gamg->asm_hem_aggs) PetscCall(PetscViewerASCIIPrintf(viewer, " Using aggregates made with %d applications of heavy edge matching (HEM) to define subdomains for PCASM\n", (int)pc_gamg->asm_hem_aggs)); + if (pc_gamg->use_aggs_in_asm) PetscCall(PetscViewerASCIIPrintf(viewer, " Using aggregates from GAMG coarsening process to define subdomains for PCASM\n")); // this take presedence + else if (pc_gamg->asm_hem_aggs) { + PetscCall(PetscViewerASCIIPrintf(viewer, " Using aggregates made with %d applications of heavy edge matching (HEM) to define subdomains for PCASM\n", (int)pc_gamg->asm_hem_aggs)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(MatCoarsenView(pc_gamg->asm_crs, viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); + } if (pc_gamg->use_parallel_coarse_grid_solver) PetscCall(PetscViewerASCIIPrintf(viewer, " Using parallel coarse grid solver (all coarse grid equations not put on one process)\n")); if (pc_gamg->injection_index_size) { PetscCall(PetscViewerASCIIPrintf(viewer, " Using injection restriction/prolongation on first level, dofs:")); @@ -1684,7 +1715,7 @@ static PetscErrorCode PCView_GAMG(PC pc, PetscViewer viewer) Level: intermediate -.seealso: `PCGAMG` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), `PCGAMG` @*/ PetscErrorCode PCGAMGSetInjectionIndex(PC pc, PetscInt n, PetscInt idx[]) { @@ -1789,7 +1820,7 @@ static PetscErrorCode PCSetFromOptions_GAMG(PC pc, PetscOptionItems *PetscOption - -pc_gamg_threshold_scale - Scaling of threshold on each coarser grid if not specified Options Database Keys for Aggregation: -+ -pc_gamg_agg_nsmooths - number of smoothing steps to use with smooth aggregation ++ -pc_gamg_agg_nsmooths - number of smoothing steps to use with smooth aggregation 
to construct prolongation . -pc_gamg_aggressive_coarsening - number of aggressive coarsening (MIS-2) levels from finest. . -pc_gamg_aggressive_square_graph - Use square graph (A'A) or MIS-k (k=2) for aggressive coarsening . -pc_gamg_mis_k_minimum_degree_ordering - Use minimum degree ordering in greedy MIS algorithm @@ -1811,10 +1842,11 @@ static PetscErrorCode PCSetFromOptions_GAMG(PC pc, PetscOptionItems *PetscOption The many options for `PCMG` also work directly for `PCGAMG` such as controlling the smoothers on each level etc. - See [the Users Manual section on PCGAMG](sec_amg) and [the Users Manual section on PCMG](sec_mg)for more details. - -.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `MatSetBlockSize()`, `PCMGType`, `PCSetCoordinates()`, `MatSetNearNullSpace()`, `PCGAMGSetType()`, `PCGAMGAGG`, `PCGAMGGEO`, `PCGAMGCLASSICAL`, `PCGAMGSetProcEqLim()`, - `PCGAMGSetCoarseEqLim()`, `PCGAMGSetRepartition()`, `PCGAMGRegister()`, `PCGAMGSetReuseInterpolation()`, `PCGAMGASMSetUseAggs()`, `PCGAMGSetParallelCoarseGridSolve()`, `PCGAMGSetNlevels()`, `PCGAMGSetThreshold()`, `PCGAMGGetType()`, `PCGAMGSetUseSAEstEig()` +.seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCCreate()`, `PCSetType()`, + `MatSetBlockSize()`, + `PCMGType`, `PCSetCoordinates()`, `MatSetNearNullSpace()`, `PCGAMGSetType()`, `PCGAMGAGG`, `PCGAMGGEO`, `PCGAMGCLASSICAL`, `PCGAMGSetProcEqLim()`, + `PCGAMGSetCoarseEqLim()`, `PCGAMGSetRepartition()`, `PCGAMGRegister()`, `PCGAMGSetReuseInterpolation()`, `PCGAMGASMSetUseAggs()`, + `PCGAMGSetParallelCoarseGridSolve()`, `PCGAMGSetNlevels()`, `PCGAMGSetThreshold()`, `PCGAMGGetType()`, `PCGAMGSetUseSAEstEig()` M*/ PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc) { @@ -1984,7 +2016,7 @@ PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC)) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGAMGCreateGraph - Creates a graph that is used by the ``PCGAMGType`` in the coarsening process Input Parameters: diff --git a/src/ksp/pc/impls/gamg/geo.c b/src/ksp/pc/impls/gamg/geo.c index 40431756c83..f7d77dc4d2d 100644 --- a/src/ksp/pc/impls/gamg/geo.c +++ b/src/ksp/pc/impls/gamg/geo.c @@ -217,13 +217,13 @@ static PetscErrorCode triangulateAndFormProl(IS selected_2, PetscInt data_stride PetscCall(PetscSNPrintf(fname, PETSC_STATIC_ARRAY_LENGTH(fname), "C%d_%d.poly", level, rank)); file = fopen(fname, "w"); - /*First line: <# of vertices> <# of attributes> <# of boundary markers (0 or 1)>*/ + /* First line: <# of vertices> <# of attributes> <# of boundary markers (0 or 1)> */ fprintf(file, "%d %d %d %d\n", in.numberofpoints, 2, 0, 0); - /*Following lines: */ + /* Following lines: */ for (kk = 0, sid = 0; kk < in.numberofpoints; kk++, sid += 2) fprintf(file, "%d %e %e\n", kk, in.pointlist[sid], in.pointlist[sid + 1]); - /*One line: <# of segments> <# of boundary markers (0 or 1)> */ + /* One line: <# of segments> <# of boundary markers (0 or 1)> */ fprintf(file, "%d %d\n", 0, 0); - /*Following lines: [boundary marker] */ + /* Following lines: [boundary marker] */ /* One line: <# of holes> */ fprintf(file, "%d\n", 0); /* Following lines: */ @@ -245,7 +245,7 @@ static PetscErrorCode triangulateAndFormProl(IS selected_2, PetscInt data_stride /* First line: <# of vertices> <# of attributes> <# of boundary markers (0 or 1)> */ /* fprintf(file, "%d %d %d %d\n",in.numberofpoints,2,0,0); */ fprintf(file, "%d %d %d %d\n", nPlotPts, 2, 0, 0); - /*Following lines: */ + /* Following lines: */ for (kk = 0, sid = 0; kk < 
in.numberofpoints; kk++, sid += 2) fprintf(file, "%d %e %e\n", kk, in.pointlist[sid], in.pointlist[sid + 1]); sid /= 2; @@ -624,7 +624,7 @@ static PetscErrorCode PCGAMGProlongator_GEO(PC pc, Mat Amat, PetscCoarsenData *a PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT ") %% bs %" PetscInt_FMT, Iend, Istart, bs); /* get 'nLocalSelected' */ - PetscCall(PetscCDGetMat(agg_lists, &Gmat)); // get auxilary matrix for ghost edges + PetscCall(PetscCDGetMat(agg_lists, &Gmat)); // get auxiliary matrix for ghost edges PetscCall(PetscCDGetNonemptyIS(agg_lists, &selected_1)); PetscCall(ISGetSize(selected_1, &jj)); PetscCall(PetscMalloc1(jj, &clid_flid)); diff --git a/src/ksp/pc/impls/gasm/ftn-custom/zgasmf.c b/src/ksp/pc/impls/gasm/ftn-custom/zgasmf.c index 9f612007d62..a1aeeb7a3c0 100644 --- a/src/ksp/pc/impls/gasm/ftn-custom/zgasmf.c +++ b/src/ksp/pc/impls/gasm/ftn-custom/zgasmf.c @@ -2,7 +2,6 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pcgasmsetsubdomains_ PCGASMSETSUBDOMAINS #define pcgasmdestroysubdomains_ PCGASMDESTROYSUBDOMAINS #define pcgasmgetsubksp1_ PCGASMGETSUBKSP1 #define pcgasmgetsubksp2_ PCGASMGETSUBKSP2 @@ -14,7 +13,6 @@ #define pcgasmgetsubksp8_ PCGASMGETSUBKSP8 #define pcgasmcreatesubdomains2d_ PCGASMCREATESUBDOMAINS2D #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pcgasmsetsubdomains_ pcgasmsetsubdomains #define pcgasmdestroysubdomains_ pcgasmdestroysubdomains #define pcgasmgetsubksp2_ pcgasmgetsubksp2 #define pcgasmgetsubksp3_ pcgasmgetsubksp3 @@ -26,11 +24,6 @@ #define pcgasmcreatesubdomains2d_ pcgasmcreatesubdomains2d #endif -PETSC_EXTERN void pcgasmsetsubdomains_(PC *pc, PetscInt *n, IS *is, IS *isl, int *ierr) -{ - *ierr = PCGASMSetSubdomains(*pc, *n, is, isl); -} - PETSC_EXTERN void pcgasmdestroysubdomains_(PetscInt *n, IS *is, IS *isl, int *ierr) { IS *iis, *iisl; diff --git a/src/ksp/pc/impls/gasm/gasm.c b/src/ksp/pc/impls/gasm/gasm.c index 03f1c0ccc80..665b72c0564 100644 --- a/src/ksp/pc/impls/gasm/gasm.c +++ b/src/ksp/pc/impls/gasm/gasm.c @@ -1,12 +1,12 @@ /* This file defines an "generalized" additive Schwarz preconditioner for any Mat implementation. - In this version each MPI rank may intersect multiple subdomains and any subdomain may - intersect multiple MPI ranks. Intersections of subdomains with MPI ranks are called *local + In this version, each MPI process may intersect multiple subdomains and any subdomain may + intersect multiple MPI processes. Intersections of subdomains with MPI processes are called *local subdomains*. 
N - total number of distinct global subdomains (set explicitly in PCGASMSetTotalSubdomains() or implicitly PCGASMSetSubdomains() and then calculated in PCSetUp_GASM()) - n - actual number of local subdomains on this rank (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains()) - nmax - maximum number of local subdomains per rank (calculated in PCSetUp_GASM()) + n - actual number of local subdomains on this process (set in `PCGASMSetSubdomains()` or calculated in `PCGASMSetTotalSubdomains()`) + nmax - maximum number of local subdomains per process (calculated in PCSetUp_GASM()) */ #include /*I "petscpc.h" I*/ #include @@ -968,7 +968,7 @@ static PetscErrorCode PCGASMSetSubdomains_GASM(PC pc, PetscInt n, IS iis[], IS o /* check if the inner indices cover and only cover the local portion of the preconditioning matrix */ PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend)); PetscCall(PetscCalloc1(rend - rstart, &covered)); - /* check if the current MPI rank owns indices from others */ + /* check if the current MPI process owns indices from others */ for (i = 0; i < n; i++) { PetscCall(ISGetIndices(osm->iis[i], &indices)); PetscCall(ISGetLocalSize(osm->iis[i], &lsize)); @@ -1018,7 +1018,7 @@ static PetscErrorCode PCGASMSetSortIndices_GASM(PC pc, PetscBool doSort) } /* - FIXME: This routine might need to be modified now that multiple ranks per subdomain are allowed. + FIXME: This routine might need to be modified now that multiple processes per subdomain are allowed. In particular, it would upset the global subdomain number calculation. */ static PetscErrorCode PCGASMGetSubKSP_GASM(PC pc, PetscInt *n, PetscInt *first, KSP **ksp) @@ -1042,18 +1042,19 @@ static PetscErrorCode PCGASMGetSubKSP_GASM(PC pc, PetscInt *n, PetscInt *first, PetscFunctionReturn(PETSC_SUCCESS); } /* PCGASMGetSubKSP_GASM() */ -/*@C - PCGASMSetSubdomains - Sets the subdomains for this MPI rank - for the additive Schwarz preconditioner with multiple MPI ranks per subdomain, `PCGASM` +/*@ + PCGASMSetSubdomains - Sets the subdomains for this MPI process + for the additive Schwarz preconditioner with multiple MPI processes per subdomain, `PCGASM` Collective Input Parameters: + pc - the preconditioner object -. n - the number of subdomains for this MPI rank -. iis - the index sets that define the inner subdomains (or `NULL` for PETSc to determine subdomains) +. n - the number of subdomains for this MPI process +. iis - the index sets that define the inner subdomains (or `NULL` for PETSc to determine subdomains), the `iis` array is + copied so may be freed after this call. - ois - the index sets that define the outer subdomains (or `NULL` to use the same as `iis`, or to construct by expanding `iis` by - the requested overlap) + the requested overlap), the `ois` array is copied so may be freed after this call. Level: advanced @@ -1065,15 +1066,15 @@ static PetscErrorCode PCGASMGetSubKSP_GASM(PC pc, PetscInt *n, PetscInt *first, Outer subdomains are those where the residual necessary to obtain the corrections is obtained (see `PCGASMType` for the use of inner/outer subdomains). - Both inner and outer subdomains can extend over several MPI ranks. - This rank's portion of a subdomain is known as a local subdomain. + Both inner and outer subdomains can extend over several MPI processes. + This process' portion of a subdomain is known as a local subdomain. 
- Inner subdomains can not overlap with each other, do not have any entities from remote ranks, - and have to cover the entire local subdomain owned by the current rank. The index sets on each - rank should be ordered such that the ith local subdomain is connected to the ith remote subdomain - on another MPI rank. + Inner subdomains can not overlap with each other, do not have any entities from remote processes, + and have to cover the entire local subdomain owned by the current process. The index sets on each + process should be ordered such that the ith local subdomain is connected to the ith remote subdomain + on another MPI process. - By default the `PGASM` preconditioner uses 1 (local) subdomain per MPI rank. + By default the `PGASM` preconditioner uses 1 (local) subdomain per MPI process. The `iis` and `ois` arrays may be freed after this call using `PCGASMDestroySubdomains()` @@ -1093,7 +1094,7 @@ PetscErrorCode PCGASMSetSubdomains(PC pc, PetscInt n, IS iis[], IS ois[]) /*@ PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the - additive Schwarz preconditioner `PCGASM`. Either all or no MPI ranks in the + additive Schwarz preconditioner `PCGASM`. Either all or no MPI processes in the pc communicator must call this routine. Logically Collective @@ -1108,9 +1109,9 @@ PetscErrorCode PCGASMSetSubdomains(PC pc, PetscInt n, IS iis[], IS ois[]) Level: intermediate Notes: - By default the `PCGASM` preconditioner uses 1 subdomain per rank. To use + By default the `PCGASM` preconditioner uses 1 subdomain per process. To use multiple subdomain per perocessor or "straddling" subdomains that intersect - multiple ranks use `PCGASMSetSubdomains()` (or option `-pc_gasm_total_subdomains` ). + multiple processes use `PCGASMSetSubdomains()` (or option `-pc_gasm_total_subdomains` ). The overlap defaults to 0, so if one desires that no additional overlap be computed beyond what may have been set with a call to @@ -1149,9 +1150,9 @@ PetscErrorCode PCGASMSetOverlap(PC pc, PetscInt ovl) - type - variant of `PCGASM`, one of .vb `PC_GASM_BASIC` - full interpolation and restriction - `PC_GASM_RESTRICT` - full restriction, local MPI rank interpolation - `PC_GASM_INTERPOLATE` - full interpolation, local MPI rank restriction - `PC_GASM_NONE` - local MPI rank restriction and interpolation + `PC_GASM_RESTRICT` - full restriction, local MPI process interpolation + `PC_GASM_INTERPOLATE` - full interpolation, local MPI process restriction + `PC_GASM_NONE` - local MPI process restriction and interpolation .ve Options Database Key: @@ -1195,8 +1196,7 @@ PetscErrorCode PCGASMSetSortIndices(PC pc, PetscBool doSort) } /*@C - PCGASMGetSubKSP - Gets the local `KSP` contexts for all subdomains on - this MPI rank. + PCGASMGetSubKSP - Gets the local `KSP` contexts for all subdomains on this MPI process. Collective iff first_local is requested @@ -1204,9 +1204,8 @@ PetscErrorCode PCGASMSetSortIndices(PC pc, PetscBool doSort) . pc - the preconditioner context Output Parameters: -+ n_local - the number of blocks on this MPI rank or `NULL` -. first_local - the global number of the first block on this rank or `NULL`, - all ranks must request or all must pass `NULL` ++ n_local - the number of blocks on this MPI process or `NULL` +. 
first_local - the global number of the first block on this process or `NULL`, all processes must request or all must pass `NULL` - ksp - the array of `KSP` contexts Level: advanced @@ -1470,14 +1469,14 @@ PETSC_INTERN PetscErrorCode PCGASMCreateStraddlingSubdomains(Mat A, PetscInt N, - N - the number of global subdomains requested Output Parameters: -+ n - the number of subdomains created on this MPI rank ++ n - the number of subdomains created on this MPI process - iis - the array of index sets defining the local inner subdomains (on which the correction is applied) Level: advanced Notes: When `N` >= A's communicator size, each subdomain is local -- contained within a single MPI process. - When `N` < size, the subdomains are 'straddling' (rank boundaries) and are no longer local. + When `N` < size, the subdomains are 'straddling' (process boundaries) and are no longer local. The resulting subdomains can be used in `PCGASMSetSubdomains`(pc,n,iss,`NULL`). The overlapping outer subdomains will be automatically generated from these according to the requested amount of overlap; this is currently supported only with local subdomains. @@ -1524,9 +1523,12 @@ PetscErrorCode PCGASMCreateSubdomains(Mat A, PetscInt N, PetscInt *n, IS *iis[]) destroys each `IS` on the list, and then frees the list. At the end the list pointers are set to `NULL`. + Fortran Note: + The arrays are not freed, only the `IS` within the arrays are destroyed + .seealso: [](ch_ksp), `PCGASM`, `PCGASMCreateSubdomains()`, `PCGASMSetSubdomains()` @*/ -PetscErrorCode PCGASMDestroySubdomains(PetscInt n, IS **iis, IS **ois) +PetscErrorCode PCGASMDestroySubdomains(PetscInt n, IS *iis[], IS *ois[]) { PetscInt i; @@ -1610,7 +1612,7 @@ PetscErrorCode PCGASMDestroySubdomains(PetscInt n, IS **iis, IS **ois) .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`, `PCGASMSetOverlap()`, `PCASMCreateSubdomains2D()`, `PCGASMDestroySubdomains()` @*/ -PetscErrorCode PCGASMCreateSubdomains2D(PC pc, PetscInt M, PetscInt N, PetscInt Mdomains, PetscInt Ndomains, PetscInt dof, PetscInt overlap, PetscInt *nsub, IS **iis, IS **ois) +PetscErrorCode PCGASMCreateSubdomains2D(PC pc, PetscInt M, PetscInt N, PetscInt Mdomains, PetscInt Ndomains, PetscInt dof, PetscInt overlap, PetscInt *nsub, IS *iis[], IS *ois[]) { PetscMPIInt size, rank; PetscInt i, j; @@ -1763,7 +1765,7 @@ PetscErrorCode PCGASMCreateSubdomains2D(PC pc, PetscInt M, PetscInt N, PetscInt } /*@C - PCGASMGetSubdomains - Gets the subdomains supported on this MPI rank + PCGASMGetSubdomains - Gets the subdomains supported on this MPI process for the `PCGASM` additive Schwarz preconditioner. Not Collective Input Parameter: . pc - the preconditioner context Output Parameters: -+ n - the number of subdomains for this MPI rank (default value = 1) -. iis - the index sets that define the inner subdomains (without overlap) supported on this rank (can be `NULL`) -- ois - the index sets that define the outer subdomains (with overlap) supported on this rank (can be `NULL`) ++ n - the number of subdomains for this MPI process (default value = 1) +.
iis - the index sets that define the inner subdomains (without overlap) supported on this process (can be `NULL`) +- ois - the index sets that define the outer subdomains (with overlap) supported on this process (can be `NULL`) Level: advanced @@ -1811,7 +1813,7 @@ PetscErrorCode PCGASMGetSubdomains(PC pc, PetscInt *n, IS *iis[], IS *ois[]) } /*@C - PCGASMGetSubmatrices - Gets the local submatrices (for this MPI rank + PCGASMGetSubmatrices - Gets the local submatrices (for this MPI process only) for the `PCGASM` additive Schwarz preconditioner. Not Collective @@ -1820,7 +1822,7 @@ PetscErrorCode PCGASMGetSubdomains(PC pc, PetscInt *n, IS *iis[], IS *ois[]) . pc - the preconditioner context Output Parameters: -+ n - the number of matrices for this MPI rank (default value = 1) ++ n - the number of matrices for this MPI process (default value = 1) - mat - the matrices Level: advanced diff --git a/src/ksp/pc/impls/hmg/hmg.c b/src/ksp/pc/impls/hmg/hmg.c index 491b7ec2e0a..77d6083fbac 100644 --- a/src/ksp/pc/impls/hmg/hmg.c +++ b/src/ksp/pc/impls/hmg/hmg.c @@ -342,7 +342,7 @@ static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCHMGSetInnerPCType - Set an inner `PC` type Logically Collective diff --git a/src/ksp/pc/impls/hpddm/pchpddm.cxx b/src/ksp/pc/impls/hpddm/pchpddm.cxx index 1071df6d0be..7a920a019ce 100644 --- a/src/ksp/pc/impls/hpddm/pchpddm.cxx +++ b/src/ksp/pc/impls/hpddm/pchpddm.cxx @@ -1,12 +1,10 @@ +#include #include #include #include /*I "petscpc.h" I*/ #include #include /* this must be included after petschpddm.h so that DM_MAX_WORK_VECTORS is not defined */ /* otherwise, it is assumed that one is compiling libhpddm_petsc => circular dependency */ -#if PetscDefined(USE_FORTRAN_BINDINGS) - #include -#endif static PetscErrorCode (*loadedSym)(HPDDM::Schwarz *const, IS, Mat, Mat, Mat, std::vector, PC_HPDDM_Level **const) = nullptr; @@ -138,14 +136,14 @@ static inline PetscErrorCode PCHPDDMSetAuxiliaryMatNormal_Private(PC pc, Mat A, if (diagonal) { PetscCall(VecNorm(*diagonal, NORM_INFINITY, &norm)); if (norm > PETSC_SMALL) { - VecScatter scatter; - PetscInt n; + PetscSF scatter; + PetscInt n; PetscCall(ISGetLocalSize(*cols, &n)); PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)pc), n, PETSC_DECIDE, &d)); PetscCall(VecScatterCreate(*diagonal, *cols, d, nullptr, &scatter)); PetscCall(VecScatterBegin(scatter, *diagonal, d, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(scatter, *diagonal, d, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecScatterDestroy(&scatter)); + PetscCall(PetscSFDestroy(&scatter)); PetscCall(MatScale(aux, -1.0)); PetscCall(MatDiagonalSet(aux, d, ADD_VALUES)); PetscCall(VecDestroy(&d)); @@ -454,22 +452,21 @@ static PetscErrorCode PCMatApply_HPDDM(PC pc, Mat X, Mat Y) PetscFunctionReturn(PETSC_SUCCESS); } -// PetscClangLinter pragma disable: -fdoc-internal-linkage -/*@C - PCHPDDMGetComplexities - Computes the grid and operator complexities. +/*@ + PCHPDDMGetComplexities - Computes the grid and operator complexities. - Input Parameter: -. pc - preconditioner context + Input Parameter: +. 
pc - preconditioner context - Output Parameters: -+ gc - grid complexity = sum_i(m_i) / m_1 -- oc - operator complexity = sum_i(nnz_i) / nnz_1 + Output Parameters: ++ gc - grid complexity $ \sum_i m_i / m_1 $ +- oc - operator complexity $ \sum_i nnz_i / nnz_1 $ - Level: advanced + Level: advanced .seealso: [](ch_ksp), `PCMGGetGridComplexity()`, `PCHPDDM`, `PCHYPRE`, `PCGAMG` @*/ -static PetscErrorCode PCHPDDMGetComplexities(PC pc, PetscReal *gc, PetscReal *oc) +PetscErrorCode PCHPDDMGetComplexities(PC pc, PetscReal *gc, PetscReal *oc) { PC_HPDDM *data = (PC_HPDDM *)pc->data; MatInfo info; @@ -923,7 +920,7 @@ static PetscErrorCode PCDestroy_HPDDMShell(PC pc) PetscCall(MatDestroy(ctx->V + 1)); PetscCall(MatDestroy(ctx->V + 2)); PetscCall(VecDestroy(&ctx->D)); - PetscCall(VecScatterDestroy(&ctx->scatter)); + PetscCall(PetscSFDestroy(&ctx->scatter)); PetscCall(PCDestroy(&ctx->pc)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1124,6 +1121,8 @@ static PetscErrorCode PCHPDDMPermute_Private(IS is, IS in_is, IS *out_is, Mat in PetscBool sorted; PetscFunctionBegin; + PetscValidHeaderSpecific(is, IS_CLASSID, 1); + PetscValidHeaderSpecific(in_C, MAT_CLASSID, 4); PetscCall(ISSorted(is, &sorted)); if (!sorted) { PetscCall(ISGetLocalSize(is, &size)); @@ -1205,6 +1204,21 @@ static PetscErrorCode PCHPDDMCheckSymmetry_Private(PC pc, Mat A01, Mat A10) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode PCHPDDMCheckInclusion_Private(PC pc, IS is, IS is_local, PetscBool check) +{ + IS intersect; + const char *str = "IS of the auxiliary Mat does not include all local rows of A"; + PetscBool equal; + + PetscFunctionBegin; + PetscCall(ISIntersect(is, is_local, &intersect)); + PetscCall(ISEqualUnsorted(is_local, intersect, &equal)); + PetscCall(ISDestroy(&intersect)); + if (check) PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%s", str); + else if (!equal) PetscCall(PetscInfo(pc, "%s\n", str)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode PCHPDDMDestroySubMatrices_Private(PetscBool flg, PetscBool algebraic, Mat *sub) { IS is; @@ -1386,7 +1400,7 @@ static PetscErrorCode PCDestroy_Nest(PC pc) template static PetscErrorCode MatMult_Schur(Mat A, Vec x, Vec y) { - std::tuple *ctx; + std::tuple *ctx; PetscFunctionBegin; PetscCall(MatShellGetContext(A, &ctx)); @@ -1402,7 +1416,7 @@ static PetscErrorCode MatMult_Schur(Mat A, Vec x, Vec y) static PetscErrorCode MatDestroy_Schur(Mat A) { - std::tuple *ctx; + std::tuple *ctx; PetscFunctionBegin; PetscCall(MatShellGetContext(A, &ctx)); @@ -1636,8 +1650,9 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PC_HPDDM *data_00; KSP ksp, inner_ksp; PC pc_00; + Mat A11 = nullptr; + Vec d = nullptr; char *prefix; - PetscReal norm; PetscCall(MatSchurComplementGetKSP(P, &ksp)); PetscCall(KSPGetPC(ksp, &pc_00)); @@ -1650,7 +1665,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(PetscObjectTypeCompare((PetscObject)data_00->levels[0]->pc, PCASM, &flg)); PetscCheck(flg, PetscObjectComm((PetscObject)P), PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_schur_precondition %s and -%spc_type %s (!= %s)", pcpre ? pcpre : "", PCHPDDMSchurPreTypes[type], ((PetscObject)data_00->levels[0]->pc)->prefix, ((PetscObject)data_00->levels[0]->pc)->type_name, PCASM); - PetscCheck(data->Neumann == PETSC_BOOL3_TRUE, PetscObjectComm((PetscObject)P), PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_schur_precondition %s and -%spc_hpddm_has_neumann != true", pcpre ? pcpre : "", PCHPDDMSchurPreTypes[type], pcpre ? 
pcpre : ""); + PetscCall(MatSchurComplementGetSubMatrices(P, nullptr, nullptr, nullptr, nullptr, &A11)); if (PetscDefined(USE_DEBUG) || !data->is) { Mat A01, A10, B = nullptr, C = nullptr, *sub; @@ -1688,6 +1703,13 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(MatFindNonzeroRows(C, &data->is)); PetscCall(MatDestroy(&C)); PetscCall(ISDestroy(is)); + PetscCall(ISCreateStride(PetscObjectComm((PetscObject)data->is), A11->rmap->n, A11->rmap->rstart, 1, &loc)); + if (PetscDefined(USE_DEBUG)) PetscCall(PCHPDDMCheckInclusion_Private(pc, data->is, loc, PETSC_FALSE)); + PetscCall(ISExpand(data->is, loc, is)); + PetscCall(ISDestroy(&loc)); + PetscCall(ISDestroy(&data->is)); + data->is = is[0]; + is[0] = nullptr; } if (PetscDefined(USE_DEBUG)) { PetscCall(PCHPDDMCheckSymmetry_Private(pc, A01, A10)); @@ -1707,33 +1729,64 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(ISDestroy(&uis)); PetscCall(MatDestroy(&B)); } - if (data->aux) PetscCall(MatNorm(data->aux, NORM_FROBENIUS, &norm)); - else norm = 0.0; - PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &norm, 1, MPIU_REAL, MPI_MAX, PetscObjectComm((PetscObject)P))); - if (norm < PETSC_MACHINE_EPSILON * static_cast(10.0)) { /* if A11 is near zero, e.g., Stokes equation, build a diagonal auxiliary (Neumann) Mat which is just a small diagonal weighted by the inverse of the multiplicity */ - VecScatter scatter; - Vec x; + flg = PETSC_FALSE; + if (!data->aux) { + Mat D; + + PetscCall(MatCreateVecs(A11, &d, nullptr)); + PetscCall(MatGetDiagonal(A11, d)); + PetscCall(PetscObjectTypeCompareAny((PetscObject)A11, &flg, MATDIAGONAL, MATCONSTANTDIAGONAL, "")); + if (!flg) { + PetscCall(MatCreateDiagonal(d, &D)); + PetscCall(MatMultEqual(A11, D, 20, &flg)); + PetscCall(MatDestroy(&D)); + } + if (flg) PetscCall(PetscInfo(pc, "A11 block is likely diagonal so the PC will build an auxiliary Mat (which was not initially provided by the user)\n")); + } + if (data->Neumann != PETSC_BOOL3_TRUE && !flg && A11) { + PetscReal norm; + + PetscCall(MatNorm(A11, NORM_INFINITY, &norm)); + PetscCheck(norm < PETSC_MACHINE_EPSILON * static_cast(10.0), PetscObjectComm((PetscObject)P), PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_schur_precondition geneo and -%spc_hpddm_has_neumann != true with a nonzero or non-diagonal A11 block", pcpre ? pcpre : "", pcpre ? pcpre : ""); + PetscCall(PetscInfo(pc, "A11 block is likely zero so the PC will build an auxiliary Mat (which was%s initially provided by the user)\n", data->aux ? 
"" : " not")); + PetscCall(MatDestroy(&data->aux)); + flg = PETSC_TRUE; + } + if (!data->aux) { /* if A11 is near zero, e.g., Stokes equation, or diagonal, build an auxiliary (Neumann) Mat which is a (possibly slightly shifted) diagonal weighted by the inverse of the multiplicity */ + PetscSF scatter; const PetscScalar *read; - PetscScalar *write; + PetscScalar *write, *diagonal = nullptr; PetscCall(MatDestroy(&data->aux)); PetscCall(ISGetLocalSize(data->is, &n)); - PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)P), n, PETSC_DECIDE, &x)); - PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)P), n, PETSC_DECIDE, &v)); - PetscCall(VecScatterCreate(x, data->is, v, nullptr, &scatter)); + PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)P), n, PETSC_DECIDE, &xin)); + PetscCall(VecDuplicate(xin, &v)); + PetscCall(VecScatterCreate(xin, data->is, v, nullptr, &scatter)); PetscCall(VecSet(v, 1.0)); - PetscCall(VecSet(x, 1.0)); - PetscCall(VecScatterBegin(scatter, v, x, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecScatterEnd(scatter, v, x, ADD_VALUES, SCATTER_REVERSE)); /* v has the multiplicity of all unknowns on the overlap */ - PetscCall(VecScatterDestroy(&scatter)); + PetscCall(VecSet(xin, 1.0)); + PetscCall(VecScatterBegin(scatter, v, xin, ADD_VALUES, SCATTER_REVERSE)); + PetscCall(VecScatterEnd(scatter, v, xin, ADD_VALUES, SCATTER_REVERSE)); /* v has the multiplicity of all unknowns on the overlap */ + PetscCall(PetscSFDestroy(&scatter)); + if (d) { + PetscCall(VecScatterCreate(d, data->is, v, nullptr, &scatter)); + PetscCall(VecScatterBegin(scatter, d, v, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(scatter, d, v, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(PetscSFDestroy(&scatter)); + PetscCall(VecDestroy(&d)); + PetscCall(PetscMalloc1(n, &diagonal)); + PetscCall(VecGetArrayRead(v, &read)); + PetscCallCXX(std::copy_n(read, n, diagonal)); + PetscCall(VecRestoreArrayRead(v, &read)); + } PetscCall(VecDestroy(&v)); PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &v)); - PetscCall(VecGetArrayRead(x, &read)); + PetscCall(VecGetArrayRead(xin, &read)); PetscCall(VecGetArrayWrite(v, &write)); - PetscCallCXX(std::transform(read, read + n, write, [](const PetscScalar &m) { return PETSC_SMALL / (static_cast(1000.0) * m); })); - PetscCall(VecRestoreArrayRead(x, &read)); + for (PetscInt i = 0; i < n; ++i) write[i] = (!diagonal || std::abs(diagonal[i]) < PETSC_MACHINE_EPSILON) ? 
PETSC_SMALL / (static_cast(1000.0) * read[i]) : diagonal[i] / read[i]; + PetscCall(PetscFree(diagonal)); + PetscCall(VecRestoreArrayRead(xin, &read)); PetscCall(VecRestoreArrayWrite(v, &write)); - PetscCall(VecDestroy(&x)); + PetscCall(VecDestroy(&xin)); PetscCall(MatCreateDiagonal(v, &data->aux)); PetscCall(VecDestroy(&v)); } @@ -1766,6 +1819,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(PCSetOperators(std::get<0>(*ctx)[1], pc->mat, pc->pmat)); PetscCall(PCSetType(std::get<0>(*ctx)[1], PCHPDDM)); PetscCall(PCHPDDMSetAuxiliaryMat(std::get<0>(*ctx)[1], uis, uaux, nullptr, nullptr)); /* transfer ownership of the auxiliary inputs from the inner (PCKSP) to the inner-most (PCHPDDM) PC */ + if (flg) static_cast(std::get<0>(*ctx)[1]->data)->Neumann = PETSC_BOOL3_TRUE; PetscCall(PCSetFromOptions(std::get<0>(*ctx)[1])); PetscCall(PetscObjectDereference((PetscObject)uis)); PetscCall(PetscObjectDereference((PetscObject)uaux)); @@ -1788,6 +1842,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(VecDuplicate(std::get<3>(*ctx)[0], std::get<3>(*ctx) + 2)); PetscCall(PetscObjectDereference((PetscObject)inner_ksp)); PetscCall(PetscObjectDereference((PetscObject)S)); + for (std::vector::iterator it = initial.begin(); it != initial.end(); ++it) PetscCall(VecDestroy(&*it)); PetscFunctionReturn(PETSC_SUCCESS); } else { /* second call to PCSetUp() on the PC associated to the Schur complement, retrieve previously set context */ PetscCall(PetscContainerGetPointer(container, (void **)&ctx)); @@ -1864,6 +1919,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(MatDestroy(&B)); } else PetscCheck(type != PC_HPDDM_SCHUR_PRE_GENEO, PetscObjectComm((PetscObject)P), PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_schur_precondition %s without a prior call to PCHPDDMSetAuxiliaryMat() on the A11 block%s%s", pcpre ? pcpre : "", PCHPDDMSchurPreTypes[type], pcpre ? " -" : "", pcpre ? 
pcpre : ""); + for (std::vector::iterator it = initial.begin(); it != initial.end(); ++it) PetscCall(VecDestroy(&*it)); PetscFunctionReturn(PETSC_SUCCESS); } else { PetscCall(PetscOptionsGetString(nullptr, pcpre, "-pc_hpddm_levels_1_st_pc_type", type, sizeof(type), nullptr)); @@ -1960,15 +2016,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(ISDestroy(&data->is)); data->is = is[0]; } else { - if (PetscDefined(USE_DEBUG)) { - PetscBool equal; - IS intersect; - - PetscCall(ISIntersect(data->is, loc, &intersect)); - PetscCall(ISEqualUnsorted(loc, intersect, &equal)); - PetscCall(ISDestroy(&intersect)); - PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS of the auxiliary Mat does not include all local rows of A"); - } + if (PetscDefined(USE_DEBUG)) PetscCall(PCHPDDMCheckInclusion_Private(pc, data->is, loc, PETSC_TRUE)); if (overlap == -1) PetscCall(PetscObjectComposeFunction((PetscObject)pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_Private_C", PCHPDDMAlgebraicAuxiliaryMat_Private)); if (!PetscBool3ToBool(data->Neumann) && (!algebraic || overlap != -1)) { PetscCall(PetscObjectTypeCompare((PetscObject)P, MATMPISBAIJ, &flg)); @@ -2228,6 +2276,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) const char *matpre; PetscBool cmp[4]; PetscCall(KSPGetOperators(ksp[0], subA, subA + 1)); + PetscCall(PetscObjectReference((PetscObject)subA[0])); PetscCall(MatDuplicate(subA[1], MAT_SHARE_NONZERO_PATTERN, &D)); PetscCall(MatGetOptionsPrefix(subA[1], &matpre)); PetscCall(MatSetOptionsPrefix(D, matpre)); @@ -2270,10 +2319,20 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PC_HPDDM *data_00 = (PC_HPDDM *)std::get<0>(*ctx)[0]->data; PC s; Mat A00, P00, A01 = nullptr, A10, A11, N, b[4]; - IS sorted, is[2]; + IS sorted, is[2], *is_00; MatSolverType type; std::pair *p; + n = -1; + PetscTryMethod(data_00->levels[0]->pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (data_00->levels[0]->pc, &n, nullptr, &ksp)); + PetscCheck(n == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of subdomain solver %" PetscInt_FMT " != 1", n); + PetscCall(KSPGetOperators(ksp[0], subA, subA + 1)); + PetscCall(ISGetLocalSize(data_00->is, &n)); + if (n != subA[0]->rmap->n || n != subA[0]->cmap->n) { + PetscCall(PCASMGetLocalSubdomains(data_00->levels[0]->pc, &n, &is_00, nullptr)); + PetscCall(ISGetLocalSize(*is_00, &n)); + PetscCheck(n == subA[0]->rmap->n && n == subA[0]->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_schur_precondition geneo and -%spc_hpddm_define_subdomains false", pcpre ? 
pcpre : "", ((PetscObject)pc)->prefix); + } else is_00 = &data_00->is; PetscCall(PCHPDDMPermute_Private(unsorted, data->is, &uis, data->aux, &C, nullptr)); /* permute since PCASM works with a sorted IS */ std::swap(C, data->aux); std::swap(uis, data->is); @@ -2291,8 +2350,8 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(PetscObjectTypeCompare((PetscObject)A10, MATHERMITIANTRANSPOSEVIRTUAL, &flg)); if (flg) PetscCall(MatHermitianTransposeGetMat(A10, &A01)); } - PetscCall(ISDuplicate(data_00->is, &sorted)); /* during setup of the PC associated to the A00 block, this IS has already been sorted, but it's put back to its original state at the end of PCSetUp_HPDDM(), which may be unsorted */ - PetscCall(ISSort(sorted)); /* this is to avoid changing users inputs, but it requires a new call to ISSort() here */ + PetscCall(ISDuplicate(*is_00, &sorted)); /* during setup of the PC associated to the A00 block, this IS has already been sorted, but it's put back to its original state at the end of PCSetUp_HPDDM(), which may be unsorted */ + PetscCall(ISSort(sorted)); /* this is to avoid changing users inputs, but it requires a new call to ISSort() here */ if (!A01) { PetscCall(MatSetOption(A10, MAT_SUBMAT_SINGLEIS, PETSC_TRUE)); PetscCall(MatCreateSubMatrices(A10, 1, &data->is, &sorted, MAT_INITIAL_MATRIX, &sub)); @@ -2321,12 +2380,6 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) } PetscCall(MatDestroySubMatrices(1, &sub)); PetscCall(ISDestroy(&sorted)); - n = -1; - PetscTryMethod(data_00->levels[0]->pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (data_00->levels[0]->pc, &n, nullptr, &ksp)); - PetscCheck(n == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of subdomain solver %" PetscInt_FMT " != 1", n); - PetscCall(KSPGetOperators(ksp[0], subA, subA + 1)); - PetscCall(ISGetLocalSize(data_00->is, &n)); - PetscCheck(n == subA[0]->rmap->n && n == subA[0]->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_schur_precondition geneo and -%spc_hpddm_define_subdomains false", pcpre ? 
pcpre : "", ((PetscObject)pc)->prefix); if (A01 || A10) { if (flg) PetscCall(MatTranspose(b[2], MAT_INITIAL_MATRIX, b + 1)); else PetscCall(MatHermitianTranspose(b[2], MAT_INITIAL_MATRIX, b + 1)); @@ -2416,6 +2469,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(KSPGetOperators(ksp[0], st, st + 1)); PetscCall(MatCopy(subA[0], st[0], structure)); if (subA[1] != subA[0] || st[1] != st[0]) PetscCall(MatCopy(subA[1], st[1], SAME_NONZERO_PATTERN)); + PetscCall(PetscObjectDereference((PetscObject)subA[0])); } if (data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_SetUp[0], data->levels[0]->ksp, nullptr, nullptr, nullptr)); if (ismatis) PetscCall(MatISGetLocalMat(C, &N)); @@ -2454,9 +2508,9 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(PCShellSetApply(spc, PCApply_HPDDMShell)); PetscCall(PCShellSetMatApply(spc, PCMatApply_HPDDMShell)); if (ctx && n == 0) { - Mat Amat, Pmat; - PetscInt m, M; - std::tuple *ctx; + Mat Amat, Pmat; + PetscInt m, M; + std::tuple *ctx; PetscCall(KSPGetOperators(data->levels[n]->ksp, nullptr, &Pmat)); PetscCall(MatGetLocalSize(Pmat, &m, nullptr)); @@ -2526,7 +2580,7 @@ static PetscErrorCode PCSetUp_HPDDM(PC pc) PetscCall(MatDestroy(data->levels[n]->V + 1)); PetscCall(MatDestroy(data->levels[n]->V + 2)); PetscCall(VecDestroy(&data->levels[n]->D)); - PetscCall(VecScatterDestroy(&data->levels[n]->scatter)); + PetscCall(PetscSFDestroy(&data->levels[n]->scatter)); } } if (reused) { @@ -2782,7 +2836,7 @@ PetscErrorCode HPDDMLoadDL_Private(PetscBool *found) It may be viewed as an alternative to spectral AMGe or `PCBDDC` with adaptive selection of constraints. The interface is explained in details in {cite}`jolivetromanzampini2020` - The matrix to be preconditioned (Pmat) may be unassembled (`MATIS`), assembled (`MATAIJ`, `MATBAIJ`, or `MATSBAIJ`), hierarchical (`MATHTOOL`), or `MATNORMAL`. + The matrix used for building the preconditioner (Pmat) may be unassembled (`MATIS`), assembled (`MATAIJ`, `MATBAIJ`, or `MATSBAIJ`), hierarchical (`MATHTOOL`), `MATNORMAL`, `MATNORMALHERMITIAN`, or `MATSCHURCOMPLEMENT` (when `PCHPDDM` is used as part of an outer `PCFIELDSPLIT`). For multilevel preconditioning, when using an assembled or hierarchical Pmat, one must provide an auxiliary local `Mat` (unassembled local operator for GenEO) using `PCHPDDMSetAuxiliaryMat()`. Calling this routine is not needed when using a `MATIS` Pmat, assembly is done internally using `MatConvert()`. 
diff --git a/src/ksp/pc/impls/hypre/ftn-custom/makefile b/src/ksp/pc/impls/hypre/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ksp/pc/impls/hypre/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ksp/pc/impls/hypre/ftn-custom/zhypref.c b/src/ksp/pc/impls/hypre/ftn-custom/zhypref.c deleted file mode 100644 index a1313013afa..00000000000 --- a/src/ksp/pc/impls/hypre/ftn-custom/zhypref.c +++ /dev/null @@ -1,56 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pchypresettype_ PCHYPRESETTYPE - #define pchypregettype_ PCHYPREGETTYPE - #define pcmggalerkinsetmatproductalgorithm_ PCMGGALERKINSETMATPRODUCTALGORITHM - #define pcmggalerkingetmatproductalgorithm_ PCMGGALERKINGETMATPRODUCTALGORITHM -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pchypresettype_ pchypresettype - #define pchypregettype_ pchypregettype - #define pcmggalerkinsetmatproductalgorithm_ pcmggalerkinsetmatproductalgorithm - #define pcmggalerkingetmatproductalgorithm_ pcmggalerkingetmatproductalgorithm -#endif - -PETSC_EXTERN void pchypresettype_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(name, len, t); - *ierr = PCHYPRESetType(*pc, t); - if (*ierr) return; - FREECHAR(name, t); -} - -PETSC_EXTERN void pchypregettype_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PCHYPREGetType(*pc, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void pcmggalerkinsetmatproductalgorithm_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(name, len, t); - *ierr = PCMGGalerkinSetMatProductAlgorithm(*pc, t); - if (*ierr) return; - FREECHAR(name, t); -} - -PETSC_EXTERN void pcmggalerkingetmatproductalgorithm_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PCMGGalerkinGetMatProductAlgorithm(*pc, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ksp/pc/impls/hypre/hypre.c b/src/ksp/pc/impls/hypre/hypre.c index c0e1f662af2..f38d4dada19 100644 --- a/src/ksp/pc/impls/hypre/hypre.c +++ b/src/ksp/pc/impls/hypre/hypre.c @@ -747,7 +747,7 @@ static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems * PC_HYPRE *jac = (PC_HYPRE *)pc->data; PetscInt bs, n, indx, level; PetscBool flg, tmp_truth; - double tmpdbl, twodbl[2]; + PetscReal tmpdbl, twodbl[2]; const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"}; @@ -848,7 +848,7 @@ static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems * } /* Filter for ILU(k) for Euclid */ - double droptolerance; + PetscReal droptolerance; PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg)); if (flg && (jac->smoothtype == 3)) { jac->eu_droptolerance = droptolerance; @@ -2196,7 +2196,7 @@ static PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptio PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCHYPRESetType 
- Sets which hypre preconditioner you wish to use Input Parameters: @@ -2219,7 +2219,7 @@ PetscErrorCode PCHYPRESetType(PC pc, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCHYPREGetType - Gets which hypre preconditioner you are using Input Parameter: @@ -2241,7 +2241,7 @@ PetscErrorCode PCHYPREGetType(PC pc, const char *name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs Logically Collective @@ -2268,7 +2268,7 @@ PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs Not Collective @@ -2281,7 +2281,7 @@ PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[]) Level: intermediate -.seealso: [](ch_ksp), `PCHYPRE`, ``PCMGGalerkinSetMatProductAlgorithm()` +.seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()` @*/ PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[]) { @@ -2389,7 +2389,7 @@ typedef struct { /* keep copy of PFMG options used so may view them */ PetscInt its; - double tol; + PetscReal tol; PetscInt relax_type; PetscInt rap_type; PetscInt num_pre_relax, num_post_relax; @@ -2616,10 +2616,10 @@ typedef struct { HYPRE_SStructSolver ss_solver; /* keep copy of SYSPFMG options used so may view them */ - PetscInt its; - double tol; - PetscInt relax_type; - PetscInt num_pre_relax, num_post_relax; + PetscInt its; + PetscReal tol; + PetscInt relax_type; + PetscInt num_pre_relax, num_post_relax; } PC_SysPFMG; static PetscErrorCode PCDestroy_SysPFMG(PC pc) @@ -2848,7 +2848,7 @@ typedef struct { MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */ HYPRE_StructSolver hsolver; PetscInt its; /* keep copy of SMG options used so may view them */ - double tol; + PetscReal tol; PetscBool print_statistics; PetscInt num_pre_relax, num_post_relax; } PC_SMG; diff --git a/src/ksp/pc/impls/is/pcis.c b/src/ksp/pc/impls/is/pcis.c index 0b214c01956..e6a5f042ed6 100644 --- a/src/ksp/pc/impls/is/pcis.c +++ b/src/ksp/pc/impls/is/pcis.c @@ -153,9 +153,8 @@ PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolv /* first time creation, get info on substructuring */ if (!pc->setupcalled) { PetscInt n_I; - PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global; - PetscBT bt; - PetscInt i, j; + PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global, *count; + PetscInt i; /* get info on mapping */ PetscCall(PetscObjectReference((PetscObject)matis->rmapping)); @@ -165,15 +164,11 @@ PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolv PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &pcis->n_neigh, &pcis->neigh, &pcis->n_shared, &pcis->shared)); /* Identifying interior and interface nodes, in local numbering */ - PetscCall(PetscBTCreate(pcis->n, &bt)); - for (i = 0; i < pcis->n_neigh; i++) - for (j = 0; j < pcis->n_shared[i]; j++) PetscCall(PetscBTSet(bt, pcis->shared[i][j])); - - /* Creating local and global index sets for interior and interface nodes. 
*/ + PetscCall(ISLocalToGlobalMappingGetNodeInfo(pcis->mapping, NULL, &count, NULL)); PetscCall(PetscMalloc1(pcis->n, &idx_I_local)); PetscCall(PetscMalloc1(pcis->n, &idx_B_local)); for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) { - if (!PetscBTLookup(bt, i)) { + if (count[i] < 2) { idx_I_local[n_I] = i; n_I++; } else { @@ -181,6 +176,7 @@ PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolv pcis->n_B++; } } + PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(pcis->mapping, NULL, &count, NULL)); /* Getting the global numbering */ idx_B_global = PetscSafePointerPlusOffset(idx_I_local, n_I); /* Just avoiding allocating extra memory, since we have vacant space */ @@ -197,7 +193,6 @@ PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolv /* Freeing memory */ PetscCall(PetscFree(idx_B_local)); PetscCall(PetscFree(idx_I_local)); - PetscCall(PetscBTDestroy(&bt)); /* Creating work vectors and arrays */ PetscCall(VecDuplicate(matis->x, &pcis->vec1_N)); diff --git a/src/ksp/pc/impls/jacobi/jacobi.c b/src/ksp/pc/impls/jacobi/jacobi.c index 1c841ca71bd..2e71516d87a 100644 --- a/src/ksp/pc/impls/jacobi/jacobi.c +++ b/src/ksp/pc/impls/jacobi/jacobi.c @@ -141,6 +141,21 @@ static PetscErrorCode PCJacobiGetFixDiagonal_Jacobi(PC pc, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode PCJacobiGetDiagonal_Jacobi(PC pc, Vec diag, Vec diagsqrt) +{ + PC_Jacobi *j = (PC_Jacobi *)pc->data; + MPI_Comm comm = PetscObjectComm((PetscObject)pc); + + PetscFunctionBegin; + PetscCheck(j->diag || j->diagsqrt, comm, PETSC_ERR_ARG_WRONGSTATE, "Jacobi diagonal has not been created yet. Use PCApply to force creation"); + PetscCheck(!diag || j->diag, comm, PETSC_ERR_ARG_WRONGSTATE, "Jacobi diagonal not available. Check if PC is non-symmetric"); + PetscCheck(!diagsqrt || j->diagsqrt, comm, PETSC_ERR_ARG_WRONGSTATE, "Jacobi diagonal square root not available. Check if PC is symmetric"); + + if (diag) PetscCall(VecCopy(j->diag, diag)); + if (diagsqrt) PetscCall(VecCopy(j->diagsqrt, diagsqrt)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /* PCSetUp_Jacobi - Prepares for the use of the Jacobi preconditioner by setting data structures and options.
@@ -188,6 +203,7 @@ static PetscErrorCode PCSetUp_Jacobi(PC pc) if (diag) { PetscBool isset, isspd; + PetscCall(VecLockReadPop(diag)); switch (jac->type) { case PC_JACOBI_DIAGONAL: PetscCall(MatGetDiagonal(pc->pmat, diag)); @@ -248,9 +264,12 @@ static PetscErrorCode PCSetUp_Jacobi(PC pc) } PetscCall(VecRestoreArray(diag, &x)); } + PetscCall(VecLockReadPush(diag)); } if (diagsqrt) { PetscScalar *x; + + PetscCall(VecLockReadPop(diagsqrt)); switch (jac->type) { case PC_JACOBI_DIAGONAL: PetscCall(MatGetDiagonal(pc->pmat, diagsqrt)); @@ -276,6 +295,7 @@ static PetscErrorCode PCSetUp_Jacobi(PC pc) } } PetscCall(VecRestoreArray(diagsqrt, &x)); + PetscCall(VecLockReadPush(diagsqrt)); } if (zeroflag) PetscCall(PetscInfo(pc, "Zero detected in diagonal of matrix, using 1 at those locations\n")); PetscFunctionReturn(PETSC_SUCCESS); @@ -295,6 +315,7 @@ static PetscErrorCode PCSetUp_Jacobi_Symmetric(PC pc) PetscFunctionBegin; PetscCall(MatCreateVecs(pc->pmat, &jac->diagsqrt, NULL)); + PetscCall(VecLockReadPush(jac->diagsqrt)); PetscCall(PCSetUp_Jacobi(pc)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -313,6 +334,7 @@ static PetscErrorCode PCSetUp_Jacobi_NonSymmetric(PC pc) PetscFunctionBegin; PetscCall(MatCreateVecs(pc->pmat, &jac->diag, NULL)); + PetscCall(VecLockReadPush(jac->diag)); PetscCall(PCSetUp_Jacobi(pc)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -367,6 +389,8 @@ static PetscErrorCode PCReset_Jacobi(PC pc) PC_Jacobi *jac = (PC_Jacobi *)pc->data; PetscFunctionBegin; + if (jac->diag) PetscCall(VecLockReadPop(jac->diag)); + if (jac->diagsqrt) PetscCall(VecLockReadPop(jac->diagsqrt)); PetscCall(VecDestroy(&jac->diag)); PetscCall(VecDestroy(&jac->diagsqrt)); PetscFunctionReturn(PETSC_SUCCESS); @@ -393,6 +417,7 @@ static PetscErrorCode PCDestroy_Jacobi(PC pc) PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiGetRowl1Scale_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiSetFixDiagonal_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiGetFixDiagonal_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiGetDiagonal_C", NULL)); /* Free the private data structure that was hanging off the PC @@ -530,6 +555,7 @@ PETSC_EXTERN PetscErrorCode PCCreate_Jacobi(PC pc) PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiGetUseAbs_C", PCJacobiGetUseAbs_Jacobi)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiSetFixDiagonal_C", PCJacobiSetFixDiagonal_Jacobi)); PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiGetFixDiagonal_C", PCJacobiGetFixDiagonal_Jacobi)); + PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCJacobiGetDiagonal_C", PCJacobiGetDiagonal_Jacobi)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -686,6 +712,30 @@ PetscErrorCode PCJacobiGetFixDiagonal(PC pc, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + PCJacobiGetDiagonal - Returns a copy of the diagonal and/or diagonal square root `Vec` + + Logically Collective + + Input Parameter: +.
pc - the preconditioner context + + Output Parameters: ++ diagonal - Copy of the `Vec` of the inverted diagonal +- diagonal_sqrt - Copy of the `Vec` of the inverted square root of the diagonal + + Level: developer + +.seealso: [](ch_ksp), `PCJACOBI`, `PCJacobiSetType()` +@*/ +PetscErrorCode PCJacobiGetDiagonal(PC pc, Vec diagonal, Vec diagonal_sqrt) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(pc, PC_CLASSID, 1); + PetscUseMethod(pc, "PCJacobiGetDiagonal_C", (PC, Vec, Vec), (pc, diagonal, diagonal_sqrt)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ PCJacobiSetType - Causes the Jacobi preconditioner to use either the diagonal, the maximum entry in each row, or the sum of each row's entries for the diagonal preconditioner diff --git a/src/ksp/pc/impls/ksp/pcksp.c b/src/ksp/pc/impls/ksp/pcksp.c index 9d7455828af..02577b1508a 100644 --- a/src/ksp/pc/impls/ksp/pcksp.c +++ b/src/ksp/pc/impls/ksp/pcksp.c @@ -230,30 +230,29 @@ static PetscErrorCode PCSetFromOptions_KSP(PC pc, PetscOptionItems *PetscOptions } /*MC - PCKSP - Defines a preconditioner as any `KSP` solver. - This allows, for example, embedding a Krylov method inside a preconditioner. + PCKSP - Defines a preconditioner as any `KSP` solver. This allows, for example, embedding a Krylov method inside a preconditioner. Options Database Key: -. -pc_use_amat - use the matrix that defines the linear system, Amat as the matrix for the - inner solver, otherwise by default it uses the matrix used to construct - the preconditioner, Pmat (see `PCSetOperators()`) +. -pc_use_amat - use the matrix that defines the linear system, Amat as the matrix for the + inner solver, otherwise by default it uses the matrix used to construct + the preconditioner, Pmat (see `PCSetOperators()`) Level: intermediate Note: - The application of an inexact Krylov solve is a nonlinear operation. Thus, performing a solve with `KSP` is, - in general, a nonlinear operation, so `PCKSP` is in general a nonlinear preconditioner. - Thus, one can see divergence or an incorrect answer unless using a flexible Krylov method (e.g. `KSPFGMRES`, `KSPGCR`, or `KSPFCG`) for the outer Krylov solve. + The application of an inexact Krylov solve is a nonlinear operation. Thus, performing a solve with `KSP` is, + in general, a nonlinear operation, so `PCKSP` is in general a nonlinear preconditioner. + Thus, one can see divergence or an incorrect answer unless using a flexible Krylov method (e.g. `KSPFGMRES`, `KSPGCR`, or `KSPFCG`) for the outer Krylov solve. Developer Note: - If the outer Krylov method has a nonzero initial guess it will compute a new residual based on that initial guess - and pass that as the right-hand side into this `KSP` (and hence this `KSP` will always have a zero initial guess). For all outer Krylov methods - except Richardson this is necessary since Krylov methods, even the flexible ones, need to "see" the result of the action of the preconditioner on the - input (current residual) vector, the action of the preconditioner cannot depend also on some other vector (the "initial guess"). For - `KSPRICHARDSON` it is possible to provide a `PCApplyRichardson_PCKSP()` that short circuits returning to the `KSP` object at each iteration to compute the - residual, see for example `PCApplyRichardson_SOR()`.
We do not implement a `PCApplyRichardson_PCKSP()` because (1) using a `KSP` directly inside a Richardson - is not an efficient algorithm anyways and (2) implementing it for its > 1 would essentially require that we implement Richardson (reimplementing the - Richardson code) inside the `PCApplyRichardson_PCKSP()` leading to duplicate code. + If the outer Krylov method has a nonzero initial guess it will compute a new residual based on that initial guess + and pass that as the right-hand side into this `KSP` (and hence this `KSP` will always have a zero initial guess). For all outer Krylov methods + except Richardson this is necessary since Krylov methods, even the flexible ones, need to "see" the result of the action of the preconditioner on the + input (current residual) vector, the action of the preconditioner cannot depend also on some other vector (the "initial guess"). For + `KSPRICHARDSON` it is possible to provide a `PCApplyRichardson_PCKSP()` that short circuits returning to the `KSP` object at each iteration to compute the + residual, see for example `PCApplyRichardson_SOR()`. We do not implement a `PCApplyRichardson_PCKSP()` because (1) using a `KSP` directly inside a Richardson + is not an efficient algorithm anyway and (2) implementing it for its > 1 would essentially require that we implement Richardson (reimplementing the + Richardson code) inside the `PCApplyRichardson_PCKSP()` leading to duplicate code. .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCSHELL`, `PCCOMPOSITE`, `PCSetUseAmat()`, `PCKSPGetKSP()`, `KSPFGMRES`, `KSPGCR`, `KSPFCG` diff --git a/src/ksp/pc/impls/lsc/lsc.c b/src/ksp/pc/impls/lsc/lsc.c index 6e3da1246db..e3ce8170fae 100644 --- a/src/ksp/pc/impls/lsc/lsc.c +++ b/src/ksp/pc/impls/lsc/lsc.c @@ -203,7 +203,7 @@ static PetscErrorCode PCView_LSC(PC pc, PetscViewer viewer) Options Database Key: + -pc_lsc_commute - Whether to commute the LSC preconditioner in the style of Olshanskii -- -pc_lsc_scale_diag - Whether to scale $BB^T$ products. Will use the inverse of the diagonal of Qscale or A if the former is not provided +- -pc_lsc_scale_diag - Whether to scale $BB^T$ products. Will use the inverse of the diagonal of $Qscale$ or $A$ if the former is not provided Level: intermediate @@ -215,18 +215,18 @@ static PetscErrorCode PCView_LSC(PC pc, PetscViewer viewer) S = A11 - A10 A00^{-1} A01 $$ - `PCLSC` currently doesn't do anything with A11, so let's assume it is 0. The idea is that a good approximation to - inv(S) is given by + `PCLSC` currently doesn't do anything with $A11$, so let's assume it is 0. The idea is that a good approximation to + $S^{-1}$ is given by $$ (A10 A01)^{-1} A10 A00 A01 (A10 A01)^{-1} $$ - The product A10 A01 can be computed for you, but you can provide it (this is - usually more efficient anyway). In the case of incompressible flow, A10 A01 is a Laplacian; call it L. The current - interface is to compose L and a preconditioning matrix Lp on the preconditioning matrix. + The product $A10 A01$ can be computed for you, but you can provide it (this is + usually more efficient anyway). In the case of incompressible flow, $A10 A01$ is a Laplacian; call it $L$. The current + interface is to compose $L$ and a matrix $Lp$ from which to construct its preconditioner on the preconditioning matrix.
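Concretely, this composition might look like the following sketch (the matrices `S`, `Sp`, `L`, and `Lp` are the user's; "LSC_L" and "LSC_Lp" are the names the implementation looks up):
.vb
  PetscCall(KSPSetOperators(ksp, S, Sp));
  PetscCall(PetscObjectCompose((PetscObject)Sp, "LSC_L", (PetscObject)L));
  PetscCall(PetscObjectCompose((PetscObject)Sp, "LSC_Lp", (PetscObject)Lp));
.ve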
- If you had called `KSPSetOperators`(ksp,S,Sp), S should have type `MATSCHURCOMPLEMENT` and Sp can be any type you + If you had called `KSPSetOperators`(ksp,S,Sp), $S$ should have type `MATSCHURCOMPLEMENT` and $Sp$ can be any type you like (`PCLSC` doesn't use it directly) but should have matrices composed with it, under the names "LSC_L" and "LSC_Lp". For example, you might have setup code like this diff --git a/src/ksp/pc/impls/mg/ftn-custom/zmgfuncf.c b/src/ksp/pc/impls/mg/ftn-custom/zmgfuncf.c index 2c07201dc2a..c338c222156 100644 --- a/src/ksp/pc/impls/mg/ftn-custom/zmgfuncf.c +++ b/src/ksp/pc/impls/mg/ftn-custom/zmgfuncf.c @@ -17,10 +17,7 @@ static PetscErrorCode ourresidualfunction(Mat mat, Vec b, Vec x, Vec R) return PETSC_SUCCESS; } -PETSC_EXTERN void pcmgresidualdefault_(Mat *mat, Vec *b, Vec *x, Vec *r, PetscErrorCode *ierr) -{ - *ierr = PCMGResidualDefault(*mat, *b, *x, *r); -} +PETSC_EXTERN void pcmgresidualdefault_(Mat *, Vec *, Vec *, Vec *, PetscErrorCode *); PETSC_EXTERN void pcmgsetresidual_(PC *pc, PetscInt *l, PetscErrorCode (*residual)(Mat *, Vec *, Vec *, Vec *, PetscErrorCode *), Mat *mat, PetscErrorCode *ierr) { diff --git a/src/ksp/pc/impls/mg/gdsw.c b/src/ksp/pc/impls/mg/gdsw.c index 084c50682cc..def5eca754a 100644 --- a/src/ksp/pc/impls/mg/gdsw.c +++ b/src/ksp/pc/impls/mg/gdsw.c @@ -102,7 +102,6 @@ static PetscErrorCode PCMGGDSWSetUp(PC pc, PetscInt l, DM dm, KSP smooth, PetscI PetscCall(MatISGetLocalToGlobalMapping(A, &l2g, NULL)); PetscCall(MatGetSize(A, &N, NULL)); - graph->commsizelimit = 0; /* don't use the COMM_SELF variant of the graph */ PetscCall(PCBDDCGraphInit(graph, l2g, N, PETSC_MAX_INT)); PetscCall(MatGetRowIJ(lA, 0, PETSC_TRUE, PETSC_FALSE, &graph->nvtxs_csr, (const PetscInt **)&graph->xadj, (const PetscInt **)&graph->adjncy, &flg)); PetscCall(PCBDDCGraphSetUp(graph, vsize, NULL, NULL, 0, NULL, NULL)); diff --git a/src/ksp/pc/impls/mg/mg.c b/src/ksp/pc/impls/mg/mg.c index 37056743c7e..fe1cc0aa882 100644 --- a/src/ksp/pc/impls/mg/mg.c +++ b/src/ksp/pc/impls/mg/mg.c @@ -1520,6 +1520,7 @@ static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType PetscFunctionBegin; mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE; mg->coarseSpaceType = ctype; + PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1550,7 +1551,7 @@ static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space. Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated. @@ -1563,11 +1564,14 @@ static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr) Options Database Keys: + -pc_mg_adapt_interp_n - The number of modes to use -- -pc_mg_adapt_interp_coarse_space - The type of coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw +- -pc_mg_adapt_interp_coarse_space - The type of coarse space: none, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw` Level: intermediate -.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()` + Note: + Requires a `DM` with specific functionality be attached to the `PC`. 
+ +.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM` @*/ PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype) { @@ -1578,7 +1582,7 @@ PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space. Not Collective @@ -1829,7 +1833,7 @@ static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat * /*@C PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction. - Not Collective + Not Collective, No Fortran Support Input Parameters: + name - name of the constructor @@ -1862,7 +1866,7 @@ PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorC /*@C PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method. - Not Collective + Not Collective, No Fortran Support Input Parameter: . name - name of the constructor @@ -1886,17 +1890,17 @@ PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode ( information about the coarser grid matrices and restriction/interpolation operators. Options Database Keys: -+ -pc_mg_levels - number of levels including finest -. -pc_mg_cycle_type - provide the cycle desired ++ -pc_mg_levels - number of levels including finest +. -pc_mg_cycle_type - provide the cycle desired . -pc_mg_type - multiplicative is the default -. -pc_mg_log - log information about time spent on each level of the solver -. -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes) -. -pc_mg_galerkin - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R' -. -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1) -. -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices - to the Socket viewer for reading from MATLAB. -- -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices - to the binary output file called binaryoutput +. -pc_mg_log - log information about time spent on each level of the solver +. -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes) +. -pc_mg_galerkin - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R' +. -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1) +. -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices + to a `PETSCVIEWERSOCKET` for reading from MATLAB. +- -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices + to the binary output file called binaryoutput Level: intermediate diff --git a/src/ksp/pc/impls/mg/mgfunc.c b/src/ksp/pc/impls/mg/mgfunc.c index 995f4b3a6ab..250fd9dcb95 100644 --- a/src/ksp/pc/impls/mg/mgfunc.c +++ b/src/ksp/pc/impls/mg/mgfunc.c @@ -1,6 +1,6 @@ #include /*I "petscksp.h" I*/ -/*@C +/*@ PCMGResidualDefault - Default routine to calculate the residual.
Collective @@ -24,7 +24,7 @@ PetscErrorCode PCMGResidualDefault(Mat mat, Vec b, Vec x, Vec r) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCMGResidualTransposeDefault - Default routine to calculate the residual of the transposed linear system Collective @@ -49,7 +49,7 @@ PetscErrorCode PCMGResidualTransposeDefault(Mat mat, Vec b, Vec x, Vec r) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCMGMatResidualDefault - Default routine to calculate the residual. Collective @@ -74,7 +74,7 @@ PetscErrorCode PCMGMatResidualDefault(Mat mat, Mat b, Mat x, Mat r) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCMGMatResidualTransposeDefault - Default routine to calculate the residual of the transposed linear system Collective @@ -536,7 +536,7 @@ PetscErrorCode PCMGGetInjection(PC pc, PetscInt l, Mat *mat) Level: advanced -.seealso: [](ch_ksp), `PCMG`, ``PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`, `PCMGGetCoarseSolve()` +.seealso: [](ch_ksp), `PCMG`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`, `PCMGGetCoarseSolve()` @*/ PetscErrorCode PCMGGetSmoother(PC pc, PetscInt l, KSP *ksp) { diff --git a/src/ksp/pc/impls/ml/ml.c b/src/ksp/pc/impls/ml/ml.c index 1962e9f0b35..5c9710b0ff5 100644 --- a/src/ksp/pc/impls/ml/ml.c +++ b/src/ksp/pc/impls/ml/ml.c @@ -693,7 +693,7 @@ static PetscErrorCode PCSetUp_ML(PC pc) for (j = 0; j < mlocal; j++) nullvec[(i + !!has_const) * mlocal + j] = v[j]; PetscCall(VecRestoreArrayRead(vecs[i], &v)); } - PetscStackCallExternalVoid("ML_Aggregate_Create", ML_Aggregate_Set_NullSpace(agg_object, bs, nvec + !!has_const, nullvec, mlocal)); + PetscStackCallExternalVoid("ML_Aggregate_Set_NullSpace", ML_Aggregate_Set_NullSpace(agg_object, bs, nvec + !!has_const, nullvec, mlocal)); PetscCall(PetscFree(nullvec)); } break; case PCML_NULLSPACE_BLOCK: @@ -1081,21 +1081,21 @@ static PetscErrorCode PCSetFromOptions_ML(PC pc, PetscOptionItems *PetscOptionsO - -pc_mg_type - (one of) additive multiplicative full kascade ML Options Database Key: -+ -pc_ml_PrintLevel <0> - Print level (`ML_Set_PrintLevel()`) -. -pc_ml_maxNlevels <10> - Maximum number of levels (None) -. -pc_ml_maxCoarseSize <1> - Maximum coarsest mesh size (`ML_Aggregate_Set_MaxCoarseSize()`) -. -pc_ml_CoarsenScheme - (one of) Uncoupled Coupled MIS METIS -. -pc_ml_DampingFactor <1.33333> - P damping factor (`ML_Aggregate_Set_DampingFactor()`) -. -pc_ml_Threshold <0> - Smoother drop tol (`ML_Aggregate_Set_Threshold()`) -. -pc_ml_SpectralNormScheme_Anorm - Method used for estimating spectral radius (`ML_Set_SpectralNormScheme_Anorm()`) -. -pc_ml_repartition - Allow ML to repartition levels of the hierarchy (`ML_Repartition_Activate()`) -. -pc_ml_repartitionMaxMinRatio <1.3> - Acceptable ratio of repartitioned sizes (`ML_Repartition_Set_LargestMinMaxRatio()`) -. -pc_ml_repartitionMinPerProc <512> - Smallest repartitioned size (`ML_Repartition_Set_MinPerProc()`) ++ -pc_ml_PrintLevel <0> - Print level (`ML_Set_PrintLevel()`) +. -pc_ml_maxNlevels <10> - Maximum number of levels (None) +. -pc_ml_maxCoarseSize <1> - Maximum coarsest mesh size (`ML_Aggregate_Set_MaxCoarseSize()`) +. -pc_ml_CoarsenScheme - (one of) Uncoupled Coupled MIS METIS +. -pc_ml_DampingFactor <1.33333> - P damping factor (`ML_Aggregate_Set_DampingFactor()`) +. -pc_ml_Threshold <0> - Smoother drop tol (`ML_Aggregate_Set_Threshold()`) +. -pc_ml_SpectralNormScheme_Anorm - Method used for estimating spectral radius (`ML_Set_SpectralNormScheme_Anorm()`) +. -pc_ml_repartition - Allow ML to repartition levels of the hierarchy (`ML_Repartition_Activate()`) +. 
-pc_ml_repartitionMaxMinRatio <1.3> - Acceptable ratio of repartitioned sizes (`ML_Repartition_Set_LargestMinMaxRatio()`) +. -pc_ml_repartitionMinPerProc <512> - Smallest repartitioned size (`ML_Repartition_Set_MinPerProc()`) . -pc_ml_repartitionPutOnSingleProc <5000> - Problem size automatically repartitioned to one processor (`ML_Repartition_Set_PutOnSingleProc()`) -. -pc_ml_repartitionType - Repartitioning library to use (`ML_Repartition_Set_Partitioner()`) -. -pc_ml_repartitionZoltanScheme - Repartitioning scheme to use (None) -. -pc_ml_Aux - Aggregate using auxiliary coordinate-based Laplacian (None) -- -pc_ml_AuxThreshold <0.0> - Auxiliary smoother drop tol (None) +. -pc_ml_repartitionType - Repartitioning library to use (`ML_Repartition_Set_Partitioner()`) +. -pc_ml_repartitionZoltanScheme - Repartitioning scheme to use (None) +. -pc_ml_Aux - Aggregate using auxiliary coordinate-based Laplacian (None) +- -pc_ml_AuxThreshold <0.0> - Auxiliary smoother drop tol (None) Level: intermediate diff --git a/src/ksp/pc/impls/patch/pcpatch.c b/src/ksp/pc/impls/patch/pcpatch.c index 02ed3dbbe22..9d055ecbcd5 100644 --- a/src/ksp/pc/impls/patch/pcpatch.c +++ b/src/ksp/pc/impls/patch/pcpatch.c @@ -677,7 +677,6 @@ PetscErrorCode PCPatchSetComputeOperator(PC pc, PetscErrorCode (*func)(PC pc, Pe } /*@C - PCPatchSetComputeOperatorInteriorFacets - Set the callback function used to compute facet integrals for patch matrices Logically Collective @@ -1426,7 +1425,7 @@ static PetscErrorCode PCPatchCreateCellPatchDiscretisationInfo(PC pc) } } } - /*How many local dofs in this patch? */ + /* How many local dofs in this patch? */ if (patch->local_composition_type == PC_COMPOSITE_MULTIPLICATIVE) { PetscCall(PetscHMapIGetSize(htWithArtificial, &dof)); PetscCall(PetscSectionSetDof(gtolCountsWithArtificial, v, dof)); diff --git a/src/ksp/pc/impls/python/ftn-custom/makefile b/src/ksp/pc/impls/python/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ksp/pc/impls/python/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ksp/pc/impls/python/ftn-custom/zpythonpcf.c b/src/ksp/pc/impls/python/ftn-custom/zpythonpcf.c deleted file mode 100644 index ff681b18641..00000000000 --- a/src/ksp/pc/impls/python/ftn-custom/zpythonpcf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pcpythonsettype_ PCPYTHONSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pcpythonsettype_ pcpythonsettype -#endif - -PETSC_EXTERN void pcpythonsettype_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(name, len, t); - *ierr = PCPythonSetType(*pc, t); - if (*ierr) return; - FREECHAR(name, t); -} diff --git a/src/ksp/pc/impls/python/pythonpc.c b/src/ksp/pc/impls/python/pythonpc.c index 3134e89d5f5..f634c463e27 100644 --- a/src/ksp/pc/impls/python/pythonpc.c +++ b/src/ksp/pc/impls/python/pythonpc.c @@ -1,6 +1,6 @@ #include /*I "petscpc.h" I*/ -/*@C +/*@ PCPythonSetType - Initialize a `PC` object implemented in Python, a `PCPYTHON`. Collective @@ -25,7 +25,7 @@ PetscErrorCode PCPythonSetType(PC pc, const char pyname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCPythonGetType - Get the type of a `PC` object implemented in Python, a `PCPYTHON`. 
Not Collective diff --git a/src/ksp/pc/impls/redistribute/redistribute.c b/src/ksp/pc/impls/redistribute/redistribute.c index b9f53aef2df..479c9b689ae 100644 --- a/src/ksp/pc/impls/redistribute/redistribute.c +++ b/src/ksp/pc/impls/redistribute/redistribute.c @@ -377,6 +377,56 @@ static PetscErrorCode PCApply_Redistribute(PC pc, Vec b, Vec x) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode PCApplyTranspose_Redistribute(PC pc, Vec b, Vec x) +{ + PC_Redistribute *red = (PC_Redistribute *)pc->data; + PetscInt dcnt = red->dcnt, i; + const PetscInt *drows = red->drows; + PetscScalar *xwork; + const PetscScalar *bwork, *diag = red->diag; + PetscBool set, flg = PETSC_FALSE, nonzero_guess; + + PetscFunctionBegin; + PetscCall(MatIsStructurallySymmetricKnown(pc->pmat, &set, &flg)); + PetscCheck(set && flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyTranspose() not implemented for structurally unsymmetric Mat"); + if (!red->work) PetscCall(VecDuplicate(b, &red->work)); + PetscCall(KSPGetInitialGuessNonzero(red->ksp, &nonzero_guess)); + if (nonzero_guess) { + PetscCall(VecScatterBegin(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD)); + } + + /* compute the rows of solution that have diagonal entries only */ + PetscCall(VecSet(x, 0.0)); /* x = diag(A)^{-1} b */ + PetscCall(VecGetArray(x, &xwork)); + PetscCall(VecGetArrayRead(b, &bwork)); + if (red->zerodiag) { + for (i = 0; i < dcnt; i++) { + if (diag[i] == 0.0 && bwork[drows[i]] != 0.0) { + PetscCheck(!pc->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_CONV_FAILED, "Linear system is inconsistent, zero matrix row but nonzero right-hand side"); + PetscCall(PetscInfo(pc, "Linear system is inconsistent, zero matrix row but nonzero right-hand side\n")); + PetscCall(VecSetInf(x)); + pc->failedreasonrank = PC_INCONSISTENT_RHS; + } + } + } + for (i = 0; i < dcnt; i++) xwork[drows[i]] = diag[i] * bwork[drows[i]]; + PetscCall(PetscLogFlops(dcnt)); + PetscCall(VecRestoreArray(x, &xwork)); + PetscCall(VecRestoreArrayRead(b, &bwork)); + /* update the right-hand side for the reduced system with diagonal rows (and corresponding columns) removed */ + PetscCall(MatMultTranspose(pc->pmat, x, red->work)); + PetscCall(VecAYPX(red->work, -1.0, b)); /* red->work = b - A^T x */ + + PetscCall(VecScatterBegin(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(KSPSolveTranspose(red->ksp, red->b, red->x)); + PetscCall(KSPCheckSolve(red->ksp, pc, red->x)); + PetscCall(VecScatterBegin(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE)); + PetscCall(VecScatterEnd(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode PCDestroy_Redistribute(PC pc) { PC_Redistribute *red = (PC_Redistribute *)pc->data; @@ -479,7 +529,7 @@ PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc) pc->data = (void *)red; pc->ops->apply = PCApply_Redistribute; - pc->ops->applytranspose = NULL; + pc->ops->applytranspose = PCApplyTranspose_Redistribute; pc->ops->setup = PCSetUp_Redistribute; pc->ops->destroy = PCDestroy_Redistribute; pc->ops->setfromoptions = PCSetFromOptions_Redistribute; diff --git a/src/ksp/pc/impls/shell/ftn-custom/zshellpcf.c b/src/ksp/pc/impls/shell/ftn-custom/zshellpcf.c index 17c1d3a7c2f..cb61aae70bb 100644 --- a/src/ksp/pc/impls/shell/ftn-custom/zshellpcf.c +++
b/src/ksp/pc/impls/shell/ftn-custom/zshellpcf.c @@ -14,8 +14,6 @@ #define pcshellsetpresolve_ PCSHELLSETPRESOLVE #define pcshellsetpostsolve_ PCSHELLSETPOSTSOLVE #define pcshellsetview_ PCSHELLSETVIEW - #define pcshellsetname_ PCSHELLSETNAME - #define pcshellgetname_ PCSHELLGETNAME #define pcshellsetcontext_ PCSHELLSETCONTEXT #define pcshellgetcontext_ PCSHELLGETCONTEXT #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) @@ -28,8 +26,6 @@ #define pcshellsetpresolve_ pcshellsetpresolve #define pcshellsetpostsolve_ pcshellsetpostsolve #define pcshellsetview_ pcshellsetview - #define pcshellsetname_ pcshellsetname - #define pcshellgetname_ pcshellgetname #define pcshellsetcontext_ pcshellsetcontext #define pcshellgetcontext_ pcshellgetcontext #endif @@ -224,23 +220,3 @@ PETSC_EXTERN void pcshellsetview_(PC *pc, void (*view)(void *, PetscViewer *, Pe *ierr = PCShellSetView(*pc, ourshellview); } - -PETSC_EXTERN void pcshellsetname_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c; - FIXCHAR(name, len, c); - *ierr = PCShellSetName(*pc, c); - if (*ierr) return; - FREECHAR(name, c); -} - -PETSC_EXTERN void pcshellgetname_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *c; - - *ierr = PCShellGetName(*pc, &c); - if (*ierr) return; - *ierr = PetscStrncpy(name, c, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ksp/pc/impls/shell/shellpc.c b/src/ksp/pc/impls/shell/shellpc.c index ba423fb9ca7..5eb258fc6e9 100644 --- a/src/ksp/pc/impls/shell/shellpc.c +++ b/src/ksp/pc/impls/shell/shellpc.c @@ -736,7 +736,7 @@ PetscErrorCode PCShellSetPostSolve(PC pc, PetscErrorCode (*postsolve)(PC pc, KSP PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCShellSetName - Sets an optional name to associate with a `PCSHELL` preconditioner. @@ -761,7 +761,7 @@ PetscErrorCode PCShellSetName(PC pc, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCShellGetName - Gets an optional name that the user has set for a `PCSHELL` with `PCShellSetName()` preconditioner. diff --git a/src/ksp/pc/impls/wb/wb.c b/src/ksp/pc/impls/wb/wb.c index 2fad46585ec..da3f35f544f 100644 --- a/src/ksp/pc/impls/wb/wb.c +++ b/src/ksp/pc/impls/wb/wb.c @@ -13,7 +13,6 @@ const char *const PCExoticTypes[] = {"face", "wirebasket", "PCExoticType", "PC_E /* DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space - */ static PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P) { @@ -43,14 +42,14 @@ static PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *ex kstart = kstart ? -1 : 0; /* - the columns of P are the interpolation of each coarse grid point (one for each vertex and edge) + the columns of P are the interpolation of each coarse (wirebasket) grid point (one for each face, vertex and edge) to all the local degrees of freedom (this includes the vertices, edges and faces). 
Xint are the subset of the interpolation into the interior Xface are the interpolation onto faces but not into the interior - Xsurf are the interpolation onto the vertices and edges (the surfbasket) + Xsurf are the interpolation onto the vertices and edges (the wirebasket) Xint Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain Xsurf @@ -362,7 +361,6 @@ static PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *ex /* DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space - */ static PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P) { @@ -392,14 +390,14 @@ static PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, kstart = kstart ? -1 : 0; /* - the columns of P are the interpolation of each coarse grid point (one for each vertex and edge) + the columns of P are the interpolation of each coarse (face) grid point (one for each face) to all the local degrees of freedom (this includes the vertices, edges and faces). Xint are the subset of the interpolation into the interior Xface are the interpolation onto faces but not into the interior - Xsurf are the interpolation onto the vertices and edges (the surfbasket) + Xsurf are the interpolation onto the vertices and edges (the wirebasket) Xint Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain Xsurf @@ -657,7 +655,10 @@ static PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Input Parameters: + pc - the preconditioner context -- type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face) +- type - either `PC_EXOTIC_FACE` or `PC_EXOTIC_WIREBASKET` (defaults to face) + + Options Database Keys: +. -pc_exotic_type - use a coarse grid point for each face, or edge and vertex Notes: The face based interpolation has 1 degree of freedom per face and ignores the @@ -668,8 +669,8 @@ static PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, per face. A constant on the subdomain boundary is interpolated as that constant in the interior of the domain. - The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence - if A is nonsingular A_c is also nonsingular. + The coarse grid matrix is obtained via the Galerkin computation $A_c = R A R^T$, hence + if $A$ is nonsingular $A_c$ is also nonsingular. Both interpolations are suitable for only scalar problems. @@ -794,15 +795,19 @@ static PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOpti } /*MC - PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces + PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces - This uses the `PCMG` infrastructure restricted to two levels and the face and wirebasket based coarse + This uses the `PCMG` infrastructure restricted to two levels and the face and wirebasket based coarse grid spaces. 
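Editor's note: a one-line justification of the Galerkin nonsingularity claim in the notes above, sketched for the symmetric positive-definite case these overlapping Schwarz methods target. If $A$ is SPD and the restriction $R$ has full row rank (the coarse basis functions are linearly independent), then for every coarse vector $x \neq 0$

  $x^T (R A R^T) x = (R^T x)^T A (R^T x) > 0$ since $R^T x \neq 0$,

so $A_c = R A R^T$ is itself symmetric positive definite, and in particular nonsingular.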
+ Options Database Keys: ++ -pc_exotic_type - use a coarse grid point for each face, or edge and vertex +- -pc_exotic_direct_solver - use a direct solver to construct interpolation instead of an iterative solver + Level: advanced Notes: - Must be used with `DMDA` + Must be used with `DMDA` in three dimensions By default this uses `KSPGMRES` on the fine grid smoother so this should be used with `KSPFGMRES` or the smoother changed to not use `KSPGMRES` @@ -814,7 +819,10 @@ static PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOpti They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example, {cite}`dohrmann2008extending`, {cite}`dohrmann2008family`, {cite}`dohrmann2008domain`, {cite}`dohrmann2009overlapping`. - The usual `PCMG` options are supported, such as -mg_levels_pc_type -mg_coarse_pc_type -mg_fine_pc_type and -pc_mg_type + In this code the wirebasket includes a constant for each face, as well as the true "wirebasket". Other wirebasket algorithms exist that + only have constants for edges and vertices. + + The usual `PCMG` options are supported, such as `-mg_levels_pc_type` `-mg_coarse_pc_type` `-mg_fine_pc_type` and `-pc_mg_type` .seealso: [](ch_ksp), `PCMG`, `PCSetDM()`, `PCExoticType`, `PCExoticSetType()` M*/ diff --git a/src/ksp/pc/interface/ftn-custom/makefile b/src/ksp/pc/interface/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/ksp/pc/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ksp/pc/interface/ftn-custom/zpcsetf.c b/src/ksp/pc/interface/ftn-custom/zpcsetf.c deleted file mode 100644 index bdedda26447..00000000000 --- a/src/ksp/pc/interface/ftn-custom/zpcsetf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pcsettype_ PCSETTYPE - #define pcgettype_ PCGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pcsettype_ pcsettype - #define pcgettype_ pcgettype -#endif - -PETSC_EXTERN void pcsettype_(PC *pc, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = PCSetType(*pc, t); - FREECHAR(type, t); -} - -PETSC_EXTERN void pcgettype_(PC *pc, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PCGetType(*pc, &tname); - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ksp/pc/interface/ftn-custom/zpreconf.c b/src/ksp/pc/interface/ftn-custom/zpreconf.c deleted file mode 100644 index 22f4a800715..00000000000 --- a/src/ksp/pc/interface/ftn-custom/zpreconf.c +++ /dev/null @@ -1,77 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pcview_ PCVIEW - #define pcgetoperators_ PCGETOPERATORS - #define pcsetoptionsprefix_ PCSETOPTIONSPREFIX - #define pcappendoptionsprefix_ PCAPPENDOPTIONSPREFIX - #define pcgetoptionsprefix_ PCGETOPTIONSPREFIX - #define pcviewfromoptions_ PCVIEWFROMOPTIONS - #define pcdestroy_ PCDESTROY -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pcview_ pcview - #define pcgetoperators_ pcgetoperators - #define pcsetoptionsprefix_ pcsetoptionsprefix - #define pcappendoptionsprefix_ pcappendoptionsprefix - #define pcgetoptionsprefix_ pcgetoptionsprefix - #define pcviewfromoptions_ 
pcviewfromoptions - #define pcdestroy_ pcdestroy -#endif - -PETSC_EXTERN void pcview_(PC *pc, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PCView(*pc, v); -} - -PETSC_EXTERN void pcsetoptionsprefix_(PC *pc, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = PCSetOptionsPrefix(*pc, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void pcappendoptionsprefix_(PC *pc, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = PCAppendOptionsPrefix(*pc, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void pcgetoptionsprefix_(PC *pc, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PCGetOptionsPrefix(*pc, &tname); - *ierr = PetscStrncpy(prefix, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, prefix, len); -} - -PETSC_EXTERN void pcviewfromoptions_(PC *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PCViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void pcdestroy_(PC *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = PCDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/ksp/pc/interface/pcset.c b/src/ksp/pc/interface/pcset.c index 6a3428adceb..8c7eb1ce316 100644 --- a/src/ksp/pc/interface/pcset.c +++ b/src/ksp/pc/interface/pcset.c @@ -11,7 +11,7 @@ PetscBool PCRegisterAllCalled = PETSC_FALSE; */ PetscFunctionList PCList = NULL; -/*@C +/*@ PCSetType - Builds `PC` for a particular preconditioner type Collective @@ -76,7 +76,7 @@ PetscErrorCode PCSetType(PC pc, PCType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGetType - Gets the `PCType` (as a string) from the `PC` context. diff --git a/src/ksp/pc/interface/precon.c b/src/ksp/pc/interface/precon.c index 9474808571b..c9067ab0d67 100644 --- a/src/ksp/pc/interface/precon.c +++ b/src/ksp/pc/interface/precon.c @@ -81,7 +81,7 @@ PetscErrorCode PCReset(PC pc) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCDestroy - Destroys `PC` context that was created with `PCCreate()`. Collective @@ -113,7 +113,7 @@ PetscErrorCode PCDestroy(PC *pc) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right scaling as needed by certain time-stepping codes. @@ -435,11 +435,9 @@ PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc) PetscFunctionBegin; PetscAssertPointer(newpc, 2); - *newpc = NULL; PetscCall(PCInitializePackage()); PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView)); - pc->mat = NULL; pc->pmat = NULL; pc->setupcalled = 0; @@ -848,7 +846,7 @@ PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work Level: developer -.seealso: [](ch_ksp), `PC`, `PCRICHARDSON`, `PCApplyRichardson()` +.seealso: [](ch_ksp), `PC`, `KSPRICHARDSON`, `PCApplyRichardson()` @*/ PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists) { @@ -945,7 +943,7 @@ PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason) a call `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()` or `PCReduceFailedReason()`. 
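Editor's note: a minimal usage sketch of the convention described in the notes above (hypothetical setup: `ksp` is a `KSP` with `b` and `x` already created; everything else elided):

  PC             pc;
  PCFailedReason reason;

  PetscCall(KSPSolve(ksp, b, x)); /* may end with KSP_DIVERGED_PC_FAILED */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCGetFailedReason(pc, &reason));
  if (reason != PC_NOERROR) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "PC failure: %s\n", PCFailedReasons[reason]));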
It is not valid immediately after a `PCSetUp()` or `PCApply()`, then use `PCGetFailedReasonRank()` -.seealso: [](ch_ksp), `PC`, ``PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()` +.seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()` @*/ PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason) { @@ -1412,7 +1410,7 @@ PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGetOperatorsSet - Determines if the matrix associated with the linear system and possibly a different one associated with the preconditioner have been set in the `PC`. @@ -1468,7 +1466,7 @@ PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCSetOptionsPrefix - Sets the prefix used for searching for all `PC` options in the database. @@ -1495,7 +1493,7 @@ PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCAppendOptionsPrefix - Appends to the prefix used for searching for all `PC` options in the database. @@ -1522,7 +1520,7 @@ PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCGetOptionsPrefix - Gets the prefix used for searching for all PC options in the database. @@ -1676,7 +1674,7 @@ PetscErrorCode PCPostSolve(PC pc, KSP ksp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCLoad - Loads a `PC` that has been stored in binary with `PCView()`. Collective @@ -1718,7 +1716,7 @@ PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) #include #endif -/*@C +/*@ PCViewFromOptions - View from the `PC` based on options in the options database Collective @@ -1740,7 +1738,7 @@ PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCView - Prints information about the `PC` Collective @@ -1873,7 +1871,7 @@ PetscErrorCode PCView(PC pc, PetscViewer viewer) /*@C PCRegister - Adds a method (`PCType`) to the preconditioner package. - Not collective + Not collective. No Fortran Support Input Parameters: + sname - name of a new user-defined solver @@ -1914,7 +1912,7 @@ static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PCComputeOperator - Computes the explicit preconditioned operator. Collective diff --git a/src/ksp/pc/tests/ex8f.F90 b/src/ksp/pc/tests/ex8f.F90 index 643019dafbf..c681bbdc992 100644 --- a/src/ksp/pc/tests/ex8f.F90 +++ b/src/ksp/pc/tests/ex8f.F90 @@ -76,22 +76,22 @@ program main j = II - i*n if (i.gt.0) then JJ = II - n - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif if (i.lt.n-1) then JJ = II + n - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif if (j.gt.0) then JJ = II - 1 - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif if (j.lt.n-1) then JJ = II + 1 - PetscCallA(MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr)) endif v = 4.0 - PetscCallA( MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)) + PetscCallA( MatSetValues(A,one,[II],one,[II],[v],ADD_VALUES,ierr)) 10 continue ! 
Assemble matrix, using the 2-step process: @@ -151,7 +151,6 @@ subroutine MyResidual(A,b,x,r,ierr) Mat A Vec b,x,r integer ierr - return end !/*TEST diff --git a/src/ksp/pc/tests/ex9f.F90 b/src/ksp/pc/tests/ex9f.F90 index bf55a0d805b..17149608fd8 100644 --- a/src/ksp/pc/tests/ex9f.F90 +++ b/src/ksp/pc/tests/ex9f.F90 @@ -58,18 +58,18 @@ program main col(1) = i-1 col(2) = i col(3) = i+1 - PetscCallA(MatSetValues(A,i1,i,i3,col,value,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[i],i3,col,value,INSERT_VALUES,ierr)) 50 continue i = n - 1 col(1) = n - 2 col(2) = n - 1 - PetscCallA(MatSetValues(A,i1,i,i2,col,value,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[i],i2,col,value,INSERT_VALUES,ierr)) i = 0 col(1) = 0 col(2) = 1 value(1) = 2.0 value(2) = -1.0 - PetscCallA(MatSetValues(A,i1,i,i2,col,value,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,i1,[i],i2,col,value,INSERT_VALUES,ierr)) PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/ksp/pc/tutorials/ex4.c b/src/ksp/pc/tutorials/ex4.c new file mode 100644 index 00000000000..33e4b2edfe7 --- /dev/null +++ b/src/ksp/pc/tutorials/ex4.c @@ -0,0 +1,371 @@ +static char help[] = "Applies the 2023 preconditioner of Benzi and Faccio\n\n"; + +#include +#include +#include +#include +#include + +/* + * This example reproduces the preconditioner outlined in Benzi's paper + * https://doi.org/10.1137/22M1505529. The problem considered is: + * + * (A + gamma UU^T)x = b + * + * whose structure arises from, for example, grad-div stabilization in the + * Navier-Stokes momentum equation. In the code we will also refer to + * gamma UU^T as J. The preconditioner developed by Benzi is: + * + * P_alpha = (A + alpha I)(alpha I + gamma UU^T) + * + * Another variant which may yield better convergence depending on the specific + * problem is + * + * P_alpha = (A + alpha D) D^-1 (alpha D + gamma UU^T) + * + * where D = diag(A + gamma UU^T). This is the variant implemented + * here. Application of the preconditioner involves (approximate) solution of + * two systems, one with (A + alpha D), and another with (alpha D + gamma + * UU^T). For small alpha (which generally yields the best overall + * preconditioner), (alpha D + gamma UU^T) is ill-conditioned. To combat this we + * solve (alpha D + gamma UU^T) using the Sherman-Morrison-Woodbury (SMW) matrix + * identity, which effectively converts the grad-div structure to a much nicer + * div-grad (laplacian) structure. + * + * The matrices used as input can be generated by running the matlab/octave + * program IFISS. The particular matrices checked into the datafiles repository + * and used in testing of this example correspond to a leaky lid-driven cavity + * with a stretched grid and Q2-Q1 finite elements. The matrices are taken from + * the last iteration of a Picard solve with tolerance 1e-8 with a viscosity of + * 0.1 and a 32x32 grid. We summarize below iteration counts from running this + * preconditioner for different grids and viscosity with a KSP tolerance of 1e-6. + * + * 32x32 64x64 128x128 + * 0.1 28 36 43 + * 0.01 59 75 73 + * 0.002 136 161 167 + * + * A reader of Benzi's paper will note that the performance shown above with + * respect to decreasing viscosity is significantly worse than in the + * paper. This is actually because of the choice of RHS. In Benzi's work, the + * RHS was generated by multiplying the operator with a vector of 1s whereas + * here we generate the RHS using a random vector. 
The iteration counts from the + * Benzi paper can be reproduced by changing the RHS generation in this example, + * but we choose to use the more difficult RHS as the resulting performance may + * more closely match what users experience in "physical" contexts. + */ + +PetscErrorCode CreateAndLoadMat(const char *mat_name, Mat *mat) +{ + PetscViewer viewer; + char file[PETSC_MAX_PATH_LEN]; + char flag_name[10] = "-f"; + PetscBool flg; + + PetscFunctionBeginUser; + PetscCall(PetscOptionsGetString(NULL, NULL, strcat(flag_name, mat_name), file, sizeof(file), &flg)); + PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must indicate file with the -f option"); + PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &viewer)); + PetscCall(MatCreate(PETSC_COMM_WORLD, mat)); + PetscCall(MatSetType(*mat, MATAIJ)); + PetscCall(PetscObjectSetName((PetscObject)*mat, mat_name)); + PetscCall(MatSetFromOptions(*mat)); + PetscCall(MatLoad(*mat, viewer)); + PetscCall(PetscViewerDestroy(&viewer)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +typedef struct { + Mat U, UT, D, aD, aDinv, I_plus_gammaUTaDinvU; + PC smw_cholesky; + PetscReal gamma, alpha; + PetscBool setup_called; +} SmwPCCtx; + +PetscErrorCode SmwSetup(PC pc) +{ + SmwPCCtx *ctx; + Vec aDVec; + + PetscFunctionBeginUser; + PetscCall(PCShellGetContext(pc, &ctx)); + + if (ctx->setup_called) PetscFunctionReturn(PETSC_SUCCESS); + + // Create aD + PetscCall(MatDuplicate(ctx->D, MAT_COPY_VALUES, &ctx->aD)); + PetscCall(MatScale(ctx->aD, ctx->alpha)); + + // Create aDinv + PetscCall(MatDuplicate(ctx->aD, MAT_DO_NOT_COPY_VALUES, &ctx->aDinv)); + PetscCall(MatCreateVecs(ctx->aD, &aDVec, NULL)); + PetscCall(MatGetDiagonal(ctx->aD, aDVec)); + PetscCall(VecReciprocal(aDVec)); + PetscCall(MatDiagonalSet(ctx->aDinv, aDVec, INSERT_VALUES)); + + // Create UT + PetscCall(MatTranspose(ctx->U, MAT_INITIAL_MATRIX, &ctx->UT)); + + // Create sum Mat + PetscCall(MatMatMatMult(ctx->UT, ctx->aDinv, ctx->U, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &ctx->I_plus_gammaUTaDinvU)); + PetscCall(MatScale(ctx->I_plus_gammaUTaDinvU, ctx->gamma)); + PetscCall(MatShift(ctx->I_plus_gammaUTaDinvU, 1.)); + + PetscCall(PCCreate(PETSC_COMM_WORLD, &ctx->smw_cholesky)); + PetscCall(PCSetType(ctx->smw_cholesky, PCCHOLESKY)); + PetscCall(PCSetOperators(ctx->smw_cholesky, ctx->I_plus_gammaUTaDinvU, ctx->I_plus_gammaUTaDinvU)); + PetscCall(PCSetOptionsPrefix(ctx->smw_cholesky, "smw_")); + PetscCall(PCSetFromOptions(ctx->smw_cholesky)); + PetscCall(PCSetUp(ctx->smw_cholesky)); + + PetscCall(VecDestroy(&aDVec)); + + ctx->setup_called = PETSC_TRUE; + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode SmwApply(PC pc, Vec x, Vec y) +{ + SmwPCCtx *ctx; + Vec vel0, pressure0, pressure1; + + PetscFunctionBeginUser; + PetscCall(PCShellGetContext(pc, &ctx)); + + PetscCall(MatCreateVecs(ctx->UT, &vel0, &pressure0)); + PetscCall(VecDuplicate(pressure0, &pressure1)); + + // First term + PetscCall(MatMult(ctx->aDinv, x, vel0)); + PetscCall(MatMult(ctx->UT, vel0, pressure0)); + PetscCall(PCApply(ctx->smw_cholesky, pressure0, pressure1)); + PetscCall(MatMult(ctx->U, pressure1, vel0)); + PetscCall(MatMult(ctx->aDinv, vel0, y)); + PetscCall(VecScale(y, -ctx->gamma)); + + // Second term + PetscCall(MatMult(ctx->aDinv, x, vel0)); + + PetscCall(VecAXPY(y, 1, vel0)); + + PetscCall(VecDestroy(&vel0)); + PetscCall(VecDestroy(&pressure0)); + PetscCall(VecDestroy(&pressure1)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +int main(int argc, char **args) +{ + Mat A, B, Q, Acondensed, Bcondensed, BT, J, 
AplusJ, QInv, D, AplusD, JplusD, U; + Mat AplusJarray[2]; + Vec bound, x, b, Qdiag, DVec; + PetscBool flg; + PetscViewer viewer; + char file[PETSC_MAX_PATH_LEN]; + PetscInt *boundary_indices; + PetscInt boundary_indices_size, am, an, bm, bn, condensed_am, astart, aend, Dstart, Dend, num_local_bnd_dofs = 0; + const PetscScalar zero = 0; + IS boundary_is, bulk_is; + KSP ksp; + PC pc, pcA, pcJ; + PetscRandom rctx; + PetscReal *boundary_indices_values; + PetscReal gamma = 100, alpha = .01; + PetscMPIInt rank; + SmwPCCtx ctx; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &args, (char *)0, help)); + PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank)); + + PetscCall(CreateAndLoadMat("A", &A)); + PetscCall(CreateAndLoadMat("B", &B)); + PetscCall(CreateAndLoadMat("Q", &Q)); + + PetscCall(PetscOptionsGetString(NULL, NULL, "-fbound", file, sizeof(file), &flg)); + PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must indicate file with the -fbound option"); + + if (rank == 0) { + PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &viewer)); + PetscCall(VecCreate(PETSC_COMM_SELF, &bound)); + PetscCall(PetscObjectSetName((PetscObject)bound, "bound")); + PetscCall(VecSetType(bound, VECSEQ)); + PetscCall(VecLoad(bound, viewer)); + PetscCall(PetscViewerDestroy(&viewer)); + PetscCall(VecGetLocalSize(bound, &boundary_indices_size)); + PetscCall(VecGetArray(bound, &boundary_indices_values)); + } + PetscCallMPI(MPI_Bcast(&boundary_indices_size, 1, MPIU_INT, 0, PETSC_COMM_WORLD)); + if (rank != 0) PetscCall(PetscMalloc1(boundary_indices_size, &boundary_indices_values)); + PetscCallMPI(MPI_Bcast(boundary_indices_values, boundary_indices_size, MPIU_SCALAR, 0, PETSC_COMM_WORLD)); + + PetscCall(MatGetSize(A, &am, NULL)); + // The total number of dofs for a given velocity component + const PetscInt nc = am / 2; + PetscCall(MatGetOwnershipRange(A, &astart, &aend)); + + PetscCall(PetscMalloc1(2 * boundary_indices_size, &boundary_indices)); + + // + // The dof index ordering appears to be all vx dofs and then all vy dofs. 
+ // + + // First do vx + for (PetscInt i = 0; i < boundary_indices_size; ++i) { + // MATLAB uses 1-based indexing + const PetscInt bnd_dof = (PetscInt)boundary_indices_values[i] - 1; + if ((bnd_dof >= astart) && (bnd_dof < aend)) boundary_indices[num_local_bnd_dofs++] = bnd_dof; + } + + // Now vy + for (PetscInt i = 0; i < boundary_indices_size; ++i) { + // MATLAB uses 1-based indexing + const PetscInt bnd_dof = ((PetscInt)boundary_indices_values[i] - 1) + nc; + if ((bnd_dof >= astart) && (bnd_dof < aend)) boundary_indices[num_local_bnd_dofs++] = bnd_dof; + } + if (rank == 0) PetscCall(VecRestoreArray(bound, &boundary_indices_values)); + else PetscCall(PetscFree(boundary_indices_values)); + + PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, num_local_bnd_dofs, boundary_indices, PETSC_USE_POINTER, &boundary_is)); + PetscCall(ISComplement(boundary_is, astart, aend, &bulk_is)); + + PetscCall(MatCreateSubMatrix(A, bulk_is, bulk_is, MAT_INITIAL_MATRIX, &Acondensed)); + // Can't pass null for row index set :-( + PetscCall(MatTranspose(B, MAT_INPLACE_MATRIX, &B)); + PetscCall(MatCreateSubMatrix(B, bulk_is, NULL, MAT_INITIAL_MATRIX, &Bcondensed)); + PetscCall(MatGetLocalSize(Acondensed, &am, &an)); + PetscCall(MatGetLocalSize(Bcondensed, &bm, &bn)); + + // Create QInv + PetscCall(MatCreateVecs(Q, &Qdiag, NULL)); + PetscCall(MatGetDiagonal(Q, Qdiag)); + PetscCall(VecReciprocal(Qdiag)); + PetscCall(MatDuplicate(Q, MAT_DO_NOT_COPY_VALUES, &QInv)); + PetscCall(MatDiagonalSet(QInv, Qdiag, INSERT_VALUES)); + PetscCall(MatAssemblyBegin(QInv, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(QInv, MAT_FINAL_ASSEMBLY)); + + // Create J + PetscCall(MatTranspose(Bcondensed, MAT_INITIAL_MATRIX, &BT)); + PetscCall(MatMatMatMult(Bcondensed, QInv, BT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &J)); + PetscCall(MatScale(J, gamma)); + + // Create sum of A + J + AplusJarray[0] = Acondensed; + AplusJarray[1] = J; + PetscCall(MatCreateComposite(PETSC_COMM_WORLD, 2, AplusJarray, &AplusJ)); + + // Create decomposition matrices + // We've already used Qdiag, which currently represents Q^-1, for its necessary purposes. 
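+  // Editor's sketch (not part of the commit): SmwSetup()/SmwApply() above implement the
+  // Sherman-Morrison-Woodbury identity quoted in the header comment,
+  //
+  //   (alpha*D + gamma*U*U^T)^{-1} = (alpha*D)^{-1}
+  //     - gamma*(alpha*D)^{-1}*U*(I + gamma*U^T*(alpha*D)^{-1}*U)^{-1}*U^T*(alpha*D)^{-1}
+  //
+  // The small matrix I + gamma*U^T*(alpha*D)^{-1}*U (ctx->I_plus_gammaUTaDinvU) is assembled
+  // and Cholesky-factored once in SmwSetup(); the leading and subtracted terms are the
+  // "Second term" and "First term" blocks of SmwApply(). With U = B*Q^(-1/2), built just
+  // below, gamma*U*U^T equals the grad-div block gamma*B*Q^(-1)*B^T assembled above as J.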
+  // Let's convert it to represent Q^(-1/2)
+  PetscCall(VecSqrtAbs(Qdiag));
+  // We can similarly reuse QInv
+  PetscCall(MatDiagonalSet(QInv, Qdiag, INSERT_VALUES));
+  PetscCall(MatAssemblyBegin(QInv, MAT_FINAL_ASSEMBLY));
+  PetscCall(MatAssemblyEnd(QInv, MAT_FINAL_ASSEMBLY));
+  // Create U
+  PetscCall(MatMatMult(Bcondensed, QInv, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &U));
+
+  // Create x and b
+  PetscCall(MatCreateVecs(AplusJ, &x, &b));
+  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
+  PetscCall(VecSetRandom(x, rctx));
+  PetscCall(PetscRandomDestroy(&rctx));
+  PetscCall(MatMult(AplusJ, x, b));
+
+  // Compute preconditioner operators
+  PetscCall(MatGetLocalSize(Acondensed, &condensed_am, NULL));
+  PetscCall(MatCreate(PETSC_COMM_WORLD, &D));
+  PetscCall(MatSetType(D, MATAIJ));
+  PetscCall(MatSetSizes(D, condensed_am, condensed_am, PETSC_DETERMINE, PETSC_DETERMINE));
+  PetscCall(MatGetOwnershipRange(D, &Dstart, &Dend));
+  for (PetscInt i = Dstart; i < Dend; ++i) PetscCall(MatSetValues(D, 1, &i, 1, &i, &zero, INSERT_VALUES));
+  PetscCall(MatAssemblyBegin(D, MAT_FINAL_ASSEMBLY));
+  PetscCall(MatAssemblyEnd(D, MAT_FINAL_ASSEMBLY));
+  PetscCall(MatCreateVecs(D, &DVec, NULL));
+  PetscCall(MatGetDiagonal(AplusJ, DVec));
+  PetscCall(MatDiagonalSet(D, DVec, INSERT_VALUES));
+  PetscCall(MatDuplicate(Acondensed, MAT_COPY_VALUES, &AplusD));
+  PetscCall(MatAXPY(AplusD, alpha, D, SUBSET_NONZERO_PATTERN));
+  PetscCall(MatDuplicate(J, MAT_COPY_VALUES, &JplusD));
+  PetscCall(MatAXPY(JplusD, alpha, D, SUBSET_NONZERO_PATTERN));
+
+  // Initialize our SMW context
+  ctx.U            = U;
+  ctx.D            = D;
+  ctx.gamma        = gamma;
+  ctx.alpha        = alpha;
+  ctx.setup_called = PETSC_FALSE;
+
+  // Set preconditioner operators
+  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
+  PetscCall(KSPSetType(ksp, KSPFGMRES));
+  PetscCall(KSPSetOperators(ksp, AplusJ, AplusJ));
+  PetscCall(KSPSetNormType(ksp, KSP_NORM_UNPRECONDITIONED));
+  PetscCall(KSPGMRESSetRestart(ksp, 300));
+  PetscCall(KSPGetPC(ksp, &pc));
+  PetscCall(PCSetType(pc, PCCOMPOSITE));
+  PetscCall(PCCompositeSetType(pc, PC_COMPOSITE_SPECIAL));
+  PetscCall(PCCompositeAddPCType(pc, PCILU));
+  PetscCall(PCCompositeAddPCType(pc, PCSHELL));
+  PetscCall(PCCompositeGetPC(pc, 0, &pcA));
+  PetscCall(PCCompositeGetPC(pc, 1, &pcJ));
+  PetscCall(PCSetOperators(pcA, AplusD, AplusD));
+  PetscCall(PCSetOperators(pcJ, JplusD, JplusD));
+  PetscCall(PCShellSetContext(pcJ, &ctx));
+  PetscCall(PCShellSetApply(pcJ, SmwApply));
+  PetscCall(PCShellSetSetUp(pcJ, SmwSetup));
+  PetscCall(PCCompositeSpecialSetAlphaMat(pc, D));
+
+  // Solve
+  PetscCall(KSPSetFromOptions(ksp));
+  PetscCall(KSPSolve(ksp, b, x));
+
+  PetscCall(MatDestroy(&A));
+  PetscCall(MatDestroy(&B));
+  PetscCall(MatDestroy(&Q));
+  PetscCall(MatDestroy(&Acondensed));
+  PetscCall(MatDestroy(&Bcondensed));
+  PetscCall(MatDestroy(&BT));
+  PetscCall(MatDestroy(&J));
+  PetscCall(MatDestroy(&AplusJ));
+  PetscCall(MatDestroy(&QInv));
+  PetscCall(MatDestroy(&D));
+  PetscCall(MatDestroy(&AplusD));
+  PetscCall(MatDestroy(&JplusD));
+  PetscCall(MatDestroy(&U));
+  if (rank == 0) PetscCall(VecDestroy(&bound));
+  PetscCall(VecDestroy(&x));
+  PetscCall(VecDestroy(&b));
+  PetscCall(VecDestroy(&Qdiag));
+  PetscCall(VecDestroy(&DVec));
+  PetscCall(ISDestroy(&boundary_is));
+  PetscCall(ISDestroy(&bulk_is));
+  PetscCall(KSPDestroy(&ksp));
+  PetscCall(PetscFree(boundary_indices));
+  PetscCall(MatDestroy(&ctx.UT));
+  PetscCall(MatDestroy(&ctx.I_plus_gammaUTaDinvU));
+  PetscCall(MatDestroy(&ctx.aD));
+  PetscCall(MatDestroy(&ctx.aDinv));
+  
PetscCall(PCDestroy(&ctx.smw_cholesky)); + + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + build: + requires: !complex + + test: + args: -fA ${DATAFILESPATH}/matrices/ifiss/A -fB ${DATAFILESPATH}/matrices/ifiss/B -fQ ${DATAFILESPATH}/matrices/ifiss/Q -fbound ${DATAFILESPATH}/is/ifiss/bound -ksp_monitor + requires: datafilespath defined(PETSC_USE_64BIT_INDICES) !complex double + + test: + suffix: 2 + nsize: 2 + args: -fA ${DATAFILESPATH}/matrices/ifiss/A -fB ${DATAFILESPATH}/matrices/ifiss/B -fQ ${DATAFILESPATH}/matrices/ifiss/Q -fbound ${DATAFILESPATH}/is/ifiss/bound -ksp_monitor + requires: datafilespath defined(PETSC_USE_64BIT_INDICES) !complex double strumpack + +TEST*/ diff --git a/src/ksp/pc/tutorials/output/ex4_1.out b/src/ksp/pc/tutorials/output/ex4_1.out new file mode 100644 index 00000000000..1a4835e0af1 --- /dev/null +++ b/src/ksp/pc/tutorials/output/ex4_1.out @@ -0,0 +1,24 @@ + 0 KSP Residual norm 3.460968555419e+03 + 1 KSP Residual norm 2.446604028888e+03 + 2 KSP Residual norm 8.181153487770e+02 + 3 KSP Residual norm 3.182400614700e+02 + 4 KSP Residual norm 2.025852357121e+02 + 5 KSP Residual norm 7.336698611174e+01 + 6 KSP Residual norm 4.631663344538e+01 + 7 KSP Residual norm 2.446240853656e+01 + 8 KSP Residual norm 1.660468459047e+01 + 9 KSP Residual norm 9.260130139535e+00 + 10 KSP Residual norm 5.246466174811e+00 + 11 KSP Residual norm 3.152405573664e+00 + 12 KSP Residual norm 1.758337244195e+00 + 13 KSP Residual norm 1.080566340072e+00 + 14 KSP Residual norm 7.107517584100e-01 + 15 KSP Residual norm 4.953959535298e-01 + 16 KSP Residual norm 3.447125554040e-01 + 17 KSP Residual norm 2.298183223366e-01 + 18 KSP Residual norm 1.487413041999e-01 + 19 KSP Residual norm 9.538923802117e-02 + 20 KSP Residual norm 6.688958654075e-02 + 21 KSP Residual norm 4.991225993509e-02 + 22 KSP Residual norm 3.524220216759e-02 + 23 KSP Residual norm 2.424977334179e-02 diff --git a/src/ksp/pc/tutorials/output/ex4_2.out b/src/ksp/pc/tutorials/output/ex4_2.out new file mode 100644 index 00000000000..a73f9ec1d65 --- /dev/null +++ b/src/ksp/pc/tutorials/output/ex4_2.out @@ -0,0 +1,23 @@ + 0 KSP Residual norm 3.495897445960e+03 + 1 KSP Residual norm 2.587114976882e+03 + 2 KSP Residual norm 8.469299635794e+02 + 3 KSP Residual norm 3.254481526101e+02 + 4 KSP Residual norm 2.019882821372e+02 + 5 KSP Residual norm 6.672283242707e+01 + 6 KSP Residual norm 4.584759138428e+01 + 7 KSP Residual norm 2.226550983206e+01 + 8 KSP Residual norm 1.442957922622e+01 + 9 KSP Residual norm 7.767981519611e+00 + 10 KSP Residual norm 4.515895713573e+00 + 11 KSP Residual norm 2.753822690099e+00 + 12 KSP Residual norm 1.483126649428e+00 + 13 KSP Residual norm 9.325891612261e-01 + 14 KSP Residual norm 6.020469321471e-01 + 15 KSP Residual norm 4.432463914062e-01 + 16 KSP Residual norm 2.930739722845e-01 + 17 KSP Residual norm 2.063699882415e-01 + 18 KSP Residual norm 1.345374901016e-01 + 19 KSP Residual norm 9.593600721656e-02 + 20 KSP Residual norm 6.478458219031e-02 + 21 KSP Residual norm 4.554622425277e-02 + 22 KSP Residual norm 2.991173022152e-02 diff --git a/src/mat/f90-mod/petscmat.h b/src/mat/f90-mod/petscmat.h index 0d6fbb72ef7..9cb9b889d96 100644 --- a/src/mat/f90-mod/petscmat.h +++ b/src/mat/f90-mod/petscmat.h @@ -3,22 +3,55 @@ ! 
#include "petsc/finclude/petscmat.h" - type tMat - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tMat end type tMat - type tMatNullSpace - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + Mat, parameter :: PETSC_NULL_MAT = tMat(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT +#endif + + type, extends(tPetscObject) :: tMatNullSpace end type tMatNullSpace - type tMatFDColoring - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + MatNullSpace, parameter :: PETSC_NULL_MAT_NULLSPACE = tMatNullSpace(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT_NULLSPACE +#endif + + type, extends(tPetscObject) :: tMatFDColoring end type tMatFDColoring + MatFDColoring, parameter :: PETSC_NULL_MAT_FDCOLORING = tMatFDColoring(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT_FDCOLORING +#endif - Mat, parameter :: PETSC_NULL_MAT = tMat(0) - MatFDColoring, parameter :: PETSC_NULL_MATFDCOLORING = tMatFDColoring(0) - MatNullSpace, parameter :: PETSC_NULL_MATNULLSPACE = tMatNullSpace(0) + type, extends(tPetscObject) :: tMatColoring + end type tMatColoring + MatColoring, parameter :: PETSC_NULL_MAT_COLORING = tMatColoring(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT_COLORING +#endif + + type, extends(tPetscObject) :: tMatTransposeColoring + end type tMatTransposeColoring + MatTransposeColoring, parameter :: PETSC_NULL_MAT_TRANSPOSE_COLORING = tMatTransposeColoring(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT_TRANSPOSE_COLORING +#endif + + type, extends(tPetscObject) :: tMatPartitioning + end type tMatPartitioning + MatPartitioning, parameter :: PETSC_NULL_MAT_PARTITIONING = tMatPartitioning(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT_PARTITIONING +#endif + + type, extends(tPetscObject) :: tMatCoarsen + end type tMatCoarsen + MatCoarsen, parameter :: PETSC_NULL_MAT_COARSEN = tMatCoarsen(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT_COARSEN +#endif +! ! ! Flag for matrix assembly ! @@ -390,9 +423,6 @@ #endif #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MAT -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MATFDCOLORING -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MATNULLSPACE !DEC$ ATTRIBUTES DLLEXPORT::MAT_FLUSH_ASSEMBLY !DEC$ ATTRIBUTES DLLEXPORT::MAT_FINAL_ASSEMBLY !DEC$ ATTRIBUTES DLLEXPORT::MAT_FACTOR_NONE diff --git a/src/mat/f90-mod/petscmat.h90 b/src/mat/f90-mod/petscmat.h90 index f4d11b2fee4..d4c5ee1939f 100644 --- a/src/mat/f90-mod/petscmat.h90 +++ b/src/mat/f90-mod/petscmat.h90 @@ -59,174 +59,7 @@ end subroutine MatSetPreallocationCOOLocal64 End Interface - Interface - subroutine MatCreateMPIAIJWithSplitArrays(a,b,c,d,e,f,g,h,i,j,k,l,& - &z) - import tMat - MPI_Comm a ! MPI_Comm - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscInt f (*) ! PetscInt - PetscInt g (*) ! PetscInt - PetscScalar h (*) ! PetscScalar - PetscInt i (*) ! PetscInt - PetscInt j (*) ! PetscInt - PetscScalar k (*) ! PetscScalar - Mat l ! 
Mat - PetscErrorCode z - end subroutine - End Interface - - Interface - subroutine MatCreateFromOptions(a,b,c,d,e,f,g,h,z) - import tMat - MPI_Comm a - character(*) b - PetscInt c - PetscInt d - PetscInt e - PetscInt f - PetscInt g - Mat h - PetscErrorCode z - end subroutine - End Interface - - interface MatNullSpaceCreate - subroutine MatNullSpaceCreate1(a,b,c,d,e,z) - import tVec,tMatNullSpace - MPI_Comm a - PetscBool b - PetscInt c - Vec d (*) - MatNullSpace e - PetscErrorCode z - end subroutine - subroutine MatNullSpaceCreate0(a,b,c,d,e,z) - import tVec,tMatNullSpace - MPI_Comm a - PetscBool b - PetscInt c - Vec d - MatNullSpace e - PetscErrorCode z - end subroutine - end interface - - interface MatGetSize - subroutine MatGetSize00(a,b,c,z) - import tMat - Mat a - PetscInt b - PetscInt c - PetscErrorCode z - end subroutine - subroutine MatGetSize10(a,b,c,z) - import tMat - Mat a - PetscInt b(*) - PetscInt c - PetscErrorCode z - end subroutine - subroutine MatGetSize01(a,b,c,z) - import tMat - Mat a - PetscInt b - PetscInt c(*) - PetscErrorCode z - end subroutine - end interface - - interface MatGetLocalSize - subroutine MatGetLocalSize00(a,b,c,z) - import tMat - Mat a - PetscInt b - PetscInt c - PetscErrorCode z - end subroutine - subroutine MatGetLocalSize10(a,b,c,z) - import tMat - Mat a - PetscInt b(*) - PetscInt c - PetscErrorCode z - end subroutine - subroutine MatGetLocalSize01(a,b,c,z) - import tMat - Mat a - PetscInt b - PetscInt c(*) - PetscErrorCode z - end subroutine - end interface - - Interface - subroutine MatCreateAIJ(a,b,c,d,e,f,g,h,i,j,z) - import tMat - MPI_Comm a - PetscInt b - PetscInt c - PetscInt d - PetscInt e - PetscInt f - PetscInt g (*) - PetscInt h - PetscInt i (*) - Mat j - PetscErrorCode z - end subroutine - - subroutine MatCreateSeqAIJ(a,b,c,d,e,f,z) - import tMat - MPI_Comm a - PetscInt b - PetscInt c - PetscInt d - PetscInt e(*) - Mat f - PetscErrorCode z - end subroutine - - subroutine MatSeqAIJSetPreallocation(a,b,c,z) - import tMat - Mat a - PetscInt b - PetscInt c(*) - PetscErrorCode z - end subroutine - - subroutine MatSeqBAIJSetPreallocation(a,b,c,d,z) - import tMat - Mat a - PetscInt b,c - PetscInt d(*) - PetscErrorCode z - end subroutine - - subroutine MatMPIAIJSetPreallocation(a,b,c,d,e,z) - import tMat - Mat a - PetscInt b - PetscInt c(*) - PetscInt d - PetscInt e(*) - PetscErrorCode z - end subroutine - - subroutine MatXAIJSetPreallocationb(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d (*) ! PetscInt - PetscInt e (*) ! PetscInt - PetscInt f (*) ! PetscInt - PetscErrorCode z - end subroutine - + interface subroutine MatSetValue(myMat,i,j,va,mode,ierr) import tMat Mat :: myMat @@ -244,433 +77,8 @@ InsertMode :: mode PetscErrorCode, intent(out) :: ierr end subroutine - end interface - Interface MatGetValues - ! picky Fortran requires separate prototypes if args - ! are arrays or scalars - subroutine MatGetValues0(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesnn1(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesnnnn(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! 
PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f(1,1) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValues11(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValues1n(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesn1(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f (*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValues11a(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f (*) ! PetscScalar - PetscErrorCode z - end subroutine - End Interface MatGetValues - - - Interface MatGetValuesLocal - ! picky Fortran requires separate prototypes if args - ! are arrays or scalars - subroutine MatGetValuesLocal0(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesLocalnn1(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesLocalnnnn(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f(1,1) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesLocal11(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesLocal11a(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f(*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesLocal1n(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine MatGetValuesLocaln1(a,b,c,d,e,f,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f (*) ! PetscScalar - PetscErrorCode z - end subroutine - End Interface MatGetValuesLocal - - Interface MatSetValues - ! picky Fortran requires separate prototypes if args - ! are arrays or scalars - subroutine MatSetValues0(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesnn1(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! 
PetscInt - PetscScalar f ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesnnnn(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f(1,1) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValues11(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValues1n(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesn1(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - End Interface MatSetValues - - Interface MatSetValuesLocal - ! picky Fortran requires separate prototypes if args - ! are arrays or scalars - subroutine MatSetValuesLocal0(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesLocal11(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f(*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesLocal11nn(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f(1,1) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesLocal111(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesLocal1n(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesLocaln1(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - End Interface MatSetValuesLocal - - Interface MatSetValuesBlockedLocal - ! picky Fortran requires separate prototypes if args - ! are arrays or scalars - subroutine MatSetValuesBlockedLocal0(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlockedLocal11(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! 
PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f(*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlockedLocal111(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f(1,1) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlockedLocal1n(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlockedLocaln1(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - End Interface MatSetValuesBlockedLocal - - Interface MatSetValuesBlocked - ! picky Fortran requires separate prototypes if args - ! are arrays or scalars - subroutine MatSetValuesBlocked2(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar, pointer :: f (:,:) - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlocked0(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlocked11(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f(*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlocked111(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f(1,1) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlocked1n(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscInt d ! PetscInt - PetscInt e (*) ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! InsertMode - PetscErrorCode z - end subroutine - subroutine MatSetValuesBlockedn1(a,b,c,d,e,f,g,z) - import tMat - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscInt d ! PetscInt - PetscInt e ! PetscInt - PetscScalar f (*) ! PetscScalar - InsertMode g ! 
InsertMode - PetscErrorCode z - end subroutine - End Interface MatSetValuesBlocked - Interface Subroutine MatGetRow(v,row,ncols,cols,vals,ierr) import tMat @@ -833,270 +241,6 @@ End Subroutine End Interface - Interface - Subroutine MatGetInfo(v, flag, info ,ierr) - import tMat - Mat v - MatInfoType flag -#include "../src/mat/f90-mod/petscmatinfosize.h" - MatInfo info(MAT_INFO_SIZE) - PetscErrorCode ierr - End Subroutine - End Interface - - Interface - subroutine MatLUFactor(v, row, col, info ,ierr) - import tMat,tIS - Mat v - IS row - IS col -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatILUFactor(v, row, col, info ,ierr) - import tMat,tIS - Mat v - IS row - IS col -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatLUFactorSymbolic(fact, v, row, col, info ,ierr) - import tMat,tIS - Mat fact - Mat v - IS row - IS col -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatLUFactorNumeric(fact, v, info ,ierr) - import tMat - Mat fact - Mat v -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatCholeskyFactor(v, perm, info ,ierr) - import tMat,tIS - Mat v - IS perm -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatCholeskyFactorSymbolic(fact,v,perm,info,ierr) - import tMat,tIS - Mat fact - Mat v - IS perm -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatCholeskyFactorNumeric(fact, v, info ,ierr) - import tMat - Mat fact - Mat v -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatILUFactorSymbolic(fact,v,row,col,info,ierr) - import tMat,tIS - Mat fact - Mat v - IS row - IS col -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatICCFactorSymbolic(fact, v, perm, info ,ierr) - import tMat,tIS - Mat fact - Mat v - IS perm -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatICCFactor(v, row, info ,ierr) - import tMat,tIS - Mat v - IS row -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatGetGetNullSpace(A,n,ierr) - import tMat,tMatNullSpace - Mat A - MatNullSpace n - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatFactorInfoInitialize(info ,ierr) -#include "../src/mat/f90-mod/petscmatfactorinfosize.h" - MatFactorInfo info(MAT_FACTORINFO_SIZE) - PetscErrorCode ierr - end subroutine - End Interface - - Interface - subroutine MatSetType(a,b,z) - import tMat - Mat a - 
character(*) b - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine MatView(a,b,z) - import tMat,tPetscViewer - Mat a - PetscViewer b - PetscErrorCode z - end subroutine - end Interface - - Interface MatZeroRowsLocal - subroutine MatZeroRowsLocal0(a,b,c,d,e,f,z) - import tMat,tVec - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscScalar d ! PetscScalar - Vec e ! Vec - Vec f ! Vec - PetscErrorCode z - end subroutine - subroutine MatZeroRowsLocal1(a,b,c,d,e,f,z) - import tMat,tVec - Mat a ! Mat - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d ! PetscScalar - Vec e ! Vec - Vec f ! Vec - PetscErrorCode z - end subroutine - End Interface - - interface MatGetOwnershipRange - subroutine MatGetOwnershipRange00(a,b,c,ierr) - import tMat - Mat,intent(in) :: a ! Mat - PetscInt :: b,c ! PetscInt - PetscErrorCode,intent(out) :: ierr - end subroutine - subroutine MatGetOwnershipRange10(a,b,c,ierr) - import tMat - Mat,intent(in) :: a ! Mat - PetscInt,dimension(:) :: b ! PetscInt - PetscInt :: c ! PetscInt - PetscErrorCode,intent(out) :: ierr - end subroutine - subroutine MatGetOwnershipRange01(a,b,c,ierr) - import tMat - Mat,intent(in) :: a ! Mat - PetscInt :: b ! PetscInt - PetscInt,dimension(:) :: c ! PetscInt - PetscErrorCode,intent(out) :: ierr - end subroutine - subroutine MatGetOwnershipRange11(a,b,c,ierr) - import tMat - Mat,intent(in) :: a ! Mat - PetscInt,dimension(:) :: b,c ! PetscInt - PetscErrorCode,intent(out) :: ierr - end subroutine - end interface - - interface - subroutine MatGetFactor(m,t,ft,f,ierr) - import tMat - Mat, intent(in) :: m - character(*), intent(in) :: t - MatFactorType,intent(in) :: ft - Mat, intent(out) :: f - PetscErrorCode,intent(out) :: ierr - end subroutine - - subroutine MatPartitioningCreate(a,b,ierr) - MPI_Comm, intent(in) :: a ! MPI_comm - MatPartitioning, intent(out) :: b - PetscErrorCode,intent(out) :: ierr - end subroutine - - subroutine MatPartitioningDestroy(a,ierr) - MatPartitioning, intent(in) :: a - PetscErrorCode,intent(out) :: ierr - end subroutine - - subroutine MatPartitioningSetAdjacency(a,b,ierr) - import tMat - MatPartitioning, intent(in) :: a - Mat, intent(in) :: b - PetscErrorCode,intent(out) :: ierr - end subroutine - - subroutine MatPartitioningSetFromOptions(a,ierr) - MatPartitioning, intent(in) :: a - PetscErrorCode,intent(out) :: ierr - end subroutine - - subroutine MatPartitioningApply(a,b,ierr) - import tIS - MatPartitioning, intent(in) :: a - IS, intent(out) :: b - PetscErrorCode :: ierr - end subroutine - - subroutine MatDestroy(m,ierr) - import tMat - Mat m - PetscErrorCode :: ierr - end subroutine MatDestroy - - end interface - ! 
revert MAT_INFO_SIZE and MAT_FACTORINFO_SIZE defines #undef MAT_INFO_SIZE #undef MAT_FACTORINFO_SIZE diff --git a/src/mat/ftn-kernels/sgemv.F90 b/src/mat/ftn-kernels/sgemv.F90 index 3d46eefafc2..41236b73d7f 100644 --- a/src/mat/ftn-kernels/sgemv.F90 +++ b/src/mat/ftn-kernels/sgemv.F90 @@ -22,7 +22,6 @@ subroutine MSGemv(bs,ncols,A,x,y) 20 continue 10 continue - return end subroutine MSGemvp(bs,ncols,A,x,y) @@ -39,7 +38,6 @@ subroutine MSGemvp(bs,ncols,A,x,y) 20 continue 10 continue - return end subroutine MSGemvm(bs,ncols,A,x,y) @@ -56,7 +54,6 @@ subroutine MSGemvm(bs,ncols,A,x,y) 20 continue 10 continue - return end subroutine MSGemvt(bs,ncols,A,x,y) @@ -75,7 +72,6 @@ subroutine MSGemvt(bs,ncols,A,x,y) y(i) = sum 10 continue - return end subroutine MSGemm(bs,A,B,C) @@ -95,7 +91,6 @@ subroutine MSGemm(bs,A,B,C) 20 continue 10 continue - return end subroutine MSGemmi(bs,A,C,B) @@ -116,5 +111,4 @@ subroutine MSGemmi(bs,A,C,B) 20 continue 10 continue - return end diff --git a/src/mat/graphops/coarsen/ftn-custom/makefile b/src/mat/graphops/coarsen/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/mat/graphops/coarsen/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/graphops/coarsen/ftn-custom/zcoarsenf.c b/src/mat/graphops/coarsen/ftn-custom/zcoarsenf.c deleted file mode 100644 index ed3d278b1e1..00000000000 --- a/src/mat/graphops/coarsen/ftn-custom/zcoarsenf.c +++ /dev/null @@ -1,19 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcoarsenviewfromoptions_ MATCOARSENVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcoarsenviewfromoptions_ matcoarsenviewfromoptions -#endif - -PETSC_EXTERN void matcoarsenviewfromoptions_(MatCoarsen *a, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = MatCoarsenViewFromOptions(*a, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/mat/graphops/coarsen/impls/hem/hem.c b/src/mat/graphops/coarsen/impls/hem/hem.c index e9f5c3a4f5d..03b21d64cc6 100644 --- a/src/mat/graphops/coarsen/impls/hem/hem.c +++ b/src/mat/graphops/coarsen/impls/hem/hem.c @@ -36,7 +36,8 @@ PetscErrorCode PetscCDDestroy(PetscCoarsenData *ail) n = n->next; while (n) { PetscCDArrNd *lstn = n; - n = n->next; + + n = n->next; PetscCall(PetscFree(lstn)); } if (ail->pool_list.array) PetscCall(PetscFree(ail->pool_list.array)); @@ -64,10 +65,11 @@ static PetscErrorCode PetscCDGetNewNode(PetscCoarsenData *ail, PetscCDIntNd **a_ *a_out = NULL; /* squelch -Wmaybe-uninitialized */ if (ail->extra_nodes) { PetscCDIntNd *node = ail->extra_nodes; - ail->extra_nodes = node->next; - node->gid = a_id; - node->next = NULL; - *a_out = node; + + ail->extra_nodes = node->next; + node->gid = a_id; + node->next = NULL; + *a_out = node; } else { if (!ail->pool_list.array) { if (!ail->chk_sz) ail->chk_sz = 10; /* use a chuck size of ail->size? 
*/ @@ -77,6 +79,7 @@ static PetscErrorCode PetscCDGetNewNode(PetscCoarsenData *ail, PetscCDIntNd **a_ ail->new_node->next = NULL; } else if (!ail->new_left) { PetscCDArrNd *node; + PetscCall(PetscMalloc(ail->chk_sz * sizeof(PetscCDIntNd) + sizeof(PetscCDArrNd), &node)); node->array = (PetscCDIntNd *)(node + 1); node->next = ail->pool_list.next; @@ -195,7 +198,7 @@ PetscErrorCode PetscCDRemoveNextNode(PetscCoarsenData *ail, PetscInt a_idx, Pets /* PetscCDPrint */ -PetscErrorCode PetscCDPrint(const PetscCoarsenData *ail, PetscInt my0, MPI_Comm comm) +PetscErrorCode PetscCDPrint(const PetscCoarsenData *ail, PetscInt Istart, MPI_Comm comm) { PetscCDIntNd *n, *n2; PetscInt ii; @@ -203,7 +206,7 @@ PetscErrorCode PetscCDPrint(const PetscCoarsenData *ail, PetscInt my0, MPI_Comm PetscFunctionBegin; for (ii = 0; ii < ail->size; ii++) { n2 = n = ail->array[ii]; - if (n) PetscCall(PetscSynchronizedPrintf(comm, "list %" PetscInt_FMT ":", ii + my0)); + if (n) PetscCall(PetscSynchronizedPrintf(comm, "list %" PetscInt_FMT ":", ii + Istart)); while (n) { PetscCall(PetscSynchronizedPrintf(comm, " %" PetscInt_FMT, n->gid)); n = n->next; @@ -282,8 +285,9 @@ PetscErrorCode PetscCDCount(const PetscCoarsenData *ail, PetscInt *a_sz) PetscInt sz = 0; PetscFunctionBegin; - for (int ii = 0; ii < ail->size; ii++) { + for (PetscInt ii = 0; ii < ail->size; ii++) { PetscCDIntNd *n1 = ail->array[ii]; + while (n1) { n1 = n1->next; sz++; @@ -415,9 +419,9 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i #define REQ_BF_SIZE 100 PetscBool isMPI; MPI_Comm comm; - PetscInt ix, *ii, *aj, Iend, my0, ncomm_procs, bc_agg = -1, *rbuff = NULL, rbuff_sz = 0; - PetscMPIInt rank, size, comm_procs[REQ_BF_SIZE], *lid_max_pe; - const PetscInt nloc = a_Gmat->rmap->n, request_size = PetscCeilReal((PetscReal)sizeof(MPI_Request) / (PetscReal)sizeof(PetscInt)); + PetscInt ix, *ii, *aj, Istart, bc_agg = -1, *rbuff = NULL, rbuff_sz = 0; + PetscMPIInt rank, size, comm_procs[REQ_BF_SIZE], ncomm_procs, *lid_max_pe; + const PetscInt nloc = a_Gmat->rmap->n, request_size = PetscCeilInt((int)sizeof(MPI_Request), (int)sizeof(PetscInt)); PetscInt *lid_cprowID; PetscBool *lid_matched; Mat_SeqAIJ *matA, *matB = NULL; @@ -432,19 +436,16 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(PetscObjectGetComm((PetscObject)a_Gmat, &comm)); PetscCallMPI(MPI_Comm_rank(comm, &rank)); PetscCallMPI(MPI_Comm_size(comm, &size)); - PetscCall(MatGetOwnershipRange(a_Gmat, &my0, &Iend)); + PetscCall(MatGetOwnershipRange(a_Gmat, &Istart, NULL)); PetscCall(ISCreate(comm, &info_is)); PetscCall(PetscInfo(info_is, "Start %" PetscInt_FMT " iterations of HEM.\n", n_iter)); - PetscCall(PetscMalloc1(nloc, &lid_matched)); - PetscCall(PetscMalloc1(nloc, &lid_cprowID)); - PetscCall(PetscMalloc1(nloc, &lid_max_pe)); - + PetscCall(PetscMalloc3(nloc, &lid_matched, nloc, &lid_cprowID, nloc, &lid_max_pe)); PetscCall(PetscCDCreate(nloc, &agg_llists)); PetscCall(PetscCDSetChunkSize(agg_llists, nloc + 1)); *a_locals_llist = agg_llists; /* add self to all lists */ - for (int kk = 0; kk < nloc; kk++) PetscCall(PetscCDAppendID(agg_llists, kk, my0 + kk)); + for (PetscInt kk = 0; kk < nloc; kk++) PetscCall(PetscCDAppendID(agg_llists, kk, Istart + kk)); /* make a copy of the graph, this gets destroyed in iterates */ PetscCall(MatDuplicate(a_Gmat, MAT_COPY_VALUES, &cMat)); PetscCall(MatConvert(cMat, MATAIJ, MAT_INPLACE_MATRIX, &cMat)); @@ -454,16 +455,17 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const 
PetscInt n_i PetscCall(PetscCDCreate(size, &ghost_deleted_list)); PetscCall(PetscCDSetChunkSize(ghost_deleted_list, 100)); } - for (int iter = 0; iter < n_iter; iter++) { + for (PetscInt iter = 0; iter < n_iter; iter++) { const PetscScalar *lghost_max_ew, *lid_max_ew; PetscBool *lghost_matched; PetscMPIInt *lghost_pe, *lghost_max_pe; Vec locMaxEdge, ghostMaxEdge, ghostMaxPE, locMaxPE; PetscInt *lghost_gid, nEdges, nEdges0, num_ghosts = 0; Edge *Edges; - const int n_sub_its = 1000; // in case of a bug, stop at some point + const PetscInt n_sub_its = 1000; // in case of a bug, stop at some point + /* get submatrices of cMat */ - for (int kk = 0; kk < nloc; kk++) lid_cprowID[kk] = -1; + for (PetscInt kk = 0; kk < nloc; kk++) lid_cprowID[kk] = -1; if (isMPI) { mpimat = (Mat_MPIAIJ *)cMat->data; matA = (Mat_SeqAIJ *)mpimat->A->data; @@ -481,7 +483,7 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i matA = (Mat_SeqAIJ *)cMat->data; } /* set matched flags: true for empty list */ - for (int kk = 0; kk < nloc; kk++) { + for (PetscInt kk = 0; kk < nloc; kk++) { PetscCall(PetscCDCountAt(agg_llists, kk, &ix)); if (ix > 0) lid_matched[kk] = PETSC_FALSE; else lid_matched[kk] = PETSC_TRUE; // call deleted gids as matched @@ -494,11 +496,13 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i Vec vec; PetscScalar vval; const PetscScalar *buf; + PetscCall(MatCreateVecs(cMat, &vec, NULL)); PetscCall(VecGetLocalSize(mpimat->lvec, &num_ghosts)); /* lghost_matched */ - for (PetscInt kk = 0, gid = my0; kk < nloc; kk++, gid++) { + for (PetscInt kk = 0, gid = Istart; kk < nloc; kk++, gid++) { PetscScalar vval = lid_matched[kk] ? 1.0 : 0.0; + PetscCall(VecSetValues(vec, 1, &gid, &vval, INSERT_VALUES)); } PetscCall(VecAssemblyBegin(vec)); @@ -506,25 +510,26 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(VecScatterBegin(mpimat->Mvctx, vec, mpimat->lvec, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(mpimat->Mvctx, vec, mpimat->lvec, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecGetArrayRead(mpimat->lvec, &buf)); /* get proc ID in 'buf' */ - PetscCall(PetscMalloc1(num_ghosts, &lghost_matched)); - for (int kk = 0; kk < num_ghosts; kk++) { + PetscCall(PetscMalloc4(num_ghosts, &lghost_matched, num_ghosts, &lghost_pe, num_ghosts, &lghost_gid, num_ghosts, &lghost_max_pe)); + + for (PetscInt kk = 0; kk < num_ghosts; kk++) { lghost_matched[kk] = (PetscBool)(PetscRealPart(buf[kk]) != 0); // the proc of the ghost for now } PetscCall(VecRestoreArrayRead(mpimat->lvec, &buf)); /* lghost_pe */ vval = (PetscScalar)(rank); - for (PetscInt kk = 0, gid = my0; kk < nloc; kk++, gid++) PetscCall(VecSetValues(vec, 1, &gid, &vval, INSERT_VALUES)); /* set with GID */ + for (PetscInt kk = 0, gid = Istart; kk < nloc; kk++, gid++) PetscCall(VecSetValues(vec, 1, &gid, &vval, INSERT_VALUES)); /* set with GID */ PetscCall(VecAssemblyBegin(vec)); PetscCall(VecAssemblyEnd(vec)); PetscCall(VecScatterBegin(mpimat->Mvctx, vec, mpimat->lvec, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(mpimat->Mvctx, vec, mpimat->lvec, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall(VecGetArrayRead(mpimat->lvec, &buf)); /* get proc ID in 'buf' */ - PetscCall(PetscMalloc1(num_ghosts, &lghost_pe)); - for (int kk = 0; kk < num_ghosts; kk++) lghost_pe[kk] = (PetscMPIInt)PetscRealPart(buf[kk]); // the proc of the ghost for now + PetscCall(VecGetArrayRead(mpimat->lvec, &buf)); /* get proc ID in 'buf' */ + for (PetscInt kk = 0; kk < 
num_ghosts; kk++) lghost_pe[kk] = (PetscMPIInt)PetscRealPart(buf[kk]); // the proc of the ghost for now PetscCall(VecRestoreArrayRead(mpimat->lvec, &buf)); /* lghost_gid */ - for (PetscInt kk = 0, gid = my0; kk < nloc; kk++, gid++) { + for (PetscInt kk = 0, gid = Istart; kk < nloc; kk++, gid++) { vval = (PetscScalar)(gid); + PetscCall(VecSetValues(vec, 1, &gid, &vval, INSERT_VALUES)); /* set with GID */ } PetscCall(VecAssemblyBegin(vec)); @@ -533,31 +538,33 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(VecScatterEnd(mpimat->Mvctx, vec, mpimat->lvec, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecDestroy(&vec)); PetscCall(VecGetArrayRead(mpimat->lvec, &buf)); /* get proc ID in 'lghost_gid' */ - PetscCall(PetscMalloc1(num_ghosts, &lghost_gid)); - for (int kk = 0; kk < num_ghosts; kk++) lghost_gid[kk] = (PetscInt)PetscRealPart(buf[kk]); + for (PetscInt kk = 0; kk < num_ghosts; kk++) lghost_gid[kk] = (PetscInt)PetscRealPart(buf[kk]); PetscCall(VecRestoreArrayRead(mpimat->lvec, &buf)); } // get 'comm_procs' (could hoist) - for (int kk = 0; kk < REQ_BF_SIZE; kk++) comm_procs[kk] = -1; + for (PetscInt kk = 0; kk < REQ_BF_SIZE; kk++) comm_procs[kk] = -1; for (ix = 0, ncomm_procs = 0; ix < num_ghosts; ix++) { PetscMPIInt proc = lghost_pe[ix], idx = -1; - for (int k = 0; k < ncomm_procs && idx == -1; k++) + + for (PetscInt k = 0; k < ncomm_procs && idx == -1; k++) if (comm_procs[k] == proc) idx = k; if (idx == -1) { comm_procs[ncomm_procs++] = proc; } - PetscCheck(ncomm_procs != REQ_BF_SIZE, PETSC_COMM_SELF, PETSC_ERR_SUP, "Receive request array too small: %d", (int)ncomm_procs); + PetscCheck(ncomm_procs != REQ_BF_SIZE, PETSC_COMM_SELF, PETSC_ERR_SUP, "Receive request array too small: %d", ncomm_procs); } /* count edges, compute initial 'locMaxEdge', 'locMaxPE' */ nEdges0 = 0; - for (PetscInt kk = 0, gid = my0; kk < nloc; kk++, gid++) { + for (PetscInt kk = 0, gid = Istart; kk < nloc; kk++, gid++) { PetscReal max_e = 0., tt; PetscScalar vval; PetscInt lid = kk, max_pe = rank, pe, n; + ii = matA->i; n = ii[lid + 1] - ii[lid]; aj = PetscSafePointerPlusOffset(matA->j, ii[lid]); ap = PetscSafePointerPlusOffset(matA->a, ii[lid]); - for (int jj = 0; jj < n; jj++) { + for (PetscInt jj = 0; jj < n; jj++) { PetscInt lidj = aj[jj]; + if ((tt = PetscRealPart(ap[jj])) > threshold && lidj != lid) { if (tt > max_e) max_e = tt; if (lidj > lid) nEdges0++; @@ -568,7 +575,7 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i n = ii[ix + 1] - ii[ix]; ap = matB->a + ii[ix]; aj = matB->j + ii[ix]; - for (int jj = 0; jj < n; jj++) { + for (PetscInt jj = 0; jj < n; jj++) { if ((tt = PetscRealPart(ap[jj])) > threshold) { if (tt > max_e) max_e = tt; nEdges0++; @@ -587,7 +594,7 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(PetscCDCreate(1, &bc_list)); } PetscCall(PetscCDRemoveAllAt(agg_llists, lid)); - PetscCall(PetscCDAppendID(bc_list, 0, my0 + lid)); + PetscCall(PetscCDAppendID(bc_list, 0, Istart + lid)); } } PetscCall(VecAssemblyBegin(locMaxEdge)); @@ -595,8 +602,9 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(VecAssemblyBegin(locMaxPE)); PetscCall(VecAssemblyEnd(locMaxPE)); /* make 'ghostMaxEdge_max_ew', 'lghost_max_pe' */ - if (mpimat) { + if (isMPI) { const PetscScalar *buf; + PetscCall(VecDuplicate(mpimat->lvec, &ghostMaxEdge)); PetscCall(VecScatterBegin(mpimat->Mvctx, locMaxEdge, ghostMaxEdge, INSERT_VALUES, SCATTER_FORWARD)); 
PetscCall(VecScatterEnd(mpimat->Mvctx, locMaxEdge, ghostMaxEdge, INSERT_VALUES, SCATTER_FORWARD)); @@ -605,32 +613,34 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(VecScatterBegin(mpimat->Mvctx, locMaxPE, ghostMaxPE, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(mpimat->Mvctx, locMaxPE, ghostMaxPE, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecGetArrayRead(ghostMaxPE, &buf)); - PetscCall(PetscMalloc1(num_ghosts, &lghost_max_pe)); - for (int kk = 0; kk < num_ghosts; kk++) lghost_max_pe[kk] = (PetscMPIInt)PetscRealPart(buf[kk]); // the MAX proc of the ghost now + for (PetscInt kk = 0; kk < num_ghosts; kk++) lghost_max_pe[kk] = (PetscMPIInt)PetscRealPart(buf[kk]); // the MAX proc of the ghost now PetscCall(VecRestoreArrayRead(ghostMaxPE, &buf)); } { // make lid_max_pe const PetscScalar *buf; + PetscCall(VecGetArrayRead(locMaxPE, &buf)); - for (int kk = 0; kk < nloc; kk++) lid_max_pe[kk] = (PetscMPIInt)PetscRealPart(buf[kk]); // the MAX proc of the ghost now + for (PetscInt kk = 0; kk < nloc; kk++) lid_max_pe[kk] = (PetscMPIInt)PetscRealPart(buf[kk]); // the MAX proc of the ghost now PetscCall(VecRestoreArrayRead(locMaxPE, &buf)); } /* setup sorted list of edges, and make 'Edges' */ PetscCall(PetscMalloc1(nEdges0, &Edges)); nEdges = 0; - for (int kk = 0, n; kk < nloc; kk++) { + for (PetscInt kk = 0, n; kk < nloc; kk++) { const PetscInt lid = kk; PetscReal tt; + ii = matA->i; n = ii[lid + 1] - ii[lid]; aj = PetscSafePointerPlusOffset(matA->j, ii[lid]); ap = PetscSafePointerPlusOffset(matA->a, ii[lid]); - for (int jj = 0; jj < n; jj++) { + for (PetscInt jj = 0; jj < n; jj++) { PetscInt lidj = aj[jj]; + if ((tt = PetscRealPart(ap[jj])) > threshold && lidj != lid) { if (lidj > lid) { Edges[nEdges].lid0 = lid; - Edges[nEdges].gid1 = lidj + my0; + Edges[nEdges].gid1 = lidj + Istart; Edges[nEdges].ghost1_idx = -1; Edges[nEdges].weight = tt; nEdges++; @@ -642,7 +652,7 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i n = ii[ix + 1] - ii[ix]; ap = matB->a + ii[ix]; aj = matB->j + ii[ix]; - for (int jj = 0; jj < n; jj++) { + for (PetscInt jj = 0; jj < n; jj++) { if ((tt = PetscRealPart(ap[jj])) > threshold) { Edges[nEdges].lid0 = lid; Edges[nEdges].gid1 = lghost_gid[aj[jj]]; @@ -653,10 +663,10 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i } } } - PetscCheck(nEdges == nEdges0, PETSC_COMM_SELF, PETSC_ERR_SUP, "nEdges != nEdges0: %d %d", (int)nEdges0, (int)nEdges); + PetscCheck(nEdges == nEdges0, PETSC_COMM_SELF, PETSC_ERR_SUP, "nEdges != nEdges0: %" PetscInt_FMT " %" PetscInt_FMT, nEdges0, nEdges); if (Edges) qsort(Edges, nEdges, sizeof(Edge), gamg_hem_compare); - PetscCall(PetscInfo(info_is, "[%d] HEM iteration %d with %d edges\n", rank, iter, (int)nEdges)); + PetscCall(PetscInfo(info_is, "[%d] HEM iteration %" PetscInt_FMT " with %" PetscInt_FMT " edges\n", rank, iter, nEdges)); /* projection matrix */ PetscCall(MatCreate(comm, &P)); @@ -666,31 +676,33 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(MatSeqAIJSetPreallocation(P, 1, NULL)); PetscCall(MatSetUp(P)); /* process - communicate - process */ - for (int sub_it = 0, old_num_edge = 0; /* sub_it < n_sub_its */; /* sub_it++ */) { + for (PetscInt sub_it = 0, old_num_edge = 0; /* sub_it < n_sub_its */; /* sub_it++ */) { PetscInt nactive_edges = 0, n_act_n[3], gn_act_n[3]; PetscMPIInt tag1, tag2; + PetscCall(VecGetArrayRead(locMaxEdge, &lid_max_ew)); if (isMPI) { 
PetscCall(VecGetArrayRead(ghostMaxEdge, &lghost_max_ew)); PetscCall(PetscCommGetNewTag(comm, &tag1)); PetscCall(PetscCommGetNewTag(comm, &tag2)); } - for (int kk = 0; kk < nEdges; kk++) { - /* HEM */ + for (PetscInt kk = 0; kk < nEdges; kk++) { const Edge *e = &Edges[kk]; - const PetscInt lid0 = e->lid0, gid1 = e->gid1, ghost1_idx = e->ghost1_idx, gid0 = lid0 + my0, lid1 = gid1 - my0; + const PetscInt lid0 = e->lid0, gid1 = e->gid1, ghost1_idx = e->ghost1_idx, gid0 = lid0 + Istart, lid1 = gid1 - Istart; PetscBool isOK = PETSC_TRUE, print = PETSC_FALSE; - if (print) PetscCall(PetscSynchronizedPrintf(comm, "\t[%d] edge (%d %d), %d %d %d\n", rank, (int)gid0, (int)gid1, lid_matched[lid0], (ghost1_idx != -1 && lghost_matched[ghost1_idx]), (ghost1_idx == -1 && lid_matched[lid1]))); + + if (print) + PetscCall(PetscSynchronizedPrintf(comm, "\t[%d] edge (%" PetscInt_FMT " %" PetscInt_FMT "), %s %s %s\n", rank, gid0, gid1, lid_matched[lid0] ? "true" : "false", (ghost1_idx != -1 && lghost_matched[ghost1_idx]) ? "true" : "false", (ghost1_idx == -1 && lid_matched[lid1]) ? "true" : "false")); /* skip if either vertex is matched already */ if (lid_matched[lid0] || (ghost1_idx != -1 && lghost_matched[ghost1_idx]) || (ghost1_idx == -1 && lid_matched[lid1])) continue; nactive_edges++; PetscCheck(PetscRealPart(lid_max_ew[lid0]) >= e->weight - MY_MEPS, PETSC_COMM_SELF, PETSC_ERR_SUP, "edge weight %e > max %e", (double)e->weight, (double)PetscRealPart(lid_max_ew[lid0])); - if (print) PetscCall(PetscSynchronizedPrintf(comm, "\t[%d] active edge (%d %d), diff0 = %10.4e\n", rank, (int)gid0, (int)gid1, (double)(PetscRealPart(lid_max_ew[lid0]) - (double)e->weight))); + if (print) PetscCall(PetscSynchronizedPrintf(comm, "\t[%d] active edge (%" PetscInt_FMT " %" PetscInt_FMT "), diff0 = %10.4e\n", rank, gid0, gid1, (double)(PetscRealPart(lid_max_ew[lid0]) - (double)e->weight))); // smaller edge, lid_max_ew get updated - e0 if (PetscRealPart(lid_max_ew[lid0]) > e->weight + MY_MEPS) { if (print) - PetscCall(PetscSynchronizedPrintf(comm, "\t\t[%d] 1) e0 SKIPPING small edge %20.14e edge (%d %d), diff = %10.4e to proc %d. max = %20.14e, w = %20.14e\n", rank, (double)e->weight, (int)gid0, (int)gid1, (double)(PetscRealPart(lid_max_ew[lid0]) - e->weight), ghost1_idx != -1 ? (int)lghost_pe[ghost1_idx] : rank, (double)PetscRealPart(lid_max_ew[lid0]), + PetscCall(PetscSynchronizedPrintf(comm, "\t\t[%d] 1) e0 SKIPPING small edge %20.14e edge (%" PetscInt_FMT " %" PetscInt_FMT "), diff = %10.4e to proc %d. max = %20.14e, w = %20.14e\n", rank, (double)e->weight, gid0, gid1, (double)(PetscRealPart(lid_max_ew[lid0]) - e->weight), ghost1_idx != -1 ? lghost_pe[ghost1_idx] : rank, (double)PetscRealPart(lid_max_ew[lid0]), (double)e->weight)); continue; // we are basically filter edges here } @@ -698,62 +710,69 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i if (ghost1_idx == -1) { if (PetscRealPart(lid_max_ew[lid1]) > e->weight + MY_MEPS) { if (print) - PetscCall(PetscSynchronizedPrintf(comm, "\t\t%c[%d] 2) e1 SKIPPING small local edge %20.14e edge (%d %d), diff = %10.4e\n", ghost1_idx != -1 ? '\t' : ' ', rank, (double)e->weight, (int)gid0, (int)gid1, (double)(PetscRealPart(lid_max_ew[lid1]) - e->weight))); + PetscCall(PetscSynchronizedPrintf(comm, "\t\t%c[%d] 2) e1 SKIPPING small local edge %20.14e edge (%" PetscInt_FMT " %" PetscInt_FMT "), diff = %10.4e\n", ghost1_idx != -1 ? 
'\t' : ' ', rank, (double)e->weight, gid0, gid1, (double)(PetscRealPart(lid_max_ew[lid1]) - e->weight))); continue; // we are basically filter edges here } } else { // e1 - ghost /* see if edge might get matched on other proc */ PetscReal g_max_e1 = PetscRealPart(lghost_max_ew[ghost1_idx]); + if (print) - PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t[%d] CHECK GHOST e1, edge (%d %d), E0 MAX EDGE WEIGHT = %10.4e, EDGE WEIGHT = %10.4e, diff1 = %10.4e, ghost proc %d with max pe %d on e0 and %d on e1\n", rank, (int)gid0, (int)gid1, (double)PetscRealPart(lid_max_ew[lid0]), - (double)e->weight, (double)(PetscRealPart(lghost_max_ew[ghost1_idx]) - e->weight), (int)lghost_pe[ghost1_idx], lid_max_pe[lid0], lghost_max_pe[ghost1_idx])); + PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t[%d] CHECK GHOST e1, edge (%" PetscInt_FMT " %" PetscInt_FMT "), E0 MAX EDGE WEIGHT = %10.4e, EDGE WEIGHT = %10.4e, diff1 = %10.4e, ghost proc %d with max pe %d on e0 and %d on e1\n", rank, gid0, gid1, (double)PetscRealPart(lid_max_ew[lid0]), + (double)e->weight, (double)(PetscRealPart(lghost_max_ew[ghost1_idx]) - e->weight), lghost_pe[ghost1_idx], lid_max_pe[lid0], lghost_max_pe[ghost1_idx])); if (g_max_e1 > e->weight + MY_MEPS) { - /* PetscCall(PetscSynchronizedPrintf(comm,"\t\t\t\t[%d] 3) ghost e1 SKIPPING small edge (%d %d), diff = %10.4e from proc %d with max pe %d. max = %20.14e, w = %20.14e\n", rank, (int)gid0, (int)gid1, g_max_e1 - e->weight, (int)lghost_pe[ghost1_idx], lghost_max_pe[ghost1_idx], g_max_e1, e->weight )); */ + /* PetscCall(PetscSynchronizedPrintf(comm,"\t\t\t\t[%d] 3) ghost e1 SKIPPING small edge (%d %d), diff = %10.4e from proc %d with max pe %d. max = %20.14e, w = %20.14e\n", rank, gid0, gid1, g_max_e1 - e->weight, lghost_pe[ghost1_idx], lghost_max_pe[ghost1_idx], g_max_e1, e->weight )); */ continue; } else if (g_max_e1 >= e->weight - MY_MEPS && lghost_pe[ghost1_idx] > rank) { // is 'lghost_max_pe[ghost1_idx] > rank' needed? /* check for max_ea == to this edge and larger processor that will deal with this */ if (print) - PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t[%d] ghost e1 SKIPPING EQUAL (%d %d), diff = %10.4e from larger proc %d with max pe %d. max = %20.14e, w = %20.14e\n", rank, (int)gid0, (int)gid1, - (double)(PetscRealPart(lid_max_ew[lid0]) - (double)e->weight), (int)lghost_pe[ghost1_idx], (int)lghost_max_pe[ghost1_idx], (double)g_max_e1, (double)e->weight)); + PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t[%d] ghost e1 SKIPPING EQUAL (%" PetscInt_FMT " %" PetscInt_FMT "), diff = %10.4e from larger proc %d with max pe %d. max = %20.14e, w = %20.14e\n", rank, gid0, gid1, + (double)(PetscRealPart(lid_max_ew[lid0]) - (double)e->weight), lghost_pe[ghost1_idx], lghost_max_pe[ghost1_idx], (double)g_max_e1, (double)e->weight)); isOK = PETSC_FALSE; // this guy could delete me continue; } else { - /* PetscCall(PetscSynchronizedPrintf(comm,"\t[%d] Edge (%d %d) passes gid0 tests, diff = %10.4e from proc %d with max pe %d. max = %20.14e, w = %20.14e\n", rank, (int)gid0, (int)gid1, g_max_e1 - e->weight, (int)lghost_pe[ghost1_idx], lghost_max_pe[ghost1_idx], g_max_e1, e->weight )); */ + /* PetscCall(PetscSynchronizedPrintf(comm,"\t[%d] Edge (%d %d) passes gid0 tests, diff = %10.4e from proc %d with max pe %d. 
max = %20.14e, w = %20.14e\n", rank, gid0, gid1, g_max_e1 - e->weight, lghost_pe[ghost1_idx], lghost_max_pe[ghost1_idx], g_max_e1, e->weight )); */ } } /* check ghost for v0 */ if (isOK) { PetscReal max_e, ew; + if ((ix = lid_cprowID[lid0]) != -1) { /* if I have any ghost neighbors */ - int n; + PetscInt n; + ii = matB->compressedrow.i; n = ii[ix + 1] - ii[ix]; ap = matB->a + ii[ix]; aj = matB->j + ii[ix]; - for (int jj = 0; jj < n && isOK; jj++) { + for (PetscInt jj = 0; jj < n && isOK; jj++) { PetscInt lidj = aj[jj]; + if (lghost_matched[lidj]) continue; ew = PetscRealPart(ap[jj]); if (ew <= threshold) continue; max_e = PetscRealPart(lghost_max_ew[lidj]); + /* check for max_e == to this edge and larger processor that will deal with this */ if (ew >= PetscRealPart(lid_max_ew[lid0]) - MY_MEPS && lghost_max_pe[lidj] > rank) isOK = PETSC_FALSE; - PetscCheck(ew <= max_e + MY_MEPS, PETSC_COMM_SELF, PETSC_ERR_SUP, "edge weight %e > max %e. ncols = %d, gid0 = %d, gid1 = %d", (double)PetscRealPart(ew), (double)PetscRealPart(max_e), (int)n, (int)(lid0 + my0), (int)lghost_gid[lidj]); + PetscCheck(ew <= max_e + MY_MEPS, PETSC_COMM_SELF, PETSC_ERR_SUP, "edge weight %e > max %e. ncols = %" PetscInt_FMT ", gid0 = %" PetscInt_FMT ", gid1 = %" PetscInt_FMT, (double)PetscRealPart(ew), (double)PetscRealPart(max_e), n, lid0 + Istart, lghost_gid[lidj]); if (print) - PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t\t[%d] e0: looked at ghost adj (%d %d), diff = %10.4e, ghost on proc %d (max %d). isOK = %d, %d %d %d; ew = %e, lid0 max ew = %e, diff = %e, eps = %e\n", rank, (int)gid0, (int)lghost_gid[lidj], (double)(max_e - ew), lghost_pe[lidj], lghost_max_pe[lidj], isOK, (double)(ew) >= (double)(max_e - MY_MEPS), ew >= PetscRealPart(lid_max_ew[lid0]) - MY_MEPS, lghost_pe[lidj] > rank, (double)ew, (double)PetscRealPart(lid_max_ew[lid0]), (double)(ew - PetscRealPart(lid_max_ew[lid0])), (double)MY_MEPS)); + PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t\t[%d] e0: looked at ghost adj (%" PetscInt_FMT " %" PetscInt_FMT "), diff = %10.4e, ghost on proc %d (max %d). 
isOK = %d, %d %d %d; ew = %e, lid0 max ew = %e, diff = %e, eps = %e\n", rank, gid0, lghost_gid[lidj], (double)(max_e - ew), lghost_pe[lidj], lghost_max_pe[lidj], isOK, (double)(ew) >= (double)(max_e - MY_MEPS), ew >= PetscRealPart(lid_max_ew[lid0]) - MY_MEPS, lghost_pe[lidj] > rank, (double)ew, (double)PetscRealPart(lid_max_ew[lid0]), (double)(ew - PetscRealPart(lid_max_ew[lid0])), (double)MY_MEPS)); } - if (!isOK && print) PetscCall(PetscSynchronizedPrintf(comm, "\t\t[%d] skip edge (%d %d) from ghost inspection\n", rank, (int)gid0, (int)gid1)); + if (!isOK && print) PetscCall(PetscSynchronizedPrintf(comm, "\t\t[%d] skip edge (%" PetscInt_FMT " %" PetscInt_FMT ") from ghost inspection\n", rank, gid0, gid1)); } /* check local v1 */ if (ghost1_idx == -1) { if ((ix = lid_cprowID[lid1]) != -1) { /* if I have any ghost neighbors */ - int n; + PetscInt n; + ii = matB->compressedrow.i; n = ii[ix + 1] - ii[ix]; ap = matB->a + ii[ix]; aj = matB->j + ii[ix]; - for (int jj = 0; jj < n && isOK; jj++) { + for (PetscInt jj = 0; jj < n && isOK; jj++) { PetscInt lidj = aj[jj]; + if (lghost_matched[lidj]) continue; ew = PetscRealPart(ap[jj]); if (ew <= threshold) continue; @@ -762,25 +781,25 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i if (ew >= PetscRealPart(lid_max_ew[lid1]) - MY_MEPS && lghost_max_pe[lidj] > rank) isOK = PETSC_FALSE; PetscCheck(ew <= max_e + MY_MEPS, PETSC_COMM_SELF, PETSC_ERR_SUP, "edge weight %e > max %e", (double)PetscRealPart(ew), (double)PetscRealPart(max_e)); if (print) - PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t\t\t[%d] e1: looked at ghost adj (%d %d), diff = %10.4e, ghost on proc %d (max %d)\n", rank, (int)gid0, (int)lghost_gid[lidj], (double)(max_e - ew), lghost_pe[lidj], lghost_max_pe[lidj])); + PetscCall(PetscSynchronizedPrintf(comm, "\t\t\t\t\t[%d] e1: looked at ghost adj (%" PetscInt_FMT " %" PetscInt_FMT "), diff = %10.4e, ghost on proc %d (max %d)\n", rank, gid0, lghost_gid[lidj], (double)(max_e - ew), lghost_pe[lidj], lghost_max_pe[lidj])); } } - if (!isOK && print) PetscCall(PetscSynchronizedPrintf(comm, "\t\t[%d] skip edge (%d %d) from ghost inspection\n", rank, (int)gid0, (int)gid1)); + if (!isOK && print) PetscCall(PetscSynchronizedPrintf(comm, "\t\t[%d] skip edge (%" PetscInt_FMT " %" PetscInt_FMT ") from ghost inspection\n", rank, gid0, gid1)); } } PetscReal e1_max_w = (ghost1_idx == -1 ? PetscRealPart(lid_max_ew[lid0]) : PetscRealPart(lghost_max_ew[ghost1_idx])); if (print) - PetscCall(PetscSynchronizedPrintf(comm, "\t[%d] MATCHING (%d %d) e1 max weight = %e, e1 wight diff %e, %s. isOK = %d\n", rank, (int)gid0, (int)gid1, (double)e1_max_w, (double)(e1_max_w - e->weight), ghost1_idx == -1 ? "local" : "ghost", isOK)); + PetscCall(PetscSynchronizedPrintf(comm, "\t[%d] MATCHING (%" PetscInt_FMT " %" PetscInt_FMT ") e1 max weight = %e, e1 weight diff %e, %s. isOK = %d\n", rank, gid0, gid1, (double)e1_max_w, (double)(e1_max_w - e->weight), ghost1_idx == -1 ? 
"local" : "ghost", isOK)); /* do it */ if (isOK) { if (ghost1_idx == -1) { - PetscCheck(!lid_matched[lid1], PETSC_COMM_SELF, PETSC_ERR_SUP, "local %d is matched", (int)gid1); + PetscCheck(!lid_matched[lid1], PETSC_COMM_SELF, PETSC_ERR_SUP, "local %" PetscInt_FMT " is matched", gid1); lid_matched[lid1] = PETSC_TRUE; /* keep track of what we've done this round */ PetscCall(PetscCDMoveAppend(agg_llists, lid0, lid1)); // takes lid1's list and appends to lid0's } else { /* add gid1 to list of ghost deleted by me -- I need their children */ PetscMPIInt proc = lghost_pe[ghost1_idx]; - PetscCheck(!lghost_matched[ghost1_idx], PETSC_COMM_SELF, PETSC_ERR_SUP, "ghost %d is matched", (int)lghost_gid[ghost1_idx]); + PetscCheck(!lghost_matched[ghost1_idx], PETSC_COMM_SELF, PETSC_ERR_SUP, "ghost %" PetscInt_FMT " is matched", lghost_gid[ghost1_idx]); lghost_matched[ghost1_idx] = PETSC_TRUE; PetscCall(PetscCDAppendID(ghost_deleted_list, proc, ghost1_idx)); /* cache to send messages */ PetscCall(PetscCDAppendID(ghost_deleted_list, proc, lid0)); @@ -789,7 +808,7 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i /* set projection */ PetscCall(MatSetValues(P, 1, &gid0, 1, &gid0, &one, INSERT_VALUES)); PetscCall(MatSetValues(P, 1, &gid1, 1, &gid0, &one, INSERT_VALUES)); - //PetscCall(PetscPrintf(comm,"\t %d.%d) match active EDGE %d : (%d %d)\n",iter,sub_it, (int)nactive_edges, (int)gid0, (int)gid1)); + //PetscCall(PetscPrintf(comm,"\t %" PetscInt_FMT ".%" PetscInt_FMT ") match active EDGE %" PetscInt_FMT " : (%" PetscInt_FMT " %" PetscInt_FMT ")\n",iter,sub_it, nactive_edges, gid0, gid1)); } /* matched */ } /* edge loop */ PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT)); @@ -801,7 +820,7 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i else n_act_n[2] = 0; PetscCall(PetscCDCount(agg_llists, &n_act_n[1])); PetscCall(MPIU_Allreduce(n_act_n, gn_act_n, 3, MPIU_INT, MPI_SUM, comm)); - PetscCall(PetscInfo(info_is, "[%d] %d.%d) nactive edges=%" PetscInt_FMT ", ncomm_procs=%d, nEdges=%d, %" PetscInt_FMT " deleted ghosts, N=%" PetscInt_FMT "\n", rank, iter, sub_it, gn_act_n[0], (int)ncomm_procs, (int)nEdges, gn_act_n[2], gn_act_n[1])); + PetscCall(PetscInfo(info_is, "[%d] %" PetscInt_FMT ".%" PetscInt_FMT ") nactive edges=%" PetscInt_FMT ", ncomm_procs=%d, nEdges=%" PetscInt_FMT ", %" PetscInt_FMT " deleted ghosts, N=%" PetscInt_FMT "\n", rank, iter, sub_it, gn_act_n[0], ncomm_procs, nEdges, gn_act_n[2], gn_act_n[1])); /* deal with deleted ghost */ if (isMPI) { PetscCDIntNd *pos; @@ -810,10 +829,11 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i MPI_Status status; /* send deleted ghosts */ - for (int proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { + for (PetscInt proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { const PetscMPIInt proc = comm_procs[proc_idx]; PetscInt *sbuff, *pt, scount; MPI_Request *request; + /* count ghosts */ PetscCall(PetscCDCountAt(ghost_deleted_list, proc, &ndel)); ndel /= 2; // two entries for each proc @@ -829,27 +849,29 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(PetscCDGetHeadPos(ghost_deleted_list, proc, &pos)); while (pos) { PetscInt lid0, ghost_idx, gid1; + PetscCall(PetscCDIntNdGetID(pos, &ghost_idx)); gid1 = lghost_gid[ghost_idx]; PetscCall(PetscCDGetNextPos(ghost_deleted_list, proc, &pos)); PetscCall(PetscCDIntNdGetID(pos, &lid0)); PetscCall(PetscCDGetNextPos(ghost_deleted_list, proc, &pos)); *pt++ = gid1; - *pt++ = lid0 + 
my0; // gid0 + *pt++ = lid0 + Istart; // gid0 } - PetscCheck(pt - sbuff == scount, PETSC_COMM_SELF, PETSC_ERR_SUP, "sbuff-pt != scount: %d", (int)(pt - sbuff)); + PetscCheck(pt - sbuff == (ptrdiff_t)scount, PETSC_COMM_SELF, PETSC_ERR_SUP, "sbuff-pt != scount: %zu", (pt - sbuff)); /* MPI_Isend: tag1 [ndel, proc, n*[gid1,gid0] ] */ PetscCallMPI(MPI_Isend(sbuff, scount, MPIU_INT, proc, tag1, comm, request)); PetscCall(PetscCDRemoveAllAt(ghost_deleted_list, proc)); // done with this list } /* receive deleted, send back partial aggregates, clear lists */ - for (int proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { + for (PetscInt proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { PetscCallMPI(MPI_Probe(comm_procs[proc_idx] /* MPI_ANY_SOURCE */, tag1, comm, &status)); { PetscInt *pt, *pt2, *pt3, *sbuff, tmp; MPI_Request *request; - int rcount, scount, ndel; + PetscMPIInt rcount, scount; const PetscMPIInt proc = status.MPI_SOURCE; + PetscCallMPI(MPI_Get_count(&status, MPIU_INT, &rcount)); if (rcount > rbuff_sz) { if (rbuff) PetscCall(PetscFree(rbuff)); @@ -864,16 +886,17 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i ndel = *pt++; // number of deleted to recv tmp = *pt++; // proc (not used) while (ndel--) { - PetscInt gid1 = *pt++, lid1 = gid1 - my0; + PetscInt gid1 = *pt++, lid1 = gid1 - Istart; int gh_gid0 = *pt++; // gid on other proc (not used here to count) - PetscCheck(lid1 >= 0 && lid1 < nloc, PETSC_COMM_SELF, PETSC_ERR_SUP, "received ghost deleted %d", (int)gid1); - PetscCheck(!lid_matched[lid1], PETSC_COMM_SELF, PETSC_ERR_PLIB, "%d) received matched local gid %" PetscInt_FMT ",%d, with ghost (lid) %" PetscInt_FMT " from proc %d", sub_it, gid1, gh_gid0, tmp, proc); + + PetscCheck(lid1 >= 0 && lid1 < nloc, PETSC_COMM_SELF, PETSC_ERR_SUP, "received ghost deleted %" PetscInt_FMT, gid1); + PetscCheck(!lid_matched[lid1], PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT ") received matched local gid %" PetscInt_FMT ",%d, with ghost (lid) %" PetscInt_FMT " from proc %d", sub_it, gid1, gh_gid0, tmp, proc); lid_matched[lid1] = PETSC_TRUE; /* keep track of what we've done this round */ PetscCall(PetscCDCountAt(agg_llists, lid1, &tmp)); // n - /* PetscCheck(tmp == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "sending %d (!= 1) size aggregate. gid-0 %d, from %d (gid-1 %d)", (int)tmp, (int) gid, proc, gh_gid0); */ + /* PetscCheck(tmp == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "sending %" PetscInt_FMT " (!= 1) size aggregate. 
gid-0 %" PetscInt_FMT ", from %d (gid-1 %" PetscInt_FMT ")", tmp, gid, proc, gh_gid0); */ scount += tmp + 2; // lid0, n, n*[gid] } - PetscCheck((pt - rbuff) == rcount, PETSC_COMM_SELF, PETSC_ERR_SUP, "receive buffer size != num read: %d; rcount: %d", (int)(pt - rbuff), rcount); + PetscCheck((pt - rbuff) == (ptrdiff_t)rcount, PETSC_COMM_SELF, PETSC_ERR_SUP, "receive buffer size != num read: %zu; rcount: %d", pt - rbuff, rcount); /* send tag2: *[gid0, n, n*[gid] ] */ PetscCall(PetscMalloc1(scount + request_size, &sbuff)); sbuffs2[proc_idx] = sbuff; /* cache request */ @@ -884,13 +907,15 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i ndel = *pt++; tmp = *pt++; // proc (not used) while (ndel--) { - PetscInt gid1 = *pt++, lid1 = gid1 - my0, gh_gid0 = *pt++; + PetscInt gid1 = *pt++, lid1 = gid1 - Istart, gh_gid0 = *pt++; + /* write [gid0, aggSz, aggSz[gid] ] */ *pt2++ = gh_gid0; pt3 = pt2++; /* save pointer for later */ PetscCall(PetscCDGetHeadPos(agg_llists, lid1, &pos)); while (pos) { PetscInt gid; + PetscCall(PetscCDIntNdGetID(pos, &gid)); PetscCall(PetscCDGetNextPos(agg_llists, lid1, &pos)); *pt2++ = gid; @@ -899,16 +924,17 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i /* clear list */ PetscCall(PetscCDRemoveAllAt(agg_llists, lid1)); } - PetscCheck((pt2 - sbuff) == scount, PETSC_COMM_SELF, PETSC_ERR_SUP, "buffer size != num write: %d %d", (int)(pt2 - sbuff), (int)scount); + PetscCheck((pt2 - sbuff) == (ptrdiff_t)scount, PETSC_COMM_SELF, PETSC_ERR_SUP, "buffer size != num write: %zu %d", pt2 - sbuff, scount); /* MPI_Isend: requested data tag2 *[lid0, n, n*[gid1] ] */ PetscCallMPI(MPI_Isend(sbuff, scount, MPIU_INT, proc, tag2, comm, request)); } } // proc_idx /* receive tag2 *[gid0, n, n*[gid] ] */ - for (int proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { + for (PetscMPIInt proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { PetscMPIInt proc; PetscInt *pt; int rcount; + PetscCallMPI(MPI_Probe(comm_procs[proc_idx] /* MPI_ANY_SOURCE */, tag2, comm, &status)); PetscCallMPI(MPI_Get_count(&status, MPIU_INT, &rcount)); if (rcount > rbuff_sz) { @@ -922,23 +948,26 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i pt = rbuff; while (pt - rbuff < rcount) { PetscInt gid0 = *pt++, n = *pt++; + while (n--) { PetscInt gid1 = *pt++; - PetscCall(PetscCDAppendID(agg_llists, gid0 - my0, gid1)); + + PetscCall(PetscCDAppendID(agg_llists, gid0 - Istart, gid1)); } } - PetscCheck((pt - rbuff) == rcount, PETSC_COMM_SELF, PETSC_ERR_SUP, "recv buffer size != num read: %d %d", (int)(pt - rbuff), (int)rcount); + PetscCheck((pt - rbuff) == (ptrdiff_t)rcount, PETSC_COMM_SELF, PETSC_ERR_SUP, "recv buffer size != num read: %zu %d", pt - rbuff, rcount); } /* wait for tag1 isends */ - for (int proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { - MPI_Request *request; - request = (MPI_Request *)sbuffs1[proc_idx]; + for (PetscMPIInt proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { + MPI_Request *request = (MPI_Request *)sbuffs1[proc_idx]; + PetscCallMPI(MPI_Wait(request, &status)); PetscCall(PetscFree(sbuffs1[proc_idx])); } /* wait for tag2 isends */ - for (int proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { + for (PetscMPIInt proc_idx = 0; proc_idx < ncomm_procs; proc_idx++) { MPI_Request *request = (MPI_Request *)sbuffs2[proc_idx]; + PetscCallMPI(MPI_Wait(request, &status)); PetscCall(PetscFree(sbuffs2[proc_idx])); } @@ -946,8 +975,10 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const 
PetscInt n_i /* set 'lghost_matched' - use locMaxEdge, ghostMaxEdge (recomputed next) */ if (isMPI) { const PetscScalar *sbuff; - for (PetscInt kk = 0, gid = my0; kk < nloc; kk++, gid++) { + + for (PetscInt kk = 0, gid = Istart; kk < nloc; kk++, gid++) { PetscScalar vval = lid_matched[kk] ? 1.0 : 0.0; + PetscCall(VecSetValues(locMaxEdge, 1, &gid, &vval, INSERT_VALUES)); /* set with GID */ } PetscCall(VecAssemblyBegin(locMaxEdge)); @@ -955,21 +986,23 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCall(VecScatterBegin(mpimat->Mvctx, locMaxEdge, ghostMaxEdge, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(mpimat->Mvctx, locMaxEdge, ghostMaxEdge, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecGetArrayRead(ghostMaxEdge, &sbuff)); - for (int kk = 0; kk < num_ghosts; kk++) { lghost_matched[kk] = (PetscBool)(PetscRealPart(sbuff[kk]) != 0.0); } + for (PetscInt kk = 0; kk < num_ghosts; kk++) { lghost_matched[kk] = (PetscBool)(PetscRealPart(sbuff[kk]) != 0.0); } PetscCall(VecRestoreArrayRead(ghostMaxEdge, &sbuff)); } /* compute 'locMaxEdge' inside sub iteration b/c max weight can drop as neighbors are matched */ - for (PetscInt kk = 0, gid = my0; kk < nloc; kk++, gid++) { + for (PetscInt kk = 0, gid = Istart; kk < nloc; kk++, gid++) { PetscReal max_e = 0., tt; PetscScalar vval; const PetscInt lid = kk; - int max_pe = rank, pe, n; - ii = matA->i; - n = ii[lid + 1] - ii[lid]; - aj = PetscSafePointerPlusOffset(matA->j, ii[lid]); - ap = PetscSafePointerPlusOffset(matA->a, ii[lid]); - for (int jj = 0; jj < n; jj++) { + PetscMPIInt max_pe = rank, pe, n; + + ii = matA->i; + n = ii[lid + 1] - ii[lid]; + aj = PetscSafePointerPlusOffset(matA->j, ii[lid]); + ap = PetscSafePointerPlusOffset(matA->a, ii[lid]); + for (PetscInt jj = 0; jj < n; jj++) { PetscInt lidj = aj[jj]; + if (lid_matched[lidj]) continue; /* this is new - can change local max */ if (lidj != lid && PetscRealPart(ap[jj]) > max_e) max_e = PetscRealPart(ap[jj]); } @@ -978,8 +1011,9 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i n = ii[ix + 1] - ii[ix]; ap = matB->a + ii[ix]; aj = matB->j + ii[ix]; - for (int jj = 0; jj < n; jj++) { + for (PetscInt jj = 0; jj < n; jj++) { PetscInt lidj = aj[jj]; + if (lghost_matched[lidj]) continue; if ((tt = PetscRealPart(ap[jj])) > max_e) max_e = tt; } @@ -992,8 +1026,9 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i n = ii[ix + 1] - ii[ix]; ap = matB->a + ii[ix]; aj = matB->j + ii[ix]; - for (int jj = 0; jj < n; jj++) { + for (PetscInt jj = 0; jj < n; jj++) { PetscInt lidj = aj[jj]; + if (lghost_matched[lidj]) continue; if ((pe = lghost_pe[aj[jj]]) > max_pe && PetscRealPart(ap[jj]) >= max_e - MY_MEPS) { max_pe = pe; } } @@ -1008,12 +1043,13 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i /* compute 'lghost_max_ew' and 'lghost_max_pe' to get ready for next iteration*/ if (isMPI) { const PetscScalar *buf; + PetscCall(VecScatterBegin(mpimat->Mvctx, locMaxEdge, ghostMaxEdge, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(mpimat->Mvctx, locMaxEdge, ghostMaxEdge, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterBegin(mpimat->Mvctx, locMaxPE, ghostMaxPE, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecScatterEnd(mpimat->Mvctx, locMaxPE, ghostMaxPE, INSERT_VALUES, SCATTER_FORWARD)); PetscCall(VecGetArrayRead(ghostMaxPE, &buf)); - for (int kk = 0; kk < num_ghosts; kk++) { + for (PetscInt kk = 0; kk < num_ghosts; kk++) { lghost_max_pe[kk] = 
(PetscMPIInt)PetscRealPart(buf[kk]); // the MAX proc of the ghost now } PetscCall(VecRestoreArrayRead(ghostMaxPE, &buf)); @@ -1021,36 +1057,35 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i // if no active edges, stop if (gn_act_n[0] < 1) break; // inc and check (self stopping iteration - PetscCheck(old_num_edge != gn_act_n[0], PETSC_COMM_SELF, PETSC_ERR_SUP, "HEM stalled step %d/%d", sub_it + 1, n_sub_its); + PetscCheck(old_num_edge != gn_act_n[0], PETSC_COMM_SELF, PETSC_ERR_SUP, "HEM stalled step %" PetscInt_FMT "/%" PetscInt_FMT, sub_it + 1, n_sub_its); sub_it++; - PetscCheck(sub_it < n_sub_its, PETSC_COMM_SELF, PETSC_ERR_SUP, "failed to finish HEM step %d/%d", sub_it + 1, n_sub_its); + PetscCheck(sub_it < n_sub_its, PETSC_COMM_SELF, PETSC_ERR_SUP, "failed to finish HEM step %" PetscInt_FMT "/%" PetscInt_FMT, sub_it + 1, n_sub_its); old_num_edge = gn_act_n[0]; } /* sub_it loop */ /* clean up iteration */ PetscCall(PetscFree(Edges)); - if (mpimat) { // can be hoisted + if (isMPI) { // can be hoisted PetscCall(VecRestoreArrayRead(ghostMaxEdge, &lghost_max_ew)); PetscCall(VecDestroy(&ghostMaxEdge)); PetscCall(VecDestroy(&ghostMaxPE)); - PetscCall(PetscFree(lghost_pe)); - PetscCall(PetscFree(lghost_gid)); - PetscCall(PetscFree(lghost_matched)); - PetscCall(PetscFree(lghost_max_pe)); + PetscCall(PetscFree4(lghost_matched, lghost_pe, lghost_gid, lghost_max_pe)); } PetscCall(VecDestroy(&locMaxEdge)); PetscCall(VecDestroy(&locMaxPE)); /* create next graph */ { Vec diag; + /* add identity for unmatched vertices so they stay alive */ - for (PetscInt kk = 0, gid1, gid = my0; kk < nloc; kk++, gid++) { + for (PetscInt kk = 0, gid1, gid = Istart; kk < nloc; kk++, gid++) { if (!lid_matched[kk]) { const PetscInt lid = kk; PetscCDIntNd *pos; + PetscCall(PetscCDGetHeadPos(agg_llists, lid, &pos)); - PetscCheck(pos, PETSC_COMM_SELF, PETSC_ERR_PLIB, "empty list in singleton: %d", (int)gid); + PetscCheck(pos, PETSC_COMM_SELF, PETSC_ERR_PLIB, "empty list in singleton: %" PetscInt_FMT, gid); PetscCall(PetscCDIntNdGetID(pos, &gid1)); - PetscCheck(gid1 == gid, PETSC_COMM_SELF, PETSC_ERR_PLIB, "first in list (%d) in singleton not %d", (int)gid1, (int)gid); + PetscCheck(gid1 == gid, PETSC_COMM_SELF, PETSC_ERR_PLIB, "first in list (%" PetscInt_FMT ") in singleton not %" PetscInt_FMT, gid1, gid); PetscCall(MatSetValues(P, 1, &gid, 1, &gid, &one, INSERT_VALUES)); } } @@ -1077,7 +1112,7 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i PetscCDIntNd *pos; PetscInt NN, MM, jj = 0, mxsz = 0; - for (int kk = 0; kk < nloc; kk++) { + for (PetscInt kk = 0; kk < nloc; kk++) { PetscCall(PetscCDCountAt(agg_llists, kk, &jj)); if (jj > mxsz) mxsz = jj; } @@ -1085,13 +1120,14 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i if (mxsz > MM - nloc) mxsz = MM - nloc; /* matrix of ghost adj for square graph */ PetscCall(MatCreateAIJ(comm, nloc, nloc, PETSC_DETERMINE, PETSC_DETERMINE, 0, NULL, mxsz, NULL, &mat)); - for (PetscInt lid = 0, gid = my0; lid < nloc; lid++, gid++) { + for (PetscInt lid = 0, gid = Istart; lid < nloc; lid++, gid++) { PetscCall(PetscCDGetHeadPos(agg_llists, lid, &pos)); while (pos) { PetscInt gid1; + PetscCall(PetscCDIntNdGetID(pos, &gid1)); PetscCall(PetscCDGetNextPos(agg_llists, lid, &pos)); - if (gid1 < my0 || gid1 >= my0 + nloc) PetscCall(MatSetValues(mat, 1, &gid, 1, &gid1, &one, ADD_VALUES)); + if (gid1 < Istart || gid1 >= Istart + nloc) PetscCall(MatSetValues(mat, 1, &gid, 1, &gid1, &one, ADD_VALUES)); 
} } PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY)); @@ -1103,9 +1139,11 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i // move BCs into some node if (bc_list) { PetscCDIntNd *pos; + PetscCall(PetscCDGetHeadPos(bc_list, 0, &pos)); while (pos) { PetscInt gid1; + PetscCall(PetscCDIntNdGetID(pos, &gid1)); PetscCall(PetscCDGetNextPos(bc_list, 0, &pos)); PetscCall(PetscCDAppendID(agg_llists, bc_agg, gid1)); @@ -1116,16 +1154,15 @@ static PetscErrorCode MatCoarsenApply_HEM_private(Mat a_Gmat, const PetscInt n_i { // check sizes -- all vertices must get in graph PetscInt sz, globalsz, MM; + PetscCall(MatGetSize(a_Gmat, &MM, NULL)); PetscCall(PetscCDCount(agg_llists, &sz)); PetscCall(MPIU_Allreduce(&sz, &globalsz, 1, MPIU_INT, MPI_SUM, comm)); - PetscCheck(MM == globalsz, comm, PETSC_ERR_SUP, "lost %d equations ?", (int)(MM - globalsz)); + PetscCheck(MM == globalsz, comm, PETSC_ERR_SUP, "lost %" PetscInt_FMT " equations ?", (MM - globalsz)); } // cleanup PetscCall(MatDestroy(&cMat)); - PetscCall(PetscFree(lid_cprowID)); - PetscCall(PetscFree(lid_max_pe)); - PetscCall(PetscFree(lid_matched)); + PetscCall(PetscFree3(lid_matched, lid_cprowID, lid_max_pe)); PetscCall(ISDestroy(&info_is)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1151,29 +1188,44 @@ static PetscErrorCode MatCoarsenView_HEM(MatCoarsen coarse, PetscViewer viewer) PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)coarse), &rank)); PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); if (iascii) { - PetscCDIntNd *pos, *pos2; - PetscCall(PetscViewerASCIIPrintf(viewer, "%d matching steps with threshold = %g\n", (int)coarse->max_it, (double)coarse->threshold)); - PetscCall(PetscViewerASCIIPushSynchronized(viewer)); - for (PetscInt kk = 0; kk < coarse->agg_lists->size; kk++) { - PetscCall(PetscCDGetHeadPos(coarse->agg_lists, kk, &pos)); - if ((pos2 = pos)) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "selected local %d: ", (int)kk)); - while (pos) { - PetscInt gid1; - PetscCall(PetscCDIntNdGetID(pos, &gid1)); - PetscCall(PetscCDGetNextPos(coarse->agg_lists, kk, &pos)); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %d ", (int)gid1)); + PetscCDIntNd *pos, *pos2; + PetscViewerFormat format; + + PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " matching steps with threshold = %g\n", coarse->max_it, (double)coarse->threshold)); + PetscCall(PetscViewerGetFormat(viewer, &format)); + if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { + if (coarse->agg_lists) { + PetscCall(PetscViewerASCIIPushSynchronized(viewer)); + for (PetscInt kk = 0; kk < coarse->agg_lists->size; kk++) { + PetscCall(PetscCDGetHeadPos(coarse->agg_lists, kk, &pos)); + if ((pos2 = pos)) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "selected local %" PetscInt_FMT ": ", kk)); + while (pos) { + PetscInt gid1; + + PetscCall(PetscCDIntNdGetID(pos, &gid1)); + PetscCall(PetscCDGetNextPos(coarse->agg_lists, kk, &pos)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", gid1)); + } + if (pos2) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n")); + } + PetscCall(PetscViewerFlush(viewer)); + PetscCall(PetscViewerASCIIPopSynchronized(viewer)); + } else { + PetscCall(PetscViewerASCIIPrintf(viewer, " HEM aggregator lists are not available\n")); } - if (pos2) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n")); } - PetscCall(PetscViewerFlush(viewer)); - PetscCall(PetscViewerASCIIPopSynchronized(viewer)); } PetscFunctionReturn(PETSC_SUCCESS); } 
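The view-routine rewrites in this patch (here for HEM, and below for MIS and MISK) share one pattern: print the cheap summary line unconditionally, but emit the expensive per-rank aggregate dump only when the viewer format is PETSC_VIEWER_ASCII_INFO_DETAIL, and guard against coarse->agg_lists not being populated. A minimal sketch of that gating, with a hypothetical MyCoarsenView callback standing in for the concrete viewers; the PetscViewer calls are the same ones the patch uses:

    #include <petscmat.h>

    /* Sketch only, not part of this patch: a MatCoarsen view callback in the
       style of MatCoarsenView_HEM() above. */
    static PetscErrorCode MyCoarsenView(MatCoarsen coarse, PetscViewer viewer)
    {
      PetscBool         iascii;
      PetscViewerFormat format;
      PetscMPIInt       rank;

      PetscFunctionBegin;
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)coarse), &rank));
      PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
      PetscCall(PetscViewerGetFormat(viewer, &format));
      if (iascii) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "my coarsener summary\n")); /* always cheap */
        if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
          /* synchronized printing keeps the per-rank detail ordered by rank */
          PetscCall(PetscViewerASCIIPushSynchronized(viewer));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] per-rank detail\n", rank));
          PetscCall(PetscViewerFlush(viewer));
          PetscCall(PetscViewerASCIIPopSynchronized(viewer));
        }
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }

The detailed output is then requested through the viewer format, e.g. -mat_coarsen_view ::ascii_info_detail on the command line.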
-/*
- MatCoarsenCreate_HEM - A coarsener that uses HEM a simple greedy coarsener
-*/
+/*MC
+ MATCOARSENHEM - A coarsener that uses HEM, a simple greedy coarsener
+
+ Level: beginner
+
+.seealso: `MatCoarsen`, `MatCoarsenMISKSetDistance()`, `MatCoarsenApply()`, `MatCoarsenSetType()`, `MatCoarsenType`, `MatCoarsenCreate()`, `MATCOARSENMISK`, `MATCOARSENMIS`
+M*/
+
 PETSC_EXTERN PetscErrorCode MatCoarsenCreate_HEM(MatCoarsen coarse)
 {
   PetscFunctionBegin;
diff --git a/src/mat/graphops/coarsen/impls/mis/mis.c b/src/mat/graphops/coarsen/impls/mis/mis.c
index ab649b5cebc..6146d82d39e 100644
--- a/src/mat/graphops/coarsen/impls/mis/mis.c
+++ b/src/mat/graphops/coarsen/impls/mis/mis.c
@@ -49,17 +49,17 @@ static PetscErrorCode MatCoarsenApply_MIS_private(IS perm, Mat Gmat, PetscBool s
     /* force compressed storage of B */
     PetscCall(MatCheckCompressedRow(mpimat->B, matB->nonzerorowcnt, &matB->compressedrow, matB->i, Gmat->rmap->n, -1.0));
   } else {
+    matA = (Mat_SeqAIJ *)Gmat->data;
     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Gmat, MATSEQAIJ, &isAIJ));
     PetscCheck(isAIJ, comm, PETSC_ERR_PLIB, "Require AIJ matrix.");
-    matA = (Mat_SeqAIJ *)Gmat->data;
   }
   PetscCall(MatGetOwnershipRange(Gmat, &my0, &Iend));
-  PetscCall(PetscMalloc1(nloc, &lid_gid)); /* explicit array needed */
-  if (mpimat) {
+  PetscCall(PetscMalloc4(nloc, &lid_gid, nloc, &lid_cprowID, nloc, &lid_removed, nloc, &lid_state));
+  if (strict_aggs) PetscCall(PetscMalloc1(nloc, &lid_parent_gid));
+  if (isMPI) {
     for (kk = 0, gid = my0; kk < nloc; kk++, gid++) lid_gid[kk] = gid;
     PetscCall(VecGetLocalSize(mpimat->lvec, &num_fine_ghosts));
-    PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_gid));
-    PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_state));
+    PetscCall(PetscMalloc2(num_fine_ghosts, &cpcol_gid, num_fine_ghosts, &cpcol_state));
     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)Gmat), &sf));
     PetscCall(MatGetLayouts(Gmat, &layout, NULL));
     PetscCall(PetscSFSetGraphLayout(sf, layout, num_fine_ghosts, NULL, PETSC_COPY_VALUES, mpimat->garray));
@@ -68,11 +68,6 @@ static PetscErrorCode MatCoarsenApply_MIS_private(IS perm, Mat Gmat, PetscBool s
     for (kk = 0; kk < num_fine_ghosts; kk++) cpcol_state[kk] = MIS_NOT_DONE;
   } else num_fine_ghosts = 0;
-  PetscCall(PetscMalloc1(nloc, &lid_cprowID));
-  PetscCall(PetscMalloc1(nloc, &lid_removed)); /* explicit array needed */
-  if (strict_aggs) PetscCall(PetscMalloc1(nloc, &lid_parent_gid));
-  PetscCall(PetscMalloc1(nloc, &lid_state));
-
   /* has ghost nodes for !strict and uses local indexing (yuck) */
   PetscCall(PetscCDCreate(strict_aggs ?
nloc : num_fine_ghosts + nloc, &agg_lists)); if (a_locals_llist) *a_locals_llist = agg_lists; @@ -176,7 +171,7 @@ static PetscErrorCode MatCoarsenApply_MIS_private(IS perm, Mat Gmat, PetscBool s } /* vertex loop */ /* update ghost states and count todos */ - if (mpimat) { + if (isMPI) { /* scatter states, check for done */ PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE)); PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE)); @@ -218,8 +213,7 @@ static PetscErrorCode MatCoarsenApply_MIS_private(IS perm, Mat Gmat, PetscBool s /* tell adj who my lid_parent_gid vertices belong to - fill in agg_lists selected ghost lists */ if (strict_aggs && matB) { /* need to copy this to free buffer -- should do this globally */ - PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_sel_gid)); - PetscCall(PetscMalloc1(num_fine_ghosts, &icpcol_gid)); + PetscCall(PetscMalloc2(num_fine_ghosts, &cpcol_sel_gid, num_fine_ghosts, &icpcol_gid)); for (cpid = 0; cpid < num_fine_ghosts; cpid++) icpcol_gid[cpid] = cpcol_gid[cpid]; /* get proc of deleted ghost */ @@ -233,28 +227,24 @@ static PetscErrorCode MatCoarsenApply_MIS_private(IS perm, Mat Gmat, PetscBool s PetscCall(PetscCDAppendID(agg_lists, slid, gid)); } } - PetscCall(PetscFree(icpcol_gid)); - PetscCall(PetscFree(cpcol_sel_gid)); + PetscCall(PetscFree2(cpcol_sel_gid, icpcol_gid)); } - if (mpimat) { + if (isMPI) { PetscCall(PetscSFDestroy(&sf)); - PetscCall(PetscFree(cpcol_gid)); - PetscCall(PetscFree(cpcol_state)); + PetscCall(PetscFree2(cpcol_gid, cpcol_state)); } - PetscCall(PetscFree(lid_cprowID)); - PetscCall(PetscFree(lid_gid)); - PetscCall(PetscFree(lid_removed)); - if (strict_aggs) PetscCall(PetscFree(lid_parent_gid)); - PetscCall(PetscFree(lid_state)); + PetscCall(PetscFree4(lid_gid, lid_cprowID, lid_removed, lid_state)); if (strict_aggs) { // check sizes -- all vertices must get in graph PetscInt aa[2] = {0, nrm_tot}, bb[2], MM; + + PetscCall(PetscFree(lid_parent_gid)); PetscCall(MatGetSize(Gmat, &MM, NULL)); // check sizes -- all vertices must get in graph PetscCall(PetscCDCount(agg_lists, &aa[0])); PetscCall(MPIU_Allreduce(aa, bb, 2, MPIU_INT, MPI_SUM, comm)); if (MM != bb[0]) PetscCall(PetscInfo(info_is, "Warning: N = %" PetscInt_FMT ", sum of aggregates %" PetscInt_FMT ", %" PetscInt_FMT " removed total\n", MM, bb[0], bb[1])); - PetscCheck(MM >= bb[0], comm, PETSC_ERR_PLIB, "Sum of aggs too big"); + PetscCheck(MM >= bb[0], comm, PETSC_ERR_PLIB, "Sum of aggs is too large"); } PetscCall(ISDestroy(&info_is)); PetscFunctionReturn(PETSC_SUCCESS); @@ -286,37 +276,43 @@ static PetscErrorCode MatCoarsenApply_MIS(MatCoarsen coarse) static PetscErrorCode MatCoarsenView_MIS(MatCoarsen coarse, PetscViewer viewer) { - PetscMPIInt rank; - PetscBool iascii; + PetscMPIInt rank; + PetscBool iascii; + PetscViewerFormat format; PetscFunctionBegin; PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)coarse), &rank)); PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); - if (iascii) { - PetscCall(PetscViewerASCIIPushSynchronized(viewer)); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] MIS aggregator\n", rank)); - if (!rank) { - PetscCDIntNd *pos, *pos2; - for (PetscInt kk = 0; kk < coarse->agg_lists->size; kk++) { - PetscCall(PetscCDGetHeadPos(coarse->agg_lists, kk, &pos)); - if ((pos2 = pos)) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "selected %d: ", (int)kk)); - while (pos) { - PetscInt gid1; - PetscCall(PetscCDIntNdGetID(pos, &gid1)); - 
PetscCall(PetscCDGetNextPos(coarse->agg_lists, kk, &pos)); - PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %d ", (int)gid1)); + PetscCall(PetscViewerGetFormat(viewer, &format)); + if (iascii && format == PETSC_VIEWER_ASCII_INFO_DETAIL) { + if (coarse->agg_lists) { + PetscCall(PetscViewerASCIIPushSynchronized(viewer)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] MIS aggregator\n", rank)); + if (!rank) { + PetscCDIntNd *pos, *pos2; + for (PetscInt kk = 0; kk < coarse->agg_lists->size; kk++) { + PetscCall(PetscCDGetHeadPos(coarse->agg_lists, kk, &pos)); + if ((pos2 = pos)) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "selected %d: ", (int)kk)); + while (pos) { + PetscInt gid1; + PetscCall(PetscCDIntNdGetID(pos, &gid1)); + PetscCall(PetscCDGetNextPos(coarse->agg_lists, kk, &pos)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %d ", (int)gid1)); + } + if (pos2) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n")); } - if (pos2) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n")); } + PetscCall(PetscViewerFlush(viewer)); + PetscCall(PetscViewerASCIIPopSynchronized(viewer)); + } else { + PetscCall(PetscViewerASCIIPrintf(viewer, " MIS aggregator lists are not available\n")); } - PetscCall(PetscViewerFlush(viewer)); - PetscCall(PetscViewerASCIIPopSynchronized(viewer)); } PetscFunctionReturn(PETSC_SUCCESS); } /*MC - MATCOARSENMIS - Creates a coarsening with a maximal independent set (MIS) algorithm + MATCOARSENMIS - Creates a coarsening object that uses a maximal independent set (MIS) algorithm Collective diff --git a/src/mat/graphops/coarsen/impls/misk/misk.c b/src/mat/graphops/coarsen/impls/misk/misk.c index 520115455a5..d27e5eda4e5 100644 --- a/src/mat/graphops/coarsen/impls/misk/misk.c +++ b/src/mat/graphops/coarsen/impls/misk/misk.c @@ -84,18 +84,19 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, PetscCall(MatCheckCompressedRow(mpimat->B, matB->nonzerorowcnt, &matB->compressedrow, matB->i, cMat->rmap->n, -1.0)); } else { PetscBool isAIJ; + + matA = (Mat_SeqAIJ *)cMat->data; PetscCall(PetscObjectBaseTypeCompare((PetscObject)cMat, MATSEQAIJ, &isAIJ)); PetscCheck(isAIJ, PETSC_COMM_SELF, PETSC_ERR_USER, "Require AIJ matrix."); - matA = (Mat_SeqAIJ *)cMat->data; } PetscCall(MatGetOwnershipRange(cMat, &my0, &Iend)); - if (mpimat) { + if (isMPI) { PetscInt *lid_gid; + PetscCall(PetscMalloc1(nloc_inner, &lid_gid)); /* explicit array needed */ for (kk = 0, gid = my0; kk < nloc_inner; kk++, gid++) lid_gid[kk] = gid; PetscCall(VecGetLocalSize(mpimat->lvec, &num_fine_ghosts)); - PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_gid)); - PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_state)); + PetscCall(PetscMalloc2(num_fine_ghosts, &cpcol_gid, num_fine_ghosts, &cpcol_state)); PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)cMat), &sf)); PetscCall(MatGetLayouts(cMat, &layout, NULL)); PetscCall(PetscSFSetGraphLayout(sf, layout, num_fine_ghosts, NULL, PETSC_COPY_VALUES, mpimat->garray)); @@ -105,12 +106,7 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, PetscCall(PetscFree(lid_gid)); } else num_fine_ghosts = 0; - PetscCall(PetscMalloc1(nloc_inner, &lid_cprowID)); - PetscCall(PetscMalloc1(nloc_inner, &lid_removed)); /* explicit array needed */ - PetscCall(PetscMalloc1(nloc_inner, &lid_parent_gid)); - PetscCall(PetscMalloc1(nloc_inner, &lid_state)); - - /* the data structure */ + PetscCall(PetscMalloc4(nloc_inner, &lid_cprowID, nloc_inner, &lid_removed, nloc_inner, 
&lid_parent_gid, nloc_inner, &lid_state)); PetscCall(PetscCDCreate(nloc_inner, &agg_lists)); /* need an inverse map - locals */ for (kk = 0; kk < nloc_inner; kk++) { @@ -191,7 +187,7 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, } /* vertex loop */ /* update ghost states and count todos */ - if (mpimat) { + if (isMPI) { /* scatter states, check for done */ PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE)); PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE)); @@ -227,9 +223,9 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, /* tell adj who my lid_parent_gid vertices belong to - fill in agg_lists selected ghost lists */ if (matB) { PetscInt *cpcol_sel_gid, *icpcol_gid; + /* need to copy this to free buffer -- should do this globally */ - PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_sel_gid)); - PetscCall(PetscMalloc1(num_fine_ghosts, &icpcol_gid)); + PetscCall(PetscMalloc2(num_fine_ghosts, &icpcol_gid, num_fine_ghosts, &cpcol_sel_gid)); for (cpid = 0; cpid < num_fine_ghosts; cpid++) icpcol_gid[cpid] = cpcol_gid[cpid]; /* get proc of deleted ghost */ PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_parent_gid, cpcol_sel_gid, MPI_REPLACE)); @@ -243,16 +239,11 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, } } // done - cleanup - PetscCall(PetscFree(icpcol_gid)); - PetscCall(PetscFree(cpcol_sel_gid)); + PetscCall(PetscFree2(icpcol_gid, cpcol_sel_gid)); PetscCall(PetscSFDestroy(&sf)); - PetscCall(PetscFree(cpcol_gid)); - PetscCall(PetscFree(cpcol_state)); + PetscCall(PetscFree2(cpcol_gid, cpcol_state)); } - PetscCall(PetscFree(lid_cprowID)); - PetscCall(PetscFree(lid_removed)); - PetscCall(PetscFree(lid_parent_gid)); - PetscCall(PetscFree(lid_state)); + PetscCall(PetscFree4(lid_cprowID, lid_removed, lid_parent_gid, lid_state)); /* MIS done - make projection matrix - P */ MatType jtype; @@ -265,6 +256,7 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, { PetscCDIntNd *pos, *pos2; PetscInt colIndex, Iend, fgid; + PetscCall(MatGetOwnershipRangeColumn(Prols[iterIdx], &colIndex, &Iend)); // TODO - order with permutation in lid_selected (reversed) for (PetscInt lid = 0; lid < agg_lists->size; lid++) { @@ -295,6 +287,7 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, Rtot = Prols[misk - 1]; // compose P then transpose to get R for (PetscInt iterIdx = misk - 1; iterIdx > 0; iterIdx--) { Mat P; + PetscCall(MatMatMult(Prols[iterIdx - 1], Rtot, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &P)); PetscCall(MatDestroy(&Prols[iterIdx - 1])); PetscCall(MatDestroy(&Rtot)); @@ -308,14 +301,17 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, const PetscInt nloc = Gmat->rmap->n; PetscCoarsenData *agg_lists; Mat mat; + PetscCall(PetscCDCreate(nloc, &agg_lists)); *a_locals_llist = agg_lists; // return PetscCall(MatGetOwnershipRange(Rtot, &Istart, &Iend)); - for (int grow = Istart, lid = 0; grow < Iend; grow++, lid++) { + for (PetscInt grow = Istart, lid = 0; grow < Iend; grow++, lid++) { const PetscInt *idx; + PetscCall(MatGetRow(Rtot, grow, &ncols, &idx, NULL)); - for (int jj = 0; jj < ncols; jj++) { + for (PetscInt jj = 0; jj < ncols; jj++) { PetscInt gcol = idx[jj]; + PetscCall(PetscCDAppendID(agg_lists, lid, gcol)); // local row, global column } PetscCall(MatRestoreRow(Rtot, grow, &ncols, &idx, NULL)); @@ -323,7 +319,7 @@ static PetscErrorCode 
MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, PetscCall(MatDestroy(&Rtot)); /* make fake matrix, get largest nnz */ - for (int lid = 0; lid < nloc; lid++) { + for (PetscInt lid = 0; lid < nloc; lid++) { PetscCall(PetscCDCountAt(agg_lists, lid, &jj)); if (jj > max_osz) max_osz = jj; } @@ -334,9 +330,11 @@ static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, PetscCall(MatCreateAIJ(comm, nloc, nloc, PETSC_DETERMINE, PETSC_DETERMINE, 0, NULL, max_osz, NULL, &mat)); for (PetscInt lid = 0, gidi = Istart; lid < nloc; lid++, gidi++) { PetscCDIntNd *pos; + PetscCall(PetscCDGetHeadPos(agg_lists, lid, &pos)); while (pos) { PetscInt gidj; + PetscCall(PetscCDIntNdGetID(pos, &gidj)); PetscCall(PetscCDGetNextPos(agg_lists, lid, &pos)); if (gidj < Istart || gidj >= Istart + nloc) PetscCall(MatSetValues(mat, 1, &gidi, 1, &gidj, &one, ADD_VALUES)); @@ -376,13 +374,15 @@ static PetscErrorCode MatCoarsenApply_MISK(MatCoarsen coarse) static PetscErrorCode MatCoarsenView_MISK(MatCoarsen coarse, PetscViewer viewer) { - PetscMPIInt rank; - PetscBool iascii; + PetscMPIInt rank; + PetscBool iascii; + PetscViewerFormat format; PetscFunctionBegin; PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)coarse), &rank)); PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); - if (iascii) { + PetscCall(PetscViewerGetFormat(viewer, &format)); + if (iascii && format == PETSC_VIEWER_ASCII_INFO_DETAIL) { PetscCall(PetscViewerASCIIPushSynchronized(viewer)); PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] MISK aggregator\n", rank)); if (!rank) PetscCall(PetscCoarsenDataView_private(coarse->agg_lists, viewer)); @@ -413,7 +413,10 @@ static PetscErrorCode MatCoarsenSetFromOptions_MISK(MatCoarsen coarse, PetscOpti Options Database Key: . -mat_coarsen_misk_distance - distance for MIS -.seealso: `MatCoarsen`, `MatCoarsenMISKSetDistance()`, `MatCoarsenApply()`, `MatCoarsenSetType()`, `MatCoarsenType`, `MatCoarsenCreate()` + Note: + When the coarsening is used inside `PCGAMG` then the options database key is `-pc_gamg_mat_coarsen_misk_distance` + +.seealso: `MatCoarsen`, `MatCoarsenMISKSetDistance()`, `MatCoarsenApply()`, `MatCoarsenSetType()`, `MatCoarsenType`, `MatCoarsenCreate()`, `MATCOARSENHEM`, `MATCOARSENMIS` M*/ PETSC_EXTERN PetscErrorCode MatCoarsenCreate_MISK(MatCoarsen coarse) @@ -440,6 +443,9 @@ PETSC_EXTERN PetscErrorCode MatCoarsenCreate_MISK(MatCoarsen coarse) Level: advanced + Note: + When the coarsening is used inside `PCGAMG` then the options database key is `-pc_gamg_mat_coarsen_misk_distance` + .seealso: `MATCOARSENMISK`, `MatCoarsen`, `MatCoarsenSetFromOptions()`, `MatCoarsenSetType()`, `MatCoarsenRegister()`, `MatCoarsenCreate()`, `MatCoarsenDestroy()`, `MatCoarsenSetAdjacency()`, `MatCoarsenMISKGetDistance()`, `MatCoarsenGetData()` diff --git a/src/mat/graphops/coarsen/coarsen.c b/src/mat/graphops/coarsen/interface/coarsen.c similarity index 90% rename from src/mat/graphops/coarsen/coarsen.c rename to src/mat/graphops/coarsen/interface/coarsen.c index adafd6318c9..08c8eea2d83 100644 --- a/src/mat/graphops/coarsen/coarsen.c +++ b/src/mat/graphops/coarsen/interface/coarsen.c @@ -9,7 +9,7 @@ PetscBool MatCoarsenRegisterAllCalled = PETSC_FALSE; /*@C MatCoarsenRegister - Adds a new sparse matrix coarsening algorithm to the matrix package.
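   For illustration, a minimal sketch of how a registered coarsener is then selected and applied, assuming `MyAggCreate` is the user-supplied creation routine named below and `G` is an already assembled adjacency/strength matrix:
.vb
  MatCoarsen agg;
  PetscCall(MatCoarsenRegister("my_agg", MyAggCreate)); /* MyAggCreate: user-supplied constructor */
  PetscCall(MatCoarsenCreate(PETSC_COMM_WORLD, &agg));
  PetscCall(MatCoarsenSetType(agg, "my_agg"));          /* or select at runtime with -mat_coarsen_type my_agg */
  PetscCall(MatCoarsenSetAdjacency(agg, G));            /* G: previously assembled adjacency matrix */
  PetscCall(MatCoarsenApply(agg));
  PetscCall(MatCoarsenDestroy(&agg));
.ve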
- Logically Collective + Logically Collective, No Fortran Support Input Parameters: + sname - name of coarsen (for example `MATCOARSENMIS`) @@ -22,10 +22,7 @@ PetscBool MatCoarsenRegisterAllCalled = PETSC_FALSE; MatCoarsenRegister("my_agg", MyAggCreate); .ve - Then, your aggregator can be chosen with the procedural interface via -$ MatCoarsenSetType(agg, "my_agg") - or at runtime via the option -$ -mat_coarsen_type my_agg + Then, your aggregator can be chosen with the procedural interface via `MatCoarsenSetType(agg, "my_agg")` or at runtime via the option `-mat_coarsen_type my_agg` .seealso: `MatCoarsen`, `MatCoarsenType`, `MatCoarsenSetType()`, `MatCoarsenCreate()`, `MatCoarsenRegisterDestroy()`, `MatCoarsenRegisterAll()` @*/ @@ -37,7 +34,7 @@ PetscErrorCode MatCoarsenRegister(const char sname[], PetscErrorCode (*function) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCoarsenGetType - Gets the Coarsen method type and name (as a string) from the coarsen context. @@ -77,6 +74,8 @@ PetscErrorCode MatCoarsenGetType(MatCoarsen coarsen, MatCoarsenType *type) Level: advanced Notes: + When the coarsening is used inside `PCGAMG` then the options database keys are prefixed with `-pc_gamg_` + Use `MatCoarsenGetData()` to access the results of the coarsening The user can define additional coarsens; see `MatCoarsenRegister()`. @@ -173,7 +172,7 @@ PetscErrorCode MatCoarsenDestroy(MatCoarsen *agg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCoarsenViewFromOptions - View the coarsener from the options database Collective @@ -210,7 +209,7 @@ PetscErrorCode MatCoarsenViewFromOptions(MatCoarsen A, PetscObject obj, const ch PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCoarsenView - Prints the coarsen data structure. Collective @@ -246,7 +245,7 @@ PetscErrorCode MatCoarsenView(MatCoarsen agg, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCoarsenSetType - Sets the type of aggregator to use Collective @@ -287,7 +286,7 @@ PetscErrorCode MatCoarsenSetType(MatCoarsen coarser, MatCoarsenType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCoarsenSetGreedyOrdering - Sets the ordering of the vertices to use with a greedy coarsening method Logically Collective @@ -314,7 +313,7 @@ PetscErrorCode MatCoarsenSetGreedyOrdering(MatCoarsen coarser, const IS perm) /*@C MatCoarsenGetData - Gets the weights for vertices for a coarsener. - Logically Collective + Logically Collective, No Fortran Support Input Parameter: . coarser - the coarsen context @@ -324,7 +323,10 @@ PetscErrorCode MatCoarsenSetGreedyOrdering(MatCoarsen coarser, const IS perm) Level: advanced -.seealso: `MatCoarsen`, `MatCoarsenApply()`, `MatCoarsenCreate()`, `MatCoarsenSetType()` + Note: + This passes ownership to the caller and nullifies the value of weights (`PetscCoarsenData`) within the `MatCoarsen` + +.seealso: `MatCoarsen`, `MatCoarsenApply()`, `MatCoarsenCreate()`, `MatCoarsenSetType()`, `PetscCoarsenData` @*/ PetscErrorCode MatCoarsenGetData(MatCoarsen coarser, PetscCoarsenData **llist) { @@ -345,14 +347,18 @@ PetscErrorCode MatCoarsenGetData(MatCoarsen coarser, PetscCoarsenData **llist) . coarser - the coarsen context. Options Database Key: -. 
-mat_coarsen_type - mis: maximal independent set based; misk: distance k MIS; hem: heavy edge matching ++ -mat_coarsen_type - mis: maximal independent set based; misk: distance k MIS; hem: heavy edge matching +- -mat_coarsen_max_it - number of iterations to use in the coarsening process - see `MatCoarsenSetMaximumIterations()` Level: advanced - Note: + Notes: + When the coarsening is used inside `PCGAMG` then the options database keys are prefixed with `-pc_gamg_` + Sets the `MatCoarsenType` to `MATCOARSENMISK` if it has not been set previously -.seealso: `MatCoarsen`, `MatCoarsenType`, `MatCoarsenApply()`, `MatCoarsenCreate()`, `MatCoarsenSetType()` +.seealso: `MatCoarsen`, `MatCoarsenType`, `MatCoarsenApply()`, `MatCoarsenCreate()`, `MatCoarsenSetType()`, + `MatCoarsenSetMaximumIterations()` @*/ PetscErrorCode MatCoarsenSetFromOptions(MatCoarsen coarser) { @@ -385,7 +391,7 @@ PetscErrorCode MatCoarsenSetFromOptions(MatCoarsen coarser) } /*@ - MatCoarsenSetMaximumIterations - Max HEM iterations + MatCoarsenSetMaximumIterations - Maximum `MATCOARSENHEM` iterations to use Logically Collective @@ -394,10 +400,13 @@ PetscErrorCode MatCoarsenSetFromOptions(MatCoarsen coarser) - n - number of HEM iterations Options Database Key: -. -mat_coarsen_max_it - Max HEM iterations +. -mat_coarsen_max_it - Maximum `MATCOARSENHEM` iterations to use Level: intermediate + Note: + When the coarsening is used inside `PCGAMG` then the options database keys are prefixed with `-pc_gamg_` + .seealso: `MatCoarsen`, `MatCoarsenType`, `MatCoarsenApply()`, `MatCoarsenCreate()`, `MatCoarsenSetType()` @*/ PetscErrorCode MatCoarsenSetMaximumIterations(MatCoarsen coarse, PetscInt n) @@ -431,6 +440,9 @@ static PetscErrorCode MatCoarsenSetMaximumIterations_MATCOARSEN(MatCoarsen coars Level: intermediate + Note: + When the coarsening is used inside `PCGAMG` then the options database keys are prefixed with `-pc_gamg_` + .seealso: `MatCoarsen`, `MatCoarsenType`, `MatCoarsenApply()`, `MatCoarsenCreate()`, `MatCoarsenSetType()` @*/ PetscErrorCode MatCoarsenSetStrengthIndex(MatCoarsen coarse, PetscInt n, PetscInt idx[]) { @@ -451,7 +463,7 @@ static PetscErrorCode MatCoarsenSetStrengthIndex_MATCOARSEN(MatCoarsen coarse, P } /*@ - MatCoarsenSetThreshold - Max HEM iterations + MatCoarsenSetThreshold - Set the threshold for HEM Logically Collective @@ -460,10 +472,16 @@ static PetscErrorCode MatCoarsenSetStrengthIndex_MATCOARSEN(MatCoarsen coarse, P - b - threshold value Options Database Key: -. -mat_coarsen_threshold <-1> - Max HEM iterations +.
-mat_coarsen_threshold <-1> - threshold Level: intermediate + Note: + When the coarsening is used inside `PCGAMG` then the options database keys are prefixed with `-pc_gamg_` + + Developer Note: + It is not documented how this threshold is used + .seealso: `MatCoarsen`, `MatCoarsenType`, `MatCoarsenApply()`, `MatCoarsenCreate()`, `MatCoarsenSetType()` @*/ PetscErrorCode MatCoarsenSetThreshold(MatCoarsen coarse, PetscReal b) @@ -497,23 +515,20 @@ static PetscErrorCode MatCoarsenSetThreshold_MATCOARSEN(MatCoarsen coarse, Petsc .seealso: `MatCoarsen`, `MatCoarsenSetType()`, `MatCoarsenApply()`, `MatCoarsenDestroy()`, `MatCoarsenSetAdjacency()`, `MatCoarsenGetData()` - @*/ PetscErrorCode MatCoarsenCreate(MPI_Comm comm, MatCoarsen *newcrs) { MatCoarsen agg; PetscFunctionBegin; - *newcrs = NULL; - + PetscAssertPointer(newcrs, 2); PetscCall(MatInitializePackage()); + PetscCall(PetscHeaderCreate(agg, MAT_COARSEN_CLASSID, "MatCoarsen", "Matrix/graph coarsen", "MatCoarsen", comm, MatCoarsenDestroy, MatCoarsenView)); PetscCall(PetscObjectComposeFunction((PetscObject)agg, "MatCoarsenSetMaximumIterations_C", MatCoarsenSetMaximumIterations_MATCOARSEN)); PetscCall(PetscObjectComposeFunction((PetscObject)agg, "MatCoarsenSetThreshold_C", MatCoarsenSetThreshold_MATCOARSEN)); PetscCall(PetscObjectComposeFunction((PetscObject)agg, "MatCoarsenSetStrengthIndex_C", MatCoarsenSetStrengthIndex_MATCOARSEN)); - agg->strength_index_size = 0; - - *newcrs = agg; + *newcrs = agg; PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/dm/impls/plex/ftn-custom/makefile b/src/mat/graphops/coarsen/interface/makefile similarity index 73% rename from src/dm/impls/plex/ftn-custom/makefile rename to src/mat/graphops/coarsen/interface/makefile index 89dab51061a..0f142537030 100644 --- a/src/dm/impls/plex/ftn-custom/makefile +++ b/src/mat/graphops/coarsen/interface/makefile @@ -1,6 +1,7 @@ -include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' +MANSEC = Mat +SUBMANSEC = MatGraphOperations include ${PETSC_DIR}/lib/petsc/conf/variables include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/graphops/coarsen/scoarsen.c b/src/mat/graphops/coarsen/interface/scoarsen.c similarity index 100% rename from src/mat/graphops/coarsen/scoarsen.c rename to src/mat/graphops/coarsen/interface/scoarsen.c diff --git a/src/mat/graphops/color/interface/ftn-custom/makefile b/src/mat/graphops/color/interface/ftn-custom/makefile deleted file mode 100644 index 9db7109084f..00000000000 --- a/src/mat/graphops/color/interface/ftn-custom/makefile +++ /dev/null @@ -1,5 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/graphops/color/interface/ftn-custom/zmatcoloringf.c b/src/mat/graphops/color/interface/ftn-custom/zmatcoloringf.c deleted file mode 100644 index 05240579013..00000000000 --- a/src/mat/graphops/color/interface/ftn-custom/zmatcoloringf.c +++ /dev/null @@ -1,18 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcoloringsettype_ MATCOLORINGSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define matcoloringsettype_ matcoloringsettype -#endif - -PETSC_EXTERN void matcoloringsettype_(MatColoring *mc, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = MatColoringSetType(*mc, t); - if (*ierr) return; - FREECHAR(type, 
t); -} diff --git a/src/mat/graphops/color/interface/matcoloring.c b/src/mat/graphops/color/interface/matcoloring.c index 9cdbfe86082..e1dbc34606d 100644 --- a/src/mat/graphops/color/interface/matcoloring.c +++ b/src/mat/graphops/color/interface/matcoloring.c @@ -7,7 +7,7 @@ const char *const MatColoringWeightTypes[] = {"RANDOM", "LEXICAL", "LF", "SL /*@C MatColoringRegister - Adds a new sparse matrix coloring to the matrix package. - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of Coloring (for example `MATCOLORINGSL`) @@ -20,10 +20,8 @@ const char *const MatColoringWeightTypes[] = {"RANDOM", "LEXICAL", "LF", "SL MatColoringRegister("my_color", MyColor); .ve - Then, your partitioner can be chosen with the procedural interface via -$ MatColoringSetType(part, "my_color") - or at runtime via the option -$ -mat_coloring_type my_color + Then, your partitioner can be chosen with the procedural interface via `MatColoringSetType(part, "my_color")` or at runtime via the option + `-mat_coloring_type my_color` .seealso: `MatColoringType`, `MatColoringRegisterDestroy()`, `MatColoringRegisterAll()` @*/ @@ -75,9 +73,8 @@ PetscErrorCode MatColoringCreate(Mat m, MatColoring *mcptr) PetscFunctionBegin; PetscValidHeaderSpecific(m, MAT_CLASSID, 1); PetscAssertPointer(mcptr, 2); - *mcptr = NULL; - PetscCall(MatInitializePackage()); + PetscCall(PetscHeaderCreate(mc, MAT_COLORING_CLASSID, "MatColoring", "Matrix coloring", "MatColoring", PetscObjectComm((PetscObject)m), MatColoringDestroy, MatColoringView)); PetscCall(PetscObjectReference((PetscObject)m)); mc->mat = m; @@ -118,7 +115,7 @@ PetscErrorCode MatColoringDestroy(MatColoring *mc) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatColoringSetType - Sets the type of coloring algorithm used Collective diff --git a/src/mat/graphops/order/ftn-custom/makefile b/src/mat/graphops/order/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/mat/graphops/order/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/graphops/order/ftn-custom/zsorderf.c b/src/mat/graphops/order/ftn-custom/zsorderf.c deleted file mode 100644 index 0bfe4709ad7..00000000000 --- a/src/mat/graphops/order/ftn-custom/zsorderf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matgetordering_ MATGETORDERING -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matgetordering_ matgetordering -#endif - -PETSC_EXTERN void matgetordering_(Mat *mat, char *type, IS *rperm, IS *cperm, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(type, len, t); - *ierr = MatGetOrdering(*mat, t, rperm, cperm); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/mat/graphops/order/sorder.c b/src/mat/graphops/order/sorder.c index 1461f3abc42..b93abcfa84b 100644 --- a/src/mat/graphops/order/sorder.c +++ b/src/mat/graphops/order/sorder.c @@ -74,7 +74,7 @@ PETSC_INTERN PetscErrorCode MatGetOrdering_RowLength(Mat mat, MatOrderingType ty /*@C MatOrderingRegister - Adds a new sparse matrix ordering to the matrix package. 
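   For illustration, a minimal sketch of the downstream use through `MatGetOrdering()`, whose docstring is touched below; `A` here is an assumed assembled `Mat`:
.vb
  IS rperm, cperm;
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rperm, &cperm)); /* nested-dissection row/column permutations */
  /* ... hand rperm/cperm to a factorization, then release them ... */
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
.ve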
- Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of ordering (for example `MATORDERINGND`) @@ -87,12 +87,10 @@ PETSC_INTERN PetscErrorCode MatGetOrdering_RowLength(Mat mat, MatOrderingType ty MatOrderingRegister("my_order", MyOrder); .ve - Then, your partitioner can be chosen with the procedural interface via -$ MatOrderingSetType(part, "my_order) - or at runtime via the option -$ -pc_factor_mat_ordering_type my_order + Then, your ordering can be chosen with the procedural interface via `MatOrderingSetType(part, "my_order")` or at runtime via the option + `-pc_factor_mat_ordering_type my_order` -.seealso: `MatOrderingRegisterAll()`, `MatGetOrdering()` +.seealso: `Mat`, `MatOrderingType`, `MatOrderingRegisterAll()`, `MatGetOrdering()` @*/ PetscErrorCode MatOrderingRegister(const char sname[], PetscErrorCode (*function)(Mat, MatOrderingType, IS *, IS *)) { @@ -103,7 +101,7 @@ PetscErrorCode MatOrderingRegister(const char sname[], PetscErrorCode (*function } #include <../src/mat/impls/aij/mpi/mpiaij.h> -/*@C +/*@ MatGetOrdering - Gets a reordering for a matrix to reduce fill or to improve numerical stability of LU factorization. diff --git a/src/mat/graphops/partition/ftn-custom/zpartitionf.c b/src/mat/graphops/partition/ftn-custom/zpartitionf.c index 464ea51595f..e8129f09014 100644 --- a/src/mat/graphops/partition/ftn-custom/zpartitionf.c +++ b/src/mat/graphops/partition/ftn-custom/zpartitionf.c @@ -4,14 +4,8 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define matpartitioningsetvertexweights_ MATPARTITIONINGSETVERTEXWEIGHTS - #define matpartitioningview_ MATPARTITIONINGVIEW - #define matpartitioningsettype_ MATPARTITIONINGSETTYPE - #define matpartitioningviewfromoptions_ MATPARTITIONINGVIEWFROMOPTIONS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define matpartitioningsetvertexweights_ matpartitioningsetvertexweights - #define matpartitioningview_ matpartitioningview - #define matpartitioningsettype_ matpartitioningsettype - #define matpartitioningviewfromoptions_ matpartitioningviewfromoptions #endif PETSC_EXTERN void matpartitioningsetvertexweights_(MatPartitioning *part, const PetscInt weights[], PetscErrorCode *ierr) @@ -26,28 +20,3 @@ PETSC_EXTERN void matpartitioningsetvertexweights_(MatPartitioning *part, const if (*ierr) return; *ierr = MatPartitioningSetVertexWeights(*part, array); } -PETSC_EXTERN void matpartitioningview_(MatPartitioning *part, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = MatPartitioningView(*part, v); } - -PETSC_EXTERN void matpartitioningsettype_(MatPartitioning *part, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(type, len, t); - *ierr = MatPartitioningSetType(*part, t); - if (*ierr) return; - FREECHAR(type, t); -} -PETSC_EXTERN void matpartitioningviewfromoptions_(MatPartitioning *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = MatPartitioningViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/mat/graphops/partition/impls/party/ftn-custom/makefile b/src/mat/graphops/partition/impls/party/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/graphops/partition/impls/party/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include
${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/graphops/partition/impls/party/ftn-custom/zpartyf.c b/src/mat/graphops/partition/impls/party/ftn-custom/zpartyf.c deleted file mode 100644 index e68f4f11174..00000000000 --- a/src/mat/graphops/partition/impls/party/ftn-custom/zpartyf.c +++ /dev/null @@ -1,28 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matpartitioningpartysetglobal_ MATPARTITIONINGPARTYSETGLOBAL - #define matpartitioningpartysetlocal_ MATPARTITIONINGPARTYSETLOCAL -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matpartitioningpartysetglobal_ matpartitioningpartysetglobal - #define matpartitioningpartysetlocal_ matpartitioningpartysetlocal -#endif - -PETSC_EXTERN void matpartitioningpartysetglobal_(MatPartitioning *part, char *method, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(method, len, t); - *ierr = MatPartitioningPartySetGlobal(*part, t); - if (*ierr) return; - FREECHAR(method, t); -} - -PETSC_EXTERN void matpartitioningpartysetlocal_(MatPartitioning *part, char *method, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(method, len, t); - *ierr = MatPartitioningPartySetLocal(*part, t); - if (*ierr) return; - FREECHAR(method, t); -} diff --git a/src/mat/graphops/partition/impls/party/party.c b/src/mat/graphops/partition/impls/party/party.c index f689ee2531c..2e083df7501 100644 --- a/src/mat/graphops/partition/impls/party/party.c +++ b/src/mat/graphops/partition/impls/party/party.c @@ -149,7 +149,7 @@ static PetscErrorCode MatPartitioningView_Party(MatPartitioning part, PetscViewe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningPartySetGlobal - Set global method for Party partitioner. Collective @@ -169,6 +169,9 @@ static PetscErrorCode MatPartitioningView_Party(MatPartitioning part, PetscViewe alternatively a string describing the method. Two or more methods can be combined like "gbf,gcf". Check the Party Library Users Manual for details. + Developer Note: + Should be `MatPartitioningPartySetGlobalType()` and all uses of method should be changed to type + .seealso: `MATPARTITIONINGPARTY`, `MatPartitioningPartySetLocal()` @*/ PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning part, const char *global) @@ -188,7 +191,7 @@ static PetscErrorCode MatPartitioningPartySetGlobal_Party(MatPartitioning part, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningPartySetLocal - Set local method used by the Party partitioner. Collective @@ -206,6 +209,9 @@ static PetscErrorCode MatPartitioningPartySetGlobal_Party(MatPartitioning part, The method may be one of `MP_PARTY_HELPFUL_SETS`, `MP_PARTY_KERNIGHAN_LIN`, or `MP_PARTY_NONE`. Check the Party Library Users Manual for details. + Developer Note: + Should be `MatPartitioningPartySetLocalType()` and all uses of method should be changed to type + .seealso: `MATPARTITIONINGPARTY`, `MatPartitioningPartySetGlobal()` @*/ PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning part, const char *local) diff --git a/src/mat/graphops/partition/partition.c b/src/mat/graphops/partition/partition.c index c09ede1a6eb..928b5c3ab4e 100644 --- a/src/mat/graphops/partition/partition.c +++ b/src/mat/graphops/partition/partition.c @@ -163,7 +163,7 @@ PetscBool MatPartitioningRegisterAllCalled = PETSC_FALSE; /*@C MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package. 
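   For illustration, a minimal sketch of the calling sequence a registered partitioner plugs into, assuming `Adj` is an assembled adjacency matrix (for example of type `MATMPIADJ`):
.vb
  MatPartitioning part;
  IS              is;
  PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
  PetscCall(MatPartitioningSetAdjacency(part, Adj));
  PetscCall(MatPartitioningSetFromOptions(part)); /* honors -mat_partitioning_type my_part */
  PetscCall(MatPartitioningApply(part, &is));     /* is maps each local vertex to its target part */
  PetscCall(ISDestroy(&is));
  PetscCall(MatPartitioningDestroy(&part));
.ve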
- Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of partitioning (for example `MATPARTITIONINGCURRENT`) or `MATPARTITIONINGPARMETIS` @@ -176,10 +176,8 @@ PetscBool MatPartitioningRegisterAllCalled = PETSC_FALSE; MatPartitioningRegister("my_part", MyPartCreate); .ve - Then, your partitioner can be chosen with the procedural interface via -$ MatPartitioningSetType(part, "my_part") - or at runtime via the option -$ -mat_partitioning_type my_part + Then, your partitioner can be chosen with the procedural interface via `MatPartitioningSetType(part, "my_part")` or at runtime via the option + `-mat_partitioning_type my_part` .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()` @*/ @@ -191,7 +189,7 @@ PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*func PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningGetType - Gets the Partitioning method type and name (as a string) from the partitioning context. @@ -216,7 +214,7 @@ PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartition PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningSetNParts - Set how many partitions need to be created; by default this is one per processor. Certain partitioning schemes may in fact only support that option. @@ -467,7 +465,7 @@ PetscErrorCode MatPartitioningDestroy(MatPartitioning *part) Input Parameters: + part - the partitioning context - weights - the weights, on each process this array must have the same size as the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()` or - 1 if that is not provided + 1 if that is not provided Level: beginner @@ -477,6 +475,9 @@ PetscErrorCode MatPartitioningDestroy(MatPartitioning *part) The weights may not be used by some partitioners + Fortran Note: + The array `weights` is copied during this function call. + .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()` @*/ PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[]) @@ -496,11 +497,11 @@ PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const Petsc Input Parameters: + part - the partitioning context - weights - An array of size nparts that is used to specify the fraction of - vertex weight that should be distributed to each sub-domain for - the balance constraint. If all of the sub-domains are to be of - the same size, then each of the nparts elements should be set - to a value of 1/nparts. Note that the sum of all of the weights - should be one. + vertex weight that should be distributed to each sub-domain for + the balance constraint. If all of the sub-domains are to be of + the same size, then each of the nparts elements should be set + to a value of 1/nparts. Note that the sum of all of the weights + should be one. Level: beginner @@ -508,6 +509,9 @@ PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const Petsc The array weights is freed by PETSc so the user should not free the array. In C/C++ the array must be obtained with a call to `PetscMalloc()`, not malloc(). + Fortran Note: + The array `weights` is copied during this function call. 
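   For illustration, a minimal C sketch assuming `part` exists and `nparts` parts were requested with `MatPartitioningSetNParts()`; per the note above, the array must come from `PetscMalloc()` because PETSc takes ownership and frees it:
.vb
  PetscReal *wts;
  PetscCall(PetscMalloc1(nparts, &wts));
  for (PetscInt p = 0; p < nparts; p++) wts[p] = 1.0 / nparts; /* equal-sized parts; the weights sum to one */
  PetscCall(MatPartitioningSetPartitionWeights(part, wts));    /* wts is now owned (and later freed) by PETSc */
.ve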
+ .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()` @*/ PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[]) @@ -527,8 +531,8 @@ PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const Pe Input Parameters: + part - the partitioning context - use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights will be used, - that is, use_edge_weights is set to FALSE. If set use_edge_weights to TRUE, users need to make sure legal - edge weights are stored in an ADJ matrix. + that is, use_edge_weights is set to FALSE. If use_edge_weights is set to TRUE, users need to make sure legal + edge weights are stored in an ADJ matrix. Options Database Key: . -mat_partitioning_use_edge_weights - (true or false) @@ -592,9 +596,9 @@ PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp) PetscMPIInt size; PetscFunctionBegin; - *newp = NULL; - + PetscAssertPointer(newp, 2); PetscCall(MatInitializePackage()); + PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatGraphOperations", comm, MatPartitioningDestroy, MatPartitioningView)); part->vertex_weights = NULL; part->part_weights = NULL; @@ -608,7 +612,7 @@ PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningViewFromOptions - View a partitioning context from the options database Collective @@ -645,7 +649,7 @@ PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningView - Prints the partitioning data structure.
Collective @@ -690,7 +694,7 @@ PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningSetType - Sets the type of partitioner to use Collective @@ -796,7 +800,7 @@ PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex Not Collective diff --git a/src/mat/impls/adj/mpi/ftn-custom/zmpiadjf.c b/src/mat/impls/adj/mpi/ftn-custom/zmpiadjf.c index c5de7ac006a..34731c54434 100644 --- a/src/mat/impls/adj/mpi/ftn-custom/zmpiadjf.c +++ b/src/mat/impls/adj/mpi/ftn-custom/zmpiadjf.c @@ -3,11 +3,9 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreatempiadj_ MATCREATEMPIADJ - #define matmpiadjsetpreallocation_ MATMPIADJSETPREALLOCATION + #define matcreatempiadj_ MATCREATEMPIADJ #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreatempiadj_ matcreatempiadj - #define matmpiadjsetpreallocation_ matmpiadjsetpreallocation + #define matcreatempiadj_ matcreatempiadj #endif PETSC_EXTERN void matcreatempiadj_(MPI_Comm *comm, PetscInt *m, PetscInt *n, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A, PetscErrorCode *ierr) @@ -19,9 +17,3 @@ PETSC_EXTERN void matcreatempiadj_(MPI_Comm *comm, PetscInt *m, PetscInt *n, Pet adj = (Mat_MPIAdj *)(*A)->data; adj->freeaij = PETSC_FALSE; } - -PETSC_EXTERN void matmpiadjsetpreallocation_(Mat *mat, PetscInt *i, PetscInt *j, PetscInt *values, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(values); - *ierr = MatMPIAdjSetPreallocation(*mat, i, j, values); -} diff --git a/src/mat/impls/adj/mpi/mpiadj.c b/src/mat/impls/adj/mpi/mpiadj.c index b5513717ea7..7298f03b61b 100644 --- a/src/mat/impls/adj/mpi/mpiadj.c +++ b/src/mat/impls/adj/mpi/mpiadj.c @@ -725,6 +725,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj, NULL, /*150*/ NULL, NULL, + NULL, NULL}; static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values) @@ -975,7 +976,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to complete `MATMPIADJ` on each process (needed by sequential partitioners) Logically Collective @@ -997,7 +998,7 @@ PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to complete `MATMPIADJ` on rank zero (needed by sequential partitioners) Logically Collective @@ -1024,7 +1025,7 @@ PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements Logically Collective @@ -1097,7 +1098,7 @@ PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscI Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC` Fortran Note: - From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.
+ From Fortran the arrays `indices` and `values` must be retained by the user until `A` is destroyed .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()` @*/ diff --git a/src/mat/impls/aij/mpi/aijmkl/mpiaijmkl.c b/src/mat/impls/aij/mpi/aijmkl/mpiaijmkl.c index e8202adadfa..7c05d6e0d78 100644 --- a/src/mat/impls/aij/mpi/aijmkl/mpiaijmkl.c +++ b/src/mat/impls/aij/mpi/aijmkl/mpiaijmkl.c @@ -54,14 +54,17 @@ The user MUST specify either the local or global matrix dimensions (possibly both). - The parallel matrix is partitioned such that the first m0 rows belong to - process 0, the next m1 rows belong to process 1, the next m2 rows belong - to process 2 etc.. where m0,m1,m2... are the input parameter `m`. + If `m` and `n` are not `PETSC_DECIDE`, then the values determine the `PetscLayout` of the matrix and the ranges returned by + `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`. + + The parallel matrix is partitioned such that the first `m0` rows belong to + process 0, the next `m1` rows belong to process 1, the next `m2` rows belong + to process 2, etc., where `m0`, `m1`, `m2`... are the input parameter `m` on each MPI process. The DIAGONAL portion of the local submatrix of a processor can be defined as the submatrix which is obtained by extracting the part corresponding - to the rows `r1` - `r2` and columns `r1` - `r2` of the global matrix, where `r1` is the - first row that belongs to the processor, and `r2` is the last row belonging + to the rows `r1` - `r2` and columns `r1` - `r2` of the global matrix, where `r1` is the + first row that belongs to the processor, and `r2` is the last row belonging to this processor. This is a square mxm matrix. The remaining portion of the local submatrix (mxN) constitutes the OFF-DIAGONAL portion. @@ -76,7 +79,9 @@ MatMPIAIJSetPreallocation(A,...); .ve -.seealso: [](ch_matrices), `Mat`, [Sparse Matrix Creation](sec_matsparse), `MATMPIAIJMKL`, `MatCreate()`, `MatCreateSeqAIJMKL()`, `MatSetValues()` +.seealso: [](ch_matrices), `Mat`, [Sparse Matrix Creation](sec_matsparse), `MATMPIAIJMKL`, `MatCreate()`, `MatCreateSeqAIJMKL()`, + `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, + `MatGetOwnershipRangesColumn()`, `PetscLayout` @*/ PetscErrorCode MatCreateMPIAIJMKL(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A) { diff --git a/src/mat/impls/aij/mpi/crl/mcrl.c b/src/mat/impls/aij/mpi/crl/mcrl.c index b664d5de69b..8a74f14f2a1 100644 --- a/src/mat/impls/aij/mpi/crl/mcrl.c +++ b/src/mat/impls/aij/mpi/crl/mcrl.c @@ -68,7 +68,7 @@ static PetscErrorCode MatMPIAIJCRL_create_aijcrl(Mat A) icols[j * m + i] = (j) ?
icols[(j - 1) * m + i] : 0; /* handle case where row is EMPTY */ } } - PetscCall(PetscInfo(A, "Percentage of 0's introduced for vectorized multiply %g\n", 1.0 - ((double)aijcrl->nz) / ((double)(rmax * m)))); + PetscCall(PetscInfo(A, "Percentage of 0's introduced for vectorized multiply %g\n", 1.0 - ((double)aijcrl->nz) / PetscMax((double)rmax * m, 1))); PetscCall(PetscFree(aijcrl->array)); PetscCall(PetscMalloc1(a->B->cmap->n + nd, &array)); diff --git a/src/mat/impls/aij/mpi/fdmpiaij.c b/src/mat/impls/aij/mpi/fdmpiaij.c index 11e936ad92d..69bfa5258a5 100644 --- a/src/mat/impls/aij/mpi/fdmpiaij.c +++ b/src/mat/impls/aij/mpi/fdmpiaij.c @@ -1,7 +1,7 @@ #include <../src/mat/impls/sell/mpi/mpisell.h> #include <../src/mat/impls/aij/mpi/mpiaij.h> #include <../src/mat/impls/baij/mpi/mpibaij.h> -#include +#include /*I "petscmat.h" I*/ static PetscErrorCode MatFDColoringMarkHost_AIJ(Mat J) { @@ -722,8 +722,7 @@ PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat mat, ISColoring iscoloring, MatFD PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - +/*@ MatFDColoringSetValues - takes a matrix in compressed color format and enters the matrix into a PETSc `Mat` Collective @@ -731,8 +730,8 @@ PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat mat, ISColoring iscoloring, MatFD Input Parameters: + J - the sparse matrix . coloring - created with `MatFDColoringCreate()` and a local coloring -- y - column major storage of matrix values with one color of values per column, the number of rows of y should match - the number of local rows of `J` and the number of columns is the number of colors. +- y - column major storage of matrix values with one color of values per column, the number of rows of `y` should match + the number of local rows of `J` and the number of columns is the number of colors. 
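   For illustration, a minimal sketch of the assumed layout of `y`, with hypothetical `nloc` local rows of `J` and `ncolors` colors:
.vb
  PetscScalar *y;
  PetscCall(PetscMalloc1(nloc * ncolors, &y));
  /* y is dense and column major: y[c * nloc + i] holds the value
     computed for local row i under color c */
  /* ... fill y from the compressed finite-difference results ... */
  PetscCall(MatFDColoringSetValues(J, coloring, y));
  PetscCall(PetscFree(y));
.ve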
Level: intermediate @@ -743,7 +742,7 @@ PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat mat, ISColoring iscoloring, MatFD .seealso: [](ch_matrices), `Mat`, `MatFDColoringCreate()`, `ISColoring`, `ISColoringCreate()`, `ISColoringSetType()`, `IS_COLORING_LOCAL`, `MatFDColoringSetBlockSize()` @*/ -PetscErrorCode MatFDColoringSetValues(Mat J, MatFDColoring coloring, const PetscScalar *y) +PetscErrorCode MatFDColoringSetValues(Mat J, MatFDColoring coloring, const PetscScalar y[]) { MatEntry2 *Jentry2; PetscInt row, i, nrows_k, l, ncolors, nz = 0, bcols, nbcols = 0; diff --git a/src/mat/impls/aij/mpi/ftn-custom/zmpiaijf.c b/src/mat/impls/aij/mpi/ftn-custom/zmpiaijf.c index 62c20dc5ef4..896d09c1e33 100644 --- a/src/mat/impls/aij/mpi/ftn-custom/zmpiaijf.c +++ b/src/mat/impls/aij/mpi/ftn-custom/zmpiaijf.c @@ -2,24 +2,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matmpiaijgetseqaij_ MATMPIAIJGETSEQAIJ - #define matcreateaij_ MATCREATEAIJ - #define matmpiaijsetpreallocation_ MATMPIAIJSETPREALLOCATION - #define matxaijsetpreallocation_ MATXAIJSETPREALLOCATION - #define matcreatempiaijwithsplitarrays_ MATCREATEMPIAIJWITHSPLITARRAYS + #define matmpiaijgetseqaij_ MATMPIAIJGETSEQAIJ #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matmpiaijgetseqaij_ matmpiaijgetseqaij - #define matcreateaij_ matcreateaij - #define matmpiaijsetpreallocation_ matmpiaijsetpreallocation - #define matxaijsetpreallocation_ matxaijsetpreallocation - #define matcreatempiaijwithsplitarrays_ matcreatempiaijwithsplitarrays + #define matmpiaijgetseqaij_ matmpiaijgetseqaij #endif -PETSC_EXTERN void matcreatempiaijwithsplitarrays_(MPI_Comm *comm, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, PetscInt i[], PetscInt j[], PetscScalar a[], PetscInt oi[], PetscInt oj[], PetscScalar oa[], Mat *mat, int *ierr) -{ - *ierr = MatCreateMPIAIJWithSplitArrays(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *m, *n, *M, *N, i, j, a, oi, oj, oa, mat); -} - PETSC_EXTERN void matmpiaijgetseqaij_(Mat *A, Mat *Ad, Mat *Ao, PetscInt *ic, size_t *iic, PetscErrorCode *ierr) { const PetscInt *i; @@ -27,27 +14,3 @@ PETSC_EXTERN void matmpiaijgetseqaij_(Mat *A, Mat *Ad, Mat *Ao, PetscInt *ic, si if (*ierr) return; *iic = PetscIntAddressToFortran(ic, (PetscInt *)i); } - -PETSC_EXTERN void matcreateaij_(MPI_Comm *comm, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, PetscInt *d_nz, PetscInt *d_nnz, PetscInt *o_nz, PetscInt *o_nnz, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(d_nnz); - CHKFORTRANNULLINTEGER(o_nnz); - - *ierr = MatCreateAIJ(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *m, *n, *M, *N, *d_nz, d_nnz, *o_nz, o_nnz, newmat); -} - -PETSC_EXTERN void matmpiaijsetpreallocation_(Mat *mat, PetscInt *d_nz, PetscInt *d_nnz, PetscInt *o_nz, PetscInt *o_nnz, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(d_nnz); - CHKFORTRANNULLINTEGER(o_nnz); - *ierr = MatMPIAIJSetPreallocation(*mat, *d_nz, d_nnz, *o_nz, o_nnz); -} - -PETSC_EXTERN void matxaijsetpreallocation_(Mat *A, PetscInt *bs, PetscInt dnnz[], PetscInt onnz[], PetscInt dnnzu[], PetscInt onnzu[], PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(dnnz); - CHKFORTRANNULLINTEGER(onnz); - CHKFORTRANNULLINTEGER(dnnzu); - CHKFORTRANNULLINTEGER(onnzu); - *ierr = MatXAIJSetPreallocation(*A, *bs, dnnz, onnz, dnnzu, onnzu); -} diff --git a/src/mat/impls/aij/mpi/mkl_cpardiso/mkl_cpardiso.c b/src/mat/impls/aij/mpi/mkl_cpardiso/mkl_cpardiso.c index cd5bfb87bd8..de6b4ac0531 100644 --- a/src/mat/impls/aij/mpi/mkl_cpardiso/mkl_cpardiso.c +++ b/src/mat/impls/aij/mpi/mkl_cpardiso/mkl_cpardiso.c @@ 
-68,6 +68,8 @@ static const char *Err_MSG_CPardiso(int errNo) } } +#define PetscCallCluster(f) PetscStackCallExternalVoid("cluster_sparse_solver", f); + /* * Internal data structure. * For more information check mkl_cpardiso manual. @@ -354,10 +356,9 @@ static PetscErrorCode MatDestroy_MKL_CPARDISO(Mat A) if (mat_mkl_cpardiso->CleanUp) { mat_mkl_cpardiso->phase = JOB_RELEASE_OF_ALL_MEMORY; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, NULL, NULL, NULL, mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, - mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err); + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, NULL, NULL, NULL, mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, + mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err)); } - if (mat_mkl_cpardiso->ConvertToTriples != MatCopy_seqaij_seqaij_MKL_CPARDISO) PetscCall(PetscFree3(mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, mat_mkl_cpardiso->a)); comm = MPI_Comm_f2c(mat_mkl_cpardiso->comm_mkl_cpardiso); PetscCallMPI(MPI_Comm_free(&comm)); @@ -385,9 +386,8 @@ static PetscErrorCode MatSolve_MKL_CPARDISO(Mat A, Vec b, Vec x) /* solve phase */ mat_mkl_cpardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, - mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err); - + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, + mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err)); PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". 
Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err)); PetscCall(VecRestoreArray(x, &xarray)); @@ -409,9 +409,8 @@ static PetscErrorCode MatForwardSolve_MKL_CPARDISO(Mat A, Vec b, Vec x) /* solve phase */ mat_mkl_cpardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, - mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err); - + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, + mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err)); PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err)); PetscCall(VecRestoreArray(x, &xarray)); @@ -433,9 +432,8 @@ static PetscErrorCode MatBackwardSolve_MKL_CPARDISO(Mat A, Vec b, Vec x) /* solve phase */ mat_mkl_cpardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, - mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err); - + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, + mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err)); PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". 
Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err)); PetscCall(VecRestoreArray(x, &xarray)); @@ -476,8 +474,8 @@ static PetscErrorCode MatMatSolve_MKL_CPARDISO(Mat A, Mat B, Mat X) /* solve phase */ mat_mkl_cpardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, - mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err); + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, + mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err)); PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err)); PetscCall(MatDenseRestoreArrayRead(B, &barray)); PetscCall(MatDenseRestoreArray(X, &xarray)); @@ -498,8 +496,8 @@ static PetscErrorCode MatFactorNumeric_MKL_CPARDISO(Mat F, Mat A, const MatFacto PetscCall((*mat_mkl_cpardiso->ConvertToTriples)(A, MAT_REUSE_MATRIX, &mat_mkl_cpardiso->nz, &mat_mkl_cpardiso->ia, &mat_mkl_cpardiso->ja, &mat_mkl_cpardiso->a)); mat_mkl_cpardiso->phase = JOB_NUMERICAL_FACTORIZATION; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, - mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, &mat_mkl_cpardiso->err); + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, + mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, &mat_mkl_cpardiso->err)); PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\". 
Please check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err)); mat_mkl_cpardiso->matstruc = SAME_NONZERO_PATTERN; @@ -679,9 +677,8 @@ static PetscErrorCode MatLUFactorSymbolic_AIJMKL_CPARDISO(Mat F, Mat A, IS r, IS /* analysis phase */ mat_mkl_cpardiso->phase = JOB_ANALYSIS; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, - mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err); - + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, + mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err)); PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\".Check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err)); mat_mkl_cpardiso->CleanUp = PETSC_TRUE; @@ -714,9 +711,8 @@ static PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_CPARDISO(Mat F, Mat A, IS /* analysis phase */ mat_mkl_cpardiso->phase = JOB_ANALYSIS; - cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, - mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err); - + PetscCallCluster(cluster_sparse_solver(mat_mkl_cpardiso->pt, &mat_mkl_cpardiso->maxfct, &mat_mkl_cpardiso->mnum, &mat_mkl_cpardiso->mtype, &mat_mkl_cpardiso->phase, &mat_mkl_cpardiso->n, mat_mkl_cpardiso->a, mat_mkl_cpardiso->ia, mat_mkl_cpardiso->ja, + mat_mkl_cpardiso->perm, &mat_mkl_cpardiso->nrhs, mat_mkl_cpardiso->iparm, &mat_mkl_cpardiso->msglvl, NULL, NULL, &mat_mkl_cpardiso->comm_mkl_cpardiso, (PetscInt *)&mat_mkl_cpardiso->err)); PetscCheck(mat_mkl_cpardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL Cluster PARDISO: err=%d, msg = \"%s\".Check manual", mat_mkl_cpardiso->err, Err_MSG_CPardiso(mat_mkl_cpardiso->err)); mat_mkl_cpardiso->CleanUp = PETSC_TRUE; @@ -925,7 +921,7 @@ static PetscErrorCode MatGetFactor_mpiaij_mkl_cpardiso(Mat A, MatFactorType ftyp PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_CPardiso(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_MKL_CPardiso(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERMKL_CPARDISO, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_mkl_cpardiso)); diff --git a/src/mat/impls/aij/mpi/mpiaij.c b/src/mat/impls/aij/mpi/mpiaij.c index 5d659a0c7b2..1cfa49bea55 100644 --- a/src/mat/impls/aij/mpi/mpiaij.c +++ b/src/mat/impls/aij/mpi/mpiaij.c @@ -418,7 +418,6 @@ PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat mat) PetscCall(PetscArraymove(ap1 + _i + 1, ap1 + _i, N - _i + 1)); \ rp1[_i] = col; \ ap1[_i] = value; \ - A->nonzerostate++; \ a_noinsert:; \ ailen[row] = nrow1; 
\ } while (0) @@ -463,7 +462,6 @@ PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat mat) PetscCall(PetscArraymove(ap2 + _i + 1, ap2 + _i, N - _i + 1)); \ rp2[_i] = col; \ ap2[_i] = value; \ - B->nonzerostate++; \ b_noinsert:; \ bilen[row] = nrow2; \ } while (0) @@ -580,8 +578,8 @@ PetscErrorCode MatSetValues_MPIAIJ(Mat mat, PetscInt m, const PetscInt im[], Pet bilen = b->ilen; bj = b->j; ba = b->a; - rp2 = bj + bi[row]; - ap2 = ba + bi[row]; + rp2 = PetscSafePointerPlusOffset(bj, bi[row]); + ap2 = PetscSafePointerPlusOffset(ba, bi[row]); rmax2 = bimax[row]; nrow2 = bilen[row]; low2 = 0; @@ -2155,7 +2153,7 @@ static PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) const PetscScalar *vb; PetscFunctionBegin; - PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vA)); + PetscCall(MatCreateVecs(a->A, NULL, &vA)); PetscCall(MatGetRowMaxAbs(a->A, vA, idx)); PetscCall(VecGetArrayWrite(vA, &va)); @@ -2165,7 +2163,7 @@ static PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) } } - PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vB)); + PetscCall(MatCreateVecs(a->B, NULL, &vB)); PetscCall(PetscMalloc1(m, &idxb)); PetscCall(MatGetRowMaxAbs(a->B, vB, idxb)); @@ -2192,13 +2190,12 @@ static PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) static PetscErrorCode MatGetRowSumAbs_MPIAIJ(Mat A, Vec v) { Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; - PetscInt m = A->rmap->n; Vec vB, vA; PetscFunctionBegin; - PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vA)); + PetscCall(MatCreateVecs(a->A, NULL, &vA)); PetscCall(MatGetRowSumAbs(a->A, vA)); - PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vB)); + PetscCall(MatCreateVecs(a->B, NULL, &vB)); PetscCall(MatGetRowSumAbs(a->B, vB)); PetscCall(VecAXPY(vA, 1.0, vB)); PetscCall(VecDestroy(&vB)); @@ -2865,7 +2862,8 @@ static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ, NULL, /*150*/ NULL, MatEliminateZeros_MPIAIJ, - MatGetRowSumAbs_MPIAIJ}; + MatGetRowSumAbs_MPIAIJ, + NULL}; static PetscErrorCode MatStoreValues_MPIAIJ(Mat mat) { @@ -3011,7 +3009,10 @@ PetscErrorCode MatDuplicate_MPIAIJ(Mat matin, MatDuplicateOption cpvalues, Mat * In fact, MatDuplicate only requires the matrix to be preallocated This may happen inside a DMCreateMatrix_Shell */ if (oldmat->lvec) PetscCall(VecDuplicate(oldmat->lvec, &a->lvec)); - if (oldmat->Mvctx) PetscCall(VecScatterCopy(oldmat->Mvctx, &a->Mvctx)); + if (oldmat->Mvctx) { + a->Mvctx = oldmat->Mvctx; + PetscCall(PetscObjectReference((PetscObject)oldmat->Mvctx)); + } PetscCall(MatDuplicate(oldmat->A, cpvalues, &a->A)); PetscCall(MatDuplicate(oldmat->B, cpvalues, &a->B)); } @@ -4011,7 +4012,7 @@ PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat B, const PetscInt i[], const Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in `MATMPIAIJ` format (the default parallel PETSc format). For good matrix assembly performance the user should preallocate the matrix storage by setting the parameters @@ -4171,8 +4172,8 @@ PetscErrorCode MatMPIAIJSetPreallocation(Mat B, PetscInt d_nz, const PetscInt d_ Notes: The `i`, `j`, and `a` arrays ARE copied by this routine into the internal format used by PETSc; - thus you CANNOT change the matrix entries by changing the values of a[] after you have - called this routine. Use `MatCreateMPIAIJWithSplitArray()` to avoid needing to copy the arrays. + thus you CANNOT change the matrix entries by changing the values of `a[]` after you have + called this routine. 
Use `MatCreateMPIAIJWithSplitArrays()` to avoid needing to copy the arrays. The `i` and `j` indices are 0 based, and `i` indices are indices corresponding to the local `j` array. @@ -4182,7 +4183,7 @@ PetscErrorCode MatMPIAIJSetPreallocation(Mat B, PetscInt d_nz, const PetscInt d_ `MatUpdateMPIAIJWithArrays()`, the column indices **must** be sorted. The format which is used for the sparse matrix input, is equivalent to a - row-major ordering.. i.e for the following matrix, the input data expected is + row-major ordering, i.e., for the following matrix, the input data expected is as shown .vb 1 0 0 @@ -4201,7 +4202,7 @@ PetscErrorCode MatMPIAIJSetPreallocation(Mat B, PetscInt d_nz, const PetscInt d_ v = {4,5,6} [size = 3] .ve -.seealso: [](ch_matrices), `Mat`, `MATMPIAIK`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`, +.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`, `MATMPIAIJ`, `MatCreateAIJ()`, `MatCreateMPIAIJWithSplitArrays()`, `MatUpdateMPIAIJWithArray()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()` @*/ PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, const PetscInt i[], const PetscInt j[], const PetscScalar a[], Mat *mat) @@ -4356,7 +4357,7 @@ PetscErrorCode MatUpdateMPIAIJWithArray(Mat mat, const PetscScalar v[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateAIJ - Creates a sparse parallel matrix in `MATAIJ` format (the default parallel PETSc format). For good matrix assembly performance the user should preallocate the matrix storage by setting the parameters @@ -4367,26 +4368,26 @@ PetscErrorCode MatUpdateMPIAIJWithArray(Mat mat, const PetscScalar v[]) Input Parameters: + comm - MPI communicator . m - number of local rows (or `PETSC_DECIDE` to have calculated if M is given) - This value should be the same as the local size used in creating the - y vector for the matrix-vector product y = Ax. + This value should be the same as the local size used in creating the + y vector for the matrix-vector product y = Ax. . n - This value should be the same as the local size used in creating the - x vector for the matrix-vector product y = Ax. (or `PETSC_DECIDE` to have - calculated if N is given) For square matrices n is almost always m. + x vector for the matrix-vector product y = Ax. (or `PETSC_DECIDE` to have + calculated if N is given). For square matrices n is almost always m. . M - number of global rows (or `PETSC_DETERMINE` to have calculated if m is given) . N - number of global columns (or `PETSC_DETERMINE` to have calculated if n is given) . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix - (same value is used for all local rows) + (same value is used for all local rows) . d_nnz - array containing the number of nonzeros in the various rows of the - DIAGONAL portion of the local submatrix (possibly different for each row) - or `NULL`, if `d_nz` is used to specify the nonzero structure. - The size of this array is equal to the number of local rows, i.e 'm'. + DIAGONAL portion of the local submatrix (possibly different for each row) + or `NULL`, if `d_nz` is used to specify the nonzero structure. + The size of this array is equal to the number of local rows, i.e., `m`. . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local - submatrix (same value is used for all local rows).
+ submatrix (same value is used for all local rows). - o_nnz - array containing the number of nonzeros in the various rows of the - OFF-DIAGONAL portion of the local submatrix (possibly different for - each row) or `NULL`, if `o_nz` is used to specify the nonzero - structure. The size of this array is equal to the number - of local rows, i.e 'm'. + OFF-DIAGONAL portion of the local submatrix (possibly different for + each row) or `NULL`, if `o_nz` is used to specify the nonzero + structure. The size of this array is equal to the number + of local rows, i.e., `m`. Output Parameter: . A - the matrix @@ -4395,8 +4396,8 @@ PetscErrorCode MatUpdateMPIAIJWithArray(Mat mat, const PetscScalar v[]) + -mat_no_inode - Do not use inodes . -mat_inode_limit - Sets inode limit (max limit=5) - -matmult_vecscatter_view - View the vecscatter (i.e., communication pattern) used in `MatMult()` of sparse parallel matrices. - See viewer types in manual of `MatView()`. Of them, ascii_matlab, draw or binary cause the vecscatter be viewed as a matrix. - Entry (i,j) is the size of message (in bytes) rank i sends to rank j in one `MatMult()` call. + See viewer types in the manual page of `MatView()`. Of them, ascii_matlab, draw or binary cause the `VecScatter` + to be viewed as a matrix. Entry (i,j) is the size of message (in bytes) rank i sends to rank j in one `MatMult()` call. Level: intermediate @@ -4415,13 +4416,16 @@ PetscErrorCode MatUpdateMPIAIJWithArray(Mat mat, const PetscScalar v[]) processor than it must be used on all processors that share the object for that argument. + If `m` and `n` are not `PETSC_DECIDE`, then the values determine the `PetscLayout` of the matrix and the ranges returned by + `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`. + The user MUST specify either the local or global matrix dimensions (possibly both). The parallel matrix is partitioned across processors such that the - first m0 rows belong to process 0, the next m1 rows belong to - process 1, the next m2 rows belong to process 2 etc.. where - m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores + first `m0` rows belong to process 0, the next `m1` rows belong to + process 1, the next `m2` rows belong to process 2, etc., where + `m0`, `m1`, `m2`, ... are the input parameter `m` on each MPI process, i.e., each MPI process stores values corresponding to [m x N] submatrix. The columns are logically partitioned with the n0 columns belonging @@ -4526,7 +4530,8 @@ PetscErrorCode MatUpdateMPIAIJWithArray(Mat mat, const PetscScalar v[]) hence pre-allocation is perfect.
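A minimal calling sketch (the per-row counts here are made up for a three-row local block, purely for illustration):
.vb
   Mat      A;
   PetscInt m       = 3;          /* rows owned by this MPI process */
   PetscInt d_nnz[] = {2, 3, 2};  /* per-row nonzeros in the diagonal block */
   PetscInt o_nnz[] = {1, 0, 1};  /* per-row nonzeros in the off-diagonal block */

   PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, m, m, PETSC_DETERMINE, PETSC_DETERMINE, 0, d_nnz, 0, o_nnz, &A));
   /* fill with MatSetValues(), then MatAssemblyBegin()/MatAssemblyEnd() */
.ve
When the `d_nnz`/`o_nnz` arrays are supplied, the scalar `d_nz`/`o_nz` values are ignored, so 0 is a conventional placeholder for them.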
.seealso: [](ch_matrices), `Mat`, [Sparse Matrix Creation](sec_matsparse), `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`, - `MATMPIAIJ`, `MatCreateMPIAIJWithArrays()` + `MATMPIAIJ`, `MatCreateMPIAIJWithArrays()`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, + `MatGetOwnershipRangesColumn()`, `PetscLayout` @*/ PetscErrorCode MatCreateAIJ(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A) { @@ -5064,7 +5069,7 @@ PetscErrorCode MatCreateMPIAIJSumSeqAIJSymbolic(MPI_Comm comm, Mat seqmat, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateMPIAIJSumSeqAIJ - Creates a `MATMPIAIJ` matrix by adding sequential matrices from each processor @@ -5085,7 +5090,7 @@ PetscErrorCode MatCreateMPIAIJSumSeqAIJSymbolic(MPI_Comm comm, Mat seqmat, Petsc Note: The dimensions of the sequential matrix in each processor MUST be the same. The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be - destroyed when mpimat is destroyed. Call `PetscObjectQuery()` to access seqmat. + destroyed when `mpimat` is destroyed. Call `PetscObjectQuery()` to access `seqmat`. .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()` @*/ @@ -6548,10 +6553,14 @@ PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat mat, PetscCount coo_n, PetscInt PetscInt n2 = nroots, *i2, *j2; /* Buffers for received COOs from other ranks, along with a permutation array */ PetscCount *perm2; /* Though PetscInt is enough for remote entries, we use PetscCount here as we want to reuse MatSplitEntries_Internal() */ PetscCall(PetscMalloc3(n2, &i2, n2, &j2, n2, &perm2)); - PetscCall(PetscSFReduceWithMemTypeBegin(sf2, MPIU_INT, PETSC_MEMTYPE_HOST, i1 + rem, PETSC_MEMTYPE_HOST, i2, MPI_REPLACE)); - PetscCall(PetscSFReduceEnd(sf2, MPIU_INT, i1 + rem, i2, MPI_REPLACE)); - PetscCall(PetscSFReduceWithMemTypeBegin(sf2, MPIU_INT, PETSC_MEMTYPE_HOST, j1 + rem, PETSC_MEMTYPE_HOST, j2, MPI_REPLACE)); - PetscCall(PetscSFReduceEnd(sf2, MPIU_INT, j1 + rem, j2, MPI_REPLACE)); + PetscAssert(rem == 0 || i1 != NULL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot add nonzero offset to null"); + PetscAssert(rem == 0 || j1 != NULL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot add nonzero offset to null"); + PetscInt *i1prem = i1 ? i1 + rem : NULL; /* silence ubsan warnings about pointer arithmetic on null pointer */ + PetscInt *j1prem = j1 ? j1 + rem : NULL; + PetscCall(PetscSFReduceWithMemTypeBegin(sf2, MPIU_INT, PETSC_MEMTYPE_HOST, i1prem, PETSC_MEMTYPE_HOST, i2, MPI_REPLACE)); + PetscCall(PetscSFReduceEnd(sf2, MPIU_INT, i1prem, i2, MPI_REPLACE)); + PetscCall(PetscSFReduceWithMemTypeBegin(sf2, MPIU_INT, PETSC_MEMTYPE_HOST, j1prem, PETSC_MEMTYPE_HOST, j2, MPI_REPLACE)); + PetscCall(PetscSFReduceEnd(sf2, MPIU_INT, j1prem, j2, MPI_REPLACE)); PetscCall(PetscFree(offsets)); PetscCall(PetscFree2(sendto, nentries)); @@ -6562,8 +6571,10 @@ PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat mat, PetscCount coo_n, PetscInt /* sf2 only sends contiguous leafdata to contiguous rootdata. We record the permutation which will be used to fill leafdata */ PetscCount *Cperm1; + PetscAssert(rem == 0 || perm1 != NULL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot add nonzero offset to null"); + PetscCount *perm1prem = perm1 ? 
perm1 + rem : NULL; PetscCall(PetscMalloc1(nleaves, &Cperm1)); - PetscCall(PetscArraycpy(Cperm1, perm1 + rem, nleaves)); + PetscCall(PetscArraycpy(Cperm1, perm1prem, nleaves)); /* Support for HYPRE matrices, kind of a hack. Swap min column with diagonal so that diagonal values will go first */ @@ -6921,7 +6932,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat B) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateMPIAIJWithSplitArrays - creates a `MATMPIAIJ` matrix using arrays that contain the "diagonal" and "off-diagonal" part of the matrix in CSR format. @@ -6948,7 +6959,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat B) Level: advanced Notes: - The `i`, `j`, and `a` arrays ARE NOT copied by this routine into the internal format used by PETSc. The user + The `i`, `j`, and `a` arrays ARE NOT copied by this routine into the internal format used by PETSc (even in Fortran). The user must free the arrays once the matrix has been destroyed and not before. The `i` and `j` indices are 0 based @@ -7857,7 +7868,7 @@ PETSC_INTERN PetscErrorCode MatCreateGraph_Simple_AIJ(Mat Amat, PetscBool symmet AA[k / bs] = val; } grow = Istart / bs + brow / bs; - PetscCall(MatSetValues(Gmat, 1, &grow, n / bs, AJ, AA, INSERT_VALUES)); + PetscCall(MatSetValues(Gmat, 1, &grow, n / bs, AJ, AA, ADD_VALUES)); } // off-diag if (ismpiaij) { @@ -7899,7 +7910,7 @@ PETSC_INTERN PetscErrorCode MatCreateGraph_Simple_AIJ(Mat Amat, PetscBool symmet } } grow = Istart / bs + brow / bs; - PetscCall(MatSetValues(Gmat, 1, &grow, nc, AJ, AA, INSERT_VALUES)); + PetscCall(MatSetValues(Gmat, 1, &grow, nc, AJ, AA, ADD_VALUES)); } } PetscCall(MatAssemblyBegin(Gmat, MAT_FINAL_ASSEMBLY)); @@ -8028,7 +8039,6 @@ PETSC_INTERN PetscErrorCode MatCreateGraph_Simple_AIJ(Mat Amat, PetscBool symmet /* Special version for direct calls from Fortran */ -#include /* Change these macros so can be used in void function */ /* Identical to PetscCallVoid, except it assigns to *_ierr */ diff --git a/src/mat/impls/aij/mpi/mumps/mumps.c b/src/mat/impls/aij/mpi/mumps/mumps.c index a6e24b1f4e6..753de93f0c3 100644 --- a/src/mat/impls/aij/mpi/mumps/mumps.c +++ b/src/mat/impls/aij/mpi/mumps/mumps.c @@ -2225,7 +2225,7 @@ static PetscErrorCode MatSetFromOptions_MUMPS(Mat F, Mat A) */ #if PETSC_PKG_MUMPS_VERSION_GE(5, 6, 2) && defined(PETSC_HAVE_MUMPS_AVOID_MPI_IN_PLACE) mumps->ICNTL20 = 10; -#elif PETSC_PKG_MUMPS_VERSION_LT(5, 3, 0) || (defined(PETSC_HAVE_MPICH_NUMVERSION) && (PETSC_HAVE_MPICH_NUMVERSION < 40000101)) +#elif PETSC_PKG_MUMPS_VERSION_LT(5, 3, 0) || (defined(PETSC_HAVE_MPICH) && (MPICH_NUMVERSION < 40000101)) mumps->ICNTL20 = 0; /* Centralized dense RHS*/ #else mumps->ICNTL20 = 10; /* Distributed dense RHS*/ @@ -2245,7 +2245,7 @@ static PetscErrorCode MatSetFromOptions_MUMPS(Mat F, Mat A) PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_25", "ICNTL(25): computes a solution of a deficient matrix and a null space basis", "None", mumps->id.ICNTL(25), &mumps->id.ICNTL(25), NULL)); PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_26", "ICNTL(26): drives the solution phase if a Schur complement matrix", "None", mumps->id.ICNTL(26), &mumps->id.ICNTL(26), NULL)); PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_27", "ICNTL(27): controls the blocking size for multiple right-hand sides", "None", mumps->id.ICNTL(27), &mumps->id.ICNTL(27), NULL)); - PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_28", "ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering", "None", mumps->id.ICNTL(28), 
&mumps->id.ICNTL(28), NULL)); + PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_28", "ICNTL(28): use 1 for sequential analysis and ICNTL(7) ordering, or 2 for parallel analysis and ICNTL(29) ordering", "None", mumps->id.ICNTL(28), &mumps->id.ICNTL(28), NULL)); PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_29", "ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis", "None", mumps->id.ICNTL(29), &mumps->id.ICNTL(29), NULL)); /* PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL)); */ /* call MatMumpsGetInverse() directly */ PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_31", "ICNTL(31): indicates which factors may be discarded during factorization", "None", mumps->id.ICNTL(31), &mumps->id.ICNTL(31), NULL)); @@ -2254,6 +2254,7 @@ static PetscErrorCode MatSetFromOptions_MUMPS(Mat F, Mat A) PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_35", "ICNTL(35): activates Block Low Rank (BLR) based factorization", "None", mumps->id.ICNTL(35), &mumps->id.ICNTL(35), NULL)); PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_36", "ICNTL(36): choice of BLR factorization variant", "None", mumps->id.ICNTL(36), &mumps->id.ICNTL(36), NULL)); PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_38", "ICNTL(38): estimated compression rate of LU factors with BLR", "None", mumps->id.ICNTL(38), &mumps->id.ICNTL(38), NULL)); + PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_48", "ICNTL(48): multithreading with tree parallelism", "None", mumps->id.ICNTL(48), &mumps->id.ICNTL(48), NULL)); PetscCall(PetscOptionsMUMPSInt("-mat_mumps_icntl_58", "ICNTL(58): defines options for symbolic factorization", "None", mumps->id.ICNTL(58), &mumps->id.ICNTL(58), NULL)); PetscCall(PetscOptionsReal("-mat_mumps_cntl_1", "CNTL(1): relative pivoting threshold", "None", mumps->id.CNTL(1), &mumps->id.CNTL(1), NULL)); @@ -2546,6 +2547,7 @@ static PetscErrorCode MatView_MUMPS(Mat A, PetscViewer viewer) PetscCall(PetscViewerASCIIPrintf(viewer, " ICNTL(35) (activate BLR based factorization): %d\n", mumps->id.ICNTL(35))); PetscCall(PetscViewerASCIIPrintf(viewer, " ICNTL(36) (choice of BLR factorization variant): %d\n", mumps->id.ICNTL(36))); PetscCall(PetscViewerASCIIPrintf(viewer, " ICNTL(38) (estimated compression rate of LU factors): %d\n", mumps->id.ICNTL(38))); + PetscCall(PetscViewerASCIIPrintf(viewer, " ICNTL(48) (multithreading with tree parallelism): %d\n", mumps->id.ICNTL(48))); PetscCall(PetscViewerASCIIPrintf(viewer, " ICNTL(58) (options for symbolic factorization): %d\n", mumps->id.ICNTL(58))); PetscCall(PetscViewerASCIIPrintf(viewer, " CNTL(1) (relative pivoting threshold): %g\n", (double)mumps->id.CNTL(1))); @@ -2806,7 +2808,7 @@ PetscErrorCode MatMumpsSetIcntl(Mat F, PetscInt icntl, PetscInt ival) PetscCheck(F->factortype, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix"); PetscValidLogicalCollectiveInt(F, icntl, 2); PetscValidLogicalCollectiveInt(F, ival, 3); - PetscCheck((icntl >= 1 && icntl <= 38) || icntl == 58, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_WRONG, "Unsupported ICNTL value %" PetscInt_FMT, icntl); + PetscCheck((icntl >= 1 && icntl <= 38) || icntl == 48 || icntl == 58, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_WRONG, "Unsupported ICNTL value %" PetscInt_FMT, icntl); PetscTryMethod(F, "MatMumpsSetIcntl_C", (Mat, PetscInt, PetscInt), (F, icntl, ival)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -2834,7 +2836,7 @@ PetscErrorCode MatMumpsGetIcntl(Mat F, 
PetscInt icntl, PetscInt *ival) PetscCheck(F->factortype, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix"); PetscValidLogicalCollectiveInt(F, icntl, 2); PetscAssertPointer(ival, 3); - PetscCheck((icntl >= 1 && icntl <= 38) || icntl == 58, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_WRONG, "Unsupported ICNTL value %" PetscInt_FMT, icntl); + PetscCheck((icntl >= 1 && icntl <= 38) || icntl == 48 || icntl == 58, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_WRONG, "Unsupported ICNTL value %" PetscInt_FMT, icntl); PetscUseMethod(F, "MatMumpsGetIcntl_C", (Mat, PetscInt, PetscInt *), (F, icntl, ival)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -3252,8 +3254,8 @@ PetscErrorCode MatMumpsGetNullPivots(Mat F, PetscInt *size, PetscInt **array) Options Database Keys: + -mat_mumps_icntl_1 - ICNTL(1): output stream for error messages . -mat_mumps_icntl_2 - ICNTL(2): output stream for diagnostic printing, statistics, and warning -. -mat_mumps_icntl_3 - ICNTL(3): output stream for global information, collected on the host -. -mat_mumps_icntl_4 - ICNTL(4): level of printing (0 to 4) +. -mat_mumps_icntl_3 - ICNTL(3): output stream for global information, collected on the host +. -mat_mumps_icntl_4 - ICNTL(4): level of printing (0 to 4) . -mat_mumps_icntl_6 - ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7) . -mat_mumps_icntl_7 - ICNTL(7): computes a symmetric permutation in sequential analysis, 0=AMD, 2=AMF, 3=Scotch, 4=PORD, 5=Metis, 6=QAMD, and 7=auto Use -pc_factor_mat_ordering_type to have PETSc perform the ordering (sequential only) @@ -3271,7 +3273,7 @@ PetscErrorCode MatMumpsGetNullPivots(Mat F, PetscInt *size, PetscInt **array) . -mat_mumps_icntl_24 - ICNTL(24): detection of null pivot rows (0 or 1) . -mat_mumps_icntl_25 - ICNTL(25): compute a solution of a deficient matrix and a null space basis . -mat_mumps_icntl_26 - ICNTL(26): drives the solution phase if a Schur complement matrix -. -mat_mumps_icntl_28 - ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering +. -mat_mumps_icntl_28 - ICNTL(28): use 1 for sequential analysis and ICNTL(7) ordering, or 2 for parallel analysis and ICNTL(29) ordering . -mat_mumps_icntl_29 - ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis . -mat_mumps_icntl_30 - ICNTL(30): compute user-specified set of entries in inv(A) . -mat_mumps_icntl_31 - ICNTL(31): indicates which factors may be discarded during factorization @@ -3279,9 +3281,10 @@ PetscErrorCode MatMumpsGetNullPivots(Mat F, PetscInt *size, PetscInt **array) . -mat_mumps_icntl_35 - ICNTL(35): level of activation of BLR (Block Low-Rank) feature . -mat_mumps_icntl_36 - ICNTL(36): controls the choice of BLR factorization variant . -mat_mumps_icntl_38 - ICNTL(38): sets the estimated compression rate of LU factors with BLR +. -mat_mumps_icntl_48 - ICNTL(48): multithreading with tree parallelism . -mat_mumps_icntl_58 - ICNTL(58): options for symbolic factorization . -mat_mumps_cntl_1 - CNTL(1): relative pivoting threshold -. -mat_mumps_cntl_2 - CNTL(2): stopping criterion of refinement +. -mat_mumps_cntl_2 - CNTL(2): stopping criterion of refinement . -mat_mumps_cntl_3 - CNTL(3): absolute pivoting threshold . -mat_mumps_cntl_4 - CNTL(4): value for static pivoting . 
-mat_mumps_cntl_5 - CNTL(5): fixation for null pivots @@ -3802,7 +3805,7 @@ static PetscErrorCode MatGetFactor_nest_mumps(Mat A, MatFactorType ftype, Mat *F PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MUMPS(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_MUMPS(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERMUMPS, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mumps)); diff --git a/src/mat/impls/aij/mpi/pastix/pastix.c b/src/mat/impls/aij/mpi/pastix/pastix.c index 7a5e1efcb40..020af3a5dfe 100644 --- a/src/mat/impls/aij/mpi/pastix/pastix.c +++ b/src/mat/impls/aij/mpi/pastix/pastix.c @@ -616,7 +616,7 @@ static PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A, MatFactorType ftype, M PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Pastix(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Pastix(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_pastix)); diff --git a/src/mat/impls/aij/mpi/strumpack/strumpack.c b/src/mat/impls/aij/mpi/strumpack/strumpack.c index 23d46824d73..d00d438c700 100644 --- a/src/mat/impls/aij/mpi/strumpack/strumpack.c +++ b/src/mat/impls/aij/mpi/strumpack/strumpack.c @@ -1237,7 +1237,7 @@ static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack)); diff --git a/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c b/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c index 7b5e24bdf28..999eafbdbc7 100644 --- a/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c +++ b/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c @@ -26,7 +26,7 @@ EXTERN_C_BEGIN #define allocateA_dist zallocateA_dist #define Create_CompRowLoc_Matrix_dist zCreate_CompRowLoc_Matrix_dist #define SLU SLU_Z - #if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) + #if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) #define DeAllocLlu_3d zDeAllocLlu_3d #define DeAllocGlu_3d zDeAllocGlu_3d #define Destroy_A3d_gathered_on_2d zDestroy_A3d_gathered_on_2d @@ -50,7 +50,7 @@ EXTERN_C_BEGIN #define allocateA_dist sallocateA_dist #define Create_CompRowLoc_Matrix_dist sCreate_CompRowLoc_Matrix_dist #define SLU SLU_S - #if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) + #if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) #define DeAllocLlu_3d sDeAllocLlu_3d #define DeAllocGlu_3d sDeAllocGlu_3d #define Destroy_A3d_gathered_on_2d sDestroy_A3d_gathered_on_2d @@ -74,7 +74,7 @@ EXTERN_C_BEGIN #define allocateA_dist dallocateA_dist #define Create_CompRowLoc_Matrix_dist dCreate_CompRowLoc_Matrix_dist #define SLU SLU_D - #if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) + #if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) #define DeAllocLlu_3d dDeAllocLlu_3d #define DeAllocGlu_3d dDeAllocGlu_3d #define Destroy_A3d_gathered_on_2d dDestroy_A3d_gathered_on_2d @@ -90,7 +90,7 @@ PETSC_PRAGMA_DIAGNOSTIC_IGNORED_END() typedef struct { int_t nprow, npcol, *row, *col; gridinfo_t grid; -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) PetscBool use3d; int_t npdep; /* replication factor, must be power of two */ gridinfo3d_t grid3d; @@ -138,7 +138,7 @@ typedef struct { MPI_Comm comm; PetscBool busy; gridinfo_t grid; -#if 
PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) PetscBool use3d; gridinfo3d_t grid3d; #endif @@ -152,7 +152,7 @@ PETSC_EXTERN PetscMPIInt MPIAPI Petsc_Superlu_dist_keyval_DeleteFn(MPI_Comm comm PetscFunctionBegin; if (keyval != Petsc_Superlu_dist_keyval) SETERRMPI(PETSC_COMM_SELF, PETSC_ERR_ARG_CORRUPT, "Unexpected keyval"); -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (context->use3d) { PetscStackCallExternalVoid("SuperLU_DIST:superlu_gridexit3d", superlu_gridexit3d(&context->grid3d)); } else @@ -200,31 +200,22 @@ static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A) #endif PetscStackCallExternalVoid("SuperLU_DIST:SolveFinalize", SolveFinalize(&lu->options, &lu->SOLVEstruct)); } -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) { - if (lu->grid3d.zscp.Iam == 0) { #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) - if (lu->singleprecision) PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->sLUstruct)); - else - #endif - PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->LUstruct)); - } else { - #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) - if (lu->singleprecision) { - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocLlu_3d", sDeAllocLlu_3d(lu->A_sup.ncol, &lu->sLUstruct, &lu->grid3d)); - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocGlu_3d", sDeAllocGlu_3d(&lu->sLUstruct)); - } else - #endif - { - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocLlu_3d", DeAllocLlu_3d(lu->A_sup.ncol, &lu->LUstruct, &lu->grid3d)); - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocGlu_3d", DeAllocGlu_3d(&lu->LUstruct)); - } - } - #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) - if (lu->singleprecision) PetscStackCallExternalVoid("SuperLU_DIST:Destroy_A3d_gathered_on_2d", sDestroy_A3d_gathered_on_2d(&lu->sSOLVEstruct, &lu->grid3d)); - else + if (lu->singleprecision) { + PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->sLUstruct)); + PetscStackCallExternalVoid("SuperLU_DIST:Destroy_A3d_gathered_on_2d", sDestroy_A3d_gathered_on_2d(&lu->sSOLVEstruct, &lu->grid3d)); + PetscStackCallExternalVoid("SuperLU_DIST:ScalePermstructFree", sScalePermstructFree(&lu->sScalePermstruct)); + PetscStackCallExternalVoid("SuperLU_DIST:LUstructFree", sLUstructFree(&lu->sLUstruct)); + } else #endif + { + PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->LUstruct)); PetscStackCallExternalVoid("SuperLU_DIST:Destroy_A3d_gathered_on_2d", Destroy_A3d_gathered_on_2d(&lu->SOLVEstruct, &lu->grid3d)); + PetscStackCallExternalVoid("SuperLU_DIST:ScalePermstructFree", ScalePermstructFree(&lu->ScalePermstruct)); + PetscStackCallExternalVoid("SuperLU_DIST:LUstructFree", LUstructFree(&lu->LUstruct)); + } } else #endif #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) @@ -241,7 +232,7 @@ static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A) } /* Release the SuperLU_DIST process grid only if the matrix has its own copy, that is it is not in the communicator context */ if (lu->comm_superlu) { -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) { PetscStackCallExternalVoid("SuperLU_DIST:superlu_gridexit3d", superlu_gridexit3d(&lu->grid3d)); } else @@ -315,26 +306,26 @@ static PetscErrorCode MatSolve_SuperLU_DIST(Mat A, Vec b_mpi, Vec x) #endif 
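/* For orientation in the solve path below: a factorization set up with -mat_superlu_dist_3d dispatches to the 3-D driver pgssvx3d() (psgssvx3d() in the mixed-precision case), everything else to the 2-D driver pgssvx(); the 3-D path is compiled only against SuperLU_DIST >= 9.0.0, and each branch checks the returned 'info' with PetscCheck() itself. */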
PetscStackCallExternalVoid("SuperLU_DIST:PStatInit", PStatInit(&stat)); /* Initialize the statistics variables. */ -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) && !PetscDefined(MISSING_GETLINE) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) { #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) if (lu->singleprecision) PetscStackCallExternalVoid("SuperLU_DIST:pgssvx3d", psgssvx3d(&lu->options, &lu->A_sup, &lu->sScalePermstruct, lu->sbptr, m, 1, &lu->grid3d, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &info)); else #endif PetscStackCallExternalVoid("SuperLU_DIST:pgssvx3d", pgssvx3d(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, m, 1, &lu->grid3d, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info)); + PetscCheck(!info, PETSC_COMM_SELF, PETSC_ERR_LIB, "pgssvx3d fails, info: %d", info); } else #endif + { #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) - if (lu->singleprecision) - PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", psgssvx(&lu->options, &lu->A_sup, &lu->sScalePermstruct, lu->sbptr, m, 1, &lu->grid, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &info)); - else + if (lu->singleprecision) PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", psgssvx(&lu->options, &lu->A_sup, &lu->sScalePermstruct, lu->sbptr, m, 1, &lu->grid, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &info)); + else #endif - PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, m, 1, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info)); - PetscCheck(!info, PETSC_COMM_SELF, PETSC_ERR_LIB, "pdgssvx fails, info: %d", info); - + PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, m, 1, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info)); + PetscCheck(!info, PETSC_COMM_SELF, PETSC_ERR_LIB, "pgssvx fails, info: %d", info); + } if (lu->options.PrintStat) PetscStackCallExternalVoid("SuperLU_DIST:PStatPrint", PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */ PetscStackCallExternalVoid("SuperLU_DIST:PStatFree", PStatFree(&stat)); - #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) if (lu->singleprecision) { PetscInt n; @@ -385,12 +376,11 @@ static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A, Mat B_mpi, Mat X) PetscStackCallExternalVoid("SuperLU_DIST:PStatInit", PStatInit(&stat)); /* Initialize the statistics variables. 
*/ PetscCall(MatDenseGetArray(X, &bptr)); -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) && !PetscDefined(MISSING_GETLINE) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) PetscStackCallExternalVoid("SuperLU_DIST:pgssvx3d", pgssvx3d(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, m, nrhs, &lu->grid3d, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info)); else #endif PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, m, nrhs, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info)); - PetscCheck(!info, PETSC_COMM_SELF, PETSC_ERR_LIB, "pdgssvx fails, info: %d", info); PetscCall(MatDenseRestoreArray(X, &bptr)); @@ -488,30 +478,17 @@ static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F, Mat A, const MatFac if (lu->FactPattern == SamePattern_SameRowPerm) { lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */ } else if (lu->FactPattern == SamePattern) { -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) { - if (lu->grid3d.zscp.Iam == 0) { - #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) - if (lu->singleprecision) { - PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->sLUstruct)); - PetscStackCallExternalVoid("SuperLU_DIST:SolveFinalize", sSolveFinalize(&lu->options, &lu->sSOLVEstruct)); - } else - #endif - { - PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->LUstruct)); - PetscStackCallExternalVoid("SuperLU_DIST:SolveFinalize", SolveFinalize(&lu->options, &lu->SOLVEstruct)); - } - } else { #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) - if (lu->singleprecision) { - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocLlu_3d", sDeAllocLlu_3d(lu->A_sup.ncol, &lu->sLUstruct, &lu->grid3d)); - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocGlu_3d", sDeAllocGlu_3d(&lu->sLUstruct)); - } else + if (lu->singleprecision) { + PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->sLUstruct)); + PetscStackCallExternalVoid("SuperLU_DIST:SolveFinalize", sSolveFinalize(&lu->options, &lu->sSOLVEstruct)); + } else #endif - { - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocLlu_3d", DeAllocLlu_3d(lu->A_sup.ncol, &lu->LUstruct, &lu->grid3d)); - PetscStackCallExternalVoid("SuperLU_DIST:DeAllocGlu_3d", DeAllocGlu_3d(&lu->LUstruct)); - } + { + PetscStackCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->LUstruct)); + PetscStackCallExternalVoid("SuperLU_DIST:SolveFinalize", SolveFinalize(&lu->options, &lu->SOLVEstruct)); } } else #endif @@ -564,7 +541,7 @@ static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F, Mat A, const MatFac /* Factor the matrix. */ PetscStackCallExternalVoid("SuperLU_DIST:PStatInit", PStatInit(&stat)); /* Initialize the statistics variables. 
*/ -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) && !PetscDefined(MISSING_GETLINE) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) { #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) if (lu->singleprecision) PetscStackCallExternalVoid("SuperLU_DIST:pgssvx3d", psgssvx3d(&lu->options, &lu->A_sup, &lu->sScalePermstruct, 0, A->rmap->n, 0, &lu->grid3d, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &sinfo)); @@ -573,12 +550,13 @@ static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F, Mat A, const MatFac PetscStackCallExternalVoid("SuperLU_DIST:pgssvx3d", pgssvx3d(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, A->rmap->n, 0, &lu->grid3d, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo)); } else #endif + { #if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE) - if (lu->singleprecision) - PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", psgssvx(&lu->options, &lu->A_sup, &lu->sScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &sinfo)); - else + if (lu->singleprecision) PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", psgssvx(&lu->options, &lu->A_sup, &lu->sScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &sinfo)); + else #endif - PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo)); + PetscStackCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo)); + } if (sinfo > 0) { PetscCheck(!A->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_MAT_LU_ZRPVT, "Zero pivot in row %d", sinfo); if (sinfo <= lu->A_sup.ncol) { @@ -729,14 +707,13 @@ static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F, Mat A, IS r, IS c, if (size == lu->nprow * lu->npcol) break; lu->nprow--; } -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) lu->use3d = PETSC_FALSE; lu->npdep = 1; #endif -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) PetscCall(PetscOptionsBool("-mat_superlu_dist_3d", "Use SuperLU_DIST 3D distribution", "None", lu->use3d, &lu->use3d, NULL)); - PetscCheck(!PetscDefined(MISSING_GETLINE) || !lu->use3d, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP_SYS, "-mat_superlu_dist_3d requires a system with a getline() implementation"); if (lu->use3d) { PetscInt t; PetscCall(PetscOptionsInt("-mat_superlu_dist_d", "Number of z entries in processor partition", "None", lu->npdep, (PetscInt *)&lu->npdep, NULL)); @@ -755,14 +732,14 @@ static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F, Mat A, IS r, IS c, #endif PetscCall(PetscOptionsInt("-mat_superlu_dist_r", "Number rows in processor partition", "None", lu->nprow, (PetscInt *)&lu->nprow, NULL)); PetscCall(PetscOptionsInt("-mat_superlu_dist_c", "Number columns in processor partition", "None", lu->npcol, (PetscInt *)&lu->npcol, NULL)); -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) PetscCheck(size == lu->nprow * lu->npcol * lu->npdep, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of processes %d must equal to nprow %lld * npcol %lld * npdep %lld", size, (long long)lu->nprow, (long long)lu->npcol, (long long)lu->npdep); #else PetscCheck(size == lu->nprow * lu->npcol, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of processes %d must equal to nprow %lld * npcol %lld", size, 
(long long)lu->nprow, (long long)lu->npcol); #endif /* end of adding additional options */ -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) { PetscStackCallExternalVoid("SuperLU_DIST:superlu_gridinit3d", superlu_gridinit3d(context ? context->comm : lu->comm_superlu, lu->nprow, lu->npcol, lu->npdep, &lu->grid3d)); if (context) { @@ -773,7 +750,7 @@ static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F, Mat A, IS r, IS c, #endif PetscStackCallExternalVoid("SuperLU_DIST:superlu_gridinit", superlu_gridinit(context ? context->comm : lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid)); if (context) context->grid = lu->grid; -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) } #endif PetscCall(PetscInfo(NULL, "Duplicating a communicator for SuperLU_DIST and calling superlu_gridinit()\n")); @@ -839,7 +816,7 @@ static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A, PetscViewer viewer) /* would love to use superlu 'IFMT' macro but it looks like it's inconsistently applied, the * format spec for int64_t is set to %d for whatever reason */ PetscCall(PetscViewerASCIIPrintf(viewer, " Process grid nprow %lld x npcol %lld \n", (long long)lu->nprow, (long long)lu->npcol)); -#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(7, 2, 0) +#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0) if (lu->use3d) PetscCall(PetscViewerASCIIPrintf(viewer, " Using 3d decomposition with npdep %lld \n", (long long)lu->npdep)); #endif @@ -990,7 +967,7 @@ static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A, MatFactorType ftype, PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERSUPERLU_DIST, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_superlu_dist)); diff --git a/src/mat/impls/aij/seq/aij.c b/src/mat/impls/aij/seq/aij.c index c7872da4ab9..b66bc92c4e6 100644 --- a/src/mat/impls/aij/seq/aij.c +++ b/src/mat/impls/aij/seq/aij.c @@ -468,7 +468,6 @@ PetscErrorCode MatSetValues_SeqAIJ(Mat A, PetscInt m, const PetscInt im[], Petsc ap[i] = value; } low = i + 1; - A->nonzerostate++; noinsert:; } ailen[row] = nrow; @@ -1873,7 +1872,7 @@ static PetscErrorCode MatInvertVariableBlockDiagonal_SeqAIJ(Mat A, PetscInt nblo } ncnt += bsizes[i]; diag += bsizes[i] * bsizes[i]; - flops += 2 * PetscPowInt(bsizes[i], 3) / 3; + flops += 2 * PetscPowInt64(bsizes[i], 3) / 3; } PetscCall(PetscLogFlops(flops)); if (bsizemax > 7) PetscCall(PetscFree2(v_work, v_pivots)); @@ -3637,7 +3636,8 @@ static struct _MatOps MatOps_Values = {MatSetValues_SeqAIJ, NULL, /*150*/ MatTransposeSymbolic_SeqAIJ, MatEliminateZeros_SeqAIJ, - MatGetRowSumAbs_SeqAIJ}; + MatGetRowSumAbs_SeqAIJ, + NULL}; static PetscErrorCode MatSeqAIJSetColumnIndices_SeqAIJ(Mat mat, PetscInt *indices) { @@ -3850,7 +3850,7 @@ PetscErrorCode MatRetrieveValues(Mat mat) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateSeqAIJ - Creates a sparse matrix in `MATSEQAIJ` (compressed row) format (the default parallel PETSc format). 
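(Concerning the `PetscPowInt64()` switch in the `MatInvertVariableBlockDiagonal_SeqAIJ()` flop count above: with 32-bit `PetscInt` arithmetic the intermediate `2 * b^3` overflows already at a block size of b = 1024, since 2 * 1024^3 = 2^31 exceeds the largest signed 32-bit value, 2^31 - 1. Computing the cube in 64 bits keeps the count exact. A minimal sketch of the hazard, with hypothetical variable names:
.vb
   int        b   = 1024;
   int        bad = 2 * b * b * b;                /* signed overflow: 2^31 does not fit in int */
   PetscInt64 ok  = 2 * PetscPowInt64(b, 3) / 3;  /* evaluated in 64 bits: safe */
.ve
)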
For good matrix assembly performance the user should preallocate the matrix storage by setting the parameter `nz` @@ -3906,7 +3906,7 @@ PetscErrorCode MatCreateSeqAIJ(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt n PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqAIJSetPreallocation - For good matrix assembly performance the user should preallocate the matrix storage by setting the parameter nz (or the array nnz). By setting these parameters accurately, performance @@ -4412,7 +4412,7 @@ PETSC_INTERN PetscErrorCode MatProductSetFromOptions_IS_XAIJ(Mat); .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArray()`, `MatSeqAIJGetArrayF90()` @*/ -PetscErrorCode MatSeqAIJGetArray(Mat A, PetscScalar **array) +PetscErrorCode MatSeqAIJGetArray(Mat A, PetscScalar *array[]) { Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; @@ -4441,7 +4441,7 @@ PetscErrorCode MatSeqAIJGetArray(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayF90()` @*/ -PetscErrorCode MatSeqAIJRestoreArray(Mat A, PetscScalar **array) +PetscErrorCode MatSeqAIJRestoreArray(Mat A, PetscScalar *array[]) { Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; @@ -4471,7 +4471,7 @@ PetscErrorCode MatSeqAIJRestoreArray(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayRead()` @*/ -PetscErrorCode MatSeqAIJGetArrayRead(Mat A, const PetscScalar **array) +PetscErrorCode MatSeqAIJGetArrayRead(Mat A, const PetscScalar *array[]) { Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; @@ -4499,7 +4499,7 @@ PetscErrorCode MatSeqAIJGetArrayRead(Mat A, const PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()` @*/ -PetscErrorCode MatSeqAIJRestoreArrayRead(Mat A, const PetscScalar **array) +PetscErrorCode MatSeqAIJRestoreArrayRead(Mat A, const PetscScalar *array[]) { Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; @@ -4527,7 +4527,7 @@ PetscErrorCode MatSeqAIJRestoreArrayRead(Mat A, const PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayRead()` @*/ -PetscErrorCode MatSeqAIJGetArrayWrite(Mat A, PetscScalar **array) +PetscErrorCode MatSeqAIJGetArrayWrite(Mat A, PetscScalar *array[]) { Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; @@ -4557,7 +4557,7 @@ PetscErrorCode MatSeqAIJGetArrayWrite(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()` @*/ -PetscErrorCode MatSeqAIJRestoreArrayWrite(Mat A, PetscScalar **array) +PetscErrorCode MatSeqAIJRestoreArrayWrite(Mat A, PetscScalar *array[]) { Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; @@ -4595,7 +4595,7 @@ PetscErrorCode MatSeqAIJRestoreArrayWrite(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()` @*/ -PetscErrorCode MatSeqAIJGetCSRAndMemType(Mat mat, const PetscInt **i, const PetscInt **j, PetscScalar **a, PetscMemType *mtype) +PetscErrorCode MatSeqAIJGetCSRAndMemType(Mat mat, const PetscInt *i[], const PetscInt *j[], PetscScalar *a[], PetscMemType *mtype) { Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data; @@ -4612,7 +4612,7 @@ PetscErrorCode MatSeqAIJGetCSRAndMemType(Mat mat, const PetscInt **i, const Pets PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqAIJGetMaxRowNonzeros - returns the maximum number of nonzeros in any row Not Collective @@ -5488,7 +5488,7 @@ PetscErrorCode MatEliminateZeros_SeqAIJ(Mat A, PetscBool keep) PetscFunctionList MatSeqAIJList = NULL; -/*@C +/*@ MatSeqAIJSetType - Converts a `MATSEQAIJ` 
matrix to a subtype Collective @@ -5523,7 +5523,7 @@ PetscErrorCode MatSeqAIJSetType(Mat mat, MatType matype) /*@C MatSeqAIJRegister - - Adds a new sub-matrix type for sequential `MATSEQAIJ` matrices - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined matrix type, for example `MATSEQAIJCRL` @@ -5591,7 +5591,6 @@ PetscErrorCode MatSeqAIJRegisterAll(void) /* Special version for direct calls from Fortran */ -#include #if defined(PETSC_HAVE_FORTRAN_CAPS) #define matsetvaluesseqaij_ MATSETVALUESSEQAIJ #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) @@ -5689,7 +5688,6 @@ PETSC_EXTERN void matsetvaluesseqaij_(Mat *AA, PetscInt *mm, const PetscInt im[] } rp[i] = col; ap[i] = value; - A->nonzerostate++; noinsert:; low = i + 1; } diff --git a/src/mat/impls/aij/seq/aij.h b/src/mat/impls/aij/seq/aij.h index dfafcafdbd1..04a8246216b 100644 --- a/src/mat/impls/aij/seq/aij.h +++ b/src/mat/impls/aij/seq/aij.h @@ -247,6 +247,7 @@ static inline PetscErrorCode MatSeqXAIJFreeAIJ(Mat AA, MatScalar **a, PetscInt * RMAX = AIMAX[ROW] = AIMAX[ROW] + CHUNKSIZE; \ Ain->maxnz += BS2 * CHUNKSIZE; \ Ain->reallocs++; \ + Amat->nonzerostate++; \ } \ } while (0) @@ -281,6 +282,7 @@ static inline PetscErrorCode MatSeqXAIJFreeAIJ(Mat AA, MatScalar **a, PetscInt * RMAX = AIMAX[ROW] = AIMAX[ROW] + CHUNKSIZE; \ Ain->maxnz += BS2 * CHUNKSIZE; \ Ain->reallocs++; \ + Amat->nonzerostate++; \ } \ } while (0) @@ -454,9 +456,10 @@ PETSC_INTERN PetscErrorCode MatDestroySubMatrix_Dummy(Mat); PETSC_INTERN PetscErrorCode MatDestroySubMatrices_Dummy(PetscInt, Mat *[]); PETSC_INTERN PetscErrorCode MatCreateSubMatrix_SeqAIJ(Mat, IS, IS, PetscInt, MatReuse, Mat *); -PETSC_INTERN PetscErrorCode MatSeqAIJCompactOutExtraColumns_SeqAIJ(Mat, ISLocalToGlobalMapping *); PETSC_INTERN PetscErrorCode MatSetSeqAIJWithArrays_private(MPI_Comm, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscScalar[], MatType, Mat); +PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqAIJCompactOutExtraColumns_SeqAIJ(Mat, ISLocalToGlobalMapping *); + /* PetscSparseDenseMinusDot - The inner kernel of triangular solves and Gauss-Siedel smoothing. \sum_i xv[i] * r[xi[i]] for CSR storage diff --git a/src/mat/impls/aij/seq/crl/crl.c b/src/mat/impls/aij/seq/crl/crl.c index a535662968b..1039307940d 100644 --- a/src/mat/impls/aij/seq/crl/crl.c +++ b/src/mat/impls/aij/seq/crl/crl.c @@ -57,7 +57,7 @@ static PetscErrorCode MatSeqAIJCRL_create_aijcrl(Mat A) icols[j * m + i] = (j) ? icols[(j - 1) * m + i] : 0; /* handle case where row is EMPTY */ } } - PetscCall(PetscInfo(A, "Percentage of 0's introduced for vectorized multiply %g. Rmax= %" PetscInt_FMT "\n", 1.0 - ((double)a->nz) / ((double)(rmax * m)), rmax)); + PetscCall(PetscInfo(A, "Percentage of 0's introduced for vectorized multiply %g. 
Rmax= %" PetscInt_FMT "\n", 1.0 - ((double)a->nz) / PetscMax((double)rmax * m, 1), rmax)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/mat/impls/aij/seq/crl/ftn-kernels/fmultcrl.F90 b/src/mat/impls/aij/seq/crl/ftn-kernels/fmultcrl.F90 index 45f384906be..d339691eece 100644 --- a/src/mat/impls/aij/seq/crl/ftn-kernels/fmultcrl.F90 +++ b/src/mat/impls/aij/seq/crl/ftn-kernels/fmultcrl.F90 @@ -22,5 +22,4 @@ subroutine FortranMultCRL(m,rmax,x,y,icols,acols) 20 continue 10 continue - return end diff --git a/src/mat/impls/aij/seq/essl/essl.c b/src/mat/impls/aij/seq/essl/essl.c index d6eb26b5c8c..2fc1fa11b56 100644 --- a/src/mat/impls/aij/seq/essl/essl.c +++ b/src/mat/impls/aij/seq/essl/essl.c @@ -152,7 +152,7 @@ PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_essl(Mat A, MatFactorType ftype, PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Essl(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Essl(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERESSL, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_essl)); diff --git a/src/mat/impls/aij/seq/ftn-custom/makefile b/src/mat/impls/aij/seq/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/impls/aij/seq/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/aij/seq/ftn-custom/zaijf.c b/src/mat/impls/aij/seq/ftn-custom/zaijf.c deleted file mode 100644 index 9283608ebbd..00000000000 --- a/src/mat/impls/aij/seq/ftn-custom/zaijf.c +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreateseqaij_ MATCREATESEQAIJ - #define matseqaijsetpreallocation_ MATSEQAIJSETPREALLOCATION -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreateseqaij_ matcreateseqaij - #define matseqaijsetpreallocation_ matseqaijsetpreallocation -#endif - -PETSC_EXTERN void matcreateseqaij_(MPI_Comm *comm, PetscInt *m, PetscInt *n, PetscInt *nz, PetscInt *nnz, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nnz); - *ierr = MatCreateSeqAIJ(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *m, *n, *nz, nnz, newmat); -} - -PETSC_EXTERN void matseqaijsetpreallocation_(Mat *mat, PetscInt *nz, PetscInt *nnz, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nnz); - *ierr = MatSeqAIJSetPreallocation(*mat, *nz, nnz); -} diff --git a/src/mat/impls/aij/seq/ftn-kernels/fmult.F90 b/src/mat/impls/aij/seq/ftn-kernels/fmult.F90 index 101f44f8972..14d8cbf2bd7 100644 --- a/src/mat/impls/aij/seq/ftn-kernels/fmult.F90 +++ b/src/mat/impls/aij/seq/ftn-kernels/fmult.F90 @@ -22,7 +22,6 @@ subroutine FortranMultTransposeAddAIJ(n,x,ii,jj,a,y) 20 continue 10 continue - return end subroutine FortranMultAIJ(n,x,ii,jj,a,y) @@ -44,5 +43,4 @@ subroutine FortranMultAIJ(n,x,ii,jj,a,y) y(i) = sum 10 continue - return end diff --git a/src/mat/impls/aij/seq/ftn-kernels/fmultadd.F90 b/src/mat/impls/aij/seq/ftn-kernels/fmultadd.F90 index 39b0344d5af..409f945508c 100644 --- a/src/mat/impls/aij/seq/ftn-kernels/fmultadd.F90 +++ b/src/mat/impls/aij/seq/ftn-kernels/fmultadd.F90 @@ -23,5 +23,4 @@ subroutine FortranMultAddAIJ(n,x,ii,jj,a,y,z) z(i) = sum 10 continue - return end diff --git a/src/mat/impls/aij/seq/ftn-kernels/fsolve.F90 b/src/mat/impls/aij/seq/ftn-kernels/fsolve.F90 index d887c1d06c8..4352453f468 100644 --- 
a/src/mat/impls/aij/seq/ftn-kernels/fsolve.F90 +++ b/src/mat/impls/aij/seq/ftn-kernels/fsolve.F90 @@ -40,5 +40,4 @@ subroutine FortranSolveAIJ(n,x,ai,aj,adiag,aa,b) 50 continue x(i) = sum * aa(adiag(i)) 40 continue - return end diff --git a/src/mat/impls/aij/seq/kokkos/aijkok.kokkos.cxx b/src/mat/impls/aij/seq/kokkos/aijkok.kokkos.cxx index 1b7a85b1c02..80e4b2d10e5 100644 --- a/src/mat/impls/aij/seq/kokkos/aijkok.kokkos.cxx +++ b/src/mat/impls/aij/seq/kokkos/aijkok.kokkos.cxx @@ -1871,7 +1871,7 @@ PETSC_EXTERN PetscErrorCode MatGetFactor_SeqAIJKokkos_Kokkos(Mat A, MatFactorTyp PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_KOKKOS(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_KOKKOS(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERKOKKOS, MATSEQAIJKOKKOS, MAT_FACTOR_LU, MatGetFactor_SeqAIJKokkos_Kokkos)); diff --git a/src/mat/impls/aij/seq/lusol/lusol.c b/src/mat/impls/aij/seq/lusol/lusol.c index c0571635264..ab82f7b095c 100644 --- a/src/mat/impls/aij/seq/lusol/lusol.c +++ b/src/mat/impls/aij/seq/lusol/lusol.c @@ -427,7 +427,7 @@ PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_lusol(Mat A, MatFactorType ftype PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Lusol(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Lusol(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERLUSOL, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_lusol)); diff --git a/src/mat/impls/aij/seq/matlab/aijmatlab.c b/src/mat/impls/aij/seq/matlab/aijmatlab.c index 7e766173fa8..65e6bdcb6da 100644 --- a/src/mat/impls/aij/seq/matlab/aijmatlab.c +++ b/src/mat/impls/aij/seq/matlab/aijmatlab.c @@ -205,7 +205,7 @@ static PetscErrorCode MatGetFactor_seqaij_matlab(Mat A, MatFactorType ftype, Mat PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Matlab(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Matlab(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERMATLAB, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_matlab)); diff --git a/src/mat/impls/aij/seq/mkl_pardiso/mkl_pardiso.c b/src/mat/impls/aij/seq/mkl_pardiso/mkl_pardiso.c index 8955330e615..7dc0f9a2bdf 100644 --- a/src/mat/impls/aij/seq/mkl_pardiso/mkl_pardiso.c +++ b/src/mat/impls/aij/seq/mkl_pardiso/mkl_pardiso.c @@ -53,6 +53,8 @@ void pardiso_64init(void *pt, INT_TYPE *mtype, INT_TYPE iparm[]) #define MKL_PARDISO_INIT pardisoinit #endif +#define PetscCallPardiso(f) PetscStackCallExternalVoid("MKL_PARDISO", f); + /* Internal data structure. 
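Judging from the calls below, it carries the PARDISO solver handle `pt`, the CSR description `n`/`a`/`ia`/`ja` with permutation `perm`, the `iparm` control array, bookkeeping fields `maxfct`, `mnum`, `mtype`, `phase`, `nrhs`, `msglvl`, and `err`, plus Schur-complement scratch storage (`schur`, `schur_work`) and the `CleanUp` flag.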
*/ @@ -325,8 +327,8 @@ static PetscErrorCode MatDestroy_MKL_PARDISO(Mat A) if (mat_mkl_pardiso->CleanUp) { mat_mkl_pardiso->phase = JOB_RELEASE_OF_ALL_MEMORY; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, NULL, NULL, NULL, NULL, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL, - &mat_mkl_pardiso->err); + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, NULL, NULL, NULL, NULL, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, + &mat_mkl_pardiso->msglvl, NULL, NULL, &mat_mkl_pardiso->err)); } PetscCall(PetscFree(mat_mkl_pardiso->perm)); PetscCall(PetscFree(mat_mkl_pardiso->schur_work)); @@ -389,13 +391,13 @@ static PetscErrorCode MatSolve_MKL_PARDISO(Mat A, Vec b, Vec x) work = mat_mkl_pardiso->schur_work; } mat_mkl_pardiso->iparm[6 - 1] = 1; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, NULL, &mat_mkl_pardiso->nrhs, - mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)work, &mat_mkl_pardiso->err); + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, NULL, + &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)work, &mat_mkl_pardiso->err)); if (!mat_mkl_pardiso->schur_work) PetscCall(PetscFree(work)); } else { mat_mkl_pardiso->iparm[6 - 1] = 0; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err); + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, + mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err)); } PetscCall(VecRestoreArrayRead(b, &barray)); @@ -421,10 +423,9 @@ static PetscErrorCode MatSolve_MKL_PARDISO(Mat A, Vec b, Vec x) /* expansion phase */ mat_mkl_pardiso->iparm[6 - 1] = 1; mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */ - &mat_mkl_pardiso->err); - + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, 
mat_mkl_pardiso->ja, + mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */ + &mat_mkl_pardiso->err)); PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err); mat_mkl_pardiso->iparm[6 - 1] = 0; } @@ -448,9 +449,8 @@ static PetscErrorCode MatForwardSolve_MKL_PARDISO(Mat A, Vec b, Vec x) mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err); - + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, + &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err)); PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err); PetscCall(VecRestoreArrayRead(b, &barray)); @@ -474,9 +474,8 @@ static PetscErrorCode MatBackwardSolve_MKL_PARDISO(Mat A, Vec b, Vec x) mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err); - + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, + &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err)); PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. 
Please check manual", mat_mkl_pardiso->err); PetscCall(VecRestoreArrayRead(b, &barray)); @@ -523,8 +522,8 @@ static PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A, Mat B, Mat X) if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT; else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err); + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, + mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)barray, (void *)xarray, &mat_mkl_pardiso->err)); PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err); PetscCall(MatDenseRestoreArrayRead(B, &barray)); @@ -561,9 +560,9 @@ static PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A, Mat B, Mat X) /* expansion phase */ mat_mkl_pardiso->iparm[6 - 1] = 1; mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */ - &mat_mkl_pardiso->err); + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, + mat_mkl_pardiso->perm, &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, (void *)xarray, (void *)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */ + &mat_mkl_pardiso->err)); if (o_schur_work) { /* restore original Schur_work (minimal size) */ PetscCall(PetscFree(mat_mkl_pardiso->schur_work)); mat_mkl_pardiso->schur_work = o_schur_work; @@ -586,8 +585,8 @@ static PetscErrorCode MatFactorNumeric_MKL_PARDISO(Mat F, Mat A, const MatFactor PetscCall((*mat_mkl_pardiso->Convert)(A, mat_mkl_pardiso->needsym, MAT_REUSE_MATRIX, &mat_mkl_pardiso->freeaij, &mat_mkl_pardiso->nz, &mat_mkl_pardiso->ia, &mat_mkl_pardiso->ja, (PetscScalar **)&mat_mkl_pardiso->a)); mat_mkl_pardiso->phase = JOB_NUMERICAL_FACTORIZATION; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, (void *)mat_mkl_pardiso->schur, &mat_mkl_pardiso->err); + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, 
mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, + &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, (void *)mat_mkl_pardiso->schur, &mat_mkl_pardiso->err)); PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err); /* report flops */ @@ -786,8 +785,8 @@ static PetscErrorCode MatFactorSymbolic_AIJMKL_PARDISO_Private(Mat F, Mat A, con /* reset flops counting if requested */ if (mat_mkl_pardiso->iparm[18]) mat_mkl_pardiso->iparm[18] = -1; - MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, - &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL, &mat_mkl_pardiso->err); + PetscCallPardiso(MKL_PARDISO(mat_mkl_pardiso->pt, &mat_mkl_pardiso->maxfct, &mat_mkl_pardiso->mnum, &mat_mkl_pardiso->mtype, &mat_mkl_pardiso->phase, &mat_mkl_pardiso->n, mat_mkl_pardiso->a, mat_mkl_pardiso->ia, mat_mkl_pardiso->ja, mat_mkl_pardiso->perm, + &mat_mkl_pardiso->nrhs, mat_mkl_pardiso->iparm, &mat_mkl_pardiso->msglvl, NULL, NULL, &mat_mkl_pardiso->err)); PetscCheck(mat_mkl_pardiso->err >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by MKL PARDISO: err=%d. Please check manual", mat_mkl_pardiso->err); mat_mkl_pardiso->CleanUp = PETSC_TRUE; @@ -1055,7 +1054,7 @@ PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mkl_pardiso(Mat A, MatFactorType ft PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERMKL_PARDISO, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_mkl_pardiso)); diff --git a/src/mat/impls/aij/seq/seqcusparse/aijcusparse.cu b/src/mat/impls/aij/seq/seqcusparse/aijcusparse.cu index 74b403dad83..e52fce9f9d7 100644 --- a/src/mat/impls/aij/seq/seqcusparse/aijcusparse.cu +++ b/src/mat/impls/aij/seq/seqcusparse/aijcusparse.cu @@ -2485,13 +2485,13 @@ PETSC_INTERN PetscErrorCode MatSeqAIJCUSPARSECopyToGPU(Mat A) mat->num_rows = m; mat->num_cols = A->cmap->n; mat->num_entries = nnz; - mat->row_offsets = new THRUSTINTARRAY32(m + 1); + PetscCallCXX(mat->row_offsets = new THRUSTINTARRAY32(m + 1)); mat->row_offsets->assign(ii, ii + m + 1); - mat->column_indices = new THRUSTINTARRAY32(nnz); + PetscCallCXX(mat->column_indices = new THRUSTINTARRAY32(nnz)); mat->column_indices->assign(a->j, a->j + nnz); - mat->values = new THRUSTARRAY(nnz); + PetscCallCXX(mat->values = new THRUSTARRAY(nnz)); if (a->a) mat->values->assign(a->a, a->a + nnz); /* assign the pointer */ @@ -2511,13 +2511,13 @@ PETSC_INTERN PetscErrorCode MatSeqAIJCUSPARSECopyToGPU(Mat A) mat->num_rows = m; mat->num_cols = A->cmap->n; mat->num_entries = nnz; - mat->row_offsets = new THRUSTINTARRAY32(m + 1); + PetscCallCXX(mat->row_offsets = new THRUSTINTARRAY32(m + 1)); mat->row_offsets->assign(ii, ii + m + 1); - mat->column_indices = new THRUSTINTARRAY32(nnz); + PetscCallCXX(mat->column_indices = new THRUSTINTARRAY32(nnz)); mat->column_indices->assign(a->j, a->j + nnz); - mat->values = new THRUSTARRAY(nnz); + PetscCallCXX(mat->values = new THRUSTARRAY(nnz)); if (a->a) mat->values->assign(a->a, a->a + nnz); cusparseHybMat_t hybMat; @@ -2539,8 +2539,8 @@ PETSC_INTERN PetscErrorCode MatSeqAIJCUSPARSECopyToGPU(Mat A) /* assign the 
compressed row indices */ if (a->compressedrow.use) { - cusparsestruct->workVector = new THRUSTARRAY(m); - matstruct->cprowIndices = new THRUSTINTARRAY(m); + PetscCallCXX(cusparsestruct->workVector = new THRUSTARRAY(m)); + PetscCallCXX(matstruct->cprowIndices = new THRUSTINTARRAY(m)); matstruct->cprowIndices->assign(ridx, ridx + m); tmp = m; } else { @@ -4070,7 +4070,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_SeqAIJCUSPARSE(Mat B) .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJCUSPARSE()`, `MatCUSPARSESetUseCPUSolve()`, `MATAIJCUSPARSE`, `MatCreateAIJCUSPARSE()`, `MatCUSPARSESetFormat()`, `MatCUSPARSEStorageFormat`, `MatCUSPARSEFormatOperation` M*/ -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_CUSPARSE(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_CUSPARSE(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERCUSPARSE, MATSEQAIJCUSPARSE, MAT_FACTOR_LU, MatGetFactor_seqaijcusparse_cusparse)); diff --git a/src/mat/impls/aij/seq/seqhipsparse/aijhipsparse.hip.cpp b/src/mat/impls/aij/seq/seqhipsparse/aijhipsparse.hip.cpp index cfc37ac7a0a..8957be4f482 100644 --- a/src/mat/impls/aij/seq/seqhipsparse/aijhipsparse.hip.cpp +++ b/src/mat/impls/aij/seq/seqhipsparse/aijhipsparse.hip.cpp @@ -1258,7 +1258,7 @@ static PetscErrorCode MatSolve_SeqAIJHIPSPARSE_ILU0(Mat fact, Vec b, Vec x) /* Solve L*y = b */ PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_X, (void *)barray)); PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_Y, fs->Y)); - #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) + #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) || PETSC_PKG_HIP_VERSION_GE(6, 0, 0) PetscCallHIPSPARSE(hipsparseSpSV_solve(fs->handle, HIPSPARSE_OPERATION_NON_TRANSPOSE, &PETSC_HIPSPARSE_ONE, fs->spMatDescr_L, /* L Y = X */ fs->dnVecDescr_X, fs->dnVecDescr_Y, hipsparse_scalartype, HIPSPARSE_SPSV_ALG_DEFAULT, fs->spsvDescr_L)); // hipsparseSpSV_solve() secretely uses the external buffer used in hipsparseSpSV_analysis()! 
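The hunks above all apply the same hardening pattern: previously unchecked external calls are routed through checking wrappers so failures surface as PETSc errors with a usable stack instead of propagating silently. The MKL PARDISO driver calls gain PetscCallPardiso(), the Thrust `new` allocations gain PetscCallCXX() (which catches a thrown C++ exception such as std::bad_alloc and converts it into a PETSc error), and the hipSPARSE guards are widened so that ROCm 6.0+ takes the same hipsparseSpSV_solve() code path as 5.6.0. The definition of PetscCallPardiso() is not shown in these hunks; a plausible sketch, assuming it follows PETSc's existing PetscStackCallExternalVoid() idiom for void-returning external routines:

/* Sketch only: the actual definition belongs to this MR and may differ.
 * MKL_PARDISO() returns void and reports status through its trailing `err`
 * argument, so the wrapper's job is error-stack bookkeeping around the
 * external call; the PetscCheck(mat_mkl_pardiso->err >= 0, ...) calls kept
 * after each invocation still validate the numeric status. */
#define PetscCallPardiso(f) PetscStackCallExternalVoid("MKL_PARDISO", f)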
#else @@ -1267,7 +1267,7 @@ static PetscErrorCode MatSolve_SeqAIJHIPSPARSE_ILU0(Mat fact, Vec b, Vec x) #endif /* Solve U*x = y */ PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_X, xarray)); - #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) + #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) || PETSC_PKG_HIP_VERSION_GE(6, 0, 0) PetscCallHIPSPARSE(hipsparseSpSV_solve(fs->handle, HIPSPARSE_OPERATION_NON_TRANSPOSE, &PETSC_HIPSPARSE_ONE, fs->spMatDescr_U, /* U X = Y */ fs->dnVecDescr_Y, fs->dnVecDescr_X, hipsparse_scalartype, HIPSPARSE_SPSV_ALG_DEFAULT, fs->spsvDescr_U)); #else @@ -1316,7 +1316,7 @@ static PetscErrorCode MatSolveTranspose_SeqAIJHIPSPARSE_ILU0(Mat fact, Vec b, Ve /* Solve Ut*y = b */ PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_X, (void *)barray)); PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_Y, fs->Y)); - #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) + #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) || PETSC_PKG_HIP_VERSION_GE(6, 0, 0) PetscCallHIPSPARSE(hipsparseSpSV_solve(fs->handle, HIPSPARSE_OPERATION_TRANSPOSE, &PETSC_HIPSPARSE_ONE, fs->spMatDescr_U, /* Ut Y = X */ fs->dnVecDescr_X, fs->dnVecDescr_Y, hipsparse_scalartype, HIPSPARSE_SPSV_ALG_DEFAULT, fs->spsvDescr_Ut)); #else @@ -1325,7 +1325,7 @@ static PetscErrorCode MatSolveTranspose_SeqAIJHIPSPARSE_ILU0(Mat fact, Vec b, Ve #endif /* Solve Lt*x = y */ PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_X, xarray)); - #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) + #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) || PETSC_PKG_HIP_VERSION_GE(6, 0, 0) PetscCallHIPSPARSE(hipsparseSpSV_solve(fs->handle, HIPSPARSE_OPERATION_TRANSPOSE, &PETSC_HIPSPARSE_ONE, fs->spMatDescr_L, /* Lt X = Y */ fs->dnVecDescr_Y, fs->dnVecDescr_X, hipsparse_scalartype, HIPSPARSE_SPSV_ALG_DEFAULT, fs->spsvDescr_Lt)); #else @@ -1559,7 +1559,7 @@ static PetscErrorCode MatSolve_SeqAIJHIPSPARSE_ICC0(Mat fact, Vec b, Vec x) /* Solve L*y = b */ PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_X, (void *)barray)); PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_Y, fs->Y)); - #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) + #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) || PETSC_PKG_HIP_VERSION_GE(6, 0, 0) PetscCallHIPSPARSE(hipsparseSpSV_solve(fs->handle, HIPSPARSE_OPERATION_NON_TRANSPOSE, &PETSC_HIPSPARSE_ONE, fs->spMatDescr_L, /* L Y = X */ fs->dnVecDescr_X, fs->dnVecDescr_Y, hipsparse_scalartype, HIPSPARSE_SPSV_ALG_DEFAULT, fs->spsvDescr_L)); #else @@ -1568,7 +1568,7 @@ static PetscErrorCode MatSolve_SeqAIJHIPSPARSE_ICC0(Mat fact, Vec b, Vec x) #endif /* Solve Lt*x = y */ PetscCallHIPSPARSE(hipsparseDnVecSetValues(fs->dnVecDescr_X, xarray)); - #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) + #if PETSC_PKG_HIP_VERSION_EQ(5, 6, 0) || PETSC_PKG_HIP_VERSION_GE(6, 0, 0) PetscCallHIPSPARSE(hipsparseSpSV_solve(fs->handle, HIPSPARSE_OPERATION_TRANSPOSE, &PETSC_HIPSPARSE_ONE, fs->spMatDescr_L, /* Lt X = Y */ fs->dnVecDescr_Y, fs->dnVecDescr_X, hipsparse_scalartype, HIPSPARSE_SPSV_ALG_DEFAULT, fs->spsvDescr_Lt)); #else @@ -3546,7 +3546,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_SeqAIJHIPSPARSE(Mat B) .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJHIPSPARSE()`, `MATAIJHIPSPARSE`, `MatCreateAIJHIPSPARSE()`, `MatHIPSPARSESetFormat()`, `MatHIPSPARSEStorageFormat`, `MatHIPSPARSEFormatOperation` M*/ -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_HIPSPARSE(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_HIPSPARSE(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERHIPSPARSE, MATSEQAIJHIPSPARSE, MAT_FACTOR_LU, MatGetFactor_seqaijhipsparse_hipsparse)); @@ -3849,7 
+3849,7 @@ static PetscErrorCode MatSetValuesCOO_SeqAIJHIPSPARSE(Mat A, const PetscScalar v .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSERestoreIJ()`, `MatSeqAIJHIPSPARSEGetArrayRead()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSEGetIJ(Mat A, PetscBool compressed, const int **i, const int **j) +PetscErrorCode MatSeqAIJHIPSPARSEGetIJ(Mat A, PetscBool compressed, const int *i[], const int *j[]) { Mat_SeqAIJHIPSPARSE *cusp = (Mat_SeqAIJHIPSPARSE *)A->spptr; Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; @@ -3892,7 +3892,7 @@ PetscErrorCode MatSeqAIJHIPSPARSEGetIJ(Mat A, PetscBool compressed, const int ** .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSEGetIJ()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSERestoreIJ(Mat A, PetscBool compressed, const int **i, const int **j) +PetscErrorCode MatSeqAIJHIPSPARSERestoreIJ(Mat A, PetscBool compressed, const int *i[], const int *j[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -3920,7 +3920,7 @@ PetscErrorCode MatSeqAIJHIPSPARSERestoreIJ(Mat A, PetscBool compressed, const in .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSEGetArray()`, `MatSeqAIJHIPSPARSEGetArrayWrite()`, `MatSeqAIJHIPSPARSERestoreArrayRead()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSEGetArrayRead(Mat A, const PetscScalar **a) +PetscErrorCode MatSeqAIJHIPSPARSEGetArrayRead(Mat A, const PetscScalar *a[]) { Mat_SeqAIJHIPSPARSE *cusp = (Mat_SeqAIJHIPSPARSE *)A->spptr; CsrMatrix *csr; @@ -3951,7 +3951,7 @@ PetscErrorCode MatSeqAIJHIPSPARSEGetArrayRead(Mat A, const PetscScalar **a) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSEGetArrayRead()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSERestoreArrayRead(Mat A, const PetscScalar **a) +PetscErrorCode MatSeqAIJHIPSPARSERestoreArrayRead(Mat A, const PetscScalar *a[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -3979,7 +3979,7 @@ PetscErrorCode MatSeqAIJHIPSPARSERestoreArrayRead(Mat A, const PetscScalar **a) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSEGetArrayRead()`, `MatSeqAIJHIPSPARSEGetArrayWrite()`, `MatSeqAIJHIPSPARSERestoreArray()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSEGetArray(Mat A, PetscScalar **a) +PetscErrorCode MatSeqAIJHIPSPARSEGetArray(Mat A, PetscScalar *a[]) { Mat_SeqAIJHIPSPARSE *cusp = (Mat_SeqAIJHIPSPARSE *)A->spptr; CsrMatrix *csr; @@ -4011,7 +4011,7 @@ PetscErrorCode MatSeqAIJHIPSPARSEGetArray(Mat A, PetscScalar **a) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSEGetArray()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSERestoreArray(Mat A, PetscScalar **a) +PetscErrorCode MatSeqAIJHIPSPARSERestoreArray(Mat A, PetscScalar *a[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -4041,7 +4041,7 @@ PetscErrorCode MatSeqAIJHIPSPARSERestoreArray(Mat A, PetscScalar **a) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSEGetArray()`, `MatSeqAIJHIPSPARSEGetArrayRead()`, `MatSeqAIJHIPSPARSERestoreArrayWrite()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSEGetArrayWrite(Mat A, PetscScalar **a) +PetscErrorCode MatSeqAIJHIPSPARSEGetArrayWrite(Mat A, PetscScalar *a[]) { Mat_SeqAIJHIPSPARSE *cusp = (Mat_SeqAIJHIPSPARSE *)A->spptr; CsrMatrix *csr; @@ -4073,7 +4073,7 @@ PetscErrorCode MatSeqAIJHIPSPARSEGetArrayWrite(Mat A, PetscScalar **a) .seealso: [](ch_matrices), `Mat`, `MatSeqAIJHIPSPARSEGetArrayWrite()` @*/ -PetscErrorCode MatSeqAIJHIPSPARSERestoreArrayWrite(Mat A, PetscScalar **a) +PetscErrorCode MatSeqAIJHIPSPARSERestoreArrayWrite(Mat A, PetscScalar *a[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); diff --git 
a/src/mat/impls/aij/seq/seqviennacl/aijviennacl.cxx b/src/mat/impls/aij/seq/seqviennacl/aijviennacl.cxx index 9c6e43fcc58..779a011fc26 100644 --- a/src/mat/impls/aij/seq/seqviennacl/aijviennacl.cxx +++ b/src/mat/impls/aij/seq/seqviennacl/aijviennacl.cxx @@ -493,7 +493,7 @@ PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJViennaCL(Mat A, MatType type .seealso: `MatCreateSeqAIJViennaCL()`, `MATAIJVIENNACL`, `MatCreateAIJViennaCL()` M*/ -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_ViennaCL(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_ViennaCL(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERPETSC, MATSEQAIJVIENNACL, MAT_FACTOR_LU, MatGetFactor_seqaij_petsc)); diff --git a/src/mat/impls/aij/seq/superlu/superlu.c b/src/mat/impls/aij/seq/superlu/superlu.c index 37831395705..70907f28b57 100644 --- a/src/mat/impls/aij/seq/superlu/superlu.c +++ b/src/mat/impls/aij/seq/superlu/superlu.c @@ -630,7 +630,7 @@ static PetscErrorCode MatGetFactor_seqsell_superlu(Mat A, MatFactorType ftype, M PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_SuperLU(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERSUPERLU, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_superlu)); diff --git a/src/mat/impls/aij/seq/symtranspose.c b/src/mat/impls/aij/seq/symtranspose.c index 5b54306d41e..88edf83f861 100644 --- a/src/mat/impls/aij/seq/symtranspose.c +++ b/src/mat/impls/aij/seq/symtranspose.c @@ -98,12 +98,22 @@ PetscErrorCode MatTranspose_SeqAIJ(Mat A, MatReuse reuse, Mat *B) PetscCall(PetscArraycpy(atfill, ati, an)); /* Walk through A row-wise and mark nonzero entries of A^T. */ - for (i = 0; i < am; i++) { - anzj = ai[i + 1] - ai[i]; - for (j = 0; j < anzj; j++) { - atj[atfill[*aj]] = i; - ata[atfill[*aj]] = *aa++; - atfill[*aj++] += 1; + if (aa) { + for (i = 0; i < am; i++) { + anzj = ai[i + 1] - ai[i]; + for (j = 0; j < anzj; j++) { + atj[atfill[*aj]] = i; + ata[atfill[*aj]] = *aa++; + atfill[*aj++] += 1; + } + } + } else { + for (i = 0; i < am; i++) { + anzj = ai[i + 1] - ai[i]; + for (j = 0; j < anzj; j++) { + atj[atfill[*aj]] = i; + atfill[*aj++] += 1; + } } } PetscCall(PetscFree(atfill)); diff --git a/src/mat/impls/aij/seq/umfpack/umfpack.c b/src/mat/impls/aij/seq/umfpack/umfpack.c index 3e1530eec1b..b28f123ae8f 100644 --- a/src/mat/impls/aij/seq/umfpack/umfpack.c +++ b/src/mat/impls/aij/seq/umfpack/umfpack.c @@ -399,7 +399,7 @@ static PetscErrorCode MatFactorGetSolverType_seqaij_umfpack(Mat A, MatSolverType .seealso: [](ch_matrices), `Mat`, `PCLU`, `MATSOLVERSUPERLU`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType` M*/ -PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_umfpack(Mat A, MatFactorType ftype, Mat *F) +static PetscErrorCode MatGetFactor_seqaij_umfpack(Mat A, MatFactorType ftype, Mat *F) { Mat B; Mat_UMFPACK *lu; @@ -447,7 +447,7 @@ PETSC_INTERN PetscErrorCode MatGetFactor_seqsbaij_cholmod(Mat, MatFactorType, Ma PETSC_INTERN PetscErrorCode MatGetFactor_seqaij_klu(Mat, MatFactorType, Mat *); PETSC_INTERN PetscErrorCode MatGetFactor_seqaij_spqr(Mat, MatFactorType, Mat *); -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuiteSparse(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_SuiteSparse(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERUMFPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_umfpack)); diff --git a/src/mat/impls/baij/mpi/baijmkl/ftn-custom/makefile 
b/src/mat/impls/baij/mpi/baijmkl/ftn-custom/makefile deleted file mode 100644 index e6402f3060a..00000000000 --- a/src/mat/impls/baij/mpi/baijmkl/ftn-custom/makefile +++ /dev/null @@ -1,7 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' -#requirespackage 'PETSC_HAVE_MKL_SPARSE_OPTIMIZE' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/baij/mpi/baijmkl/ftn-custom/zmpibaijmklf.c b/src/mat/impls/baij/mpi/baijmkl/ftn-custom/zmpibaijmklf.c deleted file mode 100644 index 8a8bc7499e9..00000000000 --- a/src/mat/impls/baij/mpi/baijmkl/ftn-custom/zmpibaijmklf.c +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreatebaijmkl_ MATCREATEBAIJMKL -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreatebaijmkl_ matcreatebaijmkl -#endif - -PETSC_EXTERN void matcreatebaijmkl_(MPI_Comm *comm, PetscInt *bs, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, PetscInt *d_nz, PetscInt *d_nnz, PetscInt *o_nz, PetscInt *o_nnz, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(d_nnz); - CHKFORTRANNULLINTEGER(o_nnz); - *ierr = MatCreateBAIJMKL(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *m, *n, *M, *N, *d_nz, d_nnz, *o_nz, o_nnz, newmat); -} diff --git a/src/mat/impls/baij/mpi/baijmkl/mpibaijmkl.c b/src/mat/impls/baij/mpi/baijmkl/mpibaijmkl.c index b59de7b1aa4..a9e6ceb6866 100644 --- a/src/mat/impls/baij/mpi/baijmkl/mpibaijmkl.c +++ b/src/mat/impls/baij/mpi/baijmkl/mpibaijmkl.c @@ -1,4 +1,4 @@ -#include <../src/mat/impls/baij/mpi/mpibaij.h> +#include <../src/mat/impls/baij/mpi/mpibaij.h> /*I "petscmat.h" I*/ PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqBAIJMKL(Mat, MatType, MatReuse, Mat *); @@ -27,7 +27,7 @@ static PetscErrorCode MatConvert_MPIBAIJ_MPIBAIJMKL(Mat A, MatType type, MatReus } // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ MatCreateBAIJMKL - Creates a sparse parallel matrix in `MATBAIJMKL` format (block compressed row). Collective @@ -37,24 +37,24 @@ static PetscErrorCode MatConvert_MPIBAIJ_MPIBAIJMKL(Mat A, MatType type, MatReus . bs - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()` . m - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given) - This value should be the same as the local size used in creating the - y vector for the matrix-vector product y = Ax. + This value should be the same as the local size used in creating the + y vector for the matrix-vector product y = Ax. . n - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given) - This value should be the same as the local size used in creating the - x vector for the matrix-vector product y = Ax. + This value should be the same as the local size used in creating the + x vector for the matrix-vector product y = Ax. . M - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given) . N - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given) . d_nz - number of nonzero blocks per block row in diagonal portion of local - submatrix (same for all local rows) + submatrix (same for all local rows) . 
d_nnz - array containing the number of nonzero blocks in the various block rows - of the in diagonal portion of the local (possibly different for each block - row) or `NULL`. If you plan to factor the matrix you must leave room for the diagonal entry - and set it even if it is zero. + of the in diagonal portion of the local (possibly different for each block + row) or `NULL`. If you plan to factor the matrix you must leave room for the diagonal entry + and set it even if it is zero. . o_nz - number of nonzero blocks per block row in the off-diagonal portion of local - submatrix (same for all local rows). + submatrix (same for all local rows). - o_nnz - array containing the number of nonzero blocks in the various block rows of the - off-diagonal portion of the local submatrix (possibly different for - each block row) or `NULL`. + off-diagonal portion of the local submatrix (possibly different for + each block row) or `NULL`. Output Parameter: . A - the matrix diff --git a/src/mat/impls/baij/mpi/ftn-custom/zmpibaijf.c b/src/mat/impls/baij/mpi/ftn-custom/zmpibaijf.c index 1e9eac38a53..d5c55ed20e8 100644 --- a/src/mat/impls/baij/mpi/ftn-custom/zmpibaijf.c +++ b/src/mat/impls/baij/mpi/ftn-custom/zmpibaijf.c @@ -2,13 +2,9 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matmpibaijgetseqbaij_ MATMPIBAIJGETSEQBAIJ - #define matcreatebaij_ MATCREATEBAIJ - #define matmpibaijsetpreallocation_ MATMPIBAIJSETPREALLOCATION + #define matmpibaijgetseqbaij_ MATMPIBAIJGETSEQBAIJ #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matmpibaijgetseqbaij_ matmpibaijgetseqbaij - #define matcreatebaij_ matcreatebaij - #define matmpibaijsetpreallocation_ matmpibaijsetpreallocation + #define matmpibaijgetseqbaij_ matmpibaijgetseqbaij #endif PETSC_EXTERN void matmpibaijgetseqbaij_(Mat *A, Mat *Ad, Mat *Ao, PetscInt *ic, size_t *iic, PetscErrorCode *ierr) @@ -18,17 +14,3 @@ PETSC_EXTERN void matmpibaijgetseqbaij_(Mat *A, Mat *Ad, Mat *Ao, PetscInt *ic, if (*ierr) return; *iic = PetscIntAddressToFortran(ic, (PetscInt *)i); } - -PETSC_EXTERN void matcreatebaij_(MPI_Comm *comm, PetscInt *bs, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, PetscInt *d_nz, PetscInt *d_nnz, PetscInt *o_nz, PetscInt *o_nnz, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(d_nnz); - CHKFORTRANNULLINTEGER(o_nnz); - *ierr = MatCreateBAIJ(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *m, *n, *M, *N, *d_nz, d_nnz, *o_nz, o_nnz, newmat); -} - -PETSC_EXTERN void matmpibaijsetpreallocation_(Mat *mat, PetscInt *bs, PetscInt *d_nz, PetscInt *d_nnz, PetscInt *o_nz, PetscInt *o_nnz, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(d_nnz); - CHKFORTRANNULLINTEGER(o_nnz); - *ierr = MatMPIBAIJSetPreallocation(*mat, *bs, *d_nz, d_nnz, *o_nz, o_nnz); -} diff --git a/src/mat/impls/baij/mpi/mpibaij.c b/src/mat/impls/baij/mpi/mpibaij.c index b06518f4543..28170ac8695 100644 --- a/src/mat/impls/baij/mpi/mpibaij.c +++ b/src/mat/impls/baij/mpi/mpibaij.c @@ -63,7 +63,7 @@ static PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A, Vec v, PetscInt idx[]) const PetscScalar *vb; PetscFunctionBegin; - PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vA)); + PetscCall(MatCreateVecs(a->A, NULL, &vA)); PetscCall(MatGetRowMaxAbs(a->A, vA, idx)); PetscCall(VecGetArrayWrite(vA, &va)); @@ -73,7 +73,7 @@ static PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A, Vec v, PetscInt idx[]) } } - PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vB)); + PetscCall(MatCreateVecs(a->B, NULL, &vB)); PetscCall(PetscMalloc1(m, &idxb)); PetscCall(MatGetRowMaxAbs(a->B, vB, idxb)); @@ -97,6 
+97,23 @@ static PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A, Vec v, PetscInt idx[]) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatGetRowSumAbs_MPIBAIJ(Mat A, Vec v) +{ + Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data; + Vec vB, vA; + + PetscFunctionBegin; + PetscCall(MatCreateVecs(a->A, NULL, &vA)); + PetscCall(MatGetRowSumAbs(a->A, vA)); + PetscCall(MatCreateVecs(a->B, NULL, &vB)); + PetscCall(MatGetRowSumAbs(a->B, vB)); + PetscCall(VecAXPY(vA, 1.0, vB)); + PetscCall(VecDestroy(&vB)); + PetscCall(VecCopy(vA, v)); + PetscCall(VecDestroy(&vA)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode MatStoreValues_MPIBAIJ(Mat mat) { Mat_MPIBAIJ *aij = (Mat_MPIBAIJ *)mat->data; @@ -440,22 +457,11 @@ static PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat, PetscInt m, const Pet if (mat->was_assembled) { if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat)); -#if defined(PETSC_USE_DEBUG) - #if defined(PETSC_USE_CTABLE) - { - PetscInt data; - PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &data)); - PetscCheck((data - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap"); - } - #else - PetscCheck((baij->colmap[in[j]] - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap"); - #endif -#endif #if defined(PETSC_USE_CTABLE) PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col)); - col = (col - 1) / bs; + col = col < 1 ? -1 : (col - 1) / bs; #else - col = (baij->colmap[in[j]] - 1) / bs; + col = baij->colmap[in[j]] < 1 ? -1 : (baij->colmap[in[j]] - 1) / bs; #endif if (col < 0 && !((Mat_SeqBAIJ *)baij->B->data)->nonew) { PetscCall(MatDisAssemble_MPIBAIJ(mat)); @@ -2565,6 +2571,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ, NULL, /*150*/ NULL, MatEliminateZeros_MPIBAIJ, + MatGetRowSumAbs_MPIBAIJ, NULL}; PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType, MatReuse, Mat *); @@ -2962,7 +2969,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B) .seealso: `Mat`, `MatCreateBAIJ()`, `MATSEQBAIJ`, `MATMPIBAIJ`, `MatMPIBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocationCSR()` M*/ -/*@C +/*@ MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in `MATMPIBAIJ` format (block compressed row). @@ -3053,7 +3060,7 @@ PetscErrorCode MatMPIBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, con } // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ MatCreateBAIJ - Creates a sparse parallel matrix in `MATBAIJ` format (block compressed row). @@ -3064,24 +3071,24 @@ PetscErrorCode MatMPIBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, con . bs - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()` . m - number of local rows (or `PETSC_DECIDE` to have calculated if M is given) - This value should be the same as the local size used in creating the - y vector for the matrix-vector product y = Ax. + This value should be the same as the local size used in creating the + y vector for the matrix-vector product y = Ax. . n - number of local columns (or `PETSC_DECIDE` to have calculated if N is given) - This value should be the same as the local size used in creating the - x vector for the matrix-vector product y = Ax. 
+ This value should be the same as the local size used in creating the + x vector for the matrix-vector product y = Ax. . M - number of global rows (or `PETSC_DETERMINE` to have calculated if m is given) . N - number of global columns (or `PETSC_DETERMINE` to have calculated if n is given) . d_nz - number of nonzero blocks per block row in diagonal portion of local - submatrix (same for all local rows) + submatrix (same for all local rows) . d_nnz - array containing the number of nonzero blocks in the various block rows - of the in diagonal portion of the local (possibly different for each block - row) or NULL. If you plan to factor the matrix you must leave room for the diagonal entry - and set it even if it is zero. + of the in diagonal portion of the local (possibly different for each block + row) or NULL. If you plan to factor the matrix you must leave room for the diagonal entry + and set it even if it is zero. . o_nz - number of nonzero blocks per block row in the off-diagonal portion of local - submatrix (same for all local rows). + submatrix (same for all local rows). - o_nnz - array containing the number of nonzero blocks in the various block rows of the - off-diagonal portion of the local submatrix (possibly different for - each block row) or NULL. + off-diagonal portion of the local submatrix (possibly different for + each block row) or NULL. Output Parameter: . A - the matrix @@ -3112,6 +3119,9 @@ PetscErrorCode MatMPIBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, con If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor than it must be used on all processors that share the object for that argument. + If `m` and `n` are not `PETSC_DECIDE`, then the values determine the `PetscLayout` of the matrix and the ranges returned by + `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`. + Storage Information: For a square global matrix we define each processor's diagonal portion to be its local rows and the corresponding columns (a square submatrix); @@ -3146,7 +3156,8 @@ PetscErrorCode MatMPIBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, con In general, for PDE problems in which most nonzeros are near the diagonal, one expects `d_nz` >> `o_nz`. -.seealso: `Mat`, `MatCreate()`, `MatCreateSeqBAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocationCSR()` +.seealso: `Mat`, `MatCreate()`, `MatCreateSeqBAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocationCSR()`, + `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout` @*/ PetscErrorCode MatCreateBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A) { diff --git a/src/mat/impls/baij/seq/baij.c b/src/mat/impls/baij/seq/baij.c index 3033b457a13..be8d283bd7a 100644 --- a/src/mat/impls/baij/seq/baij.c +++ b/src/mat/impls/baij/seq/baij.c @@ -2549,7 +2549,6 @@ PetscErrorCode MatSetValues_SeqBAIJ(Mat A, PetscInt m, const PetscInt im[], Pets ap[bs2 * i + bs * cidx + ridx] = value; } a->nz++; - A->nonzerostate++; noinsert1:; low = i; } @@ -2645,7 +2644,6 @@ static PetscErrorCode MatGetRowMaxAbs_SeqBAIJ(Mat A, Vec v, PetscInt idx[]) PetscInt ncols, brow, krow, kcol; PetscFunctionBegin; - /* why is this not a macro???????????????????????????????????????????????????????????????? 
*/ PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix"); bs = A->rmap->bs; aa = a->a; @@ -3162,7 +3160,8 @@ static struct _MatOps MatOps_Values = {MatSetValues_SeqBAIJ, NULL, /*150*/ NULL, MatEliminateZeros_SeqBAIJ, - MatGetRowSumAbs_SeqBAIJ}; + MatGetRowSumAbs_SeqBAIJ, + NULL}; static PetscErrorCode MatStoreValues_SeqBAIJ(Mat mat) { @@ -3494,7 +3493,7 @@ static PetscErrorCode MatSeqBAIJSetPreallocationCSR_SeqBAIJ(Mat B, PetscInt bs, .seealso: [](ch_matrices), `Mat`, `MATSEQBAIJ`, `MatSeqBAIJRestoreArray()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()` @*/ -PetscErrorCode MatSeqBAIJGetArray(Mat A, PetscScalar **array) +PetscErrorCode MatSeqBAIJGetArray(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscUseMethod(A, "MatSeqBAIJGetArray_C", (Mat, PetscScalar **), (A, array)); @@ -3514,7 +3513,7 @@ PetscErrorCode MatSeqBAIJGetArray(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MatSeqBAIJGetArray()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()` @*/ -PetscErrorCode MatSeqBAIJRestoreArray(Mat A, PetscScalar **array) +PetscErrorCode MatSeqBAIJRestoreArray(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscUseMethod(A, "MatSeqBAIJRestoreArray_C", (Mat, PetscScalar **), (A, array)); @@ -3803,7 +3802,7 @@ PetscErrorCode MatLoad_SeqBAIJ(Mat mat, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateSeqBAIJ - Creates a sparse matrix in `MATSEQAIJ` (block compressed row) format. For good matrix assembly performance the user should preallocate the matrix storage by setting the parameter `nz` @@ -3862,7 +3861,7 @@ PetscErrorCode MatCreateSeqBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqBAIJSetPreallocation - Sets the block size and expected nonzeros per row in the matrix. For good matrix assembly performance the user should preallocate the matrix storage by setting the parameter `nz` diff --git a/src/mat/impls/baij/seq/baijmkl/baijmkl.c b/src/mat/impls/baij/seq/baijmkl/baijmkl.c index bc4d7ae46f1..1bfdffca54a 100644 --- a/src/mat/impls/baij/seq/baijmkl/baijmkl.c +++ b/src/mat/impls/baij/seq/baijmkl/baijmkl.c @@ -6,7 +6,7 @@ */ #include <../src/mat/impls/baij/seq/baij.h> -#include <../src/mat/impls/baij/seq/baijmkl/baijmkl.h> +#include <../src/mat/impls/baij/seq/baijmkl/baijmkl.h> /*I "petscmat.h" I*/ #if defined(PETSC_HAVE_MKL_INTEL_ILP64) #define MKL_ILP64 #endif @@ -431,7 +431,7 @@ static PetscErrorCode MatAssemblyEnd_SeqBAIJMKL(Mat A, MatAssemblyType mode) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateSeqBAIJMKL - Creates a sparse matrix of type `MATSEQBAIJMKL`. This type inherits from `MATSEQBAIJ` and is largely identical, but uses sparse BLAS routines from Intel MKL whenever possible. 
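The BAIJ manual pages reflowed above document the block-preallocation arguments (bs, d_nz/d_nnz, o_nz/o_nnz). A minimal, self-contained usage sketch with made-up sizes, which also exercises the MatGetRowSumAbs() operation newly wired up for MATMPIBAIJ earlier in this diff:

#include <petscmat.h>

int main(int argc, char **argv)
{
  Mat      A;
  Vec      rowsums;
  PetscInt bs = 2, mlocal = 8, nlocal = 8; /* hypothetical local sizes, multiples of bs */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  /* at most 3 nonzero blocks per block row in the diagonal portion, 1 in the off-diagonal portion */
  PetscCall(MatCreateBAIJ(PETSC_COMM_WORLD, bs, mlocal, nlocal, PETSC_DETERMINE, PETSC_DETERMINE, 3, NULL, 1, NULL, &A));
  /* ... insert blocks with MatSetValuesBlocked() here ... */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatCreateVecs(A, NULL, &rowsums)); /* left vector, one entry per row */
  PetscCall(MatGetRowSumAbs(A, rowsums));      /* rowsums[i] = sum_j |A(i,j)| */
  PetscCall(VecDestroy(&rowsums));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}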
diff --git a/src/mat/impls/baij/seq/baijmkl/ftn-custom/makefile b/src/mat/impls/baij/seq/baijmkl/ftn-custom/makefile deleted file mode 100644 index e6402f3060a..00000000000 --- a/src/mat/impls/baij/seq/baijmkl/ftn-custom/makefile +++ /dev/null @@ -1,7 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' -#requirespackage 'PETSC_HAVE_MKL_SPARSE_OPTIMIZE' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/baij/seq/baijmkl/ftn-custom/zbaijmklf.c b/src/mat/impls/baij/seq/baijmkl/ftn-custom/zbaijmklf.c deleted file mode 100644 index 8cdbe23f587..00000000000 --- a/src/mat/impls/baij/seq/baijmkl/ftn-custom/zbaijmklf.c +++ /dev/null @@ -1,14 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreateseqbaijmkl_ MATCREATESEQBAIJMKL -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreateseqbaijmkl_ matcreateseqbaijmkl -#endif - -PETSC_EXTERN void matcreateseqbaijmkl_(MPI_Comm *comm, PetscInt *bs, PetscInt *m, PetscInt *n, PetscInt *nz, PetscInt *nnz, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nnz); - *ierr = MatCreateSeqBAIJMKL(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *m, *n, *nz, nnz, newmat); -} diff --git a/src/mat/impls/baij/seq/dgedi.c b/src/mat/impls/baij/seq/dgedi.c index 21ae967736f..c4dbb5d122e 100644 --- a/src/mat/impls/baij/seq/dgedi.c +++ b/src/mat/impls/baij/seq/dgedi.c @@ -12,10 +12,9 @@ */ -#include #include -PETSC_INTERN PetscErrorCode PetscLINPACKgedi(MatScalar *a, PetscInt n, PetscInt *ipvt, MatScalar *work) +PetscErrorCode PetscLINPACKgedi(MatScalar *a, PetscInt n, PetscInt *ipvt, MatScalar *work) { PetscInt i__2, kb, kp1, nm1, i, j, k, l, ll, kn, knp1, jn1; MatScalar *aa, *ax, *ay, tmp; diff --git a/src/mat/impls/baij/seq/dgefa.c b/src/mat/impls/baij/seq/dgefa.c index a2f2d8ada7d..b6a8a6cd5cf 100644 --- a/src/mat/impls/baij/seq/dgefa.c +++ b/src/mat/impls/baij/seq/dgefa.c @@ -10,9 +10,9 @@ src/mat/impls/baij/seq */ -#include +#include -PETSC_INTERN PetscErrorCode PetscLINPACKgefa(MatScalar *a, PetscInt n, PetscInt *ipvt, PetscBool allowzeropivot, PetscBool *zeropivotdetected) +PetscErrorCode PetscLINPACKgefa(MatScalar *a, PetscInt n, PetscInt *ipvt, PetscBool allowzeropivot, PetscBool *zeropivotdetected) { PetscInt i__2, i__3, kp1, nm1, j, k, l, ll, kn, knp1, jn1; MatScalar t, *ax, *ay, *aa; diff --git a/src/mat/impls/baij/seq/ftn-custom/makefile b/src/mat/impls/baij/seq/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/impls/baij/seq/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/baij/seq/ftn-custom/zbaijf.c b/src/mat/impls/baij/seq/ftn-custom/zbaijf.c deleted file mode 100644 index 1ad37fdbf2f..00000000000 --- a/src/mat/impls/baij/seq/ftn-custom/zbaijf.c +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreateseqbaij_ MATCREATESEQBAIJ - #define matseqbaijsetpreallocation_ MATSEQBAIJSETPREALLOCATION -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreateseqbaij_ matcreateseqbaij - #define matseqbaijsetpreallocation_ matseqbaijsetpreallocation -#endif - -PETSC_EXTERN void matcreateseqbaij_(MPI_Comm *comm, PetscInt *bs, PetscInt *m, PetscInt *n, PetscInt *nz, PetscInt *nnz, Mat *newmat, 
PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nnz); - *ierr = MatCreateSeqBAIJ(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *m, *n, *nz, nnz, newmat); -} - -PETSC_EXTERN void matseqbaijsetpreallocation_(Mat *mat, PetscInt *bs, PetscInt *nz, PetscInt *nnz, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nnz); - *ierr = MatSeqBAIJSetPreallocation(*mat, *bs, *nz, nnz); -} diff --git a/src/mat/impls/baij/seq/ftn-kernels/fsolvebaij.F90 b/src/mat/impls/baij/seq/ftn-kernels/fsolvebaij.F90 index f05b1b878fb..04c511e5824 100644 --- a/src/mat/impls/baij/seq/ftn-kernels/fsolvebaij.F90 +++ b/src/mat/impls/baij/seq/ftn-kernels/fsolvebaij.F90 @@ -94,7 +94,6 @@ subroutine FortranSolveBAIJ4Unroll(n,x,ai,aj,adiag,a,b) x(idx+3) = a(ax+3)*s1+a(ax+7)*s2+a(ax+11)*s3+a(ax+15)*s4 idx = idx - 4 40 continue - return end ! version that does not call BLAS 2 operation for each row block @@ -210,5 +209,4 @@ subroutine FortranSolveBAIJ4(n,x,ai,aj,adiag,a,b,w) idx = idx - 4 40 continue - return end diff --git a/src/mat/impls/blockmat/seq/blockmat.c b/src/mat/impls/blockmat/seq/blockmat.c index 962bfd16df5..6d285fd209f 100644 --- a/src/mat/impls/blockmat/seq/blockmat.c +++ b/src/mat/impls/blockmat/seq/blockmat.c @@ -262,7 +262,6 @@ static PetscErrorCode MatSetValues_BlockMat(Mat A, PetscInt m, const PetscInt im if (N >= i) ap[i] = NULL; rp[i] = bcol; a->nz++; - A->nonzerostate++; noinsert1:; if (!*(ap + i)) PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, bs, bs, 0, NULL, ap + i)); PetscCall(MatSetValues(ap[i], 1, &ridx, 1, &cidx, &value, is)); @@ -821,6 +820,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_BlockMat, NULL, /*150*/ NULL, NULL, + NULL, NULL}; /*@C diff --git a/src/mat/impls/dense/mpi/cupm/matmpidensecupm.hpp b/src/mat/impls/dense/mpi/cupm/matmpidensecupm.hpp index 2f238e3b079..38233e771c0 100644 --- a/src/mat/impls/dense/mpi/cupm/matmpidensecupm.hpp +++ b/src/mat/impls/dense/mpi/cupm/matmpidensecupm.hpp @@ -19,7 +19,7 @@ namespace impl { template -class MatDense_MPI_CUPM : MatDense_CUPM> { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL MatDense_MPI_CUPM : MatDense_CUPM> { public: MATDENSECUPM_HEADER(T, MatDense_MPI_CUPM); diff --git a/src/mat/impls/dense/mpi/ftn-custom/makefile b/src/mat/impls/dense/mpi/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/impls/dense/mpi/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/dense/mpi/ftn-custom/zmpidensef.c b/src/mat/impls/dense/mpi/ftn-custom/zmpidensef.c deleted file mode 100644 index 65cbf6f9b2a..00000000000 --- a/src/mat/impls/dense/mpi/ftn-custom/zmpidensef.c +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreatedense_ MATCREATEDENSE - #define matmpidensesetpreallocation_ MATMPIDENSESETPREALLOCATION -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreatedense_ matcreatedense - #define matmpidensesetpreallocation_ matmpidensesetpreallocation -#endif - -PETSC_EXTERN void matcreatedense_(MPI_Comm *comm, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, PetscScalar *data, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(data); - *ierr = MatCreateDense(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *m, *n, *M, *N, data, newmat); -} - -PETSC_EXTERN void matmpidensesetpreallocation_(Mat *mat, PetscScalar *data, PetscErrorCode *ierr) -{ - 
CHKFORTRANNULLSCALAR(data); - *ierr = MatMPIDenseSetPreallocation(*mat, data); -} diff --git a/src/mat/impls/dense/mpi/mpidense.c b/src/mat/impls/dense/mpi/mpidense.c index eb3abe0be17..ecc233fba1d 100644 --- a/src/mat/impls/dense/mpi/mpidense.c +++ b/src/mat/impls/dense/mpi/mpidense.c @@ -463,6 +463,11 @@ static PetscErrorCode MatZeroRows_MPIDense(Mat A, PetscInt n, const PetscInt row PetscFunctionReturn(PETSC_SUCCESS); } +PETSC_INTERN PetscErrorCode MatMult_SeqDense(Mat, Vec, Vec); +PETSC_INTERN PetscErrorCode MatMultAdd_SeqDense(Mat, Vec, Vec, Vec); +PETSC_INTERN PetscErrorCode MatMultTranspose_SeqDense(Mat, Vec, Vec); +PETSC_INTERN PetscErrorCode MatMultTransposeAdd_SeqDense(Mat, Vec, Vec, Vec); + static PetscErrorCode MatMult_MPIDense(Mat mat, Vec xx, Vec yy) { Mat_MPIDense *mdn = (Mat_MPIDense *)mat->data; @@ -482,6 +487,25 @@ static PetscErrorCode MatMult_MPIDense(Mat mat, Vec xx, Vec yy) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatMultAddColumnRange_MPIDense(Mat mat, Vec xx, Vec yy, Vec zz, PetscInt c_start, PetscInt c_end) +{ + Mat_MPIDense *mdn = (Mat_MPIDense *)mat->data; + const PetscScalar *ax; + PetscScalar *ay; + PetscMemType axmtype, aymtype; + + PetscFunctionBegin; + if (!mdn->Mvctx) PetscCall(MatSetUpMultiply_MPIDense(mat)); + PetscCall(VecGetArrayReadAndMemType(xx, &ax, &axmtype)); + PetscCall(VecGetArrayAndMemType(mdn->lvec, &ay, &aymtype)); + PetscCall(PetscSFBcastWithMemTypeBegin(mdn->Mvctx, MPIU_SCALAR, axmtype, ax, aymtype, ay, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(mdn->Mvctx, MPIU_SCALAR, ax, ay, MPI_REPLACE)); + PetscCall(VecRestoreArrayAndMemType(mdn->lvec, &ay)); + PetscCall(VecRestoreArrayReadAndMemType(xx, &ax)); + PetscUseMethod(mdn->A, "MatMultAddColumnRange_C", (Mat, Vec, Vec, Vec, PetscInt, PetscInt), (mdn->A, mdn->lvec, yy, zz, c_start, c_end)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode MatMultAdd_MPIDense(Mat mat, Vec xx, Vec yy, Vec zz) { Mat_MPIDense *mdn = (Mat_MPIDense *)mat->data; @@ -501,6 +525,26 @@ static PetscErrorCode MatMultAdd_MPIDense(Mat mat, Vec xx, Vec yy, Vec zz) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatMultHermitianTransposeColumnRange_MPIDense(Mat A, Vec xx, Vec yy, PetscInt c_start, PetscInt c_end) +{ + Mat_MPIDense *a = (Mat_MPIDense *)A->data; + const PetscScalar *ax; + PetscScalar *ay; + PetscMemType axmtype, aymtype; + + PetscFunctionBegin; + if (!a->Mvctx) PetscCall(MatSetUpMultiply_MPIDense(A)); + PetscCall(VecSet(yy, 0.0)); + PetscUseMethod(a->A, "MatMultHermitianTransposeColumnRange_C", (Mat, Vec, Vec, PetscInt, PetscInt), (a->A, xx, a->lvec, c_start, c_end)); + PetscCall(VecGetArrayReadAndMemType(a->lvec, &ax, &axmtype)); + PetscCall(VecGetArrayAndMemType(yy, &ay, &aymtype)); + PetscCall(PetscSFReduceWithMemTypeBegin(a->Mvctx, MPIU_SCALAR, axmtype, ax, aymtype, ay, MPIU_SUM)); + PetscCall(PetscSFReduceEnd(a->Mvctx, MPIU_SCALAR, ax, ay, MPIU_SUM)); + PetscCall(VecRestoreArrayReadAndMemType(a->lvec, &ax)); + PetscCall(VecRestoreArrayAndMemType(yy, &ay)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode MatMultTransposeKernel_MPIDense(Mat A, Vec xx, Vec yy, PetscBool herm) { Mat_MPIDense *a = (Mat_MPIDense *)A->data; @@ -522,6 +566,28 @@ static PetscErrorCode MatMultTransposeKernel_MPIDense(Mat A, Vec xx, Vec yy, Pet PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatMultHermitianTransposeAddColumnRange_MPIDense(Mat A, Vec xx, Vec yy, Vec zz, PetscInt c_start, PetscInt c_end) +{ + Mat_MPIDense *a = (Mat_MPIDense *)A->data; + const 
PetscScalar *ax; + PetscScalar *ay; + PetscMemType axmtype, aymtype; + + PetscFunctionBegin; + if (!a->Mvctx) PetscCall(MatSetUpMultiply_MPIDense(A)); + PetscCall(VecCopy(yy, zz)); + PetscUseMethod(a->A, "MatMultHermitianTransposeColumnRange_C", (Mat, Vec, Vec, PetscInt, PetscInt), (a->A, xx, a->lvec, c_start, c_end)); + PetscCall(VecGetArrayReadAndMemType(a->lvec, &ax, &axmtype)); + PetscCall(VecGetArrayAndMemType(zz, &ay, &aymtype)); + PetscCall(PetscSFReduceWithMemTypeBegin(a->Mvctx, MPIU_SCALAR, axmtype, ax, aymtype, ay, MPIU_SUM)); + PetscCall(PetscSFReduceEnd(a->Mvctx, MPIU_SCALAR, ax, ay, MPIU_SUM)); + PetscCall(VecRestoreArrayReadAndMemType(a->lvec, &ax)); + PetscCall(VecRestoreArrayAndMemType(zz, &ay)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode MatMultTransposeAddKernel_MPIDense(Mat A, Vec xx, Vec yy, Vec zz, PetscBool herm) { Mat_MPIDense *a = (Mat_MPIDense *)A->data; @@ -589,7 +655,7 @@ PetscErrorCode MatGetDiagonal_MPIDense(Mat A, Vec v) PetscCall(MatDenseGetLDA(a->A, &lda)); for (i = 0; i < len; i++) x[i] = av[radd + i * lda + i]; PetscCall(MatDenseRestoreArrayRead(a->A, &av)); - PetscCall(PetscArrayzero(x + i, nl - i)); + if (nl - i > 0) PetscCall(PetscArrayzero(x + i, nl - i)); PetscCall(VecRestoreArray(v, &x)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -684,6 +750,9 @@ static PetscErrorCode MatDestroy_MPIDense(Mat mat) PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseRestoreColumnVecWrite_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseGetSubMatrix_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseRestoreSubMatrix_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultAddColumnRange_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultHermitianTransposeColumnRange_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultHermitianTransposeAddColumnRange_C", NULL)); PetscCall(PetscObjectCompose((PetscObject)mat, "DiagonalBlock", NULL)); PetscFunctionReturn(PETSC_SUCCESS); @@ -1287,6 +1356,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_MPIDense, NULL, /*150*/ NULL, NULL, + NULL, NULL}; static PetscErrorCode MatMPIDenseSetPreallocation_MPIDense(Mat mat, PetscScalar *data) @@ -1695,6 +1765,9 @@ PetscErrorCode MatCreate_MPIDense(Mat mat) #endif PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseGetColumn_C", MatDenseGetColumn_MPIDense)); PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseRestoreColumn_C", MatDenseRestoreColumn_MPIDense)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultAddColumnRange_C", MatMultAddColumnRange_MPIDense)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultHermitianTransposeColumnRange_C", MatMultHermitianTransposeColumnRange_MPIDense)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultHermitianTransposeAddColumnRange_C", MatMultHermitianTransposeAddColumnRange_MPIDense)); PetscCall(PetscObjectChangeTypeName((PetscObject)mat, MATMPIDENSE)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1713,7 +1786,7 @@ PetscErrorCode MatCreate_MPIDense(Mat mat) .seealso: [](ch_matrices), `Mat`, `MATSEQDENSE`, `MATMPIDENSE`, `MATDENSECUDA`, `MATDENSEHIP` M*/ -/*@C +/*@ MatMPIDenseSetPreallocation - Sets the array used to store the matrix entries Collective Input Parameters: + B - the matrix - data - optional location of matrix data. 
Set to `NULL` for PETSc - to control all matrix memory allocation. + to control all matrix memory allocation. Level: intermediate @@ -1830,7 +1903,7 @@ PetscErrorCode MatDenseReplaceArray(Mat mat, const PetscScalar *array) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateDense - Creates a matrix in `MATDENSE` format. Collective diff --git a/src/mat/impls/dense/seq/cupm/matseqdensecupm.hpp b/src/mat/impls/dense/seq/cupm/matseqdensecupm.hpp index b88f9858109..a61aedf481e 100644 --- a/src/mat/impls/dense/seq/cupm/matseqdensecupm.hpp +++ b/src/mat/impls/dense/seq/cupm/matseqdensecupm.hpp @@ -27,7 +27,7 @@ namespace impl { template -class MatDense_Seq_CUPM : MatDense_CUPM> { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL MatDense_Seq_CUPM : MatDense_CUPM> { public: MATDENSECUPM_HEADER(T, MatDense_Seq_CUPM); @@ -65,6 +65,10 @@ class MatDense_Seq_CUPM : MatDense_CUPM> { template static PetscErrorCode MatMatSolve_Factored_Dispatch_(Mat, Mat, Mat) noexcept; template + static PetscErrorCode MatMultAddColumnRange_Dispatch_(Mat, Vec, Vec, Vec, PetscInt, PetscInt) noexcept; + template + static PetscErrorCode MatMultColumnRange_Dispatch_(Mat, Vec, Vec, PetscInt, PetscInt) noexcept; + template static PetscErrorCode MatMultAdd_Dispatch_(Mat, Vec, Vec, Vec) noexcept; template @@ -214,10 +218,10 @@ inline PetscErrorCode MatDense_Seq_CUPM::SetPreallocation_(Mat m, PetscDevice mcu->d_user_alloc = PETSC_TRUE; mcu->d_v = user_device_array; } else { - PetscInt size; + std::size_t size; mcu->d_user_alloc = PETSC_FALSE; - PetscCall(PetscIntMultError(lda, ncols, &size)); + size = lda * ncols; PetscCall(PetscCUPMMallocAsync(&mcu->d_v, size, stream)); PetscCall(PetscCUPMMemsetAsync(mcu->d_v, 0, size, stream)); } @@ -797,10 +801,11 @@ inline PetscErrorCode MatDense_Seq_CUPM::MatMatSolve_Factored_Dispatch_(Mat A template template -inline PetscErrorCode MatDense_Seq_CUPM::MatMultAdd_Dispatch_(Mat A, Vec xx, Vec yy, Vec zz) noexcept +inline PetscErrorCode MatDense_Seq_CUPM::MatMultAddColumnRange_Dispatch_(Mat A, Vec xx, Vec yy, Vec zz, PetscInt c_start, PetscInt c_end) noexcept { - const auto m = static_cast(A->rmap->n); - const auto n = static_cast(A->cmap->n); + const auto m = static_cast(A->rmap->n); + const auto n = static_cast(c_end - c_start); + const auto lda = static_cast(MatIMPLCast(A)->lda); cupmBlasHandle_t handle; PetscDeviceContext dctx; @@ -822,13 +827,31 @@ inline PetscErrorCode MatDense_Seq_CUPM::MatMultAdd_Dispatch_(Mat A, Vec xx, const auto dzz = VecSeq_CUPM::DeviceArrayReadWrite(dctx, zz); PetscCall(PetscLogGpuTimeBegin()); - PetscCallCUPMBLAS(cupmBlasXgemv(handle, op, m, n, &one, da.cupmdata(), static_cast(MatIMPLCast(A)->lda), dxx.cupmdata(), 1, (yy ? &one : &zero), dzz.cupmdata(), 1)); + PetscCallCUPMBLAS(cupmBlasXgemv(handle, op, m, n, &one, da.cupmdata() + c_start * lda, lda, dxx.cupmdata() + (transpose ? 0 : c_start), 1, (yy ? &one : &zero), dzz.cupmdata() + (transpose ? c_start : 0), 1)); PetscCall(PetscLogGpuTimeEnd()); } PetscCall(PetscLogGpuFlops(2.0 * m * n - (yy ? 
0 : m))); PetscFunctionReturn(PETSC_SUCCESS); } +template +template +inline PetscErrorCode MatDense_Seq_CUPM::MatMultColumnRange_Dispatch_(Mat A, Vec xx, Vec yy, PetscInt c_start, PetscInt c_end) noexcept +{ + PetscFunctionBegin; + PetscCall(MatMultAddColumnRange_Dispatch_(A, xx, nullptr, yy, c_start, c_end)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +template +template +inline PetscErrorCode MatDense_Seq_CUPM::MatMultAdd_Dispatch_(Mat A, Vec xx, Vec yy, Vec zz) noexcept +{ + PetscFunctionBegin; + PetscCall(MatMultAddColumnRange_Dispatch_(A, xx, yy, zz, 0, A->cmap->n)); + PetscFunctionReturn(PETSC_SUCCESS); +} + // ========================================================================================== // MatDense_Seq_CUPM - Private API - Conversion Dispatch // ========================================================================================== @@ -1035,6 +1058,9 @@ inline PetscErrorCode MatDense_Seq_CUPM::BindToCPU(Mat A, PetscBool to_host) MatComposeOp_CUPM(to_host, pobj, "MatDenseGetSubMatrix_C", MatDenseGetSubMatrix_SeqDense, GetSubMatrix); MatComposeOp_CUPM(to_host, pobj, "MatDenseRestoreSubMatrix_C", MatDenseRestoreSubMatrix_SeqDense, RestoreSubMatrix); MatComposeOp_CUPM(to_host, pobj, "MatQRFactor_C", MatQRFactor_SeqDense, SolveQR::Factor); + MatComposeOp_CUPM(to_host, pobj, "MatMultAddColumnRange_C", MatMultAddColumnRange_SeqDense, MatMultAddColumnRange_Dispatch_); + MatComposeOp_CUPM(to_host, pobj, "MatMultHermitianTransposeColumnRange_C", MatMultHermitianTransposeColumnRange_SeqDense, MatMultColumnRange_Dispatch_); + MatComposeOp_CUPM(to_host, pobj, "MatMultHermitianTransposeAddColumnRange_C", MatMultHermitianTransposeAddColumnRange_SeqDense, MatMultAddColumnRange_Dispatch_); // always the same PetscCall(PetscObjectComposeFunction(pobj, "MatDenseSetLDA_C", MatDenseSetLDA_SeqDense)); diff --git a/src/mat/impls/dense/seq/dense.c b/src/mat/impls/dense/seq/dense.c index 5c28e42491d..ae0680b9dbd 100644 --- a/src/mat/impls/dense/seq/dense.c +++ b/src/mat/impls/dense/seq/dense.c @@ -1053,7 +1053,7 @@ static PetscErrorCode MatSOR_SeqDense(Mat A, Vec bb, PetscReal omega, MatSORType PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode MatMultKernel_SeqDense(Mat A, Vec xx, Vec yy, PetscBool trans, PetscBool herm) +static PetscErrorCode MatMultColumnRangeKernel_SeqDense(Mat A, Vec xx, Vec yy, PetscInt c_start, PetscInt c_end, PetscBool trans, PetscBool herm) { Mat_SeqDense *mat = (Mat_SeqDense *)A->data; PetscScalar *y, _DOne = 1.0, _DZero = 0.0; @@ -1062,10 +1062,10 @@ static PetscErrorCode MatMultKernel_SeqDense(Mat A, Vec xx, Vec yy, PetscBool tr PetscFunctionBegin; PetscCall(PetscBLASIntCast(A->rmap->n, &m)); - PetscCall(PetscBLASIntCast(A->cmap->n, &n)); + PetscCall(PetscBLASIntCast(c_end - c_start, &n)); PetscCall(VecGetArrayRead(xx, &x)); PetscCall(VecGetArrayWrite(yy, &y)); - if (!A->rmap->n || !A->cmap->n) { + if (!m || !n) { PetscBLASInt i; if (trans) for (i = 0; i < n; i++) y[i] = 0.0; @@ -1073,41 +1073,47 @@ static PetscErrorCode MatMultKernel_SeqDense(Mat A, Vec xx, Vec yy, PetscBool tr for (i = 0; i < m; i++) y[i] = 0.0; } else { if (trans) { - if (herm) PetscCallBLAS("BLASgemv", BLASgemv_("C", &m, &n, &_DOne, v, &mat->lda, x, &_One, &_DZero, y, &_One)); - else PetscCallBLAS("BLASgemv", BLASgemv_("T", &m, &n, &_DOne, v, &mat->lda, x, &_One, &_DZero, y, &_One)); + if (herm) PetscCallBLAS("BLASgemv", BLASgemv_("C", &m, &n, &_DOne, v + c_start * mat->lda, &mat->lda, x, &_One, &_DZero, y + c_start, &_One)); + else PetscCallBLAS("BLASgemv", BLASgemv_("T", &m, &n, 
&_DOne, v + c_start * mat->lda, &mat->lda, x, &_One, &_DZero, y + c_start, &_One)); } else { - PetscCallBLAS("BLASgemv", BLASgemv_("N", &m, &n, &_DOne, v, &mat->lda, x, &_One, &_DZero, y, &_One)); + PetscCallBLAS("BLASgemv", BLASgemv_("N", &m, &n, &_DOne, v + c_start * mat->lda, &mat->lda, x + c_start, &_One, &_DZero, y, &_One)); } - if (trans) PetscCall(PetscLogFlops(2.0 * A->rmap->n * A->cmap->n - A->cmap->n)); - else PetscCall(PetscLogFlops(2.0 * A->rmap->n * A->cmap->n - A->rmap->n)); + PetscCall(PetscLogFlops(2.0 * m * n - n)); } PetscCall(VecRestoreArrayRead(xx, &x)); PetscCall(VecRestoreArrayWrite(yy, &y)); PetscFunctionReturn(PETSC_SUCCESS); } +PetscErrorCode MatMultHermitianTransposeColumnRange_SeqDense(Mat A, Vec xx, Vec yy, PetscInt c_start, PetscInt c_end) +{ + PetscFunctionBegin; + PetscCall(MatMultColumnRangeKernel_SeqDense(A, xx, yy, c_start, c_end, PETSC_TRUE, PETSC_TRUE)); + PetscFunctionReturn(PETSC_SUCCESS); +} + PetscErrorCode MatMult_SeqDense(Mat A, Vec xx, Vec yy) { PetscFunctionBegin; - PetscCall(MatMultKernel_SeqDense(A, xx, yy, PETSC_FALSE, PETSC_FALSE)); + PetscCall(MatMultColumnRangeKernel_SeqDense(A, xx, yy, 0, A->cmap->n, PETSC_FALSE, PETSC_FALSE)); PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode MatMultTranspose_SeqDense(Mat A, Vec xx, Vec yy) { PetscFunctionBegin; - PetscCall(MatMultKernel_SeqDense(A, xx, yy, PETSC_TRUE, PETSC_FALSE)); + PetscCall(MatMultColumnRangeKernel_SeqDense(A, xx, yy, 0, A->cmap->n, PETSC_TRUE, PETSC_FALSE)); PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode MatMultHermitianTranspose_SeqDense(Mat A, Vec xx, Vec yy) { PetscFunctionBegin; - PetscCall(MatMultKernel_SeqDense(A, xx, yy, PETSC_TRUE, PETSC_TRUE)); + PetscCall(MatMultColumnRangeKernel_SeqDense(A, xx, yy, 0, A->cmap->n, PETSC_TRUE, PETSC_TRUE)); PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode MatMultAddKernel_SeqDense(Mat A, Vec xx, Vec zz, Vec yy, PetscBool trans, PetscBool herm) +static PetscErrorCode MatMultAddColumnRangeKernel_SeqDense(Mat A, Vec xx, Vec zz, Vec yy, PetscInt c_start, PetscInt c_end, PetscBool trans, PetscBool herm) { Mat_SeqDense *mat = (Mat_SeqDense *)A->data; const PetscScalar *v = mat->v, *x; @@ -1116,41 +1122,57 @@ static PetscErrorCode MatMultAddKernel_SeqDense(Mat A, Vec xx, Vec zz, Vec yy, P PetscFunctionBegin; PetscCall(PetscBLASIntCast(A->rmap->n, &m)); - PetscCall(PetscBLASIntCast(A->cmap->n, &n)); + PetscCall(PetscBLASIntCast(c_end - c_start, &n)); PetscCall(VecCopy(zz, yy)); - if (!A->rmap->n || !A->cmap->n) PetscFunctionReturn(PETSC_SUCCESS); + if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(VecGetArray(yy, &y)); PetscCall(VecGetArrayRead(xx, &x)); if (trans) { - if (herm) PetscCallBLAS("BLASgemv", BLASgemv_("C", &m, &n, &_DOne, v, &mat->lda, x, &_One, &_DOne, y, &_One)); - else PetscCallBLAS("BLASgemv", BLASgemv_("T", &m, &n, &_DOne, v, &mat->lda, x, &_One, &_DOne, y, &_One)); + if (herm) PetscCallBLAS("BLASgemv", BLASgemv_("C", &m, &n, &_DOne, v + c_start * mat->lda, &mat->lda, x, &_One, &_DOne, y + c_start, &_One)); + else PetscCallBLAS("BLASgemv", BLASgemv_("T", &m, &n, &_DOne, v + c_start * mat->lda, &mat->lda, x, &_One, &_DOne, y + c_start, &_One)); } else { - PetscCallBLAS("BLASgemv", BLASgemv_("N", &m, &n, &_DOne, v, &mat->lda, x, &_One, &_DOne, y, &_One)); + PetscCallBLAS("BLASgemv", BLASgemv_("N", &m, &n, &_DOne, v + c_start * mat->lda, &mat->lda, x + c_start, &_One, &_DOne, y, &_One)); } PetscCall(VecRestoreArrayRead(xx, &x)); PetscCall(VecRestoreArray(yy, &y)); - PetscCall(PetscLogFlops(2.0 * 
A->rmap->n * A->cmap->n)); + PetscCall(PetscLogFlops(2.0 * m * n)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode MatMultAddColumnRange_SeqDense(Mat A, Vec xx, Vec zz, Vec yy, PetscInt c_start, PetscInt c_end) +{ + PetscFunctionBegin; + PetscCall(MatMultAddColumnRangeKernel_SeqDense(A, xx, zz, yy, c_start, c_end, PETSC_FALSE, PETSC_FALSE)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode MatMultHermitianTransposeAddColumnRange_SeqDense(Mat A, Vec xx, Vec zz, Vec yy, PetscInt c_start, PetscInt c_end) +{ + PetscFunctionBegin; + PetscCall(MatMultAddColumnRangeKernel_SeqDense(A, xx, zz, yy, c_start, c_end, PETSC_TRUE, PETSC_TRUE)); PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode MatMultAdd_SeqDense(Mat A, Vec xx, Vec zz, Vec yy) { PetscFunctionBegin; - PetscCall(MatMultAddKernel_SeqDense(A, xx, zz, yy, PETSC_FALSE, PETSC_FALSE)); + PetscCall(MatMultAddColumnRangeKernel_SeqDense(A, xx, zz, yy, 0, A->cmap->n, PETSC_FALSE, PETSC_FALSE)); PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode MatMultTransposeAdd_SeqDense(Mat A, Vec xx, Vec zz, Vec yy) { PetscFunctionBegin; - PetscCall(MatMultAddKernel_SeqDense(A, xx, zz, yy, PETSC_TRUE, PETSC_FALSE)); + PetscCall(MatMultAddColumnRangeKernel_SeqDense(A, xx, zz, yy, 0, A->cmap->n, PETSC_TRUE, PETSC_FALSE)); PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode MatMultHermitianTransposeAdd_SeqDense(Mat A, Vec xx, Vec zz, Vec yy) { PetscFunctionBegin; - PetscCall(MatMultAddKernel_SeqDense(A, xx, zz, yy, PETSC_TRUE, PETSC_TRUE)); + PetscCall(MatMultAddColumnRangeKernel_SeqDense(A, xx, zz, yy, 0, A->cmap->n, PETSC_TRUE, PETSC_TRUE)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1216,7 +1238,7 @@ static PetscErrorCode MatSetValues_SeqDense(Mat A, PetscInt m, const PetscInt in av[indexn[j] * mat->lda + indexm[i]] = v ? v[idx++] : (idx++, 0.0); } } - } else if (v) { + } else { for (j = 0; j < n; j++) { if (indexn[j] < 0) { idx += m; @@ -1229,7 +1251,7 @@ static PetscErrorCode MatSetValues_SeqDense(Mat A, PetscInt m, const PetscInt in continue; } PetscCheck(indexm[i] < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, indexm[i], A->rmap->n - 1); - av[indexn[j] * mat->lda + indexm[i]] += v[idx++]; + av[indexn[j] * mat->lda + indexm[i]] += v ? v[idx++] : (idx++, 0.0); } } } @@ -1250,7 +1272,7 @@ static PetscErrorCode MatSetValues_SeqDense(Mat A, PetscInt m, const PetscInt in av[indexn[j] * mat->lda + indexm[i]] = v ? v[idx++] : (idx++, 0.0); } } - } else if (v) { + } else { for (i = 0; i < m; i++) { if (indexm[i] < 0) { idx += n; @@ -1263,7 +1285,7 @@ static PetscErrorCode MatSetValues_SeqDense(Mat A, PetscInt m, const PetscInt in continue; } PetscCheck(indexn[j] < A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, indexn[j], A->cmap->n - 1); - av[indexn[j] * mat->lda + indexm[i]] += v ?
v[idx++] : (idx++, 0.0); } } } @@ -1754,6 +1776,9 @@ PetscErrorCode MatDestroy_SeqDense(Mat mat) PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseRestoreColumnVecWrite_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseGetSubMatrix_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDenseRestoreSubMatrix_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultAddColumnRange_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultHermitianTransposeColumnRange_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMultHermitianTransposeAddColumnRange_C", NULL)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -2166,7 +2191,7 @@ PetscErrorCode MatDenseSetLDA(Mat A, PetscInt lda) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseRestoreArray()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()` @*/ -PetscErrorCode MatDenseGetArray(Mat A, PetscScalar **array) +PetscErrorCode MatDenseGetArray(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -2191,7 +2216,7 @@ PetscErrorCode MatDenseGetArray(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseGetArray()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()` @*/ -PetscErrorCode MatDenseRestoreArray(Mat A, PetscScalar **array) +PetscErrorCode MatDenseRestoreArray(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -2219,7 +2244,7 @@ PetscErrorCode MatDenseRestoreArray(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseRestoreArrayRead()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()` @*/ -PetscErrorCode MatDenseGetArrayRead(Mat A, const PetscScalar **array) +PetscErrorCode MatDenseGetArrayRead(Mat A, const PetscScalar *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -2241,7 +2266,7 @@ PetscErrorCode MatDenseGetArrayRead(Mat A, const PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseGetArrayRead()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()` @*/ -PetscErrorCode MatDenseRestoreArrayRead(Mat A, const PetscScalar **array) +PetscErrorCode MatDenseRestoreArrayRead(Mat A, const PetscScalar *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -2265,7 +2290,7 @@ PetscErrorCode MatDenseRestoreArrayRead(Mat A, const PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseRestoreArrayWrite()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()` @*/ -PetscErrorCode MatDenseGetArrayWrite(Mat A, PetscScalar **array) +PetscErrorCode MatDenseGetArrayWrite(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -2287,7 +2312,7 @@ PetscErrorCode MatDenseGetArrayWrite(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseGetArrayWrite()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()` @*/ -PetscErrorCode MatDenseRestoreArrayWrite(Mat A, PetscScalar **array) +PetscErrorCode MatDenseRestoreArrayWrite(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, 
MAT_CLASSID, 1); @@ -2321,7 +2346,7 @@ PetscErrorCode MatDenseRestoreArrayWrite(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseRestoreArrayAndMemType()`, `MatDenseGetArrayReadAndMemType()`, `MatDenseGetArrayWriteAndMemType()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()`, `MatSeqAIJGetCSRAndMemType()` @*/ -PetscErrorCode MatDenseGetArrayAndMemType(Mat A, PetscScalar **array, PetscMemType *mtype) +PetscErrorCode MatDenseGetArrayAndMemType(Mat A, PetscScalar *array[], PetscMemType *mtype) { PetscBool isMPI; @@ -2360,7 +2385,7 @@ PetscErrorCode MatDenseGetArrayAndMemType(Mat A, PetscScalar **array, PetscMemTy .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseGetArrayAndMemType()`, `MatDenseGetArray()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()` @*/ -PetscErrorCode MatDenseRestoreArrayAndMemType(Mat A, PetscScalar **array) +PetscErrorCode MatDenseRestoreArrayAndMemType(Mat A, PetscScalar *array[]) { PetscBool isMPI; @@ -2406,7 +2431,7 @@ PetscErrorCode MatDenseRestoreArrayAndMemType(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseRestoreArrayReadAndMemType()`, `MatDenseGetArrayWriteAndMemType()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()`, `MatSeqAIJGetCSRAndMemType()` @*/ -PetscErrorCode MatDenseGetArrayReadAndMemType(Mat A, const PetscScalar **array, PetscMemType *mtype) +PetscErrorCode MatDenseGetArrayReadAndMemType(Mat A, const PetscScalar *array[], PetscMemType *mtype) { PetscBool isMPI; @@ -2444,7 +2469,7 @@ PetscErrorCode MatDenseGetArrayReadAndMemType(Mat A, const PetscScalar **array, .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseGetArrayReadAndMemType()`, `MatDenseGetArray()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()` @*/ -PetscErrorCode MatDenseRestoreArrayReadAndMemType(Mat A, const PetscScalar **array) +PetscErrorCode MatDenseRestoreArrayReadAndMemType(Mat A, const PetscScalar *array[]) { PetscBool isMPI; @@ -2489,7 +2514,7 @@ PetscErrorCode MatDenseRestoreArrayReadAndMemType(Mat A, const PetscScalar **arr .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseRestoreArrayWriteAndMemType()`, `MatDenseGetArrayReadAndMemType()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()`, `MatSeqAIJGetCSRAndMemType()` @*/ -PetscErrorCode MatDenseGetArrayWriteAndMemType(Mat A, PetscScalar **array, PetscMemType *mtype) +PetscErrorCode MatDenseGetArrayWriteAndMemType(Mat A, PetscScalar *array[], PetscMemType *mtype) { PetscBool isMPI; @@ -2527,7 +2552,7 @@ PetscErrorCode MatDenseGetArrayWriteAndMemType(Mat A, PetscScalar **array, Petsc .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseGetArrayWriteAndMemType()`, `MatDenseGetArray()`, `MatDenseGetArrayRead()`, `MatDenseRestoreArrayRead()`, `MatDenseGetArrayWrite()`, `MatDenseRestoreArrayWrite()` @*/ -PetscErrorCode MatDenseRestoreArrayWriteAndMemType(Mat A, PetscScalar **array) +PetscErrorCode MatDenseRestoreArrayWriteAndMemType(Mat A, PetscScalar *array[]) { PetscBool isMPI; @@ -3243,9 +3268,10 @@ static struct _MatOps MatOps_Values = {MatSetValues_SeqDense, NULL, /*150*/ NULL, NULL, + NULL, NULL}; -/*@C +/*@ MatCreateSeqDense - Creates a `MATSEQDENSE` that is stored in column major order (the usual Fortran format). 
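The column-range kernels added above all exploit the column-major layout this constructor documents: entry (i, j) of a dense matrix with leading dimension `lda` lives at `v[(size_t)j * lda + i]`, so restricting a product to columns [c_start, c_end) needs only pointer offsets. A minimal sketch of the non-transpose case, as standalone illustration rather than the actual PETSc kernel:

.vb
/* Sketch: y += A(:, c_start:c_end) * x(c_start:c_end), column-major A with leading dimension lda */
static void DenseColumnRangeMultAdd(PetscInt m, PetscInt lda, PetscInt c_start, PetscInt c_end, const PetscScalar *v, const PetscScalar *x, PetscScalar *y)
{
  for (PetscInt j = c_start; j < c_end; j++) {
    const PetscScalar *col = v + (size_t)j * lda; /* column j starts lda scalars after column j-1 */
    for (PetscInt i = 0; i < m; i++) y[i] += col[i] * x[j];
  }
}
.ve

This is the same triple (v + c_start * lda, x + c_start, y) handed to BLASgemv_("N", ...) in the hunks above; the transpose and Hermitian variants offset y instead of x.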
@@ -3256,7 +3282,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_SeqDense, . m - number of rows . n - number of columns - data - optional location of matrix data in column major order. Use `NULL` for PETSc - to control all matrix memory allocation. + to control all matrix memory allocation. Output Parameter: . A - the matrix @@ -3273,7 +3299,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_SeqDense, .seealso: [](ch_matrices), `Mat`, `MATSEQDENSE`, `MatCreate()`, `MatCreateDense()`, `MatSetValues()` @*/ -PetscErrorCode MatCreateSeqDense(MPI_Comm comm, PetscInt m, PetscInt n, PetscScalar *data, Mat *A) +PetscErrorCode MatCreateSeqDense(MPI_Comm comm, PetscInt m, PetscInt n, PetscScalar data[], Mat *A) { PetscFunctionBegin; PetscCall(MatCreate(comm, A)); @@ -3283,7 +3309,7 @@ PetscErrorCode MatCreateSeqDense(MPI_Comm comm, PetscInt m, PetscInt n, PetscSca PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqDenseSetPreallocation - Sets the array used for storing the matrix elements of a `MATSEQDENSE` matrix Collective @@ -3640,6 +3666,9 @@ PetscErrorCode MatCreate_SeqDense(Mat B) PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatDenseRestoreColumnVecWrite_C", MatDenseRestoreColumnVecWrite_SeqDense)); PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatDenseGetSubMatrix_C", MatDenseGetSubMatrix_SeqDense)); PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatDenseRestoreSubMatrix_C", MatDenseRestoreSubMatrix_SeqDense)); + PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMultAddColumnRange_C", MatMultAddColumnRange_SeqDense)); + PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMultHermitianTransposeColumnRange_C", MatMultHermitianTransposeColumnRange_SeqDense)); + PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMultHermitianTransposeAddColumnRange_C", MatMultHermitianTransposeAddColumnRange_SeqDense)); PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATSEQDENSE)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -3663,7 +3692,7 @@ PetscErrorCode MatCreate_SeqDense(Mat B) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseRestoreColumn()`, `MatDenseGetColumnVec()` @*/ -PetscErrorCode MatDenseGetColumn(Mat A, PetscInt col, PetscScalar **vals) +PetscErrorCode MatDenseGetColumn(Mat A, PetscInt col, PetscScalar *vals[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -3686,7 +3715,7 @@ PetscErrorCode MatDenseGetColumn(Mat A, PetscInt col, PetscScalar **vals) .seealso: [](ch_matrices), `Mat`, `MATDENSE`, `MatDenseGetColumn()` @*/ -PetscErrorCode MatDenseRestoreColumn(Mat A, PetscScalar **vals) +PetscErrorCode MatDenseRestoreColumn(Mat A, PetscScalar *vals[]) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); diff --git a/src/mat/impls/dense/seq/dense.h b/src/mat/impls/dense/seq/dense.h index 62e8f90ef0b..81bc2bb2b5c 100644 --- a/src/mat/impls/dense/seq/dense.h +++ b/src/mat/impls/dense/seq/dense.h @@ -96,6 +96,10 @@ PETSC_INTERN PetscErrorCode MatSetUp_SeqDense(Mat); PETSC_INTERN PetscErrorCode MatSetRandom_SeqDense(Mat, PetscRandom); PETSC_INTERN PetscErrorCode MatGetDiagonal_SeqDense(Mat, Vec); +PETSC_INTERN PetscErrorCode MatMultAddColumnRange_SeqDense(Mat, Vec, Vec, Vec, PetscInt, PetscInt); +PETSC_INTERN PetscErrorCode MatMultHermitianTransposeColumnRange_SeqDense(Mat, Vec, Vec, PetscInt, PetscInt); +PETSC_INTERN PetscErrorCode MatMultHermitianTransposeAddColumnRange_SeqDense(Mat, Vec, Vec, Vec, PetscInt, PetscInt); + #if defined(PETSC_HAVE_CUDA) PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode 
MatSeqDenseCUDAInvertFactors_Internal(Mat); PETSC_INTERN PetscErrorCode MatMatMultNumeric_SeqDenseCUDA_SeqDenseCUDA_Internal(Mat, Mat, Mat, PetscBool, PetscBool); diff --git a/src/mat/impls/dense/seq/ftn-custom/makefile b/src/mat/impls/dense/seq/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/impls/dense/seq/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/dense/seq/ftn-custom/zdensef.c b/src/mat/impls/dense/seq/ftn-custom/zdensef.c deleted file mode 100644 index ad3f88a041f..00000000000 --- a/src/mat/impls/dense/seq/ftn-custom/zdensef.c +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreateseqdense_ MATCREATESEQDENSE - #define matseqdensesetpreallocation_ MATSEQDENSESETPREALLOCATION -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreateseqdense_ matcreateseqdense - #define matseqdensesetpreallocation_ matseqdensesetpreallocation -#endif - -PETSC_EXTERN void matcreateseqdense_(MPI_Comm *comm, PetscInt *m, PetscInt *n, PetscScalar *data, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(data); - *ierr = MatCreateSeqDense(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *m, *n, data, newmat); -} - -PETSC_EXTERN void matseqdensesetpreallocation_(Mat *mat, PetscScalar *data, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(data); - *ierr = MatSeqDenseSetPreallocation(*mat, data); -} diff --git a/src/mat/impls/diagonal/diagonal.c b/src/mat/impls/diagonal/diagonal.c index 524c83dc63d..5dfebb30bb5 100644 --- a/src/mat/impls/diagonal/diagonal.c +++ b/src/mat/impls/diagonal/diagonal.c @@ -307,6 +307,21 @@ static PetscErrorCode MatDiagonalRestoreInverseDiagonal_Diagonal(Mat A, Vec *inv PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatPermute_Diagonal(Mat A, IS rowp, IS colp, Mat *B) +{ + Mat_Diagonal *ctx = (Mat_Diagonal *)A->data; + Vec v; + + PetscFunctionBegin; + PetscCheck(rowp == colp, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_INCOMP, "Row permutation and column permutation must be the same"); + PetscCall(VecDuplicate(ctx->diag, &v)); + PetscCall(VecCopy(ctx->diag, v)); + PetscCall(VecPermute(v, rowp, PETSC_FALSE)); + PetscCall(MatCreateDiagonal(v, B)); + PetscCall(VecDestroy(&v)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode MatDestroy_Diagonal(Mat mat) { Mat_Diagonal *ctx = (Mat_Diagonal *)mat->data; @@ -654,6 +669,7 @@ PETSC_INTERN PetscErrorCode MatCreate_Diagonal(Mat A) A->ops->getinfo = MatGetInfo_Diagonal; A->ops->axpy = MatAXPY_Diagonal; A->ops->setup = MatSetUp_Diagonal; + A->ops->permute = MatPermute_Diagonal; PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatDiagonalGetDiagonal_C", MatDiagonalGetDiagonal_Diagonal)); PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatDiagonalRestoreDiagonal_C", MatDiagonalRestoreDiagonal_Diagonal)); diff --git a/src/mat/impls/elemental/matelem.cxx b/src/mat/impls/elemental/matelem.cxx index f3527c1f732..fad3a01d773 100644 --- a/src/mat/impls/elemental/matelem.cxx +++ b/src/mat/impls/elemental/matelem.cxx @@ -775,7 +775,7 @@ static PetscErrorCode MatGetFactor_elemental_elemental(Mat A, MatFactorType ftyp PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Elemental(void) { 
PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERELEMENTAL, MATELEMENTAL, MAT_FACTOR_LU, MatGetFactor_elemental_elemental)); @@ -1329,6 +1329,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_Elemental, nullptr, /*150*/ nullptr, nullptr, + nullptr, nullptr}; /*MC diff --git a/src/mat/impls/fft/fft.c b/src/mat/impls/fft/fft.c index 55b88fd89a5..7725b2ad99a 100644 --- a/src/mat/impls/fft/fft.c +++ b/src/mat/impls/fft/fft.c @@ -16,7 +16,7 @@ static PetscErrorCode MatDestroy_FFT(Mat A) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateFFT - Creates a matrix object that provides FFT via an external package Collective diff --git a/src/mat/impls/fft/ftn-custom/makefile b/src/mat/impls/fft/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/mat/impls/fft/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/fft/ftn-custom/zfftf.c b/src/mat/impls/fft/ftn-custom/zfftf.c deleted file mode 100644 index 6a47530f5e1..00000000000 --- a/src/mat/impls/fft/ftn-custom/zfftf.c +++ /dev/null @@ -1,18 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreatefft_ MATCREATEFFT -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreatefft_ matcreatefft -#endif - -PETSC_EXTERN void matcreatefft_(MPI_Comm *comm, PetscInt *ndim, PetscInt *dim, char *type_name, Mat *A, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *mattype; - - FIXCHAR(type_name, len, mattype); - *ierr = MatCreateFFT(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *ndim, dim, mattype, A); - if (*ierr) return; - FREECHAR(type_name, mattype); -} diff --git a/src/mat/impls/h2opus/cuda/makefile b/src/mat/impls/h2opus/cuda/makefile index d34b0a51891..8e28529c611 100644 --- a/src/mat/impls/h2opus/cuda/makefile +++ b/src/mat/impls/h2opus/cuda/makefile @@ -1,5 +1,6 @@ -include ../../../../../petscdir.mk #requirespackage 'PETSC_HAVE_CUDA' +#requirespackage 'PETSC_HAVE_H2OPUS' MANSEC = Mat diff --git a/src/mat/impls/h2opus/cuda/math2opus.cu b/src/mat/impls/h2opus/cuda/math2opus.cu index 6d3ded39dfa..7df2eb8cc52 100644 --- a/src/mat/impls/h2opus/cuda/math2opus.cu +++ b/src/mat/impls/h2opus/cuda/math2opus.cu @@ -12,7 +12,7 @@ #include #include #include - #include + #include /*I "petscmat.h" I*/ #include /* math2opusutils */ @@ -1337,7 +1337,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_H2OPUS(Mat A) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatH2OpusOrthogonalize - Orthogonalize the basis tree of a hierarchical matrix. Input Parameter: @@ -1420,7 +1420,7 @@ PetscErrorCode MatH2OpusOrthogonalize(Mat A) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatH2OpusCompress - Compress a hierarchical matrix. Input Parameters: @@ -1527,7 +1527,7 @@ PetscErrorCode MatH2OpusCompress(Mat A, PetscReal tol) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatH2OpusSetSamplingMat - Set a matrix to be sampled from matrix-vector products on another matrix to construct a hierarchical matrix. Input Parameters: @@ -1625,7 +1625,7 @@ PetscErrorCode MatCreateH2OpusFromKernel(MPI_Comm comm, PetscInt m, PetscInt n, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateH2OpusFromMat - Creates a `MATH2OPUS` sampling from a user-supplied operator. 
Input Parameters: @@ -1713,7 +1713,7 @@ PetscErrorCode MatCreateH2OpusFromMat(Mat B, PetscInt spacedim, const PetscReal PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatH2OpusGetIndexMap - Access reordering index set. Input Parameter: @@ -1742,7 +1742,7 @@ PetscErrorCode MatH2OpusGetIndexMap(Mat A, IS *indexmap) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatH2OpusMapVec - Maps a vector between PETSc and H2Opus ordering Input Parameters: @@ -1795,7 +1795,7 @@ PetscErrorCode MatH2OpusMapVec(Mat A, PetscBool nativetopetsc, Vec in, Vec *out) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatH2OpusLowRankUpdate - Perform a low-rank update of the form $ A = A + s * U * V^T $ Input Parameters: diff --git a/src/mat/impls/htool/htool.cxx b/src/mat/impls/htool/htool.cxx index c8261442725..8d508b0c309 100644 --- a/src/mat/impls/htool/htool.cxx +++ b/src/mat/impls/htool/htool.cxx @@ -547,6 +547,8 @@ static PetscErrorCode MatHtoolGetHierarchicalMat_Htool(Mat A, const htool::Virtu /*@C MatHtoolGetHierarchicalMat - Retrieves the opaque pointer to a Htool virtual matrix stored in a `MATHTOOL`. + No Fortran Support, No C Support + Input Parameter: . A - hierarchical matrix @@ -581,6 +583,8 @@ static PetscErrorCode MatHtoolSetKernel_Htool(Mat A, MatHtoolKernelFn *kernel, v /*@C MatHtoolSetKernel - Sets the kernel and context used for the assembly of a `MATHTOOL`. + Collective, No Fortran Support + Input Parameters: + A - hierarchical matrix . kernel - computational kernel (or `NULL`) @@ -590,7 +594,7 @@ static PetscErrorCode MatHtoolSetKernel_Htool(Mat A, MatHtoolKernelFn *kernel, v .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatCreateHtoolFromKernel()` @*/ -PETSC_EXTERN PetscErrorCode MatHtoolSetKernel(Mat A, MatHtoolKernelFn *kernel, void *kernelctx) +PetscErrorCode MatHtoolSetKernel(Mat A, MatHtoolKernelFn *kernel, void *kernelctx) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -612,7 +616,7 @@ static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A, IS *is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatHtoolGetPermutationSource - Gets the permutation associated to the source cluster for a `MATHTOOL` matrix. Input Parameter: @@ -625,7 +629,7 @@ static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A, IS *is) .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationTarget()`, `MatHtoolUsePermutation()` @*/ -PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationSource(Mat A, IS *is) +PetscErrorCode MatHtoolGetPermutationSource(Mat A, IS *is) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -646,7 +650,7 @@ static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A, IS *is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatHtoolGetPermutationTarget - Gets the permutation associated to the target cluster for a `MATHTOOL` matrix. Input Parameter: @@ -659,7 +663,7 @@ static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A, IS *is) .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolUsePermutation()` @*/ -PETSC_EXTERN PetscErrorCode MatHtoolGetPermutationTarget(Mat A, IS *is) +PetscErrorCode MatHtoolGetPermutationTarget(Mat A, IS *is) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -677,7 +681,7 @@ static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A, PetscBool use) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatHtoolUsePermutation - Sets whether a `MATHTOOL` matrix should permute input (resp. output) vectors following its internal source (resp. 
target) permutation. Input Parameters: @@ -688,7 +692,7 @@ static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A, PetscBool use) .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolGetPermutationTarget()` @*/ -PETSC_EXTERN PetscErrorCode MatHtoolUsePermutation(Mat A, PetscBool use) +PetscErrorCode MatHtoolUsePermutation(Mat A, PetscBool use) { PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); @@ -802,6 +806,8 @@ static PetscErrorCode MatTranspose_Htool(Mat A, MatReuse reuse, Mat *B) /*@C MatCreateHtoolFromKernel - Creates a `MATHTOOL` from a user-supplied kernel. + Collective, No Fortran Support + Input Parameters: + comm - MPI communicator . m - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given) diff --git a/src/mat/impls/hypre/cuda/hypre1.cu b/src/mat/impls/hypre/cuda/hypre1.cu index fdafe06a2a6..c51eb5f8c8e 100644 --- a/src/mat/impls/hypre/cuda/hypre1.cu +++ b/src/mat/impls/hypre/cuda/hypre1.cu @@ -1,4 +1,4 @@ -#include +#include #include #include <../src/mat/impls/hypre/mhypre_kernels.hpp> diff --git a/src/mat/impls/hypre/cuda/makefile b/src/mat/impls/hypre/cuda/makefile index 0f8838c4e3b..750b50bb11c 100644 --- a/src/mat/impls/hypre/cuda/makefile +++ b/src/mat/impls/hypre/cuda/makefile @@ -1,4 +1,5 @@ -include ../../../../../petscdir.mk +#requirespackage 'PETSC_HAVE_HYPRE_DEVICE' #requirespackage 'PETSC_HAVE_CUDA' MANSEC = Mat diff --git a/src/mat/impls/hypre/hip/hypre2.hip.cpp b/src/mat/impls/hypre/hip/hypre2.hip.cpp index 6ba0031959c..f0e3d87e3de 100644 --- a/src/mat/impls/hypre/hip/hypre2.hip.cpp +++ b/src/mat/impls/hypre/hip/hypre2.hip.cpp @@ -1,4 +1,4 @@ -#include +#include #include #include <../src/mat/impls/hypre/mhypre_kernels.hpp> diff --git a/src/mat/impls/hypre/hip/makefile b/src/mat/impls/hypre/hip/makefile index e83000ab4ec..031b718d0d8 100644 --- a/src/mat/impls/hypre/hip/makefile +++ b/src/mat/impls/hypre/hip/makefile @@ -1,4 +1,5 @@ -include ../../../../../petscdir.mk +#requirespackage 'PETSC_HAVE_HYPRE_DEVICE' #requirespackage 'PETSC_HAVE_HIP' MANSEC = Mat diff --git a/src/mat/impls/hypre/kokkos/hypre3.kokkos.cxx b/src/mat/impls/hypre/kokkos/hypre3.kokkos.cxx index c79cc599986..ab6b21bba95 100644 --- a/src/mat/impls/hypre/kokkos/hypre3.kokkos.cxx +++ b/src/mat/impls/hypre/kokkos/hypre3.kokkos.cxx @@ -1,4 +1,4 @@ -#include +#include #include #include <../src/mat/impls/hypre/mhypre.h> diff --git a/src/mat/impls/hypre/kokkos/makefile b/src/mat/impls/hypre/kokkos/makefile index 8fc93d6ca6c..ffa59afcef8 100644 --- a/src/mat/impls/hypre/kokkos/makefile +++ b/src/mat/impls/hypre/kokkos/makefile @@ -1,4 +1,5 @@ -include ../../../../../petscdir.mk +#requirespackage 'PETSC_HAVE_HYPRE_DEVICE' #requirespackage 'PETSC_HAVE_KOKKOS' MANSEC = Mat diff --git a/src/mat/impls/hypre/mhypre.c b/src/mat/impls/hypre/mhypre.c index 4812664455e..b4ba6e0301a 100644 --- a/src/mat/impls/hypre/mhypre.c +++ b/src/mat/impls/hypre/mhypre.c @@ -145,6 +145,7 @@ static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij) PetscFunctionReturn(PETSC_SUCCESS); } PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name); + PetscFunctionReturn(PETSC_SUCCESS); } static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij) @@ -1849,7 +1850,7 @@ static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr /*@C MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix - Not Collective + Not Collective, No 
Fortran Support Input Parameter: . A - the `MATHYPRE` object diff --git a/src/mat/impls/is/matis.c b/src/mat/impls/is/matis.c index acee8761f0c..02a71642587 100644 --- a/src/mat/impls/is/matis.c +++ b/src/mat/impls/is/matis.c @@ -613,8 +613,8 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu const PetscInt *di, *dj, *oi, *oj; const PetscInt *odi, *odj, *ooi, *ooj; PetscInt *aux, *ii, *jj; - PetscInt bs, lc, dr, dc, oc, str, stc, nnz, i, jd, jo, cum; - PetscBool flg, ismpiaij, ismpibaij, was_inplace = PETSC_FALSE; + PetscInt rbs, cbs, lc, dr, dc, oc, str, stc, nnz, i, jd, jo, cum; + PetscBool flg, ismpiaij, ismpibaij, was_inplace = PETSC_FALSE, cong; PetscMPIInt size; PetscFunctionBegin; @@ -624,14 +624,15 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu PetscCall(MatConvert_SeqXAIJ_IS(A, type, reuse, newmat)); PetscFunctionReturn(PETSC_SUCCESS); } - if (reuse != MAT_REUSE_MATRIX && A->cmap->N == A->rmap->N) { + PetscCall(MatGetBlockSizes(A, &rbs, &cbs)); + PetscCall(MatHasCongruentLayouts(A, &cong)); + if (reuse != MAT_REUSE_MATRIX && cong && rbs == cbs) { PetscCall(MatMPIXAIJComputeLocalToGlobalMapping_Private(A, &rl2g)); PetscCall(MatCreate(comm, &B)); PetscCall(MatSetType(B, MATIS)); - PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N)); + PetscCall(MatSetSizes(B, A->rmap->n, A->rmap->n, A->rmap->N, A->rmap->N)); PetscCall(MatSetLocalToGlobalMapping(B, rl2g, rl2g)); - PetscCall(MatGetBlockSize(A, &bs)); - PetscCall(MatSetBlockSize(B, bs)); + PetscCall(MatSetBlockSizes(B, rbs, rbs)); PetscCall(ISLocalToGlobalMappingDestroy(&rl2g)); if (reuse == MAT_INPLACE_MATRIX) was_inplace = PETSC_TRUE; reuse = MAT_REUSE_MATRIX; @@ -640,7 +641,7 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu Mat *newlA, lA; IS rows, cols; const PetscInt *ridx, *cidx; - PetscInt rbs, cbs, nr, nc; + PetscInt nr, nc; if (!B) B = *newmat; PetscCall(MatISGetLocalToGlobalMapping(B, &rl2g, &cl2g)); @@ -678,14 +679,13 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu else *newmat = B; PetscFunctionReturn(PETSC_SUCCESS); } - /* rectangular case, just compress out the column space */ + /* general case, just compress out the column space */ PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij)); PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij)); if (ismpiaij) { - bs = 1; + cbs = 1; /* We cannot guarantee the off-process matrix will respect the column block size */ PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray)); } else if (ismpibaij) { - PetscCall(MatGetBlockSize(A, &bs)); PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray)); PetscCall(MatConvert(Ad, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ad)); PetscCall(MatConvert(Ao, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ao)); @@ -696,7 +696,7 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu /* access relevant information from MPIAIJ */ PetscCall(MatGetOwnershipRange(A, &str, NULL)); PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL)); - PetscCall(MatGetLocalSize(A, &dr, &dc)); + PetscCall(MatGetLocalSize(Ad, &dr, &dc)); PetscCall(MatGetLocalSize(Ao, NULL, &oc)); PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present"); @@ -712,13 +712,13 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu ooj = oj; /* generate l2g maps for rows and cols */ - PetscCall(ISCreateStride(comm, dr / bs, str / bs, 1, 
&is)); - if (bs > 1) { + PetscCall(ISCreateStride(comm, dr / rbs, str / rbs, 1, &is)); + if (rbs > 1) { IS is2; PetscCall(ISGetLocalSize(is, &i)); PetscCall(ISGetIndices(is, (const PetscInt **)&aux)); - PetscCall(ISCreateBlock(comm, bs, i, aux, PETSC_COPY_VALUES, &is2)); + PetscCall(ISCreateBlock(comm, rbs, i, aux, PETSC_COPY_VALUES, &is2)); PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux)); PetscCall(ISDestroy(&is)); is = is2; @@ -726,13 +726,13 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g)); PetscCall(ISDestroy(&is)); if (dr) { - PetscCall(PetscMalloc1((dc + oc) / bs, &aux)); - for (i = 0; i < dc / bs; i++) aux[i] = i + stc / bs; - for (i = 0; i < oc / bs; i++) aux[i + dc / bs] = garray[i]; - PetscCall(ISCreateBlock(comm, bs, (dc + oc) / bs, aux, PETSC_OWN_POINTER, &is)); + PetscCall(PetscMalloc1((dc + oc) / cbs, &aux)); + for (i = 0; i < dc / cbs; i++) aux[i] = i + stc / cbs; + for (i = 0; i < oc / cbs; i++) aux[i + dc / cbs] = garray[i]; + PetscCall(ISCreateBlock(comm, cbs, (dc + oc) / cbs, aux, PETSC_OWN_POINTER, &is)); lc = dc + oc; } else { - PetscCall(ISCreateBlock(comm, bs, 0, NULL, PETSC_OWN_POINTER, &is)); + PetscCall(ISCreateBlock(comm, cbs, 0, NULL, PETSC_OWN_POINTER, &is)); lc = 0; } PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g)); @@ -742,7 +742,7 @@ PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reu PetscCall(MatCreate(comm, &B)); PetscCall(MatSetSizes(B, dr, dc, PETSC_DECIDE, PETSC_DECIDE)); PetscCall(MatSetType(B, MATIS)); - PetscCall(MatSetBlockSize(B, bs)); + PetscCall(MatSetBlockSizes(B, rbs, cbs)); PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g)); PetscCall(ISLocalToGlobalMappingDestroy(&rl2g)); PetscCall(ISLocalToGlobalMappingDestroy(&cl2g)); @@ -2889,10 +2889,10 @@ static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m, const PetscInt PetscFunctionBegin; PetscCall(ISGlobalToLocalMappingApplyBlock(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l)); if (m != n || rows != cols || is->cmapping != is->rmapping) { - PetscCall(ISGlobalToLocalMappingApply(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l)); + PetscCall(ISGlobalToLocalMappingApplyBlock(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l)); PetscCall(MatSetValuesBlocked(is->A, m, rows_l, n, cols_l, values, addv)); } else { - PetscCall(MatSetValuesBlocked(is->A, m, rows_l, n, rows_l, values, addv)); + PetscCall(MatSetValuesBlocked(is->A, m, rows_l, m, rows_l, values, addv)); } PetscFunctionReturn(PETSC_SUCCESS); } @@ -3185,7 +3185,7 @@ static PetscErrorCode MatISSetLocalMatType_IS(Mat mat, MatType mtype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatISSetLocalMatType - Specifies the type of local matrix inside the `MATIS` Logically Collective. @@ -3668,6 +3668,15 @@ static PetscErrorCode MatISGetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapp PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatSetBlockSizes_IS(Mat A, PetscInt rbs, PetscInt cbs) +{ + Mat_IS *a = (Mat_IS *)A->data; + + PetscFunctionBegin; + if (a->A) PetscCall(MatSetBlockSizes(a->A, rbs, cbs)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*MC MATIS - MATIS = "is" - A matrix type to be used for non-overlapping domain decomposition methods (e.g. `PCBDDC` or `KSPFETIDP`). This stores the matrices in globally unassembled form and the parallel matrix vector product is handled "implicitly". 
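A note on "handled implicitly": the global operator is never assembled. With restriction maps R_i, the action is y = sum_i R_i^T A_i R_i x, that is, scatter the input to each subdomain, multiply by the local matrix, and scatter-add the result back. A rough sketch with hypothetical scatter and local-matrix names (cctx, rctx, A_local, x_local, y_local), not the actual MatMult_IS code:

.vb
/* Sketch of an unassembled matrix-vector product, y = sum_i R_i^T A_i R_i x */
PetscCall(VecScatterBegin(cctx, x, x_local, INSERT_VALUES, SCATTER_FORWARD)); /* R_i x */
PetscCall(VecScatterEnd(cctx, x, x_local, INSERT_VALUES, SCATTER_FORWARD));
PetscCall(MatMult(A_local, x_local, y_local));                               /* A_i (R_i x) */
PetscCall(VecSet(y, 0.0));
PetscCall(VecScatterBegin(rctx, y_local, y, ADD_VALUES, SCATTER_REVERSE));   /* accumulate R_i^T (...) */
PetscCall(VecScatterEnd(rctx, y_local, y, ADD_VALUES, SCATTER_REVERSE));
.ve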
@@ -3741,6 +3750,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A) A->ops->getdiagonalblock = MatGetDiagonalBlock_IS; A->ops->createsubmatrices = MatCreateSubMatrices_IS; A->ops->increaseoverlap = MatIncreaseOverlap_IS; + A->ops->setblocksizes = MatSetBlockSizes_IS; /* special MATIS functions */ PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", MatISSetLocalMatType_IS)); diff --git a/src/mat/impls/kaij/kaij.c b/src/mat/impls/kaij/kaij.c index 76f87866537..e789ca1092a 100644 --- a/src/mat/impls/kaij/kaij.c +++ b/src/mat/impls/kaij/kaij.c @@ -26,7 +26,7 @@ #include <../src/mat/utils/freespace.h> #include -/*@C +/*@ MatKAIJGetAIJ - Get the `MATAIJ` matrix describing the blockwise action of the `MATKAIJ` matrix Not Collective, but if the `MATKAIJ` matrix is parallel, the `MATAIJ` matrix is also parallel @@ -83,7 +83,7 @@ PetscErrorCode MatKAIJGetAIJ(Mat A, Mat *B) .seealso: [](ch_matrices), `Mat`, `MATKAIJ`, `MatCreateKAIJ()`, `MatGetBlockSizes()` @*/ -PetscErrorCode MatKAIJGetS(Mat A, PetscInt *m, PetscInt *n, PetscScalar **S) +PetscErrorCode MatKAIJGetS(Mat A, PetscInt *m, PetscInt *n, PetscScalar *S[]) { Mat_SeqKAIJ *b = (Mat_SeqKAIJ *)A->data; @@ -114,7 +114,7 @@ PetscErrorCode MatKAIJGetS(Mat A, PetscInt *m, PetscInt *n, PetscScalar **S) .seealso: [](ch_matrices), `Mat`, `MATKAIJ`, `MatCreateKAIJ()`, `MatGetBlockSizes()` @*/ -PetscErrorCode MatKAIJGetSRead(Mat A, PetscInt *m, PetscInt *n, const PetscScalar **S) +PetscErrorCode MatKAIJGetSRead(Mat A, PetscInt *m, PetscInt *n, const PetscScalar *S[]) { Mat_SeqKAIJ *b = (Mat_SeqKAIJ *)A->data; @@ -142,7 +142,7 @@ PetscErrorCode MatKAIJGetSRead(Mat A, PetscInt *m, PetscInt *n, const PetscScala .seealso: [](ch_matrices), `Mat`, `MATKAIJ`, `MatKAIJGetS()`, `MatKAIJGetSRead()`, `MatKAIJRestoreSRead()` @*/ -PetscErrorCode MatKAIJRestoreS(Mat A, PetscScalar **S) +PetscErrorCode MatKAIJRestoreS(Mat A, PetscScalar *S[]) { PetscFunctionBegin; if (S) *S = NULL; @@ -167,7 +167,7 @@ PetscErrorCode MatKAIJRestoreS(Mat A, PetscScalar **S) .seealso: [](ch_matrices), `Mat`, `MATKAIJ`, `MatKAIJGetS()`, `MatKAIJGetSRead()` @*/ -PetscErrorCode MatKAIJRestoreSRead(Mat A, const PetscScalar **S) +PetscErrorCode MatKAIJRestoreSRead(Mat A, const PetscScalar *S[]) { PetscFunctionBegin; if (S) *S = NULL; @@ -194,7 +194,7 @@ PetscErrorCode MatKAIJRestoreSRead(Mat A, const PetscScalar **S) .seealso: [](ch_matrices), `Mat`, `MATKAIJ`, `MatCreateKAIJ()`, `MatGetBlockSizes()` @*/ -PetscErrorCode MatKAIJGetT(Mat A, PetscInt *m, PetscInt *n, PetscScalar **T) +PetscErrorCode MatKAIJGetT(Mat A, PetscInt *m, PetscInt *n, PetscScalar *T[]) { Mat_SeqKAIJ *b = (Mat_SeqKAIJ *)A->data; @@ -225,7 +225,7 @@ PetscErrorCode MatKAIJGetT(Mat A, PetscInt *m, PetscInt *n, PetscScalar **T) .seealso: [](ch_matrices), `Mat`, `MATKAIJ`, `MatCreateKAIJ()`, `MatGetBlockSizes()` @*/ -PetscErrorCode MatKAIJGetTRead(Mat A, PetscInt *m, PetscInt *n, const PetscScalar **T) +PetscErrorCode MatKAIJGetTRead(Mat A, PetscInt *m, PetscInt *n, const PetscScalar *T[]) { Mat_SeqKAIJ *b = (Mat_SeqKAIJ *)A->data; @@ -253,7 +253,7 @@ PetscErrorCode MatKAIJGetTRead(Mat A, PetscInt *m, PetscInt *n, const PetscScala .seealso: [](ch_matrices), `Mat`, `MATKAIJ`, `MatKAIJGetT()`, `MatKAIJGetTRead()`, `MatKAIJRestoreTRead()` @*/ -PetscErrorCode MatKAIJRestoreT(Mat A, PetscScalar **T) +PetscErrorCode MatKAIJRestoreT(Mat A, PetscScalar *T[]) { PetscFunctionBegin; if (T) *T = NULL; @@ -278,7 +278,7 @@ PetscErrorCode MatKAIJRestoreT(Mat A, PetscScalar **T) .seealso: [](ch_matrices), `Mat`, 
`MATKAIJ`, `MatKAIJGetT()`, `MatKAIJGetTRead()` @*/ -PetscErrorCode MatKAIJRestoreTRead(Mat A, const PetscScalar **T) +PetscErrorCode MatKAIJRestoreTRead(Mat A, const PetscScalar *T[]) { PetscFunctionBegin; if (T) *T = NULL; @@ -323,7 +323,7 @@ PetscErrorCode MatKAIJSetAIJ(Mat A, Mat B) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatKAIJSetS - Set the `S` matrix describing the shift action of the `MATKAIJ` matrix Logically Collective; the entire `S` is stored independently on all processes. @@ -359,7 +359,7 @@ PetscErrorCode MatKAIJSetS(Mat A, PetscInt p, PetscInt q, const PetscScalar S[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatKAIJGetScaledIdentity - Check if both `S` and `T` are scaled identities. Logically Collective. @@ -400,7 +400,7 @@ PetscErrorCode MatKAIJGetScaledIdentity(Mat A, PetscBool *identity) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatKAIJSetT - Set the transformation matrix `T` associated with the `MATKAIJ` matrix Logically Collective; the entire `T` is stored independently on all processes. diff --git a/src/mat/impls/mffd/ftn-custom/zmffdf.c b/src/mat/impls/mffd/ftn-custom/zmffdf.c index f6f8ac91325..aa7c82ae31a 100644 --- a/src/mat/impls/mffd/ftn-custom/zmffdf.c +++ b/src/mat/impls/mffd/ftn-custom/zmffdf.c @@ -2,15 +2,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matmffdsetfunction_ MATMFFDSETFUNCTION - #define matmffdsettype_ MATMFFDSETTYPE - #define matmffdsetoptionsprefix_ MATMFFDSETOPTIONSPREFIX - #define matmffdsetbase_ MATMFFDSETBASE + #define matmffdsetfunction_ MATMFFDSETFUNCTION + #define matmffdsetbase_ MATMFFDSETBASE #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matmffdsetfunction_ matmffdsetfunction - #define matmffdsettype_ matmffdsettype - #define matmffdsetoptionsprefix_ matmffdsetoptionsprefix - #define matmffdsetbase_ matmffdsetbase + #define matmffdsetfunction_ matmffdsetfunction + #define matmffdsetbase_ matmffdsetbase #endif static PetscErrorCode ourmatmffdfunction(void *ctx, Vec x, Vec f) @@ -28,21 +24,3 @@ PETSC_EXTERN void matmffdsetfunction_(Mat *mat, void (*func)(void *, Vec *, Vec *ierr = MatMFFDSetFunction(*mat, ourmatmffdfunction, *mat); } - -PETSC_EXTERN void matmffdsettype_(Mat *mat, char *ftype, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(ftype, len, t); - *ierr = MatMFFDSetType(*mat, t); - if (*ierr) return; - FREECHAR(ftype, t); -} - -PETSC_EXTERN void matmffdsetoptionsprefix_(Mat *mat, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(prefix, len, t); - *ierr = MatMFFDSetOptionsPrefix(*mat, t); - if (*ierr) return; - FREECHAR(prefix, t); -} diff --git a/src/mat/impls/mffd/mffd.c b/src/mat/impls/mffd/mffd.c index eede05adf04..8ba82654d22 100644 --- a/src/mat/impls/mffd/mffd.c +++ b/src/mat/impls/mffd/mffd.c @@ -91,7 +91,7 @@ static PetscErrorCode MatMFFDSetType_MFFD(Mat mat, MatMFFDType ftype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatMFFDSetType - Sets the method that is used to compute the differencing parameter for finite difference matrix-free formulations. @@ -171,7 +171,7 @@ static PetscErrorCode MatMFFDResetHHistory_MFFD(Mat J) /*@C MatMFFDRegister - Adds a method to the `MATMFFD` registry. 
- Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined compute-h module @@ -187,10 +187,8 @@ static PetscErrorCode MatMFFDResetHHistory_MFFD(Mat J) MatMFFDRegister("my_h", MyHCreate); .ve - Then, your solver can be chosen with the procedural interface via -$ `MatMFFDSetType`(mfctx, "my_h") - or at runtime via the option -$ -mat_mffd_type my_h + Then, your solver can be chosen with the procedural interface via `MatMFFDSetType`(mfctx, "my_h")` or at runtime via the option + `-mat_mffd_type my_h` .seealso: [](ch_matrices), `Mat`, `MATMFFD`, `MatMFFDRegisterAll()`, `MatMFFDRegisterDestroy()` @*/ @@ -465,7 +463,7 @@ static PetscErrorCode MatMFFDSetCheckh_MFFD(Mat J, FCN3 fun, void *ectx) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatMFFDSetOptionsPrefix - Sets the prefix used for searching for all MATMFFD` options in the database. diff --git a/src/mat/impls/nest/ftn-custom/zmatnestf.c b/src/mat/impls/nest/ftn-custom/zmatnestf.c index 66f2c0e6bdb..521a77b168d 100644 --- a/src/mat/impls/nest/ftn-custom/zmatnestf.c +++ b/src/mat/impls/nest/ftn-custom/zmatnestf.c @@ -3,11 +3,9 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define matcreatenest_ MATCREATENEST - #define matnestgetiss_ MATNESTGETISS #define matnestgetsubmats_ MATNESTGETSUBMATS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define matcreatenest_ matcreatenest - #define matnestgetiss_ matnestgetiss #define matnestgetsubmats_ matnestgetsubmats #endif @@ -31,13 +29,6 @@ PETSC_EXTERN void matcreatenest_(MPI_Fint *comm, PetscInt *nr, IS is_row[], Pets *ierr = PetscFree(m); } -PETSC_EXTERN void matnestgetiss_(Mat *A, IS rows[], IS cols[], int *ierr) -{ - CHKFORTRANNULLOBJECT(rows); - CHKFORTRANNULLOBJECT(cols); - *ierr = MatNestGetISs(*A, rows, cols); -} - PETSC_EXTERN void matnestgetsubmats_(Mat *A, PetscInt *M, PetscInt *N, Mat *sub, int *ierr) { PetscInt i, j, m, n; diff --git a/src/mat/impls/nest/matnest.c b/src/mat/impls/nest/matnest.c index 6ed31076df4..f8d027e1ca7 100644 --- a/src/mat/impls/nest/matnest.c +++ b/src/mat/impls/nest/matnest.c @@ -1,5 +1,6 @@ #include <../src/mat/impls/nest/matnestimpl.h> /*I "petscmat.h" I*/ #include <../src/mat/impls/aij/seq/aij.h> +#include <../src/mat/impls/shell/shell.h> #include static PetscErrorCode MatSetUp_NestIS_Private(Mat, PetscInt, const IS[], PetscInt, const IS[]); @@ -1184,8 +1185,8 @@ static PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Ma . A - nest matrix Output Parameters: -+ M - number of rows in the nest matrix -. N - number of cols in the nest matrix ++ M - number of submatrix rows in the nest matrix +. N - number of submatrix columns in the nest matrix - mat - array of matrices Level: developer @@ -1194,8 +1195,7 @@ static PetscErrorCode MatNestGetSubMats_Nest(Mat A, PetscInt *M, PetscInt *N, Ma The user should not free the array `mat`. Fortran Notes: - This routine has a calling sequence -$ call MatNestGetSubMats(A, M, N, mat, ierr) + This routine has a calling sequence `call MatNestGetSubMats(A, M, N, mat, ierr)` where the space allocated for the optional argument `mat` is assumed large enough (if provided). Matrices in `mat` are returned in row-major order, see `MatCreateNest()` for an example. 
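A hedged C usage sketch of the `MatNestGetSubMats()` calling sequence documented above (the loop bounds and viewer choice are illustrative only):

.vb
Mat    **sub;
PetscInt nr, nc;

PetscCall(MatNestGetSubMats(A, &nr, &nc, &sub));
/* sub[i][j] is block (i,j) in row-major order; entries may be NULL; do not free sub */
for (PetscInt i = 0; i < nr; i++)
  for (PetscInt j = 0; j < nc; j++)
    if (sub[i][j]) PetscCall(MatView(sub[i][j], PETSC_VIEWER_STDOUT_WORLD));
.ve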
@@ -1234,6 +1234,9 @@ static PetscErrorCode MatNestGetSize_Nest(Mat A, PetscInt *M, PetscInt *N) Level: developer + Note: + `size` refers to the number of submatrices in the row and column directions of the nested matrix + .seealso: [](ch_matrices), `Mat`, `MATNEST`, `MatNestGetSubMat()`, `MatNestGetSubMats()`, `MatCreateNest()`, `MatNestGetLocalISs()`, `MatNestGetISs()` @*/ @@ -1267,8 +1270,8 @@ static PetscErrorCode MatNestGetISs_Nest(Mat A, IS rows[], IS cols[]) . A - `MATNEST` matrix Output Parameters: -+ rows - array of row index sets -- cols - array of column index sets ++ rows - array of row index sets (pass `NULL` to ignore) +- cols - array of column index sets (pass `NULL` to ignore) Level: advanced @@ -1299,7 +1302,7 @@ static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces of a `MATNEST` Not Collective @@ -1308,8 +1311,8 @@ static PetscErrorCode MatNestGetLocalISs_Nest(Mat A, IS rows[], IS cols[]) . A - `MATNEST` matrix Output Parameters: -+ rows - array of row index sets (or `NULL` to ignore) -- cols - array of column index sets (or `NULL` to ignore) ++ rows - array of row index sets (pass `NULL` to ignore) +- cols - array of column index sets (pass `NULL` to ignore) Level: advanced @@ -1339,7 +1342,7 @@ static PetscErrorCode MatNestSetVecType_Nest(Mat A, VecType vtype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatNestSetVecType - Sets the type of `Vec` returned by `MatCreateVecs()` Not Collective @@ -1467,7 +1470,7 @@ static PetscErrorCode MatNestSetSubMats_Nest(Mat A, PetscInt nr, const IS is_row Notes: This always resets any block matrix information previously set. - Pass `NULL` in the correspoding entry of `a` for an empty block. + Pass `NULL` in the corresponding entry of `a` for an empty block. In both C and Fortran, `a` must be a row-major order array containing the matrices. See `MatCreateNest()` for an example. 
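To make the row-major requirement concrete, a small sketch for a 2x2 block system with one empty block (A00, A01, A11 are assumed pre-built with compatible layouts, and Anest is assumed already set to type `MATNEST`):

.vb
Mat blocks[4] = {A00, A01, NULL, A11}; /* row-major: row 0 = {A00, A01}, row 1 = {NULL, A11} */

PetscCall(MatNestSetSubMats(Anest, 2, NULL, 2, NULL, blocks)); /* NULL index sets: contiguous layout derived from the blocks */
.ve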
@@ -1876,18 +1879,23 @@ static PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A, MatType newtype, MatReu PetscCall(ISGetLocalSize(nest->isglobal.row[i], &ncr)); for (j = 0; j < nest->nc; ++j) { if (aii[i * nest->nc + j]) { - PetscScalar *nvv = avv[i * nest->nc + j]; + PetscScalar *nvv = avv[i * nest->nc + j], vscale = 1.0, vshift = 0.0; PetscInt *nii = aii[i * nest->nc + j]; PetscInt *njj = ajj[i * nest->nc + j]; PetscInt ir, cst; + if (trans[i * nest->nc + j]) { + vscale = ((Mat_Shell *)nest->m[i][j]->data)->vscale; + vshift = ((Mat_Shell *)nest->m[i][j]->data)->vshift; + } PetscCall(ISStrideGetInfo(nest->isglobal.col[j], &cst, NULL)); for (ir = rst; ir < ncr + rst; ++ir) { PetscInt ij, rsize = nii[1] - nii[0], ist = ii[ir] + ci[ir]; for (ij = 0; ij < rsize; ij++) { jj[ist + ij] = *njj + cst; - vv[ist + ij] = *nvv; + vv[ist + ij] = vscale * *nvv; + if (PetscUnlikely(vshift != 0.0 && *njj == ir - rst)) vv[ist + ij] += vshift; njj++; nvv++; } @@ -2044,6 +2052,7 @@ static PetscErrorCode MatConvert_Nest_AIJ(Mat A, MatType newtype, MatReuse reuse PetscCall(PetscObjectTypeCompare((PetscObject)Bt, MATSEQAIJ, &fast)); } } + if (fast) fast = (PetscBool)(!((Mat_Shell *)B->data)->zrows && !((Mat_Shell *)B->data)->zcols && !((Mat_Shell *)B->data)->axpy && !((Mat_Shell *)B->data)->left && !((Mat_Shell *)B->data)->right && !((Mat_Shell *)B->data)->dshift); } } } diff --git a/src/mat/impls/python/ftn-custom/makefile b/src/mat/impls/python/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/mat/impls/python/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/python/ftn-custom/zpythonmf.c b/src/mat/impls/python/ftn-custom/zpythonmf.c deleted file mode 100644 index aefd0faea4e..00000000000 --- a/src/mat/impls/python/ftn-custom/zpythonmf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matpythonsettype_ MATPYTHONSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matpythonsettype_ matpythonsettype -#endif - -PETSC_EXTERN void matpythonsettype_(Mat *mat, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(name, len, t); - *ierr = MatPythonSetType(*mat, t); - if (*ierr) return; - FREECHAR(name, t); -} diff --git a/src/mat/impls/python/pythonmat.c b/src/mat/impls/python/pythonmat.c index 6ad662e0cda..c8acaf428e2 100644 --- a/src/mat/impls/python/pythonmat.c +++ b/src/mat/impls/python/pythonmat.c @@ -1,6 +1,6 @@ #include /*I "petscmat.h" I*/ -/*@C +/*@ MatPythonSetType - Initialize a `Mat` object implemented in Python. Collective @@ -25,7 +25,7 @@ PetscErrorCode MatPythonSetType(Mat mat, const char pyname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPythonGetType - Get the Python name of a `Mat` object implemented in Python. Not Collective @@ -49,7 +49,7 @@ PetscErrorCode MatPythonGetType(Mat mat, const char *pyname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatPythonCreate - Create a `Mat` object implemented in Python. 
Collective diff --git a/src/mat/impls/sbaij/mpi/ftn-custom/makefile b/src/mat/impls/sbaij/mpi/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/impls/sbaij/mpi/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/sbaij/mpi/ftn-custom/zmpisbaijf.c b/src/mat/impls/sbaij/mpi/ftn-custom/zmpisbaijf.c deleted file mode 100644 index 6b07a250d71..00000000000 --- a/src/mat/impls/sbaij/mpi/ftn-custom/zmpisbaijf.c +++ /dev/null @@ -1,24 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreatesbaij_ MATCREATESBAIJ - #define matmpisbaijsetpreallocation_ MATMPISBAIJSETPREALLOCATION -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreatesbaij_ matcreatesbaij - #define matmpisbaijsetpreallocation_ matmpisbaijsetpreallocation -#endif - -PETSC_EXTERN void matcreatesbaij_(MPI_Comm *comm, PetscInt *bs, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, PetscInt *d_nz, PetscInt *d_nnz, PetscInt *o_nz, PetscInt *o_nnz, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(d_nnz); - CHKFORTRANNULLINTEGER(o_nnz); - *ierr = MatCreateSBAIJ(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *m, *n, *M, *N, *d_nz, d_nnz, *o_nz, o_nnz, newmat); -} - -PETSC_EXTERN void matmpisbaijsetpreallocation_(Mat *mat, PetscInt *bs, PetscInt *d_nz, PetscInt *d_nnz, PetscInt *o_nz, PetscInt *o_nnz, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(d_nnz); - CHKFORTRANNULLINTEGER(o_nnz); - *ierr = MatMPISBAIJSetPreallocation(*mat, *bs, *d_nz, d_nnz, *o_nz, o_nnz); -} diff --git a/src/mat/impls/sbaij/mpi/mpisbaij.c b/src/mat/impls/sbaij/mpi/mpisbaij.c index 304993943dd..1cc64920ee7 100644 --- a/src/mat/impls/sbaij/mpi/mpisbaij.c +++ b/src/mat/impls/sbaij/mpi/mpisbaij.c @@ -234,7 +234,6 @@ static PetscErrorCode MatRetrieveValues_MPISBAIJ(Mat mat) PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \ rp[_i] = bcol; \ ap[bs2 * _i + bs * cidx + ridx] = value; \ - A->nonzerostate++; \ a_noinsert:; \ ailen[brow] = nrow; \ } while (0) @@ -275,7 +274,6 @@ static PetscErrorCode MatRetrieveValues_MPISBAIJ(Mat mat) PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \ rp[_i] = bcol; \ ap[bs2 * _i + bs * cidx + ridx] = value; \ - B->nonzerostate++; \ b_noinsert:; \ bilen[brow] = nrow; \ } while (0) @@ -622,22 +620,11 @@ static PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat, PetscInt m, const Pe if (mat->was_assembled) { if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat)); -#if defined(PETSC_USE_DEBUG) - #if defined(PETSC_USE_CTABLE) - { - PetscInt data; - PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &data)); - PetscCheck((data - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap"); - } - #else - PetscCheck((baij->colmap[in[j]] - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap"); - #endif -#endif #if defined(PETSC_USE_CTABLE) PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col)); - col = (col - 1) / bs; + col = col < 1 ? -1 : (col - 1) / bs; #else - col = (baij->colmap[in[j]] - 1) / bs; + col = baij->colmap[in[j]] < 1 ? 
-1 : (baij->colmap[in[j]] - 1) / bs; #endif if (col < 0 && !((Mat_SeqBAIJ *)baij->A->data)->nonew) { PetscCall(MatDisAssemble_MPISBAIJ(mat)); @@ -1964,6 +1951,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ, NULL, /*150*/ NULL, MatEliminateZeros_MPISBAIJ, + NULL, NULL}; static PetscErrorCode MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz) @@ -2255,7 +2243,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B) .seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MATMPISBAIJ`, `MatCreateSBAIJ()`, `MATSEQSBAIJ`, `MATMPISBAIJ` M*/ -/*@C +/*@ MatMPISBAIJSetPreallocation - For good matrix assembly performance the user should preallocate the matrix storage by setting the parameters d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, @@ -2268,20 +2256,20 @@ M*/ . bs - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs() . d_nz - number of block nonzeros per block row in diagonal portion of local - submatrix (same for all local rows) + submatrix (same for all local rows) . d_nnz - array containing the number of block nonzeros in the various block rows - in the upper triangular and diagonal part of the in diagonal portion of the local - (possibly different for each block row) or `NULL`. If you plan to factor the matrix you must leave room - for the diagonal entry and set a value even if it is zero. + in the upper triangular and diagonal part of the in diagonal portion of the local + (possibly different for each block row) or `NULL`. If you plan to factor the matrix you must leave room + for the diagonal entry and set a value even if it is zero. . o_nz - number of block nonzeros per block row in the off-diagonal portion of local - submatrix (same for all local rows). + submatrix (same for all local rows). - o_nnz - array containing the number of nonzeros in the various block rows of the - off-diagonal portion of the local submatrix that is right of the diagonal - (possibly different for each block row) or `NULL`. + off-diagonal portion of the local submatrix that is right of the diagonal + (possibly different for each block row) or `NULL`. Options Database Keys: + -mat_no_unroll - uses code that does not unroll the loops in the - block calculations (much slower) + block calculations (much slower) - -mat_block_size - size of the blocks to use Level: intermediate @@ -2347,7 +2335,7 @@ PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, co } // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format, `MATSBAIJ`, (block compressed row). For good matrix assembly performance the user should preallocate the matrix storage by setting the parameters @@ -2360,35 +2348,35 @@ PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, co . bs - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()` . 
m - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given) - This value should be the same as the local size used in creating the - y vector for the matrix-vector product y = Ax. + This value should be the same as the local size used in creating the + y vector for the matrix-vector product y = Ax. . n - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given) - This value should be the same as the local size used in creating the - x vector for the matrix-vector product y = Ax. + This value should be the same as the local size used in creating the + x vector for the matrix-vector product y = Ax. . M - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given) . N - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given) . d_nz - number of block nonzeros per block row in diagonal portion of local - submatrix (same for all local rows) + submatrix (same for all local rows) . d_nnz - array containing the number of block nonzeros in the various block rows - in the upper triangular portion of the in diagonal portion of the local - (possibly different for each block block row) or `NULL`. - If you plan to factor the matrix you must leave room for the diagonal entry and - set its value even if it is zero. + in the upper triangular portion of the diagonal portion of the local submatrix + (possibly different for each block row) or `NULL`. + If you plan to factor the matrix you must leave room for the diagonal entry and + set its value even if it is zero. . o_nz - number of block nonzeros per block row in the off-diagonal portion of local - submatrix (same for all local rows). + submatrix (same for all local rows). - o_nnz - array containing the number of nonzeros in the various block rows of the - off-diagonal portion of the local submatrix (possibly different for - each block row) or `NULL`. + off-diagonal portion of the local submatrix (possibly different for + each block row) or `NULL`. Output Parameter: . A - the matrix Options Database Keys: + -mat_no_unroll - uses code that does not unroll the loops in the - block calculations (much slower) + block calculations (much slower) . -mat_block_size - size of the blocks to use - -mat_mpi - use the parallel matrix data structures even on one processor - (defaults to using SeqBAIJ format on one processor) + (defaults to using SeqBAIJ format on one processor) Level: intermediate @@ -2406,6 +2394,9 @@ PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, co If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor than it must be used on all processors that share the object for that argument. + If `m` and `n` are not `PETSC_DECIDE`, then the values determine the `PetscLayout` of the matrix and the ranges returned by + `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`. + If the *_nnz parameter is given then the *_nz parameter is ignored Storage Information: @@ -2443,7 +2434,8 @@ PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, co In general, for PDE problems in which most nonzeros are near the diagonal, one expects `d_nz` >> `o_nz`. 
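
For example, a minimal sketch of this preallocation pattern; the sizes, the 3/1 per-block-row bounds, and the single inserted diagonal block are illustrative placeholders, not taken from the patch:

#include <petscmat.h>

int main(int argc, char **argv)
{
  Mat         A;
  PetscInt    bs = 2, mb = 4, rstart, brow; /* 4 local block rows of size 2, illustrative */
  PetscScalar v[4] = {4.0, 1.0, 1.0, 4.0};  /* one symmetric bs x bs block */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  /* 3 block nonzeros per block row in the diagonal part, 1 in the off-diagonal part */
  PetscCall(MatCreateSBAIJ(PETSC_COMM_WORLD, bs, bs * mb, bs * mb, PETSC_DETERMINE, PETSC_DETERMINE, 3, NULL, 1, NULL, &A));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  brow = rstart / bs; /* first locally owned block row, in block indices */
  PetscCall(MatSetValuesBlocked(A, 1, &brow, 1, &brow, v, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}
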
-.seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()` +.seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, + `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout` @*/ PetscErrorCode MatCreateSBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A) { @@ -2833,7 +2825,7 @@ PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm comm, PetscInt bs, PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatMPISBAIJSetPreallocationCSR - Creates a sparse parallel matrix in `MATMPISBAIJ` format using the given nonzero structure and (optional) numerical values Collective diff --git a/src/mat/impls/sbaij/seq/ftn-custom/makefile b/src/mat/impls/sbaij/seq/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/impls/sbaij/seq/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/sbaij/seq/ftn-custom/zsbaijf.c b/src/mat/impls/sbaij/seq/ftn-custom/zsbaijf.c deleted file mode 100644 index 9fa95a1cb83..00000000000 --- a/src/mat/impls/sbaij/seq/ftn-custom/zsbaijf.c +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreateseqsbaij_ MATCREATESEQSBAIJ - #define matseqsbaijsetpreallocation_ MATSEQSBAIJSETPREALLOCATION -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreateseqsbaij_ matcreateseqsbaij - #define matseqsbaijsetpreallocation_ matseqsbaijsetpreallocation -#endif - -PETSC_EXTERN void matcreateseqsbaij_(MPI_Comm *comm, PetscInt *bs, PetscInt *m, PetscInt *n, PetscInt *nz, PetscInt *nnz, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nnz); - *ierr = MatCreateSeqSBAIJ(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *m, *n, *nz, nnz, newmat); -} - -PETSC_EXTERN void matseqsbaijsetpreallocation_(Mat *mat, PetscInt *bs, PetscInt *nz, PetscInt *nnz, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nnz); - *ierr = MatSeqSBAIJSetPreallocation(*mat, *bs, *nz, nnz); -} diff --git a/src/mat/impls/sbaij/seq/sbaij.c b/src/mat/impls/sbaij/seq/sbaij.c index c5d57d594a1..43dec5f0baf 100644 --- a/src/mat/impls/sbaij/seq/sbaij.c +++ b/src/mat/impls/sbaij/seq/sbaij.c @@ -658,7 +658,6 @@ PetscErrorCode MatSetValuesBlocked_SeqSBAIJ(Mat A, PetscInt m, const PetscInt im PetscFunctionBegin; if (roworiented) stepval = (n - 1) * bs; else stepval = (m - 1) * bs; - for (k = 0; k < m; k++) { /* loop over added rows */ row = im[k]; if (row < 0) continue; @@ -889,7 +888,6 @@ PetscErrorCode MatSetValues_SeqSBAIJ(Mat A, PetscInt m, const PetscInt im[], Pet ap[bs2 * i + bs * cidx + ridx] = value; /* for diag block, add/insert its symmetric element a(cidx,ridx) */ if (brow == bcol && ridx < cidx) ap[bs2 * i + bs * ridx + cidx] = value; - A->nonzerostate++; noinsert1:; low = i; } @@ -1418,6 +1416,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_SeqSBAIJ, NULL, /*150*/ NULL, MatEliminateZeros_SeqSBAIJ, + NULL, NULL}; static PetscErrorCode MatStoreValues_SeqSBAIJ(Mat mat) @@ -1612,9 +1611,11 @@ static PetscErrorCode MatSeqSBAIJSetPreallocation_SeqSBAIJ(Mat B, PetscInt bs, P static PetscErrorCode 
MatSeqSBAIJSetPreallocationCSR_SeqSBAIJ(Mat B, PetscInt bs, const PetscInt ii[], const PetscInt jj[], const PetscScalar V[]) { - PetscInt i, j, m, nz, anz, nz_max = 0, *nnz; - PetscScalar *values = NULL; - PetscBool roworiented = ((Mat_SeqSBAIJ *)B->data)->roworiented; + PetscInt i, j, m, nz, anz, nz_max = 0, *nnz; + PetscScalar *values = NULL; + Mat_SeqSBAIJ *b = (Mat_SeqSBAIJ *)B->data; + PetscBool roworiented = b->roworiented; + PetscBool ilw = b->ignore_ltriangular; PetscFunctionBegin; PetscCheck(bs >= 1, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_OUTOFRANGE, "Invalid block size specified, must be positive but it is %" PetscInt_FMT, bs); @@ -1630,6 +1631,7 @@ static PetscErrorCode MatSeqSBAIJSetPreallocationCSR_SeqSBAIJ(Mat B, PetscInt bs for (i = 0; i < m; i++) { nz = ii[i + 1] - ii[i]; PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz); + PetscCheckSorted(nz, jj + ii[i]); anz = 0; for (j = 0; j < nz; j++) { /* count only values on the diagonal or above */ @@ -1638,7 +1640,7 @@ static PetscErrorCode MatSeqSBAIJSetPreallocationCSR_SeqSBAIJ(Mat B, PetscInt bs break; } } - nz_max = PetscMax(nz_max, anz); + nz_max = PetscMax(nz_max, nz); nnz[i] = anz; } PetscCall(MatSeqSBAIJSetPreallocation(B, bs, 0, nnz)); @@ -1646,9 +1648,11 @@ static PetscErrorCode MatSeqSBAIJSetPreallocationCSR_SeqSBAIJ(Mat B, PetscInt bs values = (PetscScalar *)V; if (!values) PetscCall(PetscCalloc1(bs * bs * nz_max, &values)); + b->ignore_ltriangular = PETSC_TRUE; for (i = 0; i < m; i++) { PetscInt ncols = ii[i + 1] - ii[i]; const PetscInt *icols = jj + ii[i]; + if (!roworiented || bs == 1) { const PetscScalar *svals = values + (V ? (bs * bs * ii[i]) : 0); PetscCall(MatSetValuesBlocked_SeqSBAIJ(B, 1, &i, ncols, icols, svals, INSERT_VALUES)); @@ -1663,6 +1667,7 @@ static PetscErrorCode MatSeqSBAIJSetPreallocationCSR_SeqSBAIJ(Mat B, PetscInt bs PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE)); + b->ignore_ltriangular = ilw; PetscFunctionReturn(PETSC_SUCCESS); } @@ -1793,7 +1798,7 @@ PETSC_INTERN PetscErrorCode MatGetFactor_seqsbaij_petsc(Mat A, MatFactorType fty .seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MatSeqSBAIJRestoreArray()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()` @*/ -PetscErrorCode MatSeqSBAIJGetArray(Mat A, PetscScalar **array) +PetscErrorCode MatSeqSBAIJGetArray(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscUseMethod(A, "MatSeqSBAIJGetArray_C", (Mat, PetscScalar **), (A, array)); @@ -1813,7 +1818,7 @@ PetscErrorCode MatSeqSBAIJGetArray(Mat A, PetscScalar **array) .seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MatSeqSBAIJGetArray()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()` @*/ -PetscErrorCode MatSeqSBAIJRestoreArray(Mat A, PetscScalar **array) +PetscErrorCode MatSeqSBAIJRestoreArray(Mat A, PetscScalar *array[]) { PetscFunctionBegin; PetscUseMethod(A, "MatSeqSBAIJRestoreArray_C", (Mat, PetscScalar **), (A, array)); @@ -1927,7 +1932,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_SeqSBAIJ(Mat B) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqSBAIJSetPreallocation - Creates a sparse symmetric matrix in block AIJ (block compressed row) `MATSEQSBAIJ` format. 
For good matrix assembly performance the user should preallocate the matrix storage by setting the parameter `nz` @@ -2011,7 +2016,7 @@ PetscErrorCode MatSeqSBAIJSetPreallocationCSR(Mat B, PetscInt bs, const PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateSeqSBAIJ - Creates a sparse symmetric matrix in (block compressed row) `MATSEQSBAIJ` format. For good matrix assembly performance the user should preallocate the matrix storage by setting the parameter `nz` diff --git a/src/mat/impls/scalapack/matscalapack.c b/src/mat/impls/scalapack/matscalapack.c index 4fd94082dfa..a556fbc9ab4 100644 --- a/src/mat/impls/scalapack/matscalapack.c +++ b/src/mat/impls/scalapack/matscalapack.c @@ -890,7 +890,7 @@ static PetscErrorCode MatGetFactor_scalapack_scalapack(Mat A, MatFactorType ftyp PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_ScaLAPACK(void) +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_ScaLAPACK(void) { PetscFunctionBegin; PetscCall(MatSolverTypeRegister(MATSOLVERSCALAPACK, MATSCALAPACK, MAT_FACTOR_LU, MatGetFactor_scalapack_scalapack)); @@ -1365,22 +1365,22 @@ static PetscErrorCode MatLoad_ScaLAPACK(Mat newMat, PetscViewer viewer) } static struct _MatOps MatOps_Values = {MatSetValues_ScaLAPACK, - 0, - 0, + NULL, + NULL, MatMult_ScaLAPACK, /* 4*/ MatMultAdd_ScaLAPACK, MatMultTranspose_ScaLAPACK, MatMultTransposeAdd_ScaLAPACK, MatSolve_ScaLAPACK, MatSolveAdd_ScaLAPACK, - 0, - /*10*/ 0, + NULL, + /*10*/ NULL, MatLUFactor_ScaLAPACK, MatCholeskyFactor_ScaLAPACK, - 0, + NULL, MatTranspose_ScaLAPACK, /*15*/ MatGetInfo_ScaLAPACK, - 0, + NULL, MatGetDiagonal_ScaLAPACK, MatDiagonalScale_ScaLAPACK, MatNorm_ScaLAPACK, @@ -1388,135 +1388,136 @@ static struct _MatOps MatOps_Values = {MatSetValues_ScaLAPACK, MatAssemblyEnd_ScaLAPACK, MatSetOption_ScaLAPACK, MatZeroEntries_ScaLAPACK, - /*24*/ 0, + /*24*/ NULL, MatLUFactorSymbolic_ScaLAPACK, MatLUFactorNumeric_ScaLAPACK, MatCholeskyFactorSymbolic_ScaLAPACK, MatCholeskyFactorNumeric_ScaLAPACK, /*29*/ MatSetUp_ScaLAPACK, - 0, - 0, - 0, - 0, + NULL, + NULL, + NULL, + NULL, /*34*/ MatDuplicate_ScaLAPACK, - 0, - 0, - 0, - 0, + NULL, + NULL, + NULL, + NULL, /*39*/ MatAXPY_ScaLAPACK, - 0, - 0, - 0, + NULL, + NULL, + NULL, MatCopy_ScaLAPACK, - /*44*/ 0, + /*44*/ NULL, MatScale_ScaLAPACK, MatShift_ScaLAPACK, - 0, - 0, - /*49*/ 0, - 0, - 0, - 0, - 0, - /*54*/ 0, - 0, - 0, - 0, - 0, - /*59*/ 0, + NULL, + NULL, + /*49*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*54*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*59*/ NULL, MatDestroy_ScaLAPACK, MatView_ScaLAPACK, - 0, - 0, - /*64*/ 0, - 0, - 0, - 0, - 0, - /*69*/ 0, - 0, + NULL, + NULL, + /*64*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*69*/ NULL, + NULL, MatConvert_ScaLAPACK_Dense, - 0, - 0, - /*74*/ 0, - 0, - 0, - 0, - 0, - /*79*/ 0, - 0, - 0, - 0, + NULL, + NULL, + /*74*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*79*/ NULL, + NULL, + NULL, + NULL, MatLoad_ScaLAPACK, - /*84*/ 0, - 0, - 0, - 0, - 0, - /*89*/ 0, - 0, + /*84*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*89*/ NULL, + NULL, MatMatMultNumeric_ScaLAPACK, - 0, - 0, - /*94*/ 0, - 0, - 0, + NULL, + NULL, + /*94*/ NULL, + NULL, + NULL, MatMatTransposeMultNumeric_ScaLAPACK, - 0, + NULL, /*99*/ MatProductSetFromOptions_ScaLAPACK, - 0, - 0, + NULL, + NULL, MatConjugate_ScaLAPACK, - 0, - /*104*/ 0, - 0, - 0, - 0, - 0, + NULL, + /*104*/ NULL, + NULL, + NULL, + NULL, + NULL, /*109*/ MatMatSolve_ScaLAPACK, - 0, - 0, - 0, + NULL, + NULL, + NULL, MatMissingDiagonal_ScaLAPACK, - /*114*/ 0, - 0, - 0, - 0, - 0, - 
/*119*/ 0, + /*114*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*119*/ NULL, MatHermitianTranspose_ScaLAPACK, MatMultHermitianTranspose_ScaLAPACK, MatMultHermitianTransposeAdd_ScaLAPACK, - 0, - /*124*/ 0, - 0, - 0, - 0, - 0, - /*129*/ 0, - 0, - 0, + NULL, + /*124*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*129*/ NULL, + NULL, + NULL, MatTransposeMatMultNumeric_ScaLAPACK, - 0, - /*134*/ 0, - 0, - 0, - 0, - 0, - 0, - /*140*/ 0, - 0, - 0, - 0, - 0, - /*145*/ 0, - 0, - 0, - 0, - 0, - /*150*/ 0, - 0, - 0}; + NULL, + /*134*/ NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + /*140*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*145*/ NULL, + NULL, + NULL, + NULL, + NULL, + /*150*/ NULL, + NULL, + NULL, + NULL}; static PetscErrorCode MatStashScatterBegin_ScaLAPACK(Mat mat, MatStash *stash, PetscInt *owners) { diff --git a/src/mat/impls/scatter/mscatter.c b/src/mat/impls/scatter/mscatter.c index 600e0dc9fbc..dcadaa9feae 100644 --- a/src/mat/impls/scatter/mscatter.c +++ b/src/mat/impls/scatter/mscatter.c @@ -246,6 +246,7 @@ static struct _MatOps MatOps_Values = {NULL, NULL, /*150*/ NULL, NULL, + NULL, NULL}; /*MC @@ -277,7 +278,7 @@ PETSC_EXTERN PetscErrorCode MatCreate_Scatter(Mat A) } #include -/*@C +/*@ MatCreateScatter - Creates a new matrix of `MatType` `MATSCATTER`, based on a VecScatter Collective @@ -291,14 +292,15 @@ PETSC_EXTERN PetscErrorCode MatCreate_Scatter(Mat A) Level: intermediate - PETSc requires that matrices and vectors being used for certain - operations are partitioned accordingly. For example, when - creating a scatter matrix, A, that supports parallel matrix-vector - products using `MatMult`(A,x,y) the user should set the number - of local matrix rows to be the number of local elements of the - corresponding result vector, y. Note that this is information is - required for use of the matrix interface routines, even though - the scatter matrix may not actually be physically partitioned. + Notes: + PETSc requires that matrices and vectors being used for certain + operations are partitioned accordingly. For example, when + creating a scatter matrix, A, that supports parallel matrix-vector + products using `MatMult`(A,x,y) the user should set the number + of local matrix rows to be the number of local elements of the + corresponding result vector, y. Note that this information is + required for use of the matrix interface routines, even though + the scatter matrix may not actually be physically partitioned. 
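
For instance, a minimal sketch that wraps a `VecScatter` in a `MATSCATTER` matrix and applies it with `MatMult()`; the identity mapping and the local size `n` are illustrative assumptions:

Vec        x, y;
IS         is;
VecScatter sf;
Mat        S;
PetscInt   n = 8, rstart; /* local size, illustrative */

PetscCall(VecCreateMPI(PETSC_COMM_WORLD, n, PETSC_DETERMINE, &x));
PetscCall(VecDuplicate(x, &y));
PetscCall(VecGetOwnershipRange(x, &rstart, NULL));
PetscCall(ISCreateStride(PETSC_COMM_SELF, n, rstart, 1, &is)); /* identity scatter, for illustration */
PetscCall(VecScatterCreate(x, is, y, is, &sf));
PetscCall(MatCreateScatter(PETSC_COMM_WORLD, sf, &S)); /* S has n local rows, matching y */
PetscCall(MatMult(S, x, y));                           /* applies the scatter */
PetscCall(MatDestroy(&S));
PetscCall(VecScatterDestroy(&sf));
PetscCall(ISDestroy(&is));
PetscCall(VecDestroy(&y));
PetscCall(VecDestroy(&x));
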
Developer Notes: This directly accesses information inside the `VecScatter` associated with the matrix-vector product diff --git a/src/mat/impls/sell/mpi/mpicuda/mpisellcuda.cu b/src/mat/impls/sell/mpi/mpicuda/mpisellcuda.cu index 7523853cacf..47d6fbd3c66 100644 --- a/src/mat/impls/sell/mpi/mpicuda/mpisellcuda.cu +++ b/src/mat/impls/sell/mpi/mpicuda/mpisellcuda.cu @@ -29,61 +29,6 @@ static PetscErrorCode MatMPISELLSetPreallocation_MPISELLCUDA(Mat B, PetscInt d_r PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode MatMult_MPISELLCUDA(Mat A, Vec xx, Vec yy) -{ - Mat_MPISELL *a = (Mat_MPISELL *)A->data; - PetscInt nt; - - PetscFunctionBegin; - PetscCall(VecGetLocalSize(xx, &nt)); - PetscCheck(nt == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible partition of A (%" PetscInt_FMT ") and xx (%" PetscInt_FMT ")", A->cmap->n, nt); - PetscCall(VecScatterBegin(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall((*a->A->ops->mult)(a->A, xx, yy)); - PetscCall(VecScatterEnd(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall((*a->B->ops->multadd)(a->B, a->lvec, yy, yy)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode MatZeroEntries_MPISELLCUDA(Mat A) -{ - Mat_MPISELL *l = (Mat_MPISELL *)A->data; - - PetscFunctionBegin; - PetscCall(MatZeroEntries(l->A)); - PetscCall(MatZeroEntries(l->B)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode MatMultAdd_MPISELLCUDA(Mat A, Vec xx, Vec yy, Vec zz) -{ - Mat_MPISELL *a = (Mat_MPISELL *)A->data; - PetscInt nt; - - PetscFunctionBegin; - PetscCall(VecGetLocalSize(xx, &nt)); - PetscCheck(nt == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible partition of A (%" PetscInt_FMT ") and xx (%" PetscInt_FMT ")", A->cmap->n, nt); - PetscCall(VecScatterBegin(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall((*a->A->ops->multadd)(a->A, xx, yy, zz)); - PetscCall(VecScatterEnd(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD)); - PetscCall((*a->B->ops->multadd)(a->B, a->lvec, zz, zz)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -static PetscErrorCode MatMultTranspose_MPISELLCUDA(Mat A, Vec xx, Vec yy) -{ - Mat_MPISELL *a = (Mat_MPISELL *)A->data; - PetscInt nt; - - PetscFunctionBegin; - PetscCall(VecGetLocalSize(xx, &nt)); - PetscCheck(nt == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible partition of A (%" PetscInt_FMT ") and xx (%" PetscInt_FMT ")", A->rmap->n, nt); - PetscUseTypeMethod(a->B, multtranspose, xx, a->lvec); - PetscUseTypeMethod(a->A, multtranspose, xx, yy); - PetscCall(VecScatterBegin(a->Mvctx, a->lvec, yy, ADD_VALUES, SCATTER_REVERSE)); - PetscCall(VecScatterEnd(a->Mvctx, a->lvec, yy, ADD_VALUES, SCATTER_REVERSE)); - PetscFunctionReturn(PETSC_SUCCESS); -} - static PetscErrorCode MatSetFromOptions_MPISELLCUDA(Mat, PetscOptionItems *) { return PETSC_SUCCESS; @@ -125,12 +70,8 @@ PETSC_INTERN PetscErrorCode MatConvert_MPISELL_MPISELLCUDA(Mat B, MatType, MatRe if (a->lvec) PetscCall(VecSetType(a->lvec, VECSEQCUDA)); A->ops->assemblyend = MatAssemblyEnd_MPISELLCUDA; - A->ops->mult = MatMult_MPISELLCUDA; - A->ops->multadd = MatMultAdd_MPISELLCUDA; - A->ops->multtranspose = MatMultTranspose_MPISELLCUDA; A->ops->setfromoptions = MatSetFromOptions_MPISELLCUDA; A->ops->destroy = MatDestroy_MPISELLCUDA; - A->ops->zeroentries = MatZeroEntries_MPISELLCUDA; PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATMPISELLCUDA)); PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatMPISELLSetPreallocation_C", 
MatMPISELLSetPreallocation_MPISELLCUDA)); diff --git a/src/ksp/ksp/impls/python/ftn-custom/makefile b/src/mat/impls/sell/mpi/mpihip/makefile similarity index 73% rename from src/ksp/ksp/impls/python/ftn-custom/makefile rename to src/mat/impls/sell/mpi/mpihip/makefile index c6170f8b367..82492f52aec 100644 --- a/src/ksp/ksp/impls/python/ftn-custom/makefile +++ b/src/mat/impls/sell/mpi/mpihip/makefile @@ -1,6 +1,7 @@ -include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' +#requirespackage 'PETSC_HAVE_HIP' +MANSEC = Mat include ${PETSC_DIR}/lib/petsc/conf/variables include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/sell/mpi/mpihip/mpisellhip.hip.cpp b/src/mat/impls/sell/mpi/mpihip/mpisellhip.hip.cpp new file mode 100644 index 00000000000..4c620140ad3 --- /dev/null +++ b/src/mat/impls/sell/mpi/mpihip/mpisellhip.hip.cpp @@ -0,0 +1,171 @@ +#include +#include +#include <../src/mat/impls/sell/mpi/mpisell.h> /*I "petscmat.h" I*/ + +static PetscErrorCode MatMPISELLSetPreallocation_MPISELLHIP(Mat B, PetscInt d_rlenmax, const PetscInt d_rlen[], PetscInt o_rlenmax, const PetscInt o_rlen[]) +{ + Mat_MPISELL *b = (Mat_MPISELL *)B->data; + + PetscFunctionBegin; + PetscCall(PetscLayoutSetUp(B->rmap)); + PetscCall(PetscLayoutSetUp(B->cmap)); + + if (!B->preallocated) { + /* Explicitly create 2 MATSEQSELLHIP matrices. */ + PetscCall(MatCreate(PETSC_COMM_SELF, &b->A)); + PetscCall(MatBindToCPU(b->A, B->boundtocpu)); + PetscCall(MatSetSizes(b->A, B->rmap->n, B->cmap->n, B->rmap->n, B->cmap->n)); + PetscCall(MatSetType(b->A, MATSEQSELLHIP)); + PetscCall(MatCreate(PETSC_COMM_SELF, &b->B)); + PetscCall(MatBindToCPU(b->B, B->boundtocpu)); + PetscCall(MatSetSizes(b->B, B->rmap->n, B->cmap->N, B->rmap->n, B->cmap->N)); + PetscCall(MatSetType(b->B, MATSEQSELLHIP)); + } + PetscCall(MatSeqSELLSetPreallocation(b->A, d_rlenmax, d_rlen)); + PetscCall(MatSeqSELLSetPreallocation(b->B, o_rlenmax, o_rlen)); + B->preallocated = PETSC_TRUE; + B->was_assembled = PETSC_FALSE; + B->assembled = PETSC_FALSE; + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSetFromOptions_MPISELLHIP(Mat, PetscOptionItems *) +{ + return PETSC_SUCCESS; +} + +static PetscErrorCode MatAssemblyEnd_MPISELLHIP(Mat A, MatAssemblyType mode) +{ + PetscFunctionBegin; + PetscCall(MatAssemblyEnd_MPISELL(A, mode)); + if (!A->was_assembled && mode == MAT_FINAL_ASSEMBLY) PetscCall(VecSetType(((Mat_MPISELL *)A->data)->lvec, VECSEQHIP)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatDestroy_MPISELLHIP(Mat A) +{ + PetscFunctionBegin; + PetscCall(MatDestroy_MPISELL(A)); + PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatMPISELLSetPreallocation_C", NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatConvert_MPISELL_MPISELLHIP(Mat B, MatType, MatReuse reuse, Mat *newmat) +{ + Mat_MPISELL *a; + Mat A; + + PetscFunctionBegin; + PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP)); + if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatDuplicate(B, MAT_COPY_VALUES, newmat)); + else if (reuse == MAT_REUSE_MATRIX) PetscCall(MatCopy(B, *newmat, SAME_NONZERO_PATTERN)); + A = *newmat; + A->boundtocpu = PETSC_FALSE; + PetscCall(PetscFree(A->defaultvectype)); + PetscCall(PetscStrallocpy(VECHIP, &A->defaultvectype)); + + a = (Mat_MPISELL *)A->data; + if (a->A) PetscCall(MatSetType(a->A, MATSEQSELLHIP)); + if (a->B) PetscCall(MatSetType(a->B, MATSEQSELLHIP)); + if (a->lvec) PetscCall(VecSetType(a->lvec, VECSEQHIP)); + + A->ops->assemblyend = 
MatAssemblyEnd_MPISELLHIP; + A->ops->setfromoptions = MatSetFromOptions_MPISELLHIP; + A->ops->destroy = MatDestroy_MPISELLHIP; + + PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATMPISELLHIP)); + PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatMPISELLSetPreallocation_C", MatMPISELLSetPreallocation_MPISELLHIP)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_EXTERN PetscErrorCode MatCreate_MPISELLHIP(Mat A) +{ + PetscFunctionBegin; + PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP)); + PetscCall(MatCreate_MPISELL(A)); + PetscCall(MatConvert_MPISELL_MPISELLHIP(A, MATMPISELLHIP, MAT_INPLACE_MATRIX, &A)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + MatCreateSELLHIP - Creates a sparse matrix in SELL format. + The matrix data will ultimately reside on AMD GPUs. + + Collective + + Input Parameters: ++ comm - MPI communicator +. m - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given) + This value should be the same as the local size used in creating the + y vector for the matrix-vector product $ y = Ax $. +. n - This value should be the same as the local size used in creating the + x vector for the matrix-vector product $ y = Ax $. (or `PETSC_DECIDE` to have + calculated if `N` is given) For square matrices `n` is almost always `m`. +. M - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given) +. N - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given) +. d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix + (same value is used for all local rows) +. d_nnz - array containing the number of nonzeros in the various rows of the + DIAGONAL portion of the local submatrix (possibly different for each row) + or `NULL`, if `d_nz` is used to specify the nonzero structure. + The size of this array is equal to the number of local rows, i.e. `m`. + For matrices you plan to factor you must leave room for the diagonal entry and + put in the entry even if it is zero. +. o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local + submatrix (same value is used for all local rows). +- o_nnz - array containing the number of nonzeros in the various rows of the + OFF-DIAGONAL portion of the local submatrix (possibly different for + each row) or `NULL`, if `o_nz` is used to specify the nonzero + structure. The size of this array is equal to the number + of local rows, i.e. `m`. + + Output Parameter: +. A - the matrix + + Level: intermediate + + Notes: + If `d_nnz` (`o_nnz`) is given then `d_nz` (`o_nz`) is ignored + + Specify the preallocated storage with either `d_nz`/`o_nz` or `d_nnz`/`o_nnz` (not both). + Set `d_nz` = `o_nz` = `PETSC_DEFAULT` and `d_nnz` = `o_nnz` = `NULL` for PETSc to control dynamic memory + allocation. 
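
A minimal usage sketch of this constructor; the local size and the 5/2 row-length bounds are illustrative:

Mat      A;
PetscInt m = 100; /* local rows and columns, illustrative */

PetscCall(MatCreateSELLHIP(PETSC_COMM_WORLD, m, m, PETSC_DETERMINE, PETSC_DETERMINE, 5, NULL, 2, NULL, &A));
/* fill with MatSetValues(), assemble, then MatMult() executes on the GPU */
PetscCall(MatDestroy(&A));
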
+ +.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateSELL()`, `MatSetValues()`, `MATMPISELLHIP`, `MATSELLHIP` +@*/ +PetscErrorCode MatCreateSELLHIP(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A) +{ + PetscMPIInt size; + + PetscFunctionBegin; + PetscCall(MatCreate(comm, A)); + PetscCall(MatSetSizes(*A, m, n, M, N)); + PetscCallMPI(MPI_Comm_size(comm, &size)); + if (size > 1) { + PetscCall(MatSetType(*A, MATMPISELLHIP)); + PetscCall(MatMPISELLSetPreallocation(*A, d_nz, d_nnz, o_nz, o_nnz)); + } else { + PetscCall(MatSetType(*A, MATSEQSELLHIP)); + PetscCall(MatSeqSELLSetPreallocation(*A, d_nz, d_nnz)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*MC + MATSELLHIP - "sellhip" = "mpisellhip" - A matrix type to be used for sparse matrices on AMD GPUs + + Sliced ELLPACK matrix type whose data resides on GPUs. + + This matrix type is identical to `MATSEQSELLHIP` when constructed with a single process communicator, + and `MATMPISELLHIP` otherwise. As a result, for single process communicators, + `MatSeqSELLSetPreallocation()` is supported, and similarly `MatMPISELLSetPreallocation()` is supported + for communicators controlling multiple processes. It is recommended that you call both of + the above preallocation routines for simplicity. + + Options Database Key: +. -mat_type sellhip - sets the matrix type to `MATSELLHIP` during a call to MatSetFromOptions() + + Level: beginner + +.seealso: `MatCreateSELLHIP()`, `MATSEQSELLHIP`, `MatCreateSeqSELLHIP()`, `MatHIPFormatOperation()` +M*/ diff --git a/src/mat/impls/sell/mpi/mpisell.c b/src/mat/impls/sell/mpi/mpisell.c index ee3283b5ddd..c41fbb41fba 100644 --- a/src/mat/impls/sell/mpi/mpisell.c +++ b/src/mat/impls/sell/mpi/mpisell.c @@ -99,7 +99,6 @@ PetscErrorCode MatCreateColmap_MPISELL_Private(Mat mat) vp1[sliceheight * _i] = value; \ a->nz++; \ nrow1++; \ - A->nonzerostate++; \ a_noinsert:; \ a->rlen[row] = nrow1; \ } @@ -144,7 +143,6 @@ PetscErrorCode MatCreateColmap_MPISELL_Private(Mat mat) vp2[sliceheight * _i] = value; \ b->nz++; \ nrow2++; \ - B->nonzerostate++; \ b_noinsert:; \ b->rlen[row] = nrow2; \ } @@ -1249,6 +1247,7 @@ static const struct _MatOps MatOps_Values = {MatSetValues_MPISELL, NULL, /*150*/ NULL, NULL, + NULL, NULL}; /*@C @@ -1404,26 +1403,26 @@ M*/ Input Parameters: + comm - MPI communicator . m - number of local rows (or `PETSC_DECIDE` to have calculated if M is given) - This value should be the same as the local size used in creating the - y vector for the matrix-vector product y = Ax. + This value should be the same as the local size used in creating the + y vector for the matrix-vector product y = Ax. . n - This value should be the same as the local size used in creating the - x vector for the matrix-vector product y = Ax. (or `PETSC_DECIDE` to have - calculated if `N` is given) For square matrices n is almost always `m`. + x vector for the matrix-vector product y = Ax. (or `PETSC_DECIDE` to have + calculated if `N` is given) For square matrices n is almost always `m`. . M - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given) . N - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given) . d_rlenmax - max number of nonzeros per row in DIAGONAL portion of local submatrix - (same value is used for all local rows) + (same value is used for all local rows) . 
d_rlen - array containing the number of nonzeros in the various rows of the - DIAGONAL portion of the local submatrix (possibly different for each row) - or `NULL`, if d_rlenmax is used to specify the nonzero structure. - The size of this array is equal to the number of local rows, i.e `m`. + DIAGONAL portion of the local submatrix (possibly different for each row) + or `NULL`, if d_rlenmax is used to specify the nonzero structure. + The size of this array is equal to the number of local rows, i.e `m`. . o_rlenmax - max number of nonzeros per row in the OFF-DIAGONAL portion of local - submatrix (same value is used for all local rows). + submatrix (same value is used for all local rows). - o_rlen - array containing the number of nonzeros in the various rows of the - OFF-DIAGONAL portion of the local submatrix (possibly different for - each row) or `NULL`, if `o_rlenmax` is used to specify the nonzero - structure. The size of this array is equal to the number - of local rows, i.e `m`. + OFF-DIAGONAL portion of the local submatrix (possibly different for + each row) or `NULL`, if `o_rlenmax` is used to specify the nonzero + structure. The size of this array is equal to the number + of local rows, i.e `m`. Output Parameter: . A - the matrix diff --git a/src/mat/impls/sell/seq/fdsell.c b/src/mat/impls/sell/seq/fdsell.c index e444fae11f3..65818568fa8 100644 --- a/src/mat/impls/sell/seq/fdsell.c +++ b/src/mat/impls/sell/seq/fdsell.c @@ -16,16 +16,17 @@ PetscErrorCode MatGetColumnIJ_SeqSELL_Color(Mat A, PetscInt oshift, PetscBool sy PetscBool isnonzero; PetscFunctionBegin; + PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Expected cmap->n %" PetscInt_FMT " >= 0", n); *nn = n; if (!ia) PetscFunctionReturn(PETSC_SUCCESS); - PetscCall(PetscCalloc1(n + 1, &collengths)); + PetscCall(PetscCalloc1(n, &collengths)); PetscCall(PetscMalloc1(n + 1, &cia)); PetscCall(PetscMalloc1(a->nz + 1, &cja)); PetscCall(PetscMalloc1(a->nz + 1, &cspidx)); - totalslices = A->rmap->n / 8 + ((A->rmap->n & 0x07) ? 
1 : 0); /* floor(n/8) */ - for (i = 0; i < totalslices; i++) { /* loop over slices */ + totalslices = PetscCeilInt(A->rmap->n, 8); + for (i = 0; i < totalslices; i++) { /* loop over slices */ for (j = a->sliidx[i], row = 0; j < a->sliidx[i + 1]; j++, row = ((row + 1) & 0x07)) { isnonzero = (PetscBool)((j - a->sliidx[i]) / 8 < a->rlen[8 * i + row]); if (isnonzero) collengths[a->colidx[j]]++; diff --git a/src/mat/impls/sell/seq/ftn-custom/makefile b/src/mat/impls/sell/seq/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/mat/impls/sell/seq/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/sell/seq/ftn-custom/zsellf.c b/src/mat/impls/sell/seq/ftn-custom/zsellf.c deleted file mode 100644 index c12f9f4e0fd..00000000000 --- a/src/mat/impls/sell/seq/ftn-custom/zsellf.c +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matcreateseqsell_ MATCREATESEQSELL - #define matseqsellsetpreallocation_ MATSEQSELLSETPREALLOCATION -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matcreateseqsell_ matcreateseqsell - #define matseqsellsetpreallocation_ matseqsellsetpreallocation -#endif - -PETSC_EXTERN void matcreateseqsell_(MPI_Comm *comm, PetscInt *m, PetscInt *n, PetscInt *maxrlenrow, PetscInt *rlen, Mat *newmat, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(rlen); - *ierr = MatCreateSeqSELL(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *m, *n, *maxrlenrow, rlen, newmat); -} - -PETSC_EXTERN void matseqsellsetpreallocation_(Mat *mat, PetscInt *maxrlenrow, PetscInt *rlen, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(rlen); - *ierr = MatSeqSELLSetPreallocation(*mat, *maxrlenrow, rlen); -} diff --git a/src/mat/impls/sell/seq/sell.c b/src/mat/impls/sell/seq/sell.c index 4f1e60bf0aa..e17772332ce 100644 --- a/src/mat/impls/sell/seq/sell.c +++ b/src/mat/impls/sell/seq/sell.c @@ -41,7 +41,7 @@ static const char citation[] = "@inproceedings{ZhangELLPACK2018,\n" #endif #endif /* PETSC_HAVE_IMMINTRIN_H */ -/*@C +/*@ MatSeqSELLSetPreallocation - For good matrix assembly performance the user should preallocate the matrix storage by setting the parameter `nz` (or the array `nnz`). 
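
For example, a minimal sketch of per-row SELL preallocation with `rlen`; the matrix size and row counts are illustrative:

Mat      A;
PetscInt m = 6;
PetscInt rlen[6] = {3, 3, 3, 2, 2, 1}; /* nonzeros per row, illustrative */

PetscCall(MatCreate(PETSC_COMM_SELF, &A));
PetscCall(MatSetSizes(A, m, m, m, m));
PetscCall(MatSetType(A, MATSEQSELL));
PetscCall(MatSeqSELLSetPreallocation(A, PETSC_DEFAULT, rlen)); /* rlen takes precedence over the scalar bound */
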
@@ -88,7 +88,7 @@ PetscErrorCode MatSeqSELLSetPreallocation_SeqSELL(Mat B, PetscInt maxallocrow, c { Mat_SeqSELL *b; PetscInt i, j, totalslices; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) PetscInt rlenmax = 0; #endif PetscBool skipallocation = PETSC_FALSE, realalloc = PETSC_FALSE; @@ -118,7 +118,7 @@ PetscErrorCode MatSeqSELLSetPreallocation_SeqSELL(Mat B, PetscInt maxallocrow, c b = (Mat_SeqSELL *)B->data; if (!b->sliceheight) { /* not set yet */ -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) b->sliceheight = 16; #else b->sliceheight = 8; @@ -135,14 +135,14 @@ PetscErrorCode MatSeqSELLSetPreallocation_SeqSELL(Mat B, PetscInt maxallocrow, c if (!rlen) { /* if rlen is not provided, allocate same space for all the slices */ if (maxallocrow == PETSC_DEFAULT || maxallocrow == PETSC_DECIDE) maxallocrow = 10; else if (maxallocrow < 0) maxallocrow = 1; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) rlenmax = maxallocrow; /* Pad the slice to DEVICE_MEM_ALIGN */ while (b->sliceheight * maxallocrow % DEVICE_MEM_ALIGN) maxallocrow++; #endif for (i = 0; i <= totalslices; i++) b->sliidx[i] = b->sliceheight * i * maxallocrow; } else { -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) PetscInt mul = DEVICE_MEM_ALIGN / b->sliceheight; #endif maxallocrow = 0; @@ -150,10 +150,11 @@ PetscErrorCode MatSeqSELLSetPreallocation_SeqSELL(Mat B, PetscInt maxallocrow, c for (i = 1; i < totalslices; i++) { b->sliidx[i] = 0; for (j = 0; j < b->sliceheight; j++) { b->sliidx[i] = PetscMax(b->sliidx[i], rlen[b->sliceheight * (i - 1) + j]); } -#if defined(PETSC_HAVE_CUDA) - rlenmax = PetscMax(b->sliidx[i], rlenmax); - /* Pad the slice to DEVICE_MEM_ALIGN */ - b->sliidx[i] = ((b->sliidx[i] - 1) / mul + 1) * mul; +#if defined(PETSC_HAVE_CUPM) + if (mul != 0) { /* Pad the slice to DEVICE_MEM_ALIGN if sliceheight < DEVICE_MEM_ALIGN */ + rlenmax = PetscMax(b->sliidx[i], rlenmax); + b->sliidx[i] = ((b->sliidx[i] - 1) / mul + 1) * mul; + } #endif maxallocrow = PetscMax(b->sliidx[i], maxallocrow); PetscCall(PetscIntSumError(b->sliidx[i - 1], b->sliceheight * b->sliidx[i], &b->sliidx[i])); @@ -161,9 +162,11 @@ PetscErrorCode MatSeqSELLSetPreallocation_SeqSELL(Mat B, PetscInt maxallocrow, c /* last slice */ b->sliidx[totalslices] = 0; for (j = b->sliceheight * (totalslices - 1); j < B->rmap->n; j++) b->sliidx[totalslices] = PetscMax(b->sliidx[totalslices], rlen[j]); -#if defined(PETSC_HAVE_CUDA) - rlenmax = PetscMax(b->sliidx[i], rlenmax); - b->sliidx[totalslices] = ((b->sliidx[totalslices] - 1) / mul + 1) * mul; +#if defined(PETSC_HAVE_CUPM) + if (mul != 0) { + rlenmax = PetscMax(b->sliidx[i], rlenmax); + b->sliidx[totalslices] = ((b->sliidx[totalslices] - 1) / mul + 1) * mul; + } #endif maxallocrow = PetscMax(b->sliidx[totalslices], maxallocrow); b->sliidx[totalslices] = b->sliidx[totalslices - 1] + b->sliceheight * b->sliidx[totalslices]; @@ -187,7 +190,7 @@ PetscErrorCode MatSeqSELLSetPreallocation_SeqSELL(Mat B, PetscInt maxallocrow, c b->nz = 0; b->maxallocrow = maxallocrow; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) b->rlenmax = rlenmax; #else b->rlenmax = maxallocrow; @@ -885,7 +888,7 @@ PetscErrorCode MatDestroy_SeqSELL(Mat A) PetscCall(PetscFree(a->saved_values)); PetscCall(PetscFree2(a->getrowcols, a->getrowvals)); PetscCall(PetscFree(A->data)); -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) PetscCall(PetscFree(a->chunk_slice_map)); #endif @@ -898,6 +901,9 @@ PetscErrorCode MatDestroy_SeqSELL(Mat A) 
PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqsell_seqaij_C", NULL)); #if defined(PETSC_HAVE_CUDA) PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqsell_seqsellcuda_C", NULL)); +#endif +#if defined(PETSC_HAVE_HIP) + PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqsell_seqsellhip_C", NULL)); #endif PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqSELLGetFillRatio_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqSELLGetMaxSliceWidth_C", NULL)); @@ -1028,7 +1034,7 @@ PetscErrorCode MatDiagonalScale_SeqSELL(Mat A, Vec ll, Vec rr) PetscCall(PetscLogFlops(a->nz)); } PetscCall(MatSeqSELLInvalidateDiagonal(A)); -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) if (A->offloadmask != PETSC_OFFLOAD_UNALLOCATED) A->offloadmask = PETSC_OFFLOAD_CPU; #endif PetscFunctionReturn(PETSC_SUCCESS); @@ -1434,7 +1440,7 @@ PetscErrorCode MatAssemblyEnd_SeqSELL(Mat A, MatAssemblyType mode) Mat_SeqSELL *a = (Mat_SeqSELL *)A->data; PetscInt i, shift, row_in_slice, row, nrow, *cp, lastcol, j, k; MatScalar *vp; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) PetscInt totalchunks = 0; #endif @@ -1484,7 +1490,7 @@ PetscErrorCode MatAssemblyEnd_SeqSELL(Mat A, MatAssemblyType mode) a->reallocs = 0; PetscCall(MatSeqSELLInvalidateDiagonal(A)); -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) if (!a->chunksize && a->totalslices) { a->chunksize = 64; while (a->chunksize < 1024 && 2 * a->chunksize <= a->sliidx[a->totalslices] / a->totalslices) a->chunksize *= 2; @@ -1534,7 +1540,7 @@ PetscErrorCode MatSetValues_SeqSELL(Mat A, PetscInt m, const PetscInt im[], Pets PetscInt shift, i, k, l, low, high, t, ii, row, col, nrow; PetscInt *cp, nonew = a->nonew, lastcol = -1; MatScalar *vp, value; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) PetscBool inserted = PETSC_FALSE; PetscInt mul = DEVICE_MEM_ALIGN / a->sliceheight; #endif @@ -1576,7 +1582,7 @@ PetscErrorCode MatSetValues_SeqSELL(Mat A, PetscInt m, const PetscInt im[], Pets if (*(cp + a->sliceheight * i) == col) { if (is == ADD_VALUES) *(vp + a->sliceheight * i) += value; else *(vp + a->sliceheight * i) = value; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) inserted = PETSC_TRUE; #endif low = i + 1; @@ -1586,7 +1592,7 @@ PetscErrorCode MatSetValues_SeqSELL(Mat A, PetscInt m, const PetscInt im[], Pets if (value == 0.0 && a->ignorezeroentries) goto noinsert; if (nonew == 1) goto noinsert; PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", row, col); -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) MatSeqXSELLReallocateSELL(A, A->rmap->n, 1, nrow, a->sliidx, a->sliceheight, row / a->sliceheight, row, col, a->colidx, a->val, cp, vp, nonew, MatScalar, mul); #else /* If the current row length exceeds the slice width (e.g. 
nrow==slice_width), allocate a new space, otherwise do nothing */ @@ -1601,8 +1607,7 @@ PetscErrorCode MatSetValues_SeqSELL(Mat A, PetscInt m, const PetscInt im[], Pets *(cp + a->sliceheight * i) = col; *(vp + a->sliceheight * i) = value; a->nz++; - A->nonzerostate++; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) inserted = PETSC_TRUE; #endif low = i + 1; @@ -1612,7 +1617,7 @@ PetscErrorCode MatSetValues_SeqSELL(Mat A, PetscInt m, const PetscInt im[], Pets } a->rlen[row] = nrow; } -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) if (A->offloadmask != PETSC_OFFLOAD_UNALLOCATED && inserted) A->offloadmask = PETSC_OFFLOAD_CPU; #endif PetscFunctionReturn(PETSC_SUCCESS); @@ -1668,7 +1673,7 @@ PetscErrorCode MatScale_SeqSELL(Mat inA, PetscScalar alpha) PetscCallBLAS("BLASscal", BLASscal_(&size, &oalpha, aval, &one)); PetscCall(PetscLogFlops(a->nz)); PetscCall(MatSeqSELLInvalidateDiagonal(inA)); -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) if (inA->offloadmask != PETSC_OFFLOAD_UNALLOCATED) inA->offloadmask = PETSC_OFFLOAD_CPU; #endif PetscFunctionReturn(PETSC_SUCCESS); @@ -1938,6 +1943,7 @@ static struct _MatOps MatOps_Values = {MatSetValues_SeqSELL, NULL, /*150*/ NULL, NULL, + NULL, NULL}; static PetscErrorCode MatStoreValues_SeqSELL(Mat mat) @@ -2026,13 +2032,13 @@ static PetscErrorCode MatSeqSELLSetSliceHeight_SeqSELL(Mat A, PetscInt sliceheig if (A->preallocated) PetscFunctionReturn(PETSC_SUCCESS); PetscCheck(a->sliceheight <= 0 || a->sliceheight == sliceheight, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot change slice height %" PetscInt_FMT " to %" PetscInt_FMT, a->sliceheight, sliceheight); a->sliceheight = sliceheight; -#if defined(PETSC_HAVE_CUDA) - PetscCheck(DEVICE_MEM_ALIGN % sliceheight == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "DEVICE_MEM_ALIGN is not divisible by the slice height %" PetscInt_FMT, sliceheight); +#if defined(PETSC_HAVE_CUPM) + PetscCheck(PetscMax(DEVICE_MEM_ALIGN, sliceheight) % PetscMin(DEVICE_MEM_ALIGN, sliceheight) == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "The slice height is not compatible with DEVICE_MEM_ALIGN (one must be divisible by the other) %" PetscInt_FMT, sliceheight); #endif PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqSELLGetFillRatio - returns a ratio that indicates the irregularity of the matrix. Not Collective @@ -2054,7 +2060,7 @@ PetscErrorCode MatSeqSELLGetFillRatio(Mat A, PetscReal *ratio) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqSELLGetMaxSliceWidth - returns the maximum slice width. Not Collective @@ -2076,7 +2082,7 @@ PetscErrorCode MatSeqSELLGetMaxSliceWidth(Mat A, PetscInt *slicewidth) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqSELLGetAvgSliceWidth - returns the average slice width. Not Collective @@ -2098,7 +2104,7 @@ PetscErrorCode MatSeqSELLGetAvgSliceWidth(Mat A, PetscReal *slicewidth) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqSELLSetSliceHeight - sets the slice height. Not Collective @@ -2123,7 +2129,7 @@ PetscErrorCode MatSeqSELLSetSliceHeight(Mat A, PetscInt sliceheight) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSeqSELLGetVarSliceSize - returns the variance of the slice size. 
Not Collective @@ -2148,6 +2154,9 @@ PetscErrorCode MatSeqSELLGetVarSliceSize(Mat A, PetscReal *variance) #if defined(PETSC_HAVE_CUDA) PETSC_EXTERN PetscErrorCode MatConvert_SeqSELL_SeqSELLCUDA(Mat); #endif +#if defined(PETSC_HAVE_HIP) +PETSC_EXTERN PetscErrorCode MatConvert_SeqSELL_SeqSELLHIP(Mat); +#endif PETSC_EXTERN PetscErrorCode MatCreate_SeqSELL(Mat B) { @@ -2193,6 +2202,9 @@ PETSC_EXTERN PetscErrorCode MatCreate_SeqSELL(Mat B) PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqsell_seqaij_C", MatConvert_SeqSELL_SeqAIJ)); #if defined(PETSC_HAVE_CUDA) PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqsell_seqsellcuda_C", MatConvert_SeqSELL_SeqSELLCUDA)); +#endif +#if defined(PETSC_HAVE_HIP) + PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqsell_seqsellhip_C", MatConvert_SeqSELL_SeqSELLHIP)); #endif PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqSELLGetFillRatio_C", MatSeqSELLGetFillRatio_SeqSELL)); PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqSELLGetMaxSliceWidth_C", MatSeqSELLGetMaxSliceWidth_SeqSELL)); @@ -2204,14 +2216,14 @@ PETSC_EXTERN PetscErrorCode MatCreate_SeqSELL(Mat B) { PetscInt newsh = -1; PetscBool flg; -#if defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_CUPM) PetscInt chunksize = 0; #endif PetscCall(PetscOptionsInt("-mat_sell_slice_height", "Set the slice height used to store SELL matrix", "MatSELLSetSliceHeight", newsh, &newsh, &flg)); if (flg) { PetscCall(MatSeqSELLSetSliceHeight(B, newsh)); } -#if defined(PETSC_HAVE_CUDA) - PetscCall(PetscOptionsInt("-mat_sell_chunk_size", "Set the chunksize for load-balanced CUDA kernels. Choices include 64,128,256,512,1024", NULL, chunksize, &chunksize, &flg)); +#if defined(PETSC_HAVE_CUPM) + PetscCall(PetscOptionsInt("-mat_sell_chunk_size", "Set the chunksize for load-balanced CUDA/HIP kernels. Choices include 64,128,256,512,1024", NULL, chunksize, &chunksize, &flg)); if (flg) { PetscCheck(chunksize >= 64 && chunksize <= 1024 && chunksize % 64 == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "chunksize must be a number in {64,128,256,512,1024}: value %" PetscInt_FMT, chunksize); b->chunksize = chunksize; @@ -2242,7 +2254,8 @@ static PetscErrorCode MatDuplicateNoCreate_SeqSELL(Mat C, Mat A, MatDuplicateOpt PetscCall(PetscLayoutReference(A->rmap, &C->rmap)); PetscCall(PetscLayoutReference(A->cmap, &C->cmap)); - PetscCall(PetscMalloc1(a->sliceheight * totalslices, &c->rlen)); + c->sliceheight = a->sliceheight; + PetscCall(PetscMalloc1(c->sliceheight * totalslices, &c->rlen)); PetscCall(PetscMalloc1(totalslices + 1, &c->sliidx)); for (i = 0; i < m; i++) c->rlen[i] = a->rlen[i]; @@ -2367,7 +2380,7 @@ M*/ .seealso: `Mat`, `MatCreateSeqSELL()`, `MatCreateSeqAIJ()`, `MatCreateSell()`, `MATSEQSELL`, `MATMPISELL`, `MATSEQAIJ`, `MATMPIAIJ`, `MATAIJ` M*/ -/*@C +/*@ MatCreateSeqSELL - Creates a sparse matrix in `MATSEQSELL` format. 
Collective @@ -2442,7 +2455,7 @@ PetscErrorCode MatConjugate_SeqSELL(Mat A) PetscFunctionBegin; for (i = 0; i < a->sliidx[a->totalslices]; i++) { val[i] = PetscConj(val[i]); } - #if defined(PETSC_HAVE_CUDA) + #if defined(PETSC_HAVE_CUPM) if (A->offloadmask != PETSC_OFFLOAD_UNALLOCATED) A->offloadmask = PETSC_OFFLOAD_CPU; #endif #else diff --git a/src/mat/impls/sell/seq/sell.h b/src/mat/impls/sell/seq/sell.h index 930c4ec9ee0..f64b780452f 100644 --- a/src/mat/impls/sell/seq/sell.h +++ b/src/mat/impls/sell/seq/sell.h @@ -111,6 +111,7 @@ static inline PetscErrorCode MatSeqXSELLFreeSELL(Mat AA, MatScalar **val, PetscI Ain->singlemalloc = PETSC_TRUE; \ Ain->maxallocmat = new_size; \ Ain->reallocs++; \ + A->nonzerostate++; \ if (WIDTH >= Ain->maxallocrow) Ain->maxallocrow += MUL; \ if (WIDTH >= Ain->rlenmax) Ain->rlenmax++; \ } \ diff --git a/src/mat/impls/sell/seq/seqcuda/sellcuda.cu b/src/mat/impls/sell/seq/seqcuda/sellcuda.cu index b0304b07617..5dd4c380ec6 100644 --- a/src/mat/impls/sell/seq/seqcuda/sellcuda.cu +++ b/src/mat/impls/sell/seq/seqcuda/sellcuda.cu @@ -986,6 +986,19 @@ PETSC_INTERN PetscErrorCode MatConvert_SeqSELL_SeqSELLCUDA(Mat B) PetscFunctionReturn(PETSC_SUCCESS); } +/*MC + MATSEQSELLCUDA - MATSELLCUDA = "(seq)sellcuda" - A matrix type to be used for sparse matrices on NVIDIA GPUs. + + Options Database Keys: ++ -mat_type seqsellcuda - sets the matrix type to "seqsellcuda" during a call to `MatSetFromOptions()` +. -mat_sell_spmv_cuda_kernel - selects a spmv kernel for MatSELLCUDA +- -mat_sell_spmv_cuda_blocky - sets the y dimension of the block size of the spmv kernels. These kernels use a 2D block with the x dimension being 32 + + Level: beginner + +.seealso: [](ch_matrices), `Mat`, `MATSELLCUDA` +M*/ + PETSC_EXTERN PetscErrorCode MatCreate_SeqSELLCUDA(Mat B) { PetscFunctionBegin; diff --git a/src/dm/dt/fe/interface/ftn-custom/makefile b/src/mat/impls/sell/seq/seqhip/makefile similarity index 73% rename from src/dm/dt/fe/interface/ftn-custom/makefile rename to src/mat/impls/sell/seq/seqhip/makefile index c6170f8b367..82492f52aec 100644 --- a/src/dm/dt/fe/interface/ftn-custom/makefile +++ b/src/mat/impls/sell/seq/seqhip/makefile @@ -1,6 +1,7 @@ -include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' +#requirespackage 'PETSC_HAVE_HIP' +MANSEC = Mat include ${PETSC_DIR}/lib/petsc/conf/variables include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/mat/impls/sell/seq/seqhip/sellhip.hip.cpp b/src/mat/impls/sell/seq/seqhip/sellhip.hip.cpp new file mode 100644 index 00000000000..fec449e0d3d --- /dev/null +++ b/src/mat/impls/sell/seq/seqhip/sellhip.hip.cpp @@ -0,0 +1,998 @@ +#include "hip/hip_runtime.h" +#include + +#include +#include +#include <../src/mat/impls/sell/seq/sell.h> /*I "petscmat.h" I*/ + +#define WARP_SIZE 64 + +typedef struct { + PetscInt maxallocmat; + PetscInt totalentries; + PetscInt *colidx; /* column index array, device pointer */ + MatScalar *val; /* value array, device pointer */ + PetscInt totalslices; + PetscInt *sliidx; /* slice index array, device pointer */ + PetscInt nonzerostate; + PetscInt kernelchoice; + PetscInt blocky; + PetscInt chunksperblock; + PetscInt totalchunks; + PetscInt *chunk_slice_map; /* starting slice for each chunk, device pointer */ +} Mat_SeqSELLHIP; + +static PetscErrorCode MatSeqSELLHIP_Destroy(Mat_SeqSELLHIP **hipstruct) +{ + PetscFunctionBegin; + if (*hipstruct) { + if ((*hipstruct)->colidx) { PetscCallHIP(hipFree((*hipstruct)->colidx)); } + if ((*hipstruct)->val) { 
PetscCallHIP(hipFree((*hipstruct)->val)); } + if ((*hipstruct)->sliidx) { PetscCallHIP(hipFree((*hipstruct)->sliidx)); } + if ((*hipstruct)->chunk_slice_map) { PetscCallHIP(hipFree((*hipstruct)->chunk_slice_map)); } + PetscCall(PetscFree(*hipstruct)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSeqSELLHIPCopyToGPU(Mat A) +{ + Mat_SeqSELLHIP *hipstruct = (Mat_SeqSELLHIP *)A->spptr; + Mat_SeqSELL *a = (Mat_SeqSELL *)A->data; + + PetscFunctionBegin; + if (A->offloadmask == PETSC_OFFLOAD_UNALLOCATED || A->offloadmask == PETSC_OFFLOAD_CPU) { + PetscCall(PetscLogEventBegin(MAT_HIPCopyToGPU, A, 0, 0, 0)); + if (A->assembled && A->nonzerostate == hipstruct->nonzerostate) { + /* copy values only */ + PetscCallHIP(hipMemcpy(hipstruct->val, a->val, a->sliidx[a->totalslices] * sizeof(MatScalar), hipMemcpyHostToDevice)); + PetscCall(PetscLogCpuToGpu(a->sliidx[a->totalslices] * (sizeof(MatScalar)))); + } else { + if (hipstruct->colidx) PetscCallHIP(hipFree(hipstruct->colidx)); + if (hipstruct->val) PetscCallHIP(hipFree(hipstruct->val)); + if (hipstruct->sliidx) PetscCallHIP(hipFree(hipstruct->sliidx)); + if (hipstruct->chunk_slice_map) PetscCallHIP(hipFree(hipstruct->chunk_slice_map)); + hipstruct->maxallocmat = a->maxallocmat; + hipstruct->totalentries = a->sliidx[a->totalslices]; + hipstruct->totalslices = a->totalslices; + hipstruct->totalchunks = a->totalchunks; + PetscCallHIP(hipMalloc((void **)&hipstruct->colidx, a->maxallocmat * sizeof(*hipstruct->colidx))); + PetscCallHIP(hipMalloc((void **)&hipstruct->val, a->maxallocmat * sizeof(*hipstruct->val))); + /* copy values, nz or maxallocmat? */ + PetscCallHIP(hipMemcpy(hipstruct->colidx, a->colidx, a->sliidx[a->totalslices] * sizeof(*a->colidx), hipMemcpyHostToDevice)); + PetscCallHIP(hipMemcpy(hipstruct->val, a->val, a->sliidx[a->totalslices] * sizeof(*a->val), hipMemcpyHostToDevice)); + + PetscCallHIP(hipMalloc((void **)&hipstruct->sliidx, (a->totalslices + 1) * sizeof(*hipstruct->sliidx))); + PetscCallHIP(hipMemcpy(hipstruct->sliidx, a->sliidx, (a->totalslices + 1) * sizeof(*a->sliidx), hipMemcpyHostToDevice)); + PetscCallHIP(hipMalloc((void **)&hipstruct->chunk_slice_map, a->totalchunks * sizeof(*hipstruct->chunk_slice_map))); + PetscCallHIP(hipMemcpy(hipstruct->chunk_slice_map, a->chunk_slice_map, a->totalchunks * sizeof(*a->chunk_slice_map), hipMemcpyHostToDevice)); + PetscCall(PetscLogCpuToGpu(a->sliidx[a->totalslices] * (sizeof(MatScalar) + sizeof(PetscInt)) + (a->totalslices + 1 + a->totalchunks) * sizeof(PetscInt))); + } + PetscCallHIP(WaitForHIP()); + PetscCall(PetscLogEventEnd(MAT_HIPCopyToGPU, A, 0, 0, 0)); + A->offloadmask = PETSC_OFFLOAD_BOTH; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static __global__ void matmult_seqsell_basic_kernel(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + PetscInt i, row, slice_id, row_in_slice; + MatScalar sum; + /* one thread per row. 
*/ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + sum = 0.0; + for (i = sliidx[slice_id] + row_in_slice; i < sliidx[slice_id + 1]; i += sliceheight) sum += aval[i] * x[acolidx[i]]; + y[row] = sum; + } +} + +static __global__ void matmultadd_seqsell_basic_kernel(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + PetscInt i, row, slice_id, row_in_slice; + MatScalar sum; + /* one thread per row. */ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + sum = 0.0; + for (i = sliidx[slice_id] + row_in_slice; i < sliidx[slice_id + 1]; i += sliceheight) sum += aval[i] * x[acolidx[i]]; + z[row] = y[row] + sum; + } +} + +#if !defined(PETSC_USE_COMPLEX) +/* use 1 block per slice, suitable for large slice width */ +template <int BLOCKY> +__global__ void matmult_seqsell_tiled_kernel9(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + __shared__ MatScalar shared[WARP_SIZE][BLOCKY]; + PetscInt i, row, slice_id = blockIdx.x; + int tid = threadIdx.x + threadIdx.y * WARP_SIZE; + /* transposed index */ + int tidx = tid % BLOCKY; + int tidy = tid / BLOCKY; + PetscScalar t = 0.0; + + row = slice_id * sliceheight + threadIdx.x % sliceheight; + if (row < nrows) { + for (i = sliidx[slice_id] + threadIdx.x + WARP_SIZE * threadIdx.y; i < sliidx[slice_id + 1]; i += WARP_SIZE * BLOCKY) t += aval[i] * x[acolidx[i]]; + } + #pragma unroll + for (int offset = WARP_SIZE / 2; offset >= sliceheight; offset /= 2) { t += __shfl_down(t, offset); } + /* transpose layout to reduce each row using warp shfl */ + if (threadIdx.x < sliceheight) shared[threadIdx.x][threadIdx.y] = t; + __syncthreads(); + if (tidy < sliceheight) t = shared[tidy][tidx]; + #pragma unroll + for (int offset = BLOCKY / 2; offset > 0; offset /= 2) { t += __shfl_down(t, offset, BLOCKY); } + if (tidx == 0 && tidy < sliceheight) { shared[0][tidy] = t; } + __syncthreads(); + if (row < nrows && threadIdx.y == 0 && threadIdx.x < sliceheight) { y[row] = shared[0][threadIdx.x]; } +} + +/* use 1 block per slice, suitable for large slice width */ +template <int BLOCKY> +__global__ void matmultadd_seqsell_tiled_kernel9(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + __shared__ MatScalar shared[WARP_SIZE][BLOCKY]; + PetscInt i, row, slice_id = blockIdx.x; + int tid = threadIdx.x + threadIdx.y * WARP_SIZE; + /* transposed index */ + int tidx = tid % BLOCKY; + int tidy = tid / BLOCKY; + PetscScalar t = 0.0; + + row = slice_id * sliceheight + threadIdx.x % sliceheight; + if (row < nrows) { + for (i = sliidx[slice_id] + threadIdx.x + WARP_SIZE * threadIdx.y; i < sliidx[slice_id + 1]; i += WARP_SIZE * BLOCKY) t += aval[i] * x[acolidx[i]]; + } + #pragma unroll + for (int offset = WARP_SIZE / 2; offset >= sliceheight; offset /= 2) { t += __shfl_down(t, offset); } + /* transpose layout to reduce each row using warp shfl */ + if (threadIdx.x < sliceheight) shared[threadIdx.x][threadIdx.y] = t; + __syncthreads(); + if (tidy < sliceheight) t = shared[tidy][tidx]; + #pragma unroll + for (int offset = BLOCKY / 2; offset > 0; offset /= 2) { t += __shfl_down(t, offset, BLOCKY); } + if (tidx == 0 && tidy < sliceheight) { shared[0][tidy] = t; } + __syncthreads(); + if (row < nrows && threadIdx.y == 0 && threadIdx.x < sliceheight) { z[row] = y[row] + shared[0][threadIdx.x]; } +} + +template <int BLOCKY> +__device__ __forceinline__ static bool segment_scan(PetscInt flag[], MatScalar shared[], PetscScalar *val) +{ + bool head = true; + #pragma unroll + for (int i = 1; i < BLOCKY * 2; i <<= 1) { + int halfwarpid = threadIdx.y * 2 + threadIdx.x / (WARP_SIZE / 2); + shared[threadIdx.x + threadIdx.y * WARP_SIZE] = 0; + if (halfwarpid >= i && flag[halfwarpid - i] == flag[halfwarpid]) { + shared[threadIdx.x + threadIdx.y * WARP_SIZE] = *val; + if (i == 1) head = false; + } + __syncthreads(); + if (halfwarpid < BLOCKY * 2 - i) *val += shared[threadIdx.x + threadIdx.y * WARP_SIZE + i * WARP_SIZE]; + __syncthreads(); + } + return head; +} + +/* load-balancing version. Chunksize is equal to the number of threads per block */ +template <int BLOCKY> +__global__ void matmult_seqsell_tiled_kernel8(PetscInt nrows, PetscInt sliceheight, PetscInt chunksperblock, PetscInt totalchunks, const PetscInt *chunk_slice_map, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + __shared__ MatScalar shared[BLOCKY * WARP_SIZE]; + PetscInt gid, row, start_slice, cid; + PetscScalar t = 0.0; + AtomicAdd atomAdd; + /* zero out y */ + for (int iter = 0; iter < 1 + (nrows - 1) / (gridDim.x * WARP_SIZE * BLOCKY); iter++) { + gid = gridDim.x * WARP_SIZE * BLOCKY * iter + blockIdx.x * BLOCKY * WARP_SIZE + threadIdx.y * WARP_SIZE + threadIdx.x; + if (gid < nrows) y[gid] = 0.0; + } + for (int iter = 0; iter < chunksperblock; iter++) { + cid = blockIdx.x * chunksperblock + iter; /* chunk id */ + if (cid < totalchunks) { + start_slice = chunk_slice_map[cid]; /* starting slice at each iteration */ + gid = cid * BLOCKY * WARP_SIZE + threadIdx.y * WARP_SIZE + threadIdx.x; + if ((cid + 1) * BLOCKY * WARP_SIZE > sliidx[start_slice + 1]) { /* this iteration covers more than one slice */ + __shared__ PetscInt flag[BLOCKY * 2]; + bool write; + PetscInt slice_id = start_slice, totalslices = PetscCeilInt(nrows, sliceheight), totalentries = sliidx[totalslices]; + /* find out the slice that this element belongs to */ + while (gid < totalentries && gid >= sliidx[slice_id + 1]) slice_id++; + if (threadIdx.x % (WARP_SIZE / 2) == 0) flag[threadIdx.y * 2 + threadIdx.x / (WARP_SIZE / 2)] = slice_id; + row = slice_id * sliceheight + threadIdx.x % sliceheight; + if (row < nrows && gid < totalentries) t = aval[gid] * x[acolidx[gid]]; + __syncthreads(); + write = segment_scan<BLOCKY>(flag, shared, &t); + if (row < nrows && gid < totalentries && write) atomAdd(y[row], t); + t = 0.0; + } else { /* this iteration covers only one slice */ + row = start_slice * sliceheight + threadIdx.x % sliceheight; + if (row < nrows) t += aval[gid] * x[acolidx[gid]]; + if (iter == chunksperblock - 1 || (cid + 2) * BLOCKY * WARP_SIZE > sliidx[start_slice + 1]) { /* last iteration or next iteration covers more than one slice */ + int tid = threadIdx.x + threadIdx.y * WARP_SIZE, tidx = tid % BLOCKY, tidy = tid / BLOCKY; + /* reduction and write to output vector */ + #pragma unroll + for (int offset = WARP_SIZE / 2; offset >= sliceheight; offset /= 2) { t += __shfl_down(t, offset); } + /* transpose layout to reduce each row using warp shfl */ + if (threadIdx.x < sliceheight) shared[threadIdx.x * BLOCKY + threadIdx.y] = t; /* shared[threadIdx.x][threadIdx.y] = t */ + __syncthreads(); + if (tidy < sliceheight) t = shared[tidy * BLOCKY + tidx]; /* shared[tidy][tidx] */ + #pragma unroll + for (int offset = BLOCKY / 2; offset > 0; offset /= 2) { t += __shfl_down(t, offset, BLOCKY); } + if (tidx == 0 && tidy < sliceheight) { shared[tidy] = t; /* shared[0][tidy] = t */ } + __syncthreads(); + if (row < nrows && threadIdx.y == 0 && threadIdx.x < sliceheight) atomAdd(y[row], shared[threadIdx.x]); /* shared[0][threadIdx.x] */ + t = 0.0; + } + } + } + } +} + +/* load-balancing version. Chunksize is equal to the number of threads per block */ +template <int BLOCKY> +__global__ void matmultadd_seqsell_tiled_kernel8(PetscInt nrows, PetscInt sliceheight, PetscInt chunksperblock, PetscInt totalchunks, const PetscInt *chunk_slice_map, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + __shared__ MatScalar shared[BLOCKY * WARP_SIZE]; + PetscInt gid, row, start_slice, cid; + PetscScalar t = 0.0; + AtomicAdd atomAdd; + /* copy y to z */ + for (int iter = 0; iter < 1 + (nrows - 1) / (gridDim.x * WARP_SIZE * BLOCKY); iter++) { + gid = gridDim.x * WARP_SIZE * BLOCKY * iter + blockIdx.x * BLOCKY * WARP_SIZE + threadIdx.y * WARP_SIZE + threadIdx.x; + if (gid < nrows) z[gid] = y[gid]; + } + for (int iter = 0; iter < chunksperblock; iter++) { + cid = blockIdx.x * chunksperblock + iter; /* chunk id */ + if (cid < totalchunks) { + start_slice = chunk_slice_map[cid]; /* starting slice at each iteration */ + gid = cid * BLOCKY * WARP_SIZE + threadIdx.y * WARP_SIZE + threadIdx.x; + if ((cid + 1) * BLOCKY * WARP_SIZE > sliidx[start_slice + 1]) { /* this iteration covers more than one slice */ + __shared__ PetscInt flag[BLOCKY * 2]; + bool write; + PetscInt slice_id = start_slice, totalslices = PetscCeilInt(nrows, sliceheight), totalentries = sliidx[totalslices]; + /* find out the slice that this element belongs to */ + while (gid < totalentries && gid >= sliidx[slice_id + 1]) slice_id++; + if (threadIdx.x % (WARP_SIZE / 2) == 0) flag[threadIdx.y * 2 + threadIdx.x / (WARP_SIZE / 2)] = slice_id; + row = slice_id * sliceheight + threadIdx.x % sliceheight; + if (row < nrows && gid < totalentries) t = aval[gid] * x[acolidx[gid]]; + __syncthreads(); + write = segment_scan<BLOCKY>(flag, shared, &t); + if (row < nrows && gid < totalentries && write) atomAdd(z[row], t); + t = 0.0; + } else { /* this iteration covers only one slice */ + row = start_slice * sliceheight + threadIdx.x % sliceheight; + if (row < nrows) t += aval[gid] * x[acolidx[gid]]; + if (iter == chunksperblock - 1 || (cid + 2) * BLOCKY * WARP_SIZE > sliidx[start_slice + 1]) { /* last iteration or next iteration covers more than one slice */ + int tid = threadIdx.x + threadIdx.y * WARP_SIZE, tidx = tid % BLOCKY, tidy = tid / BLOCKY; + /* reduction and write to output vector */ + #pragma unroll + for (int offset = WARP_SIZE / 2; offset >= sliceheight; offset /= 2) { t += __shfl_down(t, offset); } + /* transpose layout to reduce each row using warp shfl */ + if (threadIdx.x < sliceheight) shared[threadIdx.x * BLOCKY + threadIdx.y] = t; /* shared[threadIdx.x][threadIdx.y] = t */ + __syncthreads(); + if (tidy < sliceheight) t = shared[tidy * BLOCKY + tidx]; /* shared[tidy][tidx] */ + #pragma unroll + for (int offset = BLOCKY / 2; offset > 0; offset /= 2) { t += __shfl_down(t, offset, BLOCKY); } + if (tidx == 0 && tidy < sliceheight) { shared[tidy] = t; /* shared[0][tidy] = t */ } + __syncthreads(); + if (row < nrows && threadIdx.y == 0 && threadIdx.x < sliceheight) atomAdd(z[row], shared[threadIdx.x]); /* shared[0][threadIdx.x] */ + t = 0.0; + } + } + } + } +} + +/* use 1 warp per slice, suitable for small slice width */ +static __global__ void matmult_seqsell_tiled_kernel7(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + PetscInt i, row, slice_id; + slice_id = blockIdx.x * blockDim.y + threadIdx.y; + row = slice_id * sliceheight + threadIdx.x % sliceheight; + double t = 0.0; + if (row < nrows) { + for (i = sliidx[slice_id] + threadIdx.x; i < sliidx[slice_id + 1]; i += WARP_SIZE) t += aval[i] * x[acolidx[i]]; + } + #pragma unroll + for (int offset = WARP_SIZE / 2; offset >= sliceheight; offset /= 2) { t += __shfl_down(t, offset); } + if (row < nrows && threadIdx.x < sliceheight) { y[row] = t; } +} + +/* use 1 warp per slice, suitable for small slice width */ +static __global__ void matmultadd_seqsell_tiled_kernel7(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + PetscInt i, row, slice_id; + slice_id = blockIdx.x * blockDim.y + threadIdx.y; + row = slice_id * sliceheight + threadIdx.x % sliceheight; + double t = 0.0; + if (row < nrows) { + for (i = sliidx[slice_id] + threadIdx.x; i < sliidx[slice_id + 1]; i += WARP_SIZE) t += aval[i] * x[acolidx[i]]; + } + #pragma unroll + for (int offset = WARP_SIZE / 2; offset >= sliceheight; offset /= 2) { t += __shfl_down(t, offset); } + if (row < nrows && threadIdx.x < sliceheight) { z[row] = y[row] + t; } +} +#endif + +/*********** Kernels 2-6 require a slice height smaller than 512, 256, 128, 64, 32, respectively. They are kept only for performance comparison **********/ + +static __global__ void matmult_seqsell_tiled_kernel6(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. */ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 16) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 16) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 8) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 8) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 4) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 4) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + y[row] = shared[threadIdx.x]; + } + } +} + +static __global__ void matmult_seqsell_tiled_kernel5(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row.
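Kernels 2-6 all use the same scheme: blockDim.y groups of threads each accumulate a partial sum for the same row, striding through the slice by sliceheight * blockDim.y, and the partial sums are then folded in shared memory by halving the active y range at each step (16 -> 8 -> 4 -> 2 -> 1 here in kernel5), with a __syncthreads() between steps because the folding crosses warp boundaries.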
*/ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 8) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 8) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 4) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 4) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + y[row] = shared[threadIdx.x]; + } + } +} + +static __global__ void matmult_seqsell_tiled_kernel4(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. */ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 4) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 4) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + y[row] = shared[threadIdx.x]; + } + } +} + +static __global__ void matmult_seqsell_tiled_kernel3(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. */ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + y[row] = shared[threadIdx.x]; + } + } +} + +static __global__ void matmult_seqsell_tiled_kernel2(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, PetscScalar *y) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. 
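The index arithmetic is the SELL (sliced ELLPACK) layout: entry j of the row at position r within slice s is stored at sliidx[s] + j * sliceheight + r, i.e. column-major within the slice, so at each step of the loop the sliceheight threads of a slice read consecutive locations of aval and acolidx (coalesced loads). A hypothetical illustration with sliceheight = 2: a slice holding rows {0, 1} padded to width 3 occupies positions sliidx[s] .. sliidx[s] + 5 in the order r0c0, r1c0, r0c1, r1c1, r0c2, r1c2, and sliidx[s + 1] = sliidx[s] + 2 * 3.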
*/ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + y[row] = shared[threadIdx.x]; + } + } +} + +static __global__ void matmultadd_seqsell_tiled_kernel6(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. */ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 16) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 16) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 8) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 8) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 4) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 4) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + z[row] = y[row] + shared[threadIdx.x]; + } + } +} + +static __global__ void matmultadd_seqsell_tiled_kernel5(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. 
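Same traversal and reduction as the matmult_* kernels above; the only difference in the multadd variants is the final write, which combines the reduced row sum with the input vector: z[row] = y[row] + sum.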
*/ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 8) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 8) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 4) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 4) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + z[row] = y[row] + shared[threadIdx.x]; + } + } +} + +static __global__ void matmultadd_seqsell_tiled_kernel4(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. */ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 4) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 4) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + z[row] = y[row] + shared[threadIdx.x]; + } + } +} + +static __global__ void matmultadd_seqsell_tiled_kernel3(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. 
*/ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 2) { shared[threadIdx.y * blockDim.x + threadIdx.x] += shared[(threadIdx.y + 2) * blockDim.x + threadIdx.x]; } + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + z[row] = y[row] + shared[threadIdx.x]; + } + } +} + +static __global__ void matmultadd_seqsell_tiled_kernel2(PetscInt nrows, PetscInt sliceheight, const PetscInt *acolidx, const MatScalar *aval, const PetscInt *sliidx, const PetscScalar *x, const PetscScalar *y, PetscScalar *z) +{ + __shared__ MatScalar shared[32 * 16]; + PetscInt i, row, slice_id, row_in_slice; + /* multiple threads per row. */ + row = blockIdx.x * blockDim.x + threadIdx.x; + if (row < nrows) { + slice_id = row / sliceheight; + row_in_slice = row % sliceheight; + + shared[threadIdx.y * blockDim.x + threadIdx.x] = 0.0; + for (i = sliidx[slice_id] + row_in_slice + sliceheight * threadIdx.y; i < sliidx[slice_id + 1]; i += sliceheight * blockDim.y) shared[threadIdx.y * blockDim.x + threadIdx.x] += aval[i] * x[acolidx[i]]; + __syncthreads(); + if (threadIdx.y < 1) { + shared[threadIdx.x] += shared[blockDim.x + threadIdx.x]; + z[row] = y[row] + shared[threadIdx.x]; + } + } +} + +static PetscErrorCode MatMult_SeqSELLHIP(Mat A, Vec xx, Vec yy) +{ + Mat_SeqSELL *a = (Mat_SeqSELL *)A->data; + Mat_SeqSELLHIP *hipstruct = (Mat_SeqSELLHIP *)A->spptr; + PetscScalar *y; + const PetscScalar *x; + PetscInt nrows = A->rmap->n, sliceheight = a->sliceheight; + MatScalar *aval; + PetscInt *acolidx; + PetscInt *sliidx; + PetscInt nblocks, blocksize = 512; /* blocksize is fixed to be 512 */ + dim3 block2(256, 2), block4(128, 4), block8(64, 8), block16(32, 16), block32(16, 32); +#if !defined(PETSC_USE_COMPLEX) + PetscInt chunksperblock, nchunks, *chunk_slice_map; + PetscReal maxoveravg; +#endif + + PetscFunctionBegin; + PetscCheck(WARP_SIZE % sliceheight == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "The kernel requires the slice height to be a divisor of WARP_SIZE, but the input matrix has a slice height of %" PetscInt_FMT, sliceheight); + PetscCheck(!(hipstruct->kernelchoice >= 2 && hipstruct->kernelchoice <= 6 && sliceheight > 32), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Kernel choices {2-6} require the slice height of the matrix to be at most 32, but the current slice height is %" PetscInt_FMT, sliceheight); + PetscCall(MatSeqSELLHIPCopyToGPU(A)); + /* hipstruct may not be available until MatSeqSELLHIPCopyToGPU() is called */ + aval = hipstruct->val; + acolidx = hipstruct->colidx; + sliidx = hipstruct->sliidx; + + PetscCall(VecHIPGetArrayRead(xx, &x)); + PetscCall(VecHIPGetArrayWrite(yy, &y)); + PetscCall(PetscLogGpuTimeBegin()); + + switch (hipstruct->kernelchoice) { +#if !defined(PETSC_USE_COMPLEX) + case 9: /* 1 slice per block */ + nblocks = 1 + (nrows - 1) / sliceheight; + if (hipstruct->blocky == 2) { + matmult_seqsell_tiled_kernel9<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else if (hipstruct->blocky == 4) { + matmult_seqsell_tiled_kernel9<4><<<nblocks, dim3(WARP_SIZE, 4)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else if (hipstruct->blocky == 8) { + matmult_seqsell_tiled_kernel9<8><<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else if (hipstruct->blocky == 16) { + matmult_seqsell_tiled_kernel9<16><<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else { + matmult_seqsell_tiled_kernel9<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } + break; + case 7: /* each block handles blocky slices */ + nblocks = 1 + (nrows - 1) / (hipstruct->blocky * sliceheight); + if (hipstruct->blocky == 2) { + matmult_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else if (hipstruct->blocky == 4) { + matmult_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 4)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else if (hipstruct->blocky == 8) { + matmult_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else if (hipstruct->blocky == 16) { + matmult_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else { + nblocks = 1 + (nrows - 1) / (2 * sliceheight); + matmult_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } + break; +#endif + case 6: + nblocks = 1 + (nrows - 1) / (blocksize / 32); /* 1 slice per block if sliceheight=32 */ + matmult_seqsell_tiled_kernel6<<<nblocks, block32>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + break; + case 5: + nblocks = 1 + (nrows - 1) / (blocksize / 16); /* 2 slices per block if sliceheight=32 */ + matmult_seqsell_tiled_kernel5<<<nblocks, block16>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + break; + case 4: + nblocks = 1 + (nrows - 1) / (blocksize / 8); /* 4 slices per block if sliceheight=32 */ + matmult_seqsell_tiled_kernel4<<<nblocks, block8>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + break; + case 3: + nblocks = 1 + (nrows - 1) / (blocksize / 4); /* 8 slices per block if sliceheight=32 */ + matmult_seqsell_tiled_kernel3<<<nblocks, block4>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + break; + case 2: /* 16 slices per block if sliceheight=32 */ + nblocks = 1 + (nrows - 1) / (blocksize / 2); + matmult_seqsell_tiled_kernel2<<<nblocks, block2>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + break; + case 1: /* 32 slices per block if sliceheight=32 */ + nblocks = 1 + (nrows - 1) / blocksize; + matmult_seqsell_basic_kernel<<<nblocks, blocksize>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + break; +#if !defined(PETSC_USE_COMPLEX) + case 0: + maxoveravg = a->maxslicewidth / a->avgslicewidth; + if (maxoveravg > 12.0 && maxoveravg / nrows > 0.001) { /* important threshold */ + /* each block handles approximately one slice */ + PetscInt blocky = a->chunksize / 32; + nchunks = hipstruct->totalchunks; + chunksperblock = hipstruct->chunksperblock ? hipstruct->chunksperblock : 1 + (hipstruct->totalentries / hipstruct->totalslices - 1) / a->chunksize; + nblocks = 1 + (nchunks - 1) / chunksperblock; + chunk_slice_map = hipstruct->chunk_slice_map; + if (blocky == 2) { + matmult_seqsell_tiled_kernel8<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y); + } else if (blocky == 4) { + matmult_seqsell_tiled_kernel8<4><<<nblocks, dim3(WARP_SIZE, 4)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y); + } else if (blocky == 8) { + matmult_seqsell_tiled_kernel8<8><<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y); + } else if (blocky == 16) { + matmult_seqsell_tiled_kernel8<16><<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y); + } else { + matmult_seqsell_tiled_kernel8<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y); + } + } else { + PetscInt avgslicesize = sliceheight * a->avgslicewidth; + if (avgslicesize <= 432) { + if (sliceheight * a->maxslicewidth < 2048 && nrows > 100000) { + nblocks = 1 + (nrows - 1) / (2 * sliceheight); /* two slices per block */ + matmult_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else { + nblocks = 1 + (nrows - 1) / sliceheight; + matmult_seqsell_tiled_kernel9<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } + } else if (avgslicesize <= 2400) { + nblocks = 1 + (nrows - 1) / sliceheight; + matmult_seqsell_tiled_kernel9<8><<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } else { + nblocks = 1 + (nrows - 1) / sliceheight; + matmult_seqsell_tiled_kernel9<16><<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y); + } + } + break; +#endif + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "unsupported kernel choice %" PetscInt_FMT " for MatMult_SeqSELLHIP.", hipstruct->kernelchoice); + } + PetscCall(PetscLogGpuTimeEnd()); + PetscCall(VecHIPRestoreArrayRead(xx, &x)); + PetscCall(VecHIPRestoreArrayWrite(yy, &y)); + PetscCall(PetscLogGpuFlops(2.0 * a->nz - a->nonzerorowcnt)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatMultAdd_SeqSELLHIP(Mat A, Vec xx, Vec yy, Vec zz) +{ + Mat_SeqSELL *a = (Mat_SeqSELL *)A->data; + Mat_SeqSELLHIP *hipstruct = (Mat_SeqSELLHIP *)A->spptr; + PetscScalar *z; + const PetscScalar *y, *x; + PetscInt nrows = A->rmap->n, sliceheight = a->sliceheight; + MatScalar *aval = hipstruct->val; + PetscInt *acolidx = hipstruct->colidx; + PetscInt *sliidx = hipstruct->sliidx; +#if !defined(PETSC_USE_COMPLEX) + PetscReal maxoveravg; + PetscInt chunksperblock, nchunks, *chunk_slice_map; + PetscInt blocky = hipstruct->blocky; +#endif + + PetscFunctionBegin; + PetscCheck(WARP_SIZE % sliceheight == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "The kernel requires the slice height to be a divisor of WARP_SIZE, but the input matrix has a slice height of %" PetscInt_FMT, sliceheight); + PetscCheck(!(hipstruct->kernelchoice >= 2 && hipstruct->kernelchoice <= 6 && sliceheight != 16), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Kernel choices {2-6} require the slice height of the matrix to be 16, but the current slice height is %" PetscInt_FMT, sliceheight); + PetscCall(MatSeqSELLHIPCopyToGPU(A)); + if (a->nz) { + PetscInt nblocks, blocksize = 512; + dim3 block2(256, 2), block4(128, 4), block8(64, 8), block16(32, 16), block32(16, 32); + PetscCall(VecHIPGetArrayRead(xx, &x)); + PetscCall(VecHIPGetArrayRead(yy, &y)); + PetscCall(VecHIPGetArrayWrite(zz, &z)); + PetscCall(PetscLogGpuTimeBegin()); + + switch (hipstruct->kernelchoice) { +#if !defined(PETSC_USE_COMPLEX) + case 9: + nblocks = 1 + (nrows - 1) / sliceheight; + if (blocky == 2) { + matmultadd_seqsell_tiled_kernel9<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 4) { + matmultadd_seqsell_tiled_kernel9<4><<<nblocks, dim3(WARP_SIZE, 4)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 8) { + matmultadd_seqsell_tiled_kernel9<8><<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 16) { + matmultadd_seqsell_tiled_kernel9<16><<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else { + matmultadd_seqsell_tiled_kernel9<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } + break; + case 8: + /* each block handles approximately one slice */ + nchunks = hipstruct->totalchunks; + blocky = a->chunksize / 32; + chunksperblock = hipstruct->chunksperblock ? hipstruct->chunksperblock : 1 + (hipstruct->totalentries / hipstruct->totalslices - 1) / a->chunksize; + nblocks = 1 + (nchunks - 1) / chunksperblock; + chunk_slice_map = hipstruct->chunk_slice_map; + if (blocky == 2) { + matmultadd_seqsell_tiled_kernel8<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 4) { + matmultadd_seqsell_tiled_kernel8<4><<<nblocks, dim3(WARP_SIZE, 4)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 8) { + matmultadd_seqsell_tiled_kernel8<8><<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 16) { + matmultadd_seqsell_tiled_kernel8<16><<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else { + matmultadd_seqsell_tiled_kernel8<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } + break; + case 7: + nblocks = 1 + (nrows - 1) / (blocky * sliceheight); + if (blocky == 2) { + matmultadd_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 4) { + matmultadd_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 4)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 8) { + matmultadd_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 16) { + matmultadd_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else { + nblocks = 1 + (nrows - 1) / (2 * sliceheight); + matmultadd_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } + break; +#endif + case 6: + nblocks = 1 + (nrows - 1) / (blocksize / 32); + matmultadd_seqsell_tiled_kernel6<<<nblocks, block32>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + break; + case 5: + nblocks = 1 + (nrows - 1) / (blocksize / 16); + matmultadd_seqsell_tiled_kernel5<<<nblocks, block16>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + break; + case 4: + nblocks = 1 + (nrows - 1) / (blocksize / 8); + matmultadd_seqsell_tiled_kernel4<<<nblocks, block8>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + break; + case 3: + nblocks = 1 + (nrows - 1) / (blocksize / 4); + matmultadd_seqsell_tiled_kernel3<<<nblocks, block4>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + break; + case 2: + nblocks = 1 + (nrows - 1) / (blocksize / 2); + matmultadd_seqsell_tiled_kernel2<<<nblocks, block2>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + break; + case 1: + nblocks = 1 + (nrows - 1) / blocksize; + matmultadd_seqsell_basic_kernel<<<nblocks, blocksize>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + break; +#if !defined(PETSC_USE_COMPLEX) + case 0: + maxoveravg = a->maxslicewidth / a->avgslicewidth; + if (maxoveravg > 12.0 && maxoveravg / nrows > 0.001) { /* important threshold */ + /* each block handles approximately one slice */ + nchunks = hipstruct->totalchunks; + blocky = a->chunksize / 32; + chunksperblock = hipstruct->chunksperblock ? hipstruct->chunksperblock : 1 + (hipstruct->totalentries / hipstruct->totalslices - 1) / a->chunksize; + nblocks = 1 + (nchunks - 1) / chunksperblock; + chunk_slice_map = hipstruct->chunk_slice_map; + if (blocky == 2) { + matmultadd_seqsell_tiled_kernel8<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 4) { + matmultadd_seqsell_tiled_kernel8<4><<<nblocks, dim3(WARP_SIZE, 4)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 8) { + matmultadd_seqsell_tiled_kernel8<8><<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else if (blocky == 16) { + matmultadd_seqsell_tiled_kernel8<16><<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } else { + matmultadd_seqsell_tiled_kernel8<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, chunksperblock, nchunks, chunk_slice_map, acolidx, aval, sliidx, x, y, z); + } + } else { + PetscInt avgslicesize = sliceheight * a->avgslicewidth; + if (avgslicesize <= 432) { + if (sliceheight * a->maxslicewidth < 2048 && nrows > 100000) { + nblocks = 1 + (nrows - 1) / (2 * sliceheight); /* two slices per block */ + matmultadd_seqsell_tiled_kernel7<<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else { + nblocks = 1 + (nrows - 1) / sliceheight; + matmultadd_seqsell_tiled_kernel9<2><<<nblocks, dim3(WARP_SIZE, 2)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } + } else if (avgslicesize <= 2400) { + nblocks = 1 + (nrows - 1) / sliceheight; + matmultadd_seqsell_tiled_kernel9<8><<<nblocks, dim3(WARP_SIZE, 8)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } else { + nblocks = 1 + (nrows - 1) / sliceheight; + matmultadd_seqsell_tiled_kernel9<16><<<nblocks, dim3(WARP_SIZE, 16)>>>(nrows, sliceheight, acolidx, aval, sliidx, x, y, z); + } + } + break; +#endif + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "unsupported kernel choice %" PetscInt_FMT " for MatMultAdd_SeqSELLHIP.", hipstruct->kernelchoice); + } + PetscCall(PetscLogGpuTimeEnd()); + PetscCall(VecHIPRestoreArrayRead(xx, &x)); + PetscCall(VecHIPRestoreArrayRead(yy, &y)); + PetscCall(VecHIPRestoreArrayWrite(zz, &z)); + PetscCall(PetscLogGpuFlops(2.0 * a->nz)); + } else { + PetscCall(VecCopy(yy, zz)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSetFromOptions_SeqSELLHIP(Mat A, PetscOptionItems *PetscOptionsObject) +{ + Mat_SeqSELLHIP *hipstruct = (Mat_SeqSELLHIP *)A->spptr; + PetscInt kernel, blocky; + PetscBool flg; + + PetscFunctionBegin; + PetscOptionsHeadBegin(PetscOptionsObject, "SeqSELLHIP options"); + PetscCall(PetscOptionsGetInt(NULL, NULL, "-mat_sell_spmv_hip_blocky", &blocky, &flg)); + if (flg) { + PetscCheck(blocky == 2 || blocky == 4 || blocky == 8 || blocky == 16 || blocky == 32, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Unsupported blocky: %" PetscInt_FMT " it should be in {2,4,8,16,32}", blocky); + hipstruct->blocky = blocky; + } + PetscCall(PetscOptionsGetInt(NULL, NULL, "-mat_sell_spmv_hip_kernel", &kernel, &flg)); + if (flg) { + PetscCheck(kernel >= 0 && kernel <= 9, PETSC_COMM_SELF,
PETSC_ERR_ARG_OUTOFRANGE, "Wrong kernel choice: %" PetscInt_FMT " it should be in [0,9]", kernel); + hipstruct->kernelchoice = kernel; + if (kernel == 8) { PetscCall(PetscOptionsGetInt(NULL, NULL, "-mat_sell_spmv_hip_chunksperblock", &hipstruct->chunksperblock, &flg)); } + } + PetscOptionsHeadEnd(); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatAssemblyEnd_SpMV_Preprocessing_Private(Mat A) +{ + Mat_SeqSELL *a = (Mat_SeqSELL *)A->data; + + PetscFunctionBegin; + PetscCall(MatSeqSELLGetAvgSliceWidth(A, &a->avgslicewidth)); + PetscCall(MatSeqSELLGetMaxSliceWidth(A, &a->maxslicewidth)); + PetscCall(MatSeqSELLGetFillRatio(A, &a->fillratio)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatAssemblyEnd_SeqSELLHIP(Mat A, MatAssemblyType mode) +{ + PetscFunctionBegin; + PetscCall(MatAssemblyEnd_SeqSELL(A, mode)); + PetscCall(MatAssemblyEnd_SpMV_Preprocessing_Private(A)); + if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(PETSC_SUCCESS); + if (A->factortype == MAT_FACTOR_NONE) { PetscCall(MatSeqSELLHIPCopyToGPU(A)); } + A->ops->mult = MatMult_SeqSELLHIP; + A->ops->multadd = MatMultAdd_SeqSELLHIP; + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatZeroEntries_SeqSELLHIP(Mat A) +{ + PetscBool both = PETSC_FALSE; + Mat_SeqSELL *a = (Mat_SeqSELL *)A->data; + + PetscFunctionBegin; + if (A->factortype == MAT_FACTOR_NONE) { + Mat_SeqSELLHIP *hipstruct = (Mat_SeqSELLHIP *)A->spptr; + if (hipstruct->val) { + both = PETSC_TRUE; + PetscCallHIP(hipMemset(hipstruct->val, 0, a->sliidx[a->totalslices] * sizeof(*hipstruct->val))); + } + } + PetscCall(PetscArrayzero(a->val, a->sliidx[a->totalslices])); + PetscCall(MatSeqSELLInvalidateDiagonal(A)); + if (both) A->offloadmask = PETSC_OFFLOAD_BOTH; + else A->offloadmask = PETSC_OFFLOAD_CPU; + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatDestroy_SeqSELLHIP(Mat A) +{ + PetscFunctionBegin; + if (A->factortype == MAT_FACTOR_NONE && A->offloadmask != PETSC_OFFLOAD_UNALLOCATED) PetscCall(MatSeqSELLHIP_Destroy((Mat_SeqSELLHIP **)&A->spptr)); + PetscCall(MatDestroy_SeqSELL(A)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatConvert_SeqSELL_SeqSELLHIP(Mat); +static PetscErrorCode MatDuplicate_SeqSELLHIP(Mat A, MatDuplicateOption cpvalues, Mat *B) +{ + PetscFunctionBegin; + PetscCall(MatDuplicate_SeqSELL(A, cpvalues, B)); + PetscCall(MatConvert_SeqSELL_SeqSELLHIP(*B)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +PETSC_INTERN PetscErrorCode MatConvert_SeqSELL_SeqSELLHIP(Mat B) +{ + Mat_SeqSELLHIP *hipstruct; + + PetscFunctionBegin; + PetscCall(PetscFree(B->defaultvectype)); + PetscCall(PetscStrallocpy(VECHIP, &B->defaultvectype)); + + if (!B->spptr) { + if (B->factortype == MAT_FACTOR_NONE) { + PetscCall(PetscNew(&hipstruct)); + B->spptr = hipstruct; + } + } + + B->ops->assemblyend = MatAssemblyEnd_SeqSELLHIP; + B->ops->destroy = MatDestroy_SeqSELLHIP; + B->ops->setfromoptions = MatSetFromOptions_SeqSELLHIP; + B->ops->mult = MatMult_SeqSELLHIP; + B->ops->multadd = MatMultAdd_SeqSELLHIP; + B->ops->duplicate = MatDuplicate_SeqSELLHIP; + B->ops->zeroentries = MatZeroEntries_SeqSELLHIP; + + /* No need to assemble SeqSELL, but need to do the preprocessing for SpMV */ + PetscCall(MatAssemblyEnd_SpMV_Preprocessing_Private(B)); + + PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATSEQSELLHIP)); + B->offloadmask = PETSC_OFFLOAD_UNALLOCATED; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*MC + MATSEQSELLHIP - MATSELLHIP = "(seq)sellhip" - A matrix type to be used for 
sparse matrices on AMD GPUs. + + Options Database Keys: ++ -mat_type seqsellhip - sets the matrix type to "seqsellhip" during a call to `MatSetFromOptions()` +. -mat_sell_spmv_hip_kernel - selects an SpMV kernel for `MATSELLHIP` +- -mat_sell_spmv_hip_blocky - sets the y dimension of the block size of the SpMV kernels. These kernels use a 2D block with the x dimension equal to the warp size (normally 64 for AMD GPUs) + + Level: beginner + +.seealso: [](ch_matrices), `Mat`, `MATSELLHIP` +M*/ + +PETSC_EXTERN PetscErrorCode MatCreate_SeqSELLHIP(Mat B) +{ + PetscFunctionBegin; + PetscCall(MatCreate_SeqSELL(B)); + PetscCall(MatConvert_SeqSELL_SeqSELLHIP(B)); + PetscCall(MatSetFromOptions(B)); + PetscFunctionReturn(PETSC_SUCCESS); +} diff --git a/src/mat/impls/shell/shell.c b/src/mat/impls/shell/shell.c index 20518804e35..a882b061016 100644 --- a/src/mat/impls/shell/shell.c +++ b/src/mat/impls/shell/shell.c @@ -262,7 +262,7 @@ static PetscErrorCode MatZeroRowsColumns_Local_Shell(Mat mat, PetscInt nr, Petsc /* Expand/create index set of zeroed rows */ PetscCall(PetscMalloc1(nr, &idxs)); for (i = 0; i < nr; i++) idxs[i] = rows[i] + rst; - PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nr, idxs, PETSC_OWN_POINTER, &is1)); + PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nr, idxs, PETSC_OWN_POINTER, &is1)); PetscCall(ISSort(is1)); PetscCall(VecISSet(shell->zvals, is1, diag)); if (shell->zrows) { @@ -299,7 +299,7 @@ static PetscErrorCode MatZeroRowsColumns_Local_Shell(Mat mat, PetscInt nr, Petsc if (rc) { PetscCall(PetscMalloc1(nc, &idxs)); for (i = 0; i < nc; i++) idxs[i] = cols[i] + cst; - PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nc, idxs, PETSC_OWN_POINTER, &is1)); + PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nc, idxs, PETSC_OWN_POINTER, &is1)); PetscCall(ISSort(is1)); if (shell->zcols) { PetscCall(ISSum(shell->zcols, is1, &is2)); @@ -1486,6 +1486,7 @@ static struct _MatOps MatOps_Values = {NULL, NULL, /*150*/ NULL, NULL, + NULL, NULL}; static PetscErrorCode MatShellSetContext_Shell(Mat mat, void *ctx) diff --git a/src/mat/impls/transpose/htransm.c b/src/mat/impls/transpose/htransm.c index dfa650849c0..2b35d141a6c 100644 --- a/src/mat/impls/transpose/htransm.c +++ b/src/mat/impls/transpose/htransm.c @@ -102,6 +102,7 @@ PETSC_INTERN PetscErrorCode MatProductSetFromOptions_HT(Mat D) PetscCall(MatProductSetFromOptions(D)); PetscFunctionReturn(PETSC_SUCCESS); } + static PetscErrorCode MatMult_HT(Mat N, Vec x, Vec y) { Mat A; @@ -121,6 +122,199 @@ static PetscErrorCode MatMultHermitianTranspose_HT(Mat N, Vec x, Vec y) PetscCall(MatMult(A, x, y)); PetscFunctionReturn(PETSC_SUCCESS); } + +static PetscErrorCode MatSolve_HT_LU(Mat N, Vec b, Vec x) +{ + Mat A; + Vec w; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(VecDuplicate(b, &w)); + PetscCall(VecCopy(b, w)); + PetscCall(VecConjugate(w)); + PetscCall(MatSolveTranspose(A, w, x)); + PetscCall(VecConjugate(x)); + PetscCall(VecDestroy(&w)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveAdd_HT_LU(Mat N, Vec b, Vec y, Vec x) +{ + Mat A; + Vec v, w; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(VecDuplicate(b, &v)); + PetscCall(VecDuplicate(b, &w)); + PetscCall(VecCopy(y, v)); + PetscCall(VecCopy(b, w)); + PetscCall(VecConjugate(v)); + PetscCall(VecConjugate(w)); + PetscCall(MatSolveTransposeAdd(A, w, v, x)); + PetscCall(VecConjugate(x)); + PetscCall(VecDestroy(&v)); + PetscCall(VecDestroy(&w)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static
PetscErrorCode MatMatSolve_HT_LU(Mat N, Mat B, Mat X) +{ + Mat A, W; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &W)); + PetscCall(MatConjugate(W)); + PetscCall(MatMatSolveTranspose(A, W, X)); + PetscCall(MatConjugate(X)); + PetscCall(MatDestroy(&W)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLUFactor_HT(Mat N, IS row, IS col, const MatFactorInfo *minfo) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatLUFactor(A, col, row, minfo)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE, (void (*)(void))MatSolve_HT_LU)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_HT_LU)); + PetscCall(MatShellSetOperation(N, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_HT_LU)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolve_HT_Cholesky(Mat N, Vec b, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolve(A, b, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveAdd_HT_Cholesky(Mat N, Vec b, Vec y, Vec x) +{ + Mat A; + Vec v, w; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(VecDuplicate(b, &v)); + PetscCall(VecDuplicate(b, &w)); + PetscCall(VecCopy(y, v)); + PetscCall(VecCopy(b, w)); + PetscCall(VecConjugate(v)); + PetscCall(VecConjugate(w)); + PetscCall(MatSolveTransposeAdd(A, w, v, x)); + PetscCall(VecConjugate(x)); + PetscCall(VecDestroy(&v)); + PetscCall(VecDestroy(&w)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatMatSolve_HT_Cholesky(Mat N, Mat B, Mat X) +{ + Mat A, W; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &W)); + PetscCall(MatConjugate(W)); + PetscCall(MatMatSolveTranspose(A, W, X)); + PetscCall(MatConjugate(X)); + PetscCall(MatDestroy(&W)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatCholeskyFactor_HT(Mat N, IS perm, const MatFactorInfo *minfo) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCheck(!PetscDefined(USE_COMPLEX) || A->hermitian == PETSC_BOOL3_TRUE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cholesky supported only if original matrix is Hermitian"); + PetscCall(MatCholeskyFactor(A, perm, minfo)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE, (void (*)(void))MatSolve_HT_Cholesky)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_HT_Cholesky)); + PetscCall(MatShellSetOperation(N, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_HT_Cholesky)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLUFactorNumeric_HT(Mat F, Mat N, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatLUFactorNumeric(FA, A, info)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE, (void (*)(void))MatSolve_HT_LU)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_HT_LU)); + PetscCall(MatShellSetOperation(F, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_HT_LU)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLUFactorSymbolic_HT(Mat F, Mat N, IS row, IS col, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatLUFactorSymbolic(FA, A, row, col, info)); + 
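/* the symbolic phase was delegated to the underlying matrix FA; install the numeric-phase callback so a later MatLUFactorNumeric() on this virtual factor is forwarded to FA as well */ +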
PetscCall(MatShellSetOperation(F, MATOP_LUFACTOR_NUMERIC, (void (*)(void))MatLUFactorNumeric_HT)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatCholeskyFactorNumeric_HT(Mat F, Mat N, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatCholeskyFactorNumeric(FA, A, info)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE, (void (*)(void))MatSolve_HT_Cholesky)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_HT_Cholesky)); + PetscCall(MatShellSetOperation(F, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_HT_Cholesky)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatCholeskyFactorSymbolic_HT(Mat F, Mat N, IS perm, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatCholeskyFactorSymbolic(FA, A, perm, info)); + PetscCall(MatShellSetOperation(F, MATOP_CHOLESKY_FACTOR_NUMERIC, (void (*)(void))MatCholeskyFactorNumeric_HT)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatGetFactor_HT(Mat N, MatSolverType type, MatFactorType ftype, Mat *F) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatGetFactor(A, type, ftype, &FA)); + PetscCall(MatCreateTranspose(FA, F)); + if (ftype == MAT_FACTOR_LU) PetscCall(MatShellSetOperation(*F, MATOP_LUFACTOR_SYMBOLIC, (void (*)(void))MatLUFactorSymbolic_HT)); + else if (ftype == MAT_FACTOR_CHOLESKY) { + PetscCheck(!PetscDefined(USE_COMPLEX) || A->hermitian == PETSC_BOOL3_TRUE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cholesky supported only if original matrix is Hermitian"); + PetscCall(MatPropagateSymmetryOptions(A, FA)); + PetscCall(MatShellSetOperation(*F, MATOP_CHOLESKY_FACTOR_SYMBOLIC, (void (*)(void))MatCholeskyFactorSymbolic_HT)); + } else SETERRQ(PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Support for factor type %s not implemented in MATHERMITIANTRANSPOSEVIRTUAL", MatFactorTypes[ftype]); + (*F)->factortype = ftype; + PetscCall(MatDestroy(&FA)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode MatDestroy_HT(Mat N) { Mat A; @@ -134,6 +328,27 @@ static PetscErrorCode MatDestroy_HT(Mat N) #endif PetscCall(PetscObjectComposeFunction((PetscObject)N, "MatProductSetFromOptions_anytype_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)N, "MatShellSetContext_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)N, "MatFactorGetSolverType_C", NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatGetInfo_HT(Mat N, MatInfoType flag, MatInfo *info) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatGetInfo(A, flag, info)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatFactorGetSolverType_HT(Mat N, MatSolverType *type) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatFactorGetSolverType(A, type)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -145,8 +360,11 @@ static PetscErrorCode MatDuplicate_HT(Mat N, MatDuplicateOption op, Mat *m) PetscCall(MatShellGetContext(N, &A)); PetscCall(MatDuplicate(A, op, &C)); PetscCall(MatCreateHermitianTranspose(C, m)); + if (op == MAT_COPY_VALUES) { + PetscCall(MatCopy(N, *m, SAME_NONZERO_PATTERN)); + PetscCall(MatPropagateSymmetryOptions(A, C)); + } PetscCall(MatDestroy(&C)); - if (op == MAT_COPY_VALUES)
PetscCall(MatCopy(N, *m, SAME_NONZERO_PATTERN)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -222,12 +440,21 @@ static PetscErrorCode MatCopy_HT(Mat A, Mat B, MatStructure str) static PetscErrorCode MatConvert_HT(Mat N, MatType newtype, MatReuse reuse, Mat *newmat) { - Mat A; - PetscBool flg; + Mat A; + PetscScalar vscale = 1.0, vshift = 0.0; + PetscBool flg; PetscFunctionBegin; PetscCall(MatShellGetContext(N, &A)); PetscCall(MatHasOperation(A, MATOP_HERMITIAN_TRANSPOSE, &flg)); + if (flg || N->ops->getrow) { /* if this condition is false, MatConvert_Shell() will be called in MatConvert_Basic(), so the following checks are not needed */ + PetscCheck(!((Mat_Shell *)N->data)->zrows && !((Mat_Shell *)N->data)->zcols, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatZeroRows() or MatZeroRowsColumns() has been called on the input Mat"); + PetscCheck(!((Mat_Shell *)N->data)->axpy, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatAXPY() has been called on the input Mat"); + PetscCheck(!((Mat_Shell *)N->data)->left && !((Mat_Shell *)N->data)->right, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatDiagonalScale() has been called on the input Mat"); + PetscCheck(!((Mat_Shell *)N->data)->dshift, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatDiagonalSet() has been called on the input Mat"); + vscale = ((Mat_Shell *)N->data)->vscale; + vshift = ((Mat_Shell *)N->data)->vshift; + } if (flg) { Mat B; @@ -240,8 +467,13 @@ static PetscErrorCode MatConvert_HT(Mat N, MatType newtype, MatReuse reuse, Mat PetscCall(MatHeaderReplace(N, &B)); } } else { /* use basic converter as fallback */ + flg = (PetscBool)(N->ops->getrow != NULL); PetscCall(MatConvert_Basic(N, newtype, reuse, newmat)); } + if (flg) { + PetscCall(MatScale(*newmat, vscale)); + PetscCall(MatShift(*newmat, vshift)); + } PetscFunctionReturn(PETSC_SUCCESS); } @@ -305,6 +537,10 @@ PetscErrorCode MatCreateHermitianTranspose(Mat A, Mat *N) #if !defined(PETSC_USE_COMPLEX) PetscCall(MatShellSetOperation(*N, MATOP_MULT_TRANSPOSE, (void (*)(void))MatMultHermitianTranspose_HT)); #endif + PetscCall(MatShellSetOperation(*N, MATOP_LUFACTOR, (void (*)(void))MatLUFactor_HT)); + PetscCall(MatShellSetOperation(*N, MATOP_CHOLESKYFACTOR, (void (*)(void))MatCholeskyFactor_HT)); + PetscCall(MatShellSetOperation(*N, MATOP_GET_FACTOR, (void (*)(void))MatGetFactor_HT)); + PetscCall(MatShellSetOperation(*N, MATOP_GETINFO, (void (*)(void))MatGetInfo_HT)); PetscCall(MatShellSetOperation(*N, MATOP_DUPLICATE, (void (*)(void))MatDuplicate_HT)); PetscCall(MatShellSetOperation(*N, MATOP_HAS_OPERATION, (void (*)(void))MatHasOperation_HT)); PetscCall(MatShellSetOperation(*N, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_HT)); @@ -316,6 +552,7 @@ PetscErrorCode MatCreateHermitianTranspose(Mat A, Mat *N) PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatTransposeGetMat_C", MatHermitianTransposeGetMat_HT)); #endif PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatProductSetFromOptions_anytype_C", MatProductSetFromOptions_HT)); + PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatFactorGetSolverType_C", MatFactorGetSolverType_HT)); PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatShellSetContext_C", MatShellSetContext_Immutable)); PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatShellSetContextDestroy_C", MatShellSetContextDestroy_Immutable)); PetscCall(PetscObjectComposeFunction((PetscObject)*N, 
"MatShellSetManageScalingShifts_C", MatShellSetManageScalingShifts_Immutable)); diff --git a/src/mat/impls/transpose/transm.c b/src/mat/impls/transpose/transm.c index a7c7b8f49ca..84f0d9073c7 100644 --- a/src/mat/impls/transpose/transm.c +++ b/src/mat/impls/transpose/transm.c @@ -20,6 +20,234 @@ static PetscErrorCode MatMultTranspose_Transpose(Mat N, Vec x, Vec y) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode MatSolve_Transpose_LU(Mat N, Vec b, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolveTranspose(A, b, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveAdd_Transpose_LU(Mat N, Vec b, Vec y, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolveTransposeAdd(A, b, y, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveTranspose_Transpose_LU(Mat N, Vec b, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolve(A, b, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveTransposeAdd_Transpose_LU(Mat N, Vec b, Vec y, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolveAdd(A, b, y, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatMatSolve_Transpose_LU(Mat N, Mat B, Mat X) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatMatSolveTranspose(A, B, X)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatMatSolveTranspose_Transpose_LU(Mat N, Mat B, Mat X) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatMatSolve(A, B, X)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLUFactor_Transpose(Mat N, IS row, IS col, const MatFactorInfo *minfo) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatLUFactor(A, col, row, minfo)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE, (void (*)(void))MatSolve_Transpose_LU)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_Transpose_LU)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_TRANSPOSE, (void (*)(void))MatSolveTranspose_Transpose_LU)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_TRANSPOSE_ADD, (void (*)(void))MatSolveTransposeAdd_Transpose_LU)); + PetscCall(MatShellSetOperation(N, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_Transpose_LU)); + PetscCall(MatShellSetOperation(N, MATOP_MAT_SOLVE_TRANSPOSE, (void (*)(void))MatMatSolveTranspose_Transpose_LU)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolve_Transpose_Cholesky(Mat N, Vec b, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolveTranspose(A, b, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveAdd_Transpose_Cholesky(Mat N, Vec b, Vec y, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolveTransposeAdd(A, b, y, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveTranspose_Transpose_Cholesky(Mat N, Vec b, Vec x) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolve(A, b, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatSolveTransposeAdd_Transpose_Cholesky(Mat N, Vec b, Vec y, Vec x) +{ + Mat A; + + PetscFunctionBegin; + 
PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatSolveAdd(A, b, y, x)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatMatSolve_Transpose_Cholesky(Mat N, Mat B, Mat X) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatMatSolveTranspose(A, B, X)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatMatSolveTranspose_Transpose_Cholesky(Mat N, Mat B, Mat X) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatMatSolve(A, B, X)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatCholeskyFactor_Transpose(Mat N, IS perm, const MatFactorInfo *minfo) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatCholeskyFactor(A, perm, minfo)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE, (void (*)(void))MatSolve_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_TRANSPOSE, (void (*)(void))MatSolveTranspose_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(N, MATOP_SOLVE_TRANSPOSE_ADD, (void (*)(void))MatSolveTransposeAdd_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(N, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(N, MATOP_MAT_SOLVE_TRANSPOSE, (void (*)(void))MatMatSolveTranspose_Transpose_Cholesky)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLUFactorNumeric_Transpose(Mat F, Mat N, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatLUFactorNumeric(FA, A, info)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE, (void (*)(void))MatSolve_Transpose_LU)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_Transpose_LU)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_TRANSPOSE, (void (*)(void))MatSolveTranspose_Transpose_LU)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_TRANSPOSE_ADD, (void (*)(void))MatSolveTransposeAdd_Transpose_LU)); + PetscCall(MatShellSetOperation(F, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_Transpose_LU)); + PetscCall(MatShellSetOperation(F, MATOP_MAT_SOLVE_TRANSPOSE, (void (*)(void))MatMatSolveTranspose_Transpose_LU)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatLUFactorSymbolic_Transpose(Mat F, Mat N, IS row, IS col, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatLUFactorSymbolic(FA, A, row, col, info)); + PetscCall(MatShellSetOperation(F, MATOP_LUFACTOR_NUMERIC, (void (*)(void))MatLUFactorNumeric_Transpose)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatCholeskyFactorNumeric_Transpose(Mat F, Mat N, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatCholeskyFactorNumeric(FA, A, info)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE, (void (*)(void))MatSolve_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_ADD, (void (*)(void))MatSolveAdd_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_TRANSPOSE, (void (*)(void))MatSolveTranspose_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(F, MATOP_SOLVE_TRANSPOSE_ADD, (void 
(*)(void))MatSolveTransposeAdd_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(F, MATOP_MAT_SOLVE, (void (*)(void))MatMatSolve_Transpose_Cholesky)); + PetscCall(MatShellSetOperation(F, MATOP_MAT_SOLVE_TRANSPOSE, (void (*)(void))MatMatSolveTranspose_Transpose_Cholesky)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatCholeskyFactorSymbolic_Transpose(Mat F, Mat N, IS perm, const MatFactorInfo *info) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatShellGetContext(F, &FA)); + PetscCall(MatCholeskyFactorSymbolic(FA, A, perm, info)); + PetscCall(MatShellSetOperation(F, MATOP_CHOLESKY_FACTOR_NUMERIC, (void (*)(void))MatCholeskyFactorNumeric_Transpose)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatGetFactor_Transpose(Mat N, MatSolverType type, MatFactorType ftype, Mat *F) +{ + Mat A, FA; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatGetFactor(A, type, ftype, &FA)); + PetscCall(MatCreateTranspose(FA, F)); + if (ftype == MAT_FACTOR_LU) PetscCall(MatShellSetOperation(*F, MATOP_LUFACTOR_SYMBOLIC, (void (*)(void))MatLUFactorSymbolic_Transpose)); + else if (ftype == MAT_FACTOR_CHOLESKY) { + PetscCall(MatShellSetOperation(*F, MATOP_CHOLESKY_FACTOR_SYMBOLIC, (void (*)(void))MatCholeskyFactorSymbolic_Transpose)); + PetscCall(MatPropagateSymmetryOptions(A, FA)); + } else SETERRQ(PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Support for factor type %s not implemented in MATTRANSPOSEVIRTUAL", MatFactorTypes[ftype]); + (*F)->factortype = ftype; + PetscCall(MatDestroy(&FA)); + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode MatDestroy_Transpose(Mat N) { Mat A; @@ -30,6 +258,27 @@ static PetscErrorCode MatDestroy_Transpose(Mat N) PetscCall(PetscObjectComposeFunction((PetscObject)N, "MatTransposeGetMat_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)N, "MatProductSetFromOptions_anytype_C", NULL)); PetscCall(PetscObjectComposeFunction((PetscObject)N, "MatShellSetContext_C", NULL)); + PetscCall(PetscObjectComposeFunction((PetscObject)N, "MatFactorGetSolverType_C", NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatGetInfo_Transpose(Mat N, MatInfoType flag, MatInfo *info) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatGetInfo(A, flag, info)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MatFactorGetSolverType_Transpose(Mat N, MatSolverType *type) +{ + Mat A; + + PetscFunctionBegin; + PetscCall(MatShellGetContext(N, &A)); + PetscCall(MatFactorGetSolverType(A, type)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -41,8 +290,11 @@ static PetscErrorCode MatDuplicate_Transpose(Mat N, MatDuplicateOption op, Mat * PetscCall(MatShellGetContext(N, &A)); PetscCall(MatDuplicate(A, op, &C)); PetscCall(MatCreateTranspose(C, m)); + if (op == MAT_COPY_VALUES) { + PetscCall(MatCopy(N, *m, SAME_NONZERO_PATTERN)); + PetscCall(MatPropagateSymmetryOptions(A, C)); + } PetscCall(MatDestroy(&C)); - if (op == MAT_COPY_VALUES) PetscCall(MatCopy(N, *m, SAME_NONZERO_PATTERN)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -186,12 +438,21 @@ static PetscErrorCode MatCopy_Transpose(Mat A, Mat B, MatStructure str) static PetscErrorCode MatConvert_Transpose(Mat N, MatType newtype, MatReuse reuse, Mat *newmat) { - Mat A; - PetscBool flg; + Mat A; + PetscScalar vscale = 1.0, vshift = 0.0; + PetscBool flg; PetscFunctionBegin; PetscCall(MatShellGetContext(N, &A)); PetscCall(MatHasOperation(A, 
MATOP_TRANSPOSE, &flg)); + if (flg || N->ops->getrow) { /* if this condition is false, MatConvert_Shell() will be called in MatConvert_Basic(), so the following checks are not needed */ + PetscCheck(!((Mat_Shell *)N->data)->zrows && !((Mat_Shell *)N->data)->zcols, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatZeroRows() or MatZeroRowsColumns() has been called on the input Mat"); + PetscCheck(!((Mat_Shell *)N->data)->axpy, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatAXPY() has been called on the input Mat"); + PetscCheck(!((Mat_Shell *)N->data)->left && !((Mat_Shell *)N->data)->right, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatDiagonalScale() has been called on the input Mat"); + PetscCheck(!((Mat_Shell *)N->data)->dshift, PetscObjectComm((PetscObject)N), PETSC_ERR_SUP, "Cannot call MatConvert() if MatDiagonalSet() has been called on the input Mat"); + vscale = ((Mat_Shell *)N->data)->vscale; + vshift = ((Mat_Shell *)N->data)->vshift; + } if (flg) { Mat B; @@ -204,8 +465,13 @@ static PetscErrorCode MatConvert_Transpose(Mat N, MatType newtype, MatReuse reus PetscCall(MatHeaderReplace(N, &B)); } } else { /* use basic converter as fallback */ + flg = (PetscBool)(N->ops->getrow != NULL); PetscCall(MatConvert_Basic(N, newtype, reuse, newmat)); } + if (flg) { + PetscCall(MatScale(*newmat, vscale)); + PetscCall(MatShift(*newmat, vshift)); + } PetscFunctionReturn(PETSC_SUCCESS); } @@ -299,6 +565,10 @@ PetscErrorCode MatCreateTranspose(Mat A, Mat *N) PetscCall(MatShellSetOperation(*N, MATOP_DESTROY, (void (*)(void))MatDestroy_Transpose)); PetscCall(MatShellSetOperation(*N, MATOP_MULT, (void (*)(void))MatMult_Transpose)); PetscCall(MatShellSetOperation(*N, MATOP_MULT_TRANSPOSE, (void (*)(void))MatMultTranspose_Transpose)); + PetscCall(MatShellSetOperation(*N, MATOP_LUFACTOR, (void (*)(void))MatLUFactor_Transpose)); + PetscCall(MatShellSetOperation(*N, MATOP_CHOLESKYFACTOR, (void (*)(void))MatCholeskyFactor_Transpose)); + PetscCall(MatShellSetOperation(*N, MATOP_GET_FACTOR, (void (*)(void))MatGetFactor_Transpose)); + PetscCall(MatShellSetOperation(*N, MATOP_GETINFO, (void (*)(void))MatGetInfo_Transpose)); PetscCall(MatShellSetOperation(*N, MATOP_DUPLICATE, (void (*)(void))MatDuplicate_Transpose)); PetscCall(MatShellSetOperation(*N, MATOP_HAS_OPERATION, (void (*)(void))MatHasOperation_Transpose)); PetscCall(MatShellSetOperation(*N, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_Transpose)); @@ -307,6 +577,7 @@ PetscErrorCode MatCreateTranspose(Mat A, Mat *N) PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatTransposeGetMat_C", MatTransposeGetMat_Transpose)); PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatProductSetFromOptions_anytype_C", MatProductSetFromOptions_Transpose)); + PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatFactorGetSolverType_C", MatFactorGetSolverType_Transpose)); PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatShellSetContext_C", MatShellSetContext_Immutable)); PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatShellSetContextDestroy_C", MatShellSetContextDestroy_Immutable)); PetscCall(PetscObjectComposeFunction((PetscObject)*N, "MatShellSetManageScalingShifts_C", MatShellSetManageScalingShifts_Immutable)); diff --git a/src/mat/interface/dlregismat.c b/src/mat/interface/dlregismat.c index bc809d1c6f4..e5c62c075d4 100644 --- a/src/mat/interface/dlregismat.c +++ b/src/mat/interface/dlregismat.c @@ -66,55 +66,55 @@ PETSC_INTERN 
PetscErrorCode MatSolverTypeRegister_DENSEHIP(void); #endif #if defined(PETSC_HAVE_MUMPS) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MUMPS(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_MUMPS(void); #endif #if defined(PETSC_HAVE_CUDA) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_CUSPARSE(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_CUSPARSE(void); #endif #if defined(PETSC_HAVE_HIP) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_HIPSPARSE(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_HIPSPARSE(void); #endif #if defined(PETSC_HAVE_KOKKOS_KERNELS) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_KOKKOS(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_KOKKOS(void); #endif #if defined(PETSC_HAVE_VIENNACL) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_ViennaCL(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_ViennaCL(void); #endif #if defined(PETSC_HAVE_ELEMENTAL) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Elemental(void); #endif #if defined(PETSC_HAVE_SCALAPACK) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_ScaLAPACK(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_ScaLAPACK(void); #endif #if defined(PETSC_HAVE_MATLAB) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Matlab(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Matlab(void); #endif #if defined(PETSC_HAVE_ESSL) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Essl(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Essl(void); #endif #if defined(PETSC_HAVE_SUPERLU) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_SuperLU(void); #endif #if defined(PETSC_HAVE_STRUMPACK) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void); #endif #if defined(PETSC_HAVE_PASTIX) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Pastix(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Pastix(void); #endif #if defined(PETSC_HAVE_SUPERLU_DIST) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void); #endif #if defined(PETSC_HAVE_MKL_PARDISO) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void); #endif #if defined(PETSC_HAVE_MKL_CPARDISO) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_CPardiso(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_MKL_CPardiso(void); #endif #if defined(PETSC_HAVE_SUITESPARSE) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuiteSparse(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_SuiteSparse(void); #endif #if defined(PETSC_HAVE_LUSOL) -PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Lusol(void); +PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Lusol(void); #endif PETSC_INTERN PetscErrorCode MatGetFactor_seqaij_petsc(Mat, MatFactorType, Mat *); diff --git a/src/mat/interface/ftn-custom/zmatnullf.c b/src/mat/interface/ftn-custom/zmatnullf.c index a71aafca4b2..f92ed019a01 100644 --- a/src/mat/interface/ftn-custom/zmatnullf.c +++ b/src/mat/interface/ftn-custom/zmatnullf.c @@ -3,27 +3,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matnullspacecreate0_ MATNULLSPACECREATE0 - #define matnullspacecreate1_ MATNULLSPACECREATE1 #define matnullspacegetvecs_ MATNULLSPACEGETVECS - #define 
matnullspaceview_ MATNULLSPACEVIEW #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matnullspacecreate0_ matnullspacecreate0 - #define matnullspacecreate1_ matnullspacecreate1 #define matnullspacegetvecs_ matnullspacegetvecs - #define matnullspaceview_ matnullspaceview #endif -PETSC_EXTERN void matnullspacecreate0_(MPI_Fint *comm, PetscBool *has_cnst, PetscInt *n, Vec vecs[], MatNullSpace *SP, PetscErrorCode *ierr) -{ - *ierr = MatNullSpaceCreate(MPI_Comm_f2c(*(comm)), *has_cnst, *n, vecs, SP); -} - -PETSC_EXTERN void matnullspacecreate1_(MPI_Fint *comm, PetscBool *has_cnst, PetscInt *n, Vec vecs[], MatNullSpace *SP, PetscErrorCode *ierr) -{ - *ierr = MatNullSpaceCreate(MPI_Comm_f2c(*(comm)), *has_cnst, *n, vecs, SP); -} - PETSC_EXTERN void matnullspacegetvecs_(MatNullSpace *sp, PetscBool *HAS_CNST, PetscInt *N, Vec *VECS, PetscErrorCode *ierr) { PetscBool has_cnst; @@ -42,10 +26,3 @@ PETSC_EXTERN void matnullspacegetvecs_(MatNullSpace *sp, PetscBool *HAS_CNST, Pe for (i = 0; i < n; i++) { VECS[i] = vecs[i]; } } } - -PETSC_EXTERN void matnullspaceview_(MatNullSpace *sp, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = MatNullSpaceView(*sp, v); -} diff --git a/src/mat/interface/ftn-custom/zmatproductf.c b/src/mat/interface/ftn-custom/zmatproductf.c deleted file mode 100644 index 5a9c703a46c..00000000000 --- a/src/mat/interface/ftn-custom/zmatproductf.c +++ /dev/null @@ -1,43 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matproductview_ MATPRODUCTVIEW - #define matproductsetalgorithm_ MATPRODUCTSETALGORITHM - #define matproductgetalgorithm_ MATPRODUCTGETALGORITHM -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matproductview_ matproductview - #define matproductsetalgorithm_ matproductsetalgorithm - #define matproductgetalgorithm_ matproductgetalgorithm -#endif - -PETSC_EXTERN void matproductview_(Mat *mat, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = MatProductView(*mat, v); -} - -PETSC_EXTERN void matproductsetalgorithm_(Mat *mat, char *algorithm, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(algorithm, len, t); - *ierr = MatProductSetAlgorithm(*mat, t); - if (*ierr) return; - FREECHAR(algorithm, t); -} - -PETSC_EXTERN void matproductgetalgorithm_(Mat *mat, char *algorithm, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *talgo; - - *ierr = MatProductGetAlgorithm(*mat, &talgo); - if (*ierr) return; - if (algorithm != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(algorithm, talgo, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, algorithm, len); -} diff --git a/src/mat/interface/ftn-custom/zmatregf.c b/src/mat/interface/ftn-custom/zmatregf.c deleted file mode 100644 index c8197295da2..00000000000 --- a/src/mat/interface/ftn-custom/zmatregf.c +++ /dev/null @@ -1,60 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matsettype_ MATSETTYPE - #define matgettype_ MATGETTYPE - #define matsetvectype_ MATSETVECTYPE - #define matgetvectype_ MATGETVECTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matsettype_ matsettype - #define matgettype_ matgettype - #define matsetvectype_ matsetvectype - #define matgetvectype_ matgetvectype -#endif - -PETSC_EXTERN void matsettype_(Mat *x, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = 
MatSetType(*x, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void matgettype_(Mat *mm, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = MatGetType(*mm, &tname); - if (*ierr) return; - if (name != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void matsetvectype_(Mat *x, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = MatSetVecType(*x, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void matgetvectype_(Mat *mm, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = MatGetVecType(*mm, &tname); - if (*ierr) return; - if (name != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/mat/interface/ftn-custom/zmatrixf.c b/src/mat/interface/ftn-custom/zmatrixf.c index 72400b8095a..d9b238e5c38 100644 --- a/src/mat/interface/ftn-custom/zmatrixf.c +++ b/src/mat/interface/ftn-custom/zmatrixf.c @@ -4,577 +4,41 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define matgetvalues_ MATGETVALUES - #define matgetvalues0_ MATGETVALUES0 - #define matgetvaluesnn1_ MATGETVALUESnn1 - #define matgetvaluesnnnn_ MATGETVALUESnnnn - #define matgetvalues11_ MATGETVALUES11 - #define matgetvalues11a_ MATGETVALUES11A - #define matgetvalues1n_ MATGETVALUES1N - #define matgetvaluesn1_ MATGETVALUESN1 - #define matgetvalueslocal_ MATGETVALUESLOCAL - #define matgetvalueslocal0_ MATGETVALUESLOCAL0 - #define matgetvalueslocalnn1_ MATGETVALUESLOCALNN1 - #define matgetvalueslocalnnnn_ MATGETVALUESLOCALNNNN - #define matgetvalueslocal11_ MATGETVALUESLOCAL11 - #define matgetvalueslocal11a_ MATGETVALUESLOCAL11A - #define matgetvalueslocal1n_ MATGETVALUESLOCAL1N - #define matgetvalueslocaln1_ MATGETVALUESLOCALN1 - #define matsetvalues_ MATSETVALUES - #define matsetvaluesnnnn_ MATSETVALUESNNNN - #define matsetvalues0_ MATSETVALUES0 - #define matsetvaluesnn1_ MATSETVALUESNN1 - #define matsetvalues11_ MATSETVALUES11 - #define matsetvalues1n_ MATSETVALUES1N - #define matsetvaluesn1_ MATSETVALUESN1 - #define matsetvaluesblocked0_ MATSETVALUESBLOCKED0 - #define matsetvaluesblocked2_ MATSETVALUESBLOCKED2 - #define matsetvaluesblocked11_ MATSETVALUESBLOCKED11 - #define matsetvaluesblocked111_ MATSETVALUESBLOCKED111 - #define matsetvaluesblocked1n_ MATSETVALUESBLOCKED1N - #define matsetvaluesblockedn1_ MATSETVALUESBLOCKEDN1 - #define matsetvaluesblockedlocal_ MATSETVALUESBLOCKEDLOCAL - #define matsetvaluesblockedlocal0_ MATSETVALUESBLOCKEDLOCAL0 - #define matsetvaluesblockedlocal11_ MATSETVALUESBLOCKEDLOCAL11 - #define matsetvaluesblockedlocal111_ MATSETVALUESBLOCKEDLOCAL111 - #define matsetvaluesblockedlocal1n_ MATSETVALUESBLOCKEDLOCAL1N - #define matsetvaluesblockedlocaln1_ MATSETVALUESBLOCKEDLOCALN1 - #define matsetvalueslocal_ MATSETVALUESLOCAL - #define matsetvalueslocal0_ MATSETVALUESLOCAL0 - #define matsetvalueslocal11_ MATSETVALUESLOCAL11 - #define matsetvalueslocal11nn_ MATSETVALUESLOCAL11NN - #define matsetvalueslocal111_ MATSETVALUESLOCAL111 - #define matsetvalueslocal1n_ MATSETVALUESLOCAL1N - #define matsetvalueslocaln1_ MATSETVALUESLOCALN1 - #define matgetrowmin_ MATGETROWMIN - #define matgetrowminabs_ MATGETROWMINABS - #define matgetrowmax_ MATGETROWMAX - #define matgetrowmaxabs_ MATGETROWMAXABS - 
#define matdestroymatrices_ MATDESTROYMATRICES - #define matdestroysubmatrices_ MATDESTROYSUBMATRICES - #define matgetfactor_ MATGETFACTOR - #define matfactorgetsolverpackage_ MATFACTORGETSOLVERPACKAGE - #define matgetrowij_ MATGETROWIJ - #define matrestorerowij_ MATRESTOREROWIJ - #define matgetrow_ MATGETROW - #define matrestorerow_ MATRESTOREROW - #define matload_ MATLOAD - #define matview_ MATVIEW - #define matseqaijgetarray_ MATSEQAIJGETARRAY - #define matseqaijrestorearray_ MATSEQAIJRESTOREARRAY - #define matdensegetarray_ MATDENSEGETARRAY - #define matdensegetarrayread_ MATDENSEGETARRAYREAD - #define matdenserestorearray_ MATDENSERESTOREARRAY - #define matdenserestorearrayread_ MATDENSERESTOREARRAYREAD - #define matconvert_ MATCONVERT - #define matcreatesubmatrices_ MATCREATESUBMATRICES - #define matcreatesubmatricesmpi_ MATCREATESUBMATRICESMPI - #define matzerorowscolumns_ MATZEROROWSCOLUMNS - #define matzerorowscolumnsis_ MATZEROROWSCOLUMNSIS - #define matzerorowsstencil_ MATZEROROWSSTENCIL - #define matzerorowscolumnsstencil_ MATZEROROWSCOLUMNSSTENCIL - #define matzerorows_ MATZEROROWS - #define matzerorowsis_ MATZEROROWSIS - #define matzerorowslocal_ MATZEROROWSLOCAL - #define matzerorowslocal0_ MATZEROROWSLOCAL0 - #define matzerorowslocal1_ MATZEROROWSLOCAL1 - #define matzerorowslocalis_ MATZEROROWSLOCALIS - #define matzerorowscolumnslocal_ MATZEROROWSCOLUMNSLOCAL - #define matzerorowscolumnslocalis_ MATZEROROWSCOLUMNSLOCALIS - #define matsetoptionsprefix_ MATSETOPTIONSPREFIX - #define matcreatevecs_ MATCREATEVECS - #define matnullspaceremove_ MATNULLSPACEREMOVE - #define matgetinfo_ MATGETINFO - #define matlufactor_ MATLUFACTOR - #define matilufactor_ MATILUFACTOR - #define matlufactorsymbolic_ MATLUFACTORSYMBOLIC - #define matlufactornumeric_ MATLUFACTORNUMERIC - #define matcholeskyfactor_ MATCHOLESKYFACTOR - #define matcholeskyfactorsymbolic_ MATCHOLESKYFACTORSYMBOLIC - #define matcholeskyfactornumeric_ MATCHOLESKYFACTORNUMERIC - #define matilufactorsymbolic_ MATILUFACTORSYMBOLIC - #define maticcfactorsymbolic_ MATICCFACTORSYMBOLIC - #define maticcfactor_ MATICCFACTOR - #define matfactorinfoinitialize_ MATFACTORINFOINITIALIZE - #define matnullspacesetfunction_ MATNULLSPACESETFUNCTION - #define matfindnonzerorows_ MATFINDNONZEROROWS - #define matgetsize_ MATGETSIZE - #define matgetsize00_ MATGETSIZE00 - #define matgetsize10_ MATGETSIZE10 - #define matgetsize01_ MATGETSIZE01 - #define matgetlocalsize_ MATGETLOCALSIZE - #define matgetlocalsize00_ MATGETLOCALSIZE00 - #define matgetlocalsize10_ MATGETLOCALSIZE10 - #define matgetlocalsize01_ MATGETLOCALSIZE01 - #define matsetnullspace_ MATSETNULLSPACE - #define matgetownershiprange_ MATGETOWNERSHIPRANGE - #define matgetownershiprange00_ MATGETOWNERSHIPRANGE00 - #define matgetownershiprange10_ MATGETOWNERSHIPRANGE10 - #define matgetownershiprange01_ MATGETOWNERSHIPRANGE01 - #define matgetownershiprange11_ MATGETOWNERSHIPRANGE11 - #define matgetownershipis_ MATGETOWNERSHIPIS - #define matgetownershiprangecolumn_ MATGETOWNERSHIPRANGECOLUMN - #define matviewfromoptions_ MATVIEWFROMOPTIONS - #define matdestroy_ MATDESTROY - #define matcreatefromoptions_ MATCREATEFROMOPTIONS + #define matdestroymatrices_ MATDESTROYMATRICES + #define matdestroysubmatrices_ MATDESTROYSUBMATRICES + #define matgetrowij_ MATGETROWIJ + #define matrestorerowij_ MATRESTOREROWIJ + #define matgetrow_ MATGETROW + #define matrestorerow_ MATRESTOREROW + #define matseqaijgetarray_ MATSEQAIJGETARRAY + #define matseqaijrestorearray_ MATSEQAIJRESTOREARRAY + #define 
matdensegetarray_ MATDENSEGETARRAY + #define matdensegetarrayread_ MATDENSEGETARRAYREAD + #define matdenserestorearray_ MATDENSERESTOREARRAY + #define matdenserestorearrayread_ MATDENSERESTOREARRAYREAD + #define matcreatesubmatrices_ MATCREATESUBMATRICES + #define matcreatesubmatricesmpi_ MATCREATESUBMATRICESMPI + #define matnullspacesetfunction_ MATNULLSPACESETFUNCTION + #define matfindnonzerorows_ MATFINDNONZEROROWS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define matsetvalues_ matsetvalues - #define matsetvaluesnnnn_ matsetvaluesnnnn - #define matsetvalues0_ matsetvalues0 - #define matsetvaluesnn1_ matsetvaluesnn1 - #define matsetvalues11_ matsetvalues11 - #define matsetvaluesn1_ matsetvaluesn1 - #define matsetvalues1n_ matsetvalues1n - #define matsetvalueslocal_ matsetvalueslocal - #define matsetvalueslocal0_ matsetvalueslocal0 - #define matsetvalueslocal11_ matsetvalueslocal11 - #define matsetvalueslocal11nn_ matsetvalueslocal11nn - #define matsetvalueslocal111_ matsetvalueslocal111 - #define matsetvalueslocal1n_ matsetvalueslocal1n - #define matsetvalueslocaln1_ matsetvalueslocaln1 - #define matsetvaluesblocked_ matsetvaluesblocked - #define matsetvaluesblocked0_ matsetvaluesblocked0 - #define matsetvaluesblocked2_ matsetvaluesblocked2 - #define matsetvaluesblocked11_ matsetvaluesblocked11 - #define matsetvaluesblocked111_ matsetvaluesblocked111 - #define matsetvaluesblocked1n_ matsetvaluesblocked1n - #define matsetvaluesblockedn1_ matsetvaluesblockedn1 - #define matsetvaluesblockedlocal_ matsetvaluesblockedlocal - #define matsetvaluesblockedlocal0_ matsetvaluesblockedlocal0 - #define matsetvaluesblockedlocal11_ matsetvaluesblockedlocal11 - #define matsetvaluesblockedlocal111_ matsetvaluesblockedlocal111 - #define matsetvaluesblockedlocal1n_ matsetvaluesblockedlocal1n - #define matsetvaluesblockedlocaln1_ matsetvaluesblockedlocaln1 - #define matgetrowmin_ matgetrowmin - #define matgetrowminabs_ matgetrowminabs - #define matgetrowmax_ matgetrowmax - #define matgetrowmaxabs_ matgetrowmaxabs - #define matdestroymatrices_ matdestroymatrices - #define matdestroysubmatrices_ matdestroysubmatrices - #define matgetfactor_ matgetfactor - #define matfactorgetsolverpackage_ matfactorgetsolverpackage - #define matcreatevecs_ matcreatevecs - #define matgetrowij_ matgetrowij - #define matrestorerowij_ matrestorerowij - #define matgetrow_ matgetrow - #define matrestorerow_ matrestorerow - #define matview_ matview - #define matload_ matload - #define matseqaijgetarray_ matseqaijgetarray - #define matseqaijrestorearray_ matseqaijrestorearray - #define matdensegetarray_ matdensegetarray - #define matdensegetarrayread_ matdensegetarrayread - #define matdenserestorearray_ matdenserestorearray - #define matdenserestorearrayread_ matdenserestorearrayread - #define matconvert_ matconvert - #define matcreatesubmatrices_ matcreatesubmatrices - #define matcreatesubmatricesmpi_ matcreatesubmatricesmpi - #define matzerorowscolumns_ matzerorowscolumns - #define matzerorowscolumnsis_ matzerorowscolumnsis - #define matzerorowsstencil_ matzerorowsstencil - #define matzerorowscolumnsstencil_ matzerorowscolumnsstencil - #define matzerorows_ matzerorows - #define matzerorowsis_ matzerorowsis - #define matzerorowslocal_ matzerorowslocal - #define matzerorowslocalis_ matzerorowslocalis - #define matzerorowscolumnslocal_ matzerorowscolumnslocal - #define matzerorowscolumnslocalis_ matzerorowscolumnslocalis - #define matsetoptionsprefix_ matsetoptionsprefix - #define matnullspaceremove_ matnullspaceremove - #define 
matgetinfo_ matgetinfo - #define matlufactor_ matlufactor - #define matilufactor_ matilufactor - #define matlufactorsymbolic_ matlufactorsymbolic - #define matlufactornumeric_ matlufactornumeric - #define matcholeskyfactor_ matcholeskyfactor - #define matcholeskyfactorsymbolic_ matcholeskyfactorsymbolic - #define matcholeskyfactornumeric_ matcholeskyfactornumeric - #define matilufactorsymbolic_ matilufactorsymbolic - #define maticcfactorsymbolic_ maticcfactorsymbolic - #define maticcfactor_ maticcfactor - #define matfactorinfoinitialize_ matfactorinfoinitialize - #define matnullspacesetfunction_ matnullspacesetfunction - #define matfindnonzerorows_ matfindnonzerorows - #define matgetsize_ matgetsize - #define matgetsize00_ matgetsize00 - #define matgetsize10_ matgetsize10 - #define matgetsize01_ matgetsize01 - #define matgetlocalsize_ matgetlocalsize - #define matgetlocalsize00_ matgetlocalsize00 - #define matgetlocalsize10_ matgetlocalsize10 - #define matgetlocalsize01_ matgetlocalsize01 - #define matgetvalues_ matgetvalues - #define matgetvalues0_ matgetvalues0 - #define matgetvaluesnn1_ matgetvaluesnn1 - #define matgetvaluesnnnn_ matgetvaluesnnnn - #define matgetvalues11_ matgetvalues11 - #define matgetvalues11a_ matgetvalues11a - #define matgetvalues1n_ matgetvalues1n - #define matgetvaluesn1_ matgetvaluesn1 - #define matgetvalueslocal_ matgetvalueslocal - #define matgetvalueslocal0_ matgetvalueslocal0 - #define matgetvalueslocalnn1_ matgetvalueslocalnn1 - #define matgetvalueslocalnnnn_ matgetvalueslocalnnnn - #define matgetvalueslocal11_ matgetvalueslocal11 - #define matgetvalueslocal1n_ matgetvalueslocal1n - #define matgetvalueslocaln1_ matgetvalueslocaln1 - #define matsetnullspace_ matsetnullspace - #define matgetownershiprange_ matgetownershiprange - #define matgetownershiprange00_ matgetownershiprange00 - #define matgetownershiprange10_ matgetownershiprange10 - #define matgetownershiprange01_ matgetownershiprange01 - #define matgetownershiprange11_ matgetownershiprange11 - #define matgetownershipis_ matgetownershipis - #define matgetownershiprangecolumn_ matgetownershiprangecolumn - #define matviewfromoptions_ matviewfromoptions - #define matdestroy_ matdestroy - #define matcreatefromoptions_ matcreatefromoptions + #define matdestroymatrices_ matdestroymatrices + #define matdestroysubmatrices_ matdestroysubmatrices + #define matgetrowij_ matgetrowij + #define matrestorerowij_ matrestorerowij + #define matgetrow_ matgetrow + #define matrestorerow_ matrestorerow + #define matseqaijgetarray_ matseqaijgetarray + #define matseqaijrestorearray_ matseqaijrestorearray + #define matdensegetarray_ matdensegetarray + #define matdensegetarrayread_ matdensegetarrayread + #define matdenserestorearray_ matdenserestorearray + #define matdenserestorearrayread_ matdenserestorearrayread + #define matcreatesubmatrices_ matcreatesubmatrices + #define matcreatesubmatricesmpi_ matcreatesubmatricesmpi + #define matnullspacesetfunction_ matnullspacesetfunction + #define matfindnonzerorows_ matfindnonzerorows #endif -PETSC_EXTERN void matcreatefromoptions_(MPI_Fint *comm, char *prefix, PetscInt *bs, PetscInt *m, PetscInt *n, PetscInt *M, PetscInt *N, Mat *A, int *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *fprefix; - FIXCHAR(prefix, len, fprefix); - *ierr = MatCreateFromOptions(MPI_Comm_f2c(*(comm)), fprefix, *bs, *m, *n, *M, *N, A); - if (*ierr) return; - FREECHAR(prefix, fprefix); -} - -PETSC_EXTERN void matgetvalues_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar 
v[], int *ierr) -{ - *ierr = MatGetValues(*mat, *m, idxm, *n, idxn, v); -} - -PETSC_EXTERN void matgetvalues0_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalues_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvaluesnn1_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalues_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvaluesnnnn_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalues_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalues11_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalues_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalues11a_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalues_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalues1n_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalues_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvaluesn1_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalues_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalueslocal_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - *ierr = MatGetValuesLocal(*mat, *m, idxm, *n, idxn, v); -} - -PETSC_EXTERN void matgetvalueslocal0_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalueslocal_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalueslocalnn1_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalueslocal_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalueslocalnnnn_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalueslocal_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalueslocal11_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalueslocal_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalueslocal11a_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalueslocal_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalueslocal1n_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalueslocal_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetvalueslocaln1_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], int *ierr) -{ - matgetvalueslocal_(mat, m, idxm, n, idxn, v, ierr); -} - -PETSC_EXTERN void matgetownershiprange_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - *ierr = MatGetOwnershipRange(*mat, m, n); -} - -PETSC_EXTERN void matgetownershiprange00_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - *ierr = MatGetOwnershipRange(*mat, m, n); -} - -PETSC_EXTERN void matgetownershiprange10_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - 
CHKFORTRANNULLINTEGER(n); - *ierr = MatGetOwnershipRange(*mat, m, n); -} - -PETSC_EXTERN void matgetownershiprange01_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - *ierr = MatGetOwnershipRange(*mat, m, n); -} - -PETSC_EXTERN void matgetownershiprange11_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - *ierr = MatGetOwnershipRange(*mat, m, n); -} - -PETSC_EXTERN void matgetownershipis_(Mat *mat, IS *m, IS *n, int *ierr) -{ - CHKFORTRANNULLOBJECT(m); - CHKFORTRANNULLOBJECT(n); - *ierr = MatGetOwnershipIS(*mat, m, n); -} - -PETSC_EXTERN void matgetownershiprangecolumn_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - *ierr = MatGetOwnershipRangeColumn(*mat, m, n); -} - -PETSC_EXTERN void matgetsize_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - *ierr = MatGetSize(*mat, m, n); -} - -PETSC_EXTERN void matgetsize00_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - matgetsize_(mat, m, n, ierr); -} - -PETSC_EXTERN void matgetsize10_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - matgetsize_(mat, m, n, ierr); -} - -PETSC_EXTERN void matgetsize01_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - matgetsize_(mat, m, n, ierr); -} - -PETSC_EXTERN void matgetlocalsize_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - CHKFORTRANNULLINTEGER(m); - CHKFORTRANNULLINTEGER(n); - *ierr = MatGetLocalSize(*mat, m, n); -} - -PETSC_EXTERN void matgetlocalsize00_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - matgetlocalsize_(mat, m, n, ierr); -} - -PETSC_EXTERN void matgetlocalsize10_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - matgetlocalsize_(mat, m, n, ierr); -} - -PETSC_EXTERN void matgetlocalsize01_(Mat *mat, PetscInt *m, PetscInt *n, int *ierr) -{ - matgetlocalsize_(mat, m, n, ierr); -} - -PETSC_EXTERN void matsetvaluesblocked_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - *ierr = MatSetValuesBlocked(*mat, *m, idxm, *n, idxn, v, *addv); -} - -PETSC_EXTERN void matsetvaluesblocked2_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], F90Array2d *y, InsertMode *addv, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) -{ - PetscScalar *fa; - *ierr = F90Array2dAccess(y, MPIU_SCALAR, (void **)&fa PETSC_F90_2PTR_PARAM(ptrd)); - if (*ierr) return; - matsetvaluesblocked_(mat, m, idxm, n, idxn, fa, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblocked0_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblocked_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblocked11_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblocked_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblocked111_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblocked_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblocked1n_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblocked_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblockedn1_(Mat *mat, PetscInt *m, 
PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblocked_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblockedlocal_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - *ierr = MatSetValuesBlockedLocal(*mat, *nrow, irow, *ncol, icol, y, *addv); -} - -PETSC_EXTERN void matsetvaluesblockedlocal0_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblockedlocal_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblockedlocal11_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblockedlocal_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblockedlocal111_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblockedlocal_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblockedlocal1n_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblockedlocal_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesblockedlocaln1_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvaluesblockedlocal_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvalues_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - *ierr = MatSetValues(*mat, *m, idxm, *n, idxn, v, *addv); -} - -PETSC_EXTERN void matsetvaluesnnnn_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvalues_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvalues0_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvalues_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesnn1_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvalues_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvalues11_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvalues_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvaluesn1_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvalues_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvalues1n_(Mat *mat, PetscInt *m, PetscInt idxm[], PetscInt *n, PetscInt idxn[], PetscScalar v[], InsertMode *addv, int *ierr) -{ - matsetvalues_(mat, m, idxm, n, idxn, v, addv, ierr); -} - -PETSC_EXTERN void matsetvalueslocal_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - *ierr = MatSetValuesLocal(*mat, *nrow, irow, *ncol, icol, y, *addv); -} - -PETSC_EXTERN void matsetvalueslocal0_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - matsetvalueslocal_(mat, nrow, irow, 
ncol, icol, y, addv, ierr); -} - -PETSC_EXTERN void matsetvalueslocal11_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - matsetvalueslocal_(mat, nrow, irow, ncol, icol, y, addv, ierr); -} - -PETSC_EXTERN void matsetvalueslocal11nn_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - matsetvalueslocal_(mat, nrow, irow, ncol, icol, y, addv, ierr); -} - -PETSC_EXTERN void matsetvalueslocal111_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - matsetvalueslocal_(mat, nrow, irow, ncol, icol, y, addv, ierr); -} - -PETSC_EXTERN void matsetvalueslocal1n_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - matsetvalueslocal_(mat, nrow, irow, ncol, icol, y, addv, ierr); -} - -PETSC_EXTERN void matsetvalueslocaln1_(Mat *mat, PetscInt *nrow, PetscInt irow[], PetscInt *ncol, PetscInt icol[], PetscScalar y[], InsertMode *addv, int *ierr) -{ - matsetvalueslocal_(mat, nrow, irow, ncol, icol, y, addv, ierr); -} - -PETSC_EXTERN void matgetrowmin_(Mat *mat, Vec *v, PetscInt idx[], int *ierr) -{ - CHKFORTRANNULLINTEGER(idx); - *ierr = MatGetRowMin(*mat, *v, idx); -} - -PETSC_EXTERN void matgetrowminabs_(Mat *mat, Vec *v, PetscInt idx[], int *ierr) -{ - CHKFORTRANNULLINTEGER(idx); - *ierr = MatGetRowMinAbs(*mat, *v, idx); -} - -PETSC_EXTERN void matgetrowmax_(Mat *mat, Vec *v, PetscInt idx[], int *ierr) -{ - CHKFORTRANNULLINTEGER(idx); - *ierr = MatGetRowMax(*mat, *v, idx); -} - -PETSC_EXTERN void matgetrowmaxabs_(Mat *mat, Vec *v, PetscInt idx[], int *ierr) -{ - CHKFORTRANNULLINTEGER(idx); - *ierr = MatGetRowMaxAbs(*mat, *v, idx); -} - static PetscErrorCode ournullfunction(MatNullSpace sp, Vec x, void *ctx) { PetscCallFortranVoidFunction((*(void (*)(MatNullSpace *, Vec *, void *, PetscErrorCode *))(((PetscObject)sp)->fortran_func_pointers[0]))(&sp, &x, ctx, &ierr)); @@ -589,13 +53,6 @@ PETSC_EXTERN void matnullspacesetfunction_(MatNullSpace *sp, PetscErrorCode (*re *ierr = MatNullSpaceSetFunction(*sp, ournullfunction, ctx); } -PETSC_EXTERN void matcreatevecs_(Mat *mat, Vec *right, Vec *left, int *ierr) -{ - CHKFORTRANNULLOBJECT(right); - CHKFORTRANNULLOBJECT(left); - *ierr = MatCreateVecs(*mat, right, left); -} - PETSC_EXTERN void matgetrowij_(Mat *B, PetscInt *shift, PetscBool *sym, PetscBool *blockcompressed, PetscInt *n, PetscInt *ia, size_t *iia, PetscInt *ja, size_t *jja, PetscBool *done, PetscErrorCode *ierr) { const PetscInt *IA, *JA; @@ -672,20 +129,6 @@ PETSC_EXTERN void matrestorerow_(Mat *mat, PetscInt *row, PetscInt *ncols, Petsc matgetrowactive = 0; } -PETSC_EXTERN void matview_(Mat *mat, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = MatView(*mat, v); -} - -PETSC_EXTERN void matload_(Mat *mat, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = MatLoad(*mat, v); -} - PETSC_EXTERN void matseqaijgetarray_(Mat *mat, PetscScalar *fa, size_t *ia, PetscErrorCode *ierr) { PetscScalar *mm; @@ -764,37 +207,6 @@ PETSC_EXTERN void matdenserestorearrayread_(Mat *mat, PetscScalar *fa, size_t *i if (*ierr) return; } -PETSC_EXTERN void matfactorgetsolverpackage_(Mat *mat, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = 
MatFactorGetSolverType(*mat, &tname); - if (*ierr) return; - if (name != PETSC_NULL_CHARACTER_Fortran) { - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - } - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void matgetfactor_(Mat *mat, char *outtype, MatFactorType *ftype, Mat *M, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(outtype, len, t); - *ierr = MatGetFactor(*mat, t, *ftype, M); - if (*ierr) return; - FREECHAR(outtype, t); -} - -PETSC_EXTERN void matconvert_(Mat *mat, char *outtype, MatReuse *reuse, Mat *M, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(outtype, len, t); - *ierr = MatConvert(*mat, t, *reuse, M); - if (*ierr) return; - FREECHAR(outtype, t); -} - /* MatCreateSubmatrices() is slightly different from C since the Fortran provides the array to hold the submatrix objects, while in C that @@ -858,135 +270,20 @@ PETSC_EXTERN void matdestroymatrices_(PetscInt *n, Mat *smat, PetscErrorCode *ie MatDestroySubMatrices() is slightly different from C since the Fortran provides the array to hold the submatrix objects, while in C that array is allocated by the MatCreateSubmatrices() + + An extra matrix may be stored at the end of the array, hence the check; see + MatDestroySubMatrices_Dummy() */ PETSC_EXTERN void matdestroysubmatrices_(PetscInt *n, Mat *smat, PetscErrorCode *ierr) { Mat *lsmat; PetscInt i; + if (*n == 0) return; *ierr = PetscMalloc1(*n + 1, &lsmat); - for (i = 0; i <= *n; i++) { - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(&smat[i]); - lsmat[i] = smat[i]; - } - *ierr = MatDestroySubMatrices(*n, &lsmat); - for (i = 0; i <= *n; i++) { PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(&smat[i]); } -} - -PETSC_EXTERN void matdestroy_(Mat *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = MatDestroy(x); if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} - -PETSC_EXTERN void matsetoptionsprefix_(Mat *mat, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = MatSetOptionsPrefix(*mat, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void matnullspaceremove_(MatNullSpace *sp, Vec *vec, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(*sp); - *ierr = MatNullSpaceRemove(*sp, *vec); -} - -PETSC_EXTERN void matgetinfo_(Mat *mat, MatInfoType *flag, MatInfo *info, int *ierr) -{ - *ierr = MatGetInfo(*mat, *flag, info); -} - -PETSC_EXTERN void matlufactor_(Mat *mat, IS *row, IS *col, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(row); - CHKFORTRANNULLOBJECT(col); - *ierr = MatLUFactor(*mat, row ? *row : NULL, col ? *col : NULL, info); -} - -PETSC_EXTERN void matilufactor_(Mat *mat, IS *row, IS *col, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(row); - CHKFORTRANNULLOBJECT(col); - *ierr = MatILUFactor(*mat, row ? *row : NULL, col ? *col : NULL, info); -} - -PETSC_EXTERN void matlufactorsymbolic_(Mat *fact, Mat *mat, IS *row, IS *col, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(row); - CHKFORTRANNULLOBJECT(col); - *ierr = MatLUFactorSymbolic(*fact, *mat, row ? *row : NULL, col ? *col : NULL, info); -} - -PETSC_EXTERN void matlufactornumeric_(Mat *fact, Mat *mat, const MatFactorInfo *info, int *ierr) -{ - *ierr = MatLUFactorNumeric(*fact, *mat, info); -} - -PETSC_EXTERN void matcholeskyfactor_(Mat *mat, IS *perm, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(perm); - *ierr = MatCholeskyFactor(*mat, perm ?
*perm : NULL, info); -} - -PETSC_EXTERN void matcholeskyfactorsymbolic_(Mat *fact, Mat *mat, IS *perm, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(perm); - *ierr = MatCholeskyFactorSymbolic(*fact, *mat, perm ? *perm : NULL, info); -} - -PETSC_EXTERN void matcholeskyfactornumeric_(Mat *fact, Mat *mat, const MatFactorInfo *info, int *ierr) -{ - *ierr = MatCholeskyFactorNumeric(*fact, *mat, info); -} - -PETSC_EXTERN void matilufactorsymbolic_(Mat *fact, Mat *mat, IS *row, IS *col, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(row); - CHKFORTRANNULLOBJECT(col); - *ierr = MatILUFactorSymbolic(*fact, *mat, row ? *row : NULL, col ? *col : NULL, info); -} - -PETSC_EXTERN void maticcfactorsymbolic_(Mat *fact, Mat *mat, IS *perm, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(perm); - *ierr = MatICCFactorSymbolic(*fact, *mat, perm ? *perm : NULL, info); -} - -PETSC_EXTERN void maticcfactor_(Mat *mat, IS *perm, const MatFactorInfo *info, int *ierr) -{ - CHKFORTRANNULLOBJECT(perm); - *ierr = MatICCFactor(*mat, perm ? *perm : NULL, info); -} - -PETSC_EXTERN void matfactorinfoinitialize_(MatFactorInfo *info, int *ierr) -{ - *ierr = MatFactorInfoInitialize(info); -} -PETSC_EXTERN void matzerorowslocal_(Mat *mat, PetscInt *numRows, PetscInt rows[], PetscScalar *diag, Vec *x, Vec *b, int *ierr) -{ - *ierr = MatZeroRowsLocal(*mat, *numRows, rows, *diag, *x, *b); -} -PETSC_EXTERN void matzerorowslocal0_(Mat *mat, PetscInt *numRows, PetscInt rows[], PetscScalar *diag, Vec *x, Vec *b, int *ierr) -{ - matzerorowslocal_(mat, numRows, rows, diag, x, b, ierr); -} -PETSC_EXTERN void matzerorowslocal1_(Mat *mat, PetscInt *numRows, PetscInt rows[], PetscScalar *diag, Vec *x, Vec *b, int *ierr) -{ - matzerorowslocal_(mat, numRows, rows, diag, x, b, ierr); -} -PETSC_EXTERN void matviewfromoptions_(Mat *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = MatViewFromOptions(*ao, obj, t); + for (i = 0; i <= *n; i++) { lsmat[i] = smat[i]; } + *ierr = MatDestroySubMatrices(*n, &lsmat); if (*ierr) return; - FREECHAR(type, t); + for (i = 0; i <= *n; i++) { PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(&smat[i]); } } diff --git a/src/mat/interface/matnull.c b/src/mat/interface/matnull.c index 35c08922373..c73517cd497 100644 --- a/src/mat/interface/matnull.c +++ b/src/mat/interface/matnull.c @@ -41,16 +41,19 @@ PetscErrorCode MatNullSpaceSetFunction(MatNullSpace sp, PetscErrorCode (*rem)(Ma Output Parameters: + has_const - `PETSC_TRUE` if the null space contains the constant vector, otherwise `PETSC_FALSE` . 
n - number of vectors (excluding constant vector) in the null space -- vecs - orthonormal vectors that span the null space (excluding the constant vector), `NULL` if `n` is 0 +- vecs - returns array of length `n` containing the orthonormal vectors that span the null space (excluding the constant vector), `NULL` if `n` is 0 Level: developer Note: These vectors and the array are owned by the `MatNullSpace` and should not be destroyed or freed by the caller + Fortran Note: + One must pass in an array `vecs` that is large enough to hold all of the requested vectors + .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatNullSpaceCreate()`, `MatGetNullSpace()`, `MatGetNearNullSpace()` @*/ -PetscErrorCode MatNullSpaceGetVecs(MatNullSpace sp, PetscBool *has_const, PetscInt *n, const Vec **vecs) +PetscErrorCode MatNullSpaceGetVecs(MatNullSpace sp, PetscBool *has_const, PetscInt *n, const Vec *vecs[]) { PetscFunctionBegin; PetscValidHeaderSpecific(sp, MAT_NULLSPACE_CLASSID, 1); @@ -157,7 +160,7 @@ PetscErrorCode MatNullSpaceCreateRigidBody(Vec coords, MatNullSpace *sp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatNullSpaceView - Visualizes a null space object. Collective; No Fortran Support @@ -197,7 +200,7 @@ PetscErrorCode MatNullSpaceView(MatNullSpace sp, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatNullSpaceCreate - Creates a `MatNullSpace` data structure used to project vectors out of null spaces. Collective @@ -207,9 +210,9 @@ PetscErrorCode MatNullSpaceView(MatNullSpace sp, PetscViewer viewer) . has_cnst - `PETSC_TRUE` if the null space contains the constant vector; otherwise `PETSC_FALSE` . n - number of vectors (excluding constant vector) in null space - vecs - the vectors that span the null space (excluding the constant vector); - these vectors must be orthonormal. These vectors are NOT copied, so do not change them - after this call. You should free the array that you pass in and destroy the vectors (this will reduce the reference count - for them by one). + these vectors must be orthonormal. These vectors are NOT copied, so do not change them + after this call. You should free the array that you pass in and destroy the vectors (this will reduce the reference count + for them by one). Output Parameter: . SP - the null space context @@ -322,7 +325,7 @@ PetscErrorCode MatNullSpaceDestroy(MatNullSpace *sp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatNullSpaceRemove - Removes all the components of a null space from a vector. Collective diff --git a/src/mat/interface/matproduct.c b/src/mat/interface/matproduct.c index 5d134b0ba02..7a2d12aeb88 100644 --- a/src/mat/interface/matproduct.c +++ b/src/mat/interface/matproduct.c @@ -545,7 +545,7 @@ PetscErrorCode MatProductSetFromOptions(Mat mat) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatProductView - View the private matrix-matrix algorithm object within a matrix Logically Collective @@ -556,6 +556,9 @@ PetscErrorCode MatProductSetFromOptions(Mat mat) Level: intermediate + Developer Note: + Shouldn't this information be printed from an appropriate `MatView()` with perhaps certain formats set?
+ .seealso: [](ch_matrices), `MatProductType`, `Mat`, `MatProductSetFromOptions()`, `MatView()`, `MatProductCreate()`, `MatProductCreateWithMat()` @*/ PetscErrorCode MatProductView(Mat mat, PetscViewer viewer) @@ -850,7 +853,7 @@ PetscErrorCode MatProductSetFill(Mat mat, PetscReal fill) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatProductSetAlgorithm - Requests a particular algorithm for a matrix-matrix product operation that will be performed to compute the given matrix Collective @@ -876,7 +879,7 @@ PetscErrorCode MatProductSetAlgorithm(Mat mat, MatProductAlgorithm alg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatProductGetAlgorithm - Returns the selected algorithm for a matrix-matrix product operation Not Collective diff --git a/src/mat/interface/matreg.c b/src/mat/interface/matreg.c index d01fb60fbcf..5c2d1ea911d 100644 --- a/src/mat/interface/matreg.c +++ b/src/mat/interface/matreg.c @@ -85,7 +85,7 @@ PetscErrorCode MatGetMPIMatType_Private(Mat mat, MatType *MPIType) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetType - Builds matrix object for a particular matrix type Collective @@ -148,6 +148,14 @@ PetscErrorCode MatSetType(Mat mat, MatType matype) PetscCall(MatConvert(mat, matype, MAT_INPLACE_MATRIX, &mat)); PetscFunctionReturn(PETSC_SUCCESS); } + if (names && mat->assembled) { + PetscCall(PetscStrbeginswith(names->rname, "sell", &sametype)); + if (sametype) { /* mattype is MATSELL or its subclass */ + PetscCall(MatConvert(mat, MATSELL, MAT_INPLACE_MATRIX, &mat)); /* convert to matsell first */ + PetscCall(MatConvert(mat, matype, MAT_INPLACE_MATRIX, &mat)); + PetscFunctionReturn(PETSC_SUCCESS); + } + } PetscTryTypeMethod(mat, destroy); mat->ops->destroy = NULL; @@ -173,7 +181,7 @@ PetscErrorCode MatSetType(Mat mat, MatType matype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetType - Gets the matrix type as a string from the matrix object. Not Collective @@ -197,7 +205,7 @@ PetscErrorCode MatGetType(Mat mat, MatType *type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetVecType - Gets the vector type the matrix will return with `MatCreateVecs()` Not Collective @@ -221,7 +229,7 @@ PetscErrorCode MatGetVecType(Mat mat, VecType *vtype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetVecType - Set the vector type the matrix will return with `MatCreateVecs()` Collective @@ -249,7 +257,7 @@ PetscErrorCode MatSetVecType(Mat mat, VecType vtype) /*@C MatRegister - Adds a new matrix type implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined matrix type @@ -265,10 +273,8 @@ PetscErrorCode MatSetVecType(Mat mat, VecType vtype) MatRegister("my_mat", MyMatCreate); .ve - Then, your solver can be chosen with the procedural interface via -$ MatSetType(Mat, "my_mat") - or at runtime via the option -$ -mat_type my_mat + Then, your solver can be chosen with the procedural interface via `MatSetType(Mat, "my_mat")` or at runtime via the option + `-mat_type my_mat` .seealso: [](ch_matrices), `Mat`, `MatType`, `MatSetType()`, `MatRegisterAll()` @*/ @@ -282,7 +288,7 @@ PetscErrorCode MatRegister(const char sname[], PetscErrorCode (*function)(Mat)) MatRootName MatRootNameList = NULL; -/*@C +/*@ MatRegisterRootName - Registers a name that can be used for either a sequential or its corresponding parallel matrix type.
Input Parameters: diff --git a/src/mat/interface/matregis.c b/src/mat/interface/matregis.c index 549c0e961d0..6f8247b308e 100644 --- a/src/mat/interface/matregis.c +++ b/src/mat/interface/matregis.c @@ -70,6 +70,8 @@ PETSC_EXTERN PetscErrorCode MatCreate_MPISELLCUDA(Mat); #if defined(PETSC_HAVE_HIP) PETSC_EXTERN PetscErrorCode MatCreate_SeqAIJHIPSPARSE(Mat); PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJHIPSPARSE(Mat); +PETSC_EXTERN PetscErrorCode MatCreate_SeqSELLHIP(Mat); +PETSC_EXTERN PetscErrorCode MatCreate_MPISELLHIP(Mat); #endif #if defined(PETSC_HAVE_VIENNACL) @@ -213,6 +215,9 @@ PetscErrorCode MatRegisterAll(void) PetscCall(MatRegisterRootName(MATAIJHIPSPARSE, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE)); PetscCall(MatRegister(MATSEQAIJHIPSPARSE, MatCreate_SeqAIJHIPSPARSE)); PetscCall(MatRegister(MATMPIAIJHIPSPARSE, MatCreate_MPIAIJHIPSPARSE)); + PetscCall(MatRegisterRootName(MATSELLHIP, MATSEQSELLHIP, MATMPISELLHIP)); + PetscCall(MatRegister(MATSEQSELLHIP, MatCreate_SeqSELLHIP)); + PetscCall(MatRegister(MATMPISELLHIP, MatCreate_MPISELLHIP)); #endif #if defined(PETSC_HAVE_VIENNACL) diff --git a/src/mat/interface/matrix.c b/src/mat/interface/matrix.c index cb89f599f70..b13e0616be8 100644 --- a/src/mat/interface/matrix.c +++ b/src/mat/interface/matrix.c @@ -38,7 +38,7 @@ PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGen PetscLogEvent MAT_PreallCOO, MAT_SetVCOO; PetscLogEvent MAT_SetValuesBatch; PetscLogEvent MAT_ViennaCLCopyToGPU; -PetscLogEvent MAT_CUDACopyToGPU; +PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU; PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU; PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom; PetscLogEvent MAT_FactorFactS, MAT_FactorInvS; @@ -265,6 +265,9 @@ PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal Note: `keptrows` is set to `NULL` if all rows are nonzero. + Developer Note: + If `keptrows` is not `NULL`, it must be sorted. + .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()` @*/ PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows) @@ -277,6 +280,7 @@ PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows) PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix"); if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows); else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows)); + if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -548,11 +552,11 @@ PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd) The calling sequence is .vb MatGetRow(matrix,row,ncols,cols,values,ierr) - Mat matrix (input) - integer row (input) - integer ncols (output) - integer cols(maxcols) (output) - double precision (or double complex) values(maxcols) output + Mat matrix (input) + PetscInt row (input) + PetscInt ncols (output) + PetscInt cols(maxcols) (output) + PetscScalar values(maxcols) output .ve where maxcols >= maximum nonzeros in any row of the matrix. @@ -621,19 +625,8 @@ PetscErrorCode MatConjugate(Mat mat) us of the array after it has been restored. If you pass `NULL`, it will not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid. 
- Fortran Notes: - The calling sequence is -.vb - MatRestoreRow(matrix,row,ncols,cols,values,ierr) - Mat matrix (input) - integer row (input) - integer ncols (output) - integer cols(maxcols) (output) - double precision (or double complex) values(maxcols) output -.ve - Where maxcols >= maximum nonzeros in any row of the matrix. - - In Fortran `MatRestoreRow()` MUST be called after `MatGetRow()` + Fortran Note: + `MatRestoreRow()` MUST be called after `MatGetRow()` before another call to `MatGetRow()` can be made. .seealso: [](ch_matrices), `Mat`, `MatGetRow()` @@ -709,7 +702,7 @@ PetscErrorCode MatRestoreRowUpperTriangular(Mat mat) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetOptionsPrefix - Sets the prefix used for searching for all `Mat` options in the database. @@ -739,7 +732,7 @@ PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for for matrices created with `MatGetFactor()` @@ -775,7 +768,7 @@ PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for for matrices created with `MatGetFactor()` @@ -819,7 +812,7 @@ PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatAppendOptionsPrefix - Appends to the prefix used for searching for all matrix options in the database. @@ -845,7 +838,7 @@ PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetOptionsPrefix - Gets the prefix used for searching for all matrix options in the database. @@ -874,6 +867,37 @@ PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } +/*@ + MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()` + + Not Collective + + Input Parameter: +. A - the matrix + + Output Parameter: +. state - the object state + + Level: advanced + + Note: + Object state is an integer which gets increased every time + the object is changed. By saving and later querying the object state + one can determine whether information about the object is still current. + + See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed. + +.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()` +@*/ +PetscErrorCode MatGetState(Mat A, PetscObjectState *state) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(A, MAT_CLASSID, 1); + PetscAssertPointer(state, 2); + PetscCall(PetscObjectStateGet((PetscObject)A, state)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ MatResetPreallocation - Reset matrix to use the original nonzero pattern provided by the user. 
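To make the intended use of `MatGetState()` (added above) concrete, a minimal sketch; the `savedstate` bookkeeping is illustrative and not part of the API:
.vb
  PetscObjectState savedstate, state;

  PetscCall(MatGetState(A, &savedstate)); /* remember the state after setup */
  /* ... code that may modify A ... */
  PetscCall(MatGetState(A, &state));
  if (state != savedstate) {
    /* A was changed; recompute anything cached from its values */
  }
.ve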
@@ -952,7 +976,7 @@ PetscErrorCode MatSetUp(Mat A) static PetscInt insidematview = 0; #endif -/*@C +/*@ MatViewFromOptions - View properties of the matrix based on options set in the options database Collective @@ -992,7 +1016,7 @@ PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatView - Display information about a matrix in a variety of ways Collective on viewer @@ -1203,7 +1227,7 @@ PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat) } #endif -/*@C +/*@ MatLoad - Loads a matrix that has been stored in binary/HDF5 format with `MatView()`. The matrix format is determined from the options database. Generates a parallel MPI matrix if the communicator has more than one @@ -1373,7 +1397,7 @@ static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatDestroy - Frees space taken by a matrix. Collective @@ -1425,7 +1449,7 @@ PetscErrorCode MatDestroy(Mat *A) } // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ MatSetValues - Inserts or adds a block of values into a matrix. These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()` MUST be called after all calls to `MatSetValues()` have been completed. @@ -1462,6 +1486,14 @@ PetscErrorCode MatDestroy(Mat *A) The routine `MatSetValuesBlocked()` may offer much better efficiency for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`). + Fortran Notes: + If any of `idxm`, `idxn`, and `v` are scalars, pass them using, for example, +.vb + MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES) +.ve + + If `v` is a two-dimensional array, use `reshape()` to pass it as a one-dimensional array + Developer Note: This is labeled with C so does not automatically generate Fortran stubs and interfaces because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays. @@ -1513,7 +1545,7 @@ PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt } // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()` MUST be called after all calls to `MatSetValues()` have been completed. @@ -1929,7 +1961,7 @@ PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
Not Collective @@ -1996,6 +2028,14 @@ PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16] .ve + Fortran Notes: + If any of `idxm`, `idxn`, and `v` are scalars, pass them using, for example, +.vb + MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES) +.ve + + If `v` is a two-dimensional array, use `reshape()` to pass it as a one-dimensional array + .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()` @*/ PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv) @@ -2017,8 +2057,9 @@ PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], P PetscInt rbs, cbs, M, N, i; PetscCall(MatGetBlockSizes(mat, &rbs, &cbs)); PetscCall(MatGetSize(mat, &M, &N)); - for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than row length %" PetscInt_FMT, i, idxm[i], M); - for (i = 0; i < n; i++) PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block index %" PetscInt_FMT " (index %" PetscInt_FMT ") great than column length %" PetscInt_FMT, i, idxn[i], N); + for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M); + for (i = 0; i < n; i++) + PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N); } if (mat->assembled) { mat->was_assembled = PETSC_TRUE; @@ -2055,7 +2096,7 @@ PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetValues - Gets a block of local values from a matrix. Not Collective; can only return values that are owned by the given process @@ -2111,7 +2152,7 @@ PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices defined previously by `MatSetLocalToGlobalMapping()` @@ -2346,7 +2387,7 @@ PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetValuesLocal - Inserts or adds values into certain locations of a matrix, using a local numbering of the rows and columns. @@ -2373,6 +2414,14 @@ PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap) These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()` MUST be called after all calls to `MatSetValuesLocal()` have been completed.
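The insert-then-assemble discipline required by this family of routines, as a minimal C sketch (a 1D three-point stencil with boundary handling omitted; `rstart` and `rend` would come from `MatGetOwnershipRange()`):
.vb
  PetscInt    cols[3];
  PetscScalar vals[3] = {-1.0, 2.0, -1.0};

  for (PetscInt row = rstart; row < rend; row++) {
    cols[0] = row - 1; cols[1] = row; cols[2] = row + 1;
    PetscCall(MatSetValues(A, 1, &row, 3, cols, vals, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); /* values may be stashed until here */
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve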
+ Fortran Notes: + If any of `irow`, `icol`, and `y` are scalars, pass them using, for example, +.vb + MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES) +.ve + + If `y` is a two-dimensional array, use `reshape()` to pass it as a one-dimensional array + Developer Note: This is labeled with C so does not automatically generate Fortran stubs and interfaces because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays. @@ -2430,7 +2479,7 @@ PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix, using a local ordering of the nodes a block at a time. @@ -2458,6 +2507,14 @@ PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()` MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed. + Fortran Notes: + If any of `irow`, `icol`, and `y` are scalars, pass them using, for example, +.vb + MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES) +.ve + + If `y` is a two-dimensional array, use `reshape()` to pass it as a one-dimensional array + Developer Note: This is labeled with C so does not automatically generate Fortran stubs and interfaces because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays. @@ -2898,7 +2955,7 @@ PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetFactorType - gets the type of factorization a matrix is Not Collective @@ -2924,7 +2981,7 @@ PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSetFactorType - sets the type of factorization a matrix is Logically Collective @@ -2947,7 +3004,7 @@ PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetInfo - Returns information about matrix storage (number of nonzeros, memory, etc.). @@ -2963,6 +3020,8 @@ PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t) Options Database Key: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT` + Level: intermediate + Notes: The `MatInfo` context contains a variety of matrix data, including number of nonzeros allocated and used, number of mallocs during @@ -2972,7 +3031,7 @@ PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t) Example: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of - data within the MatInfo context. For example, + data within the `MatInfo` context. For example, .vb MatInfo info; Mat A; @@ -2983,12 +3042,12 @@ PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t) nz_a = info.nz_allocated; .ve - Fortran users should declare info as a double precision - array of dimension `MAT_INFO_SIZE`, and then extract the parameters + Fortran Note: + Declare info as a `MatInfo` array of dimension `MAT_INFO_SIZE`, and then extract the parameters of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h for a complete list of parameter names.
.vb - double precision info(MAT_INFO_SIZE) + MatInfo info(MAT_INFO_SIZE) double precision mal, nz_a Mat A integer ierr @@ -2998,12 +3057,6 @@ PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t) nz_a = info(MAT_INFO_NZ_ALLOCATED) .ve - Level: intermediate - - Developer Note: - The Fortran interface is not autogenerated as the - interface definition cannot be generated correctly [due to `MatInfo` argument] - .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()` @*/ PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info) @@ -3028,7 +3081,7 @@ PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatLUFactor - Performs in-place LU factorization of matrix. Collective @@ -3089,7 +3142,7 @@ PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatILUFactor - Performs in-place ILU factorization of matrix. Collective @@ -3143,7 +3196,7 @@ PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatLUFactorSymbolic - Performs symbolic LU factorization of matrix. Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`. @@ -3202,7 +3255,7 @@ PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatF PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatLUFactorNumeric - Performs numeric LU factorization of a matrix. Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`. @@ -3258,7 +3311,7 @@ PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCholeskyFactor - Performs in-place Cholesky factorization of a symmetric matrix. @@ -3311,7 +3364,7 @@ PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization of a symmetric matrix. @@ -3372,7 +3425,7 @@ PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFa PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCholeskyFactorNumeric - Performs numeric Cholesky factorization of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and `MatCholeskyFactorSymbolic()`. @@ -4233,7 +4286,7 @@ PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str) Level: intermediate Notes: - If you use `SAME_NONZERO_PATTERN` then the two matrices must have the same nonzero pattern or the routine will crash. + If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash. `MatCopy()` copies the matrix entries of a matrix to another existing matrix (after first zeroing the second matrix). A related routine is @@ -4275,7 +4328,7 @@ PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatConvert - Converts a matrix to another matrix, either of the same or different type. @@ -4284,10 +4337,10 @@ PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str) Input Parameters: + mat - the matrix . newtype - new matrix type. Use `MATSAME` to create a new matrix of the - same type as the original matrix. + same type as the original matrix. - reuse - denotes if the destination matrix is to be created or reused. 
- Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use - `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused). + Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use + `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused). Output Parameter: . M - pointer to place new matrix @@ -4457,7 +4510,7 @@ PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatFactorGetSolverType - Returns name of the package providing the factorization routines Not Collective @@ -4471,7 +4524,7 @@ PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M) Level: intermediate Fortran Note: - Pass in an empty string and the package name will be copied into it. Make sure the string is long enough. + Pass in an empty string that is long enough and the package name will be copied into it. .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()` @*/ @@ -4510,6 +4563,8 @@ static MatSolverTypeHolder MatSolverTypeHolders = NULL; /*@C MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type + Logically Collective, No Fortran Support + Input Parameters: + package - name of the package, for example petsc or superlu . mtype - the matrix type that works with this package @@ -4571,7 +4626,7 @@ PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFa MatSolverTypeGet - Gets the function that creates the factor matrix if it exists Input Parameters: -+ type - name of the package, for example petsc or superlu, if this is 'NULL' then the first result that satisfies the other criteria is returned ++ type - name of the package, for example petsc or superlu, if this is 'NULL', then the first result that satisfies the other criteria is returned . ftype - the type of factorization supported by the type - mtype - the matrix type that works with this type @@ -4582,7 +4637,7 @@ PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFa Calling sequence of `createfactor`: + A - the matrix providing the factor matrix -. mtype - the `MatType` of the factor requested +. ftype - the `MatFactorType` of the factor requested - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()` Level: developer Note: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`. Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver. - For example if one configuration had --download-mumps while a different one had --download-superlu_dist. + For example, if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
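Given this configuration dependence, a caller that prefers a particular external solver can test for it first; a minimal sketch (the MUMPS-then-PETSc preference is illustrative only):
.vb
  Mat       F;
  PetscBool avail;

  PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &avail));
  if (avail) PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F)); /* external solver present in this build */
  else PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));       /* fall back to PETSc's own factorization */
.ve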
.seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`, `MatInitializePackage()` @*/ -PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType mtype, Mat *B)) +PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B)) { MatSolverTypeHolder next = MatSolverTypeHolders; PetscBool flg; @@ -4682,7 +4737,7 @@ PetscErrorCode MatSolverTypeDestroy(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()` Logically Collective @@ -4708,7 +4763,7 @@ PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object Logically Collective @@ -4732,14 +4787,14 @@ PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOr PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetFactor - Returns a matrix suitable to calls to MatXXFactorSymbolic,Numeric() Collective Input Parameters: + mat - the matrix -. type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available), if this is 'NULL' then the first result that satisfies +. type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies the other criteria is returned - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR` @@ -4779,7 +4834,7 @@ PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOr @*/ PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f) { - PetscBool foundtype, foundmtype; + PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE; PetscErrorCode (*conv)(Mat, MatFactorType, Mat *); PetscFunctionBegin; @@ -4789,6 +4844,13 @@ PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Ma PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix"); MatCheckPreallocated(mat, 1); + PetscCall(MatIsShell(mat, &shell)); + if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop)); + if (hasop) { + PetscUseTypeMethod(mat, getfactor, type, ftype, f); + PetscFunctionReturn(PETSC_SUCCESS); + } + PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv)); if (!foundtype) { if (type) { @@ -4806,7 +4868,7 @@ PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Ma PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetFactorAvailable - Returns a flag if matrix supports particular type and factor type Not Collective @@ -4971,7 +5033,7 @@ PetscErrorCode MatGetDiagonal(Mat mat, Vec v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetRowMin - Gets the minimum value (of the real part) of each row of the matrix @@ -4982,7 +5044,7 @@ PetscErrorCode MatGetDiagonal(Mat mat, Vec v) Output Parameters: + v - the vector for storing the maximums -- idx - the indices of the column found for 
each row (optional) +- idx - the indices of the column found for each row (optional, pass `NULL` if not needed) Level: intermediate @@ -5017,7 +5079,7 @@ PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetRowMinAbs - Gets the minimum value (in absolute value) of each row of the matrix @@ -5033,7 +5095,7 @@ PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[]) Level: intermediate Notes: - if a row is completely empty or has only 0.0 values then the `idx` value for that + if a row is completely empty or has only 0.0 values, then the `idx` value for that row is 0 (the first column). This code is only implemented for a couple of matrix formats. @@ -5064,7 +5126,7 @@ PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetRowMax - Gets the maximum value (of the real part) of each row of the matrix @@ -5075,7 +5137,7 @@ PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[]) Output Parameters: + v - the vector for storing the maximums -- idx - the indices of the column found for each row (optional) +- idx - the indices of the column found for each row (optional, pass `NULL` if not needed) Level: intermediate @@ -5109,7 +5171,7 @@ PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each row of the matrix @@ -5125,7 +5187,7 @@ PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[]) Level: intermediate Notes: - if a row is completely empty or has only 0.0 values then the `idx` value for that + if a row is completely empty or has only 0.0 values, then the `idx` value for that row is 0 (the first column). This code is only implemented for a couple of matrix formats. @@ -5155,7 +5217,7 @@ PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetRowSumAbs - Gets the sum of the absolute values of each row of the matrix Logically Collective @@ -5922,17 +5984,17 @@ PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type) ignored. Thus, if memory has not already been allocated for this particular data, then the insertion is ignored. For dense matrices, in which the entire array is allocated, no entries are ever ignored. - Set after the first `MatAssemblyEnd()`. If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction + Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes have one less global reduction `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion that would generate a new entry in the nonzero structure instead produces - an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction + an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes have one less global reduction `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion that would generate a new entry that has not been preallocated will instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) This is a useful flag when debugging matrix memory preallocation.
- If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction + If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes have one less global reduction `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for other processors should be dropped, rather than stashed. @@ -6522,7 +6584,7 @@ PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatSte PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal) of a set of rows of a matrix; using local numbering of rows. @@ -6714,7 +6776,7 @@ PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetSize - Returns the numbers of rows and columns in a matrix. Not Collective @@ -6742,7 +6804,7 @@ PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`. @@ -6770,7 +6832,7 @@ PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a vector one multiplies this matrix by that are owned by this processor. @@ -6785,11 +6847,20 @@ PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n) Level: developer - Note: + Notes: + If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`. + + If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`. + If `PETSC_DECIDE` was passed as the local size, then the matrix uses default values for the range using `PetscSplitOwnership()`. + + For certain `DM`, such as `DMDA`, it is better to use `DM`-specific routines, such as `DMDAGetGhostCorners()`, to determine + the local values in the matrix. + Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix Layouts](sec_matlayout) for details on matrix layouts. -.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout` +.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`, + `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM` @*/ PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n) { @@ -6804,7 +6875,7 @@ PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by this MPI process. @@ -6819,13 +6890,23 @@ PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n) Level: beginner - Note: + Notes: + If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`. + + If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
+ If `PETSC_DECIDE` was passed as the local size, then the matrix uses default values for the range using `PetscSplitOwnership()`. + + For certain `DM`, such as `DMDA`, it is better to use `DM`-specific routines, such as `DMDAGetGhostCorners()`, to determine + the local values in the matrix. + + The high argument is one more than the last element stored locally. + For all matrices it returns the range of matrix rows associated with rows of a vector that would contain the result of a matrix vector product with this matrix. See [Matrix Layouts](sec_matlayout) for details on matrix layouts. -.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, - `PetscLayout` +.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, + `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM` @*/ PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n) { @@ -6850,18 +6931,29 @@ PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n) . mat - the matrix Output Parameter: -. ranges - start of each processors portion plus one more than the total length at the end +. ranges - start of each process's portion plus one more than the total length at the end, of length `size` + 1 + where `size` is the number of MPI processes used by `mat` Level: beginner - Note: + Notes: + If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`. + + If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`. + If `PETSC_DECIDE` was passed as the local size, then the matrix uses default values for the range using `PetscSplitOwnership()`. + + For certain `DM`, such as `DMDA`, it is better to use `DM`-specific routines, such as `DMDAGetGhostCorners()`, to determine + the local values in the matrix. + For all matrices it returns the ranges of matrix rows associated with rows of a vector that would contain the result of a matrix vector product with this matrix. See [Matrix Layouts](sec_matlayout) for details on matrix layouts. -.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout` +.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`, + `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`, + `DMDAGetGhostCorners()`, `DM` @*/ -PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt **ranges) +PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[]) { PetscFunctionBegin; PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); @@ -6885,13 +6977,23 @@ PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt **ranges) Level: beginner - Note: + Notes: + If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`. + + If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`. + If `PETSC_DECIDE` was passed as the local size, then the matrix uses default values for the range using `PetscSplitOwnership()`.
+ + For certain `DM`, such as `DMDA`, it is better to use `DM`-specific routines, such as `DMDAGetGhostCorners()`, to determine + the local values in the matrix. + Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix Layouts](sec_matlayout) for details on matrix layouts. -.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()` +.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`, + `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, + `DMDAGetGhostCorners()`, `DM` @*/ -PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges) +PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[]) { PetscFunctionBegin; PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); @@ -6901,7 +7003,7 @@ PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetOwnershipIS - Get row and column ownership of a matrix's values as index sets. Not Collective @@ -6916,18 +7018,22 @@ PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges) Level: intermediate Note: + You should call `ISDestroy()` on the returned `IS`. + For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for details on matrix layouts. -.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatSetValues()`, ``MATELEMENTAL``, ``MATSCALAPACK`` +.seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK` @*/ PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols) { PetscErrorCode (*f)(Mat, IS *, IS *); PetscFunctionBegin; + PetscValidHeaderSpecific(A, MAT_CLASSID, 1); + PetscValidType(A, 1); MatCheckPreallocated(A, 1); PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f)); if (f) { @@ -6939,7 +7045,7 @@ PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()` Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()` to complete the factorization. @@ -6998,7 +7104,7 @@ PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const Mat PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatICCFactorSymbolic - Performs symbolic incomplete Cholesky factorization for a symmetric matrix. Use `MatCholeskyFactorNumeric()` to complete the factorization. @@ -7098,8 +7204,7 @@ PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorI column 0. Fortran Note: - The Fortran interface is slightly different from that given below; it - requires one to pass in as `submat` a `Mat` (integer) array of size at least n+1. + One must pass in as `submat` a `Mat` array of size at least `n`+1.
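For reference, the create/destroy pairing discussed in the surrounding routines, as a minimal C sketch (the index-set arrays `irow[]` and `icol[]` of length `n` are assumed to exist):
.vb
  Mat *submats;

  PetscCall(MatCreateSubMatrices(A, n, irow, icol, MAT_INITIAL_MATRIX, &submats));
  /* ... use submats[0] .. submats[n-1] ... */
  PetscCall(MatDestroySubMatrices(n, &submats)); /* frees the matrices and the array itself */
.ve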
.seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse` @*/ @@ -7208,13 +7313,15 @@ PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], con Level: advanced - Note: + Notes: Frees not only the matrices, but also the array that contains the matrices + For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()` + Fortran Note: - This does not free the array. + Does not free the `mat` array. -.seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()` `MatDestroySubMatrices()` +.seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()` @*/ PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[]) { @@ -7248,7 +7355,7 @@ PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[]) Frees not only the matrices, but also the array that contains the matrices Fortran Note: - This does not free the array. + Does not free the `mat` array. .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()` @*/ @@ -7271,7 +7378,7 @@ PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process Collective @@ -7280,7 +7387,7 @@ PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[]) . mat - the matrix Output Parameter: -. matstruct - the sequential matrix with the nonzero structure of mat +. matstruct - the sequential matrix with the nonzero structure of `mat` Level: developer @@ -7308,13 +7415,12 @@ PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct) Collective Input Parameter: -. mat - the matrix (this is a pointer to the array of matrices, just to match the calling - sequence of `MatGetSeqNonzeroStructure()`) +. 
mat - the matrix Level: advanced Note: - Frees not only the matrices, but also the array that contains the matrices + This is not needed; one can just call `MatDestroy()` .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()` @*/ @@ -7751,7 +7857,7 @@ PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C) /*@ MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size - Logically Collective + Not Collective Input Parameters: + mat - the matrix @@ -7768,15 +7874,15 @@ PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C) .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI` @*/ -PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, PetscInt *bsizes) +PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[]) { - PetscInt i, ncnt = 0, nlocal; + PetscInt ncnt = 0, nlocal; PetscFunctionBegin; PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); PetscCall(MatGetLocalSize(mat, &nlocal, NULL)); PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal); - for (i = 0; i < nblocks; i++) ncnt += bsizes[i]; + for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i]; PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal); PetscCall(PetscFree(mat->bsizes)); mat->nblocks = nblocks; @@ -7788,7 +7894,7 @@ PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, PetscInt *bsi /*@C MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix, which need not be of the same size - Logically Collective; No Fortran Support + Not Collective; No Fortran Support Input Parameter: . mat - the matrix @@ -7801,12 +7907,12 @@ PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, PetscInt *bsi .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()` @*/ -PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt **bsizes) +PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[]) { PetscFunctionBegin; PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); - *nblocks = mat->nblocks; - *bsizes = mat->bsizes; + if (nblocks) *nblocks = mat->nblocks; + if (bsizes) *bsizes = mat->bsizes; PetscFunctionReturn(PETSC_SUCCESS); } @@ -8096,8 +8202,8 @@ PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, Pets . shift - 1 or zero indicating we want the indices starting at 0 or 1 . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the - inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is - always used. + inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is + always used. . n - size of (possibly compressed) matrix .
ia - the row pointers - ja - the column indices @@ -8148,8 +8254,8 @@ PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, Pet . shift - 1 or zero indicating we want the indices starting at 0 or 1 . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the - inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is - always used. + inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is + always used. Output Parameters: + n - size of (possibly compressed) matrix @@ -8182,7 +8288,7 @@ PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or `MatGetColumnIJ()`. @@ -8420,7 +8526,7 @@ M*/ If `iscol` is `NULL` then all columns are obtained (not supported in Fortran). - If `isrow` and `iscol` have a nontrivial block-size then the resulting matrix has this block-size as well. This feature + If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature is used by `PCFIELDSPLIT` to allow easy nesting of its use. Example usage: @@ -8901,7 +9007,7 @@ PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp) - mat - the array of matrices Output Parameters: -. nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space +. nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n` Level: developer @@ -8939,7 +9045,7 @@ PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[]) Input Parameters: + n - the number of matrices . mat - the array of matrices -- nullsp - an array of null spaces, `NULL` if the null space does not exist +- nullsp - an array of null spaces Level: developer @@ -9127,7 +9233,7 @@ PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix. Collective @@ -9571,7 +9677,7 @@ PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same parallel layout, `PetscLayout` for rows and columns @@ -9627,7 +9733,7 @@ PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure with default values. 
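A minimal sketch of the factor-and-solve sequence in which `MatFactorInfoInitialize()` participates (matrix `A` and vectors `b`, `x` are assumed to exist and be assembled; the nested-dissection ordering is an illustrative choice):
.vb
  Mat           F;
  IS            rperm, cperm;
  MatFactorInfo info;

  PetscCall(MatFactorInfoInitialize(&info));                   /* fill info with default values */
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rperm, &cperm)); /* illustrative ordering choice */
  PetscCall(MatLUFactorSymbolic(F, A, rperm, cperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
  PetscCall(MatDestroy(&F));
.ve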
@@ -9644,10 +9750,6 @@ PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left) Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed - Developer Note: - The Fortran interface is not autogenerated as the - interface definition cannot be generated correctly [due to `MatFactorInfo`] - .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo` @*/ PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info) @@ -10725,7 +10827,7 @@ PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is) .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()` @*/ -PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar **values) +PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[]) { PetscFunctionBegin; PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); @@ -10735,7 +10837,7 @@ PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar **values) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries. Collective; No Fortran Support @@ -10757,7 +10859,7 @@ PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar **values) .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()` @*/ -PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt *bsizes, PetscScalar *values) +PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[]) { PetscFunctionBegin; PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); @@ -10807,7 +10909,7 @@ PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created via `MatTransposeColoringCreate()`. @@ -10841,7 +10943,7 @@ PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying `MatTransposeColoring` to sparse `B`. @@ -10873,7 +10975,7 @@ PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$ in which `B^T_{dens}` is obtained from `MatTransColoringApplySpToDen()`, recover sparse matrix @@ -10908,7 +11010,7 @@ PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Ma PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$. 
Collective @@ -10931,13 +11033,13 @@ PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTra MPI_Comm comm; PetscFunctionBegin; + PetscAssertPointer(color, 3); + PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0)); PetscCall(PetscObjectGetComm((PetscObject)mat, &comm)); PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL)); - c->ctype = iscoloring->ctype; PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c); - *color = c; PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0)); PetscFunctionReturn(PETSC_SUCCESS); @@ -10945,8 +11047,7 @@ PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTra /*@ MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the - matrix has had no new nonzero locations added to (or removed from) the matrix since the previous call then the value will be the - same, otherwise it will be larger + matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger. Not Collective @@ -11294,7 +11395,7 @@ PetscErrorCode MatSetInf(Mat A) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateGraph - create a scalar matrix (that is a matrix with one vertex for each block vertex in the original matrix), for use in graph algorithms and possibly removes small values from the graph structure. diff --git a/src/mat/matfd/fdmatrix.c b/src/mat/matfd/fdmatrix.c index f25581565e0..689ee50e339 100644 --- a/src/mat/matfd/fdmatrix.c +++ b/src/mat/matfd/fdmatrix.c @@ -67,7 +67,7 @@ static PetscErrorCode MatFDColoringView_Draw(MatFDColoring fd, PetscViewer viewe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatFDColoringView - Views a finite difference coloring context. 
Collective @@ -368,7 +368,7 @@ PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring matfd) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatFDColoringSetType - Sets the approach for computing the finite difference parameter Collective @@ -451,6 +451,7 @@ PetscErrorCode MatFDColoringCreate(Mat mat, ISColoring iscoloring, MatFDColoring PetscFunctionBegin; PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); + PetscAssertPointer(color, 3); PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be assembled by calls to MatAssemblyBegin/End();"); PetscCall(PetscLogEventBegin(MAT_FDColoringCreate, mat, 0, 0, 0)); PetscCall(MatGetSize(mat, &M, &N)); diff --git a/src/mat/matfd/ftn-custom/zfdmatrixf.c b/src/mat/matfd/ftn-custom/zfdmatrixf.c index 562f9bbdaaf..9bfbab70149 100644 --- a/src/mat/matfd/ftn-custom/zfdmatrixf.c +++ b/src/mat/matfd/ftn-custom/zfdmatrixf.c @@ -8,15 +8,11 @@ typedef struct _p_SNES *SNES; #if defined(PETSC_HAVE_FORTRAN_CAPS) #define matfdcoloringsetfunctionts_ MATFDCOLORINGSETFUNCTIONTS #define matfdcoloringsetfunction_ MATFDCOLORINGSETFUNCTION - #define matfdcoloringview_ MATFDCOLORINGVIEW - #define matfdcoloingsettype_ MATFDCOLORINGSETTYPE #define matfdcoloringgetperturbedcolumnsf90_ MATFDCOLORINGGETPERTURBEDCOLUMNSF90 #define matfdcoloringrestoreperturbedcolumnsf90_ MATFDCOLORINGRESTOREPERTURBEDCOLUMNSF90 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define matfdcoloringsetfunctionts_ matfdcoloringsetfunctionts #define matfdcoloringsetfunction_ matfdcoloringsetfunction - #define matfdcoloringview_ matfdcoloringview - #define matfdcoloingsettype_ matfdcoloringsettype #define matfdcoloringgetperturbedcolumnsf90_ matfdcoloringgetperturbedcolumnsf90 #define matfdcoloringrestoreperturbedcolumnsf90_ matfdcoloringrestoreperturbedcolumnsf90 #endif @@ -73,21 +69,3 @@ PETSC_EXTERN void matfdcoloringsetfunction_(MatFDColoring *fd, void (*f)(SNES *, *ierr = MatFDColoringSetFunction(*fd, (PetscErrorCodeFn *)ourmatfdcoloringfunctionsnes, *fd); } - -PETSC_EXTERN void matfdcoloringview_(MatFDColoring *c, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = MatFDColoringView(*c, v); -} - -PETSC_EXTERN void matfdcoloringsettype_(MatFDColoring *matfdcoloring, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = MatFDColoringSetType(*matfdcoloring, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/mat/tests/bench_spmv.c b/src/mat/tests/bench_spmv.c index 61e65e209c7..f9dee38761c 100644 --- a/src/mat/tests/bench_spmv.c +++ b/src/mat/tests/bench_spmv.c @@ -129,26 +129,25 @@ PetscErrorCode TimedSpMV(Mat A, Vec b, PetscReal *time, const char *petscmatform PetscInt i; Vec u; PetscLogDouble vstart = 0, vend = 0; - PetscBool isaijcusparse, isaijkokkos, issellcuda; + PetscBool isaijcusparse, isaijhipsparse, isaijkokkos, issellcuda, issellhip; PetscFunctionBeginUser; PetscCall(PetscStrcmp(petscmatformat, MATAIJCUSPARSE, &isaijcusparse)); + PetscCall(PetscStrcmp(petscmatformat, MATAIJHIPSPARSE, &isaijhipsparse)); PetscCall(PetscStrcmp(petscmatformat, MATAIJKOKKOS, &isaijkokkos)); PetscCall(PetscStrcmp(petscmatformat, MATSELLCUDA, &issellcuda)); + PetscCall(PetscStrcmp(petscmatformat, MATSELLHIP, &issellhip)); if (isaijcusparse || issellcuda) PetscCall(VecSetType(b, VECCUDA)); if (isaijkokkos) PetscCall(VecSetType(b, VECKOKKOS)); + if (isaijhipsparse || issellhip) PetscCall(VecSetType(b, VECHIP)); 
PetscCall(VecDuplicate(b, &u)); if (time) *time = 0.0; for (i = 0; i < repetitions; i++) { if (use_gpu) { PetscCall(MatDestroy(&A2)); PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &A2)); - if (issellcuda) { - PetscCall(MatConvert(A2, MATSELL, MAT_INPLACE_MATRIX, &A2)); - PetscCall(MatConvert(A2, MATSELLCUDA, MAT_INPLACE_MATRIX, &A2)); - } else { - PetscCall(MatConvert(A2, petscmatformat, MAT_INPLACE_MATRIX, &A2)); - } + PetscCall(MatSetType(A2, petscmatformat)); + PetscCall(MatSetFromOptions(A2)); // This allows to change parameters such as slice height in SpMV kernels for SELL } else A2 = A; /* Timing MatMult */ if (time) PetscCall(PetscTime(&vstart)); @@ -165,11 +164,40 @@ PetscErrorCode TimedSpMV(Mat A, Vec b, PetscReal *time, const char *petscmatform PetscFunctionReturn(PETSC_SUCCESS); } +PetscErrorCode WarmUpDevice(Mat A, Vec b, const char *petscmatformat) +{ + Mat A2 = NULL; + PetscLogEvent event; + Vec u; + PetscBool isaijcusparse, isaijhipsparse, isaijkokkos, issellcuda, issellhip; + + PetscFunctionBeginUser; + PetscCall(PetscStrcmp(petscmatformat, MATAIJCUSPARSE, &isaijcusparse)); + PetscCall(PetscStrcmp(petscmatformat, MATAIJHIPSPARSE, &isaijhipsparse)); + PetscCall(PetscStrcmp(petscmatformat, MATAIJKOKKOS, &isaijkokkos)); + PetscCall(PetscStrcmp(petscmatformat, MATSELLCUDA, &issellcuda)); + PetscCall(PetscStrcmp(petscmatformat, MATSELLHIP, &issellhip)); + if (!isaijcusparse && !isaijkokkos && !isaijhipsparse && !issellcuda && !issellhip) PetscFunctionReturn(PETSC_SUCCESS); + if (isaijcusparse || issellcuda) PetscCall(VecSetType(b, VECCUDA)); + if (isaijkokkos) PetscCall(VecSetType(b, VECKOKKOS)); + if (isaijhipsparse || issellhip) PetscCall(VecSetType(b, VECHIP)); + PetscCall(VecDuplicate(b, &u)); + PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &A2)); + PetscCall(MatSetType(A2, petscmatformat)); + PetscCall(PetscLogEventGetId("MatMult", &event)); + PetscCall(PetscLogEventDeactivatePush(event)); + PetscCall(MatMult(A2, b, u)); + PetscCall(PetscLogEventDeactivatePop(event)); + PetscCall(VecDestroy(&u)); + PetscCall(MatDestroy(&A2)); + PetscFunctionReturn(PETSC_SUCCESS); +} + PetscErrorCode PetscLogSpMVTime(PetscReal *gputime, PetscReal *cputime, PetscReal *gpuflops, const char *petscmatformat) { PetscLogEvent event; PetscEventPerfInfo eventInfo; - //PetscReal gpuflopRate; + // PetscReal gpuflopRate; // if (matformat) { // PetscCall(PetscLogEventGetId("MatCUDACopyTo", &event)); @@ -182,7 +210,7 @@ PetscErrorCode PetscLogSpMVTime(PetscReal *gputime, PetscReal *cputime, PetscRea PetscFunctionBeginUser; PetscCall(PetscLogEventGetId("MatMult", &event)); PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &eventInfo)); - //gpuflopRate = eventInfo.GpuFlops/eventInfo.GpuTime; + // gpuflopRate = eventInfo.GpuFlops/eventInfo.GpuTime; // PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%.2f %.4e %.4e\n", gpuflopRate/1.e6, eventInfo.GpuTime, eventInfo.time)); if (cputime) *cputime = eventInfo.time; #if defined(PETSC_HAVE_DEVICE) @@ -199,13 +227,25 @@ PetscErrorCode MapToPetscMatType(const char *matformat, PetscBool use_gpu, char PetscFunctionBeginUser; PetscCall(PetscStrcmp(matformat, "csr", &iscsr)); if (iscsr) { - if (use_gpu) PetscCall(PetscStrallocpy(MATAIJCUSPARSE, petscmatformat)); - else PetscCall(PetscStrallocpy(MATAIJ, petscmatformat)); + if (use_gpu) { +#if defined(PETSC_HAVE_CUDA) + PetscCall(PetscStrallocpy(MATAIJCUSPARSE, petscmatformat)); +#endif +#if defined(PETSC_HAVE_HIP) + PetscCall(PetscStrallocpy(MATAIJHIPSPARSE, petscmatformat)); +#endif + } else 
PetscCall(PetscStrallocpy(MATAIJ, petscmatformat)); } else { PetscCall(PetscStrcmp(matformat, "sell", &issell)); if (issell) { - if (use_gpu) PetscCall(PetscStrallocpy(MATSELLCUDA, petscmatformat)); - else PetscCall(PetscStrallocpy(MATSELL, petscmatformat)); + if (use_gpu) { +#if defined(PETSC_HAVE_CUDA) + PetscCall(PetscStrallocpy(MATSELLCUDA, petscmatformat)); +#endif +#if defined(PETSC_HAVE_HIP) + PetscCall(PetscStrallocpy(MATSELLHIP, petscmatformat)); +#endif + } else PetscCall(PetscStrallocpy(MATSELL, petscmatformat)); } else { PetscCall(PetscStrcmp(matformat, "csrkokkos", &iscsrkokkos)); if (iscsrkokkos) PetscCall(PetscStrallocpy(MATAIJKOKKOS, petscmatformat)); @@ -294,6 +334,7 @@ int main(int argc, char **args) PetscCall(MatCreateVecs(A, &b, NULL)); PetscCall(VecSet(b, 1.0)); } + if (use_gpu) PetscCall(WarmUpDevice(A, b, petscmatformat)); PetscCall(TimedSpMV(A, b, NULL, petscmatformat, use_gpu, repetitions)); if (use_gpu) PetscCall(PetscLogSpMVTime(&spmv_times[i], NULL, NULL, petscmatformat)); else PetscCall(PetscLogSpMVTime(NULL, &spmv_times[i], NULL, petscmatformat)); @@ -308,6 +349,7 @@ int main(int argc, char **args) PetscCall(MatCreateVecs(A, &b, NULL)); PetscCall(VecSet(b, 1.0)); } + if (use_gpu) PetscCall(WarmUpDevice(A, b, petscmatformat)); PetscCall(TimedSpMV(A, b, &spmv_time, petscmatformat, use_gpu, repetitions)); if (!bflg) PetscCall(VecDestroy(&b)); } @@ -348,4 +390,10 @@ int main(int argc, char **args) output_file: output/bench_spmv_1.out requires: cuda + test: + suffix: 3 + args: -AMTX ${wPETSC_DIR}/share/petsc/datafiles/matrices/amesos2_test_mat0.mtx -use_gpu + output_file: output/bench_spmv_1.out + requires: hip + TEST*/ diff --git a/src/mat/tests/ex105f.F90 b/src/mat/tests/ex105f.F90 index 01f22a3d1b9..dacc90f4d9e 100644 --- a/src/mat/tests/ex105f.F90 +++ b/src/mat/tests/ex105f.F90 @@ -15,18 +15,18 @@ program main PetscCallA(MatCreate(PETSC_COMM_WORLD,m,ierr)) PetscCallA(MatSetSizes(m,PETSC_DECIDE,PETSC_DECIDE,twelve,twelve,ierr)) PetscCallA(MatSetFromOptions(m,ierr)) - PetscCallA(MatMPIAIJSetPreallocation(m,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatMPIAIJSetPreallocation(m,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER_ARRAY,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER_ARRAY,ierr)) value = 3.0 i = 4 one = 1 - PetscCallA(MatSetValuesMPIAIJ(m,one,i,one,i,value,ADD_VALUES,ierr)) + PetscCallA(MatSetValuesMPIAIJ(m,one,[i],one,[i],[value],ADD_VALUES,ierr)) i = 5 j = 7 - PetscCallA(MatSetValuesMPIAIJ(m,one,i,one,j,value,ADD_VALUES,ierr)) + PetscCallA(MatSetValuesMPIAIJ(m,one,[i],one,[j],[value],ADD_VALUES,ierr)) i = 10 j = 9 - PetscCallA(MatSetValuesMPIAIJ(m,one,i,one,j,value,ADD_VALUES,ierr)) + PetscCallA(MatSetValuesMPIAIJ(m,one,[i],one,[j],[value],ADD_VALUES,ierr)) PetscCallA(MatAssemblyBegin(m,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(m,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/mat/tests/ex120f.F90 b/src/mat/tests/ex120f.F90 index da69419a16e..022b702315e 100644 --- a/src/mat/tests/ex120f.F90 +++ b/src/mat/tests/ex120f.F90 @@ -14,7 +14,6 @@ subroutine mymatgetvecs(A,x,y,ierr) tw = 12 PetscCallA(VecCreateSeq(PETSC_COMM_SELF,tw,x,ierr)) PetscCallA(VecCreateSeq(PETSC_COMM_SELF,tw,y,ierr)) - return end program main diff --git a/src/mat/tests/ex126f.F90 b/src/mat/tests/ex126f.F90 index 78e1351d646..4c58af40e31 100644 --- a/src/mat/tests/ex126f.F90 +++ b/src/mat/tests/ex126f.F90 @@ -35,8 +35,8 @@ program main PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*m, m*m, ierr)) PetscCallA(MatSetType(A, MATAIJ,
diff --git a/src/mat/tests/ex105f.F90 b/src/mat/tests/ex105f.F90 index 01f22a3d1b9..dacc90f4d9e 100644 --- a/src/mat/tests/ex105f.F90 +++ b/src/mat/tests/ex105f.F90 @@ -15,18 +15,18 @@ program main PetscCallA(MatCreate(PETSC_COMM_WORLD,m,ierr)) PetscCallA(MatSetSizes(m,PETSC_DECIDE,PETSC_DECIDE,twelve,twelve,ierr)) PetscCallA(MatSetFromOptions(m,ierr)) - PetscCallA(MatMPIAIJSetPreallocation(m,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatMPIAIJSetPreallocation(m,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER_ARRAY,PETSC_DEFAULT_INTEGER,PETSC_NULL_INTEGER_ARRAY,ierr)) value = 3.0 i = 4 one = 1 - PetscCallA(MatSetValuesMPIAIJ(m,one,i,one,i,value,ADD_VALUES,ierr)) + PetscCallA(MatSetValuesMPIAIJ(m,one,[i],one,[i],[value],ADD_VALUES,ierr)) i = 5 j = 7 - PetscCallA(MatSetValuesMPIAIJ(m,one,i,one,j,value,ADD_VALUES,ierr)) + PetscCallA(MatSetValuesMPIAIJ(m,one,[i],one,[j],[value],ADD_VALUES,ierr)) i = 10 j = 9 - PetscCallA(MatSetValuesMPIAIJ(m,one,i,one,j,value,ADD_VALUES,ierr)) + PetscCallA(MatSetValuesMPIAIJ(m,one,[i],one,[j],[value],ADD_VALUES,ierr)) PetscCallA(MatAssemblyBegin(m,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(m,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/mat/tests/ex120f.F90 b/src/mat/tests/ex120f.F90 index da69419a16e..022b702315e 100644 --- a/src/mat/tests/ex120f.F90 +++ b/src/mat/tests/ex120f.F90 @@ -14,7 +14,6 @@ subroutine mymatgetvecs(A,x,y,ierr) tw = 12 PetscCallA(VecCreateSeq(PETSC_COMM_SELF,tw,x,ierr)) PetscCallA(VecCreateSeq(PETSC_COMM_SELF,tw,y,ierr)) - return end program main diff --git a/src/mat/tests/ex126f.F90 b/src/mat/tests/ex126f.F90 index 78e1351d646..4c58af40e31 100644 --- a/src/mat/tests/ex126f.F90 +++ b/src/mat/tests/ex126f.F90 @@ -35,8 +35,8 @@ program main PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*m, m*m, ierr)) PetscCallA(MatSetType(A, MATAIJ, ierr)) PetscCallA(MatSetFromOptions(A, ierr)) - PetscCallA(MatSeqAIJSetPreallocation(A,ifive, PETSC_NULL_INTEGER, ierr)) - PetscCallA(MatMPIAIJSetPreallocation(A,ifive,PETSC_NULL_INTEGER,ifive,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatSeqAIJSetPreallocation(A,ifive, PETSC_NULL_INTEGER_ARRAY, ierr)) + PetscCallA(MatMPIAIJSetPreallocation(A,ifive,PETSC_NULL_INTEGER_ARRAY,ifive,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr)) @@ -46,22 +46,22 @@ program main j = II - i*m if (i.gt.0) then JJ = II - m - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif if (i.lt.m-1) then JJ = II + m - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif if (j.gt.0) then JJ = II - 1 - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif if (j.lt.m-1) then JJ = II + 1 - PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr)) endif v = 4.0 - PetscCallA( MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)) + PetscCallA( MatSetValues(A,ione,[II],ione,[II],[v],INSERT_VALUES,ierr)) 10 continue PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr)) diff --git a/src/mat/tests/ex134.c b/src/mat/tests/ex134.c index b82313fa1c5..ca067165759 100644 --- a/src/mat/tests/ex134.c +++ b/src/mat/tests/ex134.c @@ -14,7 +14,7 @@ PetscErrorCode Assemble(MPI_Comm comm, PetscInt bs, MatType mtype) PetscRandom rdm; Vec b, x, y; PetscInt i, j; - PetscReal norm2, tol = 10 * PETSC_SQRT_MACHINE_EPSILON; + PetscReal norm2, tol = 100 * PETSC_SQRT_MACHINE_EPSILON; PetscBool issbaij; #endif PetscViewer viewer; diff --git a/src/mat/tests/ex154.c b/src/mat/tests/ex154.c new file mode 100644 index 00000000000..1f75da68514 --- /dev/null +++ b/src/mat/tests/ex154.c @@ -0,0 +1,122 @@ +static char help[] = "Tests MatMatSolve() in Schur complement mode.\n\n"; + +#include <petscmat.h> + +int main(int argc, char **args) +{ + Mat F, A, B, X, Y, S; + IS is_schur; + PetscMPIInt size; + PetscInt ns = 0, m, n; + PetscReal norm, tol = PETSC_SQRT_MACHINE_EPSILON; + MatFactorType factor = MAT_FACTOR_LU; + PetscViewer fd; + char solver[256], converttype[256]; + char file[PETSC_MAX_PATH_LEN]; + PetscBool flg; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &args, (char *)0, help)); + PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size)); + PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This is a uniprocessor test"); + + PetscCall(PetscOptionsGetString(NULL, NULL, "-A", file, sizeof(file), &flg)); + PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_SUP, "Must provide a binary matrix with -A filename option"); + PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd)); + PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); + PetscCall(MatLoad(A, fd)); + PetscCall(PetscViewerDestroy(&fd)); + PetscCall(MatGetSize(A, &m, &n)); + PetscCheck(m == n, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "This example is not intended for rectangular matrices (%" PetscInt_FMT ", %" PetscInt_FMT ")", m, n); + PetscCall(MatViewFromOptions(A, NULL, "-A_view")); + + PetscCall(PetscOptionsGetString(NULL, NULL, "-B", file, sizeof(file), &flg)); + PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_SUP, "Must provide a binary matrix with -B filename option"); + 
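+ /* Load B; it is wrapped with MatCreateTranspose() below unless it is already dense or a virtual transpose */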
PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd)); + PetscCall(MatCreate(PETSC_COMM_WORLD, &B)); + PetscCall(MatLoad(B, fd)); + PetscCall(PetscViewerDestroy(&fd)); + PetscCall(MatViewFromOptions(B, NULL, "-B_view")); + PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &flg, MATSEQDENSE, MATMPIDENSE, NULL)); + if (!flg) PetscCall(PetscObjectTypeCompare((PetscObject)B, MATTRANSPOSEVIRTUAL, &flg)); + if (!flg) { + Mat Bt; + + PetscCall(MatCreateTranspose(B, &Bt)); + PetscCall(MatDestroy(&B)); + B = Bt; + } + PetscCall(PetscOptionsGetString(NULL, NULL, "-B_convert_type", converttype, sizeof(converttype), &flg)); + if (flg) PetscCall(MatConvert(B, converttype, MAT_INPLACE_MATRIX, &B)); + + PetscCall(PetscOptionsGetInt(NULL, NULL, "-ns", &ns, NULL)); + + PetscCall(PetscOptionsGetString(NULL, NULL, "-mat_solver_type", solver, sizeof(solver), &flg)); + if (!flg) PetscCall(PetscStrncpy(solver, MATSOLVERMUMPS, sizeof(solver))); + PetscCall(PetscOptionsGetEnum(NULL, NULL, "-mat_factor_type", MatFactorTypes, (PetscEnum *)&factor, NULL)); + PetscCall(MatGetFactor(A, solver, factor, &F)); + + PetscCall(ISCreateStride(PETSC_COMM_SELF, ns, m - ns, 1, &is_schur)); + PetscCall(MatFactorSetSchurIS(F, is_schur)); + PetscCall(ISDestroy(&is_schur)); + switch (factor) { + case MAT_FACTOR_LU: + PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, NULL)); + PetscCall(MatLUFactorNumeric(F, A, NULL)); + break; + case MAT_FACTOR_CHOLESKY: + PetscCall(MatCholeskyFactorSymbolic(F, A, NULL, NULL)); + PetscCall(MatCholeskyFactorNumeric(F, A, NULL)); + break; + default: + PetscCheck(PETSC_FALSE, PETSC_COMM_WORLD, PETSC_ERR_SUP, "Not coded for factor type %s", MatFactorTypes[factor]); + } + + PetscCall(MatFactorCreateSchurComplement(F, &S, NULL)); + PetscCall(MatViewFromOptions(S, NULL, "-S_view")); + PetscCall(MatDestroy(&S)); + + PetscCall(MatGetSize(B, NULL, &n)); + PetscCall(MatCreate(PETSC_COMM_WORLD, &X)); + PetscCall(MatSetSizes(X, m, PETSC_DECIDE, PETSC_DECIDE, n)); + PetscCall(MatSetType(X, MATDENSE)); + PetscCall(MatSetFromOptions(X)); + PetscCall(MatSetUp(X)); + + PetscCall(MatMatSolve(F, B, X)); + PetscCall(MatViewFromOptions(X, NULL, "-X_view")); + PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Y)); + PetscCall(MatViewFromOptions(Y, NULL, "-Y_view")); + PetscCall(MatAXPY(Y, -1.0, B, SAME_NONZERO_PATTERN)); + PetscCall(MatViewFromOptions(Y, NULL, "-err_view")); + PetscCall(MatNorm(Y, NORM_FROBENIUS, &norm)); + if (norm > tol) { + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "MatMatSolve: Norm of error %g\n", (double)norm)); + PetscCall(MatConvert(Y, MATAIJ, MAT_INPLACE_MATRIX, &Y)); + PetscCall(MatFilter(Y, PETSC_SMALL, PETSC_TRUE, PETSC_FALSE)); + PetscCall(MatViewFromOptions(Y, NULL, "-aij_err_view")); + } + PetscCall(MatDestroy(&A)); + PetscCall(MatDestroy(&X)); + PetscCall(MatDestroy(&F)); + PetscCall(MatDestroy(&B)); + PetscCall(MatDestroy(&Y)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + output_file: output/ex62_1.out + suffix: mumps_1 + requires: mumps double !complex !defined(PETSC_USE_64BIT_INDICES) + args: -A ${DATAFILESPATH}/matrices/factorSchur/A.dat -B ${DATAFILESPATH}/matrices/factorSchur/B1.dat -ns {{0 1}} + + test: + output_file: output/ex62_1.out + suffix: mumps_2 + requires: mumps double !complex !defined(PETSC_USE_64BIT_INDICES) + args: -A ${DATAFILESPATH}/matrices/factorSchur/A.dat -B ${DATAFILESPATH}/matrices/factorSchur/B2.dat -ns {{0 1}} + +TEST*/ diff --git a/src/mat/tests/ex2.c b/src/mat/tests/ex2.c index 
031b20c828f..34f4375fed0 100644 --- a/src/mat/tests/ex2.c +++ b/src/mat/tests/ex2.c @@ -121,7 +121,7 @@ int main(int argc, char **argv) PetscCall(MatNorm(mat, NORM_FROBENIUS, &normf)); PetscCall(MatNorm(mat, NORM_1, &norm1)); PetscCall(MatNorm(mat, NORM_INFINITY, &normi)); - PetscCall(PetscPrintf(PETSC_COMM_WORLD, "original A: Frobenious norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "original A: Frobenius norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); PetscCall(MatView(mat, PETSC_VIEWER_STDOUT_WORLD)); /* --------------- Test MatTranspose() -------------- */ @@ -139,7 +139,7 @@ int main(int argc, char **argv) PetscCall(MatNorm(tmat, NORM_FROBENIUS, &normf)); PetscCall(MatNorm(tmat, NORM_1, &norm1)); PetscCall(MatNorm(tmat, NORM_INFINITY, &normi)); - PetscCall(PetscPrintf(PETSC_COMM_WORLD, "B = A^T: Frobenious norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "B = A^T: Frobenius norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); PetscCall(MatView(tmat, PETSC_VIEWER_STDOUT_WORLD)); /* ----------------- Test MatAXPY(), MatAYPX() ----------------- */ diff --git a/src/mat/tests/ex201f.F90 b/src/mat/tests/ex201f.F90 index cddb0e9376c..9fc2c2e413e 100644 --- a/src/mat/tests/ex201f.F90 +++ b/src/mat/tests/ex201f.F90 @@ -12,7 +12,6 @@ subroutine mymatmult(A, x, y, ierr) PetscErrorCode ierr print*, 'Called MatMult' - return end subroutine mymatmultadd(A, x, y, z, ierr) @@ -23,7 +22,6 @@ subroutine mymatmultadd(A, x, y, z, ierr) PetscErrorCode ierr print*, 'Called MatMultAdd' - return end subroutine mymatmulttranspose(A, x, y, ierr) @@ -34,7 +32,6 @@ subroutine mymatmulttranspose(A, x, y, ierr) PetscErrorCode ierr print*, 'Called MatMultTranspose' - return end subroutine mymatmulthermitiantranspose(A, x, y, ierr) @@ -45,7 +42,6 @@ subroutine mymatmulthermitiantranspose(A, x, y, ierr) PetscErrorCode ierr print*, 'Called MatMultHermitianTranspose' - return end subroutine mymatmulttransposeadd(A, x, y, z, ierr) @@ -56,7 +52,6 @@ subroutine mymatmulttransposeadd(A, x, y, z, ierr) PetscErrorCode ierr print*, 'Called MatMultTransposeAdd' - return end subroutine mymatmulthermitiantransposeadd(A, x, y, z, ierr) @@ -67,7 +62,6 @@ subroutine mymatmulthermitiantransposeadd(A, x, y, z, ierr) PetscErrorCode ierr print*, 'Called MatMultHermitianTransposeAdd' - return end subroutine mymattranspose(A, reuse, B, ierr) @@ -85,7 +79,6 @@ subroutine mymattranspose(A, reuse, B, ierr) PetscCallA(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY, ierr)) print*, 'Called MatTranspose' - return end subroutine mymatgetdiagonal(A, x, ierr) @@ -96,7 +89,6 @@ subroutine mymatgetdiagonal(A, x, ierr) PetscErrorCode ierr print*, 'Called MatGetDiagonal' - return end subroutine mymatdiagonalscale(A, x, y, ierr) @@ -107,7 +99,6 @@ subroutine mymatdiagonalscale(A, x, y, ierr) PetscErrorCode ierr print*, 'Called MatDiagonalScale' - return end subroutine mymatzeroentries(A, ierr) @@ -117,7 +108,6 @@ subroutine mymatzeroentries(A, ierr) PetscErrorCode ierr print*, 'Called MatZeroEntries' - return end subroutine mymataxpy(A, alpha, B, str, ierr) @@ -129,7 +119,6 @@ subroutine mymataxpy(A, alpha, B, str, ierr) PetscErrorCode ierr print*, 'Called MatAXPY' - return end subroutine mymatshift(A, alpha, ierr) @@ -140,7 +129,6 @@ subroutine mymatshift(A, alpha, ierr) PetscErrorCode ierr 
print*, 'Called MatShift' - return end subroutine mymatdiagonalset(A, x, ins, ierr) @@ -152,7 +140,6 @@ subroutine mymatdiagonalset(A, x, ins, ierr) PetscErrorCode ierr print*, 'Called MatDiagonalSet' - return end subroutine mymatdestroy(A, ierr) @@ -162,7 +149,6 @@ subroutine mymatdestroy(A, ierr) PetscErrorCode ierr print*, 'Called MatDestroy' - return end subroutine mymatview(A, viewer, ierr) @@ -173,7 +159,6 @@ subroutine mymatview(A, viewer, ierr) PetscErrorCode ierr print*, 'Called MatView' - return end subroutine mymatgetvecs(A, x, y, ierr) @@ -184,7 +169,6 @@ subroutine mymatgetvecs(A, x, y, ierr) PetscErrorCode ierr print*, 'Called MatCreateVecs' - return end program main diff --git a/src/mat/tests/ex209f.F90 b/src/mat/tests/ex209f.F90 index 692ee6c2d35..667cf207037 100644 --- a/src/mat/tests/ex209f.F90 +++ b/src/mat/tests/ex209f.F90 @@ -11,7 +11,7 @@ program main PetscScalar, pointer :: km(:,:) PetscInt three,one PetscInt idxm(1),i,j - PetscScalar v + PetscScalar v(1) PetscCallA(PetscInitialize(ierr)) @@ -31,13 +31,13 @@ program main enddo enddo - PetscCallA(MatSetValuesBlocked(A, one, idxm, one, idxm, km, ADD_VALUES, ierr)) + PetscCallA(MatSetValuesBlocked(A, one, idxm, one, idxm, reshape(km, [three*three]), ADD_VALUES, ierr)) PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatView(A,PETSC_VIEWER_STDOUT_WORLD,ierr)) j = 0 - PetscCallA(MatGetValues(A,one,j,one,j,v,ierr)) + PetscCallA(MatGetValues(A,one,[j],one,[j],v,ierr)) PetscCallA(MatDestroy(A,ierr)) diff --git a/src/mat/tests/ex212f.F90 b/src/mat/tests/ex212f.F90 index 59b9bdb41e9..14fa720a7d9 100644 --- a/src/mat/tests/ex212f.F90 +++ b/src/mat/tests/ex212f.F90 @@ -24,27 +24,27 @@ program main PetscCallA(MatCreate(PETSC_COMM_WORLD,B,ierr)) PetscCallA(MatGetNullSpace(A,sp,ierr)) - PetscCheckA(sp .eq. PETSC_NULL_MATNULLSPACE,PETSC_COMM_SELF,PETSC_ERR_PLIB,'Matrix null space should not exist') + PetscCheckA(PetscObjectIsNull(sp),PETSC_COMM_SELF,PETSC_ERR_PLIB,'Matrix null space should not exist') - PetscCallA(MatSetNullSpace(A,PETSC_NULL_MATNULLSPACE,ierr)) + PetscCallA(MatSetNullSpace(A,PETSC_NULL_MAT_NULLSPACE,ierr)) PetscCallA(MatGetNullSpace(A,sp,ierr)) - PetscCheckA(sp .eq. PETSC_NULL_MATNULLSPACE,PETSC_COMM_SELF,PETSC_ERR_PLIB,'Matrix null space should not exist') + PetscCheckA(PetscObjectIsNull(sp),PETSC_COMM_SELF,PETSC_ERR_PLIB,'Matrix null space should not exist') - PetscCallA(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,zero,PETSC_NULL_VEC,sp,ierr)) + PetscCallA(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,zero,PETSC_NULL_VEC_ARRAY,sp,ierr)) PetscCallA(MatSetNullSpace(A,sp,ierr)) PetscCallA(MatGetNullSpace(A,sp1,ierr)) - PetscCheckA(sp1 .ne. PETSC_NULL_MATNULLSPACE,PETSC_COMM_SELF,PETSC_ERR_PLIB,'Matrix null space should not be null') + PetscCheckA(.not. 
PetscObjectIsNull(sp1),PETSC_COMM_SELF,PETSC_ERR_PLIB,'Matrix null space should not be null') PetscCallA(MatNullSpaceDestroy(sp,ierr)) - PetscCallA(MatCreateSeqDense(PETSC_COMM_WORLD,one,one,PETSC_NULL_SCALAR,C,ierr)) - PetscCallA(MatSetValues(C,one,zero,one,zero,sone,INSERT_VALUES,ierr)) + PetscCallA(MatCreateSeqDense(PETSC_COMM_WORLD,one,one,PETSC_NULL_SCALAR_ARRAY,C,ierr)) + PetscCallA(MatSetValues(C,one,[zero],one,[zero],[sone],INSERT_VALUES,ierr)) PetscCallA(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatCreateSchurComplement(C,C,C,C,PETSC_NULL_MAT,SC,ierr)) PetscCallA(MatGetOwnershipRange(SC,PETSC_NULL_INTEGER,rend,ierr)) PetscCallA(VecCreateSeq(PETSC_COMM_SELF,one,x,ierr)) PetscCallA(VecDuplicate(x,y,ierr)) - PetscCallA(VecSetValues(x,one,zero,sone,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(x,one,[zero],[sone],INSERT_VALUES,ierr)) PetscCallA(VecAssemblyBegin(x,ierr)) PetscCallA(VecAssemblyEnd(x,ierr)) PetscCallA(MatMult(SC,x,y,ierr)) diff --git a/src/mat/tests/ex219f.F90 b/src/mat/tests/ex219f.F90 index d4cc08ae198..b5a2735a5e9 100644 --- a/src/mat/tests/ex219f.F90 +++ b/src/mat/tests/ex219f.F90 @@ -6,7 +6,7 @@ program newnonzero Mat :: A PetscInt :: n,m,idxm(1),idxn(1),nl1,nl2,zero,one,i - PetscScalar :: v(1),value,values(2) + PetscScalar :: v(1),value(1),values(2) PetscErrorCode :: ierr IS :: is ISLocalToGlobalMapping :: ismap @@ -16,7 +16,7 @@ program newnonzero one = 1 n=3 m=n - PetscCallA(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,m,one,PETSC_NULL_INTEGER,zero,PETSC_NULL_INTEGER,A,ierr)) + PetscCallA(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,m,one,PETSC_NULL_INTEGER_ARRAY,zero,PETSC_NULL_INTEGER_ARRAY,A,ierr)) PetscCallA(MatGetOwnershipRange(A,nl1,nl2,ierr)) do i=nl1,nl2-1 @@ -50,10 +50,10 @@ program newnonzero PetscCallA(MatSetLocalToGlobalMapping(A,ismap,ismap,ierr)) PetscCallA(ISLocalToGlobalMappingDestroy(ismap,ierr)) PetscCallA(ISDestroy(is,ierr)) - PetscCallA(MatGetValuesLocal(A,one,zero,one,zero,value,ierr)) - PetscCallA(MatGetValuesLocal(A,one,zero,one,zero,values,ierr)) + PetscCallA(MatGetValuesLocal(A,one,[zero],one,[zero],value,ierr)) + PetscCallA(MatGetValuesLocal(A,one,[zero],one,[zero],values,ierr)) idxn(1) = 0 - PetscCallA(MatGetValuesLocal(A,one,idxn,one,zero,values,ierr)) + PetscCallA(MatGetValuesLocal(A,one,idxn,one,[zero],values,ierr)) PetscCallA(MatGetValuesLocal(A,one,idxn,one,idxn,values,ierr)) PetscCallA(MatDestroy(A,ierr)) diff --git a/src/mat/tests/ex23.c b/src/mat/tests/ex23.c index 222c2b80a0e..7c4320f7fa0 100644 --- a/src/mat/tests/ex23.c +++ b/src/mat/tests/ex23.c @@ -17,15 +17,15 @@ int main(int argc, char **args) IS *rows, *cols; IS irow[2], icol[2]; PetscLayout rlayout, clayout; - const PetscInt *rrange, *crange; + const PetscInt *rrange, *crange, *idxs1, *idxs2; MatType lmtype; - PetscScalar diag = 2.; + PetscScalar diag = 2., *vals; PetscInt n, m, i, lm, ln; - PetscInt rst, ren, cst, cen, nr, nc; + PetscInt rst, ren, cst, cen, nr, nc, rbs = 1, cbs = 1; PetscMPIInt rank, size, lrank, rrank; PetscBool testT, squaretest, isaij; PetscBool permute = PETSC_FALSE, negmap = PETSC_FALSE, repmap = PETSC_FALSE, allow_repeated = PETSC_TRUE; - PetscBool diffmap = PETSC_TRUE, symmetric = PETSC_FALSE, issymmetric, test_matlab = PETSC_FALSE; + PetscBool diffmap = PETSC_TRUE, symmetric = PETSC_FALSE, issymmetric, test_matlab = PETSC_FALSE, test_setvalues = PETSC_TRUE; PetscFunctionBeginUser; PetscCall(PetscInitialize(&argc, &args, (char *)0, help)); @@ -41,6 +41,9
@@ int main(int argc, char **args) PetscCall(PetscOptionsGetBool(NULL, NULL, "-diffmap", &diffmap, NULL)); PetscCall(PetscOptionsGetBool(NULL, NULL, "-allow_repeated", &allow_repeated, NULL)); PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_matlab", &test_matlab, NULL)); + PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_setvalues", &test_setvalues, NULL)); + PetscCall(PetscOptionsGetInt(NULL, NULL, "-rbs", &rbs, NULL)); + PetscCall(PetscOptionsGetInt(NULL, NULL, "-cbs", &cbs, NULL)); PetscCheck(size == 1 || m >= 4, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of rows should be larger or equal 4 for parallel runs"); PetscCheck(size != 1 || m >= 2, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of rows should be larger or equal 2 for uniprocessor runs"); PetscCheck(n >= 2, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of cols should be larger or equal 2"); @@ -88,6 +91,7 @@ int main(int argc, char **args) PetscCall(MatISSetAllowRepeated(A, allow_repeated)); PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap)); + PetscCall(MatSetBlockSizes(A, rbs, cbs)); PetscCall(MatISStoreL2L(A, PETSC_FALSE)); PetscCall(MatISSetPreallocation(A, 3, NULL, 3, NULL)); PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, (PetscBool) !(repmap || negmap))); /* I do not want to precompute the pattern */ @@ -118,8 +122,6 @@ int main(int argc, char **args) PetscCall(ISLocalToGlobalMappingGetSize(cmap, &nc)); if (nr != nc) squaretest = PETSC_FALSE; else { - const PetscInt *idxs1, *idxs2; - PetscCall(ISLocalToGlobalMappingGetIndices(rmap, &idxs1)); PetscCall(ISLocalToGlobalMappingGetIndices(cmap, &idxs2)); PetscCall(PetscArraycmp(idxs1, idxs2, nr, &squaretest)); @@ -155,6 +157,7 @@ int main(int argc, char **args) /* Create a MPIAIJ matrix, same as A */ PetscCall(MatCreate(PETSC_COMM_WORLD, &B)); PetscCall(MatSetSizes(B, PETSC_DECIDE, PETSC_DECIDE, m, n)); + PetscCall(MatSetBlockSizes(B, rbs, cbs)); PetscCall(MatSetType(B, MATAIJ)); PetscCall(MatSetFromOptions(B)); PetscCall(MatSetLocalToGlobalMapping(B, rmap, cmap)); @@ -726,7 +729,6 @@ int main(int argc, char **args) Mat Abd, Bbd; IS is, bis; const PetscScalar *isbd, *aijbd; - PetscScalar *vals; const PetscInt *sts, *idxs; PetscInt *idxs2, diff, perm, nl, bs, st, en, in; PetscBool ok; @@ -877,6 +879,48 @@ int main(int argc, char **args) PetscCall(MatDestroy(&A2)); } + /* Test MatZeroEntries */ + PetscCall(MatZeroEntries(A)); + PetscCall(MatZeroEntries(B)); + PetscCall(CheckMat(A, B, PETSC_FALSE, "MatZeroEntries")); + + /* Test MatSetValues and MatSetValuesBlocked */ + if (test_setvalues) { + PetscCall(PetscMalloc1(lm * ln, &vals)); + for (i = 0; i < lm * ln; i++) vals[i] = i + 1.0; + PetscCall(MatGetLocalSize(A, NULL, &ln)); + PetscCall(MatISSetPreallocation(A, ln, NULL, n - ln, NULL)); + PetscCall(MatSeqAIJSetPreallocation(B, ln, NULL)); + PetscCall(MatMPIAIJSetPreallocation(B, ln, NULL, n - ln, NULL)); + PetscCall(ISLocalToGlobalMappingGetSize(rmap, &lm)); + PetscCall(ISLocalToGlobalMappingGetSize(cmap, &ln)); + + PetscCall(ISLocalToGlobalMappingGetIndices(rmap, &idxs1)); + PetscCall(ISLocalToGlobalMappingGetIndices(cmap, &idxs2)); + PetscCall(MatSetValues(A, lm, idxs1, ln, idxs2, vals, ADD_VALUES)); + PetscCall(MatSetValues(B, lm, idxs1, ln, idxs2, vals, ADD_VALUES)); + PetscCall(ISLocalToGlobalMappingRestoreIndices(rmap, &idxs1)); + PetscCall(ISLocalToGlobalMappingRestoreIndices(cmap, &idxs2)); + PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); + 
PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); + PetscCall(CheckMat(A, B, PETSC_FALSE, "MatSetValues")); + + PetscCall(ISLocalToGlobalMappingGetBlockIndices(rmap, &idxs1)); + PetscCall(ISLocalToGlobalMappingGetBlockIndices(cmap, &idxs2)); + PetscCall(MatSetValuesBlocked(A, lm / rbs, idxs1, ln / cbs, idxs2, vals, ADD_VALUES)); + PetscCall(MatSetValuesBlocked(B, lm / rbs, idxs1, ln / cbs, idxs2, vals, ADD_VALUES)); + PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rmap, &idxs1)); + PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cmap, &idxs2)); + PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); + PetscCall(CheckMat(A, B, PETSC_FALSE, "MatSetValuesBlocked")); + PetscCall(PetscFree(vals)); + } + /* free testing matrices */ PetscCall(ISLocalToGlobalMappingDestroy(&cmap)); PetscCall(ISLocalToGlobalMappingDestroy(&rmap)); @@ -1068,7 +1112,7 @@ PetscErrorCode TestMatZeroRows(Mat A, Mat Afull, PetscBool squaretest, IS is, Pe test: suffix: 3 nsize: 5 - args: -m 11 -n 10 -mat_is_convert_local_nest -nr 2 -nc 1 + args: -m 11 -n 10 -mat_is_convert_local_nest -nr 2 -nc 1 -cbs 2 test: suffix: 4 @@ -1078,11 +1122,11 @@ PetscErrorCode TestMatZeroRows(Mat A, Mat Afull, PetscBool squaretest, IS is, Pe test: suffix: 5 nsize: 6 - args: -m 12 -n 12 -test_trans -nr 3 -nc 1 + args: -m 12 -n 12 -test_trans -nr 3 -nc 1 -rbs 2 test: suffix: 6 - args: -m 12 -n 12 -test_trans -nr 2 -nc 3 -diffmap + args: -m 12 -n 12 -test_trans -nr 2 -nc 3 -diffmap -rbs 6 -cbs 3 test: suffix: 7 @@ -1111,7 +1155,7 @@ PetscErrorCode TestMatZeroRows(Mat A, Mat Afull, PetscBool squaretest, IS is, Pe test: suffix: 12 nsize: 3 - args: -m 12 -n 12 -symmetric -mat_is_localmat_type sbaij -test_trans -nr 2 -nc 3 + args: -m 12 -n 12 -symmetric -mat_is_localmat_type sbaij -test_trans -nr 2 -nc 3 -test_setvalues 0 testset: output_file: output/ex23_13.out diff --git a/src/mat/tests/ex241f.F90 b/src/mat/tests/ex241f.F90 index 30bd541c764..6ef9c3ca26f 100644 --- a/src/mat/tests/ex241f.F90 +++ b/src/mat/tests/ex241f.F90 @@ -26,8 +26,8 @@ program test_assembly PetscCallA(MatSetType(L,MATAIJ,ierr)) PetscCallA(MatSetSizes(L,PETSC_DECIDE,PETSC_DECIDE,n,n,ierr)) - PetscCallA(MatSeqAIJSetPreallocation(L,i1,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatMPIAIJSetPreallocation(L,i1,PETSC_NULL_INTEGER,i0,PETSC_NULL_INTEGER,ierr)) ! No allocated non-zero in off-diagonal part + PetscCallA(MatSeqAIJSetPreallocation(L,i1,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatMPIAIJSetPreallocation(L,i1,PETSC_NULL_INTEGER_ARRAY,i0,PETSC_NULL_INTEGER_ARRAY,ierr)) ! 
No allocated non-zero in off-diagonal part PetscCallA(MatSetOption(L,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE,ierr)) PetscCallA(MatSetOption(L,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE,ierr)) PetscCallA(MatSetOption(L,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE,ierr)) diff --git a/src/mat/tests/ex242.c b/src/mat/tests/ex242.c index b7b4d681c3c..be686de6b7e 100644 --- a/src/mat/tests/ex242.c +++ b/src/mat/tests/ex242.c @@ -217,16 +217,19 @@ int main(int argc, char **args) requires: scalapack test: + requires: !single # garbage prints in single precision from sgemr2d nsize: 2 args: -mb 5 -nb 5 -M 12 -N 10 test: + requires: !single # garbage prints in single precision from sgemr2d suffix: 2 nsize: 6 args: -mb 8 -nb 6 -M 20 -N 50 output_file: output/ex242_1.out test: + requires: !single # garbage prints in single precision from sgemr2d suffix: 3 nsize: 3 args: -mb 2 -nb 2 -M 20 -N 20 -test_matmatmult diff --git a/src/mat/tests/ex243.c b/src/mat/tests/ex243.c index 0b1ca965fa3..5c23798365a 100644 --- a/src/mat/tests/ex243.c +++ b/src/mat/tests/ex243.c @@ -93,15 +93,18 @@ int main(int argc, char **argv) requires: scalapack test: + requires: !single # garbage prints in single precision from sgemr2d nsize: 6 test: + requires: !single # garbage prints in single precision from sgemr2d suffix: 2 nsize: 6 args: -mat_type aij output_file: output/ex243_1.out test: + requires: !single # garbage prints in single precision from sgemr2d suffix: 3 nsize: 6 args: -mat_type scalapack diff --git a/src/mat/tests/ex245.c b/src/mat/tests/ex245.c index bbde402586b..c67dc6f039c 100644 --- a/src/mat/tests/ex245.c +++ b/src/mat/tests/ex245.c @@ -240,11 +240,13 @@ int main(int argc, char **argv) test: nsize: 2 + requires: !single # garbage prints in single precision from sgemr2d output_file: output/ex245.out test: suffix: 2 nsize: 6 + requires: !single # garbage prints in single precision from sgemr2d output_file: output/ex245.out TEST*/ diff --git a/src/mat/tests/ex262f.F90 b/src/mat/tests/ex262f.F90 index d0290f5e5f2..a573642842e 100644 --- a/src/mat/tests/ex262f.F90 +++ b/src/mat/tests/ex262f.F90 @@ -29,7 +29,7 @@ program main km(1,1) = i + j idxm(1) = i - 1 + 3*rank idxmj(1) = j - 1 + 3*rank - PetscCallA(MatSetValues(B, one, idxm, one, idxmj, km, ADD_VALUES, ierr)) + PetscCallA(MatSetValues(B, one, idxm, one, idxmj, reshape(km, [three*three]), ADD_VALUES, ierr)) enddo enddo diff --git a/src/mat/tests/ex264.c b/src/mat/tests/ex264.c new file mode 100644 index 00000000000..b2c8693b0f6 --- /dev/null +++ b/src/mat/tests/ex264.c @@ -0,0 +1,100 @@ +static char help[] = "Test MatConvert() with a MATNEST with scaled and shifted MATTRANSPOSEVIRTUAL blocks.\n\n"; + +#include <petscmat.h> + +/* + This example builds the matrix + + H = [ R C + alpha C^H + beta I gamma R^T + delta I ], + + where R is Hermitian and C is complex symmetric. In particular, R and C have the + following Toeplitz structure: + + R = pentadiag{a,b,c,conj(b),conj(a)} + C = tridiag{b,d,b} + + where a,b,d are complex scalars, and c is real.
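+ + In the code below the scalars are chosen as alpha = conj(b), beta = d, gamma = conj(d), and delta = b, so the second block row is assembled entirely with MatScale() and MatShift() applied to the virtual (Hermitian-)transpose wrappers of C and R.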
+*/ +int main(int argc, char **argv) +{ + Mat block[4], H, R, C, M; + PetscScalar a, b, c, d; + PetscInt n = 13, Istart, Iend, i; + PetscBool flg; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); + + PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL)); + + a = PetscCMPLX(-0.1, 0.2); + b = PetscCMPLX(1.0, 0.5); + c = 4.5; + d = PetscCMPLX(2.0, 0.2); + + PetscCall(MatCreate(PETSC_COMM_WORLD, &R)); + PetscCall(MatSetSizes(R, PETSC_DECIDE, PETSC_DECIDE, n, n)); + PetscCall(MatSetFromOptions(R)); + + PetscCall(MatCreate(PETSC_COMM_WORLD, &C)); + PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, n, n)); + PetscCall(MatSetFromOptions(C)); + + PetscCall(MatGetOwnershipRange(R, &Istart, &Iend)); + for (i = Istart; i < Iend; i++) { + if (i > 1) PetscCall(MatSetValue(R, i, i - 2, a, INSERT_VALUES)); + if (i > 0) PetscCall(MatSetValue(R, i, i - 1, b, INSERT_VALUES)); + PetscCall(MatSetValue(R, i, i, c, INSERT_VALUES)); + if (i < n - 1) PetscCall(MatSetValue(R, i, i + 1, PetscConj(b), INSERT_VALUES)); + if (i < n - 2) PetscCall(MatSetValue(R, i, i + 2, PetscConj(a), INSERT_VALUES)); + } + + PetscCall(MatGetOwnershipRange(C, &Istart, &Iend)); + for (i = Istart; i < Iend; i++) { + if (i > 0) PetscCall(MatSetValue(C, i, i - 1, b, INSERT_VALUES)); + PetscCall(MatSetValue(C, i, i, d, INSERT_VALUES)); + if (i < n - 1) PetscCall(MatSetValue(C, i, i + 1, b, INSERT_VALUES)); + } + + PetscCall(MatAssemblyBegin(R, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(R, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); + + block[0] = R; + block[1] = C; + + PetscCall(MatCreateHermitianTranspose(C, &block[2])); + PetscCall(MatScale(block[2], PetscConj(b))); + PetscCall(MatShift(block[2], d)); + PetscCall(MatCreateTranspose(R, &block[3])); + PetscCall(MatScale(block[3], PetscConj(d))); + PetscCall(MatShift(block[3], b)); + PetscCall(MatCreateNest(PetscObjectComm((PetscObject)R), 2, NULL, 2, NULL, block, &H)); + PetscCall(MatDestroy(&block[2])); + PetscCall(MatDestroy(&block[3])); + + PetscCall(MatConvert(H, MATAIJ, MAT_INITIAL_MATRIX, &M)); + PetscCall(MatMultEqual(H, M, 20, &flg)); + PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "MatNest != MatAIJ"); + + PetscCall(MatDestroy(&R)); + PetscCall(MatDestroy(&C)); + PetscCall(MatDestroy(&H)); + PetscCall(MatDestroy(&M)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + build: + requires: complex + + test: + output_file: output/ex109.out + nsize: {{1 4}} + +TEST*/ diff --git a/src/mat/tests/ex265.c b/src/mat/tests/ex265.c new file mode 100644 index 00000000000..589f9e15ad1 --- /dev/null +++ b/src/mat/tests/ex265.c @@ -0,0 +1,43 @@ +static char help[] = "Tests inserting new block into SBAIJ and BAIJ matrix \n "; + +#include <petscdmda.h> + +int main(int argc, char **argv) +{ + DM dm; + Mat A; + PetscInt idm = 0, idn = 8; + PetscScalar v[] = {1, 2, 3, 4}; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &dm)); + PetscCall(DMSetFromOptions(dm)); + PetscCall(DMSetUp(dm)); + PetscCall(DMCreateMatrix(dm, &A)); + PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE)); + PetscCall(MatSetValuesBlocked(A, 1, &idm, 1, &idn, v, INSERT_VALUES)); + PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); 
PetscCall(MatDestroy(&A)); + PetscCall(DMDestroy(&dm)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + args: -dm_mat_type {{aij baij sbaij}separate output} -mat_view + + test: + suffix: 2 + nsize: 2 + args: -dm_mat_type {{aij baij sbaij}separate output} -mat_view + + test: + suffix: 3 + nsize: 3 + args: -dm_mat_type {{aij baij sbaij}separate output} -mat_view + +TEST*/ diff --git a/src/mat/tests/ex266.c b/src/mat/tests/ex266.c new file mode 100644 index 00000000000..229114bee78 --- /dev/null +++ b/src/mat/tests/ex266.c @@ -0,0 +1,88 @@ +static char help[] = "Test MatDuplicate() with new nonzeros on the duplicate\n\n"; + +#include <petscmat.h> +int main(int argc, char **args) +{ + Mat A, B, C; + PetscInt k; + const PetscInt M = 18, N = 18; + PetscBool equal; + PetscScalar *vals; + PetscMPIInt rank; + + // clang-format off + // i0/j0[] has a dense diagonal + PetscInt i0[] = {7, 7, 8, 8, 9, 16, 17, 9, 10, 1, 1, -2, 2, 3, 3, 14, 4, 5, 10, 13, 9, 9, 10, 1, 0, 0, 5, 5, 6, 6, 13, 13, 14, -14, 4, 4, 5, 11, 11, 12, 15, 15, 16}; + PetscInt j0[] = {7, 6, 8, 4, 9, 16, 17, 16, 10, 2, 1, 3, 2, 4, 3, 14, 4, 5, 15, 13, 10, 16, 11, 2, 0, 1, 5, -11, 0, 6, 15, 17, 11, 13, 4, 8, 2, 11, 17, 12, 3, 15, 9}; + + // i1/j1[] miss some diagonals + PetscInt i1[] = {8, 5, 15, 16, 6, 13, 4, 17, 8, 9, 9, 10, -6, 12, 7, 3, -4, 1, 1, 2, 5, 5, 6, 14, 17, 8, 9, 9, 10, 4, 5, 10, 11, 1, 2}; + PetscInt j1[] = {2, 3, 16, 9, 5, 17, 1, 13, 4, 10, 16, 11, -5, 12, 1, 7, -1, 2, 7, 3, 6, 11, 0, 11, 13, 4, 10, 16, 11, 8, -2, 15, 12, 7, 3}; + + PetscInt i2[] = {3, 4, 1, 10, 0, 1, 1, 2, 1, 1, 2, 2, 3, 3, 4, 4, 1, 2, 5, 5, 6, 4, 17, 0, 1, 1, 8, 5, 5, 6, 4, 7, 8, 5}; + PetscInt j2[] = {7, 1, 2, 11, 5, 2, 7, 3, 2, 7, 3, 8, 4, 9, 3, 5, 7, 3, 6, 11, 0, 1, 13, 5, 2, 7, 4, 6, 11, 0, 1, 3, 4, 2}; + // clang-format on + + typedef struct { + PetscInt *i, *j, n; + } coo_data; + + coo_data coos[3] = { + {i0, j0, PETSC_STATIC_ARRAY_LENGTH(i0)}, + {i1, j1, PETSC_STATIC_ARRAY_LENGTH(i1)}, + {i2, j2, PETSC_STATIC_ARRAY_LENGTH(i2)} + }; + coo_data mycoo; + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &args, (char *)0, help)); + PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank)); + + mycoo = coos[rank / 3]; + + PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); + PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, N)); + PetscCall(MatSetFromOptions(A)); + + // Assemble matrix A with the full arrays + PetscCall(PetscMalloc1(mycoo.n, &vals)); + for (k = 0; k < mycoo.n; k++) { + vals[k] = mycoo.j[k]; + PetscCall(MatSetValue(A, mycoo.i[k], mycoo.j[k], vals[k], ADD_VALUES)); + } + PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); + + // Assemble matrix B with the 1st half of the arrays + PetscCall(MatCreate(PETSC_COMM_WORLD, &B)); + PetscCall(MatSetSizes(B, PETSC_DECIDE, PETSC_DECIDE, M, N)); + PetscCall(MatSetFromOptions(B)); + for (k = 0; k < mycoo.n / 2; k++) PetscCall(MatSetValue(B, mycoo.i[k], mycoo.j[k], vals[k], ADD_VALUES)); + PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); + + // Duplicate B to C and continue adding nonzeros to C with the 2nd half + PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &C)); + for (k = mycoo.n / 2; k < mycoo.n; k++) PetscCall(MatSetValue(C, mycoo.i[k], mycoo.j[k], vals[k], ADD_VALUES)); + PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); + + // Test if A == C + PetscCall(MatMultEqual(A, C, 10, &equal));
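+ // A was assembled in one pass, so B plus the entries added to its duplicate C must reproduce the same action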
if (!equal) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "MatDuplicate() on regular matrices failed\n")); + + PetscCall(PetscFree(vals)); + PetscCall(MatDestroy(&A)); + PetscCall(MatDestroy(&B)); + PetscCall(MatDestroy(&C)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + nsize: 3 + output_file: output/empty.out + +TEST*/ diff --git a/src/mat/tests/ex267.c b/src/mat/tests/ex267.c new file mode 100644 index 00000000000..862c4ad0778 --- /dev/null +++ b/src/mat/tests/ex267.c @@ -0,0 +1,194 @@ +static char help[] = "Test different MatSolve routines with MATTRANSPOSEVIRTUAL.\n\n"; + +#include <petscmat.h> + +PetscErrorCode TestMatrix(const char *test, Mat A, PetscInt nrhs, PetscBool inplace, PetscBool chol) +{ + Mat F, RHS, X, C1; + Vec b, x, y, f; + IS perm, iperm; + PetscInt n, i; + PetscReal norm, tol = 1000 * PETSC_MACHINE_EPSILON; + PetscBool ht; +#if defined(PETSC_USE_COMPLEX) + PetscScalar v1 = PetscCMPLX(1.0, -0.1), v2 = PetscCMPLX(-1.0, 0.1); +#else + PetscScalar v1 = 1.0, v2 = -1.0; +#endif + + PetscFunctionBegin; + PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHERMITIANTRANSPOSEVIRTUAL, &ht)); + PetscCall(MatCreateVecs(A, &f, &b)); + PetscCall(MatCreateVecs(A, &x, &y)); + PetscCall(VecSet(b, v1)); + PetscCall(VecSet(y, v2)); + + PetscCall(MatGetOrdering(A, MATORDERINGND, &perm, &iperm)); + if (!inplace) { + if (!chol) { + PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F)); + PetscCall(MatLUFactorSymbolic(F, A, perm, iperm, NULL)); + PetscCall(MatLUFactorNumeric(F, A, NULL)); + } else { /* Cholesky */ + PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F)); + PetscCall(MatCholeskyFactorSymbolic(F, A, perm, NULL)); + PetscCall(MatCholeskyFactorNumeric(F, A, NULL)); + } + } else { /* Test inplace factorization */ + PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &F)); + if (!chol) PetscCall(MatLUFactor(F, perm, iperm, NULL)); + else PetscCall(MatCholeskyFactor(F, perm, NULL)); + } + + /* MatSolve */ + PetscCall(MatSolve(F, b, x)); + PetscCall(MatMult(A, x, f)); + PetscCall(VecAXPY(f, -1.0, b)); + PetscCall(VecNorm(f, NORM_2, &norm)); + if (norm > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%12s MatSolve : Error of norm %g\n", test, (double)norm)); + + /* MatSolveTranspose */ + if (!ht) { + PetscCall(MatSolveTranspose(F, b, x)); + PetscCall(MatMultTranspose(A, x, f)); + PetscCall(VecAXPY(f, -1.0, b)); + PetscCall(VecNorm(f, NORM_2, &norm)); + if (norm > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%12s MatSolveTranspose : Error of norm %g\n", test, (double)norm)); + } + + /* MatSolveAdd */ + PetscCall(MatSolveAdd(F, b, y, x)); + PetscCall(MatMult(A, y, f)); + PetscCall(VecScale(f, -1.0)); + PetscCall(MatMultAdd(A, x, f, f)); + PetscCall(VecAXPY(f, -1.0, b)); + PetscCall(VecNorm(f, NORM_2, &norm)); + if (norm > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%12s MatSolveAdd : Error of norm %g\n", test, (double)norm)); + + /* MatSolveTransposeAdd */ + if (!ht) { + PetscCall(MatSolveTransposeAdd(F, b, y, x)); + PetscCall(MatMultTranspose(A, y, f)); + PetscCall(VecScale(f, -1.0)); + PetscCall(MatMultTransposeAdd(A, x, f, f)); + PetscCall(VecAXPY(f, -1.0, b)); + PetscCall(VecNorm(f, NORM_2, &norm)); + if (norm > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%12s MatSolveTransposeAdd : Error of norm %g\n", test, (double)norm)); + } + + /* MatMatSolve */ + PetscCall(MatGetSize(A, &n, NULL)); + PetscCall(MatCreate(PETSC_COMM_WORLD, &RHS)); + PetscCall(MatSetSizes(RHS, PETSC_DECIDE, PETSC_DECIDE, n, nrhs)); + PetscCall(MatSetType(RHS, MATSEQDENSE)); + PetscCall(MatSetUp(RHS)); + 
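+ /* fill the leading nrhs x nrhs block of RHS with the identity to create simple right-hand sides */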
for (i = 0; i < nrhs; i++) PetscCall(MatSetValue(RHS, i, i, 1.0, INSERT_VALUES)); + PetscCall(MatAssemblyBegin(RHS, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(RHS, MAT_FINAL_ASSEMBLY)); + PetscCall(MatDuplicate(RHS, MAT_DO_NOT_COPY_VALUES, &X)); + + if (!ht) { + PetscCall(MatMatSolve(F, RHS, X)); + PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C1)); + PetscCall(MatAXPY(C1, -1.0, RHS, SAME_NONZERO_PATTERN)); + PetscCall(MatNorm(C1, NORM_FROBENIUS, &norm)); + if (norm > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%12s MatMatSolve : Error of norm %g\n", test, (double)norm)); + PetscCall(MatDestroy(&C1)); + + PetscCall(MatMatSolveTranspose(F, RHS, X)); + PetscCall(MatTransposeMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C1)); + PetscCall(MatAXPY(C1, -1.0, RHS, SAME_NONZERO_PATTERN)); + PetscCall(MatNorm(C1, NORM_FROBENIUS, &norm)); + if (norm > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%12s MatMatSolveTranspose : Error of norm %g\n", test, (double)norm)); + PetscCall(MatDestroy(&C1)); + } + PetscCall(VecDestroy(&b)); + PetscCall(VecDestroy(&x)); + PetscCall(VecDestroy(&f)); + PetscCall(VecDestroy(&y)); + PetscCall(ISDestroy(&perm)); + PetscCall(ISDestroy(&iperm)); + PetscCall(MatDestroy(&F)); + PetscCall(MatDestroy(&RHS)); + PetscCall(MatDestroy(&X)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +int main(int argc, char **args) +{ + PetscMPIInt size; + Mat A, At, Aht; + PetscInt i, n = 8, nrhs = 2; + PetscBool aij, inplace = PETSC_FALSE; +#if defined(PETSC_USE_COMPLEX) + PetscScalar a = PetscCMPLX(-1.0, 0.5); +#else + PetscScalar a = -1.0; +#endif + + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &args, (char *)0, help)); + PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size)); + PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This is a uniprocessor example only"); + PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL)); + PetscCall(PetscOptionsGetInt(NULL, NULL, "-nrhs", &nrhs, NULL)); + PetscCall(PetscOptionsGetBool(NULL, NULL, "-inplace", &inplace, NULL)); + PetscCheck(nrhs <= n, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "Must have nrhs <= n"); + + /* Hermitian matrix */ + PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); + PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n)); + PetscCall(MatSetFromOptions(A)); + for (i = 0; i < n; i++) { + if (i > 0) PetscCall(MatSetValue(A, i, i - 1, a, INSERT_VALUES)); + if (i < n - 1) PetscCall(MatSetValue(A, i, i + 1, PetscConj(a), INSERT_VALUES)); + PetscCall(MatSetValue(A, i, i, 2.0, INSERT_VALUES)); + } + PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); + + PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &aij, MATSEQAIJ, MATSEQBAIJ, "")); +#if defined(PETSC_USE_COMPLEX) + PetscCall(MatSetOption(A, MAT_HERMITIAN, PETSC_TRUE)); +#else + PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE)); +#endif + + PetscCall(MatCreateTranspose(A, &At)); + PetscCall(MatCreateHermitianTranspose(A, &Aht)); + + PetscCall(TestMatrix("LU T", At, nrhs, inplace, PETSC_FALSE)); + PetscCall(TestMatrix("LU HT", Aht, nrhs, inplace, PETSC_FALSE)); + if (!aij) { + PetscCall(TestMatrix("Chol T", At, nrhs, inplace, PETSC_TRUE)); + PetscCall(TestMatrix("Chol HT", Aht, nrhs, inplace, PETSC_TRUE)); + } + + /* Make the matrix non-Hermitian */ + PetscCall(MatSetValue(A, 0, 1, -5.0, INSERT_VALUES)); + PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); +#if defined(PETSC_USE_COMPLEX) + PetscCall(MatSetOption(A, 
MAT_HERMITIAN, PETSC_FALSE)); +#else + PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_FALSE)); +#endif + + PetscCall(TestMatrix("LU T nonsym", At, nrhs, inplace, PETSC_FALSE)); + PetscCall(TestMatrix("LU HT nonsym", Aht, nrhs, inplace, PETSC_FALSE)); + + PetscCall(MatDestroy(&A)); + PetscCall(MatDestroy(&At)); + PetscCall(MatDestroy(&Aht)); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + test: + suffix: 1 + args: -inplace {{0 1}} -mat_type {{aij dense}} + output_file: output/empty.out + +TEST*/ diff --git a/src/mat/tests/ex36f.F90 b/src/mat/tests/ex36f.F90 index 5a5480e94a4..1d17a21da1e 100644 --- a/src/mat/tests/ex36f.F90 +++ b/src/mat/tests/ex36f.F90 @@ -67,7 +67,6 @@ subroutine Demo1() ! Clean up PetscCall(MatDestroy(A,ierr)) - return end ! ----------------------------------------------------------------- @@ -101,7 +100,6 @@ subroutine Demo2() ! Clean up PetscCall(MatDestroy(A,ierr)) - return end ! ----------------------------------------------------------------- @@ -115,5 +113,4 @@ subroutine FillUpMatrix(m,n,X) X(i,j) = 1.0/real(i+j-1) 20 continue 10 continue - return end diff --git a/src/mat/tests/ex49.c b/src/mat/tests/ex49.c index bb33af9e6df..ef5a17e92f9 100644 --- a/src/mat/tests/ex49.c +++ b/src/mat/tests/ex49.c @@ -51,7 +51,7 @@ int main(int argc, char **argv) PetscCall(MatNorm(mat, NORM_FROBENIUS, &normf)); PetscCall(MatNorm(mat, NORM_1, &norm1)); PetscCall(MatNorm(mat, NORM_INFINITY, &normi)); - PetscCall(PetscPrintf(PETSC_COMM_WORLD, "original: Frobenious norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "original: Frobenius norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); PetscCall(MatView(mat, PETSC_VIEWER_STDOUT_WORLD)); /* Form matrix transpose */ @@ -70,7 +70,7 @@ int main(int argc, char **argv) PetscCall(MatNorm(tmat, NORM_FROBENIUS, &normf)); PetscCall(MatNorm(tmat, NORM_1, &norm1)); PetscCall(MatNorm(tmat, NORM_INFINITY, &normi)); - PetscCall(PetscPrintf(PETSC_COMM_WORLD, "transpose: Frobenious norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "transpose: Frobenius norm = %g, one norm = %g, infinity norm = %g\n", (double)normf, (double)norm1, (double)normi)); PetscCall(MatView(tmat, PETSC_VIEWER_STDOUT_WORLD)); /* Test MatAXPY */ diff --git a/src/mat/tests/ex5.c b/src/mat/tests/ex5.c index ecf360f61db..f0c2454e66e 100644 --- a/src/mat/tests/ex5.c +++ b/src/mat/tests/ex5.c @@ -315,4 +315,27 @@ int main(int argc, char **args) args: -m 32 -mat_type sellcuda -vec_type cuda -mat_sell_spmv_cuda_kernel {{0 7 9}} -mat_sell_spmv_cuda_blocky {{2 4 8 16 32}} output_file: output/ex5_57.out requires: cuda !complex !single + + test: + suffix: sell_8 + nsize: 3 + args: -mat_type sellhip -vec_type hip -test_diagonalscale -test_zeroentries + filter: sed -e "s/hip/cuda/g" + output_file: output/ex5_55.out + requires: hip !complex + + test: + suffix: sell_9 + nsize: 3 + args: -mat_type sellhip -vec_type hip -mat_sell_spmv_hip_kernel {{1 2 3 4 5 6}} + filter: sed -e "s/hip/cuda/g" + output_file: output/ex5_56.out + requires: hip !complex + + test: + suffix: sell_10 + args: -m 32 -mat_type sellhip -vec_type hip -mat_sell_spmv_hip_kernel {{0 7 9}} -mat_sell_spmv_hip_blocky {{2 4 8 16 32}} + filter: sed -e "s/hip/cuda/g" + output_file: output/ex5_57.out + requires: hip !complex !single TEST*/ diff --git a/src/mat/tests/ex58f.F90 b/src/mat/tests/ex58f.F90 index 
e9d7be0c310..8d9344951be 100644 --- a/src/mat/tests/ex58f.F90 +++ b/src/mat/tests/ex58f.F90 @@ -13,6 +13,7 @@ program main PetscViewer v Vec rowmax PetscBool flg + IS isrow, iscol character*(256) f PetscCallA(PetscInitialize(ierr)) @@ -33,18 +34,23 @@ program main PetscCallA(VecSetSizes(rowmax,M,M,ierr)) PetscCallA(VecSetFromOptions(rowmax,ierr)) - PetscCallA(MatGetRowMaxAbs(A,rowmax,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatGetRowMaxAbs(A,rowmax,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA(VecView(rowmax,PETSC_VIEWER_STDOUT_WORLD,ierr)) - PetscCallA(MatGetRowMax(A,rowmax,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatGetRowMax(A,rowmax,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA(VecView(rowmax,PETSC_VIEWER_STDOUT_WORLD,ierr)) - PetscCallA(MatGetRowMinAbs(A,rowmax,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatGetRowMinAbs(A,rowmax,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA(VecView(rowmax,PETSC_VIEWER_STDOUT_WORLD,ierr)) - PetscCallA(MatGetRowMin(A,rowmax,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatGetRowMin(A,rowmax,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA(VecView(rowmax,PETSC_VIEWER_STDOUT_WORLD,ierr)) + PetscCallA(MatGetOwnershipIS(A,isrow,iscol,ierr)) + PetscCallA(ISDestroy(isrow,ierr)) + PetscCallA(ISDestroy(iscol,ierr)) + PetscCallA(MatGetOwnershipIS(A,PETSC_NULL_IS,PETSC_NULL_IS,ierr)) + PetscCallA(MatDestroy(A,ierr)) PetscCallA(PetscViewerDestroy(v,ierr)) PetscCallA(VecDestroy(rowmax,ierr)) diff --git a/src/mat/tests/ex63f.F90 b/src/mat/tests/ex63f.F90 index 99bb01f875a..aaa2c65070d 100644 --- a/src/mat/tests/ex63f.F90 +++ b/src/mat/tests/ex63f.F90 @@ -25,7 +25,7 @@ program main ! if (rank .eq. 0) then ten = 10 - PetscCallA(MatCreateSeqDense(PETSC_COMM_SELF,ten,ten,PETSC_NULL_SCALAR,A,ierr)) + PetscCallA(MatCreateSeqDense(PETSC_COMM_SELF,ten,ten,PETSC_NULL_SCALAR_ARRAY,A,ierr)) v = 1.0 do row=0,9 do col=0,9 diff --git a/src/mat/tests/ex85f.F90 b/src/mat/tests/ex85f.F90 index 5d7bc116481..8444cf28adc 100644 --- a/src/mat/tests/ex85f.F90 +++ b/src/mat/tests/ex85f.F90 @@ -23,7 +23,7 @@ program main value = 3.0 i = 4 one = 1 - PetscCallA(MatSetValues(m,one,i,one,i,value,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(m,one,[i],one,[i],[value],INSERT_VALUES,ierr)) PetscCallA(MatAssemblyBegin(m,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(m,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/mat/tests/output/ex23_3.out b/src/mat/tests/output/ex23_3.out index 5bafc0ea0bf..75795fd649d 100644 --- a/src/mat/tests/output/ex23_3.out +++ b/src/mat/tests/output/ex23_3.out @@ -245,31 +245,31 @@ Mat Object: 5 MPI processes total number of mallocs used during MatSetValues calls=0 Mat Object: 1 MPI process type: seqaij - rows=11, cols=10 + rows=11, cols=10, rbs=1, cbs=2 total: nonzeros=33, allocated nonzeros=33 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=1, cbs=2 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=1, cbs=2 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=1, cbs=2 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=1, cbs=2 total: nonzeros=0, 
allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines diff --git a/src/mat/tests/output/ex23_5.out b/src/mat/tests/output/ex23_5.out index 3c15795297d..81c2db08d75 100644 --- a/src/mat/tests/output/ex23_5.out +++ b/src/mat/tests/output/ex23_5.out @@ -311,37 +311,37 @@ Mat Object: 6 MPI processes total number of mallocs used during MatSetValues calls=0 Mat Object: 1 MPI process type: seqaij - rows=12, cols=12 + rows=12, cols=12, rbs=2, cbs=1 total: nonzeros=36, allocated nonzeros=36 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=2, cbs=1 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=2, cbs=1 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=2, cbs=1 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=2, cbs=1 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines Mat Object: 1 MPI process type: seqaij - rows=0, cols=0 + rows=0, cols=0, rbs=2, cbs=1 total: nonzeros=0, allocated nonzeros=0 total number of mallocs used during MatSetValues calls=0 not using I-node routines diff --git a/src/mat/tests/output/ex23_6.out b/src/mat/tests/output/ex23_6.out index a579b35308f..5f8abbe0aec 100644 --- a/src/mat/tests/output/ex23_6.out +++ b/src/mat/tests/output/ex23_6.out @@ -46,7 +46,7 @@ Mat Object: 1 MPI process total number of mallocs used during MatSetValues calls=0 Mat Object: 1 MPI process type: seqaij - rows=12, cols=12 + rows=12, cols=12, rbs=6, cbs=3 total: nonzeros=36, allocated nonzeros=36 total number of mallocs used during MatSetValues calls=0 not using I-node routines @@ -86,7 +86,7 @@ Mat Object: 1 MPI process total number of mallocs used during MatSetValues calls=0 Mat Object: 1 MPI process type: seqaij - rows=12, cols=12 + rows=12, cols=12, rbs=6, cbs=3 total: nonzeros=36, allocated nonzeros=36 total number of mallocs used during MatSetValues calls=0 not using I-node routines @@ -126,7 +126,7 @@ Mat Object: 1 MPI process total number of mallocs used during MatSetValues calls=0 Mat Object: 1 MPI process type: seqaij - rows=12, cols=12 + rows=12, cols=12, rbs=6, cbs=3 total: nonzeros=36, allocated nonzeros=36 total number of mallocs used during MatSetValues calls=0 not using I-node routines @@ -166,7 +166,7 @@ Mat Object: 1 MPI process total number of mallocs used during MatSetValues calls=0 Mat Object: 1 MPI process type: seqaij - rows=12, cols=12 + rows=12, cols=12, rbs=6, cbs=3 total: nonzeros=36, allocated nonzeros=36 total number of mallocs used during MatSetValues calls=0 not using I-node routines diff --git a/src/mat/tests/output/ex265_2_dm_mat_type-aij.out b/src/mat/tests/output/ex265_2_dm_mat_type-aij.out new file mode 100644 index 00000000000..e11566a5b09 --- /dev/null +++ b/src/mat/tests/output/ex265_2_dm_mat_type-aij.out @@ -0,0 +1,68 @@ +Mat Object: 2 MPI processes + type: mpiaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) 
+ row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +Mat Object: 2 MPI processes + type: mpiaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) + row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) 
(15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex265_2_dm_mat_type-baij.out b/src/mat/tests/output/ex265_2_dm_mat_type-baij.out new file mode 100644 index 00000000000..eb2ee0a547f --- /dev/null +++ b/src/mat/tests/output/ex265_2_dm_mat_type-baij.out @@ -0,0 +1,68 @@ +Mat Object: 2 MPI processes + type: mpibaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) 
(22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +Mat Object: 2 MPI processes + type: mpibaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) + row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) 
diff --git a/src/mat/tests/output/ex265_2_dm_mat_type-sbaij.out b/src/mat/tests/output/ex265_2_dm_mat_type-sbaij.out new file mode 100644 index 00000000000..9bf3fd74141 --- /dev/null +++ b/src/mat/tests/output/ex265_2_dm_mat_type-sbaij.out @@ -0,0 +1,68 @@ +Mat Object: 2 MPI processes + type: mpisbaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 2: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (30, 0.) (31, 0.) + row 31: (30, 0.) (31, 0.) +Mat Object: 2 MPI processes + type: mpisbaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) + row 2: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (24, 0.) (25, 0.) 
(26, 0.) (27, 0.) + row 26: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (30, 0.) (31, 0.) + row 31: (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex265_3_dm_mat_type-aij.out b/src/mat/tests/output/ex265_3_dm_mat_type-aij.out new file mode 100644 index 00000000000..86c4d7c25be --- /dev/null +++ b/src/mat/tests/output/ex265_3_dm_mat_type-aij.out @@ -0,0 +1,68 @@ +Mat Object: 3 MPI processes + type: mpiaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +Mat Object: 3 MPI processes + type: mpiaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) + row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) 
(6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex265_3_dm_mat_type-baij.out b/src/mat/tests/output/ex265_3_dm_mat_type-baij.out new file mode 100644 index 00000000000..608986ceda1 --- /dev/null +++ b/src/mat/tests/output/ex265_3_dm_mat_type-baij.out @@ -0,0 +1,68 @@ +Mat Object: 3 MPI processes + type: mpibaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) 
(7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +Mat Object: 3 MPI processes + type: mpibaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) + row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) 
+ row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex265_3_dm_mat_type-sbaij.out b/src/mat/tests/output/ex265_3_dm_mat_type-sbaij.out new file mode 100644 index 00000000000..eb3323fa525 --- /dev/null +++ b/src/mat/tests/output/ex265_3_dm_mat_type-sbaij.out @@ -0,0 +1,68 @@ +Mat Object: 3 MPI processes + type: mpisbaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) + row 2: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (30, 0.) (31, 0.) + row 31: (30, 0.) (31, 0.) +Mat Object: 3 MPI processes + type: mpisbaij + row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) + row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) + row 2: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 3: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) + row 4: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 5: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) + row 6: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 7: (6, 0.) (7, 0.) (14, 0.) (15, 0.) + row 8: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 9: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) + row 10: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 11: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) + row 12: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 13: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) + row 14: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 15: (14, 0.) (15, 0.) (22, 0.) (23, 0.) + row 16: (16, 0.) (17, 0.) 
(18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 17: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) + row 18: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 19: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) + row 20: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 21: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) + row 22: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 23: (22, 0.) (23, 0.) (30, 0.) (31, 0.) + row 24: (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 25: (24, 0.) (25, 0.) (26, 0.) (27, 0.) + row 26: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 27: (26, 0.) (27, 0.) (28, 0.) (29, 0.) + row 28: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 29: (28, 0.) (29, 0.) (30, 0.) (31, 0.) + row 30: (30, 0.) (31, 0.) + row 31: (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex265_dm_mat_type-aij.out b/src/mat/tests/output/ex265_dm_mat_type-aij.out new file mode 100644 index 00000000000..a74df66ca77 --- /dev/null +++ b/src/mat/tests/output/ex265_dm_mat_type-aij.out @@ -0,0 +1,68 @@ +Mat Object: 1 MPI process + type: seqaij +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) +row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 31: (22, 0.) (23, 0.) (28, 0.) 
(29, 0.) (30, 0.) (31, 0.) +Mat Object: 1 MPI process + type: seqaij +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) +row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex265_dm_mat_type-baij.out b/src/mat/tests/output/ex265_dm_mat_type-baij.out new file mode 100644 index 00000000000..7f4e0226087 --- /dev/null +++ b/src/mat/tests/output/ex265_dm_mat_type-baij.out @@ -0,0 +1,68 @@ +Mat Object: 1 MPI process + type: seqbaij +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) +row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) 
(11, 0.) (16, 0.) (17, 0.) +row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 20: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +Mat Object: 1 MPI process + type: seqbaij +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) +row 2: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 3: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 4: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 5: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 6: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 7: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 8: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 9: (0, 0.) (1, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 10: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 11: (2, 0.) (3, 0.) (8, 0.) (9, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 12: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 13: (4, 0.) (5, 0.) (10, 0.) (11, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 14: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 15: (6, 0.) (7, 0.) (12, 0.) (13, 0.) (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 16: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 17: (8, 0.) (9, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 18: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 19: (10, 0.) (11, 0.) (16, 0.) (17, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 20: (12, 0.) (13, 0.) (18, 0.) 
(19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 21: (12, 0.) (13, 0.) (18, 0.) (19, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 22: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 23: (14, 0.) (15, 0.) (20, 0.) (21, 0.) (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 24: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 25: (16, 0.) (17, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 26: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 27: (18, 0.) (19, 0.) (24, 0.) (25, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 28: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 29: (20, 0.) (21, 0.) (26, 0.) (27, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 30: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 31: (22, 0.) (23, 0.) (28, 0.) (29, 0.) (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex265_dm_mat_type-sbaij.out b/src/mat/tests/output/ex265_dm_mat_type-sbaij.out new file mode 100644 index 00000000000..0d4a1467c74 --- /dev/null +++ b/src/mat/tests/output/ex265_dm_mat_type-sbaij.out @@ -0,0 +1,68 @@ +Mat Object: 1 MPI process + type: seqsbaij +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) +row 2: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 3: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 4: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 5: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 6: (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 7: (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 8: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 9: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 10: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 11: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 12: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 13: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 14: (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 15: (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 16: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 17: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 18: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 19: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 20: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 21: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 22: (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 23: (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 24: (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 25: (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 26: (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 27: (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 28: (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 29: (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 30: (30, 0.) (31, 0.) +row 31: (30, 0.) (31, 0.) +Mat Object: 1 MPI process + type: seqsbaij +row 0: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 1.) (17, 2.) +row 1: (0, 0.) (1, 0.) (2, 0.) (3, 0.) (8, 0.) (9, 0.) (16, 3.) (17, 4.) +row 2: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 3: (2, 0.) (3, 0.) (4, 0.) (5, 0.) (10, 0.) (11, 0.) +row 4: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 5: (4, 0.) (5, 0.) (6, 0.) (7, 0.) (12, 0.) (13, 0.) +row 6: (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 7: (6, 0.) (7, 0.) (14, 0.) (15, 0.) +row 8: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 9: (8, 0.) (9, 0.) (10, 0.) (11, 0.) (16, 0.) (17, 0.) +row 10: (10, 0.) (11, 0.) (12, 0.) (13, 0.) 
(18, 0.) (19, 0.) +row 11: (10, 0.) (11, 0.) (12, 0.) (13, 0.) (18, 0.) (19, 0.) +row 12: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 13: (12, 0.) (13, 0.) (14, 0.) (15, 0.) (20, 0.) (21, 0.) +row 14: (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 15: (14, 0.) (15, 0.) (22, 0.) (23, 0.) +row 16: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 17: (16, 0.) (17, 0.) (18, 0.) (19, 0.) (24, 0.) (25, 0.) +row 18: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 19: (18, 0.) (19, 0.) (20, 0.) (21, 0.) (26, 0.) (27, 0.) +row 20: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 21: (20, 0.) (21, 0.) (22, 0.) (23, 0.) (28, 0.) (29, 0.) +row 22: (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 23: (22, 0.) (23, 0.) (30, 0.) (31, 0.) +row 24: (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 25: (24, 0.) (25, 0.) (26, 0.) (27, 0.) +row 26: (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 27: (26, 0.) (27, 0.) (28, 0.) (29, 0.) +row 28: (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 29: (28, 0.) (29, 0.) (30, 0.) (31, 0.) +row 30: (30, 0.) (31, 0.) +row 31: (30, 0.) (31, 0.) diff --git a/src/mat/tests/output/ex2_11_A.out b/src/mat/tests/output/ex2_11_A.out index f05356e0d2f..2befe825c81 100644 --- a/src/mat/tests/output/ex2_11_A.out +++ b/src/mat/tests/output/ex2_11_A.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 320.617, one norm = 273., infinity norm = 585. +original A: Frobenius norm = 320.617, one norm = 273., infinity norm = 585. type: seqaij row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) (7, 8.) (8, 9.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) (7, 18.) (8, 19.) @@ -7,7 +7,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) (7, row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) (7, 48.) (8, 49.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) (7, 58.) (8, 59.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) (7, 68.) (8, 69.) -B = A^T: Frobenious norm = 320.617, one norm = 585., infinity norm = 273. +B = A^T: Frobenius norm = 320.617, one norm = 585., infinity norm = 273. type: seqaij row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) diff --git a/src/mat/tests/output/ex2_11_A_aijcusparse.out b/src/mat/tests/output/ex2_11_A_aijcusparse.out index e57cc043394..530def0eb00 100644 --- a/src/mat/tests/output/ex2_11_A_aijcusparse.out +++ b/src/mat/tests/output/ex2_11_A_aijcusparse.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 320.617, one norm = 273., infinity norm = 585. +original A: Frobenius norm = 320.617, one norm = 273., infinity norm = 585. type: seqaijcusparse row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) (7, 8.) (8, 9.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) (7, 18.) (8, 19.) @@ -7,7 +7,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) (7, row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) (7, 48.) (8, 49.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) (7, 58.) (8, 59.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) (7, 68.) (8, 69.) -B = A^T: Frobenious norm = 320.617, one norm = 585., infinity norm = 273. +B = A^T: Frobenius norm = 320.617, one norm = 585., infinity norm = 273. type: seqaijcusparse row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) 
(5, 52.) (6, 62.) diff --git a/src/mat/tests/output/ex2_11_B.out b/src/mat/tests/output/ex2_11_B.out index 740f3027bb7..449181d40bc 100644 --- a/src/mat/tests/output/ex2_11_B.out +++ b/src/mat/tests/output/ex2_11_B.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 228.44, one norm = 245., infinity norm = 315. +original A: Frobenius norm = 228.44, one norm = 245., infinity norm = 315. type: seqaij row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) @@ -7,7 +7,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) -B = A^T: Frobenious norm = 228.44, one norm = 315., infinity norm = 245. +B = A^T: Frobenius norm = 228.44, one norm = 315., infinity norm = 245. type: seqaij row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) diff --git a/src/mat/tests/output/ex2_12_A.out b/src/mat/tests/output/ex2_12_A.out index fa4d0a81816..602aee8064a 100644 --- a/src/mat/tests/output/ex2_12_A.out +++ b/src/mat/tests/output/ex2_12_A.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 320.617, one norm = 273., infinity norm = 585. +original A: Frobenius norm = 320.617, one norm = 273., infinity norm = 585. row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) (7, 8.) (8, 9.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) (7, 18.) (8, 19.) row 2: (0, 21.) (1, 22.) (2, 23.) (3, 24.) (4, 25.) (5, 26.) (6, 27.) (7, 28.) (8, 29.) @@ -6,7 +6,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) (7, row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) (7, 48.) (8, 49.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) (7, 58.) (8, 59.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) (7, 68.) (8, 69.) -B = A^T: Frobenious norm = 320.617, one norm = 585., infinity norm = 273. +B = A^T: Frobenius norm = 320.617, one norm = 585., infinity norm = 273. row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) row 2: (0, 3.) (1, 13.) (2, 23.) (3, 33.) (4, 43.) (5, 53.) (6, 63.) diff --git a/src/mat/tests/output/ex2_12_B.out b/src/mat/tests/output/ex2_12_B.out index c29316bf1fd..fa892edea07 100644 --- a/src/mat/tests/output/ex2_12_B.out +++ b/src/mat/tests/output/ex2_12_B.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 228.44, one norm = 245., infinity norm = 315. +original A: Frobenius norm = 228.44, one norm = 245., infinity norm = 315. row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) row 2: (0, 21.) (1, 22.) (2, 23.) (3, 24.) (4, 25.) @@ -6,7 +6,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) -B = A^T: Frobenious norm = 228.44, one norm = 315., infinity norm = 245. +B = A^T: Frobenius norm = 228.44, one norm = 315., infinity norm = 245. row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) row 2: (0, 3.) (1, 13.) (2, 23.) (3, 33.) (4, 43.) (5, 53.) (6, 63.) 
diff --git a/src/mat/tests/output/ex2_2.out b/src/mat/tests/output/ex2_2.out index 6327d653ef3..e0fc0b939b8 100644 --- a/src/mat/tests/output/ex2_2.out +++ b/src/mat/tests/output/ex2_2.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 270.476, one norm = 252, infinity norm = 441 +original A: Frobenius norm = 270.476, one norm = 252, infinity norm = 441 row 0: (1, 1) (2, 2) (3, 3) (4, 4) (5, 5) (6, 6) row 1: (0, 10) (1, 11) (2, 12) (3, 13) (4, 14) (5, 15) (6, 16) row 2: (0, 20) (1, 21) (2, 22) (3, 23) (4, 24) (5, 25) (6, 26) @@ -6,7 +6,7 @@ row 3: (0, 30) (1, 31) (2, 32) (3, 33) (4, 34) (5, 35) (6, 36) row 4: (0, 40) (1, 41) (2, 42) (3, 43) (4, 44) (5, 45) (6, 46) row 5: (0, 50) (1, 51) (2, 52) (3, 53) (4, 54) (5, 55) (6, 56) row 6: (0, 60) (1, 61) (2, 62) (3, 63) (4, 64) (5, 65) (6, 66) -B = A^T: Frobenious norm = 270.476, one norm = 441, infinity norm = 252 +B = A^T: Frobenius norm = 270.476, one norm = 441, infinity norm = 252 row 0: (1, 10) (2, 20) (3, 30) (4, 40) (5, 50) (6, 60) row 1: (0, 1) (1, 11) (2, 21) (3, 31) (4, 41) (5, 51) (6, 61) row 2: (0, 2) (1, 12) (2, 22) (3, 32) (4, 42) (5, 52) (6, 62) diff --git a/src/mat/tests/output/ex2_21.out b/src/mat/tests/output/ex2_21.out index 006d963c240..6fab54a5539 100644 --- a/src/mat/tests/output/ex2_21.out +++ b/src/mat/tests/output/ex2_21.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 276.478, one norm = 259., infinity norm = 448. +original A: Frobenius norm = 276.478, one norm = 259., infinity norm = 448. row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) row 2: (0, 21.) (1, 22.) (2, 23.) (3, 24.) (4, 25.) (5, 26.) (6, 27.) @@ -6,7 +6,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) -B = A^T: Frobenious norm = 276.478, one norm = 448., infinity norm = 259. +B = A^T: Frobenius norm = 276.478, one norm = 448., infinity norm = 259. row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) row 2: (0, 3.) (1, 13.) (2, 23.) (3, 33.) (4, 43.) (5, 53.) (6, 63.) diff --git a/src/mat/tests/output/ex2_21_aijcusparse.out b/src/mat/tests/output/ex2_21_aijcusparse.out index 3b86875901e..7465663234d 100644 --- a/src/mat/tests/output/ex2_21_aijcusparse.out +++ b/src/mat/tests/output/ex2_21_aijcusparse.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 270.476, one norm = 252., infinity norm = 441. +original A: Frobenius norm = 270.476, one norm = 252., infinity norm = 441. Mat Object: 1 MPI process type: mpiaijcusparse row 0: (1, 1.) (2, 2.) (3, 3.) (4, 4.) (5, 5.) (6, 6.) @@ -8,7 +8,7 @@ row 3: (0, 30.) (1, 31.) (2, 32.) (3, 33.) (4, 34.) (5, 35.) (6, 36.) row 4: (0, 40.) (1, 41.) (2, 42.) (3, 43.) (4, 44.) (5, 45.) (6, 46.) row 5: (0, 50.) (1, 51.) (2, 52.) (3, 53.) (4, 54.) (5, 55.) (6, 56.) row 6: (0, 60.) (1, 61.) (2, 62.) (3, 63.) (4, 64.) (5, 65.) (6, 66.) -B = A^T: Frobenious norm = 270.476, one norm = 441., infinity norm = 252. +B = A^T: Frobenius norm = 270.476, one norm = 441., infinity norm = 252. Mat Object: 1 MPI process type: mpiaijcusparse row 0: (1, 10.) (2, 20.) (3, 30.) (4, 40.) (5, 50.) (6, 60.) 
diff --git a/src/mat/tests/output/ex2_22.out b/src/mat/tests/output/ex2_22.out index 006d963c240..6fab54a5539 100644 --- a/src/mat/tests/output/ex2_22.out +++ b/src/mat/tests/output/ex2_22.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 276.478, one norm = 259., infinity norm = 448. +original A: Frobenius norm = 276.478, one norm = 259., infinity norm = 448. row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) row 2: (0, 21.) (1, 22.) (2, 23.) (3, 24.) (4, 25.) (5, 26.) (6, 27.) @@ -6,7 +6,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) -B = A^T: Frobenious norm = 276.478, one norm = 448., infinity norm = 259. +B = A^T: Frobenius norm = 276.478, one norm = 448., infinity norm = 259. row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) row 2: (0, 3.) (1, 13.) (2, 23.) (3, 33.) (4, 43.) (5, 53.) (6, 63.) diff --git a/src/mat/tests/output/ex2_23.out b/src/mat/tests/output/ex2_23.out index 0d41dff6543..d64f1c46fcc 100644 --- a/src/mat/tests/output/ex2_23.out +++ b/src/mat/tests/output/ex2_23.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 276.478, one norm = 259., infinity norm = 448. +original A: Frobenius norm = 276.478, one norm = 259., infinity norm = 448. row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) row 2: (0, 21.) (1, 22.) (2, 23.) (3, 24.) (4, 25.) (5, 26.) (6, 27.) @@ -6,7 +6,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) -B = A^T: Frobenious norm = 276.478, one norm = 448., infinity norm = 259. +B = A^T: Frobenius norm = 276.478, one norm = 448., infinity norm = 259. row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) row 2: (0, 3.) (1, 13.) (2, 23.) (3, 33.) (4, 43.) (5, 53.) (6, 63.) diff --git a/src/mat/tests/output/ex2_24.out b/src/mat/tests/output/ex2_24.out index 0d41dff6543..d64f1c46fcc 100644 --- a/src/mat/tests/output/ex2_24.out +++ b/src/mat/tests/output/ex2_24.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 276.478, one norm = 259., infinity norm = 448. +original A: Frobenius norm = 276.478, one norm = 259., infinity norm = 448. row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) row 2: (0, 21.) (1, 22.) (2, 23.) (3, 24.) (4, 25.) (5, 26.) (6, 27.) @@ -6,7 +6,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) -B = A^T: Frobenious norm = 276.478, one norm = 448., infinity norm = 259. +B = A^T: Frobenius norm = 276.478, one norm = 448., infinity norm = 259. row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) row 1: (0, 2.) (1, 12.) (2, 22.) 
(3, 32.) (4, 42.) (5, 52.) (6, 62.) row 2: (0, 3.) (1, 13.) (2, 23.) (3, 33.) (4, 43.) (5, 53.) (6, 63.) diff --git a/src/mat/tests/output/ex2_3.out b/src/mat/tests/output/ex2_3.out index 4807081840a..f7764777fc3 100644 --- a/src/mat/tests/output/ex2_3.out +++ b/src/mat/tests/output/ex2_3.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 320.617, one norm = 273., infinity norm = 585. +original A: Frobenius norm = 320.617, one norm = 273., infinity norm = 585. Mat Object: 2 MPI processes type: mpiaij row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) (7, 8.) (8, 9.) @@ -8,7 +8,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) (7, row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) (7, 48.) (8, 49.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) (7, 58.) (8, 59.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) (7, 68.) (8, 69.) -B = A^T: Frobenious norm = 320.617, one norm = 585., infinity norm = 273. +B = A^T: Frobenius norm = 320.617, one norm = 585., infinity norm = 273. Mat Object: 2 MPI processes type: mpiaij row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) diff --git a/src/mat/tests/output/ex2_3_aijcusparse.out b/src/mat/tests/output/ex2_3_aijcusparse.out index f10559597bc..75725743cbb 100644 --- a/src/mat/tests/output/ex2_3_aijcusparse.out +++ b/src/mat/tests/output/ex2_3_aijcusparse.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 320.617, one norm = 273., infinity norm = 585. +original A: Frobenius norm = 320.617, one norm = 273., infinity norm = 585. Mat Object: 2 MPI processes type: mpiaijcusparse row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) (7, 8.) (8, 9.) @@ -8,7 +8,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) (7, row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) (7, 48.) (8, 49.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) (7, 58.) (8, 59.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) (7, 68.) (8, 69.) -B = A^T: Frobenious norm = 320.617, one norm = 585., infinity norm = 273. +B = A^T: Frobenius norm = 320.617, one norm = 585., infinity norm = 273. Mat Object: 2 MPI processes type: mpiaijcusparse row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) diff --git a/src/mat/tests/output/ex2_4.out b/src/mat/tests/output/ex2_4.out index 43e3ce9e9ba..aeeb72eb30f 100644 --- a/src/mat/tests/output/ex2_4.out +++ b/src/mat/tests/output/ex2_4.out @@ -1,4 +1,4 @@ -original A: Frobenious norm = 320.617, one norm = 273., infinity norm = 585. +original A: Frobenius norm = 320.617, one norm = 273., infinity norm = 585. row 0: (0, 1.) (1, 2.) (2, 3.) (3, 4.) (4, 5.) (5, 6.) (6, 7.) (7, 8.) (8, 9.) row 1: (0, 11.) (1, 12.) (2, 13.) (3, 14.) (4, 15.) (5, 16.) (6, 17.) (7, 18.) (8, 19.) row 2: (0, 21.) (1, 22.) (2, 23.) (3, 24.) (4, 25.) (5, 26.) (6, 27.) (7, 28.) (8, 29.) @@ -6,7 +6,7 @@ row 3: (0, 31.) (1, 32.) (2, 33.) (3, 34.) (4, 35.) (5, 36.) (6, 37.) (7, row 4: (0, 41.) (1, 42.) (2, 43.) (3, 44.) (4, 45.) (5, 46.) (6, 47.) (7, 48.) (8, 49.) row 5: (0, 51.) (1, 52.) (2, 53.) (3, 54.) (4, 55.) (5, 56.) (6, 57.) (7, 58.) (8, 59.) row 6: (0, 61.) (1, 62.) (2, 63.) (3, 64.) (4, 65.) (5, 66.) (6, 67.) (7, 68.) (8, 69.) -B = A^T: Frobenious norm = 320.617, one norm = 585., infinity norm = 273. +B = A^T: Frobenius norm = 320.617, one norm = 585., infinity norm = 273. row 0: (0, 1.) (1, 11.) (2, 21.) (3, 31.) (4, 41.) (5, 51.) (6, 61.) 
row 1: (0, 2.) (1, 12.) (2, 22.) (3, 32.) (4, 42.) (5, 52.) (6, 62.) row 2: (0, 3.) (1, 13.) (2, 23.) (3, 33.) (4, 43.) (5, 53.) (6, 63.) diff --git a/src/mat/tests/output/ex49_1.out b/src/mat/tests/output/ex49_1.out index 3d3df44b90a..c1e14764907 100644 --- a/src/mat/tests/output/ex49_1.out +++ b/src/mat/tests/output/ex49_1.out @@ -1,5 +1,5 @@ original matrix nonzeros = 16, allocated nonzeros = 16 -original: Frobenious norm = 79.8499, one norm = 72., infinity norm = 126. +original: Frobenius norm = 79.8499, one norm = 72., infinity norm = 126. Mat Object: 1 MPI process type: seqaij row 0: (0, 0.) (1, 1.) (2, 2.) (3, 3.) @@ -7,7 +7,7 @@ row 1: (0, 10.) (1, 11.) (2, 12.) (3, 13.) row 2: (0, 20.) (1, 21.) (2, 22.) (3, 23.) row 3: (0, 30.) (1, 31.) (2, 32.) (3, 33.) transpose matrix nonzeros = 16, allocated nonzeros = 16 -transpose: Frobenious norm = 79.8499, one norm = 126., infinity norm = 72. +transpose: Frobenius norm = 79.8499, one norm = 126., infinity norm = 72. Mat Object: 1 MPI process type: seqaij row 0: (0, 0.) (1, 10.) (2, 20.) (3, 30.) diff --git a/src/mat/tests/output/ex49_r1.out b/src/mat/tests/output/ex49_r1.out index 2a54b2a0489..1f801bb561d 100644 --- a/src/mat/tests/output/ex49_r1.out +++ b/src/mat/tests/output/ex49_r1.out @@ -1,5 +1,5 @@ original matrix nonzeros = 24, allocated nonzeros = 24 -original: Frobenious norm = 102.078, one norm = 80., infinity norm = 195. +original: Frobenius norm = 102.078, one norm = 80., infinity norm = 195. Mat Object: 1 MPI process type: seqaij row 0: (0, 0.) (1, 1.) (2, 2.) (3, 3.) (4, 4.) (5, 5.) @@ -7,7 +7,7 @@ row 1: (0, 10.) (1, 11.) (2, 12.) (3, 13.) (4, 14.) (5, 15.) row 2: (0, 20.) (1, 21.) (2, 22.) (3, 23.) (4, 24.) (5, 25.) row 3: (0, 30.) (1, 31.) (2, 32.) (3, 33.) (4, 34.) (5, 35.) transpose matrix nonzeros = 24, allocated nonzeros = 24 -transpose: Frobenious norm = 102.078, one norm = 195., infinity norm = 80. +transpose: Frobenius norm = 102.078, one norm = 195., infinity norm = 80. Mat Object: 1 MPI process type: seqaij row 0: (0, 0.) (1, 10.) (2, 20.) (3, 30.) diff --git a/src/mat/tests/output/ex49_r1_par.out b/src/mat/tests/output/ex49_r1_par.out index 6664d8fd1ad..8fd9d7f821c 100644 --- a/src/mat/tests/output/ex49_r1_par.out +++ b/src/mat/tests/output/ex49_r1_par.out @@ -1,5 +1,5 @@ original matrix nonzeros = 24, allocated nonzeros = 24 -original: Frobenious norm = 102.078, one norm = 80., infinity norm = 195. +original: Frobenius norm = 102.078, one norm = 80., infinity norm = 195. Mat Object: 2 MPI processes type: mpiaij row 0: (0, 0.) (1, 1.) (2, 2.) (3, 3.) (4, 4.) (5, 5.) @@ -7,7 +7,7 @@ row 1: (0, 10.) (1, 11.) (2, 12.) (3, 13.) (4, 14.) (5, 15.) row 2: (0, 20.) (1, 21.) (2, 22.) (3, 23.) (4, 24.) (5, 25.) row 3: (0, 30.) (1, 31.) (2, 32.) (3, 33.) (4, 34.) (5, 35.) transpose matrix nonzeros = 24, allocated nonzeros = 24 -transpose: Frobenious norm = 102.078, one norm = 195., infinity norm = 80. +transpose: Frobenius norm = 102.078, one norm = 195., infinity norm = 80. Mat Object: 2 MPI processes type: mpiaij row 0: (0, 0.) (1, 10.) (2, 20.) (3, 30.) 
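(The surrounding hunks correct the spelling "Frobenious" to "Frobenius" in expected test output. The three norms printed in those outputs are the kind of values `MatNorm()` returns; the sketch below shows one plausible way such lines are produced. It is illustrative only: the helper name `ReportNorms`, its `label` argument, and the exact print format are assumptions, not code from the PETSc test sources.)

```c
#include <petscmat.h>

/* Illustrative helper (not from the PETSc sources): print the three matrix
   norms in the style of the expected-output lines above. */
static PetscErrorCode ReportNorms(Mat A, const char *label)
{
  PetscReal fro, one, inf;

  PetscFunctionBeginUser;
  PetscCall(MatNorm(A, NORM_FROBENIUS, &fro)); /* sqrt of the sum of squares of all entries */
  PetscCall(MatNorm(A, NORM_1, &one));         /* maximum absolute column sum */
  PetscCall(MatNorm(A, NORM_INFINITY, &inf));  /* maximum absolute row sum */
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "%s: Frobenius norm = %g, one norm = %g, infinity norm = %g\n", label, (double)fro, (double)one, (double)inf));
  PetscFunctionReturn(PETSC_SUCCESS);
}
```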
diff --git a/src/mat/tutorials/ex15f.F90 b/src/mat/tutorials/ex15f.F90 index ad34e1da719..a446499253e 100644 --- a/src/mat/tutorials/ex15f.F90 +++ b/src/mat/tutorials/ex15f.F90 @@ -25,8 +25,8 @@ program main PetscCallA(MatCreate(PETSC_COMM_WORLD, A,ierr)) PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, N, N,ierr)) PetscCallA(MatSetFromOptions(A,ierr)) - PetscCallA(MatSeqAIJSetPreallocation(A, three, PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatMPIAIJSetPreallocation(A, three, PETSC_NULL_INTEGER, two, PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatSeqAIJSetPreallocation(A, three, PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatMPIAIJSetPreallocation(A, three, PETSC_NULL_INTEGER_ARRAY, two, PETSC_NULL_INTEGER_ARRAY,ierr)) !/* Create a linear mesh */ PetscCallA(MatGetOwnershipRange(A, myStart, myEnd,ierr)) @@ -36,21 +36,21 @@ program main allocate(vals(2)) vals = 1.0 allocate(cols(2),source=[r,r+1]) - PetscCallA(MatSetValues(A, one, r, two, cols, vals, INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A, one, [r], two, cols, vals, INSERT_VALUES,ierr)) deallocate(cols) deallocate(vals) else if (r == N-1) then allocate(vals(2)) vals = 1.0 allocate(cols(2),source=[r-1,r]) - PetscCallA(MatSetValues(A, one, r, two, cols, vals, INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A, one, [r], two, cols, vals, INSERT_VALUES,ierr)) deallocate(cols) deallocate(vals) else allocate(vals(3)) vals = 1.0 allocate(cols(3),source=[r-1,r,r+1]) - PetscCallA(MatSetValues(A, one, r, three, cols, vals, INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A, one, [r], three, cols, vals, INSERT_VALUES,ierr)) deallocate(cols) deallocate(vals) end if diff --git a/src/mat/tutorials/ex17f.F90 b/src/mat/tutorials/ex17f.F90 index 179cfb56d9b..8a5c261147f 100644 --- a/src/mat/tutorials/ex17f.F90 +++ b/src/mat/tutorials/ex17f.F90 @@ -47,12 +47,12 @@ program main PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr)) PetscCallA(MatSetsizes(A,m,m,PETSC_DECIDE,PETSC_DECIDE,ierr)) PetscCallA(MatSetFromOptions(A,ierr)) - PetscCallA(MatSeqAIJSetPreallocation(A,three,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatMPIAIJSetPreallocation(A,three,PETSC_NULL_INTEGER,two,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatSeqBAIJSetPreallocation(A,one,three,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatMPIBAIJSetPreallocation(A,one,three,PETSC_NULL_INTEGER,2,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatSeqSBAIJSetPreallocation(A,one,two,PETSC_NULL_INTEGER,ierr)) - PetscCallA(MatMPISBAIJSetPreallocation(A,one,two,PETSC_NULL_INTEGER,1,PETSC_NULL_INTEGER,ierr)) + PetscCallA(MatSeqAIJSetPreallocation(A,three,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatMPIAIJSetPreallocation(A,three,PETSC_NULL_INTEGER_ARRAY,two,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatSeqBAIJSetPreallocation(A,one,three,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatMPIBAIJSetPreallocation(A,one,three,PETSC_NULL_INTEGER_ARRAY,two,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatSeqSBAIJSetPreallocation(A,one,two,PETSC_NULL_INTEGER_ARRAY,ierr)) + PetscCallA(MatMPISBAIJSetPreallocation(A,one,two,PETSC_NULL_INTEGER_ARRAY,one,PETSC_NULL_INTEGER_ARRAY,ierr)) PetscCallA(MatGetSize(A,PETSC_NULL_INTEGER,N,ierr)) PetscCallA(MatGetOwnershipRange(A,rstart,rend,ierr)) @@ -63,7 +63,7 @@ program main cols = (/mod((i+N-1),N),i,mod((i+1),N)/) vals = [1.0,1.0,1.0] - PetscCallA(MatSetValues(A,one,i,three,cols,vals,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(A,one,[i],three,cols,vals,INSERT_VALUES,ierr)) end do deallocate(cols) deallocate(vals) diff --git a/src/mat/utils/axpy.c b/src/mat/utils/axpy.c index 
ed41a6031a0..acecbd70ee6 100644 --- a/src/mat/utils/axpy.c +++ b/src/mat/utils/axpy.c @@ -375,7 +375,7 @@ PetscErrorCode MatAYPX(Mat Y, PetscScalar a, Mat X, MatStructure str) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatComputeOperator - Computes the explicit matrix Collective @@ -390,7 +390,7 @@ PetscErrorCode MatAYPX(Mat Y, PetscScalar a, Mat X, MatStructure str) Level: advanced Note: - This computation is done by applying the operators to columns of the identity matrix. + This computation is done by applying the operator to columns of the identity matrix. This routine is costly in general, and is recommended for use only with relatively small systems. Currently, this routine uses a dense matrix format if `mattype` == `NULL`. @@ -405,7 +405,7 @@ PetscErrorCode MatComputeOperator(Mat inmat, MatType mattype, Mat *mat) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatComputeOperatorTranspose - Computes the explicit matrix representation of a given matrix that can apply `MatMultTranspose()` diff --git a/src/mat/utils/gcreate.c b/src/mat/utils/gcreate.c index cd40f565598..c4346136940 100644 --- a/src/mat/utils/gcreate.c +++ b/src/mat/utils/gcreate.c @@ -3,7 +3,7 @@ #include <../src/mat/impls/aij/seq/aij.h> #include <../src/mat/impls/aij/mpi/mpiaij.h> -PETSC_INTERN PetscErrorCode MatSetBlockSizes_Default(Mat mat, PetscInt rbs, PetscInt cbs) +PetscErrorCode MatSetBlockSizes_Default(Mat mat, PetscInt rbs, PetscInt cbs) { PetscFunctionBegin; if (!mat->preallocated) PetscFunctionReturn(PETSC_SUCCESS); @@ -12,7 +12,7 @@ PETSC_INTERN PetscErrorCode MatSetBlockSizes_Default(Mat mat, PetscInt rbs, Pets PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_INTERN PetscErrorCode MatShift_Basic(Mat Y, PetscScalar a) +PetscErrorCode MatShift_Basic(Mat Y, PetscScalar a) { PetscInt i, start, end, oldValA = 0, oldValB = 0; PetscScalar alpha = a; @@ -98,8 +98,6 @@ PetscErrorCode MatCreate(MPI_Comm comm, Mat *A) PetscFunctionBegin; PetscAssertPointer(A, 2); - - *A = NULL; PetscCall(MatInitializePackage()); PetscCall(PetscHeaderCreate(B, MAT_CLASSID, "Mat", "Matrix", "Mat", comm, MatDestroy, MatView)); @@ -124,7 +122,7 @@ PetscErrorCode MatCreate(MPI_Comm comm, Mat *A) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateFromOptions - Creates a matrix whose type is set from the options database Collective @@ -216,11 +214,15 @@ PetscErrorCode MatSetErrorIfFailure(Mat mat, PetscBool flg) Likewise, the `n` used must match that used as the local size in `VecCreateMPI()` for 'x'. + If `m` and `n` are not `PETSC_DECIDE`, then the values determine the `PetscLayout` of the matrix and the ranges returned by + `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`. + You cannot change the sizes once they have been set. The sizes must be set before `MatSetUp()` or MatXXXSetPreallocation() is called. -.seealso: [](ch_matrices), `Mat`, `MatGetSize()`, `PetscSplitOwnership()` +.seealso: [](ch_matrices), `Mat`, `MatGetSize()`, `PetscSplitOwnership()`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, + `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`, `VecSetSizes()` @*/ PetscErrorCode MatSetSizes(Mat A, PetscInt m, PetscInt n, PetscInt M, PetscInt N) { @@ -335,7 +337,7 @@ PetscErrorCode MatSetFromOptions(Mat B) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatXAIJSetPreallocation - set preallocation for serial and parallel `MATAIJ`, `MATBAIJ`, and `MATSBAIJ` matrices and their unassembled versions.
Collective @@ -422,6 +424,11 @@ PetscErrorCode MatXAIJSetPreallocation(Mat A, PetscInt bs, const PetscInt dnnz[] Level: developer + Notes: + `A` and `C` must be of the same type. + The object list and query function list in `A` are retained, as well as the object name, and prefix. + The object state of `A` is increased by 1. + Developer Note: This is somewhat different from `MatHeaderReplace()`, it would be nice to merge the code @@ -429,19 +436,21 @@ PetscErrorCode MatXAIJSetPreallocation(Mat A, PetscInt bs, const PetscInt dnnz[] @*/ PetscErrorCode MatHeaderMerge(Mat A, Mat *C) { - PetscInt refct; - PetscOps Abops; - struct _MatOps Aops; - char *mtype, *mname, *mprefix; - Mat_Product *product; - Mat_Redundant *redundant; - PetscObjectState state; + PetscInt refct; + PetscOps Abops; + struct _MatOps Aops; + char *mtype, *mname, *mprefix; + Mat_Product *product; + Mat_Redundant *redundant; + PetscObjectState state; + PetscObjectList olist; + PetscFunctionList qlist; PetscFunctionBegin; PetscValidHeaderSpecific(A, MAT_CLASSID, 1); PetscValidHeaderSpecific(*C, MAT_CLASSID, 2); if (A == *C) PetscFunctionReturn(PETSC_SUCCESS); - PetscCheckSameComm(A, 1, *C, 2); + PetscCheckSameTypeAndComm(A, 1, *C, 2); /* save the parts of A we need */ Abops = ((PetscObject)A)->bops[0]; Aops = A->ops[0]; @@ -452,10 +461,14 @@ PetscErrorCode MatHeaderMerge(Mat A, Mat *C) mprefix = ((PetscObject)A)->prefix; product = A->product; redundant = A->redundant; + qlist = ((PetscObject)A)->qlist; + olist = ((PetscObject)A)->olist; /* zero these so the destroy below does not free them */ ((PetscObject)A)->type_name = NULL; ((PetscObject)A)->name = NULL; + ((PetscObject)A)->qlist = NULL; + ((PetscObject)A)->olist = NULL; /* free all the interior data structures from mat @@ -468,8 +481,6 @@ PetscErrorCode MatHeaderMerge(Mat A, Mat *C) PetscCall(PetscFree(A->defaultrandtype)); PetscCall(PetscLayoutDestroy(&A->rmap)); PetscCall(PetscLayoutDestroy(&A->cmap)); - PetscCall(PetscFunctionListDestroy(&((PetscObject)A)->qlist)); - PetscCall(PetscObjectListDestroy(&((PetscObject)A)->olist)); PetscCall(PetscComposedQuantitiesDestroy((PetscObject)A)); /* copy C over to A */ @@ -487,10 +498,15 @@ PetscErrorCode MatHeaderMerge(Mat A, Mat *C) A->product = product; A->redundant = redundant; + /* Append the saved lists */ + PetscCall(PetscFunctionListDuplicate(qlist, &((PetscObject)A)->qlist)); + PetscCall(PetscObjectListDuplicate(olist, &((PetscObject)A)->olist)); + PetscCall(PetscFunctionListDestroy(&qlist)); + PetscCall(PetscObjectListDestroy(&olist)); + /* since these two are copied into A we do not want them destroyed in C */ ((PetscObject)*C)->qlist = NULL; ((PetscObject)*C)->olist = NULL; - PetscCall(PetscHeaderDestroy(C)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -670,7 +686,7 @@ PetscErrorCode MatSetPreallocationCOO_Basic(Mat A, PetscCount ncoo, PetscInt coo Level: beginner Notes: - The indices `coo_i` and `coo_j` may be modified within this function. The caller should not rely on them + The indices within `coo_i` and `coo_j` may be modified within this function. The caller should not rely on them having any specific value after this function returns. The arrays can be freed or reused immediately after this function returns. 
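The note added to MatSetSizes() in the gcreate.c hunk above deserves a concrete illustration: when the local sizes are given explicitly, the PetscLayout, and with it every ownership range, is determined before a single value is inserted. A minimal Fortran sketch under that assumption (the local size m is an illustrative value, and the usual petscmat module/finclude scaffolding is assumed):

```fortran
! Sketch only: explicit local sizes fix the layout up front.
      Mat            A
      PetscInt       m, rstart, rend
      PetscErrorCode ierr

      m = 4    ! rows and columns owned by this rank
      PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
      PetscCallA(MatSetSizes(A, m, m, PETSC_DETERMINE, PETSC_DETERMINE, ierr))
      PetscCallA(MatSetFromOptions(A, ierr))
      PetscCallA(MatSetUp(A, ierr))
      PetscCallA(MatGetOwnershipRange(A, rstart, rend, ierr))  ! rend - rstart == m
      PetscCallA(MatDestroy(A, ierr))
```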
diff --git a/src/mat/utils/veccreatematdense.c b/src/mat/utils/veccreatematdense.c index 3fbd450ee52..7d3828308e3 100644 --- a/src/mat/utils/veccreatematdense.c +++ b/src/mat/utils/veccreatematdense.c @@ -1,6 +1,6 @@ -#include +#include /*I "petscmat.h" I*/ -/*@C +/*@ MatCreateDenseFromVecType - Create a matrix that matches the type of a Vec. Collective diff --git a/src/snes/f90-mod/petscsnes.h b/src/snes/f90-mod/petscsnes.h index 93e6c1e4689..40c0c39a21b 100644 --- a/src/snes/f90-mod/petscsnes.h +++ b/src/snes/f90-mod/petscsnes.h @@ -3,16 +3,19 @@ ! #include "petsc/finclude/petscsnes.h" - type tSNES - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tSNES end type tSNES + SNES, parameter :: PETSC_NULL_SNES = tSNES(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SNES +#endif - type tPetscConvEst - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tPetscConvEst end type tPetscConvEst - - SNES, parameter :: PETSC_NULL_SNES = tSNES(0) PetscConvEst, parameter :: PETSC_NULL_CONVEST = tPetscConvEst(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_CONVEST +#endif ! ! Convergence flags ! @@ -34,15 +37,6 @@ PetscEnum, parameter :: SNES_DIVERGED_TR_DELTA = -11 PetscEnum, parameter :: SNES_CONVERGED_ITERATING = 0 ! -! SNESLineSearchReason -! - PetscEnum, parameter :: SNES_LINESEARCH_SUCCEEDED = 0 - PetscEnum, parameter :: SNES_LINESEARCH_FAILED_NANORINF = 1 - PetscEnum, parameter :: SNES_LINESEARCH_FAILED_DOMAIN = 2 - PetscEnum, parameter :: SNES_LINESEARCH_FAILED_REDUCT = 3 - PetscEnum, parameter :: SNES_LINESEARCH_FAILED_USER = 4 - PetscEnum, parameter :: SNES_LINESEARCH_FAILED_FUNCTION = 5 -! ! SNESNormSchedule ! PetscEnum, parameter :: SNES_NORM_DEFAULT = -1 @@ -52,6 +46,12 @@ PetscEnum, parameter :: SNES_NORM_FINAL_ONLY = 3 PetscEnum, parameter :: SNES_NORM_INITIAL_FINAL_ONLY = 4 ! +! SNESFunctionType +! + PetscEnum, parameter :: SNES_FUNCTION_DEFAULT = -1 + PetscEnum, parameter :: SNES_FUNCTION_UNPRECONDITIONED = 0 + PetscEnum, parameter :: SNES_FUNCTION_PRECONDITIONED = 1 +! ! Some PETSc Fortran functions that the user might pass as arguments ! 
external SNESCOMPUTEJACOBIANDEFAULT diff --git a/src/snes/f90-mod/petscsnes.h90 b/src/snes/f90-mod/petscsnes.h90 index adcb8da413b..92d014a5bcc 100644 --- a/src/snes/f90-mod/petscsnes.h90 +++ b/src/snes/f90-mod/petscsnes.h90 @@ -41,47 +41,4 @@ end subroutine end Interface #endif - Interface - subroutine SNESSetType(a,b,ierr) - import tSNES - SNES, intent(in) :: a - character(*), intent(in) :: b - PetscErrorCode, intent(out) :: ierr - end subroutine - end Interface - - Interface - subroutine SNESGetType(a,b,ierr) - import tSNES - SNES, intent(in) :: a - character(*), intent(out) :: b - PetscErrorCode, intent(out) :: ierr - end subroutine - end Interface - - Interface - subroutine SNESView(a,b,z) - import tSNES,tPetscViewer - SNES a - PetscViewer b - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine SNESSetOptionsPrefix(snes,prefix,ierr) - import tSNES - SNES, intent(in) :: snes - character(*), intent(in) :: prefix - PetscErrorCode, intent(out) :: ierr - end subroutine SNESSetOptionsPrefix - end Interface - - interface - subroutine SNESDestroy(a,z) - import tSNES - SNES a - PetscErrorCode z - end subroutine - end interface diff --git a/src/snes/f90-mod/petscsneslinesearch.h b/src/snes/f90-mod/petscsneslinesearch.h new file mode 100644 index 00000000000..806b300e5cf --- /dev/null +++ b/src/snes/f90-mod/petscsneslinesearch.h @@ -0,0 +1,19 @@ +! +! Used by petscsnesmod.F90 to create Fortran module file +! + + type, extends(tPetscObject) :: tSNESLineSearch + end type tSNESLineSearch + SNESLineSearch, parameter :: PETSC_NULL_SNES_LINESEARCH = tSNESLineSearch(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SNES_LINESEARCH +#endif +! +! SNESLineSearchReason +! + PetscEnum, parameter :: SNES_LINESEARCH_SUCCEEDED = 0 + PetscEnum, parameter :: SNES_LINESEARCH_FAILED_NANORINF = 1 + PetscEnum, parameter :: SNES_LINESEARCH_FAILED_DOMAIN = 2 + PetscEnum, parameter :: SNES_LINESEARCH_FAILED_REDUCT = 3 + PetscEnum, parameter :: SNES_LINESEARCH_FAILED_USER = 4 + PetscEnum, parameter :: SNES_LINESEARCH_FAILED_FUNCTION = 5 diff --git a/src/snes/f90-mod/petscsnesmod.F90 b/src/snes/f90-mod/petscsnesmod.F90 index 3d86c6ce67a..fd8b27de7f0 100644 --- a/src/snes/f90-mod/petscsnesmod.F90 +++ b/src/snes/f90-mod/petscsnesmod.F90 @@ -1,6 +1,7 @@ module petscsnesdefdummy use petsckspdef #include <../src/snes/f90-mod/petscsnes.h> +#include <../src/snes/f90-mod/petscsneslinesearch.h> #include <../src/snes/f90-mod/petscsnesfas.h> end module petscsnesdefdummy diff --git a/src/snes/impls/composite/snescomposite.c b/src/snes/impls/composite/snescomposite.c index 41af30de2e8..b828f2a6717 100644 --- a/src/snes/impls/composite/snescomposite.c +++ b/src/snes/impls/composite/snescomposite.c @@ -545,7 +545,7 @@ static PetscErrorCode SNESCompositeGetSNES_Composite(SNES snes, PetscInt n, SNES PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESCompositeSetType - Sets the type of composite preconditioner. 
Logically Collective @@ -571,7 +571,7 @@ PetscErrorCode SNESCompositeSetType(SNES snes, SNESCompositeType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESCompositeAddSNES - Adds another `SNES` to the `SNESCOMPOSITE` Collective diff --git a/src/snes/impls/fas/fasfunc.c b/src/snes/impls/fas/fasfunc.c index 6d2297fc1be..3be57fd64b9 100644 --- a/src/snes/impls/fas/fasfunc.c +++ b/src/snes/impls/fas/fasfunc.c @@ -318,7 +318,7 @@ PetscErrorCode SNESFASSetCycles(SNES snes, PetscInt cycles) PetscFunctionReturn(PETSC_SUCCESS); } -/*@ +/*@C SNESFASSetMonitor - Sets the method-specific cycle monitoring Logically Collective diff --git a/src/snes/impls/gs/gssecant.c b/src/snes/impls/gs/gssecant.c index 326d6927d6b..df4f4c4fb18 100644 --- a/src/snes/impls/gs/gssecant.c +++ b/src/snes/impls/gs/gssecant.c @@ -2,22 +2,22 @@ PETSC_EXTERN PetscErrorCode SNESComputeNGSDefaultSecant(SNES snes, Vec X, Vec B, void *ctx) { - SNES_NGS *gs = (SNES_NGS *)snes->data; - PetscInt i, j, k, ncolors; - DM dm; - PetscBool flg; - ISColoring coloring = gs->coloring; - MatColoring mc; - Vec W, G, F; - PetscScalar h = gs->h; - IS *coloris; - PetscScalar f, g, x, w, d; - PetscReal dxt, xt, ft, ft1 = 0; - const PetscInt *idx; - PetscInt size, s; - PetscReal atol, rtol, stol; - PetscInt its; - PetscErrorCode (*func)(SNES, Vec, Vec, void *); + SNES_NGS *gs = (SNES_NGS *)snes->data; + PetscInt i, j, k, ncolors; + DM dm; + PetscBool flg; + ISColoring coloring = gs->coloring; + MatColoring mc; + Vec W, G, F; + PetscScalar h = gs->h; + IS *coloris; + PetscScalar f, g, x, w, d; + PetscReal dxt, xt, ft, ft1 = 0; + const PetscInt *idx; + PetscInt size, s; + PetscReal atol, rtol, stol; + PetscInt its; + SNESFunctionFn *func; void *fctx; PetscBool mat = gs->secant_mat, equal, isdone, alldone; PetscScalar *xa, *wa; diff --git a/src/snes/impls/ls/ls.c b/src/snes/impls/ls/ls.c index 2fb916c317f..5a74a65f653 100644 --- a/src/snes/impls/ls/ls.c +++ b/src/snes/impls/ls/ls.c @@ -45,10 +45,10 @@ */ static PetscErrorCode SNESNEWTONLSCheckLocalMin_Private(SNES snes, Mat A, Vec F, PetscReal fnorm, PetscBool *ismin) { - PetscReal a1; - PetscBool hastranspose; - Vec W; - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); + PetscReal a1; + PetscBool hastranspose; + Vec W; + SNESObjectiveFn *objective; PetscFunctionBegin; *ismin = PETSC_FALSE; @@ -87,9 +87,9 @@ static PetscErrorCode SNESNEWTONLSCheckLocalMin_Private(SNES snes, Mat A, Vec F, */ static PetscErrorCode SNESNEWTONLSCheckResidual_Private(SNES snes, Mat A, Vec F, Vec X) { - PetscReal a1, a2; - PetscBool hastranspose; - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); + PetscReal a1, a2; + PetscBool hastranspose; + SNESObjectiveFn *objective; PetscFunctionBegin; PetscCall(MatHasOperation(A, MATOP_MULT_TRANSPOSE, &hastranspose)); diff --git a/src/snes/impls/ms/ms.c b/src/snes/impls/ms/ms.c index 42f875d1230..82cf12f2df1 100644 --- a/src/snes/impls/ms/ms.c +++ b/src/snes/impls/ms/ms.c @@ -157,7 +157,7 @@ PetscErrorCode SNESMSFinalizePackage(void) /*@C SNESMSRegister - register a multistage scheme for `SNESMS` - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + name - identifier for method @@ -470,7 +470,7 @@ static PetscErrorCode SNESMSSetType_MS(SNES snes, SNESMSType mstype) SETERRQ(PetscObjectComm((PetscObject)snes), PETSC_ERR_ARG_UNKNOWN_TYPE, "Could not find '%s'", mstype); } -/*@C +/*@ SNESMSGetType - Get the type of multistage smoother `SNESMS` Not Collective @@ -494,7 +494,7 @@ PetscErrorCode SNESMSGetType(SNES snes, 
SNESMSType *mstype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESMSSetType - Set the type of multistage smoother `SNESMS` Logically Collective @@ -534,7 +534,7 @@ static PetscErrorCode SNESMSSetDamping_MS(SNES snes, PetscReal damping) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESMSGetDamping - Get the damping parameter of `SNESMS` multistage scheme Not Collective @@ -558,7 +558,7 @@ PetscErrorCode SNESMSGetDamping(SNES snes, PetscReal *damping) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESMSSetDamping - Set the damping parameter for a `SNESMS` multistage scheme Logically Collective diff --git a/src/snes/impls/ngmres/ngmresfunc.c b/src/snes/impls/ngmres/ngmresfunc.c index ea6d1465e66..82506336aca 100644 --- a/src/snes/impls/ngmres/ngmresfunc.c +++ b/src/snes/impls/ngmres/ngmresfunc.c @@ -204,8 +204,8 @@ PetscErrorCode SNESNGMRESSelect_Private(SNES snes, PetscInt k_restart, Vec XM, V } } if (ngmres->monitor) { - PetscReal objT = *fnorm; - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); + PetscReal objT = *fnorm; + SNESObjectiveFn *objective; PetscCall(SNESGetObjective(snes, &objective, NULL)); if (objective) PetscCall(SNESComputeObjective(snes, X, &objT)); diff --git a/src/snes/impls/ngmres/snesngmres.c b/src/snes/impls/ngmres/snesngmres.c index 18d396563b7..c66c00131ff 100644 --- a/src/snes/impls/ngmres/snesngmres.c +++ b/src/snes/impls/ngmres/snesngmres.c @@ -131,24 +131,18 @@ PetscErrorCode SNESView_NGMRES(SNES snes, PetscViewer viewer) static PetscErrorCode SNESSolve_NGMRES(SNES snes) { - SNES_NGMRES *ngmres = (SNES_NGMRES *)snes->data; - /* present solution, residual, and preconditioned residual */ - Vec X, F, B, D, Y; - - /* candidate linear combination answers */ - Vec XA, FA, XM, FM; - - /* coefficients and RHS to the minimization problem */ - PetscReal fnorm, fMnorm, fAnorm; - PetscReal xnorm, xMnorm, xAnorm; - PetscReal ynorm, yMnorm, yAnorm; - PetscInt k, k_restart, l, ivec, restart_count = 0; - - /* support for objective functions minimization */ - PetscReal objmin, objM, objA, obj; - - /* solution selection data */ - PetscBool selectRestart; + SNES_NGMRES *ngmres = (SNES_NGMRES *)snes->data; + Vec X, F, B, D, Y; /* present solution, residual, and preconditioned residual */ + Vec XA, FA, XM, FM; /* candidate linear combination answers */ + PetscReal fnorm, fMnorm, fAnorm; /* coefficients and RHS to the minimization problem */ + PetscReal xnorm, xMnorm, xAnorm; + PetscReal ynorm, yMnorm, yAnorm; + PetscInt k, k_restart, l, ivec, restart_count = 0; + PetscReal objmin, objM, objA, obj; /* support for objective functions minimization */ + PetscBool selectRestart; /* solution selection data */ + SNESConvergedReason reason; + SNESLineSearchReason lssucceed; + SNESObjectiveFn *objective; /* These two variables are initialized to prevent compilers/analyzers from producing false warnings about these variables being passed to SNESNGMRESSelect_Private() without being set when SNES_NGMRES_RESTART_DIFFERENCE, the values are not used in the subroutines in that case @@ -156,11 +150,6 @@ static PetscErrorCode SNESSolve_NGMRES(SNES snes) */ PetscReal dnorm = 0.0, dminnorm = 0.0; - SNESConvergedReason reason; - SNESLineSearchReason lssucceed; - - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); - PetscFunctionBegin; PetscCheck(!snes->xl && !snes->xu && !snes->ops->computevariablebounds, PetscObjectComm((PetscObject)snes), PETSC_ERR_ARG_WRONGSTATE, "SNES solver %s does not support bounds", ((PetscObject)snes)->type_name); diff --git 
a/src/snes/impls/python/ftn-custom/makefile b/src/snes/impls/python/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/snes/impls/python/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/snes/impls/python/ftn-custom/zpythonsf.c b/src/snes/impls/python/ftn-custom/zpythonsf.c deleted file mode 100644 index 94e11d7035b..00000000000 --- a/src/snes/impls/python/ftn-custom/zpythonsf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscsnes.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define snespythonsettype_ SNESPYTHONSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define snespythonsettype_ snespythonsettype -#endif - -PETSC_EXTERN void snespythonsettype_(SNES *snes, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(name, len, t); - *ierr = SNESPythonSetType(*snes, t); - if (*ierr) return; - FREECHAR(name, t); -} diff --git a/src/snes/impls/python/pythonsnes.c b/src/snes/impls/python/pythonsnes.c index 0bcf9224306..a3be059999f 100644 --- a/src/snes/impls/python/pythonsnes.c +++ b/src/snes/impls/python/pythonsnes.c @@ -1,6 +1,6 @@ #include <petsc/private/snesimpl.h> /*I "petscsnes.h" I*/ -/*@C +/*@ SNESPythonSetType - Initialize a `SNES` object implemented in Python. Collective @@ -25,7 +25,7 @@ PetscErrorCode SNESPythonSetType(SNES snes, const char pyname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESPythonGetType - Get the type of a `SNES` object implemented in Python set with `SNESPythonSetType()` Not Collective diff --git a/src/snes/impls/tr/tr.c b/src/snes/impls/tr/tr.c index 35f6bc45553..b51966a1e64 100644 --- a/src/snes/impls/tr/tr.c +++ b/src/snes/impls/tr/tr.c @@ -35,15 +35,16 @@ static PetscErrorCode SNESTR_KSPConverged_Private(KSP ksp, PetscInt n, PetscReal PetscReal nrm; PetscFunctionBegin; - PetscCall((*ctx->convtest)(ksp, n, rnorm, reason, ctx->convctx)); - if (*reason) PetscCall(PetscInfo(snes, "Default or user provided convergence test KSP iterations=%" PetscInt_FMT ", rnorm=%g\n", n, (double)rnorm)); /* Determine norm of solution */ PetscCall(KSPBuildSolution(ksp, NULL, &x)); PetscCall(VecNorm(x, neP->norm, &nrm)); if (nrm >= neP->delta) { - PetscCall(PetscInfo(snes, "Ending linear iteration early, delta=%g, length=%g\n", (double)neP->delta, (double)nrm)); + PetscCall(PetscInfo(snes, "Ending linear iteration early due to exiting trust region, delta=%g, length=%g\n", (double)neP->delta, (double)nrm)); *reason = KSP_CONVERGED_STEP_LENGTH; + PetscFunctionReturn(PETSC_SUCCESS); } + PetscCall((*ctx->convtest)(ksp, n, rnorm, reason, ctx->convctx)); + if (*reason) PetscCall(PetscInfo(snes, "Default or user provided convergence test KSP iterations=%" PetscInt_FMT ", rnorm=%g\n", n, (double)rnorm)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -467,9 +468,8 @@ static PetscErrorCode SNESSolve_NEWTONTR(SNES snes) PetscBool clear_converged_test, rho_satisfied, has_objective; SNES_TR_KSPConverged_Ctx *ctx; void *convctx; - + SNESObjectiveFn *objective; PetscErrorCode (*convtest)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), (*convdestroy)(void *); - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); PetscFunctionBegin; PetscCall(SNESGetObjective(snes, &objective, NULL)); @@ -559,8 +559,6 @@ static PetscErrorCode SNESSolve_NEWTONTR(SNES snes) } // XXX PetscCall(SNESGetNPCFunction(snes, F, &fnorm)); - } else if
(snes->ops->update) { /* if update is present, recompute objective function and function norm */ - PetscCall(SNESComputeFunction(snes, X, F)); } /* Jacobian */ diff --git a/src/snes/interface/f90-custom/zsnesf90.c b/src/snes/interface/f90-custom/zsnesf90.c index 2cc8aa341ae..c2c140564e0 100644 --- a/src/snes/interface/f90-custom/zsnesf90.c +++ b/src/snes/interface/f90-custom/zsnesf90.c @@ -3,10 +3,8 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define snesgetconvergencehistoryf90_ SNESGETCONVERGENCEHISTORYF90 - #define snesdestroy_ SNESDESTROY #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define snesgetconvergencehistoryf90_ snesgetconvergencehistoryf90 - #define snesdestroy_ snesdestroy #endif PETSC_EXTERN void snesgetconvergencehistoryf90_(SNES *snes, F90Array1d *r, F90Array1d *fits, PetscInt *n, int *ierr PETSC_F90_2PTR_PROTO(ptrd1) PETSC_F90_2PTR_PROTO(ptrd2)) @@ -19,11 +17,3 @@ PETSC_EXTERN void snesgetconvergencehistoryf90_(SNES *snes, F90Array1d *r, F90Ar if (*ierr) return; *ierr = F90Array1dCreate(its, MPIU_INT, 1, *n, fits PETSC_F90_2PTR_PARAM(ptrd2)); } - -PETSC_EXTERN void snesdestroy_(SNES *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = SNESDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/snes/interface/ftn-custom/zsnesf.c b/src/snes/interface/ftn-custom/zsnesf.c index 0c3ab81c0c0..02b1cf69d8e 100644 --- a/src/snes/interface/ftn-custom/zsnesf.c +++ b/src/snes/interface/ftn-custom/zsnesf.c @@ -4,18 +4,15 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define snesconvergedreasonview_ SNESCONVERGEDREASONVIEW #define snessetpicard_ SNESSETPICARD - #define matmffdcomputejacobian_ MATMFFDCOMPUTEJACOBIAN #define snessolve_ SNESSOLVE #define snescomputejacobiandefault_ SNESCOMPUTEJACOBIANDEFAULT #define snescomputejacobiandefaultcolor_ SNESCOMPUTEJACOBIANDEFAULTCOLOR #define snessetjacobian_ SNESSETJACOBIAN #define snessetjacobian1_ SNESSETJACOBIAN1 #define snessetjacobian2_ SNESSETJACOBIAN2 - #define snesgetoptionsprefix_ SNESGETOPTIONSPREFIX - #define snesgettype_ SNESGETTYPE #define snessetfunction_ SNESSETFUNCTION + #define snessetobjective_ SNESSETOBJECTIVE #define snessetngs_ SNESSETNGS #define snessetupdate_ SNESSETUPDATE #define snesgetfunction_ SNESGETFUNCTION @@ -23,12 +20,8 @@ #define snessetconvergencetest_ SNESSETCONVERGENCETEST #define snesconvergeddefault_ SNESCONVERGEDDEFAULT #define snesconvergedskip_ SNESCONVERGEDSKIP - #define snesview_ SNESVIEW #define snesgetconvergencehistory_ SNESGETCONVERGENCEHISTORY #define snesgetjacobian_ SNESGETJACOBIAN - #define snessettype_ SNESSETTYPE - #define snesappendoptionsprefix_ SNESAPPENDOPTIONSPREFIX - #define snessetoptionsprefix_ SNESSETOPTIONSPREFIX #define snesmonitordefault_ SNESMONITORDEFAULT #define snesmonitorsolution_ SNESMONITORSOLUTION #define snesmonitorsolutionupdate_ SNESMONITORSOLUTIONUPDATE @@ -37,21 +30,17 @@ #define snesnewtontrsetpostcheck_ SNESNEWTONTRSETPOSTCHECK #define snesnewtontrdcsetprecheck_ SNESNEWTONTRDCSETPRECHECK #define snesnewtontrdcsetpostcheck_ SNESNEWTONTRDCSETPOSTCHECK - #define snesviewfromoptions_ SNESVIEWFROMOPTIONS - #define snesgetconvergedreasonstring_ SNESGETCONVERGEDREASONSTRING + #define matmffdcomputejacobian_ MATMFFDCOMPUTEJACOBIAN #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define snesconvergedreasonview_ snesconvergedreasonview #define snessetpicard_ snessetpicard - #define matmffdcomputejacobian_ matmffdcomputejacobian #define snessolve_ snessolve #define snescomputejacobiandefault_ 
snescomputejacobiandefault #define snescomputejacobiandefaultcolor_ snescomputejacobiandefaultcolor #define snessetjacobian_ snessetjacobian #define snessetjacobian1_ snessetjacobian1 #define snessetjacobian2_ snessetjacobian2 - #define snesgetoptionsprefix_ snesgetoptionsprefix - #define snesgettype_ snesgettype #define snessetfunction_ snessetfunction + #define snessetobjective_ snessetobjective #define snessetngs_ snessetngs #define snessetupdate_ snessetupdate #define snesgetfunction_ snesgetfunction @@ -59,12 +48,8 @@ #define snessetconvergencetest_ snessetconvergencetest #define snesconvergeddefault_ snesconvergeddefault #define snesconvergedskip_ snesconvergedskip - #define snesview_ snesview #define snesgetjacobian_ snesgetjacobian #define snesgetconvergencehistory_ snesgetconvergencehistory - #define snessettype_ snessettype - #define snesappendoptionsprefix_ snesappendoptionsprefix - #define snessetoptionsprefix_ snessetoptionsprefix #define snesmonitordefault_ snesmonitordefault #define snesmonitorsolution_ snesmonitorsolution #define snesmonitorsolutionupdate_ snesmonitorsolutionupdate @@ -73,12 +58,12 @@ #define snesnewtontrsetpostcheck_ snesnewtontrsetpostcheck #define snesnewtontrdcsetprecheck_ snesnewtontrdcsetprecheck #define snesnewtontrdcsetpostcheck_ snesnewtontrdcsetpostcheck - #define snesviewfromoptions_ snesviewfromoptions - #define snesgetconvergedreasonstring_ snesgetconvergedreasonstring + #define matmffdcomputejacobian_ matmffdcomputejacobian #endif static struct { PetscFortranCallbackId function; + PetscFortranCallbackId objective; PetscFortranCallbackId test; PetscFortranCallbackId destroy; PetscFortranCallbackId jacobian; @@ -90,6 +75,7 @@ static struct { PetscFortranCallbackId trpostcheck; #if defined(PETSC_HAVE_F90_2PTR_ARG) PetscFortranCallbackId function_pgiptr; + PetscFortranCallbackId objective_pgiptr; PetscFortranCallbackId trprecheck_pgiptr; PetscFortranCallbackId trpostcheck_pgiptr; #endif @@ -166,6 +152,15 @@ static PetscErrorCode oursnesfunction(SNES snes, Vec x, Vec f, void *ctx) PetscObjectUseFortranCallback(snes, _cb.function, (SNES *, Vec *, Vec *, void *, PetscErrorCode *PETSC_F90_2PTR_PROTO_NOVAR), (&snes, &x, &f, _ctx, &ierr PETSC_F90_2PTR_PARAM(ptr))); } +static PetscErrorCode oursnesobjective(SNES snes, Vec x, PetscReal *v, void *ctx) +{ +#if defined(PETSC_HAVE_F90_2PTR_ARG) + void *ptr; + PetscCall(PetscObjectGetFortranCallback((PetscObject)snes, PETSC_FORTRAN_CALLBACK_CLASS, _cb.objective_pgiptr, NULL, &ptr)); +#endif + PetscObjectUseFortranCallback(snes, _cb.objective, (SNES *, Vec *, PetscReal *, void *, PetscErrorCode *PETSC_F90_2PTR_PROTO_NOVAR), (&snes, &x, v, _ctx, &ierr PETSC_F90_2PTR_PARAM(ptr))); +} + static PetscErrorCode oursnestest(SNES snes, PetscInt it, PetscReal a, PetscReal d, PetscReal c, SNESConvergedReason *reason, void *ctx) { PetscObjectUseFortranCallback(snes, _cb.test, (SNES *, PetscInt *, PetscReal *, PetscReal *, PetscReal *, SNESConvergedReason *, void *, PetscErrorCode *), (&snes, &it, &a, &d, &c, reason, _ctx, &ierr)); @@ -199,23 +194,10 @@ static PetscErrorCode ourmondestroy(void **ctx) PetscObjectUseFortranCallback(snes, _cb.mondestroy, (void *, PetscErrorCode *), (_ctx, &ierr)); } -/* - snescomputejacobiandefault() and snescomputejacobiandefaultcolor() - These can be used directly from Fortran but are mostly so that - Fortran SNESSetJacobian() will properly handle the defaults being passed in. 
-*/ -PETSC_EXTERN void matmffdcomputejacobian_(SNES *snes, Vec *x, Mat *m, Mat *p, void *ctx, PetscErrorCode *ierr) -{ - *ierr = MatMFFDComputeJacobian(*snes, *x, *m, *p, ctx); -} -PETSC_EXTERN void snescomputejacobiandefault_(SNES *snes, Vec *x, Mat *m, Mat *p, void *ctx, PetscErrorCode *ierr) -{ - *ierr = SNESComputeJacobianDefault(*snes, *x, *m, *p, ctx); -} -PETSC_EXTERN void snescomputejacobiandefaultcolor_(SNES *snes, Vec *x, Mat *m, Mat *p, void *ctx, PetscErrorCode *ierr) -{ - *ierr = SNESComputeJacobianDefaultColor(*snes, *x, *m, *p, *(MatFDColoring *)ctx); -} +/* these are generated automatically by bfort */ +PETSC_EXTERN void snescomputejacobiandefault_(SNES *, Vec *, Mat *, Mat *, void *, PetscErrorCode *); +PETSC_EXTERN void snescomputejacobiandefaultcolor_(SNES *, Vec *, Mat *, Mat *, void *, PetscErrorCode *); +PETSC_EXTERN void matmffdcomputejacobian_(SNES *, Vec *, Mat *, Mat *, void *, PetscErrorCode *); PETSC_EXTERN void snessetjacobian_(SNES *snes, Mat *A, Mat *B, void (*func)(SNES *, Vec *, Mat *, Mat *, void *, PetscErrorCode *), void *ctx, PetscErrorCode *ierr) { @@ -269,32 +251,12 @@ PETSC_EXTERN void snessetpicard_(SNES *snes, Vec *r, void (*func)(SNES *, Vec *, if (!*ierr) *ierr = SNESSetPicard(*snes, *r, oursnespicardfunction, *A, *B, oursnespicardjacobian, NULL); } -PETSC_EXTERN void snesgetoptionsprefix_(SNES *snes, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = SNESGetOptionsPrefix(*snes, &tname); - *ierr = PetscStrncpy(prefix, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, prefix, len); -} - -PETSC_EXTERN void snesgettype_(SNES *snes, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = SNESGetType(*snes, &tname); - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - /* These are not usually called from Fortran but allow Fortran users to transparently set these monitors from .F code */ -PETSC_EXTERN void snessetfunction_(SNES *snes, Vec *r, void (*func)(SNES *, Vec *, Vec *, void *, PetscErrorCode *), void *ctx, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(ptr)) +PETSC_EXTERN void snessetfunction_(SNES *snes, Vec *r, SNESFunctionFn func, void *ctx, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(ptr)) { *ierr = PetscObjectSetFortranCallback((PetscObject)*snes, PETSC_FORTRAN_CALLBACK_CLASS, &_cb.function, (PetscVoidFn *)func, ctx); if (*ierr) return; @@ -305,6 +267,17 @@ PETSC_EXTERN void snessetfunction_(SNES *snes, Vec *r, void (*func)(SNES *, Vec *ierr = SNESSetFunction(*snes, *r, oursnesfunction, NULL); } +PETSC_EXTERN void snessetobjective_(SNES *snes, void (*func)(SNES *, Vec *, PetscReal *, void *, PetscErrorCode *), void *ctx, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(ptr)) +{ + *ierr = PetscObjectSetFortranCallback((PetscObject)*snes, PETSC_FORTRAN_CALLBACK_CLASS, &_cb.objective, (PetscVoidFn *)func, ctx); + if (*ierr) return; +#if defined(PETSC_HAVE_F90_2PTR_ARG) + *ierr = PetscObjectSetFortranCallback((PetscObject)*snes, PETSC_FORTRAN_CALLBACK_CLASS, &_cb.objective_pgiptr, NULL, ptr); + if (*ierr) return; +#endif + *ierr = SNESSetObjective(*snes, oursnesobjective, NULL); +} + PETSC_EXTERN void snessetngs_(SNES *snes, void (*func)(SNES *, Vec *, Vec *, void *, PetscErrorCode *), void *ctx, PetscErrorCode *ierr) { *ierr = PetscObjectSetFortranCallback((PetscObject)*snes, PETSC_FORTRAN_CALLBACK_CLASS, &_cb.ngs, (PetscVoidFn *)func, ctx); @@ -319,7 +292,7 @@ PETSC_EXTERN void snessetupdate_(SNES 
*snes, void (*func)(SNES *, PetscInt *, Pe } /* the func argument is ignored */ -PETSC_EXTERN void snesgetfunction_(SNES *snes, Vec *r, void (*func)(SNES, Vec, Vec, void *), void **ctx, PetscErrorCode *ierr) +PETSC_EXTERN void snesgetfunction_(SNES *snes, Vec *r, SNESFunctionFn func, void **ctx, PetscErrorCode *ierr) { CHKFORTRANNULLOBJECT(r); *ierr = SNESGetFunction(*snes, r, NULL, NULL); @@ -360,13 +333,6 @@ PETSC_EXTERN void snessetconvergencetest_(SNES *snes, void (*func)(SNES *, Petsc } } -PETSC_EXTERN void snesview_(SNES *snes, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = SNESView(*snes, v); -} - /* func is currently ignored from Fortran */ PETSC_EXTERN void snesgetjacobian_(SNES *snes, Mat *A, Mat *B, int *func, void **ctx, PetscErrorCode *ierr) { @@ -383,36 +349,6 @@ PETSC_EXTERN void snesgetconvergencehistory_(SNES *snes, PetscInt *na, PetscErro *ierr = SNESGetConvergenceHistory(*snes, NULL, NULL, na); } -PETSC_EXTERN void snessettype_(SNES *snes, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = SNESSetType(*snes, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void snesappendoptionsprefix_(SNES *snes, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = SNESAppendOptionsPrefix(*snes, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void snessetoptionsprefix_(SNES *snes, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = SNESSetOptionsPrefix(*snes, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - PETSC_EXTERN void snesmonitordefault_(SNES *snes, PetscInt *its, PetscReal *fgnorm, PetscViewerAndFormat **dummy, PetscErrorCode *ierr) { *ierr = SNESMonitorDefault(*snes, *its, *fgnorm, *dummy); @@ -445,30 +381,3 @@ PETSC_EXTERN void snesmonitorset_(SNES *snes, void (*func)(SNES *, PetscInt *, P *ierr = SNESMonitorSet(*snes, oursnesmonitor, *snes, ourmondestroy); } } - -PETSC_EXTERN void snesviewfromoptions_(SNES *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = SNESViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void snesconvergedreasonview_(SNES *snes, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = SNESConvergedReasonView(*snes, v); -} - -PETSC_EXTERN void snesgetconvergedreasonstring_(SNES *snes, char *strreason, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tstrreason; - *ierr = SNESGetConvergedReasonString(*snes, &tstrreason); - *ierr = PetscStrncpy(strreason, tstrreason, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, strreason, len); -} diff --git a/src/snes/interface/noise/snesmfj2.c b/src/snes/interface/noise/snesmfj2.c index 59d6d296cb6..6b078e8f187 100644 --- a/src/snes/interface/noise/snesmfj2.c +++ b/src/snes/interface/noise/snesmfj2.c @@ -147,7 +147,7 @@ static PetscErrorCode SNESMatrixFreeMult2_Private(Mat mat, Vec a, Vec y) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatCreateSNESMFMore - Creates a matrix-free matrix context for use with a `SNES` solver that uses the More method to compute an optimal h based on the noise of the function. 
This matrix can be used as the Jacobian argument for the routine `SNESSetJacobian()`. @@ -259,7 +259,7 @@ PetscErrorCode MatCreateSNESMFMore(SNES snes, Vec x, Mat *J) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ MatSNESMFMoreSetParameters - Sets the parameters for the approximation of matrix-vector products using finite differences, see `MatCreateSNESMFMore()` diff --git a/src/snes/interface/snes.c b/src/snes/interface/snes.c index e8c099a0170..f69e6fbe887 100644 --- a/src/snes/interface/snes.c +++ b/src/snes/interface/snes.c @@ -271,7 +271,7 @@ PetscErrorCode SNESGetJacobianDomainError(SNES snes, PetscBool *domainerror) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESLoad - Loads a `SNES` that has been stored in `PETSCVIEWERBINARY` with `SNESView()`. Collective @@ -321,7 +321,7 @@ PetscErrorCode SNESLoad(SNES snes, PetscViewer viewer) #include <petscviewersaws.h> #endif -/*@C +/*@ SNESViewFromOptions - View a `SNES` based on values in the options database Collective @@ -345,7 +345,7 @@ PetscErrorCode SNESViewFromOptions(SNES A, PetscObject obj, const char name[]) PETSC_EXTERN PetscErrorCode SNESComputeJacobian_DMDA(SNES, Vec, Mat, Mat, void *); -/*@C +/*@ SNESView - Prints or visualizes the `SNES` data structure. Collective @@ -681,6 +681,7 @@ static PetscErrorCode DMCoarsenHook_SNESVecSol(DM dm, DM dmc, void *ctx) static PetscErrorCode KSPComputeOperators_SNES(KSP ksp, Mat A, Mat B, void *ctx) { SNES snes = (SNES)ctx; + DMSNES sdm; Vec X, Xnamed = NULL; DM dmsave; void *ctxsave; @@ -690,21 +691,37 @@ static PetscErrorCode KSPComputeOperators_SNES(KSP ksp, Mat A, Mat B, void *ctx) dmsave = snes->dm; PetscCall(KSPGetDM(ksp, &snes->dm)); if (dmsave == snes->dm) X = snes->vec_sol; /* We are on the finest level */ - else { /* We are on a coarser level, this vec was initialized using a DM restrict hook */ PetscCall(DMGetNamedGlobalVector(snes->dm, "SNESVecSol", &Xnamed)); + else { + PetscBool has; + + /* We are on a coarser level, this vec was initialized using a DM restrict hook */ + PetscCall(DMHasNamedGlobalVector(snes->dm, "SNESVecSol", &has)); + PetscCheck(has, PetscObjectComm((PetscObject)snes->dm), PETSC_ERR_PLIB, "Missing SNESVecSol"); + PetscCall(DMGetNamedGlobalVector(snes->dm, "SNESVecSol", &Xnamed)); X = Xnamed; PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, &ctxsave)); /* If the DM's don't match up, the MatFDColoring context needed for the jacobian won't match up either -- fixit. */ if (jac == SNESComputeJacobianDefaultColor) PetscCall(SNESSetJacobian(snes, NULL, NULL, SNESComputeJacobianDefaultColor, NULL)); } - /* Make sure KSP DM has the Jacobian computation routine */ - { - DMSNES sdm; - PetscCall(DMGetDMSNES(snes->dm, &sdm)); - if (!sdm->ops->computejacobian) PetscCall(DMCopyDMSNES(dmsave, snes->dm)); - } /* Compute the operators */ + PetscCall(DMGetDMSNES(snes->dm, &sdm)); + if (Xnamed && sdm->ops->computefunction) { + /* The SNES contract with the user is that ComputeFunction is always called before ComputeJacobian. + We make sure of this here.
Disable affine shift since it is for the finest level */ + Vec F, saverhs = snes->vec_rhs; + + snes->vec_rhs = NULL; + PetscCall(DMGetGlobalVector(snes->dm, &F)); + PetscCall(SNESComputeFunction(snes, X, F)); + PetscCall(DMRestoreGlobalVector(snes->dm, &F)); + snes->vec_rhs = saverhs; + snes->nfuncs--; /* Do not log coarser level evaluations */ + } + /* Make sure KSP DM has the Jacobian computation routine */ + if (!sdm->ops->computejacobian) PetscCall(DMCopyDMSNES(dmsave, snes->dm)); PetscCall(SNESComputeJacobian(snes, X, A, B)); + /* Put the previous context back */ if (snes->dm != dmsave && jac == SNESComputeJacobianDefaultColor) PetscCall(SNESSetJacobian(snes, NULL, NULL, jac, ctxsave)); @@ -1742,11 +1759,9 @@ PetscErrorCode SNESCreate(MPI_Comm comm, SNES *outsnes) PetscFunctionBegin; PetscAssertPointer(outsnes, 2); - *outsnes = NULL; PetscCall(SNESInitializePackage()); PetscCall(PetscHeaderCreate(snes, SNES_CLASSID, "SNES", "Nonlinear solver", "SNES", comm, SNESDestroy, SNESView)); - snes->ops->converged = SNESConvergedDefault; snes->usesksp = PETSC_TRUE; snes->tolerancesset = PETSC_FALSE; @@ -2064,7 +2079,7 @@ PetscErrorCode SNESGetSolutionNorm(SNES snes, PetscReal *xnorm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESSetFunctionType - Sets the `SNESFunctionType` of the `SNES` method. @@ -2094,7 +2109,7 @@ PetscErrorCode SNESSetFunctionType(SNES snes, SNESFunctionType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESGetFunctionType - Gets the `SNESFunctionType` used in convergence and monitoring set with `SNESSetFunctionType()` of the SNES method. @@ -3390,7 +3405,7 @@ PetscErrorCode SNESConvergedReasonViewCancel(SNES snes) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESDestroy - Destroys the nonlinear solver context that was created with `SNESCreate()`. @@ -4393,31 +4408,33 @@ PetscErrorCode SNESGetConvergenceHistory(SNES snes, PetscReal *a[], PetscInt *it /*@C SNESSetUpdate - Sets the general-purpose update function called at the beginning of every iteration of the nonlinear solve. Specifically - it is called just before the Jacobian is "evaluated". + it is called just before the Jacobian is "evaluated" and after the function + evaluation. Logically Collective Input Parameters: + snes - The nonlinear solver context -- func - The function - - Calling sequence of `func`: -+ snes - the nonlinear solver context -- step - The current step of the iteration +- func - The update function; for calling sequence see `SNESUpdateFn` Level: advanced Notes: This is NOT what one uses to update the ghost points before a function evaluation, that should be done at the beginning of your function provided to `SNESSetFunction()`, or `SNESSetPicard()` - This is not used by most users. + This is not used by most users, and it is intended to provide a general hook that is run + right before the direction step is computed. + Users are free to modify the current residual vector, + the current linearization point, or any other vector associated with the specific solver used. + If such modifications take place, it is the user's responsibility to update all the relevant + vectors. There are a variety of function hooks one may set that are called at different stages of the nonlinear solution process, see the functions listed below.
.seealso: [](ch_snes), `SNES`, `SNESSolve()`, `SNESSetJacobian()`, `SNESLineSearchSetPreCheck()`, `SNESLineSearchSetPostCheck()`, `SNESNewtonTRSetPreCheck()`, `SNESNewtonTRSetPostCheck()`, - `SNESMonitorSet()`, `SNESSetDivergenceTest()` + `SNESMonitorSet()` @*/ -PetscErrorCode SNESSetUpdate(SNES snes, PetscErrorCode (*func)(SNES snes, PetscInt step)) +PetscErrorCode SNESSetUpdate(SNES snes, SNESUpdateFn *func) { PetscFunctionBegin; PetscValidHeaderSpecific(snes, SNES_CLASSID, 1); @@ -4425,7 +4442,7 @@ PetscErrorCode SNESSetUpdate(SNES snes, PetscErrorCode (*func)(SNES snes, PetscI PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESConvergedReasonView - Displays the reason a `SNES` solve converged or diverged to a viewer Collective @@ -4459,7 +4476,7 @@ PetscErrorCode SNESConvergedReasonView(SNES snes, PetscViewer viewer) PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii)); if (isAscii) { PetscCall(PetscViewerGetFormat(viewer, &format)); - PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)snes)->tablevel)); + PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)snes)->tablevel + 1)); if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { DM dm; Vec u; @@ -4493,7 +4510,7 @@ PetscErrorCode SNESConvergedReasonView(SNES snes, PetscViewer viewer) PetscCall(PetscViewerASCIIPrintf(viewer, "Nonlinear solve did not converge due to %s iterations %" PetscInt_FMT "\n", SNESConvergedReasons[snes->reason], snes->iter)); } } - PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)snes)->tablevel)); + PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)snes)->tablevel + 1)); } PetscFunctionReturn(PETSC_SUCCESS); } @@ -4783,7 +4800,7 @@ PetscErrorCode SNESSolve(SNES snes, Vec b, Vec x) /* --------- Internal routines for SNES Package --------- */ -/*@C +/*@ SNESSetType - Sets the method for the nonlinear solver. Collective @@ -4856,7 +4873,7 @@ PetscErrorCode SNESSetType(SNES snes, SNESType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESGetType - Gets the `SNES` method type and name (as a string). Not Collective @@ -5028,7 +5045,7 @@ PetscErrorCode SNESGetNGS(SNES snes, SNESNGSFn **f, void **ctx) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESSetOptionsPrefix - Sets the prefix used for searching for all `SNES` options in the database. @@ -5060,7 +5077,7 @@ PetscErrorCode SNESSetOptionsPrefix(SNES snes, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESAppendOptionsPrefix - Appends to the prefix used for searching for all `SNES` options in the database. @@ -5092,7 +5109,7 @@ PetscErrorCode SNESAppendOptionsPrefix(SNES snes, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESGetOptionsPrefix - Gets the prefix used for searching for all `SNES` options in the database. diff --git a/src/snes/interface/snesj.c b/src/snes/interface/snesj.c index b51f8d83fa9..bd7926027bd 100644 --- a/src/snes/interface/snesj.c +++ b/src/snes/interface/snesj.c @@ -2,7 +2,7 @@ #include /* for Vec->ops->setvalues */ #include -/*@C +/*@ SNESComputeJacobianDefault - Computes the Jacobian using finite differences. 
Collective diff --git a/src/snes/interface/snesj2.c b/src/snes/interface/snesj2.c index 7ff7b2044df..47105b5be31 100644 --- a/src/snes/interface/snesj2.c +++ b/src/snes/interface/snesj2.c @@ -14,7 +14,7 @@ static PetscErrorCode SNESComputeMFFunctionCtx(void *snes, Vec x, Vec f, void *c return SNESComputeMFFunction((SNES)snes, x, f); } -/*@C +/*@ SNESComputeJacobianDefaultColor - Computes the Jacobian using finite differences and coloring to exploit matrix sparsity. @@ -116,7 +116,7 @@ PetscErrorCode SNESComputeJacobianDefaultColor(SNES snes, Vec x1, Mat J, Mat B, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESPruneJacobianColor - Remove nondiagonal zeros in the Jacobian matrix and update the `MatMFFD` coloring information based on the new nonzero structure Collective diff --git a/src/snes/interface/snesob.c b/src/snes/interface/snesob.c index 70745b691fc..57ad704e375 100644 --- a/src/snes/interface/snesob.c +++ b/src/snes/interface/snesob.c @@ -1,4 +1,4 @@ -#include <petsc/private/snesimpl.h> +#include <petsc/private/snesimpl.h> /*I "petscsnes.h" I*/ /*@C SNESSetObjective - Sets the objective function minimized by some of the `SNES` linesearch methods, used instead of the 2-norm of the residual in the line search @@ -60,7 +60,7 @@ PetscErrorCode SNESGetObjective(SNES snes, SNESObjectiveFn **obj, void **ctx) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESComputeObjective - Computes the objective function that has been provided by `SNESSetObjective()` Collective diff --git a/src/snes/interface/snesut.c b/src/snes/interface/snesut.c index 95347441eb7..3fb2e22813e 100644 --- a/src/snes/interface/snesut.c +++ b/src/snes/interface/snesut.c @@ -309,9 +309,9 @@ PetscErrorCode SNESMonitorDefault(SNES snes, PetscInt its, PetscReal fgnorm, Pet if (isascii) { PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)snes)->tablevel)); if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { - Vec dx; - PetscReal upnorm; - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); + Vec dx; + PetscReal upnorm; + SNESObjectiveFn *objective; PetscCall(SNESGetSolutionUpdate(snes, &dx)); PetscCall(VecNorm(dx, NORM_2, &upnorm)); @@ -813,7 +813,7 @@ PetscErrorCode SNESConvergedSkip(SNES snes, PetscInt it, PetscReal xnorm, PetscR PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESSetWorkVecs - Allocates a number of work vectors to be used internally by `SNES` solvers Input Parameters: diff --git a/src/snes/linesearch/impls/basic/linesearchbasic.c b/src/snes/linesearch/impls/basic/linesearchbasic.c index 60d118aec95..76dd14db86b 100644 --- a/src/snes/linesearch/impls/basic/linesearchbasic.c +++ b/src/snes/linesearch/impls/basic/linesearchbasic.c @@ -66,12 +66,12 @@ static PetscErrorCode SNESLineSearchApply_Basic(SNESLineSearch linesearch) well-behaved problems. Also named as `SNESLINESEARCHNONE` Options Database Keys: -+ -snes_linesearch_damping - search vector is scaled by this amount, default is 1.0 -- -snes_linesearch_norms - whether to compute norms or not, default is true (SNESLineSearchSetComputeNorms()) ++ -snes_linesearch_damping - search vector is scaled by this amount on entry to the line search, default is 1.0 +- -snes_linesearch_norms - whether to compute norms or not, default is true (`SNESLineSearchSetComputeNorms()`) Note: For methods with ill-scaled updates (`SNESNRICHARDSON`, `SNESNCG`), a small - damping parameter may yield satisfactory but slow convergence despite + damping parameter may yield satisfactory, but slow convergence, despite the lack of the line search.
Level: advanced diff --git a/src/snes/linesearch/impls/bt/linesearchbt.c b/src/snes/linesearch/impls/bt/linesearchbt.c index 9fb465499fa..72f42840061 100644 --- a/src/snes/linesearch/impls/bt/linesearchbt.c +++ b/src/snes/linesearch/impls/bt/linesearchbt.c @@ -51,6 +51,7 @@ PetscErrorCode SNESLineSearchBTGetAlpha(SNESLineSearch linesearch, PetscReal *al static PetscErrorCode SNESLineSearchApply_BT(SNESLineSearch linesearch) { + SNESLineSearch_BT *bt = (SNESLineSearch_BT *)linesearch->data; PetscBool changed_y, changed_w; Vec X, F, Y, W, G; SNES snes; @@ -61,10 +62,9 @@ static PetscErrorCode SNESLineSearchApply_BT(SNESLineSearch linesearch) PetscReal g, gprev; PetscViewer monitor; PetscInt max_its, count; - SNESLineSearch_BT *bt = (SNESLineSearch_BT *)linesearch->data; Mat jac; + SNESObjectiveFn *objective; const char *const ordStr[] = {"Linear", "Quadratic", "Cubic"}; - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); PetscFunctionBegin; PetscCall(SNESLineSearchGetVecs(linesearch, &X, &F, &Y, &W, &G)); @@ -395,14 +395,14 @@ static PetscErrorCode SNESLineSearchSetFromOptions_BT(SNESLineSearch linesearch, This line search finds the minimum of a polynomial fitting of the L2 norm of the function or the objective function if it is provided with `SNESSetObjective()`. If this fit does not satisfy the conditions for progress, the interval shrinks - and the fit is reattempted at most max_it times or until lambda is below minlambda. + and the fit is reattempted at most `max_it` times or until $\lambda$ is below `minlambda`. Options Database Keys: + -snes_linesearch_alpha <1e\-4> - slope descent parameter -. -snes_linesearch_damping <1.0> - initial step length +. -snes_linesearch_damping <1.0> - scaling of initial step length on entry to the line search . -snes_linesearch_maxstep - if the length the initial step is larger than this then the step is scaled back to be of this length at the beginning of the line search -. -snes_linesearch_max_it <40> - maximum number of shrinking step +. -snes_linesearch_max_it <40> - maximum number of shrinking steps . -snes_linesearch_minlambda <1e\-12> - minimum step length allowed - -snes_linesearch_order <1,2,3> - order of the approximation. With order 1, it performs a simple backtracking without any curve fitting diff --git a/src/snes/linesearch/impls/cp/linesearchcp.c b/src/snes/linesearch/impls/cp/linesearchcp.c index 86424c6b308..5865a3d8550 100644 --- a/src/snes/linesearch/impls/cp/linesearchcp.c +++ b/src/snes/linesearch/impls/cp/linesearchcp.c @@ -120,14 +120,14 @@ static PetscErrorCode SNESLineSearchApply_CP(SNESLineSearch linesearch) /*MC SNESLINESEARCHCP - Critical point line search. This line search assumes that there exists some - artificial G(x) for which the `SNESFunction` F(x) = grad G(x). Therefore, this line search seeks - to find roots of dot(F, Y) via a secant method. + artificial $G(x)$ for which the `SNESFunction` $ F(x) = grad G(x)$. Therefore, this line search seeks + to find roots of $ F^T Y$ via a secant method. Options Database Keys: + -snes_linesearch_minlambda - the minimum acceptable lambda -. -snes_linesearch_maxstep - the algorithm insures that a step length is never longer than this value -. -snes_linesearch_damping - initial trial step length is scaled by this factor, default is 1.0 -- -snes_linesearch_max_it - the maximum number of secant steps performed. +. -snes_linesearch_maxstep - the algorithm insures that a step length is never longer than this value +. 
-snes_linesearch_damping - initial trial step length is scaled by this factor on entry to the line search, default is 1.0 +- -snes_linesearch_max_it - the maximum number of secant steps performed. Level: advanced diff --git a/src/snes/linesearch/impls/l2/linesearchl2.c b/src/snes/linesearch/impls/l2/linesearchl2.c index 7cac60d9713..80d21e8fc53 100644 --- a/src/snes/linesearch/impls/l2/linesearchl2.c +++ b/src/snes/linesearch/impls/l2/linesearchl2.c @@ -3,22 +3,22 @@ static PetscErrorCode SNESLineSearchApply_L2(SNESLineSearch linesearch) { - PetscBool changed_y, changed_w; - Vec X; - Vec F; - Vec Y; - Vec W; - SNES snes; - PetscReal gnorm; - PetscReal ynorm; - PetscReal xnorm; - PetscReal steptol, maxstep, rtol, atol, ltol; - PetscViewer monitor; - PetscReal lambda, lambda_old, lambda_mid, lambda_update, delLambda; - PetscReal fnrm, fnrm_old, fnrm_mid; - PetscReal delFnrm, delFnrm_old, del2Fnrm; - PetscInt i, max_its; - PetscErrorCode (*objective)(SNES, Vec, PetscReal *, void *); + PetscBool changed_y, changed_w; + Vec X; + Vec F; + Vec Y; + Vec W; + SNES snes; + PetscReal gnorm; + PetscReal ynorm; + PetscReal xnorm; + PetscReal steptol, maxstep, rtol, atol, ltol; + PetscViewer monitor; + PetscReal lambda, lambda_old, lambda_mid, lambda_update, delLambda; + PetscReal fnrm, fnrm_old, fnrm_mid; + PetscReal delFnrm, delFnrm_old, del2Fnrm; + PetscInt i, max_its; + SNESObjectiveFn *objective; PetscFunctionBegin; PetscCall(SNESLineSearchGetVecs(linesearch, &X, &F, &Y, &W, NULL)); @@ -153,23 +153,25 @@ static PetscErrorCode SNESLineSearchApply_L2(SNESLineSearch linesearch) /*MC SNESLINESEARCHL2 - Secant search in the L2 norm of the function or the objective function, if it is provided with `SNESSetObjective()`. - Attempts to solve min_lambda f(x + lambda y) using the secant method with the initial bracketing of lambda between [0,damping]. Differences of f() - are used to approximate the first and second derivative of f() with respect to lambda, f'() and f''(). The secant method is run for maxit iterations. + Attempts to solve $ \min_{\lambda} f(x + \lambda y) $ using the secant method with the initial bracketing of $ \lambda $ between [0,damping]. + Differences of $f()$ are used to approximate the first and second derivative of $f()$ with respect to + $\lambda$, $f'()$ and $f''()$. The secant method is run for `maxit` iterations. - When an objective function is provided f(w) is the objective function otherwise f(w) = ||F(w)||^2. x is the current step and y is the search direction. + When an objective function is provided $f(w)$ is the objective function otherwise $f(w) = ||F(w)||^2$. + $x$ is the current step and $y$ is the search direction. This has no checks on whether the secant method is actually converging. Options Database Keys: -+ -snes_linesearch_max_it - maximum number of iterations, default is 1 -. -snes_linesearch_maxstep - the algorithm insures that a step length is never longer than this value -. -snes_linesearch_damping - initial step is scaled back by this factor, default is 1.0 ++ -snes_linesearch_max_it - maximum number of iterations, default is 1 +. -snes_linesearch_maxstep - the algorithm insures that a step length is never longer than this value +. 
-snes_linesearch_damping - initial step is scaled back by this factor, default is 1.0 - -snes_linesearch_minlambda - minimum allowable lambda Level: advanced Developer Note: - A better name for this method might be `SNESLINESEARCHSECANT`, L2 is not descriptive + A better name for this method might be `SNESLINESEARCHSECANT`, L2 is not descriptive .seealso: [](ch_snes), `SNESLINESEARCHBT`, `SNESLINESEARCHCP`, `SNESLineSearch`, `SNESLineSearchType`, `SNESLineSearchCreate()`, `SNESLineSearchSetType()` M*/ diff --git a/src/snes/linesearch/interface/ftn-custom/zlinesearchf.c b/src/snes/linesearch/interface/ftn-custom/zlinesearchf.c index 0c85586c432..451a272cbfe 100644 --- a/src/snes/linesearch/interface/ftn-custom/zlinesearchf.c +++ b/src/snes/linesearch/interface/ftn-custom/zlinesearchf.c @@ -2,15 +2,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define sneslinesearchgettype_ SNESLINESEARCHGETTYPE - #define sneslinesearchsettype_ SNESLINESEARCHSETTYPE #define sneslinesearchsetprecheck_ SNESLINESEARCHSETPRECHECK #define sneslinesearchgetprecheck_ SNESLINESEARCHGETPRECHECK #define sneslinesearchsetpostcheck_ SNESLINESEARCHSETPOSTCHECK #define sneslinesearchgetpostcheck_ SNESLINESEARCHGETPOSTCHECK #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define sneslinesearchgettype_ sneslinesearchgettype - #define sneslinesearchsettype_ sneslinesearchsettype #define sneslinesearchsetprecheck_ sneslinesearchsetprecheck #define sneslinesearchgetprecheck_ sneslinesearchgetprecheck #define sneslinesearchsetpostcheck_ sneslinesearchsetpostcheck @@ -34,26 +30,6 @@ static PetscErrorCode oursneslinesearchpostcheck(SNESLineSearch linesearch, Vec PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN void sneslinesearchgettype_(SNESLineSearch *linesearch, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = SNESLineSearchGetType(*linesearch, &tname); - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void sneslinesearchsettype_(SNESLineSearch *linesearch, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = SNESLineSearchSetType(*linesearch, t); - if (*ierr) return; - FREECHAR(type, t); -} - PETSC_EXTERN void sneslinesearchsetprecheck_(SNESLineSearch *linesearch, void (*func)(SNESLineSearch *, Vec *, Vec *, PetscBool *, PetscErrorCode *), void *ctx, PetscErrorCode *ierr) { PetscObjectAllocateFortranPointers(*linesearch, 3); diff --git a/src/snes/linesearch/interface/linesearch.c b/src/snes/linesearch/interface/linesearch.c index d1e7ed7b087..293bdf84aef 100644 --- a/src/snes/linesearch/interface/linesearch.c +++ b/src/snes/linesearch/interface/linesearch.c @@ -179,10 +179,8 @@ PetscErrorCode SNESLineSearchCreate(MPI_Comm comm, SNESLineSearch *outlinesearch PetscFunctionBegin; PetscAssertPointer(outlinesearch, 2); PetscCall(SNESInitializePackage()); - *outlinesearch = NULL; PetscCall(PetscHeaderCreate(linesearch, SNESLINESEARCH_CLASSID, "SNESLineSearch", "Linesearch", "SNESLineSearch", comm, SNESLineSearchDestroy, SNESLineSearchView)); - linesearch->vec_sol_new = NULL; linesearch->vec_func_new = NULL; linesearch->vec_sol = NULL; @@ -920,7 +918,7 @@ PetscErrorCode SNESLineSearchView(SNESLineSearch linesearch, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESLineSearchGetType - Gets the `SNESLinesearchType` of a `SNESLineSearch` Logically Collective @@ -944,7 +942,7 @@ PetscErrorCode 
SNESLineSearchGetType(SNESLineSearch linesearch, SNESLineSearchTy PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESLineSearchSetType - Sets the `SNESLinesearchType` of a `SNESLineSearch` Logically Collective @@ -1547,7 +1545,7 @@ PetscErrorCode SNESLineSearchSetVecs(SNESLineSearch linesearch, Vec X, Vec F, Ve PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESLineSearchAppendOptionsPrefix - Appends to the prefix used for searching for all `SNESLineSearch` options in the database. @@ -1573,7 +1571,7 @@ PetscErrorCode SNESLineSearchAppendOptionsPrefix(SNESLineSearch linesearch, cons PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ SNESLineSearchGetOptionsPrefix - Gets the prefix used for searching for all SNESLineSearch options in the database. @@ -1652,6 +1650,8 @@ PetscErrorCode SNESLineSearchGetReason(SNESLineSearch linesearch, SNESLineSearch /*@ SNESLineSearchSetReason - Sets the success/failure status of the line search application + Logically Collective; No Fortran Support + Input Parameters: + linesearch - the line search context - result - The success or failure status @@ -1731,6 +1731,8 @@ PetscErrorCode SNESLineSearchGetVIFunctions(SNESLineSearch linesearch, SNESLineS /*@C SNESLineSearchRegister - register a line search type `SNESLineSearchType` + Logically Collective, No Fortran Support + Input Parameters: + sname - name of the `SNESLineSearchType()` - function - the creation function for that type diff --git a/src/snes/mf/snesmfj.c b/src/snes/mf/snesmfj.c index e53b2ad1097..654cfd240c1 100644 --- a/src/snes/mf/snesmfj.c +++ b/src/snes/mf/snesmfj.c @@ -3,7 +3,7 @@ #include <../src/mat/impls/mffd/mffdimpl.h> #include -/*@C +/*@ MatMFFDComputeJacobian - Tells the matrix-free Jacobian object the new location at which Jacobian matrix-vector products will be computed at, i.e. J(x) * a. The x is obtained from the `SNES` object (using `SNESGetSolution()`). diff --git a/src/snes/tests/ex12f.F90 b/src/snes/tests/ex12f.F90 index 011c31ff67b..a8effeea206 100644 --- a/src/snes/tests/ex12f.F90 +++ b/src/snes/tests/ex12f.F90 @@ -90,7 +90,7 @@ program main PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)) ! 
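The MatMFFDComputeJacobian() hunk above touches the routine that re-bases the matrix-free Jacobian at the current solution. Its documented use is as the Jacobian callback passed to SNESSetJacobian() together with a matrix from MatCreateSNESMF(); the sketch below shows that wiring under the assumption that `snes` already has its residual function set.

```c
#include <petscsnes.h>

/* Sketch of the documented matrix-free idiom: MatMFFDComputeJacobian() is the
   Jacobian callback, so the MFFD matrix is re-based at the current solution
   before each J(x)*a product. Typically run with no preconditioner
   (-pc_type none), since only the action of J is available. */
static PetscErrorCode UseMatrixFree(SNES snes)
{
  Mat J;

  PetscFunctionBeginUser;
  PetscCall(MatCreateSNESMF(snes, &J)); /* matrix-free J(x) built from the SNES residual */
  PetscCall(SNESSetJacobian(snes, J, J, MatMFFDComputeJacobian, NULL));
  PetscCall(MatDestroy(&J)); /* SNES holds its own reference */
  PetscFunctionReturn(PETSC_SUCCESS);
}
```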
Set up data structures - PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,N,i1,i1,PETSC_NULL_INTEGER,ctx%da,ierr)) + PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,N,i1,i1,PETSC_NULL_INTEGER_ARRAY,ctx%da,ierr)) PetscCallA(DMSetFromOptions(ctx%da,ierr)) PetscCallA(DMSetUp(ctx%da,ierr)) PetscCallA(DMCreateGlobalVector(ctx%da,x,ierr)) @@ -102,7 +102,7 @@ program main PetscCallA(VecDuplicate(x,U,ierr)) PetscCallA(PetscObjectSetName(U,'Exact Solution',ierr)) - PetscCallA(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,N,N,i3,PETSC_NULL_INTEGER,i0,PETSC_NULL_INTEGER,J,ierr)) + PetscCallA(MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,N,N,i3,PETSC_NULL_INTEGER_ARRAY,i0,PETSC_NULL_INTEGER_ARRAY,J,ierr)) PetscCallA(MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE,ierr)) PetscCallA(MatGetType(J,matrixname,ierr)) @@ -114,8 +114,8 @@ program main do 10, i=0,nn-1 FF = 6.0*xp + (xp+1.e-12)**6.e0 UU = xp*xp*xp - PetscCallA(VecSetValues(ctx%F,i1,ii,FF,INSERT_VALUES,ierr)) - PetscCallA(VecSetValues(U,i1,ii,UU,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(ctx%F,i1,[ii],[FF],INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(U,i1,[ii],[UU],INSERT_VALUES,ierr)) xp = xp + h ii = ii + 1 10 continue @@ -221,7 +221,6 @@ subroutine FormFunction(snes,x,f,ctx,ierr) PetscCall(VecRestoreArrayF90(f,vff,ierr)) PetscCall(VecRestoreArrayReadF90(ctx%xl,vxx,ierr)) PetscCall(VecRestoreArrayF90(ctx%F,vF2,ierr)) - return end ! -------------------- Form initial approximation ----------------- @@ -237,7 +236,6 @@ subroutine FormInitialGuess(snes,x,ierr) five = .5 PetscCall(VecSet(x,five,ierr)) - return end ! -------------------- Evaluate Jacobian -------------------- @@ -269,7 +267,7 @@ subroutine FormJacobian(snes,x,jac,B,ctx,ierr) if (rank .eq. 0) then A = 1.0 - PetscCall(MatSetValues(jac,i1,start,i1,start,A,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,i1,[start],i1,[start],[A],INSERT_VALUES,ierr)) istart = 1 else istart = 0 @@ -277,7 +275,7 @@ subroutine FormJacobian(snes,x,jac,B,ctx,ierr) if (rank .eq. size-1) then i = INT(ctx%N-1) A = 1.0 - PetscCall(MatSetValues(jac,i1,i,i1,i,A,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,i1,[i],i1,[i],[A],INSERT_VALUES,ierr)) iend = n-1 else iend = n @@ -285,16 +283,15 @@ subroutine FormJacobian(snes,x,jac,B,ctx,ierr) do 10 i=istart,iend-1 ii = i + start j = start + i - 1 - PetscCall(MatSetValues(jac,i1,ii,i1,j,d,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,i1,[ii],i1,[j],[d],INSERT_VALUES,ierr)) j = start + i + 1 - PetscCall(MatSetValues(jac,i1,ii,i1,j,d,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,i1,[ii],i1,[j],[d],INSERT_VALUES,ierr)) A = -2.0*d + 2.0*vxx(i+1) - PetscCall(MatSetValues(jac,i1,ii,i1,ii,A,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,i1,[ii],i1,[ii],[A],INSERT_VALUES,ierr)) 10 continue PetscCall(VecRestoreArrayReadF90(x,vxx,ierr)) PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)) - return end !/*TEST diff --git a/src/snes/tests/ex1f.F90 b/src/snes/tests/ex1f.F90 index a37ab769ecc..898e68eec4a 100644 --- a/src/snes/tests/ex1f.F90 +++ b/src/snes/tests/ex1f.F90 @@ -182,7 +182,7 @@ subroutine SNESSetJacobian2(a,b,c,d,e,z) PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr)) if (.not. matrix_free) then - PetscCallA(MatCreateSeqAIJ(PETSC_COMM_WORLD,N,N,i5,PETSC_NULL_INTEGER,J,ierr)) + PetscCallA(MatCreateSeqAIJ(PETSC_COMM_WORLD,N,N,i5,PETSC_NULL_INTEGER_ARRAY,J,ierr)) endif ! 
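The bracketed arguments introduced above (`[ii]`, `[FF]`, `[start]`, ...) pass length-1 Fortran array constructors where the interfaces now require rank-1 arrays rather than scalars. The C counterpart of the same convention is shown below: VecSetValues() always takes arrays of indices and values, so a single entry goes in as the address of one element (a minimal sketch; the helper name is hypothetical).

```c
#include <petscvec.h>

/* C analogue of the bracketed Fortran calls: a single entry is passed to
   VecSetValues() as a length-1 index array and length-1 value array. */
static PetscErrorCode SetOneEntry(Vec v, PetscInt row, PetscScalar val)
{
  PetscFunctionBeginUser;
  PetscCall(VecSetValues(v, 1, &row, &val, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(v));
  PetscCall(VecAssemblyEnd(v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
```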
@@ -336,7 +336,6 @@ subroutine FormInitialGuess(X,ierr) PetscCallA(VecRestoreArrayF90(X,lx_v,ierr)) - return end ! ApplicationInitialGuess - Computes initial approximation, called by @@ -389,7 +388,6 @@ subroutine ApplicationInitialGuess(x,ierr) 10 continue 20 continue - return end ! --------------------------------------------------------------------- @@ -462,7 +460,6 @@ subroutine FormFunction(snes,X,F,fdcoloring,ierr) 1000 format(50i4) PetscCallA(MatFDColoringRestorePerturbedColumnsF90(fdcoloring,indices,ierr)) endif - return end ! --------------------------------------------------------------------- @@ -524,7 +521,6 @@ subroutine ApplicationFunction(x,f,ierr) 10 continue 20 continue - return end ! --------------------------------------------------------------------- @@ -587,7 +583,6 @@ subroutine FormJacobian(snes,X,jac,jac_prec,dummy,ierr) PetscCallA(MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)) - return end ! --------------------------------------------------------------------- @@ -649,7 +644,7 @@ subroutine ApplicationJacobian(x,jac,jac_prec,ierr) row(1) = row(1) + 1 ! boundary points if (i .eq. 1 .or. j .eq. 1 .or. i .eq. mx .or. j .eq. my) then - PetscCallA(MatSetValues(jac_prec,i1,row,i1,row,one,INSERT_VALUES,ierr)) + PetscCallA(MatSetValues(jac_prec,i1,row,i1,row,[one],INSERT_VALUES,ierr)) ! interior grid points else v(1) = -hxdhy @@ -667,7 +662,6 @@ subroutine ApplicationJacobian(x,jac,jac_prec,ierr) 10 continue 20 continue - return end ! diff --git a/src/snes/tests/ex21f.F90 b/src/snes/tests/ex21f.F90 index 6ceefc77c0e..c5eba2da50f 100644 --- a/src/snes/tests/ex21f.F90 +++ b/src/snes/tests/ex21f.F90 @@ -27,11 +27,11 @@ program main one = 1 zero = 0 two = 2 - PetscCallA(MatCreateSeqAIJ(PETSC_COMM_SELF,two,two,two,PETSC_NULL_INTEGER,user%A,ierr)) - val = 2.0; PetscCallA(MatSetValues(user%A,one,zero,one,zero,val,INSERT_VALUES,ierr)) - val = -1.0; PetscCallA(MatSetValues(user%A,one,zero,one,one,val,INSERT_VALUES,ierr)) - val = -1.0; PetscCallA(MatSetValues(user%A,one,one,one,zero,val,INSERT_VALUES,ierr)) - val = 1.0; PetscCallA(MatSetValues(user%A,one,one,one,one,val,INSERT_VALUES,ierr)) + PetscCallA(MatCreateSeqAIJ(PETSC_COMM_SELF,two,two,two,PETSC_NULL_INTEGER_ARRAY,user%A,ierr)) + val = 2.0; PetscCallA(MatSetValues(user%A,one,[zero],one,[zero],[val],INSERT_VALUES,ierr)) + val = -1.0; PetscCallA(MatSetValues(user%A,one,[zero],one,[one],[val],INSERT_VALUES,ierr)) + val = -1.0; PetscCallA(MatSetValues(user%A,one,[one],one,[zero],[val],INSERT_VALUES,ierr)) + val = 1.0; PetscCallA(MatSetValues(user%A,one,[one],one,[one],[val],INSERT_VALUES,ierr)) PetscCallA(MatAssemblyBegin(user%A,MAT_FINAL_ASSEMBLY,ierr)) PetscCallA(MatAssemblyEnd(user%A,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/snes/tutorials/ex11.c b/src/snes/tutorials/ex11.c index 8f892808b17..06f93b8188b 100644 --- a/src/snes/tutorials/ex11.c +++ b/src/snes/tutorials/ex11.c @@ -442,13 +442,13 @@ int main(int argc, char **argv) nsize: 2 suffix: hpddm requires: hpddm slepc !single defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) - args: -pc_type hpddm -pc_hpddm_coarse_correction balanced -pc_hpddm_coarse_mat_type aij -pc_hpddm_coarse_pc_type lu -pc_hpddm_levels_eps_nev 1 -pc_hpddm_levels_sub_pc_type lu -ksp_monitor -initial_dm_plex_simplex 0 -petscpartitioner_type simple + args: -pc_type hpddm -pc_hpddm_coarse_correction balanced -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_eps_nev 1 -pc_hpddm_levels_sub_pc_type lu -ksp_monitor 
-initial_dm_plex_simplex 0 -petscpartitioner_type simple test: nsize: 2 suffix: hpddm_p4est requires: p4est hpddm slepc !single defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) - args: -pc_type hpddm -pc_hpddm_coarse_correction balanced -pc_hpddm_coarse_mat_type aij -pc_hpddm_coarse_pc_type lu -pc_hpddm_levels_eps_nev 1 -pc_hpddm_levels_sub_pc_type lu -ksp_monitor -initial_dm_plex_simplex 0 -p4est -petscpartitioner_type simple + args: -pc_type hpddm -pc_hpddm_coarse_correction balanced -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_eps_nev 1 -pc_hpddm_levels_sub_pc_type lu -ksp_monitor -initial_dm_plex_simplex 0 -p4est -petscpartitioner_type simple test: nsize: 4 diff --git a/src/snes/tutorials/ex12.c b/src/snes/tutorials/ex12.c index 66cf088e486..946c4aeb361 100644 --- a/src/snes/tutorials/ex12.c +++ b/src/snes/tutorials/ex12.c @@ -926,13 +926,14 @@ int main(int argc, char **argv) } if (user.bdIntegral) { - DMLabel label; - PetscInt id = 1; - PetscScalar bdInt = 0.0; - PetscReal exact = 3.3333333333; + DMLabel label; + PetscBdPointFunc func[1] = {bd_integral_2d}; + PetscInt id = 1; + PetscScalar bdInt = 0.0; + PetscReal exact = 3.3333333333; PetscCall(DMGetLabel(dm, "marker", &label)); - PetscCall(DMPlexComputeBdIntegral(dm, u, label, 1, &id, bd_integral_2d, &bdInt, NULL)); + PetscCall(DMPlexComputeBdIntegral(dm, u, label, 1, &id, func, &bdInt, NULL)); PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Solution boundary integral: %.4g\n", (double)PetscAbsScalar(bdInt))); PetscCheck(PetscAbsReal(PetscAbsScalar(bdInt) - exact) <= PETSC_SQRT_MACHINE_EPSILON, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Invalid boundary integral %g != %g", (double)PetscAbsScalar(bdInt), (double)exact); } @@ -1664,22 +1665,6 @@ int main(int argc, char **argv) args: -quiet -run_type test -petscspace_degree 1 -dm_plex_simplex 0 -petscspace_poly_tensor -dm_plex_convert_type p4est -dm_forest_minimum_refinement 2 -dm_forest_initial_refinement 2 -dm_forest_maximum_refinement 4 -dm_p4est_refine_pattern hash nsize: 4 - test: - suffix: p4est_convergence_test_2 - requires: p4est - args: -quiet -run_type test -petscspace_degree 1 -dm_plex_simplex 0 -petscspace_poly_tensor -dm_plex_convert_type p4est -dm_forest_minimum_refinement 3 -dm_forest_initial_refinement 3 -dm_forest_maximum_refinement 5 -dm_p4est_refine_pattern hash - - test: - suffix: p4est_convergence_test_3 - requires: p4est - args: -quiet -run_type test -petscspace_degree 1 -dm_plex_simplex 0 -petscspace_poly_tensor -dm_plex_convert_type p4est -dm_forest_minimum_refinement 4 -dm_forest_initial_refinement 4 -dm_forest_maximum_refinement 6 -dm_p4est_refine_pattern hash - - test: - suffix: p4est_convergence_test_4 - requires: p4est - args: -quiet -run_type test -petscspace_degree 1 -dm_plex_simplex 0 -petscspace_poly_tensor -dm_plex_convert_type p4est -dm_forest_minimum_refinement 5 -dm_forest_initial_refinement 5 -dm_forest_maximum_refinement 7 -dm_p4est_refine_pattern hash - timeoutfactor: 5 - # Serial tests with GLVis visualization test: suffix: glvis_2d_tet_p1 diff --git a/src/snes/tutorials/ex13.c b/src/snes/tutorials/ex13.c index 8fac4f01e23..a563782b9ba 100644 --- a/src/snes/tutorials/ex13.c +++ b/src/snes/tutorials/ex13.c @@ -642,16 +642,29 @@ int main(int argc, char **argv) -mg_levels_esteig_ksp_max_it 10 \ -mg_levels_ksp_chebyshev_esteig 0,0.1,0,1.1 \ -mg_levels_pc_type jacobi - test: - suffix: 2d_p1_gmg_fcycle - requires: triangle - args: -potential_petscspace_degree 1 -dm_plex_box_faces 2,2 -dm_refine_hierarchy 3 \ + # Run with 
-dm_refine_hierarchy 3 to get a better idea of the solver + testset: + args: -potential_petscspace_degree 1 -dm_refine_hierarchy 2 \ -ksp_rtol 5e-10 -pc_type mg -pc_mg_type full \ -mg_levels_ksp_max_it 2 \ -mg_levels_esteig_ksp_type cg \ -mg_levels_esteig_ksp_max_it 10 \ -mg_levels_ksp_chebyshev_esteig 0,0.1,0,1.1 \ -mg_levels_pc_type jacobi + test: + suffix: 2d_p1_gmg_fcycle + requires: triangle + args: -dm_plex_box_faces 2,2 + test: + suffix: 2d_q1_gmg_fcycle + args: -dm_plex_simplex 0 -dm_plex_box_faces 2,2 + test: + suffix: 3d_p1_gmg_fcycle + requires: ctetgen + args: -dm_plex_dim 3 -dm_plex_box_faces 2,2,1 + test: + suffix: 3d_q1_gmg_fcycle + args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 test: suffix: 2d_p1_gmg_vcycle_adapt requires: triangle diff --git a/src/snes/tutorials/ex1f.F90 b/src/snes/tutorials/ex1f.F90 index fb7802bb917..58c4fd742ed 100644 --- a/src/snes/tutorials/ex1f.F90 +++ b/src/snes/tutorials/ex1f.F90 @@ -199,7 +199,6 @@ subroutine FormFunction(snes,x,f,dummy,ierr) PetscCall(VecRestoreArrayReadF90(x,lx_v,ierr)) PetscCall(VecRestoreArrayF90(f,lf_v,ierr)) - return end ! --------------------------------------------------------------------- @@ -263,7 +262,6 @@ subroutine FormJacobian(snes,X,jac,B,dummy,ierr) PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)) endif - return end subroutine MyLineSearch(linesearch, lctx, ierr) @@ -289,7 +287,6 @@ subroutine MyLineSearch(linesearch, lctx, ierr) PetscCall(VecNorm(x,NORM_2,xnorm,ierr)) PetscCall(VecNorm(y,NORM_2,ynorm,ierr)) PetscCall(SNESLineSearchSetNorms(linesearch, xnorm, gnorm, ynorm,ierr)) - return end !/*TEST diff --git a/src/snes/tutorials/ex24.c b/src/snes/tutorials/ex24.c index 69b70127f12..aef326c6b23 100644 --- a/src/snes/tutorials/ex24.c +++ b/src/snes/tutorials/ex24.c @@ -339,6 +339,59 @@ int main(int argc, char **argv) /*TEST + test: + suffix:2d_rt0_tri + requires: triangle + args: -sol_type linear -dmsnes_check 0.001 \ + -potential_petscspace_degree 0 \ + -potential_petscdualspace_lagrange_continuity 0 \ + -field_petscspace_type ptrimmed \ + -field_petscspace_components 2 \ + -field_petscspace_ptrimmed_form_degree -1 \ + -field_petscdualspace_order 1 \ + -field_petscdualspace_form_degree -1 \ + -field_petscdualspace_lagrange_trimmed true \ + -field_petscfe_default_quadrature_order 2 \ + -snes_error_if_not_converged \ + -ksp_rtol 1e-10 -ksp_error_if_not_converged \ + -pc_type fieldsplit -pc_fieldsplit_type schur \ + -pc_fieldsplit_schur_factorization_type full -pc_fieldsplit_schur_precondition full \ + -fieldsplit_field_pc_type lu \ + -fieldsplit_potential_ksp_rtol 1e-10 -fieldsplit_potential_pc_type lu + + test: + suffix:2d_rt0_quad + requires: triangle + args: -dm_plex_simplex 0 -sol_type linear -dmsnes_check 0.001 \ + -potential_petscspace_degree 0 \ + -potential_petscdualspace_lagrange_continuity 0 \ + -field_petscspace_degree 1 \ + -field_petscspace_type sum \ + -field_petscspace_variables 2 \ + -field_petscspace_components 2 \ + -field_petscspace_sum_spaces 2 \ + -field_petscspace_sum_concatenate true \ + -field_sumcomp_0_petscspace_variables 2 \ + -field_sumcomp_0_petscspace_type tensor \ + -field_sumcomp_0_petscspace_tensor_spaces 2 \ + -field_sumcomp_0_petscspace_tensor_uniform false \ + -field_sumcomp_0_tensorcomp_0_petscspace_degree 1 \ + -field_sumcomp_0_tensorcomp_1_petscspace_degree 0 \ + -field_sumcomp_1_petscspace_variables 2 \ + -field_sumcomp_1_petscspace_type tensor \ + -field_sumcomp_1_petscspace_tensor_spaces 2 \ + -field_sumcomp_1_petscspace_tensor_uniform false \ + 
-field_sumcomp_1_tensorcomp_0_petscspace_degree 0 \ + -field_sumcomp_1_tensorcomp_1_petscspace_degree 1 \ + -field_petscdualspace_form_degree -1 \ + -field_petscdualspace_order 1 \ + -field_petscdualspace_lagrange_trimmed true \ + -field_petscfe_default_quadrature_order 2 \ + -pc_type fieldsplit -pc_fieldsplit_type schur \ + -pc_fieldsplit_schur_factorization_type full -pc_fieldsplit_schur_precondition full \ + -fieldsplit_field_pc_type lu \ + -fieldsplit_potential_ksp_rtol 1e-10 -fieldsplit_potential_pc_type lu + test: suffix: 2d_bdm1_p0 requires: triangle @@ -352,11 +405,11 @@ int main(int argc, char **argv) test: nsize: 4 suffix: 2d_bdm1_p0_bddc - requires: triangle + requires: triangle !single args: -sol_type linear \ -field_petscspace_degree 1 -field_petscdualspace_type bdm -dm_refine 1 \ -dmsnes_check .001 -snes_error_if_not_converged \ - -ksp_error_if_not_converged -ksp_type cg \ + -ksp_error_if_not_converged -ksp_type cg -ksp_norm_type natural -ksp_divtol 1e10 \ -petscpartitioner_type simple -dm_mat_type is \ -pc_type bddc -pc_bddc_use_local_mat_graph 0 \ -pc_bddc_benign_trick -pc_bddc_nonetflux -pc_bddc_detect_disconnected -pc_bddc_use_qr_single \ @@ -364,6 +417,7 @@ int main(int argc, char **argv) test: nsize: 9 + requires: !single suffix: 2d_rt1_p0_bddc args: -sol_type quadratic \ -potential_petscspace_degree 0 \ diff --git a/src/snes/tutorials/ex35.c b/src/snes/tutorials/ex35.c index 981f52056ea..39c441b8e0a 100644 --- a/src/snes/tutorials/ex35.c +++ b/src/snes/tutorials/ex35.c @@ -140,14 +140,14 @@ PetscErrorCode MyComputeFunction(SNES snes, Vec x, Vec F, void *ctx) PetscFunctionBeginUser; PetscCall(SNESGetDM(snes, &dm)); - PetscCall(DMGetApplicationContext(dm, &J)); + PetscCall(PetscObjectQuery((PetscObject)dm, "_ex35_J", (PetscObject *)&J)); if (!J) { PetscCall(DMSetMatType(dm, MATAIJ)); PetscCall(DMCreateMatrix(dm, &J)); PetscCall(MatSetDM(J, NULL)); PetscCall(FormMatrix(dm, J)); - PetscCall(DMSetApplicationContext(dm, J)); - PetscCall(DMSetApplicationContextDestroy(dm, (PetscErrorCode(*)(void **))MatDestroy)); + PetscCall(PetscObjectCompose((PetscObject)dm, "_ex35_J", (PetscObject)J)); + PetscCall(PetscObjectDereference((PetscObject)J)); } PetscCall(MatMult(J, x, F)); PetscFunctionReturn(PETSC_SUCCESS); diff --git a/src/snes/tutorials/ex40f90.F90 b/src/snes/tutorials/ex40f90.F90 index 7fd1690d558..aab39cd49d2 100644 --- a/src/snes/tutorials/ex40f90.F90 +++ b/src/snes/tutorials/ex40f90.F90 @@ -30,7 +30,7 @@ program ex40f90 two = 2 sone = 1.0 - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,ten,ten,PETSC_DECIDE,PETSC_DECIDE,two,one,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,ten,ten,PETSC_DECIDE,PETSC_DECIDE,two,one,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,da,ierr)) PetscCallA(DMSetFromOptions(da,ierr)) PetscCallA(DMSetUp(da,ierr)) @@ -70,7 +70,6 @@ subroutine FormFunctionLocal(in,x,f,dummy,ierr) enddo enddo - return end !/*TEST diff --git a/src/snes/tutorials/ex5.c b/src/snes/tutorials/ex5.c index 24a46efbf8b..fb84075e58e 100644 --- a/src/snes/tutorials/ex5.c +++ b/src/snes/tutorials/ex5.c @@ -945,4 +945,19 @@ int main(int argc, char **argv) suffix: complex args: -snes_mf_operator -mat_mffd_complex -snes_monitor + test: + requires: !single + suffix: 7_ksp_view_pre + args: -pc_type gamg -ksp_view_pre + + test: + requires: !single + suffix: hem_view_detailed + args: -pc_type gamg -ksp_view ::ascii_info_detail 
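The ex35.c hunk above replaces DM application-context storage of the cached matrix with PetscObjectCompose()/PetscObjectQuery(). Composing takes a reference, so the creator drops its own with PetscObjectDereference() and the matrix is destroyed together with the DM; later queries return a borrowed pointer (NULL if nothing is attached). A minimal sketch of the pattern, with a hypothetical key "_my_J" in place of the example's "_ex35_J":

```c
#include <petscdm.h>

/* Attach a Mat to a DM under a string key so its lifetime follows the DM;
   mirrors the compose-then-dereference pattern in the ex35.c hunk above. */
static PetscErrorCode AttachMatrix(DM dm, Mat J)
{
  PetscFunctionBeginUser;
  PetscCall(PetscObjectCompose((PetscObject)dm, "_my_J", (PetscObject)J));
  PetscCall(PetscObjectDereference((PetscObject)J)); /* DM now holds the only reference */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Retrieve the attached Mat as a borrowed reference; *J is NULL if absent. */
static PetscErrorCode GetAttachedMatrix(DM dm, Mat *J)
{
  PetscFunctionBeginUser;
  PetscCall(PetscObjectQuery((PetscObject)dm, "_my_J", (PetscObject *)J));
  PetscFunctionReturn(PETSC_SUCCESS);
}
```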
-pc_gamg_mat_coarsen_type hem + + test: + requires: !single + suffix: mis_view_detailed + args: -pc_type gamg -ksp_view ::ascii_info_detail -pc_gamg_mat_coarsen_type mis + TEST*/ diff --git a/src/snes/tutorials/ex56.c b/src/snes/tutorials/ex56.c index a0c390d129e..91d2f59f648 100644 --- a/src/snes/tutorials/ex56.c +++ b/src/snes/tutorials/ex56.c @@ -464,7 +464,7 @@ int main(int argc, char **args) timeoutfactor: 2 test: suffix: 0 - args: -run_type 1 -max_conv_its 3 -mat_coarsen_type hem -mat_coarsen_max_it 5 -pc_gamg_asm_hem_aggs 4 -ksp_rtol 1.e-6 + args: -run_type 1 -max_conv_its 3 -pc_gamg_mat_coarsen_type hem -pc_gamg_mat_coarsen_max_it 5 -pc_gamg_asm_hem_aggs 4 -ksp_rtol 1.e-6 filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 7/Linear solve converged due to CONVERGED_RTOL iterations 8/g" test: suffix: 1 diff --git a/src/snes/tutorials/ex5f.F90 b/src/snes/tutorials/ex5f.F90 index 3a7235e599b..f4025088bdb 100644 --- a/src/snes/tutorials/ex5f.F90 +++ b/src/snes/tutorials/ex5f.F90 @@ -124,7 +124,7 @@ program main ! This really needs only the star-type stencil, but we use the box stencil call DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,i4,i4,PETSC_DECIDE,PETSC_DECIDE, & - i1,i1, PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr) + i1,i1, PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,da,ierr) CHKERRA(ierr) call DMSetFromOptions(da,ierr) CHKERRA(ierr) @@ -142,8 +142,8 @@ program main ! Get local grid boundaries (for 2-dimensional DMDA) call DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, & - PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, & - PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr) + PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM, & + PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr) CHKERRA(ierr) call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,PETSC_NULL_INTEGER,ierr) CHKERRA(ierr) @@ -268,7 +268,6 @@ subroutine FormInitialGuess(X,ierr) call VecRestoreArrayF90(X,lx_v,ierr) CHKERRQ(ierr) - return end ! --------------------------------------------------------------------- @@ -317,7 +316,6 @@ subroutine InitialGuessLocal(x,ierr) 10 continue 20 continue - return end ! --------------------------------------------------------------------- @@ -387,7 +385,6 @@ subroutine FormFunctionLocal(info,x,f,da,ierr) call PetscLogFlops(11.0d0*ym*xm,ierr) CHKERRQ(ierr) - return end ! --------------------------------------------------------------------- @@ -476,7 +473,7 @@ subroutine FormJacobianLocal(info,x,A,jac,da,ierr) ! Some f90 compilers need 4th arg to be of same type in both calls col(1) = row v(1) = one - call MatSetValuesLocal(jac,i1,row,i1,col,v,INSERT_VALUES,ierr) + call MatSetValuesLocal(jac,i1,[row],i1,[col],[v],INSERT_VALUES,ierr) CHKERRQ(ierr) ! interior grid points else @@ -490,7 +487,7 @@ subroutine FormJacobianLocal(info,x,A,jac,da,ierr) col(3) = row col(4) = row + 1 col(5) = row + gxm - call MatSetValuesLocal(jac,i1,row,i5,col,v, INSERT_VALUES,ierr) + call MatSetValuesLocal(jac,i1,[row],i5,[col],[v], INSERT_VALUES,ierr) CHKERRQ(ierr) endif 10 continue @@ -505,7 +502,6 @@ subroutine FormJacobianLocal(info,x,A,jac,da,ierr) call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr) CHKERRQ(ierr) endif - return end ! 
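The ex56.c test change above renames the coarsening options to their PCGAMG-prefixed forms (-mat_coarsen_type -> -pc_gamg_mat_coarsen_type, -mat_coarsen_max_it -> -pc_gamg_mat_coarsen_max_it). One way to set the same renamed options programmatically is to seed the options database before the PC is configured; a sketch, assuming the values are inserted before PCSetFromOptions()/PCSetUp() runs:

```c
#include <petscksp.h>

/* Select heavy-edge-matching coarsening for GAMG via the renamed,
   PC-prefixed options used in the updated tests above. Must be called
   before the PC reads the options database. */
static PetscErrorCode ConfigureGAMGCoarsening(void)
{
  PetscFunctionBeginUser;
  PetscCall(PetscOptionsSetValue(NULL, "-pc_gamg_mat_coarsen_type", "hem"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_gamg_mat_coarsen_max_it", "5"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
```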
diff --git a/src/snes/tutorials/ex5f90.F90 b/src/snes/tutorials/ex5f90.F90 index 6371957623e..aa13b07fb24 100644 --- a/src/snes/tutorials/ex5f90.F90 +++ b/src/snes/tutorials/ex5f90.F90 @@ -112,7 +112,6 @@ subroutine FormFunction(snes,X,F,user,ierr) ! PetscCallA(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)) ! PetscCallA(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)) - return end subroutine formfunction end module ex5f90module @@ -199,11 +198,11 @@ program main ! This really needs only the star-type stencil, but we use the box ! stencil temporarily. - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,da,ierr)) PetscCallA(DMSetFromOptions(da,ierr)) PetscCallA(DMSetUp(da,ierr)) - PetscCallA(DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCallA(DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr)) ! ! Visualize the distribution of the array across the processors @@ -356,7 +355,6 @@ subroutine FormInitialGuess(snes,X,ierr) ! Insert values into global vector - return end ! --------------------------------------------------------------------- @@ -407,7 +405,6 @@ subroutine InitialGuessLocal(user,x,ierr) 10 continue 20 continue - return end ! --------------------------------------------------------------------- @@ -464,7 +461,6 @@ subroutine FormFunctionLocal(x,f,user,ierr) 10 continue 20 continue - return end ! --------------------------------------------------------------------- @@ -563,7 +559,6 @@ subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr) PetscCallA(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr)) - return end ! --------------------------------------------------------------------- @@ -648,7 +643,7 @@ subroutine FormJacobianLocal(x,jac_prec,user,ierr) if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then col(1) = row v(1) = one - PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValuesLocal(jac_prec,ione,[row],ione,col,v,INSERT_VALUES,ierr)) ! interior grid points else v(1) = -hxdhy @@ -661,12 +656,11 @@ subroutine FormJacobianLocal(x,jac_prec,user,ierr) col(3) = row col(4) = row + 1 col(5) = row + user%gxm - PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValuesLocal(jac_prec,ione,[row],ifive,col,v,INSERT_VALUES,ierr)) endif 10 continue 20 continue - return end ! diff --git a/src/snes/tutorials/ex5f90t.F90 b/src/snes/tutorials/ex5f90t.F90 index 38d2f1479ee..6b3e1845cfb 100644 --- a/src/snes/tutorials/ex5f90t.F90 +++ b/src/snes/tutorials/ex5f90t.F90 @@ -108,7 +108,6 @@ subroutine FormFunction(snesIn,X,F,user,ierr) ! PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)) ! PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)) - return end subroutine formfunction end module ex5f90tmodule @@ -204,10 +203,10 @@ program main ! 
This really needs only the star-type stencil, but we use the box ! stencil temporarily. - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,user%da,ierr)) PetscCallA(DMSetFromOptions(user%da,ierr)) PetscCallA(DMSetUp(user%da,ierr)) - PetscCallA(DMDAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCallA(DMDAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr)) ! ! Visualize the distribution of the array across the processors @@ -361,7 +360,6 @@ subroutine FormInitialGuess(mysnes,X,ierr) ! Insert values into global vector - return end ! --------------------------------------------------------------------- @@ -412,7 +410,6 @@ subroutine InitialGuessLocal(user,x,ierr) 10 continue 20 continue - return end ! --------------------------------------------------------------------- @@ -468,7 +465,6 @@ subroutine FormFunctionLocal(x,f,user,ierr) 10 continue 20 continue ierr = 0 - return end ! --------------------------------------------------------------------- @@ -567,7 +563,6 @@ subroutine FormJacobian(mysnes,X,jac,jac_prec,user,ierr) PetscCallA(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr)) - return end ! --------------------------------------------------------------------- @@ -651,7 +646,7 @@ subroutine FormJacobianLocal(x,jac_prec,user,ierr) if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then col(1) = row v(1) = one - PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValuesLocal(jac_prec,ione,[row],ione,col,v,INSERT_VALUES,ierr)) ! 
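The ex5f.F90/ex5f90.F90/ex5f90t.F90 hunks above swap PETSC_NULL_INTEGER for PETSC_NULL_ENUM in the trailing DMDAGetInfo() arguments because those out-parameters are enum-typed (three DMBoundaryType values and a DMDAStencilType), so the Fortran null placeholder must match the argument type. The C signature makes the positions clear; in C any unwanted output is simply NULL (a sketch with a hypothetical helper name):

```c
#include <petscdmda.h>

/* Query only the global grid sizes; the four trailing out-parameters are
   enum-valued, which is why the Fortran hunks above use PETSC_NULL_ENUM. */
static PetscErrorCode GetGridSize(DM da, PetscInt *mx, PetscInt *my)
{
  PetscFunctionBeginUser;
  PetscCall(DMDAGetInfo(da, NULL, mx, my, NULL, NULL, NULL, NULL, NULL, NULL,
                        NULL /* DMBoundaryType bx */, NULL /* by */, NULL /* bz */,
                        NULL /* DMDAStencilType */));
  PetscFunctionReturn(PETSC_SUCCESS);
}
```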
interior grid points else v(1) = -hxdhy @@ -664,11 +659,10 @@ subroutine FormJacobianLocal(x,jac_prec,user,ierr) col(3) = row col(4) = row + 1 col(5) = row + user%gxm - PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr)) + PetscCallA(MatSetValuesLocal(jac_prec,ione,[row],ifive,col,v,INSERT_VALUES,ierr)) endif 10 continue 20 continue - return end !/*TEST diff --git a/src/snes/tutorials/ex62.c b/src/snes/tutorials/ex62.c index 0e4e87ca927..d4fb39e5d7c 100644 --- a/src/snes/tutorials/ex62.c +++ b/src/snes/tutorials/ex62.c @@ -623,6 +623,7 @@ int main(int argc, char **argv) # Full Schur + Velocity GMG test: suffix: 2d_p2_p1_gmg_vcycle + TODO: broken (requires subDMs hooks) requires: triangle args: -sol quadratic -dm_refine_hierarchy 2 -vel_petscspace_degree 2 -pres_petscspace_degree 1 \ -ksp_type fgmres -ksp_atol 1e-9 -snes_error_if_not_converged -pc_use_amat \ diff --git a/src/snes/tutorials/ex69.c b/src/snes/tutorials/ex69.c index 9e30e44b6ba..c829d279ca3 100644 --- a/src/snes/tutorials/ex69.c +++ b/src/snes/tutorials/ex69.c @@ -3092,7 +3092,7 @@ static PetscErrorCode SetupProblem(DM dm, AppCtx *user) PetscCall(PetscDSSetJacobian(prob, 0, 0, NULL, NULL, NULL, stokes_momentum_vel_J_cx)); PetscCall(PetscDSSetJacobian(prob, 0, 1, NULL, NULL, stokes_momentum_pres_J, NULL)); PetscCall(PetscDSSetJacobian(prob, 1, 0, NULL, stokes_mass_J, NULL, NULL)); - PetscCall(PetscDSSetJacobianPreconditioner(prob, 0, 0, NULL, NULL, NULL, stokes_momentum_vel_J_kx)); + PetscCall(PetscDSSetJacobianPreconditioner(prob, 0, 0, NULL, NULL, NULL, stokes_momentum_vel_J_cx)); PetscCall(PetscDSSetJacobianPreconditioner(prob, 1, 1, stokes_identity_J_cx, NULL, NULL, NULL)); break; default: @@ -3380,6 +3380,7 @@ int main(int argc, char **argv) -fieldsplit_pressure_ksp_rtol 1.e-9 -fieldsplit_pressure_pc_type lu test: suffix: p2p1_gmg + TODO: broken (requires subDMs hooks) requires: triangle args: -dm_plex_separate_marker -dm_refine_hierarchy 2 -vel_petscspace_degree 2 -pres_petscspace_degree 1 \ -snes_error_if_not_converged -dmsnes_check .001 \ diff --git a/src/snes/tutorials/ex71.c b/src/snes/tutorials/ex71.c index e933034640c..6892203b404 100644 --- a/src/snes/tutorials/ex71.c +++ b/src/snes/tutorials/ex71.c @@ -417,6 +417,7 @@ int main(int argc, char **argv) -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_pressure_pc_type jacobi test: suffix: 2d_tri_p2_p1_conv_gmg_vcycle + TODO: broken (requires subDMs hooks) requires: triangle !single args: -dm_plex_separate_marker -dm_plex_box_faces 2,2 -dm_refine_hierarchy 1 \ -vel_petscspace_degree 2 -pres_petscspace_degree 1 \ diff --git a/src/snes/tutorials/ex73f90t.F90 b/src/snes/tutorials/ex73f90t.F90 index bde268144fd..607b58e648e 100644 --- a/src/snes/tutorials/ex73f90t.F90 +++ b/src/snes/tutorials/ex73f90t.F90 @@ -82,6 +82,24 @@ Subroutine SNESGetApplicationContext(snesIn,ctx,ierr) End Interface SNESGetApplicationContext end module ex73f90tmodule_interfaces + subroutine MyObjective(snes, x, result, ctx, ierr ) +#include + use petsc + implicit none + PetscInt ctx + Vec x, f + SNES snes + PetscErrorCode ierr + PetscScalar result + PetscReal fnorm + + PetscCall(VecDuplicate(x,f,ierr)) + PetscCall(SNESComputeFunction(snes,x,f,ierr)) + PetscCall(VecNorm(f,NORM_2,fnorm,ierr)) + result = .5*fnorm*fnorm + PetscCall(VecDestroy(f,ierr)) + end subroutine MyObjective + program main #include #include @@ -116,10 +134,11 @@ program main PetscReal lambda_max,lambda_min type(ex73f90tmodule_type) solver PetscScalar bval(1),cval(1),one + PetscBool useobjective ! 
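The new Fortran MyObjective above computes the default merit function obj(x) = 0.5*||F(x)||^2 by reusing the SNES residual. The same callback in C, registered with SNESSetObjective(), looks as follows (a sketch; the name MyObjectiveC is hypothetical):

```c
#include <petscsnes.h>

/* C sketch of the objective added above: obj(x) = 0.5*||F(x)||^2, evaluated
   by calling the SNES residual at x and taking its 2-norm. */
static PetscErrorCode MyObjectiveC(SNES snes, Vec x, PetscReal *obj, void *ctx)
{
  Vec       f;
  PetscReal fnorm;

  PetscFunctionBeginUser;
  PetscCall(VecDuplicate(x, &f));
  PetscCall(SNESComputeFunction(snes, x, f));
  PetscCall(VecNorm(f, NORM_2, &fnorm));
  *obj = 0.5 * fnorm * fnorm;
  PetscCall(VecDestroy(&f));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* usage: PetscCall(SNESSetObjective(snes, MyObjectiveC, NULL)); */
```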
Note: Any user-defined Fortran routines (such as FormJacobian) ! MUST be declared as external. - external FormInitialGuess,FormJacobian,FormFunction + external FormInitialGuess,FormJacobian,FormFunction,MyObjective ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ! Initialize program @@ -134,18 +153,20 @@ program main ione = 1 nfour = 4 itwo = 2 + useobjective = PETSC_FALSE PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par', solver%lambda,flg,ierr)) PetscCheckA(solver%lambda .le. lambda_max .and. solver%lambda .ge. lambda_min,PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range') + PetscCallA(PetscOptionsGetBool(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-objective', useobjective,flg,ierr)) ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ! Create vector data structures; set function evaluation routine ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ! just get size - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,daphi,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,daphi,ierr)) PetscCallA(DMSetFromOptions(daphi,ierr)) PetscCallA(DMSetUp(daphi,ierr)) - PetscCallA(DMDAGetInfo(daphi,PETSC_NULL_INTEGER,solver%mx,solver%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCallA(DMDAGetInfo(daphi,PETSC_NULL_INTEGER,solver%mx,solver%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr)) N1 = solver%my*solver%mx N2 = solver%my flg = .false. @@ -304,8 +325,10 @@ program main PetscCallA(SNESSetDM(mysnes,solver%da,ierr)) ! Set function evaluation routine and vector - PetscCallA(SNESSetFunction(mysnes,r,FormFunction,solver,ierr)) - + PetscCallA(SNESSetFunction(mysnes, r, FormFunction, solver,ierr)) + if (useobjective .eqv. PETSC_TRUE) then + PetscCallA(SNESSetObjective(mysnes, MyObjective, solver, ierr)) + endif PetscCallA(SNESSetJacobian(mysnes,KKTmat,KKTmat,FormJacobian,solver,ierr)) ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -395,7 +418,7 @@ subroutine FormInitialGuess(mysnes,Xnest,ierr) itwo = 2 ierr = 0 PetscCall(SNESGetApplicationContext(mysnes,solver,ierr)) - PetscCall(DMCompositeGetAccessArray(solver%da,Xnest,itwo,PETSC_NULL_INTEGER,Xsub,ierr)) + PetscCall(DMCompositeGetAccessArray(solver%da,Xnest,itwo,PETSC_NULL_INTEGER_ARRAY,Xsub,ierr)) PetscCall(InitialGuessLocal(solver,Xsub(1),ierr)) PetscCall(VecAssemblyBegin(Xsub(1),ierr)) @@ -403,9 +426,8 @@ subroutine FormInitialGuess(mysnes,Xnest,ierr) ! zero out lambda PetscCall(VecZeroEntries(Xsub(2),ierr)) - PetscCall(DMCompositeRestoreAccessArray(solver%da,Xnest,itwo,PETSC_NULL_INTEGER,Xsub,ierr)) + PetscCall(DMCompositeRestoreAccessArray(solver%da,Xnest,itwo,PETSC_NULL_INTEGER_ARRAY,Xsub,ierr)) - return end subroutine FormInitialGuess ! 
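The main-program hunk above gates SNESSetObjective() behind a new -objective option read with PetscOptionsGetBool(). A minimal C analogue of that toggle, assuming an objective callback like the MyObjectiveC sketched earlier:

```c
#include <petscsnes.h>

/* The objective callback sketched earlier; declared here so this unit is
   self-contained. */
extern PetscErrorCode MyObjectiveC(SNES, Vec, PetscReal *, void *);

/* Register the objective only when -objective true is in the options database,
   mirroring the -objective toggle added in the hunk above. */
static PetscErrorCode MaybeSetObjective(SNES snes)
{
  PetscBool useobjective = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-objective", &useobjective, NULL));
  if (useobjective) PetscCall(SNESSetObjective(snes, MyObjectiveC, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
```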
--------------------------------------------------------------------- @@ -457,10 +479,9 @@ subroutine InitialGuessLocal(solver,X1,ierr) else v = temp1 * sqrt(min(min(i,solver%mx-i+1)*hx,temp)) endif - PetscCall(VecSetValues(X1,ione,row,v,INSERT_VALUES,ierr)) + PetscCall(VecSetValues(X1,ione,[row],[v],INSERT_VALUES,ierr)) 20 continue - return end subroutine InitialGuessLocal ! --------------------------------------------------------------------- @@ -496,14 +517,14 @@ subroutine FormJacobian(dummy,X,jac,jac_prec,solver,ierr) ione = 1 - PetscCall(DMCompositeGetAccessArray(solver%da,X,ione,PETSC_NULL_INTEGER,Xsub,ierr)) + PetscCall(DMCompositeGetAccessArray(solver%da,X,ione,PETSC_NULL_INTEGER_ARRAY,Xsub,ierr)) ! Compute entries for the locally owned part of the Jacobian preconditioner. PetscCall(MatCreateSubMatrix(jac_prec,solver%isPhi,solver%isPhi,MAT_INITIAL_MATRIX,Amat,ierr)) PetscCall(FormJacobianLocal(Xsub(1),Amat,solver,.true.,ierr)) PetscCall(MatDestroy(Amat,ierr)) ! discard our reference - PetscCall(DMCompositeRestoreAccessArray(solver%da,X,ione,PETSC_NULL_INTEGER,Xsub,ierr)) + PetscCall(DMCompositeRestoreAccessArray(solver%da,X,ione,PETSC_NULL_INTEGER_ARRAY,Xsub,ierr)) ! the rest of the matrix is not touched PetscCall(MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)) @@ -517,7 +538,6 @@ subroutine FormJacobian(dummy,X,jac,jac_prec,solver,ierr) ! matrix. If we do it will generate an error. PetscCall(MatSetOption(jac_prec,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr)) - return end subroutine FormJacobian ! --------------------------------------------------------------------- @@ -603,7 +623,6 @@ subroutine FormJacobianLocal(X1,jac,solver,add_nl_term,ierr) PetscCall(VecRestoreArrayReadF90(X1,lx_v,ierr)) - return end subroutine FormJacobianLocal ! --------------------------------------------------------------------- @@ -640,8 +659,8 @@ subroutine FormFunction(snesIn,X,F,solver,ierr) ! be done while messages are in transition. itwo = 2 - PetscCall(DMCompositeGetAccessArray(solver%da,X,itwo,PETSC_NULL_INTEGER,Xsub,ierr)) - PetscCall(DMCompositeGetAccessArray(solver%da,F,itwo,PETSC_NULL_INTEGER,Fsub,ierr)) + PetscCall(DMCompositeGetAccessArray(solver%da,X,itwo,PETSC_NULL_INTEGER_ARRAY,Xsub,ierr)) + PetscCall(DMCompositeGetAccessArray(solver%da,F,itwo,PETSC_NULL_INTEGER_ARRAY,Fsub,ierr)) PetscCall(FormFunctionNLTerm( Xsub(1), Fsub(1), solver, ierr)) PetscCall(MatMultAdd( solver%AmatLin, Xsub(1), Fsub(1), Fsub(1), ierr)) @@ -651,9 +670,8 @@ subroutine FormFunction(snesIn,X,F,solver,ierr) PetscCall(MatMultAdd( solver%Bmat, Xsub(2), Fsub(1), Fsub(1), ierr)) PetscCall(MatMultAdd( solver%Dmat, Xsub(2), Fsub(2), Fsub(2), ierr)) - PetscCall(DMCompositeRestoreAccessArray(solver%da,X,itwo,PETSC_NULL_INTEGER,Xsub,ierr)) - PetscCall(DMCompositeRestoreAccessArray(solver%da,F,itwo,PETSC_NULL_INTEGER,Fsub,ierr)) - return + PetscCall(DMCompositeRestoreAccessArray(solver%da,X,itwo,PETSC_NULL_INTEGER_ARRAY,Xsub,ierr)) + PetscCall(DMCompositeRestoreAccessArray(solver%da,F,itwo,PETSC_NULL_INTEGER_ARRAY,Fsub,ierr)) end subroutine formfunction ! --------------------------------------------------------------------- @@ -714,7 +732,6 @@ subroutine FormFunctionNLTerm(X1,F1,solver,ierr) PetscCall(VecAssemblyEnd(F1,ierr)) ierr = 0 - return end subroutine FormFunctionNLTerm !/*TEST @@ -726,4 +743,10 @@ end subroutine FormFunctionNLTerm ! nsize: 4 ! 
args: -par 5.0 -da_grid_x 10 -da_grid_y 10 -snes_monitor_short -snes_linesearch_type basic -snes_converged_reason -ksp_type fgmres -ksp_norm_type unpreconditioned -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_fact_type upper -ksp_monitor_short -fieldsplit_lambda_ksp_type preonly -fieldsplit_lambda_pc_type jacobi -fieldsplit_phi_pc_type gamg -fieldsplit_phi_pc_gamg_esteig_ksp_type cg -fieldsplit_phi_pc_gamg_esteig_ksp_max_it 10 -fieldsplit_phi_pc_gamg_agg_nsmooths 1 -fieldsplit_phi_pc_gamg_threshold 0. ! +! test: +! args: -snes_linesearch_type {{l2 cp}separate output} -objective {{false true}shared output} +! +! test: +! args: -snes_linesearch_type bt -objective {{false true}separate output} +! !TEST*/ diff --git a/src/snes/tutorials/output/ex13_2d_q1_gmg_fcycle.out b/src/snes/tutorials/output/ex13_2d_q1_gmg_fcycle.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/snes/tutorials/output/ex13_3d_p1_gmg_fcycle.out b/src/snes/tutorials/output/ex13_3d_p1_gmg_fcycle.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/snes/tutorials/output/ex13_3d_q1_gmg_fcycle.out b/src/snes/tutorials/output/ex13_3d_q1_gmg_fcycle.out new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/snes/tutorials/output/ex24_2d_rt0_quad.out b/src/snes/tutorials/output/ex24_2d_rt0_quad.out new file mode 100644 index 00000000000..3cb082e4447 --- /dev/null +++ b/src/snes/tutorials/output/ex24_2d_rt0_quad.out @@ -0,0 +1,3 @@ +L_2 Error: [0., 0.144338] +L_2 Residual: 3.16462e-16 +Function appears to be linear diff --git a/src/snes/tutorials/output/ex24_2d_rt0_tri.out b/src/snes/tutorials/output/ex24_2d_rt0_tri.out new file mode 100644 index 00000000000..fade9077017 --- /dev/null +++ b/src/snes/tutorials/output/ex24_2d_rt0_tri.out @@ -0,0 +1,3 @@ +L_2 Error: [8.05672e-17, 0.117851] +L_2 Residual: 2.11379 +Function appears to be linear diff --git a/src/snes/tutorials/output/ex56_1.out b/src/snes/tutorials/output/ex56_1.out index bd73c716194..90417b4292b 100644 --- a/src/snes/tutorials/output/ex56_1.out +++ b/src/snes/tutorials/output/ex56_1.out @@ -1,4 +1,4 @@ - Linear solve converged due to CONVERGED_RTOL iterations 5 + Linear solve converged due to CONVERGED_RTOL iterations 5 SNES Object: 4 MPI processes type: ksponly maximum iterations=1, maximum function evaluations=10000 @@ -22,10 +22,15 @@ SNES Object: 4 MPI processes Threshold for dropping small values in graph on each level = 0.001 0.001 Threshold scaling factor for each level not specified = 1. Using aggregates made with 3 applications of heavy edge matching (HEM) to define subdomains for PCASM + MatCoarsen Object: 4 MPI processes + type: hem + 3 matching steps with threshold = 0. 
AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 4 MPI processes + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.11111 operator = 1.02041 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 4 MPI processes @@ -131,7 +136,7 @@ Labels: boundary: 1 strata with value/size (1 (77)) Field deformation: adjacency FEM - Linear solve converged due to CONVERGED_RTOL iterations 8 + Linear solve converged due to CONVERGED_RTOL iterations 8 SNES Object: 4 MPI processes type: ksponly maximum iterations=1, maximum function evaluations=10000 @@ -155,10 +160,15 @@ SNES Object: 4 MPI processes Threshold for dropping small values in graph on each level = 0.001 0.001 Threshold scaling factor for each level not specified = 1. Using aggregates made with 3 applications of heavy edge matching (HEM) to define subdomains for PCASM + MatCoarsen Object: 4 MPI processes + type: hem + 3 matching steps with threshold = 0. AGG specific options Number of levels of aggressive coarsening 1 Square graph aggressive coarsening - Number smoothing steps 1 + MatCoarsen Object: (pc_gamg_) 4 MPI processes + type: mis + Number smoothing steps to construct prolongation 1 Complexity: grid = 1.02721 operator = 1.00432 Coarse grid solver -- level 0 ------------------------------- KSP Object: (mg_coarse_) 4 MPI processes diff --git a/src/snes/tutorials/output/ex5_7_ksp_view_pre.out b/src/snes/tutorials/output/ex5_7_ksp_view_pre.out new file mode 100644 index 00000000000..e48b5052f8b --- /dev/null +++ b/src/snes/tutorials/output/ex5_7_ksp_view_pre.out @@ -0,0 +1,341 @@ +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using DEFAULT norm type for convergence test +PC Object: 1 MPI process + type: gamg + PC has not been set up so information may be incomplete + type is MULTIPLICATIVE, levels=0 cycles=unknown + Cycles per PCApply=0 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + Coarsening algorithm not yet selected + Number smoothing steps to construct prolongation 1 + Complexity: grid = 0. operator = 0. + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.1875 operator = 1.14062 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for first block is in the following KSP and PC objects on rank 0: + Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. + Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + package used to perform factorization: petsc + total: nonzeros=9, allocated nonzeros=9 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 1.06112, max 11.6723 + eigenvalues provided (min 0.311583, max 10.6112) with transform: [0. 0.1; 0. 1.1] + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.1875 operator = 1.14062 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for first block is in the following KSP and PC objects on rank 0: + Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. 
+ Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + package used to perform factorization: petsc + total: nonzeros=9, allocated nonzeros=9 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.159372, max 1.75309 + eigenvalues estimated via gmres: min 0.406283, max 1.59372 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. 
+ AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.1875 operator = 1.14062 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for first block is in the following KSP and PC objects on rank 0: + Use -mg_coarse_ksp_view ::ascii_info_detail to display information for all blocks + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. + Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + package used to perform factorization: petsc + total: nonzeros=9, allocated nonzeros=9 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.160581, max 1.76639 + eigenvalues estimated via gmres: min 0.394193, max 1.60581 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines diff --git a/src/snes/tutorials/output/ex5_hem_view_detailed.out b/src/snes/tutorials/output/ex5_hem_view_detailed.out new file mode 100644 index 00000000000..56e67293a8f --- /dev/null +++ b/src/snes/tutorials/output/ex5_hem_view_detailed.out @@ -0,0 +1,442 @@ +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 0 + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: hem + 4 matching steps with threshold = -1. + HEM aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.125 operator = 1.0625 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. 
+ Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + package used to perform factorization: petsc + total: nonzeros=4, allocated nonzeros=4 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 1.06112, max 11.6723 + eigenvalues provided (min 0.311583, max 10.6112) with transform: [0. 0.1; 0. 1.1] + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 0 + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: hem + 4 matching steps with threshold = -1. + HEM aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.125 operator = 1.0625 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. + Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + package used to perform factorization: petsc + total: nonzeros=4, allocated nonzeros=4 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.159372, max 1.75309 + eigenvalues estimated via gmres: min 0.406283, max 1.59372 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 0 + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: hem + 4 matching steps with threshold = -1. + HEM aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.125 operator = 1.0625 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. 
+ Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + package used to perform factorization: petsc + total: nonzeros=4, allocated nonzeros=4 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.160581, max 1.76639 + eigenvalues estimated via gmres: min 0.394193, max 1.60581 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 0 + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: hem + 4 matching steps with threshold = -1. 
+ HEM aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.125 operator = 1.0625 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. + Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + package used to perform factorization: petsc + total: nonzeros=4, allocated nonzeros=4 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=2, cols=2 + total: nonzeros=4, allocated nonzeros=4 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.160614, max 1.76675 + eigenvalues estimated via gmres: min 0.393863, max 1.60614 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines diff --git a/src/snes/tutorials/output/ex5_mis_view_detailed.out b/src/snes/tutorials/output/ex5_mis_view_detailed.out new file mode 100644 index 00000000000..9666acf2584 --- /dev/null +++ b/src/snes/tutorials/output/ex5_mis_view_detailed.out @@ -0,0 +1,442 @@ +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + MIS aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.1875 operator = 1.14062 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. 
+ Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + package used to perform factorization: petsc + total: nonzeros=9, allocated nonzeros=9 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 1.06112, max 11.6723 + eigenvalues provided (min 0.311583, max 10.6112) with transform: [0. 0.1; 0. 1.1] + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + MIS aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.1875 operator = 1.14062 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. + Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + package used to perform factorization: petsc + total: nonzeros=9, allocated nonzeros=9 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.159372, max 1.75309 + eigenvalues estimated via gmres: min 0.406283, max 1.59372 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. + AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + MIS aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.1875 operator = 1.14062 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. 
+ Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + package used to perform factorization: petsc + total: nonzeros=9, allocated nonzeros=9 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.160581, max 1.76639 + eigenvalues estimated via gmres: min 0.394193, max 1.60581 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines +KSP Object: 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test +PC Object: 1 MPI process + type: gamg + type is MULTIPLICATIVE, levels=2 cycles=v + Cycles per PCApply=1 + Using externally compute Galerkin coarse grid matrices + GAMG specific options + Threshold for dropping small values in graph on each level = -1. -1. + Threshold scaling factor for each level not specified = 1. 
+ AGG specific options + Number of levels of aggressive coarsening 1 + Square graph aggressive coarsening + MatCoarsen Object: (pc_gamg_) 1 MPI process + type: mis + MIS aggregator lists are not available + Number smoothing steps to construct prolongation 1 + Complexity: grid = 1.1875 operator = 1.14062 + Coarse grid solver -- level 0 ------------------------------- + KSP Object: (mg_coarse_) 1 MPI process + type: preonly + maximum iterations=10000, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_) 1 MPI process + type: bjacobi + number of blocks = 1 + Local solver information for each block is in the following KSP and PC objects: + [0] number of local blocks = 1, first local block number = 0 + [0] local block number 0 + KSP Object: (mg_coarse_sub_) 1 MPI process + type: preonly + maximum iterations=1, initial guess is zero + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. + left preconditioning + using NONE norm type for convergence test + PC Object: (mg_coarse_sub_) 1 MPI process + type: lu + out-of-place factorization + tolerance for zero pivot 2.22045e-14 + using diagonal shift on blocks to prevent zero pivot [INBLOCKS] + matrix ordering: nd + factor fill ratio given 5., needed 1. + Factored matrix follows: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + package used to perform factorization: petsc + total: nonzeros=9, allocated nonzeros=9 + using I-node routines: found 1 nodes, limit used is 5 + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + - - - - - - - - - - - - - - - - - - + linear system matrix = precond matrix: + Mat Object: (mg_coarse_sub_) 1 MPI process + type: seqaij + rows=3, cols=3 + total: nonzeros=9, allocated nonzeros=9 + total number of mallocs used during MatSetValues calls=0 + using I-node routines: found 1 nodes, limit used is 5 + Down solver (pre-smoother) on level 1 ------------------------------- + KSP Object: (mg_levels_1_) 1 MPI process + type: chebyshev + Chebyshev polynomial of first kind + eigenvalue targets used: min 0.160614, max 1.76675 + eigenvalues estimated via gmres: min 0.393863, max 1.60614 + eigenvalues estimated using gmres with transform: [0. 0.1; 0. 1.1] + KSP Object: (mg_levels_1_esteig_) 1 MPI process + type: gmres + restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement + happy breakdown tolerance 1e-30 + maximum iterations=10, initial guess is zero + tolerances: relative=1e-12, absolute=1e-50, divergence=10000. + left preconditioning + using PRECONDITIONED norm type for convergence test + estimating eigenvalues using a noisy random number generated right-hand side + maximum iterations=2, nonzero initial guess + tolerances: relative=1e-05, absolute=1e-50, divergence=10000. 
+ left preconditioning + using NONE norm type for convergence test + PC Object: (mg_levels_1_) 1 MPI process + type: jacobi + type DIAGONAL + Vec Object: 1 MPI process + type: seq + length=16 + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines + Up solver (post-smoother) same as down solver (pre-smoother) + linear system matrix = precond matrix: + Mat Object: 1 MPI process + type: seqaij + rows=16, cols=16 + total: nonzeros=64, allocated nonzeros=64 + total number of mallocs used during MatSetValues calls=0 + not using I-node routines diff --git a/src/snes/tutorials/output/ex73f90t_objective-false.out b/src/snes/tutorials/output/ex73f90t_objective-false.out new file mode 100644 index 00000000000..c7c24951689 --- /dev/null +++ b/src/snes/tutorials/output/ex73f90t_objective-false.out @@ -0,0 +1 @@ +Number of SNES iterations = 6 diff --git a/src/snes/tutorials/output/ex73f90t_objective-true.out b/src/snes/tutorials/output/ex73f90t_objective-true.out new file mode 100644 index 00000000000..0b76be2b328 --- /dev/null +++ b/src/snes/tutorials/output/ex73f90t_objective-true.out @@ -0,0 +1 @@ +Number of SNES iterations = 8 diff --git a/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-bt.out b/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-bt.out new file mode 100644 index 00000000000..0b76be2b328 --- /dev/null +++ b/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-bt.out @@ -0,0 +1 @@ +Number of SNES iterations = 8 diff --git a/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-cp.out b/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-cp.out new file mode 100644 index 00000000000..71e688b064f --- /dev/null +++ b/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-cp.out @@ -0,0 +1 @@ +Number of SNES iterations = 13 diff --git a/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-l2.out b/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-l2.out new file mode 100644 index 00000000000..c7c24951689 --- /dev/null +++ b/src/snes/tutorials/output/ex73f90t_snes_linesearch_type-l2.out @@ -0,0 +1 @@ +Number of SNES iterations = 6 diff --git a/src/snes/tutorials/output/ex77_0.out b/src/snes/tutorials/output/ex77_0.out index 1da4f8ee279..be0f9235470 100644 --- a/src/snes/tutorials/output/ex77_0.out +++ b/src/snes/tutorials/output/ex77_0.out @@ -1,23 +1,28 @@ - 0 SNES Function norm 0.301674 - 0 KSP Residual norm 0.301674 - 1 KSP Residual norm 0.0293257 + 0 SNES Function norm 0.334788 + 0 KSP Residual norm 0.334788 + 1 KSP Residual norm 0.0352782 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 1 SNES Function norm 0.0118615 - 0 KSP Residual norm 0.0118615 - 1 KSP Residual norm 0.000679877 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 1 SNES Function norm 0.144267 + 0 KSP Residual norm 0.144267 + 1 KSP Residual norm 0.0111639 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 2 SNES Function norm 0.00303475 - 0 KSP Residual norm 0.00303475 - 1 KSP Residual norm 0.00019555 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 2 SNES Function norm 0.0276657 + 0 KSP Residual norm 0.0276657 + 1 KSP Residual norm 0.00247755 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 3 SNES Function norm 2.6003e-05 - 0 KSP Residual norm 2.6003e-05 - 1 KSP 
Residual norm 1.66825e-06 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 3 SNES Function norm 0.00826251 + 0 KSP Residual norm 0.00826251 + 1 KSP Residual norm 0.000289934 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 4 SNES Function norm 2.46828e-08 -Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 4 -Number of SNES iterations = 4 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 4 SNES Function norm 0.000260571 + 0 KSP Residual norm 0.000260571 + 1 KSP Residual norm 9.82302e-06 + 2 KSP Residual norm < 1.e-11 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 5 SNES Function norm 6.47516e-07 + Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5 +Number of SNES iterations = 5 diff --git a/src/snes/tutorials/output/ex77_1.out b/src/snes/tutorials/output/ex77_1.out index 9ce6f939555..3b9082cbe5e 100644 --- a/src/snes/tutorials/output/ex77_1.out +++ b/src/snes/tutorials/output/ex77_1.out @@ -1,18 +1,28 @@ - 0 SNES Function norm 0.982061 - 0 KSP Residual norm 0.982061 - 1 KSP Residual norm 0.115894 + 0 SNES Function norm 1.10151 + 0 KSP Residual norm 1.10151 + 1 KSP Residual norm 0.139837 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 1 SNES Function norm 0.0666588 - 0 KSP Residual norm 0.0666588 - 1 KSP Residual norm 0.00221562 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 1 SNES Function norm 0.528739 + 0 KSP Residual norm 0.528739 + 1 KSP Residual norm 0.0428326 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 2 SNES Function norm 0.00168468 - 0 KSP Residual norm 0.00168468 - 1 KSP Residual norm 0.000156209 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 2 SNES Function norm 0.117202 + 0 KSP Residual norm 0.117202 + 1 KSP Residual norm 0.00881688 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 3 SNES Function norm 6.8812e-06 -Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 3 -Number of SNES iterations = 3 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 3 SNES Function norm 0.00797867 + 0 KSP Residual norm 0.00797867 + 1 KSP Residual norm 0.00053253 + 2 KSP Residual norm < 1.e-11 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 4 SNES Function norm 2.51775e-05 + 0 KSP Residual norm 2.51775e-05 + 1 KSP Residual norm 1.49004e-06 + 2 KSP Residual norm < 1.e-11 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 5 SNES Function norm 5.359e-10 + Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5 +Number of SNES iterations = 5 diff --git a/src/snes/tutorials/output/ex77_2.out b/src/snes/tutorials/output/ex77_2.out index ef9df32ca06..48556728aa7 100644 --- a/src/snes/tutorials/output/ex77_2.out +++ b/src/snes/tutorials/output/ex77_2.out @@ -1,18 +1,28 @@ - 0 SNES Function norm 0.170993 - 0 KSP Residual norm 0.170993 - 1 KSP Residual norm 0.0138957 + 0 SNES Function norm 0.294628 + 0 KSP Residual norm 0.294628 + 1 KSP Residual norm 0.0278529 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 1 SNES Function norm 0.0105253 - 0 KSP Residual norm 0.0105253 - 1 KSP Residual norm 0.000183972 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 1 SNES Function norm 0.117858 + 0 KSP Residual norm 0.117858 + 1 KSP Residual norm 0.00218783 2 KSP Residual norm < 1.e-11 - Linear solve converged due to 
CONVERGED_RTOL iterations 2 - 2 SNES Function norm 0.000244394 - 0 KSP Residual norm 0.000244394 - 1 KSP Residual norm 7.9238e-06 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 2 SNES Function norm 0.0360603 + 0 KSP Residual norm 0.0360603 + 1 KSP Residual norm 0.000870849 2 KSP Residual norm < 1.e-11 - Linear solve converged due to CONVERGED_RTOL iterations 2 - 3 SNES Function norm 7.92286e-08 -Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 3 -Number of SNES iterations = 3 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 3 SNES Function norm 0.00191581 + 0 KSP Residual norm 0.00191581 + 1 KSP Residual norm 1.80959e-05 + 2 KSP Residual norm < 1.e-11 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 4 SNES Function norm 1.49002e-05 + 0 KSP Residual norm 1.49002e-05 + 1 KSP Residual norm 2.71385e-07 + 2 KSP Residual norm < 1.e-11 + Linear solve converged due to CONVERGED_RTOL iterations 2 + 5 SNES Function norm 3.795e-10 + Nonlinear solve converged due to CONVERGED_FNORM_RELATIVE iterations 5 +Number of SNES iterations = 5 diff --git a/src/snes/utils/dm/dmadapt.c b/src/snes/utils/dm/dmadapt.c index fecf3fa625c..538d12af06b 100644 --- a/src/snes/utils/dm/dmadapt.c +++ b/src/snes/utils/dm/dmadapt.c @@ -39,8 +39,8 @@ PetscErrorCode DMAdaptorCreate(MPI_Comm comm, DMAdaptor *adaptor) PetscFunctionBegin; PetscAssertPointer(adaptor, 2); PetscCall(PetscSysInitializePackage()); - PetscCall(PetscHeaderCreate(*adaptor, DM_CLASSID, "DMAdaptor", "DM Adaptor", "SNES", comm, DMAdaptorDestroy, DMAdaptorView)); + PetscCall(PetscHeaderCreate(*adaptor, DM_CLASSID, "DMAdaptor", "DM Adaptor", "SNES", comm, DMAdaptorDestroy, DMAdaptorView)); (*adaptor)->monitor = PETSC_FALSE; (*adaptor)->adaptCriterion = DM_ADAPTATION_NONE; (*adaptor)->numSeq = 1; diff --git a/src/snes/utils/dm/dminterpolatesnes.c b/src/snes/utils/dm/dminterpolatesnes.c index 95f9e441c2c..203dc07876a 100644 --- a/src/snes/utils/dm/dminterpolatesnes.c +++ b/src/snes/utils/dm/dminterpolatesnes.c @@ -840,7 +840,7 @@ static inline PetscErrorCode DMInterpolate_Hex_Private(DMInterpolationInfo ctx, Input Parameters: + ctx - The `DMInterpolationInfo` context obtained with `DMInterpolationCreate()` . dm - The `DM` -- x - The local vector containing the field to be interpolated, obtained with `DMInterpolationGetCoordinates()` +- x - The local vector containing the field to be interpolated, can be created with `DMCreateGlobalVector()` Output Parameter: . 
v - The vector containing the interpolated values, obtained with `DMInterpolationGetVector()` diff --git a/src/snes/utils/dmplexsnes.c b/src/snes/utils/dmplexsnes.c index 415448eaf5b..44a8de8e9be 100644 --- a/src/snes/utils/dmplexsnes.c +++ b/src/snes/utils/dmplexsnes.c @@ -493,7 +493,7 @@ PetscErrorCode DMPlexSNESComputeBoundaryFEM(DM dm, Vec X, void *user) Developer Note: This should be called `DMPlexSNESComputeJacobianAction()` -.seealso: [](ch_snes), `DM`, ``DMSNESCreateJacobianMF()`, `DMPlexSNESComputeResidualFEM()` +.seealso: [](ch_snes), `DM`, `DMSNESCreateJacobianMF()`, `DMPlexSNESComputeResidualFEM()` @*/ PetscErrorCode DMSNESComputeJacobianAction(DM dm, Vec X, Vec Y, Vec F, void *user) { @@ -778,7 +778,7 @@ PetscErrorCode DMPlexSetSNESLocalFEM(DM dm, PetscBool use_obj, void *ctx) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSNESCheckDiscretization - Check the discretization error of the exact solution Input Parameters: @@ -870,7 +870,7 @@ PetscErrorCode DMSNESCheckDiscretization(SNES snes, DM dm, PetscReal t, Vec u, P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSNESCheckResidual - Check the residual of the exact solution Input Parameters: @@ -917,7 +917,7 @@ PetscErrorCode DMSNESCheckResidual(SNES snes, DM dm, Vec u, PetscReal tol, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSNESCheckJacobian - Check the Jacobian of the exact solution against the residual using the Taylor Test Input Parameters: @@ -1048,7 +1048,7 @@ PetscErrorCode DMSNESCheck_Internal(SNES snes, DM dm, Vec u) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMSNESCheckFromOptions - Check the residual and Jacobian functions using the exact solution by outputting some diagnostic information Input Parameters: diff --git a/src/snes/utils/dmsnes.c b/src/snes/utils/dmsnes.c index 68f21108f50..dad97a88438 100644 --- a/src/snes/utils/dmsnes.c +++ b/src/snes/utils/dmsnes.c @@ -248,7 +248,7 @@ PetscErrorCode DMGetDMSNESWrite(DM dm, DMSNES *snesdm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMCopyDMSNES - copies a `DMSNES` context to a new `DM` Logically Collective diff --git a/src/sys/ams/pams.c b/src/sys/ams/pams.c index 5d7ea3ae480..05328c38635 100644 --- a/src/sys/ams/pams.c +++ b/src/sys/ams/pams.c @@ -2,7 +2,7 @@ #include #include -/*@C +/*@ PetscObjectSAWsTakeAccess - Take access of the data fields that have been published to SAWs by a `PetscObject` so their values may be changed in the computation @@ -28,7 +28,7 @@ PetscErrorCode PetscObjectSAWsTakeAccess(PetscObject obj) return PETSC_SUCCESS; } -/*@C +/*@ PetscObjectSAWsGrantAccess - Grants access of the data fields that have been published to SAWs called when the changes made during `PetscObjectSAWsTakeAccess()` are complete. 
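The SAWs access pair documented in the hunks above is intended as a bracket around updates to published fields. A minimal sketch, assuming an existing PetscObject obj whose fields were previously published to SAWs; the object and the update in the middle are hypothetical, and only the three Petsc* calls come from this file:

    /* Bracket updates to SAWs-published fields so a web client never
       observes them mid-update; obj is assumed valid with published fields. */
    PetscCall(PetscObjectSAWsTakeAccess(obj));
    /* ... modify the data fields of obj that were published to SAWs ... */
    PetscCall(PetscObjectSAWsGrantAccess(obj));
    PetscCall(PetscObjectSAWsBlock(obj)); /* optionally block until a browser client unblocks */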
@@ -53,7 +53,7 @@ PetscErrorCode PetscObjectSAWsGrantAccess(PetscObject obj) return PETSC_SUCCESS; } -/*@C +/*@ PetscSAWsBlock - Blocks on SAWs until a client (person using the web browser) unblocks it Not Collective @@ -81,7 +81,7 @@ PetscErrorCode PetscSAWsBlock(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectSAWsBlock - Blocks the object if `PetscObjectSAWsSetBlock()` has been called Collective @@ -103,7 +103,7 @@ PetscErrorCode PetscObjectSAWsBlock(PetscObject obj) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectSAWsSetBlock - Sets whether an object will block at `PetscObjectSAWsBlock()` Collective diff --git a/src/sys/classes/bag/bag.c b/src/sys/classes/bag/bag.c index 236fbd6a6dd..88be7717ac3 100644 --- a/src/sys/classes/bag/bag.c +++ b/src/sys/classes/bag/bag.c @@ -517,7 +517,7 @@ PetscErrorCode PetscBagRegisterBool(PetscBag bag, void *addr, PetscBool mdefault PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBagDestroy - Destroys a `PetscBag` Collective @@ -626,7 +626,7 @@ PetscErrorCode PetscBagSetFromOptions(PetscBag bag) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBagView - Views a bag of values as either ASCII text or a binary file Collective @@ -747,7 +747,7 @@ PetscErrorCode PetscBagView(PetscBag bag, PetscViewer view) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBagViewFromOptions - Processes command line options to determine if/how a `PetscBag` is to be viewed. Collective @@ -787,7 +787,7 @@ PetscErrorCode PetscBagViewFromOptions(PetscBag bag, PetscObject bobj, const cha PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBagLoad - Loads a bag of values from a binary file Collective @@ -879,9 +879,9 @@ PetscErrorCode PetscBagLoad(PetscViewer view, PetscBag bag) Notes: After creating the bag, for each entry in the C struct call the appropriate `PetscBagRegisterInt()` etc to define the C structs layout - The size of the A struct must be small enough to fit in a `PetscInt`; by default + The size of the struct must be small enough to fit in a `PetscInt`; by default `PetscInt` is 4 bytes; this means a bag cannot be larger than 2 gigabytes in length. 
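The PetscBag notes above are easiest to read next to a usage sketch. Assuming a hypothetical user struct Params (everything except the PetscBag* calls below is illustrative), whose size must fit in a PetscInt per the note above:

    typedef struct {
      PetscInt  n;   /* hypothetical parameter */
      PetscReal tol; /* hypothetical parameter */
    } Params;

    PetscBag bag;
    Params  *p;
    PetscCall(PetscBagCreate(PETSC_COMM_WORLD, sizeof(Params), &bag));
    PetscCall(PetscBagGetData(bag, (void **)&p));
    PetscCall(PetscBagSetName(bag, "params", "example parameter bag"));
    PetscCall(PetscBagRegisterInt(bag, &p->n, 10, "n", "problem size"));
    PetscCall(PetscBagRegisterReal(bag, &p->tol, 1.e-6, "tol", "tolerance"));
    PetscCall(PetscBagSetFromOptions(bag));
    PetscCall(PetscBagView(bag, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscBagDestroy(&bag));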
- The warning about casting to a shorter length can be ignored below unless your A struct is too large + The warning about casting to a shorter length can be ignored below unless your struct is too large .seealso: `PetscBag`, `PetscBagGetName()`, `PetscBagView()`, `PetscBagLoad()`, `PetscBagGetData()` `PetscBagRegisterReal()`, `PetscBagRegisterInt()`, `PetscBagRegisterBool()`, `PetscBagRegisterScalar()` @@ -893,9 +893,10 @@ PetscErrorCode PetscBagCreate(MPI_Comm comm, size_t bagsize, PetscBag *bag) PetscFunctionBegin; PetscAssertPointer(bag, 3); + PetscCall(PetscIntCast((PetscInt64)totalsize, NULL)); + PetscCall(PetscInfo(NULL, "Creating Bag with total size %d\n", (int)totalsize)); PetscCall(PetscCalloc(totalsize, bag)); - (*bag)->bagsize = totalsize; (*bag)->bagcomm = comm; (*bag)->bagprefix = NULL; @@ -903,7 +904,7 @@ PetscErrorCode PetscBagCreate(MPI_Comm comm, size_t bagsize, PetscBag *bag) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBagSetName - Sets the name of a bag of values Not Collective @@ -947,7 +948,7 @@ PetscErrorCode PetscBagSetName(PetscBag bag, const char *name, const char *help) `PetscBagRegisterReal()`, `PetscBagRegisterInt()`, `PetscBagRegisterBool()`, `PetscBagRegisterScalar()` `PetscBagSetFromOptions()`, `PetscBagCreate()`, `PetscBagDestroy()`, `PetscBagRegisterEnum()` @*/ -PetscErrorCode PetscBagGetName(PetscBag bag, char **name) +PetscErrorCode PetscBagGetName(PetscBag bag, const char **name) { PetscFunctionBegin; PetscAssertPointer(bag, 1); @@ -984,7 +985,7 @@ PetscErrorCode PetscBagGetData(PetscBag bag, void **data) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBagSetOptionsPrefix - Sets the prefix used for searching for all `PetscBag` items in the options database. diff --git a/src/sys/classes/bag/f90-custom/zbagf90.c b/src/sys/classes/bag/f90-custom/zbagf90.c index 07d1edd4e88..c9aff9e8321 100644 --- a/src/sys/classes/bag/f90-custom/zbagf90.c +++ b/src/sys/classes/bag/f90-custom/zbagf90.c @@ -5,9 +5,6 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscbagdestroy_ PETSCBAGDESTROY - #define petscbagview_ PETSCBAGVIEW - #define petscbagload_ PETSCBAGLOAD #define petscbaggetdata_ PETSCBAGGETDATA #define petscbagregisterint_ PETSCBAGREGISTERINT #define petscbagregisterint64_ PETSCBAGREGISTERINT64 @@ -18,13 +15,8 @@ #define petscbagregisterrealarray_ PETSCBAGREGISTERREALARRAY #define petscbagregisterbool_ PETSCBAGREGISTERBOOL #define petscbagregisterboolarray_ PETSCBAGREGISTERBOOLARRAY - #define petscbagsetname_ PETSCBAGSETNAME - #define petscbagsetoptionsprefix_ PETSCBAGSETOPTIONSPREFIX #define petscbagcreate_ PETSCBAGCREATE #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscbagdestroy_ petscbagdestroy - #define petscbagview_ petscbagview - #define petscbagload_ petscbagload #define petscbaggetdata_ petscbaggetdata #define petscbagregisterint_ petscbagregisterint #define petscbagregisterint64_ petscbagregisterint64 @@ -35,8 +27,6 @@ #define petscbagregisterrealarray_ petscbagregisterrealarray #define petscbagregisterbool_ petscbagregisterbool #define petscbagregisterboolarray_ petscbagregisterboolarray - #define petscbagsetname_ petscbagsetname - #define petscbagsetoptionsprefix_ petscbagsetoptionsprefix #define petscbagcreate_ petscbagcreate #endif @@ -45,25 +35,6 @@ PETSC_EXTERN void petscbagcreate_(MPI_Fint *comm, size_t *bagsize, PetscBag *bag *ierr = PetscBagCreate(MPI_Comm_f2c(*(comm)), *bagsize, bag); } -PETSC_EXTERN void petscbagdestroy_(PetscBag *bag, PetscErrorCode *ierr) -{ - *ierr = PetscBagDestroy(bag); -} - 
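One caller-visible change in the bag.c hunks above: PetscBagGetName() now returns the name through a const char ** rather than char **. Callers would update the declaration accordingly; a one-line sketch, assuming the bag variable from the sketch above:

    const char *name; /* was `char *name` before this signature change */
    PetscCall(PetscBagGetName(bag, &name)); /* borrowed pointer; do not free */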
-PETSC_EXTERN void petscbagview_(PetscBag *bag, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscBagView(*bag, v); -} - -PETSC_EXTERN void petscbagload_(PetscViewer *viewer, PetscBag *bag, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscBagLoad(v, *bag); -} - PETSC_EXTERN void petscbagregisterint_(PetscBag *bag, void *ptr, PetscInt *def, char *s1, char *s2, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T l1, PETSC_FORTRAN_CHARLEN_T l2) { char *t1, *t2; @@ -175,23 +146,3 @@ PETSC_EXTERN void petscbaggetdata_(PetscBag *bag, void **data, PetscErrorCode *i { *ierr = PetscBagGetData(*bag, data); } - -PETSC_EXTERN void petscbagsetname_(PetscBag *bag, char *ns, char *hs, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T nl, PETSC_FORTRAN_CHARLEN_T hl) -{ - char *nt, *ht; - FIXCHAR(ns, nl, nt); - FIXCHAR(hs, hl, ht); - *ierr = PetscBagSetName(*bag, nt, ht); - if (*ierr) return; - FREECHAR(ns, nt); - FREECHAR(hs, ht); -} - -PETSC_EXTERN void petscbagsetoptionsprefix_(PetscBag *bag, char *pre, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(pre, len, t); - *ierr = PetscBagSetOptionsPrefix(*bag, t); - if (*ierr) return; - FREECHAR(pre, t); -} diff --git a/src/sys/classes/bm/interfaces/bm.c b/src/sys/classes/bm/interfaces/bm.c index 21dbadf9af1..7563898bbec 100644 --- a/src/sys/classes/bm/interfaces/bm.c +++ b/src/sys/classes/bm/interfaces/bm.c @@ -43,7 +43,7 @@ PetscErrorCode PetscBenchInitializePackage(void) /*@C PetscBenchRegister - Adds a benchmark test, `PetscBenchType`, to the `PetscBench` package - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new benchmark @@ -64,7 +64,7 @@ PetscErrorCode PetscBenchRegister(const char sname[], PetscErrorCode (*function) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchReset - removes all the intermediate data structures in a `PetscBench` Collective @@ -86,7 +86,7 @@ PetscErrorCode PetscBenchReset(PetscBench bm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchDestroy - Destroys a `PetscBench` Collective @@ -196,7 +196,7 @@ PetscErrorCode PetscBenchSetFromOptions(PetscBench bm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchView - Views a PETSc benchmark `PetscBench` Collective @@ -219,7 +219,7 @@ PetscErrorCode PetscBenchView(PetscBench bm, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchViewFromOptions - Processes command line options to determine if/how a `PetscBench` is to be viewed. Collective @@ -242,7 +242,7 @@ PetscErrorCode PetscBenchViewFromOptions(PetscBench bm, PetscObject bobj, const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchCreate - Create a PETSc benchmark `PetscBench` object Collective @@ -262,14 +262,14 @@ PetscErrorCode PetscBenchCreate(MPI_Comm comm, PetscBench *bm) { PetscFunctionBegin; PetscAssertPointer(bm, 2); - *bm = NULL; PetscCall(PetscBenchInitializePackage()); + PetscCall(PetscHeaderCreate(*bm, BM_CLASSID, "BM", "PetscBench", "BM", comm, PetscBenchDestroy, PetscBenchView)); (*bm)->size = PETSC_DECIDE; PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchSetOptionsPrefix - Sets the prefix used for searching for all `PetscBench` items in the options database. 
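With PetscBenchCreate() now allocating through PetscHeaderCreate() (above), a PetscBench follows the usual PETSc object lifecycle. A hedged sketch using only the calls that appear in this file; the size value is hypothetical, and the benchmark type is left to the options database since no type names are listed here:

    PetscBench bm;
    PetscCall(PetscBenchCreate(PETSC_COMM_WORLD, &bm));
    PetscCall(PetscBenchSetSize(bm, 100000)); /* hypothetical benchmark size */
    PetscCall(PetscBenchSetFromOptions(bm));  /* type selectable via the options database */
    PetscCall(PetscBenchView(bm, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscBenchDestroy(&bm));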
Logically Collective @@ -291,7 +291,7 @@ PetscErrorCode PetscBenchSetOptionsPrefix(PetscBench bm, const char pre[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchSetSize - Sets the size of the `PetscBench` benchmark to run Logically Collective @@ -318,7 +318,7 @@ PetscErrorCode PetscBenchSetSize(PetscBench bm, PetscInt n) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchGetSize - Gets the size of the `PetscBench` benchmark to run Logically Collective @@ -343,7 +343,7 @@ PetscErrorCode PetscBenchGetSize(PetscBench bm, PetscInt *n) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchSetType - set the type of `PetscBench` benchmark to run Collective @@ -391,7 +391,7 @@ PetscErrorCode PetscBenchSetType(PetscBench bm, PetscBenchType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBenchGetType - Gets the `PetscBenchType` (as a string) from the `PetscBench` context. diff --git a/src/sys/classes/bm/interfaces/ftn-custom/makefile b/src/sys/classes/bm/interfaces/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/sys/classes/bm/interfaces/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/classes/bm/interfaces/ftn-custom/zbmf.c b/src/sys/classes/bm/interfaces/ftn-custom/zbmf.c deleted file mode 100644 index 5aee10eac03..00000000000 --- a/src/sys/classes/bm/interfaces/ftn-custom/zbmf.c +++ /dev/null @@ -1,53 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscbmsettype_ PETSCBMSETTYPE - #define petscbmgettype_ PETSCBMGETTYPE - #define petscbmsetoptionsprefix_ PETSCBMSETOPTIONSPREFIX - #define petscbmviewfromoptions_ PETSCBMVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscbmsettype_ petscbmsettype - #define petscbmgettype_ petscbmgettype - #define petscbmsetoptionsprefix_ petscbmsetoptionsprefix - #define petscbmviewfromoptions_ petscbmviewfromoptions -#endif - -PETSC_EXTERN void petscbmsettype_(PetscBench *ctx, char *text, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(text, len, t); - *ierr = PetscBenchSetType(*ctx, t); - if (*ierr) return; - FREECHAR(text, t); -} - -PETSC_EXTERN void petscbmgettype_(PetscBench *bm, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PetscBenchGetType(*bm, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void petscbmsetoptionsprefix_(PetscBench *ctx, char *text, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(text, len, t); - *ierr = PetscBenchSetOptionsPrefix(*ctx, t); - if (*ierr) return; - FREECHAR(text, t); -} - -PETSC_EXTERN void petscbmviewfromoptions_(PetscBench *bm, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscBenchViewFromOptions(*bm, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/sys/classes/draw/impls/image/drawimage.c b/src/sys/classes/draw/impls/image/drawimage.c index a1cf364ae96..9e5fc71358e 100644 --- a/src/sys/classes/draw/impls/image/drawimage.c +++ b/src/sys/classes/draw/impls/image/drawimage.c @@ -590,7 +590,7 @@ PETSC_EXTERN PetscErrorCode PetscDrawCreate_Image(PetscDraw draw) 
PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawOpenImage - Opens an image for use with the `PetscDraw` routines. Collective diff --git a/src/sys/classes/draw/impls/x/drawopenx.c b/src/sys/classes/draw/impls/x/drawopenx.c index e29fbe0564a..f78a30a82e8 100644 --- a/src/sys/classes/draw/impls/x/drawopenx.c +++ b/src/sys/classes/draw/impls/x/drawopenx.c @@ -2,9 +2,9 @@ Defines the operations for the X PetscDraw implementation. */ -#include <../src/sys/classes/draw/impls/x/ximpl.h> /*I "petscsys.h" I*/ +#include <../src/sys/classes/draw/impls/x/ximpl.h> /*I "petscdraw.h" I*/ -/*@C +/*@ PetscDrawOpenX - Opens an X-window for use with the `PetscDraw` routines. Collective diff --git a/src/sys/classes/draw/impls/x/ftn-custom/makefile b/src/sys/classes/draw/impls/x/ftn-custom/makefile deleted file mode 100644 index 08508c660fe..00000000000 --- a/src/sys/classes/draw/impls/x/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/classes/draw/impls/x/ftn-custom/zdrawopenxf.c b/src/sys/classes/draw/impls/x/ftn-custom/zdrawopenxf.c deleted file mode 100644 index f360d4abf46..00000000000 --- a/src/sys/classes/draw/impls/x/ftn-custom/zdrawopenxf.c +++ /dev/null @@ -1,20 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawopenx_ PETSCDRAWOPENX -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawopenx_ petscdrawopenx -#endif - -PETSC_EXTERN void petscdrawopenx_(MPI_Comm *comm, char *display, char *title, int *x, int *y, int *w, int *h, PetscDraw *inctx, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2) -{ - char *t1, *t2; - - FIXCHAR(display, len1, t1); - FIXCHAR(title, len2, t2); - *ierr = PetscDrawOpenX(MPI_Comm_f2c(*(MPI_Fint *)&*comm), t1, t2, *x, *y, *w, *h, inctx); - if (*ierr) return; - FREECHAR(display, t1); - FREECHAR(title, t2); -} diff --git a/src/sys/classes/draw/interface/draw.c b/src/sys/classes/draw/interface/draw.c index 38e304834b0..93f79de9dd2 100644 --- a/src/sys/classes/draw/interface/draw.c +++ b/src/sys/classes/draw/interface/draw.c @@ -148,7 +148,7 @@ PetscErrorCode PetscDrawCheckResizedWindow(PetscDraw draw) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawGetTitle - Gets pointer to title of a `PetscDraw` context. Not Collective @@ -172,7 +172,7 @@ PetscErrorCode PetscDrawGetTitle(PetscDraw draw, const char *title[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawSetTitle - Sets the title of a `PetscDraw` context. Collective @@ -205,7 +205,7 @@ PetscErrorCode PetscDrawSetTitle(PetscDraw draw, const char title[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawAppendTitle - Appends to the title of a `PetscDraw` context. Collective @@ -331,7 +331,7 @@ PetscErrorCode PetscDrawGetPopup(PetscDraw draw, PetscDraw *popup) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawSetDisplay - Sets the display where a `PetscDraw` object will be displayed Input Parameters: @@ -370,7 +370,7 @@ PetscErrorCode PetscDrawSetDoubleBuffer(PetscDraw draw) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawGetSingleton - Gain access to a `PetscDraw` object as if it were owned by the one process. 
@@ -406,7 +406,7 @@ PetscErrorCode PetscDrawGetSingleton(PetscDraw draw, PetscDraw *sdraw) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawRestoreSingleton - Remove access to a `PetscDraw` object obtained with `PetscDrawGetSingleton()` by the one process. @@ -439,7 +439,7 @@ PetscErrorCode PetscDrawRestoreSingleton(PetscDraw draw, PetscDraw *sdraw) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawSetVisible - Sets if the drawing surface (the 'window') is visible on its display. Input Parameters: diff --git a/src/sys/classes/draw/interface/drawreg.c b/src/sys/classes/draw/interface/drawreg.c index 03eb2d8c31b..a6f28ece25f 100644 --- a/src/sys/classes/draw/interface/drawreg.c +++ b/src/sys/classes/draw/interface/drawreg.c @@ -12,7 +12,7 @@ */ PetscFunctionList PetscDrawList = NULL; -/*@C +/*@ PetscDrawView - Prints the `PetscDraw` data structure. Collective @@ -80,7 +80,7 @@ PetscErrorCode PetscDrawView(PetscDraw indraw, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawViewFromOptions - View a `PetscDraw` from the option database Collective @@ -102,7 +102,7 @@ PetscErrorCode PetscDrawViewFromOptions(PetscDraw A, PetscObject obj, const char PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawCreate - Creates a graphics context. Collective @@ -139,10 +139,10 @@ PetscErrorCode PetscDrawCreate(MPI_Comm comm, const char display[], const char t PetscBool flag; PetscFunctionBegin; + PetscAssertPointer(indraw, 8); PetscCall(PetscDrawInitializePackage()); - *indraw = NULL; - PetscCall(PetscHeaderCreate(draw, PETSC_DRAW_CLASSID, "Draw", "Graphics", "Draw", comm, PetscDrawDestroy, PetscDrawView)); + PetscCall(PetscHeaderCreate(draw, PETSC_DRAW_CLASSID, "Draw", "Graphics", "Draw", comm, PetscDrawDestroy, PetscDrawView)); draw->data = NULL; PetscCall(PetscStrallocpy(display, &draw->display)); PetscCall(PetscStrallocpy(title, &draw->title)); @@ -182,7 +182,7 @@ PetscErrorCode PetscDrawCreate(MPI_Comm comm, const char display[], const char t PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawSetType - Builds graphics object for a particular implementation Collective @@ -259,7 +259,7 @@ PetscErrorCode PetscDrawSetType(PetscDraw draw, PetscDrawType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawGetType - Gets the `PetscDraw` type as a string from the `PetscDraw` object. Not Collective @@ -286,7 +286,7 @@ PetscErrorCode PetscDrawGetType(PetscDraw draw, PetscDrawType *type) /*@C PetscDrawRegister - Adds a method to the graphics package. - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined graphics class @@ -317,7 +317,7 @@ PetscErrorCode PetscDrawRegister(const char *sname, PetscErrorCode (*function)(P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawSetOptionsPrefix - Sets the prefix used for searching for all `PetscDraw` options in the database. 
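As a reference for the `PetscDrawCreate()` hunk above (which now asserts the output pointer and relies on `PetscHeaderCreate()` to zero it), the usual calling sequence is sketched below; the helper name, title, and window size are illustrative, not from this patch:

#include <petscdraw.h>

PetscErrorCode MakeDraw(MPI_Comm comm, PetscDraw *draw)
{
  PetscFunctionBeginUser;
  PetscCall(PetscDrawCreate(comm, NULL, "Example", PETSC_DECIDE, PETSC_DECIDE, 300, 300, draw));
  PetscCall(PetscDrawSetFromOptions(*draw)); /* picks the type, e.g. -draw_type x or -draw_type image */
  PetscFunctionReturn(PETSC_SUCCESS);
}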
diff --git a/src/sys/classes/draw/interface/drect.c b/src/sys/classes/draw/interface/drect.c index e97be94e278..81707cb5dcc 100644 --- a/src/sys/classes/draw/interface/drect.c +++ b/src/sys/classes/draw/interface/drect.c @@ -48,7 +48,7 @@ PetscErrorCode PetscDrawIndicatorFunction(PetscDraw draw, PetscReal xmin, PetscR PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawCoordinateToPixel - given a coordinate in a `PetscDraw` returns the pixel location Not Collective @@ -74,7 +74,7 @@ PetscErrorCode PetscDrawCoordinateToPixel(PetscDraw draw, PetscReal x, PetscReal PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawPixelToCoordinate - given a pixel in a `PetscDraw` returns the coordinate Not Collective diff --git a/src/sys/classes/draw/interface/dsave.c b/src/sys/classes/draw/interface/dsave.c index 97dffb57adb..fbd52eabbb4 100644 --- a/src/sys/classes/draw/interface/dsave.c +++ b/src/sys/classes/draw/interface/dsave.c @@ -9,7 +9,7 @@ PETSC_EXTERN PetscErrorCode PetscDrawMovieCheckFormat(const char *[]); static PetscErrorCode PetscDrawSave_SAWs(PetscDraw); #endif -/*@C +/*@ PetscDrawSetSave - Saves images produced in a `PetscDraw` into a file Collective @@ -76,7 +76,7 @@ PetscErrorCode PetscDrawSetSave(PetscDraw draw, const char filename[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawSetSaveMovie - Saves a movie produced from a `PetscDraw` into a file Collective @@ -111,7 +111,7 @@ PetscErrorCode PetscDrawSetSaveMovie(PetscDraw draw, const char movieext[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawSetSaveFinalImage - Saves the final image produced in a `PetscDraw` into a file Collective diff --git a/src/sys/classes/draw/interface/dtext.c b/src/sys/classes/draw/interface/dtext.c index 402e39ebd68..d47e3e8e2fe 100644 --- a/src/sys/classes/draw/interface/dtext.c +++ b/src/sys/classes/draw/interface/dtext.c @@ -1,6 +1,6 @@ #include /*I "petscdraw.h" I*/ -/*@C +/*@ PetscDrawString - draws text onto a drawable. Not Collective @@ -27,7 +27,7 @@ PetscErrorCode PetscDrawString(PetscDraw draw, PetscReal xl, PetscReal yl, int c PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawStringVertical - draws text onto a drawable. 
Not Collective @@ -62,7 +62,7 @@ PetscErrorCode PetscDrawStringVertical(PetscDraw draw, PetscReal xl, PetscReal y PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawStringCentered - draws text onto a drawable centered at a point Not Collective @@ -95,7 +95,7 @@ PetscErrorCode PetscDrawStringCentered(PetscDraw draw, PetscReal xc, PetscReal y PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawStringBoxed - Draws a string with a box around it Not Collective diff --git a/src/sys/classes/draw/interface/dtri.c b/src/sys/classes/draw/interface/dtri.c index b626869fa33..fd2c837260a 100644 --- a/src/sys/classes/draw/interface/dtri.c +++ b/src/sys/classes/draw/interface/dtri.c @@ -106,7 +106,7 @@ static PetscErrorCode PetscDrawTensorContour_Zoom(PetscDraw win, void *dctx) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawTensorContour - draws a contour plot for a two-dimensional array Collective, but `draw` must be sequential diff --git a/src/sys/classes/draw/interface/dviewp.c b/src/sys/classes/draw/interface/dviewp.c index 9f4ded7518a..14652d37ad1 100644 --- a/src/sys/classes/draw/interface/dviewp.c +++ b/src/sys/classes/draw/interface/dviewp.c @@ -144,9 +144,12 @@ PetscErrorCode PetscDrawSplitViewPort(PetscDraw draw) Level: advanced + Fortran Note: + No Fortran support since `PetscDrawViewPorts` is a C struct + .seealso: `PetscDrawSplitViewPort()`, `PetscDrawSetViewPort()`, `PetscDrawViewPortsSet()`, `PetscDrawViewPortsDestroy()` @*/ -PetscErrorCode PetscDrawViewPortsCreate(PetscDraw draw, PetscInt nports, PetscDrawViewPorts **newports) +PetscErrorCode PetscDrawViewPortsCreate(PetscDraw draw, PetscInt nports, PetscDrawViewPorts *newports[]) { PetscDrawViewPorts *ports; PetscInt i, n; @@ -225,9 +228,12 @@ PetscErrorCode PetscDrawViewPortsCreate(PetscDraw draw, PetscInt nports, PetscDr Level: advanced + Fortran Note: + No Fortran support since `PetscDrawViewPorts` is a C struct + .seealso: `PetscDrawSplitViewPort()`, `PetscDrawSetViewPort()`, `PetscDrawViewPortsSet()`, `PetscDrawViewPortsDestroy()`, `PetscDrawViewPorts` @*/ -PetscErrorCode PetscDrawViewPortsCreateRect(PetscDraw draw, PetscInt nx, PetscInt ny, PetscDrawViewPorts **newports) +PetscErrorCode PetscDrawViewPortsCreateRect(PetscDraw draw, PetscInt nx, PetscInt ny, PetscDrawViewPorts *newports[]) { PetscDrawViewPorts *ports; PetscReal *xl, *xr, *yl, *yr, hx, hy; @@ -302,6 +308,9 @@ PetscErrorCode PetscDrawViewPortsCreateRect(PetscDraw draw, PetscInt nx, PetscIn Level: advanced + Fortran Note: + No Fortran support since `PetscDrawViewPorts` is a C struct + .seealso: `PetscDrawViewPorts`, `PetscDrawSplitViewPort()`, `PetscDrawSetViewPort()`, `PetscDrawViewPortsSet()`, `PetscDrawViewPortsCreate()` @*/ PetscErrorCode PetscDrawViewPortsDestroy(PetscDrawViewPorts *ports) @@ -328,6 +337,9 @@ PetscErrorCode PetscDrawViewPortsDestroy(PetscDrawViewPorts *ports) Level: advanced + Fortran Note: + No Fortran support since `PetscDrawViewPorts` is a C struct + .seealso: `PetscDrawViewPorts`, `PetscDrawSplitViewPort()`, `PetscDrawSetViewPort()`, `PetscDrawViewPortsDestroy()`, `PetscDrawViewPortsCreate()` @*/ PetscErrorCode PetscDrawViewPortsSet(PetscDrawViewPorts *ports, PetscInt port) diff --git a/src/sys/classes/draw/interface/ftn-custom/makefile b/src/sys/classes/draw/interface/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/sys/classes/draw/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - 
-include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/classes/draw/interface/ftn-custom/zdrawf.c b/src/sys/classes/draw/interface/ftn-custom/zdrawf.c deleted file mode 100644 index f71c8762c66..00000000000 --- a/src/sys/classes/draw/interface/ftn-custom/zdrawf.c +++ /dev/null @@ -1,60 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawgettitle_ PETSCDRAWGETTITLE - #define petscdrawsettitle_ PETSCDRAWSETTITLE - #define petscdrawappendtitle_ PETSCDRAWAPPENDTITLE - #define petscdrawsetsavefinalimage_ PETSCDRAWSETSAVEFINALIMAGE - #define petscdrawsetsave_ PETSCDRAWSETSAVE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawgettitle_ petscdrawgettitle - #define petscdrawsettitle_ petscdrawsettitle - #define petscdrawappendtitle_ petscdrawappendtitle - #define petscdrawsetsavefinalimage_ petscdrawsetsavefinalimage - #define petscdrawsetsave_ petscdrawsetsave -#endif - -PETSC_EXTERN void petscdrawgettitle_(PetscDraw *draw, char *title, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *t; - *ierr = PetscDrawGetTitle(*draw, &t); - *ierr = PetscStrncpy(title, t, len); - FIXRETURNCHAR(PETSC_TRUE, title, len); -} - -PETSC_EXTERN void petscdrawsettitle_(PetscDraw *draw, char *title, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t1; - FIXCHAR(title, len, t1); - *ierr = PetscDrawSetTitle(*draw, t1); - if (*ierr) return; - FREECHAR(title, t1); -} - -PETSC_EXTERN void petscdrawappendtitle_(PetscDraw *draw, char *title, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t1; - FIXCHAR(title, len, t1); - *ierr = PetscDrawAppendTitle(*draw, t1); - if (*ierr) return; - FREECHAR(title, t1); -} - -PETSC_EXTERN void petscdrawsetsavefinalimage_(PetscDraw *draw, char *filename, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t1; - FIXCHAR(filename, len, t1); - *ierr = PetscDrawSetSaveFinalImage(*draw, t1); - if (*ierr) return; - FREECHAR(filename, t1); -} - -PETSC_EXTERN void petscdrawsetsave_(PetscDraw *draw, char *filename, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t1; - FIXCHAR(filename, len, t1); - *ierr = PetscDrawSetSave(*draw, t1); - if (*ierr) return; - FREECHAR(filename, t1); -} diff --git a/src/sys/classes/draw/interface/ftn-custom/zdrawregf.c b/src/sys/classes/draw/interface/ftn-custom/zdrawregf.c deleted file mode 100644 index a45b4871900..00000000000 --- a/src/sys/classes/draw/interface/ftn-custom/zdrawregf.c +++ /dev/null @@ -1,55 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawsettype_ PETSCDRAWSETTYPE - #define petscdrawcreate_ PETSCDRAWCREATE - #define petscdrawsetoptionsprefix_ PETSCDRAWSETOPTIONSPREFIX - #define petscdrawviewfromoptions_ PETSCDRAWVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawsettype_ petscdrawsettype - #define petscdrawcreate_ petscdrawcreate - #define petscdrawsetoptionsprefix_ petscdrawsetoptionsprefix - #define petscdrawviewfromoptions_ petscdrawviewfromoptions -#endif - -PETSC_EXTERN void petscdrawsettype_(PetscDraw *ctx, char *text, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(text, len, t); - *ierr = PetscDrawSetType(*ctx, t); - if (*ierr) return; - FREECHAR(text, t); -} - -PETSC_EXTERN void petscdrawcreate_(MPI_Comm *comm, char *display, char *title, int *x, int *y, int *w, int *h, PetscDraw *inctx, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2) 
-{ - char *t1, *t2; - - FIXCHAR(display, len1, t1); - FIXCHAR(title, len2, t2); - *ierr = PetscDrawCreate(MPI_Comm_f2c(*(MPI_Fint *)&*comm), t1, t2, *x, *y, *w, *h, inctx); - if (*ierr) return; - FREECHAR(display, t1); - FREECHAR(title, t2); -} - -PETSC_EXTERN void petscdrawsetoptionsprefix_(PetscDraw *ctx, char *text, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(text, len, t); - *ierr = PetscDrawSetOptionsPrefix(*ctx, t); - if (*ierr) return; - FREECHAR(text, t); -} - -PETSC_EXTERN void petscdrawviewfromoptions_(PetscDraw *draw, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscDrawViewFromOptions(*draw, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/sys/classes/draw/interface/ftn-custom/zdtextf.c b/src/sys/classes/draw/interface/ftn-custom/zdtextf.c deleted file mode 100644 index 2fab5007d6c..00000000000 --- a/src/sys/classes/draw/interface/ftn-custom/zdtextf.c +++ /dev/null @@ -1,32 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawstring_ PETSCDRAWSTRING -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawstring_ petscdrawstring -#endif - -PETSC_EXTERN void petscdrawstring_(PetscDraw *ctx, double *xl, double *yl, int *cl, char *text, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(text, len, t); - *ierr = PetscDrawString(*ctx, *xl, *yl, *cl, t); - if (*ierr) return; - FREECHAR(text, t); -} - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawstringvertical_ PETSCDRAWSTRINGVERTICAL -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawstringvertical_ petscdrawstringvertical -#endif - -PETSC_EXTERN void petscdrawstringvertical_(PetscDraw *ctx, double *xl, double *yl, int *cl, char *text, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(text, len, t); - *ierr = PetscDrawStringVertical(*ctx, *xl, *yl, *cl, t); - if (*ierr) return; - FREECHAR(text, t); -} diff --git a/src/sys/classes/draw/interface/ftn-custom/zdtrif.c b/src/sys/classes/draw/interface/ftn-custom/zdtrif.c deleted file mode 100644 index 25680383612..00000000000 --- a/src/sys/classes/draw/interface/ftn-custom/zdtrif.c +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawtensorcontour_ PETSCDRAWTENSORCONTOUR -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawtensorcontour_ petscdrawtensorcontour -#endif - -PETSC_EXTERN void petscdrawtensorcontour_(PetscDraw *win, int *m, int *n, PetscReal *x, PetscReal *y, PetscReal *V, PetscErrorCode *ierr) -{ - CHKFORTRANNULLREAL(x); - CHKFORTRANNULLREAL(y); - *ierr = PetscDrawTensorContour(*win, *m, *n, x, y, V); -} diff --git a/src/sys/classes/draw/tests/ex4f.F90 b/src/sys/classes/draw/tests/ex4f.F90 index 0aaab24691c..6014cacf5ee 100644 --- a/src/sys/classes/draw/tests/ex4f.F90 +++ b/src/sys/classes/draw/tests/ex4f.F90 @@ -24,7 +24,6 @@ subroutine zoomfunction(draw,dummy,ierr) value = i/max PetscCall(PetscDrawLine(draw,zero,value,one,value,i,ierr)) 10 continue - return end program main diff --git a/src/sys/classes/draw/utils/axisc.c b/src/sys/classes/draw/utils/axisc.c index 7423fbb9ca5..151258924b5 100644 --- a/src/sys/classes/draw/utils/axisc.c +++ b/src/sys/classes/draw/utils/axisc.c @@ -33,10 +33,8 @@ PetscErrorCode PetscDrawAxisCreate(PetscDraw draw, PetscDrawAxis *axis) PetscAssertPointer(axis, 2); 
PetscCall(PetscHeaderCreate(ad, PETSC_DRAWAXIS_CLASSID, "DrawAxis", "Draw Axis", "Draw", PetscObjectComm((PetscObject)draw), PetscDrawAxisDestroy, NULL)); - PetscCall(PetscObjectReference((PetscObject)draw)); - ad->win = draw; - + ad->win = draw; ad->xticks = PetscADefTicks; ad->yticks = PetscADefTicks; ad->xlabelstr = PetscADefLabel; @@ -47,8 +45,7 @@ PetscErrorCode PetscDrawAxisCreate(PetscDraw draw, PetscDrawAxis *axis) ad->xlabel = NULL; ad->ylabel = NULL; ad->toplabel = NULL; - - *axis = ad; + *axis = ad; PetscFunctionReturn(PETSC_SUCCESS); } @@ -111,7 +108,7 @@ PetscErrorCode PetscDrawAxisSetColors(PetscDrawAxis axis, int ac, int tc, int cc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawAxisSetLabels - Sets the x and y axis labels. Logically Collective diff --git a/src/sys/classes/draw/utils/bars.c b/src/sys/classes/draw/utils/bars.c index 41770016fe4..8d81c83acc3 100644 --- a/src/sys/classes/draw/utils/bars.c +++ b/src/sys/classes/draw/utils/bars.c @@ -7,7 +7,7 @@ PetscClassId PETSC_DRAWBAR_CLASSID = 0; -/*@C +/*@ PetscDrawBarCreate - Creates a bar graph data structure. Collective @@ -41,21 +41,17 @@ PetscErrorCode PetscDrawBarCreate(PetscDraw draw, PetscDrawBar *bar) PetscAssertPointer(bar, 2); PetscCall(PetscHeaderCreate(h, PETSC_DRAWBAR_CLASSID, "DrawBar", "Bar Graph", "Draw", PetscObjectComm((PetscObject)draw), PetscDrawBarDestroy, NULL)); - PetscCall(PetscObjectReference((PetscObject)draw)); - h->win = draw; - + h->win = draw; h->view = NULL; h->destroy = NULL; h->color = PETSC_DRAW_GREEN; h->ymin = 0.; /* if user has not set these then they are determined from the data */ h->ymax = 0.; h->numBins = 0; - PetscCall(PetscDrawAxisCreate(draw, &h->axis)); h->axis->xticks = NULL; - - *bar = h; + *bar = h; PetscFunctionReturn(PETSC_SUCCESS); } @@ -68,18 +64,18 @@ PetscErrorCode PetscDrawBarCreate(PetscDraw draw, PetscDrawBar *bar) + bar - The bar graph context. . bins - number of items . data - values of each item -- labels - optional label for each bar, NULL terminated array of strings +- labels - optional label for each bar, `NULL` terminated array of strings Level: intermediate Notes: Call `PetscDrawBarDraw()` after this call to display the new plot - The data is ignored on all ranks except zero + The data is ignored on all MPI processes except rank zero .seealso: `PetscDrawBar`, `PetscDrawBarCreate()`, `PetscDrawBarDraw()` @*/ -PetscErrorCode PetscDrawBarSetData(PetscDrawBar bar, PetscInt bins, const PetscReal data[], const char *const *labels) +PetscErrorCode PetscDrawBarSetData(PetscDrawBar bar, PetscInt bins, const PetscReal data[], const char *const labels[]) { PetscFunctionBegin; PetscValidHeaderSpecific(bar, PETSC_DRAWBAR_CLASSID, 1); @@ -95,7 +91,7 @@ PetscErrorCode PetscDrawBarSetData(PetscDrawBar bar, PetscInt bins, const PetscR PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawBarDestroy - Frees all space taken up by bar graph data structure. Collective @@ -312,7 +308,7 @@ PetscErrorCode PetscDrawBarSetLimits(PetscDrawBar bar, PetscReal y_min, PetscRea PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawBarGetAxis - Gets the axis context associated with a bar graph. This is useful if one wants to change some axis property, such as labels, color, etc. The axis context should not be destroyed by the @@ -339,7 +335,7 @@ PetscErrorCode PetscDrawBarGetAxis(PetscDrawBar bar, PetscDrawAxis *axis) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawBarGetDraw - Gets the draw context associated with a bar graph. 
Not Collective, draw is parallel if bar is parallel diff --git a/src/sys/classes/draw/utils/dscatter.c b/src/sys/classes/draw/utils/dscatter.c index 81b1b7088d0..43231cb5261 100644 --- a/src/sys/classes/draw/utils/dscatter.c +++ b/src/sys/classes/draw/utils/dscatter.c @@ -9,7 +9,7 @@ PetscClassId PETSC_DRAWSP_CLASSID = 0; -/*@C +/*@ PetscDrawSPCreate - Creates a scatter plot data structure. Collective @@ -239,7 +239,7 @@ PetscErrorCode PetscDrawSPAddPoint(PetscDrawSP sp, PetscReal *x, PetscReal *y) .seealso: `PetscDrawSPAddPoint()`, `PetscDrawSP`, `PetscDrawSPCreate()`, `PetscDrawSPReset()`, `PetscDrawSPDraw()`, `PetscDrawSPAddPointColorized()` @*/ -PetscErrorCode PetscDrawSPAddPoints(PetscDrawSP sp, int n, PetscReal **xx, PetscReal **yy) +PetscErrorCode PetscDrawSPAddPoints(PetscDrawSP sp, int n, PetscReal *xx[], PetscReal *yy[]) { PetscInt i, j, k; PetscReal *x, *y; diff --git a/src/sys/classes/draw/utils/ftn-custom/zaxisf.c b/src/sys/classes/draw/utils/ftn-custom/zaxisf.c deleted file mode 100644 index ad5317b4ff4..00000000000 --- a/src/sys/classes/draw/utils/ftn-custom/zaxisf.c +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawaxissetlabels_ PETSCDRAWAXISSETLABELS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawaxissetlabels_ petscdrawaxissetlabels -#endif - -PETSC_EXTERN void petscdrawaxissetlabels_(PetscDrawAxis *axis, char *top, char *xlabel, char *ylabel, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2, PETSC_FORTRAN_CHARLEN_T len3) -{ - char *t1, *t2, *t3; - - FIXCHAR(top, len1, t1); - FIXCHAR(xlabel, len2, t2); - FIXCHAR(ylabel, len3, t3); - *ierr = PetscDrawAxisSetLabels(*axis, t1, t2, t3); - if (*ierr) return; - FREECHAR(top, t1); - FREECHAR(xlabel, t2); - FREECHAR(ylabel, t3); -} diff --git a/src/sys/classes/draw/utils/ftn-custom/zlgcf.c b/src/sys/classes/draw/utils/ftn-custom/zlgcf.c deleted file mode 100644 index 32bfc3914eb..00000000000 --- a/src/sys/classes/draw/utils/ftn-custom/zlgcf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscdrawlgsetoptionsprefix_ PETSCDRAWLGSETOPTIONSPREFIX -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscdrawlgsetoptionsprefix_ petscdrawlgsetoptionsprefix -#endif - -PETSC_EXTERN void petscdrawlgsetoptionsprefix_(PetscDrawLG *lg, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(prefix, len, t); - *ierr = PetscDrawLGSetOptionsPrefix(*lg, t); - if (*ierr) return; - FREECHAR(prefix, t); -} diff --git a/src/sys/classes/draw/utils/hists.c b/src/sys/classes/draw/utils/hists.c index 81aad7397b2..f16c9073534 100644 --- a/src/sys/classes/draw/utils/hists.c +++ b/src/sys/classes/draw/utils/hists.c @@ -28,7 +28,7 @@ struct _p_PetscDrawHG { #define CHUNKSIZE 100 -/*@C +/*@ PetscDrawHGCreate - Creates a histogram data structure. 
Collective @@ -64,10 +64,8 @@ PetscErrorCode PetscDrawHGCreate(PetscDraw draw, int bins, PetscDrawHG *hist) PetscAssertPointer(hist, 3); PetscCall(PetscHeaderCreate(h, PETSC_DRAWHG_CLASSID, "DrawHG", "Histogram", "Draw", PetscObjectComm((PetscObject)draw), PetscDrawHGDestroy, NULL)); - PetscCall(PetscObjectReference((PetscObject)draw)); - h->win = draw; - + h->win = draw; h->view = NULL; h->destroy = NULL; h->color = PETSC_DRAW_GREEN; @@ -77,17 +75,13 @@ PetscErrorCode PetscDrawHGCreate(PetscDraw draw, int bins, PetscDrawHG *hist) h->ymax = 1.; h->numBins = bins; h->maxBins = bins; - PetscCall(PetscMalloc1(h->maxBins, &h->bins)); - h->numValues = 0; h->maxValues = CHUNKSIZE; h->calcStats = PETSC_FALSE; h->integerBins = PETSC_FALSE; - PetscCall(PetscMalloc1(h->maxValues, &h->values)); PetscCall(PetscDrawAxisCreate(draw, &h->axis)); - *hist = h; PetscFunctionReturn(PETSC_SUCCESS); } @@ -145,7 +139,7 @@ PetscErrorCode PetscDrawHGReset(PetscDrawHG hist) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawHGDestroy - Frees all space taken up by histogram data structure. Collective @@ -585,7 +579,7 @@ PetscErrorCode PetscDrawHGIntegerBins(PetscDrawHG hist, PetscBool ints) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawHGGetAxis - Gets the axis context associated with a histogram. This is useful if one wants to change some axis property, such as labels, color, etc. The axis context should not be destroyed by the @@ -612,7 +606,7 @@ PetscErrorCode PetscDrawHGGetAxis(PetscDrawHG hist, PetscDrawAxis *axis) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawHGGetDraw - Gets the draw context associated with a histogram. Not Collective, draw is parallel if hist is parallel diff --git a/src/sys/classes/draw/utils/lg.c b/src/sys/classes/draw/utils/lg.c index 51beaedaf88..e2914bd47a5 100644 --- a/src/sys/classes/draw/utils/lg.c +++ b/src/sys/classes/draw/utils/lg.c @@ -127,7 +127,7 @@ PetscErrorCode PetscDrawLGAddPoint(PetscDrawLG lg, const PetscReal *x, const Pet .seealso: `PetscDrawLG`, `PetscDrawLGCreate()`, `PetscDrawLGAddPoint()`, `PetscDrawLGAddCommonPoint()`, `PetscDrawLGReset()`, `PetscDrawLGDraw()` @*/ -PetscErrorCode PetscDrawLGAddPoints(PetscDrawLG lg, PetscInt n, PetscReal **xx, PetscReal **yy) +PetscErrorCode PetscDrawLGAddPoints(PetscDrawLG lg, PetscInt n, PetscReal *xx[], PetscReal *yy[]) { PetscInt i, j, k; PetscReal *x, *y; diff --git a/src/sys/classes/draw/utils/lgc.c b/src/sys/classes/draw/utils/lgc.c index 0f19a33e21f..6603a87549f 100644 --- a/src/sys/classes/draw/utils/lgc.c +++ b/src/sys/classes/draw/utils/lgc.c @@ -220,7 +220,7 @@ PetscErrorCode PetscDrawLGSetColors(PetscDrawLG lg, const int colors[]) .seealso: `PetscDrawLGGetAxis()`, `PetscDrawAxis`, `PetscDrawAxisSetColors()`, `PetscDrawAxisSetLabels()`, `PetscDrawAxisSetHoldLimits()` @*/ -PetscErrorCode PetscDrawLGSetLegend(PetscDrawLG lg, const char *const *names) +PetscErrorCode PetscDrawLGSetLegend(PetscDrawLG lg, const char *const names[]) { PetscInt i; @@ -549,7 +549,7 @@ PetscErrorCode PetscDrawLGView(PetscDrawLG lg, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDrawLGSetOptionsPrefix - Sets the prefix used for searching for all `PetscDrawLG` options in the database. 
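The `const char *const names[]` form adopted for `PetscDrawLGSetLegend()` above matches how a fixed legend is usually passed. A minimal sketch under that signature; the helper name, curve count, and labels are illustrative:

#include <petscdraw.h>

/* plot one value per curve on an existing draw context */
static PetscErrorCode PlotPair(PetscDraw draw, PetscReal t, PetscReal r0, PetscReal r1)
{
  static const char *const legend[] = {"residual", "error"}; /* illustrative labels */
  PetscDrawLG lg;
  PetscReal   x[2] = {t, t}, y[2] = {r0, r1};

  PetscFunctionBeginUser;
  PetscCall(PetscDrawLGCreate(draw, 2, &lg));
  PetscCall(PetscDrawLGSetLegend(lg, legend));
  PetscCall(PetscDrawLGAddPoint(lg, x, y));
  PetscCall(PetscDrawLGDraw(lg));
  PetscCall(PetscDrawLGDestroy(&lg));
  PetscFunctionReturn(PETSC_SUCCESS);
}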
diff --git a/src/sys/classes/matlabengine/ftn-custom/zmatlabf.c b/src/sys/classes/matlabengine/ftn-custom/zmatlabf.c index 34d4a8c57d4..64fa80289e9 100644 --- a/src/sys/classes/matlabengine/ftn-custom/zmatlabf.c +++ b/src/sys/classes/matlabengine/ftn-custom/zmatlabf.c @@ -2,31 +2,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscmatlabenginecreate_ PETSCMATLABENGINECREATE - #define petscmatlabengineevaluate_ PETSCMATLABENGINEEVALUATE - #define petscmatlabenginegetoutput_ PETSCMATLABENGINEGETOUTPUT - #define petscmatlabengineprintoutput_ PETSCMATLABENGINEPRINTOUTPUT - #define petscmatlabengineputarray_ PETSCMATLABENGINEPUTARRAY - #define petscmatlabenginegetarray_ PETSCMATLABENGINEGETARRAY + #define petscmatlabengineevaluate_ PETSCMATLABENGINEEVALUATE #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscmatlabenginecreate_ petscmatlabenginecreate - #define petscmatlabengineevaluate_ petscmatlabengineevaluate - #define petscmatlabenginegetoutput_ petscmatlabenginegetoutput - #define petscmatlabengineprintoutput_ petscmatlabengineprintoutput - #define petscmatlabengineputarray_ petscmatlabengineputarray - #define petscmatlabenginegetarray_ petscmatlabenginegetarray + #define petscmatlabengineevaluate_ petscmatlabengineevaluate #endif -PETSC_EXTERN void petscmatlabenginecreate_(MPI_Comm *comm, char *m, PetscMatlabEngine *e, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *ms; - - FIXCHAR(m, len, ms); - *ierr = PetscMatlabEngineCreate(MPI_Comm_f2c(*(MPI_Fint *)&*comm), ms, e); - if (*ierr) return; - FREECHAR(m, ms); -} - PETSC_EXTERN void petscmatlabengineevaluate_(PetscMatlabEngine *e, char *m, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) { char *ms; @@ -35,21 +15,3 @@ PETSC_EXTERN void petscmatlabengineevaluate_(PetscMatlabEngine *e, char *m, Pets if (*ierr) return; FREECHAR(m, ms); } - -PETSC_EXTERN void petscmatlabengineputarray_(PetscMatlabEngine *e, PetscInt *m, PetscInt *n, PetscScalar *a, char *s, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *ms; - FIXCHAR(s, len, ms); - *ierr = PetscMatlabEnginePutArray(*e, *m, *n, a, ms); - if (*ierr) return; - FREECHAR(s, ms); -} - -PETSC_EXTERN void petscmatlabenginegetarray_(PetscMatlabEngine *e, PetscInt *m, PetscInt *n, PetscScalar *a, char *s, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *ms; - FIXCHAR(s, len, ms); - *ierr = PetscMatlabEngineGetArray(*e, *m, *n, a, ms); - if (*ierr) return; - FREECHAR(s, ms); -} diff --git a/src/sys/classes/matlabengine/matlab.c b/src/sys/classes/matlabengine/matlab.c index 524c5a797c5..95aa7dedc01 100644 --- a/src/sys/classes/matlabengine/matlab.c +++ b/src/sys/classes/matlabengine/matlab.c @@ -11,7 +11,7 @@ struct _p_PetscMatlabEngine { PetscClassId MATLABENGINE_CLASSID = -1; -/*@C +/*@ PetscMatlabEngineCreate - Creates a MATLAB engine object Not Collective @@ -50,16 +50,16 @@ PetscErrorCode PetscMatlabEngineCreate(MPI_Comm comm, const char host[], PetscMa char lhost[64]; PetscFunctionBegin; + PetscAssertPointer(mengine, 3); if (MATLABENGINE_CLASSID == -1) PetscCall(PetscClassIdRegister("MATLAB Engine", &MATLABENGINE_CLASSID)); - PetscCall(PetscHeaderCreate(e, MATLABENGINE_CLASSID, "MatlabEngine", "MATLAB Engine", "Sys", comm, PetscMatlabEngineDestroy, NULL)); + PetscCall(PetscHeaderCreate(e, MATLABENGINE_CLASSID, "MatlabEngine", "MATLAB Engine", "Sys", comm, PetscMatlabEngineDestroy, NULL)); if (!host) { PetscCall(PetscOptionsGetString(NULL, NULL, "-matlab_engine_host", lhost, sizeof(lhost), &flg)); if (flg) host = lhost; } flg = 
PETSC_FALSE; PetscCall(PetscOptionsGetBool(NULL, NULL, "-matlab_engine_graphics", &flg, NULL)); - if (host) { PetscCall(PetscInfo(0, "Starting MATLAB engine on %s\n", host)); PetscCall(PetscStrncpy(buffer, "ssh ", sizeof(buffer))); @@ -80,7 +80,6 @@ PetscErrorCode PetscMatlabEngineCreate(MPI_Comm comm, const char host[], PetscMa engOutputBuffer(e->ep, e->buffer, sizeof(e->buffer)); if (host) PetscCall(PetscInfo(0, "Started MATLAB engine on %s\n", host)); else PetscCall(PetscInfo(0, "Started MATLAB engine\n")); - PetscCallMPI(MPI_Comm_rank(comm, &rank)); PetscCallMPI(MPI_Comm_size(comm, &size)); PetscCall(PetscMatlabEngineEvaluate(e, "MPI_Comm_rank = %d; MPI_Comm_size = %d;\n", rank, size)); @@ -181,7 +180,7 @@ PetscErrorCode PetscMatlabEngineEvaluate(PetscMatlabEngine mengine, const char s `PetscMatlabEngineEvaluate()`, `PetscMatlabEngineCreate()`, `PetscMatlabEnginePrintOutput()`, `PETSC_MATLAB_ENGINE_()`, `PetscMatlabEnginePutArray()`, `PetscMatlabEngineGetArray()`, `PetscMatlabEngine` @*/ -PetscErrorCode PetscMatlabEngineGetOutput(PetscMatlabEngine mengine, char **string) +PetscErrorCode PetscMatlabEngineGetOutput(PetscMatlabEngine mengine, const char *string[]) { PetscFunctionBegin; PetscCheck(mengine, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Null argument: probably PETSC_MATLAB_ENGINE_() failed"); @@ -352,7 +351,7 @@ PetscMatlabEngine PETSC_MATLAB_ENGINE_(MPI_Comm comm) PetscFunctionReturn(mengine); } -/*@C +/*@ PetscMatlabEnginePutArray - Puts an array into the MATLAB space, treating it as a Fortran style (column major ordering) array. For parallel objects, each processors part is put in a separate MATLAB process. @@ -371,7 +370,7 @@ PetscMatlabEngine PETSC_MATLAB_ENGINE_(MPI_Comm comm) `PetscMatlabEngineEvaluate()`, `PetscMatlabEngineGetOutput()`, `PetscMatlabEnginePrintOutput()`, `PETSC_MATLAB_ENGINE_()`, `PetscMatlabEnginePut()`, `PetscMatlabEngineGetArray()`, `PetscMatlabEngine` @*/ -PetscErrorCode PetscMatlabEnginePutArray(PetscMatlabEngine mengine, int m, int n, const PetscScalar *array, const char name[]) +PetscErrorCode PetscMatlabEnginePutArray(PetscMatlabEngine mengine, int m, int n, const PetscScalar array[], const char name[]) { mxArray *mat; @@ -390,7 +389,7 @@ PetscErrorCode PetscMatlabEnginePutArray(PetscMatlabEngine mengine, int m, int n PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscMatlabEngineGetArray - Gets a variable from MATLAB into an array Not Collective @@ -399,7 +398,7 @@ PetscErrorCode PetscMatlabEnginePutArray(PetscMatlabEngine mengine, int m, int n + mengine - the MATLAB engine . m - the x dimension of the array . n - the y dimension of the array -. array - the array (represented in one dimension) +. 
array - the array (represented in one dimension), must be large enough to hold all the data
- name - the name of the array

  Level: advanced

@@ -408,7 +407,7 @@ PetscErrorCode PetscMatlabEnginePutArray(PetscMatlabEngine mengine, int m, int n
          `PetscMatlabEngineEvaluate()`, `PetscMatlabEngineGetOutput()`, `PetscMatlabEnginePrintOutput()`, `PETSC_MATLAB_ENGINE_()`,
          `PetscMatlabEnginePutArray()`, `PetscMatlabEngineGet()`, `PetscMatlabEngine`
@*/
-PetscErrorCode PetscMatlabEngineGetArray(PetscMatlabEngine mengine, int m, int n, PetscScalar *array, const char name[])
+PetscErrorCode PetscMatlabEngineGetArray(PetscMatlabEngine mengine, int m, int n, PetscScalar array[], const char name[])
 {
   mxArray *mat;
diff --git a/src/sys/classes/random/impls/curand/curand.c b/src/sys/classes/random/impls/curand/curand.c
index 64c53b9f6ac..2ec5c86b79a 100644
--- a/src/sys/classes/random/impls/curand/curand.c
+++ b/src/sys/classes/random/impls/curand/curand.c
@@ -78,11 +78,16 @@ M*/
 PETSC_EXTERN PetscErrorCode PetscRandomCreate_CURAND(PetscRandom r)
 {
   PetscRandom_CURAND *curand;
+  PetscDeviceContext  dctx;
+  cudaStream_t       *stream;

   PetscFunctionBegin;
   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
+  PetscCall(PetscDeviceContextGetCurrentContextAssertType_Internal(&dctx, PETSC_DEVICE_CUDA));
+  PetscCall(PetscDeviceContextGetStreamHandle(dctx, (void **)&stream));
   PetscCall(PetscNew(&curand));
   PetscCallCURAND(curandCreateGenerator(&curand->gen, CURAND_RNG_PSEUDO_DEFAULT));
+  PetscCallCURAND(curandSetStream(curand->gen, *stream));
   /* https://docs.nvidia.com/cuda/curand/host-api-overview.html#performance-notes2 */
   PetscCallCURAND(curandSetGeneratorOrdering(curand->gen, CURAND_ORDERING_PSEUDO_SEEDED));
   r->ops[0] = PetscRandomOps_Values;
diff --git a/src/sys/classes/random/impls/curand/curand2.cu b/src/sys/classes/random/impls/curand/curand2.cu
index 9670cb58a0f..581d061f0b8 100644
--- a/src/sys/classes/random/impls/curand/curand2.cu
+++ b/src/sys/classes/random/impls/curand/curand2.cu
@@ -16,7 +16,7 @@ struct complexscalelw : public thrust::unary_function
-  __host__ __device__ PetscReal operator()(thrust::tuple x) { return x.get<1>() % 2 ? x.get<0>() * iw + il : x.get<0>() * rw + rl; }
+  __host__ __device__ PetscReal operator()(thrust::tuple x) { return thrust::get<1>(x) % 2 ? thrust::get<0>(x) * iw + il : thrust::get<0>(x) * rw + rl; }
 };
 #endif
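The random123 hunk that follows adds batched generation: after a scalar peel loop aligns the internal counter on a multiple of four, values are produced four at a time per threefry4x64() call, with a second scalar loop for the remainder. Callers see no interface change; a sketch of the calling pattern, with the helper name and array length illustrative:

#include <petscsys.h>

PetscErrorCode FillRandom(MPI_Comm comm, PetscInt n, PetscReal vals[])
{
  PetscRandom r;

  PetscFunctionBeginUser;
  PetscCall(PetscRandomCreate(comm, &r));
  PetscCall(PetscRandomSetType(r, PETSCRANDOM123));
  PetscCall(PetscRandomGetValuesReal(r, n, vals)); /* now dispatches to the batched implementation below */
  PetscCall(PetscRandomDestroy(&r));
  PetscFunctionReturn(PETSC_SUCCESS);
}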
diff --git a/src/sys/classes/random/impls/random123/random123.c b/src/sys/classes/random/impls/random123/random123.c
index 0d145b7f335..4efb63debdf 100644
--- a/src/sys/classes/random/impls/random123/random123.c
+++ b/src/sys/classes/random/impls/random123/random123.c
@@ -96,6 +96,58 @@ static PetscErrorCode PetscRandomGetValueReal_Random123(PetscRandom r, PetscReal
   PetscFunctionReturn(PETSC_SUCCESS);
 }

+static PetscErrorCode PetscRandomGetValuesReal_Random123(PetscRandom r, PetscInt n, PetscReal vals[])
+{
+  PetscRandom123 *r123 = (PetscRandom123 *)r->data;
+  PetscInt        peel_start;
+  PetscInt        rem, lim;
+  PetscReal       scale = ((PetscReal)1.) / (UINT64_MAX + ((PetscReal)1.));
+  PetscReal       shift = .5 * scale;
+  PetscRandom123  r123_copy;
+
+  PetscFunctionBegin;
+  peel_start = (4 - (r123->count % 4)) % 4;
+  peel_start = PetscMin(n, peel_start);
+  for (PetscInt i = 0; i < peel_start; i++) PetscCall(PetscRandomGetValueReal(r, &vals[i]));
+  PetscAssert((r123->count % 4) == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Bad modular arithmetic");
+  n -= peel_start;
+  vals += peel_start;
+  rem = (n % 4);
+  lim = n - rem;
+  if (r->iset) {
+    scale *= PetscRealPart(r->width);
+    shift *= PetscRealPart(r->width);
+    shift += PetscRealPart(r->low);
+  }
+  r123_copy = *r123;
+  for (PetscInt i = 0; i < lim; i += 4, vals += 4) {
+    vals[0] = r123_copy.result.v[0] * scale + shift;
+    vals[1] = r123_copy.result.v[1] * scale + shift;
+    vals[2] = r123_copy.result.v[2] * scale + shift;
+    vals[3] = r123_copy.result.v[3] * scale + shift;
+    r123_copy.counter.v[0] += 4;
+    r123_copy.counter.v[1] += 4;
+    r123_copy.counter.v[2] += 4;
+    r123_copy.counter.v[3] += 4;
+    r123_copy.result = threefry4x64(r123_copy.counter, r123_copy.key);
+  }
+  r123_copy.count += lim;
+  *r123 = r123_copy;
+  for (PetscInt i = 0; i < rem; i++) PetscCall(PetscRandomGetValueReal(r, &vals[i]));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode PetscRandomGetValues_Random123(PetscRandom r, PetscInt n, PetscScalar vals[])
+{
+  PetscFunctionBegin;
+#if PetscDefined(USE_COMPLEX)
+  for (PetscInt i = 0; i < n; i++) PetscCall(PetscRandomGetValue_Random123(r, &vals[i]));
+#else
+  PetscCall(PetscRandomGetValuesReal_Random123(r, n, vals));
+#endif
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
 static PetscErrorCode PetscRandomDestroy_Random123(PetscRandom r)
 {
   PetscFunctionBegin;
@@ -104,12 +156,14 @@ static PetscErrorCode PetscRandomDestroy_Random123(PetscRandom r)
 }

 static struct _PetscRandomOps PetscRandomOps_Values = {
+  // clang-format off
   PetscDesignatedInitializer(seed, PetscRandomSeed_Random123),
   PetscDesignatedInitializer(getvalue, PetscRandomGetValue_Random123),
   PetscDesignatedInitializer(getvaluereal, PetscRandomGetValueReal_Random123),
-  PetscDesignatedInitializer(getvalues, NULL),
-  PetscDesignatedInitializer(getvaluesreal, NULL),
+  PetscDesignatedInitializer(getvalues, PetscRandomGetValues_Random123),
+  PetscDesignatedInitializer(getvaluesreal, PetscRandomGetValuesReal_Random123),
   PetscDesignatedInitializer(destroy, PetscRandomDestroy_Random123),
+  // clang-format on
 };

 /*MC
diff --git a/src/sys/classes/random/interface/ftn-custom/zrandomf.c b/src/sys/classes/random/interface/ftn-custom/zrandomf.c
index 9587e81f748..d919ea7bdb8 100644
--- a/src/sys/classes/random/interface/ftn-custom/zrandomf.c
+++ b/src/sys/classes/random/interface/ftn-custom/zrandomf.c
@@ -2,19 +2,11 @@
 #include

 #if defined(PETSC_HAVE_FORTRAN_CAPS)
-  #define petscrandomsettype_         PETSCRANDOMSETTYPE
-  #define petscrandomgettype_         PETSCRANDOMGETTYPE
-  #define petscrandomsetseed_         PETSCRANDOMSETSEED
-  #define petscrandomgetseed_         PETSCRANDOMGETSEED
-  #define petscrandomviewfromoptions_ PETSCRANDOMVIEWFROMOPTIONS
-  #define petscrandomdestroy_         PETSCRANDOMDESTROY
+  #define petscrandomsetseed_ PETSCRANDOMSETSEED
+  #define petscrandomgetseed_ PETSCRANDOMGETSEED
 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
-  #define petscrandomsettype_         petscrandomsettype
-  #define petscrandomgettype_         petscrandomgettype
-  #define petscrandomsetseed_         petscrandomsetseed
-  #define petscrandomgetseed_         petscrandomgetseed
-  #define petscrandomviewfromoptions_ petscrandomviewfromoptions
-  #define petscrandomdestroy_         petscrandomdestroy
+  #define
petscrandomsetseed_ petscrandomsetseed + #define petscrandomgetseed_ petscrandomgetseed #endif PETSC_EXTERN void petscrandomgetseed_(PetscRandom *r, unsigned long *seed, PetscErrorCode *ierr) @@ -25,42 +17,3 @@ PETSC_EXTERN void petscrandomsetseed_(PetscRandom *r, unsigned long *seed, Petsc { *ierr = PetscRandomSetSeed(*r, *seed); } - -PETSC_EXTERN void petscrandomsettype_(PetscRandom *rnd, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = PetscRandomSetType(*rnd, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petscrandomgettype_(PetscRandom *petscrandom, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = PetscRandomGetType(*petscrandom, &tname); - if (*ierr) return; - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} -PETSC_EXTERN void petscrandomviewfromoptions_(PetscRandom *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscRandomViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petscrandomdestroy_(PetscRandom *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = PetscRandomDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/sys/classes/random/interface/randomc.c b/src/sys/classes/random/interface/randomc.c index e122224e55d..666c7e37d6d 100644 --- a/src/sys/classes/random/interface/randomc.c +++ b/src/sys/classes/random/interface/randomc.c @@ -17,7 +17,7 @@ /* Logging support */ PetscClassId PETSC_RANDOM_CLASSID; -/*@C +/*@ PetscRandomDestroy - Destroys a `PetscRandom` object that was created by `PetscRandomCreate()`. Collective @@ -198,7 +198,7 @@ PetscErrorCode PetscRandomSetFromOptions(PetscRandom rnd) #include #endif -/*@C +/*@ PetscRandomViewFromOptions - View a `PetscRandom` object based on the options database Collective @@ -220,7 +220,7 @@ PetscErrorCode PetscRandomViewFromOptions(PetscRandom A, PetscObject obj, const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscRandomView - Views a random number generator object. Collective @@ -332,13 +332,10 @@ PetscErrorCode PetscRandomCreate(MPI_Comm comm, PetscRandom *r) PetscFunctionBegin; PetscAssertPointer(r, 2); - *r = NULL; PetscCall(PetscRandomInitializePackage()); PetscCall(PetscHeaderCreate(rr, PETSC_RANDOM_CLASSID, "PetscRandom", "Random number generator", "Sys", comm, PetscRandomDestroy, PetscRandomView)); - PetscCallMPI(MPI_Comm_rank(comm, &rank)); - rr->data = NULL; rr->low = 0.0; rr->width = 1.0; diff --git a/src/sys/classes/random/interface/randreg.c b/src/sys/classes/random/interface/randreg.c index 2cb74c81905..37c7519d67e 100644 --- a/src/sys/classes/random/interface/randreg.c +++ b/src/sys/classes/random/interface/randreg.c @@ -3,7 +3,7 @@ PetscFunctionList PetscRandomList = NULL; PetscBool PetscRandomRegisterAllCalled = PETSC_FALSE; -/*@C +/*@ PetscRandomSetType - Builds a context for generating a particular type of random numbers. Collective @@ -46,7 +46,7 @@ PetscErrorCode PetscRandomSetType(PetscRandom rnd, PetscRandomType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscRandomGetType - Gets the type name (as a string) from the `PetscRandom`. 
Not Collective @@ -73,7 +73,7 @@ PetscErrorCode PetscRandomGetType(PetscRandom rnd, PetscRandomType *type) /*@C PetscRandomRegister - Adds a new `PetscRandom` implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine diff --git a/src/sys/classes/viewer/impls/ams/amsopen.c b/src/sys/classes/viewer/impls/ams/amsopen.c index a5661ba9241..251b02c90ce 100644 --- a/src/sys/classes/viewer/impls/ams/amsopen.c +++ b/src/sys/classes/viewer/impls/ams/amsopen.c @@ -1,7 +1,7 @@ -#include /*I "petscsys.h" */ -#include +#include /*I "petscsys.h" I*/ +#include /*I "petscviewersaws.h" I*/ -/*@C +/*@ PetscViewerSAWsOpen - Opens an SAWs `PetscViewer`. Collective; No Fortran Support @@ -38,7 +38,7 @@ PetscErrorCode PetscViewerSAWsOpen(MPI_Comm comm, PetscViewer *lab) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectViewSAWs - View the base portion of any object with an SAWs viewer Collective diff --git a/src/sys/classes/viewer/impls/ascii/filev.c b/src/sys/classes/viewer/impls/ascii/filev.c index 53a28a78dd6..f29d200e0b9 100644 --- a/src/sys/classes/viewer/impls/ascii/filev.c +++ b/src/sys/classes/viewer/impls/ascii/filev.c @@ -198,6 +198,7 @@ PetscErrorCode PetscViewerASCIIGetPointer(PetscViewer viewer, FILE **fd) PetscViewer_ASCII *vascii = (PetscViewer_ASCII *)viewer->data; PetscFunctionBegin; + PetscCheck(!vascii->fileunit, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "Cannot request file pointer for viewers that use Fortran files"); *fd = vascii->fd; PetscFunctionReturn(PETSC_SUCCESS); } @@ -349,7 +350,7 @@ PetscErrorCode PetscViewerASCIISubtractTab(PetscViewer viewer, PetscInt tabs) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerASCIIPushSynchronized - Allows calls to `PetscViewerASCIISynchronizedPrintf()` for this viewer Collective @@ -379,7 +380,7 @@ PetscErrorCode PetscViewerASCIIPushSynchronized(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerASCIIPopSynchronized - Undoes most recent `PetscViewerASCIIPushSynchronized()` for this viewer Collective @@ -412,7 +413,7 @@ PetscErrorCode PetscViewerASCIIPopSynchronized(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerASCIIPushTab - Adds one more tab to the amount that `PetscViewerASCIIPrintf()` lines are tabbed. 
@@ -439,7 +440,7 @@ PetscErrorCode PetscViewerASCIIPushTab(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerASCIIPopTab - Removes one tab from the amount that `PetscViewerASCIIPrintf()` lines are tabbed that was provided by `PetscViewerASCIIPushTab()` @@ -506,11 +507,13 @@ PetscErrorCode PetscViewerASCIIUseTabs(PetscViewer viewer, PetscBool flg) #if defined(PETSC_HAVE_FORTRAN_CAPS) #define petscviewerasciiopenwithfileunit_ PETSCVIEWERASCIIOPENWITHFILEUNIT - #define petscviewerasciisetfilefileunit_ PETSCVIEWERASCIISETFILEUNIT + #define petscviewerasciisetfileunit_ PETSCVIEWERASCIISETFILEUNIT + #define petscviewerasciiworldsetfileunit_ PETSCVIEWERASCIIWORLDSETFILEUNIT #define petscfortranprinttounit_ PETSCFORTRANPRINTTOUNIT #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define petscviewerasciiopenwithfileunit_ petscviewerasciiopenwithfileunit #define petscviewerasciisetfileunit_ petscviewerasciisetfileunit + #define petscviewerasciiworldsetfileunit_ petscviewerasciiworldsetfileunit #define petscfortranprinttounit_ petscfortranprinttounit #endif @@ -522,6 +525,48 @@ extern void petscfortranprinttounit_(PetscInt *, const char *, PetscErrorCode *, #define PETSCDEFAULTBUFFERSIZE 8 * 1024 +static PetscInt PETSC_VIEWER_ASCII_WORLD_fileunit = 0; + +// PetscClangLinter pragma disable: -fdoc-synopsis-macro-explicit-synopsis-valid-header +/*MC + PetscViewerASCIIWORLDSetFileUnit - sets `PETSC_VIEWER_STDOUT_WORLD` to write to a Fortran IO unit + + Synopsis: + #include + void PetscViewerASCIIWORLDSetFileUnit(PetscInt unit, PetscErrorCode ierr) + + Input Parameter: +. unit - the unit number + + Output Parameter: +. ierr - the error code + + Level: intermediate + + Notes: + Must be called before `PetscInitialize()` + + This may not work currently with some viewers that (improperly) use the `fd` directly instead of `PetscViewerASCIIPrintf()` + + With this option, for example, `-log_options` results will be saved to the Fortran file + + Any process may call this but only the unit passed on the first process is used + + Fortran Note: + Only for Fortran + + Developer Note: + `PetscViewerASCIIWORLDSetFilename()` could be added in the future + +.seealso: `PetscViewerASCIISetFILE()`, `PETSCVIEWERASCII`, `PetscViewerASCIIOpenWithFileUnit()`, `PetscViewerASCIIWORLDSetFileUnit()` +M*/ +PETSC_EXTERN void petscviewerasciiworldsetfileunit_(PetscInt *unit, PetscErrorCode *ierr) +{ + PETSC_VIEWER_ASCII_WORLD_fileunit = *unit; +} + + #include + // PetscClangLinter pragma disable: -fdoc-synopsis-macro-explicit-synopsis-valid-header /*MC PetscViewerASCIISetFileUnit - sets the `PETSCVIEWERASCII` to write to a Fortran IO unit @@ -545,12 +590,15 @@ extern void petscfortranprinttounit_(PetscInt *, const char *, PetscErrorCode *, Fortran Notes: Only for Fortran, use `PetscViewerASCIISetFILE()` for C -.seealso: `PetscViewerASCIISetFILE()`, `PETSCVIEWERASCII`, `PetscViewerASCIIOpenWithFileUnit()` +.seealso: `PetscViewerASCIISetFILE()`, `PETSCVIEWERASCII`, `PetscViewerASCIIOpenWithFileUnit()`, `PetscViewerASCIIWORLDSetFileUnit()` M*/ PETSC_EXTERN void petscviewerasciisetfileunit_(PetscViewer *lab, PetscInt *unit, PetscErrorCode *ierr) { - PetscViewer_ASCII *vascii = (PetscViewer_ASCII *)(*lab)->data; + PetscViewer_ASCII *vascii; + PetscViewer v; + PetscPatchDefaultViewers_Fortran(lab, v); + vascii = (PetscViewer_ASCII *)v->data; if (vascii->mode == FILE_MODE_READ) { *ierr = PETSC_ERR_ARG_WRONGSTATE; return; @@ -635,6 +683,63 @@ static PetscErrorCode PetscFPrintfFortran(PetscInt unit, const char str[]) 
} #endif +/*@ + PetscViewerASCIIGetStdout - Creates a `PETSCVIEWERASCII` `PetscViewer` shared by all processes + in a communicator. Error returning version of `PETSC_VIEWER_STDOUT_()` + + Collective + + Input Parameter: +. comm - the MPI communicator to share the `PetscViewer` + + Output Parameter: +. viewer - the viewer + + Level: beginner + + Note: + This object is destroyed in `PetscFinalize()`, `PetscViewerDestroy()` should never be called on it + + Developer Note: + This should be used in all PETSc source code instead of `PETSC_VIEWER_STDOUT_()` since it allows error checking + +.seealso: [](sec_viewers), `PETSC_VIEWER_DRAW_()`, `PetscViewerASCIIOpen()`, `PETSC_VIEWER_STDERR_`, `PETSC_VIEWER_STDOUT_WORLD`, + `PETSC_VIEWER_STDOUT_SELF` +@*/ +PetscErrorCode PetscViewerASCIIGetStdout(MPI_Comm comm, PetscViewer *viewer) +{ + PetscBool flg; + MPI_Comm ncomm; + + PetscFunctionBegin; + PetscAssertPointer(viewer, 2); + PetscCall(PetscSpinlockLock(&PetscViewerASCIISpinLockStdout)); + PetscCall(PetscCommDuplicate(comm, &ncomm, NULL)); + if (Petsc_Viewer_Stdout_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Viewer_Stdout_keyval, NULL)); + PetscCallMPI(MPI_Comm_get_attr(ncomm, Petsc_Viewer_Stdout_keyval, (void **)viewer, (PetscMPIInt *)&flg)); + if (!flg) { /* PetscViewer not yet created */ +#if defined(PETSC_USE_FORTRAN_BINDINGS) + PetscMPIInt size, gsize; + + PetscCallMPI(MPI_Comm_size(comm, &size)); + PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize)); + if (size == gsize) { PetscCallMPI(MPI_Bcast(&PETSC_VIEWER_ASCII_WORLD_fileunit, 1, MPIU_INT, 0, comm)); } + if (PETSC_VIEWER_ASCII_WORLD_fileunit) { + PetscErrorCode ierr; + + petscviewerasciiopenwithfileunit_(&ncomm, &PETSC_VIEWER_ASCII_WORLD_fileunit, viewer, &ierr); + } else +#endif + PetscCall(PetscViewerASCIIOpen(ncomm, "stdout", viewer)); + ((PetscObject)*viewer)->persistent = PETSC_TRUE; + PetscCall(PetscObjectRegisterDestroy((PetscObject)*viewer)); + PetscCallMPI(MPI_Comm_set_attr(ncomm, Petsc_Viewer_Stdout_keyval, (void *)*viewer)); + } + PetscCall(PetscCommDestroy(&ncomm)); + PetscCall(PetscSpinlockUnlock(&PetscViewerASCIISpinLockStdout)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@C PetscViewerASCIIPrintf - Prints to a file, only from the first processor in the `PetscViewer` of type `PETSCVIEWERASCII` @@ -702,7 +807,7 @@ PetscErrorCode PetscViewerASCIIPrintf(PetscViewer viewer, const char format[], . PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerFileSetName - Sets the name of the file the `PetscViewer` should use. 
Collective @@ -749,7 +854,7 @@ PetscErrorCode PetscViewerFileSetName(PetscViewer viewer, const char name[]) .seealso: [](sec_viewers), `PetscViewerCreate()`, `PetscViewerSetType()`, `PetscViewerASCIIOpen()`, `PetscViewerBinaryOpen()`, `PetscViewerFileSetName()` @*/ -PetscErrorCode PetscViewerFileGetName(PetscViewer viewer, const char **name) +PetscErrorCode PetscViewerFileGetName(PetscViewer viewer, const char *name[]) { PetscFunctionBegin; PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1); @@ -891,7 +996,8 @@ static PetscErrorCode PetscViewerView_ASCII(PetscViewer v, PetscViewer viewer) PetscViewer_ASCII *ascii = (PetscViewer_ASCII *)v->data; PetscFunctionBegin; - if (ascii->filename) PetscCall(PetscViewerASCIIPrintf(viewer, "Filename: %s\n", ascii->filename)); + if (ascii->fileunit) PetscCall(PetscViewerASCIIPrintf(viewer, "Fortran FILE UNIT: %" PetscInt_FMT "\n", ascii->fileunit)); + else if (ascii->filename) PetscCall(PetscViewerASCIIPrintf(viewer, "Filename: %s\n", ascii->filename)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/sys/classes/viewer/impls/ascii/ftn-custom/zfilevf.c b/src/sys/classes/viewer/impls/ascii/ftn-custom/zfilevf.c index b64a269bf32..864b69e00f2 100644 --- a/src/sys/classes/viewer/impls/ascii/ftn-custom/zfilevf.c +++ b/src/sys/classes/viewer/impls/ascii/ftn-custom/zfilevf.c @@ -2,47 +2,13 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscviewerfilesetname_ PETSCVIEWERFILESETNAME - #define petscviewerfilegetname_ PETSCVIEWERFILEGETNAME #define petscviewerasciiprintf_ PETSCVIEWERASCIIPRINTF - #define petscviewerasciipushtab_ PETSCVIEWERASCIIPUSHTAB - #define petscviewerasciipoptab_ PETSCVIEWERASCIIPOPTAB #define petscviewerasciisynchronizedprintf_ PETSCVIEWERASCIISYNCHRONIZEDPRINTF - #define petscviewerasciipushsynchronized_ PETSCVIEWERASCIIPUSHSYNCHRONIZED - #define petscviewerasciipopsynchronized_ PETSCVIEWERASCIIPOPSYNCHRONIZED #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscviewerfilesetname_ petscviewerfilesetname - #define petscviewerfilegetname_ petscviewerfilegetname #define petscviewerasciiprintf_ petscviewerasciiprintf - #define petscviewerasciipushtab_ petscviewerasciipushtab - #define petscviewerasciipoptab_ petscviewerasciipoptab #define petscviewerasciisynchronizedprintf_ petscviewerasciisynchronizedprintf - #define petscviewerasciipushsynchronized_ petscviewerasciipushsynchronized - #define petscviewerasciipopsynchronized_ petscviewerasciipopsynchronized #endif -PETSC_EXTERN void petscviewerfilesetname_(PetscViewer *viewer, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - FIXCHAR(name, len, c1); - *ierr = PetscViewerFileSetName(v, c1); - if (*ierr) return; - FREECHAR(name, c1); -} - -PETSC_EXTERN void petscviewerfilegetname_(PetscViewer *viewer, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *c1; - - *ierr = PetscViewerGetType(*viewer, &c1); - if (*ierr) return; - *ierr = PetscStrncpy(name, c1, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - static PetscErrorCode PetscFixSlashN(const char *in, char **out) { PetscInt i; @@ -75,20 +41,6 @@ PETSC_EXTERN void petscviewerasciiprintf_(PetscViewer *viewer, char *str, PetscE *ierr = PetscFree(tmp); } -PETSC_EXTERN void petscviewerasciipushtab_(PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscViewerASCIIPushTab(v); -} - -PETSC_EXTERN void 
petscviewerasciipoptab_(PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscViewerASCIIPopTab(v); -} - PETSC_EXTERN void petscviewerasciisynchronizedprintf_(PetscViewer *viewer, char *str, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1) { char *c1, *tmp; @@ -103,19 +55,3 @@ PETSC_EXTERN void petscviewerasciisynchronizedprintf_(PetscViewer *viewer, char if (*ierr) return; *ierr = PetscFree(tmp); } - -PETSC_EXTERN void petscviewerasciipushsynchronized_(PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscViewerASCIIPushSynchronized(v); -} - -PETSC_EXTERN void petscviewerasciipopsynchronized_(PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscViewerASCIIPopSynchronized(v); -} diff --git a/src/sys/classes/viewer/impls/ascii/ftn-custom/zvcreatef.c b/src/sys/classes/viewer/impls/ascii/ftn-custom/zvcreatef.c index 31e85c07137..da0296a5ead 100644 --- a/src/sys/classes/viewer/impls/ascii/ftn-custom/zvcreatef.c +++ b/src/sys/classes/viewer/impls/ascii/ftn-custom/zvcreatef.c @@ -3,9 +3,7 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define petsc_viewer_stdout__ PETSC_VIEWER_STDOUT_BROKEN - #define petscviewerasciiopen_ PETSCVIEWERASCIIOPEN #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscviewerasciiopen_ petscviewerasciiopen #define petsc_viewer_stdout__ petsc_viewer_stdout_ #endif @@ -13,15 +11,6 @@ #define petsc_viewer_stdout__ petsc_viewer_stdout___ #endif -PETSC_EXTERN void petscviewerasciiopen_(MPI_Comm *comm, char *name, PetscViewer *lab, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - FIXCHAR(name, len, c1); - *ierr = PetscViewerASCIIOpen(MPI_Comm_f2c(*(MPI_Fint *)&*comm), c1, lab); - if (*ierr) return; - FREECHAR(name, c1); -} - PETSC_EXTERN PetscViewer petsc_viewer_stdout__(MPI_Comm *comm) { return PETSC_VIEWER_STDOUT_(MPI_Comm_f2c(*(MPI_Fint *)&*comm)); diff --git a/src/sys/classes/viewer/impls/ascii/vcreatea.c b/src/sys/classes/viewer/impls/ascii/vcreatea.c index 07fadf89703..c703feac864 100644 --- a/src/sys/classes/viewer/impls/ascii/vcreatea.c +++ b/src/sys/classes/viewer/impls/ascii/vcreatea.c @@ -6,50 +6,6 @@ */ PetscMPIInt Petsc_Viewer_Stdout_keyval = MPI_KEYVAL_INVALID; -/*@ - PetscViewerASCIIGetStdout - Creates a `PETSCVIEWERASCII` `PetscViewer` shared by all processors - in a communicator. Error returning version of `PETSC_VIEWER_STDOUT_()` - - Collective - - Input Parameter: -. comm - the MPI communicator to share the `PetscViewer` - - Output Parameter: -. 
viewer - the viewer - - Level: beginner - - Note: - This object is destroyed in `PetscFinalize()`, `PetscViewerDestroy()` should never be called on it - - Developer Note: - This should be used in all PETSc source code instead of `PETSC_VIEWER_STDOUT_()` since it allows error checking - -.seealso: [](sec_viewers), `PETSC_VIEWER_DRAW_()`, `PetscViewerASCIIOpen()`, `PETSC_VIEWER_STDERR_`, `PETSC_VIEWER_STDOUT_WORLD`, - `PETSC_VIEWER_STDOUT_SELF` -@*/ -PetscErrorCode PetscViewerASCIIGetStdout(MPI_Comm comm, PetscViewer *viewer) -{ - PetscBool flg; - MPI_Comm ncomm; - - PetscFunctionBegin; - PetscCall(PetscSpinlockLock(&PetscViewerASCIISpinLockStdout)); - PetscCall(PetscCommDuplicate(comm, &ncomm, NULL)); - if (Petsc_Viewer_Stdout_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Viewer_Stdout_keyval, NULL)); - PetscCallMPI(MPI_Comm_get_attr(ncomm, Petsc_Viewer_Stdout_keyval, (void **)viewer, (PetscMPIInt *)&flg)); - if (!flg) { /* PetscViewer not yet created */ - PetscCall(PetscViewerASCIIOpen(ncomm, "stdout", viewer)); - ((PetscObject)*viewer)->persistent = PETSC_TRUE; - PetscCall(PetscObjectRegisterDestroy((PetscObject)*viewer)); - PetscCallMPI(MPI_Comm_set_attr(ncomm, Petsc_Viewer_Stdout_keyval, (void *)*viewer)); - } - PetscCall(PetscCommDestroy(&ncomm)); - PetscCall(PetscSpinlockUnlock(&PetscViewerASCIISpinLockStdout)); - PetscFunctionReturn(PETSC_SUCCESS); -} - /*@C PETSC_VIEWER_STDOUT_ - Creates a `PETSCVIEWERASCII` `PetscViewer` shared by all MPI processes in a communicator. @@ -190,7 +146,7 @@ PetscMPIInt MPIAPI Petsc_DelViewer(MPI_Comm comm, PetscMPIInt keyval, void *attr PetscFunctionReturn(MPI_SUCCESS); } -/*@C +/*@ PetscViewerASCIIOpen - Opens an ASCII file for writing as a `PETSCVIEWERASCII` `PetscViewer`. Collective @@ -200,17 +156,17 @@ PetscMPIInt MPIAPI Petsc_DelViewer(MPI_Comm comm, PetscMPIInt keyval, void *attr - name - the file name Output Parameter: -. lab - the `PetscViewer` to use with the specified file +. viewer - the `PetscViewer` to use with the specified file Level: beginner Notes: To open an ASCII file as a viewer for reading one must use the sequence .vb - PetscViewerCreate(comm,&lab); - PetscViewerSetType(lab,PETSCVIEWERASCII); - PetscViewerFileSetMode(lab,FILE_MODE_READ); - PetscViewerFileSetName(lab,name); + PetscViewerCreate(comm,&viewer); + PetscViewerSetType(viewer,PETSCVIEWERASCII); + PetscViewerFileSetMode(viewer,FILE_MODE_READ); + PetscViewerFileSetName(viewer,name); .ve This `PetscViewer` can be destroyed with `PetscViewerDestroy()`.
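For reference, a minimal sketch of the two open paths described above, the one-call write path and the Create/SetType/SetMode/SetName read sequence (not part of the patch; the filename `ex.txt` is a placeholder):
.vb
#include <petscviewer.h>

int main(int argc, char **argv)
{
  PetscViewer out, in;

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  /* one-call path: creates (or truncates) ex.txt for writing */
  PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "ex.txt", &out));
  PetscCall(PetscViewerASCIIPrintf(out, "sample output\n"));
  PetscCall(PetscViewerDestroy(&out));
  /* read path: the mode is set before the file name, as in the sequence above */
  PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &in));
  PetscCall(PetscViewerSetType(in, PETSCVIEWERASCII));
  PetscCall(PetscViewerFileSetMode(in, FILE_MODE_READ));
  PetscCall(PetscViewerFileSetName(in, "ex.txt"));
  PetscCall(PetscViewerDestroy(&in));
  PetscCall(PetscFinalize());
  return 0;
}
.ve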
@@ -229,17 +185,18 @@ PetscMPIInt MPIAPI Petsc_DelViewer(MPI_Comm comm, PetscMPIInt keyval, void *attr `PetscViewerASCIIGetPointer()`, `PetscViewerPushFormat()`, `PETSC_VIEWER_STDOUT_`, `PETSC_VIEWER_STDERR_`, `PETSC_VIEWER_STDOUT_WORLD`, `PETSC_VIEWER_STDOUT_SELF`, @*/ -PetscErrorCode PetscViewerASCIIOpen(MPI_Comm comm, const char name[], PetscViewer *lab) +PetscErrorCode PetscViewerASCIIOpen(MPI_Comm comm, const char name[], PetscViewer *viewer) { PetscViewerLink *vlink, *nv; PetscBool flg, eq; size_t len; PetscFunctionBegin; + PetscAssertPointer(viewer, 3); PetscCall(PetscStrlen(name, &len)); if (!len) { - PetscCall(PetscViewerASCIIGetStdout(comm, lab)); - PetscCall(PetscObjectReference((PetscObject)*lab)); + PetscCall(PetscViewerASCIIGetStdout(comm, viewer)); + PetscCall(PetscObjectReference((PetscObject)*viewer)); PetscFunctionReturn(PETSC_SUCCESS); } PetscCall(PetscSpinlockLock(&PetscViewerASCIISpinLockOpen)); @@ -260,7 +217,7 @@ PetscErrorCode PetscViewerASCIIOpen(MPI_Comm comm, const char name[], PetscViewe PetscCall(PetscStrcmp(name, ((PetscViewer_ASCII *)vlink->viewer->data)->filename, &eq)); if (eq) { PetscCall(PetscObjectReference((PetscObject)vlink->viewer)); - *lab = vlink->viewer; + *viewer = vlink->viewer; PetscCall(PetscCommDestroy(&comm)); PetscCall(PetscSpinlockUnlock(&PetscViewerASCIISpinLockOpen)); PetscFunctionReturn(PETSC_SUCCESS); @@ -268,12 +225,12 @@ PetscErrorCode PetscViewerASCIIOpen(MPI_Comm comm, const char name[], PetscViewe vlink = vlink->next; } } - PetscCall(PetscViewerCreate(comm, lab)); - PetscCall(PetscViewerSetType(*lab, PETSCVIEWERASCII)); - if (name) PetscCall(PetscViewerFileSetName(*lab, name)); + PetscCall(PetscViewerCreate(comm, viewer)); + PetscCall(PetscViewerSetType(*viewer, PETSCVIEWERASCII)); + if (name) PetscCall(PetscViewerFileSetName(*viewer, name)); /* save viewer into communicator if needed later */ PetscCall(PetscNew(&nv)); - nv->viewer = *lab; + nv->viewer = *viewer; if (!flg) { PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Viewer_keyval, nv)); } else { @@ -300,7 +257,7 @@ PetscErrorCode PetscViewerASCIIOpen(MPI_Comm comm, const char name[], PetscViewe - fd - the `FILE` pointer Output Parameter: -. lab - the `PetscViewer` to use with the specified file +. 
viewer - the `PetscViewer` to use with the specified file Level: beginner @@ -318,12 +275,12 @@ PetscErrorCode PetscViewerASCIIOpen(MPI_Comm comm, const char name[], PetscViewe `PetscViewerASCIIGetPointer()`, `PetscViewerPushFormat()`, `PETSC_VIEWER_STDOUT_`, `PETSC_VIEWER_STDERR_`, `PETSC_VIEWER_STDOUT_WORLD`, `PETSC_VIEWER_STDOUT_SELF`, `PetscViewerASCIIOpen()`, `PetscViewerASCIISetFILE()`, `PETSCVIEWERASCII` @*/ -PetscErrorCode PetscViewerASCIIOpenWithFILE(MPI_Comm comm, FILE *fd, PetscViewer *lab) +PetscErrorCode PetscViewerASCIIOpenWithFILE(MPI_Comm comm, FILE *fd, PetscViewer *viewer) { PetscFunctionBegin; - PetscCall(PetscViewerCreate(comm, lab)); - PetscCall(PetscViewerSetType(*lab, PETSCVIEWERASCII)); - PetscCall(PetscViewerASCIISetFILE(*lab, fd)); + PetscCall(PetscViewerCreate(comm, viewer)); + PetscCall(PetscViewerSetType(*viewer, PETSCVIEWERASCII)); + PetscCall(PetscViewerASCIISetFILE(*viewer, fd)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/sys/classes/viewer/impls/binary/binv.c b/src/sys/classes/viewer/impls/binary/binv.c index 4d975b4931f..e519738c1b1 100644 --- a/src/sys/classes/viewer/impls/binary/binv.c +++ b/src/sys/classes/viewer/impls/binary/binv.c @@ -831,7 +831,7 @@ static PetscErrorCode PetscViewerDestroy_Binary(PetscViewer v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerBinaryOpen - Opens a file for binary input/output. Collective @@ -1162,7 +1162,7 @@ PetscErrorCode PetscViewerBinaryWriteAll(PetscViewer viewer, const void *data, P `VecView()`, `MatView()`, `VecLoad()`, `MatLoad()`, `PetscViewerBinaryGetDescriptor()`, `PetscViewerBinaryGetInfoPointer()`, `PetscFileMode`, `PetscViewer`, `PetscViewerBinaryRead()` @*/ -PetscErrorCode PetscViewerBinaryWriteStringArray(PetscViewer viewer, const char *const *data) +PetscErrorCode PetscViewerBinaryWriteStringArray(PetscViewer viewer, const char *const data[]) { PetscInt i, n = 0, *sizes; size_t len; @@ -1224,7 +1224,7 @@ PetscErrorCode PetscViewerBinaryReadStringArray(PetscViewer viewer, char ***data PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerFileSetMode - Sets the open mode of file Logically Collective @@ -1263,7 +1263,7 @@ static PetscErrorCode PetscViewerFileSetMode_Binary(PetscViewer viewer, PetscFil PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerFileGetMode - Gets the open mode of a file associated with a `PetscViewer` Not Collective diff --git a/src/sys/classes/viewer/impls/binary/ftn-custom/zbinvf.c b/src/sys/classes/viewer/impls/binary/ftn-custom/zbinvf.c index 10109efada9..c0cf593e8e5 100644 --- a/src/sys/classes/viewer/impls/binary/ftn-custom/zbinvf.c +++ b/src/sys/classes/viewer/impls/binary/ftn-custom/zbinvf.c @@ -2,31 +2,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscviewerfilesetmode_ PETSCVIEWERFILESETMODE - #define petscviewerbinaryopen_ PETSCVIEWERBINARYOPEN #define petscviewerbinarygetdescriptor_ PETSCVIEWERBINARYGETDESCRIPTOR #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscviewerfilesetmode_ petscviewerfilesetmode - #define petscviewerbinaryopen_ petscviewerbinaryopen #define petscviewerbinarygetdescriptor_ petscviewerbinarygetdescriptor #endif -PETSC_EXTERN void petscviewerfilesetmode_(PetscViewer *viewer, PetscFileMode *type, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscViewerFileSetMode(v, *type); -} - -PETSC_EXTERN void petscviewerbinaryopen_(MPI_Comm *comm, char *name, PetscFileMode *type, PetscViewer *binv, PetscErrorCode *ierr, 
PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - FIXCHAR(name, len, c1); - *ierr = PetscViewerBinaryOpen(MPI_Comm_f2c(*(MPI_Fint *)&*comm), c1, *type, binv); - if (*ierr) return; - FREECHAR(name, c1); -} - PETSC_EXTERN void petscviewerbinarygetdescriptor_(PetscViewer *viewer, int *fd, PetscErrorCode *ierr) { PetscViewer v; diff --git a/src/sys/classes/viewer/impls/draw/drawv.c b/src/sys/classes/viewer/impls/draw/drawv.c index 09f7f940344..d259f5f9eff 100644 --- a/src/sys/classes/viewer/impls/draw/drawv.c +++ b/src/sys/classes/viewer/impls/draw/drawv.c @@ -34,7 +34,7 @@ static PetscErrorCode PetscViewerFlush_Draw(PetscViewer v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerDrawGetDraw - Returns `PetscDraw` object from `PETSCVIEWERDRAW` `PetscViewer` object. This `PetscDraw` object may then be used to perform graphics using `PetscDraw` commands. @@ -99,7 +99,7 @@ PetscErrorCode PetscViewerDrawGetDraw(PetscViewer viewer, PetscInt windownumber, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerDrawBaseAdd - add to the base integer that is added to the `windownumber` passed to `PetscViewerDrawGetDraw()` Logically Collective @@ -132,7 +132,7 @@ PetscErrorCode PetscViewerDrawBaseAdd(PetscViewer viewer, PetscInt windownumber) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerDrawBaseSet - sets the base integer that is added to the `windownumber` passed to `PetscViewerDrawGetDraw()` Logically Collective @@ -165,7 +165,7 @@ PetscErrorCode PetscViewerDrawBaseSet(PetscViewer viewer, PetscInt windownumber) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerDrawGetDrawLG - Returns a `PetscDrawLG` object from `PetscViewer` object of type `PETSCVIEWERDRAW`. This `PetscDrawLG` object may then be used to perform graphics using `PetscDrawLG` commands. @@ -208,7 +208,7 @@ PetscErrorCode PetscViewerDrawGetDrawLG(PetscViewer viewer, PetscInt windownumbe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerDrawGetDrawAxis - Returns a `PetscDrawAxis` object from a `PetscViewer` object of type `PETSCVIEWERDRAW`. This `PetscDrawAxis` object may then be used to perform graphics using `PetscDrawAxis` commands. @@ -344,7 +344,7 @@ PetscErrorCode PetscViewerDrawGetTitle(PetscViewer v, const char *title[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerDrawOpen - Opens a `PetscDraw` window for use as a `PetscViewer` with type `PETSCVIEWERDRAW`. 
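For reference, a minimal sketch of the draw-viewer usage documented above (not part of the patch; the window title and size are arbitrary): open a `PETSCVIEWERDRAW` viewer, fetch its `PetscDraw`, and draw a single line.
.vb
#include <petscviewer.h>
#include <petscdraw.h>

int main(int argc, char **argv)
{
  PetscViewer viewer;
  PetscDraw   draw;

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, NULL, "sketch", PETSC_DECIDE, PETSC_DECIDE, 300, 300, &viewer));
  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); /* window number 0 */
  PetscCall(PetscDrawLine(draw, 0.0, 0.0, 1.0, 1.0, PETSC_DRAW_BLACK));
  PetscCall(PetscDrawFlush(draw));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscFinalize());
  return 0;
}
.ve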
@@ -850,7 +850,7 @@ PetscErrorCode PetscViewerDrawSetBounds(PetscViewer viewer, PetscInt nbounds, co .seealso: [](sec_viewers), `PETSCVIEWERDRAW`, `PetscViewerDrawGetLG()`, `PetscViewerDrawGetAxis()`, `PetscViewerDrawOpen()`, `PetscViewerDrawSetBounds()` @*/ -PetscErrorCode PetscViewerDrawGetBounds(PetscViewer viewer, PetscInt *nbounds, const PetscReal **bounds) +PetscErrorCode PetscViewerDrawGetBounds(PetscViewer viewer, PetscInt *nbounds, const PetscReal *bounds[]) { PetscViewer_Draw *vdraw; PetscBool isdraw; diff --git a/src/sys/classes/viewer/impls/draw/ftn-custom/zdrawvf.c b/src/sys/classes/viewer/impls/draw/ftn-custom/zdrawvf.c index e423277fa79..e4c14767ced 100644 --- a/src/sys/classes/viewer/impls/draw/ftn-custom/zdrawvf.c +++ b/src/sys/classes/viewer/impls/draw/ftn-custom/zdrawvf.c @@ -3,15 +3,9 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petsc_viewer_draw__ PETSC_VIEWER_DRAW_BROKEN - #define petscviewerdrawgetdraw_ PETSCVIEWERDRAWGETDRAW - #define petscviewerdrawgetdrawlg_ PETSCVIEWERDRAWGETDRAWLG - #define petscviewerdrawopen_ PETSCVIEWERDRAWOPEN + #define petsc_viewer_draw__ PETSC_VIEWER_DRAW_BROKEN #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petsc_viewer_draw__ petsc_viewer_draw_ - #define petscviewerdrawgetdraw_ petscviewerdrawgetdraw - #define petscviewerdrawgetdrawlg_ petscviewerdrawgetdrawlg - #define petscviewerdrawopen_ petscviewerdrawopen + #define petsc_viewer_draw__ petsc_viewer_draw_ #endif #if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE) @@ -22,29 +16,3 @@ PETSC_EXTERN PetscViewer petsc_viewer_draw__(MPI_Comm *comm) { return PETSC_VIEWER_DRAW_(MPI_Comm_f2c(*(MPI_Fint *)&*comm)); } - -PETSC_EXTERN void petscviewerdrawgetdraw_(PetscViewer *vin, int *win, PetscDraw *draw, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscViewerDrawGetDraw(v, *win, draw); -} - -PETSC_EXTERN void petscviewerdrawgetdrawlg_(PetscViewer *vin, int *win, PetscDrawLG *drawlg, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscViewerDrawGetDrawLG(v, *win, drawlg); -} - -PETSC_EXTERN void petscviewerdrawopen_(MPI_Comm *comm, char *display, char *title, int *x, int *y, int *w, int *h, PetscViewer *v, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2) -{ - char *c1, *c2; - - FIXCHAR(display, len1, c1); - FIXCHAR(title, len2, c2); - *ierr = PetscViewerDrawOpen(MPI_Comm_f2c(*(MPI_Fint *)&*comm), c1, c2, *x, *y, *w, *h, v); - if (*ierr) return; - FREECHAR(display, c1); - FREECHAR(title, c2); -} diff --git a/src/sys/classes/viewer/impls/glvis/glvis.c b/src/sys/classes/viewer/impls/glvis/glvis.c index 5ed128c10b8..d06c839c763 100644 --- a/src/sys/classes/viewer/impls/glvis/glvis.c +++ b/src/sys/classes/viewer/impls/glvis/glvis.c @@ -585,7 +585,7 @@ static PetscErrorCode PetscViewerFileSetName_GLVis(PetscViewer viewer, const cha PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerGLVisOpen - Opens a `PETSCVIEWERGLVIS` `PetscViewer` Collective; No Fortran Support @@ -603,7 +603,7 @@ static PetscErrorCode PetscViewerFileSetName_GLVis(PetscViewer viewer, const cha + -glvis_precision - Sets number of digits for floating point values . -glvis_size - Sets the window size (in pixels) . -glvis_pause - Sets time (in seconds) that the program pauses after each visualization - (0 is default, -1 implies every visualization) + (0 is default, -1 implies every visualization) . 
-glvis_keys - Additional keys to configure visualization - -glvis_exec - Additional commands to configure visualization diff --git a/src/sys/classes/viewer/impls/mathematica/mathematica.c b/src/sys/classes/viewer/impls/mathematica/mathematica.c index 91fb5ad81c8..bf6bf4af7f0 100644 --- a/src/sys/classes/viewer/impls/mathematica/mathematica.c +++ b/src/sys/classes/viewer/impls/mathematica/mathematica.c @@ -299,7 +299,7 @@ PetscErrorCode PetscViewerMathematicaSetLinkMode(PetscViewer v, LinkMode mode) } /*----------------------------------------- Public Functions --------------------------------------------------------*/ -/*@C +/*@ PetscViewerMathematicaOpen - Communicates with Mathematica using MathLink. Collective diff --git a/src/sys/classes/viewer/impls/matlab/ftn-custom/makefile b/src/sys/classes/viewer/impls/matlab/ftn-custom/makefile deleted file mode 100644 index 08508c660fe..00000000000 --- a/src/sys/classes/viewer/impls/matlab/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/classes/viewer/impls/matlab/ftn-custom/zvmatlabf.c b/src/sys/classes/viewer/impls/matlab/ftn-custom/zvmatlabf.c deleted file mode 100644 index b6c17625160..00000000000 --- a/src/sys/classes/viewer/impls/matlab/ftn-custom/zvmatlabf.c +++ /dev/null @@ -1,19 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscviewermatlabopen_ PETSCVIEWERMATLABOPEN -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscviewermatlabopen_ petscviewermatlabopen -#endif - -#if defined(PETSC_HAVE_MATLAB) -PETSC_EXTERN void petscviewermatlabopen_(MPI_Comm *comm, char *name, PetscFileMode *type, PetscViewer *binv, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - FIXCHAR(name, len, c1); - *ierr = PetscViewerMatlabOpen(MPI_Comm_f2c(*(MPI_Fint *)&*comm), c1, *type, binv); - if (*ierr) return; - FREECHAR(name, c1); -} -#endif diff --git a/src/sys/classes/viewer/impls/matlab/vmatlab.c b/src/sys/classes/viewer/impls/matlab/vmatlab.c index 87916dce89b..846d0182880 100644 --- a/src/sys/classes/viewer/impls/matlab/vmatlab.c +++ b/src/sys/classes/viewer/impls/matlab/vmatlab.c @@ -1,5 +1,5 @@ -#include -#include +#include /*I "petscviewer.h" I*/ +#include /*I "petscmat.h" I*/ typedef struct { MATFile *ep; @@ -7,7 +7,7 @@ typedef struct { PetscFileMode btype; } PetscViewer_Matlab; -/*@C +/*@ PetscViewerMatlabPutArray - Puts an array into the `PETSCVIEWERMATLAB` viewer. Not Collective, only processor zero saves `array` @@ -58,7 +58,7 @@ PetscErrorCode PetscViewerMatlabPutVariable(PetscViewer viewer, const char *name PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerMatlabGetArray - Gets a variable from a `PETSCVIEWERMATLAB` viewer into an array Not Collective; only processor zero reads in the array @@ -67,7 +67,7 @@ PetscErrorCode PetscViewerMatlabPutVariable(PetscViewer viewer, const char *name + mfile - the MATLAB file viewer . m - the first dimension of `array` . n - the second dimension of `array` -. array - the array (represented in one dimension) +.
array - the array (represented in one dimension), must be of length `m` * `n` - name - the MATLAB name of `array` Level: advanced @@ -77,7 +77,7 @@ PetscErrorCode PetscViewerMatlabPutVariable(PetscViewer viewer, const char *name .seealso: `PETSCVIEWERMATLAB`, `PetscViewerMatlabPutArray()` @*/ -PetscErrorCode PetscViewerMatlabGetArray(PetscViewer mfile, int m, int n, PetscScalar *array, const char *name) +PetscErrorCode PetscViewerMatlabGetArray(PetscViewer mfile, int m, int n, PetscScalar array[], const char *name) { PetscViewer_Matlab *ml; mxArray *mat; @@ -195,7 +195,7 @@ PETSC_EXTERN PetscErrorCode PetscViewerCreate_Matlab(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerMatlabOpen - Opens a MATLAB .mat file for output Collective @@ -241,30 +241,29 @@ PetscErrorCode PetscViewerMatlabOpen(MPI_Comm comm, const char name[], PetscFile static PetscMPIInt Petsc_Viewer_Matlab_keyval = MPI_KEYVAL_INVALID; /*@C - PETSC_VIEWER_MATLAB_ - Creates a `PETSCVIEWERMATLAB` `PetscViewer` shared by all processors - in a communicator. + PETSC_VIEWER_MATLAB_ - Creates a `PETSCVIEWERMATLAB` `PetscViewer` shared by all processors + in a communicator. - Collective + Collective - Input Parameter: -. comm - the MPI communicator to share the MATLAB `PetscViewer` + Input Parameter: +. comm - the MPI communicator to share the MATLAB `PetscViewer` Options Database Key: . -viewer_matlab_filename - name of the MATLAB file Environmental variable: -. `PETSC_VIEWER_MATLAB_FILENAME` - name of the MATLAB file +. `PETSC_VIEWER_MATLAB_FILENAME` - name of the MATLAB file - Level: intermediate + Level: intermediate - Notes: - This object is destroyed in `PetscFinalize()`, `PetscViewerDestroy()` should never be called on it + Notes: + This object is destroyed in `PetscFinalize()`, `PetscViewerDestroy()` should never be called on it - Unlike almost all other PETSc routines, `PETSC_VIEWER_MATLAB_()` does not return - an error code. The MATLAB `PetscViewer` is usually used in the form -$ XXXView(XXX object, PETSC_VIEWER_MATLAB_(comm)); + Unlike almost all other PETSc routines, `PETSC_VIEWER_MATLAB_()` does not return - an error code. The MATLAB `PetscViewer` is usually used in the form `XXXView(XXX object, PETSC_VIEWER_MATLAB_(comm))` - Use `PETSC_VIEWER_SOCKET_()` or `PetscViewerSocketOpen()` to communicate with an interactive MATLAB session.
.seealso: `PETSC_VIEWER_MATLAB_WORLD`, `PETSC_VIEWER_MATLAB_SELF`, `PetscViewerMatlabOpen()`, `PetscViewerCreate()`, `PetscViewerDestroy()` diff --git a/src/sys/classes/viewer/impls/socket/ftn-custom/makefile b/src/sys/classes/viewer/impls/socket/ftn-custom/makefile deleted file mode 100644 index 08508c660fe..00000000000 --- a/src/sys/classes/viewer/impls/socket/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/classes/viewer/impls/socket/ftn-custom/zsendf.c b/src/sys/classes/viewer/impls/socket/ftn-custom/zsendf.c deleted file mode 100644 index cd649cab19a..00000000000 --- a/src/sys/classes/viewer/impls/socket/ftn-custom/zsendf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscviewersocketopen_ PETSCVIEWERSOCKETOPEN -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscviewersocketopen_ petscviewersocketopen -#endif - -PETSC_EXTERN void petscviewersocketopen_(MPI_Comm *comm, char *name, int *port, PetscViewer *lab, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - FIXCHAR(name, len, c1); - *ierr = PetscViewerSocketOpen(MPI_Comm_f2c(*(MPI_Fint *)&*comm), c1, *port, lab); - if (*ierr) return; - FREECHAR(name, c1); -} diff --git a/src/sys/classes/viewer/impls/socket/send.c b/src/sys/classes/viewer/impls/socket/send.c index e70f8a41f31..2692529a879 100644 --- a/src/sys/classes/viewer/impls/socket/send.c +++ b/src/sys/classes/viewer/impls/socket/send.c @@ -1,4 +1,4 @@ -#include +#include /*I "petscviewer.h" I*/ #if defined(PETSC_NEEDS_UTYPE_TYPEDEFS) /* Some systems have inconsistent include files that use but do not @@ -77,7 +77,7 @@ static PetscErrorCode PetscViewerDestroy_Socket(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOpenSocket - handles connected to an open port where someone is waiting. Input Parameters: @@ -255,7 +255,7 @@ static PetscErrorCode PetscSocketListen(int listenport, int *t) // "Unknown section 'Environmental Variables'" // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ PetscViewerSocketOpen - Opens a connection to a MATLAB or other socket based server. Collective @@ -397,7 +397,7 @@ PETSC_EXTERN PetscErrorCode PetscViewerCreate_Socket(PetscViewer v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerSocketSetConnection - Sets the machine and port that a PETSc socket viewer is to use @@ -466,40 +466,39 @@ PetscErrorCode PetscViewerSocketSetConnection(PetscViewer v, const char machine[ PetscMPIInt Petsc_Viewer_Socket_keyval = MPI_KEYVAL_INVALID; /*@C - PETSC_VIEWER_SOCKET_ - Creates a socket viewer shared by all processors in a communicator. + PETSC_VIEWER_SOCKET_ - Creates a socket viewer shared by all processors in a communicator. - Collective + Collective - Input Parameter: -. comm - the MPI communicator to share the `PETSCVIEWERSOCKET` `PetscViewer` + Input Parameter: +. 
comm - the MPI communicator to share the `PETSCVIEWERSOCKET` `PetscViewer` - Level: intermediate + Level: intermediate Options Database Keys: For use with the default `PETSC_VIEWER_SOCKET_WORLD` or if - `NULL` is passed for machine or `PETSC_DEFAULT` is passed for port -+ -viewer_socket_machine - machine to connect to -- -viewer_socket_port - port to connect to + `NULL` is passed for machine or `PETSC_DEFAULT` is passed for port ++ -viewer_socket_machine - machine to connect to +- -viewer_socket_port - port to connect to Environmental variables: -+ `PETSC_VIEWER_SOCKET_PORT` - portnumber -- `PETSC_VIEWER_SOCKET_MACHINE` - machine name ++ `PETSC_VIEWER_SOCKET_PORT` - portnumber +- `PETSC_VIEWER_SOCKET_MACHINE` - machine name - Notes: - This object is destroyed in `PetscFinalize()`, `PetscViewerDestroy()` should never be called on it + Notes: + This object is destroyed in `PetscFinalize()`, `PetscViewerDestroy()` should never be called on it - Unlike almost all other PETSc routines, `PETSC_VIEWER_SOCKET_()` does not return - an error code, it returns NULL if it fails. The `PETSCVIEWERSOCKET` `PetscViewer` is usually used in the form -$ XXXView(XXX object, PETSC_VIEWER_SOCKET_(comm)); + Unlike almost all other PETSc routines, `PETSC_VIEWER_SOCKET_()` does not return + an error code, it returns `NULL` if it fails. The `PETSCVIEWERSOCKET` `PetscViewer` is usually used in the form `XXXView(XXX object, PETSC_VIEWER_SOCKET_(comm))` - Currently the only socket client available is MATLAB. See - src/dm/tests/ex12.c and ex12.m for an example of usage. + Currently the only socket client available is MATLAB. See + src/dm/tests/ex12.c and ex12.m for an example of usage. - Connects to a waiting socket and stays connected until `PetscViewerDestroy()` is called. + Connects to a waiting socket and stays connected until `PetscViewerDestroy()` is called. - Use this for communicating with an interactive MATLAB session, see `PETSC_VIEWER_MATLAB_()` for writing output to a - .mat file. Use `PetscMatlabEngineCreate()` or `PETSC_MATLAB_ENGINE_()`, `PETSC_MATLAB_ENGINE_SELF`, or `PETSC_MATLAB_ENGINE_WORLD` - for communicating with a MATLAB Engine + Use this for communicating with an interactive MATLAB session, see `PETSC_VIEWER_MATLAB_()` for writing output to a + .mat file. 
Use `PetscMatlabEngineCreate()` or `PETSC_MATLAB_ENGINE_()`, `PETSC_MATLAB_ENGINE_SELF`, or `PETSC_MATLAB_ENGINE_WORLD` + for communicating with a MATLAB Engine .seealso: [](sec_viewers), `PETSCVIEWERMATLAB`, `PETSCVIEWERSOCKET`, `PETSC_VIEWER_SOCKET_WORLD`, `PETSC_VIEWER_SOCKET_SELF`, `PetscViewerSocketOpen()`, `PetscViewerCreate()`, `PetscViewerSocketSetConnection()`, `PetscViewerDestroy()`, `PETSC_VIEWER_SOCKET_()`, `PetscViewerBinaryWrite()`, `PetscViewerBinaryRead()`, diff --git a/src/sys/classes/viewer/impls/string/stringv.c b/src/sys/classes/viewer/impls/string/stringv.c index 11ec31d8cca..f5d0d74a333 100644 --- a/src/sys/classes/viewer/impls/string/stringv.c +++ b/src/sys/classes/viewer/impls/string/stringv.c @@ -139,7 +139,6 @@ PETSC_EXTERN PetscErrorCode PetscViewerCreate_String(PetscViewer v) } /*@C - PetscViewerStringGetStringRead - Returns the string that a `PETSCVIEWERSTRING` uses Logically Collective @@ -174,7 +173,6 @@ PetscErrorCode PetscViewerStringGetStringRead(PetscViewer viewer, const char *st } /*@C - PetscViewerStringSetString - sets the string that a string viewer will print to Logically Collective @@ -215,8 +213,7 @@ PetscErrorCode PetscViewerStringSetString(PetscViewer viewer, char string[], siz PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - +/*@ PetscViewerStringSetOwnString - tells the viewer that it now owns the string and is responsible for freeing it Logically Collective diff --git a/src/sys/classes/viewer/impls/vtk/ftn-custom/makefile b/src/sys/classes/viewer/impls/vtk/ftn-custom/makefile deleted file mode 100644 index 08508c660fe..00000000000 --- a/src/sys/classes/viewer/impls/vtk/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/classes/viewer/impls/vtk/ftn-custom/zvtkvf.c b/src/sys/classes/viewer/impls/vtk/ftn-custom/zvtkvf.c deleted file mode 100644 index 9d53b3b9f6d..00000000000 --- a/src/sys/classes/viewer/impls/vtk/ftn-custom/zvtkvf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscviewervtkopen_ PETSCVIEWERVTKOPEN -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscviewervtkopen_ petscviewervtkopen -#endif - -PETSC_EXTERN void petscviewervtkopen_(MPI_Comm *comm, char *name, PetscFileMode *type, PetscViewer *binv, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - FIXCHAR(name, len, c1); - *ierr = PetscViewerVTKOpen(MPI_Comm_f2c(*(MPI_Fint *)&*comm), c1, *type, binv); - if (*ierr) return; - FREECHAR(name, c1); -} diff --git a/src/sys/classes/viewer/impls/vtk/vtkv.c b/src/sys/classes/viewer/impls/vtk/vtkv.c index b7a85c18dc2..b3ec6f32aba 100644 --- a/src/sys/classes/viewer/impls/vtk/vtkv.c +++ b/src/sys/classes/viewer/impls/vtk/vtkv.c @@ -47,7 +47,7 @@ PetscErrorCode PetscViewerVTKAddField(PetscViewer viewer, PetscObject dm, PetscE PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerVTKGetDM - get the `DM` associated with the `PETSCVIEWERVTK` viewer Collective @@ -233,7 +233,7 @@ PETSC_EXTERN PetscErrorCode PetscViewerCreate_VTK(PetscViewer v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerVTKOpen - Opens a `PETSCVIEWERVTK` viewer file. Collective @@ -243,9 +243,9 @@ PETSC_EXTERN PetscErrorCode PetscViewerCreate_VTK(PetscViewer v) . 
name - name of file - type - type of file .vb - FILE_MODE_WRITE - create new file for binary output - FILE_MODE_READ - open existing file for binary input (not currently supported) - FILE_MODE_APPEND - open existing file for binary output (not currently supported) + FILE_MODE_WRITE - create new file for binary output + FILE_MODE_READ - open existing file for binary input (not currently supported) + FILE_MODE_APPEND - open existing file for binary output (not currently supported) .ve Output Parameter: diff --git a/src/sys/classes/viewer/interface/dupl.c b/src/sys/classes/viewer/interface/dupl.c index a7aa0819bce..487fa8f0fd2 100644 --- a/src/sys/classes/viewer/interface/dupl.c +++ b/src/sys/classes/viewer/interface/dupl.c @@ -1,6 +1,6 @@ #include /*I "petscviewer.h" I*/ -/*@C +/*@ PetscViewerGetSubViewer - Creates a new `PetscViewer` (same type as the old) that lives on a subcommunicator of the original viewer's communicator @@ -26,7 +26,7 @@ parallel object. For example `PCView()` on a `PCBJACOBI` could use this to obtain a `PetscViewer` that is used with the sequential `KSP` on one block of the preconditioner. - `PetscViewerFlush()` is run automatically at the begining of `PetscViewerGetSubViewer()` and with `PetscViewerRestoreSubViewer()` + `PetscViewerFlush()` is run automatically at the beginning of `PetscViewerGetSubViewer()` and with `PetscViewerRestoreSubViewer()` for `PETSCVIEWERASCII` `PETSCVIEWERDRAW` and `PETSCVIEWERBINARY` only support returning a singleton viewer on MPI rank 0, @@ -64,7 +64,7 @@ PetscErrorCode PetscViewerGetSubViewer(PetscViewer viewer, MPI_Comm comm, PetscV PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerRestoreSubViewer - Restores a `PetscViewer` obtained with `PetscViewerGetSubViewer()`. Collective diff --git a/src/sys/classes/viewer/interface/flush.c b/src/sys/classes/viewer/interface/flush.c index d5c3b3891c2..b54fd4c1ad2 100644 --- a/src/sys/classes/viewer/interface/flush.c +++ b/src/sys/classes/viewer/interface/flush.c @@ -1,6 +1,6 @@ #include /*I "petscviewer.h" I*/ -/*@C +/*@ PetscViewerFlush - Flushes a `PetscViewer` (i.e. tries to dump all the data that has been printed through a `PetscViewer`). 
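For reference, a minimal sketch of the subviewer pattern documented above (not part of the patch): each rank obtains a sequential subviewer of the shared stdout viewer, prints through it, and restores it before the final flush.
.vb
  PetscViewer world, sub;
  PetscMPIInt rank;

  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscViewerASCIIGetStdout(PETSC_COMM_WORLD, &world));
  PetscCall(PetscViewerGetSubViewer(world, PETSC_COMM_SELF, &sub)); /* collective; flushes world first */
  PetscCall(PetscViewerASCIIPrintf(sub, "rank %d reporting\n", rank));
  PetscCall(PetscViewerRestoreSubViewer(world, PETSC_COMM_SELF, &sub)); /* restore before reusing world */
  PetscCall(PetscViewerFlush(world));
.ve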
diff --git a/src/sys/classes/viewer/interface/ftn-custom/zviewaf.c b/src/sys/classes/viewer/interface/ftn-custom/zviewaf.c index adf413971fc..0779ae6a4a7 100644 --- a/src/sys/classes/viewer/interface/ftn-custom/zviewaf.c +++ b/src/sys/classes/viewer/interface/ftn-custom/zviewaf.c @@ -2,54 +2,13 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscviewersetformat_ PETSCVIEWERSETFORMAT - #define petscviewersettype_ PETSCVIEWERSETTYPE - #define petscviewergettype_ PETSCVIEWERGETTYPE - #define petscviewerpushformat_ PETSCVIEWERPUSHFORMAT - #define petscviewerpopformat_ PETSCVIEWERPOPFORMAT #define petscviewerandformatcreate_ PETSCVIEWERANDFORMATCREATE #define petscviewerandformatdestroy_ PETSCVIEWERANDFORMATDESTROY - #define petscviewergetsubviewer_ PETSCVIEWERGETSUBVIEWER - #define petscviewerrestoresubviewer_ PETSCVIEWERRESTORESUBVIEWER - #define petscviewierview_ PETSCVIEWERVIEW - #define petscviewerflush_ PETSCVIEWERFLUSH #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscviewersetformat_ petscviewersetformat - #define petscviewersettype_ petscviewersettype - #define petscviewergettype_ petscviewergettype - #define petscviewerpushformat_ petscviewerpushformat - #define petscviewerpopformat_ petscviewerpopformat #define petscviewerandformatcreate_ petscviewerandformatcreate #define petscviewerandformatdestroy_ petscviewerandformatdestroy - #define petscviewergetsubviewer_ petscviewergetsubviewer - #define petscviewerrestoresubviewer_ petscviewerrestoresubviewer - #define petscviewierview_ petscviewerview - #define petscviewerflush_ petscviewerflush #endif -PETSC_EXTERN void petscviewerflush_(PetscViewer *viewer, int *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscViewerFlush(v); -} - -PETSC_EXTERN void petscviewergetsubviewer_(PetscViewer *vin, MPI_Fint *comm, PetscViewer *outviewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscViewerGetSubViewer(v, MPI_Comm_f2c(*(comm)), outviewer); -} - -PETSC_EXTERN void petscviewerrestoresubviewer_(PetscViewer *vin, MPI_Fint *comm, PetscViewer *outviewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscViewerRestoreSubViewer(v, MPI_Comm_f2c(*(comm)), outviewer); -} - -PETSC_EXTERN PetscErrorCode PetscViewerSetFormatDeprecated(PetscViewer, PetscViewerFormat); - PETSC_EXTERN void petscviewerandformatcreate_(PetscViewer *vin, PetscViewerFormat *format, PetscViewerAndFormat **vf, PetscErrorCode *ierr) { PetscViewer v; @@ -61,51 +20,3 @@ PETSC_EXTERN void petscviewerandformatdestroy_(PetscViewerAndFormat **vf, PetscE { *ierr = PetscViewerAndFormatDestroy(vf); } - -PETSC_EXTERN void petscviewersetformat_(PetscViewer *vin, PetscViewerFormat *format, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscViewerSetFormatDeprecated(v, *format); -} - -PETSC_EXTERN void petscviewersettype_(PetscViewer *x, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = PetscViewerSetType(*x, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void petscviewergettype_(PetscViewer *viewer, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *c1; - - *ierr = PetscViewerGetType(*viewer, &c1); - *ierr = PetscStrncpy(type, c1, len); - FIXRETURNCHAR(PETSC_TRUE, type, len); -} - -PETSC_EXTERN void petscviewerpushformat_(PetscViewer *vin, PetscViewerFormat 
*format, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscViewerPushFormat(v, *format); -} - -PETSC_EXTERN void petscviewerpopformat_(PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscViewerPopFormat(v); -} - -PETSC_EXTERN void petscviewerview_(PetscViewer *vin, PetscViewer *viewerin, PetscErrorCode *ierr) -{ - PetscViewer v, viewer; - PetscPatchDefaultViewers_Fortran(vin, v); - PetscPatchDefaultViewers_Fortran(viewerin, viewer); - *ierr = PetscViewerView(v, viewer); -} diff --git a/src/sys/classes/viewer/interface/ftn-custom/zviewasetf.c b/src/sys/classes/viewer/interface/ftn-custom/zviewasetf.c deleted file mode 100644 index c4f8accdea9..00000000000 --- a/src/sys/classes/viewer/interface/ftn-custom/zviewasetf.c +++ /dev/null @@ -1,14 +0,0 @@ -#include - -/* - We need this stub function in a separate file that does not include petscviewer.h so that PETSc Fortran - builds do not print messages about deprecated functions -*/ -typedef PetscEnum PetscViewerFormat; - -PETSC_EXTERN PetscErrorCode PetscViewerSetFormat(PetscViewer, PetscViewerFormat); - -PETSC_EXTERN PetscErrorCode PetscViewerSetFormatDeprecated(PetscViewer v, PetscViewerFormat f) -{ - return PetscViewerSetFormat(v, f); -} diff --git a/src/sys/classes/viewer/interface/view.c b/src/sys/classes/viewer/interface/view.c index b9d1da0a4f4..f48444d02c3 100644 --- a/src/sys/classes/viewer/interface/view.c +++ b/src/sys/classes/viewer/interface/view.c @@ -164,7 +164,7 @@ PetscErrorCode PetscViewerAndFormatDestroy(PetscViewerAndFormat **vf) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerGetType - Returns the type of a `PetscViewer`. Not Collective @@ -191,7 +191,7 @@ PetscErrorCode PetscViewerGetType(PetscViewer viewer, PetscViewerType *type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerSetOptionsPrefix - Sets the prefix used for searching for `PetscViewer` options in the database during `PetscViewerSetFromOptions()`. @@ -217,7 +217,7 @@ PetscErrorCode PetscViewerSetOptionsPrefix(PetscViewer viewer, const char prefix PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerAppendOptionsPrefix - Appends to the prefix used for searching for `PetscViewer` options in the database during `PetscViewerSetFromOptions()`. @@ -243,7 +243,7 @@ PetscErrorCode PetscViewerAppendOptionsPrefix(PetscViewer viewer, const char pre PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerGetOptionsPrefix - Gets the prefix used for searching for `PetscViewer` options in the database during `PetscViewerSetFromOptions()`. @@ -296,7 +296,7 @@ PetscErrorCode PetscViewerSetUp(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerViewFromOptions - View from the viewer based on options in the options database Collective @@ -321,7 +321,7 @@ PetscErrorCode PetscViewerViewFromOptions(PetscViewer A, PetscObject obj, const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerView - Visualizes a viewer object. Collective diff --git a/src/sys/classes/viewer/interface/viewa.c b/src/sys/classes/viewer/interface/viewa.c index cdf506d2855..413660963a9 100644 --- a/src/sys/classes/viewer/interface/viewa.c +++ b/src/sys/classes/viewer/interface/viewa.c @@ -5,7 +5,7 @@ const char *const PetscViewerFormats[] = {"DEFAULT", "ASCII_MATLAB", "ASCII_MATH /*@C PetscViewerSetFormat - Sets the format for a `PetscViewer`. 
- Logically Collective + Logically Collective, No Fortran Support This routine is deprecated, you should use `PetscViewerPushFormat()`/`PetscViewerPopFormat()` @@ -31,7 +31,7 @@ PetscErrorCode PetscViewerSetFormat(PetscViewer viewer, PetscViewerFormat format PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerPushFormat - Sets the format for a `PetscViewer`. Logically Collective @@ -60,7 +60,7 @@ PetscErrorCode PetscViewerPushFormat(PetscViewer viewer, PetscViewerFormat forma PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerPopFormat - Resets the format for a `PetscViewer` to the value it had before the previous call to `PetscViewerPushFormat()` Logically Collective @@ -83,7 +83,7 @@ PetscErrorCode PetscViewerPopFormat(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerGetFormat - Gets the current format for `PetscViewer`. Not Collective diff --git a/src/sys/classes/viewer/interface/viewreg.c b/src/sys/classes/viewer/interface/viewreg.c index fb926d93590..2096d51db4c 100644 --- a/src/sys/classes/viewer/interface/viewreg.c +++ b/src/sys/classes/viewer/interface/viewreg.c @@ -381,6 +381,15 @@ static PetscErrorCode PetscOptionsGetViewers_Internal(MPI_Comm comm, PetscOption Level: intermediate Notes: + The argument has the following form +.vb + type:filename:format:filemode +.ve + where all parts are optional, but you need to include the colon to access the next part. The mode argument must be a valid `PetscFileMode`, i.e. read, write, append, update, or append_update. For example, to read from an HDF5 file, use +.vb + hdf5:sol.h5::read +.ve + If no value is provided, ascii:stdout is used + ascii[:[filename][:[format][:append]]] - defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab, for example ascii::ascii_info prints just the information about the object not all details @@ -512,7 +521,7 @@ PetscErrorCode PetscViewerCreate(MPI_Comm comm, PetscViewer *inviewer) PetscViewer viewer; PetscFunctionBegin; - *inviewer = NULL; + PetscAssertPointer(inviewer, 2); PetscCall(PetscViewerInitializePackage()); PetscCall(PetscHeaderCreate(viewer, PETSC_VIEWER_CLASSID, "PetscViewer", "PetscViewer", "Viewer", comm, PetscViewerDestroy, PetscViewerView)); *inviewer = viewer; @@ -520,7 +529,7 @@ PetscErrorCode PetscViewerCreate(MPI_Comm comm, PetscViewer *inviewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscViewerSetType - Builds `PetscViewer` for a particular implementation.
Collective @@ -568,7 +577,7 @@ PetscErrorCode PetscViewerSetType(PetscViewer viewer, PetscViewerType type) /*@C PetscViewerRegister - Adds a viewer to those available for use with `PetscViewerSetType()` - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined viewer diff --git a/src/sys/classes/viewer/tutorials/ex2f.F90 b/src/sys/classes/viewer/tutorials/ex2f.F90 new file mode 100644 index 00000000000..875af659f96 --- /dev/null +++ b/src/sys/classes/viewer/tutorials/ex2f.F90 @@ -0,0 +1,19 @@ + program ex2f + +#include <petsc/finclude/petscsys.h> + use petscsys + implicit none + + PetscErrorCode ierr + + PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,'ex2f90 test'//c_new_line,ierr)) + PetscCallA(PetscViewerView(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_STDOUT_WORLD,ierr)) + PetscCallA(PetscFinalize(ierr)) + end + +/*TEST + + test: + args: + +TEST*/ diff --git a/src/sys/classes/viewer/tutorials/output/ex2f_1.out b/src/sys/classes/viewer/tutorials/output/ex2f_1.out new file mode 100644 index 00000000000..5aecd518756 --- /dev/null +++ b/src/sys/classes/viewer/tutorials/output/ex2f_1.out @@ -0,0 +1,3 @@ +PetscViewer Object: 1 MPI process + type: ascii + Filename: stdout diff --git a/src/sys/classes/viewer/utils/btview.c b/src/sys/classes/viewer/utils/btview.c index 3c080cd28ae..cd7aff84451 100644 --- a/src/sys/classes/viewer/utils/btview.c +++ b/src/sys/classes/viewer/utils/btview.c @@ -5,7 +5,6 @@ PetscErrorCode PetscBTView(PetscInt m, const PetscBT bt, PetscViewer viewer) { PetscFunctionBegin; - if (m < 1) PetscFunctionReturn(PETSC_SUCCESS); if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PETSC_COMM_SELF, &viewer)); PetscCall(PetscViewerASCIIPushSynchronized(viewer)); for (PetscInt i = 0; i < m; ++i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%" PetscInt_FMT " %hhu\n", i, PetscBTLookup(bt, i))); diff --git a/src/sys/dll/dl.c b/src/sys/dll/dl.c index 00dbcbff331..6b8458a159b 100644 --- a/src/sys/dll/dl.c +++ b/src/sys/dll/dl.c @@ -49,7 +49,7 @@ PetscErrorCode PetscDLLibraryPrintPath(PetscDLLibrary libs) .seealso: `PetscFileRetrieve()` @*/ -PetscErrorCode PetscDLLibraryRetrieve(MPI_Comm comm, const char libname[], char *lname, size_t llen, PetscBool *found) +PetscErrorCode PetscDLLibraryRetrieve(MPI_Comm comm, const char libname[], char lname[], size_t llen, PetscBool *found) { char *buf, *par2, *gz = NULL, *so = NULL; size_t len, blen; @@ -99,7 +99,7 @@ PetscErrorCode PetscDLLibraryRetrieve(MPI_Comm comm, const char libname[], char /*@C PetscDLLibraryOpen - Opens a PETSc dynamic link library - Collective + Collective, No Fortran Support Input Parameters: + comm - MPI processes that are opening the library @@ -190,7 +190,7 @@ PetscErrorCode PetscDLLibraryOpen(MPI_Comm comm, const char path[], PetscDLLibra /*@C PetscDLLibrarySym - Load a symbol from a list of dynamic link libraries.
- Collective + Collective, No Fortran Support Input Parameters: + comm - the MPI communicator that will load the symbol @@ -292,7 +292,7 @@ PetscErrorCode PetscDLLibrarySym(MPI_Comm comm, PetscDLLibrary *outlist, const c /*@C PetscDLLibraryAppend - Appends another dynamic link library to the end of the search list - Collective + Collective, No Fortran Support Input Parameters: + comm - MPI communicator @@ -377,7 +377,7 @@ PetscErrorCode PetscDLLibraryAppend(MPI_Comm comm, PetscDLLibrary *outlist, cons /*@C PetscDLLibraryPrepend - Add another dynamic library to search for symbols to the beginning of the search list - Collective + Collective, No Fortran Support Input Parameters: + comm - MPI communicator @@ -467,7 +467,7 @@ PetscErrorCode PetscDLLibraryPrepend(MPI_Comm comm, PetscDLLibrary *outlist, con /*@C PetscDLLibraryClose - Destroys the search path of dynamic libraries and closes the libraries. - Collective + Collective, No Fortran Support Input Parameter: . list - library list diff --git a/src/sys/dll/dlimpl.c b/src/sys/dll/dlimpl.c index 8247df40777..143856166cf 100644 --- a/src/sys/dll/dlimpl.c +++ b/src/sys/dll/dlimpl.c @@ -32,7 +32,7 @@ typedef void *dlsymbol_t; /*@C PetscDLOpen - opens a dynamic library - Not Collective + Not Collective, No Fortran Support Input Parameters: + name - name of library @@ -128,7 +128,7 @@ PetscErrorCode PetscDLOpen(const char name[], PetscDLMode mode, PetscDLHandle *h /*@C PetscDLClose - closes a dynamic library - Not Collective + Not Collective, No Fortran Support Input Parameter: . handle - the handle for the library obtained with `PetscDLOpen()` @@ -193,7 +193,7 @@ PetscErrorCode PetscDLClose(PetscDLHandle *handle) /*@C PetscDLSym - finds a symbol in a dynamic library - Not Collective + Not Collective, No Fortran Support Input Parameters: + handle - obtained with `PetscDLOpen()` or `NULL` @@ -308,7 +308,7 @@ PetscErrorCode PetscDLSym(PetscDLHandle handle, const char symbol[], void **valu /*@C PetscDLAddr - find the name of a symbol in a dynamic library - Not Collective + Not Collective, No Fortran Support Input Parameters: . func - pointer to the function, `NULL` if not found diff --git a/src/sys/dll/reg.c b/src/sys/dll/reg.c index 8a8597e45b6..7987408129a 100644 --- a/src/sys/dll/reg.c +++ b/src/sys/dll/reg.c @@ -294,12 +294,13 @@ PetscErrorCode PetscFunctionListAdd_Private(PetscFunctionList *fl, const char na PetscAssertPointer(fl, 1); if (name) PetscAssertPointer(name, 2); if (fptr) PetscValidFunction(fptr, 3); + if (!fptr && !*fl) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(PetscFunctionListCreate_Private(0, fl)); PetscCall(PetscHMapFuncInsert_Private((*fl)->map, name, fptr)); PetscFunctionReturn(PETSC_SUCCESS); } -/*@ +/*@C PetscFunctionListDestroy - Destroys a list of registered routines. Input Parameter: @@ -340,7 +341,7 @@ PetscErrorCode PetscFunctionListDestroy(PetscFunctionList *fl) } /* end while */ \ } while (0) -/*@ +/*@C PetscFunctionListClear - Clear a `PetscFunctionList` Not Collective @@ -384,6 +385,8 @@ PetscErrorCode PetscFunctionListPrintAll(void) /*@C PetscFunctionListPrintNonEmpty - Print composed names for non `NULL` function pointers + Logically Collective, No Fortran Support + Input Parameter: . 
fl - the function list @@ -409,6 +412,8 @@ PetscErrorCode PetscFunctionListPrintNonEmpty(PetscFunctionList fl) /*MC PetscFunctionListFind - Find function registered under given name + Not Collective, No Fortran Support + Synopsis: #include PetscErrorCode PetscFunctionListFind(PetscFunctionList flist,const char name[],void (**fptr)(void)) @@ -434,7 +439,7 @@ PetscErrorCode PetscFunctionListFind_Private(PetscFunctionList fl, const char na PetscFunctionReturn(PETSC_SUCCESS); } -/*@ +/*@C PetscFunctionListView - prints out contents of a `PetscFunctionList` Collective @@ -482,7 +487,7 @@ PetscErrorCode PetscFunctionListView(PetscFunctionList list, PetscViewer viewer) PetscFunctionListGet - Gets an array that contains the entries in `PetscFunctionList`, this is used by help etc. - Not Collective + Not Collective, No Fortran Support Input Parameter: . list - list of types @@ -494,8 +499,7 @@ PetscErrorCode PetscFunctionListView(PetscFunctionList list, PetscViewer viewer) Level: developer Note: - This allocates the array so that must be freed. BUT the individual entries are - not copied so should not be freed. + This allocates the array so it must be freed with `PetscFree()`. BUT the individual entries should not be freed. .seealso: `PetscFunctionListAdd()`, `PetscFunctionList` @*/ @@ -521,7 +525,7 @@ PetscErrorCode PetscFunctionListGet(PetscFunctionList list, const char ***array, /*@C PetscFunctionListPrintTypes - Prints the methods available in a list of functions - Collective + Collective, No Fortran Support Input Parameters: + comm - the communicator (usually `MPI_COMM_WORLD`) @@ -553,7 +557,7 @@ PetscErrorCode PetscFunctionListPrintTypes(MPI_Comm comm, FILE *fd, const char p PetscFunctionReturn(PETSC_SUCCESS); } -/*@ +/*@C PetscFunctionListDuplicate - Creates a new list from a given function list `PetscFunctionList`. Input Parameter: diff --git a/src/sys/error/adebug.c b/src/sys/error/adebug.c index d7e4f80ac82..85bf815d58d 100644 --- a/src/sys/error/adebug.c +++ b/src/sys/error/adebug.c @@ -17,7 +17,7 @@ static PetscBool UseDebugTerminal = PETSC_TRUE; PetscBool petscwaitonerrorflg = PETSC_FALSE; PetscBool petscindebugger = PETSC_FALSE; -/*@C +/*@ PetscSetDebugTerminal - Sets the terminal to use for debugging. Not Collective; No Fortran Support @@ -55,7 +55,7 @@ PetscErrorCode PetscSetDebugTerminal(const char terminal[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSetDebugger - Sets options associated with the debugger. Not Collective; No Fortran Support @@ -82,10 +82,10 @@ PetscErrorCode PetscSetDebugger(const char debugger[], PetscBool usedebugtermina PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSetDefaultDebugger - Causes PETSc to use its default debugger and output terminal - Not Collective + Not Collective, No Fortran Support Level: developer @@ -121,7 +121,7 @@ static PetscErrorCode PetscCheckDebugger_Private(const char defaultDbg[], const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSetDebuggerFromString - Set the complete path for the debugger for PETSc to use. @@ -134,7 +134,7 @@ static PetscErrorCode PetscCheckDebugger_Private(const char defaultDbg[], const .seealso: `PetscSetDebugger()`, `PetscSetDefaultDebugger()`, `PetscAttachDebugger()` @*/ -PetscErrorCode PetscSetDebuggerFromString(const char *string) +PetscErrorCode PetscSetDebuggerFromString(const char string[]) { const char *debugger = NULL; PetscBool useterminal = PETSC_TRUE; @@ -464,7 +464,7 @@ PetscErrorCode PetscAttachDebugger(void) a debugger to a running process when an error is detected.
This routine is useful for examining variables, etc. - Not Collective + Not Collective, No Fortran Support Input Parameters: + comm - communicator over which error occurred @@ -524,12 +524,12 @@ PetscErrorCode PetscAttachDebuggerErrorHandler(MPI_Comm comm, int line, const ch PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscStopForDebugger - Prints a message to the screen indicating how to attach to the process with the debugger and then waits for the debugger to attach. - Not Collective + Not Collective, No Fortran Support Options Database Key: . -stop_for_debugger - will stop for you to attach the debugger when `PetscInitialize()` is called diff --git a/src/sys/error/checkptr.c b/src/sys/error/checkptr.c index 6843075bf4d..c736d6d4363 100644 --- a/src/sys/error/checkptr.c +++ b/src/sys/error/checkptr.c @@ -40,7 +40,7 @@ static PetscBool PetscSegvJumpBuf_set; /*@C PetscSignalSegvCheckPointerOrMpi - To be called from a signal handler for SIGSEGV. - Not Collective + Not Collective, No Fortran Support Level: developer @@ -59,7 +59,7 @@ void PetscSignalSegvCheckPointerOrMpi(void) /*@C PetscCheckPointer - Returns `PETSC_TRUE` if a pointer points to accessible data - Not Collective + Not Collective, No Fortran Support Input Parameters: + ptr - the pointer diff --git a/src/sys/error/err.c b/src/sys/error/err.c index 0ec2653a654..0632e182af7 100644 --- a/src/sys/error/err.c +++ b/src/sys/error/err.c @@ -25,7 +25,7 @@ static EH eh = NULL; PetscEmacsClientErrorHandler - Error handler that uses the emacsclient program to load the file where the error occurred. Then calls the "previous" error handler. - Not Collective + Not Collective, No Fortran Support Input Parameters: + comm - communicator over which error occurred @@ -87,7 +87,7 @@ PetscErrorCode PetscEmacsClientErrorHandler(MPI_Comm comm, int line, const char /*@C PetscPushErrorHandler - Sets a routine to be called on detection of errors. - Not Collective + Not Collective, No Fortran Support Input Parameters: + handler - error handler routine @@ -159,7 +159,7 @@ PetscErrorCode PetscPopErrorHandler(void) /*@C PetscReturnErrorHandler - Error handler that causes a return without printing an error message. - Not Collective + Not Collective, No Fortran Support Input Parameters: + comm - communicator over which error occurred @@ -251,7 +251,7 @@ static const char *PetscErrorStrings[] = { /*@C PetscErrorMessage - Returns the text string associated with a PETSc error code. - Not Collective + Not Collective, No Fortran Support Input Parameter: . errnum - the error code @@ -267,7 +267,7 @@ static const char *PetscErrorStrings[] = { `PetscError()`, `SETERRQ()`, `PetscCall()` `PetscAbortErrorHandler()`, `PetscTraceBackErrorHandler()` @*/ -PetscErrorCode PetscErrorMessage(PetscErrorCode errnum, const char *text[], char **specific) +PetscErrorCode PetscErrorMessage(PetscErrorCode errnum, const char *text[], char *specific[]) { PetscFunctionBegin; if (text) { @@ -425,7 +425,7 @@ PetscErrorCode PetscError(MPI_Comm comm, int line, const char *func, const char return ierr; } -/*@C +/*@ PetscIntView - Prints an array of integers; useful for debugging. 
Collective @@ -438,7 +438,7 @@ PetscErrorCode PetscError(MPI_Comm comm, int line, const char *func, const char Level: intermediate Note: - This may be called from within the debugger + This may be called from within the debugger, passing 0 as the viewer Developer Note: `idx` cannot be const because it may be passed to binary viewer where temporary byte swapping may be done @@ -522,7 +522,7 @@ PetscErrorCode PetscIntView(PetscInt N, const PetscInt idx[], PetscViewer viewer PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscRealView - Prints an array of doubles; useful for debugging. Collective @@ -535,7 +535,7 @@ PetscErrorCode PetscIntView(PetscInt N, const PetscInt idx[], PetscViewer viewer Level: intermediate Note: - This may be called from within the debugger + This may be called from within the debugger, passing 0 as the viewer Developer Note: `idx` cannot be const because it may be passed to binary viewer where temporary byte swapping may be done @@ -627,7 +627,7 @@ PetscErrorCode PetscRealView(PetscInt N, const PetscReal idx[], PetscViewer view PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscScalarView - Prints an array of `PetscScalar`; useful for debugging. Collective @@ -640,7 +640,7 @@ PetscErrorCode PetscRealView(PetscInt N, const PetscReal idx[], PetscViewer view Level: intermediate Note: - This may be called from within the debugger + This may be called from within the debugger, passing 0 as the viewer Developer Note: `idx` cannot be const because it may be passed to binary viewer where byte swapping may be done @@ -939,6 +939,8 @@ PETSC_EXTERN const char *PetscHIPSolverGetErrorName(hipsolverStatus_t status) PetscMPIErrorString - Given an MPI error code returns the `MPI_Error_string()` appropriately formatted for displaying with the PETSc error handlers. + Not Collective, No Fortran Support + Input Parameter: . err - the MPI error code diff --git a/src/sys/error/errabort.c b/src/sys/error/errabort.c index ffe8038ceac..4b63c4c0877 100644 --- a/src/sys/error/errabort.c +++ b/src/sys/error/errabort.c @@ -9,7 +9,7 @@ This routine is very useful when running in the debugger, because the user can look directly at the stack frames and the variables where the error occurred - Not Collective + Not Collective, No Fortran Support Input Parameters: + comm - communicator over which error occurred diff --git a/src/sys/error/errstop.c b/src/sys/error/errstop.c index 59ab3f3fc28..e4adfdb8a87 100644 --- a/src/sys/error/errstop.c +++ b/src/sys/error/errstop.c @@ -2,15 +2,15 @@ #include "err.h" /*@C - PetscMPIAbortErrorHandler - Calls PETSCABORT and exits. + PetscMPIAbortErrorHandler - Calls `PETSCABORT()` and exits. - Not Collective + Not Collective, No Fortran Support Input Parameters: + comm - communicator over which error occurred -. line - the line number of the error (indicated by __LINE__) +. line - the line number of the error (indicated by `__LINE__`) . fun - the function name -. file - the file in which the error was detected (indicated by __FILE__) +. file - the file in which the error was detected (indicated by `__FILE__`) . mess - an error text string, usually just printed to the screen . n - the generic error number .
p - `PETSC_ERROR_INITIAL` if error just detected, otherwise `PETSC_ERROR_REPEAT` diff --git a/src/sys/error/errtrace.c b/src/sys/error/errtrace.c index 2dfbe98e5e3..7244650e662 100644 --- a/src/sys/error/errtrace.c +++ b/src/sys/error/errtrace.c @@ -8,10 +8,14 @@ #include "err.h" #include // PETSC_TLS +#if defined(PETSC_HAVE_CUPM) + #include +#endif + /*@C PetscIgnoreErrorHandler - Deprecated, use `PetscReturnErrorHandler()`. Ignores the error, allows program to continue as if error did not occur - Not Collective + Not Collective, No Fortran Support Input Parameters: + comm - communicator over which error occurred @@ -148,7 +152,7 @@ static PETSC_TLS PetscBool petsc_traceback_error_silent = PETSC_FALSE; PetscTraceBackErrorHandler - Default error handler routine that generates a traceback on error detection. - Not Collective + Not Collective, No Fortran Support Input Parameters: + comm - communicator over which error occurred @@ -219,12 +223,23 @@ PetscErrorCode PetscTraceBackErrorHandler(MPI_Comm comm, int line, const char *f if (text) ierr = (*PetscErrorPrintf)("%s\n", text); } if (mess) ierr = (*PetscErrorPrintf)("%s\n", mess); +#if defined(PETSC_PKG_CUDA_MIN_ARCH) + int confCudaArch = PETSC_PKG_CUDA_MIN_ARCH; // if PETSc was configured with numbered CUDA arches, get the min arch. + int runCudaArch = PetscDeviceCUPMRuntimeArch; // 0 indicates the code has never initialized a cuda device. + if (runCudaArch && confCudaArch > runCudaArch) { + ierr = (*PetscErrorPrintf)("WARNING! Running on a CUDA device with GPU architecture %d, but PETSc was configured with a minimum GPU architecture %d.\n", runCudaArch, confCudaArch); + ierr = (*PetscErrorPrintf)("If it is a cudaErrorNoKernelImageForDevice error, you may need to reconfigure PETSc with --with-cuda-arch=%d or --with-cuda-arch=%d,%d\n", runCudaArch, runCudaArch, confCudaArch); + } +#endif ierr = PetscOptionsLeftError(); ierr = (*PetscErrorPrintf)("See https://petsc.org/release/faq/ for troubleshooting.\n"); if (!PetscCIEnabledPortableErrorOutput) { + size_t clen; + ierr = (*PetscErrorPrintf)("%s\n", version); - if (PetscErrorPrintfInitializeCalled) ierr = (*PetscErrorPrintf)("%s on a %s named %s by %s %s\n", pname, arch, hostname, username, date); - ierr = (*PetscErrorPrintf)("Configure options %s\n", petscconfigureoptions); + if (PetscErrorPrintfInitializeCalled) ierr = (*PetscErrorPrintf)("%s with PETSC_ARCH %s on %s by %s %s\n", pname, arch, hostname, username, date); + ierr = PetscStrlen(petscconfigureoptions, &clen); + ierr = (*PetscErrorPrintf)("Configure options: %s\n", clen ?
petscconfigureoptions : "none used"); } } /* print line of stack trace */ diff --git a/src/sys/error/ftn-custom/zerrf.c b/src/sys/error/ftn-custom/zerrf.c index 0bb4e680367..28816367009 100644 --- a/src/sys/error/ftn-custom/zerrf.c +++ b/src/sys/error/ftn-custom/zerrf.c @@ -12,8 +12,6 @@ #define petscerror_ PETSCERROR #define petscerrorf_ PETSCERRORF #define petscerrormpi_ PETSCERRORMPI - #define petscrealview_ PETSCREALVIEW - #define petscintview_ PETSCINTVIEW #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define petscpusherrorhandler_ petscpusherrorhandler #define petsctracebackerrorhandler_ petsctracebackerrorhandler @@ -24,8 +22,6 @@ #define petscerror_ petscerror #define petscerrorf_ petscerrorf #define petscerrormpi_ petscerrormpi - #define petscrealview_ petscrealview - #define petscintview_ petscintview #endif static void (*f2)(MPI_Comm *comm, int *, const char *, const char *, PetscErrorCode *, PetscErrorType *, const char *, void *, PetscErrorCode *, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2, PETSC_FORTRAN_CHARLEN_T len3); @@ -130,30 +126,3 @@ PETSC_EXTERN void petscerrormpi_(PetscErrorCode *err) *err = PETSC_ERR_MPI; } #endif - -PETSC_EXTERN void petscrealview_(PetscInt *n, PetscReal *d, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscRealView(*n, d, v); -} - -PETSC_EXTERN void petscintview_(PetscInt *n, PetscInt *d, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscIntView(*n, d, v); -} - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscscalarview_ PETSCSCALARVIEW -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscscalarview_ petscscalarview -#endif - -PETSC_EXTERN void petscscalarview_(PetscInt *n, PetscScalar *d, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscScalarView(*n, d, v); -} diff --git a/src/sys/error/pstack.c b/src/sys/error/pstack.c index 6d4f3b1e895..4ff9dbb5d1c 100644 --- a/src/sys/error/pstack.c +++ b/src/sys/error/pstack.c @@ -9,7 +9,7 @@ PetscStack petscstack; static PetscBool amsmemstack = PETSC_FALSE; -/*@C +/*@ PetscStackSAWsGrantAccess - Grants access of the PETSc stack frames to the SAWs publisher Collective on `PETSC_COMM_WORLD`? @@ -30,7 +30,7 @@ void PetscStackSAWsGrantAccess(void) } } -/*@C +/*@ PetscStackSAWsTakeAccess - Takes access of the PETSc stack frames from the SAWs publisher Collective on `PETSC_COMM_WORLD`? @@ -88,6 +88,7 @@ PetscErrorCode PetscStackReset(void) return PETSC_SUCCESS; } +// PetscClangLinter pragma disable: -fdoc-sowing-chars /* PetscStackView - Print the current (default) PETSc stack to an ASCII file @@ -185,6 +186,7 @@ PetscErrorCode PetscStackCopy(PetscStack *sint, PetscStack *sout) return PETSC_SUCCESS; } +// PetscClangLinter pragma disable: -fdoc-sowing-chars /* PetscStackPrint - Prints a given PETSc stack to an ASCII file @@ -192,7 +194,7 @@ PetscErrorCode PetscStackCopy(PetscStack *sint, PetscStack *sout) Input Parameters: + sint - the PETSc stack to print -- file - the file pointer +- fp - the file pointer Level: developer diff --git a/src/sys/error/signal.c b/src/sys/error/signal.c index 87e3554960a..ca2c29e28f9 100644 --- a/src/sys/error/signal.c +++ b/src/sys/error/signal.c @@ -181,7 +181,7 @@ PetscErrorCode PetscSignalHandlerDefault(int sig, void *ptr) PetscPushSignalHandler - Catches the usual fatal errors and calls a user-provided routine. 
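A brief sketch of the routine-plus-context shape that `PetscPushSignalHandler()` expects, matching the `PetscErrorCode (*routine)(int, void *)` signature visible in the hunk below (the handler name is hypothetical, and printing from a signal handler is for illustration only):

  static PetscErrorCode MySignalRoutine(int sig, void *ctx)
  {
    (void)ctx; /* user context supplied at push time */
    return PetscPrintf(PETSC_COMM_SELF, "caught signal %d\n", sig);
  }

  PetscCall(PetscPushSignalHandler(MySignalRoutine, NULL));
  /* ... */
  PetscCall(PetscPopSignalHandler());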
- Not Collective + Not Collective, No Fortran Support Input Parameters: + routine - routine to call when a signal is received @@ -248,7 +248,7 @@ PetscErrorCode PetscPushSignalHandler(PetscErrorCode (*routine)(int, void *), vo signal(SIGSYS, PETSC_SIGNAL_CAST PetscSignalHandler_Private); #endif #if !defined(PETSC_MISSING_SIGTERM) - #if !defined(OMPI_MAJOR_VERSION) + #if !defined(PETSC_HAVE_OPENMPI) /* Open MPI may use SIGTERM to close down all its ranks; we don't want to generate many confusing PETSc error messages in that case */ signal(SIGTERM, PETSC_SIGNAL_CAST PetscSignalHandler_Private); #endif diff --git a/src/sys/f90-mod/petscdraw.h b/src/sys/f90-mod/petscdraw.h index 447b5e72a03..a8d16dc7954 100644 --- a/src/sys/f90-mod/petscdraw.h +++ b/src/sys/f90-mod/petscdraw.h @@ -3,6 +3,55 @@ ! #include "petsc/finclude/petscdraw.h" ! + type, extends(tPetscObject) :: tPetscDraw + end type tPetscDraw + PetscDraw, parameter :: PETSC_NULL_DRAW = tPetscDraw(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DRAW +#endif + + type, extends(tPetscObject) :: tPetscDrawLG + end type tPetscDrawLG + PetscDrawLG, parameter :: PETSC_NULL_DRAW_LG = tPetscDrawLG(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DRAW_LG +#endif + + type, extends(tPetscObject) :: tPetscDrawBar + end type tPetscDrawBar + PetscDrawBar, parameter :: PETSC_NULL_DRAW_BAR = tPetscDrawBar(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DRAW_BAR +#endif + + type, extends(tPetscObject) :: tPetscDrawAxis + end type tPetscDrawAxis + PetscDrawAxis, parameter :: PETSC_NULL_DRAW_AXIS = tPetscDrawAxis(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DRAW_AXIS +#endif + + type, extends(tPetscObject) :: tPetscDrawHG + end type tPetscDrawHG + PetscDrawHG, parameter :: PETSC_NULL_DRAW_HG = tPetscDrawHG(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DRAW_HG +#endif + + type, extends(tPetscObject) :: tPetscDrawSP + end type tPetscDrawSP + PetscDrawSP, parameter :: PETSC_NULL_DRAW_SP = tPetscDrawSP(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DRAW_SP +#endif + + type, extends(tPetscObject) :: tPetscDrawMesh + end type tPetscDrawMesh + PetscDrawMesh, parameter :: PETSC_NULL_DRAW_MESH = tPetscDrawMesh(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DRAW_MESH +#endif + ! Flags for draw ! PetscEnum, parameter :: PETSC_DRAW_BASIC_COLORS = 33 diff --git a/src/sys/f90-mod/petscsys.h b/src/sys/f90-mod/petscsys.h index 33805117809..6a2cb4513fa 100644 --- a/src/sys/f90-mod/petscsys.h +++ b/src/sys/f90-mod/petscsys.h @@ -10,20 +10,52 @@ ! The following block allows one to write constants that match the ! precision of PetscReal as, for example, x = .7_PETSC_REAL_KIND !
- PetscReal,Parameter :: PetscReal_Private = 1.0 - Integer,Parameter :: PETSC_REAL_KIND = Selected_Real_Kind(Precision(PetscReal_Private)) + PetscReal,Parameter :: PetscReal_Private = 1.0 + Integer,Parameter :: PETSC_REAL_KIND = Selected_Real_Kind(Precision(PetscReal_Private)) - type tPetscOptions + type :: tPetscObject PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE - end type tPetscOptions + end type tPetscObject + PetscObject, parameter :: PETSC_NULL_OBJECT = tPetscObject(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_OBJECT +#endif + type, extends(tPetscObject) :: tPetscOptions + end type tPetscOptions PetscOptions, parameter :: PETSC_NULL_OPTIONS = tPetscOptions(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_OPTIONS +#endif - type tPetscBench - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tPetscBench end type tPetscBench + PetscBench, parameter :: PETSC_NULL_BENCH = tPetscBench(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_BENCH +#endif + + type :: tPetscDevice + PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + end type tPetscDevice + PetscDevice, parameter :: PETSC_NULL_DEVICE = tPetscDevice(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DEVICE +#endif + + type, extends(tPetscObject) :: tPetscDeviceContext + end type tPetscDeviceContext + PetscDeviceContext, parameter :: PETSC_NULL_DEVICE_CONTEXT = tPetscDeviceContext(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DEVICE_CONTEXT +#endif - PetscBench, parameter :: PETSC_NULL_BM = tPetscBench(0) + type, extends(tPetscObject) :: tPetscMatlabEngine + end type tPetscMatlabEngine + PetscMatlabEngine, parameter :: PETSC_NULL_MATLAB_ENGINE = tPetscMatlabEngine(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MATLAB_ENGINE +#endif ! ------------------------------------------------------------------------ ! Non Common block Stuff declared first ! @@ -122,12 +154,12 @@ ! ! Random numbers ! - type tPetscRandom - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tPetscRandom end type tPetscRandom - PetscRandom, parameter :: PETSC_NULL_RANDOM = tPetscRandom(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_RANDOM +#endif ! #define PETSCRAND 'rand' #define PETSCRAND48 'rand48' @@ -156,12 +188,12 @@ PetscEnum, parameter :: PETSC_BUILDTWOSIDED_REDSCATTER = 2 type tPetscSubcomm - sequence PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE end type tPetscSubcomm - PetscSubcomm, parameter :: PETSC_NULL_SUBCOMM = tPetscSubcomm(0) - +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SUBCOMM +#endif ! ! PetscSubcommType !
@@ -172,7 +204,6 @@ #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) !DEC$ ATTRIBUTES DLLEXPORT::PetscReal_Private !DEC$ ATTRIBUTES DLLEXPORT::PETSC_REAL_KIND -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_OPTIONS !DEC$ ATTRIBUTES DLLEXPORT::PETSC_TRUE !DEC$ ATTRIBUTES DLLEXPORT::PETSC_FALSE !DEC$ ATTRIBUTES DLLEXPORT::PETSC_DECIDE @@ -202,7 +233,6 @@ !DEC$ ATTRIBUTES DLLEXPORT::PETSC_OWN_POINTER !DEC$ ATTRIBUTES DLLEXPORT::PETSC_USE_POINTER !DEC$ ATTRIBUTES DLLEXPORT::PETSC_i -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_RANDOM !DEC$ ATTRIBUTES DLLEXPORT::PETSC_BINARY_INT_SIZE !DEC$ ATTRIBUTES DLLEXPORT::PETSC_BINARY_FLOAT_SIZE !DEC$ ATTRIBUTES DLLEXPORT::PETSC_BINARY_CHAR_SIZE diff --git a/src/sys/f90-mod/petscsys.h90 b/src/sys/f90-mod/petscsys.h90 index 881b3a00a69..c37b1cd2944 100644 --- a/src/sys/f90-mod/petscsys.h90 +++ b/src/sys/f90-mod/petscsys.h90 @@ -1,35 +1,3 @@ -#if defined(PETSC_HAVE_FORTRAN_TYPE_STAR) - Interface - subroutine PetscObjectReference(obj,ierr) - type(*) :: obj - PetscErrorCode :: ierr - end Subroutine PetscObjectReference - subroutine PetscObjectDereference(obj,ierr) - type(*) :: obj - PetscErrorCode :: ierr - end Subroutine PetscObjectDereference - subroutine PetscObjectGetReference(obj,c,ierr) - type(*) :: obj - PetscInt c - PetscErrorCode :: ierr - end Subroutine PetscObjectGetReference - subroutine PetscObjectCompose(obj,str,obj2,ierr) - type(*) :: obj,obj2 - character(*) :: str - PetscErrorCode :: ierr - end Subroutine PetscObjectCompose - subroutine PetscObjectQuery(obj,str,obj2,ierr) - type(*) :: obj,obj2 - character(*) :: str - PetscErrorCode :: ierr - end Subroutine PetscObjectQuery - subroutine PetscBarrier(a,z) - type(*) :: a - PetscErrorCode :: z - end subroutine - end Interface -#endif - Interface Subroutine PetscPrintf(m,c,ierr) MPI_Comm :: m @@ -76,13 +44,6 @@ PetscBool s PetscErrorCode ierr End Subroutine - Subroutine PetscOptionsHasName(o,p,n,s,ierr) - import tPetscOptions - PetscOptions o - character(*) p,n - PetscBool s - PetscErrorCode ierr - End Subroutine Subroutine PetscOptionsGetIntArray(o,p,n,v,c,s,ierr) import tPetscOptions PetscOptions o @@ -109,42 +70,6 @@ PetscBool s PetscErrorCode ierr End Subroutine - Subroutine PetscOptionsSetValue(o,n,v,ierr) - import tPetscOptions - PetscOptions o - character(*) n,v - PetscErrorCode ierr - End Subroutine - Subroutine PetscOptionsClearValue(o,n,ierr) - import tPetscOptions - PetscOptions o - character(*) n - PetscErrorCode ierr - End Subroutine - Subroutine PetscOptionsClear(o,ierr) - import tPetscOptions - PetscOptions o - PetscErrorCode ierr - End Subroutine - Subroutine PetscOptionsInsertString(o,n,ierr) - import tPetscOptions - PetscOptions o - character(*) n - PetscErrorCode ierr - End Subroutine - Subroutine PetscOptionsView(o,v,ierr) - import tPetscOptions,tPetscViewer - PetscOptions, intent(in) :: o - PetscViewer, intent(in) :: v - PetscErrorCode, intent(out) :: ierr - End Subroutine PetscOptionsView - - subroutine PetscRandomSetType(a,b,ierr) - import tPetscRandom - PetscRandom a - character(*) b - PetscErrorCode ierr - end subroutine #if defined(PETSC_HAVE_FORTRAN_FREE_LINE_LENGTH_NONE) subroutine PetscErrorf(ierr,line,file) @@ -172,25 +97,6 @@ PetscErrorCode, intent(out) :: ierr end Subroutine PetscFinalize - subroutine PetscRandomView(a,b,ierr) - import tPetscRandom,tPetscViewer - PetscRandom a - PetscViewer b - PetscErrorCode ierr - end subroutine - - subroutine PetscRandomDestroy(a,z) - import tPetscRandom - PetscRandom a - PetscErrorCode z - end subroutine - - subroutine 
PetscSubcommView(a,b,z) - import tPetscSubcomm,tPetscViewer - PetscSubcomm a ! PetscSubcomm - PetscViewer b ! PetscViewer - PetscErrorCode z - end subroutine subroutine PetscSubcommGetParent(a,b,z) import tPetscSubcomm PetscSubcomm a ! PetscSubcomm @@ -215,19 +121,6 @@ PetscErrorCode,intent(in) :: ierr end subroutine - subroutine PetscLogEventRegister(name,classid,event,ierr) - character(len=*), intent(in) :: name - PetscClassId, intent(in) :: classid - PetscLogEvent, intent(in) :: event - PetscErrorCode, intent(out) :: ierr - end subroutine PetscLogEventRegister - - subroutine PetscLogStageRegister(name,stage,ierr) - character(len=*), intent(in) :: name - PetscLogStage, intent(out) :: stage - PetscErrorCode, intent(out) :: ierr - end subroutine PetscLogStageRegister - subroutine PetscLogEventBegin(event,ierr) PetscLogEvent, intent(in) :: event PetscErrorCode, intent(out) :: ierr @@ -237,15 +130,6 @@ PetscLogEvent, intent(in) :: event PetscErrorCode, intent(out) :: ierr end subroutine PetscLogEventEnd - - subroutine PetscLogStagePop(ierr) - PetscErrorCode, intent(out) :: ierr - end subroutine PetscLogStagePop - - subroutine PetscLogStagePush(s,ierr) - PetscLogStage, intent(in) :: s - PetscErrorCode, intent(out) :: ierr - end subroutine PetscLogStagePush end Interface Interface diff --git a/src/sys/f90-mod/petscsysmod.F90 b/src/sys/f90-mod/petscsysmod.F90 index 2009e95e555..4917a9d7e25 100644 --- a/src/sys/f90-mod/petscsysmod.F90 +++ b/src/sys/f90-mod/petscsysmod.F90 @@ -55,6 +55,8 @@ end module petscsysdefdummy module petscsysdef use petscsysdefdummy + + ! These will eventually be automatically generated interface operator(.ne.) function petscviewernotequal(A,B) import tPetscViewer @@ -70,6 +72,21 @@ function petscviewerequals(A,B) end function end interface operator (.eq.) + interface operator(.ne.) + function petscdrawnotequal(A,B) + import tPetscDraw + logical petscdrawnotequal + type(tPetscDraw), intent(in) :: A,B + end function + end interface operator (.ne.) + interface operator(.eq.) + function petscdrawequals(A,B) + import tPetscDraw + logical petscdrawequals + type(tPetscDraw), intent(in) :: A,B + end function + end interface operator (.eq.) + interface operator(.ne.) function petscrandomnotequal(A,B) import tPetscRandom @@ -229,66 +246,76 @@ subroutine petscbinarywriteint1(fd,data,num,type,z) end subroutine end Interface - Interface petscintview - subroutine petscintview(N,idx,viewer,ierr) - use petscsysdefdummy, only: tPetscViewer - PetscInt N - PetscInt idx(*) - PetscViewer viewer - PetscErrorCode ierr - end subroutine - end Interface - - Interface petscscalarview - subroutine petscscalarview(N,s,viewer,ierr) - use petscsysdefdummy, only: tPetscViewer - PetscInt N - PetscScalar s(*) - PetscViewer viewer - PetscErrorCode ierr - end subroutine - end Interface - - Interface petscrealview - subroutine petscrealview(N,s,viewer,ierr) - use petscsysdefdummy, only: tPetscViewer - PetscInt N - PetscReal s(*) - PetscViewer viewer - PetscErrorCode ierr - end subroutine - end Interface - end module function petscviewernotequal(A,B) use petscsysdefdummy, only: tPetscViewer logical petscviewernotequal type(tPetscViewer), intent(in) :: A,B + if (A%v .eq. 0 .or. B%v .eq. 0) then + print*, 'PETSc Error: Cannot compare with PETSC_NULL_VIEWER, use PetscObjectIsNull()' + ! stop PETSC_ERR_SUP won't compile + stop 55 + endif petscviewernotequal = (A%v .ne. 
B%v) end function function petscviewerequals(A,B) use petscsysdefdummy, only: tPetscViewer logical petscviewerequals type(tPetscViewer), intent(in) :: A,B + if (A%v .eq. 0 .or. B%v .eq. 0) then + print*, 'PETSc Error: Cannot compare with PETSC_NULL_VIEWER, use PetscObjectIsNull()' + stop 55 + endif petscviewerequals = (A%v .eq. B%v) end function - function petscrandomnotequal(A,B) + function petscdrawnotequal(A,B) + use petscsysdefdummy, only: tPetscDraw + logical petscdrawnotequal + type(tPetscDraw), intent(in) :: A,B + if (A%v .eq. 0 .or. B%v .eq. 0) then + print*, 'PETSc Error: Cannot compare with PETSC_NULL_DRAW, use PetscObjectIsNull()' + stop 55 + endif + petscdrawnotequal = (A%v .ne. B%v) + end function + function petscdrawequals(A,B) + use petscsysdefdummy, only: tPetscDraw + logical petscdrawequals + type(tPetscDraw), intent(in) :: A,B + if (A%v .eq. 0 .or. B%v .eq. 0) then + print*, 'PETSc Error: Cannot compare with PETSC_NULL_DRAW, use PetscObjectIsNull()' + stop 55 + endif + petscdrawequals = (A%v .eq. B%v) + end function + + function petscrandomnotequal(A,B) use petscsysdefdummy, only: tPetscRandom logical petscrandomnotequal type(tPetscRandom), intent(in) :: A,B + if (A%v .eq. 0 .or. B%v .eq. 0) then + print*, 'PETSc Error: Cannot compare with PETSC_NULL_RANDOM, use PetscObjectIsNull()' + stop 55 + endif petscrandomnotequal = (A%v .ne. B%v) end function function petscrandomequals(A,B) use petscsysdefdummy, only: tPetscRandom logical petscrandomequals type(tPetscRandom), intent(in) :: A,B + if (A%v .eq. 0 .or. B%v .eq. 0) then + print*, 'PETSc Error: Cannot compare with PETSC_NULL_RANDOM, use PetscObjectIsNull()' + stop 55 + endif petscrandomequals = (A%v .eq. B%v) end function #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) !DEC$ ATTRIBUTES DLLEXPORT::petscviewernotequal !DEC$ ATTRIBUTES DLLEXPORT::petscviewerequals +!DEC$ ATTRIBUTES DLLEXPORT::petscdrawnotequal +!DEC$ ATTRIBUTES DLLEXPORT::petscdrawequals !DEC$ ATTRIBUTES DLLEXPORT::petscrandomnotequal !DEC$ ATTRIBUTES DLLEXPORT::petscrandomequals #endif @@ -296,16 +323,14 @@ module petscsys use,intrinsic :: iso_c_binding use petscsysdef PetscChar(80) PETSC_NULL_CHARACTER = '' - PetscInt PETSC_NULL_INTEGER(1) - PetscFortranDouble PETSC_NULL_DOUBLE(1) - PetscScalar PETSC_NULL_SCALAR(1) - PetscReal PETSC_NULL_REAL(1) + PetscInt PETSC_NULL_INTEGER, PETSC_NULL_INTEGER_ARRAY(1) + PetscFortranDouble PETSC_NULL_DOUBLE + PetscScalar PETSC_NULL_SCALAR, PETSC_NULL_SCALAR_ARRAY(1) + PetscReal PETSC_NULL_REAL, PETSC_NULL_REAL_ARRAY(1) PetscBool PETSC_NULL_BOOL + PetscEnum PETSC_NULL_ENUM MPI_Comm PETSC_NULL_MPI_COMM(1) ! -! -! -! ! Basic math constants ! 
PetscReal PETSC_PI @@ -320,10 +345,14 @@ module petscsys #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_CHARACTER !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_INTEGER +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_INTEGER_ARRAY !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_DOUBLE !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SCALAR +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SCALAR_ARRAY !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_REAL +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_REAL_ARRAY !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_BOOL +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_ENUM !DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_MPI_COMM !DEC$ ATTRIBUTES DLLEXPORT::PETSC_PI !DEC$ ATTRIBUTES DLLEXPORT::PETSC_MAX_REAL @@ -396,7 +425,6 @@ subroutine PetscSetCOMM(c1,c2) PETSC_COMM_WORLD = c1 PETSC_COMM_SELF = c2 - return end subroutine PetscGetCOMM(c1) @@ -405,21 +433,21 @@ subroutine PetscGetCOMM(c1) MPI_Comm c1 c1 = PETSC_COMM_WORLD - return end subroutine PetscSetModuleBlock() - use petscsys, only: PETSC_NULL_CHARACTER,PETSC_NULL_INTEGER,& - PETSC_NULL_SCALAR,PETSC_NULL_DOUBLE,PETSC_NULL_REAL,& - PETSC_NULL_BOOL,PETSC_NULL_FUNCTION,PETSC_NULL_MPI_COMM + use petscsys!, only: PETSC_NULL_CHARACTER,PETSC_NULL_INTEGER,& + ! PETSC_NULL_SCALAR,PETSC_NULL_DOUBLE,PETSC_NULL_REAL,& + ! PETSC_NULL_BOOL,PETSC_NULL_FUNCTION,PETSC_NULL_MPI_COMM implicit none - call PetscSetFortranBasePointers(PETSC_NULL_CHARACTER, & + call PetscSetFortranBasePointers(PETSC_NULL_CHARACTER, & & PETSC_NULL_INTEGER,PETSC_NULL_SCALAR, & & PETSC_NULL_DOUBLE,PETSC_NULL_REAL, & - & PETSC_NULL_BOOL,PETSC_NULL_FUNCTION,PETSC_NULL_MPI_COMM) - - return + & PETSC_NULL_BOOL,PETSC_NULL_ENUM,PETSC_NULL_FUNCTION, & + & PETSC_NULL_MPI_COMM, & + & PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_SCALAR_ARRAY, & + & PETSC_NULL_REAL_ARRAY) end subroutine PetscSetModuleBlockMPI(freal,fscalar,fsum,finteger) @@ -433,7 +461,6 @@ subroutine PetscSetModuleBlockMPI(freal,fscalar,fsum,finteger) MPIU_SUM = fsum MPIU_INTEGER = finteger - return end subroutine PetscSetModuleBlockNumeric(pi,maxreal,minreal,eps, & @@ -455,5 +482,4 @@ subroutine PetscSetModuleBlockNumeric(pi,maxreal,minreal,eps, & PETSC_INFINITY = pinf PETSC_NINFINITY = pninf - return end diff --git a/src/sys/f90-mod/petscviewer.h b/src/sys/f90-mod/petscviewer.h index c91423d7059..98d83d3b02c 100644 --- a/src/sys/f90-mod/petscviewer.h +++ b/src/sys/f90-mod/petscviewer.h @@ -3,11 +3,12 @@ ! #include "petsc/finclude/petscviewer.h" - type tPetscViewer - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tPetscViewer end type tPetscViewer - PetscViewer, parameter :: PETSC_NULL_VIEWER = tPetscViewer(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VIEWER +#endif ! ! The numbers used below should match those in ! 
petsc/private/fortranimpl.h @@ -81,7 +82,6 @@ PetscEnum, parameter :: PETSC_VIEWER_LOAD_ALL = 37 #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VIEWER !DEC$ ATTRIBUTES DLLEXPORT::PETSC_VIEWER_STDOUT_SELF !DEC$ ATTRIBUTES DLLEXPORT::PETSC_VIEWER_DRAW_WORLD !DEC$ ATTRIBUTES DLLEXPORT::PETSC_VIEWER_DRAW_SELF diff --git a/src/sys/f90-src/f90_cwrap.c b/src/sys/f90-src/f90_cwrap.c index b1ec196fc79..c9fb2c381c9 100644 --- a/src/sys/f90-src/f90_cwrap.c +++ b/src/sys/f90-src/f90_cwrap.c @@ -4,7 +4,7 @@ PetscMPIFortranDatatypeToC - Converts a `MPI_Fint` that contains a Fortran `MPI_Datatype` to its C `MPI_Datatype` equivalent - Not Collective + Not Collective, No Fortran Support Input Parameter: . unit - The Fortran `MPI_Datatype` @@ -15,7 +15,7 @@ Level: developer Developer Note: - The MPI documentation in multiple places says that one can never us + The MPI documentation in multiple places says that one can never use Fortran `MPI_Datatype`s in C (or vice-versa) but this is problematic since users could never call C routines from Fortran that have `MPI_Datatype` arguments. Jed states that the Fortran `MPI_Datatype`s will always be available in C if the MPI was built to support Fortran. This function diff --git a/src/sys/fileio/fdir.c b/src/sys/fileio/fdir.c index e2bfb51ec32..920c7492ce1 100644 --- a/src/sys/fileio/fdir.c +++ b/src/sys/fileio/fdir.c @@ -28,7 +28,7 @@ static PetscErrorCode PetscPathJoin(const char dname[], const char fname[], size PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscMkdir - Create a directory Not Collective @@ -57,7 +57,7 @@ PetscErrorCode PetscMkdir(const char dir[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscMkdtemp - Create a directory with a unique name given a name template. Input Parameter: @@ -138,7 +138,7 @@ PetscErrorCode PetscRMTree(const char dir[]) #include #include -/*@C +/*@ PetscRMTree - delete a directory and all of its children Input Parameter: diff --git a/src/sys/fileio/fretrieve.c b/src/sys/fileio/fretrieve.c index 169abd8624d..597c5abaf43 100644 --- a/src/sys/fileio/fretrieve.c +++ b/src/sys/fileio/fretrieve.c @@ -76,7 +76,7 @@ PetscErrorCode PetscGetTmp(MPI_Comm comm, char dir[], size_t len) // "Unknown section 'Environmental Variables'" // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ PetscSharedTmp - Determines if all processors in a communicator share a tmp directory or have different ones. @@ -201,7 +201,7 @@ PetscErrorCode PetscSharedTmp(MPI_Comm comm, PetscBool *shared) // "Unknown section 'Environmental Variables'" // PetscClangLinter pragma disable: -fdoc-section-header-unknown -/*@C +/*@ PetscSharedWorkingDirectory - Determines if all processors in a communicator share a working directory or have different ones.
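A sketch tying together the directory helpers touched above (assumptions: `PetscMkdtemp()` follows mkdtemp() semantics, replacing a trailing XXXXXX in the template in place, and 'w' asks `PetscTestDirectory()` for a writability check):

  char      dir[PETSC_MAX_PATH_LEN];
  PetscBool flg;

  PetscCall(PetscStrncpy(dir, "petsc-tmp-XXXXXX", sizeof(dir)));
  PetscCall(PetscMkdtemp(dir));                  /* unique directory created from the template */
  PetscCall(PetscTestDirectory(dir, 'w', &flg)); /* does it exist and is it writable? */
  PetscCall(PetscRMTree(dir));                   /* remove it and all of its children */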
Collective diff --git a/src/sys/fileio/ftest.c b/src/sys/fileio/ftest.c index b63889a777c..1d50f217656 100644 --- a/src/sys/fileio/ftest.c +++ b/src/sys/fileio/ftest.c @@ -139,7 +139,7 @@ static PetscErrorCode PetscGetFileStat(const char fname[], uid_t *fileUid, gid_t PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscTestFile - checks for the existence of a file Not Collective @@ -182,7 +182,7 @@ PetscErrorCode PetscTestFile(const char fname[], char mode, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscTestDirectory - checks for the existence of a directory Not Collective diff --git a/src/sys/fileio/ftn-custom/zsysiof.c b/src/sys/fileio/ftn-custom/zsysiof.c index a845f8cd391..388a3a979ad 100644 --- a/src/sys/fileio/ftn-custom/zsysiof.c +++ b/src/sys/fileio/ftn-custom/zsysiof.c @@ -1,8 +1,6 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscbinaryopen_ PETSCBINARYOPEN - #define petsctestfile_ PETSCTESTFILE #define petscbinaryreadint_ PETSCBINARYREADINT #define petscbinaryreadreal_ PETSCBINARYREADREAL #define petscbinaryreadcomplex_ PETSCBINARYREADCOMPLEX @@ -22,8 +20,6 @@ #define petscbinarywritereal1_ PETSCBINARYWRITEREAL1 #define petscbinarywritecomplex1_ PETSCBINARYWRITECOMPLEX1 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscbinaryopen_ petscbinaryopen - #define petsctestfile_ petsctestfile #define petscbinaryreadint_ petscbinaryreadint #define petscbinaryreadreal_ petscbinaryreadreal #define petscbinaryreadcomplex_ petscbinaryreadcomplex @@ -79,16 +75,6 @@ PETSC_EXTERN void petscbinarywritecomplex1_(int *fd, void *p, PetscInt *n, Petsc *ierr = PetscBinaryWrite(*fd, p, *n, *type); } -PETSC_EXTERN void petscbinaryopen_(char *name, PetscFileMode *type, int *fd, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - - FIXCHAR(name, len, c1); - *ierr = PetscBinaryOpen(c1, *type, fd); - if (*ierr) return; - FREECHAR(name, c1); -} - PETSC_EXTERN void petscbinaryreadint_(int *fd, void *data, PetscInt *num, PetscInt *count, PetscDataType *type, int *ierr) { CHKFORTRANNULLINTEGER(count); @@ -173,16 +159,6 @@ PETSC_EXTERN void petscbinaryreadcomplex1cnt_(int *fd, void *data, PetscInt *num if (*ierr) return; } -PETSC_EXTERN void petsctestfile_(char *name, char *mode, PetscBool *flg, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len, PETSC_FORTRAN_CHARLEN_T len1) -{ - char *c1; - - FIXCHAR(name, len, c1); - *ierr = PetscTestFile(c1, *mode, flg); - if (*ierr) return; - FREECHAR(name, c1); -} - #if defined(__cplusplus) } #endif diff --git a/src/sys/fileio/grpath.c b/src/sys/fileio/grpath.c index a4285fdc9d9..43fbbe1c13e 100644 --- a/src/sys/fileio/grpath.c +++ b/src/sys/fileio/grpath.c @@ -30,7 +30,7 @@ Level: developer Notes: - rpath is assumed to be of length `PETSC_MAX_PATH_LEN`. + `rpath` is assumed to be of length `PETSC_MAX_PATH_LEN`. Systems that use the automounter often generate absolute paths of the form "/tmp_mnt....". However, the automounter will fail to diff --git a/src/sys/fileio/mpiuopen.c b/src/sys/fileio/mpiuopen.c index b5d9ab3469b..1796e227803 100644 --- a/src/sys/fileio/mpiuopen.c +++ b/src/sys/fileio/mpiuopen.c @@ -3,14 +3,14 @@ Some PETSc utility routines to add simple parallel IO capabilities */ #include -#include +#include /*I "petscsys.h" I*/ #include /*@C PetscFOpen - Has the first process in the MPI communicator open a file; all others do nothing. 
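The rank-0 file pattern these routines implement, as a short sketch (the file name is illustrative; the signatures are the ones shown in this hunk): every rank makes the calls, but only MPI rank 0 actually touches the file.

  FILE *fd;

  PetscCall(PetscFOpen(PETSC_COMM_WORLD, "log.txt", "w", &fd));
  PetscCall(PetscFPrintf(PETSC_COMM_WORLD, fd, "solver started\n"));
  PetscCall(PetscFClose(PETSC_COMM_WORLD, fd));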
- Logically Collective; No Fortran Support + Logically Collective Input Parameters: + comm - the MPI communicator @@ -61,7 +61,7 @@ PetscErrorCode PetscFOpen(MPI_Comm comm, const char name[], const char mode[], F PetscFClose - Has MPI rank 0 in the communicator close a file (usually obtained with `PetscFOpen()`); all others do nothing. - Logically Collective; No Fortran Support + Logically Collective Input Parameters: + comm - the MPI communicator @@ -85,7 +85,6 @@ PetscErrorCode PetscFClose(MPI_Comm comm, FILE *fd) PetscFunctionReturn(PETSC_SUCCESS); } -#if defined(PETSC_HAVE_POPEN) static char PetscPOpenMachine[128] = ""; /*@C @@ -106,15 +105,21 @@ static char PetscPOpenMachine[128] = ""; @*/ PetscErrorCode PetscPClose(MPI_Comm comm, FILE *fd) { +#if defined(PETSC_HAVE_POPEN) PetscMPIInt rank; +#endif PetscFunctionBegin; +#if defined(PETSC_HAVE_POPEN) PetscCallMPI(MPI_Comm_rank(comm, &rank)); if (rank == 0) { char buf[1024]; while (fgets(buf, 1024, fd)); /* wait till it prints everything */ (void)pclose(fd); } +#else + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "pclose() - routine is unavailable."); +#endif PetscFunctionReturn(PETSC_SUCCESS); } @@ -150,12 +155,15 @@ PetscErrorCode PetscPClose(MPI_Comm comm, FILE *fd) @*/ PetscErrorCode PetscPOpen(MPI_Comm comm, const char machine[], const char program[], const char mode[], FILE **fp) { +#if defined(PETSC_HAVE_POPEN) PetscMPIInt rank; size_t i, len, cnt; char commandt[PETSC_MAX_PATH_LEN], command[PETSC_MAX_PATH_LEN]; FILE *fd; +#endif PetscFunctionBegin; +#if defined(PETSC_HAVE_POPEN) /* all processors have to do the string manipulation because PetscStrreplace() is a collective operation */ if (PetscPOpenMachine[0] || (machine && machine[0])) { PetscCall(PetscStrncpy(command, "ssh ", sizeof(command))); @@ -189,13 +197,16 @@ PetscErrorCode PetscPOpen(MPI_Comm comm, const char machine[], const char progra PetscCheck((fd = popen(commandt, mode)), PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot run command %s", commandt); if (fp) *fp = fd; } +#else + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "popen() - system routine is unavailable."); +#endif PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscPOpenSetMachine - Sets the name of the default machine to run `PetscPOpen()` calls on - Logically Collective, but only MPI rank 0 runs the command + Logically Collective, but only the MPI process with rank 0 runs the command Input Parameter: . machine - machine to run command on or `NULL` for the current machine @@ -217,5 +228,3 @@ PetscErrorCode PetscPOpenSetMachine(const char machine[]) } PetscFunctionReturn(PETSC_SUCCESS); } - -#endif diff --git a/src/sys/fileio/mprint.c b/src/sys/fileio/mprint.c index acf5e5ef45e..3bb67f065a5 100644 --- a/src/sys/fileio/mprint.c +++ b/src/sys/fileio/mprint.c @@ -36,7 +36,7 @@ FILE *PETSC_STDERR = NULL; .seealso: `PetscFormatConvert()`, `PetscVSNPrintf()`, `PetscVFPrintf()` @*/ -PetscErrorCode PetscFormatConvertGetSize(const char *format, size_t *size) +PetscErrorCode PetscFormatConvertGetSize(const char format[], size_t *size) { size_t sz = 0; PetscInt i = 0; @@ -84,7 +84,7 @@ PetscErrorCode PetscFormatConvertGetSize(const char *format, size_t *size) . format - the PETSc format string Output Parameter: -. newformat - the formatted string +.
newformat - the formatted string, must be long enough to hold the result Level: developer @@ -98,7 +98,7 @@ PetscErrorCode PetscFormatConvertGetSize(const char *format, size_t *size) .seealso: `PetscFormatConvertGetSize()`, `PetscVSNPrintf()`, `PetscVFPrintf()` @*/ -PetscErrorCode PetscFormatConvert(const char *format, char *newformat) +PetscErrorCode PetscFormatConvert(const char format[], char newformat[]) { PetscInt i = 0, j = 0; @@ -151,6 +151,8 @@ PetscErrorCode PetscFormatConvert(const char *format, char *newformat) /*@C PetscVSNPrintf - The PETSc version of `vsnprintf()`. Ensures that all `%g` formatted arguments' output contains the decimal point (which is used by the test harness) + No Fortran Support + Input Parameters: + str - location to put result . len - the length of `str` @@ -170,7 +172,7 @@ PetscErrorCode PetscFormatConvert(const char *format, char *newformat) .seealso: `PetscFormatConvert()`, `PetscFormatConvertGetSize()`, `PetscErrorPrintf()`, `PetscVPrintf()` @*/ -PetscErrorCode PetscVSNPrintf(char *str, size_t len, const char *format, size_t *fullLength, va_list Argp) +PetscErrorCode PetscVSNPrintf(char str[], size_t len, const char format[], size_t *fullLength, va_list Argp) { char *newformat = NULL; char formatbuf[PETSCDEFAULTBUFFERSIZE]; @@ -279,6 +281,9 @@ PetscErrorCode PetscVSNPrintf(char *str, size_t len, const char *format, size_t If `fd` is `NULL`, all open output streams are flushed, including ones not directly accessible to the program. + Fortran Note: + Use `PetscFlush()` + .seealso: `PetscPrintf()`, `PetscFPrintf()`, `PetscVFPrintf()`, `PetscVSNPrintf()` @*/ PetscErrorCode PetscFFlush(FILE *fd) @@ -295,6 +300,8 @@ PetscErrorCode PetscFFlush(FILE *fd) PetscVFPrintfDefault - All PETSc standard out and error messages are sent through this function; so, in theory, this can be replaced with something that does not simply write to a file. + No Fortran Support + Input Parameters: + fd - the file descriptor to write to . format - the format string to write with @@ -333,7 +340,7 @@ PetscErrorCode PetscFFlush(FILE *fd) .seealso: `PetscVSNPrintf()`, `PetscErrorPrintf()`, `PetscFFlush()` @*/ -PetscErrorCode PetscVFPrintfDefault(FILE *fd, const char *format, va_list Argp) +PetscErrorCode PetscVFPrintfDefault(FILE *fd, const char format[], va_list Argp) { char str[PETSCDEFAULTBUFFERSIZE]; char *buff = str; @@ -377,7 +384,7 @@ PetscErrorCode PetscVFPrintfDefault(FILE *fd, const char *format, va_list Argp) /*@C PetscSNPrintf - Prints to a string of given length - Not Collective + Not Collective, No Fortran Support Input Parameters: + len - the length of `str` @@ -392,7 +399,7 @@ PetscErrorCode PetscVFPrintfDefault(FILE *fd, const char *format, va_list Argp) `PetscPrintf()`, `PetscViewerASCIIPrintf()`, `PetscViewerASCIISynchronizedPrintf()`, `PetscVFPrintf()`, `PetscFFlush()` @*/ -PetscErrorCode PetscSNPrintf(char *str, size_t len, const char format[], ...) +PetscErrorCode PetscSNPrintf(char str[], size_t len, const char format[], ...) { size_t fullLength; va_list Argp; @@ -407,7 +414,7 @@ PetscErrorCode PetscSNPrintf(char *str, size_t len, const char format[], ...) /*@C PetscSNPrintfCount - Prints to a string of given length, returns count of characters printed - Not Collective + Not Collective, No Fortran Support Input Parameters: + len - the length of `str` @@ -423,7 +430,7 @@ PetscErrorCode PetscSNPrintf(char *str, size_t len, const char format[], ...)
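The "must be long enough" requirement on `newformat` suggests the two-step pattern below (a sketch, not from the patch): query the converted size first, then allocate and convert.

  size_t len;
  char  *newformat;

  PetscCall(PetscFormatConvertGetSize(format, &len)); /* length the converted string needs */
  PetscCall(PetscMalloc1(len, &newformat));
  PetscCall(PetscFormatConvert(format, newformat));
  /* ... use newformat ... */
  PetscCall(PetscFree(newformat));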
.seealso: `PetscSynchronizedFlush()`, `PetscSynchronizedFPrintf()`, `PetscFPrintf()`, `PetscVSNPrintf()`, `PetscPrintf()`, `PetscViewerASCIIPrintf()`, `PetscViewerASCIISynchronizedPrintf()`, `PetscSNPrintf()`, `PetscVFPrintf()` @*/ -PetscErrorCode PetscSNPrintfCount(char *str, size_t len, const char format[], size_t *countused, ...) +PetscErrorCode PetscSNPrintfCount(char str[], size_t len, const char format[], size_t *countused, ...) { va_list Argp; @@ -518,7 +525,7 @@ static inline PetscErrorCode PetscSynchronizedFPrintf_Private(MPI_Comm comm, FIL REQUIRES a call to `PetscSynchronizedFlush()` by all the processes after the completion of the calls to `PetscSynchronizedPrintf()` for the information from all the processors to be printed. - Fortran Note: + Fortran Note: The call sequence is `PetscSynchronizedPrintf`(`MPI_Comm`, `character`(*), `PetscErrorCode` ierr). That is, you can only pass a single character string from Fortran. @@ -546,7 +553,7 @@ PetscErrorCode PetscSynchronizedPrintf(MPI_Comm comm, const char format[], ...) Input Parameters: + comm - the MPI communicator -. fp - the file pointer +. fp - the file pointer, `PETSC_STDOUT` or value obtained from `PetscFOpen()` - format - the usual `printf()` format string Level: intermediate @@ -555,6 +562,10 @@ PetscErrorCode PetscSynchronizedPrintf(MPI_Comm comm, const char format[], ...) REQUIRES an intervening call to `PetscSynchronizedFlush()` for the information from all the processors to be printed. + Fortran Note: + The call sequence is `PetscSynchronizedFPrintf`(`MPI_Comm`, fp, `character`(*), `PetscErrorCode` ierr). + That is, you can only pass a single character string from Fortran. + .seealso: `PetscSynchronizedPrintf()`, `PetscSynchronizedFlush()`, `PetscFPrintf()`, `PetscFOpen()`, `PetscViewerASCIISynchronizedPrintf()`, `PetscViewerASCIIPrintf()`, `PetscFFlush()` @@ -578,7 +589,7 @@ PetscErrorCode PetscSynchronizedFPrintf(MPI_Comm comm, FILE *fp, const char form Input Parameters: + comm - the MPI communicator -- fd - the file pointer (valid on MPI rank 0 of the communicator) +- fd - the file pointer (valid on MPI rank 0 of the communicator), `PETSC_STDOUT` or value obtained from `PetscFOpen()` Level: intermediate @@ -586,9 +597,6 @@ PetscErrorCode PetscSynchronizedFPrintf(MPI_Comm comm, FILE *fp, const char form If `PetscSynchronizedPrintf()` and/or `PetscSynchronizedFPrintf()` are called with different MPI communicators there must be an intervening call to `PetscSynchronizedFlush()` between the calls with different MPI communicators. - Fortran Notes: - Pass `PETSC_STDOUT` if the flush is for standard out; otherwise pass a value obtained from `PetscFOpen()` - .seealso: `PetscSynchronizedPrintf()`, `PetscFPrintf()`, `PetscPrintf()`, `PetscViewerASCIIPrintf()`, `PetscViewerASCIISynchronizedPrintf()` @*/ @@ -644,15 +652,19 @@ PetscErrorCode PetscSynchronizedFlush(MPI_Comm comm, FILE *fd) PetscFPrintf - Prints to a file, only from the first MPI process in the communicator. - Not Collective; No Fortran Support + Not Collective Input Parameters: + comm - the MPI communicator -. fd - the file pointer +. fd - the file pointer, `PETSC_STDOUT` or value obtained from `PetscFOpen()` - format - the usual `printf()` format string Level: intermediate + Fortran Note: + The call sequence is `PetscFPrintf`(`MPI_Comm`, fp, `character`(*), `PetscErrorCode` ierr). + That is, you can only pass a single character string from Fortran.
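The synchronized-print protocol described above, as a minimal sketch (the message text is illustrative): every rank queues its output, then all ranks call the flush together so the output appears in rank order.

  PetscMPIInt rank;

  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] local work done\n", rank));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));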
+ Developer Notes: This may be, and is, called from PETSc error handlers and `PetscMallocValidate()`, hence it does not use `PetscCallMPI()` which could recursively restart the malloc validation. diff --git a/src/sys/fileio/sysio.c b/src/sys/fileio/sysio.c index 0c0f63c19b8..9f1f6c252ab 100644 --- a/src/sys/fileio/sysio.c +++ b/src/sys/fileio/sysio.c @@ -270,6 +270,9 @@ PetscErrorCode PetscByteSwap(void *data, PetscDataType pdtype, PetscInt count) file as 64-bit integers, this means they can only be read back in when the option `--with-64-bit-indices` is used. + Fortran Note: + There are different functions for each datatype, for example `PetscBinaryReadInt()` + .seealso: `PetscBinaryWrite()`, `PetscBinaryOpen()`, `PetscBinaryClose()`, `PetscViewerBinaryGetDescriptor()`, `PetscBinarySynchronizedWrite()`, `PetscBinarySynchronizedRead()`, `PetscBinarySynchronizedSeek()` @*/ @@ -386,6 +389,9 @@ PetscErrorCode PetscBinaryRead(int fd, void *data, PetscInt num, PetscInt *count Because byte-swapping may be done on the values in data it cannot be declared const + Fortran Note: + There are different functions for each datatype, for example `PetscBinaryWriteInt()` + .seealso: `PetscBinaryRead()`, `PetscBinaryOpen()`, `PetscBinaryClose()`, `PetscViewerBinaryGetDescriptor()`, `PetscBinarySynchronizedWrite()`, `PetscBinarySynchronizedRead()`, `PetscBinarySynchronizedSeek()` @*/ @@ -478,7 +484,7 @@ PetscErrorCode PetscBinaryWrite(int fd, const void *p, PetscInt n, PetscDataType PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscBinaryOpen - Opens a PETSc binary file. Not Collective @@ -538,12 +544,12 @@ PetscErrorCode PetscBinaryClose(int fd) /*@C PetscBinarySeek - Moves the file pointer on a PETSc binary file. - Not Collective + Not Collective, No Fortran Support Input Parameters: + fd - the file . off - number of bytes to move. Use `PETSC_BINARY_INT_SIZE`, `PETSC_BINARY_SCALAR_SIZE`, - etc. in your calculation rather than `sizeof()` to compute byte lengths. + etc. in your calculation rather than `sizeof()` to compute byte lengths. - whence - see `PetscBinarySeekType` for possible values Output Parameter: @@ -576,7 +582,7 @@ PetscErrorCode PetscBinarySeek(int fd, off_t off, PetscBinarySeekType whence, of /*@C PetscBinarySynchronizedRead - Reads from a binary file, all MPI processes get the same values - Collective + Collective, No Fortran Support Input Parameters: + comm - the MPI communicator @@ -651,7 +657,7 @@ PetscErrorCode PetscBinarySynchronizedRead(MPI_Comm comm, int fd, void *data, Pe /*@C PetscBinarySynchronizedWrite - writes to a binary file. - Collective + Collective, No Fortran Support Input Parameters: + comm - the MPI communicator @@ -692,6 +698,8 @@ PetscErrorCode PetscBinarySynchronizedWrite(MPI_Comm comm, int fd, const void *p /*@C PetscBinarySynchronizedSeek - Moves the file pointer on a PETSc binary file. + No Fortran Support + Input Parameters: + comm - the communicator to read with .
fd - the file diff --git a/src/sys/fsrc/somefort.F90 b/src/sys/fsrc/somefort.F90 index 0be29658fe0..12fe7c6be14 100644 --- a/src/sys/fsrc/somefort.F90 +++ b/src/sys/fsrc/somefort.F90 @@ -9,11 +9,14 @@ subroutine MPIU_Abort(comm,ierr) implicit none MPI_Comm comm - PetscMPIInt ierr,nierr - - call MPI_Abort(comm,ierr,nierr) - - return + PetscMPIInt ierr, nierr, ciportable + call PetscCIEnabledPortableErrorOutput(ciportable) + if (ciportable == 1) then + call MPI_Finalize(nierr) + stop 0 + else + call MPI_Abort(comm,ierr,nierr) + endif end #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) !DEC$ ATTRIBUTES DLLEXPORT::MPIU_Abort @@ -26,25 +29,20 @@ subroutine PetscFortranPrintToUnit(unit,str,ierr) PetscErrorCode ierr write(unit=unit, fmt="(A)", advance='no') str ierr = 0 - return end #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) !DEC$ ATTRIBUTES DLLEXPORT::PetscFortranPrintToUnit #endif ! This uses F2003 feature - and is the preferred mode for accessing command line arguments -#if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) integer function PetscCommandArgumentCount() implicit none PetscCommandArgumentCount= command_argument_count() - return end subroutine PetscGetCommandArgument(n,val) implicit none - integer n + integer, intent(in) :: n character(*) val call get_command_argument(n,val) - return end -#endif diff --git a/src/sys/ftn-custom/zsys.c b/src/sys/ftn-custom/zsys.c index cd821f8f540..bef73fa9de8 100644 --- a/src/sys/ftn-custom/zsys.c +++ b/src/sys/ftn-custom/zsys.c @@ -1,16 +1,23 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define chkmemfortran_ CHKMEMFORTRAN - #define petscoffsetfortran_ PETSCOFFSETFORTRAN - #define petscobjectstateincrease_ PETSCOBJECTSTATEINCREASE + #define chkmemfortran_ CHKMEMFORTRAN + #define petscoffsetfortran_ PETSCOFFSETFORTRAN + #define petscobjectstateincrease_ PETSCOBJECTSTATEINCREASE + #define petsccienabledportableerroroutput_ PETSCCIENABLEDPORTABLEERROROUTPUT #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscoffsetfortran_ petscoffsetfortran - #define chkmemfortran_ chkmemfortran - #define flush__ flush_ - #define petscobjectstateincrease_ petscobjectstateincrease + #define petscoffsetfortran_ petscoffsetfortran + #define chkmemfortran_ chkmemfortran + #define flush__ flush_ + #define petscobjectstateincrease_ petscobjectstateincrease + #define petsccienabledportableerroroutput_ petsccienabledportableerroroutput #endif +PETSC_EXTERN void petsccienabledportableerroroutput_(PetscMPIInt *cienabled) +{ + *cienabled = PetscCIEnabledPortableErrorOutput ? 1 : 0; +} + PETSC_EXTERN void petscobjectstateincrease_(PetscObject *obj, PetscErrorCode *ierr) { *ierr = PetscObjectStateIncrease(*obj); diff --git a/src/sys/ftn-custom/zutils.c b/src/sys/ftn-custom/zutils.c index 3def4e0df08..829d8c8fa1b 100644 --- a/src/sys/ftn-custom/zutils.c +++ b/src/sys/ftn-custom/zutils.c @@ -24,12 +24,17 @@ M*/ This is code for translating PETSc memory addresses to integer offsets for Fortran. 
*/ -char *PETSC_NULL_CHARACTER_Fortran = NULL; -void *PETSC_NULL_INTEGER_Fortran = NULL; -void *PETSC_NULL_SCALAR_Fortran = NULL; -void *PETSC_NULL_DOUBLE_Fortran = NULL; -void *PETSC_NULL_REAL_Fortran = NULL; -void *PETSC_NULL_BOOL_Fortran = NULL; +char *PETSC_NULL_CHARACTER_Fortran = NULL; +void *PETSC_NULL_INTEGER_Fortran = NULL; +void *PETSC_NULL_SCALAR_Fortran = NULL; +void *PETSC_NULL_DOUBLE_Fortran = NULL; +void *PETSC_NULL_REAL_Fortran = NULL; +void *PETSC_NULL_BOOL_Fortran = NULL; +void *PETSC_NULL_ENUM_Fortran = NULL; +void *PETSC_NULL_INTEGER_ARRAY_Fortran = NULL; +void *PETSC_NULL_SCALAR_ARRAY_Fortran = NULL; +void *PETSC_NULL_REAL_ARRAY_Fortran = NULL; + EXTERN_C_BEGIN void (*PETSC_NULL_FUNCTION_Fortran)(void) = NULL; EXTERN_C_END diff --git a/src/sys/info/ftn-custom/zverboseinfof.c b/src/sys/info/ftn-custom/zverboseinfof.c index 2a56260c5d8..cfe7ba18821 100644 --- a/src/sys/info/ftn-custom/zverboseinfof.c +++ b/src/sys/info/ftn-custom/zverboseinfof.c @@ -23,38 +23,6 @@ static PetscErrorCode PetscFixSlashN(const char *in, char **out) PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_EXTERN void petscinfosetfile_(char *filename, char *mode, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2) -{ - char *t1, *t2; - - FIXCHAR(filename, len1, t1); - FIXCHAR(mode, len2, t2); - *ierr = PetscInfoSetFile(t1, t2); - if (*ierr) return; - FREECHAR(filename, t1); - FREECHAR(mode, t2); -} - -PETSC_EXTERN void petscinfogetclass_(char *classname, PetscBool **found, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(classname, len, t); - *ierr = PetscInfoGetClass(t, *found); - if (*ierr) return; - FREECHAR(classname, t); -} - -PETSC_EXTERN void petscinfoprocessclass_(char *classname, PetscInt *numClassID, PetscClassId *classIDs[], PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(classname, len, t); - *ierr = PetscInfoProcessClass(t, *numClassID, *classIDs); - if (*ierr) return; - FREECHAR(classname, t); -} - PETSC_EXTERN void petscinfo_(char *text, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1) { char *c1, *tmp; diff --git a/src/sys/info/verboseinfo.c b/src/sys/info/verboseinfo.c index 1b7e9467696..0bd99c9f832 100644 --- a/src/sys/info/verboseinfo.c +++ b/src/sys/info/verboseinfo.c @@ -18,13 +18,13 @@ */ const char *const PetscInfoCommFlags[] = {"all", "no_self", "only_self", "PetscInfoCommFlag", "PETSC_INFO_COMM_", NULL}; static PetscBool PetscInfoClassesLocked = PETSC_FALSE, PetscInfoInvertClasses = PETSC_FALSE, PetscInfoClassesSet = PETSC_FALSE; -static char **PetscInfoClassnames = NULL; -static char *PetscInfoFilename = NULL; -static PetscInt PetscInfoNumClasses = -1; -static PetscInfoCommFlag PetscInfoCommFilter = PETSC_INFO_COMM_ALL; -static int PetscInfoFlags[] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}; +static char **PetscInfoClassnames = NULL; +static char *PetscInfoFilename = NULL; +static PetscInt PetscInfoNumClasses = -1; +static PetscInfoCommFlag PetscInfoCommFilter = PETSC_INFO_COMM_ALL; +static int PetscInfoFlags[] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}; static char *PetscInfoNames[PETSC_STATIC_ARRAY_LENGTH(PetscInfoFlags)] = {NULL}; PetscBool PetscLogPrintInfo = PETSC_FALSE; FILE *PetscInfoFile = NULL; @@ -76,20 +76,17 @@ PetscErrorCode PetscInfoAllow(PetscBool flag) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscInfoSetFile - Sets the printing destination for all `PetscInfo()` calls Not Collective Input Parameters: -+ filename - Name of the file where `PetscInfo()` will print to ++ filename - Name of the file where `PetscInfo()` will print to, use `NULL` to write to `PETSC_STDOUT`. - mode - Write mode passed to `PetscFOpen()` Level: advanced - Note: - Use `filename = NULL` to set `PetscInfo()` to write to `PETSC_STDOUT`. - .seealso: [](sec_PetscInfo), `PetscInfo()`, `PetscInfoGetFile()`, `PetscInfoSetFromOptions()`, `PetscFOpen()` @*/ PetscErrorCode PetscInfoSetFile(const char filename[], const char mode[]) @@ -137,11 +134,11 @@ PetscErrorCode PetscInfoSetFile(const char filename[], const char mode[]) Note: This routine allocates and copies the `filename` so that the `filename` survives `PetscInfoDestroy()`. The user is - therefore responsible for freeing the allocated `filename` pointer afterwards. + therefore responsible for freeing the allocated `filename` pointer with `PetscFree()` .seealso: [](sec_PetscInfo), `PetscInfo()`, `PetscInfoSetFile()`, `PetscInfoSetFromOptions()`, `PetscInfoDestroy()` @*/ -PetscErrorCode PetscInfoGetFile(char **filename, FILE **InfoFile) +PetscErrorCode PetscInfoGetFile(char *filename[], FILE **InfoFile) { PetscFunctionBegin; PetscAssertPointer(filename, 1); @@ -194,7 +191,7 @@ PetscErrorCode PetscInfoSetClasses(PetscBool exclude, PetscInt n, const char *co PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscInfoGetClass - Indicates whether the provided `classname` is marked as a filter in `PetscInfo()` as set by `PetscInfoSetClasses()` Not Collective @@ -212,7 +209,7 @@ PetscErrorCode PetscInfoSetClasses(PetscBool exclude, PetscInt n, const char *co .seealso: [](sec_PetscInfo), `PetscInfo()`, `PetscInfoSetClasses()`, `PetscInfoSetFromOptions()`, `PetscObjectGetName()` @*/ -PetscErrorCode PetscInfoGetClass(const char *classname, PetscBool *found) +PetscErrorCode PetscInfoGetClass(const char classname[], PetscBool *found) { PetscInt unused; @@ -260,7 +257,7 @@ PetscErrorCode PetscInfoGetInfo(PetscBool *infoEnabled, PetscBool *classesSet, P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscInfoProcessClass - Activates or deactivates a class based on the filtering status of `PetscInfo()` Not Collective @@ -297,6 +294,7 @@ PetscErrorCode PetscInfoProcessClass(const char classname[], PetscInt numClassID for (PetscInt i = 0; i < numClassID; ++i) PetscCall(PetscInfoDeactivateClass(classIDs[i])); } } + PetscCheck(PETSC_LARGEST_CLASSID - PETSC_SMALLEST_CLASSID < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(PetscInfoNames), PETSC_COMM_SELF, PETSC_ERR_PLIB, "PetscInfoNames array is too small for %s, need %" PetscInt_FMT " not %" PetscInt_FMT, classname, (PetscInt)(PETSC_LARGEST_CLASSID - PETSC_SMALLEST_CLASSID + 1), 
(PetscInt)PETSC_STATIC_ARRAY_LENGTH(PetscInfoNames)); for (PetscInt i = 0; i < numClassID; ++i) { const PetscClassId idx = classIDs[i] - PETSC_SMALLEST_CLASSID; diff --git a/src/sys/logging/ftn-custom/zpetscloghf.c b/src/sys/logging/ftn-custom/zpetscloghf.c deleted file mode 100644 index 897fc943002..00000000000 --- a/src/sys/logging/ftn-custom/zpetscloghf.c +++ /dev/null @@ -1,20 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petsclogflops_ PETSCLOGFLOPS - #define petscloggpuflops_ PETSCLOGGPUFLOPS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petsclogflops_ petsclogflops - #define petscloggpuflops_ petscloggpuflops -#endif - -PETSC_EXTERN void petsclogflops_(PetscLogDouble *f, PetscErrorCode *ierr) -{ - *ierr = PetscLogFlops(*f); -} - -PETSC_EXTERN void petscloggpuflops_(PetscLogDouble *n, PetscErrorCode *ierr) -{ - *ierr = PetscLogGpuFlops(*n); -} diff --git a/src/sys/logging/ftn-custom/zplogf.c b/src/sys/logging/ftn-custom/zplogf.c index b65449c83e4..d6eeba0d0ca 100644 --- a/src/sys/logging/ftn-custom/zplogf.c +++ b/src/sys/logging/ftn-custom/zplogf.c @@ -2,35 +2,11 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petsclogview_ PETSCLOGVIEW - #define petsclogallbegin_ PETSCLOGALLBEGIN - #define petsclogdefaultbegin_ PETSCLOGDEFAULTBEGIN - #define petsclognestedbegin_ PETSCLOGNESTEDBEGIN - #define petsclogdump_ PETSCLOGDUMP - #define petsclogeventregister_ PETSCLOGEVENTREGISTER - #define petsclogstagepop_ PETSCLOGSTAGEPOP - #define petsclogstageregister_ PETSCLOGSTAGEREGISTER - #define petscclassidregister_ PETSCCLASSIDREGISTER - #define petsclogstagepush_ PETSCLOGSTAGEPUSH - #define petscgetflops_ PETSCGETFLOPS - #define petsclogstagegetid_ PETSCLOGSTAGEGETID - #define petsclogeventbegin_ PETSCLOGEVENTBEGIN - #define petsclogeventend_ PETSCLOGEVENTEND + #define petsclogeventbegin_ PETSCLOGEVENTBEGIN + #define petsclogeventend_ PETSCLOGEVENTEND #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petsclogview_ petsclogview - #define petsclogallbegin_ petsclogallbegin - #define petsclogdefaultbegin_ petsclogdefaultbegin - #define petsclognestedbegin_ petsclognestedbegin - #define petsclogeventregister_ petsclogeventregister - #define petsclogdump_ petsclogdump - #define petsclogstagepop_ petsclogstagepop - #define petsclogstageregister_ petsclogstageregister - #define petscclassidregister_ petscclassidregister - #define petsclogstagepush_ petsclogstagepush - #define petscgetflops_ petscgetflops - #define petsclogstagegetid_ petsclogstagegetid - #define petsclogeventbegin_ petsclogeventbegin - #define petsclogeventend_ petsclogeventend + #define petsclogeventbegin_ petsclogeventbegin + #define petsclogeventend_ petsclogeventend #endif PETSC_EXTERN void petsclogeventbegin_(PetscLogEvent *e, PetscErrorCode *ierr) @@ -42,111 +18,3 @@ PETSC_EXTERN void petsclogeventend_(PetscLogEvent *e, PetscErrorCode *ierr) { *ierr = PetscLogEventEnd(*e, 0, 0, 0, 0); } - -PETSC_EXTERN void petsclogview_(PetscViewer *viewer, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ -#if defined(PETSC_USE_LOG) - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscLogView(v); -#endif -} - -PETSC_EXTERN void petsclogdump_(char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ -#if defined(PETSC_USE_LOG) - char *t1; - FIXCHAR(name, len, t1); - *ierr = PetscLogDump(t1); - if (*ierr) return; - FREECHAR(name, t1); -#endif -} -PETSC_EXTERN void petsclogeventregister_(char *string, PetscClassId *classid, PetscLogEvent *e, 
PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ -#if defined(PETSC_USE_LOG) - char *t1; - FIXCHAR(string, len, t1); - *ierr = PetscLogEventRegister(t1, *classid, e); - if (*ierr) return; - FREECHAR(string, t1); -#endif -} -PETSC_EXTERN void petscclassidregister_(char *string, PetscClassId *e, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ -#if defined(PETSC_USE_LOG) - char *t1; - FIXCHAR(string, len, t1); - - *ierr = PetscClassIdRegister(t1, e); - if (*ierr) return; - FREECHAR(string, t1); -#endif -} - -PETSC_EXTERN void petsclogallbegin_(PetscErrorCode *ierr) -{ -#if defined(PETSC_USE_LOG) - *ierr = PetscLogDefaultBegin(); -#endif -} - -PETSC_EXTERN void petsclogdefaultbegin_(PetscErrorCode *ierr) -{ -#if defined(PETSC_USE_LOG) - *ierr = PetscLogDefaultBegin(); -#endif -} - -PETSC_EXTERN void petsclognestedbegin_(PetscErrorCode *ierr) -{ -#if defined(PETSC_USE_LOG) - *ierr = PetscLogNestedBegin(); -#endif -} - -PETSC_EXTERN void petsclogstagepop_(PetscErrorCode *ierr) -{ -#if defined(PETSC_USE_LOG) - *ierr = PetscLogStagePop(); -#endif -} - -PETSC_EXTERN void petsclogstageregister_(char *sname, PetscLogStage *stage, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ -#if defined(PETSC_USE_LOG) - char *t; - FIXCHAR(sname, len, t); - *ierr = PetscLogStageRegister(t, stage); - if (*ierr) return; - FREECHAR(sname, t); -#endif -} - -PETSC_EXTERN void petsclogstagepush_(PetscLogStage *stage, PetscErrorCode *ierr) -{ -#if defined(PETSC_USE_LOG) - *ierr = PetscLogStagePush(*stage); -#endif -} - -PETSC_EXTERN void petscgetflops_(PetscLogDouble *d, PetscErrorCode *ierr) -{ -#if defined(PETSC_USE_LOG) - *ierr = PetscGetFlops(d); -#else - *ierr = PETSC_SUCCESS; - *d = 0.0; -#endif -} - -PETSC_EXTERN void petsclogstagegetid_(char *sname, PetscLogStage *stage, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ -#if defined(PETSC_USE_LOG) - char *t; - FIXCHAR(sname, len, t); - *ierr = PetscLogStageGetId(t, stage); - if (*ierr) return; - FREECHAR(sname, t); -#endif -} diff --git a/src/sys/logging/handler/impls/default/logdefault.c b/src/sys/logging/handler/impls/default/logdefault.c index 3c7d51995ef..fc15c309376 100644 --- a/src/sys/logging/handler/impls/default/logdefault.c +++ b/src/sys/logging/handler/impls/default/logdefault.c @@ -273,6 +273,7 @@ static PetscErrorCode PetscLogHandlerContextCreate_Default(PetscLogHandler_Defau PetscCall(PetscLogStageInfoArrayCreate(8, &def->stages)); PetscCall(PetscLogActionArrayCreate(64, &def->petsc_actions)); PetscCall(PetscLogObjectArrayCreate(64, &def->petsc_objects)); + PetscCall(PetscSpinlockCreate(&def->lock)); PetscCall(PetscOptionsGetBool(NULL, NULL, "-log_include_actions", &def->petsc_logActions, NULL)); PetscCall(PetscOptionsGetBool(NULL, NULL, "-log_include_objects", &def->petsc_logObjects, NULL)); @@ -289,6 +290,7 @@ static PetscErrorCode PetscLogHandlerDestroy_Default(PetscLogHandler h) PetscCall(PetscLogStageInfoArrayDestroy(&def->stages)); PetscCall(PetscLogActionArrayDestroy(&def->petsc_actions)); PetscCall(PetscLogObjectArrayDestroy(&def->petsc_objects)); + PetscCall(PetscSpinlockDestroy(&def->lock)); if (def->eventInfoMap_th) { PetscEventPerfInfo **array; PetscInt n, off = 0; @@ -1147,51 +1149,51 @@ static PetscErrorCode PetscLogHandlerView_Default_CSV(PetscLogHandler handler, P PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscLogViewWarnSync(MPI_Comm comm, FILE *fd) +static PetscErrorCode PetscLogViewWarnSync(PetscViewer viewer) { PetscFunctionBegin; if (!PetscLogSyncOn) PetscFunctionReturn(PETSC_SUCCESS); - 
PetscCall(PetscFPrintf(comm, fd, "\n\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # WARNING!!! #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # This program was run with logging synchronization. #\n")); - PetscCall(PetscFPrintf(comm, fd, " # This option provides more meaningful imbalance #\n")); - PetscCall(PetscFPrintf(comm, fd, " # figures at the expense of slowing things down and #\n")); - PetscCall(PetscFPrintf(comm, fd, " # providing a distorted view of the overall runtime. #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # WARNING!!! #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # This program was run with logging synchronization. #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # This option provides more meaningful imbalance #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # figures at the expense of slowing things down and #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # providing a distorted view of the overall runtime. #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n\n\n")); PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscLogViewWarnDebugging(MPI_Comm comm, FILE *fd) +static PetscErrorCode PetscLogViewWarnDebugging(PetscViewer viewer) { PetscFunctionBegin; if (PetscDefined(USE_DEBUG)) { - PetscCall(PetscFPrintf(comm, fd, "\n\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # WARNING!!! #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # This code was compiled with a debugging option. #\n")); - PetscCall(PetscFPrintf(comm, fd, " # To get timing results run ./configure #\n")); - PetscCall(PetscFPrintf(comm, fd, " # using --with-debugging=no, the performance will #\n")); - PetscCall(PetscFPrintf(comm, fd, " # be generally two or three times faster. #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # WARNING!!! #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # This code was compiled with a debugging option. #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # To get timing results run ./configure #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # using --with-debugging=no, the performance will #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # be generally two or three times faster. 
#\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n\n\n")); } PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode PetscLogViewWarnNoGpuAwareMpi(MPI_Comm comm, FILE *fd) +static PetscErrorCode PetscLogViewWarnNoGpuAwareMpi(PetscViewer viewer) { #if defined(PETSC_HAVE_DEVICE) PetscMPIInt size; PetscBool deviceInitialized = PETSC_FALSE; PetscFunctionBegin; - PetscCallMPI(MPI_Comm_size(comm, &size)); + PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size)); for (int i = PETSC_DEVICE_HOST + 1; i < PETSC_DEVICE_MAX; ++i) { const PetscDeviceType dtype = PetscDeviceTypeCast(i); if (PetscDeviceInitialized(dtype)) { /* a non-host device was initialized */ @@ -1201,43 +1203,43 @@ static PetscErrorCode PetscLogViewWarnNoGpuAwareMpi(MPI_Comm comm, FILE *fd) } /* the last condition says petsc is configured with device but it is a pure CPU run, so don't print misleading warnings */ if (use_gpu_aware_mpi || size == 1 || !deviceInitialized) PetscFunctionReturn(PETSC_SUCCESS); - PetscCall(PetscFPrintf(comm, fd, "\n\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # WARNING!!! #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # This code was compiled with GPU support and you've #\n")); - PetscCall(PetscFPrintf(comm, fd, " # created PETSc/GPU objects, but you intentionally #\n")); - PetscCall(PetscFPrintf(comm, fd, " # used -use_gpu_aware_mpi 0, requiring PETSc to copy #\n")); - PetscCall(PetscFPrintf(comm, fd, " # additional data between the GPU and CPU. To obtain #\n")); - PetscCall(PetscFPrintf(comm, fd, " # meaningful timing results on multi-rank runs, use #\n")); - PetscCall(PetscFPrintf(comm, fd, " # GPU-aware MPI instead. #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # WARNING!!! #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # This code was compiled with GPU support and you've #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # created PETSc/GPU objects, but you intentionally #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # used -use_gpu_aware_mpi 0, requiring PETSc to copy #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # additional data between the GPU and CPU. To obtain #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # meaningful timing results on multi-rank runs, use #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # GPU-aware MPI instead. 
#\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n\n\n")); PetscFunctionReturn(PETSC_SUCCESS); #else return PETSC_SUCCESS; #endif } -static PetscErrorCode PetscLogViewWarnGpuTime(MPI_Comm comm, FILE *fd) +static PetscErrorCode PetscLogViewWarnGpuTime(PetscViewer viewer) { #if defined(PETSC_HAVE_DEVICE) PetscFunctionBegin; if (!PetscLogGpuTimeFlag || petsc_gflops == 0) PetscFunctionReturn(PETSC_SUCCESS); - PetscCall(PetscFPrintf(comm, fd, "\n\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # WARNING!!! #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " # This code was run with -log_view_gpu_time #\n")); - PetscCall(PetscFPrintf(comm, fd, " # This provides accurate timing within the GPU kernels #\n")); - PetscCall(PetscFPrintf(comm, fd, " # but can slow down the entire computation by a #\n")); - PetscCall(PetscFPrintf(comm, fd, " # measurable amount. For fastest runs we recommend #\n")); - PetscCall(PetscFPrintf(comm, fd, " # not using this option. #\n")); - PetscCall(PetscFPrintf(comm, fd, " # #\n")); - PetscCall(PetscFPrintf(comm, fd, " ##########################################################\n\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # WARNING!!! #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # This code was run with -log_view_gpu_time #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # This provides accurate timing within the GPU kernels #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # but can slow down the entire computation by a #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # measurable amount. For fastest runs we recommend #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # not using this option. 
#\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " # #\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " ##########################################################\n\n\n")); PetscFunctionReturn(PETSC_SUCCESS); #else return PETSC_SUCCESS; @@ -1246,7 +1248,6 @@ static PetscErrorCode PetscLogViewWarnGpuTime(MPI_Comm comm, FILE *fd) static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscViewer viewer) { - FILE *fd; PetscLogHandler_Default def = (PetscLogHandler_Default)handler->data; char arch[128], hostname[128], username[128], pname[PETSC_MAX_PATH_LEN], date[128]; PetscLogDouble locTotalTime, TotalTime, TotalFlops; @@ -1280,42 +1281,45 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCall(PetscLogHandlerGetState(handler, &state)); PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm)); - PetscCall(PetscViewerASCIIGetPointer(viewer, &fd)); PetscCallMPI(MPI_Comm_size(comm, &size)); PetscCallMPI(MPI_Comm_rank(comm, &rank)); /* Get the total elapsed time */ PetscCall(PetscTime(&locTotalTime)); locTotalTime -= petsc_BaseTime; - PetscCall(PetscFPrintf(comm, fd, "****************************************************************************************************************************************************************\n")); - PetscCall(PetscFPrintf(comm, fd, "*** WIDEN YOUR WINDOW TO 160 CHARACTERS. Use 'enscript -r -fCourier9' to print this document ***\n")); - PetscCall(PetscFPrintf(comm, fd, "****************************************************************************************************************************************************************\n")); - PetscCall(PetscFPrintf(comm, fd, "\n------------------------------------------------------------------ PETSc Performance Summary: ------------------------------------------------------------------\n\n")); - PetscCall(PetscLogViewWarnSync(comm, fd)); - PetscCall(PetscLogViewWarnDebugging(comm, fd)); - PetscCall(PetscLogViewWarnNoGpuAwareMpi(comm, fd)); - PetscCall(PetscLogViewWarnGpuTime(comm, fd)); + PetscCall(PetscViewerASCIIPrintf(viewer, "****************************************************************************************************************************************************************\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "*** WIDEN YOUR WINDOW TO 160 CHARACTERS. 
Use 'enscript -r -fCourier9' to print this document ***\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "****************************************************************************************************************************************************************\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n------------------------------------------------------------------ PETSc Performance Summary: ------------------------------------------------------------------\n\n")); + PetscCall(PetscLogViewWarnSync(viewer)); + PetscCall(PetscLogViewWarnDebugging(viewer)); + PetscCall(PetscLogViewWarnNoGpuAwareMpi(viewer)); + PetscCall(PetscLogViewWarnGpuTime(viewer)); PetscCall(PetscGetArchType(arch, sizeof(arch))); PetscCall(PetscGetHostName(hostname, sizeof(hostname))); PetscCall(PetscGetUserName(username, sizeof(username))); PetscCall(PetscGetProgramName(pname, sizeof(pname))); PetscCall(PetscGetDate(date, sizeof(date))); PetscCall(PetscGetVersion(version, sizeof(version))); - if (size == 1) { - PetscCall(PetscFPrintf(comm, fd, "%s on a %s named %s with %d processor, by %s %s\n", pname, arch, hostname, size, username, date)); - } else { - PetscCall(PetscFPrintf(comm, fd, "%s on a %s named %s with %d processors, by %s %s\n", pname, arch, hostname, size, username, date)); - } + +#if defined(PETSC_HAVE_CUPM) + const char *cupm = PetscDefined(HAVE_CUDA) ? "CUDA" : "HIP"; + if (PetscDeviceCUPMRuntimeArch) + PetscCall(PetscViewerASCIIPrintf(viewer, "%s on a %s named %s with %d process%s and %s architecture %d, by %s on %s\n", pname, arch, hostname, size, size > 1 ? "es" : "", cupm, PetscDeviceCUPMRuntimeArch, username, date)); + else +#endif + PetscCall(PetscViewerASCIIPrintf(viewer, "%s on a %s named %s with %d process%s, by %s on %s\n", pname, arch, hostname, size, size > 1 ? 
"es" : "", username, date)); + #if defined(PETSC_HAVE_OPENMP) - PetscCall(PetscFPrintf(comm, fd, "Using %" PetscInt_FMT " OpenMP threads\n", PetscNumOMPThreads)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Using %" PetscInt_FMT " OpenMP threads\n", PetscNumOMPThreads)); #endif - PetscCall(PetscFPrintf(comm, fd, "Using %s\n", version)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Using %s\n", version)); /* Must preserve reduction count before we go on */ red = petsc_allreduce_ct + petsc_gather_ct + petsc_scatter_ct; /* Calculate summary information */ - PetscCall(PetscFPrintf(comm, fd, "\n Max Max/Min Avg Total\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n Max Max/Min Avg Total\n")); /* Time */ PetscCall(MPIU_Allreduce(&locTotalTime, &min, 1, MPIU_PETSCLOGDOUBLE, MPI_MIN, comm)); PetscCall(MPIU_Allreduce(&locTotalTime, &max, 1, MPIU_PETSCLOGDOUBLE, MPI_MAX, comm)); @@ -1323,7 +1327,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, avg = tot / ((PetscLogDouble)size); if (min != 0.0) ratio = max / min; else ratio = 0.0; - PetscCall(PetscFPrintf(comm, fd, "Time (sec): %5.3e %7.3f %5.3e\n", max, ratio, avg)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Time (sec): %5.3e %7.3f %5.3e\n", max, ratio, avg)); TotalTime = tot; /* Objects */ { @@ -1338,7 +1342,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, avg = tot / ((PetscLogDouble)size); if (min != 0.0) ratio = max / min; else ratio = 0.0; - PetscCall(PetscFPrintf(comm, fd, "Objects: %5.3e %7.3f %5.3e\n", max, ratio, avg)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Objects: %5.3e %7.3f %5.3e\n", max, ratio, avg)); /* Flops */ PetscCall(MPIU_Allreduce(&petsc_TotalFlops, &min, 1, MPIU_PETSCLOGDOUBLE, MPI_MIN, comm)); PetscCall(MPIU_Allreduce(&petsc_TotalFlops, &max, 1, MPIU_PETSCLOGDOUBLE, MPI_MAX, comm)); @@ -1346,7 +1350,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, avg = tot / ((PetscLogDouble)size); if (min != 0.0) ratio = max / min; else ratio = 0.0; - PetscCall(PetscFPrintf(comm, fd, "Flops: %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Flops: %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); TotalFlops = tot; /* Flops/sec -- Must talk to Barry here */ if (locTotalTime != 0.0) flops = petsc_TotalFlops / locTotalTime; @@ -1357,7 +1361,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, avg = tot / ((PetscLogDouble)size); if (min != 0.0) ratio = max / min; else ratio = 0.0; - PetscCall(PetscFPrintf(comm, fd, "Flops/sec: %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Flops/sec: %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); /* Memory */ PetscCall(PetscMallocGetMaximumUsage(&mem)); if (mem > 0.0) { @@ -1367,7 +1371,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, avg = tot / ((PetscLogDouble)size); if (min != 0.0) ratio = max / min; else ratio = 0.0; - PetscCall(PetscFPrintf(comm, fd, "Memory (bytes): %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Memory (bytes): %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); } /* Messages */ mess = 0.5 * (petsc_irecv_ct + petsc_isend_ct + petsc_recv_ct + petsc_send_ct); @@ -1377,7 +1381,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, avg = tot / ((PetscLogDouble)size); if (min != 0.0) ratio = max / min; else ratio = 0.0; - 
PetscCall(PetscFPrintf(comm, fd, "MPI Msg Count: %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); + PetscCall(PetscViewerASCIIPrintf(viewer, "MPI Msg Count: %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); numMessages = tot; /* Message Lengths */ mess = 0.5 * (petsc_irecv_len + petsc_isend_len + petsc_recv_len + petsc_send_len); @@ -1388,7 +1392,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, else avg = 0.0; if (min != 0.0) ratio = max / min; else ratio = 0.0; - PetscCall(PetscFPrintf(comm, fd, "MPI Msg Len (bytes): %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); + PetscCall(PetscViewerASCIIPrintf(viewer, "MPI Msg Len (bytes): %5.3e %7.3f %5.3e %5.3e\n", max, ratio, avg, tot)); messageLength = tot; /* Reductions */ PetscCall(MPIU_Allreduce(&red, &min, 1, MPIU_PETSCLOGDOUBLE, MPI_MIN, comm)); @@ -1396,11 +1400,11 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCall(MPIU_Allreduce(&red, &tot, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, comm)); if (min != 0.0) ratio = max / min; else ratio = 0.0; - PetscCall(PetscFPrintf(comm, fd, "MPI Reductions: %5.3e %7.3f\n", max, ratio)); + PetscCall(PetscViewerASCIIPrintf(viewer, "MPI Reductions: %5.3e %7.3f\n", max, ratio)); numReductions = red; /* wrong because uses count from process zero */ - PetscCall(PetscFPrintf(comm, fd, "\nFlop counting convention: 1 flop = 1 real number operation of type (multiply/divide/add/subtract)\n")); - PetscCall(PetscFPrintf(comm, fd, " e.g., VecAXPY() for real vectors of length N --> 2N flops\n")); - PetscCall(PetscFPrintf(comm, fd, " and VecAXPY() for complex vectors of length N --> 8N flops\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\nFlop counting convention: 1 flop = 1 real number operation of type (multiply/divide/add/subtract)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " e.g., VecAXPY() for real vectors of length N --> 2N flops\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " and VecAXPY() for complex vectors of length N --> 8N flops\n")); PetscCall(PetscLogRegistryCreateGlobalStageNames(comm, state->registry, &global_stages)); PetscCall(PetscLogRegistryCreateGlobalEventNames(comm, state->registry, &global_events)); @@ -1431,8 +1435,8 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCall(MPIU_Allreduce(localStageVisible, stageVisible, numStages, MPIU_BOOL, MPI_LAND, comm)); for (stage = 0; stage < numStages; stage++) { if (stageUsed[stage] && stageVisible[stage]) { - PetscCall(PetscFPrintf(comm, fd, "\nSummary of Stages: ----- Time ------ ----- Flop ------ --- Messages --- -- Message Lengths -- -- Reductions --\n")); - PetscCall(PetscFPrintf(comm, fd, " Avg %%Total Avg %%Total Count %%Total Avg %%Total Count %%Total\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\nSummary of Stages: ----- Time ------ ----- Flop ------ --- Messages --- -- Message Lengths -- -- Reductions --\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Avg %%Total Avg %%Total Count %%Total Avg %%Total Count %%Total\n")); break; } } @@ -1472,63 +1476,63 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, else fracLength = 0.0; if (numReductions != 0.0) fracReductions = red / numReductions; else fracReductions = 0.0; - PetscCall(PetscFPrintf(comm, fd, "%2d: %15s: %6.4e %5.1f%% %6.4e %5.1f%% %5.3e %5.1f%% %5.3e %5.1f%% %5.3e %5.1f%%\n", stage, stage_name, stageTime / size, 100.0 * fracTime, flops, 100.0 * fracFlops, mess, 100.0 * fracMessages, avgMessLen, 100.0 * fracLength, red, 
100.0 * fracReductions)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%2d: %15s: %6.4e %5.1f%% %6.4e %5.1f%% %5.3e %5.1f%% %5.3e %5.1f%% %5.3e %5.1f%%\n", stage, stage_name, stageTime / size, 100.0 * fracTime, flops, 100.0 * fracFlops, mess, 100.0 * fracMessages, avgMessLen, 100.0 * fracLength, red, 100.0 * fracReductions)); } } - PetscCall(PetscFPrintf(comm, fd, "\n------------------------------------------------------------------------------------------------------------------------\n")); - PetscCall(PetscFPrintf(comm, fd, "See the 'Profiling' chapter of the users' manual for details on interpreting output.\n")); - PetscCall(PetscFPrintf(comm, fd, "Phase summary info:\n")); - PetscCall(PetscFPrintf(comm, fd, " Count: number of times phase was executed\n")); - PetscCall(PetscFPrintf(comm, fd, " Time and Flop: Max - maximum over all processors\n")); - PetscCall(PetscFPrintf(comm, fd, " Ratio - ratio of maximum to minimum over all processors\n")); - PetscCall(PetscFPrintf(comm, fd, " Mess: number of messages sent\n")); - PetscCall(PetscFPrintf(comm, fd, " AvgLen: average message length (bytes)\n")); - PetscCall(PetscFPrintf(comm, fd, " Reduct: number of global reductions\n")); - PetscCall(PetscFPrintf(comm, fd, " Global: entire computation\n")); - PetscCall(PetscFPrintf(comm, fd, " Stage: stages of a computation. Set stages with PetscLogStagePush() and PetscLogStagePop().\n")); - PetscCall(PetscFPrintf(comm, fd, " %%T - percent time in this phase %%F - percent flop in this phase\n")); - PetscCall(PetscFPrintf(comm, fd, " %%M - percent messages in this phase %%L - percent message lengths in this phase\n")); - PetscCall(PetscFPrintf(comm, fd, " %%R - percent reductions in this phase\n")); - PetscCall(PetscFPrintf(comm, fd, " Total Mflop/s: 10e-6 * (sum of flop over all processors)/(max time over all processors)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n------------------------------------------------------------------------------------------------------------------------\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "See the 'Profiling' chapter of the users' manual for details on interpreting output.\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Phase summary info:\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Count: number of times phase was executed\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Time and Flop: Max - maximum over all processors\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Ratio - ratio of maximum to minimum over all processors\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Mess: number of messages sent\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " AvgLen: average message length (bytes)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Reduct: number of global reductions\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Global: entire computation\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Stage: stages of a computation. 
Set stages with PetscLogStagePush() and PetscLogStagePop().\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " %%T - percent time in this phase %%F - percent flop in this phase\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " %%M - percent messages in this phase %%L - percent message lengths in this phase\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " %%R - percent reductions in this phase\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Total Mflop/s: 10e-6 * (sum of flop over all processors)/(max time over all processors)\n")); if (PetscLogMemory) { - PetscCall(PetscFPrintf(comm, fd, " Memory usage is summed over all MPI processes, it is given in mega-bytes\n")); - PetscCall(PetscFPrintf(comm, fd, " Malloc Mbytes: Memory allocated and kept during event (sum over all calls to event). May be negative\n")); - PetscCall(PetscFPrintf(comm, fd, " EMalloc Mbytes: extra memory allocated during event and then freed (maximum over all calls to events). Never negative\n")); - PetscCall(PetscFPrintf(comm, fd, " MMalloc Mbytes: Increase in high water mark of allocated memory (sum over all calls to event). Never negative\n")); - PetscCall(PetscFPrintf(comm, fd, " RMI Mbytes: Increase in resident memory (sum over all calls to event)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Memory usage is summed over all MPI processes, it is given in mega-bytes\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Malloc Mbytes: Memory allocated and kept during event (sum over all calls to event). May be negative\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " EMalloc Mbytes: extra memory allocated during event and then freed (maximum over all calls to events). Never negative\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " MMalloc Mbytes: Increase in high water mark of allocated memory (sum over all calls to event). 
Never negative\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " RMI Mbytes: Increase in resident memory (sum over all calls to event)\n")); } #if defined(PETSC_HAVE_DEVICE) - PetscCall(PetscFPrintf(comm, fd, " GPU Mflop/s: 10e-6 * (sum of flop on GPU over all processors)/(max GPU time over all processors)\n")); - PetscCall(PetscFPrintf(comm, fd, " CpuToGpu Count: total number of CPU to GPU copies per processor\n")); - PetscCall(PetscFPrintf(comm, fd, " CpuToGpu Size (Mbytes): 10e-6 * (total size of CPU to GPU copies per processor)\n")); - PetscCall(PetscFPrintf(comm, fd, " GpuToCpu Count: total number of GPU to CPU copies per processor\n")); - PetscCall(PetscFPrintf(comm, fd, " GpuToCpu Size (Mbytes): 10e-6 * (total size of GPU to CPU copies per processor)\n")); - PetscCall(PetscFPrintf(comm, fd, " GPU %%F: percent flops on GPU in this event\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " GPU Mflop/s: 10e-6 * (sum of flop on GPU over all processors)/(max GPU time over all processors)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " CpuToGpu Count: total number of CPU to GPU copies per processor\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " CpuToGpu Size (Mbytes): 10e-6 * (total size of CPU to GPU copies per processor)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " GpuToCpu Count: total number of GPU to CPU copies per processor\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " GpuToCpu Size (Mbytes): 10e-6 * (total size of GPU to CPU copies per processor)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " GPU %%F: percent flops on GPU in this event\n")); #endif - PetscCall(PetscFPrintf(comm, fd, "------------------------------------------------------------------------------------------------------------------------\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "------------------------------------------------------------------------------------------------------------------------\n")); - PetscCall(PetscLogViewWarnDebugging(comm, fd)); + PetscCall(PetscLogViewWarnDebugging(viewer)); /* Report events */ - PetscCall(PetscFPrintf(comm, fd, "Event Count Time (sec) Flop --- Global --- --- Stage ---- Total")); - if (PetscLogMemory) PetscCall(PetscFPrintf(comm, fd, " Malloc EMalloc MMalloc RMI")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Event Count Time (sec) Flop --- Global --- --- Stage ---- Total")); + if (PetscLogMemory) PetscCall(PetscViewerASCIIPrintf(viewer, " Malloc EMalloc MMalloc RMI")); #if defined(PETSC_HAVE_DEVICE) - PetscCall(PetscFPrintf(comm, fd, " GPU - CpuToGpu - - GpuToCpu - GPU")); + PetscCall(PetscViewerASCIIPrintf(viewer, " GPU - CpuToGpu - - GpuToCpu - GPU")); #endif - PetscCall(PetscFPrintf(comm, fd, "\n")); - PetscCall(PetscFPrintf(comm, fd, " Max Ratio Max Ratio Max Ratio Mess AvgLen Reduct %%T %%F %%M %%L %%R %%T %%F %%M %%L %%R Mflop/s")); - if (PetscLogMemory) PetscCall(PetscFPrintf(comm, fd, " Mbytes Mbytes Mbytes Mbytes")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Max Ratio Max Ratio Max Ratio Mess AvgLen Reduct %%T %%F %%M %%L %%R %%T %%F %%M %%L %%R Mflop/s")); + if (PetscLogMemory) PetscCall(PetscViewerASCIIPrintf(viewer, " Mbytes Mbytes Mbytes Mbytes")); #if defined(PETSC_HAVE_DEVICE) - PetscCall(PetscFPrintf(comm, fd, " Mflop/s Count Size Count Size %%F")); + PetscCall(PetscViewerASCIIPrintf(viewer, " Mflop/s Count Size Count Size %%F")); #endif - PetscCall(PetscFPrintf(comm, fd, "\n")); - PetscCall(PetscFPrintf(comm, fd, 
"------------------------------------------------------------------------------------------------------------------------")); - if (PetscLogMemory) PetscCall(PetscFPrintf(comm, fd, "-----------------------------")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "------------------------------------------------------------------------------------------------------------------------")); + if (PetscLogMemory) PetscCall(PetscViewerASCIIPrintf(viewer, "-----------------------------")); #if defined(PETSC_HAVE_DEVICE) - PetscCall(PetscFPrintf(comm, fd, "---------------------------------------")); + PetscCall(PetscViewerASCIIPrintf(viewer, "---------------------------------------")); #endif - PetscCall(PetscFPrintf(comm, fd, "\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); #if defined(PETSC_HAVE_DEVICE) /* this indirect way of accessing these values is needed when PETSc is build with multiple libraries since the symbols are not in libpetscsys */ @@ -1546,7 +1550,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, if (!(stageVisible[stage] && stageUsed[stage])) continue; PetscCall(PetscLogGlobalNamesGlobalGetLocal(global_stages, stage, &stage_id)); PetscCall(PetscLogGlobalNamesGlobalGetName(global_stages, stage, &stage_name)); - PetscCall(PetscFPrintf(comm, fd, "\n--- Event Stage %d: %s\n\n", stage, stage_name)); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n--- Event Stage %d: %s\n\n", stage, stage_name)); stage_info = &zero_info; if (localStageUsed[stage]) { PetscStagePerf *stage_perf_info; @@ -1602,7 +1606,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCall(MPIU_Allreduce(&event_info->GpuTime, &gmaxt, 1, MPIU_PETSCLOGDOUBLE, MPI_MAX, comm)); #endif if (mint < 0.0) { - PetscCall(PetscFPrintf(comm, fd, "WARNING!!! Minimum time %g over all processors for %s is negative! This happens\n on some machines whose times cannot handle too rapid calls.!\n artificially changing minimum to zero.\n", mint, event_name)); + PetscCall(PetscViewerASCIIPrintf(viewer, "WARNING!!! Minimum time %g over all processors for %s is negative! This happens\n on some machines whose times cannot handle too rapid calls.!\n artificially changing minimum to zero.\n", mint, event_name)); mint = 0; } PetscCheck(minf >= 0.0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Minimum flop %g over all processors for %s is negative! 
Not possible!", minf, event_name); @@ -1652,52 +1656,52 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, if (maxt != 0.0) flopr = totf / maxt; else flopr = 0.0; if (fracStageTime > 1.0 || fracStageFlops > 1.0 || fracStageMess > 1.0 || fracStageMessLen > 1.0 || fracStageRed > 1.0) - PetscCall(PetscFPrintf(comm, fd, "%-16s %7d %3.1f %5.4e %3.1f %3.2e %3.1f %2.1e %2.1e %2.1e %2.0f %2.0f %2.0f %2.0f %2.0f Multiple stages %5.0f", event_name, maxC, ratC, maxt, ratt, maxf, ratf, totm, totml, totr, 100.0 * fracTime, 100.0 * fracFlops, 100.0 * fracMess, 100.0 * fracMessLen, 100.0 * fracRed, PetscAbs(flopr) / 1.0e6)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%-16s %7d %3.1f %5.4e %3.1f %3.2e %3.1f %2.1e %2.1e %2.1e %2.0f %2.0f %2.0f %2.0f %2.0f Multiple stages %5.0f", event_name, maxC, ratC, maxt, ratt, maxf, ratf, totm, totml, totr, 100.0 * fracTime, 100.0 * fracFlops, 100.0 * fracMess, 100.0 * fracMessLen, 100.0 * fracRed, PetscAbs(flopr) / 1.0e6)); else { if (PetscIsNanReal((PetscReal)maxt)) { // when maxt, ratt, flopr are NaN (i.e., run with GPUs but without -log_view_gpu_time), replace the confusing "nan" with "n/a" - PetscCall(PetscFPrintf(comm, fd, "%-16s %7d %3.1f n/a n/a %3.2e %3.1f %2.1e %2.1e %2.1e %2.0f %2.0f %2.0f %2.0f %2.0f %3.0f %2.0f %2.0f %2.0f %2.0f n/a", event_name, maxC, ratC, maxf, ratf, totm, totml, totr, 100.0 * fracTime, 100.0 * fracFlops, 100.0 * fracMess, 100.0 * fracMessLen, 100.0 * fracRed, 100.0 * fracStageTime, 100.0 * fracStageFlops, 100.0 * fracStageMess, 100.0 * fracStageMessLen, 100.0 * fracStageRed)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%-16s %7d %3.1f n/a n/a %3.2e %3.1f %2.1e %2.1e %2.1e %2.0f %2.0f %2.0f %2.0f %2.0f %3.0f %2.0f %2.0f %2.0f %2.0f n/a", event_name, maxC, ratC, maxf, ratf, totm, totml, totr, 100.0 * fracTime, 100.0 * fracFlops, 100.0 * fracMess, 100.0 * fracMessLen, 100.0 * fracRed, 100.0 * fracStageTime, 100.0 * fracStageFlops, 100.0 * fracStageMess, 100.0 * fracStageMessLen, 100.0 * fracStageRed)); } else { - PetscCall(PetscFPrintf(comm, fd, "%-16s %7d %3.1f %5.4e %3.1f %3.2e %3.1f %2.1e %2.1e %2.1e %2.0f %2.0f %2.0f %2.0f %2.0f %3.0f %2.0f %2.0f %2.0f %2.0f %5.0f", event_name, maxC, ratC, maxt, ratt, maxf, ratf, totm, totml, totr, 100.0 * fracTime, 100.0 * fracFlops, 100.0 * fracMess, 100.0 * fracMessLen, 100.0 * fracRed, 100.0 * fracStageTime, 100.0 * fracStageFlops, 100.0 * fracStageMess, 100.0 * fracStageMessLen, 100.0 * fracStageRed, PetscAbs(flopr) / 1.0e6)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%-16s %7d %3.1f %5.4e %3.1f %3.2e %3.1f %2.1e %2.1e %2.1e %2.0f %2.0f %2.0f %2.0f %2.0f %3.0f %2.0f %2.0f %2.0f %2.0f %5.0f", event_name, maxC, ratC, maxt, ratt, maxf, ratf, totm, totml, totr, 100.0 * fracTime, 100.0 * fracFlops, 100.0 * fracMess, 100.0 * fracMessLen, 100.0 * fracRed, 100.0 * fracStageTime, 100.0 * fracStageFlops, 100.0 * fracStageMess, 100.0 * fracStageMessLen, 100.0 * fracStageRed, PetscAbs(flopr) / 1.0e6)); } } - if (PetscLogMemory) PetscCall(PetscFPrintf(comm, fd, " %5.0f %5.0f %5.0f %5.0f", mal / 1.0e6, emalmax / 1.0e6, malmax / 1.0e6, mem / 1.0e6)); + if (PetscLogMemory) PetscCall(PetscViewerASCIIPrintf(viewer, " %5.0f %5.0f %5.0f %5.0f", mal / 1.0e6, emalmax / 1.0e6, malmax / 1.0e6, mem / 1.0e6)); #if defined(PETSC_HAVE_DEVICE) if (totf != 0.0) fracgflops = gflops / totf; else fracgflops = 0.0; if (gmaxt != 0.0) gflopr = gflops / gmaxt; else gflopr = 0.0; if (PetscIsNanReal((PetscReal)gflopr)) { - PetscCall(PetscFPrintf(comm, fd, " n/a %4.0f %3.2e %4.0f %3.2e % 2.0f", cct / 
size, csz / (1.0e6 * size), gct / size, gsz / (1.0e6 * size), 100.0 * fracgflops)); + PetscCall(PetscViewerASCIIPrintf(viewer, " n/a %4.0f %3.2e %4.0f %3.2e % 2.0f", cct / size, csz / (1.0e6 * size), gct / size, gsz / (1.0e6 * size), 100.0 * fracgflops)); } else { - PetscCall(PetscFPrintf(comm, fd, " %5.0f %4.0f %3.2e %4.0f %3.2e % 2.0f", PetscAbs(gflopr) / 1.0e6, cct / size, csz / (1.0e6 * size), gct / size, gsz / (1.0e6 * size), 100.0 * fracgflops)); + PetscCall(PetscViewerASCIIPrintf(viewer, " %5.0f %4.0f %3.2e %4.0f %3.2e % 2.0f", PetscAbs(gflopr) / 1.0e6, cct / size, csz / (1.0e6 * size), gct / size, gsz / (1.0e6 * size), 100.0 * fracgflops)); } #endif - PetscCall(PetscFPrintf(comm, fd, "\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); } } } } /* Memory usage and object creation */ - PetscCall(PetscFPrintf(comm, fd, "------------------------------------------------------------------------------------------------------------------------")); - if (PetscLogMemory) PetscCall(PetscFPrintf(comm, fd, "-----------------------------")); + PetscCall(PetscViewerASCIIPrintf(viewer, "------------------------------------------------------------------------------------------------------------------------")); + if (PetscLogMemory) PetscCall(PetscViewerASCIIPrintf(viewer, "-----------------------------")); #if defined(PETSC_HAVE_DEVICE) - PetscCall(PetscFPrintf(comm, fd, "---------------------------------------")); + PetscCall(PetscViewerASCIIPrintf(viewer, "---------------------------------------")); #endif - PetscCall(PetscFPrintf(comm, fd, "\n")); - PetscCall(PetscFPrintf(comm, fd, "\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); /* Right now, only stages on the first processor are reported here, meaning only objects associated with the global communicator, or MPI_COMM_SELF for proc 1. We really should report global stats and then stats for stages local to processor sets. */ /* We should figure out the longest object name here (now 20 characters) */ - PetscCall(PetscFPrintf(comm, fd, "Object Type Creations Destructions. Reports information only for process 0.\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Object Type Creations Destructions. 
Reports information only for process 0.\n")); for (stage = 0; stage < numStages; stage++) { const char *stage_name; PetscCall(PetscLogGlobalNamesGlobalGetName(global_stages, stage, &stage_name)); - PetscCall(PetscFPrintf(comm, fd, "\n--- Event Stage %d: %s\n\n", stage, stage_name)); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n--- Event Stage %d: %s\n\n", stage, stage_name)); if (localStageUsed[stage]) { PetscInt num_classes; @@ -1714,7 +1718,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, if (stage == 0 && oclass == num_classes - 1) { PetscCall(PetscStrcmp(class_reg_info.name, "Viewer", &flg)); PetscCheck(flg && class_perf_info->creations == 1 && class_perf_info->destructions == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "The last PetscObject type of the main PetscLogStage should be PetscViewer with a single creation and no destruction"); - } else PetscCall(PetscFPrintf(comm, fd, "%20s %5d %5d\n", class_reg_info.name, class_perf_info->creations, class_perf_info->destructions)); + } else PetscCall(PetscViewerASCIIPrintf(viewer, "%20s %5d %5d\n", class_reg_info.name, class_perf_info->creations, class_perf_info->destructions)); } } } @@ -1728,7 +1732,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCall(PetscLogGlobalNamesDestroy(&global_events)); /* Information unrelated to this particular run */ - PetscCall(PetscFPrintf(comm, fd, "========================================================================================================================\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "========================================================================================================================\n")); PetscCall(PetscTime(&y)); PetscCall(PetscTime(&x)); PetscCall(PetscTime(&y)); @@ -1741,7 +1745,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCall(PetscTime(&y)); PetscCall(PetscTime(&y)); PetscCall(PetscTime(&y)); - PetscCall(PetscFPrintf(comm, fd, "Average time to get PetscTime(): %g\n", (y - x) / 10.0)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Average time to get PetscTime(): %g\n", (y - x) / 10.0)); /* MPI information */ if (size > 1) { MPI_Status status; @@ -1756,7 +1760,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCallMPI(MPI_Barrier(comm)); PetscCallMPI(MPI_Barrier(comm)); PetscCall(PetscTime(&y)); - PetscCall(PetscFPrintf(comm, fd, "Average time for MPI_Barrier(): %g\n", (y - x) / 5.0)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Average time for MPI_Barrier(): %g\n", (y - x) / 5.0)); PetscCall(PetscCommDuplicate(comm, &newcomm, &tag)); PetscCallMPI(MPI_Barrier(comm)); if (rank) { @@ -1767,7 +1771,7 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, 1, tag, newcomm)); PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, newcomm, &status)); PetscCall(PetscTime(&y)); - PetscCall(PetscFPrintf(comm, fd, "Average time for zero size MPI_Send(): %g\n", (y - x) / size)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Average time for zero size MPI_Send(): %g\n", (y - x) / size)); } PetscCall(PetscCommDestroy(&newcomm)); } @@ -1775,37 +1779,37 @@ static PetscErrorCode PetscLogHandlerView_Default_Info(PetscLogHandler handler, /* Machine and compile information */ if (PetscDefined(USE_FORTRAN_KERNELS)) { - PetscCall(PetscFPrintf(comm, fd, "Compiled with FORTRAN kernels\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled with FORTRAN 
kernels\n")); } else { - PetscCall(PetscFPrintf(comm, fd, "Compiled without FORTRAN kernels\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled without FORTRAN kernels\n")); } if (PetscDefined(USE_64BIT_INDICES)) { - PetscCall(PetscFPrintf(comm, fd, "Compiled with 64-bit PetscInt\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled with 64-bit PetscInt\n")); } else if (PetscDefined(USE___FLOAT128)) { - PetscCall(PetscFPrintf(comm, fd, "Compiled with 32-bit PetscInt\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled with 32-bit PetscInt\n")); } if (PetscDefined(USE_REAL_SINGLE)) { - PetscCall(PetscFPrintf(comm, fd, "Compiled with single precision PetscScalar and PetscReal\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled with single precision PetscScalar and PetscReal\n")); } else if (PetscDefined(USE___FLOAT128)) { - PetscCall(PetscFPrintf(comm, fd, "Compiled with 128 bit precision PetscScalar and PetscReal\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled with 128 bit precision PetscScalar and PetscReal\n")); } if (PetscDefined(USE_REAL_MAT_SINGLE)) { - PetscCall(PetscFPrintf(comm, fd, "Compiled with single precision matrices\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled with single precision matrices\n")); } else { - PetscCall(PetscFPrintf(comm, fd, "Compiled with full precision matrices (default)\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Compiled with full precision matrices (default)\n")); } - PetscCall(PetscFPrintf(comm, fd, "sizeof(short) %d sizeof(int) %d sizeof(long) %d sizeof(void*) %d sizeof(PetscScalar) %d sizeof(PetscInt) %d\n", (int)sizeof(short), (int)sizeof(int), (int)sizeof(long), (int)sizeof(void *), (int)sizeof(PetscScalar), (int)sizeof(PetscInt))); + PetscCall(PetscViewerASCIIPrintf(viewer, "sizeof(short) %d sizeof(int) %d sizeof(long) %d sizeof(void*) %d sizeof(PetscScalar) %d sizeof(PetscInt) %d\n", (int)sizeof(short), (int)sizeof(int), (int)sizeof(long), (int)sizeof(void *), (int)sizeof(PetscScalar), (int)sizeof(PetscInt))); - PetscCall(PetscFPrintf(comm, fd, "Configure options: %s", petscconfigureoptions)); - PetscCall(PetscFPrintf(comm, fd, "%s", petscmachineinfo)); - PetscCall(PetscFPrintf(comm, fd, "%s", petsccompilerinfo)); - PetscCall(PetscFPrintf(comm, fd, "%s", petsccompilerflagsinfo)); - PetscCall(PetscFPrintf(comm, fd, "%s", petsclinkerinfo)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Configure options: %s", petscconfigureoptions)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%s", petscmachineinfo)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%s", petsccompilerinfo)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%s", petsccompilerflagsinfo)); + PetscCall(PetscViewerASCIIPrintf(viewer, "%s", petsclinkerinfo)); /* Cleanup */ - PetscCall(PetscFPrintf(comm, fd, "\n")); - PetscCall(PetscLogViewWarnNoGpuAwareMpi(comm, fd)); - PetscCall(PetscLogViewWarnDebugging(comm, fd)); + PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); + PetscCall(PetscLogViewWarnNoGpuAwareMpi(viewer)); + PetscCall(PetscLogViewWarnDebugging(viewer)); PetscCall(PetscFPTrapPop()); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/sys/logging/handler/impls/nested/xmlviewer.c b/src/sys/logging/handler/impls/nested/xmlviewer.c index 99244a47582..829ee6208f7 100644 --- a/src/sys/logging/handler/impls/nested/xmlviewer.c +++ b/src/sys/logging/handler/impls/nested/xmlviewer.c @@ -7,7 +7,6 @@ *************************************************************************************/ #include #include -#include #include "xmlviewer.h" #include 
"lognested.h" diff --git a/src/sys/logging/handler/impls/trace/logtrace.c b/src/sys/logging/handler/impls/trace/logtrace.c index 5d5b2c793f1..d002739a62e 100644 --- a/src/sys/logging/handler/impls/trace/logtrace.c +++ b/src/sys/logging/handler/impls/trace/logtrace.c @@ -91,7 +91,7 @@ PETSC_INTERN PetscErrorCode PetscLogHandlerCreate_Trace(PetscLogHandler handler) /*@C PetscLogHandlerCreateTrace - Create a logger that traces events and stages to a given file descriptor - Collective + Collective, No Fortran Support Input Parameters: + comm - an MPI communicator diff --git a/src/sys/logging/handler/interface/lhreg.c b/src/sys/logging/handler/interface/lhreg.c index 3c9a9d23022..c016cd4b315 100644 --- a/src/sys/logging/handler/interface/lhreg.c +++ b/src/sys/logging/handler/interface/lhreg.c @@ -45,7 +45,7 @@ static PetscErrorCode PetscLogHandlerRegisterAll(void) /*@C PetscLogHandlerRegister - Register a new `PetscLogHandler` - Not collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine diff --git a/src/sys/logging/handler/interface/loghandler.c b/src/sys/logging/handler/interface/loghandler.c index c2b24b98365..51d14d6bf90 100644 --- a/src/sys/logging/handler/interface/loghandler.c +++ b/src/sys/logging/handler/interface/loghandler.c @@ -346,7 +346,7 @@ PetscErrorCode PetscLogHandlerView(PetscLogHandler h, PetscViewer viewer) /*@C PetscLogHandlerGetEventPerfInfo - Get a direct reference to the `PetscEventPerfInfo` of a stage and event - Not collective + Not collective, No Fortran Support Input Parameters: + handler - a `PetscLogHandler` @@ -355,8 +355,8 @@ PetscErrorCode PetscLogHandlerView(PetscLogHandler h, PetscViewer viewer) Output Parameter: . event_info - a pointer to a performance log for `event` during `stage` (or `NULL` if this handler does not use - `PetscEventPerfInfo` to record performance data); writing to `event_info` will change the record in - `handler` + `PetscEventPerfInfo` to record performance data); writing to `event_info` will change the record in + `handler` Level: developer @@ -375,7 +375,7 @@ PetscErrorCode PetscLogHandlerGetEventPerfInfo(PetscLogHandler handler, PetscLog /*@C PetscLogHandlerGetStagePerfInfo - Get a direct reference to the `PetscEventPerfInfo` of a stage - Not collective + Not collective, No Fortran Support Input Parameters: + handler - a `PetscLogHandler` @@ -467,7 +467,7 @@ PetscErrorCode PetscLogHandlerLogObjectState_Internal(PetscLogHandler handler, P /*@C PetscLogHandlerLogObjectState - Record information about an object with the default log handler - Not Collective + Not Collective, No Fortran Support Input Parameters: + handler - a `PetscLogHandler` diff --git a/src/sys/logging/plog.c b/src/sys/logging/plog.c index 3e410ae92f5..8b3b728697f 100644 --- a/src/sys/logging/plog.c +++ b/src/sys/logging/plog.c @@ -103,10 +103,9 @@ PetscBool PetscLogGpuTimeFlag = PETSC_FALSE; PetscLogState petsc_log_state = NULL; -#define PETSC_LOG_HANDLER_HOT_BLANK \ - { \ - NULL, NULL, NULL, NULL, NULL, NULL \ - } +// clang-format off +#define PETSC_LOG_HANDLER_HOT_BLANK {NULL, NULL, NULL, NULL, NULL, NULL} +// clang-format on PetscLogHandlerHot PetscLogHandlers[PETSC_LOG_HANDLER_MAX] = { PETSC_LOG_HANDLER_HOT_BLANK, @@ -358,7 +357,7 @@ PetscErrorCode PetscLogHandlerStop(PetscLogHandler h) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogIsActive - Check if logging is currently in progress. 
Not Collective @@ -429,7 +428,7 @@ PETSC_INTERN PetscErrorCode PetscLogTypeBegin(PetscLogHandlerType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogDefaultBegin - Turns on logging of objects and events using the default log handler. This logs flop rates and object creation and should not slow programs down too much. This routine may be called more than once. @@ -438,7 +437,7 @@ PETSC_INTERN PetscErrorCode PetscLogTypeBegin(PetscLogHandlerType type) Options Database Key: . -log_view [viewertype:filename:viewerformat] - Prints summary of flop and timing information to the - screen (for code configured with --with-log=1 (which is the default)) + screen (for code configured with --with-log=1 (which is the default)) Example Usage: .vb @@ -468,7 +467,7 @@ PetscErrorCode PetscLogDefaultBegin(void) PetscLogTraceBegin - Begins trace logging. Every time a PETSc event begins or ends, the event name is printed. - Logically Collective on `PETSC_COMM_WORLD` + Logically Collective on `PETSC_COMM_WORLD`, No Fortran Support Input Parameter: . file - The file to print trace in (e.g. stdout) @@ -503,11 +502,11 @@ PetscErrorCode PetscLogTraceBegin(FILE *file) PETSC_INTERN PetscErrorCode PetscLogHandlerCreate_Nested(MPI_Comm, PetscLogHandler *); -/*@C +/*@ PetscLogNestedBegin - Turns on nested logging of objects and events. This logs flop rates and object creation and should not slow programs down too much. - Logically Collective on `PETSC_COMM_WORLD` + Logically Collective on `PETSC_COMM_WORLD`, No Fortran Support Options Database Keys: . -log_view :filename.xml:ascii_xml - Prints an XML summary of flop and timing information to the file @@ -598,7 +597,7 @@ static PetscBool PetscBeganMPE = PETSC_FALSE; PetscLogMPEBegin - Turns on MPE logging of events. This creates large log files and slows the program down. - Collective on `PETSC_COMM_WORLD` + Collective on `PETSC_COMM_WORLD`, No Fortran Support Options Database Key: . -log_mpe - Prints extensive log information @@ -640,7 +639,7 @@ PetscErrorCode PetscLogMPEBegin(void) /*@C PetscLogPerfstubsBegin - Turns on logging of events using the perfstubs interface. - Collective on `PETSC_COMM_WORLD` + Collective on `PETSC_COMM_WORLD`, No Fortran Support Options Database Key: . -log_perfstubs - use an external log handler through the perfstubs interface @@ -723,7 +722,7 @@ PetscErrorCode PetscLogObjects(PetscBool flag) } /*------------------------------------------------ Stage Functions --------------------------------------------------*/ -/*@C +/*@ PetscLogStageRegister - Attaches a character string name to a logging stage. Not Collective @@ -749,7 +748,7 @@ PetscErrorCode PetscLogStageRegister(const char sname[], PetscLogStage *stage) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStagePush - This function pushes a stage on the logging stack. Events started and stopped until `PetscLogStagePop()` will be associated with the stage Not Collective @@ -794,7 +793,7 @@ PetscErrorCode PetscLogStagePush(PetscLogStage stage) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStagePop - This function pops a stage from the logging stack that was pushed with `PetscLogStagePush()` Not Collective @@ -942,7 +941,7 @@ PetscErrorCode PetscLogStageGetVisible(PetscLogStage stage, PetscBool *isVisible PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStageGetId - Returns the stage id when given the stage name. 
Not Collective @@ -968,7 +967,7 @@ PetscErrorCode PetscLogStageGetId(const char name[], PetscLogStage *stage) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStageGetName - Returns the stage name when given the stage id. Not Collective @@ -983,7 +982,7 @@ PetscErrorCode PetscLogStageGetId(const char name[], PetscLogStage *stage) .seealso: [](ch_profiling), `PetscLogStageRegister()`, `PetscLogStagePush()`, `PetscLogStagePop()`, `PetscPreLoadBegin()`, `PetscPreLoadEnd()`, `PetscPreLoadStage()` @*/ -PetscErrorCode PetscLogStageGetName(PetscLogStage stage, const char **name) +PetscErrorCode PetscLogStageGetName(PetscLogStage stage, const char *name[]) { PetscLogStageInfo stage_info; PetscLogState state; @@ -999,7 +998,7 @@ PetscErrorCode PetscLogStageGetName(PetscLogStage stage, const char **name) /*------------------------------------------------ Event Functions --------------------------------------------------*/ -/*@C +/*@ PetscLogEventRegister - Registers an event name for logging operations Not Collective @@ -1527,6 +1526,8 @@ M*/ /*@C PetscLogStageGetPerfInfo - Return the performance information about the given stage + No Fortran Support + Input Parameters: . stage - The stage number or `PETSC_DETERMINE` for the current stage @@ -1565,6 +1566,8 @@ PetscErrorCode PetscLogStageGetPerfInfo(PetscLogStage stage, PetscEventPerfInfo /*@C PetscLogEventGetPerfInfo - Return the performance information about the given event in the given stage + No Fortran Support + Input Parameters: + stage - The stage number or `PETSC_DETERMINE` for the current stage - event - The event number @@ -1601,7 +1604,7 @@ PetscErrorCode PetscLogEventGetPerfInfo(PetscLogStage stage, PetscLogEvent event PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogEventSetDof - Set the nth number of degrees of freedom of a numerical problem associated with this event Not Collective @@ -1638,7 +1641,7 @@ PetscErrorCode PetscLogEventSetDof(PetscLogEvent event, PetscInt n, PetscLogDoub PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogEventSetError - Set the nth error associated with a numerical problem associated with this event Not Collective @@ -1678,7 +1681,7 @@ PetscErrorCode PetscLogEventSetError(PetscLogEvent event, PetscInt n, PetscLogDo PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogEventGetId - Returns the event id when given the event name. Not Collective @@ -1704,7 +1707,7 @@ PetscErrorCode PetscLogEventGetId(const char name[], PetscLogEvent *event) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogEventGetName - Returns the event name when given the event id. Not Collective @@ -1719,7 +1722,7 @@ PetscErrorCode PetscLogEventGetId(const char name[], PetscLogEvent *event) .seealso: [](ch_profiling), `PetscLogEventRegister()`, `PetscLogEventBegin()`, `PetscLogEventEnd()`, `PetscPreLoadBegin()`, `PetscPreLoadEnd()`, `PetscPreLoadStage()` @*/ -PetscErrorCode PetscLogEventGetName(PetscLogEvent event, const char **name) +PetscErrorCode PetscLogEventGetName(PetscLogEvent event, const char *name[]) { PetscLogEventInfo event_info; PetscLogState state; @@ -1822,7 +1825,7 @@ M*/ .seealso: [](ch_profiling), `PetscLogHandler`, `PetscLogObjectCreate()` M*/ -/*@C +/*@ PetscLogClassGetClassId - Returns the `PetscClassId` when given the class name. 
Not Collective @@ -1887,7 +1890,7 @@ PetscErrorCode PetscLogClassIdGetName(PetscClassId classid, const char **name) } /*------------------------------------------------ Output Functions -------------------------------------------------*/ -/*@C +/*@ PetscLogDump - Dumps logs of objects to a file. This file is intended to be read by bin/petscview. This program no longer exists. @@ -1923,7 +1926,7 @@ PetscErrorCode PetscLogDump(const char sname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogMPEDump - Dumps the MPE logging info to file for later use with Jumpshot. Collective on `PETSC_COMM_WORLD` @@ -1958,7 +1961,7 @@ PetscErrorCode PetscLogMPEDump(const char sname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogView - Prints a summary of the logging. Collective @@ -2119,7 +2122,7 @@ PetscErrorCode PetscLogSetThreshold(PetscLogDouble newThresh, PetscLogDouble *ol } /*----------------------------------------------- Counter Functions -------------------------------------------------*/ -/*@C +/*@ PetscGetFlops - Returns the number of flops used on this processor since the program began. @@ -2307,7 +2310,7 @@ M*/ #if PetscDefined(HAVE_DEVICE) #include -/*@C +/*@ PetscLogGpuTime - turn on the logging of GPU time for GPU kernels Options Database Key: @@ -2335,7 +2338,7 @@ PetscErrorCode PetscLogGpuTime(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogGpuTimeBegin - Start timer for device Level: intermediate @@ -2387,7 +2390,7 @@ PetscErrorCode PetscLogGpuTimeBegin(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogGpuTimeEnd - Stop timer for device Level: intermediate @@ -2551,7 +2554,7 @@ PETSC_INTERN PetscErrorCode PetscLogFinalize(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscClassIdRegister - Registers a new class name for objects and logging operations in an application code. Not Collective diff --git a/src/sys/logging/state/logstate.c b/src/sys/logging/state/logstate.c index 88c31efeeec..757c6333e13 100644 --- a/src/sys/logging/state/logstate.c +++ b/src/sys/logging/state/logstate.c @@ -184,7 +184,7 @@ static PetscErrorCode PetscLogStateResize(PetscLogState state) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStateStageRegister - Register a new stage with a logging state Not collective @@ -223,7 +223,7 @@ PetscErrorCode PetscLogStateStageRegister(PetscLogState state, const char sname[ PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStateEventRegister - Register a new event with a logging state Not collective @@ -517,7 +517,7 @@ PetscErrorCode PetscLogStateGetEventFromName(PetscLogState state, const char nam PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStateGetStageFromName - Get a `PetscLogStage` from the name it was registered with. Not collective @@ -540,7 +540,7 @@ PetscErrorCode PetscLogStateGetStageFromName(PetscLogState state, const char nam PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscLogStateGetClassFromName - Get a `PetscLogClass` from the name of the class it was registered with. Not collective diff --git a/src/sys/logging/utils/stack.c b/src/sys/logging/utils/stack.c index 8273bb158b2..a24857837bd 100644 --- a/src/sys/logging/utils/stack.c +++ b/src/sys/logging/utils/stack.c @@ -3,7 +3,7 @@ /*@C PetscIntStackDestroy - This function destroys a stack. - Not Collective + Not Collective, No Fortran Support Input Parameter: . 
stack - The stack @@ -24,7 +24,7 @@ PetscErrorCode PetscIntStackDestroy(PetscIntStack stack) /*@C PetscIntStackEmpty - This function determines whether any items have been pushed. - Not Collective + Not Collective, No Fortran Support Input Parameter: . stack - The stack @@ -48,7 +48,7 @@ PetscErrorCode PetscIntStackEmpty(PetscIntStack stack, PetscBool *empty) /*@C PetscIntStackTop - This function returns the top of the stack. - Not Collective + Not Collective, No Fortran Support Input Parameter: . stack - The stack @@ -72,7 +72,7 @@ PetscErrorCode PetscIntStackTop(PetscIntStack stack, int *top) /*@C PetscIntStackPush - This function pushes an integer on the stack. - Not Collective + Not Collective, No Fortran Support Input Parameters: + stack - The stack @@ -97,7 +97,7 @@ PetscErrorCode PetscIntStackPush(PetscIntStack stack, int item) /*@C PetscIntStackPop - This function pops an integer from the stack. - Not Collective + Not Collective, No Fortran Support Input Parameter: . stack - The stack @@ -125,7 +125,7 @@ PetscErrorCode PetscIntStackPop(PetscIntStack stack, int *item) /*@C PetscIntStackCreate - This function creates a stack. - Not Collective + Not Collective, No Fortran Support Output Parameter: . stack - The stack diff --git a/src/sys/memory/cuda/mcudahost.cu b/src/sys/memory/cuda/mcudahost.cu index 586b7cf5707..3e927e64f03 100644 --- a/src/sys/memory/cuda/mcudahost.cu +++ b/src/sys/memory/cuda/mcudahost.cu @@ -22,7 +22,7 @@ static PetscErrorCode (*PetscMallocOld)(size_t, PetscBool, int, const char[], co static PetscErrorCode (*PetscReallocOld)(size_t, int, const char[], const char[], void **); static PetscErrorCode (*PetscFreeOld)(void *, int, const char[], const char[]); -/*@C +/*@ PetscMallocSetCUDAHost - Set `PetscMalloc()` to use `CUDAHostMalloc()` Switch the current malloc and free routines to the CUDA malloc and free routines @@ -49,7 +49,7 @@ PetscErrorCode PetscMallocSetCUDAHost(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscMallocResetCUDAHost - Reset the changes made by `PetscMallocSetCUDAHost()` Not Collective diff --git a/src/sys/memory/hip/mhiphost.hip.cpp b/src/sys/memory/hip/mhiphost.hip.cpp index 954b754246d..cfb065a322f 100644 --- a/src/sys/memory/hip/mhiphost.hip.cpp +++ b/src/sys/memory/hip/mhiphost.hip.cpp @@ -22,7 +22,7 @@ static PetscErrorCode (*PetscMallocOld)(size_t, PetscBool, int, const char[], co static PetscErrorCode (*PetscReallocOld)(size_t, int, const char[], const char[], void **); static PetscErrorCode (*PetscFreeOld)(void *, int, const char[], const char[]); -/*@C +/*@ PetscMallocSetHIPHost - Set `PetscMalloc()` to use `HIPHostMalloc()` Switch the current malloc and free routines to the HIP malloc and free routines @@ -49,7 +49,7 @@ PETSC_EXTERN PetscErrorCode PetscMallocSetHIPHost(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscMallocResetHIPHost - Reset the changes made by `PetscMallocSetHIPHost()` Not Collective diff --git a/src/sys/memory/mal.c b/src/sys/memory/mal.c index a2b4af4aebb..1c1575cc87d 100644 --- a/src/sys/memory/mal.c +++ b/src/sys/memory/mal.c @@ -195,7 +195,7 @@ PetscBool petscsetmallocvisited = PETSC_FALSE; /*@C PetscMallocSet - Sets the underlying allocation routines used by `PetscMalloc()` and `PetscFree()` - Not Collective + Not Collective, No Fortran Support Input Parameters: + imalloc - the routine that provides the `malloc()` implementation (also provides `calloc()`, which is used depending on the second argument) @@ -221,7 +221,7 @@ PetscErrorCode PetscMallocSet(PetscErrorCode (*imalloc)(size_t, 
PetscBool, int, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscMallocClear - Resets the routines used by `PetscMalloc()` and `PetscFree()` Not Collective @@ -266,7 +266,7 @@ static PetscErrorCode (*PetscTrMallocOld)(size_t, PetscBool, int, const char[], static PetscErrorCode (*PetscTrReallocOld)(size_t, int, const char[], const char[], void **) = PetscReallocAlign; static PetscErrorCode (*PetscTrFreeOld)(void *, int, const char[], const char[]) = PetscFreeAlign; -/*@C +/*@ PetscMallocSetDRAM - Set `PetscMalloc()` to use DRAM. If memkind is available, change the memkind type. Otherwise, switch the current malloc and free routines to the `PetscMallocAlign()` and @@ -302,7 +302,7 @@ PetscErrorCode PetscMallocSetDRAM(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscMallocResetDRAM - Reset the changes made by `PetscMallocSetDRAM()` Not Collective @@ -334,7 +334,7 @@ static PetscBool petscmalloccoalesce = PETSC_FALSE; #endif -/*@C +/*@ PetscMallocSetCoalesce - Use coalesced `PetscMalloc()` when allocating groups of objects, that is when using `PetscMallocN()` Not Collective @@ -366,7 +366,7 @@ PetscErrorCode PetscMallocSetCoalesce(PetscBool coalesce) /*@C PetscMallocA - Allocate and optionally clear one or more memory locations, possibly using coalesced malloc - Not Collective + Not Collective, No Fortran Support Input Parameters: + n - number of objects to allocate (at least 1) @@ -426,7 +426,7 @@ PetscErrorCode PetscMallocA(int n, PetscBool clear, int lineno, const char *func /*@C PetscFreeA - Free one or more memory locations, possibly allocated using coalesced `PetscMallocN()` - Not Collective + Not Collective, No Fortran Support Input Parameters: + n - number of objects to free (at least 1) diff --git a/src/sys/memory/mtr.c b/src/sys/memory/mtr.c index 7faa521a68e..abcc831d704 100644 --- a/src/sys/memory/mtr.c +++ b/src/sys/memory/mtr.c @@ -203,7 +203,7 @@ static PetscErrorCode PetscTrMallocDefault(size_t a, PetscBool clear, int lineno #if defined(PETSC_USE_DEBUG) && !defined(PETSC_HAVE_THREADSAFETY) PetscCall(PetscStackCopy(&petscstack, &head->stack)); /* fix the line number to where PetscTrMallocDefault() was called, not the PetscFunctionBegin; */ - head->stack.line[head->stack.currentsize - 2] = lineno; + head->stack.line[PetscMax(head->stack.currentsize - 2, 0)] = lineno; head->stack.currentsize--; #if defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE) if (!clear && TRdebugIinitializenan) { @@ -413,7 +413,7 @@ static PetscErrorCode PetscTrReallocDefault(size_t len, int lineno, const char f #if defined(PETSC_USE_DEBUG) && !defined(PETSC_HAVE_THREADSAFETY) PetscCall(PetscStackCopy(&petscstack, &head->stack)); /* fix the line number to where the malloc() was called, not the PetscFunctionBegin; */ - head->stack.line[head->stack.currentsize - 2] = lineno; + head->stack.line[PetscMax(head->stack.currentsize - 2, 0)] = lineno; #endif /* @@ -626,7 +626,7 @@ PetscErrorCode PetscMallocPopMaximumUsage(int event, PetscLogDouble *mu) /*@C PetscMallocGetStack - returns a pointer to the stack for the location in the program a call to `PetscMalloc()` was used to obtain that memory - Not Collective + Not Collective, No Fortran Support Input Parameter: . 
ptr - the memory location diff --git a/src/sys/mpiuni/fsrc/somempifort.F90 b/src/sys/mpiuni/fsrc/somempifort.F90 index 17639045b2b..f01b4fd8c91 100644 --- a/src/sys/mpiuni/fsrc/somempifort.F90 +++ b/src/sys/mpiuni/fsrc/somempifort.F90 @@ -4,5 +4,4 @@ subroutine MPIUNISetModuleBlock() use mpiuni implicit none call MPIUNISetFortranBasePointers(MPI_IN_PLACE) - return end diff --git a/src/sys/objects/cxx/object_pool.cxx b/src/sys/objects/cxx/object_pool.cxx index 201ae8a9570..e90fbb6ea7e 100644 --- a/src/sys/objects/cxx/object_pool.cxx +++ b/src/sys/objects/cxx/object_pool.cxx @@ -592,11 +592,12 @@ void PoolAllocated::operator delete(void *ptr) noexcept { PetscFunctionBegin; if (PetscLikely(ptr)) { - size_type size{}; - align_type align{}; + size_type size{}; + align_type align{}; + allocator_type &allocated = pool(); - PetscCallAbort(PETSC_COMM_SELF, pool().get_attributes(ptr, &size, &align)); - PetscCallAbort(PETSC_COMM_SELF, pool().deallocate(&ptr, size, align)); + PetscCallAbort(PETSC_COMM_SELF, allocated.get_attributes(ptr, &size, &align)); + PetscCallAbort(PETSC_COMM_SELF, allocated.deallocate(&ptr, size, align)); } PetscFunctionReturnVoid(); } diff --git a/src/sys/objects/destroy.c b/src/sys/objects/destroy.c index ef80aa505bf..7f115c0550f 100644 --- a/src/sys/objects/destroy.c +++ b/src/sys/objects/destroy.c @@ -51,7 +51,7 @@ PetscErrorCode PetscObjectDestroy(PetscObject *obj) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectView - Views a `PetscObject` regardless of the type. Collective @@ -77,7 +77,7 @@ PetscErrorCode PetscObjectView(PetscObject obj, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectViewFromOptions - Processes command line options to determine if/how a `PetscObject` is to be viewed. Collective @@ -93,6 +93,15 @@ PetscErrorCode PetscObjectView(PetscObject obj, PetscViewer viewer) Level: developer Notes: + The argument has the following form +.vb + type:filename:format:filemode +.ve + where all parts are optional, but you need to include the colon to access the next part. For example, to read from an HDF5 file, use +.vb + hdf5:sol.h5::read +.ve + .vb If no value is provided ascii:stdout is used ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab, @@ -134,7 +143,7 @@ PetscErrorCode PetscObjectViewFromOptions(PetscObject obj, PetscObject bobj, con PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectTypeCompare - Determines whether a PETSc object is of a particular type. Not Collective @@ -167,7 +176,7 @@ PetscErrorCode PetscObjectTypeCompare(PetscObject obj, const char type_name[], P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectObjectTypeCompare - Determines whether two PETSc objects are of the same type Logically Collective @@ -194,7 +203,7 @@ PetscErrorCode PetscObjectObjectTypeCompare(PetscObject obj1, PetscObject obj2, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectBaseTypeCompare - Determines whether a `PetscObject` is of a given base type. For example the base type of `MATSEQAIJPERM` is `MATSEQAIJ` Not Collective @@ -390,7 +399,7 @@ static PetscErrorCode RegisterFinalizer(PetscFinalizerContainer container) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectRegisterDestroy - Registers a PETSc object to be destroyed when `PetscFinalize()` is called. 
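The option-string format documented in the new `PetscObjectViewFromOptions()` notes above is easiest to see in a complete program. The following sketch is illustrative only and not part of the patch; the option name -my_vec_view and the `Vec` named "sol" are made up for the example:
.vb
#include <petscvec.h>

int main(int argc, char **argv)
{
  Vec x;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, 10, &x));
  PetscCall(PetscObjectSetName((PetscObject)x, "sol"));
  PetscCall(VecSet(x, 1.0));
  /* the option value uses the type:filename:format:filemode form described in the notes above */
  PetscCall(PetscObjectViewFromOptions((PetscObject)x, NULL, "-my_vec_view"));
  PetscCall(VecDestroy(&x));
  PetscCall(PetscFinalize());
  return 0;
}
.ve
Running with -my_vec_view ascii:sol.m:ascii_matlab writes the vector as a MATLAB file, -my_vec_view binary:sol.dat writes it in PETSc binary format, and omitting the option skips viewing entirely.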
diff --git a/src/sys/objects/device/impls/cupm/cupmallocator.hpp b/src/sys/objects/device/impls/cupm/cupmallocator.hpp index eeff4b12b7f..dc6c7e95f87 100644 --- a/src/sys/objects/device/impls/cupm/cupmallocator.hpp +++ b/src/sys/objects/device/impls/cupm/cupmallocator.hpp @@ -28,7 +28,7 @@ class HostAllocator; // Allocator class to allocate pinned host memory for use with device template -class HostAllocator : public memory::impl::SegmentedMemoryPoolAllocatorBase, impl::Interface { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL HostAllocator : public memory::impl::SegmentedMemoryPoolAllocatorBase, impl::Interface { public: PETSC_CUPM_INHERIT_INTERFACE_TYPEDEFS_USING(T); using base_type = memory::impl::SegmentedMemoryPoolAllocatorBase; @@ -79,7 +79,7 @@ template class DeviceAllocator; template -class DeviceAllocator : public memory::impl::SegmentedMemoryPoolAllocatorBase, impl::Interface { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL DeviceAllocator : public memory::impl::SegmentedMemoryPoolAllocatorBase, impl::Interface { public: PETSC_CUPM_INHERIT_INTERFACE_TYPEDEFS_USING(T); using base_type = memory::impl::SegmentedMemoryPoolAllocatorBase; diff --git a/src/sys/objects/device/impls/cupm/cupmcontext.hpp b/src/sys/objects/device/impls/cupm/cupmcontext.hpp index 16ef945fe3c..e21262c6237 100644 --- a/src/sys/objects/device/impls/cupm/cupmcontext.hpp +++ b/src/sys/objects/device/impls/cupm/cupmcontext.hpp @@ -24,7 +24,7 @@ namespace impl { template -class DeviceContext : SolverInterface { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL DeviceContext : SolverInterface { public: PETSC_CUPMSOLVER_INHERIT_INTERFACE_TYPEDEFS_USING(T); diff --git a/src/sys/objects/device/impls/cupm/cupmdevice.cxx b/src/sys/objects/device/impls/cupm/cupmdevice.cxx index 160775ac11d..3e82abfc125 100644 --- a/src/sys/objects/device/impls/cupm/cupmdevice.cxx +++ b/src/sys/objects/device/impls/cupm/cupmdevice.cxx @@ -70,7 +70,7 @@ PetscErrorCode Device::DeviceInternal::initialize() noexcept PetscCall(CUPMAwareMPI_(&aware)); // For Open MPI, we could do a compile time check with - // "defined(PETSC_HAVE_OMPI_MAJOR_VERSION) && defined(MPIX_CUDA_AWARE_SUPPORT) && + // "defined(PETSC_HAVE_OPENMPI) && defined(MPIX_CUDA_AWARE_SUPPORT) && // MPIX_CUDA_AWARE_SUPPORT" to see if it is CUDA-aware. However, recent versions of IBM // Spectrum MPI (e.g., 10.3.1) on Summit meet above conditions, but one has to use jsrun // --smpiargs=-gpu to really enable GPU-aware MPI. So we do the check at runtime with a @@ -99,6 +99,7 @@ PetscErrorCode Device::DeviceInternal::configure() noexcept if (cupmSetDevice(id_) != cupmErrorDeviceAlreadyInUse) PetscCallCUPM(cupmGetLastError()); // need to update the device properties PetscCallCUPM(cupmGetDeviceProperties(&dprop_, id_)); + PetscDeviceCUPMRuntimeArch = dprop_.major * 10 + dprop_.minor; PetscCall(PetscInfo(nullptr, "Configured device %d\n", id_)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -274,16 +275,16 @@ PetscErrorCode Device::initialize(MPI_Comm comm, PetscInt *defaultDeviceId, P template PetscErrorCode Device::init_device_id_(PetscInt *inid) const noexcept { - const auto id = *inid == PETSC_DECIDE ? defaultDevice_ : *inid; + const auto id = *inid == PETSC_DECIDE ? 
defaultDevice_ : (int)*inid; const auto cerr = static_cast(-defaultDevice_); PetscFunctionBegin; PetscCheck(defaultDevice_ != PETSC_CUPM_DEVICE_NONE, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Trying to retrieve a %s PetscDevice when it has been disabled", cupmName()); PetscCheck(defaultDevice_ >= 0, PETSC_COMM_SELF, PETSC_ERR_GPU, "Cannot lazily initialize PetscDevice: %s error %d (%s) : %s", cupmName(), static_cast(cerr), cupmGetErrorName(cerr), cupmGetErrorString(cerr)); - PetscAssert(static_cast(id) < devices_.size(), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Only supports %zu number of devices but trying to get device with id %" PetscInt_FMT, devices_.size(), id); + PetscAssert(static_cast(id) < devices_.size(), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Only supports %zu number of devices but trying to get device with id %d", devices_.size(), id); if (!devices_[id]) devices_[id] = util::make_unique(id); - PetscAssert(id == devices_[id]->id(), PETSC_COMM_SELF, PETSC_ERR_PLIB, "Entry %" PetscInt_FMT " contains device with mismatching id %d", id, devices_[id]->id()); + PetscAssert(id == devices_[id]->id(), PETSC_COMM_SELF, PETSC_ERR_PLIB, "Entry %d contains device with mismatching id %d", id, devices_[id]->id()); PetscCall(devices_[id]->initialize()); *inid = id; PetscFunctionReturn(PETSC_SUCCESS); diff --git a/src/sys/objects/device/impls/cupm/cupmevent.hpp b/src/sys/objects/device/impls/cupm/cupmevent.hpp index b7075fad64e..a1ddb07a8f2 100644 --- a/src/sys/objects/device/impls/cupm/cupmevent.hpp +++ b/src/sys/objects/device/impls/cupm/cupmevent.hpp @@ -19,7 +19,7 @@ namespace cupm // destroy, they are not free. Using the pool vs on-demand creation and destruction yields a ~20% // speedup. template -class CUPMEventPool : impl::Interface, public RegisterFinalizeable> { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL CUPMEventPool : impl::Interface, public RegisterFinalizeable> { public: PETSC_CUPM_INHERIT_INTERFACE_TYPEDEFS_USING(T); @@ -98,7 +98,7 @@ inline auto cupm_timer_event_pool() noexcept -> decltype(cupm_event_pool -class CUPMEvent : impl::Interface, public memory::PoolAllocated { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL CUPMEvent : impl::Interface, public memory::PoolAllocated { using pool_type = memory::PoolAllocated; public: diff --git a/src/sys/objects/device/impls/cupm/cupmstream.hpp b/src/sys/objects/device/impls/cupm/cupmstream.hpp index da93e9c8ff7..dfb09e264f0 100644 --- a/src/sys/objects/device/impls/cupm/cupmstream.hpp +++ b/src/sys/objects/device/impls/cupm/cupmstream.hpp @@ -21,7 +21,7 @@ namespace cupm // Address of the objects does not suffice since cupmStreams are very likely internally reused. template -class CUPMStream : public StreamBase>, impl::Interface { +class PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL CUPMStream : public StreamBase>, impl::Interface { using crtp_base_type = StreamBase>; friend crtp_base_type; diff --git a/src/sys/objects/device/impls/sycl/sycldevice.sycl.cxx b/src/sys/objects/device/impls/sycl/sycldevice.sycl.cxx index ba26f2eac07..d72782a5756 100644 --- a/src/sys/objects/device/impls/sycl/sycldevice.sycl.cxx +++ b/src/sys/objects/device/impls/sycl/sycldevice.sycl.cxx @@ -188,14 +188,13 @@ PetscErrorCode Device::finalize_() noexcept PetscErrorCode Device::init_device_id_(PetscInt *inid) const noexcept { - const auto id = *inid == PETSC_DECIDE ? defaultDevice_ : *inid; + const auto id = *inid == PETSC_DECIDE ? 
defaultDevice_ : (int)*inid; PetscFunctionBegin; PetscCheck(defaultDevice_ != PETSC_SYCL_DEVICE_NONE, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Trying to retrieve a SYCL PetscDevice when it has been disabled"); - PetscCheck(!(id < PETSC_SYCL_DEVICE_HOST) && !(id - PETSC_SYCL_DEVICE_HOST >= PETSC_DEVICE_MAX_DEVICES), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Only supports %zu number of devices but trying to get device with id %" PetscInt_FMT, - devices_array_.size(), id); + PetscCheck(!(id < PETSC_SYCL_DEVICE_HOST) && !(id - PETSC_SYCL_DEVICE_HOST >= PETSC_DEVICE_MAX_DEVICES), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Only supports %zu number of devices but trying to get device with id %d", devices_array_.size(), id); if (!devices_[id]) devices_[id] = new DeviceInternal(id); - PetscCheck(id == devices_[id]->id(), PETSC_COMM_SELF, PETSC_ERR_PLIB, "Entry %" PetscInt_FMT " contains device with mismatching id %" PetscInt_FMT, id, devices_[id]->id()); + PetscCheck(id == devices_[id]->id(), PETSC_COMM_SELF, PETSC_ERR_PLIB, "Entry %d contains device with mismatching id %d", id, devices_[id]->id()); PetscCall(devices_[id]->initialize()); *inid = id; PetscFunctionReturn(PETSC_SUCCESS); diff --git a/src/sys/objects/device/interface/dcontext.cxx b/src/sys/objects/device/interface/dcontext.cxx index b7a07a2984c..59cb6581171 100644 --- a/src/sys/objects/device/interface/dcontext.cxx +++ b/src/sys/objects/device/interface/dcontext.cxx @@ -986,7 +986,7 @@ PetscErrorCode PetscDeviceContextSetFromOptions(MPI_Comm comm, PetscDeviceContex PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDeviceContextView - View a `PetscDeviceContext` Collective on `viewer` @@ -1041,7 +1041,7 @@ PetscErrorCode PetscDeviceContextView(PetscDeviceContext dctx, PetscViewer viewe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDeviceContextViewFromOptions - View a `PetscDeviceContext` from options Input Parameters: diff --git a/src/sys/objects/device/interface/device.cxx b/src/sys/objects/device/interface/device.cxx index e27d17f1d53..512f1c4361e 100644 --- a/src/sys/objects/device/interface/device.cxx +++ b/src/sys/objects/device/interface/device.cxx @@ -15,6 +15,10 @@ using namespace Petsc::device; +#if defined(PETSC_HAVE_CUPM) +int PetscDeviceCUPMRuntimeArch = 0; +#endif + namespace { @@ -103,6 +107,7 @@ PetscErrorCode PetscDeviceCreate(PetscDeviceType type, PetscInt devid, PetscDevi PetscValidDeviceType(type, 1); PetscAssertPointer(device, 3); PetscCall(PetscDeviceInitializePackage()); + PetscCall(PetscNew(device)); (*device)->id = PetscDeviceCounter++; (*device)->type = type; @@ -197,7 +202,7 @@ PetscErrorCode PetscDeviceConfigure(PetscDevice device) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDeviceView - View a `PetscDevice` Collective on viewer @@ -255,7 +260,7 @@ PetscErrorCode PetscDeviceView(PetscDevice device, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDeviceGetType - Get the type of device Not Collective diff --git a/src/sys/objects/device/interface/global_dcontext.cxx b/src/sys/objects/device/interface/global_dcontext.cxx index ec8516f4769..c58a8658b5d 100644 --- a/src/sys/objects/device/interface/global_dcontext.cxx +++ b/src/sys/objects/device/interface/global_dcontext.cxx @@ -1,4 +1,5 @@ #include "petscdevice_interface_internal.hpp" /*I I*/ +#include static auto rootDeviceType = PETSC_DEVICE_CONTEXT_DEFAULT_DEVICE_TYPE; static auto rootStreamType = PETSC_DEVICE_CONTEXT_DEFAULT_STREAM_TYPE; @@ -22,6 +23,31 @@ PetscErrorCode 
PetscDeviceContextSetRootStreamType_Internal(PetscStreamType type PetscFunctionReturn(PETSC_SUCCESS); } +static inline PetscErrorCode PetscSetDefaultCUPMStreamFromDeviceContext(PetscDeviceContext dctx, PetscDeviceType dtype) +{ + PetscFunctionBegin; +#if PetscDefined(HAVE_CUDA) + if (dtype == PETSC_DEVICE_CUDA) { + void *handle; + + PetscCall(PetscDeviceContextGetStreamHandle_Internal(dctx, &handle)); + PetscDefaultCudaStream = *static_cast(handle); + } +#endif +#if PetscDefined(HAVE_HIP) + if (dtype == PETSC_DEVICE_HIP) { + void *handle; + + PetscCall(PetscDeviceContextGetStreamHandle_Internal(dctx, &handle)); + PetscDefaultHipStream = *static_cast(handle); + } +#endif +#if !PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP) + (void)dctx, (void)dtype; +#endif + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode PetscDeviceContextSetupGlobalContext_Private() noexcept { PetscFunctionBegin; @@ -51,6 +77,7 @@ static PetscErrorCode PetscDeviceContextSetupGlobalContext_Private() noexcept PetscCall(PetscDeviceContextSetStreamType(globalContext, rootStreamType)); PetscCall(PetscDeviceContextSetDefaultDeviceForType_Internal(globalContext, dtype)); PetscCall(PetscDeviceContextSetUp(globalContext)); + PetscCall(PetscSetDefaultCUPMStreamFromDeviceContext(globalContext, dtype)); } PetscFunctionReturn(PETSC_SUCCESS); } @@ -123,5 +150,6 @@ PetscErrorCode PetscDeviceContextSetCurrentContext(PetscDeviceContext dctx) PetscCall(PetscDeviceSetDefaultDeviceType(dtype)); globalContext = dctx; PetscCall(PetscInfo(dctx, "Set global PetscDeviceContext id %" PetscInt64_FMT "\n", PetscObjectCast(dctx)->id)); + PetscCall(PetscSetDefaultCUPMStreamFromDeviceContext(globalContext, dtype)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/sys/objects/device/util/memory.c b/src/sys/objects/device/util/memory.c index da20494dc5d..68756d5953a 100644 --- a/src/sys/objects/device/util/memory.c +++ b/src/sys/objects/device/util/memory.c @@ -6,7 +6,7 @@ /*@C PetscGetMemType - Query the `PetscMemType` of a pointer - Not Collective + Not Collective, No Fortran Support Input Parameter: . ptr - The pointer to query (may be `NULL`) @@ -52,7 +52,11 @@ PetscErrorCode PetscGetMemType(const void *ptr, PetscMemType *type) enum hipMemoryType mtype; cerr = hipPointerGetAttributes(&attr, ptr); if (cerr) cerr = hipGetLastError(); + #if PETSC_PKG_HIP_VERSION_GE(5, 5, 0) + mtype = attr.type; + #else mtype = attr.memoryType; + #endif if (cerr == hipSuccess && mtype == hipMemoryTypeDevice) *type = PETSC_MEMTYPE_DEVICE; } #endif diff --git a/src/sys/objects/fcallback.c b/src/sys/objects/fcallback.c index d5242b57022..530dbe4f626 100644 --- a/src/sys/objects/fcallback.c +++ b/src/sys/objects/fcallback.c @@ -37,7 +37,7 @@ static PetscErrorCode PetscFortranCallbackFinalize(void) PetscFortranCallbackRegister - register a type+subtype callback. This is used by the PETSc Fortran stubs to allow the use of user Fortran functions as arguments to PETSc functions that take function pointers - Not Collective + Not Collective, No Fortran Support Input Parameters: + classid - ID of class on which to register callback @@ -100,7 +100,7 @@ PetscErrorCode PetscFortranCallbackRegister(PetscClassId classid, const char *su /*@C PetscFortranCallbackGetSizes - get sizes of class and subtype pointer arrays - Collective + Collective, No Fortran Support Input Parameter: . 
classid - class Id diff --git a/src/sys/objects/ftn-custom/zdestroyf.c b/src/sys/objects/ftn-custom/zdestroyf.c deleted file mode 100644 index dd9e1d5eb9c..00000000000 --- a/src/sys/objects/ftn-custom/zdestroyf.c +++ /dev/null @@ -1,16 +0,0 @@ -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscobjecttypecompare_ PETSCOBJECTTYPECOMPARE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscobjecttypecompare_ petscobjecttypecompare -#endif - -PETSC_EXTERN void petscobjecttypecompare_(PetscObject *obj, char *type_name, PetscBool *same, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - FIXCHAR(type_name, len, c1); - *ierr = PetscObjectTypeCompare(*obj, c1, same); - if (*ierr) return; - FREECHAR(type_name, c1); -} diff --git a/src/sys/objects/ftn-custom/zgtype.c b/src/sys/objects/ftn-custom/zgtype.c deleted file mode 100644 index 443e5196958..00000000000 --- a/src/sys/objects/ftn-custom/zgtype.c +++ /dev/null @@ -1,17 +0,0 @@ -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscobjectgettype_ PETSCOBJECTGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscobjectgettype_ petscobjectgettype -#endif - -PETSC_EXTERN void petscobjectgettype_(PetscObject *obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tmp; - *ierr = PetscObjectGetType(*obj, &tmp); - if (*ierr) return; - *ierr = PetscStrncpy(type, tmp, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, type, len); -} diff --git a/src/sys/objects/ftn-custom/zinheritf.c b/src/sys/objects/ftn-custom/zinheritf.c deleted file mode 100644 index dfa77de8ea9..00000000000 --- a/src/sys/objects/ftn-custom/zinheritf.c +++ /dev/null @@ -1,58 +0,0 @@ -/* - This file contains Fortran stubs for Options routines. - These are not generated automatically since they require passing strings - between Fortran and C. 
-*/ - -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscobjectcompose_ PETSCOBJECTCOMPOSE - #define petscobjectquery_ PETSCOBJECTQUERY - #define petscobjectreference_ PETSCOBJECTREFERENCE - #define petscobjectdereference_ PETSCOBJECTDEREFERENCE - #define petscobjectgetreference_ PETSCOBJECTGETREFERENCE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscobjectcompose_ petscobjectcompose - #define petscobjectquery_ petscobjectquery - #define petscobjectreference_ petscobjectreference - #define petscobjectdereference_ petscobjectdereference - #define petscobjectgetreference_ petscobjectgetreference -#endif - -/* ---------------------------------------------------------------------*/ - -PETSC_EXTERN void petscobjectcompose_(PetscObject *obj, char *name, PetscObject *ptr, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *n1; - - FIXCHAR(name, len, n1); - *ierr = PetscObjectCompose(*obj, n1, *ptr); - if (*ierr) return; - FREECHAR(name, n1); -} - -PETSC_EXTERN void petscobjectquery_(PetscObject *obj, char *name, PetscObject *ptr, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *n1; - - FIXCHAR(name, len, n1); - *ierr = PetscObjectQuery(*obj, n1, ptr); - if (*ierr) return; - FREECHAR(name, n1); -} - -PETSC_EXTERN void petscobjectreference_(PetscObject *obj, PetscErrorCode *ierr) -{ - *ierr = PetscObjectReference(*obj); -} - -PETSC_EXTERN void petscobjectdereference_(PetscObject *obj, PetscErrorCode *ierr) -{ - *ierr = PetscObjectDereference(*obj); -} - -PETSC_EXTERN void petscobjectgetreference_(PetscObject *obj, PetscInt *ref, PetscErrorCode *ierr) -{ - *ierr = PetscObjectGetReference(*obj, ref); -} diff --git a/src/sys/objects/ftn-custom/zoptionsf.c b/src/sys/objects/ftn-custom/zoptionsf.c index f4ac08eb1af..323987d8a70 100644 --- a/src/sys/objects/ftn-custom/zoptionsf.c +++ b/src/sys/objects/ftn-custom/zoptionsf.c @@ -20,7 +20,6 @@ #define petscoptionsscalar_ PETSCOPTIONSSCALAR #define petscoptionsscalararray_ PETSCOPTIONSSCALARARRAY #define petscoptionsstring_ PETSCOPTIONSSTRING - #define petscsubcommview_ PETSCSUBCOMMVIEW #define petscsubcommgetparent_ PETSCSUBCOMMGETPARENT #define petscsubcommgetcontiguousparent_ PETSCSUBCOMMGETCONTIGUOUSPARENT #define petscsubcommgetchild_ PETSCSUBCOMMGETCHILD @@ -29,9 +28,6 @@ #define petscoptionsgetbool_ PETSCOPTIONSGETBOOL #define petscoptionsgetboolarray_ PETSCOPTIONSGETBOOLARRAY #define petscoptionsgetintarray_ PETSCOPTIONSGETINTARRAY - #define petscoptionssetvalue_ PETSCOPTIONSSETVALUE - #define petscoptionsclearvalue_ PETSCOPTIONSCLEARVALUE - #define petscoptionshasname_ PETSCOPTIONSHASNAME #define petscoptionsgetint_ PETSCOPTIONSGETINT #define petscoptionsgetreal_ PETSCOPTIONSGETREAL #define petscoptionsgetscalar_ PETSCOPTIONSGETSCALAR @@ -39,12 +35,6 @@ #define petscoptionsgetrealarray_ PETSCOPTIONSGETREALARRAY #define petscoptionsgetstring_ PETSCOPTIONSGETSTRING #define petscgetprogramname PETSCGETPROGRAMNAME - #define petscoptionsinsertfile_ PETSCOPTIONSINSERTFILE - #define petscoptionsclear_ PETSCOPTIONSCLEAR - #define petscoptionsinsertstring_ PETSCOPTIONSINSERTSTRING - #define petscoptionsview_ PETSCOPTIONSVIEW - #define petscoptionsleft_ PETSCOPTIONSLEFT - #define petscobjectviewfromoptions_ PETSCOBJECTVIEWFROMOPTIONS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define petscoptionsbegin_ petscoptionsbegin #define petscoptionsend_ petscoptionsend @@ -58,7 +48,6 @@ #define petscoptionsscalar_ petscoptionsscalar #define petscoptionsscalararray_ petscoptionsscalararray #define 
petscoptionsstring_ petscoptionsstring - #define petscsubcommview_ petscsubcommview #define petscsubcommgetparent_ petscsubcommgetparent #define petscsubcommgetcontiguousparent_ petscsubcommgetcontiguousparent #define petscsubcommgetchild_ petscsubcommgetchild @@ -66,9 +55,6 @@ #define petscoptionsgetenumprivate_ petscoptionsgetenumprivate #define petscoptionsgetbool_ petscoptionsgetbool #define petscoptionsgetboolarray_ petscoptionsgetboolarray - #define petscoptionssetvalue_ petscoptionssetvalue - #define petscoptionsclearvalue_ petscoptionsclearvalue - #define petscoptionshasname_ petscoptionshasname #define petscoptionsgetint_ petscoptionsgetint #define petscoptionsgetreal_ petscoptionsgetreal #define petscoptionsgetscalar_ petscoptionsgetscalar @@ -77,12 +63,6 @@ #define petscoptionsgetstring_ petscoptionsgetstring #define petscoptionsgetintarray_ petscoptionsgetintarray #define petscgetprogramname_ petscgetprogramname - #define petscoptionsinsertfile_ petscoptionsinsertfile - #define petscoptionsclear_ petscoptionsclear - #define petscoptionsinsertstring_ petscoptionsinsertstring - #define petscoptionsview_ petscoptionsview - #define petscoptionsleft_ petscoptionsleft - #define petscobjectviewfromoptions_ petscobjectviewfromoptions #endif static PetscOptionItems PetscOptionsObjectBase, *PetscOptionsObject = NULL; @@ -322,65 +302,6 @@ PETSC_EXTERN void petscoptionsstring_(char *opt, char *text, char *man, char *cu FIXRETURNCHAR(flag, value, lenvalue); } -PETSC_EXTERN void petscoptionsinsertstring_(PetscOptions *options, char *file, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - - FIXCHAR(file, len, c1); - *ierr = PetscOptionsInsertString(*options, c1); - if (*ierr) return; - FREECHAR(file, c1); -} - -PETSC_EXTERN void petscoptionsinsertfile_(MPI_Fint *comm, PetscOptions *options, char *file, PetscBool *require, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - - FIXCHAR(file, len, c1); - *ierr = PetscOptionsInsertFile(MPI_Comm_f2c(*comm), *options, c1, *require); - if (*ierr) return; - FREECHAR(file, c1); -} - -PETSC_EXTERN void petscoptionssetvalue_(PetscOptions *options, char *name, char *value, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2) -{ - char *c1, *c2; - - FIXCHAR(name, len1, c1); - FIXCHAR(value, len2, c2); - *ierr = PetscOptionsSetValue(*options, c1, c2); - if (*ierr) return; - FREECHAR(name, c1); - FREECHAR(value, c2); -} - -PETSC_EXTERN void petscoptionsclear_(PetscOptions *options, PetscErrorCode *ierr) -{ - *ierr = PetscOptionsClear(*options); -} - -PETSC_EXTERN void petscoptionsclearvalue_(PetscOptions *options, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - - FIXCHAR(name, len, c1); - *ierr = PetscOptionsClearValue(*options, c1); - if (*ierr) return; - FREECHAR(name, c1); -} - -PETSC_EXTERN void petscoptionshasname_(PetscOptions *options, char *pre, char *name, PetscBool *flg, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2) -{ - char *c1, *c2; - - FIXCHAR(pre, len1, c1); - FIXCHAR(name, len2, c2); - *ierr = PetscOptionsHasName(*options, c1, c2, flg); - if (*ierr) return; - FREECHAR(pre, c1); - FREECHAR(name, c2); -} - PETSC_EXTERN void petscoptionsgetint_(PetscOptions *opt, char *pre, char *name, PetscInt *ivalue, PetscBool *flg, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len1, PETSC_FORTRAN_CHARLEN_T len2) { char *c1, *c2; @@ -536,25 +457,6 @@ PETSC_EXTERN void petscgetprogramname_(char *name, PetscErrorCode *ierr, PETSC_F 
FIXRETURNCHAR(PETSC_TRUE, name, len_in); } -PETSC_EXTERN void petscoptionsview_(PetscOptions *options, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscOptionsView(*options, v); -} - -PETSC_EXTERN void petscobjectviewfromoptions_(PetscObject *obj, PetscObject *bobj, char *option, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T loption) -{ - char *o; - - FIXCHAR(option, loption, o); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscObjectViewFromOptions(*obj, *bobj, o); - if (*ierr) return; - FREECHAR(option, o); -} - PETSC_EXTERN void petscsubcommgetparent_(PetscSubcomm *scomm, MPI_Fint *pcomm, int *ierr) { MPI_Comm tcomm; @@ -575,10 +477,3 @@ PETSC_EXTERN void petscsubcommgetchild_(PetscSubcomm *scomm, MPI_Fint *ccomm, in *ierr = PetscSubcommGetChild(*scomm, &tcomm); *ccomm = MPI_Comm_c2f(tcomm); } - -PETSC_EXTERN void petscsubcommview_(PetscSubcomm *psubcomm, PetscViewer *viewer, int *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = PetscSubcommView(*psubcomm, v); -} diff --git a/src/sys/objects/ftn-custom/zoptionsyamlf.c b/src/sys/objects/ftn-custom/zoptionsyamlf.c deleted file mode 100644 index c5502262685..00000000000 --- a/src/sys/objects/ftn-custom/zoptionsyamlf.c +++ /dev/null @@ -1,23 +0,0 @@ -/* - This file contains Fortran stubs for Options routines. - These are not generated automatically since they require passing strings - between Fortran and C. -*/ - -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscoptionsinsertfileyaml_ PETSCOPTIONSINSERTFILEYAML -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscoptionsinsertfileyaml_ petscoptionsinsertfileyaml -#endif - -PETSC_EXTERN void petscoptionsinsertfileyaml_(MPI_Fint *comm, PetscOptions *options, char *file, PetscBool *require, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *c1; - - FIXCHAR(file, len, c1); - *ierr = PetscOptionsInsertFileYAML(MPI_Comm_f2c(*comm), *options, c1, *require); - if (*ierr) return; - FREECHAR(file, c1); -} diff --git a/src/sys/objects/ftn-custom/zpackage.c b/src/sys/objects/ftn-custom/zpackage.c deleted file mode 100644 index 36f5b076f23..00000000000 --- a/src/sys/objects/ftn-custom/zpackage.c +++ /dev/null @@ -1,17 +0,0 @@ -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petschasexternalpackage_ PETSCHASEXTERNALPACKAGE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petschasexternalpackage_ petschasexternalpackage -#endif - -PETSC_EXTERN void petschasexternalpackage_(char *pkg, PetscBool *has, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t1; - - FIXCHAR(pkg, len, t1); - *ierr = PetscHasExternalPackage(t1, has); - if (*ierr) return; - FREECHAR(pkg, t1); -} diff --git a/src/sys/objects/ftn-custom/zpgnamef.c b/src/sys/objects/ftn-custom/zpgnamef.c index 939858bd03e..5c34ad73da4 100644 --- a/src/sys/objects/ftn-custom/zpgnamef.c +++ b/src/sys/objects/ftn-custom/zpgnamef.c @@ -1,16 +1,13 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscobjectgetname_ PETSCOBJECTGETNAME + #define petscgetversion_ PETSCGETVERSION #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscobjectgetname_ petscobjectgetname + #define petscgetversion_ petscgetversion #endif -PETSC_EXTERN void petscobjectgetname_(PetscObject *obj, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) +PETSC_EXTERN void petscgetversion_(char *version, int *ierr, PETSC_FORTRAN_CHARLEN_T len1) { - const char *tmp; - *ierr = PetscObjectGetName(*obj, 
&tmp); - *ierr = PetscStrncpy(name, tmp, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); + *ierr = PetscGetVersion(version, len1); + FIXRETURNCHAR(PETSC_TRUE, version, len1); } diff --git a/src/sys/objects/ftn-custom/zpnamef.c b/src/sys/objects/ftn-custom/zpnamef.c deleted file mode 100644 index a8aa9e23511..00000000000 --- a/src/sys/objects/ftn-custom/zpnamef.c +++ /dev/null @@ -1,17 +0,0 @@ -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscobjectsetname_ PETSCOBJECTSETNAME -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscobjectsetname_ petscobjectsetname -#endif - -PETSC_EXTERN void petscobjectsetname_(PetscObject *obj, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t1; - - FIXCHAR(name, len, t1); - *ierr = PetscObjectSetName(*obj, t1); - if (*ierr) return; - FREECHAR(name, t1); -} diff --git a/src/sys/objects/ftn-custom/zprefixf.c b/src/sys/objects/ftn-custom/zprefixf.c deleted file mode 100644 index e65cafe049b..00000000000 --- a/src/sys/objects/ftn-custom/zprefixf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscobjectsetoptionsprefix PETSCOBJECTSETOPTIONSPREFIX -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscobjectsetoptionsprefix_ petscobjectsetoptionsprefix -#endif - -PETSC_EXTERN void petscobjectsetoptionsprefix_(PetscObject *obj, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = PetscObjectSetOptionsPrefix(*obj, t); - if (*ierr) return; - FREECHAR(prefix, t); -} diff --git a/src/sys/objects/ftn-custom/zstart.c b/src/sys/objects/ftn-custom/zstart.c index 6532fd051cb..0a03c72fe4c 100644 --- a/src/sys/objects/ftn-custom/zstart.c +++ b/src/sys/objects/ftn-custom/zstart.c @@ -17,8 +17,6 @@ #define petscinitializef_ PETSCINITIALIZEF #define petscfinalize_ PETSCFINALIZE #define petscend_ PETSCEND - #define iargc_ IARGC - #define getarg_ GETARG #define mpi_init_ MPI_INIT #define petscgetcomm_ PETSCGETCOMM #define petsccommandargumentcount_ PETSCCOMMANDARGUMENTCOUNT @@ -28,43 +26,11 @@ #define petscfinalize_ petscfinalize #define petscend_ petscend #define mpi_init_ mpi_init - #define iargc_ iargc - #define getarg_ getarg #define petscgetcomm_ petscgetcomm #define petsccommandargumentcount_ petsccommandargumentcount #define petscgetcommandargument_ petscgetcommandargument #endif -#if defined(PETSC_HAVE_NAGF90) - #undef iargc_ - #undef getarg_ - #define iargc_ f90_unix_MP_iargc - #define getarg_ f90_unix_MP_getarg -#endif -#if defined(PETSC_USE_NARGS) /* Digital Fortran */ - #undef iargc_ - #undef getarg_ - #define iargc_ NARGS - #define getarg_ GETARG -#elif defined(PETSC_HAVE_PXFGETARG_NEW) /* Cray X1 */ - #undef iargc_ - #undef getarg_ - #define iargc_ ipxfargc_ - #define getarg_ pxfgetarg_ -#endif - -#if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* Fortran 2003 */ - #undef iargc_ - #undef getarg_ - #define iargc_ petsccommandargumentcount_ - #define getarg_ petscgetcommandargument_ -#elif defined(PETSC_HAVE_BGL_IARGC) /* bgl g77 has different external & internal name mangling */ - #undef iargc_ - #undef getarg_ - #define iargc iargc_ - #define getarg getarg_ -#endif - /* The extra _ is because the f2c compiler puts an extra _ at the end if the original routine name @@ -95,28 +61,8 @@ PETSC_EXTERN void petscgetcomm_(PetscMPIInt *); /* Different Fortran compilers handle command lines in different ways */ -#if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* Fortran 2003 - same 
as 'else' case */ -PETSC_EXTERN int iargc_(void); -PETSC_EXTERN void getarg_(int *, char *, PETSC_FORTRAN_CHARLEN_T); -#elif defined(PETSC_USE_NARGS) -PETSC_EXTERN short __stdcall NARGS(void); -PETSC_EXTERN void __stdcall GETARG(short *, char *, int, short *); - -#elif defined(PETSC_HAVE_PXFGETARG_NEW) -PETSC_EXTERN int iargc_(void); -PETSC_EXTERN void getarg_(int *, char *, int *, int *, int); - -#else -PETSC_EXTERN int iargc_(void); -PETSC_EXTERN void getarg_(int *, char *, int); - /* - The Cray T3D/T3E use the PXFGETARG() function -*/ - #if defined(PETSC_HAVE_PXFGETARG) -PETSC_EXTERN void PXFGETARG(int *, _fcd, int *, int *); - #endif -#endif - +PETSC_EXTERN int petsccommandargumentcount_(void); +PETSC_EXTERN void petscgetcommandargument_(int *, char *, PETSC_FORTRAN_CHARLEN_T); PETSC_EXTERN PetscErrorCode PetscMallocAlign(size_t, PetscBool, int, const char[], const char[], void **); PETSC_EXTERN PetscErrorCode PetscFreeAlign(void *, int, const char[], const char[]); PETSC_INTERN int PetscGlobalArgc; @@ -129,24 +75,13 @@ PETSC_INTERN char **PetscGlobalArgs; PetscErrorCode PETScParseFortranArgs_Private(int *argc, char ***argv) { -#if defined(PETSC_USE_NARGS) - short i, flg; -#else - int i; -#endif + int i; int warg = 256; PetscMPIInt rank; char *p; PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank)); - if (rank == 0) { -#if defined(PETSC_HAVE_IARG_COUNT_PROGNAME) - *argc = iargc_(); -#else - /* most compilers do not count the program name for argv[0] */ - *argc = 1 + iargc_(); -#endif - } + if (rank == 0) { *argc = 1 + petsccommandargumentcount_(); } PetscCallMPI(MPI_Bcast(argc, 1, MPI_INT, 0, PETSC_COMM_WORLD)); /* PetscTrMalloc() not yet set, so don't use PetscMalloc() */ @@ -157,20 +92,7 @@ PetscErrorCode PETScParseFortranArgs_Private(int *argc, char ***argv) PetscCall(PetscMemzero((*argv)[0], (*argc) * warg * sizeof(char))); for (i = 0; i < *argc; i++) { (*argv)[i + 1] = (*argv)[i] + warg; -#if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* same as 'else' case */ - getarg_(&i, (*argv)[i], warg); -#elif defined(PETSC_HAVE_PXFGETARG_NEW) - { - char *tmp = (*argv)[i]; - int ilen; - PetscCallFortranVoidFunction(getarg_(&i, tmp, &ilen, &ierr, warg)); - tmp[ilen] = 0; - } -#elif defined(PETSC_USE_NARGS) - GETARG(&i, (*argv)[i], warg, &flg); -#else - getarg_(&i, (*argv)[i], warg); -#endif + petscgetcommandargument_(&i, (*argv)[i], warg); /* zero out garbage at end of each argument */ p = (*argv)[i] + warg - 1; while (p > (*argv)[i]) { @@ -218,10 +140,7 @@ PETSC_INTERN PetscErrorCode PetscInitFortran_Private(PetscBool readarguments, co */ PETSC_EXTERN void petscinitializef_(char *filename, char *help, PetscBool *readarguments, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len, PETSC_FORTRAN_CHARLEN_T helplen) { - int j, i; -#if defined(PETSC_USE_NARGS) - short flg; -#endif + int j, i; int flag; char name[256] = {0}; PetscMPIInt f_petsc_comm_world; @@ -229,22 +148,7 @@ PETSC_EXTERN void petscinitializef_(char *filename, char *help, PetscBool *reada *ierr = PETSC_SUCCESS; if (PetscInitializeCalled) return; i = 0; -#if defined(PETSC_HAVE_FORTRAN_GET_COMMAND_ARGUMENT) /* same as 'else' case */ - getarg_(&i, name, sizeof(name)); -#elif defined(PETSC_HAVE_PXFGETARG_NEW) - { - int ilen, sierr; - getarg_(&i, name, &ilen, &sierr, 256); - if (sierr) { - *ierr = PetscStrncpy(name, "Unknown Name", 256); - if (*ierr) return; - } else name[ilen] = 0; - } -#elif defined(PETSC_USE_NARGS) - GETARG(&i, name, 256, &flg); -#else - getarg_(&i, name, 256); -#endif + petscgetcommandargument_(&i, name, 
sizeof(name)); /* Eliminate spaces at the end of the string */ for (j = sizeof(name) - 2; j >= 0; j--) { if (name[j] != ' ') { diff --git a/src/sys/objects/ftn-custom/zstartf.c b/src/sys/objects/ftn-custom/zstartf.c index 672c9d13bb2..fe504699f28 100644 --- a/src/sys/objects/ftn-custom/zstartf.c +++ b/src/sys/objects/ftn-custom/zstartf.c @@ -19,16 +19,20 @@ PETSC_EXTERN void petscinitializefortran_(int *ierr) *ierr = PetscInitializeFortran(); } -PETSC_EXTERN void petscsetfortranbasepointers_(char *fnull_character, void *fnull_integer, void *fnull_scalar, void *fnull_double, void *fnull_real, void *fnull_truth, void (*fnull_function)(void), void *fnull_mpi_comm, PETSC_FORTRAN_CHARLEN_T len) +PETSC_EXTERN void petscsetfortranbasepointers_(char *fnull_character, void *fnull_integer, void *fnull_scalar, void *fnull_double, void *fnull_real, void *fnull_bool, void *fnull_enum, void (*fnull_function)(void), void *fnull_mpi_comm, void *fnull_integer_array, void *fnull_scalar_array, void *fnull_real_array, PETSC_FORTRAN_CHARLEN_T len) { - PETSC_NULL_CHARACTER_Fortran = fnull_character; - PETSC_NULL_INTEGER_Fortran = fnull_integer; - PETSC_NULL_SCALAR_Fortran = fnull_scalar; - PETSC_NULL_DOUBLE_Fortran = fnull_double; - PETSC_NULL_REAL_Fortran = fnull_real; - PETSC_NULL_BOOL_Fortran = fnull_truth; - PETSC_NULL_FUNCTION_Fortran = fnull_function; - PETSC_NULL_MPI_COMM_Fortran = fnull_mpi_comm; + PETSC_NULL_CHARACTER_Fortran = fnull_character; + PETSC_NULL_INTEGER_Fortran = fnull_integer; + PETSC_NULL_SCALAR_Fortran = fnull_scalar; + PETSC_NULL_DOUBLE_Fortran = fnull_double; + PETSC_NULL_REAL_Fortran = fnull_real; + PETSC_NULL_BOOL_Fortran = fnull_bool; + PETSC_NULL_ENUM_Fortran = fnull_enum; + PETSC_NULL_FUNCTION_Fortran = fnull_function; + PETSC_NULL_MPI_COMM_Fortran = fnull_mpi_comm; + PETSC_NULL_INTEGER_ARRAY_Fortran = fnull_integer_array; + PETSC_NULL_SCALAR_ARRAY_Fortran = fnull_scalar_array; + PETSC_NULL_REAL_ARRAY_Fortran = fnull_real_array; } /* diff --git a/src/sys/objects/ftn-custom/zversionf.c b/src/sys/objects/ftn-custom/zversionf.c deleted file mode 100644 index a7ea66e1c9f..00000000000 --- a/src/sys/objects/ftn-custom/zversionf.c +++ /dev/null @@ -1,34 +0,0 @@ -#include -#include - -#ifdef PETSC_HAVE_FORTRAN_CAPS - #define petscgetversion_ PETSCGETVERSION - #define petscgetversionnumber_ PETSCGETVERSIONNUMBER -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define petscgetversion_ petscgetversion - #define petscgetversionnumber_ petscgetversionnumber -#endif - -/* Definitions of Fortran Wrapper routines */ -#if defined(__cplusplus) -extern "C" { -#endif - -PETSC_EXTERN void petscgetversion_(char *version, int *ierr, PETSC_FORTRAN_CHARLEN_T len1) -{ - *ierr = PetscGetVersion(version, len1); - FIXRETURNCHAR(PETSC_TRUE, version, len1); -} - -PETSC_EXTERN void petscgetversionnumber_(PetscInt *major, PetscInt *minor, PetscInt *subminor, PetscInt *release, int *ierr) -{ - CHKFORTRANNULLINTEGER(major); - CHKFORTRANNULLINTEGER(minor); - CHKFORTRANNULLINTEGER(subminor); - CHKFORTRANNULLINTEGER(release); - *ierr = PetscGetVersionNumber(major, minor, subminor, release); -} - -#if defined(__cplusplus) -} -#endif diff --git a/src/sys/objects/gcomm.c b/src/sys/objects/gcomm.c index 6163b70713c..c12bd6a46a8 100644 --- a/src/sys/objects/gcomm.c +++ b/src/sys/objects/gcomm.c @@ -6,7 +6,7 @@ /*@C PetscObjectComm - Gets the MPI communicator for any `PetscObject` regardless of the type. - Not Collective + Not Collective, No Fortran Support Input Parameter: . 
obj - any PETSc object, for example a `Vec`, `Mat` or `KSP`. It must be diff --git a/src/sys/objects/gcookie.c b/src/sys/objects/gcookie.c index edfb08d696a..8c7ecb0fea6 100644 --- a/src/sys/objects/gcookie.c +++ b/src/sys/objects/gcookie.c @@ -28,7 +28,7 @@ PetscErrorCode PetscObjectGetClassId(PetscObject obj, PetscClassId *classid) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectGetClassName - Gets the class name for any `PetscObject` Not Collective diff --git a/src/sys/objects/gtype.c b/src/sys/objects/gtype.c index e88288b9e48..82382d1a10f 100644 --- a/src/sys/objects/gtype.c +++ b/src/sys/objects/gtype.c @@ -3,7 +3,7 @@ */ #include /*I "petscsys.h" I*/ -/*@C +/*@ PetscObjectGetType - Gets the object type of any `PetscObject`. Not Collective diff --git a/src/sys/objects/inherit.c b/src/sys/objects/inherit.c index d7ea9099b11..1dc57f9a5e2 100644 --- a/src/sys/objects/inherit.c +++ b/src/sys/objects/inherit.c @@ -200,7 +200,7 @@ PetscErrorCode PetscHeaderReset_Internal(PetscObject obj) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectCopyFortranFunctionPointers - Copy function pointers to another object Logically Collective @@ -246,7 +246,7 @@ PetscErrorCode PetscObjectCopyFortranFunctionPointers(PetscObject src, PetscObje /*@C PetscObjectSetFortranCallback - set Fortran callback function pointer and context - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + obj - object on which to set callback @@ -289,7 +289,7 @@ PetscErrorCode PetscObjectSetFortranCallback(PetscObject obj, PetscFortranCallba /*@C PetscObjectGetFortranCallback - get Fortran callback function pointer and context - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + obj - object on which to get callback @@ -383,7 +383,7 @@ PetscErrorCode PetscObjectsDump(FILE *fd, PetscBool all) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectsView - Prints the currently existing objects. Logically Collective @@ -409,7 +409,7 @@ PetscErrorCode PetscObjectsView(PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectsGetObject - Get a pointer to a named object Not Collective @@ -425,7 +425,7 @@ PetscErrorCode PetscObjectsView(PetscViewer viewer) .seealso: `PetscObject` @*/ -PetscErrorCode PetscObjectsGetObject(const char *name, PetscObject *obj, char **classname) +PetscErrorCode PetscObjectsGetObject(const char name[], PetscObject *obj, const char *classname[]) { PetscInt i; PetscObject h; @@ -447,6 +447,19 @@ PetscErrorCode PetscObjectsGetObject(const char *name, PetscObject *obj, char ** } PetscFunctionReturn(PETSC_SUCCESS); } +#else +PetscErrorCode PetscObjectsView(PetscViewer viewer) +{ + PetscFunctionBegin; + PetscFunctionReturn(PETSC_SUCCESS); +} + +PetscErrorCode PetscObjectsGetObject(const char name[], PetscObject *obj, const char *classname[]) +{ + PetscFunctionBegin; + *obj = NULL; + PetscFunctionReturn(PETSC_SUCCESS); +} #endif /*@ @@ -559,7 +569,7 @@ PetscErrorCode PetscObjectProcessOptionsHandlers(PetscObject obj, PetscOptionIte PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectDestroyOptionsHandlers - Destroys all the option handlers attached to an object Not Collective @@ -583,7 +593,7 @@ PetscErrorCode PetscObjectDestroyOptionsHandlers(PetscObject obj) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectReference - Indicates to a `PetscObject` that it is being referenced by another `PetscObject`. This increases the reference count for that object by one.
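Because the hunks above and below retag the reference-counting manual pages (`PetscObjectReference()`, `PetscObjectGetReference()`, `PetscObjectDereference()`), a short usage sketch may help; it is illustrative only and not part of the patch, and the `Vec` is just a convenient `PetscObject`:
.vb
#include <petscvec.h>

int main(int argc, char **argv)
{
  Vec      x;
  PetscInt cnt;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, 4, &x));
  PetscCall(PetscObjectReference((PetscObject)x)); /* reference count: 1 -> 2 */
  PetscCall(PetscObjectGetReference((PetscObject)x, &cnt));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "reference count %" PetscInt_FMT "\n", cnt));
  PetscCall(PetscObjectDereference((PetscObject)x)); /* reference count: 2 -> 1 */
  PetscCall(VecDestroy(&x)); /* drops the last reference, so the Vec is actually freed */
  PetscCall(PetscFinalize());
  return 0;
}
.ve
Destroy routines such as `VecDestroy()` only free the object once the count reaches zero, which is why every extra reference taken on an object must eventually be balanced by a dereference or destroy.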
@@ -609,7 +619,7 @@ PetscErrorCode PetscObjectReference(PetscObject obj) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectGetReference - Gets the current reference count for a PETSc object. Not Collective @@ -634,7 +644,7 @@ PetscErrorCode PetscObjectGetReference(PetscObject obj, PetscInt *cnt) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectDereference - Indicates to any `PetscObject` that it is being referenced by one less `PetscObject`. This decreases the reference count for that object by one. @@ -676,7 +686,7 @@ PetscErrorCode PetscObjectRemoveReference(PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectCompose - Associates another PETSc object with a given PETSc object. Not Collective @@ -726,7 +736,7 @@ PetscErrorCode PetscObjectCompose(PetscObject obj, const char name[], PetscObjec PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectQuery - Gets a PETSc object associated with a given object that was composed with `PetscObjectCompose()` Not Collective @@ -836,7 +846,7 @@ struct _p_PetscContainer { PetscContainerUserDestroyDefault - Default destroy routine for user-provided data that simply calls `PetscFree()` in the data provided with `PetscContainerSetPointer()` - Logically Collective on the `PetscContainer` containing the user data + Logically Collective on the `PetscContainer` containing the user data, No Fortran Support Input Parameter: . ctx - pointer to user-provided data @@ -855,7 +865,7 @@ PetscErrorCode PetscContainerUserDestroyDefault(void *ctx) /*@C PetscContainerGetPointer - Gets the pointer value contained in the container that was provided with `PetscContainerSetPointer()` - Not Collective + Not Collective, No Fortran Support Input Parameter: . obj - the object created with `PetscContainerCreate()` @@ -880,7 +890,7 @@ PetscErrorCode PetscContainerGetPointer(PetscContainer obj, void **ptr) /*@C PetscContainerSetPointer - Sets the pointer value contained in the container. - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + obj - the object created with `PetscContainerCreate()` @@ -903,7 +913,7 @@ PetscErrorCode PetscContainerSetPointer(PetscContainer obj, void *ptr) /*@C PetscContainerDestroy - Destroys a PETSc container object. - Collective + Collective, No Fortran Support Input Parameter: . obj - an object that was created with `PetscContainerCreate()` @@ -933,7 +943,7 @@ PetscErrorCode PetscContainerDestroy(PetscContainer *obj) /*@C PetscContainerSetUserDestroy - Sets name of the user destroy function for the data provided to the `PetscContainer` with `PetscContainerSetPointer()` - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + obj - an object that was created with `PetscContainerCreate()` @@ -959,7 +969,7 @@ PetscClassId PETSC_CONTAINER_CLASSID; /*@C PetscContainerCreate - Creates a PETSc object that has room to hold a single pointer. - Collective + Collective, No Fortran Support Input Parameter: . comm - MPI communicator that shares the object @@ -1029,3 +1039,35 @@ PetscErrorCode PetscObjectSetUp(PetscObject obj) PetscValidHeader(obj, 1); PetscFunctionReturn(PETSC_SUCCESS); } + +/*MC + PetscObjectIsNull - returns true if the given PETSc object is a null object + + Fortran only + + Synopsis: + #include + PetscBool PetscObjectIsNull(PetscObject obj) + + Logically Collective + + Input Parameters: +. obj - the PETSc object + + Level: beginner + + Example Usage: +.vb + if (PetscObjectIsNull(dm)) then + if (.not. 
PetscObjectIsNull(dm)) then +.ve + + Note: + Code such as +.vb + if (dm == PETSC_NULL_DM) then +.ve + is not allowed. + +.seealso: `PetscObject`, `PETSC_NULL_OBJECT`, `PETSC_NULL_VEC`, `PETSC_NULL_VEC_ARRAY` +M*/ diff --git a/src/sys/objects/init.c b/src/sys/objects/init.c index 2ec62927dde..6059b5af7e1 100644 --- a/src/sys/objects/init.c +++ b/src/sys/objects/init.c @@ -209,6 +209,8 @@ PetscErrorCode (*PetscExternalHelpFunction)(MPI_Comm) = NULL; PetscSetHelpVersionFunctions - Sets functions that print help and version information before the PETSc help and version information is printed. + No Fortran Support + Input Parameters: + help - the help function (may be `NULL`) - version - the version function (may be `NULL`) diff --git a/src/sys/objects/kokkos/kinit.kokkos.cxx b/src/sys/objects/kokkos/kinit.kokkos.cxx index 5147a672111..d52a06605cc 100644 --- a/src/sys/objects/kokkos/kinit.kokkos.cxx +++ b/src/sys/objects/kokkos/kinit.kokkos.cxx @@ -2,8 +2,9 @@ #include #include #include +#include -PetscBool PetscKokkosInitialized = PETSC_FALSE; +PetscBool PetscKokkosInitialized = PETSC_FALSE; // Has Kokkos been initialized (either by petsc or by users)? PetscScalar *PetscScalarPool = nullptr; PetscInt PetscScalarPoolSize = 0; @@ -13,6 +14,7 @@ PetscErrorCode PetscKokkosFinalize_Private(void) { PetscFunctionBegin; PetscCallCXX(delete PetscKokkosExecutionSpacePtr); + PetscKokkosExecutionSpacePtr = nullptr; PetscCallCXX(Kokkos::kokkos_free(PetscScalarPool)); PetscScalarPoolSize = 0; if (PetscBeganKokkos) { @@ -40,7 +42,7 @@ PetscErrorCode PetscKokkosInitializeCheck(void) auto args = Kokkos::InitArguments{}; /* use default constructor */ #endif -#if (defined(KOKKOS_ENABLE_CUDA) && PetscDefined(HAVE_CUDA)) || (defined(KOKKOS_ENABLE_HIP) && PetscDefined(HAVE_HIP)) || (defined(KOKKOS_ENABLE_SYCL) && PetscDefined(HAVE_SYCL)) +#if (defined(KOKKOS_ENABLE_CUDA) && defined(PETSC_HAVE_CUDA)) || (defined(KOKKOS_ENABLE_HIP) && defined(PETSC_HAVE_HIP)) || (defined(KOKKOS_ENABLE_SYCL) && defined(PETSC_HAVE_SYCL)) /* Kokkos does not support CUDA and HIP at the same time (but we do :)) */ PetscDevice device; PetscInt deviceId; @@ -76,17 +78,26 @@ PetscErrorCode PetscKokkosInitializeCheck(void) PetscCallCXX(Kokkos::initialize(args)); PetscBeganKokkos = PETSC_TRUE; } + if (!PetscKokkosExecutionSpacePtr) { // No matter Kokkos is init'ed by petsc or by user, we need to init PetscKokkosExecutionSpacePtr -#if defined(PETSC_HAVE_CUDA) - extern cudaStream_t PetscDefaultCudaStream; - PetscCallCXX(PetscKokkosExecutionSpacePtr = new Kokkos::DefaultExecutionSpace(PetscDefaultCudaStream)); -#elif defined(PETS_HAVE_HIP) - extern hipStream_t PetscDefaultHipStream; - PetscCallCXX(PetscKokkosExecutionSpacePtr = new Kokkos::DefaultExecutionSpace(PetscDefaultHipStream)); +#if (defined(KOKKOS_ENABLE_CUDA) && defined(PETSC_HAVE_CUDA)) || (defined(KOKKOS_ENABLE_HIP) && defined(PETSC_HAVE_HIP)) + PetscDeviceContext dctx; + PetscDeviceType dtype; + + PetscDeviceContextGetCurrentContext(&dctx); // it internally sets PetscDefaultCuda/HipStream + PetscDeviceContextGetDeviceType(dctx, &dtype); + + #if defined(PETSC_HAVE_CUDA) + if (dtype == PETSC_DEVICE_CUDA) PetscCallCXX(PetscKokkosExecutionSpacePtr = new Kokkos::DefaultExecutionSpace(PetscDefaultCudaStream)); + #elif defined(PETSC_HAVE_HIP) + if (dtype == PETSC_DEVICE_HIP) PetscCallCXX(PetscKokkosExecutionSpacePtr = new Kokkos::DefaultExecutionSpace(PetscDefaultHipStream)); + #endif #else + // In all other cases, we use Kokkos default PetscCallCXX(PetscKokkosExecutionSpacePtr = new 
Kokkos::DefaultExecutionSpace()); #endif } + if (!PetscScalarPoolSize) { // A pool for a small count of PetscScalars PetscScalarPoolSize = 1024; PetscCallCXX(PetscScalarPool = static_cast(Kokkos::kokkos_malloc(sizeof(PetscScalar) * PetscScalarPoolSize))); diff --git a/src/sys/objects/olist.c b/src/sys/objects/olist.c index b23e37a28ec..71e60cb2f96 100644 --- a/src/sys/objects/olist.c +++ b/src/sys/objects/olist.c @@ -7,6 +7,8 @@ /*@C PetscObjectListRemoveReference - Calls `PetscObjectDereference()` on an object in the list immediately but keeps a pointer to the object in the list. + No Fortran Support + Input Parameters: + fl - the object list - name - the name to use for the object @@ -48,6 +50,8 @@ PetscErrorCode PetscObjectListRemoveReference(PetscObjectList *fl, const char na /*@C PetscObjectListAdd - Adds a new object to an `PetscObjectList` + No Fortran Support + Input Parameters: + fl - the object list . name - the name to use for the object @@ -121,6 +125,8 @@ PetscErrorCode PetscObjectListAdd(PetscObjectList *fl, const char name[], PetscO /*@C PetscObjectListDestroy - Destroy a list of objects + No Fortran Support + Input Parameter: . ifl - pointer to list @@ -149,6 +155,8 @@ PetscErrorCode PetscObjectListDestroy(PetscObjectList *ifl) /*@C PetscObjectListFind - given a name, find the matching object in a list + No Fortran Support + Input Parameters: + fl - pointer to list - name - name string @@ -185,6 +193,8 @@ PetscErrorCode PetscObjectListFind(PetscObjectList fl, const char name[], PetscO /*@C PetscObjectListReverseFind - given a object, find the matching name if it exists + No Fortran Support + Input Parameters: + fl - pointer to list - obj - the PETSc object @@ -202,7 +212,7 @@ PetscErrorCode PetscObjectListFind(PetscObjectList fl, const char name[], PetscO .seealso: `PetscObjectListDestroy()`,`PetscObjectListAdd()`,`PetscObjectListDuplicate()`,`PetscObjectListFind()`, `PetscObjectList` @*/ -PetscErrorCode PetscObjectListReverseFind(PetscObjectList fl, PetscObject obj, char **name, PetscBool *skipdereference) +PetscErrorCode PetscObjectListReverseFind(PetscObjectList fl, PetscObject obj, char *name[], PetscBool *skipdereference) { PetscFunctionBegin; PetscAssertPointer(name, 3); @@ -222,6 +232,8 @@ PetscErrorCode PetscObjectListReverseFind(PetscObjectList fl, PetscObject obj, c /*@C PetscObjectListDuplicate - Creates a new list from a given object list. + No Fortran Support + Input Parameter: . fl - pointer to list diff --git a/src/sys/objects/options.c b/src/sys/objects/options.c index ea8915ca9c3..31e1c230ba4 100644 --- a/src/sys/objects/options.c +++ b/src/sys/objects/options.c @@ -266,7 +266,7 @@ PetscErrorCode PetscOptionsDestroyDefault(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsValidKey - PETSc Options database keys must begin with one or two dashes (-) followed by a letter. 
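The kinit.kokkos.cxx hunk above now obtains the current PetscDeviceContext first, so Kokkos' default execution space is constructed on PETSc's own CUDA/HIP stream instead of a fresh one. The same two queries are available to user code; a sketch, assuming the `PetscDeviceTypes` string table exposed via petscdevice.h:

.vb
#include <petscdevice.h>

static PetscErrorCode ReportDeviceBackend(void)
{
  PetscDeviceContext dctx;
  PetscDeviceType    dtype;

  PetscFunctionBegin;
  /* lazily creates the default context (and hence the default GPU stream) */
  PetscCall(PetscDeviceContextGetCurrentContext(&dctx));
  PetscCall(PetscDeviceContextGetDeviceType(dctx, &dtype));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "device backend: %s\n", PetscDeviceTypes[dtype]));
  PetscFunctionReturn(PETSC_SUCCESS);
}
.ve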
Not Collective @@ -283,7 +283,8 @@ PetscErrorCode PetscOptionsDestroyDefault(void) @*/ PetscErrorCode PetscOptionsValidKey(const char key[], PetscBool *valid) { - char *ptr; + char *ptr; + PETSC_UNUSED double d; PetscFunctionBegin; if (key) PetscAssertPointer(key, 1); @@ -293,7 +294,7 @@ PetscErrorCode PetscOptionsValidKey(const char key[], PetscBool *valid) if (key[0] != '-') PetscFunctionReturn(PETSC_SUCCESS); if (key[1] == '-') key++; if (!isalpha((int)key[1])) PetscFunctionReturn(PETSC_SUCCESS); - (void)strtod(key, &ptr); + d = strtod(key, &ptr); if (ptr != key && !(*ptr == '_' || isalnum((int)*ptr))) PetscFunctionReturn(PETSC_SUCCESS); *valid = PETSC_TRUE; PetscFunctionReturn(PETSC_SUCCESS); @@ -353,7 +354,7 @@ static PetscErrorCode PetscOptionsInsertString_Private(PetscOptions options, con PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsInsertString - Inserts options into the database from a string Logically Collective @@ -594,7 +595,7 @@ static PetscErrorCode PetscOptionsInsertFilePetsc(MPI_Comm comm, PetscOptions op PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsInsertFile - Inserts options into the database from a file. Collective @@ -922,11 +923,11 @@ static PetscBool PetscCIOption(const char *name) return found; } -/*@C +/*@ PetscOptionsView - Prints the options that have been loaded. This is useful for debugging purposes. - Logically Collective + Logically Collective, No Fortran Support Input Parameters: + options - options database, use `NULL` for default global database @@ -1031,7 +1032,7 @@ PETSC_EXTERN PetscErrorCode PetscOptionsViewError(void) return PETSC_SUCCESS; } -/*@C +/*@ PetscOptionsPrefixPush - Designate a prefix to be used by all options insertions to follow. Logically Collective @@ -1084,7 +1085,7 @@ PetscErrorCode PetscOptionsPrefixPush(PetscOptions options, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsPrefixPop - Remove the latest options prefix, see `PetscOptionsPrefixPush()` for details Logically Collective on the `MPI_Comm` used when called `PetscOptionsPrefixPush()` @@ -1109,7 +1110,7 @@ PetscErrorCode PetscOptionsPrefixPop(PetscOptions options) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsClear - Removes all options form the database leaving it empty. Logically Collective @@ -1171,7 +1172,7 @@ PetscErrorCode PetscOptionsClear(PetscOptions options) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsSetAlias - Makes a key and alias for another key Logically Collective @@ -1237,7 +1238,7 @@ PetscErrorCode PetscOptionsSetAlias(PetscOptions options, const char newname[], PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsSetValue - Sets an option name-value pair in the options database, overriding whatever is already present. @@ -1394,7 +1395,7 @@ PetscErrorCode PetscOptionsSetValue_Private(PetscOptions options, const char nam PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsClearValue - Clears an option name-value pair in the options database, overriding whatever is already present. @@ -1676,7 +1677,7 @@ PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions opti PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsReject - Generates an error if a certain option is given. Not Collective @@ -1709,7 +1710,7 @@ PetscErrorCode PetscOptionsReject(PetscOptions options, const char pre[], const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsHasHelp - Determines whether the "-help" option is in the database. 
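In the PetscOptionsValidKey hunk above, the strtod result is now stored in a PETSC_UNUSED variable, apparently to placate compilers that warn even through a (void) cast; the validity logic itself is unchanged: a token that parses as a number is a value, not a key. The distinction, sketched with illustrative option names:

.vb
#include <petscsys.h>

static PetscErrorCode CheckKeys(void)
{
  PetscBool valid;

  PetscFunctionBegin;
  PetscCall(PetscOptionsValidKey("-ksp_rtol", &valid));  /* PETSC_TRUE: dash followed by a letter */
  PetscCall(PetscOptionsValidKey("-2", &valid));         /* PETSC_FALSE: parses as the number -2  */
  PetscCall(PetscOptionsValidKey("--mat_view", &valid)); /* PETSC_TRUE: a double dash is allowed  */
  PetscFunctionReturn(PETSC_SUCCESS);
}
.ve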
Not Collective @@ -1742,7 +1743,7 @@ PetscErrorCode PetscOptionsHasHelpIntro_Internal(PetscOptions options, PetscBool PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsHasName - Determines whether a certain option is given in the database. This returns true whether the option is a number, string or Boolean, even if its value is set to false. @@ -1832,7 +1833,7 @@ PetscErrorCode PetscOptionsGetAll(PetscOptions options, char *copts[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsUsed - Indicates if PETSc has used a particular option set in the database Not Collective diff --git a/src/sys/objects/optionsyaml.c b/src/sys/objects/optionsyaml.c index 7556dc14920..0ca32787485 100644 --- a/src/sys/objects/optionsyaml.c +++ b/src/sys/objects/optionsyaml.c @@ -174,7 +174,8 @@ PetscErrorCode PetscOptionsInsertStringYAML_Private(PetscOptions options, const yaml_parser_delete(&parser); PetscFunctionReturn(PETSC_SUCCESS); } -/*@C + +/*@ PetscOptionsInsertStringYAML - Inserts YAML-formatted options into the options database from a string Logically Collective @@ -199,7 +200,7 @@ PetscErrorCode PetscOptionsInsertStringYAML(PetscOptions options, const char in_ PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscOptionsInsertFileYAML - Insert a YAML-formatted file in the options database Collective diff --git a/src/sys/objects/package.c b/src/sys/objects/package.c index 44790b91687..6ad8af2c2b0 100644 --- a/src/sys/objects/package.c +++ b/src/sys/objects/package.c @@ -1,6 +1,6 @@ #include /*I "petscsys.h" I*/ -/*@C +/*@ PetscHasExternalPackage - Determine whether PETSc has been configured with the given package Not Collective diff --git a/src/sys/objects/pgname.c b/src/sys/objects/pgname.c index 53bfba2ae74..7466ee151f6 100644 --- a/src/sys/objects/pgname.c +++ b/src/sys/objects/pgname.c @@ -1,6 +1,6 @@ #include /*I "petscsys.h" I*/ -/*@C +/*@ PetscObjectGetName - Gets a string name associated with a PETSc object. Not Collective unless `obj` has not yet been named @@ -8,7 +8,7 @@ Input Parameters: + obj - the PETSc variable. It must be cast with a (`PetscObject`), for example, `PetscObjectGetName`((`PetscObject`)mat,&name); -- name - the name associated with `obj` +- name - the name associated with `obj`, do not free Level: intermediate diff --git a/src/sys/objects/pinit.c b/src/sys/objects/pinit.c index 0a5e5c147d9..eddda59e29f 100644 --- a/src/sys/objects/pinit.c +++ b/src/sys/objects/pinit.c @@ -84,7 +84,7 @@ extern PetscInt PetscNumBLASThreads; /*@C PetscInitializeNoPointers - Calls PetscInitialize() from C/C++ without the pointers to argc and args - Collective + Collective, No Fortran Support Input Parameters: + argc - number of args @@ -96,7 +96,7 @@ extern PetscInt PetscNumBLASThreads; Notes: this is called only by the PETSc Julia interface. Even though it might start MPI it sets the flag to - indicate that it did NOT start MPI so that the PetscFinalize() does not end MPI, thus allowing PetscInitialize() to + indicate that it did NOT start MPI so that the `PetscFinalize()` does not end MPI, thus allowing `PetscInitialize()` to be called multiple times from Julia without the problem of trying to initialize MPI more than once. Developer Notes: @@ -448,10 +448,10 @@ PetscErrorCode PetscCitationsInitialize(void) and Jose~E. Roman and Karl Rupp and Patrick Sanan and Jason Sarich and Barry~F. 
Smith\n\ and Stefano Zampini and Hong Zhang and Hong Zhang and Junchao Zhang},\n\ Title = {{PETSc/TAO} Users Manual},\n\ - Number = {ANL-21/39 - Revision 3.20},\n\ + Number = {ANL-21/39 - Revision 3.21},\n\ Doi = {10.2172/2205494},\n\ Institution = {Argonne National Laboratory},\n\ - Year = {2023}\n}\n", + Year = {2024}\n}\n", NULL)); PetscCall(PetscCitationsRegister("@InProceedings{petsc-efficient,\n\ @@ -501,7 +501,7 @@ PetscErrorCode PetscGetProgramName(char name[], size_t len) PetscGetArgs - Allows you to access the raw command line arguments anywhere after PetscInitialize() is called but before `PetscFinalize()`. - Not Collective + Not Collective, No Fortran Support Output Parameters: + argc - count of number of command line arguments @@ -530,7 +530,7 @@ PetscErrorCode PetscGetArgs(int *argc, char ***args) PetscGetArguments - Allows you to access the command line arguments anywhere after `PetscInitialize()` is called but before `PetscFinalize()`. - Not Collective + Not Collective, No Fortran Support Output Parameter: . args - the command line arguments @@ -561,7 +561,7 @@ PetscErrorCode PetscGetArguments(char ***args) /*@C PetscFreeArguments - Frees the memory obtained with `PetscGetArguments()` - Not Collective + Not Collective, No Fortran Support Output Parameter: . args - the command line arguments @@ -814,7 +814,7 @@ PETSC_INTERN PetscErrorCode PetscInitialize_Common(const char *prog, const char } #endif /* check for Open MPI version, it is not part of the MPI ABI initiative (is it part of another initiative that needs to be handled?) */ - #elif defined(OMPI_MAJOR_VERSION) + #elif defined(PETSC_HAVE_OPENMPI) { char *ver, bs[MPI_MAX_LIBRARY_VERSION_STRING], *bsf; PetscBool flg = PETSC_FALSE; @@ -825,14 +825,14 @@ PETSC_INTERN PetscErrorCode PetscInitialize_Common(const char *prog, const char for (i = 0; i < PSTRSZ; i++) { PetscCall(PetscStrstr(mpilibraryversion, ompistr1[i], &ver)); if (ver) { - PetscCall(PetscSNPrintf(bs, MPI_MAX_LIBRARY_VERSION_STRING, "%s%d.%d", ompistr2[i], OMPI_MAJOR_VERSION, OMPI_MINOR_VERSION)); + PetscCall(PetscSNPrintf(bs, MPI_MAX_LIBRARY_VERSION_STRING, "%s%d.%d", ompistr2[i], PETSC_PKG_OPENMPI_VERSION_MAJOR, PETSC_PKG_OPENMPI_VERSION_MINOR)); PetscCall(PetscStrstr(ver, bs, &bsf)); if (bsf) flg = PETSC_TRUE; break; } } if (!flg) { - PetscCall(PetscInfo(NULL, "PETSc warning --- Open MPI library version \n%s does not match what PETSc was compiled with %d.%d.\n", mpilibraryversion, OMPI_MAJOR_VERSION, OMPI_MINOR_VERSION)); + PetscCall(PetscInfo(NULL, "PETSc warning --- Open MPI library version \n%s does not match what PETSc was compiled with %d.%d.\n", mpilibraryversion, PETSC_PKG_OPENMPI_VERSION_MAJOR, PETSC_PKG_OPENMPI_VERSION_MINOR)); flg = PETSC_TRUE; } } @@ -1454,7 +1454,7 @@ PetscErrorCode PetscFinalize(void) PetscCall(PetscOptionsGetBool(NULL, NULL, "-x_virtual", &flg1, NULL)); if (flg1) { /* this is a crude hack, but better than nothing */ - PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, "pkill -9 Xvfb", "r", NULL)); + PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, "pkill -15 Xvfb", "r", NULL)); } #endif @@ -1479,6 +1479,14 @@ PetscErrorCode PetscFinalize(void) if (flg1) PetscCall(PetscLogMPEDump(mname[0] ? 
mname : NULL)); } +#if defined(PETSC_HAVE_KOKKOS) + // Free petsc/kokkos stuff before the potentially non-null petsc default gpu stream is destroyed by PetscObjectRegisterDestroyAll + if (PetscKokkosInitialized) { + PetscCall(PetscKokkosFinalize_Private()); + PetscKokkosInitialized = PETSC_FALSE; + } +#endif + // Free all objects registered with PetscObjectRegisterDestroy() such as PETSC_VIEWER_XXX_(). PetscCall(PetscObjectRegisterDestroyAll()); @@ -1511,13 +1519,7 @@ PetscErrorCode PetscFinalize(void) PetscCall(PetscOptionsHasName(NULL, NULL, "-objects_dump", &flg1)); PetscCall(PetscOptionsGetBool(NULL, NULL, "-options_view", &flg2, NULL)); - if (flg2) { - PetscViewer viewer; - PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &viewer)); - PetscCall(PetscViewerSetType(viewer, PETSCVIEWERASCII)); - PetscCall(PetscOptionsView(NULL, viewer)); - PetscCall(PetscViewerDestroy(&viewer)); - } + if (flg2) { PetscCall(PetscOptionsView(NULL, PETSC_VIEWER_STDOUT_WORLD)); } /* to prevent PETSc -options_left from warning */ PetscCall(PetscOptionsHasName(NULL, NULL, "-nox", &flg1)); @@ -1528,11 +1530,7 @@ PetscErrorCode PetscFinalize(void) if (!flg1) flg3 = PETSC_TRUE; if (flg3) { if (!flg2 && flg1) { /* have not yet printed the options */ - PetscViewer viewer; - PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &viewer)); - PetscCall(PetscViewerSetType(viewer, PETSCVIEWERASCII)); - PetscCall(PetscOptionsView(NULL, viewer)); - PetscCall(PetscViewerDestroy(&viewer)); + PetscCall(PetscOptionsView(NULL, PETSC_VIEWER_STDOUT_WORLD)); } PetscCall(PetscOptionsAllUsed(NULL, &nopt)); if (nopt) { @@ -1658,13 +1656,6 @@ PetscErrorCode PetscFinalize(void) PetscGlobalArgc = 0; PetscGlobalArgs = NULL; -#if defined(PETSC_HAVE_KOKKOS) - if (PetscKokkosInitialized) { - PetscCall(PetscKokkosFinalize_Private()); - PetscKokkosInitialized = PETSC_FALSE; - } -#endif - #if defined(PETSC_HAVE_NVSHMEM) if (PetscBeganNvshmem) { PetscCall(PetscNvshmemFinalize()); diff --git a/src/sys/objects/pname.c b/src/sys/objects/pname.c index aec6ee7d1b6..e7d44d4fe1b 100644 --- a/src/sys/objects/pname.c +++ b/src/sys/objects/pname.c @@ -1,7 +1,7 @@ #include /*I "petscsys.h" I*/ #include -/*@C +/*@ PetscObjectSetName - Sets a string name for a PETSc object. Not Collective @@ -27,7 +27,7 @@ PetscErrorCode PetscObjectSetName(PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectPrintClassNamePrefixType - used in the `XXXView()` methods to display information about the class, name, prefix and type of an object Input Parameters: @@ -76,7 +76,7 @@ PetscErrorCode PetscObjectPrintClassNamePrefixType(PetscObject obj, PetscViewer PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectName - Gives `obj` a name if it does not have one Collective diff --git a/src/sys/objects/prefix.c b/src/sys/objects/prefix.c index 02ddfe0fa4f..bc814dfc580 100644 --- a/src/sys/objects/prefix.c +++ b/src/sys/objects/prefix.c @@ -3,7 +3,7 @@ */ #include /*I "petscsys.h" I*/ -/*@C +/*@ PetscObjectGetOptions - Gets the options database used by the object that has been set with `PetscObjectSetOptions()` Collective @@ -33,7 +33,7 @@ PetscErrorCode PetscObjectGetOptions(PetscObject obj, PetscOptions *options) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectSetOptions - Sets the options database used by the object. Call immediately after creating the object. 
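Two behavioral points in the pinit.c hunks above: the Kokkos teardown moves ahead of PetscObjectRegisterDestroyAll so the (possibly non-null) default GPU stream still exists while Kokkos frees its resources, and the create/set-type/view/destroy sequence collapses to the predefined world viewer. The viewer simplification applies equally in user code:

.vb
#include <petscviewer.h>

/* before: four calls to print the options database */
PetscViewer viewer;
PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &viewer));
PetscCall(PetscViewerSetType(viewer, PETSCVIEWERASCII));
PetscCall(PetscOptionsView(NULL, viewer));
PetscCall(PetscViewerDestroy(&viewer));

/* after: one call on the managed standard-output viewer */
PetscCall(PetscOptionsView(NULL, PETSC_VIEWER_STDOUT_WORLD));
.ve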
Collective @@ -61,7 +61,7 @@ PetscErrorCode PetscObjectSetOptions(PetscObject obj, PetscOptions options) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectSetOptionsPrefix - Sets the prefix used for searching for all options for the given object in the database. @@ -96,7 +96,7 @@ PetscErrorCode PetscObjectSetOptionsPrefix(PetscObject obj, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectAppendOptionsPrefix - Appends to the prefix used for searching for options for the given object in the database. Input Parameters: @@ -134,7 +134,7 @@ PetscErrorCode PetscObjectAppendOptionsPrefix(PetscObject obj, const char prefix PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectGetOptionsPrefix - Gets the prefix of the `PetscObject` used for searching in the options database Input Parameter: @@ -157,7 +157,7 @@ PetscErrorCode PetscObjectGetOptionsPrefix(PetscObject obj, const char *prefix[] PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectPrependOptionsPrefix - Sets the prefix used for searching for options of for this object in the database. Input Parameters: diff --git a/src/sys/objects/ptype.c b/src/sys/objects/ptype.c index 2899400c2e6..43dccb1070e 100644 --- a/src/sys/objects/ptype.c +++ b/src/sys/objects/ptype.c @@ -166,7 +166,7 @@ PetscErrorCode PetscDataTypeGetSize(PetscDataType ptype, size_t *size) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscDataTypeFromString - Gets the enum value of a PETSc datatype represented as a string Not Collective @@ -182,7 +182,7 @@ PetscErrorCode PetscDataTypeGetSize(PetscDataType ptype, size_t *size) .seealso: `PetscDataType`, `PetscDataTypeToMPIDataType()`, `PetscDataTypeGetSize()` @*/ -PetscErrorCode PetscDataTypeFromString(const char *name, PetscDataType *ptype, PetscBool *found) +PetscErrorCode PetscDataTypeFromString(const char name[], PetscDataType *ptype, PetscBool *found) { PetscFunctionBegin; PetscCall(PetscEnumFind(PetscDataTypes, name, (PetscEnum *)ptype, found)); diff --git a/src/sys/objects/state.c b/src/sys/objects/state.c index 29cc7fff2d7..6b8e5e20048 100644 --- a/src/sys/objects/state.c +++ b/src/sys/objects/state.c @@ -3,7 +3,7 @@ */ #include /*I "petscsys.h" I*/ -/*@C +/*@ PetscObjectStateGet - Gets the state of any `PetscObject`, regardless of the type. @@ -36,7 +36,7 @@ PetscErrorCode PetscObjectStateGet(PetscObject obj, PetscObjectState *state) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscObjectStateSet - Sets the state of any `PetscObject`, regardless of the type. diff --git a/src/sys/objects/subcomm.c b/src/sys/objects/subcomm.c index a1073fae9fb..6566c526c00 100644 --- a/src/sys/objects/subcomm.c +++ b/src/sys/objects/subcomm.c @@ -56,7 +56,7 @@ PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSubcommSetOptionsPrefix - Sets the prefix used for searching for options in the options database for this object Logically Collective @@ -82,7 +82,7 @@ PetscErrorCode PetscSubcommSetOptionsPrefix(PetscSubcomm psubcomm, const char pr PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSubcommView - Views a `PetscSubcomm` Collective diff --git a/src/sys/objects/tagm.c b/src/sys/objects/tagm.c index 198fc8eb331..1724499c4dd 100644 --- a/src/sys/objects/tagm.c +++ b/src/sys/objects/tagm.c @@ -10,7 +10,7 @@ */ -/*@C +/*@ PetscObjectGetNewTag - Gets a unique new tag from a PETSc object. All processors that share the object MUST call this routine EXACTLY the same number of times. 
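The prefix.c hunks above only demote the manpage markers, but the mechanism they document is easy to show: prefixes compose left to right, so nested objects can be addressed individually from the command line. A sketch; the `outer_`/`inner_` names are illustrative:

.vb
#include <petscksp.h>

KSP ksp;
PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
PetscCall(PetscObjectSetOptionsPrefix((PetscObject)ksp, "outer_"));
PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)ksp, "inner_"));
PetscCall(KSPSetFromOptions(ksp)); /* this solver now answers to -outer_inner_ksp_rtol <tol> etc. */
.ve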
This tag should only be used with the current objects diff --git a/src/sys/objects/version.c b/src/sys/objects/version.c index 4974cf487f3..7c374962d57 100644 --- a/src/sys/objects/version.c +++ b/src/sys/objects/version.c @@ -29,7 +29,7 @@ PetscErrorCode PetscGetVersion(char version[], size_t len) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscGetVersionNumber - Gets the PETSc version information from the library Not Collective diff --git a/src/sys/python/ftn-custom/makefile b/src/sys/python/ftn-custom/makefile deleted file mode 100644 index 9964de29350..00000000000 --- a/src/sys/python/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/python/ftn-custom/zpythonf.c b/src/sys/python/ftn-custom/zpythonf.c deleted file mode 100644 index 1506e1089a9..00000000000 --- a/src/sys/python/ftn-custom/zpythonf.c +++ /dev/null @@ -1,25 +0,0 @@ -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscpythoninitialize_ PETSCPYTHONINITIALIZE - #define petscpythonfinalize_ PETSCPYTHONFINALIZE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscpythoninitialize_ petscpythoninitialize - #define petscpythonfinalize_ petscpythonfinalize -#endif - -PETSC_EXTERN void petscpythoninitialize_(char *n1, char *n2, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T l1, PETSC_FORTRAN_CHARLEN_T l2) -{ - char *t1, *t2; - FIXCHAR(n1, l1, t1); - FIXCHAR(n2, l2, t2); - *ierr = PetscPythonInitialize(t1, t2); - if (*ierr) return; - FREECHAR(n1, t1); - FREECHAR(n2, t2); -} - -PETSC_EXTERN void petscpythonfinalize_(PetscErrorCode *ierr) -{ - *ierr = PetscPythonFinalize(); -} diff --git a/src/sys/python/pythonsys.c b/src/sys/python/pythonsys.c index 101ebde4965..b72a216a440 100644 --- a/src/sys/python/pythonsys.c +++ b/src/sys/python/pythonsys.c @@ -69,7 +69,7 @@ static PetscErrorCode PetscPythonFindLibrary(const char pythonexe[], char python PetscCall(PetscPythonFindLibraryName(pythonexe, cmdlines[i], pythonlib, pl, &found)); if (found) break; } - PetscCall(PetscInfo(NULL, "Python library %s found %d\n", pythonlib, found)); + PetscCall(PetscInfo(NULL, "Python library %s found %d\n", pythonlib, found)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -142,7 +142,7 @@ static char PetscPythonExe[PETSC_MAX_PATH_LEN] = {0}; static char PetscPythonLib[PETSC_MAX_PATH_LEN] = {0}; static PetscBool PetscBeganPython = PETSC_FALSE; -/*@C +/*@ PetscPythonFinalize - Finalize PETSc for use with Python. Level: intermediate @@ -159,7 +159,7 @@ PetscErrorCode PetscPythonFinalize(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscPythonInitialize - Initialize Python for use with PETSc and import petsc4py. Input Parameters: @@ -199,11 +199,11 @@ PetscErrorCode PetscPythonInitialize(const char pyexe[], const char pylib[]) char path[PETSC_MAX_PATH_LEN] = {0}; /* initialize Python. Py_InitializeEx() prints an error and EXITS the program if it is not successful! 
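The pythonsys.c hunks that follow wrap Py_InitializeEx in PetscStackCallExternalVoid and bracket the petsc4py import with PetscFPTrapPush/PetscFPTrapPop, so floating-point traps and error-stack entries from foreign code are handled deliberately rather than by accident. The same guard pattern works for any external library; `external_init` is a stand-in name:

.vb
#include <petscsys.h>

extern void external_init(void); /* any third-party entry point */

PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));                /* disable FP traps around foreign code */
PetscStackCallExternalVoid("external_init", external_init()); /* record the call on the PETSc stack   */
PetscCall(PetscFPTrapPop());                                  /* restore the previous trap mode       */
.ve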
*/ - PetscCall(PetscInfo(NULL, "Calling Py_InitializeEx(0);\n")); - Py_InitializeEx(0); /* 0: do not install signal handlers */ - PetscCall(PetscInfo(NULL, "Py_InitializeEx(0) called successfully;\n")); + PetscCall(PetscInfo(NULL, "Calling Py_InitializeEx(0)\n")); + PetscStackCallExternalVoid("Py_InitializeEx", Py_InitializeEx(0)); /* 0: do not install signal handlers */ + PetscCall(PetscInfo(NULL, "Py_InitializeEx(0) called successfully\n")); - /* build 'sys.argv' list */ + /* build 'sys.argv' list */ py_version = Py_GetVersion(); if (py_version[0] == '2') { int argc = 0; @@ -235,15 +235,18 @@ PetscErrorCode PetscPythonInitialize(const char pyexe[], const char pylib[]) registered = PETSC_TRUE; } PetscBeganPython = PETSC_TRUE; - PetscCall(PetscInfo(NULL, "Python initialize completed.\n")); + PetscCall(PetscInfo(NULL, "Python initialize completed\n")); } /* import 'petsc4py.PETSc' module */ - module = PyImport_ImportModule("petsc4py.PETSc"); + PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); + PetscStackCallExternalVoid("PyImport_ImportModule", module = PyImport_ImportModule("petsc4py.PETSc")); + PetscCall(PetscFPTrapPop()); if (module) { - PetscCall(PetscInfo(NULL, "Python: successfully imported module 'petsc4py.PETSc'\n")); + PetscCall(PetscInfo(NULL, "Python: successfully imported module 'petsc4py.PETSc'\n")); Py_DecRef(module); module = NULL; } else { + PetscCall(PetscInfo(NULL, "Python: error when importing module 'petsc4py.PETSc'\n")); PetscCall(PetscPythonPrintError()); SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Python: could not import module 'petsc4py.PETSc', perhaps your PYTHONPATH does not contain it"); } @@ -274,7 +277,7 @@ PetscErrorCode PetscPythonPrintError(void) PETSC_EXTERN PetscErrorCode (*PetscPythonMonitorSet_C)(PetscObject, const char[]); PetscErrorCode (*PetscPythonMonitorSet_C)(PetscObject, const char[]) = NULL; -/*@C +/*@ PetscPythonMonitorSet - Set a Python monitor for a `PetscObject` Level: developer diff --git a/src/sys/tests/ex13f.F90 b/src/sys/tests/ex13f.F90 index ec55309fb6d..7e3f7298129 100644 --- a/src/sys/tests/ex13f.F90 +++ b/src/sys/tests/ex13f.F90 @@ -28,7 +28,10 @@ program main ! !/*TEST ! +! build: +! requires: defined(PETSC_HAVE_FORTRAN_TYPE_STAR) +! ! test: -! suffix: 0 +! suffix: 0 ! !TEST*/ diff --git a/src/sys/tests/ex1f.F90 b/src/sys/tests/ex1f.F90 index 3599c61950d..826524b68b1 100644 --- a/src/sys/tests/ex1f.F90 +++ b/src/sys/tests/ex1f.F90 @@ -9,7 +9,6 @@ subroutine GenerateErr(line,ierr) integer line call PetscError(PETSC_COMM_SELF,1,PETSC_ERROR_INITIAL,'My error message') - return end subroutine MyErrHandler(comm,line,fun,file,n,p,mess,ctx,ierr) @@ -22,7 +21,6 @@ subroutine MyErrHandler(comm,line,fun,file,n,p,mess,ctx,ierr) write(6,*) 'My error handler ',mess call flush(6) - return end program main @@ -48,6 +46,6 @@ program main ! ! test: ! args: -error_output_stdout -! filter:Error: strings | grep -E "(My error handler|Operating system error: Cannot allocate memory)" | wc -l +! filter:Error: grep -E "(My error handler|Operating system error: Cannot allocate memory)" | wc -l ! !TEST*/ diff --git a/src/sys/tests/ex36f.F90 b/src/sys/tests/ex36f.F90 index fa6daa08da2..75a55c3f5c4 100644 --- a/src/sys/tests/ex36f.F90 +++ b/src/sys/tests/ex36f.F90 @@ -37,7 +37,10 @@ program main ! !/*TEST ! +! build: +! requires: defined(PETSC_HAVE_FORTRAN_TYPE_STAR) +! ! test: -! requires: !complex +! requires: !complex ! 
!TEST*/ diff --git a/src/sys/tests/ex49f.F90 b/src/sys/tests/ex49f.F90 index 99216d074b1..c6cf8aa6083 100644 --- a/src/sys/tests/ex49f.F90 +++ b/src/sys/tests/ex49f.F90 @@ -23,7 +23,6 @@ subroutine CompareIntegers(a,b,ctx,res) else res = 1 end if - return end subroutine CompareIntegers end module ex49fmodule diff --git a/src/sys/tests/linter/output/testValidFunctionDocStrings.out b/src/sys/tests/linter/output/testValidFunctionDocStrings.out index 786c237fead..4d0423a6ef8 100644 --- a/src/sys/tests/linter/output/testValidFunctionDocStrings.out +++ b/src/sys/tests/linter/output/testValidFunctionDocStrings.out @@ -309,12 +309,6 @@ 124: testCustomFortranInterfaceDocString - Lorem ipsum dolor sit amet, consectetur adipiscing elit 125: -./src/sys/tests/linter/testValidFunctionDocStrings.c:134:52 Note: due to char pointer 'string' of type 'char *******' - 133: @*/ -> 134: PetscErrorCode testCustomFortranInterfaceDocString(char *******string, PetscErrorCode (*function_ptr)(PetscInt)) - ^^^^^^^^^^^^^^^^^^ - 135: { - ./src/sys/tests/linter/testValidFunctionDocStrings.c:134:72 Note: due to function pointer 'function_ptr' of type 'PetscErrorCode (*)(PetscInt)' (a.k.a. 'int (*)(int)') 133: @*/ > 134: PetscErrorCode testCustomFortranInterfaceDocString(char *******string, PetscErrorCode (*function_ptr)(PetscInt)) diff --git a/src/sys/time/ftn-custom/makefile b/src/sys/time/ftn-custom/makefile deleted file mode 100644 index 9964de29350..00000000000 --- a/src/sys/time/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/sys/time/ftn-custom/zptimef.c b/src/sys/time/ftn-custom/zptimef.c deleted file mode 100644 index 655b883d18b..00000000000 --- a/src/sys/time/ftn-custom/zptimef.c +++ /dev/null @@ -1,13 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petsctime_ PETSCTIME -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE) - #define petsctime_ petsctime -#endif - -PETSC_EXTERN void petsctime_(PetscLogDouble *t, int *__ierr) -{ - *__ierr = PetscTime(t); -} diff --git a/src/sys/tutorials/ex17f.F90 b/src/sys/tutorials/ex17f.F90 index 7cb06555310..c2af983c22b 100644 --- a/src/sys/tutorials/ex17f.F90 +++ b/src/sys/tutorials/ex17f.F90 @@ -6,13 +6,13 @@ program main implicit none PetscErrorCode :: ierr - PetscInt :: major,minor,subminor + PetscInt :: major,minor,subminor,release character(len=PETSC_MAX_PATH_LEN) :: outputString ! Every PETSc routine should begin with the PetscInitialize() routine. 
PetscCallA(PetscInitialize(ierr)) - PetscCallA(PetscGetVersionNumber(major,minor,subminor,PETSC_NULL_INTEGER,ierr)) + PetscCallA(PetscGetVersionNumber(major,minor,subminor,release,ierr)) if (major /= PETSC_VERSION_MAJOR) then write(outputString,*)'Library major',major,'does not equal include',PETSC_VERSION_MAJOR diff --git a/src/sys/utils/ftn-custom/zarchf.c b/src/sys/utils/ftn-custom/zarchf.c index 558a1f9a528..9c0aefbaa3a 100644 --- a/src/sys/utils/ftn-custom/zarchf.c +++ b/src/sys/utils/ftn-custom/zarchf.c @@ -2,10 +2,8 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) #define petscgetarchtype_ PETSCGETARCHTYPE - #define petscbarrier_ PETSCBARRIER #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define petscgetarchtype_ petscgetarchtype - #define petscbarrier_ petscbarrier #endif PETSC_EXTERN void petscgetarchtype_(char *str, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) @@ -17,8 +15,3 @@ PETSC_EXTERN void petscgetarchtype_(char *str, PetscErrorCode *ierr, PETSC_FORTR *ierr = PetscGetArchType(tstr, tlen); FIXRETURNCHAR(PETSC_TRUE, str, len); } - -PETSC_EXTERN void petscbarrier_(PetscObject *obj, int *ierr) -{ - *ierr = PetscBarrier(*obj); -} diff --git a/src/sys/utils/ftn-kernels/fcopy.F90 b/src/sys/utils/ftn-kernels/fcopy.F90 index 2ec9b15eb01..ee192b2f8eb 100644 --- a/src/sys/utils/ftn-kernels/fcopy.F90 +++ b/src/sys/utils/ftn-kernels/fcopy.F90 @@ -14,7 +14,6 @@ subroutine FortranCopy(n,x,y) do 10,i=1,n y(i) = x(i) 10 continue - return end subroutine FortranZero(n,x) @@ -26,5 +25,4 @@ subroutine FortranZero(n,x) do 10,i=1,n x(i) = 0.0 10 continue - return end diff --git a/src/sys/utils/mathfit.c b/src/sys/utils/mathfit.c index a216141fc33..898cc563e7b 100644 --- a/src/sys/utils/mathfit.c +++ b/src/sys/utils/mathfit.c @@ -1,7 +1,7 @@ #include #include -/*@C +/*@ PetscLinearRegression - Gives the best least-squares linear fit to some x-y data points Input Parameters: diff --git a/src/sys/utils/mathinf.c b/src/sys/utils/mathinf.c index 8e948f8895e..17295c45ebb 100644 --- a/src/sys/utils/mathinf.c +++ b/src/sys/utils/mathinf.c @@ -4,19 +4,19 @@ #include /*@C - PetscIsNormalReal - Returns `PETSC_TRUE` if the input value satisfies `isnormal()` + PetscIsNormalReal - Returns `PETSC_TRUE` if the input value satisfies `isnormal()` - Input Parameter: -. a - the `PetscReal` Value + Input Parameter: +. a - the `PetscReal` Value - Level: beginner + Level: beginner - Developer Notes: - Uses the C99 standard `isnormal()` on systems where they exist. + Developer Notes: + Uses the C99 standard `isnormal()` on systems where they exist. - Uses `isnormalq()` with `__float128` + Uses `isnormalq()` with `__float128` - Otherwise always returns true + Otherwise always returns true .seealso: `PetscIsInfReal()`, `PetscIsNanReal()` @*/ @@ -37,18 +37,24 @@ PetscBool PetscIsNormalReal(PetscReal a) } #endif +#if defined(PETSC_HAVE_NO_FINITE_MATH_ONLY) + #define PETSC_FORCE_NO_FINITE_MATH_ONLY __attribute__((optimize("no-finite-math-only"))) +#else + #define PETSC_FORCE_NO_FINITE_MATH_ONLY +#endif + /*@C - PetscIsInfReal - Returns whether the `PetscReal` input is an infinity value. + PetscIsInfReal - Returns whether the `PetscReal` input is an infinity value. - Input Parameter: -. a - the floating point number + Input Parameter: +. a - the floating point number - Level: beginner + Level: beginner - Developer Notes: - Uses the C99 standard `isinf()` on systems where it exists. + Developer Notes: + Uses the C99 standard `isinf()` on systems where it exists. 
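Background for the PETSC_FORCE_NO_FINITE_MATH_ONLY macro introduced above and applied to PetscIsInfReal/PetscIsNanReal below: under -ffinite-math-only (implied by -ffast-math) the compiler may assume no Inf or NaN ever occurs and fold isinf()/isnan() to false, silently disabling these checks. The per-function attribute re-enables them on compilers that support optimize attributes; the pattern, restated:

.vb
#include <math.h>
#include <petscsys.h>

#if defined(PETSC_HAVE_NO_FINITE_MATH_ONLY)
  #define PETSC_FORCE_NO_FINITE_MATH_ONLY __attribute__((optimize("no-finite-math-only")))
#else
  #define PETSC_FORCE_NO_FINITE_MATH_ONLY
#endif

/* without the attribute, -ffinite-math-only may constant-fold this to PETSC_FALSE */
PETSC_FORCE_NO_FINITE_MATH_ONLY PetscBool PetscIsNanReal(PetscReal a)
{
  return isnan(a) ? PETSC_TRUE : PETSC_FALSE;
}
.ve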
- Otherwise uses (a && a/2 == a), note that some optimizing compilers compile out this form, thus removing the check. + Otherwise uses (a && a/2 == a), note that some optimizing compilers compile out this form, thus removing the check. .seealso: `PetscIsNormalReal()`, `PetscIsNanReal()` @*/ @@ -58,7 +64,7 @@ PetscBool PetscIsInfReal(PetscReal a) return isinfq(a) ? PETSC_TRUE : PETSC_FALSE; } #elif defined(PETSC_HAVE_ISINF) -PetscBool PetscIsInfReal(PetscReal a) +PETSC_FORCE_NO_FINITE_MATH_ONLY PetscBool PetscIsInfReal(PetscReal a) { return isinf(a) ? PETSC_TRUE : PETSC_FALSE; } @@ -81,18 +87,18 @@ PetscBool PetscIsInfReal(PetscReal a) #endif /*@C - PetscIsNanReal - Returns whether the `PetscReal` input is a Not-a-Number (NaN) value. + PetscIsNanReal - Returns whether the `PetscReal` input is a Not-a-Number (NaN) value. - Input Parameter: -. a - the floating point number + Input Parameter: +. a - the floating point number - Level: beginner + Level: beginner - Developer Notes: - Uses the C99 standard `isnan()` on systems where it exists. + Developer Notes: + Uses the C99 standard `isnan()` on systems where it exists. - Otherwise uses (a != a), note that some optimizing compilers compile - out this form, thus removing the check. + Otherwise uses (a != a), note that some optimizing compilers compile + out this form, thus removing the check. .seealso: `PetscIsNormalReal()`, `PetscIsInfReal()` @*/ @@ -102,7 +108,7 @@ PetscBool PetscIsNanReal(PetscReal a) return isnanq(a) ? PETSC_TRUE : PETSC_FALSE; } #elif defined(PETSC_HAVE_ISNAN) -PetscBool PetscIsNanReal(PetscReal a) +PETSC_FORCE_NO_FINITE_MATH_ONLY PetscBool PetscIsNanReal(PetscReal a) { return isnan(a) ? PETSC_TRUE : PETSC_FALSE; } diff --git a/src/sys/utils/memc.c b/src/sys/utils/memc.c index 2045a7b77d8..9ef9e6395a6 100644 --- a/src/sys/utils/memc.c +++ b/src/sys/utils/memc.c @@ -51,20 +51,24 @@ PetscErrorCode PetscMemcmp(const void *str1, const void *str2, size_t len, Petsc #if defined(PETSC_HAVE_HWLOC) #include #include +#endif -/*@C - PetscProcessPlacementView - display the MPI rank placement by core +/*@ + PetscProcessPlacementView - display the MPI rank placement by core Input Parameter: -. viewer - `PETSCVIEWERASCII` to display the results on +. 
viewer - `PETSCVIEWERASCII` to display the results on Level: intermediate Note: - Requires that PETSc be installed with hwloc, for example using --download-hwloc + Requires that PETSc be installed with hwloc, for example using `--download-hwloc` + +.seealso: `PetscInitialize()` @*/ PetscErrorCode PetscProcessPlacementView(PetscViewer viewer) { +#if defined(PETSC_HAVE_HWLOC) PetscBool isascii; PetscMPIInt rank; hwloc_bitmap_t set; @@ -86,6 +90,9 @@ PetscErrorCode PetscProcessPlacementView(PetscViewer viewer) PetscCall(PetscViewerFlush(viewer)); hwloc_bitmap_free(set); hwloc_topology_destroy(topology); +#else + PetscFunctionBegin; + SETERRQ(PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Requires PETSc be configured with --with-hwloc or --download-hwloc"); +#endif PetscFunctionReturn(PETSC_SUCCESS); } -#endif diff --git a/src/sys/utils/mpimesg.c b/src/sys/utils/mpimesg.c index 0420c670c8c..9666e9faa31 100644 --- a/src/sys/utils/mpimesg.c +++ b/src/sys/utils/mpimesg.c @@ -4,7 +4,7 @@ /*@C PetscGatherNumberOfMessages - Computes the number of messages an MPI rank expects to receive during a neighbor communication - Collective + Collective, No Fortran Support Input Parameters: + comm - Communicator @@ -60,7 +60,7 @@ PetscErrorCode PetscGatherNumberOfMessages(MPI_Comm comm, const PetscMPIInt ifla PetscGatherMessageLengths - Computes information about messages that an MPI rank will receive, including (from-id,length) pairs for each message. - Collective + Collective, No Fortran Support Input Parameters: + comm - Communicator @@ -119,7 +119,7 @@ PetscErrorCode PetscGatherMessageLengths(MPI_Comm comm, PetscMPIInt nsends, Pets PetscCall(PetscMalloc1(nrecvs, onodes)); for (i = 0; i < nrecvs; ++i) { (*onodes)[i] = w_status[i].MPI_SOURCE; -#if defined(PETSC_HAVE_OMPI_MAJOR_VERSION) +#if defined(PETSC_HAVE_OPENMPI) /* This line is a workaround for a bug in Open MPI 2.1.1 distributed by Ubuntu-18.04.2 LTS. It happens in self-to-self MPI_Send/Recv using MPI_ANY_SOURCE for message matching. Open MPI does not put correct value in recv buffer. See also @@ -208,7 +208,7 @@ PetscErrorCode PetscGatherMessageLengths_Private(MPI_Comm comm, PetscMPIInt nsen including (from-id,length) pairs for each message. Same functionality as `PetscGatherMessageLengths()` except it takes TWO ilenths and output TWO olengths. - Collective + Collective, No Fortran Support Input Parameters: + comm - Communicator diff --git a/src/sys/utils/mpishm.c b/src/sys/utils/mpishm.c index dcb809ca91f..dcbf6ab5042 100644 --- a/src/sys/utils/mpishm.c +++ b/src/sys/utils/mpishm.c @@ -54,7 +54,7 @@ static PetscErrorCode PetscShmCommDestroyDuppedComms(void) Collective. Input Parameter: -. globcomm - `MPI_Comm`, which can be a user MPI_Comm or a PETSc inner MPI_Comm +. globcomm - `MPI_Comm`, which can be a user `MPI_Comm` or a PETSc inner `MPI_Comm` Output Parameter: . 
pshmcomm - the PETSc shared memory communicator object @@ -62,7 +62,7 @@ static PetscErrorCode PetscShmCommDestroyDuppedComms(void) Level: developer Note: - When used with MPICH, MPICH must be configured with --download-mpich-device=ch3:nemesis + When used with MPICH, MPICH must be configured with `--download-mpich-device=ch3:nemesis` .seealso: `PetscShmCommGlobalToLocal()`, `PetscShmCommLocalToGlobal()`, `PetscShmCommGetMpiShmComm()` @*/ diff --git a/src/sys/utils/mpits.c b/src/sys/utils/mpits.c index 3bed805de85..bbfd70a6096 100644 --- a/src/sys/utils/mpits.c +++ b/src/sys/utils/mpits.c @@ -223,7 +223,7 @@ static PetscErrorCode PetscCommBuildTwoSided_RedScatter(MPI_Comm comm, PetscMPII /*@C PetscCommBuildTwoSided - discovers communicating ranks given one-sided information, moving constant-sized data in the process (often message lengths) - Collective + Collective, No Fortran Support Input Parameters: + comm - communicator @@ -408,7 +408,7 @@ static PetscErrorCode PetscCommBuildTwoSidedFReq_Ibarrier(MPI_Comm comm, PetscMP /*@C PetscCommBuildTwoSidedF - discovers communicating ranks given one-sided information, calling user-defined functions during rendezvous - Collective + Collective, No Fortran Support Input Parameters: + comm - communicator @@ -453,7 +453,7 @@ PetscErrorCode PetscCommBuildTwoSidedF(MPI_Comm comm, PetscMPIInt count, MPI_Dat /*@C PetscCommBuildTwoSidedFReq - discovers communicating ranks given one-sided information, calling user-defined functions during rendezvous, returns requests - Collective + Collective, No Fortran Support Input Parameters: + comm - communicator diff --git a/src/sys/utils/mpiu.c b/src/sys/utils/mpiu.c index 93290f4e207..77f16bd6fd5 100644 --- a/src/sys/utils/mpiu.c +++ b/src/sys/utils/mpiu.c @@ -130,7 +130,7 @@ PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscGlobalMinMaxInt - Get the global min/max from local min/max input Collective @@ -160,7 +160,7 @@ PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscGlobalMinMaxReal - Get the global min/max from local min/max input Collective diff --git a/src/sys/utils/pbarrier.c b/src/sys/utils/pbarrier.c index 89a0f10a5f8..24279a67d93 100644 --- a/src/sys/utils/pbarrier.c +++ b/src/sys/utils/pbarrier.c @@ -3,7 +3,7 @@ /* Logging support */ PetscLogEvent PETSC_Barrier; -/*@C +/*@ PetscBarrier - Blocks until this routine is executed by all processors owning the object `obj`. 
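With pbarrier.c switched to /*@ just above, the hand-written petscbarrier_ wrapper deleted earlier in this diff is replaced by a generated stub; the C-side call is unchanged. A usage fragment, where `x` stands for any existing PETSc object and the NULL form, per the PetscBarrier manpage, synchronizes over PETSC_COMM_WORLD:

.vb
#include <petscsys.h>

PetscCall(PetscBarrier((PetscObject)x)); /* block until all ranks sharing x's communicator arrive */
PetscCall(PetscBarrier(NULL));           /* synchronize over PETSC_COMM_WORLD                     */
.ve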
Input Parameter: diff --git a/src/sys/utils/pdisplay.c b/src/sys/utils/pdisplay.c index d398f589896..2561bf06748 100644 --- a/src/sys/utils/pdisplay.c +++ b/src/sys/utils/pdisplay.c @@ -119,7 +119,7 @@ PetscErrorCode PetscSetDisplay(void) PetscCall(PetscOptionsGetBool(NULL, NULL, "-x_virtual", &flag, NULL)); if (flag) { /* this is a crude hack, but better than nothing */ - PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, "pkill -9 Xvfb", "r", NULL)); + PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, "pkill -15 Xvfb", "r", NULL)); PetscCall(PetscSleep(1)); PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, "Xvfb :15 -screen 0 1600x1200x24", "r", NULL)); PetscCall(PetscSleep(5)); diff --git a/src/sys/utils/segbuffer.c b/src/sys/utils/segbuffer.c index 76396aedea9..0c096f1c9c1 100644 --- a/src/sys/utils/segbuffer.c +++ b/src/sys/utils/segbuffer.c @@ -41,7 +41,7 @@ static PetscErrorCode PetscSegBufferAlloc_Private(PetscSegBuffer seg, size_t cou /*@C PetscSegBufferCreate - create a segmented buffer - Not Collective + Not Collective, No Fortran Support Input Parameters: + unitbytes - number of bytes that each entry will contain @@ -73,7 +73,7 @@ PetscErrorCode PetscSegBufferCreate(size_t unitbytes, size_t expected, PetscSegB /*@C PetscSegBufferGet - get new buffer space from a segmented buffer - Not Collective + Not Collective, No Fortran Support Input Parameters: + seg - `PetscSegBuffer` buffer @@ -85,7 +85,7 @@ PetscErrorCode PetscSegBufferCreate(size_t unitbytes, size_t expected, PetscSegB Level: developer .seealso: `PetscSegBufferCreate()`, `PetscSegBufferExtractAlloc()`, `PetscSegBufferExtractTo()`, `PetscSegBufferExtractInPlace()`, `PetscSegBufferDestroy()`, - `PetscSegBuffer` + `PetscSegBuffer`, `PetscSegBufferGetInts()` @*/ PetscErrorCode PetscSegBufferGet(PetscSegBuffer seg, size_t count, void *buf) { @@ -103,7 +103,7 @@ PetscErrorCode PetscSegBufferGet(PetscSegBuffer seg, size_t count, void *buf) /*@C PetscSegBufferDestroy - destroy segmented buffer - Not Collective + Not Collective, No Fortran Support Input Parameter: . seg - address of segmented buffer object @@ -130,7 +130,7 @@ PetscErrorCode PetscSegBufferDestroy(PetscSegBuffer *seg) /*@C PetscSegBufferExtractTo - extract contiguous data to provided buffer and reset segmented buffer - Not Collective + Not Collective, No Fortran Support Input Parameters: + seg - segmented buffer @@ -169,7 +169,7 @@ PetscErrorCode PetscSegBufferExtractTo(PetscSegBuffer seg, void *contig) /*@C PetscSegBufferExtractAlloc - extract contiguous data to new allocation and reset segmented buffer - Not Collective + Not Collective, No Fortran Support Input Parameter: . seg - `PetscSegBuffer` buffer @@ -202,7 +202,7 @@ PetscErrorCode PetscSegBufferExtractAlloc(PetscSegBuffer seg, void *contiguous) /*@C PetscSegBufferExtractInPlace - extract in-place contiguous representation of data and reset segmented buffer for reuse - Not Collective + Not Collective, No Fortran Support Input Parameter: . seg - `PetscSegBuffer` object @@ -238,7 +238,7 @@ PetscErrorCode PetscSegBufferExtractInPlace(PetscSegBuffer seg, void *contig) /*@C PetscSegBufferGetSize - get currently used size of a `PetscSegBuffer` - Not Collective + Not Collective, No Fortran Support Input Parameter: . 
seg - `PetscSegBuffer` object @@ -260,7 +260,7 @@ PetscErrorCode PetscSegBufferGetSize(PetscSegBuffer seg, size_t *usedsize) /*@C PetscSegBufferUnuse - return some unused entries obtained with an overzealous `PetscSegBufferGet()` - Not Collective + Not Collective, No Fortran Support Input Parameters: + seg - `PetscSegBuffer` object diff --git a/src/sys/utils/sortd.c b/src/sys/utils/sortd.c index 1693e94ef6f..88b378807af 100644 --- a/src/sys/utils/sortd.c +++ b/src/sys/utils/sortd.c @@ -209,9 +209,9 @@ PetscErrorCode PetscFindReal(PetscReal key, PetscInt n, const PetscReal t[], Pet PetscFunctionReturn(PETSC_SUCCESS); } PetscAssertPointer(t, 3); - PetscCheckSorted(n, t); while (hi - lo > 1) { PetscInt mid = lo + (hi - lo) / 2; + PetscAssert(t[lo] <= t[mid] && t[mid] <= t[hi - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Input array was not sorted: (%g, %g, %g)", (double)t[lo], (double)t[mid], (double)t[hi - 1]); if (key < t[mid]) hi = mid; else lo = mid; } diff --git a/src/sys/utils/sorti.c b/src/sys/utils/sorti.c index 9dc794c6cea..bf62730a263 100644 --- a/src/sys/utils/sorti.c +++ b/src/sys/utils/sorti.c @@ -510,9 +510,9 @@ PetscErrorCode PetscFindInt(PetscInt key, PetscInt n, const PetscInt X[], PetscI PetscFunctionReturn(PETSC_SUCCESS); } PetscAssertPointer(X, 3); - PetscCheckSorted(n, X); while (hi - lo > 1) { PetscInt mid = lo + (hi - lo) / 2; + PetscAssert(X[lo] <= X[mid] && X[mid] <= X[hi - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Input array was not sorted: (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")", X[lo], X[mid], X[hi - 1]); if (key < X[mid]) hi = mid; else lo = mid; } @@ -589,9 +589,9 @@ PetscErrorCode PetscFindMPIInt(PetscMPIInt key, PetscInt n, const PetscMPIInt X[ PetscFunctionReturn(PETSC_SUCCESS); } PetscAssertPointer(X, 3); - PetscCheckSorted(n, X); while (hi - lo > 1) { PetscInt mid = lo + (hi - lo) / 2; + PetscAssert(X[lo] <= X[mid] && X[mid] <= X[hi - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Input array was not sorted: (%d, %d, %d)", X[lo], X[mid], X[hi - 1]); if (key < X[mid]) hi = mid; else lo = mid; } @@ -866,7 +866,7 @@ PetscErrorCode PetscSortIntWithScalarArray(PetscInt n, PetscInt X[], PetscScalar changes a second array to match the sorted first INTEGER array. Unlike other sort routines, the user must provide workspace (the size of an element in the data array) to use when sorting. - Not Collective + Not Collective, No Fortran Support Input Parameters: + n - number of values @@ -1079,7 +1079,7 @@ PetscErrorCode PetscMergeMPIIntArray(PetscInt an, const PetscMPIInt aI[], PetscI /*@C PetscProcessTree - Prepares tree data to be displayed graphically - Not Collective + Not Collective, No Fortran Support Input Parameters: + n - number of values diff --git a/src/sys/utils/sortip.c b/src/sys/utils/sortip.c index 9c929ccae82..4e7cf96b95b 100644 --- a/src/sys/utils/sortip.c +++ b/src/sys/utils/sortip.c @@ -182,17 +182,17 @@ static PetscErrorCode PetscSortStrWithPermutation_Private(const char *v[], Petsc PetscSortStrWithPermutation - Computes the permutation of strings that gives a sorted sequence. - Not Collective + Not Collective, No Fortran Support Input Parameters: + n - number of values to sort . i - values to sort -- idx - permutation array. Must be initialized to 0:n-1 on input. +- idx - permutation array. Must be initialized to `0:n-1` on input. Level: intermediate Note: - i is unchanged on output. + `i` is unchanged on output. 
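The PetscFindReal/PetscFindInt/PetscFindMPIInt hunks above trade the up-front PetscCheckSorted scan, which is O(n) on every call, for a three-point PetscAssert inside the bisection loop: debug builds still catch unsorted input along the probed path, while the search stays O(log n). The caller-side contract is unchanged:

.vb
#include <petscsys.h>

PetscInt       loc;
const PetscInt X[] = {1, 3, 5, 9}; /* must be sorted in increasing order */

PetscCall(PetscFindInt(5, 4, X, &loc)); /* found: loc == 2                       */
PetscCall(PetscFindInt(4, 4, X, &loc)); /* absent: loc == -(slot+1) == -3, where */
                                        /* slot == 2 is the insertion position   */
.ve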
.seealso: `PetscSortInt()`, `PetscSortRealWithPermutation()` @*/ diff --git a/src/sys/utils/sortso.c b/src/sys/utils/sortso.c index d46346ecee2..f0bc9d8d227 100644 --- a/src/sys/utils/sortso.c +++ b/src/sys/utils/sortso.c @@ -891,7 +891,7 @@ static inline PetscErrorCode PetscTimSortBuildRunWithArray_Private(char *arr, ch /*@C PetscTimSort - Sorts an array in place in increasing order using Tim Peters adaptive sorting algorithm. - Not Collective + Not Collective, No Fortran Support Input Parameters: + n - number of values @@ -1014,7 +1014,7 @@ PetscErrorCode PetscTimSort(PetscInt n, void *arr, size_t size, int (*cmp)(const PetscTimSortWithArray - Sorts an array in place in increasing order using Tim Peters adaptive sorting algorithm and reorders a second array to match the first. The arrays need not be the same type. - Not Collective + Not Collective, No Fortran Support Input Parameters: + n - number of values diff --git a/src/sys/utils/sseenabled.c b/src/sys/utils/sseenabled.c index 002b21d077e..b7c47780063 100644 --- a/src/sys/utils/sseenabled.c +++ b/src/sys/utils/sseenabled.c @@ -100,7 +100,8 @@ static PetscBool petsc_sse_local_is_untested = PETSC_TRUE; static PetscBool petsc_sse_enabled_local = PETSC_FALSE; static PetscBool petsc_sse_global_is_untested = PETSC_TRUE; static PetscBool petsc_sse_enabled_global = PETSC_FALSE; -/*@C + +/*@ PetscSSEIsEnabled - Determines if Intel Streaming SIMD Extensions (SSE) to the x86 instruction set can be used. Some operating systems do not allow the use of these instructions despite hardware availability. diff --git a/src/sys/utils/str.c b/src/sys/utils/str.c index 898dbc6f82d..a3b57a11f91 100644 --- a/src/sys/utils/str.c +++ b/src/sys/utils/str.c @@ -628,7 +628,7 @@ PetscErrorCode PetscStrreplace(MPI_Comm comm, const char aa[], char b[], size_t /*@C PetscStrcmpAny - Determines whether a string matches any of a list of strings. 
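PetscTimSort and PetscTimSortWithArray, flagged "No Fortran Support" above, take an opaque element size plus a three-argument comparator, so arrays of arbitrary structs can be sorted. A hedged sketch; the Item type and comparator are illustrative:

.vb
#include <petscsys.h>

typedef struct {
  PetscInt  key;
  PetscReal val;
} Item;

static int CompareItems(const void *a, const void *b, void *ctx)
{
  const PetscInt ka = ((const Item *)a)->key, kb = ((const Item *)b)->key;

  (void)ctx; /* no comparison context needed here */
  return (ka < kb) ? -1 : (ka > kb);
}

static PetscErrorCode SortItems(void)
{
  Item items[] = {{3, 0.5}, {1, 2.0}, {2, -1.0}};

  PetscFunctionBegin;
  PetscCall(PetscTimSort(3, items, sizeof(Item), CompareItems, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
.ve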
- Not Collective + Not Collective, No Fortran Support Input Parameters: + src - pointer to input the string diff --git a/src/tao/bound/impls/blmvm/blmvm.c b/src/tao/bound/impls/blmvm/blmvm.c index 0a996862b0d..bce2ca15474 100644 --- a/src/tao/bound/impls/blmvm/blmvm.c +++ b/src/tao/bound/impls/blmvm/blmvm.c @@ -41,7 +41,7 @@ static PetscErrorCode TaoSolve_BLMVM(Tao tao) /* Call general purpose update function */ if (tao->ops->update) { PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); - PetscCall(TaoComputeObjectiveAndGradient(tao, tao->solution, &f, tao->gradient)); + PetscCall(TaoComputeObjective(tao, tao->solution, &f)); } /* Compute direction */ gnorm2 = gnorm * gnorm; diff --git a/src/tao/bound/impls/bncg/bncg.c b/src/tao/bound/impls/bncg/bncg.c index 1bfd0d4890c..95909ab5c01 100644 --- a/src/tao/bound/impls/bncg/bncg.c +++ b/src/tao/bound/impls/bncg/bncg.c @@ -111,7 +111,7 @@ static PetscErrorCode TaoSolve_BNCG(Tao tao) /* Call general purpose update function */ if (tao->ops->update) { PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); - PetscCall(TaoComputeObjectiveAndGradient(tao, tao->solution, &cg->f, cg->unprojected_gradient)); + PetscCall(TaoComputeObjective(tao, tao->solution, &cg->f)); } PetscCall(TaoBNCGConductIteration(tao, gnorm)); if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(PETSC_SUCCESS); diff --git a/src/tao/bound/impls/bnk/bnls.c b/src/tao/bound/impls/bnk/bnls.c index 11262251fc6..e09aa28c6d2 100644 --- a/src/tao/bound/impls/bnk/bnls.c +++ b/src/tao/bound/impls/bnk/bnls.c @@ -107,7 +107,7 @@ PetscErrorCode TaoSolve_BNLS(Tao tao) /* Call general purpose update function */ if (tao->ops->update) { PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); - PetscCall(TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient)); + PetscCall(TaoComputeObjective(tao, tao->solution, &bnk->f)); } if (needH && bnk->inactive_idx) { diff --git a/src/tao/bound/impls/bnk/bntl.c b/src/tao/bound/impls/bnk/bntl.c index d51540d3c49..69763aca1eb 100644 --- a/src/tao/bound/impls/bnk/bntl.c +++ b/src/tao/bound/impls/bnk/bntl.c @@ -126,7 +126,7 @@ PetscErrorCode TaoSolve_BNTL(Tao tao) /* Call general purpose update function */ if (tao->ops->update) { PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); - PetscCall(TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient)); + PetscCall(TaoComputeObjective(tao, tao->solution, &bnk->f)); } if (needH && bnk->inactive_idx) { diff --git a/src/tao/bound/impls/bnk/bntr.c b/src/tao/bound/impls/bnk/bntr.c index 222d974851d..4c0641a0d98 100644 --- a/src/tao/bound/impls/bnk/bntr.c +++ b/src/tao/bound/impls/bnk/bntr.c @@ -108,7 +108,7 @@ PetscErrorCode TaoSolve_BNTR(Tao tao) /* Call general purpose update function */ if (tao->ops->update) { PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); - PetscCall(TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient)); + PetscCall(TaoComputeObjective(tao, tao->solution, &bnk->f)); } if (needH && bnk->inactive_idx) { diff --git a/src/tao/bound/impls/tron/tron.c b/src/tao/bound/impls/tron/tron.c index da787e08c4c..95c3befb54e 100644 --- a/src/tao/bound/impls/tron/tron.c +++ b/src/tao/bound/impls/tron/tron.c @@ -109,7 +109,10 @@ static PetscErrorCode TaoSolve_TRON(Tao tao) PetscUseTypeMethod(tao, convergencetest, tao->cnvP); while (tao->reason == TAO_CONTINUE_ITERATING) { /* Call general purpose update function */ - PetscTryTypeMethod(tao, update, tao->niter, 
tao->user_update); + if (tao->ops->update) { + PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); + PetscCall(TaoComputeObjective(tao, tao->solution, &tron->f)); + } /* Perform projected gradient iterations */ PetscCall(TronGradientProjections(tao, tron)); diff --git a/src/tao/bound/tutorials/plate2.c b/src/tao/bound/tutorials/plate2.c index cd3afc46612..7318f0c6993 100644 --- a/src/tao/bound/tutorials/plate2.c +++ b/src/tao/bound/tutorials/plate2.c @@ -970,5 +970,6 @@ PetscErrorCode MyMatMult(Mat H_shell, Vec X, Vec Y) test: suffix: 20 args: -tao_monitor_short -mx 8 -my 6 -bmx 3 -bmy 3 -bheight 0.2 -tao_type bntl -tao_gatol 1e-5 -tao_mf_hessian + requires: !single TEST*/ diff --git a/src/tao/bound/tutorials/plate2f.F90 b/src/tao/bound/tutorials/plate2f.F90 index d937d09513a..b8d525f8be7 100644 --- a/src/tao/bound/tutorials/plate2f.F90 +++ b/src/tao/bound/tutorials/plate2f.F90 @@ -90,7 +90,7 @@ module plate2fmodule ! derives from an elliptic PDE on a two-dimensional domain. From the ! distributed array, create the vectors - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE, DMDA_STENCIL_BOX,mx,my,Nx,Ny,i1,i1,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,dm,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE, DMDA_STENCIL_BOX,mx,my,Nx,Ny,i1,i1,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,dm,ierr)) PetscCallA(DMSetFromOptions(dm,ierr)) PetscCallA(DMSetUp(dm,ierr)) @@ -109,7 +109,7 @@ module plate2fmodule ! assembly PetscCallA(VecGetLocalSize(x,m,ierr)) - PetscCallA(MatCreateAIJ(PETSC_COMM_WORLD,m,m,N,N,i7,PETSC_NULL_INTEGER,i3,PETSC_NULL_INTEGER,H,ierr)) + PetscCallA(MatCreateAIJ(PETSC_COMM_WORLD,m,m,N,N,i7,PETSC_NULL_INTEGER_ARRAY,i3,PETSC_NULL_INTEGER_ARRAY,H,ierr)) PetscCallA(MatSetOption(H,MAT_SYMMETRIC,PETSC_TRUE,ierr)) PetscCallA(DMGetLocalToGlobalMapping(dm,isltog,ierr)) @@ -392,7 +392,6 @@ subroutine FormFunctionGradient(tao,X,fcn,G,dummy,ierr) PetscCall(PetscLogFlops(70.0d0*xm*ym,ierr)) - return end !FormFunctionGradient ! ---------------------------------------------------------------------------- @@ -610,7 +609,7 @@ subroutine FormHessian(tao, X, Hessian, Hpc, dummy, ierr) endif ! Set matrix values using local numbering, defined earlier in main routine - PetscCall(MatSetValuesLocal(Hessian,i1,row,k,col,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValuesLocal(Hessian,i1,[row],k,col,v,INSERT_VALUES,ierr)) enddo enddo @@ -628,7 +627,6 @@ subroutine FormHessian(tao, X, Hessian, Hpc, dummy, ierr) PetscCall(PetscLogFlops(199.0d0*xm*ym,ierr)) - return end ! Top,Left,Right,Bottom,bheight,mx,my,bmx,bmy,H, defined in plate2f.h @@ -784,7 +782,6 @@ subroutine MSA_BoundaryConditions(ierr) PetscCall(VecScale(Left,scl,ierr)) endif - return end ! ---------------------------------------------------------------------------- @@ -843,7 +840,6 @@ subroutine MSA_Plate(tao,xl,xu,dummy,ierr) PetscCall(VecRestoreArrayF90(xl,xl_v,ierr)) - return end ! ---------------------------------------------------------------------------- @@ -927,7 +923,6 @@ subroutine MSA_InitialPoint(X, ierr) endif - return end ! 
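The TaoSolve_* hunks above (BLMVM, BNCG, BNLS, BNTL, BNTR, TRON) all make the same behavioral change: the optional update callback is now guarded by an explicit check of tao->ops->update, and when the hook fires only the objective value is recomputed, via TaoComputeObjective(), instead of the objective and gradient. The hook is allowed to modify the iterate, which is what makes the recomputation necessary. A minimal user-side sketch of that hook, with a hypothetical callback named MyUpdate (not part of this patch):

static PetscErrorCode MyUpdate(Tao tao, PetscInt it, void *ctx)
{
  Vec x;

  PetscFunctionBeginUser;
  PetscCall(TaoGetSolution(tao, &x));
  /* hypothetical modification: damp the iterate every tenth iteration;
     because the hook may change x like this, the solvers above recompute
     the objective value immediately after invoking it */
  if (it % 10 == 0) PetscCall(VecScale(x, 0.99));
  PetscFunctionReturn(PETSC_SUCCESS);
}

The callback is registered once, before TaoSolve(), with PetscCall(TaoSetUpdate(tao, MyUpdate, NULL)).
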
diff --git a/src/tao/bound/utils/isutil.c b/src/tao/bound/utils/isutil.c index e7b32711f2e..b2d78d0a141 100644 --- a/src/tao/bound/utils/isutil.c +++ b/src/tao/bound/utils/isutil.c @@ -3,7 +3,7 @@ #include #include <../src/tao/matrix/submatfree.h> -/*@C +/*@ TaoVecGetSubVec - Gets a subvector using the `IS` Input Parameters: @@ -88,7 +88,7 @@ PetscErrorCode TaoVecGetSubVec(Vec vfull, IS is, TaoSubsetType reduced_type, Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoMatGetSubMat - Gets a submatrix using the `IS` Input Parameters: @@ -321,7 +321,7 @@ PetscErrorCode TaoEstimateActiveBounds(Vec X, Vec XL, Vec XU, Vec G, Vec S, Vec PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoBoundStep - Ensures the correct zero or adjusted step direction values for active variables. @@ -383,7 +383,7 @@ PetscErrorCode TaoBoundStep(Vec X, Vec XL, Vec XU, IS active_lower, IS active_up PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoBoundSolution - Ensures that the solution vector is snapped into the bounds within a given tolerance. Collective diff --git a/src/tao/constrained/tutorials/ex1.c b/src/tao/constrained/tutorials/ex1.c index 0d567f23c50..ea87786be76 100644 --- a/src/tao/constrained/tutorials/ex1.c +++ b/src/tao/constrained/tutorials/ex1.c @@ -481,7 +481,7 @@ PetscErrorCode FormEqualityJacobian(Tao tao, Vec X, Mat JE, Mat JEpre, void *ctx test: args: -tao_converged_reason -tao_gatol 1.e-6 -tao_type pdipm -tao_pdipm_kkt_shift_pd - requires: mumps + requires: mumps !single filter: sed -e "s/CONVERGED_GATOL iterations *[0-9]\{1,\}/CONVERGED_GATOL/g" test: @@ -509,7 +509,7 @@ PetscErrorCode FormEqualityJacobian(Tao tao, Vec X, Mat JE, Mat JEpre, void *ctx test: suffix: 5 - args: -tao_converged_reason -tao_almm_type classic -no_eq + args: -tao_converged_reason -tao_almm_type classic -no_eq -tao_almm_subsolver_tao_max_it 100 requires: !single !defined(PETSCTEST_VALGRIND) filter: sed -e "s/CONVERGED_GATOL iterations *[0-9]\{1,\}/CONVERGED_GATOL/g" diff --git a/src/tao/constrained/tutorials/maros.c b/src/tao/constrained/tutorials/maros.c index 3ec5ea7dc3b..b58e4f673c2 100644 --- a/src/tao/constrained/tutorials/maros.c +++ b/src/tao/constrained/tutorials/maros.c @@ -265,7 +265,7 @@ PetscErrorCode FormEqualityJacobian(Tao tao, Vec x, Mat JE, Mat JEpre, void *ctx requires: !complex test: - requires: superlu + requires: !single superlu localrunfiles: HS21 TEST*/ diff --git a/src/tao/f90-mod/petsctao.h b/src/tao/f90-mod/petsctao.h index 542457358b5..6eea7b7a21b 100644 --- a/src/tao/f90-mod/petsctao.h +++ b/src/tao/f90-mod/petsctao.h @@ -3,10 +3,19 @@ ! 
#include "petsc/finclude/petsctao.h" - type tTao - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE - end type + type, extends(tPetscObject) :: tTao + end type tTao Tao, parameter :: PETSC_NULL_TAO = tTao(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_TAO +#endif + + type, extends(tPetscObject) :: tTaoLineSearch + end type tTaoLineSearch + TaoLineSearch, parameter :: PETSC_NULL_TAO_LINESEARCH = tTaoLineSearch(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_TAO_LINESEARCH +#endif PetscEnum, parameter :: TAO_CONVERGED_GATOL = 3 PetscEnum, parameter :: TAO_CONVERGED_GRTOL = 4 @@ -21,3 +30,7 @@ PetscEnum, parameter :: TAO_DIVERGED_TR_REDUCTION = -7 PetscEnum, parameter :: TAO_DIVERGED_USER = -8 PetscEnum, parameter :: TAO_CONTINUE_ITERATING = 0 + + PetscEnum, parameter :: TAO_SUBSET_SUBVEC = 0 + PetscEnum, parameter :: TAO_SUBSET_MASK = 1 + PetscEnum, parameter :: TAO_SUBSET_MATRIXFREE = 2 diff --git a/src/tao/f90-mod/petsctaomod.F90 b/src/tao/f90-mod/petsctaomod.F90 index 571c44e52fd..d6843fc43c3 100644 --- a/src/tao/f90-mod/petsctaomod.F90 +++ b/src/tao/f90-mod/petsctaomod.F90 @@ -1,6 +1,11 @@ - module petsctaodef + module petsctaodefdummy use petsckspdef #include <../src/tao/f90-mod/petsctao.h> + end module petsctaodefdummy + + module petsctaodef + use petsctaodefdummy + use petscksp end module petsctaodef module petsctao diff --git a/src/tao/interface/ftn-custom/ztaosolverf.c b/src/tao/interface/ftn-custom/ztaosolverf.c index 647d10ccd7d..59e1d53e9de 100644 --- a/src/tao/interface/ftn-custom/ztaosolverf.c +++ b/src/tao/interface/ftn-custom/ztaosolverf.c @@ -19,17 +19,9 @@ #define taosetvariableboundsroutine_ TAOSETVARIABLEBOUNDSROUTINE #define taosetconstraintsroutine_ TAOSETCONSTRAINTSROUTINE #define taomonitorset_ TAOMONITORSET - #define taosettype_ TAOSETTYPE - #define taoview_ TAOVIEW #define taogetconvergencehistory_ TAOGETCONVERGENCEHISTORY #define taosetconvergencetest_ TAOSETCONVERGENCETEST - #define taogetoptionsprefix_ TAOGETOPTIONSPREFIX - #define taosetoptionsprefix_ TAOSETOPTIONSPREFIX - #define taoappendoptionsprefix_ TAOAPPENDOPTIONSPREFIX - #define taogettype_ TAOGETTYPE #define taosetupdate_ TAOSETUPDATE - #define taoviewfromoptions_ TAOVIEWFROMOPTIONS - #define taodestroy_ TAODESTROY #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define taosetobjective_ taosetobjective #define taosetgradient_ taosetgradient @@ -47,17 +39,9 @@ #define taosetvariableboundsroutine_ taosetvariableboundsroutine #define taosetconstraintsroutine_ taosetconstraintsroutine #define taomonitorset_ taomonitorset - #define taosettype_ taosettype - #define taoview_ taoview #define taogetconvergencehistory_ taogetconvergencehistory #define taosetconvergencetest_ taosetconvergencetest - #define taogetoptionsprefix_ taogetoptionsprefix - #define taosetoptionsprefix_ taosetoptionsprefix - #define taoappendoptionsprefix_ taoappendoptionsprefix - #define taogettype_ taogettype #define taosetupdate_ taosetupdate - #define taoviewfromoptions_ taoviewfromoptions - #define taodestroy_ taodestroy #endif static struct { @@ -180,8 +164,6 @@ static PetscErrorCode ourtaoupdateroutine(Tao tao, PetscInt iter, void *ctx) PetscObjectUseFortranCallback(tao, _cb.update, (Tao *, PetscInt *, void *), (&tao, &iter, _ctx)); } -EXTERN_C_BEGIN - PETSC_EXTERN void taosetobjective_(Tao *tao, void (*func)(Tao *, Vec *, PetscReal *, void *, PetscErrorCode *), void *ctx, PetscErrorCode *ierr) { 
CHKFORTRANNULLFUNCTION(func); @@ -276,64 +258,11 @@ PETSC_EXTERN void taosetconstraintsroutine_(Tao *tao, Vec *C, void (*func)(Tao * if (!*ierr) *ierr = TaoSetConstraintsRoutine(*tao, *C, ourtaoconstraintsroutine, ctx); } -PETSC_EXTERN void taosettype_(Tao *tao, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = TaoSetType(*tao, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void taoview_(Tao *tao, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = TaoView(*tao, v); -} - PETSC_EXTERN void taogetconvergencehistory_(Tao *tao, PetscInt *nhist, PetscErrorCode *ierr) { *ierr = TaoGetConvergenceHistory(*tao, NULL, NULL, NULL, NULL, nhist); } -PETSC_EXTERN void taogetoptionsprefix_(Tao *tao, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *name; - *ierr = TaoGetOptionsPrefix(*tao, &name); - *ierr = PetscStrncpy(prefix, name, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, prefix, len); -} - -PETSC_EXTERN void taoappendoptionsprefix_(Tao *tao, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *name; - FIXCHAR(prefix, len, name); - *ierr = TaoAppendOptionsPrefix(*tao, name); - if (*ierr) return; - FREECHAR(prefix, name); -} - -PETSC_EXTERN void taosetoptionsprefix_(Tao *tao, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(prefix, len, t); - *ierr = TaoSetOptionsPrefix(*tao, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void taogettype_(Tao *tao, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - *ierr = TaoGetType(*tao, &tname); - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - PETSC_EXTERN void taosetjacobianinequalityroutine_(Tao *tao, Mat *J, Mat *Jp, void (*func)(Tao *, Vec *, Mat *, Mat *, void *, PetscErrorCode *), void *ctx, PetscErrorCode *ierr) { CHKFORTRANNULLFUNCTION(func); @@ -368,24 +297,3 @@ PETSC_EXTERN void taosetupdate_(Tao *tao, void (*func)(Tao *, PetscInt *, PetscE *ierr = PetscObjectSetFortranCallback((PetscObject)*tao, PETSC_FORTRAN_CALLBACK_CLASS, &_cb.update, (PetscVoidFn *)func, ctx); if (!*ierr) *ierr = TaoSetUpdate(*tao, ourtaoupdateroutine, ctx); } - -PETSC_EXTERN void taoviewfromoptions_(Tao *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = TaoViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void taodestroy_(Tao *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = TaoDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} - -EXTERN_C_END diff --git a/src/tao/interface/taosolver.c b/src/tao/interface/taosolver.c index 70a80d3b615..8e3aae03eb4 100644 --- a/src/tao/interface/taosolver.c +++ b/src/tao/interface/taosolver.c @@ -94,14 +94,14 @@ PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao) PetscFunctionBegin; PetscAssertPointer(newtao, 2); PetscCall(TaoInitializePackage()); + PetscCall(TaoLineSearchInitializePackage()); PetscCall(PetscHeaderCreate(tao, TAO_CLASSID, "Tao", "Optimization solver", "Tao", comm, TaoDestroy, TaoView)); /* Set non-NULL defaults */ tao->ops->convergencetest = TaoDefaultConvergenceTest; - - tao->max_it = 10000; - tao->max_funcs = -1; + tao->max_it = 10000; + 
tao->max_funcs = -1; #if defined(PETSC_USE_REAL_SINGLE) tao->gatol = 1e-5; tao->grtol = 1e-5; @@ -113,13 +113,11 @@ PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao) tao->crtol = 1e-8; tao->catol = 1e-8; #endif - tao->gttol = 0.0; - tao->steptol = 0.0; - tao->trust0 = PETSC_INFINITY; - tao->fmin = PETSC_NINFINITY; - + tao->gttol = 0.0; + tao->steptol = 0.0; + tao->trust0 = PETSC_INFINITY; + tao->fmin = PETSC_NINFINITY; tao->hist_reset = PETSC_TRUE; - PetscCall(TaoResetStatistics(tao)); *newtao = tao; PetscFunctionReturn(PETSC_SUCCESS); @@ -215,7 +213,7 @@ PetscErrorCode TaoSetUp(Tao tao) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoDestroy - Destroys the `Tao` context that was created with `TaoCreate()` Collective @@ -542,7 +540,7 @@ PetscErrorCode TaoSetFromOptions(Tao tao) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoViewFromOptions - View a `Tao` object based on values in the options database Collective @@ -564,7 +562,7 @@ PetscErrorCode TaoViewFromOptions(Tao A, PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoView - Prints information about the `Tao` object Collective @@ -1391,18 +1389,24 @@ PetscErrorCode TaoResetStatistics(Tao tao) Logically Collective Input Parameters: -+ tao - The `Tao` solver context -- func - The function ++ tao - The `Tao` solver +. func - The function +- ctx - The update function context Calling sequence of `func`: ++ tao - The optimizer context +. it - The current iteration index +- ctx - The update context Level: advanced + Notes: + Users can modify the gradient direction or any other vector associated with the specific solver used. + The objective function value is always recomputed after a call to the update hook. + .seealso: [](ch_tao), `Tao`, `TaoSolve()` @*/ -PetscErrorCode TaoSetUpdate(Tao tao, PetscErrorCode (*func)(Tao, PetscInt, void *), void *ctx) +PetscErrorCode TaoSetUpdate(Tao tao, PetscErrorCode (*func)(Tao tao, PetscInt it, void *ctx), void *ctx) { PetscFunctionBegin; PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); @@ -1982,7 +1986,7 @@ PetscErrorCode TaoDefaultConvergenceTest(Tao tao, void *dummy) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoSetOptionsPrefix - Sets the prefix used for searching for all Tao options in the database. @@ -2023,7 +2027,7 @@ PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoAppendOptionsPrefix - Appends to the prefix used for searching for all Tao options in the database. Logically Collective @@ -2050,7 +2054,7 @@ PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoGetOptionsPrefix - Gets the prefix used for searching for all Tao options in the database @@ -2077,7 +2081,7 @@ PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoSetType - Sets the `TaoType` for the minimization solver. Collective @@ -2128,7 +2132,7 @@ PetscErrorCode TaoSetType(Tao tao, TaoType type) /*@C TaoRegister - Adds a method to the Tao package for minimization.
- Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined solver @@ -2397,7 +2401,7 @@ PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscR PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoGetType - Gets the current `TaoType` being used in the `Tao` object Not Collective @@ -2550,6 +2554,7 @@ PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal obj[], PetscReal resi .vb call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr) .ve + In other words, this gets the current number of entries in the history. Access the history through the array you passed to `TaoSetConvergenceHistory()`. .seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetConvergenceHistory()` @*/ @@ -2660,7 +2665,7 @@ PetscErrorCode TaoGetGradientNorm(Tao tao, Mat *M) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoGradientNorm - Compute the norm using the `NormType` the user has selected Collective diff --git a/src/tao/interface/taosolver_bounds.c b/src/tao/interface/taosolver_bounds.c index a2104021ac1..d3adcb60138 100644 --- a/src/tao/interface/taosolver_bounds.c +++ b/src/tao/interface/taosolver_bounds.c @@ -88,7 +88,7 @@ PetscErrorCode TaoGetVariableBounds(Tao tao, Vec *XL, Vec *XU) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeVariableBounds - Compute the variable bounds using the routine set by `TaoSetVariableBoundsRoutine()`. @@ -174,7 +174,7 @@ PetscErrorCode TaoGetInequalityBounds(Tao tao, Vec *IL, Vec *IU) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeConstraints - Compute the constraints using the routine set by `TaoSetConstraintsRoutine()`. @@ -375,7 +375,7 @@ PetscErrorCode TaoSetInequalityConstraintsRoutine(Tao tao, Vec ci, PetscErrorCod PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeEqualityConstraints - Compute the equality constraints using the routine set by `TaoSetEqualityConstraintsRoutine()`. @@ -407,7 +407,7 @@ PetscErrorCode TaoComputeEqualityConstraints(Tao tao, Vec X, Vec CE) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeInequalityConstraints - Compute the inequality constraints using the routine set by `TaoSetInequalityConstraintsRoutine()`. diff --git a/src/tao/interface/taosolver_hj.c b/src/tao/interface/taosolver_hj.c index 187683e8d9e..38da1ac39fb 100644 --- a/src/tao/interface/taosolver_hj.c +++ b/src/tao/interface/taosolver_hj.c @@ -213,7 +213,7 @@ PetscErrorCode TaoTestHessian(Tao tao) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeHessian - Computes the Hessian matrix that has been set with `TaoSetHessian()`. @@ -266,7 +266,7 @@ PetscErrorCode TaoComputeHessian(Tao tao, Vec X, Mat H, Mat Hpre) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeJacobian - Computes the Jacobian matrix that has been set with `TaoSetJacobianRoutine()`. @@ -307,7 +307,7 @@ PetscErrorCode TaoComputeJacobian(Tao tao, Vec X, Mat J, Mat Jpre) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeResidualJacobian - Computes the least-squares residual Jacobian matrix that has been set with `TaoSetJacobianResidual()`. @@ -348,7 +348,7 @@ PetscErrorCode TaoComputeResidualJacobian(Tao tao, Vec X, Mat J, Mat Jpre) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeJacobianState - Computes the Jacobian matrix that has been set with `TaoSetJacobianStateRoutine()`.
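The TaoCompute* wrappers converted from /*@C to /*@ above are thin dispatchers into user callbacks; TaoComputeHessian(), for example, invokes the routine registered with TaoSetHessian(). A rough sketch of such a callback for the quadratic objective f(x) = x^T A x / 2, using the illustrative names AppCtx and FormHessian (not taken from this patch):

typedef struct {
  Mat A; /* fixed matrix defining the quadratic objective */
} AppCtx;

static PetscErrorCode FormHessian(Tao tao, Vec X, Mat H, Mat Hpre, void *ptr)
{
  AppCtx *user = (AppCtx *)ptr;

  PetscFunctionBeginUser;
  /* for f(x) = x^T A x / 2 the Hessian is simply A, independent of X */
  PetscCall(MatCopy(user->A, H, SAME_NONZERO_PATTERN));
  if (Hpre != H) PetscCall(MatCopy(user->A, Hpre, SAME_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}

It would be wired up with PetscCall(TaoSetHessian(tao, H, H, FormHessian, &user)), after which TaoComputeHessian(), and hence the Newton-type solvers, call it as needed.
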
@@ -386,7 +386,7 @@ PetscErrorCode TaoComputeJacobianState(Tao tao, Vec X, Mat J, Mat Jpre, Mat Jinv PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeJacobianDesign - Computes the Jacobian matrix that has been set with `TaoSetJacobianDesignRoutine()`. @@ -659,7 +659,7 @@ PetscErrorCode TaoSetStateDesignIS(Tao tao, IS s_is, IS d_is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeJacobianEquality - Computes the Jacobian matrix that has been set with `TaoSetJacobianEqualityRoutine()`. @@ -696,7 +696,7 @@ PetscErrorCode TaoComputeJacobianEquality(Tao tao, Vec X, Mat J, Mat Jpre) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoComputeJacobianInequality - Computes the Jacobian matrix that has been set with `TaoSetJacobianInequalityRoutine()`. diff --git a/src/tao/leastsquares/tutorials/chwirut1f.F90 b/src/tao/leastsquares/tutorials/chwirut1f.F90 index e3e080ae2b9..00b35b17efb 100644 --- a/src/tao/leastsquares/tutorials/chwirut1f.F90 +++ b/src/tao/leastsquares/tutorials/chwirut1f.F90 @@ -30,7 +30,7 @@ program main PetscErrorCode ierr ! used to check for functions returning nonzeros Vec x ! solution vector Vec f ! vector of functions - Tao tao ! Tao context + Tao tao ! Tao context PetscInt nhist PetscMPIInt size,rank ! number of processes running PetscReal hist(100) ! objective value history @@ -136,7 +136,6 @@ subroutine FormFunction(tao, x, f, dummy, ierr) PetscCall(VecRestoreArrayF90(X,x_v,ierr)) PetscCall(VecRestoreArrayF90(F,f_v,ierr)) - return end subroutine FormStartingPoint(x) @@ -151,7 +150,6 @@ subroutine FormStartingPoint(x) x_v(2) = 0.008 x_v(3) = 0.01 PetscCall(VecRestoreArrayF90(x,x_v,ierr)) - return end subroutine InitializeData() @@ -374,7 +372,6 @@ subroutine InitializeData() y(i) = 28.9000; t(i) = 1.7500; i=i+1 y(i) = 28.9500; t(i) = 1.7500; i=i+1 - return end !/*TEST diff --git a/src/tao/leastsquares/tutorials/chwirut2f.F90 b/src/tao/leastsquares/tutorials/chwirut2f.F90 index 73c329cd9e7..3df8b12692f 100644 --- a/src/tao/leastsquares/tutorials/chwirut2f.F90 +++ b/src/tao/leastsquares/tutorials/chwirut2f.F90 @@ -165,7 +165,6 @@ subroutine FormFunction(tao, x, f, dummy, ierr) ! 
Restore vectors PetscCall(VecRestoreArrayReadF90(x,x_v,ierr)) PetscCall(VecRestoreArrayF90(F,f_v,ierr)) - return end subroutine FormStartingPoint(x) @@ -180,7 +179,6 @@ subroutine FormStartingPoint(x) x_v(2) = 0.008 x_v(3) = 0.01 PetscCall(VecRestoreArrayF90(x,x_v,ierr)) - return end subroutine InitializeData() @@ -403,7 +401,6 @@ subroutine InitializeData() y(i) = 28.9000; t(i) = 1.7500; i=i+1 y(i) = 28.9500; t(i) = 1.7500; i=i+1 - return end subroutine TaskWorker(ierr) @@ -434,7 +431,6 @@ subroutine TaskWorker(ierr) end if enddo ierr = 0 - return end subroutine RunSimulation(x,i,f,ierr) @@ -445,7 +441,6 @@ subroutine RunSimulation(x,i,f,ierr) PetscErrorCode ierr f = y(i) - exp(-x(1)*t(i))/(x(2)+x(3)*t(i)) ierr = 0 - return end subroutine StopWorkers(ierr) @@ -469,7 +464,6 @@ subroutine StopWorkers(ierr) PetscCallMPI(MPI_Send(x,nn,MPIU_SCALAR,source,DIE_TAG,PETSC_COMM_WORLD,ierr)) enddo ierr = 0 - return end !/*TEST diff --git a/src/tao/linesearch/interface/ftn-custom/ztaolinesearchf.c b/src/tao/linesearch/interface/ftn-custom/ztaolinesearchf.c index 182ff7ca68a..1617b827e0a 100644 --- a/src/tao/linesearch/interface/ftn-custom/ztaolinesearchf.c +++ b/src/tao/linesearch/interface/ftn-custom/ztaolinesearchf.c @@ -6,18 +6,12 @@ #define taolinesearchsetgradientroutine_ TAOLINESEARCHSETGRADIENTROUTINE #define taolinesearchsetobjectiveandgradientroutine_ TAOLINESEARCHSETOBJECTIVEANDGRADIENTROUTINE #define taolinesearchsetobjectiveandgtsroutine_ TAOLINESEARCHSETOBJECTIVEANDGTSROUTINE - #define taolinesearchview_ TAOLINESEARCHVIEW - #define taolinesearchsettype_ TAOLINESEARCHSETTYPE - #define taolinesearchviewfromoptions_ TAOLINESEARCHVIEWFROMOPTIONS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define taolinesearchsetobjectiveroutine_ taolinesearchsetobjectiveroutine #define taolinesearchsetgradientroutine_ taolinesearchsetgradientroutine #define taolinesearchsetobjectiveandgradientroutine_ taolinesearchsetobjectiveandgradientroutine #define taolinesearchsetobjectiveandgtsroutine_ taolinesearchsetobjectiveandgtsroutine - #define taolinesearchview_ taolinesearchview - #define taolinesearchsettype_ taolinesearchsettype - #define taolinesearchviewfromoptions_ taolinesearchviewfromoptions #endif static int OBJ = 0; @@ -93,67 +87,3 @@ PETSC_EXTERN void taolinesearchsetobjectiveandgtsroutine_(TaoLineSearch *ls, voi *ierr = TaoLineSearchSetObjectiveAndGTSRoutine(*ls, ourtaolinesearchobjectiveandgtsroutine, ctx); } } - -PETSC_EXTERN void taolinesearchsettype_(TaoLineSearch *ls, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) - -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = TaoLineSearchSetType(*ls, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void taolinesearchview_(TaoLineSearch *ls, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = TaoLineSearchView(*ls, v); -} - -PETSC_EXTERN void taolinesearchgetoptionsprefix_(TaoLineSearch *ls, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *name; - *ierr = TaoLineSearchGetOptionsPrefix(*ls, &name); - *ierr = PetscStrncpy(prefix, name, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, prefix, len); -} - -PETSC_EXTERN void taolinesearchappendoptionsprefix_(TaoLineSearch *ls, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *name; - FIXCHAR(prefix, len, name); - *ierr = TaoLineSearchAppendOptionsPrefix(*ls, name); - if (*ierr) return; - FREECHAR(prefix, name); -} - -PETSC_EXTERN 
void taolinesearchsetoptionsprefix_(TaoLineSearch *ls, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(prefix, len, t); - *ierr = TaoLineSearchSetOptionsPrefix(*ls, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void taolinesearchgettype_(TaoLineSearch *ls, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - *ierr = TaoLineSearchGetType(*ls, &tname); - *ierr = PetscStrncpy(name, tname, len); - if (*ierr) return; - FIXRETURNCHAR(PETSC_TRUE, name, len); -} -PETSC_EXTERN void taolinesearchviewfromoptions_(TaoLineSearch *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = TaoLineSearchViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/tao/linesearch/interface/taolinesearch.c b/src/tao/linesearch/interface/taolinesearch.c index 6616c4650c4..91cb52ada32 100644 --- a/src/tao/linesearch/interface/taolinesearch.c +++ b/src/tao/linesearch/interface/taolinesearch.c @@ -8,7 +8,7 @@ PetscClassId TAOLINESEARCH_CLASSID = 0; PetscLogEvent TAOLINESEARCH_Apply; PetscLogEvent TAOLINESEARCH_Eval; -/*@C +/*@ TaoLineSearchViewFromOptions - View a `TaoLineSearch` object based on values in the options database Collective @@ -33,7 +33,7 @@ PetscErrorCode TaoLineSearchViewFromOptions(TaoLineSearch A, PetscObject obj, co PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoLineSearchView - Prints information about the `TaoLineSearch` Collective @@ -353,7 +353,7 @@ PetscErrorCode TaoLineSearchApply(TaoLineSearch ls, Vec x, PetscReal *f, Vec g, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoLineSearchSetType - Sets the algorithm used in a line search Collective @@ -509,7 +509,7 @@ PetscErrorCode TaoLineSearchSetFromOptions(TaoLineSearch ls) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoLineSearchGetType - Gets the current line search algorithm Not Collective @@ -763,7 +763,7 @@ PetscErrorCode TaoLineSearchSetObjectiveAndGTSRoutine(TaoLineSearch ls, PetscErr PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoLineSearchUseTaoRoutines - Informs the `TaoLineSearch` to use the objective and gradient evaluation routines from the given `Tao` object. The default. @@ -1149,7 +1149,7 @@ PetscErrorCode TaoLineSearchGetStepLength(TaoLineSearch ls, PetscReal *s) /*@C TaoLineSearchRegister - Adds a line-search algorithm to the registry - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined solver @@ -1180,7 +1180,7 @@ PetscErrorCode TaoLineSearchRegister(const char sname[], PetscErrorCode (*func)( PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoLineSearchAppendOptionsPrefix - Appends to the prefix used for searching for all `TaoLineSearch` options in the database. @@ -1205,7 +1205,7 @@ PetscErrorCode TaoLineSearchAppendOptionsPrefix(TaoLineSearch ls, const char p[] return PetscObjectAppendOptionsPrefix((PetscObject)ls, p); } -/*@C +/*@ TaoLineSearchGetOptionsPrefix - Gets the prefix used for searching for all `TaoLineSearch` options in the database @@ -1230,7 +1230,7 @@ PetscErrorCode TaoLineSearchGetOptionsPrefix(TaoLineSearch ls, const char *p[]) return PetscObjectGetOptionsPrefix((PetscObject)ls, p); } -/*@C +/*@ TaoLineSearchSetOptionsPrefix - Sets the prefix used for searching for all `TaoLineSearch` options in the database. 
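The prefix routines in the hunks above follow the standard PetscObject prefix conventions, so they compose directly with the options database. A short sketch, assuming a hypothetical prefix "opt_":

Tao tao;

PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
PetscCall(TaoSetOptionsPrefix(tao, "opt_"));
/* options are now resolved under the prefix, e.g.
     -opt_tao_type lmvm -opt_tao_ls_type more-thuente
   so the inner TaoLineSearch picks up its options under the same prefix */
PetscCall(TaoSetFromOptions(tao));
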
diff --git a/src/tao/matrix/submatfree.c b/src/tao/matrix/submatfree.c index 9f23e1b302f..b555e47efd0 100644 --- a/src/tao/matrix/submatfree.c +++ b/src/tao/matrix/submatfree.c @@ -1,7 +1,7 @@ #include /*I "petsctao.h" I*/ #include <../src/tao/matrix/submatfree.h> -/*@C +/*@ MatCreateSubMatrixFree - Creates a reduced matrix by masking a full matrix. @@ -20,6 +20,9 @@ Note: The caller is responsible for destroying the input objects after matrix J has been destroyed. + Developer Note: + This should be moved/supported in `Mat` + .seealso: `MatCreate()` @*/ PetscErrorCode MatCreateSubMatrixFree(Mat mat, IS Rows, IS Cols, Mat *J) diff --git a/src/tao/python/ftn-custom/makefile b/src/tao/python/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/tao/python/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/tao/python/ftn-custom/zpythontaof.c b/src/tao/python/ftn-custom/zpythontaof.c deleted file mode 100644 index 2e527747ffa..00000000000 --- a/src/tao/python/ftn-custom/zpythontaof.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define taopythonsettype_ TAOPYTHONSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define taopythonsettype_ taopythonsettype -#endif - -PETSC_EXTERN void taopythonsettype_(Tao *tao, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(name, len, t); - *ierr = TaoPythonSetType(*tao, t); - if (*ierr) return; - FREECHAR(name, t); -} diff --git a/src/tao/python/pythontao.c b/src/tao/python/pythontao.c index 6c68db3de45..a564e3e9dd0 100644 --- a/src/tao/python/pythontao.c +++ b/src/tao/python/pythontao.c @@ -1,6 +1,6 @@ #include /*I "petsctao.h" I*/ -/*@C +/*@ TaoPythonSetType - Initialize a `Tao` object implemented in Python. Collective @@ -25,7 +25,7 @@ PetscErrorCode TaoPythonSetType(Tao tao, const char pyname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TaoPythonGetType - Get the type of a `Tao` object implemented in Python. 
Not Collective diff --git a/src/tao/unconstrained/impls/bmrm/bmrm.c b/src/tao/unconstrained/impls/bmrm/bmrm.c index 0f0e93b185a..5faadaae214 100644 --- a/src/tao/unconstrained/impls/bmrm/bmrm.c +++ b/src/tao/unconstrained/impls/bmrm/bmrm.c @@ -18,6 +18,7 @@ static PetscErrorCode solve(TAO_DF *df) PetscReal **Q = df->Q, *f = df->f, *t = df->t; PetscInt dim = df->dim, *ipt = df->ipt, *ipt2 = df->ipt2, *uv = df->uv; + PetscCheck(dim >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Expected dim %" PetscInt_FMT " >= 0", dim); /* variables for the adaptive nonmonotone linesearch */ PetscInt L, llast; PetscReal fr, fbest, fv, fc, fv0; diff --git a/src/tao/unconstrained/impls/cg/taocg.c b/src/tao/unconstrained/impls/cg/taocg.c index a36f068df4f..4fd430b42be 100644 --- a/src/tao/unconstrained/impls/cg/taocg.c +++ b/src/tao/unconstrained/impls/cg/taocg.c @@ -52,8 +52,10 @@ static PetscErrorCode TaoSolve_CG(Tao tao) while (1) { /* Call general purpose update function */ - PetscTryTypeMethod(tao, update, tao->niter, tao->user_update); - + if (tao->ops->update) { + PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); + PetscCall(TaoComputeObjective(tao, tao->solution, &f)); + } /* Save the current gradient information */ f_old = f; gnorm2_old = gnorm2; diff --git a/src/tao/unconstrained/impls/lmvm/lmvm.c b/src/tao/unconstrained/impls/lmvm/lmvm.c index 8f0a98830fd..276c22d90b2 100644 --- a/src/tao/unconstrained/impls/lmvm/lmvm.c +++ b/src/tao/unconstrained/impls/lmvm/lmvm.c @@ -37,7 +37,10 @@ static PetscErrorCode TaoSolve_LMVM(Tao tao) /* Have not converged; continue with Newton method */ while (tao->reason == TAO_CONTINUE_ITERATING) { /* Call general purpose update function */ - PetscTryTypeMethod(tao, update, tao->niter, tao->user_update); + if (tao->ops->update) { + PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); + PetscCall(TaoComputeObjective(tao, tao->solution, &f)); + } /* Compute direction */ if (lmP->H0) { @@ -50,7 +53,7 @@ static PetscErrorCode TaoSolve_LMVM(Tao tao) if (nupdates > 0) stepType = LMVM_STEP_BFGS; /* Check for success (descent direction) */ - PetscCall(VecDot(lmP->D, tao->gradient, &gdx)); + PetscCall(VecDotRealPart(lmP->D, tao->gradient, &gdx)); if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) { /* Step is not descent or direction produced not a number We can assert bfgsUpdates > 1 in this case because @@ -203,12 +206,18 @@ static PetscErrorCode TaoView_LMVM(Tao tao, PetscViewer viewer) PetscFunctionBegin; PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); if (isascii) { - PetscCall(PetscViewerASCIIPrintf(viewer, " Gradient steps: %" PetscInt_FMT "\n", lm->grad)); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Gradient steps: %" PetscInt_FMT "\n", lm->grad)); if (lm->recycle) { - PetscCall(PetscViewerASCIIPrintf(viewer, " Recycle: on\n")); + PetscCall(PetscViewerASCIIPrintf(viewer, "Recycle: on\n")); recycled_its = lm->bfgs + lm->grad; - PetscCall(PetscViewerASCIIPrintf(viewer, " Total recycled iterations: %" PetscInt_FMT "\n", recycled_its)); + PetscCall(PetscViewerASCIIPrintf(viewer, "Total recycled iterations: %" PetscInt_FMT "\n", recycled_its)); } + PetscCall(PetscViewerASCIIPrintf(viewer, "LMVM Matrix:\n")); + PetscCall(PetscViewerASCIIPushTab(viewer)); + PetscCall(MatView(lm->M, viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); + PetscCall(PetscViewerASCIIPopTab(viewer)); } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/tao/unconstrained/impls/lmvm/tests/ex1.c 
b/src/tao/unconstrained/impls/lmvm/tests/ex1.c new file mode 100644 index 00000000000..c557e4b2ec3 --- /dev/null +++ b/src/tao/unconstrained/impls/lmvm/tests/ex1.c @@ -0,0 +1,95 @@ +const char help[] = "Test TAOLMVM on a least-squares problem"; + +#include +#include + +typedef struct _n_AppCtx { + Mat A; + Vec b; + Vec r; +} AppCtx; + +static PetscErrorCode LSObjAndGrad(Tao tao, Vec x, PetscReal *obj, Vec g, void *_ctx) +{ + PetscFunctionBegin; + AppCtx *ctx = (AppCtx *)_ctx; + PetscCall(VecAXPBY(ctx->r, -1.0, 0.0, ctx->b)); + PetscCall(MatMultAdd(ctx->A, x, ctx->r, ctx->r)); + PetscCall(VecDotRealPart(ctx->r, ctx->r, obj)); + *obj *= 0.5; + PetscCall(MatMultTranspose(ctx->A, ctx->r, g)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +int main(int argc, char **argv) +{ + PetscCall(PetscInitialize(&argc, &argv, NULL, help)); + MPI_Comm comm = PETSC_COMM_WORLD; + AppCtx ctx; + Vec sol; + PetscBool flg, cuda = PETSC_FALSE; + + PetscInt M = 10; + PetscInt N = 10; + PetscOptionsBegin(comm, "", help, "TAO"); + PetscCall(PetscOptionsInt("-m", "data size", NULL, M, &M, NULL)); + PetscCall(PetscOptionsInt("-n", "data size", NULL, N, &N, NULL)); + PetscCall(PetscOptionsGetBool(NULL, NULL, "-cuda", &cuda, &flg)); + PetscOptionsEnd(); + + if (cuda) { + VecType vec_type; + PetscCall(VecCreateSeqCUDA(comm, N, &ctx.b)); + PetscCall(VecGetType(ctx.b, &vec_type)); + PetscCall(MatCreateDenseFromVecType(comm, vec_type, M, N, PETSC_DECIDE, PETSC_DECIDE, -1, NULL, &ctx.A)); + PetscCall(MatCreateVecs(ctx.A, &sol, NULL)); + } else { + PetscCall(MatCreateDense(comm, PETSC_DECIDE, PETSC_DECIDE, M, N, NULL, &ctx.A)); + PetscCall(MatCreateVecs(ctx.A, &sol, &ctx.b)); + } + PetscCall(VecDuplicate(ctx.b, &ctx.r)); + PetscCall(VecZeroEntries(sol)); + + PetscRandom rand; + PetscCall(PetscRandomCreate(comm, &rand)); + PetscCall(PetscRandomSetFromOptions(rand)); + PetscCall(MatSetRandom(ctx.A, rand)); + PetscCall(VecSetRandom(ctx.b, rand)); + PetscCall(PetscRandomDestroy(&rand)); + + Tao tao; + PetscCall(TaoCreate(comm, &tao)); + PetscCall(TaoSetSolution(tao, sol)); + PetscCall(TaoSetObjectiveAndGradient(tao, NULL, LSObjAndGrad, &ctx)); + PetscCall(TaoSetType(tao, TAOLMVM)); + PetscCall(TaoSetFromOptions(tao)); + PetscCall(TaoSolve(tao)); + PetscCall(TaoDestroy(&tao)); + + PetscCall(VecDestroy(&ctx.r)); + PetscCall(VecDestroy(&sol)); + PetscCall(VecDestroy(&ctx.b)); + PetscCall(MatDestroy(&ctx.A)); + + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + build: + requires: !complex !__float128 !single !defined(PETSC_USE_64BIT_INDICES) + + test: + suffix: 0 + args: -tao_monitor -tao_ls_gtol 1.e-6 -tao_view -tao_lmvm_mat_lmvm_hist_size 20 -tao_ls_type more-thuente -tao_lmvm_mat_lmvm_scale_type none -tao_lmvm_mat_type lmvmbfgs + + test: + suffix: 1 + args: -tao_monitor -tao_ls_gtol 1.e-6 -tao_view -tao_lmvm_mat_lmvm_hist_size 20 -tao_ls_type more-thuente -tao_lmvm_mat_lmvm_scale_type none -tao_lmvm_mat_type lmvmdbfgs + + test: + suffix: 2 + args: -tao_monitor -tao_ls_gtol 1.e-6 -tao_view -tao_lmvm_mat_lmvm_hist_size 20 -tao_ls_type more-thuente -tao_lmvm_mat_type lmvmdbfgs -tao_lmvm_mat_lmvm_scale_type none -tao_lmvm_mat_lbfgs_type {{inplace reorder}} + +TEST*/ diff --git a/src/tao/unconstrained/impls/lmvm/tests/makefile b/src/tao/unconstrained/impls/lmvm/tests/makefile new file mode 100644 index 00000000000..7d0fbd72aa4 --- /dev/null +++ b/src/tao/unconstrained/impls/lmvm/tests/makefile @@ -0,0 +1,7 @@ +-include ../../../../../../petscdir.mk + +MANSEC = Tao + +include ${PETSC_DIR}/lib/petsc/conf/variables +include 
${PETSC_DIR}/lib/petsc/conf/rules + diff --git a/src/tao/unconstrained/impls/lmvm/tests/output/ex1_0.out b/src/tao/unconstrained/impls/lmvm/tests/output/ex1_0.out new file mode 100644 index 00000000000..4e7c7fcc7ae --- /dev/null +++ b/src/tao/unconstrained/impls/lmvm/tests/output/ex1_0.out @@ -0,0 +1,40 @@ + 0 TAO, Function value: 1.4176, Residual: 7.41172 + 1 TAO, Function value: 0.447004, Residual: 0.749103 + 2 TAO, Function value: 0.214731, Residual: 0.506414 + 3 TAO, Function value: 0.0818304, Residual: 0.34992 + 4 TAO, Function value: 0.0414914, Residual: 0.0703319 + 5 TAO, Function value: 0.0364991, Residual: 0.0786374 + 6 TAO, Function value: 0.0234817, Residual: 0.0589561 + 7 TAO, Function value: 0.0200626, Residual: 0.0301767 + 8 TAO, Function value: 0.016036, Residual: 0.043587 + 9 TAO, Function value: 0.00661074, Residual: 0.00529724 + 10 TAO, Function value: 2.34371e-22, Residual: 2.77451e-11 +Tao Object: 1 MPI process + type: lmvm + Gradient steps: 1 + LMVM Matrix: + Mat Object: (tao_lmvm_) 1 MPI process + type: lmvmbfgs + Scale type: NONE + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factors: phi=0., theta=0.125 + Max. storage: 20 + Used storage: 9 + Number of updates: 9 + Number of rejects: 0 + Number of resets: 1 + TaoLineSearch Object: 1 MPI process + type: more-thuente + maximum function evaluations=30 + tolerances: ftol=0.0001, rtol=1e-10, gtol=1e-06 + total number of function evaluations=0 + total number of gradient evaluations=0 + total number of function/gradient evaluations=6 + Termination reason: 1 + convergence tolerances: gatol=1e-08, steptol=0., gttol=0. + Residual in Function/Gradient:=2.77451e-11 + Objective value=2.34371e-22 + total number of iterations=10, (max: 2000) + total number of function/gradient evaluations=27, (max: 4000) + Solution converged: ||g(X)|| <= gatol diff --git a/src/tao/unconstrained/impls/lmvm/tests/output/ex1_1.out b/src/tao/unconstrained/impls/lmvm/tests/output/ex1_1.out new file mode 100644 index 00000000000..12d91d27031 --- /dev/null +++ b/src/tao/unconstrained/impls/lmvm/tests/output/ex1_1.out @@ -0,0 +1,47 @@ + 0 TAO, Function value: 1.4176, Residual: 7.41172 + 1 TAO, Function value: 0.447004, Residual: 0.749103 + 2 TAO, Function value: 0.214731, Residual: 0.506414 + 3 TAO, Function value: 0.0818304, Residual: 0.34992 + 4 TAO, Function value: 0.0414914, Residual: 0.0703319 + 5 TAO, Function value: 0.0364991, Residual: 0.0786374 + 6 TAO, Function value: 0.0234817, Residual: 0.0589561 + 7 TAO, Function value: 0.0200626, Residual: 0.0301767 + 8 TAO, Function value: 0.016036, Residual: 0.043587 + 9 TAO, Function value: 0.00661074, Residual: 0.00529724 + 10 TAO, Function value: 3.79457e-20, Residual: 2.83804e-10 +Tao Object: 1 MPI process + type: lmvm + Gradient steps: 1 + LMVM Matrix: + Mat Object: (tao_lmvm_) 1 MPI process + type: lmvmdbfgs + Max. storage: 20 + Used storage: 9 + Number of updates: 9 + Number of rejects: 0 + Number of resets: 1 + Mat Object: (J0_) 1 MPI process + type: lmvmdiagbroyden + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factor: theta=0. + Max. 
storage: 1 + Used storage: 0 + Number of updates: 0 + Number of rejects: 0 + Number of resets: 0 + Counts: S x : 9, S^T x : 9, Y x : 9, Y^T x: 9 + TaoLineSearch Object: 1 MPI process + type: more-thuente + maximum function evaluations=30 + tolerances: ftol=0.0001, rtol=1e-10, gtol=1e-06 + total number of function evaluations=0 + total number of gradient evaluations=0 + total number of function/gradient evaluations=6 + Termination reason: 1 + convergence tolerances: gatol=1e-08, steptol=0., gttol=0. + Residual in Function/Gradient:=2.83804e-10 + Objective value=3.79457e-20 + total number of iterations=10, (max: 2000) + total number of function/gradient evaluations=27, (max: 4000) + Solution converged: ||g(X)|| <= gatol diff --git a/src/tao/unconstrained/impls/lmvm/tests/output/ex1_2.out b/src/tao/unconstrained/impls/lmvm/tests/output/ex1_2.out new file mode 100644 index 00000000000..12d91d27031 --- /dev/null +++ b/src/tao/unconstrained/impls/lmvm/tests/output/ex1_2.out @@ -0,0 +1,47 @@ + 0 TAO, Function value: 1.4176, Residual: 7.41172 + 1 TAO, Function value: 0.447004, Residual: 0.749103 + 2 TAO, Function value: 0.214731, Residual: 0.506414 + 3 TAO, Function value: 0.0818304, Residual: 0.34992 + 4 TAO, Function value: 0.0414914, Residual: 0.0703319 + 5 TAO, Function value: 0.0364991, Residual: 0.0786374 + 6 TAO, Function value: 0.0234817, Residual: 0.0589561 + 7 TAO, Function value: 0.0200626, Residual: 0.0301767 + 8 TAO, Function value: 0.016036, Residual: 0.043587 + 9 TAO, Function value: 0.00661074, Residual: 0.00529724 + 10 TAO, Function value: 3.79457e-20, Residual: 2.83804e-10 +Tao Object: 1 MPI process + type: lmvm + Gradient steps: 1 + LMVM Matrix: + Mat Object: (tao_lmvm_) 1 MPI process + type: lmvmdbfgs + Max. storage: 20 + Used storage: 9 + Number of updates: 9 + Number of rejects: 0 + Number of resets: 1 + Mat Object: (J0_) 1 MPI process + type: lmvmdiagbroyden + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factor: theta=0. + Max. storage: 1 + Used storage: 0 + Number of updates: 0 + Number of rejects: 0 + Number of resets: 0 + Counts: S x : 9, S^T x : 9, Y x : 9, Y^T x: 9 + TaoLineSearch Object: 1 MPI process + type: more-thuente + maximum function evaluations=30 + tolerances: ftol=0.0001, rtol=1e-10, gtol=1e-06 + total number of function evaluations=0 + total number of gradient evaluations=0 + total number of function/gradient evaluations=6 + Termination reason: 1 + convergence tolerances: gatol=1e-08, steptol=0., gttol=0. 
+ Residual in Function/Gradient:=2.83804e-10 + Objective value=3.79457e-20 + total number of iterations=10, (max: 2000) + total number of function/gradient evaluations=27, (max: 4000) + Solution converged: ||g(X)|| <= gatol diff --git a/src/tao/unconstrained/impls/nls/nls.c b/src/tao/unconstrained/impls/nls/nls.c index c58a2e76e4e..b5f6c11b7f0 100644 --- a/src/tao/unconstrained/impls/nls/nls.c +++ b/src/tao/unconstrained/impls/nls/nls.c @@ -250,7 +250,10 @@ static PetscErrorCode TaoSolve_NLS(Tao tao) /* Have not converged; continue with Newton method */ while (tao->reason == TAO_CONTINUE_ITERATING) { /* Call general purpose update function */ - PetscTryTypeMethod(tao, update, tao->niter, tao->user_update); + if (tao->ops->update) { + PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); + PetscCall(TaoComputeObjective(tao, tao->solution, &f)); + } ++tao->niter; tao->ksp_its = 0; diff --git a/src/tao/unconstrained/impls/ntl/ntl.c b/src/tao/unconstrained/impls/ntl/ntl.c index 91f58972b67..b9829b13f9d 100644 --- a/src/tao/unconstrained/impls/ntl/ntl.c +++ b/src/tao/unconstrained/impls/ntl/ntl.c @@ -224,7 +224,10 @@ static PetscErrorCode TaoSolve_NTL(Tao tao) /* Have not converged; continue with Newton method */ while (tao->reason == TAO_CONTINUE_ITERATING) { /* Call general purpose update function */ - PetscTryTypeMethod(tao, update, tao->niter, tao->user_update); + if (tao->ops->update) { + PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); + PetscCall(TaoComputeObjective(tao, tao->solution, &f)); + } ++tao->niter; tao->ksp_its = 0; /* Compute the Hessian */ diff --git a/src/tao/unconstrained/impls/ntr/ntr.c b/src/tao/unconstrained/impls/ntr/ntr.c index 081f2119e71..71631ab9759 100644 --- a/src/tao/unconstrained/impls/ntr/ntr.c +++ b/src/tao/unconstrained/impls/ntr/ntr.c @@ -220,7 +220,10 @@ static PetscErrorCode TaoSolve_NTR(Tao tao) /* Have not converged; continue with Newton method */ while (tao->reason == TAO_CONTINUE_ITERATING) { /* Call general purpose update function */ - PetscTryTypeMethod(tao, update, tao->niter, tao->user_update); + if (tao->ops->update) { + PetscUseTypeMethod(tao, update, tao->niter, tao->user_update); + PetscCall(TaoComputeObjective(tao, tao->solution, &f)); + } ++tao->niter; tao->ksp_its = 0; /* Compute the Hessian */ diff --git a/src/tao/unconstrained/tutorials/eptorsion2f.F90 b/src/tao/unconstrained/tutorials/eptorsion2f.F90 index f0a4b629de5..a151ce2b265 100644 --- a/src/tao/unconstrained/tutorials/eptorsion2f.F90 +++ b/src/tao/unconstrained/tutorials/eptorsion2f.F90 @@ -76,7 +76,7 @@ module eptorsion2fmodule PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par',param,flg,ierr)) ! Set up distributed array and vectors - PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,mx,my,Nx,Ny,i1,i1,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,dm,ierr)) + PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,mx,my,Nx,Ny,i1,i1,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,dm,ierr)) PetscCallA(DMSetFromOptions(dm,ierr)) PetscCallA(DMSetUp(dm,ierr)) @@ -174,12 +174,11 @@ subroutine FormInitialGuess(X,ierr) do i=xs,xe-1 k = (j-gys)*gxm + i-gxs val = min((min(i+1,mx-i))*hx,temp) - PetscCall(VecSetValuesLocal(X,i1,k,val,ADD_VALUES,ierr)) + PetscCall(VecSetValuesLocal(X,i1,[k],[val],ADD_VALUES,ierr)) end do end do PetscCall(VecAssemblyBegin(X,ierr)) PetscCall(VecAssemblyEnd(X,ierr)) - return end ! 
--------------------------------------------------------------------- @@ -292,17 +291,17 @@ subroutine FormFunctionGradient(tao,X,f,G,dummy,ierr) if (i .ne. -1 .and. j .ne. -1) then ind = k val = - dvdx/hx - dvdy/hy - cdiv3 - PetscCall(VecSetValuesLocal(G,i1,k,val,ADD_VALUES,ierr)) + PetscCall(VecSetValuesLocal(G,i1,[k],[val],ADD_VALUES,ierr)) endif if (i .ne. mx-1 .and. j .ne. -1) then ind = k+1 val = dvdx/hx - cdiv3 - PetscCall(VecSetValuesLocal(G,i1,ind,val,ADD_VALUES,ierr)) + PetscCall(VecSetValuesLocal(G,i1,[ind],[val],ADD_VALUES,ierr)) endif if (i .ne. -1 .and. j .ne. my-1) then ind = k+gxm val = dvdy/hy - cdiv3 - PetscCall(VecSetValuesLocal(G,i1,ind,val,ADD_VALUES,ierr)) + PetscCall(VecSetValuesLocal(G,i1,[ind],[val],ADD_VALUES,ierr)) endif fquad = fquad + dvdx*dvdx + dvdy*dvdy flin = flin - cdiv3 * (v+vr+vt) @@ -325,17 +324,17 @@ subroutine FormFunctionGradient(tao,X,f,G,dummy,ierr) if (i .ne. mx .and. j .ne. 0) then ind = k-gxm val = - dvdy/hy - cdiv3 - PetscCall(VecSetValuesLocal(G,i1,ind,val,ADD_VALUES,ierr)) + PetscCall(VecSetValuesLocal(G,i1,[ind],[val],ADD_VALUES,ierr)) endif if (i .ne. 0 .and. j .ne. my) then ind = k-1 val = - dvdx/hx - cdiv3 - PetscCall(VecSetValuesLocal(G,i1,ind,val,ADD_VALUES,ierr)) + PetscCall(VecSetValuesLocal(G,i1,[ind],[val],ADD_VALUES,ierr)) endif if (i .ne. mx .and. j .ne. my) then ind = k val = dvdx/hx + dvdy/hy - cdiv3 - PetscCall(VecSetValuesLocal(G,i1,ind,val,ADD_VALUES,ierr)) + PetscCall(VecSetValuesLocal(G,i1,[ind],[val],ADD_VALUES,ierr)) endif fquad = fquad + dvdx*dvdx + dvdy*dvdy flin = flin - cdiv3*(vb + vl + v) @@ -357,7 +356,6 @@ subroutine FormFunctionGradient(tao,X,f,G,dummy,ierr) ! Sum function contributions from all processes PetscCallMPI(MPI_Allreduce(floc,f,1,MPIU_SCALAR,MPIU_SUM,PETSC_COMM_WORLD,ierr)) PetscCall(PetscLogFlops(20.0d0*(ye-ysm)*(xe-xsm)+16.0d0*(xep-xs)*(yep-ys),ierr)) - return end subroutine ComputeHessian(tao, X, H, Hpre, dummy, ierr) @@ -416,7 +414,7 @@ subroutine ComputeHessian(tao, X, H, Hpre, dummy, ierr) k = k + 1 endif - PetscCall(MatSetValuesLocal(H,i1,row,k,col,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValuesLocal(H,i1,[row],k,col,v,INSERT_VALUES,ierr)) enddo enddo @@ -433,7 +431,6 @@ subroutine ComputeHessian(tao, X, H, Hpre, dummy, ierr) PetscCall(PetscLogFlops(9.0d0*xm*ym + 49.0d0*xm,ierr)) ierr = 0 - return end subroutine Monitor(tao, dummy, ierr) @@ -455,7 +452,6 @@ subroutine Monitor(tao, dummy, ierr) ierr = 0 - return end subroutine ConvergenceTest(tao, dummy, ierr) @@ -477,7 +473,6 @@ subroutine ConvergenceTest(tao, dummy, ierr) ierr = 0 - return end !/*TEST diff --git a/src/tao/unconstrained/tutorials/makefile b/src/tao/unconstrained/tutorials/makefile index edc89dc82af..b683a1effcf 100644 --- a/src/tao/unconstrained/tutorials/makefile +++ b/src/tao/unconstrained/tutorials/makefile @@ -4,3 +4,7 @@ CLEANFILES = eptorsion1 eptorsion2 minsurf2 rosenbrock1 eptorsion2f rosenbrock1 include ${PETSC_DIR}/lib/petsc/conf/variables include ${PETSC_DIR}/lib/petsc/conf/rules + +rosenbrock4.o: rosenbrock4.h + +rosenbrock4cu.o: rosenbrock4.h diff --git a/src/tao/unconstrained/tutorials/output/minsurf2_1.out b/src/tao/unconstrained/tutorials/output/minsurf2_1.out index a539aa5af35..bd045991fb2 100644 --- a/src/tao/unconstrained/tutorials/output/minsurf2_1.out +++ b/src/tao/unconstrained/tutorials/output/minsurf2_1.out @@ -1,18 +1,40 @@ ---- Minimum Surface Area Problem ----- -mx: 10 my: 8 +mx: 10 my: 8 -iter = 0, Function value 1.45591, Residual: 0.21372 -iter = 1, Function value 1.43049, Residual: 0.132023 -iter = 
2, Function value 1.42069, Residual: 0.0887699 -iter = 3, Function value 1.41857, Residual: 0.0676407 -iter = 4, Function value 1.41781, Residual: 0.0161113 -iter = 5, Function value 1.41776, Residual: 0.00313094 -iter = 6, Function value 1.41776, Residual: 0.00280956 -iter = 7, Function value 1.41776, Residual: 0.000935101 +iter = 0, Function value 1.45591, Residual: 0.21372 +iter = 1, Function value 1.43049, Residual: 0.132023 +iter = 2, Function value 1.42069, Residual: 0.0887699 +iter = 3, Function value 1.41857, Residual: 0.0676407 +iter = 4, Function value 1.41781, Residual: 0.0161113 +iter = 5, Function value 1.41776, Residual: 0.00313094 +iter = 6, Function value 1.41776, Residual: 0.00280956 +iter = 7, Function value 1.41776, Residual: 0.000935101 Tao Object: 1 MPI process type: lmvm Gradient steps: 1 + LMVM Matrix: + Mat Object: (tao_lmvm_) 1 MPI process + type: lmvmbfgs + Scale type: DIAGONAL + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factors: phi=0., theta=0.125 + Max. storage: 5 + Used storage: 5 + Number of updates: 6 + Number of rejects: 0 + Number of resets: 1 + Mat Object: (tao_lmvm_J0_) 1 MPI process + type: lmvmdiagbroyden + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factor: theta=0.125 + Max. storage: 1 + Used storage: 1 + Number of updates: 6 + Number of rejects: 0 + Number of resets: 1 TaoLineSearch Object: 1 MPI process type: more-thuente maximum function evaluations=30 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_11.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_11.out index 622d0bc210f..b9e7b161796 100644 --- a/src/tao/unconstrained/tutorials/output/rosenbrock1_11.out +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_11.out @@ -1 +1 @@ -error between LMVM MatMult and MatSolve: < 1.e-11 +Inverse error of LMVM MatMult and MatSolve: < 1.e-11 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_12.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_12.out index 622d0bc210f..b9e7b161796 100644 --- a/src/tao/unconstrained/tutorials/output/rosenbrock1_12.out +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_12.out @@ -1 +1 @@ -error between LMVM MatMult and MatSolve: < 1.e-11 +Inverse error of LMVM MatMult and MatSolve: < 1.e-11 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_13.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_13.out index 622d0bc210f..b9e7b161796 100644 --- a/src/tao/unconstrained/tutorials/output/rosenbrock1_13.out +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_13.out @@ -1 +1 @@ -error between LMVM MatMult and MatSolve: < 1.e-11 +Inverse error of LMVM MatMult and MatSolve: < 1.e-11 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_14.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_14.out index 622d0bc210f..b9e7b161796 100644 --- a/src/tao/unconstrained/tutorials/output/rosenbrock1_14.out +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_14.out @@ -1 +1 @@ -error between LMVM MatMult and MatSolve: < 1.e-11 +Inverse error of LMVM MatMult and MatSolve: < 1.e-11 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_15.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_15.out index 622d0bc210f..b9e7b161796 100644 --- a/src/tao/unconstrained/tutorials/output/rosenbrock1_15.out +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_15.out @@ -1 +1 @@ -error between LMVM MatMult and MatSolve: < 1.e-11 +Inverse error of LMVM MatMult and MatSolve: < 1.e-11 diff --git 
a/src/tao/unconstrained/tutorials/output/rosenbrock1_16.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_16.out index 622d0bc210f..b9e7b161796 100644 --- a/src/tao/unconstrained/tutorials/output/rosenbrock1_16.out +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_16.out @@ -1 +1 @@ -error between LMVM MatMult and MatSolve: < 1.e-11 +Inverse error of LMVM MatMult and MatSolve: < 1.e-11 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_21.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_21.out index 622d0bc210f..b9e7b161796 100644 --- a/src/tao/unconstrained/tutorials/output/rosenbrock1_21.out +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_21.out @@ -1 +1 @@ -error between LMVM MatMult and MatSolve: < 1.e-11 +Inverse error of LMVM MatMult and MatSolve: < 1.e-11 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_29.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_29.out new file mode 100644 index 00000000000..fe9dbdce5dc --- /dev/null +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_29.out @@ -0,0 +1 @@ +Inverse error of LMVM MatMult and MatSolve is not small: 4.774872e+01 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_30.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_30.out new file mode 100644 index 00000000000..1cec0806aa0 --- /dev/null +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_30.out @@ -0,0 +1 @@ +Inverse error of LMVM MatMult and MatSolve is not small: 2.946727e+01 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_31.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_31.out new file mode 100644 index 00000000000..dd8006d4dfa --- /dev/null +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_31.out @@ -0,0 +1 @@ +Inverse error of LMVM MatMult and MatSolve is not small: 6.715717e+01 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock1_32.out b/src/tao/unconstrained/tutorials/output/rosenbrock1_32.out new file mode 100644 index 00000000000..e8b4abd12c5 --- /dev/null +++ b/src/tao/unconstrained/tutorials/output/rosenbrock1_32.out @@ -0,0 +1 @@ +Inverse error of LMVM MatMult and MatSolve is not small: 1.620015e+00 diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock4_bfgs_rejects.out b/src/tao/unconstrained/tutorials/output/rosenbrock4_bfgs_rejects.out new file mode 100644 index 00000000000..48da6aaeb0c --- /dev/null +++ b/src/tao/unconstrained/tutorials/output/rosenbrock4_bfgs_rejects.out @@ -0,0 +1,68 @@ +iter = 0, Function value 9., Residual: 6. 
+iter = 1, Function value 7.21816, Residual: 5.95088 +iter = 2, Function value 5.65482, Residual: 5.01816 +iter = 3, Function value 4.75704, Residual: 10.5299 +iter = 4, Function value 4.06716, Residual: 21.3356 +iter = 5, Function value 2.0834, Residual: 11.9903 +iter = 6, Function value 1.17095, Residual: 9.73895 +iter = 7, Function value 0.870077, Residual: 8.74691 +iter = 8, Function value 0.383667, Residual: 2.42654 +iter = 9, Function value 0.351094, Residual: 1.4684 +iter = 10, Function value 0.332918, Residual: 1.87654 +iter = 11, Function value 0.312951, Residual: 1.25957 +iter = 12, Function value 0.245345, Residual: 1.06067 +iter = 13, Function value 0.160065, Residual: 2.84589 +iter = 14, Function value 0.0916562, Residual: 1.87019 +iter = 15, Function value 0.0496723, Residual: 1.07232 +iter = 16, Function value 0.0158149, Residual: 0.775471 +iter = 17, Function value 0.00579069, Residual: 1.06559 +iter = 18, Function value 0.000547221, Residual: 0.26713 +iter = 19, Function value 0.000179375, Residual: 0.169825 +iter = 20, Function value 9.32084e-05, Residual: 0.132614 +iter = 21, Function value 2.38703e-05, Residual: 0.0537998 +iter = 22, Function value 1.24628e-05, Residual: 0.0295418 +iter = 23, Function value 8.83612e-06, Residual: 0.0164191 +iter = 24, Function value 5.05478e-06, Residual: 0.0102872 +iter = 25, Function value 3.63146e-06, Residual: 0.00527079 +iter = 26, Function value 3.46106e-06, Residual: 0.00557845 +iter = 27, Function value 3.30711e-06, Residual: 0.00230313 +iter = 28, Function value 3.23715e-06, Residual: 0.00244554 +iter = 29, Function value 2.80746e-06, Residual: 0.00559006 +iter = 30, Function value 2.18681e-06, Residual: 0.00839101 +iter = 31, Function value 1.05877e-06, Residual: 0.010349 +iter = 32, Function value 8.6079e-07, Residual: 0.0104813 +iter = 33, Function value 2.20004e-07, Residual: 0.00564676 +iter = 34, Function value 2.18779e-08, Residual: 0.0010248 +iter = 35, Function value 6.78175e-09, Residual: 0.000612843 +iter = 36, Function value 1.22801e-09, Residual: 0.00039596 +iter = 37, Function value 2.07093e-10, Residual: 0.000118131 +iter = 38, Function value 5.2308e-11, Residual: 5.15664e-05 +Tao Object: 1 MPI process + type: blmvm + Gradient steps: 0 + Mat Object: (tao_blmvm_) 1 MPI process + type: lmvmbfgs + rows=10, cols=10 + Scale type: SCALAR + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factors: phi=0., theta=0.125 + Max. storage: 5 + Used storage: 5 + Number of updates: 31 + Number of rejects: 6 + Number of resets: 2 + TaoLineSearch Object: 1 MPI process + type: more-thuente + maximum function evaluations=30 + tolerances: ftol=0.0001, rtol=1e-10, gtol=0.9 + total number of function evaluations=0 + total number of gradient evaluations=0 + total number of function/gradient evaluations=1 + Termination reason: 1 + convergence tolerances: gatol=0.0001, steptol=0., gttol=0. + Residual in Function/Gradient:=5.15664e-05 + Objective value=5.2308e-11 + total number of iterations=38, (max: 2000) + total number of function/gradient evaluations=48, (max: 4000) + Solution converged: ||g(X)|| <= gatol diff --git a/src/tao/unconstrained/tutorials/output/rosenbrock4_dbfgs_rejects.out b/src/tao/unconstrained/tutorials/output/rosenbrock4_dbfgs_rejects.out new file mode 100644 index 00000000000..3ac303af515 --- /dev/null +++ b/src/tao/unconstrained/tutorials/output/rosenbrock4_dbfgs_rejects.out @@ -0,0 +1,76 @@ +iter = 0, Function value 9., Residual: 6. 
+iter = 1, Function value 7.21816, Residual: 5.95088 +iter = 2, Function value 5.65482, Residual: 5.01816 +iter = 3, Function value 4.75704, Residual: 10.5299 +iter = 4, Function value 4.06716, Residual: 21.3356 +iter = 5, Function value 2.0834, Residual: 11.9903 +iter = 6, Function value 1.17095, Residual: 9.73895 +iter = 7, Function value 0.870077, Residual: 8.74691 +iter = 8, Function value 0.383667, Residual: 2.42654 +iter = 9, Function value 0.351094, Residual: 1.4684 +iter = 10, Function value 0.332918, Residual: 1.87654 +iter = 11, Function value 0.312951, Residual: 1.25957 +iter = 12, Function value 0.245345, Residual: 1.06067 +iter = 13, Function value 0.160065, Residual: 2.84589 +iter = 14, Function value 0.0916562, Residual: 1.87019 +iter = 15, Function value 0.0496723, Residual: 1.07232 +iter = 16, Function value 0.0158149, Residual: 0.775471 +iter = 17, Function value 0.00579069, Residual: 1.06559 +iter = 18, Function value 0.000547221, Residual: 0.26713 +iter = 19, Function value 0.000179375, Residual: 0.169825 +iter = 20, Function value 9.32084e-05, Residual: 0.132614 +iter = 21, Function value 6.05728e-05, Residual: 0.114998 +iter = 22, Function value 1.05049e-05, Residual: 0.020588 +iter = 23, Function value 6.89895e-06, Residual: 0.0152198 +iter = 24, Function value 4.51204e-06, Residual: 0.00942712 +iter = 25, Function value 4.11255e-06, Residual: 0.0125682 +iter = 26, Function value 3.39556e-06, Residual: 0.00271934 +iter = 27, Function value 3.35014e-06, Residual: 0.00188774 +iter = 28, Function value 3.24672e-06, Residual: 0.0023488 +iter = 29, Function value 2.96597e-06, Residual: 0.00506394 +iter = 30, Function value 2.4229e-06, Residual: 0.00823409 +iter = 31, Function value 1.44489e-06, Residual: 0.0102945 +iter = 32, Function value 6.55679e-07, Residual: 0.0109646 +iter = 33, Function value 2.70019e-07, Residual: 0.00549772 +iter = 34, Function value 9.56495e-08, Residual: 0.00243722 +iter = 35, Function value 2.136e-08, Residual: 0.0010518 +iter = 36, Function value 3.77283e-09, Residual: 0.000513505 +iter = 37, Function value 1.39562e-09, Residual: 0.000472808 +iter = 38, Function value 3.2961e-11, Residual: 6.98847e-05 +Tao Object: 1 MPI process + type: blmvm + Gradient steps: 0 + Mat Object: (tao_blmvm_) 1 MPI process + type: lmvmdbfgs + rows=10, cols=10 + Max. storage: 5 + Used storage: 5 + Number of updates: 30 + Number of rejects: 7 + Number of resets: 2 + Mat Object: (J0_) 1 MPI process + type: lmvmdiagbroyden + rows=10, cols=10 + Scale history: 1 + Scale params: alpha=1., beta=0.5, rho=1. + Convex factor: theta=0. + Max. storage: 1 + Used storage: 0 + Number of updates: 0 + Number of rejects: 0 + Number of resets: 0 + Counts: S x : 34, S^T x : 71, Y x : 34, Y^T x: 34 + TaoLineSearch Object: 1 MPI process + type: more-thuente + maximum function evaluations=30 + tolerances: ftol=0.0001, rtol=1e-10, gtol=0.9 + total number of function evaluations=0 + total number of gradient evaluations=0 + total number of function/gradient evaluations=1 + Termination reason: 1 + convergence tolerances: gatol=0.0001, steptol=0., gttol=0. 
+ Residual in Function/Gradient:=6.98847e-05 + Objective value=3.2961e-11 + total number of iterations=38, (max: 2000) + total number of function/gradient evaluations=49, (max: 4000) + Solution converged: ||g(X)|| <= gatol diff --git a/src/tao/unconstrained/tutorials/rosenbrock1.c b/src/tao/unconstrained/tutorials/rosenbrock1.c index 17f80957b26..714fda52f31 100644 --- a/src/tao/unconstrained/tutorials/rosenbrock1.c +++ b/src/tao/unconstrained/tutorials/rosenbrock1.c @@ -97,11 +97,11 @@ int main(int argc, char **argv) PetscCall(VecAXPY(out2, -1.0, in)); PetscCall(VecNorm(out2, NORM_2, &mult_solve_dist)); if (mult_solve_dist < 1.e-11) { - PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "error between LMVM MatMult and MatSolve: < 1.e-11\n")); + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "Inverse error of LMVM MatMult and MatSolve: < 1.e-11\n")); } else if (mult_solve_dist < 1.e-6) { - PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "error between LMVM MatMult and MatSolve: < 1.e-6\n")); + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "Inverse error of LMVM MatMult and MatSolve: < 1.e-6\n")); } else { - PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "error between LMVM MatMult and MatSolve: %e\n", (double)mult_solve_dist)); + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "Inverse error of LMVM MatMult and MatSolve is not small: %e\n", (double)mult_solve_dist)); } PetscCall(VecDestroy(&in)); PetscCall(VecDestroy(&out)); @@ -242,136 +242,165 @@ PetscErrorCode FormHessian(Tao tao, Vec X, Mat H, Mat Hpre, void *ptr) /*TEST build: - requires: !complex + requires: !complex test: - args: -tao_monitor_short -tao_type nls -tao_gatol 1.e-4 - requires: !single + requires: !single + args: -tao_monitor_short -tao_type nls -tao_gatol 1.e-4 test: - suffix: 2 - args: -tao_monitor_short -tao_type lmvm -tao_gatol 1.e-3 + suffix: 2 + requires: !single + args: -tao_monitor_short -tao_type lmvm -tao_gatol 1.e-3 test: - suffix: 3 - args: -tao_monitor_short -tao_type ntr -tao_gatol 1.e-4 - requires: !single + suffix: 3 + requires: !single + args: -tao_monitor_short -tao_type ntr -tao_gatol 1.e-4 test: - suffix: 4 - args: -tao_monitor_short -tao_type ntr -tao_mf_hessian -tao_ntr_pc_type none -tao_gatol 1.e-4 + suffix: 4 + requires: !single + args: -tao_monitor_short -tao_type ntr -tao_mf_hessian -tao_ntr_pc_type none -tao_gatol 1.e-4 test: - suffix: 5 - args: -tao_monitor_short -tao_type bntr -tao_gatol 1.e-4 + suffix: 5 + requires: !single + args: -tao_monitor_short -tao_type bntr -tao_gatol 1.e-4 test: - suffix: 6 - args: -tao_monitor_short -tao_type bntl -tao_gatol 1.e-4 + suffix: 6 + requires: !single + args: -tao_monitor_short -tao_type bntl -tao_gatol 1.e-4 test: - suffix: 7 - args: -tao_monitor_short -tao_type bnls -tao_gatol 1.e-4 + suffix: 7 + requires: !single + args: -tao_monitor_short -tao_type bnls -tao_gatol 1.e-4 test: - suffix: 8 - args: -tao_monitor_short -tao_type bntr -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + suffix: 8 + requires: !single + args: -tao_monitor_short -tao_type bntr -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 test: - suffix: 9 - args: -tao_monitor_short -tao_type bntl -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + suffix: 9 + requires: !single + args: -tao_monitor_short -tao_type bntl -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 test: - suffix: 10 - args: -tao_monitor_short -tao_type bnls -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + suffix: 10 + requires: !single + args: -tao_monitor_short -tao_type bnls -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 
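+  # tests 11-16 and 21 pass -test_lmvm, which applies MatMult followed by MatSolve on the LMVM matrix after the solve and reports the round-trip error (the "Inverse error" lines in the output files above)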
test: - suffix: 11 - args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmbroyden + suffix: 11 + requires: !single + args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmbroyden test: - suffix: 12 - args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmbadbroyden + suffix: 12 + requires: !single + args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmbadbroyden test: suffix: 13 + requires: !single args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmsymbroyden test: suffix: 14 + requires: !single args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmbfgs test: suffix: 15 + requires: !single args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdfp test: suffix: 16 + requires: !single args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmsr1 test: suffix: 17 + requires: !single args: -tao_monitor_short -tao_gatol 1e-4 -tao_type bqnls test: suffix: 18 + requires: !single args: -tao_monitor_short -tao_gatol 1e-4 -tao_type blmvm test: suffix: 19 + requires: !single args: -tao_monitor_short -tao_gatol 1e-4 -tao_type bqnktr -tao_bqnk_mat_type lmvmsr1 test: suffix: 20 + requires: !single args: -tao_monitor -tao_gatol 1e-4 -tao_type blmvm -tao_ls_monitor test: suffix: 21 + requires: !single args: -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmsymbadbroyden test: suffix: 22 + requires: !single args: -tao_max_it 1 -tao_converged_reason test: suffix: 23 + requires: !single args: -tao_max_funcs 0 -tao_converged_reason test: suffix: 24 + requires: !single args: -tao_gatol 10 -tao_converged_reason test: suffix: 25 + requires: !single args: -tao_grtol 10 -tao_converged_reason test: suffix: 26 + requires: !single args: -tao_gttol 10 -tao_converged_reason test: suffix: 27 + requires: !single args: -tao_steptol 10 -tao_converged_reason test: suffix: 28 + requires: !single args: -tao_fmin 10 -tao_converged_reason test: suffix: snes + requires: !single args: -snes_monitor ::ascii_info_detail -tao_type snes -snes_type newtontr -snes_atol 1.e-4 -pc_type none -tao_mf_hessian -ksp_type cg test: suffix: snes_ls_armijo + requires: !single args: -snes_monitor ::ascii_info_detail -tao_type snes -snes_type newtonls -snes_atol 1.e-4 -pc_type none -tao_mf_hessian -snes_linesearch_monitor -snes_linesearch_order 1 test: suffix: snes_tr_cgnegcurve_kmdc - args: -snes_monitor ::ascii_info_detail -tao_type snes -snes_type newtontr -snes_atol 1.e-4 -pc_type none -ksp_type cg -snes_tr_kmdc 0.9 -ksp_converged_neg_curve -ksp_converged_reason requires: !single + args: -snes_monitor ::ascii_info_detail -tao_type snes -snes_type newtontr -snes_atol 1.e-4 -pc_type none -ksp_type cg -snes_tr_kmdc 0.9 -ksp_converged_neg_curve -ksp_converged_reason test: suffix: snes_ls_lmvm + requires: !single args: -snes_monitor ::ascii_info_detail -tao_type snes -snes_type newtonls -snes_atol 1.e-4 -pc_type lmvm -tao_mf_hessian TEST*/ diff --git a/src/tao/unconstrained/tutorials/rosenbrock1f.F90 b/src/tao/unconstrained/tutorials/rosenbrock1f.F90 index f0bb79acf8c..6059983dced 100644 --- a/src/tao/unconstrained/tutorials/rosenbrock1f.F90 +++ b/src/tao/unconstrained/tutorials/rosenbrock1f.F90 @@ -61,7 +61,7 @@ PetscCallA(VecCreateSeq(PETSC_COMM_SELF,n,x,ierr)) ! 
Allocate storage space for Hessian; - PetscCallA(MatCreateSeqBAIJ(PETSC_COMM_SELF,i2,n,n,i1,PETSC_NULL_INTEGER, H,ierr)) + PetscCallA(MatCreateSeqBAIJ(PETSC_COMM_SELF,i2,n,n,i1,PETSC_NULL_INTEGER_ARRAY, H,ierr)) PetscCallA(MatSetOption(H,MAT_SYMMETRIC,PETSC_TRUE,ierr)) @@ -153,7 +153,6 @@ subroutine FormFunctionGradient(tao, X, f, G, dummy, ierr) f = ff PetscCall(PetscLogFlops(15.0d0*nn,ierr)) - return end ! @@ -236,7 +235,6 @@ subroutine FormHessian(tao,X,H,PrecH,dummy,ierr) PetscCall(PetscLogFlops(9.0d0*nn,ierr)) - return end ! diff --git a/src/tao/unconstrained/tutorials/rosenbrock4.c b/src/tao/unconstrained/tutorials/rosenbrock4.c new file mode 100644 index 00000000000..d457a4959af --- /dev/null +++ b/src/tao/unconstrained/tutorials/rosenbrock4.c @@ -0,0 +1,206 @@ +static char help[] = "This example demonstrates the use of different performance portable backends in user-defined callbacks in Tao.\n"; + +#include "rosenbrock4.h" + +int main(int argc, char **argv) +{ + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); + PetscCall(RosenbrockMain()); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + build: + requires: !complex !__float128 !single !defined(PETSC_USE_64BIT_INDICES) + + test: + suffix: 1 + nsize: {{1 2 3}} + args: -tao_monitor_short -tao_type nls -tao_gatol 1.e-4 + output_file: output/rosenbrock1_1.out + + test: + suffix: 2 + args: -tao_monitor_short -tao_type lmvm -tao_gatol 1.e-3 + output_file: output/rosenbrock1_2.out + + test: + suffix: 3 + args: -tao_monitor_short -tao_type ntr -tao_gatol 1.e-4 + output_file: output/rosenbrock1_3.out + + test: + suffix: 4 + args: -tao_monitor_short -tao_type ntr -tao_mf_hessian -tao_ntr_pc_type none -tao_gatol 1.e-4 + output_file: output/rosenbrock1_4.out + + test: + suffix: 5 + args: -tao_monitor_short -tao_type bntr -tao_gatol 1.e-4 + output_file: output/rosenbrock1_5.out + + test: + suffix: 6 + args: -tao_monitor_short -tao_type bntl -tao_gatol 1.e-4 + output_file: output/rosenbrock1_6.out + + test: + suffix: 7 + args: -tao_monitor_short -tao_type bnls -tao_gatol 1.e-4 + output_file: output/rosenbrock1_7.out + + test: + suffix: 8 + args: -tao_monitor_short -tao_type bntr -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + output_file: output/rosenbrock1_8.out + + test: + suffix: 9 + args: -tao_monitor_short -tao_type bntl -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + output_file: output/rosenbrock1_9.out + + test: + suffix: 10 + args: -tao_monitor_short -tao_type bnls -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + output_file: output/rosenbrock1_10.out + + test: + suffix: 11 + args: -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmbroyden + output_file: output/rosenbrock1_11.out + + test: + suffix: 12 + args: -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmbadbroyden + output_file: output/rosenbrock1_12.out + + test: + suffix: 13 + args: -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmsymbroyden + output_file: output/rosenbrock1_13.out + + test: + suffix: 14 + args: -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmbfgs + output_file: output/rosenbrock1_14.out + + test: + suffix: 15 + args: -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmdfp + output_file: output/rosenbrock1_15.out + + test: + suffix: 16 + args: -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmsr1 + output_file: output/rosenbrock1_16.out + + test: + suffix: 17 + args: -tao_monitor_short -tao_gatol 1e-4 -tao_type bqnls + output_file: 
output/rosenbrock1_17.out + + test: + suffix: 18 + args: -tao_monitor_short -tao_gatol 1e-4 -tao_type blmvm + output_file: output/rosenbrock1_18.out + + test: + suffix: 19 + args: -tao_monitor_short -tao_gatol 1e-4 -tao_type bqnktr -tao_bqnk_mat_type lmvmsr1 + output_file: output/rosenbrock1_19.out + + test: + suffix: 20 + args: -tao_monitor -tao_gatol 1e-4 -tao_type blmvm -tao_ls_monitor + output_file: output/rosenbrock1_20.out + + test: + suffix: 21 + args: -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmsymbadbroyden + output_file: output/rosenbrock1_21.out + + test: + suffix: 22 + args: -tao_max_it 1 -tao_converged_reason + output_file: output/rosenbrock1_22.out + + test: + suffix: 23 + args: -tao_max_funcs 0 -tao_converged_reason + output_file: output/rosenbrock1_23.out + + test: + suffix: 24 + args: -tao_gatol 10 -tao_converged_reason + output_file: output/rosenbrock1_24.out + + test: + suffix: 25 + args: -tao_grtol 10 -tao_converged_reason + output_file: output/rosenbrock1_25.out + + test: + suffix: 26 + args: -tao_gttol 10 -tao_converged_reason + output_file: output/rosenbrock1_26.out + + test: + suffix: 27 + args: -tao_steptol 10 -tao_converged_reason + output_file: output/rosenbrock1_27.out + + test: + suffix: 28 + args: -tao_fmin 10 -tao_converged_reason + output_file: output/rosenbrock1_28.out + + test: + suffix: test_dbfgs + nsize: {{1 2 3}} + output_file: output/rosenbrock1_14.out + args: -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdbfgs -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lbfgs_type {{inplace reorder}} -tao_bqnk_mat_lbfgs_recursive {{0 1}} + + test: + suffix: test_ddfp + nsize: {{1 2 3}} + output_file: output/rosenbrock1_14.out + args: -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmddfp -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_ldfp_type {{inplace reorder}} -tao_bqnk_mat_ldfp_recursive {{0 1}} + + test: + suffix: test_dqn_1 + nsize: 1 + output_file: output/rosenbrock1_29.out + args: -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdqn -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lqn_type {{inplace reorder}} + + test: + suffix: test_dqn_2 + nsize: 2 + output_file: output/rosenbrock1_30.out + args: -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdqn -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lqn_type {{inplace reorder}} + + test: + suffix: test_dqn_3 + nsize: 3 + output_file: output/rosenbrock1_31.out + args: -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdqn -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lqn_type {{inplace reorder}} + + test: + suffix: test_dqn_diag + output_file: output/rosenbrock1_32.out + args: -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdqn -tao_bqnk_mat_lmvm_scale_type diagonal -tao_bqnk_mat_lqn_type {{inplace reorder}} + + # test with a large update rejection tolerance to test that portion of code + test: + suffix: bfgs_rejects + output_file: output/rosenbrock4_bfgs_rejects.out + args: -tao_monitor_short -tao_gatol 1e-4 -tao_type blmvm -tao_view -n 10 -bs 10 -alpha 8.0 -tao_blmvm_mat_lmvm_eps 0.01 -tao_blmvm_mat_type lmvmbfgs -tao_blmvm_mat_lmvm_scale_type scalar + + test: + suffix: dbfgs_rejects + output_file: output/rosenbrock4_dbfgs_rejects.out + args: -tao_monitor_short -tao_gatol 1e-4 -tao_type blmvm -tao_view -n 10 -bs 10 -alpha 8.0 -tao_blmvm_mat_lmvm_eps 0.01 -tao_blmvm_mat_type lmvmdbfgs -tao_blmvm_mat_lmvm_scale_type scalar + +TEST*/ diff --git 
a/src/tao/unconstrained/tutorials/rosenbrock4.h b/src/tao/unconstrained/tutorials/rosenbrock4.h new file mode 100644 index 00000000000..45d602241d9 --- /dev/null +++ b/src/tao/unconstrained/tutorials/rosenbrock4.h @@ -0,0 +1,745 @@ +#pragma once + +#include <petsctao.h> +#include <petscsf.h> +#include <petscdevice.h> +#include <petscdevice_cupm.h> + +/* + User-defined application context - contains data needed by the + application-provided call-back routines that evaluate the function, + gradient, and hessian. +*/ + +typedef struct _Rosenbrock { + PetscInt bs; // each block of bs variables is one chained multidimensional Rosenbrock problem + PetscInt i_start, i_end; + PetscInt c_start, c_end; + PetscReal alpha; // condition parameter +} Rosenbrock; + +typedef struct _AppCtx *AppCtx; +struct _AppCtx { + MPI_Comm comm; + PetscInt n; /* dimension */ + PetscInt n_local; + PetscInt n_local_comp; + Rosenbrock problem; + Vec Hvalues; /* vector for writing COO values of this MPI process */ + Vec gvalues; /* vector for writing gradient values of this MPI process */ + Vec fvector; + PetscSF off_process_scatter; + PetscSF gscatter; + Vec off_process_values; /* buffer for off-process values if chained */ + PetscBool test_lmvm; + PetscLogEvent event_f, event_g, event_fg; +}; + +/* -------------- User-defined routines ---------- */ + +static PETSC_HOSTDEVICE_INLINE_DECL PetscReal RosenbrockObjective(PetscScalar alpha, PetscScalar x_1, PetscScalar x_2) +{ + PetscScalar d = x_2 - x_1 * x_1; + PetscScalar e = 1.0 - x_1; + return alpha * d * d + e * e; +} + +static const PetscLogDouble RosenbrockObjectiveFlops = 7.0; + +static PETSC_HOSTDEVICE_INLINE_DECL void RosenbrockGradient(PetscScalar alpha, PetscScalar x_1, PetscScalar x_2, PetscScalar g[2]) +{ + PetscScalar d = x_2 - x_1 * x_1; + PetscScalar e = 1.0 - x_1; + PetscScalar g2 = alpha * d * 2.0; + + g[0] = -2.0 * x_1 * g2 - 2.0 * e; + g[1] = g2; +} + +static const PetscLogDouble RosenbrockGradientFlops = 9.0; + +static PETSC_HOSTDEVICE_INLINE_DECL PetscReal RosenbrockObjectiveGradient(PetscScalar alpha, PetscScalar x_1, PetscScalar x_2, PetscScalar g[2]) +{ + PetscScalar d = x_2 - x_1 * x_1; + PetscScalar e = 1.0 - x_1; + PetscScalar ad = alpha * d; + PetscScalar g2 = ad * 2.0; + + g[0] = -2.0 * x_1 * g2 - 2.0 * e; + g[1] = g2; + return ad * d + e * e; +} + +static const PetscLogDouble RosenbrockObjectiveGradientFlops = 12.0; + +static PETSC_HOSTDEVICE_INLINE_DECL void RosenbrockHessian(PetscScalar alpha, PetscScalar x_1, PetscScalar x_2, PetscScalar h[4]) +{ + PetscScalar d = x_2 - x_1 * x_1; + PetscScalar g2 = alpha * d * 2.0; + PetscScalar h2 = -4.0 * alpha * x_1; + + h[0] = -2.0 * (g2 + x_1 * h2) + 2.0; + h[1] = h[2] = h2; + h[3] = 2.0 * alpha; +} + +static const PetscLogDouble RosenbrockHessianFlops = 11.0; + +static PetscErrorCode AppCtxCreate(MPI_Comm comm, AppCtx *ctx) +{ + AppCtx user; + PetscDeviceContext dctx; + + PetscFunctionBegin; + PetscCall(PetscNew(ctx)); + user = *ctx; + user->comm = PETSC_COMM_WORLD; + + /* Initialize problem parameters */ + user->n = 2; + user->problem.alpha = 99.0; + user->problem.bs = 2; // bs = 2 is block Rosenbrock, bs = n is chained Rosenbrock + user->test_lmvm = PETSC_FALSE; + /* Check for command line arguments to override defaults */ + PetscOptionsBegin(user->comm, NULL, "Rosenbrock example", NULL); + PetscCall(PetscOptionsInt("-n", "Rosenbrock problem size", NULL, user->n, &user->n, NULL)); + PetscCall(PetscOptionsInt("-bs", "Rosenbrock block size (2 <= bs <= n)", NULL, user->problem.bs, &user->problem.bs, NULL)); + PetscCall(PetscOptionsReal("-alpha", "Rosenbrock
off-diagonal coefficient", NULL, user->problem.alpha, &user->problem.alpha, NULL)); + PetscCall(PetscOptionsBool("-test_lmvm", "Test LMVM solve against LMVM mult", NULL, user->test_lmvm, &user->test_lmvm, NULL)); + PetscOptionsEnd(); + PetscCheck(user->problem.bs > 1, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " is not bigger than 1", user->problem.bs); + PetscCheck((user->n % user->problem.bs) == 0, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " does not divide problem size %" PetscInt_FMT, user->problem.bs, user->n); + PetscCall(PetscLogEventRegister("Rbock_Obj", TAO_CLASSID, &user->event_f)); + PetscCall(PetscLogEventRegister("Rbock_Grad", TAO_CLASSID, &user->event_g)); + PetscCall(PetscLogEventRegister("Rbock_ObjGrad", TAO_CLASSID, &user->event_fg)); + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + PetscCall(PetscDeviceContextSetUp(dctx)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode AppCtxDestroy(AppCtx *ctx) +{ + AppCtx user; + + PetscFunctionBegin; + user = *ctx; + *ctx = NULL; + PetscCall(VecDestroy(&user->Hvalues)); + PetscCall(VecDestroy(&user->gvalues)); + PetscCall(VecDestroy(&user->fvector)); + PetscCall(VecDestroy(&user->off_process_values)); + PetscCall(PetscSFDestroy(&user->off_process_scatter)); + PetscCall(PetscSFDestroy(&user->gscatter)); + PetscCall(PetscFree(user)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateHessian(AppCtx user, Mat *Hessian) +{ + Mat H; + PetscLayout layout; + PetscInt i_start, i_end, n_local_comp, nnz_local; + PetscInt c_start, c_end; + PetscInt *coo_i; + PetscInt *coo_j; + PetscInt bs = user->problem.bs; + VecType vec_type; + + PetscFunctionBegin; + /* Partition the optimization variables and the computations. + There are (bs - 1) contributions to the objective function for every (bs) + degrees of freedom. */ + PetscCall(PetscLayoutCreateFromSizes(user->comm, PETSC_DECIDE, user->n, 1, &layout)); + PetscCall(PetscLayoutSetUp(layout)); + PetscCall(PetscLayoutGetRange(layout, &i_start, &i_end)); + user->problem.i_start = i_start; + user->problem.i_end = i_end; + user->n_local = i_end - i_start; + user->problem.c_start = c_start = (i_start / bs) * (bs - 1) + (i_start % bs); + user->problem.c_end = c_end = (i_end / bs) * (bs - 1) + (i_end % bs); + user->n_local_comp = n_local_comp = c_end - c_start; + + PetscCall(MatCreate(user->comm, Hessian)); + H = *Hessian; + PetscCall(MatSetLayouts(H, layout, layout)); + PetscCall(PetscLayoutDestroy(&layout)); + PetscCall(MatSetType(H, MATAIJ)); + PetscCall(MatSetOption(H, MAT_HERMITIAN, PETSC_TRUE)); + PetscCall(MatSetOption(H, MAT_SYMMETRIC, PETSC_TRUE)); + PetscCall(MatSetOption(H, MAT_SYMMETRY_ETERNAL, PETSC_TRUE)); + PetscCall(MatSetOption(H, MAT_STRUCTURALLY_SYMMETRIC, PETSC_TRUE)); + PetscCall(MatSetOption(H, MAT_STRUCTURAL_SYMMETRY_ETERNAL, PETSC_TRUE)); + PetscCall(MatSetFromOptions(H)); /* set from options so that we can change the underlying matrix type */ + + nnz_local = n_local_comp * 4; + PetscCall(PetscMalloc2(nnz_local, &coo_i, nnz_local, &coo_j)); + /* Instead of having one computation thread per row of the matrix, + this example uses one thread per contribution to the objective + function. Each contribution to the objective function relates + two adjacent degrees of freedom, so each contribution to + the objective function adds a 2x2 block into the matrix. + We describe these 2x2 blocks in COO format.
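+     For example, with bs = 2 each contribution c couples unknowns i = 2c and i + 1, so the four entries k .. k + 3 written below are (i, i), (i, i + 1), (i + 1, i), and (i + 1, i + 1). +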
*/ + for (PetscInt c = c_start, k = 0; c < c_end; c++, k += 4) { + PetscInt i = (c / (bs - 1)) * bs + c % (bs - 1); + + coo_i[k + 0] = i; + coo_i[k + 1] = i; + coo_i[k + 2] = i + 1; + coo_i[k + 3] = i + 1; + + coo_j[k + 0] = i; + coo_j[k + 1] = i + 1; + coo_j[k + 2] = i; + coo_j[k + 3] = i + 1; + } + PetscCall(MatSetPreallocationCOO(H, nnz_local, coo_i, coo_j)); + PetscCall(PetscFree2(coo_i, coo_j)); + + PetscCall(MatGetVecType(H, &vec_type)); + PetscCall(VecCreate(user->comm, &user->Hvalues)); + PetscCall(VecSetSizes(user->Hvalues, nnz_local, PETSC_DETERMINE)); + PetscCall(VecSetType(user->Hvalues, vec_type)); + + // vector to collect contributions to the objective + PetscCall(VecCreate(user->comm, &user->fvector)); + PetscCall(VecSetSizes(user->fvector, user->n_local_comp, PETSC_DETERMINE)); + PetscCall(VecSetType(user->fvector, vec_type)); + + { /* If we are using a device (such as a GPU), run some computations that will + warm up its linear algebra runtime before the problem we actually want + to profile */ + + PetscMemType memtype; + const PetscScalar *a; + + PetscCall(VecGetArrayReadAndMemType(user->fvector, &a, &memtype)); + PetscCall(VecRestoreArrayReadAndMemType(user->fvector, &a)); + + if (memtype == PETSC_MEMTYPE_DEVICE) { + PetscLogStage warmup; + Mat A, AtA; + Vec x, b; + PetscInt warmup_size = 1000; + PetscDeviceContext dctx; + + PetscCall(PetscLogStageRegister("Device Warmup", &warmup)); + PetscCall(PetscLogStageSetActive(warmup, PETSC_FALSE)); + + PetscCall(PetscLogStagePush(warmup)); + PetscCall(MatCreateDenseFromVecType(PETSC_COMM_SELF, vec_type, warmup_size, warmup_size, warmup_size, warmup_size, PETSC_DEFAULT, NULL, &A)); + PetscCall(MatSetRandom(A, NULL)); + PetscCall(MatCreateVecs(A, &x, &b)); + PetscCall(VecSetRandom(x, NULL)); + + PetscCall(MatMult(A, x, b)); + PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &AtA)); + PetscCall(MatShift(AtA, (PetscScalar)warmup_size)); + PetscCall(MatSetOption(AtA, MAT_SPD, PETSC_TRUE)); + PetscCall(MatCholeskyFactor(AtA, NULL, NULL)); + PetscCall(MatDestroy(&AtA)); + PetscCall(VecDestroy(&b)); + PetscCall(VecDestroy(&x)); + PetscCall(MatDestroy(&A)); + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + PetscCall(PetscDeviceContextSynchronize(dctx)); + PetscCall(PetscLogStagePop()); + } + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateVectors(AppCtx user, Mat H, Vec *solution, Vec *gradient) +{ + VecType vec_type; + PetscInt n_coo, *coo_i, i_start, i_end; + Vec x; + PetscInt n_recv; + PetscSFNode recv; + PetscLayout layout; + PetscInt c_start = user->problem.c_start, c_end = user->problem.c_end, bs = user->problem.bs; + + PetscFunctionBegin; + PetscCall(MatCreateVecs(H, solution, gradient)); + x = *solution; + PetscCall(VecGetOwnershipRange(x, &i_start, &i_end)); + PetscCall(VecGetType(x, &vec_type)); + // create scatter for communicating values + PetscCall(VecGetLayout(x, &layout)); + n_recv = 0; + if (user->n_local_comp && i_end < user->n) { + PetscMPIInt rank; + PetscInt index; + + n_recv = 1; + PetscCall(PetscLayoutFindOwnerIndex(layout, i_end, &rank, &index)); + recv.rank = rank; + recv.index = index; + } + PetscCall(PetscSFCreate(user->comm, &user->off_process_scatter)); + PetscCall(PetscSFSetGraph(user->off_process_scatter, user->n_local, n_recv, NULL, PETSC_USE_POINTER, &recv, PETSC_COPY_VALUES)); + PetscCall(VecCreate(user->comm, &user->off_process_values)); + PetscCall(VecSetSizes(user->off_process_values, 1, PETSC_DETERMINE)); + 
PetscCall(VecSetType(user->off_process_values, vec_type)); + PetscCall(VecZeroEntries(user->off_process_values)); + + // create COO data for writing the gradient + n_coo = user->n_local_comp * 2; + PetscCall(PetscMalloc1(n_coo, &coo_i)); + for (PetscInt c = c_start, k = 0; c < c_end; c++, k += 2) { + PetscInt i = (c / (bs - 1)) * bs + (c % (bs - 1)); + + coo_i[k + 0] = i; + coo_i[k + 1] = i + 1; + } + PetscCall(PetscSFCreate(user->comm, &user->gscatter)); + PetscCall(PetscSFSetGraphLayout(user->gscatter, layout, n_coo, NULL, PETSC_USE_POINTER, coo_i)); + PetscCall(PetscSFSetUp(user->gscatter)); + PetscCall(PetscFree(coo_i)); + PetscCall(VecCreate(user->comm, &user->gvalues)); + PetscCall(VecSetSizes(user->gvalues, n_coo, PETSC_DETERMINE)); + PetscCall(VecSetType(user->gvalues, vec_type)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +#if PetscDefined(USING_CUPMCC) + + #if PetscDefined(USING_NVCC) +typedef cudaStream_t cupmStream_t; + #define PetscCUPMLaunch(...) \ + do { \ + __VA_ARGS__; \ + PetscCallCUDA(cudaGetLastError()); \ + } while (0) + #elif PetscDefined(USING_HCC) + #define PetscCUPMLaunch(...) \ + do { \ + __VA_ARGS__; \ + PetscCallHIP(hipGetLastError()); \ + } while (0) +typedef hipStream_t cupmStream_t; + #endif + +// x: on-process optimization variables +// o: buffer that contains the next optimization variable after the variables on this process +template <class T> +PETSC_DEVICE_INLINE_DECL static void rosenbrock_for_loop(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], T &&func) noexcept +{ + PetscInt idx = blockIdx.x * blockDim.x + threadIdx.x; // 1D grid + PetscInt num_threads = gridDim.x * blockDim.x; + + for (PetscInt c = r.c_start + idx, k = idx; c < r.c_end; c += num_threads, k += num_threads) { + PetscInt i = (c / (r.bs - 1)) * r.bs + (c % (r.bs - 1)); + PetscScalar x_a = x[i - r.i_start]; + PetscScalar x_b = ((i + 1) < r.i_end) ?
x[i + 1 - r.i_start] : o[0]; + + func(k, x_a, x_b); + } + return; +} + +PETSC_KERNEL_DECL void RosenbrockObjective_Kernel(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar f_vec[]) +{ + rosenbrock_for_loop(r, x, o, [&](PetscInt k, PetscScalar x_a, PetscScalar x_b) { f_vec[k] = RosenbrockObjective(r.alpha, x_a, x_b); }); +} + +PETSC_KERNEL_DECL void RosenbrockGradient_Kernel(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar g[]) +{ + rosenbrock_for_loop(r, x, o, [&](PetscInt k, PetscScalar x_a, PetscScalar x_b) { RosenbrockGradient(r.alpha, x_a, x_b, &g[2 * k]); }); +} + +PETSC_KERNEL_DECL void RosenbrockObjectiveGradient_Kernel(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar f_vec[], PetscScalar g[]) +{ + rosenbrock_for_loop(r, x, o, [&](PetscInt k, PetscScalar x_a, PetscScalar x_b) { f_vec[k] = RosenbrockObjectiveGradient(r.alpha, x_a, x_b, &g[2 * k]); }); +} + +PETSC_KERNEL_DECL void RosenbrockHessian_Kernel(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar h[]) +{ + rosenbrock_for_loop(r, x, o, [&](PetscInt k, PetscScalar x_a, PetscScalar x_b) { RosenbrockHessian(r.alpha, x_a, x_b, &h[4 * k]); }); +} + +static PetscErrorCode RosenbrockObjective_Device(cupmStream_t stream, Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar f_vec[]) +{ + PetscInt n_comp = r.c_end - r.c_start; + + PetscFunctionBegin; + if (n_comp) PetscCUPMLaunch(RosenbrockObjective_Kernel<<<(n_comp + 255) / 256, 256, 0, stream>>>(r, x, o, f_vec)); + PetscCall(PetscLogGpuFlops(RosenbrockObjectiveFlops * n_comp)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RosenbrockGradient_Device(cupmStream_t stream, Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar g[]) +{ + PetscInt n_comp = r.c_end - r.c_start; + + PetscFunctionBegin; + if (n_comp) PetscCUPMLaunch(RosenbrockGradient_Kernel<<<(n_comp + 255) / 256, 256, 0, stream>>>(r, x, o, g)); + PetscCall(PetscLogGpuFlops(RosenbrockGradientFlops * n_comp)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RosenbrockObjectiveGradient_Device(cupmStream_t stream, Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar f_vec[], PetscScalar g[]) +{ + PetscInt n_comp = r.c_end - r.c_start; + + PetscFunctionBegin; + if (n_comp) PetscCUPMLaunch(RosenbrockObjectiveGradient_Kernel<<<(n_comp + 255) / 256, 256, 0, stream>>>(r, x, o, f_vec, g)); + PetscCall(PetscLogGpuFlops(RosenbrockObjectiveGradientFlops * n_comp)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RosenbrockHessian_Device(cupmStream_t stream, Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar h[]) +{ + PetscInt n_comp = r.c_end - r.c_start; + + PetscFunctionBegin; + if (n_comp) PetscCUPMLaunch(RosenbrockHessian_Kernel<<<(n_comp + 255) / 256, 256, 0, stream>>>(r, x, o, h)); + PetscCall(PetscLogGpuFlops(RosenbrockHessianFlops * n_comp)); + PetscFunctionReturn(PETSC_SUCCESS); +} +#endif + +static PetscErrorCode RosenbrockObjective_Host(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscReal *f) +{ + PetscReal _f = 0.0; + + PetscFunctionBegin; + for (PetscInt c = r.c_start; c < r.c_end; c++) { + PetscInt i = (c / (r.bs - 1)) * r.bs + (c % (r.bs - 1)); + PetscScalar x_a = x[i - r.i_start]; + PetscScalar x_b = ((i + 1) < r.i_end) ? 
x[i + 1 - r.i_start] : o[0]; + + _f += RosenbrockObjective(r.alpha, x_a, x_b); + } + *f = _f; + PetscCall(PetscLogFlops((RosenbrockObjectiveFlops + 1.0) * (r.c_end - r.c_start))); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RosenbrockGradient_Host(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar g[]) +{ + PetscFunctionBegin; + for (PetscInt c = r.c_start, k = 0; c < r.c_end; c++, k++) { + PetscInt i = (c / (r.bs - 1)) * r.bs + (c % (r.bs - 1)); + PetscScalar x_a = x[i - r.i_start]; + PetscScalar x_b = ((i + 1) < r.i_end) ? x[i + 1 - r.i_start] : o[0]; + + RosenbrockGradient(r.alpha, x_a, x_b, &g[2 * k]); + } + PetscCall(PetscLogFlops(RosenbrockGradientFlops * (r.c_end - r.c_start))); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RosenbrockObjectiveGradient_Host(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscReal *f, PetscScalar g[]) +{ + PetscReal _f = 0.0; + + PetscFunctionBegin; + for (PetscInt c = r.c_start, k = 0; c < r.c_end; c++, k++) { + PetscInt i = (c / (r.bs - 1)) * r.bs + (c % (r.bs - 1)); + PetscScalar x_a = x[i - r.i_start]; + PetscScalar x_b = ((i + 1) < r.i_end) ? x[i + 1 - r.i_start] : o[0]; + + _f += RosenbrockObjectiveGradient(r.alpha, x_a, x_b, &g[2 * k]); + } + *f = _f; + PetscCall(PetscLogFlops(RosenbrockObjectiveGradientFlops * (r.c_end - r.c_start))); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RosenbrockHessian_Host(Rosenbrock r, const PetscScalar x[], const PetscScalar o[], PetscScalar h[]) +{ + PetscFunctionBegin; + for (PetscInt c = r.c_start, k = 0; c < r.c_end; c++, k++) { + PetscInt i = (c / (r.bs - 1)) * r.bs + (c % (r.bs - 1)); + PetscScalar x_a = x[i - r.i_start]; + PetscScalar x_b = ((i + 1) < r.i_end) ? x[i + 1 - r.i_start] : o[0]; + + RosenbrockHessian(r.alpha, x_a, x_b, &h[4 * k]); + } + PetscCall(PetscLogFlops(RosenbrockHessianFlops * (r.c_end - r.c_start))); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* -------------------------------------------------------------------- */ + +static PetscErrorCode FormObjective(Tao tao, Vec X, PetscReal *f, void *ptr) +{ + AppCtx user = (AppCtx)ptr; + PetscReal f_local = 0.0; + const PetscScalar *x; + const PetscScalar *o = NULL; + PetscMemType memtype_x; + + PetscFunctionBeginUser; + PetscCall(PetscLogEventBegin(user->event_f, tao, NULL, NULL, NULL)); + PetscCall(VecScatterBegin(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecGetArrayReadAndMemType(user->off_process_values, &o, NULL)); + PetscCall(VecGetArrayReadAndMemType(X, &x, &memtype_x)); + if (memtype_x == PETSC_MEMTYPE_HOST) { + PetscCall(RosenbrockObjective_Host(user->problem, x, o, &f_local)); + PetscCallMPI(MPI_Allreduce(&f_local, f, 1, MPI_DOUBLE, MPI_SUM, user->comm)); +#if PetscDefined(USING_CUPMCC) + } else if (memtype_x == PETSC_MEMTYPE_DEVICE) { + PetscScalar *_fvec; + PetscScalar f_scalar; + cupmStream_t *stream; + PetscDeviceContext dctx; + + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + PetscCall(PetscDeviceContextGetStreamHandle(dctx, (void **)&stream)); + PetscCall(VecGetArrayWriteAndMemType(user->fvector, &_fvec, NULL)); + PetscCall(RosenbrockObjective_Device(*stream, user->problem, x, o, _fvec)); + PetscCall(VecRestoreArrayWriteAndMemType(user->fvector, &_fvec)); + PetscCall(VecSum(user->fvector, &f_scalar)); + *f = PetscRealPart(f_scalar); +#endif + } else 
SETERRQ(user->comm, PETSC_ERR_SUP, "Unsupported memtype %d", (int)memtype_x); + PetscCall(VecRestoreArrayReadAndMemType(X, &x)); + PetscCall(VecRestoreArrayReadAndMemType(user->off_process_values, &o)); + PetscCall(PetscLogEventEnd(user->event_f, tao, NULL, NULL, NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode FormGradient(Tao tao, Vec X, Vec G, void *ptr) +{ + AppCtx user = (AppCtx)ptr; + PetscScalar *g; + const PetscScalar *x; + const PetscScalar *o = NULL; + PetscMemType memtype_x, memtype_g; + + PetscFunctionBeginUser; + PetscCall(PetscLogEventBegin(user->event_g, tao, NULL, NULL, NULL)); + PetscCall(VecScatterBegin(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecGetArrayReadAndMemType(user->off_process_values, &o, NULL)); + PetscCall(VecGetArrayReadAndMemType(X, &x, &memtype_x)); + PetscCall(VecGetArrayWriteAndMemType(user->gvalues, &g, &memtype_g)); + PetscAssert(memtype_x == memtype_g, user->comm, PETSC_ERR_ARG_INCOMP, "solution vector and gradient must have the same memtype"); + if (memtype_x == PETSC_MEMTYPE_HOST) { + PetscCall(RosenbrockGradient_Host(user->problem, x, o, g)); +#if PetscDefined(USING_CUPMCC) + } else if (memtype_x == PETSC_MEMTYPE_DEVICE) { + cupmStream_t *stream; + PetscDeviceContext dctx; + + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + PetscCall(PetscDeviceContextGetStreamHandle(dctx, (void **)&stream)); + PetscCall(RosenbrockGradient_Device(*stream, user->problem, x, o, g)); +#endif + } else SETERRQ(user->comm, PETSC_ERR_SUP, "Unsupported memtype %d", (int)memtype_x); + PetscCall(VecRestoreArrayWriteAndMemType(user->gvalues, &g)); + PetscCall(VecRestoreArrayReadAndMemType(X, &x)); + PetscCall(VecRestoreArrayReadAndMemType(user->off_process_values, &o)); + PetscCall(VecZeroEntries(G)); + PetscCall(VecScatterBegin(user->gscatter, user->gvalues, G, ADD_VALUES, SCATTER_REVERSE)); + PetscCall(VecScatterEnd(user->gscatter, user->gvalues, G, ADD_VALUES, SCATTER_REVERSE)); + PetscCall(PetscLogEventEnd(user->event_g, tao, NULL, NULL, NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* + FormObjectiveGradient - Evaluates the function, f(X), and gradient, G(X). + + Input Parameters: +. tao - the Tao context +. X - input vector +. ptr - optional user-defined context, as set by TaoSetObjectiveGradient() + + Output Parameters: +. G - vector containing the newly evaluated gradient +. f - function value + + Note: + Some optimization methods ask for the function and the gradient evaluation + at the same time. Evaluating both at once may be more efficient than + evaluating each separately.
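+ +  For this problem the objective and gradient share the intermediates d = x_2 - x_1^2 and e = 1 - x_1, which is why the fused evaluation costs RosenbrockObjectiveGradientFlops (12) per contribution instead of RosenbrockObjectiveFlops + RosenbrockGradientFlops (7 + 9).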
+*/ +static PetscErrorCode FormObjectiveGradient(Tao tao, Vec X, PetscReal *f, Vec G, void *ptr) +{ + AppCtx user = (AppCtx)ptr; + PetscReal f_local = 0.0; + PetscScalar *g; + const PetscScalar *x; + const PetscScalar *o = NULL; + PetscMemType memtype_x, memtype_g; + + PetscFunctionBeginUser; + PetscCall(PetscLogEventBegin(user->event_fg, tao, NULL, NULL, NULL)); + PetscCall(VecScatterBegin(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecGetArrayReadAndMemType(user->off_process_values, &o, NULL)); + PetscCall(VecGetArrayReadAndMemType(X, &x, &memtype_x)); + PetscCall(VecGetArrayWriteAndMemType(user->gvalues, &g, &memtype_g)); + PetscAssert(memtype_x == memtype_g, user->comm, PETSC_ERR_ARG_INCOMP, "solution vector and gradient must have the same memtype"); + if (memtype_x == PETSC_MEMTYPE_HOST) { + PetscCall(RosenbrockObjectiveGradient_Host(user->problem, x, o, &f_local, g)); + PetscCallMPI(MPI_Allreduce((void *)&f_local, (void *)f, 1, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD)); +#if PetscDefined(USING_CUPMCC) + } else if (memtype_x == PETSC_MEMTYPE_DEVICE) { + PetscScalar *_fvec; + PetscScalar f_scalar; + cupmStream_t *stream; + PetscDeviceContext dctx; + + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + PetscCall(PetscDeviceContextGetStreamHandle(dctx, (void **)&stream)); + PetscCall(VecGetArrayWriteAndMemType(user->fvector, &_fvec, NULL)); + PetscCall(RosenbrockObjectiveGradient_Device(*stream, user->problem, x, o, _fvec, g)); + PetscCall(VecRestoreArrayWriteAndMemType(user->fvector, &_fvec)); + PetscCall(VecSum(user->fvector, &f_scalar)); + *f = PetscRealPart(f_scalar); +#endif + } else SETERRQ(user->comm, PETSC_ERR_SUP, "Unsupported memtype %d", (int)memtype_x); + + PetscCall(VecRestoreArrayWriteAndMemType(user->gvalues, &g)); + PetscCall(VecRestoreArrayReadAndMemType(X, &x)); + PetscCall(VecRestoreArrayReadAndMemType(user->off_process_values, &o)); + PetscCall(VecZeroEntries(G)); + PetscCall(VecScatterBegin(user->gscatter, user->gvalues, G, ADD_VALUES, SCATTER_REVERSE)); + PetscCall(VecScatterEnd(user->gscatter, user->gvalues, G, ADD_VALUES, SCATTER_REVERSE)); + PetscCall(PetscLogEventEnd(user->event_fg, tao, NULL, NULL, NULL)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* ------------------------------------------------------------------- */ +/* + FormHessian - Evaluates Hessian matrix. + + Input Parameters: +. tao - the Tao context +. x - input vector +. ptr - optional user-defined context, as set by TaoSetHessian() + + Output Parameters: +. H - Hessian matrix + + Note: Providing the Hessian may not be necessary. Only some solvers + require this matrix.
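+ +  For example, the limited-memory quasi-Newton solvers exercised by this example's tests (such as -tao_type lmvm, blmvm, or bqnktr) build their own Hessian approximations from gradient differences, and -tao_mf_hessian applies the Hessian matrix-free.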
+*/ +static PetscErrorCode FormHessian(Tao tao, Vec X, Mat H, Mat Hpre, void *ptr) +{ + AppCtx user = (AppCtx)ptr; + PetscScalar *h; + const PetscScalar *x; + const PetscScalar *o = NULL; + PetscMemType memtype_x, memtype_h; + + PetscFunctionBeginUser; + PetscCall(VecScatterBegin(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecScatterEnd(user->off_process_scatter, X, user->off_process_values, INSERT_VALUES, SCATTER_FORWARD)); + PetscCall(VecGetArrayReadAndMemType(user->off_process_values, &o, NULL)); + PetscCall(VecGetArrayReadAndMemType(X, &x, &memtype_x)); + PetscCall(VecGetArrayWriteAndMemType(user->Hvalues, &h, &memtype_h)); + PetscAssert(memtype_x == memtype_h, user->comm, PETSC_ERR_ARG_INCOMP, "solution vector and hessian must have the same memtype"); + if (memtype_x == PETSC_MEMTYPE_HOST) { + PetscCall(RosenbrockHessian_Host(user->problem, x, o, h)); +#if PetscDefined(USING_CUPMCC) + } else if (memtype_x == PETSC_MEMTYPE_DEVICE) { + cupmStream_t *stream; + PetscDeviceContext dctx; + + PetscCall(PetscDeviceContextGetCurrentContext(&dctx)); + PetscCall(PetscDeviceContextGetStreamHandle(dctx, (void **)&stream)); + PetscCall(RosenbrockHessian_Device(*stream, user->problem, x, o, h)); +#endif + } else SETERRQ(user->comm, PETSC_ERR_SUP, "Unsupported memtype %d", (int)memtype_x); + + PetscCall(MatSetValuesCOO(H, h, INSERT_VALUES)); + PetscCall(VecRestoreArrayWriteAndMemType(user->Hvalues, &h)); + + PetscCall(VecRestoreArrayReadAndMemType(X, &x)); + PetscCall(VecRestoreArrayReadAndMemType(user->off_process_values, &o)); + + if (Hpre != H) PetscCall(MatCopy(H, Hpre, SAME_NONZERO_PATTERN)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode TestLMVM(Tao tao) +{ + KSP ksp; + PC pc; + PetscBool is_lmvm; + + PetscFunctionBegin; + PetscCall(TaoGetKSP(tao, &ksp)); + if (!ksp) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(KSPGetPC(ksp, &pc)); + PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCLMVM, &is_lmvm)); + if (is_lmvm) { + Mat M; + Vec in, out, out2; + PetscReal mult_solve_dist; + Vec x; + + PetscCall(PCLMVMGetMatLMVM(pc, &M)); + PetscCall(TaoGetSolution(tao, &x)); + PetscCall(VecDuplicate(x, &in)); + PetscCall(VecDuplicate(x, &out)); + PetscCall(VecDuplicate(x, &out2)); + PetscCall(VecSetRandom(in, NULL)); + PetscCall(MatMult(M, in, out)); + PetscCall(MatSolve(M, out, out2)); + + PetscCall(VecAXPY(out2, -1.0, in)); + PetscCall(VecNorm(out2, NORM_2, &mult_solve_dist)); + if (mult_solve_dist < 1.e-11) { + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "Inverse error of LMVM MatMult and MatSolve: < 1.e-11\n")); + } else if (mult_solve_dist < 1.e-6) { + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "Inverse error of LMVM MatMult and MatSolve: < 1.e-6\n")); + } else { + PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "Inverse error of LMVM MatMult and MatSolve is not small: %e\n", (double)mult_solve_dist)); + } + PetscCall(VecDestroy(&in)); + PetscCall(VecDestroy(&out)); + PetscCall(VecDestroy(&out2)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RosenbrockMain(void) +{ + Vec x; /* solution vector */ + Vec g; /* gradient vector */ + Mat H; /* Hessian matrix */ + Tao tao; /* Tao solver context */ + AppCtx user; /* user-defined application context */ + PetscLogStage solve; + + /* Initialize TAO and PETSc */ + PetscFunctionBegin; + PetscCall(PetscLogStageRegister("Rosenbrock solve", &solve)); + + PetscCall(AppCtxCreate(PETSC_COMM_WORLD, &user));
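+  /* Create the Hessian before the vectors: CreateVectors() uses its layout (via MatCreateVecs()) to build the solution and gradient vectors and the halo scatters */ +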
PetscCall(CreateHessian(user, &H)); + PetscCall(CreateVectors(user, H, &x, &g)); + + /* The TAO code begins here */ + + PetscCall(TaoCreate(user->comm, &tao)); + PetscCall(VecZeroEntries(x)); + PetscCall(TaoSetSolution(tao, x)); + + /* Set routines for function, gradient, hessian evaluation */ + PetscCall(TaoSetObjective(tao, FormObjective, user)); + PetscCall(TaoSetObjectiveAndGradient(tao, g, FormObjectiveGradient, user)); + PetscCall(TaoSetGradient(tao, g, FormGradient, user)); + PetscCall(TaoSetHessian(tao, H, H, FormHessian, user)); + + PetscCall(TaoSetFromOptions(tao)); + + /* SOLVE THE APPLICATION */ + PetscCall(PetscLogStagePush(solve)); + PetscCall(TaoSolve(tao)); + PetscCall(PetscLogStagePop()); + + if (user->test_lmvm) PetscCall(TestLMVM(tao)); + + PetscCall(TaoDestroy(&tao)); + PetscCall(VecDestroy(&g)); + PetscCall(VecDestroy(&x)); + PetscCall(MatDestroy(&H)); + PetscCall(AppCtxDestroy(&user)); + PetscFunctionReturn(PETSC_SUCCESS); +} diff --git a/src/tao/unconstrained/tutorials/rosenbrock4cu.cu b/src/tao/unconstrained/tutorials/rosenbrock4cu.cu new file mode 100644 index 00000000000..cced68660e1 --- /dev/null +++ b/src/tao/unconstrained/tutorials/rosenbrock4cu.cu @@ -0,0 +1,195 @@ +const char help[] = "CUDA backend of rosenbrock4.c\n"; + +/* ------------------------------------------------------------------------ + + Copy of rosenbrock4.c. + Once the PETSc test harness supports conditional linking, we can remove this duplicate. + See https://gitlab.com/petsc/petsc/-/issues/1173 + ------------------------------------------------------------------------- */ + +#include "rosenbrock4.h" + +int main(int argc, char **argv) +{ + /* Initialize TAO and PETSc */ + PetscFunctionBeginUser; + PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); + PetscCall(RosenbrockMain()); + PetscCall(PetscFinalize()); + return 0; +} + +/*TEST + + build: + requires: !complex cuda !single !__float128 !defined(PETSC_USE_64BIT_INDICES) + + test: + suffix: 1 + nsize: {{1 2 3}} + args: -mat_type aijcusparse -tao_monitor_short -tao_type nls -tao_gatol 1.e-4 -tao_nls_pc_type pbjacobi + requires: !single + output_file: output/rosenbrock1_1.out + + test: + suffix: 2 + args: -mat_type aijcusparse -tao_monitor_short -tao_type lmvm -tao_gatol 1.e-3 + output_file: output/rosenbrock1_2.out + + test: + suffix: 3 + args: -mat_type aijcusparse -tao_monitor_short -tao_type ntr -tao_gatol 1.e-4 + requires: !single + output_file: output/rosenbrock1_3.out + + test: + suffix: 5 + args: -mat_type aijcusparse -tao_monitor_short -tao_type bntr -tao_gatol 1.e-4 + output_file: output/rosenbrock1_5.out + + test: + suffix: 6 + args: -mat_type aijcusparse -tao_monitor_short -tao_type bntl -tao_gatol 1.e-4 + output_file: output/rosenbrock1_6.out + + test: + suffix: 7 + args: -mat_type aijcusparse -tao_monitor_short -tao_type bnls -tao_gatol 1.e-4 + output_file: output/rosenbrock1_7.out + + test: + suffix: 8 + args: -mat_type aijcusparse -tao_monitor_short -tao_type bntr -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + output_file: output/rosenbrock1_8.out + + test: + suffix: 9 + args: -mat_type aijcusparse -tao_monitor_short -tao_type bntl -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + output_file: output/rosenbrock1_9.out + + test: + suffix: 10 + args: -mat_type aijcusparse -tao_monitor_short -tao_type bnls -tao_bnk_max_cg_its 3 -tao_gatol 1.e-4 + output_file: output/rosenbrock1_10.out + + test: + suffix: 11 + args: -mat_type aijcusparse -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmbroyden + output_file:
output/rosenbrock1_11.out + + test: + suffix: 12 + args: -mat_type aijcusparse -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmbadbroyden + output_file: output/rosenbrock1_12.out + + test: + suffix: 13 + args: -mat_type aijcusparse -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmsymbroyden + output_file: output/rosenbrock1_13.out + + test: + suffix: 14 + args: -mat_type aijcusparse -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmbfgs + output_file: output/rosenbrock1_14.out + + test: + suffix: 15 + args: -mat_type aijcusparse -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmdfp + output_file: output/rosenbrock1_15.out + + test: + suffix: 16 + args: -mat_type aijcusparse -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmsr1 + output_file: output/rosenbrock1_16.out + + test: + suffix: 17 + args: -mat_type aijcusparse -tao_monitor_short -tao_gatol 1e-4 -tao_type bqnls + output_file: output/rosenbrock1_17.out + + test: + suffix: 18 + args: -mat_type aijcusparse -tao_monitor_short -tao_gatol 1e-4 -tao_type blmvm + output_file: output/rosenbrock1_18.out + + test: + suffix: 19 + args: -mat_type aijcusparse -tao_monitor_short -tao_gatol 1e-4 -tao_type bqnktr -tao_bqnk_mat_type lmvmsr1 + output_file: output/rosenbrock1_19.out + + test: + suffix: 20 + args: -mat_type aijcusparse -tao_monitor -tao_gatol 1e-4 -tao_type blmvm -tao_ls_monitor + output_file: output/rosenbrock1_20.out + + test: + suffix: 21 + args: -mat_type aijcusparse -test_lmvm -tao_type bqnktr -tao_max_it 10 -tao_bqnk_mat_type lmvmsymbadbroyden + output_file: output/rosenbrock1_21.out + + test: + suffix: 22 + args: -mat_type aijcusparse -tao_max_it 1 -tao_converged_reason + output_file: output/rosenbrock1_22.out + + test: + suffix: 23 + args: -mat_type aijcusparse -tao_max_funcs 0 -tao_converged_reason + output_file: output/rosenbrock1_23.out + + test: + suffix: 24 + args: -mat_type aijcusparse -tao_gatol 10 -tao_converged_reason + output_file: output/rosenbrock1_24.out + + test: + suffix: 25 + args: -mat_type aijcusparse -tao_grtol 10 -tao_converged_reason + output_file: output/rosenbrock1_25.out + + test: + suffix: 26 + args: -mat_type aijcusparse -tao_gttol 10 -tao_converged_reason + output_file: output/rosenbrock1_26.out + + test: + suffix: 27 + args: -mat_type aijcusparse -tao_steptol 10 -tao_converged_reason + output_file: output/rosenbrock1_27.out + + test: + suffix: 28 + args: -mat_type aijcusparse -tao_fmin 10 -tao_converged_reason + output_file: output/rosenbrock1_28.out + + test: + suffix: test_dbfgs + nsize: {{1 2 3}} + output_file: output/rosenbrock1_14.out + args: -mat_type aijcusparse -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdbfgs -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lbfgs_type {{inplace reorder}} -tao_bqnk_mat_lbfgs_recursive {{0 1}} + + test: + suffix: test_ddfp + nsize: {{1 2 3}} + output_file: output/rosenbrock1_14.out + args: -mat_type aijcusparse -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmddfp -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_ldfp_type {{inplace reorder}} -tao_bqnk_mat_ldfp_recursive {{0 1}} + + test: + suffix: test_dqn_1 + nsize: 1 + output_file: output/rosenbrock1_29.out + args: -mat_type aijcusparse -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdqn -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lqn_type {{inplace reorder}} + + test: + suffix: test_dqn_2 + nsize: 2 + output_file: output/rosenbrock1_30.out + args: 
-mat_type aijcusparse -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdqn -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lqn_type {{inplace reorder}} + + test: + suffix: test_dqn_3 + nsize: 3 + output_file: output/rosenbrock1_31.out + args: -mat_type aijcusparse -n 10 -tao_type bqnktr -test_lmvm -tao_max_it 10 -tao_bqnk_mat_type lmvmdqn -tao_bqnk_mat_lmvm_scale_type none -tao_bqnk_mat_lqn_type {{inplace reorder}} + +TEST*/ diff --git a/src/ts/adapt/impls/dsp/adaptdsp.c b/src/ts/adapt/impls/dsp/adaptdsp.c index 2426a6d01a9..ea8e2538464 100644 --- a/src/ts/adapt/impls/dsp/adaptdsp.c +++ b/src/ts/adapt/impls/dsp/adaptdsp.c @@ -294,7 +294,7 @@ static PetscErrorCode TSAdaptSetFromOptions_DSP(TSAdapt adapt, PetscOptionItems PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSAdaptDSPSetFilter - Sets internal parameters corresponding to the named filter {cite}`soderlind2006adaptive` {cite}`soderlind2003digital` Collective @@ -318,7 +318,7 @@ static PetscErrorCode TSAdaptSetFromOptions_DSP(TSAdapt adapt, PetscOptionItems .seealso: [](ch_ts), `TSADAPTDSP`, `TS`, `TSAdapt`, `TSGetAdapt()`, `TSAdaptDSPSetPID()` @*/ -PetscErrorCode TSAdaptDSPSetFilter(TSAdapt adapt, const char *name) +PetscErrorCode TSAdaptDSPSetFilter(TSAdapt adapt, const char name[]) { PetscFunctionBegin; PetscValidHeaderSpecific(adapt, TSADAPT_CLASSID, 1); diff --git a/src/ts/adapt/impls/dsp/ftn-custom/makefile b/src/ts/adapt/impls/dsp/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ts/adapt/impls/dsp/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/adapt/impls/dsp/ftn-custom/zadaptdspf.c b/src/ts/adapt/impls/dsp/ftn-custom/zadaptdspf.c deleted file mode 100644 index fcc61f76f15..00000000000 --- a/src/ts/adapt/impls/dsp/ftn-custom/zadaptdspf.c +++ /dev/null @@ -1,18 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tsadaptdspsetfilter_ TSADAPTDSPSETFILTER -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tsadaptdspsetfilter_ tsadaptdspsetfilter -#endif - -PETSC_EXTERN void tsadaptdspsetfilter_(TSAdapt *tsadapt, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(name, len, t); - *ierr = TSAdaptDSPSetFilter(*tsadapt, t); - if (*ierr) return; - FREECHAR(name, t); -} diff --git a/src/ts/adapt/interface/ftn-custom/makefile b/src/ts/adapt/interface/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/ts/adapt/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/adapt/interface/ftn-custom/ztsadaptf.c b/src/ts/adapt/interface/ftn-custom/ztsadaptf.c deleted file mode 100644 index 30404938960..00000000000 --- a/src/ts/adapt/interface/ftn-custom/ztsadaptf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tsadaptsettype_ TSADAPTSETTYPE - #define tsadaptgettype_ TSADAPTGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tsadaptsettype_ tsadaptsettype - #define tsadaptgettype_ tsadaptgettype -#endif - -PETSC_EXTERN void tsadaptsettype_(TSAdapt *tsadapt, char *type, PetscErrorCode *ierr,
PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = TSAdaptSetType(*tsadapt, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void tsadaptgettype_(TSAdapt *adapt, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *t; - - *ierr = TSAdaptGetType(*adapt, &t); - *ierr = PetscStrncpy(type, t, len); - FIXRETURNCHAR(PETSC_TRUE, type, len); -} diff --git a/src/ts/adapt/interface/tsadapt.c b/src/ts/adapt/interface/tsadapt.c index 39a9d769211..988b87b6c16 100644 --- a/src/ts/adapt/interface/tsadapt.c +++ b/src/ts/adapt/interface/tsadapt.c @@ -16,7 +16,7 @@ PETSC_EXTERN PetscErrorCode TSAdaptCreate_History(TSAdapt); /*@C TSAdaptRegister - adds a TSAdapt implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of user-defined adaptivity scheme @@ -106,7 +106,7 @@ PetscErrorCode TSAdaptInitializePackage(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSAdaptSetType - sets the approach used for the error adapter Logically Collective @@ -141,7 +141,7 @@ PetscErrorCode TSAdaptSetType(TSAdapt adapt, TSAdaptType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSAdaptGetType - gets the `TS` adapter method type (as a string). Not Collective @@ -173,7 +173,7 @@ PetscErrorCode TSAdaptSetOptionsPrefix(TSAdapt adapt, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSAdaptLoad - Loads a TSAdapt that has been stored in binary with `TSAdaptView()`. Collective @@ -1093,11 +1093,9 @@ PetscErrorCode TSAdaptCreate(MPI_Comm comm, TSAdapt *inadapt) PetscFunctionBegin; PetscAssertPointer(inadapt, 2); - *inadapt = NULL; PetscCall(TSAdaptInitializePackage()); PetscCall(PetscHeaderCreate(adapt, TSADAPT_CLASSID, "TSAdapt", "Time stepping adaptivity", "TS", comm, TSAdaptDestroy, TSAdaptView)); - adapt->always_accept = PETSC_FALSE; adapt->safety = 0.9; adapt->reject_safety = 0.5; @@ -1114,7 +1112,6 @@ PetscErrorCode TSAdaptCreate(MPI_Comm comm, TSAdapt *inadapt) adapt->matchstepfac[1] = 2.0; /* halve last step if it is greater than what remains divided this factor */ adapt->wnormtype = NORM_2; adapt->timestepjustdecreased_delay = 0; - - *inadapt = adapt; + *inadapt = adapt; PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/ts/characteristic/interface/characteristic.c b/src/ts/characteristic/interface/characteristic.c index 2a4c250cc8b..2208e136882 100644 --- a/src/ts/characteristic/interface/characteristic.c +++ b/src/ts/characteristic/interface/characteristic.c @@ -106,7 +106,7 @@ PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ CharacteristicSetType - Builds Characteristic for a particular solver. Logically Collective @@ -197,7 +197,7 @@ PetscErrorCode CharacteristicSetUp(Characteristic c) /*@C CharacteristicRegister - Adds a solver to the method of characteristics package.
- Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined solver diff --git a/src/ts/event/tsevent.c b/src/ts/event/tsevent.c index 0ab4489f180..99e83ade3ca 100644 --- a/src/ts/event/tsevent.c +++ b/src/ts/event/tsevent.c @@ -608,7 +608,7 @@ static inline PetscReal TSEvent_dt_cap(TS ts, PetscReal t, PetscReal dt, PetscBo { PetscReal res = dt; if (ts->exact_final_time == TS_EXACTFINALTIME_MATCHSTEP) { - PetscReal maxdt = ts->max_time - t; // this may be overriden by tspan + PetscReal maxdt = ts->max_time - t; // this may be overridden by tspan PetscBool cut_made = PETSC_FALSE; PetscReal eps = 10 * PETSC_MACHINE_EPSILON; if (ts->tspan) { diff --git a/src/ts/f90-mod/petscts.h b/src/ts/f90-mod/petscts.h index a539342216d..e7512b0c657 100644 --- a/src/ts/f90-mod/petscts.h +++ b/src/ts/f90-mod/petscts.h @@ -3,19 +3,33 @@ ! #include "petsc/finclude/petscts.h" - type tTS - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tTS end type tTS - type tTSAdapt - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + TS, parameter :: PETSC_NULL_TS = tTS(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_TS +#endif + + type, extends(tPetscObject) :: tTSAdapt end type tTSAdapt - type tTSTrajectory - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + TSAdapt, parameter :: PETSC_NULL_TS_ADAPT = tTSAdapt(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_TS_ADAPT +#endif + + type, extends(tPetscObject) :: tTSTrajectory end type tTSTrajectory + TSTrajectory, parameter :: PETSC_NULL_TS_TRAJECTORY = tTSTrajectory(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_TS_TRAJECTORY +#endif - TS, parameter :: PETSC_NULL_TS = tTS(0) - TSAdapt, parameter :: PETSC_NULL_TSADAPT = tTSAdapt(0) - TSTrajectory, parameter :: PETSC_NULL_TSTrajectory = tTSTrajectory(0) + type, extends(tPetscObject) :: tTSGLLEAdapt + end type tTSGLLEAdapt + TSGLLEAdapt, parameter :: PETSC_NULL_TS_GLLE_ADAPT = tTSGLLEAdapt(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_TS_GLLE_ADAPT +#endif ! ! Convergence flags diff --git a/src/ts/f90-mod/petscts.h90 b/src/ts/f90-mod/petscts.h90 index fdbe7d3cfb5..8b137891791 100644 --- a/src/ts/f90-mod/petscts.h90 +++ b/src/ts/f90-mod/petscts.h90 @@ -1,9 +1 @@ - interface - subroutine TSDestroy(a,z) - import tTS - TS a - PetscErrorCode z - end subroutine - end interface - diff --git a/src/ts/impls/arkimex/arkimex.c b/src/ts/impls/arkimex/arkimex.c index c47000d4082..5dec896826d 100644 --- a/src/ts/impls/arkimex/arkimex.c +++ b/src/ts/impls/arkimex/arkimex.c @@ -52,6 +52,7 @@ typedef struct { Vec Ydot0; /* Holds the slope from the previous step in FSAL case */ Vec Ydot; /* Work vector holding Ydot during residual evaluation */ Vec Z; /* Ydot = shift(Y-Z) */ + IS alg_is; /* Index set for algebraic variables, needed when restarting with DIRK */ PetscScalar *work; /* Scalar work */ PetscReal scoeff; /* shift = scoeff/dt */ PetscReal stage_time; @@ -1120,7 +1121,7 @@ PetscErrorCode TSARKIMEXFinalizePackage(void) /*@C TSARKIMEXRegister - register a `TSARKIMEX` scheme by providing the entries in the Butcher tableau and optionally embedded approximations and interpolation - Logically Collective. 
+ Logically Collective Input Parameters: + name - identifier for method @@ -1152,6 +1153,7 @@ PetscErrorCode TSARKIMEXRegister(TSARKIMEXType name, PetscInt order, PetscInt s, PetscInt i, j; PetscFunctionBegin; + PetscCheck(s > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Expected number of stages s %" PetscInt_FMT " > 0", s); PetscCall(TSARKIMEXInitializePackage()); for (link = ARKTableauList; link; link = link->next) { PetscBool match; @@ -1369,6 +1371,8 @@ static PetscErrorCode TSARKIMEXTestMassIdentity(TS ts, PetscBool *id) PetscFunctionReturn(PETSC_SUCCESS); } +static PetscErrorCode TSARKIMEXComputeAlgebraicIS(TS, PetscReal, Vec, IS *); + static PetscErrorCode TSStep_ARKIMEX(TS ts) { TS_ARKIMEX *ark = (TS_ARKIMEX *)ts->data; @@ -1420,8 +1424,14 @@ static PetscErrorCode TSStep_ARKIMEX(TS ts) if (dirk && tab->explicit_first_stage && ts->steprestart) { ark->scoeff = PETSC_MAX_REAL; PetscCall(VecCopy(ts->vec_sol, Z)); + if (!ark->alg_is) { + PetscCall(TSARKIMEXComputeAlgebraicIS(ts, ts->ptime, Z, &ark->alg_is)); + PetscCall(ISViewFromOptions(ark->alg_is, (PetscObject)ts, "-ts_arkimex_algebraic_is_view")); + } PetscCall(TSGetSNES(ts, &snes)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)snes, (PetscObject)snes, 1)); PetscCall(SNESSolve(snes, NULL, Ydot0)); + PetscCall(PetscObjectIncrementTabLevel((PetscObject)snes, (PetscObject)snes, -1)); } /* For IMEX we compute a step */ @@ -1798,6 +1808,7 @@ static PetscErrorCode TSReset_ARKIMEX(TS ts) PetscCall(VecDestroy(&ark->Ydot)); PetscCall(VecDestroy(&ark->Ydot0)); PetscCall(VecDestroy(&ark->Z)); + PetscCall(ISDestroy(&ark->alg_is)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1843,7 +1854,60 @@ static PetscErrorCode TSARKIMEXRestoreVecs(TS ts, DM dm, Vec *Z, Vec *Ydot) PetscFunctionReturn(PETSC_SUCCESS); } -PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat, PetscBool, PetscReal, IS *); +/* + DAEs need special handling for algebraic variables when restarting DIRK methods with explicit + first stage. In particular, we need: + - to zero the nonlinear function (in case the dual variables are not consistent in the first step) + - to modify the preconditioning matrix by calling MatZeroRows with identity on these variables. 
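+
+   (How the identification works: TSARKIMEXComputeAlgebraicIS below evaluates
+   the IFunction once with Xdot = 0 and once with a random Xdot; residual
+   entries that are identical in the two evaluations cannot depend on Xdot
+   and are therefore flagged as algebraic.)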
+*/ +static PetscErrorCode TSARKIMEXComputeAlgebraicIS(TS ts, PetscReal time, Vec X, IS *alg_is) +{ + TS_ARKIMEX *ark = (TS_ARKIMEX *)ts->data; + DM dm; + Vec F, W, Xdot; + const PetscScalar *w; + PetscInt nz = 0, n, st; + PetscInt *nzr; + + PetscFunctionBegin; + PetscCall(TSGetDM(ts, &dm)); /* may be already from SNES */ + PetscCall(DMGetGlobalVector(dm, &Xdot)); + PetscCall(DMGetGlobalVector(dm, &F)); + PetscCall(DMGetGlobalVector(dm, &W)); + PetscCall(VecSet(Xdot, 0.0)); + PetscCall(TSComputeIFunction(ts, time, X, Xdot, F, ark->imex)); + PetscCall(VecSetRandom(Xdot, NULL)); + PetscCall(TSComputeIFunction(ts, time, X, Xdot, W, ark->imex)); + PetscCall(VecAXPY(W, -1.0, F)); + PetscCall(VecGetOwnershipRange(W, &st, NULL)); + PetscCall(VecGetLocalSize(W, &n)); + PetscCall(VecGetArrayRead(W, &w)); + for (PetscInt i = 0; i < n; i++) + if (w[i] == 0.0) nz++; + PetscCall(PetscMalloc1(nz, &nzr)); + nz = 0; + for (PetscInt i = 0; i < n; i++) + if (w[i] == 0.0) nzr[nz++] = i + st; + PetscCall(VecRestoreArrayRead(W, &w)); + PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), nz, nzr, PETSC_OWN_POINTER, alg_is)); + PetscCall(DMRestoreGlobalVector(dm, &Xdot)); + PetscCall(DMRestoreGlobalVector(dm, &F)); + PetscCall(DMRestoreGlobalVector(dm, &W)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* As for the method specific Z and Ydot, we store the algebraic IS in the ARKIMEX data structure + at the finest level, in the DM for coarser solves. */ +static PetscErrorCode TSARKIMEXGetAlgebraicIS(TS ts, DM dm, IS *alg_is) +{ + TS_ARKIMEX *ax = (TS_ARKIMEX *)ts->data; + + PetscFunctionBegin; + if (dm && dm != ts->dm) { + PetscCall(PetscObjectQuery((PetscObject)dm, "TSARKIMEX_ALG_IS", (PetscObject *)alg_is)); + } else *alg_is = ax->alg_is; + PetscFunctionReturn(PETSC_SUCCESS); +} /* This defines the nonlinear equation that is to be solved with SNES */ static PetscErrorCode SNESTSFormFunction_ARKIMEX(SNES snes, Vec X, Vec F, TS ts) @@ -1851,16 +1915,27 @@ static PetscErrorCode SNESTSFormFunction_ARKIMEX(SNES snes, Vec X, Vec F, TS ts) TS_ARKIMEX *ark = (TS_ARKIMEX *)ts->data; DM dm, dmsave; Vec Z, Ydot; + IS alg_is; PetscFunctionBegin; PetscCall(SNESGetDM(snes, &dm)); PetscCall(TSARKIMEXGetVecs(ts, dm, &Z, &Ydot)); + if (ark->scoeff == PETSC_MAX_REAL) PetscCall(TSARKIMEXGetAlgebraicIS(ts, dm, &alg_is)); + dmsave = ts->dm; ts->dm = dm; if (ark->scoeff == PETSC_MAX_REAL) { /* We are solving F(t_n,x_n,xdot) = 0 to start the method */ + if (!alg_is) { + PetscCheck(dmsave != ts->dm, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Missing algebraic IS"); + PetscCall(TSARKIMEXComputeAlgebraicIS(ts, ark->stage_time, Z, &alg_is)); + PetscCall(PetscObjectCompose((PetscObject)dm, "TSARKIMEX_ALG_IS", (PetscObject)alg_is)); + PetscCall(PetscObjectDereference((PetscObject)alg_is)); + PetscCall(ISViewFromOptions(alg_is, (PetscObject)snes, "-ts_arkimex_algebraic_is_view")); + } PetscCall(TSComputeIFunction(ts, ark->stage_time, Z, X, F, ark->imex)); + PetscCall(VecISSet(F, alg_is, 0.0)); } else { PetscReal shift = ark->scoeff / ts->time_step; PetscCall(VecAXPBYPCZ(Ydot, -shift, shift, 0, Z, X)); /* Ydot = shift*(X-Z) */ @@ -1878,47 +1953,29 @@ static PetscErrorCode SNESTSFormJacobian_ARKIMEX(SNES snes, Vec X, Mat A, Mat B, DM dm, dmsave; Vec Ydot, Z; PetscReal shift; + IS alg_is; PetscFunctionBegin; PetscCall(SNESGetDM(snes, &dm)); - PetscCall(TSARKIMEXGetVecs(ts, dm, &Z, &Ydot)); /* ark->Ydot has already been computed in SNESTSFormFunction_ARKIMEX (SNES guarantees this) */ + PetscCall(TSARKIMEXGetVecs(ts, dm, &Z, 
&Ydot)); + /* alg_is has been computed in SNESTSFormFunction_ARKIMEX */ + if (ark->scoeff == PETSC_MAX_REAL) PetscCall(TSARKIMEXGetAlgebraicIS(ts, dm, &alg_is)); + dmsave = ts->dm; ts->dm = dm; if (ark->scoeff == PETSC_MAX_REAL) { PetscBool hasZeroRows; - IS alg_is; /* We are solving F(t_n,x_n,xdot) = 0 to start the method - Jed's proposal is to compute with a very large shift and then scale back the matrix */ + We compute with a very large shift and then scale back the matrix */ shift = 1.0 / PETSC_MACHINE_EPSILON; PetscCall(TSComputeIJacobian(ts, ark->stage_time, Z, X, shift, A, B, ark->imex)); PetscCall(MatScale(B, PETSC_MACHINE_EPSILON)); - /* DAEs need special handling for preconditioning purposes only. - We need to locate the algebraic variables and modify the preconditioning matrix by - calling MatZeroRows with identity on these variables. - We must store the IS in the DM since this function can be called by multilevel solvers. - */ - PetscCall(PetscObjectQuery((PetscObject)dm, "TSARKIMEX_ALG_IS", (PetscObject *)&alg_is)); - if (!alg_is) { - PetscInt m, n; - IS nonzeroRows; - - PetscCall(MatViewFromOptions(B, (PetscObject)snes, "-ts_arkimex_alg_mat_view_pre")); - PetscCall(MatFindNonzeroRowsOrCols_Basic(B, PETSC_FALSE, 100 * PETSC_MACHINE_EPSILON, &nonzeroRows)); - if (nonzeroRows) PetscCall(ISViewFromOptions(nonzeroRows, (PetscObject)snes, "-ts_arkimex_alg_is_view_pre")); - PetscCall(MatGetOwnershipRange(B, &m, &n)); - if (nonzeroRows) PetscCall(ISComplement(nonzeroRows, m, n, &alg_is)); - else PetscCall(ISCreateStride(PetscObjectComm((PetscObject)snes), 0, m, 1, &alg_is)); - PetscCall(ISDestroy(&nonzeroRows)); - PetscCall(PetscObjectCompose((PetscObject)dm, "TSARKIMEX_ALG_IS", (PetscObject)alg_is)); - PetscCall(ISDestroy(&alg_is)); - } - PetscCall(PetscObjectQuery((PetscObject)dm, "TSARKIMEX_ALG_IS", (PetscObject *)&alg_is)); - PetscCall(ISViewFromOptions(alg_is, (PetscObject)snes, "-ts_arkimex_alg_is_view")); PetscCall(MatHasOperation(B, MATOP_ZERO_ROWS, &hasZeroRows)); if (hasZeroRows) { + PetscCheck(alg_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Missing algebraic IS"); /* the default of AIJ is to not keep the pattern! 
We should probably change it someday */ PetscCall(MatSetOption(B, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE)); PetscCall(MatZeroRowsIS(B, alg_is, 1.0, NULL, NULL)); @@ -2140,7 +2197,7 @@ static PetscErrorCode TSLoad_ARKIMEX(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSARKIMEXSetType - Set the type of `TSARKIMEX` scheme Logically Collective @@ -2166,7 +2223,7 @@ PetscErrorCode TSARKIMEXSetType(TS ts, TSARKIMEXType arktype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSARKIMEXGetType - Get the type of `TSARKIMEX` scheme Logically Collective @@ -2384,7 +2441,7 @@ static PetscErrorCode TSDIRKSetType_DIRK(TS ts, TSDIRKType dirktype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSDIRKSetType - Set the type of `TSDIRK` scheme Logically Collective @@ -2409,7 +2466,7 @@ PetscErrorCode TSDIRKSetType(TS ts, TSDIRKType dirktype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSDIRKGetType - Get the type of `TSDIRK` scheme Logically Collective diff --git a/src/ts/impls/arkimex/ftn-custom/makefile b/src/ts/impls/arkimex/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/ts/impls/arkimex/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/impls/arkimex/ftn-custom/zarkimexf.c b/src/ts/impls/arkimex/ftn-custom/zarkimexf.c deleted file mode 100644 index 186bb584c17..00000000000 --- a/src/ts/impls/arkimex/ftn-custom/zarkimexf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tsarkimexsettype_ TSARKIMEXSETTYPE - #define tsarkimexgettype_ TSARKIMEXGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tsarkimexsettype_ tsarkimexsettype - #define tsarkimexgettype_ tsarkimexgettype -#endif - -PETSC_EXTERN void tsarkimexsettype_(TS *ts, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = TSARKIMEXSetType(*ts, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void tsarkimexgettype_(TS *ts, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = TSARKIMEXGetType(*ts, &tname); - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ts/impls/eimex/eimex.c b/src/ts/impls/eimex/eimex.c index a21341138f5..651c5b450e5 100644 --- a/src/ts/impls/eimex/eimex.c +++ b/src/ts/impls/eimex/eimex.c @@ -381,7 +381,7 @@ static PetscErrorCode TSView_EIMEX(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSEIMEXSetMaxRows - Set the maximum number of rows for `TSEIMEX` schemes Logically Collective @@ -402,7 +402,7 @@ PetscErrorCode TSEIMEXSetMaxRows(TS ts, PetscInt nrows) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSEIMEXSetRowCol - Set the number of rows and the number of columns for the tableau that represents the T solution in the `TSEIMEX` scheme Logically Collective @@ -424,7 +424,7 @@ PetscErrorCode TSEIMEXSetRowCol(TS ts, PetscInt row, PetscInt col) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSEIMEXSetOrdAdapt - Set the order adaptivity for the `TSEIMEX` schemes Logically Collective diff --git a/src/ts/impls/explicit/rk/ftn-custom/makefile b/src/ts/impls/explicit/rk/ftn-custom/makefile deleted file mode 100644 index 7cee45faa73..00000000000 --- a/src/ts/impls/explicit/rk/ftn-custom/makefile 
+++ /dev/null @@ -1,8 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - -MANSEC = TS - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk - diff --git a/src/ts/impls/explicit/rk/ftn-custom/zrkf.c b/src/ts/impls/explicit/rk/ftn-custom/zrkf.c deleted file mode 100644 index 77adc14b35e..00000000000 --- a/src/ts/impls/explicit/rk/ftn-custom/zrkf.c +++ /dev/null @@ -1,19 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tsrksettype_ TSRKSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tsrksettype_ tsrksettype -#endif - -PETSC_EXTERN void tsrksettype_(TS *ts, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = TSRKSetType(*ts, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/ts/impls/explicit/rk/rk.c b/src/ts/impls/explicit/rk/rk.c index 18d38163902..b1393539770 100644 --- a/src/ts/impls/explicit/rk/rk.c +++ b/src/ts/impls/explicit/rk/rk.c @@ -409,7 +409,7 @@ PetscErrorCode TSRKFinalizePackage(void) /*@C TSRKRegister - register an `TSRK` scheme by providing the entries in the Butcher tableau and optionally embedded approximations and interpolation - Not Collective, but the same schemes should be registered on all processes on which they will be used + Not Collective, but the same schemes should be registered on all processes on which they will be used, No Fortran Support Input Parameters: + name - identifier for method @@ -1253,7 +1253,7 @@ PetscErrorCode TSRKGetOrder(TS ts, PetscInt *order) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRKSetType - Set the type of the `TSRK` scheme Logically Collective @@ -1278,7 +1278,7 @@ PetscErrorCode TSRKSetType(TS ts, TSRKType rktype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRKGetType - Get the type of `TSRK` scheme Not Collective @@ -1408,7 +1408,7 @@ static PetscErrorCode SNESTSFormJacobian_RK(SNES snes, Vec x, Mat A, Mat B, TS t PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRKSetMultirate - Use the interpolation-based multirate `TSRK` method Logically Collective @@ -1434,7 +1434,7 @@ PetscErrorCode TSRKSetMultirate(TS ts, PetscBool use_multirate) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRKGetMultirate - Gets whether to use the interpolation-based multirate `TSRK` method Not Collective diff --git a/src/ts/impls/explicit/ssp/ftn-custom/makefile b/src/ts/impls/explicit/ssp/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ts/impls/explicit/ssp/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/impls/explicit/ssp/ftn-custom/zsspf.c b/src/ts/impls/explicit/ssp/ftn-custom/zsspf.c deleted file mode 100644 index 033297ef15b..00000000000 --- a/src/ts/impls/explicit/ssp/ftn-custom/zsspf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tssspsettype_ TSSSPSETTYPE - #define tssspgettype_ TSSSPGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tssspsettype_ tssspsettype - #define tssspgettype_ tssspgettype -#endif - -PETSC_EXTERN void tssspsettype_(TS *ts, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = TSSSPSetType(*ts, t); - if (*ierr) return; - 
FREECHAR(type, t); -} - -PETSC_EXTERN void tssspgettype_(TS *ts, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = TSSSPGetType(*ts, &tname); - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ts/impls/explicit/ssp/ssp.c b/src/ts/impls/explicit/ssp/ssp.c index ab21ed7b8c4..08f6991d35b 100644 --- a/src/ts/impls/explicit/ssp/ssp.c +++ b/src/ts/impls/explicit/ssp/ssp.c @@ -239,7 +239,7 @@ static PetscErrorCode TSDestroy_SSP(TS ts) } /*------------------------------------------------------------*/ -/*@C +/*@ TSSSPSetType - set the `TSSSP` time integration scheme to use Logically Collective @@ -265,7 +265,7 @@ PetscErrorCode TSSSPSetType(TS ts, TSSSPType ssptype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSSSPGetType - get the `TSSSP` time integration scheme Logically Collective diff --git a/src/ts/impls/glee/glee.c b/src/ts/impls/glee/glee.c index ae7a59bf4c9..1b66f7f43be 100644 --- a/src/ts/impls/glee/glee.c +++ b/src/ts/impls/glee/glee.c @@ -348,7 +348,7 @@ PetscErrorCode TSGLEEFinalizePackage(void) /*@C TSGLEERegister - register a new `TSGLEE` scheme by providing the entries in the Butcher tableau - Not Collective, but the same schemes should be registered on all processes on which they will be used + Not Collective, but the same schemes should be registered on all processes on which they will be used, No Fortran Support Input Parameters: + name - identifier for method @@ -829,7 +829,7 @@ static PetscErrorCode TSLoad_GLEE(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGLEESetType - Set the type of `TSGLEE` scheme Logically Collective @@ -851,7 +851,7 @@ PetscErrorCode TSGLEESetType(TS ts, TSGLEEType gleetype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGLEEGetType - Get the type of `TSGLEE` scheme Logically Collective diff --git a/src/ts/impls/implicit/glle/glle.c b/src/ts/impls/implicit/glle/glle.c index ca10569c3f2..03af71a4332 100644 --- a/src/ts/impls/implicit/glle/glle.c +++ b/src/ts/impls/implicit/glle/glle.c @@ -605,7 +605,7 @@ static PetscErrorCode TSGLLECreate_IRKS(TS ts) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGLLESetType - sets the class of general linear method, `TSGLLE` to use for time-stepping Collective @@ -671,7 +671,7 @@ PetscErrorCode TSGLLESetAcceptType(TS ts, TSGLLEAcceptType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGLLEGetAdapt - gets the `TSGLLEAdapt` object from the `TS` Not Collective @@ -1195,7 +1195,7 @@ static PetscErrorCode TSView_GLLE(TS ts, PetscViewer viewer) /*@C TSGLLERegister - adds a `TSGLLE` implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of user-defined general linear scheme diff --git a/src/ts/impls/implicit/glle/glleadapt.c b/src/ts/impls/implicit/glle/glleadapt.c index 4492fd572dd..fde806dd37c 100644 --- a/src/ts/impls/implicit/glle/glleadapt.c +++ b/src/ts/impls/implicit/glle/glleadapt.c @@ -24,7 +24,7 @@ PETSC_EXTERN PetscErrorCode TSGLLEAdaptCreate_Both(TSGLLEAdapt); /*@C TSGLLEAdaptRegister - adds a `TSGLLEAdapt` implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of user-defined adaptivity scheme diff --git a/src/ts/impls/implicit/irk/irk.c b/src/ts/impls/implicit/irk/irk.c index 6c39276efac..16e3a975c06 100644 --- a/src/ts/impls/implicit/irk/irk.c +++ b/src/ts/impls/implicit/irk/irk.c @@ -159,7 +159,7 @@ static PetscErrorCode TSIRKCreate_Gauss(TS ts) /*@C TSIRKRegister - 
adds a `TSIRK` implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of user-defined IRK scheme @@ -661,7 +661,7 @@ static PetscErrorCode TSLoad_IRK(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSIRKSetType - Set the type of `TSIRK` scheme to use Logically Collective @@ -686,7 +686,7 @@ PetscErrorCode TSIRKSetType(TS ts, TSIRKType irktype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSIRKGetType - Get the type of `TSIRK` IMEX scheme being used Logically Collective @@ -709,7 +709,7 @@ PetscErrorCode TSIRKGetType(TS ts, TSIRKType *irktype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSIRKSetNumStages - Set the number of stages of `TSIRK` scheme to use Logically Collective @@ -733,7 +733,7 @@ PetscErrorCode TSIRKSetNumStages(TS ts, PetscInt nstages) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSIRKGetNumStages - Get the number of stages of `TSIRK` scheme Logically Collective diff --git a/src/ts/impls/implicit/sundials/ftn-custom/makefile b/src/ts/impls/implicit/sundials/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/ts/impls/implicit/sundials/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/impls/implicit/sundials/ftn-custom/zsundialsf.c b/src/ts/impls/implicit/sundials/ftn-custom/zsundialsf.c deleted file mode 100644 index 71f4a903112..00000000000 --- a/src/ts/impls/implicit/sundials/ftn-custom/zsundialsf.c +++ /dev/null @@ -1,15 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tssundialsgetiterations_ TSSUNDIALSGETITERATIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tssundialsgetiterations_ tssundialsgetiterations -#endif - -PETSC_EXTERN void tssundialsgetiterations_(TS *ts, PetscInt *nonlin, PetscInt *lin, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(nonlin); - CHKFORTRANNULLINTEGER(lin); - *ierr = TSSundialsGetIterations(*ts, nonlin, lin); -} diff --git a/src/ts/impls/implicit/sundials/sundials.c b/src/ts/impls/implicit/sundials/sundials.c index 40d4f170301..6130baf0490 100644 --- a/src/ts/impls/implicit/sundials/sundials.c +++ b/src/ts/impls/implicit/sundials/sundials.c @@ -479,7 +479,6 @@ static PetscErrorCode TSView_Sundials(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/* --------------------------------------------------------------------------*/ static PetscErrorCode TSSundialsSetType_Sundials(TS ts, TSSundialsLmmType type) { TS_Sundials *cvode = (TS_Sundials *)ts->data; @@ -581,9 +580,8 @@ static PetscErrorCode TSSundialsMonitorInternalSteps_Sundials(TS ts, PetscBool s cvode->monitorstep = s; PetscFunctionReturn(PETSC_SUCCESS); } -/* -------------------------------------------------------------------------------------------*/ -/*@C +/*@ TSSundialsGetIterations - Gets the number of nonlinear and linear iterations used so far by `TSSUNDIALS`. Not Collective @@ -861,7 +859,6 @@ PetscErrorCode TSSundialsSetUseDense(TS ts, PetscBool use_dense) PetscFunctionReturn(PETSC_SUCCESS); } -/* -------------------------------------------------------------------------------------------*/ /*MC TSSUNDIALS - ODE solver using a very old version of the LLNL CVODE/SUNDIALS package, version 2.5 (now called SUNDIALS). 
Requires ./configure --download-sundials diff --git a/src/ts/impls/multirate/mprk.c b/src/ts/impls/multirate/mprk.c index c21ff7cf919..fdd7e0169f4 100644 --- a/src/ts/impls/multirate/mprk.c +++ b/src/ts/impls/multirate/mprk.c @@ -434,7 +434,7 @@ PetscErrorCode TSMPRKFinalizePackage(void) /*@C TSMPRKRegister - register a `TSMPRK` scheme by providing the entries in the Butcher tableau - Not Collective, but the same schemes should be registered on all processes on which they will be used + Not Collective, but the same schemes should be registered on all processes on which they will be used, No Fortran Support Input Parameters: + name - identifier for method @@ -1139,7 +1139,7 @@ static PetscErrorCode TSLoad_MPRK(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSMPRKSetType - Set the type of `TSMPRK` scheme Not Collective @@ -1164,7 +1164,7 @@ PetscErrorCode TSMPRKSetType(TS ts, TSMPRKType mprktype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSMPRKGetType - Get the type of `TSMPRK` scheme Not Collective diff --git a/src/ts/impls/pseudo/posindep.c b/src/ts/impls/pseudo/posindep.c index acca0c810ad..7ba11637349 100644 --- a/src/ts/impls/pseudo/posindep.c +++ b/src/ts/impls/pseudo/posindep.c @@ -27,7 +27,7 @@ typedef struct { /* ------------------------------------------------------------------------------*/ -/*@C +/*@ TSPseudoComputeTimeStep - Computes the next timestep for a currently running pseudo-timestepping process. @@ -62,7 +62,7 @@ PetscErrorCode TSPseudoComputeTimeStep(TS ts, PetscReal *dt) /*@C TSPseudoVerifyTimeStepDefault - Default code to verify the quality of the last timestep. - Collective + Collective, No Fortran Support Input Parameters: + ts - the timestep context @@ -345,7 +345,6 @@ static PetscErrorCode TSView_Pseudo(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/* ----------------------------------------------------------------------------- */ /*@C TSPseudoSetVerifyTimeStep - Sets a user-defined routine to verify the quality of the last timestep. @@ -642,7 +641,7 @@ PETSC_EXTERN PetscErrorCode TSCreate_Pseudo(TS ts) /*@C TSPseudoTimeStepDefault - Default code to compute pseudo-timestepping. Use with `TSPseudoSetTimeStep()`. 
- Collective + Collective, No Fortran Support Input Parameters: + ts - the timestep context diff --git a/src/ts/impls/python/ftn-custom/makefile b/src/ts/impls/python/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/ts/impls/python/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/impls/python/ftn-custom/zpythontf.c b/src/ts/impls/python/ftn-custom/zpythontf.c deleted file mode 100644 index 8f0923d7e45..00000000000 --- a/src/ts/impls/python/ftn-custom/zpythontf.c +++ /dev/null @@ -1,17 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tspythonsettype_ TSPYTHONSETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tspythonsettype_ tspythonsettype -#endif - -PETSC_EXTERN void tspythonsettype_(TS *ts, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(name, len, t); - *ierr = TSPythonSetType(*ts, t); - if (*ierr) return; - FREECHAR(name, t); -} diff --git a/src/ts/impls/python/pythonts.c b/src/ts/impls/python/pythonts.c index d0fa1f14904..e41793858c9 100644 --- a/src/ts/impls/python/pythonts.c +++ b/src/ts/impls/python/pythonts.c @@ -1,6 +1,6 @@ #include <petscts.h> /*I "petscts.h" I*/ -/*@C +/*@ TSPythonSetType - Initialize a `TS` object implemented in Python. Collective @@ -25,7 +25,7 @@ PetscErrorCode TSPythonSetType(TS ts, const char pyname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSPythonGetType - Get the type of a `TS` object implemented in Python. Not Collective diff --git a/src/ts/impls/rosw/ftn-custom/makefile b/src/ts/impls/rosw/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/ts/impls/rosw/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/impls/rosw/ftn-custom/zroswf.c b/src/ts/impls/rosw/ftn-custom/zroswf.c deleted file mode 100644 index df1049d3338..00000000000 --- a/src/ts/impls/rosw/ftn-custom/zroswf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tsroswsettype_ TSROSWSETTYPE - #define tsroswgettype_ TSROSWGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tsroswsettype_ tsroswsettype - #define tsroswgettype_ tsroswgettype -#endif - -PETSC_EXTERN void tsroswsettype_(TS *ts, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = TSRosWSetType(*ts, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void tsroswgettype_(TS *ts, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = TSRosWGetType(*ts, &tname); - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ts/impls/rosw/rosw.c b/src/ts/impls/rosw/rosw.c index 6f12f9c56a8..6abf83e4c76 100644 --- a/src/ts/impls/rosw/rosw.c +++ b/src/ts/impls/rosw/rosw.c @@ -127,6 +127,33 @@ M*/ .seealso: [](ch_ts), `TSROSW` M*/ +/*MC + TSROSWR34PRW - Four stage third order L-stable Rosenbrock-W scheme for PDAE of index 1 {cite}`rang2015improved`. + + Only an approximate Jacobian is needed. By default, it is only recomputed once per step. 
+ + This is strongly A-stable with R(infty) = 0. The embedded method of order 2 is strongly A-stable with R(infty) = 0.25. + This method is B_{PR} consistent of order 3. + This method is spelled "ROS34PRw" in the paper, an improvement to an earlier "ROS34PRW" method from the same author with B_{PR} order 2. + + Level: intermediate + +.seealso: [](ch_ts), `TSROSW` +M*/ + +/*MC + TSROSWR3PRL2 - Four stage third order L-stable Rosenbrock-W scheme for PDAE of index 1 {cite}`rang2015improved`. + + Only an approximate Jacobian is needed. By default, it is only recomputed once per step. + + This is strongly A-stable with R(infty) = 0. The embedded method of order 2 is strongly A-stable with R(infty) = 0.25. + This method is B_{PR} consistent of order 3. + + Level: intermediate + +.seealso: [](ch_ts), `TSROSW` +M*/ + /*MC TSROSWRODAS3 - Four stage third order L-stable Rosenbrock scheme {cite}`sandu_1997` @@ -139,6 +166,39 @@ M*/ .seealso: [](ch_ts), `TSROSW`, `TSROSWSANDU3` M*/ +/*MC + TSROSWRODASPR - Six stage fourth order L-stable Rosenbrock scheme {cite}`rang2015improved` + + By default, the Jacobian is only recomputed once per step. + + Both the fourth order and embedded third order methods are stiffly accurate and L-stable. + The method is B_{PR} consistent of order 3, which ensures convergence order for non-stiff, medium stiff, and stiff problems. + + Level: intermediate + +.seealso: [](ch_ts), `TSROSW`, `TSROSWR34PRW`, `TSROSWR3PRL2` +M*/ + +/*MC + TSROSWRODASPR2 - Six stage fourth order L-stable Rosenbrock scheme {cite}`rang2015improved` + + By default, the Jacobian is only recomputed once per step. + + Both the fourth order and embedded third order methods are stiffly accurate and L-stable. + The method is B_{PR} consistent of order 3, which ensures convergence order for non-stiff, medium stiff, and stiff problems. + This method is similar to `TSROSWRODASPR`, but satisfies one extra B_{PR} order condition. + + Developer Note: + In numerical experiments with ts/tutorials/ex22.c, I (Jed) find this to produce surprisingly poor results. + Although the coefficients pass basic smoke tests, I'm not confident it was tabulated correctly in the paper. + It would be informative if someone could reproduce tests from the paper and/or reach out to the author to understand why it fails on this test problem. + If the method is implemented correctly, doing so might shed light on an additional analysis lens (or further conditions) for robustness on such problems. 
+ + Level: intermediate + +.seealso: [](ch_ts), `TSROSW`, `TSROSWRODASPR` +M*/ + /*MC TSROSWSANDU3 - Three stage third order L-stable Rosenbrock scheme {cite}`sandu_1997` @@ -389,6 +449,44 @@ PetscErrorCode TSRosWRegisterAll(void) PetscCall(TSRosWRegister(TSROSWRA34PW2, 3, 4, &A[0][0], &Gamma[0][0], b, b2, 3, &binterpt[0][0])); } + { + /* const PetscReal g = 4.3586652150845900e-01; Directly written in-place below */ + const PetscReal A[4][4] = { + {0, 0, 0, 0}, + {8.7173304301691801e-01, 0, 0, 0}, + {1.4722022879435914e+00, -3.1840250568090289e-01, 0, 0}, + {8.1505192016694938e-01, 5.0000000000000000e-01, -3.1505192016694938e-01, 0} + }; + const PetscReal Gamma[4][4] = { + {4.3586652150845900e-01, 0, 0, 0 }, + {-8.7173304301691801e-01, 4.3586652150845900e-01, 0, 0 }, + {-1.2855347382089872e+00, 5.0507005541550687e-01, 4.3586652150845900e-01, 0 }, + {-4.8201449182864348e-01, 2.1793326075422950e-01, -1.7178529043404503e-01, 4.3586652150845900e-01} + }; + const PetscReal b[4] = {3.3303742833830591e-01, 7.1793326075422947e-01, -4.8683721060099439e-01, 4.3586652150845900e-01}; + const PetscReal b2[4] = {2.5000000000000000e-01, 7.4276119608319180e-01, -3.1472922970066219e-01, 3.2196803361747034e-01}; + + PetscCall(TSRosWRegister(TSROSWR34PRW, 3, 4, &A[0][0], &Gamma[0][0], b, b2, 0, NULL)); + } + { + /* const PetscReal g = 4.3586652150845900e-01; Directly written in-place below */ + const PetscReal A[4][4] = { + {0, 0, 0, 0}, + {1.3075995645253771e+00, 0, 0, 0}, + {0.5, 0.5, 0, 0}, + {0.5, 0.5, 0, 0} + }; + const PetscReal Gamma[4][4] = { + {4.3586652150845900e-01, 0, 0, 0 }, + {-1.3075995645253771e+00, 4.3586652150845900e-01, 0, 0 }, + {-7.0988575860972170e-01, -5.5996735960277766e-01, 4.3586652150845900e-01, 0 }, + {-1.5550856807552085e-01, -9.5388516575112225e-01, 6.7352721231818413e-01, 4.3586652150845900e-01} + }; + const PetscReal b[4] = {3.4449143192447917e-01, -4.5388516575112231e-01, 6.7352721231818413e-01, 4.3586652150845900e-01}; + const PetscReal b2[4] = {5.0000000000000000e-01, -2.5738812086522078e-01, 4.3542008724775044e-01, 3.2196803361747034e-01}; + + PetscCall(TSRosWRegister(TSROSWR3PRL2, 3, 4, &A[0][0], &Gamma[0][0], b, b2, 0, NULL)); + } { /* const PetscReal g = 0.5; Directly written in-place below */ const PetscReal A[4][4] = { @@ -408,6 +506,52 @@ PetscErrorCode TSRosWRegisterAll(void) PetscCall(TSRosWRegister(TSROSWRODAS3, 3, 4, &A[0][0], &Gamma[0][0], b, b2, 0, NULL)); } + { + /* const PetscReal g = 0.25; Directly written in-place below */ + const PetscReal A[6][6] = { + {0, 0, 0, 0, 0, 0}, + {0.75, 0, 0, 0, 0, 0}, + {7.5162877593868457e-02, 2.4837122406131545e-02, 0, 0, 0, 0}, + {1.6532708886396510e+00, 2.1545706385445562e-01, -1.3157488872766792e+00, 0, 0, 0}, + {1.9385003738039885e+01, 1.2007117225835324e+00, -1.9337924059522791e+01, -2.4779140110062559e-01, 0, 0}, + {-7.3844531665375115e+00, -3.0593419030174646e-01, 7.8622074209377981e+00, 5.7817993590145966e-01, 0.25, 0} + }; + const PetscReal Gamma[6][6] = { + {0.25, 0, 0, 0, 0, 0 }, + {-7.5000000000000000e-01, 0.25, 0, 0, 0, 0 }, + {-8.8644359075349941e-02, -2.8688974257983398e-02, 0.25, 0, 0, 0 }, + {-4.8470034585330284e+00, -3.1583244269672095e-01, 4.9536568360123221e+00, 0.25, 0, 0 }, + {-2.6769456904577400e+01, -1.5066459128852787e+00, 2.7200131480460591e+01, 8.2597133700208525e-01, 0.25, 0 }, + {6.5876206496361416e+00, 3.6807059172993878e-01, -6.7423520694658121e+00, -1.0619631475741095e-01, -3.5714285714285715e-01, 0.25} + }; + const PetscReal b[6] = {-7.9683251690137014e-01, 6.2136401428192344e-02, 
1.1198553514719862e+00, 4.7198362114404874e-01, -1.0714285714285714e-01, 0.25}; + const PetscReal b2[6] = {-7.3844531665375115e+00, -3.0593419030174646e-01, 7.8622074209377981e+00, 5.7817993590145966e-01, 0.25, 0.0}; + + PetscCall(TSRosWRegister(TSROSWRODASPR, 4, 6, &A[0][0], &Gamma[0][0], b, b2, 0, NULL)); + } + { + /* const PetscReal g = 0.3125; Directly written in-place below */ + const PetscReal A[6][6] = { + {0, 0, 0, 0, 0, 0}, + {9.3750000000000000e-01, 0, 0, 0, 0, 0}, + {-4.7145892646261345e-02, 5.4531286650471122e-01, 0, 0, 0, 0}, + {4.6915543899742240e-01, 4.4490537602383673e-01, -2.2498239334061121e-01, 0, 0, 0}, + {1.0950372887345903e+00, 6.3223023457294381e-01, -8.9232966090485821e-01, 1.6506213759732410e-01, 0, 0}, + {-1.7746585073632790e-01, -5.8241418952602364e-01, 6.8180612588238165e-01, 7.6557391437996980e-01, 3.1250000000000000e-01, 0} + }; + const PetscReal Gamma[6][6] = { + {0.3125, 0, 0, 0, 0, 0 }, + {-9.3750000000000000e-01, 0.3125, 0, 0, 0, 0 }, + {-9.7580572085994507e-02, -5.8666328499964138e-01, 0.3125, 0, 0, 0 }, + {-4.9407065013256957e-01, -5.6819726428975503e-01, 5.0318949274167679e-01, 0.3125, 0, 0 }, + {-1.2725031394709183e+00, -1.2146444240989676e+00, 1.5741357867872399e+00, 6.0051177678264578e-01, 0.3125, 0 }, + {6.9690744901421153e-01, 6.2237005730756434e-01, -1.1553701989197045e+00, 1.8350029013386296e-01, -6.5990759753593431e-01, 0.3125} + }; + const PetscReal b[6] = {5.1944159827788361e-01, 3.9955867781540699e-02, -4.7356407303732290e-01, 9.4907420451383284e-01, -3.4740759753593431e-01, 0.3125}; + const PetscReal b2[6] = {-1.7746585073632790e-01, -5.8241418952602364e-01, 6.8180612588238165e-01, 7.6557391437996980e-01, 0.3125, 0}; + + PetscCall(TSRosWRegister(TSROSWRODASPR2, 4, 6, &A[0][0], &Gamma[0][0], b, b2, 0, NULL)); + } { /*const PetscReal g = 0.43586652150845899941601945119356; Directly written in-place below */ const PetscReal A[3][3] = { @@ -1444,7 +1588,7 @@ static PetscErrorCode TSLoad_RosW(TS ts, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRosWSetType - Set the type of Rosenbrock-W, `TSROSW`, scheme Logically Collective @@ -1466,7 +1610,7 @@ PetscErrorCode TSRosWSetType(TS ts, TSRosWType roswtype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRosWGetType - Get the type of Rosenbrock-W scheme Logically Collective @@ -1489,7 +1633,7 @@ PetscErrorCode TSRosWGetType(TS ts, TSRosWType *rostype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRosWSetRecomputeJacobian - Set whether to recompute the Jacobian at each stage. The default is to update the Jacobian once per step. 
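The tableau registrations above follow the same TSRosWRegister calling sequence that user code can rely on. A minimal sketch registering a hypothetical one-stage tableau (linearly implicit Euler, order 1; the name "myroswbe" is illustrative and not a method shipped with PETSc) and then selecting it:

  #include <petscts.h>

  static PetscErrorCode RegisterMyRosW(TS ts)
  {
    const PetscReal A[1]     = {0.0}; /* explicit part of the tableau */
    const PetscReal Gamma[1] = {1.0}; /* implicit coefficients, gamma = 1 */
    const PetscReal b[1]     = {1.0}; /* weights */

    PetscFunctionBeginUser;
    /* no embedded scheme (NULL) and no dense output (0, NULL) */
    PetscCall(TSRosWRegister("myroswbe", 1, 1, A, Gamma, b, NULL, 0, NULL));
    PetscCall(TSSetType(ts, TSROSW));
    PetscCall(TSRosWSetType(ts, "myroswbe"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }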
Logically Collective diff --git a/src/ts/impls/symplectic/basicsymplectic/basicsymplectic.c b/src/ts/impls/symplectic/basicsymplectic/basicsymplectic.c index 7f60525461a..70d060e6535 100644 --- a/src/ts/impls/symplectic/basicsymplectic/basicsymplectic.c +++ b/src/ts/impls/symplectic/basicsymplectic/basicsymplectic.c @@ -208,13 +208,19 @@ static PetscErrorCode TSStep_BasicSymplectic(TS ts) IS is_q = bsymp->is_q, is_p = bsymp->is_p; TS subts_q = bsymp->subts_q, subts_p = bsymp->subts_p; PetscBool stageok = PETSC_TRUE; - PetscReal ptime = ts->ptime, next_time_step = ts->time_step; - PetscInt iter; + PetscReal ptime, next_time_step = ts->time_step; + PetscInt n; PetscFunctionBegin; + PetscCall(TSGetStepNumber(ts, &n)); + PetscCall(TSSetStepNumber(subts_p, n)); + PetscCall(TSSetStepNumber(subts_q, n)); + PetscCall(TSGetTime(ts, &ptime)); + PetscCall(TSSetTime(subts_p, ptime)); + PetscCall(TSSetTime(subts_q, ptime)); PetscCall(VecGetSubVector(update, is_q, &q_update)); PetscCall(VecGetSubVector(update, is_p, &p_update)); - for (iter = 0; iter < scheme->s; iter++) { + for (PetscInt iter = 0; iter < scheme->s; iter++) { PetscCall(TSPreStage(ts, ptime)); PetscCall(VecGetSubVector(solution, is_q, &q)); PetscCall(VecGetSubVector(solution, is_p, &p)); @@ -363,7 +369,7 @@ static PetscErrorCode TSComputeLinearStability_BasicSymplectic(TS ts, PetscReal PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSBasicSymplecticSetType - Set the type of the basic symplectic method Logically Collective @@ -392,7 +398,7 @@ PetscErrorCode TSBasicSymplecticSetType(TS ts, TSBasicSymplecticType bsymptype) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSBasicSymplecticGetType - Get the type of the basic symplectic method Logically Collective diff --git a/src/ts/interface/ftn-custom/ztscreatef.c b/src/ts/interface/ftn-custom/ztscreatef.c deleted file mode 100644 index 8cecceaead3..00000000000 --- a/src/ts/interface/ftn-custom/ztscreatef.c +++ /dev/null @@ -1,23 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tscreate_ TSCREATE - #define tsdestroy_ TSDESTROY -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tscreate_ tscreate - #define tsdestroy_ tsdestroy -#endif - -PETSC_EXTERN void tscreate_(MPI_Comm *comm, TS *outts, PetscErrorCode *ierr) -{ - *ierr = TSCreate(MPI_Comm_f2c(*(MPI_Fint *)&*comm), outts); -} - -PETSC_EXTERN void tsdestroy_(TS *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = TSDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/ts/interface/ftn-custom/ztsf.c b/src/ts/interface/ftn-custom/ztsf.c index 17a157eb10b..8be351b998d 100644 --- a/src/ts/interface/ftn-custom/ztsf.c +++ b/src/ts/interface/ftn-custom/ztsf.c @@ -13,10 +13,6 @@ #define tsgetifunction_ TSGETIFUNCTION #define tssetijacobian_ TSSETIJACOBIAN #define tsgetijacobian_ TSGETIJACOBIAN - #define tsview_ TSVIEW - #define tssetoptionsprefix_ TSSETOPTIONSPREFIX - #define tsgetoptionsprefix_ TSGETOPTIONSPREFIX - #define tsappendoptionsprefix_ TSAPPENDOPTIONSPREFIX #define tsmonitorset_ TSMONITORSET #define tscomputerhsfunctionlinear_ TSCOMPUTERHSFUNCTIONLINEAR #define tscomputerhsjacobianconstant_ TSCOMPUTERHSJACOBIANCONSTANT @@ -25,7 +21,6 @@ #define tsmonitordefault_ TSMONITORDEFAULT #define tssetprestep_ TSSETPRESTEP #define tssetpoststep_ TSSETPOSTSTEP - #define tsviewfromoptions_ TSVIEWFROMOPTIONS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define tsmonitorlgsettransform_ tsmonitorlgsettransform #define tssetrhsfunction_ 
tssetrhsfunction @@ -36,10 +31,6 @@ #define tsgetifunction_ tsgetifunction #define tssetijacobian_ tssetijacobian #define tsgetijacobian_ tsgetijacobian - #define tsview_ tsview - #define tssetoptionsprefix_ tssetoptionsprefix - #define tsgetoptionsprefix_ tsgetoptionsprefix - #define tsappendoptionsprefix_ tsappendoptionsprefix #define tsmonitorset_ tsmonitorset #define tscomputerhsfunctionlinear_ tscomputerhsfunctionlinear #define tscomputerhsjacobianconstant_ tscomputerhsjacobianconstant @@ -48,7 +39,6 @@ #define tsmonitordefault_ tsmonitordefault #define tssetprestep_ tssetprestep #define tssetpoststep_ tssetpoststep - #define tsviewfromoptions_ tsviewfromoptions #endif static struct { @@ -271,46 +261,3 @@ PETSC_EXTERN void tsgetrhsjacobian_(TS *ts, Mat *J, Mat *M, int *func, void **ct { *ierr = TSGetRHSJacobian(*ts, J, M, NULL, ctx); } - -PETSC_EXTERN void tsview_(TS *ts, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = TSView(*ts, v); -} - -PETSC_EXTERN void tssetoptionsprefix_(TS *ts, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(prefix, len, t); - *ierr = TSSetOptionsPrefix(*ts, t); - if (*ierr) return; - FREECHAR(prefix, t); -} -PETSC_EXTERN void tsgetoptionsprefix_(TS *ts, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = TSGetOptionsPrefix(*ts, &tname); - *ierr = PetscStrncpy(prefix, tname, len); - FIXRETURNCHAR(PETSC_TRUE, prefix, len); -} -PETSC_EXTERN void tsappendoptionsprefix_(TS *ts, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(prefix, len, t); - *ierr = TSAppendOptionsPrefix(*ts, t); - if (*ierr) return; - FREECHAR(prefix, t); -} - -PETSC_EXTERN void tsviewfromoptions_(TS *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = TSViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/ts/interface/ftn-custom/ztsregf.c b/src/ts/interface/ftn-custom/ztsregf.c deleted file mode 100644 index 934ad2f46e8..00000000000 --- a/src/ts/interface/ftn-custom/ztsregf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscts.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tssettype_ TSSETTYPE - #define tsgettype_ TSGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tssettype_ tssettype - #define tsgettype_ tsgettype -#endif - -PETSC_EXTERN void tssettype_(TS *ts, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = TSSetType(*ts, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void tsgettype_(TS *ts, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = TSGetType(*ts, &tname); - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/ts/interface/sensitivity/tssen.c b/src/ts/interface/sensitivity/tssen.c index 7caa1c1350d..2b2c63ea0c8 100644 --- a/src/ts/interface/sensitivity/tssen.c +++ b/src/ts/interface/sensitivity/tssen.c @@ -150,7 +150,7 @@ PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, Pe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeIJacobianP - Runs the user-defined IJacobianP function. 
Collective @@ -284,7 +284,7 @@ PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGetCostIntegral - Returns the values of the integral term in the cost functions. It is valid to call the routine after a backward run. @@ -298,7 +298,7 @@ PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, Pet Level: intermediate -.seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, ``TSSetCostIntegrand()` +.seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()` @*/ PetscErrorCode TSGetCostIntegral(TS ts, Vec *v) { @@ -312,7 +312,7 @@ PetscErrorCode TSGetCostIntegral(TS ts, Vec *v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeCostIntegrand - Evaluates the integral function in the cost functions. Input Parameters: @@ -346,7 +346,7 @@ PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q) } // PetscClangLinter pragma disable: -fdoc-* -/*@C +/*@ TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` Level: deprecated @@ -364,7 +364,7 @@ PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) } // PetscClangLinter pragma disable: -fdoc-* -/*@C +/*@ TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` Level: deprecated @@ -444,7 +444,7 @@ PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianp PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu. Collective @@ -467,7 +467,7 @@ PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianp .seealso: [](ch_ts), `TSSetIHessianProduct()` @*/ -PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); @@ -485,7 +485,7 @@ PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup. Collective @@ -508,7 +508,7 @@ PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec .seealso: [](ch_ts), `TSSetIHessianProduct()` @*/ -PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); @@ -526,7 +526,7 @@ PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu. 
Collective @@ -549,7 +549,7 @@ PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec .seealso: [](ch_ts), `TSSetIHessianProduct()` @*/ -PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); @@ -590,7 +590,7 @@ PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec .seealso: [](ch_ts), `TSSetIHessianProduct()` @*/ -PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); @@ -675,7 +675,7 @@ PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu. Collective @@ -698,7 +698,7 @@ PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshe .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()` @*/ -PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); @@ -709,7 +709,7 @@ PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, V PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup. Collective @@ -732,7 +732,7 @@ PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, V .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()` @*/ -PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); @@ -743,7 +743,7 @@ PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, V PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu. Collective @@ -766,7 +766,7 @@ PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, V .seealso: [](ch_ts), `TSSetRHSHessianProduct()` @*/ -PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); @@ -777,7 +777,7 @@ PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, V PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 
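The wrappers above standardize the array arguments to the Vec Vl[]/Vec VHV[] spelling. For orientation, a sketch of a matching user callback as it would be handed to TSSetIHessianProduct; the trailing context pointer is an assumption following the usual PETSc callback convention, not taken from this hunk:

  /* Sketch of a Fuu vector-Hessian-vector product callback */
  static PetscErrorCode MyIHessianProductUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], void *ctx)
  {
    PetscFunctionBeginUser;
    /* fill VHV[0] with (Vl[0]^T F_UU(U)) Vr; zero Hessian as a placeholder */
    PetscCall(VecSet(VHV[0], 0.0));
    PetscFunctionReturn(PETSC_SUCCESS);
  }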
Collective @@ -800,7 +800,7 @@ PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, V .seealso: [](ch_ts), `TSSetRHSHessianProduct()` @*/ -PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) +PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[]) { PetscFunctionBegin; if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); diff --git a/src/ts/interface/ts.c b/src/ts/interface/ts.c index d5c5f3983ff..ee32ffb8140 100644 --- a/src/ts/interface/ts.c +++ b/src/ts/interface/ts.c @@ -1786,7 +1786,7 @@ PetscErrorCode TS2GetSolution(TS ts, Vec *u, Vec *v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSLoad - Loads a `TS` that has been stored in binary with `TSView()`. Collective @@ -1837,7 +1837,7 @@ PetscErrorCode TSLoad(TS ts, PetscViewer viewer) #include #endif -/*@C +/*@ TSViewFromOptions - View a `TS` based on values in the options database Collective @@ -1859,7 +1859,7 @@ PetscErrorCode TSViewFromOptions(TS ts, PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSView - Prints the `TS` data structure. Collective @@ -2395,7 +2395,7 @@ PetscErrorCode TSSetProblemType(TS ts, TSProblemType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGetProblemType - Gets the type of problem to be solved. Not collective @@ -2613,7 +2613,7 @@ PetscErrorCode TSReset(TS ts) static PetscErrorCode TSResizeReset(TS); -/*@C +/*@ TSDestroy - Destroys the timestepper context that was created with `TSCreate()`. @@ -3759,7 +3759,7 @@ static PetscErrorCode TSResizeTransferVecs(TS ts, PetscInt cnt, Vec vecsin[], Ve .seealso: [](ch_ts), `TS`, `TSSetResize()`, `TSResize()`, `TSResizeRetrieveVec()` @*/ -PetscErrorCode TSResizeRegisterVec(TS ts, const char *name, Vec vec) +PetscErrorCode TSResizeRegisterVec(TS ts, const char name[], Vec vec) { PetscFunctionBegin; PetscValidHeaderSpecific(ts, TS_CLASSID, 1); @@ -3787,7 +3787,7 @@ PetscErrorCode TSResizeRegisterVec(TS ts, const char *name, Vec vec) .seealso: [](ch_ts), `TS`, `TSSetResize()`, `TSResize()`, `TSResizeRegisterVec()` @*/ -PetscErrorCode TSResizeRetrieveVec(TS ts, const char *name, Vec *vec) +PetscErrorCode TSResizeRetrieveVec(TS ts, const char name[], Vec *vec) { PetscFunctionBegin; PetscValidHeaderSpecific(ts, TS_CLASSID, 1); @@ -4106,7 +4106,7 @@ PetscErrorCode TSSolve(TS ts, Vec u) When called during time step evaluation (e.g. during residual evaluation or via hooks set using `TSSetPreStep()`, `TSSetPreStage()`, `TSSetPostStage()`, or `TSSetPostStep()`), the time is the time at the start of the step being evaluated. -.seealso: [](ch_ts), `TS`, ``TSGetSolveTime()`, `TSSetTime()`, `TSGetTimeStep()`, `TSGetStepNumber()` +.seealso: [](ch_ts), `TS`, `TSGetSolveTime()`, `TSSetTime()`, `TSGetTimeStep()`, `TSGetStepNumber()` @*/ PetscErrorCode TSGetTime(TS ts, PetscReal *t) { @@ -4130,7 +4130,7 @@ PetscErrorCode TSGetTime(TS ts, PetscReal *t) Level: beginner -.seealso: [](ch_ts), `TS`, ``TSGetTime()`, `TSGetSolveTime()`, `TSGetTimeStep()` +.seealso: [](ch_ts), `TS`, `TSGetTime()`, `TSGetSolveTime()`, `TSGetTimeStep()` @*/ PetscErrorCode TSGetPrevTime(TS ts, PetscReal *t) { @@ -4163,7 +4163,7 @@ PetscErrorCode TSSetTime(TS ts, PetscReal t) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSSetOptionsPrefix - Sets the prefix used for searching for all TS options in the database. 
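TSSetOptionsPrefix and TSAppendOptionsPrefix, now plain /*@ functions, compose in the usual way; a minimal sketch (the "sub_" and "stage_" prefixes are illustrative):

  #include <petscts.h>

  static PetscErrorCode ConfigurePrefixed(TS ts)
  {
    PetscFunctionBeginUser;
    PetscCall(TSSetOptionsPrefix(ts, "sub_"));      /* options become -sub_ts_type, -sub_ts_dt, ... */
    PetscCall(TSAppendOptionsPrefix(ts, "stage_")); /* now -sub_stage_ts_type, ... */
    PetscCall(TSSetFromOptions(ts));
    PetscFunctionReturn(PETSC_SUCCESS);
  }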
@@ -4194,7 +4194,7 @@ PetscErrorCode TSSetOptionsPrefix(TS ts, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSAppendOptionsPrefix - Appends to the prefix used for searching for all TS options in the database. @@ -4225,7 +4225,7 @@ PetscErrorCode TSAppendOptionsPrefix(TS ts, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGetOptionsPrefix - Sets the prefix used for searching for all `TS` options in the database. @@ -4683,7 +4683,7 @@ PetscErrorCode TSSetEquationType(TS ts, TSEquationType equation_type) Note: Can only be called after the call to `TSSolve()` is complete. -.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSSetConvergenceTest()`, `TSConvergedReason` +.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSConvergedReason` @*/ PetscErrorCode TSGetConvergedReason(TS ts, TSConvergedReason *reason) { @@ -4735,7 +4735,7 @@ PetscErrorCode TSSetConvergedReason(TS ts, TSConvergedReason reason) Note: Can only be called after the call to `TSSolve()` is complete. -.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSSetConvergenceTest()`, `TSConvergedReason` +.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSConvergedReason` @*/ PetscErrorCode TSGetSolveTime(TS ts, PetscReal *ftime) { @@ -5467,7 +5467,7 @@ PetscErrorCode TSFunctionDomainError(TS ts, PetscReal stagetime, Vec Y, PetscBoo PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSClone - This function clones a time step `TS` object. Collective @@ -5609,7 +5609,7 @@ PetscErrorCode TSRHSJacobianTest(TS ts, PetscBool *flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRHSJacobianTestTranspose - Compares the multiply transpose routine provided to the `MATSHELL` with differencing on the `TS` given RHS function. Logically Collective @@ -5787,7 +5787,7 @@ PetscErrorCode TSSetTimeSpan(TS ts, PetscInt n, PetscReal *span_times) .seealso: [](ch_ts), `TS`, `TSSetTimeSpan()`, `TSGetTimeSpanSolutions()` @*/ -PetscErrorCode TSGetTimeSpan(TS ts, PetscInt *n, const PetscReal **span_times) +PetscErrorCode TSGetTimeSpan(TS ts, PetscInt *n, const PetscReal *span_times[]) { PetscFunctionBegin; PetscValidHeaderSpecific(ts, TS_CLASSID, 1); @@ -5839,7 +5839,7 @@ PetscErrorCode TSGetTimeSpanSolutions(TS ts, PetscInt *nsol, Vec **Sols) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSPruneIJacobianColor - Remove nondiagonal zeros in the Jacobian matrix and update the `MatMFFD` coloring information. Collective diff --git a/src/ts/interface/tscreate.c b/src/ts/interface/tscreate.c index 6301a420e81..d613c1b359e 100644 --- a/src/ts/interface/tscreate.c +++ b/src/ts/interface/tscreate.c @@ -3,7 +3,7 @@ const char *const TSConvergedReasons_Shifted[] = {"ADJOINT_DIVERGED_LINEAR_SOLVE", "FORWARD_DIVERGED_LINEAR_SOLVE", "DIVERGED_STEP_REJECTED", "DIVERGED_NONLINEAR_SOLVE", "CONVERGED_ITERATING", "CONVERGED_TIME", "CONVERGED_ITS", "CONVERGED_USER", "CONVERGED_EVENT", "CONVERGED_PSEUDO_FATOL", "CONVERGED_PSEUDO_FATOL", "TSConvergedReason", "TS_", NULL}; const char *const *TSConvergedReasons = TSConvergedReasons_Shifted + 4; -/*@C +/*@ TSCreate - This function creates an empty timestepper. The problem type can then be set with `TSSetProblemType()` and the type of solver can then be set with `TSSetType()`. 
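The time-span routines above pair as follows; a short sketch using the signatures shown in this file (the three sample times on [0, 1] are illustrative):

  #include <petscts.h>

  static PetscErrorCode SolveOverSpan(TS ts, Vec u)
  {
    PetscReal        times[3] = {0.0, 0.5, 1.0};
    PetscInt         n;
    const PetscReal *span;
    Vec             *sols;

    PetscFunctionBeginUser;
    PetscCall(TSSetTimeSpan(ts, 3, times));
    PetscCall(TSSolve(ts, u));
    PetscCall(TSGetTimeSpan(ts, &n, &span));          /* n == 3 */
    PetscCall(TSGetTimeSpanSolutions(ts, &n, &sols)); /* one solution Vec per span time */
    PetscFunctionReturn(PETSC_SUCCESS);
  }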
@@ -32,7 +32,6 @@ PetscErrorCode TSCreate(MPI_Comm comm, TS *ts) PetscFunctionBegin; PetscAssertPointer(ts, 2); - *ts = NULL; PetscCall(TSInitializePackage()); PetscCall(PetscHeaderCreate(t, TS_CLASSID, "TS", "Time stepping", "TS", comm, TSDestroy, TSView)); diff --git a/src/ts/interface/tsreg.c b/src/ts/interface/tsreg.c index 0f4853739d4..d4915ce29f8 100644 --- a/src/ts/interface/tsreg.c +++ b/src/ts/interface/tsreg.c @@ -3,7 +3,7 @@ PetscFunctionList TSList = NULL; PetscBool TSRegisterAllCalled = PETSC_FALSE; -/*@C +/*@ TSSetType - Sets the method to be used as the timestepping solver. Collective @@ -63,7 +63,7 @@ PetscErrorCode TSSetType(TS ts, TSType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSGetType - Gets the `TS` method type (as a string). Not Collective @@ -92,7 +92,7 @@ PetscErrorCode TSGetType(TS ts, TSType *type) /*@C TSRegister - Adds a creation method to the `TS` package. - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine diff --git a/src/ts/interface/tsrhssplit.c b/src/ts/interface/tsrhssplit.c index 68a01532f74..c6290fcf3eb 100644 --- a/src/ts/interface/tsrhssplit.c +++ b/src/ts/interface/tsrhssplit.c @@ -15,7 +15,7 @@ static PetscErrorCode TSRHSSplitGetRHSSplit(TS ts, const char splitname[], TS_RH PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRHSSplitSetIS - Set the index set for the specified split Logically Collective @@ -61,7 +61,7 @@ PetscErrorCode TSRHSSplitSetIS(TS ts, const char splitname[], IS is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRHSSplitGetIS - Retrieves the elements for a split as an `IS` Logically Collective @@ -143,7 +143,7 @@ PetscErrorCode TSRHSSplitSetRHSFunction(TS ts, const char splitname[], Vec r, TS PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSRHSSplitGetSubTS - Get the sub-`TS` by split name. 
Logically Collective diff --git a/src/ts/tests/ex11.c b/src/ts/tests/ex11.c index afe49df87d0..72a019aabbf 100644 --- a/src/ts/tests/ex11.c +++ b/src/ts/tests/ex11.c @@ -76,12 +76,6 @@ static PetscErrorCode DMCreate_Dummy(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode MatOrderingCreate_Dummy(Mat mat, MatOrderingType mtype, IS *isr, IS *isc) -{ - PetscFunctionBeginUser; - PetscFunctionReturn(PETSC_SUCCESS); -} - static PetscErrorCode MatPartitioningCreate_Dummy(MatPartitioning mat) { PetscFunctionBeginUser; @@ -156,7 +150,7 @@ static PetscErrorCode PetscRandomCreate_Dummy(PetscRandom arand) int main(int argc, char **argv) { - PetscReal A[1], Gamma[1] = {1.0}, b[1], c[1], d[1]; + PetscReal A[1] = {0.0}, Gamma[1] = {1.0}, b[1] = {1.0}, c[1] = {1.0}, d[1] = {1.0}; PetscFunctionBeginUser; PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); @@ -166,7 +160,7 @@ int main(int argc, char **argv) PetscCall(TSGLLERegister("dummy", TSGLLECreate_Dummy)); PetscCall(TSRKRegister("dummy", 0, 0, A, 0, 0, 0, 0, 0)); PetscCall(TSGLEERegister("dummy", 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)); - PetscCall(TSARKIMEXRegister("dummy", 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)); + PetscCall(TSARKIMEXRegister("dummy", 0, 1, A, b, c, A, b, c, 0, 0, 0, 0, 0)); PetscCall(TSRosWRegister("dummy", 0, 1, A, Gamma, b, 0, 0, 0)); PetscCall(TSBasicSymplecticRegister("dummy", 0, 0, c, d)); PetscCall(TSAdaptRegister("dummy", TSAdaptCreate_Dummy)); @@ -180,7 +174,6 @@ int main(int argc, char **argv) PetscCall(KSPRegister("dummy", KSPCreate_Dummy)); PetscCall(PCRegister("dummy", PCCreate_Dummy)); PetscCall(DMRegister("dummy", DMCreate_Dummy)); - PetscCall(MatOrderingRegister("dummy", MatOrderingCreate_Dummy)); PetscCall(MatPartitioningRegister("dummy", MatPartitioningCreate_Dummy)); PetscCall(MatRegister("dummy", MatCreate_Dummy)); PetscCall(PFRegister("dummy", PFCreate_Dummy)); diff --git a/src/ts/tests/ex18.c b/src/ts/tests/ex18.c index 24654ba448d..8836e1a2ce1 100644 --- a/src/ts/tests/ex18.c +++ b/src/ts/tests/ex18.c @@ -20,11 +20,12 @@ int main(int argc, char **argv) { TS ts; Vec x; - PetscBool dae = PETSC_TRUE; + PetscBool dae = PETSC_TRUE, random = PETSC_FALSE; PetscFunctionBeginUser; PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); PetscCall(PetscOptionsGetBool(NULL, NULL, "-dae", &dae, NULL)); + PetscCall(PetscOptionsGetBool(NULL, NULL, "-random", &random, NULL)); PetscCall(TSCreate(PETSC_COMM_WORLD, &ts)); PetscCall(TSSetIFunction(ts, NULL, IFunction, &dae)); @@ -34,7 +35,10 @@ int main(int argc, char **argv) PetscCall(VecSetSizes(x, 2, PETSC_DECIDE)); PetscCall(VecSetFromOptions(x)); PetscCall(VecSetUp(x)); - PetscCall(VecSet(x, 0.5)); + if (random) { + PetscCall(VecSetRandom(x, NULL)); + PetscCall(VecRealPart(x)); + } else PetscCall(VecSet(x, 0.5)); PetscCall(TSSetSolution(ts, x)); PetscCall(VecDestroy(&x)); @@ -102,7 +106,7 @@ PetscErrorCode IJacobian(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal shift, M /*TEST testset: - args: -ts_view_solution -ts_max_steps 10 -ts_dt 0.1 -ts_view_solution -ts_adapt_type {{none basic}} -ts_exact_final_time matchstep + args: -ts_view_solution -ts_max_steps 10 -ts_dt 0.1 -ts_view_solution -ts_adapt_type {{none basic}} -ts_exact_final_time matchstep -snes_error_if_not_converged test: output_file: output/ex18_1.out @@ -114,4 +118,9 @@ PetscErrorCode IJacobian(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal shift, M suffix: dirk args: -dae {{0 1}} -ts_type dirk -ts_dirk_type {{s212 es122sal es213sal es324sal es325sal 657a es648sa 658a s659a 
7510sal es7510sa 759a s7511sal 8614a 8616sal es8516sal}} + test: + output_file: output/ex18_1.out + suffix: dirk_explicit_first_random_dae + args: -dae -ts_type dirk -ts_dirk_type es122sal -random 1 -ts_max_reject -1 + TEST*/ diff --git a/src/ts/trajectory/impls/memory/trajmemory.c b/src/ts/trajectory/impls/memory/trajmemory.c index ce6dd61d078..d4daca7edd9 100644 --- a/src/ts/trajectory/impls/memory/trajmemory.c +++ b/src/ts/trajectory/impls/memory/trajmemory.c @@ -1882,7 +1882,7 @@ PETSC_UNUSED static PetscErrorCode TSTrajectorySetUseDRAM(TSTrajectory tj, Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectoryMemorySetType - sets the software that is used to generate the checkpointing schedule. Logically Collective @@ -1905,7 +1905,7 @@ PetscErrorCode TSTrajectoryMemorySetType(TSTrajectory tj, TSTrajectoryMemoryType PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectorySetMaxCpsRAM - Set maximum number of checkpoints in RAM Logically Collective @@ -1927,7 +1927,7 @@ PetscErrorCode TSTrajectorySetMaxCpsRAM(TSTrajectory tj, PetscInt max_cps_ram) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectorySetMaxCpsDisk - Set maximum number of checkpoints on disk Logically Collective @@ -1949,7 +1949,7 @@ PetscErrorCode TSTrajectorySetMaxCpsDisk(TSTrajectory tj, PetscInt max_cps_disk) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectorySetMaxUnitsRAM - Set maximum number of checkpointing units in RAM Logically Collective @@ -1971,7 +1971,7 @@ PetscErrorCode TSTrajectorySetMaxUnitsRAM(TSTrajectory tj, PetscInt max_units_ra PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectorySetMaxUnitsDisk - Set maximum number of checkpointing units on disk Logically Collective diff --git a/src/ts/trajectory/interface/ftn-custom/makefile b/src/ts/trajectory/interface/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/ts/trajectory/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/ts/trajectory/interface/ftn-custom/ztrajf.c b/src/ts/trajectory/interface/ftn-custom/ztrajf.c deleted file mode 100644 index f2f826d0af6..00000000000 --- a/src/ts/trajectory/interface/ftn-custom/ztrajf.c +++ /dev/null @@ -1,39 +0,0 @@ -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define tstrajectorysetdirname_ TSTRAJECTORYSETDIRNAME - #define tstrajectorysetfiletemplate_ TSTRAJECTORYSETFILETEMPLATE - #define tstrajectoryviewfromoptions_ TSTRAJECTORYVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define tstrajectorysetdirname_ tstrajectorysetdirname - #define tstrajectorysetfiletemplate_ tstrajectorysetfiletemplate - #define tstrajectoryviewfromoptions_ tstrajectoryviewfromoptions -#endif - -PETSC_EXTERN void tstrajectorysetdirname_(TSTrajectory *tj, char dirname[], int *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(dirname, len, t); - *ierr = TSTrajectorySetDirname(*tj, t); - if (*ierr) return; - FREECHAR(dirname, t); -} - -PETSC_EXTERN void tstrajectorysetfiletemplate_(TSTrajectory *tj, char filetemplate[], int *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - FIXCHAR(filetemplate, len, t); - *ierr = TSTrajectorySetFiletemplate(*tj, t); - FREECHAR(filetemplate, t); -} -PETSC_EXTERN void tstrajectoryviewfromoptions_(TSTrajectory *ao, PetscObject obj, char *type, PetscErrorCode *ierr, 
PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = TSTrajectoryViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/ts/trajectory/interface/traj.c b/src/ts/trajectory/interface/traj.c index b21c789e3bc..680f357fac1 100644 --- a/src/ts/trajectory/interface/traj.c +++ b/src/ts/trajectory/interface/traj.c @@ -10,7 +10,7 @@ PetscLogEvent TSTrajectory_Set, TSTrajectory_Get, TSTrajectory_GetVecs, TSTr /*@C TSTrajectoryRegister - Adds a way of storing trajectories to the `TS` package - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - the name of a new user-defined creation routine @@ -252,7 +252,7 @@ PetscErrorCode TSTrajectoryGetVecs(TSTrajectory tj, TS ts, PetscInt stepnum, Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectoryViewFromOptions - View a `TSTrajectory` based on values in the options database Collective @@ -274,7 +274,7 @@ PetscErrorCode TSTrajectoryViewFromOptions(TSTrajectory A, PetscObject obj, cons PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectoryView - Prints information about the trajectory object Collective @@ -399,13 +399,11 @@ PetscErrorCode TSTrajectoryCreate(MPI_Comm comm, TSTrajectory *tj) PetscFunctionBegin; PetscAssertPointer(tj, 2); - *tj = NULL; PetscCall(TSInitializePackage()); PetscCall(PetscHeaderCreate(t, TSTRAJECTORY_CLASSID, "TSTrajectory", "Time stepping", "TS", comm, TSTrajectoryDestroy, TSTrajectoryView)); t->setupcalled = PETSC_FALSE; PetscCall(TSHistoryCreate(comm, &t->tsh)); - t->lag.order = 1; t->lag.L = NULL; t->lag.T = NULL; @@ -431,7 +429,7 @@ PetscErrorCode TSTrajectoryCreate(MPI_Comm comm, TSTrajectory *tj) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectorySetType - Sets the storage method to be used as in a trajectory Collective @@ -472,7 +470,7 @@ PetscErrorCode TSTrajectorySetType(TSTrajectory tj, TS ts, TSTrajectoryType type PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectoryGetType - Gets the trajectory type Collective @@ -711,7 +709,7 @@ PetscErrorCode TSTrajectorySetKeepFiles(TSTrajectory tj, PetscBool flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectorySetDirname - Specify the name of the directory where `TSTrajectory` disk checkpoints are stored. Collective @@ -745,7 +743,7 @@ PetscErrorCode TSTrajectorySetDirname(TSTrajectory tj, const char dirname[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ TSTrajectorySetFiletemplate - Specify the name template for the files storing `TSTrajectory` checkpoints. Collective diff --git a/src/ts/tutorials/advection-diffusion-reaction/shashi.F90 b/src/ts/tutorials/advection-diffusion-reaction/shashi.F90 index d17dc39d8ae..18ab6564a3e 100644 --- a/src/ts/tutorials/advection-diffusion-reaction/shashi.F90 +++ b/src/ts/tutorials/advection-diffusion-reaction/shashi.F90 @@ -143,7 +143,6 @@ subroutine FormFunction(snes,x,f,dummy,ierr) PetscCall(VecRestoreArrayReadF90(x,lx_v,ierr)) PetscCall(VecRestoreArrayF90(f,lf_v,ierr)) - return end ! 
--------------------------------------------------------------------- @@ -186,7 +185,6 @@ subroutine FormJacobian(snes,X,jac,B,dummy,ierr) PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)) - return end subroutine ShashiLowerBound(an_r) @@ -198,7 +196,6 @@ subroutine ShashiLowerBound(an_r) do i=2,26 an_r(i) = 1000.0/6.023D+23 enddo - return end subroutine ShashiInitialGuess(an_r) @@ -294,7 +291,6 @@ subroutine ShashiInitialGuess(an_r) an_r(26) = 6.149551D-18 #endif - return end subroutine ShashiFormFunction(an_r,f_eq) @@ -493,7 +489,6 @@ subroutine ShashiFormFunction(an_r,f_eq) write(44,*)i,f_eq(i) enddo - return end subroutine ShashiFormJacobian(an_r,d_eq) @@ -1011,7 +1006,6 @@ subroutine ShashiFormJacobian(an_r,d_eq) enddo enddo - return end subroutine ShashiPostCheck(ls,X,Y,W,c_Y,c_W,dummy) @@ -1036,5 +1030,4 @@ subroutine ShashiPostCheck(ls,X,Y,W,c_Y,c_W,dummy) endif enddo PetscCall(VecRestoreArrayF90(W,xx,ierr)) - return end diff --git a/src/ts/tutorials/ex11.c b/src/ts/tutorials/ex11.c index dd666851ac3..a3f5e23e741 100644 --- a/src/ts/tutorials/ex11.c +++ b/src/ts/tutorials/ex11.c @@ -398,7 +398,7 @@ static PetscErrorCode CreateQFunctionContext_SW(Physics phys, Ceed ceed, CeedQFu PetscCallCEED(CeedQFunctionContextCreate(ceed, qfCtx)); PetscCallCEED(CeedQFunctionContextSetData(*qfCtx, CEED_MEM_HOST, CEED_USE_POINTER, sizeof(*sw), sw)); PetscCallCEED(CeedQFunctionContextSetDataDestroy(*qfCtx, CEED_MEM_HOST, FreeContextPetsc)); - PetscCallCEED(CeedQFunctionContextRegisterDouble(*qfCtx, "gravity", offsetof(Physics_SW, gravity), 1, "Accelaration due to gravity")); + PetscCallCEED(CeedQFunctionContextRegisterDouble(*qfCtx, "gravity", offsetof(Physics_SW, gravity), 1, "Acceleration due to gravity")); PetscFunctionReturn(PETSC_SUCCESS); } #endif @@ -1687,7 +1687,7 @@ int initLinearWave(EulerNode *ux, const PetscReal gamma, const PetscReal coord[] test: suffix: tut_4 - requires: exodusii + requires: exodusii !single nsize: 4 args: -dm_distribute_overlap 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo -physics sw -monitor Height,Energy -petscfv_type leastsquares -petsclimiter_type minmod @@ -1800,7 +1800,7 @@ int initLinearWave(EulerNode *ux, const PetscReal gamma, const PetscReal coord[] test: suffix: euler_0 - requires: exodusii !complex + requires: exodusii !complex !single args: -eu_riemann godunov -bc_wall 100,101 -ufv_cfl 5 -petsclimiter_type sin \ -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo \ -ts_max_time 1 -ts_ssp_type rks2 -ts_ssp_nstages 10 @@ -1853,17 +1853,17 @@ int initLinearWave(EulerNode *ux, const PetscReal gamma, const PetscReal coord[] requires: exodusii args: -ufv_vtk_interval 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -bc_inflow 100,101,200 -bc_outflow 201 + # Run with -dm_forest_maximum_refinement 6 -ts_max_time 0.5 instead to get the full movie test: suffix: shock_0 requires: p4est !single !complex args: -dm_plex_box_faces 2,1 -grid_bounds -1,1.,0.,1 -grid_skew_60 \ - -dm_type p4est -dm_forest_partition_overlap 1 -dm_forest_maximum_refinement 6 -dm_forest_minimum_refinement 2 -dm_forest_initial_refinement 2 \ + -dm_type p4est -dm_forest_partition_overlap 1 -dm_forest_maximum_refinement 2 -dm_forest_minimum_refinement 2 -dm_forest_initial_refinement 2 \ -ufv_use_amr -refine_vec_tagger_box 0.5,inf -coarsen_vec_tagger_box 0,1.e-2 -refine_tag_view -coarsen_tag_view \ -bc_wall 1,2,3,4 -physics euler -eu_type 
iv_shock -ufv_cfl 10 -eu_alpha 60. -eu_gamma 1.4 -eu_amach 2.02 -eu_rho2 3. \ -petscfv_type leastsquares -petsclimiter_type minmod -petscfv_compute_gradients 0 \ - -ts_max_time 0.5 -ts_ssp_type rks2 -ts_ssp_nstages 10 \ + -ts_max_steps 3 -ts_ssp_type rks2 -ts_ssp_nstages 10 \ -ufv_vtk_basename ${wPETSC_DIR}/ex11 -ufv_vtk_interval 0 -monitor density,energy - timeoutfactor: 3 # Test GLVis visualization of PetscFV fields test: diff --git a/src/ts/tutorials/ex1f.F90 b/src/ts/tutorials/ex1f.F90 index 1748d997cf1..d2a213eff46 100644 --- a/src/ts/tutorials/ex1f.F90 +++ b/src/ts/tutorials/ex1f.F90 @@ -89,7 +89,7 @@ program main ! preallocating memory in sparse matrices. ! i5 = 5 - PetscCallA(MatCreateSeqAIJ(PETSC_COMM_SELF,N,N,i5,PETSC_NULL_INTEGER,J,ierr)) + PetscCallA(MatCreateSeqAIJ(PETSC_COMM_SELF,N,N,i5,PETSC_NULL_INTEGER_ARRAY,J,ierr)) ! ! Create timestepper context @@ -222,7 +222,6 @@ subroutine FormInitialGuess(X,user,ierr) 20 continue 10 continue PetscCall(VecRestoreArrayF90(X,xx,ierr)) - return end ! ! -------------------- Evaluate Function F(x) --------------------- @@ -279,7 +278,6 @@ subroutine FormFunction(ts,t,X,F,user,ierr) PetscCall(VecRestoreArrayReadF90(X,xx,ierr)) PetscCall(VecRestoreArrayF90(F,ff,ierr)) - return end ! ! -------------------- Evaluate Jacobian of F(x) -------------------- @@ -328,7 +326,7 @@ subroutine FormJacobian(ts,ctime,X,JJ,B,user,ierr) ! row(1) = i - 1 + (j-1)*mx if (i .eq. 1 .or. j .eq. 1 .or. i .eq. mx .or. j .eq. my) then - PetscCall(MatSetValues(jac,i1,row,i1,row,one,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,i1,[row],i1,[row],[one],INSERT_VALUES,ierr)) else v(1) = hxdhy col(1) = row(1) - mx @@ -340,14 +338,13 @@ subroutine FormJacobian(ts,ctime,X,JJ,B,user,ierr) col(4) = row(1) + 1 v(5) = hxdhy col(5) = row(1) + mx - PetscCall(MatSetValues(jac,i1,row,i5,col,v,INSERT_VALUES,ierr)) + PetscCall(MatSetValues(jac,i1,[row],i5,col,v,INSERT_VALUES,ierr)) endif 20 continue 10 continue PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(VecRestoreArrayF90(X,xx,ierr)) - return end !/*TEST diff --git a/src/ts/tutorials/ex2.c b/src/ts/tutorials/ex2.c index be55bf2c40e..6d9ccde0871 100644 --- a/src/ts/tutorials/ex2.c +++ b/src/ts/tutorials/ex2.c @@ -639,7 +639,7 @@ PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, vo suffix: tut_2 nsize: 4 args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor - # GEMV sensitve to single + # GEMV sensitive to single args: -vec_mdot_use_gemv 0 -vec_maxpy_use_gemv 0 test: diff --git a/src/ts/tutorials/ex22f.F90 b/src/ts/tutorials/ex22f.F90 index 5f9d9788a48..12abe1721b2 100644 --- a/src/ts/tutorials/ex22f.F90 +++ b/src/ts/tutorials/ex22f.F90 @@ -139,7 +139,7 @@ subroutine GetLayout(da,mx,xs,xe,gxs,gxe,ierr) PetscInt mx,xs,xe,gxs,gxe PetscErrorCode ierr PetscInt xm,gxm - PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr)) PetscCall(DMDAGetCorners(da,xs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,xm,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) 
PetscCall(DMDAGetGhostCorners(da,gxs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,gxm,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) xs = xs + 1 @@ -318,7 +318,7 @@ subroutine FormIJacobian(ts,t,X,Xdot,shift,J,Jpre,user,ierr) val(2) = -k2 val(3) = -k1 val(4) = shift + k2 - PetscCall(MatSetValuesBlockedLocal(Jpre,i1,row,i1,col,val,INSERT_VALUES,ierr)) + PetscCall(MatSetValuesBlockedLocal(Jpre,i1,[row],i1,[col],val,INSERT_VALUES,ierr)) 10 continue PetscCall(MatAssemblyBegin(Jpre,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(MatAssemblyEnd(Jpre,MAT_FINAL_ASSEMBLY,ierr)) diff --git a/src/ts/tutorials/ex22f_mf.F90 b/src/ts/tutorials/ex22f_mf.F90 index afa3053aa95..655478f295d 100644 --- a/src/ts/tutorials/ex22f_mf.F90 +++ b/src/ts/tutorials/ex22f_mf.F90 @@ -69,7 +69,7 @@ program main ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ! Create distributed array (DMDA) to manage parallel grid and vectors ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,im11,i2,i2,PETSC_NULL_INTEGER,da,ierr)) + PetscCallA(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,im11,i2,i2,PETSC_NULL_INTEGER_ARRAY,da,ierr)) PetscCallA(DMSetFromOptions(da,ierr)) PetscCallA(DMSetUp(da,ierr)) @@ -174,7 +174,7 @@ subroutine GetLayout(da,mx,xs,xe,gxs,gxe,ierr) PetscInt mx,xs,xe,gxs,gxe PetscErrorCode ierr PetscInt xm,gxm - PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) + PetscCall(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr)) PetscCall(DMDAGetCorners(da,xs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,xm,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) PetscCall(DMDAGetGhostCorners(da,gxs,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,gxm,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)) xs = xs + 1 @@ -358,7 +358,7 @@ subroutine FormIJacobian(ts,t,X,Xdot,shift,J,Jpre,user,ierr) val(2) = -k2 val(3) = -k1 val(4) = shift + k2 - PetscCall(MatSetValuesBlockedLocal(Jpre,i1,row,i1,col,val,INSERT_VALUES,ierr)) + PetscCall(MatSetValuesBlockedLocal(Jpre,i1,[row],i1,[col],val,INSERT_VALUES,ierr)) end do PetscCall(MatAssemblyBegin(Jpre,MAT_FINAL_ASSEMBLY,ierr)) PetscCall(MatAssemblyEnd(Jpre,MAT_FINAL_ASSEMBLY,ierr)) @@ -487,7 +487,7 @@ subroutine MyMult(A,X,F,ierr) val(2) = -k2 val(3) = -k1 val(4) = shift + k2 - PetscCall(MatSetValuesBlockedLocal(Jmat,i1,row,i1,col,val,INSERT_VALUES,ierr)) + PetscCall(MatSetValuesBlockedLocal(Jmat,i1,[row],i1,[col],val,INSERT_VALUES,ierr)) end do ! PetscCall(MatAssemblyBegin(Jpre,MAT_FINAL_ASSEMBLY,ierr)) @@ -499,7 +499,6 @@ subroutine MyMult(A,X,F,ierr) PetscCall(MatMult(Jmat,X,F,ierr)) - return end subroutine MyMult ! 
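The Fortran changes in ex1f.F90, ex22f.F90, and ex22f_mf.F90 above follow the stricter generated interfaces: an array dummy argument now requires a rank-1 actual argument, hence the [row] array constructors around scalars and the dedicated null constant PETSC_NULL_INTEGER_ARRAY, while the trailing DMBoundaryType/DMDAStencilType outputs of DMDAGetInfo() take the typed placeholder PETSC_NULL_ENUM. For comparison, a C-side sketch of the same two calls, where any unwanted output is simply NULL (CSideSketch is illustrative; da and J are assumed already created and set up):

  #include <petscdmda.h>

  static PetscErrorCode CSideSketch(DM da, Mat J)
  {
    PetscInt    M, row = 0;
    PetscScalar one = 1.0;

    PetscFunctionBeginUser;
    /* In C every unused DMDAGetInfo() output is NULL regardless of its type */
    PetscCall(DMDAGetInfo(da, NULL, &M, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
    /* ...and the address of a single value already is the one-entry array that
       MatSetValues() expects, so no constructor syntax is needed */
    PetscCall(MatSetValues(J, 1, &row, 1, &row, &one, INSERT_VALUES));
    PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }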
diff --git a/src/ts/tutorials/ex30.c b/src/ts/tutorials/ex30.c index 20bf75c4ded..7518bbcb6fa 100644 --- a/src/ts/tutorials/ex30.c +++ b/src/ts/tutorials/ex30.c @@ -697,7 +697,7 @@ static PetscErrorCode ProjectSource(DM dm, PetscReal time, AppCtx *ctx) ctxs[P_FIELD_ID] = NULL; break; default: - SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unknwon source"); + SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unknown source"); } funcs[C_FIELD_ID] = zerof; ctxs[C_FIELD_ID] = NULL; @@ -1002,7 +1002,7 @@ static PetscErrorCode SetInitialConditionsAndTolerances(TS ts, PetscInt nv, Vec funcs[C_FIELD_ID] = initial_conditions_C_2; break; default: - SETERRQ(PetscObjectComm((PetscObject)ts), PETSC_ERR_SUP, "Unknwon IC"); + SETERRQ(PetscObjectComm((PetscObject)ts), PETSC_ERR_SUP, "Unknown IC"); } funcs[P_FIELD_ID] = zerof; PetscCall(DMProjectFunction(dm, t, funcs, NULL, INSERT_ALL_VALUES, u)); @@ -1478,6 +1478,7 @@ int main(int argc, char **argv) args: -test_restart -dm_plex_box_faces 3,3 -ksp_type preonly -pc_type mg -mg_levels_pc_type svd -c_petscspace_degree 1 -p_petscspace_degree 1 -petscpartitioner_type simple -test_restart test: + requires: !single suffix: restart nsize: {{1 2}separate output} args: -dm_refine_hierarchy {{0 1}separate output} -dm_plex_simplex 0 @@ -1489,6 +1490,7 @@ int main(int argc, char **argv) args: -dm_refine_hierarchy {{0 1}separate output} -dm_plex_simplex 1 test: + requires: !single suffix: restart_refonly nsize: {{1 2}separate output} args: -dm_refine 1 -dm_plex_simplex 0 diff --git a/src/ts/tutorials/ex36A.c b/src/ts/tutorials/ex36A.c deleted file mode 100644 index 6b24ddfd97b..00000000000 --- a/src/ts/tutorials/ex36A.c +++ /dev/null @@ -1,177 +0,0 @@ -static char help[] = "Transistor amplifier (autonomous).\n"; - -/*F - M y'=f(y) - - Useful options: -ts_monitor_lg_solution -ts_monitor_lg_timestep -lg_indicate_data_points 0 -F*/ - -/* - Include "petscts.h" so that we can use TS solvers. Note that this - file automatically includes: - petscsys.h - base PETSc routines petscvec.h - vectors - petscmat.h - matrices - petscis.h - index sets petscksp.h - Krylov subspace methods - petscviewer.h - viewers petscpc.h - preconditioners - petscksp.h - linear solvers -*/ -#include <petscts.h> - -FILE *gfilepointer_data, *gfilepointer_info; - -/* Defines the source */ -PetscErrorCode Ue(PetscScalar t, PetscScalar *U) -{ - PetscFunctionBeginUser; - U = 0.4 * sin(200 * pi * t); - PetscFunctionReturn(PETSC_SUCCESS); -} -* / - - /* - Defines the DAE passed to the time solver -*/ - static PetscErrorCode IFunctionImplicit(TS ts, PetscReal t, Vec Y, Vec Ydot, Vec F, void *ctx) -{ - const PetscScalar *y, *ydot; - PetscScalar *f; - - PetscFunctionBeginUser; - /* The next three lines allow us to access the entries of the vectors directly */ - PetscCall(VecGetArrayRead(Y, &y)); - PetscCall(VecGetArrayRead(Ydot, &ydot)); - PetscCall(VecGetArray(F, &f)); - - f[0] = PetscSinReal(200 * PETSC_PI * y[5]) / 2500. - y[0] / 1000. - ydot[0] / 1.e6 + ydot[1] / 1.e6; - f[1] = 0.0006666766666666667 - PetscExpReal((500 * (y[1] - y[2])) / 13.) / 1.e8 - y[1] / 4500. + ydot[0] / 1.e6 - ydot[1] / 1.e6; - f[2] = -1.e-6 + PetscExpReal((500 * (y[1] - y[2])) / 13.) / 1.e6 - y[2] / 9000. - ydot[2] / 500000.; - f[3] = 0.0006676566666666666 - (99 * PetscExpReal((500 * (y[1] - y[2])) / 13.)) / 1.e8 - y[3] / 9000. - (3 * ydot[3]) / 1.e6 + (3 * ydot[4]) / 1.e6; - f[4] = -y[4] / 9000.
+ (3 * ydot[3]) / 1.e6 - (3 * ydot[4]) / 1.e6; - f[5] = -1 + ydot[5]; - - PetscCall(VecRestoreArrayRead(Y, &y)); - PetscCall(VecRestoreArrayRead(Ydot, &ydot)); - PetscCall(VecRestoreArray(F, &f)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -/* - Defines the Jacobian of the ODE passed to the ODE solver. See TSSetIJacobian() for the meaning of a and the Jacobian. -*/ -static PetscErrorCode IJacobianImplicit(TS ts, PetscReal t, Vec Y, Vec Ydot, PetscReal a, Mat A, Mat B, void *ctx) -{ - PetscInt rowcol[] = {0, 1, 2, 3, 4, 5}; - const PetscScalar *y, *ydot; - PetscScalar J[6][6]; - - PetscFunctionBeginUser; - PetscCall(VecGetArrayRead(Y, &y)); - PetscCall(VecGetArrayRead(Ydot, &ydot)); - - PetscCall(PetscMemzero(J, sizeof(J))); - - J[0][0] = -0.001 - a / 1.e6; - J[0][1] = a / 1.e6; - J[0][5] = (2 * PETSC_PI * PetscCosReal(200 * PETSC_PI * y[5])) / 25.; - J[1][0] = a / 1.e6; - J[1][1] = -0.00022222222222222223 - a / 1.e6 - PetscExpReal((500 * (y[1] - y[2])) / 13.) / 2.6e6; - J[1][2] = PetscExpReal((500 * (y[1] - y[2])) / 13.) / 2.6e6; - J[2][1] = PetscExpReal((500 * (y[1] - y[2])) / 13.) / 26000.; - J[2][2] = -0.00011111111111111112 - a / 500000. - PetscExpReal((500 * (y[1] - y[2])) / 13.) / 26000.; - J[3][1] = (-99 * PetscExpReal((500 * (y[1] - y[2])) / 13.)) / 2.6e6; - J[3][2] = (99 * PetscExpReal((500 * (y[1] - y[2])) / 13.)) / 2.6e6; - J[3][3] = -0.00011111111111111112 - (3 * a) / 1.e6; - J[3][4] = (3 * a) / 1.e6; - J[4][3] = (3 * a) / 1.e6; - J[4][4] = -0.00011111111111111112 - (3 * a) / 1.e6; - J[5][5] = a; - - PetscCall(MatSetValues(B, 6, rowcol, 6, rowcol, &J[0][0], INSERT_VALUES)); - - PetscCall(VecRestoreArrayRead(Y, &y)); - PetscCall(VecRestoreArrayRead(Ydot, &ydot)); - - PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); - PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); - if (A != B) { - PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); - PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -int main(int argc, char **argv) -{ - TS ts; /* ODE integrator */ - Vec Y; /* solution will be stored here */ - Mat A; /* Jacobian matrix */ - PetscMPIInt size; - PetscInt n = 6; - PetscScalar *y; - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Initialize program - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscFunctionBeginUser; - PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); - PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size)); - PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Only for sequential runs"); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Create necessary matrix and vectors - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); - PetscCall(MatSetSizes(A, n, n, PETSC_DETERMINE, PETSC_DETERMINE)); - PetscCall(MatSetFromOptions(A)); - PetscCall(MatSetUp(A)); - - PetscCall(MatCreateVecs(A, &Y, NULL)); - - PetscCall(VecGetArray(Y, &y)); - y[0] = 0.0; - y[1] = 3.0; - y[2] = y[1]; - y[3] = 6.0; - y[4] = 0.0; - y[5] = 0.0; - PetscCall(VecRestoreArray(Y, &y)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Create timestepping solver context - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSCreate(PETSC_COMM_WORLD, &ts)); - PetscCall(TSSetProblemType(ts, TS_NONLINEAR)); - PetscCall(TSSetType(ts, TSARKIMEX)); - PetscCall(TSSetEquationType(ts, TS_EQ_DAE_IMPLICIT_INDEX1)); - 
PetscCall(TSARKIMEXSetFullyImplicit(ts, PETSC_TRUE)); - /*PetscCall(TSSetType(ts,TSROSW));*/ - PetscCall(TSSetIFunction(ts, NULL, IFunctionImplicit, NULL)); - PetscCall(TSSetIJacobian(ts, A, A, IJacobianImplicit, NULL)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Set initial conditions - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSSetSolution(ts, Y)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Set solver options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSSetMaxTime(ts, 0.15)); - PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER)); - PetscCall(TSSetTimeStep(ts, .001)); - PetscCall(TSSetFromOptions(ts)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Do Time stepping - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSSolve(ts, Y)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Free work space. All PETSc objects should be destroyed when they are no longer needed. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(MatDestroy(&A)); - PetscCall(VecDestroy(&Y)); - PetscCall(TSDestroy(&ts)); - PetscCall(PetscFinalize()); - return 0; -} diff --git a/src/ts/tutorials/ex36SE.c b/src/ts/tutorials/ex36SE.c deleted file mode 100644 index 0db6166a477..00000000000 --- a/src/ts/tutorials/ex36SE.c +++ /dev/null @@ -1,178 +0,0 @@ -static char help[] = "Transistor amplifier (semi-explicit).\n"; - -/*F - [I 0] [y'] = f(t,y,z) - [0 0] [z'] = g(t,y,z) - Useful options: -ts_monitor_lg_solution -ts_monitor_lg_timestep -lg_indicate_data_points 0 -F*/ - -/* - Include "petscts.h" so that we can use TS solvers. Note that this - file automatically includes: - petscsys.h - base PETSc routines petscvec.h - vectors - petscmat.h - matrices - petscis.h - index sets petscksp.h - Krylov subspace methods - petscviewer.h - viewers petscpc.h - preconditioners - petscksp.h - linear solvers -*/ -#include <petscts.h> - -FILE *gfilepointer_data, *gfilepointer_info; - -/* Defines the source */ -PetscErrorCode Ue(PetscScalar t, PetscScalar *U) -{ - PetscFunctionBeginUser; - U = 0.4 * sin(200 * pi * t); - PetscFunctionReturn(PETSC_SUCCESS); -} -* / - - /* - Defines the DAE passed to the time solver -*/ - static PetscErrorCode IFunctionSemiExplicit(TS ts, PetscReal t, Vec Y, Vec Ydot, Vec F, void *ctx) -{ - const PetscScalar *y, *ydot; - PetscScalar *f; - - PetscFunctionBeginUser; - /* The next three lines allow us to access the entries of the vectors directly */ - PetscCall(VecGetArrayRead(Y, &y)); - PetscCall(VecGetArrayRead(Ydot, &ydot)); - PetscCall(VecGetArray(F, &f)); - - f[0] = -400 * PetscSinReal(200 * PETSC_PI * t) + 1000 * y[3] + ydot[0]; - f[1] = 0.5 - 1 / (2. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)) + (500 * y[1]) / 9. + ydot[1]; - f[2] = -222.5522222222222 + 33 / (100. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)) + (1000 * y[4]) / 27. + ydot[2]; - f[3] = 0.0006666766666666667 - 1 / (1.e8 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)) + PetscSinReal(200 * PETSC_PI * t) / 2500. + y[0] / 4500. - (11 * y[3]) / 9000.; - f[4] = 0.0006676566666666666 - 99 / (1.e8 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)) + y[2] / 9000.
- y[4] / 4500.; - - PetscCall(VecRestoreArrayRead(Y, &y)); - PetscCall(VecRestoreArrayRead(Ydot, &ydot)); - PetscCall(VecRestoreArray(F, &f)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -/* - Defines the Jacobian of the ODE passed to the ODE solver. See TSSetIJacobian() for the meaning of a and the Jacobian. -*/ -static PetscErrorCode IJacobianSemiExplicit(TS ts, PetscReal t, Vec Y, Vec Ydot, PetscReal a, Mat A, Mat B, void *ctx) -{ - PetscInt rowcol[] = {0, 1, 2, 3, 4}; - const PetscScalar *y, *ydot; - PetscScalar J[5][5]; - - PetscFunctionBeginUser; - PetscCall(VecGetArrayRead(Y, &y)); - PetscCall(VecGetArrayRead(Ydot, &ydot)); - - PetscCall(PetscMemzero(J, sizeof(J))); - - J[0][0] = a; - J[0][3] = 1000; - J[1][0] = 250 / (13. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[1][1] = 55.55555555555556 + a + 250 / (13. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[1][3] = -250 / (13. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[2][0] = -165 / (13. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[2][1] = -165 / (13. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[2][2] = a; - J[2][3] = 165 / (13. * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[2][4] = 37.03703703703704; - J[3][0] = 0.00022222222222222223 + 1 / (2.6e6 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[3][1] = 1 / (2.6e6 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[3][3] = -0.0012222222222222222 - 1 / (2.6e6 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[4][0] = 99 / (2.6e6 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[4][1] = 99 / (2.6e6 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[4][2] = 0.00011111111111111112; - J[4][3] = -99 / (2.6e6 * PetscExpReal((500 * (y[0] + y[1] - y[3])) / 13.)); - J[4][4] = -0.00022222222222222223; - - PetscCall(MatSetValues(B, 5, rowcol, 5, rowcol, &J[0][0], INSERT_VALUES)); - - PetscCall(VecRestoreArrayRead(Y, &y)); - PetscCall(VecRestoreArrayRead(Ydot, &ydot)); - - PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); - PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); - if (A != B) { - PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); - PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); - } - PetscFunctionReturn(PETSC_SUCCESS); -} - -int main(int argc, char **argv) -{ - TS ts; /* ODE integrator */ - Vec Y; /* solution will be stored here */ - Mat A; /* Jacobian matrix */ - PetscMPIInt size; - PetscInt n = 5; - PetscScalar *y; - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Initialize program - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscFunctionBeginUser; - PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); - PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size)); - PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Only for sequential runs"); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Create necessary matrix and vectors - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); - PetscCall(MatSetSizes(A, n, n, PETSC_DETERMINE, PETSC_DETERMINE)); - PetscCall(MatSetFromOptions(A)); - PetscCall(MatSetUp(A)); - - PetscCall(MatCreateVecs(A, &Y, NULL)); - - PetscCall(VecGetArray(Y, &y)); - y[0] = -3.0; - y[1] = 3.0; - y[2] = 6.0; - y[3] = 0.0; - y[4] = 6.0; - PetscCall(VecRestoreArray(Y, &y)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Create timestepping solver context - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSCreate(PETSC_COMM_WORLD, &ts)); - PetscCall(TSSetProblemType(ts, TS_NONLINEAR)); - PetscCall(TSSetType(ts, TSARKIMEX)); - PetscCall(TSSetEquationType(ts, TS_EQ_DAE_IMPLICIT_INDEX1)); - PetscCall(TSARKIMEXSetFullyImplicit(ts, PETSC_TRUE)); - /*PetscCall(TSSetType(ts,TSROSW));*/ - PetscCall(TSSetIFunction(ts, NULL, IFunctionSemiExplicit, NULL)); - PetscCall(TSSetIJacobian(ts, A, A, IJacobianSemiExplicit, NULL)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Set initial conditions - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSSetSolution(ts, Y)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Set solver options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSSetMaxTime(ts, 0.15)); - PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER)); - PetscCall(TSSetTimeStep(ts, .001)); - PetscCall(TSSetFromOptions(ts)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Do Time stepping - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(TSSolve(ts, Y)); - - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Free work space. All PETSc objects should be destroyed when they are no longer needed. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - PetscCall(MatDestroy(&A)); - PetscCall(VecDestroy(&Y)); - PetscCall(TSDestroy(&ts)); - PetscCall(PetscFinalize()); - return 0; -} diff --git a/src/ts/tutorials/hamiltonian/ex2.c b/src/ts/tutorials/hamiltonian/ex2.c index e5f92626a26..0b2b1cdfb00 100644 --- a/src/ts/tutorials/hamiltonian/ex2.c +++ b/src/ts/tutorials/hamiltonian/ex2.c @@ -1,71 +1,679 @@ -static char help[] = "Two stream instability from Birdsal and Langdon with DMSwarm and TS basic symplectic integrators\n"; +static char help[] = "Landau Damping/Two Stream instability test using Vlasov-Poisson equations\n"; +/* + To run the code with particles sinusoidally perturbed in x space use the test "pp_poisson_bsi_1d_4" or "pp_poisson_bsi_2d_4" + According to Lukas, good damping results come at ~16k particles per cell + + To visualize the efield use + + -monitor_efield + + To visualize the swarm distribution use + + -ts_monitor_hg_swarm + + To visualize the particles, we can use + + -ts_monitor_sp_swarm -ts_monitor_sp_swarm_retain 0 -ts_monitor_sp_swarm_phase 1 -draw_size 500,500 + +*/ +#include #include -#include #include +#include #include -#include -#include -#include -#include -#include +#include +#include +#include /* For norm and dot */ +#include /* For interpolation */ +#include "petscdm.h" +#include "petscdmlabel.h" + +PETSC_EXTERN PetscErrorCode stream(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *); +PETSC_EXTERN PetscErrorCode line(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *); + +const char *EMTypes[] = {"primal", "mixed", "coulomb", "none", "EMType", "EM_", NULL}; +typedef enum { + EM_PRIMAL, + EM_MIXED, + EM_COULOMB, + EM_NONE +} EMType; + +typedef enum { + V0, + X0, + T0, + M0, + Q0, + PHI0, + POISSON, + VLASOV, + SIGMA, + NUM_CONSTANTS +} ConstantType; +typedef struct { + PetscScalar v0; /* Velocity scale, often the thermal velocity */ + PetscScalar t0; /* Time scale */ + PetscScalar x0; /* Space scale */ + PetscScalar m0; /* Mass scale */ + PetscScalar q0; /* Charge scale */ + PetscScalar kb; + PetscScalar epsi0; 
+ PetscScalar phi0; /* Potential scale */ + PetscScalar poissonNumber; /* Non-Dimensional Poisson Number */ + PetscScalar vlasovNumber; /* Non-Dimensional Vlasov Number */ + PetscReal sigma; /* Nondimensional charge per length in x */ +} Parameter; typedef struct { - PetscInt dim; /* The topological mesh dimension */ - PetscBool simplex; /* Flag for simplices or tensor cells */ - PetscBool bdm; /* Flag for mixed form poisson */ - PetscBool monitor; /* Flag for use of the TS monitor */ - PetscBool uniform; /* Flag to uniformly space particles in x */ - char meshFilename[PETSC_MAX_PATH_LEN]; /* Name of the mesh filename if any */ - PetscReal sigma; /* Linear charge per box length */ - PetscReal timeScale; /* Nondimensionalizing time scaling */ - PetscInt particlesPerCell; /* The number of partices per cell */ - PetscReal particleRelDx; /* Relative particle position perturbation compared to average cell diameter h */ - PetscInt k; /* Mode number for test function */ - PetscReal momentTol; /* Tolerance for checking moment conservation */ - SNES snes; /* SNES object */ - PetscInt steps; /* TS iterations */ - PetscReal stepSize; /* Time stepper step size */ - PetscErrorCode (*func)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar *, void *); + PetscBag bag; /* Problem parameters */ + PetscBool error; /* Flag for printing the error */ + PetscBool efield_monitor; /* Flag to show electric field monitor */ + PetscBool initial_monitor; + PetscBool fake_1D; /* Run simulation in 2D but zeroing second dimension */ + PetscBool perturbed_weights; /* Uniformly sample x,v space with gaussian weights */ + PetscBool poisson_monitor; + PetscInt ostep; /* print the energy at each ostep time steps */ + PetscInt numParticles; + PetscReal timeScale; /* Nondimensionalizing time scale */ + PetscReal charges[2]; /* The charges of each species */ + PetscReal masses[2]; /* The masses of each species */ + PetscReal thermal_energy[2]; /* Thermal Energy (used to get other constants)*/ + PetscReal cosine_coefficients[2]; /*(alpha, k)*/ + PetscReal totalWeight; + PetscReal stepSize; + PetscInt steps; + PetscReal initVel; + EMType em; /* Type of electrostatic model */ + SNES snes; + PetscDraw drawef; + PetscDrawLG drawlg_ef; + PetscDraw drawic_x; + PetscDraw drawic_v; + PetscDraw drawic_w; + PetscDrawHG drawhgic_x; + PetscDrawHG drawhgic_v; + PetscDrawHG drawhgic_w; + PetscDraw EDraw; + PetscDraw RhoDraw; + PetscDraw PotDraw; + PetscDrawSP EDrawSP; + PetscDrawSP RhoDrawSP; + PetscDrawSP PotDrawSP; + PetscBool monitor_positions; /* Flag to show particle positions at each time step */ + PetscDraw positionDraw; + PetscDrawSP positionDrawSP; + DM swarm; + PetscRandom random; + PetscBool twostream; + PetscBool checkweights; + PetscInt checkVRes; /* Flag to check/output velocity residuals for nightly tests */ } AppCtx; static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) { PetscFunctionBeginUser; - options->dim = 2; - options->simplex = PETSC_TRUE; - options->monitor = PETSC_TRUE; - options->particlesPerCell = 1; - options->k = 1; - options->particleRelDx = 1.e-20; - options->momentTol = 100.
* PETSC_MACHINE_EPSILON; - options->sigma = 1.; - options->timeScale = 1.0e-6; - options->uniform = PETSC_FALSE; - options->steps = 1; - options->stepSize = 0.01; - options->bdm = PETSC_FALSE; - - PetscOptionsBegin(comm, "", "Two Stream options", "DMPLEX"); - PetscCall(PetscStrncpy(options->meshFilename, "", sizeof(options->meshFilename))); - PetscCall(PetscOptionsInt("-dim", "The topological mesh dimension", "ex2.c", options->dim, &options->dim, NULL)); - PetscCall(PetscOptionsInt("-steps", "TS steps to take", "ex2.c", options->steps, &options->steps, NULL)); - PetscCall(PetscOptionsBool("-monitor", "To use the TS monitor or not", "ex2.c", options->monitor, &options->monitor, NULL)); - PetscCall(PetscOptionsBool("-simplex", "The flag for simplices or tensor cells", "ex2.c", options->simplex, &options->simplex, NULL)); - PetscCall(PetscOptionsBool("-uniform", "Uniform particle spacing", "ex2.c", options->uniform, &options->uniform, NULL)); - PetscCall(PetscOptionsBool("-bdm", "Use H1 instead of C0", "ex2.c", options->bdm, &options->bdm, NULL)); - PetscCall(PetscOptionsString("-mesh", "Name of the mesh filename if any", "ex2.c", options->meshFilename, options->meshFilename, PETSC_MAX_PATH_LEN, NULL)); - PetscCall(PetscOptionsInt("-k", "Mode number of test", "ex5.c", options->k, &options->k, NULL)); - PetscCall(PetscOptionsInt("-particlesPerCell", "Number of particles per cell", "ex2.c", options->particlesPerCell, &options->particlesPerCell, NULL)); - PetscCall(PetscOptionsReal("-sigma", "parameter", "<1>", options->sigma, &options->sigma, NULL)); - PetscCall(PetscOptionsReal("-stepSize", "parameter", "<1e-2>", options->stepSize, &options->stepSize, NULL)); - PetscCall(PetscOptionsReal("-timeScale", "parameter", "<1>", options->timeScale, &options->timeScale, NULL)); - PetscCall(PetscOptionsReal("-particle_perturbation", "Relative perturbation of particles (0,1)", "ex2.c", options->particleRelDx, &options->particleRelDx, NULL)); + PetscInt d = 2; + PetscInt maxSpecies = 2; + options->error = PETSC_FALSE; + options->efield_monitor = PETSC_FALSE; + options->initial_monitor = PETSC_FALSE; + options->fake_1D = PETSC_FALSE; + options->perturbed_weights = PETSC_FALSE; + options->poisson_monitor = PETSC_FALSE; + options->ostep = 100; + options->timeScale = 2.0e-14; + options->charges[0] = -1.0; + options->charges[1] = 1.0; + options->masses[0] = 1.0; + options->masses[1] = 1000.0; + options->thermal_energy[0] = 1.0; + options->thermal_energy[1] = 1.0; + options->cosine_coefficients[0] = 0.01; + options->cosine_coefficients[1] = 0.5; + options->initVel = 1; + options->totalWeight = 1.0; + options->drawef = NULL; + options->drawlg_ef = NULL; + options->drawic_x = NULL; + options->drawic_v = NULL; + options->drawic_w = NULL; + options->drawhgic_x = NULL; + options->drawhgic_v = NULL; + options->drawhgic_w = NULL; + options->EDraw = NULL; + options->RhoDraw = NULL; + options->PotDraw = NULL; + options->EDrawSP = NULL; + options->RhoDrawSP = NULL; + options->PotDrawSP = NULL; + options->em = EM_COULOMB; + options->numParticles = 32768; + options->monitor_positions = PETSC_FALSE; + options->positionDraw = NULL; + options->positionDrawSP = NULL; + options->twostream = PETSC_FALSE; + options->checkweights = PETSC_FALSE; + options->checkVRes = 0; + + PetscOptionsBegin(comm, "", "Landau Damping and Two Stream options", "DMSWARM"); + PetscCall(PetscOptionsBool("-error", "Flag to print the error", "ex2.c", options->error, &options->error, NULL)); + PetscCall(PetscOptionsBool("-monitor_efield", "Flag to show 
efield plot", "ex2.c", options->efield_monitor, &options->efield_monitor, NULL)); + PetscCall(PetscOptionsBool("-monitor_ics", "Flag to show initial condition histograms", "ex2.c", options->initial_monitor, &options->initial_monitor, NULL)); + PetscCall(PetscOptionsBool("-monitor_positions", "The flag to show particle positions", "ex2.c", options->monitor_positions, &options->monitor_positions, NULL)); + PetscCall(PetscOptionsBool("-monitor_poisson", "The flag to show charges, Efield and potential solve", "ex2.c", options->poisson_monitor, &options->poisson_monitor, NULL)); + PetscCall(PetscOptionsBool("-fake_1D", "Flag to run a 1D simulation (but really in 2D)", "ex2.c", options->fake_1D, &options->fake_1D, NULL)); + PetscCall(PetscOptionsBool("-twostream", "Run two stream instability", "ex2.c", options->twostream, &options->twostream, NULL)); + PetscCall(PetscOptionsBool("-perturbed_weights", "Flag to run uniform sampling with perturbed weights", "ex2.c", options->perturbed_weights, &options->perturbed_weights, NULL)); + PetscCall(PetscOptionsBool("-check_weights", "Ensure all particle weights are positive", "ex2.c", options->checkweights, &options->checkweights, NULL)); + PetscCall(PetscOptionsInt("-check_vel_res", "Check particle velocity residuals for nightly tests", "ex2.c", options->checkVRes, &options->checkVRes, NULL)); + PetscCall(PetscOptionsInt("-output_step", "Number of time steps between output", "ex2.c", options->ostep, &options->ostep, NULL)); + PetscCall(PetscOptionsReal("-timeScale", "Nondimensionalizing time scale", "ex2.c", options->timeScale, &options->timeScale, NULL)); + PetscCall(PetscOptionsReal("-initial_velocity", "Initial velocity of perturbed particle", "ex2.c", options->initVel, &options->initVel, NULL)); + PetscCall(PetscOptionsReal("-total_weight", "Total weight of all particles", "ex2.c", options->totalWeight, &options->totalWeight, NULL)); + PetscCall(PetscOptionsRealArray("-cosine_coefficients", "Amplitude and frequency of cosine equation used in initialization", "ex2.c", options->cosine_coefficients, &d, NULL)); + PetscCall(PetscOptionsRealArray("-charges", "Species charges", "ex2.c", options->charges, &maxSpecies, NULL)); + PetscCall(PetscOptionsEnum("-em_type", "Type of electrostatic solver", "ex2.c", EMTypes, (PetscEnum)options->em, (PetscEnum *)&options->em, NULL)); PetscOptionsEnd(); PetscFunctionReturn(PETSC_SUCCESS); } -static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user) +static PetscErrorCode SetupContext(DM dm, DM sw, AppCtx *user) +{ + PetscFunctionBeginUser; + if (user->efield_monitor) { + PetscDrawAxis axis_ef; + PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_efield", 0, 300, 400, 300, &user->drawef)); + PetscCall(PetscDrawSetSave(user->drawef, "ex9_Efield.png")); + PetscCall(PetscDrawSetFromOptions(user->drawef)); + PetscCall(PetscDrawLGCreate(user->drawef, 1, &user->drawlg_ef)); + PetscCall(PetscDrawLGGetAxis(user->drawlg_ef, &axis_ef)); + PetscCall(PetscDrawAxisSetLabels(axis_ef, "Electron Electric Field", "time", "E_max")); + PetscCall(PetscDrawLGSetLimits(user->drawlg_ef, 0., user->steps * user->stepSize, -10., 0.)); + PetscCall(PetscDrawAxisSetLimits(axis_ef, 0., user->steps * user->stepSize, -10., 0.)); + } + if (user->initial_monitor) { + PetscDrawAxis axis1, axis2, axis3; + PetscReal dmboxlower[2], dmboxupper[2]; + PetscInt dim, cStart, cEnd; + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMGetBoundingBox(dm, dmboxlower, dmboxupper)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); + + 
PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_initial_conditions_x", 0, 300, 400, 300, &user->drawic_x)); + PetscCall(PetscDrawSetSave(user->drawic_x, "ex9_ic_x.png")); + PetscCall(PetscDrawSetFromOptions(user->drawic_x)); + PetscCall(PetscDrawHGCreate(user->drawic_x, dim, &user->drawhgic_x)); + PetscCall(PetscDrawHGGetAxis(user->drawhgic_x, &axis1)); + PetscCall(PetscDrawHGSetNumberBins(user->drawhgic_x, cEnd - cStart)); + PetscCall(PetscDrawAxisSetLabels(axis1, "Initial X Distribution", "X", "counts")); + PetscCall(PetscDrawAxisSetLimits(axis1, dmboxlower[0], dmboxupper[0], 0, 1500)); + + PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_initial_conditions_v", 400, 300, 400, 300, &user->drawic_v)); + PetscCall(PetscDrawSetSave(user->drawic_v, "ex9_ic_v.png")); + PetscCall(PetscDrawSetFromOptions(user->drawic_v)); + PetscCall(PetscDrawHGCreate(user->drawic_v, dim, &user->drawhgic_v)); + PetscCall(PetscDrawHGGetAxis(user->drawhgic_v, &axis2)); + PetscCall(PetscDrawHGSetNumberBins(user->drawhgic_v, 1000)); + PetscCall(PetscDrawAxisSetLabels(axis2, "Initial V_x Distribution", "V", "counts")); + PetscCall(PetscDrawAxisSetLimits(axis2, -1, 1, 0, 1500)); + + PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "monitor_initial_conditions_w", 800, 300, 400, 300, &user->drawic_w)); + PetscCall(PetscDrawSetSave(user->drawic_w, "ex9_ic_w.png")); + PetscCall(PetscDrawSetFromOptions(user->drawic_w)); + PetscCall(PetscDrawHGCreate(user->drawic_w, dim, &user->drawhgic_w)); + PetscCall(PetscDrawHGGetAxis(user->drawhgic_w, &axis3)); + PetscCall(PetscDrawHGSetNumberBins(user->drawhgic_w, 10)); + PetscCall(PetscDrawAxisSetLabels(axis3, "Initial W Distribution", "weight", "counts")); + PetscCall(PetscDrawAxisSetLimits(axis3, 0, 0.01, 0, 5000)); + } + if (user->monitor_positions) { + PetscDrawAxis axis; + + PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "position_monitor_species1", 0, 0, 400, 300, &user->positionDraw)); + PetscCall(PetscDrawSetFromOptions(user->positionDraw)); + PetscCall(PetscDrawSPCreate(user->positionDraw, 10, &user->positionDrawSP)); + PetscCall(PetscDrawSPSetDimension(user->positionDrawSP, 1)); + PetscCall(PetscDrawSPGetAxis(user->positionDrawSP, &axis)); + PetscCall(PetscDrawSPReset(user->positionDrawSP)); + PetscCall(PetscDrawAxisSetLabels(axis, "Particles", "x", "v")); + PetscCall(PetscDrawSetSave(user->positionDraw, "ex9_pos.png")); + } + if (user->poisson_monitor) { + PetscDrawAxis axis_E, axis_Rho, axis_Pot; + + PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "Efield_monitor", 0, 0, 400, 300, &user->EDraw)); + PetscCall(PetscDrawSetFromOptions(user->EDraw)); + PetscCall(PetscDrawSPCreate(user->EDraw, 10, &user->EDrawSP)); + PetscCall(PetscDrawSPSetDimension(user->EDrawSP, 1)); + PetscCall(PetscDrawSPGetAxis(user->EDrawSP, &axis_E)); + PetscCall(PetscDrawSPReset(user->EDrawSP)); + PetscCall(PetscDrawAxisSetLabels(axis_E, "Particles", "x", "E")); + PetscCall(PetscDrawSetSave(user->EDraw, "ex9_E_spatial.png")); + + PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "rho_monitor", 0, 0, 400, 300, &user->RhoDraw)); + PetscCall(PetscDrawSetFromOptions(user->RhoDraw)); + PetscCall(PetscDrawSPCreate(user->RhoDraw, 10, &user->RhoDrawSP)); + PetscCall(PetscDrawSPSetDimension(user->RhoDrawSP, 1)); + PetscCall(PetscDrawSPGetAxis(user->RhoDrawSP, &axis_Rho)); + PetscCall(PetscDrawSPReset(user->RhoDrawSP)); + PetscCall(PetscDrawAxisSetLabels(axis_Rho, "Particles", "x", "rho")); + PetscCall(PetscDrawSetSave(user->RhoDraw, "ex9_rho_spatial.png")); + + 
PetscCall(PetscDrawCreate(PETSC_COMM_WORLD, NULL, "potential_monitor", 0, 0, 400, 300, &user->PotDraw)); + PetscCall(PetscDrawSetFromOptions(user->PotDraw)); + PetscCall(PetscDrawSPCreate(user->PotDraw, 10, &user->PotDrawSP)); + PetscCall(PetscDrawSPSetDimension(user->PotDrawSP, 1)); + PetscCall(PetscDrawSPGetAxis(user->PotDrawSP, &axis_Pot)); + PetscCall(PetscDrawSPReset(user->PotDrawSP)); + PetscCall(PetscDrawAxisSetLabels(axis_Pot, "Particles", "x", "potential")); + PetscCall(PetscDrawSetSave(user->PotDraw, "ex9_phi_spatial.png")); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode DestroyContext(AppCtx *user) +{ + PetscFunctionBeginUser; + PetscCall(PetscDrawLGDestroy(&user->drawlg_ef)); + PetscCall(PetscDrawDestroy(&user->drawef)); + PetscCall(PetscDrawHGDestroy(&user->drawhgic_x)); + PetscCall(PetscDrawDestroy(&user->drawic_x)); + PetscCall(PetscDrawHGDestroy(&user->drawhgic_v)); + PetscCall(PetscDrawDestroy(&user->drawic_v)); + PetscCall(PetscDrawHGDestroy(&user->drawhgic_w)); + PetscCall(PetscDrawDestroy(&user->drawic_w)); + PetscCall(PetscDrawSPDestroy(&user->positionDrawSP)); + PetscCall(PetscDrawDestroy(&user->positionDraw)); + + PetscCall(PetscDrawSPDestroy(&user->EDrawSP)); + PetscCall(PetscDrawDestroy(&user->EDraw)); + PetscCall(PetscDrawSPDestroy(&user->RhoDrawSP)); + PetscCall(PetscDrawDestroy(&user->RhoDraw)); + PetscCall(PetscDrawSPDestroy(&user->PotDrawSP)); + PetscCall(PetscDrawDestroy(&user->PotDraw)); + + PetscCall(PetscBagDestroy(&user->bag)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CheckNonNegativeWeights(DM sw, AppCtx *user) +{ + const PetscScalar *w; + PetscInt Np; + + PetscFunctionBeginUser; + if (!user->checkweights) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&w)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + for (PetscInt p = 0; p < Np; ++p) PetscCheck(w[p] >= 0.0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Particle %" PetscInt_FMT " has negative weight %g", p, w[p]); + PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&w)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode computeParticleMoments(DM sw, PetscReal moments[3], AppCtx *user) +{ + DM dm; + const PetscReal *coords; + const PetscScalar *w; + PetscReal mom[3] = {0.0, 0.0, 0.0}; + PetscInt cell, cStart, cEnd, dim; + + PetscFunctionBeginUser; + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetCellDM(sw, &dm)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); + PetscCall(DMSwarmSortGetAccess(sw)); + PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&w)); + for (cell = cStart; cell < cEnd; ++cell) { + PetscInt *pidx; + PetscInt Np, p, d; + + PetscCall(DMSwarmSortGetPointsPerCell(sw, cell, &Np, &pidx)); + for (p = 0; p < Np; ++p) { + const PetscInt idx = pidx[p]; + const PetscReal *c = &coords[idx * dim]; + + mom[0] += PetscRealPart(w[idx]); + mom[1] += PetscRealPart(w[idx]) * c[0]; + for (d = 0; d < dim; ++d) mom[2] += PetscRealPart(w[idx]) * c[d] * c[d]; + //if (w[idx] < 0. 
) PetscPrintf(PETSC_COMM_WORLD, "error, negative weight %" PetscInt_FMT " \n", idx); + } + PetscCall(PetscFree(pidx)); + } + PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&w)); + PetscCall(DMSwarmSortRestoreAccess(sw)); + PetscCallMPI(MPI_Allreduce(mom, moments, 3, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)sw))); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static void f0_1(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + f0[0] = u[0]; +} + +static void f0_x(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + f0[0] = x[0] * u[0]; +} + +static void f0_r2(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + PetscInt d; + + f0[0] = 0.0; + for (d = 0; d < dim; ++d) f0[0] += PetscSqr(x[d]) * u[0]; +} + +static PetscErrorCode computeFEMMoments(DM dm, Vec u, PetscReal moments[3], AppCtx *user) +{ + PetscDS prob; + PetscScalar mom; + PetscInt field = 0; + + PetscFunctionBeginUser; + PetscCall(DMGetDS(dm, &prob)); + PetscCall(PetscDSSetObjective(prob, field, &f0_1)); + PetscCall(DMPlexComputeIntegralFEM(dm, u, &mom, user)); + moments[0] = PetscRealPart(mom); + PetscCall(PetscDSSetObjective(prob, field, &f0_x)); + PetscCall(DMPlexComputeIntegralFEM(dm, u, &mom, user)); + moments[1] = PetscRealPart(mom); + PetscCall(PetscDSSetObjective(prob, field, &f0_r2)); + PetscCall(DMPlexComputeIntegralFEM(dm, u, &mom, user)); + moments[2] = PetscRealPart(mom); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MonitorEField(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx) +{ + AppCtx *user = (AppCtx *)ctx; + DM dm, sw; + PetscReal *E; + PetscReal Enorm = 0., lgEnorm, lgEmax, sum = 0., Emax = 0., temp = 0., *weight, chargesum = 0.; + PetscReal *x, *v; + PetscInt *species, dim, p, d, Np, cStart, cEnd; + PetscReal pmoments[3]; /* \int f, \int x f, \int r^2 f */ + PetscReal fmoments[3]; /* \int \hat f, \int x \hat f, \int r^2 \hat f */ + Vec rho; + + PetscFunctionBeginUser; + if (step < 0) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMSwarmGetCellDM(sw, &dm)); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMSwarmSortGetAccess(sw)); + PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); + PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v)); + PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E)); + PetscCall(DMSwarmGetField(sw, "species", NULL, 
+  PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight));
+  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
+
+  for (p = 0; p < Np; ++p) {
+    for (d = 0; d < 1; ++d) {
+      temp = PetscAbsReal(E[p * dim + d]);
+      if (temp > Emax) Emax = temp;
+    }
+    Enorm += PetscSqrtReal(E[p * dim] * E[p * dim]);
+    sum += E[p * dim];
+    chargesum += user->charges[0] * weight[p];
+  }
+  lgEnorm = Enorm != 0 ? PetscLog10Real(Enorm) : -16.;
+  lgEmax = Emax != 0 ? PetscLog10Real(Emax) : -16.;
+
+  PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x));
+  PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v));
+  PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight));
+  PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E));
+  PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species));
+
+  Parameter *param;
+  PetscCall(PetscBagGetData(user->bag, (void **)&param));
+  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "charges", &rho));
+  if (user->em == EM_PRIMAL) {
+    PetscCall(computeParticleMoments(sw, pmoments, user));
+    PetscCall(computeFEMMoments(dm, rho, fmoments, user));
+  } else if (user->em == EM_MIXED) {
+    DM       potential_dm;
+    IS       potential_IS;
+    PetscInt fields = 1;
+    PetscCall(DMCreateSubDM(dm, 1, &fields, &potential_IS, &potential_dm));
+
+    PetscCall(computeParticleMoments(sw, pmoments, user));
+    PetscCall(computeFEMMoments(potential_dm, rho, fmoments, user));
+    PetscCall(DMDestroy(&potential_dm));
+    PetscCall(ISDestroy(&potential_IS));
+  }
+  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "charges", &rho));
+
+  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%f\t%+e\t%e\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\n", (double)t, (double)sum, (double)Enorm, (double)lgEnorm, (double)Emax, (double)lgEmax, (double)chargesum, (double)pmoments[0], (double)pmoments[1], (double)pmoments[2], (double)fmoments[0], (double)fmoments[1], (double)fmoments[2]));
+  PetscCall(PetscDrawLGAddPoint(user->drawlg_ef, &t, &lgEmax));
+  PetscCall(PetscDrawLGDraw(user->drawlg_ef));
+  PetscCall(PetscDrawSave(user->drawef));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+PetscErrorCode MonitorInitialConditions(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx)
+{
+  AppCtx            *user = (AppCtx *)ctx;
+  DM                 dm, sw;
+  const PetscScalar *u;
+  PetscReal         *weight, *pos, *vel;
+  PetscInt           dim, p, Np, cStart, cEnd;
+
+  PetscFunctionBegin;
+  if (step < 0) PetscFunctionReturn(PETSC_SUCCESS); /* -1 indicates interpolated solution */
+  PetscCall(TSGetDM(ts, &sw));
+  PetscCall(DMSwarmGetCellDM(sw, &dm));
+  PetscCall(DMGetDimension(sw, &dim));
+  PetscCall(DMSwarmGetLocalSize(sw, &Np));
+  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
+
+  if (step == 0) {
+    PetscCall(PetscDrawHGReset(user->drawhgic_x));
+    PetscCall(PetscDrawHGGetDraw(user->drawhgic_x, &user->drawic_x));
+    PetscCall(PetscDrawClear(user->drawic_x));
+    PetscCall(PetscDrawFlush(user->drawic_x));
+
+    PetscCall(PetscDrawHGReset(user->drawhgic_v));
+    PetscCall(PetscDrawHGGetDraw(user->drawhgic_v, &user->drawic_v));
+    PetscCall(PetscDrawClear(user->drawic_v));
+    PetscCall(PetscDrawFlush(user->drawic_v));
+
+    PetscCall(PetscDrawHGReset(user->drawhgic_w));
+    PetscCall(PetscDrawHGGetDraw(user->drawhgic_w, &user->drawic_w));
+    PetscCall(PetscDrawClear(user->drawic_w));
+    PetscCall(PetscDrawFlush(user->drawic_w));
+
+    PetscCall(VecGetArrayRead(U, &u));
+    PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&vel));
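+    /* Histogram each particle's initial position, velocity, and weight */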
PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight)); + PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&pos)); + + PetscCall(VecGetLocalSize(U, &Np)); + Np /= dim * 2; + for (p = 0; p < Np; ++p) { + PetscCall(PetscDrawHGAddValue(user->drawhgic_x, pos[p * dim])); + PetscCall(PetscDrawHGAddValue(user->drawhgic_v, vel[p * dim])); + PetscCall(PetscDrawHGAddValue(user->drawhgic_w, weight[p])); + } + + PetscCall(VecRestoreArrayRead(U, &u)); + PetscCall(PetscDrawHGDraw(user->drawhgic_x)); + PetscCall(PetscDrawHGSave(user->drawhgic_x)); + + PetscCall(PetscDrawHGDraw(user->drawhgic_v)); + PetscCall(PetscDrawHGSave(user->drawhgic_v)); + + PetscCall(PetscDrawHGDraw(user->drawhgic_w)); + PetscCall(PetscDrawHGSave(user->drawhgic_w)); + + PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&pos)); + PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&vel)); + PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MonitorPositions_2D(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx) +{ + AppCtx *user = (AppCtx *)ctx; + DM dm, sw; + PetscScalar *x, *v, *weight; + PetscReal lower[3], upper[3], speed; + const PetscInt *s; + PetscInt dim, cStart, cEnd, c; + + PetscFunctionBeginUser; + if (step > 0 && step % user->ostep == 0) { + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMSwarmGetCellDM(sw, &dm)); + PetscCall(DMGetDimension(dm, &dim)); + PetscCall(DMGetBoundingBox(dm, lower, upper)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); + PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); + PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v)); + PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight)); + PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&s)); + PetscCall(DMSwarmSortGetAccess(sw)); + PetscCall(PetscDrawSPReset(user->positionDrawSP)); + PetscCall(PetscDrawSPSetLimits(user->positionDrawSP, lower[0], upper[0], lower[1], upper[1])); + PetscCall(PetscDrawSPSetLimits(user->positionDrawSP, lower[0], upper[0], -12, 12)); + for (c = 0; c < cEnd - cStart; ++c) { + PetscInt *pidx, Npc, q; + PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx)); + for (q = 0; q < Npc; ++q) { + const PetscInt p = pidx[q]; + if (s[p] == 0) { + speed = PetscSqrtReal(PetscSqr(v[p * dim]) + PetscSqr(v[p * dim + 1])); + if (dim == 1 || user->fake_1D) { + PetscCall(PetscDrawSPAddPointColorized(user->positionDrawSP, &x[p * dim], &v[p * dim], &speed)); + } else { + PetscCall(PetscDrawSPAddPointColorized(user->positionDrawSP, &x[p * dim], &x[p * dim + 1], &speed)); + } + } else if (s[p] == 1) { + PetscCall(PetscDrawSPAddPoint(user->positionDrawSP, &x[p * dim], &v[p * dim])); + } + } + PetscCall(PetscFree(pidx)); + } + PetscCall(PetscDrawSPDraw(user->positionDrawSP, PETSC_TRUE)); + PetscCall(PetscDrawSave(user->positionDraw)); + PetscCall(DMSwarmSortRestoreAccess(sw)); + PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); + PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight)); + PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v)); + PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&s)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MonitorPoisson(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx) +{ + AppCtx *user = (AppCtx *)ctx; + DM dm, sw; + PetscScalar *x, *E, *weight, 
+static PetscErrorCode MonitorPoisson(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx)
+{
+  AppCtx      *user = (AppCtx *)ctx;
+  DM           dm, sw;
+  PetscScalar *x, *E, *weight, *pot, *charges;
+  PetscReal    lower[3], upper[3], xval;
+  PetscInt     dim, cStart, cEnd, c;
+
+  PetscFunctionBeginUser;
+  if (step > 0 && step % user->ostep == 0) {
+    PetscCall(TSGetDM(ts, &sw));
+    PetscCall(DMSwarmGetCellDM(sw, &dm));
+    PetscCall(DMGetDimension(dm, &dim));
+    PetscCall(DMGetBoundingBox(dm, lower, upper));
+    PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
+
+    PetscCall(PetscDrawSPReset(user->RhoDrawSP));
+    PetscCall(PetscDrawSPReset(user->EDrawSP));
+    PetscCall(PetscDrawSPReset(user->PotDrawSP));
+    PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x));
+    PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E));
+    PetscCall(DMSwarmGetField(sw, "potential", NULL, NULL, (void **)&pot));
+    PetscCall(DMSwarmGetField(sw, "charges", NULL, NULL, (void **)&charges));
+    PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight));
+
+    PetscCall(DMSwarmSortGetAccess(sw));
+    for (c = 0; c < cEnd - cStart; ++c) {
+      PetscReal Esum = 0.0;
+      PetscInt *pidx, Npc, q;
+      PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx));
+      for (q = 0; q < Npc; ++q) {
+        const PetscInt p = pidx[q];
+        Esum += E[p * dim];
+      }
+      xval = (c + 0.5) * ((upper[0] - lower[0]) / (cEnd - cStart));
+      PetscCall(PetscDrawSPAddPoint(user->EDrawSP, &xval, &Esum));
+      PetscCall(PetscFree(pidx));
+    }
+    for (c = 0; c < (cEnd - cStart); ++c) {
+      xval = (c + 0.5) * ((upper[0] - lower[0]) / (cEnd - cStart));
+      PetscCall(PetscDrawSPAddPoint(user->RhoDrawSP, &xval, &charges[c]));
+      PetscCall(PetscDrawSPAddPoint(user->PotDrawSP, &xval, &pot[c]));
+    }
+    PetscCall(PetscDrawSPDraw(user->RhoDrawSP, PETSC_TRUE));
+    PetscCall(PetscDrawSave(user->RhoDraw));
+    PetscCall(PetscDrawSPDraw(user->EDrawSP, PETSC_TRUE));
+    PetscCall(PetscDrawSave(user->EDraw));
+    PetscCall(PetscDrawSPDraw(user->PotDrawSP, PETSC_TRUE));
+    PetscCall(PetscDrawSave(user->PotDraw));
+    PetscCall(DMSwarmSortRestoreAccess(sw));
+    PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x));
+    PetscCall(DMSwarmRestoreField(sw, "potential", NULL, NULL, (void **)&pot));
+    PetscCall(DMSwarmRestoreField(sw, "charges", NULL, NULL, (void **)&charges));
+    PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight));
+    PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E));
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode SetupParameters(MPI_Comm comm, AppCtx *ctx)
+{
+  PetscBag   bag;
+  Parameter *p;
+
+  PetscFunctionBeginUser;
+  /* setup PETSc parameter bag */
+  PetscCall(PetscBagGetData(ctx->bag, (void **)&p));
+  PetscCall(PetscBagSetName(ctx->bag, "par", "Vlasov-Poisson Parameters"));
+  bag = ctx->bag;
+  PetscCall(PetscBagRegisterScalar(bag, &p->v0, 1.0, "v0", "Velocity scale, m/s"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->t0, 1.0, "t0", "Time scale, s"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->x0, 1.0, "x0", "Space scale, m"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->phi0, 1.0, "phi0", "Potential scale, kg*m^2/(A*s^3)"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->q0, 1.0, "q0", "Charge Scale, A*s"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->m0, 1.0, "m0", "Mass Scale, kg"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->epsi0, 1.0, "epsi0", "Permittivity of Free Space, A^2*s^4/(kg*m^3)"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->kb, 1.0, "kb", "Boltzmann Constant, m^2*kg/(s^2*K)"));
+
+  PetscCall(PetscBagRegisterScalar(bag, &p->sigma, 1.0, "sigma", "Charge per unit area, C/m^2"));
+  PetscCall(PetscBagRegisterScalar(bag, &p->poissonNumber, 1.0,
"poissonNumber", "Non-Dimensional Poisson Number")); + PetscCall(PetscBagRegisterScalar(bag, &p->vlasovNumber, 1.0, "vlasovNumber", "Non-Dimensional Vlasov Number")); + PetscCall(PetscBagSetFromOptions(bag)); + { + PetscViewer viewer; + PetscViewerFormat format; + PetscBool flg; + + PetscCall(PetscOptionsGetViewer(comm, NULL, NULL, "-param_view", &viewer, &format, &flg)); + if (flg) { + PetscCall(PetscViewerPushFormat(viewer, format)); + PetscCall(PetscBagView(bag, viewer)); + PetscCall(PetscViewerFlush(viewer)); + PetscCall(PetscViewerPopFormat(viewer)); + PetscCall(PetscViewerDestroy(&viewer)); + } + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) { PetscFunctionBeginUser; PetscCall(DMCreate(comm, dm)); @@ -75,400 +683,1405 @@ static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user) PetscFunctionReturn(PETSC_SUCCESS); } +static void ion_f0(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + f0[0] = -constants[SIGMA]; +} + static void laplacian_f1(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[]) { PetscInt d; for (d = 0; d < dim; ++d) f1[d] = u_x[d]; } -static void laplacian(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[]) +static void laplacian_g3(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[]) { PetscInt d; for (d = 0; d < dim; ++d) g3[d * dim + d] = 1.0; } +static PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx) +{ + *u = 0.0; + return PETSC_SUCCESS; +} + +/* + / I -grad\ / q \ = /0\ + \-div 0 / \phi/ \f/ +*/ +static void f0_q(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + for (PetscInt d = 0; d < dim; ++d) f0[d] += u[uOff[0] + d]; +} + +static void f1_q(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], 
const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[]) +{ + for (PetscInt d = 0; d < dim; ++d) f1[d * dim + d] = u[uOff[1]]; +} + +static void f0_phi_backgroundCharge(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[]) +{ + f0[0] += constants[SIGMA]; + for (PetscInt d = 0; d < dim; ++d) f0[0] += u_x[uOff_x[0] + d * dim + d]; +} + +/* Boundary residual. Dirichlet boundary for u means u_bdy=p*n */ +static void g0_qq(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g0[]) +{ + for (PetscInt d = 0; d < dim; ++d) g0[d * dim + d] = 1.0; +} + +static void g2_qphi(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g2[]) +{ + for (PetscInt d = 0; d < dim; ++d) g2[d * dim + d] = 1.0; +} + +static void g1_phiq(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g1[]) +{ + for (PetscInt d = 0; d < dim; ++d) g1[d * dim + d] = 1.0; +} + static PetscErrorCode CreateFEM(DM dm, AppCtx *user) { - PetscFE fe; - PetscDS ds; - DMPolytopeType ct; - PetscBool simplex; - PetscInt dim, cStart; + PetscFE fephi, feq; + PetscDS ds; + PetscBool simplex; + PetscInt dim; PetscFunctionBeginUser; PetscCall(DMGetDimension(dm, &dim)); - PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, NULL)); - PetscCall(DMPlexGetCellType(dm, cStart, &ct)); - simplex = DMPolytopeTypeGetNumVertices(ct) == DMPolytopeTypeGetDim(ct) + 1 ? 
PETSC_TRUE : PETSC_FALSE; - PetscCall(PetscFECreateDefault(PetscObjectComm((PetscObject)dm), dim, 1, simplex, NULL, -1, &fe)); - PetscCall(PetscObjectSetName((PetscObject)fe, "potential")); - PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fe)); - PetscCall(DMCreateDS(dm)); - PetscCall(PetscFEDestroy(&fe)); - PetscCall(DMGetDS(dm, &ds)); - PetscCall(PetscDSSetResidual(ds, 0, NULL, laplacian_f1)); - PetscCall(PetscDSSetJacobian(ds, 0, 0, NULL, NULL, NULL, laplacian)); + PetscCall(DMPlexIsSimplex(dm, &simplex)); + if (user->em == EM_MIXED) { + DMLabel label; + const PetscInt id = 1; + + PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, dim, simplex, "field_", PETSC_DETERMINE, &feq)); + PetscCall(PetscObjectSetName((PetscObject)feq, "field")); + PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, 1, simplex, "potential_", PETSC_DETERMINE, &fephi)); + PetscCall(PetscObjectSetName((PetscObject)fephi, "potential")); + PetscCall(PetscFECopyQuadrature(feq, fephi)); + PetscCall(DMSetField(dm, 0, NULL, (PetscObject)feq)); + PetscCall(DMSetField(dm, 1, NULL, (PetscObject)fephi)); + PetscCall(DMCreateDS(dm)); + PetscCall(PetscFEDestroy(&fephi)); + PetscCall(PetscFEDestroy(&feq)); + + PetscCall(DMGetLabel(dm, "marker", &label)); + PetscCall(DMGetDS(dm, &ds)); + + PetscCall(PetscDSSetResidual(ds, 0, f0_q, f1_q)); + PetscCall(PetscDSSetResidual(ds, 1, f0_phi_backgroundCharge, NULL)); + PetscCall(PetscDSSetJacobian(ds, 0, 0, g0_qq, NULL, NULL, NULL)); + PetscCall(PetscDSSetJacobian(ds, 0, 1, NULL, NULL, g2_qphi, NULL)); + PetscCall(PetscDSSetJacobian(ds, 1, 0, NULL, g1_phiq, NULL, NULL)); + + PetscCall(DMAddBoundary(dm, DM_BC_ESSENTIAL, "wall", label, 1, &id, 0, 0, NULL, (void (*)(void))zero, NULL, NULL, NULL)); + + } else if (user->em == EM_PRIMAL) { + MatNullSpace nullsp; + PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, 1, simplex, NULL, PETSC_DETERMINE, &fephi)); + PetscCall(PetscObjectSetName((PetscObject)fephi, "potential")); + PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fephi)); + PetscCall(DMCreateDS(dm)); + PetscCall(DMGetDS(dm, &ds)); + PetscCall(PetscDSSetResidual(ds, 0, ion_f0, laplacian_f1)); + PetscCall(PetscDSSetJacobian(ds, 0, 0, NULL, NULL, NULL, laplacian_g3)); + PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)dm), PETSC_TRUE, 0, NULL, &nullsp)); + PetscCall(PetscObjectCompose((PetscObject)fephi, "nullspace", (PetscObject)nullsp)); + PetscCall(MatNullSpaceDestroy(&nullsp)); + PetscCall(PetscFEDestroy(&fephi)); + } PetscFunctionReturn(PETSC_SUCCESS); } -/* - Initialize particle coordinates uniformly and with opposing velocities -*/ -static PetscErrorCode CreateParticles(DM dm, DM *sw, AppCtx *user) +static PetscErrorCode CreatePoisson(DM dm, AppCtx *user) { - PetscRandom rnd, rndp; - PetscReal interval = user->particleRelDx; - PetscScalar value, *vals; - PetscReal *centroid, *coords, *xi0, *v0, *J, *invJ, detJ, *initialConditions, normalized_vel; - PetscInt *cellid, cStart; - PetscInt Ncell, Np = user->particlesPerCell, p, c, dim, d; + SNES snes; + Mat J; + MatNullSpace nullSpace; + + PetscFunctionBeginUser; + PetscCall(CreateFEM(dm, user)); + PetscCall(SNESCreate(PetscObjectComm((PetscObject)dm), &snes)); + PetscCall(SNESSetOptionsPrefix(snes, "em_")); + PetscCall(SNESSetDM(snes, dm)); + PetscCall(DMPlexSetSNESLocalFEM(dm, PETSC_FALSE, user)); + PetscCall(SNESSetFromOptions(snes)); + + PetscCall(DMCreateMatrix(dm, &J)); + PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)dm), PETSC_TRUE, 0, NULL, &nullSpace)); + PetscCall(MatSetNullSpace(J, nullSpace)); + 
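/* J holds its own reference to the null space, so the local handle can be destroyed immediately */
+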
PetscCall(MatNullSpaceDestroy(&nullSpace));
+  PetscCall(SNESSetJacobian(snes, J, J, NULL, NULL));
+  PetscCall(MatDestroy(&J));
+  user->snes = snes;
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+PetscErrorCode PetscPDFPertubedConstant2D(const PetscReal x[], const PetscReal dummy[], PetscReal p[])
+{
+  p[0] = (1 + 0.01 * PetscCosReal(0.5 * x[0])) / (2 * PETSC_PI);
+  p[1] = (1 + 0.01 * PetscCosReal(0.5 * x[1])) / (2 * PETSC_PI);
+  return PETSC_SUCCESS;
+}
+PetscErrorCode PetscPDFPertubedConstant1D(const PetscReal x[], const PetscReal dummy[], PetscReal p[])
+{
+  p[0] = (1. + 0.01 * PetscCosReal(0.5 * x[0])) / (2 * PETSC_PI);
+  return PETSC_SUCCESS;
+}
+
+PetscErrorCode PetscPDFCosine1D(const PetscReal x[], const PetscReal scale[], PetscReal p[])
+{
+  const PetscReal alpha = scale ? scale[0] : 0.0;
+  const PetscReal k     = scale ? scale[1] : 1.;
+  p[0] = (1 + alpha * PetscCosReal(k * x[0]));
+  return PETSC_SUCCESS;
+}
+
+PetscErrorCode PetscPDFCosine2D(const PetscReal x[], const PetscReal scale[], PetscReal p[])
+{
+  const PetscReal alpha = scale ? scale[0] : 0.;
+  const PetscReal k     = scale ? scale[1] : 1.;
+  p[0] = (1 + alpha * PetscCosReal(k * (x[0] + x[1])));
+  return PETSC_SUCCESS;
+}
+
+PetscErrorCode PetscPDFCosine1D_TwoStream(const PetscReal x[], const PetscReal scale[], PetscReal p[])
+{
+  const PetscReal alpha = scale ? scale[0] : 0.0;
+  const PetscReal k     = scale ? scale[1] : 1.;
+  p[0] = (1. + alpha * PetscCosReal(k * x[0]));
+  return PETSC_SUCCESS;
+}
+
+static PetscErrorCode InitializeParticles_PerturbedWeights(DM sw, AppCtx *user)
+{
+  DM           vdm, dm;
+  PetscScalar *weight;
+  PetscReal   *x, *v, vmin[3], vmax[3], gmin[3], gmax[3], xi0[3];
+  PetscInt    *N, Ns, dim, *cellid, *species, Np, cStart, cEnd, Npc, n;
+  PetscInt     Np_global, p, q, s, c, d, cv;
+  PetscBool    flg;
+  PetscMPIInt  size, rank;
+  Parameter   *param;
+
+  PetscFunctionBegin;
+  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)sw), &size));
+  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)sw), &rank));
+  PetscOptionsBegin(PetscObjectComm((PetscObject)sw), "", "DMSwarm Options", "DMSWARM");
+  PetscCall(DMSwarmGetNumSpecies(sw, &Ns));
+  PetscCall(PetscOptionsInt("-dm_swarm_num_species", "The number of species", "DMSwarmSetNumSpecies", Ns, &Ns, &flg));
+  if (flg) PetscCall(DMSwarmSetNumSpecies(sw, Ns));
+  PetscCall(PetscCalloc1(Ns, &N));
+  n = Ns;
+  PetscCall(PetscOptionsIntArray("-dm_swarm_num_particles", "The target number of particles", "", N, &n, NULL));
+  PetscOptionsEnd();
+
+  PetscCall(DMGetDimension(sw, &dim));
+  PetscCall(DMSwarmGetCellDM(sw, &dm));
+  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
+
+  PetscCall(DMCreate(PETSC_COMM_SELF, &vdm));
+  PetscCall(DMSetType(vdm, DMPLEX));
+  PetscCall(DMPlexSetOptionsPrefix(vdm, "v"));
+  PetscCall(DMSetFromOptions(vdm));
+  PetscCall(DMViewFromOptions(vdm, NULL, "-vdm_view"));
+
+  PetscInt vStart, vEnd;
+  PetscCall(DMPlexGetHeightStratum(vdm, 0, &vStart, &vEnd));
+  PetscCall(DMGetBoundingBox(vdm, vmin, vmax));
+
+  PetscCall(DMGetBoundingBox(dm, gmin, gmax));
+  PetscCall(PetscBagGetData(user->bag, (void **)&param));
+  Np = (cEnd - cStart) * (vEnd - vStart);
+  PetscCall(MPIU_Allreduce(&Np, &Np_global, 1, MPIU_INT, MPIU_SUM, PETSC_COMM_WORLD));
+  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Global Np = %" PetscInt_FMT "\n", Np_global));
+  PetscCall(DMSwarmSetLocalSizes(sw, Np, 0));
+  Npc = Np / (cEnd - cStart);
+  PetscCall(DMSwarmGetField(sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
+  for (c = 0, p = 0; c < cEnd - cStart; ++c) {
+    for (s = 0; s < Ns; ++s) {
+
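/* Particles are laid out cell-major: each cell owns Npc consecutive entries per species */
+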
for (q = 0; q < Npc; ++q, ++p) cellid[p] = c; + } + } + PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid)); + PetscCall(PetscFree(N)); + + PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); + PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v)); + PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight)); + PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species)); + + PetscCall(DMSwarmSortGetAccess(sw)); + for (c = 0; c < cEnd - cStart; ++c) { + const PetscInt cell = c + cStart; + PetscInt *pidx, Npc; + PetscReal centroid[3], volume; + + PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx)); + PetscCall(DMPlexComputeCellGeometryFVM(dm, cell, &volume, centroid, NULL)); + for (q = 0; q < Npc; ++q) { + const PetscInt p = pidx[q]; + + for (d = 0; d < dim; ++d) { + x[p * dim + d] = centroid[d]; + v[p * dim + d] = vmin[0] + (q + 0.5) * (vmax[0] - vmin[0]) / Npc; + if (user->fake_1D && d > 0) v[p * dim + d] = 0; + } + } + PetscCall(PetscFree(pidx)); + } + PetscCall(DMGetCoordinatesLocalSetUp(vdm)); + + /* Setup Quadrature for spatial and velocity weight calculations*/ + PetscQuadrature quad_x; + PetscInt Nq_x; + const PetscReal *wq_x, *xq_x; + PetscReal *xq_x_extended; + PetscReal weightsum = 0., totalcellweight = 0., *weight_x, *weight_v; + PetscReal scale[2] = {user->cosine_coefficients[0], user->cosine_coefficients[1]}; + + PetscCall(PetscCalloc2(cEnd - cStart, &weight_x, Np, &weight_v)); + if (user->fake_1D) PetscCall(PetscDTGaussTensorQuadrature(1, 1, 5, -1.0, 1.0, &quad_x)); + else PetscCall(PetscDTGaussTensorQuadrature(dim, 1, 5, -1.0, 1.0, &quad_x)); + PetscCall(PetscQuadratureGetData(quad_x, NULL, NULL, &Nq_x, &xq_x, &wq_x)); + if (user->fake_1D) { + PetscCall(PetscCalloc1(Nq_x * dim, &xq_x_extended)); + for (PetscInt i = 0; i < Nq_x; ++i) xq_x_extended[i * dim] = xq_x[i]; + } + /* Integrate the density function to get the weights of particles in each cell */ + for (d = 0; d < dim; ++d) xi0[d] = -1.0; + for (c = cStart; c < cEnd; ++c) { + PetscReal v0_x[3], J_x[9], invJ_x[9], detJ_x, xr_x[3], den_x; + PetscInt *pidx, Npc, q; + PetscInt Ncx; + const PetscScalar *array_x; + PetscScalar *coords_x = NULL; + PetscBool isDGx; + weight_x[c] = 0.; + + PetscCall(DMPlexGetCellCoordinates(dm, c, &isDGx, &Ncx, &array_x, &coords_x)); + PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx)); + PetscCall(DMPlexComputeCellGeometryFEM(dm, c, NULL, v0_x, J_x, invJ_x, &detJ_x)); + for (q = 0; q < Nq_x; ++q) { + /*Transform quadrature points from ref space to real space (0,12.5664)*/ + if (user->fake_1D) CoordinatesRefToReal(dim, dim, xi0, v0_x, J_x, &xq_x_extended[q * dim], xr_x); + else CoordinatesRefToReal(dim, dim, xi0, v0_x, J_x, &xq_x[q * dim], xr_x); + + /*Transform quadrature points from real space to ideal real space (0, 2PI/k)*/ + if (user->fake_1D) { + if (user->twostream) PetscCall(PetscPDFCosine1D_TwoStream(xr_x, scale, &den_x)); + else PetscCall(PetscPDFCosine1D(xr_x, scale, &den_x)); + detJ_x = J_x[0]; + } else PetscCall(PetscPDFCosine2D(xr_x, scale, &den_x)); + /*We have to transform the quadrature weights as well*/ + weight_x[c] += den_x * (wq_x[q] * detJ_x); + } + // Get the cell numbering for consistent output between sequential and distributed tests + IS globalOrdering; + const PetscInt *ordering; + PetscCall(DMPlexGetCellNumbering(dm, &globalOrdering)); + PetscCall(ISGetIndices(globalOrdering, &ordering)); + PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "c:%" 
PetscInt_FMT " [x_a,x_b] = %1.15f,%1.15f -> cell weight = %1.15f\n", ordering[c], (double)PetscRealPart(coords_x[0]), (double)PetscRealPart(coords_x[2]), (double)weight_x[c])); + PetscCall(ISRestoreIndices(globalOrdering, &ordering)); + totalcellweight += weight_x[c]; + // Confirm the number of particles per spatial cell conforms to the size of the velocity grid + PetscCheck(Npc == vEnd - vStart, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Number of particles %" PetscInt_FMT " in cell (rank %d/%d) != %" PetscInt_FMT " number of velocity vertices", Npc, rank, size, vEnd - vStart); + + /* Set weights to be gaussian in velocity cells (using exact solution) */ + for (cv = 0; cv < vEnd - vStart; ++cv) { + PetscInt Nc; + const PetscScalar *array_v; + PetscScalar *coords_v = NULL; + PetscBool isDG; + PetscCall(DMPlexGetCellCoordinates(vdm, cv, &isDG, &Nc, &array_v, &coords_v)); + + const PetscInt p = pidx[cv]; + // Two stream function from 1/2pi v^2 e^(-v^2/2) + if (user->twostream) + weight_v[p] = 1. / (PetscSqrtReal(2 * PETSC_PI)) * (((coords_v[0] * PetscExpReal(-PetscSqr(coords_v[0]) / 2.)) - (coords_v[1] * PetscExpReal(-PetscSqr(coords_v[1]) / 2.)))) - 0.5 * PetscErfReal(coords_v[0] / PetscSqrtReal(2.)) + 0.5 * (PetscErfReal(coords_v[1] / PetscSqrtReal(2.))); + else weight_v[p] = 0.5 * (PetscErfReal(coords_v[1] / PetscSqrtReal(2.)) - PetscErfReal(coords_v[0] / PetscSqrtReal(2.))); + + weight[p] = user->totalWeight * weight_v[p] * weight_x[c]; + if (weight[p] > 1.) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "weights: %g, %g, %g\n", user->totalWeight, weight_v[p], weight_x[c])); + //PetscPrintf(PETSC_COMM_WORLD, "particle %"PetscInt_FMT": %g, weight_v: %g weight_x: %g\n", p, weight[p], weight_v[p], weight_x[p]); + weightsum += weight[p]; + + PetscCall(DMPlexRestoreCellCoordinates(vdm, cv, &isDG, &Nc, &array_v, &coords_v)); + } + PetscCall(DMPlexRestoreCellCoordinates(dm, c, &isDGx, &Ncx, &array_x, &coords_x)); + PetscCall(PetscFree(pidx)); + } + PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT)); + PetscReal global_cellweight, global_weightsum; + PetscCall(MPIU_Allreduce(&totalcellweight, &global_cellweight, 1, MPIU_REAL, MPIU_SUM, PETSC_COMM_WORLD)); + PetscCall(MPIU_Allreduce(&weightsum, &global_weightsum, 1, MPIU_REAL, MPIU_SUM, PETSC_COMM_WORLD)); + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "particle weight sum = %1.10f cell weight sum = %1.10f\n", (double)global_cellweight, (double)global_weightsum)); + if (user->fake_1D) PetscCall(PetscFree(xq_x_extended)); + PetscCall(PetscFree2(weight_x, weight_v)); + PetscCall(PetscQuadratureDestroy(&quad_x)); + PetscCall(DMSwarmSortRestoreAccess(sw)); + PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&x)); + PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight)); + PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species)); + PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v)); + PetscCall(DMDestroy(&vdm)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode InitializeConstants(DM sw, AppCtx *user) +{ + DM dm; + PetscInt *species; + PetscReal *weight, totalCharge = 0., totalWeight = 0., gmin[3], gmax[3], global_charge, global_weight; + PetscInt Np, dim; + + PetscFunctionBegin; + PetscCall(DMSwarmGetCellDM(sw, &dm)); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMGetBoundingBox(dm, gmin, gmax)); + PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&weight)); + PetscCall(DMSwarmGetField(sw, 
"species", NULL, NULL, (void **)&species)); + for (PetscInt p = 0; p < Np; ++p) { + totalWeight += weight[p]; + totalCharge += user->charges[species[p]] * weight[p]; + } + PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&weight)); + PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species)); + { + Parameter *param; + PetscReal Area; + + PetscCall(PetscBagGetData(user->bag, (void **)¶m)); + switch (dim) { + case 1: + Area = (gmax[0] - gmin[0]); + break; + case 2: + if (user->fake_1D) { + Area = (gmax[0] - gmin[0]); + } else { + Area = (gmax[0] - gmin[0]) * (gmax[1] - gmin[1]); + } + break; + case 3: + Area = (gmax[0] - gmin[0]) * (gmax[1] - gmin[1]) * (gmax[2] - gmin[2]); + break; + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Dimension %" PetscInt_FMT " not supported", dim); + } + PetscCall(MPIU_Allreduce(&totalWeight, &global_weight, 1, MPIU_REAL, MPIU_SUM, PETSC_COMM_WORLD)); + PetscCall(MPIU_Allreduce(&totalCharge, &global_charge, 1, MPIU_REAL, MPIU_SUM, PETSC_COMM_WORLD)); + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "dim = %" PetscInt_FMT "\ttotalWeight = %f, user->charges[species[0]] = %f\ttotalCharge = %f, Total Area = %f\n", dim, (double)global_weight, (double)user->charges[0], (double)global_charge, (double)Area)); + param->sigma = PetscAbsReal(global_charge / (Area)); + + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "sigma: %g\n", (double)param->sigma)); + PetscCall(PetscPrintf(PETSC_COMM_WORLD, "(x0,v0,t0,m0,q0,phi0): (%e, %e, %e, %e, %e, %e) - (P, V) = (%e, %e)\n", (double)param->x0, (double)param->v0, (double)param->t0, (double)param->m0, (double)param->q0, (double)param->phi0, (double)param->poissonNumber, + (double)param->vlasovNumber)); + } + /* Setup Constants */ + { + PetscDS ds; + Parameter *param; + PetscCall(PetscBagGetData(user->bag, (void **)¶m)); + PetscScalar constants[NUM_CONSTANTS]; + constants[SIGMA] = param->sigma; + constants[V0] = param->v0; + constants[T0] = param->t0; + constants[X0] = param->x0; + constants[M0] = param->m0; + constants[Q0] = param->q0; + constants[PHI0] = param->phi0; + constants[POISSON] = param->poissonNumber; + constants[VLASOV] = param->vlasovNumber; + PetscCall(DMGetDS(dm, &ds)); + PetscCall(PetscDSSetConstants(ds, NUM_CONSTANTS, constants)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode InitializeVelocities_Fake1D(DM sw, AppCtx *user) +{ + DM dm; + PetscReal *v; + PetscInt *species, cStart, cEnd; + PetscInt dim, Np; + + PetscFunctionBegin; + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v)); + PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species)); + PetscCall(DMSwarmGetCellDM(sw, &dm)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); + PetscRandom rnd; + PetscCall(PetscRandomCreate(PetscObjectComm((PetscObject)sw), &rnd)); + PetscCall(PetscRandomSetInterval(rnd, 0, 1.)); + PetscCall(PetscRandomSetFromOptions(rnd)); + + for (PetscInt p = 0; p < Np; ++p) { + PetscReal a[3] = {0., 0., 0.}, vel[3] = {0., 0., 0.}; + + PetscCall(PetscRandomGetValueReal(rnd, &a[0])); + if (user->perturbed_weights) { + PetscCall(PetscPDFSampleConstant1D(a, NULL, vel)); + } else { + PetscCall(PetscPDFSampleGaussian1D(a, NULL, vel)); + } + v[p * dim] = vel[0]; + } + PetscCall(PetscRandomDestroy(&rnd)); + PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v)); + PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species)); + 
+static PetscErrorCode CreateSwarm(DM dm, AppCtx *user, DM *sw)
+{
+  PetscReal v0[2] = {1., 0.};
+  PetscInt  dim;

   PetscFunctionBeginUser;
   PetscCall(DMGetDimension(dm, &dim));
   PetscCall(DMCreate(PetscObjectComm((PetscObject)dm), sw));
   PetscCall(DMSetType(*sw, DMSWARM));
   PetscCall(DMSetDimension(*sw, dim));
-  PetscCall(PetscRandomCreate(PetscObjectComm((PetscObject)dm), &rnd));
-  PetscCall(PetscRandomSetInterval(rnd, 0.0, 1.0));
-  PetscCall(PetscRandomSetFromOptions(rnd));
-  PetscCall(PetscRandomCreate(PetscObjectComm((PetscObject)dm), &rndp));
-  PetscCall(PetscRandomSetInterval(rndp, -interval, interval));
-  PetscCall(PetscRandomSetFromOptions(rndp));
   PetscCall(DMSwarmSetType(*sw, DMSWARM_PIC));
   PetscCall(DMSwarmSetCellDM(*sw, dm));
   PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "w_q", 1, PETSC_SCALAR));
-  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "kinematics", dim, PETSC_REAL));
+  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "velocity", dim, PETSC_REAL));
+  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "species", 1, PETSC_INT));
+  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "initCoordinates", dim, PETSC_REAL));
+  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "initVelocity", dim, PETSC_REAL));
+  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "E_field", dim, PETSC_REAL));
+  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "potential", dim, PETSC_REAL));
+  PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "charges", dim, PETSC_REAL));
   PetscCall(DMSwarmFinalizeFieldRegister(*sw));
-  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &Ncell));
-  PetscCall(DMSwarmSetLocalSizes(*sw, Ncell * Np, 0));
+  PetscCall(DMSetApplicationContext(*sw, user));
   PetscCall(DMSetFromOptions(*sw));
-  PetscCall(DMSwarmGetField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
-  PetscCall(DMSwarmGetField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
-  PetscCall(DMSwarmGetField(*sw, "w_q", NULL, NULL, (void **)&vals));
-  PetscCall(DMSwarmGetField(*sw, "kinematics", NULL, NULL, (void **)&initialConditions));
-  PetscCall(PetscMalloc5(dim, &centroid, dim, &xi0, dim, &v0, dim * dim, &J, dim * dim, &invJ));
-  for (c = cStart; c < Ncell; c++) {
-    if (Np == 1) {
-      PetscCall(DMPlexComputeCellGeometryFVM(dm, c, NULL, centroid, NULL));
-      cellid[c] = c;
-      for (d = 0; d < dim; ++d) coords[c * dim + d] = centroid[d];
+  user->swarm = *sw;
+  if (user->perturbed_weights) {
+    PetscCall(InitializeParticles_PerturbedWeights(*sw, user));
+  } else {
+    PetscCall(DMSwarmComputeLocalSizeFromOptions(*sw));
+    PetscCall(DMSwarmInitializeCoordinates(*sw));
+    if (user->fake_1D) {
+      PetscCall(InitializeVelocities_Fake1D(*sw, user));
     } else {
-      for (d = 0; d < dim; ++d) xi0[d] = -1.0;
-      PetscCall(DMPlexComputeCellGeometryFEM(dm, c, NULL, v0, J, invJ, &detJ)); /* affine */
-      for (p = 0; p < Np; ++p) {
-        const PetscInt n = c * Np + p;
-        PetscReal refcoords[3], spacing;
-
-        cellid[n] = c;
-        if (user->uniform) {
-          spacing = 2. / Np;
-          PetscCall(PetscRandomGetValue(rnd, &value));
-          for (d = 0; d < dim; ++d) refcoords[d] = d == 0 ? -1. + spacing / 2. + p * spacing + value / 100. : 0.;
-        } else {
-          for (d = 0; d < dim; ++d) {
-            PetscCall(PetscRandomGetValue(rnd, &value));
-            refcoords[d] = d == 0 ?
PetscRealPart(value) : 0.; - } - } - CoordinatesRefToReal(dim, dim, xi0, v0, J, refcoords, &coords[n * dim]); - /* constant particle weights */ - for (d = 0; d < dim; ++d) vals[n] = user->sigma / Np; + PetscCall(DMSwarmInitializeVelocitiesFromOptions(*sw, v0)); + } + } + PetscCall(PetscObjectSetName((PetscObject)*sw, "Particles")); + PetscCall(DMViewFromOptions(*sw, NULL, "-sw_view")); + { + Vec gc, gc0, gv, gv0; + + PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, DMSwarmPICField_coor, &gc)); + PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, "initCoordinates", &gc0)); + PetscCall(VecCopy(gc, gc0)); + PetscCall(VecViewFromOptions(gc, NULL, "-ic_x_view")); + PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, DMSwarmPICField_coor, &gc)); + PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, "initCoordinates", &gc0)); + PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, "velocity", &gv)); + PetscCall(DMSwarmCreateGlobalVectorFromField(*sw, "initVelocity", &gv0)); + PetscCall(VecCopy(gv, gv0)); + PetscCall(VecViewFromOptions(gv, NULL, "-ic_v_view")); + PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, "velocity", &gv)); + PetscCall(DMSwarmDestroyGlobalVectorFromField(*sw, "initVelocity", &gv0)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode ComputeFieldAtParticles_Coulomb(SNES snes, DM sw, PetscReal E[]) +{ + AppCtx *user; + PetscReal *coords; + PetscInt *species, dim, Np, Ns; + PetscMPIInt size; + + PetscFunctionBegin; + PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)snes), &size)); + PetscCheck(size == 1, PetscObjectComm((PetscObject)snes), PETSC_ERR_SUP, "Coulomb code only works in serial"); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMSwarmGetNumSpecies(sw, &Ns)); + PetscCall(DMGetApplicationContext(sw, (void *)&user)); + + PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species)); + for (PetscInt p = 0; p < Np; ++p) { + PetscReal *pcoord = &coords[p * dim]; + PetscReal pE[3] = {0., 0., 0.}; + + /* Calculate field at particle p due to particle q */ + for (PetscInt q = 0; q < Np; ++q) { + PetscReal *qcoord = &coords[q * dim]; + PetscReal rpq[3], r, r3, q_q; + + if (p == q) continue; + q_q = user->charges[species[q]] * 1.; + for (PetscInt d = 0; d < dim; ++d) rpq[d] = pcoord[d] - qcoord[d]; + r = DMPlex_NormD_Internal(dim, rpq); + if (r < PETSC_SQRT_MACHINE_EPSILON) continue; + r3 = PetscPowRealInt(r, 3); + for (PetscInt d = 0; d < dim; ++d) pE[d] += q_q * rpq[d] / r3; + } + for (PetscInt d = 0; d < dim; ++d) E[p * dim + d] = pE[d]; + } + PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species)); + PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode ComputeFieldAtParticles_Primal(SNES snes, DM sw, PetscReal E[]) +{ + DM dm; + AppCtx *user; + PetscDS ds; + PetscFE fe; + Mat M_p, M; + Vec phi, locPhi, rho, f; + PetscReal *coords; + PetscInt dim, cStart, cEnd, Np; + PetscQuadrature q; + + PetscFunctionBegin; + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMGetApplicationContext(sw, (void *)&user)); + + KSP ksp; + Vec rho0; + char oldField[PETSC_MAX_PATH_LEN]; + const char *tmp; + + /* Create the charges rho */ + PetscCall(SNESGetDM(snes, &dm)); + PetscCall(DMSwarmVectorGetField(sw, &tmp)); + PetscCall(PetscStrncpy(oldField, tmp, 
PETSC_MAX_PATH_LEN)); + PetscCall(DMSwarmVectorDefineField(sw, "w_q")); + PetscCall(DMCreateMassMatrix(sw, dm, &M_p)); + PetscCall(DMSwarmVectorDefineField(sw, oldField)); + + PetscCall(DMCreateMassMatrix(dm, dm, &M)); + PetscCall(DMGetGlobalVector(dm, &rho0)); + PetscCall(PetscObjectSetName((PetscObject)rho0, "Charge density (rho0) from Primal Compute")); + PetscCall(DMGetGlobalVector(dm, &rho)); + PetscCall(PetscObjectSetName((PetscObject)rho, "rho")); + PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &f)); + + PetscCall(PetscObjectSetName((PetscObject)f, "particle weight")); + PetscCall(MatMultTranspose(M_p, f, rho)); + PetscCall(MatViewFromOptions(M_p, NULL, "-mp_view")); + PetscCall(MatViewFromOptions(M, NULL, "-m_view")); + PetscCall(VecViewFromOptions(f, NULL, "-weights_view")); + PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &f)); + + PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp)); + PetscCall(KSPSetOptionsPrefix(ksp, "em_proj_")); + PetscCall(KSPSetOperators(ksp, M, M)); + PetscCall(KSPSetFromOptions(ksp)); + PetscCall(KSPSolve(ksp, rho, rho0)); + PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view")); + + PetscInt rhosize; + PetscReal *charges; + const PetscScalar *rho_vals; + PetscCall(DMSwarmGetField(sw, "charges", NULL, NULL, (void **)&charges)); + PetscCall(VecGetLocalSize(rho0, &rhosize)); + PetscCall(VecGetArrayRead(rho0, &rho_vals)); + for (PetscInt c = 0; c < rhosize; ++c) charges[c] = rho_vals[c]; + PetscCall(VecRestoreArrayRead(rho0, &rho_vals)); + PetscCall(DMSwarmRestoreField(sw, "charges", NULL, NULL, (void **)&charges)); + + PetscCall(VecScale(rho, -1.0)); + + PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view")); + PetscCall(VecViewFromOptions(rho, NULL, "-rho_view")); + PetscCall(DMRestoreGlobalVector(dm, &rho0)); + PetscCall(KSPDestroy(&ksp)); + PetscCall(MatDestroy(&M_p)); + PetscCall(MatDestroy(&M)); + + PetscCall(DMGetGlobalVector(dm, &phi)); + PetscCall(PetscObjectSetName((PetscObject)phi, "potential")); + PetscCall(VecSet(phi, 0.0)); + PetscCall(SNESSolve(snes, rho, phi)); + PetscCall(DMRestoreGlobalVector(dm, &rho)); + PetscCall(VecViewFromOptions(phi, NULL, "-phi_view")); + + PetscInt phisize; + PetscReal *pot; + const PetscScalar *phi_vals; + PetscCall(DMSwarmGetField(sw, "potential", NULL, NULL, (void **)&pot)); + PetscCall(VecGetLocalSize(phi, &phisize)); + PetscCall(VecGetArrayRead(phi, &phi_vals)); + for (PetscInt c = 0; c < phisize; ++c) pot[c] = phi_vals[c]; + PetscCall(VecRestoreArrayRead(phi, &phi_vals)); + PetscCall(DMSwarmRestoreField(sw, "potential", NULL, NULL, (void **)&pot)); + + PetscCall(DMGetLocalVector(dm, &locPhi)); + PetscCall(DMGlobalToLocalBegin(dm, phi, INSERT_VALUES, locPhi)); + PetscCall(DMGlobalToLocalEnd(dm, phi, INSERT_VALUES, locPhi)); + PetscCall(DMRestoreGlobalVector(dm, &phi)); + + PetscCall(DMGetDS(dm, &ds)); + PetscCall(PetscDSGetDiscretization(ds, 0, (PetscObject *)&fe)); + PetscCall(DMSwarmSortGetAccess(sw)); + PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); + PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); + + for (PetscInt c = cStart; c < cEnd; ++c) { + PetscTabulation tab; + PetscScalar *clPhi = NULL; + PetscReal *pcoord, *refcoord; + PetscReal v[3], J[9], invJ[9], detJ; + PetscInt *points; + PetscInt Ncp; + + PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Ncp, &points)); + PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord)); + PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &refcoord)); + for (PetscInt cp = 0; cp < Ncp; 
++cp) + for (PetscInt d = 0; d < dim; ++d) pcoord[cp * dim + d] = coords[points[cp] * dim + d]; + PetscCall(DMPlexCoordinatesToReference(dm, c, Ncp, pcoord, refcoord)); + PetscCall(PetscFECreateTabulation(fe, 1, Ncp, refcoord, 1, &tab)); + PetscCall(DMPlexComputeCellGeometryFEM(dm, c, NULL, v, J, invJ, &detJ)); + PetscCall(DMPlexVecGetClosure(dm, NULL, locPhi, c, NULL, &clPhi)); + for (PetscInt cp = 0; cp < Ncp; ++cp) { + const PetscReal *basisDer = tab->T[1]; + const PetscInt p = points[cp]; + + for (PetscInt d = 0; d < dim; ++d) E[p * dim + d] = 0.; + PetscCall(PetscFEGetQuadrature(fe, &q)); + PetscCall(PetscFEFreeInterpolateGradient_Static(fe, basisDer, clPhi, dim, invJ, NULL, cp, &E[p * dim])); + for (PetscInt d = 0; d < dim; ++d) { + E[p * dim + d] *= -1.0; + if (user->fake_1D && d > 0) E[p * dim + d] = 0; } } + PetscCall(DMPlexVecRestoreClosure(dm, NULL, locPhi, c, NULL, &clPhi)); + PetscCall(DMRestoreWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord)); + PetscCall(DMRestoreWorkArray(dm, Ncp * dim, MPIU_REAL, &refcoord)); + PetscCall(PetscTabulationDestroy(&tab)); + PetscCall(PetscFree(points)); } - PetscCall(PetscFree5(centroid, xi0, v0, J, invJ)); - normalized_vel = 1.; - for (c = 0; c < Ncell; ++c) { - for (p = 0; p < Np; ++p) { - if (p % 2 == 0) { - for (d = 0; d < dim; ++d) initialConditions[(c * Np + p) * dim + d] = d == 0 ? normalized_vel : 0.; - } else { - for (d = 0; d < dim; ++d) initialConditions[(c * Np + p) * dim + d] = d == 0 ? -(normalized_vel) : 0.; + PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmSortRestoreAccess(sw)); + PetscCall(DMRestoreLocalVector(dm, &locPhi)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode ComputeFieldAtParticles_Mixed(SNES snes, DM sw, PetscReal E[]) +{ + AppCtx *user; + DM dm, potential_dm; + KSP ksp; + IS potential_IS; + PetscDS ds; + PetscFE fe; + PetscFEGeom feGeometry; + Mat M_p, M; + Vec phi, locPhi, rho, f, temp_rho, rho0; + PetscQuadrature q; + PetscReal *coords, *pot; + PetscInt dim, cStart, cEnd, Np, fields = 1; + char oldField[PETSC_MAX_PATH_LEN]; + const char *tmp; + + PetscFunctionBegin; + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMGetApplicationContext(sw, &user)); + + /* Create the charges rho */ + PetscCall(SNESGetDM(snes, &dm)); + PetscCall(DMGetGlobalVector(dm, &rho)); + PetscCall(PetscObjectSetName((PetscObject)rho, "rho")); + + PetscCall(DMCreateSubDM(dm, 1, &fields, &potential_IS, &potential_dm)); + + PetscCall(DMSwarmVectorGetField(sw, &tmp)); + PetscCall(PetscStrncpy(oldField, tmp, PETSC_MAX_PATH_LEN)); + PetscCall(DMSwarmVectorDefineField(sw, "w_q")); + PetscCall(DMCreateMassMatrix(sw, potential_dm, &M_p)); + PetscCall(DMSwarmVectorDefineField(sw, oldField)); + + PetscCall(DMCreateMassMatrix(potential_dm, potential_dm, &M)); + PetscCall(MatViewFromOptions(M_p, NULL, "-mp_view")); + PetscCall(MatViewFromOptions(M, NULL, "-m_view")); + PetscCall(DMGetGlobalVector(potential_dm, &temp_rho)); + PetscCall(PetscObjectSetName((PetscObject)temp_rho, "Mf")); + PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &f)); + PetscCall(PetscObjectSetName((PetscObject)f, "particle weight")); + PetscCall(VecViewFromOptions(f, NULL, "-weights_view")); + PetscCall(MatMultTranspose(M_p, f, temp_rho)); + PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &f)); + PetscCall(DMGetGlobalVector(potential_dm, &rho0)); + PetscCall(PetscObjectSetName((PetscObject)rho0, "Charge density (rho0) from Mixed Compute")); + + 
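/* L2-project the deposited particle charge: solve M rho0 = M_p^T f over the potential space */
+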
PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp));
+  PetscCall(KSPSetOptionsPrefix(ksp, "em_proj_"));
+  PetscCall(KSPSetOperators(ksp, M, M));
+  PetscCall(KSPSetFromOptions(ksp));
+  PetscCall(KSPSolve(ksp, temp_rho, rho0));
+  PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view"));
+
+  PetscInt           rhosize;
+  PetscReal         *charges;
+  const PetscScalar *rho_vals;
+  Parameter         *param;
+  PetscCall(PetscBagGetData(user->bag, (void **)&param));
+  PetscCall(DMSwarmGetField(sw, "charges", NULL, NULL, (void **)&charges));
+  PetscCall(VecGetLocalSize(rho0, &rhosize));
+
+  /* The integral of the constant basis function over the reference element is 1, but the reference element has area 4, so the basis value is 1/4; scale rho0 by 1/4 to compensate */
+  PetscCall(VecScale(rho0, 0.25));
+  PetscCall(VecGetArrayRead(rho0, &rho_vals));
+  for (PetscInt c = 0; c < rhosize; ++c) charges[c] = rho_vals[c];
+  PetscCall(VecRestoreArrayRead(rho0, &rho_vals));
+  PetscCall(DMSwarmRestoreField(sw, "charges", NULL, NULL, (void **)&charges));
+
+  PetscCall(VecISCopy(rho, potential_IS, SCATTER_FORWARD, temp_rho));
+  PetscCall(VecScale(rho, 0.25));
+  PetscCall(VecViewFromOptions(rho0, NULL, "-rho0_view"));
+  PetscCall(VecViewFromOptions(temp_rho, NULL, "-temprho_view"));
+  PetscCall(VecViewFromOptions(rho, NULL, "-rho_view"));
+  PetscCall(DMRestoreGlobalVector(potential_dm, &temp_rho));
+  PetscCall(DMRestoreGlobalVector(potential_dm, &rho0));
+
+  PetscCall(MatDestroy(&M_p));
+  PetscCall(MatDestroy(&M));
+  PetscCall(KSPDestroy(&ksp));
+  PetscCall(DMDestroy(&potential_dm));
+  PetscCall(ISDestroy(&potential_IS));
+
+  PetscCall(DMGetGlobalVector(dm, &phi));
+  PetscCall(PetscObjectSetName((PetscObject)phi, "potential"));
+  PetscCall(VecSet(phi, 0.0));
+  PetscCall(SNESSolve(snes, rho, phi));
+  PetscCall(DMRestoreGlobalVector(dm, &rho));
+
+  PetscInt           phisize;
+  const PetscScalar *phi_vals;
+  PetscCall(DMSwarmGetField(sw, "potential", NULL, NULL, (void **)&pot));
+  PetscCall(VecGetLocalSize(phi, &phisize));
+  PetscCall(VecViewFromOptions(phi, NULL, "-phi_view"));
+  PetscCall(VecGetArrayRead(phi, &phi_vals));
+  for (PetscInt c = 0; c < phisize; ++c) pot[c] = phi_vals[c];
+  PetscCall(VecRestoreArrayRead(phi, &phi_vals));
+  PetscCall(DMSwarmRestoreField(sw, "potential", NULL, NULL, (void **)&pot));
+
+  PetscCall(DMGetLocalVector(dm, &locPhi));
+  PetscCall(DMGlobalToLocalBegin(dm, phi, INSERT_VALUES, locPhi));
+  PetscCall(DMGlobalToLocalEnd(dm, phi, INSERT_VALUES, locPhi));
+  PetscCall(DMRestoreGlobalVector(dm, &phi));
+
+  PetscCall(DMGetDS(dm, &ds));
+  PetscCall(PetscDSGetDiscretization(ds, 0, (PetscObject *)&fe));
+  PetscCall(DMSwarmSortGetAccess(sw));
+  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
+  PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
+  PetscCall(PetscFEGetQuadrature(fe, &q));
+  PetscCall(PetscFECreateCellGeometry(fe, q, &feGeometry));
+  for (PetscInt c = cStart; c < cEnd; ++c) {
+    PetscTabulation tab;
+    PetscScalar    *clPhi = NULL;
+    PetscReal      *pcoord, *refcoord;
+    PetscInt       *points;
+    PetscInt        Ncp;
+
+    PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Ncp, &points));
+    PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord));
+    PetscCall(DMGetWorkArray(dm, Ncp * dim, MPIU_REAL, &refcoord));
+    for (PetscInt cp = 0; cp < Ncp; ++cp)
+      for (PetscInt d = 0; d < dim; ++d) pcoord[cp * dim + d] = coords[points[cp] * dim + d];
+    PetscCall(DMPlexCoordinatesToReference(dm, c, Ncp, pcoord, refcoord));
+    PetscCall(PetscFECreateTabulation(fe, 1, Ncp, refcoord, 1, &tab));
+    PetscCall(DMPlexComputeCellGeometryFEM(dm, c, q, feGeometry.v,
feGeometry.J, feGeometry.invJ, feGeometry.detJ)); + PetscCall(DMPlexVecGetClosure(dm, NULL, locPhi, c, NULL, &clPhi)); + + for (PetscInt cp = 0; cp < Ncp; ++cp) { + const PetscInt p = points[cp]; + + for (PetscInt d = 0; d < dim; ++d) E[p * dim + d] = 0.; + PetscCall(PetscFEInterpolateAtPoints_Static(fe, tab, clPhi, &feGeometry, cp, &E[p * dim])); + PetscCall(PetscFEPushforward(fe, &feGeometry, 1, &E[p * dim])); + for (PetscInt d = 0; d < dim; ++d) { + E[p * dim + d] *= -2.0; + if (user->fake_1D && d > 0) E[p * dim + d] = 0; } } + PetscCall(DMPlexVecRestoreClosure(dm, NULL, locPhi, c, NULL, &clPhi)); + PetscCall(DMRestoreWorkArray(dm, Ncp * dim, MPIU_REAL, &pcoord)); + PetscCall(DMRestoreWorkArray(dm, Ncp * dim, MPIU_REAL, &refcoord)); + PetscCall(PetscTabulationDestroy(&tab)); + PetscCall(PetscFree(points)); + } + PetscCall(PetscFEDestroyCellGeometry(fe, &feGeometry)); + PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmSortRestoreAccess(sw)); + PetscCall(DMRestoreLocalVector(dm, &locPhi)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode ComputeFieldAtParticles(SNES snes, DM sw, PetscReal E[]) +{ + AppCtx *ctx; + PetscInt dim, Np; + + PetscFunctionBegin; + PetscValidHeaderSpecific(snes, SNES_CLASSID, 1); + PetscValidHeaderSpecific(sw, DM_CLASSID, 2); + PetscAssertPointer(E, 3); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMGetApplicationContext(sw, &ctx)); + PetscCall(PetscArrayzero(E, Np * dim)); + + switch (ctx->em) { + case EM_PRIMAL: + PetscCall(ComputeFieldAtParticles_Primal(snes, sw, E)); + break; + case EM_COULOMB: + PetscCall(ComputeFieldAtParticles_Coulomb(snes, sw, E)); + break; + case EM_MIXED: + PetscCall(ComputeFieldAtParticles_Mixed(snes, sw, E)); + break; + case EM_NONE: + break; + default: + SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No solver for electrostatic model %s", EMTypes[ctx->em]); } - PetscCall(DMSwarmRestoreField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords)); - PetscCall(DMSwarmRestoreField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid)); - PetscCall(DMSwarmRestoreField(*sw, "w_q", NULL, NULL, (void **)&vals)); - PetscCall(DMSwarmRestoreField(*sw, "kinematics", NULL, NULL, (void **)&initialConditions)); - PetscCall(PetscRandomDestroy(&rnd)); - PetscCall(PetscRandomDestroy(&rndp)); - PetscCall(PetscObjectSetName((PetscObject)*sw, "Particles")); - PetscCall(DMViewFromOptions(*sw, NULL, "-sw_view")); - PetscCall(DMLocalizeCoordinates(*sw)); PetscFunctionReturn(PETSC_SUCCESS); } -/* Solve for particle position updates */ -static PetscErrorCode RHSFunction1(TS ts, PetscReal t, Vec V, Vec Posres, void *ctx) +static PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec U, Vec G, void *ctx) { + DM sw; + SNES snes = ((AppCtx *)ctx)->snes; + const PetscReal *coords, *vel; + const PetscScalar *u; + PetscScalar *g; + PetscReal *E, m_p = 1., q_p = -1.; + PetscInt dim, d, Np, p; + + PetscFunctionBeginUser; + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel)); + PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(VecGetArrayRead(U, &u)); + PetscCall(VecGetArray(G, &g)); + + PetscCall(ComputeFieldAtParticles(snes, sw, E)); + + Np /= 2 * dim; + for (p = 0; p < Np; ++p) { + for (d = 0; d < 
dim; ++d) { + g[(p * 2 + 0) * dim + d] = u[(p * 2 + 1) * dim + d]; + g[(p * 2 + 1) * dim + d] = q_p * E[p * dim + d] / m_p; + } + } + PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel)); + PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E)); + PetscCall(VecRestoreArrayRead(U, &u)); + PetscCall(VecRestoreArray(G, &g)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +/* J_{ij} = dF_i/dx_j + J_p = ( 0 1) + (-w^2 0) + TODO Now there is another term with w^2 from the electric field. I think we will need to invert the operator. + Perhaps we can approximate the Jacobian using only the cellwise P-P gradient from Coulomb +*/ +static PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec U, Mat J, Mat P, void *ctx) +{ + DM sw; + const PetscReal *coords, *vel; + PetscInt dim, d, Np, p, rStart; + + PetscFunctionBeginUser; + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(MatGetOwnershipRange(J, &rStart, NULL)); + PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel)); + Np /= 2 * dim; + for (p = 0; p < Np; ++p) { + const PetscReal x0 = coords[p * dim + 0]; + const PetscReal vy0 = vel[p * dim + 1]; + const PetscReal omega = vy0 / x0; + PetscScalar vals[4] = {0., 1., -PetscSqr(omega), 0.}; + + for (d = 0; d < dim; ++d) { + const PetscInt rows[2] = {(p * 2 + 0) * dim + d + rStart, (p * 2 + 1) * dim + d + rStart}; + PetscCall(MatSetValues(J, 2, rows, 2, rows, vals, INSERT_VALUES)); + } + } + PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel)); + PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode RHSFunctionX(TS ts, PetscReal t, Vec V, Vec Xres, void *ctx) +{ + AppCtx *user = (AppCtx *)ctx; + DM sw; const PetscScalar *v; - PetscScalar *posres; - PetscInt Np, p, dim, d; - DM dm; + PetscScalar *xres; + PetscInt Np, p, d, dim; PetscFunctionBeginUser; - PetscCall(VecGetLocalSize(Posres, &Np)); - PetscCall(VecGetArray(Posres, &posres)); + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(VecGetLocalSize(Xres, &Np)); PetscCall(VecGetArrayRead(V, &v)); - PetscCall(TSGetDM(ts, &dm)); - PetscCall(DMGetDimension(dm, &dim)); + PetscCall(VecGetArray(Xres, &xres)); Np /= dim; for (p = 0; p < Np; ++p) { - for (d = 0; d < dim; ++d) posres[p * dim + d] = v[p * dim + d]; + for (d = 0; d < dim; ++d) { + xres[p * dim + d] = v[p * dim + d]; + if (user->fake_1D && d > 0) xres[p * dim + d] = 0; + } } PetscCall(VecRestoreArrayRead(V, &v)); - PetscCall(VecRestoreArray(Posres, &posres)); + PetscCall(VecRestoreArray(Xres, &xres)); PetscFunctionReturn(PETSC_SUCCESS); } -/* - Solve for the gradient of the electric field and apply force to particles. 
- */
-static PetscErrorCode RHSFunction2(TS ts, PetscReal t, Vec X, Vec Vres, void *ctx)
+static PetscErrorCode RHSFunctionV(TS ts, PetscReal t, Vec X, Vec Vres, void *ctx)
 {
+  DM sw;
   AppCtx *user = (AppCtx *)ctx;
-  DM dm, plex;
-  PetscDS prob;
-  PetscFE fe;
-  Mat M_p;
-  Vec phi, locPhi, rho, f;
+  SNES snes = ((AppCtx *)ctx)->snes;
   const PetscScalar *x;
+  const PetscReal *coords, *vel;
+  PetscReal *E, m_p, q_p;
   PetscScalar *vres;
-  PetscReal *coords, phi_0;
-  PetscInt dim, d, cStart, cEnd, cell, cdim;
-  PetscReal m_e = 9.11e-31, q_e = 1.60e-19, epsi_0 = 8.85e-12;
+  PetscInt Np, p, dim, d;
+  Parameter *param;

   PetscFunctionBeginUser;
-  PetscCall(PetscObjectSetName((PetscObject)X, "rhsf2 position"));
-  PetscCall(VecViewFromOptions(X, NULL, "-rhsf2_x_view"));
+  PetscCall(TSGetDM(ts, &sw));
+  PetscCall(DMGetDimension(sw, &dim));
+  PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords));
+  PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel));
+  PetscCall(DMSwarmGetField(sw, "E_field", NULL, NULL, (void **)&E));
+  PetscCall(PetscBagGetData(user->bag, (void **)&param));
+  m_p = user->masses[0] * param->m0;
+  q_p = user->charges[0] * param->q0;
+  PetscCall(VecGetLocalSize(Vres, &Np));
   PetscCall(VecGetArrayRead(X, &x));
   PetscCall(VecGetArray(Vres, &vres));
-  PetscCall(TSGetDM(ts, &dm));
-  PetscCall(DMGetDimension(dm, &dim));
-  PetscCall(SNESGetDM(user->snes, &plex));
-  PetscCall(DMGetCoordinateDim(plex, &cdim));
-  PetscCall(DMGetDS(plex, &prob));
-  PetscCall(PetscDSGetDiscretization(prob, 0, (PetscObject *)&fe));
-  PetscCall(DMGetGlobalVector(plex, &phi));
-  PetscCall(DMGetLocalVector(plex, &locPhi));
-  PetscCall(DMCreateMassMatrix(dm, plex, &M_p));
-  PetscCall(MatViewFromOptions(M_p, NULL, "-mp_view"));
-  PetscCall(DMGetGlobalVector(plex, &rho));
-  PetscCall(DMSwarmCreateGlobalVectorFromField(dm, "w_q", &f));
-  PetscCall(PetscObjectSetName((PetscObject)f, "weights vector"));
-  PetscCall(VecViewFromOptions(f, NULL, "-weights_view"));
-  PetscCall(MatMultTranspose(M_p, f, rho));
-  PetscCall(DMSwarmDestroyGlobalVectorFromField(dm, "w_q", &f));
-  PetscCall(PetscObjectSetName((PetscObject)rho, "rho"));
-  PetscCall(VecViewFromOptions(rho, NULL, "-poisson_rho_view"));
-  /* Take nullspace out of rhs */
-  {
-    PetscScalar sum;
-    PetscInt n;
-    phi_0 = (user->sigma * user->sigma * user->sigma) * (user->timeScale * user->timeScale) / (m_e * q_e * epsi_0);
-
-    PetscCall(VecGetSize(rho, &n));
-    PetscCall(VecSum(rho, &sum));
-    PetscCall(VecShift(rho, -sum / n));
+  PetscCheck(dim == 2, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Dimension must be 2");
+  PetscCall(ComputeFieldAtParticles(snes, sw, E));

-    PetscCall(VecSum(rho, &sum));
-    PetscCheck(PetscAbsScalar(sum) <= 1.0e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Charge should have no DC component %g", (double)PetscAbsScalar(sum));
-    PetscCall(VecScale(rho, phi_0));
-  }
-  PetscCall(VecSet(phi, 0.0));
-  PetscCall(SNESSolve(user->snes, rho, phi));
-  PetscCall(VecViewFromOptions(phi, NULL, "-phi_view"));
-  PetscCall(DMRestoreGlobalVector(plex, &rho));
-  PetscCall(MatDestroy(&M_p));
-  PetscCall(DMGlobalToLocalBegin(plex, phi, INSERT_VALUES, locPhi));
-  PetscCall(DMGlobalToLocalEnd(plex, phi, INSERT_VALUES, locPhi));
-  PetscCall(DMSwarmSortGetAccess(dm));
-  PetscCall(DMSwarmGetField(dm, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
-  PetscCall(DMPlexGetHeightStratum(plex, 0, &cStart, &cEnd));
-  for (cell = cStart; cell < cEnd; ++cell) {
-    PetscTabulation tab;
-    PetscReal v[3], J[9], invJ[9], detJ;
-    PetscScalar *ph = NULL;
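/*
  Aside (editor's sketch, not part of the patch): the removed code below tabulates basis
  derivatives at each particle's reference coordinate and contracts them with the cell
  closure of phi to obtain grad(phi), i.e. E = -grad(phi), at the particle. For a single
  1D linear element the contraction collapses to a constant difference quotient; a
  minimal standalone version in plain C with made-up values:
*/
#include <stdio.h>
static double grad_phi_p1(double phi0, double phi1, double x0, double x1)
{
  return (phi1 - phi0) / (x1 - x0); /* constant gradient of the linear interpolant */
}
int main(void)
{
  printf("E = %g\n", -grad_phi_p1(0.0, 1.0, 0.0, 0.5)); /* expect -2 */
  return 0;
}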
-    PetscReal *pcoord = NULL;
-    PetscReal *refcoord = NULL;
-    PetscInt *points = NULL, Ncp, cp;
-    PetscScalar gradPhi[3];
-
-    PetscCall(DMPlexComputeCellGeometryFEM(plex, cell, NULL, v, J, invJ, &detJ));
-    PetscCall(DMSwarmSortGetPointsPerCell(dm, cell, &Ncp, &points));
-    PetscCall(DMGetWorkArray(dm, Ncp * cdim, MPIU_REAL, &pcoord));
-    PetscCall(DMGetWorkArray(dm, Ncp * cdim, MPIU_REAL, &refcoord));
-    for (cp = 0; cp < Ncp; ++cp) {
-      for (d = 0; d < cdim; ++d) pcoord[cp * cdim + d] = coords[points[cp] * cdim + d];
+  Np /= dim;
+  for (p = 0; p < Np; ++p) {
+    for (d = 0; d < dim; ++d) {
+      vres[p * dim + d] = q_p * E[p * dim + d] / m_p;
+      if (user->fake_1D && d > 0) vres[p * dim + d] = 0.;
     }
-    PetscCall(DMPlexCoordinatesToReference(plex, cell, Ncp, pcoord, refcoord));
-    PetscCall(PetscFECreateTabulation(fe, 1, Ncp, refcoord, 1, &tab));
-    PetscCall(DMPlexVecGetClosure(plex, NULL, locPhi, cell, NULL, &ph));
-    for (cp = 0; cp < Ncp; ++cp) {
-      const PetscInt p = points[cp];
-      gradPhi[0] = 0.0;
-      gradPhi[1] = 0.0;
-      gradPhi[2] = 0.0;
-      const PetscReal *basisDer = tab->T[1];
+  }
+  PetscCall(VecRestoreArrayRead(X, &x));
+  /*
+    Synchronized, ordered output for parallel/sequential test cases.
+    In the 1D (on the 2D mesh) case, every y component should be zero.
+  */
+  if (user->checkVRes) {
+    PetscBool pr = user->checkVRes > 1 ? PETSC_TRUE : PETSC_FALSE;
+    PetscInt step;
-      PetscCall(PetscFEFreeInterpolateGradient_Static(fe, basisDer, ph, cdim, invJ, NULL, cp, gradPhi));
-      for (d = 0; d < cdim; ++d) vres[p * cdim + d] = d == 0 ? gradPhi[d] : 0.;
+    PetscCall(TSGetStepNumber(ts, &step));
+    if (pr) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "step: %" PetscInt_FMT "\n", step));
+    for (PetscInt p = 0; p < Np; ++p) {
+      if (pr) PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Residual: %.12g %.12g\n", (double)PetscRealPart(vres[p * dim + 0]), (double)PetscRealPart(vres[p * dim + 1])));
+      PetscCheck(PetscAbsScalar(vres[p * dim + 1]) < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Y velocity should be 0., not %g", (double)PetscRealPart(vres[p * dim + 1]));
     }
-    PetscCall(DMPlexVecRestoreClosure(plex, NULL, locPhi, cell, NULL, &ph));
-    PetscCall(PetscTabulationDestroy(&tab));
-    PetscCall(DMRestoreWorkArray(dm, Ncp * cdim, MPIU_REAL, &pcoord));
-    PetscCall(DMRestoreWorkArray(dm, Ncp * cdim, MPIU_REAL, &refcoord));
-    PetscCall(PetscFree(points));
+    if (pr) PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
   }
-  PetscCall(DMSwarmRestoreField(dm, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
-  PetscCall(DMSwarmSortRestoreAccess(dm));
-  PetscCall(DMRestoreLocalVector(plex, &locPhi));
-  PetscCall(DMRestoreGlobalVector(plex, &phi));
   PetscCall(VecRestoreArray(Vres, &vres));
-  PetscCall(VecRestoreArrayRead(X, &x));
-  PetscCall(VecViewFromOptions(Vres, NULL, "-vel_res_view"));
+  PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords));
+  PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel));
+  PetscCall(DMSwarmRestoreField(sw, "E_field", NULL, NULL, (void **)&E));
   PetscFunctionReturn(PETSC_SUCCESS);
 }

-int main(int argc, char **argv)
+static PetscErrorCode CreateSolution(TS ts)
 {
-  PetscInt i, par;
-  PetscInt locSize, p, d, dim, Np, step, *idx1, *idx2;
-  TS ts;
-  DM dm, sw;
-  AppCtx user;
-  MPI_Comm comm;
-  Vec coorVec, kinVec, probVec, solution, position, momentum;
-  const PetscScalar *coorArr, *kinArr;
-  PetscReal ftime = 10., *probArr, *probVecArr;
-  IS is1, is2;
-  PetscReal *coor, *kin, *pos, *mom;
+  DM sw;
+  Vec u;
+  PetscInt dim, Np;
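/*
  Aside (editor's sketch, not part of the patch): RHSFunctionX and RHSFunctionV are the
  two halves consumed by TSBASICSYMPLECTIC in the tests below; type 1 (symplectic Euler)
  updates the momentum first and then the position with the updated velocity. A
  standalone version of that step for the model force a(x) = -x, in plain C:
*/
#include <stdio.h>
int main(void)
{
  double x = 1.0, v = 0.0;
  const double dt = 0.1;
  for (int n = 0; n < 3; ++n) {
    v += dt * (-x); /* momentum update: vdot = a(x) */
    x += dt * v;    /* position update: xdot = v, using the new v */
    printf("step %d: x = %g, v = %g\n", n, x, v);
  }
  return 0;
}
- 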
PetscFunctionBeginUser; - PetscCall(PetscInitialize(&argc, &argv, NULL, help)); - comm = PETSC_COMM_WORLD; - PetscCall(ProcessOptions(comm, &user)); - /* Create dm and particles */ - PetscCall(CreateMesh(comm, &dm, &user)); - PetscCall(CreateFEM(dm, &user)); - PetscCall(CreateParticles(dm, &sw, &user)); - PetscCall(SNESCreate(comm, &user.snes)); - PetscCall(SNESSetDM(user.snes, dm)); - PetscCall(DMPlexSetSNESLocalFEM(dm, PETSC_FALSE, &user)); - PetscCall(SNESSetFromOptions(user.snes)); + PetscFunctionBegin; + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(VecCreate(PETSC_COMM_WORLD, &u)); + PetscCall(VecSetBlockSize(u, dim)); + PetscCall(VecSetSizes(u, 2 * Np * dim, PETSC_DECIDE)); + PetscCall(VecSetUp(u)); + PetscCall(TSSetSolution(ts, u)); + PetscCall(VecDestroy(&u)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode SetProblem(TS ts) +{ + AppCtx *user; + DM sw; + + PetscFunctionBegin; + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMGetApplicationContext(sw, (void **)&user)); + // Define unified system for (X, V) { - Mat J; - MatNullSpace nullSpace; - - PetscCall(DMCreateMatrix(dm, &J)); - PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)dm), PETSC_TRUE, 0, NULL, &nullSpace)); - PetscCall(MatSetNullSpace(J, nullSpace)); - PetscCall(MatNullSpaceDestroy(&nullSpace)); - PetscCall(SNESSetJacobian(user.snes, J, J, NULL, NULL)); + Mat J; + PetscInt dim, Np; + + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(MatCreate(PETSC_COMM_WORLD, &J)); + PetscCall(MatSetSizes(J, 2 * Np * dim, 2 * Np * dim, PETSC_DECIDE, PETSC_DECIDE)); + PetscCall(MatSetBlockSize(J, 2 * dim)); + PetscCall(MatSetFromOptions(J)); + PetscCall(MatSetUp(J)); + PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, user)); + PetscCall(TSSetRHSJacobian(ts, J, J, RHSJacobian, user)); PetscCall(MatDestroy(&J)); } - /* Place TSSolve in a loop to handle resetting the TS at every manual call of TSStep() */ - PetscCall(TSCreate(comm, &ts)); - PetscCall(TSSetMaxTime(ts, ftime)); - PetscCall(TSSetTimeStep(ts, user.stepSize)); - PetscCall(TSSetMaxSteps(ts, 100000)); - PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP)); - for (step = 0; step < user.steps; ++step) { - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "kinematics", &kinVec)); - PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &coorVec)); - PetscCall(VecViewFromOptions(kinVec, NULL, "-ic_vec_view")); + /* Define split system for X and V */ + { + Vec u; + IS isx, isv, istmp; + const PetscInt *idx; + PetscInt dim, Np, rstart; + + PetscCall(TSGetSolution(ts, &u)); PetscCall(DMGetDimension(sw, &dim)); - PetscCall(VecGetLocalSize(kinVec, &locSize)); - PetscCall(PetscMalloc1(locSize, &idx1)); - PetscCall(PetscMalloc1(locSize, &idx2)); - PetscCall(PetscMalloc1(2 * locSize, &probArr)); - Np = locSize / dim; - PetscCall(VecGetArrayRead(kinVec, &kinArr)); - PetscCall(VecGetArrayRead(coorVec, &coorArr)); - for (p = 0; p < Np; ++p) { - for (d = 0; d < dim; ++d) { - probArr[p * 2 * dim + d] = coorArr[p * dim + d]; - probArr[(p * 2 + 1) * dim + d] = kinArr[p * dim + d]; + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(VecGetOwnershipRange(u, &rstart, NULL)); + PetscCall(ISCreateStride(PETSC_COMM_WORLD, Np, (rstart / dim) + 0, 2, &istmp)); + PetscCall(ISGetIndices(istmp, &idx)); + PetscCall(ISCreateBlock(PETSC_COMM_WORLD, dim, Np, idx, PETSC_COPY_VALUES, &isx)); + PetscCall(ISRestoreIndices(istmp, &idx)); + 
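/*
  Aside (editor's sketch, not part of the patch): the stride-then-block construction
  above selects the even (position) and odd (momentum) blocks of the interleaved
  solution vector: ISCreateStride enumerates the block indices rstart/dim + 2p (+1 for
  momentum), and ISCreateBlock expands each block index b into the point indices
  b*dim .. b*dim + dim - 1. The same index arithmetic, standalone in plain C:
*/
#include <stdio.h>
int main(void)
{
  const int dim = 2, Np = 3, rstart = 0;
  for (int p = 0; p < Np; ++p) {
    const int bx = rstart / dim + 2 * p;     /* even block: position of particle p */
    const int bv = rstart / dim + 2 * p + 1; /* odd block: momentum of particle p  */
    for (int d = 0; d < dim; ++d) printf("p=%d d=%d x-index=%d v-index=%d\n", p, d, bx * dim + d, bv * dim + d);
  }
  return 0;
}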
+    PetscCall(ISDestroy(&istmp));
+    PetscCall(ISCreateStride(PETSC_COMM_WORLD, Np, (rstart / dim) + 1, 2, &istmp));
+    PetscCall(ISGetIndices(istmp, &idx));
+    PetscCall(ISCreateBlock(PETSC_COMM_WORLD, dim, Np, idx, PETSC_COPY_VALUES, &isv));
+    PetscCall(ISRestoreIndices(istmp, &idx));
+    PetscCall(ISDestroy(&istmp));
+    PetscCall(TSRHSSplitSetIS(ts, "position", isx));
+    PetscCall(TSRHSSplitSetIS(ts, "momentum", isv));
+    PetscCall(ISDestroy(&isx));
+    PetscCall(ISDestroy(&isv));
+    PetscCall(TSRHSSplitSetRHSFunction(ts, "position", NULL, RHSFunctionX, user));
+    PetscCall(TSRHSSplitSetRHSFunction(ts, "momentum", NULL, RHSFunctionV, user));
+  }
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+static PetscErrorCode DMSwarmTSRedistribute(TS ts)
+{
+  DM sw;
+  Vec u;
+  PetscReal t, maxt, dt;
+  PetscInt n, maxn;
+
+  PetscFunctionBegin;
+  PetscCall(TSGetDM(ts, &sw));
+  PetscCall(TSGetTime(ts, &t));
+  PetscCall(TSGetMaxTime(ts, &maxt));
+  PetscCall(TSGetTimeStep(ts, &dt));
+  PetscCall(TSGetStepNumber(ts, &n));
+  PetscCall(TSGetMaxSteps(ts, &maxn));
+
+  PetscCall(TSReset(ts));
+  PetscCall(TSSetDM(ts, sw));
+  PetscCall(TSSetFromOptions(ts));
+  PetscCall(TSSetTime(ts, t));
+  PetscCall(TSSetMaxTime(ts, maxt));
+  PetscCall(TSSetTimeStep(ts, dt));
+  PetscCall(TSSetStepNumber(ts, n));
+  PetscCall(TSSetMaxSteps(ts, maxn));
+
+  PetscCall(CreateSolution(ts));
+  PetscCall(SetProblem(ts));
+  PetscCall(TSGetSolution(ts, &u));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+PetscErrorCode line(PetscInt dim, PetscReal time, const PetscReal dummy[], PetscInt p, PetscScalar x[], void *ctx)
+{
+  DM sw, cdm;
+  PetscInt Np;
+  PetscReal low[2], high[2];
+  AppCtx *user = (AppCtx *)ctx;
+
+  sw = user->swarm;
+  PetscCall(DMSwarmGetCellDM(sw, &cdm));
+  // Get the bounding box so we can equally space the particles
+  PetscCall(DMGetLocalBoundingBox(cdm, low, high));
+  PetscCall(DMSwarmGetLocalSize(sw, &Np));
+  // shift it by h/2 so nothing is initialized directly on a boundary
+  x[0] = ((high[0] - low[0]) / Np) * (p + 0.5);
+  x[1] = 0.;
+  return PETSC_SUCCESS;
+}
+
+/*
+  InitializeSolveAndSwarm - Set the solution values to the swarm coordinates and velocities, and also possibly set the initial values.
+
+  Input Parameters:
++ ts - The TS
+- useInitial - Flag to also set the initial conditions to the current coordinates and velocities and set up the problem
+
+  Output Parameters:
+. 
u - The initialized solution vector + + Level: advanced + +.seealso: InitializeSolve() +*/ +static PetscErrorCode InitializeSolveAndSwarm(TS ts, PetscBool useInitial) +{ + DM sw; + Vec u, gc, gv, gc0, gv0; + IS isx, isv; + PetscInt dim; + AppCtx *user; + + PetscFunctionBeginUser; + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMGetApplicationContext(sw, &user)); + PetscCall(DMGetDimension(sw, &dim)); + if (useInitial) { + PetscReal v0[2] = {1., 0.}; + if (user->perturbed_weights) { + PetscCall(InitializeParticles_PerturbedWeights(sw, user)); + } else { + PetscCall(DMSwarmComputeLocalSizeFromOptions(sw)); + PetscCall(DMSwarmInitializeCoordinates(sw)); + if (user->fake_1D) { + PetscCall(InitializeVelocities_Fake1D(sw, user)); + } else { + PetscCall(DMSwarmInitializeVelocitiesFromOptions(sw, v0)); } } - PetscCall(VecRestoreArrayRead(kinVec, &kinArr)); - PetscCall(VecRestoreArrayRead(coorVec, &coorArr)); - /* Allocate for IS Strides that will contain x, y and vx, vy */ - for (p = 0; p < Np; ++p) { - for (d = 0; d < dim; ++d) { - idx1[p * dim + d] = (p * 2 + 0) * dim + d; - idx2[p * dim + d] = (p * 2 + 1) * dim + d; - } + PetscCall(DMSwarmMigrate(sw, PETSC_TRUE)); + PetscCall(DMSwarmTSRedistribute(ts)); + } + PetscCall(TSGetSolution(ts, &u)); + PetscCall(TSRHSSplitGetIS(ts, "position", &isx)); + PetscCall(TSRHSSplitGetIS(ts, "momentum", &isv)); + PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); + PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "initCoordinates", &gc0)); + PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "velocity", &gv)); + PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "initVelocity", &gv0)); + if (useInitial) { + PetscCall(VecCopy(gc, gc0)); + PetscCall(VecCopy(gv, gv0)); + } + PetscCall(VecISCopy(u, isx, SCATTER_FORWARD, gc)); + PetscCall(VecISCopy(u, isv, SCATTER_FORWARD, gv)); + PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc)); + PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "initCoordinates", &gc0)); + PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "velocity", &gv)); + PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "initVelocity", &gv0)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode InitializeSolve(TS ts, Vec u) +{ + PetscFunctionBegin; + PetscCall(TSSetSolution(ts, u)); + PetscCall(InitializeSolveAndSwarm(ts, PETSC_TRUE)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode ComputeError(TS ts, Vec U, Vec E) +{ + MPI_Comm comm; + DM sw; + AppCtx *user; + const PetscScalar *u; + const PetscReal *coords, *vel; + PetscScalar *e; + PetscReal t; + PetscInt dim, Np, p; + + PetscFunctionBeginUser; + PetscCall(PetscObjectGetComm((PetscObject)ts, &comm)); + PetscCall(TSGetDM(ts, &sw)); + PetscCall(DMGetApplicationContext(sw, &user)); + PetscCall(DMGetDimension(sw, &dim)); + PetscCall(TSGetSolveTime(ts, &t)); + PetscCall(VecGetArray(E, &e)); + PetscCall(VecGetArrayRead(U, &u)); + PetscCall(DMSwarmGetLocalSize(sw, &Np)); + PetscCall(DMSwarmGetField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmGetField(sw, "initVelocity", NULL, NULL, (void **)&vel)); + Np /= 2 * dim; + for (p = 0; p < Np; ++p) { + /* TODO generalize initial conditions and project into plane instead of assuming x-y */ + const PetscReal r0 = DMPlex_NormD_Internal(dim, &coords[p * dim]); + const PetscReal th0 = PetscAtan2Real(coords[p * dim + 1], coords[p * dim + 0]); + const PetscReal v0 = DMPlex_NormD_Internal(dim, &vel[p * dim]); + const PetscReal omega = v0 / r0; + const PetscReal ct = 
PetscCosReal(omega * t + th0); + const PetscReal st = PetscSinReal(omega * t + th0); + const PetscScalar *x = &u[(p * 2 + 0) * dim]; + const PetscScalar *v = &u[(p * 2 + 1) * dim]; + const PetscReal xe[3] = {r0 * ct, r0 * st, 0.0}; + const PetscReal ve[3] = {-v0 * st, v0 * ct, 0.0}; + PetscInt d; + + for (d = 0; d < dim; ++d) { + e[(p * 2 + 0) * dim + d] = x[d] - xe[d]; + e[(p * 2 + 1) * dim + d] = v[d] - ve[d]; } + if (user->error) { + const PetscReal en = 0.5 * DMPlex_DotRealD_Internal(dim, v, v); + const PetscReal exen = 0.5 * PetscSqr(v0); + PetscCall(PetscPrintf(comm, "t %.4g: p%" PetscInt_FMT " error [%.2g %.2g] sol [(%.6lf %.6lf) (%.6lf %.6lf)] exact [(%.6lf %.6lf) (%.6lf %.6lf)] energy/exact energy %g / %g (%.10lf%%)\n", (double)t, p, (double)DMPlex_NormD_Internal(dim, &e[(p * 2 + 0) * dim]), (double)DMPlex_NormD_Internal(dim, &e[(p * 2 + 1) * dim]), (double)x[0], (double)x[1], (double)v[0], (double)v[1], (double)xe[0], (double)xe[1], (double)ve[0], (double)ve[1], (double)en, (double)exen, (double)(PetscAbsReal(exen - en) * 100. / exen))); + } + } + PetscCall(DMSwarmRestoreField(sw, "initCoordinates", NULL, NULL, (void **)&coords)); + PetscCall(DMSwarmRestoreField(sw, "initVelocity", NULL, NULL, (void **)&vel)); + PetscCall(VecRestoreArrayRead(U, &u)); + PetscCall(VecRestoreArray(E, &e)); + PetscFunctionReturn(PETSC_SUCCESS); +} + +static PetscErrorCode MigrateParticles(TS ts) +{ + DM sw, cdm; + const PetscReal *L; - PetscCall(ISCreateGeneral(comm, locSize, idx1, PETSC_OWN_POINTER, &is1)); - PetscCall(ISCreateGeneral(comm, locSize, idx2, PETSC_OWN_POINTER, &is2)); - /* DM needs to be set before splits so it propagates to sub TSs */ - PetscCall(TSSetDM(ts, sw)); - PetscCall(TSSetType(ts, TSBASICSYMPLECTIC)); - PetscCall(TSRHSSplitSetIS(ts, "position", is1)); - PetscCall(TSRHSSplitSetIS(ts, "momentum", is2)); - PetscCall(TSRHSSplitSetRHSFunction(ts, "position", NULL, RHSFunction1, &user)); - PetscCall(TSRHSSplitSetRHSFunction(ts, "momentum", NULL, RHSFunction2, &user)); - PetscCall(TSSetTime(ts, step * user.stepSize)); - if (step == 0) PetscCall(TSSetFromOptions(ts)); - /* Compose vector from array for TS solve with all kinematic variables */ - PetscCall(VecCreate(comm, &probVec)); - PetscCall(VecSetBlockSize(probVec, 1)); - PetscCall(VecSetSizes(probVec, PETSC_DECIDE, 2 * locSize)); - PetscCall(VecSetUp(probVec)); - PetscCall(VecGetArray(probVec, &probVecArr)); - for (i = 0; i < 2 * locSize; ++i) probVecArr[i] = probArr[i]; - PetscCall(VecRestoreArray(probVec, &probVecArr)); - PetscCall(TSSetSolution(ts, probVec)); - PetscCall(PetscFree(probArr)); - PetscCall(VecViewFromOptions(kinVec, NULL, "-ic_view")); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "kinematics", &kinVec)); - PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &coorVec)); - PetscCall(TSMonitor(ts, step, ts->ptime, ts->vec_sol)); - if (!ts->steprollback) PetscCall(TSPreStep(ts)); - PetscCall(TSStep(ts)); - if (ts->steprollback) PetscCall(TSPostEvaluate(ts)); - if (!ts->steprollback) { - PetscCall(TSPostStep(ts)); - PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coor)); - PetscCall(DMSwarmGetField(sw, "kinematics", NULL, NULL, (void **)&kin)); - PetscCall(TSGetSolution(ts, &solution)); - PetscCall(VecGetSubVector(solution, is1, &position)); - PetscCall(VecGetSubVector(solution, is2, &momentum)); - PetscCall(VecGetArray(position, &pos)); - PetscCall(VecGetArray(momentum, &mom)); - for (par = 0; par < Np; ++par) { - for (d = 0; d < dim; ++d) { - if (pos[par * dim + 
d] < 0.) {
-          coor[par * dim + d] = pos[par * dim + d] + 2. * PETSC_PI;
-        } else if (pos[par * dim + d] > 2. * PETSC_PI) {
-          coor[par * dim + d] = pos[par * dim + d] - 2. * PETSC_PI;
+  PetscFunctionBeginUser;
+  PetscCall(TSGetDM(ts, &sw));
+  PetscCall(DMViewFromOptions(sw, NULL, "-migrate_view_pre"));
+  {
+    Vec u, gc, gv, position, momentum;
+    IS isx, isv;
+    PetscReal *pos, *mom;
+
+    PetscCall(TSGetSolution(ts, &u));
+    PetscCall(TSRHSSplitGetIS(ts, "position", &isx));
+    PetscCall(TSRHSSplitGetIS(ts, "momentum", &isv));
+    PetscCall(VecGetSubVector(u, isx, &position));
+    PetscCall(VecGetSubVector(u, isv, &momentum));
+    PetscCall(VecGetArray(position, &pos));
+    PetscCall(VecGetArray(momentum, &mom));
+    PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc));
+    PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "velocity", &gv));
+    PetscCall(VecISCopy(u, isx, SCATTER_REVERSE, gc));
+    PetscCall(VecISCopy(u, isv, SCATTER_REVERSE, gv));
+
+    PetscCall(DMSwarmGetCellDM(sw, &cdm));
+    PetscCall(DMGetPeriodicity(cdm, NULL, NULL, &L));
+    if (L && (L[0] > 0. || L[1] > 0.)) { /* wrap only when the mesh is periodic in some direction */
+      PetscReal *x, *v, upper[3], lower[3];
+      PetscInt Np, dim;
+
+      PetscCall(DMSwarmGetLocalSize(sw, &Np));
+      PetscCall(DMGetDimension(cdm, &dim));
+      PetscCall(DMGetBoundingBox(cdm, lower, upper));
+      PetscCall(VecGetArray(gc, &x));
+      PetscCall(VecGetArray(gv, &v));
+      for (PetscInt p = 0; p < Np; ++p) {
+        for (PetscInt d = 0; d < dim; ++d) {
+          if (pos[p * dim + d] < lower[d]) {
+            x[p * dim + d] = pos[p * dim + d] + (upper[d] - lower[d]);
+          } else if (pos[p * dim + d] > upper[d]) {
+            x[p * dim + d] = pos[p * dim + d] - (upper[d] - lower[d]);
           } else {
-            coor[par * dim + d] = pos[par * dim + d];
+            x[p * dim + d] = pos[p * dim + d];
           }
-          kin[par * dim + d] = mom[par * dim + d];
+          PetscCheck(x[p * dim + d] >= lower[d] && x[p * dim + d] <= upper[d], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "p: %" PetscInt_FMT " x[%" PetscInt_FMT "] %g", p, d, (double)x[p * dim + d]);
+          v[p * dim + d] = mom[p * dim + d];
         }
       }
-      PetscCall(VecRestoreArray(position, &pos));
-      PetscCall(VecRestoreArray(momentum, &mom));
-      PetscCall(VecRestoreSubVector(solution, is1, &position));
-      PetscCall(VecRestoreSubVector(solution, is2, &momentum));
-      PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coor));
-      PetscCall(DMSwarmRestoreField(sw, "kinematics", NULL, NULL, (void **)&kin));
+      PetscCall(VecRestoreArray(gc, &x));
+      PetscCall(VecRestoreArray(gv, &v));
     }
-    PetscCall(DMSwarmMigrate(sw, PETSC_TRUE));
-    PetscCall(DMLocalizeCoordinates(sw));
-    PetscCall(TSReset(ts));
-    PetscCall(VecDestroy(&probVec));
-    PetscCall(ISDestroy(&is1));
-    PetscCall(ISDestroy(&is2));
+    PetscCall(VecRestoreArray(position, &pos));
+    PetscCall(VecRestoreArray(momentum, &mom));
+    PetscCall(VecRestoreSubVector(u, isx, &position));
+    PetscCall(VecRestoreSubVector(u, isv, &momentum));
+    PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "velocity", &gv));
+    PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &gc));
   }
+  PetscCall(DMSwarmMigrate(sw, PETSC_TRUE));
+  PetscCall(DMSwarmTSRedistribute(ts));
+  PetscCall(InitializeSolveAndSwarm(ts, PETSC_FALSE));
+  PetscFunctionReturn(PETSC_SUCCESS);
+}
+
+int main(int argc, char **argv)
+{
+  DM dm, sw;
+  TS ts;
+  Vec u;
+  PetscReal dt;
+  PetscInt maxn;
+  AppCtx user;
+
+  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
+  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
+  PetscCall(PetscBagCreate(PETSC_COMM_SELF, sizeof(Parameter), &user.bag));
+  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
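/*
  Aside (editor's sketch, not part of the patch): MigrateParticles above wraps a
  coordinate that left a periodic box back into [lower, upper] by shifting it one
  period L = upper - lower; the PetscCheck enforces the implicit assumption that a
  particle moves less than one period per step. A standalone version in plain C:
*/
#include <stdio.h>
static double wrap(double x, double lower, double upper)
{
  const double L = upper - lower; /* period of the box in this dimension */
  if (x < lower) return x + L;
  if (x > upper) return x - L;
  return x;
}
int main(void)
{
  printf("%g %g %g\n", wrap(-0.25, 0., 1.), wrap(1.1, 0., 1.), wrap(0.4, 0., 1.)); /* 0.75 0.1 0.4 */
  return 0;
}
+ 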
PetscCall(CreatePoisson(dm, &user)); + PetscCall(CreateSwarm(dm, &user, &sw)); + PetscCall(SetupParameters(PETSC_COMM_WORLD, &user)); + PetscCall(InitializeConstants(sw, &user)); + PetscCall(DMSetApplicationContext(sw, &user)); + + PetscCall(TSCreate(PETSC_COMM_WORLD, &ts)); + PetscCall(TSSetProblemType(ts, TS_NONLINEAR)); + PetscCall(TSSetDM(ts, sw)); + PetscCall(TSSetMaxTime(ts, 0.1)); + PetscCall(TSSetTimeStep(ts, 0.00001)); + PetscCall(TSSetMaxSteps(ts, 100)); + PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP)); + + if (user.efield_monitor) PetscCall(TSMonitorSet(ts, MonitorEField, &user, NULL)); + if (user.initial_monitor) PetscCall(TSMonitorSet(ts, MonitorInitialConditions, &user, NULL)); + if (user.monitor_positions) PetscCall(TSMonitorSet(ts, MonitorPositions_2D, &user, NULL)); + if (user.poisson_monitor) PetscCall(TSMonitorSet(ts, MonitorPoisson, &user, NULL)); + + PetscCall(TSSetFromOptions(ts)); + PetscCall(TSGetTimeStep(ts, &dt)); + PetscCall(TSGetMaxSteps(ts, &maxn)); + user.steps = maxn; + user.stepSize = dt; + PetscCall(SetupContext(dm, sw, &user)); + PetscCall(DMSwarmVectorDefineField(sw, "velocity")); + PetscCall(TSSetComputeInitialCondition(ts, InitializeSolve)); + PetscCall(TSSetComputeExactError(ts, ComputeError)); + PetscCall(TSSetPostStep(ts, MigrateParticles)); + PetscCall(CreateSolution(ts)); + PetscCall(TSGetSolution(ts, &u)); + PetscCall(TSComputeInitialCondition(ts, u)); + PetscCall(CheckNonNegativeWeights(sw, &user)); + PetscCall(TSSolve(ts, NULL)); + PetscCall(SNESDestroy(&user.snes)); PetscCall(TSDestroy(&ts)); PetscCall(DMDestroy(&sw)); PetscCall(DMDestroy(&dm)); + PetscCall(DestroyContext(&user)); PetscCall(PetscFinalize()); return 0; } @@ -476,25 +2089,142 @@ int main(int argc, char **argv) /*TEST build: - requires: triangle !single !complex - test: - suffix: bsiq3 - args: -particlesPerCell 200\ - -petscspace_degree 2\ - -petscfe_default_quadrature_order 3\ - -ts_basicsymplectic_type {{1 2 3}}\ - -pc_type svd\ - -uniform\ - -sigma 1.0e-8\ - -timeScale 2.0e-14\ - -stepSize 1.0e-2\ - -ts_monitor_sp_swarm\ - -steps 10\ - -dm_view\ - -dm_plex_simplex 0 -dm_plex_dim 2\ - -dm_plex_box_lower 0,-1\ - -dm_plex_box_upper 6.283185307179586,1\ - -dm_plex_box_bd periodic,none\ - -dm_plex_box_faces 4,1 - output_file: output/ex2_bsiq3.out + requires: !complex double + + # This tests that we can put particles in a box and compute the Coulomb force + # Recommend -draw_size 500,500 + testset: + requires: defined(PETSC_HAVE_EXECUTABLE_EXPORT) + args: -dm_plex_dim 2 -fake_1D -dm_plex_simplex 0 -dm_plex_box_faces 20,1 \ + -dm_plex_box_lower 0,-1 -dm_plex_box_upper 12.5664,1 \ + -dm_plex_box_bd periodic,none \ + -dm_swarm_coordinate_density constant -dm_swarm_num_particles 100 \ + -sigma 1.0e-8 -timeScale 2.0e-14 \ + -ts_type basicsymplectic -ts_basicsymplectic_type 1 \ + -ts_monitor_sp_swarm -ts_monitor_sp_swarm_retain 0 -ts_monitor_sp_swarm_phase 0 \ + -output_step 50 -check_vel_res + test: + suffix: none_1d + args: -em_type none -error + test: + suffix: coulomb_1d + args: -em_type coulomb + + # for viewers + #-ts_monitor_sp_swarm_phase -ts_monitor_sp_swarm -em_snes_monitor -ts_monitor_sp_swarm_multi_species 0 -ts_monitor_sp_swarm_retain 0 + testset: + nsize: {{1 2}} + requires: defined(PETSC_HAVE_EXECUTABLE_EXPORT) + args: -dm_plex_dim 2 -fake_1D -dm_plex_simplex 0 -dm_plex_box_faces 36,1 \ + -dm_plex_box_lower 0.,-0.5 -dm_plex_box_upper 12.5664,0.5 \ + -dm_plex_box_bd periodic,none \ + -vdm_plex_dim 1 -vdm_plex_simplex 0 -vdm_plex_box_faces 10 \ + 
-vdm_plex_box_lower -3 -vdm_plex_box_upper 3 \ + -dm_swarm_num_species 1 -dm_swarm_num_particles 360 \ + -twostream -charges -1.,1. -sigma 1.0e-8 \ + -cosine_coefficients 0.01,0.5 -perturbed_weights -total_weight 1. \ + -ts_type basicsymplectic -ts_basicsymplectic_type 2 \ + -ts_dt 0.01 -ts_max_time 5 -ts_max_steps 10 \ + -em_snes_atol 1.e-15 -em_snes_error_if_not_converged -em_ksp_error_if_not_converged \ + -output_step 1 -check_vel_res + test: + suffix: two_stream_c0 + args: -em_type primal -petscfe_default_quadrature_order 2 -petscspace_degree 2 -em_pc_type svd + test: + suffix: two_stream_rt + requires: superlu_dist + args: -em_type mixed \ + -potential_petscspace_degree 0 \ + -potential_petscdualspace_lagrange_use_moments \ + -potential_petscdualspace_lagrange_moment_order 2 \ + -field_petscspace_degree 2 -field_petscfe_default_quadrature_order 1 \ + -field_petscspace_type sum \ + -field_petscspace_variables 2 \ + -field_petscspace_components 2 \ + -field_petscspace_sum_spaces 2 \ + -field_petscspace_sum_concatenate true \ + -field_sumcomp_0_petscspace_variables 2 \ + -field_sumcomp_0_petscspace_type tensor \ + -field_sumcomp_0_petscspace_tensor_spaces 2 \ + -field_sumcomp_0_petscspace_tensor_uniform false \ + -field_sumcomp_0_tensorcomp_0_petscspace_degree 1 \ + -field_sumcomp_0_tensorcomp_1_petscspace_degree 0 \ + -field_sumcomp_1_petscspace_variables 2 \ + -field_sumcomp_1_petscspace_type tensor \ + -field_sumcomp_1_petscspace_tensor_spaces 2 \ + -field_sumcomp_1_petscspace_tensor_uniform false \ + -field_sumcomp_1_tensorcomp_0_petscspace_degree 0 \ + -field_sumcomp_1_tensorcomp_1_petscspace_degree 1 \ + -field_petscdualspace_form_degree -1 \ + -field_petscdualspace_order 1 \ + -field_petscdualspace_lagrange_trimmed true \ + -em_snes_error_if_not_converged \ + -em_ksp_type preonly -em_ksp_error_if_not_converged \ + -em_pc_type fieldsplit -em_pc_fieldsplit_type schur \ + -em_pc_fieldsplit_schur_fact_type full -em_pc_fieldsplit_schur_precondition full \ + -em_fieldsplit_field_pc_type lu \ + -em_fieldsplit_field_pc_factor_mat_solver_type superlu_dist \ + -em_fieldsplit_potential_pc_type svd + + # For verification, we use + # -dm_plex_box_faces 100,1 -vdm_plex_box_faces 8000 -dm_swarm_num_particles 800000 + # -ts_monitor_sp_swarm_multi_species 0 -ts_monitor_sp_swarm_retain 0 -ts_monitor_sp_swarm_phase 1 -draw_size 500,500 + testset: + nsize: {{1 2}} + requires: defined(PETSC_HAVE_EXECUTABLE_EXPORT) + args: -dm_plex_dim 2 -fake_1D -dm_plex_simplex 0 -dm_plex_box_faces 10,1 \ + -dm_plex_box_lower 0.,-0.5 -dm_plex_box_upper 12.5664,0.5 \ + -dm_plex_box_bd periodic,none \ + -vdm_plex_dim 1 -vdm_plex_simplex 0 -vdm_plex_box_faces 10 \ + -vdm_plex_box_lower -10 -vdm_plex_box_upper 10 \ + -dm_swarm_num_species 1 -dm_swarm_num_particles 100 \ + -charges -1.,1. \ + -cosine_coefficients 0.01,0.5 -perturbed_weights -total_weight 1. 
\ + -ts_type basicsymplectic -ts_basicsymplectic_type 1 \ + -ts_dt 0.03 -ts_max_time 500 -ts_max_steps 1 \ + -em_snes_atol 1.e-12 -em_snes_error_if_not_converged -em_ksp_error_if_not_converged \ + -output_step 1 -check_vel_res + + test: + suffix: uniform_equilibrium_1d + args: -cosine_coefficients 0.0,0.5 -em_type primal -petscspace_degree 1 -em_pc_type svd + test: + suffix: uniform_primal_1d + args: -em_type primal -petscspace_degree 1 -em_pc_type svd + test: + requires: superlu_dist + suffix: uniform_mixed_1d + args: -em_type mixed \ + -potential_petscspace_degree 0 \ + -potential_petscdualspace_lagrange_use_moments \ + -potential_petscdualspace_lagrange_moment_order 2 \ + -field_petscspace_degree 2 -field_petscfe_default_quadrature_order 1 \ + -field_petscspace_type sum \ + -field_petscspace_variables 2 \ + -field_petscspace_components 2 \ + -field_petscspace_sum_spaces 2 \ + -field_petscspace_sum_concatenate true \ + -field_sumcomp_0_petscspace_variables 2 \ + -field_sumcomp_0_petscspace_type tensor \ + -field_sumcomp_0_petscspace_tensor_spaces 2 \ + -field_sumcomp_0_petscspace_tensor_uniform false \ + -field_sumcomp_0_tensorcomp_0_petscspace_degree 1 \ + -field_sumcomp_0_tensorcomp_1_petscspace_degree 0 \ + -field_sumcomp_1_petscspace_variables 2 \ + -field_sumcomp_1_petscspace_type tensor \ + -field_sumcomp_1_petscspace_tensor_spaces 2 \ + -field_sumcomp_1_petscspace_tensor_uniform false \ + -field_sumcomp_1_tensorcomp_0_petscspace_degree 0 \ + -field_sumcomp_1_tensorcomp_1_petscspace_degree 1 \ + -field_petscdualspace_form_degree -1 \ + -field_petscdualspace_order 1 \ + -field_petscdualspace_lagrange_trimmed true \ + -em_snes_error_if_not_converged \ + -em_ksp_type preonly -em_ksp_error_if_not_converged \ + -em_pc_type fieldsplit -em_pc_fieldsplit_type schur \ + -em_pc_fieldsplit_schur_fact_type full -em_pc_fieldsplit_schur_precondition full \ + -em_fieldsplit_field_pc_type lu \ + -em_fieldsplit_field_pc_factor_mat_solver_type superlu_dist \ + -em_fieldsplit_potential_pc_type svd + TEST*/ diff --git a/src/ts/tutorials/hamiltonian/output/ex2_bsiq3.out b/src/ts/tutorials/hamiltonian/output/ex2_bsiq3.out deleted file mode 100644 index 0df96d635bb..00000000000 --- a/src/ts/tutorials/hamiltonian/output/ex2_bsiq3.out +++ /dev/null @@ -1,12 +0,0 @@ -DM Object: box 1 MPI process - type: plex -box in 2 dimensions: - Number of 0-cells per rank: 8 - Number of 1-cells per rank: 12 - Number of 2-cells per rank: 4 -Periodic mesh (PERIODIC, NONE) coordinates localized -Labels: - marker: 1 strata with value/size (1 (16)) - Face Sets: 2 strata with value/size (1 (4), 3 (4)) - depth: 3 strata with value/size (0 (8), 1 (12), 2 (4)) - celltype: 3 strata with value/size (4 (4), 0 (8), 1 (12)) diff --git a/src/ts/tutorials/hamiltonian/output/ex2_coulomb_1d.out b/src/ts/tutorials/hamiltonian/output/ex2_coulomb_1d.out new file mode 100644 index 00000000000..eab6dc23eef --- /dev/null +++ b/src/ts/tutorials/hamiltonian/output/ex2_coulomb_1d.out @@ -0,0 +1,3 @@ +dim = 2 totalWeight = 1.000000, user->charges[species[0]] = -1.000000 totalCharge = -1.000000, Total Area = 12.566400 +sigma: 0.0795773 +(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) diff --git a/src/ts/tutorials/hamiltonian/output/ex2_none_1d.out b/src/ts/tutorials/hamiltonian/output/ex2_none_1d.out new file mode 100644 index 00000000000..eab6dc23eef --- /dev/null +++ b/src/ts/tutorials/hamiltonian/output/ex2_none_1d.out @@ -0,0 +1,3 @@ +dim = 2 
totalWeight = 1.000000, user->charges[species[0]] = -1.000000 totalCharge = -1.000000, Total Area = 12.566400 +sigma: 0.0795773 +(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) diff --git a/src/ts/tutorials/hamiltonian/output/ex2_two_stream_c0.out b/src/ts/tutorials/hamiltonian/output/ex2_two_stream_c0.out new file mode 100644 index 00000000000..96193284c83 --- /dev/null +++ b/src/ts/tutorials/hamiltonian/output/ex2_two_stream_c0.out @@ -0,0 +1,79 @@ +Global Np = 360 +c:0 [x_a,x_b] = 0.000000000000000,0.349066666666667 -> cell weight = 0.352539638258674 +c:1 [x_a,x_b] = 0.349066666666667,0.698133333333333 -> cell weight = 0.352434113281987 +c:2 [x_a,x_b] = 0.698133333333333,1.047200000000000 -> cell weight = 0.352226269666591 +c:3 [x_a,x_b] = 1.047200000000000,1.396266666666667 -> cell weight = 0.351922422665025 +c:4 [x_a,x_b] = 1.396266666666667,1.745333333333333 -> cell weight = 0.351531804557746 +c:5 [x_a,x_b] = 1.745333333333333,2.094400000000000 -> cell weight = 0.351066284133648 +c:6 [x_a,x_b] = 2.094400000000000,2.443466666666667 -> cell weight = 0.350540006061240 +c:7 [x_a,x_b] = 2.443466666666667,2.792533333333333 -> cell weight = 0.349968961108035 +c:8 [x_a,x_b] = 2.792533333333333,3.141600000000000 -> cell weight = 0.349370500266919 +c:9 [x_a,x_b] = 3.141600000000000,3.490666666666666 -> cell weight = 0.348762807552540 +c:10 [x_a,x_b] = 3.490666666666666,3.839733333333333 -> cell weight = 0.348164347486654 +c:11 [x_a,x_b] = 3.839733333333333,4.188800000000000 -> cell weight = 0.347593304060354 +c:12 [x_a,x_b] = 4.188800000000000,4.537866666666666 -> cell weight = 0.347067028220130 +c:13 [x_a,x_b] = 4.537866666666666,4.886933333333333 -> cell weight = 0.346601510665674 +c:14 [x_a,x_b] = 4.886933333333333,5.236000000000000 -> cell weight = 0.346210895978298 +c:15 [x_a,x_b] = 5.236000000000000,5.585066666666666 -> cell weight = 0.345907052842987 +c:16 [x_a,x_b] = 5.585066666666666,5.934133333333333 -> cell weight = 0.345699213422721 +c:17 [x_a,x_b] = 5.934133333333333,6.283199999999999 -> cell weight = 0.345593692842573 +c:18 [x_a,x_b] = 6.283199999999999,6.632266666666666 -> cell weight = 0.345593697306934 +c:19 [x_a,x_b] = 6.632266666666666,6.981333333333333 -> cell weight = 0.345699226680153 +c:20 [x_a,x_b] = 6.981333333333333,7.330399999999999 -> cell weight = 0.345907074490668 +c:21 [x_a,x_b] = 7.330399999999999,7.679466666666666 -> cell weight = 0.346210925358473 +c:22 [x_a,x_b] = 7.679466666666666,8.028533333333332 -> cell weight = 0.346601546885634 +c:23 [x_a,x_b] = 8.028533333333332,8.377599999999999 -> cell weight = 0.347067070179348 +c:24 [x_a,x_b] = 8.377599999999999,8.726666666666667 -> cell weight = 0.347593350483914 +c:25 [x_a,x_b] = 8.726666666666667,9.075733333333332 -> cell weight = 0.348164396963990 +c:26 [x_a,x_b] = 9.075733333333332,9.424799999999999 -> cell weight = 0.348762858580305 +c:27 [x_a,x_b] = 9.424799999999999,9.773866666666667 -> cell weight = 0.349370551294651 +c:28 [x_a,x_b] = 9.773866666666667,10.122933333333332 -> cell weight = 0.349969010585274 +c:29 [x_a,x_b] = 10.122933333333332,10.472000000000000 -> cell weight = 0.350540052484640 +c:30 [x_a,x_b] = 10.472000000000000,10.821066666666665 -> cell weight = 0.351066326092649 +c:31 [x_a,x_b] = 10.821066666666665,11.170133333333332 -> cell weight = 0.351531840777441 +c:32 [x_a,x_b] = 11.170133333333332,11.519200000000000 -> cell weight = 0.351922452044892 +c:33 [x_a,x_b] = 
11.519200000000000,11.868266666666665 -> cell weight = 0.352226291313931 +c:34 [x_a,x_b] = 11.868266666666665,12.217333333333332 -> cell weight = 0.352434126539057 +c:35 [x_a,x_b] = 12.217333333333332,12.566400000000000 -> cell weight = 0.352539642722659 +particle weight sum = 12.5664002939 cell weight sum = 12.1983192887 +dim = 2 totalWeight = 12.198319, user->charges[species[0]] = -1.000000 totalCharge = -12.198319, Total Area = 12.566400 +sigma: 0.970709 +(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) +Global Np = 360 +c:0 [x_a,x_b] = 0.000000000000000,0.349066666666667 -> cell weight = 0.352539638258674 +c:1 [x_a,x_b] = 0.349066666666667,0.698133333333333 -> cell weight = 0.352434113281987 +c:2 [x_a,x_b] = 0.698133333333333,1.047200000000000 -> cell weight = 0.352226269666591 +c:3 [x_a,x_b] = 1.047200000000000,1.396266666666667 -> cell weight = 0.351922422665025 +c:4 [x_a,x_b] = 1.396266666666667,1.745333333333333 -> cell weight = 0.351531804557746 +c:5 [x_a,x_b] = 1.745333333333333,2.094400000000000 -> cell weight = 0.351066284133648 +c:6 [x_a,x_b] = 2.094400000000000,2.443466666666667 -> cell weight = 0.350540006061240 +c:7 [x_a,x_b] = 2.443466666666667,2.792533333333333 -> cell weight = 0.349968961108035 +c:8 [x_a,x_b] = 2.792533333333333,3.141600000000000 -> cell weight = 0.349370500266919 +c:9 [x_a,x_b] = 3.141600000000000,3.490666666666666 -> cell weight = 0.348762807552540 +c:10 [x_a,x_b] = 3.490666666666666,3.839733333333333 -> cell weight = 0.348164347486654 +c:11 [x_a,x_b] = 3.839733333333333,4.188800000000000 -> cell weight = 0.347593304060354 +c:12 [x_a,x_b] = 4.188800000000000,4.537866666666666 -> cell weight = 0.347067028220130 +c:13 [x_a,x_b] = 4.537866666666666,4.886933333333333 -> cell weight = 0.346601510665674 +c:14 [x_a,x_b] = 4.886933333333333,5.236000000000000 -> cell weight = 0.346210895978298 +c:15 [x_a,x_b] = 5.236000000000000,5.585066666666666 -> cell weight = 0.345907052842987 +c:16 [x_a,x_b] = 5.585066666666666,5.934133333333333 -> cell weight = 0.345699213422721 +c:17 [x_a,x_b] = 5.934133333333333,6.283199999999999 -> cell weight = 0.345593692842573 +c:18 [x_a,x_b] = 6.283199999999999,6.632266666666666 -> cell weight = 0.345593697306934 +c:19 [x_a,x_b] = 6.632266666666666,6.981333333333333 -> cell weight = 0.345699226680153 +c:20 [x_a,x_b] = 6.981333333333333,7.330399999999999 -> cell weight = 0.345907074490668 +c:21 [x_a,x_b] = 7.330399999999999,7.679466666666666 -> cell weight = 0.346210925358473 +c:22 [x_a,x_b] = 7.679466666666666,8.028533333333332 -> cell weight = 0.346601546885634 +c:23 [x_a,x_b] = 8.028533333333332,8.377599999999999 -> cell weight = 0.347067070179348 +c:24 [x_a,x_b] = 8.377599999999999,8.726666666666667 -> cell weight = 0.347593350483914 +c:25 [x_a,x_b] = 8.726666666666667,9.075733333333332 -> cell weight = 0.348164396963990 +c:26 [x_a,x_b] = 9.075733333333332,9.424799999999999 -> cell weight = 0.348762858580305 +c:27 [x_a,x_b] = 9.424799999999999,9.773866666666667 -> cell weight = 0.349370551294651 +c:28 [x_a,x_b] = 9.773866666666667,10.122933333333332 -> cell weight = 0.349969010585274 +c:29 [x_a,x_b] = 10.122933333333332,10.472000000000000 -> cell weight = 0.350540052484640 +c:30 [x_a,x_b] = 10.472000000000000,10.821066666666665 -> cell weight = 0.351066326092649 +c:31 [x_a,x_b] = 10.821066666666665,11.170133333333332 -> cell weight = 0.351531840777441 +c:32 [x_a,x_b] = 11.170133333333332,11.519200000000000 -> cell weight = 
0.351922452044892 +c:33 [x_a,x_b] = 11.519200000000000,11.868266666666665 -> cell weight = 0.352226291313931 +c:34 [x_a,x_b] = 11.868266666666665,12.217333333333332 -> cell weight = 0.352434126539057 +c:35 [x_a,x_b] = 12.217333333333332,12.566400000000000 -> cell weight = 0.352539642722659 +particle weight sum = 12.5664002939 cell weight sum = 12.1983192887 diff --git a/src/ts/tutorials/hamiltonian/output/ex2_two_stream_rt.out b/src/ts/tutorials/hamiltonian/output/ex2_two_stream_rt.out new file mode 100644 index 00000000000..96193284c83 --- /dev/null +++ b/src/ts/tutorials/hamiltonian/output/ex2_two_stream_rt.out @@ -0,0 +1,79 @@ +Global Np = 360 +c:0 [x_a,x_b] = 0.000000000000000,0.349066666666667 -> cell weight = 0.352539638258674 +c:1 [x_a,x_b] = 0.349066666666667,0.698133333333333 -> cell weight = 0.352434113281987 +c:2 [x_a,x_b] = 0.698133333333333,1.047200000000000 -> cell weight = 0.352226269666591 +c:3 [x_a,x_b] = 1.047200000000000,1.396266666666667 -> cell weight = 0.351922422665025 +c:4 [x_a,x_b] = 1.396266666666667,1.745333333333333 -> cell weight = 0.351531804557746 +c:5 [x_a,x_b] = 1.745333333333333,2.094400000000000 -> cell weight = 0.351066284133648 +c:6 [x_a,x_b] = 2.094400000000000,2.443466666666667 -> cell weight = 0.350540006061240 +c:7 [x_a,x_b] = 2.443466666666667,2.792533333333333 -> cell weight = 0.349968961108035 +c:8 [x_a,x_b] = 2.792533333333333,3.141600000000000 -> cell weight = 0.349370500266919 +c:9 [x_a,x_b] = 3.141600000000000,3.490666666666666 -> cell weight = 0.348762807552540 +c:10 [x_a,x_b] = 3.490666666666666,3.839733333333333 -> cell weight = 0.348164347486654 +c:11 [x_a,x_b] = 3.839733333333333,4.188800000000000 -> cell weight = 0.347593304060354 +c:12 [x_a,x_b] = 4.188800000000000,4.537866666666666 -> cell weight = 0.347067028220130 +c:13 [x_a,x_b] = 4.537866666666666,4.886933333333333 -> cell weight = 0.346601510665674 +c:14 [x_a,x_b] = 4.886933333333333,5.236000000000000 -> cell weight = 0.346210895978298 +c:15 [x_a,x_b] = 5.236000000000000,5.585066666666666 -> cell weight = 0.345907052842987 +c:16 [x_a,x_b] = 5.585066666666666,5.934133333333333 -> cell weight = 0.345699213422721 +c:17 [x_a,x_b] = 5.934133333333333,6.283199999999999 -> cell weight = 0.345593692842573 +c:18 [x_a,x_b] = 6.283199999999999,6.632266666666666 -> cell weight = 0.345593697306934 +c:19 [x_a,x_b] = 6.632266666666666,6.981333333333333 -> cell weight = 0.345699226680153 +c:20 [x_a,x_b] = 6.981333333333333,7.330399999999999 -> cell weight = 0.345907074490668 +c:21 [x_a,x_b] = 7.330399999999999,7.679466666666666 -> cell weight = 0.346210925358473 +c:22 [x_a,x_b] = 7.679466666666666,8.028533333333332 -> cell weight = 0.346601546885634 +c:23 [x_a,x_b] = 8.028533333333332,8.377599999999999 -> cell weight = 0.347067070179348 +c:24 [x_a,x_b] = 8.377599999999999,8.726666666666667 -> cell weight = 0.347593350483914 +c:25 [x_a,x_b] = 8.726666666666667,9.075733333333332 -> cell weight = 0.348164396963990 +c:26 [x_a,x_b] = 9.075733333333332,9.424799999999999 -> cell weight = 0.348762858580305 +c:27 [x_a,x_b] = 9.424799999999999,9.773866666666667 -> cell weight = 0.349370551294651 +c:28 [x_a,x_b] = 9.773866666666667,10.122933333333332 -> cell weight = 0.349969010585274 +c:29 [x_a,x_b] = 10.122933333333332,10.472000000000000 -> cell weight = 0.350540052484640 +c:30 [x_a,x_b] = 10.472000000000000,10.821066666666665 -> cell weight = 0.351066326092649 +c:31 [x_a,x_b] = 10.821066666666665,11.170133333333332 -> cell weight = 0.351531840777441 +c:32 [x_a,x_b] = 
11.170133333333332,11.519200000000000 -> cell weight = 0.351922452044892 +c:33 [x_a,x_b] = 11.519200000000000,11.868266666666665 -> cell weight = 0.352226291313931 +c:34 [x_a,x_b] = 11.868266666666665,12.217333333333332 -> cell weight = 0.352434126539057 +c:35 [x_a,x_b] = 12.217333333333332,12.566400000000000 -> cell weight = 0.352539642722659 +particle weight sum = 12.5664002939 cell weight sum = 12.1983192887 +dim = 2 totalWeight = 12.198319, user->charges[species[0]] = -1.000000 totalCharge = -12.198319, Total Area = 12.566400 +sigma: 0.970709 +(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) +Global Np = 360 +c:0 [x_a,x_b] = 0.000000000000000,0.349066666666667 -> cell weight = 0.352539638258674 +c:1 [x_a,x_b] = 0.349066666666667,0.698133333333333 -> cell weight = 0.352434113281987 +c:2 [x_a,x_b] = 0.698133333333333,1.047200000000000 -> cell weight = 0.352226269666591 +c:3 [x_a,x_b] = 1.047200000000000,1.396266666666667 -> cell weight = 0.351922422665025 +c:4 [x_a,x_b] = 1.396266666666667,1.745333333333333 -> cell weight = 0.351531804557746 +c:5 [x_a,x_b] = 1.745333333333333,2.094400000000000 -> cell weight = 0.351066284133648 +c:6 [x_a,x_b] = 2.094400000000000,2.443466666666667 -> cell weight = 0.350540006061240 +c:7 [x_a,x_b] = 2.443466666666667,2.792533333333333 -> cell weight = 0.349968961108035 +c:8 [x_a,x_b] = 2.792533333333333,3.141600000000000 -> cell weight = 0.349370500266919 +c:9 [x_a,x_b] = 3.141600000000000,3.490666666666666 -> cell weight = 0.348762807552540 +c:10 [x_a,x_b] = 3.490666666666666,3.839733333333333 -> cell weight = 0.348164347486654 +c:11 [x_a,x_b] = 3.839733333333333,4.188800000000000 -> cell weight = 0.347593304060354 +c:12 [x_a,x_b] = 4.188800000000000,4.537866666666666 -> cell weight = 0.347067028220130 +c:13 [x_a,x_b] = 4.537866666666666,4.886933333333333 -> cell weight = 0.346601510665674 +c:14 [x_a,x_b] = 4.886933333333333,5.236000000000000 -> cell weight = 0.346210895978298 +c:15 [x_a,x_b] = 5.236000000000000,5.585066666666666 -> cell weight = 0.345907052842987 +c:16 [x_a,x_b] = 5.585066666666666,5.934133333333333 -> cell weight = 0.345699213422721 +c:17 [x_a,x_b] = 5.934133333333333,6.283199999999999 -> cell weight = 0.345593692842573 +c:18 [x_a,x_b] = 6.283199999999999,6.632266666666666 -> cell weight = 0.345593697306934 +c:19 [x_a,x_b] = 6.632266666666666,6.981333333333333 -> cell weight = 0.345699226680153 +c:20 [x_a,x_b] = 6.981333333333333,7.330399999999999 -> cell weight = 0.345907074490668 +c:21 [x_a,x_b] = 7.330399999999999,7.679466666666666 -> cell weight = 0.346210925358473 +c:22 [x_a,x_b] = 7.679466666666666,8.028533333333332 -> cell weight = 0.346601546885634 +c:23 [x_a,x_b] = 8.028533333333332,8.377599999999999 -> cell weight = 0.347067070179348 +c:24 [x_a,x_b] = 8.377599999999999,8.726666666666667 -> cell weight = 0.347593350483914 +c:25 [x_a,x_b] = 8.726666666666667,9.075733333333332 -> cell weight = 0.348164396963990 +c:26 [x_a,x_b] = 9.075733333333332,9.424799999999999 -> cell weight = 0.348762858580305 +c:27 [x_a,x_b] = 9.424799999999999,9.773866666666667 -> cell weight = 0.349370551294651 +c:28 [x_a,x_b] = 9.773866666666667,10.122933333333332 -> cell weight = 0.349969010585274 +c:29 [x_a,x_b] = 10.122933333333332,10.472000000000000 -> cell weight = 0.350540052484640 +c:30 [x_a,x_b] = 10.472000000000000,10.821066666666665 -> cell weight = 0.351066326092649 +c:31 [x_a,x_b] = 10.821066666666665,11.170133333333332 -> cell weight = 
0.351531840777441 +c:32 [x_a,x_b] = 11.170133333333332,11.519200000000000 -> cell weight = 0.351922452044892 +c:33 [x_a,x_b] = 11.519200000000000,11.868266666666665 -> cell weight = 0.352226291313931 +c:34 [x_a,x_b] = 11.868266666666665,12.217333333333332 -> cell weight = 0.352434126539057 +c:35 [x_a,x_b] = 12.217333333333332,12.566400000000000 -> cell weight = 0.352539642722659 +particle weight sum = 12.5664002939 cell weight sum = 12.1983192887 diff --git a/src/dm/impls/swarm/tests/output/ex9_uniform_equilibrium_1d.out b/src/ts/tutorials/hamiltonian/output/ex2_uniform_equilibrium_1d.out similarity index 80% rename from src/dm/impls/swarm/tests/output/ex9_uniform_equilibrium_1d.out rename to src/ts/tutorials/hamiltonian/output/ex2_uniform_equilibrium_1d.out index c225fdf71b3..f02f9f7f184 100644 --- a/src/dm/impls/swarm/tests/output/ex9_uniform_equilibrium_1d.out +++ b/src/ts/tutorials/hamiltonian/output/ex2_uniform_equilibrium_1d.out @@ -1,16 +1,4 @@ -DM Object: box 1 MPI process - type: plex -box in 2 dimensions: - Number of 0-cells per rank: 20 - Number of 1-cells per rank: 30 - Number of 2-cells per rank: 10 -Periodic mesh (PERIODIC, NONE) coordinates localized -Labels: - marker: 1 strata with value/size (1 (40)) - Face Sets: 2 strata with value/size (1 (10), 3 (10)) - depth: 3 strata with value/size (0 (20), 1 (30), 2 (10)) - celltype: 3 strata with value/size (4 (10), 0 (20), 1 (30)) -Np = 100 +Global Np = 100 c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.256640000000000 c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.256640000000000 c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256640000000000 @@ -22,10 +10,10 @@ c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.25664000 c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.256640000000001 c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.256639999999999 particle weight sum = 12.5664000000 cell weight sum = 12.5664000000 -dim = 2 totalWeight = 12.566400, user->charges[species[p]] = -1.000000 totalCharge = -12.566400, Total Area = 12.566400 +dim = 2 totalWeight = 12.566400, user->charges[species[0]] = -1.000000 totalCharge = -12.566400, Total Area = 12.566400 sigma: 1. 
(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) -Np = 100 +Global Np = 100 c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.256640000000000 c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.256640000000000 c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256640000000000 diff --git a/src/dm/impls/swarm/tests/output/ex9_uniform_mixed_1d.out b/src/ts/tutorials/hamiltonian/output/ex2_uniform_mixed_1d.out similarity index 62% rename from src/dm/impls/swarm/tests/output/ex9_uniform_mixed_1d.out rename to src/ts/tutorials/hamiltonian/output/ex2_uniform_mixed_1d.out index 5e3bfaf3beb..233172b7275 100644 --- a/src/dm/impls/swarm/tests/output/ex9_uniform_mixed_1d.out +++ b/src/ts/tutorials/hamiltonian/output/ex2_uniform_mixed_1d.out @@ -1,39 +1,27 @@ -DM Object: box 1 MPI process - type: plex -box in 2 dimensions: - Number of 0-cells per rank: 20 - Number of 1-cells per rank: 30 - Number of 2-cells per rank: 10 -Periodic mesh (PERIODIC, NONE) coordinates localized -Labels: - marker: 1 strata with value/size (1 (40)) - Face Sets: 2 strata with value/size (1 (10), 3 (10)) - depth: 3 strata with value/size (0 (20), 1 (30), 2 (10)) - celltype: 3 strata with value/size (4 (10), 0 (20), 1 (30)) -Np = 100 -c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268216514538342 -c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263432611863840 -c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256054150597222 -c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.248899464902083 -c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244701413893048 -c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.245063517620130 -c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249847464141595 -c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.257225940223554 -c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.264380606043426 -c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268578610078297 +Global Np = 100 +c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268395728819320 +c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263905419667826 +c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256639954596585 +c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.249374506867984 +c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244884243120081 +c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.244884299241914 +c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249374653796755 +c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.256640136210245 +c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.263905566595815 +c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268395784939884 particle weight sum = 12.5664002939 cell weight sum = 12.5664002939 -dim = 2 totalWeight = 12.566400, user->charges[species[p]] = -1.000000 totalCharge = -12.566400, Total Area = 12.566400 +dim = 2 totalWeight = 12.566400, user->charges[species[0]] = -1.000000 totalCharge = -12.566400, Total Area = 12.566400 sigma: 1. 
(x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) -Np = 100 -c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268216514538342 -c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263432611863840 -c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256054150597222 -c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.248899464902083 -c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244701413893048 -c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.245063517620130 -c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249847464141595 -c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.257225940223554 -c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.264380606043426 -c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268578610078297 +Global Np = 100 +c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268395728819320 +c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263905419667826 +c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256639954596585 +c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.249374506867984 +c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244884243120081 +c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.244884299241914 +c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249374653796755 +c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.256640136210245 +c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.263905566595815 +c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268395784939884 particle weight sum = 12.5664002939 cell weight sum = 12.5664002939 diff --git a/src/dm/impls/swarm/tests/output/ex9_uniform_none_1d.out b/src/ts/tutorials/hamiltonian/output/ex2_uniform_primal_1d.out similarity index 62% rename from src/dm/impls/swarm/tests/output/ex9_uniform_none_1d.out rename to src/ts/tutorials/hamiltonian/output/ex2_uniform_primal_1d.out index 5e3bfaf3beb..233172b7275 100644 --- a/src/dm/impls/swarm/tests/output/ex9_uniform_none_1d.out +++ b/src/ts/tutorials/hamiltonian/output/ex2_uniform_primal_1d.out @@ -1,39 +1,27 @@ -DM Object: box 1 MPI process - type: plex -box in 2 dimensions: - Number of 0-cells per rank: 20 - Number of 1-cells per rank: 30 - Number of 2-cells per rank: 10 -Periodic mesh (PERIODIC, NONE) coordinates localized -Labels: - marker: 1 strata with value/size (1 (40)) - Face Sets: 2 strata with value/size (1 (10), 3 (10)) - depth: 3 strata with value/size (0 (20), 1 (30), 2 (10)) - celltype: 3 strata with value/size (4 (10), 0 (20), 1 (30)) -Np = 100 -c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268216514538342 -c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263432611863840 -c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256054150597222 -c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.248899464902083 -c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244701413893048 -c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.245063517620130 -c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 
1.249847464141595 -c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.257225940223554 -c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.264380606043426 -c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268578610078297 +Global Np = 100 +c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268395728819320 +c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263905419667826 +c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256639954596585 +c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.249374506867984 +c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244884243120081 +c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.244884299241914 +c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249374653796755 +c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.256640136210245 +c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.263905566595815 +c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268395784939884 particle weight sum = 12.5664002939 cell weight sum = 12.5664002939 -dim = 2 totalWeight = 12.566400, user->charges[species[p]] = -1.000000 totalCharge = -12.566400, Total Area = 12.566400 +dim = 2 totalWeight = 12.566400, user->charges[species[0]] = -1.000000 totalCharge = -12.566400, Total Area = 12.566400 sigma: 1. (x0,v0,t0,m0,q0,phi0): (1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 1.000000e+00, 0.000000e+00) - (P, V) = (1.000000e+00, 1.000000e+00) -Np = 100 -c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268216514538342 -c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263432611863840 -c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256054150597222 -c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.248899464902083 -c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244701413893048 -c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.245063517620130 -c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249847464141595 -c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.257225940223554 -c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.264380606043426 -c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268578610078297 +Global Np = 100 +c:0 [x_a,x_b] = 0.000000000000000,1.256640000000000 -> cell weight = 1.268395728819320 +c:1 [x_a,x_b] = 1.256640000000000,2.513280000000000 -> cell weight = 1.263905419667826 +c:2 [x_a,x_b] = 2.513280000000000,3.769920000000000 -> cell weight = 1.256639954596585 +c:3 [x_a,x_b] = 3.769920000000000,5.026560000000000 -> cell weight = 1.249374506867984 +c:4 [x_a,x_b] = 5.026560000000000,6.283200000000000 -> cell weight = 1.244884243120081 +c:5 [x_a,x_b] = 6.283200000000000,7.539840000000000 -> cell weight = 1.244884299241914 +c:6 [x_a,x_b] = 7.539840000000000,8.796479999999999 -> cell weight = 1.249374653796755 +c:7 [x_a,x_b] = 8.796479999999999,10.053120000000000 -> cell weight = 1.256640136210245 +c:8 [x_a,x_b] = 10.053120000000000,11.309760000000001 -> cell weight = 1.263905566595815 +c:9 [x_a,x_b] = 11.309760000000001,12.566400000000000 -> cell weight = 1.268395784939884 particle weight sum = 12.5664002939 cell weight sum = 12.5664002939 diff --git 
a/src/ts/tutorials/output/ex11_shock_0.out b/src/ts/tutorials/output/ex11_shock_0.out index 1ff8b3eac44..286465ca1d1 100644 --- a/src/ts/tutorials/output/ex11_shock_0.out +++ b/src/ts/tutorials/output/ex11_shock_0.out @@ -7,51 +7,8 @@ VecTagger Object: (coarsen_) 1 MPI process Block size: 1 absolute box=[ 0.,0.01 ] PhysicsCreate_Euler set Euler type: iv_shock - 0 time 0 |x| 14.53 Density [ 1, 3] int 4.27039 Energy [ 2.5, 14.53237] int 11.0137 - 1 time 0.0109 |x| 14.53 Density [ 1, 3] int 4.314568 Energy [ 2.5, 14.53334] int 11.32709 - 2 time 0.02179 |x| 14.54 Density [ 1, 3] int 4.358745 Energy [ 2.5, 14.53528] int 11.64047 - 3 time 0.03269 |x| 14.54 Density [ 1, 3] int 4.402923 Energy [ 2.5, 14.53833] int 11.95386 - 4 time 0.04359 |x| 14.53 Density [ 1, 3] int 4.4471 Energy [ 2.5, 14.53237] int 12.26725 - 5 time 0.05449 |x| 14.53 Density [ 1, 3] int 4.491278 Energy [ 2.5, 14.53237] int 12.58064 - 6 time 0.06538 |x| 14.53 Density [ 1, 3] int 4.535456 Energy [ 2.5, 14.53237] int 12.89402 - 7 time 0.07628 |x| 14.53 Density [ 1, 3.000002] int 4.579633 Energy [ 2.5, 14.53237] int 13.20741 - 8 time 0.08718 |x| 14.53 Density [ 1, 3.000008] int 4.623811 Energy [ 2.5, 14.53237] int 13.5208 - 9 time 0.09807 |x| 14.53 Density [ 1, 3.000024] int 4.667989 Energy [ 2.5, 14.53237] int 13.83418 - 10 time 0.109 |x| 14.53 Density [ 1, 3.000067] int 4.712167 Energy [ 2.5, 14.53237] int 14.14757 - 11 time 0.1199 |x| 14.53 Density [ 1, 3.000189] int 4.756344 Energy [ 2.5, 14.53237] int 14.46096 - 12 time 0.1308 |x| 14.54 Density [ 1.000001, 3.000527] int 4.800523 Energy [ 2.5, 14.5404] int 14.77435 - 13 time 0.1417 |x| 14.56 Density [ 1.000001, 3.001352] int 4.8447 Energy [ 2.5, 14.55982] int 15.08774 - 14 time 0.1526 |x| 14.57 Density [ 1.000002, 3.003123] int 4.888878 Energy [ 2.5, 14.57314] int 15.40113 - 15 time 0.1635 |x| 14.59 Density [ 1.000004, 3.006915] int 4.933057 Energy [ 2.5, 14.5863] int 15.71452 - 16 time 0.1744 |x| 14.59 Density [ 1.000007, 3.016685] int 4.977234 Energy [ 2.5, 14.59239] int 16.02791 - 17 time 0.1853 |x| 14.6 Density [ 1.000012, 3.064361] int 5.021412 Energy [ 2.5, 14.60359] int 16.34129 - 18 time 0.1961 |x| 14.61 Density [ 1.00002, 3.301828] int 5.06559 Energy [ 2.5, 14.60829] int 16.65469 - 19 time 0.207 |x| 14.62 Density [ 1.000029, 3.826729] int 5.109769 Energy [ 2.5, 14.61892] int 16.96808 - 20 time 0.2179 |x| 14.62 Density [ 1.000043, 4.232344] int 5.153946 Energy [ 2.5, 14.62241] int 17.28146 - 21 time 0.2288 |x| 14.63 Density [ 1.000064, 4.831494] int 5.198124 Energy [ 2.5, 14.63091] int 17.59486 - 22 time 0.2397 |x| 15.37 Density [ 1.000094, 5.333088] int 5.242302 Energy [ 2.5, 15.36564] int 17.90824 - 23 time 0.2506 |x| 16.1 Density [ 1.00014, 5.745357] int 5.286479 Energy [ 2.5, 16.09537] int 18.22163 - 24 time 0.2615 |x| 16.64 Density [ 1.000211, 6.104925] int 5.330658 Energy [ 2.5, 16.6423] int 18.53502 - 25 time 0.2724 |x| 17.1 Density [ 1.000322, 6.418691] int 5.374839 Energy [ 2.5, 17.0986] int 18.84842 - 26 time 0.2833 |x| 17.48 Density [ 1.00049, 6.708512] int 5.419016 Energy [ 2.5, 17.48134] int 19.1618 - 27 time 0.2942 |x| 17.82 Density [ 1.000747, 6.968005] int 5.463193 Energy [ 2.5, 17.81597] int 19.47519 - 28 time 0.3051 |x| 18.12 Density [ 1.001157, 7.207842] int 5.507372 Energy [ 2.5, 18.1185] int 19.78857 - 29 time 0.316 |x| 18.4 Density [ 1.001741, 7.433855] int 5.55155 Energy [ 2.5, 18.39909] int 20.10196 - 30 time 0.3269 |x| 18.67 Density [ 1.002573, 7.649363] int 5.595728 Energy [ 2.5, 18.67175] int 20.41535 - 31 time 0.3378 |x| 18.93 Density [ 
1.003963, 7.845387] int 5.639908 Energy [ 2.500001, 18.92902] int 20.72874 - 32 time 0.3487 |x| 19.17 Density [ 1.006524, 8.020984] int 5.684085 Energy [ 2.500001, 19.16706] int 21.04213 - 33 time 0.3596 |x| 19.39 Density [ 1.011076, 8.184492] int 5.728262 Energy [ 2.500001, 19.3855] int 21.35552 - 34 time 0.3705 |x| 19.59 Density [ 1.019343, 8.33076] int 5.77244 Energy [ 2.500002, 19.59226] int 21.6689 - 35 time 0.3814 |x| 19.78 Density [ 1.035333, 8.464896] int 5.816618 Energy [ 2.500002, 19.78003] int 21.98229 - 36 time 0.3923 |x| 19.95 Density [ 1.08115, 8.58635] int 5.860796 Energy [ 2.500003, 19.95495] int 22.29568 - 37 time 0.4032 |x| 20.12 Density [ 1.205791, 8.699499] int 5.904974 Energy [ 2.500004, 20.11533] int 22.60906 - 38 time 0.4141 |x| 20.29 Density [ 1.45509, 8.801011] int 5.949152 Energy [ 2.500005, 20.28664] int 22.92245 - 39 time 0.425 |x| 20.44 Density [ 1.806314, 8.893649] int 5.99333 Energy [ 2.500006, 20.44127] int 23.23584 - 40 time 0.4359 |x| 20.58 Density [ 2.182612, 8.977421] int 6.037507 Energy [ 2.500008, 20.5844] int 23.54923 - 41 time 0.4468 |x| 20.72 Density [ 2.518563, 9.055758] int 6.081685 Energy [ 2.500011, 20.71508] int 23.86261 - 42 time 0.4577 |x| 20.84 Density [ 2.604091, 9.133686] int 6.125864 Energy [ 2.500014, 20.83866] int 24.176 - 43 time 0.4686 |x| 20.95 Density [ 2.604919, 9.202759] int 6.170041 Energy [ 2.500019, 20.95059] int 24.48938 - 44 time 0.4795 |x| 21.07 Density [ 2.60588, 9.265996] int 6.214219 Energy [ 2.500025, 21.07212] int 24.80277 - 45 time 0.4904 |x| 21.21 Density [ 2.606325, 9.322801] int 6.258397 Energy [ 2.500033, 21.20571] int 25.11616 - 46 time 0.5013 |x| 21.33 Density [ 2.606997, 9.377228] int 6.302575 Energy [ 2.500034, 21.32504] int 25.42955 -CONVERGED_TIME at time 0.501271 after 46 steps + 0 time 0 |x| 14.53 Density [ 1, 3] int 4.211732 Energy [ 2.5, 14.53237] int 10.59759 + 1 time 0.1939 |x| 14.53 Density [ 1.297135, 3.414373] int 4.997713 Energy [ 2.50037, 14.53043] int 16.17323 + 2 time 0.3878 |x| 14.53 Density [ 2.152667, 4.284056] int 5.776744 Energy [ 2.711631, 14.53093] int 21.74058 + 3 time 0.5816 |x| 15.58 Density [ 2.548056, 5.015044] int 6.372278 Energy [ 6.749956, 15.57513] int 26.99454 +CONVERGED_ITS at time 0.581635 after 3 steps diff --git a/src/ts/utils/dmlocalts.c b/src/ts/utils/dmlocalts.c index 2a43abc3947..5729e61a354 100644 --- a/src/ts/utils/dmlocalts.c +++ b/src/ts/utils/dmlocalts.c @@ -434,7 +434,7 @@ PetscErrorCode DMTSSetRHSFunctionLocal(DM dm, PetscErrorCode (*func)(DM, PetscRe PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMTSCreateRHSMassMatrix - This creates the mass matrix associated with the given `DM`, and a solver to invert it, and stores them in the `DM` context. Collective @@ -469,7 +469,7 @@ PetscErrorCode DMTSCreateRHSMassMatrix(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMTSCreateRHSMassMatrixLumped - This creates the lumped mass matrix associated with the given `DM`, and a solver to invert it, and stores them in the `DM` context. Collective @@ -500,7 +500,7 @@ PetscErrorCode DMTSCreateRHSMassMatrixLumped(DM dm) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMTSDestroyRHSMassMatrix - Destroys the mass matrix and solver stored in the `DM` context, if they exist. 
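Many of the hunks below flip manual-page markers between /*@C and /*@. In PETSc's documentation convention, /*@C marks a routine whose Fortran stub must be hand-written (typically because of char * arguments, function-pointer arguments, or viewer patching), while a plain /*@ lets the binding generator emit the Fortran interface automatically; that is why each /*@C to /*@ flip in this series is paired with the deletion of a ftn-custom/z*f.c stub, and why ISPairToList later gains the C marker instead. A minimal sketch of the convention, with MyRoutine a hypothetical name:

  /*@
    MyRoutine - summary line; the plain marker requests an auto-generated Fortran interface

    Collective

    Input Parameter:
  . n - an argument with a fixed-size type, so no hand-written stub is needed

    Level: developer

  .seealso: `AOView()`
  @*/
  PetscErrorCode MyRoutine(PetscInt n)
  {
    PetscFunctionBegin;
    PetscFunctionReturn(PETSC_SUCCESS);
  }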
Logically Collective diff --git a/src/ts/utils/dmplexts.c b/src/ts/utils/dmplexts.c index 7598ff1db73..625b5f317c3 100644 --- a/src/ts/utils/dmplexts.c +++ b/src/ts/utils/dmplexts.c @@ -321,7 +321,7 @@ PetscErrorCode DMPlexTSComputeRHSFunctionFEM(DM dm, PetscReal time, Vec locX, Ve PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMTSCheckResidual - Check the residual of the exact solution Input Parameters: @@ -372,7 +372,7 @@ PetscErrorCode DMTSCheckResidual(TS ts, DM dm, PetscReal t, Vec u, Vec u_t, Pets PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ DMTSCheckJacobian - Check the Jacobian of the exact solution against the residual using the Taylor Test Input Parameters: diff --git a/src/vec/f90-mod/petscao.h b/src/vec/f90-mod/petscao.h index 80fa5abce43..3e56a544803 100644 --- a/src/vec/f90-mod/petscao.h +++ b/src/vec/f90-mod/petscao.h @@ -2,3 +2,11 @@ ! Used by petscvecmod.F90 to create Fortran module file ! #include "petsc/finclude/petscao.h" + +! cannot use tAO because that type matches the variable tao used in tao examples + type, extends(tPetscObject) :: tPetscAO + end type tPetscAO + AO, parameter :: PETSC_NULL_AO = tPetscAO(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_AO +#endif diff --git a/src/vec/f90-mod/petscis.h b/src/vec/f90-mod/petscis.h index 75880e7cf2b..9df10be2c28 100644 --- a/src/vec/f90-mod/petscis.h +++ b/src/vec/f90-mod/petscis.h @@ -3,32 +3,46 @@ ! #include "petsc/finclude/petscis.h" - type tIS - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tIS end type tIS - type tISColoring - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + IS, parameter :: PETSC_NULL_IS = tIS(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_IS +#endif + + type, extends(tPetscObject) :: tISColoring end type tISColoring - type tPetscSection - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + ISColoring, parameter :: PETSC_NULL_IS_COLORING = tISColoring(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_IS_COLORING +#endif + + type, extends(tPetscObject) :: tPetscSection end type tPetscSection - type tPetscSectionSym - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + PetscSection, parameter :: PETSC_NULL_SECTION = tPetscSection(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SECTION +#endif + + type, extends(tPetscObject) :: tPetscSectionSym end type tPetscSectionSym - type tPetscSF - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + PetscSectionSym, parameter :: PETSC_NULL_SECTION_SYM = tPetscSectionSym(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SECTION_SYM +#endif + + type, extends(tPetscObject) :: tPetscSF end type tPetscSF + PetscSF, parameter :: PETSC_NULL_SF = tPetscSF(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SF +#endif + type PetscSFNode - sequence PetscInt rank PetscInt index end type PetscSFNode - IS, parameter :: PETSC_NULL_IS = tIS(0) - PetscSF, parameter :: PETSC_NULL_SF = tPetscSF(0) - PetscSection, parameter :: PETSC_NULL_SECTION = tPetscSection(0) - PetscSectionSym, parameter :: PETSC_NULL_SECTIONSYM = tPetscSectionSym(0) - PetscEnum, parameter :: IS_COLORING_GLOBAL = 0 PetscEnum, parameter :: IS_COLORING_LOCAL = 1 @@ -55,10 +69,6 @@ PetscEnum, parameter :: IS_GLOBAL = 1 #if defined(_WIN32) &&
defined(PETSC_USE_SHARED_LIBRARIES) -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_IS -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SF -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SECTION -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_SECTIONSYM !DEC$ ATTRIBUTES DLLEXPORT::IS_COLORING_GLOBAL !DEC$ ATTRIBUTES DLLEXPORT::IS_COLORING_LOCAL !DEC$ ATTRIBUTES DLLEXPORT::IS_GENERAL diff --git a/src/vec/f90-mod/petscis.h90 b/src/vec/f90-mod/petscis.h90 index 22eb6aaac01..f7de239ac57 100644 --- a/src/vec/f90-mod/petscis.h90 +++ b/src/vec/f90-mod/petscis.h90 @@ -3,25 +3,6 @@ ! they cannot currently be generated automatically ! - Interface - Subroutine PetscLayoutFindOwner(a,b,c,z) - PetscLayout a - PetscInt b - PetscMPIInt c - PetscErrorCode z - End Subroutine - End Interface - - Interface - Subroutine PetscLayoutFindOwnerIndex(a,b,c,d,z) - PetscLayout a - PetscMPIInt b - PetscInt c - PetscInt d - PetscErrorCode z - End Subroutine - End Interface - Interface Subroutine PetscLayoutGetRangesF90(a,b,z) PetscLayout a @@ -50,6 +31,7 @@ Interface Subroutine ISLocalToGlobalMappingGetIndicesF90(i,array,ierr) + import tISLocalToGlobalMapping PetscInt, pointer :: array(:) PetscErrorCode ierr ISLocalToGlobalMapping i @@ -58,6 +40,7 @@ Interface Subroutine ISLocalToGlobalMappingRestoreIndicesF90(i,array,ierr) + import tISLocalToGlobalMapping PetscInt, pointer :: array(:) PetscErrorCode ierr ISLocalToGlobalMapping i @@ -168,32 +151,6 @@ End Subroutine End Interface - Interface - subroutine ISSetType(a,b,z) - import tIS - IS a - character(*) b - integer z - end subroutine - end Interface - - Interface - subroutine ISView(a,b,z) - import tIS,tPetscViewer - IS a - PetscViewer b - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine ISDestroy(a,z) - import tIS - IS a - PetscErrorCode z - end subroutine - end Interface - Interface Subroutine PetscSFDistributeSectionF90(sf,rootsection,array,leafsection,ierr) import tPetscSection diff --git a/src/vec/f90-mod/petscislocaltoglobalmapping.h b/src/vec/f90-mod/petscislocaltoglobalmapping.h new file mode 100644 index 00000000000..7a682622c53 --- /dev/null +++ b/src/vec/f90-mod/petscislocaltoglobalmapping.h @@ -0,0 +1,9 @@ +! +! Used by petscvecmod.F90 to create Fortran module file +! + type, extends(tPetscObject) :: tISLocalToGlobalMapping + end type tISLocalToGlobalMapping + ISLocalToGlobalMapping, parameter :: PETSC_NULL_IS_LOCALTOGLOBALMAPPING = tISLocalToGlobalMapping(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_IS_LOCALTOGLOBALMAPPING +#endif diff --git a/src/vec/f90-mod/petscvec.h b/src/vec/f90-mod/petscvec.h index 716c4e39690..74179d3c6d9 100644 --- a/src/vec/f90-mod/petscvec.h +++ b/src/vec/f90-mod/petscvec.h @@ -3,22 +3,28 @@ ! 
#include "petsc/finclude/petscvec.h" - type tVec - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + type, extends(tPetscObject) :: tVec end type tVec - type tVecScatter - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE + Vec, parameter :: PETSC_NULL_VEC = tVec(0) + Vec, parameter :: PETSC_NULL_VEC_ARRAY(1) = tVec(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VEC +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VEC_ARRAY +#endif + + type, extends(tPetscObject) :: tVecScatter end type tVecScatter - type tVecTagger - sequence - PetscFortranAddr:: v PETSC_FORTRAN_TYPE_INITIALIZE - end type tVecTagger + VecScatter, parameter :: PETSC_NULL_VEC_SCATTER = tVecScatter(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VEC_SCATTER +#endif - Vec, parameter :: PETSC_NULL_VEC = tVec(0) - VecScatter, parameter :: PETSC_NULL_VECSCATTER = tVecScatter(0) - VecTagger, parameter :: PETSC_NULL_VECTAGGER = tVecTagger(0) + type, extends(tPetscObject) :: tVecTagger + end type tVecTagger + VecTagger, parameter :: PETSC_NULL_VEC_TAGGER = tVecTagger(0) +#if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) +!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VEC_TAGGER +#endif ! ! ! Types of vector and matrix norms @@ -64,9 +70,6 @@ PetscEnum, parameter :: VECOP_LOADNATIVE = 69 #if defined(_WIN32) && defined(PETSC_USE_SHARED_LIBRARIES) -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VEC -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VECSCATTER -!DEC$ ATTRIBUTES DLLEXPORT::PETSC_NULL_VECTAGGER !DEC$ ATTRIBUTES DLLEXPORT::NORM_1 !DEC$ ATTRIBUTES DLLEXPORT::NORM_2 !DEC$ ATTRIBUTES DLLEXPORT::NORM_FROBENIUS diff --git a/src/vec/f90-mod/petscvec.h90 b/src/vec/f90-mod/petscvec.h90 index 65273d5d2dc..4d9a6b1fb01 100644 --- a/src/vec/f90-mod/petscvec.h90 +++ b/src/vec/f90-mod/petscvec.h90 @@ -67,27 +67,6 @@ end Subroutine PetscSFGetRootRanks end Interface - Interface VecGetOwnershipRange - subroutine VecGetOwnershipRange1(x,l,h,ierr) - import tVec - Vec, intent(in) :: x - PetscInt, intent(out) :: l,h - PetscErrorCode, intent(out) :: ierr - end subroutine - subroutine VecGetOwnershipRange2(x,l,h,ierr) - import tVec - Vec, intent(in) :: x - PetscInt, intent(out) :: l(*),h - PetscErrorCode, intent(out) :: ierr - end subroutine - subroutine VecGetOwnershipRange3(x,l,h,ierr) - import tVec - Vec, intent(in) :: x - PetscInt, intent(out) :: l,h(*) - PetscErrorCode, intent(out) :: ierr - end subroutine - end Interface - Interface VecMin subroutine VecMin1(x,p,val,ierr) import tVec @@ -122,168 +101,6 @@ end subroutine end Interface - Interface - subroutine VecScatterCreateToAll(a,b,c,z) - import tVec,tVecScatter - Vec a - VecScatter b - Vec c - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine VecScatterCreateToZero(a,b,c,z) - import tVec,tVecScatter - Vec a - VecScatter b - Vec c - PetscErrorCode z - end subroutine - end Interface - - Interface - subroutine VecCreateFromOptions(a,b,c,d,e,f,z) - import tVec - MPI_Comm a - character(*) b - PetscInt c - PetscInt d - PetscInt e - Vec f - PetscErrorCode z - end subroutine - End Interface - - Interface VecCreateSeqWithArray - subroutine VecCreateSeqWithArray0(a,b,c,d,e,z) - import tVec - MPI_Comm a - integer4 b ! bs - PetscInt c ! length - PetscScalar d (*) ! array of values - Vec e - PetscErrorCode z - end subroutine - subroutine VecCreateSeqWithArray1(a,b,c,d,e,z) - import tVec - MPI_Comm a - integer8 b ! bs - PetscInt c ! length - PetscScalar d (*) ! 
array of values - Vec e - PetscErrorCode z - end subroutine - end Interface - - Interface VecSetValuesLocal - subroutine VecSetValuesLocal0(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscScalar d (*) ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - subroutine VecSetValuesLocal11(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - end interface VecSetValuesLocal - - Interface VecGetValues - subroutine VecGetValues0(a,b,c,d,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscScalar d (*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine VecGetValues1(a,b,c,d,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d (*) ! PetscScalar - PetscErrorCode z - end subroutine - subroutine VecGetValues11(a,b,c,d,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d ! PetscScalar - PetscErrorCode z - end subroutine - End Interface VecGetValues - - Interface VecSetValues - subroutine VecSetValues0(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscScalar d (*) ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - subroutine VecSetValues1(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d (*) ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - subroutine VecSetValues11(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - End Interface VecSetValues - - Interface VecSetValuesBlocked - subroutine VecSetValuesBlocked0(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c (*) ! PetscInt - PetscScalar d (*) ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - subroutine VecSetValuesBlocked1(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d (*) ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - subroutine VecSetValuesBlocked11(a,b,c,d,e,z) - import tVec - Vec a ! Vec - PetscInt b ! PetscInt - PetscInt c ! PetscInt - PetscScalar d ! PetscScalar - InsertMode e ! InsertMode - PetscErrorCode z - end subroutine - End Interface VecSetValuesBlocked - Interface Subroutine VecGetArrayF90(v,array,ierr) import tVec @@ -378,26 +195,6 @@ PetscErrorCode ierr End Subroutine - Subroutine VecDestroy(a,ierr) - import tVec - Vec a - PetscErrorCode ierr - End Subroutine - - subroutine VecSetType(a,b,ierr) - import tVec - Vec a - character(*) b - PetscErrorCode ierr - end subroutine - - subroutine VecView(a,b,ierr) - import tVec,tPetscViewer - Vec a - PetscViewer b - PetscErrorCode ierr - end subroutine - subroutine VecScatterRemap(a,b,c,ierr) import tVecScatter VecScatter a ! 
VecScatter @@ -406,11 +203,4 @@ PetscErrorCode ierr end subroutine - subroutine VecLoad(a,b,ierr) - import tVec,tPetscViewer - Vec a - PetscViewer b - PetscErrorCode ierr - end subroutine - end Interface diff --git a/src/vec/f90-mod/petscvecmod.F90 b/src/vec/f90-mod/petscvecmod.F90 index 4c54fbf63dc..02233834c06 100644 --- a/src/vec/f90-mod/petscvecmod.F90 +++ b/src/vec/f90-mod/petscvecmod.F90 @@ -1,7 +1,7 @@ module petscisdefdummy use petscsysdef #include <../src/vec/f90-mod/petscis.h> -#include <../src/vec/f90-mod/petscao.h> +#include <../src/vec/f90-mod/petscislocaltoglobalmapping.h> end module petscisdefdummy module petscisdef diff --git a/src/vec/is/ao/impls/basic/aobasic.c b/src/vec/is/ao/impls/basic/aobasic.c index 6e3e07664f9..5ee57f2bb64 100644 --- a/src/vec/is/ao/impls/basic/aobasic.c +++ b/src/vec/is/ao/impls/basic/aobasic.c @@ -249,7 +249,7 @@ PETSC_INTERN PetscErrorCode AOCreate_Basic(AO ao) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOCreateBasic - Creates a basic application ordering using two integer arrays. Collective @@ -259,7 +259,7 @@ PETSC_INTERN PetscErrorCode AOCreate_Basic(AO ao) . napp - size of integer arrays . myapp - integer array that defines an ordering - mypetsc - integer array that defines another ordering (may be `NULL` to - indicate the natural ordering, that is 0,1,2,3,...) + indicate the natural ordering, that is 0,1,2,3,...) Output Parameter: . aoout - the new application ordering @@ -267,7 +267,7 @@ PETSC_INTERN PetscErrorCode AOCreate_Basic(AO ao) Level: beginner Note: - The arrays myapp and mypetsc must contain the all the integers 0 to napp-1 with no duplicates; that is there cannot be any "holes" + The arrays `myapp` and `mypetsc` must contain all the integers 0 to `napp`-1 with no duplicates; that is, there cannot be any "holes" in the indices. Use `AOCreateMapping()` or `AOCreateMappingIS()` if you wish to have "holes" in the indices. .seealso: [](sec_ao), [](sec_scatter), `AO`, `AOCreateBasicIS()`, `AODestroy()`, `AOPetscToApplication()`, `AOApplicationToPetsc()` @@ -290,15 +290,14 @@ PetscErrorCode AOCreateBasic(MPI_Comm comm, PetscInt napp, const PetscInt myapp[ PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOCreateBasicIS - Creates a basic application ordering using two `IS` index sets. Collective Input Parameters: + isapp - index set that defines an ordering -- ispetsc - index set that defines another ordering (may be `NULL` to use the - natural ordering) +- ispetsc - index set that defines another ordering (may be `NULL` to use the natural ordering) Output Parameter: .
aoout - the new application ordering @@ -306,7 +305,7 @@ PetscErrorCode AOCreateBasic(MPI_Comm comm, PetscInt napp, const PetscInt myapp[ Level: beginner Note: - The index sets isapp and ispetsc must contain the all the integers 0 to napp-1 (where napp is the length of the index sets) with no duplicates; + The index sets `isapp` and `ispetsc` must contain all the integers 0 to napp-1 (where napp is the length of the index sets) with no duplicates; that is, there cannot be any "holes" .seealso: [](sec_ao), [](sec_scatter), `IS`, `AO`, `AOCreateBasic()`, `AODestroy()` diff --git a/src/vec/is/ao/impls/basic/ftn-custom/makefile b/src/vec/is/ao/impls/basic/ftn-custom/makefile deleted file mode 100644 index 08508c660fe..00000000000 --- a/src/vec/is/ao/impls/basic/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/is/ao/impls/basic/ftn-custom/zaobasicf.c b/src/vec/is/ao/impls/basic/ftn-custom/zaobasicf.c deleted file mode 100644 index b57bbfdb068..00000000000 --- a/src/vec/is/ao/impls/basic/ftn-custom/zaobasicf.c +++ /dev/null @@ -1,44 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscao.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define aocreatebasic_ AOCREATEBASIC - #define aocreatebasicis_ AOCREATEBASICIS - #define aocreatememoryscalable_ AOCREATEMEMORYSCALABLE - #define aocreatememoryscalableis_ AOCREATEMEMORYSCALABLEIS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define aocreatebasic_ aocreatebasic - #define aocreatebasicis_ aocreatebasicis - #define aocreatememoryscalable_ aocreatememoryscalable - #define aocreatememoryscalableis_ aocreatememoryscalableis -#endif - -PETSC_EXTERN void aocreatebasic_(MPI_Comm *comm, PetscInt *napp, PetscInt *myapp, PetscInt *mypetsc, AO *aoout, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(myapp); - CHKFORTRANNULLINTEGER(mypetsc); - *ierr = AOCreateBasic(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *napp, myapp, mypetsc, aoout); -} - -PETSC_EXTERN void aocreatebasicis_(IS *isapp, IS *ispetsc, AO *aoout, PetscErrorCode *ierr) -{ - IS cispetsc = NULL; - CHKFORTRANNULLOBJECT(ispetsc); - if (ispetsc) cispetsc = *ispetsc; - *ierr = AOCreateBasicIS(*isapp, cispetsc, aoout); -} - -PETSC_EXTERN void aocreatememoryscalable_(MPI_Comm *comm, PetscInt *napp, PetscInt *myapp, PetscInt *mypetsc, AO *aoout, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(myapp); - CHKFORTRANNULLINTEGER(mypetsc); - *ierr = AOCreateMemoryScalable(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *napp, myapp, mypetsc, aoout); -} - -PETSC_EXTERN void aocreatememoryscalableis_(IS *isapp, IS *ispetsc, AO *aoout, PetscErrorCode *ierr) -{ - IS cispetsc = NULL; - CHKFORTRANNULLOBJECT(ispetsc); - if (ispetsc) cispetsc = *ispetsc; - *ierr = AOCreateMemoryScalableIS(*isapp, cispetsc, aoout); -} diff --git a/src/vec/is/ao/impls/mapping/aomapping.c b/src/vec/is/ao/impls/mapping/aomapping.c index c0832b39538..d313b157415 100644 --- a/src/vec/is/ao/impls/mapping/aomapping.c +++ b/src/vec/is/ao/impls/mapping/aomapping.c @@ -120,7 +120,7 @@ static const struct _AOOps AOps = { PetscDesignatedInitializer(applicationtopetsc, AOApplicationToPetsc_Mapping), }; -/*@C +/*@ AOMappingHasApplicationIndex - Checks if an `AO` has a requested application index.
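The Notes above pin down the AOCreateBasic() contract: myapp must hold each integer 0 to napp-1 exactly once, and mypetsc may be NULL to select the natural ordering. A small usage sketch under those assumptions, with illustrative array values and error handling reduced to PetscCall():

  PetscInt myapp[] = {3, 1, 0, 2}; /* a permutation of 0..3, no "holes" */
  PetscInt idx[]   = {0, 1, 2, 3};
  AO       ao;

  PetscCall(AOCreateBasic(PETSC_COMM_WORLD, 4, myapp, NULL, &ao));
  PetscCall(AOApplicationToPetsc(ao, 4, idx)); /* idx is permuted in place */
  PetscCall(AODestroy(&ao));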
Not Collective @@ -208,7 +208,7 @@ PetscErrorCode AOMappingHasPetscIndex(AO ao, PetscInt idex, PetscBool *hasIndex) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOCreateMapping - Creates an application mapping using two integer arrays. Input Parameters: @@ -226,7 +226,7 @@ PetscErrorCode AOMappingHasPetscIndex(AO ao, PetscInt idex, PetscBool *hasIndex) Level: beginner Note: - The arrays myapp and mypetsc need NOT contain the all the integers 0 to napp-1, that is there CAN be "holes" in the indices. + The arrays `myapp` and `mypetsc` need NOT contain all the integers 0 to `napp`-1; that is, there CAN be "holes" in the indices. Use `AOCreateBasic()` or `AOCreateBasicIS()` if they do not have holes for better performance. .seealso: [](sec_ao), `AOCreateBasic()`, `AOCreateMappingIS()`, `AODestroy()` diff --git a/src/vec/is/ao/impls/mapping/ftn-custom/makefile b/src/vec/is/ao/impls/mapping/ftn-custom/makefile deleted file mode 100644 index 08508c660fe..00000000000 --- a/src/vec/is/ao/impls/mapping/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/is/ao/impls/mapping/ftn-custom/zaomappingf.c b/src/vec/is/ao/impls/mapping/ftn-custom/zaomappingf.c deleted file mode 100644 index 98515ef973f..00000000000 --- a/src/vec/is/ao/impls/mapping/ftn-custom/zaomappingf.c +++ /dev/null @@ -1,19 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscao.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define aocreatemapping_ AOCREATEMAPPING - #define aocreatemappingis_ AOCREATEMAPPINGIS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define aocreatemapping_ aocreatemapping - #define aocreatemappingis_ aocreatemappingis -#endif - -PETSC_EXTERN void aocreatemapping_(MPI_Comm *comm, PetscInt *napp, PetscInt *myapp, PetscInt *mypetsc, AO *aoout, PetscErrorCode *ierr) -{ - if (*napp) { - CHKFORTRANNULLINTEGER(myapp); - CHKFORTRANNULLINTEGER(mypetsc); - } - *ierr = AOCreateMapping(MPI_Comm_f2c(*(MPI_Fint *)comm), *napp, myapp, mypetsc, aoout); -} diff --git a/src/vec/is/ao/impls/memscalable/aomemscalable.c b/src/vec/is/ao/impls/memscalable/aomemscalable.c index ae286cc526b..199e132420c 100644 --- a/src/vec/is/ao/impls/memscalable/aomemscalable.c +++ b/src/vec/is/ao/impls/memscalable/aomemscalable.c @@ -445,7 +445,7 @@ PETSC_INTERN PetscErrorCode AOCreate_MemoryScalable(AO ao) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOCreateMemoryScalable - Creates a memory scalable application ordering using two integer arrays. Collective @@ -454,8 +454,7 @@ PETSC_INTERN PetscErrorCode AOCreate_MemoryScalable(AO ao) + comm - MPI communicator that is to share the `AO` . napp - size of integer arrays . myapp - integer array that defines an ordering -- mypetsc - integer array that defines another ordering (may be `NULL` to - indicate the natural ordering, that is 0,1,2,3,...) +- mypetsc - integer array that defines another ordering (may be `NULL` to indicate the natural ordering, that is 0,1,2,3,...) Output Parameter: . aoout - the new application ordering @@ -463,7 +462,7 @@ PETSC_INTERN PetscErrorCode AOCreate_MemoryScalable(AO ao) Level: beginner Note: - The arrays myapp and mypetsc must contain the all the integers 0 to napp-1 with no duplicates; that is there cannot be any "holes" + The arrays `myapp` and `mypetsc` must contain all the integers 0 to `napp`-1 with no duplicates; that is, there cannot be any "holes" in the indices.
Use `AOCreateMapping()` or `AOCreateMappingIS()` if you wish to have "holes" in the indices. Comparing with `AOCreateBasic()`, this routine trades memory for message communication. @@ -487,15 +486,14 @@ PetscErrorCode AOCreateMemoryScalable(MPI_Comm comm, PetscInt napp, const PetscI PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOCreateMemoryScalableIS - Creates a memory scalable application ordering using two index sets. Collective Input Parameters: + isapp - index set that defines an ordering -- ispetsc - index set that defines another ordering (may be `NULL` to use the - natural ordering) +- ispetsc - index set that defines another ordering (may be `NULL` to use the natural ordering) Output Parameter: . aoout - the new application ordering @@ -503,7 +501,7 @@ PetscErrorCode AOCreateMemoryScalable(MPI_Comm comm, PetscInt napp, const PetscI Level: beginner Notes: - The index sets isapp and ispetsc must contain the all the integers 0 to napp-1 (where napp is the length of the index sets) with no duplicates; + The index sets `isapp` and `ispetsc` must contain all the integers 0 to napp-1 (where napp is the length of the index sets) with no duplicates; that is, there cannot be any "holes". Comparing with `AOCreateBasicIS()`, this routine trades memory for message communication. diff --git a/src/vec/is/ao/interface/ao.c b/src/vec/is/ao/interface/ao.c index 7551f91c80c..ca148281fde 100644 --- a/src/vec/is/ao/interface/ao.c +++ b/src/vec/is/ao/interface/ao.c @@ -7,7 +7,7 @@ PetscClassId AO_CLASSID; PetscLogEvent AO_PetscToApplication, AO_ApplicationToPetsc; -/*@C +/*@ AOView - Displays an application ordering. Collective @@ -46,7 +46,7 @@ PetscErrorCode AOView(AO ao, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOViewFromOptions - View an `AO` based on values in the options database Collective @@ -496,7 +496,7 @@ PetscErrorCode AOCreate(MPI_Comm comm, AO *ao) PetscFunctionBegin; PetscAssertPointer(ao, 2); - *ao = NULL; PetscCall(AOInitializePackage()); PetscCall(PetscHeaderCreate(aonew, AO_CLASSID, "AO", "Application Ordering", "AO", comm, AODestroy, AOView)); diff --git a/src/vec/is/ao/interface/aoreg.c b/src/vec/is/ao/interface/aoreg.c index 3ddc13c2ed2..5fba32af75b 100644 --- a/src/vec/is/ao/interface/aoreg.c +++ b/src/vec/is/ao/interface/aoreg.c @@ -62,7 +62,7 @@ PetscErrorCode AOInitializePackage(void) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOSetType - Builds an application ordering for a particular `AOType` Collective @@ -103,7 +103,7 @@ PetscErrorCode AOSetType(AO ao, AOType method) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ AOGetType - Gets the `AO` type name (as a string) from the AO.
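The memory-scalable variants documented here follow the same 0 to napp-1 contract but keep the ordering distributed across the communicator, trading memory for messages; they are the better choice when napp is large. A hedged sketch, where napp and myapp are assumed to be the caller's local size and permutation array:

  AO ao;

  PetscCall(AOCreateMemoryScalable(PETSC_COMM_WORLD, napp, myapp, NULL, &ao));
  /* use AOApplicationToPetsc()/AOPetscToApplication() exactly as with AOCreateBasic() */
  PetscCall(AODestroy(&ao));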
Not Collective @@ -135,7 +135,7 @@ PetscFunctionList AOList = NULL; /*@C AORegister - Register an application ordering method - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - the name (`AOType`) of the `AO` scheme diff --git a/src/vec/is/ao/interface/ftn-custom/makefile b/src/vec/is/ao/interface/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/vec/is/ao/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/is/ao/interface/ftn-custom/zaof.c b/src/vec/is/ao/interface/ftn-custom/zaof.c deleted file mode 100644 index cfe115ff5e5..00000000000 --- a/src/vec/is/ao/interface/ftn-custom/zaof.c +++ /dev/null @@ -1,41 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscao.h> -#include <petscviewer.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define aoview_ AOVIEW - #define aosettype_ AOSETTYPE - #define aoviewfromoptions_ AOVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define aoview_ aoview - #define aosettype_ aosettype - #define aoviewfromoptions_ aoviewfromoptions -#endif - -PETSC_EXTERN void aoview_(AO *ao, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = AOView(*ao, v); -} - -PETSC_EXTERN void aosettype_(AO *ao, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - *ierr = AOSetType(*ao, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void aoviewfromoptions_(AO *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = AOViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/vec/is/ao/tests/ex4f.F90 b/src/vec/is/ao/tests/ex4f.F90 index 4d220f7e5d0..0a9db66649a 100644 --- a/src/vec/is/ao/tests/ex4f.F90 +++ b/src/vec/is/ao/tests/ex4f.F90 @@ -30,11 +30,11 @@ program main endif ! Test AOCreateBasic() - PetscCallA(AOCreateBasic(PETSC_COMM_WORLD, nlocal, localvert,PETSC_NULL_INTEGER,ao,ierr)) + PetscCallA(AOCreateBasic(PETSC_COMM_WORLD, nlocal, localvert,PETSC_NULL_INTEGER_ARRAY,ao,ierr)) PetscCallA(AODestroy(ao,ierr)) !
Test AOCreateMemoryScalable() - PetscCallA(AOCreateMemoryScalable(PETSC_COMM_WORLD, nlocal, localvert,PETSC_NULL_INTEGER,ao,ierr)) + PetscCallA(AOCreateMemoryScalable(PETSC_COMM_WORLD, nlocal, localvert,PETSC_NULL_INTEGER_ARRAY,ao,ierr)) PetscCallA(AODestroy(ao,ierr)) PetscCallA(AOCreate(PETSC_COMM_WORLD,ao,ierr)) diff --git a/src/vec/is/is/interface/f90-custom/zindexf90.c b/src/vec/is/is/interface/f90-custom/zindexf90.c index 8df767ad1de..7f677844c7d 100644 --- a/src/vec/is/is/interface/f90-custom/zindexf90.c +++ b/src/vec/is/is/interface/f90-custom/zindexf90.c @@ -5,12 +5,10 @@ #define petsclayoutgetrangesf90_ PETSCLAYOUTGETRANGESF90 #define isgetindicesf90_ ISGETINDICESF90 #define isrestoreindicesf90_ ISRESTOREINDICESF90 - #define isdestroy_ ISDESTROY #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define petsclayoutgetrangesf90_ petsclayoutgetrangesf90 #define isgetindicesf90_ isgetindicesf90 #define isrestoreindicesf90_ isrestoreindicesf90 - #define isdestroy_ isdestroy #endif PETSC_EXTERN void petsclayoutgetrangesf90_(PetscLayout *map, F90Array1d *ptr, int *__ierr PETSC_F90_2PTR_PROTO(ptrd)) @@ -46,11 +44,3 @@ PETSC_EXTERN void isrestoreindicesf90_(IS *x, F90Array1d *ptr, int *__ierr PETSC if (*__ierr) return; *__ierr = ISRestoreIndices(*x, &fa); } - -PETSC_EXTERN void isdestroy_(IS *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = ISDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/vec/is/is/interface/ftn-custom/zindexf.c b/src/vec/is/is/interface/ftn-custom/zindexf.c index a8c45a24523..6a166eda75c 100644 --- a/src/vec/is/is/interface/ftn-custom/zindexf.c +++ b/src/vec/is/is/interface/ftn-custom/zindexf.c @@ -3,9 +3,6 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petsclayoutfindowner_ PETSCLAYOUTFINDOWNER - #define petsclayoutfindownerindex_ PETSCLAYOUTFINDOWNERINDEX - #define isview_ ISVIEW #define isgetindices_ ISGETINDICES #define isrestoreindices_ ISRESTOREINDICES #define isgettotalindices_ ISGETTOTALINDICES @@ -16,11 +13,7 @@ #define islocaltoglobalmappingrestoreindices_ ISLOCALTOGLOBALMAPPINGRESTOREINDICES #define islocaltoglobalmappinggetblockindices_ ISLOCALTOGLOBALMAPPINGGETBLOCKINDICES #define islocaltoglobalmappingrestoreblockindices_ ISLOCALTOGLOBALMAPPINGRESTOREBLOCKINDICES - #define isviewfromoptions_ ISVIEWFROMOPTIONS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petsclayoutfindowner_ petsclayoutfindowner - #define petsclayoutfindownerindex_ petsclayoutfindownerindex - #define isview_ isview #define isgetindices_ isgetindices #define isrestoreindices_ isrestoreindices #define isgettotalindices_ isgettotalindices @@ -31,26 +24,8 @@ #define islocaltoglobalmappingrestoreindices_ islocaltoglobalmappingrestoreindices #define islocaltoglobalmappinggetblockindices_ islocaltoglobalmappinggetblockindices #define islocaltoglobalmappingrestoreblockindices_ islocaltoglobalmappingrestoreblockindices - #define isviewfromoptions_ isviewfromoptions #endif -PETSC_EXTERN void petsclayoutfindowner_(PetscLayout *map, PetscInt *idx, PetscMPIInt *owner, PetscErrorCode *ierr) -{ - *ierr = PetscLayoutFindOwner(*map, *idx, owner); -} - -PETSC_EXTERN void petsclayoutfindownerindex_(PetscLayout *map, PetscInt *idx, PetscMPIInt *owner, PetscInt *ridx, PetscErrorCode *ierr) -{ - *ierr = PetscLayoutFindOwnerIndex(*map, *idx, owner, ridx); -} - -PETSC_EXTERN void isview_(IS *is, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = 
ISView(*is, v); -} - PETSC_EXTERN void isgetindices_(IS *x, PetscInt *fa, size_t *ia, PetscErrorCode *ierr) { const PetscInt *lx; @@ -125,14 +100,3 @@ PETSC_EXTERN void islocaltoglobalmappingrestoreblockindices_(ISLocalToGlobalMapp const PetscInt *lx = PetscIntAddressFromFortran(fa, *ia); *ierr = ISLocalToGlobalMappingRestoreBlockIndices(*x, &lx); } - -PETSC_EXTERN void isviewfromoptions_(IS *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = ISViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/vec/is/is/interface/index.c b/src/vec/is/is/interface/index.c index 5580889c965..953b4428e76 100644 --- a/src/vec/is/is/interface/index.c +++ b/src/vec/is/is/interface/index.c @@ -326,6 +326,17 @@ static PetscErrorCode ISSetInfo_Internal(IS is, ISInfo info, ISInfoType type, IS /* set implications */ switch (info) { case IS_SORTED: + if (PetscDefined(USE_DEBUG) && flg) { + PetscInt n; + const PetscInt *indices; + + PetscCall(ISGetLocalSize(is, &n)); + PetscCall(ISGetIndices(is, &indices)); + PetscCall(PetscSortedInt(n, indices, &flg)); + if (type == IS_GLOBAL) PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)is))); + PetscCheck(flg, type == IS_GLOBAL ? PetscObjectComm((PetscObject)is) : PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS is not sorted"); + PetscCall(ISRestoreIndices(is, &indices)); + } if (flg && type == IS_GLOBAL) { /* an array that is globally sorted is also locally sorted */ is->info[IS_LOCAL][(int)info] = IS_INFO_TRUE; /* global permanence implies local permanence */ @@ -463,7 +474,7 @@ PetscErrorCode ISSetInfo(IS is, ISInfo info, ISInfoType type, PetscBool permanen errcomm = PETSC_COMM_SELF; } - PetscCheck(((int)info) > IS_INFO_MIN && ((int)info) < IS_INFO_MAX, errcomm, PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)info); + PetscCheck((int)info > IS_INFO_MIN && (int)info < IS_INFO_MAX, errcomm, PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)info); PetscCallMPI(MPI_Comm_size(comm, &size)); /* do not use global values if size == 1: it makes it easier to keep the implications straight */ @@ -789,7 +800,7 @@ PetscErrorCode ISGetInfo(IS is, ISInfo info, ISInfoType type, PetscBool compute, PetscCallMPI(MPI_Comm_size(comm, &size)); PetscCallMPI(MPI_Comm_rank(comm, &rank)); - PetscCheck(((int)info) > IS_INFO_MIN && ((int)info) < IS_INFO_MAX, errcomm, PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)info); + PetscCheck((int)info > IS_INFO_MIN && (int)info < IS_INFO_MAX, errcomm, PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)info); if (size == 1) type = IS_LOCAL; itype = (type == IS_LOCAL) ? 0 : 1; hasprop = PETSC_FALSE; @@ -1012,7 +1023,7 @@ PetscErrorCode ISSetPermutation(IS is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISDestroy - Destroys an index set. Collective @@ -1228,7 +1239,7 @@ PetscErrorCode ISGetIndices(IS is, const PetscInt *ptr[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISGetMinMax - Gets the minimum and maximum values in an `IS` Not Collective @@ -1237,8 +1248,8 @@ PetscErrorCode ISGetIndices(IS is, const PetscInt *ptr[]) . 
is - the index set Output Parameters: -+ min - the minimum value -- max - the maximum value ++ min - the minimum value, you may pass `NULL` +- max - the maximum value, you may pass `NULL` Level: intermediate @@ -1571,7 +1582,7 @@ PetscErrorCode ISRestoreNonlocalIS(IS is, IS *complement) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISViewFromOptions - View an `IS` based on options in the options database Collective @@ -1596,7 +1607,7 @@ PetscErrorCode ISViewFromOptions(IS A, PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISView - Displays an index set. Collective @@ -1674,10 +1685,15 @@ PetscErrorCode ISLoad(IS is, PetscViewer viewer) @*/ PetscErrorCode ISSort(IS is) { + PetscBool flg; + PetscFunctionBegin; PetscValidHeaderSpecific(is, IS_CLASSID, 1); - PetscUseTypeMethod(is, sort); - PetscCall(ISSetInfo(is, IS_SORTED, IS_LOCAL, is->info_permanent[IS_LOCAL][IS_SORTED], PETSC_TRUE)); + PetscCall(ISGetInfo(is, IS_SORTED, IS_LOCAL, PETSC_FALSE, &flg)); + if (!flg) { + PetscUseTypeMethod(is, sort); + PetscCall(ISSetInfo(is, IS_SORTED, IS_LOCAL, is->info_permanent[IS_LOCAL][IS_SORTED], PETSC_TRUE)); + } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/vec/is/is/interface/isreg.c b/src/vec/is/is/interface/isreg.c index 96f9cf08d7f..fb1b83eb51b 100644 --- a/src/vec/is/is/interface/isreg.c +++ b/src/vec/is/is/interface/isreg.c @@ -35,7 +35,7 @@ PetscErrorCode ISCreate(MPI_Comm comm, IS *is) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISSetType - Builds an index set, for a particular `ISType` Collective @@ -79,7 +79,7 @@ PetscErrorCode ISSetType(IS is, ISType method) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISGetType - Gets the index set type name, `ISType`, (as a string) from the `IS`. Not Collective @@ -109,7 +109,7 @@ PetscErrorCode ISGetType(IS is, ISType *type) /*@C ISRegister - Adds a new index set implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine diff --git a/src/vec/is/is/utils/ftn-custom/makefile b/src/vec/is/is/utils/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/vec/is/is/utils/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/is/is/utils/ftn-custom/ziscoloringf.c b/src/vec/is/is/utils/ftn-custom/ziscoloringf.c deleted file mode 100644 index 016aa9abfd3..00000000000 --- a/src/vec/is/is/utils/ftn-custom/ziscoloringf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscis.h> -#include <petscviewer.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define iscoloringview_ ISCOLORINGVIEW - #define iscoloringviewfromoptions_ ISCOLORINGVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define iscoloringview_ iscoloringview - #define iscoloringviewfromoptions_ iscoloringviewfromoptions -#endif - -PETSC_EXTERN void iscoloringview_(ISColoring *iscoloring, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = ISColoringView(*iscoloring, v); -} - -PETSC_EXTERN void iscoloringviewfromoptions_(ISColoring *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = ISColoringViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff
--git a/src/vec/is/is/utils/isblock.c b/src/vec/is/is/utils/isblock.c index e24503d8cb1..6d27b6f4303 100644 --- a/src/vec/is/is/utils/isblock.c +++ b/src/vec/is/is/utils/isblock.c @@ -106,7 +106,7 @@ PetscErrorCode ISCompressIndicesGeneral(PetscInt n, PetscInt nkeys, PetscInt bs, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISExpandIndicesGeneral - convert the indices of an array `IS` into non-block indices in an array of `ISGENERAL` Input Parameters: @@ -114,10 +114,10 @@ PetscErrorCode ISCompressIndicesGeneral(PetscInt n, PetscInt nkeys, PetscInt bs, . nkeys - expected number of keys when `PETSC_USE_CTABLE` is used . bs - the size of block . imax - the number of index sets -- is_in - the blocked array of index sets +- is_in - the blocked array of index sets, must be as large as `imax` Output Parameter: -. is_out - the non-blocked new index set, as `ISGENERAL` +. is_out - the non-blocked new index set, as `ISGENERAL`, must be as large as `imax` Level: intermediate diff --git a/src/vec/is/is/utils/iscoloring.c b/src/vec/is/is/utils/iscoloring.c index f7fd76f1173..b3a2a9359da 100644 --- a/src/vec/is/is/utils/iscoloring.c +++ b/src/vec/is/is/utils/iscoloring.c @@ -11,7 +11,7 @@ PetscErrorCode ISColoringReference(ISColoring coloring) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISColoringSetType - indicates if the coloring is for the local representation (including ghost points) or the global representation of a `Mat` Collective @@ -36,8 +36,7 @@ PetscErrorCode ISColoringSetType(ISColoring coloring, ISColoringType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - +/*@ ISColoringGetType - gets if the coloring is for the local representation (including ghost points) or the global representation Collective @@ -93,7 +92,7 @@ PetscErrorCode ISColoringDestroy(ISColoring *iscoloring) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISColoringViewFromOptions - Processes command line options to determine if/how an `ISColoring` object is to be viewed. Collective @@ -129,7 +128,7 @@ PetscErrorCode ISColoringViewFromOptions(ISColoring obj, PetscObject bobj, const PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISColoringView - Views an `ISColoring` coloring context. 
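The iscoloring.c and isdiff.c hunks that follow begin attaching IS_SORTED metadata to newly created index sets, and the ISSort() change above consults that flag before invoking the sort method; the debug-only check added to ISSetInfo() verifies the claim against the actual indices. A sketch of the query/set pattern these changes rely on, using the signatures as they appear in this diff:

  IS        is;
  PetscBool sorted;
  PetscInt  idx[] = {0, 2, 5, 9};

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 4, idx, PETSC_COPY_VALUES, &is));
  /* record that the IS is locally sorted; non-permanent, so later edits reset it */
  PetscCall(ISSetInfo(is, IS_SORTED, IS_LOCAL, PETSC_FALSE, PETSC_TRUE));
  PetscCall(ISGetInfo(is, IS_SORTED, IS_LOCAL, PETSC_FALSE, &sorted)); /* no recompute */
  PetscCall(ISSort(is)); /* after this patch, a no-op when the flag is already set */
  PetscCall(ISDestroy(&is));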
Collective @@ -731,7 +730,7 @@ PetscErrorCode ISAllGather(IS is, IS *isout) .seealso: `ISColoringValue`, `ISColoring()`, `ISCreateGeneral()`, `ISCreateStride()`, `ISCreateBlock()`, `ISAllGather()` @*/ -PetscErrorCode ISAllGatherColors(MPI_Comm comm, PetscInt n, ISColoringValue *lindices, PetscInt *outN, ISColoringValue *outindices[]) +PetscErrorCode ISAllGatherColors(MPI_Comm comm, PetscInt n, ISColoringValue lindices[], PetscInt *outN, ISColoringValue *outindices[]) { ISColoringValue *indices; PetscInt i, N; @@ -818,6 +817,7 @@ PetscErrorCode ISComplement(IS is, PetscInt nmin, PetscInt nmax, IS *isout) } PetscCheck(cnt == nmax - nmin - unique, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of entries found in complement %" PetscInt_FMT " does not match expected %" PetscInt_FMT, cnt, nmax - nmin - unique); PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), cnt, nindices, PETSC_OWN_POINTER, isout)); + PetscCall(ISSetInfo(*isout, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE)); PetscCall(ISRestoreIndices(is, &indices)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/vec/is/is/utils/isdiff.c b/src/vec/is/is/utils/isdiff.c index 1939fa98725..6f537ef3817 100644 --- a/src/vec/is/is/utils/isdiff.c +++ b/src/vec/is/is/utils/isdiff.c @@ -119,18 +119,14 @@ PetscErrorCode ISDifference(IS is1, IS is2, IS *isout) @*/ PetscErrorCode ISSum(IS is1, IS is2, IS *is3) { - MPI_Comm comm; PetscBool f; - PetscMPIInt size; const PetscInt *i1, *i2; PetscInt n1, n2, n3, p1, p2, *iout; PetscFunctionBegin; PetscValidHeaderSpecific(is1, IS_CLASSID, 1); PetscValidHeaderSpecific(is2, IS_CLASSID, 2); - PetscCall(PetscObjectGetComm((PetscObject)(is1), &comm)); - PetscCallMPI(MPI_Comm_size(comm, &size)); - PetscCheck(size <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Currently only for uni-processor IS"); + PetscCheckSameComm(is1, 1, is2, 2); PetscCall(ISSorted(is1, &f)); PetscCheck(f, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Arg 1 is not sorted"); @@ -239,7 +235,7 @@ PetscErrorCode ISSum(IS is1, IS is2, IS *is3) PetscCall(ISRestoreIndices(is1, &i1)); PetscCall(ISRestoreIndices(is2, &i2)); - PetscCall(ISCreateGeneral(comm, n3, iout, PETSC_OWN_POINTER, is3)); + PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is1), n3, iout, PETSC_OWN_POINTER, is3)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -273,6 +269,7 @@ PetscErrorCode ISExpand(IS is1, IS is2, IS *isout) { PetscInt i, n1, n2, imin, imax, nout, *iout; const PetscInt *i1, *i2; + PetscBool sorted1 = PETSC_TRUE, sorted2 = PETSC_TRUE; PetscBT mask; MPI_Comm comm; @@ -299,15 +296,25 @@ PetscErrorCode ISExpand(IS is1, IS is2, IS *isout) if (n1 || n2) { imin = PETSC_MAX_INT; imax = 0; - for (i = 0; i < n1; i++) { - if (i1[i] < 0) continue; - imin = PetscMin(imin, i1[i]); - imax = PetscMax(imax, i1[i]); + if (n1) { + PetscCall(ISSorted(is1, &sorted1)); + if (sorted1 && i1[0] >= 0) imin = i1[0], imax = i1[n1 - 1]; + else + for (i = 0; i < n1; i++) { + if (i1[i] < 0) continue; + imin = PetscMin(imin, i1[i]); + imax = PetscMax(imax, i1[i]); + } } - for (i = 0; i < n2; i++) { - if (i2[i] < 0) continue; - imin = PetscMin(imin, i2[i]); - imax = PetscMax(imax, i2[i]); + if (n2) { + PetscCall(ISSorted(is2, &sorted2)); + if (sorted2 && i2[0] >= 0) imin = PetscMin(imin, i2[0]), imax = PetscMax(imax, i2[n2 - 1]); + else + for (i = 0; i < n2; i++) { + if (i2[i] < 0) continue; + imin = PetscMin(imin, i2[i]); + imax = PetscMax(imax, i2[i]); + } } } else imin = imax = 0; @@ -319,6 +326,7 @@ PetscErrorCode ISExpand(IS is1, IS is2, IS *isout) if (i1[i] < 0) continue; if
(!PetscBTLookupSet(mask, i1[i] - imin)) iout[nout++] = i1[i]; } + n1 = -nout; PetscCall(ISRestoreIndices(is1, &i1)); /* Put the values from is2 */ for (i = 0; i < n2; i++) { @@ -330,7 +338,8 @@ PetscErrorCode ISExpand(IS is1, IS is2, IS *isout) /* create the new IS containing the sum */ PetscCall(PetscObjectGetComm((PetscObject)is1, &comm)); PetscCall(ISCreateGeneral(comm, nout, iout, PETSC_OWN_POINTER, isout)); - + /* no entries of is2 (resp. is1) were inserted, so if is1 (resp. is2) is sorted, then so is isout */ + if ((-n1 == nout && sorted1) || (n1 == 0 && sorted2)) PetscCall(ISSetInfo(*isout, IS_SORTED, IS_LOCAL, PETSC_FALSE, PETSC_TRUE)); PetscCall(PetscBTDestroy(&mask)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -423,7 +432,7 @@ PetscErrorCode ISIntersect(IS is1, IS is2, IS *isout) /* create the new IS containing the sum */ PetscCall(ISCreateGeneral(comm, nout, iout, PETSC_OWN_POINTER, isout)); - + PetscCall(ISSetInfo(*isout, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE)); PetscCall(ISRestoreIndices(is2sorted, &i2)); PetscCall(ISDestroy(&is2sorted)); PetscCall(ISRestoreIndices(is1sorted, &i1)); @@ -578,7 +587,7 @@ PetscErrorCode ISListToPair(MPI_Comm comm, PetscInt listlen, IS islist[], IS *xi PetscFunctionReturn(PETSC_SUCCESS); } -/*@ +/*@C ISPairToList - Convert an `IS` pair encoding an integer map to a list of `IS`. Collective diff --git a/src/vec/is/section/interface/ftn-custom/makefile b/src/vec/is/section/interface/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/vec/is/section/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/is/section/interface/ftn-custom/zsectionf.c b/src/vec/is/section/interface/ftn-custom/zsectionf.c deleted file mode 100644 index af6b541477c..00000000000 --- a/src/vec/is/section/interface/ftn-custom/zsectionf.c +++ /dev/null @@ -1,20 +0,0 @@ -#include <petsc/private/fortranimpl.h> -#include <petscsection.h> -#include <petscviewer.h> - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscsectionviewfromoptions_ PETSCSECTIONVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscsectionviewfromoptions_ petscsectionviewfromoptions -#endif - -PETSC_EXTERN void petscsectionviewfromoptions_(PetscSection *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscSectionViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/vec/is/section/interface/section.c b/src/vec/is/section/interface/section.c index cb7f482a363..c19c0e45868 100644 --- a/src/vec/is/section/interface/section.c +++ b/src/vec/is/section/interface/section.c @@ -41,7 +41,6 @@ PetscErrorCode PetscSectionCreate(MPI_Comm comm, PetscSection *s) PetscCall(ISInitializePackage()); PetscCall(PetscHeaderCreate(*s, PETSC_SECTION_CLASSID, "PetscSection", "Section", "IS", comm, PetscSectionDestroy, PetscSectionView)); - (*s)->pStart = -1; (*s)->pEnd = -1; (*s)->perm = NULL; @@ -404,7 +403,7 @@ PetscErrorCode PetscSectionSetNumFields(PetscSection s, PetscInt numFields) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionGetFieldName - Returns the name of a field in the `PetscSection` Not Collective @@ -433,7 +432,7 @@ PetscErrorCode PetscSectionGetFieldName(PetscSection s, PetscInt field, const ch PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@
PetscSectionSetFieldName - Sets the name of a field in the `PetscSection` Not Collective @@ -461,7 +460,7 @@ PetscErrorCode PetscSectionSetFieldName(PetscSection s, PetscInt field, const ch PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionGetComponentName - Gets the name of a field component in the `PetscSection` Not Collective @@ -496,7 +495,7 @@ PetscErrorCode PetscSectionGetComponentName(PetscSection s, PetscInt field, Pets PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionSetComponentName - Sets the name of a field component in the `PetscSection` Not Collective @@ -744,7 +743,7 @@ PetscErrorCode PetscSectionSetPermutation(PetscSection s, IS perm) /*@C PetscSectionGetBlockStarts - Returns a table indicating which points start new blocks - Not Collective + Not Collective, No Fortran Support Input Parameter: . s - the `PetscSection` @@ -775,7 +774,7 @@ PetscErrorCode PetscSectionGetBlockStarts(PetscSection s, PetscBT *blockStarts) /*@C PetscSectionSetBlockStarts - Sets a table indicating which points start new blocks - Not Collective + Not Collective, No Fortran Support Input Parameters: + s - the `PetscSection` @@ -2456,7 +2455,7 @@ static PetscErrorCode PetscSectionView_ASCII(PetscSection s, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionViewFromOptions - View the `PetscSection` based on values in the options database Collective @@ -2481,7 +2480,7 @@ PetscErrorCode PetscSectionViewFromOptions(PetscSection A, PetscObject obj, cons PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionView - Views a `PetscSection` Collective @@ -2534,7 +2533,7 @@ PetscErrorCode PetscSectionView(PetscSection s, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionLoad - Loads a `PetscSection` Collective @@ -2767,7 +2766,7 @@ static PetscErrorCode VecIntSetValuesSection_Private(PetscInt *baseArray, PetscS PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionHasConstraints - Determine whether a `PetscSection` has constrained dofs Not Collective @@ -2810,7 +2809,7 @@ PetscErrorCode PetscSectionHasConstraints(PetscSection s, PetscBool *hasConstrai .seealso: [PetscSection](sec_petscsection), `PetscSectionSetConstraintIndices()`, `PetscSectionGetConstraintDof()`, `PetscSection` @*/ -PetscErrorCode PetscSectionGetConstraintIndices(PetscSection s, PetscInt point, const PetscInt **indices) +PetscErrorCode PetscSectionGetConstraintIndices(PetscSection s, PetscInt point, const PetscInt *indices[]) { PetscFunctionBegin; PetscValidHeaderSpecific(s, PETSC_SECTION_CLASSID, 1); @@ -2820,7 +2819,7 @@ PetscErrorCode PetscSectionGetConstraintIndices(PetscSection s, PetscInt point, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionSetConstraintIndices - Set the point dof numbers, in [0, dof), which are constrained Not Collective @@ -3282,11 +3281,12 @@ PetscErrorCode PetscSectionSymCreate(MPI_Comm comm, PetscSectionSym *sym) PetscFunctionBegin; PetscAssertPointer(sym, 2); PetscCall(ISInitializePackage()); + PetscCall(PetscHeaderCreate(*sym, PETSC_SECTION_SYM_CLASSID, "PetscSectionSym", "Section Symmetry", "IS", comm, PetscSectionSymDestroy, PetscSectionSymView)); PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionSymSetType - Builds a `PetscSectionSym`, for a particular implementation. 
Collective @@ -3319,7 +3319,7 @@ PetscErrorCode PetscSectionSymSetType(PetscSectionSym sym, PetscSectionSymType m PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionSymGetType - Gets the section symmetry type name (as a string) from the `PetscSectionSym`. Not Collective @@ -3346,7 +3346,7 @@ PetscErrorCode PetscSectionSymGetType(PetscSectionSym sym, PetscSectionSymType * /*@C PetscSectionSymRegister - Registers a new section symmetry implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine @@ -3404,7 +3404,7 @@ PetscErrorCode PetscSectionSymDestroy(PetscSectionSym *sym) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSectionSymView - Displays a section symmetry Collective @@ -3536,8 +3536,8 @@ PetscErrorCode PetscSectionGetFieldSym(PetscSection section, PetscInt field, Pet + section - the section . numPoints - the number of points - points - an array of size 2 * `numPoints`, containing a list of (point, orientation) pairs. (An orientation is an - arbitrary integer: its interpretation is up to sym. Orientations are used by `DM`: for their interpretation in that - context, see `DMPlexGetConeOrientation()`). + arbitrary integer: its interpretation is up to sym. Orientations are used by `DM`: for their interpretation in that + context, see `DMPlexGetConeOrientation()`). Output Parameters: + perms - The permutations for the given orientations (or `NULL` if there is no symmetry or the permutation is the identity). @@ -3644,8 +3644,8 @@ PetscErrorCode PetscSectionGetPointSyms(PetscSection section, PetscInt numPoints + section - the section . numPoints - the number of points . points - an array of size 2 * `numPoints`, containing a list of (point, orientation) pairs. (An orientation is an - arbitrary integer: its interpretation is up to sym. Orientations are used by `DM`: for their interpretation in that - context, see `DMPlexGetConeOrientation()`). + arbitrary integer: its interpretation is up to sym. Orientations are used by `DM`: for their interpretation in that + context, see `DMPlexGetConeOrientation()`). . 
perms - The permutations for the given orientations: set to `NULL` at conclusion - rots - The field rotations symmetries for the given orientations: set to `NULL` at conclusion diff --git a/src/vec/is/sf/impls/basic/allgather/sfallgather.c b/src/vec/is/sf/impls/basic/allgather/sfallgather.c index b30adfdec4d..a6b3c3dd82d 100644 --- a/src/vec/is/sf/impls/basic/allgather/sfallgather.c +++ b/src/vec/is/sf/impls/basic/allgather/sfallgather.c @@ -43,7 +43,7 @@ static PetscErrorCode PetscSFBcastBegin_Allgather(PetscSF sf, MPI_Datatype unit, PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); PetscCall(PetscMPIIntCast(sf->nroots, &sendcount)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, PETSCSF_ROOT2LEAF, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, PETSCSF_ROOT2LEAF)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); PetscCallMPI(MPIU_Iallgather(rootbuf, sendcount, unit, leafbuf, sendcount, unit, comm, req)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -77,7 +77,7 @@ static PetscErrorCode PetscSFReduceBegin_Allgather(PetscSF sf, MPI_Datatype unit } if (rank == 0 && link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi] == leafbuf) leafbuf = MPI_IN_PLACE; PetscCall(PetscMPIIntCast(sf->nleaves * link->bs, &count)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, PETSCSF_LEAF2ROOT)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); PetscCallMPI(MPI_Reduce(leafbuf, link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi], count, link->basicunit, op, 0, comm)); /* Must do reduce with MPI builtin datatype basicunit */ PetscCallMPI(MPIU_Iscatter(link->leafbuf_alloc[PETSCSF_REMOTE][link->leafmtype_mpi], recvcount, unit, rootbuf, recvcount, unit, 0 /*rank 0*/, comm, req)); } @@ -100,7 +100,7 @@ static PetscErrorCode PetscSFBcastToZero_Allgather(PetscSF sf, MPI_Datatype unit PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); PetscCall(PetscMPIIntCast(sf->nroots, &sendcount)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, PETSCSF_ROOT2LEAF, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, PETSCSF_ROOT2LEAF)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); PetscCallMPI(MPIU_Igather(rootbuf == leafbuf ? 
MPI_IN_PLACE : rootbuf, sendcount, unit, leafbuf, sendcount, unit, 0 /*rank 0*/, comm, req)); PetscCall(PetscSFLinkGetInUse(sf, unit, rootdata, leafdata, PETSC_OWN_POINTER, &link)); PetscCall(PetscSFLinkFinishCommunication(sf, link, PETSCSF_ROOT2LEAF)); diff --git a/src/vec/is/sf/impls/basic/allgatherv/sfallgatherv.c b/src/vec/is/sf/impls/basic/allgatherv/sfallgatherv.c index c15d3de2a8b..f48d9c1e53d 100644 --- a/src/vec/is/sf/impls/basic/allgatherv/sfallgatherv.c +++ b/src/vec/is/sf/impls/basic/allgatherv/sfallgatherv.c @@ -119,7 +119,7 @@ static PetscErrorCode PetscSFBcastBegin_Allgatherv(PetscSF sf, MPI_Datatype unit if (dat->bcast_pattern && rank == dat->bcast_root) PetscCall((*link->Memcpy)(link, link->leafmtype_mpi, leafbuf, link->rootmtype_mpi, rootbuf, (size_t)sendcount * link->unitbytes)); /* Ready the buffers for MPI */ - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, PETSCSF_ROOT2LEAF)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); if (dat->bcast_pattern) PetscCallMPI(MPIU_Ibcast(leafbuf, sf->nleaves, unit, dat->bcast_root, comm, req)); else PetscCallMPI(MPIU_Iallgatherv(rootbuf, sendcount, unit, leafbuf, dat->recvcounts, dat->displs, unit, comm, req)); PetscFunctionReturn(PETSC_SUCCESS); @@ -147,10 +147,10 @@ static PetscErrorCode PetscSFReduceBegin_Allgatherv(PetscSF sf, MPI_Datatype uni PetscCall(PetscSFLinkPackLeafData(sf, link, PETSCSF_REMOTE, leafdata)); PetscCall(PetscSFLinkCopyLeafBufferInCaseNotUseGpuAwareMPI(sf, link, PETSC_TRUE /* device2host before sending */)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, PETSCSF_LEAF2ROOT, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, PETSCSF_LEAF2ROOT)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); if (dat->bcast_pattern) { -#if defined(PETSC_HAVE_OMPI_MAJOR_VERSION) /* Workaround: cuda-aware Open MPI 4.1.3 does not support MPI_Ireduce() with device buffers */ - *req = MPI_REQUEST_NULL; /* Set NULL so that we can safely MPI_Wait(req) */ +#if defined(PETSC_HAVE_OPENMPI) /* Workaround: cuda-aware Open MPI 4.1.3 does not support MPI_Ireduce() with device buffers */ + *req = MPI_REQUEST_NULL; /* Set NULL so that we can safely MPI_Wait(req) */ PetscCallMPI(MPI_Reduce(leafbuf, rootbuf, sf->nleaves, unit, op, dat->bcast_root, comm)); #else PetscCallMPI(MPIU_Ireduce(leafbuf, rootbuf, sf->nleaves, unit, op, dat->bcast_root, comm, req)); @@ -209,7 +209,7 @@ static PetscErrorCode PetscSFBcastToZero_Allgatherv(PetscSF sf, MPI_Datatype uni PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); PetscCall(PetscMPIIntCast(sf->nroots, &sendcount)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, PETSCSF_ROOT2LEAF, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, PETSCSF_ROOT2LEAF)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); PetscCallMPI(MPIU_Igatherv(rootbuf, sendcount, unit, leafbuf, dat->recvcounts, dat->displs, unit, 0 /*rank 0*/, comm, req)); PetscCall(PetscSFLinkGetInUse(sf, unit, rootdata, leafdata, PETSC_OWN_POINTER, &link)); diff --git a/src/vec/is/sf/impls/basic/alltoall/sfalltoall.c b/src/vec/is/sf/impls/basic/alltoall/sfalltoall.c index 67d07fd4e68..6b697e98618 100644 --- a/src/vec/is/sf/impls/basic/alltoall/sfalltoall.c +++ b/src/vec/is/sf/impls/basic/alltoall/sfalltoall.c @@ -20,7 +20,7 @@ static PetscErrorCode PetscSFLinkStartCommunication_Alltoall(PetscSF sf, PetscSF } PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, 
link, direction, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, direction)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); if (direction == PETSCSF_ROOT2LEAF) { PetscCallMPI(MPIU_Ialltoall(rootbuf, 1, unit, leafbuf, 1, unit, comm, req)); diff --git a/src/vec/is/sf/impls/basic/cupm/sfcupm.hpp b/src/vec/is/sf/impls/basic/cupm/sfcupm.hpp index 257ad4c644a..038052879d8 100644 --- a/src/vec/is/sf/impls/basic/cupm/sfcupm.hpp +++ b/src/vec/is/sf/impls/basic/cupm/sfcupm.hpp @@ -17,7 +17,7 @@ namespace impl { template -struct SfInterface : device::cupm::impl::Interface { +struct PETSC_SINGLE_LIBRARY_VISIBILITY_INTERNAL SfInterface : device::cupm::impl::Interface { PETSC_CUPM_INHERIT_INTERFACE_TYPEDEFS_USING(T); private: diff --git a/src/vec/is/sf/impls/basic/gather/sfgather.c b/src/vec/is/sf/impls/basic/gather/sfgather.c index 1f3c687d908..39a4de880ae 100644 --- a/src/vec/is/sf/impls/basic/gather/sfgather.c +++ b/src/vec/is/sf/impls/basic/gather/sfgather.c @@ -21,7 +21,7 @@ static PetscErrorCode PetscSFLinkStartCommunication_Gather(PetscSF sf, PetscSFLi PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); PetscCall(PetscMPIIntCast(sf->nroots, &count)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, direction)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); if (direction == PETSCSF_ROOT2LEAF) { PetscCallMPI(MPIU_Igather(rootbuf == leafbuf ? MPI_IN_PLACE : rootbuf, count, unit, leafbuf, count, unit, 0 /*rank 0*/, comm, req)); diff --git a/src/vec/is/sf/impls/basic/gatherv/sfgatherv.c b/src/vec/is/sf/impls/basic/gatherv/sfgatherv.c index b2f90b4e67c..fda02d2bb7c 100644 --- a/src/vec/is/sf/impls/basic/gatherv/sfgatherv.c +++ b/src/vec/is/sf/impls/basic/gatherv/sfgatherv.c @@ -23,7 +23,7 @@ static PetscErrorCode PetscSFLinkStartCommunication_Gatherv(PetscSF sf, PetscSFL PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); PetscCall(PetscMPIIntCast(sf->nroots, &count)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, direction)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); if (direction == PETSCSF_ROOT2LEAF) { PetscCallMPI(MPIU_Igatherv(rootbuf, count, unit, leafbuf, dat->recvcounts, dat->displs, unit, 0 /*rank 0*/, comm, req)); diff --git a/src/vec/is/sf/impls/basic/neighbor/sfneighbor.c b/src/vec/is/sf/impls/basic/neighbor/sfneighbor.c index 2726d66f663..a5958c36e65 100644 --- a/src/vec/is/sf/impls/basic/neighbor/sfneighbor.c +++ b/src/vec/is/sf/impls/basic/neighbor/sfneighbor.c @@ -1,5 +1,6 @@ #include <../src/vec/is/sf/impls/basic/sfpack.h> #include <../src/vec/is/sf/impls/basic/sfbasic.h> +#include /* Convenience local types */ #if defined(PETSC_HAVE_MPI_LARGE_COUNT) && defined(PETSC_USE_64BIT_INDICES) @@ -93,7 +94,7 @@ static PetscErrorCode PetscSFLinkStartCommunication_Neighbor(PetscSF sf, PetscSF PetscCall(PetscSFGetDistComm_Neighbor(sf, direction, &distcomm)); PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, &rootbuf, &leafbuf, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, direction)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); if (dat->rootdegree || dat->leafdegree) { // OpenMPI-3.0 ran into error with rootdegree = leafdegree = 0, so we skip the call in this case if (direction == PETSCSF_ROOT2LEAF) { @@ -149,7 +150,7 @@ static 
PetscErrorCode PetscSFLinkStartCommunication_Persistent_Neighbor(PetscSF } PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, &req, NULL)); - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, direction)); + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); if (dat->rootdegree || dat->leafdegree) { PetscCallMPI(MPI_Start(req)); if (direction == PETSCSF_ROOT2LEAF) { @@ -185,7 +186,7 @@ static PetscErrorCode PetscSFSetUp_Neighbor(PetscSF sf) PetscSF_Neighbor *dat = (PetscSF_Neighbor *)sf->data; PetscInt i, j, nrootranks, ndrootranks, nleafranks, ndleafranks; const PetscInt *rootoffset, *leafoffset; - PetscMPIInt m, n; + PetscMPIInt m, n, m2, n2; PetscFunctionBegin; /* SFNeighbor inherits from Basic */ @@ -198,13 +199,16 @@ static PetscErrorCode PetscSFSetUp_Neighbor(PetscSF sf) sf->nleafreqs = 0; dat->nrootreqs = 1; // collectives only need one MPI_Request. We just put it in rootreqs[] - /* Only setup MPI displs/counts for non-distinguished ranks. Distinguished ranks use shared memory */ -#if !PetscDefined(HAVE_OPENMPI) || (PetscDefined(HAVE_OMPI_MAJOR_VERSION) && PetscDefined(HAVE_OMPI_MINOR_VERSION) && PetscDefined(HAVE_OMPI_RELEASE_VERSION) && !(PETSC_HAVE_OMPI_MAJOR_VERSION == 5 && PETSC_HAVE_OMPI_MINOR_VERSION == 0 && PETSC_HAVE_OMPI_RELEASE_VERSION == 0)) - PetscCall(PetscMalloc6(m, &dat->rootdispls, m, &dat->rootcounts, m, &dat->rootweights, n, &dat->leafdispls, n, &dat->leafcounts, n, &dat->leafweights)); -#else // workaround for an OpenMPI 5.0.0 bug, https://github.com/open-mpi/ompi/issues/12037 - PetscMPIInt m2 = m ? m : 1, n2 = n ? n : 1; - PetscCall(PetscMalloc6(m2, &dat->rootdispls, m2, &dat->rootcounts, m2, &dat->rootweights, n2, &dat->leafdispls, n2, &dat->leafcounts, n2, &dat->leafweights)); + m2 = m; + n2 = n; +#if defined(PETSC_HAVE_OPENMPI) // workaround for an OpenMPI 5.0.x bug, https://github.com/open-mpi/ompi/pull/12614 + #if PETSC_PKG_OPENMPI_VERSION_LE(5, 0, 3) + m2 = m ? m : 1; + n2 = n ? n : 1; + #endif #endif + // Only setup MPI displs/counts for non-distinguished ranks. Distinguished ranks use shared memory + PetscCall(PetscMalloc6(m2, &dat->rootdispls, m2, &dat->rootcounts, m2, &dat->rootweights, n2, &dat->leafdispls, n2, &dat->leafcounts, n2, &dat->leafweights)); #if defined(PETSC_HAVE_MPI_LARGE_COUNT) && defined(PETSC_USE_64BIT_INDICES) for (i = ndrootranks, j = 0; i < nrootranks; i++, j++) { diff --git a/src/vec/is/sf/impls/basic/sfbasic.c b/src/vec/is/sf/impls/basic/sfbasic.c index b648180c68d..9fce0de84cd 100644 --- a/src/vec/is/sf/impls/basic/sfbasic.c +++ b/src/vec/is/sf/impls/basic/sfbasic.c @@ -56,38 +56,38 @@ static PetscErrorCode PetscSFLinkInitMPIRequests_Persistent_Basic(PetscSF sf, Pe // Start MPI requests. If use non-GPU aware MPI, we might need to copy data from device buf to host buf static PetscErrorCode PetscSFLinkStartCommunication_Persistent_Basic(PetscSF sf, PetscSFLink link, PetscSFDirection direction) { - PetscMPIInt nreqs; - MPI_Request *reqs = NULL; - PetscSF_Basic *bas = (PetscSF_Basic *)sf->data; - PetscInt buflen; + PetscMPIInt nsreqs = 0, nrreqs = 0; + MPI_Request *sreqs = NULL, *rreqs = NULL; + PetscSF_Basic *bas = (PetscSF_Basic *)sf->data; + PetscInt sbuflen, rbuflen; PetscFunctionBegin; - buflen = (direction == PETSCSF_ROOT2LEAF) ? sf->leafbuflen[PETSCSF_REMOTE] : bas->rootbuflen[PETSCSF_REMOTE]; - if (buflen) { + rbuflen = (direction == PETSCSF_ROOT2LEAF) ? 
sf->leafbuflen[PETSCSF_REMOTE] : bas->rootbuflen[PETSCSF_REMOTE]; + if (rbuflen) { if (direction == PETSCSF_ROOT2LEAF) { - nreqs = sf->nleafreqs; - PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, NULL, &reqs)); + nrreqs = sf->nleafreqs; + PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, NULL, &rreqs)); } else { /* leaf to root */ - nreqs = bas->nrootreqs; - PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, &reqs, NULL)); + nrreqs = bas->nrootreqs; + PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, &rreqs, NULL)); } - PetscCallMPI(MPI_Startall_irecv(buflen, link->unit, nreqs, reqs)); } - buflen = (direction == PETSCSF_ROOT2LEAF) ? bas->rootbuflen[PETSCSF_REMOTE] : sf->leafbuflen[PETSCSF_REMOTE]; - if (buflen) { + sbuflen = (direction == PETSCSF_ROOT2LEAF) ? bas->rootbuflen[PETSCSF_REMOTE] : sf->leafbuflen[PETSCSF_REMOTE]; + if (sbuflen) { if (direction == PETSCSF_ROOT2LEAF) { - nreqs = bas->nrootreqs; + nsreqs = bas->nrootreqs; PetscCall(PetscSFLinkCopyRootBufferInCaseNotUseGpuAwareMPI(sf, link, PETSC_TRUE /*device2host before sending */)); - PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, &reqs, NULL)); + PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, &sreqs, NULL)); } else { /* leaf to root */ - nreqs = sf->nleafreqs; + nsreqs = sf->nleafreqs; PetscCall(PetscSFLinkCopyLeafBufferInCaseNotUseGpuAwareMPI(sf, link, PETSC_TRUE)); - PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, NULL, &reqs)); + PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, NULL, NULL, NULL, &sreqs)); } - PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link, direction)); - PetscCallMPI(MPI_Startall_isend(buflen, link->unit, nreqs, reqs)); } + PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link)); // need to sync the stream to make BOTH sendbuf and recvbuf ready + if (rbuflen) PetscCallMPI(MPI_Startall_irecv(rbuflen, link->unit, nrreqs, rreqs)); + if (sbuflen) PetscCallMPI(MPI_Startall_isend(sbuflen, link->unit, nsreqs, sreqs)); PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/vec/is/sf/impls/basic/sfmpi.c b/src/vec/is/sf/impls/basic/sfmpi.c index 4722e2bdfac..c3080564cf7 100644 --- a/src/vec/is/sf/impls/basic/sfmpi.c +++ b/src/vec/is/sf/impls/basic/sfmpi.c @@ -1,6 +1,6 @@ #include <../src/vec/is/sf/impls/basic/sfpack.h> -// Though there is no default machanism to start a communication, we have a +// Though there is no default mechanism to start a communication, we have a // default to finish communication, which is just waiting on the requests. // It should work for both non-blocking and persistent send/recvs as well as collectives.
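Editorial note on the refactoring above: PetscSFLinkSyncStreamBeforeCallMPI() loses its direction argument because, now that the receive and send requests are started together after the sync (per the added comment), both the send and the receive buffers must be ready before any MPI call, so the link synchronizes its stream whenever either remote buffer lives on the device. This internal machinery serves the usual two-phase pattern of the public PetscSF API; the following is a minimal editorial sketch (not part of the patch) of that pattern, broadcasting one root value held on rank 0 to a leaf on every rank:

/* --- editorial sketch, not part of the patch --- */
#include <petscsf.h>

int main(int argc, char **argv)
{
  PetscSF     sf;
  PetscSFNode remote;
  PetscInt    rootdata = 42, leafdata = -1; /* rank 0's root holds 42 */

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  remote.rank  = 0; /* every rank's single leaf references root 0 on rank 0 */
  remote.index = 0;
  PetscCall(PetscSFSetGraph(sf, 1, 1, NULL /* contiguous leaves */, PETSC_COPY_VALUES, &remote, PETSC_COPY_VALUES));
  PetscCall(PetscSFSetUp(sf));
  /* Two-phase communication: Begin posts the sends/receives, End waits on the requests */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, &rootdata, &leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, &rootdata, &leafdata, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(PetscFinalize());
  return 0;
}
/* --- end sketch --- */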
static PetscErrorCode PetscSFLinkFinishCommunication_Default(PetscSF sf, PetscSFLink link, PetscSFDirection direction) diff --git a/src/vec/is/sf/impls/basic/sfpack.c b/src/vec/is/sf/impls/basic/sfpack.c index 9bfb9ad06d6..71f080a9c99 100644 --- a/src/vec/is/sf/impls/basic/sfpack.c +++ b/src/vec/is/sf/impls/basic/sfpack.c @@ -816,33 +816,28 @@ PetscErrorCode PetscSFLinkGetFetchAndOpLocal(PetscSFLink link, PetscMemType mtyp static inline PetscErrorCode PetscSFLinkLogFlopsAfterUnpackRootData(PetscSF sf, PetscSFLink link, PetscSFScope scope, MPI_Op op) { - PetscLogDouble flops; PetscSF_Basic *bas = (PetscSF_Basic *)sf->data; PetscFunctionBegin; if (op != MPI_REPLACE && link->basicunit == MPIU_SCALAR) { /* op is a reduction on PetscScalars */ - flops = bas->rootbuflen[scope] * link->bs; /* # of roots in buffer x # of scalars in unit */ #if defined(PETSC_HAVE_DEVICE) - if (PetscMemTypeDevice(link->rootmtype)) PetscCall(PetscLogGpuFlops(flops)); + if (PetscMemTypeDevice(link->rootmtype)) PetscCall(PetscLogGpuFlops(bas->rootbuflen[scope] * link->bs)); else #endif - PetscCall(PetscLogFlops(flops)); + PetscCall(PetscLogFlops(bas->rootbuflen[scope] * link->bs)); /* # of roots in buffer x # of scalars in unit */ } PetscFunctionReturn(PETSC_SUCCESS); } static inline PetscErrorCode PetscSFLinkLogFlopsAfterUnpackLeafData(PetscSF sf, PetscSFLink link, PetscSFScope scope, MPI_Op op) { - PetscLogDouble flops; - PetscFunctionBegin; if (op != MPI_REPLACE && link->basicunit == MPIU_SCALAR) { /* op is a reduction on PetscScalars */ - flops = sf->leafbuflen[scope] * link->bs; /* # of roots in buffer x # of scalars in unit */ #if defined(PETSC_HAVE_DEVICE) - if (PetscMemTypeDevice(link->leafmtype)) PetscCall(PetscLogGpuFlops(flops)); + if (PetscMemTypeDevice(link->leafmtype)) PetscCall(PetscLogGpuFlops(sf->leafbuflen[scope] * link->bs)); /* # of leaves in buffer x # of scalars in unit */ else #endif - PetscCall(PetscLogFlops(flops)); + PetscCall(PetscLogFlops(sf->leafbuflen[scope] * link->bs)); } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/vec/is/sf/impls/basic/sfpack.h b/src/vec/is/sf/impls/basic/sfpack.h index 69feb00b7ed..12cbe754ad6 100644 --- a/src/vec/is/sf/impls/basic/sfpack.h +++ b/src/vec/is/sf/impls/basic/sfpack.h @@ -337,29 +337,19 @@ static inline PetscErrorCode PetscSFLinkCopyLeafBufferInCaseNotUseGpuAwareMPI(Pe } /* Make sure root/leafbuf for the remote is ready for MPI */ -static inline PetscErrorCode PetscSFLinkSyncStreamBeforeCallMPI(PetscSF sf, PetscSFLink link, PetscSFDirection direction) +static inline PetscErrorCode PetscSFLinkSyncStreamBeforeCallMPI(PetscSF sf, PetscSFLink link) { - PetscSF_Basic *bas; - PetscInt buflen; - PetscMemType mtype; + PetscSF_Basic *bas = (PetscSF_Basic *)sf->data; PetscFunctionBegin; - if (direction == PETSCSF_ROOT2LEAF) { - bas = (PetscSF_Basic *)sf->data; - mtype = link->rootmtype; - buflen = bas->rootbuflen[PETSCSF_REMOTE]; - } else { - mtype = link->leafmtype; - buflen = sf->leafbuflen[PETSCSF_REMOTE]; - } - - if (PetscMemTypeDevice(mtype) && buflen) PetscCall((*link->SyncStream)(link)); + // Make sendbuf ready to read, recvbuf ready to write (other previous operations on recvbuf might finish after MPI_Waitall() if they use different streams) + if ((PetscMemTypeDevice(link->rootmtype) && bas->rootbuflen[PETSCSF_REMOTE]) || (PetscMemTypeDevice(link->leafmtype) && sf->leafbuflen[PETSCSF_REMOTE])) PetscCall((*link->SyncStream)(link)); PetscFunctionReturn(PETSC_SUCCESS); } #else /* Host only */ #define PetscSFLinkCopyRootBufferInCaseNotUseGpuAwareMPI(a,
b, c) PETSC_SUCCESS #define PetscSFLinkCopyLeafBufferInCaseNotUseGpuAwareMPI(a, b, c) PETSC_SUCCESS - #define PetscSFLinkSyncStreamBeforeCallMPI(a, b, c) PETSC_SUCCESS + #define PetscSFLinkSyncStreamBeforeCallMPI(a, b) PETSC_SUCCESS #endif /* Get root indices used for pack/unpack diff --git a/src/vec/is/sf/impls/window/sfwindow.c b/src/vec/is/sf/impls/window/sfwindow.c index 19d29517c48..1caa9c462ff 100644 --- a/src/vec/is/sf/impls/window/sfwindow.c +++ b/src/vec/is/sf/impls/window/sfwindow.c @@ -118,7 +118,7 @@ static PetscErrorCode PetscSFWindowGetDataTypes(PetscSF sf, MPI_Datatype unit, c PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFWindowSetFlavorType - Set flavor type for `MPI_Win` creation Logically Collective @@ -176,7 +176,7 @@ static PetscErrorCode PetscSFWindowSetFlavorType_Window(PetscSF sf, PetscSFWindo PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFWindowGetFlavorType - Get `PETSCSFWINDOW` flavor type for `PetscSF` communication Logically Collective @@ -209,7 +209,7 @@ static PetscErrorCode PetscSFWindowGetFlavorType_Window(PetscSF sf, PetscSFWindo PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFWindowSetSyncType - Set synchronization type for `PetscSF` communication of type `PETSCSFWINDOW` Logically Collective @@ -243,7 +243,7 @@ static PetscErrorCode PetscSFWindowSetSyncType_Window(PetscSF sf, PetscSFWindowS PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFWindowGetSyncType - Get synchronization type for `PetscSF` communication of type `PETSCSFWINDOW` Logically Collective @@ -455,7 +455,7 @@ static PetscErrorCode PetscSFGetWindow(PetscSF sf, MPI_Datatype unit, void *arra #if defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW) case PETSCSF_WINDOW_FLAVOR_DYNAMIC: PetscCallMPI(MPI_Win_create_dynamic(w->info, PetscObjectComm((PetscObject)sf), &link->win)); - #if defined(PETSC_HAVE_OMPI_MAJOR_VERSION) /* some Open MPI versions do not support MPI_Win_attach(win,NULL,0); */ + #if defined(PETSC_HAVE_OPENMPI) /* some Open MPI versions do not support MPI_Win_attach(win,NULL,0); */ PetscCallMPI(MPI_Win_attach(link->win, wsize ? array : (void *)dummy, wsize)); #else PetscCallMPI(MPI_Win_attach(link->win, array, wsize)); @@ -993,7 +993,8 @@ PETSC_INTERN PetscErrorCode PetscSFCreate_Window(PetscSF sf) PetscCall(PetscObjectComposeFunction((PetscObject)sf, "PetscSFWindowSetInfo_C", PetscSFWindowSetInfo_Window)); PetscCall(PetscObjectComposeFunction((PetscObject)sf, "PetscSFWindowGetInfo_C", PetscSFWindowGetInfo_Window)); -#if defined(OMPI_MAJOR_VERSION) && (OMPI_MAJOR_VERSION < 1 || (OMPI_MAJOR_VERSION == 1 && OMPI_MINOR_VERSION <= 6)) +#if defined(PETSC_HAVE_OPENMPI) + #if PETSC_PKG_OPENMPI_VERSION_LE(1, 6, 0) { PetscBool ackbug = PETSC_FALSE; PetscCall(PetscOptionsGetBool(NULL, NULL, "-acknowledge_ompi_onesided_bug", &ackbug, NULL)); @@ -1001,6 +1002,7 @@ PETSC_INTERN PetscErrorCode PetscSFCreate_Window(PetscSF sf) PetscCall(PetscInfo(sf, "Acknowledged Open MPI bug, proceeding anyway. 
Expect memory corruption.\n")); } else SETERRQ(PetscObjectComm((PetscObject)sf), PETSC_ERR_LIB, "Open MPI is known to be buggy (https://svn.open-mpi.org/trac/ompi/ticket/1905 and 2656), use -acknowledge_ompi_onesided_bug to proceed"); } + #endif #endif PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/vec/is/sf/interface/ftn-custom/zsf.c b/src/vec/is/sf/interface/ftn-custom/zsf.c index 90532ec3f7b..81568da9c60 100644 --- a/src/vec/is/sf/interface/ftn-custom/zsf.c +++ b/src/vec/is/sf/interface/ftn-custom/zsf.c @@ -2,49 +2,27 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define petscsfview_ PETSCSFVIEW - #define petscsfgetgraph_ PETSCSFGETGRAPH - #define petscsfbcastbegin_ PETSCSFBCASTBEGIN - #define petscsfbcastend_ PETSCSFBCASTEND - #define petscsfreducebegin_ PETSCSFREDUCEBEGIN - #define petscsfreduceend_ PETSCSFREDUCEEND - #define f90arraysfnodecreate_ F90ARRAYSFNODECREATE - #define petscsfviewfromoptions_ PETSCSFVIEWFROMOPTIONS - #define petscsfdestroy_ PETSCSFDESTROY - #define petscsfsetgraph_ PETSCSFSETGRAPH - #define petscsfgetleafranks_ PETSCSFGETLEAFRANKS - #define petscsfgetrootranks_ PETSCSFGETROOTRANKS + #define petscsfgetgraph_ PETSCSFGETGRAPH + #define petscsfbcastbegin_ PETSCSFBCASTBEGIN + #define petscsfbcastend_ PETSCSFBCASTEND + #define petscsfreducebegin_ PETSCSFREDUCEBEGIN + #define petscsfreduceend_ PETSCSFREDUCEEND + #define f90arraysfnodecreate_ F90ARRAYSFNODECREATE + #define petscsfgetleafranks_ PETSCSFGETLEAFRANKS + #define petscsfgetrootranks_ PETSCSFGETROOTRANKS #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define petscsfgetgraph_ petscsfgetgraph - #define petscsfview_ petscsfview - #define petscsfbcastbegin_ petscsfbcastbegin - #define petscsfbcastend_ petscsfbcastend - #define petscsfreducebegin_ petscsfreducebegin - #define petscsfreduceend_ petscsfreduceend - #define f90arraysfnodecreate_ f90arraysfnodecreate - #define petscsfviewfromoptions_ petscsfviewfromoptions - #define petscsfdestroy_ petscsfdestroy - #define petscsfsetgraph_ petscsfsetgraph - #define petscsfgetleafranks_ petscsfgetleafranks - #define petscsfgetrootranks_ petscsfgetrootranks + #define petscsfgetgraph_ petscsfgetgraph + #define petscsfbcastbegin_ petscsfbcastbegin + #define petscsfbcastend_ petscsfbcastend + #define petscsfreducebegin_ petscsfreducebegin + #define petscsfreduceend_ petscsfreduceend + #define f90arraysfnodecreate_ f90arraysfnodecreate + #define petscsfgetleafranks_ petscsfgetleafranks + #define petscsfgetrootranks_ petscsfgetrootranks #endif PETSC_EXTERN void f90arraysfnodecreate_(const PetscInt *, PetscInt *, void *PETSC_F90_2PTR_PROTO_NOVAR); -PETSC_EXTERN void petscsfsetgraph_(PetscSF *sf, PetscInt *nroots, PetscInt *nleaves, PetscInt *ilocal, PetscCopyMode *localmode, PetscSFNode *iremote, PetscCopyMode *remotemode, int *ierr) -{ - if (ilocal == PETSC_NULL_INTEGER_Fortran) ilocal = NULL; - *ierr = PetscSFSetGraph(*sf, *nroots, *nleaves, ilocal, *localmode, iremote, *remotemode); -} - -PETSC_EXTERN void petscsfview_(PetscSF *sf, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscSFView(*sf, v); -} - PETSC_EXTERN void petscsfgetgraph_(PetscSF *sf, PetscInt *nroots, PetscInt *nleaves, F90Array1d *ailocal, F90Array1d *airemote, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(pilocal) PETSC_F90_2PTR_PROTO(piremote)) { const PetscInt *ilocal; @@ -201,24 +179,4 @@ PETSC_EXTERN void petscsfreduceend_(PetscSF *sf, MPI_Fint *unit, F90Array1d *lpt if (*ierr) return; *ierr = PetscSFReduceEnd(*sf, dtype, 
leafdata, rootdata, cop); } - -PETSC_EXTERN void petscsfviewfromoptions_(PetscSF *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PetscSFViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} - -PETSC_EXTERN void petscsfdestroy_(PetscSF *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = PetscSFDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} - #endif diff --git a/src/vec/is/sf/interface/ftn-custom/zvscat.c b/src/vec/is/sf/interface/ftn-custom/zvscat.c deleted file mode 100644 index 03992fae440..00000000000 --- a/src/vec/is/sf/interface/ftn-custom/zvscat.c +++ /dev/null @@ -1,49 +0,0 @@ -#include -#include -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define vecscatterremap_ VECSCATTERREMAP - #define vecscatterview_ VECSCATTERVIEW - #define vecscattercreatetoall_ VECSCATTERCREATETOALL - #define vecscattercreatetozero_ VECSCATTERCREATETOZERO - #define vecscatterdestroy_ VECSCATTERDESTROY -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define vecscatterremap_ vecscatterremap - #define vecscatterview_ vecscatterview - #define vecscattercreatetoall_ vecscattercreatetoall - #define vecscattercreatetozero_ vecscattercreatetozero - #define vecscatterdestroy_ vecscatterdestroy -#endif - -PETSC_EXTERN void vecscattercreatetoall_(Vec *vin, VecScatter *ctx, Vec *vout, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(vout); - *ierr = VecScatterCreateToAll(*vin, ctx, vout); -} - -PETSC_EXTERN void vecscattercreatetozero_(Vec *vin, VecScatter *ctx, Vec *vout, PetscErrorCode *ierr) -{ - CHKFORTRANNULLOBJECT(vout); - *ierr = VecScatterCreateToZero(*vin, ctx, vout); -} - -PETSC_EXTERN void vecscatterview_(VecScatter *vecscatter, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = VecScatterView(*vecscatter, v); -} - -PETSC_EXTERN void vecscatterremap_(VecScatter *scat, PetscInt *rto, PetscInt *rfrom, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(rto); - CHKFORTRANNULLINTEGER(rfrom); - *ierr = VecScatterRemap(*scat, rto, rfrom); -} - -PETSC_EXTERN void vecscatterdestroy_(VecScatter *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = VecScatterDestroy(x); - if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); -} diff --git a/src/vec/is/sf/interface/sf.c b/src/vec/is/sf/interface/sf.c index d441b9c6e43..9aa89afc007 100644 --- a/src/vec/is/sf/interface/sf.c +++ b/src/vec/is/sf/interface/sf.c @@ -62,7 +62,6 @@ PetscErrorCode PetscSFCreate(MPI_Comm comm, PetscSF *sf) PetscCall(PetscSFInitializePackage()); PetscCall(PetscHeaderCreate(b, PETSCSF_CLASSID, "PetscSF", "Star Forest", "PetscSF", comm, PetscSFDestroy, PetscSFView)); - b->nroots = -1; b->nleaves = -1; b->minleaf = PETSC_MAX_INT; @@ -146,7 +145,7 @@ PetscErrorCode PetscSFReset(PetscSF sf) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFSetType - Set the `PetscSF` communication implementation Collective @@ -191,7 +190,7 @@ PetscErrorCode PetscSFSetType(PetscSF sf, PetscSFType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFGetType - Get the `PetscSF` communication implementation Not Collective @@ -215,7 +214,7 @@ PetscErrorCode PetscSFGetType(PetscSF sf, PetscSFType *type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFDestroy - destroy a star forest Collective @@ -418,7 +417,7 @@ PetscErrorCode PetscSFSetRankOrder(PetscSF sf, 
PetscBool flg) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFSetGraph - Set a parallel star forest Collective @@ -835,7 +834,7 @@ PetscErrorCode PetscSFGetLeafRange(PetscSF sf, PetscInt *minleaf, PetscInt *maxl PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFViewFromOptions - View a `PetscSF` based on arguments in the options database Collective @@ -860,7 +859,7 @@ PetscErrorCode PetscSFViewFromOptions(PetscSF A, PetscObject obj, const char nam PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PetscSFView - view a star forest Collective diff --git a/src/vec/is/sf/interface/sfregi.c b/src/vec/is/sf/interface/sfregi.c index ac4b9d4c41e..46dd50166b2 100644 --- a/src/vec/is/sf/interface/sfregi.c +++ b/src/vec/is/sf/interface/sfregi.c @@ -48,7 +48,7 @@ PetscErrorCode PetscSFRegisterAll(void) /*@C PetscSFRegister - Adds an implementation of the `PetscSF` communication protocol. - Not Collective + Not Collective, No Fortran Support Input Parameters: + name - name of a new user-defined implementation diff --git a/src/vec/is/sf/interface/vscat.c b/src/vec/is/sf/interface/vscat.c index 4f46cc72185..17ced937050 100644 --- a/src/vec/is/sf/interface/vscat.c +++ b/src/vec/is/sf/interface/vscat.c @@ -370,7 +370,7 @@ PetscErrorCode VecScatterSetUp(VecScatter sf) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecScatterSetType - Builds a vector scatter, for a particular vector scatter implementation. Collective @@ -396,7 +396,7 @@ PetscErrorCode VecScatterSetType(VecScatter sf, VecScatterType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecScatterGetType - Gets the vector scatter type name (as a string) from the `VecScatter`. Not Collective @@ -438,7 +438,6 @@ PetscErrorCode VecScatterRegister(const char sname[], PetscErrorCode (*function) PetscFunctionReturn(PETSC_SUCCESS); } -/* ------------------------------------------------------------------*/ /*@ VecScatterGetMerged - Returns true if the scatter is completed in the `VecScatterBegin()` and the `VecScatterEnd()` does nothing @@ -462,7 +461,7 @@ PetscErrorCode VecScatterGetMerged(VecScatter sf, PetscBool *flg) if (flg) *flg = sf->vscat.beginandendtogether; PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecScatterDestroy - Destroys a scatter context created by `VecScatterCreate()` Collective @@ -505,7 +504,7 @@ PetscErrorCode VecScatterCopy(VecScatter sf, VecScatter *newsf) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecScatterViewFromOptions - View a `VecScatter` object based on values in the options database Collective @@ -530,8 +529,7 @@ PetscErrorCode VecScatterViewFromOptions(VecScatter sf, PetscObject obj, const c PetscFunctionReturn(PETSC_SUCCESS); } -/* ------------------------------------------------------------------*/ -/*@C +/*@ VecScatterView - Views a vector scatter context. Collective @@ -551,7 +549,7 @@ PetscErrorCode VecScatterView(VecScatter sf, PetscViewer viewer) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecScatterRemap - Remaps the "from" and "to" indices in a vector scatter context. 
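Editorial note: the /*@C to /*@ retags in this region, together with the hand-written Fortran stubs deleted above (zvscat.c and parts of zsf.c), appear to move these routines onto automatically generated Fortran bindings. As context for the affected VecScatter entry points, here is a hedged editorial sketch (not part of the patch) of the typical use of VecScatterCreateToAll(), assuming an existing parallel Vec x:

/* --- editorial sketch, not part of the patch --- */
Vec        xall; /* sequential copy of x, created on every rank */
VecScatter ctx;

PetscCall(VecScatterCreateToAll(x, &ctx, &xall));
PetscCall(VecScatterBegin(ctx, x, xall, INSERT_VALUES, SCATTER_FORWARD));
PetscCall(VecScatterEnd(ctx, x, xall, INSERT_VALUES, SCATTER_FORWARD));
/* ... read the full vector locally through xall ... */
PetscCall(VecScatterDestroy(&ctx));
PetscCall(VecDestroy(&xall));
/* --- end sketch --- */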
@@ -1096,7 +1094,7 @@ PetscErrorCode VecScatterCreate(Vec x, IS ix, Vec y, IS iy, VecScatter *newsf) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecScatterCreateToAll - Creates a vector and a scatter context that copies all vector values to each processor @@ -1168,7 +1166,7 @@ PetscErrorCode VecScatterCreateToAll(Vec vin, VecScatter *ctx, Vec *vout) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecScatterCreateToZero - Creates an output vector and a scatter context used to copy all vector values into the output vector on the zeroth processor diff --git a/src/vec/is/sf/tests/ex1.c b/src/vec/is/sf/tests/ex1.c index 85a1891795a..036fd97f84c 100644 --- a/src/vec/is/sf/tests/ex1.c +++ b/src/vec/is/sf/tests/ex1.c @@ -276,6 +276,6 @@ int main(int argc, char **argv) suffix: window_shared args: -user_sf_type window -sf_type window -sf_window_flavor shared -sf_window_sync {{fence active lock}} nsize: {{1 2 3}separate output} - requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW) + requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW) TEST*/ diff --git a/src/vec/is/sf/tests/ex3.c b/src/vec/is/sf/tests/ex3.c index 7a69028664b..7326687232a 100644 --- a/src/vec/is/sf/tests/ex3.c +++ b/src/vec/is/sf/tests/ex3.c @@ -221,7 +221,7 @@ int main(int argc, char **argv) test: # N=10 is divisible by nsize, to trigger Allgather/Gather in SF #MPI_Sendrecv_replace is broken with 20210400300 - requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION) + requires: !defined(PETSC_HAVE_I_MPI) nsize: 2 args: -op replace @@ -233,7 +233,7 @@ int main(int argc, char **argv) # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF test: #MPI_Sendrecv_replace is broken with 20210400300 - requires: !defined(PETSC_HAVE_I_MPI_NUMVERSION) + requires: !defined(PETSC_HAVE_I_MPI) suffix: 3 nsize: 3 args: -op replace diff --git a/src/vec/is/sf/tests/ex4.c b/src/vec/is/sf/tests/ex4.c index 9f91583ab81..42f3b9a90b7 100644 --- a/src/vec/is/sf/tests/ex4.c +++ b/src/vec/is/sf/tests/ex4.c @@ -153,6 +153,6 @@ int main(int argc, char **argv) filter: grep -v "type" | grep -v "sort" output_file: output/ex4_2.out args: -sparse_sfB -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared - requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) + requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH) defined(PETSC_HAVE_MPI_ONE_SIDED) TEST*/ diff --git a/src/vec/is/sf/tests/ex5.c b/src/vec/is/sf/tests/ex5.c index 1eb650a46cf..708cb84f1c1 100644 --- a/src/vec/is/sf/tests/ex5.c +++ b/src/vec/is/sf/tests/ex5.c @@ -267,6 +267,6 @@ int main(int argc, char **argv) filter: grep -v "type" | grep -v "sort" output_file: output/ex5_2.out args: -view -nl 5 -explicit_inverse {{0 1}} -sf_type window -sf_window_sync {{fence lock active}} -sf_window_flavor shared - requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) + requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH) defined(PETSC_HAVE_MPI_ONE_SIDED) TEST*/ diff --git a/src/vec/is/sf/tests/ex9.c b/src/vec/is/sf/tests/ex9.c index 761be6a5e2c..f342aec946c 100644 --- a/src/vec/is/sf/tests/ex9.c +++ b/src/vec/is/sf/tests/ex9.c @@ -459,7 +459,7 @@ int 
main(int argc, char **argv) suffix: 2 args: -sub2sub # deadlocks with NECMPI and INTELMPI (20210400300) - requires: !defined(PETSC_HAVE_NECMPI) !defined(PETSC_HAVE_I_MPI_NUMVERSION) + requires: !defined(PETSC_HAVE_NECMPI) !defined(PETSC_HAVE_I_MPI) test: suffix: 3 diff --git a/src/vec/is/sf/tutorials/ex1.c b/src/vec/is/sf/tutorials/ex1.c index 0b731d07a3c..907764c03f2 100644 --- a/src/vec/is/sf/tutorials/ex1.c +++ b/src/vec/is/sf/tutorials/ex1.c @@ -623,7 +623,7 @@ int main(int argc, char **argv) filter: grep -v "type" | grep -v "sort" nsize: 4 args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared -test_all -test_bcastop 0 -test_fetchandop 0 - requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW) + requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW) test: suffix: 10_basic diff --git a/src/vec/is/sf/tutorials/ex1f.F90 b/src/vec/is/sf/tutorials/ex1f.F90 index 0fc31bfc5b3..969ac016761 100644 --- a/src/vec/is/sf/tutorials/ex1f.F90 +++ b/src/vec/is/sf/tutorials/ex1f.F90 @@ -123,7 +123,7 @@ program main ! Create a star forest with continuous leaves and hence no buffer PetscCallA(PetscSFCreate(PETSC_COMM_WORLD,sf,ierr)) PetscCallA(PetscSFSetFromOptions(sf,ierr)) - PetscCallA(PetscSFSetGraph(sf,nrootsalloc,nleaves,PETSC_NULL_INTEGER,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr)) + PetscCallA(PetscSFSetGraph(sf,nrootsalloc,nleaves,PETSC_NULL_INTEGER_ARRAY,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES,ierr)) PetscCallA(PetscSFSetUp(sf,ierr)) ! View graph, mostly useful for debugging purposes. diff --git a/src/vec/is/sf/tutorials/ex2.c b/src/vec/is/sf/tutorials/ex2.c index 1a731e56775..9c5fa251f50 100644 --- a/src/vec/is/sf/tutorials/ex2.c +++ b/src/vec/is/sf/tutorials/ex2.c @@ -115,6 +115,6 @@ int main(int argc, char **argv) filter: grep -v "type" | grep -v "sort" output_file: output/ex2_basic.out args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared - requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) + requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH) defined(PETSC_HAVE_MPI_ONE_SIDED) TEST*/ diff --git a/src/vec/is/sf/tutorials/ex3.c b/src/vec/is/sf/tutorials/ex3.c index 76eda9f4692..400473874fd 100644 --- a/src/vec/is/sf/tutorials/ex3.c +++ b/src/vec/is/sf/tutorials/ex3.c @@ -110,13 +110,13 @@ int main(int argc, char **argv) output_file: output/ex3_window.out filter: grep -v "type" | grep -v "sort" args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared - requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION) + requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI) test: suffix: window_dupped_shared output_file: output/ex3_window_dupped.out filter: grep -v "type" | grep -v "sort" args: -test_dupped_type -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared - requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI_NUMVERSION) + requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) defined(PETSC_HAVE_MPI_ONE_SIDED) !defined(PETSC_HAVE_I_MPI) TEST*/ diff 
--git a/src/vec/is/sf/utils/ftn-custom/zsfutilsf.c b/src/vec/is/sf/utils/ftn-custom/zsfutilsf.c index 41dd2a8309d..e70b4251fd2 100644 --- a/src/vec/is/sf/utils/ftn-custom/zsfutilsf.c +++ b/src/vec/is/sf/utils/ftn-custom/zsfutilsf.c @@ -3,23 +3,16 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define petscsfdistributesection_ PETSCSFDISTRIBUTESECTION - #define petscsfgetgraphlayout_ PETSCSFGETGRAPHLAYOUT #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define petscsfdistributesection_ petscsfdistributesection - #define petscsfgetgraphlayout_ petscsfgetgraphlayout #endif -PETSC_EXTERN void petscsfdistributesection_(PetscSF *sf, PetscSection *rootSection, PetscInt **remoteOffsets, PetscSection *leafSection, PetscErrorCode *__ierr) +PETSC_EXTERN void petscsfdistributesection_(PetscSF *sf, PetscSection *rootSection, PetscInt **remoteOffsets, PetscSection *leafSection, PetscErrorCode *ierr) { if (remoteOffsets != PETSC_NULL_INTEGER_Fortran) { (void)PetscError(PETSC_COMM_SELF, __LINE__, "PetscSFDistributeSection_Fortran", __FILE__, PETSC_ERR_SUP, PETSC_ERROR_INITIAL, "The remoteOffsets argument must be PETSC_NULL_INTEGER in Fortran"); - *__ierr = PETSC_ERR_SUP; + *ierr = PETSC_ERR_SUP; return; } - *__ierr = PetscSFDistributeSection(*sf, *rootSection, NULL, *leafSection); -} - -PETSC_EXTERN void petscsfgetgraphlayout_(PetscSF *sf, PetscLayout *layout, PetscInt *nleaves, const PetscInt *ilocal[], PetscInt *gremote[], PetscErrorCode *__ierr) -{ - *__ierr = PetscSFGetGraphLayout(*sf, layout, nleaves, ilocal, gremote); + *ierr = PetscSFDistributeSection(*sf, *rootSection, NULL, *leafSection); } diff --git a/src/vec/is/sf/utils/sfutils.c b/src/vec/is/sf/utils/sfutils.c index cd0041c29da..3aedac198aa 100644 --- a/src/vec/is/sf/utils/sfutils.c +++ b/src/vec/is/sf/utils/sfutils.c @@ -68,15 +68,15 @@ PetscErrorCode PetscSFSetGraphLayout(PetscSF sf, PetscLayout layout, PetscInt nl Output Parameters: + layout - `PetscLayout` defining the global space for roots . nleaves - number of leaf vertices on the current process, each of these references a root on any process -. ilocal - locations of leaves in leafdata buffers, or NULL for contiguous storage +. ilocal - locations of leaves in leafdata buffers, or `NULL` for contiguous storage - gremote - root vertices in global numbering corresponding to leaves in ilocal Level: intermediate Notes: The outputs are such that passing them as inputs to `PetscSFSetGraphLayout()` would lead to the same star forest. - The outputs layout and gremote are freshly created each time this function is called, - so they need to be freed by user and cannot be qualified as const. + The outputs `layout` and `gremote` are freshly created each time this function is called, + so they need to be freed (with `PetscLayoutDestroy()` and `PetscFree()`) by the user. 
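To make the ownership rule in the rewritten Notes concrete, a short editorial sketch (not part of the patch), assuming an existing PetscSF sf whose graph is already set; per the Notes, layout and gremote are fresh copies owned by the caller, while ilocal is borrowed:

/* --- editorial sketch, not part of the patch --- */
PetscLayout     layout;
PetscInt        nleaves;
const PetscInt *ilocal;
PetscInt       *gremote;

PetscCall(PetscSFGetGraphLayout(sf, &layout, &nleaves, &ilocal, &gremote));
/* ... use layout and the (ilocal, gremote) leaf description; ilocal may be NULL for contiguous storage ... */
PetscCall(PetscLayoutDestroy(&layout)); /* freshly created outputs: the caller frees them */
PetscCall(PetscFree(gremote));
/* --- end sketch --- */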
.seealso: `PetscSF`, `PetscSFSetGraphLayout()`, `PetscSFCreate()`, `PetscSFView()`, `PetscSFSetGraph()`, `PetscSFGetGraph()` @*/ diff --git a/src/vec/is/tests/ex7.c b/src/vec/is/tests/ex7.c index 72a8c4d07d6..df5fc66b822 100644 --- a/src/vec/is/tests/ex7.c +++ b/src/vec/is/tests/ex7.c @@ -1,13 +1,15 @@ static char help[] = "Tests ISLocalToGlobalMappingGetInfo() and ISLocalToGlobalMappingGetNodeInfo().\n\n"; #include +#include #include int main(int argc, char **argv) { ISLocalToGlobalMapping ltog = NULL; - PetscInt *p, *ns, **ids; - PetscInt i, j, n, np, bs = 1, test = 0; + PetscSF mlsf; + PetscInt *p, *ns, **ids, *leaves, *mleaves; + PetscInt nl, mnl, mnr, i, j, k, n, np, bs = 1, test = 0; PetscViewer viewer; PetscMPIInt rank, size; @@ -107,6 +109,58 @@ int main(int argc, char **argv) } PetscCall(PetscViewerFlush(viewer)); PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(ltog, &n, &ns, &ids)); + + /* Test block */ + PetscCall(PetscViewerASCIIPrintf(viewer, "GETBLOCKINFO OUTPUT\n")); + PetscCall(PetscViewerASCIIPushSynchronized(viewer)); + PetscCall(ISLocalToGlobalMappingGetBlockInfo(ltog, &np, &p, &ns, &ids)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local NP %" PetscInt_FMT "\n", rank, np)); + for (i = 0; i < np; i++) { + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] procs[%" PetscInt_FMT "] = %" PetscInt_FMT ", shared %" PetscInt_FMT "\n", rank, i, p[i], ns[i])); + for (j = 0; j < ns[i]; j++) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] ids[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, j, ids[i][j])); } + } + PetscCall(PetscViewerFlush(viewer)); + PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(ltog, &np, &p, &ns, &ids)); + PetscCall(PetscViewerASCIIPrintf(viewer, "GETBLOCKNODEINFO OUTPUT\n")); + PetscCall(ISLocalToGlobalMappingGetBlockNodeInfo(ltog, &n, &ns, &ids)); + PetscCall(PetscViewerASCIIPushSynchronized(viewer)); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local N %" PetscInt_FMT "\n", rank, n)); + for (i = 0; i < n; i++) { + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] sharedby[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, i, ns[i])); + for (j = 0; j < ns[i]; j++) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] ids[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, j, ids[i][j])); } + } + PetscCall(PetscViewerFlush(viewer)); + PetscCall(ISLocalToGlobalMappingGetBlockMultiLeavesSF(ltog, &mlsf)); + PetscCall(PetscSFGetGraph(mlsf, &mnr, &mnl, NULL, NULL)); + PetscCall(ISLocalToGlobalMappingGetSize(ltog, &nl)); + nl /= bs; + for (i = 0, j = 0; i < n; i++) j += ns[i]; + PetscCheck(mnr == nl, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of roots in multi-leaves SF %" PetscInt_FMT " != %" PetscInt_FMT, mnr, nl); + PetscCheck(mnl == j, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of leaves in multi-leaves SF %" PetscInt_FMT " != %" PetscInt_FMT, mnl, j); + PetscCall(PetscMalloc2(2 * nl, &leaves, 2 * mnl, &mleaves)); + for (i = 0; i < nl; i++) { + leaves[2 * i] = -(rank + 1); + leaves[2 * i + 1] = i; + } + PetscCall(PetscViewerASCIIPrintf(viewer, "BLOCK MULTI-LEAVES INPUT\n")); + PetscCall(PetscViewerASCIIPushSynchronized(viewer)); + for (i = 0; i < nl; i++) { PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] input[%" PetscInt_FMT "] = (%" PetscInt_FMT ", %" PetscInt_FMT ")\n", rank, i, leaves[2 * i], leaves[2 * i + 1])); } + PetscCall(PetscViewerFlush(viewer)); + PetscCall(PetscSFBcastBegin(mlsf, MPIU_2INT, leaves, mleaves, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(mlsf, 
MPIU_2INT, leaves, mleaves, MPI_REPLACE)); + PetscCall(PetscViewerASCIIPrintf(viewer, "BLOCK MULTI-LEAVES OUTPUT\n")); + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local N %" PetscInt_FMT "\n", rank, n)); + for (i = 0, k = 0; i < n; i++) { + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] sharedby[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, i, ns[i])); + for (j = 0; j < ns[i]; j++) { + PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] recv[%" PetscInt_FMT "] = (%" PetscInt_FMT ", %" PetscInt_FMT ") from %" PetscInt_FMT "\n", rank, j, mleaves[2 * (k + j)], mleaves[2 * (k + j) + 1], ids[i][j])); + } + k += ns[i]; + } + PetscCall(PetscViewerFlush(viewer)); + PetscCall(PetscFree2(leaves, mleaves)); + PetscCall(ISLocalToGlobalMappingRestoreBlockNodeInfo(ltog, &n, &ns, &ids)); + PetscCall(ISLocalToGlobalMappingDestroy(<og)); PetscCall(PetscFinalize()); return 0; diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-0.out index d7f67b6eba3..705e41158b0 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-0.out @@ -4,3 +4,10 @@ GETINFO OUTPUT [0] Local NP 0 GETNODEINFO OUTPUT [0] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-1.out index 28e09c97ea9..8f77ca10d59 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-1.out @@ -85,3 +85,143 @@ GETNODEINFO OUTPUT [0] ids[1] = 0 [0] sharedby[15] = 1 [0] ids[0] = 0 +GETBLOCKINFO OUTPUT +[0] Local NP 1 +[0] procs[0] = 0, shared 12 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 8 +[0] ids[4] = 9 +[0] ids[5] = 12 +[0] ids[6] = 6 +[0] ids[7] = 3 +[0] ids[8] = 7 +[0] ids[9] = 13 +[0] ids[10] = 11 +[0] ids[11] = 14 +GETBLOCKNODEINFO OUTPUT +[0] Local N 16 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[9] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[10] = 1 +[0] ids[0] = 0 +[0] sharedby[11] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[12] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[13] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[14] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[15] = 1 +[0] ids[0] = 0 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[0] input[9] = (-1, 9) +[0] input[10] = (-1, 10) +[0] input[11] = (-1, 11) +[0] input[12] = (-1, 12) +[0] input[13] = (-1, 13) +[0] input[14] = (-1, 14) +[0] input[15] = (-1, 15) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 16 +[0] 
sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-1, 13) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[9] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[10] = 1 +[0] recv[0] = (-1, 10) from 0 +[0] sharedby[11] = 2 +[0] recv[0] = (-1, 11) from 0 +[0] recv[1] = (-1, 14) from 0 +[0] sharedby[12] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[13] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-1, 13) from 0 +[0] sharedby[14] = 2 +[0] recv[0] = (-1, 11) from 0 +[0] recv[1] = (-1, 14) from 0 +[0] sharedby[15] = 1 +[0] recv[0] = (-1, 15) from 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-2.out index 2d672457ebe..8baddffa0ad 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-1_test-2.out @@ -72,3 +72,122 @@ GETNODEINFO OUTPUT [0] ids[3] = 0 [0] sharedby[12] = 1 [0] ids[0] = 0 +GETBLOCKINFO OUTPUT +[0] Local NP 1 +[0] procs[0] = 0, shared 10 +[0] ids[0] = 0 +[0] ids[1] = 3 +[0] ids[2] = 1 +[0] ids[3] = 9 +[0] ids[4] = 11 +[0] ids[5] = 8 +[0] ids[6] = 6 +[0] ids[7] = 2 +[0] ids[8] = 10 +[0] ids[9] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 13 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[3] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[4] = 1 +[0] ids[0] = 0 +[0] sharedby[5] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[7] = 1 +[0] ids[0] = 0 +[0] sharedby[8] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[9] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[10] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[11] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[12] = 1 +[0] ids[0] = 0 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[0] input[9] = (-1, 9) +[0] input[10] = (-1, 10) +[0] input[11] = (-1, 11) +[0] input[12] = (-1, 12) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 13 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 9) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = (-1, 
11) from 0 +[0] sharedby[3] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] sharedby[4] = 1 +[0] recv[0] = (-1, 4) from 0 +[0] sharedby[5] = 2 +[0] recv[0] = (-1, 5) from 0 +[0] recv[1] = (-1, 10) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = (-1, 11) from 0 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = (-1, 11) from 0 +[0] sharedby[9] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 9) from 0 +[0] sharedby[10] = 2 +[0] recv[0] = (-1, 5) from 0 +[0] recv[1] = (-1, 10) from 0 +[0] sharedby[11] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = (-1, 11) from 0 +[0] sharedby[12] = 1 +[0] recv[0] = (-1, 12) from 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-0.out index d7f67b6eba3..705e41158b0 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-0.out @@ -4,3 +4,10 @@ GETINFO OUTPUT [0] Local NP 0 GETNODEINFO OUTPUT [0] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-1.out index 8dbfba4f576..cb6d5993829 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-1.out @@ -213,3 +213,143 @@ GETNODEINFO OUTPUT [0] ids[0] = 0 [0] sharedby[47] = 1 [0] ids[0] = 0 +GETBLOCKINFO OUTPUT +[0] Local NP 1 +[0] procs[0] = 0, shared 12 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 8 +[0] ids[4] = 9 +[0] ids[5] = 12 +[0] ids[6] = 6 +[0] ids[7] = 3 +[0] ids[8] = 7 +[0] ids[9] = 13 +[0] ids[10] = 11 +[0] ids[11] = 14 +GETBLOCKNODEINFO OUTPUT +[0] Local N 16 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[9] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[10] = 1 +[0] ids[0] = 0 +[0] sharedby[11] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[12] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[13] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[14] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[15] = 1 +[0] ids[0] = 0 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[0] input[9] = (-1, 9) +[0] input[10] = (-1, 10) +[0] input[11] = (-1, 11) +[0] input[12] = (-1, 12) +[0] input[13] = (-1, 13) +[0] input[14] = (-1, 14) +[0] input[15] = (-1, 15) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 16 
+[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-1, 13) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[9] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[10] = 1 +[0] recv[0] = (-1, 10) from 0 +[0] sharedby[11] = 2 +[0] recv[0] = (-1, 11) from 0 +[0] recv[1] = (-1, 14) from 0 +[0] sharedby[12] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 9) from 0 +[0] recv[3] = (-1, 12) from 0 +[0] sharedby[13] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-1, 13) from 0 +[0] sharedby[14] = 2 +[0] recv[0] = (-1, 11) from 0 +[0] recv[1] = (-1, 14) from 0 +[0] sharedby[15] = 1 +[0] recv[0] = (-1, 15) from 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-2.out index e2fa4513d66..c7f96060add 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-1_bs-3_test-2.out @@ -180,3 +180,122 @@ GETNODEINFO OUTPUT [0] ids[0] = 0 [0] sharedby[38] = 1 [0] ids[0] = 0 +GETBLOCKINFO OUTPUT +[0] Local NP 1 +[0] procs[0] = 0, shared 10 +[0] ids[0] = 0 +[0] ids[1] = 3 +[0] ids[2] = 1 +[0] ids[3] = 9 +[0] ids[4] = 11 +[0] ids[5] = 8 +[0] ids[6] = 6 +[0] ids[7] = 2 +[0] ids[8] = 10 +[0] ids[9] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 13 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[3] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[4] = 1 +[0] ids[0] = 0 +[0] sharedby[5] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[7] = 1 +[0] ids[0] = 0 +[0] sharedby[8] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[9] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[10] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[11] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 0 +[0] sharedby[12] = 1 +[0] ids[0] = 0 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[0] input[9] = (-1, 9) +[0] input[10] = (-1, 10) +[0] input[11] = (-1, 11) +[0] input[12] = (-1, 12) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 13 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 9) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = 
(-1, 11) from 0 +[0] sharedby[3] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] sharedby[4] = 1 +[0] recv[0] = (-1, 4) from 0 +[0] sharedby[5] = 2 +[0] recv[0] = (-1, 5) from 0 +[0] recv[1] = (-1, 10) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = (-1, 11) from 0 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = (-1, 11) from 0 +[0] sharedby[9] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 9) from 0 +[0] sharedby[10] = 2 +[0] recv[0] = (-1, 5) from 0 +[0] recv[1] = (-1, 10) from 0 +[0] sharedby[11] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-1, 8) from 0 +[0] recv[3] = (-1, 11) from 0 +[0] sharedby[12] = 1 +[0] recv[0] = (-1, 12) from 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-0.out index 31cc2a4b273..d78b8b01a75 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-0.out @@ -6,3 +6,13 @@ GETINFO OUTPUT GETNODEINFO OUTPUT [0] Local N 0 [1] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-1.out index 41db4d2e2a9..fbf73ca8ef8 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-1.out @@ -98,3 +98,157 @@ GETNODEINFO OUTPUT [1] ids[1] = 1 [1] sharedby[7] = 1 [1] ids[0] = 1 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 6 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 7 +[0] procs[1] = 1, shared 4 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] ids[2] = 6 +[0] ids[3] = 7 +[1] Local NP 2 +[1] procs[0] = 1, shared 6 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 4 +[1] ids[3] = 5 +[1] ids[4] = 3 +[1] ids[5] = 6 +[1] procs[1] = 0, shared 4 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 4 +[1] ids[3] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 1 +[0] ids[3] = 1 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 1 +[0] ids[3] = 1 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[1] Local N 8 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 4 +[1] ids[0] = 0 +[1] ids[1] = 0 +[1] ids[2] = 1 +[1] ids[3] = 1 +[1] sharedby[2] = 1 +[1] ids[0] = 1 +[1] sharedby[3] = 2 +[1] ids[0] = 1 +[1] ids[1] = 1 +[1] sharedby[4] = 4 +[1] ids[0] = 0 +[1] ids[1] = 0 +[1] ids[2] = 1 +[1] ids[3] = 1 +[1] sharedby[5] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[6] = 2 +[1] ids[0] = 1 +[1] ids[1] = 1 +[1] sharedby[7] = 1 +[1] ids[0] = 1 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 
6) +[0] input[7] = (-1, 7) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +[1] input[4] = (-2, 4) +[1] input[5] = (-2, 5) +[1] input[6] = (-2, 6) +[1] input[7] = (-2, 7) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-2, 1) from 1 +[0] recv[3] = (-2, 4) from 1 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-2, 1) from 1 +[0] recv[3] = (-2, 4) from 1 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-2, 5) from 1 +[1] Local N 8 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 4 +[1] recv[0] = (-1, 3) from 0 +[1] recv[1] = (-1, 6) from 0 +[1] recv[2] = (-2, 1) from 1 +[1] recv[3] = (-2, 4) from 1 +[1] sharedby[2] = 1 +[1] recv[0] = (-2, 2) from 1 +[1] sharedby[3] = 2 +[1] recv[0] = (-2, 3) from 1 +[1] recv[1] = (-2, 6) from 1 +[1] sharedby[4] = 4 +[1] recv[0] = (-1, 3) from 0 +[1] recv[1] = (-1, 6) from 0 +[1] recv[2] = (-2, 1) from 1 +[1] recv[3] = (-2, 4) from 1 +[1] sharedby[5] = 2 +[1] recv[0] = (-1, 7) from 0 +[1] recv[1] = (-2, 5) from 1 +[1] sharedby[6] = 2 +[1] recv[0] = (-2, 3) from 1 +[1] recv[1] = (-2, 6) from 1 +[1] sharedby[7] = 1 +[1] recv[0] = (-2, 7) from 1 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-2.out index cc415ee80ee..092d694680f 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-1_test-2.out @@ -85,3 +85,136 @@ GETNODEINFO OUTPUT [1] ids[1] = 1 [1] sharedby[3] = 1 [1] ids[0] = 1 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 7 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 8 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 2 +[0] ids[6] = 4 +[0] procs[1] = 1, shared 5 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] ids[3] = 6 +[0] ids[4] = 4 +[1] Local NP 2 +[1] procs[0] = 1, shared 3 +[1] ids[0] = 2 +[1] ids[1] = 1 +[1] ids[2] = 0 +[1] procs[1] = 0, shared 3 +[1] ids[0] = 2 +[1] ids[1] = 1 +[1] ids[2] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 1 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 1 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 1 +[0] sharedby[7] = 1 +[0] ids[0] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 4 +[1] ids[0] = 0 +[1] ids[1] = 0 +[1] ids[2] = 0 +[1] ids[3] = 1 +[1] sharedby[2] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[3] = 1 +[1] ids[0] = 1 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] 
= (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-2, 2) from 1 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-2, 1) from 1 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-2, 1) from 1 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 4) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-2, 1) from 1 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 4) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 4 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-1, 3) from 0 +[1] recv[2] = (-1, 6) from 0 +[1] recv[3] = (-2, 1) from 1 +[1] sharedby[2] = 2 +[1] recv[0] = (-1, 0) from 0 +[1] recv[1] = (-2, 2) from 1 +[1] sharedby[3] = 1 +[1] recv[0] = (-2, 3) from 1 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-0.out index 31cc2a4b273..d78b8b01a75 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-0.out @@ -6,3 +6,13 @@ GETINFO OUTPUT GETNODEINFO OUTPUT [0] Local N 0 [1] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-1.out index 30b7f44aab9..b7359bbf0e2 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-1.out @@ -242,3 +242,157 @@ GETNODEINFO OUTPUT [1] ids[0] = 1 [1] sharedby[23] = 1 [1] ids[0] = 1 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 6 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 7 +[0] procs[1] = 1, shared 4 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] ids[2] = 6 +[0] ids[3] = 7 +[1] Local NP 2 +[1] procs[0] = 1, shared 6 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 4 +[1] ids[3] = 5 +[1] ids[4] = 3 +[1] ids[5] = 6 +[1] procs[1] = 0, shared 4 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 4 +[1] ids[3] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 1 +[0] ids[3] = 1 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 1 +[0] ids[3] = 1 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[1] Local N 8 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 4 +[1] ids[0] = 0 +[1] ids[1] = 0 +[1] ids[2] = 1 +[1] ids[3] = 1 +[1] sharedby[2] = 
1 +[1] ids[0] = 1 +[1] sharedby[3] = 2 +[1] ids[0] = 1 +[1] ids[1] = 1 +[1] sharedby[4] = 4 +[1] ids[0] = 0 +[1] ids[1] = 0 +[1] ids[2] = 1 +[1] ids[3] = 1 +[1] sharedby[5] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[6] = 2 +[1] ids[0] = 1 +[1] ids[1] = 1 +[1] sharedby[7] = 1 +[1] ids[0] = 1 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +[1] input[4] = (-2, 4) +[1] input[5] = (-2, 5) +[1] input[6] = (-2, 6) +[1] input[7] = (-2, 7) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-2, 1) from 1 +[0] recv[3] = (-2, 4) from 1 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-2, 1) from 1 +[0] recv[3] = (-2, 4) from 1 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-2, 5) from 1 +[1] Local N 8 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 4 +[1] recv[0] = (-1, 3) from 0 +[1] recv[1] = (-1, 6) from 0 +[1] recv[2] = (-2, 1) from 1 +[1] recv[3] = (-2, 4) from 1 +[1] sharedby[2] = 1 +[1] recv[0] = (-2, 2) from 1 +[1] sharedby[3] = 2 +[1] recv[0] = (-2, 3) from 1 +[1] recv[1] = (-2, 6) from 1 +[1] sharedby[4] = 4 +[1] recv[0] = (-1, 3) from 0 +[1] recv[1] = (-1, 6) from 0 +[1] recv[2] = (-2, 1) from 1 +[1] recv[3] = (-2, 4) from 1 +[1] sharedby[5] = 2 +[1] recv[0] = (-1, 7) from 0 +[1] recv[1] = (-2, 5) from 1 +[1] sharedby[6] = 2 +[1] recv[0] = (-2, 3) from 1 +[1] recv[1] = (-2, 6) from 1 +[1] sharedby[7] = 1 +[1] recv[0] = (-2, 7) from 1 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-2.out index 0db3203851b..d98fab9de01 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-2_bs-3_test-2.out @@ -209,3 +209,136 @@ GETNODEINFO OUTPUT [1] ids[0] = 1 [1] sharedby[11] = 1 [1] ids[0] = 1 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 7 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 8 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 2 +[0] ids[6] = 4 +[0] procs[1] = 1, shared 5 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] ids[3] = 6 +[0] ids[4] = 4 +[1] Local NP 2 +[1] procs[0] = 1, shared 3 +[1] ids[0] = 2 +[1] ids[1] = 1 +[1] ids[2] = 0 +[1] procs[1] = 0, shared 3 +[1] ids[0] = 2 +[1] ids[1] = 1 +[1] ids[2] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 1 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 1 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 1 +[0] sharedby[7] = 1 
+[0] ids[0] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 4 +[1] ids[0] = 0 +[1] ids[1] = 0 +[1] ids[2] = 0 +[1] ids[3] = 1 +[1] sharedby[2] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[3] = 1 +[1] ids[0] = 1 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-2, 2) from 1 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-2, 1) from 1 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-2, 1) from 1 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 4) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-2, 1) from 1 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 4) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 4 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-1, 3) from 0 +[1] recv[2] = (-1, 6) from 0 +[1] recv[3] = (-2, 1) from 1 +[1] sharedby[2] = 2 +[1] recv[0] = (-1, 0) from 0 +[1] recv[1] = (-2, 2) from 1 +[1] sharedby[3] = 1 +[1] recv[0] = (-2, 3) from 1 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-0.out index 6858a7babd5..0b6ecc4dac8 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-0.out @@ -8,3 +8,16 @@ GETNODEINFO OUTPUT [0] Local N 0 [1] Local N 0 [2] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +[2] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-1.out index 1e6694c0b8d..0daf1e58c37 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-1.out @@ -100,3 +100,160 @@ GETNODEINFO OUTPUT [2] ids[1] = 2 [2] sharedby[7] = 1 [2] ids[0] = 2 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 6 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 7 +[0] procs[1] = 2, shared 4 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] ids[2] = 6 +[0] ids[3] = 7 +[1] Local NP 0 +[2] Local NP 2 +[2] procs[0] = 2, shared 6 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 4 +[2] ids[3] = 5 +[2] ids[4] = 3 +[2] ids[5] = 6 +[2] procs[1] = 0, shared 4 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 4 +[2] ids[3] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 
0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 2 +[0] ids[3] = 2 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 2 +[0] ids[3] = 2 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[1] Local N 0 +[2] Local N 8 +[2] sharedby[0] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[1] = 4 +[2] ids[0] = 0 +[2] ids[1] = 0 +[2] ids[2] = 2 +[2] ids[3] = 2 +[2] sharedby[2] = 1 +[2] ids[0] = 2 +[2] sharedby[3] = 2 +[2] ids[0] = 2 +[2] ids[1] = 2 +[2] sharedby[4] = 4 +[2] ids[0] = 0 +[2] ids[1] = 0 +[2] ids[2] = 2 +[2] ids[3] = 2 +[2] sharedby[5] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[6] = 2 +[2] ids[0] = 2 +[2] ids[1] = 2 +[2] sharedby[7] = 1 +[2] ids[0] = 2 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[2] input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[2] input[3] = (-3, 3) +[2] input[4] = (-3, 4) +[2] input[5] = (-3, 5) +[2] input[6] = (-3, 6) +[2] input[7] = (-3, 7) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-3, 0) from 2 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-3, 1) from 2 +[0] recv[3] = (-3, 4) from 2 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-3, 1) from 2 +[0] recv[3] = (-3, 4) from 2 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-3, 5) from 2 +[1] Local N 0 +[2] Local N 8 +[2] sharedby[0] = 2 +[2] recv[0] = (-1, 2) from 0 +[2] recv[1] = (-3, 0) from 2 +[2] sharedby[1] = 4 +[2] recv[0] = (-1, 3) from 0 +[2] recv[1] = (-1, 6) from 0 +[2] recv[2] = (-3, 1) from 2 +[2] recv[3] = (-3, 4) from 2 +[2] sharedby[2] = 1 +[2] recv[0] = (-3, 2) from 2 +[2] sharedby[3] = 2 +[2] recv[0] = (-3, 3) from 2 +[2] recv[1] = (-3, 6) from 2 +[2] sharedby[4] = 4 +[2] recv[0] = (-1, 3) from 0 +[2] recv[1] = (-1, 6) from 0 +[2] recv[2] = (-3, 1) from 2 +[2] recv[3] = (-3, 4) from 2 +[2] sharedby[5] = 2 +[2] recv[0] = (-1, 7) from 0 +[2] recv[1] = (-3, 5) from 2 +[2] sharedby[6] = 2 +[2] recv[0] = (-3, 3) from 2 +[2] recv[1] = (-3, 6) from 2 +[2] sharedby[7] = 1 +[2] recv[0] = (-3, 7) from 2 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-2.out index 276a6a60ad7..14886fe8029 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-1_test-2.out @@ -87,3 +87,139 @@ GETNODEINFO OUTPUT [2] ids[1] = 2 [2] sharedby[3] = 1 [2] ids[0] = 2 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 7 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 8 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 2 +[0] ids[6] = 4 +[0] procs[1] = 2, shared 5 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] ids[3] = 6 +[0] ids[4] = 4 +[1] Local NP 0 +[2] Local NP 2 +[2] procs[0] = 2, shared 3 +[2] ids[0] = 2 +[2] ids[1] = 1 +[2] 
ids[2] = 0 +[2] procs[1] = 0, shared 3 +[2] ids[0] = 2 +[2] ids[1] = 1 +[2] ids[2] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 2 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 2 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 2 +[0] sharedby[7] = 1 +[0] ids[0] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[1] Local N 0 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[1] = 4 +[2] ids[0] = 0 +[2] ids[1] = 0 +[2] ids[2] = 0 +[2] ids[3] = 2 +[2] sharedby[2] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[3] = 1 +[2] ids[0] = 2 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[2] input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[2] input[3] = (-3, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-3, 2) from 2 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-3, 1) from 2 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-3, 1) from 2 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 4) from 0 +[0] recv[1] = (-3, 0) from 2 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-3, 1) from 2 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[1] Local N 0 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] recv[0] = (-1, 4) from 0 +[2] recv[1] = (-3, 0) from 2 +[2] sharedby[1] = 4 +[2] recv[0] = (-1, 2) from 0 +[2] recv[1] = (-1, 3) from 0 +[2] recv[2] = (-1, 6) from 0 +[2] recv[3] = (-3, 1) from 2 +[2] sharedby[2] = 2 +[2] recv[0] = (-1, 0) from 0 +[2] recv[1] = (-3, 2) from 2 +[2] sharedby[3] = 1 +[2] recv[0] = (-3, 3) from 2 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-0.out index 6858a7babd5..0b6ecc4dac8 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-0.out @@ -8,3 +8,16 @@ GETNODEINFO OUTPUT [0] Local N 0 [1] Local N 0 [2] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +[2] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-1.out index 58958501385..beb5bdf170e 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-1.out @@ -244,3 +244,160 @@ GETNODEINFO OUTPUT [2] ids[0] = 2 [2] sharedby[23] = 
1 [2] ids[0] = 2 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 6 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 7 +[0] procs[1] = 2, shared 4 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] ids[2] = 6 +[0] ids[3] = 7 +[1] Local NP 0 +[2] Local NP 2 +[2] procs[0] = 2, shared 6 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 4 +[2] ids[3] = 5 +[2] ids[4] = 3 +[2] ids[5] = 6 +[2] procs[1] = 0, shared 4 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 4 +[2] ids[3] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 2 +[0] ids[3] = 2 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 2 +[0] ids[3] = 2 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[1] Local N 0 +[2] Local N 8 +[2] sharedby[0] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[1] = 4 +[2] ids[0] = 0 +[2] ids[1] = 0 +[2] ids[2] = 2 +[2] ids[3] = 2 +[2] sharedby[2] = 1 +[2] ids[0] = 2 +[2] sharedby[3] = 2 +[2] ids[0] = 2 +[2] ids[1] = 2 +[2] sharedby[4] = 4 +[2] ids[0] = 0 +[2] ids[1] = 0 +[2] ids[2] = 2 +[2] ids[3] = 2 +[2] sharedby[5] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[6] = 2 +[2] ids[0] = 2 +[2] ids[1] = 2 +[2] sharedby[7] = 1 +[2] ids[0] = 2 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[2] input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[2] input[3] = (-3, 3) +[2] input[4] = (-3, 4) +[2] input[5] = (-3, 5) +[2] input[6] = (-3, 6) +[2] input[7] = (-3, 7) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-3, 0) from 2 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-3, 1) from 2 +[0] recv[3] = (-3, 4) from 2 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-3, 1) from 2 +[0] recv[3] = (-3, 4) from 2 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-3, 5) from 2 +[1] Local N 0 +[2] Local N 8 +[2] sharedby[0] = 2 +[2] recv[0] = (-1, 2) from 0 +[2] recv[1] = (-3, 0) from 2 +[2] sharedby[1] = 4 +[2] recv[0] = (-1, 3) from 0 +[2] recv[1] = (-1, 6) from 0 +[2] recv[2] = (-3, 1) from 2 +[2] recv[3] = (-3, 4) from 2 +[2] sharedby[2] = 1 +[2] recv[0] = (-3, 2) from 2 +[2] sharedby[3] = 2 +[2] recv[0] = (-3, 3) from 2 +[2] recv[1] = (-3, 6) from 2 +[2] sharedby[4] = 4 +[2] recv[0] = (-1, 3) from 0 +[2] recv[1] = (-1, 6) from 0 +[2] recv[2] = (-3, 1) from 2 +[2] recv[3] = (-3, 4) from 2 +[2] sharedby[5] = 2 +[2] recv[0] = (-1, 7) from 0 +[2] recv[1] = (-3, 5) from 2 +[2] sharedby[6] = 2 +[2] recv[0] = (-3, 3) from 2 +[2] recv[1] = (-3, 6) from 2 +[2] sharedby[7] = 1 +[2] recv[0] = (-3, 7) from 2 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-2.out index 
97d41a57fa6..f39a2dc0ff6 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-3_bs-3_test-2.out @@ -211,3 +211,139 @@ GETNODEINFO OUTPUT [2] ids[0] = 2 [2] sharedby[11] = 1 [2] ids[0] = 2 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 7 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 8 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 2 +[0] ids[6] = 4 +[0] procs[1] = 2, shared 5 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] ids[3] = 6 +[0] ids[4] = 4 +[1] Local NP 0 +[2] Local NP 2 +[2] procs[0] = 2, shared 3 +[2] ids[0] = 2 +[2] ids[1] = 1 +[2] ids[2] = 0 +[2] procs[1] = 0, shared 3 +[2] ids[0] = 2 +[2] ids[1] = 1 +[2] ids[2] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 2 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 2 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 2 +[0] sharedby[7] = 1 +[0] ids[0] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[1] Local N 0 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[1] = 4 +[2] ids[0] = 0 +[2] ids[1] = 0 +[2] ids[2] = 0 +[2] ids[3] = 2 +[2] sharedby[2] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[3] = 1 +[2] ids[0] = 2 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[2] input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[2] input[3] = (-3, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-3, 2) from 2 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-3, 1) from 2 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-3, 1) from 2 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 4) from 0 +[0] recv[1] = (-3, 0) from 2 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-3, 1) from 2 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[1] Local N 0 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] recv[0] = (-1, 4) from 0 +[2] recv[1] = (-3, 0) from 2 +[2] sharedby[1] = 4 +[2] recv[0] = (-1, 2) from 0 +[2] recv[1] = (-1, 3) from 0 +[2] recv[2] = (-1, 6) from 0 +[2] recv[3] = (-3, 1) from 2 +[2] sharedby[2] = 2 +[2] recv[0] = (-1, 0) from 0 +[2] recv[1] = (-3, 2) from 2 +[2] sharedby[3] = 1 +[2] recv[0] = (-3, 3) from 2 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-0.out index db454110278..9116d994752 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-0.out @@ -10,3 +10,19 @@ GETNODEINFO OUTPUT [1] Local N 0 [2] 
Local N 0 [3] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-1.out index 92bc45895fe..7bcbad005c5 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-1.out @@ -126,3 +126,187 @@ GETNODEINFO OUTPUT [3] ids[1] = 3 [3] sharedby[3] = 1 [3] ids[0] = 3 +GETBLOCKINFO OUTPUT +[0] Local NP 4 +[0] procs[0] = 0, shared 3 +[0] ids[0] = 1 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] procs[1] = 1, shared 2 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] procs[2] = 2, shared 2 +[0] ids[0] = 1 +[0] ids[1] = 3 +[0] procs[3] = 3, shared 1 +[0] ids[0] = 3 +[1] Local NP 4 +[1] procs[0] = 1, shared 3 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 3 +[1] procs[1] = 0, shared 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] procs[2] = 2, shared 1 +[1] ids[0] = 1 +[1] procs[3] = 3, shared 2 +[1] ids[0] = 1 +[1] ids[1] = 3 +[2] Local NP 4 +[2] procs[0] = 2, shared 3 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] ids[2] = 3 +[2] procs[1] = 0, shared 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] procs[2] = 1, shared 1 +[2] ids[0] = 2 +[2] procs[3] = 3, shared 2 +[2] ids[0] = 2 +[2] ids[1] = 3 +[3] Local NP 4 +[3] procs[0] = 3, shared 3 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] ids[2] = 2 +[3] procs[1] = 0, shared 1 +[3] ids[0] = 0 +[3] procs[2] = 1, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 2 +[3] procs[3] = 2, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 1 +GETBLOCKNODEINFO OUTPUT +[0] Local N 4 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 2 +[0] ids[3] = 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 4 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 2 +[1] ids[3] = 3 +[1] sharedby[2] = 1 +[1] ids[0] = 1 +[1] sharedby[3] = 2 +[1] ids[0] = 1 +[1] ids[1] = 3 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[1] = 1 +[2] ids[0] = 2 +[2] sharedby[2] = 4 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 2 +[2] ids[3] = 3 +[2] sharedby[3] = 2 +[2] ids[0] = 2 +[2] ids[1] = 3 +[3] Local N 4 +[3] sharedby[0] = 4 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] ids[2] = 2 +[3] ids[3] = 3 +[3] sharedby[1] = 2 +[3] ids[0] = 2 +[3] ids[1] = 3 +[3] sharedby[2] = 2 +[3] ids[0] = 1 +[3] ids[1] = 3 +[3] sharedby[3] = 1 +[3] ids[0] = 3 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +[2] input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[2] input[3] = (-3, 3) +[3] input[0] = (-4, 0) +[3] input[1] = (-4, 1) +[3] input[2] = (-4, 2) +[3] input[3] = (-4, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 4 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-3, 0) from 2 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-2, 1) from 1 +[0] recv[2] = (-3, 2) from 2 +[0] recv[3] = (-4, 0) 
from 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 4 +[1] recv[0] = (-1, 3) from 0 +[1] recv[1] = (-2, 1) from 1 +[1] recv[2] = (-3, 2) from 2 +[1] recv[3] = (-4, 0) from 3 +[1] sharedby[2] = 1 +[1] recv[0] = (-2, 2) from 1 +[1] sharedby[3] = 2 +[1] recv[0] = (-2, 3) from 1 +[1] recv[1] = (-4, 2) from 3 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] recv[0] = (-1, 1) from 0 +[2] recv[1] = (-3, 0) from 2 +[2] sharedby[1] = 1 +[2] recv[0] = (-3, 1) from 2 +[2] sharedby[2] = 4 +[2] recv[0] = (-1, 3) from 0 +[2] recv[1] = (-2, 1) from 1 +[2] recv[2] = (-3, 2) from 2 +[2] recv[3] = (-4, 0) from 3 +[2] sharedby[3] = 2 +[2] recv[0] = (-3, 3) from 2 +[2] recv[1] = (-4, 1) from 3 +[3] Local N 4 +[3] sharedby[0] = 4 +[3] recv[0] = (-1, 3) from 0 +[3] recv[1] = (-2, 1) from 1 +[3] recv[2] = (-3, 2) from 2 +[3] recv[3] = (-4, 0) from 3 +[3] sharedby[1] = 2 +[3] recv[0] = (-3, 3) from 2 +[3] recv[1] = (-4, 1) from 3 +[3] sharedby[2] = 2 +[3] recv[0] = (-2, 3) from 1 +[3] recv[1] = (-4, 2) from 3 +[3] sharedby[3] = 1 +[3] recv[0] = (-4, 3) from 3 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-2.out index 20b0399cc23..47f3ae1922b 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-1_test-2.out @@ -111,3 +111,164 @@ GETNODEINFO OUTPUT [3] ids[1] = 3 [3] sharedby[2] = 1 [3] ids[0] = 3 +GETBLOCKINFO OUTPUT +[0] Local NP 4 +[0] procs[0] = 0, shared 3 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 2 +[0] procs[1] = 1, shared 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] procs[2] = 2, shared 2 +[0] ids[0] = 1 +[0] ids[1] = 2 +[0] procs[3] = 3, shared 1 +[0] ids[0] = 2 +[1] Local NP 4 +[1] procs[0] = 1, shared 3 +[1] ids[0] = 0 +[1] ids[1] = 3 +[1] ids[2] = 2 +[1] procs[1] = 0, shared 2 +[1] ids[0] = 0 +[1] ids[1] = 3 +[1] procs[2] = 2, shared 1 +[1] ids[0] = 3 +[1] procs[3] = 3, shared 2 +[1] ids[0] = 3 +[1] ids[1] = 2 +[2] Local NP 4 +[2] procs[0] = 2, shared 2 +[2] ids[0] = 2 +[2] ids[1] = 0 +[2] procs[1] = 0, shared 2 +[2] ids[0] = 2 +[2] ids[1] = 0 +[2] procs[2] = 1, shared 1 +[2] ids[0] = 0 +[2] procs[3] = 3, shared 1 +[2] ids[0] = 0 +[3] Local NP 4 +[3] procs[0] = 3, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] procs[1] = 0, shared 1 +[3] ids[0] = 0 +[3] procs[2] = 1, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] procs[3] = 2, shared 1 +[3] ids[0] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 3 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 2 +[0] ids[3] = 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 1 +[1] ids[0] = 1 +[1] sharedby[2] = 2 +[1] ids[0] = 1 +[1] ids[1] = 3 +[1] sharedby[3] = 4 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 2 +[1] ids[3] = 3 +[2] Local N 3 +[2] sharedby[0] = 4 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 2 +[2] ids[3] = 3 +[2] sharedby[1] = 1 +[2] ids[0] = 2 +[2] sharedby[2] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[3] Local N 3 +[3] sharedby[0] = 4 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] ids[2] = 2 +[3] ids[3] = 3 +[3] sharedby[1] = 2 +[3] ids[0] = 1 +[3] ids[1] = 3 +[3] sharedby[2] = 1 +[3] ids[0] = 3 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +[2] 
input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[3] input[0] = (-4, 0) +[3] input[1] = (-4, 1) +[3] input[2] = (-4, 2) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 3 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-3, 2) from 2 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-2, 3) from 1 +[0] recv[2] = (-3, 0) from 2 +[0] recv[3] = (-4, 0) from 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 0) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 1 +[1] recv[0] = (-2, 1) from 1 +[1] sharedby[2] = 2 +[1] recv[0] = (-2, 2) from 1 +[1] recv[1] = (-4, 1) from 3 +[1] sharedby[3] = 4 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-2, 3) from 1 +[1] recv[2] = (-3, 0) from 2 +[1] recv[3] = (-4, 0) from 3 +[2] Local N 3 +[2] sharedby[0] = 4 +[2] recv[0] = (-1, 2) from 0 +[2] recv[1] = (-2, 3) from 1 +[2] recv[2] = (-3, 0) from 2 +[2] recv[3] = (-4, 0) from 3 +[2] sharedby[1] = 1 +[2] recv[0] = (-3, 1) from 2 +[2] sharedby[2] = 2 +[2] recv[0] = (-1, 1) from 0 +[2] recv[1] = (-3, 2) from 2 +[3] Local N 3 +[3] sharedby[0] = 4 +[3] recv[0] = (-1, 2) from 0 +[3] recv[1] = (-2, 3) from 1 +[3] recv[2] = (-3, 0) from 2 +[3] recv[3] = (-4, 0) from 3 +[3] sharedby[1] = 2 +[3] recv[0] = (-2, 2) from 1 +[3] recv[1] = (-4, 1) from 3 +[3] sharedby[2] = 1 +[3] recv[0] = (-4, 2) from 3 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-0.out index db454110278..9116d994752 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-0.out @@ -10,3 +10,19 @@ GETNODEINFO OUTPUT [1] Local N 0 [2] Local N 0 [3] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-1.out index 61305599d6a..de4a61f66ec 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-1.out @@ -294,3 +294,187 @@ GETNODEINFO OUTPUT [3] ids[0] = 3 [3] sharedby[11] = 1 [3] ids[0] = 3 +GETBLOCKINFO OUTPUT +[0] Local NP 4 +[0] procs[0] = 0, shared 3 +[0] ids[0] = 1 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] procs[1] = 1, shared 2 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] procs[2] = 2, shared 2 +[0] ids[0] = 1 +[0] ids[1] = 3 +[0] procs[3] = 3, shared 1 +[0] ids[0] = 3 +[1] Local NP 4 +[1] procs[0] = 1, shared 3 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 3 +[1] procs[1] = 0, shared 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] procs[2] = 2, shared 1 +[1] ids[0] = 1 +[1] procs[3] = 3, shared 2 +[1] ids[0] = 1 +[1] ids[1] = 3 +[2] Local NP 4 +[2] procs[0] = 2, shared 3 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] ids[2] = 3 +[2] procs[1] = 0, shared 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] procs[2] = 1, shared 1 +[2] ids[0] = 2 +[2] procs[3] = 3, shared 2 +[2] ids[0] = 2 +[2] ids[1] = 3 +[3] Local NP 4 +[3] procs[0] = 3, shared 3 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] ids[2] = 2 +[3] procs[1] = 0, shared 1 +[3] ids[0] = 0 +[3] procs[2] = 1, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 2 +[3] procs[3] = 2, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 1 
+GETBLOCKNODEINFO OUTPUT +[0] Local N 4 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 2 +[0] ids[3] = 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 4 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 2 +[1] ids[3] = 3 +[1] sharedby[2] = 1 +[1] ids[0] = 1 +[1] sharedby[3] = 2 +[1] ids[0] = 1 +[1] ids[1] = 3 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[2] sharedby[1] = 1 +[2] ids[0] = 2 +[2] sharedby[2] = 4 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 2 +[2] ids[3] = 3 +[2] sharedby[3] = 2 +[2] ids[0] = 2 +[2] ids[1] = 3 +[3] Local N 4 +[3] sharedby[0] = 4 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] ids[2] = 2 +[3] ids[3] = 3 +[3] sharedby[1] = 2 +[3] ids[0] = 2 +[3] ids[1] = 3 +[3] sharedby[2] = 2 +[3] ids[0] = 1 +[3] ids[1] = 3 +[3] sharedby[3] = 1 +[3] ids[0] = 3 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +[2] input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[2] input[3] = (-3, 3) +[3] input[0] = (-4, 0) +[3] input[1] = (-4, 1) +[3] input[2] = (-4, 2) +[3] input[3] = (-4, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 4 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-3, 0) from 2 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-2, 1) from 1 +[0] recv[2] = (-3, 2) from 2 +[0] recv[3] = (-4, 0) from 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 4 +[1] recv[0] = (-1, 3) from 0 +[1] recv[1] = (-2, 1) from 1 +[1] recv[2] = (-3, 2) from 2 +[1] recv[3] = (-4, 0) from 3 +[1] sharedby[2] = 1 +[1] recv[0] = (-2, 2) from 1 +[1] sharedby[3] = 2 +[1] recv[0] = (-2, 3) from 1 +[1] recv[1] = (-4, 2) from 3 +[2] Local N 4 +[2] sharedby[0] = 2 +[2] recv[0] = (-1, 1) from 0 +[2] recv[1] = (-3, 0) from 2 +[2] sharedby[1] = 1 +[2] recv[0] = (-3, 1) from 2 +[2] sharedby[2] = 4 +[2] recv[0] = (-1, 3) from 0 +[2] recv[1] = (-2, 1) from 1 +[2] recv[2] = (-3, 2) from 2 +[2] recv[3] = (-4, 0) from 3 +[2] sharedby[3] = 2 +[2] recv[0] = (-3, 3) from 2 +[2] recv[1] = (-4, 1) from 3 +[3] Local N 4 +[3] sharedby[0] = 4 +[3] recv[0] = (-1, 3) from 0 +[3] recv[1] = (-2, 1) from 1 +[3] recv[2] = (-3, 2) from 2 +[3] recv[3] = (-4, 0) from 3 +[3] sharedby[1] = 2 +[3] recv[0] = (-3, 3) from 2 +[3] recv[1] = (-4, 1) from 3 +[3] sharedby[2] = 2 +[3] recv[0] = (-2, 3) from 1 +[3] recv[1] = (-4, 2) from 3 +[3] sharedby[3] = 1 +[3] recv[0] = (-4, 3) from 3 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-2.out index e4cd895132e..b6d57a94a3c 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-4_bs-3_test-2.out @@ -255,3 +255,164 @@ GETNODEINFO OUTPUT [3] ids[0] = 3 [3] sharedby[8] = 1 [3] ids[0] = 3 +GETBLOCKINFO OUTPUT +[0] Local NP 4 +[0] procs[0] = 0, shared 3 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 2 +[0] procs[1] = 1, shared 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] procs[2] = 2, shared 2 +[0] ids[0] = 1 +[0] ids[1] = 2 +[0] 
procs[3] = 3, shared 1 +[0] ids[0] = 2 +[1] Local NP 4 +[1] procs[0] = 1, shared 3 +[1] ids[0] = 0 +[1] ids[1] = 3 +[1] ids[2] = 2 +[1] procs[1] = 0, shared 2 +[1] ids[0] = 0 +[1] ids[1] = 3 +[1] procs[2] = 2, shared 1 +[1] ids[0] = 3 +[1] procs[3] = 3, shared 2 +[1] ids[0] = 3 +[1] ids[1] = 2 +[2] Local NP 4 +[2] procs[0] = 2, shared 2 +[2] ids[0] = 2 +[2] ids[1] = 0 +[2] procs[1] = 0, shared 2 +[2] ids[0] = 2 +[2] ids[1] = 0 +[2] procs[2] = 1, shared 1 +[2] ids[0] = 0 +[2] procs[3] = 3, shared 1 +[2] ids[0] = 0 +[3] Local NP 4 +[3] procs[0] = 3, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] procs[1] = 0, shared 1 +[3] ids[0] = 0 +[3] procs[2] = 1, shared 2 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] procs[3] = 2, shared 1 +[3] ids[0] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 3 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 2 +[0] ids[3] = 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] sharedby[1] = 1 +[1] ids[0] = 1 +[1] sharedby[2] = 2 +[1] ids[0] = 1 +[1] ids[1] = 3 +[1] sharedby[3] = 4 +[1] ids[0] = 0 +[1] ids[1] = 1 +[1] ids[2] = 2 +[1] ids[3] = 3 +[2] Local N 3 +[2] sharedby[0] = 4 +[2] ids[0] = 0 +[2] ids[1] = 1 +[2] ids[2] = 2 +[2] ids[3] = 3 +[2] sharedby[1] = 1 +[2] ids[0] = 2 +[2] sharedby[2] = 2 +[2] ids[0] = 0 +[2] ids[1] = 2 +[3] Local N 3 +[3] sharedby[0] = 4 +[3] ids[0] = 0 +[3] ids[1] = 1 +[3] ids[2] = 2 +[3] ids[3] = 3 +[3] sharedby[1] = 2 +[3] ids[0] = 1 +[3] ids[1] = 3 +[3] sharedby[2] = 1 +[3] ids[0] = 3 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[1] input[0] = (-2, 0) +[1] input[1] = (-2, 1) +[1] input[2] = (-2, 2) +[1] input[3] = (-2, 3) +[2] input[0] = (-3, 0) +[2] input[1] = (-3, 1) +[2] input[2] = (-3, 2) +[3] input[0] = (-4, 0) +[3] input[1] = (-4, 1) +[3] input[2] = (-4, 2) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 3 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-2, 0) from 1 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-3, 2) from 2 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-2, 3) from 1 +[0] recv[2] = (-3, 0) from 2 +[0] recv[3] = (-4, 0) from 3 +[1] Local N 4 +[1] sharedby[0] = 2 +[1] recv[0] = (-1, 0) from 0 +[1] recv[1] = (-2, 0) from 1 +[1] sharedby[1] = 1 +[1] recv[0] = (-2, 1) from 1 +[1] sharedby[2] = 2 +[1] recv[0] = (-2, 2) from 1 +[1] recv[1] = (-4, 1) from 3 +[1] sharedby[3] = 4 +[1] recv[0] = (-1, 2) from 0 +[1] recv[1] = (-2, 3) from 1 +[1] recv[2] = (-3, 0) from 2 +[1] recv[3] = (-4, 0) from 3 +[2] Local N 3 +[2] sharedby[0] = 4 +[2] recv[0] = (-1, 2) from 0 +[2] recv[1] = (-2, 3) from 1 +[2] recv[2] = (-3, 0) from 2 +[2] recv[3] = (-4, 0) from 3 +[2] sharedby[1] = 1 +[2] recv[0] = (-3, 1) from 2 +[2] sharedby[2] = 2 +[2] recv[0] = (-1, 1) from 0 +[2] recv[1] = (-3, 2) from 2 +[3] Local N 3 +[3] sharedby[0] = 4 +[3] recv[0] = (-1, 2) from 0 +[3] recv[1] = (-2, 3) from 1 +[3] recv[2] = (-3, 0) from 2 +[3] recv[3] = (-4, 0) from 3 +[3] sharedby[1] = 2 +[3] recv[0] = (-2, 2) from 1 +[3] recv[1] = (-4, 1) from 3 +[3] sharedby[2] = 1 +[3] recv[0] = (-4, 2) from 3 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-0.out index 518ecffa95d..afdffc27269 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-0.out @@ -12,3 +12,22 
@@ GETNODEINFO OUTPUT [2] Local N 0 [3] Local N 0 [4] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +[4] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-1.out index 5ec279cc790..e1f962855ca 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-1.out @@ -104,3 +104,166 @@ GETNODEINFO OUTPUT [4] ids[1] = 4 [4] sharedby[7] = 1 [4] ids[0] = 4 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 6 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 7 +[0] procs[1] = 4, shared 4 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] ids[2] = 6 +[0] ids[3] = 7 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +[4] Local NP 2 +[4] procs[0] = 4, shared 6 +[4] ids[0] = 0 +[4] ids[1] = 1 +[4] ids[2] = 4 +[4] ids[3] = 5 +[4] ids[4] = 3 +[4] ids[5] = 6 +[4] procs[1] = 0, shared 4 +[4] ids[0] = 0 +[4] ids[1] = 1 +[4] ids[2] = 4 +[4] ids[3] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 4 +[0] ids[3] = 4 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 4 +[0] ids[3] = 4 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 8 +[4] sharedby[0] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[1] = 4 +[4] ids[0] = 0 +[4] ids[1] = 0 +[4] ids[2] = 4 +[4] ids[3] = 4 +[4] sharedby[2] = 1 +[4] ids[0] = 4 +[4] sharedby[3] = 2 +[4] ids[0] = 4 +[4] ids[1] = 4 +[4] sharedby[4] = 4 +[4] ids[0] = 0 +[4] ids[1] = 0 +[4] ids[2] = 4 +[4] ids[3] = 4 +[4] sharedby[5] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[6] = 2 +[4] ids[0] = 4 +[4] ids[1] = 4 +[4] sharedby[7] = 1 +[4] ids[0] = 4 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[4] input[0] = (-5, 0) +[4] input[1] = (-5, 1) +[4] input[2] = (-5, 2) +[4] input[3] = (-5, 3) +[4] input[4] = (-5, 4) +[4] input[5] = (-5, 5) +[4] input[6] = (-5, 6) +[4] input[7] = (-5, 7) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-5, 0) from 4 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-5, 1) from 4 +[0] recv[3] = (-5, 4) from 4 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-5, 1) from 4 +[0] recv[3] = (-5, 4) from 4 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-5, 5) from 4 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] 
Local N 8 +[4] sharedby[0] = 2 +[4] recv[0] = (-1, 2) from 0 +[4] recv[1] = (-5, 0) from 4 +[4] sharedby[1] = 4 +[4] recv[0] = (-1, 3) from 0 +[4] recv[1] = (-1, 6) from 0 +[4] recv[2] = (-5, 1) from 4 +[4] recv[3] = (-5, 4) from 4 +[4] sharedby[2] = 1 +[4] recv[0] = (-5, 2) from 4 +[4] sharedby[3] = 2 +[4] recv[0] = (-5, 3) from 4 +[4] recv[1] = (-5, 6) from 4 +[4] sharedby[4] = 4 +[4] recv[0] = (-1, 3) from 0 +[4] recv[1] = (-1, 6) from 0 +[4] recv[2] = (-5, 1) from 4 +[4] recv[3] = (-5, 4) from 4 +[4] sharedby[5] = 2 +[4] recv[0] = (-1, 7) from 0 +[4] recv[1] = (-5, 5) from 4 +[4] sharedby[6] = 2 +[4] recv[0] = (-5, 3) from 4 +[4] recv[1] = (-5, 6) from 4 +[4] sharedby[7] = 1 +[4] recv[0] = (-5, 7) from 4 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-2.out index 93747d2b444..1fa07f7780f 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-1_test-2.out @@ -91,3 +91,145 @@ GETNODEINFO OUTPUT [4] ids[1] = 4 [4] sharedby[3] = 1 [4] ids[0] = 4 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 7 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 8 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 2 +[0] ids[6] = 4 +[0] procs[1] = 4, shared 5 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] ids[3] = 6 +[0] ids[4] = 4 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +[4] Local NP 2 +[4] procs[0] = 4, shared 3 +[4] ids[0] = 2 +[4] ids[1] = 1 +[4] ids[2] = 0 +[4] procs[1] = 0, shared 3 +[4] ids[0] = 2 +[4] ids[1] = 1 +[4] ids[2] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 4 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 4 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 4 +[0] sharedby[7] = 1 +[0] ids[0] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 4 +[4] sharedby[0] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[1] = 4 +[4] ids[0] = 0 +[4] ids[1] = 0 +[4] ids[2] = 0 +[4] ids[3] = 4 +[4] sharedby[2] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[3] = 1 +[4] ids[0] = 4 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[4] input[0] = (-5, 0) +[4] input[1] = (-5, 1) +[4] input[2] = (-5, 2) +[4] input[3] = (-5, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-5, 2) from 4 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-5, 1) from 4 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-5, 1) from 4 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 4) from 0 +[0] recv[1] = (-5, 0) from 4 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 
0 +[0] recv[3] = (-5, 1) from 4 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 4 +[4] sharedby[0] = 2 +[4] recv[0] = (-1, 4) from 0 +[4] recv[1] = (-5, 0) from 4 +[4] sharedby[1] = 4 +[4] recv[0] = (-1, 2) from 0 +[4] recv[1] = (-1, 3) from 0 +[4] recv[2] = (-1, 6) from 0 +[4] recv[3] = (-5, 1) from 4 +[4] sharedby[2] = 2 +[4] recv[0] = (-1, 0) from 0 +[4] recv[1] = (-5, 2) from 4 +[4] sharedby[3] = 1 +[4] recv[0] = (-5, 3) from 4 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-0.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-0.out index 518ecffa95d..afdffc27269 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-0.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-0.out @@ -12,3 +12,22 @@ GETNODEINFO OUTPUT [2] Local N 0 [3] Local N 0 [4] Local N 0 +GETBLOCKINFO OUTPUT +[0] Local NP 0 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +[4] Local NP 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 0 +BLOCK MULTI-LEAVES INPUT +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 0 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-1.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-1.out index bd841edc877..7ce5305b5c1 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-1.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-1.out @@ -248,3 +248,166 @@ GETNODEINFO OUTPUT [4] ids[0] = 4 [4] sharedby[23] = 1 [4] ids[0] = 4 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 6 +[0] ids[0] = 1 +[0] ids[1] = 4 +[0] ids[2] = 2 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 7 +[0] procs[1] = 4, shared 4 +[0] ids[0] = 2 +[0] ids[1] = 3 +[0] ids[2] = 6 +[0] ids[3] = 7 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +[4] Local NP 2 +[4] procs[0] = 4, shared 6 +[4] ids[0] = 0 +[4] ids[1] = 1 +[4] ids[2] = 4 +[4] ids[3] = 5 +[4] ids[4] = 3 +[4] ids[5] = 6 +[4] procs[1] = 0, shared 4 +[4] ids[0] = 0 +[4] ids[1] = 1 +[4] ids[2] = 4 +[4] ids[3] = 5 +GETBLOCKNODEINFO OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] ids[0] = 0 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 4 +[0] ids[3] = 4 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 4 +[0] ids[3] = 4 +[0] sharedby[7] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 8 +[4] sharedby[0] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[1] = 4 +[4] ids[0] = 0 +[4] ids[1] = 0 +[4] ids[2] = 4 +[4] ids[3] = 4 +[4] sharedby[2] = 1 +[4] ids[0] = 4 +[4] sharedby[3] = 2 +[4] ids[0] = 4 +[4] ids[1] = 4 +[4] sharedby[4] = 4 +[4] ids[0] = 0 +[4] ids[1] = 0 +[4] ids[2] = 4 +[4] ids[3] = 4 +[4] sharedby[5] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[6] = 2 +[4] ids[0] = 4 +[4] ids[1] = 4 +[4] sharedby[7] = 1 +[4] ids[0] = 4 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[4] input[0] = (-5, 0) +[4] input[1] = (-5, 1) +[4] input[2] = (-5, 2) +[4] input[3] 
= (-5, 3) +[4] input[4] = (-5, 4) +[4] input[5] = (-5, 5) +[4] input[6] = (-5, 6) +[4] input[7] = (-5, 7) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 8 +[0] sharedby[0] = 1 +[0] recv[0] = (-1, 0) from 0 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[2] = 2 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-5, 0) from 4 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-5, 1) from 4 +[0] recv[3] = (-5, 4) from 4 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 4) from 0 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 3) from 0 +[0] recv[1] = (-1, 6) from 0 +[0] recv[2] = (-5, 1) from 4 +[0] recv[3] = (-5, 4) from 4 +[0] sharedby[7] = 2 +[0] recv[0] = (-1, 7) from 0 +[0] recv[1] = (-5, 5) from 4 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 8 +[4] sharedby[0] = 2 +[4] recv[0] = (-1, 2) from 0 +[4] recv[1] = (-5, 0) from 4 +[4] sharedby[1] = 4 +[4] recv[0] = (-1, 3) from 0 +[4] recv[1] = (-1, 6) from 0 +[4] recv[2] = (-5, 1) from 4 +[4] recv[3] = (-5, 4) from 4 +[4] sharedby[2] = 1 +[4] recv[0] = (-5, 2) from 4 +[4] sharedby[3] = 2 +[4] recv[0] = (-5, 3) from 4 +[4] recv[1] = (-5, 6) from 4 +[4] sharedby[4] = 4 +[4] recv[0] = (-1, 3) from 0 +[4] recv[1] = (-1, 6) from 0 +[4] recv[2] = (-5, 1) from 4 +[4] recv[3] = (-5, 4) from 4 +[4] sharedby[5] = 2 +[4] recv[0] = (-1, 7) from 0 +[4] recv[1] = (-5, 5) from 4 +[4] sharedby[6] = 2 +[4] recv[0] = (-5, 3) from 4 +[4] recv[1] = (-5, 6) from 4 +[4] sharedby[7] = 1 +[4] recv[0] = (-5, 7) from 4 diff --git a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-2.out b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-2.out index edecf3a64be..f1de71a728f 100644 --- a/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-2.out +++ b/src/vec/is/tests/output/ex7_ltog_info_nsize-5_bs-3_test-2.out @@ -215,3 +215,145 @@ GETNODEINFO OUTPUT [4] ids[0] = 4 [4] sharedby[11] = 1 [4] ids[0] = 4 +GETBLOCKINFO OUTPUT +[0] Local NP 2 +[0] procs[0] = 0, shared 7 +[0] ids[0] = 0 +[0] ids[1] = 1 +[0] ids[2] = 8 +[0] ids[3] = 3 +[0] ids[4] = 6 +[0] ids[5] = 2 +[0] ids[6] = 4 +[0] procs[1] = 4, shared 5 +[0] ids[0] = 0 +[0] ids[1] = 2 +[0] ids[2] = 3 +[0] ids[3] = 6 +[0] ids[4] = 4 +[1] Local NP 0 +[2] Local NP 0 +[3] Local NP 0 +[4] Local NP 2 +[4] procs[0] = 4, shared 3 +[4] ids[0] = 2 +[4] ids[1] = 1 +[4] ids[2] = 0 +[4] procs[1] = 0, shared 3 +[4] ids[0] = 2 +[4] ids[1] = 1 +[4] ids[2] = 0 +GETBLOCKNODEINFO OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[0] sharedby[1] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] sharedby[2] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 4 +[0] sharedby[3] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 4 +[0] sharedby[4] = 2 +[0] ids[0] = 0 +[0] ids[1] = 4 +[0] sharedby[5] = 1 +[0] ids[0] = 0 +[0] sharedby[6] = 4 +[0] ids[0] = 0 +[0] ids[1] = 0 +[0] ids[2] = 0 +[0] ids[3] = 4 +[0] sharedby[7] = 1 +[0] ids[0] = 0 +[0] sharedby[8] = 2 +[0] ids[0] = 0 +[0] ids[1] = 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 4 +[4] sharedby[0] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[1] = 4 +[4] ids[0] = 0 +[4] ids[1] = 0 +[4] ids[2] = 0 +[4] ids[3] = 4 +[4] sharedby[2] = 2 +[4] ids[0] = 0 +[4] ids[1] = 4 +[4] sharedby[3] = 1 +[4] ids[0] = 4 +BLOCK MULTI-LEAVES INPUT +[0] input[0] = (-1, 0) +[0] input[1] = (-1, 1) +[0] input[2] = (-1, 2) +[0] input[3] = (-1, 3) +[0] input[4] = (-1, 4) +[0] 
input[5] = (-1, 5) +[0] input[6] = (-1, 6) +[0] input[7] = (-1, 7) +[0] input[8] = (-1, 8) +[4] input[0] = (-5, 0) +[4] input[1] = (-5, 1) +[4] input[2] = (-5, 2) +[4] input[3] = (-5, 3) +BLOCK MULTI-LEAVES OUTPUT +[0] Local N 9 +[0] sharedby[0] = 2 +[0] recv[0] = (-1, 0) from 0 +[0] recv[1] = (-5, 2) from 4 +[0] sharedby[1] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[0] sharedby[2] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-5, 1) from 4 +[0] sharedby[3] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-5, 1) from 4 +[0] sharedby[4] = 2 +[0] recv[0] = (-1, 4) from 0 +[0] recv[1] = (-5, 0) from 4 +[0] sharedby[5] = 1 +[0] recv[0] = (-1, 5) from 0 +[0] sharedby[6] = 4 +[0] recv[0] = (-1, 2) from 0 +[0] recv[1] = (-1, 3) from 0 +[0] recv[2] = (-1, 6) from 0 +[0] recv[3] = (-5, 1) from 4 +[0] sharedby[7] = 1 +[0] recv[0] = (-1, 7) from 0 +[0] sharedby[8] = 2 +[0] recv[0] = (-1, 1) from 0 +[0] recv[1] = (-1, 8) from 0 +[1] Local N 0 +[2] Local N 0 +[3] Local N 0 +[4] Local N 4 +[4] sharedby[0] = 2 +[4] recv[0] = (-1, 4) from 0 +[4] recv[1] = (-5, 0) from 4 +[4] sharedby[1] = 4 +[4] recv[0] = (-1, 2) from 0 +[4] recv[1] = (-1, 3) from 0 +[4] recv[2] = (-1, 6) from 0 +[4] recv[3] = (-5, 1) from 4 +[4] sharedby[2] = 2 +[4] recv[0] = (-1, 0) from 0 +[4] recv[1] = (-5, 2) from 4 +[4] sharedby[3] = 1 +[4] recv[0] = (-5, 3) from 4 diff --git a/src/vec/is/utils/ftn-custom/zisltogf.c b/src/vec/is/utils/ftn-custom/zisltogf.c index 110b11a9ca4..4a07bc8e6c0 100644 --- a/src/vec/is/utils/ftn-custom/zisltogf.c +++ b/src/vec/is/utils/ftn-custom/zisltogf.c @@ -3,22 +3,13 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define islocaltoglobalmappingview_ ISLOCALTOGLOBALMAPPINGVIEW #define islocaltoglobalmpnggetinfosize_ ISLOCALTOGLOBALMPNGGETINFOSIZE #define islocaltoglobalmappinggetinfo_ ISLOCALTOGLOBALMAPPINGGETINFO #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define islocaltoglobalmappingview_ islocaltoglobalmappingview #define islocaltoglobalmpnggetinfosize_ islocaltoglobalmpnggetinfosize #define islocaltoglobalmappinggetinfo_ islocaltoglobalmappinggetinfo #endif -PETSC_EXTERN void islocaltoglobalmappingview_(ISLocalToGlobalMapping *mapping, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = ISLocalToGlobalMappingView(*mapping, v); -} - static PetscInt *sprocs, *snumprocs, **sindices; static PetscBool called; PETSC_EXTERN void islocaltoglobalmpnggetinfosize_(ISLocalToGlobalMapping *mapping, PetscInt *size, PetscInt *maxnumprocs, PetscErrorCode *ierr) @@ -53,14 +44,3 @@ PETSC_EXTERN void islocaltoglobalmappinggetinfo_(ISLocalToGlobalMapping *mapping if (*ierr) return; called = PETSC_FALSE; } - -PETSC_EXTERN void islocaltoglobalmappingviewfromoptions_(ISLocalToGlobalMapping *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = ISLocalToGlobalMappingViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/vec/is/utils/ftn-custom/zvsectionisf.c b/src/vec/is/utils/ftn-custom/zvsectionisf.c index 5d60301fa6a..287857371a6 100644 --- a/src/vec/is/utils/ftn-custom/zvsectionisf.c +++ b/src/vec/is/utils/ftn-custom/zvsectionisf.c @@ -8,17 +8,11 @@ #define petscsectionrestorepointsyms_ PETSCSECTIONRESTOREPOINTSYMS #define petscsectiongetfieldpointsyms_ 
PETSCSECTIONGETFIELDPOINTSYMS #define petscsectionrestorefieldpointsyms_ PETSCSECTIONRESTOREFIELDPOINTSYMS - #define petscsectionview_ PETSCSECTIONVIEW - #define petscsectiongetfieldname_ PETSCSECTIONGETFIELDNAME - #define petscsectionsetfieldname_ PETSCSECTIONSETFIELDNAME #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define petscsectiongetpointsyms_ petscsectiongetpointsyms #define petscsectionrestorepointsyms_ petscsectionrestorepointsyms #define petscsectiongetfieldpointsyms_ petscsectiongetfieldpointsyms #define petscsectionrestorefieldpointsyms_ petscsectionrestorefieldpointsyms - #define petscsectionview_ petscsectionview - #define petscsectiongetfieldname_ petscsectiongetfieldname - #define petscsectionsetfieldname_ petscsectionsetfieldname #endif PETSC_EXTERN void petscsectiongetpointsyms_(PetscSection section, PetscInt *numPoints, PetscInt *points, PetscInt ***perms, PetscScalar ***rots, int *__ierr) @@ -37,31 +31,3 @@ PETSC_EXTERN void petscsectionrestorefieldpointsyms_(PetscSection section, Petsc { *__ierr = PetscSectionRestoreFieldPointSyms(section, *field, *numPoints, points, (const PetscInt ***)perms, (const PetscScalar ***)rots); } - -PETSC_EXTERN void petscsectionview_(PetscSection *s, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - - PetscPatchDefaultViewers_Fortran(vin, v); - *ierr = PetscSectionView(*s, v); -} - -PETSC_EXTERN void petscsectiongetfieldname_(PetscSection *s, PetscInt *field, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *fname; - - *ierr = PetscSectionGetFieldName(*s, *field, &fname); - if (*ierr) return; - *ierr = PetscStrncpy(name, fname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} - -PETSC_EXTERN void petscsectionsetfieldname_(PetscSection *s, PetscInt *field, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *f; - - FIXCHAR(name, len, f); - *ierr = PetscSectionSetFieldName(*s, *field, f); - if (*ierr) return; - FREECHAR(name, f); -} diff --git a/src/vec/is/utils/hdf5/hdf5io.c b/src/vec/is/utils/hdf5/hdf5io.c index 5a357d67dae..5035dcafd92 100644 --- a/src/vec/is/utils/hdf5/hdf5io.c +++ b/src/vec/is/utils/hdf5/hdf5io.c @@ -208,7 +208,7 @@ static PetscErrorCode PetscViewerHDF5ReadArray_Private(PetscViewer viewer, HDF5R .seealso: `PetscViewer`, `PETSCVIEWERHDF5`, `PetscViewerHDF5Open()`, `PetscViewerHDF5PushGroup()`, `PetscViewerHDF5OpenGroup()`, `PetscViewerHDF5ReadSizes()`, `VecLoad()`, `ISLoad()`, `PetscLayout` @*/ -PetscErrorCode PetscViewerHDF5Load(PetscViewer viewer, const char *name, PetscLayout map, hid_t datatype, void **newarr) +PetscErrorCode PetscViewerHDF5Load(PetscViewer viewer, const char name[], PetscLayout map, hid_t datatype, void **newarr) { PetscBool has; char *group; diff --git a/src/vec/is/utils/isltog.c b/src/vec/is/utils/isltog.c index 2ff20661bf9..8be18ffa00a 100644 --- a/src/vec/is/utils/isltog.c +++ b/src/vec/is/utils/isltog.c @@ -45,7 +45,7 @@ typedef struct { .seealso: [](sec_scatter), `IS`, `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` @*/ -PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) +PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt *points[]) { PetscInt numCells, step = 1; PetscBool isStride; @@ -76,7 +76,7 @@ PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, con .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` @*/ 
-PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) +PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt *points[]) { PetscInt step = 1; PetscBool isStride; @@ -88,7 +88,7 @@ PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISGetPointSubrange - Configures the input `IS` to be a subrange for the traversal information given Not Collective @@ -109,7 +109,7 @@ PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()` @*/ -PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points) +PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt points[]) { PetscFunctionBeginHot; if (points) { @@ -229,6 +229,7 @@ static PetscErrorCode ISLocalToGlobalMappingResetBlockInfo_Private(ISLocalToGlob } if (mapping->info_nodei) PetscCall(PetscFree(mapping->info_nodei[0])); PetscCall(PetscFree2(mapping->info_nodec, mapping->info_nodei)); + PetscCall(PetscSFDestroy(&mapping->multileaves_sf)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -322,7 +323,7 @@ PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISLocalToGlobalMappingViewFromOptions - View an `ISLocalToGlobalMapping` based on values in the options database Collective @@ -337,7 +338,7 @@ PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, Pet Note: See `PetscObjectViewFromOptions()` for the available `PetscViewer` and `PetscViewerFormat` -.seealso: [](sec_scatter), `PetscViewer`, ``ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()` +.seealso: [](sec_scatter), `PetscViewer`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()` @*/ PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[]) { @@ -347,7 +348,7 @@ PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, P PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISLocalToGlobalMappingView - View a local to global mapping Collective on viewer @@ -532,9 +533,8 @@ PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *map PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) - ordering and a global parallel ordering. +/*@ + ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) ordering and a global parallel ordering induced by a star forest. 
Collective @@ -1091,7 +1091,7 @@ PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, Level: advanced .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`, - `ISLocalToGlobalMappingRestoreBlockInfo()` + `ISLocalToGlobalMappingRestoreBlockInfo()`, `ISLocalToGlobalMappingGetBlockMultiLeavesSF()` @*/ PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) { @@ -1113,7 +1113,7 @@ PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping Input Parameter: . mapping - the mapping from local to global indexing - Output Parameter: + Output Parameters: + n - number of local block nodes . n_procs - an array storing the number of processes for each local block node (including self) - procs - the processes' rank for each local block node (sorted, self is first) @@ -1166,9 +1166,53 @@ PetscErrorCode ISLocalToGlobalMappingRestoreBlockNodeInfo(ISLocalToGlobalMapping PetscFunctionReturn(PETSC_SUCCESS); } +/*@C + ISLocalToGlobalMappingGetBlockMultiLeavesSF - Get the star forest used to communicate multi-leaf block data + + Collective the first time it is called + + Input Parameter: +. mapping - the mapping from local to global indexing + + Output Parameter: +. mlsf - the `PetscSF` + + Level: advanced + + Notes: + The returned star forest is suitable for exchanging local information with other processes sharing the same global block index. + For example, suppose a mapping with two processes has been created with +.vb + rank 0 global block indices: [0, 1, 2] + rank 1 global block indices: [2, 3, 4] +.ve + and we want to share the local information +.vb + rank 0 data: [-1, -2, -3] + rank 1 data: [1, 2, 3] +.ve + then, the broadcasting action of `mlsf` allows one to collect (the received values for each block node are ordered by sharing rank) +.vb + rank 0 mlleafdata: [-1, -2, -3, 1] + rank 1 mlleafdata: [-3, 1, 2, 3] +.ve + Use `ISLocalToGlobalMappingGetBlockNodeInfo()` to index into the multi-leaf data.
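+
+ A minimal usage sketch of this broadcast (illustrative only; `mydata` is hypothetical user data holding one `PetscInt` per local block node, and error handling is elided):
+.vb
+  PetscSF   mlsf;
+  PetscInt  nroots, nleaves, *mlleafdata;
+
+  PetscCall(ISLocalToGlobalMappingGetBlockMultiLeavesSF(mapping, &mlsf));
+  PetscCall(PetscSFGetGraph(mlsf, &nroots, &nleaves, NULL, NULL)); /* nroots equals the number of local block nodes */
+  PetscCall(PetscMalloc1(nleaves, &mlleafdata));
+  PetscCall(PetscSFBcastBegin(mlsf, MPIU_INT, mydata, mlleafdata, MPI_REPLACE));
+  PetscCall(PetscSFBcastEnd(mlsf, MPIU_INT, mydata, mlleafdata, MPI_REPLACE));
+  PetscCall(PetscFree(mlleafdata));
+.ve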
+ +.seealso: [](sec_scatter), `ISLocalToGlobalMappingGetBlockNodeInfo()`, `PetscSF` +@*/ +PetscErrorCode ISLocalToGlobalMappingGetBlockMultiLeavesSF(ISLocalToGlobalMapping mapping, PetscSF *mlsf) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); + PetscAssertPointer(mlsf, 2); + PetscCall(ISLocalToGlobalMappingSetUpBlockInfo_Private(mapping)); + *mlsf = mapping->multileaves_sf; + PetscFunctionReturn(PETSC_SUCCESS); +} + static PetscErrorCode ISLocalToGlobalMappingSetUpBlockInfo_Private(ISLocalToGlobalMapping mapping) { - PetscSF sf; + PetscSF sf, sf2, imsf, msf; MPI_Comm comm; const PetscSFNode *sfnode; PetscSFNode *newsfnode; @@ -1182,7 +1226,7 @@ static PetscErrorCode ISLocalToGlobalMappingSetUpBlockInfo_Private(ISLocalToGlob PetscMPIInt rank, size; PetscFunctionBegin; - if (mapping->info_numprocs) PetscFunctionReturn(PETSC_SUCCESS); + if (mapping->multileaves_sf) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm)); PetscCallMPI(MPI_Comm_size(comm, &size)); PetscCallMPI(MPI_Comm_rank(comm, &rank)); @@ -1243,14 +1287,15 @@ static PetscErrorCode ISLocalToGlobalMappingSetUpBlockInfo_Private(ISLocalToGlob PetscCall(PetscSFGatherBegin(sf, MPIU_INT, leafdata, mrootdata)); PetscCall(PetscSFGatherEnd(sf, MPIU_INT, leafdata, mrootdata)); - /* set new multi-leaves graph into the SF */ - PetscCall(PetscSFSetGraph(sf, mnroots, newnleaves, NULL, PETSC_OWN_POINTER, newsfnode, PETSC_OWN_POINTER)); - PetscCall(PetscSFSetUp(sf)); + /* from multi-roots to multi-leaves */ + PetscCall(PetscSFCreate(comm, &sf2)); + PetscCall(PetscSFSetGraph(sf2, mnroots, newnleaves, NULL, PETSC_OWN_POINTER, newsfnode, PETSC_OWN_POINTER)); + PetscCall(PetscSFSetUp(sf2)); /* broadcast multi-root data to multi-leaves */ PetscCall(PetscMalloc1(newnleaves, &newleafdata)); - PetscCall(PetscSFBcastBegin(sf, MPIU_INT, mrootdata, newleafdata, MPI_REPLACE)); - PetscCall(PetscSFBcastEnd(sf, MPIU_INT, mrootdata, newleafdata, MPI_REPLACE)); + PetscCall(PetscSFBcastBegin(sf2, MPIU_INT, mrootdata, newleafdata, MPI_REPLACE)); + PetscCall(PetscSFBcastEnd(sf2, MPIU_INT, mrootdata, newleafdata, MPI_REPLACE)); /* sort sharing ranks */ for (i = 0, m = 0; i < nleaves; i++) { @@ -1277,9 +1322,9 @@ static PetscErrorCode ISLocalToGlobalMappingSetUpBlockInfo_Private(ISLocalToGlob PetscCall(PetscHMapIDestroy(&neighs)); /* collect info data */ - PetscCall(PetscMalloc1(mapping->info_nproc + 1, &mapping->info_numprocs)); - PetscCall(PetscMalloc1(mapping->info_nproc + 1, &mapping->info_indices)); - for (i = 0; i < mapping->info_nproc + 1; i++) mapping->info_indices[i] = NULL; + PetscCall(PetscMalloc1(mapping->info_nproc, &mapping->info_numprocs)); + PetscCall(PetscMalloc1(mapping->info_nproc, &mapping->info_indices)); + for (i = 0; i < mapping->info_nproc; i++) mapping->info_indices[i] = NULL; PetscCall(PetscMalloc1(nleaves, &mask)); PetscCall(PetscMalloc1(nleaves, &tmpg)); @@ -1316,10 +1361,17 @@ static PetscErrorCode ISLocalToGlobalMappingSetUpBlockInfo_Private(ISLocalToGlob for (i = 0; i < nleaves - 1; i++) mapping->info_nodei[i + 1] = mapping->info_nodei[i] + mapping->info_nodec[i]; PetscCall(PetscArraycpy(mapping->info_nodei[0], newleafdata, newnleaves)); + /* Create SF from leaves to multi-leaves */ + PetscCall(PetscSFGetMultiSF(sf, &msf)); + PetscCall(PetscSFCreateInverseSF(msf, &imsf)); + PetscCall(PetscSFCompose(imsf, sf2, &mapping->multileaves_sf)); + PetscCall(PetscSFDestroy(&imsf)); + PetscCall(PetscSFDestroy(&sf)); + PetscCall(PetscSFDestroy(&sf2)); + 
PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(mapping, &gidxs)); PetscCall(PetscFree(tmpg)); PetscCall(PetscFree(mask)); - PetscCall(PetscSFDestroy(&sf)); PetscCall(PetscFree3(mrootdata, leafdata, leafrd)); PetscCall(PetscFree(newleafdata)); PetscFunctionReturn(PETSC_SUCCESS); @@ -1597,7 +1649,7 @@ M*/ .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()` @*/ -PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) +PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); @@ -1631,7 +1683,7 @@ PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, con .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` @*/ -PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) +PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); @@ -1656,7 +1708,7 @@ PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()` @*/ -PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) +PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); @@ -1678,7 +1730,7 @@ PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` @*/ -PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) +PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt *array[]) { PetscFunctionBegin; PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); @@ -1688,7 +1740,7 @@ PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings Not Collective @@ -1788,7 +1840,7 @@ PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMap /*@C ISLocalToGlobalMappingRegister - Registers a method for applying a global to local mapping with an `ISLocalToGlobalMapping` - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new method @@ -1820,7 +1872,7 @@ PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISLocalToGlobalMappingSetType - Sets the implementation type `ISLocalToGlobalMapping` will use Logically Collective @@ -1873,7 +1925,7 @@ PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLoca PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ ISLocalToGlobalMappingGetType - Get the type of the `ISLocalToGlobalMapping` Not Collective diff --git 
a/src/vec/is/utils/pmap.c b/src/vec/is/utils/pmap.c index ff3d794f90d..6998a50b481 100644 --- a/src/vec/is/utils/pmap.c +++ b/src/vec/is/utils/pmap.c @@ -589,3 +589,72 @@ PetscErrorCode PetscLayoutCompare(PetscLayout mapa, PetscLayout mapb, PetscBool if (mapa->N == mapb->N && mapa->range && mapb->range && mapa->size == mapb->size) PetscCall(PetscArraycmp(mapa->range, mapb->range, mapa->size + 1, congruent)); PetscFunctionReturn(PETSC_SUCCESS); } + +/*@ + PetscLayoutFindOwner - Find the owning MPI process for a global index + + Not Collective; No Fortran Support + + Input Parameters: ++ map - the layout +- idx - global index to find the owner of + + Output Parameter: +. owner - the owning rank + + Level: developer + +.seealso: `PetscLayout`, `PetscLayoutFindOwnerIndex()` +@*/ +PetscErrorCode PetscLayoutFindOwner(PetscLayout map, PetscInt idx, PetscMPIInt *owner) +{ + PetscMPIInt lo = 0, hi, t; + + PetscFunctionBegin; + *owner = -1; /* GCC erroneously issues warning about possibly uninitialized use when error condition */ + PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first"); + PetscAssert(idx >= 0 && idx <= map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT " is out of range", idx); + hi = map->size; + while (hi - lo > 1) { + t = lo + (hi - lo) / 2; + if (idx < map->range[t]) hi = t; + else lo = t; + } + *owner = lo; + PetscFunctionReturn(PETSC_SUCCESS); +} + +/*@ + PetscLayoutFindOwnerIndex - Find the owning MPI process and the local index on that process for a global index + + Not Collective; No Fortran Support + + Input Parameters: ++ map - the layout +- idx - global index to find the owner of + + Output Parameters: ++ owner - the owning rank +- lidx - local index used by the owner for `idx` + + Level: developer + +.seealso: `PetscLayout`, `PetscLayoutFindOwner()` +@*/ +PetscErrorCode PetscLayoutFindOwnerIndex(PetscLayout map, PetscInt idx, PetscMPIInt *owner, PetscInt *lidx) +{ + PetscMPIInt lo = 0, hi, t; + + PetscFunctionBegin; + PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first"); + PetscAssert(idx >= 0 && idx <= map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT " is out of range", idx); + hi = map->size; + while (hi - lo > 1) { + t = lo + (hi - lo) / 2; + if (idx < map->range[t]) hi = t; + else lo = t; + } + if (owner) *owner = lo; + if (lidx) *lidx = idx - map->range[lo]; + PetscFunctionReturn(PETSC_SUCCESS); +} diff --git a/src/vec/pf/impls/string/cstring.c b/src/vec/pf/impls/string/cstring.c index 29a66a13126..12411e8a0d0 100644 --- a/src/vec/pf/impls/string/cstring.c +++ b/src/vec/pf/impls/string/cstring.c @@ -54,7 +54,7 @@ static PetscErrorCode PFSetFromOptions_String(PF pf, PetscOptionItems *PetscOpti .seealso: `PFSetFromOptions()` @*/ -PetscErrorCode PFStringSetFunction(PF pf, const char *string) +PetscErrorCode PFStringSetFunction(PF pf, const char string[]) { char task[1024], tmp[PETSC_MAX_PATH_LEN], lib[PETSC_MAX_PATH_LEN]; PetscBool tmpshared, wdshared, keeptmpfiles = PETSC_FALSE; diff --git a/src/vec/pf/interface/ftn-custom/makefile b/src/vec/pf/interface/ftn-custom/makefile deleted file mode 100644 index 89dab51061a..00000000000 --- a/src/vec/pf/interface/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables 
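For example, a sketch of the bisection performed by the new PetscLayoutFindOwnerIndex() above (illustrative values; `map` is assumed to be a `PetscLayout` on which PetscLayoutSetUp() has been called):

  PetscMPIInt owner;
  PetscInt    lidx;

  /* with map->range = {0, 4, 8, 12} (three ranks, four entries each),
     global index 9 bisects to owner = 2 and local index lidx = 1 */
  PetscCall(PetscLayoutFindOwnerIndex(map, 9, &owner, &lidx));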
-include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/pf/interface/ftn-custom/zpff.c b/src/vec/pf/interface/ftn-custom/zpff.c deleted file mode 100644 index f3ca77dc517..00000000000 --- a/src/vec/pf/interface/ftn-custom/zpff.c +++ /dev/null @@ -1,20 +0,0 @@ -#include -#include -#include - -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define pfviewfromoptions_ PFVIEWFROMOPTIONS -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define pfviewfromoptions_ pfviewfromoptions -#endif - -PETSC_EXTERN void pfviewfromoptions_(PF *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = PFViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/vec/pf/interface/pf.c b/src/vec/pf/interface/pf.c index 7ab8036910e..d1e504111ed 100644 --- a/src/vec/pf/interface/pf.c +++ b/src/vec/pf/interface/pf.c @@ -200,7 +200,7 @@ PetscErrorCode PFApply(PF pf, PetscInt n, const PetscScalar *x, PetscScalar *y) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PFViewFromOptions - View a `PF` based on options set in the options database Collective @@ -307,7 +307,7 @@ PetscErrorCode PFRegister(const char sname[], PetscErrorCode (*function)(PF, voi PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PFGetType - Gets the `PFType` name (as a string) from the `PF` context. @@ -332,7 +332,7 @@ PetscErrorCode PFGetType(PF pf, PFType *type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ PFSetType - Builds `PF` for a particular function Collective diff --git a/src/vec/vec/impls/mpi/ftn-custom/makefile b/src/vec/vec/impls/mpi/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/vec/vec/impls/mpi/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/vec/impls/mpi/ftn-custom/zpbvecf.c b/src/vec/vec/impls/mpi/ftn-custom/zpbvecf.c deleted file mode 100644 index cd0bb1d98b2..00000000000 --- a/src/vec/vec/impls/mpi/ftn-custom/zpbvecf.c +++ /dev/null @@ -1,29 +0,0 @@ -#include -#include -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define veccreatempiwitharray_ VECCREATEMPIWITHARRAY - #define veccreateghostblockwitharray_ VECCREATEGHOSTBLOCKWITHARRAY - #define veccreateghostwitharray_ VECCREATEGHOSTWITHARRAY -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define veccreatempiwitharray_ veccreatempiwitharray - #define veccreateghostblockwitharray_ veccreateghostblockwitharray - #define veccreateghostwitharray_ veccreateghostwitharray -#endif - -PETSC_EXTERN void veccreatempiwitharray_(MPI_Comm *comm, PetscInt *bs, PetscInt *n, PetscInt *N, PetscScalar *s, Vec *V, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(s); - *ierr = VecCreateMPIWithArray(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *n, *N, s, V); -} - -PETSC_EXTERN void veccreateghostblockwitharray_(MPI_Comm *comm, PetscInt *bs, PetscInt *n, PetscInt *N, PetscInt *nghost, PetscInt *ghosts, PetscScalar *array, Vec *vv, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(array); - *ierr = VecCreateGhostBlockWithArray(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *n, *N, *nghost, ghosts, array, vv); -} - -PETSC_EXTERN void veccreateghostwitharray_(MPI_Comm *comm, PetscInt *n, PetscInt *N, PetscInt *nghost, PetscInt *ghosts, PetscScalar *array, Vec *vv, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(array); - *ierr = 
VecCreateGhostWithArray(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *n, *N, *nghost, ghosts, array, vv); -} diff --git a/src/vec/vec/impls/mpi/kokkos/mpikok.kokkos.cxx b/src/vec/vec/impls/mpi/kokkos/mpikok.kokkos.cxx index 24bdc20b4fd..220a4c9257a 100644 --- a/src/vec/vec/impls/mpi/kokkos/mpikok.kokkos.cxx +++ b/src/vec/vec/impls/mpi/kokkos/mpikok.kokkos.cxx @@ -279,6 +279,7 @@ static PetscErrorCode VecDuplicateVecs_MPIKokkos_GEMV(Vec w, PetscInt m, Vec *V[ PetscScalar *array_h, *array_d; PetscLayout map; Vec_MPI *wmpi = (Vec_MPI *)w->data; + PetscBool mdot_use_gemv, maxpy_use_gemv; PetscFunctionBegin; PetscCall(PetscKokkosInitializeCheck()); // as we'll call kokkos_malloc() @@ -288,8 +289,7 @@ static PetscErrorCode VecDuplicateVecs_MPIKokkos_GEMV(Vec w, PetscInt m, Vec *V[ } else { PetscCall(PetscMalloc1(m, V)); PetscCall(VecGetLayout(w, &map)); - lda = map->n; - lda = ((lda + 31) / 32) * 32; // make every vector 32-elements aligned + VecGetLocalSizeAligned(w, 64, &lda); // get in lda the 64-bytes aligned local size // allocate raw arrays on host and device for the whole m vectors PetscCall(PetscCalloc1(m * lda, &array_h)); @@ -298,7 +298,8 @@ static PetscErrorCode VecDuplicateVecs_MPIKokkos_GEMV(Vec w, PetscInt m, Vec *V[ #else PetscCallCXX(array_d = static_cast(Kokkos::kokkos_malloc("VecDuplicateVecs", sizeof(PetscScalar) * (m * lda)))); #endif - + mdot_use_gemv = (w->ops->mdot == VecMDot_MPIKokkos_GEMV) ? PETSC_TRUE : PETSC_FALSE; + maxpy_use_gemv = (w->ops->maxpy == VecMAXPY_SeqKokkos_GEMV) ? PETSC_TRUE : PETSC_FALSE; // create the m vectors with raw arrays for (PetscInt i = 0; i < m; i++) { Vec v; @@ -306,6 +307,13 @@ static PetscErrorCode VecDuplicateVecs_MPIKokkos_GEMV(Vec w, PetscInt m, Vec *V[ PetscCallCXX(static_cast(v->spptr)->v_dual.modify_host()); // as we only init'ed array_h PetscCall(PetscObjectListDuplicate(((PetscObject)w)->olist, &((PetscObject)v)->olist)); PetscCall(PetscFunctionListDuplicate(((PetscObject)w)->qlist, &((PetscObject)v)->qlist)); + if (mdot_use_gemv) { // inherit w's mdot/maxpy optimization setting + v->ops->mdot = VecMDot_MPIKokkos_GEMV; + v->ops->mtdot = VecMTDot_MPIKokkos_GEMV; + v->ops->mdot_local = VecMDot_SeqKokkos_GEMV; + v->ops->mtdot_local = VecMTDot_SeqKokkos_GEMV; + } + if (maxpy_use_gemv) v->ops->maxpy = VecMAXPY_SeqKokkos_GEMV; v->ops->view = w->ops->view; v->stash.donotstash = w->stash.donotstash; v->stash.ignorenegidx = w->stash.ignorenegidx; diff --git a/src/vec/vec/impls/mpi/pbvec.c b/src/vec/vec/impls/mpi/pbvec.c index 89edebe808e..e61fa1accd8 100644 --- a/src/vec/vec/impls/mpi/pbvec.c +++ b/src/vec/vec/impls/mpi/pbvec.c @@ -66,14 +66,11 @@ static PetscErrorCode VecDuplicateVecs_MPI_GEMV(Vec w, PetscInt m, Vec *V[]) w->ops->duplicatevecs = VecDuplicateVecs_Default; PetscCall(VecDuplicateVecs(w, m, V)); } else { - PetscInt nlocal; PetscScalar *array; PetscInt64 lda; // use 64-bit as we will do "m * lda" PetscCall(PetscMalloc1(m, V)); - PetscCall(VecGetLocalSize(w, &nlocal)); - lda = nlocal; - lda = ((lda + 31) / 32) * 32; // make every vector 32-elements aligned + VecGetLocalSizeAligned(w, 64, &lda); // get in lda the 64-bytes aligned local size PetscCall(PetscCalloc1(m * lda, &array)); for (PetscInt i = 0; i < m; i++) { @@ -552,11 +549,10 @@ PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, co if (mdot_use_gemv || maxpy_use_gemv) v->ops[0].duplicatevecs = VecDuplicateVecs_MPI_GEMV; if (mdot_use_gemv) { - v->ops[0].duplicatevecs = VecDuplicateVecs_MPI_GEMV; - v->ops[0].mdot = VecMDot_MPI_GEMV; - v->ops[0].mdot_local = 
VecMDot_Seq_GEMV; - v->ops[0].mtdot = VecMTDot_MPI_GEMV; - v->ops[0].mtdot_local = VecMTDot_Seq_GEMV; + v->ops[0].mdot = VecMDot_MPI_GEMV; + v->ops[0].mdot_local = VecMDot_Seq_GEMV; + v->ops[0].mtdot = VecMTDot_MPI_GEMV; + v->ops[0].mtdot_local = VecMTDot_Seq_GEMV; } if (maxpy_use_gemv) v->ops[0].maxpy = VecMAXPY_Seq_GEMV; @@ -661,7 +657,7 @@ PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecCreateMPIWithArray - Creates a parallel, array-style vector, where the user provides the array space to store the vector values. @@ -705,7 +701,7 @@ PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, Pet PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor; the caller allocates the array space. @@ -902,8 +898,7 @@ PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[]) PetscFunctionReturn(PETSC_SUCCESS); } -/* ------------------------------------------------------------------------------------------*/ -/*@C +/*@ VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor; the caller allocates the array space. Indices in the ghost region are based on blocks. diff --git a/src/vec/vec/impls/mpi/vmpicr.c b/src/vec/vec/impls/mpi/vmpicr.c index 9122d6a48c3..872ab640bae 100644 --- a/src/vec/vec/impls/mpi/vmpicr.c +++ b/src/vec/vec/impls/mpi/vmpicr.c @@ -25,8 +25,12 @@ Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the same type as an existing vector. + If `n` is not `PETSC_DECIDE`, then the value determines the `PetscLayout` of the vector and the ranges returned by + `VecGetOwnershipRange()` and `VecGetOwnershipRanges()` + .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`, - `VecCreateMPIWithArray()`, `VecCreateGhostWithArray()`, `VecMPISetGhost()` + `VecCreateMPIWithArray()`, `VecCreateGhostWithArray()`, `VecMPISetGhost()`, `PetscLayout`, + `VecGetOwnershipRange()`, `VecGetOwnershipRanges()` @*/ PetscErrorCode VecCreateMPI(MPI_Comm comm, PetscInt n, PetscInt N, Vec *v) { diff --git a/src/vec/vec/impls/nest/ftn-custom/zvecnestf.c b/src/vec/vec/impls/nest/ftn-custom/zvecnestf.c index d02a41d6b8c..bc4dc173ad8 100644 --- a/src/vec/vec/impls/nest/ftn-custom/zvecnestf.c +++ b/src/vec/vec/impls/nest/ftn-custom/zvecnestf.c @@ -3,11 +3,9 @@ #if defined(PETSC_HAVE_FORTRAN_CAPS) #define vecnestgetsubvecs_ VECNESTGETSUBVECS - #define vecnestsetsubvecs_ VECNESTSETSUBVECS #define veccreatenest_ VECCREATENEST #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define vecnestgetsubvecs_ vecnestgetsubvecs - #define vecnestsetsubvecs_ vecnestsetsubvecs #define veccreatenest_ veccreatenest #endif @@ -25,11 +23,6 @@ PETSC_EXTERN void vecnestgetsubvecs_(Vec *X, PetscInt *N, Vec *sx, PetscErrorCod } } -PETSC_EXTERN void vecnestsetsubvecs_(Vec *X, PetscInt *N, PetscInt *idxm, Vec *sx, PetscErrorCode *ierr) -{ - *ierr = VecNestSetSubVecs(*X, *N, idxm, sx); -} - PETSC_EXTERN void veccreatenest_(MPI_Fint *comm, PetscInt *nb, IS is[], Vec x[], Vec *Y, int *ierr) { CHKFORTRANNULLOBJECT(is); diff --git a/src/vec/vec/impls/nest/vecnest.c b/src/vec/vec/impls/nest/vecnest.c index 7c8e7682f16..7eecb547571 100644 --- a/src/vec/vec/impls/nest/vecnest.c +++ b/src/vec/vec/impls/nest/vecnest.c @@ -883,7 +883,7 @@ static PetscErrorCode VecNestGetSubVecs_Nest(Vec X, PetscInt *N, Vec **sx) Output Parameters: + N - number 
of nested vecs -- sx - array of vectors +- sx - array of vectors, can pass in `NULL` Level: developer @@ -891,11 +891,11 @@ static PetscErrorCode VecNestGetSubVecs_Nest(Vec X, PetscInt *N, Vec **sx) The user should not free the array `sx`. Fortran Notes: - The caller must allocate the array to hold the subvectors. + The caller must allocate the array to hold the subvectors and pass it in. .seealso: `VECNEST`, [](ch_vectors), `Vec`, `VecType`, `VecNestGetSize()`, `VecNestGetSubVec()` @*/ -PetscErrorCode VecNestGetSubVecs(Vec X, PetscInt *N, Vec **sx) +PetscErrorCode VecNestGetSubVecs(Vec X, PetscInt *N, Vec *sx[]) { PetscFunctionBegin; PetscUseMethod(X, "VecNestGetSubVecs_C", (Vec, PetscInt *, Vec **), (X, N, sx)); @@ -914,9 +914,9 @@ static PetscErrorCode VecNestSetSubVec_Private(Vec X, PetscInt idxm, Vec x) /* check if idxm < bx->nb */ PetscCheck(idxm < bx->nb, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Out of range index value %" PetscInt_FMT " maximum %" PetscInt_FMT, idxm, bx->nb); - PetscCall(VecDestroy(&bx->v[idxm])); /* destroy the existing vector */ - PetscCall(VecDuplicate(x, &bx->v[idxm])); /* duplicate the layout of given vector */ - PetscCall(VecCopy(x, bx->v[idxm])); /* copy the contents of the given vector */ + PetscCall(PetscObjectReference((PetscObject)x)); + PetscCall(VecDestroy(&bx->v[idxm])); + bx->v[idxm] = x; /* check if we need to update the IS for the block */ offset = X->map->rstart; @@ -990,9 +990,11 @@ static PetscErrorCode VecNestSetSubVec_Nest(Vec X, PetscInt idxm, Vec sx) Level: developer - Note: + Notes: The new vector `sx` does not have to be of same size as X[idxm]. Arbitrary vector layouts are allowed. + The nest vector `X` keeps a reference to `sx` rather than creating a duplicate. + .seealso: `VECNEST`, [](ch_vectors), `Vec`, `VecType`, `VecNestSetSubVecs()`, `VecNestGetSubVec()` @*/ PetscErrorCode VecNestSetSubVec(Vec X, PetscInt idxm, Vec sx) @@ -1012,7 +1014,7 @@ static PetscErrorCode VecNestSetSubVecs_Nest(Vec X, PetscInt N, PetscInt *idxm, PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecNestSetSubVecs - Sets the component vectors at the specified indices in a nest vector. Not Collective @@ -1025,13 +1027,15 @@ static PetscErrorCode VecNestSetSubVecs_Nest(Vec X, PetscInt N, PetscInt *idxm, Level: developer - Note: + Notes: The components in the vector array `sx` do not have to be of the same size as corresponding components in `X`. The user can also free the array `sx` after the call. + The nest vector `X` keeps references to `sx` vectors rather than creating duplicates. 
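+
+ For example, the following is safe because `X` takes its own references (a sketch; `X` is assumed to be a `VECNEST` and `sub` a vector for block 0):
+.vb
+  PetscInt idx = 0;
+
+  PetscCall(VecNestSetSubVecs(X, 1, &idx, &sub));
+  PetscCall(VecDestroy(&sub)); /* X still holds its own reference to the vector */
+.ve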
+ .seealso: `VECNEST`, [](ch_vectors), `Vec`, `VecType`, `VecNestGetSize()`, `VecNestGetSubVec()` @*/ -PetscErrorCode VecNestSetSubVecs(Vec X, PetscInt N, PetscInt *idxm, Vec *sx) +PetscErrorCode VecNestSetSubVecs(Vec X, PetscInt N, PetscInt idxm[], Vec sx[]) { PetscFunctionBegin; PetscUseMethod(X, "VecNestSetSubVecs_C", (Vec, PetscInt, PetscInt *, Vec *), (X, N, idxm, sx)); diff --git a/src/vec/vec/impls/seq/bvec1.c b/src/vec/vec/impls/seq/bvec1.c index d548c94c599..ed98a5f1755 100644 --- a/src/vec/vec/impls/seq/bvec1.c +++ b/src/vec/vec/impls/seq/bvec1.c @@ -100,21 +100,19 @@ PetscErrorCode VecAXPBY_Seq(Vec yin, PetscScalar a, PetscScalar b, Vec xin) } else { const PetscInt n = yin->map->n; const PetscScalar *xx; - PetscInt flops; PetscScalar *yy; PetscCall(VecGetArrayRead(xin, &xx)); PetscCall(VecGetArray(yin, &yy)); if (b == (PetscScalar)0.0) { - flops = n; for (PetscInt i = 0; i < n; ++i) yy[i] = a * xx[i]; + PetscCall(PetscLogFlops(n)); } else { - flops = 3 * n; for (PetscInt i = 0; i < n; ++i) yy[i] = a * xx[i] + b * yy[i]; + PetscCall(PetscLogFlops(3.0 * n)); } PetscCall(VecRestoreArrayRead(xin, &xx)); PetscCall(VecRestoreArray(yin, &yy)); - PetscCall(PetscLogFlops(flops)); } PetscFunctionReturn(PETSC_SUCCESS); } diff --git a/src/vec/vec/impls/seq/bvec2.c b/src/vec/vec/impls/seq/bvec2.c index 063a821d2bd..8269100e7b4 100644 --- a/src/vec/vec/impls/seq/bvec2.c +++ b/src/vec/vec/impls/seq/bvec2.c @@ -765,15 +765,11 @@ static PetscErrorCode VecDuplicateVecs_Seq_GEMV(Vec w, PetscInt m, Vec *V[]) w->ops->duplicatevecs = VecDuplicateVecs_Default; PetscCall(VecDuplicateVecs(w, m, V)); } else { - PetscInt nlocal; PetscScalar *array; PetscInt64 lda; // use 64-bit as we will do "m * lda" PetscCall(PetscMalloc1(m, V)); - PetscCall(VecGetLocalSize(w, &nlocal)); - lda = nlocal; - lda = ((lda + 31) / 32) * 32; // make every vector 32-elements aligned - + VecGetLocalSizeAligned(w, 64, &lda); // get in lda the 64-bytes aligned local size PetscCall(PetscCalloc1(m * lda, &array)); for (PetscInt i = 0; i < m; i++) { Vec v; @@ -950,7 +946,7 @@ PetscErrorCode VecCreate_Seq_Private(Vec v, const PetscScalar array[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecCreateSeqWithArray - Creates a standard,sequential array-style vector, where the user provides the array space to store the vector values. diff --git a/src/vec/vec/impls/seq/cupm/vecseqcupm_impl.hpp b/src/vec/vec/impls/seq/cupm/vecseqcupm_impl.hpp index 7891494b189..068b0403231 100644 --- a/src/vec/vec/impls/seq/cupm/vecseqcupm_impl.hpp +++ b/src/vec/vec/impls/seq/cupm/vecseqcupm_impl.hpp @@ -1156,12 +1156,9 @@ inline PetscErrorCode VecSeq_CUPM::MDot_(std::false_type, Vec xin, PetscInt n // how many sub streams to create, if nv <= batchsize we can do this without looping, so we // do not create substreams. Note we don't create more than 8 streams, in practice we could // not get more parallelism with higher numbers. - const auto num_sub_streams = nv > batchsize ? std::min((nv + batchsize) / batchsize, batchsize) : 0; - const auto n = xin->map->n; - // number of vectors that we handle via the batches. note any singletons are handled by - // cublas, hence the nv-1. - const auto nvbatch = ((nv % batchsize) == 1) ? nv - 1 : nv; - const auto nwork = nvbatch * MDOT_WORKGROUP_NUM; + const auto num_sub_streams = nv > batchsize ? 
std::min((nv + batchsize) / batchsize, batchsize) : 0; + const auto n = xin->map->n; + const auto nwork = nv * MDOT_WORKGROUP_NUM; PetscScalar *d_results; cupmStream_t stream; @@ -1213,13 +1210,9 @@ inline PetscErrorCode VecSeq_CUPM::MDot_(std::false_type, Vec xin, PetscInt n case 2: PetscCall(MDot_kernel_dispatch_<2>(cur_ctx, cur_stream, xptr.data(), yin, n, d_results, yidx)); break; - case 1: { - cupmBlasHandle_t cupmBlasHandle; - - PetscCall(GetHandlesFrom_(cur_ctx, &cupmBlasHandle)); - PetscCallCUPMBLAS(cupmBlasXdot(cupmBlasHandle, static_cast(n), DeviceArrayRead(cur_ctx, yin[yidx]).cupmdata(), 1, xptr.cupmdata(), 1, cupmScalarPtrCast(z + yidx))); - ++yidx; - } break; + case 1: + PetscCall(MDot_kernel_dispatch_<1>(cur_ctx, cur_stream, xptr.data(), yin, n, d_results, yidx)); + break; default: // 8 or more PetscCall(MDot_kernel_dispatch_<8>(cur_ctx, cur_stream, xptr.data(), yin, n, d_results, yidx)); break; @@ -1229,9 +1222,9 @@ inline PetscErrorCode VecSeq_CUPM::MDot_(std::false_type, Vec xin, PetscInt n PetscCall(PetscDeviceContextJoin(dctx, num_sub_streams, PETSC_DEVICE_CONTEXT_JOIN_DESTROY, &sub)); } - PetscCall(PetscCUPMLaunchKernel1D(nvbatch, 0, stream, kernels::sum_kernel, nvbatch, d_results)); + PetscCall(PetscCUPMLaunchKernel1D(nv, 0, stream, kernels::sum_kernel, nv, d_results)); // copy result of device reduction to host - PetscCall(PetscCUPMMemcpyAsync(z, d_results, nvbatch, cupmMemcpyDeviceToHost, stream)); + PetscCall(PetscCUPMMemcpyAsync(z, d_results, nv, cupmMemcpyDeviceToHost, stream)); // do these now while final reduction is in flight PetscCall(PetscLogGpuFlops(nwork)); PetscCall(PetscDeviceFree(dctx, d_results)); diff --git a/src/vec/vec/impls/seq/ftn-custom/makefile b/src/vec/vec/impls/seq/ftn-custom/makefile deleted file mode 100644 index c6170f8b367..00000000000 --- a/src/vec/vec/impls/seq/ftn-custom/makefile +++ /dev/null @@ -1,6 +0,0 @@ --include ../../../../../../petscdir.mk -#requiresdefine 'PETSC_USE_FORTRAN_BINDINGS' - - -include ${PETSC_DIR}/lib/petsc/conf/variables -include ${PETSC_DIR}/lib/petsc/conf/rules_doc.mk diff --git a/src/vec/vec/impls/seq/ftn-custom/zbvec2f.c b/src/vec/vec/impls/seq/ftn-custom/zbvec2f.c deleted file mode 100644 index 2532cae8bed..00000000000 --- a/src/vec/vec/impls/seq/ftn-custom/zbvec2f.c +++ /dev/null @@ -1,21 +0,0 @@ -#include -#include -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define veccreateseqwitharray0_ VECCREATESEQWITHARRAY0 - #define veccreateseqwitharray1_ VECCREATESEQWITHARRAY1 -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define veccreateseqwitharray0_ veccreateseqwitharray0 - #define veccreateseqwitharray1_ veccreateseqwitharray1 -#endif - -PETSC_EXTERN void veccreateseqwitharray0_(MPI_Comm *comm, int *bs, PetscInt *n, PetscScalar *s, Vec *V, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(s); - *ierr = VecCreateSeqWithArray(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *n, s, V); -} - -PETSC_EXTERN void veccreateseqwitharray1_(MPI_Comm *comm, PetscInt64 *bs, PetscInt *n, PetscScalar *s, Vec *V, PetscErrorCode *ierr) -{ - CHKFORTRANNULLSCALAR(s); - *ierr = VecCreateSeqWithArray(MPI_Comm_f2c(*(MPI_Fint *)&*comm), *bs, *n, s, V); -} diff --git a/src/vec/vec/impls/seq/ftn-kernels/faypx.F90 b/src/vec/vec/impls/seq/ftn-kernels/faypx.F90 index e8e841210d5..ddc774c52e8 100644 --- a/src/vec/vec/impls/seq/ftn-kernels/faypx.F90 +++ b/src/vec/vec/impls/seq/ftn-kernels/faypx.F90 @@ -16,5 +16,4 @@ subroutine FortranAYPX(n,a,x,y) y(i) = x(i) + a*y(i) 10 continue - return end diff --git a/src/vec/vec/impls/seq/ftn-kernels/fmaxpy.F90 
b/src/vec/vec/impls/seq/ftn-kernels/fmaxpy.F90 index 430a7bb7d0b..877efbfadd5 100644 --- a/src/vec/vec/impls/seq/ftn-kernels/fmaxpy.F90 +++ b/src/vec/vec/impls/seq/ftn-kernels/fmaxpy.F90 @@ -20,7 +20,6 @@ Subroutine FortranMAXPY4(x, a0, a1, a2, a3, y0, y1, y2, y3, n) do i=1,n x(i) = x(i) + (a0*y0(i) + a1*y1(i) + a2*y2(i) + a3*y3(i)) enddo - return end subroutine FortranMAXPY3(x,a0,a1,a2,y0,y1,y2,n) @@ -36,7 +35,6 @@ subroutine FortranMAXPY3(x,a0,a1,a2,y0,y1,y2,n) do 10,i=1,n x(i) = x(i) + (a0*y0(i) + a1*y1(i) + a2*y2(i)) 10 continue - return end Subroutine FortranMAXPY2(x, a0, a1, y0, y1, n) @@ -50,5 +48,4 @@ Subroutine FortranMAXPY2(x, a0, a1, y0, y1, n) do i=1,n x(i) = x(i) + (a0*y0(i) + a1*y1(i)) enddo - return end diff --git a/src/vec/vec/impls/seq/ftn-kernels/fmdot.F90 b/src/vec/vec/impls/seq/ftn-kernels/fmdot.F90 index 1afd942dd47..0cc55d2178a 100644 --- a/src/vec/vec/impls/seq/ftn-kernels/fmdot.F90 +++ b/src/vec/vec/impls/seq/ftn-kernels/fmdot.F90 @@ -24,7 +24,6 @@ subroutine FortranMDot4(x,y1,y2,y3,y4,n,sum1,sum2,sum3,sum4) sum4 = sum4 + x(i)*PetscConj(y4(i)) 10 continue - return end subroutine FortranMDot3(x,y1,y2,y3,n,sum1,sum2,sum3) @@ -44,7 +43,6 @@ subroutine FortranMDot3(x,y1,y2,y3,n,sum1,sum2,sum3) sum3 = sum3 + x(i)*PetscConj(y3(i)) 10 continue - return end subroutine FortranMDot2(x,y1,y2,n,sum1,sum2) @@ -61,7 +59,6 @@ subroutine FortranMDot2(x,y1,y2,n,sum1,sum2) sum2 = sum2 + x(i)*PetscConj(y2(i)) 10 continue - return end subroutine FortranMDot1(x,y1,n,sum1) @@ -76,5 +73,4 @@ subroutine FortranMDot1(x,y1,n,sum1) sum1 = sum1 + x(i)*PetscConj(y1(i)) 10 continue - return end diff --git a/src/vec/vec/impls/seq/ftn-kernels/fwaxpy.F90 b/src/vec/vec/impls/seq/ftn-kernels/fwaxpy.F90 index 4265e9ffa3b..8062aaba082 100644 --- a/src/vec/vec/impls/seq/ftn-kernels/fwaxpy.F90 +++ b/src/vec/vec/impls/seq/ftn-kernels/fwaxpy.F90 @@ -20,5 +20,4 @@ subroutine FortranWAXPY(n,a,x,y,w) w(i) = a*x(i) + y(i) 10 continue - return end diff --git a/src/vec/vec/impls/seq/ftn-kernels/fxtimesy.F90 b/src/vec/vec/impls/seq/ftn-kernels/fxtimesy.F90 index 4a463f5f7f9..04be8d8aa7d 100644 --- a/src/vec/vec/impls/seq/ftn-kernels/fxtimesy.F90 +++ b/src/vec/vec/impls/seq/ftn-kernels/fxtimesy.F90 @@ -12,5 +12,4 @@ subroutine Fortranxtimesy(x,y,z,n) do 10,i=1,n z(i) = x(i) * y(i) 10 continue - return end diff --git a/src/vec/vec/impls/seq/kokkos/veckok.kokkos.cxx b/src/vec/vec/impls/seq/kokkos/veckok.kokkos.cxx index 66610dc25b0..3ec4d054e34 100644 --- a/src/vec/vec/impls/seq/kokkos/veckok.kokkos.cxx +++ b/src/vec/vec/impls/seq/kokkos/veckok.kokkos.cxx @@ -571,7 +571,9 @@ static PetscErrorCode VecMultiDot_SeqKokkos_GEMV(PetscBool conjugate, Vec xin, P const auto &A = Kokkos::View(yarray, lda, m); const auto &Y = Kokkos::subview(A, std::pair(0, n), Kokkos::ALL); auto zv = PetscScalarKokkosDualView(PetscScalarKokkosView(z_d + i, m), PetscScalarKokkosViewHost(z_h + i, m)); + PetscCall(PetscLogGpuTimeBegin()); PetscCallCXX(KokkosBlas::gemv(PetscGetKokkosExecutionSpace(), trans, 1.0, Y, xv, 0.0, zv.view_device())); + PetscCall(PetscLogGpuTimeEnd()); zv.modify_device(); zv.sync_host(); PetscCall(PetscLogGpuFlops(PetscMax(m * (2.0 * n - 1), 0.0))); @@ -595,18 +597,14 @@ static PetscErrorCode VecMultiDot_SeqKokkos_GEMV(PetscBool conjugate, Vec xin, P PetscErrorCode VecMDot_SeqKokkos_GEMV(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z) { PetscFunctionBegin; - PetscCall(PetscLogGpuTimeBegin()); PetscCall(VecMultiDot_SeqKokkos_GEMV(PETSC_TRUE, xin, nv, yin, z)); // conjugate - PetscCall(PetscLogGpuTimeEnd()); 
PetscFunctionReturn(PETSC_SUCCESS); } PetscErrorCode VecMTDot_SeqKokkos_GEMV(Vec xin, PetscInt nv, const Vec yin[], PetscScalar *z) { PetscFunctionBegin; - PetscCall(PetscLogGpuTimeBegin()); PetscCall(VecMultiDot_SeqKokkos_GEMV(PETSC_FALSE, xin, nv, yin, z)); // transpose - PetscCall(PetscLogGpuTimeEnd()); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1811,14 +1809,13 @@ static PetscErrorCode VecDuplicateVecs_SeqKokkos_GEMV(Vec w, PetscInt m, Vec *V[ PetscInt64 lda; // use 64-bit as we will do "m * lda" PetscScalar *array_h, *array_d; PetscLayout map; + PetscBool mdot_use_gemv, maxpy_use_gemv; PetscFunctionBegin; PetscCall(PetscKokkosInitializeCheck()); // as we'll call kokkos_malloc() PetscCall(PetscMalloc1(m, V)); PetscCall(VecGetLayout(w, &map)); - lda = map->n; - lda = ((lda + 31) / 32) * 32; // make every vector 32-elements aligned - + PetscCall(VecGetLocalSizeAligned(w, 64, &lda)); // store in lda the 64-byte aligned local size // allocate raw arrays on host and device for the whole m vectors PetscCall(PetscCalloc1(m * lda, &array_h)); #if defined(KOKKOS_ENABLE_DEFAULT_DEVICE_TYPE_HOST) @@ -1827,6 +1824,9 @@ static PetscErrorCode VecDuplicateVecs_SeqKokkos_GEMV(Vec w, PetscInt m, Vec *V[ PetscCallCXX(array_d = static_cast<PetscScalar *>(Kokkos::kokkos_malloc("VecDuplicateVecs", sizeof(PetscScalar) * (m * lda)))); #endif + mdot_use_gemv = (w->ops->mdot == VecMDot_SeqKokkos_GEMV) ? PETSC_TRUE : PETSC_FALSE; + maxpy_use_gemv = (w->ops->maxpy == VecMAXPY_SeqKokkos_GEMV) ? PETSC_TRUE : PETSC_FALSE; + // create the m vectors with raw arrays for (PetscInt i = 0; i < m; i++) { Vec v; @@ -1834,6 +1834,13 @@ static PetscErrorCode VecDuplicateVecs_SeqKokkos_GEMV(Vec w, PetscInt m, Vec *V[ PetscCallCXX(static_cast<Vec_Kokkos *>(v->spptr)->v_dual.modify_host()); // as we only init'ed array_h PetscCall(PetscObjectListDuplicate(((PetscObject)w)->olist, &((PetscObject)v)->olist)); PetscCall(PetscFunctionListDuplicate(((PetscObject)w)->qlist, &((PetscObject)v)->qlist)); + if (mdot_use_gemv) { // inherit w's mdot/maxpy optimization setting + v->ops->mdot = VecMDot_SeqKokkos_GEMV; + v->ops->mtdot = VecMTDot_SeqKokkos_GEMV; + v->ops->mdot_local = VecMDot_SeqKokkos_GEMV; + v->ops->mtdot_local = VecMTDot_SeqKokkos_GEMV; + } + if (maxpy_use_gemv) v->ops->maxpy = VecMAXPY_SeqKokkos_GEMV; v->ops->view = w->ops->view; v->stash.ignorenegidx = w->stash.ignorenegidx; (*V)[i] = v; @@ -1888,8 +1895,8 @@ PetscErrorCode VecCreate_SeqKokkos(Vec v) if (mdot_use_gemv) { v->ops[0].mdot = VecMDot_SeqKokkos_GEMV; - v->ops[0].mdot_local = VecMDot_SeqKokkos_GEMV; v->ops[0].mtdot = VecMTDot_SeqKokkos_GEMV; + v->ops[0].mdot_local = VecMDot_SeqKokkos_GEMV; v->ops[0].mtdot_local = VecMTDot_SeqKokkos_GEMV; } if (maxpy_use_gemv) v->ops[0].maxpy = VecMAXPY_SeqKokkos_GEMV; diff --git a/src/vec/vec/interface/f90-custom/zvectorf90.c b/src/vec/vec/interface/f90-custom/zvectorf90.c index 1d715b42222..3e151ac9446 100644 --- a/src/vec/vec/interface/f90-custom/zvectorf90.c +++ b/src/vec/vec/interface/f90-custom/zvectorf90.c @@ -8,7 +8,6 @@ #define vecrestorearrayreadf90_ VECRESTOREARRAYREADF90 #define vecduplicatevecsf90_ VECDUPLICATEVECSF90 #define vecdestroyvecsf90_ VECDESTROYVECSF90 - #define vecdestroy_ VECDESTROY #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) #define vecgetarrayf90_ vecgetarrayf90 #define vecrestorearrayf90_ vecrestorearrayf90 @@ -16,95 +15,86 @@ #define vecrestorearrayreadf90_ vecrestorearrayreadf90 #define vecduplicatevecsf90_ vecduplicatevecsf90 #define vecdestroyvecsf90_ vecdestroyvecsf90 - #define vecdestroy_ vecdestroy #endif -PETSC_EXTERN void
vecgetarrayf90_(Vec *x, F90Array1d *ptr, int *__ierr PETSC_F90_2PTR_PROTO(ptrd)) +PETSC_EXTERN void vecgetarrayf90_(Vec *x, F90Array1d *ptr, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) { PetscScalar *fa; PetscInt len; if (!ptr) { - *__ierr = PetscError(((PetscObject)*x)->comm, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_BADPTR, PETSC_ERROR_INITIAL, "ptr==NULL, maybe #include <petsc/finclude/petscvec.h> is missing?"); + *ierr = PetscError(((PetscObject)*x)->comm, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_BADPTR, PETSC_ERROR_INITIAL, "ptr==NULL, maybe #include <petsc/finclude/petscvec.h> is missing?"); return; } - *__ierr = VecGetArray(*x, &fa); - if (*__ierr) return; - *__ierr = VecGetLocalSize(*x, &len); - if (*__ierr) return; - *__ierr = F90Array1dCreate(fa, MPIU_SCALAR, 1, len, ptr PETSC_F90_2PTR_PARAM(ptrd)); + *ierr = VecGetArray(*x, &fa); + if (*ierr) return; + *ierr = VecGetLocalSize(*x, &len); + if (*ierr) return; + *ierr = F90Array1dCreate(fa, MPIU_SCALAR, 1, len, ptr PETSC_F90_2PTR_PARAM(ptrd)); } -PETSC_EXTERN void vecrestorearrayf90_(Vec *x, F90Array1d *ptr, int *__ierr PETSC_F90_2PTR_PROTO(ptrd)) +PETSC_EXTERN void vecrestorearrayf90_(Vec *x, F90Array1d *ptr, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) { PetscScalar *fa; - *__ierr = F90Array1dAccess(ptr, MPIU_SCALAR, (void **)&fa PETSC_F90_2PTR_PARAM(ptrd)); - if (*__ierr) return; - *__ierr = F90Array1dDestroy(ptr, MPIU_SCALAR PETSC_F90_2PTR_PARAM(ptrd)); - if (*__ierr) return; - *__ierr = VecRestoreArray(*x, &fa); + *ierr = F90Array1dAccess(ptr, MPIU_SCALAR, (void **)&fa PETSC_F90_2PTR_PARAM(ptrd)); + if (*ierr) return; + *ierr = F90Array1dDestroy(ptr, MPIU_SCALAR PETSC_F90_2PTR_PARAM(ptrd)); + if (*ierr) return; + *ierr = VecRestoreArray(*x, &fa); } -PETSC_EXTERN void vecgetarrayreadf90_(Vec *x, F90Array1d *ptr, int *__ierr PETSC_F90_2PTR_PROTO(ptrd)) +PETSC_EXTERN void vecgetarrayreadf90_(Vec *x, F90Array1d *ptr, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) { const PetscScalar *fa; PetscInt len; if (!ptr) { - *__ierr = PetscError(((PetscObject)*x)->comm, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_BADPTR, PETSC_ERROR_INITIAL, "ptr==NULL, maybe #include <petsc/finclude/petscvec.h> is missing?"); + *ierr = PetscError(((PetscObject)*x)->comm, __LINE__, PETSC_FUNCTION_NAME, __FILE__, PETSC_ERR_ARG_BADPTR, PETSC_ERROR_INITIAL, "ptr==NULL, maybe #include <petsc/finclude/petscvec.h> is missing?"); return; } - *__ierr = VecGetArrayRead(*x, &fa); - if (*__ierr) return; - *__ierr = VecGetLocalSize(*x, &len); - if (*__ierr) return; - *__ierr = F90Array1dCreate((PetscScalar *)fa, MPIU_SCALAR, 1, len, ptr PETSC_F90_2PTR_PARAM(ptrd)); + *ierr = VecGetArrayRead(*x, &fa); + if (*ierr) return; + *ierr = VecGetLocalSize(*x, &len); + if (*ierr) return; + *ierr = F90Array1dCreate((PetscScalar *)fa, MPIU_SCALAR, 1, len, ptr PETSC_F90_2PTR_PARAM(ptrd)); } -PETSC_EXTERN void vecrestorearrayreadf90_(Vec *x, F90Array1d *ptr, int *__ierr PETSC_F90_2PTR_PROTO(ptrd)) +PETSC_EXTERN void vecrestorearrayreadf90_(Vec *x, F90Array1d *ptr, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) { const PetscScalar *fa; - *__ierr = F90Array1dAccess(ptr, MPIU_SCALAR, (void **)&fa PETSC_F90_2PTR_PARAM(ptrd)); - if (*__ierr) return; - *__ierr = F90Array1dDestroy(ptr, MPIU_SCALAR PETSC_F90_2PTR_PARAM(ptrd)); - if (*__ierr) return; - *__ierr = VecRestoreArrayRead(*x, &fa); + *ierr = F90Array1dAccess(ptr, MPIU_SCALAR, (void **)&fa PETSC_F90_2PTR_PARAM(ptrd)); + if (*ierr) return; + *ierr = F90Array1dDestroy(ptr, MPIU_SCALAR PETSC_F90_2PTR_PARAM(ptrd)); + if (*ierr) return; + *ierr = VecRestoreArrayRead(*x, &fa); } -PETSC_EXTERN void vecduplicatevecsf90_(Vec *v, int *m,
F90Array1d *ptr, int *__ierr PETSC_F90_2PTR_PROTO(ptrd)) +PETSC_EXTERN void vecduplicatevecsf90_(Vec *v, int *m, F90Array1d *ptr, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) { Vec *lV; PetscFortranAddr *newvint; int i; - *__ierr = VecDuplicateVecs(*v, *m, &lV); - if (*__ierr) return; - *__ierr = PetscMalloc1(*m, &newvint); - if (*__ierr) return; + *ierr = VecDuplicateVecs(*v, *m, &lV); + if (*ierr) return; + *ierr = PetscMalloc1(*m, &newvint); + if (*ierr) return; for (i = 0; i < *m; i++) newvint[i] = (PetscFortranAddr)lV[i]; - *__ierr = PetscFree(lV); - if (*__ierr) return; - *__ierr = F90Array1dCreate(newvint, MPIU_FORTRANADDR, 1, *m, ptr PETSC_F90_2PTR_PARAM(ptrd)); + *ierr = PetscFree(lV); + if (*ierr) return; + *ierr = F90Array1dCreate(newvint, MPIU_FORTRANADDR, 1, *m, ptr PETSC_F90_2PTR_PARAM(ptrd)); } -PETSC_EXTERN void vecdestroyvecsf90_(int *m, F90Array1d *ptr, int *__ierr PETSC_F90_2PTR_PROTO(ptrd)) +PETSC_EXTERN void vecdestroyvecsf90_(int *m, F90Array1d *ptr, int *ierr PETSC_F90_2PTR_PROTO(ptrd)) { Vec *vecs; int i; - *__ierr = F90Array1dAccess(ptr, MPIU_FORTRANADDR, (void **)&vecs PETSC_F90_2PTR_PARAM(ptrd)); - if (*__ierr) return; + *ierr = F90Array1dAccess(ptr, MPIU_FORTRANADDR, (void **)&vecs PETSC_F90_2PTR_PARAM(ptrd)); + if (*ierr) return; for (i = 0; i < *m; i++) { PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(&vecs[i]); - *__ierr = VecDestroy(&vecs[i]); - if (*__ierr) return; + *ierr = VecDestroy(&vecs[i]); + if (*ierr) return; PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(&vecs[i]); } - *__ierr = F90Array1dDestroy(ptr, MPIU_FORTRANADDR PETSC_F90_2PTR_PARAM(ptrd)); - if (*__ierr) return; - *__ierr = PetscFree(vecs); -} - -PETSC_EXTERN void vecdestroy_(Vec *x, int *ierr) -{ - PETSC_FORTRAN_OBJECT_F_DESTROYED_TO_C_NULL(x); - *ierr = VecDestroy(x); + *ierr = F90Array1dDestroy(ptr, MPIU_FORTRANADDR PETSC_F90_2PTR_PARAM(ptrd)); if (*ierr) return; - PETSC_FORTRAN_OBJECT_C_NULL_TO_F_DESTROYED(x); + *ierr = PetscFree(vecs); } diff --git a/src/vec/vec/interface/ftn-custom/zvecregf.c b/src/vec/vec/interface/ftn-custom/zvecregf.c deleted file mode 100644 index 213f1620b21..00000000000 --- a/src/vec/vec/interface/ftn-custom/zvecregf.c +++ /dev/null @@ -1,28 +0,0 @@ -#include -#include -#if defined(PETSC_HAVE_FORTRAN_CAPS) - #define vecsettype_ VECSETTYPE - #define vecgettype_ VECGETTYPE -#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define vecsettype_ vecsettype - #define vecgettype_ vecgettype -#endif - -PETSC_EXTERN void vecsettype_(Vec *x, char *type_name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type_name, len, t); - *ierr = VecSetType(*x, t); - if (*ierr) return; - FREECHAR(type_name, t); -} - -PETSC_EXTERN void vecgettype_(Vec *vv, char *name, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - const char *tname; - - *ierr = VecGetType(*vv, &tname); - *ierr = PetscStrncpy(name, tname, len); - FIXRETURNCHAR(PETSC_TRUE, name, len); -} diff --git a/src/vec/vec/interface/ftn-custom/zvectorf.c b/src/vec/vec/interface/ftn-custom/zvectorf.c index 68d9172d718..aecbc23baa3 100644 --- a/src/vec/vec/interface/ftn-custom/zvectorf.c +++ b/src/vec/vec/interface/ftn-custom/zvectorf.c @@ -3,178 +3,37 @@ #include #if defined(PETSC_HAVE_FORTRAN_CAPS) - #define vecsetrandom_ VECSETRANDOM - #define vecsetvalueslocal0_ VECSETVALUESLOCAL0 - #define vecsetvalueslocal11_ VECSETVALUESLOCAL11 - #define vecsetvalueslocal1_ VECSETVALUESLOCAL1 - #define vecgetvalues_ VECGETVALUES - #define vecgetvalues0_ VECGETVALUES0 - #define vecgetvalues1_ VECGETVALUES1 - #define 
vecgetvalues11_ VECGETVALUES11 - #define vecsetvalues_ VECSETVALUES - #define vecsetvalues0_ VECSETVALUES0 - #define vecsetvalues1_ VECSETVALUES1 - #define vecsetvalues11_ VECSETVALUES11 - #define vecsetvaluesblocked VECSETVALUESBLOCKED - #define vecsetvaluesblocked0_ VECSETVALUESBLOCKED0 - #define vecsetvaluesblocked1_ VECSETVALUESBLOCKED1 - #define vecsetvaluesblocked11_ VECSETVALUESBLOCKED11 - #define vecsetvalue_ VECSETVALUE - #define vecsetvaluelocal_ VECSETVALUELOCAL - #define vecload_ VECLOAD - #define vecview_ VECVIEW - #define vecgetarray_ VECGETARRAY - #define vecgetarrayread_ VECGETARRAYREAD - #define vecgetarrayaligned_ VECGETARRAYALIGNED - #define vecrestorearray_ VECRESTOREARRAY - #define vecrestorearrayread_ VECRESTOREARRAYREAD - #define vecduplicatevecs_ VECDUPLICATEVECS - #define vecdestroyvecs_ VECDESTROYVECS - #define vecmin1_ VECMIN1 - #define vecmin2_ VECMIN2 - #define vecmax1_ VECMAX1 - #define vecmax2_ VECMAX2 - #define vecgetownershiprange1_ VECGETOWNERSHIPRANGE1 - #define vecgetownershiprange2_ VECGETOWNERSHIPRANGE2 - #define vecgetownershiprange3_ VECGETOWNERSHIPRANGE3 - #define vecgetownershipranges_ VECGETOWNERSHIPRANGES - #define vecsetoptionsprefix_ VECSETOPTIONSPREFIX - #define vecviewfromoptions_ VECVIEWFROMOPTIONS - #define vecstashviewfromoptions_ VECSTASHVIEWFROMOPTIONS - #define veccreatefromoptions_ VECCREATEFROMOPTIONS + #define vecsetvalue_ VECSETVALUE + #define vecsetvaluelocal_ VECSETVALUELOCAL + #define vecgetarray_ VECGETARRAY + #define vecgetarrayread_ VECGETARRAYREAD + #define vecgetarrayaligned_ VECGETARRAYALIGNED + #define vecrestorearray_ VECRESTOREARRAY + #define vecrestorearrayread_ VECRESTOREARRAYREAD + #define vecduplicatevecs_ VECDUPLICATEVECS + #define vecdestroyvecs_ VECDESTROYVECS + #define vecmin1_ VECMIN1 + #define vecmin2_ VECMIN2 + #define vecmax1_ VECMAX1 + #define vecmax2_ VECMAX2 + #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) - #define vecsetrandom_ vecsetrandom - #define vecsetvalueslocal0_ vecsetvalueslocal0 - #define vecsetvalueslocal1_ vecsetvalueslocal1 - #define vecsetvalueslocal11_ vecsetvalueslocal11 - #define vecgetvalues_ vecgetvalues - #define vecgetvalues0_ vecgetvalues0 - #define vecgetvalues1_ vecgetvalues1 - #define vecgetvalues11_ vecgetvalues11 - #define vecsetvalues_ vecsetvalues - #define vecsetvalues0_ vecsetvalues0 - #define vecsetvalues1_ vecsetvalues1 - #define vecsetvalues11_ vecsetvalues11 - #define vecsetvaluesblocked_ vecsetvaluesblocked - #define vecsetvaluesblocked0_ vecsetvaluesblocked0 - #define vecsetvaluesblocked1_ vecsetvaluesblocked1 - #define vecsetvaluesblocked11_ vecsetvaluesblocked11 - #define vecgetarrayaligned_ vecgetarrayaligned - #define vecsetvalue_ vecsetvalue - #define vecsetvaluelocal_ vecsetvaluelocal - #define vecload_ vecload - #define vecview_ vecview - #define vecgetarray_ vecgetarray - #define vecrestorearray_ vecrestorearray - #define vecgetarrayaligned_ vecgetarrayaligned - #define vecgetarrayread_ vecgetarrayread - #define vecrestorearrayread_ vecrestorearrayread - #define vecduplicatevecs_ vecduplicatevecs - #define vecdestroyvecs_ vecdestroyvecs - #define vecmin1_ vecmin1 - #define vecmin2_ vecmin2 - #define vecmax1_ vecmax1 - #define vecmax2_ vecmax2 - #define vecgetownershiprange1_ vecgetownershiprange1 - #define vecgetownershiprange2_ vecgetownershiprange2 - #define vecgetownershiprange3_ vecgetownershiprange3 - #define vecgetownershipranges_ vecgetownershipranges - #define vecsetoptionsprefix_ vecsetoptionsprefix - #define vecviewfromoptions_ vecviewfromoptions - 
#define vecstashviewfromoptions_ vecstashviewfromoptions - #define veccreatefromoptions_ veccreatefromoptions + #define vecgetarrayaligned_ vecgetarrayaligned + #define vecsetvalue_ vecsetvalue + #define vecsetvaluelocal_ vecsetvaluelocal + #define vecgetarray_ vecgetarray + #define vecrestorearray_ vecrestorearray + #define vecgetarrayaligned_ vecgetarrayaligned + #define vecgetarrayread_ vecgetarrayread + #define vecrestorearrayread_ vecrestorearrayread + #define vecduplicatevecs_ vecduplicatevecs + #define vecdestroyvecs_ vecdestroyvecs + #define vecmin1_ vecmin1 + #define vecmin2_ vecmin2 + #define vecmax1_ vecmax1 + #define vecmax2_ vecmax2 #endif -PETSC_EXTERN void veccreatefromoptions_(MPI_Fint *comm, char *prefix, PetscInt *bs, PetscInt *m, PetscInt *n, Vec *vec, int *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *fprefix; - - FIXCHAR(prefix, len, fprefix); - *ierr = VecCreateFromOptions(MPI_Comm_f2c(*(comm)), fprefix, *bs, *m, *n, vec); - if (*ierr) return; - FREECHAR(prefix, fprefix); -} - -PETSC_EXTERN void vecsetvalueslocal_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - *ierr = VecSetValuesLocal(*x, *ni, ix, y, *iora); -} - -PETSC_EXTERN void vecsetvalueslocal0_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvalueslocal_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void vecsetvalueslocal1_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvalueslocal_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void vecsetvalueslocal11_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvalueslocal_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void vecgetvalues_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], int *ierr) -{ - *ierr = VecGetValues(*x, *ni, ix, y); -} - -PETSC_EXTERN void vecgetvalues0_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], int *ierr) -{ - vecgetvalues_(x, ni, ix, y, ierr); -} - -PETSC_EXTERN void vecgetvalues1_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], int *ierr) -{ - vecgetvalues_(x, ni, ix, y, ierr); -} - -PETSC_EXTERN void vecgetvalues11_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], int *ierr) -{ - vecgetvalues_(x, ni, ix, y, ierr); -} - -PETSC_EXTERN void vecsetvalues_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - *ierr = VecSetValues(*x, *ni, ix, y, *iora); -} - -PETSC_EXTERN void vecsetvalues0_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvalues_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void vecsetvalues1_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvalues_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void vecsetvalues11_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvalues_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void vecsetvaluesblocked_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - *ierr = VecSetValuesBlocked(*x, *ni, ix, y, *iora); -} - -PETSC_EXTERN void vecsetvaluesblocked0_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvaluesblocked_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void vecsetvaluesblocked1_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvaluesblocked_(x, ni, ix, y, iora, ierr); -} - -PETSC_EXTERN void 
vecsetvaluesblocked11_(Vec *x, PetscInt *ni, PetscInt ix[], PetscScalar y[], InsertMode *iora, int *ierr) -{ - vecsetvaluesblocked_(x, ni, ix, y, iora, ierr); -} - PETSC_EXTERN void vecsetvalue_(Vec *v, PetscInt *i, PetscScalar *va, InsertMode *mode, PetscErrorCode *ierr) { /* cannot use VecSetValue() here since that uses PetscCall() which has a return in it */ @@ -186,25 +45,6 @@ PETSC_EXTERN void vecsetvaluelocal_(Vec *v, PetscInt *i, PetscScalar *va, Insert *ierr = VecSetValuesLocal(*v, 1, i, va, *mode); } -PETSC_EXTERN void vecload_(Vec *vec, PetscViewer *viewer, PetscErrorCode *ierr) -{ - PetscViewer v; - PetscPatchDefaultViewers_Fortran(viewer, v); - *ierr = VecLoad(*vec, v); -} - -PETSC_EXTERN void vecview_(Vec *x, PetscViewer *vin, PetscErrorCode *ierr) -{ - PetscViewer v; - - PetscPatchDefaultViewers_Fortran(vin, v); - if (!v) { - *ierr = PETSC_ERR_SYS; - return; - } - *ierr = VecView(*x, v); -} - /*MC VecGetArrayAligned - FORTRAN only. Forces alignment of vector arrays so that arrays of derived types may be used. @@ -359,27 +199,6 @@ PETSC_EXTERN void vecmax2_(Vec *x, PetscInt *p, PetscReal *val, PetscErrorCode * *ierr = VecMax(*x, p, val); } -PETSC_EXTERN void vecgetownershiprange1_(Vec *x, PetscInt *low, PetscInt *high, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(low); - CHKFORTRANNULLINTEGER(high); - *ierr = VecGetOwnershipRange(*x, low, high); -} - -PETSC_EXTERN void vecgetownershiprange2_(Vec *x, PetscInt *low, PetscInt *high, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(low); - CHKFORTRANNULLINTEGER(high); - *ierr = VecGetOwnershipRange(*x, low, high); -} - -PETSC_EXTERN void vecgetownershiprange3_(Vec *x, PetscInt *low, PetscInt *high, PetscErrorCode *ierr) -{ - CHKFORTRANNULLINTEGER(low); - CHKFORTRANNULLINTEGER(high); - *ierr = VecGetOwnershipRange(*x, low, high); -} - PETSC_EXTERN void vecgetownershipranges_(Vec *x, PetscInt *range, PetscErrorCode *ierr) { PetscMPIInt size, mpi_ierr; @@ -394,33 +213,3 @@ PETSC_EXTERN void vecgetownershipranges_(Vec *x, PetscInt *range, PetscErrorCode if (*ierr) return; *ierr = PetscArraycpy(range, r, size + 1); } - -PETSC_EXTERN void vecsetoptionsprefix_(Vec *v, char *prefix, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(prefix, len, t); - *ierr = VecSetOptionsPrefix(*v, t); - if (*ierr) return; - FREECHAR(prefix, t); -} -PETSC_EXTERN void vecviewfromoptions_(Vec *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = VecViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} -PETSC_EXTERN void vecstashviewfromoptions_(Vec *ao, PetscObject obj, char *type, PetscErrorCode *ierr, PETSC_FORTRAN_CHARLEN_T len) -{ - char *t; - - FIXCHAR(type, len, t); - CHKFORTRANNULLOBJECT(obj); - *ierr = VecStashViewFromOptions(*ao, obj, t); - if (*ierr) return; - FREECHAR(type, t); -} diff --git a/src/vec/vec/interface/rvector.c b/src/vec/vec/interface/rvector.c index b23953e9572..32e2d9841d7 100644 --- a/src/vec/vec/interface/rvector.c +++ b/src/vec/vec/interface/rvector.c @@ -328,6 +328,9 @@ PetscErrorCode VecNormalize(Vec x, PetscReal *val) Returns the smallest index with the maximum value + Developer Note: + The Nag Fortran compiler does not like the symbol name VecMax + .seealso: [](ch_vectors), `Vec`, `VecNorm()`, `VecMin()` @*/ PetscErrorCode VecMax(Vec x, PetscInt *p, PetscReal *val) @@ -365,6 +368,9 @@ PetscErrorCode VecMax(Vec x, PetscInt *p, PetscReal *val) This returns the 
smallest index with the minimum value + Developer Note: + The Nag Fortran compiler does not like the symbol name VecMin + .seealso: [](ch_vectors), `Vec`, `VecMax()` @*/ PetscErrorCode VecMin(Vec x, PetscInt *p, PetscReal *val) @@ -867,7 +873,7 @@ PetscErrorCode VecWAXPY(Vec w, PetscScalar alpha, Vec x, Vec y) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecSetValues - Inserts or adds values into certain locations of a vector. Not Collective @@ -901,6 +907,12 @@ PetscErrorCode VecWAXPY(Vec w, PetscScalar alpha, Vec x, Vec y) with homogeneous Dirichlet boundary conditions that you don't want represented in the vector. + Fortran Note: + If either `ix` or `y` is a scalar, pass it as a one-element array, for example, +.vb + call VecSetValues(x, one, [ix], [y], INSERT_VALUES, ierr) +.ve + .seealso: [](ch_vectors), `Vec`, `VecAssemblyBegin()`, `VecAssemblyEnd()`, `VecSetValuesLocal()`, `VecSetValue()`, `VecSetValuesBlocked()`, `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `VecGetValues()` @*/ @@ -920,7 +932,7 @@ PetscErrorCode VecSetValues(Vec x, PetscInt ni, const PetscInt ix[], const Petsc PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecGetValues - Gets values from certain locations of a vector. Currently can only get values on the same processor on which they are owned @@ -932,7 +944,7 @@ PetscErrorCode VecSetValues(Vec x, PetscInt ni, const PetscInt ix[], const Petsc - ix - indices where to get them from (in global 1d numbering) Output Parameter: -. y - array of values +. y - array of values; it must have length at least `ni` Level: beginner @@ -964,7 +976,7 @@ PetscErrorCode VecGetValues(Vec x, PetscInt ni, const PetscInt ix[], PetscScalar PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecSetValuesBlocked - Inserts or adds blocks of values into certain locations of a vector. Not Collective @@ -996,6 +1008,12 @@ PetscErrorCode VecGetValues(Vec x, PetscInt ni, const PetscInt ix[], PetscScalar with homogeneous Dirichlet boundary conditions that you don't want represented in the vector. + Fortran Note: + If either `ix` or `y` is a scalar, pass it as a one-element array, for example, +.vb + call VecSetValuesBlocked(x, one, [ix], [y], INSERT_VALUES, ierr) +.ve + .seealso: [](ch_vectors), `Vec`, `VecAssemblyBegin()`, `VecAssemblyEnd()`, `VecSetValuesBlockedLocal()`, `VecSetValues()` @*/ @@ -1015,7 +1033,7 @@ PetscErrorCode VecSetValuesBlocked(Vec x, PetscInt ni, const PetscInt ix[], cons PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecSetValuesLocal - Inserts or adds values into certain locations of a vector, using a local ordering of the nodes. @@ -1042,6 +1060,12 @@ PetscErrorCode VecSetValuesBlocked(Vec x, PetscInt ni, const PetscInt ix[], cons `VecSetValuesLocal()` uses 0-based indices in Fortran as well as in C. + Fortran Note: + If either `ix` or `y` is a scalar, pass it as a one-element array, for example, +.vb + call VecSetValuesLocal(x, one, [ix], [y], INSERT_VALUES, ierr) +.ve + .seealso: [](ch_vectors), `Vec`, `VecAssemblyBegin()`, `VecAssemblyEnd()`, `VecSetValues()`, `VecSetLocalToGlobalMapping()`, `VecSetValuesBlockedLocal()` @*/ @@ -1098,6 +1122,12 @@ PetscErrorCode VecSetValuesLocal(Vec x, PetscInt ni, const PetscInt ix[], const `VecSetValuesBlockedLocal()` uses 0-based indices in Fortran as well as in C.
+ Fortran Note: + If either `ix` or `y` is a scalar, pass it as a one-element array, for example, +.vb + call VecSetValuesBlockedLocal(x, one, [ix], [y], INSERT_VALUES, ierr) +.ve + .seealso: [](ch_vectors), `Vec`, `VecAssemblyBegin()`, `VecAssemblyEnd()`, `VecSetValues()`, `VecSetValuesBlocked()`, `VecSetLocalToGlobalMapping()` @*/ @@ -1525,10 +1555,12 @@ PetscErrorCode VecGetSubVectorThroughVecScatter_Private(Vec X, IS is, PetscInt b Notes: The subvector `Y` should be returned with `VecRestoreSubVector()`. - `X` and must be defined on the same communicator + `X` and `is` must be defined on the same communicator + + Changes to the subvector will be reflected in the `X` vector on the call to `VecRestoreSubVector()`. This function may return a subvector without making a copy, therefore it is not safe to use the original vector while - modifying the subvector. Other non-overlapping subvectors can still be obtained from X using this function. + modifying the subvector. Other non-overlapping subvectors can still be obtained from `X` using this function. The resulting subvector inherits the block size from `is` if greater than one. Otherwise, the block size is guessed from the block size of the original `X`. @@ -3886,10 +3918,8 @@ PetscErrorCode VecRestoreArray4dRead(Vec x, PetscInt m, PetscInt n, PetscInt p, PetscFunctionReturn(PETSC_SUCCESS); } -#if defined(PETSC_USE_DEBUG) - /*@ - VecLockGet - Gets the current lock status of a vector + VecLockGet - Get the current lock status of a vector Logically Collective @@ -3908,6 +3938,7 @@ PetscErrorCode VecLockGet(Vec x, PetscInt *state) { PetscFunctionBegin; PetscValidHeaderSpecific(x, VEC_CLASSID, 1); + PetscAssertPointer(state, 2); *state = x->lock; PetscFunctionReturn(PETSC_SUCCESS); } @@ -3919,25 +3950,24 @@ PetscErrorCode VecLockGetLocation(Vec x, const char *file[], const char *func[], PetscAssertPointer(file, 2); PetscAssertPointer(func, 3); PetscAssertPointer(line, 4); - #if !PetscDefined(HAVE_THREADSAFETY) +#if PetscDefined(USE_DEBUG) && !PetscDefined(HAVE_THREADSAFETY) { const int index = x->lockstack.currentsize - 1; - PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupted vec lock stack, have negative index %d", index); - *file = x->lockstack.file[index]; - *func = x->lockstack.function[index]; - *line = x->lockstack.line[index]; + *file = index < 0 ? NULL : x->lockstack.file[index]; + *func = index < 0 ? NULL : x->lockstack.function[index]; + *line = index < 0 ? 0 : x->lockstack.line[index]; } - #else +#else *file = NULL; *func = NULL; *line = 0; - #endif +#endif PetscFunctionReturn(PETSC_SUCCESS); } /*@ - VecLockReadPush - Pushes a read-only lock on a vector to prevent it from being written to + VecLockReadPush - Push a read-only lock on a vector to prevent it from being written to Logically Collective @@ -3959,31 +3989,30 @@ PetscErrorCode VecLockReadPush(Vec x) PetscFunctionBegin; PetscValidHeaderSpecific(x, VEC_CLASSID, 1); PetscCheck(x->lock++ >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Vector is already locked for exclusive write access but you want to read it"); - #if !PetscDefined(HAVE_THREADSAFETY) +#if PetscDefined(USE_DEBUG) && !PetscDefined(HAVE_THREADSAFETY) { const char *file, *func; int index, line; - if ((index = petscstack.currentsize - 2) == -1) { + if ((index = petscstack.currentsize - 2) < 0) { // vec was locked "outside" of petsc, either in user-land or main.
the error message will // now show this function as the culprit, but it will include the stacktrace file = "unknown user-file"; func = "unknown_user_function"; line = 0; } else { - PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected petscstack, have negative index %d", index); file = petscstack.file[index]; func = petscstack.function[index]; line = petscstack.line[index]; } PetscStackPush_Private(x->lockstack, file, func, line, petscstack.petscroutine[index], PETSC_FALSE); } - #endif +#endif PetscFunctionReturn(PETSC_SUCCESS); } /*@ - VecLockReadPop - Pops a read-only lock from a vector + VecLockReadPop - Pop a read-only lock from a vector Logically Collective @@ -3999,18 +4028,18 @@ PetscErrorCode VecLockReadPop(Vec x) PetscFunctionBegin; PetscValidHeaderSpecific(x, VEC_CLASSID, 1); PetscCheck(--x->lock >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Vector has been unlocked from read-only access too many times"); - #if !PetscDefined(HAVE_THREADSAFETY) +#if PetscDefined(USE_DEBUG) && !PetscDefined(HAVE_THREADSAFETY) { const char *previous = x->lockstack.function[x->lockstack.currentsize - 1]; PetscStackPop_Private(x->lockstack, previous); } - #endif +#endif PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - VecLockWriteSet - Lock or unlock a vector for exclusive read/write access +/*@ + VecLockWriteSet - Lock or unlock a vector for exclusive read/write access Logically Collective @@ -4055,35 +4084,3 @@ PetscErrorCode VecLockWriteSet(Vec x, PetscBool flg) } PetscFunctionReturn(PETSC_SUCCESS); } - -// PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation -/*@ - VecLockPush - Pushes a read-only lock on a vector to prevent it from being written to - - Level: deprecated - -.seealso: [](ch_vectors), `Vec`, `VecLockReadPush()` -@*/ -PetscErrorCode VecLockPush(Vec x) -{ - PetscFunctionBegin; - PetscCall(VecLockReadPush(x)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -// PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation -/*@ - VecLockPop - Pops a read-only lock from a vector - - Level: deprecated - -.seealso: [](ch_vectors), `Vec`, `VecLockReadPop()` -@*/ -PetscErrorCode VecLockPop(Vec x) -{ - PetscFunctionBegin; - PetscCall(VecLockReadPop(x)); - PetscFunctionReturn(PETSC_SUCCESS); -} - -#endif diff --git a/src/vec/vec/interface/veccreate.c b/src/vec/vec/interface/veccreate.c index d7c5f560a92..ff2ba4cbf72 100644 --- a/src/vec/vec/interface/veccreate.c +++ b/src/vec/vec/interface/veccreate.c @@ -45,8 +45,8 @@ PetscErrorCode VecCreate(MPI_Comm comm, Vec *vec) PetscFunctionBegin; PetscAssertPointer(vec, 2); - *vec = NULL; PetscCall(VecInitializePackage()); + PetscCall(PetscHeaderCreate(v, VEC_CLASSID, "Vec", "Vector", "Vec", comm, VecDestroy, VecView)); PetscCall(PetscLayoutCreate(comm, &v->map)); PetscCall(VecCreate_Common_Private(v)); @@ -54,7 +54,7 @@ PetscErrorCode VecCreate(MPI_Comm comm, Vec *vec) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecCreateFromOptions - Creates a vector whose type is set from the options database Collective diff --git a/src/vec/vec/interface/vecreg.c b/src/vec/vec/interface/vecreg.c index 4bdecd21585..20b4d07b10f 100644 --- a/src/vec/vec/interface/vecreg.c +++ b/src/vec/vec/interface/vecreg.c @@ -26,7 +26,7 @@ static inline PetscErrorCode VecTypeCompareAny_Private(VecType srcType, PetscBoo #define PETSC_MAX_VECTYPE_LEN 64 -/*@C +/*@ VecSetType - Builds a vector, for a particular vector implementation. 
Collective @@ -166,7 +166,7 @@ PetscErrorCode VecSetType(Vec vec, VecType newType) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecGetType - Gets the vector type name (as a string) from a `Vec`. Not Collective @@ -221,7 +221,7 @@ PetscErrorCode VecGetRootType_Private(Vec vec, VecType *vtype) /*@C VecRegister - Adds a new vector component implementation - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - The name of a new user-defined creation routine diff --git a/src/vec/vec/interface/vector.c b/src/vec/vec/interface/vector.c index 679d7f00569..b8979cc056d 100644 --- a/src/vec/vec/interface/vector.c +++ b/src/vec/vec/interface/vector.c @@ -552,7 +552,7 @@ PetscErrorCode VecDuplicate(Vec v, Vec *newv) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecDestroy - Destroys a vector. Collective @@ -667,7 +667,7 @@ PetscErrorCode VecDestroyVecs(PetscInt m, Vec *vv[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecViewFromOptions - View a vector based on values in the options database Collective @@ -692,7 +692,7 @@ PetscErrorCode VecViewFromOptions(Vec A, PetscObject obj, const char name[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecView - Views a vector object. Collective @@ -910,10 +910,10 @@ PetscErrorCode VecGetLocalSize(Vec x, PetscInt *size) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecGetOwnershipRange - Returns the range of indices owned by this process. The vector is laid out with the - first n1 elements on the first processor, next n2 elements on the + first `n1` elements on the first processor, next `n2` elements on the second, etc. For certain parallel layouts this range may not be well defined. @@ -928,13 +928,19 @@ PetscErrorCode VecGetLocalSize(Vec x, PetscInt *size) Level: beginner - Note: + Notes: + If the `Vec` was obtained from a `DM` with `DMCreateGlobalVector()`, then the range values are determined by the specific `DM`. + + If the `Vec` was created directly, the range values are determined by the local size passed to `VecSetSizes()` or `VecCreateMPI()`. + If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`. + The high argument is one more than the last element stored locally. - Fortran Notes: - `PETSC_NULL_INTEGER` should be used instead of NULL + For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine + the local values in the vector. -.seealso: [](ch_vectors), `Vec`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `VecGetOwnershipRanges()` +.seealso: [](ch_vectors), `Vec`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `VecGetOwnershipRanges()`, `PetscSplitOwnership()`, + `VecSetSizes()`, `VecCreateMPI()`, `PetscLayout`, `DMDAGetGhostCorners()`, `DM` @*/ PetscErrorCode VecGetOwnershipRange(Vec x, PetscInt *low, PetscInt *high) { @@ -951,7 +957,7 @@ PetscErrorCode VecGetOwnershipRange(Vec x, PetscInt *low, PetscInt *high) /*@C VecGetOwnershipRanges - Returns the range of indices owned by EACH processor. The vector is laid out with the - first n1 elements on the first processor, next n2 elements on the + first `n1` elements on the first processor, next `n2` elements on the second, etc. For certain parallel layouts this range may not be well defined. @@ -961,19 +967,30 @@ PetscErrorCode VecGetOwnershipRange(Vec x, PetscInt *low, PetscInt *high) . x - the vector Output Parameter: -.
ranges - array of length `size` + 1 with the start and end+1 for each process Level: beginner Notes: + If the `Vec` was obtained from a `DM` with `DMCreateGlobalVector()`, then the range values are determined by the specific `DM`. + + If the `Vec` was created directly, the range values are determined by the local size passed to `VecSetSizes()` or `VecCreateMPI()`. + If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`. + + The high argument is one more than the last element stored locally. + + For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine + the local values in the vector. - If the ranges are used after all vectors that share the ranges has been destroyed then the program will crash accessing ranges[]. + If `ranges` are used after all vectors that share the ranges have been destroyed, then the program will crash accessing `ranges`. Fortran Notes: - You must PASS in an array of length size+1 + You must PASS in an array of length `size` + 1, where `size` is the size of the communicator owning the vector -.seealso: [](ch_vectors), `Vec`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `VecGetOwnershipRange()` +.seealso: [](ch_vectors), `Vec`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `VecGetOwnershipRange()`, `PetscSplitOwnership()`, + `VecSetSizes()`, `VecCreateMPI()`, `PetscLayout`, `DMDAGetGhostCorners()`, `DM` @*/ PetscErrorCode VecGetOwnershipRanges(Vec x, const PetscInt *ranges[]) { @@ -1072,7 +1089,7 @@ PetscErrorCode VecResetArray(Vec vec) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecLoad - Loads a vector that has been stored in binary or HDF5 format with `VecView()`. @@ -1457,7 +1474,11 @@ PetscErrorCode VecSetFromOptions(Vec vec) If one processor calls this with `N` of `PETSC_DETERMINE` then all processors must, otherwise the program will hang. -.seealso: [](ch_vectors), `Vec`, `VecGetSize()`, `PetscSplitOwnership()` + If `n` is not `PETSC_DECIDE`, then the value determines the `PetscLayout` of the vector and the ranges returned by + `VecGetOwnershipRange()` and `VecGetOwnershipRanges()` + +.seealso: [](ch_vectors), `Vec`, `VecCreate()`, `VecCreateSeq()`, `VecCreateMPI()`, `VecGetSize()`, `PetscSplitOwnership()`, `PetscLayout`, + `VecGetOwnershipRange()`, `VecGetOwnershipRanges()`, `MatSetSizes()` @*/ PetscErrorCode VecSetSizes(Vec v, PetscInt n, PetscInt N) { @@ -1533,7 +1554,7 @@ PetscErrorCode VecGetBlockSize(Vec v, PetscInt *bs) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecSetOptionsPrefix - Sets the prefix used for searching for all `Vec` options in the database. @@ -1559,7 +1580,7 @@ PetscErrorCode VecSetOptionsPrefix(Vec v, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecAppendOptionsPrefix - Appends to the prefix used for searching for all `Vec` options in the database. @@ -1585,7 +1606,7 @@ PetscErrorCode VecAppendOptionsPrefix(Vec v, const char prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecGetOptionsPrefix - Gets the prefix used for searching for all Vec options in the database. @@ -1613,6 +1634,35 @@ PetscErrorCode VecGetOptionsPrefix(Vec v, const char *prefix[]) PetscFunctionReturn(PETSC_SUCCESS); } +/*@C + VecGetState - Gets the state of a `Vec`. + + Not Collective + + Input Parameter: +. v - the `Vec` context + + Output Parameter: +.
state - the object state + + Level: advanced + + Note: + Object state is an integer which gets increased every time + the object is changed. By saving and later querying the object state + one can determine whether information about the object is still current. + +.seealso: [](ch_vectors), `Vec`, `VecCreate()`, `PetscObjectStateGet()` +@*/ +PetscErrorCode VecGetState(Vec v, PetscObjectState *state) +{ + PetscFunctionBegin; + PetscValidHeaderSpecific(v, VEC_CLASSID, 1); + PetscAssertPointer(state, 2); + PetscCall(PetscObjectStateGet((PetscObject)v, state)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /*@ VecSetUp - Sets up the internal vector data structures for the later use. @@ -1803,7 +1853,7 @@ PetscErrorCode VecSwap(Vec x, Vec y) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecStashViewFromOptions - Processes command line options to determine if/how a `VecStash` object is to be viewed. Collective @@ -2331,7 +2381,7 @@ static PetscErrorCode VecErrorWeightedNorms_Basic(Vec U, Vec Y, Vec E, NormType Notes: This is primarily used for computing weighted local truncation errors in ``TS``. -.seealso: [](ch_vectors), `Vec`, `NormType`, ``TSErrorWeightedNorm()``, ``TSErrorWeightedENorm()`` +.seealso: [](ch_vectors), `Vec`, `NormType`, `TSErrorWeightedNorm()`, `TSErrorWeightedENorm()` @*/ PetscErrorCode VecErrorWeightedNorms(Vec U, Vec Y, Vec E, NormType wnormtype, PetscReal atol, Vec vatol, PetscReal rtol, Vec vrtol, PetscReal ignore_max, PetscReal *norm, PetscInt *norm_loc, PetscReal *norma, PetscInt *norma_loc, PetscReal *normr, PetscInt *normr_loc) { diff --git a/src/vec/vec/tests/ex17f.F90 b/src/vec/vec/tests/ex17f.F90 index 933f66ea8cf..3969e74b8d8 100644 --- a/src/vec/vec/tests/ex17f.F90 +++ b/src/vec/vec/tests/ex17f.F90 @@ -39,7 +39,7 @@ do 10, i=0,n-1 iglobal = i + low value = i + 10*rank - PetscCallA(VecSetValues(y,ione,iglobal,value,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(y,ione,[iglobal],[value],INSERT_VALUES,ierr)) 10 continue PetscCallA(VecAssemblyBegin(y,ierr)) diff --git a/src/vec/vec/tutorials/ex14f.F90 b/src/vec/vec/tutorials/ex14f.F90 index 04f28f3d9cb..097d4dba85a 100644 --- a/src/vec/vec/tutorials/ex14f.F90 +++ b/src/vec/vec/tutorials/ex14f.F90 @@ -90,7 +90,7 @@ program main ione = 1 do 10, i=rstart,rend-1 value = i - PetscCallA(VecSetValues(gx,ione,i,value,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(gx,ione,[i],[value],INSERT_VALUES,ierr)) 10 continue PetscCallA(VecAssemblyBegin(gx,ierr)) diff --git a/src/vec/vec/tutorials/ex16f.F90 b/src/vec/vec/tutorials/ex16f.F90 index c644c203e45..a2d79d5de5c 100644 --- a/src/vec/vec/tutorials/ex16f.F90 +++ b/src/vec/vec/tutorials/ex16f.F90 @@ -39,7 +39,7 @@ program main PetscCallA(VecGetOwnershipRange(v,start,endd,ierr)) do i=start,endd-1 myValue = real(i) - PetscCallA(VecSetValues(v,one,i,myValue,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(v,one,[i],[myValue],INSERT_VALUES,ierr)) end do PetscCallA(VecAssemblyBegin(v,ierr)); PetscCallA(VecAssemblyEnd(v,ierr)); diff --git a/src/vec/vec/tutorials/ex18f.F90 b/src/vec/vec/tutorials/ex18f.F90 index d8d61e32e19..ce4eba22158 100644 --- a/src/vec/vec/tutorials/ex18f.F90 +++ b/src/vec/vec/tutorials/ex18f.F90 @@ -40,12 +40,12 @@ program main myResult = 0.5 if (rank==0) then i = 0 - PetscCallA(VecSetValues(xend,one,i,myResult,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(xend,one,[i],[myResult],INSERT_VALUES,ierr)) endif if (rank == size-1) then i = N-1 - PetscCallA(VecSetValues(xend,one,i,myResult,INSERT_VALUES,ierr)) + 
PetscCallA(VecSetValues(xend,one,[i],[myResult],INSERT_VALUES,ierr)) endif ! Assemble vector, using the 2-step process: diff --git a/src/vec/vec/tutorials/ex2f.F90 b/src/vec/vec/tutorials/ex2f.F90 index 21bc35709aa..b2af9727cc8 100644 --- a/src/vec/vec/tutorials/ex2f.F90 +++ b/src/vec/vec/tutorials/ex2f.F90 @@ -19,7 +19,7 @@ program main PetscInt N,i,ione PetscErrorCode ierr PetscMPIInt rank - PetscScalar one + PetscScalar one, value(1) PetscCallA(PetscInitialize(ierr)) one = 1.0 @@ -51,7 +51,7 @@ program main ione = 1 do 100 i=0,N-rank-1 - PetscCallA(VecSetValues(x,ione,i,one,ADD_VALUES,ierr)) + PetscCallA(VecSetValues(x,ione,[i],[one],ADD_VALUES,ierr)) 100 continue ! Assemble vector, using the 2-step process: @@ -65,7 +65,7 @@ program main ! Test VecGetValues() with scalar entries if (rank .eq. 0) then ione = 0 - PetscCallA(VecGetValues(x,ione,i,one,ierr)) + PetscCallA(VecGetValues(x,ione,[i],value,ierr)) endif ! View the vector; then destroy it. diff --git a/src/vec/vec/tutorials/ex3f.F90 b/src/vec/vec/tutorials/ex3f.F90 index ae11535fccc..a1b63ab53b6 100644 --- a/src/vec/vec/tutorials/ex3f.F90 +++ b/src/vec/vec/tutorials/ex3f.F90 @@ -43,7 +43,7 @@ program main ! - Each processor needs to insert only elements that it owns locally. do 100 i=istart,iend-1 v = 1.0*real(i) - PetscCallA(VecSetValues(x,ione,i,v,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(x,ione,[i],[v],INSERT_VALUES,ierr)) 100 continue ! Assemble vector, using the 2-step process: diff --git a/src/vec/vec/tutorials/ex5f.F90 b/src/vec/vec/tutorials/ex5f.F90 index f348dde7dc3..65484cf2d02 100644 --- a/src/vec/vec/tutorials/ex5f.F90 +++ b/src/vec/vec/tutorials/ex5f.F90 @@ -43,7 +43,7 @@ program main do i=0,ldim-1 iglobal = i + low v = real(i + 100*rank) - PetscCallA(VecSetValues(u,one,iglobal,v,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(u,one,[iglobal],[v],INSERT_VALUES,ierr)) end do PetscCallA(VecAssemblyBegin(u,ierr)) PetscCallA(VecAssemblyEnd(u,ierr)) diff --git a/src/vec/vec/tutorials/ex7.c b/src/vec/vec/tutorials/ex7.c index c416ed75884..b0372f42ea3 100644 --- a/src/vec/vec/tutorials/ex7.c +++ b/src/vec/vec/tutorials/ex7.c @@ -7,7 +7,6 @@ and from Fortran to C\n\n"; Ugly stuff to insure the function names match between Fortran and C. This is out of our PETSc hands to cleanup. */ -#include #if defined(PETSC_HAVE_FORTRAN_CAPS) #define ex7f_ EX7F #define ex7c_ EX7C diff --git a/src/vec/vec/tutorials/ex7f.F90 b/src/vec/vec/tutorials/ex7f.F90 index 98b213c1adf..84af29b52f6 100644 --- a/src/vec/vec/tutorials/ex7f.F90 +++ b/src/vec/vec/tutorials/ex7f.F90 @@ -36,5 +36,4 @@ subroutine ex7f(vec,comm) ! Now Call a PETSc Routine from Fortran ! PetscCall(VecView(vec,PETSC_VIEWER_STDOUT_WORLD,ierr)) - return end diff --git a/src/vec/vec/tutorials/ex8f.F90 b/src/vec/vec/tutorials/ex8f.F90 index 1ec02f46a95..2cb568c5360 100644 --- a/src/vec/vec/tutorials/ex8f.F90 +++ b/src/vec/vec/tutorials/ex8f.F90 @@ -72,7 +72,7 @@ program main ! contributions will be added together. do i=0,ng-1 - PetscCallA(VecSetValuesLocal(x,one,i,sone,ADD_VALUES,ierr)) + PetscCallA(VecSetValuesLocal(x,one,[i],[sone],ADD_VALUES,ierr)) end do ! 
diff --git a/src/vec/vec/tutorials/ex9f.F90 b/src/vec/vec/tutorials/ex9f.F90 index 940f28bee3e..d0ed890c70d 100644 --- a/src/vec/vec/tutorials/ex9f.F90 +++ b/src/vec/vec/tutorials/ex9f.F90 @@ -88,7 +88,7 @@ program main ione = 1 do 10, i=rstart,rend-1 value = real(i) - PetscCallA(VecSetValues(gx,ione,i,value,INSERT_VALUES,ierr)) + PetscCallA(VecSetValues(gx,ione,[i],[value],INSERT_VALUES,ierr)) 10 continue PetscCallA(VecAssemblyBegin(gx,ierr)) diff --git a/src/vec/vec/utils/projection.c b/src/vec/vec/utils/projection.c index a47ca8e3460..e640d976843 100644 --- a/src/vec/vec/utils/projection.c +++ b/src/vec/vec/utils/projection.c @@ -484,7 +484,11 @@ PetscErrorCode VecISAXPY(Vec vfull, IS is, PetscScalar alpha, Vec vreduced) PetscFunctionBegin; PetscValidHeaderSpecific(vfull, VEC_CLASSID, 1); PetscValidHeaderSpecific(is, IS_CLASSID, 2); + PetscCheckSameComm(vfull, 1, is, 2); + PetscValidLogicalCollectiveScalar(vfull, alpha, 3); PetscValidHeaderSpecific(vreduced, VEC_CLASSID, 4); + PetscCall(ISGetSize(is, &nfull)); + if (!nfull) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(VecGetSize(vfull, &nfull)); PetscCall(VecGetSize(vreduced, &nreduced)); if (nfull == nreduced) PetscCall(ISGetInfo(is, IS_SORTED, IS_GLOBAL, PETSC_TRUE, &sorted)); @@ -550,8 +554,11 @@ PetscErrorCode VecISCopy(Vec vfull, IS is, ScatterMode mode, Vec vreduced) PetscFunctionBegin; PetscValidHeaderSpecific(vfull, VEC_CLASSID, 1); PetscValidHeaderSpecific(is, IS_CLASSID, 2); + PetscCheckSameComm(vfull, 1, is, 2); PetscValidLogicalCollectiveEnum(vfull, mode, 3); PetscValidHeaderSpecific(vreduced, VEC_CLASSID, 4); + PetscCall(ISGetSize(is, &nfull)); + if (!nfull) PetscFunctionReturn(PETSC_SUCCESS); PetscCall(VecGetSize(vfull, &nfull)); PetscCall(VecGetSize(vreduced, &nreduced)); if (nfull == nreduced) PetscCall(ISGetInfo(is, IS_SORTED, IS_GLOBAL, PETSC_TRUE, &sorted)); @@ -656,19 +663,22 @@ PetscErrorCode VecISSet(Vec V, IS S, PetscScalar c) PetscFunctionBegin; PetscValidHeaderSpecific(V, VEC_CLASSID, 1); - PetscValidType(V, 1); PetscValidHeaderSpecific(S, IS_CLASSID, 2); - PetscCall(VecGetOwnershipRange(V, &low, &high)); - PetscCall(ISGetLocalSize(S, &nloc)); - PetscCall(ISGetIndices(S, &s)); - PetscCall(VecGetArray(V, &v)); - for (i = 0; i < nloc; ++i) { - if (s[i] < 0) continue; - PetscCheck(s[i] >= low && s[i] < high, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only owned values supported"); - v[s[i] - low] = c; + PetscCheckSameComm(V, 1, S, 2); + PetscCall(ISGetSize(S, &nloc)); + if (nloc) { + PetscCall(VecGetOwnershipRange(V, &low, &high)); + PetscCall(ISGetLocalSize(S, &nloc)); + PetscCall(ISGetIndices(S, &s)); + PetscCall(VecGetArray(V, &v)); + for (i = 0; i < nloc; ++i) { + if (s[i] < 0) continue; + PetscCheck(s[i] >= low && s[i] < high, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only owned values supported"); + v[s[i] - low] = c; + } + PetscCall(ISRestoreIndices(S, &s)); + PetscCall(VecRestoreArray(V, &v)); } - PetscCall(ISRestoreIndices(S, &s)); - PetscCall(VecRestoreArray(V, &v)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -698,25 +708,27 @@ PetscErrorCode VecISShift(Vec V, IS S, PetscScalar c) PetscFunctionBegin; PetscValidHeaderSpecific(V, VEC_CLASSID, 1); - PetscValidType(V, 1); PetscValidHeaderSpecific(S, IS_CLASSID, 2); - PetscCall(VecGetOwnershipRange(V, &low, &high)); - PetscCall(ISGetLocalSize(S, &nloc)); - PetscCall(ISGetIndices(S, &s)); - PetscCall(VecGetArray(V, &v)); - for (i = 0; i < nloc; ++i) { - if (s[i] < 0) continue; - PetscCheck(s[i] >= low && s[i] < high, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only owned values supported"); 
- v[s[i] - low] += c; + PetscCheckSameComm(V, 1, S, 2); + PetscCall(ISGetSize(S, &nloc)); + if (nloc) { + PetscCall(VecGetOwnershipRange(V, &low, &high)); + PetscCall(ISGetLocalSize(S, &nloc)); + PetscCall(ISGetIndices(S, &s)); + PetscCall(VecGetArray(V, &v)); + for (i = 0; i < nloc; ++i) { + if (s[i] < 0) continue; + PetscCheck(s[i] >= low && s[i] < high, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only owned values supported"); + v[s[i] - low] += c; + } + PetscCall(ISRestoreIndices(S, &s)); + PetscCall(VecRestoreArray(V, &v)); } - PetscCall(ISRestoreIndices(S, &s)); - PetscCall(VecRestoreArray(V, &v)); PetscFunctionReturn(PETSC_SUCCESS); } -#if !defined(PETSC_USE_COMPLEX) -/*@C - VecBoundGradientProjection - Projects vector according to this definition. +/*@ + VecBoundGradientProjection - Projects vector according to this definition. If XL[i] < X[i] < XU[i], then GP[i] = G[i]; If X[i] <= XL[i], then GP[i] = min(G[i],0); If X[i] >= XU[i], then GP[i] = max(G[i],0); @@ -735,14 +747,16 @@ PetscErrorCode VecISShift(Vec V, IS S, PetscScalar c) Note: `GP` may be the same vector as `G` + For complex numbers only the real part is used in the bounds. + .seealso: `Vec` @*/ PetscErrorCode VecBoundGradientProjection(Vec G, Vec X, Vec XL, Vec XU, Vec GP) { - PetscInt n, i; - const PetscReal *xptr, *xlptr, *xuptr; - PetscReal *gptr, *gpptr; - PetscReal xval, gpval; + PetscInt n, i; + const PetscScalar *xptr, *xlptr, *xuptr; + PetscScalar *gptr, *gpptr; + PetscScalar xval, gpval; /* Project variables at the lower and upper bound */ PetscFunctionBegin; @@ -766,9 +780,9 @@ PetscErrorCode VecBoundGradientProjection(Vec G, Vec X, Vec XL, Vec XU, Vec GP) for (i = 0; i < n; ++i) { gpval = gptr[i]; xval = xptr[i]; - if (gpval > 0.0 && xval <= xlptr[i]) { + if (PetscRealPart(gpval) > 0.0 && PetscRealPart(xval) <= PetscRealPart(xlptr[i])) { gpval = 0.0; - } else if (gpval < 0.0 && xval >= xuptr[i]) { + } else if (PetscRealPart(gpval) < 0.0 && PetscRealPart(xval) >= PetscRealPart(xuptr[i])) { gpval = 0.0; } gpptr[i] = gpval; @@ -780,7 +794,6 @@ PetscErrorCode VecBoundGradientProjection(Vec G, Vec X, Vec XL, Vec XU, Vec GP) PetscCall(VecRestoreArrayPair(G, GP, &gptr, &gpptr)); PetscFunctionReturn(PETSC_SUCCESS); } -#endif /*@ VecStepMaxBounded - See below diff --git a/src/vec/vec/utils/tagger/impls/absolute.c b/src/vec/vec/utils/tagger/impls/absolute.c index 4dce93206cb..cfc5a0b337a 100644 --- a/src/vec/vec/utils/tagger/impls/absolute.c +++ b/src/vec/vec/utils/tagger/impls/absolute.c @@ -33,7 +33,7 @@ static PetscErrorCode VecTaggerComputeBoxes_Absolute(VecTagger tagger, Vec vec, .seealso: `VecTagger`, `VecTaggerBox`, `VecTaggerAbsoluteGetBox()` @*/ -PetscErrorCode VecTaggerAbsoluteSetBox(VecTagger tagger, VecTaggerBox *box) +PetscErrorCode VecTaggerAbsoluteSetBox(VecTagger tagger, VecTaggerBox box[]) { PetscFunctionBegin; PetscCall(VecTaggerSetBox_Simple(tagger, box)); @@ -55,7 +55,7 @@ PetscErrorCode VecTaggerAbsoluteSetBox(VecTagger tagger, VecTaggerBox *box) .seealso: `VecTagger`, `VecTaggerBox`, `VecTaggerAbsoluteSetBox()` @*/ -PetscErrorCode VecTaggerAbsoluteGetBox(VecTagger tagger, const VecTaggerBox **box) +PetscErrorCode VecTaggerAbsoluteGetBox(VecTagger tagger, const VecTaggerBox *box[]) { PetscFunctionBegin; PetscCall(VecTaggerGetBox_Simple(tagger, box)); diff --git a/src/vec/vec/utils/tagger/impls/and.c b/src/vec/vec/utils/tagger/impls/and.c index 789ee240bbc..e6c4ec406de 100644 --- a/src/vec/vec/utils/tagger/impls/and.c +++ b/src/vec/vec/utils/tagger/impls/and.c @@ -17,7 +17,7 @@ .seealso: `VecTagger`, 
`VecTaggerAndSetSubs()` @*/ -PetscErrorCode VecTaggerAndGetSubs(VecTagger tagger, PetscInt *nsubs, VecTagger **subs) +PetscErrorCode VecTaggerAndGetSubs(VecTagger tagger, PetscInt *nsubs, VecTagger *subs[]) { PetscFunctionBegin; PetscCall(VecTaggerGetSubs_AndOr(tagger, nsubs, subs)); @@ -39,7 +39,7 @@ PetscErrorCode VecTaggerAndGetSubs(VecTagger tagger, PetscInt *nsubs, VecTagger .seealso: `VecTagger` @*/ -PetscErrorCode VecTaggerAndSetSubs(VecTagger tagger, PetscInt nsubs, VecTagger *subs, PetscCopyMode mode) +PetscErrorCode VecTaggerAndSetSubs(VecTagger tagger, PetscInt nsubs, VecTagger subs[], PetscCopyMode mode) { PetscFunctionBegin; PetscCall(VecTaggerSetSubs_AndOr(tagger, nsubs, subs, mode)); diff --git a/src/vec/vec/utils/tagger/impls/cdf.c b/src/vec/vec/utils/tagger/impls/cdf.c index a34943d8436..54b3a9156ff 100644 --- a/src/vec/vec/utils/tagger/impls/cdf.c +++ b/src/vec/vec/utils/tagger/impls/cdf.c @@ -415,7 +415,7 @@ static PetscErrorCode VecTaggerSetFromOptions_CDF(VecTagger tagger, PetscOptionI PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerCDFSetMethod - Set the method used to compute absolute boxes from CDF boxes Logically Collective @@ -439,7 +439,7 @@ PetscErrorCode VecTaggerCDFSetMethod(VecTagger tagger, VecTaggerCDFMethod method PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerCDFGetMethod - Get the method used to compute absolute boxes from CDF boxes Logically Collective @@ -496,7 +496,7 @@ PetscErrorCode VecTaggerCDFIterativeSetTolerances(VecTagger tagger, PetscInt max PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerCDFIterativeGetTolerances - Get the tolerances for iterative computation of absolute boxes from CDF boxes. Logically Collective @@ -539,7 +539,7 @@ PetscErrorCode VecTaggerCDFIterativeGetTolerances(VecTagger tagger, PetscInt *ma .seealso: `VecTagger`, `VecTaggerCDFGetBox()`, `VecTaggerBox` @*/ -PetscErrorCode VecTaggerCDFSetBox(VecTagger tagger, VecTaggerBox *box) +PetscErrorCode VecTaggerCDFSetBox(VecTagger tagger, VecTaggerBox box[]) { PetscFunctionBegin; PetscCall(VecTaggerSetBox_Simple(tagger, box)); @@ -562,7 +562,7 @@ PetscErrorCode VecTaggerCDFSetBox(VecTagger tagger, VecTaggerBox *box) .seealso: `VecTagger`, `VecTaggerCDFSetBox()`, `VecTaggerBox` @*/ -PetscErrorCode VecTaggerCDFGetBox(VecTagger tagger, const VecTaggerBox **box) +PetscErrorCode VecTaggerCDFGetBox(VecTagger tagger, const VecTaggerBox *box[]) { PetscFunctionBegin; PetscCall(VecTaggerGetBox_Simple(tagger, box)); diff --git a/src/vec/vec/utils/tagger/impls/or.c b/src/vec/vec/utils/tagger/impls/or.c index 9fa354519aa..462b7c41905 100644 --- a/src/vec/vec/utils/tagger/impls/or.c +++ b/src/vec/vec/utils/tagger/impls/or.c @@ -17,7 +17,7 @@ .seealso: `VecTaggerOrSetSubs()` @*/ -PetscErrorCode VecTaggerOrGetSubs(VecTagger tagger, PetscInt *nsubs, VecTagger **subs) +PetscErrorCode VecTaggerOrGetSubs(VecTagger tagger, PetscInt *nsubs, VecTagger *subs[]) { PetscFunctionBegin; PetscCall(VecTaggerGetSubs_AndOr(tagger, nsubs, subs)); @@ -39,7 +39,7 @@ PetscErrorCode VecTaggerOrGetSubs(VecTagger tagger, PetscInt *nsubs, VecTagger * .seealso: `VecTaggerOrGetSubs()` @*/ -PetscErrorCode VecTaggerOrSetSubs(VecTagger tagger, PetscInt nsubs, VecTagger *subs, PetscCopyMode mode) +PetscErrorCode VecTaggerOrSetSubs(VecTagger tagger, PetscInt nsubs, VecTagger subs[], PetscCopyMode mode) { PetscFunctionBegin; PetscCall(VecTaggerSetSubs_AndOr(tagger, nsubs, subs, mode)); diff --git a/src/vec/vec/utils/tagger/impls/relative.c b/src/vec/vec/utils/tagger/impls/relative.c index
7eca0a0918f..dc982d25b8c 100644 --- a/src/vec/vec/utils/tagger/impls/relative.c +++ b/src/vec/vec/utils/tagger/impls/relative.c @@ -67,7 +67,7 @@ static PetscErrorCode VecTaggerComputeBoxes_Relative(VecTagger tagger, Vec vec, .seealso: `VecTaggerRelativeGetBox()` @*/ -PetscErrorCode VecTaggerRelativeSetBox(VecTagger tagger, VecTaggerBox *box) +PetscErrorCode VecTaggerRelativeSetBox(VecTagger tagger, VecTaggerBox box[]) { PetscFunctionBegin; PetscCall(VecTaggerSetBox_Simple(tagger, box)); @@ -89,7 +89,7 @@ PetscErrorCode VecTaggerRelativeSetBox(VecTagger tagger, VecTaggerBox *box) .seealso: `VecTaggerRelativeSetBox()` @*/ -PetscErrorCode VecTaggerRelativeGetBox(VecTagger tagger, const VecTaggerBox **box) +PetscErrorCode VecTaggerRelativeGetBox(VecTagger tagger, const VecTaggerBox *box[]) { PetscFunctionBegin; PetscCall(VecTaggerGetBox_Simple(tagger, box)); diff --git a/src/vec/vec/utils/tagger/interface/tagger.c b/src/vec/vec/utils/tagger/interface/tagger.c index 27e8f8cff25..60a18222630 100644 --- a/src/vec/vec/utils/tagger/interface/tagger.c +++ b/src/vec/vec/utils/tagger/interface/tagger.c @@ -1,6 +1,6 @@ #include /*I "petscvec.h" I*/ -/*@C +/*@ VecTaggerCreate - create a `VecTagger` context. Collective @@ -41,16 +41,14 @@ PetscErrorCode VecTaggerCreate(MPI_Comm comm, VecTagger *tagger) PetscCall(VecTaggerInitializePackage()); PetscCall(PetscHeaderCreate(b, VEC_TAGGER_CLASSID, "VecTagger", "Vec Tagger", "Vec", comm, VecTaggerDestroy, VecTaggerView)); - b->blocksize = 1; b->invert = PETSC_FALSE; b->setupcalled = PETSC_FALSE; - - *tagger = b; + *tagger = b; PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerSetType - set the Vec tagger implementation Collective @@ -98,7 +96,7 @@ PetscErrorCode VecTaggerSetType(VecTagger tagger, VecTaggerType type) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerGetType - Gets the `VecTaggerType` name (as a string) from the `VecTagger`. Not Collective @@ -208,7 +206,7 @@ PetscErrorCode VecTaggerSetFromOptions(VecTagger tagger) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerSetBlockSize - set the block size of the set of indices returned by `VecTaggerComputeIS()`. Logically Collective @@ -240,7 +238,7 @@ PetscErrorCode VecTaggerSetBlockSize(VecTagger tagger, PetscInt blocksize) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerGetBlockSize - get the block size of the indices created by `VecTaggerComputeIS()`. Logically Collective @@ -264,7 +262,7 @@ PetscErrorCode VecTaggerGetBlockSize(VecTagger tagger, PetscInt *blocksize) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerSetInvert - If the tagged index sets are based on boxes that can be returned by `VecTaggerComputeBoxes()`, then this option inverts values used to compute the IS, i.e., from being in the union of the boxes to being in the intersection of their exteriors. 
@@ -288,7 +286,7 @@ PetscErrorCode VecTaggerSetInvert(VecTagger tagger, PetscBool invert) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerGetInvert - get whether the set of indices returned by `VecTaggerComputeIS()` are inverted Logically Collective @@ -312,7 +310,7 @@ PetscErrorCode VecTaggerGetInvert(VecTagger tagger, PetscBool *invert) PetscFunctionReturn(PETSC_SUCCESS); } -/*@C +/*@ VecTaggerView - view a `VecTagger` context Collective @@ -368,7 +366,7 @@ PetscErrorCode VecTaggerView(VecTagger tagger, PetscViewer viewer) .seealso: `VecTaggerComputeIS()`, `VecTagger`, `VecTaggerCreate()` @*/ -PetscErrorCode VecTaggerComputeBoxes(VecTagger tagger, Vec vec, PetscInt *numBoxes, VecTaggerBox **boxes, PetscBool *listed) +PetscErrorCode VecTaggerComputeBoxes(VecTagger tagger, Vec vec, PetscInt *numBoxes, VecTaggerBox *boxes[], PetscBool *listed) { PetscInt vls, tbs; @@ -404,7 +402,7 @@ PetscErrorCode VecTaggerComputeBoxes(VecTagger tagger, Vec vec, PetscInt *numBox .seealso: `VecTaggerComputeBoxes()`, `VecTagger`, `VecTaggerCreate()` @*/ -PetscErrorCode VecTaggerComputeIS(VecTagger tagger, Vec vec, IS *is, PetscBool *listed) +PetscErrorCode VecTaggerComputeIS(VecTagger tagger, Vec vec, IS is[], PetscBool *listed) { PetscInt vls, tbs; diff --git a/src/vec/vec/utils/tagger/interface/taggerregi.c b/src/vec/vec/utils/tagger/interface/taggerregi.c index c21c09577ce..c624204855f 100644 --- a/src/vec/vec/utils/tagger/interface/taggerregi.c +++ b/src/vec/vec/utils/tagger/interface/taggerregi.c @@ -33,7 +33,7 @@ PetscErrorCode VecTaggerRegisterAll(void) /*@C VecTaggerRegister - Adds an implementation of the `VecTagger` communication protocol. - Not Collective + Not Collective, No Fortran Support Input Parameters: + sname - name of a new user-defined implementation diff --git a/src/vec/vec/utils/vsection.c b/src/vec/vec/utils/vsection.c index 3c0f13226fc..2c1a91b6b3f 100644 --- a/src/vec/vec/utils/vsection.c +++ b/src/vec/vec/utils/vsection.c @@ -122,7 +122,7 @@ PetscErrorCode PetscSectionVecView(PetscSection s, Vec v, PetscViewer viewer) .seealso: `PetscSection`, `PetscSectionCreate()`, `VecSetValuesSection()` @*/ -PetscErrorCode VecGetValuesSection(Vec v, PetscSection s, PetscInt point, PetscScalar **values) +PetscErrorCode VecGetValuesSection(Vec v, PetscSection s, PetscInt point, PetscScalar *values[]) { PetscScalar *baseArray; const PetscInt p = point - s->pStart; @@ -295,8 +295,8 @@ PetscErrorCode PetscSectionRestoreField_Internal(PetscSection section, PetscSect PetscFunctionReturn(PETSC_SUCCESS); } -/*@C - PetscSectionVecNorm - Computes the vector norm, separated into field components. +/*@ + PetscSectionVecNorm - Computes the vector norm of each field Input Parameters: + s - the local Section