import numpy as np
import petsc4py
import math
import time

# from mpi4py import MPI
from tools.postprocessK.kperm.computeFlows import *  # provides getKref, firstL, centL, lastL
from tools.postprocessK.flow import getKeff
from petsc4py import PETSc
import sys


def PetscP(datadir, ref, k, saveres, Rtol, comm):
    """Assemble and solve the pressure system for the permeability field k
    (or datadir + "k.npy"), refined by a factor ref, with CG and Jacobi
    preconditioning, and return the effective permeability on rank 0.

    comm == 0 runs serially on PETSc.COMM_SELF; any other value uses
    PETSc.COMM_WORLD and splits the domain into slabs along the first axis.
    """
    if comm == 0:
        pcomm = PETSc.COMM_SELF
        rank = 0
        pn = 1
    else:
        pcomm = PETSc.COMM_WORLD
        rank = pcomm.rank
        pn = pcomm.size

    t0 = time.time()

    if pn == 1:  # serial assembly: the field is processed in two halves locally
        if not isinstance(k, np.ndarray):
            k = np.load(datadir + "k.npy")
        if k.shape[2] == 1:
            refz = 1  # 2D field: do not refine the third axis
        else:
            refz = ref
        nz, ny, nx = k.shape[0] * ref, k.shape[1] * ref, k.shape[2] * refz
        n = nx * ny * nz
        K = PETSc.Mat().create(comm=pcomm)
        K.setType("seqaij")
        K.setSizes(((n, None), (n, None)))  # same layout as in the parallel branch below
        K.setPreallocationNNZ(nnz=(7, 4))  # 7-point stencil: at most 7 nonzeros per row
        K.setUp()
        R = PETSc.Vec().createSeq((n, None), comm=pcomm)
        R.setUp()
        k2, Nz, nnz2 = getKref(k, 1, 2, ref)  # upper half of the refined field
        k, Nz, nnz = getKref(k, 0, 2, ref)  # lower half of the refined field
        pbc = float(Nz)  # pressure boundary condition
        K, R = firstL(K, R, k, pbc)
        r = (k.shape[1] - 2) * (k.shape[2] - 2) * nnz2  # start row of the upper half
        K, R = lastL(K, R, k2, r)
        k2 = 0  # free memory
    else:  # parallel assembly: one slab per rank
        if not isinstance(k, np.ndarray):
            k = np.load(datadir + "k.npy")
        k, Nz, nnz = getKref(k, rank, pn, ref)  # local slab with ghost layers
        pbc = float(Nz)  # pressure boundary condition
        nz, ny, nx = (k.shape[0] - 2), (k.shape[1] - 2), (k.shape[2] - 2)
        n = nx * ny * nz
        K = PETSc.Mat().createAIJ(((n, None), (n, None)), nnz=(7, 4), comm=pcomm)
        K.setUp()
        R = PETSc.Vec().createMPI((n, None), comm=pcomm)
        R.setUp()
        r = nx * ny * nnz * rank  # start row of this rank's slab
        if rank == 0:
            K, R = firstL(K, R, k, pbc)
        if (rank > 0) and (rank < pn - 1):
            K, R = centL(K, R, k, r)
            k = 0  # free memory
        if rank == (pn - 1):
            K, R = lastL(K, R, k, r)
            k = 0  # free memory

    K.assemble()
    R.assemble()

    # Conjugate gradients with Jacobi preconditioning
    ksp = PETSc.KSP()
    ksp.create(comm=pcomm)
    ksp.setTolerances(rtol=Rtol, atol=1.0e-100, max_it=999999999)
    ksp.setFromOptions()
    P = R.copy()
    ksp.setType(PETSc.KSP.Type.CG)
    pc = PETSc.PC()
    pc.create(comm=pcomm)
    pc.setType(PETSc.PC.Type.JACOBI)
    ksp.setPC(pc)
    ksp.setOperators(K)
    ksp.setUp()
    t1 = time.time()
    ksp.solve(R, P)
    t2 = time.time()
    p = P.getArray().reshape(nz, ny, nx)  # local pressure field

    if rank == 0:
        keff, Q = getKeff(p, k[1:-1, 1:-1, 1:-1], pbc, Nz)
        if saveres:
            # Gather the pressure slabs from the remaining ranks and save results
            for i in range(1, pn):
                from mpi4py import MPI

                comm = MPI.COMM_WORLD
                pi = comm.recv(source=i)
                p = np.append(p, pi, axis=0)
            np.save(datadir + "P", p)
            with open(datadir + "RunTimes.out", "a") as f:
                f.write("ref: " + str(ref) + "\n")
                f.write("Matrix creation: " + str(t1 - t0) + "\n")
                f.write("Solver: " + str(t2 - t1) + "\n")
                f.write("Keff: " + str(keff) + "\n")
                f.write("N_cores: " + str(pn) + "\n")
            try:
                res = np.loadtxt(datadir + "SolverRes.txt")
                res = np.append(res, np.array([keff, ref, t2 - t0, pn]))
            except OSError:  # first run: SolverRes.txt does not exist yet
                res = np.array([keff, ref, t2 - t0, pn])
            np.savetxt(
                datadir + "SolverRes.txt", res, header="Keff, ref, Runtime, N_cores"
            )
            print(datadir[-3:], " keff= " + str(keff), " rtime= " + str(t2 - t0))
        return keff
    else:
        if saveres:
            from mpi4py import MPI

            comm = MPI.COMM_WORLD
            comm.send(p, dest=0)
        return


# See: A posteriori error estimates and adaptive solvers for porous media flows (Martin Vohralik)

# Command-line entry points
try:
    if sys.argv[5] == "1":
        from mpi4py import MPI

        icomm = MPI.Comm.Get_parent()
        PetscP(
            sys.argv[1], int(sys.argv[2]), "0", True, float(sys.argv[4]), 1
        )  # multiple cores, not Tupac
        # icomm = MPI.Comm.Get_parent()
        icomm.Disconnect()
    else:
        PetscP(
            sys.argv[1], int(sys.argv[2]), "0", True, float(sys.argv[4]), 0
        )  # 1 core, read k map from datadir
except IndexError:
    try:
        PetscP(
            sys.argv[1], int(sys.argv[2]), "0", True, 1e-4, 1
        )  # multiple cores, as executable
    except IndexError:
        pass
        # PetscP(sys.argv[1], int(sys.argv[2]), sys.argv[3], False, 1e-4, 0)  # 1 core, k field as argument
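
# ---------------------------------------------------------------------------
# Usage sketch (not part of the original workflow). The file name
# "PetscSolver.py", the directory "./case_01/" and the field size are
# assumptions for illustration only. The spawned-process branch above
# (sys.argv[5] == "1") expects a parent script roughly like the following,
# placed in a separate driver file:
#
#     from mpi4py import MPI
#     import numpy as np
#
#     # permeability field read by the solver as datadir + "k.npy"
#     np.save("./case_01/k.npy", np.ones((32, 32, 32)))
#
#     # child argv: datadir, ref, k (ignored), rtol, "1" -> multi-core branch
#     icomm = MPI.COMM_SELF.Spawn(
#         "python3",
#         args=["PetscSolver.py", "./case_01/", "2", "0", "1e-5", "1"],
#         maxprocs=4,
#     )
#     icomm.Disconnect()  # pairs with icomm.Disconnect() in the child
#
# Serial use without spawning (comm=0 selects PETSc.COMM_SELF):
#
#     keff = PetscP("./case_01/", 2, "0", True, 1e-5, 0)
# ---------------------------------------------------------------------------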