Actual source code: xxt.c
/*
Module Name: xxt
Module Info:

author: Henry M. Tufo III
e-mail: hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xxt_solver_info {
  PetscInt n, m, n_global, m_global;
  PetscInt nnz, max_nnz, msg_buf_sz;
  PetscInt *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt nsolves;
  PetscScalar tot_solve_time;
} xxt_info;
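
/*
   Field notes (inferred from the code below, not from separate documentation):
   nsep[]/lnsep[] hold the number of separator dofs per level (summed over the
   sub-hypercube / locally owned), fo is the firing order of global column numbers
   (nfo entries), stages[] marks level boundaries for the PCTFS_ssgl_radd
   communication, and col_sz/col_indices/col_vals describe the sparse columns of X
   packed contiguously in x.  solve_uu and solve_w are work vectors for do_xxt_solve.
*/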

typedef struct matvec_info {
  PetscInt n, m, n_global, m_global;
  PetscInt *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info *mvi;
};

static PetscInt n_xxt = 0;
static PetscInt n_xxt_handles = 0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);
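
/*
   Illustrative call sequence (a sketch; parameter names are placeholders, not code
   copied from the callers in PETSc):

     xxt_ADT handle = XXT_new();
     XXT_factor(handle, local2global, n, m, matvec, grid_data);   builds sparse X with X^T A X = I
     XXT_solve(handle, x, b);                                     x = X X^T b = A^{-1} b
     XXT_free(handle);
*/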

xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... potential problem here */
  n_xxt_handles++;
  xxt_handle = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi = NULL;

  return (xxt_handle);
}

PetscErrorCode XXT_factor(xxt_ADT xxt_handle, /* prev. allocated xxt handle */
                          PetscInt *local2global, /* global column mapping */
                          PetscInt n, /* local num rows */
                          PetscInt m, /* local num cols */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc */
                          void *grid_data) /* grid data for matvec */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xxt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %d != %d", 1 << PCTFS_i_log2_num_nodes, PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info *)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  PetscCall(det_separators(xxt_handle));

  PetscCall(do_xxt_factor(xxt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* need to copy b into x? */
  if (b) PetscCall(PCTFS_rvec_copy(x, b, xxt_handle->mvi->n));
  PetscCall(do_xxt_solve(xxt_handle, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XXT_free(xxt_ADT xxt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PetscCall(PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle));
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*

Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
*/
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}
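
/*
   In outline, xxt_generate builds X one column at a time: for each separator dof
   (taken in firing order) it forms the corresponding unit vector v, projects out
   the previously stored columns (z = X X^T A v, subtracted from v), normalizes by
   sqrt(v^T A v), and stores the nonzero segment of the resulting column in the
   packed array x.  Reductions are restricted to the sub-hypercube (dim) that is
   active for the current separator level.
*/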

static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt i, j, k, idex;
  PetscInt dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt *segs;
  PetscInt op[] = {GL_ADD, 0};
  PetscInt off, len;
  PetscScalar *x_ptr;
  PetscInt *iptr, flag;
  PetscInt start = 0, end, work;
  PetscInt op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscInt *nsep, *lnsep, *fo;
  PetscInt a_n = xxt_handle->mvi->n;
  PetscInt a_m = xxt_handle->mvi->m;
  PetscInt *a_local2global = xxt_handle->mvi->local2global;
  PetscInt level;
  PetscInt xxt_nnz = 0, xxt_max_nnz = 0;
  PetscInt n, m;
  PetscInt *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt n_global;
  PetscBLASInt i1 = 1, dlen;
  PetscScalar dm1 = -1.0;

  n = xxt_handle->mvi->n;
  nsep = xxt_handle->info->nsep;
  lnsep = xxt_handle->info->lnsep;
  fo = xxt_handle->info->fo;
  end = lnsep[0];
  level = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xxt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xxt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xxt_handle->ns));

  /* get and initialize storage for x local */
  /* note that x local is nxm and stored by columns */
  col_sz = (PetscInt *)malloc(m * sizeof(PetscInt));
  col_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  col_vals = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    col_indices[j] = col_indices[j + 1] = col_sz[i] = -1;
    col_vals[i] = NULL;
  }
  col_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PetscCall(PCTFS_ivec_zero(stages, level + 1));
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors */
  u = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
  xxt_nnz = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
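  /* note: segs[] holds the cumulative number of separator columns through each level,
     so "i == segs[dim]" below signals a move to the next (larger) sub-hypercube; the
     active firing range [start, end) is then extended by lnsep[dim] */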
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start < end) ? (col = fo[start]) : (col = INT_MAX);
    PetscCall(PCTFS_giop_hc(&col, &work, 1, op2, dim));

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PetscCall(PCTFS_rvec_zero(v, a_m));
    if (col == fo[start]) {
      start++;
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      PetscCheck(idex != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
      v[idex] = 1.0;
      j++;
    } else {
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex != -1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PetscCall(PCTFS_rvec_zero(uu, m));
    x_ptr = x;
    iptr = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, x_ptr, &i1));
      x_ptr += len;
    }

    /* uu = X^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PetscCall(PCTFS_rvec_zero(z, n));
    x_ptr = x;
    iptr = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PetscCall(PCTFS_rvec_zero(v + a_n, a_m - a_n));
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim));
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, v, &i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PetscCall(PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim));

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PetscCall(PCTFS_rvec_scale(v, 1.0 / alpha, n));

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off = k;
          flag = 0;
        }
      }
    }
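
    /* off/len now hold the first and last nonzero row of v; the next line turns len
       into the length of the contiguous segment [off, off+len) stored for this column */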
    len -= (off - 1);

    if (len > 0) {
      if ((xxt_nnz + len) > xxt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(x_ptr, x, xxt_nnz));
        free(x);
        x = x_ptr;
        x_ptr += xxt_nnz;
      }
      xxt_nnz += len;
      PetscCall(PCTFS_rvec_copy(x_ptr, v + off, len));

      col_indices[2 * i] = off;
      col_sz[i] = col_indices[2 * i + 1] = len;
      col_vals[i] = x_ptr;
    } else {
      col_indices[2 * i] = 0;
      col_sz[i] = col_indices[2 * i + 1] = 0;
      col_vals[i] = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xxt_handle->info->n = xxt_handle->mvi->n;
  xxt_handle->info->m = m;
  xxt_handle->info->nnz = xxt_nnz;
  xxt_handle->info->max_nnz = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz = stages[level] - stages[0];
  xxt_handle->info->solve_uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->solve_w = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->x = x;
  xxt_handle->info->col_vals = col_vals;
  xxt_handle->info->col_sz = col_sz;
  xxt_handle->info->col_indices = col_indices;
  xxt_handle->info->stages = stages;
  xxt_handle->info->nsolves = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return PETSC_SUCCESS;
}
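
/*
   do_xxt_solve applies uc <- X (X^T uc): one pass of dot products against the stored
   columns forms uu locally, PCTFS_ssgl_radd sums uu across the hypercube stages, and a
   second pass accumulates uc += uu[k] * X(:,k) over the stored column segments.
*/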

static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt off, len, *iptr;
  PetscInt level = xxt_handle->level;
  PetscInt n = xxt_handle->info->n;
  PetscInt m = xxt_handle->info->m;
  PetscInt *stages = xxt_handle->info->stages;
  PetscInt *col_indices = xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar *solve_w = xxt_handle->info->solve_w;
  PetscScalar *x = xxt_handle->info->x;
  PetscBLASInt i1 = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PetscCall(PCTFS_rvec_zero(uu_ptr, m));

  /* x = X.Y^T.b */
  /* uu = Y^T.b */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, x_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));

  PetscCall(PCTFS_rvec_zero(uc, n));

  /* x = X.uu */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xxt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xxt_handle);

  vals[0] = vals[1] = xxt_handle->id;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));
  PetscCheck(!(vals[0] != vals[1]) && !(xxt_handle->id <= 0), PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xxt_handle->id);
  PetscFunctionReturn(PETSC_SUCCESS);
}
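
/*
   In outline, det_separators bisects the processor hypercube top down (edge = level..1).
   The two halves exchange indicator vectors through the gather-scatter handle, so each
   half can see which of its dofs are coupled to the other half; those dofs form the
   separator for that level and are pushed (in reverse) onto the firing order fo, with
   nsep[]/lnsep[] recording the separator sizes.  Dofs never touched by any separator
   are purely local and are marked at level 0 at the end of the routine.
*/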

static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt i, ct, id;
  PetscInt mask, edge, *iptr;
  PetscInt *dir, *used;
  PetscInt sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt *local2global = xxt_handle->mvi->local2global;
  PetscInt n = xxt_handle->mvi->n;
  PetscInt m = xxt_handle->mvi->m;
  PetscInt level = xxt_handle->level;
  PetscInt shared = 0;

  PetscFunctionBegin;
  dir = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PetscCall(PCTFS_ivec_zero(dir, level + 1));
  PetscCall(PCTFS_ivec_zero(nsep, level + 1));
  PetscCall(PCTFS_ivec_zero(lnsep, level + 1));
  PetscCall(PCTFS_ivec_set(fo, -1, n + 1));
  PetscCall(PCTFS_ivec_zero(used, n));

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  PetscCall(PCTFS_rvec_zero(lhs, m));
  PetscCall(PCTFS_rvec_set(lhs, 1.0, n));
  PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level));
  PetscCall(PCTFS_rvec_zero(rsum, 2));
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
  }
  PetscCall(PCTFS_grop_hc(rsum, rw, 2, op, level));
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  if (PetscAbsScalar(rsum[0] - rsum[1]) > EPS) shared = 1;

  xxt_handle->info->n_global = xxt_handle->info->m_global = (PetscInt)rsum[0];
  xxt_handle->mvi->n_global = xxt_handle->mvi->m_global = (PetscInt)rsum[0];
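
  /* note: rsum[0] accumulated 1/multiplicity for every copy of a dof, so after the
     reduction it counts each global dof exactly once (hence n_global above); rsum[1]
     accumulated the multiplicities themselves, so the two differ as soon as any dof
     is replicated across processors, which is what sets shared */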

  /* determine separator sets top down */
  if (shared) {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      for (i = 0; i < n; i++) {
        if (id < mask) {
          if (lhs[i] != 0.0) lhs[i] = 1.0;
        }
        if (id >= mask) {
          if (rhs[i] != 0.0) rhs[i] = 1.0;
        }
      }

      if (id < mask) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge - 1));
      else PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge - 1));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_rvec_zero(rsum, 4));
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id < mask) {
            if (lhs[i] != 0.0) {
              sum[0]++;
              rsum[0] += 1.0 / lhs[i];
            }
          }
          /* number of dofs to be marked on rhs hc */
          if (id >= mask) {
            if (rhs[i] != 0.0) {
              sum[1]++;
              rsum[1] += 1.0 / rhs[i];
            }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      (id < mask) ? (rsum[2] = ct) : (rsum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));
      PetscCall(PCTFS_grop_hc(rsum, rw, 4, op, edge));
      rsum[0] += 0.1;
      rsum[1] += 0.1;
      rsum[2] += 0.1;
      rsum[3] += 0.1;

      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge] = (PetscInt)rsum[0];
        dir[edge] = LEFT;
      }

      if (id >= mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge] = (PetscInt)rsum[1];
        dir[edge] = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));

      /* lhs hc wins */
      if (sum[2] >= sum[3]) {
        if (id < mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (lhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[0];
        dir[edge] = LEFT;
      } else { /* rhs hc wins */
        if (id >= mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (rhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[1];
        dir[edge] = RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
  lnsep[edge] = ct;
  nsep[edge] = ct;
  dir[edge] = LEFT;

  xxt_handle->info->nsep = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo = fo;
  xxt_handle->info->nfo = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(PETSC_SUCCESS);
}
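
/*
   set_mvi packages the user's local matvec: it copies the local2global map (with an
   INT_MAX sentinel in slot m), stores the matvec callback and its grid_data context,
   and builds the gather-scatter handle used for the restricted matvec on sub-hypercubes.
*/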

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi = (mv_info *)malloc(sizeof(mv_info));

  mvi->n = n;
  mvi->m = m;
  mvi->n_global = -1;
  mvi->m_global = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec = matvec;
  mvi->grid_data = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return (mvi);
}
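
/* note: the stored callback expects the user's grid_data as its context, so grid_data is
   what gets passed here; the (mv_info *) cast only satisfies the stored pointer type */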

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(PETSC_SUCCESS);
}