Master #60

Open
wants to merge 10 commits into master
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -176,6 +176,7 @@ elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
endif(${CMAKE_SYSTEM_NAME} MATCHES "BlueGeneP-static")



# inspired by elemental-bgp
if(MATH_LIBS)
message(STATUS "Using user-defined MATH_LIBS=${MATH_LIBS}")
120 changes: 119 additions & 1 deletion PIPS-NLP/Core/LinearSolvers/Ma57Solver/Ma57Solver.C
@@ -14,6 +14,7 @@
#include "DenseGenMatrix.h"

#include <cmath>
#include <limits>
#include <cstdio>


@@ -30,6 +31,8 @@ extern int separateHandDiag
extern double gHSL_PivotLV;
extern double gMA57_Ordering;

const double kInitPrecision = 1.e-7;

#ifndef MIN
#define MIN(a,b) ((a > b) ? b : a)
#endif
@@ -101,6 +104,7 @@ void Ma57Solver::SetUpMa57Solver( SparseSymMatrix * sgm )
M = mStorage->M;
}

SpReferTo( mMat, sgm );
}


@@ -397,14 +401,16 @@ void Ma57Solver::solve(int solveType, OoqpVector& rhs_in)
//delete [] iwork; //!using a sort of cache now
}

void Ma57Solver::solve(GenMatrix& rhs_in)
void Ma57Solver::BasicSolve(GenMatrix& rhs_in, int NRHS_in)
{
if(n==0) return;

DenseGenMatrix &rhs = dynamic_cast<DenseGenMatrix&>(rhs_in);
int N,NRHS;
// rhs vectors are on the "rows", for contiguous memory
rhs.getSize(NRHS,N);
if (NRHS_in > 0)
NRHS = (NRHS < NRHS_in)? NRHS: NRHS_in;
assert(n==N);

// we need checks on the residuals, can't do that with multiple RHS
@@ -422,10 +428,122 @@ void Ma57Solver::solve(GenMatrix& rhs_in)
&NRHS, drhs, &n,
work, &lwork, iwork,
icntl, info );

delete [] iwork;
delete [] work;
}

void Ma57Solver::solve(GenMatrix& rhs_in)
{
// do at least one basicSolve
// this->BasicSolve( sol_best );
// return;

if(n==0) return;

DenseGenMatrix &sol_best = dynamic_cast<DenseGenMatrix&>(rhs_in);
int N,NRHS;
// rhs vectors are on the "rows", for contiguous memory
sol_best.getSize(NRHS,N);

assert(n==N);

// define structures to save rhs and store residuals
DenseGenMatrix rhsMtx_ori(NRHS,N);
DenseGenMatrix rhsMtx_resid(NRHS,N);
sol_best.fromGetDense( 0, 0, rhsMtx_ori.elements(), N, NRHS, N);
sol_best.fromGetDense( 0, 0, rhsMtx_resid.elements(), N, NRHS, N);

SimpleVectorHandle vsol_temp ( new SimpleVector(N) );

int *done = new int[NRHS]{0};
int sumDone{0};
int maxIR{10}, n_IR{0};
double *rnorm = new double[NRHS]{0};
double *rnormRHS = new double[NRHS]{0};
double norm_temp;
int *ori_col_idx = new int[NRHS]{0};
int col_idx;

int next_NRHS = 0;
int curr_NRHS = NRHS;

for (int i = 0; i < curr_NRHS; i++) {
SimpleVector vresid(rhsMtx_ori[i],N);
rnormRHS[i] = vresid.twonorm();
ori_col_idx[i] = i;
}

// do at least one basicSolve
this->BasicSolve( sol_best );

// compute residuals
for (int i = 0; i < curr_NRHS; i++) {
SimpleVector vsol(sol_best[i],N);
SimpleVector vrhs_resid(rhsMtx_resid[i],N);

mMat->mult(-1.0, vrhs_resid.elements(), 1, 1.0, vsol.elements(), 1);
rnorm[i] = vrhs_resid.twonorm();

if(rnorm[i] < kInitPrecision*(1.e0+rnormRHS[i]) ){
// residuals are small enough, skip IR for this RHS
}else {
// requires IR for this RHS
SimpleVector vrhs_resid_next(rhsMtx_resid[next_NRHS],N);
vrhs_resid_next.copyFrom(vrhs_resid);
ori_col_idx[next_NRHS] = i;
next_NRHS++;
}
}

// Do IR
while (next_NRHS != 0 && n_IR < maxIR ) {

this->BasicSolve( rhsMtx_resid,next_NRHS);

curr_NRHS = next_NRHS;
next_NRHS = 0;

// compute new residuals
for (int i = 0; i < curr_NRHS; i++) {
col_idx = ori_col_idx[i];

SimpleVector vsol_best(sol_best[col_idx],N);
SimpleVector vrhs_ori(rhsMtx_ori[col_idx],N);
SimpleVector vrhs_resid(rhsMtx_resid[i],N);

vsol_temp->copyFrom(vsol_best);
vsol_temp->axpy(1.0,vrhs_resid);
vrhs_resid.copyFrom(vrhs_ori);

mMat->mult(-1.0, vrhs_resid.elements(), 1, 1.0, vsol_temp->elements(), 1);

norm_temp = vrhs_resid.twonorm();
if(norm_temp < rnorm[col_idx]){
// has improvement, take this IR step and continue IR for this RHS
vsol_best.copyFrom(*vsol_temp);
rnorm[col_idx] = norm_temp;
SimpleVector vrhs_resid_next(rhsMtx_resid[next_NRHS],N);
vrhs_resid_next.copyFrom(vrhs_resid);
ori_col_idx[next_NRHS] = col_idx;
next_NRHS++;
}else if(norm_temp < kInitPrecision*(1.e0+rnorm[col_idx]) ){
// residuals are small enough, take this IR step and terminate IR for this RHS
vsol_best.copyFrom(*vsol_temp);
rnorm[col_idx] = norm_temp;
}else {
// no improvement, do not take this IR step and terminate IR for this RHS
}
}
n_IR++;
}

delete [] done;
delete [] rnorm;
delete [] rnormRHS;
delete [] ori_col_idx;
}

int* Ma57Solver::new_iworkn(int dim)
{
if(niworkn != dim) {
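For reference, a self-contained sketch of the per-right-hand-side iterative refinement pattern that the new Ma57Solver::solve() implements. The names (refineOneRhs, applySolve, applyMatVec, twoNorm) are hypothetical stand-ins for the MA57 back-solve and the stored sparse matrix-vector product; the actual code additionally batches all still-unconverged right-hand sides and refines them with a single BasicSolve per sweep.

```cpp
#include <cmath>
#include <cstddef>
#include <functional>
#include <vector>

using Vec = std::vector<double>;

// Euclidean norm of a vector.
double twoNorm(const Vec& v) {
  double s = 0.0;
  for (double x : v) s += x * x;
  return std::sqrt(s);
}

// Refine an approximate solution of A*x = b for a single right-hand side.
// applySolve : r -> approximate A^{-1} r (the factorized back-solve)
// applyMatVec: x -> A x                  (the stored sparse matrix)
void refineOneRhs(const std::function<Vec(const Vec&)>& applySolve,
                  const std::function<Vec(const Vec&)>& applyMatVec,
                  const Vec& b, Vec& x_best,
                  double tol = 1.e-7, int maxIR = 10) {
  const std::size_t n = b.size();

  // residual r = b - A*x
  auto residual = [&](const Vec& x) {
    Vec Ax = applyMatVec(x);
    Vec r(n);
    for (std::size_t i = 0; i < n; ++i) r[i] = b[i] - Ax[i];
    return r;
  };

  x_best = applySolve(b);           // at least one basic solve
  Vec resid = residual(x_best);
  double bestNorm = twoNorm(resid);
  const double target = tol * (1.0 + twoNorm(b));

  for (int it = 0; it < maxIR && bestNorm >= target; ++it) {
    Vec corr = applySolve(resid);   // solve A * corr ~= resid
    Vec x_try(n);
    for (std::size_t i = 0; i < n; ++i) x_try[i] = x_best[i] + corr[i];

    Vec r_try = residual(x_try);
    double tryNorm = twoNorm(r_try);
    if (tryNorm >= bestNorm) break; // no improvement: keep the previous iterate
    x_best = x_try;                 // accept the step and continue refining
    resid = r_try;
    bestNorm = tryNorm;
  }
}
```

Accepting a correction only when it reduces the residual norm mirrors the guard in the loop above, which keeps the previous iterate whenever a refinement step fails to improve.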
5 changes: 5 additions & 0 deletions PIPS-NLP/Core/LinearSolvers/Ma57Solver/Ma57Solver.h
@@ -64,6 +64,10 @@ extern "C" {
class Ma57Solver : public DoubleLinearSolver {
private:
Ma57Solver() {};

/** store as a sparse symmetric matrix */
SparseSymMatrixHandle mMat;

void SetUpMa57Solver( SparseSymMatrix * sgm);
protected:
int icntl[20];
@@ -132,6 +136,7 @@ class Ma57Solver : public DoubleLinearSolver {
virtual void diagonalChanged( int idiag, int extent );
virtual int matrixChanged();
virtual void solve( OoqpVector& rhs );
virtual void BasicSolve(GenMatrix& rhs_in, int NRHS_in = 0);
virtual void solve( GenMatrix& rhs);

//virtual void Lsolve ( OoqpVector& x );
36 changes: 20 additions & 16 deletions PIPS-NLP/Core/NlpStoch/NlpPIPSIpmInterface.h
@@ -27,7 +27,7 @@ class NlpPIPSIpmInterface
int go(int addslack=0);

double getObjective() const;
void computeProblemSize(int&, int&);
void computeProblemSize(long int&, long int&);
double getFirstStageObjective() const;


@@ -131,16 +131,18 @@ int NlpPIPSIpmInterface<FORMULATION,IPMSOLVER,UPDATENLP>::go(int addSlack) {

std::cout << nscens << " scenarios." << endl;
}
int sum_var=0,sum_icon=0,sum_econ=0,total_var=0,total_icon=0,total_econ=0;
long int sum_var=0,sum_icon=0,sum_econ=0,total_var=0,total_icon=0,total_econ=0;
for(int j=0;j<nscens;j++){
sum_var += data->children[j]->getLocalnx();
sum_econ += data->children[j]->getLocalmy();
sum_icon += data->children[j]->getLocalmz();
if(data->children[j] != sData::dummy) {
sum_var += data->children[j]->getLocalnx();
sum_econ += data->children[j]->getLocalmy();
sum_icon += data->children[j]->getLocalmz();
}
}

MPI_Allreduce(&sum_var, &total_var, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_econ, &total_econ, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_icon, &total_icon, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_var, &total_var, 1, MPI_LONG, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_econ, &total_econ, 1, MPI_LONG, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_icon, &total_icon, 1, MPI_LONG, MPI_SUM, MPI_COMM_WORLD);

if(mype==0) {
std::cout << "Total " << data->getLocalnx() + total_var << " variables, "
@@ -195,18 +197,20 @@ double NlpPIPSIpmInterface<FORMULATION,SOLVER,UPDATENLP>::getObjective() const {
}

template<typename FORMULATION, typename SOLVER, typename UPDATENLP>
void NlpPIPSIpmInterface<FORMULATION,SOLVER,UPDATENLP>::computeProblemSize(int& nvar, int& ncon){
void NlpPIPSIpmInterface<FORMULATION,SOLVER,UPDATENLP>::computeProblemSize(long int& nvar, long int& ncon){
int nscens=data->children.size();
int sum_var=0,sum_icon=0,sum_econ=0,total_var=0,total_icon=0,total_econ=0;
long int sum_var=0,sum_icon=0,sum_econ=0,total_var=0,total_icon=0,total_econ=0;
for(int j=0;j<nscens;j++){
sum_var += data->children[j]->getLocalnx();
sum_econ += data->children[j]->getLocalmy();
sum_icon += data->children[j]->getLocalmz();
if(data->children[j] != sData::dummy) {
sum_var += data->children[j]->getLocalnx();
sum_econ += data->children[j]->getLocalmy();
sum_icon += data->children[j]->getLocalmz();
}
}

MPI_Allreduce(&sum_var, &total_var, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_econ, &total_econ, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_icon, &total_icon, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_var, &total_var, 1, MPI_LONG, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_econ, &total_econ, 1, MPI_LONG, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&sum_icon, &total_icon, 1, MPI_LONG, MPI_SUM, MPI_COMM_WORLD);
nvar = total_var;
ncon = total_econ + total_icon;
}
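Below is a minimal standalone sketch of the reduction pattern used above for the 64-bit problem-size counters; the per-rank count is a placeholder value, and MPI_LONG is the MPI datatype that corresponds to the long int buffers being reduced.

```cpp
#include <cstdio>
#include <mpi.h>

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);

  long local_vars = 1000000L;  // per-rank variable count (placeholder value)
  long total_vars = 0L;

  // MPI_LONG matches the C type 'long int' of the send/receive buffers.
  MPI_Allreduce(&local_vars, &total_vars, 1, MPI_LONG, MPI_SUM, MPI_COMM_WORLD);

  int rank = 0;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  if (rank == 0)
    std::printf("total variables across all ranks: %ld\n", total_vars);

  MPI_Finalize();
  return 0;
}
```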
4 changes: 2 additions & 2 deletions PIPS-NLP/Drivers/parallelPipsNlp_C_Callback.cpp
@@ -207,7 +207,7 @@ double PipsNlpProblemStructGetObjective(PipsNlpProblemStruct* prob)
}

extern "C"
int PipsNlpProblemStructGetTotalVars(PipsNlpProblemStruct* prob)
long int PipsNlpProblemStructGetTotalVars(PipsNlpProblemStruct* prob)
{
if(prob)
return prob->nvars;
@@ -216,7 +216,7 @@ int PipsNlpProblemStructGetTotalVars(PipsNlpProblemStruct* prob)
}

extern "C"
int PipsNlpProblemStructGetTotalCons(PipsNlpProblemStruct* prob)
long int PipsNlpProblemStructGetTotalCons(PipsNlpProblemStruct* prob)
{
if(prob)
return prob->ncons;
8 changes: 4 additions & 4 deletions PIPS-NLP/Drivers/parallelPipsNlp_C_Callback.h
@@ -154,8 +154,8 @@ extern "C"
UserDataPtr userdata;
// holders for optimal solution and objective
double objective;
int nvars;
int ncons;
long int nvars;
long int ncons;
};
typedef struct PipsNlpProblemStruct* PipsNlpProblemStructPtr; /** Pointer to a pips_nlp Problem. **/

@@ -192,12 +192,12 @@ extern "C"
/*
* Get the number of total variables
*/
int PipsNlpProblemStructGetTotalVars(PipsNlpProblemStruct* prob);
long int PipsNlpProblemStructGetTotalVars(PipsNlpProblemStruct* prob);

/*
* Get the number of total constraints
*/
int PipsNlpProblemStructGetTotalCons(PipsNlpProblemStruct* prob);
long int PipsNlpProblemStructGetTotalCons(PipsNlpProblemStruct* prob);
};
#endif
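A hypothetical caller-side sketch of the widened getters declared above; creation and solution of the problem through the callback interface are omitted, and reportProblemSize is an illustrative helper, not part of the API.

```cpp
#include <cstdio>

#include "parallelPipsNlp_C_Callback.h"

// Print the global problem size reported by the (now 64-bit) getters.
void reportProblemSize(PipsNlpProblemStruct* prob) {
  long int nvars = PipsNlpProblemStructGetTotalVars(prob);
  long int ncons = PipsNlpProblemStructGetTotalCons(prob);
  std::printf("problem size: %ld variables, %ld constraints\n", nvars, ncons);
}
```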