
I have a large set of model parameters that control several different components. The model runs in parallel, and the parameters are held constant during the run. The problem is that I have to add an IndepVarComp() for every model parameter when running in parallel, even though I would like to pass them all by object. I also need to be able to edit the values in my run script (between setup and run) before running the model. Is there a good way of doing this? I recognize the data-passing issue: under MPI the parameters have no "source".

It works if I add an IndepVarComp() for each model parameter, as long as I don't pass by object. That makes sense: if I tell OpenMDAO I want to be able to change a value and track how the model responds, then passing it by object is contradictory. However, I need to be able to set the parameter values after setup, and under MPI I can't do that without making an IndepVarComp() for each model parameter.

I have attached an example of what I want to do, based on the Sellar problem from the OpenMDAO docs. If, in src.py, I switch varTree:leaf1 to pass_by_obj=False (uncomment line 28 and comment out line 27) and uncomment the 'vt' IndepVarComp (line 139), the example runs fine in parallel.

run with $ mpirun -np 4 python call.py

call.py

from __future__ import print_function

from openmdao.api import Problem, ScipyOptimizer

from src import SellarDerivativesSuperGroup

import numpy as np

if __name__ == "__main__":

    ######################### for MPI functionality #########################
    from openmdao.core.mpi_wrap import MPI

    if MPI:
        # if you called this script with 'mpirun', use the PETSc data passing
        from openmdao.core.petsc_impl import PetscImpl as impl
    else:
        # if you didn't use 'mpirun', use the numpy data passing
        from openmdao.api import BasicImpl as impl

    def mpi_print(prob, *args):
        """ helper function to only print on rank 0 """
        if prob.root.comm.rank == 0:
            print(*args)

    ##################
    nProblems = 4
    datasize = 10
    top = Problem(impl=impl)
    top.root = SellarDerivativesSuperGroup(nProblems=nProblems, datasize=datasize)

    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'
    top.driver.options['tol'] = 1.0e-8

    top.driver.add_desvar('z', lower=np.array([-10.0, 0.0]),
                          upper=np.array([10.0, 10.0]))
    top.driver.add_desvar('x', lower=0.0, upper=10.0)

    top.driver.add_objective('obj')
    top.driver.add_constraint('con1', upper=0.0)
    top.driver.add_constraint('con2', upper=0.0)

    top.setup(check=True)

    # Setting initial values for design variables
    top['x'] = 1.0
    top['z'] = np.array([5.0, 2.0])
    top['varTree:leaf1'] = np.ones(datasize)

    top.run()

    if top.root.comm.rank == 0:
        print("\n")
        print("Minimum found at (%f, %f, %f)" % (top['z'][0],
                                                 top['z'][1],
                                                 top['x']))
        print("Coupling vars: %f, %f" % (top['y1_0'], top['y2_0']))
        print("Minimum objective: ", top['obj']/nProblems)

src.py

from __future__ import print_function

from openmdao.api import ExecComp, IndepVarComp, Group, NLGaussSeidel, \
                         Component, ParallelGroup, ScipyGMRES

import numpy as np


class SellarDis1(Component):
    """Component containing Discipline 1."""

    def __init__(self, problem_id=0, datasize=0):
        super(SellarDis1, self).__init__()

        self.problem_id = problem_id

        # Global Design Variable
        self.add_param('z', val=np.zeros(2))

        # Local Design Variable
        self.add_param('x', val=0.)

        # Coupling parameter
        self.add_param('y2_%i' % problem_id, val=1.0)

        # Dummy variable tree element
        self.add_param('varTree:leaf1', val=np.zeros(datasize), pass_by_obj=True)
        # self.add_param('varTree:leaf1', val=np.zeros(datasize), pass_by_obj=False)

        # Coupling output
        self.add_output('y1_%i' % problem_id, val=1.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluates the equation
        y1 = z1**2 + z2 + x1 - 0.2*y2"""

        problem_id = self.problem_id

        z1 = params['z'][0]
        z2 = params['z'][1]
        x1 = params['x']
        y2 = params['y2_%i' % problem_id]

        unknowns['y1_%i' % problem_id] = z1**2 + z2 + x1 - 0.2*y2

    def linearize(self, params, unknowns, resids):
        """ Jacobian for Sellar discipline 1."""

        problem_id = self.problem_id

        J = {}

        J['y1_%i' % problem_id, 'y2_%i' % problem_id] = -0.2
        J['y1_%i' % problem_id, 'z'] = np.array([[2*params['z'][0], 1.0]])
        J['y1_%i' % problem_id, 'x'] = 1.0

        return J


class SellarDis2(Component):
    """Component containing Discipline 2."""

    def __init__(self, problem_id=0):
        super(SellarDis2, self).__init__()

        self.problem_id = problem_id

        # Global Design Variable
        self.add_param('z', val=np.zeros(2))

        # Coupling parameter
        self.add_param('y1_%i' % problem_id, val=1.0)

        # Coupling output
        self.add_output('y2_%i' % problem_id, val=1.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluates the equation
        y2 = y1**(.5) + z1 + z2"""

        problem_id = self.problem_id

        z1 = params['z'][0]
        z2 = params['z'][1]
        y1 = params['y1_%i' % problem_id]

        # Note: this may cause some issues. However, y1 is constrained to be
        # above 3.16, so let's just let it converge; the optimizer will
        # throw out infeasible points
        y1 = abs(y1)

        unknowns['y2_%i' % problem_id] = y1**.5 + z1 + z2

    def linearize(self, params, unknowns, resids):
        """ Jacobian for Sellar discipline 2."""

        problem_id = self.problem_id

        J = {}

        J['y2_%i' % problem_id, 'y1_%i' % problem_id] = .5*params['y1_%i' % problem_id]**-.5
        J['y2_%i' % problem_id, 'z'] = np.array([[1.0, 1.0]])

        return J


class SellarDerivativesSubGroup(Group):

    def __init__(self, problem_id=0, datasize=0):
        super(SellarDerivativesSubGroup, self).__init__()

        self.add('d1', SellarDis1(problem_id=problem_id, datasize=datasize), promotes=['*'])
        self.add('d2', SellarDis2(problem_id=problem_id), promotes=['*'])

        self.nl_solver = NLGaussSeidel()
        self.nl_solver.options['atol'] = 1.0e-12

        self.ln_solver = ScipyGMRES()


class SellarDerivatives(Group):
    """ Group containing the Sellar MDA. This version uses the disciplines
    with derivatives."""

    def __init__(self, problem_id=0, datasize=0):
        super(SellarDerivatives, self).__init__()

        self.add('d', SellarDerivativesSubGroup(problem_id=problem_id, datasize=datasize), promotes=['*'])


class SellarDerivativesSuperGroup(Group):

    def __init__(self, nProblems=0, datasize=0):

        super(SellarDerivativesSuperGroup, self).__init__()

        self.add('px', IndepVarComp('x', 1.0), promotes=['*'])
        self.add('pz', IndepVarComp('z', np.array([5.0, 2.0])), promotes=['*'])
        # self.add('vt', IndepVarComp('varTree:leaf1', val=np.zeros(datasize)), promotes=['*'])

        pg = self.add('manySellars', ParallelGroup(), promotes=['*'])
        print(nProblems)
        for problem_id in np.arange(0, nProblems):
            pg.add('Sellar%i' % problem_id, SellarDerivatives(problem_id=problem_id, datasize=datasize), promotes=['*'])

        self.add('obj_cmp', ExecComp('obj = (x**2 + z[1] + y1_0 + exp(-y2_0)) + (x**2 + z[1] + y1_1 + exp(-y2_1)) + '
                                     '(x**2 + z[1] + y1_2 + exp(-y2_2)) + (x**2 + z[1] + y1_3 + exp(-y2_3))',
                                     z=np.array([0.0, 0.0]), x=0.0,
                                     y1_0=0.0, y2_0=0.0,
                                     y1_1=0.0, y2_1=0.0,
                                     y1_2=0.0, y2_2=0.0,
                                     y1_3=0.0, y2_3=0.0),
                 promotes=['*'])

        self.add('con_cmp1', ExecComp('con1 = 3.16 - y1_0'), promotes=['*'])
        self.add('con_cmp2', ExecComp('con2 = y2_0 - 24.0'), promotes=['*'])
jthomas

2 Answers


If these parameters will never be used as optimization design variables, you don't have to declare them as OpenMDAO variables at all. You could just declare them as regular Python attributes in the __init__ methods, then write a small method that loops over the hierarchy and sets the attribute values to whatever you want.

That might be a little simpler than adding IndepVarComps with pass-by-object, though your own proposed solution would also work.
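
For illustration, a minimal sketch of that idea. The names MyComp, MyGroup, and set_model_params are hypothetical, not OpenMDAO API; the only OpenMDAO behavior relied on is that Group.add() returns the system it adds (as already used in the example above).

from __future__ import print_function

import numpy as np
from openmdao.api import Component, Group


class MyComp(Component):
    """Component whose constant model parameters live as plain attributes."""

    def __init__(self, datasize=10):
        super(MyComp, self).__init__()
        self.add_param('x', val=0.0)
        self.add_output('y', val=0.0)
        # plain Python attribute, not an OpenMDAO variable
        self.leaf1 = np.zeros(datasize)

    def solve_nonlinear(self, params, unknowns, resids):
        unknowns['y'] = params['x'] + self.leaf1.sum()


class MyGroup(Group):

    def __init__(self, n=4, datasize=10):
        super(MyGroup, self).__init__()
        # add() returns the system it adds, so keep handles for later
        self._my_comps = [self.add('c%i' % i, MyComp(datasize=datasize))
                          for i in range(n)]

    def set_model_params(self, leaf1):
        """Small helper that pushes new values down to every component."""
        for comp in self._my_comps:
            comp.leaf1 = leaf1

# in the run script, between setup() and run():
#   top.root.set_model_params(np.ones(10))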

Justin Gray

On further investigation, I found that I can specify pass_by_obj in an IndepVarComp(). That solves part of the problem. I solved the other part by creating a function that adds the parameters, rather than putting a long list of them in my constructor, which would hurt readability.

My solution is below. If someone else has a better one I would definitely be interested.

src.py

from __future__ import print_function

from openmdao.api import ExecComp, IndepVarComp, Group, NLGaussSeidel, \
                         Component, ParallelGroup, ScipyGMRES

import numpy as np


class SellarDis1(Component):
    """Component containing Discipline 1."""

    def __init__(self, problem_id=0, datasize=0):
        super(SellarDis1, self).__init__()

        self.problem_id = problem_id

        # Global Design Variable
        self.add_param('z', val=np.zeros(2))

        # Local Design Variable
        self.add_param('x', val=0.)

        # Coupling parameter
        self.add_param('y2_%i' % problem_id, val=1.0)

        # Dummy variable tree element
        self.add_param('varTree:leaf1', val=np.zeros(datasize), pass_by_obj=True)

        # Coupling output
        self.add_output('y1_%i' % problem_id, val=1.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluates the equation
        y1 = z1**2 + z2 + x1 - 0.2*y2"""

        problem_id = self.problem_id

        z1 = params['z'][0]
        z2 = params['z'][1]
        x1 = params['x']
        y2 = params['y2_%i' % problem_id]

        unknowns['y1_%i' % problem_id] = z1**2 + z2 + x1 - 0.2*y2

    def linearize(self, params, unknowns, resids):
        """ Jacobian for Sellar discipline 1."""

        problem_id = self.problem_id

        J = {}

        J['y1_%i' % problem_id, 'y2_%i' % problem_id] = -0.2
        J['y1_%i' % problem_id, 'z'] = np.array([[2*params['z'][0], 1.0]])
        J['y1_%i' % problem_id, 'x'] = 1.0

        return J


class SellarDis2(Component):
    """Component containing Discipline 2."""

    def __init__(self, problem_id=0):
        super(SellarDis2, self).__init__()

        self.problem_id = problem_id

        # Global Design Variable
        self.add_param('z', val=np.zeros(2))

        # Coupling parameter
        self.add_param('y1_%i' % problem_id, val=1.0)

        # Coupling output
        self.add_output('y2_%i' % problem_id, val=1.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluates the equation
        y2 = y1**(.5) + z1 + z2"""

        problem_id = self.problem_id

        z1 = params['z'][0]
        z2 = params['z'][1]
        y1 = params['y1_%i' % problem_id]

        # Note: this may cause some issues. However, y1 is constrained to be
        # above 3.16, so let's just let it converge; the optimizer will
        # throw out infeasible points
        y1 = abs(y1)

        unknowns['y2_%i' % problem_id] = y1**.5 + z1 + z2

    def linearize(self, params, unknowns, resids):
        """ Jacobian for Sellar discipline 2."""

        problem_id = self.problem_id

        J = {}

        J['y2_%i' % problem_id, 'y1_%i' % problem_id] = .5*params['y1_%i' % problem_id]**-.5
        J['y2_%i' % problem_id, 'z'] = np.array([[1.0, 1.0]])

        return J


class SellarDerivativesSubGroup(Group):

    def __init__(self, problem_id=0, datasize=0):
        super(SellarDerivativesSubGroup, self).__init__()

        self.add('d1', SellarDis1(problem_id=problem_id, datasize=datasize), promotes=['*'])
        self.add('d2', SellarDis2(problem_id=problem_id), promotes=['*'])

        self.nl_solver = NLGaussSeidel()
        self.nl_solver.options['atol'] = 1.0e-12

        self.ln_solver = ScipyGMRES()


class SellarDerivatives(Group):
    """ Group containing the Sellar MDA. This version uses the disciplines
    with derivatives."""

    def __init__(self, problem_id=0, datasize=0):
        super(SellarDerivatives, self).__init__()

        self.add('d', SellarDerivativesSubGroup(problem_id=problem_id, datasize=datasize), promotes=['*'])


class SellarDerivativesSuperGroup(Group):

    def __init__(self, nProblems=0, datasize=0):

        super(SellarDerivativesSuperGroup, self).__init__()

        self.add('px', IndepVarComp('x', 1.0), promotes=['*'])
        self.add('pz', IndepVarComp('z', np.array([5.0, 2.0])), promotes=['*'])
        # self.add('vt', MyIndepVarComp(datasize=datasize), promotes=['*'])
        # self.add('vt', IndepVarComp('varTree:leaf1', val=np.zeros(datasize), pass_by_obj=True), promotes=['*'])
        addVariableTree(self, datasize=datasize)

        pg = self.add('manySellars', ParallelGroup(), promotes=['*'])
        print(nProblems)
        for problem_id in np.arange(0, nProblems):
            pg.add('Sellar%i' % problem_id, SellarDerivatives(problem_id=problem_id, datasize=datasize), promotes=['*'])

        self.add('obj_cmp', ExecComp('obj = (x**2 + z[1] + y1_0 + exp(-y2_0)) + (x**2 + z[1] + y1_1 + exp(-y2_1)) + '
                                     '(x**2 + z[1] + y1_2 + exp(-y2_2)) + (x**2 + z[1] + y1_3 + exp(-y2_3))',
                                     z=np.array([0.0, 0.0]), x=0.0,
                                     y1_0=0.0, y2_0=0.0,
                                     y1_1=0.0, y2_1=0.0,
                                     y1_2=0.0, y2_2=0.0,
                                     y1_3=0.0, y2_3=0.0),
                 promotes=['*'])

        self.add('con_cmp1', ExecComp('con1 = 3.16 - y1_0'), promotes=['*'])
        self.add('con_cmp2', ExecComp('con2 = y2_0 - 24.0'), promotes=['*'])


def addVariableTree(openmdao_class, datasize=0):

    openmdao_class.add('vt', IndepVarComp('varTree:leaf1', val=np.zeros(datasize), pass_by_obj=True), promotes=['*'])
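
With that IndepVarComp source in place, the run script from the question is unchanged: the pass-by-object parameter can still be edited between setup and run.

top.setup(check=True)

# edit the constant model parameters here, before the run
top['varTree:leaf1'] = np.ones(datasize)

top.run()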
jthomas