ReducedFunctional doesn't like InitialConditionParameter("name")
| Affects | Status | Importance | Assigned to | Milestone |
|---|---|---|---|---|
| dolfin-adjoint | New | High | Simon Funke | |
Bug Description
[pef@aislinn:
=== modified file 'tests/optimization/optimization.py'
--- tests/optimization/optimization.py
+++ tests/optimization/optimization.py
@@ -54,7 +54,7 @@
     lb = project(...)
     # Define the reduced functional
-    reduced_functional = ReducedFunctional(J, InitialConditionParameter(u))
+    reduced_functional = ReducedFunctional(J, InitialConditionParameter("Velocity"))
# Run the optimisation problem with gradient tests and L-BFGS-B
u_opt = minimize(
[pef@aislinn:
Traceback (most recent call last):
  File "optimization.py", line 60, in <module>
    u_opt = minimize(reduced_functional, ...)
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/optimization.py", line ..., in minimize
    return optimization_algorithms_dict[algorithm][1](...)
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/optimization.py", line ..., in minimize_scipy_fmin_l_bfgs_b
    m_global = get_global(m)
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/utils.py", line ..., in get_global
    raise TypeError, 'Unknown parameter type %s.' % str(type(m))
TypeError: Unknown parameter type <type 'str'>.
Making the change

=== modified file 'dolfin_adjoint/parameter.py'
--- dolfin_adjoint/parameter.py 2012-08-01 14:43:02 +0000
+++ dolfin_adjoint/parameter.py 2012-10-08 14:54:16 +0000
@@ -60,7 +60,10 @@
         return None

     def data(self):
-        return self.coeff
+        if isinstance(self.coeff, str):
+            return adjglobals.adjointer.get_variable_value(self.var).data
+        else:
+            return self.coeff
class ScalarParameter (DolfinAdjointP arameter) :
'''This Parameter is used as input to the tangent linear model (TLM)
helps -- it now gets a little further:
RUNNING THE L-BFGS-B CODE
* * *
Machine precision = 2.220D-16
N = 21 M = 10
At X0 1 variables are exactly at the bounds

Traceback (most recent call last):
  File "optimization.py", line 60, in <module>
    u_opt = minimize(reduced_functional, algorithm = 'scipy.l_bfgs_b', pgtol=1e-6, factr=1e5, bounds = (lb, 1), iprint = 1)
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/optimization.py", line 236, in minimize
    return optimization_algorithms_dict[algorithm][1](reduced_func_array, reduced_func_deriv_array, [p.data() for p in reduced_func.parameter], **kwargs)
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/optimization.py", line 115, in minimize_scipy_fmin_l_bfgs_b
    mopt, f, d = fmin_l_bfgs_b(J, m_global, fprime = dJ, bounds = bounds, **kwargs)
  File "/usr/lib/python2.7/dist-packages/scipy/optimize/lbfgsb.py", line 196, in fmin_l_bfgs_b
    f, g = func_and_grad(x)
  File "/usr/lib/python2.7/dist-packages/scipy/optimize/lbfgsb.py", line 152, in func_and_grad
    g = fprime(x, *args)
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/optimization.py", line 213, in reduced_func_deriv_array
    seed = dolfin.parameters["optimization"]["test_gradient_seed"])
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/utils.py", line 455, in test_gradient_array
    j_direct = J(x)
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/optimization.py", line 229, in reduced_func_array
    m = [p.data() for p in reduced_func.parameter]
  File "/home/pef/src/dolfin-adjoint/dolfin_adjoint/parameter.py", line 64, in data
    return adjglobals.adjointer.get_variable_value(self.var).data
  File "/usr/lib/python2.7/dist-packages/libadjoint/libadjoint.py", line 1429, in get_variable_value
    clib.adj_get_variable_value(self.adjointer, var.var, vec)
  File "/usr/lib/python2.7/dist-packages/libadjoint/libadjoint.py", line 18, in handle_error
    raise exception, errstr
libadjoint.exceptions.LibadjointErrorNeedValue: Need a value for Velocity:0:0:Forward, but don't have one recorded.
If you apply the following patch (which you shouldn't), this fixes it:

=== modified file 'dolfin_adjoint/optimization.py'
--- dolfin_adjoint/optimization.py 2012-09-20 15:40:13 +0000
+++ dolfin_adjoint/optimization.py 2012-10-08 14:55:59 +0000
@@ -204,7 +204,7 @@
         if (m_array != get_global(m)).any():
             reduced_func_array(m_array)
-            dJdm = utils.compute_gradient(reduced_func.functional, reduced_func.parameter)
+            dJdm = utils.compute_gradient(reduced_func.functional, reduced_func.parameter, forget=False)