-
Notifications
You must be signed in to change notification settings - Fork 0
/
optim_funcs.f90
73 lines (55 loc) · 2.13 KB
/
optim_funcs.f90
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
! Functions for the SGRA (sequential gradient-restoration algorithm).
! f: function(x), objective function to be optimized - file function.f90
! phi, dimension(q): function(x), constraints - file constraints.f90
module optim_funcs
  implicit none
  ! NOTE(review): the common block /sgra/ is kept for compatibility with
  ! other program units that may reference it directly; once every user
  ! accesses these through `use optim_funcs`, the common statement can be
  ! dropped in favor of plain module variables.
  common /sgra/ n, q, maxiter, e1, e2, theta2, theta3
  integer :: n, q, maxiter                      ! problem dimension, number of constraints, iteration cap
  double precision :: e1, e2, theta2, theta3    ! tolerances; e2 also serves as the finite-difference step
contains
  include 'function.f90'
  include 'constraints.f90'

  ! Gradient of the objective f at x, approximated by central finite
  ! differences with step e2 along each coordinate direction:
  !   g(i) = (f(x + e2*e_i) - f(x - e2*e_i)) / (2*e2)
  function grad_f(x) result(g)
    double precision, intent(in) :: x(n)
    double precision :: g(n)
    double precision :: xm(n), xp(n)
    integer :: i
    do i = 1, n
      xm = x
      xp = x
      xm(i) = x(i) - e2
      xp(i) = x(i) + e2
      g(i) = (f(xp) - f(xm)) / (2.0d0*e2)
    end do
  end function grad_f

  ! Finite-difference derivative matrix of the constraints phi at x,
  ! laid out as gp(i,j) = d(phi_j)/d(x_i), the shape expected by the
  ! matmul in grad_f_aug below.
  function grad_phi(x) result(gp)
    double precision, intent(in) :: x(n)
    double precision :: gp(n,q)
    double precision :: xm(n), xp(n), ym(q), yp(q)
    integer :: i
    do i = 1, n
      xm = x
      xp = x
      xm(i) = x(i) - e2
      xp(i) = x(i) + e2
      ym = phi(xm)
      yp = phi(xp)
      ! whole-array assignment replaces the original per-component inner loop
      gp(i,:) = (yp - ym) / (2.0d0*e2)
    end do
  end function grad_phi

  ! Augmented (Lagrangian-like) function used to compute the gradient
  ! step size: f_aug(x, lambda) = f(x) + lambda . phi(x)
  double precision function f_aug(x, lambda)
    double precision, intent(in) :: x(n), lambda(q)
    f_aug = f(x) + dot_product(lambda, phi(x))
  end function f_aug

  ! Gradient of the augmented function with respect to x:
  !   grad f(x) + grad_phi(x) * lambda   (an n-vector)
  function grad_f_aug(x, lambda) result(g)
    double precision, intent(in) :: x(n), lambda(q)
    double precision :: g(n)
    g = grad_f(x) + matmul(grad_phi(x), lambda)
  end function grad_f_aug
end module optim_funcs