Different optimal results of SCE-UA optimization algorithm in Julia and MATLAB

I posted this question on Stack Overflow, but some people suggested that I post it here.

I rewrote the SCE-UA optimization algorithm in Julia.

However, when I run my script in Julia, the optimized value is quite different from the result in MATLAB. The global best cost in MATLAB is 2.4598e-63, while the global best cost in Julia is 8.264629809290885e-8. This suggests something went wrong. The objective function is z = sum(x.^2) for both scripts, and the search range and algorithm parameters are also the same. I have checked again and again to make sure the two implementations match, but I cannot find out why the results differ so much.

Could someone please check whether these two scripts behave differently? Any suggestions would be appreciated — thanks a lot.

The SCE-UA code in MATLAB is written by Yarpiz, which can be found in https://github.com/smkalami/ypea110-shuffled-complex-evolution/archive/master.zip

The code in Julia is as follows

using UnPack
using Plots
using StatsBase

# A single candidate solution: its position in the search space and the
# objective-function value (cost) evaluated at that position.
# Mutable because CCE overwrites `position`/`cost` in place during evolution.
mutable struct Pop
    position::Vector{Float64}
    cost::Float64
end

# CCE (Competitive Complex Evolution) parameters.
# Parametrized on the objective-function type F so the field is concretely
# typed (an untyped field is `Any` and defeats specialization); the positional
# constructor `CCE_params(q, alpha, beta, lb, ub, f)` is unchanged.
mutable struct CCE_params{F}
    q::Int64                 # number of parents (simplex size)
    alpha::Int64             # offspring generated per simplex
    beta::Int64              # evolution steps per CCE call
    lb::Vector{Float64}      # lower bounds of the search space
    ub::Vector{Float64}      # upper bounds of the search space
    obj_func::F              # objective function to minimize
end

# SCE (Shuffled Complex Evolution) parameters.
# Parametrized on the objective-function type F so the field is concretely
# typed; the positional constructor call is unchanged.
mutable struct SCE_params{F}
    max_iter::Int64          # number of shuffling iterations
    n_complex::Int64         # number of complexes
    n_complex_pop::Int64     # population size per complex
    dim::Int64               # problem dimensionality
    lb::Vector{Float64}      # lower bounds of the search space
    ub::Vector{Float64}      # upper bounds of the search space
    obj_func::F              # objective function to minimize
end

# Order Pop values by cost so that sort!/sortperm rank populations
# best (lowest cost) first.
import Base.isless
isless(a::Pop, b::Pop) = isless(a.cost, b.cost)

"""
    cost_func(x)

Sphere benchmark function: f(x) = Σ xᵢ². Global minimum 0 at the origin.
"""
function cost_func(x)
    # sum(abs2, x) avoids allocating the temporary array x.^2
    return sum(abs2, x)
end

"""
    uniform_rand(lb, ub)

Return a random point drawn uniformly from the axis-aligned box `[lb, ub)`.
Accepts any real-valued vectors (previously over-constrained to
`Array{Float64,1}`); throws `DimensionMismatch` when the bounds disagree
in length.
"""
function uniform_rand(lb::AbstractVector{<:Real}, ub::AbstractVector{<:Real})
    length(lb) == length(ub) ||
        throw(DimensionMismatch("lb and ub must have equal length"))
    return rand(length(lb)) .* (ub .- lb) .+ lb
end
# SCE parameters
max_iter = 500                  # number of shuffling iterations
n_complex = 5                   # number of complexes
n_complex_pop = 10              # requested population size per complex
dim = 10                        # problem dimensionality
lb = ones(dim) * -10            # lower search bounds
ub = ones(dim) * 10             # upper search bounds
obj_func = cost_func
n_complex_pop = max(n_complex_pop, dim+1) # Nelder-Mead Standard
sce_params = SCE_params(max_iter, n_complex, n_complex_pop, dim, lb, ub, obj_func)

# CCE parameters
cce_q = max(round(Int64, 0.5*n_complex_pop), 2)  # parents per simplex
cce_alpha = 3                                    # offspring per simplex
cce_beta = 5                                     # evolution steps per CCE call

cce_params = CCE_params(cce_q, cce_alpha, cce_beta, lb, ub, obj_func)

"""
    SCE(sce_params, cce_params)

Run the Shuffled Complex Evolution (SCE-UA) algorithm and return the vector
of best costs per iteration. The original translation kept MATLAB's
parenthesis indexing (`pops(1)`, `complex(:,j)`, `best_costs(iter) = …`),
which in Julia is a function call / function definition, not indexing —
fixed here with bracket indexing throughout. `pops = ()` (an immutable
tuple) is replaced with a typed vector so `push!` works.
"""
function SCE(sce_params, cce_params)
    @unpack max_iter, n_complex, n_complex_pop, dim, lb, ub, obj_func = sce_params

    n_pop = n_complex * n_complex_pop
    # idx[j, :] holds the population indices assigned to complex j.
    idx = reshape(1:n_pop, n_complex, :)

    best_costs = Vector{Float64}(undef, max_iter)

    # Step 1. Generate the initial random sample.
    pops = Pop[]
    for _ in 1:n_pop
        position = uniform_rand(lb, ub)
        push!(pops, Pop(position, obj_func(position)))
    end
    # `complexes` (not `complex`, which shadows Base.complex).
    complexes = Array{Pop}(undef, n_complex_pop, n_complex)

    # Step 2. Rank points (ascending cost; see isless(::Pop, ::Pop)).
    sort!(pops)
    best_pop = pops[1]

    # Main loop
    for iter in 1:max_iter
        # Step 3. Partition into complexes and Step 4. evolve each via CCE.
        for j in 1:n_complex
            complexes[:, j] = deepcopy(pops[idx[j, :]])
            complexes[:, j] = CCE(complexes[:, j], cce_params)
            pops[idx[j, :]] = deepcopy(complexes[:, j])
        end

        # Step 5. Shuffle complexes by re-ranking the merged population.
        sort!(pops)
        best_pop = pops[1]
        best_costs[iter] = best_pop.cost

        # Show iteration information
        println("Iter = ", iter)
        println("The Best Cost is: ", best_costs[iter])
    end
    return best_costs
end

"""
    rand_sample(P, q)

Draw `q` indices (with replacement) from `1:length(P)`, weighted by the
selection probabilities `P`. The original `L(i) = sample(…)` defined a
function named `L` instead of assigning into the vector — fixed with
bracket indexing. `Weights` requires an `AbstractVector`, so `P` is
collected once up front (it may arrive as a generator); hoisting the
weight construction out of the loop also avoids rebuilding it per draw.
"""
function rand_sample(P, q)
    w = Weights(collect(P))
    L = Vector{Int64}(undef, q)
    for i in 1:q
        L[i] = sample(1:length(w), w)
    end
    return L
end

# Return true when `position` falls on or outside the open box (lb, ub),
# i.e. any coordinate is at/below its lower bound or at/above its upper bound.
function not_in_search_space(position, lb, ub)
    below_lower = any(position .<= lb)
    above_upper = any(position .>= ub)
    return below_lower || above_upper
end

"""
    CCE(complex_pops, cce_params)

Competitive Complex Evolution: evolve one complex for `beta` steps, each step
selecting a `q`-point simplex and generating `alpha` offspring via
reflection / contraction / random mutation. Returns the evolved complex.

Fixes versus the original translation:
- MATLAB parenthesis indexing (`B(end)`, `complex_pops(L)`, `L(:) = …`)
  replaced with Julia bracket indexing.
- `P` was a generator; `Weights` (inside `rand_sample`) needs a vector,
  so it is built as a comprehension.
- `B = complex_pops[L]` would alias the mutable `Pop`s (MATLAB copies by
  value); `deepcopy` restores MATLAB semantics — with sampling-with-
  replacement `L` may contain duplicates, and aliased mutation then
  corrupts the simplex. This is a likely cause of the divergent results.
"""
function CCE(complex_pops, cce_params)
    # Step 1. Initialize
    @unpack q, alpha, beta, lb, ub, obj_func = cce_params
    n_pop = length(complex_pops)

    # Step 2. Triangular selection probabilities: best-ranked point most likely.
    P = [2 * (n_pop + 1 - i) / (n_pop * (n_pop + 1)) for i in 1:n_pop]

    # Calculate population range (smallest hypercube containing the complex).
    # copy: don't alias the first pop's position vector.
    new_lb = copy(complex_pops[1].position)
    new_ub = copy(complex_pops[1].position)
    for i in 2:n_pop
        new_lb = min.(new_lb, complex_pops[i].position)
        new_ub = max.(new_ub, complex_pops[i].position)
    end

    # CCE main loop
    for it in 1:beta
        # Step 3. Select q parents; deepcopy for MATLAB-style value semantics.
        L = rand_sample(P, q)
        B = deepcopy(complex_pops[L])

        # Step 4. Generate offspring
        for k in 1:alpha
            # a) Sort simplex by cost, keeping the index vector L aligned.
            order = sortperm(B)
            sort!(B)
            L = L[order]

            # Centroid of all simplex points except the worst.
            g = zeros(length(lb))
            for i in 1:q-1
                g .+= B[i].position
            end
            g ./= (q - 1)

            # b) Reflection step: mirror the worst point through the centroid.
            reflection = deepcopy(B[end])
            reflection.position = 2 .* g .- B[end].position
            if not_in_search_space(reflection.position, lb, ub)
                # Outside the feasible box: resample within the hypercube.
                reflection.position = uniform_rand(new_lb, new_ub)
            end
            reflection.cost = obj_func(reflection.position)

            if reflection.cost < B[end].cost
                B[end] = reflection
            else
                # c) Contraction step: move the worst point toward the centroid.
                contraction = deepcopy(B[end])
                contraction.position = (g .+ B[end].position) ./ 2
                contraction.cost = obj_func(contraction.position)

                if contraction.cost < B[end].cost
                    B[end] = contraction
                else
                    # d) Mutation: replace the worst point with a random one.
                    B[end].position = uniform_rand(new_lb, new_ub)
                    B[end].cost = obj_func(B[end].position)
                end
            end
        end

        # Return the evolved simplex to the complex.
        complex_pops[L] = B
    end
    return complex_pops
end
# Run the optimizer and record the per-iteration best costs.
best_costs = SCE(sce_params, cce_params)

# Convergence curve on a log scale (matches the MATLAB plot for comparison).
plot(best_costs, yaxis=:log, label = "cost")

# savefig("Julia.png")

Both scripts could run normally with just one click.

The result of SCE-UA algorithm from MATLAB is as follows
enter image description here

The result from Julia is as follows

enter image description here

Obviously, the convergence curves are quite different. The result from Julia is not accurate enough, but I don’t know why.