Exercises

Practice what we’ve learned so far.

Exercise 1: Using both algorithms

Maximize the function $f(x, y) = -x^2 - y^2 + 6x + 4y$. Use both genetic algorithms and PSO. Compare the results of both approaches.

Press + to interact
import numpy as np
#######################
## GENETIC ALGORITHM ##
#######################
# mutation operation
def mutation(s, Pm):
    """Mutate candidate s gene-by-gene with independent probability Pm.

    Each selected gene is perturbed with standard Gaussian noise.
    Returns a new float array; the input sequence is not modified.
    """
    child = np.array(s, dtype=float)
    for i in range(child.size):
        # per-gene mutation keeps the expected number of changes at Pm * n
        if np.random.rand() < Pm:
            child[i] += np.random.normal(0.0, 1.0)
    return child
# crossover operation
def crossover(p1, p2, Pc):
    """Blend crossover of two parents.

    With probability Pc return a random convex combination of p1 and p2;
    otherwise return a copy of p1 unchanged.
    """
    a = np.asarray(p1, dtype=float)
    b = np.asarray(p2, dtype=float)
    if np.random.rand() < Pc:
        alpha = np.random.rand()
        return alpha * a + (1.0 - alpha) * b
    return a.copy()
# score function
def score(s):
    """Fitness of candidate s = (x, y): the exercise target
    f(x, y) = -x^2 - y^2 + 6x + 4y (maximum 13 at (3, 2)).

    NOTE(review): a second `score` is defined in the PSO section below and
    will shadow this one when both sections share a module — confirm which
    version the GA actually calls at runtime.
    """
    x, y = s
    return -x**2 - y**2 + 6*x + 4*y
# generate the initial population with length N
def generate_initial_population(N):
    """Return N random candidates, each a length-2 array drawn uniformly
    from [-10, 10]^2.

    NOTE(review): shadowed by the PSO-section redefinition later in the
    file — rename one of the two to avoid the collision.
    """
    return [np.random.uniform(-10.0, 10.0, size=2) for _ in range(N)]
# determine the best solution
def best_solution(P, score_func):
    """Return the population member with the highest score (first on ties)."""
    return max(P, key=score_func)
def select_parent(P, score_func):
    """Roulette-wheel (fitness-proportionate) parent selection.

    Scores are shifted to be non-negative before normalizing: the target
    f(x, y) can be negative, and np.random.choice rejects negative
    probabilities (the original raw-score division would crash or bias).
    """
    scores = np.array([score_func(s) for s in P], dtype=float)
    weights = scores - scores.min()
    total = weights.sum()
    if total == 0.0:
        # all candidates score equally: choose uniformly
        probs = np.full(len(P), 1.0 / len(P))
    else:
        probs = weights / total
    # np.random.choice only accepts 1-D arrays, so sample an index
    return P[np.random.choice(len(P), p=probs)]
def genetic_algorithm(Pm, Pc, N, max_iter):
    """Run a generational genetic algorithm.

    Pm / Pc: mutation and crossover probabilities; N: population size;
    max_iter: number of generations (the only stopping criterion).
    Returns (best solution found, its score).
    """
    P = generate_initial_population(N)
    # best-so-far across all generations; renamed so it no longer shadows
    # the module-level best_solution helper, and seeded with -inf so any
    # real score beats the placeholder (the old -1e9 was arbitrary)
    best_overall, best_overall_score = None, float("-inf")
    for _ in range(max_iter):
        # elitist bookkeeping: remember the best candidate ever seen
        best_candidate = best_solution_in_population(P, score)
        candidate_score = score(best_candidate)  # computed once, reused
        if candidate_score > best_overall_score:
            best_overall = best_candidate
            best_overall_score = candidate_score
        # build the next generation
        newP = []
        for _ in range(N):
            p1 = select_parent(P, score)
            p2 = select_parent(P, score)
            c = crossover(p1, p2, Pc)  # recombine with probability Pc
            c = mutation(c, Pm)        # mutate with probability Pm
            newP.append(c)
        P = newP
    # return the best solution and its score
    return best_overall, best_overall_score
#########
## PSO ##
#########
class Particle:
    """One PSO particle: current position, velocity, and personal best.

    score_func receives the particle itself and must return a value to
    MAXIMIZE — consistent with pso(), which tracks the global maximum.
    """
    def __init__(self, initial_value, initial_velocity, score_func):
        self.value = initial_value   # current position
        self.v = initial_velocity    # current velocity
        self.best = initial_value    # personal best position
        self.score = score_func
        self.best_score = score_func(self)

    def move(self):
        """Advance one step along the velocity and update the personal best."""
        # rebind instead of `+=`: in-place add on an array position would
        # mutate the object that `best` still aliases
        self.value = self.value + self.v
        new_score = self.score(self)
        # BUG FIX: was `<`, which tracked the personal MINIMUM while pso()
        # tracks the global MAXIMUM; use `>` so both agree
        if new_score > self.best_score:
            self.best_score = new_score
            self.best = self.value

    def update_velocity(self, global_best, w, wp, wg):
        """Standard PSO update: inertia + cognitive + social terms."""
        self.v = w*self.v \
            + wp*(self.best - self.value) \
            + wg*(global_best - self.value)
# score function
def score(s):
    """Score the target f(x, y) = -x^2 - y^2 + 6x + 4y for PSO.

    Accepts a Particle (reads s.value) or a bare (x, y) pair, so it still
    behaves if it shadows the GA-section `score` of the same name.
    """
    x, y = s.value if hasattr(s, "value") else s
    return -x**2 - y**2 + 6*x + 4*y
def best_solution_in_population(P, score_func):
    """Return the member of P with the highest score (first wins on ties)."""
    return max(P, key=score_func)
def generate_initial_population(N):
    """Create N particles with random positions in [-10, 10]^2 and small
    random velocities in [-1, 1]^2, scored by the module-level `score`.

    NOTE(review): this redefines the GA-section function of the same name;
    if both sections live in one module the GA will call THIS version and
    receive Particle objects it cannot recombine — rename one of them.
    """
    particles = []
    for _ in range(N):
        position = np.random.uniform(-10.0, 10.0, size=2)
        velocity = np.random.uniform(-1.0, 1.0, size=2)
        particles.append(Particle(position, velocity, score))
    return particles
def pso(N, w, wp, wg, max_iter):
    """Particle swarm optimization; returns (best position, best score).

    N: swarm size; w: inertia weight; wp / wg: cognitive and social
    weights; max_iter: number of iterations (the only stopping criterion).
    """
    swarm = generate_initial_population(N)
    # -inf placeholder so any real score becomes the first global best
    # (the old -1e9 sentinel was arbitrary)
    global_best, global_best_score = None, float("-inf")
    for _ in range(max_iter):
        best_candidate = best_solution_in_population(swarm, score)
        candidate_score = score(best_candidate)  # computed once, reused
        if candidate_score > global_best_score:
            global_best = best_candidate.value
            global_best_score = candidate_score
        # steer every particle toward the global best, then step it
        for particle in swarm:
            particle.update_velocity(global_best, w, wp, wg)
            particle.move()
    return global_best, global_best_score
# Experiment with different parameters and add logs to see what happens in the executions
# GA: Pm=0.8 (mutation prob), Pc=0.9 (crossover prob), population of 200, 50 generations
genetic_solution, genetic_sol_score = genetic_algorithm(0.8, 0.9, 200, 50)
# PSO: swarm of 500, inertia w=0.3, cognitive wp=0.6, social wg=0.9, 20 iterations
pso_solution, pso_sol_score = pso(500, 0.3, 0.6, 0.9, 20)

Exercise 2: Assigning tasks

We have a list of tasks $t_1, t_2, \ldots, t_n$ ...