# (file metadata: 169 lines, 6.0 KiB, Python)
import numpy as np
|
|
|
|
# Path to the problem instance consumed by the __main__ section.
INPUT_FILE = "data/medium.in"

# Number of candidate cluster grids kept in each generation.
POPULATION = 50

# Upper bound on the number of random mutations applied to one grid per
# generation (the actual count is drawn at random up to this value).
MUTATION_AMOUNT = 200  # alternate, much more aggressive setting: 1000000

# Number of generations to run.
ITERATIONS = 100
|
|
|
|
def mutation(entries):
|
|
|
|
def mutation_entry(entry, args):
|
|
HEIGHT, WIDTH, MIN_PIECES, MAX_SIZE, _ = args
|
|
|
|
def expand_hztl(entry, x, y, direction):
|
|
val = entry[y, x]
|
|
ynew = y+direction
|
|
oval = entry[ynew, x]
|
|
# left
|
|
nx = x
|
|
while nx > 0 and val == entry[y, nx-1]:
|
|
entry[ynew, nx] = val
|
|
nx -= 1
|
|
nval = entry[ynew, nx] if nx != x else oval
|
|
entry[ynew, nx] = val
|
|
if nval != 0:
|
|
while nx > 0 and nval == entry[ynew, nx-1]:
|
|
nx -= 1
|
|
entry[ynew, nx] = 0
|
|
# right
|
|
nx = x
|
|
while nx < WIDTH-1 and val == entry[y, nx+1]:
|
|
entry[ynew, nx] = val
|
|
nx += 1
|
|
nval = entry[ynew, nx] if nx != x else oval
|
|
entry[ynew, nx] = val
|
|
if nval != 0:
|
|
while nx < WIDTH-1 and nval == entry[ynew, nx+1]:
|
|
nx += 1
|
|
entry[ynew, nx] = 0
|
|
|
|
def expand_vert(entry, x, y, direction):
|
|
val = entry[y, x]
|
|
xnew = x+direction
|
|
oval = entry[y, xnew]
|
|
# left
|
|
ny = y
|
|
while ny > 0 and val == entry[ny-1, x]:
|
|
entry[ny, xnew] = val
|
|
ny -= 1
|
|
nval = entry[ny, xnew] if ny != y else oval
|
|
entry[ny, xnew] = val
|
|
if nval != 0:
|
|
while ny > 0 and nval == entry[ny-1, xnew]:
|
|
ny -= 1
|
|
entry[ny, xnew] = 0
|
|
# right
|
|
ny = y
|
|
while ny < HEIGHT-1 and val == entry[ny+1, x]:
|
|
entry[ny, xnew] = val
|
|
ny += 1
|
|
nval = entry[ny, xnew] if ny != y else oval
|
|
entry[ny, xnew] = val
|
|
if nval != 0:
|
|
while ny < HEIGHT-1 and nval == entry[ny+1, xnew]:
|
|
ny += 1
|
|
entry[ny, xnew] = 0
|
|
|
|
for _ in range(np.random.random_integers(MUTATION_AMOUNT)):
|
|
y = np.random.random_integers(HEIGHT)-1
|
|
x = np.random.random_integers(WIDTH)-1
|
|
if entry[y, x] == 0: # create new cluser
|
|
entry[y, x] = np.amax(entry)+1
|
|
z = np.random.random()
|
|
if z < 0.25: # expand to top
|
|
if y > 0:
|
|
expand_hztl(entry, x, y, -1)
|
|
elif z < 0.5: # expand to left
|
|
if x > 0:
|
|
expand_vert(entry, x, y, -1)
|
|
elif z < 0.75: # expand to bottom
|
|
if y < HEIGHT-1:
|
|
expand_hztl(entry, x, y, 1)
|
|
else: # expand to right
|
|
if x < WIDTH-1:
|
|
expand_vert(entry, x, y, 1)
|
|
return entry
|
|
|
|
sub_arr, args = entries
|
|
for idx in range(sub_arr.shape[0]):
|
|
sub_arr[idx,:,:] = mutation_entry(sub_arr[idx,:,:], args)
|
|
return sub_arr
|
|
|
|
def get_fitnesses(entries):
    """Score a batch of cluster grids.

    Parameters
    ----------
    entries : tuple
        ``(sub_arr, args)`` where ``sub_arr`` is a 3-D array of cluster
        grids and ``args`` is
        ``(HEIGHT, WIDTH, MIN_PIECES, MAX_SIZE, (data, data_inv))`` with
        ``data`` a 0/1 ingredient mask and ``data_inv = 1 - data``.

    Returns
    -------
    numpy.ndarray
        Shape ``(len(sub_arr), 2)``: column 0 is the "clean" score,
        column 1 the fitness score used for selection.
    """

    def get_fitness(entry, args):
        HEIGHT, WIDTH, MIN_PIECES, MAX_SIZE, (data, data_inv) = args

        def get_fitness_per_cluster(cluster):
            # cluster = [total size, min per-ingredient cell count].
            size = cluster[0]
            a = 0
            b = 0
            # A cluster contributes its size to the clean score only when
            # it respects the size limit and the per-ingredient minimum.
            if size <= MAX_SIZE and cluster[1] >= MIN_PIECES:
                a = size
            size_diff = MAX_SIZE - size
            if size_diff < 0:
                # Oversized: linear penalty.
                b = MAX_SIZE / 8 + size_diff
            elif size_diff > 0:
                # Undersized: exponentially decaying reward.
                b = np.power(3, -size_diff) * MAX_SIZE / 8
            else:
                b = a + 1
            return [a, b]  # a = clean score ; b = fitness score

        # Per cluster id, count covered cells of each ingredient.  Key 0
        # conflates unassigned cells and masked-out cells, so it is skipped.
        mname, mcount = np.unique(entry * data, return_counts=True)
        mdict = dict(zip(mname, mcount))
        tname, tcount = np.unique(entry * data_inv, return_counts=True)
        tdict = dict(zip(tname, tcount))
        pairs = [
            [mdict.get(key, 0), tdict.get(key, 0)]
            for key in (mdict.keys() | tdict.keys())
            if key != 0
        ]
        if not pairs:
            # No clusters at all: zero scores instead of crashing on
            # an empty vstack.
            return np.zeros(2)
        c = np.array(pairs)
        # Rows become [cluster size, min ingredient count].
        c = np.vstack((np.sum(c, axis=1), np.min(c, axis=1))).T
        return np.apply_along_axis(get_fitness_per_cluster, 1, c).sum(axis=0)

    sub_arr, args = entries
    fitnesses = np.zeros((sub_arr.shape[0], 2))
    for idx in range(sub_arr.shape[0]):
        # Evaluate each grid exactly once (the original called
        # get_fitness twice per entry and discarded the first result).
        fitnesses[idx] = get_fitness(sub_arr[idx, :, :], args)
    return fitnesses
|
|
|
|
if __name__ == '__main__':
    # numpy is already imported at module level as np.
    import multiprocessing

    def thread_map(func, array, data=None):
        """Split ``array`` into per-CPU chunks, apply ``func`` to each in a
        process pool, and return the concatenated results."""
        chunks = [
            (sub_arr, (HEIGHT, WIDTH, MIN_PIECES, MAX_SIZE, data))
            for sub_arr in np.array_split(
                array, min(multiprocessing.cpu_count(), len(array))
            )
        ]
        # Context manager terminates the pool even if map() raises.
        with multiprocessing.Pool() as pool:
            individual_results = pool.map(func, chunks)
        return np.concatenate(individual_results)

    # --- input parsing ---------------------------------------------------
    # First line: problem parameters; remaining lines: the grid.
    with open(INPUT_FILE) as fh:  # close the file instead of leaking it
        data = fh.readlines()
    params = list(map(int, data[0].split(" ")))
    HEIGHT = params[0]
    WIDTH = params[1]
    MIN_PIECES = params[2]
    MAX_SIZE = params[3]
    # Grid cells: "T" -> 0, anything else -> 1.
    data = [[0 if x == "T" else 1 for x in line] for line in data[1:]]
    # Drop the last column, produced by each line's trailing newline.
    data = np.array(data)[:, :-1]
    data_inv = 1 - data
    # Start every population member with each cell as its own 1-cell cluster.
    clusters = np.arange(HEIGHT * WIDTH).reshape((1, HEIGHT, WIDTH))
    clusters = np.repeat(clusters, POPULATION, axis=0) + 1

    for iteration in range(ITERATIONS):
        # mutation
        print(iteration, "Mutation")
        clusters = thread_map(mutation, clusters)
        # get fitness
        print(iteration, "Get Fitness")
        fitnesses = thread_map(get_fitnesses, clusters, (data, data_inv))
        max_idx = np.argmax(fitnesses[:, 0])
        print(clusters[max_idx])
        print(iteration, fitnesses[max_idx, 0])
        # selection: sample the next generation with probability
        # proportional to softmax of the fitness score.  Shifting by the
        # max is mathematically a no-op for softmax but prevents overflow
        # in np.exp for large fitness values.
        print(iteration, "Selection")
        z_exp = np.exp(fitnesses[:, 1] - np.max(fitnesses[:, 1]))
        softmax = z_exp / z_exp.sum()
        idx = np.random.choice(POPULATION, POPULATION, p=softmax)
        clusters = clusters[idx, :, :]