MA experiments, Genetic weights, MomentumTimeline updates #47

Merged: 20 commits merged into main from feature/ma_experiments on Aug 23, 2023
Changes from 18 commits

Commits (20)

97add4c  Fixed MomentumTimeline giving very bad results comparing to JustInTim… (StannisMod, Jul 6, 2023)
7fabb60  Algorithms efficiency for genetic comparison (StannisMod, Jul 6, 2023)
e21ca99  Merge branch 'main' into feature/ma_experiments (StannisMod, Jul 7, 2023)
4b4bd4f  Made MA experiments with heuristic algorithms (StannisMod, Jul 7, 2023)
ce55f18  Integrated genetic first population weighting (StannisMod, Jul 10, 2023)
6d4c662  Probably fixed problems with negative resource deliveries (StannisMod, Jul 10, 2023)
f02dc46  Refactored Genetic and it's tests (StannisMod, Jul 10, 2023)
16bc888  Added genetic initialization experiment (StannisMod, Jul 10, 2023)
ba7e0bb  Added genetic initialization experiment (StannisMod, Jul 10, 2023)
6e9c227  Merge remote-tracking branch 'origin/feature/ma_experiments' into fea… (StannisMod, Jul 10, 2023)
68bd9c2  Added init module (StannisMod, Jul 11, 2023)
82cf88a  Merge branch 'main' into feature/ma_experiments (StannisMod, Jul 11, 2023)
f0c39b8  Made experiments (StannisMod, Jul 11, 2023)
4299abb  Made experiments (StannisMod, Jul 28, 2023)
ffbfbff  Merge branch 'main' into feature/ma_experiments (StannisMod, Aug 23, 2023)
a75bbf5  Fixed merge issues (StannisMod, Aug 23, 2023)
d354ab0  Fixed merge issues (StannisMod, Aug 23, 2023)
7a477d8  Merge remote-tracking branch 'origin/feature/ma_experiments' into fea… (StannisMod, Aug 23, 2023)
d9e61b3  Merge remote-tracking branch 'origin/feature/ma_experiments' into fea… (StannisMod, Aug 23, 2023)
3f7d743  Merge remote-tracking branch 'origin/feature/ma_experiments' into fea… (StannisMod, Aug 23, 2023)
16 changes: 7 additions & 9 deletions examples/field_development/field_development_scheduling.py
@@ -1,18 +1,16 @@
from matplotlib import pyplot as plt

from sampo.utilities.schedule import remove_service_tasks
import warnings

from sampo.utilities.visualization.base import VisualizationMode
from matplotlib import pyplot as plt

from sampo.generator import get_contractor_by_wg # Warning!!! sampo~=0.1.1.77
from sampo.generator.environment.contractor_by_wg import get_contractor_by_wg
from sampo.scheduler.heft.base import HEFTScheduler
from sampo.schemas.graph import WorkGraph
from sampo.schemas.contractor import Contractor
from sampo.schemas.graph import WorkGraph
from sampo.structurator.base import graph_restructuring
from sampo.utilities.visualization.work_graph import work_graph_fig
from sampo.utilities.schedule import remove_service_tasks
from sampo.utilities.visualization.base import VisualizationMode
from sampo.utilities.visualization.schedule import schedule_gant_chart_fig

import warnings
from sampo.utilities.visualization.work_graph import work_graph_fig

warnings.filterwarnings("ignore") # for matplotlib warning suppression

4 changes: 2 additions & 2 deletions experiments/algorithms_2_multi_agency.py
@@ -38,7 +38,7 @@ def log(message: str, logfile: IO):
with open(f'algorithms_2_multi_agency_comparison.txt', 'w') as logfile:
logger = partial(log, logfile=logfile)

bg = generate_block_graph(SyntheticBlockGraphType.RANDOM, 10, [1, 1, 1], lambda x: (30, 50), 0.5,
bg = generate_block_graph(SyntheticBlockGraphType.RANDOM, 10, [0, 1, 1], lambda x: (None, 50), 0.5,
rand, obstruction_getter, 2, [3, 4], [3, 4], logger=logger)
conjuncted = bg.to_work_graph()

@@ -57,4 +57,4 @@ def log(message: str, logfile: IO):

schedule = best_algo.schedule(conjuncted, contractors)

print(f'Best algo res res: {schedule.execution_time}')
print(f'Best algo res: {schedule.execution_time}')
69 changes: 69 additions & 0 deletions experiments/algorithms_efficiency.py
@@ -0,0 +1,69 @@
from functools import partial
from random import Random
from typing import IO

from pathos.multiprocessing import ProcessingPool

from sampo.generator import SimpleSynthetic
from sampo.scheduler.heft.base import HEFTBetweenScheduler
from sampo.scheduler.heft.base import HEFTScheduler
from sampo.scheduler.multi_agency.block_generator import SyntheticBlockGraphType, generate_block_graph
from sampo.scheduler.multi_agency.multi_agency import Agent, Manager
from sampo.scheduler.topological.base import TopologicalScheduler

r_seed = Random().randint(0, 100000)
p_rand = SimpleSynthetic(rand=r_seed)
rand = Random(r_seed)


def obstruction_getter(i: int):
return None
# return OneInsertObstruction.from_static_graph(0.5, rand, p_rand.work_graph(SyntheticGraphType.SEQUENTIAL, 10))


def log(message: str, logfile: IO):
# print(message)
logfile.write(message + '\n')


def run_iteration(args):
iteration = args[0]
schedulers = [HEFTScheduler(), HEFTBetweenScheduler(), TopologicalScheduler()]

blocks_received = {str(scheduler): 0 for scheduler in schedulers}
for i in range(1, 5):
with open(f'algorithms_efficiency_block_size_{50 * i}_iteration_{iteration}.txt', 'w') as logfile:
logger = partial(log, logfile=logfile)
logger(f'block_size ~ {50 * i}')

for graph_type in SyntheticBlockGraphType:
contractors = [p_rand.contractor(10) for _ in range(len(schedulers))]

agents = [Agent(f'Agent {i}', schedulers[i % len(schedulers)], [contractor])
for i, contractor in enumerate(contractors)]
manager = Manager(agents)

bg = generate_block_graph(graph_type, 10, [1, 1, 1], lambda x: (None, 50 * i), 0.5,
rand, obstruction_getter, 2, [3, 4], [3, 4], logger=logger)

scheduled_blocks = manager.manage_blocks(bg, logger=logger)

# aggregate statistics
for sblock in scheduled_blocks.values():
blocks_received[str(sblock.agent.scheduler)] += 1

# downtimes
logger(' '.join([str(agent.downtime.value) for agent in agents]))

logger('')

print('Received blocks statistics:')
for scheduler, blocks in blocks_received.items():
print(f'{scheduler} {blocks}')


if __name__ == '__main__':
pool = ProcessingPool(10)
args = [[i] for i in range(10)]

pool.map(run_iteration, args)
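A minimal single-process sketch (not part of this PR) for trying one iteration of the experiment above without the 10-worker pool; it calls run_iteration directly and writes the same algorithms_efficiency_block_size_{50,100,150,200}_iteration_0.txt logs:

# run one iteration directly, bypassing ProcessingPool
run_iteration([0])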
2 changes: 1 addition & 1 deletion experiments/genetic_2_multi_agency.py
@@ -30,7 +30,7 @@ def obstruction_getter(i: int):
for i, contractor in enumerate(contractors)]
manager = Manager(agents)

bg = generate_block_graph(SyntheticBlockGraphType.Sequential, 100, [1, 0, 0], lambda x: (30, 50), 0.2,
bg = generate_block_graph(SyntheticBlockGraphType.SEQUENTIAL, 100, [1, 0, 0], lambda x: (None, 50), 0.2,
rand, obstruction_getter, 2, [3, 4] * 1, [3, 4] * 1, logger=print)

conjuncted = bg.to_work_graph()
61 changes: 61 additions & 0 deletions experiments/genetic_init.py
@@ -0,0 +1,61 @@
from pathos.multiprocessing import ProcessingPool

from sampo.generator.base import SimpleSynthetic
from sampo.generator.environment.contractor_by_wg import get_contractor_by_wg
from sampo.scheduler.genetic.base import GeneticScheduler
from sampo.schemas.time import Time


def run_test(args) -> list[tuple[Time, Time]]:
graph_size, iterations = args

result = []
for i in range(iterations):
ss = SimpleSynthetic()
wg = ss.work_graph(top_border=graph_size)
contractors = [get_contractor_by_wg(wg)]

baseline_genetic = GeneticScheduler(mutate_order=1.0,
mutate_resources=1.0,
size_selection=200,
size_of_population=200)

optimized_genetic = GeneticScheduler(mutate_order=1.0,
mutate_resources=1.0,
size_selection=200,
size_of_population=200)
optimized_genetic.set_weights([14, 11, 1, 1, 1, 1, 10])

baseline_result = baseline_genetic.schedule(wg, contractors)
my_result = optimized_genetic.schedule(wg, contractors)

result.append((baseline_result.execution_time, my_result.execution_time))

return result


if __name__ == '__main__':
num_iterations = 50

sizes = [200 * i for i in range(1, num_iterations + 1)]
# iterations = [5 - i for i in range(1, num_iterations + 1)]
iterations = [10 for i in range(1, num_iterations + 1)]

with ProcessingPool(10) as p:
results_by_size = p.map(run_test, zip(sizes, iterations))

print('-------------------------------------------------------------')
print('-------------------------| Results |-------------------------')
print('-------------------------------------------------------------')
with open('genetic_init_res.txt', 'w') as f:
for graph_size, result_list in zip(sizes, results_by_size):
global_ratio = 0
for baseline_result, my_result in result_list:
ratio = baseline_result / my_result
global_ratio += ratio
global_ratio /= len(result_list)

res_string = f'Size: {graph_size}, upgrade ratio: {global_ratio}'
print(res_string)
f.write(res_string)
f.write('\n')
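For reference, the upgrade ratio written above is the mean of baseline execution time divided by optimized execution time over the iterations at one graph size, so values above 1.0 mean the weighted first population produced shorter schedules. Below is a condensed standalone sketch of that weighted setup, assembled only from the calls in this file; the graph size of 400 is an arbitrary small value for a quick check, and the weight vector is the experimental one from this PR, not a recommended default:

from sampo.generator.base import SimpleSynthetic
from sampo.generator.environment.contractor_by_wg import get_contractor_by_wg
from sampo.scheduler.genetic.base import GeneticScheduler

ss = SimpleSynthetic()
wg = ss.work_graph(top_border=400)        # small synthetic work graph
contractors = [get_contractor_by_wg(wg)]  # contractor sized to the work graph

genetic = GeneticScheduler(mutate_order=1.0, mutate_resources=1.0,
                           size_selection=200, size_of_population=200)
genetic.set_weights([14, 11, 1, 1, 1, 1, 10])  # weighted first population exercised by this PR

print(genetic.schedule(wg, contractors).execution_time)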
116 changes: 61 additions & 55 deletions experiments/modular_examples/comparison_plots.py
@@ -36,9 +36,10 @@


# parsing raw data
def parse_raw_data(mode_index: int, iterations: int, algos: list[str], algo_labels: list[str]):
def parse_raw_data(block_size: int, iterations: int, algos: list[str], algo_labels: list[str]):
for launch_index in range(iterations):
with open(f'algorithms_comparison_block_size_{mode_index}_{launch_index}.txt', 'r') as f:
# with open(f'algorithms_comparison_block_size_{mode_index}_{launch_index}.txt', 'r') as f:
with open(f'algorithms_efficiency_block_size_{block_size}_iteration_{launch_index}.txt', 'r') as f:
mode = None
finished = True
bg_info_read = False
@@ -72,7 +73,7 @@ def parse_raw_data(mode_index: int, iterations: int, algos: list[str], algo_labe
bg_info = line.split(' ')
bg_info_read = True
continue
if i == 10 or (i == 7 and bg_info[0] == 'Queues'):
if i == 10 or (i == 7 and bg_info[0] == 'QUEUES'):
finished = True
downtimes = [int(downtime) for downtime in line.split(' ')]
for algo_ind, algo in enumerate(algos):
@@ -199,11 +200,11 @@ def compare_algos_block_type(title: str, compare_dict: Dict, algo_labels: list[s

# boxplot block type comparison
def boxplot_compare_algos_block_type(title: str, compare_dict: Dict, algo_labels: list[str]):
fig = plt.figure(figsize=(14, 10))
fig = plt.figure(figsize=(15, 5))
fig.suptitle(title, fontsize=32)
for i, freq_by_step in enumerate(compare_dict.items()):
values = np.array([[f for f in freq.values()] for freq in freq_by_step[1].values()])
ax = fig.add_subplot(33 * 10 + i + 1)
ax = fig.add_subplot(13 * 10 + i + 1)
ax.set(title=algo_labels[i])
ax.set_xlabel('Block type')
ax.set_ylabel('Count')
@@ -220,13 +221,11 @@ def boxplot_compare_algos_block_type(title: str, compare_dict: Dict, algo_labels


# all algorithms
# algos = ['HEFTAddEnd', 'HEFTAddBetween', 'Topological',
# 'Genetic[generations=50,size_selection=None,mutate_order=None,mutate_resources=None]']
# algo_labels = ['HEFTAddEnd', 'HEFTAddBetween', 'Topological',
# 'Genetic[\ngenerations=50,size_selection=None,\nmutate_order=None,mutate_resources=None]']
#
algos = ['HEFTAddEnd', 'HEFTAddBetween', 'Topological']
algo_labels = ['HEFTAddEnd', 'HEFTAddBetween', 'Topological']

# parse_raw_data(0, 10, algos, algo_labels)
#

# compare_algos_general('Algorithms block receive count - average', global_algo_frequencies, algos, algo_labels)
# compare_algos_general('Algorithms downtimes - average', global_algo_downtimes, algos, algo_labels)
#
Expand All @@ -238,53 +237,60 @@ def boxplot_compare_algos_block_type(title: str, compare_dict: Dict, algo_labels
# global_algo_block_type_frequencies, algo_labels)

# genetics
algos = ['Genetic[generations=50,size_selection=50,mutate_order=0.5,mutate_resources=0.5]',
'Genetic[generations=50,size_selection=100,mutate_order=0.25,mutate_resources=0.5]',
'Genetic[generations=50,size_selection=100,mutate_order=0.5,mutate_resources=0.75]',
'Genetic[generations=50,size_selection=100,mutate_order=0.75,mutate_resources=0.75]',
'Genetic[generations=50,size_selection=50,mutate_order=0.9,mutate_resources=0.9]',
'Genetic[generations=50,size_selection=100,mutate_order=0.5,mutate_resources=0.5]',
'Genetic[generations=50,size_selection=200,mutate_order=0.25,mutate_resources=0.5]',
'Genetic[generations=50,size_selection=50,mutate_order=0.5,mutate_resources=0.75]',
'Genetic[generations=50,size_selection=100,mutate_order=0.75,mutate_resources=0.75]',
'Genetic[generations=50,size_selection=50,mutate_order=0.5,mutate_resources=0.9]']
# algo_labels = ['first_population=100,\nsize_selection=50,\nmutate_order=0.5,\nmutate_resources=0.5',
# 'first_population=100,\nsize_selection=100,\nmutate_order=0.25,\nmutate_resources=0.5',
# 'first_population=100,\nsize_selection=100,\nmutate_order=0.5,\nmutate_resources=0.75',
# 'first_population=100,\nsize_selection=100,\nmutate_order=0.75,\nmutate_resources=0.75',
# 'first_population=100,\nsize_selection=50,\nmutate_order=0.9\n,mutate_resources=0.9',
# 'first_population=500,\nsize_selection=100,\nmutate_order=0.5,\nmutate_resources=0.5]',
# 'first_population=500,\nsize_selection=200,\nmutate_order=0.25,\nmutate_resources=0.5]',
# 'first_population=500,\nsize_selection=50,\nmutate_order=0.5,\nmutate_resources=0.75]',
# 'first_population=500,\nsize_selection=100,\nmutate_order=0.75,\nmutate_resources=0.75]',
# 'first_population=500,\nsize_selection=50,\nmutate_order=0.5,\nmutate_resources=0.9]']
algo_labels = [str(i) for i in range(1, 10 + 1)]

# clear previous data
global_algo_frequencies.clear()
global_algo_downtimes.clear()
global_algo_bg_type_frequencies.clear()
global_algo_bg_type_downtimes.clear()
global_algo_block_type_frequencies.clear()
global_algo_block_type_downtimes.clear()
launch_algo_frequencies.clear()
launch_algo_downtimes.clear()
launch_algo_bg_type_frequencies.clear()
launch_algo_bg_type_downtimes.clear()
launch_algo_block_type_frequencies.clear()
launch_algo_block_type_downtimes.clear()

parse_raw_data(1, 1, algos, algo_labels)

boxplot_compare_algos_general('Received blocks - genetics comparison', launch_algo_frequencies, algos, algo_labels)
boxplot_compare_algos_general('Downtimes blocks - genetics comparison', launch_algo_downtimes, algos, algo_labels)

boxplot_compare_algos_bg_type('Received blocks - genetics with graph types comparison',
# algos = ['Genetic[generations=50,size_selection=50,mutate_order=0.5,mutate_resources=0.5]',
# 'Genetic[generations=50,size_selection=100,mutate_order=0.25,mutate_resources=0.5]',
# 'Genetic[generations=50,size_selection=100,mutate_order=0.5,mutate_resources=0.75]',
# 'Genetic[generations=50,size_selection=100,mutate_order=0.75,mutate_resources=0.75]',
# 'Genetic[generations=50,size_selection=50,mutate_order=0.9,mutate_resources=0.9]',
# 'Genetic[generations=50,size_selection=100,mutate_order=0.5,mutate_resources=0.5]',
# 'Genetic[generations=50,size_selection=200,mutate_order=0.25,mutate_resources=0.5]',
# 'Genetic[generations=50,size_selection=50,mutate_order=0.5,mutate_resources=0.75]',
# 'Genetic[generations=50,size_selection=100,mutate_order=0.75,mutate_resources=0.75]',
# 'Genetic[generations=50,size_selection=50,mutate_order=0.5,mutate_resources=0.9]']
# # algo_labels = ['first_population=100,\nsize_selection=50,\nmutate_order=0.5,\nmutate_resources=0.5',
# # 'first_population=100,\nsize_selection=100,\nmutate_order=0.25,\nmutate_resources=0.5',
# # 'first_population=100,\nsize_selection=100,\nmutate_order=0.5,\nmutate_resources=0.75',
# # 'first_population=100,\nsize_selection=100,\nmutate_order=0.75,\nmutate_resources=0.75',
# # 'first_population=100,\nsize_selection=50,\nmutate_order=0.9\n,mutate_resources=0.9',
# # 'first_population=500,\nsize_selection=100,\nmutate_order=0.5,\nmutate_resources=0.5]',
# # 'first_population=500,\nsize_selection=200,\nmutate_order=0.25,\nmutate_resources=0.5]',
# # 'first_population=500,\nsize_selection=50,\nmutate_order=0.5,\nmutate_resources=0.75]',
# # 'first_population=500,\nsize_selection=100,\nmutate_order=0.75,\nmutate_resources=0.75]',
# # 'first_population=500,\nsize_selection=50,\nmutate_order=0.5,\nmutate_resources=0.9]']
# algo_labels = [str(i) for i in range(1, 10 + 1)]
#
# # clear previous data
# global_algo_frequencies.clear()
# global_algo_downtimes.clear()
# global_algo_bg_type_frequencies.clear()
# global_algo_bg_type_downtimes.clear()
# global_algo_block_type_frequencies.clear()
# global_algo_block_type_downtimes.clear()
# launch_algo_frequencies.clear()
# launch_algo_downtimes.clear()
# launch_algo_bg_type_frequencies.clear()
# launch_algo_bg_type_downtimes.clear()
# launch_algo_block_type_frequencies.clear()
# launch_algo_block_type_downtimes.clear()
#
parse_raw_data(200, 10, algos, algo_labels)

comparison_class = 'algorithms'

boxplot_compare_algos_general(f'Received blocks - {comparison_class} comparison', launch_algo_frequencies, algos, algo_labels)
boxplot_compare_algos_general(f'Downtimes blocks - {comparison_class} comparison', launch_algo_downtimes, algos, algo_labels)

boxplot_compare_algos_bg_type(f'Received blocks - {comparison_class} with graph types comparison',
launch_algo_bg_type_frequencies, algo_labels)
boxplot_compare_algos_bg_type('Downtimes - genetics with graph types comparison',
boxplot_compare_algos_bg_type(f'Downtimes - {comparison_class} with graph types comparison',
launch_algo_bg_type_downtimes, algo_labels)

boxplot_compare_algos_block_type('Received blocks - genetics with block types comparison',
for v in launch_algo_block_type_frequencies.values():
for vv in v.values():
for block_type in ['GENERAL', 'SEQUENTIAL', 'PARALLEL']:
temp = vv[block_type]

boxplot_compare_algos_block_type(f'Received blocks - {comparison_class} with block types comparison',
launch_algo_block_type_frequencies, algo_labels)

