From 42e05ec58fb229fb9942b3127d31ab1b265bd147 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Sat, 5 Oct 2019 15:23:35 +0200 Subject: [PATCH 01/44] Add drea to stp solvers --- stn/config/config.py | 1 + 1 file changed, 1 insertion(+) diff --git a/stn/config/config.py b/stn/config/config.py index 6991859..40b02d7 100644 --- a/stn/config/config.py +++ b/stn/config/config.py @@ -145,4 +145,5 @@ def fpc_algorithm(stn): stp_solver_factory = STPSolverFactory() stp_solver_factory.register_solver('fpc', FullPathConsistency) stp_solver_factory.register_solver('srea', StaticRobustExecution) +stp_solver_factory.register_solver('drea', StaticRobustExecution) stp_solver_factory.register_solver('dsc_lp', DegreeStongControllability) \ No newline at end of file From fef2213a4a214504469bfcd52c164c4105c5ca20 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Sun, 20 Oct 2019 11:59:49 +0200 Subject: [PATCH 02/44] Use G.nodes instead of the deprecated G.node G.node was removed in networkx 2.4 --- stn/pstn/pstn.py | 8 ++++---- stn/stn.py | 16 ++++++++-------- stn/stnu/stnu.py | 8 ++++---- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index 591be60..c279ccd 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -48,7 +48,7 @@ def __str__(self): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: - timepoint = Node.from_dict(self.node[j]['data']) + timepoint = Node.from_dict(self.nodes[j]['data']) lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) @@ -139,15 +139,15 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.node[i]['data']['node_type'] == "navigation": + if self.nodes[i]['data']['node_type'] == "navigation": distribution = self.get_navigation_distribution(i, j) self.add_constraint(i, j, distribution=distribution) - elif self.node[i]['data']['node_type'] == "start": + elif self.nodes[i]['data']['node_type'] == "start": distribution = self.get_task_distribution(task) self.add_constraint(i, j, distribution=distribution) - elif self.node[i]['data']['node_type'] == "finish": + elif self.nodes[i]['data']['node_type'] == "finish": # wait time between finish of one task and start of the next one. 
Fixed to [0, inf] self.add_constraint(i, j) diff --git a/stn/stn.py b/stn/stn.py index 3420b75..8b7afe1 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -37,7 +37,7 @@ def __str__(self): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: - timepoint = Node.from_dict(self.node[j]['data']) + timepoint = Node.from_dict(self.nodes[j]['data']) lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) @@ -206,15 +206,15 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.node[i]['data']['node_type'] == "navigation": + if self.nodes[i]['data']['node_type'] == "navigation": duration = self.get_navigation_duration(i, j) self.add_constraint(i, j, duration) - elif self.node[i]['data']['node_type'] == "start": + elif self.nodes[i]['data']['node_type'] == "start": duration = self.get_task_duration(task) self.add_constraint(i, j, duration) - elif self.node[i]['data']['node_type'] == "finish": + elif self.nodes[i]['data']['node_type'] == "finish": # wait time between finish of one task and start of the next one. Fixed to [0, inf] self.add_constraint(i, j) @@ -290,7 +290,7 @@ def remove_task(self, position=1): self.logger.debug("Constraints: %s", constraints) for (i, j) in constraints: - if self.node[i]['data']['node_type'] == "finish": + if self.nodes[i]['data']['node_type'] == "finish": # wait time between finish of one task and start of the next one self.add_constraint(i, j) @@ -302,7 +302,7 @@ def get_tasks(self): """ tasks = list() for i in self.nodes(): - timepoint = Node.from_dict(self.node[i]['data']) + timepoint = Node.from_dict(self.nodes[i]['data']) if timepoint.node_type == "navigation": tasks.append(timepoint.task_id) @@ -465,7 +465,7 @@ def get_task_id(self, position): navigation_node = 2 * position + (position-2) if self.has_node(navigation_node): - task_id = self.node[navigation_node]['data']['task_id'] + task_id = self.nodes[navigation_node]['data']['task_id'] else: self.logger.error("There is no task in position %s", position) return @@ -497,7 +497,7 @@ def get_task_node_ids(self, task_id): """ node_ids = list() for i in self.nodes(): - if task_id == self.node[i]['data']['task_id']: + if task_id == self.nodes[i]['data']['task_id']: node_ids.append(i) return node_ids diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index 8047899..6e2e980 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -23,7 +23,7 @@ def __str__(self): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: - timepoint = Node.from_dict(self.node[j]['data']) + timepoint = Node.from_dict(self.nodes[j]['data']) lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) @@ -132,15 +132,15 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.node[i]['data']['node_type'] == "navigation": + if self.nodes[i]['data']['node_type'] == "navigation": lower_bound, upper_bound = self.get_navigation_bounded_duration(i, j) self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) - elif self.node[i]['data']['node_type'] == "start": + elif self.nodes[i]['data']['node_type'] == "start": lower_bound, upper_bound = self.get_task_bounded_duration(task) self.add_constraint(i, j, 
lower_bound, upper_bound, is_contingent=True) - elif self.node[i]['data']['node_type'] == "finish": + elif self.nodes[i]['data']['node_type'] == "finish": # wait time between finish of one task and start of the next one. Fixed to [0, inf] self.add_constraint(i, j, 0) From a306b0aa58b95408d1e61985503c31f99c3a7c98 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Wed, 23 Oct 2019 08:37:12 +0200 Subject: [PATCH 03/44] fpc: Update logging message to debug and rephrase content --- stn/methods/fpc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stn/methods/fpc.py b/stn/methods/fpc.py index 45c7370..f91638b 100644 --- a/stn/methods/fpc.py +++ b/stn/methods/fpc.py @@ -17,4 +17,4 @@ def get_minimal_network(stn): minimal_network.update_edges(shortest_path_array) return minimal_network else: - logger.warning("The minimal network is inconsistent") + logger.debug("The minimal network is inconsistent. STP could not be solved") From acbde71047ac255b201a4c3a8b2c874dbef2709b Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Thu, 31 Oct 2019 12:29:05 +0100 Subject: [PATCH 04/44] stp.solve() returns a dispatchable graph with a risk metric Before, the solve method returned a dispatchable graph and its risk metric, now it assigns the risk metric to the dispatchable graph. stn: Add attrs risk_metric, temporal_metric, method compute_temporal_metric exceptions: Add NoSTPSolution exception task: Rename attr id to task_id --- stn/config/config.py | 15 +++++++++++---- stn/exceptions/__init__.py | 0 stn/exceptions/stp.py | 6 ++++++ stn/stn.py | 16 ++++++++++++++-- stn/stp.py | 16 +++++----------- stn/task.py | 4 ++-- 6 files changed, 38 insertions(+), 19 deletions(-) create mode 100644 stn/exceptions/__init__.py create mode 100644 stn/exceptions/stp.py diff --git a/stn/config/config.py b/stn/config/config.py index 40b02d7..5902b94 100644 --- a/stn/config/config.py +++ b/stn/config/config.py @@ -82,7 +82,9 @@ def srea_algorithm(stn): return risk_metric, dispatchable_graph = result - return risk_metric, dispatchable_graph + dispatchable_graph.risk_metric = risk_metric + + return dispatchable_graph class DegreeStongControllability(object): @@ -108,14 +110,16 @@ def dsc_lp_algorithm(stn): stnu = dsc_lp.get_stnu(bounds) - # Returns a schedule because it is an offline approach + # The dispatchable graph is a schedule because it is an offline approach schedule = dsc_lp.get_schedule(bounds) # A strongly controllable STNU has a DSC of 1, i.e., a DSC value of 1 is better. 
We take # 1 − DC to be the risk metric, so that small values are preferable risk_metric = 1 - dsc - return risk_metric, schedule + schedule.risk_metric = risk_metric + + return schedule class FullPathConsistency(object): @@ -134,7 +138,10 @@ def fpc_algorithm(stn): if dispatchable_graph is None: return risk_metric = 1 - return risk_metric, dispatchable_graph + + dispatchable_graph.risk_metric = risk_metric + + return dispatchable_graph stn_factory = STNFactory() diff --git a/stn/exceptions/__init__.py b/stn/exceptions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/stn/exceptions/stp.py b/stn/exceptions/stp.py new file mode 100644 index 0000000..cc07fe5 --- /dev/null +++ b/stn/exceptions/stp.py @@ -0,0 +1,6 @@ +class NoSTPSolution(Exception): + + def __init__(self): + """ Raised when the stp solver cannot produce a solution for the problem + """ + Exception.__init__(self) \ No newline at end of file diff --git a/stn/stn.py b/stn/stn.py index 8b7afe1..e687631 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -30,6 +30,8 @@ def __init__(self): super().__init__() self.add_zero_timepoint() self.max_makespan = MAX_FLOAT + self.risk_metric = None + self.temporal_metric = None def __str__(self): to_print = "" @@ -122,7 +124,7 @@ def add_timepoint(self, id, task, node_type): - finish: time at which the robot finishes executing the task """ pose = self.get_node_pose(task, node_type) - node = Node(task.id, pose, node_type) + node = Node(task.task_id, pose, node_type) self.add_node(id, data=node.to_dict()) def add_task(self, task, position=1): @@ -142,7 +144,7 @@ def add_task(self, task, position=1): Note: Position 0 is reserved for the zero_timepoint Add tasks from postion 1 onwards """ - self.logger.info("Adding task %s in position %s", task.id, position) + self.logger.info("Adding task %s in position %s", task.task_id, position) navigation_node_id = 2 * position + (position-2) start_node_id = navigation_node_id + 1 @@ -371,6 +373,16 @@ def get_edge_weight(self, i, j): else: return float('inf') + def compute_temporal_metric(self, temporal_criterion): + if temporal_criterion == 'completion_time': + self.temporal_metric = self.get_completion_time() + elif temporal_criterion == 'makespan': + self.temporal_metric = self.get_makespan() + elif temporal_criterion == 'idle_time': + self.temporal_metric = self.get_idle_time() + else: + raise ValueError(temporal_criterion) + def get_completion_time(self): nodes = list(self.nodes()) node_first_task = nodes[1] diff --git a/stn/stp.py b/stn/stp.py index 3d142d4..b3426c4 100644 --- a/stn/stp.py +++ b/stn/stp.py @@ -1,5 +1,6 @@ from stn.config.config import stn_factory, stp_solver_factory from stn.methods.fpc import get_minimal_network +from stn.exceptions.stp import NoSTPSolution """ Solves a Simple Temporal Problem (STP) @@ -48,19 +49,12 @@ def get_stn(self, **kwargs): def solve(self, stn): """ Computes the dispatchable graph and risk metric of the given stn """ - result_stp = self.solver.compute_dispatchable_graph(stn) - return result_stp + dispatchable_graph = self.solver.compute_dispatchable_graph(stn) - @staticmethod - def compute_temporal_metric(dispatchable_graph, temporal_criterion): - if temporal_criterion == 'completion_time': - temporal_metric = dispatchable_graph.get_completion_time() - elif temporal_criterion == 'makespan': - temporal_metric = dispatchable_graph.get_makespan() - else: - raise ValueError(temporal_criterion) + if dispatchable_graph is None: + raise NoSTPSolution() - return temporal_metric + return dispatchable_graph 
@staticmethod def is_consistent(stn): diff --git a/stn/task.py b/stn/task.py index dc134ed..836ad56 100644 --- a/stn/task.py +++ b/stn/task.py @@ -1,5 +1,5 @@ class STNTask(object): - def __init__(self, id, + def __init__(self, task_id, r_earliest_navigation_start_time, r_earliest_start_time, r_latest_start_time, @@ -19,7 +19,7 @@ def __init__(self, id, hard_constraints (bool): False if the task can be scheduled ASAP, True if the task is not flexible. Defaults to True """ - self.id = id + self.task_id = task_id self.r_earliest_navigation_start_time = round(r_earliest_navigation_start_time, 2) self.r_earliest_start_time = round(r_earliest_start_time, 2) self.r_latest_start_time = round(r_latest_start_time, 2) From ff31c45ce04aeb8e17ef840394b5f27c3592591a Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Sun, 10 Nov 2019 13:50:37 +0100 Subject: [PATCH 05/44] Fix tests. Use updated STP.solve() method and rename task attr id to task_id --- test/test_dsc.py | 6 ++--- test/test_fpc.py | 2 +- test/test_srea.py | 60 ++++++++++++++++++++++----------------------- test/update_pstn.py | 8 +++--- test/update_stn.py | 8 +++--- test/update_stnu.py | 8 +++--- 6 files changed, 46 insertions(+), 46 deletions(-) diff --git a/test/test_dsc.py b/test/test_dsc.py index 600c63f..2d04e92 100644 --- a/test/test_dsc.py +++ b/test/test_dsc.py @@ -36,9 +36,9 @@ def test_build_stn(self): self.logger.info("STNU: \n %s", self.stn) self.logger.info("Getting Schedule...") - risk_metric, schedule = self.stp.solve(self.stn) + schedule = self.stp.solve(self.stn) - self.logger.info("DSC: %s ", risk_metric) + self.logger.info("DSC: %s ", schedule.risk_metric) self.logger.info("schedule: %s ", schedule) completion_time = schedule.get_completion_time() @@ -51,7 +51,7 @@ def test_build_stn(self): self.assertEqual(makespan, 98) expected_risk_metric = 0.0 - self.assertEqual(risk_metric, expected_risk_metric) + self.assertEqual(schedule.risk_metric, expected_risk_metric) constraints = schedule.get_constraints() diff --git a/test/test_fpc.py b/test/test_fpc.py index 64624c0..725c9c5 100644 --- a/test/test_fpc.py +++ b/test/test_fpc.py @@ -34,7 +34,7 @@ def setUp(self): def test_build_stn(self): self.logger.info("STN: \n %s", self.stn) - metric, minimal_network = self.stp.solve(self.stn) + minimal_network = self.stp.solve(self.stn) self.logger.info("Minimal STN: \n %s", minimal_network) diff --git a/test/test_srea.py b/test/test_srea.py index 1fd0783..10ccdb5 100644 --- a/test/test_srea.py +++ b/test/test_srea.py @@ -38,79 +38,79 @@ def test_build_stn(self): self.logger.info("PSTN: \n %s", self.stn) self.logger.info("Getting GUIDE...") - alpha, guide_stn = self.stp.solve(self.stn) + dispatchable_graph = self.stp.solve(self.stn) self.logger.info("GUIDE") - self.logger.info(guide_stn) - self.logger.info("Alpha: %s ", alpha) + self.logger.info(dispatchable_graph) + self.logger.info("Risk metric: %s ", dispatchable_graph.risk_metric) - completion_time = guide_stn.get_completion_time() - makespan = guide_stn.get_makespan() + completion_time = dispatchable_graph.get_completion_time() + makespan = dispatchable_graph.get_makespan() self.logger.info("Completion time: %s ", completion_time) self.logger.info("Makespan: %s ", makespan) self.assertEqual(completion_time, 60) self.assertEqual(makespan, 97) - expected_alpha = 0.0 - self.assertEqual(alpha, expected_alpha) + expected_risk_metric = 0.0 + self.assertEqual(dispatchable_graph.risk_metric, expected_risk_metric) - constraints = guide_stn.get_constraints() + constraints = 
dispatchable_graph.get_constraints() for (i, j) in constraints: if i == 0 and j == 1: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 37) self.assertEqual(upper_bound, 38) if i == 0 and j == 2: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 41) self.assertEqual(upper_bound, 47) if i == 0 and j == 3: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 42) self.assertEqual(upper_bound, 54) if i == 0 and j == 4: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 92) self.assertEqual(upper_bound, 94) if i == 0 and j == 5: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 96) self.assertEqual(upper_bound, 102) if i == 0 and j == 6: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 97) self.assertEqual(upper_bound, 109) if i == 1 and j == 2: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 0) self.assertEqual(upper_bound, 47) if i == 2 and j == 3: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 0) self.assertEqual(upper_bound, 61) if i == 3 and j == 4: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 0) self.assertEqual(upper_bound, 61) if i == 4 and j == 5: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 0) self.assertEqual(upper_bound, 61) if i == 5 and j == 6: - lower_bound = -guide_stn[j][i]['weight'] - upper_bound = guide_stn[i][j]['weight'] + lower_bound = -dispatchable_graph[j][i]['weight'] + upper_bound = dispatchable_graph[i][j]['weight'] self.assertEqual(lower_bound, 0) self.assertEqual(upper_bound, MAX_FLOAT) diff --git a/test/update_pstn.py b/test/update_pstn.py index 3ebcc5a..083e0e6 100644 --- a/test/update_pstn.py +++ b/test/update_pstn.py @@ -5,7 +5,7 @@ class Task(object): def __init__(self): - self.id = '' + self.task_id = '' self.earliest_start_time = -1 self.latest_start_time = -1 self.start_pose_name = '' @@ -17,7 +17,7 @@ class UpdatePSTN(unittest.TestCase): def setUp(self): task_1 = Task() - task_1.id = 
"616af00-ec3b-4ecd-ae62-c94a3703594c" + task_1.task_id = "616af00-ec3b-4ecd-ae62-c94a3703594c" task_1.r_earliest_navigation_start_time = 0.0 task_1.r_earliest_start_time = 96.0 task_1.r_latest_start_time = 102.0 @@ -25,7 +25,7 @@ def setUp(self): task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" task_2 = Task() - task_2.id = "207cc8da-2f0e-4538-802b-b8f3954df38d" + task_2.task_id = "207cc8da-2f0e-4538-802b-b8f3954df38d" task_2.r_earliest_navigation_start_time = 0.0 task_2.r_earliest_start_time = 71.0 task_2.r_latest_start_time = 76.0 @@ -33,7 +33,7 @@ def setUp(self): task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" task_3 = Task() - task_3.id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" + task_3.task_id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" task_3.r_earliest_navigation_start_time = 0.0 task_3.r_earliest_start_time = 41.0 task_3.r_latest_start_time = 47.0 diff --git a/test/update_stn.py b/test/update_stn.py index f32188a..6438c4e 100644 --- a/test/update_stn.py +++ b/test/update_stn.py @@ -5,7 +5,7 @@ class Task(object): def __init__(self): - self.id = '' + self.task_id = '' self.earliest_start_time = -1 self.latest_start_time = -1 self.start_pose_name = '' @@ -17,7 +17,7 @@ class UpdateSTN(unittest.TestCase): def setUp(self): task_1 = Task() - task_1.id = "616af00-ec3b-4ecd-ae62-c94a3703594c" + task_1.task_id = "616af00-ec3b-4ecd-ae62-c94a3703594c" task_1.r_earliest_navigation_start_time = 0.0 task_1.r_earliest_start_time = 96.0 task_1.r_latest_start_time = 102.0 @@ -25,7 +25,7 @@ def setUp(self): task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" task_2 = Task() - task_2.id = "207cc8da-2f0e-4538-802b-b8f3954df38d" + task_2.task_id = "207cc8da-2f0e-4538-802b-b8f3954df38d" task_2.r_earliest_navigation_start_time = 0.0 task_2.r_earliest_start_time = 71.0 task_2.r_latest_start_time = 76.0 @@ -33,7 +33,7 @@ def setUp(self): task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" task_3 = Task() - task_3.id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" + task_3.task_id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" task_3.r_earliest_navigation_start_time = 0.0 task_3.r_earliest_start_time = 41.0 task_3.r_latest_start_time = 47.0 diff --git a/test/update_stnu.py b/test/update_stnu.py index fb34807..0c33a24 100644 --- a/test/update_stnu.py +++ b/test/update_stnu.py @@ -5,7 +5,7 @@ class Task(object): def __init__(self): - self.id = '' + self.task_id = '' self.earliest_start_time = -1 self.latest_start_time = -1 self.start_pose_name = '' @@ -17,7 +17,7 @@ class UpdateSTNU(unittest.TestCase): def setUp(self): task_1 = Task() - task_1.id = "616af00-ec3b-4ecd-ae62-c94a3703594c" + task_1.task_id = "616af00-ec3b-4ecd-ae62-c94a3703594c" task_1.r_earliest_navigation_start_time = 0.0 task_1.r_earliest_start_time = 96.0 task_1.r_latest_start_time = 102.0 @@ -25,7 +25,7 @@ def setUp(self): task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" task_2 = Task() - task_2.id = "207cc8da-2f0e-4538-802b-b8f3954df38d" + task_2.task_id = "207cc8da-2f0e-4538-802b-b8f3954df38d" task_2.r_earliest_navigation_start_time = 0.0 task_2.r_earliest_start_time = 71.0 task_2.r_latest_start_time = 76.0 @@ -33,7 +33,7 @@ def setUp(self): task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" task_3 = Task() - task_3.id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" + task_3.task_id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" task_3.r_earliest_navigation_start_time = 0.0 task_3.r_earliest_start_time = 41.0 task_3.r_latest_start_time = 47.0 From 0e0f5e37124d92b211ba0807494b9bab6c8755b8 Mon Sep 17 00:00:00 2001 From: Angela 
Enriquez Date: Tue, 12 Nov 2019 09:50:42 +0100 Subject: [PATCH 06/44] node: Remove unused import --- stn/node.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/stn/node.py b/stn/node.py index 4bf57d6..46a9a4c 100644 --- a/stn/node.py +++ b/stn/node.py @@ -1,5 +1,3 @@ -from stn.utils.uuid import generate_uuid - class Node(object): """Represents a timepoint in the STN """ From cf2f57cf80ea8a5c7a40a927f3677157cedc0fc2 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Tue, 12 Nov 2019 09:51:13 +0100 Subject: [PATCH 07/44] utils: Add method to convert from str to uuid --- stn/utils/uuid.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/stn/utils/uuid.py b/stn/utils/uuid.py index 16ed069..4d50f55 100644 --- a/stn/utils/uuid.py +++ b/stn/utils/uuid.py @@ -6,3 +6,10 @@ def generate_uuid(): Returns a string containing a random uuid """ return uuid.uuid4() + + +def from_str(uuid_str): + """ + Converts a uuid string to an uuid instance + """ + return uuid.UUID(uuid_str) From a5753925db2e3ec2713f8c83ddf48e83f50f94b1 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Tue, 12 Nov 2019 09:52:36 +0100 Subject: [PATCH 08/44] test/scheduling_srea: Fix call to stp.solve(), it returns a dispatchable graph Before it returned an alpha (risk metric) and a dispatchable metric. Now it returns a dispatchable metric with a risk metric --- test/scheduling_srea.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/scheduling_srea.py b/test/scheduling_srea.py index 789597a..cb3c73a 100644 --- a/test/scheduling_srea.py +++ b/test/scheduling_srea.py @@ -40,7 +40,7 @@ def get_schedule(dispatchable_graph, stn): n_tasks = 3 print("STN: ", stn) - alpha, dispatchable_graph = stp.solve(stn) + dispatchable_graph = stp.solve(stn) print("Guide: ", dispatchable_graph) schedule = get_schedule(dispatchable_graph, stn) From f0a04a264406ba4c0d3e88dd47be2fde04f4422b Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Tue, 12 Nov 2019 09:58:36 +0100 Subject: [PATCH 09/44] Fix access to Node attrs (attrs are objects and not dictionaries) Before, the conversion of a json string to an stn (networkx graph) filled in the node attr as dictionaries, therefore all methods accesed node attrs as entries in a dictionary Problem: the task_id was a string and not an uuid object Now, the from_json method returns an stn where node data is an object of type node. 
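For illustration, a minimal sketch of the resulting access pattern, based on the new test/test.py (the data/stnu_two_tasks.json fixture and the 'dsc_lp' solver registered in stn/config/config.py are assumed to be available):

    import json
    import uuid

    from stn.stp import STP

    with open("data/stnu_two_tasks.json") as json_file:
        stnu_json = json.dumps(json.load(json_file))      # JSON string, as in test/test.py

    stn = STP('dsc_lp').get_stn(stn_json=stnu_json)
    timepoint = stn.nodes[1]['data']                      # a Node object now, not a dict
    print(timepoint.node_type)                            # e.g. "navigation"
    assert isinstance(timepoint.task_id, uuid.UUID)       # task_id is a uuid.UUID, not a str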
--- stn/node.py | 8 +++++++- stn/pstn/pstn.py | 9 ++++----- stn/stn.py | 34 +++++++++++++++++++--------------- stn/stnu/stnu.py | 9 ++++----- test/test.py | 20 ++++++++++++++++++++ test/update_pstn.py | 7 ++++--- test/update_stn.py | 7 ++++--- test/update_stnu.py | 7 ++++--- 8 files changed, 66 insertions(+), 35 deletions(-) create mode 100644 test/test.py diff --git a/stn/node.py b/stn/node.py index 46a9a4c..ef6c3c1 100644 --- a/stn/node.py +++ b/stn/node.py @@ -1,9 +1,13 @@ +from stn.utils.uuid import from_str + class Node(object): """Represents a timepoint in the STN """ def __init__(self, task_id, pose, node_type): # id of the task represented by this node + if isinstance(task_id, str): + task_id = from_str(task_id) self.task_id = task_id # Pose in the map where the node has to be executed self.pose = pose @@ -30,7 +34,7 @@ def __eq__(self, other): def to_dict(self): node_dict = dict() - node_dict['task_id'] = self.task_id + node_dict['task_id'] = str(self.task_id) node_dict['pose'] = self.pose node_dict['node_type'] = self.node_type return node_dict @@ -38,6 +42,8 @@ def to_dict(self): @staticmethod def from_dict(node_dict): task_id = node_dict['task_id'] + if isinstance(task_id, str): + task_id = from_str(task_id) pose = node_dict['pose'] node_type = node_dict['node_type'] node = Node(task_id, pose, node_type) diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index c279ccd..d9a4ff5 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -24,7 +24,6 @@ from stn.pstn.constraint import Constraint from stn.stn import STN -from stn.stn import Node from json import JSONEncoder import logging @@ -48,7 +47,7 @@ def __str__(self): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: - timepoint = Node.from_dict(self.nodes[j]['data']) + timepoint = self.nodes[j]['data'] lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) @@ -139,15 +138,15 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data']['node_type'] == "navigation": + if self.nodes[i]['data'].node_type == "navigation": distribution = self.get_navigation_distribution(i, j) self.add_constraint(i, j, distribution=distribution) - elif self.nodes[i]['data']['node_type'] == "start": + elif self.nodes[i]['data'].node_type == "start": distribution = self.get_task_distribution(task) self.add_constraint(i, j, distribution=distribution) - elif self.nodes[i]['data']['node_type'] == "finish": + elif self.nodes[i]['data'].node_type == "finish": # wait time between finish of one task and start of the next one. 
Fixed to [0, inf] self.add_constraint(i, j) diff --git a/stn/stn.py b/stn/stn.py index e687631..c8c9740 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -9,6 +9,7 @@ from stn.node import Node from uuid import UUID +import copy MAX_FLOAT = sys.float_info.max @@ -39,7 +40,7 @@ def __str__(self): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: - timepoint = Node.from_dict(self.nodes[j]['data']) + timepoint = self.nodes[j]['data'] lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) @@ -53,7 +54,7 @@ def __str__(self): def add_zero_timepoint(self): node = Node(generate_uuid(), '', 'zero_timepoint') - self.add_node(0, data=node.to_dict()) + self.add_node(0, data=node) def add_constraint(self, i, j, wji=0.0, wij=float('inf')): """ @@ -125,7 +126,7 @@ def add_timepoint(self, id, task, node_type): """ pose = self.get_node_pose(task, node_type) node = Node(task.task_id, pose, node_type) - self.add_node(id, data=node.to_dict()) + self.add_node(id, data=node) def add_task(self, task, position=1): """ A task is added as 3 timepoints and 5 constraints in the STN" @@ -208,15 +209,15 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data']['node_type'] == "navigation": + if self.nodes[i]['data'].node_type == "navigation": duration = self.get_navigation_duration(i, j) self.add_constraint(i, j, duration) - elif self.nodes[i]['data']['node_type'] == "start": + elif self.nodes[i]['data'].node_type == "start": duration = self.get_task_duration(task) self.add_constraint(i, j, duration) - elif self.nodes[i]['data']['node_type'] == "finish": + elif self.nodes[i]['data'].node_type == "finish": # wait time between finish of one task and start of the next one. 
Fixed to [0, inf] self.add_constraint(i, j) @@ -292,7 +293,7 @@ def remove_task(self, position=1): self.logger.debug("Constraints: %s", constraints) for (i, j) in constraints: - if self.nodes[i]['data']['node_type'] == "finish": + if self.nodes[i]['data'].node_type == "finish": # wait time between finish of one task and start of the next one self.add_constraint(i, j) @@ -304,9 +305,8 @@ def get_tasks(self): """ tasks = list() for i in self.nodes(): - timepoint = Node.from_dict(self.nodes[i]['data']) - if timepoint.node_type == "navigation": - tasks.append(timepoint.task_id) + if self.nodes[i]['data'].node_type == "navigation": + tasks.append(self.nodes[i]['data'].task_id) return tasks @@ -457,7 +457,8 @@ def timepoint_soft_constraints(self, node_id, task, node_type): def get_time(self, task_id, node_type='navigation', lower_bound=True): _time = None for i, data in self.nodes.data(): - if task_id == data['data']['task_id'] and data['data']['node_type'] == node_type: + + if task_id == data['data'].task_id and data['data'].node_type == node_type: if lower_bound: _time = -self[i][0]['weight'] else: # upper bound @@ -477,7 +478,7 @@ def get_task_id(self, position): navigation_node = 2 * position + (position-2) if self.has_node(navigation_node): - task_id = self.nodes[navigation_node]['data']['task_id'] + task_id = self.nodes[navigation_node]['data'].task_id else: self.logger.error("There is no task in position %s", position) return @@ -509,7 +510,7 @@ def get_task_node_ids(self, task_id): """ node_ids = list() for i in self.nodes(): - if task_id == self.nodes[i]['data']['task_id']: + if task_id == self.nodes[i]['data'].task_id: node_ids.append(i) return node_ids @@ -542,7 +543,10 @@ def to_json(self): return stn_json def to_dict(self): - stn_dict = json_graph.node_link_data(self) + stn = copy.deepcopy(self) + for i, data in self.nodes.data(): + stn.nodes[i]['data'] = self.nodes[i]['data'].to_dict() + stn_dict = json_graph.node_link_data(stn) return stn_dict @classmethod @@ -550,7 +554,7 @@ def from_json(cls, stn_json): stn = cls() dict_json = json.loads(stn_json) graph = json_graph.node_link_graph(dict_json) - stn.add_nodes_from(graph.nodes(data=True)) + stn.add_nodes_from([(i, {'data': Node.from_dict(graph.nodes[i]['data'])}) for i in graph.nodes()]) stn.add_edges_from(graph.edges(data=True)) return stn diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index 6e2e980..97b70b2 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -1,5 +1,4 @@ from stn.stn import STN -from stn.stn import Node from json import JSONEncoder import logging @@ -23,7 +22,7 @@ def __str__(self): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: - timepoint = Node.from_dict(self.nodes[j]['data']) + timepoint = self.nodes[j]['data'] lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) @@ -132,15 +131,15 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data']['node_type'] == "navigation": + if self.nodes[i]['data'].node_type == "navigation": lower_bound, upper_bound = self.get_navigation_bounded_duration(i, j) self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) - elif self.nodes[i]['data']['node_type'] == "start": + elif self.nodes[i]['data'].node_type == "start": lower_bound, upper_bound = self.get_task_bounded_duration(task) self.add_constraint(i, j, 
lower_bound, upper_bound, is_contingent=True) - elif self.nodes[i]['data']['node_type'] == "finish": + elif self.nodes[i]['data'].node_type == "finish": # wait time between finish of one task and start of the next one. Fixed to [0, inf] self.add_constraint(i, j, 0) diff --git a/test/test.py b/test/test.py new file mode 100644 index 0000000..27a6c17 --- /dev/null +++ b/test/test.py @@ -0,0 +1,20 @@ +import json +from stn.stp import STP +STNU = "data/stnu_two_tasks.json" + + +if __name__ == '__main__': + with open(STNU) as json_file: + stnu_dict = json.load(json_file) + + # Convert the dict to a json string + stnu_json = json.dumps(stnu_dict) + + stp = STP('dsc_lp') + stn = stp.get_stn(stn_json=stnu_json) + + print(stn) + stn_dict = stn.to_dict() + + print(stn_dict) + print(type(stn_dict['nodes'][0]['data'])) diff --git a/test/update_pstn.py b/test/update_pstn.py index 083e0e6..0d2c11d 100644 --- a/test/update_pstn.py +++ b/test/update_pstn.py @@ -1,5 +1,6 @@ from stn.pstn.pstn import PSTN import unittest +from stn.utils.uuid import from_str class Task(object): @@ -17,7 +18,7 @@ class UpdatePSTN(unittest.TestCase): def setUp(self): task_1 = Task() - task_1.task_id = "616af00-ec3b-4ecd-ae62-c94a3703594c" + task_1.task_id = from_str("0616af00-ec3b-4ecd-ae62-c94a3703594c") task_1.r_earliest_navigation_start_time = 0.0 task_1.r_earliest_start_time = 96.0 task_1.r_latest_start_time = 102.0 @@ -25,7 +26,7 @@ def setUp(self): task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" task_2 = Task() - task_2.task_id = "207cc8da-2f0e-4538-802b-b8f3954df38d" + task_2.task_id = from_str("207cc8da-2f0e-4538-802b-b8f3954df38d") task_2.r_earliest_navigation_start_time = 0.0 task_2.r_earliest_start_time = 71.0 task_2.r_latest_start_time = 76.0 @@ -33,7 +34,7 @@ def setUp(self): task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" task_3 = Task() - task_3.task_id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" + task_3.task_id = from_str("0d06fb90-a76d-48b4-b64f-857b7388ab70") task_3.r_earliest_navigation_start_time = 0.0 task_3.r_earliest_start_time = 41.0 task_3.r_latest_start_time = 47.0 diff --git a/test/update_stn.py b/test/update_stn.py index 6438c4e..f0fcd1a 100644 --- a/test/update_stn.py +++ b/test/update_stn.py @@ -1,5 +1,6 @@ from stn.stn import STN import unittest +from stn.utils.uuid import from_str class Task(object): @@ -17,7 +18,7 @@ class UpdateSTN(unittest.TestCase): def setUp(self): task_1 = Task() - task_1.task_id = "616af00-ec3b-4ecd-ae62-c94a3703594c" + task_1.task_id = from_str("0616af00-ec3b-4ecd-ae62-c94a3703594c") task_1.r_earliest_navigation_start_time = 0.0 task_1.r_earliest_start_time = 96.0 task_1.r_latest_start_time = 102.0 @@ -25,7 +26,7 @@ def setUp(self): task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" task_2 = Task() - task_2.task_id = "207cc8da-2f0e-4538-802b-b8f3954df38d" + task_2.task_id = from_str("207cc8da-2f0e-4538-802b-b8f3954df38d") task_2.r_earliest_navigation_start_time = 0.0 task_2.r_earliest_start_time = 71.0 task_2.r_latest_start_time = 76.0 @@ -33,7 +34,7 @@ def setUp(self): task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" task_3 = Task() - task_3.task_id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" + task_3.task_id = from_str("0d06fb90-a76d-48b4-b64f-857b7388ab70") task_3.r_earliest_navigation_start_time = 0.0 task_3.r_earliest_start_time = 41.0 task_3.r_latest_start_time = 47.0 diff --git a/test/update_stnu.py b/test/update_stnu.py index 0c33a24..a812a1a 100644 --- a/test/update_stnu.py +++ b/test/update_stnu.py @@ -1,5 +1,6 @@ from stn.stnu.stnu 
import STNU import unittest +from stn.utils.uuid import from_str class Task(object): @@ -17,7 +18,7 @@ class UpdateSTNU(unittest.TestCase): def setUp(self): task_1 = Task() - task_1.task_id = "616af00-ec3b-4ecd-ae62-c94a3703594c" + task_1.task_id = from_str("0616af00-ec3b-4ecd-ae62-c94a3703594c") task_1.r_earliest_navigation_start_time = 0.0 task_1.r_earliest_start_time = 96.0 task_1.r_latest_start_time = 102.0 @@ -25,7 +26,7 @@ def setUp(self): task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" task_2 = Task() - task_2.task_id = "207cc8da-2f0e-4538-802b-b8f3954df38d" + task_2.task_id = from_str("207cc8da-2f0e-4538-802b-b8f3954df38d") task_2.r_earliest_navigation_start_time = 0.0 task_2.r_earliest_start_time = 71.0 task_2.r_latest_start_time = 76.0 @@ -33,7 +34,7 @@ def setUp(self): task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" task_3 = Task() - task_3.task_id = "0d06fb90-a76d-48b4-b64f-857b7388ab70" + task_3.task_id = from_str("0d06fb90-a76d-48b4-b64f-857b7388ab70") task_3.r_earliest_navigation_start_time = 0.0 task_3.r_earliest_start_time = 41.0 task_3.r_latest_start_time = 47.0 From 1a9107b1e5e04a907da7c9f1f72eba100d93705d Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Thu, 14 Nov 2019 07:52:33 +0100 Subject: [PATCH 10/44] setup: Add stn.exceptions package --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b39ef45..f2d0405 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup setup(name='stn', - packages=['stn', 'stn.config', 'stn.methods', 'stn.pstn', 'stn.stnu', 'stn.utils'], + packages=['stn', 'stn.config', 'stn.exceptions', 'stn.methods', 'stn.pstn', 'stn.stnu', 'stn.utils'], version='0.2.0', install_requires=[ 'numpy', From b5724ee11e65ca61fbba6710c728160582ce1fe8 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Wed, 13 Nov 2019 16:13:48 +0100 Subject: [PATCH 11/44] stn: Change input args of assign_timepoint to assign a time based on the task_id and node_type Before, the position of the task in the stn was required. 
Now the position is obtained based on the task_id and the node_type --- stn/stn.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/stn/stn.py b/stn/stn.py index c8c9740..a38776e 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -346,19 +346,22 @@ def update_edge_weight(self, i, j, weight, create=False): if weight < self[i][j]['weight']: self[i][j]['weight'] = weight - def assign_timepoint(self, time, position=1): + def assign_timepoint(self, allotted_time, task_id, node_type): """ - Assigns the given time to the earliest and latest time of the - timepoint at the given position + Assigns the allotted time to the earliest and latest time of the timepoint + of task_id of type node_type Args: - time: float representing seconds - position: int representing the location of the timepoint in the stn - - Returns: + allotted_time (float): seconds after zero timepoint + task_id(UUID): id of the task + node_type(string): can be "navigation", "start" of "finish" """ - self.update_edge_weight(0, position, time) - self.update_edge_weight(position, 0, -time) + for i in self.nodes(): + node_data = self.nodes[i]['data'] + if node_data.task_id == task_id and node_data.node_type == node_type: + self.update_edge_weight(0, i, allotted_time) + self.update_edge_weight(i, 0, -allotted_time) + break def get_edge_weight(self, i, j): """ Returns the weight of the edge between node starting_node and node ending_node From 802761f6e4c0d339fbfea966ffcfe8a16d77aa00 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Fri, 15 Nov 2019 11:30:32 +0100 Subject: [PATCH 12/44] stp: Refactor is_consistent method Do not update the edges in the stn, only return whether the stn is consistent or not --- stn/stp.py | 10 +++++----- stn/utils/{config_logger.py => utils.py} | 0 2 files changed, 5 insertions(+), 5 deletions(-) rename stn/utils/{config_logger.py => utils.py} (100%) diff --git a/stn/stp.py b/stn/stp.py index b3426c4..648f82e 100644 --- a/stn/stp.py +++ b/stn/stp.py @@ -1,5 +1,6 @@ +import networkx as nx + from stn.config.config import stn_factory, stp_solver_factory -from stn.methods.fpc import get_minimal_network from stn.exceptions.stp import NoSTPSolution """ Solves a Simple Temporal Problem (STP) @@ -58,10 +59,9 @@ def solve(self, stn): @staticmethod def is_consistent(stn): - minimal_network = get_minimal_network(stn) - if minimal_network: + shortest_path_array = nx.floyd_warshall(stn) + if stn.is_consistent(shortest_path_array): return True - else: - return False + return False diff --git a/stn/utils/config_logger.py b/stn/utils/utils.py similarity index 100% rename from stn/utils/config_logger.py rename to stn/utils/utils.py From 140778251724e2b4ecbba696cd7a7316bc9cb24e Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Fri, 15 Nov 2019 13:13:31 +0100 Subject: [PATCH 13/44] stn: Fix timetpoint_soft_constraints method Rename r_earliest_navigation_start to r_earliest_navigation_start_time --- stn/stn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stn/stn.py b/stn/stn.py index a38776e..ed4f4c8 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -448,7 +448,7 @@ def timepoint_hard_constraints(self, node_id, task, node_type): def timepoint_soft_constraints(self, node_id, task, node_type): if node_type == "navigation": - self.add_constraint(0, node_id, task.r_earliest_navigation_start) + self.add_constraint(0, node_id, task.r_earliest_navigation_start_time) if node_type == "start": self.add_constraint(0, node_id) From 7fa0eec6f199b5e25e4c1774a2dec75fab460524 Mon Sep 17 
00:00:00 2001 From: Angela Enriquez Date: Fri, 22 Nov 2019 09:17:05 +0100 Subject: [PATCH 14/44] stn: Add tolerance to is_consistent method --- stn/stn.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stn/stn.py b/stn/stn.py index ed4f4c8..f5615ec 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -10,6 +10,7 @@ from stn.node import Node from uuid import UUID import copy +import math MAX_FLOAT = sys.float_info.max @@ -314,7 +315,7 @@ def is_consistent(self, shortest_path_array): """The STN is not consistent if it has negative cycles""" consistent = True for node, nodes in shortest_path_array.items(): - if nodes[node] != 0: + if not math.isclose(nodes[node], 0.0, abs_tol=1e-09): consistent = False return consistent From bcc7a412f70af2875448710fae21acbbd359bd46 Mon Sep 17 00:00:00 2001 From: Angela Enriquez Date: Fri, 22 Nov 2019 16:48:26 +0100 Subject: [PATCH 15/44] stn: Add method to get task position based on task_id --- stn/stn.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/stn/stn.py b/stn/stn.py index f5615ec..54a14f3 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -489,6 +489,11 @@ def get_task_id(self, position): return task_id + def get_task_position(self, task_id): + for i, data in self.nodes.data(): + if task_id == data['data'].task_id and data['data'].node_type == 'navigation': + return i + def get_earliest_task_id(self): """ Returns the id of the earliest task in the stn From cad6d967bab2765306c09ca2528e1be3358e3a02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 16 Dec 2019 12:25:58 +0100 Subject: [PATCH 16/44] srea: Catch PulpSolverError exception Sometimes pulp throws an exception instead of returning a problem with unfeasible status --- stn/methods/srea.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/stn/methods/srea.py b/stn/methods/srea.py index 45274b6..3b9e331 100644 --- a/stn/methods/srea.py +++ b/stn/methods/srea.py @@ -267,7 +267,13 @@ def srea_LP(inputstn, prob.writeLP('STN.lp') pulp.LpSolverDefault.msg = 10 - prob.solve() + # Based on https://stackoverflow.com/questions/27406858/pulp-solver-error + # Sometimes pulp throws an exception instead of returning a problem with unfeasible status + try: + prob.solve() + except pulp.PulpSolverError: + print("Problem unfeasible") + return None status = pulp.LpStatus[prob.status] if debug: From 2d1a924c2a162ef2a0d9d9d4dc7b901aed494a76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 16 Dec 2019 13:58:12 +0100 Subject: [PATCH 17/44] node: Remove pose attr --- stn/node.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/stn/node.py b/stn/node.py index ef6c3c1..a3fffb9 100644 --- a/stn/node.py +++ b/stn/node.py @@ -4,14 +4,12 @@ class Node(object): """Represents a timepoint in the STN """ - def __init__(self, task_id, pose, node_type): + def __init__(self, task_id, node_type): # id of the task represented by this node if isinstance(task_id, str): task_id = from_str(task_id) self.task_id = task_id - # Pose in the map where the node has to be executed - self.pose = pose - # The node can be of node_type zero_timepoint, navigation, start or finish + # The node can be of node_type zero_timepoint, start, pickup or delivery self.node_type = node_type def __str__(self): @@ -23,19 +21,17 @@ def __repr__(self): return str(self.to_dict()) def __hash__(self): - return hash((self.task_id, self.pose, self.node_type)) + return hash((self.task_id, self.node_type)) def 
__eq__(self, other): if other is None: return False return (self.task_id == other.task_id and - self.pose == other.pose and self.node_type == other.node_type) def to_dict(self): node_dict = dict() node_dict['task_id'] = str(self.task_id) - node_dict['pose'] = self.pose node_dict['node_type'] = self.node_type return node_dict @@ -44,7 +40,6 @@ def from_dict(node_dict): task_id = node_dict['task_id'] if isinstance(task_id, str): task_id = from_str(task_id) - pose = node_dict['pose'] node_type = node_dict['node_type'] - node = Node(task_id, pose, node_type) + node = Node(task_id, node_type) return node From 0933fb1e44792e620bf5a7e39159ebeafee82c51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 16 Dec 2019 14:01:05 +0100 Subject: [PATCH 18/44] Update Task: includes timepoint and inter_timepoint constraints The task contains work_time and travel_time and intertimepoint constraints are added according to the type of stn --- stn/pstn/pstn.py | 105 +++++++++++++------- stn/stn.py | 231 ++++++++++++++++++------------------------- stn/stnu/stnu.py | 97 +++++++++--------- stn/task.py | 97 ++++++++++++++---- stn/utils/utils.py | 26 ++++- test/data/tasks.yaml | 36 +++++++ test/update_pstn.py | 45 ++------- test/update_stn.py | 54 +++------- test/update_stnu.py | 52 +++------- 9 files changed, 387 insertions(+), 356 deletions(-) create mode 100644 test/data/tasks.yaml diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index d9a4ff5..42d0118 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -26,6 +26,8 @@ from stn.stn import STN from json import JSONEncoder import logging +from stn.task import TimepointConstraint +import numpy as np class MyEncoder(JSONEncoder): @@ -94,22 +96,22 @@ def add_constraint(self, i, j, wji=0.0, wij=float('inf'), distribution=""): self.add_edge(j, i, distribution=distribution) self.add_edge(j, i, is_contingent=is_contingent) - def timepoint_hard_constraints(self, node_id, task, node_type): - """ Adds the earliest and latest times to execute a timepoint (node) - Navigation timepoint [0, inf] - Start timepoint [earliest_start_time, latest_start_time] - Finish timepoint [0, inf] - """ - - if node_type == "navigation": - self.add_constraint(0, node_id, task.r_earliest_navigation_start_time) - - if node_type == "start": - self.add_constraint(0, node_id, task.r_earliest_start_time, task.r_latest_start_time) - - elif node_type == "finish": - self.add_constraint(0, node_id) - + # def timepoint_hard_constraints(self, node_id, task, node_type): + # """ Adds the earliest and latest times to execute a timepoint (node) + # Navigation timepoint [0, inf] + # Start timepoint [earliest_start_time, latest_start_time] + # Finish timepoint [0, inf] + # """ + # + # if node_type == "navigation": + # self.add_constraint(0, node_id, task.r_earliest_navigation_start_time) + # + # if node_type == "start": + # self.add_constraint(0, node_id, task.r_earliest_start_time, task.r_latest_start_time) + # + # elif node_type == "finish": + # self.add_constraint(0, node_id) + # def get_contingent_constraints(self): """ Returns a dictionary with the contingent constraints in the PSTN {(starting_node, ending_node): Constraint (object)} @@ -125,9 +127,9 @@ def get_contingent_constraints(self): def add_intertimepoints_constraints(self, constraints, task): """ Adds constraints between the timepoints of a task Constraints between: - - navigation start and start (contingent) - - start and finish (contingent) - - finish and next task (if any) (requirement) + - start and 
pickup (contingent) + - pickup and delivery (contingent) + - delivery and next task (if any) (requirement) Args: constraints (list) : list of tuples that defines the pair of nodes between which a new constraint should be added Example: @@ -138,28 +140,55 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data'].node_type == "navigation": - distribution = self.get_navigation_distribution(i, j) - self.add_constraint(i, j, distribution=distribution) + if self.nodes[i]['data'].node_type == "start": + distribution = self.get_travel_time_distribution(task) + if distribution == "N_0.0_0.0": + self.add_constraint(i, j, 0, 0) + else: + self.add_constraint(i, j, distribution=distribution) - elif self.nodes[i]['data'].node_type == "start": - distribution = self.get_task_distribution(task) + elif self.nodes[i]['data'].node_type == "pickup": + distribution = self.get_work_time_distribution(task) self.add_constraint(i, j, distribution=distribution) - elif self.nodes[i]['data'].node_type == "finish": + elif self.nodes[i]['data'].node_type == "delivery": # wait time between finish of one task and start of the next one. Fixed to [0, inf] self.add_constraint(i, j) - def get_navigation_distribution(self, source, destination): - """ Reads from the database the probability distribution for navigating from source to destination - """ - # TODO: Read estimated distribution from dataset - distribution = "N_1_1" - return distribution - - def get_task_distribution(self, task): - """ Reads from the database the estimated distribution of the task - In the case of transportation tasks, the estimated distribution is the navigation time from the pickup to the delivery location - """ - distribution = "N_1_1" - return distribution + @staticmethod + def get_travel_time_distribution(task): + travel_time = task.get_inter_timepoint_constraint("travel_time") + travel_time_distribution = "N_" + str(travel_time.mean) + "_" + str(travel_time.standard_dev) + return travel_time_distribution + + @staticmethod + def get_work_time_distribution(task): + work_time = task.get_inter_timepoint_constraint("work_time") + work_time_distribution = "N_" + str(work_time.mean) + "_" + str(work_time.standard_dev) + return work_time_distribution + + @staticmethod + def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): + r_earliest_time = next_timepoint_constraint.r_earliest_time - \ + (inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) + r_latest_time = np.inf + return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + + @staticmethod + def get_next_timepoint_constraint(constraint_name, prev_timepoint_constraint, inter_timepoint_constraint): + r_earliest_time = 0 + r_latest_time = np.inf + return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + + @staticmethod + def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): + start_constraint = TimepointConstraint(name="start", + r_earliest_time=r_earliest_pickup - (travel_time.mean - 2*work_time.standard_dev), + r_latest_time=np.inf) + pickup_constraint = TimepointConstraint(name="pickup", + r_earliest_time=r_earliest_pickup, + r_latest_time=r_latest_pickup) + delivery_constraint = TimepointConstraint(name="delivery", + r_earliest_time= 0, + r_latest_time=np.inf) + return [start_constraint, pickup_constraint, delivery_constraint] diff 
--git a/stn/stn.py b/stn/stn.py index 54a14f3..5c55a54 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -11,6 +11,7 @@ from uuid import UUID import copy import math +from stn.task import TimepointConstraint MAX_FLOAT = sys.float_info.max @@ -54,7 +55,7 @@ def __str__(self): return to_print def add_zero_timepoint(self): - node = Node(generate_uuid(), '', 'zero_timepoint') + node = Node(generate_uuid(), 'zero_timepoint') self.add_node(0, data=node) def add_constraint(self, i, j, wji=0.0, wij=float('inf')): @@ -103,43 +104,28 @@ def get_constraints(self): return constraints - def get_node_pose(self, task, node_type): - """ Returns the pose in the map where the task has to be executed - """ - if node_type == 'navigation': - # TODO: initialize the pose with the robot current position (read it from mongo) - # this value will be overwritten once the task is allocated - pose = '' - elif node_type == 'start': - pose = task.start_pose_name - elif node_type == 'finish': - pose = task.finish_pose_name - - return pose - def add_timepoint(self, id, task, node_type): """ A timepoint is represented by a node in the STN The node can be of node_type: - zero_timepoint: references the schedule to the origin - - navigation: time at which the robot starts navigating towards the task - - start: time at which the robot starts executing the task - - finish: time at which the robot finishes executing the task + - start : time at which the robot starts navigating towards the pickup location + - pickup : time at which the robot arrives starts the pickup action + - delivery : time at which the robot finishes the delivery action """ - pose = self.get_node_pose(task, node_type) - node = Node(task.task_id, pose, node_type) + node = Node(task.task_id, node_type) self.add_node(id, data=node) def add_task(self, task, position=1): """ A task is added as 3 timepoints and 5 constraints in the STN" Timepoints: - - navigation start - - start time - - finish time + - start + - pickup time + - delivery time Constraints: - - earliest and latest navigation times - - navigation duration - earliest and latest start times - - task duration + - travel time: time to go from current position to pickup position) + - earliest and latest pickup times + - work time: time to perform the task (time to transport an object from the pickup to the delivery location) - earliest and latest finish times If the task is not the first in the STN, add wait time constraint @@ -148,58 +134,58 @@ def add_task(self, task, position=1): """ self.logger.info("Adding task %s in position %s", task.task_id, position) - navigation_node_id = 2 * position + (position-2) - start_node_id = navigation_node_id + 1 - finish_node_id = start_node_id + 1 + start_node_id = 2 * position + (position-2) + pickup_node_id = start_node_id + 1 + delivery_node_id = pickup_node_id + 1 - # Remove constraint linking navigation_node_id and previous node (if any) - if self.has_edge(navigation_node_id-1, navigation_node_id) and navigation_node_id-1 != 0: - self.logger.debug("Deleting constraint: %s => %s", navigation_node_id-1, navigation_node_id) + # Remove constraint linking start_node_id and previous node (if any) + if self.has_edge(start_node_id-1, start_node_id) and start_node_id-1 != 0: + self.logger.debug("Deleting constraint: %s => %s", start_node_id-1, start_node_id) - self.remove_constraint(navigation_node_id-1, navigation_node_id) + self.remove_constraint(start_node_id-1, start_node_id) # Displace by 3 all nodes and constraints after position mapping = {} for node_id, data in 
self.nodes(data=True): - if node_id >= navigation_node_id: + if node_id >= start_node_id: mapping[node_id] = node_id + 3 self.logger.debug("mapping: %s ", mapping) nx.relabel_nodes(self, mapping, copy=False) # Add new timepoints - self.add_timepoint(navigation_node_id, task, "navigation") - self.add_timepoint_constraints(navigation_node_id, task, "navigation") - self.add_timepoint(start_node_id, task, "start") - self.add_timepoint_constraints(start_node_id, task, "start") + self.add_timepoint_constraint(start_node_id, task.get_timepoint_constraint("start")) + + self.add_timepoint(pickup_node_id, task, "pickup") + self.add_timepoint_constraint(pickup_node_id, task.get_timepoint_constraint("pickup")) - self.add_timepoint(finish_node_id, task, "finish") - self.add_timepoint_constraints(finish_node_id, task, "finish") + self.add_timepoint(delivery_node_id, task, "delivery") + self.add_timepoint_constraint(delivery_node_id, task.get_timepoint_constraint("delivery")) # Add constraints between new nodes - new_constraints_between = [navigation_node_id, start_node_id, finish_node_id] + new_constraints_between = [start_node_id, pickup_node_id, delivery_node_id] # Check if there is a node after the new delivery node - if self.has_node(finish_node_id+1): - new_constraints_between.append(finish_node_id+1) + if self.has_node(delivery_node_id+1): + new_constraints_between.append(delivery_node_id+1) # Check if there is a node before the new start node - if self.has_node(navigation_node_id-1): - new_constraints_between.insert(0, navigation_node_id-1) + if self.has_node(start_node_id-1): + new_constraints_between.insert(0, start_node_id-1) self.logger.debug("New constraints between nodes: %s", new_constraints_between) constraints = [((i), (i + 1)) for i in new_constraints_between[:-1]] - self.logger.debug("Constraints: %s", constraints) + print("Constraints: %s", constraints) self.add_intertimepoints_constraints(constraints, task) def add_intertimepoints_constraints(self, constraints, task): """ Adds constraints between the timepoints of a task Constraints between: - - navigation start and start - - start and finish - - finish and next task (if any) + - start and pickup + - pickup and delivery + - delivery and start next task (if any) Args: constraints (list) : list of tuples that defines the pair of nodes between which a new constraint should be added Example: @@ -210,52 +196,44 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data'].node_type == "navigation": - duration = self.get_navigation_duration(i, j) - self.add_constraint(i, j, duration) + if self.nodes[i]['data'].node_type == "start": + travel_time = self.get_travel_time(task) + self.add_constraint(i, j, travel_time, travel_time) - elif self.nodes[i]['data'].node_type == "start": - duration = self.get_task_duration(task) - self.add_constraint(i, j, duration) + elif self.nodes[i]['data'].node_type == "pickup": + work_time = self.get_work_time(task) + self.add_constraint(i, j, work_time, work_time) - elif self.nodes[i]['data'].node_type == "finish": + elif self.nodes[i]['data'].node_type == "delivery": # wait time between finish of one task and start of the next one. 
Fixed to [0, inf] self.add_constraint(i, j) - def get_navigation_duration(self, source, destination): - """ Reads from the database the estimated duration for navigating from source to destination and takes the mean + @staticmethod + def get_travel_time(task): + """ Returns the mean of the travel time (time for going from current pose to pickup pose) """ - # TODO: Read estimated duration from dataset - duration = 1.0 - return duration - - def get_task_duration(self, task): - """ Reads from the database the estimated duration of the task - In the case of transportation tasks, the estimated duration is the navigation time from the pickup to the delivery location - """ - # TODO: Read estimated duration from dataset - duration = 1.0 - return duration - - def get_navigation_start_time(self, task): - """ Returns the earliest_start_time and latest start navigation time - """ - navigation_duration = self.get_navigation_duration(task.start_pose_name, task.finish_pose_name) - - earliest_navigation_start_time = task.r_earliest_start_time - navigation_duration - latest_navigation_start_time = task.r_latest_start_time - navigation_duration - - return earliest_navigation_start_time, latest_navigation_start_time + travel_time = task.get_inter_timepoint_constraint("travel_time") + return travel_time.mean - def get_finish_time(self, task): - """ Returns the earliest and latest finish time + @staticmethod + def get_work_time(task): + """ Returns the mean of the work time (time to transport an object from the pickup to the delivery location) """ - task_duration = self.get_task_duration(task) - - earliest_finish_time = task.r_earliest_start_time + task_duration - latest_finish_time = task.r_latest_start_time + task_duration - - return earliest_finish_time, latest_finish_time + work_time = task.get_inter_timepoint_constraint("work_time") + return work_time.mean + + @staticmethod + def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): + start_constraint = TimepointConstraint(name="start", + r_earliest_time=r_earliest_pickup - travel_time.mean, + r_latest_time=r_latest_pickup - travel_time.mean) + pickup_constraint = TimepointConstraint(name="pickup", + r_earliest_time=r_earliest_pickup, + r_latest_time=r_latest_pickup) + delivery_constraint = TimepointConstraint(name="delivery", + r_earliest_time=r_earliest_pickup + work_time.mean, + r_latest_time=r_latest_pickup + work_time.mean) + return [start_constraint, pickup_constraint, delivery_constraint] def show_n_nodes_edges(self): """ Prints the number of nodes and edges in the stn @@ -267,24 +245,24 @@ def remove_task(self, position=1): """ Removes the task from the given position""" self.logger.info("Removing task at position: %s", position) - navigation_node_id = 2 * position + (position-2) - start_node_id = navigation_node_id + 1 - finish_node_id = start_node_id + 1 + start_node_id = 2 * position + (position-2) + pickup_node_id = start_node_id + 1 + delivery_node_id = pickup_node_id + 1 new_constraints_between = list() - if self.has_node(navigation_node_id-1) and self.has_node(finish_node_id+1): - new_constraints_between = [navigation_node_id-1, navigation_node_id] + if self.has_node(start_node_id-1) and self.has_node(delivery_node_id+1): + new_constraints_between = [start_node_id-1, start_node_id] # Remove node and all adjacent edges - self.remove_node(navigation_node_id) self.remove_node(start_node_id) - self.remove_node(finish_node_id) + self.remove_node(pickup_node_id) + self.remove_node(delivery_node_id) # Displace 
by -3 all nodes and constraints after position mapping = {} for node_id, data in self.nodes(data=True): - if node_id >= navigation_node_id: + if node_id >= start_node_id: mapping[node_id] = node_id - 3 self.logger.debug("mapping: %s", mapping) nx.relabel_nodes(self, mapping, copy=False) @@ -294,7 +272,7 @@ def remove_task(self, position=1): self.logger.debug("Constraints: %s", constraints) for (i, j) in constraints: - if self.nodes[i]['data'].node_type == "finish": + if self.nodes[i]['data'].node_type == "delivery": # wait time between finish of one task and start of the next one self.add_constraint(i, j) @@ -306,7 +284,7 @@ def get_tasks(self): """ tasks = list() for i in self.nodes(): - if self.nodes[i]['data'].node_type == "navigation": + if self.nodes[i]['data'].node_type == "start": tasks.append(self.nodes[i]['data'].task_id) return tasks @@ -416,49 +394,32 @@ def get_idle_time(self): for i, task_id in enumerate(task_ids): if i > 0: - r_earliest_finish_time_previous_task = self.get_time(task_ids[i-1], "finish") - r_earliest_start_time = self.get_time(task_ids[i], "start") + r_earliest_finish_time_previous_task = self.get_time(task_ids[i-1], "delivery") + r_earliest_start_time = self.get_time(task_ids[i], "pickup") idle_time += round(r_earliest_start_time - r_earliest_finish_time_previous_task) return idle_time - def add_timepoint_constraints(self, node_id, task, node_type): + def add_timepoint_constraint(self, node_id, timepoint_constraint): """ Adds the earliest and latest times to execute a timepoint (node) - Navigation timepoint [earliest_navigation_start_time, latest_navigation_start_time] Start timepoint [earliest_start_time, latest_start_time] - Finish timepoint [earliest_finish_time, lastest_finish_time] + Pickup timepoint [earliest_pickup_time, latest_pickup_time] + Delivery timepoint [earliest_delivery_time, lastest_delivery_time] """ + self.add_constraint(0, node_id, timepoint_constraint.r_earliest_time, timepoint_constraint.r_latest_time) - if task.hard_constraints: - self.timepoint_hard_constraints(node_id, task, node_type) - else: - self.timepoint_soft_constraints(node_id, task, node_type) - - def timepoint_hard_constraints(self, node_id, task, node_type): - if node_type == "navigation": - earliest_navigation_start_time, latest_navigation_start_time = self.get_navigation_start_time(task) - - self.add_constraint(0, node_id, earliest_navigation_start_time, latest_navigation_start_time) - - if node_type == "start": - self.add_constraint(0, node_id, task.r_earliest_start_time, task.r_latest_start_time) - - elif node_type == "finish": - earliest_finish_time, latest_finish_time = self.get_finish_time(task) - - self.add_constraint(0, node_id, earliest_finish_time, latest_finish_time) - - def timepoint_soft_constraints(self, node_id, task, node_type): - if node_type == "navigation": - self.add_constraint(0, node_id, task.r_earliest_navigation_start_time) - - if node_type == "start": - self.add_constraint(0, node_id) - - elif node_type == "finish": + @staticmethod + def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): + r_earliest_time = next_timepoint_constraint.r_earliest_time - inter_timepoint_constraint.mean + r_latest_time = next_timepoint_constraint.r_latest_time - inter_timepoint_constraint.mean + return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) - self.add_constraint(0, node_id, 0, self.max_makespan) + @staticmethod + def get_next_timepoint_constraint(constraint_name, prev_timepoint_constraint, 
inter_timepoint_constraint): + r_earliest_time = prev_timepoint_constraint.r_earliest_time + inter_timepoint_constraint.mean + r_latest_time = prev_timepoint_constraint.r_latest_time + inter_timepoint_constraint.mean + return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) - def get_time(self, task_id, node_type='navigation', lower_bound=True): + def get_time(self, task_id, node_type='start', lower_bound=True): _time = None for i, data in self.nodes.data(): @@ -479,10 +440,10 @@ def get_task_id(self, position): Returns: (string) task id """ - navigation_node = 2 * position + (position-2) + start_node = 2 * position + (position-2) - if self.has_node(navigation_node): - task_id = self.nodes[navigation_node]['data'].task_id + if self.has_node(start_node): + task_id = self.nodes[start_node]['data'].task_id else: self.logger.error("There is no task in position %s", position) return @@ -491,7 +452,7 @@ def get_task_id(self, position): def get_task_position(self, task_id): for i, data in self.nodes.data(): - if task_id == data['data'].task_id and data['data'].node_type == 'navigation': + if task_id == data['data'].task_id and data['data'].node_type == 'start': return i def get_earliest_task_id(self): diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index 97b70b2..9d3341e 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -1,6 +1,7 @@ from stn.stn import STN from json import JSONEncoder import logging +from stn.task import TimepointConstraint class MyEncoder(JSONEncoder): @@ -70,22 +71,6 @@ def add_constraint(self, i, j, wji=0.0, wij=float('inf'), is_contingent=False): self.add_edge(j, i, is_contingent=is_contingent) - def timepoint_hard_constraints(self, node_id, task, node_type): - """ Adds the earliest and latest times to execute a timepoint (node) - Navigation timepoint [0, inf] - Start timepoint [earliest_start_time, latest_start_time] - Finish timepoint [0, inf] - """ - - if node_type == "navigation": - self.add_constraint(0, node_id, task.r_earliest_navigation_start_time) - - if node_type == "start": - self.add_constraint(0, node_id, task.r_earliest_start_time, task.r_latest_start_time) - - elif node_type == "finish": - self.add_constraint(0, node_id) - def get_contingent_constraints(self): """ Returns a dictionary with the contingent constraints in the STNU {(starting_node, ending_node): self[i][j] } @@ -131,56 +116,74 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data'].node_type == "navigation": - lower_bound, upper_bound = self.get_navigation_bounded_duration(i, j) + if self.nodes[i]['data'].node_type == "start": + lower_bound, upper_bound = self.get_travel_time_bounded_duration(task) self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) - elif self.nodes[i]['data'].node_type == "start": - lower_bound, upper_bound = self.get_task_bounded_duration(task) + elif self.nodes[i]['data'].node_type == "pickup": + lower_bound, upper_bound = self.get_work_time_bounded_duration(task) self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) - elif self.nodes[i]['data'].node_type == "finish": + elif self.nodes[i]['data'].node_type == "delivery": # wait time between finish of one task and start of the next one. 
Fixed to [0, inf] self.add_constraint(i, j, 0) - def get_navigation_bounded_duration(self, source, destination): - """ Reads from the database the probability distribution for navigating from source to destination and converts it to a bounded interval + @staticmethod + def get_travel_time_bounded_duration(task): + """ Returns the estimated travel time as a bounded interval [mu - 2*sigma, mu + 2*sigma] as in: Shyan Akmal, Savana Ammons, Hemeng Li, and James Boerkoel Jr. Quantifying Degrees of Controllability in Temporal Networks with Uncertainty. In Proceedings of the 29th International Conference on Automated Planning and Scheduling, ICAPS 2019, 07 2019. """ - # TODO: Read estimated distribution from database - distribution = "N_1_1" - name_split = distribution.split("_") - # mean - mu = float(name_split[1]) - # standard deviation - sigma = float(name_split[2]) - - lower_bound = mu - 2*sigma - upper_bound = mu + 2*sigma + travel_time = task.get_inter_timepoint_constraint("travel_time") + lower_bound = travel_time.mean - 2*travel_time.standard_dev + upper_bound = travel_time.mean + 2*travel_time.standard_dev return lower_bound, upper_bound - def get_task_bounded_duration(self, task): - """ Reads from the database the estimated distribution of the task - In the case of transportation tasks, the estimated distribution is the navigation time from the pickup to the delivery location - Converts the estimated distribution to a bounded interval + @staticmethod + def get_work_time_bounded_duration(task): + """ Returns the estimated work time as a bounded interval [mu - 2*sigma, mu + 2*sigma] as in: Shyan Akmal, Savana Ammons, Hemeng Li, and James Boerkoel Jr. Quantifying Degrees of Controllability in Temporal Networks with Uncertainty. In Proceedings of the 29th International Conference on Automated Planning and Scheduling, ICAPS 2019, 07 2019. 
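
As a concrete check of the [mu - 2*sigma, mu + 2*sigma] conversion described in these docstrings (an illustrative sketch, not part of the patch; the numbers are the travel_time values used later in test/data/tasks.yaml, mean 5 and variance 0.2):

mean, variance = 5, 0.2
standard_dev = variance ** 0.5           # square root of the variance, ~0.447
lower_bound = mean - 2 * standard_dev    # ~4.106
upper_bound = mean + 2 * standard_dev    # ~5.894
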
""" - # TODO: Read estimated distribution from database - distribution = "N_4_1" - name_split = distribution.split("_") - # mean - mu = float(name_split[1]) - # standard deviation - sigma = float(name_split[2]) - - lower_bound = mu - 2*sigma - upper_bound = mu + 2*sigma + work_time = task.get_inter_timepoint_constraint("work_time") + lower_bound = work_time.mean - 2*work_time.standard_dev + upper_bound = work_time.mean + 2*work_time.standard_dev return lower_bound, upper_bound + + @staticmethod + def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): + r_earliest_time = next_timepoint_constraint.r_earliest_time - \ + (inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) + r_latest_time = next_timepoint_constraint.r_latest_time - \ + (inter_timepoint_constraint.mean + 2*inter_timepoint_constraint.standard_dev) + + return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + + @staticmethod + def get_next_timepoint_constraint(constraint_name, prev_timepoint_constraint, inter_timepoint_constraint): + r_earliest_time = prev_timepoint_constraint.r_earliest_time + \ + (inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) + r_latest_time = prev_timepoint_constraint.r_latest_time + \ + (inter_timepoint_constraint.mean + 2*inter_timepoint_constraint.standard_dev) + + return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + + @staticmethod + def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): + start_constraint = TimepointConstraint(name="start", + r_earliest_time=r_earliest_pickup - (travel_time.mean - 2*work_time.standard_dev), + r_latest_time=r_latest_pickup - (travel_time.mean + 2*work_time.standard_dev)) + pickup_constraint = TimepointConstraint(name="pickup", + r_earliest_time=r_earliest_pickup, + r_latest_time=r_latest_pickup) + delivery_constraint = TimepointConstraint(name="delivery", + r_earliest_time=r_earliest_pickup + work_time.mean - 2*work_time.standard_dev, + r_latest_time=r_latest_pickup + work_time.mean - 2*work_time.standard_dev) + return [start_constraint, pickup_constraint, delivery_constraint] + diff --git a/stn/task.py b/stn/task.py index 836ad56..ba3e9de 100644 --- a/stn/task.py +++ b/stn/task.py @@ -1,28 +1,85 @@ -class STNTask(object): - def __init__(self, task_id, - r_earliest_navigation_start_time, - r_earliest_start_time, - r_latest_start_time, - start_pose_name, - finish_pose_name, - **kwargs): +import numpy as np + + +class InterTimepointConstraint: + def __init__(self, name, mean, variance, **kwargs): + self.name = name + self.mean = round(mean, 3) + self.variance = round(variance, 3) + self.standard_dev = round(variance ** 0.5, 3) + + def __str__(self): + to_print = "" + to_print += "{}: N({}, {})".format(self.name, self.mean, self.standard_dev) + return to_print + + +class TimepointConstraint: + """ + r_earliest_time (float): earliest time relative to a ztp + r_latest_time (float): latest time relative to a ztp + + """ + def __init__(self, name, r_earliest_time, r_latest_time, **kwargs): + self.name = name + self.r_earliest_time = round(r_earliest_time, 3) + self.r_latest_time = round(r_latest_time, 3) + + def __str__(self): + to_print = "" + to_print += "{}: [{}, {}]".format(self.name, self.r_earliest_time, self.r_latest_time) + return to_print + + +class Task(object): + def __init__(self, task_id, timepoint_constraints, inter_timepoint_constraints): """ Constructor for the Task object 
Args: - id (UUID): An instance of an UUID object - r_earliest_navigation_start_time (float): earliest navigation start time relative to the ztp - r_earliest_start_time (float): earliest start time relative to the ztp - r_latest_start_time (float): latest start time relative to the ztp - start_pose_name (str): Name of the location where the robot should execute the task - finish_pose_name (str): Name of the location where the robot must terminate task execution + task_id (UUID): An instance of an UUID object + timepoint_constraints (list): list of timepoint constraints (TimepointConstraint) + inter_timepoint_constraints (list): list of inter timepoint constraints (InterTimepointConstraint) hard_constraints (bool): False if the task can be scheduled ASAP, True if the task is not flexible. Defaults to True """ self.task_id = task_id - self.r_earliest_navigation_start_time = round(r_earliest_navigation_start_time, 2) - self.r_earliest_start_time = round(r_earliest_start_time, 2) - self.r_latest_start_time = round(r_latest_start_time, 2) - self.start_pose_name = start_pose_name - self.finish_pose_name = finish_pose_name - self.hard_constraints = kwargs.get('hard_constraints', True) + self.timepoint_constraints = list() + self.inter_timepoint_constraints = list() + + for constraint in timepoint_constraints: + self.timepoint_constraints.append(constraint) + for constraint in inter_timepoint_constraints: + self.inter_timepoint_constraints.append(constraint) + + def __str__(self): + to_print = "" + to_print += "{} \n".format(self.task_id) + to_print += "TimepointConstraints: \n" + for constraint in self.timepoint_constraints: + to_print += str(constraint) + "\t" + to_print += "\n InterTimepointConstraints\n" + for constraint in self.inter_timepoint_constraints: + to_print += str(constraint) + "\t" + return to_print + + def get_timepoint_constraint(self, constraint_name): + return [constraint for constraint in self.timepoint_constraints + if constraint.name == constraint_name].pop() + + def get_inter_timepoint_constraint(self, constraint_name): + return [constraint for constraint in self.inter_timepoint_constraints + if constraint.name == constraint_name].pop() + + def update_timepoint_constraint(self, constraint_name, r_earliest_time, r_latest_time=np.inf): + in_list = False + for constraint in self.timepoint_constraints: + if constraint.name == constraint_name: + in_list = True + constraint.r_earliest_time = r_earliest_time + constraint.r_latest_time = r_latest_time + if not in_list: + self.timepoint_constraints.append(TimepointConstraint(constraint_name, + r_earliest_time, + r_latest_time)) + diff --git a/stn/utils/utils.py b/stn/utils/utils.py index e865b7b..b5e68cb 100644 --- a/stn/utils/utils.py +++ b/stn/utils/utils.py @@ -1,9 +1,33 @@ import logging.config import yaml +from stn.task import Task, InterTimepointConstraint def config_logger(logging_file): with open(logging_file) as f: log_config = yaml.safe_load(f) - logging.config.dictConfig(log_config) \ No newline at end of file + logging.config.dictConfig(log_config) + + +def load_yaml(file): + """ Reads a yaml file and returns a dictionary with its contents + + :param file: file to load + :return: data as dict() + """ + with open(file, 'r') as file: + data = yaml.safe_load(file) + return data + + +def create_task(stn, task_dict): + task_id = task_dict.get("task_id") + r_earliest_pickup = task_dict.get("earliest_pickup") + r_latest_pickup = task_dict.get("latest_pickup") + travel_time = 
InterTimepointConstraint(**task_dict.get("travel_time")) + work_time = InterTimepointConstraint(**task_dict.get("work_time")) + timepoint_constraints = stn.create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time) + inter_timepoint_constraints = [travel_time, work_time] + + return Task(task_id, timepoint_constraints, inter_timepoint_constraints) diff --git a/test/data/tasks.yaml b/test/data/tasks.yaml new file mode 100644 index 0000000..8ec79ca --- /dev/null +++ b/test/data/tasks.yaml @@ -0,0 +1,36 @@ +0616af00-ec3b-4ecd-ae62-c94a3703594c: + task_id: 0616af00-ec3b-4ecd-ae62-c94a3703594c + earliest_pickup: 10 + latest_pickup: 20 + travel_time: + name: "travel_time" + mean: 5 + variance: 0.2 + work_time: + name: "work_time" + mean: 10 + variance: 0.2 +207cc8da-2f0e-4538-802b-b8f3954df38d: + task_id: 207cc8da-2f0e-4538-802b-b8f3954df38d + earliest_pickup: 40 + latest_pickup: 50 + travel_time: + name: "travel_time" + mean: 5 + variance: 0.2 + work_time: + name: "work_time" + mean: 10 + variance: 0.2 +0d06fb90-a76d-48b4-b64f-857b7388ab70: + task_id: 0d06fb90-a76d-48b4-b64f-857b7388ab70 + earliest_pickup: 70 + latest_pickup: 80 + travel_time: + name: "travel_time" + mean: 5 + variance: 0.2 + work_time: + name: "work_time" + mean: 10 + variance: 0.2 diff --git a/test/update_pstn.py b/test/update_pstn.py index 0d2c11d..4c48eaf 100644 --- a/test/update_pstn.py +++ b/test/update_pstn.py @@ -1,47 +1,20 @@ +import os from stn.pstn.pstn import PSTN import unittest from stn.utils.uuid import from_str - - -class Task(object): - - def __init__(self): - self.task_id = '' - self.earliest_start_time = -1 - self.latest_start_time = -1 - self.start_pose_name = '' - self.finish_pose_name = '' - self.hard_constraints = True +from stn.utils.utils import load_yaml, create_task class UpdatePSTN(unittest.TestCase): def setUp(self): - task_1 = Task() - task_1.task_id = from_str("0616af00-ec3b-4ecd-ae62-c94a3703594c") - task_1.r_earliest_navigation_start_time = 0.0 - task_1.r_earliest_start_time = 96.0 - task_1.r_latest_start_time = 102.0 - task_1.start_pose_name = "AMK_TDU-TGR-1_X_14.03_Y_9.55" - task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" - - task_2 = Task() - task_2.task_id = from_str("207cc8da-2f0e-4538-802b-b8f3954df38d") - task_2.r_earliest_navigation_start_time = 0.0 - task_2.r_earliest_start_time = 71.0 - task_2.r_latest_start_time = 76.0 - task_2.start_pose_name = "AMK_TDU-TGR-1_X_7.15_Y_10.55" - task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" - - task_3 = Task() - task_3.task_id = from_str("0d06fb90-a76d-48b4-b64f-857b7388ab70") - task_3.r_earliest_navigation_start_time = 0.0 - task_3.r_earliest_start_time = 41.0 - task_3.r_latest_start_time = 47.0 - task_3.start_pose_name = "AMK_TDU-TGR-1_X_9.7_Y_5.6" - task_3.finish_pose_name = "AMK_TDU-TGR-1_X_5.82_Y_6.57" - - self.tasks = [task_1, task_2, task_3] + code_dir = os.path.abspath(os.path.dirname(__file__)) + tasks_dict = load_yaml(code_dir + "/data/tasks.yaml") + self.tasks = list() + for task_dict in tasks_dict.values(): + task = create_task(PSTN(), task_dict) + print(task) + self.tasks.append(task) def test_add_tasks_consecutively(self): """ Adds tasks in consecutive positions. 
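
To see what the new create_task helper produces from the yaml entries above (an illustrative sketch, not part of the patch; the path assumes the repository root as the working directory):

import os
from stn.stn import STN
from stn.utils.utils import load_yaml, create_task

tasks = load_yaml(os.path.join("test", "data", "tasks.yaml"))
task = create_task(STN(), tasks["0616af00-ec3b-4ecd-ae62-c94a3703594c"])
print(task)
# With earliest_pickup=10, latest_pickup=20, travel_time mean=5 and work_time mean=10,
# STN.create_timepoint_constraints yields start [5, 15], pickup [10, 20], delivery [20, 30].
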
Example diff --git a/test/update_stn.py b/test/update_stn.py index f0fcd1a..8b77752 100644 --- a/test/update_stn.py +++ b/test/update_stn.py @@ -1,47 +1,20 @@ -from stn.stn import STN +import os import unittest -from stn.utils.uuid import from_str - - -class Task(object): - def __init__(self): - self.task_id = '' - self.earliest_start_time = -1 - self.latest_start_time = -1 - self.start_pose_name = '' - self.finish_pose_name = '' - self.hard_constraints = True +from stn.stn import STN +from stn.utils.utils import load_yaml, create_task class UpdateSTN(unittest.TestCase): def setUp(self): - task_1 = Task() - task_1.task_id = from_str("0616af00-ec3b-4ecd-ae62-c94a3703594c") - task_1.r_earliest_navigation_start_time = 0.0 - task_1.r_earliest_start_time = 96.0 - task_1.r_latest_start_time = 102.0 - task_1.start_pose_name = "AMK_TDU-TGR-1_X_14.03_Y_9.55" - task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" - - task_2 = Task() - task_2.task_id = from_str("207cc8da-2f0e-4538-802b-b8f3954df38d") - task_2.r_earliest_navigation_start_time = 0.0 - task_2.r_earliest_start_time = 71.0 - task_2.r_latest_start_time = 76.0 - task_2.start_pose_name = "AMK_TDU-TGR-1_X_7.15_Y_10.55" - task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" - - task_3 = Task() - task_3.task_id = from_str("0d06fb90-a76d-48b4-b64f-857b7388ab70") - task_3.r_earliest_navigation_start_time = 0.0 - task_3.r_earliest_start_time = 41.0 - task_3.r_latest_start_time = 47.0 - task_3.start_pose_name = "AMK_TDU-TGR-1_X_9.7_Y_5.6" - task_3.finish_pose_name = "AMK_TDU-TGR-1_X_5.82_Y_6.57" - - self.tasks = [task_1, task_2, task_3] + code_dir = os.path.abspath(os.path.dirname(__file__)) + tasks_dict = load_yaml(code_dir + "/data/tasks.yaml") + self.tasks = list() + for task_dict in tasks_dict.values(): + task = create_task(STN(), task_dict) + print(task) + self.tasks.append(task) def test_add_tasks_consecutively(self): """ Adds tasks in consecutive positions. Example @@ -63,7 +36,7 @@ def test_add_tasks_consecutively(self): self.assertEqual(n_nodes, stn.number_of_nodes()) self.assertEqual(n_edges, stn.number_of_edges()) - def test_add_task_beggining(self): + def test_add_task_beginning(self): """Adds task at the beginning. 
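
An aside on the node bookkeeping these tests exercise (a quick check, not part of the patch): each task occupies three consecutive node ids after the zero timepoint at node 0, since the start node of the task at position p is 2*p + (p - 2) = 3*p - 2.

for position in (1, 2, 3):
    start_node_id = 2 * position + (position - 2)
    print(position, (start_node_id, start_node_id + 1, start_node_id + 2))
# 1 (1, 2, 3)   start, pickup, delivery of the first task
# 2 (4, 5, 6)
# 3 (7, 8, 9)
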
Displaces the other tasks """ print("--->Adding task at the beginning...") @@ -119,6 +92,8 @@ def test_remove_task_beginning(self): for i, task in enumerate(self.tasks): stn.add_task(task, i+1) + print(stn) + # Remove task in position 1 stn.remove_task(1) @@ -163,7 +138,6 @@ def test_remove_task_end(self): # Add all tasks for i, task in enumerate(self.tasks): stn.add_task(task, i+1) - print(stn) print(stn) # Remove task in position 3 @@ -191,7 +165,7 @@ def test_add_two_tasks(self): print(stn) stn_json = stn.to_json() - print("JSON format", stn_json) + # print("JSON format", stn_json) if __name__ == '__main__': diff --git a/test/update_stnu.py b/test/update_stnu.py index a812a1a..879cf39 100644 --- a/test/update_stnu.py +++ b/test/update_stnu.py @@ -1,47 +1,20 @@ -from stn.stnu.stnu import STNU +import os import unittest -from stn.utils.uuid import from_str - - -class Task(object): - def __init__(self): - self.task_id = '' - self.earliest_start_time = -1 - self.latest_start_time = -1 - self.start_pose_name = '' - self.finish_pose_name = '' - self.hard_constraints = True +from stn.stnu.stnu import STNU +from stn.utils.utils import load_yaml, create_task class UpdateSTNU(unittest.TestCase): def setUp(self): - task_1 = Task() - task_1.task_id = from_str("0616af00-ec3b-4ecd-ae62-c94a3703594c") - task_1.r_earliest_navigation_start_time = 0.0 - task_1.r_earliest_start_time = 96.0 - task_1.r_latest_start_time = 102.0 - task_1.start_pose_name = "AMK_TDU-TGR-1_X_14.03_Y_9.55" - task_1.finish_pose_name = "AMK_TDU-TGR-1_X_15.09_Y_5.69" - - task_2 = Task() - task_2.task_id = from_str("207cc8da-2f0e-4538-802b-b8f3954df38d") - task_2.r_earliest_navigation_start_time = 0.0 - task_2.r_earliest_start_time = 71.0 - task_2.r_latest_start_time = 76.0 - task_2.start_pose_name = "AMK_TDU-TGR-1_X_7.15_Y_10.55" - task_2.finish_pose_name = "AMK_TDU-TGR-1_X_6.67_Y_14.52" - - task_3 = Task() - task_3.task_id = from_str("0d06fb90-a76d-48b4-b64f-857b7388ab70") - task_3.r_earliest_navigation_start_time = 0.0 - task_3.r_earliest_start_time = 41.0 - task_3.r_latest_start_time = 47.0 - task_3.start_pose_name = "AMK_TDU-TGR-1_X_9.7_Y_5.6" - task_3.finish_pose_name = "AMK_TDU-TGR-1_X_5.82_Y_6.57" - - self.tasks = [task_1, task_2, task_3] + code_dir = os.path.abspath(os.path.dirname(__file__)) + tasks_dict = load_yaml(code_dir + "/data/tasks.yaml") + self.tasks = list() + for task_dict in tasks_dict.values(): + task = create_task(STNU(), task_dict) + print(task) + self.tasks.append(task) def test_add_tasks_consecutively(self): """ Adds tasks in consecutive positions. 
Example @@ -119,6 +92,8 @@ def test_remove_task_beginning(self): for i, task in enumerate(self.tasks): stnu.add_task(task, i+1) + print(stnu) + # Remove task in position 1 stnu.remove_task(1) @@ -163,7 +138,6 @@ def test_remove_task_end(self): # Add all tasks for i, task in enumerate(self.tasks): stnu.add_task(task, i+1) - print(stnu) print(stnu) # Remove task in position 3 @@ -191,7 +165,7 @@ def test_add_two_tasks(self): print(stnu) stnu_json = stnu.to_json() - print("JSON format ", stnu_json) + # print("JSON format ", stnu_json) if __name__ == '__main__': From 7bbb1e331d0adec00a5a9cee469cbbe88a36032e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Wed, 18 Dec 2019 09:11:33 +0100 Subject: [PATCH 19/44] Remove commented code --- stn/pstn/pstn.py | 16 ---------------- stn/stn.py | 5 ++++- 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index 42d0118..766663b 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -96,22 +96,6 @@ def add_constraint(self, i, j, wji=0.0, wij=float('inf'), distribution=""): self.add_edge(j, i, distribution=distribution) self.add_edge(j, i, is_contingent=is_contingent) - # def timepoint_hard_constraints(self, node_id, task, node_type): - # """ Adds the earliest and latest times to execute a timepoint (node) - # Navigation timepoint [0, inf] - # Start timepoint [earliest_start_time, latest_start_time] - # Finish timepoint [0, inf] - # """ - # - # if node_type == "navigation": - # self.add_constraint(0, node_id, task.r_earliest_navigation_start_time) - # - # if node_type == "start": - # self.add_constraint(0, node_id, task.r_earliest_start_time, task.r_latest_start_time) - # - # elif node_type == "finish": - # self.add_constraint(0, node_id) - # def get_contingent_constraints(self): """ Returns a dictionary with the contingent constraints in the PSTN {(starting_node, ending_node): Constraint (object)} diff --git a/stn/stn.py b/stn/stn.py index 5c55a54..ad1de9d 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -176,7 +176,7 @@ def add_task(self, task, position=1): self.logger.debug("New constraints between nodes: %s", new_constraints_between) constraints = [((i), (i + 1)) for i in new_constraints_between[:-1]] - print("Constraints: %s", constraints) + self.logger.debug("Constraints: %s", constraints) self.add_intertimepoints_constraints(constraints, task) @@ -450,6 +450,9 @@ def get_task_id(self, position): return task_id + def get_pickup_constraint(self, task_id): + task_position = self.get_task_position(task_id) + def get_task_position(self, task_id): for i, data in self.nodes.data(): if task_id == data['data'].task_id and data['data'].node_type == 'start': From 0ff9b89c4d9d9b13bf1e7dbf6c385cf0bbd58938 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Sat, 4 Jan 2020 08:00:20 +0100 Subject: [PATCH 20/44] stn: Fix get_task_position to return task position and not node position --- stn/stn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stn/stn.py b/stn/stn.py index ad1de9d..a1f4afd 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -456,7 +456,7 @@ def get_pickup_constraint(self, task_id): def get_task_position(self, task_id): for i, data in self.nodes.data(): if task_id == data['data'].task_id and data['data'].node_type == 'start': - return i + return math.ceil(i/3) def get_earliest_task_id(self): """ Returns the id of the earliest task in the stn From a9e142a0b00189dcf770ccd3d2bebbc98a651c62 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Thu, 9 Jan 2020 15:38:32 +0100 Subject: [PATCH 21/44] Add node attr is_executed Add methods to mark a timepoint and an edge as executed Add method to remove executed timepoints which do not have an influence on the remaning timepoints Add method to get node_idx Add method to remove consecutive node_ids from idx 1 onwards --- stn/node.py | 17 +++++--- stn/pstn/pstn.py | 13 +++--- stn/stn.py | 82 ++++++++++++++++++++++++++++++++--- stn/stnu/stnu.py | 7 ++- test/data/pstn_two_tasks.json | 66 ++++++++++++++++++---------- test/data/stn_two_tasks.json | 66 ++++++++++++++++++---------- test/data/stnu_two_tasks.json | 66 ++++++++++++++++++---------- 7 files changed, 233 insertions(+), 84 deletions(-) diff --git a/stn/node.py b/stn/node.py index a3fffb9..d7ebbaa 100644 --- a/stn/node.py +++ b/stn/node.py @@ -4,35 +4,41 @@ class Node(object): """Represents a timepoint in the STN """ - def __init__(self, task_id, node_type): + def __init__(self, task_id, node_type, is_executed=False): # id of the task represented by this node if isinstance(task_id, str): task_id = from_str(task_id) self.task_id = task_id # The node can be of node_type zero_timepoint, start, pickup or delivery self.node_type = node_type + self.is_executed = is_executed def __str__(self): to_print = "" - to_print += "node {} {}".format(self.task_id, self.node_type) + to_print += "{} {}".format(self.task_id, self.node_type) return to_print def __repr__(self): return str(self.to_dict()) def __hash__(self): - return hash((self.task_id, self.node_type)) + return hash((self.task_id, self.node_type, self.is_executed)) def __eq__(self, other): if other is None: return False return (self.task_id == other.task_id and - self.node_type == other.node_type) + self.node_type == other.node_type and + self.is_executed == other.is_executed) + + def execute(self): + self.is_executed = True def to_dict(self): node_dict = dict() node_dict['task_id'] = str(self.task_id) node_dict['node_type'] = self.node_type + node_dict['is_executed'] = self.is_executed return node_dict @staticmethod @@ -41,5 +47,6 @@ def from_dict(node_dict): if isinstance(task_id, str): task_id = from_str(task_id) node_type = node_dict['node_type'] - node = Node(task_id, node_type) + is_executed = node_dict.get('is_executed', False) + node = Node(task_id, node_type, is_executed) return node diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index 766663b..f7e4a95 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -53,13 +53,19 @@ def __str__(self): lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) + if timepoint.is_executed: + to_print += " Ex" # Constraints between the other timepoints else: if 'is_contingent' in self[j][i]: to_print += "Constraint {} => {}: [{}, {}] ({})".format(i, j, -self[j][i]['weight'], self[i][j]['weight'], self[i][j]['distribution']) + if self[i][j]['is_executed']: + to_print += " Ex" else: to_print += "Constraint {} => {}: [{}, {}]".format(i, j, -self[j][i]['weight'], self[i][j]['weight']) + if self[i][j]['is_executed']: + to_print += " Ex" to_print += "\n" @@ -90,11 +96,8 @@ def add_constraint(self, i, j, wji=0.0, wij=float('inf'), distribution=""): super().add_constraint(i, j, wji, wij) - self.add_edge(i, j, distribution=distribution) - self.add_edge(i, j, is_contingent=is_contingent) - - self.add_edge(j, i, distribution=distribution) - self.add_edge(j, i, is_contingent=is_contingent) + 
self.add_edge(i, j, distribution=distribution, is_contingent=is_contingent) + self.add_edge(j, i, distribution=distribution, is_contingent=is_contingent) def get_contingent_constraints(self): """ Returns a dictionary with the contingent constraints in the PSTN diff --git a/stn/stn.py b/stn/stn.py index a1f4afd..e02f678 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -46,9 +46,13 @@ def __str__(self): lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) + if timepoint.is_executed: + to_print += " Ex" # Constraints between the other timepoints else: to_print += "Constraint {} => {}: [{}, {}]".format(i, j, -self[j][i]['weight'], self[i][j]['weight']) + if self[i][j]['is_executed']: + to_print += " Ex" to_print += "\n" @@ -80,8 +84,8 @@ def add_constraint(self, i, j, wji=0.0, wij=float('inf')): # Maximum allocated time between i and j max_time = wij - self.add_edge(j, i, weight=min_time) - self.add_edge(i, j, weight=max_time) + self.add_edge(j, i, weight=min_time, is_executed=False) + self.add_edge(i, j, weight=max_time, is_executed=False) def remove_constraint(self, i, j): """ i : starting node id @@ -276,6 +280,18 @@ def remove_task(self, position=1): # wait time between finish of one task and start of the next one self.add_constraint(i, j) + def remove_node_ids(self, node_ids): + # Assumes that the node_ids are in consecutive order from node_id 1 onwards + for node_id in node_ids: + self.remove_node(node_id) + + # Displace all remaining nodes by 3 + mapping = {} + for node_id, data in self.nodes(data=True): + if node_id > 0: + mapping[node_id] = node_id - 3 + nx.relabel_nodes(self, mapping, copy=False) + def get_tasks(self): """ Gets the tasks (in order) @@ -284,9 +300,8 @@ def get_tasks(self): """ tasks = list() for i in self.nodes(): - if self.nodes[i]['data'].node_type == "start": + if self.nodes[i]['data'].task_id not in tasks and self.nodes[i]['data'].node_type != 'zero_timepoint': tasks.append(self.nodes[i]['data'].task_id) - return tasks def is_consistent(self, shortest_path_array): @@ -450,9 +465,6 @@ def get_task_id(self, position): return task_id - def get_pickup_constraint(self, task_id): - task_position = self.get_task_position(task_id) - def get_task_position(self, task_id): for i, data in self.nodes.data(): if task_id == data['data'].task_id and data['data'].node_type == 'start': @@ -488,6 +500,12 @@ def get_task_node_ids(self, task_id): return node_ids + def get_task_graph(self, task_id): + node_ids = self.get_task_node_ids(task_id) + node_ids.insert(0, 0) + task_graph = self.subgraph(node_ids) + return task_graph + def get_subgraph(self, n_tasks): """ Returns a subgraph of the stn that includes the nodes of the first n_tasks and the zero timepoint @@ -509,6 +527,56 @@ def get_subgraph(self, n_tasks): sub_graph = self.subgraph(node_ids) return sub_graph + def execute_timepoint(self, task_id, node_type): + for i in self.nodes(): + node_data = self.nodes[i]['data'] + if node_data.task_id == task_id and node_data.node_type == node_type: + node_data.is_executed = True + + def execute_edge(self, node_1, node_2): + nx.set_edge_attributes(self, {(node_1, node_2): {'is_executed': True}, + (node_2, node_1): {'is_executed': True}}) + + def execute_incoming_edge(self, task_id, node_type): + finish_node_idx = self.get_edge_node_idx(task_id, node_type) + if node_type == "start": + return + elif node_type == "pickup": + start_node_idx = self.get_edge_node_idx(task_id, "start") + elif node_type == 
"delivery": + start_node_idx = self.get_edge_node_idx(task_id, "pickup") + self.execute_edge(start_node_idx, finish_node_idx) + + def remove_old_timepoints(self): + nodes_to_remove = list() + for i in self.nodes(): + node_data = self.nodes[i]['data'] + if not node_data.is_executed: + continue + if node_data.is_executed and (self.has_edge(i, i+1) and self[i][i+1]['is_executed']): + nodes_to_remove.append(i) + elif node_data.is_executed and not self.has_edge(i, i+1): + nodes_to_remove.append(i) + + for node in nodes_to_remove: + self.remove_node(node) + + def get_edge_node_idx(self, task_id, node_type): + for i in self.nodes(): + node_data = self.nodes[i]['data'] + if node_data.task_id == task_id and node_data.node_type == node_type: + return i + + def get_edge_nodes_idx(self, task_id, node_type_1, node_type_2): + for i in self.nodes(): + node_data = self.nodes[i]['data'] + if node_data.task_id == task_id and node_data.node_type == node_type_1: + start_node_idx = i + elif node_data.task_id == task_id and node_data.node_type == node_type_2: + finish_node_idx = i + + return start_node_idx, finish_node_idx + def to_json(self): stn_dict = self.to_dict() MyEncoder().encode(stn_dict) diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index 9d3341e..5daa1aa 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -27,13 +27,19 @@ def __str__(self): lower_bound = -self[j][i]['weight'] upper_bound = self[i][j]['weight'] to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound) + if timepoint.is_executed: + to_print += " Ex" # Constraints between the other timepoints else: if self[j][i]['is_contingent'] is True: to_print += "Constraint {} => {}: [{}, {}] (contingent)".format(i, j, -self[j][i]['weight'], self[i][j]['weight']) + if self[i][j]['is_executed']: + to_print += " Ex" else: to_print += "Constraint {} => {}: [{}, {}]".format(i, j, -self[j][i]['weight'], self[i][j]['weight']) + if self[i][j]['is_executed']: + to_print += " Ex" to_print += "\n" @@ -68,7 +74,6 @@ def add_constraint(self, i, j, wji=0.0, wij=float('inf'), is_contingent=False): super().add_constraint(i, j, wji, wij) self.add_edge(i, j, is_contingent=is_contingent) - self.add_edge(j, i, is_contingent=is_contingent) def get_contingent_constraints(self): diff --git a/test/data/pstn_two_tasks.json b/test/data/pstn_two_tasks.json index ac1e50c..51efca8 100644 --- a/test/data/pstn_two_tasks.json +++ b/test/data/pstn_two_tasks.json @@ -5,154 +5,176 @@ "target":1, "weight":Infinity, "source":0, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":2, "weight":47.0, "source":0, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":3, "weight":Infinity, "source":0, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":4, "weight":Infinity, "source":0, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":5, "weight":102.0, "source":0, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":6, "weight":Infinity, "source":0, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":0, "weight":-0.0, "source":1, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"N_6_1", "target":2, "weight":Infinity, "source":1, - "is_contingent":true + "is_contingent":true, + "is_executed": false }, { 
"distribution":"", "target":0, "weight":-41.0, "source":2, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"N_6_1", "target":1, "weight":-0.0, "source":2, - "is_contingent":true + "is_contingent":true, + "is_executed": false }, { "distribution":"N_4_1", "target":3, "weight":Infinity, "source":2, - "is_contingent":true + "is_contingent":true, + "is_executed": false }, { "distribution":"", "target":0, "weight":-0.0, "source":3, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"N_4_1", "target":2, "weight":-0.0, "source":3, - "is_contingent":true + "is_contingent":true, + "is_executed": false }, { "distribution":"", "target":4, "weight":Infinity, "source":3, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":0, "weight":-0.0, "source":4, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"", "target":3, "weight":-0.0, "source":4, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"N_6_1", "target":5, "weight":Infinity, "source":4, - "is_contingent":true + "is_contingent":true, + "is_executed": false }, { "distribution":"", "target":0, "weight":-96.0, "source":5, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"N_6_1", "target":4, "weight":-0.0, "source":5, - "is_contingent":true + "is_contingent":true, + "is_executed": false }, { "distribution":"N_4_1", "target":6, "weight":Infinity, "source":5, - "is_contingent":true + "is_contingent":true, + "is_executed": false }, { "distribution":"", "target":0, "weight":-0.0, "source":6, - "is_contingent":false + "is_contingent":false, + "is_executed": false }, { "distribution":"N_4_1", "target":5, "weight":-0.0, "source":6, - "is_contingent":true + "is_contingent":true, + "is_executed": false } ], "graph":{ diff --git a/test/data/stn_two_tasks.json b/test/data/stn_two_tasks.json index bf9daa7..fe4319e 100644 --- a/test/data/stn_two_tasks.json +++ b/test/data/stn_two_tasks.json @@ -3,112 +3,134 @@ { "source":0, "target":1, - "weight":41.0 + "weight":41.0, + "is_executed": false }, { "source":0, "target":2, - "weight":47.0 + "weight":47.0, + "is_executed": false }, { "source":0, "target":3, - "weight":51.0 + "weight":51.0, + "is_executed": false }, { "source":0, "target":4, - "weight":96.0 + "weight":96.0, + "is_executed": false }, { "source":0, "target":5, - "weight":102.0 + "weight":102.0, + "is_executed": false }, { "source":0, "target":6, - "weight":106.0 + "weight":106.0, + "is_executed": false }, { "source":1, "target":0, - "weight":-35.0 + "weight":-35.0, + "is_executed": false }, { "source":1, "target":2, - "weight":Infinity + "weight":Infinity, + "is_executed": false }, { "source":2, "target":0, - "weight":-41.0 + "weight":-41.0, + "is_executed": false }, { "source":2, "target":1, - "weight":-6.0 + "weight":-6.0, + "is_executed": false }, { "source":2, "target":3, - "weight":Infinity + "weight":Infinity, + "is_executed": false }, { "source":3, "target":0, - "weight":-45.0 + "weight":-45.0, + "is_executed": false }, { "source":3, "target":2, - "weight":-4.0 + "weight":-4.0, + "is_executed": false }, { "source":3, "target":4, - "weight":Infinity + "weight":Infinity, + "is_executed": false }, { "source":4, "target":0, - "weight":-90.0 + "weight":-90.0, + "is_executed": false }, { "source":4, "target":3, - "weight":-0.0 + "weight":-0.0, + "is_executed": false }, { "source":4, "target":5, 
- "weight":Infinity + "weight":Infinity, + "is_executed": false }, { "source":5, "target":0, - "weight":-96.0 + "weight":-96.0, + "is_executed": false }, { "source":5, "target":4, - "weight":-6.0 + "weight":-6.0, + "is_executed": false }, { "source":5, "target":6, - "weight":Infinity + "weight":Infinity, + "is_executed": false }, { "source":6, "target":0, - "weight":-100.0 + "weight":-100.0, + "is_executed": false }, { "source":6, "target":5, - "weight":-4.0 + "weight":-4.0, + "is_executed": false } ], "graph":{ diff --git a/test/data/stnu_two_tasks.json b/test/data/stnu_two_tasks.json index 9d659f5..c887ef6 100644 --- a/test/data/stnu_two_tasks.json +++ b/test/data/stnu_two_tasks.json @@ -63,133 +63,155 @@ "target":1, "is_contingent":false, "weight":Infinity, - "source":0 + "source":0, + "is_executed": false }, { "target":2, "is_contingent":false, "weight":47.0, - "source":0 + "source":0, + "is_executed": false }, { "target":3, "is_contingent":false, "weight":Infinity, - "source":0 + "source":0, + "is_executed": false }, { "target":4, "is_contingent":false, "weight":Infinity, - "source":0 + "source":0, + "is_executed": false }, { "target":5, "is_contingent":false, "weight":102.0, - "source":0 + "source":0, + "is_executed": false }, { "target":6, "is_contingent":false, "weight":Infinity, - "source":0 + "source":0, + "is_executed": false }, { "target":0, "is_contingent":false, "weight":-0.0, - "source":1 + "source":1, + "is_executed": false }, { "target":2, "is_contingent":true, "weight":8.0, - "source":1 + "source":1, + "is_executed": false }, { "target":0, "is_contingent":false, "weight":-41.0, - "source":2 + "source":2, + "is_executed": false }, { "target":1, "is_contingent":true, "weight":-4.0, - "source":2 + "source":2, + "is_executed": false }, { "target":3, "is_contingent":true, "weight":6.0, - "source":2 + "source":2, + "is_executed": false }, { "target":0, "is_contingent":false, "weight":-0.0, - "source":3 + "source":3, + "is_executed": false }, { "target":2, "is_contingent":true, "weight":-2.0, - "source":3 + "source":3, + "is_executed": false }, { "target":4, "is_contingent":false, "weight":Infinity, - "source":3 + "source":3, + "is_executed": false }, { "target":0, "is_contingent":false, "weight":-0.0, - "source":4 + "source":4, + "is_executed": false }, { "target":3, "is_contingent":false, "weight":0, - "source":4 + "source":4, + "is_executed": false }, { "target":5, "is_contingent":true, "weight":8.0, - "source":4 + "source":4, + "is_executed": false }, { "target":0, "is_contingent":false, "weight":-96.0, - "source":5 + "source":5, + "is_executed": false }, { "target":4, "is_contingent":true, "weight":-4.0, - "source":5 + "source":5, + "is_executed": false }, { "target":6, "is_contingent":true, "weight":6.0, - "source":5 + "source":5, + "is_executed": false }, { "target":0, "is_contingent":false, "weight":-0.0, - "source":6 + "source":6, + "is_executed": false }, { "target":5, "is_contingent":true, "weight":-2.0, - "source":6 + "source":6, + "is_executed": false } ], "graph":{ From 6cf2699bd9dc85b274bbb373b59ae96b30928906 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Sat, 18 Jan 2020 14:35:40 +0100 Subject: [PATCH 22/44] Add opt arg force to assign_timepoint get_task_id(position): If a task does not have start node, return the pickup or the delivery node --- stn/stn.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/stn/stn.py b/stn/stn.py index e02f678..3f9b309 100644 --- a/stn/stn.py +++ 
b/stn/stn.py @@ -320,7 +320,7 @@ def update_edges(self, shortest_path_array, create=False): for n in nodes: self.update_edge_weight(column, n, shortest_path_array[column][n]) - def update_edge_weight(self, i, j, weight, create=False): + def update_edge_weight(self, i, j, weight, force=False): """ Updates the weight of the edge between node starting_node and node ending_node Updates the weight if the new weight is less than the previous weight @@ -340,7 +340,10 @@ def update_edge_weight(self, i, j, weight, create=False): if weight < self[i][j]['weight']: self[i][j]['weight'] = weight - def assign_timepoint(self, allotted_time, task_id, node_type): + if force: + self[i][j]['weight'] = weight + + def assign_timepoint(self, allotted_time, task_id, node_type, force=False): """ Assigns the allotted time to the earliest and latest time of the timepoint of task_id of type node_type @@ -353,8 +356,8 @@ def assign_timepoint(self, allotted_time, task_id, node_type): for i in self.nodes(): node_data = self.nodes[i]['data'] if node_data.task_id == task_id and node_data.node_type == node_type: - self.update_edge_weight(0, i, allotted_time) - self.update_edge_weight(i, 0, -allotted_time) + self.update_edge_weight(0, i, allotted_time, force) + self.update_edge_weight(i, 0, -allotted_time, force) break def get_edge_weight(self, i, j): @@ -455,10 +458,16 @@ def get_task_id(self, position): Returns: (string) task id """ - start_node = 2 * position + (position-2) + start_node_id = 2 * position + (position-2) + pickup_node_id = start_node_id + 1 + delivery_node_id = pickup_node_id + 1 - if self.has_node(start_node): - task_id = self.nodes[start_node]['data'].task_id + if self.has_node(start_node_id): + task_id = self.nodes[start_node_id]['data'].task_id + elif self.has_node(pickup_node_id): + task_id = self.nodes[pickup_node_id]['data'].task_id + elif self.has_node(delivery_node_id): + task_id = self.nodes[delivery_node_id]['data'].task_id else: self.logger.error("There is no task in position %s", position) return From ba7beb64f17ebd5690fc7e9e5e9e319f3571c306 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 27 Jan 2020 14:11:43 +0100 Subject: [PATCH 23/44] Add AsDictMixin to convert Task from an to a dict --- stn/stn.py | 25 ++++++++++++++++++++++ stn/task.py | 31 ++++++++++++++++++++++++--- stn/utils/as_dict.py | 50 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 103 insertions(+), 3 deletions(-) create mode 100644 stn/utils/as_dict.py diff --git a/stn/stn.py b/stn/stn.py index 3f9b309..f5fab61 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -245,6 +245,31 @@ def show_n_nodes_edges(self): self.logger.info("Nodes: %s ", self.number_of_nodes()) self.logger.info("Edges: %s ", self.number_of_edges()) + def update_task(self, task): + position = self.get_task_position(task.task_id) + start_node_id = 2 * position + (position-2) + pickup_node_id = start_node_id + 1 + delivery_node_id = pickup_node_id + 1 + + # Adding an existing timepoint constraint updates the constraint + self.add_timepoint_constraint(start_node_id, task.get_timepoint_constraint("start")) + self.add_timepoint_constraint(pickup_node_id, task.get_timepoint_constraint("pickup")) + self.add_timepoint_constraint(delivery_node_id, task.get_timepoint_constraint("delivery")) + + # Add constraints between new nodes + new_constraints_between = [start_node_id, pickup_node_id, delivery_node_id] + + # Check if there is a node after the new delivery node + if self.has_node(delivery_node_id+1): + 
new_constraints_between.append(delivery_node_id+1) + + # Check if there is a node before the new start node + if self.has_node(start_node_id-1): + new_constraints_between.insert(0, start_node_id-1) + + constraints = [((i), (i + 1)) for i in new_constraints_between[:-1]] + self.add_intertimepoints_constraints(constraints, task) + def remove_task(self, position=1): """ Removes the task from the given position""" diff --git a/stn/task.py b/stn/task.py index ba3e9de..4117f76 100644 --- a/stn/task.py +++ b/stn/task.py @@ -1,7 +1,8 @@ import numpy as np +from stn.utils.as_dict import AsDictMixin -class InterTimepointConstraint: +class InterTimepointConstraint(AsDictMixin): def __init__(self, name, mean, variance, **kwargs): self.name = name self.mean = round(mean, 3) @@ -14,7 +15,7 @@ def __str__(self): return to_print -class TimepointConstraint: +class TimepointConstraint(AsDictMixin): """ r_earliest_time (float): earliest time relative to a ztp r_latest_time (float): latest time relative to a ztp @@ -31,7 +32,7 @@ def __str__(self): return to_print -class Task(object): +class Task(AsDictMixin): def __init__(self, task_id, timepoint_constraints, inter_timepoint_constraints): """ Constructor for the Task object @@ -83,3 +84,27 @@ def update_timepoint_constraint(self, constraint_name, r_earliest_time, r_latest r_earliest_time, r_latest_time)) + def to_dict(self): + dict_repr = super().to_dict() + timepoint_constraints = list() + inter_timepoint_constraints = list() + for c in self.timepoint_constraints: + timepoint_constraints.append(c.to_dict()) + for c in self.inter_timepoint_constraints: + inter_timepoint_constraints.append(c.to_dict()) + dict_repr.update(timepoint_constraints=timepoint_constraints) + dict_repr.update(inter_timepoint_constraints=inter_timepoint_constraints) + return dict_repr + + @classmethod + def to_attrs(cls, dict_repr): + attrs = super().to_attrs(dict_repr) + timepoint_constraints = list() + inter_timepoint_constraints = list() + for c in attrs.get("timepoint_constraints"): + timepoint_constraints.append(TimepointConstraint.from_dict(c)) + for c in attrs.get("inter_timepoint_constraints"): + inter_timepoint_constraints.append(InterTimepointConstraint.from_dict(c)) + attrs.update(timepoint_constraints=timepoint_constraints) + attrs.update(inter_timepoint_constraints=inter_timepoint_constraints) + return attrs diff --git a/stn/utils/as_dict.py b/stn/utils/as_dict.py new file mode 100644 index 0000000..ac1eef5 --- /dev/null +++ b/stn/utils/as_dict.py @@ -0,0 +1,50 @@ +""" Adapted from: +https://realpython.com/inheritance-composition-python/#mixing-features-with-mixin-classes +""" +import uuid +from stn.utils.uuid import from_str + + +class AsDictMixin: + + def to_dict(self): + return { + prop: self._represent(value) + for prop, value in self.__dict__.items() + if not self.is_internal(prop) + } + + @classmethod + def _represent(cls, value): + if isinstance(value, object): + if hasattr(value, 'to_dict'): + return value.to_dict() + elif isinstance(value, uuid.UUID): + return str(value) + else: + return value + else: + return value + + @staticmethod + def is_internal(prop): + return prop.startswith('_') + + @classmethod + def from_dict(cls, dict_repr): + attrs = cls.to_attrs(dict_repr) + return cls(**attrs) + + @classmethod + def to_attrs(cls, dict_repr): + attrs = dict() + for key, value in dict_repr.items(): + attrs[key] = cls._get_value(key, value) + return attrs + + @classmethod + def _get_value(cls, key, value): + if key == 'task_id': + return from_str(value) + else: + 
return value From 1f04549607e48a3e4d5c97e9525fd36a49f6c6ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 3 Feb 2020 10:27:24 +0100 Subject: [PATCH 24/44] stn: Do not remove last executed node --- stn/stn.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/stn/stn.py b/stn/stn.py index f5fab61..da7c9aa 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -589,8 +589,6 @@ def remove_old_timepoints(self): continue if node_data.is_executed and (self.has_edge(i, i+1) and self[i][i+1]['is_executed']): nodes_to_remove.append(i) - elif node_data.is_executed and not self.has_edge(i, i+1): - nodes_to_remove.append(i) for node in nodes_to_remove: self.remove_node(node) From e4c496d86ba3e92eff80cde0f6ad4790244f62f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Tue, 4 Feb 2020 14:59:23 +0100 Subject: [PATCH 25/44] stn: Increment tolerance to evaluate a number equal to 0.0 --- stn/stn.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/stn/stn.py b/stn/stn.py index da7c9aa..56b6aa3 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -333,7 +333,9 @@ def is_consistent(self, shortest_path_array): """The STN is not consistent if it has negative cycles""" consistent = True for node, nodes in shortest_path_array.items(): - if not math.isclose(nodes[node], 0.0, abs_tol=1e-09): + # Check if the tolerance is too large. Maybe it is better to use + # only integers and change the resolution to seconds + if not math.isclose(nodes[node], 0.0, abs_tol=1e-01): consistent = False return consistent From 6f4111bd894cb63c66c124d68a5f9e425d4ac6fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Wed, 5 Feb 2020 09:11:51 +0100 Subject: [PATCH 26/44] Fix contingent timepoints and edges for stnu If a contigent edge's upper bound is equal to its lower bound, make it a requirement edge Fix get contingent timepoints. Do not use node list (it might be out of index), append an integer instead --- stn/config/config.py | 4 ++-- stn/methods/dsc_lp.py | 3 ++- stn/stnu/stnu.py | 8 +++++--- test/test_dsc.py | 2 +- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/stn/config/config.py b/stn/config/config.py index 5902b94..8ccaa29 100644 --- a/stn/config/config.py +++ b/stn/config/config.py @@ -147,10 +147,10 @@ def fpc_algorithm(stn): stn_factory = STNFactory() stn_factory.register_stn('fpc', STN) stn_factory.register_stn('srea', PSTN) -stn_factory.register_stn('dsc_lp', STNU) +stn_factory.register_stn('dsc', STNU) stp_solver_factory = STPSolverFactory() stp_solver_factory.register_solver('fpc', FullPathConsistency) stp_solver_factory.register_solver('srea', StaticRobustExecution) stp_solver_factory.register_solver('drea', StaticRobustExecution) -stp_solver_factory.register_solver('dsc_lp', DegreeStongControllability) \ No newline at end of file +stp_solver_factory.register_solver('dsc', DegreeStongControllability) \ No newline at end of file diff --git a/stn/methods/dsc_lp.py b/stn/methods/dsc_lp.py index 3bf0d38..638b982 100644 --- a/stn/methods/dsc_lp.py +++ b/stn/methods/dsc_lp.py @@ -22,6 +22,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
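
Stepping back to the AsDictMixin changes above, a minimal round-trip sketch for Task (illustrative only, not part of any commit; the sample id and values come from test/data/tasks.yaml):

from stn.task import Task, TimepointConstraint, InterTimepointConstraint
from stn.utils.uuid import from_str

task = Task(from_str("0616af00-ec3b-4ecd-ae62-c94a3703594c"),
            [TimepointConstraint("pickup", 10, 20)],
            [InterTimepointConstraint("travel_time", 5, 0.2)])
restored = Task.from_dict(task.to_dict())   # task_id is serialized as a str and parsed back
assert restored.task_id == task.task_id
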
+import copy import pulp import sys @@ -43,7 +44,7 @@ class DSC_LP(object): logger = logging.getLogger('stn.dsc_lp') def __init__(self, stnu): - self.stnu = stnu + self.stnu = copy.deepcopy(stnu) self.constraints = stnu.get_constraints() self.contingent_constraints = stnu.get_contingent_constraints() self.contingent_timepoints = stnu.get_contingent_timepoints() diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index 5daa1aa..3fb5aad 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -91,12 +91,11 @@ def get_contingent_constraints(self): def get_contingent_timepoints(self): """ Returns a list with the contingent (uncontrollable) timepoints in the STNU """ - timepoints = list(self.nodes) contingent_timepoints = list() for (i, j, data) in self.edges.data(): if self[i][j]['is_contingent'] is True and i < j: - contingent_timepoints.append(timepoints[j]) + contingent_timepoints.append(j) return contingent_timepoints @@ -123,7 +122,10 @@ def add_intertimepoints_constraints(self, constraints, task): self.logger.debug("Adding constraint: %s ", (i, j)) if self.nodes[i]['data'].node_type == "start": lower_bound, upper_bound = self.get_travel_time_bounded_duration(task) - self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) + if lower_bound == upper_bound: + self.add_constraint(i, j, 0, 0) + else: + self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) elif self.nodes[i]['data'].node_type == "pickup": lower_bound, upper_bound = self.get_work_time_bounded_duration(task) diff --git a/test/test_dsc.py b/test/test_dsc.py index 2d04e92..6c7f19d 100644 --- a/test/test_dsc.py +++ b/test/test_dsc.py @@ -29,7 +29,7 @@ def setUp(self): # Convert the dict to a json string stnu_json = json.dumps(stnu_dict) - self.stp = STP('dsc_lp') + self.stp = STP('dsc') self.stn = self.stp.get_stn(stn_json=stnu_json) def test_build_stn(self): From 33be94e1f2129aa9736af70bcaef585334c3a302 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Thu, 6 Feb 2020 13:27:17 +0100 Subject: [PATCH 27/44] stn: Add methods __eq__, __ne__ --- stn/node.py | 3 +++ stn/stn.py | 21 +++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/stn/node.py b/stn/node.py index d7ebbaa..fea2c5f 100644 --- a/stn/node.py +++ b/stn/node.py @@ -31,6 +31,9 @@ def __eq__(self, other): self.node_type == other.node_type and self.is_executed == other.is_executed) + def __ne__(self, other): + return not self.__eq__(other) + def execute(self): self.is_executed = True diff --git a/stn/stn.py b/stn/stn.py index 56b6aa3..4ff780e 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -58,6 +58,27 @@ def __str__(self): return to_print + def __eq__(self, other): + if other is None: + return False + if len(other.nodes()) != len(self.nodes()): + return False + for (i, j, data) in self.edges.data(): + if other.has_edge(i, j): + if other[i][j]['weight'] != self[i][j]['weight']: + return False + else: + return False + if other.has_node(i): + if other.nodes[i]['data'] != self.nodes[i]['data'] : + return False + else: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + def add_zero_timepoint(self): node = Node(generate_uuid(), 'zero_timepoint') self.add_node(0, data=node) From 4cd09edc602bacdddbddf83c773fe9be00826ebb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 10 Feb 2020 15:50:02 +0100 Subject: [PATCH 28/44] stn: Fix get_idle_time Idle time is the time between the delivery of last task and the start of current 
task (not pickup of current task) --- stn/stn.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/stn/stn.py b/stn/stn.py index 4ff780e..8fc5889 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -460,9 +460,9 @@ def get_idle_time(self): for i, task_id in enumerate(task_ids): if i > 0: - r_earliest_finish_time_previous_task = self.get_time(task_ids[i-1], "delivery") - r_earliest_start_time = self.get_time(task_ids[i], "pickup") - idle_time += round(r_earliest_start_time - r_earliest_finish_time_previous_task) + r_earliest_delivery_time_previous_task = self.get_time(task_ids[i-1], "delivery") + r_earliest_start_time = self.get_time(task_ids[i], "start") + idle_time += round(r_earliest_start_time - r_earliest_delivery_time_previous_task) return idle_time def add_timepoint_constraint(self, node_id, timepoint_constraint): From 51b60bec845e0028cb6cc0e9e4ec919ae2d36e96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 17 Feb 2020 08:03:18 +0100 Subject: [PATCH 29/44] pstn: Make a constraint with stdev 0 a requirement constraint --- stn/pstn/pstn.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index f7e4a95..d91b85d 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -129,8 +129,10 @@ def add_intertimepoints_constraints(self, constraints, task): self.logger.debug("Adding constraint: %s ", (i, j)) if self.nodes[i]['data'].node_type == "start": distribution = self.get_travel_time_distribution(task) - if distribution == "N_0.0_0.0": - self.add_constraint(i, j, 0, 0) + if distribution.endswith("_0.0"): # the distribution has no variation (stdev is 0) + # Make the constraint a requirement constraint + mean = float(distribution.split("_")[1]) + self.add_constraint(i, j, mean, 0) else: self.add_constraint(i, j, distribution=distribution) From bd21110416c90bbd46eae1dee93afb2ee157b0fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Fri, 21 Feb 2020 12:12:02 +0100 Subject: [PATCH 30/44] stn: Add method to assign earliest time to a timepoint --- stn/stn.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/stn/stn.py b/stn/stn.py index 8fc5889..bf80c2c 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -408,6 +408,13 @@ def assign_timepoint(self, allotted_time, task_id, node_type, force=False): self.update_edge_weight(i, 0, -allotted_time, force) break + def assign_earliest_time(self, time_, task_id, node_type, force=False): + for i in self.nodes(): + node_data = self.nodes[i]['data'] + if node_data.task_id == task_id and node_data.node_type == node_type: + self.update_edge_weight(i, 0, -time_, force) + break + def get_edge_weight(self, i, j): """ Returns the weight of the edge between node starting_node and node ending_node :param i: starting_node_id From 711bb7565c63d0344265cf19ec7b0ee1decf8c27 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Fri, 21 Feb 2020 12:13:35 +0100 Subject: [PATCH 31/44] pstn: Earliest start time of prev_timepoint_constraint is zero --- stn/pstn/pstn.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index d91b85d..9a5087a 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -158,8 +158,7 @@ def get_work_time_distribution(task): @staticmethod def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): - r_earliest_time = next_timepoint_constraint.r_earliest_time - \ - 
(inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) + r_earliest_time = 0 r_latest_time = np.inf return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) From 1f2986069ef0d5ed109a29aa71347114e0b58871 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Thu, 27 Feb 2020 12:50:45 +0100 Subject: [PATCH 32/44] stn: Fix get_completion_time The completion time is the sum of the finish times of all tasks in the stn, not the diff between the first and the last task --- stn/stn.py | 26 ++++++++++---------------- test/data/pstn_two_tasks.json | 12 ++++++------ test/data/stn_two_tasks.json | 12 ++++++------ test/data/stnu_two_tasks.json | 12 ++++++------ test/test_dsc.py | 2 +- test/test_fpc.py | 2 +- test/test_srea.py | 2 +- 7 files changed, 31 insertions(+), 37 deletions(-) diff --git a/stn/stn.py b/stn/stn.py index bf80c2c..4cb1098 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -34,7 +34,6 @@ def __init__(self): self.add_zero_timepoint() self.max_makespan = MAX_FLOAT self.risk_metric = None - self.temporal_metric = None def __str__(self): to_print = "" @@ -430,27 +429,22 @@ def get_edge_weight(self, i, j): def compute_temporal_metric(self, temporal_criterion): if temporal_criterion == 'completion_time': - self.temporal_metric = self.get_completion_time() + temporal_metric = self.get_completion_time() elif temporal_criterion == 'makespan': - self.temporal_metric = self.get_makespan() + temporal_metric = self.get_makespan() elif temporal_criterion == 'idle_time': - self.temporal_metric = self.get_idle_time() + temporal_metric = self.get_idle_time() else: raise ValueError(temporal_criterion) + return temporal_metric def get_completion_time(self): - nodes = list(self.nodes()) - node_first_task = nodes[1] - node_last_task = nodes[-1] - - start_time_lower_bound = -self[node_first_task][0]['weight'] - - finish_time_lower_bound = -self[node_last_task][0]['weight'] - - self.logger.debug("Start time: %s", start_time_lower_bound) - self.logger.debug("Finish time: %s", finish_time_lower_bound) - - completion_time = finish_time_lower_bound - start_time_lower_bound + completion_time = 0 + task_ids = self.get_tasks() + for i, task_id in enumerate(task_ids): + print(task_id) + print(self.get_time(task_id, "delivery", lower_bound=False)) + completion_time += self.get_time(task_id, "delivery", lower_bound=False) return completion_time diff --git a/test/data/pstn_two_tasks.json b/test/data/pstn_two_tasks.json index 51efca8..06a5f14 100644 --- a/test/data/pstn_two_tasks.json +++ b/test/data/pstn_two_tasks.json @@ -195,7 +195,7 @@ "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"", - "node_type":"navigation" + "node_type":"start" } }, { @@ -203,7 +203,7 @@ "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_9.7_Y_5.6", - "node_type":"start" + "node_type":"pickup" } }, { @@ -211,7 +211,7 @@ "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_5.82_Y_6.57", - "node_type":"finish" + "node_type":"delivery" } }, { @@ -219,7 +219,7 @@ "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"", - "node_type":"navigation" + "node_type":"start" } }, { @@ -227,7 +227,7 @@ "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_14.03_Y_9.55", - "node_type":"start" + "node_type":"pickup" } }, { @@ -235,7 +235,7 @@ "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_15.09_Y_5.69", - "node_type":"finish" + 
"node_type":"delivery" } } ], diff --git a/test/data/stn_two_tasks.json b/test/data/stn_two_tasks.json index fe4319e..e0d0baa 100644 --- a/test/data/stn_two_tasks.json +++ b/test/data/stn_two_tasks.json @@ -150,7 +150,7 @@ { "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", - "node_type":"navigation", + "node_type":"start", "pose":"" }, "id":1 @@ -158,7 +158,7 @@ { "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", - "node_type":"start", + "node_type":"pickup", "pose":"AMK_TDU-TGR-1_X_9.7_Y_5.6" }, "id":2 @@ -166,7 +166,7 @@ { "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", - "node_type":"finish", + "node_type":"delivery", "pose":"AMK_TDU-TGR-1_X_5.82_Y_6.57" }, "id":3 @@ -174,7 +174,7 @@ { "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", - "node_type":"navigation", + "node_type":"start", "pose":"" }, "id":4 @@ -182,7 +182,7 @@ { "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", - "node_type":"start", + "node_type":"pickup", "pose":"AMK_TDU-TGR-1_X_14.03_Y_9.55" }, "id":5 @@ -190,7 +190,7 @@ { "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", - "node_type":"finish", + "node_type":"delivery", "pose":"AMK_TDU-TGR-1_X_15.09_Y_5.69" }, "id":6 diff --git a/test/data/stnu_two_tasks.json b/test/data/stnu_two_tasks.json index c887ef6..2cc571b 100644 --- a/test/data/stnu_two_tasks.json +++ b/test/data/stnu_two_tasks.json @@ -10,7 +10,7 @@ }, { "data":{ - "node_type":"navigation", + "node_type":"start", "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"" }, @@ -18,7 +18,7 @@ }, { "data":{ - "node_type":"start", + "node_type":"pickup", "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_9.7_Y_5.6" }, @@ -26,7 +26,7 @@ }, { "data":{ - "node_type":"finish", + "node_type":"delivery", "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_5.82_Y_6.57" }, @@ -34,7 +34,7 @@ }, { "data":{ - "node_type":"navigation", + "node_type":"start", "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"" }, @@ -42,7 +42,7 @@ }, { "data":{ - "node_type":"start", + "node_type":"pickup", "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_14.03_Y_9.55" }, @@ -50,7 +50,7 @@ }, { "data":{ - "node_type":"finish", + "node_type":"delivery", "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_15.09_Y_5.69" }, diff --git a/test/test_dsc.py b/test/test_dsc.py index 6c7f19d..74bb9c4 100644 --- a/test/test_dsc.py +++ b/test/test_dsc.py @@ -47,7 +47,7 @@ def test_build_stn(self): self.logger.info("Completion time: %s ", completion_time) self.logger.info("Makespan: %s ", makespan) - self.assertEqual(completion_time, 61) + self.assertEqual(completion_time, 157) self.assertEqual(makespan, 98) expected_risk_metric = 0.0 diff --git a/test/test_fpc.py b/test/test_fpc.py index 725c9c5..95b8dea 100644 --- a/test/test_fpc.py +++ b/test/test_fpc.py @@ -43,7 +43,7 @@ def test_build_stn(self): self.logger.info("Completion time: %s ", completion_time) self.logger.info("Makespan: %s ", makespan) - self.assertEqual(completion_time, 65) + self.assertEqual(completion_time, 157) self.assertEqual(makespan, 100) constraints = minimal_network.get_constraints() diff --git a/test/test_srea.py b/test/test_srea.py index 10ccdb5..8f8d8e0 100644 --- a/test/test_srea.py +++ b/test/test_srea.py @@ -48,7 +48,7 @@ def test_build_stn(self): self.logger.info("Completion time: %s ", completion_time) self.logger.info("Makespan: %s ", makespan) - self.assertEqual(completion_time, 60) + 
self.assertEqual(completion_time, 163) self.assertEqual(makespan, 97) expected_risk_metric = 0.0 From 67b045c4c9e719c35687b121379aadcc85dcd1f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Thu, 27 Feb 2020 12:51:06 +0100 Subject: [PATCH 33/44] stnu: Fix boundaries returned by get_prev_timepoint_constraint --- stn/stnu/stnu.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index 3fb5aad..b2184ec 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -166,9 +166,9 @@ def get_work_time_bounded_duration(task): @staticmethod def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): r_earliest_time = next_timepoint_constraint.r_earliest_time - \ - (inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) - r_latest_time = next_timepoint_constraint.r_latest_time - \ (inter_timepoint_constraint.mean + 2*inter_timepoint_constraint.standard_dev) + r_latest_time = next_timepoint_constraint.r_latest_time - \ + (inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) From 22b420ef1d5dca4c6b9d38d2f90094ab3d72f1a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Thu, 27 Feb 2020 12:51:28 +0100 Subject: [PATCH 34/44] task: Add method to add and subtract inter_timepoint_constraints --- stn/task.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/stn/task.py b/stn/task.py index 4117f76..e24f0cf 100644 --- a/stn/task.py +++ b/stn/task.py @@ -14,6 +14,18 @@ def __str__(self): to_print += "{}: N({}, {})".format(self.name, self.mean, self.standard_dev) return to_print + def __sub__(self, other): + # Difference of two independent random variables + mean = self.mean - other.mean + variance = self.variance + other.variance + return mean, variance + + def __add__(self, other): + # Addition of two independent random variables + mean = self.mean + other.mean + variance = self.variance + other.variance + return mean, variance + class TimepointConstraint(AsDictMixin): """ From bea61b3c92a4df29517b3e95ac28028182d8cf0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Thu, 27 Feb 2020 12:55:48 +0100 Subject: [PATCH 35/44] stn: Remove print statements --- stn/stn.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/stn/stn.py b/stn/stn.py index 4cb1098..10789dd 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -442,8 +442,6 @@ def get_completion_time(self): completion_time = 0 task_ids = self.get_tasks() for i, task_id in enumerate(task_ids): - print(task_id) - print(self.get_time(task_id, "delivery", lower_bound=False)) completion_time += self.get_time(task_id, "delivery", lower_bound=False) return completion_time From 57b8bc58dc6b681df2ba3bfa5a16e23ce1a4f559 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Sun, 15 Mar 2020 12:49:30 +0100 Subject: [PATCH 36/44] pstn: Fix requirement constraint upper and lower bound is equal to the mean --- stn/pstn/pstn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index 9a5087a..7a037a8 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -132,7 +132,7 @@ def add_intertimepoints_constraints(self, constraints, task): if distribution.endswith("_0.0"): # the distribution has no variation (stdev is 0) # Make the constraint a 
requirement constraint mean = float(distribution.split("_")[1]) - self.add_constraint(i, j, mean, 0) + self.add_constraint(i, j, mean, mean) else: self.add_constraint(i, j, distribution=distribution) From b69d22898f2ee9eb72fcab0e34e9718d6b569f30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Sun, 15 Mar 2020 12:49:51 +0100 Subject: [PATCH 37/44] task: Add method to update intertimepont constraint --- stn/task.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/stn/task.py b/stn/task.py index e24f0cf..f1ab1b3 100644 --- a/stn/task.py +++ b/stn/task.py @@ -96,6 +96,18 @@ def update_timepoint_constraint(self, constraint_name, r_earliest_time, r_latest r_earliest_time, r_latest_time)) + def update_inter_timepoint_constraint(self, name, mean, variance): + in_list = False + for constraint in self.inter_timepoint_constraints: + if constraint.name == name: + in_list = True + constraint.mean = round(mean, 3) + constraint.variance = round(variance, 3) + constraint.standard_dev = round(variance ** 0.5, 3) + if not in_list: + self.inter_timepoint_constraints.append(InterTimepointConstraint(name=name, mean=mean, + variance=variance)) + def to_dict(self): dict_repr = super().to_dict() timepoint_constraints = list() From bafd47d53d9afcb6c65d3a37090f575ba4d3d3d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Fri, 20 Mar 2020 15:12:42 +0100 Subject: [PATCH 38/44] Use float(inf) instead of np.inf --- stn/pstn/pstn.py | 14 +++++++------- stn/task.py | 3 +-- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index 7a037a8..6c280ff 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -22,12 +22,12 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
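# Minimal sketch of the distribution handling from PATCH 29/44 and PATCH 36/44;
# the "N_6.0_0.0" string is an assumed example, not taken from the test data.
# A probabilistic edge is encoded as "N_<mean>_<stdev>"; when the standard
# deviation is 0.0 the edge carries no uncertainty, so it is added as a
# requirement constraint whose lower and upper bounds both equal the mean.
distribution = "N_6.0_0.0"
if distribution.endswith("_0.0"):
    mean = float(distribution.split("_")[1])
    lower_bound, upper_bound = mean, mean   # i.e. add_constraint(i, j, mean, mean)
else:
    # keep the probabilistic edge, i.e. add_constraint(i, j, distribution=distribution)
    lower_bound, upper_bound = None, None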
+import logging +from json import JSONEncoder + from stn.pstn.constraint import Constraint from stn.stn import STN -from json import JSONEncoder -import logging from stn.task import TimepointConstraint -import numpy as np class MyEncoder(JSONEncoder): @@ -159,24 +159,24 @@ def get_work_time_distribution(task): @staticmethod def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): r_earliest_time = 0 - r_latest_time = np.inf + r_latest_time = float('inf') return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) @staticmethod def get_next_timepoint_constraint(constraint_name, prev_timepoint_constraint, inter_timepoint_constraint): r_earliest_time = 0 - r_latest_time = np.inf + r_latest_time = float('inf') return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) @staticmethod def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): start_constraint = TimepointConstraint(name="start", r_earliest_time=r_earliest_pickup - (travel_time.mean - 2*work_time.standard_dev), - r_latest_time=np.inf) + r_latest_time=float('inf')) pickup_constraint = TimepointConstraint(name="pickup", r_earliest_time=r_earliest_pickup, r_latest_time=r_latest_pickup) delivery_constraint = TimepointConstraint(name="delivery", r_earliest_time= 0, - r_latest_time=np.inf) + r_latest_time=float('inf')) return [start_constraint, pickup_constraint, delivery_constraint] diff --git a/stn/task.py b/stn/task.py index f1ab1b3..eb1678e 100644 --- a/stn/task.py +++ b/stn/task.py @@ -1,4 +1,3 @@ -import numpy as np from stn.utils.as_dict import AsDictMixin @@ -84,7 +83,7 @@ def get_inter_timepoint_constraint(self, constraint_name): return [constraint for constraint in self.inter_timepoint_constraints if constraint.name == constraint_name].pop() - def update_timepoint_constraint(self, constraint_name, r_earliest_time, r_latest_time=np.inf): + def update_timepoint_constraint(self, constraint_name, r_earliest_time, r_latest_time=float('inf')): in_list = False for constraint in self.timepoint_constraints: if constraint.name == constraint_name: From 6d5823ad888fa54e8943dfddd0a1e4a3e81f4fcf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Mon, 23 Mar 2020 09:29:25 +0100 Subject: [PATCH 39/44] stn: Add methods get_task_nodes and get_task_graph --- stn/stn.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/stn/stn.py b/stn/stn.py index 10789dd..e1de434 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -540,6 +540,22 @@ def get_earliest_task_id(self): self.logger.debug("STN has no tasks yet") + def get_task_nodes(self, task_id): + """ Gets the nodes in the stn associated with the given task_id + + Args: + task_id: (string) id of the task + + Returns: list of node ids + + """ + nodes = list() + for i in self.nodes(): + if task_id == self.nodes[i]['data'].task_id: + nodes.append(self.nodes[i]['data']) + + return nodes + def get_task_node_ids(self, task_id): """ Gets the node_ids in the stn associated with the given task_id @@ -583,6 +599,21 @@ def get_subgraph(self, n_tasks): sub_graph = self.subgraph(node_ids) return sub_graph + def get_task_graph(self, task_id): + """ Returns a graph with the nodes of the task_id + + Args: + task_id: ID of the task + + Returns: nx graph with nodes of task_id + + """ + node_ids = self.get_task_node_ids(task_id) + # The first node in the subgraph is the zero timepoint + node_ids.insert(0, 0) + sub_graph = 
self.subgraph(node_ids) + return sub_graph + def execute_timepoint(self, task_id, node_type): for i in self.nodes(): node_data = self.nodes[i]['data'] From c1bf42753021db97ea0be9f94085aad04040bcc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Tue, 24 Mar 2020 16:59:53 +0100 Subject: [PATCH 40/44] stn: Add methods get_earliest_time, get_latest_time, is_empty --- stn/stn.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/stn/stn.py b/stn/stn.py index e1de434..b7bc843 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -82,6 +82,19 @@ def add_zero_timepoint(self): node = Node(generate_uuid(), 'zero_timepoint') self.add_node(0, data=node) + def get_earliest_time(self): + edges = [e for e in self.edges] + first_edge = edges[0] + return -self[first_edge[1]][0]['weight'] + + def get_latest_time(self): + edges = [e for e in self.edges] + last_edge = edges[-1] + return self[0][last_edge[0]]['weight'] + + def is_empty(self): + return nx.is_empty(self) + def add_constraint(self, i, j, wji=0.0, wij=float('inf')): """ Adds constraint between nodes i and j From f8b7c46f55b0e503905081d143fcf82c84e16262 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Thu, 2 Apr 2020 15:46:37 +0200 Subject: [PATCH 41/44] Add action_id to node Add pickup and delivery actions to task Refactor task (rename attrs and methods) Add methods based on node_id --- stn/node.py | 12 +++- stn/pstn/pstn.py | 27 +++------ stn/stn.py | 138 ++++++++++++++++++++++++------------------- stn/stnu/stnu.py | 44 +++++--------- stn/task.py | 114 ++++++++++++++++++----------------- stn/utils/as_dict.py | 2 +- stn/utils/utils.py | 11 ++-- 7 files changed, 173 insertions(+), 175 deletions(-) diff --git a/stn/node.py b/stn/node.py index fea2c5f..590c4d7 100644 --- a/stn/node.py +++ b/stn/node.py @@ -4,7 +4,7 @@ class Node(object): """Represents a timepoint in the STN """ - def __init__(self, task_id, node_type, is_executed=False): + def __init__(self, task_id, node_type, is_executed=False, **kwargs): # id of the task represented by this node if isinstance(task_id, str): task_id = from_str(task_id) @@ -12,10 +12,11 @@ def __init__(self, task_id, node_type, is_executed=False): # The node can be of node_type zero_timepoint, start, pickup or delivery self.node_type = node_type self.is_executed = is_executed + self.action_id = kwargs.get("action_id") def __str__(self): to_print = "" - to_print += "{} {}".format(self.task_id, self.node_type) + to_print += "{} {} ".format(self.task_id, self.node_type) return to_print def __repr__(self): @@ -29,7 +30,8 @@ def __eq__(self, other): return False return (self.task_id == other.task_id and self.node_type == other.node_type and - self.is_executed == other.is_executed) + self.is_executed == other.is_executed and + self.action_id == other.action_id) def __ne__(self, other): return not self.__eq__(other) @@ -42,6 +44,8 @@ def to_dict(self): node_dict['task_id'] = str(self.task_id) node_dict['node_type'] = self.node_type node_dict['is_executed'] = self.is_executed + if self.action_id: + node_dict['action_id'] = str(self.action_id) return node_dict @staticmethod @@ -52,4 +56,6 @@ def from_dict(node_dict): node_type = node_dict['node_type'] is_executed = node_dict.get('is_executed', False) node = Node(task_id, node_type, is_executed) + if node_dict.get('action_id'): + node.action_id = from_str(node_dict['action_id']) return node diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index 6c280ff..8a06a6a 100644 --- a/stn/pstn/pstn.py 
+++ b/stn/pstn/pstn.py @@ -27,7 +27,7 @@ from stn.pstn.constraint import Constraint from stn.stn import STN -from stn.task import TimepointConstraint +from stn.task import Timepoint class MyEncoder(JSONEncoder): @@ -146,37 +146,24 @@ def add_intertimepoints_constraints(self, constraints, task): @staticmethod def get_travel_time_distribution(task): - travel_time = task.get_inter_timepoint_constraint("travel_time") + travel_time = task.get_edge("travel_time") travel_time_distribution = "N_" + str(travel_time.mean) + "_" + str(travel_time.standard_dev) return travel_time_distribution @staticmethod def get_work_time_distribution(task): - work_time = task.get_inter_timepoint_constraint("work_time") + work_time = task.get_edge("work_time") work_time_distribution = "N_" + str(work_time.mean) + "_" + str(work_time.standard_dev) return work_time_distribution @staticmethod - def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): + def get_prev_timepoint(timepoint_name, next_timepoint, edge_in_between): r_earliest_time = 0 r_latest_time = float('inf') - return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + return Timepoint(timepoint_name, r_earliest_time, r_latest_time) @staticmethod - def get_next_timepoint_constraint(constraint_name, prev_timepoint_constraint, inter_timepoint_constraint): + def get_next_timepoint_constraint(timepoint_name, prev_timepoint, edge_in_between): r_earliest_time = 0 r_latest_time = float('inf') - return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) - - @staticmethod - def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): - start_constraint = TimepointConstraint(name="start", - r_earliest_time=r_earliest_pickup - (travel_time.mean - 2*work_time.standard_dev), - r_latest_time=float('inf')) - pickup_constraint = TimepointConstraint(name="pickup", - r_earliest_time=r_earliest_pickup, - r_latest_time=r_latest_pickup) - delivery_constraint = TimepointConstraint(name="delivery", - r_earliest_time= 0, - r_latest_time=float('inf')) - return [start_constraint, pickup_constraint, delivery_constraint] + return Timepoint(timepoint_name, r_earliest_time, r_latest_time) diff --git a/stn/stn.py b/stn/stn.py index b7bc843..0e8b242 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -11,7 +11,7 @@ from uuid import UUID import copy import math -from stn.task import TimepointConstraint +from stn.task import Timepoint MAX_FLOAT = sys.float_info.max @@ -141,7 +141,7 @@ def get_constraints(self): return constraints - def add_timepoint(self, id, task, node_type): + def add_timepoint(self, id, task, node_type, **kwargs): """ A timepoint is represented by a node in the STN The node can be of node_type: - zero_timepoint: references the schedule to the origin @@ -149,7 +149,7 @@ def add_timepoint(self, id, task, node_type): - pickup : time at which the robot arrives starts the pickup action - delivery : time at which the robot finishes the delivery action """ - node = Node(task.task_id, node_type) + node = Node(task.task_id, node_type, **kwargs) self.add_node(id, data=node) def add_task(self, task, position=1): @@ -191,13 +191,13 @@ def add_task(self, task, position=1): # Add new timepoints self.add_timepoint(start_node_id, task, "start") - self.add_timepoint_constraint(start_node_id, task.get_timepoint_constraint("start")) + self.add_timepoint_constraint(start_node_id, task.get_timepoint("start")) - self.add_timepoint(pickup_node_id, task, "pickup") - 
self.add_timepoint_constraint(pickup_node_id, task.get_timepoint_constraint("pickup")) + self.add_timepoint(pickup_node_id, task, "pickup", action_id=task.pickup_action_id) + self.add_timepoint_constraint(pickup_node_id, task.get_timepoint("pickup")) - self.add_timepoint(delivery_node_id, task, "delivery") - self.add_timepoint_constraint(delivery_node_id, task.get_timepoint_constraint("delivery")) + self.add_timepoint(delivery_node_id, task, "delivery", action_id=task.delivery_action_id) + self.add_timepoint_constraint(delivery_node_id, task.get_timepoint("delivery")) # Add constraints between new nodes new_constraints_between = [start_node_id, pickup_node_id, delivery_node_id] @@ -249,27 +249,27 @@ def add_intertimepoints_constraints(self, constraints, task): def get_travel_time(task): """ Returns the mean of the travel time (time for going from current pose to pickup pose) """ - travel_time = task.get_inter_timepoint_constraint("travel_time") + travel_time = task.get_edge("travel_time") return travel_time.mean @staticmethod def get_work_time(task): """ Returns the mean of the work time (time to transport an object from the pickup to the delivery location) """ - work_time = task.get_inter_timepoint_constraint("work_time") + work_time = task.get_edge("work_time") return work_time.mean @staticmethod def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): - start_constraint = TimepointConstraint(name="start", - r_earliest_time=r_earliest_pickup - travel_time.mean, - r_latest_time=r_latest_pickup - travel_time.mean) - pickup_constraint = TimepointConstraint(name="pickup", - r_earliest_time=r_earliest_pickup, - r_latest_time=r_latest_pickup) - delivery_constraint = TimepointConstraint(name="delivery", - r_earliest_time=r_earliest_pickup + work_time.mean, - r_latest_time=r_latest_pickup + work_time.mean) + start_constraint = Timepoint(name="start", + r_earliest_time=r_earliest_pickup - travel_time.mean, + r_latest_time=r_latest_pickup - travel_time.mean) + pickup_constraint = Timepoint(name="pickup", + r_earliest_time=r_earliest_pickup, + r_latest_time=r_latest_pickup) + delivery_constraint = Timepoint(name="delivery", + r_earliest_time=r_earliest_pickup + work_time.mean, + r_latest_time=r_latest_pickup + work_time.mean) return [start_constraint, pickup_constraint, delivery_constraint] def show_n_nodes_edges(self): @@ -285,9 +285,9 @@ def update_task(self, task): delivery_node_id = pickup_node_id + 1 # Adding an existing timepoint constraint updates the constraint - self.add_timepoint_constraint(start_node_id, task.get_timepoint_constraint("start")) - self.add_timepoint_constraint(pickup_node_id, task.get_timepoint_constraint("pickup")) - self.add_timepoint_constraint(delivery_node_id, task.get_timepoint_constraint("delivery")) + self.add_timepoint_constraint(start_node_id, task.get_timepoint("start")) + self.add_timepoint_constraint(pickup_node_id, task.get_timepoint("pickup")) + self.add_timepoint_constraint(delivery_node_id, task.get_timepoint("delivery")) # Add constraints between new nodes new_constraints_between = [start_node_id, pickup_node_id, delivery_node_id] @@ -403,22 +403,17 @@ def update_edge_weight(self, i, j, weight, force=False): if force: self[i][j]['weight'] = weight - def assign_timepoint(self, allotted_time, task_id, node_type, force=False): + def assign_timepoint(self, allotted_time, node_id, force=False): """ Assigns the allotted time to the earliest and latest time of the timepoint - of task_id of type node_type + in node_id Args: 
allotted_time (float): seconds after zero timepoint - task_id(UUID): id of the task - node_type(string): can be "navigation", "start" of "finish" + node_id (inf): idx of the timepoint in the stn """ - for i in self.nodes(): - node_data = self.nodes[i]['data'] - if node_data.task_id == task_id and node_data.node_type == node_type: - self.update_edge_weight(0, i, allotted_time, force) - self.update_edge_weight(i, 0, -allotted_time, force) - break + self.update_edge_weight(0, node_id, allotted_time, force) + self.update_edge_weight(node_id, 0, -allotted_time, force) def assign_earliest_time(self, time_, task_id, node_type, force=False): for i in self.nodes(): @@ -486,16 +481,16 @@ def add_timepoint_constraint(self, node_id, timepoint_constraint): self.add_constraint(0, node_id, timepoint_constraint.r_earliest_time, timepoint_constraint.r_latest_time) @staticmethod - def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): - r_earliest_time = next_timepoint_constraint.r_earliest_time - inter_timepoint_constraint.mean - r_latest_time = next_timepoint_constraint.r_latest_time - inter_timepoint_constraint.mean - return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + def get_prev_timepoint(timepoint_name, next_timepoint, edge_in_between): + r_earliest_time = next_timepoint.r_earliest_time - edge_in_between.mean + r_latest_time = next_timepoint.r_latest_time - edge_in_between.mean + return Timepoint(timepoint_name, r_earliest_time, r_latest_time) @staticmethod - def get_next_timepoint_constraint(constraint_name, prev_timepoint_constraint, inter_timepoint_constraint): - r_earliest_time = prev_timepoint_constraint.r_earliest_time + inter_timepoint_constraint.mean - r_latest_time = prev_timepoint_constraint.r_latest_time + inter_timepoint_constraint.mean - return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + def get_next_timepoint(timepoint_name, prev_timepoint, edge_in_between): + r_earliest_time = prev_timepoint.r_earliest_time + edge_in_between.mean + r_latest_time = prev_timepoint.r_latest_time + edge_in_between.mean + return Timepoint(timepoint_name, r_earliest_time, r_latest_time) def get_time(self, task_id, node_type='start', lower_bound=True): _time = None @@ -509,6 +504,39 @@ def get_time(self, task_id, node_type='start', lower_bound=True): return _time + def get_node_earliest_time(self, node_id): + return -self[node_id][0]['weight'] + + def get_node_latest_time(self, node_id): + return self[0][node_id]['weight'] + + def get_nodes_by_action(self, action_id): + nodes = list() + for node_id, data in self.nodes.data(): + if data['data'].action_id == action_id: + node = (node_id, self.nodes[node_id]['data']) + nodes.append(node) + return nodes + + def get_nodes_by_task(self, task_id): + nodes = list() + for node_id, data in self.nodes.data(): + if data['data'].task_id == task_id: + node = (node_id, self.nodes[node_id]['data']) + nodes.append(node) + return nodes + + def get_node_by_type(self, task_id, node_type): + for node_id, data in self.nodes.data(): + if data['data'].task_id == task_id and data['data'].node_type == node_type: + return node_id, self.nodes[node_id]['data'] + + def set_action_id(self, node_id, action_id): + self.nodes[node_id]['data'].action_id = action_id + + def get_node(self, node_id): + return self.nodes[node_id]['data'] + def get_task_id(self, position): """ Returns the id of the task in the given position @@ -586,6 +614,14 @@ def get_task_node_ids(self, task_id): return 
node_ids def get_task_graph(self, task_id): + """ Returns a graph with the nodes of the task_id + + Args: + task_id: ID of the task + + Returns: nx graph with nodes of task_id + + """ node_ids = self.get_task_node_ids(task_id) node_ids.insert(0, 0) task_graph = self.subgraph(node_ids) @@ -612,26 +648,8 @@ def get_subgraph(self, n_tasks): sub_graph = self.subgraph(node_ids) return sub_graph - def get_task_graph(self, task_id): - """ Returns a graph with the nodes of the task_id - - Args: - task_id: ID of the task - - Returns: nx graph with nodes of task_id - - """ - node_ids = self.get_task_node_ids(task_id) - # The first node in the subgraph is the zero timepoint - node_ids.insert(0, 0) - sub_graph = self.subgraph(node_ids) - return sub_graph - - def execute_timepoint(self, task_id, node_type): - for i in self.nodes(): - node_data = self.nodes[i]['data'] - if node_data.task_id == task_id and node_data.node_type == node_type: - node_data.is_executed = True + def execute_timepoint(self, node_id): + self.nodes[node_id]['data'].is_executed = True def execute_edge(self, node_1, node_2): nx.set_edge_attributes(self, {(node_1, node_2): {'is_executed': True}, diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index b2184ec..cacbac6 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -1,7 +1,7 @@ from stn.stn import STN from json import JSONEncoder import logging -from stn.task import TimepointConstraint +from stn.task import Timepoint class MyEncoder(JSONEncoder): @@ -143,7 +143,7 @@ def get_travel_time_bounded_duration(task): Shyan Akmal, Savana Ammons, Hemeng Li, and James Boerkoel Jr. Quantifying Degrees of Controllability in Temporal Networks with Uncertainty. In Proceedings of the 29th International Conference on Automated Planning and Scheduling, ICAPS 2019, 07 2019. """ - travel_time = task.get_inter_timepoint_constraint("travel_time") + travel_time = task.get_edge("travel_time") lower_bound = travel_time.mean - 2*travel_time.standard_dev upper_bound = travel_time.mean + 2*travel_time.standard_dev @@ -157,40 +157,26 @@ def get_work_time_bounded_duration(task): Shyan Akmal, Savana Ammons, Hemeng Li, and James Boerkoel Jr. Quantifying Degrees of Controllability in Temporal Networks with Uncertainty. In Proceedings of the 29th International Conference on Automated Planning and Scheduling, ICAPS 2019, 07 2019. 
""" - work_time = task.get_inter_timepoint_constraint("work_time") + work_time = task.get_edge("work_time") lower_bound = work_time.mean - 2*work_time.standard_dev upper_bound = work_time.mean + 2*work_time.standard_dev return lower_bound, upper_bound @staticmethod - def get_prev_timepoint_constraint(constraint_name, next_timepoint_constraint, inter_timepoint_constraint): - r_earliest_time = next_timepoint_constraint.r_earliest_time - \ - (inter_timepoint_constraint.mean + 2*inter_timepoint_constraint.standard_dev) - r_latest_time = next_timepoint_constraint.r_latest_time - \ - (inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) + def get_prev_timepoint(timepoint_name, next_timepoint, edge_in_between): + r_earliest_time = next_timepoint.r_earliest_time - \ + (edge_in_between.mean + 2*edge_in_between.standard_dev) + r_latest_time = next_timepoint.r_latest_time - \ + (edge_in_between.mean - 2*edge_in_between.standard_dev) - return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) + return Timepoint(timepoint_name, r_earliest_time, r_latest_time) @staticmethod - def get_next_timepoint_constraint(constraint_name, prev_timepoint_constraint, inter_timepoint_constraint): - r_earliest_time = prev_timepoint_constraint.r_earliest_time + \ - (inter_timepoint_constraint.mean - 2*inter_timepoint_constraint.standard_dev) - r_latest_time = prev_timepoint_constraint.r_latest_time + \ - (inter_timepoint_constraint.mean + 2*inter_timepoint_constraint.standard_dev) - - return TimepointConstraint(constraint_name, r_earliest_time, r_latest_time) - - @staticmethod - def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): - start_constraint = TimepointConstraint(name="start", - r_earliest_time=r_earliest_pickup - (travel_time.mean - 2*work_time.standard_dev), - r_latest_time=r_latest_pickup - (travel_time.mean + 2*work_time.standard_dev)) - pickup_constraint = TimepointConstraint(name="pickup", - r_earliest_time=r_earliest_pickup, - r_latest_time=r_latest_pickup) - delivery_constraint = TimepointConstraint(name="delivery", - r_earliest_time=r_earliest_pickup + work_time.mean - 2*work_time.standard_dev, - r_latest_time=r_latest_pickup + work_time.mean - 2*work_time.standard_dev) - return [start_constraint, pickup_constraint, delivery_constraint] + def get_next_timepoint(timepoint_name, prev_timepoint, edge_in_between): + r_earliest_time = prev_timepoint.r_earliest_time + \ + (edge_in_between.mean - 2*edge_in_between.standard_dev) + r_latest_time = prev_timepoint.r_latest_time + \ + (edge_in_between.mean + 2*edge_in_between.standard_dev) + return Timepoint(timepoint_name, r_earliest_time, r_latest_time) diff --git a/stn/task.py b/stn/task.py index eb1678e..da37fa1 100644 --- a/stn/task.py +++ b/stn/task.py @@ -1,7 +1,7 @@ from stn.utils.as_dict import AsDictMixin -class InterTimepointConstraint(AsDictMixin): +class Edge(AsDictMixin): def __init__(self, name, mean, variance, **kwargs): self.name = name self.mean = round(mean, 3) @@ -26,7 +26,7 @@ def __add__(self, other): return mean, variance -class TimepointConstraint(AsDictMixin): +class Timepoint(AsDictMixin): """ r_earliest_time (float): earliest time relative to a ztp r_latest_time (float): latest time relative to a ztp @@ -44,90 +44,88 @@ def __str__(self): class Task(AsDictMixin): - def __init__(self, task_id, timepoint_constraints, inter_timepoint_constraints): + def __init__(self, task_id, timepoints, edges, pickup_action_id, delivery_action_id): """ Constructor for the 
Task object Args: task_id (UUID): An instance of an UUID object - timepoint_constraints (list): list of timepoint constraints (TimepointConstraint) - inter_timepoint_constraints (list): list of inter timepoint constraints (InterTimepointConstraint) - hard_constraints (bool): False if the task can be - scheduled ASAP, True if the task is not flexible. Defaults to True + timepoints (list): list of timepoints (Timepoints) + Edges (list): list of edges (Edges) + pickup_action_id (UUID): Action id of the pickup action + delivery_action_id (UUID): Action id of te delivery action """ self.task_id = task_id - self.timepoint_constraints = list() - self.inter_timepoint_constraints = list() - - for constraint in timepoint_constraints: - self.timepoint_constraints.append(constraint) - for constraint in inter_timepoint_constraints: - self.inter_timepoint_constraints.append(constraint) + self.timepoints = timepoints + self.edges = edges + self.pickup_action_id = pickup_action_id + self.delivery_action_id = delivery_action_id def __str__(self): to_print = "" to_print += "{} \n".format(self.task_id) - to_print += "TimepointConstraints: \n" - for constraint in self.timepoint_constraints: - to_print += str(constraint) + "\t" - to_print += "\n InterTimepointConstraints\n" - for constraint in self.inter_timepoint_constraints: - to_print += str(constraint) + "\t" + to_print += "Timepoints: \n" + for timepoint in self.timepoints: + to_print += str(timepoint) + "\t" + to_print += "\n Edges: \n" + for edge in self.edges: + to_print += str(edge) + "\t" + to_print += "\n Pickup action:" + str(self.pickup_action_id) + to_print += "\n Delivery action:" + str(self.delivery_action_id) return to_print - def get_timepoint_constraint(self, constraint_name): - return [constraint for constraint in self.timepoint_constraints - if constraint.name == constraint_name].pop() + def get_timepoint(self, timepoint_name): + for timepoint in self.timepoints: + if timepoint.name == timepoint_name: + return timepoint - def get_inter_timepoint_constraint(self, constraint_name): - return [constraint for constraint in self.inter_timepoint_constraints - if constraint.name == constraint_name].pop() + def get_edge(self, edge_name): + for edge in self.edges: + if edge.name == edge_name: + return edge - def update_timepoint_constraint(self, constraint_name, r_earliest_time, r_latest_time=float('inf')): + def update_timepoint(self, timepoint_name, r_earliest_time, r_latest_time=float('inf')): in_list = False - for constraint in self.timepoint_constraints: - if constraint.name == constraint_name: + for timepoint in self.timepoints: + if timepoint.name == timepoint_name: in_list = True - constraint.r_earliest_time = r_earliest_time - constraint.r_latest_time = r_latest_time + timepoint.r_earliest_time = r_earliest_time + timepoint.r_latest_time = r_latest_time if not in_list: - self.timepoint_constraints.append(TimepointConstraint(constraint_name, - r_earliest_time, - r_latest_time)) + self.timepoints.append(Timepoint(timepoint_name, r_earliest_time, r_latest_time)) - def update_inter_timepoint_constraint(self, name, mean, variance): + def update_edge(self, edge_name, mean, variance): in_list = False - for constraint in self.inter_timepoint_constraints: - if constraint.name == name: + for edge in self.edges: + if edge.name == edge_name: in_list = True - constraint.mean = round(mean, 3) - constraint.variance = round(variance, 3) - constraint.standard_dev = round(variance ** 0.5, 3) + edge.mean = round(mean, 3) + edge.variance = round(variance, 3) + 
edge.standard_dev = round(variance ** 0.5, 3) if not in_list: - self.inter_timepoint_constraints.append(InterTimepointConstraint(name=name, mean=mean, - variance=variance)) + self.edges.append(Edge(name=edge_name, mean=mean, variance=variance)) def to_dict(self): dict_repr = super().to_dict() - timepoint_constraints = list() - inter_timepoint_constraints = list() - for c in self.timepoint_constraints: - timepoint_constraints.append(c.to_dict()) - for c in self.inter_timepoint_constraints: - inter_timepoint_constraints.append(c.to_dict()) - dict_repr.update(timepoint_constraints=timepoint_constraints) - dict_repr.update(inter_timepoint_constraints=inter_timepoint_constraints) + timepoints = list() + edges = list() + for t in self.timepoints: + timepoints.append(t.to_dict()) + for e in self.edges: + edges.append(e.to_dict()) + dict_repr.update(timepoints=timepoints) + dict_repr.update(edges=edges) return dict_repr @classmethod def to_attrs(cls, dict_repr): attrs = super().to_attrs(dict_repr) - timepoint_constraints = list() - inter_timepoint_constraints = list() - for c in attrs.get("timepoint_constraints"): - timepoint_constraints.append(TimepointConstraint.from_dict(c)) - for c in attrs.get("inter_timepoint_constraints"): - inter_timepoint_constraints.append(InterTimepointConstraint.from_dict(c)) - attrs.update(timepoint_constraints=timepoint_constraints) - attrs.update(inter_timepoint_constraints=inter_timepoint_constraints) + timepoints = list() + edges = list() + for t in attrs.get("timepoints"): + timepoints.append(Timepoint.from_dict(t)) + for e in attrs.get("edges"): + edges.append(Edge.from_dict(e)) + attrs.update(timepoints=timepoints) + attrs.update(edges=edges) return attrs diff --git a/stn/utils/as_dict.py b/stn/utils/as_dict.py index ac1eef5..7a24687 100644 --- a/stn/utils/as_dict.py +++ b/stn/utils/as_dict.py @@ -44,7 +44,7 @@ def to_attrs(cls, dict_repr): @classmethod def _get_value(cls, key, value): - if key == 'task_id': + if key in ['task_id', 'pickup_action_id', 'delivery_action_id']: return from_str(value) else: return value diff --git a/stn/utils/utils.py b/stn/utils/utils.py index b5e68cb..94fab8b 100644 --- a/stn/utils/utils.py +++ b/stn/utils/utils.py @@ -1,6 +1,7 @@ import logging.config import yaml -from stn.task import Task, InterTimepointConstraint +from stn.task import Task, Edge +from stn.utils.uuid import generate_uuid def config_logger(logging_file): @@ -25,9 +26,11 @@ def create_task(stn, task_dict): task_id = task_dict.get("task_id") r_earliest_pickup = task_dict.get("earliest_pickup") r_latest_pickup = task_dict.get("latest_pickup") - travel_time = InterTimepointConstraint(**task_dict.get("travel_time")) - work_time = InterTimepointConstraint(**task_dict.get("work_time")) + travel_time = Edge(**task_dict.get("travel_time")) + work_time = Edge(**task_dict.get("work_time")) timepoint_constraints = stn.create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time) inter_timepoint_constraints = [travel_time, work_time] + pickup_action_id = generate_uuid() + delivery_action_id = generate_uuid() - return Task(task_id, timepoint_constraints, inter_timepoint_constraints) + return Task(task_id, timepoint_constraints, inter_timepoint_constraints, pickup_action_id, delivery_action_id) From 8a4a09b4593cbcb388787be6f6883fb6c746f16e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?= Date: Wed, 8 Apr 2020 20:32:54 +0200 Subject: [PATCH 42/44] pstn: Update method name to get_next_timepoint --- stn/pstn/pstn.py | 2 
+-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py
index 8a06a6a..94ad9bf 100644
--- a/stn/pstn/pstn.py
+++ b/stn/pstn/pstn.py
@@ -163,7 +163,7 @@ def get_prev_timepoint(timepoint_name, next_timepoint, edge_in_between):
         return Timepoint(timepoint_name, r_earliest_time, r_latest_time)
 
     @staticmethod
-    def get_next_timepoint_constraint(timepoint_name, prev_timepoint, edge_in_between):
+    def get_next_timepoint(timepoint_name, prev_timepoint, edge_in_between):
         r_earliest_time = 0
         r_latest_time = float('inf')
         return Timepoint(timepoint_name, r_earliest_time, r_latest_time)

From a1a6f9baa4253be8b9aa92aa24eafbb910e03c49 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?=
Date: Wed, 8 Apr 2020 20:33:53 +0200
Subject: [PATCH 43/44] stn: refactor_get_earliest_task_id

---
 stn/stn.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/stn/stn.py b/stn/stn.py
index 0e8b242..cf83f44 100644
--- a/stn/stn.py
+++ b/stn/stn.py
@@ -575,8 +575,8 @@ def get_earliest_task_id(self):
         # The first task in the graph is the task with the earliest start time
         # The first task is in node 1, node 0 is reserved for the zero timepoint
-        task_id = self.get_task_id(1)
-        if task_id:
+        if self.has_node(1):
+            task_id = self.nodes[1]['data'].task_id
             return task_id
 
         self.logger.debug("STN has no tasks yet")

From 9fcf33c5ec3192e17e29468fe6bb3b2a4c55ffb6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81ngela=20Enr=C3=ADquez=20G=C3=B3mez?=
Date: Sat, 11 Apr 2020 13:28:05 +0200
Subject: [PATCH 44/44] fix/dsc: Return dsc=1 if len(original) == 0

---
 stn/methods/dsc_lp.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/stn/methods/dsc_lp.py b/stn/methods/dsc_lp.py
index 638b982..09d8225 100644
--- a/stn/methods/dsc_lp.py
+++ b/stn/methods/dsc_lp.py
@@ -216,6 +216,8 @@ def compute_dsc(self, original, shrinked):
         return the value of degree of strong controllability
         """
+        new = 1
+        orig = 1
         for i in range(len(original)):
             x, y = original[i]
             orig = y-x
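After the refactoring in the later patches, a task is built from Timepoint and Edge objects and serialized through the AsDictMixin. The sketch below shows how these pieces are expected to fit together; all means, variances, bounds and ids are illustrative values, not data from the repository's tests.

from stn.task import Edge, Timepoint, Task
from stn.utils.uuid import generate_uuid

# Edges with assumed (illustrative) means and variances
travel_time = Edge(name="travel_time", mean=6.0, variance=0.25)
work_time = Edge(name="work_time", mean=20.0, variance=1.0)

# PATCH 34/44: means and variances of independent random variables add up
mean, variance = travel_time + work_time   # (26.0, 1.25)

# Timepoint relative to the zero timepoint (illustrative bounds)
pickup = Timepoint(name="pickup", r_earliest_time=10.0, r_latest_time=30.0)

task = Task(task_id=generate_uuid(),
            timepoints=[pickup],
            edges=[travel_time, work_time],
            pickup_action_id=generate_uuid(),
            delivery_action_id=generate_uuid())

# AsDictMixin round trip: UUIDs are stringified in to_dict and parsed back in from_dict
task_dict = task.to_dict()
assert Task.from_dict(task_dict).task_id == task.task_id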