Skip to content

Commit

Permalink
Merge pull request #41 from ropod-project/develop
Browse files Browse the repository at this point in the history
End of project stable version
  • Loading branch information
argenos authored Apr 24, 2020
2 parents f11679f + d48bc4a commit 0a00cf1
Show file tree
Hide file tree
Showing 29 changed files with 1,013 additions and 577 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from setuptools import setup

setup(name='stn',
packages=['stn', 'stn.config', 'stn.methods', 'stn.pstn', 'stn.stnu', 'stn.utils'],
packages=['stn', 'stn.config', 'stn.exceptions', 'stn.methods', 'stn.pstn', 'stn.stnu', 'stn.utils'],
version='0.2.0',
install_requires=[
'numpy',
Expand Down
20 changes: 14 additions & 6 deletions stn/config/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,9 @@ def srea_algorithm(stn):
return
risk_metric, dispatchable_graph = result

return risk_metric, dispatchable_graph
dispatchable_graph.risk_metric = risk_metric

return dispatchable_graph


class DegreeStongControllability(object):
Expand All @@ -108,14 +110,16 @@ def dsc_lp_algorithm(stn):

stnu = dsc_lp.get_stnu(bounds)

# Returns a schedule because it is an offline approach
# The dispatchable graph is a schedule because it is an offline approach
schedule = dsc_lp.get_schedule(bounds)

# A strongly controllable STNU has a DSC of 1, i.e., a DSC value of 1 is better. We take
# 1 − DSC to be the risk metric, so that small values are preferable
risk_metric = 1 - dsc

return risk_metric, schedule
schedule.risk_metric = risk_metric

return schedule


class FullPathConsistency(object):
Expand All @@ -134,15 +138,19 @@ def fpc_algorithm(stn):
if dispatchable_graph is None:
return
risk_metric = 1
return risk_metric, dispatchable_graph

dispatchable_graph.risk_metric = risk_metric

return dispatchable_graph


stn_factory = STNFactory()
stn_factory.register_stn('fpc', STN)
stn_factory.register_stn('srea', PSTN)
stn_factory.register_stn('dsc_lp', STNU)
stn_factory.register_stn('dsc', STNU)

stp_solver_factory = STPSolverFactory()
stp_solver_factory.register_solver('fpc', FullPathConsistency)
stp_solver_factory.register_solver('srea', StaticRobustExecution)
stp_solver_factory.register_solver('dsc_lp', DegreeStongControllability)
stp_solver_factory.register_solver('drea', StaticRobustExecution)
stp_solver_factory.register_solver('dsc', DegreeStongControllability)
Empty file added stn/exceptions/__init__.py
Empty file.
6 changes: 6 additions & 0 deletions stn/exceptions/stp.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
class NoSTPSolution(Exception):
    """Raised when the STP solver cannot produce a solution for the problem."""

    def __init__(self, *args):
        # Accept optional message/args for context (e.g. NoSTPSolution("reason"));
        # remains backward-compatible with the original no-argument form.
        super().__init__(*args)
5 changes: 4 additions & 1 deletion stn/methods/dsc_lp.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import copy

import pulp
import sys
Expand All @@ -43,7 +44,7 @@ class DSC_LP(object):
logger = logging.getLogger('stn.dsc_lp')

def __init__(self, stnu):
self.stnu = stnu
self.stnu = copy.deepcopy(stnu)
self.constraints = stnu.get_constraints()
self.contingent_constraints = stnu.get_contingent_constraints()
self.contingent_timepoints = stnu.get_contingent_timepoints()
Expand Down Expand Up @@ -215,6 +216,8 @@ def compute_dsc(self, original, shrinked):
return the value of degree of strong controllability
"""
new = 1
orig = 1
for i in range(len(original)):
x, y = original[i]
orig = y-x
Expand Down
2 changes: 1 addition & 1 deletion stn/methods/fpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,4 @@ def get_minimal_network(stn):
minimal_network.update_edges(shortest_path_array)
return minimal_network
else:
logger.warning("The minimal network is inconsistent")
logger.debug("The minimal network is inconsistent. STP could not be solved")
8 changes: 7 additions & 1 deletion stn/methods/srea.py
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,13 @@ def srea_LP(inputstn,
prob.writeLP('STN.lp')
pulp.LpSolverDefault.msg = 10

prob.solve()
# Based on https://stackoverflow.com/questions/27406858/pulp-solver-error
# Sometimes pulp throws an exception instead of returning a problem with unfeasible status
try:
prob.solve()
except pulp.PulpSolverError:
print("Problem unfeasible")
return None

status = pulp.LpStatus[prob.status]
if debug:
Expand Down
41 changes: 28 additions & 13 deletions stn/node.py
Original file line number Diff line number Diff line change
@@ -1,46 +1,61 @@
from stn.utils.uuid import generate_uuid
from stn.utils.uuid import from_str


class Node(object):
"""Represents a timepoint in the STN """

def __init__(self, task_id, pose, node_type):
def __init__(self, task_id, node_type, is_executed=False, **kwargs):
# id of the task represented by this node
if isinstance(task_id, str):
task_id = from_str(task_id)
self.task_id = task_id
# Pose in the map where the node has to be executed
self.pose = pose
# The node can be of node_type zero_timepoint, navigation, start or finish
# The node can be of node_type zero_timepoint, start, pickup or delivery
self.node_type = node_type
self.is_executed = is_executed
self.action_id = kwargs.get("action_id")

def __str__(self):
to_print = ""
to_print += "node {} {}".format(self.task_id, self.node_type)
to_print += "{} {} ".format(self.task_id, self.node_type)
return to_print

def __repr__(self):
return str(self.to_dict())

def __hash__(self):
return hash((self.task_id, self.pose, self.node_type))
return hash((self.task_id, self.node_type, self.is_executed))

def __eq__(self, other):
if other is None:
return False
return (self.task_id == other.task_id and
self.pose == other.pose and
self.node_type == other.node_type)
self.node_type == other.node_type and
self.is_executed == other.is_executed and
self.action_id == other.action_id)

def __ne__(self, other):
return not self.__eq__(other)

def execute(self):
self.is_executed = True

def to_dict(self):
node_dict = dict()
node_dict['task_id'] = self.task_id
node_dict['pose'] = self.pose
node_dict['task_id'] = str(self.task_id)
node_dict['node_type'] = self.node_type
node_dict['is_executed'] = self.is_executed
if self.action_id:
node_dict['action_id'] = str(self.action_id)
return node_dict

@staticmethod
def from_dict(node_dict):
task_id = node_dict['task_id']
pose = node_dict['pose']
if isinstance(task_id, str):
task_id = from_str(task_id)
node_type = node_dict['node_type']
node = Node(task_id, pose, node_type)
is_executed = node_dict.get('is_executed', False)
node = Node(task_id, node_type, is_executed)
if node_dict.get('action_id'):
node.action_id = from_str(node_dict['action_id'])
return node
97 changes: 50 additions & 47 deletions stn/pstn/pstn.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import logging
from json import JSONEncoder

from stn.pstn.constraint import Constraint
from stn.stn import STN
from stn.stn import Node
from json import JSONEncoder
import logging
from stn.task import Timepoint


class MyEncoder(JSONEncoder):
Expand All @@ -48,17 +49,23 @@ def __str__(self):
if self.has_edge(j, i) and i < j:
# Constraints with the zero timepoint
if i == 0:
timepoint = Node.from_dict(self.node[j]['data'])
timepoint = self.nodes[j]['data']
lower_bound = -self[j][i]['weight']
upper_bound = self[i][j]['weight']
to_print += "Timepoint {}: [{}, {}]".format(timepoint, lower_bound, upper_bound)
if timepoint.is_executed:
to_print += " Ex"
# Constraints between the other timepoints
else:
if 'is_contingent' in self[j][i]:
to_print += "Constraint {} => {}: [{}, {}] ({})".format(i, j, -self[j][i]['weight'], self[i][j]['weight'], self[i][j]['distribution'])
if self[i][j]['is_executed']:
to_print += " Ex"
else:

to_print += "Constraint {} => {}: [{}, {}]".format(i, j, -self[j][i]['weight'], self[i][j]['weight'])
if self[i][j]['is_executed']:
to_print += " Ex"

to_print += "\n"

Expand Down Expand Up @@ -89,27 +96,8 @@ def add_constraint(self, i, j, wji=0.0, wij=float('inf'), distribution=""):

super().add_constraint(i, j, wji, wij)

self.add_edge(i, j, distribution=distribution)
self.add_edge(i, j, is_contingent=is_contingent)

self.add_edge(j, i, distribution=distribution)
self.add_edge(j, i, is_contingent=is_contingent)

def timepoint_hard_constraints(self, node_id, task, node_type):
""" Adds the earliest and latest times to execute a timepoint (node)
Navigation timepoint [0, inf]
Start timepoint [earliest_start_time, latest_start_time]
Finish timepoint [0, inf]
"""

if node_type == "navigation":
self.add_constraint(0, node_id, task.r_earliest_navigation_start_time)

if node_type == "start":
self.add_constraint(0, node_id, task.r_earliest_start_time, task.r_latest_start_time)

elif node_type == "finish":
self.add_constraint(0, node_id)
self.add_edge(i, j, distribution=distribution, is_contingent=is_contingent)
self.add_edge(j, i, distribution=distribution, is_contingent=is_contingent)

def get_contingent_constraints(self):
""" Returns a dictionary with the contingent constraints in the PSTN
Expand All @@ -126,9 +114,9 @@ def get_contingent_constraints(self):
def add_intertimepoints_constraints(self, constraints, task):
""" Adds constraints between the timepoints of a task
Constraints between:
- navigation start and start (contingent)
- start and finish (contingent)
- finish and next task (if any) (requirement)
- start and pickup (contingent)
- pickup and delivery (contingent)
- delivery and next task (if any) (requirement)
Args:
constraints (list) : list of tuples that defines the pair of nodes between which a new constraint should be added
Example:
Expand All @@ -139,28 +127,43 @@ def add_intertimepoints_constraints(self, constraints, task):
"""
for (i, j) in constraints:
self.logger.debug("Adding constraint: %s ", (i, j))
if self.node[i]['data']['node_type'] == "navigation":
distribution = self.get_navigation_distribution(i, j)
self.add_constraint(i, j, distribution=distribution)
if self.nodes[i]['data'].node_type == "start":
distribution = self.get_travel_time_distribution(task)
if distribution.endswith("_0.0"): # the distribution has no variation (stdev is 0)
# Make the constraint a requirement constraint
mean = float(distribution.split("_")[1])
self.add_constraint(i, j, mean, mean)
else:
self.add_constraint(i, j, distribution=distribution)

elif self.node[i]['data']['node_type'] == "start":
distribution = self.get_task_distribution(task)
elif self.nodes[i]['data'].node_type == "pickup":
distribution = self.get_work_time_distribution(task)
self.add_constraint(i, j, distribution=distribution)

elif self.node[i]['data']['node_type'] == "finish":
elif self.nodes[i]['data'].node_type == "delivery":
# Wait time between the delivery of one task and the start of the next one. Fixed to [0, inf]
self.add_constraint(i, j)

def get_navigation_distribution(self, source, destination):
""" Reads from the database the probability distribution for navigating from source to destination
"""
# TODO: Read estimated distribution from dataset
distribution = "N_1_1"
return distribution

def get_task_distribution(self, task):
""" Reads from the database the estimated distribution of the task
In the case of transportation tasks, the estimated distribution is the navigation time from the pickup to the delivery location
"""
distribution = "N_1_1"
return distribution
@staticmethod
def get_travel_time_distribution(task):
travel_time = task.get_edge("travel_time")
travel_time_distribution = "N_" + str(travel_time.mean) + "_" + str(travel_time.standard_dev)
return travel_time_distribution

@staticmethod
def get_work_time_distribution(task):
work_time = task.get_edge("work_time")
work_time_distribution = "N_" + str(work_time.mean) + "_" + str(work_time.standard_dev)
return work_time_distribution

@staticmethod
def get_prev_timepoint(timepoint_name, next_timepoint, edge_in_between):
    """Return an unconstrained Timepoint named *timepoint_name*.

    Bounds are fixed to [0, inf]. NOTE(review): next_timepoint and
    edge_in_between are currently unused placeholders — confirm whether
    they should constrain the result.
    """
    earliest, latest = 0, float('inf')
    return Timepoint(timepoint_name, earliest, latest)

@staticmethod
def get_next_timepoint(timepoint_name, prev_timepoint, edge_in_between):
    """Return an unconstrained Timepoint named *timepoint_name*.

    Bounds are fixed to [0, inf]. NOTE(review): prev_timepoint and
    edge_in_between are currently unused placeholders — confirm whether
    they should constrain the result.
    """
    earliest, latest = 0, float('inf')
    return Timepoint(timepoint_name, earliest, latest)
Loading

0 comments on commit 0a00cf1

Please sign in to comment.