Skip to content

Commit

Permalink
Protect peers of low-traffic sites from being closed
Browse files Browse the repository at this point in the history
  • Loading branch information
shortcutme committed May 7, 2017
1 parent 728e497 commit 438bdbc
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 5 deletions.
10 changes: 7 additions & 3 deletions src/File/FileServer.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,19 +205,21 @@ def cleanupSites(self):
import gc
startup = True
time.sleep(5 * 60) # Sites already cleaned up on startup
peers_protected = set([])
while 1:
# Sites health care every 20 min
self.log.debug("Running site cleanup, connections: %s, internet: %s" % (len(self.connections), self.has_internet))
self.log.debug("Running site cleanup, connections: %s, internet: %s, protected peers: %s" % (len(self.connections), self.has_internet, peers_protected))

for address, site in self.sites.items():
if not site.settings["serving"]:
continue

if not startup:
site.cleanupPeers()
site.cleanupPeers(peers_protected)

time.sleep(1) # Prevent too quick request

peers_protected = set([])
for address, site in self.sites.items():
if not site.settings["serving"]:
continue
Expand All @@ -231,7 +233,9 @@ def cleanupSites(self):
site.retryBadFiles()

if not startup: # Don't do it at start up because checkSite already has needConnections at start up.
site.needConnections(check_site_on_reconnect=True) # Keep active peer connection to get the updates
connected_num = site.needConnections(check_site_on_reconnect=True) # Keep active peer connection to get the updates
if connected_num < config.connected_limit: # This site has small amount of peers, protect them from closing
peers_protected.update([peer.key for peer in site.getConnectedPeers()])

time.sleep(1) # Prevent too quick request

Expand Down
6 changes: 4 additions & 2 deletions src/Site/Site.py
Original file line number Diff line number Diff line change
Expand Up @@ -900,9 +900,9 @@ def needConnections(self, num=4, check_site_on_reconnect=False):
connected += 1 # Successfully connected
if connected >= need:
break
self.log.debug("Connected before: %s, after: %s. Check site: %s." % (connected_before, connected, check_site_on_reconnect))

if check_site_on_reconnect and connected_before == 0 and connected > 0 and self.connection_server.has_internet:
self.log.debug("Connected before: %s, after: %s. We need to check the site." % (connected_before, connected))
gevent.spawn(self.update, check_files=False)

return connected
Expand Down Expand Up @@ -949,7 +949,7 @@ def getConnectedPeers(self):
return back

# Cleanup probably dead peers and close connection if too much
def cleanupPeers(self):
def cleanupPeers(self, peers_protected=[]):
peers = self.peers.values()
if len(peers) > 20:
# Cleanup old peers
Expand Down Expand Up @@ -982,6 +982,8 @@ def cleanupPeers(self):
for peer in sorted(connected_peers, key=lambda peer: min(peer.connection.sites, 5)): # Try to keep connections with more sites
if not peer.connection:
continue
if peer.key in peers_protected:
continue
if peer.connection.sites > 5:
break
peer.connection.close("Cleanup peers")
Expand Down

0 comments on commit 438bdbc

Please sign in to comment.