Skip to content

Commit

Permalink
Merge pull request ZeroNetX#213 from zeronet-conservancy/recursive-publish
Browse files Browse the repository at this point in the history

support --recursive sitePublish on command line
  • Loading branch information
caryoscelus authored Jul 22, 2023
2 parents 00f298a + c09b5e1 commit 5cc1bdf
Show file tree
Hide file tree
Showing 3 changed files with 46 additions and 41 deletions.
2 changes: 2 additions & 0 deletions src/Config.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,8 @@ def createArguments(self):
default=15441, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
default="content.json", metavar="inner_path")
action.add_argument('--recursive', help="Whether to publish all of site's content.json. "
"Overrides --inner_path. (default: false)", action='store_true', dest='recursive')

# SiteVerify
action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
Expand Down
11 changes: 0 additions & 11 deletions src/Content/ContentDbDict.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,17 +95,6 @@ def items(self):
back.append((key, val))
return back

def values(self):
    """Return all values, lazily loading entries not yet in memory.

    Iterates the underlying dict storage directly (bypassing this class's
    overridden accessors).  A falsy stored value marks a not-yet-loaded
    entry, which is loaded on demand via ``self.loadItem``; entries that
    fail to load are skipped (best-effort listing) rather than raising.
    """
    back = []
    # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3 (this file already uses f-strings, i.e. Python 3.6+).
    # dict.items() is the correct Python 3 equivalent and still bypasses
    # any items()/__getitem__ overridden on self.
    for key, val in dict.items(self):
        if not val:
            try:
                val = self.loadItem(key)
            except Exception:
                # Unloadable entry: skip it instead of aborting the listing.
                continue
        back.append(val)
    return back

def get(self, key, default=None):
try:
return self.__getitem__(key)
Expand Down
74 changes: 44 additions & 30 deletions src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -422,47 +422,61 @@ def getWebsocket(self, site):
ws = websocket.create_connection(ws_address)
return ws

def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
global file_server
from Site.Site import Site
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", recursive=False):
from Site import SiteManager
from File import FileServer # We need fileserver to handle incoming file requests
from Peer import Peer
file_server = FileServer()
site = SiteManager.site_manager.get(address)
logging.info("Loading site...")
site = SiteManager.site_manager.get(address)
site.settings["serving"] = True # Serving the site even if its disabled

if not recursive:
inner_paths = [inner_path]
else:
inner_paths = list(site.content_manager.contents.keys())

try:
ws = self.getWebsocket(site)

except Exception as err:
self.sitePublishFallback(site, peer_ip, peer_port, inner_paths, err)

else:
logging.info("Sending siteReload")
self.siteCmd(address, "siteReload", inner_path)

logging.info("Sending sitePublish")
self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
for inner_path in inner_paths:
logging.info(f"Sending sitePublish for {inner_path}")
self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
logging.info("Done.")

except Exception as err:
logging.info("Can't connect to local websocket client: %s" % err)
logging.info("Creating FileServer....")
file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity
time.sleep(0.001)

# Started fileserver
file_server.portCheck()
if peer_ip: # Announce ip specificed
site.addPeer(peer_ip, peer_port)
else: # Just ask the tracker
logging.info("Gathering peers from tracker")
site.announce() # Gather peers
ws.close()

def sitePublishFallback(self, site, peer_ip, peer_port, inner_paths, err):
    """Publish a site's content files without a running local UI client.

    Used when connecting to the local websocket client failed: spins up a
    temporary FileServer in this process, gathers peers (from the given
    peer or the trackers), and pushes each content file in *inner_paths*
    directly to peers.

    Args:
        site: the Site object to publish (assumed already loaded and
            serving — TODO confirm against caller).
        peer_ip: optional peer address to announce to directly; if falsy,
            peers are gathered from the trackers instead.
        peer_port: port for peer_ip.
        inner_paths: list of content file inner paths to publish.
        err: the websocket connection error that triggered this fallback,
            or None; logged for diagnostics when present.
    """
    if err is not None:
        logging.info(f"Can't connect to local websocket client: {err}")
    logging.info("Publish using fallback mechanism. "
                 "Note that there might be not enough time for peer discovery, "
                 "but you can specify target peer on command line.")
    logging.info("Creating FileServer....")
    # NOTE(review): relies on a module-global `file_server` being set up by
    # the caller — verify sitePublish assigns it before calling this.
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
    # Yield to the gevent hub so the spawned server greenlet gets started.
    time.sleep(0.001)

    # Started fileserver
    file_server.portCheck()
    if peer_ip:  # Announce ip specificed
        site.addPeer(peer_ip, peer_port)
    else:  # Just ask the tracker
        logging.info("Gathering peers from tracker")
        site.announce()  # Gather peers

    for inner_path in inner_paths:
        published = site.publish(5, inner_path)  # Push to peers
        if published > 0:
            # Keep the temporary file server alive briefly so peers can
            # fetch the updated files from us.
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")

if published > 0:
time.sleep(3)
logging.info("Serving files (max 60s)...")
gevent.joinall([file_server_thread], timeout=60)
logging.info("Done.")
else:
logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")

# Crypto commands
def cryptPrivatekeyToAddress(self, privatekey=None):
Expand Down

0 comments on commit 5cc1bdf

Please sign in to comment.