#!/usr/bin/env python
#
# Tilmann Haak <spam@thaak.de>
# 2011-12-04
# 2011-12-16 Allan Wegan <allanwegan@allanwegan.de>:
#   - Retries on network (or wiki) error.
#
# - Download all pages from larpwiki.de
# - Write each page into a text file, e.g. "LarpWiki.txt"

import sys
import xmlrpclib
import time
import string

netRetryM = 23 # How often to retry each request before giving up.
netRetryD = 0.5 # Seconds to wait initially before retrying a failed request.

# Grows the retry delay by 10% per failed attempt (gentle exponential
# backoff). A def instead of a name-assigned lambda (PEP 8 E731); callers
# use it exactly as before.
def netRetryDFun(oldDelay):
  return oldDelay * 1.1

# Retries a given function at most retriesMax times after failing:
def retryOnError(fun, retriesMax, retryDelay, retryDelayFun):
  # Calls fun() and returns its result. On exception, waits retryDelay
  # seconds, grows the delay via retryDelayFun(retryDelay), and tries
  # again, up to retriesMax additional attempts; once retries are
  # exhausted the exception is re-raised to the caller.
  while True:
    try:
      return fun()
    # Was a bare "except:", which also swallowed KeyboardInterrupt and
    # SystemExit, making the retry loop un-interruptible from the keyboard.
    except Exception:
      if retriesMax < 1:
        raise
      # %-formatted single argument works identically as a py2 print
      # statement target and a py3 print() call (the original mixed a
      # py2-only print statement with print()-style calls).
      print("Error: %s" % (sys.exc_info(),))
      print("Waiting %f seconds before retrying (Retries left: %i)..." % (
        retryDelay, retriesMax
      ))
      time.sleep(retryDelay)
      retriesMax -= 1
      retryDelay = retryDelayFun(retryDelay)

# stores a text in a file:
def writeFile(path, content):
  # 'with' guarantees the descriptor is closed even when fd.write()
  # raises (the original open/write/close leaked it on error).
  with open(path, 'w') as fd:
    fd.write(content)

# Wiki accessor:
# XML-RPC v2 endpoint of the target wiki; every wiki.* call below goes
# over the network through this proxy.
wiki = xmlrpclib.ServerProxy("http://www.larpwiki.de/?action=xmlrpc2")

# get all pages on the wiki (sorted):
# Network call, so it is wrapped in retryOnError to survive transient
# wiki/network failures; result is the sorted list of all page names.
pagenames = retryOnError(
  lambda: sorted(wiki.getAllPages()), netRetryM, netRetryD, netRetryDFun
)

# dump pages to current dir:
# enumerate() replaces the hand-rolled counter, and str.replace replaces
# the deprecated string.replace() module function (removed in Python 3).
# '/' in page names is encoded as %2F so subpages map to flat file names.
for count, pagename in enumerate(pagenames, 1):
  filename = pagename.replace('/', '%2F') + '.txt'
  print("%i/%i %s" % (count, len(pagenames), filename))
  # The lambda is invoked immediately within this iteration, so the late
  # binding of pagename/filename is harmless. writeFile() returns None,
  # so the original unused "text =" assignment was dropped.
  retryOnError(
    lambda: writeFile(filename, wiki.getPage(pagename).encode('utf-8')),
    netRetryM, netRetryD, netRetryDFun
  )
  time.sleep(0.05) # don't hammer on the wiki!