Never adjust the share target below the network target
#!/usr/bin/python3
# Eloipool - Python Bitcoin pool server
# Copyright (C) 2011-2012  Luke Dashjr <luke-jr+eloipool@utopios.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import config

if not hasattr(config, 'ServerName'):
        config.ServerName = 'Unnamed Eloipool'

if not hasattr(config, 'ShareTarget'):
        config.ShareTarget = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff

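# The config module is supplied by the pool operator. A minimal illustrative
# sketch of a config.py (the values below are hypothetical examples, not
# defaults of this file):
#
#     ServerName = 'My Eloipool'
#     UpstreamNetworkId = b'\xF9\xBE\xB4\xD9'   # network magic bytes
#     UpstreamURI = 'http://user:pass@localhost:8332/'
#     TrackerAddr = '<pool payout address>'
#     JSONRPCAddresses = (('', 8337),)
#     ShareTarget = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff
#     DynamicTargetting = 1
#     DynamicTargetGoal = 20
#
# Optional settings referenced below include CoinbaserCmd, GotWorkURI,
# GotWorkTarget, ShareLogging, BitcoinNodeAddresses, UpstreamBitcoindNode,
# SecretUser, and TrustedForwarders.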

import logging

if len(logging.root.handlers) == 0:
        logging.basicConfig(
                format='%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s',
                level=logging.DEBUG,
        )
        for infoOnly in ('checkShare', 'JSONRPCHandler', 'merkleMaker', 'Waker for JSONRPCServer', 'JSONRPCServer'):
                logging.getLogger(infoOnly).setLevel(logging.INFO)

def RaiseRedFlags(reason):
        logging.getLogger('redflag').critical(reason)
        return reason


from bitcoin.node import BitcoinLink, BitcoinNode
bcnode = BitcoinNode(config.UpstreamNetworkId)
bcnode.userAgent += b'Eloipool:0.1/'

import jsonrpc
UpstreamBitcoindJSONRPC = jsonrpc.ServiceProxy(config.UpstreamURI)


from bitcoin.script import BitcoinScript
from bitcoin.txn import Txn
from base58 import b58decode
from struct import pack
import subprocess
from time import time

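# Build the coinbase transaction for a block paying out coinbaseValue.
# If config.CoinbaserCmd is set, it is run with %d replaced by the total
# value and is expected to print the number of extra outputs, then an amount
# line and an address line for each. On any failure (including the coinbaser
# claiming the whole value or more), those outputs are discarded and the
# entire reward goes to config.TrackerAddr.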
def makeCoinbaseTxn(coinbaseValue, useCoinbaser = True):
        txn = Txn.new()

        if useCoinbaser and hasattr(config, 'CoinbaserCmd') and config.CoinbaserCmd:
                coinbased = 0
                try:
                        cmd = config.CoinbaserCmd
                        cmd = cmd.replace('%d', str(coinbaseValue))
                        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
                        nout = int(p.stdout.readline())
                        for i in range(nout):
                                amount = int(p.stdout.readline())
                                addr = p.stdout.readline().rstrip(b'\n').decode('utf8')
                                pkScript = BitcoinScript.toAddress(addr)
                                txn.addOutput(amount, pkScript)
                                coinbased += amount
                except:
                        coinbased = coinbaseValue + 1
                if coinbased >= coinbaseValue:
                        logging.getLogger('makeCoinbaseTxn').error('Coinbaser failed!')
                        txn.outputs = []
                else:
                        coinbaseValue -= coinbased

        pkScript = BitcoinScript.toAddress(config.TrackerAddr)
        txn.addOutput(coinbaseValue, pkScript)

        # TODO
        # TODO: red flag on dupe coinbase
        return txn


import jsonrpc_getwork
from util import Bits2Target

workLog = {}
userStatus = {}
networkTarget = None
DupeShareHACK = {}

server = None
def updateBlocks():
        server.wakeLongpoll()

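# Called by the merkle maker when the best block changes: forget duplicate
# shares from the old block, recompute networkTarget from the new nBits,
# drop all outstanding work, and wake longpoll clients.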
def blockChanged():
        global DupeShareHACK
        DupeShareHACK = {}
        jsonrpc_getwork._CheckForDupesHACK = {}
        global MM, networkTarget, server
        bits = MM.currentBlock[2]
        if bits is None:
                networkTarget = None
        else:
                networkTarget = Bits2Target(bits)
        workLog.clear()
        updateBlocks()


from merklemaker import merkleMaker
MM = merkleMaker()
MM.__dict__.update(config.__dict__)
MM.clearCoinbaseTxn = makeCoinbaseTxn(5000000000, False)  # FIXME
MM.clearCoinbaseTxn.assemble()
MM.makeCoinbaseTxn = makeCoinbaseTxn
MM.onBlockChange = blockChanged
MM.onBlockUpdate = updateBlocks


from binascii import b2a_hex
from copy import deepcopy
from struct import pack, unpack
import threading
from time import time
from util import RejectedShare, dblsha, hash2int, swap32
import jsonrpc
import traceback

gotwork = None
if hasattr(config, 'GotWorkURI'):
        gotwork = jsonrpc.ServiceProxy(config.GotWorkURI)

if not hasattr(config, 'DynamicTargetting'):
        config.DynamicTargetting = 0
else:
        config.DynamicTargetGoal *= 2

def submitGotwork(info):
        try:
                gotwork.gotwork(info)
        except:
                checkShare.logger.warning('Failed to submit gotwork\n' + traceback.format_exc())

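# Dynamic share targeting: per-user targets are scaled so each miner finds
# roughly DynamicTargetGoal shares per two-minute window. A target of None
# means the default config.ShareTarget. The computed target is clamped so it
# is never easier than config.ShareTarget and never harder than the current
# network target.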
def getTarget(username, now):
        if not config.DynamicTargetting:
                return None
        if username in userStatus:
                status = userStatus[username]
        else:
                userStatus[username] = [None, now, 0]
                return None
        (targetIn, lastUpdate, work) = status
        if work <= config.DynamicTargetGoal:
                if now < lastUpdate + 120 and (targetIn is None or targetIn >= networkTarget):
                        return targetIn
                if not work:
                        if targetIn:
                                getTarget.logger.debug("No shares from %s, resetting to minimum target" % (repr(username),))
                                userStatus[username] = [None, now, 0]
                        return None

        deltaSec = now - lastUpdate
        targetIn = targetIn or config.ShareTarget
        target = targetIn
        target = int(target * config.DynamicTargetGoal * deltaSec / 120 / work)
        if target > config.ShareTarget:
                target = None
        elif target < networkTarget:
                target = networkTarget
        if target != targetIn:
                pfx = 'Retargetting %s' % (repr(username),)
                getTarget.logger.debug("%s from: %064x" % (pfx, targetIn,))
                getTarget.logger.debug("%s   to: %064x" % (pfx, target or config.ShareTarget,))
        userStatus[username] = [target, now, 0]
        return target
getTarget.logger = logging.getLogger('getTarget')

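# Record issued work in the per-user work log, keyed by its work-log
# identifier (the merkle root for getwork, a coinbase-embedded id for
# getblocktemplate), with the dynamic target appended to the work data.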
def RegisterWork(username, wli, wld):
        now = time()
        target = getTarget(username, now)
        wld = tuple(wld) + (target,)
        workLog.setdefault(username, {})[wli] = (wld, now)
        return target or config.ShareTarget

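# getwork path: build an 80-byte block header (version 2, previous block,
# merkle root, current time, nBits, and the placeholder nonce b'iolE') and
# register the work under its merkle root.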
def getBlockHeader(username):
        MRD = MM.getMRD()
        (merkleRoot, merkleTree, coinbase, prevBlock, bits, rollPrevBlk) = MRD[:6]
        timestamp = pack('<L', int(time()))
        hdr = b'\2\0\0\0' + prevBlock + merkleRoot + timestamp + bits + b'iolE'
        target = RegisterWork(username, merkleRoot, MRD)
        return (hdr, workLog[username][merkleRoot], target)

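# getblocktemplate path: the work is registered under an identifier taken
# from the coinbase scriptSig (a length-prefixed field following the initial
# push), which lets submitted blocks be matched back to their template.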
def getBlockTemplate(username):
        MC = MM.getMC()
        (dummy, merkleTree, coinbase, prevBlock, bits) = MC[:5]
        wliPos = coinbase[0] + 2
        wliLen = coinbase[wliPos - 1]
        wli = coinbase[wliPos:wliPos+wliLen]
        target = RegisterWork(username, wli, MC)
        return (MC, workLog[username][wli], target)

loggersShare = []

RBDs = []
RBPs = []

from bitcoin.varlen import varlenEncode, varlenDecode
import bitcoin.txn
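# Serialize a complete block: 80-byte header, varint transaction count, then
# each transaction's raw data.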
def assembleBlock(blkhdr, txlist):
        payload = blkhdr
        payload += varlenEncode(len(txlist))
        for tx in txlist:
                payload += tx.data
        return payload

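# Submit a solved block upstream as hex via submitblock, falling back to a
# getmemorypool-based submission for pre-BIP22 bitcoinds. Retries until it
# succeeds, red-flagging at most every 5 seconds, and gives up once the
# chain tip is neither our block nor its parent.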
def blockSubmissionThread(payload, blkhash):
        myblock = (blkhash, payload[4:36])
        payload = b2a_hex(payload).decode('ascii')
        nexterr = 0
        while True:
                try:
                        rv = UpstreamBitcoindJSONRPC.submitblock(payload)
                        break
                except:
                        try:
                                rv = UpstreamBitcoindJSONRPC.getmemorypool(payload)
                                if rv is True:
                                        rv = None
                                elif rv is False:
                                        rv = 'rejected'
                                break
                        except:
                                pass
                        now = time()
                        if now > nexterr:
                                # FIXME: This will show "Method not found" on pre-BIP22 servers
                                RaiseRedFlags(traceback.format_exc())
                                nexterr = now + 5
                        if MM.currentBlock[0] not in myblock:
                                RaiseRedFlags('Giving up on submitting block upstream')
                                return
        if rv:
                # FIXME: The returned value could be a list of multiple responses
                RaiseRedFlags('Upstream block submission failed: %s' % (rv,))

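# Validate a submitted share. Checks, in order: previous-block match, known
# user, nBits, block version, that the work was actually issued, duplicates,
# and the cheap H-not-zero test. If the hash meets the network target the
# block is submitted upstream; then the gotwork hook, the per-work share
# target, timestamp sanity, dynamic-target accounting and, for
# getblocktemplate shares, coinbase and transaction-set integrity.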
def checkShare(share):
        shareTime = share['time'] = time()

        data = share['data']
        data = data[:80]
        (prevBlock, height, bits) = MM.currentBlock
        sharePrevBlock = data[4:36]
        if sharePrevBlock != prevBlock:
                if sharePrevBlock == MM.lastBlock[0]:
                        raise RejectedShare('stale-prevblk')
                raise RejectedShare('bad-prevblk')

        # TODO: use userid
        username = share['username']
        if username not in workLog:
                raise RejectedShare('unknown-user')

        if data[72:76] != bits:
                raise RejectedShare('bad-diffbits')

        # Note that we should accept miners reducing version to 1 if they don't understand 2 yet
        # FIXME: When the supermajority is upgraded to version 2, stop accepting 1!
        if data[1:4] != b'\0\0\0' or data[0] > 2:
                raise RejectedShare('bad-version')

        shareMerkleRoot = data[36:68]
        if 'blkdata' in share:
                pl = share['blkdata']
                (txncount, pl) = varlenDecode(pl)
                cbtxn = bitcoin.txn.Txn(pl)
                cbtxn.disassemble(retExtra=True)
                coinbase = cbtxn.getCoinbase()
                wliPos = coinbase[0] + 2
                wliLen = coinbase[wliPos - 1]
                wli = coinbase[wliPos:wliPos+wliLen]
                mode = 'MC'
                moden = 1
        else:
                wli = shareMerkleRoot
                mode = 'MRD'
                moden = 0

        MWL = workLog[username]
        if wli not in MWL:
                raise RejectedShare('unknown-work')
        (wld, issueT) = MWL[wli]
        share[mode] = wld

        if data in DupeShareHACK:
                raise RejectedShare('duplicate')
        DupeShareHACK[data] = None

        blkhash = dblsha(data)
        if blkhash[28:] != b'\0\0\0\0':
                raise RejectedShare('H-not-zero')
        blkhashn = hash2int(blkhash)

        global networkTarget
        logfunc = getattr(checkShare.logger, 'info' if blkhashn <= networkTarget else 'debug')
        logfunc('BLKHASH: %64x' % (blkhashn,))
        logfunc(' TARGET: %64x' % (networkTarget,))

        workMerkleTree = wld[1]
        workCoinbase = wld[2]
        workTarget = wld[6] if len(wld) > 6 else None

        # NOTE: this isn't actually needed for MC mode, but we're abusing it for a trivial share check...
        txlist = workMerkleTree.data
        txlist = [deepcopy(txlist[0]),] + txlist[1:]
        cbtxn = txlist[0]
        cbtxn.setCoinbase(workCoinbase)
        cbtxn.assemble()

        if blkhashn <= networkTarget:
                logfunc("Submitting upstream")
                RBDs.append( deepcopy( (data, txlist, share.get('blkdata', None), workMerkleTree) ) )
                if not moden:
                        payload = assembleBlock(data, txlist)
                else:
                        payload = share['data'] + share['blkdata']
                logfunc('Real block payload: %s' % (b2a_hex(payload).decode('utf8'),))
                RBPs.append(payload)
                threading.Thread(target=blockSubmissionThread, args=(payload, blkhash)).start()
                bcnode.submitBlock(payload)
                share['upstreamResult'] = True
                MM.updateBlock(blkhash)

        # Gotwork hack...
        if gotwork and blkhashn <= config.GotWorkTarget:
                try:
                        coinbaseMrkl = cbtxn.data
                        coinbaseMrkl += blkhash
                        steps = workMerkleTree._steps
                        coinbaseMrkl += pack('B', len(steps))
                        for step in steps:
                                coinbaseMrkl += step
                        coinbaseMrkl += b"\0\0\0\0"
                        info = {}
                        info['hash'] = b2a_hex(blkhash).decode('ascii')
                        info['header'] = b2a_hex(data).decode('ascii')
                        info['coinbaseMrkl'] = b2a_hex(coinbaseMrkl).decode('ascii')
                        thr = threading.Thread(target=submitGotwork, args=(info,))
                        thr.daemon = True
                        thr.start()
                except:
                        checkShare.logger.warning('Failed to build gotwork request')

        if workTarget is None:
                workTarget = config.ShareTarget
        if blkhashn > workTarget:
                raise RejectedShare('high-hash')
        share['target'] = workTarget
        share['_targethex'] = '%064x' % (workTarget,)

        shareTimestamp = unpack('<L', data[68:72])[0]
        if shareTime < issueT - 120:
                raise RejectedShare('stale-work')
        if shareTimestamp < shareTime - 300:
                raise RejectedShare('time-too-old')
        if shareTimestamp > shareTime + 7200:
                raise RejectedShare('time-too-new')

        if config.DynamicTargetting and username in userStatus:
                # NOTE: userStatus[username] only doesn't exist across restarts
                status = userStatus[username]
                target = status[0] or config.ShareTarget
                if target == workTarget:
                        userStatus[username][2] += 1
                else:
                        userStatus[username][2] += float(target) / workTarget

        if moden:
                cbpre = cbtxn.getCoinbase()
                cbpreLen = len(cbpre)
                if coinbase[:cbpreLen] != cbpre:
                        raise RejectedShare('bad-cb-prefix')

                # Filter out known "I support" flags, to prevent exploits
                for ff in (b'/P2SH/', b'NOP2SH', b'p2sh/CHV', b'p2sh/NOCHV'):
                        if coinbase.find(ff) > max(-1, cbpreLen - len(ff)):
                                raise RejectedShare('bad-cb-flag')

                if len(coinbase) > 100:
                        raise RejectedShare('bad-cb-length')

                cbtxn.setCoinbase(coinbase)
                cbtxn.assemble()
                if shareMerkleRoot != workMerkleTree.withFirst(cbtxn):
                        raise RejectedShare('bad-txnmrklroot')

                allowed = assembleBlock(data, txlist)
                if allowed != share['data'] + share['blkdata']:
                        raise RejectedShare('bad-txns')
checkShare.logger = logging.getLogger('checkShare')

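# Entry point for shares from the JSON-RPC server: run checkShare, record
# any rejection reason, and hand every share (accepted or rejected) to the
# configured share loggers along with its solution hex.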
def receiveShare(share):
        # TODO: username => userid
        try:
                checkShare(share)
        except RejectedShare as rej:
                share['rejectReason'] = str(rej)
                raise
        finally:
                if '_origdata' in share:
                        share['solution'] = share['_origdata']
                else:
                        share['solution'] = b2a_hex(swap32(share['data'])).decode('utf8')
                for i in loggersShare:
                        i(share)

def newBlockNotification():
        logging.getLogger('newBlockNotification').info('Received new block notification')
        MM.updateMerkleTree()
        # TODO: Force RESPOND TO LONGPOLLS?
        pass

def newBlockNotificationSIGNAL(signum, frame):
        # Use a new thread, in case the signal handler is called with locks held
        thr = threading.Thread(target=newBlockNotification, name='newBlockNotification via signal %s' % (signum,))
        thr.daemon = True
        thr.start()

from signal import signal, SIGUSR1
signal(SIGUSR1, newBlockNotificationSIGNAL)


import os
import os.path
import pickle
import signal
import sys
from time import sleep
import traceback

SAVE_STATE_FILENAME = 'eloipool.worklog'

def stopServers():
        logger = logging.getLogger('stopServers')

        if hasattr(stopServers, 'already'):
                logger.debug('Already tried to stop servers before')
                return
        stopServers.already = True

        logger.info('Stopping servers...')
        global bcnode, server
        servers = (bcnode, server)
        for s in servers:
                s.keepgoing = False
        for s in servers:
                try:
                        s.wakeup()
                except:
                        logger.error('Failed to stop server %s\n%s' % (s, traceback.format_exc()))
        i = 0
        while True:
                sl = []
                for s in servers:
                        if s.running:
                                sl.append(s.__class__.__name__)
                if not sl:
                        break
                i += 1
                if i >= 0x100:
                        logger.error('Servers taking too long to stop (%s), giving up' % (', '.join(sl)))
                        break
                sleep(0.01)

        for s in servers:
                for fd in s._fd.keys():
                        os.close(fd)

def saveState(t = None):
        logger = logging.getLogger('saveState')

        # Then, save data needed to resume work
        logger.info('Saving work state to \'%s\'...' % (SAVE_STATE_FILENAME,))
        i = 0
        while True:
                try:
                        with open(SAVE_STATE_FILENAME, 'wb') as f:
                                pickle.dump(t, f)
                                pickle.dump(DupeShareHACK, f)
                                pickle.dump(workLog, f)
                        break
                except:
                        i += 1
                        if i >= 0x10000:
                                logger.error('Failed to save work\n' + traceback.format_exc())
                                try:
                                        os.unlink(SAVE_STATE_FILENAME)
                                except:
                                        logger.error(('Failed to unlink \'%s\'; resume may have trouble\n' % (SAVE_STATE_FILENAME,)) + traceback.format_exc())
                                break

def exit():
        t = time()
        stopServers()
        saveState(t)
        logging.getLogger('exit').info('Goodbye...')
        os.kill(os.getpid(), signal.SIGTERM)
        sys.exit(0)

def restart():
        t = time()
        stopServers()
        saveState(t)
        logging.getLogger('restart').info('Restarting...')
        try:
                os.execv(sys.argv[0], sys.argv)
        except:
                logging.getLogger('restart').error('Failed to exec\n' + traceback.format_exc())

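# Load the work state written by saveState(). Handles the two older on-disk
# layouts from early February 2012 as well as the current one, and skips
# restoring the work log if the saved state is more than two minutes old.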
def restoreState():
        if not os.path.exists(SAVE_STATE_FILENAME):
                return

        global workLog, DupeShareHACK

        logger = logging.getLogger('restoreState')
        s = os.stat(SAVE_STATE_FILENAME)
        logger.info('Restoring saved state from \'%s\' (%d bytes)' % (SAVE_STATE_FILENAME, s.st_size))
        try:
                with open(SAVE_STATE_FILENAME, 'rb') as f:
                        t = pickle.load(f)
                        if type(t) == tuple:
                                if len(t) > 2:
                                        # Future formats, not supported here
                                        ver = t[3]
                                        # TODO

                                # Old format, from 2012-02-02 to 2012-02-03
                                workLog = t[0]
                                DupeShareHACK = t[1]
                                t = None
                        else:
                                if isinstance(t, dict):
                                        # Old format, from 2012-02-03 to 2012-02-03
                                        DupeShareHACK = t
                                        t = None
                                else:
                                        # Current format, from 2012-02-03 onward
                                        DupeShareHACK = pickle.load(f)

                                if t + 120 >= time():
                                        workLog = pickle.load(f)
                                else:
                                        logger.debug('Skipping restore of expired workLog')
        except:
                logger.error('Failed to restore state\n' + traceback.format_exc())
                return
        logger.info('State restored successfully')
        if t:
                logger.info('Total downtime: %g seconds' % (time() - t,))


from jsonrpcserver import JSONRPCListener, JSONRPCServer
import interactivemode
from networkserver import NetworkListener
import threading
import sharelogging
import imp

if __name__ == "__main__":
        if not hasattr(config, 'ShareLogging'):
                config.ShareLogging = ()
        if hasattr(config, 'DbOptions'):
                logging.getLogger('backwardCompatibility').warn('DbOptions configuration variable is deprecated; upgrade to ShareLogging var before 2013-03-05')
                config.ShareLogging = list(config.ShareLogging)
                config.ShareLogging.append( {
                        'type': 'sql',
                        'engine': 'postgres',
                        'dbopts': config.DbOptions,
                        'statement': "insert into shares (rem_host, username, our_result, upstream_result, reason, solution) values ({Q(remoteHost)}, {username}, {YN(not(rejectReason))}, {YN(upstreamResult)}, {rejectReason}, decode({solution}, 'hex'))",
                } )
        for i in config.ShareLogging:
                if not hasattr(i, 'keys'):
                        name, parameters = i
                        logging.getLogger('backwardCompatibility').warn('Using short-term backward compatibility for ShareLogging[\'%s\']; be sure to update config before 2012-04-04' % (name,))
                        if name == 'postgres':
                                name = 'sql'
                                i = {
                                        'engine': 'postgres',
                                        'dbopts': parameters,
                                }
                        elif name == 'logfile':
                                i = {}
                                i['thropts'] = parameters
                                if 'filename' in parameters:
                                        i['filename'] = parameters['filename']
                                        i['thropts'] = dict(i['thropts'])
                                        del i['thropts']['filename']
                        else:
                                i = parameters
                        i['type'] = name

                name = i['type']
                parameters = i
                try:
                        fp, pathname, description = imp.find_module(name, sharelogging.__path__)
                        m = imp.load_module(name, fp, pathname, description)
                        lo = getattr(m, name)(**parameters)
                        loggersShare.append(lo.logShare)
                except:
                        logging.getLogger('sharelogging').error("Error setting up share logger %s: %s", name, sys.exc_info())

        LSbc = []
        if not hasattr(config, 'BitcoinNodeAddresses'):
                config.BitcoinNodeAddresses = ()
        for a in config.BitcoinNodeAddresses:
                LSbc.append(NetworkListener(bcnode, a))

        if hasattr(config, 'UpstreamBitcoindNode') and config.UpstreamBitcoindNode:
                BitcoinLink(bcnode, dest=config.UpstreamBitcoindNode)

        import jsonrpc_getblocktemplate
        import jsonrpc_getwork
        import jsonrpc_setworkaux

        server = JSONRPCServer()
        if hasattr(config, 'JSONRPCAddress'):
                logging.getLogger('backwardCompatibility').warn('JSONRPCAddress configuration variable is deprecated; upgrade to JSONRPCAddresses list before 2013-03-05')
                if not hasattr(config, 'JSONRPCAddresses'):
                        config.JSONRPCAddresses = []
                config.JSONRPCAddresses.insert(0, config.JSONRPCAddress)
        LS = []
        for a in config.JSONRPCAddresses:
                LS.append(JSONRPCListener(server, a))
        if hasattr(config, 'SecretUser'):
                server.SecretUser = config.SecretUser
        server.aux = MM.CoinbaseAux
        server.getBlockHeader = getBlockHeader
        server.getBlockTemplate = getBlockTemplate
        server.receiveShare = receiveShare
        server.RaiseRedFlags = RaiseRedFlags
        server.ShareTarget = config.ShareTarget

        if hasattr(config, 'TrustedForwarders'):
                server.TrustedForwarders = config.TrustedForwarders
        server.ServerName = config.ServerName

        MM.start()

        restoreState()

        bcnode_thr = threading.Thread(target=bcnode.serve_forever)
        bcnode_thr.daemon = True
        bcnode_thr.start()

        server.serve_forever()