Bugfix: Fix some missing None handling
[bitcoin:eloipool.git] / eloipool.py
1 #!/usr/bin/python3
2 # Eloipool - Python Bitcoin pool server
3 # Copyright (C) 2011-2012  Luke Dashjr <luke-jr+eloipool@utopios.org>
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License as
7 # published by the Free Software Foundation, either version 3 of the
8 # License, or (at your option) any later version.
9 #
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13 # GNU Affero General Public License for more details.
14 #
15 # You should have received a copy of the GNU Affero General Public License
16 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
17
18 import config
19
# Fill in defaults for optional config settings.
if not hasattr(config, 'ServerName'):
	config.ServerName = 'Unnamed Eloipool'

# Default maximum share target (very easy; presumably difficulty-1-style — confirm against pool policy)
if not hasattr(config, 'ShareTarget'):
	config.ShareTarget = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff
25
26
27 import logging
28
# Only configure logging if nothing else (e.g. an embedding application)
# has installed handlers already.
if len(logging.root.handlers) == 0:
	logging.basicConfig(
		format='%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s',
		level=logging.DEBUG,
	)
	# Quiet the chattiest subsystems down to INFO.
	for infoOnly in ('checkShare', 'JSONRPCHandler', 'merkleMaker', 'Waker for JSONRPCServer', 'JSONRPCServer'):
		logging.getLogger(infoOnly).setLevel(logging.INFO)
36
def RaiseRedFlags(reason):
	"""Report a serious problem at CRITICAL severity on the 'redflag' logger.

	Returns the reason unchanged, so callers can log and raise/return in
	one expression.
	"""
	redflag = logging.getLogger('redflag')
	redflag.critical(reason)
	return reason
40
41
42 from bitcoin.node import BitcoinLink, BitcoinNode
# Bitcoin p2p node, used later to announce solved blocks directly to the network.
bcnode = BitcoinNode(config.UpstreamNetworkId)
bcnode.userAgent += b'Eloipool:0.1/'
45
46 import jsonrpc
# JSON-RPC connection to the upstream bitcoind (block submission, getmemorypool).
UpstreamBitcoindJSONRPC = jsonrpc.ServiceProxy(config.UpstreamURI)
48
49
50 from bitcoin.script import BitcoinScript
51 from bitcoin.txn import Txn
52 from base58 import b58decode
53 from struct import pack
54 import subprocess
55 from time import time
56
def makeCoinbaseTxn(coinbaseValue, useCoinbaser = True):
	"""Build the coinbase transaction paying out coinbaseValue (base units).

	If config.CoinbaserCmd is set (and useCoinbaser), run it as a shell
	command with '%d' replaced by coinbaseValue; it must print a count line
	followed by amount/address line pairs, each of which becomes an output.
	Whatever the coinbaser did not allocate goes to config.TrackerAddr.
	If the coinbaser fails, or tries to allocate the entire value or more,
	its outputs are discarded and everything goes to the tracker address.
	Returns the (unassembled) Txn.
	"""
	txn = Txn.new()
	
	if useCoinbaser and hasattr(config, 'CoinbaserCmd') and config.CoinbaserCmd:
		coinbased = 0
		try:
			cmd = config.CoinbaserCmd
			cmd = cmd.replace('%d', str(coinbaseValue))
			# NOTE: shell=True is intentional here — CoinbaserCmd is
			# operator-supplied configuration, not untrusted input.
			p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
			try:
				nout = int(p.stdout.readline())
				for i in range(nout):
					amount = int(p.stdout.readline())
					addr = p.stdout.readline().rstrip(b'\n').decode('utf8')
					pkScript = BitcoinScript.toAddress(addr)
					txn.addOutput(amount, pkScript)
					coinbased += amount
			finally:
				# Don't leak the pipe fd on either success or failure
				p.stdout.close()
		except:
			# Log why the coinbaser failed (was silently swallowed before),
			# then force the failure path below.
			logging.getLogger('makeCoinbaseTxn').debug('Coinbaser failed:\n' + traceback.format_exc())
			coinbased = coinbaseValue + 1
		if coinbased >= coinbaseValue:
			# Coinbaser errored out or tried to spend the whole reward
			logging.getLogger('makeCoinbaseTxn').error('Coinbaser failed!')
			txn.outputs = []
		else:
			coinbaseValue -= coinbased
	
	pkScript = BitcoinScript.toAddress(config.TrackerAddr)
	txn.addOutput(coinbaseValue, pkScript)
	
	# TODO
	# TODO: red flag on dupe coinbase
	return txn
87
88
89 import jsonrpc_getwork
90 from util import Bits2Target
91
# Global state shared across requests:
workLog = {}  # username -> { work-log id -> (work data tuple, issue time) }
userStatus = {}  # username -> [current target (None = default), last update time, work counted]
networkTarget = None  # current network block target; None until a template is seen
DupeShareHACK = {}  # share headers already seen this block, for duplicate rejection

server = None  # JSONRPCServer instance; assigned in __main__
def updateBlocks():
	"""Wake the JSON-RPC server's longpoll connections so miners refetch work."""
	server.wakeLongpoll()
100
def blockChanged():
	# A new best block arrived: all outstanding work is stale, so flush the
	# duplicate-share caches and the work log, recompute the network target
	# from the template's nBits, and wake longpollers with fresh work.
	global MM, networkTarget, server, DupeShareHACK
	DupeShareHACK = {}
	jsonrpc_getwork._CheckForDupesHACK = {}
	bits = MM.currentBlock[2]
	networkTarget = None if bits is None else Bits2Target(bits)
	workLog.clear()
	updateBlocks()
113
114
115 from merklemaker import merkleMaker
# The merkleMaker keeps block templates / merkle trees current in the background.
MM = merkleMaker()
# Copy all config settings onto the merkleMaker instance wholesale.
MM.__dict__.update(config.__dict__)
MM.clearCoinbaseTxn = makeCoinbaseTxn(5000000000, False)  # FIXME
MM.clearCoinbaseTxn.assemble()
MM.makeCoinbaseTxn = makeCoinbaseTxn
MM.onBlockChange = blockChanged
MM.onBlockUpdate = updateBlocks
123
124
125 from binascii import b2a_hex
126 from copy import deepcopy
127 from struct import pack, unpack
128 import threading
129 from time import time
130 from util import RejectedShare, dblsha, hash2int, swap32
131 import jsonrpc
132 import traceback
133
# Optional 'gotwork' share-forwarding service (see submitGotwork/checkShare).
gotwork = None
if hasattr(config, 'GotWorkURI'):
	gotwork = jsonrpc.ServiceProxy(config.GotWorkURI)

if not hasattr(config, 'DynamicTargetting'):
	config.DynamicTargetting = 0
else:
	# NOTE(review): assumes DynamicTargetGoal is always set alongside
	# DynamicTargetting — confirm against the sample config.
	config.DynamicTargetGoal *= 2
142
def submitGotwork(info):
	"""Forward a share's info dict to the configured gotwork service.

	Best-effort: any failure is logged (on checkShare's logger) and swallowed.
	"""
	try:
		gotwork.gotwork(info)
	except:
		checkShare.logger.warning('Failed to submit gotwork\n' + traceback.format_exc())
148
def getTarget(username, now):
	"""Return the dynamic per-user share target, or None for the pool default.

	Tracks per-user share rates in userStatus ([target, lastUpdate, work])
	and rescales the target toward config.DynamicTargetGoal shares per
	2 minutes. A returned None means "use config.ShareTarget".
	"""
	if not config.DynamicTargetting:
		return None
	if username in userStatus:
		status = userStatus[username]
	else:
		# First sighting of this user: start at the default target
		userStatus[username] = [None, now, 0]
		return None
	(targetIn, lastUpdate, work) = status
	if work <= config.DynamicTargetGoal:
		# networkTarget may be None before the first block template; treat
		# that as "no lower bound" rather than crashing on a None compare
		if now < lastUpdate + 120 and (targetIn is None or networkTarget is None or targetIn >= networkTarget):
			return targetIn
		if not work:
			if targetIn:
				# BUGFIX: the username argument was missing, so the '%s'
				# placeholder was logged literally
				getTarget.logger.debug("No shares from '%s', resetting to minimum target", username)
				userStatus[username] = [None, now, 0]
			return None
	
	# Rescale so the user produces ~DynamicTargetGoal shares per 120s
	deltaSec = now - lastUpdate
	target = targetIn or config.ShareTarget
	target = int(target * config.DynamicTargetGoal * deltaSec / 120 / work)
	if target >= config.ShareTarget:
		target = None
	elif networkTarget is not None and target < networkTarget:
		# Never assign a target harder than the network itself
		target = networkTarget
	if target != targetIn:
		pfx = 'Retargetting %s' % (repr(username),)
		getTarget.logger.debug("%s from: %064x" % (pfx, targetIn or config.ShareTarget,))
		getTarget.logger.debug("%s   to: %064x" % (pfx, target   or config.ShareTarget,))
	userStatus[username] = [target, now, 0]
	return target
getTarget.logger = logging.getLogger('getTarget')
181
def RegisterWork(username, wli, wld):
	# Record issued work (keyed by the work-log id) together with the user's
	# dynamic target and the issue time, so the matching share can be
	# validated later. Returns the target actually in effect.
	now = time()
	target = getTarget(username, now)
	workLog.setdefault(username, {})[wli] = (tuple(wld) + (target,), now)
	return target or config.ShareTarget
188
def getBlockHeader(username):
	"""Produce a getwork-style block header for username.

	Returns (header bytes, workLog entry, effective share target). The
	header uses version 2, the current time, and a fixed placeholder nonce.
	"""
	MRD = MM.getMRD()
	(merkleRoot, merkleTree, coinbase, prevBlock, bits, rollPrevBlk) = MRD[:6]
	timestamp = pack('<L', int(time()))
	hdr = b'\2\0\0\0' + prevBlock + merkleRoot + timestamp + bits + b'iolE'
	# BUGFIX: the old direct workLog store here was dead code — RegisterWork
	# immediately overwrote it with the (wld + target, now) form.
	target = RegisterWork(username, merkleRoot, MRD)
	return (hdr, workLog[username][merkleRoot], target)
197
def getBlockTemplate(username):
	# Hand out a getblocktemplate-style job. The work-log identifier is a
	# length-prefixed blob embedded in the coinbase scriptsig, located just
	# past its leading length byte.
	MC = MM.getMC()
	(dummy, merkleTree, coinbase, prevBlock, bits) = MC[:5]
	idOfs = coinbase[0] + 2
	idLen = coinbase[idOfs - 1]
	wli = coinbase[idOfs:idOfs + idLen]
	target = RegisterWork(username, wli, MC)
	return (MC, workLog[username][wli], target)
206
loggersShare = []  # callables invoked with each processed share (see receiveShare)

RBDs = []  # data of blocks submitted upstream, kept for later inspection
RBPs = []  # serialized payloads of blocks submitted upstream
211
212 from bitcoin.varlen import varlenEncode, varlenDecode
213 import bitcoin.txn
def assembleBlock(blkhdr, txlist):
	# Serialize a complete block: header, varint transaction count, then
	# each transaction's raw bytes, concatenated in order.
	parts = [blkhdr, varlenEncode(len(txlist))]
	parts.extend(tx.data for tx in txlist)
	return b''.join(parts)
220
def blockSubmissionThread(payload, blkhash):
	"""Submit a solved block upstream, retrying until accepted or obsoleted.

	Tries BIP 22 submitblock first, falling back to getmemorypool(data) for
	pre-BIP22 servers. Retries forever while the network still needs this
	block; gives up once MM.currentBlock shows it was solved elsewhere.
	"""
	# myblock holds both our new hash and the prevBlock we built on, so the
	# "current block" check below matches either state
	myblock = (blkhash, payload[4:36])
	payload = b2a_hex(payload).decode('ascii')
	nexterr = 0
	while True:
		try:
			rv = UpstreamBitcoindJSONRPC.submitblock(payload)
			break
		except:
			try:
				# Pre-BIP22 fallback; normalize its bool result to the
				# submitblock convention (None = accepted)
				rv = UpstreamBitcoindJSONRPC.getmemorypool(payload)
				if rv is True:
					rv = None
				elif rv is False:
					rv = 'rejected'
				break
			except:
				pass
			now = time()
			# Rate-limit the red-flag log to once per 5 seconds
			if now > nexterr:
				# FIXME: This will show "Method not found" on pre-BIP22 servers
				RaiseRedFlags(traceback.format_exc())
				nexterr = now + 5
			# Stop retrying once the chain has moved past this block
			if MM.currentBlock[0] not in myblock:
				RaiseRedFlags('Giving up on submitting block upstream')
				return
	if rv:
		# FIXME: The returned value could be a list of multiple responses
		RaiseRedFlags('Upstream block submission failed: %s' % (rv,))
250
def checkShare(share):
	"""Validate a submitted share, submitting it upstream if it solves a block.

	Mutates the share dict (time, MRD/MC, target, upstreamResult, ...) and
	raises RejectedShare with a reason string on any validation failure.
	"""
	shareTime = share['time'] = time()
	
	data = share['data']
	data = data[:80]
	(prevBlock, height, bits) = MM.currentBlock
	sharePrevBlock = data[4:36]
	if sharePrevBlock != prevBlock:
		if sharePrevBlock == MM.lastBlock[0]:
			raise RejectedShare('stale-prevblk')
		raise RejectedShare('bad-prevblk')
	
	# TODO: use userid
	username = share['username']
	if username not in workLog:
		raise RejectedShare('unknown-user')
	
	if data[72:76] != bits:
		raise RejectedShare('bad-diffbits')
	
	# Note that we should accept miners reducing version to 1 if they don't understand 2 yet
	# FIXME: When the supermajority is upgraded to version 2, stop accepting 1!
	if data[1:4] != b'\0\0\0' or data[0] > 2:
		raise RejectedShare('bad-version')
	
	# Identify which issued work this share answers: GBT shares carry the
	# work-log id in the coinbase; getwork shares are keyed by merkle root
	shareMerkleRoot = data[36:68]
	if 'blkdata' in share:
		pl = share['blkdata']
		(txncount, pl) = varlenDecode(pl)
		cbtxn = bitcoin.txn.Txn(pl)
		cbtxn.disassemble(retExtra=True)
		coinbase = cbtxn.getCoinbase()
		wliPos = coinbase[0] + 2
		wliLen = coinbase[wliPos - 1]
		wli = coinbase[wliPos:wliPos+wliLen]
		mode = 'MC'
		moden = 1
	else:
		wli = shareMerkleRoot
		mode = 'MRD'
		moden = 0
	
	MWL = workLog[username]
	if wli not in MWL:
		raise RejectedShare('unknown-work')
	(wld, issueT) = MWL[wli]
	share[mode] = wld
	
	if data in DupeShareHACK:
		raise RejectedShare('duplicate')
	DupeShareHACK[data] = None
	
	blkhash = dblsha(data)
	if blkhash[28:] != b'\0\0\0\0':
		raise RejectedShare('H-not-zero')
	blkhashn = hash2int(blkhash)
	
	global networkTarget
	# BUGFIX: networkTarget is None until the first block template arrives;
	# comparing/formatting None raised TypeError here. Treat None as "not a
	# block candidate" instead.
	isBlockCandidate = networkTarget is not None and blkhashn <= networkTarget
	logfunc = getattr(checkShare.logger, 'info' if isBlockCandidate else 'debug')
	logfunc('BLKHASH: %64x' % (blkhashn,))
	if networkTarget is not None:
		logfunc(' TARGET: %64x' % (networkTarget,))
	
	workMerkleTree = wld[1]
	workCoinbase = wld[2]
	workTarget = wld[6]
	
	# NOTE: this isn't actually needed for MC mode, but we're abusing it for a trivial share check...
	txlist = workMerkleTree.data
	txlist = [deepcopy(txlist[0]),] + txlist[1:]
	cbtxn = txlist[0]
	cbtxn.setCoinbase(workCoinbase)
	cbtxn.assemble()
	
	if isBlockCandidate:
		logfunc("Submitting upstream")
		RBDs.append( deepcopy( (data, txlist, share.get('blkdata', None), workMerkleTree) ) )
		if not moden:
			payload = assembleBlock(data, txlist)
		else:
			payload = share['data'] + share['blkdata']
		logfunc('Real block payload: %s' % (b2a_hex(payload).decode('utf8'),))
		RBPs.append(payload)
		threading.Thread(target=blockSubmissionThread, args=(payload, blkhash)).start()
		bcnode.submitBlock(payload)
		share['upstreamResult'] = True
		MM.updateBlock(blkhash)
	
	# Gotwork hack...
	if gotwork and blkhashn <= config.GotWorkTarget:
		try:
			coinbaseMrkl = cbtxn.data
			coinbaseMrkl += blkhash
			steps = workMerkleTree._steps
			coinbaseMrkl += pack('B', len(steps))
			for step in steps:
				coinbaseMrkl += step
			coinbaseMrkl += b"\0\0\0\0"
			info = {}
			info['hash'] = b2a_hex(blkhash).decode('ascii')
			info['header'] = b2a_hex(data).decode('ascii')
			info['coinbaseMrkl'] = b2a_hex(coinbaseMrkl).decode('ascii')
			thr = threading.Thread(target=submitGotwork, args=(info,))
			thr.daemon = True
			thr.start()
		except:
			checkShare.logger.warning('Failed to build gotwork request')
	
	if workTarget is None:
		workTarget = config.ShareTarget
	if blkhashn > workTarget:
		raise RejectedShare('high-hash')
	share['target'] = workTarget
	share['_targethex'] = '%064x' % (workTarget,)
	
	shareTimestamp = unpack('<L', data[68:72])[0]
	if shareTime < issueT - 120:
		raise RejectedShare('stale-work')
	if shareTimestamp < shareTime - 300:
		raise RejectedShare('time-too-old')
	if shareTimestamp > shareTime + 7200:
		raise RejectedShare('time-too-new')
	
	if config.DynamicTargetting and username in userStatus:
		# NOTE: userStatus[username] only doesn't exist across restarts
		status = userStatus[username]
		target = status[0] or config.ShareTarget
		if target == workTarget:
			userStatus[username][2] += 1
		else:
			# Credit partial/extra work proportionally to target difficulty
			userStatus[username][2] += float(target) / workTarget
	
	if moden:
		# GBT mode: verify the miner's coinbase extends ours legitimately
		cbpre = cbtxn.getCoinbase()
		cbpreLen = len(cbpre)
		if coinbase[:cbpreLen] != cbpre:
			raise RejectedShare('bad-cb-prefix')
		
		# Filter out known "I support" flags, to prevent exploits
		for ff in (b'/P2SH/', b'NOP2SH', b'p2sh/CHV', b'p2sh/NOCHV'):
			if coinbase.find(ff) > max(-1, cbpreLen - len(ff)):
				raise RejectedShare('bad-cb-flag')
		
		if len(coinbase) > 100:
			raise RejectedShare('bad-cb-length')
		
		cbtxn.setCoinbase(coinbase)
		cbtxn.assemble()
		if shareMerkleRoot != workMerkleTree.withFirst(cbtxn):
			raise RejectedShare('bad-txnmrklroot')
		
		allowed = assembleBlock(data, txlist)
		if allowed != share['data'] + share['blkdata']:
			raise RejectedShare('bad-txns')
checkShare.logger = logging.getLogger('checkShare')
405
def receiveShare(share):
	"""Validate an incoming share and dispatch it to all share loggers.

	Annotates the share dict ('rejectReason' on failure, 'solution' always)
	and re-raises RejectedShare so the caller can report the rejection.
	"""
	# TODO: username => userid
	try:
		checkShare(share)
	except RejectedShare as rej:
		share['rejectReason'] = str(rej)
		raise
	finally:
		# Always record a hex 'solution' — even for rejected shares — so
		# every logger gets a complete record
		if '_origdata' in share:
			share['solution'] = share['_origdata']
		else:
			share['solution'] = b2a_hex(swap32(share['data'])).decode('utf8')
		for i in loggersShare:
			i(share)
420
def newBlockNotification():
	"""Handle an external new-block notification by refreshing the merkle tree."""
	logging.getLogger('newBlockNotification').info('Received new block notification')
	MM.updateMerkleTree()
	# TODO: Force RESPOND TO LONGPOLLS?
426
def newBlockNotificationSIGNAL(signum, frame):
	# Signal handlers can run while arbitrary locks are held, so hand the
	# real work off to a fresh daemon thread instead of doing it inline.
	handler = threading.Thread(
		target=newBlockNotification,
		name='newBlockNotification via signal %s' % (signum,),
	)
	handler.daemon = True
	handler.start()
432
433 from signal import signal, SIGUSR1
# SIGUSR1 triggers a merkle-tree refresh (external new-block notification)
signal(SIGUSR1, newBlockNotificationSIGNAL)
435
436
437 import os
438 import os.path
439 import pickle
440 import signal
441 import sys
442 from time import sleep
443 import traceback
444
# File used to persist work state across restarts (see saveState/restoreState)
SAVE_STATE_FILENAME = 'eloipool.worklog'
446
def stopServers():
	"""Stop the p2p node and JSON-RPC server, waiting briefly for shutdown.

	Idempotent: a repeat call returns immediately. After (up to ~2.5s of)
	polling for shutdown, any file descriptors the servers still hold are
	force-closed.
	"""
	logger = logging.getLogger('stopServers')
	
	if hasattr(stopServers, 'already'):
		logger.debug('Already tried to stop servers before')
		return
	stopServers.already = True
	
	logger.info('Stopping servers...')
	global bcnode, server
	servers = (bcnode, server)
	# Ask both servers to stop, then wake them so they notice
	for s in servers:
		s.keepgoing = False
	for s in servers:
		try:
			s.wakeup()
		except:
			logger.error('Failed to stop server %s\n%s' % (s, traceback.format_exc()))
	# Poll until both report stopped, up to 0x100 iterations of 10ms each
	i = 0
	while True:
		sl = []
		for s in servers:
			if s.running:
				sl.append(s.__class__.__name__)
		if not sl:
			break
		i += 1
		if i >= 0x100:
			logger.error('Servers taking too long to stop (%s), giving up' % (', '.join(sl)))
			break
		sleep(0.01)
	
	# Close any file descriptors the servers still hold open
	for s in servers:
		for fd in s._fd.keys():
			os.close(fd)
482
def saveState(t = None):
	"""Persist resume state (shutdown time, DupeShareHACK, workLog) to disk.

	Writes three consecutive pickles to SAVE_STATE_FILENAME, retrying on
	failure. After 0x10000 failed attempts, gives up: logs the error,
	removes any partial file so restoreState won't load garbage, and
	returns.
	"""
	logger = logging.getLogger('saveState')
	
	# Then, save data needed to resume work
	logger.info('Saving work state to \'%s\'...' % (SAVE_STATE_FILENAME,))
	i = 0
	while True:
		try:
			with open(SAVE_STATE_FILENAME, 'wb') as f:
				pickle.dump(t, f)
				pickle.dump(DupeShareHACK, f)
				pickle.dump(workLog, f)
			break
		except:
			i += 1
			if i >= 0x10000:
				logger.error('Failed to save work\n' + traceback.format_exc())
				try:
					os.unlink(SAVE_STATE_FILENAME)
				except:
					logger.error(('Failed to unlink \'%s\'; resume may have trouble\n' % (SAVE_STATE_FILENAME,)) + traceback.format_exc())
				# BUGFIX: without this break the give-up branch looped
				# forever, re-logging the error on every iteration
				break
504
def exit():
	"""Clean shutdown: stop servers, save state, then terminate the process.

	NOTE: shadows the builtin exit(); kept as-is since callers use this name.
	"""
	t = time()
	stopServers()
	saveState(t)
	logging.getLogger('exit').info('Goodbye...')
	# Send SIGTERM to our own process, then exit the main thread
	os.kill(os.getpid(), signal.SIGTERM)
	sys.exit(0)
512
def restart():
	"""Stop servers, save state, and re-exec this program in place.

	On exec failure the error is logged and the (already-stopped) process
	simply falls through.
	"""
	t = time()
	stopServers()
	saveState(t)
	logging.getLogger('restart').info('Restarting...')
	try:
		os.execv(sys.argv[0], sys.argv)
	except:
		logging.getLogger('restart').error('Failed to exec\n' + traceback.format_exc())
522
def restoreState():
	"""Restore workLog/DupeShareHACK saved by saveState, handling old formats.

	Supports three on-disk layouts: a (workLog, DupeShareHACK) tuple, a bare
	DupeShareHACK dict, and the current timestamp + DupeShareHACK + workLog
	pickle stream. The workLog is only restored if saved within the last
	two minutes.
	"""
	if not os.path.exists(SAVE_STATE_FILENAME):
		return
	
	global workLog, DupeShareHACK
	
	logger = logging.getLogger('restoreState')
	s = os.stat(SAVE_STATE_FILENAME)
	logger.info('Restoring saved state from \'%s\' (%d bytes)' % (SAVE_STATE_FILENAME, s.st_size))
	try:
		with open(SAVE_STATE_FILENAME, 'rb') as f:
			t = pickle.load(f)
			if type(t) == tuple:
				if len(t) > 2:
					# Future formats, not supported here
					# NOTE(review): t[3] IndexErrors when len(t) == 3 — confirm intended index
					ver = t[3]
					# TODO
				
				# Old format, from 2012-02-02 to 2012-02-03
				workLog = t[0]
				DupeShareHACK = t[1]
				t = None
			else:
				if isinstance(t, dict):
					# Old format, from 2012-02-03 to 2012-02-03
					DupeShareHACK = t
					t = None
				else:
					# Current format, from 2012-02-03 onward
					DupeShareHACK = pickle.load(f)
				
				# BUGFIX: t is None for the dict-only format (no workLog was
				# saved), and None + 120 raised TypeError, aborting restore
				if t is not None and t + 120 >= time():
					workLog = pickle.load(f)
				else:
					logger.debug('Skipping restore of expired workLog')
	except:
		logger.error('Failed to restore state\n' + traceback.format_exc())
		return
	logger.info('State restored successfully')
	if t:
		logger.info('Total downtime: %g seconds' % (time() - t,))
564
565
566 from jsonrpcserver import JSONRPCListener, JSONRPCServer
567 import interactivemode
568 from networkserver import NetworkListener
569 import threading
570 import sharelogging
571 import imp
572
if __name__ == "__main__":
	# --- Share logging setup (with several backward-compatibility shims) ---
	if not hasattr(config, 'ShareLogging'):
		config.ShareLogging = ()
	if hasattr(config, 'DbOptions'):
		# Legacy DbOptions: translate to an equivalent 'sql' ShareLogging entry
		logging.getLogger('backwardCompatibility').warn('DbOptions configuration variable is deprecated; upgrade to ShareLogging var before 2013-03-05')
		config.ShareLogging = list(config.ShareLogging)
		config.ShareLogging.append( {
			'type': 'sql',
			'engine': 'postgres',
			'dbopts': config.DbOptions,
			'statement': "insert into shares (rem_host, username, our_result, upstream_result, reason, solution) values ({Q(remoteHost)}, {username}, {YN(not(rejectReason))}, {YN(upstreamResult)}, {rejectReason}, decode({solution}, 'hex'))",
		} )
	for i in config.ShareLogging:
		if not hasattr(i, 'keys'):
			# Legacy (name, parameters) tuple entry: convert to the dict form
			name, parameters = i
			logging.getLogger('backwardCompatibility').warn('Using short-term backward compatibility for ShareLogging[\'%s\']; be sure to update config before 2012-04-04' % (name,))
			if name == 'postgres':
				name = 'sql'
				i = {
					'engine': 'postgres',
					'dbopts': parameters,
				}
			elif name == 'logfile':
				i = {}
				i['thropts'] = parameters
				if 'filename' in parameters:
					# 'filename' is a direct argument; the rest are thread opts
					i['filename'] = parameters['filename']
					i['thropts'] = dict(i['thropts'])
					del i['thropts']['filename']
			else:
				i = parameters
			i['type'] = name
		
		name = i['type']
		parameters = i
		try:
			# Load sharelogging.<name> and instantiate its same-named class.
			# NOTE(review): imp is deprecated in modern Python (removed in
			# 3.12); importlib is the replacement.
			fp, pathname, description = imp.find_module(name, sharelogging.__path__)
			m = imp.load_module(name, fp, pathname, description)
			lo = getattr(m, name)(**parameters)
			loggersShare.append(lo.logShare)
		except:
			logging.getLogger('sharelogging').error("Error setting up share logger %s: %s", name,  sys.exc_info())

	# --- Bitcoin p2p node listeners and upstream link ---
	LSbc = []
	if not hasattr(config, 'BitcoinNodeAddresses'):
		config.BitcoinNodeAddresses = ()
	for a in config.BitcoinNodeAddresses:
		LSbc.append(NetworkListener(bcnode, a))
	
	if hasattr(config, 'UpstreamBitcoindNode') and config.UpstreamBitcoindNode:
		BitcoinLink(bcnode, dest=config.UpstreamBitcoindNode)
	
	# Imported for their side effects — presumably they register their RPC
	# handlers with the server module; confirm in each module
	import jsonrpc_getblocktemplate
	import jsonrpc_getwork
	import jsonrpc_setworkaux
	
	# --- JSON-RPC (getwork/GBT) server setup ---
	server = JSONRPCServer()
	if hasattr(config, 'JSONRPCAddress'):
		# Legacy single-address setting: prepend to the addresses list
		logging.getLogger('backwardCompatibility').warn('JSONRPCAddress configuration variable is deprecated; upgrade to JSONRPCAddresses list before 2013-03-05')
		if not hasattr(config, 'JSONRPCAddresses'):
			config.JSONRPCAddresses = []
		config.JSONRPCAddresses.insert(0, config.JSONRPCAddress)
	LS = []
	for a in config.JSONRPCAddresses:
		LS.append(JSONRPCListener(server, a))
	if hasattr(config, 'SecretUser'):
		server.SecretUser = config.SecretUser
	server.aux = MM.CoinbaseAux
	server.getBlockHeader = getBlockHeader
	server.getBlockTemplate = getBlockTemplate
	server.receiveShare = receiveShare
	server.RaiseRedFlags = RaiseRedFlags
	server.ShareTarget = config.ShareTarget
	
	if hasattr(config, 'TrustedForwarders'):
		server.TrustedForwarders = config.TrustedForwarders
	server.ServerName = config.ServerName
	
	# Start merkle-tree maintenance, restore any saved work state, then serve
	MM.start()
	
	restoreState()
	
	# p2p node runs in a background daemon thread; JSON-RPC server runs in
	# the main thread until shutdown
	bcnode_thr = threading.Thread(target=bcnode.serve_forever)
	bcnode_thr.daemon = True
	bcnode_thr.start()
	
	server.serve_forever()