Bugfix: Stratum: Replies should not be sent if request id is null
[bitcoin:eloipool.git] / eloipool.py
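The title refers to the JSON-RPC convention that a request whose "id" is null (or missing) is a notification, and a notification must never receive a reply. The reply-suppression itself lives in the Stratum server code rather than in this file; a minimal sketch of the rule, using hypothetical names (not eloipool's actual API), might look like this:

import json

def encode_reply(reqid, result=None, error=None):
        # Hypothetical helper: a null/absent id marks the request as a
        # notification, so no reply line is produced for it.
        if reqid is None:
                return None
        return (json.dumps({'id': reqid, 'result': result, 'error': error}) + '\n').encode('utf8')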
#!/usr/bin/python3
# Eloipool - Python Bitcoin pool server
# Copyright (C) 2011-2012  Luke Dashjr <luke-jr+eloipool@utopios.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import config

if not hasattr(config, 'ServerName'):
        config.ServerName = 'Unnamed Eloipool'

if not hasattr(config, 'ShareTarget'):
        config.ShareTarget = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff


import logging

if len(logging.root.handlers) == 0:
        logging.basicConfig(
                format='%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s',
                level=logging.DEBUG,
        )
        for infoOnly in ('checkShare', 'JSONRPCHandler', 'merkleMaker', 'Waker for JSONRPCServer', 'JSONRPCServer'):
                logging.getLogger(infoOnly).setLevel(logging.INFO)

def RaiseRedFlags(reason):
        logging.getLogger('redflag').critical(reason)
        return reason


from bitcoin.node import BitcoinLink, BitcoinNode
bcnode = BitcoinNode(config.UpstreamNetworkId)
bcnode.userAgent += b'Eloipool:0.1/'

import jsonrpc
UpstreamBitcoindJSONRPC = jsonrpc.ServiceProxy(config.UpstreamURI)


from bitcoin.script import BitcoinScript
from bitcoin.txn import Txn
from base58 import b58decode
from struct import pack
import subprocess
from time import time

def makeCoinbaseTxn(coinbaseValue, useCoinbaser = True):
        txn = Txn.new()

        if useCoinbaser and hasattr(config, 'CoinbaserCmd') and config.CoinbaserCmd:
                coinbased = 0
                try:
                        cmd = config.CoinbaserCmd
                        cmd = cmd.replace('%d', str(coinbaseValue))
                        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
                        nout = int(p.stdout.readline())
                        for i in range(nout):
                                amount = int(p.stdout.readline())
                                addr = p.stdout.readline().rstrip(b'\n').decode('utf8')
                                pkScript = BitcoinScript.toAddress(addr)
                                txn.addOutput(amount, pkScript)
                                coinbased += amount
                except:
                        coinbased = coinbaseValue + 1
                if coinbased >= coinbaseValue:
                        logging.getLogger('makeCoinbaseTxn').error('Coinbaser failed!')
                        txn.outputs = []
                else:
                        coinbaseValue -= coinbased

        pkScript = BitcoinScript.toAddress(config.TrackerAddr)
        txn.addOutput(coinbaseValue, pkScript)

        # TODO
        # TODO: red flag on dupe coinbase
        return txn


import jsonrpc_getwork
from util import Bits2Target

workLog = {}
userStatus = {}
networkTarget = None
DupeShareHACK = {}

server = None
stratumsrv = None
def updateBlocks():
        server.wakeLongpoll()
        stratumsrv.updateJob()

def blockChanged():
        global DupeShareHACK
        DupeShareHACK = {}
        jsonrpc_getwork._CheckForDupesHACK = {}
        global MM, networkTarget, server
        bits = MM.currentBlock[2]
        if bits is None:
                networkTarget = None
        else:
                networkTarget = Bits2Target(bits)
        workLog.clear()
        server.wakeLongpoll(wantClear=True)
        stratumsrv.updateJob(wantClear=True)


from time import sleep, time
import traceback

def _WorkLogPruner_I(wl):
        now = time()
        pruned = 0
        for username in wl:
                userwork = wl[username]
                for wli in tuple(userwork.keys()):
                        if now > userwork[wli][1] + 120:
                                del userwork[wli]
                                pruned += 1
        WorkLogPruner.logger.debug('Pruned %d jobs' % (pruned,))

def WorkLogPruner(wl):
        while True:
                try:
                        sleep(60)
                        _WorkLogPruner_I(wl)
                except:
                        WorkLogPruner.logger.error(traceback.format_exc())
WorkLogPruner.logger = logging.getLogger('WorkLogPruner')


from merklemaker import merkleMaker
MM = merkleMaker()
MM.__dict__.update(config.__dict__)
MM.clearCoinbaseTxn = makeCoinbaseTxn(5000000000, False)  # FIXME
MM.clearCoinbaseTxn.assemble()
MM.makeCoinbaseTxn = makeCoinbaseTxn
MM.onBlockChange = blockChanged
MM.onBlockUpdate = updateBlocks


from binascii import b2a_hex
from copy import deepcopy
from math import log
from merklemaker import MakeBlockHeader
from struct import pack, unpack
import threading
from time import time
from util import RejectedShare, dblsha, hash2int, swap32, target2pdiff
import jsonrpc
import traceback

gotwork = None
if hasattr(config, 'GotWorkURI'):
        gotwork = jsonrpc.ServiceProxy(config.GotWorkURI)

if not hasattr(config, 'DynamicTargetting'):
        config.DynamicTargetting = 0
else:
        if not hasattr(config, 'DynamicTargetWindow'):
                config.DynamicTargetWindow = 120
        config.DynamicTargetGoal *= config.DynamicTargetWindow / 60

def submitGotwork(info):
        try:
                gotwork.gotwork(info)
        except:
                checkShare.logger.warning('Failed to submit gotwork\n' + traceback.format_exc())

def getTarget(username, now):
        if not config.DynamicTargetting:
                return None
        if username in userStatus:
                status = userStatus[username]
        else:
                userStatus[username] = [None, now, 0]
                return None
        (targetIn, lastUpdate, work) = status
        if work <= config.DynamicTargetGoal:
                if now < lastUpdate + config.DynamicTargetWindow and (targetIn is None or targetIn >= networkTarget):
                        return targetIn
                if not work:
                        if targetIn:
                                getTarget.logger.debug("No shares from %s, resetting to minimum target" % (repr(username),))
                                userStatus[username] = [None, now, 0]
                        return None

        deltaSec = now - lastUpdate
        target = targetIn or config.ShareTarget
        target = int(target * config.DynamicTargetGoal * deltaSec / config.DynamicTargetWindow / work)
        if target >= config.ShareTarget:
                target = None
        else:
                if target < networkTarget:
                        target = networkTarget
                if config.DynamicTargetting == 2:
                        # Round the target up to 2**n - 1, so the resulting difficulty is a power of two :)
                        target = 2**int(log(target, 2) + 1) - 1
                if target == config.ShareTarget:
                        target = None
        if target != targetIn:
                pfx = 'Retargetting %s' % (repr(username),)
                tin = targetIn or config.ShareTarget
                getTarget.logger.debug("%s from: %064x (pdiff %s)" % (pfx, tin, target2pdiff(tin)))
                tgt = target or config.ShareTarget
                getTarget.logger.debug("%s   to: %064x (pdiff %s)" % (pfx, tgt, target2pdiff(tgt)))
        userStatus[username] = [target, now, 0]
        return target
getTarget.logger = logging.getLogger('getTarget')
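# Dynamic targetting note: the retarget above scales a user's share target by
# (goal rate / observed rate), so a faster miner gets a lower target (higher
# difficulty). Worked example with assumed numbers: a DynamicTargetGoal of 20
# shares per DynamicTargetWindow of 120s, with 60 shares seen over the last
# 120s, gives a factor of 20 * 120 / 120 / 60 = 1/3, dropping the target to a
# third of its previous value.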

def RegisterWork(username, wli, wld):
        now = time()
        target = getTarget(username, now)
        wld = tuple(wld) + (target,)
        workLog.setdefault(username, {})[wli] = (wld, now)
        return target or config.ShareTarget

def getBlockHeader(username):
        MRD = MM.getMRD()
        merkleRoot = MRD[0]
        hdr = MakeBlockHeader(MRD)
        workLog.setdefault(username, {})[merkleRoot] = (MRD, time())
        target = RegisterWork(username, merkleRoot, MRD)
        return (hdr, workLog[username][merkleRoot], target)

def getBlockTemplate(username, p_magic = None):
        if server.tls.wantClear:
                wantClear = True
        elif p_magic and username not in workLog:
                wantClear = True
                p_magic[0] = True
        else:
                wantClear = False
        MC = MM.getMC(wantClear)
        (dummy, merkleTree, coinbase, prevBlock, bits) = MC[:5]
        wliPos = coinbase[0] + 2
        wliLen = coinbase[wliPos - 1]
        wli = coinbase[wliPos:wliPos+wliLen]
        target = RegisterWork(username, wli, MC)
        return (MC, workLog[username][wli], target)

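# Stratum jobs are not issued per-user: getStratumJob below files each job
# under the None pseudo-user, keyed by jobid, and checkShare later finds it
# there via share['jobid'].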
def getStratumJob(jobid, wantClear = False):
        MC = MM.getMC(wantClear)
        (dummy, merkleTree, coinbase, prevBlock, bits) = MC[:5]
        now = time()
        workLog.setdefault(None, {})[jobid] = (MC, now)
        return (MC, workLog[None][jobid])

loggersShare = []

RBDs = []
RBPs = []

from bitcoin.varlen import varlenEncode, varlenDecode
import bitcoin.txn
from merklemaker import assembleBlock

RBFs = []
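# blockSubmissionThread pushes a solved block upstream with submitblock and,
# if that call fails, falls back to the older getmemorypool form (presumably
# for pre-BIP 22 bitcoinds), which takes the block data as its parameter and
# returns True/False rather than null or a reject reason.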
def blockSubmissionThread(payload, blkhash, share):
        myblock = (blkhash, payload[4:36])
        payload = b2a_hex(payload).decode('ascii')
        nexterr = 0
        gmperr = None
        while True:
                try:
                        rv = UpstreamBitcoindJSONRPC.submitblock(payload)
                        break
                except BaseException as gbterr:
                        try:
                                rv = UpstreamBitcoindJSONRPC.getmemorypool(payload)
                                if rv is True:
                                        rv = None
                                elif rv is False:
                                        rv = 'rejected'
                                break
                        except BaseException as e2:
                                gmperr = e2
                        now = time()
                        if now > nexterr:
                                # FIXME: This will show "Method not found" on pre-BIP22 servers
                                RaiseRedFlags(traceback.format_exc())
                                nexterr = now + 5
                        if MM.currentBlock[0] not in myblock:
                                RBFs.append( (('next block', MM.currentBlock, now, (gbterr, gmperr)), payload, blkhash, share) )
                                RaiseRedFlags('Giving up on submitting block upstream')
                                return
        if rv:
                # FIXME: The returned value could be a list of multiple responses
                RBFs.append( (('upstream reject', rv, time()), payload, blkhash, share) )
                RaiseRedFlags('Upstream block submission failed: %s' % (rv,))

def checkData(share):
        data = share['data']
        data = data[:80]
        (prevBlock, height, bits) = MM.currentBlock
        sharePrevBlock = data[4:36]
        if sharePrevBlock != prevBlock:
                if sharePrevBlock == MM.lastBlock[0]:
                        raise RejectedShare('stale-prevblk')
                raise RejectedShare('bad-prevblk')

        if data[72:76] != bits:
                raise RejectedShare('bad-diffbits')

        # Note that we should accept miners reducing version to 1 if they don't understand 2 yet
        # FIXME: When the supermajority is upgraded to version 2, stop accepting 1!
        if data[1:4] != b'\0\0\0' or data[0] > 2:
                raise RejectedShare('bad-version')

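# Block header layout assumed throughout (80 bytes):
#   version[4] + prevBlock[32] + merkleRoot[32] + ntime[4] + bits[4] + nonce[4]
# hence data[4:36] is the previous block hash, data[36:68] the merkle root,
# data[68:72] the timestamp and data[72:76] the difficulty bits.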
def buildStratumData(share, merkleroot):
        (prevBlock, height, bits) = MM.currentBlock

        data = b'\x02\0\0\0'
        data += prevBlock
        data += merkleroot
        data += share['ntime'][::-1]
        data += bits
        data += share['nonce'][::-1]

        share['data'] = data
        return data

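# checkShare accepts both getwork/GBT shares (carrying 'data', and optionally
# 'blkdata' with the remaining transactions) and Stratum shares (carrying a
# 'jobid' plus extranonce1/extranonce2, from which the header is rebuilt).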
def checkShare(share):
        shareTime = share['time'] = time()

        username = share['username']
        if 'data' in share:
                # getwork/GBT
                checkData(share)
                data = share['data']

                if username not in workLog:
                        raise RejectedShare('unknown-user')
                MWL = workLog[username]

                shareMerkleRoot = data[36:68]
                if 'blkdata' in share:
                        pl = share['blkdata']
                        (txncount, pl) = varlenDecode(pl)
                        cbtxn = bitcoin.txn.Txn(pl)
                        othertxndata = cbtxn.disassemble(retExtra=True)
                        coinbase = cbtxn.getCoinbase()
                        wliPos = coinbase[0] + 2
                        wliLen = coinbase[wliPos - 1]
                        wli = coinbase[wliPos:wliPos+wliLen]
                        mode = 'MC'
                        moden = 1
                else:
                        wli = shareMerkleRoot
                        mode = 'MRD'
                        moden = 0
                        coinbase = None
        else:
                # Stratum
                MWL = workLog[None]
                wli = share['jobid']
                buildStratumData(share, b'\0' * 32)
                mode = 'MC'
                moden = 1
                othertxndata = b''

        if wli not in MWL:
                raise RejectedShare('unknown-work')
        (wld, issueT) = MWL[wli]
        share[mode] = wld

        share['issuetime'] = issueT

        (workMerkleTree, workCoinbase) = wld[1:3]
        if 'jobid' in share:
                cbtxn = deepcopy(workMerkleTree.data[0])
                coinbase = workCoinbase + share['extranonce1'] + share['extranonce2']
                cbtxn.setCoinbase(coinbase)
                cbtxn.assemble()
                data = buildStratumData(share, workMerkleTree.withFirst(cbtxn))
                shareMerkleRoot = data[36:68]

        if data in DupeShareHACK:
                raise RejectedShare('duplicate')
        DupeShareHACK[data] = None

        blkhash = dblsha(data)
        if blkhash[28:] != b'\0\0\0\0':
                raise RejectedShare('H-not-zero')
        blkhashn = hash2int(blkhash)

        global networkTarget
        logfunc = getattr(checkShare.logger, 'info' if blkhashn <= networkTarget else 'debug')
        logfunc('BLKHASH: %64x' % (blkhashn,))
        logfunc(' TARGET: %64x' % (networkTarget,))

        workTarget = wld[6] if len(wld) > 6 else None

        # NOTE: this isn't actually needed for MC mode, but we're abusing it for a trivial share check...
        txlist = workMerkleTree.data
        txlist = [deepcopy(txlist[0]),] + txlist[1:]
        cbtxn = txlist[0]
        cbtxn.setCoinbase(coinbase or workCoinbase)
        cbtxn.assemble()

        if blkhashn <= networkTarget:
                logfunc("Submitting upstream")
                RBDs.append( deepcopy( (data, txlist, share.get('blkdata', None), workMerkleTree, share, wld) ) )
                if not moden:
                        payload = assembleBlock(data, txlist)
                else:
                        payload = share['data']
                        if len(othertxndata):
                                payload += share['blkdata']
                        else:
                                payload += assembleBlock(data, txlist)[80:]
                logfunc('Real block payload: %s' % (b2a_hex(payload).decode('utf8'),))
                RBPs.append(payload)
                threading.Thread(target=blockSubmissionThread, args=(payload, blkhash, share)).start()
                bcnode.submitBlock(payload)
                share['upstreamResult'] = True
                MM.updateBlock(blkhash)

        # Gotwork hack...
        if gotwork and blkhashn <= config.GotWorkTarget:
                try:
                        coinbaseMrkl = cbtxn.data
                        coinbaseMrkl += blkhash
                        steps = workMerkleTree._steps
                        coinbaseMrkl += pack('B', len(steps))
                        for step in steps:
                                coinbaseMrkl += step
                        coinbaseMrkl += b"\0\0\0\0"
                        info = {}
                        info['hash'] = b2a_hex(blkhash).decode('ascii')
                        info['header'] = b2a_hex(data).decode('ascii')
                        info['coinbaseMrkl'] = b2a_hex(coinbaseMrkl).decode('ascii')
                        thr = threading.Thread(target=submitGotwork, args=(info,))
                        thr.daemon = True
                        thr.start()
                except:
                        checkShare.logger.warning('Failed to build gotwork request')

        if workTarget is None:
                workTarget = config.ShareTarget
        if blkhashn > workTarget:
                raise RejectedShare('high-hash')
        share['target'] = workTarget
        share['_targethex'] = '%064x' % (workTarget,)

        shareTimestamp = unpack('<L', data[68:72])[0]
        if shareTime < issueT - 120:
                raise RejectedShare('stale-work')
        if shareTimestamp < shareTime - 300:
                raise RejectedShare('time-too-old')
        if shareTimestamp > shareTime + 7200:
                raise RejectedShare('time-too-new')

        if config.DynamicTargetting and username in userStatus:
                # NOTE: userStatus[username] should always exist here; the only case where it may not is just after a restart
                status = userStatus[username]
                target = status[0] or config.ShareTarget
                if target == workTarget:
                        userStatus[username][2] += 1
                else:
                        userStatus[username][2] += float(target) / workTarget

        if moden:
                cbpre = workCoinbase
                cbpreLen = len(cbpre)
                if coinbase[:cbpreLen] != cbpre:
                        raise RejectedShare('bad-cb-prefix')

                # Filter out known "I support" flags, to prevent exploits
                for ff in (b'/P2SH/', b'NOP2SH', b'p2sh/CHV', b'p2sh/NOCHV'):
                        if coinbase.find(ff) > max(-1, cbpreLen - len(ff)):
                                raise RejectedShare('bad-cb-flag')

                if len(coinbase) > 100:
                        raise RejectedShare('bad-cb-length')

                if shareMerkleRoot != workMerkleTree.withFirst(cbtxn):
                        raise RejectedShare('bad-txnmrklroot')

                if len(othertxndata):
                        allowed = assembleBlock(data, txlist)[80:]
                        if allowed != share['blkdata']:
                                raise RejectedShare('bad-txns')
checkShare.logger = logging.getLogger('checkShare')

def receiveShare(share):
        # TODO: username => userid
        try:
                checkShare(share)
        except RejectedShare as rej:
                share['rejectReason'] = str(rej)
                raise
        finally:
                if '_origdata' in share:
                        share['solution'] = share['_origdata']
                else:
                        share['solution'] = b2a_hex(swap32(share['data'])).decode('utf8')
                for i in loggersShare:
                        i.logShare(share)

def newBlockNotification():
        logging.getLogger('newBlockNotification').info('Received new block notification')
        MM.updateMerkleTree()
        # TODO: Force RESPOND TO LONGPOLLS?
        pass

def newBlockNotificationSIGNAL(signum, frame):
        # Use a new thread, in case the signal handler is called with locks held
        thr = threading.Thread(target=newBlockNotification, name='newBlockNotification via signal %s' % (signum,))
        thr.daemon = True
        thr.start()

from signal import signal, SIGUSR1
signal(SIGUSR1, newBlockNotificationSIGNAL)


import os
import os.path
import pickle
import signal
import sys
from time import sleep
import traceback

SAVE_STATE_FILENAME = 'eloipool.worklog'
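# Current save format (see saveState/restoreState below): a pickle stream of
# the shutdown timestamp, then DupeShareHACK, then workLog; the older formats
# from early February 2012 are still recognized on restore.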

def stopServers():
        logger = logging.getLogger('stopServers')

        if hasattr(stopServers, 'already'):
                logger.debug('Already tried to stop servers before')
                return
        stopServers.already = True

        logger.info('Stopping servers...')
        global bcnode, server
        servers = (bcnode, server, stratumsrv)
        for s in servers:
                s.keepgoing = False
        for s in servers:
                try:
                        s.wakeup()
                except:
                        logger.error('Failed to stop server %s\n%s' % (s, traceback.format_exc()))
        i = 0
        while True:
                sl = []
                for s in servers:
                        if s.running:
                                sl.append(s.__class__.__name__)
                if not sl:
                        break
                i += 1
                if i >= 0x100:
                        logger.error('Servers taking too long to stop (%s), giving up' % (', '.join(sl)))
                        break
                sleep(0.01)

        for s in servers:
                for fd in s._fd.keys():
                        os.close(fd)

def stopLoggers():
        for i in loggersShare:
                if hasattr(i, 'stop'):
                        i.stop()

def saveState(t = None):
        logger = logging.getLogger('saveState')

        # Then, save data needed to resume work
        logger.info('Saving work state to \'%s\'...' % (SAVE_STATE_FILENAME,))
        i = 0
        while True:
                try:
                        with open(SAVE_STATE_FILENAME, 'wb') as f:
                                pickle.dump(t, f)
                                pickle.dump(DupeShareHACK, f)
                                pickle.dump(workLog, f)
                        break
                except:
                        i += 1
                        if i >= 0x10000:
                                logger.error('Failed to save work\n' + traceback.format_exc())
                                try:
                                        os.unlink(SAVE_STATE_FILENAME)
                                except:
                                        logger.error(('Failed to unlink \'%s\'; resume may have trouble\n' % (SAVE_STATE_FILENAME,)) + traceback.format_exc())

def exit():
        t = time()
        stopServers()
        stopLoggers()
        saveState(t)
        logging.getLogger('exit').info('Goodbye...')
        os.kill(os.getpid(), signal.SIGTERM)
        sys.exit(0)

def restart():
        t = time()
        stopServers()
        stopLoggers()
        saveState(t)
        logging.getLogger('restart').info('Restarting...')
        try:
                os.execv(sys.argv[0], sys.argv)
        except:
                logging.getLogger('restart').error('Failed to exec\n' + traceback.format_exc())

def restoreState():
        if not os.path.exists(SAVE_STATE_FILENAME):
                return

        global workLog, DupeShareHACK

        logger = logging.getLogger('restoreState')
        s = os.stat(SAVE_STATE_FILENAME)
        logger.info('Restoring saved state from \'%s\' (%d bytes)' % (SAVE_STATE_FILENAME, s.st_size))
        try:
                with open(SAVE_STATE_FILENAME, 'rb') as f:
                        t = pickle.load(f)
                        if type(t) == tuple:
                                if len(t) > 2:
                                        # Future formats, not supported here
                                        ver = t[3]
                                        # TODO

                                # Old format, from 2012-02-02 to 2012-02-03
                                workLog = t[0]
                                DupeShareHACK = t[1]
                                t = None
                        else:
                                if isinstance(t, dict):
                                        # Old format, from 2012-02-03 to 2012-02-03
                                        DupeShareHACK = t
                                        t = None
                                else:
                                        # Current format, from 2012-02-03 onward
                                        DupeShareHACK = pickle.load(f)

                                if t + 120 >= time():
                                        workLog = pickle.load(f)
                                else:
                                        logger.debug('Skipping restore of expired workLog')
        except:
                logger.error('Failed to restore state\n' + traceback.format_exc())
                return
        logger.info('State restored successfully')
        if t:
                logger.info('Total downtime: %g seconds' % (time() - t,))


from jsonrpcserver import JSONRPCListener, JSONRPCServer
import interactivemode
from networkserver import NetworkListener
import threading
import sharelogging
from stratumserver import StratumServer
import imp

if __name__ == "__main__":
        if not hasattr(config, 'ShareLogging'):
                config.ShareLogging = ()
        if hasattr(config, 'DbOptions'):
                logging.getLogger('backwardCompatibility').warn('DbOptions configuration variable is deprecated; upgrade to ShareLogging var before 2013-03-05')
                config.ShareLogging = list(config.ShareLogging)
                config.ShareLogging.append( {
                        'type': 'sql',
                        'engine': 'postgres',
                        'dbopts': config.DbOptions,
                        'statement': "insert into shares (rem_host, username, our_result, upstream_result, reason, solution) values ({Q(remoteHost)}, {username}, {YN(not(rejectReason))}, {YN(upstreamResult)}, {rejectReason}, decode({solution}, 'hex'))",
                } )
        for i in config.ShareLogging:
                if not hasattr(i, 'keys'):
                        name, parameters = i
                        logging.getLogger('backwardCompatibility').warn('Using short-term backward compatibility for ShareLogging[\'%s\']; be sure to update config before 2012-04-04' % (name,))
                        if name == 'postgres':
                                name = 'sql'
                                i = {
                                        'engine': 'postgres',
                                        'dbopts': parameters,
                                }
                        elif name == 'logfile':
                                i = {}
                                i['thropts'] = parameters
                                if 'filename' in parameters:
                                        i['filename'] = parameters['filename']
                                        i['thropts'] = dict(i['thropts'])
                                        del i['thropts']['filename']
                        else:
                                i = parameters
                        i['type'] = name

                name = i['type']
                parameters = i
                try:
                        fp, pathname, description = imp.find_module(name, sharelogging.__path__)
                        m = imp.load_module(name, fp, pathname, description)
                        lo = getattr(m, name)(**parameters)
                        loggersShare.append(lo)
                except:
                        logging.getLogger('sharelogging').error("Error setting up share logger %s: %s", name, sys.exc_info())

        LSbc = []
        if not hasattr(config, 'BitcoinNodeAddresses'):
                config.BitcoinNodeAddresses = ()
        for a in config.BitcoinNodeAddresses:
                LSbc.append(NetworkListener(bcnode, a))

        if hasattr(config, 'UpstreamBitcoindNode') and config.UpstreamBitcoindNode:
                BitcoinLink(bcnode, dest=config.UpstreamBitcoindNode)

        import jsonrpc_getblocktemplate
        import jsonrpc_getwork
        import jsonrpc_setworkaux

        server = JSONRPCServer()
        server.tls = threading.local()
        server.tls.wantClear = False
        if hasattr(config, 'JSONRPCAddress'):
                logging.getLogger('backwardCompatibility').warn('JSONRPCAddress configuration variable is deprecated; upgrade to JSONRPCAddresses list before 2013-03-05')
                if not hasattr(config, 'JSONRPCAddresses'):
                        config.JSONRPCAddresses = []
                config.JSONRPCAddresses.insert(0, config.JSONRPCAddress)
        LS = []
        for a in config.JSONRPCAddresses:
                LS.append(JSONRPCListener(server, a))
        if hasattr(config, 'SecretUser'):
                server.SecretUser = config.SecretUser
        server.aux = MM.CoinbaseAux
        server.getBlockHeader = getBlockHeader
        server.getBlockTemplate = getBlockTemplate
        server.receiveShare = receiveShare
        server.RaiseRedFlags = RaiseRedFlags
        server.ShareTarget = config.ShareTarget

        if hasattr(config, 'TrustedForwarders'):
                server.TrustedForwarders = config.TrustedForwarders
        server.ServerName = config.ServerName

        stratumsrv = StratumServer()
        stratumsrv.getStratumJob = getStratumJob
        stratumsrv.receiveShare = receiveShare
        if not hasattr(config, 'StratumAddresses'):
                config.StratumAddresses = ()
        for a in config.StratumAddresses:
                NetworkListener(stratumsrv, a)

        MM.start()

        restoreState()

        prune_thr = threading.Thread(target=WorkLogPruner, args=(workLog,))
        prune_thr.daemon = True
        prune_thr.start()

        bcnode_thr = threading.Thread(target=bcnode.serve_forever)
        bcnode_thr.daemon = True
        bcnode_thr.start()

        stratum_thr = threading.Thread(target=stratumsrv.serve_forever)
        stratum_thr.daemon = True
        stratum_thr.start()

        server.serve_forever()