Hello all,
I need help with an Abe configuration for Paccoin. I have also posted the issue here:
https://github.com/bitcoin-abe/bitcoin-abe/issues/234
I pulled the latest version from git (master) and configured it for Paccoin (PAC).
abe.conf
dbtype MySQLdb
connect-args {"user":"abe","db":"abe","passwd":"pass"}
port 2750
host 0.0.0.0
upgrade
datadir += [{"dirname": "/root/.paccoin","chain": "Paccoin","code3": "PAC","address_version": "18"}]
address-history-rows-max 100000
default-loader = blkfile
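As a side note, connect-args is (as far as I understand) just passed through to the Python DB driver, so the database side can be sanity-checked on its own. This is only my own throwaway check with the same credentials as above, nothing Abe-specific:

import MySQLdb   # matches "dbtype MySQLdb" above

# Same values as the connect-args line in abe.conf.
conn = MySQLdb.connect(user='abe', db='abe', passwd='pass')
cur = conn.cursor()
cur.execute("SELECT VERSION()")
print cur.fetchone()
conn.close()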
DataStore.py
CONFIG_DEFAULTS = {
    "dbtype": None,
    "connect_args": None,
    "binary_type": None,
    "int_type": None,
    "upgrade": None,
    "rescan": None,
    "commit_bytes": None,
    "log_sql": None,
    "log_rpc": None,
    "default_chain": "Paccoin",
    "datadir": None,
    "ignore_bit8_chains": None,
    "use_firstbits": False,
    "keep_scriptsig": True,
    "import_tx": [],
    "default_loader": "default",
    "rpc_load_mempool": False,
}
WORK_BITS = 304  # XXX more than necessary.
CHAIN_CONFIG = [{"chain": "Paccoin"}]
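If this part works, the Paccoin entry from CHAIN_CONFIG / default_chain should end up as a row in Abe's chain table after the first run, which is easy to verify directly in MySQL (again just my own snippet, reusing the credentials from abe.conf):

import MySQLdb

conn = MySQLdb.connect(user='abe', db='abe', passwd='pass')
cur = conn.cursor()
cur.execute("SELECT * FROM chain")   # one row per chain Abe knows about
for row in cur.fetchall():
    print row
conn.close()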
Chain/Paccoin.py
from .Sha256Chain import Sha256Chain

class Paccoin(Sha256Chain):
    def __init__(chain, **kwargs):
        chain.name = 'Paccoin'
        chain.dirname = '/root/.paccoin/'
        chain.code3 = 'PAC'
        chain.address_version = '\x18'    # dec 24 = hex 0x18 -- PUBKEY_ADDRESS from paccoin/src/base58.h
        chain.script_addr_vers = '\x1e'   # dec 30 = hex 0x1e
        chain.magic = '\xe4\xe8\xe9\xe5'  # pchMessageStart from paccoin/src/main.cpp
        #chain.address_checksum = '\x00\x00\x00\x00'
        Sha256Chain.__init__(chain, **kwargs)

    datadir_conf_file_name = "paccoin.conf"
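Before going further I wanted to confirm that chain.magic matches the data on disk. Assuming Paccoin writes bitcoind-style blk files (each record is the 4 magic bytes, a 4-byte little-endian record length, then the block), a few lines of Python are enough to compare -- this is just my own check, not part of Abe:

import struct

MAGIC = '\xe4\xe8\xe9\xe5'     # same value as chain.magic above

f = open('/root/.paccoin/blk0001.dat', 'rb')
head = f.read(8)
f.close()

print 'magic on disk :', head[:4].encode('hex')
print 'magic in code :', MAGIC.encode('hex')
print 'first record length:', struct.unpack('<I', head[4:8])[0]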
After starting Abe, I got this error:
Opened /root/.paccoin/blk0001.dat
Exception at 146191074854428494
Failed to catch up {'blkfile_offset': 515780, 'blkfile_number': 1, 'chain_id': 1, 'loader': u'blkfile', 'conf': u'paccoin.conf', 'dirname': '/root/.paccoin/', 'id': Decimal('5')}
Traceback (most recent call last):
File "Abe/DataStore.py", line 2535, in catch_up
store.catch_up_dir(dircfg)
File "Abe/DataStore.py", line 2821, in catch_up_dir
store.import_blkdat(dircfg, ds, blkfile['name'])
File "Abe/DataStore.py", line 2943, in import_blkdat
b = chain.ds_parse_block(ds)
File "Abe/Chain/__init__.py", line 82, in ds_parse_block
d['transactions'].append(chain.ds_parse_transaction(ds))
File "Abe/Chain/__init__.py", line 75, in ds_parse_transaction
return deserialize.parse_Transaction(ds)
File "Abe/deserialize.py", line 91, in parse_Transaction
d['txIn'].append(parse_TxIn(vds))
File "Abe/deserialize.py", line 46, in parse_TxIn
d['sequence'] = vds.read_uint32()
File "Abe/BCDataStream.py", line 71, in read_uint32
def read_uint32 (self): return self._read_num('<I')
File "Abe/BCDataStream.py", line 110, in _read_num
(i,) = struct.unpack_from(format, self.input, self.read_cursor)
error: unpack_from requires a buffer of at least 4 bytes
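The "buffer of at least 4 bytes" error looks like the transaction parser walking past the end of the block it was given, i.e. the deserializer and the on-disk transaction format disagree. To see what the parser is actually looking at, I dumped the bytes around the reported offset (my own debugging snippet; I am assuming the blkfile_offset of 515780 points at the start of the failing record, magic included):

import struct

f = open('/root/.paccoin/blk0001.dat', 'rb')
f.seek(515780)
raw = f.read(120)
f.close()

print 'magic :', raw[0:4].encode('hex')
print 'length:', struct.unpack('<I', raw[4:8])[0]
print 'header:', raw[8:88].encode('hex')    # should be the 80-byte block header
print 'tx    :', raw[88:120].encode('hex')  # start of the transaction data

In Peercoin-style coins each transaction carries a 4-byte timestamp right after the 4-byte version field, which is exactly what the has_nTime flag in deserialize.py is for, so that was the first thing I tried.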
After a few hours of searching for a solution, I tried editing the following files.
In deserialize.py, I made parse_Transaction read nTime unconditionally:
d['nTime'] = vds.read_uint32()
# if has_nTime:
#     d['nTime'] = vds.read_uint32()
With that change I got this error:
Opened /root/.paccoin/blk0001.dat
Exception at 515981
Failed to catch up {'blkfile_offset': 515780, 'blkfile_number': 1, 'chain_id': 1, 'loader': u'blkfile', 'conf': u'paccoin.conf', 'dirname': '/root/.paccoin/', 'id': Decimal('5')}
Traceback (most recent call last):
File "Abe/DataStore.py", line 2535, in catch_up
store.catch_up_dir(dircfg)
File "Abe/DataStore.py", line 2821, in catch_up_dir
store.import_blkdat(dircfg, ds, blkfile['name'])
File "Abe/DataStore.py", line 2953, in import_blkdat
store.import_block(b, chain = chain)
File "Abe/DataStore.py", line 1069, in import_block
raise MerkleRootMismatch(b['hash'], tx_hash_array)
MerkleRootMismatch: Block header Merkle root does not match its transactions. block hash=00000000001b6bd7774c118eb7e14669d6e2099e1ca7d8b135031e8d091bb363
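Before going on: the deserialize.py hack above could probably be expressed per chain instead, via the ds_parse_transaction hook that appears in the tracebacks, rather than patching deserialize.py itself (untested sketch):

from .Sha256Chain import Sha256Chain
from .. import deserialize

class Paccoin(Sha256Chain):
    # __init__ and datadir_conf_file_name exactly as shown above

    def ds_parse_transaction(chain, ds):
        # Treat every Paccoin transaction as carrying the extra nTime field,
        # instead of hard-coding the read inside parse_Transaction.
        return deserialize.parse_Transaction(ds, has_nTime=True)

If Paccoin is a Peercoin fork, one of Abe's existing proof-of-stake chain classes might be an even better parent than Sha256Chain, but I have not verified that.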
The next recommendation was to comment out the Merkle root check in DataStore.py:
#if chain is not None:
#    # Verify Merkle root.
#    if b['hashMerkleRoot'] != chain.merkle_root(tx_hash_array):
#        raise MerkleRootMismatch(b['hash'], tx_hash_array)
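For context, the check disabled here recomputes the Merkle root from the block's transaction hashes and compares it with the header. For a Sha256Chain that should be the standard Bitcoin pairing of double SHA-256 hashes, which can be reproduced by hand to see which side is off (my own sketch):

import hashlib

def dhash(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root(tx_hashes):
    # tx_hashes: 32-byte binary transaction hashes, in block order.
    hashes = list(tx_hashes)
    while len(hashes) > 1:
        if len(hashes) % 2:
            hashes.append(hashes[-1])    # duplicate the last hash on odd counts
        hashes = [dhash(hashes[i] + hashes[i + 1])
                  for i in range(0, len(hashes), 2)]
    return hashes[0]

A mismatch here usually means the transactions were hashed over the wrong bytes (for example with the nTime field handled incorrectly), so disabling the check probably hides a parsing problem rather than fixing it.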
With the Merkle check disabled I got the next error:
Exception at 9671054941915873462
Failed to catch up {'blkfile_offset': 515780, 'blkfile_number': 1, 'chain_id': 1, 'loader': u'blkfile', 'conf': u'paccoin.conf', 'dirname': '/root/.paccoin/', 'id': Decimal('5')}
Traceback (most recent call last):
File "Abe/DataStore.py", line 2535, in catch_up
store.catch_up_dir(dircfg)
File "Abe/DataStore.py", line 2821, in catch_up_dir
store.import_blkdat(dircfg, ds, blkfile['name'])
File "Abe/DataStore.py", line 2943, in import_blkdat
b = chain.ds_parse_block(ds)
File "Abe/Chain/__init__.py", line 82, in ds_parse_block
d['transactions'].append(chain.ds_parse_transaction(ds))
File "Abe/Chain/__init__.py", line 75, in ds_parse_transaction
return deserialize.parse_Transaction(ds)
File "Abe/deserialize.py", line 91, in parse_Transaction
d['txIn'].append(parse_TxIn(vds))
File "Abe/deserialize.py", line 46, in parse_TxIn
d['sequence'] = vds.read_uint32()
File "Abe/BCDataStream.py", line 71, in read_uint32
def read_uint32 (self): return self._read_num('<I')
File "Abe/BCDataStream.py", line 110, in _read_num
(i,) = struct.unpack_from(format, self.input, self.read_cursor)
OverflowError: Python int too large to convert to C long
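The huge "Exception at 9671054941915873462" value looks like another symptom of lost framing rather than a platform problem: once the parser misreads some byte such as 0xff as a compact-size prefix, it decodes the next 8 arbitrary bytes as a 64-bit length and jumps the read cursor far past the end of the file. A toy illustration:

import struct

garbage = '\xff' + 'dumpdata'                  # 0xff prefix + 8 arbitrary bytes
print struct.unpack('<Q', garbage[1:9])[0]     # a huge, meaningless length

That runaway offset (9671054941915873462) is bigger than sys.maxsize, so when it is eventually handed to struct.unpack_from it no longer fits in a C long, which is presumably where the OverflowError comes from.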
Here is my sys.maxsize:
Python 2.7.6 (default, Oct 26 2016, 20:30:19)
[GCC 4.8.4] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import sys
>>> sys.maxsize
9223372036854775807
>>>
I'm stuck at this point, looking for advice.