Yes you can. I guess the question is why would you want to?
Abe is (was) a nice project, but there are many better explorers out there at the moment.
Kind of like Tenebrix — it was interesting back in 2011, not so much now.
-Dave
It was the Bitcointalk forum that inspired us to create Bitcointalksearch.org - Bitcointalk is an excellent site that should be the default page for anybody dealing in cryptocurrency, since it is a virtual gold-mine of data. However, our experience and user feedback led us to create our site; Bitcointalk's search is slow, and it is difficult to get the results you need, because you need to log in first to find anything useful - furthermore, there are rate limiters on their search functionality.
The aim of our project is to create a faster website that yields more results, without requiring you to create an account or log in - your personal data will therefore never be in jeopardy, since we do not ask for any of it and you do not need to provide it to use our site with all of its capabilities.
We created this website with the sole purpose of users being able to search quickly and efficiently in the field of cryptocurrency so they will have access to the latest and most accurate information and thereby assisting the crypto-community at large.
dbtype MySQLdb
connect-args {"user":"abe1","db":"abe"}
upgrade
port 2750
block 4175 already in chain 1
block 4176 already in chain 1
Exception at 969746
Failed to catch up {'blkfile_offset': 968753, 'blkfile_number': 100000, 'chain_id': 1, 'loader': u'blkfile', 'conf': None, 'dirname': 'G:/Users/****/Downloads/Bitcoin Database/Bitcoin', 'id': Decimal('1')}
Traceback (most recent call last):
File "Abe\DataStore.py", line 2557, in catch_up
store.catch_up_dir(dircfg)
File "Abe\DataStore.py", line 2855, in catch_up_dir
store.import_blkdat(dircfg, ds, blkfile['name'])
File "Abe\DataStore.py", line 2977, in import_blkdat
b = chain.ds_parse_block(ds)
File "Abe\Chain\__init__.py", line 81, in ds_parse_block
for i in xrange(nTransactions):
OverflowError: Python int too large to convert to C long
# Some databases have trouble with the large integers that Abe uses
# for statistics. Setting int-type=str causes Abe to pass certain
# integers to the database as strings and cast them to floating point
# in SQL expressions that calculate statistics. Try this if SQLite
# complains "OverflowError: long too big to convert".
int-type=str
int-type str
dbtype MySQLdb
connect-args {"user":"abe","db":"abe","passwd":"pass"}
port 2750
host 0.0.0.0
upgrade
datadir += [{"dirname": "/root/.paccoin","chain": "Paccoin","code3": "PAC","address_version": "18"}]
address-history-rows-max 100000
default-loader = blkfile
# Default values for Abe's runtime configuration options.  An option left
# unset in the config file falls back to the value here; None means the
# option has no default and stays unset.
CONFIG_DEFAULTS = dict(
    dbtype=None,              # database driver module name, e.g. "MySQLdb"
    connect_args=None,        # driver-specific connection arguments
    binary_type=None,
    int_type=None,
    upgrade=None,
    rescan=None,
    commit_bytes=None,
    log_sql=None,
    log_rpc=None,
    default_chain="Paccoin",  # chain used when none is specified
    datadir=None,
    ignore_bit8_chains=None,
    use_firstbits=False,
    keep_scriptsig=True,
    import_tx=[],
    default_loader="default",
    rpc_load_mempool=False,
)

WORK_BITS = 304  # XXX more than necessary.

# Chains to serve; each entry is a per-chain configuration dict.
CHAIN_CONFIG = [{"chain": "Paccoin"}]
from .Sha256Chain import Sha256Chain
class Paccoin(Sha256Chain):
    """Chain parameters for the Paccoin (PAC) SHA-256 altcoin."""

    def __init__(chain, **kwargs):
        # Abe convention: the first parameter is named "chain" rather
        # than "self".
        chain.name = 'Paccoin'
        chain.code3 = 'PAC'
        chain.dirname = '/root/.paccoin/'
        # PUBKEY_ADDRESS from paccoin/src/base58.h: decimal 24 == 0x18.
        chain.address_version = '\x18'
        # Script-hash address version: decimal 30 == 0x1e.
        chain.script_addr_vers = '\x1e'
        # Network magic: pchMessageStart from paccoin/src/main.cpp.
        chain.magic = '\xe4\xe8\xe9\xe5'
        Sha256Chain.__init__(chain, **kwargs)

    # Name of the coin daemon's config file inside the data directory.
    datadir_conf_file_name = "paccoin.conf"
Opened /root/.paccoin/blk0001.dat
Exception at 146191074854428494
Failed to catch up {'blkfile_offset': 515780, 'blkfile_number': 1, 'chain_id': 1, 'loader': u'blkfile', 'conf': u'paccoin.conf', 'dirname': '/root/.paccoin/', 'id': Decimal('5')}
Traceback (most recent call last):
File "Abe/DataStore.py", line 2535, in catch_up
store.catch_up_dir(dircfg)
File "Abe/DataStore.py", line 2821, in catch_up_dir
store.import_blkdat(dircfg, ds, blkfile['name'])
File "Abe/DataStore.py", line 2943, in import_blkdat
b = chain.ds_parse_block(ds)
File "Abe/Chain/__init__.py", line 82, in ds_parse_block
d['transactions'].append(chain.ds_parse_transaction(ds))
File "Abe/Chain/__init__.py", line 75, in ds_parse_transaction
return deserialize.parse_Transaction(ds)
File "Abe/deserialize.py", line 91, in parse_Transaction
d['txIn'].append(parse_TxIn(vds))
File "Abe/deserialize.py", line 46, in parse_TxIn
d['sequence'] = vds.read_uint32()
File "Abe/BCDataStream.py", line 71, in read_uint32
def read_uint32 (self): return self._read_num(' File "Abe/BCDataStream.py", line 110, in _read_num
(i,) = struct.unpack_from(format, self.input, self.read_cursor)
error: unpack_from requires a buffer of at least 4 bytes
d['nTime'] = vds.read_uint32()
# if has_nTime:
# d['nTime'] = vds.read_uint32()
Opened /root/.paccoin/blk0001.dat
Exception at 515981
Failed to catch up {'blkfile_offset': 515780, 'blkfile_number': 1, 'chain_id': 1, 'loader': u'blkfile', 'conf': u'paccoin.conf', 'dirname': '/root/.paccoin/', 'id': Decimal('5')}
Traceback (most recent call last):
File "Abe/DataStore.py", line 2535, in catch_up
store.catch_up_dir(dircfg)
File "Abe/DataStore.py", line 2821, in catch_up_dir
store.import_blkdat(dircfg, ds, blkfile['name'])
File "Abe/DataStore.py", line 2953, in import_blkdat
store.import_block(b, chain = chain)
File "Abe/DataStore.py", line 1069, in import_block
raise MerkleRootMismatch(b['hash'], tx_hash_array)
MerkleRootMismatch: Block header Merkle root does not match its transactions. block hash=00000000001b6bd7774c118eb7e14669d6e2099e1ca7d8b135031e8d091bb363
#if chain is not None:
# Verify Merkle root.
#if b['hashMerkleRoot'] != chain.merkle_root(tx_hash_array):
# raise MerkleRootMismatch(b['hash'], tx_hash_array)
Exception at 9671054941915873462
Failed to catch up {'blkfile_offset': 515780, 'blkfile_number': 1, 'chain_id': 1, 'loader': u'blkfile', 'conf': u'paccoin.conf', 'dirname': '/root/.paccoin/', 'id': Decimal('5')}
Traceback (most recent call last):
File "Abe/DataStore.py", line 2535, in catch_up
store.catch_up_dir(dircfg)
File "Abe/DataStore.py", line 2821, in catch_up_dir
store.import_blkdat(dircfg, ds, blkfile['name'])
File "Abe/DataStore.py", line 2943, in import_blkdat
b = chain.ds_parse_block(ds)
File "Abe/Chain/__init__.py", line 82, in ds_parse_block
d['transactions'].append(chain.ds_parse_transaction(ds))
File "Abe/Chain/__init__.py", line 75, in ds_parse_transaction
return deserialize.parse_Transaction(ds)
File "Abe/deserialize.py", line 91, in parse_Transaction
d['txIn'].append(parse_TxIn(vds))
File "Abe/deserialize.py", line 46, in parse_TxIn
d['sequence'] = vds.read_uint32()
File "Abe/BCDataStream.py", line 71, in read_uint32
def read_uint32 (self): return self._read_num(' File "Abe/BCDataStream.py", line 110, in _read_num
(i,) = struct.unpack_from(format, self.input, self.read_cursor)
OverflowError: Python int too large to convert to C long
Python 2.7.6 (default, Oct 26 2016, 20:30:19)
[GCC 4.8.4] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import sys
>>> sys.maxsize
9223372036854775807
>>>
# Specify port and/or host to serve HTTP instead of FastCGI:
port 2750
host localhost
# Specify port and/or host to serve HTTP instead of FastCGI:
port 2750
host localhost