commit
a300f2e959
@ -0,0 +1,102 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# JetBrains IDE data
|
||||
.idea
|
||||
.idea/*
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
||||
# Flask instance folder
|
||||
instance/
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# IPython Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
|
||||
# virtualenv
|
||||
venv/
|
||||
ENV/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# OS specific and other
|
||||
.DS_Store
|
||||
docs/.DS_Store
|
||||
.keep
|
||||
|
||||
# ignore everything in ignore/
|
||||
ignore/
|
||||
|
||||
# SQLite databases
|
||||
database/*.db
|
||||
@ -0,0 +1,16 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.7"
|
||||
- "3.5"
|
||||
- "3.6"
|
||||
|
||||
install:
|
||||
- pip install -r requirements.txt
|
||||
- ./share/travis_setup.sh
|
||||
|
||||
script:
|
||||
# run unit tests
|
||||
- py.test -svv test/unit/
|
||||
|
||||
# style guide check
|
||||
- find ./lib ./test ./bin -name \*.py -exec pycodestyle --show-source --ignore=E501,E402,E722,E129,W503,W504 {} +
|
||||
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 The Dash Developers
|
||||
Copyright (c) 2018 The Sibcoin Developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
@ -0,0 +1,107 @@
|
||||
# Sibcoin Sentinel
|
||||
|
||||
> An automated governance helper for Sibcoin Masternodes.
|
||||
|
||||
[](https://travis-ci.org/dashpay/sentinel)
|
||||
|
||||
Sentinel is an autonomous agent for persisting, processing and automating Sibcoin governance objects and tasks, and for expanded functions in the upcoming Sibcoin V17 release (Evolution).
|
||||
|
||||
Sentinel is implemented as a Python application that binds to a local sibcoind instance on each Sibcoin Masternode.
|
||||
|
||||
|
||||
## Table of Contents
|
||||
- [Install](#install)
|
||||
- [Dependencies](#dependencies)
|
||||
- [Usage](#usage)
|
||||
- [Configuration](#configuration)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
- [Maintainer](#maintainer)
|
||||
- [Contributing](#contributing)
|
||||
- [License](#license)
|
||||
|
||||
## Install
|
||||
|
||||
These instructions cover installing Sentinel on Ubuntu 16.04 / 18.04.
|
||||
|
||||
### Dependencies
|
||||
|
||||
Make sure Python version 2.7.x or above is installed:
|
||||
|
||||
python --version
|
||||
|
||||
Update system packages and ensure virtualenv is installed:
|
||||
|
||||
$ sudo apt-get update
|
||||
$ sudo apt-get -y install python-virtualenv
|
||||
|
||||
Make sure the local Sibcoin daemon is running and is at least version 16.4 (160400):
|
||||
|
||||
$ sibcoin-cli getinfo | grep version
|
||||
|
||||
### Install Sentinel
|
||||
|
||||
Clone the Sentinel repo and install Python dependencies.
|
||||
|
||||
$ git clone https://github.com/ivansib/sentinel.git && cd sentinel
|
||||
$ virtualenv ./venv
|
||||
$ ./venv/bin/pip install -r requirements.txt
|
||||
|
||||
## Usage
|
||||
|
||||
Sentinel is "used" as a script called from cron every minute.
|
||||
|
||||
### Set up Cron
|
||||
|
||||
Set up a crontab entry to call Sentinel every minute:
|
||||
|
||||
$ crontab -e
|
||||
|
||||
In the crontab editor, add the lines below, replacing '/path/to/sentinel' to the path where you cloned sentinel to:
|
||||
|
||||
* * * * * cd /path/to/sentinel && ./venv/bin/python bin/sentinel.py >/dev/null 2>&1
|
||||
|
||||
### Test Configuration
|
||||
|
||||
Test the config by running tests:
|
||||
|
||||
$ ./venv/bin/py.test ./test
|
||||
|
||||
With all tests passing and the crontab set up, Sentinel will stay in sync with sibcoind and the installation is complete.
|
||||
|
||||
## Configuration
|
||||
|
||||
An alternative (non-default) path to the `sibcoin.conf` file can be specified in `sentinel.conf`:
|
||||
|
||||
sibcoin_conf=/path/to/sibcoin.conf
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
To view debug output, set the `SENTINEL_DEBUG` environment variable to anything non-zero, then run the script manually:
|
||||
|
||||
$ SENTINEL_DEBUG=1 ./venv/bin/python bin/sentinel.py
|
||||
|
||||
## Maintainer
|
||||
|
||||
[@ivansib](https://github.com/ivansib)
|
||||
|
||||
## Contributing
|
||||
|
||||
Please follow the [Sibcoin Core guidelines for contributing](https://github.com/ivansib/sibcoin/blob/master/CONTRIBUTING.md).
|
||||
|
||||
Specifically:
|
||||
|
||||
* [Contributor Workflow](https://github.com/ivansib/sibcoin/blob/master/CONTRIBUTING.md#contributor-workflow)
|
||||
|
||||
To contribute a patch, the workflow is as follows:
|
||||
|
||||
* Fork repository
|
||||
* Create topic branch
|
||||
* Commit patches
|
||||
|
||||
In general commits should be atomic and diffs should be easy to read. For this reason do not mix any formatting fixes or code moves with actual code changes.
|
||||
|
||||
Commit messages should be verbose by default, consisting of a short subject line (50 chars max), a blank line and detailed explanatory text as separate paragraph(s); unless the title alone is self-explanatory (like "Corrected typo in main.cpp") then a single title line is sufficient. Commit messages should be helpful to people reading your code in the future, so explain the reasoning for your decisions. Further explanation [here](http://chris.beams.io/posts/git-commit/).
|
||||
|
||||
## License
|
||||
|
||||
Released under the MIT license, under the same terms as Sibcoin Core itself. See [LICENSE](LICENSE) for more info.
|
||||
@ -0,0 +1,64 @@
|
||||
# -*- coding: utf-8 -*-
"""Scratch/experiment script: check whether a block's timestamp falls inside
a superblock payment window.

TODO: turn this into a real unit test -- mock the daemon, tie a test block
height to a timestamp, and exercise a proper `within_window` method.
"""
import sys
import os
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
import config
from models import Proposal
from sibcoind import SibcoinDaemon
import misc

sibcoind = SibcoinDaemon.from_sibcoin_conf(config.sibcoin_conf)

# ==============================================================================
# do stuff here

pr = Proposal(
    name='proposal7',
    url='https://dashcentral.com/proposal7',
    payment_address='yTC62huR4YQEPn9AJHjnQxxreHSbgAoatV',
    payment_amount=39.23,
    start_epoch=1483250400,
    end_epoch=1491022800,
)

bh = 131112
bh_epoch = sibcoind.block_height_to_epoch(bh)

# pad the hard-coded window by 20 hours (72000 s) on either side
fudge = 72000
window_start = 1483689082 - fudge
window_end = 1483753726 + fudge

print("Window start: %s" % misc.epoch2str(window_start))
print("Window end: %s" % misc.epoch2str(window_end))
print("\nbh_epoch: %s" % misc.epoch2str(bh_epoch))

if (bh_epoch < window_start or bh_epoch > window_end):
    print("outside of window!")
else:
    print("Within window, we're good!")
|
||||
@ -0,0 +1,10 @@
|
||||
import binascii
import sys

usage = "%s <hex>" % sys.argv[0]


def decode_hex(hex_string):
    """Return the raw bytes encoded by *hex_string*.

    Raises binascii.Error when the input is not valid hexadecimal.
    """
    return binascii.unhexlify(hex_string)


def main():
    """CLI entry point: decode argv[1] as hex, or print usage."""
    if len(sys.argv) < 2:
        print(usage)
    else:
        print(decode_hex(sys.argv[1]))


# guard the side effects so importing this module is safe
if __name__ == '__main__':
    main()
|
||||
@ -0,0 +1,21 @@
|
||||
import sys
import os
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
import dashlib

# ============================================================================
usage = "%s <hex>" % sys.argv[0]


def main():
    """Deserialise the hex-encoded governance object given on the command line.

    Replaces the original scratch version, which dropped into pdb.set_trace()
    instead of printing the result and left a stray `1` expression behind.
    """
    if len(sys.argv) < 2:
        print(usage)
        sys.exit(1)

    obj = dashlib.deserialise(sys.argv[1])
    print(obj)


if __name__ == '__main__':
    main()
|
||||
@ -0,0 +1,211 @@
|
||||
#!/usr/bin/env python
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
|
||||
import init
|
||||
import config
|
||||
import misc
|
||||
#from dashd import DashDaemon
|
||||
from sibcoind import SibcoinDaemon
|
||||
from models import Superblock, Proposal, GovernanceObject
|
||||
from models import VoteSignals, VoteOutcomes, Transient
|
||||
import socket
|
||||
from misc import printdbg
|
||||
import time
|
||||
from bitcoinrpc.authproxy import JSONRPCException
|
||||
import signal
|
||||
import atexit
|
||||
import random
|
||||
from scheduler import Scheduler
|
||||
import argparse
|
||||
|
||||
|
||||
# sync dashd gobject list with our local relational DB backend
def perform_dashd_object_sync(dashd):
    """Mirror the daemon's governance-object list into the local database."""
    GovernanceObject.sync(dashd)
|
||||
|
||||
|
||||
def prune_expired_proposals(dashd):
    """Cast delete votes for every proposal whose funding window has passed."""
    expired = Proposal.expired(dashd.superblockcycle())
    for stale_proposal in expired:
        stale_proposal.vote(dashd, VoteSignals.delete, VoteOutcomes.yes)
|
||||
|
||||
|
||||
# ping dashd
def sentinel_ping(dashd):
    """Report Sentinel liveness to the local daemon via its ping RPC."""
    printdbg("in sentinel_ping")
    dashd.ping()
    printdbg("leaving sentinel_ping")
|
||||
|
||||
|
||||
def attempt_superblock_creation(dashd):
    """Create, vote on and possibly submit a superblock for the next cycle.

    No-op unless this node is a masternode. If we have already voted
    FUNDING=YES for a superblock at the next event block height, any other
    candidate superblocks at that height are voted down instead.
    """
    import dashlib

    if not dashd.is_masternode():
        print("We are not a Masternode... can't submit superblocks!")
        return

    # query votes for this specific ebh... if we have voted for this specific
    # ebh, then it's voted on. since we track votes this is all done using
    # joins against the votes table
    #
    # has this masternode voted on *any* superblocks at the given event_block_height?
    # have we voted FUNDING=YES for a superblock for this specific event_block_height?

    event_block_height = dashd.next_superblock_height()

    if Superblock.is_voted_funding(event_block_height):
        # vote down any new SBs because we've already chosen a winner
        for sb in Superblock.at_height(event_block_height):
            if not sb.voted_on(signal=VoteSignals.funding):
                sb.vote(dashd, VoteSignals.funding, VoteOutcomes.no)

        # now return, we're done
        return

    if not dashd.is_govobj_maturity_phase():
        printdbg("Not in maturity phase yet -- will not attempt Superblock")
        return

    proposals = Proposal.approved_and_ranked(proposal_quorum=dashd.governance_quorum(), next_superblock_max_budget=dashd.next_superblock_max_budget())
    budget_max = dashd.get_superblock_budget_allocation(event_block_height)
    sb_epoch_time = dashd.block_height_to_epoch(event_block_height)

    sb = dashlib.create_superblock(proposals, event_block_height, budget_max, sb_epoch_time)
    if not sb:
        printdbg("No superblock created, sorry. Returning.")
        return

    # find the deterministic SB w/highest object_hash in the DB
    sb_hash = sb.hex_hash()
    dbrec = Superblock.find_highest_deterministic(sb_hash)
    if dbrec:
        dbrec.vote(dashd, VoteSignals.funding, VoteOutcomes.yes)

        # any other blocks which match the sb_hash are duplicates, delete them.
        # FIX: use a distinct loop variable -- the original shadowed the
        # freshly created `sb` with each database row
        for dupe in Superblock.select().where(Superblock.sb_hash == sb_hash):
            if not dupe.voted_on(signal=VoteSignals.funding):
                dupe.vote(dashd, VoteSignals.delete, VoteOutcomes.yes)

        printdbg("VOTED FUNDING FOR SB! We're done here 'til next superblock cycle.")
        return
    else:
        printdbg("The correct superblock wasn't found on the network...")

    # if we are the elected masternode...
    if (dashd.we_are_the_winner()):
        printdbg("we are the winner! Submit SB to network")
        sb.submit(dashd)
|
||||
|
||||
|
||||
def check_object_validity(dashd):
    """Cast validity votes for every locally known proposal and superblock."""
    for gov_class in (Proposal, Superblock):
        for gov_obj in gov_class.select():
            gov_obj.vote_validity(dashd)
|
||||
|
||||
|
||||
def is_dashd_port_open(dashd):
    """Return True when the daemon's JSONRPC port answers a governance query.

    Prints the underlying error so masternode operators get an instructive
    message when the port is closed or the credentials are wrong.
    """
    # test socket open before beginning, display instructive message to MN
    # operators if it's not
    port_open = False
    try:
        # FIX: the reply was bound to an unused local; only reachability matters
        dashd.rpc_command('getgovernanceinfo')
        port_open = True
    except (socket.error, JSONRPCException) as e:
        print("%s" % e)

    return port_open
|
||||
|
||||
|
||||
def main():
    """Top-level Sentinel run: connectivity checks, object sync, votes, superblock."""
    dashd = SibcoinDaemon.from_sibcoin_conf(config.sibcoin_conf)

    # guard clause: daemon reachable?
    if not is_dashd_port_open(dashd):
        print("Cannot connect to sibcoind. Please ensure sibcoind is running and the JSONRPC port is open to Sentinel.")
        return

    # guard clause: chain and masternode lists fully synced?
    if not dashd.is_synced():
        print("sibcoind not synced with network! Awaiting full sync before running Sentinel.")
        return

    # guard clause: this node must be a valid masternode
    if not dashd.is_masternode():
        print("Invalid Masternode Status, cannot continue.")
        return

    bypass = init.options.bypass
    if bypass:
        # bypassing scheduler, remove the scheduled event
        printdbg("--bypass-schedule option used, clearing schedule")
        Scheduler.clear_schedule()

    if not Scheduler.is_run_time():
        printdbg("Not yet time for an object sync/vote, moving on.")
        return

    if not bypass:
        # delay to account for cron minute sync
        Scheduler.delay()

    # running now, so remove the scheduled event
    Scheduler.clear_schedule()

    # ========================================================================
    # general flow:
    # ========================================================================
    #
    # load "gobject list" rpc command data, sync objects into internal database
    perform_dashd_object_sync(dashd)

    if dashd.has_sentinel_ping:
        sentinel_ping(dashd)

    # auto vote network objects as valid/invalid
    # check_object_validity(dashd)

    # vote to delete expired proposals
    prune_expired_proposals(dashd)

    # create a Superblock if necessary
    attempt_superblock_creation(dashd)

    # schedule the next run
    Scheduler.schedule_next_run()
|
||||
|
||||
|
||||
def signal_handler(signum, frame):
    """On an interrupting signal, release the run mutex and exit non-zero.

    Only installed from the __main__ block after mutex_key is defined.
    """
    print("Got a signal [%d], cleaning up..." % (signum))
    # FIX: use the shared mutex_key rather than a duplicated string literal,
    # so this handler and cleanup() always delete the same Transient entry
    Transient.delete(mutex_key)
    sys.exit(1)
|
||||
|
||||
|
||||
def cleanup():
    """atexit hook: drop the Transient mutex entry so future runs can start."""
    Transient.delete(mutex_key)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # ensure another instance of Sentinel is not currently running
    mutex_key = 'SENTINEL_RUNNING'
    # assume that all processes expire after 'timeout_seconds' seconds
    timeout_seconds = 90

    is_running = Transient.get(mutex_key)
    if is_running:
        printdbg("An instance of Sentinel is already running -- aborting.")
        sys.exit(1)

    Transient.set(mutex_key, misc.now(), timeout_seconds)

    # FIX: register the cleanup handlers only *after* acquiring the mutex.
    # Registering atexit before the is_running check meant a second instance
    # that aborted above would still run cleanup() on exit and delete the
    # mutex belonging to the instance that was actually running.
    atexit.register(cleanup)
    signal.signal(signal.SIGINT, signal_handler)

    # locked to this instance -- perform main logic here
    main()

    Transient.delete(mutex_key)
|
||||
@ -0,0 +1,124 @@
|
||||
'''
Bitcoin base58 encoding and decoding.

Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib


# for compatibility with following code...
class SHA256(object):
    new = hashlib.sha256


if str != bytes:
    # Python 3.x: iterating bytes yields ints, so shim ord/chr accordingly
    def ord(c):
        return c

    def chr(n):
        return bytes((n,))


__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars

# the NUL byte as it appears when iterating the input:
# '\x00' (str) on Python 2, 0 (int) on Python 3
_zero_byte = b'\0'[0]


def b58encode(v):
    """Encode v, a string of bytes, to base58."""
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += (256**i) * ord(c)

    result = ''
    while long_value >= __b58base:
        div, mod = divmod(long_value, __b58base)
        result = __b58chars[mod] + result
        long_value = div
    result = __b58chars[long_value] + result

    # Bitcoin does a little leading-zero-compression:
    # leading 0-bytes in the input become leading-1s.
    # FIX: the original compared c == '\0', which never matches the ints
    # produced by iterating bytes on Python 3, silently dropping the pad.
    nPad = 0
    for c in v:
        if c == _zero_byte:
            nPad += 1
        else:
            break

    return (__b58chars[0] * nPad) + result


def b58decode(v, length=None):
    """Decode base58 string v into bytes; None if `length` is given and wrong."""
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += __b58chars.find(c) * (__b58base**i)

    result = bytes()
    while long_value >= 256:
        div, mod = divmod(long_value, 256)
        result = chr(mod) + result
        long_value = div
    result = chr(long_value) + result

    # undo the leading-zero-compression: leading '1's become 0-bytes
    nPad = 0
    for c in v:
        if c == __b58chars[0]:
            nPad += 1
        else:
            break

    result = chr(0) * nPad + result

    if length is not None and len(result) != length:
        return None

    return result


def checksum(v):
    """Return 32-bit checksum based on SHA256"""
    return SHA256.new(SHA256.new(v).digest()).digest()[0:4]


def b58encode_chk(v):
    """b58encode a string, with 32-bit checksum"""
    return b58encode(v + checksum(v))


def b58decode_chk(v):
    """decode a base58 string, check and remove checksum"""
    result = b58decode(v)

    if result is None:
        return None

    # FIX: the original computed the checksum into an unused local (h3)
    # and then recomputed it in the comparison
    if result[-4:] == checksum(result[:-4]):
        return result[:-4]
    else:
        return None


def get_bcaddress_version(strAddress):
    """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """
    addr = b58decode_chk(strAddress)
    if addr is None or len(addr) != 21:
        return None
    version = addr[0]
    return ord(version)


if __name__ == '__main__':
    # Test case (from http://gitorious.org/bitcoin/python-base58.git)
    # FIX: 'is 0' identity-compared a literal (SyntaxWarning on modern CPython)
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
    _ohai = 'o hai'.encode('ascii')
    _tmp = b58encode(_ohai)
    assert _tmp == 'DYB3oMS'
    assert b58decode(_tmp, 5) == _ohai
    print("Tests passed")
||||
@ -0,0 +1,110 @@
|
||||
"""
|
||||
Set up defaults and read sentinel.conf
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
from sib_config import SibcoinConfig
|
||||
|
||||
default_sentinel_config = os.path.normpath(
|
||||
os.path.join(os.path.dirname(__file__), '../sentinel.conf')
|
||||
)
|
||||
|
||||
debug_enabled = os.environ.get('SENTINEL_DEBUG', False)
|
||||
|
||||
sentinel_config_file = os.environ.get('SENTINEL_CONFIG', default_sentinel_config)
|
||||
sentinel_cfg = SibcoinConfig.tokenize(sentinel_config_file)
|
||||
sentinel_version = "1.3.0"
|
||||
min_dashd_proto_version_with_sentinel_ping = 70208
|
||||
|
||||
|
||||
def _default_conf_path(appdata_subpath, home_dotdir, darwin_subpath):
    """Return the platform-default path for a coin daemon's config file.

    appdata_subpath: path under %APPDATA% on Windows
    home_dotdir:     path under $HOME on Linux/other Unix
    darwin_subpath:  path under $HOME on macOS (Application Support)
    """
    if sys.platform == 'win32':
        return os.path.join(os.getenv('APPDATA'), appdata_subpath)

    home = os.environ.get('HOME')
    if sys.platform == 'darwin':
        return os.path.join(home, darwin_subpath)
    return os.path.join(home, home_dotdir)


def get_dash_conf():
    """Return the dash.conf path: sentinel.conf override or platform default."""
    dash_conf = _default_conf_path(
        "DashCore/dash.conf",
        ".dashcore/dash.conf",
        "Library/Application Support/DashCore/dash.conf",
    )
    return sentinel_cfg.get('dash_conf', dash_conf)


def get_sibcoin_conf():
    """Return the sibcoin.conf path: sentinel.conf override or platform default."""
    sibcoin_conf = _default_conf_path(
        "Sibcoin/sibcoin.conf",
        ".sibcoin/sibcoin.conf",
        "Library/Application Support/Sibcoin/sibcoin.conf",
    )
    return sentinel_cfg.get('sibcoin_conf', sibcoin_conf)
|
||||
|
||||
|
||||
def get_network():
    """Return the configured network name; 'mainnet' when unset."""
    return sentinel_cfg.get('network', 'mainnet')
|
||||
|
||||
|
||||
def get_rpchost():
    """Return the configured RPC host; loopback when unset."""
    return sentinel_cfg.get('rpchost', '127.0.0.1')
|
||||
|
||||
|
||||
def sqlite_test_db_name(sqlite_file_path):
    """Derive the test-database filename by inserting '_test' before the extension."""
    root, ext = os.path.splitext(sqlite_file_path)
    return '%s_test%s' % (root, ext)
|
||||
|
||||
|
||||
def get_db_conn():
    """Build and return a peewee database handle from sentinel.conf settings.

    Driver is one of sqlite / mysql / postgres; in the 'test' environment the
    database name is suffixed so tests never touch production data.
    """
    import peewee
    env = os.environ.get('SENTINEL_ENV', 'production')

    # default values should be used unless you need a different config for development
    db_host = sentinel_cfg.get('db_host', '127.0.0.1')
    db_port = sentinel_cfg.get('db_port', None)
    db_name = sentinel_cfg.get('db_name', 'sentinel')
    db_user = sentinel_cfg.get('db_user', 'sentinel')
    db_password = sentinel_cfg.get('db_password', 'sentinel')
    db_charset = sentinel_cfg.get('db_charset', 'utf8mb4')
    db_driver = sentinel_cfg.get('db_driver', 'sqlite')

    if env == 'test':
        if db_driver == 'sqlite':
            db_name = sqlite_test_db_name(db_name)
        else:
            db_name = "%s_test" % db_name

    drivers = {
        'mysql': peewee.MySQLDatabase,
        'postgres': peewee.PostgresqlDatabase,
        'sqlite': peewee.SqliteDatabase,
    }
    driver = drivers.get(db_driver)

    # MySQLdb spells the password keyword 'passwd'
    password_field = 'passwd' if db_driver == 'mysql' else 'password'
    connect_kwargs = {
        'host': db_host,
        'user': db_user,
        password_field: db_password,
    }
    if db_port:
        connect_kwargs['port'] = int(db_port)

    # sqlite is file-based and takes no connection parameters
    if driver == peewee.SqliteDatabase:
        connect_kwargs = {}

    return driver(db_name, **connect_kwargs)
|
||||
|
||||
|
||||
#dash_conf = get_dash_conf()
|
||||
#sibcoin_conf = get_sibcoin_conf()
|
||||
#network = get_network()
|
||||
#rpc_host = get_rpchost()
|
||||
#db = get_db_conn()
|
||||
@ -0,0 +1,4 @@
|
||||
# for constants which need to be accessed by various parts of Sentinel
|
||||
|
||||
# skip proposals on superblock creation if the SB isn't within the fudge window
|
||||
SUPERBLOCK_FUDGE_WINDOW = 60 * 60 * 2
|
||||
@ -0,0 +1,59 @@
|
||||
import sys
|
||||
import os
|
||||
import io
|
||||
import re
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
from misc import printdbg
|
||||
|
||||
|
||||
class DashConfig():
    """Parser for dash.conf-style 'key=value' configuration files."""

    @classmethod
    def slurp_config_file(cls, filename):
        """Return *filename*'s contents with '#'-comment lines removed."""
        # FIX: 'with' guarantees the handle is closed even if reading raises;
        # the original leaked the descriptor on error
        with io.open(filename) as f:
            lines = [line for line in f if not re.match(r'^\s*#', line)]

        # data is dash.conf without commented lines
        return ''.join(lines)

    @classmethod
    def get_rpc_creds(cls, data, network='mainnet'):
        """Extract RPC credentials from dash.conf text.

        Returns a dict with keys 'user', 'password' and 'port'; the port
        defaults to the standard value for *network* and is always an int.
        """
        # get rpc info from dash.conf
        match = re.findall(r'rpc(user|password|port)=(.*?)$', data, re.MULTILINE)

        # python >= 2.7
        creds = {key: value for (key, value) in match}

        # standard Dash defaults...
        default_port = 9998 if (network == 'mainnet') else 19998

        # use default port for network if not specified in dash.conf
        if 'port' not in creds:
            creds[u'port'] = default_port

        # convert to an int if taken from dash.conf
        creds[u'port'] = int(creds[u'port'])

        # return a dictionary with RPC credential key, value pairs
        return creds

    @classmethod
    def tokenize(cls, filename):
        """Parse *filename* into a {key: value} dict; empty on read errors."""
        tokens = {}
        try:
            data = cls.slurp_config_file(filename)
            match = re.findall(r'(.*?)=(.*?)$', data, re.MULTILINE)
            tokens = {key: value for (key, value) in match}
        except IOError as e:
            printdbg("[warning] error reading config file: %s" % e)

        return tokens
|
||||
@ -0,0 +1,226 @@
|
||||
"""
|
||||
dashd JSONRPC interface
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
import config
|
||||
import base58
|
||||
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
|
||||
from masternode import Masternode
|
||||
from decimal import Decimal
|
||||
import time
|
||||
|
||||
|
||||
class DashDaemon():
|
||||
def __init__(self, **kwargs):
|
||||
host = kwargs.get('host', '127.0.0.1')
|
||||
user = kwargs.get('user')
|
||||
password = kwargs.get('password')
|
||||
port = kwargs.get('port')
|
||||
|
||||
self.creds = (user, password, host, port)
|
||||
|
||||
# memoize calls to some dashd methods
|
||||
self.governance_info = None
|
||||
self.gobject_votes = {}
|
||||
|
||||
@property
|
||||
def rpc_connection(self):
|
||||
return AuthServiceProxy("http://{0}:{1}@{2}:{3}".format(*self.creds))
|
||||
|
||||
@classmethod
|
||||
def from_dash_conf(self, dash_dot_conf):
|
||||
from dash_config import DashConfig
|
||||
config_text = DashConfig.slurp_config_file(dash_dot_conf)
|
||||
creds = DashConfig.get_rpc_creds(config_text, config.network)
|
||||
|
||||
creds[u'host'] = config.rpc_host
|
||||
|
||||
return self(**creds)
|
||||
|
||||
def rpc_command(self, *params):
|
||||
return self.rpc_connection.__getattr__(params[0])(*params[1:])
|
||||
|
||||
# common RPC convenience methods
|
||||
|
||||
def get_masternodes(self):
|
||||
mnlist = self.rpc_command('masternodelist', 'full')
|
||||
return [Masternode(k, v) for (k, v) in mnlist.items()]
|
||||
|
||||
def get_current_masternode_vin(self):
|
||||
from dashlib import parse_masternode_status_vin
|
||||
|
||||
my_vin = None
|
||||
|
||||
try:
|
||||
status = self.rpc_command('masternode', 'status')
|
||||
mn_outpoint = status.get('outpoint') or status.get('vin')
|
||||
my_vin = parse_masternode_status_vin(mn_outpoint)
|
||||
except JSONRPCException as e:
|
||||
pass
|
||||
|
||||
return my_vin
|
||||
|
||||
def governance_quorum(self):
|
||||
# TODO: expensive call, so memoize this
|
||||
total_masternodes = self.rpc_command('masternode', 'count', 'enabled')
|
||||
min_quorum = self.govinfo['governanceminquorum']
|
||||
|
||||
# the minimum quorum is calculated based on the number of masternodes
|
||||
quorum = max(min_quorum, (total_masternodes // 10))
|
||||
return quorum
|
||||
|
||||
@property
|
||||
def govinfo(self):
|
||||
if (not self.governance_info):
|
||||
self.governance_info = self.rpc_command('getgovernanceinfo')
|
||||
return self.governance_info
|
||||
|
||||
# governance info convenience methods
|
||||
def superblockcycle(self):
|
||||
return self.govinfo['superblockcycle']
|
||||
|
||||
def last_superblock_height(self):
|
||||
height = self.rpc_command('getblockcount')
|
||||
cycle = self.superblockcycle()
|
||||
return cycle * (height // cycle)
|
||||
|
||||
def next_superblock_height(self):
|
||||
return self.last_superblock_height() + self.superblockcycle()
|
||||
|
||||
def is_masternode(self):
|
||||
return not (self.get_current_masternode_vin() is None)
|
||||
|
||||
def is_synced(self):
|
||||
mnsync_status = self.rpc_command('mnsync', 'status')
|
||||
synced = (mnsync_status['IsBlockchainSynced'] and
|
||||
mnsync_status['IsMasternodeListSynced'] and
|
||||
mnsync_status['IsWinnersListSynced'] and
|
||||
mnsync_status['IsSynced'] and
|
||||
not mnsync_status['IsFailed'])
|
||||
return synced
|
||||
|
||||
def current_block_hash(self):
|
||||
height = self.rpc_command('getblockcount')
|
||||
block_hash = self.rpc_command('getblockhash', height)
|
||||
return block_hash
|
||||
|
||||
def get_superblock_budget_allocation(self, height=None):
|
||||
if height is None:
|
||||
height = self.rpc_command('getblockcount')
|
||||
return Decimal(self.rpc_command('getsuperblockbudget', height))
|
||||
|
||||
def next_superblock_max_budget(self):
|
||||
cycle = self.superblockcycle()
|
||||
current_block_height = self.rpc_command('getblockcount')
|
||||
|
||||
last_superblock_height = (current_block_height // cycle) * cycle
|
||||
next_superblock_height = last_superblock_height + cycle
|
||||
|
||||
last_allocation = self.get_superblock_budget_allocation(last_superblock_height)
|
||||
next_allocation = self.get_superblock_budget_allocation(next_superblock_height)
|
||||
|
||||
next_superblock_max_budget = next_allocation
|
||||
|
||||
return next_superblock_max_budget
|
||||
|
||||
# "my" votes refers to the current running masternode
# memoized on a per-run, per-object_hash basis
def get_my_gobject_votes(self, object_hash):
    """Return this masternode's recorded votes on `object_hash`.

    Results are cached in self.gobject_votes for the lifetime of the run.
    Returns an empty list when the masternode VIN cannot be determined
    from `masternode status` (the empty result is deliberately not cached,
    so it is re-checked on every call).
    """
    import dashlib
    if not self.gobject_votes.get(object_hash):
        my_vin = self.get_current_masternode_vin()
        # if we can't get MN vin from output of `masternode status`,
        # return an empty list
        if not my_vin:
            return []

        # VIN is "txid-index"
        (txid, vout_index) = my_vin.split('-')

        cmd = ['gobject', 'getcurrentvotes', object_hash, txid, vout_index]
        raw_votes = self.rpc_command(*cmd)
        self.gobject_votes[object_hash] = dashlib.parse_raw_votes(raw_votes)

    return self.gobject_votes[object_hash]
|
||||
|
||||
def is_govobj_maturity_phase(self):
    """Return True once the chain is within the governance-object maturity
    window (~3 days of blocks) before the next superblock."""
    # ~3 days at ~2.6-minute blocks: (60 * 24 * 3) / 2.6
    maturity_phase_delta = 1662
    if config.network == 'testnet':
        maturity_phase_delta = 24  # much shorter window on testnet

    # BUGFIX: next_superblock_height() was called twice (the second result
    # simply overwrote the first), doubling the RPC work for no reason.
    event_block_height = self.next_superblock_height()
    maturity_phase_start_block = event_block_height - maturity_phase_delta

    current_height = self.rpc_command('getblockcount')

    return current_height >= maturity_phase_start_block
|
||||
|
||||
def we_are_the_winner(self):
    """Return True when this node's masternode VIN won the deterministic
    election (seeded by the current block hash) among the masternode list."""
    import dashlib
    # find the elected MN vin for superblock creation...
    current_block_hash = self.current_block_hash()
    mn_list = self.get_masternodes()
    winner = dashlib.elect_mn(block_hash=current_block_hash, mnlist=mn_list)
    my_vin = self.get_current_masternode_vin()

    # print "current_block_hash: [%s]" % current_block_hash
    # print "MN election winner: [%s]" % winner
    # print "current masternode VIN: [%s]" % my_vin

    return (winner == my_vin)
|
||||
|
||||
def estimate_block_time(self, height):
    """
    Estimate the epoch (unix time) at which a *future* block height will be
    mined, using the average block interval.

    Called by block_height_to_epoch if block height is in the future.
    Call `block_height_to_epoch` instead of this method.

    DO NOT CALL DIRECTLY if you don't want a "Oh Noes." exception.

    BUGFIX: the docstring used to sit *after* the import statement, making it
    a discarded string expression rather than the function's docstring; it is
    now the first statement.
    """
    import dashlib

    current_block_height = self.rpc_command('getblockcount')
    diff = height - current_block_height

    if diff < 0:
        raise Exception("Oh Noes.")

    future_seconds = dashlib.blocks_to_seconds(diff)
    estimated_epoch = int(time.time() + future_seconds)

    return estimated_epoch
|
||||
|
||||
def block_height_to_epoch(self, height):
    """
    Get the epoch for a given block height, or estimate it if the block hasn't
    been mined yet. Call this method instead of `estimate_block_time`.
    """
    epoch = -1

    try:
        bhash = self.rpc_command('getblockhash', height)
        block = self.rpc_command('getblock', bhash)
        epoch = block['time']
    except JSONRPCException as e:
        # NOTE(review): relies on JSONRPCException exposing `.message` and on
        # dashd returning this exact error string for a future height --
        # verify against the installed bitcoinrpc/dashd versions
        if e.message == 'Block height out of range':
            epoch = self.estimate_block_time(height)
        else:
            # unexpected RPC failure: log and propagate
            print("error: %s" % e)
            raise e

    return epoch
|
||||
|
||||
@property
def has_sentinel_ping(self):
    """True when the connected daemon's protocol version is new enough for the `sentinelping` RPC."""
    info = self.rpc_command('getinfo')
    return info['protocolversion'] >= config.min_dashd_proto_version_with_sentinel_ping
|
||||
|
||||
def ping(self):
    """Send a `sentinelping` RPC to the daemon (side effect only; no return value)."""
    self.rpc_command('sentinelping', config.sentinel_version)
|
||||
@ -0,0 +1,304 @@
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
import base58
|
||||
import hashlib
|
||||
import re
|
||||
from decimal import Decimal
|
||||
import simplejson
|
||||
import binascii
|
||||
from misc import printdbg, epoch2str
|
||||
import time
|
||||
|
||||
|
||||
def is_valid_dash_address(address, network='mainnet'):
    """Disabled Dash address validator -- always raises RuntimeWarning.

    This Sentinel fork validates Sibcoin addresses instead; use
    is_valid_sibcoin_address. Everything after the raise is unreachable
    code retained from upstream.
    """
    raise RuntimeWarning('This method should not be used with sibcoin')
    # Only public key addresses are allowed
    # A valid address is a RIPEMD-160 hash which contains 20 bytes
    # Prior to base58 encoding 1 version byte is prepended and
    # 4 checksum bytes are appended so the total number of
    # base58 encoded bytes should be 25. This means the number of characters
    # in the encoding should be about 34 ( 25 * log2( 256 ) / log2( 58 ) ).
    dash_version = 140 if network == 'testnet' else 76

    # Check length (This is important because the base58 library has problems
    # with long addresses (which are invalid anyway).
    if ((len(address) < 26) or (len(address) > 35)):
        return False

    address_version = None

    try:
        decoded = base58.b58decode_chk(address)
        address_version = ord(decoded[0:1])
    except:
        # rescue from exception, not a valid Dash address
        return False

    if (address_version != dash_version):
        return False

    return True
|
||||
|
||||
def is_valid_sibcoin_address(address, network='mainnet'):
    """
    Return True when `address` is a valid Sibcoin public-key address for the
    given network ('mainnet' or 'testnet').

    Only public key addresses are allowed. A valid address is a RIPEMD-160
    hash (20 bytes); base58 encoding prepends 1 version byte and appends a
    4-byte checksum, so the encoded form is roughly 26-35 characters.
    """
    dash_version = 125 if network == 'testnet' else 63

    # Check length (this is important because the base58 library has problems
    # with long addresses, which are invalid anyway).
    if ((len(address) < 26) or (len(address) > 35)):
        return False

    address_version = None

    try:
        decoded = base58.b58decode_chk(address)
        address_version = ord(decoded[0:1])
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; any decode failure means "not a valid address"
        return False

    return address_version == dash_version
|
||||
|
||||
def is_valid_address(address, network='mainnet'):
    """Validate `address` for the given network (delegates to the Sibcoin validator)."""
    return is_valid_sibcoin_address(address, network)
|
||||
|
||||
|
||||
def hashit(data):
    """Return the SHA-256 digest of the UTF-8 encoding of `data` as a big integer."""
    digest_hex = hashlib.sha256(data.encode('utf-8')).hexdigest()
    return int(digest_hex, 16)
|
||||
|
||||
|
||||
# returns the masternode VIN of the elected winner
def elect_mn(**kwargs):
    """
    Return the VIN ("txid-index") of the elected masternode, or None when no
    masternode is ENABLED.

    Keyword Args:
        block_hash: block hash used as the election seed
        mnlist: iterable of masternode objects exposing `status` and `vin`

    The winner is the ENABLED masternode whose hashed VIN is numerically
    closest to the hashed block hash (ties resolve to the earliest entry).
    """
    current_block_hash = kwargs['block_hash']
    mn_list = kwargs['mnlist']

    # filter only enabled MNs
    enabled = [mn for mn in mn_list if mn.status == 'ENABLED']
    if not enabled:
        # BUGFIX: the empty case was previously handled by a bare `except:`
        # around an IndexError; make it explicit instead
        return None

    block_hash_hash = hashit(current_block_hash)

    # min() by absolute hash distance replaces building + sorting a
    # candidate list; behavior (including tie-breaking) is unchanged
    winner = min(enabled, key=lambda mn: abs(hashit(mn.vin) - block_hash_hash))
    return winner.vin
|
||||
|
||||
|
||||
def parse_masternode_status_vin(status_vin_string):
    """
    Extract a masternode collateral outpoint from `masternode status` output
    and return it as "txid-index", or None for the all-zero txid.

    Accepts both the verbose "CTxIn(COutPoint(txid, n), ..." form and the
    plain "txid-n" form returned by newer daemons.
    """
    ctxin_pattern = re.compile(r'CTxIn\(COutPoint\(([0-9a-zA-Z]+),\s*(\d+)\),')
    match = ctxin_pattern.match(status_vin_string)

    if match is None:
        # fall back to the plain "txid-index" format
        plain_pattern = re.compile(r'([0-9a-zA-Z]+)-(\d+)')
        match = plain_pattern.match(status_vin_string)

    txid = match.group(1)
    index = match.group(2)

    # the all-zero txid means there is no collateral outpoint
    if txid == '0000000000000000000000000000000000000000000000000000000000000000':
        return None

    return '%s-%s' % (txid, index)
|
||||
|
||||
|
||||
def create_superblock(proposals, event_block_height, budget_max, sb_epoch_time):
    """
    Assemble a Superblock model from the ranked `proposals` list.

    Proposals are considered in order and added greedily while the running
    total stays within `budget_max` and `sb_epoch_time` falls inside each
    proposal's (fudged) payment window.

    Returns the (unsaved) Superblock model, or None when no proposal is
    eligible.
    """
    from models import Superblock
    from constants import SUPERBLOCK_FUDGE_WINDOW

    # don't create an empty superblock
    if (len(proposals) == 0):
        printdbg("No proposals, cannot create an empty superblock.")
        return None

    budget_allocated = Decimal(0)
    fudge = SUPERBLOCK_FUDGE_WINDOW  # fudge-factor to allow for slightly incorrect estimates

    payments_list = []

    # loop-invariant; was needlessly rebound on every iteration
    fmt_string = "name: %s, rank: %4d, hash: %s, amount: %s <= %s"

    for proposal in proposals:
        # skip proposals that are too expensive...
        if (budget_allocated + proposal.payment_amount) > budget_max:
            printdbg(
                fmt_string % (
                    proposal.name,
                    proposal.rank,
                    proposal.object_hash,
                    proposal.payment_amount,
                    "skipped (blows the budget)",
                )
            )
            continue

        # skip proposals if the SB isn't within the Proposal time window...
        window_start = proposal.start_epoch - fudge
        window_end = proposal.end_epoch + fudge

        printdbg("\twindow_start: %s" % epoch2str(window_start))
        printdbg("\twindow_end: %s" % epoch2str(window_end))
        printdbg("\tsb_epoch_time: %s" % epoch2str(sb_epoch_time))

        if (sb_epoch_time < window_start or sb_epoch_time > window_end):
            printdbg(
                fmt_string % (
                    proposal.name,
                    proposal.rank,
                    proposal.object_hash,
                    proposal.payment_amount,
                    "skipped (SB time is outside of Proposal window)",
                )
            )
            continue

        printdbg(
            fmt_string % (
                proposal.name,
                proposal.rank,
                proposal.object_hash,
                proposal.payment_amount,
                "adding",
            )
        )

        payment = {
            'address': proposal.payment_address,
            'amount': "{0:.8f}".format(proposal.payment_amount),
            'proposal': "{}".format(proposal.object_hash)
        }

        # NOTE: a throwaway Superblock used to be deep-copied, built and
        # serialised here purely to measure its size, but the result was
        # never used; that dead (and expensive) per-proposal work has been
        # removed, along with the imports it needed.

        # add proposal and keep track of total budget allocation
        budget_allocated += proposal.payment_amount
        payments_list.append(payment)

    # don't create an empty superblock
    if not payments_list:
        printdbg("No proposals made the cut!")
        return None

    # 'payments' now contains all the proposals for inclusion in the
    # Superblock, but needs to be sorted by proposal hash descending
    payments_list.sort(key=lambda k: k['proposal'], reverse=True)

    sb = Superblock(
        event_block_height=event_block_height,
        payment_addresses='|'.join([pd['address'] for pd in payments_list]),
        payment_amounts='|'.join([pd['amount'] for pd in payments_list]),
        proposal_hashes='|'.join([pd['proposal'] for pd in payments_list]),
    )
    printdbg("generated superblock: %s" % sb.__dict__)

    return sb
|
||||
|
||||
|
||||
# convenience
|
||||
def deserialise(hexdata):
    """Decode a hex-encoded JSON payload into a Python object (Decimals preserved)."""
    raw_json = binascii.unhexlify(hexdata)
    return simplejson.loads(raw_json, use_decimal=True)
|
||||
|
||||
|
||||
def serialise(dikt):
    """Encode a Python object as hex-encoded canonical JSON (sorted keys, Decimals preserved)."""
    raw_json = simplejson.dumps(dikt, sort_keys=True, use_decimal=True)
    return binascii.hexlify(raw_json.encode('utf-8')).decode('utf-8')
|
||||
|
||||
|
||||
def did_we_vote(output):
    """Interpret the output of a `gobject vote-*` RPC.

    Returns True only when the per-wallet detail reports 'success'. A
    'failed' result caused by vote-rate limiting ("voting too often") is
    treated as not-voted so the caller can sync the vote from the network.

    NOTE(review): if output['detail'] or its 'sibcoin.conf' entry is missing,
    the .get() chain raises AttributeError/TypeError, which is NOT caught by
    the JSONRPCException handler -- confirm the RPC always returns this shape.
    """
    from bitcoinrpc.authproxy import JSONRPCException

    # sentinel
    voted = False
    err_msg = ''

    try:
        # the per-wallet result lives under detail -> 'sibcoin.conf'
        detail = output.get('detail').get('sibcoin.conf')
        result = detail.get('result')
        if 'errorMessage' in detail:
            err_msg = detail.get('errorMessage')
    except JSONRPCException as e:
        result = 'failed'
        err_msg = e.message

    # success, failed
    printdbg("result = [%s]" % result)
    if err_msg:
        printdbg("err_msg = [%s]" % err_msg)

    voted = False
    if result == 'success':
        voted = True

    # in case we spin up a new instance or server, but have already voted
    # on the network and network has recorded those votes
    m_old = re.match(r'^time between votes is too soon', err_msg)
    m_new = re.search(r'Masternode voting too often', err_msg, re.M)

    if result == 'failed' and (m_old or m_new):
        printdbg("DEBUG: Voting too often, need to sync w/network")
        voted = False

    return voted
|
||||
|
||||
|
||||
def parse_raw_votes(raw_votes):
    """
    Parse the dict returned by `gobject getcurrentvotes` into a list of vote
    dicts with keys: mn_collateral_outpoint, signal, outcome, ntime.
    """
    votes = []
    for raw_vote in list(raw_votes.values()):
        # each raw vote is "outpoint:ntime:outcome:signal"
        outpoint, ntime, outcome, signal = raw_vote.split(':')
        votes.append({
            'mn_collateral_outpoint': parse_masternode_status_vin(outpoint),
            'signal': signal.lower(),
            'outcome': outcome.lower(),
            'ntime': ntime,
        })

    return votes
|
||||
|
||||
|
||||
def blocks_to_seconds(blocks):
    """
    Return the estimated number of seconds which will transpire for a given
    number of blocks, assuming an average block interval of ~2.62 minutes.
    """
    minutes_per_block = 2.62
    return blocks * minutes_per_block * 60
|
||||
@ -0,0 +1,33 @@
|
||||
import simplejson
|
||||
|
||||
|
||||
def valid_json(input):
    """ Return true/false depending on whether input is valid JSON """
    try:
        simplejson.loads(input)
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; any parse failure simply means "not valid JSON"
        return False
    return True
|
||||
|
||||
|
||||
def extract_object(json_input):
    """
    Given either an old-style or new-style Proposal JSON string, extract the
    actual object used (ignore old-style multi-dimensional array and unused
    string for object type).

    Raises:
        Exception: if `json_input` is not valid JSON.
    """
    if not valid_json(json_input):
        raise Exception("Invalid JSON input.")

    obj = simplejson.loads(json_input, use_decimal=True)

    # BUGFIX: `unicode` only exists on Python 2; referencing it bare raised a
    # NameError on Python 3 whenever the `str` isinstance check was False
    try:
        string_types = (str, unicode)  # noqa: F821 -- Python 2 only
    except NameError:
        string_types = (str,)

    # old-style payload: [["proposal", {...}]] -- unwrap to the inner dict
    if (isinstance(obj, list) and
            isinstance(obj[0], list) and
            isinstance(obj[0][0], string_types) and
            isinstance(obj[0][1], dict)):
        obj = obj[0][1]

    return obj
|
||||
@ -0,0 +1,92 @@
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
import models
|
||||
from bitcoinrpc.authproxy import JSONRPCException
|
||||
import misc
|
||||
import re
|
||||
from misc import printdbg
|
||||
import time
|
||||
|
||||
|
||||
# mixin for GovObj composed classes like proposal and superblock, etc.
|
||||
class GovernanceClass(object):
    """Mixin for governance-object-backed models (e.g. Proposal, Superblock).

    Provides shared behaviour: vote pass-throughs to the backing
    GovernanceObject, validity voting, and serialisation/submission of the
    object to the daemon.
    """
    # objects like superblock triggers may only be submitted by a masternode
    only_masternode_can_submit = False

    # lazy
    @property
    def go(self):
        """The backing GovernanceObject row."""
        return self.governance_object

    # pass thru to GovernanceObject#vote
    def vote(self, dashd, signal, outcome):
        return self.go.vote(dashd, signal, outcome)

    # pass thru to GovernanceObject#voted_on
    def voted_on(self, **kwargs):
        return self.go.voted_on(**kwargs)

    def vote_validity(self, dashd):
        """Vote yes/no on the `valid` signal according to self.is_valid()."""
        if self.is_valid():
            printdbg("Voting valid! %s: %d" % (self.__class__.__name__, self.id))
            self.vote(dashd, models.VoteSignals.valid, models.VoteOutcomes.yes)
        else:
            printdbg("Voting INVALID! %s: %d" % (self.__class__.__name__, self.id))
            self.vote(dashd, models.VoteSignals.valid, models.VoteOutcomes.no)

    def get_submit_command(self):
        """Build the `gobject submit` argument list for this object."""
        obj_data = self.serialise()

        # new objects won't have parent_hash, revision, etc...
        cmd = ['gobject', 'submit', '0', '1', str(int(time.time())), obj_data]

        # some objects don't have a collateral tx to submit
        if not self.only_masternode_can_submit:
            # BUGFIX: was `go.object_fee_tx`, which raised NameError --
            # `go` is a property on self, not a local/global name
            cmd.append(self.go.object_fee_tx)

        return cmd

    def submit(self, dashd):
        """Submit this object to the network via the daemon (no return value)."""
        # don't attempt to submit a superblock unless a masternode
        # note: will probably re-factor this, this has code smell
        if (self.only_masternode_can_submit and not dashd.is_masternode()):
            print("Not a masternode. Only masternodes may submit these objects")
            return

        try:
            object_hash = dashd.rpc_command(*self.get_submit_command())
            printdbg("Submitted: [%s]" % object_hash)
        except JSONRPCException as e:
            print("Unable to submit: %s" % e.message)

    def serialise(self):
        """Return this object's dict form as hex-encoded canonical JSON."""
        import binascii
        import simplejson

        return binascii.hexlify(simplejson.dumps(self.get_dict(), sort_keys=True).encode('utf-8')).decode('utf-8')

    @classmethod
    def serialisable_fields(self):
        """Column names to serialise: all columns minus PK, FKs and object_hash."""
        # Python is so not very elegant...
        pk_column = self._meta.primary_key.db_column
        fk_columns = [fk.db_column for fk in self._meta.rel.values()]
        do_not_use = [pk_column]
        do_not_use.extend(fk_columns)
        do_not_use.append('object_hash')
        fields_to_serialise = list(self._meta.columns.keys())

        for field in do_not_use:
            if field in fields_to_serialise:
                fields_to_serialise.remove(field)

        return fields_to_serialise

    def get_dict(self):
        """Return the serialisable field values plus the govobj 'type' tag."""
        dikt = {}

        for field_name in self.serialisable_fields():
            dikt[field_name] = getattr(self, field_name)

        dikt['type'] = getattr(self, 'govobj_type')

        return dikt
|
||||
@ -0,0 +1,147 @@
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
import argparse
|
||||
import config
|
||||
|
||||
def is_valid_python_version():
    """Return True when running under Python 2.7+ or Python 3.4+."""
    ver = sys.version_info
    if ver.major == 2:
        return ver.minor >= 7
    if ver.major == 3:
        return ver.minor >= 4
    return False
|
||||
|
||||
|
||||
def python_short_ver_str():
    """Return the running interpreter version as 'major.minor'."""
    info = sys.version_info
    return "{}.{}".format(info.major, info.minor)
|
||||
|
||||
|
||||
def are_deps_installed():
    """Return True when all third-party dependencies import cleanly; print an error otherwise."""
    try:
        import peewee  # noqa: F401
        import bitcoinrpc.authproxy  # noqa: F401
        import simplejson  # noqa: F401
    except ImportError:
        print("[error]: Missing dependencies")
        return False

    return True
|
||||
|
||||
|
||||
def is_database_correctly_configured():
    """Return True when config.db accepts a connection.

    On failure this does NOT return False -- it prints guidance and calls
    sys.exit(1), terminating the process.
    """
    import peewee
    import config

    configured = False

    cannot_connect_message = "Cannot connect to database. Please ensure database service is running and user access is properly configured in 'sentinel.conf'."

    try:
        db = config.db
        db.connect()
        configured = True
    except (peewee.ImproperlyConfigured, peewee.OperationalError, ImportError) as e:
        print("[error]: %s" % e)
        print(cannot_connect_message)
        sys.exit(1)

    return configured
|
||||
|
||||
|
||||
def has_sibcoin_conf():
    """
    Return True when the sibcoin.conf referenced by config.sibcoin_conf
    exists and is readable, printing the IOError otherwise.
    """
    import config
    import io

    # ensure the conf file exists & is readable
    #
    # if not, print a message stating that Sibcoin Core must be installed and
    # configured, including JSONRPC access in sibcoin.conf
    try:
        # BUGFIX: the handle was previously never closed, leaking an open
        # file descriptor; the context manager closes it immediately
        with io.open(config.sibcoin_conf):
            pass
    except IOError as e:
        print(e)
        return False

    return True
|
||||
|
||||
def process_args():
    """Parse the known command-line options, ignoring anything unrecognised.

    Options: -b/--bypass-scheduler, -c/--config, -d/--debug.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-b', '--bypass-scheduler', dest='bypass', action='store_true',
        help='Bypass scheduler and sync/vote immediately')
    parser.add_argument(
        '-c', '--config', dest='config',
        help='Path to sentinel.conf (default: ../sentinel.conf)')
    parser.add_argument(
        '-d', '--debug', dest='debug', action='store_true',
        help='Enable debug mode')

    parsed, _unknown = parser.parse_known_args()
    return parsed
|
||||
|
||||
# module self-reference so the parsed CLI options can be shared as `init.options`
initmodule = sys.modules[__name__]
initmodule.options = False  # placeholder; replaced with parsed args in main()
|
||||
|
||||
# === begin main


def main():
    """Entry point: parse CLI args, wire up config, and validate the runtime.

    Exits the process with status 1 when the Python version, third-party
    dependencies, database connection, or sibcoin.conf are unusable.
    """
    options = process_args()

    # an explicit -c/--config overrides the default sentinel.conf location
    if options.config:
        config.sentinel_config_file = options.config

    # register a handler if SENTINEL_DEBUG is set
    if os.environ.get('SENTINEL_DEBUG', None) or options.debug:
        config.debug_enabled = True
        import logging
        logger = logging.getLogger('peewee')
        logger.setLevel(logging.DEBUG)
        logger.addHandler(logging.StreamHandler())

    # expose the parsed options module-wide via the `initmodule` handle
    initmodule.options = options

    from sib_config import SibcoinConfig
    config.sentinel_cfg = SibcoinConfig.tokenize(config.sentinel_config_file)

    # resolve derived config values now that sentinel.conf is parsed
    config.sibcoin_conf = config.get_sibcoin_conf()
    config.network = config.get_network()
    config.rpc_host = config.get_rpchost()
    config.db = config.get_db_conn()

    install_instructions = "\tpip install -r requirements.txt"

    if not is_valid_python_version():
        print("Python %s is not supported" % python_short_ver_str())
        sys.exit(1)

    if not are_deps_installed():
        print("Please ensure all dependencies are installed:")
        print(install_instructions)
        sys.exit(1)

    if not is_database_correctly_configured():
        print("Please ensure correct database configuration.")
        sys.exit(1)

    if not has_sibcoin_conf():
        print("Sibcoin Core must be installed and configured, including JSONRPC access in sibcoin.conf")
        sys.exit(1)


main()
|
||||
@ -0,0 +1,41 @@
|
||||
# basically just parse & make it easier to access the MN data from the output of
|
||||
# "masternodelist full"
|
||||
|
||||
|
||||
class Masternode():
    """
    Parsed view of one entry from the daemon's `masternodelist full` output,
    keyed by its collateral outpoint ("txid-index").
    """

    def __init__(self, collateral, mnstring):
        txid, vout_index = self.parse_collateral_string(collateral)
        self.txid = txid
        self.vout_index = int(vout_index)

        (status, protocol, address, ip_port,
         lastseen, activeseconds, lastpaid) = self.parse_mn_string(mnstring)

        self.status = status
        self.protocol = int(protocol)
        self.address = address

        # TODO: break this out... take ipv6 into account
        self.ip_port = ip_port

        self.lastseen = int(lastseen)
        self.activeseconds = int(activeseconds)
        self.lastpaid = int(lastpaid)

    @classmethod
    def parse_collateral_string(cls, collateral):
        """Split a "txid-index" collateral string into its two parts."""
        txid, index = collateral.split('-')
        return (txid, index)

    @classmethod
    def parse_mn_string(cls, mn_full_out):
        """
        Split one whitespace-delimited `masternodelist full` line into its
        fields, re-ordered as (status, protocol, address, ip_port, lastseen,
        activeseconds, lastpaid); the lastpaidblock field is discarded.
        """
        (status, protocol, address, lastseen, activeseconds,
         lastpaid, _lastpaidblock, ip_port) = mn_full_out.split()

        return (status, protocol, address, ip_port, lastseen, activeseconds, lastpaid)

    @property
    def vin(self):
        """Collateral outpoint formatted as "txid-index"."""
        return '{}-{}'.format(self.txid, self.vout_index)
|
||||
@ -0,0 +1,52 @@
|
||||
import time
|
||||
from datetime import datetime
|
||||
import re
|
||||
import sys
|
||||
import os
|
||||
import config
|
||||
|
||||
|
||||
def is_numeric(strin):
    """
    Return True when `strin` (after str() conversion) parses as a Decimal
    and carries no surrounding whitespace.
    """
    import decimal

    text = str(strin)

    # Decimal tolerates surrounding whitespace in its input, but we don't
    if text != text.strip():
        return False

    try:
        decimal.Decimal(text)
    except decimal.InvalidOperation:
        return False

    return True
|
||||
|
||||
|
||||
def printdbg(message):
    """
    Print `message` prefixed with a UTC timestamp when debug mode is enabled,
    then flush stdout either way.

    NOTE: the parameter was previously named `str`, shadowing the builtin;
    renamed (every call site in this codebase passes it positionally).
    """
    ts = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(now()))
    logstr = "{} {}".format(ts, message)
    if config.debug_enabled:
        print(logstr)

    sys.stdout.flush()
|
||||
|
||||
|
||||
def is_hash(s):
    """Return True when `s` is a 64-character lowercase hex string (e.g. an object hash)."""
    return re.match('^[a-f0-9]{64}$', s) is not None
|
||||
|
||||
|
||||
def now():
    """Current unix time, truncated to an int."""
    current = time.time()
    return int(current)
|
||||
|
||||
|
||||
def epoch2str(epoch):
    """Format a unix epoch as a UTC 'YYYY-MM-DD HH:MM:SS' string."""
    moment = datetime.utcfromtimestamp(epoch)
    return moment.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
class Bunch(object):
    """Tiny attribute bag: Bunch(a=1).a == 1; .get(name) returns None when absent."""

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def get(self, name):
        return self.__dict__.get(name, None)
|
||||
@ -0,0 +1,768 @@
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
import init
|
||||
import time
|
||||
import datetime
|
||||
import re
|
||||
import simplejson
|
||||
from peewee import IntegerField, CharField, TextField, ForeignKeyField, DecimalField, DateTimeField
|
||||
import peewee
|
||||
import playhouse.signals
|
||||
import misc
|
||||
import dashd
|
||||
from misc import (printdbg, is_numeric)
|
||||
import config
|
||||
from bitcoinrpc.authproxy import JSONRPCException
|
||||
try:
|
||||
import urllib.parse as urlparse
|
||||
except ImportError:
|
||||
import urlparse
|
||||
|
||||
# our mixin
|
||||
from governance_class import GovernanceClass
|
||||
|
||||
db = config.db
|
||||
db.connect()
|
||||
|
||||
|
||||
# TODO: lookup table?
|
||||
DASHD_GOVOBJ_TYPES = {
|
||||
'proposal': 1,
|
||||
'superblock': 2,
|
||||
}
|
||||
GOVOBJ_TYPE_STRINGS = {
|
||||
1: 'proposal',
|
||||
2: 'trigger', # it should be trigger here, not superblock
|
||||
}
|
||||
|
||||
# schema version follows format 'YYYYMMDD-NUM'.
|
||||
#
|
||||
# YYYYMMDD is the 4-digit year, 2-digit month and 2-digit day the schema
|
||||
# changes were added.
|
||||
#
|
||||
# NUM is a numerical version of changes for that specific date. If the date
|
||||
# changes, the NUM resets to 1.
|
||||
SCHEMA_VERSION = '20170111-1'
|
||||
|
||||
# === models ===
|
||||
|
||||
|
||||
class BaseModel(playhouse.signals.Model):
    """Common base for Sentinel models; binds them all to the shared database."""

    class Meta:
        database = db

    @classmethod
    def is_database_connected(self):
        """Return True while the shared database connection is open."""
        # NOTE(review): peewee convention would name this first argument `cls`
        return not db.is_closed()
|
||||
|
||||
|
||||
class GovernanceObject(BaseModel):
    """Row mirroring one daemon governance object, plus cached vote counts."""
    parent_id = IntegerField(default=0)
    # NOTE(review): this default is evaluated once at import time, not per
    # row -- confirm that is the intended behaviour
    object_creation_time = IntegerField(default=int(time.time()))
    object_hash = CharField(max_length=64)
    object_parent_hash = CharField(default='0')
    object_type = IntegerField(default=0)
    object_revision = IntegerField(default=1)
    object_fee_tx = CharField(default='')
    # vote tallies as reported by the daemon (synced on every run)
    yes_count = IntegerField(default=0)
    no_count = IntegerField(default=0)
    abstain_count = IntegerField(default=0)
    absolute_yes_count = IntegerField(default=0)

    class Meta:
        db_table = 'governance_objects'
|
||||
|
||||
# sync dashd gobject list with our local relational DB backend
@classmethod
def sync(self, dashd):
    """Mirror the daemon's governance-object list into the local DB.

    First purges rows whose hashes are no longer on the network, then
    imports/updates every object the daemon reports. Errors in either
    phase are logged via printdbg and skipped, never raised.
    """
    golist = dashd.rpc_command('gobject', 'list')

    # objects which are removed from the network should be removed from the DB
    try:
        for purged in self.purged_network_objects(list(golist.keys())):
            # SOMEDAY: possible archive step here
            purged.delete_instance(recursive=True, delete_nullable=True)
    except Exception as e:
        printdbg("Got an error while purging: %s" % e)

    for item in golist.values():
        try:
            (go, subobj) = self.import_gobject_from_dashd(dashd, item)
        except Exception as e:
            printdbg("Got an error upon import: %s" % e)
|
||||
|
||||
@classmethod
def purged_network_objects(self, network_object_hashes):
    """Return a query of local rows whose object_hash is absent from `network_object_hashes`."""
    query = self.select()
    if network_object_hashes:
        # peewee: `<<` is the IN operator, so this is "NOT IN (...)"
        query = query.where(~(self.object_hash << network_object_hashes))
    return query
|
||||
|
||||
@classmethod
def import_gobject_from_dashd(self, dashd, rec):
    """Create/update the GovernanceObject row (and its typed sub-object)
    for one `gobject list` record from the daemon.

    Returns (govobj, subobj). subobj is None when the payload is invalid
    or fails model construction; in that case a delete vote is cast.
    """
    import decimal
    import dashlib
    import binascii
    import gobject_json

    object_hash = rec['Hash']

    # vote tallies and metadata: the daemon is authoritative
    gobj_dict = {
        'object_hash': object_hash,
        'object_fee_tx': rec['CollateralHash'],
        'absolute_yes_count': rec['AbsoluteYesCount'],
        'abstain_count': rec['AbstainCount'],
        'yes_count': rec['YesCount'],
        'no_count': rec['NoCount'],
    }

    # deserialise and extract object
    json_str = binascii.unhexlify(rec['DataHex']).decode('utf-8')
    dikt = gobject_json.extract_object(json_str)

    subobj = None

    # map the daemon's numeric object type to our model classes
    type_class_map = {
        1: Proposal,
        2: Superblock,
    }
    subclass = type_class_map[dikt['type']]

    # set object_type in govobj table
    gobj_dict['object_type'] = subclass.govobj_type

    # exclude any invalid model data from dashd...
    valid_keys = subclass.serialisable_fields()
    subdikt = {k: dikt[k] for k in valid_keys if k in dikt}

    # get/create, then sync vote counts from dashd, with every run
    govobj, created = self.get_or_create(object_hash=object_hash, defaults=gobj_dict)
    if created:
        printdbg("govobj created = %s" % created)
    count = govobj.update(**gobj_dict).where(self.id == govobj.id).execute()
    if count:
        printdbg("govobj updated = %d" % count)
    subdikt['governance_object'] = govobj

    # get/create, then sync payment amounts, etc. from dashd - Dashd is the master
    try:
        newdikt = subdikt.copy()
        newdikt['object_hash'] = object_hash
        # validate via a throwaway instance before touching the DB
        if subclass(**newdikt).is_valid() is False:
            govobj.vote_delete(dashd)
            return (govobj, None)

        subobj, created = subclass.get_or_create(object_hash=object_hash, defaults=subdikt)

    except Exception as e:
        # in this case, vote as delete, and log the vote in the DB
        printdbg("Got invalid object from dashd! %s" % e)
        govobj.vote_delete(dashd)
        return (govobj, None)

    if created:
        printdbg("subobj created = %s" % created)
    count = subobj.update(**subdikt).where(subclass.id == subobj.id).execute()
    if count:
        printdbg("subobj updated = %d" % count)

    # ATM, returns a tuple w/gov attributes and the govobj
    return (govobj, subobj)
|
||||
|
||||
def vote_delete(self, dashd):
    """Cast a delete/yes vote for this object unless one is already recorded."""
    already_voted = self.voted_on(signal=VoteSignals.delete, outcome=VoteOutcomes.yes)
    if not already_voted:
        self.vote(dashd, VoteSignals.delete, VoteOutcomes.yes)
|
||||
|
||||
def get_vote_command(self, signal, outcome):
    """Build the `gobject vote-conf` argument list for this object."""
    return ['gobject', 'vote-conf', self.object_hash, signal.name, outcome.name]
|
||||
|
||||
def vote(self, dashd, signal, outcome):
    """Cast (or re-cast) this node's vote on the object via the daemon.

    No-op when the object hash is missing/invalid or an identical
    signal/outcome vote is already recorded; a stale vote (same signal,
    different outcome) is deleted before re-voting. When the RPC vote
    fails, the vote is re-synced from the network instead.
    """
    import dashlib

    # At this point, will probably never reach here. But doesn't hurt to
    # have an extra check just in case objects get out of sync (people will
    # muck with the DB).
    if (self.object_hash == '0' or not misc.is_hash(self.object_hash)):
        printdbg("No governance object hash, nothing to vote on.")
        return

    # have I already voted on this gobject with this particular signal and outcome?
    if self.voted_on(signal=signal):
        printdbg("Found a vote for this gobject/signal...")
        vote = self.votes.where(Vote.signal == signal)[0]

        # if the outcome is the same, move on, nothing more to do
        if vote.outcome == outcome:
            # move on.
            printdbg("Already voted for this same gobject/signal/outcome, no need to re-vote.")
            return
        else:
            printdbg("Found a STALE vote for this gobject/signal, deleting so that we can re-vote.")
            vote.delete_instance()

    else:
        printdbg("Haven't voted on this gobject/signal yet...")

    # now ... vote!

    vote_command = self.get_vote_command(signal, outcome)
    printdbg(' '.join(vote_command))
    output = dashd.rpc_command(*vote_command)

    # extract vote output parsing to external lib
    voted = dashlib.did_we_vote(output)

    if voted:
        printdbg('VOTE success, saving Vote object to database')
        Vote(governance_object=self, signal=signal, outcome=outcome,
             object_hash=self.object_hash).save()
    else:
        printdbg('VOTE failed, trying to sync with network vote')
        self.sync_network_vote(dashd, signal)
|
||||
|
||||
    def sync_network_vote(self, dashd, signal):
        """Recover our on-network vote for *signal* into the local database.

        Used after a local vote RPC fails: if the network already has our vote
        for this object/signal, record it locally so we don't try to re-vote.
        """
        printdbg('\tSyncing network vote for object %s with signal %s' % (self.object_hash, signal.name))
        vote_info = dashd.get_my_gobject_votes(self.object_hash)
        for vdikt in vote_info:
            # only interested in votes for the requested signal
            if vdikt['signal'] != signal.name:
                continue

            # ensure valid outcome
            outcome = VoteOutcomes.get(vdikt['outcome'])
            if not outcome:
                continue

            printdbg('\tFound a matching valid vote on the network, outcome = %s' % vdikt['outcome'])
            Vote(governance_object=self, signal=signal, outcome=outcome,
                 object_hash=self.object_hash).save()
|
||||
|
||||
def voted_on(self, **kwargs):
|
||||
signal = kwargs.get('signal', None)
|
||||
outcome = kwargs.get('outcome', None)
|
||||
|
||||
query = self.votes
|
||||
|
||||
if signal:
|
||||
query = query.where(Vote.signal == signal)
|
||||
|
||||
if outcome:
|
||||
query = query.where(Vote.outcome == outcome)
|
||||
|
||||
count = query.count()
|
||||
return count
|
||||
|
||||
|
||||
class Setting(BaseModel):
    """Generic key/value storage table for application-level settings."""
    name = CharField(default='')
    value = CharField(default='')
    # BUGFIX: pass the callable itself (not its call result) so each row gets
    # its own creation time instead of the time this module was imported
    created_at = DateTimeField(default=datetime.datetime.utcnow)
    updated_at = DateTimeField(default=datetime.datetime.utcnow)

    class Meta:
        db_table = 'settings'
|
||||
|
||||
|
||||
class Proposal(GovernanceClass, BaseModel):
    """A budget proposal governance object (a request for superblock funding)."""
    governance_object = ForeignKeyField(GovernanceObject, related_name='proposals', on_delete='CASCADE', on_update='CASCADE')
    name = CharField(default='', max_length=40)
    url = CharField(default='')
    start_epoch = IntegerField()
    end_epoch = IntegerField()
    payment_address = CharField(max_length=36)
    payment_amount = DecimalField(max_digits=16, decimal_places=8)
    object_hash = CharField(max_length=64)

    # src/governance-validators.cpp
    MAX_DATA_SIZE = 512

    govobj_type = DASHD_GOVOBJ_TYPES['proposal']

    class Meta:
        db_table = 'proposals'

    def is_valid(self):
        """Validate proposal name, epochs, amount, address, URL and size.

        Returns True only when every check passes; any exception during
        validation marks the proposal invalid.
        """
        import dashlib

        printdbg("In Proposal#is_valid, for Proposal: %s" % self.__dict__)

        try:
            # proposal name exists and is not null/whitespace
            if (len(self.name.strip()) == 0):
                printdbg("\tInvalid Proposal name [%s], returning False" % self.name)
                return False

            # proposal name is normalized (something like "[a-zA-Z0-9-_]+")
            if not re.match(r'^[-_a-zA-Z0-9]+$', self.name):
                printdbg("\tInvalid Proposal name [%s] (does not match regex), returning False" % self.name)
                return False

            # end date < start date
            if (self.end_epoch <= self.start_epoch):
                printdbg("\tProposal end_epoch [%s] <= start_epoch [%s] , returning False" % (self.end_epoch, self.start_epoch))
                return False

            # amount must be numeric
            if misc.is_numeric(self.payment_amount) is False:
                printdbg("\tProposal amount [%s] is not valid, returning False" % self.payment_amount)
                return False

            # amount can't be negative or 0
            if (float(self.payment_amount) <= 0):
                printdbg("\tProposal amount [%s] is negative or zero, returning False" % self.payment_amount)
                return False

            # payment address is valid base58 dash addr, non-multisig
            if not dashlib.is_valid_address(self.payment_address, config.network):
                printdbg("\tPayment address [%s] not a valid Dash address for network [%s], returning False" % (self.payment_address, config.network))
                return False

            # URL
            if (len(self.url.strip()) < 4):
                printdbg("\tProposal URL [%s] too short, returning False" % self.url)
                return False

            # proposal URL has any whitespace
            if (re.search(r'\s', self.url)):
                # BUGFIX: log the offending URL (previously logged self.name)
                printdbg("\tProposal URL [%s] has whitespace, returning False" % self.url)
                return False

            # Dash Core restricts proposals to 512 bytes max
            # (serialise() is hex, i.e. 2 chars per byte, hence * 2)
            if len(self.serialise()) > (self.MAX_DATA_SIZE * 2):
                printdbg("\tProposal [%s] is too big, returning False" % self.name)
                return False

            try:
                parsed = urlparse.urlparse(self.url)
            except Exception as e:
                printdbg("\tUnable to parse Proposal URL, marking invalid: %s" % e)
                return False

        except Exception as e:
            # BUGFIX: str(e) instead of e.message -- e.message is deprecated in
            # Python 2 and does not exist in Python 3
            printdbg("Unable to validate in Proposal#is_valid, marking invalid: %s" % str(e))
            return False

        printdbg("Leaving Proposal#is_valid, Valid = True")
        return True

    def is_expired(self, superblockcycle=None):
        """True when the proposal's end_epoch plus an expiration window has passed.

        Raises if superblockcycle (in blocks) is not supplied.
        """
        from constants import SUPERBLOCK_FUDGE_WINDOW
        import dashlib

        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        printdbg("In Proposal#is_expired, for Proposal: %s" % self.__dict__)
        now = misc.now()
        printdbg("\tnow = %s" % now)

        # half the SB cycle, converted to seconds
        # add the fudge_window in seconds, defined elsewhere in Sentinel
        expiration_window_seconds = int(
            (dashlib.blocks_to_seconds(superblockcycle) / 2) +
            SUPERBLOCK_FUDGE_WINDOW
        )
        printdbg("\texpiration_window_seconds = %s" % expiration_window_seconds)

        # "fully expires" adds the expiration window to end time to ensure a
        # valid proposal isn't excluded from SB by cutting it too close
        fully_expires_at = self.end_epoch + expiration_window_seconds
        printdbg("\tfully_expires_at = %s" % fully_expires_at)

        if (fully_expires_at < now):
            printdbg("\tProposal end_epoch [%s] < now [%s] , returning True" % (self.end_epoch, now))
            return True

        printdbg("Leaving Proposal#is_expired, Expired = False")
        return False

    @classmethod
    def approved_and_ranked(self, proposal_quorum, next_superblock_max_budget):
        """Return valid proposals over the vote quorum, best-ranked first."""
        # return all approved proposals, in order of descending vote count
        #
        # we need a secondary 'order by' in case of a tie on vote count, since
        # superblocks must be deterministic
        query = (self
                 .select(self, GovernanceObject)  # Note that we are selecting both models.
                 .join(GovernanceObject)
                 .where(GovernanceObject.absolute_yes_count > proposal_quorum)
                 .order_by(GovernanceObject.absolute_yes_count.desc(), GovernanceObject.object_hash.desc())
                 )

        ranked = []
        for proposal in query:
            proposal.max_budget = next_superblock_max_budget
            if proposal.is_valid():
                ranked.append(proposal)

        return ranked

    @classmethod
    def expired(self, superblockcycle=None):
        """Return all proposals whose payment window has fully lapsed."""
        if not superblockcycle:
            raise Exception("Required field superblockcycle missing.")

        expired = []

        for proposal in self.select():
            if proposal.is_expired(superblockcycle):
                expired.append(proposal)

        return expired

    @property
    def rank(self):
        # rank is simply the absolute-yes vote count of the parent gobject
        rank = 0
        if self.governance_object:
            rank = self.governance_object.absolute_yes_count
        return rank
|
||||
|
||||
|
||||
class Superblock(BaseModel, GovernanceClass):
    """A superblock (budget payment) governance object."""
    governance_object = ForeignKeyField(GovernanceObject, related_name='superblocks', on_delete='CASCADE', on_update='CASCADE')
    event_block_height = IntegerField()
    payment_addresses = TextField()
    payment_amounts = TextField()
    proposal_hashes = TextField(default='')
    sb_hash = CharField()
    object_hash = CharField(max_length=64)

    govobj_type = DASHD_GOVOBJ_TYPES['superblock']
    only_masternode_can_submit = True

    class Meta:
        db_table = 'superblocks'

    def is_valid(self):
        """Validate payment addresses, amounts and proposal hashes; True/False."""
        import dashlib
        import decimal

        printdbg("In Superblock#is_valid, for SB: %s" % self.__dict__)

        # it's a string from the DB...
        addresses = self.payment_addresses.split('|')
        for addr in addresses:
            if not dashlib.is_valid_address(addr, config.network):
                printdbg("\tInvalid address [%s], returning False" % addr)
                return False

        amounts = self.payment_amounts.split('|')
        for amt in amounts:
            if not misc.is_numeric(amt):
                printdbg("\tAmount [%s] is not numeric, returning False" % amt)
                return False

            # no negative or zero amounts allowed
            damt = decimal.Decimal(amt)
            if not damt > 0:
                printdbg("\tAmount [%s] is zero or negative, returning False" % damt)
                return False

        # verify proposal hashes correctly formatted...
        if len(self.proposal_hashes) > 0:
            hashes = self.proposal_hashes.split('|')
            for object_hash in hashes:
                if not misc.is_hash(object_hash):
                    printdbg("\tInvalid proposal hash [%s], returning False" % object_hash)
                    return False

        # ensure number of payment addresses matches number of payments
        if len(addresses) != len(amounts):
            printdbg("\tNumber of payment addresses [%s] != number of payment amounts [%s], returning False" % (len(addresses), len(amounts)))
            return False

        printdbg("Leaving Superblock#is_valid, Valid = True")
        return True

    def hash(self):
        """Integer hash of the serialised superblock payload."""
        import dashlib
        return dashlib.hashit(self.serialise())

    def hex_hash(self):
        """Hex string form of hash(); used as the deterministic sb_hash."""
        return "%x" % self.hash()

    # workaround for now, b/c we must uniquely ID a superblock with the hash,
    # in case of differing superblocks
    #
    # this prevents sb_hash from being added to the serialised fields
    @classmethod
    def serialisable_fields(self):
        return [
            'event_block_height',
            'payment_addresses',
            'payment_amounts',
            'proposal_hashes'
        ]

    # has this masternode voted to fund *any* superblocks at the given
    # event_block_height?
    @classmethod
    def is_voted_funding(self, ebh):
        count = (self.select()
                 .where(self.event_block_height == ebh)
                 .join(GovernanceObject)
                 .join(Vote)
                 .join(Signal)
                 .switch(Vote)  # switch join query context back to Vote
                 .join(Outcome)
                 .where(Vote.signal == VoteSignals.funding)
                 .where(Vote.outcome == VoteOutcomes.yes)
                 .count())
        return count

    @classmethod
    def latest(self):
        """Return the Superblock with the highest event_block_height, or None."""
        try:
            # BUGFIX: desc() must be applied to the ordering *field*, not to
            # the query object (a SelectQuery has no desc() method)
            obj = self.select().order_by(self.event_block_height.desc()).limit(1)[0]
        except IndexError as e:
            obj = None
        return obj

    @classmethod
    def at_height(self, ebh):
        """Query for all superblocks at the given event block height."""
        query = (self.select().where(self.event_block_height == ebh))
        return query

    @classmethod
    def find_highest_deterministic(self, sb_hash):
        """Of the superblocks sharing sb_hash, return the one with the highest
        object_hash (or None) -- highest block hash wins."""
        # highest block hash wins
        query = (self.select()
                 .where(self.sb_hash == sb_hash)
                 .order_by(self.object_hash.desc()))
        try:
            obj = query.limit(1)[0]
        except IndexError as e:
            obj = None
        return obj
|
||||
|
||||
|
||||
# ok, this is an awkward way to implement these...
# "hook" into the Superblock model and run this code just before any save()
from playhouse.signals import pre_save


@pre_save(sender=Superblock)
def on_save_handler(model_class, instance, created):
    # keep the deterministic superblock hash in sync with the serialised
    # payload every time a Superblock row is written
    instance.sb_hash = instance.hex_hash()
|
||||
|
||||
|
||||
class Signal(BaseModel):
    """Lookup table of vote signals (e.g. funding, valid, delete)."""
    name = CharField(unique=True)
    # BUGFIX: pass the callable (no parentheses) so each row is timestamped
    # at creation time rather than at module-import time
    created_at = DateTimeField(default=datetime.datetime.utcnow)
    updated_at = DateTimeField(default=datetime.datetime.utcnow)

    class Meta:
        db_table = 'signals'
|
||||
|
||||
|
||||
class Outcome(BaseModel):
    """Lookup table of vote outcomes (e.g. yes, no, abstain)."""
    name = CharField(unique=True)
    # BUGFIX: pass the callable (no parentheses) so each row is timestamped
    # at creation time rather than at module-import time
    created_at = DateTimeField(default=datetime.datetime.utcnow)
    updated_at = DateTimeField(default=datetime.datetime.utcnow)

    class Meta:
        db_table = 'outcomes'
|
||||
|
||||
|
||||
class Vote(BaseModel):
    """A single vote (signal + outcome) cast on a governance object."""
    governance_object = ForeignKeyField(GovernanceObject, related_name='votes', on_delete='CASCADE', on_update='CASCADE')
    signal = ForeignKeyField(Signal, related_name='votes', on_delete='CASCADE', on_update='CASCADE')
    outcome = ForeignKeyField(Outcome, related_name='votes', on_delete='CASCADE', on_update='CASCADE')
    # BUGFIX: pass the callable (no parentheses) so each row is timestamped
    # at creation time rather than at module-import time
    voted_at = DateTimeField(default=datetime.datetime.utcnow)
    created_at = DateTimeField(default=datetime.datetime.utcnow)
    updated_at = DateTimeField(default=datetime.datetime.utcnow)
    object_hash = CharField(max_length=64)

    class Meta:
        db_table = 'votes'
|
||||
|
||||
|
||||
class Transient(object):
    """An expiring key/value pair persisted via the Setting model.

    Values are stored as JSON in Setting rows whose names carry the
    '__transient_' prefix; expiry is created_at + timeout.
    """

    def __init__(self, **kwargs):
        # created_at: unix timestamp; timeout: lifetime in seconds; value: payload
        for key in ['created_at', 'timeout', 'value']:
            self.__setattr__(key, kwargs.get(key))

    def is_expired(self):
        """True once the transient has outlived its timeout."""
        return (self.created_at + self.timeout) < misc.now()

    @classmethod
    def deserialise(self, json):
        """Parse a stored JSON value; raise when required fields are missing."""
        try:
            dikt = simplejson.loads(json)
        # a no-op, but this tells us what exception to expect
        except simplejson.scanner.JSONDecodeError as e:
            raise e

        lizt = [dikt.get(key, None) for key in ['timeout', 'value']]
        lizt = list(set(lizt))
        if None in lizt:
            printdbg("Not all fields required for transient -- moving along.")
            raise Exception("Required fields not present for transient.")

        return dikt

    @classmethod
    def from_setting(self, setting):
        """Build a Transient from a Setting row; created_at comes from the row."""
        dikt = Transient.deserialise(setting.value)
        dikt['created_at'] = int((setting.created_at - datetime.datetime.utcfromtimestamp(0)).total_seconds())
        return Transient(**dikt)

    @classmethod
    def cleanup(self):
        """Best-effort deletion of expired transient Setting rows."""
        for s in Setting.select().where(Setting.name.startswith('__transient_')):
            try:
                t = Transient.from_setting(s)
            # BUGFIX: catch Exception rather than a bare except so that
            # KeyboardInterrupt/SystemExit are not silently swallowed;
            # unparseable rows are still skipped deliberately
            except Exception:
                continue

            if t.is_expired():
                s.delete_instance()

    @classmethod
    def get(self, name):
        """Return the stored value for *name*, or False if absent/expired.

        Expired entries are deleted as a side effect.
        """
        setting_name = "__transient_%s" % (name)

        try:
            the_setting = Setting.get(Setting.name == setting_name)
            t = Transient.from_setting(the_setting)
        except Setting.DoesNotExist as e:
            return False

        if t.is_expired():
            the_setting.delete_instance()
            return False
        else:
            return t.value

    @classmethod
    def set(self, name, value, timeout):
        """Store *value* under *name* with the given timeout.

        NOTE(review): uses get_or_create with defaults, so an existing
        transient's value/timeout is NOT overwritten -- confirm intended.
        """
        setting_name = "__transient_%s" % (name)
        setting_dikt = {
            'value': simplejson.dumps({
                'value': value,
                'timeout': timeout,
            }),
        }
        setting, created = Setting.get_or_create(name=setting_name, defaults=setting_dikt)
        return setting

    @classmethod
    def delete(self, name):
        """Delete the transient for *name*; False when it did not exist."""
        setting_name = "__transient_%s" % (name)
        try:
            s = Setting.get(Setting.name == setting_name)
        except Setting.DoesNotExist as e:
            return False
        return s.delete_instance()
|
||||
|
||||
# === /models ===
|
||||
|
||||
|
||||
def load_db_seeds():
    """Ensure the Signal and Outcome seed rows exist.

    Returns the number of rows that had to be created.
    """
    rows_created = 0

    seeds = [
        (Signal, ['funding', 'valid', 'delete']),
        (Outcome, ['yes', 'no', 'abstain']),
    ]

    for model, names in seeds:
        for name in names:
            _, created = model.get_or_create(name=name)
            if created:
                rows_created += 1

    return rows_created
|
||||
|
||||
|
||||
def db_models():
    """ Return a list of Sentinel DB models. """
    return [
        GovernanceObject,
        Setting,
        Proposal,
        Superblock,
        Signal,
        Outcome,
        Vote,
    ]
|
||||
|
||||
|
||||
def check_db_sane():
    """ Ensure DB tables exist, create them if they don't. """
    # may drop and recreate everything when the schema version mismatches
    check_db_schema_version()

    missing_table_models = []

    for model in db_models():
        if not getattr(model, 'table_exists')():
            missing_table_models.append(model)
            printdbg("[warning]: Table for %s (%s) doesn't exist in DB." % (model, model._meta.db_table))

    if missing_table_models:
        printdbg("[warning]: Missing database tables. Auto-creating tables.")
        try:
            # safe=True: skip tables that already exist
            db.create_tables(missing_table_models, safe=True)
        except (peewee.InternalError, peewee.OperationalError, peewee.ProgrammingError) as e:
            print("[error] Could not create tables: %s" % e)

    # record the current schema version and prune rows with known-bad data
    update_schema_version()
    purge_invalid_amounts()
|
||||
|
||||
|
||||
def check_db_schema_version():
    """ Ensure DB schema is correct version. Drop tables if not. """
    db_schema_version = None

    try:
        db_schema_version = Setting.get(Setting.name == 'DB_SCHEMA_VERSION').value
    except (peewee.OperationalError, peewee.DoesNotExist, peewee.ProgrammingError) as e:
        # a fresh DB has no settings table/row yet; treat as version None
        printdbg("[info]: Can't get DB_SCHEMA_VERSION...")

    printdbg("[info]: SCHEMA_VERSION (code) = [%s]" % SCHEMA_VERSION)
    printdbg("[info]: DB_SCHEMA_VERSION = [%s]" % db_schema_version)
    if (SCHEMA_VERSION != db_schema_version):
        # destructive migration: drop every known table so check_db_sane()
        # can recreate them from the current model definitions
        printdbg("[info]: Schema version mis-match. Syncing tables.")
        try:
            existing_table_names = db.get_tables()
            existing_models = [m for m in db_models() if m._meta.db_table in existing_table_names]
            if (existing_models):
                printdbg("[info]: Dropping tables...")
                db.drop_tables(existing_models, safe=False, cascade=False)
        except (peewee.InternalError, peewee.OperationalError, peewee.ProgrammingError) as e:
            print("[error] Could not drop tables: %s" % e)
|
||||
|
||||
|
||||
def update_schema_version():
    """Persist the code's SCHEMA_VERSION into the settings table."""
    schema_version_setting, created = Setting.get_or_create(name='DB_SCHEMA_VERSION', defaults={'value': SCHEMA_VERSION})
    if (schema_version_setting.value != SCHEMA_VERSION):
        # BUGFIX: previously save() was called without assigning the new
        # value, so a stale DB_SCHEMA_VERSION was never actually overwritten
        schema_version_setting.value = SCHEMA_VERSION
        schema_version_setting.save()
    return
|
||||
|
||||
|
||||
def purge_invalid_amounts():
    """Delete governance objects whose proposal payment_amount contains a
    comma (legacy bad data), cascading to their child rows."""
    result_set = Proposal.select(
        Proposal.id,
        Proposal.governance_object
    ).where(Proposal.payment_amount.contains(','))

    for proposal in result_set:
        gobject = GovernanceObject.get(
            GovernanceObject.id == proposal.governance_object_id
        )
        printdbg("[info]: Pruning governance object w/invalid amount: %s" % gobject.object_hash)
        # recursive delete removes the proposal (and votes) along with it
        gobject.delete_instance(recursive=True, delete_nullable=True)
|
||||
|
||||
|
||||
# sanity checks...
check_db_sane()  # ensure tables exist
load_db_seeds()  # ensure seed data loaded

# convenience accessors
# NOTE: built once at import time -- relies on the seed rows having just been
# loaded above; e.g. VoteSignals.funding / VoteOutcomes.yes
VoteSignals = misc.Bunch(**{sig.name: sig for sig in Signal.select()})
VoteOutcomes = misc.Bunch(**{out.name: out for out in Outcome.select()})
|
||||
@ -0,0 +1,50 @@
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
|
||||
import init
|
||||
import misc
|
||||
from models import Transient
|
||||
from misc import printdbg
|
||||
import time
|
||||
import random
|
||||
|
||||
|
||||
class Scheduler(object):
    """Throttles sentinel runs using a persisted next-run timestamp."""

    # Setting-table key (via Transient) holding the next allowed run time
    transient_key_scheduled = 'NEXT_SENTINEL_CHECK_AT'
    # upper bound, in seconds, for the random gap between runs
    random_interval_max = 1200

    @classmethod
    def is_run_time(self):
        """Return True once 'now' has reached the stored next-run time."""
        # missing/expired transient yields False -> treated as 0 (run now)
        next_run_time = Transient.get(self.transient_key_scheduled) or 0
        now = misc.now()

        printdbg("current_time = %d" % now)
        printdbg("next_run_time = %d" % next_run_time)

        return now >= next_run_time

    @classmethod
    def clear_schedule(self):
        # forget the stored next-run time, allowing an immediate run
        Transient.delete(self.transient_key_scheduled)

    @classmethod
    def schedule_next_run(self, random_interval=None):
        """Store a randomized future timestamp before which runs are skipped."""
        if not random_interval:
            random_interval = self.random_interval_max

        next_run_at = misc.now() + random.randint(1, random_interval)
        printdbg("scheduling next sentinel run for %d" % next_run_at)
        # NOTE(review): next_run_at is passed as both the value and the
        # Transient timeout -- the timeout argument looks like an absolute
        # timestamp rather than a duration; confirm against Transient.is_expired
        Transient.set(self.transient_key_scheduled, next_run_at,
                      next_run_at)

    @classmethod
    def delay(self, delay_in_seconds=None):
        """Sleep 0-59 seconds to spread cron-triggered runs within the minute."""
        if not delay_in_seconds:
            delay_in_seconds = random.randint(0, 60)

        # do not delay longer than 60 seconds
        # in case an int > 60 given as argument
        # (modulo, so 60 itself wraps to a 0-second delay)
        delay_in_seconds = delay_in_seconds % 60

        printdbg("Delay of [%d] seconds for cron minute offset" % delay_in_seconds)
        time.sleep(delay_in_seconds)
|
||||
@ -0,0 +1,46 @@
|
||||
import sys
|
||||
import os
|
||||
import io
|
||||
import re
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
from misc import printdbg
|
||||
from dash_config import DashConfig
|
||||
|
||||
|
||||
class SibcoinConfig(DashConfig):
    """DashConfig variant using Sibcoin's default RPC ports."""

    @classmethod
    def get_rpc_creds(self, data, network='mainnet'):
        """Extract RPC user/password/port from sibcoin.conf file contents."""
        # pull rpcuser/rpcpassword/rpcport lines out of the config text
        found = re.findall(r'rpc(user|password|port)=(.*?)$', data, re.MULTILINE)

        # python >= 2.7
        creds = {key: value for (key, value) in found}

        # fall back to the network's standard Sibcoin port when the config
        # file does not specify one
        if 'port' not in creds:
            creds[u'port'] = 1944 if (network == 'mainnet') else 11944

        # normalise to an int whether defaulted or read from the file
        creds[u'port'] = int(creds[u'port'])

        # return a dictionary with RPC credential key, value pairs
        return creds

    @classmethod
    def tokenize(self, filename, throw_exception=False):
        """Parse key=value lines from *filename* into a dict.

        Returns an empty dict on read errors unless throw_exception is True.
        """
        tokens = {}
        try:
            contents = self.slurp_config_file(filename)
            pairs = re.findall(r'(.*?)=(.*?)$', contents, re.MULTILINE)
            tokens = {key: value for (key, value) in pairs}
        except IOError as e:
            printdbg("[warning] error reading config file: %s" % e)
            if throw_exception:
                raise e

        return tokens
|
||||
@ -0,0 +1,33 @@
|
||||
"""
|
||||
dashd JSONRPC interface
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
|
||||
import config
|
||||
import base58
|
||||
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
|
||||
from masternode import Masternode
|
||||
from decimal import Decimal
|
||||
import time
|
||||
from dashd import DashDaemon
|
||||
|
||||
|
||||
class SibcoinDaemon(DashDaemon):
    """DashDaemon specialised for Sibcoin; only construction differs."""

    @classmethod
    def from_sibcoin_conf(self, sibcoin_dot_conf):
        """Build a SibcoinDaemon from RPC creds parsed out of sibcoin.conf."""
        # local import -- presumably avoids an import cycle at load time; confirm
        from sib_config import SibcoinConfig
        config_text = SibcoinConfig.slurp_config_file(sibcoin_dot_conf)
        creds = SibcoinConfig.get_rpc_creds(config_text, config.network)

        # the RPC host comes from sentinel's own config, not the coin conf file
        creds[u'host'] = config.rpc_host

        return self(**creds)

    @classmethod
    def from_dash_conf(self, dash_dot_conf):
        # Dash-style construction is deliberately disabled for Sibcoin
        raise RuntimeWarning('This method should not be used with sibcoin')
|
||||
|
||||
|
||||
@ -0,0 +1,6 @@
|
||||
peewee==2.8.3
|
||||
py==1.4.31
|
||||
pycodestyle==2.4.0
|
||||
pytest==3.0.1
|
||||
python-bitcoinrpc==1.0
|
||||
simplejson==3.8.2
|
||||
@ -0,0 +1,11 @@
|
||||
# specify path to sibcoin.conf or leave blank
|
||||
# default is Sibcoin Core's standard location (~/.sibcoin/sibcoin.conf)
|
||||
sibcoin_conf=/root/.sibcoin/sibcoin.conf
|
||||
|
||||
# valid options are mainnet, testnet (default=mainnet)
|
||||
network=mainnet
|
||||
#network=testnet
|
||||
|
||||
# database connection details
|
||||
db_name=database/sentinel.db
|
||||
db_driver=sqlite
|
||||
@ -0,0 +1,16 @@
|
||||
# basic settings
|
||||
txindex=1
|
||||
testnet=1 # TESTNET
|
||||
logtimestamps=1
|
||||
|
||||
# optional indices
|
||||
txindex=1
|
||||
addressindex=1
|
||||
timestampindex=1
|
||||
spentindex=1
|
||||
|
||||
# JSONRPC
|
||||
server=1
|
||||
rpcuser=dashrpc
|
||||
rpcpassword=abcdefghijklmnopqrstuvwxyz0123456789ABCDEF10
|
||||
rpcallowip=127.0.0.1
|
||||
@ -0,0 +1,2 @@
|
||||
# run Dash-Sentinel every minute
|
||||
* * * * * cd /home/YOURUSERNAME/sentinel && ./venv/bin/python bin/sentinel.py >/dev/null 2>&1
|
||||
@ -0,0 +1,9 @@
|
||||
#!/bin/bash
# Bootstrap a Dash Core data directory with the example config for CI.
set -evx

# -p: don't abort (under set -e) when the directory already exists
mkdir -p ~/.dashcore

# safety check: only install the example config when no dash.conf exists
# BUGFIX: the check previously tested '.dash.conf' (leading dot), which never
# exists, so an existing user dash.conf would always be clobbered
if [ ! -f ~/.dashcore/dash.conf ]; then
    cp share/dash.conf.example ~/.dashcore/dash.conf
fi
|
||||
@ -0,0 +1,51 @@
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
os.environ['SENTINEL_ENV'] = 'test'
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'lib'))
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||
import config
|
||||
|
||||
from sibcoind import SibcoinDaemon
|
||||
from sib_config import SibcoinConfig
|
||||
|
||||
|
||||
def test_dashd():
    """Integration test: requires a reachable sibcoind RPC endpoint.

    Detects mainnet vs testnet from the config file and checks basic RPC
    commands, including the genesis block hash for the detected network.
    """
    config_text = SibcoinConfig.slurp_config_file(config.sibcoin_conf)
    network = 'mainnet'
    is_testnet = False
    genesis_hash = u'00000c492bf73490420868bc577680bfc4c60116e7e85343bc624787c21efa4c'
    for line in config_text.split("\n"):
        if line.startswith('testnet=1'):
            network = 'testnet'
            is_testnet = True
            genesis_hash = u'00000617791d0e19f524387f67e558b2a928b670b9a3b387ae003ad7f9093017'

    creds = SibcoinConfig.get_rpc_creds(config_text, network)
    sibcoind = SibcoinDaemon(**creds)
    assert sibcoind.rpc_command is not None

    assert hasattr(sibcoind, 'rpc_connection')

    # Dash testnet block 0 hash == 00000617791d0e19f524387f67e558b2a928b670b9a3b387ae003ad7f9093017
    # test commands without arguments
    info = sibcoind.rpc_command('getinfo')
    info_keys = [
        'blocks',
        'connections',
        'difficulty',
        'errors',
        'protocolversion',
        'proxy',
        'testnet',
        'timeoffset',
        'version',
    ]
    for key in info_keys:
        assert key in info
    assert info['testnet'] is is_testnet

    # test commands with args
    assert sibcoind.rpc_command('getblockhash', 0) == genesis_hash
|
||||
@ -0,0 +1,3 @@
|
||||
network=testnet
|
||||
db_name=database/sentinel.db
|
||||
db_driver=sqlite
|
||||
@ -0,0 +1,315 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
os.environ['SENTINEL_ENV'] = 'test'
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../../test_sentinel.conf'))
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../../lib')))
|
||||
import misc
|
||||
import config
|
||||
from models import GovernanceObject, Proposal, Vote
|
||||
|
||||
|
||||
# clear DB tables before each execution
def setup():
    # clear tables first; delete children (Vote) before parents so foreign
    # key references never dangle
    Vote.delete().execute()
    Proposal.delete().execute()
    GovernanceObject.delete().execute()
|
||||
|
||||
|
||||
def teardown():
    # no per-test cleanup needed; setup() wipes the tables before each test
    pass
|
||||
|
||||
|
||||
# list of proposal govobjs to import for testing
# (shaped like the output of dashd's 'gobject list' RPC)
@pytest.fixture
def go_list_proposals():
    items = [
        {u'AbsoluteYesCount': 1000,
         u'AbstainCount': 7,
         u'CollateralHash': u'996eae8ba8dbe5152ccb302ba513cf59b79fa95a7899fe34519804e4a4e6c94e',
         u'DataHex': u'5b5b2270726f706f73616c222c7b22656e645f65706f6368223a2232313232353230343030222c226e616d65223a2274657374222c227061796d656e745f61646472657373223a22736352613467356154697a6f724c31484a7179326431796f4d6f6e456f677239675a222c227061796d656e745f616d6f756e74223a223235222c2273746172745f65706f6368223a2231343930313833313830222c2274797065223a312c2275726c223a2268747470733a2f2f736962636f696e2e6f7267227d5d5d',
         u'DataString': u'[["proposal",{"end_epoch":"2122520400","name":"test","payment_address":"scRa4g5aTizorL1HJqy2d1yoMonEogr9gZ","payment_amount":"25","start_epoch":"1490183180","type":1,"url":"https://sibcoin.org"}]]',
         u'Hash': u'7e38a64c2e5275b978e0075be2d87765b91f1bab75285de6818c00fb009465be',
         u'IsValidReason': u'',
         u'NoCount': 25,
         u'YesCount': 1025,
         u'fBlockchainValidity': True,
         u'fCachedDelete': False,
         u'fCachedEndorsed': False,
         u'fCachedFunding': True,
         u'fCachedValid': True},
        {u'AbsoluteYesCount': 11,
         u'AbstainCount': 0,
         u'CollateralHash': u'0542fe1a708ebc5857a1a86c9c394792e89302df070f604c5a90e2d6dcddf6b2',
         u'DataHex': u'5b5b2270726f706f73616c222c7b22656e645f65706f6368223a2232313232353230343030222c226e616d65223a22746573745f32222c227061796d656e745f61646472657373223a22734e74335a7a686963513277713847545334577644475745513466554e766132636f222c227061796d656e745f616d6f756e74223a2235222c2273746172745f65706f6368223a2231343930333131303237222c2274797065223a312c2275726c223a2268747470733a2f2f736962636f696e2e6f72672f7465737432227d5d5d',
         u'DataString': u'[["proposal",{"end_epoch":"2122520400","name":"test_2","payment_address":"sNt3ZzhicQ2wq8GTS4WvDGWEQ4fUNva2co","payment_amount":"5","start_epoch":"1490311027","type":1,"url":"https://sibcoin.org/test2"}]]',
         u'Hash': u'62319ca4478962bfd6601095b29bae00cab0ad4d037f6eee55d1ccfae7d637eb',
         u'IsValidReason': u'',
         u'NoCount': 0,
         u'YesCount': 11,
         u'fBlockchainValidity': True,
         u'fCachedDelete': False,
         u'fCachedEndorsed': False,
         u'fCachedFunding': True,
         u'fCachedValid': True},
    ]

    return items
|
||||
|
||||
|
||||
# Proposal
@pytest.fixture
def proposal():
    """A valid, built-but-unsaved Proposal for validation tests."""
    # NOTE: no governance_object_id is set
    pobj = Proposal(
        start_epoch=1483250400,  # 2017-01-01
        end_epoch=2122520400,
        name="wine-n-cheeze-party",
        url="https://sibcoin.net/wine-n-cheeze-party",
        payment_address="sYNpoRsQDBN8qYFxeifN2XHazF58e14BbQ",
        payment_amount=13
    )

    # NOTE: this object is (intentionally) not saved yet.
    # We want to return an built, but unsaved, object
    return pobj
|
||||
|
||||
|
||||
def test_proposal_is_valid(proposal):
    """Exercise Proposal.is_valid() across epochs, name, address, URL, amount.

    Pattern: mutate one field of the fixture at a time, assert the expected
    validity, and rebuild a fresh copy from `orig` between sections.
    """
    from sibcoind import SibcoinDaemon
    import dashlib
    # NOTE(review): dashd is never used in this test — confirm whether
    # constructing the daemon handle is needed here at all.
    dashd = SibcoinDaemon.from_sibcoin_conf(config.sibcoin_conf)

    orig = Proposal(**proposal.get_dict())  # make a copy

    # fixture as-is should be valid
    assert proposal.is_valid() is True

    # ============================================================
    # ensure end_date not greater than start_date
    # ============================================================
    proposal.end_epoch = proposal.start_epoch
    assert proposal.is_valid() is False

    proposal.end_epoch = proposal.start_epoch - 1
    assert proposal.is_valid() is False

    proposal.end_epoch = proposal.start_epoch + 0
    assert proposal.is_valid() is False

    # end must be strictly after start
    proposal.end_epoch = proposal.start_epoch + 1
    assert proposal.is_valid() is True

    # reset
    proposal = Proposal(**orig.get_dict())

    # ============================================================
    # ensure valid proposal name
    # ============================================================

    proposal.name = ' heya!@209h '
    assert proposal.is_valid() is False

    # SQL-injection-looking name must be rejected
    proposal.name = "anything' OR 'x'='x"
    assert proposal.is_valid() is False

    proposal.name = ' '
    assert proposal.is_valid() is False

    proposal.name = ''
    assert proposal.is_valid() is False

    proposal.name = '0'
    assert proposal.is_valid() is True

    proposal.name = 'R66-Y'
    assert proposal.is_valid() is True

    proposal.name = 'valid-name'
    assert proposal.is_valid() is True

    # leading/trailing whitespace invalidates an otherwise-valid name
    proposal.name = ' mostly-valid-name'
    assert proposal.is_valid() is False

    proposal.name = 'also-mostly-valid-name '
    assert proposal.is_valid() is False

    proposal.name = ' similarly-kinda-valid-name '
    assert proposal.is_valid() is False

    # spaces and non-ASCII characters are not allowed in names
    proposal.name = 'dean miller 5493'
    assert proposal.is_valid() is False

    proposal.name = 'dean-millerà-5493'
    assert proposal.is_valid() is False

    proposal.name = 'dean-миллер-5493'
    assert proposal.is_valid() is False

    # binary gibberish
    proposal.name = dashlib.deserialise('22385c7530303933375c75303363375c75303232395c75303138635c75303064335c75303163345c75303264385c75303236615c75303134625c75303163335c75303063335c75303362385c75303266615c75303261355c75303266652f2b5c75303065395c75303164655c75303136655c75303338645c75303062385c75303138635c75303064625c75303064315c75303038325c75303133325c753032333222')
    assert proposal.is_valid() is False

    # reset
    proposal = Proposal(**orig.get_dict())

    # ============================================================
    # ensure valid payment address
    # ============================================================
    proposal.payment_address = '7'
    assert proposal.is_valid() is False

    proposal.payment_address = 'YYE8KWYAUU5YSWSYMB3Q3RYX8XTUU9Y7UI'
    assert proposal.is_valid() is False

    proposal.payment_address = 'seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyc'
    assert proposal.is_valid() is False

    proposal.payment_address = '221 B Baker St., London, United Kingdom'
    assert proposal.is_valid() is False

    # this is actually the Dash foundation multisig address...
    proposal.payment_address = '7gnwGHt17heGpG9Crfeh4KGpYNFugPhJdh'
    assert proposal.is_valid() is False

    proposal.payment_address = 'seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb'
    assert proposal.is_valid() is True

    # leading/trailing whitespace invalidates the address
    proposal.payment_address = ' yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Ui'
    assert proposal.is_valid() is False

    proposal.payment_address = 'yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Ui '
    assert proposal.is_valid() is False

    proposal.payment_address = ' yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Ui '
    assert proposal.is_valid() is False

    # reset
    proposal = Proposal(**orig.get_dict())

    # validate URL
    proposal.url = ' '
    assert proposal.is_valid() is False

    proposal.url = '    '
    assert proposal.is_valid() is False

    proposal.url = 'http://bit.ly/1e1EYJv'
    assert proposal.is_valid() is True

    # leading/trailing whitespace invalidates the URL
    proposal.url = ' http://bit.ly/1e1EYJv'
    assert proposal.is_valid() is False

    proposal.url = 'http://bit.ly/1e1EYJv '
    assert proposal.is_valid() is False

    proposal.url = ' http://bit.ly/1e1EYJv '
    assert proposal.is_valid() is False

    # malformed host / bracket syntax
    proposal.url = 'http://::12.34.56.78]/'
    assert proposal.is_valid() is False

    proposal.url = 'http://[::1/foo/bad]/bad'
    assert proposal.is_valid() is False

    # embedded space in the path
    proposal.url = 'http://dashcentral.org/dean-miller 5493'
    assert proposal.is_valid() is False

    # internationalized hostnames/paths are accepted
    proposal.url = 'http://dashcentralisé.org/dean-miller-5493'
    assert proposal.is_valid() is True

    proposal.url = 'http://dashcentralisé.org/dean-миллер-5493'
    assert proposal.is_valid() is True

    proposal.url = 'https://example.com/resource.ext?param=1&other=2'
    assert proposal.is_valid() is True

    proposal.url = 'www.com'
    assert proposal.is_valid() is True

    proposal.url = 'v.ht/'
    assert proposal.is_valid() is True

    # non-HTTP schemes and scheme-less paths are accepted
    proposal.url = 'ipfs:///ipfs/QmPwwoytFU3gZYk5tSppumxaGbHymMUgHsSvrBdQH69XRx/'
    assert proposal.is_valid() is True

    proposal.url = '/ipfs/QmPwwoytFU3gZYk5tSppumxaGbHymMUgHsSvrBdQH69XRx/'
    assert proposal.is_valid() is True

    proposal.url = 's3://bucket/thing/anotherthing/file.pdf'
    assert proposal.is_valid() is True

    proposal.url = 'http://zqktlwi4fecvo6ri.onion/wiki/index.php/Main_Page'
    assert proposal.is_valid() is True

    proposal.url = 'ftp://ftp.funet.fi/pub/standards/RFC/rfc959.txt'
    assert proposal.is_valid() is True

    # gibberish URL
    proposal.url = dashlib.deserialise('22687474703a2f2f5c75303330385c75303065665c75303362345c75303362315c75303266645c75303331345c625c75303134655c75303031615c75303139655c75303133365c75303264315c75303238655c75303364395c75303230665c75303363355c75303030345c75303336665c75303238355c75303165375c75303063635c75303139305c75303262615c75303239316a5c75303130375c75303362365c7530306562645c75303133335c75303335665c7530326562715c75303038655c75303332645c75303362645c75303064665c75303135654f365c75303237335c75303363645c7530333539275c75303165345c75303339615c75303365385c75303334345c75303130615c75303265662e5c75303231625c75303164356a5c75303232345c75303163645c75303336365c75303064625c75303339665c75303230305c75303337615c75303138395c75303263325c75303038345c75303066615c75303031335c75303233655c75303135345c75303165395c75303139635c75303239375c75303039355c75303038345c75303362305c7530306233435c75303135345c75303063665c75303163345c75303261335c75303362655c75303136305c75303139365c75303263665c75303131305c7530313031475c75303162645c75303338645c75303363325c75303138625c75303235625c75303266325c75303264635c75303139335c75303066665c75303066645c75303133625c75303234305c75303137615c75303062355c75303031645c75303238655c75303166315c75303232315c75303161615c75303265325c75303335625c75303333665c75303239345c75303335315c75303038345c75303339395c75303262385c75303132375c75303330357a5c75303263625c75303066305c75303062355c75303164335c75303338385c75303364385c75303130625c75303266325c75303137305c75303335315c75303030305c75303136385c75303039646d5c75303331315c75303236615c75303330375c75303332635c75303361635c665c75303363335c75303264365c75303238645c75303136395c7530323438635c75303163385c75303261355c75303164615c75303165375c75303337355c75303332645c7530333165755c75303131665c75303338375c75303135325c75303065325c75303135326c5c75303164325c75303164615c75303136645c75303061665c75303333375c75303264375c75303339375c75303139395c75303134635c75303165385c75303234315c75303336635c75303130645c75303230635c75303161615c75303339355c75303133315c75303064615c75303165615c75303336645c75303064325c75303337365c75303363315c75303132645c75303266305c75303064364f255c75303263635c75303162645c75303062385c75303238365c75303136395c75303337335c75303232335c75303336655c75303037665c75303062616b5c75303132365c75303233305c75303330645c75303362385c75303164355c75303166615c75303338395c75303062635c75303135325c75303334365c75303139645c75303135615c75303031395c75303061385c75303133615c75303338635c75303339625c75303261655c75303065395c75303362635c75303166385c75303031665c75303230615c75303263355c75303134335c75303361635c75303334355c75303236645c75303139365c75303362665c75303135615c75303137305c75303165395c75303231395c75303332665c75303232645c75303030365c75303066305c75303134665c75303337375c75303234325d5c75303164325c75303337655c75303265665c75303331395c75303261355c75303265385c75303338395c75303235645c75303334315c75303338395c7530323230585c75303062645c75303166365c75303238645c75303231375c75303066665c75303130385c75303331305c75303330335c75303031395c75303039635c75303363315c75303039615c75303334355c75303331305c75303162335c75303263315c75303132395c75303234335c75303038627c5c75303361335c75303261635c75303165655c75303030305c75303237615c75303038385c75303066355c75303232375c75303236635c75303236355c7530336336205c75303038615c7530333561787c735c75303336305c75303362655c75303235385c75303334345c75303264365c75303262355c75303361315c75303135345c75303131625c75303061625c75303038615c75303332655c75303238325c75303031393d5c75303263335c75303332655c75303163645c75303139305c75303231305c75303131365c75303334305c75303234665c75303162635c75303333645c75303135305c75303132335c75303233645c75303133345c75303062327a5c75303331635c75303136312a5c753032316522')
    assert proposal.is_valid() is False

    # reset
    proposal = Proposal(**orig.get_dict())

    # ============================================================
    # ensure proposal can't request negative dash
    # ============================================================
    proposal.payment_amount = -1
    assert proposal.is_valid() is False
|
||||
|
||||
|
||||
def test_proposal_is_expired(proposal):
    """A past-end_epoch proposal is only expired once outside the grace window."""
    superblockcycle = 24  # testnet cycle length
    now = misc.now()

    proposal.start_epoch = now - 2 * 86400  # started two days ago
    proposal.end_epoch = now - 3600         # ended one hour ago

    # recently ended: still inside the window, not expired yet
    assert proposal.is_expired(superblockcycle=superblockcycle) is False

    # fudge factor + a 24-block cycle == an expiry window of 9086, so...
    proposal.end_epoch = now - 9085
    assert proposal.is_expired(superblockcycle=superblockcycle) is False

    # one second past the window: now expired
    proposal.end_epoch = now - 9087
    assert proposal.is_expired(superblockcycle=superblockcycle) is True
|
||||
|
||||
|
||||
# deterministic ordering
|
||||
def test_approved_and_ranked(go_list_proposals):
    """approved_and_ranked() returns imported proposals in a deterministic order."""
    from sibcoind import SibcoinDaemon
    daemon = SibcoinDaemon.from_sibcoin_conf(config.sibcoin_conf)

    # load each governance object into the local DB
    for gobj in go_list_proposals:
        GovernanceObject.import_gobject_from_dashd(daemon, gobj)

    ranked = Proposal.approved_and_ranked(proposal_quorum=1, next_superblock_max_budget=60)

    assert ranked[0].object_hash == u'7e38a64c2e5275b978e0075be2d87765b91f1bab75285de6818c00fb009465be'
    assert ranked[1].object_hash == u'62319ca4478962bfd6601095b29bae00cab0ad4d037f6eee55d1ccfae7d637eb'
|
||||
|
||||
|
||||
def test_proposal_size(proposal):
    """A proposal at exactly MAX_DATA_SIZE serialised bytes is valid; one byte over is not.

    Fix: removed the unused `orig = Proposal(**proposal.get_dict())` copy —
    unlike the other tests in this module, this one never resets from it.
    """
    proposal.url = 'https://testurl.com/'
    # serialise() produces hex, so two characters per byte
    proposal_length_bytes = len(proposal.serialise()) // 2

    # how much space is available in the Proposal
    extra_bytes = (Proposal.MAX_DATA_SIZE - proposal_length_bytes)

    # fill URL field with max remaining space
    proposal.url = proposal.url + ('x' * extra_bytes)

    # ensure this is the max proposal size and is valid
    assert (len(proposal.serialise()) // 2) == Proposal.MAX_DATA_SIZE
    assert proposal.is_valid() is True

    # add one more character to URL, Proposal should now be invalid
    proposal.url = proposal.url + 'x'
    assert (len(proposal.serialise()) // 2) == (Proposal.MAX_DATA_SIZE + 1)
    assert proposal.is_valid() is False
|
||||
|
||||
@ -0,0 +1,257 @@
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
os.environ['SENTINEL_ENV'] = 'test'
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../../test_sentinel.conf'))
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../../lib')))
|
||||
import misc
|
||||
import config
|
||||
from models import GovernanceObject, Proposal, Superblock, Vote
|
||||
|
||||
|
||||
# clear DB tables before each execution
|
||||
def setup():
    """Wipe all governance tables so every test starts against an empty DB."""
    # clear tables first...
    # NOTE(review): dependent rows (Vote, Proposal, Superblock) are deleted
    # before GovernanceObject — presumably to satisfy foreign-key references;
    # confirm against the model definitions.
    Vote.delete().execute()
    Proposal.delete().execute()
    Superblock.delete().execute()
    GovernanceObject.delete().execute()
|
||||
|
||||
|
||||
def teardown():
    """Module-level pytest teardown hook; nothing to clean up here."""
    pass
|
||||
|
||||
|
||||
# list of proposal govobjs to import for testing
|
||||
@pytest.fixture
def go_list_proposals():
    """Two proposal governance objects shaped like `gobject list` RPC output.

    DataHex is the hex encoding of the DataString JSON payload; Hash is the
    governance object hash that later tests assert against after import.
    """
    items = [
        {u'AbsoluteYesCount': 1000,
         u'AbstainCount': 7,
         u'CollateralHash': u'acb67ec3f3566c9b94a26b70b36c1f74a010a37c0950c22d683cc50da324fdca',
         u'DataHex': u'5b5b2270726f706f73616c222c207b22656e645f65706f6368223a20323132323532303430302c20226e616d65223a20227465737470726f706f73616c2d35343933222c20227061796d656e745f61646472657373223a20227365564e704835726b617538644b68756d694c46314259737070327666374c6b7962222c20227061796d656e745f616d6f756e74223a2032352e37352c202273746172745f65706f6368223a20313437343236313038362c202274797065223a20312c202275726c223a2022687474703a2f2f736962636f6e74726f6c2e6f72672f70726f706f73616c732f7465737470726f706f73616c2d35343933227d5d5d',
         u'DataString': u'[["proposal", {"end_epoch": 2122520400, "name": "testproposal-5493", "payment_address": "seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb", "payment_amount": 25.75, "start_epoch": 1474261086, "type": 1, "url": "http://sibcontrol.org/proposals/testproposal-5493"}]]',
         u'Hash': u'dfd7d63979c0b62456b63d5fc5306dbec451180adee85876cbf5b28c69d1a86c',
         u'IsValidReason': u'',
         u'NoCount': 25,
         u'YesCount': 1025,
         u'fBlockchainValidity': True,
         u'fCachedDelete': False,
         u'fCachedEndorsed': False,
         u'fCachedFunding': False,
         u'fCachedValid': True},
        {u'AbsoluteYesCount': 1000,
         u'AbstainCount': 29,
         u'CollateralHash': u'3efd23283aa98c2c33f80e4d9ed6f277d195b72547b6491f43280380f6aac810',
         u'DataHex': u'5b5b2270726f706f73616c222c207b22656e645f65706f6368223a20323132323532303430302c20226e616d65223a20226665726e616e64657a2d37363235222c20227061796d656e745f61646472657373223a2022736674734a6564686d4c71594257506b627670716b7371737653397041624c614c53222c20227061796d656e745f616d6f756e74223a2033322e30312c202273746172745f65706f6368223a20313437343236313038362c202274797065223a20312c202275726c223a2022687474703a2f2f736962636f6e74726f6c2e6f72672f70726f706f73616c732f6665726e616e64657a2d37363235227d5d5d',
         u'DataString': u'[["proposal", {"end_epoch": 2122520400, "name": "fernandez-7625", "payment_address": "sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS", "payment_amount": 32.01, "start_epoch": 1474261086, "type": 1, "url": "http://sibcontrol.org/proposals/fernandez-7625"}]]',
         u'Hash': u'0523445762025b2e01a2cd34f1d10f4816cf26ee1796167e5b029901e5873630',
         u'IsValidReason': u'',
         u'NoCount': 56,
         u'YesCount': 1056,
         u'fBlockchainValidity': True,
         u'fCachedDelete': False,
         u'fCachedEndorsed': False,
         u'fCachedFunding': False,
         u'fCachedValid': True},
    ]

    return items
|
||||
|
||||
|
||||
# list of superblock govobjs to import for testing
|
||||
@pytest.fixture
def go_list_superblocks():
    """Three trigger (superblock) governance objects, `gobject list`-shaped.

    All three share the same DataHex payload but have distinct Hash values,
    which is what find_highest_deterministic() selects between.
    NOTE(review): the first entry's DataString has payment_amounts
    "25.75000000|25.7575000000" while the others have "25.75000000|25.75000000"
    — confirm whether that mismatch vs. the shared DataHex is intentional.
    """
    items = [
        {u'AbsoluteYesCount': 1,
         u'AbstainCount': 0,
         u'CollateralHash': u'0000000000000000000000000000000000000000000000000000000000000000',
         u'DataHex': u'5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2037323639362c20227061796d656e745f616464726573736573223a20227365564e704835726b617538644b68756d694c46314259737070327666374c6b79627c736674734a6564686d4c71594257506b627670716b7371737653397041624c614c53222c20227061796d656e745f616d6f756e7473223a202232352e37353030303030307c32352e3735303030303030222c202274797065223a20327d5d5d',
         u'DataString': u'[["trigger", {"event_block_height": 72696, "payment_addresses": "seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb|sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS", "payment_amounts": "25.75000000|25.7575000000", "type": 2}]]',
         u'Hash': u'667c4a53eb81ba14d02860fdb4779e830eb8e98306f9145f3789d347cbeb0721',
         u'IsValidReason': u'',
         u'NoCount': 0,
         u'YesCount': 1,
         u'fBlockchainValidity': True,
         u'fCachedDelete': False,
         u'fCachedEndorsed': False,
         u'fCachedFunding': False,
         u'fCachedValid': True},
        {u'AbsoluteYesCount': 1,
         u'AbstainCount': 0,
         u'CollateralHash': u'0000000000000000000000000000000000000000000000000000000000000000',
         u'DataHex': u'5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2037323639362c20227061796d656e745f616464726573736573223a20227365564e704835726b617538644b68756d694c46314259737070327666374c6b79627c736674734a6564686d4c71594257506b627670716b7371737653397041624c614c53222c20227061796d656e745f616d6f756e7473223a202232352e37353030303030307c32352e3735303030303030222c202274797065223a20327d5d5d',
         u'DataString': u'[["trigger", {"event_block_height": 72696, "payment_addresses": "seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb|sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS", "payment_amounts": "25.75000000|25.75000000", "type": 2}]]',
         u'Hash': u'8f91ffb105739ec7d5b6c0b12000210fcfcc0837d3bb8ca6333ba93ab5fc0bdf',
         u'IsValidReason': u'',
         u'NoCount': 0,
         u'YesCount': 1,
         u'fBlockchainValidity': True,
         u'fCachedDelete': False,
         u'fCachedEndorsed': False,
         u'fCachedFunding': False,
         u'fCachedValid': True},
        {u'AbsoluteYesCount': 1,
         u'AbstainCount': 0,
         u'CollateralHash': u'0000000000000000000000000000000000000000000000000000000000000000',
         u'DataHex': u'5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2037323639362c20227061796d656e745f616464726573736573223a20227365564e704835726b617538644b68756d694c46314259737070327666374c6b79627c736674734a6564686d4c71594257506b627670716b7371737653397041624c614c53222c20227061796d656e745f616d6f756e7473223a202232352e37353030303030307c32352e3735303030303030222c202274797065223a20327d5d5d',
         u'DataString': u'[["trigger", {"event_block_height": 72696, "payment_addresses": "seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb|sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS", "payment_amounts": "25.75000000|25.75000000", "type": 2}]]',
         u'Hash': u'bc2834f357da7504138566727c838e6ada74d079e63b6104701f4f8eb05dae36',
         u'IsValidReason': u'',
         u'NoCount': 0,
         u'YesCount': 1,
         u'fBlockchainValidity': True,
         u'fCachedDelete': False,
         u'fCachedEndorsed': False,
         u'fCachedFunding': False,
         u'fCachedValid': True},
    ]

    return items
|
||||
|
||||
|
||||
@pytest.fixture
def superblock():
    """Build an unsaved Superblock with two payments and two proposal hashes."""
    fields = dict(
        event_block_height=62500,
        payment_addresses='seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb|sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS',
        payment_amounts='5|3',
        proposal_hashes='e8a0057914a2e1964ae8a945c4723491caae2077a90a00a2aabee22b40081a87|d1ce73527d7cd6f2218f8ca893990bc7d5c6b9334791ce7973bfa22f155f826e',
    )
    return Superblock(**fields)
|
||||
|
||||
|
||||
def test_superblock_is_valid(superblock):
    """Exercise Superblock.is_valid() over amounts, addresses and hashes.

    Pattern: mutate one field of the fixture at a time, assert expected
    validity, then rebuild from `orig` before the next section.
    """
    from sibcoind import SibcoinDaemon
    # NOTE(review): sibcoind is never used in this test — confirm whether the
    # daemon handle is needed here at all.
    sibcoind = SibcoinDaemon.from_sibcoin_conf(config.sibcoin_conf)

    orig = Superblock(**superblock.get_dict())  # make a copy

    # original as-is should be valid
    assert orig.is_valid() is True

    # mess with payment amounts
    superblock.payment_amounts = '7|yyzx'
    assert superblock.is_valid() is False

    superblock.payment_amounts = '7,|yzx'
    assert superblock.is_valid() is False

    superblock.payment_amounts = '7|8'
    assert superblock.is_valid() is True

    # leading/trailing whitespace invalidates the amounts string
    superblock.payment_amounts = ' 7|8'
    assert superblock.is_valid() is False

    superblock.payment_amounts = '7|8 '
    assert superblock.is_valid() is False

    superblock.payment_amounts = ' 7|8 '
    assert superblock.is_valid() is False

    # reset
    superblock = Superblock(**orig.get_dict())
    assert superblock.is_valid() is True

    # mess with payment addresses
    superblock.payment_addresses = 'yTC62huR4YQEPn9AJHjnQxxreHSbgAoatV|1234 Anywhere ST, Chicago, USA'
    assert superblock.is_valid() is False

    # leading spaces in payment addresses
    superblock.payment_addresses = ' yTC62huR4YQEPn9AJHjnQxxreHSbgAoatV'
    superblock.payment_amounts = '5.00'
    assert superblock.is_valid() is False

    # trailing spaces in payment addresses
    superblock.payment_addresses = 'yTC62huR4YQEPn9AJHjnQxxreHSbgAoatV '
    superblock.payment_amounts = '5.00'
    assert superblock.is_valid() is False

    # leading & trailing spaces in payment addresses
    superblock.payment_addresses = ' yTC62huR4YQEPn9AJHjnQxxreHSbgAoatV '
    superblock.payment_amounts = '5.00'
    assert superblock.is_valid() is False

    # single payment addr/amt is ok
    superblock.payment_addresses = 'sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS'
    superblock.payment_amounts = '5.00'
    assert superblock.is_valid() is True

    # ensure number of payment addresses matches number of payments
    superblock.payment_addresses = 'sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS'
    superblock.payment_amounts = '37.00|23.24'
    assert superblock.is_valid() is False

    superblock.payment_addresses = 'seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb|sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS'
    superblock.payment_amounts = '37.00'
    assert superblock.is_valid() is False

    # ensure amounts greater than zero
    superblock.payment_addresses = 'sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS'
    superblock.payment_amounts = '-37.00'
    assert superblock.is_valid() is False

    # reset
    superblock = Superblock(**orig.get_dict())
    assert superblock.is_valid() is True

    # mess with proposal hashes
    superblock.proposal_hashes = '7|yyzx'
    assert superblock.is_valid() is False

    superblock.proposal_hashes = '7,|yyzx'
    assert superblock.is_valid() is False

    # hashes must be full 64-hex-char strings
    superblock.proposal_hashes = '0|1'
    assert superblock.is_valid() is False

    superblock.proposal_hashes = '0000000000000000000000000000000000000000000000000000000000000000|1111111111111111111111111111111111111111111111111111111111111111'
    assert superblock.is_valid() is True

    # reset
    superblock = Superblock(**orig.get_dict())
    assert superblock.is_valid() is True
|
||||
|
||||
|
||||
def test_serialisable_fields():
    """Superblock.serialisable_fields() exposes exactly these four fields."""
    expected = ['event_block_height', 'payment_addresses', 'payment_amounts', 'proposal_hashes']
    actual = Superblock.serialisable_fields()

    # order-insensitive comparison
    assert sorted(actual) == sorted(expected)
|
||||
|
||||
|
||||
def test_deterministic_superblock_creation(go_list_proposals):
    """create_superblock() from ranked proposals yields a fixed, known superblock."""
    import dashlib
    import misc
    from sibcoind import SibcoinDaemon
    sibcoind = SibcoinDaemon.from_sibcoin_conf(config.sibcoin_conf)
    # load fixture governance objects into the local DB
    for item in go_list_proposals:
        (go, subobj) = GovernanceObject.import_gobject_from_dashd(sibcoind, item)

    max_budget = 60
    prop_list = Proposal.approved_and_ranked(proposal_quorum=1, next_superblock_max_budget=max_budget)

    sb = dashlib.create_superblock(prop_list, 72000, max_budget, misc.now())

    # fields and final hash must be byte-for-byte reproducible
    assert sb.event_block_height == 72000
    assert sb.payment_addresses == 'seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb|sftsJedhmLqYBWPkbvpqksqsvS9pAbLaLS'
    assert sb.payment_amounts == '25.75000000|32.01000000'
    assert sb.proposal_hashes == 'dfd7d63979c0b62456b63d5fc5306dbec451180adee85876cbf5b28c69d1a86c|0523445762025b2e01a2cd34f1d10f4816cf26ee1796167e5b029901e5873630'

    assert sb.hex_hash() == 'f8cabf11ddc5479a9440868064b85bc7c726d267fc942b324e940e02949618c7'
|
||||
|
||||
|
||||
def test_deterministic_superblock_selection(go_list_superblocks):
    """find_highest_deterministic() picks the same superblock for a given seed hash."""
    from sibcoind import SibcoinDaemon
    sibcoind = SibcoinDaemon.from_sibcoin_conf(config.sibcoin_conf)

    # load fixture trigger objects into the local DB
    for item in go_list_superblocks:
        (go, subobj) = GovernanceObject.import_gobject_from_dashd(sibcoind, item)

    # highest hash wins if same -- so just order by hash
    sb = Superblock.find_highest_deterministic('366d15f2075cf8dc29301ec862d0343f79976a804ef76ef61adf50f818228413')
    assert sb.object_hash == 'bc2834f357da7504138566727c838e6ada74d079e63b6104701f4f8eb05dae36'
|
||||
@ -0,0 +1,85 @@
|
||||
import pytest
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
|
||||
os.environ['SENTINEL_ENV'] = 'test'
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
|
||||
import config
|
||||
#from dash_config import DashConfig
|
||||
from sib_config import SibcoinConfig
|
||||
|
||||
|
||||
@pytest.fixture
def dash_conf(**kwargs):
    """Render a minimal sibcoin.conf blob; kwargs override the default RPC settings.

    NOTE(review): tests in this module call this fixture function directly,
    which pytest >= 4 disallows — confirm the pinned pytest version.
    """
    settings = {
        'rpcuser': 'dashrpc',
        'rpcpassword': 'EwJeV3fZTyTVozdECF627BkBMnNDwQaVLakG3A4wXYyk',
        'rpcport': 29241,
    }
    # caller-supplied overrides win over the defaults
    settings.update(kwargs)

    return """# basic settings
testnet=1 # TESTNET
server=1
rpcuser={rpcuser}
rpcpassword={rpcpassword}
rpcallowip=127.0.0.1
rpcport={rpcport}
""".format(**settings)
|
||||
|
||||
|
||||
def test_get_rpc_creds():
    """get_rpc_creds() extracts user/password/port from a conf blob,
    honours overrides, and falls back to the default testnet port."""
    dash_config = dash_conf()
    creds = SibcoinConfig.get_rpc_creds(dash_config, 'testnet')

    for key in ('user', 'password', 'port'):
        assert key in creds
    assert creds.get('user') == 'dashrpc'
    assert creds.get('password') == 'EwJeV3fZTyTVozdECF627BkBMnNDwQaVLakG3A4wXYyk'
    assert creds.get('port') == 29241

    # kwargs overrides in the conf fixture must flow through
    dash_config = dash_conf(rpcpassword='s00pers33kr1t', rpcport=8000)
    creds = SibcoinConfig.get_rpc_creds(dash_config, 'testnet')

    for key in ('user', 'password', 'port'):
        assert key in creds
    assert creds.get('user') == 'dashrpc'
    assert creds.get('password') == 's00pers33kr1t'
    assert creds.get('port') == 8000

    # strip the rpcport line; the default testnet port (11944) should apply.
    # BUGFIX: re.M was previously passed as re.sub()'s 4th positional
    # argument, which is `count` (so count=8), not `flags` — pass it as a
    # keyword. Result was accidentally identical (only one match exists),
    # but the call was wrong.
    no_port_specified = re.sub(r'\nrpcport=.*?\n', '\n', dash_conf(), flags=re.M)
    creds = SibcoinConfig.get_rpc_creds(no_port_specified, 'testnet')

    for key in ('user', 'password', 'port'):
        assert key in creds
    assert creds.get('user') == 'dashrpc'
    assert creds.get('password') == 'EwJeV3fZTyTVozdECF627BkBMnNDwQaVLakG3A4wXYyk'
    assert creds.get('port') == 11944
|
||||
|
||||
|
||||
def test_slurp_config_file():
    """slurp_config_file() drops full-line comments but keeps inline ones."""
    import tempfile

    raw_config = """# basic settings
#testnet=1 # TESTNET
server=1
printtoconsole=1
txindex=1 # enable transaction index
"""

    stripped_config = """server=1
printtoconsole=1
txindex=1 # enable transaction index
"""

    # write the blob to a real file, since slurp_config_file reads by path
    with tempfile.NamedTemporaryFile(mode='w') as conf_file:
        conf_file.write(raw_config)
        conf_file.flush()
        result = SibcoinConfig.slurp_config_file(conf_file.name)
        assert result == stripped_config
|
||||
@ -0,0 +1,140 @@
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
|
||||
|
||||
|
||||
@pytest.fixture
def valid_dash_address(network='mainnet'):
    """Return a known-valid address for the requested network."""
    if network == 'testnet':
        return 'seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyb'
    return 'Sa9Vn2V4gtBFHovqVQh5V6dCt4ukMYUU2Z'
|
||||
|
||||
|
||||
@pytest.fixture
def invalid_dash_address(network='mainnet'):
    """Return an address that must fail validation on the requested network."""
    if network == 'testnet':
        return 'seVNpH5rkau8dKhumiLF1BYspp2vf7Lkyc'
    return 'Sa9Vn2V4gtBFHovqVQh5V6dCt4ukMYUU2Y'
|
||||
|
||||
|
||||
@pytest.fixture
def current_block_hash():
    """Fixed block hash used to seed deterministic masternode elections."""
    block_hash = '000001c9ba1df5a1c58a4e458fb6febfe9329b1947802cd60a4ae90dd754b534'
    return block_hash
|
||||
|
||||
|
||||
@pytest.fixture
def mn_list():
    """Three Masternode objects built from `masternodelist full`-style strings.

    Keys are "txid-index" vins; values are the space-padded status strings
    — presumably status/protocol/pubkey/timestamps/lastpaid/address; confirm
    against the Masternode parser.
    """
    from masternode import Masternode

    masternodelist_full = {
        u'701854b26809343704ab31d1c45abc08f9f83c5c2bd503a9d5716ef3c0cda857-1': u'  ENABLED 70201 yjaFS6dudxUTxYPTDB9BYd1Nv4vMJXm3vK 1474157572    82842 1474152618  71111 52.90.74.124:19999',
        u'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1': u'  ENABLED 70201 yUuAsYCnG5XrjgsGvRwcDqPhgLUnzNfe8L 1474157732  1590425 1474155175  71122 [2604:a880:800:a1::9b:0]:19999',
        u'656695ed867e193490261bea74783f0a39329ff634a10a9fb6f131807eeca744-1': u'  ENABLED 70201 yepN97UoBLoP2hzWnwWGRVTcWtw1niKwcB 1474157704   824622 1474152571  71110 178.62.203.249:19999',
    }

    mnlist = [Masternode(vin, mnstring) for (vin, mnstring) in masternodelist_full.items()]

    return mnlist
|
||||
|
||||
|
||||
@pytest.fixture
def mn_status_good():
    """Status blob for a masternode that is enabled & running (valid vin)."""
    return {
        "vin": "CTxIn(COutPoint(f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56, 1), scriptSig=)",
        "service": "[2604:a880:800:a1::9b:0]:19999",
        "pubkey": "yUuAsYCnG5XrjgsGvRwcDqPhgLUnzNfe8L",
        "status": "Masternode successfully started"
    }
|
||||
|
||||
|
||||
@pytest.fixture
def mn_status_bad():
    """Status blob for a masternode that is not yet activated (coinbase vin)."""
    return {
        "vin": "CTxIn(COutPoint(0000000000000000000000000000000000000000000000000000000000000000, 4294967295), coinbase )",
        "service": "[::]:0",
        "status": "Node just started, not yet activated"
    }
|
||||
|
||||
|
||||
# ========================================================================
|
||||
|
||||
|
||||
def test_valid_dash_address():
    """Known-good addresses validate only against their own network."""
    from dashlib import is_valid_address

    # NOTE(review): these call @pytest.fixture-decorated helpers directly;
    # pytest >= 4 raises on direct fixture calls — confirm the pinned pytest
    # version, or make the helpers plain functions.
    main = valid_dash_address()
    test = valid_dash_address('testnet')

    # mainnet address: valid by default and for 'mainnet', not for 'testnet'
    assert is_valid_address(main) is True
    assert is_valid_address(main, 'mainnet') is True
    assert is_valid_address(main, 'testnet') is False

    # testnet address: valid only when checked against 'testnet'
    assert is_valid_address(test) is False
    assert is_valid_address(test, 'mainnet') is False
    assert is_valid_address(test, 'testnet') is True
|
||||
|
||||
|
||||
def test_invalid_dash_address():
    """Known-bad addresses fail validation on every network."""
    from dashlib import is_valid_address

    # NOTE(review): direct fixture calls — see test_valid_dash_address.
    main = invalid_dash_address()
    test = invalid_dash_address('testnet')

    assert is_valid_address(main) is False
    assert is_valid_address(main, 'mainnet') is False
    assert is_valid_address(main, 'testnet') is False

    assert is_valid_address(test) is False
    assert is_valid_address(test, 'mainnet') is False
    assert is_valid_address(test, 'testnet') is False
|
||||
|
||||
|
||||
def test_deterministic_masternode_elections(current_block_hash, mn_list):
    """Deterministic election picks a fixed winner for a given block hash.

    NOTE(review): this test is shadowed by a later definition with the
    exact same name below -- pytest only collects the last definition, so
    this one never runs. The duplicate should be removed or renamed.

    Fix: `elect_mn` was used without being imported here, so the body
    would raise NameError if it ever ran; the local import (matching the
    style of the sibling tests) is added.
    """
    from dashlib import elect_mn

    winner = elect_mn(block_hash=current_block_hash, mnlist=mn_list)
    assert winner == 'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1'

    winner = elect_mn(block_hash='00000056bcd579fa3dc9a1ee41e8124a4891dcf2661aa3c07cc582bfb63b52b9', mnlist=mn_list)
    assert winner == '656695ed867e193490261bea74783f0a39329ff634a10a9fb6f131807eeca744-1'
|
||||
|
||||
|
||||
def test_deterministic_masternode_elections(current_block_hash, mn_list):
    """Election must be deterministic: a block hash maps to one fixed winner."""
    from dashlib import elect_mn

    expected_by_hash = [
        (current_block_hash,
         'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1'),
        ('00000056bcd579fa3dc9a1ee41e8124a4891dcf2661aa3c07cc582bfb63b52b9',
         '656695ed867e193490261bea74783f0a39329ff634a10a9fb6f131807eeca744-1'),
    ]
    for block_hash, expected_winner in expected_by_hash:
        assert elect_mn(block_hash=block_hash, mnlist=mn_list) == expected_winner
|
||||
|
||||
|
||||
def test_parse_masternode_status_vin(mn_status_good, mn_status_bad):
    """parse_masternode_status_vin extracts '<txid>-<vout>' from a status vin.

    Fix: the fixtures are now injected by pytest as parameters instead of
    being called directly -- calling a @pytest.fixture-decorated function
    directly raises an error in modern pytest versions.
    """
    from dashlib import parse_masternode_status_vin

    # enabled & running masternode: vin parses to "<txid>-<vout index>"
    vin = parse_masternode_status_vin(mn_status_good['vin'])
    assert vin == 'f68a2e5d64f4a9be7ff8d0fbd9059dcd3ce98ad7a19a9260d1d6709127ffac56-1'

    # not-yet-activated masternode: coinbase-style vin yields None
    vin = parse_masternode_status_vin(mn_status_bad['vin'])
    assert vin is None
|
||||
|
||||
|
||||
def test_hash_function():
|
||||
import dashlib
|
||||
sb_data_hex = '7b226576656e745f626c6f636b5f686569676874223a2037323639362c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e7473223a202232352e37353030303030307c32352e3735303030303030222c202274797065223a20327d'
|
||||
sb_hash = '7ae8b02730113382ea75cbb1eecc497c3aa1fdd9e76e875e38617e07fb2cb21a'
|
||||
|
||||
hex_hash = "%x" % dashlib.hashit(sb_data_hex)
|
||||
assert hex_hash == sb_hash
|
||||
|
||||
|
||||
def test_blocks_to_seconds():
    """blocks_to_seconds converts a block count to an expected time span."""
    import dashlib
    from decimal import Decimal

    precision = Decimal('0.001')

    # zero blocks take zero time
    assert Decimal(dashlib.blocks_to_seconds(0)) == Decimal(0.0)

    # fractional result compared at millisecond precision
    two_blocks = Decimal(dashlib.blocks_to_seconds(2)).quantize(precision)
    assert two_blocks == Decimal(314.4).quantize(precision)

    # large block count, truncated to whole seconds
    assert int(dashlib.blocks_to_seconds(16616)) == 2612035
|
||||
@ -0,0 +1,110 @@
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
|
||||
import dashlib
|
||||
import gobject_json
|
||||
|
||||
|
||||
# old format proposal hex w/multi-dimensional array
@pytest.fixture
def proposal_hex_old():
    """Hex-encoded proposal in the legacy nested-array gobject format."""
    hex_blob = "5b5b2270726f706f73616c222c207b22656e645f65706f6368223a20313534373138333939342c20226e616d65223a20226a61636b2d73706172726f772d6e65772d73686970222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a2034392c202273746172745f65706f6368223a20313532313432393139342c202274797065223a20312c202275726c223a202268747470733a2f2f7777772e6461736863656e7472616c2e6f72672f626c61636b2d706561726c227d5d5d"
    return hex_blob
|
||||
|
||||
|
||||
# same proposal data as old, but streamlined format
@pytest.fixture
def proposal_hex_new():
    """Hex-encoded proposal carrying the same data in the flat format."""
    hex_blob = "7b22656e645f65706f6368223a20313534373138333939342c20226e616d65223a20226a61636b2d73706172726f772d6e65772d73686970222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a2034392c202273746172745f65706f6368223a20313532313432393139342c202274797065223a20312c202275726c223a202268747470733a2f2f7777772e6461736863656e7472616c2e6f72672f626c61636b2d706561726c227d"
    return hex_blob
|
||||
|
||||
|
||||
# old format trigger hex w/multi-dimensional array
@pytest.fixture
def trigger_hex_old():
    """Hex-encoded trigger in the legacy nested-array gobject format."""
    hex_blob = "5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33222c202274797065223a20327d5d5d"
    return hex_blob
|
||||
|
||||
|
||||
# same data as new, but simpler format
@pytest.fixture
def trigger_hex_new():
    """Hex-encoded trigger carrying the same data in the flat format."""
    hex_blob = "7b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33222c202274797065223a20327d"
    return hex_blob
|
||||
|
||||
|
||||
def test_valid_json(proposal_hex_old, proposal_hex_new, trigger_hex_old, trigger_hex_new):
    """gobject_json.valid_json accepts strict JSON and rejects everything else.

    Fix: the hex fixtures are injected by pytest as parameters instead of
    being called directly -- calling a @pytest.fixture-decorated function
    directly raises an error in modern pytest versions.
    """
    import binascii

    # valid JSON: objects, the case-sensitive literals, and a bare string
    assert gobject_json.valid_json("{}") is True
    assert gobject_json.valid_json("null") is True
    assert gobject_json.valid_json("true") is True
    assert gobject_json.valid_json("false") is True
    assert gobject_json.valid_json("\"rubbish\"") is True
    assert gobject_json.valid_json(
        binascii.unhexlify(proposal_hex_old)
    ) is True
    assert gobject_json.valid_json(
        binascii.unhexlify(proposal_hex_new)
    ) is True
    assert gobject_json.valid_json(
        binascii.unhexlify(trigger_hex_new)
    ) is True
    assert gobject_json.valid_json(
        binascii.unhexlify(trigger_hex_old)
    ) is True

    # invalid/bad/not JSON -- JSON literals are case-sensitive, and
    # malformed or empty input must be rejected
    assert gobject_json.valid_json("False") is False
    assert gobject_json.valid_json("True") is False
    assert gobject_json.valid_json("Null") is False
    assert gobject_json.valid_json("NULL") is False
    assert gobject_json.valid_json("nil") is False
    assert gobject_json.valid_json("rubbish") is False
    assert gobject_json.valid_json("{{}") is False
    assert gobject_json.valid_json("") is False

    # a valid trigger with a stray trailing '7d' ('}') appended
    poorly_formatted = trigger_hex_old + "7d"
    assert gobject_json.valid_json(
        binascii.unhexlify(poorly_formatted)
    ) is False
|
||||
|
||||
|
||||
def test_extract_object(proposal_hex_old, proposal_hex_new, trigger_hex_old, trigger_hex_new):
    """extract_object yields the same dict from old and new gobject formats.

    Fix: the hex fixtures are injected by pytest as parameters instead of
    being called directly -- calling a @pytest.fixture-decorated function
    directly raises an error in modern pytest versions.
    """
    from decimal import Decimal
    import binascii

    # jack sparrow needs a new ship - same expected proposal data for both new &
    # old formats
    expected = {
        'type': 1,
        'name': 'jack-sparrow-new-ship',
        'url': 'https://www.dashcentral.org/black-pearl',
        'start_epoch': 1521429194,
        'end_epoch': 1547183994,
        'payment_address': 'yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Ui',
        'payment_amount': Decimal('49'),
    }

    # test proposal old format
    json_str = binascii.unhexlify(proposal_hex_old).decode('utf-8')
    assert gobject_json.extract_object(json_str) == expected

    # test proposal new format
    json_str = binascii.unhexlify(proposal_hex_new).decode('utf-8')
    assert gobject_json.extract_object(json_str) == expected

    # same expected trigger data for both new & old formats
    expected = {
        'type': 2,
        'event_block_height': 62500,
        'payment_addresses': 'yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Ui|yTC62huR4YQEPn9AJHjnQxxreHSbgAoatV',
        'payment_amounts': '5|3',
    }

    # test trigger old format
    json_str = binascii.unhexlify(trigger_hex_old).decode('utf-8')
    assert gobject_json.extract_object(json_str) == expected

    # test trigger new format
    json_str = binascii.unhexlify(trigger_hex_new).decode('utf-8')
    assert gobject_json.extract_object(json_str) == expected
|
||||
@ -0,0 +1,19 @@
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
|
||||
import misc
|
||||
|
||||
|
||||
def test_is_numeric():
    """misc.is_numeric accepts ints and numeric strings, rejects the rest."""
    for value in ['45', '45.7', 0, -1]:
        assert misc.is_numeric(value) is True

    # note: booleans are rejected even though bool subclasses int,
    # and comma decimal separators do not count as numeric
    for value in ['45,7', 'fuzzy_bunny_slippers', '', None, False, True]:
        assert misc.is_numeric(value) is False
|
||||
@ -0,0 +1,63 @@
|
||||
import pytest
|
||||
import os
|
||||
import sys
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
|
||||
|
||||
# setup/teardown?
|
||||
|
||||
|
||||
# Proposal model
@pytest.fixture
def proposal():
    """A default-constructed Proposal model instance."""
    from models import Proposal
    instance = Proposal()
    return instance
|
||||
|
||||
|
||||
def test_proposal(proposal):
    """Proposal.get_dict exposes exactly the expected proposal fields."""
    proposal_dict = proposal.get_dict()
    assert isinstance(proposal_dict, dict)

    expected_fields = sorted([
        'type',
        'name',
        'url',
        'start_epoch',
        'end_epoch',
        'payment_address',
        'payment_amount',
    ])
    assert sorted(proposal_dict.keys()) == expected_fields
|
||||
|
||||
|
||||
# GovernanceObject model
@pytest.fixture
def governance_object():
    """A default-constructed GovernanceObject model instance."""
    from models import GovernanceObject
    instance = GovernanceObject()
    return instance
|
||||
|
||||
|
||||
def test_governance_object(governance_object):
    """The GovernanceObject model declares exactly the expected columns."""
    columns = governance_object._meta.columns
    assert isinstance(columns, dict)

    expected_fields = sorted([
        'id',
        'parent_id',
        'object_creation_time',
        'object_hash',
        'object_parent_hash',
        'object_type',
        'object_revision',
        'object_fee_tx',
        'yes_count',
        'no_count',
        'abstain_count',
        'absolute_yes_count',
    ])
    assert sorted(columns.keys()) == expected_fields
|
||||
@ -0,0 +1,37 @@
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
os.environ['SENTINEL_ENV'] = 'test'
|
||||
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
|
||||
|
||||
|
||||
@pytest.fixture
def superblock():
    """A Superblock model populated with known payment/proposal data.

    NOTE: no governance_object_id is set.
    """
    from models import Superblock
    return Superblock(
        event_block_height=62500,
        payment_addresses='yYe8KwyaUu5YswSYmB3q3ryx8XTUu9y7Ui|yTC62huR4YQEPn9AJHjnQxxreHSbgAoatV',
        payment_amounts='5|3',
        proposal_hashes='e8a0057914a2e1964ae8a945c4723491caae2077a90a00a2aabee22b40081a87|d1ce73527d7cd6f2218f8ca893990bc7d5c6b9334791ce7973bfa22f155f826e',
    )
|
||||
|
||||
|
||||
def test_submit_command(superblock):
    """get_submit_command builds a well-formed 'gobject submit' CLI command."""
    cmd = superblock.get_submit_command()

    # each positional argument must match its expected shape
    patterns = [
        r'^gobject$',    # RPC command
        r'^submit$',     # sub-command
        r'^[\da-f]+$',   # hex field ('0' here)
        r'^[\da-f]+$',   # hex field ('1' here)
        r'^[\d]+$',      # submission time (digits only)
        r'^[\w-]+$',     # serialized payload
    ]
    for part, pattern in zip(cmd, patterns):
        assert re.match(pattern, part) is not None

    # the full command must match, with only the submit time taken from cmd
    submit_time = cmd[4]
    gobject_command = ['gobject', 'submit', '0', '1', submit_time, '7b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33222c202270726f706f73616c5f686173686573223a2022653861303035373931346132653139363461653861393435633437323334393163616165323037376139306130306132616162656532326234303038316138377c64316365373335323764376364366632323138663863613839333939306263376435633662393333343739316365373937336266613232663135356638323665222c202274797065223a20327d']
    assert cmd == gobject_command
|
||||
Loading…
Reference in new issue