DERO-HE STARGATE Testnet Release9

This commit is contained in:
Captain 2021-02-22 17:48:14 +00:00
parent c05c895a1f
commit cb5f96b3c9
No known key found for this signature in database
GPG Key ID: 18CDB3ED5E85D2D4
1757 changed files with 66952 additions and 355657 deletions

46
Changelog.md Normal file
View File

@ -0,0 +1,46 @@
### Welcome to the DEROHE Testnet
[Explorer](https://testnetexplorer.dero.io) [Source](https://github.com/deroproject/derohe) [Twitter](https://twitter.com/DeroProject) [Discord](https://discord.gg/H95TJDp) [Wiki](https://wiki.dero.io) [Github](https://github.com/deroproject/derohe) [DERO CryptoNote Mainnet Stats](http://network.dero.io) [Mainnet WebWallet](https://wallet.dero.io/)
### DERO HE Changelog
[From Wikipedia: ](https://en.wikipedia.org/wiki/Homomorphic_encryption)
### At this point in time, DERO blockchain has the first mover advantage in the following
* Private SCs ( no one knows who owns what tokens and who is transferring to whom and how much is being transferred)
* Homomorphic protocol
* Ability to do instant sync (takes a couple of seconds or minutes), depending on network bandwidth.
* Ability to deliver encrypted license keys and other data.
* Pruned chains are the core.
* Ability to model 99.9% earth based financial model of the world.
* Privacy by design, backed by crypto algorithms. Many years of research in place.
### 3.3
* Private SCs are now supported. (90% completed).
* Sample Token contract is available with guide.
* Multi-send is now possible, sending to multiple destinations per tx.
* Few more ideas implemented and will be tested for review in upcoming technology preview.
### 3.2
* Open SCs are now supported
* Private SCs which have their balance encrypted at all times (under implementation)
* SCs can now update themselves. however, new code will only run on next invocation
* Multi Send is under implementation.
### 3.1
* TX now have significant savings of around 31 * ringsize bytes for every tx
* Daemon now supports pruned chains.
* Daemon by default bootstraps a pruned chain.
* Daemon currently syncs full node by using --fullnode option.
* P2P has been rewritten for various improvements and easier understanding of state machine
* Address specification now enables to embed various RPC parameters for easier transaction
* DERO blockchain represents transaction finality in a couple of blocks (less than 1 minute), unlike other blockchains.
* Proving and parsing of embedded data is now available in explorer.
* Senders/Receivers both have proofs which confirm data sent on execution.
* All tx now have inbuilt space of 144 bytes for user defined data
* User defined space has inbuilt RPC which can be used to implement most practical use-cases. All user defined data is encrypted.
* The model currently defines data on chain while execution is referred to wallet extensions. A dummy example of pongserver extension showcases how to enable purchases/delivery of license keys/information privately.
* Burn transactions which burn value are now working.
### 3.0
* DERO HE implemented

View File

@ -2,7 +2,7 @@
DERO is written in golang and very easy to install both from source and binary.
Installation From Source:
Install Golang, minimum Golang 1.10.3 required.
Install Golang, minimum Golang 1.15 required.
In go workspace: go get -u github.com/deroproject/derohe/...
Check go workspace bin folder for binaries.
For example on Linux machine following binaries will be created:

View File

@ -24,10 +24,10 @@ import "runtime/debug"
import "encoding/hex"
import "encoding/binary"
import "github.com/ebfe/keccak"
import "golang.org/x/crypto/sha3"
import "github.com/romana/rlog"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
//import "github.com/deroproject/derosuite/config"
import "github.com/deroproject/derohe/astrobwt"
@ -65,7 +65,7 @@ func (bl *Block) GetHash() (hash crypto.Hash) {
long_header := bl.GetBlockWork()
// keccak hash of this above blob, gives the block id
return crypto.Keccak256(long_header)
return sha3.Sum256(long_header)
}
// converts a block, into a getwork style work, ready for either submitting the block
@ -78,7 +78,7 @@ func (bl *Block) GetBlockWork() []byte {
buf = append(buf, []byte{byte(bl.Major_Version), byte(bl.Minor_Version), 0, 0, 0, 0, 0}...) // 0 first 7 bytes are version in little endia format
binary.LittleEndian.PutUint32(buf[2:6], uint32(bl.Timestamp))
header_hash := crypto.Keccak256(bl.getserializedheaderforwork()) // 0 + 7
header_hash := sha3.Sum256(bl.getserializedheaderforwork()) // 0 + 7
buf = append(buf, header_hash[:]...) // 0 + 7 + 32 = 39
@ -251,7 +251,7 @@ func (bl *Block) GetTipsHash() (result crypto.Hash) {
}*/
// add all the remaining hashes
h := keccak.New256()
h := sha3.New256()
for i := range bl.Tips {
h.Write(bl.Tips[i][:])
}
@ -263,7 +263,7 @@ func (bl *Block) GetTipsHash() (result crypto.Hash) {
// get block transactions
// we have discarded the merkle tree and have shifted to a plain version
func (bl *Block) GetTXSHash() (result crypto.Hash) {
h := keccak.New256()
h := sha3.New256()
for i := range bl.Tx_hashes {
h.Write(bl.Tx_hashes[i][:])
}

View File

@ -47,7 +47,7 @@ import "github.com/golang/groupcache/lru"
import hashicorp_lru "github.com/hashicorp/golang-lru"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/errormsg"
import "github.com/prometheus/client_golang/prometheus"
@ -57,6 +57,7 @@ import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/blockchain/mempool"
import "github.com/deroproject/derohe/blockchain/regpool"
import "github.com/deroproject/derohe/rpc"
/*
import "github.com/deroproject/derosuite/emission"
@ -80,6 +81,7 @@ type Blockchain struct {
Height int64 // chain height is always 1 more than block
height_seen int64 // height seen on peers
Top_ID crypto.Hash // id of the top block
Pruned int64 // until where the chain has been pruned
Tips map[crypto.Hash]crypto.Hash // current tips
@ -108,6 +110,10 @@ type Blockchain struct {
RPC_NotifyNewBlock *sync.Cond // used to notify rpc that a new block has been found
RPC_NotifyHeightChanged *sync.Cond // used to notify rpc that chain height has changed due to addition of block
Dev_Address_Bytes []byte // used to fund reward every block
Sync bool // whether the sync is active, used while bootstrapping
sync.RWMutex
}
@ -168,6 +174,7 @@ func Blockchain_Start(params map[string]interface{}) (*Blockchain, error) {
logger.Fatalf("Failed to add genesis block, we can no longer continue. err %s", err)
}
}
/*
// genesis block not in chain, add it to chain, together with its miner tx
@ -225,9 +232,26 @@ func Blockchain_Start(params map[string]interface{}) (*Blockchain, error) {
// hard forks must be initialized asap
init_hard_forks(params)
// parse dev address once and for all
if addr, err := rpc.NewAddress(globals.Config.Dev_Address); err != nil {
logger.Fatalf("Could not parse dev address, err:%s", err)
} else {
chain.Dev_Address_Bytes = addr.PublicKey.EncodeCompressed()
}
// load the chain from the disk
chain.Initialise_Chain_From_DB()
chain.Sync = true
//if globals.Arguments["--fullnode"] != nil {
if chain.Get_Height() <= 1 {
chain.Sync = false
if globals.Arguments["--fullnode"].(bool) {
chain.Sync = globals.Arguments["--fullnode"].(bool)
}
}
//}
// logger.Fatalf("Testing complete quitting")
go clean_up_valid_cache() // clean up valid cache
@ -504,9 +528,11 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
}
// always check whether the coin base tx is okay
if bl.Height != 0 && !chain.Verify_Transaction_Coinbase(cbl, &bl.Miner_TX) { // if miner address is not registered give error
block_logger.Warnf("Miner address is not registered")
return errormsg.ErrInvalidBlock, false
if bl.Height != 0 {
if err = chain.Verify_Transaction_Coinbase(cbl, &bl.Miner_TX); err != nil { // if miner address is not registered give error
block_logger.Warnf("Error verifying coinbase tx, err :'%s'", err)
return err, false
}
}
// TODO we need to verify address whether they are valid points on curve or not
@ -563,7 +589,7 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
// another check, whether the tx is build with the latest snapshot of balance tree
{
for i := 0; i < len(cbl.Txs); i++ {
if cbl.Txs[i].TransactionType == transaction.NORMAL {
if cbl.Txs[i].TransactionType == transaction.NORMAL || cbl.Txs[i].TransactionType == transaction.BURN_TX || cbl.Txs[i].TransactionType == transaction.SC_TX {
if cbl.Txs[i].Height+1 != cbl.Bl.Height {
block_logger.Warnf("invalid tx mined %s", cbl.Txs[i].GetHash())
return errormsg.ErrTXDoubleSpend, false
@ -579,12 +605,12 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
nonce_map := map[crypto.Hash]bool{}
for i := 0; i < len(cbl.Txs); i++ {
if cbl.Txs[i].TransactionType == transaction.NORMAL {
if _, ok := nonce_map[cbl.Txs[i].Proof.Nonce()]; ok {
if cbl.Txs[i].TransactionType == transaction.NORMAL || cbl.Txs[i].TransactionType == transaction.BURN_TX || cbl.Txs[i].TransactionType == transaction.SC_TX {
if _, ok := nonce_map[cbl.Txs[i].Payloads[0].Proof.Nonce()]; ok {
block_logger.Warnf("Double Spend attack within block %s", cbl.Txs[i].GetHash())
return errormsg.ErrTXDoubleSpend, false
}
nonce_map[cbl.Txs[i].Proof.Nonce()] = true
nonce_map[cbl.Txs[i].Payloads[0].Proof.Nonce()] = true
}
}
}
@ -613,9 +639,9 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
hf_version := chain.Get_Current_Version_at_Height(chain.Calculate_Height_At_Tips(bl.Tips))
for i := 0; i < len(cbl.Txs); i++ {
go func(j int) {
if !chain.Verify_Transaction_NonCoinbase(hf_version, cbl.Txs[j]) { // transaction verification failed
if err := chain.Verify_Transaction_NonCoinbase(hf_version, cbl.Txs[j]); err != nil { // transaction verification failed
atomic.AddInt32(&fail_count, 1) // increase fail count by 1
block_logger.Warnf("Block verification failed rejecting since TX %s verification failed", cbl.Txs[j].GetHash())
block_logger.Warnf("Block verification failed rejecting since TX %s verification failed, err:'%s'", cbl.Txs[j].GetHash(), err)
}
wg.Done()
}(i)
@ -705,6 +731,7 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
// any blocks which have not changed their topo will be skipped using graviton trick
skip := true
for i := int64(0); i < int64(len(full_order)); i++ {
// check whether the new block is at the same position at the last position
@ -752,12 +779,16 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
}
var balance_tree *graviton.Tree
//
var balance_tree, sc_meta *graviton.Tree
_ = sc_meta
var ss *graviton.Snapshot
if bl_current.Height == 0 { // if it's genesis block
if ss, err := chain.Store.Balance_store.LoadSnapshot(0); err != nil {
if ss, err = chain.Store.Balance_store.LoadSnapshot(0); err != nil {
panic(err)
} else if balance_tree, err = ss.GetTree(BALANCE_TREE); err != nil {
} else if balance_tree, err = ss.GetTree(config.BALANCE_TREE); err != nil {
panic(err)
} else if sc_meta, err = ss.GetTree(config.SC_META); err != nil {
panic(err)
}
} else { // we already have a block before us, use it
@ -772,13 +803,15 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
record_version = toporecord.State_Version
}
ss, err := chain.Store.Balance_store.LoadSnapshot(record_version)
ss, err = chain.Store.Balance_store.LoadSnapshot(record_version)
if err != nil {
panic(err)
}
balance_tree, err = ss.GetTree(BALANCE_TREE)
if err != nil {
if balance_tree, err = ss.GetTree(config.BALANCE_TREE); err != nil {
panic(err)
}
if sc_meta, err = ss.GetTree(config.SC_META); err != nil {
panic(err)
}
}
@ -790,7 +823,12 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
// their transactions are ignored
//chain.Store.Topo_store.Write(i+base_topo_index, full_order[i],0, int64(bl_current.Height)) // write entry so as sideblock could work
var data_trees []*graviton.Tree
if !chain.isblock_SideBlock_internal(full_order[i], current_topo_block, int64(bl_current.Height)) {
sc_change_cache := map[crypto.Hash]*graviton.Tree{} // cache entire changes for entire block
for _, txhash := range bl_current.Tx_hashes { // execute all the transactions
if tx_bytes, err := chain.Store.Block_tx_store.ReadTX(txhash); err != nil {
panic(err)
@ -799,23 +837,74 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
if err = tx.DeserializeHeader(tx_bytes); err != nil {
panic(err)
}
for t := range tx.Payloads {
if !tx.Payloads[t].SCID.IsZero() {
tree, _ := ss.GetTree(string(tx.Payloads[t].SCID[:]))
sc_change_cache[tx.Payloads[t].SCID] = tree
}
}
// we have loaded a tx successfully, now lets execute it
fees_collected += chain.process_transaction(tx, balance_tree)
tx_fees := chain.process_transaction(sc_change_cache, tx, balance_tree)
//fmt.Printf("transaction %s type %s data %+v\n", txhash, tx.TransactionType, tx.SCDATA)
if tx.TransactionType == transaction.SC_TX {
tx_fees, err = chain.process_transaction_sc(sc_change_cache, ss, bl_current.Height, uint64(current_topo_block), bl_current_hash, tx, balance_tree, sc_meta)
//fmt.Printf("Processsing sc err %s\n", err)
if err == nil { // TODO process gasg here
}
}
fees_collected += tx_fees
}
}
chain.process_miner_transaction(bl_current.Miner_TX, bl_current.Height == 0, balance_tree, fees_collected)
// at this point, we must commit all the SCs, so entire tree hash is interlinked
for scid, v := range sc_change_cache {
meta_bytes, err := sc_meta.Get(SC_Meta_Key(scid))
if err != nil {
panic(err)
}
var meta SC_META_DATA // the meta contains metadata about SC
if err := meta.UnmarshalBinary(meta_bytes); err != nil {
panic(err)
}
if meta.DataHash, err = v.Hash(); err != nil { // encode data tree hash
panic(err)
}
sc_meta.Put(SC_Meta_Key(scid), meta.MarshalBinary())
data_trees = append(data_trees, v)
/*fmt.Printf("will commit tree name %x \n", v.GetName())
c := v.Cursor()
for k, v, err := c.First(); err == nil; k, v, err = c.Next() {
fmt.Printf("key=%x, value=%x\n", k, v)
}*/
}
chain.process_miner_transaction(bl_current.Miner_TX, bl_current.Height == 0, balance_tree, fees_collected, bl_current.Height)
} else {
rlog.Debugf("this block is a side block block height %d blid %s ", chain.Load_Block_Height(full_order[i]), full_order[i])
}
// we are here, means everything is okay, lets commit the update balance tree
commit_version, err := graviton.Commit(balance_tree)
data_trees = append(data_trees, balance_tree, sc_meta)
//fmt.Printf("committing data trees %+v\n", data_trees)
commit_version, err := graviton.Commit(data_trees...)
if err != nil {
panic(err)
}
//fmt.Printf("committed trees version %d\n", commit_version)
chain.Store.Topo_store.Write(current_topo_block, full_order[i], commit_version, chain.Load_Block_Height(full_order[i]))
rlog.Debugf("%d %s topo_index %d base topo %d", i, full_order[i], current_topo_block, base_topo_index)
@ -904,7 +993,7 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
continue
}
case transaction.NORMAL:
case transaction.NORMAL, transaction.BURN_TX, transaction.SC_TX:
if chain.Mempool.Mempool_TX_Exist(txid) {
rlog.Tracef(1, "Deleting TX from mempool txid=%s", txid)
chain.Mempool.Mempool_Delete_TX(txid)
@ -920,7 +1009,7 @@ func (chain *Blockchain) Add_Complete_Block(cbl *block.Complete_Block) (err erro
// ggive regpool a chance to register
if ss, err := chain.Store.Balance_store.LoadSnapshot(0); err == nil {
if balance_tree, err := ss.GetTree(BALANCE_TREE); err == nil {
if balance_tree, err := ss.GetTree(config.BALANCE_TREE); err == nil {
chain.Regpool.HouseKeeping(uint64(block_height), func(tx *transaction.Transaction) bool {
if tx.TransactionType != transaction.REGISTRATION { // tx not registration so delete
@ -948,6 +1037,11 @@ func (chain *Blockchain) Initialise_Chain_From_DB() {
chain.Lock()
defer chain.Unlock()
chain.Pruned = chain.LocatePruneTopo()
if chain.Pruned >= 1 {
logger.Debugf("Chain Pruned until %d\n", chain.Pruned)
}
// find the tips from the chain , first by reaching top height
// then downgrading to top-10 height
// then reworking the chain to get the tip
@ -963,7 +1057,7 @@ func (chain *Blockchain) Initialise_Chain_From_DB() {
// get dag unsettled, it's only possible when we have the tips
// chain.dag_unsettled = chain.Get_DAG_Unsettled() // directly off the disk
logger.Infof("Chain Tips %+v Height %d", chain.Tips, chain.Height)
logger.Debugf("Reloaded Chain Tips %+v Height %d", chain.Tips, chain.Height)
}
@ -1093,19 +1187,38 @@ var block_processing_time = prometheus.NewHistogram(prometheus.HistogramOpts{
// add a transaction to MEMPOOL,
// verifying everything means everything possible
// this only change mempool, no DB changes
func (chain *Blockchain) Add_TX_To_Pool(tx *transaction.Transaction) (result bool) {
func (chain *Blockchain) Add_TX_To_Pool(tx *transaction.Transaction) error {
var err error
if tx.IsPremine() {
return fmt.Errorf("premine tx not mineable")
}
if tx.IsRegistration() { // registration tx will not go any forward
// ggive regpool a chance to register
if ss, err := chain.Store.Balance_store.LoadSnapshot(0); err == nil {
if balance_tree, err := ss.GetTree(BALANCE_TREE); err == nil {
if balance_tree, err := ss.GetTree(config.BALANCE_TREE); err == nil {
if _, err := balance_tree.Get(tx.MinerAddress[:]); err == nil { // address already registered
return false
return nil
} else { // add to regpool
if chain.Regpool.Regpool_Add_TX(tx, 0) {
return nil
} else {
return fmt.Errorf("registration for address is already pending")
}
}
} else {
return err
}
} else {
return err
}
return chain.Regpool.Regpool_Add_TX(tx, 0)
}
switch tx.TransactionType {
case transaction.BURN_TX, transaction.NORMAL, transaction.SC_TX:
default:
return fmt.Errorf("such transaction type cannot appear in mempool")
}
// track counter for the amount of mempool tx
@ -1116,25 +1229,25 @@ func (chain *Blockchain) Add_TX_To_Pool(tx *transaction.Transaction) (result boo
// Coin base TX can not come through this path
if tx.IsCoinbase() {
logger.WithFields(log.Fields{"txid": txhash}).Warnf("TX rejected coinbase tx cannot appear in mempool")
return false
return fmt.Errorf("TX rejected coinbase tx cannot appear in mempool")
}
chain_height := uint64(chain.Get_Height())
if chain_height > tx.Height {
rlog.Tracef(2, "TX %s rejected since chain has already progressed", txhash)
return false
return fmt.Errorf("TX %s rejected since chain has already progressed", txhash)
}
// quick check without calculating everything whether tx is in pool, if yes we do nothing
if chain.Mempool.Mempool_TX_Exist(txhash) {
rlog.Tracef(2, "TX %s rejected Already in MEMPOOL", txhash)
return false
return fmt.Errorf("TX %s rejected Already in MEMPOOL", txhash)
}
// check whether tx is already mined
if _, err = chain.Store.Block_tx_store.ReadTX(txhash); err == nil {
rlog.Tracef(2, "TX %s rejected Already mined in some block", txhash)
return false
return fmt.Errorf("TX %s rejected Already mined in some block", txhash)
}
hf_version := chain.Get_Current_Version_at_Height(int64(chain_height))
@ -1143,12 +1256,12 @@ func (chain *Blockchain) Add_TX_To_Pool(tx *transaction.Transaction) (result boo
// currently, limits are as per consensus
if uint64(len(tx.Serialize())) > config.STARGATE_HE_MAX_TX_SIZE {
logger.WithFields(log.Fields{"txid": txhash}).Warnf("TX rejected Size %d byte Max possible %d", len(tx.Serialize()), config.STARGATE_HE_MAX_TX_SIZE)
return false
return fmt.Errorf("TX rejected Size %d byte Max possible %d", len(tx.Serialize()), config.STARGATE_HE_MAX_TX_SIZE)
}
// check whether enough fees is provided in the transaction
calculated_fee := chain.Calculate_TX_fee(hf_version, uint64(len(tx.Serialize())))
provided_fee := tx.Statement.Fees // get fee from tx
provided_fee := tx.Fees() // get fee from tx
_ = calculated_fee
_ = provided_fee
@ -1164,20 +1277,20 @@ func (chain *Blockchain) Add_TX_To_Pool(tx *transaction.Transaction) (result boo
}
*/
if chain.Verify_Transaction_NonCoinbase(hf_version, tx) && chain.Verify_Transaction_NonCoinbase_DoubleSpend_Check(tx) {
if chain.Mempool.Mempool_Add_TX(tx, 0) { // new tx come with 0 marker
rlog.Tracef(2, "Successfully added tx %s to pool", txhash)
mempool_tx_counter.Inc()
return true
} else {
rlog.Tracef(2, "TX %s rejected by pool", txhash)
return false
}
if err := chain.Verify_Transaction_NonCoinbase(hf_version, tx); err != nil {
rlog.Warnf("Incoming TX %s could not be verified, err %s", txhash, err)
return fmt.Errorf("Incoming TX %s could not be verified, err %s", txhash, err)
}
rlog.Warnf("Incoming TX %s could not be verified", txhash)
return false
if chain.Mempool.Mempool_Add_TX(tx, 0) { // new tx come with 0 marker
rlog.Tracef(2, "Successfully added tx %s to pool", txhash)
mempool_tx_counter.Inc()
return nil
} else {
rlog.Tracef(2, "TX %s rejected by pool", txhash)
return fmt.Errorf("TX %s rejected by pool", txhash)
}
}
@ -1618,7 +1731,7 @@ func (chain *Blockchain) BuildReachabilityNonces(bl *block.Block) map[crypto.Has
}
// tx has been loaded, now lets get the nonce
nonce_reach_map[tx.Proof.Nonce()] = true // add element to map for next check
nonce_reach_map[tx.Payloads[0].Proof.Nonce()] = true // add element to map for next check
}
}
return nonce_reach_map

View File

@ -17,13 +17,13 @@
package blockchain
//import "fmt"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/rpc"
// this function is only used by the RPC and is not used by the core and should be moved to RPC interface
/* fill up the above structure from the blockchain */
func (chain *Blockchain) GetBlockHeader(hash crypto.Hash) (result structures.BlockHeader_Print, err error) {
func (chain *Blockchain) GetBlockHeader(hash crypto.Hash) (result rpc.BlockHeader_Print, err error) {
bl, err := chain.Load_BL_FROM_ID(hash)
if err != nil {
return

View File

@ -23,7 +23,7 @@ import "math/big"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/globals"
var (

View File

@ -22,7 +22,7 @@ import "encoding/hex"
import "github.com/romana/rlog"
//import "github.com/deroproject/derosuite/address"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/globals"

View File

@ -31,7 +31,7 @@ import log "github.com/sirupsen/logrus"
import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
// this is only used for sorting and nothing else
type TX_Sorting_struct struct {
@ -126,7 +126,7 @@ func (obj *mempool_object) UnmarshalJSON(data []byte) error {
err = obj.Tx.DeserializeHeader(tx_bytes)
if err == nil {
obj.FEEperBYTE = obj.Tx.Statement.Fees / obj.Size
obj.FEEperBYTE = obj.Tx.Fees() / obj.Size
}
return err
}
@ -283,7 +283,7 @@ func (pool *Mempool) Mempool_Add_TX(tx *transaction.Transaction, Height uint64)
var object mempool_object
tx_hash := crypto.Hash(tx.GetHash())
if pool.Mempool_Keyimage_Spent(tx.Proof.Nonce()) {
if pool.Mempool_Keyimage_Spent(tx.Payloads[0].Proof.Nonce()) {
rlog.Debugf("Rejecting TX, since nonce already seen %x", tx_hash)
return false
}
@ -300,7 +300,7 @@ func (pool *Mempool) Mempool_Add_TX(tx *transaction.Transaction, Height uint64)
// pool.key_images.Store(tx.Vin[i].(transaction.Txin_to_key).K_image,true) // add element to map for next check
// }
pool.key_images.Store(tx.Proof.Nonce(), true)
pool.key_images.Store(tx.Payloads[0].Proof.Nonce(), true)
// we are here means we can add it to pool
object.Tx = tx
@ -308,7 +308,7 @@ func (pool *Mempool) Mempool_Add_TX(tx *transaction.Transaction, Height uint64)
object.Added = uint64(time.Now().UTC().Unix())
object.Size = uint64(len(tx.Serialize()))
object.FEEperBYTE = tx.Statement.Fees / object.Size
object.FEEperBYTE = tx.Fees() / object.Size
pool.txs.Store(tx_hash, &object)
@ -367,7 +367,7 @@ func (pool *Mempool) Mempool_Delete_TX(txid crypto.Hash) (tx *transaction.Transa
// for i := 0; i < len(object.Tx.Vin); i++ {
// pool.key_images.Delete(object.Tx.Vin[i].(transaction.Txin_to_key).K_image)
// }
pool.key_images.Delete(tx.Proof.Nonce())
pool.key_images.Delete(tx.Payloads[0].Proof.Nonce())
//pool.sort_list() // sort and update pool list
pool.modified = true // pool has been modified

View File

@ -31,9 +31,9 @@ import "github.com/romana/rlog"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/rpc"
//import "github.com/deroproject/derohe/emission"
import "github.com/deroproject/derohe/transaction"
@ -50,7 +50,7 @@ import "github.com/deroproject/graviton"
// the top hash over which to do mining now ( it should already be in the chain)
// this is work in progress
// TODO we need to rework fees algorithm, to improve its quality and lower fees
func (chain *Blockchain) Create_new_miner_block(miner_address address.Address, tx *transaction.Transaction) (cbl *block.Complete_Block, bl block.Block) {
func (chain *Blockchain) Create_new_miner_block(miner_address rpc.Address, tx *transaction.Transaction) (cbl *block.Complete_Block, bl block.Block) {
//chain.Lock()
//defer chain.Unlock()
@ -88,7 +88,7 @@ func (chain *Blockchain) Create_new_miner_block(miner_address address.Address, t
panic(err)
}
balance_tree, err := ss.GetTree(BALANCE_TREE)
balance_tree, err := ss.GetTree(config.BALANCE_TREE)
if err != nil {
panic(err)
}
@ -268,7 +268,7 @@ func (chain *Blockchain) Create_new_miner_block(miner_address address.Address, t
var cache_block block.Block
var cache_block_mutex sync.Mutex
func (chain *Blockchain) Create_new_block_template_mining(top_hash crypto.Hash, miner_address address.Address, reserve_space int) (bl block.Block, blockhashing_blob string, block_template_blob string, reserved_pos int) {
func (chain *Blockchain) Create_new_block_template_mining(top_hash crypto.Hash, miner_address rpc.Address, reserve_space int) (bl block.Block, blockhashing_blob string, block_template_blob string, reserved_pos int) {
rlog.Debugf("Mining block will give reward to %s", miner_address)
cache_block_mutex.Lock()
@ -310,12 +310,6 @@ var duplicate_height_check = map[uint64]bool{}
// otherwise the miner is trying to attack the network
func (chain *Blockchain) Accept_new_block(block_template []byte, blockhashing_blob []byte) (blid crypto.Hash, result bool, err error) {
// check whether we are in lowcpu mode, if yes reject the block
if globals.Arguments["--lowcpuram"].(bool) {
globals.Logger.Warnf("Mining is deactivated since daemon is running in low cpu mode, please check program options.")
return blid, false, fmt.Errorf("Please decativate lowcpuram mode")
}
if globals.Arguments["--sync-node"].(bool) {
globals.Logger.Warnf("Mining is deactivated since daemon is running with --sync-mode, please check program options.")
return blid, false, fmt.Errorf("Please deactivate --sync-node option before mining")
@ -415,7 +409,6 @@ func (chain *Blockchain) Accept_new_block(block_template []byte, blockhashing_bl
}
err, result = chain.Add_Complete_Block(cbl)
if result {
duplicate_height_check[bl.Height] = true

View File

@ -28,6 +28,7 @@ import "path/filepath"
import "github.com/romana/rlog"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/graviton"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/globals"
@ -158,9 +159,9 @@ func rewrite_graviton_store(store *storage, prune_topoheight int64, max_topoheig
var old_ss, write_ss *graviton.Snapshot
var old_balance_tree, write_balance_tree *graviton.Tree
if old_ss, err = store.Balance_store.LoadSnapshot(toporecord.State_Version); err == nil {
if old_balance_tree, err = old_ss.GetTree(BALANCE_TREE); err == nil {
if old_balance_tree, err = old_ss.GetTree(config.BALANCE_TREE); err == nil {
if write_ss, err = write_store.LoadSnapshot(0); err == nil {
if write_balance_tree, err = write_ss.GetTree(BALANCE_TREE); err == nil {
if write_balance_tree, err = write_ss.GetTree(config.BALANCE_TREE); err == nil {
var latest_commit_version uint64
latest_commit_version, err = clone_entire_tree(old_balance_tree, write_balance_tree)
@ -195,16 +196,16 @@ func rewrite_graviton_store(store *storage, prune_topoheight int64, max_topoheig
if old_toporecord, err = store.Topo_store.Read(old_topo); err == nil {
if old_ss, err = store.Balance_store.LoadSnapshot(old_toporecord.State_Version); err == nil {
if old_balance_tree, err = old_ss.GetTree(BALANCE_TREE); err == nil {
if old_balance_tree, err = old_ss.GetTree(config.BALANCE_TREE); err == nil {
// fetch new tree data
if new_toporecord, err = store.Topo_store.Read(new_topo); err == nil {
if new_ss, err = store.Balance_store.LoadSnapshot(new_toporecord.State_Version); err == nil {
if new_balance_tree, err = new_ss.GetTree(BALANCE_TREE); err == nil {
if new_balance_tree, err = new_ss.GetTree(config.BALANCE_TREE); err == nil {
// fetch tree where to write it
if write_ss, err = write_store.LoadSnapshot(0); err == nil {
if write_tree, err = write_ss.GetTree(BALANCE_TREE); err == nil {
if write_tree, err = write_ss.GetTree(config.BALANCE_TREE); err == nil {
// new_balance_tree.Graph("/tmp/original.dot")
// write_tree.Graph("/tmp/writable.dot")

View File

@ -30,7 +30,7 @@ import log "github.com/sirupsen/logrus"
import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
// this is only used for sorting and nothing else
type TX_Sorting_struct struct {
@ -125,7 +125,7 @@ func (obj *regpool_object) UnmarshalJSON(data []byte) error {
err = obj.Tx.DeserializeHeader(tx_bytes)
if err == nil {
obj.FEEperBYTE = obj.Tx.Statement.Fees / obj.Size
obj.FEEperBYTE = 0
}
return err
}

300
blockchain/sc.go Normal file
View File

@ -0,0 +1,300 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package blockchain
// this file implements necessary structure to SC handling
import "fmt"
import "runtime/debug"
import "encoding/binary"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/dvm"
//import "github.com/deroproject/graviton"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/transaction"
import "github.com/romana/rlog"
// currently we support 2 contract types
// 1 OPEN
// 2 PRIVATE
// SC_META_DATA is the per-contract record kept in the global SC meta tree.
// It serializes to a fixed 41 bytes: 1 (Type) + 8 (Balance) + 32 (DataHash).
type SC_META_DATA struct {
	Type     byte        // 0 Open, 1 Private
	Balance  uint64      // DERO balance held by the contract, in atomic units
	DataHash crypto.Hash // hash of SC data tree is here, so as the meta tree verifies all SC DATA
}
// MarshalBinary serializes the metadata into its fixed 41-byte wire form:
// byte [0] Type, bytes [1:9] Balance (little endian), bytes [9:41] DataHash.
func (meta SC_META_DATA) MarshalBinary() (buf []byte) {
	buf = make([]byte, 41, 41)
	buf[0] = meta.Type
	binary.LittleEndian.PutUint64(buf[1:], meta.Balance)
	// fix: previous offset 1+8+len(meta.DataHash) == 41 pointed past the end of
	// buf, so copy() was a no-op and DataHash was silently serialized as zeros
	copy(buf[1+8:], meta.DataHash[:])
	return
}
// UnmarshalBinary decodes the fixed 41-byte wire form produced by
// MarshalBinary. It rejects any buffer that is not exactly 41 bytes.
func (meta *SC_META_DATA) UnmarshalBinary(buf []byte) (err error) {
	if len(buf) != 1+8+32 {
		return fmt.Errorf("input buffer should be of 41 bytes in length")
	}
	meta.Type = buf[0]
	meta.Balance = binary.LittleEndian.Uint64(buf[1:])
	// fix: previous offset 1+8+len(meta.DataHash) == 41 read an empty tail,
	// leaving DataHash permanently zero after a round-trip
	copy(meta.DataHash[:], buf[1+8:])
	return nil
}
// SC_Meta_Key returns the key under which a contract's SC_META_DATA is
// stored in the global meta tree; it is simply the raw 32-byte SCID.
func SC_Meta_Key(scid crypto.Hash) []byte {
	key := scid[:]
	return key
}
// SC_Code_Key returns the key under which a contract's source code is
// stored. The key is the serialized string variable "C" and is identical
// for every contract, since code lives inside the per-SC data tree.
func SC_Code_Key(scid crypto.Hash) []byte {
	_ = scid // unused: the data tree is already scoped to this contract
	codeKey := dvm.Variable{Type: dvm.String, Value: "C"}
	return codeKey.MarshalBinaryPanic()
}
// execute_sc_function runs a single entrypoint of an already-installed smart
// contract inside the DVM. On a successful run (contract returns Uint64(0))
// the contract's key/value writes, leftover balance and queued external
// transfers are staged into data_tree for the caller to commit; on any error
// or non-zero return value everything is discarded.
//
//	w_sc_tree  - wrapper over the global SC meta tree (SCID -> SC_META_DATA)
//	data_tree  - wrapper over this contract's data tree (code + variables)
//	scid       - contract id whose entrypoint is invoked
//	bl_height, bl_topoheight, bl_hash - block context exposed to the contract
//	tx         - triggering transaction (signer, burned values, SCDATA params)
//	entrypoint - name of the contract function to run
//	hard_fork_version_current - not referenced in this body; presumably for
//	    fork-dependent behavior later — TODO confirm
func (chain *Blockchain) execute_sc_function(w_sc_tree *Tree_Wrapper, data_tree *Tree_Wrapper, scid crypto.Hash, bl_height, bl_topoheight uint64, bl_hash crypto.Hash, tx transaction.Transaction, entrypoint string, hard_fork_version_current int64) (gas uint64, err error) {
	defer func() {
		// safety so if anything wrong happens, verification fails
		// NOTE(review): the log text says "rewinding chain" — looks copy-pasted
		// from elsewhere; this recover guards SC execution, not a rewind
		if r := recover(); r != nil {
			logger.Warnf("Recovered while rewinding chain, Stack trace below block_hash ")
			logger.Warnf("Stack trace \n%s", debug.Stack())
		}
	}()

	//if !tx.Verify_SC_Signature() { // if tx is not SC TX, or Signature could not be verified skip it
	//	return
	//}

	tx_hash := tx.GetHash()
	tx_store := dvm.Initialize_TX_store()

	// used as value loader from disk
	// this function is used to load any data required by the SC
	diskloader := func(key dvm.DataKey, found *uint64) (result dvm.Variable) {
		var exists bool
		if result, exists = chain.LoadSCValue(data_tree, key.SCID, key.MarshalBinaryPanic()); exists {
			*found = uint64(1)
		}
		return
	}

	// raw byte-level loader; reports found=false on read error OR empty value
	diskloader_raw := func(key []byte) (value []byte, found bool) {
		var err error
		value, err = data_tree.Get(key[:])
		if err != nil {
			return value, false
		}

		if len(value) == 0 {
			return value, false
		}
		return value, true
	}

	balance, sc_parsed, found := chain.ReadSC(w_sc_tree, data_tree, scid)
	if !found {
		fmt.Printf("SC not found\n")
		return
	}
	// if we found the SC in parsed form, check whether entrypoint is found
	function, ok := sc_parsed.Functions[entrypoint]
	if !ok {
		rlog.Warnf("stored SC  does not contain entrypoint '%s' scid %s \n", entrypoint, scid)
		return
	}
	_ = function

	//if len(sc_tx.Params) == 0 { // initialize params if not initialized earlier
	//	sc_tx.Params = map[string]string{}
	//}
	//sc_tx.Params["value"] = fmt.Sprintf("%d", sc_tx.Value) // overide value

	tx_store.DiskLoader = diskloader // hook up loading from chain
	tx_store.DiskLoaderRaw = diskloader_raw
	tx_store.BalanceAtStart = balance
	tx_store.SCID = scid

	// setup block hash, height, topoheight correctly
	state := &dvm.Shared_State{
		Store: tx_store,
		Chain_inputs: &dvm.Blockchain_Input{
			BL_HEIGHT:     bl_height,
			BL_TOPOHEIGHT: uint64(bl_topoheight),
			SCID:          scid,
			BLID:          bl_hash,
			TXID:          tx_hash,
			Signer:        string(tx.MinerAddress[:]),
		},
	}

	// tally what this tx burned into the contract: DERO (zero SCID payloads)
	// and the contract's own token (payloads whose SCID matches scid)
	for p := range tx.Payloads {
		if tx.Payloads[p].SCID.IsZero() {
			state.DERO_Received += tx.Payloads[p].BurnValue
		}
		if tx.Payloads[p].SCID == scid {
			state.Token_Received += tx.Payloads[p].BurnValue
		}
	}

	// setup balance correctly
	tx_store.ReceiveInternal(scid, state.DERO_Received)

	// we have an entrypoint, now we must setup parameters and dvm
	// all parameters are in string form to bypass translation issues in middle layers
	params := map[string]interface{}{}

	for _, p := range function.Params {
		switch {
		case p.Type == dvm.Uint64 && p.Name == "value":
			params[p.Name] = fmt.Sprintf("%d", state.DERO_Received) // overide value
		case p.Type == dvm.Uint64 && tx.SCDATA.Has(p.Name, rpc.DataUint64):
			params[p.Name] = fmt.Sprintf("%d", tx.SCDATA.Value(p.Name, rpc.DataUint64).(uint64))
		case p.Type == dvm.String && tx.SCDATA.Has(p.Name, rpc.DataString):
			params[p.Name] = tx.SCDATA.Value(p.Name, rpc.DataString).(string)
		default:
			err = fmt.Errorf("entrypoint '%s' parameter type missing or not yet supported (%+v)", entrypoint, p)
			return
		}
	}

	result, err := dvm.RunSmartContract(&sc_parsed, entrypoint, state, params)
	if err != nil {
		rlog.Warnf("entrypoint '%s' scid %s err execution '%s' \n", entrypoint, scid, err)
		return
	}

	// a contract signals success by returning Uint64(0); anything else discards
	if err == nil && result.Type == dvm.Uint64 && result.Value.(uint64) == 0 { // confirm the changes
		for k, v := range tx_store.Keys {
			chain.StoreSCValue(data_tree, scid, k.MarshalBinaryPanic(), v.MarshalBinaryPanic())
		}
		for k, v := range tx_store.RawKeys {
			chain.StoreSCValue(data_tree, scid, []byte(k), v)
		}
		data_tree.leftover_balance = tx_store.Balance(scid)
		data_tree.transfere = append(data_tree.transfere, tx_store.Transfers[scid].TransferE...)
	} else { // discard all changes, since we never write to store immediately, they are purged, however we need to return any value associated
		err = fmt.Errorf("Discarded knowingly")
		return
	}
	return
}
// ReadSC loads a contract's balance (from its metadata in the meta tree)
// and its parsed source (from the per-contract data tree). found is true
// only when metadata and code are both readable and the code parses.
func (chain *Blockchain) ReadSC(w_sc_tree *Tree_Wrapper, data_tree *Tree_Wrapper, scid crypto.Hash) (balance uint64, sc dvm.SmartContract, found bool) {
	rawmeta, err := w_sc_tree.Get(SC_Meta_Key(scid))
	if err != nil {
		return
	}

	var meta SC_META_DATA // the meta record links to the SC code bytes
	if meta.UnmarshalBinary(rawmeta) != nil {
		return
	}
	balance = meta.Balance

	rawcode, err := data_tree.Get(SC_Code_Key(scid))
	if err != nil {
		return
	}

	var codevar dvm.Variable
	if err = codevar.UnmarshalBinary(rawcode); err != nil {
		return
	}

	if sc, _, err = dvm.ParseSmartContract(codevar.Value.(string)); err != nil {
		return
	}

	found = true
	return
}
// LoadSCValue reads one serialized dvm.Variable from the contract's data
// tree. found is false on read error, a missing/empty entry, or a value
// that fails to deserialize. scid is unused: the tree is already scoped.
func (chain *Blockchain) LoadSCValue(data_tree *Tree_Wrapper, scid crypto.Hash, key []byte) (v dvm.Variable, found bool) {
	raw, err := data_tree.Get(key)
	if err != nil || len(raw) == 0 {
		return v, false
	}
	found = v.UnmarshalBinary(raw) == nil
	return v, found
}
// ReadSCValue reads a single value from the SC data tree, accepting a
// uint64, int64 or string key. It returns nil for a nil or unsupported
// key, a missing entry, or a stored variable of Invalid type.
func (chain *Blockchain) ReadSCValue(data_tree *Tree_Wrapper, scid crypto.Hash, key interface{}) (value interface{}) {
	var keybytes []byte

	if key == nil {
		return
	}
	switch k := key.(type) {
	case uint64:
		keybytes = dvm.DataKey{Key: dvm.Variable{Type: dvm.Uint64, Value: k}}.MarshalBinaryPanic()
	case int64:
		// fix: this case previously built dvm.Variable{Type: dvm.String, Value: k},
		// tagging an int64 value as a String — a type/value mismatch that could
		// never encode the key the writer used; encode numerically like uint64
		keybytes = dvm.DataKey{Key: dvm.Variable{Type: dvm.Uint64, Value: uint64(k)}}.MarshalBinaryPanic()
	case string:
		keybytes = dvm.DataKey{Key: dvm.Variable{Type: dvm.String, Value: k}}.MarshalBinaryPanic()
	default:
		return
	}

	value_var, found := chain.LoadSCValue(data_tree, scid, keybytes)
	if found && value_var.Type != dvm.Invalid { // only expose decodable, valid values
		value = value_var.Value
	}
	return
}
// StoreSCValue stages a raw key/value write into the contract's data tree
// wrapper (committed later by the caller). scid is kept for interface
// symmetry with Load/ReadSCValue; the wrapper is already scoped to it.
func (chain *Blockchain) StoreSCValue(data_tree *Tree_Wrapper, scid crypto.Hash, key, value []byte) {
	_ = scid
	data_tree.Put(key, value)
}

View File

@ -25,15 +25,13 @@ import "path/filepath"
import log "github.com/sirupsen/logrus"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/graviton"
import "github.com/golang/groupcache/lru"
// note we are keeping the tree name small for disk savings, since they will be stored n times (atleast or archival nodes)
const BALANCE_TREE = "B"
// though these can be done within a single DB, these are separated for completely clarity purposes
type storage struct {
Balance_store *graviton.Store // stores most critical data, only history can be purged, its merkle tree is stored in the block
@ -85,7 +83,7 @@ func (s *storage) IsBalancesIntialized() bool {
var balancehash, random_hash [32]byte
balance_ss, _ := s.Balance_store.LoadSnapshot(0) // load most recent snapshot
balancetree, _ := balance_ss.GetTree(BALANCE_TREE)
balancetree, _ := balance_ss.GetTree(config.BALANCE_TREE)
// avoid hardcoding any hash
if balancehash, err = balancetree.Hash(); err == nil {
@ -354,3 +352,42 @@ func (chain *Blockchain) Load_Block_Topological_order_at_index(index_pos int64)
}
}
// Load_Merkle_Hash returns the combined state hash at the given topo
// height: the XOR of the merkle roots of the balance tree and the SC meta
// tree in the snapshot recorded for that height.
func (chain *Blockchain) Load_Merkle_Hash(index_pos int64) (hash crypto.Hash, err error) {
	toporecord, err := chain.Store.Topo_store.Read(index_pos)
	if err != nil {
		return hash, err
	}
	if toporecord.IsClean() { // a clean record has no committed state to hash
		err = fmt.Errorf("cannot query clean block")
		return
	}

	snapshot, err := chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version)
	if err != nil {
		return
	}

	btree, err := snapshot.GetTree(config.BALANCE_TREE)
	if err != nil {
		return
	}
	mtree, err := snapshot.GetTree(config.SC_META)
	if err != nil {
		return
	}

	bhash, err := btree.Hash()
	if err != nil {
		return
	}
	mhash, err := mtree.Hash()
	if err != nil {
		return
	}

	// fold both roots into one digest byte-by-byte
	for i := range bhash {
		hash[i] = bhash[i] ^ mhash[i]
	}
	return hash, nil
}

View File

@ -35,7 +35,7 @@ type storefs struct {
func (s *storefs) ReadBlock(h [32]byte) ([]byte, error) {
var dummy [32]byte
if h == dummy {
panic("empty block")
return nil, fmt.Errorf("empty block")
}
dir := filepath.Join(filepath.Join(s.basedir, "bltx_store"), fmt.Sprintf("%02x", h[0]), fmt.Sprintf("%02x", h[1]), fmt.Sprintf("%02x", h[2]))

View File

@ -23,7 +23,7 @@ import "path/filepath"
import "encoding/binary"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
type TopoRecord struct {
BLOCK_ID [32]byte
@ -161,8 +161,7 @@ func (s *storetopofs) LocatePruneTopo() int64 {
prune_topo--
// fmt.Printf("pruned topo %d\n",prune_topo)
pruned_till = prune_topo
return prune_topo
}
@ -208,7 +207,7 @@ func (s *storetopofs) binarySearchHeight(targetheight int64) (blids []crypto.Has
}
}
for i, count := midIndex, 0; i <= total_records && count < 100; i, count = i+1, count+1 {
for i, count := midIndex, 0; i < total_records && count < 100; i, count = i+1, count+1 {
record, _ := s.Read(i)
if record.Height == targetheight {
blids = append(blids, record.BLOCK_ID)

View File

@ -18,16 +18,42 @@ package blockchain
// this file implements core execution of all changes to block chain homomorphically
import "fmt"
import "math/big"
import "golang.org/x/xerrors"
import "github.com/deroproject/derohe/crypto"
import "github.com/romana/rlog"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/dvm"
import "github.com/deroproject/graviton"
// process the miner tx, giving fees, miner rewatd etc
func (chain *Blockchain) process_miner_transaction(tx transaction.Transaction, genesis bool, balance_tree *graviton.Tree, fees uint64) {
// convert bitcoin model to our, but skip initial 4 years of supply, so our total supply gets to 10.5 million
const RewardReductionInterval = 210000 * 600 / config.BLOCK_TIME // 210000 comes from bitcoin
const BaseReward = 50 * 100000 * config.BLOCK_TIME / 600 // convert bitcoin reward system to our block
// CalcBlockSubsidy returns the subsidy amount a block at the provided height
// should have. This is mainly used for determining how much the coinbase for
// newly generated blocks awards as well as validating the coinbase for blocks
// has the expected value.
//
// The subsidy is halved every SubsidyReductionInterval blocks. Mathematically
// this is: baseSubsidy / 2^(height/SubsidyReductionInterval)
//
// At the target block generation rate for the main network, this is
// approximately every 4 years.
//
// basically out of of the bitcoin supply, we have wiped of initial interval ( this wipes of 10.5 million, so total remaining is around 10.5 million
func CalcBlockReward(height uint64) uint64 {
return BaseReward >> ((height + RewardReductionInterval) / RewardReductionInterval)
}
// process the miner tx, giving fees, miner reward etc
func (chain *Blockchain) process_miner_transaction(tx transaction.Transaction, genesis bool, balance_tree *graviton.Tree, fees uint64, height uint64) {
var acckey crypto.Point
if err := acckey.DecodeCompressed(tx.MinerAddress[:]); err != nil {
panic(err)
@ -41,21 +67,39 @@ func (chain *Blockchain) process_miner_transaction(tx transaction.Transaction, g
}
// general coin base transaction
balance_serialized, err := balance_tree.Get(tx.MinerAddress[:])
if err != nil {
panic(err)
base_reward := CalcBlockReward(uint64(height))
full_reward := base_reward + fees
dev_reward := (full_reward * config.DEVSHARE) / 10000 // take % from reward
miner_reward := full_reward - dev_reward // it's value, do subtraction
{ // giver miner reward
balance_serialized, err := balance_tree.Get(tx.MinerAddress[:])
if err != nil {
panic(err)
}
balance := new(crypto.ElGamal).Deserialize(balance_serialized)
balance = balance.Plus(new(big.Int).SetUint64(miner_reward)) // add miners reward to miners balance homomorphically
balance_tree.Put(tx.MinerAddress[:], balance.Serialize()) // reserialize and store
}
{ // give devs reward
balance_serialized, err := balance_tree.Get(chain.Dev_Address_Bytes[:])
if err != nil {
panic(err)
}
balance := new(crypto.ElGamal).Deserialize(balance_serialized)
balance = balance.Plus(new(big.Int).SetUint64(dev_reward)) // add devs reward to devs balance homomorphically
balance_tree.Put(chain.Dev_Address_Bytes[:], balance.Serialize()) // reserialize and store
}
balance := new(crypto.ElGamal).Deserialize(balance_serialized)
balance = balance.Plus(new(big.Int).SetUint64(fees + 50000)) // add fees colllected to users balance homomorphically
balance_tree.Put(tx.MinerAddress[:], balance.Serialize()) // reserialize and store
return
}
// process the tx, giving fees, miner reward etc
// this should be atomic: either all changes are applied or none at all
func (chain *Blockchain) process_transaction(tx transaction.Transaction, balance_tree *graviton.Tree) uint64 {
func (chain *Blockchain) process_transaction(changed map[crypto.Hash]*graviton.Tree, tx transaction.Transaction, balance_tree *graviton.Tree) uint64 {
//fmt.Printf("Processing/Executing transaction %s %s\n", tx.GetHash(), tx.TransactionType.String())
switch tx.TransactionType {
@ -79,25 +123,233 @@ func (chain *Blockchain) process_transaction(tx transaction.Transaction, balance
return 0 // registration doesn't give any fees . why & how ?
case transaction.NORMAL:
for i := range tx.Statement.Publickeylist_compressed {
if balance_serialized, err := balance_tree.Get(tx.Statement.Publickeylist_compressed[i][:]); err == nil {
case transaction.BURN_TX, transaction.NORMAL, transaction.SC_TX: // burned amount is not added anywhere and thus lost forever
balance := new(crypto.ElGamal).Deserialize(balance_serialized)
echanges := crypto.ConstructElGamal(tx.Statement.C[i], tx.Statement.D)
balance = balance.Add(echanges) // homomorphic addition of changes
balance_tree.Put(tx.Statement.Publickeylist_compressed[i][:], balance.Serialize()) // reserialize and store
for t := range tx.Payloads {
var tree *graviton.Tree
if tx.Payloads[t].SCID.IsZero() {
tree = balance_tree
} else {
panic(err) // if balance could not be obtained panic ( we can never reach here, otherwise how tx got verified)
tree = changed[tx.Payloads[t].SCID]
}
for i := 0; i < int(tx.Payloads[t].Statement.RingSize); i++ {
key_pointer := tx.Payloads[t].Statement.Publickeylist_pointers[i*int(tx.Payloads[t].Statement.Bytes_per_publickey) : (i+1)*int(tx.Payloads[t].Statement.Bytes_per_publickey)]
if _, key_compressed, balance_serialized, err := tree.GetKeyValueFromHash(key_pointer); err == nil {
balance := new(crypto.ElGamal).Deserialize(balance_serialized)
echanges := crypto.ConstructElGamal(tx.Payloads[t].Statement.C[i], tx.Payloads[t].Statement.D)
balance = balance.Add(echanges) // homomorphic addition of changes
tree.Put(key_compressed, balance.Serialize()) // reserialize and store
} else {
panic(err) // if balance could not be obtained panic ( we can never reach here, otherwise how tx got verified)
}
}
}
return tx.Statement.Fees
return tx.Fees()
default:
panic("unknown transaction, do not know how to process it")
return 0
}
}
// Tree_Wrapper overlays an in-memory write cache on a graviton tree so SC
// execution can be discarded cheaply: writes land in entries and only reach
// the underlying tree when the caller explicitly commits them.
type Tree_Wrapper struct {
	tree             *graviton.Tree
	entries          map[string][]byte      // pending writes, keyed by raw key bytes
	leftover_balance uint64                 // SC balance remaining after execution
	transfere        []dvm.TransferExternal // external transfers queued by the SC
}
// Get returns the pending in-memory value for key when one exists,
// otherwise it falls through to the underlying graviton tree.
func (t *Tree_Wrapper) Get(key []byte) ([]byte, error) {
	value, ok := t.entries[string(key)]
	if !ok {
		return t.tree.Get(key)
	}
	return value, nil
}
// Put records value in the in-memory overlay only; the underlying tree is
// untouched until the caller commits. The value is copied defensively so
// later mutation of the caller's slice cannot change the staged write.
func (t *Tree_Wrapper) Put(key []byte, value []byte) error {
	cloned := make([]byte, len(value))
	copy(cloned, value)
	t.entries[string(key)] = cloned
	return nil
}
// process_transaction_sc performs the additional processing needed for SC
// transactions: installing new contracts (SC_INSTALL, where the txid becomes
// the SCID) and invoking entrypoints on installed ones (SC_CALL). Writes are
// staged in Tree_Wrapper overlays and committed to the graviton trees only
// when execution succeeds. Returns the gas consumed and any error; note the
// final return deliberately reports the tx fees with a nil error even after
// a failed/discarded execution, matching the caller's best-effort contract.
func (chain *Blockchain) process_transaction_sc(cache map[crypto.Hash]*graviton.Tree, ss *graviton.Snapshot, bl_height, bl_topoheight uint64, blid crypto.Hash, tx transaction.Transaction, balance_tree *graviton.Tree, sc_tree *graviton.Tree) (gas uint64, err error) {

	if len(tx.SCDATA) == 0 { // nothing SC related in this tx
		return tx.Fees(), nil
	}

	success := false

	w_balance_tree := &Tree_Wrapper{tree: balance_tree, entries: map[string][]byte{}}
	w_sc_tree := &Tree_Wrapper{tree: sc_tree, entries: map[string][]byte{}}
	_ = w_balance_tree

	var sc_data_tree *graviton.Tree // SC data tree
	var w_sc_data_tree *Tree_Wrapper

	txhash := tx.GetHash()
	scid := txhash // SC_INSTALL uses the txid as the new SCID; SC_CALL overwrites below

	defer func() {
		if success { // merge the trees
		}
	}()

	if tx.SCDATA.Has(rpc.SCACTION, rpc.DataUint64) { // but only if it is present
		action_code := rpc.SC_ACTION(tx.SCDATA.Value(rpc.SCACTION, rpc.DataUint64).(uint64))

		switch action_code {
		case rpc.SC_INSTALL: // request to install an SC
			if !tx.SCDATA.Has(rpc.SCCODE, rpc.DataString) { // but only if it is present
				break
			}
			sc_code := tx.SCDATA.Value(rpc.SCCODE, rpc.DataString).(string)
			if sc_code == "" { // no code provided nothing to do
				err = fmt.Errorf("no code provided")
				break
			}

			// check whether sc can be parsed
			pos := ""
			var sc dvm.SmartContract

			sc, pos, err = dvm.ParseSmartContract(sc_code)
			if err != nil {
				rlog.Warnf("error Parsing sc txid %s err %s pos %s\n", txhash, err, pos)
				break
			}

			// a contract exposing InitializePrivate is a private SC
			meta := SC_META_DATA{Balance: tx.Value}
			if _, ok := sc.Functions["InitializePrivate"]; ok {
				meta.Type = 1
			}

			if sc_data_tree, err = ss.GetTree(string(scid[:])); err != nil {
				break
			} else {
				w_sc_data_tree = &Tree_Wrapper{tree: sc_data_tree, entries: map[string][]byte{}}
			}

			// install SC, should we check for sanity now, why or why not
			w_sc_data_tree.Put(SC_Code_Key(scid), dvm.Variable{Type: dvm.String, Value: sc_code}.MarshalBinaryPanic())
			w_sc_tree.Put(SC_Meta_Key(scid), meta.MarshalBinary())

			// at this point we must trigger the initialize call in the DVM
			if meta.Type == 1 { // private SC
				gas, err = chain.execute_sc_function(w_sc_tree, w_sc_data_tree, scid, bl_height, bl_topoheight, blid, tx, "InitializePrivate", 1)
			} else {
				gas, err = chain.execute_sc_function(w_sc_tree, w_sc_data_tree, scid, bl_height, bl_topoheight, blid, tx, "Initialize", 1)
			}

		case rpc.SC_CALL: // trigger a CALL
			if !tx.SCDATA.Has(rpc.SCID, rpc.DataHash) { // but only if it is present
				err = fmt.Errorf("no scid provided")
				break
			}
			if !tx.SCDATA.Has("entrypoint", rpc.DataString) { // but only if it is present
				err = fmt.Errorf("no entrypoint provided")
				break
			}

			scid = tx.SCDATA.Value(rpc.SCID, rpc.DataHash).(crypto.Hash)

			if _, err = w_sc_tree.Get(SC_Meta_Key(scid)); err != nil {
				err = fmt.Errorf("scid %s not installed", scid)
				return
			}

			if sc_data_tree, err = ss.GetTree(string(scid[:])); err != nil {
				return
			} else {
				w_sc_data_tree = &Tree_Wrapper{tree: sc_data_tree, entries: map[string][]byte{}}
			}

			entrypoint := tx.SCDATA.Value("entrypoint", rpc.DataString).(string)
			gas, err = chain.execute_sc_function(w_sc_tree, w_sc_data_tree, scid, bl_height, bl_topoheight, blid, tx, entrypoint, 1)

		default: // unknown what to do
			// fix: original fmt.Errorf had an argument but no format verb (go vet error)
			err = fmt.Errorf("unknown action %d scid %s", action_code, scid)
			return
		}
	}

	// fix: commit only when execution succeeded AND a data tree was set up.
	// Several break paths above (e.g. missing SCCODE, or SCDATA without an
	// SCACTION entry) leave err == nil with w_sc_data_tree still nil, which
	// previously caused a nil-pointer panic in this block.
	if err == nil && w_sc_data_tree != nil {
		var data_tree *graviton.Tree
		var ok bool
		if data_tree, ok = cache[scid]; !ok {
			data_tree = w_sc_data_tree.tree
			cache[scid] = w_sc_data_tree.tree
		}

		// commit entire data to tree
		for k, v := range w_sc_data_tree.entries {
			if err = data_tree.Put([]byte(k), v); err != nil {
				return
			}
		}
		for k, v := range w_sc_tree.entries { // these entries are only partial
			if err = sc_tree.Put([]byte(k), v); err != nil {
				return
			}
		}

		// at this point, settle the balances: refresh the meta record with
		// whatever balance the contract left over after execution
		var meta_bytes []byte
		meta_bytes, err = w_sc_tree.Get(SC_Meta_Key(scid))
		if err != nil {
			return
		}

		var meta SC_META_DATA // the meta contains the link to the SC bytes
		if err = meta.UnmarshalBinary(meta_bytes); err != nil {
			return
		}
		meta.Balance = w_sc_data_tree.leftover_balance
		sc_tree.Put(SC_Meta_Key(scid), meta.MarshalBinary())

		// pay out any external transfers the SC queued during execution,
		// adding each amount homomorphically to the recipient's balance
		for _, transfer := range w_sc_data_tree.transfere {
			var balance_serialized []byte
			addr_bytes := []byte(transfer.Address)
			balance_serialized, err = balance_tree.Get(addr_bytes)
			if err != nil {
				return
			}
			balance := new(crypto.ElGamal).Deserialize(balance_serialized)
			balance = balance.Plus(new(big.Int).SetUint64(transfer.Amount))
			balance_tree.Put(addr_bytes, balance.Serialize()) // reserialize and store
		}
	}

	return tx.Fees(), nil
}

View File

@ -16,7 +16,7 @@
package blockchain
//import "fmt"
import "fmt"
import "time"
/*import "bytes"
@ -33,11 +33,11 @@ import "github.com/deroproject/graviton"
//import "github.com/romana/rlog"
import log "github.com/sirupsen/logrus"
//import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/crypto/bn256"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "github.com/deroproject/derosuite/emission"
@ -71,10 +71,10 @@ func clean_up_valid_cache() {
/* Coinbase transactions need to verify registration
* */
func (chain *Blockchain) Verify_Transaction_Coinbase(cbl *block.Complete_Block, minertx *transaction.Transaction) (result bool) {
func (chain *Blockchain) Verify_Transaction_Coinbase(cbl *block.Complete_Block, minertx *transaction.Transaction) (err error) {
if !minertx.IsCoinbase() { // transaction is not coinbase, return failed
return false
return fmt.Errorf("tx is not coinbase")
}
// make sure miner address is registered
@ -82,7 +82,7 @@ func (chain *Blockchain) Verify_Transaction_Coinbase(cbl *block.Complete_Block,
_, topos := chain.Store.Topo_store.binarySearchHeight(int64(cbl.Bl.Height - 1))
// load all db versions one by one and check whether the root hash matches the one mentioned in the tx
if len(topos) < 1 {
return false
return fmt.Errorf("could not find previous height blocks")
}
var balance_tree *graviton.Tree
@ -90,29 +90,26 @@ func (chain *Blockchain) Verify_Transaction_Coinbase(cbl *block.Complete_Block,
toporecord, err := chain.Store.Topo_store.Read(topos[i])
if err != nil {
log.Infof("Skipping block at height %d due to error while obtaining toporecord %s\n", i, err)
continue
return fmt.Errorf("could not read block at height %d due to error while obtaining toporecord topos %+v processing %d err:%s\n", cbl.Bl.Height-1, topos, i, err)
}
ss, err := chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version)
if err != nil {
panic(err)
return err
}
if balance_tree, err = ss.GetTree(BALANCE_TREE); err != nil {
panic(err)
if balance_tree, err = ss.GetTree(config.BALANCE_TREE); err != nil {
return err
}
if _, err := balance_tree.Get(minertx.MinerAddress[:]); err != nil {
//logger.Infof("balance not obtained err %s\n",err)
return fmt.Errorf("balance not obtained err %s\n", err)
//return false
} else {
return true
}
}
return false
return nil // success comes last
}
// all non miner tx must be non-coinbase tx
@ -122,156 +119,223 @@ func (chain *Blockchain) Verify_Transaction_Coinbase(cbl *block.Complete_Block,
// if the transaction has passed the check it can be added to mempool, relayed or added to blockchain
// the transaction has already been deserialized thats it
// It also expands the transactions, using the repective state trie
func (chain *Blockchain) Verify_Transaction_NonCoinbase(hf_version int64, tx *transaction.Transaction) (result bool) {
func (chain *Blockchain) Verify_Transaction_NonCoinbase(hf_version int64, tx *transaction.Transaction) (err error) {
var tx_hash crypto.Hash
defer func() { // safety so if anything wrong happens, verification fails
if r := recover(); r != nil {
logger.WithFields(log.Fields{"txid": tx_hash}).Warnf("Recovered while Verifying transaction, failed verification, Stack trace below")
logger.Warnf("Stack trace \n%s", debug.Stack())
result = false
err = fmt.Errorf("Stack Trace %s", debug.Stack())
}
}()
if tx.Version != 1 {
return false
return fmt.Errorf("TX should be version 1")
}
tx_hash = tx.GetHash()
if tx.TransactionType == transaction.REGISTRATION {
if _, ok := transaction_valid_cache.Load(tx_hash); ok {
return true //logger.Infof("Found in cache %s ",tx_hash)
return nil //logger.Infof("Found in cache %s ",tx_hash)
} else {
//logger.Infof("TX not found in cache %s len %d ",tx_hash, len(tmp_buffer))
}
if tx.IsRegistrationValid() {
transaction_valid_cache.Store(tx_hash, time.Now()) // signature got verified, cache it
return true
return nil
}
return false
return fmt.Errorf("Registration has invalid signature")
}
// currently we allow 2 types of transaction
if !(tx.TransactionType == transaction.NORMAL || tx.TransactionType == transaction.REGISTRATION) {
return false
// currently we allow following types of transaction
if !(tx.TransactionType == transaction.NORMAL || tx.TransactionType == transaction.SC_TX || tx.TransactionType == transaction.BURN_TX) {
return fmt.Errorf("Unknown transaction type")
}
// check sanity
if tx.Statement.RingSize != uint64(len(tx.Statement.Publickeylist_compressed)) || tx.Statement.RingSize != uint64(len(tx.Statement.Publickeylist)) {
return false
if tx.TransactionType == transaction.BURN_TX {
if tx.Value == 0 {
return fmt.Errorf("Burn Value cannot be zero")
}
}
// avoid some bugs lurking elsewhere
if tx.Height != uint64(int64(tx.Height)) {
return false
return fmt.Errorf("invalid tx height")
}
if tx.Statement.RingSize < 2 { // ring size minimum 4
return false
}
if tx.Statement.RingSize > 128 { // ring size current limited to 128
return false
}
if !crypto.IsPowerOf2(len(tx.Statement.Publickeylist_compressed)) {
return false
}
// check duplicate ring members within the tx
{
key_map := map[string]bool{}
for i := range tx.Statement.Publickeylist_compressed {
key_map[string(tx.Statement.Publickeylist_compressed[i][:])] = true
}
if len(key_map) != len(tx.Statement.Publickeylist_compressed) {
return false
for t := range tx.Payloads {
// check sanity
if tx.Payloads[t].Statement.RingSize != uint64(len(tx.Payloads[t].Statement.Publickeylist_pointers)/int(tx.Payloads[t].Statement.Bytes_per_publickey)) {
return fmt.Errorf("corrupted key pointers ringsize")
}
if tx.Payloads[t].Statement.RingSize < 2 { // ring size minimum 4
return fmt.Errorf("RingSize cannot be less than 2")
}
if tx.Payloads[t].Statement.RingSize > 128 { // ring size current limited to 128
return fmt.Errorf("RingSize cannot be more than 128")
}
if !crypto.IsPowerOf2(len(tx.Payloads[t].Statement.Publickeylist_pointers) / int(tx.Payloads[t].Statement.Bytes_per_publickey)) {
return fmt.Errorf("corrupted key pointers")
}
// check duplicate ring members within the tx
{
key_map := map[string]bool{}
for i := 0; i < int(tx.Payloads[t].Statement.RingSize); i++ {
key_map[string(tx.Payloads[t].Statement.Publickeylist_pointers[i*int(tx.Payloads[t].Statement.Bytes_per_publickey):(i+1)*int(tx.Payloads[t].Statement.Bytes_per_publickey)])] = true
}
if len(key_map) != int(tx.Payloads[t].Statement.RingSize) {
return fmt.Errorf("Duplicated ring members")
}
}
tx.Payloads[t].Statement.CLn = tx.Payloads[t].Statement.CLn[:0]
tx.Payloads[t].Statement.CRn = tx.Payloads[t].Statement.CRn[:0]
}
match_topo := int64(1)
// transaction needs to be expanded. this expansion needs balance state
_, topos := chain.Store.Topo_store.binarySearchHeight(int64(tx.Height))
// load all db versions one by one and check whether the root hash matches the one mentioned in the tx
if len(topos) < 1 {
return fmt.Errorf("TX could NOT be expanded")
}
for i := range topos {
hash, err := chain.Load_Merkle_Hash(topos[i])
if err != nil {
continue
}
if hash == tx.Payloads[0].Statement.Roothash {
match_topo = topos[i]
break // we have found the balance tree with which it was built now lets verify
}
}
if match_topo < 0 {
return fmt.Errorf("mentioned balance tree not found, cannot verify TX")
}
var balance_tree *graviton.Tree
toporecord, err := chain.Store.Topo_store.Read(match_topo)
if err != nil {
return err
}
tx.Statement.CLn = tx.Statement.CLn[:0]
tx.Statement.CRn = tx.Statement.CRn[:0]
ss, err := chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version)
if err != nil {
return err
}
// this expansion needs balance state
if len(tx.Statement.CLn) == 0 { // transaction needs to be expanded
_, topos := chain.Store.Topo_store.binarySearchHeight(int64(tx.Height))
// load all db versions one by one and check whether the root hash matches the one mentioned in the tx
if len(topos) < 1 {
panic("TX could NOT be expanded")
}
for i := range topos {
toporecord, err := chain.Store.Topo_store.Read(topos[i])
if err != nil {
//log.Infof("Skipping block at height %d due to error while obtaining toporecord %s\n", i, err)
continue
}
ss, err := chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version)
if err != nil {
panic(err)
}
if balance_tree, err = ss.GetTree(BALANCE_TREE); err != nil {
panic(err)
}
if hash, err := balance_tree.Hash(); err != nil {
panic(err)
} else {
//logger.Infof("dTX balance tree hash from tx %x treehash from blockchain %x", tx.Statement.Roothash, hash)
if hash == tx.Statement.Roothash {
break // we have found the balance tree with which it was built now lets verify
}
}
balance_tree = nil
}
if balance_tree, err = ss.GetTree(config.BALANCE_TREE); err != nil {
return err
}
if balance_tree == nil {
panic("mentioned balance tree not found, cannot verify TX")
return fmt.Errorf("mentioned balance tree not found, cannot verify TX")
}
if _, ok := transaction_valid_cache.Load(tx_hash); ok {
return true //logger.Infof("Found in cache %s ",tx_hash)
return nil //logger.Infof("Found in cache %s ",tx_hash)
} else {
//logger.Infof("TX not found in cache %s len %d ",tx_hash, len(tmp_buffer))
}
//logger.Infof("dTX state tree has been found")
// now lets calculate CLn and CRn
for i := range tx.Statement.Publickeylist_compressed {
balance_serialized, err := balance_tree.Get(tx.Statement.Publickeylist_compressed[i][:])
if err != nil {
//logger.Infof("balance not obtained err %s\n",err)
return false
trees := map[crypto.Hash]*graviton.Tree{}
var zerohash crypto.Hash
trees[zerohash] = balance_tree // initialize main tree by default
for t := range tx.Payloads {
tx.Payloads[t].Statement.Publickeylist_compressed = tx.Payloads[t].Statement.Publickeylist_compressed[:0]
tx.Payloads[t].Statement.Publickeylist = tx.Payloads[t].Statement.Publickeylist[:0]
var tree *graviton.Tree
if _, ok := trees[tx.Payloads[t].SCID]; ok {
tree = trees[tx.Payloads[t].SCID]
} else {
// fmt.Printf("SCID loading %s tree\n", tx.Payloads[t].SCID)
tree, _ = ss.GetTree(string(tx.Payloads[t].SCID[:]))
trees[tx.Payloads[t].SCID] = tree
}
var ll, rr bn256.G1
ebalance := new(crypto.ElGamal).Deserialize(balance_serialized)
// now lets calculate CLn and CRn
for i := 0; i < int(tx.Payloads[t].Statement.RingSize); i++ {
key_pointer := tx.Payloads[t].Statement.Publickeylist_pointers[i*int(tx.Payloads[t].Statement.Bytes_per_publickey) : (i+1)*int(tx.Payloads[t].Statement.Bytes_per_publickey)]
_, key_compressed, balance_serialized, err := tree.GetKeyValueFromHash(key_pointer)
if err != nil {
return fmt.Errorf("balance not obtained err %s\n", err)
}
ll.Add(ebalance.Left, tx.Statement.C[i])
tx.Statement.CLn = append(tx.Statement.CLn, &ll)
rr.Add(ebalance.Right, tx.Statement.D)
tx.Statement.CRn = append(tx.Statement.CRn, &rr)
// decode public key and expand
{
var p bn256.G1
var pcopy [33]byte
copy(pcopy[:], key_compressed)
if err = p.DecodeCompressed(key_compressed[:]); err != nil {
return fmt.Errorf("key %d could not be decompressed", i)
}
tx.Payloads[t].Statement.Publickeylist_compressed = append(tx.Payloads[t].Statement.Publickeylist_compressed, pcopy)
tx.Payloads[t].Statement.Publickeylist = append(tx.Payloads[t].Statement.Publickeylist, &p)
}
var ll, rr bn256.G1
ebalance := new(crypto.ElGamal).Deserialize(balance_serialized)
ll.Add(ebalance.Left, tx.Payloads[t].Statement.C[i])
tx.Payloads[t].Statement.CLn = append(tx.Payloads[t].Statement.CLn, &ll)
rr.Add(ebalance.Right, tx.Payloads[t].Statement.D)
tx.Payloads[t].Statement.CRn = append(tx.Payloads[t].Statement.CRn, &rr)
// prepare for another sub transaction
echanges := crypto.ConstructElGamal(tx.Payloads[t].Statement.C[i], tx.Payloads[t].Statement.D)
ebalance = new(crypto.ElGamal).Deserialize(balance_serialized).Add(echanges) // homomorphic addition of changes
tree.Put(key_compressed, ebalance.Serialize()) // reserialize and store temporarily, tree will be discarded after verification
}
}
if tx.Proof.Verify(&tx.Statement, tx.GetHash()) {
//logger.Infof("dTX verified with proof successfuly")
// at this point has been completely expanded, verify the tx statement
for t := range tx.Payloads {
if !tx.Payloads[t].Proof.Verify(&tx.Payloads[t].Statement, tx.GetHash(), tx.Payloads[t].BurnValue) {
fmt.Printf("Statement %+v\n", tx.Payloads[t].Statement)
fmt.Printf("Proof %+v\n", tx.Payloads[t].Proof)
return fmt.Errorf("transaction statement %d verification failed", t)
}
}
// these transactions are done
if tx.TransactionType == transaction.NORMAL || tx.TransactionType == transaction.BURN_TX {
transaction_valid_cache.Store(tx_hash, time.Now()) // signature got verified, cache it
return true
return nil
}
logger.Infof("transaction verification failed\n")
return false
// we reach here if tx proofs are valid
if tx.TransactionType != transaction.SC_TX {
return fmt.Errorf("non sc transaction should never reach here")
}
if !tx.IsRegistrationValid() {
return fmt.Errorf("SC has invalid signature")
}
return nil
/*
var tx_hash crypto.Hash
@ -501,23 +565,5 @@ func (chain *Blockchain) Verify_Transaction_NonCoinbase(hf_version int64, tx *tr
//logger.WithFields(log.Fields{"txid": tx_hash}).Debugf("TX successfully verified")
*/
return true
}
// double spend check is separate from the core checks ( due to softforks )
func (chain *Blockchain) Verify_Transaction_NonCoinbase_DoubleSpend_Check(tx *transaction.Transaction) (result bool) {
return true
}
// verify all non coinbase tx, single threaded for double spending on current active chain
func (chain *Blockchain) Verify_Block_DoubleSpending(cbl *block.Complete_Block) (result bool) {
/*
for i := 0; i < len(cbl.Txs); i++ {
if !chain.Verify_Transaction_NonCoinbase_DoubleSpend_Check(dbtx, cbl.Txs[i]) {
return false
}
}
*/
return true
}

View File

@ -1,16 +1,14 @@
#!/usr/bin/env bash
CURDIR=`/bin/pwd`
BASEDIR=$(dirname $0)
ABSPATH=$(readlink -f $0)
ABSDIR=$(dirname $ABSPATH)
cd $ABSDIR/../../../../
GOPATH=`pwd`
version=`cat src/github.com/deroproject/derohe/config/version.go | grep -i version |cut -d\" -f 2`
unset GOPATH
version=`cat ./config/version.go | grep -i version |cut -d\" -f 2`
cd $CURDIR
@ -18,6 +16,7 @@ bash $ABSDIR/build_package.sh "github.com/deroproject/derohe/cmd/derod"
bash $ABSDIR/build_package.sh "github.com/deroproject/derohe/cmd/explorer"
bash $ABSDIR/build_package.sh "github.com/deroproject/derohe/cmd/dero-wallet-cli"
bash $ABSDIR/build_package.sh "github.com/deroproject/derohe/cmd/dero-miner"
bash $ABSDIR/build_package.sh "github.com/deroproject/derohe/cmd/rpc_examples/pong_server"
for d in build/*; do cp Start.md "$d"; done

View File

@ -18,7 +18,7 @@ package main
// ripoff from blockchain folder
import "math/big"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
var (
// bigZero is 0 represented as a big.Int. It is defined here to avoid

View File

@ -37,9 +37,9 @@ import "strconv"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/astrobwt"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/rpc"
import log "github.com/sirupsen/logrus"
import "github.com/ybbus/jsonrpc"
@ -48,10 +48,10 @@ import "github.com/romana/rlog"
import "github.com/chzyer/readline"
import "github.com/docopt/docopt-go"
var rpcClient *jsonrpc.RPCClient
var rpcClient jsonrpc.RPCClient
var netClient *http.Client
var mutex sync.RWMutex
var job structures.GetBlockTemplate_Result
var job rpc.GetBlockTemplate_Result
var maxdelay int = 10000
var threads int
var iterations int = 100
@ -423,7 +423,7 @@ func increase_delay() {
func getwork() {
// create client
rpcClient = jsonrpc.NewRPCClient(daemon_rpc_address + "/json_rpc")
rpcClient = jsonrpc.NewClient(daemon_rpc_address + "/json_rpc")
var netTransport = &http.Transport{
Dial: (&net.Dialer{
@ -450,9 +450,9 @@ func getwork() {
for {
response, err = rpcClient.CallNamed("getblocktemplate", map[string]interface{}{"wallet_address": fmt.Sprintf("%s", wallet_address), "reserve_size": 10})
response, err = rpcClient.Call("getblocktemplate", map[string]interface{}{"wallet_address": fmt.Sprintf("%s", wallet_address), "reserve_size": 10})
if err == nil {
var block_template structures.GetBlockTemplate_Result
var block_template rpc.GetBlockTemplate_Result
err = response.GetObject(&block_template)
if err == nil {
mutex.Lock()

View File

@ -17,17 +17,22 @@
package main
import "io"
import "os"
import "time"
import "fmt"
import "io/ioutil"
//import "io/ioutil"
import "strings"
import "path/filepath"
import "encoding/hex"
//import "path/filepath"
//import "encoding/hex"
import "github.com/chzyer/readline"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/address"
//import "github.com/deroproject/derohe/address"
//import "github.com/deroproject/derohe/walletapi"
import "github.com/deroproject/derohe/transaction"
@ -42,12 +47,12 @@ func display_easymenu_post_open_command(l *readline.Instance) {
io.WriteString(w, "\t\033[1m3\033[0m\tDisplay Keys (hex)\n")
if !is_registered(wallet) {
if !wallet.IsRegistered() {
io.WriteString(w, "\t\033[1m4\033[0m\tAccount registration to blockchain (registration has no fee requirement and is precondition to use the account)\n")
io.WriteString(w, "\n")
io.WriteString(w, "\n")
} else { // hide some commands, if view only wallet
io.WriteString(w, "\n")
io.WriteString(w, "\t\033[1m4\033[0m\tDisplay wallet pool\n")
io.WriteString(w, "\t\033[1m5\033[0m\tTransfer (send DERO) To Another Wallet\n")
//io.WriteString(w, "\t\033[1m6\033[0m\tCreate Transaction in offline mode\n")
io.WriteString(w, "\n")
@ -55,7 +60,7 @@ func display_easymenu_post_open_command(l *readline.Instance) {
io.WriteString(w, "\t\033[1m7\033[0m\tChange wallet password\n")
io.WriteString(w, "\t\033[1m8\033[0m\tClose Wallet\n")
if is_registered(wallet) {
if wallet.IsRegistered() {
io.WriteString(w, "\t\033[1m12\033[0m\tTransfer all balance (send DERO) To Another Wallet\n")
io.WriteString(w, "\t\033[1m13\033[0m\tShow transaction history\n")
io.WriteString(w, "\t\033[1m14\033[0m\tRescan transaction history\n")
@ -90,7 +95,7 @@ func handle_easymenu_post_open_command(l *readline.Instance, line string) (proce
case "1":
fmt.Fprintf(l.Stderr(), "Wallet address : "+color_green+"%s"+color_white+"\n", wallet.GetAddress())
if !is_registered(wallet) {
if !wallet.IsRegistered() {
reg_tx := wallet.GetRegistrationTX()
fmt.Fprintf(l.Stderr(), "Registration TX : "+color_green+"%x"+color_white+"\n", reg_tx.Serialize())
}
@ -118,28 +123,34 @@ func handle_easymenu_post_open_command(l *readline.Instance, line string) (proce
case "4": // Registration
if !ValidateCurrentPassword(l, wallet) {
globals.Logger.Warnf("Invalid password")
PressAnyKey(l, wallet)
break
}
if !wallet.IsRegistered() {
//if valid_registration_or_display_error(l, wallet) {
// globals.Logger.Warnf("This wallet address is already registered.")
// break
//}
fmt.Fprintf(l.Stderr(), "Wallet address : "+color_green+"%s"+color_white+" is going to be registered.This is a pre-condition for using the online chain.It will take few seconds to register/", wallet.GetAddress())
reg_tx := wallet.GetRegistrationTX()
fmt.Fprintf(l.Stderr(), "Wallet address : "+color_green+"%s"+color_white+" is going to be registered.This is a pre-condition for using the online chain.It will take few seconds to register.\n", wallet.GetAddress())
reg_tx := wallet.GetRegistrationTX()
// at this point we must send the registration transaction
fmt.Fprintf(l.Stderr(), "Wallet address : "+color_green+"%s"+color_white+" is going to be registered.Pls wait till the account is registered.", wallet.GetAddress())
fmt.Fprintf(l.Stderr(), "Wallet address : "+color_green+"%s"+color_white+" is going to be registered.Pls wait till the account is registered.\n", wallet.GetAddress())
wallet.SendTransaction(reg_tx)
fmt.Printf("sending registration tx err %s\n", wallet.SendTransaction(reg_tx))
} else {
pool := wallet.GetPool()
fmt.Fprintf(l.Stderr(), "Wallet pool has %d pending/in-progress transactions.\n", len(pool))
fmt.Fprintf(l.Stderr(), "%5s %9s %8s %64s %s %s\n", "No.", "Amount", "TH", "TXID", "Destination", "Status")
for i := range pool {
var txid, status string
if len(pool[i].Tries) > 0 {
try := pool[i].Tries[len(pool[i].Tries)-1]
txid = try.TXID.String()
status = try.Status
} else {
status = "Will Dispatch in next block"
}
fmt.Fprintf(l.Stderr(), "%5d %9s %8d %64s %s %s\n", i, "-"+globals.FormatMoney(pool[i].Amount()), pool[i].Trigger_Height, txid, "Not implemented", status)
}
}
case "6":
offline_tx = true
@ -160,34 +171,129 @@ func handle_easymenu_post_open_command(l *readline.Instance, line string) (proce
break
}
amount_str := read_line_with_prompt(l, fmt.Sprintf("Enter amount to transfer in DERO (max TODO .00004 hard coded): "))
var amount_to_transfer uint64
if amount_str == "" {
amount_str = ".00009"
var arguments = rpc.Arguments{
// { rpc.RPC_DESTINATION_PORT, rpc.DataUint64,uint64(0x1234567812345678)},
// { rpc.RPC_VALUE_TRANSFER, rpc.DataUint64,uint64(12345)},
// { rpc.RPC_EXPIRY , rpc.DataTime, time.Now().Add(time.Hour).UTC()},
// { rpc.RPC_COMMENT , rpc.DataString, "Purchase XYZ"},
}
amount_to_transfer, err := globals.ParseAmount(amount_str)
if err != nil {
globals.Logger.Warnf("Err :%s", err)
break // invalid amount provided, bail out
if a.IsIntegratedAddress() { // read everything from the address
if a.Arguments.Validate_Arguments() != nil {
globals.Logger.Warnf("Integrated Address arguments could not be validated, err: %s", err)
break
}
if !a.Arguments.Has(rpc.RPC_DESTINATION_PORT, rpc.DataUint64) { // but only it is present
globals.Logger.Warnf("Integrated Address does not contain destination port.")
break
}
arguments = append(arguments, rpc.Argument{rpc.RPC_DESTINATION_PORT, rpc.DataUint64, a.Arguments.Value(rpc.RPC_DESTINATION_PORT, rpc.DataUint64).(uint64)})
// arguments = append(arguments, rpc.Argument{"Comment", rpc.DataString, "holygrail of all data is now working if you can see this"})
if a.Arguments.Has(rpc.RPC_EXPIRY, rpc.DataTime) { // but only it is present
if a.Arguments.Value(rpc.RPC_EXPIRY, rpc.DataTime).(time.Time).Before(time.Now().UTC()) {
globals.Logger.Warnf("This address has expired on %s", a.Arguments.Value(rpc.RPC_EXPIRY, rpc.DataTime))
break
} else {
globals.Logger.Infof("This address will expire on %s", a.Arguments.Value(rpc.RPC_EXPIRY, rpc.DataTime))
}
}
globals.Logger.Infof("Destination port is integreted in address ID:%016x", a.Arguments.Value(rpc.RPC_DESTINATION_PORT, rpc.DataUint64).(uint64))
if a.Arguments.Has(rpc.RPC_COMMENT, rpc.DataString) { // but only it is present
globals.Logger.Infof("Integrated Message:%s", a.Arguments.Value(rpc.RPC_COMMENT, rpc.DataString))
}
}
var payment_id []byte
_ = payment_id
// if user provided an integrated address donot ask him payment id
if a.IsIntegratedAddress() {
globals.Logger.Infof("Payment ID is integreted in address ID:%x", a.PaymentID)
// arguments have been already validated
for _, arg := range a.Arguments {
if !(arg.Name == rpc.RPC_COMMENT || arg.Name == rpc.RPC_EXPIRY || arg.Name == rpc.RPC_DESTINATION_PORT || arg.Name == rpc.RPC_SOURCE_PORT || arg.Name == rpc.RPC_VALUE_TRANSFER) {
switch arg.DataType {
case rpc.DataString:
if v, err := ReadString(l, arg.Name, arg.Value.(string)); err == nil {
arguments = append(arguments, rpc.Argument{arg.Name, arg.DataType, v})
} else {
globals.Logger.Warnf("%s could not be parsed (type %s),", arg.Name, arg.DataType)
return
}
case rpc.DataInt64:
if v, err := ReadInt64(l, arg.Name, arg.Value.(int64)); err == nil {
arguments = append(arguments, rpc.Argument{arg.Name, arg.DataType, v})
} else {
globals.Logger.Warnf("%s could not be parsed (type %s),", arg.Name, arg.DataType)
return
}
case rpc.DataUint64:
if v, err := ReadUint64(l, arg.Name, arg.Value.(uint64)); err == nil {
arguments = append(arguments, rpc.Argument{arg.Name, arg.DataType, v})
} else {
globals.Logger.Warnf("%s could not be parsed (type %s),", arg.Name, arg.DataType)
return
}
case rpc.DataFloat64:
if v, err := ReadFloat64(l, arg.Name, arg.Value.(float64)); err == nil {
arguments = append(arguments, rpc.Argument{arg.Name, arg.DataType, v})
} else {
globals.Logger.Warnf("%s could not be parsed (type %s),", arg.Name, arg.DataType)
return
}
case rpc.DataTime:
globals.Logger.Warnf("time argument is currently not supported.")
break
}
}
}
if a.Arguments.Has(rpc.RPC_VALUE_TRANSFER, rpc.DataUint64) { // but only it is present
globals.Logger.Infof("Transaction Value: %s", globals.FormatMoney(a.Arguments.Value(rpc.RPC_VALUE_TRANSFER, rpc.DataUint64).(uint64)))
amount_to_transfer = a.Arguments.Value(rpc.RPC_VALUE_TRANSFER, rpc.DataUint64).(uint64)
} else {
amount_str := read_line_with_prompt(l, fmt.Sprintf("Enter amount to transfer in DERO (max TODO): "))
if amount_str == "" {
amount_str = ".00009"
}
amount_to_transfer, err = globals.ParseAmount(amount_str)
if err != nil {
globals.Logger.Warnf("Err :%s", err)
break // invalid amount provided, bail out
}
}
// if no arguments, use space by embedding a small comment
if len(arguments) == 0 { // allow user to enter Comment
if v, err := ReadString(l, "Comment", ""); err == nil {
arguments = append(arguments, rpc.Argument{"Comment", rpc.DataString, v})
} else {
globals.Logger.Warnf("%s could not be parsed (type %s),", "Comment", rpc.DataString)
return
}
}
if _, err := arguments.CheckPack(transaction.PAYLOAD0_LIMIT); err != nil {
globals.Logger.Warnf("Arguments packing err: %s,", err)
return
}
if ConfirmYesNoDefaultNo(l, "Confirm Transaction (y/N)") {
addr_list := []address.Address{*a}
amount_list := []uint64{amount_to_transfer} // transfer 50 dero, 2 dero
fees_per_kb := uint64(0) // fees must be calculated by walletapi
tx, err := wallet.Transfer(addr_list, amount_list, 0, hex.EncodeToString(payment_id), fees_per_kb, 0, false)
//src_port := uint64(0xffffffffffffffff)
_, err := wallet.PoolTransfer([]rpc.Transfer{rpc.Transfer{Amount: amount_to_transfer, Destination: a.String(), Payload_RPC: arguments}}, rpc.Arguments{}) // empty SCDATA
if err != nil {
globals.Logger.Warnf("Error while building Transaction err %s\n", err)
break
}
build_relay_transaction(l, tx, err, offline_tx, amount_list)
//fmt.Printf("queued tx err %s\n")
}
case "12":
@ -199,31 +305,34 @@ func handle_easymenu_post_open_command(l *readline.Instance, line string) (proce
break
}
// a , amount_to_transfer, err := collect_transfer_info(l,wallet)
fmt.Printf("dest address %s\n", "deroi1qxqqkmaz8nhv4q07w3cjyt84kmrqnuw4nprpqfl9xmmvtvwa7cdykxq5dph4ufnx5ndq4ltraf (14686f5e2666a4da) dero1qxqqkmaz8nhv4q07w3cjyt84kmrqnuw4nprpqfl9xmmvtvwa7cdykxqpfpaes")
a, err := ReadAddress(l)
if err != nil {
globals.Logger.Warnf("Err :%s", err)
break
}
// if user provided an integrated address donot ask him payment id
if a.IsIntegratedAddress() {
globals.Logger.Infof("Payment ID is integreted in address ID:%x", a.PaymentID)
}
globals.Logger.Warnf("Not supported err %s\n", err)
if ConfirmYesNoDefaultNo(l, "Confirm Transaction to send entire balance (y/N)") {
addr_list := []address.Address{*a}
amount_list := []uint64{0} // transfer 50 dero, 2 dero
fees_per_kb := uint64(0) // fees must be calculated by walletapi
tx, err := wallet.Transfer(addr_list, amount_list, 0, "", fees_per_kb, 0, true)
/*
// a , amount_to_transfer, err := collect_transfer_info(l,wallet)
fmt.Printf("dest address %s\n", "deroi1qxqqkmaz8nhv4q07w3cjyt84kmrqnuw4nprpqfl9xmmvtvwa7cdykxq5dph4ufnx5ndq4ltraf (14686f5e2666a4da) dero1qxqqkmaz8nhv4q07w3cjyt84kmrqnuw4nprpqfl9xmmvtvwa7cdykxqpfpaes")
a, err := ReadAddress(l)
if err != nil {
globals.Logger.Warnf("Error while building Transaction err %s\n", err)
globals.Logger.Warnf("Err :%s", err)
break
}
// if user provided an integrated address donot ask him payment id
if a.IsIntegratedAddress() {
globals.Logger.Infof("Payment ID is integreted in address ID:%x", a.PaymentID)
}
build_relay_transaction(l, tx, err, offline_tx, amount_list)
}
if ConfirmYesNoDefaultNo(l, "Confirm Transaction to send entire balance (y/N)") {
addr_list := []address.Address{*a}
amount_list := []uint64{0} // transfer 50 dero, 2 dero
fees_per_kb := uint64(0) // fees must be calculated by walletapi
uid, err := wallet.PoolTransfer(addr_list, amount_list, fees_per_kb, 0, true)
_ = uid
if err != nil {
globals.Logger.Warnf("Error while building Transaction err %s\n", err)
break
}
}
*/
//PressAnyKey(l, wallet) // wait for a key press
@ -275,62 +384,3 @@ func handle_easymenu_post_open_command(l *readline.Instance, line string) (proce
}
return
}
// handles the output after building tx, takes feedback, confirms or relays tx
func build_relay_transaction(l *readline.Instance, tx *transaction.Transaction, err error, offline_tx bool, amount_list []uint64) {
if err != nil {
globals.Logger.Warnf("Error while building Transaction err %s\n", err)
return
}
amount := uint64(0)
for i := range amount_list {
amount += amount_list[i]
}
globals.Logger.Infof("Transfering total amount %s DERO", globals.FormatMoney(amount))
globals.Logger.Infof("fees %s DERO", globals.FormatMoney(tx.Statement.Fees))
globals.Logger.Infof("TX Size %0.1f KiB", float32(len(tx.Serialize()))/1024.0)
//if input_sum != (amount + change + tx.Fee()) {
// panic(fmt.Sprintf("Inputs %d != outputs ( %d + %d + %d )", input_sum, amount, change, tx.RctSignature.Get_TX_Fee()))
//}
//if ConfirmYesNoDefaultNo(l, "Confirm Transaction (y/N)") {
if true {
if offline_tx { // if its an offline tx, dump it to a file
cur_dir, err := os.Getwd()
if err != nil {
globals.Logger.Warnf("Cannot obtain current directory to save tx")
globals.Logger.Infof("Transaction discarded")
return
}
filename := filepath.Join(cur_dir, tx.GetHash().String()+".tx")
err = ioutil.WriteFile(filename, []byte(hex.EncodeToString(tx.Serialize())), 0600)
if err == nil {
if err == nil {
globals.Logger.Infof("Transaction saved successfully. txid = %s", tx.GetHash())
globals.Logger.Infof("Saved to %s", filename)
} else {
globals.Logger.Warnf("Error saving tx to %s, err %s", filename, err)
}
}
} else {
err = wallet.SendTransaction(tx) // relay tx to daemon/network
if err == nil {
globals.Logger.Infof("Transaction sent successfully. txid = %s", tx.GetHash())
} else {
globals.Logger.Warnf("Transaction sending failed txid = %s, err %s", tx.GetHash(), err)
}
}
PressAnyKey(l, wallet) // wait for a key press
} else {
globals.Logger.Infof("Transaction discarded")
}
}

View File

@ -24,7 +24,7 @@ import "encoding/hex"
import "github.com/chzyer/readline"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/walletapi"
@ -61,6 +61,8 @@ func handle_easymenu_pre_open_command(l *readline.Instance, line string) {
command = strings.ToLower(line_parts[0])
}
var wallett *walletapi.Wallet_Disk
//account_state := account_valid
switch command {
case "1": // open existing wallet
@ -68,7 +70,7 @@ func handle_easymenu_pre_open_command(l *readline.Instance, line string) {
// ask user a password
for i := 0; i < 3; i++ {
wallet, err = walletapi.Open_Encrypted_Wallet(filename, ReadPassword(l, filename))
wallett, err = walletapi.Open_Encrypted_Wallet(filename, ReadPassword(l, filename))
if err != nil {
globals.Logger.Warnf("Error occurred while opening wallet file %s. err %s", filename, err)
wallet = nil
@ -77,7 +79,9 @@ func handle_easymenu_pre_open_command(l *readline.Instance, line string) {
break
}
}
if wallet != nil {
if wallett != nil {
wallet = wallett
wallett = nil
globals.Logger.Infof("Successfully opened wallet")
common_processing(wallet)
@ -89,17 +93,20 @@ func handle_easymenu_pre_open_command(l *readline.Instance, line string) {
password := ReadConfirmedPassword(l, "Enter password", "Confirm password")
wallet, err = walletapi.Create_Encrypted_Wallet_Random(filename, password)
wallett, err = walletapi.Create_Encrypted_Wallet_Random(filename, password)
if err != nil {
globals.Logger.Warnf("Error occured while creating new wallet, err: %s", err)
wallet = nil
break
}
err = wallet.Set_Encrypted_Wallet_Password(password)
err = wallett.Set_Encrypted_Wallet_Password(password)
if err != nil {
globals.Logger.Warnf("Error changing password")
}
wallet = wallett
wallett = nil
seed_language := choose_seed_language(l)
wallet.SetSeedLanguage(seed_language)
globals.Logger.Debugf("Seed Language %s", seed_language)
@ -114,11 +121,13 @@ func handle_easymenu_pre_open_command(l *readline.Instance, line string) {
password := ReadConfirmedPassword(l, "Enter password", "Confirm password")
electrum_words := read_line_with_prompt(l, "Enter seed (25 words) : ")
wallet, err = walletapi.Create_Encrypted_Wallet_From_Recovery_Words(filename, password, electrum_words)
wallett, err = walletapi.Create_Encrypted_Wallet_From_Recovery_Words(filename, password, electrum_words)
if err != nil {
globals.Logger.Warnf("Error while recovering wallet using seed err %s\n", err)
break
}
wallet = wallett
wallett = nil
//globals.Logger.Debugf("Seed Language %s", account.SeedLanguage)
globals.Logger.Infof("Successfully recovered wallet from seed")
common_processing(wallet)
@ -136,12 +145,14 @@ func handle_easymenu_pre_open_command(l *readline.Instance, line string) {
break
}
wallet, err = walletapi.Create_Encrypted_Wallet(filename, password, new(crypto.BNRed).SetBytes(seed_raw))
wallett, err = walletapi.Create_Encrypted_Wallet(filename, password, new(crypto.BNRed).SetBytes(seed_raw))
if err != nil {
globals.Logger.Warnf("Error while recovering wallet using seed key err %s\n", err)
break
}
globals.Logger.Infof("Successfully recovered wallet from hex seed")
wallet = wallett
wallett = nil
seed_language := choose_seed_language(l)
wallet.SetSeedLanguage(seed_language)
globals.Logger.Debugf("Seed Language %s", seed_language)
@ -233,8 +244,10 @@ func common_processing(wallet *walletapi.Wallet_Disk) {
globals.Logger.Warnf("Error starting rpc server err %s", err)
}
}
time.Sleep(time.Second)
// init_script_engine(wallet) // init script engine
// init_plugins_engine(wallet) // init script engine
}

View File

@ -119,7 +119,7 @@ func main() {
}
// init the lookup table one, anyone importing walletapi should init this first, this will take around 1 sec on any recent system
walletapi.Initialize_LookupTable(1, 1<<20)
walletapi.Initialize_LookupTable(1, 1<<17)
// We need to initialize readline first, so it changes stderr to ansi processor on windows
l, err := readline.NewEx(&readline.Config{
@ -407,11 +407,13 @@ func update_prompt(l *readline.Instance) {
balance_string := ""
//balance_unlocked, locked_balance := wallet.Get_Balance_Rescan()// wallet.Get_Balance()
balance_unlocked, locked_balance := wallet.Get_Balance()
balance_string = fmt.Sprintf(color_green+"%s "+color_white+"| "+color_yellow+"%s", globals.FormatMoney(balance_unlocked), globals.FormatMoney(locked_balance))
balance_unlocked, _ := wallet.Get_Balance()
balance_string = fmt.Sprintf(color_green+"%s "+color_white, globals.FormatMoney(balance_unlocked))
if wallet.Error != nil {
balance_string += fmt.Sprintf(color_red+" %s ", wallet.Error)
} else if wallet.PoolCount() > 0 {
balance_string += fmt.Sprintf(color_yellow+"(%d tx pending for -%s)", wallet.PoolCount(), globals.FormatMoney(wallet.PoolBalance()))
}
testnet_string := ""

View File

@ -30,12 +30,13 @@ import "encoding/hex"
import "github.com/chzyer/readline"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/walletapi"
import "github.com/deroproject/derohe/cryptography/crypto"
var account walletapi.Account
// handle all commands while in prompt mode
@ -80,7 +81,32 @@ func handle_prompt_command(l *readline.Instance, line string) {
fallthrough
case "balance": // give user his balance
balance_unlocked, locked_balance := wallet.Get_Balance_Rescan()
fmt.Fprintf(l.Stderr(), "Balance : "+color_green+"%s"+color_white+"\n\n", globals.FormatMoney(locked_balance+balance_unlocked))
fmt.Fprintf(l.Stderr(), "DERO Balance : "+color_green+"%s"+color_white+"\n", globals.FormatMoney(locked_balance+balance_unlocked))
line_parts := line_parts[1:] // remove first part
switch len(line_parts) {
case 0:
//globals.Logger.Warnf("not implemented")
break
case 1: // scid balance
scid := crypto.HashHexToHash(line_parts[0])
//globals.Logger.Infof("scid1 %s line_parts %+v", scid, line_parts)
balance, err := wallet.GetDecryptedBalanceAtTopoHeight(scid, -1, wallet.GetAddress().String())
//globals.Logger.Infof("scid %s", scid)
if err != nil {
globals.Logger.Infof("error %s", err)
} else {
fmt.Fprintf(l.Stderr(), "SCID %s Balance : "+color_green+"%s"+color_white+"\n\n", line_parts[0], globals.FormatMoney(balance))
}
case 2: // scid balance at topoheight
globals.Logger.Warnf("not implemented")
break
}
case "rescan_bc", "rescan_spent": // rescan from 0
if offline_mode {
@ -123,7 +149,7 @@ func handle_prompt_command(l *readline.Instance, line string) {
globals.Logger.Warnf("Error parsing txhash")
break
}
key := wallet.GetTXKey(crypto.HexToHash(line_parts[1]))
key := wallet.GetTXKey(line_parts[1])
if key != "" {
globals.Logger.Infof("TX Proof key \"%s\"", key)
} else {
@ -134,7 +160,7 @@ func handle_prompt_command(l *readline.Instance, line string) {
globals.Logger.Warnf("eg. get_tx_key ea551b02b9f1e8aebe4d7b1b7f6bf173d76ae614cb9a066800773fee9e226fd7")
}
case "sweep_all", "transfer_all": // transfer everything
Transfer_Everything(l)
//Transfer_Everything(l)
case "show_transfers":
show_transfers(l, wallet, 100)
@ -153,11 +179,37 @@ func handle_prompt_command(l *readline.Instance, line string) {
case "i8", "integrated_address": // user wants a random integrated address 8 bytes
a := wallet.GetRandomIAddress8()
fmt.Fprintf(l.Stderr(), "Wallet integrated address : "+color_green+"%s"+color_white+"\n", a.String())
fmt.Fprintf(l.Stderr(), "Embedded payment ID : "+color_green+"%x"+color_white+"\n", a.PaymentID)
fmt.Fprintf(l.Stderr(), "Embedded Arguments : "+color_green+"%s"+color_white+"\n", a.Arguments)
case "version":
globals.Logger.Infof("Version %s\n", config.Version.String())
case "burn":
line_parts := line_parts[1:] // remove first part
if len(line_parts) < 2 {
globals.Logger.Warnf("burn needs destination address and amount as input parameter")
break
}
addr := line_parts[0]
send_amount := uint64(1)
burn_amount, err := globals.ParseAmount(line_parts[1])
if err != nil {
globals.Logger.Warnf("Error Parsing burn amount \"%s\" err %s", line_parts[1], err)
return
}
if ConfirmYesNoDefaultNo(l, "Confirm Transaction (y/N)") {
//uid, err := wallet.PoolTransferWithBurn(addr, send_amount, burn_amount, data, rpc.Arguments{})
uid, err := wallet.PoolTransfer([]rpc.Transfer{rpc.Transfer{Amount: send_amount, Burn: burn_amount, Destination: addr}}, rpc.Arguments{}) // empty SCDATA
_ = uid
if err != nil {
globals.Logger.Warnf("Error while building Transaction err %s\n", err)
break
}
//fmt.Printf("queued tx err %s\n", err)
//build_relay_transaction(l, uid, err, offline_tx, amount_list)
}
case "transfer":
// parse the address, amount pair
/*
@ -221,16 +273,6 @@ func handle_prompt_command(l *readline.Instance, line string) {
}
// if user provided an integrated address donot ask him payment id
// otherwise confirm whether user wants to send without payment id
if payment_id_integrated == false && len(payment_id) == 0 {
payment_id_bytes, err := ReadPaymentID(l)
payment_id = hex.EncodeToString(payment_id_bytes)
if err != nil {
globals.Logger.Warnf("Err :%s", err)
break
}
}
offline := false
tx, inputs, input_sum, change, err := wallet.Transfer(addr_list, amount_list, 0, payment_id, 0, 0)
@ -242,6 +284,10 @@ func handle_prompt_command(l *readline.Instance, line string) {
if wallet != nil {
wallet.Close_Encrypted_Wallet() // overwrite previous instance
}
case "flush": // flush wallet pool
if wallet != nil {
fmt.Fprintf(l.Stderr(), "Flushed %d transactions from wallet pool\n", wallet.PoolClear())
}
case "": // blank enter key just loop
default:
@ -317,85 +363,8 @@ func handle_set_command(l *readline.Instance, line string) {
}
}
// Transfer_Everything was the CLI entry point for draining the whole wallet
// balance to a single destination address.
// NOTE(review): the entire body below is commented out — this is disabled
// legacy code kept for reference only, and the function is currently a no-op
// when invoked from the prompt.
func Transfer_Everything(l *readline.Instance) {
/*
	if wallet.Is_View_Only() {
		fmt.Fprintf(l.Stderr(), color_yellow+"View Only wallet cannot transfer."+color_white)
	}
	if !ValidateCurrentPassword(l, wallet) {
		globals.Logger.Warnf("Invalid password")
		return
	}
	// a , amount_to_transfer, err := collect_transfer_info(l,wallet)
	addr, err := ReadAddress(l)
	if err != nil {
		globals.Logger.Warnf("Err :%s", err)
		return
	}
	var payment_id []byte
	// if user provided an integrated address donot ask him payment id
	if !addr.IsIntegratedAddress() {
		payment_id, err = ReadPaymentID(l)
		if err != nil {
			globals.Logger.Warnf("Err :%s", err)
			return
		}
	} else {
		globals.Logger.Infof("Payment ID is integreted in address ID:%x", addr.PaymentID)
	}
	fees_per_kb := uint64(0) // fees must be calculated by walletapi
	tx, inputs, input_sum, err := wallet.Transfer_Everything(*addr, hex.EncodeToString(payment_id), 0, fees_per_kb, 5)
	_ = inputs
	if err != nil {
		globals.Logger.Warnf("Error while building Transaction err %s\n", err)
		return
	}
	globals.Logger.Infof("%d Inputs Selected for %s DERO", len(inputs), globals.FormatMoney12(input_sum))
	globals.Logger.Infof("fees %s DERO", globals.FormatMoneyPrecision(tx.RctSignature.Get_TX_Fee(), 12))
	globals.Logger.Infof("TX Size %0.1f KiB (should be < 240 KiB)", float32(len(tx.Serialize()))/1024.0)
	offline_tx := false
	if ConfirmYesNoDefaultNo(l, "Confirm Transaction (y/N)") {
		if offline_tx { // if its an offline tx, dump it to a file
			cur_dir, err := os.Getwd()
			if err != nil {
				globals.Logger.Warnf("Cannot obtain current directory to save tx")
				return
			}
			filename := filepath.Join(cur_dir, tx.GetHash().String()+".tx")
			err = ioutil.WriteFile(filename, []byte(hex.EncodeToString(tx.Serialize())), 0600)
			if err == nil {
				if err == nil {
					globals.Logger.Infof("Transaction saved successfully. txid = %s", tx.GetHash())
					globals.Logger.Infof("Saved to %s", filename)
				} else {
					globals.Logger.Warnf("Error saving tx to %s , err %s", filename, err)
				}
			}
		} else {
			err = wallet.SendTransaction(tx) // relay tx to daemon/network
			if err == nil {
				globals.Logger.Infof("Transaction sent successfully. txid = %s", tx.GetHash())
			} else {
				globals.Logger.Warnf("Transaction sending failed txid = %s, err %s", tx.GetHash(), err)
			}
		}
	}
*/
}
// read an address with all goodies such as color encoding and other things in prompt
func ReadAddress(l *readline.Instance) (a *address.Address, err error) {
func ReadAddress(l *readline.Instance) (a *rpc.Address, err error) {
setPasswordCfg := l.GenPasswordConfig()
setPasswordCfg.EnableMask = false
@ -435,18 +404,10 @@ func ReadAddress(l *readline.Instance) (a *address.Address, err error) {
return
}
/*
// read an payment with all goodies such as color encoding and other things in prompt
func ReadPaymentID(l *readline.Instance) (payment_id []byte, err error) {
func ReadFloat64(l *readline.Instance, cprompt string, default_value float64) (a float64, err error) {
setPasswordCfg := l.GenPasswordConfig()
setPasswordCfg.EnableMask = false
// ask user whether he want to enter a payment ID
if !ConfirmYesNoDefaultNo(l, "Provide Payment ID (y/N)") { // user doesnot want to provide payment it, skip
return
}
prompt_mutex.Lock()
defer prompt_mutex.Unlock()
@ -455,19 +416,17 @@ func ReadPaymentID(l *readline.Instance) (payment_id []byte, err error) {
color := color_green
if len(line) >= 1 {
_, err := hex.DecodeString(string(line))
if (len(line) == 16 || len(line) == 64) && err == nil {
error_message = ""
} else {
_, err := strconv.ParseFloat(string(line), 64)
if err != nil {
error_message = " " //err.Error()
}
}
if error_message != "" {
color = color_red // Should we display the error message here??
l.SetPrompt(fmt.Sprintf("%sEnter Payment ID (16/64 hex char): ", color))
l.SetPrompt(fmt.Sprintf("%sEnter %s (default %f): ", color, cprompt, default_value))
} else {
l.SetPrompt(fmt.Sprintf("%sEnter Payment ID (16/64 hex char): ", color))
l.SetPrompt(fmt.Sprintf("%sEnter %s (default %f): ", color, cprompt, default_value))
}
@ -479,21 +438,128 @@ func ReadPaymentID(l *readline.Instance) (payment_id []byte, err error) {
if err != nil {
return
}
payment_id, err = hex.DecodeString(string(line))
a, err = strconv.ParseFloat(string(line), 64)
l.SetPrompt(cprompt)
l.Refresh()
return
}
// ReadUint64 prompts the user on the given readline instance for a uint64
// value. While the user types, the prompt is re-rendered green when the
// current input parses as a uint64 and red otherwise. Base is auto-detected
// by strconv (leading 0x = hex, leading 0 = octal, otherwise decimal).
// default_value is only shown in the prompt text; on empty or unparseable
// input the strconv parse error is returned instead of the default.
// NOTE(review): dropped a stray `l.SetPrompt(prompt)` that was immediately
// overwritten by `l.SetPrompt(cprompt)` (dead write), and collapsed an
// if/else whose two branches issued the identical SetPrompt call.
func ReadUint64(l *readline.Instance, cprompt string, default_value uint64) (a uint64, err error) {
	setPasswordCfg := l.GenPasswordConfig()
	setPasswordCfg.EnableMask = false

	// serialize prompt updates with the rest of the CLI
	prompt_mutex.Lock()
	defer prompt_mutex.Unlock()

	setPasswordCfg.SetListener(func(line []rune, pos int, key rune) (newLine []rune, newPos int, ok bool) {
		color := color_green // assume valid until the parse below fails
		if len(line) >= 1 {
			if _, perr := strconv.ParseUint(string(line), 0, 64); perr != nil {
				color = color_red // Should we display the error message here??
			}
		}
		l.SetPrompt(fmt.Sprintf("%sEnter %s (default %d): ", color, cprompt, default_value))
		l.Refresh()
		return nil, 0, false
	})

	line, err := l.ReadPasswordWithConfig(setPasswordCfg)
	if err != nil {
		return
	}
	a, err = strconv.ParseUint(string(line), 0, 64)
	l.SetPrompt(cprompt)
	l.Refresh()
	return
}
// ReadInt64 prompts the user on the given readline instance for an int64
// value. While the user types, the prompt is re-rendered green when the
// current input parses as an int64 and red otherwise. Base is auto-detected
// by strconv (leading 0x = hex, leading 0 = octal, otherwise decimal).
// default_value is only shown in the prompt text; on empty or unparseable
// input the strconv parse error is returned instead of the default.
// NOTE(review): collapsed an if/else whose two branches issued the identical
// SetPrompt call — only the color differs, so it is chosen up front.
func ReadInt64(l *readline.Instance, cprompt string, default_value int64) (a int64, err error) {
	setPasswordCfg := l.GenPasswordConfig()
	setPasswordCfg.EnableMask = false

	// serialize prompt updates with the rest of the CLI
	prompt_mutex.Lock()
	defer prompt_mutex.Unlock()

	setPasswordCfg.SetListener(func(line []rune, pos int, key rune) (newLine []rune, newPos int, ok bool) {
		color := color_green // assume valid until the parse below fails
		if len(line) >= 1 {
			if _, perr := strconv.ParseInt(string(line), 0, 64); perr != nil {
				color = color_red // Should we display the error message here??
			}
		}
		l.SetPrompt(fmt.Sprintf("%sEnter %s (default %d): ", color, cprompt, default_value))
		l.Refresh()
		return nil, 0, false
	})

	line, err := l.ReadPasswordWithConfig(setPasswordCfg)
	if err != nil {
		return
	}
	a, err = strconv.ParseInt(string(line), 0, 64)
	l.SetPrompt(cprompt)
	l.Refresh()
	return
}
// ReadString prompts the user on the given readline instance for a string.
// While the user types, the prompt is re-rendered red while the input is
// empty and green once at least one character is present. default_value is
// only shown in the prompt text; the raw typed line (possibly empty) is
// returned as-is.
// NOTE(review): removed trailing leftovers of the deleted ReadPaymentID
// (references to an undefined `payment_id` and an "Invalid Payment ID"
// error) which could not compile inside this function, and collapsed an
// if/else whose two branches issued the identical SetPrompt call.
func ReadString(l *readline.Instance, cprompt string, default_value string) (a string, err error) {
	setPasswordCfg := l.GenPasswordConfig()
	setPasswordCfg.EnableMask = false

	// serialize prompt updates with the rest of the CLI
	prompt_mutex.Lock()
	defer prompt_mutex.Unlock()

	setPasswordCfg.SetListener(func(line []rune, pos int, key rune) (newLine []rune, newPos int, ok bool) {
		color := color_green // empty input is flagged red below
		if len(line) < 1 {
			color = color_red // Should we display the error message here??
		}
		l.SetPrompt(fmt.Sprintf("%sEnter %s (default '%s'): ", color, cprompt, default_value))
		l.Refresh()
		return nil, 0, false
	})

	line, err := l.ReadPasswordWithConfig(setPasswordCfg)
	if err != nil {
		return
	}
	a = string(line)
	l.SetPrompt(cprompt)
	l.Refresh()
	return
}
*/
// confirms whether the user wants to confirm yes
func ConfirmYesNoDefaultYes(l *readline.Instance, prompt_temporary string) bool {
@ -656,32 +722,6 @@ func PressAnyKey(l *readline.Instance, wallet *walletapi.Wallet_Disk) {
return
}
/*
// if we are in offline, scan default or user provided file
// this function will replay the blockchain data in offline mode
func trigger_offline_data_scan() {
filename := default_offline_datafile
if globals.Arguments["--offline_datafile"] != nil {
filename = globals.Arguments["--offline_datafile"].(string)
}
f, err := os.Open(filename)
if err != nil {
globals.Logger.Warnf("Cannot read offline data file=\"%s\" err: %s ", filename, err)
return
}
w := bufio.NewReader(f)
gzipreader, err := gzip.NewReader(w)
if err != nil {
globals.Logger.Warnf("Error while decompressing offline data file=\"%s\" err: %s ", filename, err)
return
}
defer gzipreader.Close()
io.Copy(pipe_writer, gzipreader)
}
*/
// this completer is used to complete the commands at the prompt
// BUG, this needs to be disabled in menu mode
var completer = readline.NewPrefixCompleter(
@ -733,6 +773,7 @@ func usage(w io.Writer) {
io.WriteString(w, "\t\033[1mtransfer\033[0m\tTransfer/Send DERO to another address\n")
io.WriteString(w, "\t\t\tEg. transfer <address> <amount>\n")
io.WriteString(w, "\t\033[1mtransfer_all\033[0m\tTransfer everything to another address\n")
io.WriteString(w, "\t\033[1mflush\033[0m\tFlush local wallet pool (for testing purposes)\n")
io.WriteString(w, "\t\033[1mversion\033[0m\t\tShow version\n")
io.WriteString(w, "\t\033[1mbye\033[0m\t\tQuit wallet\n")
io.WriteString(w, "\t\033[1mexit\033[0m\t\tQuit wallet\n")
@ -754,7 +795,7 @@ func display_seed(l *readline.Instance, wallet *walletapi.Wallet_Disk) {
func display_spend_key(l *readline.Instance, wallet *walletapi.Wallet_Disk) {
keys := wallet.Get_Keys()
h := "0000000000000000000000000000000000000000000000"+keys.Secret.Text(16)
h := "0000000000000000000000000000000000000000000000" + keys.Secret.Text(16)
fmt.Fprintf(os.Stderr, "secret key: "+color_red+"%s"+color_white+"\n", h[len(h)-64:])
fmt.Fprintf(os.Stderr, "public key: %s\n", keys.Public.StringHex())
@ -769,15 +810,8 @@ func rescan_bc(wallet *walletapi.Wallet_Disk) {
}
// is_registered reports whether the wallet account has been registered
// on-chain; a registration topo height of -1 marks an unregistered account.
func is_registered(wallet *walletapi.Wallet_Disk) bool {
	return wallet.Get_Registration_TopoHeight() != -1
}
func valid_registration_or_display_error(l *readline.Instance, wallet *walletapi.Wallet_Disk) bool {
if !is_registered(wallet) {
if !wallet.IsRegistered() {
globals.Logger.Warnf("Your account is not registered.Please register.")
}
return true
@ -786,11 +820,9 @@ func valid_registration_or_display_error(l *readline.Instance, wallet *walletapi
// show the transfers to the user originating from this account
func show_transfers(l *readline.Instance, wallet *walletapi.Wallet_Disk, limit uint64) {
available := true
in := true
out := true
pool := true // this is not processed still TODO list
failed := false // this is not processed still TODO list
coinbase := true
min_height := uint64(0)
max_height := uint64(0)
@ -798,37 +830,18 @@ func show_transfers(l *readline.Instance, wallet *walletapi.Wallet_Disk, limit u
line_parts := strings.Fields(line)
if len(line_parts) >= 2 {
switch strings.ToLower(line_parts[1]) {
case "available":
available = true
in = false
case "coinbase":
out = false
pool = false
failed = false
in = false
case "in":
available = true
coinbase = false
in = true
out = false
pool = false
failed = false
case "out":
available = false
coinbase = false
in = false
out = true
pool = false
failed = false
case "pool":
available = false
in = false
out = false
pool = true
failed = false
case "failed":
available = false
in = false
out = false
pool = false
failed = true
}
}
@ -851,7 +864,7 @@ func show_transfers(l *readline.Instance, wallet *walletapi.Wallet_Disk, limit u
}
// request payments without payment id
transfers := wallet.Show_Transfers(available, in, out, pool, failed, false, min_height, max_height) // receives sorted list of transfers
transfers := wallet.Show_Transfers(coinbase, in, out, min_height, max_height, "", "", 0, 0) // receives sorted list of transfers
if len(transfers) == 0 {
globals.Logger.Warnf("No transfers available")
@ -872,16 +885,42 @@ func show_transfers(l *readline.Instance, wallet *walletapi.Wallet_Disk, limit u
if transfers[i].Coinbase {
io.WriteString(l.Stderr(), fmt.Sprintf(color_green+"%s Height %d TopoHeight %d Coinbase (miner reward) received %s DERO"+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, globals.FormatMoney(transfers[i].Amount)))
} else if len(transfers[i].PaymentID) == 0 {
io.WriteString(l.Stderr(), fmt.Sprintf(color_green+"%s Height %d TopoHeight %d transaction %s received %s DERO"+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount)))
} else {
payment_id := fmt.Sprintf("%x", transfers[i].PaymentID)
io.WriteString(l.Stderr(), fmt.Sprintf(color_green+"%s Height %d TopoHeight %d transaction %s received %s DERO"+color_white+" PAYMENT ID:%s\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), payment_id))
args, err := transfers[i].ProcessPayload()
if err != nil {
io.WriteString(l.Stderr(), fmt.Sprintf(color_green+"%s Height %d TopoHeight %d transaction %s received %s DERO Proof: %s"+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), transfers[i].Proof))
io.WriteString(l.Stderr(), fmt.Sprintf("Full Entry\n", transfers[i])) // dump entire entry for debugging purposes
} else if len(args) == 0 { // no rpc
io.WriteString(l.Stderr(), fmt.Sprintf(color_green+"%s Height %d TopoHeight %d transaction %s received %s DERO Proof: %s NO RPC CALL"+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), transfers[i].Proof))
} else { // yes, its rpc
io.WriteString(l.Stderr(), fmt.Sprintf(color_green+"%s Height %d TopoHeight %d transaction %s received %s DERO Proof: %s RPC CALL arguments %s "+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), transfers[i].Proof, args))
}
}
case 1:
payment_id := fmt.Sprintf("%x", transfers[i].PaymentID)
io.WriteString(l.Stderr(), fmt.Sprintf(color_magenta+"%s Height %d TopoHeight %d transaction %s spent %s DERO"+color_white+" PAYMENT ID: %s Proof:%s\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), payment_id, transfers[i].Proof))
args, err := transfers[i].ProcessPayload()
if err != nil {
io.WriteString(l.Stderr(), fmt.Sprintf(color_yellow+"%s Height %d TopoHeight %d transaction %s spent %s DERO Destination: %s Proof: %s\n"+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), transfers[i].Destination, transfers[i].Proof))
io.WriteString(l.Stderr(), fmt.Sprintf("Err decoding entry %s\nFull Entry %+v\n", err, transfers[i])) // dump entire entry for debugging purposes
} else if len(args) == 0 { // no rpc
io.WriteString(l.Stderr(), fmt.Sprintf(color_yellow+"%s Height %d TopoHeight %d transaction %s spent %s DERO Destination: %s Proof: %s NO RPC CALL"+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), transfers[i].Destination, transfers[i].Proof))
} else { // yes, its rpc
io.WriteString(l.Stderr(), fmt.Sprintf(color_yellow+"%s Height %d TopoHeight %d transaction %s spent %s DERO Destination: %s Proof: %s RPC CALL arguments %s "+color_white+"\n", transfers[i].Time.Format(time.RFC822), transfers[i].Height, transfers[i].TopoHeight, transfers[i].TXID, globals.FormatMoney(transfers[i].Amount), transfers[i].Destination, transfers[i].Proof, args))
}
case 2:
fallthrough
default:

View File

@ -45,12 +45,12 @@ import "github.com/deroproject/derohe/p2p"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/blockchain"
import "github.com/deroproject/derohe/transaction"
//import "github.com/deroproject/derosuite/checkpoints"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
//import "github.com/deroproject/derosuite/cryptonight"
@ -62,7 +62,7 @@ var command_line string = `derod
DERO : A secure, private blockchain with smart-contracts
Usage:
derod [--help] [--version] [--testnet] [--debug] [--sync-node] [--disable-checkpoints] [--socks-proxy=<socks_ip:port>] [--data-dir=<directory>] [--p2p-bind=<0.0.0.0:18089>] [--add-exclusive-node=<ip:port>]... [--add-priority-node=<ip:port>]... [--min-peers=<11>] [--rpc-bind=<127.0.0.1:9999>] [--lowcpuram] [--mining-address=<wallet_address>] [--mining-threads=<cpu_num>] [--node-tag=<unique name>] [--prune-history=<50>]
derod [--help] [--version] [--testnet] [--debug] [--sync-node] [--fullnode] [--disable-checkpoints] [--socks-proxy=<socks_ip:port>] [--data-dir=<directory>] [--p2p-bind=<0.0.0.0:18089>] [--add-exclusive-node=<ip:port>]... [--add-priority-node=<ip:port>]... [--min-peers=<11>] [--rpc-bind=<127.0.0.1:9999>] [--node-tag=<unique name>] [--prune-history=<50>]
derod -h | --help
derod --version
@ -71,6 +71,7 @@ Options:
--version Show version.
--testnet Run in testnet mode.
--debug Debug mode enabled, print log messages
--fullnode Full node mode (this option has effect only while bootstrapping)
--socks-proxy=<socks_ip:port> Use a proxy to connect to network.
--data-dir=<directory> Store blockchain data at this location
--rpc-bind=<127.0.0.1:9999> RPC listens on this ip:port
@ -78,10 +79,6 @@ Options:
--add-exclusive-node=<ip:port> Connect to specific peer only
--add-priority-node=<ip:port> Maintain persistant connection to specified peer
--sync-node Sync node automatically with the seeds nodes. This option is for rare use.
--min-peers=<11> Number of connections the daemon tries to maintain
--lowcpuram Disables some RAM consuming sections (deactivates mining/ultra compact protocol etc).
--mining-address=<wallet_address> This address is rewarded when a block is mined sucessfully
--mining-threads=<cpu_num> Number of CPU threads for mining
--node-tag=<unique name> Unique name of node, visible to everyone
--prune-history=<50> prunes blockchain history until the specific topo_height
@ -188,18 +185,18 @@ func main() {
p2p.P2P_Init(params)
rpc, _ := RPCServer_Start(params)
rpcserver, _ := RPCServer_Start(params)
// setup function pointers
// these pointers need to fixed
chain.Mempool.P2P_TX_Relayer = func(tx *transaction.Transaction, peerid uint64) (count int) {
count += p2p.Broadcast_Tx(tx, peerid)
count += int(p2p.Broadcast_Tx(tx, peerid))
return
}
chain.Regpool.P2P_TX_Relayer = func(tx *transaction.Transaction, peerid uint64) (count int) {
count += p2p.Broadcast_Tx(tx, peerid)
count += int(p2p.Broadcast_Tx(tx, peerid))
return
}
@ -207,47 +204,6 @@ func main() {
p2p.Broadcast_Block(cbl, peerid)
}
if globals.Arguments["--lowcpuram"].(bool) == false && globals.Arguments["--sync-node"].(bool) == false { // enable v1 of protocol only if requested
// if an address has been provided, verify that it satisfies //mainnet/testnet criteria
if globals.Arguments["--mining-address"] != nil {
addr, err := globals.ParseValidateAddress(globals.Arguments["--mining-address"].(string))
if err != nil {
globals.Logger.Fatalf("Mining address is invalid: err %s", err)
}
params["mining-address"] = addr
//log.Debugf("Setting up proxy using %s", Arguments["--socks-proxy"].(string))
}
if globals.Arguments["--mining-threads"] != nil {
thread_count := 0
if s, err := strconv.Atoi(globals.Arguments["--mining-threads"].(string)); err == nil {
//fmt.Printf("%T, %v", s, s)
thread_count = s
} else {
globals.Logger.Fatalf("Mining threads argument cannot be parsed: err %s", err)
}
if thread_count > runtime.GOMAXPROCS(0) {
globals.Logger.Fatalf("Mining threads (%d) is more than available CPUs (%d). This is NOT optimal", thread_count, runtime.GOMAXPROCS(0))
}
params["mining-threads"] = thread_count
if _, ok := params["mining-address"]; !ok {
globals.Logger.Fatalf("Mining threads require a valid wallet address")
}
globals.Logger.Infof("System will mine to %s with %d threads. Good Luck!!", globals.Arguments["--mining-address"].(string), thread_count)
go start_miner(chain, params["mining-address"].(*address.Address), nil, thread_count)
}
}
//go time_check_routine() // check whether server time is in sync
// This tiny goroutine continuously updates status as required
@ -397,7 +353,7 @@ func main() {
break
}
//
case command == "import_chain": // this migrates existing chain from DERO to DERO atlantis
case command == "import_chain": // this migrates existing chain from DERO atlantis to DERO HE
/*
f, err := os.Open("/tmp/raw_export.txt")
if err != nil {
@ -553,27 +509,14 @@ func main() {
diff = chain.Load_Block_Difficulty(current_block_id)
}
toporecord, err := chain.Store.Topo_store.Read(i)
if err != nil {
log.Infof("Skipping block at height %d due to error while obtaining toporecord %s\n", i, err)
continue
}
ss, err := chain.Store.Balance_store.LoadSnapshot(uint64(toporecord.State_Version))
if err != nil {
panic(err)
}
balance_tree, err := ss.GetTree(blockchain.BALANCE_TREE)
balance_hash, err := chain.Load_Merkle_Hash(i)
if err != nil {
panic(err)
}
balance_hash, _ := balance_tree.Hash()
log.Infof("topo height: %10d, height %d, timestamp: %10d, difficulty: %s cdiff: %s", i, chain.Load_Height_for_BL_ID(current_block_id), timestamp, diff.String(), cdiff.String())
log.Infof("Block Id: %s , balance_tree hash %x \n", current_block_id, balance_hash)
log.Infof("Block Id: %s , balance_tree hash %s \n", current_block_id, balance_hash)
log.Infof("")
}
@ -625,24 +568,19 @@ func main() {
case command == "start_mining": // it needs 2 parameters, one dero address, second number of threads
var tx *transaction.Transaction
var addr *address.Address
var addr *rpc.Address
if mining {
fmt.Printf("Mining is already started\n")
continue
}
if globals.Arguments["--lowcpuram"].(bool) {
globals.Logger.Warnf("Mining is deactivated since daemon is running in low cpu mode, please check program options.")
continue
}
if globals.Arguments["--sync-node"].(bool) {
globals.Logger.Warnf("Mining is deactivated since daemon is running with --sync-mode, please check program options.")
continue
}
if len(line_parts) != 3 {
fmt.Printf("This function requires 2 parameters 1) dero address or registration TX 2) number of threads\n")
fmt.Printf("This function requires 2 parameters 1) dero address 2) number of threads\n")
continue
}
@ -661,40 +599,16 @@ func main() {
}
hexdecoded, err := hex.DecodeString(line_parts[1])
if err == nil {
tx = &transaction.Transaction{}
if err = tx.DeserializeHeader(hexdecoded); err == nil {
var err error
if tx.IsRegistration() {
if tx.IsRegistrationValid() {
addr = &address.Address{
PublicKey: new(crypto.Point),
}
err = addr.PublicKey.DecodeCompressed(tx.MinerAddress[0:33])
} else {
err = fmt.Errorf("Registration TX is invalid")
}
} else {
err = fmt.Errorf("TX is not registration")
}
}
} else {
err = nil
addr, err = globals.ParseValidateAddress(line_parts[1])
if err != nil {
globals.Logger.Warnf("Mining address is invalid: err %s", err)
continue
}
}
addr, err = globals.ParseValidateAddress(line_parts[1])
if err != nil {
globals.Logger.Warnf("Registration TX/Mining address is invalid: err %s", err)
globals.Logger.Warnf("Mining address is invalid: err %s", err)
continue
}
if err != nil {
globals.Logger.Warnf("Mining address is invalid: err %s", err)
continue
}
@ -757,25 +671,12 @@ func main() {
fmt.Printf("Height: %d\n", chain.Load_Height_for_BL_ID(hash))
fmt.Printf("TopoHeight: %d\n", s)
toporecord, err := chain.Store.Topo_store.Read(s)
if err != nil {
log.Infof("Skipping block at topo height %d due to error while obtaining toporecord %s\n", s, err)
panic(err)
continue
}
bhash, err := chain.Load_Merkle_Hash(s)
ss, err := chain.Store.Balance_store.LoadSnapshot(uint64(toporecord.State_Version))
if err != nil {
panic(err)
}
balance_tree, err := ss.GetTree(blockchain.BALANCE_TREE)
if err != nil {
panic(err)
}
bhash, _ := balance_tree.Hash()
fmt.Printf("BALANCE_TREE : %s\n", bhash)
fmt.Printf("PoW: %s\n", bl.GetPoWHash())
@ -1124,7 +1025,7 @@ exit:
globals.Logger.Infof("Exit in Progress, Please wait")
time.Sleep(100 * time.Millisecond) // give prompt update time to finish
rpc.RPCServer_Stop()
rpcserver.RPCServer_Stop()
p2p.P2P_Shutdown() // shutdown p2p subsystem
chain.Shutdown() // shutdown chain subsysem
@ -1155,7 +1056,7 @@ func writenode(chain *blockchain.Blockchain, w *bufio.Writer, blid crypto.Hash,
panic(err)
}
addr := address.NewAddressFromKeys(&acckey)
addr := rpc.NewAddressFromKeys(&acckey)
addr.Mainnet = globals.IsMainnet()
w.WriteString(fmt.Sprintf("L%s [ fillcolor=%s label = \"%s %d height %d score %d stored %d order %d\nminer %s\" ];\n", blid.String(), color, blid.String(), 0, chain.Load_Height_for_BL_ID(blid), 0, chain.Load_Block_Cumulative_Difficulty(blid), chain.Load_Block_Topological_order(blid), addr.String()))

View File

@ -63,9 +63,9 @@ import "sync/atomic"
import "encoding/binary"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/blockchain"
import "github.com/deroproject/derohe/transaction"
@ -76,38 +76,41 @@ var counter uint64 = 0 // used to track speeds of current miner
var mining bool // whether system is mining
// request block chain template, see if the tip changes, then continously mine
func start_miner(chain *blockchain.Blockchain, addr *address.Address, tx *transaction.Transaction, threads int) {
func start_miner(chain *blockchain.Blockchain, addr *rpc.Address, tx *transaction.Transaction, threads int) {
mining = true
counter = 0
//tip_counter := 0
for {
//time.Sleep(50 * time.Millisecond)
for { // once started keep generating blocks after every 10 secs
mining = true
counter = 0
for {
//time.Sleep(50 * time.Millisecond)
if !mining {
break
if !mining {
break
}
if chain.MINING_BLOCK == true {
time.Sleep(10 * time.Millisecond)
continue
}
cbl, bl := chain.Create_new_miner_block(*addr, tx)
difficulty := chain.Get_Difficulty_At_Tips(bl.Tips)
//globals.Logger.Infof("Difficulty of new block is %s", difficulty.String())
// calculate difficulty once
// update job from chain
wg := sync.WaitGroup{}
wg.Add(threads) // add total number of tx as work
for i := 0; i < threads; i++ {
go generate_valid_PoW(chain, 0, cbl, cbl.Bl, difficulty, &wg) // work should be complete in approx 100 ms, on a 12 cpu system, this would add cost of launching 12 g routine per second
}
wg.Wait()
}
if chain.MINING_BLOCK == true {
time.Sleep(10 * time.Millisecond)
continue
}
cbl, bl := chain.Create_new_miner_block(*addr, tx)
difficulty := chain.Get_Difficulty_At_Tips(bl.Tips)
//globals.Logger.Infof("Difficulty of new block is %s", difficulty.String())
// calculate difficulty once
// update job from chain
wg := sync.WaitGroup{}
wg.Add(threads) // add total number of tx as work
for i := 0; i < threads; i++ {
go generate_valid_PoW(chain, 0, cbl, cbl.Bl, difficulty, &wg) // work should be complete in approx 100 ms, on a 12 cpu system, this would add cost of launching 12 g routine per second
}
wg.Wait()
time.Sleep(10 * time.Second)
}
// g
@ -150,7 +153,7 @@ func generate_valid_PoW(chain *blockchain.Blockchain, hf_version uint64, cbl *bl
if _, ok := chain.Add_Complete_Block(cbl); ok {
globals.Logger.Infof("Block %s successfully accepted diff %s", bl.GetHash(), current_difficulty.String())
//chain.P2P_Block_Relayer(cbl, 0) // broadcast block to network ASAP
chain.P2P_Block_Relayer(cbl, 0) // broadcast block to network ASAP
mining = false // this line enables single block mining in 1 go

View File

@ -21,16 +21,12 @@ import "context"
import "encoding/hex"
import "encoding/json"
import "runtime/debug"
//import "log"
//import "net/http"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/rpc"
//import "github.com/deroproject/derosuite/blockchain"
func (DERO_RPC_APIS) GetBlock(ctx context.Context, p structures.GetBlock_Params) (result structures.GetBlock_Result, err error) {
func (DERO_RPC_APIS) GetBlock(ctx context.Context, p rpc.GetBlock_Params) (result rpc.GetBlock_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
@ -70,7 +66,7 @@ func (DERO_RPC_APIS) GetBlock(ctx context.Context, p structures.GetBlock_Params)
if err != nil { // if err return err
return
}
return structures.GetBlock_Result{ // return success
return rpc.GetBlock_Result{ // return success
Block_Header: block_header,
Blob: hex.EncodeToString(bl.Serialize()),
Json: string(json_encoded_bytes),

View File

@ -17,10 +17,10 @@
package main
import "context"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/rpc"
func (DERO_RPC_APIS) GetBlockCount(ctx context.Context) structures.GetBlockCount_Result {
return structures.GetBlockCount_Result{
func (DERO_RPC_APIS) GetBlockCount(ctx context.Context) rpc.GetBlockCount_Result {
return rpc.GetBlockCount_Result{
Count: uint64(chain.Get_Height()),
Status: "OK",
}

View File

@ -19,14 +19,10 @@ package main
import "fmt"
import "context"
import "runtime/debug"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/rpc"
//import "log"
//import "net/http"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/structures"
func (DERO_RPC_APIS) GetBlockHeaderByHash(ctx context.Context, p structures.GetBlockHeaderByHash_Params) (result structures.GetBlockHeaderByHash_Result, err error) {
func (DERO_RPC_APIS) GetBlockHeaderByHash(ctx context.Context, p rpc.GetBlockHeaderByHash_Params) (result rpc.GetBlockHeaderByHash_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
err = fmt.Errorf("panic occured. stack trace %s", debug.Stack())
@ -34,7 +30,7 @@ func (DERO_RPC_APIS) GetBlockHeaderByHash(ctx context.Context, p structures.GetB
}()
hash := crypto.HashHexToHash(p.Hash)
if block_header, err := chain.GetBlockHeader(hash); err == nil { // if err return err
return structures.GetBlockHeaderByHash_Result{ // return success
return rpc.GetBlockHeaderByHash_Result{ // return success
Block_Header: block_header,
Status: "OK",
}, nil

View File

@ -19,13 +19,9 @@ package main
import "fmt"
import "context"
import "runtime/debug"
import "github.com/deroproject/derohe/rpc"
//import "log"
//import "net/http"
//import "github.com/deroproject/derosuite/crypto"
import "github.com/deroproject/derohe/structures"
func (DERO_RPC_APIS) GetBlockHeaderByTopoHeight(ctx context.Context, p structures.GetBlockHeaderByTopoHeight_Params) (result structures.GetBlockHeaderByHeight_Result, err error) {
func (DERO_RPC_APIS) GetBlockHeaderByTopoHeight(ctx context.Context, p rpc.GetBlockHeaderByTopoHeight_Params) (result rpc.GetBlockHeaderByHeight_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
@ -51,7 +47,7 @@ func (DERO_RPC_APIS) GetBlockHeaderByTopoHeight(ctx context.Context, p structure
return
}
return structures.GetBlockHeaderByHeight_Result{ // return success
return rpc.GetBlockHeaderByHeight_Result{ // return success
Block_Header: block_header,
Status: "OK",
}, nil

View File

@ -20,23 +20,14 @@ import "fmt"
import "time"
import "context"
import "runtime/debug"
//import "log"
//import "net/http"
import "golang.org/x/time/rate"
//import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/structures"
//import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/rpc"
// rate limiter is deployed, in case RPC is exposed over internet
// someone should not be just giving fake inputs and delay chain syncing
var get_block_limiter = rate.NewLimiter(16.0, 8) // 16 req per sec, burst of 8 req is okay
func (DERO_RPC_APIS) GetBlockTemplate(ctx context.Context, p structures.GetBlockTemplate_Params) (result structures.GetBlockTemplate_Result, err error) {
func (DERO_RPC_APIS) GetBlockTemplate(ctx context.Context, p rpc.GetBlockTemplate_Params) (result rpc.GetBlockTemplate_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
@ -57,7 +48,7 @@ func (DERO_RPC_APIS) GetBlockTemplate(ctx context.Context, p structures.GetBlock
*/
// validate address
miner_address, err := address.NewAddress(p.Wallet_Address)
miner_address, err := rpc.NewAddress(p.Wallet_Address)
if err != nil {
return result, fmt.Errorf("Address could not be parsed, err:%s", err)
}
@ -72,7 +63,7 @@ func (DERO_RPC_APIS) GetBlockTemplate(ctx context.Context, p structures.GetBlock
for i := range bl.Tips {
prev_hash = prev_hash + bl.Tips[i].String()
}
return structures.GetBlockTemplate_Result{
return rpc.GetBlockTemplate_Result{
Blocktemplate_blob: block_template_hex,
Blockhashing_blob: block_hashing_blob_hex,
Reserved_Offset: uint64(reserved_pos),

View File

@ -19,25 +19,23 @@ package main
import "fmt"
import "math"
import "context"
import "encoding/hex"
import "runtime/debug"
//import "log"
//import "net/http"
import "golang.org/x/xerrors"
import "github.com/deroproject/graviton"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/blockchain"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/errormsg"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/rpc"
func (DERO_RPC_APIS) GetEncryptedBalance(ctx context.Context, p structures.GetEncryptedBalance_Params) (result structures.GetEncryptedBalance_Result, err error) {
//import "github.com/deroproject/derohe/dvm"
//import "github.com/deroproject/derohe/cryptography/crypto"
func (DERO_RPC_APIS) GetEncryptedBalance(ctx context.Context, p rpc.GetEncryptedBalance_Params) (result rpc.GetEncryptedBalance_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
err = fmt.Errorf("panic occured. stack trace %s", debug.Stack())
fmt.Printf("panic stack trace %s\n", debug.Stack())
}
}()
@ -66,33 +64,23 @@ func (DERO_RPC_APIS) GetEncryptedBalance(ctx context.Context, p structures.GetEn
var balance_tree *graviton.Tree
if p.Merkle_Balance_TreeHash != "" { // user requested a specific tree hash version
hash, err := hex.DecodeString(p.Merkle_Balance_TreeHash)
if err != nil {
panic(err)
}
if len(hash) != 32 {
panic("corruted hash")
}
balance_tree, err = ss.GetTreeWithRootHash(hash)
} else {
balance_tree, err = ss.GetTree(blockchain.BALANCE_TREE)
treename := config.BALANCE_TREE
keyname := uaddress.Compressed()
if !p.SCID.IsZero() {
treename = string(p.SCID[:])
}
if err != nil {
if balance_tree, err = ss.GetTree(treename); err != nil {
panic(err)
}
balance_serialized, err := balance_tree.Get(uaddress.Compressed())
bits, _, balance_serialized, err := balance_tree.GetKeyValueFromKey(keyname)
//fmt.Printf("balance_serialized %x err %s, scid %s keyname %x treename %x\n", balance_serialized,err,p.SCID, keyname, treename)
if err != nil {
if xerrors.Is(err, graviton.ErrNotFound) { // address needs registration
return structures.GetEncryptedBalance_Result{ // return success
return rpc.GetEncryptedBalance_Result{ // return success
Registration: registration,
Status: errormsg.ErrAccountUnregistered.Error(),
}, errormsg.ErrAccountUnregistered
@ -101,7 +89,7 @@ func (DERO_RPC_APIS) GetEncryptedBalance(ctx context.Context, p structures.GetEn
panic(err)
}
}
merkle_hash, err := balance_tree.Hash()
merkle_hash, err := chain.Load_Merkle_Hash(topoheight)
if err != nil {
panic(err)
}
@ -109,29 +97,15 @@ func (DERO_RPC_APIS) GetEncryptedBalance(ctx context.Context, p structures.GetEn
// calculate top height merkle tree hash
//var dmerkle_hash crypto.Hash
toporecord, err = chain.Store.Topo_store.Read(chain.Load_TOPO_HEIGHT())
dmerkle_hash, err := chain.Load_Merkle_Hash(chain.Load_TOPO_HEIGHT())
if err != nil {
panic(err)
}
ss, err = chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version)
if err != nil {
panic(err)
}
balance_tree, err = ss.GetTree(blockchain.BALANCE_TREE)
if err != nil {
panic(err)
}
dmerkle_hash, err := balance_tree.Hash()
if err != nil {
panic(err)
}
return structures.GetEncryptedBalance_Result{ // return success
return rpc.GetEncryptedBalance_Result{ // return success
Data: fmt.Sprintf("%x", balance_serialized),
Registration: registration,
Bits: bits, // no. of bbits required
Height: toporecord.Height,
Topoheight: topoheight,
BlockHash: fmt.Sprintf("%x", toporecord.BLOCK_ID),
@ -144,7 +118,7 @@ func (DERO_RPC_APIS) GetEncryptedBalance(ctx context.Context, p structures.GetEn
}
// if address is unregistered, returns negative numbers
func LocatePointOfRegistration(uaddress *address.Address) int64 {
func LocatePointOfRegistration(uaddress *rpc.Address) int64 {
addr := uaddress.Compressed()
@ -192,7 +166,7 @@ func IsRegisteredAtTopoHeight(addr []byte, topoheight int64) bool {
}
var balance_tree *graviton.Tree
balance_tree, err = ss.GetTree(blockchain.BALANCE_TREE)
balance_tree, err = ss.GetTree(config.BALANCE_TREE)
if err != nil {
panic(err)
}

View File

@ -17,14 +17,10 @@
package main
import "context"
import "github.com/deroproject/derohe/rpc"
//import "log"
//import "net/http"
import "github.com/deroproject/derohe/structures"
func (DERO_RPC_APIS) GetHeight(ctx context.Context) structures.Daemon_GetHeight_Result {
return structures.Daemon_GetHeight_Result{
func (DERO_RPC_APIS) GetHeight(ctx context.Context) rpc.Daemon_GetHeight_Result {
return rpc.Daemon_GetHeight_Result{
Height: uint64(chain.Get_Height()),
StableHeight: chain.Get_Stable_Height(),
TopoHeight: chain.Load_TOPO_HEIGHT(),

View File

@ -17,20 +17,15 @@
package main
import "fmt"
//import "time"
import "context"
import "runtime/debug"
//import "log"
//import "net/http"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/blockchain"
import "github.com/deroproject/derohe/rpc"
func (DERO_RPC_APIS) GetInfo(ctx context.Context) (result structures.GetInfo_Result, err error) {
//import "github.com/deroproject/derohe/blockchain"
func (DERO_RPC_APIS) GetInfo(ctx context.Context) (result rpc.GetInfo_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
@ -44,28 +39,11 @@ func (DERO_RPC_APIS) GetInfo(ctx context.Context) (result structures.GetInfo_Res
result.TopoHeight = chain.Load_TOPO_HEIGHT()
{
toporecord, err := chain.Store.Topo_store.Read(result.TopoHeight)
//fmt.Printf("current block %d previous topo %d record %+v err %s\n", i+base_topo_index, i+base_topo_index-1, toporecord,err)
balance_merkle_hash, err := chain.Load_Merkle_Hash(result.TopoHeight)
if err != nil {
panic(err)
}
ss, err := chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version)
if err != nil {
panic(err)
}
balance_tree, err := ss.GetTree(blockchain.BALANCE_TREE)
if err != nil {
panic(err)
}
merkle_hash, err := balance_tree.Hash()
if err != nil {
panic(err)
}
result.Merkle_Balance_TreeHash = fmt.Sprintf("%X", merkle_hash[:])
result.Merkle_Balance_TreeHash = fmt.Sprintf("%X", balance_merkle_hash[:])
}
blid, err := chain.Load_Block_Topological_order_at_index(result.TopoHeight)
@ -78,7 +56,7 @@ func (DERO_RPC_APIS) GetInfo(ctx context.Context) (result structures.GetInfo_Res
result.Top_block_hash = blid.String()
result.Target = chain.Get_Current_BlockTime()
if result.TopoHeight > 50 {
if result.TopoHeight-chain.LocatePruneTopo() > 100 {
blid50, err := chain.Load_Block_Topological_order_at_index(result.TopoHeight - 50)
if err == nil {
now := chain.Load_Block_Timestamp(blid)

View File

@ -17,18 +17,16 @@
package main
import "fmt"
import "context"
import "runtime/debug"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/blockchain"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/rpc"
func (DERO_RPC_APIS) GetRandomAddress(ctx context.Context) (result structures.GetRandomAddress_Result, err error) {
//import "github.com/deroproject/derohe/blockchain"
func (DERO_RPC_APIS) GetRandomAddress(ctx context.Context, p rpc.GetRandomAddress_Params) (result rpc.GetRandomAddress_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
err = fmt.Errorf("panic occured. stack trace %s", debug.Stack())
@ -54,7 +52,12 @@ func (DERO_RPC_APIS) GetRandomAddress(ctx context.Context) (result structures.Ge
panic(err)
}
balance_tree, err := ss.GetTree(blockchain.BALANCE_TREE)
treename := config.BALANCE_TREE
if p.SCID.IsZero() {
treename = string(p.SCID[:])
}
balance_tree, err := ss.GetTree(treename)
if err != nil {
panic(err)
}
@ -70,10 +73,10 @@ func (DERO_RPC_APIS) GetRandomAddress(ctx context.Context) (result structures.Ge
var acckey crypto.Point
if err := acckey.DecodeCompressed(k[:]); err != nil {
panic(err)
continue
}
addr := address.NewAddressFromKeys(&acckey)
addr := rpc.NewAddressFromKeys(&acckey)
addr.Mainnet = true
if globals.Config.Name != config.Mainnet.Name { // anything other than mainnet is testnet at this point in time
addr.Mainnet = false

165
cmd/derod/rpc_dero_getsc.go Normal file
View File

@ -0,0 +1,165 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package main
import "fmt"
import "context"
//import "encoding/hex"
import "runtime/debug"
//import "github.com/romana/rlog"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/dvm"
//import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/blockchain"
import "github.com/deroproject/graviton"
// GetSC returns smart-contract information for the SCID given in p: the SC's
// DERO balance, optionally its installed code, and the values stored under any
// requested uint64/string/byte keys, evaluated at a specific topoheight
// (defaults to the current chain tip when p.TopoHeight < 1).
//
// Per-key lookup failures are reported inline inside the corresponding result
// slice (e.g. "NOT AVAILABLE ...") instead of failing the whole call; only a
// failure to load the SC meta tree aborts with a non-nil error.
func (DERO_RPC_APIS) GetSC(ctx context.Context, p rpc.GetSC_Params) (result rpc.GetSC_Result, err error) {
	defer func() { // safety so if anything wrong happens, we return error
		if r := recover(); r != nil {
			err = fmt.Errorf("panic occured. stack trace %s", debug.Stack())
		}
	}()

	scid := crypto.HashHexToHash(p.SCID)

	topoheight := chain.Load_TOPO_HEIGHT()
	if p.TopoHeight >= 1 { // caller pinned a specific topoheight
		topoheight = p.TopoHeight
	}

	toporecord, err := chain.Store.Topo_store.Read(topoheight)
	if err == nil {
		var ss *graviton.Snapshot
		ss, err = chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version)
		if err == nil {
			var sc_meta_tree *graviton.Tree
			if sc_meta_tree, err = ss.GetTree(config.SC_META); err == nil {
				var meta_bytes []byte
				if meta_bytes, err = sc_meta_tree.Get(blockchain.SC_Meta_Key(scid)); err == nil {
					var meta blockchain.SC_META_DATA // meta links the SCID to its balance
					if err = meta.UnmarshalBinary(meta_bytes); err == nil {
						result.Balance = meta.Balance
					}
				}
			} else {
				return // without the meta tree nothing else can be resolved
			}

			// NOTE: err is deliberately shadowed below; individual key lookup
			// failures are surfaced inline in the result slices, not as a call
			// failure, and the tail of the function forces err = nil anyway.
			if sc_data_tree, err := ss.GetTree(string(scid[:])); err == nil {
				if p.Code { // give SC code
					var code_bytes []byte
					var v dvm.Variable
					if code_bytes, err = sc_data_tree.Get(blockchain.SC_Code_Key(scid)); err == nil {
						if err = v.UnmarshalBinary(code_bytes); err != nil {
							result.Code = "Unmarshal error"
						} else {
							result.Code = v.Value.(string)
						}
					}
				}

				// give any uint64 keys data if any
				for _, value := range p.KeysUint64 {
					key, _ := dvm.Variable{Type: dvm.Uint64, Value: value}.MarshalBinary()

					var value_bytes []byte
					if value_bytes, err = sc_data_tree.Get(key); err != nil {
						result.ValuesUint64 = append(result.ValuesUint64, fmt.Sprintf("NOT AVAILABLE err: %s", err))
						continue
					}
					var v dvm.Variable
					if err = v.UnmarshalBinary(value_bytes); err != nil {
						result.ValuesUint64 = append(result.ValuesUint64, "Unmarshal error")
						continue
					}
					result.ValuesUint64 = append(result.ValuesUint64, formatSCVariable(v))
				}

				for _, value := range p.KeysString {
					key, _ := dvm.Variable{Type: dvm.String, Value: value}.MarshalBinary()

					var value_bytes []byte
					if value_bytes, err = sc_data_tree.Get(key); err != nil {
						fmt.Printf("Getting key %x\n", key) // debug trace kept from original
						result.ValuesString = append(result.ValuesString, fmt.Sprintf("NOT AVAILABLE err: %s", err))
						continue
					}
					var v dvm.Variable
					if err = v.UnmarshalBinary(value_bytes); err != nil {
						result.ValuesString = append(result.ValuesString, "Unmarshal error")
						continue
					}
					// BUGFIX: original appended the Uint64-typed case to
					// result.ValuesUint64 and assigned that to ValuesString,
					// corrupting both slices; always append to ValuesString.
					result.ValuesString = append(result.ValuesString, formatSCVariable(v))
				}

				// byte keys are marshalled as DVM strings before lookup
				for _, value := range p.KeysBytes {
					key, _ := dvm.Variable{Type: dvm.String, Value: string(value)}.MarshalBinary()

					var value_bytes []byte
					if value_bytes, err = sc_data_tree.Get(key); err != nil {
						result.ValuesBytes = append(result.ValuesBytes, "NOT AVAILABLE")
						continue
					}
					var v dvm.Variable
					if err = v.UnmarshalBinary(value_bytes); err != nil {
						result.ValuesBytes = append(result.ValuesBytes, "Unmarshal error")
						continue
					}
					result.ValuesBytes = append(result.ValuesBytes, formatSCVariable(v))
				}
			}
		}
	}

	result.Status = "OK"
	err = nil
	//logger.Debugf("result %+v\n", result);
	return
}

// formatSCVariable renders a decoded DVM variable for the RPC result slices.
// Only Uint64 and String are valid SC storage value types; anything else is
// reported as unknown.
func formatSCVariable(v dvm.Variable) string {
	switch v.Type {
	case dvm.Uint64:
		return fmt.Sprintf("%d", v.Value)
	case dvm.String:
		return fmt.Sprintf("%s", v.Value)
	default:
		return "UNKNOWN Data type"
	}
}

View File

@ -22,15 +22,14 @@ import "encoding/hex"
import "runtime/debug"
//import "github.com/romana/rlog"
//import "github.com/vmihailenco/msgpack"
import "github.com/deroproject/derohe/crypto"
//import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/config"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/blockchain"
import "github.com/deroproject/graviton"
func (DERO_RPC_APIS) GetTransaction(ctx context.Context, p structures.GetTransaction_Params) (result structures.GetTransaction_Result, err error) {
func (DERO_RPC_APIS) GetTransaction(ctx context.Context, p rpc.GetTransaction_Params) (result rpc.GetTransaction_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
@ -48,7 +47,7 @@ func (DERO_RPC_APIS) GetTransaction(ctx context.Context, p structures.GetTransac
// logger.Debugf("checking tx in pool %+v", tx);
if tx != nil { // found the tx in the mempool
var related structures.Tx_Related_Info
var related rpc.Tx_Related_Info
related.Block_Height = -1 // not mined
related.In_pool = true
@ -63,8 +62,11 @@ func (DERO_RPC_APIS) GetTransaction(ctx context.Context, p structures.GetTransac
{ // check if tx is from blockchain
var tx transaction.Transaction
var tx_bytes []byte
if tx_bytes, err = chain.Store.Block_tx_store.ReadTX(hash); err != nil {
return
if tx_bytes, err = chain.Store.Block_tx_store.ReadTX(hash); err != nil { // if tx not found return empty rpc
var related rpc.Tx_Related_Info
result.Txs_as_hex = append(result.Txs_as_hex, "") // a not found tx will return ""
result.Txs = append(result.Txs, related)
continue
} else {
//fmt.Printf("txhash %s loaded %d bytes\n", hash, len(tx_bytes))
@ -75,7 +77,7 @@ func (DERO_RPC_APIS) GetTransaction(ctx context.Context, p structures.GetTransac
}
if err == nil {
var related structures.Tx_Related_Info
var related rpc.Tx_Related_Info
// check whether tx is orphan
@ -100,8 +102,69 @@ func (DERO_RPC_APIS) GetTransaction(ctx context.Context, p structures.GetTransac
if valid {
related.ValidBlock = valid_blid.String()
// topo height at which it was mined
related.Block_Height = int64(chain.Load_Block_Topological_order(valid_blid))
topo_height := int64(chain.Load_Block_Topological_order(valid_blid))
related.Block_Height = topo_height
if tx.TransactionType != transaction.REGISTRATION {
// we must now fill in compressed ring members
if toporecord, err := chain.Store.Topo_store.Read(topo_height); err == nil {
if ss, err := chain.Store.Balance_store.LoadSnapshot(toporecord.State_Version); err == nil {
if tx.TransactionType == transaction.SC_TX {
scid := tx.GetHash()
if tx.SCDATA.Has(rpc.SCACTION, rpc.DataUint64) && rpc.SC_INSTALL == rpc.SC_ACTION(tx.SCDATA.Value(rpc.SCACTION, rpc.DataUint64).(uint64)) {
if sc_meta_tree, err := ss.GetTree(config.SC_META); err == nil {
var meta_bytes []byte
if meta_bytes, err = sc_meta_tree.Get(blockchain.SC_Meta_Key(scid)); err == nil {
var meta blockchain.SC_META_DATA // the meta contains the link to the SC bytes
if err = meta.UnmarshalBinary(meta_bytes); err == nil {
related.Balance = meta.Balance
}
}
}
if sc_data_tree, err := ss.GetTree(string(scid[:])); err == nil {
var code_bytes []byte
if code_bytes, err = sc_data_tree.Get(blockchain.SC_Code_Key(scid)); err == nil {
related.Code = string(code_bytes)
}
}
}
}
for t := range tx.Payloads {
var ring [][]byte
var tree *graviton.Tree
if tx.Payloads[t].SCID.IsZero() {
tree, err = ss.GetTree(config.BALANCE_TREE)
} else {
tree, err = ss.GetTree(string(tx.Payloads[t].SCID[:]))
}
if err != nil {
fmt.Printf("no such SC %s\n", tx.Payloads[t].SCID)
}
for j := 0; j < int(tx.Payloads[t].Statement.RingSize); j++ {
key_pointer := tx.Payloads[t].Statement.Publickeylist_pointers[j*int(tx.Payloads[t].Statement.Bytes_per_publickey) : (j+1)*int(tx.Payloads[t].Statement.Bytes_per_publickey)]
_, key_compressed, _, err := tree.GetKeyValueFromHash(key_pointer)
if err == nil {
ring = append(ring, key_compressed)
} else { // we should some how report error
fmt.Printf("Error expanding member for txid %s t %d err %s key_compressed %x\n", hash, t, err, key_compressed)
}
}
related.Ring = append(related.Ring, ring)
}
}
}
}
}
for i := range invalid_blid {
related.InvalidBlock = append(related.InvalidBlock, invalid_blid[i].String())
@ -125,6 +188,6 @@ func (DERO_RPC_APIS) GetTransaction(ctx context.Context, p structures.GetTransac
result.Status = "OK"
err = nil
//logger.Debugf("result %+v\n", result);
logger.Debugf("result %+v\n", result)
return
}

View File

@ -18,10 +18,9 @@ package main
import "fmt"
import "context"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/structures"
func (DERO_RPC_APIS) GetTxPool(ctx context.Context) (result structures.GetTxPool_Result) {
func (DERO_RPC_APIS) GetTxPool(ctx context.Context) (result rpc.GetTxPool_Result) {
result.Status = "OK"
pool_list := chain.Mempool.Mempool_List_TX()

View File

@ -20,14 +20,12 @@ import "fmt"
import "context"
import "encoding/hex"
import "runtime/debug"
import "github.com/romana/rlog"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/transaction"
//NOTE: finally we have shifted to json api
func (DERO_RPC_APIS) SendRawTransaction(ctx context.Context, p structures.SendRawTransaction_Params) (result structures.SendRawTransaction_Result, err error) {
func (DERO_RPC_APIS) SendRawTransaction(ctx context.Context, p rpc.SendRawTransaction_Params) (result rpc.SendRawTransaction_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
@ -61,15 +59,12 @@ func (DERO_RPC_APIS) SendRawTransaction(ctx context.Context, p structures.SendRa
rlog.Debugf("Incoming TXID %s from RPC Server", tx.GetHash())
// lets try to add it to pool
success := chain.Add_TX_To_Pool(&tx)
if success {
if err = chain.Add_TX_To_Pool(&tx); err == nil {
result.Status = "OK"
err = nil
rlog.Debugf("Incoming TXID %s from RPC Server successfully accepted by MEMPOOL", tx.GetHash())
} else {
err = fmt.Errorf("Transaction %s rejected by daemon, check daemon msgs", tx.GetHash())
err = fmt.Errorf("Transaction %s rejected by daemon err '%s'", tx.GetHash(), err)
rlog.Warnf("Incoming TXID %s from RPC Server rejected by POOL", tx.GetHash())
}
return

View File

@ -20,10 +20,9 @@ import "fmt"
import "context"
import "encoding/hex"
import "runtime/debug"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/structures"
func (DERO_RPC_APIS) SubmitBlock(ctx context.Context, block_data [2]string) (result structures.SubmitBlock_Result, err error) {
func (DERO_RPC_APIS) SubmitBlock(ctx context.Context, block_data [2]string) (result rpc.SubmitBlock_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
@ -47,7 +46,7 @@ func (DERO_RPC_APIS) SubmitBlock(ctx context.Context, block_data [2]string) (res
if sresult {
logger.Infof("Submitted block %s accepted", blid)
return structures.SubmitBlock_Result{
return rpc.SubmitBlock_Result{
BLID: blid.String(),
Status: "OK",
}, nil
@ -59,7 +58,7 @@ func (DERO_RPC_APIS) SubmitBlock(ctx context.Context, block_data [2]string) (res
}
logger.Infof("Submitting block rejected err %s", err)
return structures.SubmitBlock_Result{
return rpc.SubmitBlock_Result{
Status: "REJECTED",
}, nil

View File

@ -21,9 +21,9 @@ package main
import "fmt"
import "context"
import "runtime/debug"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/rpc"
func (DERO_RPC_APIS) GetLastBlockHeader(ctx context.Context) (result structures.GetLastBlockHeader_Result, err error) {
func (DERO_RPC_APIS) GetLastBlockHeader(ctx context.Context) (result rpc.GetLastBlockHeader_Result, err error) {
defer func() { // safety so if anything wrong happens, we return error
if r := recover(); r != nil {
err = fmt.Errorf("panic occured. stack trace %s", debug.Stack())
@ -35,7 +35,7 @@ func (DERO_RPC_APIS) GetLastBlockHeader(ctx context.Context) (result structures.
return
}
return structures.GetLastBlockHeader_Result{
return rpc.GetLastBlockHeader_Result{
Block_Header: block_header,
Status: "OK",
}, nil

View File

@ -136,65 +136,6 @@ func (r *RPCServer) RPCServer_Stop() {
// setup handlers
func (r *RPCServer) Run() {
/*
mr := jsonrpc.NewMethodRepository()
if err := mr.RegisterMethod("Main.Echo", EchoHandler{}, EchoParams{}, EchoResult{}); err != nil {
log.Fatalln(err)
}
// install getblockcount handler
if err := mr.RegisterMethod("getblockcount", GetBlockCount_Handler{}, structures.GetBlockCount_Params{}, structures.GetBlockCount_Result{}); err != nil {
log.Fatalln(err)
}
// install on_getblockhash
if err := mr.RegisterMethod("on_getblockhash", On_GetBlockHash_Handler{}, structures.On_GetBlockHash_Params{}, structures.On_GetBlockHash_Result{}); err != nil {
log.Fatalln(err)
}
// install getblocktemplate handler
//if err := mr.RegisterMethod("getblocktemplate", GetBlockTemplate_Handler{}, structures.GetBlockTemplate_Params{}, structures.GetBlockTemplate_Result{}); err != nil {
// log.Fatalln(err)
//}
// submitblock handler
if err := mr.RegisterMethod("submitblock", SubmitBlock_Handler{}, structures.SubmitBlock_Params{}, structures.SubmitBlock_Result{}); err != nil {
log.Fatalln(err)
}
if err := mr.RegisterMethod("getlastblockheader", GetLastBlockHeader_Handler{}, structures.GetLastBlockHeader_Params{}, structures.GetLastBlockHeader_Result{}); err != nil {
log.Fatalln(err)
}
if err := mr.RegisterMethod("getblockheaderbyhash", GetBlockHeaderByHash_Handler{}, structures.GetBlockHeaderByHash_Params{}, structures.GetBlockHeaderByHash_Result{}); err != nil {
log.Fatalln(err)
}
//if err := mr.RegisterMethod("getblockheaderbyheight", GetBlockHeaderByHeight_Handler{}, structures.GetBlockHeaderByHeight_Params{}, structures.GetBlockHeaderByHeight_Result{}); err != nil {
// log.Fatalln(err)
//}
if err := mr.RegisterMethod("getblockheaderbytopoheight", GetBlockHeaderByTopoHeight_Handler{}, structures.GetBlockHeaderByTopoHeight_Params{}, structures.GetBlockHeaderByHeight_Result{}); err != nil {
log.Fatalln(err)
}
if err := mr.RegisterMethod("getblock", GetBlock_Handler{}, structures.GetBlock_Params{}, structures.GetBlock_Result{}); err != nil {
log.Fatalln(err)
}
if err := mr.RegisterMethod("get_info", GetInfo_Handler{}, structures.GetInfo_Params{}, structures.GetInfo_Result{}); err != nil {
log.Fatalln(err)
}
if err := mr.RegisterMethod("getencryptedbalance", GetEncryptedBalance_Handler{}, structures.GetEncryptedBalance_Params{}, structures.GetEncryptedBalance_Result{}); err != nil {
log.Fatalln(err)
}
if err := mr.RegisterMethod("gettxpool", GetTxPool_Handler{}, structures.GetTxPool_Params{}, structures.GetTxPool_Result{}); err != nil {
log.Fatalln(err)
}
*/
// create a new mux
r.mux = http.NewServeMux()
@ -344,7 +285,8 @@ var historical_apis = handler.Map{"getinfo": handler.New(dero_apis.GetInfo),
"getblockcount": handler.New(dero_apis.GetBlockCount),
"getlastblockheader": handler.New(dero_apis.GetLastBlockHeader),
"getblocktemplate": handler.New(dero_apis.GetBlockTemplate),
"getencryptedbalance": handler.New(dero_apis.GetEncryptedBalance)}
"getencryptedbalance": handler.New(dero_apis.GetEncryptedBalance),
"getsc": handler.New(dero_apis.GetSC)}
func translate_http_to_jsonrpc_and_vice_versa(w http.ResponseWriter, r *http.Request) {
bridge.ServeHTTP(w, r)

View File

@ -20,9 +20,6 @@ package main
// this needs only RPC access
// NOTE: Only use data exported from within the RPC interface, do direct use of exported variables fom packages
// NOTE: we can use structs defined within the RPCserver package
// This is being developed to track down and confirm some bugs
// NOTE: This is NO longer entirely compliant with the xyz RPC interface ( the pool part is not compliant), currently and can be used as it for their chain,
// atleast for the last 1 year
// TODO: error handling is non-existant ( as this was built up in hrs ). Add proper error handling
//
@ -31,7 +28,9 @@ import "time"
import "fmt"
//import "net"
//import "bytes"
import "bytes"
import "unicode"
import "unsafe" // need to avoid this, but only used by byteviewer
import "strings"
import "strconv"
import "context"
@ -48,11 +47,11 @@ import log "github.com/sirupsen/logrus"
//import "github.com/ybbus/jsonrpc"
import "github.com/deroproject/derohe/block"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/address"
import "github.com/deroproject/derohe/cryptography/crypto"
import "github.com/deroproject/derohe/cryptography/bn256"
import "github.com/deroproject/derohe/globals"
import "github.com/deroproject/derohe/transaction"
import "github.com/deroproject/derohe/structures"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/proof"
import "github.com/deroproject/derohe/glue/rwc"
@ -164,7 +163,7 @@ func Connect() (err error) {
fmt.Printf("Ping Received %s\n", result)
}
var info structures.GetInfo_Result
var info rpc.GetInfo_Result
// collect all the data afresh, execute rpc to service
if err = rpc_client.Call("DERO.GetInfo", nil, &info); err != nil {
@ -248,6 +247,7 @@ type txinfo struct {
Version int // version of tx
Size string // size of tx in KB
Sizeuint64 uint64 // size of tx in bytes
Burn_Value string // value of burned amount
Fee string // fee in TX
Feeuint64 uint64 // fee in atomic units
In int // inputs counts
@ -263,18 +263,37 @@ type txinfo struct {
InvalidBlock []string // the tx is invalid in which block
Skipped bool // this is only valid, when a block is being listed
Ring_size int
//Ring [][]globals.TX_Output_Data
Ring [][][]byte // contains entire ring in raw form
TXpublickey string
PayID32 string // 32 byte payment ID
PayID8 string // 8 byte encrypted payment ID
Proof_address string // address agains which which the proving ran
Proof_index int64 // proof satisfied for which index
Proof_amount string // decoded amount
Proof_PayID8 string // decrypted 8 byte payment id
Proof_error string // error if any while decoding proof
Proof_address string // address agains which which the proving ran
Proof_index int64 // proof satisfied for which index
Proof_amount string // decoded amount
Proof_Payload_raw string // payload raw bytes
Proof_Payload string // if proof decoded, decoded , else decode error
Proof_error string // error if any while decoding proof
SC_TX_Available string //bool // whether this contains an SC TX
SC_Signer string // whether SC signer
SC_Signer_verified string // whether SC signer can be verified successfully
SC_Balance uint64 // SC SC_Balance in atomic units
SC_Balance_string string // SC_Balance in DERO
SC_Keys map[string]string // SC key value of
SC_Args rpc.Arguments // rpc.Arguments
SC_Code string // install SC
Assets []Asset
}
type Asset struct {
SCID crypto.Hash
Fees string
Burn string
Ring []string
Ring_size int
}
// any information for block which needs to be printed
@ -306,7 +325,7 @@ type block_info struct {
// if hash is less than 64 bytes then it is considered a height parameter
func load_block_from_rpc(info *block_info, block_hash string, recursive bool) (err error) {
var bl block.Block
var bresult structures.GetBlock_Result
var bresult rpc.GetBlock_Result
var block_height int
var block_bin []byte
@ -314,7 +333,7 @@ func load_block_from_rpc(info *block_info, block_hash string, recursive bool) (e
fmt.Sscanf(block_hash, "%d", &block_height)
// user requested block height
log.Debugf("User requested block at topoheight %d user input %s", block_height, block_hash)
if err = rpc_client.Call("DERO.GetBlock", structures.GetBlock_Params{Height: uint64(block_height)}, &bresult); err != nil {
if err = rpc_client.Call("DERO.GetBlock", rpc.GetBlock_Params{Height: uint64(block_height)}, &bresult); err != nil {
return fmt.Errorf("getblock rpc failed")
}
@ -322,7 +341,7 @@ func load_block_from_rpc(info *block_info, block_hash string, recursive bool) (e
log.Debugf("User requested block using hash %s", block_hash)
if err = rpc_client.Call("DERO.GetBlock", structures.GetBlock_Params{Hash: block_hash}, &bresult); err != nil {
if err = rpc_client.Call("DERO.GetBlock", rpc.GetBlock_Params{Hash: block_hash}, &bresult); err != nil {
return fmt.Errorf("getblock rpc failed")
}
}
@ -405,11 +424,18 @@ func load_tx_info_from_tx(info *txinfo, tx *transaction.Transaction) (err error)
info.Sizeuint64 = uint64(len(tx.Serialize()))
info.Version = int(tx.Version)
//info.Extra = fmt.Sprintf("%x", tx.Extra)
info.RootHash = fmt.Sprintf("%x", tx.Statement.Roothash[:])
if len(tx.Payloads) >= 1 {
info.RootHash = fmt.Sprintf("%x", tx.Payloads[0].Statement.Roothash[:])
}
info.HeightBuilt = tx.Height
//info.In = len(tx.Vin)
//info.Out = len(tx.Vout)
if tx.TransactionType == transaction.BURN_TX {
info.Burn_Value = fmt.Sprintf(" %.05f", float64(tx.Value)/100000)
}
switch tx.TransactionType {
case transaction.PREMINE:
@ -418,7 +444,7 @@ func load_tx_info_from_tx(info *txinfo, tx *transaction.Transaction) (err error)
panic(err)
}
astring := address.NewAddressFromKeys(&acckey)
astring := rpc.NewAddressFromKeys(&acckey)
astring.Mainnet = mainnet
info.OutAddress = append(info.OutAddress, astring.String())
info.Amount = globals.FormatMoney(tx.Value)
@ -429,7 +455,7 @@ func load_tx_info_from_tx(info *txinfo, tx *transaction.Transaction) (err error)
panic(err)
}
astring := address.NewAddressFromKeys(&acckey)
astring := rpc.NewAddressFromKeys(&acckey)
astring.Mainnet = mainnet
info.OutAddress = append(info.OutAddress, astring.String())
@ -440,23 +466,15 @@ func load_tx_info_from_tx(info *txinfo, tx *transaction.Transaction) (err error)
if err := acckey.DecodeCompressed(tx.MinerAddress[:]); err != nil {
panic(err)
}
astring := address.NewAddressFromKeys(&acckey)
astring := rpc.NewAddressFromKeys(&acckey)
astring.Mainnet = mainnet
info.OutAddress = append(info.OutAddress, astring.String())
case transaction.NORMAL:
case transaction.NORMAL, transaction.BURN_TX, transaction.SC_TX:
info.Fee = fmt.Sprintf("%.05f", float64(tx.Statement.Fees)/100000)
info.Feeuint64 = tx.Statement.Fees
info.Amount = "?"
}
info.Ring_size = len(tx.Statement.Publickeylist_compressed) //len(tx.Vin[0].(transaction.Txin_to_key).Key_offsets)
for i := range tx.Statement.Publickeylist {
astring := address.NewAddressFromKeys((*crypto.Point)(tx.Statement.Publickeylist[i]))
astring.Mainnet = mainnet
info.OutAddress = append(info.OutAddress, astring.String())
}
if tx.TransactionType == transaction.SC_TX {
info.SC_Args = tx.SCDATA
}
// if outputs cannot be located, do not panic
@ -468,21 +486,11 @@ func load_tx_info_from_tx(info *txinfo, tx *transaction.Transaction) (err error)
switch 0 {
case 0:
info.Type = "DERO_HOMOMORPHIC"
/*case 1:
info.Type = "RingCT/1 MG"
case 2:
info.Type = "RingCT/2 Simple"
case 3:
info.Type = "RingCT/3 Full bulletproof"
case 4:
info.Type = "RingCT/4 Simple Bulletproof"
*/
default:
panic("not implement")
panic("not implemented")
}
if !info.In_Pool && !info.CoinBase && tx.TransactionType == transaction.NORMAL { // find the age of block and other meta
if !info.In_Pool && !info.CoinBase && (tx.TransactionType == transaction.NORMAL || tx.TransactionType == transaction.BURN_TX || tx.TransactionType == transaction.SC_TX) { // find the age of block and other meta
var blinfo block_info
err := load_block_from_rpc(&blinfo, fmt.Sprintf("%s", info.Height), false) // we only need block data and not data of txs
if err != nil {
@ -504,8 +512,8 @@ func load_tx_info_from_tx(info *txinfo, tx *transaction.Transaction) (err error)
// load and setup txinfo from rpc
func load_tx_from_rpc(info *txinfo, txhash string) (err error) {
var tx_params structures.GetTransaction_Params
var tx_result structures.GetTransaction_Result
var tx_params rpc.GetTransaction_Params
var tx_result rpc.GetTransaction_Result
//fmt.Printf("Requesting tx data %s", txhash);
tx_params.Tx_Hashes = append(tx_params.Tx_Hashes, txhash)
@ -541,13 +549,46 @@ func load_tx_from_rpc(info *txinfo, txhash string) (err error) {
}
if tx.IsCoinbase() { // fill miner tx reward from what the chain tells us
info.Amount = fmt.Sprintf("%.012f", float64(uint64(tx_result.Txs[0].Reward))/1000000000000)
info.Amount = fmt.Sprintf("%.05f", float64(uint64(tx_result.Txs[0].Reward))/100000)
}
info.ValidBlock = tx_result.Txs[0].ValidBlock
info.InvalidBlock = tx_result.Txs[0].InvalidBlock
//info.Ring = tx_result.Txs[0].Ring
info.Ring = tx_result.Txs[0].Ring
if tx.TransactionType == transaction.NORMAL || tx.TransactionType == transaction.BURN_TX || tx.TransactionType == transaction.SC_TX {
for t := range tx.Payloads {
var a Asset
a.SCID = tx.Payloads[t].SCID
a.Fees = fmt.Sprintf("%.05f", float64(tx.Payloads[t].Statement.Fees)/100000)
a.Burn = fmt.Sprintf("%.05f", float64(tx.Payloads[t].BurnValue)/100000)
a.Ring_size = len(tx_result.Txs[0].Ring[t])
for i := range tx_result.Txs[0].Ring[t] {
point_compressed := tx_result.Txs[0].Ring[t][i]
var p bn256.G1
if err = p.DecodeCompressed(point_compressed[:]); err != nil {
continue
}
astring := rpc.NewAddressFromKeys((*crypto.Point)(&p))
astring.Mainnet = mainnet
a.Ring = append(a.Ring, astring.String())
}
info.Assets = append(info.Assets, a)
}
//fmt.Printf("assets now %+v\n", info.Assets)
}
info.SC_Balance = tx_result.Txs[0].Balance
info.SC_Balance_string = fmt.Sprintf("%.05f", float64(uint64(info.SC_Balance)/100000))
info.SC_Code = tx_result.Txs[0].Code
//info.Ring = strings.Join(info.OutAddress, " ")
//fmt.Printf("tx_result %+v\n",tx_result.Txs)
// fmt.Printf("response contained tx %s \n", tx.GetHash())
@ -607,14 +648,14 @@ func tx_handler(w http.ResponseWriter, r *http.Request) {
if tx_proof != "" {
// there may be more than 1 amounts, only first one is shown
addresses, amounts, payids, err := proof.Prove(tx_proof, info.Hex, mainnet)
addresses, amounts, raw, decoded, err := proof.Prove(tx_proof, info.Hex, info.Ring, mainnet)
if err == nil { //&& len(amounts) > 0 && len(indexes) > 0{
log.Debugf("Successfully proved transaction %s len(payids) %d", tx_hex, len(payids))
log.Debugf("Successfully proved transaction %s len(payids) %d", tx_hex, len(decoded))
info.Proof_address = addresses[0]
info.Proof_amount = globals.FormatMoney(amounts[0])
if len(payids) >= 1 {
info.Proof_PayID8 = fmt.Sprintf("%x", payids[0]) // decrypted payment ID
}
info.Proof_Payload_raw = BytesViewer(raw[0]).String() // raw payload
info.Proof_Payload = decoded[0]
} else {
log.Debugf("err while proving %s", err)
if err != nil {
@ -675,7 +716,7 @@ func fill_tx_structure(pos int, size_in_blocks int) (data []block_info) {
func show_page(w http.ResponseWriter, page int) {
data := map[string]interface{}{}
var info structures.GetInfo_Result
var info rpc.GetInfo_Result
data["title"] = "DERO HE BlockChain Explorer(v1)"
data["servertime"] = time.Now().UTC().Format("2006-01-02 15:04:05")
@ -747,7 +788,7 @@ exit_error:
func txpool_handler(w http.ResponseWriter, r *http.Request) {
data := map[string]interface{}{}
var info structures.GetInfo_Result
var info rpc.GetInfo_Result
data["title"] = "DERO HE BlockChain Explorer(v1)"
data["servertime"] = time.Now().UTC().Format("2006-01-02 15:04:05")
@ -801,7 +842,7 @@ func root_handler(w http.ResponseWriter, r *http.Request) {
// search handler, finds the items using rpc bruteforce
func search_handler(w http.ResponseWriter, r *http.Request) {
var info structures.GetInfo_Result
var info rpc.GetInfo_Result
var err error
log.Debugf("Showing search page")
@ -860,7 +901,7 @@ func search_handler(w http.ResponseWriter, r *http.Request) {
{ // show error page
data := map[string]interface{}{}
var info structures.GetInfo_Result
var info rpc.GetInfo_Result
data["title"] = "DERO HE BlockChain Explorer(v1)"
data["servertime"] = time.Now().UTC().Format("2006-01-02 15:04:05")
@ -901,7 +942,7 @@ func fill_tx_pool_info(data map[string]interface{}, max_count int) error {
var err error
var txs []txinfo
var txpool structures.GetTxPool_Result
var txpool rpc.GetTxPool_Result
data["mempool"] = txs // initialize with empty data
if err = rpc_client.Call("DERO.GetTxPool", nil, &txpool); err != nil {
@ -923,5 +964,56 @@ func fill_tx_pool_info(data map[string]interface{}, max_count int) error {
data["mempool"] = txs
return nil
}
// BytesViewer renders a byte slice as a markdown-style hex-dump table
// (address | hex bytes | printable text), 16 bytes per row. It is used to
// show raw decrypted tx payloads on the explorer's tx page.
type BytesViewer []byte

// String returns the hex-dump view of b.
// An empty slice yields a placeholder string instead of an empty table.
func (b BytesViewer) String() string {
	if len(b) == 0 {
		return "invalid string"
	}
	const head = `
| Address | Hex | Text |
| -------: | :---------------------------------------------- | :--------------- |
`
	const row = 16
	// rough capacity guess: header plus one table line per 16-byte row
	result := make([]byte, 0, len(head)/2*(len(b)/16+3))
	result = append(result, head...)
	for i := 0; i < len(b); i += row {
		result = append(result, "| "...)
		result = append(result, fmt.Sprintf("%08x", i)...)
		result = append(result, " | "...)
		k := i + row
		more := 0
		if k >= len(b) {
			// final short row: remember how many cells need padding
			more = k - len(b)
			k = len(b)
		}
		// hex column: two lowercase digits plus one trailing space per byte
		for j := i; j != k; j++ {
			if b[j] < 16 {
				result = append(result, '0')
			}
			result = strconv.AppendUint(result, uint64(b[j]), 16)
			result = append(result, ' ')
		}
		// pad missing hex cells (3 chars each) so columns stay aligned
		for j := 0; j != more; j++ {
			result = append(result, "   "...)
		}
		result = append(result, "| "...)
		// text column: show the bytes as-is, but collapse any whitespace
		// rune to a plain space so the table rows stay on one line
		buf := bytes.Map(func(r rune) rune {
			if unicode.IsSpace(r) {
				return ' '
			}
			return r
		}, b[i:k])
		result = append(result, buf...)
		for j := 0; j != more; j++ {
			result = append(result, ' ')
		}
		result = append(result, " |\n"...)
	}
	// zero-copy []byte -> string conversion; result is never mutated again
	return *(*string)(unsafe.Pointer(&result))
}

View File

@ -303,7 +303,10 @@ var tx_template string = `{{define "tx"}}
{{ template "header" . }}
<div>
<H4 style="margin:5px">Tx hash: {{.info.Hash}} Type {{.info.TransactionType }}</H4>
<H5 style="margin:5px">Tx prefix hash: {{.info.PrefixHash}}</H5>
{{if eq .info.TransactionType "BURN" }}
<H4 style="margin:5px; color: red">Burns: {{.info.Burn_Value }} DERO</H4>
{{end}}
<H5>Block: <a href="/block/{{.info.ValidBlock}}">{{.info.ValidBlock}}</a> (VALID) </H5>
@ -329,7 +332,7 @@ var tx_template string = `{{define "tx"}}
{{end}}
{{if eq .info.TransactionType "NORMAL"}}
{{if or (eq .info.TransactionType "NORMAL") (eq .info.TransactionType "BURN") (eq .info.TransactionType "SC") }}
<H5 style="margin:5px">Tx RootHash: {{.info.RootHash}} built height : {{.info.HeightBuilt}} </H5>
@ -360,23 +363,40 @@ var tx_template string = `{{define "tx"}}
<td colspan="3">Extra: {{.info.Extra}}</td>
</tr>
</table>
<h3>{{len .info.OutAddress}} inputs/outputs (RING size)</h3>
<div class="center">
{{range $ii, $ee := .info.Assets}}
<H5>SCID: {{$ee.SCID}} {{$ee.Ring_size}} inputs/outputs (RING size) Fees {{$ee.Fees}} Burned {{$ee.Burn}}</H5>
<div class="center">
<table class="center">
<tr>
<td>address</td>
<td>amount</td>
</tr>
{{range $i, $e := .info.OutAddress}}
{{range $i, $e := $ee.Ring}}
<tr>
<td>{{ $e }}</td>
<td>{{$.info.Amount}}</td>
</tr>
{{end}}
</table>
</div>
{{end}}
{{if eq .info.TransactionType "SC"}}
<table class="center" style="width: 80%; margin-top:10px">
<tr>
<td>SC Balance: {{ .info.SC_Balance_string }} DERO</td>
</tr>
<tr>
<td>SC CODE:<pre style="text-align: left;"> {{ .info.SC_Code }}</pre></td>
</tr>
<tr>
<td>SC Arguments: {{ .info.SC_Args }}</td>
</tr>
</table>
{{end}}
<!-- TODO currently we donot enable user to prove or decode something -->
<br/>
<br/>
@ -415,8 +435,13 @@ var tx_template string = `{{define "tx"}}
<tr>
<td><h2><font color="blue">{{.info.Proof_address}} Received {{.info.Proof_amount}} DERO
{{if .info.Proof_PayID8}}
<br/> Decrypted Payment ID {{ .info.Proof_PayID8}}
{{if .info.Proof_Payload}}
<br/> Decoded Data {{ .info.Proof_Payload}}
<br/> Raw Data
<br/><pre>{{ .info.Proof_Payload_raw}}</pre>
{{end}}
</font> </h2>
</td>

View File

@ -0,0 +1,182 @@
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package main
//import "os"
import "fmt"
import "time"
import "crypto/sha1"
import "github.com/romana/rlog"
import "etcd.io/bbolt"
import "github.com/deroproject/derohe/rpc"
import "github.com/deroproject/derohe/walletapi"
import "github.com/ybbus/jsonrpc"
// PLUGIN_NAME identifies this service; it is also used to derive the
// on-disk database filename.
const PLUGIN_NAME = "pong_server"

// DEST_PORT is the integrated-address destination port this service
// serves; incoming transfers carrying any other port are ignored.
const DEST_PORT = uint64(0x1234567812345678)

// expected_arguments describes the integrated address handed to buyers:
// the destination port, a comment, and the exact price in atomic units.
var expected_arguments = rpc.Arguments{
	{rpc.RPC_DESTINATION_PORT, rpc.DataUint64, DEST_PORT},
	// { rpc.RPC_EXPIRY , rpc.DataTime, time.Now().Add(time.Hour).UTC()},
	{rpc.RPC_COMMENT, rpc.DataString, "Purchase PONG"},
	//{"float64", rpc.DataFloat64, float64(0.12345)}, // in atomic units
	{rpc.RPC_VALUE_TRANSFER, rpc.DataUint64, uint64(12345)}, // in atomic units
}

// response is the reply payload template; slot 0 (destination port) and
// slot 2 (comment) are filled in per sale by processing_thread.
// currently the interpreter seems to have a glitch if this gets initialized within the code
// see limitations github.com/traefik/yaegi
var response = rpc.Arguments{
	{rpc.RPC_DESTINATION_PORT, rpc.DataUint64, uint64(0)},
	{rpc.RPC_SOURCE_PORT, rpc.DataUint64, DEST_PORT},
	{rpc.RPC_COMMENT, rpc.DataString, "Successfully purchased pong (this could be serial/license key or download link or further)"},
}

// rpcClient talks to the local wallet's JSON-RPC endpoint.
var rpcClient = jsonrpc.NewClient("http://127.0.0.1:40403/json_rpc")
// empty place holder
// main connects to the local wallet over JSON-RPC, derives a per-wallet
// bbolt database for sale records, prints the integrated addresses a
// buyer can pay to, and then blocks forever in processing_thread.
func main() {
	var err error
	fmt.Printf("Pong Server to demonstrate RPC over dero chain.\n")

	var addr *rpc.Address
	var addr_result rpc.GetAddress_Result
	err = rpcClient.CallFor(&addr_result, "GetAddress")
	if err != nil || addr_result.Address == "" {
		fmt.Printf("Could not obtain address from wallet err %s\n", err)
		return
	}

	if addr, err = rpc.NewAddress(addr_result.Address); err != nil {
		fmt.Printf("address could not be parsed: addr:%s err:%s\n", addr_result.Address, err)
		return
	}

	// the db filename is derived from the wallet address, so every wallet
	// keeps its own persistent sale history
	shasum := fmt.Sprintf("%x", sha1.Sum([]byte(addr.String())))
	db_name := fmt.Sprintf("%s_%s.bbolt.db", PLUGIN_NAME, shasum)
	db, err := bbolt.Open(db_name, 0600, nil)
	if err != nil {
		fmt.Printf("could not open db err:%s\n", err)
		return
	}
	//defer db.Close()

	// ensure the SALE bucket exists; it records which TXIDs were served
	err = db.Update(func(tx *bbolt.Tx) error {
		_, err := tx.CreateBucketIfNotExists([]byte("SALE"))
		return err
	})
	if err != nil {
		fmt.Printf("err creating bucket. err %s\n", err)
	}

	fmt.Printf("Persistent store created in '%s'\n", db_name)

	fmt.Printf("Wallet Address: %s\n", addr)

	// address without the value argument lets the buyer choose the amount
	service_address_without_amount := addr.Clone()
	service_address_without_amount.Arguments = expected_arguments[:len(expected_arguments)-1]
	fmt.Printf("Integrated address to activate '%s', (without hardcoded amount) service: \n%s\n", PLUGIN_NAME, service_address_without_amount.String())

	// service address can be created client side for now
	service_address := addr.Clone()
	service_address.Arguments = expected_arguments
	fmt.Printf("Integrated address to activate '%s', service: \n%s\n", PLUGIN_NAME, service_address.String())

	processing_thread(db) // keep processing; never returns
	//time.Sleep(time.Second)
	//return
}
// processing_thread polls the wallet once per second for incoming
// transfers addressed to DEST_PORT, replies to each valid purchase with a
// pong payload, and records served TXIDs in the SALE bucket so that a
// transfer is never answered twice. It never returns.
func processing_thread(db *bbolt.DB) {

	var err error

	for { // currently we traverse entire history
		time.Sleep(time.Second)

		var transfers rpc.Get_Transfers_Result
		err = rpcClient.CallFor(&transfers, "GetTransfers", rpc.Get_Transfers_Params{In: true, DestinationPort: DEST_PORT})
		if err != nil {
			rlog.Warnf("Could not obtain gettransfers from wallet err %s\n", err)
			continue
		}

		for _, e := range transfers.Entries {
			if e.Coinbase || !e.Incoming { // skip coinbase or outgoing, self generated transactions
				continue
			}

			// check whether the entry has been processed before, if yes skip it
			var already_processed bool
			db.View(func(tx *bbolt.Tx) error {
				if b := tx.Bucket([]byte("SALE")); b != nil {
					if ok := b.Get([]byte(e.TXID)); ok != nil { // if existing in bucket
						already_processed = true
					}
				}
				return nil
			})

			if already_processed { // if already processed skip it
				continue
			}

			// check whether this service should handle the transfer
			if !e.Payload_RPC.Has(rpc.RPC_DESTINATION_PORT, rpc.DataUint64) ||
				DEST_PORT != e.Payload_RPC.Value(rpc.RPC_DESTINATION_PORT, rpc.DataUint64).(uint64) { // this service expects a specific destination port
				continue
			}

			rlog.Infof("tx should be processed %s\n", e.TXID)

			if expected_arguments.Has(rpc.RPC_VALUE_TRANSFER, rpc.DataUint64) { // this service expects a specific value
				value_expected := expected_arguments.Value(rpc.RPC_VALUE_TRANSFER, rpc.DataUint64).(uint64)
				if e.Amount != value_expected { // TODO we should mark it as faulty
					rlog.Warnf("user transferred %d, we were expecting %d. so we will not do anything\n", e.Amount, value_expected) // this is an unexpected situation
					continue
				}

				// value received is what we are expecting, so time for response
				response[0].Value = e.SourcePort // source port now becomes destination port, similar to TCP
				response[2].Value = fmt.Sprintf("Successfully purchased pong (could be serial, license or download link or anything). You sent %s at height %d", walletapi.FormatMoney(e.Amount), e.Height)

				//_, err := response.CheckPack(transaction.PAYLOAD0_LIMIT)) // we only have 144 bytes for RPC

				// sender of ping now becomes destination
				var str string
				tparams := rpc.Transfer_Params{Transfers: []rpc.Transfer{{Destination: e.Sender, Amount: uint64(1), Payload_RPC: response}}}
				err = rpcClient.CallFor(&str, "Transfer", tparams)
				if err != nil {
					rlog.Warnf("sending reply tx err %s\n", err)
					continue
				}

				// mark the TXID as served only after the reply tx succeeded
				err = db.Update(func(tx *bbolt.Tx) error {
					b := tx.Bucket([]byte("SALE"))
					return b.Put([]byte(e.TXID), []byte("done"))
				})
				if err != nil {
					rlog.Warnf("err updating db to err %s\n", err)
				} else {
					rlog.Infof("ping replied successfully with pong")
				}
			}
		}
	}
}

View File

@ -0,0 +1,2 @@
Various RPC servers can be developed, which can represent various activities not representable on any existing blockchain.

View File

@ -17,7 +17,7 @@
package config
import "github.com/satori/go.uuid"
import "github.com/deroproject/derohe/crypto"
import "github.com/deroproject/derohe/cryptography/crypto"
// all global configuration variables are picked from here
@ -28,27 +28,12 @@ import "github.com/deroproject/derohe/crypto"
// since most mining nodes will be running in datacenter, 3 secs blocks c
const BLOCK_TIME = uint64(18)
// we are ignoring leap seconds from calculations
// coin emiision related settings
const COIN_MONEY_SUPPLY = uint64(18446744073709551615) // 2^64-1
const COIN_EMISSION_SPEED_FACTOR = uint64(20)
const COIN_DIFFICULTY_TARGET = uint64(120) // this is a feeder to emission formula
const COIN_FINAL_SUBSIDY_PER_MINUTE = uint64(300000000000) // 0.3 DERO per minute = 157680 per year roughly
// these are used to configure mainnet hard fork
const HARDFORK_1_END = int64(1)
//const HARDFORK_1_TOTAL_SUPPLY = uint64(2000000000000000000 ) // this is used to mark total supply
// till 95532 (includind) 4739519967524007940
// 95543 4739807553788105597
// 95549 4739964392976757069
// 95550 4739990536584241377
const MAINNET_HARDFORK_1_TOTAL_SUPPLY = uint64(4739990536584241377)
const TESTNET_HARDFORK_1_TOTAL_SUPPLY = uint64(4319584000000000000)
const MAX_CHAIN_HEIGHT = uint64(2147483648) // 2^31
// note we are keeping the tree name small for disk savings, since they will be stored n times (at least for archival nodes)
// this is used by graviton
const BALANCE_TREE = "B" // keeps main balance
const SC_META = "M" // keeps all SCs balance, their state, their OWNER, their data tree top hash is stored here
// one are open SCs, which provide i/o privacy
// one are private SCs which are truly private, in which no one has visibility of io or functionality
// 1.25 MB block every 12 secs is equal to roughly 75 TX per second
// if we consider side blocks, TPS increase to > 100 TPS
@ -65,8 +50,8 @@ const MAX_MIXIN = 128 // <= 128, mixin will be accepted
// ATLANTIS FEE calculation constants are here
const FEE_PER_KB = uint64(1000000000) // .001 dero per kb
const MAINNET_BOOTSTRAP_DIFFICULTY = uint64(10 * BLOCK_TIME) // atlantis mainnet botstrapped at 200 MH/s
const MAINNET_MINIMUM_DIFFICULTY = uint64(10 * BLOCK_TIME) // 5 KH/s
const MAINNET_BOOTSTRAP_DIFFICULTY = uint64(800 * BLOCK_TIME) // atlantis mainnet botstrapped at 200 MH/s
const MAINNET_MINIMUM_DIFFICULTY = uint64(800 * BLOCK_TIME) // 5 KH/s
// testnet bootstraps at 1 MH
//const TESTNET_BOOTSTRAP_DIFFICULTY = uint64(1000*1000*BLOCK_TIME)
@ -79,6 +64,9 @@ const TESTNET_MINIMUM_DIFFICULTY = uint64(800 * BLOCK_TIME) // 800 H
// gives immense scalability,
const STABLE_LIMIT = int64(8)
// reward percent that is shared between miners/dev
const DEVSHARE = uint64(600) // it's out of 10000, 600*100/10000 = 6%, 3% dev, 3% foundation
// we can have number of chains running for testing reasons
type CHAIN_CONFIG struct {
Name string
@ -88,6 +76,8 @@ type CHAIN_CONFIG struct {
RPC_Default_Port int
Wallet_RPC_Default_Port int
Dev_Address string // to which address the dev's share of fees must go
Genesis_Nonce uint32
Genesis_Block_Hash crypto.Hash
@ -96,7 +86,7 @@ type CHAIN_CONFIG struct {
}
var Mainnet = CHAIN_CONFIG{Name: "mainnet",
Network_ID: uuid.FromBytesOrNil([]byte{0x59, 0xd7, 0xf7, 0xe9, 0xdd, 0x48, 0xd5, 0xfd, 0x13, 0x0a, 0xf6, 0xe0, 0x9a, 0x44, 0x44, 0x0}),
Network_ID: uuid.FromBytesOrNil([]byte{0x59, 0xd7, 0xf7, 0xe9, 0xdd, 0x48, 0xd5, 0xfd, 0x13, 0x0a, 0xf6, 0xe0, 0x9a, 0x44, 0x45, 0x0}),
P2P_Default_Port: 10101,
RPC_Default_Port: 10102,
Wallet_RPC_Default_Port: 10103,
@ -110,10 +100,11 @@ var Mainnet = CHAIN_CONFIG{Name: "mainnet",
"8fff7f" + // PREMINE_VALUE
"a01f9bcc1208dee302769931ad378a4c0c4b2c21b0cfb3e752607e12d2b6fa6425", // miners public key
Dev_Address: "deto1qxsplx7vzgydacczw6vnrtfh3fxqcjevyxcvlvl82fs8uykjkmaxgfgulfha5",
}
var Testnet = CHAIN_CONFIG{Name: "testnet", // testnet will always have last 3 bytes 0
Network_ID: uuid.FromBytesOrNil([]byte{0x59, 0xd7, 0xf7, 0xe9, 0xdd, 0x48, 0xd5, 0xfd, 0x13, 0x0a, 0xf6, 0xe0, 0x24, 0x00, 0x00, 0x00}),
Network_ID: uuid.FromBytesOrNil([]byte{0x59, 0xd7, 0xf7, 0xe9, 0xdd, 0x48, 0xd5, 0xfd, 0x13, 0x0a, 0xf6, 0xe0, 0x25, 0x00, 0x00, 0x00}),
P2P_Default_Port: 40401,
RPC_Default_Port: 40402,
Wallet_RPC_Default_Port: 40403,
@ -127,6 +118,7 @@ var Testnet = CHAIN_CONFIG{Name: "testnet", // testnet will always have last 3 b
"8fff7f" + // PREMINE_VALUE
"a01f9bcc1208dee302769931ad378a4c0c4b2c21b0cfb3e752607e12d2b6fa6425", // miners public key
Dev_Address: "deto1qxsplx7vzgydacczw6vnrtfh3fxqcjevyxcvlvl82fs8uykjkmaxgfgulfha5",
}
// mainnet has a remote daemon node, which can be used be default, if user provides a --remote flag

View File

@ -20,4 +20,4 @@ import "github.com/blang/semver"
// right now it has to be manually changed
// do we need to include git commitsha??
var Version = semver.MustParse("3.0.0-25.DEROHE.alpha+30122020")
var Version = semver.MustParse("3.2.0-12.DEROHE.STARGATE+22022021")

View File

@ -0,0 +1,47 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [opensource@clearmatics.com][email]. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[email]: mailto:opensource@clearmatics.com
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

View File

@ -0,0 +1,34 @@
SHELL = bash
GO_FILES = $(shell find . -name "*.go" | grep -vE ".git")
GO_COVER_FILE = `find . -name "coverage.out"`
.PHONY: all test format cover-clean check fmt vet lint
test: $(GO_FILES)
go test ./...
format:
gofmt -s -w ${GO_FILES}
cover: $(GO_FILES)
go test -coverprofile=coverage.out ./...
go tool cover -html=coverage.out
cover-clean:
rm -f $(GO_COVER_FILE)
deps:
go mod download
check:
if [ -n "$(shell gofmt -l ${GO_FILES})" ]; then \
echo 1>&2 'The following files need to be formatted:'; \
gofmt -l .; \
exit 1; \
fi
vet:
go vet $(GO_FILES)
lint:
golint $(GO_FILES)

View File

@ -0,0 +1,33 @@
# BN256
[![Build Status](https://travis-ci.org/clearmatics/bn256.svg?branch=master)](https://travis-ci.org/clearmatics/bn256)
This package implements a [particular](https://eprint.iacr.org/2013/507.pdf) bilinear group.
The code is imported from https://github.com/ethereum/go-ethereum/tree/master/crypto/bn256/cloudflare
:rotating_light: **WARNING** This package originally claimed to operate at a 128-bit security level. However, [recent work](https://ellipticnews.wordpress.com/2016/05/02/kim-barbulescu-variant-of-the-number-field-sieve-to-compute-discrete-logarithms-in-finite-fields/) suggests that **this is no longer the case**.
## A note on the selection of the bilinear group
The parameters defined in the `constants.go` file follow the parameters used in [alt-bn128 (libff)](https://github.com/scipr-lab/libff/blob/master/libff/algebra/curves/alt_bn128/alt_bn128_init.cpp). These parameters were selected so that `r1` has a high 2-adic order. This is key to improve efficiency of the key and proof generation algorithms of the SNARK used.
## Installation
go get github.com/clearmatics/bn256
## Development
This project uses [go modules](https://github.com/golang/go/wiki/Modules).
If you develop in your `GOPATH` and use GO 1.11, make sure to run:
```bash
export GO111MODULE=on
```
In fact:
> (Inside $GOPATH/src, for compatibility, the go command still runs in the old GOPATH mode, even if a go.mod is found.)
See: https://blog.golang.org/using-go-modules
> For more fine-grained control, the module support in Go 1.11 respects a temporary environment variable, GO111MODULE, which can be set to one of three string values: off, on, or auto (the default). If GO111MODULE=off, then the go command never uses the new module support. Instead it looks in vendor directories and GOPATH to find dependencies; we now refer to this as "GOPATH mode." If GO111MODULE=on, then the go command requires the use of modules, never consulting GOPATH. We refer to this as the command being module-aware or running in "module-aware mode". If GO111MODULE=auto or is unset, then the go command enables or disables module support based on the current directory. Module support is enabled only when the current directory is outside GOPATH/src and itself contains a go.mod file or is below a directory containing a go.mod file.
See: https://golang.org/cmd/go/#hdr-Preliminary_module_support
The project follows standard Go conventions using `gofmt`. If you wish to contribute to the project please follow standard Go conventions. The CI server automatically runs these checks.

490
cryptography/bn256/bn256.go Normal file
View File

@ -0,0 +1,490 @@
// Package bn256 implements a particular bilinear group at the 128-bit security
// level.
//
// Bilinear groups are the basis of many of the new cryptographic protocols that
// have been proposed over the past decade. They consist of a triplet of groups
// (G₁, G₂ and GT) such that there exists a function e(g₁ˣ,g₂ʸ)=gTˣʸ (where gₓ
// is a generator of the respective group). That function is called a pairing
// function.
//
// This package specifically implements the Optimal Ate pairing over a 256-bit
// Barreto-Naehrig curve as described in
// http://cryptojedi.org/papers/dclxvi-20100714.pdf. Its output is compatible
// with the implementation described in that paper.
package bn256
import (
"crypto/rand"
"errors"
"io"
"math/big"
)
// randomK returns a uniformly random scalar in [1, Order-1] read from r.
// The error from rand.Int is checked before k is used: in the original
// ordering (k.Sign() > 0 || err != nil) a failing reader left k nil and
// the call panicked on the nil *big.Int instead of returning the error.
func randomK(r io.Reader) (k *big.Int, err error) {
	for {
		k, err = rand.Int(r, Order)
		if err != nil {
			return
		}
		if k.Sign() > 0 {
			return
		}
	}
}
// G1 is an abstract cyclic group. The zero value is suitable for use as the
// output of an operation, but cannot be used as an input.
type G1 struct {
	p *curvePoint // lazily allocated; methods treat nil as "allocate fresh"
}
// RandomG1 returns x and g₁ˣ where x is a random, non-zero number read from r.
func RandomG1(r io.Reader) (*big.Int, *G1, error) {
	k, err := randomK(r)
	if err != nil {
		return nil, nil, err
	}
	var g G1
	return k, g.ScalarBaseMult(k), nil
}
// String implements fmt.Stringer by delegating to the underlying point.
func (e *G1) String() string {
	inner := e.p.String()
	return "bn256.G1" + inner
}
// ScalarBaseMult sets e to g*k where g is the generator of the group and then
// returns e.
func (e *G1) ScalarBaseMult(k *big.Int) *G1 {
	if e.p == nil {
		e.p = new(curvePoint)
	}
	e.p.Mul(curveGen, k)
	return e
}
// ScalarMult sets e to a*k and then returns e.
func (e *G1) ScalarMult(a *G1, k *big.Int) *G1 {
	if e.p == nil {
		e.p = new(curvePoint)
	}
	e.p.Mul(a.p, k)
	return e
}
// Add sets e to a+b and then returns e.
func (e *G1) Add(a, b *G1) *G1 {
	if e.p == nil {
		e.p = new(curvePoint)
	}
	e.p.Add(a.p, b.p)
	return e
}
// Neg sets e to -a and then returns e.
func (e *G1) Neg(a *G1) *G1 {
	if e.p == nil {
		e.p = new(curvePoint)
	}
	e.p.Neg(a.p)
	return e
}
// Set sets e to a and then returns e.
func (e *G1) Set(a *G1) *G1 {
	if e.p == nil {
		e.p = new(curvePoint)
	}
	e.p.Set(a.p)
	return e
}
// Marshal converts e to a byte slice.
// The encoding is the affine (x, y) pair, 32 bytes per coordinate; the
// point at infinity is encoded as 64 zero bytes (see Unmarshal).
func (e *G1) Marshal() []byte {
	// Each value is a 256-bit number.
	const numBytes = 256 / 8
	if e.p == nil {
		e.p = &curvePoint{}
	}
	// normalize to affine coordinates before serializing
	e.p.MakeAffine()
	ret := make([]byte, numBytes*2)
	if e.p.IsInfinity() {
		// all-zero output represents the point at infinity
		return ret
	}
	temp := &gfP{}
	// coordinates are held in Montgomery form; decode each before writing
	montDecode(temp, &e.p.x)
	temp.Marshal(ret)
	montDecode(temp, &e.p.y)
	temp.Marshal(ret[numBytes:])
	return ret
}
// Unmarshal sets e to the result of converting the output of Marshal back into
// a group element and then returns e.
// On success the unconsumed tail of m is returned; on failure e may have
// been partially overwritten.
func (e *G1) Unmarshal(m []byte) ([]byte, error) {
	// Each value is a 256-bit number.
	const numBytes = 256 / 8
	if len(m) < 2*numBytes {
		return nil, errors.New("bn256: not enough data")
	}
	// Unmarshal the points and check their caps
	if e.p == nil {
		e.p = &curvePoint{}
	} else {
		// reuse the existing point, clearing stale affine coordinates
		e.p.x, e.p.y = gfP{0}, gfP{0}
	}
	var err error
	if err = e.p.x.Unmarshal(m); err != nil {
		return nil, err
	}
	if err = e.p.y.Unmarshal(m[numBytes:]); err != nil {
		return nil, err
	}
	// Encode into Montgomery form and ensure it's on the curve
	montEncode(&e.p.x, &e.p.x)
	montEncode(&e.p.y, &e.p.y)
	zero := gfP{0}
	if e.p.x == zero && e.p.y == zero {
		// This is the point at infinity (Marshal emits all zeros for it).
		e.p.y = *newGFp(1)
		e.p.z = gfP{0}
		e.p.t = gfP{0}
	} else {
		// ordinary point: set z = t = 1, then validate the curve equation
		e.p.z = *newGFp(1)
		e.p.t = *newGFp(1)
		if !e.p.IsOnCurve() {
			return nil, errors.New("bn256: malformed point")
		}
	}
	return m[2*numBytes:], nil
}
// G2 is an abstract cyclic group. The zero value is suitable for use as the
// output of an operation, but cannot be used as an input.
type G2 struct {
	p *twistPoint // lazily allocated; methods treat nil as "allocate fresh"
}
// RandomG2 returns x and g₂ˣ where x is a random, non-zero number read from r.
func RandomG2(r io.Reader) (*big.Int, *G2, error) {
	k, err := randomK(r)
	if err != nil {
		return nil, nil, err
	}
	var g G2
	return k, g.ScalarBaseMult(k), nil
}
// String returns a printable representation of e.
// A nil-point guard makes it safe to print a zero-value G2 (e.g. via fmt),
// which previously caused a nil-pointer dereference.
func (e *G2) String() string {
	if e.p == nil {
		return "bn256.G2(nil)"
	}
	return "bn256.G2" + e.p.String()
}
// ScalarBaseMult sets e to k times the generator of G₂ and returns e.
func (e *G2) ScalarBaseMult(k *big.Int) *G2 {
	if e.p == nil {
		e.p = new(twistPoint)
	}
	e.p.Mul(twistGen, k)
	return e
}
// ScalarMult sets e to a*k and returns e.
func (e *G2) ScalarMult(a *G2, k *big.Int) *G2 {
	if e.p == nil {
		e.p = new(twistPoint)
	}
	e.p.Mul(a.p, k)
	return e
}
// Add sets e to the group sum a+b and returns e.
func (e *G2) Add(a, b *G2) *G2 {
	if e.p == nil {
		e.p = new(twistPoint)
	}
	e.p.Add(a.p, b.p)
	return e
}
// Neg sets e to the additive inverse of a and returns e.
func (e *G2) Neg(a *G2) *G2 {
	if e.p == nil {
		e.p = new(twistPoint)
	}
	e.p.Neg(a.p)
	return e
}
// Set copies the value of a into e and returns e.
func (e *G2) Set(a *G2) *G2 {
	if e.p == nil {
		e.p = new(twistPoint)
	}
	e.p.Set(a.p)
	return e
}
// Marshal converts e into a byte slice: the four gfP components of the
// affine x and y twist coordinates, Montgomery-decoded, 32 bytes each.
// The point at infinity marshals to all-zero bytes.
func (e *G2) Marshal() []byte {
	const numBytes = 256 / 8 // each component is a 256-bit value
	if e.p == nil {
		e.p = new(twistPoint)
	}
	e.p.MakeAffine()
	out := make([]byte, 4*numBytes)
	if e.p.IsInfinity() {
		return out
	}
	coords := [...]*gfP{&e.p.x.x, &e.p.x.y, &e.p.y.x, &e.p.y.y}
	var buf gfP
	for i, c := range coords {
		montDecode(&buf, c)
		buf.Marshal(out[i*numBytes:])
	}
	return out
}
// Unmarshal sets e to the result of converting the output of Marshal back into
// a group element; it returns the unconsumed tail of m. All-zero coordinates
// decode as the point at infinity; any other point is validated against the
// twist curve equation.
func (e *G2) Unmarshal(m []byte) ([]byte, error) {
	// Each value is a 256-bit number.
	const numBytes = 256 / 8
	if len(m) < 4*numBytes {
		return nil, errors.New("bn256: not enough data")
	}
	// Reuse the existing twistPoint if there is one; all four coordinate
	// components are unconditionally overwritten below.
	if e.p == nil {
		e.p = &twistPoint{}
	}
	var err error
	if err = e.p.x.x.Unmarshal(m); err != nil {
		return nil, err
	}
	if err = e.p.x.y.Unmarshal(m[numBytes:]); err != nil {
		return nil, err
	}
	if err = e.p.y.x.Unmarshal(m[2*numBytes:]); err != nil {
		return nil, err
	}
	if err = e.p.y.y.Unmarshal(m[3*numBytes:]); err != nil {
		return nil, err
	}
	// Encode into Montgomery form and ensure it's on the curve.
	montEncode(&e.p.x.x, &e.p.x.x)
	montEncode(&e.p.x.y, &e.p.x.y)
	montEncode(&e.p.y.x, &e.p.y.x)
	montEncode(&e.p.y.y, &e.p.y.y)
	if e.p.x.IsZero() && e.p.y.IsZero() {
		// This is the point at infinity.
		e.p.y.SetOne()
		e.p.z.SetZero()
		e.p.t.SetZero()
	} else {
		e.p.z.SetOne()
		e.p.t.SetOne()
		if !e.p.IsOnCurve() {
			return nil, errors.New("bn256: malformed point")
		}
	}
	return m[4*numBytes:], nil
}
// GT is an abstract cyclic group. The zero value is suitable for use as the
// output of an operation, but cannot be used as an input.
type GT struct {
	p *gfP12 // nil until initialized by an operation
}
// Pair calculates an Optimal Ate pairing of g1 and g2.
func Pair(g1 *G1, g2 *G2) *GT {
	// optimalAte takes the twist point first, hence the swapped argument order.
	return &GT{optimalAte(g2.p, g1.p)}
}
// PairingCheck calculates the Optimal Ate pairing for a set of points and
// reports whether the product of the pairings is the identity, i.e. whether
// ∏ e(a[i], b[i]) == 1. Pairs in which either side is the point at infinity
// contribute the identity and are skipped.
//
// The two slices must have equal length; mismatched lengths now fail the
// check instead of panicking with an index-out-of-range.
func PairingCheck(a []*G1, b []*G2) bool {
	if len(a) != len(b) {
		return false
	}
	acc := new(gfP12)
	acc.SetOne()
	for i := 0; i < len(a); i++ {
		if a[i].p.IsInfinity() || b[i].p.IsInfinity() {
			continue
		}
		acc.Mul(acc, miller(b[i].p, a[i].p))
	}
	return finalExponentiation(acc).IsOne()
}
// Miller applies Miller's algorithm, which is a bilinear function from the
// source groups to F_p^12. Miller(g1, g2).Finalize() is equivalent to Pair(g1,
// g2).
func Miller(g1 *G1, g2 *G2) *GT {
	// As in Pair, the internal miller takes the twist point first.
	return &GT{miller(g2.p, g1.p)}
}
// String returns a printable representation of e.
// A nil-point guard makes it safe to print a zero-value GT (e.g. via fmt),
// which previously caused a nil-pointer dereference.
func (e *GT) String() string {
	if e.p == nil {
		return "bn256.GT(nil)"
	}
	return "bn256.GT" + e.p.String()
}
// ScalarMult sets e to a*k (exponentiation in F_p^12) and returns e.
func (e *GT) ScalarMult(a *GT, k *big.Int) *GT {
	if e.p == nil {
		e.p = new(gfP12)
	}
	e.p.Exp(a.p, k)
	return e
}
// Add sets e to a+b and returns e; the GT group operation is
// implemented as multiplication in F_p^12.
func (e *GT) Add(a, b *GT) *GT {
	if e.p == nil {
		e.p = new(gfP12)
	}
	e.p.Mul(a.p, b.p)
	return e
}
// Neg sets e to the inverse of a and returns e; inversion in GT is
// conjugation in F_p^12.
func (e *GT) Neg(a *GT) *GT {
	if e.p == nil {
		e.p = new(gfP12)
	}
	e.p.Conjugate(a.p)
	return e
}
// Set copies the value of a into e and returns e.
func (e *GT) Set(a *GT) *GT {
	if e.p == nil {
		e.p = new(gfP12)
	}
	e.p.Set(a.p)
	return e
}
// Finalize is a linear function from F_p^12 to GT: it applies the final
// exponentiation to e in place and returns e, so that
// Miller(g1, g2).Finalize() matches Pair(g1, g2).
// Note: panics if called on a zero-value GT (e.p == nil).
func (e *GT) Finalize() *GT {
	ret := finalExponentiation(e.p)
	e.p.Set(ret)
	return e
}
// Marshal converts e into a byte slice: the twelve gfP coefficients of the
// underlying gfP12 element, Montgomery-decoded, 32 bytes each, in the fixed
// order x.x.x … y.z.y. A nil inner value marshals as the identity (SetOne).
func (e *GT) Marshal() []byte {
	const numBytes = 256 / 8 // each coefficient is a 256-bit value
	if e.p == nil {
		e.p = &gfP12{}
		e.p.SetOne()
	}
	coeffs := [...]*gfP{
		&e.p.x.x.x, &e.p.x.x.y, &e.p.x.y.x, &e.p.x.y.y, &e.p.x.z.x, &e.p.x.z.y,
		&e.p.y.x.x, &e.p.y.x.y, &e.p.y.y.x, &e.p.y.y.y, &e.p.y.z.x, &e.p.y.z.y,
	}
	out := make([]byte, 12*numBytes)
	var buf gfP
	for i, c := range coeffs {
		montDecode(&buf, c)
		buf.Marshal(out[i*numBytes:])
	}
	return out
}
// Unmarshal sets e to the result of converting the output of Marshal back
// into a group element; it returns the unconsumed tail of m. The twelve
// coefficients are read in Marshal's order, then Montgomery-encoded.
func (e *GT) Unmarshal(m []byte) ([]byte, error) {
	const numBytes = 256 / 8 // each coefficient is a 256-bit value
	if len(m) < 12*numBytes {
		return nil, errors.New("bn256: not enough data")
	}
	if e.p == nil {
		e.p = &gfP12{}
	}
	coeffs := [...]*gfP{
		&e.p.x.x.x, &e.p.x.x.y, &e.p.x.y.x, &e.p.x.y.y, &e.p.x.z.x, &e.p.x.z.y,
		&e.p.y.x.x, &e.p.y.x.y, &e.p.y.y.x, &e.p.y.y.y, &e.p.y.z.x, &e.p.y.z.y,
	}
	// Decode every coefficient first, then encode into Montgomery form.
	for i, c := range coeffs {
		if err := c.Unmarshal(m[i*numBytes:]); err != nil {
			return nil, err
		}
	}
	for _, c := range coeffs {
		montEncode(c, c)
	}
	return m[12*numBytes:], nil
}

View File

@ -0,0 +1,116 @@
package bn256
import (
"bytes"
"crypto/rand"
"testing"
)
// TestG1Marshal checks that a random G1 element survives a
// Marshal/Unmarshal round trip byte-for-byte.
func TestG1Marshal(t *testing.T) {
	_, ga, err := RandomG1(rand.Reader)
	if err != nil {
		t.Fatal(err)
	}
	encoded := ga.Marshal()
	gb := new(G1)
	if _, err = gb.Unmarshal(encoded); err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(encoded, gb.Marshal()) {
		t.Fatal("bytes are different")
	}
}
// TestG2Marshal checks that a random G2 element survives a
// Marshal/Unmarshal round trip byte-for-byte.
func TestG2Marshal(t *testing.T) {
	_, ga, err := RandomG2(rand.Reader)
	if err != nil {
		t.Fatal(err)
	}
	encoded := ga.Marshal()
	gb := new(G2)
	if _, err = gb.Unmarshal(encoded); err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(encoded, gb.Marshal()) {
		t.Fatal("bytes are different")
	}
}
// TestBilinearity verifies e(a·G₁, b·G₂) == e(G₁, G₂)^(a·b) for random a, b.
// Errors from RandomG1/RandomG2 are now checked instead of being discarded.
func TestBilinearity(t *testing.T) {
	for i := 0; i < 2; i++ {
		a, p1, err := RandomG1(rand.Reader)
		if err != nil {
			t.Fatal(err)
		}
		b, p2, err := RandomG2(rand.Reader)
		if err != nil {
			t.Fatal(err)
		}
		e1 := Pair(p1, p2)
		e2 := Pair(&G1{curveGen}, &G2{twistGen})
		e2.ScalarMult(e2, a)
		e2.ScalarMult(e2, b)
		if *e1.p != *e2.p {
			t.Fatalf("bad pairing result: %s", e1)
		}
	}
}
// TestTripartiteDiffieHellman implements the one-round tripartite
// Diffie-Hellman key agreement (Joux) and checks that all three parties
// derive the same key. Unmarshal errors are now checked instead of being
// silently discarded.
func TestTripartiteDiffieHellman(t *testing.T) {
	a, _ := rand.Int(rand.Reader, Order)
	b, _ := rand.Int(rand.Reader, Order)
	c, _ := rand.Int(rand.Reader, Order)
	pa, pb, pc := new(G1), new(G1), new(G1)
	qa, qb, qc := new(G2), new(G2), new(G2)
	if _, err := pa.Unmarshal(new(G1).ScalarBaseMult(a).Marshal()); err != nil {
		t.Fatal(err)
	}
	if _, err := qa.Unmarshal(new(G2).ScalarBaseMult(a).Marshal()); err != nil {
		t.Fatal(err)
	}
	if _, err := pb.Unmarshal(new(G1).ScalarBaseMult(b).Marshal()); err != nil {
		t.Fatal(err)
	}
	if _, err := qb.Unmarshal(new(G2).ScalarBaseMult(b).Marshal()); err != nil {
		t.Fatal(err)
	}
	if _, err := pc.Unmarshal(new(G1).ScalarBaseMult(c).Marshal()); err != nil {
		t.Fatal(err)
	}
	if _, err := qc.Unmarshal(new(G2).ScalarBaseMult(c).Marshal()); err != nil {
		t.Fatal(err)
	}
	k1 := Pair(pb, qc)
	k1.ScalarMult(k1, a)
	k1Bytes := k1.Marshal()
	k2 := Pair(pc, qa)
	k2.ScalarMult(k2, b)
	k2Bytes := k2.Marshal()
	k3 := Pair(pa, qb)
	k3.ScalarMult(k3, c)
	k3Bytes := k3.Marshal()
	if !bytes.Equal(k1Bytes, k2Bytes) || !bytes.Equal(k2Bytes, k3Bytes) {
		t.Errorf("keys didn't agree")
	}
}
// BenchmarkG1 measures scalar multiplication of the G₁ generator.
func BenchmarkG1(b *testing.B) {
	k, _ := rand.Int(rand.Reader, Order)
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		new(G1).ScalarBaseMult(k)
	}
}
// BenchmarkG2 measures scalar multiplication of the G₂ generator.
func BenchmarkG2(b *testing.B) {
	k, _ := rand.Int(rand.Reader, Order)
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		new(G2).ScalarBaseMult(k)
	}
}
// BenchmarkPairing measures one Optimal Ate pairing of the two generators.
func BenchmarkPairing(b *testing.B) {
	for i := 0; i < b.N; i++ {
		Pair(&G1{curveGen}, &G2{twistGen})
	}
}

View File

@ -0,0 +1,79 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package bn256
import (
"math/big"
)
// bigFromBase10 parses a base-10 constant into a *big.Int.
// It is only used for package-level constants that must be valid, so it
// panics on malformed input instead of silently returning nil (the previous
// behavior, which would surface later as an obscure nil dereference).
func bigFromBase10(s string) *big.Int {
	n, ok := new(big.Int).SetString(s, 10)
	if !ok {
		panic("bn256: invalid base-10 constant: " + s)
	}
	return n
}
// u is the BN parameter.
var u = bigFromBase10("4965661367192848881")

// Order is the number of elements in both G₁ and G₂: 36u⁴+36u³+18u²+6u+1.
var Order = bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495617")

// P is a prime over which we form a basic field: 36u⁴+36u³+24u²+6u+1.
var P = bigFromBase10("21888242871839275222246405745257275088696311157297823662689037894645226208583")

// p2 is p, represented as little-endian 64-bit words.
var p2 = [4]uint64{0x3c208c16d87cfd47, 0x97816a916871ca8d, 0xb85045b68181585d, 0x30644e72e131a029}

// np is the negative inverse of p, mod 2^256.
var np = [4]uint64{0x87d20782e4866389, 0x9ede7d651eca6ac9, 0xd8afcbd01833da80, 0xf57a22b791888c6b}

// <sage>
// p = 21888242871839275222246405745257275088696311157297823662689037894645226208583; Fp = GF(p)
// r = Fp(2^256) # 6350874878119819312338956282401532409788428879151445726012394534686998597021
// rInv = 1/r # 20988524275117001072002809824448087578619730785600314334253784976379291040311
// hex(20988524275117001072002809824448087578619730785600314334253784976379291040311)
// # 2e67157159e5c639 cf63e9cfb74492d9 eb2022850278edf8 ed84884a014afa37
// </sage>
//
// rN1 is R^-1 where R = 2^256 mod p.
var rN1 = &gfP{0xed84884a014afa37, 0xeb2022850278edf8, 0xcf63e9cfb74492d9, 0x2e67157159e5c639}

// <sage>
// r2 = r^2 # 3096616502983703923843567936837374451735540968419076528771170197431451843209
// hex(3096616502983703923843567936837374451735540968419076528771170197431451843209)
// # 06d89f71cab8351f 47ab1eff0a417ff6 b5e71911d44501fb f32cfc5b538afa89
// </sage>
//
// r2 is R^2 where R = 2^256 mod p.
var r2 = &gfP{0xf32cfc5b538afa89, 0xb5e71911d44501fb, 0x47ab1eff0a417ff6, 0x06d89f71cab8351f}

// r3 is R^3 where R = 2^256 mod p.
var r3 = &gfP{0xb1cd6dafda1530df, 0x62f210e6a7283db6, 0xef7f0b0c0ada0afb, 0x20fd6e902d592544}

// <sage>
// xiToPMinus1Over6 = Fp2(i + 9) ^ ((p-1)/6); xiToPMinus1Over6
// # 16469823323077808223889137241176536799009286646108169935659301613961712198316*i + 8376118865763821496583973867626364092589906065868298776909617916018768340080
// </sage>
//
// The value of `xiToPMinus1Over6` below is the same as the one obtained in sage,
// but with every field element Montgomery encoded.
// xiToPMinus1Over6 is ξ^((p-1)/6) where ξ = i+9.
var xiToPMinus1Over6 = &gfP2{gfP{0xa222ae234c492d72, 0xd00f02a4565de15b, 0xdc2ff3a253dfc926, 0x10a75716b3899551}, gfP{0xaf9ba69633144907, 0xca6b1d7387afb78a, 0x11bded5ef08a2087, 0x02f34d751a1f3a7c}}

// xiToPMinus1Over3 is ξ^((p-1)/3) where ξ = i+9.
var xiToPMinus1Over3 = &gfP2{gfP{0x6e849f1ea0aa4757, 0xaa1c7b6d89f89141, 0xb6e713cdfae0ca3a, 0x26694fbb4e82ebc3}, gfP{0xb5773b104563ab30, 0x347f91c8a9aa6454, 0x7a007127242e0991, 0x1956bcd8118214ec}}

// xiToPMinus1Over2 is ξ^((p-1)/2) where ξ = i+9.
var xiToPMinus1Over2 = &gfP2{gfP{0xa1d77ce45ffe77c7, 0x07affd117826d1db, 0x6d16bd27bb7edc6b, 0x2c87200285defecc}, gfP{0xe4bbdd0c2936b629, 0xbb30f162e133bacb, 0x31a9d1b6f9645366, 0x253570bea500f8dd}}

// xiToPSquaredMinus1Over3 is ξ^((p²-1)/3) where ξ = i+9.
var xiToPSquaredMinus1Over3 = &gfP{0x3350c88e13e80b9c, 0x7dce557cdb5e56b9, 0x6001b4b8b615564a, 0x2682e617020217e0}

// xiTo2PSquaredMinus2Over3 is ξ^((2p²-2)/3) where ξ = i+9 (a cubic root of unity, mod p).
var xiTo2PSquaredMinus2Over3 = &gfP{0x71930c11d782e155, 0xa6bb947cffbe3323, 0xaa303344d4741444, 0x2c3b3f0d26594943}

// xiToPSquaredMinus1Over6 is ξ^((p²-1)/6) where ξ = i+9 (a cubic root of -1, mod p).
var xiToPSquaredMinus1Over6 = &gfP{0xca8d800500fa1bf2, 0xf0c5d61468b39769, 0x0e201271ad0d4418, 0x04290f65bad856e6}

// xiTo2PMinus2Over3 is ξ^((2p-2)/3) where ξ = i+9.
var xiTo2PMinus2Over3 = &gfP2{gfP{0x5dddfd154bd8c949, 0x62cb29a5a4445b60, 0x37bc870a0c7dd2b9, 0x24830a9d3171f0fd}, gfP{0x7361d77f843abe92, 0xa5bb2bd3273411fb, 0x9c941f314b3e2399, 0x15df9cddbb9fd3ec}}

318
cryptography/bn256/curve.go Normal file
View File

@ -0,0 +1,318 @@
package bn256
import (
"math/big"
)
// curvePoint implements the elliptic curve y²=x³+3. Points are kept in Jacobian
// form and t=z² when valid. G₁ is the set of points of this curve on GF(p).
type curvePoint struct {
	// Jacobian coordinates (affine x = x/z², affine y = y/z³); t caches z²
	// while the point is in a valid state. z == 0 marks the point at infinity.
	x, y, z, t gfP
}
// curveB is the curve constant b = 3 of y² = x³ + 3 (Montgomery-encoded via newGFp).
var curveB = newGFp(3)

// curveGen is the generator of G₁: the affine point (1, 2) with z = t = 1.
var curveGen = &curvePoint{
	x: *newGFp(1),
	y: *newGFp(2),
	z: *newGFp(1),
	t: *newGFp(1),
}
// String returns the affine coordinates of c as "(x, y)".
// Note: it normalizes the receiver in place via MakeAffine.
func (c *curvePoint) String() string {
	c.MakeAffine()
	x, y := &gfP{}, &gfP{}
	montDecode(x, &c.x)
	montDecode(y, &c.y)
	return "(" + x.String() + ", " + y.String() + ")"
}
// Set copies all four coordinates of a into c.
func (c *curvePoint) Set(a *curvePoint) {
	c.x, c.y = a.x, a.y
	c.z, c.t = a.z, a.t
}
// IsOnCurve returns true iff c is on the curve y² = x³ + 3.
// It normalizes c to affine form first, mutating the receiver.
func (c *curvePoint) IsOnCurve() bool {
	c.MakeAffine()
	if c.IsInfinity() {
		return true
	}
	y2, x3 := &gfP{}, &gfP{}
	gfpMul(y2, &c.y, &c.y) // y²
	gfpMul(x3, &c.x, &c.x)
	gfpMul(x3, x3, &c.x)   // x³
	gfpAdd(x3, x3, curveB) // x³ + b
	return *y2 == *x3
}
// SetInfinity sets c to the point at infinity, canonically (0, 1, 0, 0);
// z == 0 is the marker tested by IsInfinity.
func (c *curvePoint) SetInfinity() {
	c.x, c.z, c.t = gfP{0}, gfP{0}, gfP{0}
	c.y = *newGFp(1)
}
// IsInfinity reports whether c is the point at infinity (z == 0).
func (c *curvePoint) IsInfinity() bool {
	var zero gfP
	return c.z == zero
}
// Add sets c = a + b using the Jacobian add-2007-bl formulas. Infinity
// inputs are handled up front, and the doubling case (a == b) is delegated
// to Double, which the generic addition formulas cannot handle.
func (c *curvePoint) Add(a, b *curvePoint) {
	if a.IsInfinity() {
		c.Set(b)
		return
	}
	if b.IsInfinity() {
		c.Set(a)
		return
	}
	// See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/addition/add-2007-bl.op3
	// Normalize the points by replacing a = [x1:y1:z1] and b = [x2:y2:z2]
	// by [u1:s1:z1·z2] and [u2:s2:z1·z2]
	// where u1 = x1·z2², s1 = y1·z2³ and u2 = x2·z1², s2 = y2·z1³
	z12, z22 := &gfP{}, &gfP{}
	gfpMul(z12, &a.z, &a.z)
	gfpMul(z22, &b.z, &b.z)
	u1, u2 := &gfP{}, &gfP{}
	gfpMul(u1, &a.x, z22)
	gfpMul(u2, &b.x, z12)
	t, s1 := &gfP{}, &gfP{}
	gfpMul(t, &b.z, z22)
	gfpMul(s1, &a.y, t)
	s2 := &gfP{}
	gfpMul(t, &a.z, z12)
	gfpMul(s2, &b.y, t)
	// Compute x = (2h)²(s²-u1-u2)
	// where s = (s2-s1)/(u2-u1) is the slope of the line through
	// (u1,s1) and (u2,s2). The extra factor 2h = 2(u2-u1) comes from the value of z below.
	// This is also:
	// 4(s2-s1)² - 4h²(u1+u2) = 4(s2-s1)² - 4h³ - 4h²(2u1)
	//   = r² - j - 2v
	// with the notations below.
	h := &gfP{}
	gfpSub(h, u2, u1)
	xEqual := *h == gfP{0}
	gfpAdd(t, h, h)
	// i = 4h²
	i := &gfP{}
	gfpMul(i, t, t)
	// j = 4h³
	j := &gfP{}
	gfpMul(j, h, i)
	gfpSub(t, s2, s1)
	yEqual := *t == gfP{0}
	// Same x and same y means a == b: fall back to the doubling formulas.
	if xEqual && yEqual {
		c.Double(a)
		return
	}
	r := &gfP{}
	gfpAdd(r, t, t)
	v := &gfP{}
	gfpMul(v, u1, i)
	// t4 = 4(s2-s1)²
	t4, t6 := &gfP{}, &gfP{}
	gfpMul(t4, r, r)
	gfpAdd(t, v, v)
	gfpSub(t6, t4, j)
	gfpSub(&c.x, t6, t)
	// Set y = -(2h)³(s1 + s*(x/4h²-u1))
	// This is also
	// y = - 2·s1·j - (s2-s1)(2x - 2i·u1) = r(v-x) - 2·s1·j
	gfpSub(t, v, &c.x) // t7
	gfpMul(t4, s1, j)  // t8
	gfpAdd(t6, t4, t4) // t9
	gfpMul(t4, r, t)   // t10
	gfpSub(&c.y, t4, t6)
	// Set z = 2(u2-u1)·z1·z2 = 2h·z1·z2
	gfpAdd(t, &a.z, &b.z) // t11
	gfpMul(t4, t, t)      // t12
	gfpSub(t, t4, z12)    // t13
	gfpSub(t4, t, z22)    // t14
	gfpMul(&c.z, t4, h)
}
// Double sets c = 2a using the Jacobian dbl-2009-l doubling formulas.
func (c *curvePoint) Double(a *curvePoint) {
	// See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/doubling/dbl-2009-l.op3
	A, B, C := &gfP{}, &gfP{}, &gfP{} // A = x², B = y², C = B² = y⁴
	gfpMul(A, &a.x, &a.x)
	gfpMul(B, &a.y, &a.y)
	gfpMul(C, B, B)
	t, t2 := &gfP{}, &gfP{}
	gfpAdd(t, &a.x, B)
	gfpMul(t2, t, t)
	gfpSub(t, t2, A)
	gfpSub(t2, t, C) // t2 = (x+B)² - A - C
	d, e, f := &gfP{}, &gfP{}, &gfP{} // d = 2·t2, e = 3A, f = e²
	gfpAdd(d, t2, t2)
	gfpAdd(t, A, A)
	gfpAdd(e, t, A)
	gfpMul(f, e, e)
	gfpAdd(t, d, d)
	gfpSub(&c.x, f, t) // x' = f - 2d
	gfpAdd(t, C, C)
	gfpAdd(t2, t, t)
	gfpAdd(t, t2, t2) // t = 8C
	gfpSub(&c.y, d, &c.x)
	gfpMul(t2, e, &c.y)
	gfpSub(&c.y, t2, t) // y' = e·(d - x') - 8C
	gfpMul(t, &a.y, &a.z)
	gfpAdd(&c.z, t, t) // z' = 2·y·z
}
func (c *curvePoint) Mul(a *curvePoint, scalar *big.Int) {
precomp := [1 << 2]*curvePoint{nil, {}, {}, {}}
precomp[1].Set(a)
precomp[2].Set(a)
gfpMul(&precomp[2].x, &precomp[2].x, xiTo2PSquaredMinus2Over3)
precomp[3].Add(precomp[1], precomp[2])
multiScalar := curveLattice.Multi(scalar)
sum := &curvePoint{}
sum.SetInfinity()
t := &curvePoint{}
for i := len(multiScalar) - 1; i >= 0; i-- {
t.Double(sum)
if multiScalar[i] == 0 {
sum.Set(t)
} else {
sum.Add(t, precomp[multiScalar[i]])
}
}
c.Set(sum)
}
// MakeAffine transforms Jacobian coordinates to affine coordinates in place:
// (X' : Y' : Z) -> (X'/(Z^2) : Y'/(Z^3) : 1).
// A point with z == 0 is normalized to the canonical infinity representation.
func (c *curvePoint) MakeAffine() {
	if c.z == point1 {
		return // already affine
	} else if c.z == point0 { // return point at infinity if z = 0
		c.x = gfP{0}
		c.y = point1
		c.t = gfP{0}
		return
	}
	zInv := &gfP{}
	zInv.Invert(&c.z)
	t, zInv2 := &gfP{}, &gfP{}
	gfpMul(t, &c.y, zInv)     // t = y/z
	gfpMul(zInv2, zInv, zInv) // zInv2 = 1/(z^2)
	gfpMul(&c.x, &c.x, zInv2) // x = x/(z^2)
	gfpMul(&c.y, t, zInv2)    // y = y/(z^3)
	c.z = point1
	c.t = point1
}
// Neg sets c = -a by negating the y-coordinate; t is cleared, matching the
// original implementation (it is recomputed when the point is normalized).
func (c *curvePoint) Neg(a *curvePoint) {
	c.x = a.x
	gfpNeg(&c.y, &a.y)
	c.z = a.z
	c.t = gfP{0}
}
// point0 and point1 are the Montgomery-encoded constants 0 and 1,
// precomputed once to avoid repeated newGFp calls.
var point0 = *newGFp(0)
var point1 = *newGFp(1)
// G1Array is a slice of G1 points that can be normalized together.
// Its MakeAffine performs Montgomery batch inversion, replacing one field
// inversion per point with a single inversion plus multiplications.
type G1Array []*G1
// MakeAffine normalizes every point in the array to affine form using the
// Montgomery batch-inversion trick: a forward pass accumulates the product
// of all z values (saving the running prefix in scratch), one inversion is
// done on the total, and a backward pass peels off each point's 1/z.
// Points already affine (z == 1) and points at infinity (z == 0) are skipped.
func (points G1Array) MakeAffine() {
	accum := newGFp(1)
	var scratch_backup [256]gfP
	var scratch []gfP
	if len(points) <= 256 {
		scratch = scratch_backup[:0] // avoid heap allocation when possible
	}
	for _, e := range points {
		if e.p == nil {
			e.p = &curvePoint{}
		}
		// scratch[i] holds the product of all z's seen before point i.
		scratch = append(scratch, *accum)
		if e.p.z == point1 {
			continue
		} else if e.p.z == point0 { // normalize point at infinity if z = 0
			e.p.x = gfP{0}
			e.p.y = point1
			e.p.t = gfP{0}
			continue
		}
		gfpMul(accum, accum, &e.p.z) // accum *= z
	}
	// One inversion for the whole batch.
	zInv_accum := gfP{}
	zInv_accum.Invert(accum)
	tmp := gfP{}
	zInv := &gfP{}
	for i := len(points) - 1; i >= 0; i-- {
		e := points[i]
		if e.p.z == point1 {
			continue
		} else if e.p.z == point0 { // infinity points were normalized above
			continue
		}
		// zInv = 1/z_i; zInv_accum drops the z_i factor for the next iteration.
		tmp = gfP{}
		gfpMul(&tmp, &zInv_accum, &e.p.z)
		gfpMul(zInv, &zInv_accum, &scratch[i])
		zInv_accum = tmp
		// Same affine conversion as curvePoint.MakeAffine.
		t, zInv2 := &gfP{}, &gfP{}
		gfpMul(t, &e.p.y, zInv)       // t = y/z
		gfpMul(zInv2, zInv, zInv)     // zInv2 = 1/(z^2)
		gfpMul(&e.p.x, &e.p.x, zInv2) // x = x/(z^2)
		gfpMul(&e.p.y, t, zInv2)      // y = y/(z^3)
		e.p.z = point1
		e.p.t = point1
	}
}

View File

@ -0,0 +1,66 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package bn256
import (
"crypto/rand"
"testing"
"github.com/stretchr/testify/require"
)
// TestG1Array checks that the batched MakeAffine normalizes every point
// (z == 1 afterwards). Points at infinity are not exercised here.
func TestG1Array(t *testing.T) {
	count := 8
	var g1array G1Array
	var g1array_opt G1Array
	for i := 0; i < count; i++ {
		a, _ := rand.Int(rand.Reader, Order)
		g1array = append(g1array, new(G1).ScalarBaseMult(a))
		g1array_opt = append(g1array_opt, new(G1).ScalarBaseMult(a))
	}
	g1array_opt.MakeAffine()
	for i := range g1array_opt {
		require.Equal(t, g1array_opt[i].p.z, *newGFp(1)) // currently we are not testing points at infinity
	}
}
// benchmarksingleinverts measures normalizing `count` points by inverting
// each point's z individually via curvePoint.MakeAffine.
func benchmarksingleinverts(count int, b *testing.B) {
	var g1array, g1backup G1Array
	for i := 0; i < count; i++ {
		a, _ := rand.Int(rand.Reader, Order)
		g1backup = append(g1backup, new(G1).ScalarBaseMult(a))
	}
	for n := 0; n < b.N; n++ {
		g1array = g1array[:0]
		for i := range g1backup {
			g1array = append(g1array, new(G1).Set(g1backup[i]))
			g1array[i].p.MakeAffine()
		}
	}
}
// benchmarkbatchedinverts measures normalizing `count` points with the
// Montgomery batch inversion in G1Array.MakeAffine (one inversion total).
func benchmarkbatchedinverts(count int, b *testing.B) {
	var g1array, g1backup G1Array
	for i := 0; i < count; i++ {
		a, _ := rand.Int(rand.Reader, Order)
		g1backup = append(g1backup, new(G1).ScalarBaseMult(a))
	}
	for n := 0; n < b.N; n++ {
		g1array = g1array[:0]
		for i := range g1backup {
			g1array = append(g1array, new(G1).Set(g1backup[i]))
		}
		g1array.MakeAffine()
	}
}
// Benchmarks comparing per-point inversion against Montgomery batch inversion.
func BenchmarkInverts_Single_256(b *testing.B)  { benchmarksingleinverts(256, b) }
func BenchmarkInverts_Batched_256(b *testing.B) { benchmarkbatchedinverts(256, b) }

View File

@ -0,0 +1,51 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package bn256
import (
"crypto/rand"
"testing"
"github.com/stretchr/testify/require"
)
// TestExamplePair demonstrates and checks tripartite Diffie-Hellman key
// agreement using the pairing.
func TestExamplePair(t *testing.T) {
	// This implements the tripartite Diffie-Hellman algorithm from "A One
	// Round Protocol for Tripartite Diffie-Hellman", A. Joux.
	// http://www.springerlink.com/content/cddc57yyva0hburb/fulltext.pdf
	// Each of three parties, a, b and c, generate a private value.
	a, _ := rand.Int(rand.Reader, Order)
	b, _ := rand.Int(rand.Reader, Order)
	c, _ := rand.Int(rand.Reader, Order)
	// Then each party calculates g₁ and g₂ times their private value.
	pa := new(G1).ScalarBaseMult(a)
	qa := new(G2).ScalarBaseMult(a)
	pb := new(G1).ScalarBaseMult(b)
	qb := new(G2).ScalarBaseMult(b)
	pc := new(G1).ScalarBaseMult(c)
	qc := new(G2).ScalarBaseMult(c)
	// Now each party exchanges its public values with the other two and
	// all parties can calculate the shared key.
	k1 := Pair(pb, qc)
	k1.ScalarMult(k1, a)
	k2 := Pair(pc, qa)
	k2.ScalarMult(k2, b)
	k3 := Pair(pa, qb)
	k3.ScalarMult(k3, c)
	// k1, k2 and k3 will all be equal.
	require.Equal(t, k1, k2)
	require.Equal(t, k1, k3)
	require.Equal(t, len(np), 4) //Avoid gometalinter varcheck err on np
}

View File

@ -0,0 +1,424 @@
// Package bn256 implements a particular bilinear group at the 128-bit security
// level.
//
// Bilinear groups are the basis of many of the new cryptographic protocols that
// have been proposed over the past decade. They consist of a triplet of groups
// (G₁, G₂ and GT) such that there exists a function e(g₁ˣ,g₂ʸ)=gTˣʸ (where gₓ
// is a generator of the respective group). That function is called a pairing
// function.
//
// This package specifically implements the Optimal Ate pairing over a 256-bit
// Barreto-Naehrig curve as described in
// http://cryptojedi.org/papers/dclxvi-20100714.pdf. Its output is compatible
// with the implementation described in that paper.
package bn256
// This file implement some util functions for the MPC
// especially the serialization and deserialization functions for points in G1
import (
"errors"
"math/big"
)
// Constants related to the bn256 pairing-friendly curve.
const (
	FqElementSize      = 32                  // byte size of one Fq field element
	G1CompressedSize   = FqElementSize + 1   // + 1 accounts for the additional byte used for masking
	G1UncompressedSize = 2*FqElementSize + 1 // + 1 accounts for the additional byte used for masking
)
// https://github.com/ebfull/pairing/tree/master/src/bls12_381#serialization
// Bytes used to detect the formatting. By reading the first byte of the encoded point we can know it's nature
// ie: we can know if the point is the point at infinity, if it is encoded uncompressed or if it is encoded compressed
// Bit masking used to detect the serialization of the points and their nature
//
// The BLS12-381 curve is built over a 381-bit prime field.
// Thus each point coordinate is represented over 381 bits = 47bytes + 5bits
// Thus, to represent a point we need to have 48bytes, but the last 3 bits of the 48th byte will be set to 0
// These are these bits that are used to implement the masking, hence why the masking proposed by ebfull was:
const (
	serializationMask       = (1 << 5) - 1 // 0001 1111 // ANDing with this clears the 3 flag MSBs of the leading byte
	serializationCompressed = 1 << 7       // 1000 0000
	serializationInfinity   = 1 << 6       // 0100 0000
	serializationBigY       = 1 << 5       // 0010 0000
)
// IsHigherY distinguishes between the two curve points that share an
// x-coordinate: it reports whether e's y-coordinate is strictly greater
// than its negation under gfpCmp. The point e is assumed to be affine.
func (e *G1) IsHigherY() bool {
	if e.p == nil {
		e.p = &curvePoint{}
	}
	var negY gfP
	gfpNeg(&negY, &e.p.y)
	// gfpCmp returns 1 when the first argument is strictly greater;
	// both 0 (equal) and -1 (smaller) mean "not higher".
	return gfpCmp(&e.p.y, &negY) == 1
}
// EncodeCompressed converts the point e into its 33-byte compressed form:
// one flags byte (compressed / infinity / bigY) followed by the big-endian
// x-coordinate. The input may be in Jacobian form; e still represents the
// same point afterwards, but MakeAffine mutates its internal coordinates.
func (e *G1) EncodeCompressed() []byte {
	// Check nil pointers
	if e.p == nil {
		e.p = &curvePoint{}
	}
	e.p.MakeAffine()
	ret := make([]byte, G1CompressedSize)
	// Flag the encoding with the compressed flag
	ret[0] |= serializationCompressed
	if e.p.IsInfinity() {
		// Flag the encoding with the infinity flag
		ret[0] |= serializationInfinity
		return ret
	}
	if e.IsHigherY() {
		// Flag the encoding with the bigY flag
		ret[0] |= serializationBigY
	}
	// The serialization of the coordinate starts at index 1,
	// since index 0 in `ret` corresponds to the masking byte.
	temp := &gfP{}
	montDecode(temp, &e.p.x)
	temp.Marshal(ret[1:])
	return ret
}
// EncodeCompressedToBuf writes the compressed encoding into a caller-supplied
// buffer rather than allocating (avoids GC pressure).
// NOTE(review): flags are OR-ed into ret[0] and the coordinate bytes are only
// written for finite points, so this assumes the caller passes a zeroed
// buffer; a dirty buffer would yield a corrupt encoding — confirm callers.
func (e *G1) EncodeCompressedToBuf(ret []byte) {
	// Check nil pointers
	if e.p == nil {
		e.p = &curvePoint{}
	}
	e.p.MakeAffine()
	// Flag the encoding with the compressed flag
	ret[0] |= serializationCompressed
	if e.p.IsInfinity() {
		// Flag the encoding with the infinity flag
		ret[0] |= serializationInfinity
		return
	}
	if e.IsHigherY() {
		// Flag the encoding with the bigY flag
		ret[0] |= serializationBigY
	}
	// The serialization of the coordinate starts at index 1,
	// since index 0 in `ret` corresponds to the masking byte.
	temp := &gfP{}
	montDecode(temp, &e.p.x)
	temp.Marshal(ret[1:])
	return
}
// EncodeUncompressed converts the compressed point e into bytes
// Take a point P in Jacobian form (where each coordinate is MontEncoded)
// and encodes it by going back to affine coordinates and montDecode all coordinates
// This function does not modify the point e
// (the variable `temp` is introduced to avoid to modify e)
/*
func (e *G1) EncodeUncompressed() []byte {
// Check nil pointers
if e.p == nil {
e.p = &curvePoint{}
}
e.p.MakeAffine()
ret := make([]byte, G1UncompressedSize)
if e.p.IsInfinity() {
// Flag the encoding with the infinity flag
ret[0] |= serializationInfinity
return ret
}
// We start the serialization of the coordinates at the index 1
// Since the index 0 in the `ret` corresponds to the masking
temp := &gfP{}
montDecode(temp, &e.p.x) // Store the montgomery decoding in temp
temp.Marshal(ret[1:33]) // Write temp in the `ret` slice, this is the x-coordinate
montDecode(temp, &e.p.y)
temp.Marshal(ret[33:]) // this is the y-coordinate
return ret
}
*/
// EncodeUncompressed serializes e as a flags byte followed by Marshal(e).
// The point at infinity is encoded as the infinity flag plus zero bytes.
func (e *G1) EncodeUncompressed() []byte {
	if e.p == nil {
		e.p = &curvePoint{}
	}
	out := make([]byte, G1UncompressedSize)
	if e.p.IsInfinity() {
		out[0] |= serializationInfinity
		return out
	}
	// The encoding is flags || marshalledPoint; coordinates start at index 1.
	copy(out[1:], e.Marshal())
	return out
}
// getYFromMontEncodedX takes a Montgomery-encoded x and returns one of the two
// corresponding y-coordinates (Montgomery-encoded), by solving y² = x³ + b
// with a modular square root. It returns an error when x is nil or when the
// right-hand side is not a quadratic residue mod P (x is not on the curve).
func getYFromMontEncodedX(x *gfP) (*gfP, error) {
	// Check nil pointers
	if x == nil {
		return nil, errors.New("Cannot retrieve the y-coordinate form a nil pointer")
	}
	// Operations on Montgomery-encoded field elements: rhs = x³ + b.
	x2 := &gfP{}
	gfpMul(x2, x, x)
	x3 := &gfP{}
	gfpMul(x3, x2, x)
	rhs := &gfP{}
	gfpAdd(rhs, x3, curveB) // curveB is MontEncoded, since it is created with newGFp
	// Montgomery decode rhs.
	// Needed because an element created with gfP{} is raw, while one created
	// with newGFp() is Montgomery encoded. Above we worked on encoded
	// elements; here the quadratic residue is solved over the raw value and
	// the result is encoded back before returning.
	//
	// Eg:
	// - Px := &gfP{1} => 0000000000000000000000000000000000000000000000000000000000000001
	// - PxNew := newGFp(1) => 0e0a77c19a07df2f666ea36f7879462c0a78eb28f5c70b3dd35d438dc58f0d9d
	montDecode(rhs, rhs)
	rhsBig, err := rhs.gFpToBigInt()
	if err != nil {
		return nil, err
	}
	// ModSqrt returns nil when rhsBig is not a quadratic residue mod P.
	yCoord := big.NewInt(0)
	res := yCoord.ModSqrt(rhsBig, P)
	if res == nil {
		return nil, errors.New("not a square mod P")
	}
	yCoordGFp := newGFpFromBigInt(yCoord)
	montEncode(yCoordGFp, yCoordGFp)
	return yCoordGFp, nil
}
// DecodeCompressed decodes a point from its 33-byte compressed encoding:
// one flags byte followed by the big-endian x-coordinate. The y-coordinate
// is recovered from the curve equation, and the serializationBigY flag
// selects between the two candidates. The result is stored in e in
// Jacobian form with Montgomery-encoded coordinates.
func (e *G1) DecodeCompressed(encoding []byte) error {
	if len(encoding) != G1CompressedSize {
		return errors.New("wrong encoded point size")
	}
	if encoding[0]&serializationCompressed == 0 { // Also test the length of the encoding to make sure it is 33bytes
		return errors.New("point isn't compressed")
	}
	// Reset the destination point (z and t are encoded via newGFp).
	if e.p == nil {
		e.p = &curvePoint{}
	}
	{
		e.p.x, e.p.y = gfP{0}, gfP{0}
		e.p.z, e.p.t = *newGFp(1), *newGFp(1)
	}
	// Remove the masking bits (bitwise AND with `0001 1111`),
	// i.e. clear the first 3 bits corresponding to the flags.
	bin := make([]byte, G1CompressedSize)
	copy(bin, encoding)
	bin[0] &= serializationMask
	// Decode the point at infinity in the compressed form.
	if encoding[0]&serializationInfinity != 0 {
		if encoding[0]&serializationBigY != 0 {
			return errors.New("high Y bit improperly set")
		}
		// With the mask removed, every byte of a valid infinity encoding is zero.
		for i := range bin {
			if bin[i] != 0 {
				return errors.New("invalid infinity encoding")
			}
		}
		e.p.SetInfinity()
		return nil
	}
	// Decompress the point P (P =/= ∞).
	var err error
	if err = e.p.x.Unmarshal(bin[1:]); err != nil {
		return err
	}
	// MontEncode x for fast finite-field arithmetic
	// (z and t above were created with newGFp and are already encoded).
	montEncode(&e.p.x, &e.p.x)
	y, err := getYFromMontEncodedX(&e.p.x)
	if err != nil {
		return err
	}
	e.p.y = *y
	// Select the y-candidate that matches the serializationBigY flag,
	// negating the point when the recovered y does not match the flag.
	if !e.IsHigherY() {
		if encoding[0]&serializationBigY != 0 {
			e.Neg(e)
		}
	} else {
		if encoding[0]&serializationBigY == 0 { // recovered y is the higher one but the flag says lower
			e.Neg(e)
		}
	}
	// No need to check that the point e.p is on the curve
	// since y was derived from x using the curve equation;
	// adding a check would be redundant.
	return nil
}
// DecodeUncompressed decodes a point in the uncompressed form
// Take a point P encoded (ie: written in affine form where each coordinate is MontDecoded)
// and encodes it by going back to Jacobian coordinates and montEncode all coordinates
/*
func (e *G1) DecodeUncompressed(encoding []byte) error {
if len(encoding) != G1UncompressedSize {
return errors.New("wrong encoded point size")
}
if encoding[0]&serializationCompressed != 0 { // Also test the length of the encoding to make sure it is 65bytes
return errors.New("point is compressed")
}
if encoding[0]&serializationBigY != 0 { // Also test that the bigY flag if not set
return errors.New("bigY flag should not be set")
}
// Unmarshal the points and check their caps
if e.p == nil {
e.p = &curvePoint{}
} else {
e.p.x, e.p.y = gfP{0}, gfP{0}
e.p.z, e.p.t = *newGFp(1), *newGFp(1)
}
// Removes the bits of the masking (This does a bitwise AND with `0001 1111`)
// And thus removes the first 3 bits corresponding to the masking
// Useless for now because in bn256, we added a full byte to enable masking
// However, this is needed if we work over BLS12 and its underlying field
bin := make([]byte, G1UncompressedSize)
copy(bin, encoding)
bin[0] &= serializationMask
// Decode the point at infinity in the compressed form
if encoding[0]&serializationInfinity != 0 {
// Makes sense to check that all bytes of bin are 0x0 since we removed the masking above}
for i := range bin {
if bin[i] != 0 {
return errors.New("invalid infinity encoding")
}
}
e.p.SetInfinity()
return nil
}
// Decode the point P (P =/= ∞)
var err error
// Decode the x-coordinate
if err = e.p.x.Unmarshal(bin[1:33]); err != nil {
return err
}
// Decode the y-coordinate
if err = e.p.y.Unmarshal(bin[33:]); err != nil {
return err
}
// MontEncode our field elements for fast finite field arithmetic
montEncode(&e.p.x, &e.p.x)
montEncode(&e.p.y, &e.p.y)
if !e.p.IsOnCurve() {
return errors.New("malformed point: Not on the curve")
}
return nil
}
*/
// DecodeUncompressed decodes a point from its uncompressed serialized form:
// one flag byte followed by the Marshal encoding (affine, MontDecoded
// coordinates). On success e holds the decoded point.
func (e *G1) DecodeUncompressed(encoding []byte) error {
	if len(encoding) != G1UncompressedSize {
		return errors.New("wrong encoded point size")
	}
	if encoding[0]&serializationCompressed != 0 { // the compressed flag must not be set in a 65-byte uncompressed encoding
		return errors.New("point is compressed")
	}
	if encoding[0]&serializationBigY != 0 { // the bigY flag is only meaningful for compressed encodings
		return errors.New("bigY flag should not be set")
	}
	// Lazily allocate the inner curve point
	if e.p == nil {
		e.p = &curvePoint{}
	}
	// Removes the bits of the masking (this does a bitwise AND with `0001 1111`)
	// and thus clears the first 3 bits corresponding to the masking.
	// Useless for now because in bn256 we added a full byte to enable masking,
	// however this is needed if we work over BLS12 and its underlying field.
	bin := make([]byte, G1UncompressedSize)
	copy(bin, encoding)
	bin[0] &= serializationMask
	// Decode the point at infinity
	if encoding[0]&serializationInfinity != 0 {
		// All bytes of bin must be 0x0 since we removed the masking above
		for i := range bin {
			if bin[i] != 0 {
				return errors.New("invalid infinity encoding")
			}
		}
		e.p.SetInfinity()
		return nil
	}
	// We remove the flag byte and unmarshal the remaining data
	_, err := e.Unmarshal(encoding[1:])
	return err
}

View File

@ -0,0 +1,241 @@
package bn256
import (
"crypto/rand"
"fmt"
"math/big"
"testing"
"github.com/stretchr/testify/assert"
)
// assertGFpEqual asserts limb-by-limb equality of two gfP field elements.
func assertGFpEqual(t *testing.T, a, b *gfP) {
	for idx := 0; idx < FpUint64Size; idx++ {
		assert.Equal(t, a[idx], b[idx], fmt.Sprintf("The %d's elements differ between the 2 field elements", idx))
	}
}
// TestEncodeCompressed checks the compressed round-trip
// (EncodeCompressed -> DecodeCompressed) for a random G1 point and for the
// point at infinity, and cross-checks the encoded x-coordinate against
// the output of Marshal.
func TestEncodeCompressed(t *testing.T) {
	// Case1: Create random point (Jacobian form)
	_, GaInit, err := RandomG1(rand.Reader)
	if err != nil {
		t.Fatal(err)
	}
	// Affine form of GaInit
	GaAffine := new(G1)
	GaAffine.Set(GaInit)
	GaAffine.p.MakeAffine()
	// Encode GaCopy1 with the EncodeCompressed function
	GaCopy1 := new(G1)
	GaCopy1.Set(GaInit)
	compressed := GaCopy1.EncodeCompressed()
	// Encode GaCopy2 with the Marshal function
	GaCopy2 := new(G1)
	GaCopy2.Set(GaInit)
	marshalled := GaCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the x-coordinate is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		compressed[1:], // Ignore the masking byte
		marshalled[:32], // Get only the x-coordinate
		"The EncodeCompressed and Marshal function yield different results for the x-coordinate")
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 := new(G1)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeCompressed function
	Gb2 := new(G1)
	err = Gb2.DecodeCompressed(compressed)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decompressed point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decompressed point should equal the y-coord of the intial point")
	// Case2: Encode the point at infinity
	GInfinity := new(G1)
	GInfinity.p = &curvePoint{}
	GInfinity.p.SetInfinity()
	// Get the point in affine form
	GInfinityAffine := new(G1)
	GInfinityAffine.Set(GInfinity)
	GInfinityAffine.p.MakeAffine()
	// Encode GInfinityCopy1 with the EncodeCompressed function
	GInfinityCopy1 := new(G1)
	GInfinityCopy1.Set(GInfinity)
	compressed = GInfinityCopy1.EncodeCompressed()
	// Encode GInfinityCopy2 with the Marshal function
	GInfinityCopy2 := new(G1)
	GInfinityCopy2.Set(GInfinity)
	marshalled = GInfinityCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the x-coordinate is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		compressed[1:], // Ignore the masking byte
		marshalled[:32],
		"The EncodeCompressed and Marshal function yield different results")
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 = new(G1)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeCompressed function
	Gb2 = new(G1)
	err = Gb2.DecodeCompressed(compressed)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decompressed point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decompressed point should equal the y-coord of the intial point")
}
// TestIsHigherY checks that the y-coordinates of P and -P add up to zero
// and that IsHigherY agrees with a big.Int comparison of the two candidates.
func TestIsHigherY(t *testing.T) {
	_, Ga, err := RandomG1(rand.Reader)
	if err != nil {
		t.Fatal(err)
	}
	Ga.p.MakeAffine()
	GaYString := Ga.p.y.String()
	GaYBig := new(big.Int)
	_, ok := GaYBig.SetString(GaYString, 16)
	assert.True(t, ok, "ok should be True")
	GaNeg := new(G1)
	GaNeg.Neg(Ga)
	GaNeg.p.MakeAffine()
	GaNegYString := GaNeg.p.y.String()
	GaNegYBig := new(big.Int)
	_, ok = GaNegYBig.SetString(GaNegYString, 16)
	assert.True(t, ok, "ok should be True")
	// Verify that Ga.p.y + GaNeg.p.y == 0
	sumYs := &gfP{}
	fieldZero := newGFp(0)
	gfpAdd(sumYs, &Ga.p.y, &GaNeg.p.y)
	assert.Equal(t, *sumYs, *fieldZero, "The y-coordinates of P and -P should add up to zero")
	// Find which point between Ga and GaNeg is the one with the higher Y
	res := gfpCmp(&GaNeg.p.y, &Ga.p.y)
	if res > 0 { // GaNeg.p.y > Ga.p.y
		assert.True(t, GaNeg.IsHigherY(), "GaNeg.IsHigherY should be true if GaNeg.p.y > Ga.p.y")
		// Test the comparison of the big.Ints also, should be the same result
		assert.Equal(t, GaNegYBig.Cmp(GaYBig), 1, "GaNegYBig should be bigger than GaYBig")
	} else if res < 0 { // GaNeg.p.y < Ga.p.y
		assert.False(t, GaNeg.IsHigherY(), "GaNeg.IsHigherY should be false if GaNeg.p.y < Ga.p.y")
		// Test the comparison of the big.Ints also, should be the same result
		assert.Equal(t, GaYBig.Cmp(GaNegYBig), 1, "GaYBig should be bigger than GaNegYBig")
	}
}
// TestGetYFromMontEncodedX feeds the generator's x-coordinate (x = 1) to
// getYFromMontEncodedX and checks that the recovered y is one of the two
// square roots at that x (the Montgomery encodings of 2 and -2).
func TestGetYFromMontEncodedX(t *testing.T) {
	// We know that the generator of the curve is P = (x: 1, y: 2, z: 1, t: 1)
	// We take x = 1 and we see if we retrieve P such that y = 2 or -P such that y' = Inv(2)
	// Create the GFp element 1 and MontEncode it
	PxMontEncoded := newGFp(1)
	yRetrieved, err := getYFromMontEncodedX(PxMontEncoded)
	assert.Nil(t, err)
	smallYMontEncoded := newGFp(2)
	bigYMontEncoded := &gfP{}
	gfpNeg(bigYMontEncoded, smallYMontEncoded)
	testCondition := (*yRetrieved == *smallYMontEncoded) || (*yRetrieved == *bigYMontEncoded)
	assert.True(t, testCondition, "The retrieved Y should either equal 2 or Inv(2)")
}
// TestEncodeUncompressed checks the uncompressed round-trip
// (EncodeUncompressed -> DecodeUncompressed) for a random G1 point and for
// the point at infinity, and cross-checks the payload against Marshal.
func TestEncodeUncompressed(t *testing.T) {
	// Case1: Create random point (Jacobian form)
	_, GaInit, err := RandomG1(rand.Reader)
	if err != nil {
		t.Fatal(err)
	}
	// Affine form of GaInit
	GaAffine := new(G1)
	GaAffine.Set(GaInit)
	GaAffine.p.MakeAffine()
	// Encode GaCopy1 with the EncodeUncompressed function
	GaCopy1 := new(G1)
	GaCopy1.Set(GaInit)
	encoded := GaCopy1.EncodeUncompressed()
	// Encode GaCopy2 with the Marshal function
	GaCopy2 := new(G1)
	GaCopy2.Set(GaInit)
	marshalled := GaCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the point is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		encoded[1:], // Ignore the masking byte
		marshalled[:],
		"The EncodeUncompressed and Marshal function yield different results")
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 := new(G1)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeUncompressed function
	Gb2 := new(G1)
	err = Gb2.DecodeUncompressed(encoded)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decoded point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decoded point should equal the y-coord of the intial point")
	// Case2: Encode the point at infinity
	GInfinity := new(G1)
	GInfinity.p = &curvePoint{}
	GInfinity.p.SetInfinity()
	// Get the point in affine form
	GInfinityAffine := new(G1)
	GInfinityAffine.Set(GInfinity)
	GInfinityAffine.p.MakeAffine()
	// Encode GInfinityCopy1 with the EncodeUncompressed function
	GInfinityCopy1 := new(G1)
	GInfinityCopy1.Set(GInfinity)
	encoded = GInfinityCopy1.EncodeUncompressed()
	// Encode GInfinityCopy2 with the Marshal function
	GInfinityCopy2 := new(G1)
	GInfinityCopy2.Set(GInfinity)
	marshalled = GInfinityCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the point is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		encoded[1:], // Ignore the masking byte
		marshalled[:],
		"The EncodeUncompressed and Marshal function yield different results")
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 = new(G1)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeUncompressed function
	Gb2 = new(G1)
	err = Gb2.DecodeUncompressed(encoded)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decompressed point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decompressed point should equal the y-coord of the intial point")
}

View File

@ -0,0 +1,266 @@
// Package bn256 implements a particular bilinear group at the 128-bit security
// level.
//
// Bilinear groups are the basis of many of the new cryptographic protocols that
// have been proposed over the past decade. They consist of a triplet of groups
// (G₁, G₂ and GT) such that there exists a function e(g₁ˣ,g₂ʸ)=gTˣʸ (where gₓ
// is a generator of the respective group). That function is called a pairing
// function.
//
// This package specifically implements the Optimal Ate pairing over a 256-bit
// Barreto-Naehrig curve as described in
// http://cryptojedi.org/papers/dclxvi-20100714.pdf. Its output is compatible
// with the implementation described in that paper.
package bn256
import (
"errors"
)
// This file implements some utility functions for the MPC,
// especially the serialization and deserialization functions for points in G2
// Byte sizes of serialized G2 elements on the bn256 pairing-friendly curve.
const (
	Fq2ElementSize     = 2 * FqElementSize     // an Fq2 element is a pair of Fq coefficients
	G2CompressedSize   = Fq2ElementSize + 1    // + 1 accounts for the additional byte used for masking
	G2UncompressedSize = 2*Fq2ElementSize + 1  // + 1 accounts for the additional byte used for masking
)
// EncodeUncompressed serializes e into G2UncompressedSize bytes: one flag
// byte followed by the Marshal output (affine coordinates, MontDecoded).
// NOTE(review): e.Marshal appears to normalize the point to affine form
// (see the "Careful Marshal modifies the point" comments in the tests), so
// e may be modified — confirm before relying on e staying untouched.
func (e *G2) EncodeUncompressed() []byte {
	// Lazily allocate the inner twist point.
	if e.p == nil {
		e.p = &twistPoint{}
	}
	out := make([]byte, G2UncompressedSize)
	if e.p.IsInfinity() {
		// Only the infinity flag is set; the payload stays all-zero.
		out[0] |= serializationInfinity
		return out
	}
	// Flag byte at index 0 (all flags clear), marshalled point afterwards.
	copy(out[1:], e.Marshal())
	return out
}
// DecodeUncompressed decodes a point from its uncompressed serialized form:
// one flag byte followed by the Marshal encoding (affine form, MontDecoded
// coordinates). On success e holds the decoded point.
func (e *G2) DecodeUncompressed(encoding []byte) error {
	if len(encoding) != G2UncompressedSize {
		return errors.New("wrong encoded point size")
	}
	if encoding[0]&serializationCompressed != 0 { // the compressed flag must not be set in a 129-byte uncompressed encoding
		return errors.New("point is compressed")
	}
	if encoding[0]&serializationBigY != 0 { // the bigY flag is only meaningful for compressed encodings
		return errors.New("bigY flag should not be set")
	}
	// Lazily allocate the inner twist point
	if e.p == nil {
		e.p = &twistPoint{}
	}
	// Removes the bits of the masking (this does a bitwise AND with `0001 1111`)
	// and thus clears the first 3 bits corresponding to the masking.
	// Useless for now because in bn256 we added a full byte to enable masking,
	// however this is needed if we work over BLS12 and its underlying field.
	bin := make([]byte, G2UncompressedSize)
	copy(bin, encoding)
	bin[0] &= serializationMask
	// Decode the point at infinity
	if encoding[0]&serializationInfinity != 0 {
		// All bytes of bin must be 0x0 since we removed the masking above
		for i := range bin {
			if bin[i] != 0 {
				return errors.New("invalid infinity encoding")
			}
		}
		e.p.SetInfinity()
		return nil
	}
	// We remove the flag byte and unmarshal the remaining data
	_, err := e.Unmarshal(encoding[1:])
	return err
}
// IsHigherY reports whether the y-coordinate of e is the "higher" of the two
// candidates y and -y, comparing only the coefficient y of the Fp2
// y-coordinate. Used to disambiguate the two square roots during compression.
//
// Naming note: inside gfP2 the fields x and y are the two *coefficients* of
// the extension element (xi + y), while e.p.x / e.p.y are the curve point
// *coordinates*. Hence e.p.y.y below is the y coefficient of the
// y-coordinate of the point e.
//
// TODO: Rename the coefficients of the elements of Fp2 as c0 and c1 to clarify the code
func (e *G2) IsHigherY() bool {
	// Guard against a nil inner point.
	if e.p == nil {
		e.p = &twistPoint{}
		e.p.MakeAffine()
	}
	yCoeff := &gfP{}
	yCoeff.Set(&e.p.y.y)
	yCoeffNeg := &gfP{}
	gfpNeg(yCoeffNeg, yCoeff)
	// gfpCmp returns 1 when the first argument is strictly greater.
	return gfpCmp(yCoeff, yCoeffNeg) > 0
}
// EncodeCompressed serializes e into G2CompressedSize bytes: one flag byte
// (compressed / infinity / bigY) followed by the MontDecoded x-coordinate
// (both Fp2 coefficients). The y-coordinate is recovered on decode from the
// curve equation, using the bigY flag to pick the right square root.
func (e *G2) EncodeCompressed() []byte {
	// Check nil pointers
	if e.p == nil {
		e.p = &twistPoint{}
	}
	e.p.MakeAffine()
	ret := make([]byte, G2CompressedSize)
	// Flag the encoding with the compressed flag
	ret[0] |= serializationCompressed
	if e.p.IsInfinity() {
		// Flag the encoding with the infinity flag
		ret[0] |= serializationInfinity
		return ret
	}
	if e.IsHigherY() {
		// Flag the encoding with the bigY flag
		ret[0] |= serializationBigY
	}
	// We start the serialization of the coordinates at index 1,
	// since index 0 in `ret` holds the masking byte.
	//
	// `temp` contains the MontDecoded x-coordinate of the point; to fully
	// encode it we marshal both its x and y coefficients.
	temp := gfP2Decode(&e.p.x)
	temp.x.Marshal(ret[1:])
	temp.y.Marshal(ret[FqElementSize+1:])
	return ret
}
// getYFromMontEncodedXG2 computes a y-coordinate matching the given
// Montgomery-encoded x-coordinate on the twist, via y² = x³ + b'.
// One of the two possible square roots is returned.
func getYFromMontEncodedXG2(x *gfP2) (*gfP2, error) {
	// Guard against a nil input.
	if x == nil {
		return nil, errors.New("Cannot retrieve the y-coordinate from a nil pointer")
	}
	xSquare := new(gfP2).Mul(x, x)
	xCube := new(gfP2).Mul(xSquare, x)
	// twistB is already MontEncoded, since it is created with newGFp.
	rhs := new(gfP2).Add(xCube, twistB)
	y, err := rhs.Sqrt()
	if err != nil {
		return nil, err
	}
	return y, nil
}
// DecodeCompressed decodes a point in the compressed form: the flag byte
// followed by the MontDecoded x-coordinate. The y-coordinate is recovered
// from the twist equation, and the bigY flag selects which square root.
func (e *G2) DecodeCompressed(encoding []byte) error {
	if len(encoding) != G2CompressedSize {
		return errors.New("wrong encoded point size")
	}
	if encoding[0]&serializationCompressed == 0 { // the compressed flag must be set in a 65-byte compressed encoding
		return errors.New("point isn't compressed")
	}
	// Unmarshal the points and check their caps
	if e.p == nil {
		e.p = &twistPoint{}
	} else {
		// Reset the coordinates: gfP.Unmarshal accumulates into its receiver,
		// so stale values would corrupt the decode.
		e.p.x.SetZero()
		e.p.y.SetZero()
		e.p.z.SetOne()
		e.p.t.SetOne()
	}
	// Removes the bits of the masking (this does a bitwise AND with `0001 1111`)
	// and thus removes the first 3 bits corresponding to the masking
	bin := make([]byte, G2CompressedSize)
	copy(bin, encoding)
	bin[0] &= serializationMask
	// Decode the point at infinity in the compressed form
	if encoding[0]&serializationInfinity != 0 {
		if encoding[0]&serializationBigY != 0 {
			return errors.New("high Y bit improperly set")
		}
		// Similar to `for i:=0; i<len(bin); i++ {}`
		for i := range bin {
			// All bytes of bin must be 0x0 since we removed the masking above
			if bin[i] != 0 {
				return errors.New("invalid infinity encoding")
			}
		}
		e.p.SetInfinity()
		return nil
	}
	// Decompress the point P (P =/= ∞): read both Fp2 coefficients of x
	var err error
	if err = e.p.x.x.Unmarshal(bin[1:]); err != nil {
		return err
	}
	if err = e.p.x.y.Unmarshal(bin[FqElementSize+1:]); err != nil {
		return err
	}
	// MontEncode our field elements for fast finite field arithmetic
	// Needs to be done since the z and t coordinates are also encoded (ie: created with newGFp)
	montEncode(&e.p.x.x, &e.p.x.x)
	montEncode(&e.p.x.y, &e.p.x.y)
	y, err := getYFromMontEncodedXG2(&e.p.x)
	if err != nil {
		return err
	}
	e.p.y = *y
	// If the serializationBigY flag disagrees with the y recovered by
	// getYFromMontEncodedXG2, negate the point to pick the other root.
	if !e.IsHigherY() {
		if encoding[0]&serializationBigY != 0 {
			e.Neg(e)
		}
	} else {
		if encoding[0]&serializationBigY == 0 { // the recovered y is the higher but the mask is not set for higher y
			e.Neg(e)
		}
	}
	// No need to check that the point e.p is on the curve
	// since we retrieved y from x by using the curve equation.
	// Adding it would be redundant
	return nil
}

View File

@ -0,0 +1,174 @@
package bn256
import (
"crypto/rand"
"testing"
"github.com/stretchr/testify/assert"
)
// TestG2DecodeCompressed checks the compressed round-trip
// (EncodeCompressed -> DecodeCompressed) for a random G2 point and for the
// point at infinity, and cross-checks the x-coordinate against Marshal.
func TestG2DecodeCompressed(t *testing.T) {
	_, GaInit, err := RandomG2(rand.Reader)
	assert.NoError(t, err, "Err should be nil")
	// Affine form of GaInit
	GaAffine := new(G2)
	GaAffine.Set(GaInit)
	GaAffine.p.MakeAffine()
	// Encode GaCopy1 with the EncodeCompressed function
	GaCopy1 := new(G2)
	GaCopy1.Set(GaInit)
	compressed := GaCopy1.EncodeCompressed()
	// Encode GaCopy2 with the Marshal function
	GaCopy2 := new(G2)
	GaCopy2.Set(GaInit)
	marshalled := GaCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the x-coordinate is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		compressed[1:], // Ignore the masking byte
		marshalled[:64], // Get only the x-coordinate
		"The EncodeCompressed and Marshal function yield different results for the x-coordinate",
	)
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 := new(G2)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeCompressed function
	Gb2 := new(G2)
	err = Gb2.DecodeCompressed(compressed)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decompressed point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decompressed point should equal the y-coord of the intial point")
	// == Case2: Encode the point at infinity == //
	GInfinity := new(G2)
	GInfinity.p = &twistPoint{}
	GInfinity.p.SetInfinity()
	// Get the point in affine form
	GInfinityAffine := new(G2)
	GInfinityAffine.Set(GInfinity)
	GInfinityAffine.p.MakeAffine()
	// Encode GInfinityCopy1 with the EncodeCompressed function
	GInfinityCopy1 := new(G2)
	GInfinityCopy1.Set(GInfinity)
	compressed = GInfinityCopy1.EncodeCompressed()
	// Encode GInfinityCopy2 with the Marshal function
	GInfinityCopy2 := new(G2)
	GInfinityCopy2.Set(GInfinity)
	marshalled = GInfinityCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the x-coordinate is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		compressed[1:], // Ignore the masking byte
		marshalled[:64],
		"The EncodeCompressed and Marshal function yield different results")
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 = new(G2)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeCompressed function
	Gb2 = new(G2)
	err = Gb2.DecodeCompressed(compressed)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decompressed point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decompressed point should equal the y-coord of the intial point")
}
// TestG2DecodeUncompressed checks the uncompressed round-trip
// (EncodeUncompressed -> DecodeUncompressed) for a random G2 point and for
// the point at infinity, and cross-checks the payload against Marshal.
func TestG2DecodeUncompressed(t *testing.T) {
	// == Case1: Create random point (Jacobian form) == //
	_, GaInit, err := RandomG2(rand.Reader)
	assert.NoError(t, err, "Err should be nil")
	// Affine form of GaInit
	GaAffine := new(G2)
	GaAffine.Set(GaInit)
	GaAffine.p.MakeAffine()
	// Encode GaCopy1 with the EncodeUncompressed function
	GaCopy1 := new(G2)
	GaCopy1.Set(GaInit)
	encoded := GaCopy1.EncodeUncompressed()
	// Encode GaCopy2 with the Marshal function
	GaCopy2 := new(G2)
	GaCopy2.Set(GaInit)
	marshalled := GaCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the point is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		encoded[1:], // Ignore the masking byte
		marshalled[:],
		"The EncodeUncompressed and Marshal function yield different results")
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 := new(G2)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeUncompressed function
	Gb2 := new(G2)
	err = Gb2.DecodeUncompressed(encoded)
	assert.Nil(t, err)
	assert.Equal(t, GaAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decoded point should equal the x-coord of the intial point")
	assert.Equal(t, GaAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decoded point should equal the y-coord of the intial point")
	// == Case2: Encode the point at infinity == //
	GInfinity := new(G2)
	GInfinity.p = &twistPoint{}
	GInfinity.p.SetInfinity()
	// Get the point in affine form
	GInfinityAffine := new(G2)
	GInfinityAffine.Set(GInfinity)
	GInfinityAffine.p.MakeAffine()
	// Encode GInfinityCopy1 with the EncodeUncompressed function
	GInfinityCopy1 := new(G2)
	GInfinityCopy1.Set(GInfinity)
	encoded = GInfinityCopy1.EncodeUncompressed()
	// Encode GInfinityCopy2 with the Marshal function
	GInfinityCopy2 := new(G2)
	GInfinityCopy2.Set(GInfinity)
	marshalled = GInfinityCopy2.Marshal() // Careful Marshal modifies the point since it makes it an affine point!
	// Make sure that the point is encoded as it is when we call the Marshal function
	assert.Equal(
		t,
		encoded[1:], // Ignore the masking byte
		marshalled[:],
		"The EncodeUncompressed and Marshal function yield different results")
	// Unmarshal the point Ga with the Unmarshal function
	Gb1 = new(G2)
	_, err = Gb1.Unmarshal(marshalled)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb1.p.x.String(), "The x-coord of the unmarshalled point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb1.p.y.String(), "The y-coord of the unmarshalled point should equal the y-coord of the intial point")
	// Decode the point Ga with the DecodeUncompressed function
	Gb2 = new(G2)
	err = Gb2.DecodeUncompressed(encoded)
	assert.Nil(t, err)
	assert.Equal(t, GInfinityAffine.p.x.String(), Gb2.p.x.String(), "The x-coord of the decompressed point should equal the x-coord of the intial point")
	assert.Equal(t, GInfinityAffine.p.y.String(), Gb2.p.y.String(), "The y-coord of the decompressed point should equal the y-coord of the intial point")
}

188
cryptography/bn256/gfp.go Normal file
View File

@ -0,0 +1,188 @@
package bn256
import (
"bytes"
"encoding/binary"
"errors"
"fmt"
"math/big"
)
// FpUint64Size is the number of uint64 chunks to represent a field element
const FpUint64Size = 4

// gfP is a base-field element stored as 4 little-endian 64-bit limbs.
// Arithmetic helpers generally operate on the Montgomery-encoded form
// (see montEncode / montDecode).
type gfP [FpUint64Size]uint64
// newGFp builds the Montgomery-encoded field element for a small signed
// integer; negative inputs map to p - |x|.
func newGFp(x int64) *gfP {
	mag := x
	if mag < 0 {
		mag = -mag
	}
	e := &gfP{uint64(mag)}
	if x < 0 {
		// Fold the magnitude back to the field's additive inverse.
		gfpNeg(e, e)
	}
	montEncode(e, e)
	return e
}
// String returns e as a 64-hex-digit string, most-significant limb first.
// The limbs are printed as stored: if e is Montgomery-encoded, the output
// is the encoded form, not the canonical value.
func (e *gfP) String() string {
	return fmt.Sprintf("%16.16x%16.16x%16.16x%16.16x", e[3], e[2], e[1], e[0])
}
/*
func byteToUint64(in []byte) (uint64, error) {
if len(in) > 8 {
return 0, errors.New("the input bytes length should be equal to 8 (or smaller)")
}
// Takes the bytes in the little endian order
// The byte 0x64 translate in a uint64 of the shape 0x64 (= 0x0000000000000064) rather than 0x6400000000000000
res := binary.LittleEndian.Uint64(in)
return res, nil
}
*/
// padBytes left-pads bb with zero bytes up to a fixed 32-byte length.
// A 32-byte input is returned as-is; a longer input yields an error.
func padBytes(bb []byte) ([]byte, error) {
	const targetLen = 32
	switch {
	case len(bb) > targetLen:
		return []byte{}, errors.New("Cannot pad the given byte slice as the length exceed the padding length")
	case len(bb) == targetLen:
		return bb, nil
	}
	padded := make([]byte, targetLen)
	copy(padded[targetLen-len(bb):], bb)
	return padded, nil
}
// newGFpFromBigInt converts a big.Int into a gfP element (not
// Montgomery-encoded). Values >= P are first reduced into the field
// (ie: replaced by the smallest representative of their class mod P).
//
// Fix: the previous version called in.Mod(in, P), silently mutating the
// caller's big.Int; the reduction now goes into a fresh value.
func newGFpFromBigInt(in *big.Int) (out *gfP) {
	v := in
	if v.Cmp(P) >= 0 {
		// Reduce mod P into a new big.Int so the argument stays untouched.
		v = new(big.Int).Mod(in, P)
	}
	inBytes := v.Bytes()
	// We want to work on byte slices of length 32 to re-assemble our gfP element
	if len(inBytes) < 32 {
		// Safe to ignore the err: padBytes only fails for inputs longer than 32 bytes
		inBytes, _ = padBytes(inBytes)
	}
	out = &gfP{}
	var n uint64
	// inBytes is guaranteed to have length 32, so the four 8-byte reads
	// below stay within bounds.
	for i := 0; i < FpUint64Size; i++ {
		buf := bytes.NewBuffer(inBytes[i*8 : (i+1)*8])
		binary.Read(buf, binary.BigEndian, &n)
		out[(FpUint64Size-1)-i] = n // gfP stores field elements as little-endian 64-bit words
	}
	return out
}
// newMontEncodedGFpFromBigInt converts a big.Int into a Montgomery-encoded
// gfP element.
func newMontEncodedGFpFromBigInt(in *big.Int) *gfP {
	out := newGFpFromBigInt(in)
	montEncode(out, out)
	return out
}
// gFpToBigInt converts e into a big.Int by parsing its hexadecimal string
// representation.
func (e *gfP) gFpToBigInt() (*big.Int, error) {
	out, ok := new(big.Int).SetString(e.String(), 16)
	if !ok {
		return nil, errors.New("couldn't create big.Int from gfP element")
	}
	return out, nil
}
// Set copies all limbs of f into e.
func (e *gfP) Set(f *gfP) {
	// gfP is an array type, so a single dereferenced assignment copies
	// every limb.
	*e = *f
}
// Invert sets e = f⁻¹ using Fermat's little theorem: f^(p-2) ≡ f⁻¹ (mod p).
// `bits` is the exponent p-2 as four little-endian 64-bit limbs (its low
// word 0x...fd45 is the modulus low word 0x...fd47 minus 2, cf. p2).
func (e *gfP) Invert(f *gfP) {
	bits := [4]uint64{0x3c208c16d87cfd45, 0x97816a916871ca8d, 0xb85045b68181585d, 0x30644e72e131a029}
	sum, power := &gfP{}, &gfP{}
	sum.Set(rN1) // NOTE(review): rN1 together with the final r3 factor presumably keeps the result Montgomery-encoded — confirm against the package constants
	power.Set(f)
	// Classic square-and-multiply over the exponent bits, LSB first.
	for word := 0; word < 4; word++ {
		for bit := uint(0); bit < 64; bit++ {
			if (bits[word]>>bit)&1 == 1 {
				gfpMul(sum, sum, power)
			}
			gfpMul(power, power, power)
		}
	}
	gfpMul(sum, sum, r3)
	e.Set(sum)
}
// Marshal writes e into out as a 32-byte big-endian integer. The limbs are
// stored little-endian, so they are emitted in reverse order.
func (e *gfP) Marshal(out []byte) {
	for w := uint(0); w < 4; w++ {
		binary.BigEndian.PutUint64(out[8*w:], e[3-w])
	}
}
// Unmarshal decodes 32 big-endian bytes into e (stored as little-endian
// 64-bit limbs) and verifies the value is strictly below the modulus p2,
// so that every field element has a canonical encoding.
//
// Fix: the previous version accumulated with += into whatever the receiver
// already held, silently producing garbage when a gfP was reused (callers
// such as G2.DecodeCompressed had to zero the coordinates first). Each limb
// is now reset before decoding, making Unmarshal safe on reused receivers.
func (e *gfP) Unmarshal(in []byte) error {
	// Decode the bytes into little-endian 64-bit words.
	for w := uint(0); w < 4; w++ {
		e[3-w] = 0
		for b := uint(0); b < 8; b++ {
			e[3-w] += uint64(in[8*w+b]) << (56 - 8*b)
		}
	}
	// Ensure the decoded value respects the curve modulus.
	for i := 3; i >= 0; i-- {
		if e[i] < p2[i] {
			return nil
		}
		if e[i] > p2[i] {
			return errors.New("bn256: coordinate exceeds modulus")
		}
	}
	return errors.New("bn256: coordinate equals modulus")
}
// gfpCmp compares a and b as 256-bit integers (limbs are little-endian
// 64-bit words): it returns 1 when a > b, -1 when a < b, and 0 when equal.
// It is only used to distinguish between the two candidate y-coordinates
// during point compression — an ordered field must be infinite, and we work
// over a finite field here.
func gfpCmp(a, b *gfP) int {
	// Walk from the most significant limb down; the first difference decides.
	for i := FpUint64Size - 1; i >= 0; i-- {
		switch {
		case a[i] > b[i]:
			return 1
		case a[i] < b[i]:
			return -1
		}
	}
	return 0
}
// In Montgomery representation, an element x is represented by xR mod p, where
// R is a power of 2 corresponding to the number of machine-words that can contain p.
// (where p is the characteristic of the prime field we work over)
// See: https://web.wpi.edu/Pubs/ETD/Available/etd-0430102-120529/unrestricted/thesis.pdf

// montEncode maps a into Montgomery form, c = a·R mod p, by Montgomery-multiplying with R².
func montEncode(c, a *gfP) { gfpMul(c, a, r2) }

// montDecode maps a out of Montgomery form by Montgomery-multiplying with 1 (c = a·R⁻¹ mod p).
func montDecode(c, a *gfP) { gfpMul(c, a, &gfP{1}) }

160
cryptography/bn256/gfp12.go Normal file
View File

@ -0,0 +1,160 @@
package bn256
// For details of the algorithms used, see "Multiplication and Squaring on
// Pairing-Friendly Fields, Devegili et al.
// http://eprint.iacr.org/2006/471.pdf.
import (
"math/big"
)
// gfP12 implements the field of size p¹² as a quadratic extension of gfP6
// where ω²=τ.
type gfP12 struct {
	x, y gfP6 // value is xω + y
}

// String returns a human-readable "(x,y)" form of e.
func (e *gfP12) String() string {
	return "(" + e.x.String() + "," + e.y.String() + ")"
}

// Set copies a into e and returns e.
func (e *gfP12) Set(a *gfP12) *gfP12 {
	e.x.Set(&a.x)
	e.y.Set(&a.y)
	return e
}

// SetZero sets e to the additive identity 0 and returns e.
func (e *gfP12) SetZero() *gfP12 {
	e.x.SetZero()
	e.y.SetZero()
	return e
}

// SetOne sets e to the multiplicative identity (0·ω + 1) and returns e.
func (e *gfP12) SetOne() *gfP12 {
	e.x.SetZero()
	e.y.SetOne()
	return e
}

// IsZero reports whether e is the additive identity.
func (e *gfP12) IsZero() bool {
	return e.x.IsZero() && e.y.IsZero()
}

// IsOne reports whether e is the multiplicative identity.
func (e *gfP12) IsOne() bool {
	return e.x.IsZero() && e.y.IsOne()
}
// Conjugate sets e to the conjugate of a (the ω coefficient negated) and returns e.
func (e *gfP12) Conjugate(a *gfP12) *gfP12 {
	e.x.Neg(&a.x)
	e.y.Set(&a.y)
	return e
}

// Neg sets e = -a (both coefficients negated) and returns e.
func (e *gfP12) Neg(a *gfP12) *gfP12 {
	e.x.Neg(&a.x)
	e.y.Neg(&a.y)
	return e
}
// Frobenius computes (xω+y)^p = x^p ω·ξ^((p-1)/6) + y^p
func (e *gfP12) Frobenius(a *gfP12) *gfP12 {
e.x.Frobenius(&a.x)
e.y.Frobenius(&a.y)
e.x.MulScalar(&e.x, xiToPMinus1Over6)
return e
}
// FrobeniusP2 computes (xω+y)^p² = x^p² ω·ξ^((p²-1)/6) + y^p²
func (e *gfP12) FrobeniusP2(a *gfP12) *gfP12 {
e.x.FrobeniusP2(&a.x)
e.x.MulGFP(&e.x, xiToPSquaredMinus1Over6)
e.y.FrobeniusP2(&a.y)
return e
}
func (e *gfP12) FrobeniusP4(a *gfP12) *gfP12 {
e.x.FrobeniusP4(&a.x)
e.x.MulGFP(&e.x, xiToPSquaredMinus1Over3)
e.y.FrobeniusP4(&a.y)
return e
}
func (e *gfP12) Add(a, b *gfP12) *gfP12 {
e.x.Add(&a.x, &b.x)
e.y.Add(&a.y, &b.y)
return e
}
func (e *gfP12) Sub(a, b *gfP12) *gfP12 {
e.x.Sub(&a.x, &b.x)
e.y.Sub(&a.y, &b.y)
return e
}
func (e *gfP12) Mul(a, b *gfP12) *gfP12 {
tx := (&gfP6{}).Mul(&a.x, &b.y)
t := (&gfP6{}).Mul(&b.x, &a.y)
tx.Add(tx, t)
ty := (&gfP6{}).Mul(&a.y, &b.y)
t.Mul(&a.x, &b.x).MulTau(t)
e.x.Set(tx)
e.y.Add(ty, t)
return e
}
func (e *gfP12) MulScalar(a *gfP12, b *gfP6) *gfP12 {
e.x.Mul(&e.x, b)
e.y.Mul(&e.y, b)
return e
}
// Exp sets e = a^power using left-to-right binary (square-and-multiply)
// exponentiation and returns e. The exponent is scanned from its most
// significant bit downwards; a zero exponent yields the identity element.
func (e *gfP12) Exp(a *gfP12, power *big.Int) *gfP12 {
	acc := (&gfP12{}).SetOne()
	squared := &gfP12{}

	for bit := power.BitLen() - 1; bit >= 0; bit-- {
		// Square the running accumulator once per bit, then multiply in the
		// base whenever the current exponent bit is set.
		squared.Square(acc)
		if power.Bit(bit) == 0 {
			acc.Set(squared)
		} else {
			acc.Mul(squared, a)
		}
	}

	return e.Set(acc)
}
// Square sets e = a² using the complex squaring algorithm from Devegili et
// al., "Multiplication and Squaring on Pairing-Friendly Fields", section 3:
// for a = xω + y with ω²=τ, a² = 2xy·ω + ((x+y)(y+xτ) - xy - xyτ).
func (e *gfP12) Square(a *gfP12) *gfP12 {
	// Complex squaring algorithm
	v0 := (&gfP6{}).Mul(&a.x, &a.y) // v0 = x·y
	t := (&gfP6{}).MulTau(&a.x)     // t = xτ
	t.Add(&a.y, t)                  // t = y + xτ
	ty := (&gfP6{}).Add(&a.x, &a.y) // ty = x + y
	ty.Mul(ty, t).Sub(ty, v0)       // ty = (x+y)(y+xτ) - v0
	t.MulTau(v0)
	ty.Sub(ty, t) // ty = (x+y)(y+xτ) - v0 - v0·τ
	e.x.Add(v0, v0)
	e.y.Set(ty)
	return e
}

// Invert sets e = a⁻¹ and returns e.
// See "Implementing cryptographic pairings", M. Scott, section 3.2.
// ftp://136.206.11.249/pub/crypto/pairings.pdf
func (e *gfP12) Invert(a *gfP12) *gfP12 {
	t1, t2 := &gfP6{}, &gfP6{}
	t1.Square(&a.x)
	t2.Square(&a.y)
	// t1 = y² - x²τ is the norm of a; inverting it reduces the problem to a
	// single gfP6 inversion.
	t1.MulTau(t1).Sub(t2, t1)
	t2.Invert(t1)
	// e = conjugate(a) · norm⁻¹.
	e.x.Neg(&a.x)
	e.y.Set(&a.y)
	e.MulScalar(e, t2)
	return e
}

327
cryptography/bn256/gfp2.go Normal file
View File

@ -0,0 +1,327 @@
package bn256
import (
"errors"
"math"
"math/big"
)
// For details of the algorithms used, see "Multiplication and Squaring on
// Pairing-Friendly Fields, Devegili et al.
// http://eprint.iacr.org/2006/471.pdf.

// gfP2 implements a field of size p² as a quadratic extension of the base field
// where i²=-1.
type gfP2 struct {
	x, y gfP // value is xi+y.
}

// gfP2Decode returns a fresh element whose coordinates are the
// Montgomery-decoded coordinates of in (the inverse of montEncode on each
// coordinate). The input is left untouched.
func gfP2Decode(in *gfP2) *gfP2 {
	out := &gfP2{}
	montDecode(&out.x, &in.x)
	montDecode(&out.y, &in.y)
	return out
}

// String returns a textual "(x, y)" representation of e.
func (e *gfP2) String() string {
	return "(" + e.x.String() + ", " + e.y.String() + ")"
}
// Set copies a into e and returns e.
func (e *gfP2) Set(a *gfP2) *gfP2 {
	e.x.Set(&a.x)
	e.y.Set(&a.y)
	return e
}

// SetZero sets e to the additive identity 0i + 0 and returns e.
func (e *gfP2) SetZero() *gfP2 {
	e.x = gfP{0}
	e.y = gfP{0}
	return e
}

// SetOne sets e to the multiplicative identity 0i + 1 and returns e.
// newGFp(1) yields the Montgomery-encoded one.
func (e *gfP2) SetOne() *gfP2 {
	e.x = gfP{0}
	e.y = *newGFp(1)
	return e
}

// IsZero reports whether e is the additive identity.
func (e *gfP2) IsZero() bool {
	zero := gfP{0}
	return e.x == zero && e.y == zero
}

// IsOne reports whether e is the multiplicative identity.
func (e *gfP2) IsOne() bool {
	zero, one := gfP{0}, *newGFp(1)
	return e.x == zero && e.y == one
}

// Conjugate sets e to the conjugate of a (the i coefficient negated) and returns e.
func (e *gfP2) Conjugate(a *gfP2) *gfP2 {
	e.y.Set(&a.y)
	gfpNeg(&e.x, &a.x)
	return e
}

// Neg sets e = -a (both coefficients negated) and returns e.
func (e *gfP2) Neg(a *gfP2) *gfP2 {
	gfpNeg(&e.x, &a.x)
	gfpNeg(&e.y, &a.y)
	return e
}

// Add sets e = a+b (component-wise) and returns e.
func (e *gfP2) Add(a, b *gfP2) *gfP2 {
	gfpAdd(&e.x, &a.x, &b.x)
	gfpAdd(&e.y, &a.y, &b.y)
	return e
}

// Sub sets e = a-b (component-wise) and returns e.
func (e *gfP2) Sub(a, b *gfP2) *gfP2 {
	gfpSub(&e.x, &a.x, &b.x)
	gfpSub(&e.y, &a.y, &b.y)
	return e
}
// Mul sets e = a·b and returns e.
// See "Multiplication and Squaring in Pairing-Friendly Fields",
// http://eprint.iacr.org/2006/471.pdf Section 3 "Schoolbook method"
func (e *gfP2) Mul(a, b *gfP2) *gfP2 {
	tx, t := &gfP{}, &gfP{}
	gfpMul(tx, &a.x, &b.y) // tx = a.x * b.y
	gfpMul(t, &b.x, &a.y)  // t = b.x * a.y
	gfpAdd(tx, tx, t)      // tx = a.x * b.y + b.x * a.y
	ty := &gfP{}
	gfpMul(ty, &a.y, &b.y) // ty = a.y * b.y
	gfpMul(t, &a.x, &b.x)  // t = a.x * b.x
	// We do a subtraction in the field since β = -1 in our case
	// In fact, Fp2 is built using the irreducible polynomial X^2 - β, where β = -1 = p-1
	gfpSub(ty, ty, t) // ty = a.y * b.y - a.x * b.x
	e.x.Set(tx)       // e.x = a.x * b.y + b.x * a.y
	e.y.Set(ty)       // e.y = a.y * b.y - a.x * b.x
	return e
}

// MulScalar multiplies both coefficients of a by the base-field scalar b and
// stores the result in e.
func (e *gfP2) MulScalar(a *gfP2, b *gfP) *gfP2 {
	gfpMul(&e.x, &a.x, b)
	gfpMul(&e.y, &a.y, b)
	return e
}

// MulXi sets e=ξa where ξ=i+9 and then returns e.
// The multiplications by 9 are done as three doublings plus one addition
// (9x = 2·2·2·x + x), avoiding a full field multiplication.
func (e *gfP2) MulXi(a *gfP2) *gfP2 {
	// (xi+y)(i+9) = (9x+y)i+(9y-x)
	tx := &gfP{}
	gfpAdd(tx, &a.x, &a.x) // tx = 2x
	gfpAdd(tx, tx, tx)     // tx = 4x
	gfpAdd(tx, tx, tx)     // tx = 8x
	gfpAdd(tx, tx, &a.x)   // tx = 9x
	gfpAdd(tx, tx, &a.y)   // tx = 9x + y
	ty := &gfP{}
	gfpAdd(ty, &a.y, &a.y) // ty = 2y
	gfpAdd(ty, ty, ty)     // ty = 4y
	gfpAdd(ty, ty, ty)     // ty = 8y
	gfpAdd(ty, ty, &a.y)   // ty = 9y
	gfpSub(ty, ty, &a.x)   // ty = 9y - x
	e.x.Set(tx)
	e.y.Set(ty)
	return e
}
// Square sets e = a² and returns e.
func (e *gfP2) Square(a *gfP2) *gfP2 {
	// Complex squaring algorithm:
	// (xi+y)² = (x+y)(y-x) + 2*i*x*y
	// - "Devegili OhEig Scott Dahab --- Multiplication and Squaring on Pairing-Friendly Fields.pdf"; Section 3 (Complex squaring)
	// - URL: https://eprint.iacr.org/2006/471.pdf
	// Here, since the non residue used is β = -1 in Fp, then we have:
	// c0 = (a0 + a1)(a0 + βa1) - v0 - βv0 => c0 = (a0 + a1)(a0 - a1)
	// c1 = 2v0, where v0 is a0 * a1 (= x * y, with our notations)
	tx, ty := &gfP{}, &gfP{}
	gfpSub(tx, &a.y, &a.x) // a.y - a.x
	gfpAdd(ty, &a.x, &a.y) // a.x + a.y
	gfpMul(ty, tx, ty)     // ty = (y-x)(x+y)
	gfpMul(tx, &a.x, &a.y) // tx = x·y
	gfpAdd(tx, tx, tx)     // tx = 2·x·y
	e.x.Set(tx)
	e.y.Set(ty)
	return e
}

// Invert sets e = a⁻¹ and returns e.
// See "Implementing cryptographic pairings", M. Scott, section 3.2.
// ftp://136.206.11.249/pub/crypto/pairings.pdf
func (e *gfP2) Invert(a *gfP2) *gfP2 {
	// Since i²=-1, the norm of a = xi+y is x²+y²; a⁻¹ = conjugate(a)/norm.
	t1, t2 := &gfP{}, &gfP{}
	gfpMul(t1, &a.x, &a.x)
	gfpMul(t2, &a.y, &a.y)
	gfpAdd(t1, t1, t2) // t1 = x² + y²
	inv := &gfP{}
	inv.Invert(t1)
	gfpNeg(t1, &a.x)
	gfpMul(&e.x, t1, inv) // e.x = -x / (x²+y²)
	gfpMul(&e.y, &a.y, inv) // e.y = y / (x²+y²)
	return e
}
// Exp sets e = e^exponent and returns e. Note: unlike gfP12.Exp, the receiver
// itself is the base of the exponentiation.
//
// The exponent's big-endian byte representation is scanned from left to
// right. The `foundOne` flag is set at the first `1` bit encountered, after
// which the running result is squared once per bit and multiplied by the base
// for every set bit (classic left-to-right square-and-multiply).
//
// Eg: to compute 3^5, the exponent is 5 = 0000 0101. Scanning left to right,
// the first set bit makes res = 3; the next bit is 0, so res = res² = 3²;
// the last bit is 1, so res = res² = 3⁴, then res = res·3 = 3⁵. End of the
// bit string: done.
//
// Careful: `res` is initialized with SetOne(), which is the Montgomery-encoded
// identity of gfP2, so the receiver used to call Exp must itself be
// Montgomery-encoded (both x and y).
/*
TODO: Refactor this function like this:
func (e *gfP2) Exp(a *gfP2, exponent *big.Int) *gfP2 {
	sum := (&gfP2{}).SetOne()
	t := &gfP2{}
	for i := exponent.BitLen() - 1; i >= 0; i-- {
		t.Square(sum)
		if exponent.Bit(i) != 0 {
			sum.Mul(t, a)
		} else {
			sum.Set(t)
		}
	}
	e.Set(sum)
	return e
}
*/
func (e *gfP2) Exp(exponent *big.Int) *gfP2 {
	res := &gfP2{}
	res = res.SetOne()
	base := &gfP2{}
	base = base.Set(e)
	foundOne := false
	exponentBytes := exponent.Bytes()         // big endian bytes slice
	for i := 0; i < len(exponentBytes); i++ { // for each byte (remember the slice is big endian)
		for j := 0; j <= 7; j++ { // A byte contains the powers of 2 to 2^7 to 2^0 from left to right
			if foundOne {
				res = res.Mul(res, res) // square once per bit after the first set bit
			}
			// math.Pow(2, k) is exact in float64 for k in [0,7], so this float
			// detour is a correct — if unidiomatic — test of bit (7-j) of the
			// current byte (big-endian assumed).
			if uint(exponentBytes[i])&uint(math.Pow(2, float64(7-j))) != uint(0) {
				foundOne = true
				res = res.Mul(res, base)
			}
		}
	}
	e.Set(res)
	return e
}
// Sqrt returns the square root of e in GFp2 (Tonelli–Shanks), or an error if
// e is not a quadratic residue.
// See:
// - "A High-Speed Square Root Algorithm for Extension Fields - Especially for Fast Extension Fields"
//   - URL: https://core.ac.uk/download/pdf/12530172.pdf
// - "Square Roots Modulo p"
//   - URL: http://www.cmat.edu.uy/~tornaria/pub/Tornaria-2002.pdf
// - "Faster square roots in annoying finite fields"
//   - URL: http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.21.9172
func (e *gfP2) Sqrt() (*gfP2, error) {
	// In GF(p^m), Euler's Criterion is defined like EC(x) = x^((p^m -1) / 2), if EC(x) == 1, x is a QR; if EC(x) == -1, x is a QNR
	// `euler` here, is the exponent used in Euler's criterion, thus, euler = (p^m -1) / 2 for GF(p^m)
	// here, we work over GF(p^2), so euler = (p^2 -1) / 2, where p = 21888242871839275222246405745257275088696311157297823662689037894645226208583
	////euler := bigFromBase10("239547588008311421220994022608339370399626158265550411218223901127035046843189118723920525909718935985594116157406550130918127817069793474323196511433944")

	// modulus^2 = 2^s * t + 1 => p^2 = 2^s * t + 1, where t is odd
	// In our case, p^2 = 2^s * t + 1, where s = 4, t = 29943448501038927652624252826042421299953269783193801402277987640879380855398639840490065738714866998199264519675818766364765977133724184290399563929243
	////t := bigFromBase10("29943448501038927652624252826042421299953269783193801402277987640879380855398639840490065738714866998199264519675818766364765977133724184290399563929243")
	////s := bigFromBase10("4")
	s := 4
	// tMinus1Over2 = (t-1) / 2
	tMinus1Over2 := bigFromBase10("14971724250519463826312126413021210649976634891596900701138993820439690427699319920245032869357433499099632259837909383182382988566862092145199781964621")
	// A non quadratic residue in Fp
	////nonResidueFp := bigFromBase10("21888242871839275222246405745257275088696311157297823662689037894645226208582")
	// A non quadratic residue in Fp2. Here nonResidueFp2 = i + 2 (Euler Criterion applied to this element of Fp2 shows that this element is a QNR)
	////nonResidueFp2 := &gfP2{*newGFp(1), *newGFp(2)}
	// nonResidueFp2 ^ t (precomputed, Montgomery-encoded below)
	nonResidueFp2ToTXCoord := bigFromBase10("314498342015008975724433667930697407966947188435857772134235984660852259084")
	nonResidueFp2ToTYCoord := bigFromBase10("5033503716262624267312492558379982687175200734934877598599011485707452665730")
	nonResidueFp2ToT := &gfP2{*newMontEncodedGFpFromBigInt(nonResidueFp2ToTXCoord), *newMontEncodedGFpFromBigInt(nonResidueFp2ToTYCoord)}

	// Start algorithm
	// Initialize the algorithm variables
	v := s
	z := nonResidueFp2ToT
	w := new(gfP2).Set(e)
	w = w.Exp(tMinus1Over2)          // w = e^((t-1)/2)
	x := new(gfP2).Mul(e, w)         // x = e^((t+1)/2), candidate root
	b := new(gfP2).Mul(x, w)         // contains e^t

	// Check if the element is a QR
	// Since p^2 = 2^s * t + 1 => t = (p^2 - 1)/2
	// Thus, since we have b = e^t, and since we want to test if e is a QR
	// we need to square b (s-1) times. That way we'd have
	// (e^t)^{2^(s-1)} which equals e^{(p^2 - 1)/2} => Euler criterion
	bCheck := new(gfP2).Set(b)
	for i := 0; i < s-1; i++ { // s-1 == 3 here (see comment above)
		bCheck = bCheck.Square(bCheck)
	}
	if !bCheck.IsOne() {
		return nil, errors.New("Cannot extract a root. The element is not a QR in Fp2")
	}

	// Extract the root of the quadratic residue using the Tonelli-Shanks algorithm
	for !b.IsOne() {
		m := 0
		b2m := new(gfP2).Set(b)
		for !b2m.IsOne() {
			/* invariant: b2m = b^(2^m) after entering this loop */
			b2m = b2m.Square(b2m)
			m++
		}
		j := v - m - 1
		w = z
		for j > 0 {
			w = w.Square(w)
			j--
		} // w = z^2^(v-m-1)
		z = new(gfP2).Square(w)
		b = b.Mul(b, z)
		x = x.Mul(x, w)
		v = m
	}
	return x, nil
}

View File

@ -0,0 +1,164 @@
package bn256
import (
"testing"
"github.com/stretchr/testify/assert"
)
// Tests that the exponentiation in gfp2 works correctly
// SageMath test vector:
/*
p = 21888242871839275222246405745257275088696311157297823662689037894645226208583
Fp = GF(p)
Fpx.<j> = PolynomialRing(Fp, 'j')
// The modulus is in the form `j^2 - non-residue-in-Fp`
// Fp2.<i> = GF(p^2, modulus=j^2 - 3) // 3 is a quadratic non-residue in Fp
// See: https://github.com/scipr-lab/libff/blob/master/libff/algebra/curves/alt_bn128/alt_bn128_init.cpp#L95
// The quadratic non-residue used in -1, so the modulus is
Fp2.<i> = GF(p^2, modulus=j^2 + 1)
// Quad. Non. Resid. test (Euler's criterion)
eulerExp = (Fp(p-1)/Fp(2))
Fp(-1)^eulerExp + Fp(1) == p // Should return true, then we see that -1 (ie: p-1) is a nqr (non quadratic residue mod p)
// Element taken randomly in the field Fp2
// we denote an element of Fp2 as: e = x*i + y
baseElement = 8192512702373747571754527085437364828369119615795326562285198594140975111129*i + 14719814144181528030533020377409648968040866053797156997322427920899698335369
// baseElementXHex = hex(8192512702373747571754527085437364828369119615795326562285198594140975111129)
baseElementXHex = 121ccc410d6339f7 bbc9a8f5b577c5c5 96c9dfdd6233cbac 34a8ddeafedf9bd9
baseElementXHexLittleEndian = 34a8ddeafedf9bd9 96c9dfdd6233cbac bbc9a8f5b577c5c5 121ccc410d6339f7
// baseElementYHex = hex(14719814144181528030533020377409648968040866053797156997322427920899698335369)
baseElementYHex = 208b1e9b9b11a98c 30a84b2641e87244 9a54780d0e482cfb 146adf9eb7641e89
baseElementYHexLittleEndian = 146adf9eb7641e89 9a54780d0e482cfb 30a84b2641e87244 208b1e9b9b11a98c
// We run in Sage, resExponentiation = baseElement ^ 5, and we get
baseElementTo5 = baseElement ^ 5
baseElementTo5 = 1919494216989370714282264091499504460829540920627494019318177103740489354093*i + 3944397571509712892671395330294281468555185483198747137614153093242854529958
baseElementTo5XHex = 043e652d8f044857 c4cbfe9636928309 44288a2a00432390 7fa7e33a3e5acb6d
baseElementTo5XHexLittleEndian = 7fa7e33a3e5acb6d 44288a2a00432390 c4cbfe9636928309 043e652d8f044857
baseElementTo5YHex = 08b8732d547b1cda b5c82ff0bfaa42c1 54e7b24b65223fc2 88b3e8a6de535ba6
baseElementTo5XHexLittleEndian = 88b3e8a6de535ba6 54e7b24b65223fc2 b5c82ff0bfaa42c1 08b8732d547b1cda
*/
// TestExp checks gfP2.Exp against Sagemath-generated test vectors (see the
// comment block above) and against manual repeated multiplication, for a
// small exponent (5) and a multi-word exponent.
func TestExp(t *testing.T) {
	// Case 1: Exponent = 5 (= 0x05)
	baseElementX := &gfP{0x34a8ddeafedf9bd9, 0x96c9dfdd6233cbac, 0xbbc9a8f5b577c5c5, 0x121ccc410d6339f7}
	baseElementY := &gfP{0x146adf9eb7641e89, 0x9a54780d0e482cfb, 0x30a84b2641e87244, 0x208b1e9b9b11a98c}
	// montEncode each Fp element
	// Important to do since the field arithmetic uses montgomery encoding in the library
	montEncode(baseElementX, baseElementX)
	montEncode(baseElementY, baseElementY)
	baseElement := &gfP2{*baseElementX, *baseElementY}
	// We keep the expected result non encoded
	// Will need to decode the obtained result to be able to assert it with this
	baseElementTo5X := &gfP{0x7fa7e33a3e5acb6d, 0x44288a2a00432390, 0xc4cbfe9636928309, 0x043e652d8f044857}
	baseElementTo5Y := &gfP{0x88b3e8a6de535ba6, 0x54e7b24b65223fc2, 0xb5c82ff0bfaa42c1, 0x08b8732d547b1cda}
	baseElementTo5 := &gfP2{*baseElementTo5X, *baseElementTo5Y}
	// Manual multiplication, to make sure the results are all coherent with each other
	manual := &gfP2{}
	manual = manual.Set(baseElement)
	manual = manual.Mul(manual, manual)      // manual ^ 2
	manual = manual.Mul(manual, manual)      // manual ^ 4
	manual = manual.Mul(manual, baseElement) // manual ^ 5
	manualDecoded := gfP2Decode(manual)
	// Expected result (obtained with sagemath, after some type conversions)
	w := &gfP2{}
	w = w.Set(baseElementTo5)
	// Result returned by the Exp function
	exponent5 := bigFromBase10("5")
	h := &gfP2{}
	h = h.Set(baseElement)
	h = h.Exp(exponent5)
	// We decode the result of the exponentiation to be able to compare with the
	// non-encoded/sagemath generated expected result
	hDecoded := gfP2Decode(h)
	assert.Equal(t, *w, *hDecoded, "The result of the exponentiation is not coherent with the Sagemath test vector")
	assert.Equal(t, *manualDecoded, *hDecoded, "The result of the exponentiation is not coherent with the manual repeated multiplication")

	// Case 2: Exponent = bigExponent = 39028236692093846773374607431768211455 + 2^128 - 2^64 = 379310603613032310218302470789826871295 = 0x11d5c90a20486bd1c40686b493777ffff
	// This exponent can be encoded on 3 words/uint64 => 0x1 0x1d5c90a20486bd1c 0x40686b493777ffff if 64bit machine or
	// on 5 words/uint32 => 0x1 0x1d5c90a2 0x0486bd1c 0x40686b49 0x3777ffff if 32bit machine
	baseElementX = &gfP{0x34a8ddeafedf9bd9, 0x96c9dfdd6233cbac, 0xbbc9a8f5b577c5c5, 0x121ccc410d6339f7}
	baseElementY = &gfP{0x146adf9eb7641e89, 0x9a54780d0e482cfb, 0x30a84b2641e87244, 0x208b1e9b9b11a98c}
	// montEncode each Fp element
	// Important to do since the field arithmetic uses montgomery encoding in the library
	montEncode(baseElementX, baseElementX)
	montEncode(baseElementY, baseElementY)
	baseElement = &gfP2{*baseElementX, *baseElementY}
	// We keep the expected result non encoded
	// Will need to decode the obtained result to be able to assert it with this
	// Sagemath:
	// baseElementToBigExp = baseElement ^ bigExponent
	// baseElementToBigExp => 7379142427977467878031119988604583496475317621776403696479934226513132928021*i + 17154720713365092794088637301427106756251681045968150072197181728711103784706
	// baseElementToBigExpXHex = 10507254ce787236 62cf3f84eb21adee 30ec827a799a519a 1464fc2ec9263c15
	// baseElementToBigExpYHex = 25ed3a53d558db9a 07da01cc9d10c5d5 ff7b1e4f41b874d7 debbc13409c8a702
	baseElementToBigExpX := &gfP{0x1464fc2ec9263c15, 0x30ec827a799a519a, 0x62cf3f84eb21adee, 0x10507254ce787236}
	baseElementToBigExpY := &gfP{0xdebbc13409c8a702, 0xff7b1e4f41b874d7, 0x07da01cc9d10c5d5, 0x25ed3a53d558db9a}
	baseElementToBigExp := &gfP2{*baseElementToBigExpX, *baseElementToBigExpY}
	// Expected result (obtained with sagemath, after some type conversions)
	w = &gfP2{}
	w = w.Set(baseElementToBigExp)
	// Result returned by the Exp function
	bigExp := bigFromBase10("379310603613032310218302470789826871295")
	h = &gfP2{}
	h = h.Set(baseElement)
	h = h.Exp(bigExp)
	// We decode the result of the exponentiation to be able to compare with the
	// non-encoded/sagemath generated expected result
	hDecoded = gfP2Decode(h)
	assert.Equal(t, *w, *hDecoded, "The result of the exponentiation is not coherent with the Sagemath test vector")
}
// TestSqrt checks gfP2.Sqrt on two quadratic residues (against Sagemath test
// vectors) and verifies that a non-residue is rejected with an error.
func TestSqrt(t *testing.T) {
	// Case 1: Valid QR
	// qr = 8192512702373747571754527085437364828369119615795326562285198594140975111129*i + 14719814144181528030533020377409648968040866053797156997322427920899698335369
	// This is a QR in Fp2
	qrXBig := bigFromBase10("8192512702373747571754527085437364828369119615795326562285198594140975111129")
	qrYBig := bigFromBase10("14719814144181528030533020377409648968040866053797156997322427920899698335369")
	qr := &gfP2{*newMontEncodedGFpFromBigInt(qrXBig), *newMontEncodedGFpFromBigInt(qrYBig)}
	res, err := qr.Sqrt()
	assert.NoError(t, err, "An error shouldn't be returned as we try to get the sqrt of a QR")
	// We decode the result of the squaring to compare the result with the Sagemath test vector
	// To get the sqrt of `r` in Sage, we run: `r.sqrt()`, and we get:
	// 838738240039331261565244756819667559640832302782323121523807597830118111128*i + 701115843855913009657260259360827182296091347204618857804078039211229345012
	resDecoded := gfP2Decode(res)
	expectedXBig := bigFromBase10("838738240039331261565244756819667559640832302782323121523807597830118111128")
	expectedYBig := bigFromBase10("701115843855913009657260259360827182296091347204618857804078039211229345012")
	expected := &gfP2{*newGFpFromBigInt(expectedXBig), *newGFpFromBigInt(expectedYBig)}
	assert.Equal(t, *expected, *resDecoded, "The result of the sqrt is not coherent with the Sagemath test vector")

	// Case 2: Valid QR
	// qr = -1 = 0 * i + 21888242871839275222246405745257275088696311157297823662689037894645226208582
	// The sqrt of qr is: sqrt = 21888242871839275222246405745257275088696311157297823662689037894645226208582 * i + 0
	qr = &gfP2{*newGFp(0), *newMontEncodedGFpFromBigInt(bigFromBase10("21888242871839275222246405745257275088696311157297823662689037894645226208582"))}
	res, err = qr.Sqrt()
	assert.NoError(t, err, "An error shouldn't be returned as we try to get the sqrt of a QR")
	resDecoded = gfP2Decode(res)
	expected = &gfP2{*newGFpFromBigInt(bigFromBase10("21888242871839275222246405745257275088696311157297823662689037894645226208582")), *newGFp(0)}
	assert.Equal(t, *expected, *resDecoded, "The result of the sqrt is not coherent with the Sagemath test vector")

	// Case 3: Get the sqrt of a QNR
	// qnr = 10142231111593789910248975994434553601587001629804098271704323146176084338608*i + 13558357083504759335548106329923635779485621365040524539176938811542516618464
	qnrXBig := bigFromBase10("10142231111593789910248975994434553601587001629804098271704323146176084338608")
	qnrYBig := bigFromBase10("13558357083504759335548106329923635779485621365040524539176938811542516618464")
	qnr := &gfP2{*newMontEncodedGFpFromBigInt(qnrXBig), *newMontEncodedGFpFromBigInt(qnrYBig)}
	res, err = qnr.Sqrt()
	assert.Error(t, err, "An error should have been returned as we try to get the sqrt of a QNR")
	assert.Nil(t, res, "The result of sqrt should be nil as we try to get the sqrt of a QNR")
}

213
cryptography/bn256/gfp6.go Normal file
View File

@ -0,0 +1,213 @@
package bn256
// For details of the algorithms used, see "Multiplication and Squaring on
// Pairing-Friendly Fields, Devegili et al.
// http://eprint.iacr.org/2006/471.pdf.

// gfP6 implements the field of size p⁶ as a cubic extension of gfP2 where
// τ³=ξ.
// NOTE(review): this header previously stated ξ=i+3, but gfP2.MulXi — the
// routine used everywhere for multiplication by ξ — multiplies by i+9, so
// ξ=i+9 here; confirm against the curve parameters.
type gfP6 struct {
	x, y, z gfP2 // value is xτ² + yτ + z
}

// String returns a textual "(x, y, z)" representation of e.
func (e *gfP6) String() string {
	return "(" + e.x.String() + ", " + e.y.String() + ", " + e.z.String() + ")"
}

// Set copies a into e and returns e.
func (e *gfP6) Set(a *gfP6) *gfP6 {
	e.x.Set(&a.x)
	e.y.Set(&a.y)
	e.z.Set(&a.z)
	return e
}

// SetZero sets e to the additive identity and returns e.
func (e *gfP6) SetZero() *gfP6 {
	e.x.SetZero()
	e.y.SetZero()
	e.z.SetZero()
	return e
}

// SetOne sets e to the multiplicative identity 0τ² + 0τ + 1 and returns e.
func (e *gfP6) SetOne() *gfP6 {
	e.x.SetZero()
	e.y.SetZero()
	e.z.SetOne()
	return e
}

// IsZero reports whether e is the additive identity.
func (e *gfP6) IsZero() bool {
	return e.x.IsZero() && e.y.IsZero() && e.z.IsZero()
}

// IsOne reports whether e is the multiplicative identity.
func (e *gfP6) IsOne() bool {
	return e.x.IsZero() && e.y.IsZero() && e.z.IsOne()
}

// Neg sets e = -a (all coefficients negated) and returns e.
func (e *gfP6) Neg(a *gfP6) *gfP6 {
	e.x.Neg(&a.x)
	e.y.Neg(&a.y)
	e.z.Neg(&a.z)
	return e
}

// Frobenius computes the p-power Frobenius of a: coefficients are conjugated
// (the p-power Frobenius of gfP2), then the τ² and τ coefficients are scaled
// by the precomputed constants ξ^((2p-2)/3) and ξ^((p-1)/3).
func (e *gfP6) Frobenius(a *gfP6) *gfP6 {
	e.x.Conjugate(&a.x)
	e.y.Conjugate(&a.y)
	e.z.Conjugate(&a.z)
	e.x.Mul(&e.x, xiTo2PMinus2Over3)
	e.y.Mul(&e.y, xiToPMinus1Over3)
	return e
}

// FrobeniusP2 computes (xτ²+yτ+z)^(p²) = xτ^(2p²) + yτ^(p²) + z
func (e *gfP6) FrobeniusP2(a *gfP6) *gfP6 {
	// τ^(2p²) = τ²τ^(2p²-2) = τ²ξ^((2p²-2)/3)
	e.x.MulScalar(&a.x, xiTo2PSquaredMinus2Over3)
	// τ^(p²) = ττ^(p²-1) = τξ^((p²-1)/3)
	e.y.MulScalar(&a.y, xiToPSquaredMinus1Over3)
	e.z.Set(&a.z)
	return e
}

// FrobeniusP4 computes the p⁴-power Frobenius (the same shape as FrobeniusP2
// with the two scalar constants swapped).
func (e *gfP6) FrobeniusP4(a *gfP6) *gfP6 {
	e.x.MulScalar(&a.x, xiToPSquaredMinus1Over3)
	e.y.MulScalar(&a.y, xiTo2PSquaredMinus2Over3)
	e.z.Set(&a.z)
	return e
}

// Add sets e = a+b (component-wise) and returns e.
func (e *gfP6) Add(a, b *gfP6) *gfP6 {
	e.x.Add(&a.x, &b.x)
	e.y.Add(&a.y, &b.y)
	e.z.Add(&a.z, &b.z)
	return e
}

// Sub sets e = a-b (component-wise) and returns e.
func (e *gfP6) Sub(a, b *gfP6) *gfP6 {
	e.x.Sub(&a.x, &b.x)
	e.y.Sub(&a.y, &b.y)
	e.z.Sub(&a.z, &b.z)
	return e
}
// Mul sets e = a·b using the Karatsuba method over the cubic extension
// (three coefficient products v0,v1,v2 plus cross terms, with τ³=ξ folded in
// via MulXi).
func (e *gfP6) Mul(a, b *gfP6) *gfP6 {
	// "Multiplication and Squaring on Pairing-Friendly Fields"
	// Section 4, Karatsuba method.
	// http://eprint.iacr.org/2006/471.pdf
	v0 := (&gfP2{}).Mul(&a.z, &b.z)
	v1 := (&gfP2{}).Mul(&a.y, &b.y)
	v2 := (&gfP2{}).Mul(&a.x, &b.x)
	t0 := (&gfP2{}).Add(&a.x, &a.y)
	t1 := (&gfP2{}).Add(&b.x, &b.y)
	tz := (&gfP2{}).Mul(t0, t1)
	tz.Sub(tz, v1).Sub(tz, v2).MulXi(tz).Add(tz, v0)
	t0.Add(&a.y, &a.z)
	t1.Add(&b.y, &b.z)
	ty := (&gfP2{}).Mul(t0, t1)
	t0.MulXi(v2)
	ty.Sub(ty, v0).Sub(ty, v1).Add(ty, t0)
	t0.Add(&a.x, &a.z)
	t1.Add(&b.x, &b.z)
	tx := (&gfP2{}).Mul(t0, t1)
	tx.Sub(tx, v0).Add(tx, v1).Sub(tx, v2)
	e.x.Set(tx)
	e.y.Set(ty)
	e.z.Set(tz)
	return e
}

// MulScalar multiplies every coefficient of a by the gfP2 scalar b.
func (e *gfP6) MulScalar(a *gfP6, b *gfP2) *gfP6 {
	e.x.Mul(&a.x, b)
	e.y.Mul(&a.y, b)
	e.z.Mul(&a.z, b)
	return e
}

// MulGFP multiplies every coefficient of a by the base-field scalar b.
func (e *gfP6) MulGFP(a *gfP6, b *gfP) *gfP6 {
	e.x.MulScalar(&a.x, b)
	e.y.MulScalar(&a.y, b)
	e.z.MulScalar(&a.z, b)
	return e
}

// MulTau computes τ·(aτ²+bτ+c) = bτ²+cτ+aξ
func (e *gfP6) MulTau(a *gfP6) *gfP6 {
	tz := (&gfP2{}).MulXi(&a.x)
	ty := (&gfP2{}).Set(&a.y) // saved before e.y is overwritten (a may alias e)
	e.y.Set(&a.z)
	e.x.Set(ty)
	e.z.Set(tz)
	return e
}

// Square sets e = a² (Chung–Hasan-style squaring; same structure as Mul with
// the three products replaced by squarings).
func (e *gfP6) Square(a *gfP6) *gfP6 {
	v0 := (&gfP2{}).Square(&a.z)
	v1 := (&gfP2{}).Square(&a.y)
	v2 := (&gfP2{}).Square(&a.x)
	c0 := (&gfP2{}).Add(&a.x, &a.y)
	c0.Square(c0).Sub(c0, v1).Sub(c0, v2).MulXi(c0).Add(c0, v0)
	c1 := (&gfP2{}).Add(&a.y, &a.z)
	c1.Square(c1).Sub(c1, v0).Sub(c1, v1)
	xiV2 := (&gfP2{}).MulXi(v2)
	c1.Add(c1, xiV2)
	c2 := (&gfP2{}).Add(&a.x, &a.z)
	c2.Square(c2).Sub(c2, v0).Add(c2, v1).Sub(c2, v2)
	e.x.Set(c2)
	e.y.Set(c1)
	e.z.Set(c0)
	return e
}

// Invert sets e = a⁻¹ and returns e.
func (e *gfP6) Invert(a *gfP6) *gfP6 {
	// See "Implementing cryptographic pairings", M. Scott, section 3.2.
	// ftp://136.206.11.249/pub/crypto/pairings.pdf

	// Here we can give a short explanation of how it works: let j be a cubic root of
	// unity in GF(p²) so that 1+j+j²=0.
	// Then (xτ² + yτ + z)(xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
	// = (xτ² + yτ + z)(Cτ²+Bτ+A)
	// = (x³ξ²+y³ξ+z³-3ξxyz) = F is an element of the base field (the norm).
	//
	// On the other hand (xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
	// = τ²(y²-ξxz) + τ(ξx²-yz) + (z²-ξxy)
	//
	// So that's why A = (z²-ξxy), B = (ξx²-yz), C = (y²-ξxz)
	t1 := (&gfP2{}).Mul(&a.x, &a.y)
	t1.MulXi(t1)
	A := (&gfP2{}).Square(&a.z)
	A.Sub(A, t1)
	B := (&gfP2{}).Square(&a.x)
	B.MulXi(B)
	t1.Mul(&a.y, &a.z)
	B.Sub(B, t1)
	C := (&gfP2{}).Square(&a.y)
	t1.Mul(&a.x, &a.z)
	C.Sub(C, t1)
	// F = C·y·ξ + A·z + B·x·ξ is the norm; the inverse is (Cτ²+Bτ+A)/F.
	F := (&gfP2{}).Mul(C, &a.y)
	F.MulXi(F)
	t1.Mul(A, &a.z)
	F.Add(F, t1)
	t1.Mul(B, &a.x).MulXi(t1)
	F.Add(F, t1)
	F.Invert(F)
	e.x.Mul(C, F)
	e.y.Mul(B, F)
	e.z.Mul(A, F)
	return e
}

View File

@ -0,0 +1,129 @@
// +build amd64,!generic

// amd64 implementations of the gfP field primitives. A gfP is four
// little-endian 64-bit words; ·p2 is the field modulus.

// storeBlock writes the four words a0..a3 to memory at r, r+8, r+16, r+24.
#define storeBlock(a0,a1,a2,a3, r) \
	MOVQ a0, 0+r \
	MOVQ a1, 8+r \
	MOVQ a2, 16+r \
	MOVQ a3, 24+r

// loadBlock reads the four words at r, r+8, r+16, r+24 into a0..a3.
#define loadBlock(r, a0,a1,a2,a3) \
	MOVQ 0+r, a0 \
	MOVQ 8+r, a1 \
	MOVQ 16+r, a2 \
	MOVQ 24+r, a3

// gfpCarry canonicalizes a 5-word value a0..a4: it computes b = a - p2 and,
// via conditional moves (branch-free), keeps b when the subtraction did not
// borrow and a otherwise.
#define gfpCarry(a0,a1,a2,a3,a4, b0,b1,b2,b3,b4) \
	\ // b = a-p
	MOVQ a0, b0 \
	MOVQ a1, b1 \
	MOVQ a2, b2 \
	MOVQ a3, b3 \
	MOVQ a4, b4 \
	\
	SUBQ ·p2+0(SB), b0 \
	SBBQ ·p2+8(SB), b1 \
	SBBQ ·p2+16(SB), b2 \
	SBBQ ·p2+24(SB), b3 \
	SBBQ $0, b4 \
	\
	\ // if b is negative then return a
	\ // else return b
	CMOVQCC b0, a0 \
	CMOVQCC b1, a1 \
	CMOVQCC b2, a2 \
	CMOVQCC b3, a3

#include "mul_amd64.h"
#include "mul_bmi2_amd64.h"

// func gfpNeg(c, a *gfP) — c = p2 - a, then canonicalized.
TEXT ·gfpNeg(SB),0,$0-16
	MOVQ ·p2+0(SB), R8
	MOVQ ·p2+8(SB), R9
	MOVQ ·p2+16(SB), R10
	MOVQ ·p2+24(SB), R11
	MOVQ a+8(FP), DI
	SUBQ 0(DI), R8
	SBBQ 8(DI), R9
	SBBQ 16(DI), R10
	SBBQ 24(DI), R11
	MOVQ $0, AX
	gfpCarry(R8,R9,R10,R11,AX, R12,R13,R14,R15,BX)
	MOVQ c+0(FP), DI
	storeBlock(R8,R9,R10,R11, 0(DI))
	RET

// func gfpAdd(c, a, b *gfP) — c = a + b mod p2; R12 catches the carry-out.
TEXT ·gfpAdd(SB),0,$0-24
	MOVQ a+8(FP), DI
	MOVQ b+16(FP), SI
	loadBlock(0(DI), R8,R9,R10,R11)
	MOVQ $0, R12
	ADDQ 0(SI), R8
	ADCQ 8(SI), R9
	ADCQ 16(SI), R10
	ADCQ 24(SI), R11
	ADCQ $0, R12
	gfpCarry(R8,R9,R10,R11,R12, R13,R14,R15,AX,BX)
	MOVQ c+0(FP), DI
	storeBlock(R8,R9,R10,R11, 0(DI))
	RET

// func gfpSub(c, a, b *gfP) — c = a - b; when the subtraction borrows, p2
// (kept in R12..R15, zeroed otherwise via CMOVQCC) is added back.
TEXT ·gfpSub(SB),0,$0-24
	MOVQ a+8(FP), DI
	MOVQ b+16(FP), SI
	loadBlock(0(DI), R8,R9,R10,R11)
	MOVQ ·p2+0(SB), R12
	MOVQ ·p2+8(SB), R13
	MOVQ ·p2+16(SB), R14
	MOVQ ·p2+24(SB), R15
	MOVQ $0, AX
	SUBQ 0(SI), R8
	SBBQ 8(SI), R9
	SBBQ 16(SI), R10
	SBBQ 24(SI), R11
	CMOVQCC AX, R12
	CMOVQCC AX, R13
	CMOVQCC AX, R14
	CMOVQCC AX, R15
	ADDQ R12, R8
	ADCQ R13, R9
	ADCQ R14, R10
	ADCQ R15, R11
	MOVQ c+0(FP), DI
	storeBlock(R8,R9,R10,R11, 0(DI))
	RET

// func gfpMul(c, a, b *gfP) — Montgomery multiplication; dispatches to the
// MULX (BMI2) path when available, else the plain MULQ path.
TEXT ·gfpMul(SB),0,$160-24
	MOVQ a+8(FP), DI
	MOVQ b+16(FP), SI
	// Jump to a slightly different implementation if MULX isn't supported.
	CMPB ·hasBMI2(SB), $0
	JE nobmi2Mul
	mulBMI2(0(DI),8(DI),16(DI),24(DI), 0(SI))
	storeBlock( R8, R9,R10,R11, 0(SP))
	storeBlock(R12,R13,R14,R15, 32(SP))
	gfpReduceBMI2()
	JMP end
nobmi2Mul:
	mul(0(DI),8(DI),16(DI),24(DI), 0(SI), 0(SP))
	gfpReduce(0(SP))
end:
	MOVQ c+0(FP), DI
	storeBlock(R12,R13,R14,R15, 0(DI))
	RET

View File

@ -0,0 +1,113 @@
// +build arm64,!generic

// arm64 implementations of the gfP field primitives. A gfP is four
// little-endian 64-bit words; ·p2 is the field modulus.

// storeBlock writes the four words a0..a3 to memory at r, r+8, r+16, r+24.
#define storeBlock(a0,a1,a2,a3, r) \
	MOVD a0, 0+r \
	MOVD a1, 8+r \
	MOVD a2, 16+r \
	MOVD a3, 24+r

// loadBlock reads the four words at r, r+8, r+16, r+24 into a0..a3.
#define loadBlock(r, a0,a1,a2,a3) \
	MOVD 0+r, a0 \
	MOVD 8+r, a1 \
	MOVD 16+r, a2 \
	MOVD 24+r, a3

// loadModulus loads the four words of p2 into p0..p3.
#define loadModulus(p0,p1,p2,p3) \
	MOVD ·p2+0(SB), p0 \
	MOVD ·p2+8(SB), p1 \
	MOVD ·p2+16(SB), p2 \
	MOVD ·p2+24(SB), p3

#include "mul_arm64.h"

// func gfpNeg(c, a *gfP) — c = p2 - a, then conditionally reduced by p2
// (branch-free via CSEL on the carry flag).
TEXT ·gfpNeg(SB),0,$0-16
	MOVD a+8(FP), R0
	loadBlock(0(R0), R1,R2,R3,R4)
	loadModulus(R5,R6,R7,R8)
	SUBS R1, R5, R1
	SBCS R2, R6, R2
	SBCS R3, R7, R3
	SBCS R4, R8, R4
	SUBS R5, R1, R5
	SBCS R6, R2, R6
	SBCS R7, R3, R7
	SBCS R8, R4, R8
	CSEL CS, R5, R1, R1
	CSEL CS, R6, R2, R2
	CSEL CS, R7, R3, R3
	CSEL CS, R8, R4, R4
	MOVD c+0(FP), R0
	storeBlock(R1,R2,R3,R4, 0(R0))
	RET

// func gfpAdd(c, a, b *gfP) — c = a + b mod p2; R0 tracks the carry-out and
// CSEL picks the reduced value when the trial subtraction of p2 succeeds.
TEXT ·gfpAdd(SB),0,$0-24
	MOVD a+8(FP), R0
	loadBlock(0(R0), R1,R2,R3,R4)
	MOVD b+16(FP), R0
	loadBlock(0(R0), R5,R6,R7,R8)
	loadModulus(R9,R10,R11,R12)
	MOVD ZR, R0
	ADDS R5, R1
	ADCS R6, R2
	ADCS R7, R3
	ADCS R8, R4
	ADCS ZR, R0
	SUBS R9, R1, R5
	SBCS R10, R2, R6
	SBCS R11, R3, R7
	SBCS R12, R4, R8
	SBCS ZR, R0, R0
	CSEL CS, R5, R1, R1
	CSEL CS, R6, R2, R2
	CSEL CS, R7, R3, R3
	CSEL CS, R8, R4, R4
	MOVD c+0(FP), R0
	storeBlock(R1,R2,R3,R4, 0(R0))
	RET

// func gfpSub(c, a, b *gfP) — c = a - b; when the subtraction borrows, the
// modulus words (zeroed otherwise via CSEL) are added back.
TEXT ·gfpSub(SB),0,$0-24
	MOVD a+8(FP), R0
	loadBlock(0(R0), R1,R2,R3,R4)
	MOVD b+16(FP), R0
	loadBlock(0(R0), R5,R6,R7,R8)
	loadModulus(R9,R10,R11,R12)
	SUBS R5, R1
	SBCS R6, R2
	SBCS R7, R3
	SBCS R8, R4
	CSEL CS, ZR, R9, R9
	CSEL CS, ZR, R10, R10
	CSEL CS, ZR, R11, R11
	CSEL CS, ZR, R12, R12
	ADDS R9, R1
	ADCS R10, R2
	ADCS R11, R3
	ADCS R12, R4
	MOVD c+0(FP), R0
	storeBlock(R1,R2,R3,R4, 0(R0))
	RET

// func gfpMul(c, a, b *gfP) — Montgomery multiplication via the mul/gfpReduce
// macros from mul_arm64.h.
TEXT ·gfpMul(SB),0,$0-24
	MOVD a+8(FP), R0
	loadBlock(0(R0), R1,R2,R3,R4)
	MOVD b+16(FP), R0
	loadBlock(0(R0), R5,R6,R7,R8)
	mul(R9,R10,R11,R12,R13,R14,R15,R16)
	gfpReduce()
	MOVD c+0(FP), R0
	storeBlock(R1,R2,R3,R4, 0(R0))
	RET

View File

@ -0,0 +1,25 @@
// +build amd64,!generic arm64,!generic
package bn256
// This file contains forward declarations for the architecture-specific
// assembly implementations of these functions, provided that they exist.
import (
"golang.org/x/sys/cpu"
)
//nolint:varcheck
var hasBMI2 = cpu.X86.HasBMI2

// Forward declarations for the assembly implementations. Each carries the
// //go:noescape directive so the compiler knows the pointer arguments do not
// escape through the assembly routines.
//
// Fix: the directive on gfpNeg was written "// go:noescape" (with a space
// after //), which the compiler treats as an ordinary comment and ignores —
// directives must begin exactly with "//go:". Normalized to match the other
// three declarations.

//go:noescape
func gfpNeg(c, a *gfP)

//go:noescape
func gfpAdd(c, a, b *gfP)

//go:noescape
func gfpSub(c, a, b *gfP)

//go:noescape
func gfpMul(c, a, b *gfP)

View File

@ -0,0 +1,209 @@
// +build !amd64,!arm64 generic
package bn256
// gfpCarry canonicalizes a after an add/neg/sub: it computes b = a - p2
// (borrow tracked word by word) and then, branch-free, keeps b when the
// subtraction did not underflow and a otherwise. head is the carry-out of
// the preceding operation (the conceptual fifth word of a).
func gfpCarry(a *gfP, head uint64) {
	b := &gfP{}
	var carry uint64
	for i, pi := range p2 {
		ai := a[i]
		bi := ai - pi - carry
		b[i] = bi
		// Borrow-out of the 64-bit subtraction, derived from the sign bits.
		carry = (pi&^ai | (pi|^ai)&bi) >> 63
	}
	// A set head word means a >= 2^256 > p2, so the subtraction is valid
	// regardless of the final borrow.
	carry = carry &^ head
	// If b is negative, then return a.
	// Else return b.
	carry = -carry // all-ones mask when a < p2, zero otherwise
	ncarry := ^carry
	for i := 0; i < 4; i++ {
		a[i] = (a[i] & carry) | (b[i] & ncarry)
	}
}

// gfpNeg computes c = -a mod p2, as p2 - a followed by canonicalization.
func gfpNeg(c, a *gfP) {
	var carry uint64
	for i, pi := range p2 { // p2 being the prime that defines the base/prime field
		ai := a[i]
		ci := pi - ai - carry
		c[i] = ci
		carry = (ai&^pi | (ai|^pi)&ci) >> 63
	}
	gfpCarry(c, 0)
}

// gfpAdd computes c = a + b mod p2.
func gfpAdd(c, a, b *gfP) {
	var carry uint64
	for i, ai := range a {
		bi := b[i]
		ci := ai + bi + carry
		c[i] = ci
		// Carry-out of the 64-bit addition, derived from the sign bits.
		carry = (ai&bi | (ai|bi)&^ci) >> 63
	}
	gfpCarry(c, carry)
}

// gfpSub computes c = a - b mod p2, evaluated as a + (p2 - b) to avoid
// underflow before the final reduction.
func gfpSub(c, a, b *gfP) {
	t := &gfP{}
	var carry uint64
	for i, pi := range p2 {
		bi := b[i]
		ti := pi - bi - carry
		t[i] = ti
		carry = (bi&^pi | (bi|^pi)&ti) >> 63
	}
	carry = 0
	for i, ai := range a {
		ti := t[i]
		ci := ai + ti + carry
		c[i] = ci
		carry = (ai&ti | (ai|ti)&^ci) >> 63
	}
	gfpCarry(c, carry)
}
// mul returns the full 512-bit product of the 256-bit values a and b as eight
// little-endian 64-bit words. Partial products are accumulated in a buffer of
// columns offset by 16 bits and then folded together with explicit carry
// propagation, so no intermediate ever overflows 64 bits.
func mul(a, b [4]uint64) [8]uint64 {
	const (
		mask16 uint64 = 0x0000ffff
		mask32 uint64 = 0xffffffff
	)

	var acc [32]uint64
	for i, aw := range a {
		// Split this word of a into four 16-bit limbs.
		w0, w1, w2, w3 := aw&mask16, (aw>>16)&mask16, (aw>>32)&mask16, aw>>48
		for j, bw := range b {
			// Split the word of b into two 32-bit halves.
			lo, hi := bw&mask32, bw>>32
			base := 4 * (i + j)
			acc[base+0] += w0 * lo
			acc[base+1] += w1 * lo
			acc[base+2] += w2*lo + w0*hi
			acc[base+3] += w3*lo + w1*hi
			acc[base+4] += w2 * hi
			acc[base+5] += w3 * hi
		}
	}

	// Fold the shifted columns back into the base slots, carrying between
	// 64-bit result words as we go.
	for i := uint(1); i < 4; i++ {
		shift := 16 * i
		var head, carry uint64
		for j := uint(0); j < 8; j++ {
			slot := 4 * j
			x := acc[slot]
			y := (acc[slot+i] << shift) + head
			z := x + y + carry
			acc[slot] = z
			carry = (x&y | (x|y)&^z) >> 63
			head = acc[slot+i] >> (64 - shift)
		}
	}

	return [8]uint64{acc[0], acc[4], acc[8], acc[12], acc[16], acc[20], acc[24], acc[28]}
}
// halfMul is like mul but returns only the low 256 bits of the product
// (limb pairs with i+j > 3 are skipped entirely). It is used by gfpMul to
// form m = T·np mod 2^256 during Montgomery reduction, where the high half
// is discarded anyway.
func halfMul(a, b [4]uint64) [4]uint64 {
    const (
        mask16 uint64 = 0x0000ffff
        mask32 uint64 = 0xffffffff
    )
    var buff [18]uint64
    for i, ai := range a {
        a0, a1, a2, a3 := ai&mask16, (ai>>16)&mask16, (ai>>32)&mask16, ai>>48
        for j, bj := range b {
            // Partial products entirely above bit 255 are not needed.
            if i+j > 3 {
                break
            }
            b0, b2 := bj&mask32, bj>>32
            off := 4 * (i + j)
            buff[off+0] += a0 * b0
            buff[off+1] += a1 * b0
            buff[off+2] += a2*b0 + a0*b2
            buff[off+3] += a3*b0 + a1*b2
            buff[off+4] += a2 * b2
            buff[off+5] += a3 * b2
        }
    }
    // Same shift-and-add recombination as mul, restricted to the low four
    // result limbs.
    for i := uint(1); i < 4; i++ {
        shift := 16 * i
        var head, carry uint64
        for j := uint(0); j < 4; j++ {
            block := 4 * j
            xi := buff[block]
            yi := (buff[block+i] << shift) + head
            zi := xi + yi + carry
            buff[block] = zi
            carry = (xi&yi | (xi|yi)&^zi) >> 63
            head = buff[block+i] >> (64 - shift)
        }
    }
    return [4]uint64{buff[0], buff[4], buff[8], buff[12]}
}
// gfpMul sets c = a·b in Montgomery form using REDC:
// T = a·b (512 bits); m = (T mod R)·np mod R (low half only, via halfMul);
// t = m·p2; then c = (T + t) / R, realized by keeping the upper four limbs
// of the sum. gfpCarry performs the final conditional subtraction of p.
func gfpMul(c, a, b *gfP) {
    T := mul(*a, *b)
    m := halfMul([4]uint64{T[0], T[1], T[2], T[3]}, np)
    t := mul([4]uint64{m[0], m[1], m[2], m[3]}, p2)
    var carry uint64
    for i, Ti := range T {
        ti := t[i]
        zi := Ti + ti + carry
        T[i] = zi
        carry = (Ti&ti | (Ti|ti)&^zi) >> 63
    }
    // Dividing by R = 2^256 is a four-limb shift: keep T[4..7].
    *c = gfP{T[4], T[5], T[6], T[7]}
    gfpCarry(c, carry)
}
// Util function to compare field elements. Should be defined in gfP files
// Compares 2 GFp elements
// Returns 1 if a > b; 0 if a == b; -1 if a < b
/*
func gfpCmp(a, b *gfP) int {
for i := FpUint64Size - 1; i >= 0; i-- { // Remember that the gfP elements are written as little-endian 64-bit words
if a[i] > b[i] { // As soon as we figure out that the MSByte of A > MSByte of B, we return
return 1
} else if a[i] == b[i] { // If the current bytes are equal we continue as we cannot conclude on A and B relation
continue
} else { // a[i] < b[i] so we can directly conclude and we return
return -1
}
}
return 0
}
*/
// TODO: Optimize these functions as for now all it's doing is to convert in big.Int
// and use big integer arithmetic
// Computes c = a^{exp} in Fp (so mod p)
/*
func gfpExp(a *gfP, exp, mod *big.Int) *gfP {
// Convert the field elements to big.Int
aBig := a.gFpToBigInt()
// Run the big.Int Exp algorithm
resBig := new(big.Int).Exp(aBig, exp, mod)
// Convert the big.Int result back to field element
res := newGFpFromBigInt(resBig)
return res
}
*/

View File

@ -0,0 +1,116 @@
package bn256
import (
"testing"
)
// Tests that negation works the same way on both assembly-optimized and pure Go
// implementation.
func TestGFpNeg(t *testing.T) {
    // n is an arbitrary fixed limb pattern; w is the hard-coded expected
    // value of -n in the field representation.
    n := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
    w := &gfP{0xfedcba9876543211, 0x0123456789abcdef, 0x2152411021524110, 0x0114251201142512}
    h := &gfP{}
    gfpNeg(h, n)
    if *h != *w {
        t.Errorf("negation mismatch: have %#x, want %#x", *h, *w)
    }
}
// Tests that addition works the same way on both assembly-optimized and pure Go
// implementation.
func TestGFpAdd(t *testing.T) {
    // Fixed operands with a hard-coded expected sum in field representation.
    a := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
    b := &gfP{0xfedcba9876543210, 0x0123456789abcdef, 0xfeebdaedfeebdaed, 0xdeadbeefdeadbeef}
    w := &gfP{0xc3df73e9278302b8, 0x687e956e978e3572, 0x254954275c18417f, 0xad354b6afc67f9b4}
    h := &gfP{}
    gfpAdd(h, a, b)
    if *h != *w {
        t.Errorf("addition mismatch: have %#x, want %#x", *h, *w)
    }
}
// Tests that subtraction works the same way on both assembly-optimized and pure Go
// implementation.
func TestGFpSub(t *testing.T) {
    // Fixed operands with a hard-coded expected difference.
    a := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
    b := &gfP{0xfedcba9876543210, 0x0123456789abcdef, 0xfeebdaedfeebdaed, 0xdeadbeefdeadbeef}
    w := &gfP{0x02468acf13579bdf, 0xfdb97530eca86420, 0xdfc1e401dfc1e402, 0x203e1bfe203e1bfd}
    h := &gfP{}
    gfpSub(h, a, b)
    if *h != *w {
        t.Errorf("subtraction mismatch: have %#x, want %#x", *h, *w)
    }
}
// Tests that multiplication works the same way on both assembly-optimized and pure Go
// implementation.
func TestGFpMul(t *testing.T) {
    // Fixed operands with a hard-coded expected Montgomery product.
    a := &gfP{0x0123456789abcdef, 0xfedcba9876543210, 0xdeadbeefdeadbeef, 0xfeebdaedfeebdaed}
    b := &gfP{0xfedcba9876543210, 0x0123456789abcdef, 0xfeebdaedfeebdaed, 0xdeadbeefdeadbeef}
    w := &gfP{0xcbcbd377f7ad22d3, 0x3b89ba5d849379bf, 0x87b61627bd38b6d2, 0xc44052a2a0e654b2}
    h := &gfP{}
    gfpMul(h, a, b)
    if *h != *w {
        t.Errorf("multiplication mismatch: have %#x, want %#x", *h, *w)
    }
}
// Tests the conversion from big.Int to GFp element
func TestNewGFpFromBigInt(t *testing.T) {
    // Case 1: a small value (2) and its little-endian limb form.
    twoBig := bigFromBase10("2")
    h := *newGFpFromBigInt(twoBig)
    twoHex := [4]uint64{0x0000000000000002, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000}
    w := gfP(twoHex)
    if h != w {
        t.Errorf("conversion mismatch: have %s, want %s", h.String(), w.String())
    }

    // Case 2: a full-width value (decimal) and its little-endian limb form.
    pMinus1Big := bigFromBase10("21888242871839275222246405745257275088696311157297823662689037894645226208582")
    h = *newGFpFromBigInt(pMinus1Big)
    pMinus1Hex := [4]uint64{0x3c208c16d87cfd46, 0x97816a916871ca8d, 0xb85045b68181585d, 0x30644e72e131a029}
    w = gfP(pMinus1Hex)
    if h != w {
        t.Errorf("conversion mismatch: have %s, want %s", h.String(), w.String())
    }
}
// Tests the conversion from GFp element to big.Int
func TestGFpToBigInt(t *testing.T) {
    // Case 1: small value round-trip; the limbs are raw, not MontEncoded.
    twoHex := [4]uint64{0x0000000000000002, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000}
    twoBig := bigFromBase10("2")
    twoGFp := gfP(twoHex) // Not MontEncoded!
    w := twoBig
    h, err := twoGFp.gFpToBigInt()
    if err != nil {
        t.Errorf("Couldn't convert GFp to big.Int: %s", err)
    }
    if r := h.Cmp(w); r != 0 {
        t.Errorf("conversion mismatch: have %s, want %s", h.String(), w.String())
    }

    // Case 2: full-width value round-trip.
    pMinus1Hex := [4]uint64{0x3c208c16d87cfd46, 0x97816a916871ca8d, 0xb85045b68181585d, 0x30644e72e131a029}
    pMinus1Big := bigFromBase10("21888242871839275222246405745257275088696311157297823662689037894645226208582")
    pMinus1GFp := gfP(pMinus1Hex) // Not MontEncoded!
    w = pMinus1Big
    h, err = pMinus1GFp.gFpToBigInt()
    if err != nil {
        t.Errorf("Couldn't convert GFp to big.Int: %s", err)
    }
    if r := h.Cmp(w); r != 0 {
        t.Errorf("conversion mismatch: have %s, want %s", h.String(), w.String())
    }
}

View File

@ -0,0 +1,115 @@
package bn256
import (
"math/big"
)
// half is Order/2, used by round as the threshold for rounding up.
var half = new(big.Int).Rsh(Order, 1)

// curveLattice is a 2-dimensional lattice used by decompose to split a
// scalar mod Order into two short positive components for multiplication
// in the curve group.
var curveLattice = &lattice{
    vectors: [][]*big.Int{
        {bigFromBase10("147946756881789319000765030803803410728"), bigFromBase10("147946756881789319010696353538189108491")},
        {bigFromBase10("147946756881789319020627676272574806254"), bigFromBase10("-147946756881789318990833708069417712965")},
    },
    inverse: []*big.Int{
        bigFromBase10("147946756881789318990833708069417712965"),
        bigFromBase10("147946756881789319010696353538189108491"),
    },
    det: bigFromBase10("43776485743678550444492811490514550177096728800832068687396408373151616991234"),
}
// targetLattice is a 4-dimensional lattice used by decompose to split a
// scalar mod Order into four short positive components; its determinant is
// Order itself.
var targetLattice = &lattice{
    vectors: [][]*big.Int{
        {bigFromBase10("9931322734385697761"), bigFromBase10("9931322734385697761"), bigFromBase10("9931322734385697763"), bigFromBase10("9931322734385697764")},
        {bigFromBase10("4965661367192848881"), bigFromBase10("4965661367192848881"), bigFromBase10("4965661367192848882"), bigFromBase10("-9931322734385697762")},
        {bigFromBase10("-9931322734385697762"), bigFromBase10("-4965661367192848881"), bigFromBase10("4965661367192848881"), bigFromBase10("-4965661367192848882")},
        {bigFromBase10("9931322734385697763"), bigFromBase10("-4965661367192848881"), bigFromBase10("-4965661367192848881"), bigFromBase10("-4965661367192848881")},
    },
    inverse: []*big.Int{
        bigFromBase10("734653495049373973658254490726798021314063399421879442165"),
        bigFromBase10("147946756881789319000765030803803410728"),
        bigFromBase10("-147946756881789319005730692170996259609"),
        bigFromBase10("1469306990098747947464455738335385361643788813749140841702"),
    },
    det: new(big.Int).Set(Order),
}
// lattice holds a basis (vectors), precomputed coefficients used for
// Babai rounding in decompose (inverse), and the basis determinant (det).
type lattice struct {
    vectors [][]*big.Int // basis vectors, one row per dimension
    inverse []*big.Int   // numerators of the rounding coefficients; divided by det
    det     *big.Int     // determinant of the basis
}
// decompose takes a scalar mod Order as input and finds a short, positive decomposition of it wrt to the lattice basis.
func (l *lattice) decompose(k *big.Int) []*big.Int {
    n := len(l.inverse)

    // Calculate closest vector in lattice to <k,0,0,...> with Babai's rounding.
    c := make([]*big.Int, n)
    for i := 0; i < n; i++ {
        c[i] = new(big.Int).Mul(k, l.inverse[i])
        round(c[i], l.det)
    }

    // Transform vectors according to c and subtract <k,0,0,...>.
    out := make([]*big.Int, n)
    temp := new(big.Int)

    for i := 0; i < n; i++ {
        out[i] = new(big.Int)

        for j := 0; j < n; j++ {
            temp.Mul(c[j], l.vectors[j][i])
            out[i].Add(out[i], temp)
        }

        out[i].Neg(out[i])
        // Shift by 2*vectors[0] to bias the components positive while
        // keeping them short.
        out[i].Add(out[i], l.vectors[0][i]).Add(out[i], l.vectors[0][i])
    }
    out[0].Add(out[0], k)

    return out
}
// Precompute drives table construction for multi-scalar multiplication:
// for each dimension bit it invokes add(bit, idx) on every table index idx
// whose bit-th bit is set, in the same order as a nested index scan.
func (l *lattice) Precompute(add func(i, j uint)) {
    dim := uint(len(l.vectors))
    tableSize := uint(1) << dim
    for bit := uint(0); bit < dim; bit++ {
        for idx := uint(0); idx < tableSize; idx++ {
            if idx&(uint(1)<<bit) != 0 {
                add(bit, idx)
            }
        }
    }
}
// Multi decomposes scalar via l.decompose and interleaves the component
// bits into a joint table: bit j of out[i] is bit i of the j-th
// sub-scalar, so each entry selects a precomputed point combination.
func (l *lattice) Multi(scalar *big.Int) []uint8 {
    decomp := l.decompose(scalar)

    // The table length is governed by the widest component.
    maxLen := 0
    for _, x := range decomp {
        if x.BitLen() > maxLen {
            maxLen = x.BitLen()
        }
    }

    out := make([]uint8, maxLen)
    for j, x := range decomp {
        for i := 0; i < maxLen; i++ {
            out[i] += uint8(x.Bit(i)) << uint(j)
        }
    }

    return out
}
// round replaces num with num/denom, rounded up when the remainder exceeds
// the package-level half threshold (Order/2).
func round(num, denom *big.Int) {
    rem := new(big.Int)
    num.DivMod(num, denom, rem)
    if rem.Cmp(half) > 0 {
        num.Add(num, big.NewInt(1))
    }
}

View File

@ -0,0 +1,29 @@
package bn256
import (
"crypto/rand"
"testing"
)
// Checks that a random scalar decomposes over curveLattice into two
// positive components of at most ~128 bits.
func TestLatticeReduceCurve(t *testing.T) {
    k, _ := rand.Int(rand.Reader, Order)
    ks := curveLattice.decompose(k)

    if ks[0].BitLen() > 130 || ks[1].BitLen() > 130 {
        t.Fatal("reduction too large")
    } else if ks[0].Sign() < 0 || ks[1].Sign() < 0 {
        t.Fatal("reduction must be positive")
    }
}
// Checks that a random scalar decomposes over targetLattice into four
// positive components of at most ~64 bits.
func TestLatticeReduceTarget(t *testing.T) {
    k, _ := rand.Int(rand.Reader, Order)
    ks := targetLattice.decompose(k)

    if ks[0].BitLen() > 66 || ks[1].BitLen() > 66 || ks[2].BitLen() > 66 || ks[3].BitLen() > 66 {
        t.Fatal("reduction too large")
    } else if ks[0].Sign() < 0 || ks[1].Sign() < 0 || ks[2].Sign() < 0 || ks[3].Sign() < 0 {
        t.Fatal("reduction must be positive")
    }
}

View File

@ -0,0 +1,71 @@
package bn256
import (
"testing"
"crypto/rand"
)
// Smoke-tests RandomG2: generates ten random G2 points and logs their
// marshaled form; only generation errors fail the test.
func TestRandomG2Marshal(t *testing.T) {
    for i := 0; i < 10; i++ {
        n, g2, err := RandomG2(rand.Reader)
        if err != nil {
            t.Error(err)
            continue
        }
        t.Logf("%v: %x\n", n, g2.Marshal())
    }
}
// Exercises bilinearity, non-degeneracy and order properties of the
// pairing, plus PairingCheck on both a valid and an invalid relation.
func TestPairings(t *testing.T) {
    a1 := new(G1).ScalarBaseMult(bigFromBase10("1"))
    a2 := new(G1).ScalarBaseMult(bigFromBase10("2"))
    a37 := new(G1).ScalarBaseMult(bigFromBase10("37"))
    // NOTE(review): the big decimal literal is presumably Order-1, i.e. the
    // scalar -1 — confirm against the Order constant.
    an1 := new(G1).ScalarBaseMult(bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495616"))
    b0 := new(G2).ScalarBaseMult(bigFromBase10("0"))
    b1 := new(G2).ScalarBaseMult(bigFromBase10("1"))
    b2 := new(G2).ScalarBaseMult(bigFromBase10("2"))
    b27 := new(G2).ScalarBaseMult(bigFromBase10("27"))
    b999 := new(G2).ScalarBaseMult(bigFromBase10("999"))
    bn1 := new(G2).ScalarBaseMult(bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495616"))
    p1 := Pair(a1, b1)
    pn1 := Pair(a1, bn1)
    np1 := Pair(an1, b1)
    if pn1.String() != np1.String() {
        t.Error("Pairing mismatch: e(a, -b) != e(-a, b)")
    }
    if !PairingCheck([]*G1{a1, an1}, []*G2{b1, b1}) {
        t.Error("MultiAte check gave false negative!")
    }
    p0 := new(GT).Add(p1, pn1)
    p0_2 := Pair(a1, b0)
    if p0.String() != p0_2.String() {
        t.Error("Pairing mismatch: e(a, b) * e(a, -b) != 1")
    }
    // Raising to the group order (presumably the literal below) must give
    // the identity.
    p0_3 := new(GT).ScalarMult(p1, bigFromBase10("21888242871839275222246405745257275088548364400416034343698204186575808495617"))
    if p0.String() != p0_3.String() {
        t.Error("Pairing mismatch: e(a, b) has wrong order")
    }
    p2 := Pair(a2, b1)
    p2_2 := Pair(a1, b2)
    p2_3 := new(GT).ScalarMult(p1, bigFromBase10("2"))
    if p2.String() != p2_2.String() {
        t.Error("Pairing mismatch: e(a, b * 2) != e(a * 2, b)")
    }
    if p2.String() != p2_3.String() {
        t.Error("Pairing mismatch: e(a, b * 2) != e(a, b) ** 2")
    }
    if p2.String() == p1.String() {
        t.Error("Pairing is degenerate!")
    }
    if PairingCheck([]*G1{a1, a1}, []*G2{b1, b1}) {
        t.Error("MultiAte check gave false positive!")
    }
    p999 := Pair(a37, b27)
    p999_2 := Pair(a1, b999)
    if p999.String() != p999_2.String() {
        t.Error("Pairing mismatch: e(a * 37, b * 27) != e(a, b * 999)")
    }
}

View File

@ -0,0 +1,181 @@
// mul multiplies the 256-bit value given by the four limb operands a0..a3
// by the four limbs at rb (schoolbook, one MULQ per limb pair), writing
// the 512-bit product to 0+stack..56+stack. Clobbers AX, DX, R8-R12.
#define mul(a0,a1,a2,a3, rb, stack) \
	MOVQ a0, AX \
	MULQ 0+rb \
	MOVQ AX, R8 \
	MOVQ DX, R9 \
	MOVQ a0, AX \
	MULQ 8+rb \
	ADDQ AX, R9 \
	ADCQ $0, DX \
	MOVQ DX, R10 \
	MOVQ a0, AX \
	MULQ 16+rb \
	ADDQ AX, R10 \
	ADCQ $0, DX \
	MOVQ DX, R11 \
	MOVQ a0, AX \
	MULQ 24+rb \
	ADDQ AX, R11 \
	ADCQ $0, DX \
	MOVQ DX, R12 \
	\
	storeBlock(R8,R9,R10,R11, 0+stack) \
	MOVQ R12, 32+stack \
	\
	MOVQ a1, AX \
	MULQ 0+rb \
	MOVQ AX, R8 \
	MOVQ DX, R9 \
	MOVQ a1, AX \
	MULQ 8+rb \
	ADDQ AX, R9 \
	ADCQ $0, DX \
	MOVQ DX, R10 \
	MOVQ a1, AX \
	MULQ 16+rb \
	ADDQ AX, R10 \
	ADCQ $0, DX \
	MOVQ DX, R11 \
	MOVQ a1, AX \
	MULQ 24+rb \
	ADDQ AX, R11 \
	ADCQ $0, DX \
	MOVQ DX, R12 \
	\
	ADDQ 8+stack, R8 \
	ADCQ 16+stack, R9 \
	ADCQ 24+stack, R10 \
	ADCQ 32+stack, R11 \
	ADCQ $0, R12 \
	storeBlock(R8,R9,R10,R11, 8+stack) \
	MOVQ R12, 40+stack \
	\
	MOVQ a2, AX \
	MULQ 0+rb \
	MOVQ AX, R8 \
	MOVQ DX, R9 \
	MOVQ a2, AX \
	MULQ 8+rb \
	ADDQ AX, R9 \
	ADCQ $0, DX \
	MOVQ DX, R10 \
	MOVQ a2, AX \
	MULQ 16+rb \
	ADDQ AX, R10 \
	ADCQ $0, DX \
	MOVQ DX, R11 \
	MOVQ a2, AX \
	MULQ 24+rb \
	ADDQ AX, R11 \
	ADCQ $0, DX \
	MOVQ DX, R12 \
	\
	ADDQ 16+stack, R8 \
	ADCQ 24+stack, R9 \
	ADCQ 32+stack, R10 \
	ADCQ 40+stack, R11 \
	ADCQ $0, R12 \
	storeBlock(R8,R9,R10,R11, 16+stack) \
	MOVQ R12, 48+stack \
	\
	MOVQ a3, AX \
	MULQ 0+rb \
	MOVQ AX, R8 \
	MOVQ DX, R9 \
	MOVQ a3, AX \
	MULQ 8+rb \
	ADDQ AX, R9 \
	ADCQ $0, DX \
	MOVQ DX, R10 \
	MOVQ a3, AX \
	MULQ 16+rb \
	ADDQ AX, R10 \
	ADCQ $0, DX \
	MOVQ DX, R11 \
	MOVQ a3, AX \
	MULQ 24+rb \
	ADDQ AX, R11 \
	ADCQ $0, DX \
	MOVQ DX, R12 \
	\
	ADDQ 24+stack, R8 \
	ADCQ 32+stack, R9 \
	ADCQ 40+stack, R10 \
	ADCQ 48+stack, R11 \
	ADCQ $0, R12 \
	storeBlock(R8,R9,R10,R11, 24+stack) \
	MOVQ R12, 56+stack
// gfpReduce Montgomery-reduces the 512-bit value at 0+stack..56+stack:
// m = (T·np) mod 2^256 is formed in R8:R9:R10:R11 (high partial products
// are dropped), m·p2 is computed at 96+stack, T is added, and gfpCarry
// performs the final conditional subtraction.
#define gfpReduce(stack) \
	\ // m = (T * N') mod R, store m in R8:R9:R10:R11
	MOVQ ·np+0(SB), AX \
	MULQ 0+stack \
	MOVQ AX, R8 \
	MOVQ DX, R9 \
	MOVQ ·np+0(SB), AX \
	MULQ 8+stack \
	ADDQ AX, R9 \
	ADCQ $0, DX \
	MOVQ DX, R10 \
	MOVQ ·np+0(SB), AX \
	MULQ 16+stack \
	ADDQ AX, R10 \
	ADCQ $0, DX \
	MOVQ DX, R11 \
	MOVQ ·np+0(SB), AX \
	MULQ 24+stack \
	ADDQ AX, R11 \
	\
	MOVQ ·np+8(SB), AX \
	MULQ 0+stack \
	MOVQ AX, R12 \
	MOVQ DX, R13 \
	MOVQ ·np+8(SB), AX \
	MULQ 8+stack \
	ADDQ AX, R13 \
	ADCQ $0, DX \
	MOVQ DX, R14 \
	MOVQ ·np+8(SB), AX \
	MULQ 16+stack \
	ADDQ AX, R14 \
	\
	ADDQ R12, R9 \
	ADCQ R13, R10 \
	ADCQ R14, R11 \
	\
	MOVQ ·np+16(SB), AX \
	MULQ 0+stack \
	MOVQ AX, R12 \
	MOVQ DX, R13 \
	MOVQ ·np+16(SB), AX \
	MULQ 8+stack \
	ADDQ AX, R13 \
	\
	ADDQ R12, R10 \
	ADCQ R13, R11 \
	\
	MOVQ ·np+24(SB), AX \
	MULQ 0+stack \
	ADDQ AX, R11 \
	\
	storeBlock(R8,R9,R10,R11, 64+stack) \
	\
	\ // m * N
	mul(·p2+0(SB),·p2+8(SB),·p2+16(SB),·p2+24(SB), 64+stack, 96+stack) \
	\
	\ // Add the 512-bit intermediate to m*N
	loadBlock(96+stack, R8,R9,R10,R11) \
	loadBlock(128+stack, R12,R13,R14,R15) \
	\
	MOVQ $0, AX \
	ADDQ 0+stack, R8 \
	ADCQ 8+stack, R9 \
	ADCQ 16+stack, R10 \
	ADCQ 24+stack, R11 \
	ADCQ 32+stack, R12 \
	ADCQ 40+stack, R13 \
	ADCQ 48+stack, R14 \
	ADCQ 56+stack, R15 \
	ADCQ $0, AX \
	\
	gfpCarry(R12,R13,R14,R15,AX, R8,R9,R10,R11,BX)

View File

@ -0,0 +1,133 @@
// mul computes the 512-bit product of the 256-bit values in R1-R4 and
// R5-R8, leaving the result limbs in c0..c7. R0 is scratch; R26, R27 and
// R29 carry intermediate high halves between rows.
#define mul(c0,c1,c2,c3,c4,c5,c6,c7) \
	MUL R1, R5, c0 \
	UMULH R1, R5, c1 \
	MUL R1, R6, R0 \
	ADDS R0, c1 \
	UMULH R1, R6, c2 \
	MUL R1, R7, R0 \
	ADCS R0, c2 \
	UMULH R1, R7, c3 \
	MUL R1, R8, R0 \
	ADCS R0, c3 \
	UMULH R1, R8, c4 \
	ADCS ZR, c4 \
	\
	MUL R2, R5, R1 \
	UMULH R2, R5, R26 \
	MUL R2, R6, R0 \
	ADDS R0, R26 \
	UMULH R2, R6, R27 \
	MUL R2, R7, R0 \
	ADCS R0, R27 \
	UMULH R2, R7, R29 \
	MUL R2, R8, R0 \
	ADCS R0, R29 \
	UMULH R2, R8, c5 \
	ADCS ZR, c5 \
	ADDS R1, c1 \
	ADCS R26, c2 \
	ADCS R27, c3 \
	ADCS R29, c4 \
	ADCS ZR, c5 \
	\
	MUL R3, R5, R1 \
	UMULH R3, R5, R26 \
	MUL R3, R6, R0 \
	ADDS R0, R26 \
	UMULH R3, R6, R27 \
	MUL R3, R7, R0 \
	ADCS R0, R27 \
	UMULH R3, R7, R29 \
	MUL R3, R8, R0 \
	ADCS R0, R29 \
	UMULH R3, R8, c6 \
	ADCS ZR, c6 \
	ADDS R1, c2 \
	ADCS R26, c3 \
	ADCS R27, c4 \
	ADCS R29, c5 \
	ADCS ZR, c6 \
	\
	MUL R4, R5, R1 \
	UMULH R4, R5, R26 \
	MUL R4, R6, R0 \
	ADDS R0, R26 \
	UMULH R4, R6, R27 \
	MUL R4, R7, R0 \
	ADCS R0, R27 \
	UMULH R4, R7, R29 \
	MUL R4, R8, R0 \
	ADCS R0, R29 \
	UMULH R4, R8, c7 \
	ADCS ZR, c7 \
	ADDS R1, c3 \
	ADCS R26, c4 \
	ADCS R27, c5 \
	ADCS R29, c6 \
	ADCS ZR, c7
// gfpReduce Montgomery-reduces the 512-bit value in R9-R16: m = (T·np)
// mod 2^256 is built in R1-R4 (dropping high partial products), m·p is
// added to T, and the result R21-R24 is conditionally reduced mod p into
// R1-R4 via the CSEL sequence at the end.
#define gfpReduce() \
	\ // m = (T * N') mod R, store m in R1:R2:R3:R4
	MOVD ·np+0(SB), R17 \
	MOVD ·np+8(SB), R25 \
	MOVD ·np+16(SB), R19 \
	MOVD ·np+24(SB), R20 \
	\
	MUL R9, R17, R1 \
	UMULH R9, R17, R2 \
	MUL R9, R25, R0 \
	ADDS R0, R2 \
	UMULH R9, R25, R3 \
	MUL R9, R19, R0 \
	ADCS R0, R3 \
	UMULH R9, R19, R4 \
	MUL R9, R20, R0 \
	ADCS R0, R4 \
	\
	MUL R10, R17, R21 \
	UMULH R10, R17, R22 \
	MUL R10, R25, R0 \
	ADDS R0, R22 \
	UMULH R10, R25, R23 \
	MUL R10, R19, R0 \
	ADCS R0, R23 \
	ADDS R21, R2 \
	ADCS R22, R3 \
	ADCS R23, R4 \
	\
	MUL R11, R17, R21 \
	UMULH R11, R17, R22 \
	MUL R11, R25, R0 \
	ADDS R0, R22 \
	ADDS R21, R3 \
	ADCS R22, R4 \
	\
	MUL R12, R17, R21 \
	ADDS R21, R4 \
	\
	\ // m * N
	loadModulus(R5,R6,R7,R8) \
	mul(R17,R25,R19,R20,R21,R22,R23,R24) \
	\
	\ // Add the 512-bit intermediate to m*N
	MOVD ZR, R0 \
	ADDS R9, R17 \
	ADCS R10, R25 \
	ADCS R11, R19 \
	ADCS R12, R20 \
	ADCS R13, R21 \
	ADCS R14, R22 \
	ADCS R15, R23 \
	ADCS R16, R24 \
	ADCS ZR, R0 \
	\
	\ // Our output is R21:R22:R23:R24. Reduce mod p if necessary.
	SUBS R5, R21, R10 \
	SBCS R6, R22, R11 \
	SBCS R7, R23, R12 \
	SBCS R8, R24, R13 \
	\
	CSEL CS, R10, R21, R1 \
	CSEL CS, R11, R22, R2 \
	CSEL CS, R12, R23, R3 \
	CSEL CS, R13, R24, R4

View File

@ -0,0 +1,112 @@
// mulBMI2 computes the 512-bit product of a0..a3 and the four limbs at rb
// using MULX (implicit multiplicand in DX), leaving the result in R8-R15.
// AX and BX are scratch.
#define mulBMI2(a0,a1,a2,a3, rb) \
	MOVQ a0, DX \
	MOVQ $0, R13 \
	MULXQ 0+rb, R8, R9 \
	MULXQ 8+rb, AX, R10 \
	ADDQ AX, R9 \
	MULXQ 16+rb, AX, R11 \
	ADCQ AX, R10 \
	MULXQ 24+rb, AX, R12 \
	ADCQ AX, R11 \
	ADCQ $0, R12 \
	ADCQ $0, R13 \
	\
	MOVQ a1, DX \
	MOVQ $0, R14 \
	MULXQ 0+rb, AX, BX \
	ADDQ AX, R9 \
	ADCQ BX, R10 \
	MULXQ 16+rb, AX, BX \
	ADCQ AX, R11 \
	ADCQ BX, R12 \
	ADCQ $0, R13 \
	MULXQ 8+rb, AX, BX \
	ADDQ AX, R10 \
	ADCQ BX, R11 \
	MULXQ 24+rb, AX, BX \
	ADCQ AX, R12 \
	ADCQ BX, R13 \
	ADCQ $0, R14 \
	\
	MOVQ a2, DX \
	MOVQ $0, R15 \
	MULXQ 0+rb, AX, BX \
	ADDQ AX, R10 \
	ADCQ BX, R11 \
	MULXQ 16+rb, AX, BX \
	ADCQ AX, R12 \
	ADCQ BX, R13 \
	ADCQ $0, R14 \
	MULXQ 8+rb, AX, BX \
	ADDQ AX, R11 \
	ADCQ BX, R12 \
	MULXQ 24+rb, AX, BX \
	ADCQ AX, R13 \
	ADCQ BX, R14 \
	ADCQ $0, R15 \
	\
	MOVQ a3, DX \
	MULXQ 0+rb, AX, BX \
	ADDQ AX, R11 \
	ADCQ BX, R12 \
	MULXQ 16+rb, AX, BX \
	ADCQ AX, R13 \
	ADCQ BX, R14 \
	ADCQ $0, R15 \
	MULXQ 8+rb, AX, BX \
	ADDQ AX, R12 \
	ADCQ BX, R13 \
	MULXQ 24+rb, AX, BX \
	ADCQ AX, R14 \
	ADCQ BX, R15
// gfpReduceBMI2 Montgomery-reduces the 512-bit value at 0(SP)..56(SP)
// using MULX: m is built in R8-R11, m·p2 is formed by mulBMI2 from the
// copy stored at 64(SP), T is added, and gfpCarry performs the final
// conditional subtraction.
#define gfpReduceBMI2() \
	\ // m = (T * N') mod R, store m in R8:R9:R10:R11
	MOVQ ·np+0(SB), DX \
	MULXQ 0(SP), R8, R9 \
	MULXQ 8(SP), AX, R10 \
	ADDQ AX, R9 \
	MULXQ 16(SP), AX, R11 \
	ADCQ AX, R10 \
	MULXQ 24(SP), AX, BX \
	ADCQ AX, R11 \
	\
	MOVQ ·np+8(SB), DX \
	MULXQ 0(SP), AX, BX \
	ADDQ AX, R9 \
	ADCQ BX, R10 \
	MULXQ 16(SP), AX, BX \
	ADCQ AX, R11 \
	MULXQ 8(SP), AX, BX \
	ADDQ AX, R10 \
	ADCQ BX, R11 \
	\
	MOVQ ·np+16(SB), DX \
	MULXQ 0(SP), AX, BX \
	ADDQ AX, R10 \
	ADCQ BX, R11 \
	MULXQ 8(SP), AX, BX \
	ADDQ AX, R11 \
	\
	MOVQ ·np+24(SB), DX \
	MULXQ 0(SP), AX, BX \
	ADDQ AX, R11 \
	\
	storeBlock(R8,R9,R10,R11, 64(SP)) \
	\
	\ // m * N
	mulBMI2(·p2+0(SB),·p2+8(SB),·p2+16(SB),·p2+24(SB), 64(SP)) \
	\
	\ // Add the 512-bit intermediate to m*N
	MOVQ $0, AX \
	ADDQ 0(SP), R8 \
	ADCQ 8(SP), R9 \
	ADCQ 16(SP), R10 \
	ADCQ 24(SP), R11 \
	ADCQ 32(SP), R12 \
	ADCQ 40(SP), R13 \
	ADCQ 48(SP), R14 \
	ADCQ 56(SP), R15 \
	ADCQ $0, AX \
	\
	gfpCarry(R12,R13,R14,R15,AX, R8,R9,R10,R11,BX)

View File

@ -0,0 +1,271 @@
package bn256
// lineFunctionAdd computes rOut = r + p (mixed Jacobian/affine addition on
// the twist) together with the coefficients a, b, c of the line through r
// and p evaluated at the curve point q. r2 must hold p.y², precomputed by
// the caller.
func lineFunctionAdd(r, p *twistPoint, q *curvePoint, r2 *gfP2) (a, b, c *gfP2, rOut *twistPoint) {
    // See the mixed addition algorithm from "Faster Computation of the
    // Tate Pairing", http://arxiv.org/pdf/0904.0854v3.pdf
    B := (&gfP2{}).Mul(&p.x, &r.t)

    D := (&gfP2{}).Add(&p.y, &r.z)
    D.Square(D).Sub(D, r2).Sub(D, &r.t).Mul(D, &r.t)

    H := (&gfP2{}).Sub(B, &r.x)
    I := (&gfP2{}).Square(H)

    E := (&gfP2{}).Add(I, I)
    E.Add(E, E)

    J := (&gfP2{}).Mul(H, E)

    L1 := (&gfP2{}).Sub(D, &r.y)
    L1.Sub(L1, &r.y)

    V := (&gfP2{}).Mul(&r.x, E)

    rOut = &twistPoint{}
    rOut.x.Square(L1).Sub(&rOut.x, J).Sub(&rOut.x, V).Sub(&rOut.x, V)

    rOut.z.Add(&r.z, H).Square(&rOut.z).Sub(&rOut.z, &r.t).Sub(&rOut.z, I)

    t := (&gfP2{}).Sub(V, &rOut.x)
    t.Mul(t, L1)
    t2 := (&gfP2{}).Mul(&r.y, J)
    t2.Add(t2, t2)
    rOut.y.Sub(t, t2)

    rOut.t.Square(&rOut.z)

    // Evaluate the line at q to obtain the sparse gfP12 coefficients.
    t.Add(&p.y, &rOut.z).Square(t).Sub(t, r2).Sub(t, &rOut.t)

    t2.Mul(L1, &p.x)
    t2.Add(t2, t2)
    a = (&gfP2{}).Sub(t2, t)

    c = (&gfP2{}).MulScalar(&rOut.z, &q.y)
    c.Add(c, c)

    b = (&gfP2{}).Neg(L1)
    b.MulScalar(b, &q.x).Add(b, b)

    return
}
// lineFunctionDouble computes rOut = 2r on the twist together with the
// coefficients a, b, c of the tangent line at r evaluated at the curve
// point q.
func lineFunctionDouble(r *twistPoint, q *curvePoint) (a, b, c *gfP2, rOut *twistPoint) {
    // See the doubling algorithm for a=0 from "Faster Computation of the
    // Tate Pairing", http://arxiv.org/pdf/0904.0854v3.pdf
    A := (&gfP2{}).Square(&r.x)
    B := (&gfP2{}).Square(&r.y)
    C := (&gfP2{}).Square(B)

    D := (&gfP2{}).Add(&r.x, B)
    D.Square(D).Sub(D, A).Sub(D, C).Add(D, D)

    E := (&gfP2{}).Add(A, A)
    E.Add(E, A)

    G := (&gfP2{}).Square(E)

    rOut = &twistPoint{}
    rOut.x.Sub(G, D).Sub(&rOut.x, D)

    rOut.z.Add(&r.y, &r.z).Square(&rOut.z).Sub(&rOut.z, B).Sub(&rOut.z, &r.t)

    rOut.y.Sub(D, &rOut.x).Mul(&rOut.y, E)
    t := (&gfP2{}).Add(C, C)
    t.Add(t, t).Add(t, t)
    rOut.y.Sub(&rOut.y, t)

    rOut.t.Square(&rOut.z)

    // Evaluate the tangent at q to obtain the sparse gfP12 coefficients.
    t.Mul(E, &r.t).Add(t, t)
    b = (&gfP2{}).Neg(t)
    b.MulScalar(b, &q.x)

    a = (&gfP2{}).Add(&r.x, E)
    a.Square(a).Sub(a, A).Sub(a, G)
    t.Add(B, B).Add(t, t)
    a.Sub(a, t)

    c = (&gfP2{}).Mul(&rOut.z, &r.t)
    c.Add(c, c).MulScalar(c, &q.y)

    return
}
// mulLine multiplies ret in place by the sparse GF(p¹²) element whose
// only nonzero gfP2 coefficients are (a, b, c), exploiting the sparsity
// to avoid a full gfP12 multiplication.
func mulLine(ret *gfP12, a, b, c *gfP2) {
    a2 := &gfP6{}
    a2.y.Set(a)
    a2.z.Set(b)
    a2.Mul(a2, &ret.x)
    t3 := (&gfP6{}).MulScalar(&ret.y, c)

    t := (&gfP2{}).Add(b, c)
    t2 := &gfP6{}
    t2.y.Set(a)
    t2.z.Set(t)
    ret.x.Add(&ret.x, &ret.y)

    ret.y.Set(t3)

    ret.x.Mul(&ret.x, t2).Sub(&ret.x, a2).Sub(&ret.x, &ret.y)
    a2.MulTau(a2)
    ret.y.Add(&ret.y, a2)
}
// sixuPlus2NAF is 6u+2 in non-adjacent form. miller consumes it from the
// most-significant entry downward.
var sixuPlus2NAF = []int8{0, 0, 0, 1, 0, 1, 0, -1, 0, 0, 1, -1, 0, 0, 1, 0,
    0, 1, 1, 0, -1, 0, 0, 1, 0, -1, 0, 0, 0, 0, 1, 1,
    1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, -1, 0, 0, 1,
    1, 0, 0, -1, 0, 0, 0, 1, 1, 0, -1, 0, 0, 1, 0, 1, 1}
// miller implements the Miller loop for calculating the Optimal Ate pairing.
// See algorithm 1 from http://cryptojedi.org/papers/dclxvi-20100714.pdf
func miller(q *twistPoint, p *curvePoint) *gfP12 {
    ret := (&gfP12{}).SetOne()

    // Work on affine copies so the mixed-addition line formulas apply;
    // minusA serves the -1 digits of the NAF.
    aAffine := &twistPoint{}
    aAffine.Set(q)
    aAffine.MakeAffine()

    bAffine := &curvePoint{}
    bAffine.Set(p)
    bAffine.MakeAffine()

    minusA := &twistPoint{}
    minusA.Neg(aAffine)

    r := &twistPoint{}
    r.Set(aAffine)

    r2 := (&gfP2{}).Square(&aAffine.y) // cached y², reused by lineFunctionAdd

    // Main loop: double-and-add over sixuPlus2NAF, most-significant digit
    // first.
    for i := len(sixuPlus2NAF) - 1; i > 0; i-- {
        a, b, c, newR := lineFunctionDouble(r, bAffine)
        if i != len(sixuPlus2NAF)-1 {
            ret.Square(ret)
        }

        mulLine(ret, a, b, c)
        r = newR

        switch sixuPlus2NAF[i-1] {
        case 1:
            a, b, c, newR = lineFunctionAdd(r, aAffine, bAffine, r2)
        case -1:
            a, b, c, newR = lineFunctionAdd(r, minusA, bAffine, r2)
        default:
            continue
        }

        mulLine(ret, a, b, c)
        r = newR
    }

    // In order to calculate Q1 we have to convert q from the sextic twist
    // to the full GF(p^12) group, apply the Frobenius there, and convert
    // back.
    //
    // The twist isomorphism is (x', y') -> (xω², yω³). If we consider just
    // x for a moment, then after applying the Frobenius, we have x̄ω^(2p)
    // where x̄ is the conjugate of x. If we are going to apply the inverse
    // isomorphism we need a value with a single coefficient of ω² so we
    // rewrite this as x̄ω^(2p-2)ω². ξ⁶ = ω and, due to the construction of
    // p, 2p-2 is a multiple of six. Therefore we can rewrite as
    // x̄ξ^((p-1)/3)ω² and applying the inverse isomorphism eliminates the
    // ω².
    //
    // A similar argument can be made for the y value.
    q1 := &twistPoint{}
    q1.x.Conjugate(&aAffine.x).Mul(&q1.x, xiToPMinus1Over3)
    q1.y.Conjugate(&aAffine.y).Mul(&q1.y, xiToPMinus1Over2)
    q1.z.SetOne()
    q1.t.SetOne()

    // For Q2 we are applying the p² Frobenius. The two conjugations cancel
    // out and we are left only with the factors from the isomorphism. In
    // the case of x, we end up with a pure number which is why
    // xiToPSquaredMinus1Over3 is ∈ GF(p). With y we get a factor of -1. We
    // ignore this to end up with -Q2.
    minusQ2 := &twistPoint{}
    minusQ2.x.MulScalar(&aAffine.x, xiToPSquaredMinus1Over3)
    minusQ2.y.Set(&aAffine.y)
    minusQ2.z.SetOne()
    minusQ2.t.SetOne()

    // Final two additions with Q1 and -Q2 complete the optimal-ate loop.
    r2.Square(&q1.y)
    a, b, c, newR := lineFunctionAdd(r, q1, bAffine, r2)
    mulLine(ret, a, b, c)
    r = newR

    r2.Square(&minusQ2.y)
    a, b, c, newR = lineFunctionAdd(r, minusQ2, bAffine, r2)
    mulLine(ret, a, b, c)
    r = newR

    return ret
}
// finalExponentiation computes the (p¹²-1)/Order-th power of an element of
// GF(p¹²) to obtain an element of GT (steps 13-15 of algorithm 1 from
// http://cryptojedi.org/papers/dclxvi-20100714.pdf)
func finalExponentiation(in *gfP12) *gfP12 {
    t1 := &gfP12{}

    // This is the p^6-Frobenius
    t1.x.Neg(&in.x)
    t1.y.Set(&in.y)

    inv := &gfP12{}
    inv.Invert(in)
    t1.Mul(t1, inv) // t1 = in^(p^6-1), the "easy" part of the exponent

    t2 := (&gfP12{}).FrobeniusP2(t1)
    t1.Mul(t1, t2) // t1 = in^((p^6-1)(p^2+1))

    // The remaining "hard" part is assembled from Frobenius images and
    // three successive exponentiations by the curve parameter u.
    fp := (&gfP12{}).Frobenius(t1)
    fp2 := (&gfP12{}).FrobeniusP2(t1)
    fp3 := (&gfP12{}).Frobenius(fp2)

    fu := (&gfP12{}).Exp(t1, u)
    fu2 := (&gfP12{}).Exp(fu, u)
    fu3 := (&gfP12{}).Exp(fu2, u)

    y3 := (&gfP12{}).Frobenius(fu)
    fu2p := (&gfP12{}).Frobenius(fu2)
    fu3p := (&gfP12{}).Frobenius(fu3)
    y2 := (&gfP12{}).FrobeniusP2(fu2)

    y0 := &gfP12{}
    y0.Mul(fp, fp2).Mul(y0, fp3)

    y1 := (&gfP12{}).Conjugate(t1)
    y5 := (&gfP12{}).Conjugate(fu2)
    y3.Conjugate(y3)
    y4 := (&gfP12{}).Mul(fu, fu2p)
    y4.Conjugate(y4)

    y6 := (&gfP12{}).Mul(fu3, fu3p)
    y6.Conjugate(y6)

    // Combine the y_i terms with the fixed square-and-multiply schedule.
    t0 := (&gfP12{}).Square(y6)
    t0.Mul(t0, y4).Mul(t0, y5)
    t1.Mul(y3, y5).Mul(t1, t0)
    t0.Mul(t0, y2)
    t1.Square(t1).Mul(t1, t0).Square(t1)
    t0.Mul(t1, y1)
    t1.Mul(t1, y0)
    t0.Square(t0).Mul(t0, t1)

    return t0
}
// optimalAte computes the optimal-ate pairing e(a, b): the Miller loop
// followed by the final exponentiation. A pairing involving the point at
// infinity is defined to be one, which is patched up afterwards.
func optimalAte(a *twistPoint, b *curvePoint) *gfP12 {
    e := miller(a, b)
    ret := finalExponentiation(e)

    if a.IsInfinity() || b.IsInfinity() {
        ret.SetOne()
    }
    return ret
}

217
cryptography/bn256/twist.go Normal file
View File

@ -0,0 +1,217 @@
package bn256
import (
"math/big"
)
// twistPoint implements the elliptic curve y²=x³+3/ξ over GF(p²). Points are
// kept in Jacobian form and t=z² when valid. The group G₂ is the set of
// n-torsion points of this curve over GF(p²) (where n = Order)
type twistPoint struct {
    // t caches z² for the Jacobian formulas; it is only meaningful when
    // the point is in a valid state.
    x, y, z, t gfP2
}
// <sage>
// btwist = 3 / Fp2(i + 9); btwist
// # 266929791119991161246907387137283842545076965332900288569378510910307636690*i + 19485874751759354771024239261021720505790618469301721065564631296452457478373
// hex(266929791119991161246907387137283842545076965332900288569378510910307636690)
// # 009713b03af0fed4 cd2cafadeed8fdf4 a74fa084e52d1852 e4a2bd0685c315d2
// hex(19485874751759354771024239261021720505790618469301721065564631296452457478373)
// # 2b149d40ceb8aaae 81be18991be06ac3 b5b4c5e559dbefa3 3267e6dc24a138e5
// </sage>
//
// c0 = 19485874751759354771024239261021720505790618469301721065564631296452457478373
// c1 = 266929791119991161246907387137283842545076965332900288569378510910307636690
//
// twistB is the montgomery encoding of the btwist constant obtained above,
// i.e. the curve coefficient 3/ξ of the twist equation.
var twistB = &gfP2{
    gfP{0x38e7ecccd1dcff67, 0x65f0b37d93ce0d3e, 0xd749d0dd22ac00aa, 0x0141b9ce4a688d4d},
    gfP{0x3bf938e377b802a8, 0x020b1b273633535d, 0x26b7edf049755260, 0x2514c6324384a86d},
}
// twistGen is the generator of group G₂, given in Jacobian coordinates
// with z and t set to one.
var twistGen = &twistPoint{
    gfP2{
        gfP{0xafb4737da84c6140, 0x6043dd5a5802d8c4, 0x09e950fc52a02f86, 0x14fef0833aea7b6b},
        gfP{0x8e83b5d102bc2026, 0xdceb1935497b0172, 0xfbb8264797811adf, 0x19573841af96503b},
    },
    gfP2{
        gfP{0x64095b56c71856ee, 0xdc57f922327d3cbb, 0x55f935be33351076, 0x0da4a0e693fd6482},
        gfP{0x619dfa9d886be9f6, 0xfe7fd297f59e9b78, 0xff9e1a62231b7dfe, 0x28fd7eebae9e4206},
    },
    gfP2{*newGFp(0), *newGFp(1)},
    gfP2{*newGFp(0), *newGFp(1)},
}
// String formats c as an affine coordinate pair. Note that it normalizes
// the receiver in place via MakeAffine.
func (c *twistPoint) String() string {
    c.MakeAffine()
    x, y := gfP2Decode(&c.x), gfP2Decode(&c.y)
    return "(" + x.String() + ", " + y.String() + ")"
}
// Set copies a into c, including the cached t coordinate.
func (c *twistPoint) Set(a *twistPoint) {
    c.x.Set(&a.x)
    c.y.Set(&a.y)
    c.z.Set(&a.z)
    c.t.Set(&a.t)
}
// IsOnCurve returns true iff c is on the curve.
func (c *twistPoint) IsOnCurve() bool {
    c.MakeAffine()
    if c.IsInfinity() {
        return true
    }

    // Check the affine curve equation y² = x³ + twistB.
    y2, x3 := &gfP2{}, &gfP2{}
    y2.Square(&c.y)
    x3.Square(&c.x).Mul(x3, &c.x).Add(x3, twistB)

    if *y2 != *x3 {
        return false
    }
    // Additionally check n-torsion membership: Order·c must be the point
    // at infinity (z = 0).
    cneg := &twistPoint{}
    cneg.Mul(c, Order)
    return cneg.z.IsZero()
}
// SetInfinity sets c to the point at infinity (z = 0, with y = 1 keeping
// the representation well-formed).
func (c *twistPoint) SetInfinity() {
    c.x.SetZero()
    c.y.SetOne()
    c.z.SetZero()
    c.t.SetZero()
}
// IsInfinity reports whether c is the point at infinity (z == 0).
func (c *twistPoint) IsInfinity() bool {
    return c.z.IsZero()
}
// Add sets c = a + b in Jacobian coordinates, handling the infinity and
// doubling special cases explicitly.
func (c *twistPoint) Add(a, b *twistPoint) {
    // For additional comments, see the same function in curve.go.

    if a.IsInfinity() {
        c.Set(b)
        return
    }
    if b.IsInfinity() {
        c.Set(a)
        return
    }

    // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/addition/add-2007-bl.op3
    z12 := (&gfP2{}).Square(&a.z)
    z22 := (&gfP2{}).Square(&b.z)
    u1 := (&gfP2{}).Mul(&a.x, z22)
    u2 := (&gfP2{}).Mul(&b.x, z12)

    t := (&gfP2{}).Mul(&b.z, z22)
    s1 := (&gfP2{}).Mul(&a.y, t)

    t.Mul(&a.z, z12)
    s2 := (&gfP2{}).Mul(&b.y, t)

    h := (&gfP2{}).Sub(u2, u1)
    xEqual := h.IsZero()

    t.Add(h, h)
    i := (&gfP2{}).Square(t)
    j := (&gfP2{}).Mul(h, i)

    t.Sub(s2, s1)
    yEqual := t.IsZero()
    if xEqual && yEqual {
        // Same point: the addition formula degenerates; use doubling.
        c.Double(a)
        return
    }
    r := (&gfP2{}).Add(t, t)

    v := (&gfP2{}).Mul(u1, i)

    t4 := (&gfP2{}).Square(r)
    t.Add(v, v)
    t6 := (&gfP2{}).Sub(t4, j)
    c.x.Sub(t6, t)

    t.Sub(v, &c.x) // t7
    t4.Mul(s1, j)  // t8
    t6.Add(t4, t4) // t9
    t4.Mul(r, t)   // t10
    c.y.Sub(t4, t6)

    t.Add(&a.z, &b.z) // t11
    t4.Square(t)      // t12
    t.Sub(t4, z12)    // t13
    t4.Sub(t, z22)    // t14
    c.z.Mul(t4, h)
}
// Double sets c = 2a in Jacobian coordinates.
func (c *twistPoint) Double(a *twistPoint) {
    // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/doubling/dbl-2009-l.op3
    A := (&gfP2{}).Square(&a.x)
    B := (&gfP2{}).Square(&a.y)
    C := (&gfP2{}).Square(B)

    t := (&gfP2{}).Add(&a.x, B)
    t2 := (&gfP2{}).Square(t)
    t.Sub(t2, A)
    t2.Sub(t, C)
    d := (&gfP2{}).Add(t2, t2)
    t.Add(A, A)
    e := (&gfP2{}).Add(t, A)
    f := (&gfP2{}).Square(e)

    t.Add(d, d)
    c.x.Sub(f, t)

    t.Add(C, C)
    t2.Add(t, t)
    t.Add(t2, t2)
    c.y.Sub(d, &c.x)
    t2.Mul(e, &c.y)
    c.y.Sub(t2, t)

    t.Mul(&a.y, &a.z)
    c.z.Add(t, t)
}
// Mul sets c = scalar·a using MSB-first double-and-add.
// NOTE(review): the add/set branch depends on scalar bits, so this is not
// constant-time — confirm no caller uses it with a secret scalar.
func (c *twistPoint) Mul(a *twistPoint, scalar *big.Int) {
    sum, t := &twistPoint{}, &twistPoint{}
    for i := scalar.BitLen(); i >= 0; i-- {
        t.Double(sum)
        if scalar.Bit(i) != 0 {
            sum.Add(t, a)
        } else {
            sum.Set(t)
        }
    }
    c.Set(sum)
}
// MakeAffine converts c to affine form (z = 1) in place. The point at
// infinity is normalized to (0, 1, 0, 0).
func (c *twistPoint) MakeAffine() {
    if c.z.IsOne() {
        return
    } else if c.z.IsZero() {
        c.x.SetZero()
        c.y.SetOne()
        c.t.SetZero()
        return
    }

    // x' = x/z², y' = y/z³, computed from a single inversion of z.
    zInv := (&gfP2{}).Invert(&c.z)
    t := (&gfP2{}).Mul(&c.y, zInv)
    zInv2 := (&gfP2{}).Square(zInv)
    c.y.Mul(t, zInv2)
    t.Mul(&c.x, zInv2)
    c.x.Set(t)
    c.z.SetOne()
    c.t.SetOne()
}
// Neg sets c = -a (y is negated, x and z copied).
// NOTE(review): t is zeroed rather than copied, so the t=z² cache is left
// invalid — confirm callers re-establish it before relying on t.
func (c *twistPoint) Neg(a *twistPoint) {
    c.x.Set(&a.x)
    c.y.Neg(&a.y)
    c.z.Set(&a.z)
    c.t.SetZero()
}

View File

@ -0,0 +1,90 @@
RESEARCH LICENSE
Version 1.1.2
I. DEFINITIONS.
"Licensee" means You and any other party that has entered into and has in effect a version of this License.
“Licensor” means DERO PROJECT(GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8) and its successors and assignees.
"Modifications" means any (a) change or addition to the Technology or (b) new source or object code implementing any portion of the Technology.
"Research Use" means research, evaluation, or development for the purpose of advancing knowledge, teaching, learning, or customizing the Technology for personal use. Research Use expressly excludes use or distribution for direct or indirect commercial (including strategic) gain or advantage.
"Technology" means the source code, object code and specifications of the technology made available by Licensor pursuant to this License.
"Technology Site" means the website designated by Licensor for accessing the Technology.
"You" means the individual executing this License or the legal entity or entities represented by the individual executing this License.
II. PURPOSE.
Licensor is licensing the Technology under this Research License (the "License") to promote research, education, innovation, and development using the Technology.
COMMERCIAL USE AND DISTRIBUTION OF TECHNOLOGY AND MODIFICATIONS IS PERMITTED ONLY UNDER AN APPROPRIATE COMMERCIAL USE LICENSE AVAILABLE FROM LICENSOR AT <url>.
III. RESEARCH USE RIGHTS.
A. Subject to the conditions contained herein, Licensor grants to You a non-exclusive, non-transferable, worldwide, and royalty-free license to do the following for Your Research Use only:
1. reproduce, create Modifications of, and use the Technology alone, or with Modifications;
2. share source code of the Technology alone, or with Modifications, with other Licensees;
3. distribute object code of the Technology, alone, or with Modifications, to any third parties for Research Use only, under a license of Your choice that is consistent with this License; and
4. publish papers and books discussing the Technology which may include relevant excerpts that do not in the aggregate constitute a significant portion of the Technology.
B. Residual Rights. You may use any information in intangible form that you remember after accessing the Technology, except when such use violates Licensor's copyrights or patent rights.
C. No Implied Licenses. Other than the rights granted herein, Licensor retains all rights, title, and interest in Technology , and You retain all rights, title, and interest in Your Modifications and associated specifications, subject to the terms of this License.
D. Open Source Licenses. Portions of the Technology may be provided with notices and open source licenses from open source communities and third parties that govern the use of those portions, and any licenses granted hereunder do not alter any rights and obligations you may have under such open source licenses, however, the disclaimer of warranty and limitation of liability provisions in this License will apply to all Technology in this distribution.
IV. INTELLECTUAL PROPERTY REQUIREMENTS
As a condition to Your License, You agree to comply with the following restrictions and responsibilities:
A. License and Copyright Notices. You must include a copy of this License in a Readme file for any Technology or Modifications you distribute. You must also include the following statement, "Use and distribution of this technology is subject to the Java Research License included herein", (a) once prominently in the source code tree and/or specifications for Your source code distributions, and (b) once in the same file as Your copyright or proprietary notices for Your binary code distributions. You must cause any files containing Your Modification to carry prominent notice stating that You changed the files. You must not remove or alter any copyright or other proprietary notices in the Technology.
B. Licensee Exchanges. Any Technology and Modifications You receive from any Licensee are governed by this License.
V. GENERAL TERMS.
A. Disclaimer Of Warranties.
TECHNOLOGY IS PROVIDED "AS IS", WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT ANY SUCH TECHNOLOGY IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING OF THIRD PARTY RIGHTS. YOU AGREE THAT YOU BEAR THE ENTIRE RISK IN CONNECTION WITH YOUR USE AND DISTRIBUTION OF ANY AND ALL TECHNOLOGY UNDER THIS LICENSE.
B. Infringement; Limitation Of Liability.
1. If any portion of, or functionality implemented by, the Technology becomes the subject of a claim or threatened claim of infringement ("Affected Materials"), Licensor may, in its unrestricted discretion, suspend Your rights to use and distribute the Affected Materials under this License. Such suspension of rights will be effective immediately upon Licensor's posting of notice of suspension on the Technology Site.
2. IN NO EVENT WILL LICENSOR BE LIABLE FOR ANY DIRECT, INDIRECT, PUNITIVE, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES IN CONNECTION WITH OR ARISING OUT OF THIS LICENSE (INCLUDING, WITHOUT LIMITATION, LOSS OF PROFITS, USE, DATA, OR ECONOMIC ADVANTAGE OF ANY SORT), HOWEVER IT ARISES AND ON ANY THEORY OF LIABILITY (including negligence), WHETHER OR NOT LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. LIABILITY UNDER THIS SECTION V.B.2 SHALL BE SO LIMITED AND EXCLUDED, NOTWITHSTANDING FAILURE OF THE ESSENTIAL PURPOSE OF ANY REMEDY.
C. Termination.
1. You may terminate this License at any time by notifying Licensor in writing.
2. All Your rights will terminate under this License if You fail to comply with any of its material terms or conditions and do not cure such failure within thirty (30) days after becoming aware of such noncompliance.
3. Upon termination, You must discontinue all uses and distribution of the Technology , and all provisions of this Section V shall survive termination.
D. Miscellaneous.
1. Trademark. You agree to comply with Licensor's Trademark & Logo Usage Requirements, if any and as modified from time to time, available at the Technology Site. Except as expressly provided in this License, You are granted no rights in or to any Licensor's trademarks now or hereafter used or licensed by Licensor.
2. Integration. This License represents the complete agreement of the parties concerning the subject matter hereof.
3. Severability. If any provision of this License is held unenforceable, such provision shall be reformed to the extent necessary to make it enforceable unless to do so would defeat the intent of the parties, in which case, this License shall terminate.
4. Governing Law. This License is governed by the laws of the United States and the State of California, as applied to contracts entered into and performed in California between California residents. In no event shall this License be construed against the drafter.
5. Export Control. You agree to comply with the U.S. export controls and trade laws of other countries that apply to Technology and Modifications.
READ ALL THE TERMS OF THIS LICENSE CAREFULLY BEFORE ACCEPTING.
BY CLICKING ON THE YES BUTTON BELOW OR USING THE TECHNOLOGY, YOU ARE ACCEPTING AND AGREEING TO ABIDE BY THE TERMS AND CONDITIONS OF THIS LICENSE. YOU MUST BE AT LEAST 18 YEARS OF AGE AND OTHERWISE COMPETENT TO ENTER INTO CONTRACTS.
IF YOU DO NOT MEET THESE CRITERIA, OR YOU DO NOT AGREE TO ANY OF THE TERMS OF THIS LICENSE, DO NOT USE THIS SOFTWARE IN ANY FORM.

View File

@ -0,0 +1,177 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math/big"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "golang.org/x/crypto/sha3"
// ElGamal_ZERO is the identity point (G * 0), initialized in the
// pedersen init routine.
var ElGamal_ZERO *bn256.G1

// ElGamal_ZERO_string caches ElGamal_ZERO.String() for cheap equality
// checks (see ElGamal.IsZero).
var ElGamal_ZERO_string string

// ElGamal_BASE_G aliases the global base point G.
var ElGamal_BASE_G *bn256.G1
// ElGamal is a twisted ElGamal ciphertext over bn256.G1.
// For a committed value v with blinding scalar r (see CommitElGamal):
// Left = G*v + key*r and Right = G*r.
type ElGamal struct {
	G          *bn256.G1 // base point used when committing values
	Randomness *big.Int  // blinding scalar; only set by CommitElGamal
	Left       *bn256.G1 // G*value + key*randomness
	Right      *bn256.G1 // G*randomness
}
// NewElGamal returns a ciphertext shell bound to the global Pedersen
// base point; Left, Right and Randomness start out nil.
func NewElGamal() (p *ElGamal) {
	p = &ElGamal{G: global_pedersen_values.G}
	return
}
// CommitElGamal encrypts value under key with a fresh blinding scalar r:
//
//	Left  = G*value + key*r
//	Right = G*r
//
// Consistency fix: Right previously used the package-level G while Left
// used e.G. Both alias global_pedersen_values.G (set once in init), so
// using e.G for both halves is behavior-preserving and keeps the
// ciphertext tied to its own base point.
func CommitElGamal(key *bn256.G1, value *big.Int) *ElGamal {
	e := NewElGamal()
	e.Randomness = RandomScalarFixed()
	e.Left = new(bn256.G1).Add(new(bn256.G1).ScalarMult(e.G, value), new(bn256.G1).ScalarMult(key, e.Randomness))
	e.Right = new(bn256.G1).ScalarMult(e.G, e.Randomness)
	return e
}
// ConstructElGamal builds a ciphertext from raw curve points, deep
// copying each half. Either argument may be nil: callers such as Plus
// and Neg legitimately pass a nil half, and the corresponding field is
// then left nil.
//
// Fix: the original guarded only left; a nil right was handed straight
// to bn256.G1.Set, which dereferences it and panics.
func ConstructElGamal(left, right *bn256.G1) *ElGamal {
	e := NewElGamal()
	if left != nil {
		e.Left = new(bn256.G1).Set(left)
	}
	if right != nil {
		e.Right = new(bn256.G1).Set(right)
	}
	return e
}
// IsZero reports whether both halves are present and equal the
// canonical string form of the ElGamal zero point.
func (e *ElGamal) IsZero() bool {
	if e.Left == nil || e.Right == nil {
		return false
	}
	return e.Left.String() == ElGamal_ZERO_string && e.Right.String() == ElGamal_ZERO_string
}
// Add homomorphically adds two ciphertexts component-wise. When the
// receiver has no Left half, only the Right halves are combined and the
// result's Left stays nil.
//
// NOTE(review): addendum.Left is dereferenced whenever e.Left is
// non-nil; a mixed nil/non-nil pair would panic. Confirm callers never
// mix shapes.
func (e *ElGamal) Add(addendum *ElGamal) *ElGamal {
	if e.Left == nil {
		return ConstructElGamal(nil, new(bn256.G1).Add(e.Right, addendum.Right))
	}
	return ConstructElGamal(new(bn256.G1).Add(e.Left, addendum.Left), new(bn256.G1).Add(e.Right, addendum.Right))
}
// Mul scales the ciphertext: both halves are multiplied by scalar.
func (e *ElGamal) Mul(scalar *big.Int) *ElGamal {
	left := new(bn256.G1).ScalarMult(e.Left, scalar)
	right := new(bn256.G1).ScalarMult(e.Right, scalar)
	return ConstructElGamal(left, right)
}
// Plus adds value "in the clear" by adding G*value to the Left half;
// the Right (randomness) half is copied through unchanged.
//
// NOTE(review): when e.Right is nil, nil is forwarded to
// ConstructElGamal, which calls Set on it unguarded -- confirm whether
// this branch is ever taken in practice.
func (e *ElGamal) Plus(value *big.Int) *ElGamal {
	if e.Right == nil {
		return ConstructElGamal(new(bn256.G1).Add(e.Left, new(bn256.G1).ScalarMult(e.G, value)), nil)
	}
	return ConstructElGamal(new(bn256.G1).Add(e.Left, new(bn256.G1).ScalarMult(e.G, value)), new(bn256.G1).Set(e.Right))
}
// Serialize returns the 130-byte uncompressed encoding Left || Right
// (65 bytes per point). It panics when either half is missing.
func (e *ElGamal) Serialize() (data []byte) {
	if e.Left == nil || e.Right == nil {
		panic("elgamal has nil pointer")
	}
	data = make([]byte, 0, 130)
	data = append(data, e.Left.EncodeUncompressed()...)
	data = append(data, e.Right.EncodeUncompressed()...)
	return data
}
// Deserialize parses a 130-byte Left || Right uncompressed encoding and
// returns a fresh ciphertext. The receiver itself is NOT mutated (the
// method is kept on the type for call-site compatibility); callers must
// use the return value. Panics on a wrong-size buffer or invalid point.
//
// Fixes: the panic message said "insufficient buffer size" even though
// the check also rejects oversized buffers, and the useless assignment
// to the receiver parameter has been removed.
func (e *ElGamal) Deserialize(data []byte) *ElGamal {
	if len(data) != 130 {
		panic("invalid buffer size")
	}
	left := new(bn256.G1)
	right := new(bn256.G1)
	if err := left.DecodeUncompressed(data[:65]); err != nil {
		panic(err)
	}
	if err := right.DecodeUncompressed(data[65:130]); err != nil {
		panic(err)
	}
	return ConstructElGamal(left, right)
}
// Neg negates both halves of the ciphertext; a missing half stays nil.
//
// NOTE(review): a nil right is handed to ConstructElGamal, which calls
// Set on it unguarded -- confirm whether a ciphertext with nil Right
// ever reaches this path.
func (e *ElGamal) Neg() *ElGamal {
	var left, right *bn256.G1
	if e.Left != nil {
		left = new(bn256.G1).Neg(e.Left)
	}
	if e.Right != nil {
		right = new(bn256.G1).Neg(e.Right)
	}
	return ConstructElGamal(left, right)
}
// ElGamalVector is a slice of ElGamal ciphertexts manipulated
// homomorphically as a unit.
type ElGamalVector struct {
	vector []*ElGamal
}
// MultiExponentiate returns sum_i e.vector[i] * exponents.vector[i],
// starting from the zero ciphertext.
// Assumes len(exponents.vector) <= len(e.vector) -- not checked.
func (e *ElGamalVector) MultiExponentiate(exponents *FieldVector) *ElGamal {
	accumulator := ConstructElGamal(ElGamal_ZERO, ElGamal_ZERO)
	for i := range exponents.vector {
		accumulator = accumulator.Add(e.vector[i].Mul(exponents.vector[i]))
	}
	return accumulator
}
// Sum folds the whole vector into one ciphertext by repeated
// homomorphic addition, starting from the zero ciphertext.
func (e *ElGamalVector) Sum() *ElGamal {
	result := ConstructElGamal(ElGamal_ZERO, ElGamal_ZERO)
	for _, ct := range e.vector {
		result = result.Add(ct)
	}
	return result
}
// Add returns the element-wise homomorphic sum of the two vectors.
// Assumes len(other.vector) <= len(e.vector); a longer other would
// index past r.vector and panic (not checked).
func (e *ElGamalVector) Add(other *ElGamalVector) *ElGamalVector {
	var r ElGamalVector
	// deep-copy the receiver first ...
	for i := range e.vector {
		r.vector = append(r.vector, ConstructElGamal(e.vector[i].Left, e.vector[i].Right))
	}
	// ... then fold other into the copy
	for i := range other.vector {
		r.vector[i] = r.vector[i].Add(other.vector[i])
	}
	return &r
}
// Hadamard scales each ciphertext by the matching scalar in exponents.
// Assumes len(exponents.vector) <= len(e.vector) -- not checked.
//
// NOTE(review): exponents is taken by value, unlike the pointer
// arguments used by the sibling methods -- possibly unintentional;
// confirm before changing the signature.
func (e *ElGamalVector) Hadamard(exponents FieldVector) *ElGamalVector {
	var r ElGamalVector
	for i := range e.vector {
		r.vector = append(r.vector, ConstructElGamal(e.vector[i].Left, e.vector[i].Right))
	}
	for i := range exponents.vector {
		r.vector[i] = r.vector[i].Mul(exponents.vector[i])
	}
	return &r
}
// Times scales every ciphertext in the vector by the same scalar.
func (e *ElGamalVector) Times(scalar *big.Int) *ElGamalVector {
	result := &ElGamalVector{vector: make([]*ElGamal, 0, len(e.vector))}
	for _, ct := range e.vector {
		result.vector = append(result.vector, ct.Mul(scalar))
	}
	return result
}

View File

@ -0,0 +1,201 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math/big"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "golang.org/x/crypto/sha3"
// FieldVector is a vector of scalars; all arithmetic on it is performed
// modulo the bn256 group order.
type FieldVector struct {
	vector []*big.Int
}
// NewFieldVector wraps input as a FieldVector. The slice is NOT copied;
// the caller must not mutate it afterwards.
func NewFieldVector(input []*big.Int) *FieldVector {
	return &FieldVector{vector: input}
}
// NewFieldVectorRandomFilled returns a vector of capacity fresh random
// scalars drawn via RandomScalarFixed.
func NewFieldVectorRandomFilled(capacity int) *FieldVector {
	vec := make([]*big.Int, capacity)
	for i := 0; i < capacity; i++ {
		vec[i] = RandomScalarFixed()
	}
	return &FieldVector{vector: vec}
}
// Length returns the number of scalars in the vector.
func (fv *FieldVector) Length() int {
	return len(fv.vector)
}
// Slice returns a deep copy of the elements in [start, end).
func (fv *FieldVector) Slice(start, end int) *FieldVector {
	var out []*big.Int
	for i := start; i < end; i++ {
		out = append(out, new(big.Int).Set(fv.vector[i]))
	}
	return &FieldVector{vector: out}
}
// Clone returns a deep copy of the whole vector.
func (fv *FieldVector) Clone() *FieldVector {
	return fv.Slice(0, len(fv.vector))
}
// SliceRaw returns deep copies of the elements in [start, end) as a
// bare []*big.Int rather than a FieldVector.
func (fv *FieldVector) SliceRaw(start, end int) []*big.Int {
	var out []*big.Int
	for i := start; i < end; i++ {
		out = append(out, new(big.Int).Set(fv.vector[i]))
	}
	return out
}
// Flip returns a copy in which element 0 stays in place and the rest of
// the vector is reversed: out[i] = in[(n-i) mod n].
func (fv *FieldVector) Flip() *FieldVector {
	n := len(fv.vector)
	var result FieldVector
	for i := 0; i < n; i++ {
		result.vector = append(result.vector, new(big.Int).Set(fv.vector[(n-i)%n]))
	}
	return &result
}
// Sum returns the sum of all elements, reduced mod the group order
// after every addition. An empty vector sums to zero.
func (fv *FieldVector) Sum() *big.Int {
	acc := new(big.Int)
	for _, v := range fv.vector {
		acc.Add(acc, v)
		acc.Mod(acc, bn256.Order)
	}
	return acc
}
// Add returns the element-wise sum mod the group order; the two vectors
// must have equal length.
func (fv *FieldVector) Add(addendum *FieldVector) *FieldVector {
	if len(fv.vector) != len(addendum.vector) {
		panic("mismatched number of elements")
	}
	var result FieldVector
	for i, v := range fv.vector {
		sum := new(big.Int).Add(v, addendum.vector[i])
		result.vector = append(result.vector, sum.Mod(sum, bn256.Order))
	}
	return &result
}
// AddConstant adds the same constant c to every element, mod the group
// order.
func (gv *FieldVector) AddConstant(c *big.Int) *FieldVector {
	var result FieldVector
	for _, v := range gv.vector {
		sum := new(big.Int).Add(v, c)
		result.vector = append(result.vector, sum.Mod(sum, bn256.Order))
	}
	return &result
}
// Hadamard returns the element-wise product mod the group order; the
// two vectors must have equal length.
func (fv *FieldVector) Hadamard(exponent *FieldVector) *FieldVector {
	if len(fv.vector) != len(exponent.vector) {
		panic("mismatched number of elements")
	}
	var result FieldVector
	for i, v := range fv.vector {
		prod := new(big.Int).Mul(v, exponent.vector[i])
		result.vector = append(result.vector, prod.Mod(prod, bn256.Order))
	}
	return &result
}
// InnerProduct returns sum_i fv[i]*exponent[i] mod the group order; the
// two vectors must have equal length.
func (fv *FieldVector) InnerProduct(exponent *FieldVector) *big.Int {
	if len(fv.vector) != len(exponent.vector) {
		panic("mismatched number of elements")
	}
	acc := new(big.Int)
	for i, v := range fv.vector {
		term := new(big.Int).Mul(v, exponent.vector[i])
		acc.Add(acc, term.Mod(term, bn256.Order))
		acc.Mod(acc, bn256.Order)
	}
	return acc
}
// Negate returns the element-wise additive inverse mod the group order.
func (fv *FieldVector) Negate() *FieldVector {
	var result FieldVector
	for _, v := range fv.vector {
		neg := new(big.Int).Neg(v)
		result.vector = append(result.vector, neg.Mod(neg, bn256.Order))
	}
	return &result
}
// Times multiplies every element by the same scalar, mod the group
// order.
func (fv *FieldVector) Times(multiplier *big.Int) *FieldVector {
	var result FieldVector
	for _, v := range fv.vector {
		prod := new(big.Int).Mul(v, multiplier)
		result.vector = append(result.vector, prod.Mod(prod, bn256.Order))
	}
	return &result
}
// Invert returns the element-wise modular inverse mod bn256.Order.
//
// NOTE(review): big.Int.ModInverse returns nil when the element is not
// invertible (e.g. zero mod Order); that nil would propagate into the
// result vector and crash later. Confirm inputs are always nonzero.
func (fv *FieldVector) Invert() *FieldVector {
	var result FieldVector
	for i := range fv.vector {
		result.vector = append(result.vector, new(big.Int).ModInverse(fv.vector[i], bn256.Order))
	}
	return &result
}
// Concat returns a deep copy of fv followed by a deep copy of addendum.
func (fv *FieldVector) Concat(addendum *FieldVector) *FieldVector {
	var result FieldVector
	for _, v := range fv.vector {
		result.vector = append(result.vector, new(big.Int).Set(v))
	}
	for _, v := range addendum.vector {
		result.vector = append(result.vector, new(big.Int).Set(v))
	}
	return &result
}
// Extract returns deep copies of the elements at even indices when
// parity is false, or at odd indices when parity is true.
func (fv *FieldVector) Extract(parity bool) *FieldVector {
	want := 0
	if parity {
		want = 1
	}
	var result FieldVector
	for i, v := range fv.vector {
		if i%2 == want {
			result.vector = append(result.vector, new(big.Int).Set(v))
		}
	}
	return &result
}

View File

@ -0,0 +1,110 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "fmt"
import "math/big"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "golang.org/x/crypto/sha3"
// G is the protocol base point; an alias of global_pedersen_values.G
// (assigned in init below).
var G *bn256.G1

// global_pedersen_values holds the shared Pedersen generator basis,
// built exactly once in init and then treated as read-only.
var global_pedersen_values PedersenVectorCommitment
// init derives every protocol generator deterministically by hashing
// PROTOCOL_CONSTANT-based labels to curve points, so all nodes compute
// an identical basis, then publishes the package-level aliases.
func init() {
	var zeroes [64]byte
	var gs, hs []*bn256.G1
	// primary generators G ("mybase") and H
	global_pedersen_values.G = HashToPoint(HashtoNumber([]byte(PROTOCOL_CONSTANT + "G"))) // this is same as mybase or vice-versa
	global_pedersen_values.H = HashToPoint(HashtoNumber([]byte(PROTOCOL_CONSTANT + "H")))
	// GSUM starts from an all-zero encoding.
	// NOTE(review): the Unmarshal error is ignored -- confirm the
	// zero buffer always decodes to the intended identity element.
	global_pedersen_values.GSUM = new(bn256.G1)
	global_pedersen_values.GSUM.Unmarshal(zeroes[:])
	// 128 vector generators per family, derived from the label plus the
	// zero-padded hex index; GSUM accumulates the G side only.
	for i := 0; i < 128; i++ {
		gs = append(gs, HashToPoint(HashtoNumber(append([]byte(PROTOCOL_CONSTANT+"G"), hextobytes(makestring64(fmt.Sprintf("%x", i)))...))))
		hs = append(hs, HashToPoint(HashtoNumber(append([]byte(PROTOCOL_CONSTANT+"H"), hextobytes(makestring64(fmt.Sprintf("%x", i)))...))))
		global_pedersen_values.GSUM = new(bn256.G1).Add(global_pedersen_values.GSUM, gs[i])
	}
	global_pedersen_values.Gs = NewPointVector(gs)
	global_pedersen_values.Hs = NewPointVector(hs)
	// also initialize elgamal_zero and the package-level aliases
	ElGamal_ZERO = new(bn256.G1).ScalarMult(global_pedersen_values.G, new(big.Int).SetUint64(0))
	ElGamal_ZERO_string = ElGamal_ZERO.String()
	ElGamal_BASE_G = global_pedersen_values.G
	G = global_pedersen_values.G
	((*bn256.G1)(&GPoint)).Set(G) // setup base point
	// fmt.Printf("basepoint %s on %x\n", G.String(), G.Marshal())
}
// PedersenCommitmentNew commits to a single scalar:
// Result = G*value + H*Randomness (see Commit).
type PedersenCommitmentNew struct {
	G          *bn256.G1 // value generator
	H          *bn256.G1 // blinding generator
	Randomness *big.Int  // blinding scalar chosen by Commit
	Result     *bn256.G1 // the commitment point
}
// NewPedersenCommitmentNew returns a commitment shell bound to the
// global G and H generators.
func NewPedersenCommitmentNew() (p *PedersenCommitmentNew) {
	p = &PedersenCommitmentNew{
		G: global_pedersen_values.G,
		H: global_pedersen_values.H,
	}
	return
}
// Commit commits to value with fresh randomness r, setting
// Result = G*value + H*r, and returns p for chaining.
func (p *PedersenCommitmentNew) Commit(value *big.Int) *PedersenCommitmentNew {
	p.Randomness = RandomScalarFixed()
	gv := new(bn256.G1).ScalarMult(p.G, value)
	hr := new(bn256.G1).ScalarMult(p.H, p.Randomness)
	p.Result = new(bn256.G1).Add(gv, hr)
	return p
}
// PedersenVectorCommitment holds the generator basis and the state of a
// vector Pedersen commitment (see Commit).
type PedersenVectorCommitment struct {
	G          *bn256.G1    // primary generator
	H          *bn256.G1    // blinding generator
	GSUM       *bn256.G1    // precomputed sum of all Gs (built in init)
	Gs         *PointVector // 128 generators for the g-side vector
	Hs         *PointVector // 128 generators for the h-side vector
	Randomness *big.Int     // blinding scalar of the latest Commit
	Result     *bn256.G1    // commitment produced by the latest Commit
	gvalues    *FieldVector // not used in this file -- TODO confirm external use
	hvalues    *FieldVector // not used in this file -- TODO confirm external use
}
// NewPedersenVectorCommitment returns a commitment pre-loaded with the
// global generator basis.
//
// NOTE(review): this is a shallow struct copy -- the G1 pointers and
// the Gs/Hs vectors are shared with global_pedersen_values, so callers
// must treat those fields as read-only. Confirm no caller mutates them.
func NewPedersenVectorCommitment() (p *PedersenVectorCommitment) {
	p = &PedersenVectorCommitment{}
	*p = global_pedersen_values
	return
}
// Commit commits to the two scalar vectors with fresh randomness r,
// setting Result = H*r + <Gs, gvalues> + <Hs, hvalues>, and returns p
// for chaining.
func (p *PedersenVectorCommitment) Commit(gvalues, hvalues *FieldVector) *PedersenVectorCommitment {
	p.Randomness = RandomScalarFixed()
	acc := new(bn256.G1).ScalarMult(p.H, p.Randomness)
	acc = new(bn256.G1).Add(acc, p.Gs.MultiExponentiate(gvalues))
	acc = new(bn256.G1).Add(acc, p.Hs.MultiExponentiate(hvalues))
	p.Result = new(bn256.G1).Set(acc)
	return p
}

View File

@ -0,0 +1,192 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math/big"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "golang.org/x/crypto/sha3"
// TODO: evaluate other curves such as BLS12 (used by zcash) or BLS24,
// providing ~200 bits of security, which may be required for long-term
// use (say 50 years).

// PointVector is a vector of bn256.G1 curve points, typically used as a
// Pedersen generator basis.
type PointVector struct {
	vector []*bn256.G1
}
// NewPointVector wraps input as a PointVector. The slice is NOT copied;
// the caller must not mutate it afterwards.
func NewPointVector(input []*bn256.G1) *PointVector {
	return &PointVector{vector: input}
}
// Length returns the number of points in the vector.
func (gv *PointVector) Length() int {
	return len(gv.vector)
}
// Slice returns a deep copy of the points in [start, end).
func (gv *PointVector) Slice(start, end int) *PointVector {
	var out []*bn256.G1
	for i := start; i < end; i++ {
		out = append(out, new(bn256.G1).Set(gv.vector[i]))
	}
	return &PointVector{vector: out}
}
// Commit computes the multi-exponentiation sum_i gv[i] * exponent[i].
// Panics when the exponent slice length differs from the vector length.
//
// Cleanup: the previous version built an unused `zero` point and
// initialized the accumulator twice (an Unmarshal of a zero buffer that
// was immediately overwritten by ScalarMult(G, 0)); both were dead
// work with no observable effect.
//
// TODO(upstream): the original carried the note "a bug exists somewhere
// deep here" -- still unverified; keep an eye on this path.
func (gv *PointVector) Commit(exponent []*big.Int) *bn256.G1 {
	if len(gv.vector) != len(exponent) {
		panic("mismatched number of elements")
	}
	var accumulator bn256.G1
	accumulator.ScalarMult(G, new(big.Int)) // identity element: G * 0
	for i := range gv.vector {
		var tmp, accopy bn256.G1
		tmp.ScalarMult(gv.vector[i], exponent[i])
		accopy.Set(&accumulator)
		accumulator.Add(&accopy, &tmp)
	}
	return &accumulator
}
// Sum adds up every point in the vector, starting from the identity.
func (gv *PointVector) Sum() *bn256.G1 {
	var acc bn256.G1
	acc.ScalarMult(G, new(big.Int)) // identity element: G * 0
	for _, p := range gv.vector {
		var prev bn256.G1
		prev.Set(&acc)
		acc.Add(&prev, p)
	}
	return &acc
}
// Add returns the element-wise sum of two equal-length point vectors.
func (gv *PointVector) Add(addendum *PointVector) *PointVector {
	if len(gv.vector) != len(addendum.vector) {
		panic("mismatched number of elements")
	}
	out := make([]*bn256.G1, len(gv.vector))
	for i := range out {
		out[i] = new(bn256.G1).Add(gv.vector[i], addendum.vector[i])
	}
	return &PointVector{vector: out}
}
// Hadamard multiplies each point by the matching scalar; the exponent
// slice must have the same length as the vector.
func (gv *PointVector) Hadamard(exponent []*big.Int) *PointVector {
	if len(gv.vector) != len(exponent) {
		panic("mismatched number of elements")
	}
	out := make([]*bn256.G1, len(gv.vector))
	for i := range out {
		out[i] = new(bn256.G1).ScalarMult(gv.vector[i], exponent[i])
	}
	return &PointVector{vector: out}
}
// Negate returns a vector with every point negated.
func (gv *PointVector) Negate() *PointVector {
	out := make([]*bn256.G1, len(gv.vector))
	for i, p := range gv.vector {
		out[i] = new(bn256.G1).Neg(p)
	}
	return &PointVector{vector: out}
}
// Times multiplies every point by the same scalar.
func (gv *PointVector) Times(multiplier *big.Int) *PointVector {
	out := make([]*bn256.G1, len(gv.vector))
	for i, p := range gv.vector {
		out[i] = new(bn256.G1).ScalarMult(p, multiplier)
	}
	return &PointVector{vector: out}
}
// Extract returns deep copies of the points at even indices when parity
// is false, or at odd indices when parity is true.
func (gv *PointVector) Extract(parity bool) *PointVector {
	want := 0
	if parity {
		want = 1
	}
	var result PointVector
	for i, p := range gv.vector {
		if i%2 == want {
			result.vector = append(result.vector, new(bn256.G1).Set(p))
		}
	}
	return &result
}
// Concat returns a deep copy of gv followed by a deep copy of addendum.
func (gv *PointVector) Concat(addendum *PointVector) *PointVector {
	out := make([]*bn256.G1, 0, len(gv.vector)+len(addendum.vector))
	for _, p := range gv.vector {
		out = append(out, new(bn256.G1).Set(p))
	}
	for _, p := range addendum.vector {
		out = append(out, new(bn256.G1).Set(p))
	}
	return &PointVector{vector: out}
}
// MultiExponentiate computes sum_i pv[i] * fv[i], starting from the
// identity. Assumes len(pv.vector) >= len(fv.vector) -- not checked.
func (pv *PointVector) MultiExponentiate(fv *FieldVector) *bn256.G1 {
	var acc bn256.G1
	acc.ScalarMult(G, new(big.Int)) // identity element: G * 0
	for i, s := range fv.vector {
		var prev bn256.G1
		prev.Set(&acc)
		acc.Add(&prev, new(bn256.G1).ScalarMult(pv.vector[i], s))
	}
	return &acc
}

View File

@ -0,0 +1,102 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "fmt"
import "math/big"
//import "encoding/binary"
//import "crypto/rand"
//import "github.com/deroproject/derohe/crypto/bn256"
// BNRed is a big.Int wrapper intended to hold values in reduced form
// modulo bn256's Order. Reduction itself is the caller's
// responsibility; the type performs no reduction of its own.
type BNRed big.Int
// RandomScalarBNRed draws a fresh random scalar (via RandomScalar,
// defined elsewhere in the package) and casts it to BNRed.
func RandomScalarBNRed() *BNRed {
	return (*BNRed)(RandomScalar())
}
// GetBNRed copies x into a fresh BNRed (x itself is not retained).
func GetBNRed(x *big.Int) *BNRed {
	v := new(big.Int).Set(x)
	return (*BNRed)(v)
}
// BigInt returns a defensive copy of x as a *big.Int.
func (x *BNRed) BigInt() *big.Int {
	result := new(big.Int)
	result.Set((*big.Int)(x))
	return result
}
// SetBytes interprets buf as a big-endian unsigned integer, stores it
// in x, and returns x for chaining. No reduction mod Order is done.
func (x *BNRed) SetBytes(buf []byte) *BNRed {
	((*big.Int)(x)).SetBytes(buf)
	return x
}
// String returns x as lowercase hex without a 0x prefix.
func (x *BNRed) String() string {
	return ((*big.Int)(x)).Text(16)
}
// Text renders x in the given base (see big.Int.Text for valid bases).
func (x *BNRed) Text(base int) string {
	return ((*big.Int)(x)).Text(base)
}
// MarshalText implements encoding.TextMarshaler, emitting lowercase hex
// with no 0x prefix (the inverse of UnmarshalText).
func (x *BNRed) MarshalText() ([]byte, error) {
	s := ((*big.Int)(x)).Text(16)
	return []byte(s), nil
}
// UnmarshalText implements encoding.TextUnmarshaler: text is parsed as
// hex (a 0x prefix is prepended before scanning into the big.Int).
func (x *BNRed) UnmarshalText(text []byte) error {
	_, err := fmt.Sscan("0x"+string(text), ((*big.Int)(x)))
	return err
}
func FillBytes(x *big.Int, xbytes []byte) {
// FillBytes not available pre 1.15
bb := x.Bytes()
if len(bb) > 32 {
panic(fmt.Sprintf("number not representable in 32 bytes %d %x", len(bb), bb))
}
for i := range xbytes { // optimized to memclr
xbytes[i] = 0
}
j := 32
for i := len(bb) - 1; i >= 0; i-- {
j--
xbytes[j] = bb[i]
}
}
/*
// this will return fixed random scalar
func RandomScalarFixed() *big.Int {
//return new(big.Int).Set(fixed)
return RandomScalar()
}
type KeyPair struct {
x *big.Int // secret key
y *bn256.G1 // public key
}
*/

View File

@ -0,0 +1,57 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "fmt"
// POINT_SIZE is the serialized size of a compressed curve point in bytes.
const POINT_SIZE = 33
// FIELDELEMENT_SIZE is the serialized size of a scalar / field element in bytes.
const FIELDELEMENT_SIZE = 32
// MAX_AMOUNT is the upper bound on transferable amounts.
// NOTE(review): this literal is exactly 2^64, not 2^64 - 1 as the old
// comment claimed -- confirm which bound the protocol intends.
const MAX_AMOUNT = 18446744073709551616
// PROTOCOL_CONSTANT is the domain-separation tag used when deriving generators.
const PROTOCOL_CONSTANT = "DERO"
// IsPowerOf2 reports whether num is a positive power of two
// (1, 2, 4, 8, ...). Zero and negative numbers return false.
func IsPowerOf2(num int) bool {
	return num > 0 && num&(num-1) == 0
}
// GetPowerof2 returns k such that num == 2^k. It panics when num is not
// a positive power of two.
//
// Fixes: the panic message for the first guard said "cannot be less
// than 0" although the condition also rejects zero; the power-of-two
// check is now done inline (bit trick) so the function is
// self-contained, with behavior identical to the IsPowerOf2-based
// original.
func GetPowerof2(num int) int {
	if num <= 0 {
		panic("number must be positive")
	}
	if num&(num-1) != 0 {
		panic(fmt.Sprintf("number(%d) must be power of 2", num))
	}
	power := 0
	for n := num; n > 1; n >>= 1 {
		power++
	}
	return power
}

View File

@ -0,0 +1,297 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math/big"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
// FieldVectorPolynomial is a polynomial whose coefficients are vectors
// of scalars: P(x) = sum_i coefficients[i] * x^i (see Evaluate).
type FieldVectorPolynomial struct {
	coefficients []*FieldVector
}
// NewFieldVectorPolynomial builds a polynomial whose i-th coefficient
// vector is a deep copy of inputs[i].
func NewFieldVectorPolynomial(inputs ...*FieldVector) *FieldVectorPolynomial {
	coeffs := make([]*FieldVector, 0, len(inputs))
	for _, input := range inputs {
		coeffs = append(coeffs, input.Clone())
	}
	return &FieldVectorPolynomial{coefficients: coeffs}
}
// Length returns the number of coefficient vectors (degree + 1).
func (fv *FieldVectorPolynomial) Length() int {
    return len(fv.coefficients)
}
// Evaluate computes the polynomial at scalar x:
// coefficients[0] + coefficients[1]*x + coefficients[2]*x^2 + ...
// with all scalar arithmetic reduced mod bn256.Order.
func (fv *FieldVectorPolynomial) Evaluate(x *big.Int) *FieldVector {
    sum := fv.coefficients[0].Clone()
    xPower := new(big.Int).Set(x) // holds x^i for the current term
    for _, coeff := range fv.coefficients[1:] {
        sum = sum.Add(coeff.Times(xPower))
        xPower.Mod(new(big.Int).Mul(xPower, x), bn256.Order)
    }
    return sum
}
// InnerProduct multiplies the two vector polynomials, taking the scalar
// inner product of coefficient vectors as the coefficient product. The
// result has degree deg(fv) + deg(other); all terms are reduced mod
// bn256.Order.
func (fv *FieldVectorPolynomial) InnerProduct(other *FieldVectorPolynomial) []*big.Int {
    result := make([]*big.Int, fv.Length()+other.Length()-1)
    for i := range result {
        result[i] = new(big.Int) // zero-initialised accumulator
    }
    for i, a := range fv.coefficients {
        for j, b := range other.coefficients {
            term := new(big.Int).Add(result[i+j], a.InnerProduct(b))
            result[i+j].Mod(term, bn256.Order)
        }
    }
    return result
}
/*
type PedersenCommitment struct {
X *big.Int
R *big.Int
Params *GeneratorParams
}
func NewPedersenCommitment(params *GeneratorParams, x, r *big.Int) *PedersenCommitment {
pc := &PedersenCommitment{Params: params, X: new(big.Int).Set(x), R: new(big.Int).Set(r)}
return pc
}
func (pc *PedersenCommitment) Commit() *bn256.G1 {
var left, right, result bn256.G1
left.ScalarMult(pc.Params.G, pc.X)
right.ScalarMult(pc.Params.H, pc.R)
result.Add(&left, &right)
return &result
}
func (pc *PedersenCommitment) Add(other *PedersenCommitment) *PedersenCommitment {
var x, r big.Int
x.Mod(new(big.Int).Add(pc.X, other.X), bn256.Order)
r.Mod(new(big.Int).Add(pc.R, other.R), bn256.Order)
return NewPedersenCommitment(pc.Params, &x, &r)
}
func (pc *PedersenCommitment) Times(constant *big.Int) *PedersenCommitment {
var x, r big.Int
x.Mod(new(big.Int).Mul(pc.X, constant), bn256.Order)
r.Mod(new(big.Int).Mul(pc.R, constant), bn256.Order)
return NewPedersenCommitment(pc.Params, &x, &r)
}
type PolyCommitment struct {
coefficient_commitments []*PedersenCommitment
Params *GeneratorParams
}
func NewPolyCommitment(params *GeneratorParams, coefficients []*big.Int) *PolyCommitment {
pc := &PolyCommitment{Params: params}
pc.coefficient_commitments = append(pc.coefficient_commitments, NewPedersenCommitment(params, coefficients[0], new(big.Int).SetUint64(0)))
for i := 1; i < len(coefficients); i++ {
pc.coefficient_commitments = append(pc.coefficient_commitments, NewPedersenCommitment(params, coefficients[i], RandomScalarFixed()))
}
return pc
}
func (pc *PolyCommitment) GetCommitments() []*bn256.G1 {
var result []*bn256.G1
for i := 1; i < len(pc.coefficient_commitments); i++ {
result = append(result, pc.coefficient_commitments[i].Commit())
}
return result
}
func (pc *PolyCommitment) Evaluate(constant *big.Int) *PedersenCommitment {
result := pc.coefficient_commitments[0]
accumulator := new(big.Int).Set(constant)
for i := 1; i < len(pc.coefficient_commitments); i++ {
tmp := new(big.Int).Set(accumulator)
result = result.Add(pc.coefficient_commitments[i].Times(accumulator))
accumulator.Mod(new(big.Int).Mul(tmp, constant), bn256.Order)
}
return result
}
*/
/*
// bother FieldVector and GeneratorVector satisfy this
type Vector interface{
Length() int
Extract(parity bool) Vector
Add(other Vector)Vector
Hadamard( []*big.Int) Vector
Times (*big.Int) Vector
Negate() Vector
}
*/
// check this https://pdfs.semanticscholar.org/d38d/e48ee4127205a0f25d61980c8f241718b66e.pdf
// https://arxiv.org/pdf/1802.03932.pdf

// unity is a primitive 2^28-th root of unity mod bn256.Order (set in init);
// the FFT helpers below derive their twiddle factors from it.
var unity *big.Int
// init sets the package-level FFT root of unity.
func init() {
    // primitive 2^28th root of unity modulo q (bn256.Order)
    unity, _ = new(big.Int).SetString("14a3074b02521e3b1ed9852e5028452693e87be4e910500c7ba9bbddb2f46edd", 16)
}
// fft_FieldVector computes the (inverse) FFT of a scalar vector mod
// bn256.Order via Cooley-Tukey recursion on even/odd halves. Length must be
// 1 or even at every level — in practice a power of two, since the twiddle
// factor comes from a primitive 2^28-th root of unity.
// NOTE(review): a length-1 input is returned as-is, not copied.
func fft_FieldVector(input *FieldVector, inverse bool) *FieldVector {
    length := input.Length()
    if length == 1 {
        return input
    }
    // length must be a multiple of 2 (TODO: enforce power of two)
    if length%2 != 0 {
        panic("length must be multiple of 2")
    }
    //unity,_ := new(big.Int).SetString("14a3074b02521e3b1ed9852e5028452693e87be4e910500c7ba9bbddb2f46edd",16)
    // omega = unity^(2^28/length): a primitive length-th root of unity
    omega := new(big.Int).Exp(unity, new(big.Int).SetUint64((1<<28)/uint64(length)), bn256.Order)
    if inverse {
        omega = new(big.Int).ModInverse(omega, bn256.Order)
    }
    even := fft_FieldVector(input.Extract(false), inverse)
    odd := fft_FieldVector(input.Extract(true), inverse)
    // twiddle factors omega^0 .. omega^(length/2 - 1)
    omegas := []*big.Int{new(big.Int).SetUint64(1)}
    for i := 1; i < length/2; i++ {
        omegas = append(omegas, new(big.Int).Mod(new(big.Int).Mul(omegas[i-1], omega), bn256.Order))
    }
    omegasv := NewFieldVector(omegas)
    // butterfly: first half = even + omega*odd, second half = even - omega*odd
    result := even.Add(odd.Hadamard(omegasv)).Concat(even.Add(odd.Hadamard(omegasv).Negate()))
    if inverse {
        // divide by 2 at each level so the full inverse scales by 1/length
        result = result.Times(new(big.Int).ModInverse(new(big.Int).SetUint64(2), bn256.Order))
    }
    return result
}
// fftints is an alternate implementation of fft_FieldVector operating
// directly on []*big.Int, kept for cross-checking (see the commented block
// in Convolution). Forward transform only — there is no inverse flag.
func fftints(input []*big.Int) (result []*big.Int) {
    size := len(input)
    if size == 1 {
        return input
    }
    //require(size % 2 == 0, "Input size is not a power of 2!");
    // local unity shadows the package-level var with the same constant value
    unity, _ := new(big.Int).SetString("14a3074b02521e3b1ed9852e5028452693e87be4e910500c7ba9bbddb2f46edd", 16)
    omega := new(big.Int).Exp(unity, new(big.Int).SetUint64((1<<28)/uint64(size)), bn256.Order)
    even := fftints(extractbits(input, 0))
    odd := fftints(extractbits(input, 1))
    omega_run := new(big.Int).SetUint64(1) // running power omega^i
    result = make([]*big.Int, len(input), len(input))
    for i := 0; i < len(input)/2; i++ {
        // butterfly: result[i] = even[i] + omega^i*odd[i], result[i+n/2] = even[i] - omega^i*odd[i]
        temp := new(big.Int).Mod(new(big.Int).Mul(odd[i], omega_run), bn256.Order)
        result[i] = new(big.Int).Mod(new(big.Int).Add(even[i], temp), bn256.Order)
        result[i+size/2] = new(big.Int).Mod(new(big.Int).Sub(even[i], temp), bn256.Order)
        omega_run = new(big.Int).Mod(new(big.Int).Mul(omega, omega_run), bn256.Order)
    }
    return result
}
func extractbits(input []*big.Int, parity int) (result []*big.Int) {
result = make([]*big.Int, len(input)/2, len(input)/2)
for i := 0; i < len(input)/2; i++ {
result[i] = new(big.Int).Set(input[2*i+parity])
}
return
}
// fft_GeneratorVector is the point-vector analogue of fft_FieldVector:
// the same Cooley-Tukey recursion, but the butterfly adds/negates curve
// points while the twiddle factors remain scalars.
func fft_GeneratorVector(input *PointVector, inverse bool) *PointVector {
    length := input.Length()
    if length == 1 {
        return input
    }
    // length must be a multiple of 2 (TODO: enforce power of two)
    if length%2 != 0 {
        panic("length must be multiple of 2")
    }
    // unity,_ := new(big.Int).SetString("14a3074b02521e3b1ed9852e5028452693e87be4e910500c7ba9bbddb2f46edd",16)
    // omega = unity^(2^28/length): a primitive length-th root of unity
    omega := new(big.Int).Exp(unity, new(big.Int).SetUint64((1<<28)/uint64(length)), bn256.Order)
    if inverse {
        omega = new(big.Int).ModInverse(omega, bn256.Order)
    }
    even := fft_GeneratorVector(input.Extract(false), inverse)
    //fmt.Printf("exponent_fft %d %s \n",i, exponent_fft.vector[i].Text(16))
    odd := fft_GeneratorVector(input.Extract(true), inverse)
    omegas := []*big.Int{new(big.Int).SetUint64(1)}
    for i := 1; i < length/2; i++ {
        omegas = append(omegas, new(big.Int).Mod(new(big.Int).Mul(omegas[i-1], omega), bn256.Order))
    }
    omegasv := omegas
    // butterfly: first half = even + omega*odd, second half = even - omega*odd
    result := even.Add(odd.Hadamard(omegasv)).Concat(even.Add(odd.Hadamard(omegasv).Negate()))
    if inverse {
        // divide by 2 per level so the full inverse scales by 1/length
        result = result.Times(new(big.Int).ModInverse(new(big.Int).SetUint64(2), bn256.Order))
    }
    return result
}
// Convolution computes the convolution of the (flipped) exponent vector
// with the point vector base using the FFT helpers above: transform both,
// multiply pointwise, fold the halves, then inverse-transform.
// using the optimization described here https://dsp.stackexchange.com/a/30699
func Convolution(exponent *FieldVector, base *PointVector) *PointVector {
    size := base.Length()
    exponent_fft := fft_FieldVector(exponent.Flip(), false)
    /*exponent_fft2 := fftints( exponent.Flip().vector) // aternate implementation proof checking
      for i := range exponent_fft.vector{
          fmt.Printf("exponent_fft %d %s \n",i, exponent_fft.vector[i].Text(16))
          fmt.Printf("exponent_ff2 %d %s \n",i, exponent_fft2[i].Text(16))
      }
    */
    temp := fft_GeneratorVector(base, false).Hadamard(exponent_fft.vector)
    // fold the two halves, halve the values, then inverse-FFT the half-size vector
    return fft_GeneratorVector(temp.Slice(0, size/2).Add(temp.Slice(size/2, size)).Times(new(big.Int).ModInverse(new(big.Int).SetUint64(2), bn256.Order)), true)
}

View File

@ -0,0 +1,72 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "fmt"
import "math/big"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
// NewGeneratorParams deterministically derives the protocol's generator
// set from PROTOCOL_CONSTANT: base points G and H, `count` generators each
// for Gs and Hs (tag + 64-hex-digit index), and GSUM = sum of all Gs.
func NewGeneratorParams(count int) *GeneratorParams {
    GP := &GeneratorParams{}
    var zeroes [64]byte
    GP.G = HashToPoint(HashtoNumber([]byte(PROTOCOL_CONSTANT + "G"))) // this is same as mybase or vice-versa
    GP.H = HashToPoint(HashtoNumber([]byte(PROTOCOL_CONSTANT + "H")))
    var gs, hs []*bn256.G1
    // seed GSUM with the point decoded from 64 zero bytes
    // NOTE(review): Unmarshal's error is ignored here — confirm the all-zero
    // encoding decodes to the intended identity element.
    GP.GSUM = new(bn256.G1)
    GP.GSUM.Unmarshal(zeroes[:])
    for i := 0; i < count; i++ {
        gs = append(gs, HashToPoint(HashtoNumber(append([]byte(PROTOCOL_CONSTANT+"G"), hextobytes(makestring64(fmt.Sprintf("%x", i)))...))))
        hs = append(hs, HashToPoint(HashtoNumber(append([]byte(PROTOCOL_CONSTANT+"H"), hextobytes(makestring64(fmt.Sprintf("%x", i)))...))))
        GP.GSUM = new(bn256.G1).Add(GP.GSUM, gs[i])
    }
    GP.Gs = NewPointVector(gs)
    GP.Hs = NewPointVector(hs)
    return GP
}
// NewGeneratorParams3 assembles GeneratorParams from a caller-supplied H
// and generator vectors; only G is derived from the protocol constant
// (same base as NewGeneratorParams). GSUM is left unset.
func NewGeneratorParams3(h *bn256.G1, gs, hs *PointVector) *GeneratorParams {
    return &GeneratorParams{
        G:  HashToPoint(HashtoNumber([]byte(PROTOCOL_CONSTANT + "G"))),
        H:  h,
        Gs: gs,
        Hs: hs,
    }
}
// Commit computes the Pedersen vector commitment
// blind*H + Σ gexps[i]*Gs[i] (+ Σ hexps[i]*Hs[i] when hexps is non-nil).
func (gp *GeneratorParams) Commit(blind *big.Int, gexps, hexps *FieldVector) *bn256.G1 {
    acc := new(bn256.G1).ScalarMult(gp.H, blind)
    for i, e := range gexps.vector {
        acc = new(bn256.G1).Add(acc, new(bn256.G1).ScalarMult(gp.Gs.vector[i], e))
    }
    if hexps != nil { // the H-side exponents are optional
        for i, e := range hexps.vector {
            acc = new(bn256.G1).Add(acc, new(bn256.G1).ScalarMult(gp.Hs.vector[i], e))
        }
    }
    return acc
}

View File

@ -0,0 +1,73 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math/big"
import "encoding/hex"
//import "crypto/rand"
import "github.com/deroproject/derohe/cryptography/bn256"
// this file implements Big Number Reduced form with bn256's Order

// Point is a named form of bn256.G1 so encoding/serialization helper
// methods can be attached to it.
type Point bn256.G1

// GPoint is a package-level Point (zero value).
// NOTE(review): it is not referenced in this file — confirm its purpose.
var GPoint Point
// ScalarMult returns r*p as a fresh Point without modifying the receiver,
// so calls can be chained.
func (p *Point) ScalarMult(r *BNRed) (result *Point) {
    result = new(Point)
    (*bn256.G1)(result).ScalarMult((*bn256.G1)(p), (*big.Int)(r))
    return
}
// EncodeCompressed returns the point's compressed byte encoding.
func (p *Point) EncodeCompressed() []byte {
    return ((*bn256.G1)(p)).EncodeCompressed()
}
// DecodeCompressed sets the point from its compressed byte encoding.
func (p *Point) DecodeCompressed(i []byte) error {
    return ((*bn256.G1)(p)).DecodeCompressed(i)
}
// G1 exposes the underlying bn256.G1 value.
func (p *Point) G1() *bn256.G1 {
    return ((*bn256.G1)(p))
}
// Set copies x into p and returns p.
func (p *Point) Set(x *Point) *Point {
    return ((*Point)(((*bn256.G1)(p)).Set(((*bn256.G1)(x)))))
}
// String returns the raw compressed bytes as a string — this is binary
// data, NOT hex; use StringHex for a printable representation.
func (p *Point) String() string {
    return string(((*bn256.G1)(p)).EncodeCompressed())
}
// StringHex returns the compressed point as a hex string.
// (hex.EncodeToString already returns a string; the previous extra
// string(...) conversion was a no-op and has been removed.)
func (p *Point) StringHex() string {
    return hex.EncodeToString(((*bn256.G1)(p)).EncodeCompressed())
}
// MarshalText renders the compressed point as hex for text-based encoders.
func (p *Point) MarshalText() ([]byte, error) {
    return []byte(hex.EncodeToString(((*bn256.G1)(p)).EncodeCompressed())), nil
}
// UnmarshalText parses a hex string produced by MarshalText and decodes the
// compressed point into the receiver.
func (p *Point) UnmarshalText(text []byte) error {
    raw, err := hex.DecodeString(string(text))
    if err != nil {
        return err
    }
    return p.DecodeCompressed(raw)
}

View File

@ -0,0 +1,68 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "fmt"
import "encoding/hex"
// HashLength is the byte length of all hashes in this package.
const HashLength = 32

// Hash is a fixed 32-byte digest / identifier.
type Hash [HashLength]byte

// ZEROHASH is the all-zero hash (the Hash zero value).
var ZEROHASH Hash
// MarshalText renders the hash as 64 lowercase hex characters.
func (h Hash) MarshalText() ([]byte, error) {
    return []byte(hex.EncodeToString(h[:])), nil
}
// UnmarshalText parses a 64-character hex string into the hash.
// Fix: the hex decode error is no longer ignored — hex.DecodeString can
// return 32 valid bytes together with an error (e.g. trailing garbage after
// 64 valid digits), which the previous code silently accepted.
func (h *Hash) UnmarshalText(data []byte) (err error) {
    byteSlice, err := hex.DecodeString(string(data))
    if err != nil {
        return err
    }
    if len(byteSlice) != 32 {
        return fmt.Errorf("Incorrect hash size")
    }
    copy(h[:], byteSlice)
    return
}
// String renders the hash as 64 lowercase hex characters (the 32 bytes
// always encode to exactly 64 digits).
func (h Hash) String() string {
    return hex.EncodeToString(h[:])
}
// IsZero reports whether every byte of the hash is zero.
func (h Hash) IsZero() bool {
    return h == Hash{}
}
// HashHexToHash converts a hex string to a Hash in binary form, returning
// the zero hash on any decode error or wrong length.
// TODO this should be in crypto
func HashHexToHash(hash_hex string) (hash Hash) {
    raw, err := hex.DecodeString(hash_hex)
    if err != nil || len(raw) != 32 {
        return hash // zero hash signals failure
    }
    copy(hash[:], raw)
    return hash
}

View File

@ -0,0 +1,260 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "fmt"
import "math/big"
import "strings"

//import "crypto/rand"
import "encoding/hex"

import "github.com/deroproject/derohe/cryptography/bn256"

//import "golang.org/x/crypto/sha3"
import "github.com/deroproject/derohe/cryptography/sha3"
// the original try and increment method A Note on Hashing to BN Curves https://www.normalesup.org/~tibouchi/papers/bnhash-scis.pdf
// see this for a simplified version https://github.com/clearmatics/mobius/blob/7ad988b816b18e22424728329fc2b166d973a120/contracts/bn256g1.sol

// FIELD_MODULUS is the bn256 base-field prime p; GROUP_MODULUS is the group
// order. w and w1 hold SetString's success flags and are otherwise unused.
var FIELD_MODULUS, w = new(big.Int).SetString("30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47", 16)
var GROUP_MODULUS, w1 = new(big.Int).SetString("30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001", 16)
// this file basically implements curve based items

// GeneratorParams bundles the protocol's derived generator points:
// the two base points G and H, the generator vectors Gs/Hs, and
// GSUM, the running sum of Gs (see NewGeneratorParams).
type GeneratorParams struct {
    G    *bn256.G1
    H    *bn256.G1
    GSUM *bn256.G1

    Gs *PointVector
    Hs *PointVector
}
// converts a big int to 32 bytes, prepending zeroes
func ConvertBigIntToByte(x *big.Int) []byte {
var dummy [128]byte
joined := append(dummy[:], x.Bytes()...)
return joined[len(joined)-32:]
}
// HashtoNumber hashes input with legacy Keccak-256 and interprets the
// 32-byte digest as a big-endian integer. The result is NOT reduced mod
// any order; see reducedhash for the reduced form.
func HashtoNumber(input []byte) *big.Int {
    hasher := sha3.NewLegacyKeccak256()
    hasher.Write(input)
    return new(big.Int).SetBytes(hasher.Sum(nil))
}
// reducedhash hashes input (Keccak-256) and reduces the result by the
// bn256 curve order.
func reducedhash(input []byte) *big.Int {
    return new(big.Int).Mod(HashtoNumber(input), bn256.Order)
}
// ReducedHash is the exported duplicate of reducedhash: Keccak-256 of
// input reduced mod the bn256 curve order.
func ReducedHash(input []byte) *big.Int {
    return new(big.Int).Mod(HashtoNumber(input), bn256.Order)
}
// makestring64 left-pads input with '0' characters to a length of 64.
// Inputs already 64 characters or longer are returned unchanged.
// Fix: the previous prepend loop only terminated when len(input) reached
// exactly 64, so any longer input looped forever.
func makestring64(input string) string {
    if len(input) >= 64 {
        return input
    }
    return strings.Repeat("0", 64-len(input)) + input
}
// makestring66 left-pads input with '0' characters to 64 and appends "00",
// yielding a 66-character string.
// Fix: like makestring64, the previous prepend loop never terminated for
// inputs longer than 64 characters.
func makestring66(input string) string {
    if len(input) < 64 {
        input = strings.Repeat("0", 64-len(input)) + input
    }
    return input + "00"
}
// hextobytes decodes a hex string, panicking on malformed input (callers
// pass internally generated strings only).
func hextobytes(input string) []byte {
    decoded, err := hex.DecodeString(input)
    if err != nil {
        panic(err)
    }
    return decoded
}
// p = p(u) = 36u^4 + 36u^3 + 24u^2 + 6u + 1
// FIELD_ORDER is the bn256 base-field prime (same value as FIELD_MODULUS above).
var FIELD_ORDER, _ = new(big.Int).SetString("30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47", 16)

// Number of elements in the field (often called `q`)
// n = n(u) = 36u^4 + 36u^3 + 18u^2 + 6u + 1
var GEN_ORDER, _ = new(big.Int).SetString("30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001", 16)

// CURVE_B is the constant term b of the curve equation y^2 = x^3 + b.
var CURVE_B = new(big.Int).SetUint64(3)

// a = (p+1) / 4 — exponent used to take square roots mod p (valid since p ≡ 3 mod 4).
var CURVE_A, _ = new(big.Int).SetString("c19139cb84c680a6e14116da060561765e05aa45a1c72a34f082305b61f3f52", 16)
// HashToPointNew maps seed to a bn256 G1 point by try-and-increment:
// walk x upward from seed until x^3+b is a quadratic residue, then decode
// the resulting (x, y) pair through bn256's Unmarshal for validation.
func HashToPointNew(seed *big.Int) *bn256.G1 {
    y_squared := new(big.Int)
    one := new(big.Int).SetUint64(1)
    x := new(big.Int).Set(seed)
    // NOTE(review): the initial reduction is mod GEN_ORDER while the
    // increment loop reduces mod FIELD_ORDER — confirm this is intended.
    x.Mod(x, GEN_ORDER)
    for {
        beta, y := findYforX(x)
        if y != nil {
            // fmt.Printf("beta %s y %s\n", beta.String(),y.String())
            // y^2 == beta confirms beta was a quadratic residue
            y_squared.Mul(y, y)
            y_squared.Mod(y_squared, FIELD_ORDER)
            if beta.Cmp(y_squared) == 0 {
                // fmt.Printf("liesoncurve test %+v\n", isOnCurve(x,y))
                // fmt.Printf("x %s\n",x.Text(16))
                // fmt.Printf("y %s\n",y.Text(16))
                // encode x||y as two 32-byte big-endian words
                xstring := x.Text(16)
                ystring := y.Text(16)
                var point bn256.G1
                xbytes, err := hex.DecodeString(makestring64(xstring))
                if err != nil {
                    panic(err)
                }
                ybytes, err := hex.DecodeString(makestring64(ystring))
                if err != nil {
                    panic(err)
                }
                if _, err := point.Unmarshal(append(xbytes, ybytes...)); err == nil {
                    return &point
                } else {
                    // should be impossible: we verified y^2 == x^3 + b above
                    panic(fmt.Sprintf("not found err %s\n", err))
                }
            }
        }
        // not a residue: try the next x
        x.Add(x, one)
        x.Mod(x, FIELD_ORDER)
    }
}
/*
* Given X, find Y
*
* where y = sqrt(x^3 + b)
*
* Returns: (x^3 + b), y
**/
func findYforX(x *big.Int) (*big.Int, *big.Int) {
// beta = (x^3 + b) % p
xcube := new(big.Int).Exp(x, CURVE_B, FIELD_ORDER)
xcube.Add(xcube, CURVE_B)
beta := new(big.Int).Mod(xcube, FIELD_ORDER)
//beta := addmod(mulmod(mulmod(x, x, FIELD_ORDER), x, FIELD_ORDER), CURVE_B, FIELD_ORDER);
// y^2 = x^3 + b
// this acts like: y = sqrt(beta)
//ymod := new(big.Int).ModSqrt(beta,FIELD_ORDER) // this can return nil in some cases
y := new(big.Int).Exp(beta, CURVE_A, FIELD_ORDER)
return beta, y
}
/*
 * Verify if the X and Y coordinates represent a valid Point on the Curve
 *
 * Where the G1 curve is: y^2 = x^3 + b
 **/
func isOnCurve(x, y *big.Int) bool {
    // right-hand side: x^3 + b mod p
    rhs := new(big.Int).Exp(x, new(big.Int).SetUint64(3), FIELD_ORDER)
    rhs.Add(rhs, CURVE_B)
    rhs.Mod(rhs, FIELD_ORDER)
    // left-hand side: y^2 mod p
    lhs := new(big.Int).Exp(y, new(big.Int).SetUint64(2), FIELD_ORDER)
    return rhs.Cmp(lhs) == 0
}
// this should be merged , simplified just as simple as 25519
// HashToPoint maps a seed scalar to a point on the bn256 G1 curve via
// try-and-increment; it delegates to HashToPointNew.
// Fix: the previous body carried a complete legacy implementation after an
// unconditional `return HashToPointNew(seed)` — all of it unreachable — and
// it has been removed.
func HashToPoint(seed *big.Int) *bn256.G1 {
    return HashToPointNew(seed)
}

View File

@ -0,0 +1,41 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "golang.org/x/crypto/sha3"
import "github.com/deroproject/derohe/cryptography/sha3"
// Keccak256 hashes the concatenation of all data slices with legacy
// Keccak-256 and returns the 32-byte digest as a Hash.
func Keccak256(data ...[]byte) (result Hash) {
    hasher := sha3.NewLegacyKeccak256()
    for _, chunk := range data {
        hasher.Write(chunk)
    }
    copy(result[:], hasher.Sum(nil))
    return
}
// Keccak512 hashes the concatenation of all data slices with legacy
// Keccak-512. NOTE(review): Hash is 32 bytes, so only the first half of the
// 64-byte Keccak-512 digest is kept — confirm this truncation is intended.
func Keccak512(data ...[]byte) (result Hash) {
    hasher := sha3.NewLegacyKeccak512()
    for _, chunk := range data {
        hasher.Write(chunk)
    }
    copy(result[:], hasher.Sum(nil))
    return
}

View File

@ -0,0 +1,57 @@
// Copyright 2017-2018 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "testing"
import "encoding/hex"
// TestKeccak256 checks Keccak256 against fixed vectors taken from the
// Monero test suite plus the standard "hello" vector.
func TestKeccak256(t *testing.T) {
    tests := []struct {
        name       string // test-case label reported on failure
        messageHex string // input, hex-encoded
        wantHex    string // expected 32-byte digest, hex-encoded
    }{
        {
            name:       "from monero 1",
            messageHex: "c8fedd380dbae40ffb52",
            wantHex:    "8e41962058b7422e7404253121489a3e63d186ed115086919a75105661483ba9",
        },
        {
            name:       "from monero 2",
            messageHex: "5020c4d530b6ec6cb4d9",
            wantHex:    "8a597f11961935e32e0adeab2ce48b3df2d907c9b26619dad22f42ff65ab7593",
        },
        {
            name:       "hello",
            messageHex: "68656c6c6f",
            wantHex:    "1c8aff950685c2ed4bc3174f3472287b56d9517b9c948127319a09a7a36deac8",
        },
        {
            name:       "from monero cryptotest.pl",
            messageHex: "0f3fe9c20b24a11bf4d6d1acd335c6a80543f1f0380590d7323caf1390c78e88",
            wantHex:    "73b7a236f2a97c4e1805f7a319f1283e3276598567757186c526caf9a49e0a92",
        },
    }
    for _, test := range tests {
        message, _ := hex.DecodeString(test.messageHex)
        got := Keccak256(message)
        want := HexToHash(test.wantHex)
        if want != got {
            t.Errorf("want %x, got %x", want, got)
        }
    }
}

View File

@ -0,0 +1,70 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "fmt"
import "encoding/hex"
// KeyLength is the byte length of every Key.
const KeyLength = 32

// Key can be a Scalar or a Point — a fixed 32-byte value whose
// interpretation is up to the caller.
type Key [KeyLength]byte
// MarshalText renders the key as 64 lowercase hex characters.
func (k Key) MarshalText() ([]byte, error) {
    return []byte(hex.EncodeToString(k[:])), nil
}
// UnmarshalText parses a 64-character hex string into the key.
// Fix: the hex decode error is no longer ignored — hex.DecodeString can
// return 32 valid bytes together with an error (e.g. trailing garbage after
// 64 valid digits), which the previous code silently accepted.
func (k *Key) UnmarshalText(data []byte) (err error) {
    byteSlice, err := hex.DecodeString(string(data))
    if err != nil {
        return err
    }
    if len(byteSlice) != 32 {
        return fmt.Errorf("Incorrect key size")
    }
    copy(k[:], byteSlice)
    return
}
// String renders the key as 64 lowercase hex characters.
func (k Key) String() string {
    return hex.EncodeToString(k[:])
}
// FromBytes overwrites the key with the given 32-byte array.
func (p *Key) FromBytes(b [KeyLength]byte) {
    *p = b
}
// ToBytes returns a copy of the key as a plain 32-byte array.
func (p *Key) ToBytes() (result [KeyLength]byte) {
    result = [KeyLength]byte(*p)
    return
}
// HexToKey converts a hex string to a Key, panicking when the input does
// not decode to exactly 32 bytes.
func HexToKey(h string) (result Key) {
    decoded, _ := hex.DecodeString(h)
    if len(decoded) != 32 {
        panic("Incorrect key size")
    }
    copy(result[:], decoded)
    return
}
// HexToHash converts a hex string to a Hash, panicking when the input does
// not decode to exactly 32 bytes.
func HexToHash(h string) (result Hash) {
    decoded, _ := hex.DecodeString(h)
    if len(decoded) != 32 {
        panic("Incorrect key size")
    }
    copy(result[:], decoded)
    return
}

View File

@ -0,0 +1,91 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math/big"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "golang.org/x/crypto/sha3"
// Polynomial is a scalar polynomial over bn256.Order.
type Polynomial struct {
    coefficients []*big.Int // coefficients[i] multiplies x^i
}
// NewPolynomial wraps a coefficient slice. A nil input yields the constant
// polynomial 1 (the multiplicative identity for Mul).
func NewPolynomial(input []*big.Int) *Polynomial {
    if input == nil {
        input = []*big.Int{new(big.Int).SetInt64(1)}
    }
    return &Polynomial{coefficients: input}
}
// Length returns the number of coefficients (degree + 1).
func (p *Polynomial) Length() int {
    return len(p.coefficients)
}
// Mul returns p multiplied by the linear polynomial m, reduced mod
// bn256.Order.
// NOTE(review): m must have exactly two coefficients (m.coefficients[1] is
// read unconditionally), and the x-term is only added when that leading
// coefficient is literally 1 — other values of m.coefficients[1] are
// silently treated as 0. Confirm callers only pass such factors.
func (p *Polynomial) Mul(m *Polynomial) *Polynomial {
    var product []*big.Int
    // product = p * m[0]
    for i := range p.coefficients {
        product = append(product, new(big.Int).Mod(new(big.Int).Mul(p.coefficients[i], m.coefficients[0]), bn256.Order))
    }
    product = append(product, new(big.Int)) // add 0 element: room for the x*p term
    if m.coefficients[1].IsInt64() && m.coefficients[1].Int64() == 1 {
        // product += x * p  (p shifted up by one degree)
        for i := range product {
            if i > 0 {
                tmp := new(big.Int).Add(product[i], p.coefficients[i-1])
                product[i] = new(big.Int).Mod(tmp, bn256.Order)
            } else { // do nothing
            }
        }
    }
    return NewPolynomial(product)
}
// dummy accumulates the coefficient lists produced by the recursive
// polynomial expansion below.
type dummy struct {
    list [][]*big.Int
}
// RecursivePolynomials expands the products of the linear factors derived
// from a and b (see recursivePolynomialsinternal) starting from accum and
// returns the coefficient list of every product.
// NOTE(review): the `list` parameter is ignored — results always start from
// a fresh accumulator; confirm callers do not expect it to be extended.
func RecursivePolynomials(list [][]*big.Int, accum *Polynomial, a, b []*big.Int) (rlist [][]*big.Int) {
    var d dummy
    d.recursivePolynomialsinternal(accum, a, b)
    return d.list
}
// recursivePolynomialsinternal walks all 2^len(a) branch choices: at each
// level it consumes the last elements of a and b, multiplies accum by
// either the "left" factor (-a + (1-b)x) or the "right" factor (a + b*x),
// and records every fully-expanded coefficient list in d.list.
// a and b must have equal length.
func (d *dummy) recursivePolynomialsinternal(accum *Polynomial, a, b []*big.Int) {
    if len(a) == 0 {
        // all factors consumed: record this product's coefficients
        d.list = append(d.list, accum.coefficients)
        return
    }
    atop := a[len(a)-1]
    btop := b[len(b)-1]
    left := NewPolynomial([]*big.Int{new(big.Int).Mod(new(big.Int).Neg(atop), bn256.Order), new(big.Int).Mod(new(big.Int).Sub(new(big.Int).SetInt64(1), btop), bn256.Order)})
    right := NewPolynomial([]*big.Int{atop, btop})
    d.recursivePolynomialsinternal(accum.Mul(left), a[:len(a)-1], b[:len(b)-1])
    d.recursivePolynomialsinternal(accum.Mul(right), a[:len(a)-1], b[:len(b)-1])
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,313 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math"
import "math/big"
import "bytes"
//import "crypto/rand"
//import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "golang.org/x/crypto/sha3"
// basically the Σ-protocol
// InnerProduct is a Bulletproofs-style inner-product argument: the final
// scalars a, b plus the left/right commitment points from each halving
// round of the recursion.
type InnerProduct struct {
    a, b   *big.Int
    ls, rs []*bn256.G1
}
// Size returns the serialized byte length: two field elements, one length
// byte, and one compressed point per ls/rs entry (FIELDELEMENT_SIZE and
// POINT_SIZE are package constants defined elsewhere).
// NOTE(review): Serialize below deliberately skips the count byte, so this
// overstates the stream length by 1 — confirm which is authoritative.
func (ip *InnerProduct) Size() int {
    return FIELDELEMENT_SIZE + FIELDELEMENT_SIZE + 1 + len(ip.ls)*POINT_SIZE + len(ip.rs)*POINT_SIZE
}
// since our bulletproofs are 128 bits, we can get away hard coded 7 entries
// Serialize appends a and b (32 bytes each via ConvertBigIntToByte)
// followed by each ls/rs pair in compressed point form. No count byte is
// written; Deserialize assumes exactly 7 rounds.
func (ip *InnerProduct) Serialize(w *bytes.Buffer) {
    w.Write(ConvertBigIntToByte(ip.a))
    w.Write(ConvertBigIntToByte(ip.b))
    // w.WriteByte(byte(len(ip.ls))) // we can skip this byte also, why not skip it
    // fmt.Printf("inner proof length byte %d\n",len(ip.ls))
    for i := range ip.ls {
        w.Write(ip.ls[i].EncodeCompressed())
        w.Write(ip.rs[i].EncodeCompressed())
    }
}
// Deserialize reads a proof previously written by Serialize from r.
// The round count is the hard-coded 7 used by 128-bit bulletproofs.
//
// Bug fix: the previous version used r.Read and returned the (possibly nil)
// read error on short reads — bytes.Reader.Read can return n < len(buf)
// with err == nil, so a truncated proof was reported as successfully
// deserialized. io.ReadFull returns io.ErrUnexpectedEOF in that case.
func (ip *InnerProduct) Deserialize(r *bytes.Reader) (err error) {
	var buf [32]byte
	var bufp [33]byte

	// scalar a
	if _, err = io.ReadFull(r, buf[:]); err != nil {
		return err
	}
	ip.a = new(big.Int).SetBytes(buf[:])

	// scalar b
	if _, err = io.ReadFull(r, buf[:]); err != nil {
		return err
	}
	ip.b = new(big.Int).SetBytes(buf[:])

	length := 7 // fixed round count, mirrors Serialize
	ip.ls = ip.ls[:0]
	ip.rs = ip.rs[:0]
	for i := 0; i < length; i++ {
		var l bn256.G1
		if _, err = io.ReadFull(r, bufp[:]); err != nil {
			return err
		}
		if err = l.DecodeCompressed(bufp[:]); err != nil {
			return err
		}
		ip.ls = append(ip.ls, &l)

		var rp bn256.G1
		if _, err = io.ReadFull(r, bufp[:]); err != nil {
			return err
		}
		if err = rp.DecodeCompressed(bufp[:]); err != nil {
			return err
		}
		ip.rs = append(ip.rs, &rp)
	}
	return nil
}
// NewInnerProductProof builds the recursive inner product argument for the
// given statement and witness, seeded with salt as the initial Fiat-Shamir
// challenge.
func NewInnerProductProof(ips *IPStatement, witness *IPWitness, salt *big.Int) *InnerProduct {
	proof := &InnerProduct{}
	proof.generateInnerProductProof(ips.PrimeBase, ips.P, witness.L, witness.R, salt)
	return proof
}
// generateInnerProductProof performs one folding round of the Bulletproofs
// inner product argument over vectors (as, bs) relative to commitment P and
// generators in base, appending this round's (L, R) to ip.ls/ip.rs and
// recursing with vectors of half the length. prev_challenge chains the
// Fiat-Shamir transcript across rounds. Recursion terminates at length 1,
// where the two remaining scalars become ip.a and ip.b.
func (ip *InnerProduct) generateInnerProductProof(base *GeneratorParams, P *bn256.G1, as, bs *FieldVector, prev_challenge *big.Int) {
	n := as.Length()
	if n == 1 { // the proof is done, ls,rs are already in place
		ip.a = as.vector[0]
		ip.b = bs.vector[0]
		return
	}

	// split everything into left and right halves
	nPrime := n / 2
	asLeft := as.Slice(0, nPrime)
	asRight := as.Slice(nPrime, n)
	bsLeft := bs.Slice(0, nPrime)
	bsRight := bs.Slice(nPrime, n)
	gLeft := base.Gs.Slice(0, nPrime)
	gRight := base.Gs.Slice(nPrime, n)
	hLeft := base.Hs.Slice(0, nPrime)
	hRight := base.Hs.Slice(nPrime, n)

	// cross inner products committed in this round
	cL := asLeft.InnerProduct(bsRight)
	cR := asRight.InnerProduct(bsLeft)

	u := base.H
	L := new(bn256.G1).Add(gRight.Commit(asLeft.vector), hLeft.Commit(bsRight.vector))
	L = new(bn256.G1).Add(L, new(bn256.G1).ScalarMult(u, cL))
	R := new(bn256.G1).Add(gLeft.Commit(asRight.vector), hRight.Commit(bsLeft.vector))
	R = new(bn256.G1).Add(R, new(bn256.G1).ScalarMult(u, cR))

	ip.ls = append(ip.ls, L)
	ip.rs = append(ip.rs, R)

	// round challenge x = H(prev_challenge || L || R)
	var input []byte
	input = append(input, ConvertBigIntToByte(prev_challenge)...)
	input = append(input, L.Marshal()...)
	input = append(input, R.Marshal()...)
	x := reducedhash(input)

	xinv := new(big.Int).ModInverse(x, bn256.Order)

	// fold generators and vectors for the next round
	gPrime := gLeft.Times(xinv).Add(gRight.Times(x))
	hPrime := hLeft.Times(x).Add(hRight.Times(xinv))
	aPrime := asLeft.Times(x).Add(asRight.Times(xinv))
	bPrime := bsLeft.Times(xinv).Add(bsRight.Times(x))

	basePrime := NewGeneratorParams3(u, gPrime, hPrime)

	// fold the commitment: P' = L^(x^2) + R^(x^-2) + P
	PPrimeL := new(bn256.G1).ScalarMult(L, new(big.Int).Mod(new(big.Int).Mul(x, x), bn256.Order))          //L * (x*x)
	PPrimeR := new(bn256.G1).ScalarMult(R, new(big.Int).Mod(new(big.Int).Mul(xinv, xinv), bn256.Order))    //R * (xinv*xinv)
	PPrime := new(bn256.G1).Add(PPrimeL, PPrimeR)
	PPrime = new(bn256.G1).Add(PPrime, P)

	ip.generateInnerProductProof(basePrime, PPrime, aPrime, bPrime, x)
	return
}
// NewInnerProductProofNew builds an inner product argument directly from a
// Pedersen vector commitment, seeded with salt as the initial challenge.
// Note that the recursion mutates p (its Gs/Hs generator vectors are folded).
func NewInnerProductProofNew(p *PedersenVectorCommitment, salt *big.Int) *InnerProduct {
	proof := &InnerProduct{}
	proof.generateInnerProductProofNew(p, p.gvalues, p.hvalues, salt)
	return proof
}
// generateInnerProductProofNew is the PedersenVectorCommitment variant of
// generateInnerProductProof: each round commits the cross inner products as
// (L, R), derives the Fiat-Shamir challenge x, folds vectors and generators
// in half, and recurses until length 1.
// WARNING: unlike the older variant this one mutates p in place — p.Gs and
// p.Hs are replaced by their folded versions every round.
func (ip *InnerProduct) generateInnerProductProofNew(p *PedersenVectorCommitment, as, bs *FieldVector, prev_challenge *big.Int) {
	n := as.Length()
	if n == 1 { // the proof is done, ls,rs are already in place
		ip.a = as.vector[0]
		ip.b = bs.vector[0]
		return
	}

	// split vectors and generators into halves
	nPrime := n / 2
	asLeft := as.Slice(0, nPrime)
	asRight := as.Slice(nPrime, n)
	bsLeft := bs.Slice(0, nPrime)
	bsRight := bs.Slice(nPrime, n)
	gsLeft := p.Gs.Slice(0, nPrime)
	gsRight := p.Gs.Slice(nPrime, n)
	hsLeft := p.Hs.Slice(0, nPrime)
	hsRight := p.Hs.Slice(nPrime, n)

	cL := asLeft.InnerProduct(bsRight)
	cR := asRight.InnerProduct(bsLeft)

	/*u := base.H
	L := new(bn256.G1).Add(gRight.Commit(asLeft.vector), hLeft.Commit(bsRight.vector))
	L = new(bn256.G1).Add(L, new(bn256.G1).ScalarMult(u, cL))
	R := new(bn256.G1).Add(gLeft.Commit(asRight.vector), hRight.Commit(bsLeft.vector))
	R = new(bn256.G1).Add(R, new(bn256.G1).ScalarMult(u, cR))
	*/

	// L = gsRight^asLeft * hsLeft^bsRight * H^cL, and symmetrically for R
	Lpart := new(bn256.G1).Add(gsRight.MultiExponentiate(asLeft), hsLeft.MultiExponentiate(bsRight))
	L := new(bn256.G1).Add(Lpart, new(bn256.G1).ScalarMult(p.H, cL))
	Rpart := new(bn256.G1).Add(gsLeft.MultiExponentiate(asRight), hsRight.MultiExponentiate(bsLeft))
	R := new(bn256.G1).Add(Rpart, new(bn256.G1).ScalarMult(p.H, cR))

	ip.ls = append(ip.ls, L)
	ip.rs = append(ip.rs, R)

	// round challenge x = H(prev_challenge || L || R)
	var input []byte
	input = append(input, ConvertBigIntToByte(prev_challenge)...)
	input = append(input, L.Marshal()...)
	input = append(input, R.Marshal()...)
	x := reducedhash(input)

	xInv := new(big.Int).ModInverse(x, bn256.Order)

	// fold generators (in place on p) and vectors for the next round
	p.Gs = gsLeft.Times(xInv).Add(gsRight.Times(x))
	p.Hs = hsLeft.Times(x).Add(hsRight.Times(xInv))
	asPrime := asLeft.Times(x).Add(asRight.Times(xInv))
	bsPrime := bsLeft.Times(xInv).Add(bsRight.Times(x))

	ip.generateInnerProductProofNew(p, asPrime, bsPrime, x)
	return
}
// Verify checks the inner product argument against commitment P. hs are the
// (already rescaled) H generators, u the generator binding the inner product
// value, salt the initial Fiat-Shamir challenge and gp the shared generator
// parameters. It returns false for malformed or invalid proofs.
//
// Fix: the old code panicked on an invalid proof (with an unreachable
// "return false" after the panic). Verification of attacker-supplied data
// must never panic — a malicious peer could crash the node — so an invalid
// proof now simply fails verification.
func (ip *InnerProduct) Verify(hs []*bn256.G1, u, P *bn256.G1, salt *big.Int, gp *GeneratorParams) bool {

	log_n := uint(len(ip.ls))
	if len(ip.ls) != len(ip.rs) { // one L and one R per round
		return false
	}

	n := uint(math.Pow(2, float64(log_n))) // original vector length = 2^rounds

	// replay the transcript: recompute each round's challenge and fold the
	// round's (L, R) into P exactly as the prover did
	o := salt
	var challenges []*big.Int
	for i := uint(0); i < log_n; i++ {
		var input []byte
		input = append(input, ConvertBigIntToByte(o)...)
		input = append(input, ip.ls[i].Marshal()...)
		input = append(input, ip.rs[i].Marshal()...)
		o = reducedhash(input)
		challenges = append(challenges, o)

		o_inv := new(big.Int).ModInverse(o, bn256.Order)
		PPrimeL := new(bn256.G1).ScalarMult(ip.ls[i], new(big.Int).Mod(new(big.Int).Mul(o, o), bn256.Order))         // L * (x*x)
		PPrimeR := new(bn256.G1).ScalarMult(ip.rs[i], new(big.Int).Mod(new(big.Int).Mul(o_inv, o_inv), bn256.Order)) // R * (xinv*xinv)
		PPrime := new(bn256.G1).Add(PPrimeL, PPrimeR)
		P = new(bn256.G1).Add(PPrime, P)
	}

	// product of all challenges; its inverse seeds the exponent table
	exp := new(big.Int).SetUint64(1)
	for i := uint(0); i < log_n; i++ {
		exp = new(big.Int).Mod(new(big.Int).Mul(exp, challenges[i]), bn256.Order)
	}
	exp_inv := new(big.Int).ModInverse(exp, bn256.Order)

	exponents := make([]*big.Int, n)
	exponents[0] = exp_inv // initialize first element

	// fill exponents[i1] multiplicatively from the squared challenges,
	// visiting each index exactly once (bits tracks what is already set)
	bits := make([]bool, n)
	for i := uint(0); i < n/2; i++ {
		for j := uint(0); (1<<j)+i < n; j++ {
			i1 := (1 << j) + i
			if !bits[i1] {
				temp := new(big.Int).Mod(new(big.Int).Mul(challenges[log_n-1-j], challenges[log_n-1-j]), bn256.Order)
				exponents[i1] = new(big.Int).Mod(new(big.Int).Mul(exponents[i], temp), bn256.Order)
				bits[i1] = true
			}
		}
	}

	var zeroes [64]byte
	gtemp := new(bn256.G1) // obtain zero element, this should be static and cached
	htemp := new(bn256.G1) // obtain zero element, this should be static and cached
	gtemp.Unmarshal(zeroes[:])
	htemp.Unmarshal(zeroes[:])

	// multiexponentiation of the generators by the recovered exponents
	for i := uint(0); i < n; i++ {
		gtemp = new(bn256.G1).Add(gtemp, new(bn256.G1).ScalarMult(gp.Gs.vector[i], exponents[i]))
		htemp = new(bn256.G1).Add(htemp, new(bn256.G1).ScalarMult(hs[i], exponents[n-1-i]))
	}

	gtemp = new(bn256.G1).ScalarMult(gtemp, ip.a)
	htemp = new(bn256.G1).ScalarMult(htemp, ip.b)
	utemp := new(bn256.G1).ScalarMult(u, new(big.Int).Mod(new(big.Int).Mul(ip.a, ip.b), bn256.Order))

	P_calculated := new(bn256.G1).Add(gtemp, htemp)
	P_calculated = new(bn256.G1).Add(P_calculated, utemp)

	// the proof is valid iff the reconstructed commitment matches P
	return P_calculated.String() == P.String()
}

View File

@ -0,0 +1,541 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
//import "fmt"
import "math"
import "math/big"
//import "crypto/rand"
import "encoding/hex"
import "github.com/deroproject/derohe/cryptography/bn256"
//import "golang.org/x/crypto/sha3"
//import "github.com/kubernetes/klog"
// below 2 structures form bulletproofs and many to many proofs
// AnonSupport carries the verifier's scratch state for the anonymity
// (many-out-of-many) part of the proof: the v/w challenges, their running
// powers, the recovered bit polynomials and the re-assembled commitments.
type AnonSupport struct {
	v    *big.Int      // challenge derived from BA/BS/A/B
	w    *big.Int      // challenge derived from v via hashmash1
	vPow *big.Int      // running power of v
	wPow *big.Int      // running power of w
	f    [][2]*big.Int // recovered bit polynomials: f[k] = {w - f_k, f_k}
	r    [][2]*big.Int // per-index polynomial coefficients from assemblepolynomials
	temp *bn256.G1     // scratch accumulator
	CLnR *bn256.G1
	CRnR *bn256.G1
	CR   [][2]*bn256.G1
	yR   [][2]*bn256.G1
	C_XR *bn256.G1
	y_XR *bn256.G1
	gR   *bn256.G1
	DR   *bn256.G1
}
// ProtocolSupport carries the verifier's scratch state for the range-proof
// (bulletproof) part: the y/z challenges, their power vectors and the
// evaluated t commitment.
type ProtocolSupport struct {
	y                *big.Int
	ys               []*big.Int // powers of y, 128 entries
	z                *big.Int
	zs               []*big.Int // [z^2, z^3] // only max 2
	twoTimesZSquared [128]*big.Int
	zSum             *big.Int
	x                *big.Int
	t                *big.Int  // t = tHat - delta(y, z)
	k                *big.Int  // delta(y, z) accumulator
	tEval            *bn256.G1 // T_1^x * T_2^(x^2)
}
// sigma protocol
// SigmaSupport holds the reconstructed sigma-protocol commitments; hashing
// them must reproduce the proof's challenge c for the proof to verify.
type SigmaSupport struct {
	c                            *big.Int
	A_y, A_D, A_b, A_X, A_t, A_u *bn256.G1
}
// support structures are those which
// InnerProductSupport groups the inputs handed to the inner product
// verification step.
// NOTE(review): this struct appears unused within this file — confirm it is
// referenced elsewhere before removing.
type InnerProductSupport struct {
	P         bn256.G1
	u_x       bn256.G1
	hPrimes   []*bn256.G1
	hPrimeSum bn256.G1
	o         *big.Int
}
// unmarshalpoint decodes a hex-encoded, uncompressed 64-byte bn256.G1 point.
// It panics on malformed input, so it is only suitable for trusted,
// compile-time constants.
func unmarshalpoint(input string) *bn256.G1 {
	raw, err := hex.DecodeString(input)
	if err != nil {
		panic(err)
	}
	if len(raw) != 64 {
		panic("wrong length")
	}
	point := new(bn256.G1)
	point.Unmarshal(raw)
	return point
}
// gparams are the 128 fixed generator points shared by prover and verifier
// for the 128-bit bulletproofs used below.
var gparams = NewGeneratorParams(128) // these can be pregenerated similarly as in DERO project

// verify proof
// first generate supporting structures
// Verify validates a complete confidential-transfer proof against statement
// s: the anonymity (many-out-of-many) argument, the sigma protocol and the
// bulletproof range argument. txid seeds the Fiat-Shamir transcript;
// extra_value is any openly declared amount on top of s.Fees. Returns false
// on any inconsistency. The statement order here must mirror the prover's
// transcript exactly.
func (proof *Proof) Verify(s *Statement, txid Hash, extra_value uint64) bool {

	var anonsupport AnonSupport
	var protsupport ProtocolSupport
	var sigmasupport SigmaSupport

	// one commitment per ring member
	if len(s.C) != len(s.Publickeylist) {
		return false
	}

	total_open_value := s.Fees + extra_value
	if total_open_value < s.Fees || total_open_value < extra_value { // stop over flowing attacks
		return false
	}

	statementhash := reducedhash(txid[:])

	// challenge v = H(statement || BA || BS || A || B), then w from v
	var input []byte
	input = append(input, ConvertBigIntToByte(statementhash)...)
	input = append(input, proof.BA.Marshal()...)
	input = append(input, proof.BS.Marshal()...)
	input = append(input, proof.A.Marshal()...)
	input = append(input, proof.B.Marshal()...)
	anonsupport.v = reducedhash(input)

	anonsupport.w = proof.hashmash1(anonsupport.v)

	m := proof.f.Length() / 2
	N := int(math.Pow(2, float64(m))) // ring size

	// recover the complementary bit polynomials: f[k] = {w - f_k, f_k}
	anonsupport.f = make([][2]*big.Int, 2*m, 2*m)

	for k := 0; k < 2*m; k++ {
		anonsupport.f[k][1] = new(big.Int).Set(proof.f.vector[k])
		anonsupport.f[k][0] = new(big.Int).Mod(new(big.Int).Sub(anonsupport.w, proof.f.vector[k]), bn256.Order)
	}

	anonsupport.temp = new(bn256.G1)
	var zeroes [64]byte
	anonsupport.temp.Unmarshal(zeroes[:])

	// commit to the recovered f values and their pairwise products
	for k := 0; k < 2*m; k++ {
		anonsupport.temp = new(bn256.G1).Add(anonsupport.temp, new(bn256.G1).ScalarMult(gparams.Gs.vector[k], anonsupport.f[k][1]))
		t := new(big.Int).Mod(new(big.Int).Mul(anonsupport.f[k][1], anonsupport.f[k][0]), bn256.Order)
		anonsupport.temp = new(bn256.G1).Add(anonsupport.temp, new(bn256.G1).ScalarMult(gparams.Hs.vector[k], t))
	}

	t0 := new(bn256.G1).ScalarMult(gparams.Hs.vector[0+2*m], new(big.Int).Mod(new(big.Int).Mul(anonsupport.f[0][1], anonsupport.f[m][1]), bn256.Order))
	t1 := new(bn256.G1).ScalarMult(gparams.Hs.vector[1+2*m], new(big.Int).Mod(new(big.Int).Mul(anonsupport.f[0][0], anonsupport.f[m][0]), bn256.Order))

	anonsupport.temp = new(bn256.G1).Add(anonsupport.temp, t0)
	anonsupport.temp = new(bn256.G1).Add(anonsupport.temp, t1)

	// check whether we successfully recover B^w * A
	stored := new(bn256.G1).Add(new(bn256.G1).ScalarMult(proof.B, anonsupport.w), proof.A)
	computed := new(bn256.G1).Add(anonsupport.temp, new(bn256.G1).ScalarMult(gparams.H, proof.z_A))

	// for i := range proof.f.vector {
	// klog.V(2).Infof("proof.f %d %s\n", i, proof.f.vector[i].Text(16))
	// }
	// klog.V(2).Infof("anonsupport.w %s\n", anonsupport.w.Text(16))
	// klog.V(2).Infof("proof.z_A %s\n", proof.z_A.Text(16))
	// klog.V(2).Infof("proof.B %s\n", proof.B.String())
	// klog.V(2).Infof("proof.A %s\n", proof.A.String())
	// klog.V(2).Infof("gparams.H %s\n", gparams.H.String())
	// klog.V(2).Infof("stored %s\n", stored.String())
	// klog.V(2).Infof("computed %s\n", computed.String())

	if stored.String() != computed.String() { // if failed bail out
		// klog.Warning("Recover key failed B^w * A")
		return false
	}

	// expand the f bits into one coefficient pair per ring index
	anonsupport.r = assemblepolynomials(anonsupport.f)

	// for i := 0; i < len(anonsupport.r); i++ {
	// klog.V(2).Infof("proof.r %d %s\n", i, anonsupport.r[i][0].Text(16))
	// }
	// for i := 0; i < len(anonsupport.r); i++ {
	// klog.V(2).Infof("proof.q %d %s\n", i, anonsupport.r[i][1].Text(16))
	// }

	anonsupport.CLnR = new(bn256.G1)
	anonsupport.CRnR = new(bn256.G1)
	anonsupport.CLnR.Unmarshal(zeroes[:])
	anonsupport.CRnR.Unmarshal(zeroes[:])

	// weight the per-index balance commitments by the p coefficients
	for i := 0; i < N; i++ {
		anonsupport.CLnR = new(bn256.G1).Add(anonsupport.CLnR, new(bn256.G1).ScalarMult(s.CLn[i], anonsupport.r[i][0]))
		anonsupport.CRnR = new(bn256.G1).Add(anonsupport.CRnR, new(bn256.G1).ScalarMult(s.CRn[i], anonsupport.r[i][0]))
	}
	// klog.V(2).Infof("qCrnR %s\n", anonsupport.CRnR.String())

	var p, q []*big.Int
	for i := 0; i < len(anonsupport.r); i++ {
		p = append(p, anonsupport.r[i][0])
		q = append(q, anonsupport.r[i][1])
	}

	// for i := range s.C {
	// klog.V(2).Infof("S.c %d %s \n", i, s.C[i].String())
	// }

	// share code with proof generator for better testing
	C_p := Convolution(NewFieldVector(p), NewPointVector(s.C))
	C_q := Convolution(NewFieldVector(q), NewPointVector(s.C))
	y_p := Convolution(NewFieldVector(p), NewPointVector(s.Publickeylist))
	y_q := Convolution(NewFieldVector(q), NewPointVector(s.Publickeylist))

	// for i := range s.C {
	// klog.V(2).Infof("S.c %d %s \n", i, s.C[i].String())
	// }
	// for i := range y_p.vector {
	// klog.V(2).Infof("y_p %d %s \n", i, y_p.vector[i].String())
	// }
	// for i := range y_q.vector {
	// klog.V(2).Infof("y_q %d %s \n", i, y_q.vector[i].String())
	// }

	for i := range C_p.vector { // assemble back
		anonsupport.CR = append(anonsupport.CR, [2]*bn256.G1{C_p.vector[i], C_q.vector[i]})
		anonsupport.yR = append(anonsupport.yR, [2]*bn256.G1{y_p.vector[i], y_q.vector[i]})
	}

	anonsupport.vPow = new(big.Int).SetUint64(1)
	anonsupport.C_XR = new(bn256.G1)
	anonsupport.y_XR = new(bn256.G1)
	anonsupport.C_XR.Unmarshal(zeroes[:])
	anonsupport.y_XR.Unmarshal(zeroes[:])

	// fold the convolved commitments with powers of v
	for i := 0; i < N; i++ {
		anonsupport.C_XR.Add(new(bn256.G1).Set(anonsupport.C_XR), new(bn256.G1).ScalarMult(anonsupport.CR[i/2][i%2], anonsupport.vPow))
		anonsupport.y_XR.Add(new(bn256.G1).Set(anonsupport.y_XR), new(bn256.G1).ScalarMult(anonsupport.yR[i/2][i%2], anonsupport.vPow))

		if i > 0 {
			anonsupport.vPow = new(big.Int).Mod(new(big.Int).Mul(anonsupport.vPow, anonsupport.v), bn256.Order)
			// klog.V(2).Infof("vPow %s\n", anonsupport.vPow.Text(16))
		}
	}

	// klog.V(2).Infof("vPow %s\n", anonsupport.vPow.Text(16))
	// klog.V(2).Infof("v %s\n", anonsupport.v.Text(16))

	anonsupport.wPow = new(big.Int).SetUint64(1)
	anonsupport.gR = new(bn256.G1)
	anonsupport.gR.Unmarshal(zeroes[:])
	anonsupport.DR = new(bn256.G1)
	anonsupport.DR.Unmarshal(zeroes[:])

	// subtract the prover's per-round G commitments weighted by -w^i
	for i := 0; i < m; i++ {
		wPow_neg := new(big.Int).Mod(new(big.Int).Neg(anonsupport.wPow), bn256.Order)
		anonsupport.CLnR.Add(new(bn256.G1).Set(anonsupport.CLnR), new(bn256.G1).ScalarMult(proof.CLnG[i], wPow_neg))
		anonsupport.CRnR.Add(new(bn256.G1).Set(anonsupport.CRnR), new(bn256.G1).ScalarMult(proof.CRnG[i], wPow_neg))
		anonsupport.CR[0][0].Add(new(bn256.G1).Set(anonsupport.CR[0][0]), new(bn256.G1).ScalarMult(proof.C_0G[i], wPow_neg))
		anonsupport.DR.Add(new(bn256.G1).Set(anonsupport.DR), new(bn256.G1).ScalarMult(proof.DG[i], wPow_neg))
		anonsupport.yR[0][0].Add(new(bn256.G1).Set(anonsupport.yR[0][0]), new(bn256.G1).ScalarMult(proof.y_0G[i], wPow_neg))
		anonsupport.gR.Add(new(bn256.G1).Set(anonsupport.gR), new(bn256.G1).ScalarMult(proof.gG[i], wPow_neg))
		anonsupport.C_XR.Add(new(bn256.G1).Set(anonsupport.C_XR), new(bn256.G1).ScalarMult(proof.C_XG[i], wPow_neg))
		anonsupport.y_XR.Add(new(bn256.G1).Set(anonsupport.y_XR), new(bn256.G1).ScalarMult(proof.y_XG[i], wPow_neg))

		anonsupport.wPow = new(big.Int).Mod(new(big.Int).Mul(anonsupport.wPow, anonsupport.w), bn256.Order)
	}

	// klog.V(2).Infof("qCrnR %s\n", anonsupport.CRnR.String())

	anonsupport.DR.Add(new(bn256.G1).Set(anonsupport.DR), new(bn256.G1).ScalarMult(s.D, anonsupport.wPow))
	anonsupport.gR.Add(new(bn256.G1).Set(anonsupport.gR), new(bn256.G1).ScalarMult(gparams.G, anonsupport.wPow))

	// fold in the openly declared value (fees + extra_value)
	anonsupport.C_XR.Add(new(bn256.G1).Set(anonsupport.C_XR), new(bn256.G1).ScalarMult(gparams.G, new(big.Int).Mod(new(big.Int).Mul(new(big.Int).SetUint64(total_open_value), anonsupport.wPow), bn256.Order)))

	//anonAuxiliaries.C_XR = anonAuxiliaries.C_XR.add(Utils.g().mul(Utils.fee().mul(anonAuxiliaries.wPow))); // this line is new

	// at this point, these parameters are comparable with proof generator

	// klog.V(2).Infof("CLnR %s\n", anonsupport.CLnR.String())
	// klog.V(2).Infof("qCrnR %s\n", anonsupport.CRnR.String())
	// klog.V(2).Infof("DR %s\n", anonsupport.DR.String())
	// klog.V(2).Infof("gR %s\n", anonsupport.gR.String())
	// klog.V(2).Infof("C_XR %s\n", anonsupport.C_XR.String())
	// klog.V(2).Infof("y_XR %s\n", anonsupport.y_XR.String())

	// range-proof challenges: y from w, z from y, plus their power sums
	protsupport.y = reducedhash(ConvertBigIntToByte(anonsupport.w))
	protsupport.ys = append(protsupport.ys, new(big.Int).SetUint64(1))
	protsupport.k = new(big.Int).SetUint64(1)
	for i := 1; i < 128; i++ {
		protsupport.ys = append(protsupport.ys, new(big.Int).Mod(new(big.Int).Mul(protsupport.ys[i-1], protsupport.y), bn256.Order))
		protsupport.k = new(big.Int).Mod(new(big.Int).Add(protsupport.k, protsupport.ys[i]), bn256.Order)
	}

	protsupport.z = reducedhash(ConvertBigIntToByte(protsupport.y))
	protsupport.zs = []*big.Int{new(big.Int).Exp(protsupport.z, new(big.Int).SetUint64(2), bn256.Order), new(big.Int).Exp(protsupport.z, new(big.Int).SetUint64(3), bn256.Order)}

	protsupport.zSum = new(big.Int).Mod(new(big.Int).Add(protsupport.zs[0], protsupport.zs[1]), bn256.Order)
	protsupport.zSum = new(big.Int).Mod(new(big.Int).Mul(new(big.Int).Set(protsupport.zSum), protsupport.z), bn256.Order)

	// klog.V(2).Infof("zsum %s\n ", protsupport.zSum.Text(16))

	// k = delta(y, z); t = tHat - delta(y, z)
	z_z0 := new(big.Int).Mod(new(big.Int).Sub(protsupport.z, protsupport.zs[0]), bn256.Order)
	protsupport.k = new(big.Int).Mod(new(big.Int).Mul(protsupport.k, z_z0), bn256.Order)

	proof_2_64, _ := new(big.Int).SetString("18446744073709551616", 10) // 2^64
	zsum_pow := new(big.Int).Mod(new(big.Int).Mul(protsupport.zSum, proof_2_64), bn256.Order)
	zsum_pow = new(big.Int).Mod(new(big.Int).Sub(zsum_pow, protsupport.zSum), bn256.Order)
	protsupport.k = new(big.Int).Mod(new(big.Int).Sub(protsupport.k, zsum_pow), bn256.Order)

	protsupport.t = new(big.Int).Mod(new(big.Int).Sub(proof.that, protsupport.k), bn256.Order) // t = tHat - delta(y, z)

	// klog.V(2).Infof("that %s\n ", proof.that.Text(16))
	// klog.V(2).Infof("zk %s\n ", protsupport.k.Text(16))

	// 2^i * z^2 and 2^i * z^3 vectors for the two 64-bit ranges
	for i := 0; i < 64; i++ {
		protsupport.twoTimesZSquared[i] = new(big.Int).Mod(new(big.Int).Mul(protsupport.zs[0], new(big.Int).SetUint64(uint64(math.Pow(2, float64(i))))), bn256.Order)
		protsupport.twoTimesZSquared[64+i] = new(big.Int).Mod(new(big.Int).Mul(protsupport.zs[1], new(big.Int).SetUint64(uint64(math.Pow(2, float64(i))))), bn256.Order)
	}

	// for i := 0; i < 128; i++ {
	// klog.V(2).Infof("zsq %d %s", i, protsupport.twoTimesZSquared[i].Text(16))
	// }

	x := new(big.Int)

	{
		var input []byte
		input = append(input, ConvertBigIntToByte(protsupport.z)...) // tie intermediates/commit
		input = append(input, proof.T_1.Marshal()...)
		input = append(input, proof.T_2.Marshal()...)
		x = reducedhash(input)
	}

	xsq := new(big.Int).Mod(new(big.Int).Mul(x, x), bn256.Order)
	protsupport.tEval = new(bn256.G1).ScalarMult(proof.T_1, x)
	protsupport.tEval.Add(new(bn256.G1).Set(protsupport.tEval), new(bn256.G1).ScalarMult(proof.T_2, xsq))

	// klog.V(2).Infof("protsupport.tEval %s\n", protsupport.tEval.String())

	// rebuild the sigma-protocol commitments from the responses and -c
	proof_c_neg := new(big.Int).Mod(new(big.Int).Neg(proof.c), bn256.Order)

	sigmasupport.A_y = new(bn256.G1).Add(new(bn256.G1).ScalarMult(anonsupport.gR, proof.s_sk), new(bn256.G1).ScalarMult(anonsupport.yR[0][0], proof_c_neg))
	sigmasupport.A_D = new(bn256.G1).Add(new(bn256.G1).ScalarMult(gparams.G, proof.s_r), new(bn256.G1).ScalarMult(s.D, proof_c_neg))

	zs0_neg := new(big.Int).Mod(new(big.Int).Neg(protsupport.zs[0]), bn256.Order)

	left := new(bn256.G1).ScalarMult(anonsupport.DR, zs0_neg)
	left.Add(new(bn256.G1).Set(left), new(bn256.G1).ScalarMult(anonsupport.CRnR, protsupport.zs[1]))
	left = new(bn256.G1).ScalarMult(new(bn256.G1).Set(left), proof.s_sk)

	// TODO mid seems wrong
	amount_fees := new(big.Int).SetUint64(total_open_value)
	mid := new(bn256.G1).ScalarMult(G, new(big.Int).Mod(new(big.Int).Mul(amount_fees, anonsupport.wPow), bn256.Order))
	mid.Add(new(bn256.G1).Set(mid), new(bn256.G1).Set(anonsupport.CR[0][0]))

	right := new(bn256.G1).ScalarMult(mid, zs0_neg)
	right.Add(new(bn256.G1).Set(right), new(bn256.G1).ScalarMult(anonsupport.CLnR, protsupport.zs[1]))
	right = new(bn256.G1).ScalarMult(new(bn256.G1).Set(right), proof_c_neg)

	sigmasupport.A_b = new(bn256.G1).ScalarMult(gparams.G, proof.s_b)
	temp := new(bn256.G1).Add(left, right)
	sigmasupport.A_b.Add(new(bn256.G1).Set(sigmasupport.A_b), temp)

	//- sigmaAuxiliaries.A_b = Utils.g().mul(proof.s_b).add(anonAuxiliaries.DR.mul(zetherAuxiliaries.zs[0].neg()).add(anonAuxiliaries.CRnR.mul(zetherAuxiliaries.zs[1])).mul(proof.s_sk).add(anonAuxiliaries.CR[0][0] .mul(zetherAuxiliaries.zs[0].neg()).add(anonAuxiliaries.CLnR.mul(zetherAuxiliaries.zs[1])).mul(proof.c.neg())));
	//+ sigmaAuxiliaries.A_b = Utils.g().mul(proof.s_b).add(anonAuxiliaries.DR.mul(zetherAuxiliaries.zs[0].neg()).add(anonAuxiliaries.CRnR.mul(zetherAuxiliaries.zs[1])).mul(proof.s_sk).add(anonAuxiliaries.CR[0][0].add(Utils.g().mul(Utils.fee().mul(anonAuxiliaries.wPow))).mul(zetherAuxiliaries.zs[0].neg()).add(anonAuxiliaries.CLnR.mul(zetherAuxiliaries.zs[1])).mul(proof.c.neg())));

	//var fees bn256.G1
	//fees.ScalarMult(G, new(big.Int).SetInt64(int64( -1 )))
	//anonsupport.C_XR.Add( new(bn256.G1).Set(anonsupport.C_XR), &fees)

	sigmasupport.A_X = new(bn256.G1).Add(new(bn256.G1).ScalarMult(anonsupport.y_XR, proof.s_r), new(bn256.G1).ScalarMult(anonsupport.C_XR, proof_c_neg))

	proof_s_b_neg := new(big.Int).Mod(new(big.Int).Neg(proof.s_b), bn256.Order)
	sigmasupport.A_t = new(bn256.G1).ScalarMult(gparams.G, protsupport.t)
	sigmasupport.A_t.Add(new(bn256.G1).Set(sigmasupport.A_t), new(bn256.G1).Neg(protsupport.tEval))
	sigmasupport.A_t = new(bn256.G1).ScalarMult(sigmasupport.A_t, new(big.Int).Mod(new(big.Int).Mul(proof.c, anonsupport.wPow), bn256.Order))
	sigmasupport.A_t.Add(new(bn256.G1).Set(sigmasupport.A_t), new(bn256.G1).ScalarMult(gparams.H, proof.s_tau))
	sigmasupport.A_t.Add(new(bn256.G1).Set(sigmasupport.A_t), new(bn256.G1).ScalarMult(gparams.G, proof_s_b_neg))

	// klog.V(2).Infof("t %s\n ", protsupport.t.Text(16))
	// klog.V(2).Infof("protsupport.tEval %s\n", protsupport.tEval.String())

	{
		// A_u binds the proof to this chain state via the roothash
		var input []byte
		input = append(input, []byte(PROTOCOL_CONSTANT)...)
		input = append(input, s.Roothash[:]...)

		point := HashToPoint(HashtoNumber(input))

		sigmasupport.A_u = new(bn256.G1).ScalarMult(point, proof.s_sk)
		sigmasupport.A_u.Add(new(bn256.G1).Set(sigmasupport.A_u), new(bn256.G1).ScalarMult(proof.u, proof_c_neg))
	}

	// klog.V(2).Infof("A_y %s\n", sigmasupport.A_y.String())
	// klog.V(2).Infof("A_D %s\n", sigmasupport.A_D.String())
	// klog.V(2).Infof("A_b %s\n", sigmasupport.A_b.String())
	// klog.V(2).Infof("A_X %s\n", sigmasupport.A_X.String())
	// klog.V(2).Infof("A_t %s\n", sigmasupport.A_t.String())
	// klog.V(2).Infof("A_u %s\n", sigmasupport.A_u.String())

	{
		// Fiat-Shamir consistency: hashing the reconstructed commitments
		// must reproduce the proof's challenge c
		var input []byte
		input = append(input, ConvertBigIntToByte(x)...)
		input = append(input, sigmasupport.A_y.Marshal()...)
		input = append(input, sigmasupport.A_D.Marshal()...)
		input = append(input, sigmasupport.A_b.Marshal()...)
		input = append(input, sigmasupport.A_X.Marshal()...)
		input = append(input, sigmasupport.A_t.Marshal()...)
		input = append(input, sigmasupport.A_u.Marshal()...)

		// fmt.Printf("C calculation expected %s actual %s\n",proof.c.Text(16), reducedhash(input).Text(16) )
		if reducedhash(input).Text(16) != proof.c.Text(16) { // we must fail here
			// klog.Warning("C calculation failed")
			return false
		}
	}

	// prepare inputs for the inner product verification
	o := reducedhash(ConvertBigIntToByte(proof.c))

	u_x := new(bn256.G1).ScalarMult(gparams.H, o)

	var hPrimes []*bn256.G1
	hPrimeSum := new(bn256.G1)
	hPrimeSum.Unmarshal(zeroes[:])

	// hPrimes[i] = Hs[i]^(y^-i); hPrimeSum folds in z and 2*z^2 weights
	for i := 0; i < 128; i++ {
		hPrimes = append(hPrimes, new(bn256.G1).ScalarMult(gparams.Hs.vector[i], new(big.Int).ModInverse(protsupport.ys[i], bn256.Order)))
		// klog.V(2).Infof("hPrimes %d %s\n", i, hPrimes[i].String())

		tmp := new(big.Int).Mod(new(big.Int).Mul(protsupport.ys[i], protsupport.z), bn256.Order)
		tmp = new(big.Int).Mod(new(big.Int).Add(tmp, protsupport.twoTimesZSquared[i]), bn256.Order)

		hPrimeSum = new(bn256.G1).Add(hPrimeSum, new(bn256.G1).ScalarMult(hPrimes[i], tmp))
	}

	P := new(bn256.G1).Add(proof.BA, new(bn256.G1).ScalarMult(proof.BS, x))
	P = new(bn256.G1).Add(P, new(bn256.G1).ScalarMult(gparams.GSUM, new(big.Int).Mod(new(big.Int).Neg(protsupport.z), bn256.Order)))
	P = new(bn256.G1).Add(P, hPrimeSum)
	P = new(bn256.G1).Add(P, new(bn256.G1).ScalarMult(gparams.H, new(big.Int).Mod(new(big.Int).Neg(proof.mu), bn256.Order)))
	P = new(bn256.G1).Add(P, new(bn256.G1).ScalarMult(u_x, new(big.Int).Mod(new(big.Int).Set(proof.that), bn256.Order)))

	// klog.V(2).Infof("P %s\n", P.String())

	if !proof.ip.Verify(hPrimes, u_x, P, o, gparams) {
		// klog.Warning("inner proof failed")
		return false
	}

	// klog.V(2).Infof("proof %s\n", proof.String())

	// panic("proof successful")

	// klog.V(2).Infof("Proof successful verified\n")

	return true
}
/*
func (proof *Proof) String() string {
klog.V(1).Infof("proof BA %s\n", proof.BA.String())
klog.V(1).Infof("proof BS %s\n", proof.BS.String())
klog.V(1).Infof("proof A %s\n", proof.A.String())
klog.V(1).Infof("proof B %s\n", proof.B.String())
for i := range proof.CLnG {
klog.V(1).Infof("CLnG %d %s \n", i, proof.CLnG[i].String())
}
for i := range proof.CRnG {
klog.V(1).Infof("CRnG %d %s \n", i, proof.CRnG[i].String())
}
for i := range proof.C_0G {
klog.V(1).Infof("C_0G %d %s \n", i, proof.C_0G[i].String())
}
for i := range proof.DG {
klog.V(1).Infof("DG %d %s \n", i, proof.DG[i].String())
}
for i := range proof.y_0G {
klog.V(1).Infof("y_0G %d %s \n", i, proof.y_0G[i].String())
}
for i := range proof.gG {
klog.V(1).Infof("gG %d %s \n", i, proof.gG[i].String())
}
for i := range proof.C_XG {
klog.V(1).Infof("C_XG %d %s \n", i, proof.C_XG[i].String())
}
for i := range proof.y_XG {
klog.V(1).Infof("y_XG %d %s \n", i, proof.y_XG[i].String())
}
//for i := range proof.tCommits.vector {
// klog.V(1).Infof("tCommits %d %s \n", i, proof.tCommits.vector[i].String())
//}
klog.V(1).Infof("proof z_A %s\n", proof.z_A.Text(16))
klog.V(1).Infof("proof that %s\n", proof.that.Text(16))
klog.V(1).Infof("proof mu %s\n", proof.mu.Text(16))
klog.V(1).Infof("proof C %s\n", proof.c.Text(16))
klog.V(1).Infof("proof s_sk %s\n", proof.s_sk.Text(16))
klog.V(1).Infof("proof s_r %s\n", proof.s_r.Text(16))
klog.V(1).Infof("proof s_b %s\n", proof.s_b.Text(16))
klog.V(1).Infof("proof s_tau %s\n", proof.s_tau.Text(16))
return ""
}
*/
func assemblepolynomials(f [][2]*big.Int) [][2]*big.Int {
m := len(f) / 2
N := int(math.Pow(2, float64(m)))
result := make([][2]*big.Int, N, N)
for i := 0; i < 2; i++ {
half := recursivepolynomials(i*m, (i+1)*m, new(big.Int).SetInt64(1), f)
for j := 0; j < N; j++ {
result[j][i] = half[j]
}
}
return result
}
func recursivepolynomials(baseline, current int, accum *big.Int, f [][2]*big.Int) []*big.Int {
size := int(math.Pow(2, float64(current-baseline)))
result := make([]*big.Int, size, size)
if current == baseline {
result[0] = accum
return result
}
current--
left := recursivepolynomials(baseline, current, new(big.Int).Mod(new(big.Int).Mul(accum, f[current][0]), bn256.Order), f)
right := recursivepolynomials(baseline, current, new(big.Int).Mod(new(big.Int).Mul(accum, f[current][1]), bn256.Order), f)
for i := 0; i < size/2; i++ {
result[i] = left[i]
result[i+size/2] = right[i]
}
return result
}

View File

@ -0,0 +1,225 @@
// Copyright 2017-2021 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package crypto
import "bytes"
import "encoding/binary"
import "math/big"
import "github.com/deroproject/derohe/cryptography/bn256"
import "github.com/deroproject/graviton"
// Statement is the public part of a confidential transaction: the ring of
// accounts, their encrypted balances/commitments and the chain state the
// proof was built against.
type Statement struct {
	RingSize            uint64      // number of ring members, always a power of 2
	CLn                 []*bn256.G1 // per-member balance commitments (left); expanded from chain state, not serialized
	CRn                 []*bn256.G1 // per-member balance commitments (right); expanded from chain state, not serialized
	Bytes_per_publickey byte        // number of bytes need per public key, it will be from 1 to 32 bytes long, but will rarely be more than 4 byte
	Publickeylist_pointers []byte   // all the public keys are hashed and there necessary bits taken from the start to reach graviton leaf
	Publickeylist       []*bn256.G1 // Todo these can be skipped and collected back later on from the chain, this will save ringsize * POINTSIZE bytes
	Publickeylist_compressed [][33]byte // compressed format for public keys NOTE: only valid in deserialized transactions
	C        []*bn256.G1 // commitments
	D        *bn256.G1   // shared ElGamal randomness commitment
	Fees     uint64      // openly declared transaction fees
	Roothash [32]byte    // note roothash contains the merkle root hash of chain, when it was build
}
// Witness is the prover's secret input; it is never serialized or shared.
type Witness struct {
	SecretKey      *big.Int // sender's private key
	R              *big.Int // randomness used to build the commitments
	TransferAmount uint64   // total value being transferred
	Balance        uint64   // whatever is the amount left after transfer
	Index          []int    // index of sender in the public key list
}
// Serialize writes the statement's compact wire format to w: ring power,
// bytes-per-key, fees varint, compressed D, the truncated public key
// pointers, one compressed commitment per ring member and the roothash.
// If Publickeylist_pointers is empty it is derived (and cached on s) from
// the expanded Publickeylist. CLn/CRn/Publickeylist themselves are not
// serialized — they are re-expanded from chain state on the other side.
func (s *Statement) Serialize(w *bytes.Buffer) {
	buf := make([]byte, binary.MaxVarintLen64)

	if len(s.Publickeylist_pointers) == 0 {
		power := byte(GetPowerof2(len(s.Publickeylist))) // len(s.Publickeylist) is always power of 2
		s.serializeHeader(w, buf, power)

		// derive and cache the truncated key pointers while writing them
		s.Publickeylist_pointers = s.Publickeylist_pointers[:0]
		for i := 0; i < len(s.Publickeylist); i++ {
			hashed_key := graviton.Sum(s.Publickeylist[i].EncodeCompressed())
			w.Write(hashed_key[:s.Bytes_per_publickey])
			s.Publickeylist_pointers = append(s.Publickeylist_pointers, hashed_key[:s.Bytes_per_publickey]...)
		}
	} else {
		power := byte(GetPowerof2(len(s.Publickeylist_pointers) / int(s.Bytes_per_publickey))) // len(s.Publickeylist) is always power of 2
		s.serializeHeader(w, buf, power)
		w.Write(s.Publickeylist_pointers[:])
	}

	for i := 0; i < len(s.Publickeylist_pointers)/int(s.Bytes_per_publickey); i++ {
		// CLn/CRn/Publickeylist are expanded from the graviton store, so
		// only the commitments themselves are written here
		w.Write(s.C[i].EncodeCompressed())
	}

	w.Write(s.Roothash[:])
}

// serializeHeader writes the fields common to both Serialize paths: the ring
// power byte, bytes-per-publickey byte, fees varint and compressed D point.
// It was previously duplicated inline in both branches.
func (s *Statement) serializeHeader(w *bytes.Buffer, buf []byte, power byte) {
	w.WriteByte(power)
	w.WriteByte(s.Bytes_per_publickey)
	n := binary.PutUvarint(buf, s.Fees)
	w.Write(buf[:n])
	w.Write(s.D.EncodeCompressed())
}
// Deserialize reads a statement written by Serialize from r, rebuilding
// RingSize, fees, D, the key pointers, the commitments and the roothash.
// CLn/CRn/Publickeylist are NOT reconstructed here — they are expanded
// later from chain state using Publickeylist_pointers.
//
// NOTE(review): the r.Read calls below treat "n == want && err == nil" as
// success but return err (possibly nil) otherwise; bytes.Reader.Read can
// return a short count with a nil error, which would make this function
// report success on truncated input — consider io.ReadFull. Confirm against
// the callers' reader type.
func (s *Statement) Deserialize(r *bytes.Reader) error {
	var err error
	//var buf [32]byte
	var bufp [33]byte

	// ring power byte: RingSize = 2^length
	length, err := r.ReadByte()
	if err != nil {
		return err
	}
	s.RingSize = 1 << length

	s.Bytes_per_publickey, err = r.ReadByte()
	if err != nil {
		return err
	}

	s.Fees, err = binary.ReadUvarint(r)
	if err != nil {
		return err
	}

	// compressed D point
	if n, err := r.Read(bufp[:]); n == 33 && err == nil {
		var p bn256.G1
		if err = p.DecodeCompressed(bufp[:]); err != nil {
			return err
		}
		s.D = &p
	} else {
		return err
	}

	// reset all derived slices before refilling
	s.CLn = s.CLn[:0]
	s.CRn = s.CRn[:0]
	s.Publickeylist = s.Publickeylist[:0]
	s.Publickeylist_compressed = s.Publickeylist_compressed[:0]
	s.Publickeylist_pointers = s.Publickeylist_pointers[:0]
	s.C = s.C[:0]

	s.Publickeylist_pointers = make([]byte, s.RingSize*uint64(s.Bytes_per_publickey), s.RingSize*uint64(s.Bytes_per_publickey))

	// read all compressed pointers in 1 go
	if n, err := r.Read(s.Publickeylist_pointers); n == int(s.RingSize*uint64(s.Bytes_per_publickey)) && err == nil {
	} else {
		return err
	}

	for i := uint64(0); i < s.RingSize; i++ {
		/*
			if n,err := r.Read(bufp[:]); n == 33 && err == nil {
				var p bn256.G1
				if err = p.DecodeCompressed(bufp[:]); err != nil {
					return err
				}
				s.CLn = append(s.CLn,&p)
			}else{
				return err
			}

			if n,err := r.Read(bufp[:]); n == 33 && err == nil {
				var p bn256.G1
				if err = p.DecodeCompressed(bufp[:]); err != nil {
					return err
				}
				s.CRn = append(s.CRn,&p)
			}else{
				return err
			}
		*/

		/*
			if n, err := r.Read(bufp[:]); n == 33 && err == nil {
				var p bn256.G1
				var pcopy [33]byte
				copy(pcopy[:], bufp[:])
				if err = p.DecodeCompressed(bufp[:]); err != nil {
					return err
				}
				s.Publickeylist_compressed = append(s.Publickeylist_compressed, pcopy)
				s.Publickeylist = append(s.Publickeylist, &p)
			} else {
				return err
			}
		*/

		// one compressed commitment per ring member
		if n, err := r.Read(bufp[:]); n == 33 && err == nil {
			var p bn256.G1
			if err = p.DecodeCompressed(bufp[:]); err != nil {
				return err
			}
			s.C = append(s.C, &p)
		} else {
			return err
		}
	}

	if n, err := r.Read(s.Roothash[:]); n == 32 && err == nil {
	} else {
		return err
	}

	return nil
}
/*
type Proof struct {
BA *bn256.G1
BS *bn256.G1
A *bn256.G1
B *bn256.G1
CLnG, CRnG, C_0G, DG, y_0G, gG, C_XG, y_XG []*bn256.G1
u *bn256.G1
f *FieldVector
z_A *big.Int
T_1 *bn256.G1
T_2 *bn256.G1
that *big.Int
mu *big.Int
c *big.Int
s_sk, s_r, s_b, s_tau *big.Int
//ip *InnerProduct
}
*/

Some files were not shown because too many files have changed in this diff Show More