diff --git a/core/src/blockchain/block_info.rs b/core/src/blockchain/block_info.rs
index 3d28982280..e0aa1b0012 100644
--- a/core/src/blockchain/block_info.rs
+++ b/core/src/blockchain/block_info.rs
@@ -1,4 +1,4 @@
-// Copyright 2018 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -14,7 +14,6 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use super::route::TreeRoute;
use crate::views::{BlockView, HeaderView};
use ctypes::BlockHash;
use primitives::Bytes;
@@ -28,13 +27,6 @@ pub enum BestBlockChanged {
},
/// Nothing changed.
None,
- /// It's part of the fork which should become canon chain,
- /// because its total score is higher than current
- /// canon chain score.
- BranchBecomingCanonChain {
- best_block: Bytes,
- tree_route: TreeRoute,
- },
}
impl BestBlockChanged {
@@ -47,10 +39,6 @@ impl BestBlockChanged {
BestBlockChanged::CanonChainAppended {
best_block,
} => best_block,
- BestBlockChanged::BranchBecomingCanonChain {
- best_block,
- ..
- } => best_block,
BestBlockChanged::None => return None,
};
@@ -67,13 +55,6 @@ pub enum BestHeaderChanged {
},
/// Nothing changed.
None,
- /// It's part of the fork which should become canon chain,
- /// because its total score is higher than current
- /// canon chain score.
- BranchBecomingCanonChain {
- best_header: Vec<u8>,
- tree_route: TreeRoute,
- },
}
impl BestHeaderChanged {
@@ -86,10 +67,6 @@ impl BestHeaderChanged {
BestHeaderChanged::CanonChainAppended {
best_header,
} => best_header,
- BestHeaderChanged::BranchBecomingCanonChain {
- best_header,
- ..
- } => best_header,
BestHeaderChanged::None => return None,
};
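For reference, a minimal self-contained sketch of how the slimmed-down `BestBlockChanged` reads after this hunk: only `CanonChainAppended` and `None` remain, so the accessor collapses to a two-arm match. The `Bytes` alias and the `fn main` harness below are illustration-only stand-ins, not the actual `primitives::Bytes` or CodeChain APIs.

```rust
// Stand-in alias; the real code uses primitives::Bytes.
type Bytes = Vec<u8>;

enum BestBlockChanged {
    CanonChainAppended { best_block: Bytes },
    None,
}

impl BestBlockChanged {
    // Mirrors the remaining accessor logic: only an appended canon block
    // carries a body; `None` yields nothing.
    fn best_block(&self) -> Option<&Bytes> {
        match self {
            BestBlockChanged::CanonChainAppended { best_block } => Some(best_block),
            BestBlockChanged::None => None,
        }
    }
}

fn main() {
    let appended = BestBlockChanged::CanonChainAppended { best_block: vec![0u8; 4] };
    assert!(appended.best_block().is_some());
    assert!(BestBlockChanged::None.best_block().is_none());
}
```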
diff --git a/core/src/blockchain/blockchain.rs b/core/src/blockchain/blockchain.rs
index a206e45f7c..c814d2de45 100644
--- a/core/src/blockchain/blockchain.rs
+++ b/core/src/blockchain/blockchain.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -256,10 +256,15 @@ impl BlockChain {
0 => BestBlockChanged::CanonChainAppended {
best_block: new_best_block,
},
- _ => BestBlockChanged::BranchBecomingCanonChain {
- tree_route: route,
- best_block: new_best_block,
- },
+ _ => {
+ cerror!(
+ BLOCKCHAIN,
+ "Older/Forked block header #{}({}) is inserted as a new block",
+ new_header.number(),
+ new_header.hash()
+ );
+ BestBlockChanged::None
+ }
}
} else {
BestBlockChanged::None
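A rough miniature of the new decision above: a route that would retract blocks is no longer adopted as a new canon chain; the node only reports it and keeps its current best block. Here `retracted_len` stands in for `route.retracted.len()`, `eprintln!` for the `cerror!` macro, and the enum is the simplified two-variant form from block_info.rs.

```rust
enum BestBlockChanged {
    CanonChainAppended { best_block: Vec<u8> },
    None,
}

// Hypothetical condensation of the match in best_block_changed().
fn decide(retracted_len: usize, new_best_block: Vec<u8>) -> BestBlockChanged {
    match retracted_len {
        // The new block extends the current canon chain: append it.
        0 => BestBlockChanged::CanonChainAppended { best_block: new_best_block },
        // A fork that would retract blocks is ignored; only an error is logged.
        _ => {
            eprintln!("Older/Forked block is inserted as a new block; best block unchanged");
            BestBlockChanged::None
        }
    }
}

fn main() {
    assert!(matches!(decide(0, vec![1]), BestBlockChanged::CanonChainAppended { .. }));
    assert!(matches!(decide(2, vec![1]), BestBlockChanged::None));
}
```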
diff --git a/core/src/blockchain/body_db.rs b/core/src/blockchain/body_db.rs
index d2c5cd27a1..76742d39da 100644
--- a/core/src/blockchain/body_db.rs
+++ b/core/src/blockchain/body_db.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -36,10 +36,10 @@ pub struct BodyDB {
// block cache
body_cache: Mutex>,
address_by_hash_cache: RwLock<HashMap<TxHash, TransactionAddress>>,
- pending_addresses_by_hash: RwLock<HashMap<TxHash, Option<TransactionAddress>>>,
+ pending_addresses_by_hash: RwLock<HashMap<TxHash, TransactionAddress>>,
addresses_by_tracker_cache: Mutex<HashMap<Tracker, TransactionAddresses>>,
- pending_addresses_by_tracker: Mutex<HashMap<Tracker, Option<TransactionAddresses>>>,
+ pending_addresses_by_tracker: Mutex<HashMap<Tracker, TransactionAddresses>>,
db: Arc,
}
@@ -90,13 +90,13 @@ impl BodyDB {
pub fn update_best_block(&self, batch: &mut DBTransaction, best_block_changed: &BestBlockChanged) {
let mut pending_addresses_by_hash = self.pending_addresses_by_hash.write();
let mut pending_addresses_by_tracker = self.pending_addresses_by_tracker.lock();
- batch.extend_with_option_cache(
+ batch.extend_with_cache(
db::COL_EXTRA,
&mut *pending_addresses_by_hash,
self.new_transaction_address_entries(best_block_changed),
CacheUpdatePolicy::Overwrite,
);
- batch.extend_with_option_cache(
+ batch.extend_with_cache(
db::COL_EXTRA,
&mut *pending_addresses_by_tracker,
self.new_transaction_addresses_entries(best_block_changed),
@@ -113,38 +113,19 @@ impl BodyDB {
let mut pending_addresses_by_tracker = self.pending_addresses_by_tracker.lock();
let new_txs_by_hash = mem::replace(&mut *pending_addresses_by_hash, HashMap::new());
- let (retracted_txs, enacted_txs) =
- new_txs_by_hash.into_iter().partition::<HashMap<_, _>, _>(|&(_, ref value)| value.is_none());
- address_by_hash_cache
- .extend(enacted_txs.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed"))));
-
- for hash in retracted_txs.keys() {
- address_by_hash_cache.remove(hash);
- }
+ address_by_hash_cache.extend(new_txs_by_hash.into_iter());
let new_txs_by_tracker = mem::replace(&mut *pending_addresses_by_tracker, HashMap::new());
- let (removed_transactions, added_transactions) =
- new_txs_by_tracker.into_iter().partition::<HashMap<_, _>, _>(|&(_, ref value)| value.is_none());
-
- addresses_by_tracker_cache
- .extend(added_transactions.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed"))));
- for hash in removed_transactions.keys() {
- addresses_by_tracker_cache.remove(hash);
- }
+ addresses_by_tracker_cache.extend(new_txs_by_tracker.into_iter());
}
/// This function returns modified transaction addresses.
fn new_transaction_address_entries(
&self,
best_block_changed: &BestBlockChanged,
- ) -> HashMap<TxHash, Option<TransactionAddress>> {
- let block_hash = if let Some(best_block_hash) = best_block_changed.new_best_hash() {
- best_block_hash
- } else {
- return HashMap::new()
- };
+ ) -> HashMap<TxHash, TransactionAddress> {
let block = match best_block_changed.best_block() {
Some(block) => block,
None => return HashMap::new(),
@@ -155,27 +136,6 @@ impl BodyDB {
BestBlockChanged::CanonChainAppended {
..
} => tx_hash_and_address_entries(best_block_changed.new_best_hash().unwrap(), tx_hashes).collect(),
- BestBlockChanged::BranchBecomingCanonChain {
- tree_route,
- ..
- } => {
- let enacted = tree_route.enacted.iter().flat_map(|hash| {
- let body = self.block_body(hash).expect("Enacted block must be in database.");
- let enacted_tx_hashes = body.transaction_hashes();
- tx_hash_and_address_entries(*hash, enacted_tx_hashes)
- });
-
- let current_addresses = { tx_hash_and_address_entries(block_hash, tx_hashes) };
-
- let retracted = tree_route.retracted.iter().flat_map(|hash| {
- let body = self.block_body(&hash).expect("Retracted block must be in database.");
- let retracted_tx_hashes = body.transaction_hashes().into_iter();
- retracted_tx_hashes.map(|hash| (hash, None))
- });
-
- // The order here is important! Don't remove transactions if it was part of enacted blocks as well.
- retracted.chain(enacted).chain(current_addresses).collect()
- }
BestBlockChanged::None => HashMap::new(),
}
}
@@ -183,7 +143,7 @@ impl BodyDB {
fn new_transaction_addresses_entries(
&self,
best_block_changed: &BestBlockChanged,
- ) -> HashMap<Tracker, Option<TransactionAddresses>> {
+ ) -> HashMap<Tracker, TransactionAddresses> {
let block_hash = if let Some(best_block_hash) = best_block_changed.new_best_hash() {
best_block_hash
} else {
@@ -194,61 +154,25 @@ impl BodyDB {
None => return HashMap::new(),
};
- let (removed, added): (
- Box<dyn Iterator<Item = (Tracker, TransactionAddresses)>>,
- Box<dyn Iterator<Item = (Tracker, TransactionAddresses)>>,
- ) = match best_block_changed {
+ let added: Box<dyn Iterator<Item = (Tracker, TransactionAddresses)>> = match best_block_changed {
BestBlockChanged::CanonChainAppended {
..
- } => (
- Box::new(::std::iter::empty()),
- Box::new(tracker_and_addresses_entries(block_hash, block.transactions())),
- ),
- BestBlockChanged::BranchBecomingCanonChain {
- ref tree_route,
- ..
- } => {
- let enacted = tree_route
- .enacted
- .iter()
- .flat_map(|hash| {
- let body = self.block_body(hash).expect("Enacted block must be in database.");
- tracker_and_addresses_entries(*hash, body.transactions())
- })
- .chain(tracker_and_addresses_entries(block_hash, block.transactions()));
-
- let retracted = tree_route.retracted.iter().flat_map(|hash| {
- let body = self.block_body(hash).expect("Retracted block must be in database.");
- tracker_and_addresses_entries(*hash, body.transactions())
- });
-
- (Box::new(retracted), Box::new(enacted))
- }
+ } => Box::new(tracker_and_addresses_entries(block_hash, block.transactions())),
BestBlockChanged::None => return Default::default(),
};
let mut added_addresses: HashMap<Tracker, TransactionAddresses> = Default::default();
- let mut removed_addresses: HashMap<Tracker, TransactionAddresses> = Default::default();
let mut trackers: HashSet<Tracker> = Default::default();
for (tracker, address) in added {
trackers.insert(tracker);
*added_addresses.entry(tracker).or_insert_with(Default::default) += address;
}
- for (tracker, address) in removed {
- trackers.insert(tracker);
- *removed_addresses.entry(tracker).or_insert_with(Default::default) += address;
- }
let mut inserted_address: HashMap<Tracker, TransactionAddresses> = Default::default();
for tracker in trackers.into_iter() {
let address: TransactionAddresses = self.db.read(db::COL_EXTRA, &tracker).unwrap_or_default();
inserted_address.insert(tracker, address);
}
- for (tracker, removed_address) in removed_addresses.into_iter() {
- *inserted_address
- .get_mut(&tracker)
- .expect("inserted addresses are sum of added_addresses and removed_addresses") -= removed_address;
- }
for (tracker, added_address) in added_addresses.into_iter() {
*inserted_address
.get_mut(&tracker)
@@ -256,15 +180,6 @@ impl BodyDB {
}
inserted_address
- .into_iter()
- .map(|(hash, address)| {
- if address.is_empty() {
- (hash, None)
- } else {
- (hash, Some(address))
- }
- })
- .collect()
}
/// Create a block body from a block.
@@ -332,15 +247,12 @@ impl BodyProvider for BodyDB {
fn tx_hash_and_address_entries(
block_hash: BlockHash,
tx_hashes: impl IntoIterator<Item = TxHash>,
-) -> impl Iterator<Item = (TxHash, Option<TransactionAddress>)> {
+) -> impl Iterator<Item = (TxHash, TransactionAddress)> {
tx_hashes.into_iter().enumerate().map(move |(index, tx_hash)| {
- (
- tx_hash,
- Some(TransactionAddress {
- block_hash,
- index,
- }),
- )
+ (tx_hash, TransactionAddress {
+ block_hash,
+ index,
+ })
})
}
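The cache update above no longer needs to distinguish insertions from deletions: without retracted blocks, no cached transaction address ever has to be removed, so the `Option` wrapper and the partition step disappear. A self-contained sketch of the before/after shapes, with `u64` keys and a toy `Address` value standing in for the real hash and address types:

```rust
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
struct Address(u32);

// Old shape: pending entries carried Option<Address>; None meant "remove".
fn apply_option_pending(cache: &mut HashMap<u64, Address>, pending: HashMap<u64, Option<Address>>) {
    for (key, value) in pending {
        match value {
            Some(addr) => {
                cache.insert(key, addr);
            }
            None => {
                cache.remove(&key);
            }
        }
    }
}

// New shape: only insertions remain, so a single `extend` is enough.
fn apply_pending(cache: &mut HashMap<u64, Address>, pending: HashMap<u64, Address>) {
    cache.extend(pending);
}

fn main() {
    let mut old_cache = HashMap::from([(1, Address(10))]);
    apply_option_pending(&mut old_cache, HashMap::from([(1, None), (2, Some(Address(20)))]));
    assert_eq!(old_cache.get(&1), None);

    let mut new_cache = HashMap::from([(1, Address(10))]);
    apply_pending(&mut new_cache, HashMap::from([(2, Address(20))]));
    assert_eq!(new_cache.len(), 2);
}
```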
diff --git a/core/src/blockchain/extras.rs b/core/src/blockchain/extras.rs
index b8b38cd05f..1ff9d3422b 100644
--- a/core/src/blockchain/extras.rs
+++ b/core/src/blockchain/extras.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -128,10 +128,6 @@ impl TransactionAddresses {
addresses: vec![address],
}
}
-
- pub fn is_empty(&self) -> bool {
- self.addresses.is_empty()
- }
}
impl IntoIterator for TransactionAddresses {
diff --git a/core/src/blockchain/headerchain.rs b/core/src/blockchain/headerchain.rs
index f0a69ab826..aa813bbef5 100644
--- a/core/src/blockchain/headerchain.rs
+++ b/core/src/blockchain/headerchain.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -243,20 +243,6 @@ impl HeaderChain {
let best_header_view = HeaderView::new(best_header);
hashes.insert(best_header_view.number(), best_header_view.hash());
}
- BestHeaderChanged::BranchBecomingCanonChain {
- tree_route,
- best_header,
- } => {
- let ancestor_number = self.block_number(&tree_route.ancestor).expect("Ancestor always exist in DB");
- let start_number = ancestor_number + 1;
-
- for (index, hash) in tree_route.enacted.iter().enumerate() {
- hashes.insert(start_number + index as BlockNumber, *hash);
- }
-
- let best_header_view = HeaderView::new(best_header);
- hashes.insert(best_header_view.number(), best_header_view.hash());
- }
}
hashes
@@ -321,10 +307,15 @@ impl HeaderChain {
0 => BestHeaderChanged::CanonChainAppended {
best_header: new_best_header,
},
- _ => BestHeaderChanged::BranchBecomingCanonChain {
- tree_route: route,
- best_header: new_best_header,
- },
+ _ => {
+ cerror!(
+ HEADERCHAIN,
+ "Older/Forked block header #{}({}) is inserted as a new block",
+ new_header.number(),
+ new_header.hash()
+ );
+ BestHeaderChanged::None
+ }
}
} else {
BestHeaderChanged::None
diff --git a/core/src/blockchain/route.rs b/core/src/blockchain/route.rs
index 1256c1508b..0c6bb4ab83 100644
--- a/core/src/blockchain/route.rs
+++ b/core/src/blockchain/route.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -110,11 +110,9 @@ pub fn tree_route(db: &dyn HeaderProvider, from: BlockHash, to: BlockHash) -> Op
/// Import route for newly inserted block.
#[derive(Debug, PartialEq)]
pub struct ImportRoute {
- /// Blocks that were invalidated by new block.
- pub retracted: Vec<BlockHash>,
/// Blocks that were validated by new block.
pub enacted: Vec<BlockHash>,
- /// Blocks which are neither retracted nor enacted.
+ /// Blocks which are not enacted.
pub omitted: Vec<BlockHash>,
}
@@ -132,29 +130,14 @@ impl ImportRoute {
let mut enacted = Vec::new();
enacted.push(best_block_changed.new_best_hash().unwrap());
ImportRoute {
- retracted: vec![],
enacted,
omitted,
}
}
BestBlockChanged::None => ImportRoute {
- retracted: vec![],
enacted: vec![],
omitted,
},
- BestBlockChanged::BranchBecomingCanonChain {
- tree_route,
- ..
- } => {
- let mut enacted = tree_route.enacted.clone();
- enacted.push(best_block_changed.new_best_hash().unwrap());
- let retracted = tree_route.retracted.clone();
- ImportRoute {
- retracted,
- enacted,
- omitted,
- }
- }
}
}
@@ -170,41 +153,25 @@ impl ImportRoute {
} => {
let enacted = vec![best_header_changed.new_best_hash().unwrap()];
ImportRoute {
- retracted: vec![],
enacted,
omitted,
}
}
BestHeaderChanged::None => ImportRoute {
- retracted: vec![],
enacted: vec![],
omitted,
},
- BestHeaderChanged::BranchBecomingCanonChain {
- tree_route,
- ..
- } => {
- let mut enacted = tree_route.enacted.clone();
- enacted.push(best_header_changed.new_best_hash().unwrap());
- let retracted = tree_route.retracted.clone();
- ImportRoute {
- retracted,
- enacted,
- omitted,
- }
- }
}
}
pub fn none() -> Self {
ImportRoute {
- retracted: vec![],
enacted: vec![],
omitted: vec![],
}
}
pub fn is_none(&self) -> bool {
- self.retracted.is_empty() && self.enacted.is_empty() && self.omitted.is_empty()
+ self.enacted.is_empty() && self.omitted.is_empty()
}
}
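A compact stand-in for the reduced `ImportRoute`: with the `retracted` list gone, `none()` and `is_none()` only involve `enacted` and `omitted`. Hashes are `u64` placeholders for `BlockHash`.

```rust
#[derive(Debug, Default, PartialEq)]
struct ImportRoute {
    enacted: Vec<u64>,
    omitted: Vec<u64>,
}

impl ImportRoute {
    fn none() -> Self {
        ImportRoute::default()
    }

    // "Nothing happened" is now simply both lists being empty.
    fn is_none(&self) -> bool {
        self.enacted.is_empty() && self.omitted.is_empty()
    }
}

fn main() {
    assert!(ImportRoute::none().is_none());
    let appended = ImportRoute {
        enacted: vec![42],
        omitted: vec![],
    };
    assert!(!appended.is_none());
}
```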
diff --git a/core/src/client/chain_notify.rs b/core/src/client/chain_notify.rs
index a4868a7571..70407c4be7 100644
--- a/core/src/client/chain_notify.rs
+++ b/core/src/client/chain_notify.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -25,7 +25,6 @@ pub trait ChainNotify: Send + Sync {
_imported: Vec<BlockHash>,
_invalid: Vec<BlockHash>,
_enacted: Vec<BlockHash>,
- _retracted: Vec<BlockHash>,
_sealed: Vec<BlockHash>,
_new_best_proposal: Option,
) {
@@ -38,7 +37,6 @@ pub trait ChainNotify: Send + Sync {
_imported: Vec<BlockHash>,
_invalid: Vec<BlockHash>,
_enacted: Vec<BlockHash>,
- _retracted: Vec<BlockHash>,
_sealed: Vec<BlockHash>,
) {
// does nothing by default
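For illustration, a minimal implementor after the `retracted` parameter is dropped from `new_blocks`. `BlockHash` is a `u64` stand-in and the `new_headers` callback (which also carries a best-proposal argument) is omitted; this is only the reduced shape of the trait, not the actual CodeChain definition.

```rust
type BlockHash = u64;

trait ChainNotify: Send + Sync {
    // Same parameter list as the updated trait method, minus `retracted`.
    fn new_blocks(
        &self,
        _imported: Vec<BlockHash>,
        _invalid: Vec<BlockHash>,
        _enacted: Vec<BlockHash>,
        _sealed: Vec<BlockHash>,
    ) {
        // does nothing by default
    }
}

struct Logger;

impl ChainNotify for Logger {
    fn new_blocks(
        &self,
        imported: Vec<BlockHash>,
        _invalid: Vec<BlockHash>,
        enacted: Vec<BlockHash>,
        _sealed: Vec<BlockHash>,
    ) {
        println!("imported={:?} enacted={:?}", imported, enacted);
    }
}

fn main() {
    Logger.new_blocks(vec![1], vec![], vec![1], vec![]);
}
```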
diff --git a/core/src/client/client.rs b/core/src/client/client.rs
index 562354b198..ca3e66bef8 100644
--- a/core/src/client/client.rs
+++ b/core/src/client/client.rs
@@ -150,18 +150,9 @@ impl Client {
imported: &[BlockHash],
invalid: &[BlockHash],
enacted: &[BlockHash],
- retracted: &[BlockHash],
sealed: &[BlockHash],
) {
- self.notify(|notify| {
- notify.new_blocks(
- imported.to_vec(),
- invalid.to_vec(),
- enacted.to_vec(),
- retracted.to_vec(),
- sealed.to_vec(),
- )
- });
+ self.notify(|notify| notify.new_blocks(imported.to_vec(), invalid.to_vec(), enacted.to_vec(), sealed.to_vec()));
}
pub fn new_headers(
@@ -169,7 +160,6 @@ impl Client {
imported: &[BlockHash],
invalid: &[BlockHash],
enacted: &[BlockHash],
- retracted: &[BlockHash],
sealed: &[BlockHash],
new_best_proposal: Option,
) {
@@ -178,7 +168,6 @@ impl Client {
imported.to_vec(),
invalid.to_vec(),
enacted.to_vec(),
- retracted.to_vec(),
sealed.to_vec(),
new_best_proposal,
);
@@ -273,9 +262,9 @@ impl Client {
return
}
- let (enacted, retracted) = self.importer.calculate_enacted_retracted(&[route]);
- self.importer.miner.chain_new_blocks(self, &[], &[], &enacted, &retracted);
- self.new_blocks(&[], &[], &enacted, &retracted, &[]);
+ let enacted = self.importer.extract_enacted(vec![route]);
+ self.importer.miner.chain_new_blocks(self, &[], &[], &enacted);
+ self.new_blocks(&[], &[], &enacted, &[]);
}
fn block_number_ref(&self, id: &BlockId) -> Option<BlockNumber> {
@@ -664,9 +653,9 @@ impl ImportBlock for Client {
cinfo!(CLIENT, "Imported sealed block #{} ({})", number, h);
route
};
- let (enacted, retracted) = self.importer.calculate_enacted_retracted(&[route]);
- self.importer.miner.chain_new_blocks(self, &[h], &[], &enacted, &retracted);
- self.new_blocks(&[h], &[], &enacted, &retracted, &[h]);
+ let enacted = self.importer.extract_enacted(vec![route]);
+ self.importer.miner.chain_new_blocks(self, &[h], &[], &enacted);
+ self.new_blocks(&[h], &[], &enacted, &[h]);
self.db().flush().expect("DB flush failed.");
Ok(h)
}
diff --git a/core/src/client/importer.rs b/core/src/client/importer.rs
index 5c70c80518..5c99aa5044 100644
--- a/core/src/client/importer.rs
+++ b/core/src/client/importer.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -33,7 +33,8 @@ use kvdb::DBTransaction;
use parking_lot::{Mutex, MutexGuard};
use rlp::Encodable;
use std::borrow::Borrow;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashSet;
+use std::iter::FromIterator;
use std::sync::Arc;
pub struct Importer {
@@ -134,9 +135,9 @@ impl Importer {
if !is_empty {
ctrace!(CLIENT, "Call new_blocks even though block verification queue is not empty");
}
- let (enacted, retracted) = self.calculate_enacted_retracted(&import_results);
- self.miner.chain_new_blocks(client, &imported_blocks, &invalid_blocks, &enacted, &retracted);
- client.new_blocks(&imported_blocks, &invalid_blocks, &enacted, &retracted, &[]);
+ let enacted = self.extract_enacted(import_results);
+ self.miner.chain_new_blocks(client, &imported_blocks, &invalid_blocks, &enacted);
+ client.new_blocks(&imported_blocks, &invalid_blocks, &enacted, &[]);
}
}
@@ -144,30 +145,12 @@ impl Importer {
imported
}
- pub fn calculate_enacted_retracted(&self, import_results: &[ImportRoute]) -> (Vec<BlockHash>, Vec<BlockHash>) {
- fn map_to_vec(map: Vec<(BlockHash, bool)>) -> Vec<BlockHash> {
- map.into_iter().map(|(k, _v)| k).collect()
- }
-
- // In ImportRoute we get all the blocks that have been enacted and retracted by single insert.
- // Because we are doing multiple inserts some of the blocks that were enacted in import `k`
- // could be retracted in import `k+1`. This is why to understand if after all inserts
- // the block is enacted or retracted we iterate over all routes and at the end final state
- // will be in the hashmap
- let map = import_results.iter().fold(HashMap::new(), |mut map, route| {
- for hash in &route.enacted {
- map.insert(*hash, true);
- }
- for hash in &route.retracted {
- map.insert(*hash, false);
- }
- map
+ pub fn extract_enacted(&self, import_results: Vec<ImportRoute>) -> Vec<BlockHash> {
+ let set = import_results.into_iter().fold(HashSet::new(), |mut set, route| {
+ set.extend(route.enacted);
+ set
});
-
- // Split to enacted retracted (using hashmap value)
- let (enacted, retracted) = map.into_iter().partition(|&(_k, v)| v);
- // And convert tuples to keys
- (map_to_vec(enacted), map_to_vec(retracted))
+ Vec::from_iter(set)
}
// NOTE: the header of the block passed here is not necessarily sealed, as
@@ -336,7 +319,7 @@ impl Importer {
}
self.header_queue.mark_as_bad(&bad.drain().collect::<Vec<_>>());
- let (enacted, retracted) = self.calculate_enacted_retracted(&routes);
+ let enacted = self.extract_enacted(routes);
let new_best_proposal_header_hash = client.block_chain().best_proposal_header().hash();
let best_proposal_header_changed = if prev_best_proposal_header_hash != new_best_proposal_header_hash {
@@ -349,7 +332,6 @@ impl Importer {
&imported,
&bad.iter().cloned().collect::<Vec<_>>(),
&enacted,
- &retracted,
&[],
best_proposal_header_changed,
);
@@ -370,7 +352,7 @@ impl Importer {
client.db().write_buffered(batch);
chain.commit();
}
- client.new_headers(&[hash], &[], &[], &[], &[], None);
+ client.new_headers(&[hash], &[], &[], &[], None);
client.db().flush().expect("DB flush failed.");
}
@@ -388,8 +370,8 @@ impl Importer {
client.db().write_buffered(batch);
chain.commit();
}
- self.miner.chain_new_blocks(client, &[hash], &[], &[], &[]);
- client.new_blocks(&[hash], &[], &[], &[], &[]);
+ self.miner.chain_new_blocks(client, &[hash], &[], &[]);
+ client.new_blocks(&[hash], &[], &[], &[]);
client.db().flush().expect("DB flush failed.");
}
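`extract_enacted` replaces `calculate_enacted_retracted`: since a single import can no longer retract blocks, the per-route results only need to be unioned and de-duplicated. A standalone sketch that mirrors the new helper with placeholder types (`u64` for `BlockHash`, an `ImportRoute` that keeps only `enacted`):

```rust
use std::collections::HashSet;
use std::iter::FromIterator;

type BlockHash = u64;

struct ImportRoute {
    enacted: Vec<BlockHash>,
}

// Union all enacted hashes across routes; a HashSet replaces the old
// enacted/retracted bookkeeping map.
fn extract_enacted(import_results: Vec<ImportRoute>) -> Vec<BlockHash> {
    let set = import_results.into_iter().fold(HashSet::new(), |mut set, route| {
        set.extend(route.enacted);
        set
    });
    Vec::from_iter(set)
}

fn main() {
    let routes = vec![
        ImportRoute { enacted: vec![1, 2] },
        ImportRoute { enacted: vec![2, 3] },
    ];
    let mut enacted = extract_enacted(routes);
    enacted.sort();
    assert_eq!(enacted, vec![1, 2, 3]);
}
```

Because the result comes out of a `HashSet`, its order is unspecified; the notification paths above do not appear to depend on ordering, but it is worth keeping in mind for future callers.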
diff --git a/core/src/consensus/tendermint/chain_notify.rs b/core/src/consensus/tendermint/chain_notify.rs
index 35f16af566..6de498e2f4 100644
--- a/core/src/consensus/tendermint/chain_notify.rs
+++ b/core/src/consensus/tendermint/chain_notify.rs
@@ -38,7 +38,6 @@ impl ChainNotify for TendermintChainNotify {
imported: Vec<BlockHash>,
_invalid: Vec<BlockHash>,
enacted: Vec<BlockHash>,
- _retracted: Vec<BlockHash>,
_sealed: Vec<BlockHash>,
) {
self.inner
diff --git a/core/src/db.rs b/core/src/db.rs
index b37869eb08..c43cbe49bd 100644
--- a/core/src/db.rs
+++ b/core/src/db.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -149,39 +149,6 @@ pub trait Writable {
}
}
}
-
- /// Writes and removes the values into the database and updates the cache.
- fn extend_with_option_cache(
- &mut self,
- col: Option,
- cache: &mut dyn Cache>,
- values: HashMap>,
- policy: CacheUpdatePolicy,
- ) where
- K: Key + Hash + Eq,
- T: rlp::Encodable,
- R: Deref, {
- match policy {
- CacheUpdatePolicy::Overwrite => {
- for (key, value) in values {
- match value {
- Some(ref v) => self.write(col, &key, v),
- None => self.delete(col, &key),
- }
- cache.insert(key, value);
- }
- }
- CacheUpdatePolicy::Remove => {
- for (key, value) in values {
- match value {
- Some(v) => self.write(col, &key, &v),
- None => self.delete(col, &key),
- }
- cache.remove(&key);
- }
- }
- }
- }
}
/// Should be used to read values from database.
diff --git a/core/src/miner/mem_pool.rs b/core/src/miner/mem_pool.rs
index 75db23b599..50d34cfa63 100644
--- a/core/src/miner/mem_pool.rs
+++ b/core/src/miner/mem_pool.rs
@@ -158,7 +158,7 @@ impl MemPool {
.filter(|order| {
count += 1;
mem_usage += order.mem_usage;
- !order.origin.is_local_or_retracted() && (mem_usage > memory_limit || count > limit)
+ !order.origin.is_local() && (mem_usage > memory_limit || count > limit)
})
.cloned()
.collect()
@@ -985,12 +985,7 @@ pub mod test {
#[test]
fn origin_ordering() {
assert_eq!(TxOrigin::Local.cmp(&TxOrigin::External), Ordering::Less);
- assert_eq!(TxOrigin::RetractedBlock.cmp(&TxOrigin::Local), Ordering::Less);
- assert_eq!(TxOrigin::RetractedBlock.cmp(&TxOrigin::External), Ordering::Less);
-
assert_eq!(TxOrigin::External.cmp(&TxOrigin::Local), Ordering::Greater);
- assert_eq!(TxOrigin::Local.cmp(&TxOrigin::RetractedBlock), Ordering::Greater);
- assert_eq!(TxOrigin::External.cmp(&TxOrigin::RetractedBlock), Ordering::Greater);
}
#[test]
diff --git a/core/src/miner/mem_pool_types.rs b/core/src/miner/mem_pool_types.rs
index a7df671cba..f11e6d24aa 100644
--- a/core/src/miner/mem_pool_types.rs
+++ b/core/src/miner/mem_pool_types.rs
@@ -1,4 +1,4 @@
-// Copyright 2019 Kodebox, Inc.
+// Copyright 2019-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -33,21 +33,17 @@ pub enum TxOrigin {
Local,
/// External transaction received from network
External,
- /// Transaction from retracted blocks
- RetractedBlock,
}
type TxOriginType = u8;
const LOCAL: TxOriginType = 0x01;
const EXTERNAL: TxOriginType = 0x02;
-const RETRACTEDBLOCK: TxOriginType = 0x03;
impl Encodable for TxOrigin {
fn rlp_append(&self, s: &mut RlpStream) {
match self {
TxOrigin::Local => LOCAL.rlp_append(s),
TxOrigin::External => EXTERNAL.rlp_append(s),
- TxOrigin::RetractedBlock => RETRACTEDBLOCK.rlp_append(s),
};
}
}
@@ -57,7 +53,6 @@ impl Decodable for TxOrigin {
match d.as_val().expect("rlp decode Error") {
LOCAL => Ok(TxOrigin::Local),
EXTERNAL => Ok(TxOrigin::External),
- RETRACTEDBLOCK => Ok(TxOrigin::RetractedBlock),
_ => Err(DecoderError::Custom("Unexpected Txorigin type")),
}
}
@@ -76,8 +71,6 @@ impl Ord for TxOrigin {
}
match (*self, *other) {
- (TxOrigin::RetractedBlock, _) => Ordering::Less,
- (_, TxOrigin::RetractedBlock) => Ordering::Greater,
(TxOrigin::Local, _) => Ordering::Less,
_ => Ordering::Greater,
}
@@ -89,10 +82,6 @@ impl TxOrigin {
self == TxOrigin::Local
}
- pub fn is_local_or_retracted(self) -> bool {
- self == TxOrigin::Local || self == TxOrigin::RetractedBlock
- }
-
pub fn is_external(self) -> bool {
self == TxOrigin::External
}
@@ -329,7 +318,7 @@ impl CurrentQueue {
pub fn insert(&mut self, order: TransactionOrder) {
self.queue.insert(order);
- if !order.origin.is_local_or_retracted() {
+ if !order.origin.is_local() {
self.mem_usage += order.mem_usage;
self.count += 1;
}
@@ -338,7 +327,7 @@ impl CurrentQueue {
pub fn remove(&mut self, order: &TransactionOrder) {
assert!(self.queue.remove(order));
- if !order.origin.is_local_or_retracted() {
+ if !order.origin.is_local() {
self.mem_usage -= order.mem_usage;
self.count -= 1;
}
@@ -388,7 +377,7 @@ impl FutureQueue {
pub fn insert(&mut self, order: TransactionOrder) {
self.queue.insert(order);
- if !order.origin.is_local_or_retracted() {
+ if !order.origin.is_local() {
self.mem_usage += order.mem_usage;
self.count += 1;
}
@@ -396,7 +385,7 @@ impl FutureQueue {
pub fn remove(&mut self, order: &TransactionOrder) {
assert!(self.queue.remove(order));
- if !order.origin.is_local_or_retracted() {
+ if !order.origin.is_local() {
self.mem_usage -= order.mem_usage;
self.count -= 1;
}
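With `RetractedBlock` gone, `TxOrigin`'s ordering collapses to "local sorts before external". A standalone sketch of the two-variant ordering that matches the remaining `origin_ordering` assertions in mem_pool.rs:

```rust
use std::cmp::Ordering;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TxOrigin {
    Local,
    External,
}

impl Ord for TxOrigin {
    fn cmp(&self, other: &Self) -> Ordering {
        if self == other {
            return Ordering::Equal
        }
        // Local transactions always sort ahead of external ones.
        match (*self, *other) {
            (TxOrigin::Local, _) => Ordering::Less,
            _ => Ordering::Greater,
        }
    }
}

impl PartialOrd for TxOrigin {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    assert_eq!(TxOrigin::Local.cmp(&TxOrigin::External), Ordering::Less);
    assert_eq!(TxOrigin::External.cmp(&TxOrigin::Local), Ordering::Greater);
}
```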
diff --git a/core/src/miner/miner.rs b/core/src/miner/miner.rs
index f1f8c955a0..97afce1cc0 100644
--- a/core/src/miner/miner.rs
+++ b/core/src/miner/miner.rs
@@ -598,30 +598,11 @@ impl MinerService for Miner {
self.mem_pool.write().set_limit(limit)
}
- fn chain_new_blocks<C>(
- &self,
- chain: &C,
- _imported: &[BlockHash],
- _invalid: &[BlockHash],
- _enacted: &[BlockHash],
- retracted: &[BlockHash],
- ) where
+ fn chain_new_blocks<C>(&self, chain: &C, _imported: &[BlockHash], _invalid: &[BlockHash], _enacted: &[BlockHash])
+ where
C: AccountData + BlockChainTrait + BlockProducer + EngineInfo + ImportBlock, {
ctrace!(MINER, "chain_new_blocks");
- // Then import all transactions...
- {
- let mut mem_pool = self.mem_pool.write();
- for hash in retracted {
- let block = chain.block(&(*hash).into()).expect(
- "Client is sending message after commit to db and inserting to chain; the block is available; qed",
- );
- let transactions = block.transactions();
- let _ = self.add_transactions_to_pool(chain, transactions, TxOrigin::RetractedBlock, &mut mem_pool);
- }
- }
-
- // ...and at the end remove the old ones
{
let fetch_account = |p: &Public| {
let address = public_to_address(p);
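After this hunk, `chain_new_blocks` no longer re-queues transactions from retracted blocks; only the clean-up of transactions that became stale or were mined remains (the block elided above). A hypothetical, heavily simplified illustration of that remaining responsibility, with `u64` standing in for transaction hashes and a toy pool type:

```rust
use std::collections::HashSet;

type TxHash = u64;

struct MemPool {
    pending: Vec<TxHash>,
}

impl MemPool {
    // Drop every pooled transaction that was included in an enacted block.
    fn remove_mined(&mut self, mined: &HashSet<TxHash>) {
        self.pending.retain(|tx| !mined.contains(tx));
    }
}

fn main() {
    let mut pool = MemPool {
        pending: vec![1, 2, 3],
    };
    let mined: HashSet<TxHash> = vec![2].into_iter().collect();
    pool.remove_mined(&mined);
    assert_eq!(pool.pending, vec![1, 3]);
}
```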
diff --git a/core/src/miner/mod.rs b/core/src/miner/mod.rs
index 2b1d606ff1..7d459d537e 100644
--- a/core/src/miner/mod.rs
+++ b/core/src/miner/mod.rs
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -63,14 +63,8 @@ pub trait MinerService: Send + Sync {
fn set_transactions_limit(&self, limit: usize);
/// Called when blocks are imported to chain, updates transactions queue.
- fn chain_new_blocks<C>(
- &self,
- chain: &C,
- imported: &[BlockHash],
- invalid: &[BlockHash],
- enacted: &[BlockHash],
- retracted: &[BlockHash],
- ) where
+ fn chain_new_blocks<C>(&self, chain: &C, imported: &[BlockHash], invalid: &[BlockHash], enacted: &[BlockHash])
+ where
C: AccountData + BlockChainTrait + BlockProducer + EngineInfo + ImportBlock;
/// Get the type of consensus engine.
diff --git a/sync/src/block/extension.rs b/sync/src/block/extension.rs
index c007b56301..d0bbf6f82d 100644
--- a/sync/src/block/extension.rs
+++ b/sync/src/block/extension.rs
@@ -666,9 +666,8 @@ impl NetworkExtension for Extension {
Event::NewHeaders {
imported,
enacted,
- retracted,
} => {
- self.new_headers(imported, enacted, retracted);
+ self.new_headers(imported, enacted);
}
Event::NewBlocks {
imported,
@@ -685,7 +684,6 @@ pub enum Event {
NewHeaders {
imported: Vec<BlockHash>,
enacted: Vec<BlockHash>,
- retracted: Vec<BlockHash>,
},
NewBlocks {
imported: Vec<BlockHash>,
@@ -694,7 +692,7 @@ pub enum Event {
}
impl Extension {
- fn new_headers(&mut self, imported: Vec<BlockHash>, enacted: Vec<BlockHash>, retracted: Vec<BlockHash>) {
+ fn new_headers(&mut self, imported: Vec<BlockHash>, enacted: Vec<BlockHash>) {
if let State::Full = self.state {
for peer in self.header_downloaders.values_mut() {
peer.mark_as_imported(imported.clone());
@@ -715,7 +713,6 @@ impl Extension {
for header in headers {
self.body_downloader.add_target(&header.decode());
}
- self.body_downloader.remove_target(&retracted);
}
}
@@ -1184,7 +1181,6 @@ impl ChainNotify for BlockSyncSender {
imported: Vec<BlockHash>,
_invalid: Vec<BlockHash>,
enacted: Vec<BlockHash>,
- retracted: Vec<BlockHash>,
_sealed: Vec<BlockHash>,
_new_best_proposal: Option,
) {
@@ -1192,7 +1188,6 @@ impl ChainNotify for BlockSyncSender {
.send(Event::NewHeaders {
imported,
enacted,
- retracted,
})
.unwrap();
}
@@ -1202,7 +1197,6 @@ impl ChainNotify for BlockSyncSender {
imported: Vec<BlockHash>,
invalid: Vec<BlockHash>,
_enacted: Vec<BlockHash>,
- _retracted: Vec<BlockHash>,
_sealed: Vec<BlockHash>,
) {
self.0
diff --git a/test/src/e2e.long/reward2.test.ts b/test/src/e2e.long/reward2.test.ts
index 182328ba37..6e8e11d46d 100644
--- a/test/src/e2e.long/reward2.test.ts
+++ b/test/src/e2e.long/reward2.test.ts
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -51,149 +51,6 @@ describe("reward2", function() {
).to.deep.equal(new U64(50));
}).timeout(30_000);
- it("alice creates one block and bob creates two blocks in parallel. And then, sync", async function() {
- await nodeA.sdk.rpc.devel.startSealing();
- expect(
- await nodeA.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50));
-
- await nodeB.sdk.rpc.devel.startSealing();
- await nodeB.sdk.rpc.devel.startSealing();
- expect(await nodeB.sdk.rpc.chain.getBalance(bobAddress)).to.deep.equal(
- new U64(100)
- );
-
- await nodeA.connect(nodeB);
- await nodeA.waitBlockNumberSync(nodeB);
-
- expect(
- await nodeA.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(0));
- expect(await nodeA.sdk.rpc.chain.getBalance(bobAddress)).to.deep.equal(
- new U64(100)
- );
- }).timeout(30_000);
-
- it("A reorganization of block rewards and payments", async function() {
- // nodeA creates a block
- {
- await nodeA.sdk.rpc.devel.startSealing(); // +50 for alice
- expect(
- await nodeA.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50));
- }
-
- // sync and disconnect
- {
- await nodeB.connect(nodeA);
- await nodeB.waitBlockNumberSync(nodeA);
-
- expect(
- await nodeB.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50));
-
- await nodeB.disconnect(nodeA);
- }
-
- // nodeA creates 2 blocks
- {
- await nodeA.pay(aliceAddress, 100); // +100 +50 +10*4/10 for alice in nodeA, +10*3/10 for bob
- expect(
- await nodeA.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50 + 100 + 50 + 4));
- expect(
- await nodeA.sdk.rpc.chain.getBalance(bobAddress)
- ).to.deep.equal(new U64(3));
- await nodeA.sdk.rpc.chain.sendSignedTransaction(
- nodeA.sdk.core
- .createPayTransaction({
- recipient: bobAddress,
- quantity: 5
- })
- .sign({
- secret: aliceSecret,
- fee: 10,
- seq: 0
- })
- ); // +50 -5 + 10*4/10 -10 for alice, +5 +10*3/10 for bob in nodeA
-
- expect(
- await nodeA.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50 + 100 + 50 + 4 + 50 - 5 + 4 - 10));
- expect(
- await nodeA.sdk.rpc.chain.getBalance(bobAddress)
- ).to.deep.equal(new U64(3 + 5 + 3));
- }
-
- // nodeB creates 3 blocks
- {
- await nodeB.pay(aliceAddress, 200); // +200 +10*4/10 for alice, +50 +10*3/10 for bob in nodeB
- expect(
- await nodeB.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50 + 200 + 4));
- expect(
- await nodeB.sdk.rpc.chain.getBalance(bobAddress)
- ).to.deep.equal(new U64(50 + 3));
- await nodeB.pay(bobAddress, 300); // 10*4/10 for alice, +300 +50 +10*3/10 for bob in nodeB
- expect(
- await nodeB.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50 + 200 + 4 + 4));
- expect(
- await nodeB.sdk.rpc.chain.getBalance(bobAddress)
- ).to.deep.equal(new U64(50 + 3 + 300 + 50 + 3));
- await nodeB.sdk.rpc.chain.sendSignedTransaction(
- nodeB.sdk.core
- .createPayTransaction({
- recipient: bobAddress,
- quantity: 15
- })
- .sign({
- secret: aliceSecret,
- fee: 10,
- seq: 0
- })
- ); // -15 -10 +10*4/10 for alice. +50 + 15 + 10*3/10 for bob in nodeB
- expect(
- await nodeB.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50 + 200 + 4 + 4 - 15 - 10 + 4));
- expect(
- await nodeB.sdk.rpc.chain.getBalance(bobAddress)
- ).to.deep.equal(new U64(50 + 3 + 300 + 50 + 3 + 50 + 15 + 3));
- }
-
- // sync. nodeA now sees nodeB's state
- {
- const nodeBBestBlockHash = await nodeB.getBestBlockHash();
- expect(await nodeB.getBestBlockNumber()).to.equal(4);
-
- await nodeB.connect(nodeA);
- await nodeA.waitBlockNumberSync(nodeB);
- expect(await nodeA.getBestBlockHash()).to.deep.equal(
- nodeBBestBlockHash
- );
-
- expect(
- await nodeA.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(new U64(50 + 200 + 4 + 4 - 15 - 10 + 4));
- expect(
- await nodeA.sdk.rpc.chain.getBalance(bobAddress)
- ).to.deep.equal(new U64(50 + 3 + 300 + 50 + 3 + 50 + 15 + 3));
- }
-
- // nodeA creates a block
- {
- await nodeA.pay(aliceAddress, 1000); // +1000 + 50 + 10*4/10 for alice, 10*3/10 for bob
- expect(
- await nodeA.sdk.rpc.chain.getBalance(aliceAddress)
- ).to.deep.equal(
- new U64(50 + 200 + 4 + 4 - 15 - 10 + 4 + 1000 + 50 + 4)
- );
- expect(
- await nodeA.sdk.rpc.chain.getBalance(bobAddress)
- ).to.deep.equal(new U64(50 + 3 + 300 + 50 + 3 + 50 + 15 + 3 + 3));
- }
- }).timeout(120_000);
-
afterEach(async function() {
if (this.currentTest!.state === "failed") {
nodeA.keepLogs();
diff --git a/test/src/e2e.long/sync2.test.ts b/test/src/e2e.long/sync2.test.ts
index f63f821e25..64a3415b32 100644
--- a/test/src/e2e.long/sync2.test.ts
+++ b/test/src/e2e.long/sync2.test.ts
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -58,27 +58,6 @@ describe("sync 2 nodes", function() {
await nodeB.sdk.rpc.chain.getTransaction(transaction.hash())
).not.null;
}).timeout(30_000);
-
- describe("A-B diverged", function() {
- beforeEach(async function() {
- await nodeA.sendPayTx();
- await nodeB.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- it("It should be synced when nodeA becomes ahead", async function() {
- await nodeA.sendPayTx();
- await nodeB.waitBlockNumberSync(nodeA);
- expect(await nodeA.getBestBlockHash()).to.deep.equal(
- await nodeB.getBestBlockHash()
- );
- }).timeout(10_000);
- });
});
describe("nodeA becomes ahead", function() {
@@ -94,104 +73,6 @@ describe("sync 2 nodes", function() {
);
}).timeout(10_000);
});
-
- describe("A-B diverged", function() {
- beforeEach(async function() {
- await nodeA.sendPayTx();
- await nodeB.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- describe("nodeA becomes ahead", function() {
- beforeEach(async function() {
- await nodeA.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- (await nodeB.getBestBlockNumber()) + 1
- );
- });
-
- it("It should be synced when A-B connected", async function() {
- await nodeA.connect(nodeB);
- await nodeB.waitBlockNumberSync(nodeA);
- expect(await nodeA.getBestBlockHash()).to.deep.equal(
- await nodeB.getBestBlockHash()
- );
- }).timeout(30_000);
- });
- });
-
- describe("A-B diverged with the same transaction", function() {
- beforeEach(async function() {
- const transactionA = await nodeA.sendPayTx({ fee: 10 });
- await wait(1000);
- const transactionB = await nodeB.sendPayTx({ fee: 10 });
- expect(transactionA.unsigned).to.deep.equal(
- transactionB.unsigned
- );
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- describe("nodeA becomes ahead", function() {
- beforeEach(async function() {
- await nodeA.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- (await nodeB.getBestBlockNumber()) + 1
- );
- });
-
- it("It should be synced when A-B connected", async function() {
- await nodeA.connect(nodeB);
- await nodeB.waitBlockNumberSync(nodeA);
- expect(await nodeA.getBestBlockHash()).to.deep.equal(
- await nodeB.getBestBlockHash()
- );
- }).timeout(30_000);
- });
- });
-
- describe("A-B diverged with the same transaction", function() {
- describe("Both transaction success", function() {
- beforeEach(async function() {
- const recipient = await nodeA.createP2PKHAddress();
- await nodeA.mintAsset({ supply: 10, recipient });
- await nodeB.mintAsset({ supply: 10, recipient });
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- describe("nodeA becomes ahead", function() {
- beforeEach(async function() {
- this.timeout(60_000);
- await nodeA.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- (await nodeB.getBestBlockNumber()) + 1
- );
- });
-
- it("It should be synced when A-B connected", async function() {
- await nodeA.connect(nodeB);
- await nodeB.waitBlockNumberSync(nodeA);
- expect(await nodeA.getBestBlockHash()).to.deep.equal(
- await nodeB.getBestBlockHash()
- );
- }).timeout(30_000);
- });
- });
- });
});
describe("with no transaction relay", function() {
diff --git a/test/src/e2e.long/sync3.test.ts b/test/src/e2e.long/sync3.test.ts
index ab724ad702..5a3b658faa 100644
--- a/test/src/e2e.long/sync3.test.ts
+++ b/test/src/e2e.long/sync3.test.ts
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -67,39 +67,6 @@ describe("sync 3 nodes", function() {
);
}
}).timeout(15_000 + 10_000 * NUM_NODES);
-
- describe("All diverged by both end nodes", function() {
- beforeEach(async function() {
- const nodeA = nodes[0];
- const nodeB = nodes[NUM_NODES - 1];
- await nodeA.sendPayTx();
- await nodeB.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- it("Every node should be synced to one", async function() {
- const waits = [];
- for (let i = 1; i < NUM_NODES; i++) {
- waits.push(nodes[i].waitBlockNumberSync(nodes[0]));
- }
- await Promise.all(waits);
- }).timeout(15_000 + 10_000 * NUM_NODES);
-
- it("It should be synced when the first node becomes ahead", async function() {
- await nodes[0].sendPayTx();
- for (let i = 1; i < NUM_NODES; i++) {
- await nodes[i].waitBlockNumberSync(nodes[i - 1]);
- expect(await nodes[i].getBestBlockHash()).to.deep.equal(
- await nodes[0].getBestBlockHash()
- );
- }
- }).timeout(15_000 + 10_000 * NUM_NODES);
- });
});
describe("the first node becomes ahead", function() {
@@ -153,39 +120,6 @@ describe("sync 3 nodes", function() {
).to.deep.equal(transaction.blockHash);
}
}).timeout(15_000 + 10_000 * NUM_NODES);
-
- describe("All diverged by two nodes in the opposite", function() {
- beforeEach(async function() {
- const nodeA = nodes[0];
- const nodeB = nodes[numHalf];
- await nodeA.sendPayTx();
- await nodeB.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- it("Every node should be synced", async function() {
- const waits = [];
- for (let i = 1; i < NUM_NODES; i++) {
- waits.push(nodes[i].waitBlockNumberSync(nodes[0]));
- }
- await Promise.all(waits);
- }).timeout(15_000 + 10_000 * NUM_NODES);
-
- it("It should be synced when the first node becomes ahead", async function() {
- await nodes[0].sendPayTx();
- for (let i = 1; i < NUM_NODES; i++) {
- await nodes[i].waitBlockNumberSync(nodes[i - 1]);
- expect(await nodes[i].getBestBlockHash()).to.deep.equal(
- await nodes[0].getBestBlockHash()
- );
- }
- }).timeout(15_000 + 10_000 * NUM_NODES);
- });
}).timeout(NUM_NODES * 60_000);
afterEach(async function() {
diff --git a/test/src/e2e.long/sync5.test.ts b/test/src/e2e.long/sync5.test.ts
index 32f02aa903..9c20e821de 100644
--- a/test/src/e2e.long/sync5.test.ts
+++ b/test/src/e2e.long/sync5.test.ts
@@ -1,4 +1,4 @@
-// Copyright 2018-2019 Kodebox, Inc.
+// Copyright 2018-2020 Kodebox, Inc.
// This file is part of CodeChain.
//
// This program is free software: you can redistribute it and/or modify
@@ -62,39 +62,6 @@ describe("sync 5 nodes", function() {
);
}
}).timeout(5000 + 10000 * NUM_NODES);
-
- describe("All diverged by both end nodes", function() {
- beforeEach(async function() {
- const nodeA = nodes[0];
- const nodeB = nodes[NUM_NODES - 1];
- await nodeA.sendPayTx();
- await nodeB.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- it("Every node should be synced to one", async function() {
- const waits = [];
- for (let i = 1; i < NUM_NODES; i++) {
- waits.push(nodes[i].waitBlockNumberSync(nodes[0]));
- }
- await Promise.all(waits);
- }).timeout(5000 + 5000 * NUM_NODES);
-
- it("It should be synced when the first node becomes ahead", async function() {
- await nodes[0].sendPayTx();
- for (let i = 1; i < NUM_NODES; i++) {
- await nodes[i].waitBlockNumberSync(nodes[i - 1]);
- expect(await nodes[i].getBestBlockHash()).to.deep.equal(
- await nodes[0].getBestBlockHash()
- );
- }
- }).timeout(5000 + 10000 * NUM_NODES);
- });
});
describe("the first node becomes ahead", function() {
@@ -145,39 +112,6 @@ describe("sync 5 nodes", function() {
).to.deep.equal(transaction.blockHash);
}
}).timeout(20_000 + 5_000 * NUM_NODES);
-
- describe("All diverged by two nodes in the opposite", function() {
- beforeEach(async function() {
- const nodeA = nodes[0];
- const nodeB = nodes[numHalf];
- await nodeA.sendPayTx();
- await nodeB.sendPayTx();
- expect(await nodeA.getBestBlockNumber()).to.equal(
- await nodeB.getBestBlockNumber()
- );
- expect(await nodeA.getBestBlockHash()).to.not.deep.equal(
- await nodeB.getBestBlockHash()
- );
- });
-
- it("Every node should be synced", async function() {
- const waits = [];
- for (let i = 1; i < NUM_NODES; i++) {
- waits.push(nodes[i].waitBlockNumberSync(nodes[0]));
- }
- await Promise.all(waits);
- }).timeout(20_000 + 5_000 * NUM_NODES);
-
- it("It should be synced when the first node becomes ahead", async function() {
- await nodes[0].sendPayTx();
- for (let i = 1; i < NUM_NODES; i++) {
- await nodes[i].waitBlockNumberSync(nodes[i - 1]);
- expect(await nodes[i].getBestBlockHash()).to.deep.equal(
- await nodes[0].getBestBlockHash()
- );
- }
- }).timeout(5_000 + 10_000 * NUM_NODES);
- });
}).timeout(NUM_NODES * 60_000);
describe("Connected in a star", function() {