Commit 7ac3c4ed, authored 3 years ago by Marcin Sobczyk
update test-tools

Parent: d72e3796
No related branches, tags, or merge requests found.
Pipeline #33842 passed 3 years ago. Stage: build_and_test.
Showing 3 changed files with 10 additions and 23 deletions:

  hive: +1 −1
  tests/integration/system/haf/local_tools.py: +6 −19
  tests/integration/system/haf/test_event_massive_sync.py: +3 −3
hive @ 9e0f071e (compare e266447c ... 9e0f071e)
-Subproject commit e266447c601c89f16a94cea69b4ca59249811897
+Subproject commit 9e0f071eae283cdb98fcb3af5277d3889b9c4495
tests/integration/system/haf/local_tools.py (+6 −19)

@@ -101,36 +101,23 @@ def get_irreversible_block(node):
     return irreversible_block_num
 
 
-def get_time_offset_from_file(name):
+def get_timestamp_from_file(name):
     timestamp = ''
     with open(name, 'r') as f:
         timestamp = f.read()
     timestamp = timestamp.strip()
-    current_time = datetime.now(timezone.utc)
-    new_time = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
-    difference = round(new_time.timestamp() - current_time.timestamp()) - 10 # circa 10 seconds is needed for nodes to startup
-    time_offset = str(difference) + 's'
-    return time_offset
+    return timestamp
 
 
 def run_networks(world, blocklog_directory):
-    time_offset = get_time_offset_from_file(blocklog_directory / 'timestamp')
+    timestamp = ''
+    with open(blocklog_directory / 'timestamp', 'r') as f:
+        timestamp = f.read()
 
     block_log = BlockLog(None, blocklog_directory / 'block_log', include_index=False)
 
     logger.info('Running nodes...')
-    nodes = world.nodes()
-    nodes[0].run(wait_for_live=False, replay_from=block_log, time_offset=time_offset)
-    endpoint = nodes[0].get_p2p_endpoint()
-    for node in nodes[1:]:
-        node.config.p2p_seed_node.append(endpoint)
-        node.run(wait_for_live=False, replay_from=block_log, time_offset=time_offset)
-
-    for network in world.networks():
-        network.is_running = True
-
-    for node in nodes:
-        node.wait_for_live()
+    world.run_all_nodes(block_log, timestamp=timestamp, speedup=3, wait_for_live=True)
 
 
 def create_node_with_database(network, url):
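Read on its own, the deleted branch of this hunk shows what the old helper did: it parsed the saved UTC timestamp and turned its distance from the current time into a signed seconds string that was then passed to each node as time_offset. A minimal standalone sketch of that removed logic (the function name and example value below are illustrative, not part of the repository):

from datetime import datetime, timezone

def offset_from_timestamp(timestamp: str) -> str:
    # Parse the saved UTC timestamp in the same format the removed code expected.
    saved_time = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
    current_time = datetime.now(timezone.utc)
    # circa 10 seconds is needed for nodes to startup (comment kept from the removed code)
    difference = round(saved_time.timestamp() - current_time.timestamp()) - 10
    return str(difference) + 's'

# Illustrative call: prints a signed offset such as '-123456s'.
print(offset_from_timestamp('2022-05-10T12:00:00'))

After this commit that conversion is gone: get_timestamp_from_file returns the stripped timestamp string, and run_all_nodes receives it directly.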
tests/integration/system/haf/test_event_massive_sync.py (+3 −3)

@@ -3,7 +3,7 @@ from sqlalchemy.orm.exc import NoResultFound
 from sqlalchemy.orm.exc import MultipleResultsFound
 from test_tools import logger, BlockLog
-from local_tools import get_time_offset_from_file
+from local_tools import get_timestamp_from_file
 
 
 MASSIVE_SYNC_BLOCK_NUM = 105
@@ -16,14 +16,14 @@ def test_event_massive_sync(world_with_witnesses_and_database):
     world, session, Base = world_with_witnesses_and_database
     node_under_test = world.network('Beta').node('NodeUnderTest')
 
-    time_offset = get_time_offset_from_file(Path().resolve() / 'timestamp')
+    timestamp = get_timestamp_from_file(Path().resolve() / 'timestamp')
     block_log = BlockLog(None, Path().resolve() / 'block_log', include_index=False)
     events_queue = Base.classes.events_queue
 
     # WHEN
     logger.info('Running node...')
-    node_under_test.run(wait_for_live=False, replay_from=block_log, time_offset=time_offset)
+    world.run_all_nodes(block_log, timestamp=timestamp, speedup=1, wait_for_live=False, nodes=[node_under_test])
 
     # TODO get_p2p_endpoint is workaround to check if replay is finished
     node_under_test.get_p2p_endpoint()
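Taken together, the two file changes leave the test with a single startup path: read the timestamp, build the BlockLog, and hand both to run_all_nodes rather than replaying the node with a computed time_offset. A hedged sketch of that flow, assuming the test-tools API exactly as used in the diff above (the helper name below is illustrative):

from pathlib import Path

from test_tools import BlockLog
from local_tools import get_timestamp_from_file

def replay_single_node(world, node_under_test):
    # Illustrative helper, not part of the commit: it mirrors the updated test.
    timestamp = get_timestamp_from_file(Path().resolve() / 'timestamp')
    block_log = BlockLog(None, Path().resolve() / 'block_log', include_index=False)
    # The absolute timestamp is passed straight through; no per-node
    # time_offset is computed anymore.
    world.run_all_nodes(block_log, timestamp=timestamp, speedup=1,
                        wait_for_live=False, nodes=[node_under_test])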