Skip to content
This repository has been archived by the owner on Mar 3, 2023. It is now read-only.

Commit

Permalink
Debug
Browse files Browse the repository at this point in the history
  • Loading branch information
xlz-jbleclere committed Apr 1, 2021
1 parent 6bee826 commit a6b1231
Show file tree
Hide file tree
Showing 6 changed files with 128 additions and 105 deletions.
37 changes: 31 additions & 6 deletions deployment/azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,8 @@ variables:
value: $(Build.SourcesDirectory)/deployment

stages:
- stage: compile_and_test
displayName: Compile sources and test on hardware
- stage: compile_and_full_test
displayName: Compile sources and test on multiple hardware designs
variables:
# Set Git branch conditional variables
${{ if eq(variables['Build.SourceBranch'], 'refs/heads/master') }}:
Expand All @@ -56,16 +56,41 @@ stages:

jobs:
- template: runTestOnEc2.yml
displayName: Test advanced features on one standard design
parameters:
instance_type: 'f1.4xlarge'
config_list:
- os_distrib: 'centos_7'
tox_mode: 'debug'
tox_coverage: true
tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
tox_extra_option: '$(tox_mark)'
#- os_distrib: 'ubuntu_18_04'
# tox_mode: 'debug'
# tox_coverage: false
# tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
# tox_extra_option: '$(tox_mark)'
# dependsOn: 'centos_7'

- template: runTestOnEc2.yml
displayName: Test AWS XRT specific hardware configuration
parameters:
instance_type: 'f1.2xlarge'
config_list:
- os_distrib: 'centos_7'
tox_operation: 'aws-build-debug,awsxrt-debug'
tox_extra_option: '$(tox_mark)'
#- os_distrib: 'ubuntu_18_04'
# tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
# tox_extra_option: '$(tox_mark)'
# dependsOn: 'centos_7'

- template: runTestOnEc2.yml
displayName: Test AWS F1 specific hardware configuration
parameters:
instance_type: 'f1.2xlarge'
config_list:
- os_distrib: 'centos_7'
tox_operation: 'aws-build-debug,awsf1-debug'
tox_extra_option: '$(tox_mark)'
#- os_distrib: 'ubuntu_18_04'
# tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
# tox_extra_option: '$(tox_mark)'
# dependsOn: 'centos_7'

Expand Down
10 changes: 2 additions & 8 deletions deployment/runTestOnEc2.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,7 @@ parameters:
instance_type: 'f1.4xlarge'
config_list:
- os_distrib: 'centos_7'
tox_mode: 'release'
tox_coverage: false
tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
tox_extra_option: ''

jobs:
Expand Down Expand Up @@ -36,11 +35,6 @@ jobs:
name: Default
demands: # Use previously instantiated agent
- agent.Name -equals $(Build.BuildId) AWS ${{ config.os_distrib }}
variables:
${{ if eq(config.tox_coverage, true) }}:
tox_operation: aws-build-${{ config.tox_mode }},cpp-${{ config.tox_mode }},c-${{ config.tox_mode }},integration-${{ config.tox_mode }},coverage-${{ config.tox_mode }}
${{ if ne(config.tox_coverage, true) }}:
tox_operation: aws-build-${{ config.tox_mode }},cpp-${{ config.tox_mode }},c-${{ config.tox_mode }},integration-${{ config.tox_mode }}
steps:
- checkout: self
submodules: true
Expand Down Expand Up @@ -86,7 +80,7 @@ jobs:
EOF
displayName: Create Accelize credentials file
- script: sudo -E tox -p all -e $(tox_operation)
- script: sudo -E tox -p all -e $(config.tox_operation)
-- --cred=$(Build.SourcesDirectory)/cred.json --server=$(meteringServer) --artifacts_dir=$(Build.SourcesDirectory)/artifacts -rxs ${{ config.tox_extra_option }}
displayName: Run tests with Tox
env:
Expand Down
10 changes: 10 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,16 @@ def pytest_runtest_setup(item):
"""
Configure test initialization
"""
    # Check awsf1 tests
m_option = item.config.getoption('-m')
if search(r'\bawsf1\b', m_option) and not search(r'\nnot\n\s+\bawsf1\b', m_option):
skip_awsf1 = False
else:
skip_awsf1 = True
markers = tuple(item.iter_markers(name='awsf1'))
if skip_awsf1 and markers:
pytest.skip("Don't run AWS F1 (Vivado RTL) tests.")

# Check awsxrt tests
m_option = item.config.getoption('-m')
if search(r'\bawsxrt\b', m_option) and not search(r'\nnot\n\s+\bawsxrt\b', m_option):
Expand Down
109 changes: 49 additions & 60 deletions tests/test_drm_license_error.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,23 +33,22 @@ def test_header_error_on_key(accelize_drm, conf_json, cred_json, async_handler,
conf_json['licensing']['url'] = _request.url + request.function.__name__
conf_json.save()

drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)

# Set initial context on the live server
context = {'cnt':0}
set_context(context)
assert get_context() == context

with pytest.raises(accelize_drm.exceptions.DRMCtlrError) as excinfo:
drm_manager.activate()
assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMCtlrError.error_code
assert "License header check error" in str(excinfo.value)
with accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
) as drm_manager:
# Set initial context on the live server
context = {'cnt':0}
set_context(context)
assert get_context() == context
# Check failure is detected
with pytest.raises(accelize_drm.exceptions.DRMCtlrError) as excinfo:
drm_manager.activate()
assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMCtlrError.error_code
assert "License header check error" in str(excinfo.value)
async_cb.assert_NoError()


Expand All @@ -76,21 +75,18 @@ def test_header_error_on_licenseTimer(accelize_drm, conf_json, cred_json, async_
conf_json['licensing']['url'] = _request.url + request.function.__name__
conf_json.save()

drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)

# Set initial context on the live server
context = {'cnt':0}
set_context(context)
assert get_context() == context

drm_manager.activate()
try:
with accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
) as drm_manager:
# Set initial context on the live server
context = {'cnt':0}
set_context(context)
assert get_context() == context
drm_manager.activate()
start = datetime.now()
lic_duration = drm_manager.get('license_duration')
assert drm_manager.get('license_status')
Expand All @@ -99,9 +95,6 @@ def test_header_error_on_licenseTimer(accelize_drm, conf_json, cred_json, async_
sleep(wait_period.total_seconds())
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
finally:
drm_manager.deactivate()
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
assert async_cb.was_called
assert async_cb.message is not None
Expand All @@ -127,47 +120,43 @@ def test_session_id_error(accelize_drm, conf_json, cred_json, async_handler,
conf_json['licensing']['url'] = _request.url + request.function.__name__
conf_json.save()

drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)

# Set initial context on the live server
context = {'session_id':'0', 'session_cnt':0, 'request_cnt':0}
set_context(context)
assert get_context() == context

# Start session #1 to record
drm_manager.activate()
start = datetime.now()
try:
with accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
) as drm_manager:

# Set initial context on the live server
context = {'session_id':'0', 'session_cnt':0, 'request_cnt':0}
set_context(context)
assert get_context() == context

# Start session #1 to record
drm_manager.activate()
start = datetime.now()
assert drm_manager.get('license_status')
activators.autotest(is_activated=True)
lic_duration = drm_manager.get('license_duration')
wait_period = start + timedelta(seconds=lic_duration+2) - datetime.now()
sleep(wait_period.total_seconds())
assert drm_manager.get('license_status')
finally:
drm_manager.deactivate()
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
async_cb.assert_NoError()
activators.autotest(is_activated=False)
async_cb.assert_NoError()

# Start session #2 to replay session #1
drm_manager.activate()
start = datetime.now()
try:
# Start session #2 to replay session #1
drm_manager.activate()
start = datetime.now()
assert drm_manager.get('license_status')
activators.autotest(is_activated=True)
lic_duration = drm_manager.get('license_duration')
wait_period = start + timedelta(seconds=lic_duration+2) - datetime.now()
sleep(wait_period.total_seconds())
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
finally:
drm_manager.deactivate()
assert async_cb.was_called
assert async_cb.message is not None
Expand Down
Loading

0 comments on commit a6b1231

Please sign in to comment.