From a6b123118c2f797082bc0f6c076807efb565830c Mon Sep 17 00:00:00 2001
From: jbleclere
Date: Thu, 1 Apr 2021 12:53:35 +0000
Subject: [PATCH] Debug

---
 deployment/azure-pipelines.yml  |  37 +++++++++--
 deployment/runTestOnEc2.yml     |  10 +--
 tests/conftest.py               |  10 +++
 tests/test_drm_license_error.py | 109 ++++++++++++++------------------
 tests/test_parameters.py        |  50 +++++++--------
 tox.ini                         |  17 +++--
 6 files changed, 128 insertions(+), 105 deletions(-)

diff --git a/deployment/azure-pipelines.yml b/deployment/azure-pipelines.yml
index dddbe43c..5e4cace7 100644
--- a/deployment/azure-pipelines.yml
+++ b/deployment/azure-pipelines.yml
@@ -41,8 +41,8 @@ variables:
     value: $(Build.SourcesDirectory)/deployment
 
 stages:
-  - stage: compile_and_test
-    displayName: Compile sources and test on hardware
+  - stage: compile_and_full_test
+    displayName: Compile sources and test on multiple hardware designs
     variables:
       # Set Git branch conditional variables
       ${{ if eq(variables['Build.SourceBranch'], 'refs/heads/master') }}:
@@ -56,16 +56,41 @@ stages:
 
     jobs:
       - template: runTestOnEc2.yml
+        displayName: Test advanced features on one standard design
         parameters:
          instance_type: 'f1.4xlarge'
          config_list:
            - os_distrib: 'centos_7'
-              tox_mode: 'debug'
-              tox_coverage: true
+              tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
              tox_extra_option: '$(tox_mark)'
            #- os_distrib: 'ubuntu_18_04'
-            #  tox_mode: 'debug'
-            #  tox_coverage: false
+            #  tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
+            #  tox_extra_option: '$(tox_mark)'
+            #  dependsOn: 'centos_7'
+
+      - template: runTestOnEc2.yml
+        displayName: Test AWS XRT specific hardware configuration
+        parameters:
+          instance_type: 'f1.2xlarge'
+          config_list:
+            - os_distrib: 'centos_7'
+              tox_operation: 'aws-build-debug,awsxrt-debug'
+              tox_extra_option: '$(tox_mark)'
+            #- os_distrib: 'ubuntu_18_04'
+            #  tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
+            #  tox_extra_option: '$(tox_mark)'
+            #  dependsOn: 'centos_7'
+
+      - template: runTestOnEc2.yml
+        displayName: Test AWS F1 specific hardware configuration
+        parameters:
+          instance_type: 'f1.2xlarge'
+          config_list:
+            - os_distrib: 'centos_7'
+              tox_operation: 'aws-build-debug,awsf1-debug'
+              tox_extra_option: '$(tox_mark)'
+            #- os_distrib: 'ubuntu_18_04'
+            #  tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
            #  tox_extra_option: '$(tox_mark)'
            #  dependsOn: 'centos_7'
diff --git a/deployment/runTestOnEc2.yml b/deployment/runTestOnEc2.yml
index 48e24bcd..6cc3b85e 100644
--- a/deployment/runTestOnEc2.yml
+++ b/deployment/runTestOnEc2.yml
@@ -5,8 +5,7 @@ parameters:
   instance_type: 'f1.4xlarge'
   config_list:
     - os_distrib: 'centos_7'
-      tox_mode: 'release'
-      tox_coverage: false
+      tox_operation: 'aws-build-debug,cpp-debug,c-debug,integration-debug,coverage-debug'
      tox_extra_option: ''
 
 jobs:
@@ -36,11 +35,6 @@ jobs:
       name: Default
       demands:
         # Use previously instantiated agent
         - agent.Name -equals $(Build.BuildId) AWS ${{ config.os_distrib }}
-    variables:
-      ${{ if eq(config.tox_coverage, true) }}:
-        tox_operation: aws-build-${{ config.tox_mode }},cpp-${{ config.tox_mode }},c-${{ config.tox_mode }},integration-${{ config.tox_mode }},coverage-${{ config.tox_mode }}
-      ${{ if ne(config.tox_coverage, true) }}:
-        tox_operation: aws-build-${{ config.tox_mode }},cpp-${{ config.tox_mode }},c-${{ config.tox_mode }},integration-${{ config.tox_mode }}
     steps:
       - checkout: self
        submodules: true
@@ -86,7 +80,7 @@ jobs:
           EOF
        displayName: Create Accelize credentials file
 
-      - script: sudo -E tox -p all -e $(tox_operation)
+      - script: sudo -E tox -p all -e $(config.tox_operation)
                -- --cred=$(Build.SourcesDirectory)/cred.json --server=$(meteringServer) --artifacts_dir=$(Build.SourcesDirectory)/artifacts -rxs ${{ config.tox_extra_option }}
        displayName: Run tests with Tox
        env:
diff --git a/tests/conftest.py b/tests/conftest.py
index 590f7dba..7f5adc80 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -214,6 +214,16 @@ def pytest_runtest_setup(item):
     """
     Configure test initialization
     """
+    # Check awsf1 tests
+    m_option = item.config.getoption('-m')
+    if search(r'\bawsf1\b', m_option) and not search(r'\nnot\n\s+\bawsf1\b', m_option):
+        skip_awsf1 = False
+    else:
+        skip_awsf1 = True
+    markers = tuple(item.iter_markers(name='awsf1'))
+    if skip_awsf1 and markers:
+        pytest.skip("Don't run AWS F1 (Vivado RTL) tests.")
+
     # Check awsxrt tests
     m_option = item.config.getoption('-m')
     if search(r'\bawsxrt\b', m_option) and not search(r'\nnot\n\s+\bawsxrt\b', m_option):
diff --git a/tests/test_drm_license_error.py b/tests/test_drm_license_error.py
index 5574e6e9..7168acec 100644
--- a/tests/test_drm_license_error.py
+++ b/tests/test_drm_license_error.py
@@ -33,23 +33,22 @@ def test_header_error_on_key(accelize_drm, conf_json, cred_json, async_handler,
     conf_json['licensing']['url'] = _request.url + request.function.__name__
     conf_json.save()
 
-    drm_manager = accelize_drm.DrmManager(
-        conf_json.path,
-        cred_json.path,
-        driver.read_register_callback,
-        driver.write_register_callback,
-        async_cb.callback
-    )
-
-    # Set initial context on the live server
-    context = {'cnt':0}
-    set_context(context)
-    assert get_context() == context
-
-    with pytest.raises(accelize_drm.exceptions.DRMCtlrError) as excinfo:
-        drm_manager.activate()
-    assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMCtlrError.error_code
-    assert "License header check error" in str(excinfo.value)
+    with accelize_drm.DrmManager(
+        conf_json.path,
+        cred_json.path,
+        driver.read_register_callback,
+        driver.write_register_callback,
+        async_cb.callback
+    ) as drm_manager:
+        # Set initial context on the live server
+        context = {'cnt':0}
+        set_context(context)
+        assert get_context() == context
+        # Check failure is detected
+        with pytest.raises(accelize_drm.exceptions.DRMCtlrError) as excinfo:
+            drm_manager.activate()
+        assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMCtlrError.error_code
+        assert "License header check error" in str(excinfo.value)
 
     async_cb.assert_NoError()
 
@@ -76,21 +75,18 @@ def test_header_error_on_licenseTimer(accelize_drm, conf_json, cred_json, async_
     conf_json['licensing']['url'] = _request.url + request.function.__name__
     conf_json.save()
 
-    drm_manager = accelize_drm.DrmManager(
-        conf_json.path,
-        cred_json.path,
-        driver.read_register_callback,
-        driver.write_register_callback,
-        async_cb.callback
-    )
-
-    # Set initial context on the live server
-    context = {'cnt':0}
-    set_context(context)
-    assert get_context() == context
-
-    drm_manager.activate()
-    try:
+    with accelize_drm.DrmManager(
+        conf_json.path,
+        cred_json.path,
+        driver.read_register_callback,
+        driver.write_register_callback,
+        async_cb.callback
+    ) as drm_manager:
+        # Set initial context on the live server
+        context = {'cnt':0}
+        set_context(context)
+        assert get_context() == context
+        drm_manager.activate()
        start = datetime.now()
        lic_duration = drm_manager.get('license_duration')
        assert drm_manager.get('license_status')
@@ -99,9 +95,6 @@ def test_header_error_on_licenseTimer(accelize_drm, conf_json, cred_json, async_
        sleep(wait_period.total_seconds())
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
-    finally:
-        drm_manager.deactivate()
-        assert not drm_manager.get('license_status')
    activators.autotest(is_activated=False)
    assert async_cb.was_called
    assert async_cb.message is not None
@@ -127,39 +120,36 @@ def test_session_id_error(accelize_drm, conf_json, cred_json, async_handler,
     conf_json['licensing']['url'] = _request.url + request.function.__name__
     conf_json.save()
 
-    drm_manager = accelize_drm.DrmManager(
-        conf_json.path,
-        cred_json.path,
-        driver.read_register_callback,
-        driver.write_register_callback,
-        async_cb.callback
-    )
-
-    # Set initial context on the live server
-    context = {'session_id':'0', 'session_cnt':0, 'request_cnt':0}
-    set_context(context)
-    assert get_context() == context
-
-    # Start session #1 to record
-    drm_manager.activate()
-    start = datetime.now()
-    try:
+    with accelize_drm.DrmManager(
+        conf_json.path,
+        cred_json.path,
+        driver.read_register_callback,
+        driver.write_register_callback,
+        async_cb.callback
+    ) as drm_manager:
+
+        # Set initial context on the live server
+        context = {'session_id':'0', 'session_cnt':0, 'request_cnt':0}
+        set_context(context)
+        assert get_context() == context
+
+        # Start session #1 to record
+        drm_manager.activate()
+        start = datetime.now()
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        lic_duration = drm_manager.get('license_duration')
        wait_period = start + timedelta(seconds=lic_duration+2) - datetime.now()
        sleep(wait_period.total_seconds())
        assert drm_manager.get('license_status')
-    finally:
        drm_manager.deactivate()
        assert not drm_manager.get('license_status')
-    activators.autotest(is_activated=False)
-    async_cb.assert_NoError()
+        activators.autotest(is_activated=False)
+        async_cb.assert_NoError()
 
-    # Start session #2 to replay session #1
-    drm_manager.activate()
-    start = datetime.now()
-    try:
+        # Start session #2 to replay session #1
+        drm_manager.activate()
+        start = datetime.now()
        assert drm_manager.get('license_status')
        activators.autotest(is_activated=True)
        lic_duration = drm_manager.get('license_duration')
@@ -167,7 +157,6 @@ def test_session_id_error(accelize_drm, conf_json, cred_json, async_handler,
        sleep(wait_period.total_seconds())
        assert not drm_manager.get('license_status')
        activators.autotest(is_activated=False)
-    finally:
        drm_manager.deactivate()
    assert async_cb.was_called
    assert async_cb.message is not None
diff --git a/tests/test_parameters.py b/tests/test_parameters.py
index 64401261..4870ded5 100644
--- a/tests/test_parameters.py
+++ b/tests/test_parameters.py
@@ -148,7 +148,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     exp_value = 0
     conf_json['settings']['log_file_verbosity'] = exp_value
     conf_json.save()
-    accelize_drm.DrmManager(
+    with accelize_drm.DrmManager(
        conf_json.path,
        cred_json.path,
        driver.read_register_callback,
@@ -205,7 +205,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
-    ) as drm_manager
+    ) as drm_manager:
        assert drm_manager.get('log_file_type') == exp_value
    async_cb.assert_NoError()
    print("Test parameter 'log_file_type': PASS")
@@ -308,7 +308,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     exp_value = 2*orig_frequency_detect_period
-    conf_json['settings'] = {'frequency_detection_period': exp_value}
+    conf_json['settings']['frequency_detection_period'] = exp_value
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -325,7 +325,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     exp_value = 2*orig_frequency_detect_threshold
-    conf_json['settings'] = {'frequency_detection_threshold': exp_value}
+    conf_json['settings']['frequency_detection_threshold'] = exp_value
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -342,7 +342,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     # Check error: ws_retry_period_long must be != ws_retry_period_short
-    conf_json['settings'] = {'ws_retry_period_long': orig_retry_period_short}
+    conf_json['settings']['ws_retry_period_long'] = orig_retry_period_short
     conf_json.save()
     with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        accelize_drm.DrmManager(
@@ -360,7 +360,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     exp_value = orig_retry_period_long + 1
-    conf_json['settings'] = {'ws_retry_period_long': exp_value}
+    conf_json['settings']['ws_retry_period_long'] = exp_value
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -377,7 +377,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     # Check error: ws_retry_period_long must be != ws_retry_period_short
-    conf_json['settings'] = {'ws_retry_period_short': orig_retry_period_long}
+    conf_json['settings']['ws_retry_period_short'] = orig_retry_period_long
     conf_json.save()
     with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        accelize_drm.DrmManager(
@@ -396,7 +396,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     exp_value = orig_retry_period_short + 1
-    conf_json['settings'] = {'ws_retry_period_short': exp_value}
+    conf_json['settings']['ws_retry_period_short'] = exp_value
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -412,7 +412,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     # Test parameter: ws_api_retry_duration
     async_cb.reset()
     conf_json.reset()
-    conf_json['settings'] = {'ws_api_retry_duration': 0}
+    conf_json['settings']['ws_api_retry_duration'] = 0
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -425,7 +425,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     exp_value = orig_api_retry_duration + 1
-    conf_json['settings'] = {'ws_api_retry_duration': exp_value}
+    conf_json['settings']['ws_api_retry_duration'] = exp_value
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -441,7 +441,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     # Test parameter: ws_request_timeout
     async_cb.reset()
     conf_json.reset()
-    conf_json['settings'] = {'ws_request_timeout': 0}
+    conf_json['settings']['ws_request_timeout'] = 0
     conf_json.save()
     with pytest.raises(accelize_drm.exceptions.DRMBadArg) as excinfo:
        accelize_drm.DrmManager(
@@ -457,7 +457,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     exp_value = 2*orig_request_timeout
-    conf_json['settings'] = {'ws_request_timeout': exp_value}
+    conf_json['settings']['ws_request_timeout'] = exp_value
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -474,7 +474,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     expectVal = 0
-    conf_json['settings'] = {'host_data_verbosity': expectVal}
+    conf_json['settings']['host_data_verbosity'] = expectVal
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -487,7 +487,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
 
     conf_json.reset()
     expectVal = 2
-    conf_json['settings'] = {'host_data_verbosity': expectVal}
+    conf_json['settings']['host_data_verbosity'] = expectVal
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -504,7 +504,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     expectVal = False
-    conf_json['settings'] = {'log_file_append': expectVal}
+    conf_json['settings']['log_file_append'] = expectVal
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -515,9 +515,9 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
    ) as drm_manager:
        assert drm_manager.get('log_file_append') == expectVal
 
-    conf_json.reset()
    expectVal = True
-    conf_json['settings'] = {'log_file_append': expectVal}
+    conf_json.reset()
+    conf_json['settings']['log_file_append'] = expectVal
    conf_json.save()
    with accelize_drm.DrmManager(
        conf_json.path,
@@ -525,7 +525,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
        driver.read_register_callback,
        driver.write_register_callback,
        async_cb.callback
-    ) as drm_manage:
+    ) as drm_manager:
        assert drm_manager.get('log_file_append') == expectVal
    async_cb.assert_NoError()
    print("Test parameter 'log_file_append': PASS")
@@ -534,7 +534,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     async_cb.reset()
     conf_json.reset()
     expectVal = 1
-    conf_json['settings'] = {'ws_verbosity': expectVal}
+    conf_json['settings']['ws_verbosity'] = expectVal
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -546,7 +546,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
        assert drm_manager.get('ws_verbosity') == expectVal
    conf_json.reset()
    expectVal = 0
-    conf_json['settings'] = {'ws_verbosity': expectVal}
+    conf_json['settings']['ws_verbosity'] = expectVal
    conf_json.save()
    with accelize_drm.DrmManager(
        conf_json.path,
@@ -605,7 +605,7 @@ def test_parameter_key_modification_with_config_file(accelize_drm, conf_json, cr
     # Test unsupported parameter
     async_cb.reset()
     conf_json.reset()
-    conf_json['settings'] = {'unsupported_param': 10.2}
+    conf_json['settings']['unsupported_param'] = 10.2
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -685,7 +685,7 @@ def test_parameter_key_modification_with_get_set(accelize_drm, conf_json, cred_j
     exp_val = LOG_FORMAT_LONG
     drm_manager.set(log_format=exp_val)
     assert drm_manager.get('log_format') == exp_val
-    drm_ manager.set(log_format=orig_val)
+    drm_manager.set(log_format=orig_val)
     async_cb.assert_NoError()
     print("Test parameter 'log_format': PASS")
 
@@ -1082,7 +1082,7 @@ def test_parameter_key_modification_with_get_set(accelize_drm, conf_json, cred_j
     # Test parameter: host_data
     async_cb.reset()
     conf_json.reset()
-    conf_json['settings'] = {'host_data': 0}
+    conf_json['settings']['host_data'] = 0
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -1104,7 +1104,7 @@ def test_parameter_key_modification_with_get_set(accelize_drm, conf_json, cred_j
     # Test parameter: log_file_append
     async_cb.reset()
     conf_json.reset()
-    conf_json['settings'] = {'log_file_append': False}
+    conf_json['settings']['log_file_append'] = False
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
@@ -1123,7 +1123,7 @@ def test_parameter_key_modification_with_get_set(accelize_drm, conf_json, cred_j
     async_cb.reset()
     conf_json.reset()
     expvalue = 0
-    conf_json['settings'] = {'ws_verbosity': expvalue}
+    conf_json['settings']['ws_verbosity'] = expvalue
     conf_json.save()
     with accelize_drm.DrmManager(
        conf_json.path,
diff --git a/tox.ini b/tox.ini
index 1202bd06..01573dcc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -47,7 +47,7 @@
 
 [tox]
 minversion = 3.7.0
-envlist = {build,c,cpp,integration,awsxrt}-{debug,release,install}, {sign,export}-release, package-install, build-doc, {coverage,aws-build}-debug
+envlist = {build,c,cpp,integration,awsxrt,awsf1}-{debug,release,install}, {sign,export}-release, package-install, build-doc, {coverage,aws-build}-debug
 skipsdist = True
 
 [testenv]
@@ -61,7 +61,7 @@ install_command = python -m pip install -U --no-cache-dir --disable-pip-version-
 
 description =
     build: Accelize DRM library build
-    {c,cpp,integration,awsxrt}: Accelize DRM Library tests ({envname})
+    {c,cpp,integration,awsxrt,awsf1}: Accelize DRM Library tests ({envname})
     coverage: Combine coverage data and create report
 
 deps =
@@ -100,8 +100,8 @@ passenv=
     sign: GPG_PUBLIC_KEY
     sign: GPG_PASS_PHRASE
     # Tests
-    {c,cpp,integration,awsxrt}: TOX_*
-    {c,cpp,integration,awsxrt}: XILINX_*
+    {c,cpp,integration,awsxrt,awsf1}: TOX_*
+    {c,cpp,integration,awsxrt,awsf1}: XILINX_*
     aws: SDK_DIR
 
 envdir =
@@ -111,7 +111,7 @@ envdir =
 changedir =
     !install: {envdir}/build
     build-install: {envdir}/build
-    {c,cpp,integration,awsxrt,package}-install: {toxinidir}
+    {c,cpp,integration,awsxrt,awsf1,package}-install: {toxinidir}
 
 commands =
     # Force the use of system interpreter
@@ -142,6 +142,8 @@ commands =
     integration-debug: python3 -m pytest {posargs} --integration --cov=accelize_drm --cov-append
     awsxrt-!debug: python3 -m pytest {posargs} --fpga_driver="aws_xrt" -m awsxrt
     awsxrt-debug: python3 -m pytest {posargs} --fpga_driver="aws_xrt" -m awsxrt --cov=accelize_drm --cov-append
+    awsf1-!debug: python3 -m pytest {posargs} --fpga_driver="aws_xrt" -m awsf1
+    awsf1-debug: python3 -m pytest {posargs} --fpga_driver="aws_xrt" -m awsf1 --cov=accelize_drm --cov-append
     # Collect coverage
     coverage: -python3 -m coverage combine
     coverage: -lcov --capture --directory . --output-file coverage.info -q
@@ -166,14 +168,17 @@ depends =
     {c,cpp}-debug: build-debug, aws-build-debug
     integration-debug: build-debug, aws-build-debug, c-debug, cpp-debug
     awsxrt-debug: build-debug, aws-build-debug, c-debug, cpp-debug, integration-debug
+    awsf1-debug: build-debug, aws-build-debug, c-debug, cpp-debug, integration-debug, awsxrt-debug
     c-release: build-release, c-debug, integration-debug
     cpp-release: build-release, cpp-debug, integration-debug
     integration-release: build-release, c-release, cpp-release
     awsxrt-release: build-release, c-release, cpp-release, integration-release
+    awsf1-release: build-release, c-release, cpp-release, integration-release, awsxrt-release
     export-release: build-release
     build-install: build-release, c-release, cpp-release, integration-release
     package-install: build-install, export-release
     {c,cpp}-install: build-install, package-install
     integration-install: build-install, package-install, c-install, cpp-install
     awsxrt-install: build-install, package-install, c-install, cpp-install, integration-install
-    coverage: cpp-debug, c-debug, integration-debug, awsxrt-debug
+    awsf1-install: build-install, package-install, c-install, cpp-install, integration-install, awsxrt-install
+    coverage: cpp-debug, c-debug, integration-debug
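
Note on the tests/conftest.py hunk above: the new awsf1 block mirrors the existing awsxrt block, i.e. tests carrying a given marker are skipped unless the pytest '-m' expression selects that marker explicitly and does not negate it. Below is a minimal, standalone sketch of that gating idea, not part of the patch; the helper name and the example '-m' strings are illustrative only, and it uses \b word boundaries around 'not':

    from re import search

    def marker_requested(m_option, marker):
        # True when the '-m' expression names `marker` and does not contain "not <marker>"
        return bool(
            search(r'\b%s\b' % marker, m_option)
            and not search(r'\bnot\b\s+\b%s\b' % marker, m_option)
        )

    # Example: pytest -m "awsf1" selects awsf1-marked tests, "not awsf1" excludes them
    assert marker_requested('awsf1', 'awsf1')
    assert not marker_requested('not awsf1', 'awsf1')
    assert not marker_requested('awsxrt', 'awsf1')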