diff --git a/.github/actions/on_device_tests/action.yaml b/.github/actions/on_device_tests/action.yaml index d07f984f94d27..0481de365ae4f 100644 --- a/.github/actions/on_device_tests/action.yaml +++ b/.github/actions/on_device_tests/action.yaml @@ -100,3 +100,5 @@ runs: --change_id "${GITHUB_SHA}" \ watch ${{ env.SESSION_ID }} shell: bash + # TODO + # - name: download unit test reports diff --git a/.github/actions/on_host_test/action.yaml b/.github/actions/on_host_test/action.yaml index 7df5e2f62e048..554b3f2be9074 100644 --- a/.github/actions/on_host_test/action.yaml +++ b/.github/actions/on_host_test/action.yaml @@ -7,106 +7,20 @@ inputs: runs: using: "composite" steps: - - name: Set up Cloud SDK - uses: isarkis/setup-gcloud@40dce7857b354839efac498d3632050f568090b6 # v1.1.1 - - name: Configure Environment - id: configure-environment - shell: bash - run: | - set -x - if [ "${{inputs.os}}" == 'linux' ] - then - echo "ARCHIVE_EXTENSION=tar.xz" >> $GITHUB_ENV - elif [ "${{inputs.os}}" == 'windows' ] - then - echo "ARCHIVE_EXTENSION=tar.gz" >> $GITHUB_ENV - fi - - name: Download Archive - shell: bash - env: - WORKFLOW: ${{ github.workflow }} - run: | - set -x - PROJECT_NAME=$(gcloud config get-value project) - gsutil cp gs://${PROJECT_NAME}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}_${{matrix.config}}/${{matrix.platform}}_${{matrix.config}}.${ARCHIVE_EXTENSION} ${GITHUB_WORKSPACE}/out/tmp/${{matrix.platform}}_${{matrix.config}}.${ARCHIVE_EXTENSION} - - name: Extract Archive - shell: bash - run: | - set -x - parallel= - if [[ "${{inputs.os}}" == 'linux' ]]; then - parallel="--parallel" - fi - python3 ${GITHUB_WORKSPACE}/tools/create_archive.py -x -s ${GITHUB_WORKSPACE}/out/tmp/${{matrix.platform}}_${{matrix.config}}.${ARCHIVE_EXTENSION} -d ${GITHUB_WORKSPACE}/out ${parallel} - rm -rf ${GITHUB_WORKSPACE}/out/tmp - - name: Download Bootloader Archive - if: ${{ env.COBALT_BOOTLOADER != null && env.COBALT_BOOTLOADER != 'null' }} - shell: bash - env: - WORKFLOW: ${{ github.workflow }} - run: | - set -x - PROJECT_NAME=$(gcloud config get-value project) - gsutil cp gs://${PROJECT_NAME}-test-artifacts/${WORKFLOW}/${GITHUB_RUN_NUMBER}/${{matrix.platform}}_${{matrix.config}}/${COBALT_BOOTLOADER}_${{matrix.config}}.${ARCHIVE_EXTENSION} ${GITHUB_WORKSPACE}/out/tmp/${COBALT_BOOTLOADER}_${{matrix.config}}.${ARCHIVE_EXTENSION} - - name: Extract Bootloader Archive - if: ${{ env.COBALT_BOOTLOADER != null && env.COBALT_BOOTLOADER != 'null' }} - shell: bash - run: | - set -x - python3 ${GITHUB_WORKSPACE}/tools/create_archive.py -x -s ${GITHUB_WORKSPACE}/out/tmp/${COBALT_BOOTLOADER}_${{matrix.config}}.${ARCHIVE_EXTENSION} -d ${GITHUB_WORKSPACE}/out --parallel - rm -rf ${GITHUB_WORKSPACE}/out/tmp - name: Set Env Variables shell: bash run: | echo "PYTHONPATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV echo "TEST_RESULTS_DIR=${GITHUB_WORKSPACE}/unit-test-results" >> $GITHUB_ENV echo "COVERAGE_DIR=${GITHUB_WORKSPACE}/coverage" >> $GITHUB_ENV - echo "TEST_REPORT_FILE=${GITHUB_WORKSPACE}/${{matrix.platform}}-${{matrix.shard}}" >> $GITHUB_ENV - - name: Run Tests - shell: bash - run: | - set -x - # Starboard toolchains are downloaded to a different dir on github. Create a symlink to reassure our tooling that everything is fine. 
- if [ -d /root/starboard-toolchains ]; then - ln -s /root/starboard-toolchains /github/home/starboard-toolchains - fi - loader_args='' - if [ "${COBALT_BOOTLOADER}" != "null" ]; then - loader_args="--loader_platform ${COBALT_BOOTLOADER} --loader_config ${{matrix.config}}" - fi - if [[ "${{matrix.shard}}" == 'integration' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} - elif [[ "${{matrix.shard}}" == 'blackbox' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --test_set blackbox - elif [[ "${{matrix.shard}}" == 'wpt' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --test_set wpt - elif [[ "${{matrix.shard}}" == 'evergreen' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/evergreen_tests/evergreen_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --no-can_mount_tmpfs - elif [[ "${{matrix.shard}}" == 'evergreen-as-blackbox' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --test_set evergreen - elif [[ "${{matrix.shard}}" == 'coverage' ]]; then - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -r ${loader_args} --xml_output_dir=${TEST_RESULTS_DIR} --coverage_dir=${COVERAGE_DIR} --coverage_report - else - if [[ "${{inputs.os}}" == 'windows' ]]; then - python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r - else - xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r ${loader_args} --xml_output_dir=${TEST_RESULTS_DIR} - fi - fi - - name: Process unit test results - if: failure() - shell: bash - run: | - set -x - echo "Saving unit test report to ${TEST_REPORT_FILE}" - python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_report_parser.py ${TEST_RESULTS_DIR} > ${TEST_REPORT_FILE} - - name: Upload unit test report + - name: Archive unit test report uses: actions/upload-artifact@v3 - if: failure() + # TODO: Should only run for unit-tests + if: always() with: name: unit-test-reports - path: ${{env.TEST_REPORT_FILE}} - - name: Upload coverage html report + path: ${{env.TEST_RESULTS_DIR}}/ + - name: Archive coverage html report if: success() && matrix.shard == 'coverage' uses: actions/upload-artifact@v3 with: diff --git a/.github/workflows/linux.yaml b/.github/workflows/linux.yaml index e15041dd6e616..cfbd09bf53485 100644 --- 
a/.github/workflows/linux.yaml +++ b/.github/workflows/linux.yaml @@ -30,6 +30,8 @@ jobs: with: platform: linux nightly: ${{ github.event.inputs.nightly }} + secrets: + DD_API_KEY: ${{ secrets.DD_API_KEY }} linux-clang-3-9: uses: ./.github/workflows/main.yaml permissions: @@ -38,6 +40,8 @@ jobs: with: platform: linux-clang-3-9 nightly: ${{ github.event.inputs.nightly }} + secrets: + DD_API_KEY: ${{ secrets.DD_API_KEY }} linux-gcc-6-3: uses: ./.github/workflows/main.yaml permissions: @@ -56,6 +60,8 @@ jobs: platform: linux-modular nightly: ${{ github.event.inputs.nightly }} modular: true + secrets: + DD_API_KEY: ${{ secrets.DD_API_KEY }} linux-coverage: # Run on main branch for pushes, PRs and manual invocations. if: | @@ -70,3 +76,5 @@ jobs: with: platform: linux-coverage nightly: ${{ github.event.inputs.nightly }} + secrets: + DD_API_KEY: ${{ secrets.DD_API_KEY }} diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index b501c1d822196..76530a77b9fcb 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -29,6 +29,10 @@ on: required: false type: boolean default: false + secrets: + DD_API_KEY: + description: DataDog API key + required: false # Global env vars. env: @@ -197,60 +201,9 @@ jobs: # filesystem, whereas /__w which contains Cobalt source code is on tmpfs. TMPDIR: /__w/_temp steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - # Use fetch depth of 0 to get full history for a valid build id. - fetch-depth: 0 - persist-credentials: false - - name: GN - uses: ./.github/actions/gn - - name: Build Cobalt - uses: ./.github/actions/build - - name: Run API Leak Detector - uses: ./.github/actions/api_leak_detector - if: inputs.run_api_leak_detector - with: - relative_manifest_path: ${{ inputs.leak_manifest_filename }} - - name: Upload Nightly Artifacts - if: ${{ ( inputs.nightly == 'true' || github.event_name == 'schedule' ) && matrix.config != 'debug' }} - uses: ./.github/actions/upload_nightly_artifacts - - name: Upload On Host Test Artifacts - if: ${{ matrix.config == 'devel' && needs.initialize.outputs.on_host_test == 'true' }} - uses: ./.github/actions/upload_test_artifacts - with: - type: onhost - os: linux - # For some reason passing needs.initialize.outputs.bootloader as parameter to build - # action didn't work, so instead we set an env var. - - name: Set bootloader config - if: ${{ needs.initialize.outputs.bootloader != 'null' }} - run: echo "COBALT_BOOTLOADER=${{needs.initialize.outputs.bootloader}}" >> $GITHUB_ENV - # Build bootloader for on-host tests if necessary. 
- - name: Bootloader GN - if: ${{ needs.initialize.outputs.bootloader != 'null' && matrix.config == 'devel' }} - uses: ./.github/actions/gn - - name: Build Bootloader - if: ${{ needs.initialize.outputs.bootloader != 'null' && matrix.config == 'devel' }} - uses: ./.github/actions/build - - name: Upload Bootloader On Host Test Artifacts - if: ${{ needs.initialize.outputs.bootloader != 'null' && matrix.config == 'devel' && needs.initialize.outputs.on_host_test == 'true'}} - uses: ./.github/actions/upload_test_artifacts - with: - type: onhost - os: linux - - name: Upload On Device Test Artifacts - if: | - matrix.config == 'devel' && - fromJSON(needs.initialize.outputs.on_device_test).enabled == true && - ( - github.event_name != 'pull_request' || - contains(github.event.pull_request.labels.*.name, 'on_device') - ) - uses: ./.github/actions/upload_test_artifacts - with: - type: ondevice - os: linux + - name: No-op + shell: bash + run: echo Build # Runs on-device integration and unit tests. on-device-test: @@ -280,13 +233,9 @@ jobs: ON_DEVICE_TEST_ATTEMPTS: ${{ needs.initialize.outputs.on_device_test_attempts }} MODULAR_BUILD: ${{ inputs.modular && 1 || 0 }} steps: - - name: Checkout - uses: kaidokert/checkout@v3.5.999 - with: - fetch-depth: 1 - persist-credentials: false - - name: Run Tests (${{ matrix.shard }}) - uses: ./.github/actions/on_device_tests + - name: No-op + shell: bash + run: echo on-device-test # Runs on-host integration and unit tests. on-host-test: @@ -320,23 +269,3 @@ jobs: uses: ./.github/actions/on_host_test with: os: linux - - # Gets unit test report from on host tests and prints it. - on-host-unit-test-report: - needs: [on-host-test] - permissions: {} - if: failure() - runs-on: ubuntu-latest - steps: - - name: Collect Unit Test Reports - uses: actions/download-artifact@v3 - with: - name: unit-test-reports - path: unit-test-reports - - name: Print Unit Test Reports - run: | - for filename in ${GITHUB_WORKSPACE}/unit-test-reports/*; do - basename $filename - cat $filename - echo - done diff --git a/.github/workflows/main_win.yaml b/.github/workflows/main_win.yaml index 3285f1a509a98..ea1a301a2cb38 100644 --- a/.github/workflows/main_win.yaml +++ b/.github/workflows/main_win.yaml @@ -130,7 +130,7 @@ jobs: build: needs: [initialize] permissions: {} - runs-on: [self-hosted, X64, Windows] + runs-on: [self-hosted, win32] name: ${{matrix.name}}_${{matrix.config}} strategy: fail-fast: false @@ -164,7 +164,7 @@ jobs: needs: [initialize, build] permissions: {} if: needs.initialize.outputs.on_host_test == 'true' - runs-on: [self-hosted, Windows, X64] + runs-on: [self-hosted, win32] name: ${{matrix.name}}_${{matrix.shard}}_test strategy: fail-fast: false diff --git a/.github/workflows/unit_test_results_reporter.yaml b/.github/workflows/unit_test_results_reporter.yaml new file mode 100644 index 0000000000000..86efd239de9d1 --- /dev/null +++ b/.github/workflows/unit_test_results_reporter.yaml @@ -0,0 +1,45 @@ +name: Upload Unit Test Reports + +on: + workflow_run: + workflows: + - linux + # TODO: xml file generation on windows + # - win32 + # TODO: Other platforms + types: + - completed + +jobs: + # Gets unit test report from artifact storage and uploads them to DataDog. 
+ unit-test-report: + permissions: {} + if: always() + runs-on: ubuntu-latest + name: Test Result Report + env: + DATADOG_API_KEY: ${{ secrets.DD_API_KEY }} + DATADOG_SITE: us5.datadoghq.com + DD_TAGS: name:${{matrix.name}},platform:${{matrix.platform}} + steps: + # TODO: Check if artifact exists + # TODO: Check if secret is set + - name: Collect Unit Test Reports + uses: actions/download-artifact@v3 + with: + name: unit-test-reports + path: unit-test-reports/ + - name: Install node + uses: actions/setup-node@v3 + with: + node-version: 16 + - name: Get Datadog CLI + shell: bash + run: npm install -g @datadog/datadog-ci + - name: Upload the JUnit files + shell: bash + run: | + datadog-ci junit upload \ + --service ${{ github.event.repository.name }} \ + --max-concurrency 20 \ + unit-test-reports/ diff --git a/base/single_thread_task_runner.cc b/base/single_thread_task_runner.cc index 3c1d832ad793b..49c9f0bab50a1 100644 --- a/base/single_thread_task_runner.cc +++ b/base/single_thread_task_runner.cc @@ -22,6 +22,12 @@ namespace base { #if defined(STARBOARD) namespace { +#if defined(COBALT_BUILD_TYPE_DEBUG) +const int kTimeWaitInterval = 10000; +#else +const int kTimeWaitInterval = 2000; +#endif + // Runs the given task, and then signals the given WaitableEvent. void RunAndSignal(const base::Closure& task, base::WaitableEvent* event) { TRACE_EVENT0("task", "RunAndSignal"); @@ -49,7 +55,8 @@ void SingleThreadTaskRunner::PostBlockingTask(const base::Location& from_here, if (task_may_run) { // Wait for the task to complete before proceeding. do { - if (task_finished.TimedWait(base::TimeDelta::FromMilliseconds(1000))) { + if (task_finished.TimedWait( + base::TimeDelta::FromMilliseconds(kTimeWaitInterval))) { break; } #if !defined(COBALT_BUILD_TYPE_GOLD) diff --git a/cobalt/black_box_tests/testdata/telemetry_test.html b/cobalt/black_box_tests/testdata/telemetry_test.html index f4d2b7eebaad4..01ccf90bd2013 100644 --- a/cobalt/black_box_tests/testdata/telemetry_test.html +++ b/cobalt/black_box_tests/testdata/telemetry_test.html @@ -41,30 +41,31 @@ metrics.setMetricEventInterval(EVENT_INTERVAL_SECS); metrics.onMetricEvent(metricEventHandler); } + window.onload = function () { + initTelemetry(); - initTelemetry(); + setupFinished(); - setupFinished(); - - setTimeout(() => { - assertTrue(metrics.isEnabled(), 'metrics should be enabled'); - assertEqual('COBALT_UMA', lastMetricType, 'metricType should be uma'); - assertTrue(lastPayload.length > 0, 'payload should be non-empty'); - assertEqual(1, payloadCount, 'only one payload sent'); - }, 4000); + setTimeout(() => { + assertTrue(metrics.isEnabled(), 'metrics should be enabled'); + assertEqual('COBALT_UMA', lastMetricType, 'metricType should be uma'); + assertTrue(lastPayload.length > 0, 'payload should be non-empty'); + assertEqual(1, payloadCount, 'only one payload sent'); + }, 4000); - setTimeout(() => { - assertEqual('COBALT_UMA', lastMetricType, 'metricType should be uma'); - assertTrue(lastPayload.length > 0, 'payload should be non-empty'); - assertEqual(2, payloadCount, 'two payloads sent'); + setTimeout(() => { + assertEqual('COBALT_UMA', lastMetricType, 'metricType should be uma'); + assertTrue(lastPayload.length > 0, 'payload should be non-empty'); + assertEqual(2, payloadCount, 'two payloads sent'); - metrics.disable(); + metrics.disable(); - setTimeout(() => { - assertFalse(metrics.isEnabled(), 'should disable metrics'); - onEndTest(); - }, 1000); - }, 11000); + setTimeout(() => { + assertFalse(metrics.isEnabled(), 'should disable metrics'); + 
onEndTest(); + }, 1000); + }, 11000); + } diff --git a/cobalt/build/build_info.py b/cobalt/build/build_info.py index 2edcc52e4309c..aed2646659372 100755 --- a/cobalt/build/build_info.py +++ b/cobalt/build/build_info.py @@ -26,6 +26,7 @@ _BUILD_ID_PATTERN = '^BUILD_NUMBER=([1-9][0-9]{6,})$' _GIT_REV_PATTERN = '^GitOrigin-RevId: ([0-9a-f]{40})$' +_COBALT_VERSION_PATTERN = '^#define COBALT_VERSION "(.*)"$' def get_build_id_and_git_rev_from_commits(cwd): @@ -77,6 +78,16 @@ def _get_subject_from_last_commit(cwd): return _get_last_commit_with_format(r'%s', cwd=cwd) +def _get_cobalt_version(): + version_header_path = os.path.join(os.path.dirname(FILE_DIR), 'version.h') + contents = '' + with open(version_header_path, 'r', encoding='utf-8') as f: + contents = f.read() + compiled_cobalt_version_pattern = re.compile( + _COBALT_VERSION_PATTERN, flags=re.MULTILINE) + return compiled_cobalt_version_pattern.search(contents).group(1) + + def main(output_path, cwd=FILE_DIR): """Writes a Cobalt build_info json file.""" build_rev = _get_hash_from_last_commit(cwd=cwd) @@ -84,6 +95,7 @@ def main(output_path, cwd=FILE_DIR): if build_id is None: build_id = get_build_id_from_commit_count(cwd=cwd) git_rev = build_rev + cobalt_version = _get_cobalt_version() build_time = datetime.datetime.now().ctime() author = _get_author_from_last_commit(cwd=cwd) commit = _get_subject_from_last_commit(cwd=cwd) @@ -92,6 +104,7 @@ def main(output_path, cwd=FILE_DIR): 'build_id': build_id, 'build_rev': build_rev, 'git_rev': git_rev, + 'cobalt_version': cobalt_version, 'build_time': build_time, 'author': author, 'commit': commit @@ -108,6 +121,7 @@ def main(output_path, cwd=FILE_DIR): Build ID: {build_id} Build Rev: {build_rev} Git Rev: {git_rev} +Cobalt Version: {cobalt_version} Build Time: {build_time} Author: {author} Commit: {commit} diff --git a/cobalt/demos/content/telemetry/index.html b/cobalt/demos/content/telemetry/index.html index d3744a8233e70..3dc29585434b0 100644 --- a/cobalt/demos/content/telemetry/index.html +++ b/cobalt/demos/content/telemetry/index.html @@ -50,7 +50,6 @@

LOG OUTPUT:

log('Enabling metrics reporting: metrics.enable()'); metrics.enable(); log(`Setting metric event interval: metrics.setMetricEventInterval(${EVENT_INTERVAL_SECS})`); - log(`Note that the first upload interval will always be 60 seconds, regardless of metricEventInterval.`); metrics.setMetricEventInterval(EVENT_INTERVAL_SECS); log('Binding metric event handler: metrics.onMetricEvent(metricEventHandler)'); metrics.onMetricEvent(metricEventHandler); @@ -62,8 +61,12 @@

LOG OUTPUT:

log.textContent = logText; LOG_CONTAINER.appendChild(log); } - - init(); + // Persistent settings are only ready/validated after the browser module + // has fully loaded. As telemetry uses persistent settings, everything + // must happen after onload. + window.onload = function() { + init(); + } diff --git a/cobalt/speech/url_fetcher_fake.h b/cobalt/speech/url_fetcher_fake.h index 7b6d3cbbc6c01..43ac8c52a16b4 100644 --- a/cobalt/speech/url_fetcher_fake.h +++ b/cobalt/speech/url_fetcher_fake.h @@ -84,6 +84,7 @@ class URLFetcherFake : public net::URLFetcher { void SaveResponseToFileAtPath( const base::FilePath& file_path, scoped_refptr file_task_runner) override {} + void SaveResponseToLargeString() override {} void SaveResponseToTemporaryFile( scoped_refptr file_task_runner) override {} void SaveResponseWithWriter( @@ -151,6 +152,11 @@ class URLFetcherFake : public net::URLFetcher { NOTREACHED(); return false; } + bool GetResponseAsLargeString( + std::string* out_response_string) const override { + NOTREACHED(); + return false; + } private: void OnURLFetchDownloadData(); diff --git a/cobalt/updater/network_fetcher.cc b/cobalt/updater/network_fetcher.cc index 0f4887e91ddd1..2c180a9505206 100644 --- a/cobalt/updater/network_fetcher.cc +++ b/cobalt/updater/network_fetcher.cc @@ -109,6 +109,35 @@ void NetworkFetcher::PostRequest( url_fetcher_->Start(); } +#if defined(IN_MEMORY_UPDATES) +void NetworkFetcher::DownloadToString( + const GURL& url, std::string* dst, + ResponseStartedCallback response_started_callback, + ProgressCallback progress_callback, + DownloadToStringCompleteCallback download_to_string_complete_callback) { + DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); + + LOG(INFO) << "cobalt::updater::NetworkFetcher::DownloadToString"; + LOG(INFO) << "DownloadToString url = " << url; + + CHECK(dst != nullptr); + dst_str_ = dst; + + response_started_callback_ = std::move(response_started_callback); + progress_callback_ = std::move(progress_callback); + download_to_string_complete_callback_ = + std::move(download_to_string_complete_callback); + + CreateUrlFetcher(url, net::URLFetcher::GET); + + url_fetcher_->SaveResponseToLargeString(); + + url_fetcher_type_ = kUrlFetcherTypeDownloadToString; + + url_fetcher_->Start(); +} + +#else void NetworkFetcher::DownloadToFile( const GURL& url, const base::FilePath& file_path, ResponseStartedCallback response_started_callback, @@ -134,6 +163,7 @@ void NetworkFetcher::DownloadToFile( url_fetcher_->Start(); } +#endif void NetworkFetcher::Cancel() { DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); @@ -158,9 +188,15 @@ void NetworkFetcher::OnURLFetchComplete(const net::URLFetcher* source) { const int response_code = source->GetResponseCode(); if (url_fetcher_type_ == kUrlFetcherTypePostRequest) { OnPostRequestComplete(source, status.error()); +#if defined(IN_MEMORY_UPDATES) + } else if (url_fetcher_type_ == kUrlFetcherTypeDownloadToString) { + OnDownloadToStringComplete(source, status.error()); + } +#else } else if (url_fetcher_type_ == kUrlFetcherTypeDownloadToFile) { OnDownloadToFileComplete(source, status.error()); } +#endif if (!status.is_success() || !IsResponseCodeSuccess(response_code)) { std::string msg(base::StringPrintf( @@ -229,6 +265,22 @@ void NetworkFetcher::OnPostRequestComplete(const net::URLFetcher* source, update_client::NetworkFetcher::kHeaderXRetryAfter)); } +#if defined(IN_MEMORY_UPDATES) +void NetworkFetcher::OnDownloadToStringComplete(const net::URLFetcher* source, + const int status_error) { + LOG(INFO) << 
"cobalt::updater::NetworkFetcher::OnDownloadToStringComplete"; + + if (!source->GetResponseAsLargeString(dst_str_)) { + LOG(ERROR) << "DownloadToString failed to get response from a string"; + } + + std::move(download_to_string_complete_callback_) + .Run(dst_str_, status_error, + source->GetResponseHeaders() + ? source->GetResponseHeaders()->GetContentLength() + : -1); +} +#else void NetworkFetcher::OnDownloadToFileComplete(const net::URLFetcher* source, const int status_error) { LOG(INFO) << "cobalt::updater::NetworkFetcher::OnDownloadToFileComplete"; @@ -244,6 +296,7 @@ void NetworkFetcher::OnDownloadToFileComplete(const net::URLFetcher* source, ? source->GetResponseHeaders()->GetContentLength() : -1); } +#endif NetworkFetcher::ReturnWrapper NetworkFetcher::HandleError( const std::string& message) { diff --git a/cobalt/updater/network_fetcher.h b/cobalt/updater/network_fetcher.h index c32f74dab5cdd..1ec57e3201396 100644 --- a/cobalt/updater/network_fetcher.h +++ b/cobalt/updater/network_fetcher.h @@ -40,7 +40,11 @@ namespace updater { typedef enum UrlFetcherType { kUrlFetcherTypePostRequest, +#if defined(IN_MEMORY_UPDATES) + kUrlFetcherTypeDownloadToString, +#else kUrlFetcherTypeDownloadToFile, +#endif } UrlFetcherType; class NetworkFetcher : public update_client::NetworkFetcher, @@ -51,8 +55,13 @@ class NetworkFetcher : public update_client::NetworkFetcher, using ProgressCallback = update_client::NetworkFetcher::ProgressCallback; using PostRequestCompleteCallback = update_client::NetworkFetcher::PostRequestCompleteCallback; +#if defined(IN_MEMORY_UPDATES) + using DownloadToStringCompleteCallback = + update_client::NetworkFetcher::DownloadToStringCompleteCallback; +#else using DownloadToFileCompleteCallback = update_client::NetworkFetcher::DownloadToFileCompleteCallback; +#endif explicit NetworkFetcher(const network::NetworkModule* network_module); ~NetworkFetcher() override; @@ -64,11 +73,21 @@ class NetworkFetcher : public update_client::NetworkFetcher, ResponseStartedCallback response_started_callback, ProgressCallback progress_callback, PostRequestCompleteCallback post_request_complete_callback) override; +#if defined(IN_MEMORY_UPDATES) + // Does not take ownership of |dst|, which must refer to a valid string that + // outlives this object. + void DownloadToString(const GURL& url, std::string* dst, + ResponseStartedCallback response_started_callback, + ProgressCallback progress_callback, + DownloadToStringCompleteCallback + download_to_string_complete_callback) override; +#else void DownloadToFile(const GURL& url, const base::FilePath& file_path, ResponseStartedCallback response_started_callback, ProgressCallback progress_callback, DownloadToFileCompleteCallback download_to_file_complete_callback) override; +#endif void Cancel() override; // net::URLFetcherDelegate interface. 
@@ -97,8 +116,14 @@ class NetworkFetcher : public update_client::NetworkFetcher, void OnPostRequestComplete(const net::URLFetcher* source, const int status_error); + +#if defined(IN_MEMORY_UPDATES) + void OnDownloadToStringComplete(const net::URLFetcher* source, + const int status_error); +#else void OnDownloadToFileComplete(const net::URLFetcher* source, const int status_error); +#endif static constexpr int kMaxRetriesOnNetworkChange = 3; @@ -111,7 +136,12 @@ class NetworkFetcher : public update_client::NetworkFetcher, ResponseStartedCallback response_started_callback_; ProgressCallback progress_callback_; PostRequestCompleteCallback post_request_complete_callback_; +#if defined(IN_MEMORY_UPDATES) + DownloadToStringCompleteCallback download_to_string_complete_callback_; + std::string* dst_str_; // not owned, can't be null +#else DownloadToFileCompleteCallback download_to_file_complete_callback_; +#endif const network::NetworkModule* network_module_; diff --git a/components/update_client/action_runner.cc b/components/update_client/action_runner.cc index 11d5dc0166348..2df16f5a92928 100644 --- a/components/update_client/action_runner.cc +++ b/components/update_client/action_runner.cc @@ -91,11 +91,17 @@ void ActionRunner::RunOnTaskRunner(std::unique_ptr unzip, } const auto config = component_.config(); + +#if defined(IN_MEMORY_UPDATES) + // TODO(hwarriner): see if we need to support this code path in Cobalt. + CHECK(false); +#else auto unpacker = base::MakeRefCounted( config->GetRunActionKeyHash(), crx_path, installer, std::move(unzip), std::move(patch), component_.crx_component()->crx_format_requirement); unpacker->Unpack( base::BindOnce(&ActionRunner::UnpackComplete, base::Unretained(this))); +#endif } void ActionRunner::UnpackComplete(const ComponentUnpacker::Result& result) { diff --git a/components/update_client/component.cc b/components/update_client/component.cc index a683d2903be06..5ba8895304399 100644 --- a/components/update_client/component.cc +++ b/components/update_client/component.cc @@ -196,7 +196,11 @@ void InstallOnBlockingTaskRunner( void UnpackCompleteOnBlockingTaskRunner( scoped_refptr main_task_runner, +#if defined(IN_MEMORY_UPDATES) + const base::FilePath& installation_dir, +#else const base::FilePath& crx_path, +#endif #if defined(STARBOARD) const int installation_index, PersistedData* metadata, @@ -209,16 +213,22 @@ void UnpackCompleteOnBlockingTaskRunner( const ComponentUnpacker::Result& result) { #if defined(STARBOARD) LOG(INFO) << "UnpackCompleteOnBlockingTaskRunner"; +#if !defined(IN_MEMORY_UPDATES) base::DeleteFile(crx_path, false); -#else +#endif // !defined(IN_MEMORY_UPDATES) +#else // defined(STARBOARD) update_client::DeleteFileAndEmptyParentDirectory(crx_path); -#endif +#endif // defined(STARBOARD) if (result.error != UnpackerError::kNone) { #if defined(STARBOARD) // When there is an error unpacking the downloaded CRX, such as a failure to // verify the package, we should remember to clear out any drain files. 
+#if defined(IN_MEMORY_UPDATES) + if (base::DirectoryExists(installation_dir)) { +#else // defined(IN_MEMORY_UPDATES) if (base::DirectoryExists(crx_path.DirName())) { +#endif // defined(IN_MEMORY_UPDATES) const auto installation_api = static_cast( SbSystemGetExtension(kCobaltExtensionInstallationManagerName)); @@ -229,7 +239,7 @@ void UnpackCompleteOnBlockingTaskRunner( } } } -#endif +#endif // #if defined(STARBOARD) main_task_runner->PostTask( FROM_HERE, base::BindOnce(std::move(callback), ErrorCategory::kUnpack, @@ -253,7 +263,12 @@ void UnpackCompleteOnBlockingTaskRunner( void StartInstallOnBlockingTaskRunner( scoped_refptr main_task_runner, const std::vector& pk_hash, +#if defined(IN_MEMORY_UPDATES) + const base::FilePath& installation_dir, + const std::string* crx_str, +#else const base::FilePath& crx_path, +#endif #if defined(STARBOARD) const int installation_index, PersistedData* metadata, @@ -270,11 +285,23 @@ void StartInstallOnBlockingTaskRunner( LOG(INFO) << "StartInstallOnBlockingTaskRunner"; #endif auto unpacker = base::MakeRefCounted( - pk_hash, crx_path, installer, std::move(unzipper_), std::move(patcher_), + pk_hash, +#if defined(IN_MEMORY_UPDATES) + crx_str, + installation_dir, +#else + crx_path, +#endif + installer, std::move(unzipper_), std::move(patcher_), crx_format); unpacker->Unpack(base::BindOnce(&UnpackCompleteOnBlockingTaskRunner, - main_task_runner, crx_path, + main_task_runner, +#if defined(IN_MEMORY_UPDATES) + installation_dir, +#else + crx_path, +#endif #if defined(STARBOARD) installation_index, metadata, id, version, #endif @@ -813,7 +840,11 @@ void Component::StateDownloadingDiff::Cancel() { void Component::StateDownloadingDiff::DoHandle() { DCHECK(thread_checker_.CalledOnValidThread()); +#if defined(IN_MEMORY_UPDATES) + auto& component = Component::State::component(); +#else const auto& component = Component::State::component(); +#endif const auto& update_context = component.update_context_; DCHECK(component.crx_component()); @@ -833,6 +864,9 @@ void Component::StateDownloadingDiff::DoHandle() { base::Unretained(this), id)); crx_downloader_->StartDownload( component.crx_diffurls_, component.hashdiff_sha256_, +#if defined(IN_MEMORY_UPDATES) + &component.crx_str_, +#endif base::BindOnce(&Component::StateDownloadingDiff::DownloadComplete, base::Unretained(this), id)); @@ -860,7 +894,9 @@ void Component::StateDownloadingDiff::DownloadComplete( crx_downloader_.reset(); if (download_result.error) { +#if !defined(IN_MEMORY_UPDATES) DCHECK(download_result.response.empty()); +#endif component.diff_error_category_ = ErrorCategory::kDownload; component.diff_error_code_ = download_result.error; @@ -868,7 +904,11 @@ void Component::StateDownloadingDiff::DownloadComplete( return; } +#if defined(IN_MEMORY_UPDATES) + component.installation_dir_ = download_result.installation_dir; +#else component.crx_path_ = download_result.response; +#endif #if defined(STARBOARD) component.installation_index_ = download_result.installation_index; @@ -894,7 +934,11 @@ void Component::StateDownloading::Cancel() { void Component::StateDownloading::DoHandle() { DCHECK(thread_checker_.CalledOnValidThread()); +#if defined(IN_MEMORY_UPDATES) + auto& component = Component::State::component(); +#else const auto& component = Component::State::component(); +#endif const auto& update_context = component.update_context_; DCHECK(component.crx_component()); @@ -914,6 +958,9 @@ void Component::StateDownloading::DoHandle() { base::Unretained(this), id)); crx_downloader_->StartDownload( 
component.crx_urls_, component.hash_sha256_, +#if defined(IN_MEMORY_UPDATES) + &component.crx_str_, +#endif base::BindOnce(&Component::StateDownloading::DownloadComplete, base::Unretained(this), id)); @@ -942,7 +989,9 @@ void Component::StateDownloading::DownloadComplete( crx_downloader_.reset(); if (download_result.error) { +#if !defined(IN_MEMORY_UPDATES) DCHECK(download_result.response.empty()); +#endif component.error_category_ = ErrorCategory::kDownload; component.error_code_ = download_result.error; @@ -950,7 +999,11 @@ void Component::StateDownloading::DownloadComplete( return; } +#if defined(IN_MEMORY_UPDATES) + component.installation_dir_ = download_result.installation_dir; +#else component.crx_path_ = download_result.response; +#endif #if defined(STARBOARD) component.installation_index_ = download_result.installation_index; @@ -969,7 +1022,11 @@ Component::StateUpdatingDiff::~StateUpdatingDiff() { void Component::StateUpdatingDiff::DoHandle() { DCHECK(thread_checker_.CalledOnValidThread()); +#if defined(IN_MEMORY_UPDATES) + auto& component = Component::State::component(); +#else const auto& component = Component::State::component(); +#endif const auto& update_context = component.update_context_; DCHECK(component.crx_component()); @@ -982,7 +1039,13 @@ void Component::StateUpdatingDiff::DoHandle() { base::BindOnce( &update_client::StartInstallOnBlockingTaskRunner, base::ThreadTaskRunnerHandle::Get(), - component.crx_component()->pk_hash, component.crx_path_, + component.crx_component()->pk_hash, +#if defined(IN_MEMORY_UPDATES) + component.installation_dir_, + &component.crx_str_, +#else + component.crx_path_, +#endif #if defined(STARBOARD) component.installation_index_, update_context.update_checker->GetPersistedData(), component.id_, @@ -1039,7 +1102,11 @@ void Component::StateUpdating::DoHandle() { #endif DCHECK(thread_checker_.CalledOnValidThread()); +#if defined(IN_MEMORY_UPDATES) + auto& component = Component::State::component(); +#else const auto& component = Component::State::component(); +#endif const auto& update_context = component.update_context_; DCHECK(component.crx_component()); @@ -1050,7 +1117,13 @@ void Component::StateUpdating::DoHandle() { base::BindOnce( &update_client::StartInstallOnBlockingTaskRunner, base::ThreadTaskRunnerHandle::Get(), - component.crx_component()->pk_hash, component.crx_path_, + component.crx_component()->pk_hash, +#if defined(IN_MEMORY_UPDATES) + component.installation_dir_, + &component.crx_str_, +#else + component.crx_path_, +#endif #if defined(STARBOARD) component.installation_index_, update_context.update_checker->GetPersistedData(), diff --git a/components/update_client/component.h b/components/update_client/component.h index ddefa13e5caf6..d5d9e43bf170f 100644 --- a/components/update_client/component.h +++ b/components/update_client/component.h @@ -481,7 +481,19 @@ class Component { // The error reported by the update checker. int update_check_error_ = 0; +#if defined(IN_MEMORY_UPDATES) + // To hold the CRX package in memory. The component owns this string + // throughout the entire update. + std::string crx_str_; + + // With in-memory updates the installation directory is still determined in + // the download flow even though it isn't needed until the unpack flow. Since + // there is no crx_path_ that the installation directory can be derived from, + // a dedicated installation_dir_ data member is added. 
+ base::FilePath installation_dir_; +#else base::FilePath crx_path_; +#endif #if defined(STARBOARD) int installation_index_ = IM_EXT_INVALID_INDEX; diff --git a/components/update_client/component_patcher_operation.cc b/components/update_client/component_patcher_operation.cc index 01ec312a977da..0d957e21a742a 100644 --- a/components/update_client/component_patcher_operation.cc +++ b/components/update_client/component_patcher_operation.cc @@ -101,9 +101,15 @@ void DeltaUpdateOp::DoneRunning(UnpackerError error, int extended_error) { // Uses the hash as a checksum to confirm that the file now residing in the // output directory probably has the contents it should. UnpackerError DeltaUpdateOp::CheckHash() { +#if defined(IN_MEMORY_UPDATES) + CHECK(false) << "Delta updates not supported for in-memory updates, or " + << "for that matter Cobalt"; + return UnpackerError::kDeltaUnsupportedCommand; +#else return VerifyFileHash256(output_abs_path_, output_sha256_) ? UnpackerError::kNone : UnpackerError::kDeltaVerificationFailure; +#endif } DeltaUpdateOpCopy::DeltaUpdateOpCopy() {} diff --git a/components/update_client/component_unpacker.cc b/components/update_client/component_unpacker.cc index f76cb6a87c7e0..6a1db54bff473 100644 --- a/components/update_client/component_unpacker.cc +++ b/components/update_client/component_unpacker.cc @@ -32,13 +32,23 @@ namespace update_client { ComponentUnpacker::Result::Result() {} ComponentUnpacker::ComponentUnpacker(const std::vector& pk_hash, +#if defined(IN_MEMORY_UPDATES) + const std::string* crx_str, + const base::FilePath& installation_dir, +#else const base::FilePath& path, +#endif scoped_refptr installer, std::unique_ptr unzipper, scoped_refptr patcher, crx_file::VerifierFormat crx_format) : pk_hash_(pk_hash), +#if defined(IN_MEMORY_UPDATES) + crx_str_(*crx_str), + installation_dir_(installation_dir), +#else path_(path), +#endif is_delta_(false), installer_(installer), unzipper_(std::move(unzipper)), @@ -56,17 +66,29 @@ void ComponentUnpacker::Unpack(Callback callback) { } bool ComponentUnpacker::Verify() { +#if defined(IN_MEMORY_UPDATES) + VLOG(1) << "Verifying component"; +#else VLOG(1) << "Verifying component: " << path_.value(); +#endif +#if !defined(IN_MEMORY_UPDATES) if (path_.empty()) { error_ = UnpackerError::kInvalidParams; return false; } +#endif std::vector> required_keys; if (!pk_hash_.empty()) required_keys.push_back(pk_hash_); const crx_file::VerifierResult result = - crx_file::Verify(path_, crx_format_, required_keys, + crx_file::Verify( +#if defined(IN_MEMORY_UPDATES) + crx_str_, +#else + path_, +#endif + crx_format_, required_keys, std::vector(), &public_key_, nullptr); if (result != crx_file::VerifierResult::OK_FULL && result != crx_file::VerifierResult::OK_DELTA) { @@ -76,7 +98,11 @@ bool ComponentUnpacker::Verify() { return false; } is_delta_ = result == crx_file::VerifierResult::OK_DELTA; +#if defined(IN_MEMORY_UPDATES) + VLOG(1) << "Verification successful"; +#else VLOG(1) << "Verification successful: " << path_.value(); +#endif return true; } @@ -91,14 +117,23 @@ bool ComponentUnpacker::BeginUnzipping() { error_ = UnpackerError::kUnzipPathError; return false; } -#else +#else // !defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) + destination = installation_dir_; +#else // defined(IN_MEMORY_UPDATES) // The directory of path_ is the installation slot. 
destination = path_.DirName(); +#endif // defined(IN_MEMORY_UPDATES) +#endif // !defined(STARBOARD) -#endif VLOG(1) << "Unpacking in: " << destination.value(); +#if defined(IN_MEMORY_UPDATES) + unzipper_->Unzip(crx_str_, destination, + base::BindOnce(&ComponentUnpacker::EndUnzipping, this)); +#else unzipper_->Unzip(path_, destination, base::BindOnce(&ComponentUnpacker::EndUnzipping, this)); +#endif return true; } diff --git a/components/update_client/component_unpacker.h b/components/update_client/component_unpacker.h index fccf44b8c5d22..bb979aedad566 100644 --- a/components/update_client/component_unpacker.h +++ b/components/update_client/component_unpacker.h @@ -88,6 +88,7 @@ class ComponentUnpacker : public base::RefCountedThreadSafe { // |pk_hash| is the expected public developer key's SHA256 hash. If empty, // the unpacker accepts any developer key. |path| is the current location // of the CRX. +#if !defined(IN_MEMORY_UPDATES) ComponentUnpacker(const std::vector& pk_hash, const base::FilePath& path, scoped_refptr installer, @@ -95,6 +96,20 @@ class ComponentUnpacker : public base::RefCountedThreadSafe { scoped_refptr patcher, crx_file::VerifierFormat crx_format); +#else + // An overload where |crx_str| currently holds the CRX in memory and + // |installation_dir| is the location where the CRX should be unpacked to. + // Does not take ownership of |crx_str|, which must refer to a valid string + // that outlives this object. + ComponentUnpacker(const std::vector& pk_hash, + const std::string* crx_str, + const base::FilePath& installation_dir, + scoped_refptr installer, + std::unique_ptr unzipper, + scoped_refptr patcher, + crx_file::VerifierFormat crx_format); +#endif + // Begins the actual unpacking of the files. May invoke a patcher and the // component installer if the package is a differential update. // Calls |callback| with the result. @@ -128,7 +143,12 @@ class ComponentUnpacker : public base::RefCountedThreadSafe { void EndUnpacking(); std::vector pk_hash_; +#if defined(IN_MEMORY_UPDATES) + const std::string& crx_str_; + base::FilePath installation_dir_; +#else base::FilePath path_; +#endif base::FilePath unpack_path_; base::FilePath unpack_diff_path_; bool is_delta_; diff --git a/components/update_client/component_unpacker_unittest.cc b/components/update_client/component_unpacker_unittest.cc index 617abcb37d4a7..19778ea2f4467 100644 --- a/components/update_client/component_unpacker_unittest.cc +++ b/components/update_client/component_unpacker_unittest.cc @@ -101,6 +101,7 @@ void ComponentUnpackerTest::UnpackComplete( main_thread_task_runner_->PostTask(FROM_HERE, std::move(quit_closure_)); } +#if !defined(IN_MEMORY_UPDATES) TEST_F(ComponentUnpackerTest, UnpackFullCrx) { auto config = base::MakeRefCounted(); scoped_refptr component_unpacker = @@ -171,5 +172,8 @@ TEST_F(ComponentUnpackerTest, UnpackFileHashMismatch) { EXPECT_TRUE(result_.unpack_path.empty()); } +// TODO(b/290410288): write tests targeting the version of ComponentUnpacker +// that unpacks a Crx from memory. 
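For the TODO above, a rough shape such an in-memory unpacker test could take, kept to standard C++ here; the real test would reuse the existing gtest fixture and ComponentUnpacker, and the path and helper below are assumptions:

#include <fstream>
#include <sstream>
#include <string>

// Assumed helper: slurp a CRX fixture into memory, mirroring how the
// in-memory unpacker receives its input as a std::string.
std::string ReadFileToString(const std::string& path) {
  std::ifstream in(path, std::ios::binary);
  std::ostringstream out;
  out << in.rdbuf();
  return out.str();
}

int main() {
  // Hypothetical fixture path; the real test would use its own test data.
  const std::string crx_bytes = ReadFileToString("testdata/sample.crx");
  // In the real test this is where ComponentUnpacker(pk_hash, &crx_bytes,
  // unpack_dir, ...) would be constructed, Unpack() run, and the resulting
  // UnpackerError asserted on.
  return crx_bytes.empty() ? 1 : 0;
}
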
+#endif } // namespace update_client diff --git a/components/update_client/crx_downloader.cc b/components/update_client/crx_downloader.cc index 4a7c36d01fdb4..133f4ab7fef16 100644 --- a/components/update_client/crx_downloader.cc +++ b/components/update_client/crx_downloader.cc @@ -94,19 +94,34 @@ CrxDownloader::download_metrics() const { void CrxDownloader::StartDownloadFromUrl(const GURL& url, const std::string& expected_hash, +#if defined(IN_MEMORY_UPDATES) + std::string* dst, +#endif DownloadCallback download_callback) { #if defined(STARBOARD) LOG(INFO) << "CrxDownloader::StartDownloadFromUrl: url=" << url; #endif + std::vector urls; urls.push_back(url); +#if defined(IN_MEMORY_UPDATES) + CHECK(dst != nullptr); + StartDownload(urls, expected_hash, dst, std::move(download_callback)); +#else StartDownload(urls, expected_hash, std::move(download_callback)); +#endif } void CrxDownloader::StartDownload(const std::vector& urls, const std::string& expected_hash, +#if defined(IN_MEMORY_UPDATES) + std::string* dst, +#endif DownloadCallback download_callback) { DCHECK(thread_checker_.CalledOnValidThread()); +#if defined(IN_MEMORY_UPDATES) + CHECK(dst != nullptr); +#endif auto error = CrxDownloaderError::NONE; if (urls.empty()) { @@ -123,12 +138,20 @@ void CrxDownloader::StartDownload(const std::vector& urls, return; } +#if defined(IN_MEMORY_UPDATES) + dst_str_ = dst; +#endif + urls_ = urls; expected_hash_ = expected_hash; current_url_ = urls_.begin(); download_callback_ = std::move(download_callback); +#if defined(IN_MEMORY_UPDATES) + DoStartDownload(*current_url_, dst); +#else DoStartDownload(*current_url_); +#endif } #if defined(STARBOARD) @@ -178,7 +201,12 @@ void CrxDownloader::VerifyResponse(bool is_handled, #if defined(STARBOARD) LOG(INFO) << "CrxDownloader::VerifyResponse"; #endif + +#if defined(IN_MEMORY_UPDATES) + if (VerifyHash256(dst_str_, expected_hash_)) { +#else if (VerifyFileHash256(result.response, expected_hash_)) { +#endif download_metrics_.push_back(download_metrics); main_task_runner()->PostTask( FROM_HERE, base::BindOnce(std::move(download_callback_), result)); @@ -191,11 +219,16 @@ void CrxDownloader::VerifyResponse(bool is_handled, result.error = static_cast(CrxDownloaderError::BAD_HASH); download_metrics.error = result.error; #if defined(STARBOARD) +#if !defined(IN_MEMORY_UPDATES) base::DeleteFile(result.response, false); -#else +#endif // !defined(IN_MEMORY_UPDATES) +#else // defined(STARBOARD) DeleteFileAndEmptyParentDirectory(result.response); -#endif +#endif // defined(STARBOARD) + +#if !defined(IN_MEMORY_UPDATES) result.response.clear(); +#endif main_task_runner()->PostTask( FROM_HERE, base::BindOnce(&CrxDownloader::HandleDownloadError, @@ -209,7 +242,9 @@ void CrxDownloader::HandleDownloadError( const DownloadMetrics& download_metrics) { DCHECK(thread_checker_.CalledOnValidThread()); DCHECK_NE(0, result.error); +#if !defined(IN_MEMORY_UPDATES) DCHECK(result.response.empty()); +#endif DCHECK_NE(0, download_metrics.error); #if defined(STARBOARD) @@ -235,14 +270,27 @@ void CrxDownloader::HandleDownloadError( // Try downloading from another url from the list. if (current_url_ != urls_.end()) { +#if defined(IN_MEMORY_UPDATES) + // TODO(b/158043520): manually test that Cobalt can update using a + // successor URL when an error occurs on the first URL, and/or consider + // adding an Evergreen end-to-end test case for this behavior. This is + // important since the unit tests are currently disabled (b/290410288). 
+ DoStartDownload(*current_url_, dst_str_); +#else DoStartDownload(*current_url_); +#endif return; } // Try downloading using the next downloader. if (successor_ && !urls_.empty()) { +#if defined(IN_MEMORY_UPDATES) + successor_->StartDownload(urls_, expected_hash_, dst_str_, + std::move(download_callback_)); +#else successor_->StartDownload(urls_, expected_hash_, std::move(download_callback_)); +#endif return; } diff --git a/components/update_client/crx_downloader.h b/components/update_client/crx_downloader.h index 76c2e6ac5ae49..03da88a79f170 100644 --- a/components/update_client/crx_downloader.h +++ b/components/update_client/crx_downloader.h @@ -66,8 +66,13 @@ class CrxDownloader { int installation_index = IM_EXT_INVALID_INDEX; #endif +#if defined(IN_MEMORY_UPDATES) + // Path where the contents of the downloaded Crx should later be installed. + base::FilePath installation_dir; +#else // Path of the downloaded file if the download was successful. base::FilePath response; +#endif }; // The callback fires only once, regardless of how many urls are tried, and @@ -115,6 +120,7 @@ class CrxDownloader { // behavior is undefined. The callback gets invoked if the download can't // be started. |expected_hash| represents the SHA256 cryptographic hash of // the download payload, represented as a hexadecimal string. +#if !defined(IN_MEMORY_UPDATES) void StartDownloadFromUrl(const GURL& url, const std::string& expected_hash, DownloadCallback download_callback); @@ -122,6 +128,21 @@ class CrxDownloader { const std::string& expected_hash, DownloadCallback download_callback); +#else + // Overloads where |dst| points to a string that the Crx package should be + // downloaded to. + // These functions do not take ownership of |dst|, which must refer to a valid + // string that outlives this object. + void StartDownloadFromUrl(const GURL& url, + const std::string& expected_hash, + std::string* dst, + DownloadCallback download_callback); + void StartDownload(const std::vector& urls, + const std::string& expected_hash, + std::string* dst, + DownloadCallback download_callback); +#endif + #if defined(STARBOARD) void CancelDownload(); #endif @@ -154,7 +175,11 @@ class CrxDownloader { } private: +#if defined(IN_MEMORY_UPDATES) + virtual void DoStartDownload(const GURL& url, std::string* dst) = 0; +#else virtual void DoStartDownload(const GURL& url) = 0; +#endif #if defined(STARBOARD) virtual void DoCancelDownload() = 0; #endif @@ -184,6 +209,10 @@ class CrxDownloader { std::vector download_metrics_; +#if defined(IN_MEMORY_UPDATES) + std::string* dst_str_; // not owned, can't be null +#endif + DISALLOW_COPY_AND_ASSIGN(CrxDownloader); }; diff --git a/components/update_client/crx_downloader_unittest.cc b/components/update_client/crx_downloader_unittest.cc index 1ff46347d9bb3..22f29a29e250e 100644 --- a/components/update_client/crx_downloader_unittest.cc +++ b/components/update_client/crx_downloader_unittest.cc @@ -233,6 +233,7 @@ void CrxDownloaderTest::RunThreadsUntilIdle() { base::RunLoop().RunUntilIdle(); } +#if !defined(IN_MEMORY_UPDATES) // Tests that starting a download without a url results in an error. TEST_F(CrxDownloaderTest, NoUrl) { std::vector urls; @@ -498,5 +499,8 @@ TEST_F(CrxDownloaderTest, TwoUrls_BothInvalid) { EXPECT_EQ(-1, download_metrics[1].downloaded_bytes); EXPECT_EQ(-1, download_metrics[1].total_bytes); } +// TODO(b/290410288): write tests targeting the overload of StartDownload() that +// downloads to a string. 
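The retry path above keeps handing the same destination string to the next URL in the list (and, if all URLs fail, to the successor downloader). A compact sketch of that control flow in plain C++, with a fetch callback standing in for the network layer; all names here are assumptions:

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Assumed signature: writes the payload into *dst, returns true on success.
using FetchToString =
    std::function<bool(const std::string& url, std::string* dst)>;

bool DownloadFirstSuccessful(const std::vector<std::string>& urls,
                             const FetchToString& fetch, std::string* dst) {
  for (const std::string& url : urls) {
    dst->clear();           // drop partial data from the previous failed attempt
    if (fetch(url, dst)) {  // the same in-memory buffer is reused every time
      return true;
    }
  }
  return false;  // caller falls back to a successor downloader, if any
}

int main() {
  std::string payload;
  const bool ok = DownloadFirstSuccessful(
      {"https://bad.example/a.crx", "https://good.example/a.crx"},
      [](const std::string& url, std::string* dst) {
        if (url.find("good") == std::string::npos) return false;
        *dst = "crx-bytes";
        return true;
      },
      &payload);
  std::cout << (ok ? "downloaded " : "failed ") << payload.size() << " bytes\n";
}
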
+#endif } // namespace update_client diff --git a/components/update_client/net/network_impl_cobalt.cc b/components/update_client/net/network_impl_cobalt.cc index feada05fe249f..853d2de745d06 100644 --- a/components/update_client/net/network_impl_cobalt.cc +++ b/components/update_client/net/network_impl_cobalt.cc @@ -106,6 +106,36 @@ void NetworkFetcherCobaltImpl::PostRequest( url_fetcher_->Start(); } +#if defined(IN_MEMORY_UPDATES) +void NetworkFetcherCobaltImpl::DownloadToString( + const GURL& url, + std::string* dst, + ResponseStartedCallback response_started_callback, + ProgressCallback progress_callback, + DownloadToStringCompleteCallback download_to_string_complete_callback) { + DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); + + LOG(INFO) << "cobalt::updater::NetworkFetcher::DownloadToString"; + LOG(INFO) << "DownloadToString url = " << url; + + CHECK(dst != nullptr); + dst_str_ = dst; + + response_started_callback_ = std::move(response_started_callback); + progress_callback_ = std::move(progress_callback); + download_to_string_complete_callback_ = + std::move(download_to_string_complete_callback); + + CreateUrlFetcher(url, net::URLFetcher::GET); + + url_fetcher_->SaveResponseToLargeString(); + + url_fetcher_type_ = kUrlFetcherTypeDownloadToString; + + url_fetcher_->Start(); +} + +#else void NetworkFetcherCobaltImpl::DownloadToFile( const GURL& url, const base::FilePath& file_path, @@ -131,6 +161,7 @@ void NetworkFetcherCobaltImpl::DownloadToFile( url_fetcher_->Start(); } +#endif void NetworkFetcherCobaltImpl::Cancel() { DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); @@ -156,9 +187,15 @@ void NetworkFetcherCobaltImpl::OnURLFetchComplete( const int response_code = source->GetResponseCode(); if (url_fetcher_type_ == kUrlFetcherTypePostRequest) { OnPostRequestComplete(source, status.error()); +#if defined(IN_MEMORY_UPDATES) + } else if (url_fetcher_type_ == kUrlFetcherTypeDownloadToString) { + OnDownloadToStringComplete(source, status.error()); + } +#else } else if (url_fetcher_type_ == kUrlFetcherTypeDownloadToFile) { OnDownloadToFileComplete(source, status.error()); } +#endif if (!status.is_success() || !IsResponseCodeSuccess(response_code)) { std::string msg(base::StringPrintf( @@ -227,6 +264,21 @@ void NetworkFetcherCobaltImpl::OnPostRequestComplete( update_client::NetworkFetcher::kHeaderXRetryAfter)); } +#if defined(IN_MEMORY_UPDATES) +void NetworkFetcherCobaltImpl::OnDownloadToStringComplete( + const net::URLFetcher* source, + const int status_error) { + if (!source->GetResponseAsLargeString(dst_str_)) { + LOG(ERROR) << "DownloadToString failed to get response from a string"; + } + + std::move(download_to_string_complete_callback_) + .Run(dst_str_, status_error, + source->GetResponseHeaders() + ? source->GetResponseHeaders()->GetContentLength() + : -1); +} +#else void NetworkFetcherCobaltImpl::OnDownloadToFileComplete( const net::URLFetcher* source, const int status_error) { @@ -242,6 +294,7 @@ void NetworkFetcherCobaltImpl::OnDownloadToFileComplete( ? 
source->GetResponseHeaders()->GetContentLength() : -1); } +#endif NetworkFetcherCobaltImpl::ReturnWrapper NetworkFetcherCobaltImpl::HandleError( const std::string& message) { diff --git a/components/update_client/net/network_impl_cobalt.h b/components/update_client/net/network_impl_cobalt.h index 742e8ea89ea19..d6181add24123 100644 --- a/components/update_client/net/network_impl_cobalt.h +++ b/components/update_client/net/network_impl_cobalt.h @@ -39,7 +39,11 @@ namespace update_client { typedef enum UrlFetcherType { kUrlFetcherTypePostRequest, +#if defined(IN_MEMORY_UPDATES) + kUrlFetcherTypeDownloadToString, +#else kUrlFetcherTypeDownloadToFile, +#endif } UrlFetcherType; class NetworkFetcherCobaltImpl : public NetworkFetcher, @@ -50,8 +54,13 @@ class NetworkFetcherCobaltImpl : public NetworkFetcher, using ProgressCallback = update_client::NetworkFetcher::ProgressCallback; using PostRequestCompleteCallback = update_client::NetworkFetcher::PostRequestCompleteCallback; +#if defined(IN_MEMORY_UPDATES) + using DownloadToStringCompleteCallback = + update_client::NetworkFetcher::DownloadToStringCompleteCallback; +#else using DownloadToFileCompleteCallback = update_client::NetworkFetcher::DownloadToFileCompleteCallback; +#endif explicit NetworkFetcherCobaltImpl( const cobalt::network::NetworkModule* network_module); @@ -65,12 +74,23 @@ class NetworkFetcherCobaltImpl : public NetworkFetcher, ResponseStartedCallback response_started_callback, ProgressCallback progress_callback, PostRequestCompleteCallback post_request_complete_callback) override; +#if defined(IN_MEMORY_UPDATES) + // Does not take ownership of |dst|, which must refer to a valid string that + // outlives this object. + void DownloadToString(const GURL& url, + std::string* dst, + ResponseStartedCallback response_started_callback, + ProgressCallback progress_callback, + DownloadToStringCompleteCallback + download_to_string_complete_callback) override; +#else void DownloadToFile(const GURL& url, const base::FilePath& file_path, ResponseStartedCallback response_started_callback, ProgressCallback progress_callback, DownloadToFileCompleteCallback download_to_file_complete_callback) override; +#endif void Cancel() override; // net::URLFetcherDelegate interface. 
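The in-memory completion callback above reports the borrowed buffer together with a net error and a content length, falling back to -1 when no response headers are available. A small standard-C++ analogue of that signature, purely illustrative:

#include <cstdint>
#include <functional>
#include <iostream>
#include <string>

// Assumed analogue of DownloadToStringCompleteCallback: the buffer stays
// owned by the caller; only a pointer to it is handed back.
using DownloadToStringComplete =
    std::function<void(std::string* dst, int net_error, int64_t content_size)>;

void FinishDownload(std::string* dst, bool have_headers,
                    DownloadToStringComplete on_complete) {
  *dst = "payload";
  const int64_t content_size =
      have_headers ? static_cast<int64_t>(dst->size()) : -1;  // header fallback
  on_complete(dst, /*net_error=*/0, content_size);
}

int main() {
  std::string buffer;
  FinishDownload(&buffer, /*have_headers=*/true,
                 [](std::string* dst, int net_error, int64_t content_size) {
                   std::cout << "net_error=" << net_error
                             << " content_size=" << content_size
                             << " buffered=" << dst->size() << "\n";
                 });
}
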
@@ -100,8 +120,14 @@ class NetworkFetcherCobaltImpl : public NetworkFetcher, void OnPostRequestComplete(const net::URLFetcher* source, const int status_error); + +#if defined(IN_MEMORY_UPDATES) + void OnDownloadToStringComplete(const net::URLFetcher* source, + const int status_error); +#else void OnDownloadToFileComplete(const net::URLFetcher* source, const int status_error); +#endif static constexpr int kMaxRetriesOnNetworkChange = 3; @@ -114,7 +140,12 @@ class NetworkFetcherCobaltImpl : public NetworkFetcher, ResponseStartedCallback response_started_callback_; ProgressCallback progress_callback_; PostRequestCompleteCallback post_request_complete_callback_; +#if defined(IN_MEMORY_UPDATES) + DownloadToStringCompleteCallback download_to_string_complete_callback_; + std::string* dst_str_; // not owned, can't be null +#else DownloadToFileCompleteCallback download_to_file_complete_callback_; +#endif const cobalt::network::NetworkModule* network_module_; diff --git a/components/update_client/network.h b/components/update_client/network.h index cf884b54a351b..b6b179d3b58f2 100644 --- a/components/update_client/network.h +++ b/components/update_client/network.h @@ -30,8 +30,13 @@ class NetworkFetcher { int net_error, const std::string& header_etag, int64_t xheader_retry_after_sec)>; +#if defined(IN_MEMORY_UPDATES) + using DownloadToStringCompleteCallback = base::OnceCallback< + void(std::string* dst, int net_error, int64_t content_size)>; +#else using DownloadToFileCompleteCallback = base::OnceCallback< void(base::FilePath path, int net_error, int64_t content_size)>; +#endif using ResponseStartedCallback = base::OnceCallback< void(const GURL& final_url, int response_code, int64_t content_length)>; using ProgressCallback = base::RepeatingCallback; @@ -57,12 +62,24 @@ class NetworkFetcher { ResponseStartedCallback response_started_callback, ProgressCallback progress_callback, PostRequestCompleteCallback post_request_complete_callback) = 0; +#if defined(IN_MEMORY_UPDATES) + // Does not take ownership of |dst|, which must refer to a valid string that + // outlives this object. 
+ virtual void DownloadToString( + const GURL& url, + std::string* dst, + ResponseStartedCallback response_started_callback, + ProgressCallback progress_callback, + DownloadToStringCompleteCallback download_to_string_complete_callback) = 0; +#else virtual void DownloadToFile( const GURL& url, const base::FilePath& file_path, ResponseStartedCallback response_started_callback, ProgressCallback progress_callback, DownloadToFileCompleteCallback download_to_file_complete_callback) = 0; +#endif + #if defined(STARBOARD) virtual void Cancel() = 0; #endif diff --git a/components/update_client/url_fetcher_downloader.cc b/components/update_client/url_fetcher_downloader.cc index 1ae692c377cd8..16079672fdca2 100644 --- a/components/update_client/url_fetcher_downloader.cc +++ b/components/update_client/url_fetcher_downloader.cc @@ -85,36 +85,56 @@ UrlFetcherDownloader::UrlFetcherDownloader( UrlFetcherDownloader::~UrlFetcherDownloader() { DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); #if defined(STARBOARD) - LOG(INFO) << "UrlFetcherDownloader::UrlFetcherDownloader"; + LOG(INFO) << "UrlFetcherDownloader::~UrlFetcherDownloader"; #endif } #if defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) +void UrlFetcherDownloader::ConfirmSlot(const GURL& url, std::string* dst) { +#else // defined(IN_MEMORY_UPDATES) void UrlFetcherDownloader::ConfirmSlot(const GURL& url) { +#endif // defined(IN_MEMORY_UPDATES) LOG(INFO) << "UrlFetcherDownloader::ConfirmSlot: url=" << url; if (is_cancelled_) { LOG(ERROR) << "UrlFetcherDownloader::ConfirmSlot: Download already cancelled"; ReportDownloadFailure(url); return; } +#if defined(IN_MEMORY_UPDATES) + if (!cobalt_slot_management_.ConfirmSlot(installation_dir_)) { +#else // defined(IN_MEMORY_UPDATES) if (!cobalt_slot_management_.ConfirmSlot(download_dir_)) { +#endif // defined(IN_MEMORY_UPDATES) ReportDownloadFailure(url, CrxDownloaderError::SLOT_UNAVAILABLE); return; } base::SequencedTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::BindOnce(&UrlFetcherDownloader::StartURLFetch, +#if defined(IN_MEMORY_UPDATES) + base::Unretained(this), url, dst)); +#else // defined(IN_MEMORY_UPDATES) base::Unretained(this), url)); +#endif // defined(IN_MEMORY_UPDATES) } +#if defined(IN_MEMORY_UPDATES) +void UrlFetcherDownloader::SelectSlot(const GURL& url, std::string* dst) { +#else // defined(IN_MEMORY_UPDATES) void UrlFetcherDownloader::SelectSlot(const GURL& url) { +#endif // defined(IN_MEMORY_UPDATES) LOG(INFO) << "UrlFetcherDownloader::SelectSlot: url=" << url; if (is_cancelled_) { LOG(ERROR) << "UrlFetcherDownloader::SelectSlot: Download already cancelled"; ReportDownloadFailure(url); return; } +#if defined(IN_MEMORY_UPDATES) + if (!cobalt_slot_management_.SelectSlot(&installation_dir_)) { +#else // defined(IN_MEMORY_UPDATES) if (!cobalt_slot_management_.SelectSlot(&download_dir_)) { +#endif // defined(IN_MEMORY_UPDATES) ReportDownloadFailure(url, CrxDownloaderError::SLOT_UNAVAILABLE); return; } @@ -125,12 +145,20 @@ void UrlFetcherDownloader::SelectSlot(const GURL& url) { base::SequencedTaskRunnerHandle::Get()->PostDelayedTask( FROM_HERE, base::BindOnce(&UrlFetcherDownloader::ConfirmSlot, base::Unretained(this), +#if defined(IN_MEMORY_UPDATES) + url, dst), +#else // defined(IN_MEMORY_UPDATES) url), +#endif // defined(IN_MEMORY_UPDATES) base::TimeDelta::FromSeconds(15)); } -#endif +#endif // defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) +void UrlFetcherDownloader::DoStartDownload(const GURL& url, std::string* dst) { +#else // defined(IN_MEMORY_UPDATES) void 
UrlFetcherDownloader::DoStartDownload(const GURL& url) { +#endif // defined(IN_MEMORY_UPDATES) DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); #if defined(STARBOARD) @@ -154,15 +182,19 @@ void UrlFetcherDownloader::DoStartDownload(const GURL& url) { } base::SequencedTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::BindOnce(&UrlFetcherDownloader::SelectSlot, +#if defined(IN_MEMORY_UPDATES) + base::Unretained(this), url, dst)); +#else // defined(IN_MEMORY_UPDATES) base::Unretained(this), url)); -#else +#endif // defined(IN_MEMORY_UPDATES) +#else // defined(STARBOARD) base::PostTaskWithTraitsAndReply( FROM_HERE, kTaskTraits, base::BindOnce(&UrlFetcherDownloader::CreateDownloadDir, base::Unretained(this)), base::BindOnce(&UrlFetcherDownloader::StartURLFetch, base::Unretained(this), url)); -#endif +#endif // defined(STARBOARD) } #if defined(STARBOARD) @@ -176,10 +208,12 @@ void UrlFetcherDownloader::DoCancelDownload() { } #endif +#if !defined(IN_MEMORY_UPDATES) void UrlFetcherDownloader::CreateDownloadDir() { base::CreateNewTempDirectory(FILE_PATH_LITERAL("chrome_url_fetcher_"), &download_dir_); } +#endif #if defined(STARBOARD) void UrlFetcherDownloader::ReportDownloadFailure(const GURL& url) { @@ -217,30 +251,60 @@ void UrlFetcherDownloader::ReportDownloadFailure(const GURL& url) { base::Unretained(this), false, result, download_metrics)); } +#if defined(IN_MEMORY_UPDATES) +void UrlFetcherDownloader::StartURLFetch(const GURL& url, std::string* dst) { +#else void UrlFetcherDownloader::StartURLFetch(const GURL& url) { +#endif DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); +#if defined(IN_MEMORY_UPDATES) + CHECK(dst != nullptr); +#endif #if defined(STARBOARD) LOG(INFO) << "UrlFetcherDownloader::StartURLFetch: url" << url +#if defined(IN_MEMORY_UPDATES) + << " installation_dir=" << installation_dir_; +#else // defined(IN_MEMORY_UPDATES) << " download_dir=" << download_dir_; +#endif // defined(IN_MEMORY_UPDATES) if (is_cancelled_) { LOG(ERROR) << "UrlFetcherDownloader::StartURLFetch: Download already cancelled"; ReportDownloadFailure(url); return; } -#endif +#endif // defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) + if (installation_dir_.empty()) { +#else if (download_dir_.empty()) { +#endif #if defined(STARBOARD) LOG(ERROR) << "UrlFetcherDownloader::StartURLFetch: failed with empty " - "download_dir"; -#endif +#if defined(IN_MEMORY_UPDATES) + << "installation_dir"; +#else // defined(IN_MEMORY_UPDATES) + << "download_dir"; +#endif // defined(IN_MEMORY_UPDATES) +#endif // defined(STARBOARD) ReportDownloadFailure(url); return; } - const auto file_path = download_dir_.AppendASCII(url.ExtractFileName()); network_fetcher_ = network_fetcher_factory_->Create(); +#if defined(IN_MEMORY_UPDATES) + network_fetcher_->DownloadToString( + url, + dst, + base::BindOnce(&UrlFetcherDownloader::OnResponseStarted, + base::Unretained(this)), + base::BindRepeating(&UrlFetcherDownloader::OnDownloadProgress, + base::Unretained(this)), + base::BindOnce(&UrlFetcherDownloader::OnNetworkFetcherComplete, + base::Unretained(this))); +#else + const auto file_path = download_dir_.AppendASCII(url.ExtractFileName()); network_fetcher_->DownloadToFile( url, file_path, base::BindOnce(&UrlFetcherDownloader::OnResponseStarted, @@ -249,13 +313,19 @@ void UrlFetcherDownloader::StartURLFetch(const GURL& url) { base::Unretained(this)), base::BindOnce(&UrlFetcherDownloader::OnNetworkFetcherComplete, base::Unretained(this))); +#endif download_start_time_ = base::TimeTicks::Now(); } -void 
UrlFetcherDownloader::OnNetworkFetcherComplete(base::FilePath file_path, - int net_error, - int64_t content_size) { +void UrlFetcherDownloader::OnNetworkFetcherComplete( +#if defined(IN_MEMORY_UPDATES) + std::string* dst, +#else + base::FilePath file_path, +#endif + int net_error, + int64_t content_size) { DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); #if defined(STARBOARD) @@ -273,8 +343,12 @@ void UrlFetcherDownloader::OnNetworkFetcherComplete(base::FilePath file_path, // is not accepting requests for the moment. int error = -1; #if defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) + if (response_code_ == 200 && net_error == 0) { +#else // defined(IN_MEMORY_UPDATES) if (!file_path.empty() && response_code_ == 200 && net_error == 0) { -#else +#endif // defined(IN_MEMORY_UPDATES) +#else // defined(STARBOARD) if (!file_path.empty() && response_code_ == 200) { #endif DCHECK_EQ(0, net_error); @@ -290,7 +364,11 @@ void UrlFetcherDownloader::OnNetworkFetcherComplete(base::FilePath file_path, Result result; result.error = error; if (!error) { +#if defined(IN_MEMORY_UPDATES) + result.installation_dir = installation_dir_; +#else result.response = file_path; +#endif #if defined(STARBOARD) result.installation_index = cobalt_slot_management_.GetInstallationIndex(); #endif @@ -307,20 +385,26 @@ void UrlFetcherDownloader::OnNetworkFetcherComplete(base::FilePath file_path, VLOG(1) << "Downloaded " << content_size << " bytes in " << download_time.InMilliseconds() << "ms from " << final_url_.spec() +#if defined(IN_MEMORY_UPDATES) + << " to string"; +#else << " to " << result.response.value(); +#endif +#if !defined(IN_MEMORY_UPDATES) #if !defined(STARBOARD) // Delete the download directory in the error cases. if (error && !download_dir_.empty()) base::PostTaskWithTraits( FROM_HERE, kTaskTraits, base::BindOnce(IgnoreResult(&base::DeleteFile), download_dir_, true)); -#else +#else // defined(STARBOARD) if (error && !download_dir_.empty()) { // Cleanup the download dir. CleanupDirectory(download_dir_); } -#endif +#endif // defined(STARBOARD) +#endif // !defined(IN_MEMORY_UPDATES) main_task_runner()->PostTask( FROM_HERE, base::BindOnce(&UrlFetcherDownloader::OnDownloadComplete, diff --git a/components/update_client/url_fetcher_downloader.h b/components/update_client/url_fetcher_downloader.h index 3adc4c22f2d67..f593e6385ce7c 100644 --- a/components/update_client/url_fetcher_downloader.h +++ b/components/update_client/url_fetcher_downloader.h @@ -41,21 +41,60 @@ class UrlFetcherDownloader : public CrxDownloader { private: // Overrides for CrxDownloader. +#if defined(IN_MEMORY_UPDATES) + void DoStartDownload(const GURL& url, std::string* dst) override; +#else void DoStartDownload(const GURL& url) override; +#endif #if defined(STARBOARD) void DoCancelDownload() override; #endif +#if !defined(IN_MEMORY_UPDATES) void CreateDownloadDir(); +#endif +#if defined(IN_MEMORY_UPDATES) + // Does not take ownership of |dst|, which must refer to a valid string that + // outlives this object. + void StartURLFetch(const GURL& url, std::string* dst); +#else void StartURLFetch(const GURL& url); +#endif #if defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) + // With in-memory updates it's no longer necessary to select and confirm the + // installation slot at download time, and it would likely be more clear to + // move this responsibility to the unpack flow. 
That said, yavor@ requested + // that we keep it here for now since there are a number of edge cases to + // consider and data races to avoid in this space (e.g., racing updaters). To + // limit the scope and risk of the in-memory updates changes we can leave it + // here for now and continue passing the installation dir on to the unpack + // flow, and consider changing this in a future refactoring. + + // Does not take ownership of |dst|, which must refer to a valid string that + // outlives this object. + void SelectSlot(const GURL& url, std::string* dst); + + // Does not take ownership of |dst|, which must refer to a valid string that + // outlives this object. + void ConfirmSlot(const GURL& url, std::string* dst); +#else // defined(IN_MEMORY_UPDATES) void SelectSlot(const GURL& url); void ConfirmSlot(const GURL& url); -#endif +#endif // defined(IN_MEMORY_UPDATES) +#endif // defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) + // Does not take ownership of |dst|, which must refer to a valid string that + // outlives this object. + void OnNetworkFetcherComplete(std::string* dst, + int net_error, + int64_t content_size); +#else void OnNetworkFetcherComplete(base::FilePath file_path, int net_error, int64_t content_size); +#endif void OnResponseStarted(const GURL& final_url, int response_code, int64_t content_length); @@ -70,8 +109,15 @@ class UrlFetcherDownloader : public CrxDownloader { scoped_refptr network_fetcher_factory_; std::unique_ptr network_fetcher_; +#if !defined(IN_MEMORY_UPDATES) // Contains a temporary download directory for the downloaded file. base::FilePath download_dir_; +#else + // For legacy, file-system based updates the download_dir_ doubles as the + // installation dir. But for in-memory updates this directory only serves as + // the installation directory and should be named accordingly. 
+ base::FilePath installation_dir_; +#endif base::TimeTicks download_start_time_; diff --git a/components/update_client/utils.cc b/components/update_client/utils.cc index 2a9b45a66b6d5..d0c49f2c860ec 100644 --- a/components/update_client/utils.cc +++ b/components/update_client/utils.cc @@ -64,6 +64,25 @@ std::string GetCrxIdFromPublicKeyHash(const std::vector& pk_hash) { } #if defined(STARBOARD) +#if defined(IN_MEMORY_UPDATES) +bool VerifyHash256(const std::string* content, + const std::string& expected_hash_str) { + std::vector expected_hash; + if (!base::HexStringToBytes(expected_hash_str, &expected_hash) || + expected_hash.size() != crypto::kSHA256Length) { + return false; + } + + uint8_t actual_hash[crypto::kSHA256Length] = {0}; + std::unique_ptr hasher( + crypto::SecureHash::Create(crypto::SecureHash::SHA256)); + + hasher->Update(content->c_str(), content->size()); + hasher->Finish(actual_hash, sizeof(actual_hash)); + + return memcmp(actual_hash, &expected_hash[0], sizeof(actual_hash)) == 0; +} +#else // defined(IN_MEMORY_UPDATES) bool VerifyFileHash256(const base::FilePath& filepath, const std::string& expected_hash_str) { std::vector expected_hash; @@ -106,7 +125,8 @@ bool VerifyFileHash256(const base::FilePath& filepath, return memcmp(actual_hash, &expected_hash[0], sizeof(actual_hash)) == 0; } -#else +#endif // defined(IN_MEMORY_UPDATES) +#else // defined(STARBOARD) bool VerifyFileHash256(const base::FilePath& filepath, const std::string& expected_hash_str) { std::vector expected_hash; diff --git a/components/update_client/utils.h b/components/update_client/utils.h index 8beebe8062e49..b50839f415731 100644 --- a/components/update_client/utils.h +++ b/components/update_client/utils.h @@ -49,10 +49,18 @@ std::string GetCrxComponentID(const CrxComponent& component); // Returns a CRX id from a public key hash. std::string GetCrxIdFromPublicKeyHash(const std::vector& pk_hash); +#if defined(IN_MEMORY_UPDATES) +// Returns true if the actual SHA-256 hash of |content| matches the +// |expected_hash|. +// |content| must refer to a valid string. +bool VerifyHash256(const std::string* content, + const std::string& expected_hash); +#else // Returns true if the actual SHA-256 hash of the |filepath| matches the // |expected_hash|. bool VerifyFileHash256(const base::FilePath& filepath, const std::string& expected_hash); +#endif // Returns true if the |brand| parameter matches ^[a-zA-Z]{4}?$ . bool IsValidBrand(const std::string& brand); diff --git a/components/update_client/utils_unittest.cc b/components/update_client/utils_unittest.cc index 71a8b8868272d..a022032c7ee2b 100644 --- a/components/update_client/utils_unittest.cc +++ b/components/update_client/utils_unittest.cc @@ -27,6 +27,7 @@ base::FilePath MakeTestFilePath(const char* file) { namespace update_client { +#if !defined(IN_MEMORY_UPDATES) TEST(UpdateClientUtils, VerifyFileHash256) { EXPECT_TRUE(VerifyFileHash256( MakeTestFilePath("jebgalgnebhfojomionfpkfelancnnkf.crx"), @@ -46,6 +47,9 @@ TEST(UpdateClientUtils, VerifyFileHash256) { std::string( "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))); } +// TODO(b/290410288): write a test targeting the string-based API, +// VerifyHash256(). 
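// A possible shape for that test, for illustration only (it would live under
// #if defined(IN_MEMORY_UPDATES)); |kExpectedSha256| is a hypothetical
// constant holding the fixture file's real SHA-256 digest:
//
//   TEST(UpdateClientUtils, VerifyHash256) {
//     std::string contents;
//     ASSERT_TRUE(base::ReadFileToString(
//         MakeTestFilePath("jebgalgnebhfojomionfpkfelancnnkf.crx"),
//         &contents));
//     EXPECT_TRUE(VerifyHash256(&contents, kExpectedSha256));
//     EXPECT_FALSE(VerifyHash256(&contents, std::string("")));
//     EXPECT_FALSE(VerifyHash256(&contents, std::string(64, 'a')));
//   }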
+#endif

 // Tests that the brand matches ^[a-zA-Z]{4}?$
 TEST(UpdateClientUtils, IsValidBrand) {
diff --git a/net/url_request/test_url_fetcher_factory.cc b/net/url_request/test_url_fetcher_factory.cc
index a5b135402f0ed..a847216345b0a 100644
--- a/net/url_request/test_url_fetcher_factory.cc
+++ b/net/url_request/test_url_fetcher_factory.cc
@@ -173,6 +173,13 @@ void TestURLFetcher::SaveResponseToTemporaryFile(
   SaveResponseToFileAtPath(path, file_task_runner);
 }

+#if defined(STARBOARD)
+void TestURLFetcher::SaveResponseToLargeString() {
+  // TODO(b/158043520): implement when dependent unit tests are enabled.
+  CHECK(false) << "Unimplemented";
+}
+#endif
+
 void TestURLFetcher::SaveResponseWithWriter(
     std::unique_ptr response_writer) {
   // In class URLFetcherCore this method is called by all three:
@@ -285,6 +292,15 @@ bool TestURLFetcher::GetResponseAsString(
   return true;
 }

+#if defined(STARBOARD)
+bool TestURLFetcher::GetResponseAsLargeString(
+    std::string* out_response_string) const {
+  // TODO(b/158043520): implement when dependent unit tests are enabled.
+  CHECK(false) << "Unimplemented";
+  return false;
+}
+#endif
+
 bool TestURLFetcher::GetResponseAsFilePath(
     bool take_ownership, base::FilePath* out_response_path) const {
   if (fake_response_destination_ != TEMP_FILE)
diff --git a/net/url_request/test_url_fetcher_factory.h b/net/url_request/test_url_fetcher_factory.h
index 880d078c199e2..bcc40396ff1bb 100644
--- a/net/url_request/test_url_fetcher_factory.h
+++ b/net/url_request/test_url_fetcher_factory.h
@@ -133,6 +133,9 @@ class TestURLFetcher : public URLFetcher {
       scoped_refptr file_task_runner) override;
   void SaveResponseToTemporaryFile(
       scoped_refptr file_task_runner) override;
+#if defined(STARBOARD)
+  void SaveResponseToLargeString() override;
+#endif
   void SaveResponseWithWriter(
       std::unique_ptr response_writer) override;
 #if defined(STARBOARD)
@@ -161,6 +164,10 @@ class TestURLFetcher : public URLFetcher {
   void ReceivedContentWasMalformed() override;
   // Override response access functions to return fake data.
   bool GetResponseAsString(std::string* out_response_string) const override;
+#if defined(STARBOARD)
+  bool GetResponseAsLargeString(std::string* out_response_string)
+      const override;
+#endif
   bool GetResponseAsFilePath(bool take_ownership,
                              base::FilePath* out_response_path) const override;
diff --git a/net/url_request/url_fetcher.h b/net/url_request/url_fetcher.h
index 5bc635f478ce2..88a16c5ed94df 100644
--- a/net/url_request/url_fetcher.h
+++ b/net/url_request/url_fetcher.h
@@ -311,6 +311,14 @@ class NET_EXPORT URLFetcher {
   virtual void SaveResponseToTemporaryFile(
       scoped_refptr file_task_runner) = 0;

+#if defined(STARBOARD)
+  // By default, the response is saved in a string without preallocation. Call
+  // this method when the response is expected to be large and the capacity
+  // should be reserved to the response's content size upfront to reduce
+  // fragmentation.
+  virtual void SaveResponseToLargeString() = 0;
+#endif
+
   // By default, the response is saved in a string. Call this method to use the
   // specified writer to save the response. Must be called before Start().
   virtual void SaveResponseWithWriter(
@@ -381,6 +389,15 @@
   // set to store the response as a string.
   virtual bool GetResponseAsString(std::string* out_response_string) const = 0;

+  #if defined(STARBOARD)
+  // Get the response as a string when the response is expected to be large and
+  // the data should be "moved" and not copied.
Should only be used in + // conjunction with SaveResponseToLargeString() and returns false if the + // fetcher was not set to store the response as a "large" string. + virtual bool GetResponseAsLargeString(std::string* out_response_string) + const = 0; + #endif + // Get the path to the file containing the response body. Returns false // if the response body was not saved to a file. If take_ownership is // true, caller takes responsibility for the file, and it will not diff --git a/net/url_request/url_fetcher_core.cc b/net/url_request/url_fetcher_core.cc index 39512ed459d42..4798a3ae28af9 100644 --- a/net/url_request/url_fetcher_core.cc +++ b/net/url_request/url_fetcher_core.cc @@ -352,6 +352,14 @@ void URLFetcherCore::SaveResponseToFileAtPath( new URLFetcherFileWriter(file_task_runner, file_path))); } +#if defined(STARBOARD) +void URLFetcherCore::SaveResponseToLargeString() { + DCHECK(delegate_task_runner_->RunsTasksInCurrentSequence()); + SaveResponseWithWriter(std::unique_ptr( + new URLFetcherLargeStringWriter())); +} +#endif + void URLFetcherCore::SaveResponseToTemporaryFile( scoped_refptr file_task_runner) { DCHECK(delegate_task_runner_->RunsTasksInCurrentSequence()); @@ -435,6 +443,19 @@ bool URLFetcherCore::GetResponseAsString( return true; } +#if defined(STARBOARD) +bool URLFetcherCore::GetResponseAsLargeString( + std::string* out_response_string) const { + URLFetcherLargeStringWriter* large_string_writer = + response_writer_ ? response_writer_->AsLargeStringWriter() : NULL; + if (!large_string_writer) + return false; + + large_string_writer->GetAndResetData(out_response_string); + return true; +} +#endif + bool URLFetcherCore::GetResponseAsFilePath(bool take_ownership, base::FilePath* out_response_path) { DCHECK(delegate_task_runner_->RunsTasksInCurrentSequence()); diff --git a/net/url_request/url_fetcher_core.h b/net/url_request/url_fetcher_core.h index 7c1bbcbe0187e..c40184dd469d0 100644 --- a/net/url_request/url_fetcher_core.h +++ b/net/url_request/url_fetcher_core.h @@ -114,6 +114,9 @@ class URLFetcherCore : public base::RefCountedThreadSafe, void SaveResponseToFileAtPath( const base::FilePath& file_path, scoped_refptr file_task_runner); +#if defined(STARBOARD) + void SaveResponseToLargeString(); +#endif void SaveResponseToTemporaryFile( scoped_refptr file_task_runner); void SaveResponseWithWriter( @@ -143,6 +146,9 @@ class URLFetcherCore : public base::RefCountedThreadSafe, // headers. 
void ReceivedContentWasMalformed(); bool GetResponseAsString(std::string* out_response_string) const; +#if defined(STARBOARD) + bool GetResponseAsLargeString(std::string* out_response_string) const; +#endif bool GetResponseAsFilePath(bool take_ownership, base::FilePath* out_response_path); diff --git a/net/url_request/url_fetcher_impl.cc b/net/url_request/url_fetcher_impl.cc index b651ce4c4762b..19a345b1dbcf3 100644 --- a/net/url_request/url_fetcher_impl.cc +++ b/net/url_request/url_fetcher_impl.cc @@ -145,6 +145,12 @@ void URLFetcherImpl::SaveResponseToTemporaryFile( core_->SaveResponseToTemporaryFile(file_task_runner); } +#if defined(STARBOARD) +void URLFetcherImpl::SaveResponseToLargeString() { + core_->SaveResponseToLargeString(); +} +#endif + void URLFetcherImpl::SaveResponseWithWriter( std::unique_ptr response_writer) { core_->SaveResponseWithWriter(std::move(response_writer)); @@ -217,6 +223,13 @@ bool URLFetcherImpl::GetResponseAsString( return core_->GetResponseAsString(out_response_string); } +#if defined(STARBOARD) +bool URLFetcherImpl::GetResponseAsLargeString( + std::string* out_response_string) const { + return core_->GetResponseAsLargeString(out_response_string); +} +#endif + bool URLFetcherImpl::GetResponseAsFilePath( bool take_ownership, base::FilePath* out_response_path) const { diff --git a/net/url_request/url_fetcher_impl.h b/net/url_request/url_fetcher_impl.h index 21aafed181dde..877536050ae84 100644 --- a/net/url_request/url_fetcher_impl.h +++ b/net/url_request/url_fetcher_impl.h @@ -79,6 +79,9 @@ class NET_EXPORT_PRIVATE URLFetcherImpl : public URLFetcher { scoped_refptr file_task_runner) override; void SaveResponseToTemporaryFile( scoped_refptr file_task_runner) override; +#if defined(STARBOARD) + void SaveResponseToLargeString() override; +#endif void SaveResponseWithWriter( std::unique_ptr response_writer) override; #if defined(STARBOARD) @@ -99,6 +102,10 @@ class NET_EXPORT_PRIVATE URLFetcherImpl : public URLFetcher { int GetResponseCode() const override; void ReceivedContentWasMalformed() override; bool GetResponseAsString(std::string* out_response_string) const override; +#if defined(STARBOARD) + bool GetResponseAsLargeString(std::string* out_response_string) + const override; +#endif bool GetResponseAsFilePath(bool take_ownership, base::FilePath* out_response_path) const override; diff --git a/net/url_request/url_fetcher_impl_unittest.cc b/net/url_request/url_fetcher_impl_unittest.cc index ed01d26ea5a69..cf24c6809d9fc 100644 --- a/net/url_request/url_fetcher_impl_unittest.cc +++ b/net/url_request/url_fetcher_impl_unittest.cc @@ -1664,6 +1664,73 @@ TEST_F(URLFetcherBadHTTPSTest, BadHTTPS) { } #endif // STARBOARD +#if defined(STARBOARD) +// Get a small file and save it to a "large string". SaveResponseToLargeString() +// and GetResponseAsLargeString() are intended to be used for URLs with large +// data sizes but should still work for URLs with small data sizes. +TEST_F(URLFetcherTest, LargeStringTestSmallGet) { + // Follows the structure of the TempFileTestSmallGet case + SaveFileTest + // helper. 
+ const char* file_to_fetch = "simple.html"; + std::unique_ptr delegate( + new WaitingURLFetcherDelegate()); + delegate->CreateFetcher( + test_server_->GetURL(std::string(kTestServerFilePrefix) + file_to_fetch), + URLFetcher::GET, CreateSameThreadContextGetter()); + + delegate->fetcher()->SaveResponseToLargeString(); + + delegate->StartFetcherAndWait(); + + EXPECT_TRUE(delegate->fetcher()->GetStatus().is_success()); + EXPECT_EQ(200, delegate->fetcher()->GetResponseCode()); + + std::string actual_content; + EXPECT_TRUE( + delegate->fetcher()->GetResponseAsLargeString(&actual_content)); + + base::FilePath server_root; + base::PathService::Get(base::DIR_TEST_DATA, &server_root); + std::string expected_content; + base::ReadFileToString( + server_root.Append(kDocRoot).AppendASCII(file_to_fetch), + &expected_content); + EXPECT_EQ(expected_content, actual_content); +} + +// Get a file large enough to require more than one read into URLFetcher::Core's +// IOBuffer and save it to a "large string". +TEST_F(URLFetcherTest, LargeStringTestLargeGet) { + // Follows the structure of the TempFileTestLargeGet case + SaveFileTest + // helper. + const char* file_to_fetch = "animate1.gif"; + std::unique_ptr delegate( + new WaitingURLFetcherDelegate()); + delegate->CreateFetcher( + test_server_->GetURL(std::string(kTestServerFilePrefix) + file_to_fetch), + URLFetcher::GET, CreateSameThreadContextGetter()); + + delegate->fetcher()->SaveResponseToLargeString(); + + delegate->StartFetcherAndWait(); + + EXPECT_TRUE(delegate->fetcher()->GetStatus().is_success()); + EXPECT_EQ(200, delegate->fetcher()->GetResponseCode()); + + std::string actual_content; + EXPECT_TRUE( + delegate->fetcher()->GetResponseAsLargeString(&actual_content)); + + base::FilePath server_root; + base::PathService::Get(base::DIR_TEST_DATA, &server_root); + std::string expected_content; + base::ReadFileToString( + server_root.Append(kDocRoot).AppendASCII(file_to_fetch), + &expected_content); + EXPECT_EQ(expected_content, actual_content); +} +#endif + } // namespace } // namespace net diff --git a/net/url_request/url_fetcher_response_writer.cc b/net/url_request/url_fetcher_response_writer.cc index 27e91a31b9d4d..b9f4812606335 100644 --- a/net/url_request/url_fetcher_response_writer.cc +++ b/net/url_request/url_fetcher_response_writer.cc @@ -21,6 +21,12 @@ URLFetcherStringWriter* URLFetcherResponseWriter::AsStringWriter() { return NULL; } +#if defined(STARBOARD) +URLFetcherLargeStringWriter* URLFetcherResponseWriter::AsLargeStringWriter() { + return NULL; +} +#endif + URLFetcherFileWriter* URLFetcherResponseWriter::AsFileWriter() { return NULL; } @@ -51,6 +57,49 @@ URLFetcherStringWriter* URLFetcherStringWriter::AsStringWriter() { return this; } +#if defined(STARBOARD) +URLFetcherLargeStringWriter::URLFetcherLargeStringWriter() = default; + +URLFetcherLargeStringWriter::~URLFetcherLargeStringWriter() = default; + +int URLFetcherLargeStringWriter::Initialize(CompletionOnceCallback callback) { + data_.clear(); + return OK; +} + +void URLFetcherLargeStringWriter::OnResponseStarted(int64_t content_length) { + data_.reserve(content_length); +} + +void URLFetcherLargeStringWriter::GetAndResetData(std::string* data) { + CHECK(data != nullptr); + + // This implementation is copied from + // cobalt::loader::URLFetcherStringWriter::GetAndResetData(). 
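  // The double swap below first releases any capacity the caller's string may
  // already hold (it is swapped into a temporary that is destroyed on return),
  // then hands the downloaded buffer in |data_| to the caller without copying,
  // leaving |data_| empty for reuse.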
+ std::string empty; + data->swap(empty); + data_.swap(*data); +} + +int URLFetcherLargeStringWriter::Write(IOBuffer* buffer, + int num_bytes, + CompletionOnceCallback callback) { + data_.append(buffer->data(), num_bytes); + return num_bytes; +} + +int URLFetcherLargeStringWriter::Finish(int net_error, + CompletionOnceCallback callback) { + // Do nothing. + return OK; +} + +URLFetcherLargeStringWriter* + URLFetcherLargeStringWriter::AsLargeStringWriter() { + return this; +} +#endif + URLFetcherFileWriter::URLFetcherFileWriter( scoped_refptr file_task_runner, const base::FilePath& file_path) diff --git a/net/url_request/url_fetcher_response_writer.h b/net/url_request/url_fetcher_response_writer.h index bd4ca147c22ac..c86a7cf099fc7 100644 --- a/net/url_request/url_fetcher_response_writer.h +++ b/net/url_request/url_fetcher_response_writer.h @@ -25,6 +25,9 @@ class FileStream; class IOBuffer; class URLFetcherFileWriter; class URLFetcherStringWriter; +#if defined(STARBOARD) +class URLFetcherLargeStringWriter; +#endif // This class encapsulates all state involved in writing URLFetcher response // bytes to the destination. @@ -63,6 +66,12 @@ class NET_EXPORT URLFetcherResponseWriter { // Returns this instance's pointer as URLFetcherStringWriter when possible. virtual URLFetcherStringWriter* AsStringWriter(); +#if defined(STARBOARD) + // Returns this instance's pointer as URLFetcherLargeStringWriter when + // possible. + virtual URLFetcherLargeStringWriter* AsLargeStringWriter(); +#endif + // Returns this instance's pointer as URLFetcherFileWriter when possible. virtual URLFetcherFileWriter* AsFileWriter(); }; @@ -92,6 +101,35 @@ class NET_EXPORT URLFetcherStringWriter : public URLFetcherResponseWriter { DISALLOW_COPY_AND_ASSIGN(URLFetcherStringWriter); }; +#if defined(STARBOARD) +// Memory-conscious URLFetcherResponseWriter implementation for a "large" +// std::string. The string's capacity is preallocated to the size of the content +// and the data can be "moved" out. +// Similar to cobalt::loader::URLFetcherStringWriter but with a different +// preallocation strategy. +class NET_EXPORT URLFetcherLargeStringWriter : public URLFetcherResponseWriter { + public: + URLFetcherLargeStringWriter(); + ~URLFetcherLargeStringWriter() override; + + void GetAndResetData(std::string* data); + + // URLFetcherResponseWriter overrides: + int Initialize(CompletionOnceCallback callback) override; + void OnResponseStarted(int64_t content_length) override; + int Write(IOBuffer* buffer, + int num_bytes, + CompletionOnceCallback callback) override; + int Finish(int net_error, CompletionOnceCallback callback) override; + URLFetcherLargeStringWriter* AsLargeStringWriter() override; + + private: + std::string data_; + + DISALLOW_COPY_AND_ASSIGN(URLFetcherLargeStringWriter); +}; +#endif + // URLFetcherResponseWriter implementation for files. 
class NET_EXPORT URLFetcherFileWriter : public URLFetcherResponseWriter { public: diff --git a/net/url_request/url_fetcher_response_writer_unittest.cc b/net/url_request/url_fetcher_response_writer_unittest.cc index 45bfd2296f88b..e8266f44181b1 100644 --- a/net/url_request/url_fetcher_response_writer_unittest.cc +++ b/net/url_request/url_fetcher_response_writer_unittest.cc @@ -57,6 +57,61 @@ TEST_F(URLFetcherStringWriterTest, Basic) { EXPECT_TRUE(writer_->data().empty()); } +#if defined(STARBOARD) +class URLFetcherLargeStringWriterTest : public PlatformTest { + protected: + void SetUp() override { + writer_.reset(new URLFetcherLargeStringWriter); + buf_ = base::MakeRefCounted(kData); + } + + std::unique_ptr writer_; + scoped_refptr buf_; +}; + +TEST_F(URLFetcherLargeStringWriterTest, Basic) { + // The structure of this case is copied from URLFetcherStringWriterTest.Basic. + + int rv = 0; + // Initialize(), Write() and Finish(). + TestCompletionCallback callback; + rv = writer_->Initialize(callback.callback()); + EXPECT_THAT(callback.GetResult(rv), IsOk()); + rv = writer_->Write(buf_.get(), buf_->size(), callback.callback()); + EXPECT_EQ(buf_->size(), callback.GetResult(rv)); + rv = writer_->Finish(OK, callback.callback()); + EXPECT_THAT(callback.GetResult(rv), IsOk()); + + // Verify the result. + std::string actual_data; + writer_->GetAndResetData(&actual_data); + // TODO(b/158043520): as requested by yuying@, we should add a test that uses + // a string that is actually large as the test data. + EXPECT_EQ(kData, actual_data); + + // Initialize() again to reset. + rv = writer_->Initialize(callback.callback()); + EXPECT_THAT(callback.GetResult(rv), IsOk()); + writer_->GetAndResetData(&actual_data); + EXPECT_TRUE(actual_data.empty()); +} + +TEST_F(URLFetcherLargeStringWriterTest, OnResponseStartedPreallocatesMemory) { + int rv = 0; + TestCompletionCallback callback; + rv = writer_->Initialize(callback.callback()); + EXPECT_THAT(callback.GetResult(rv), IsOk()); + // Should be larger than the default string size but otherwise arbitrary. 
+  int64_t content_length = 1234;
+
+  writer_->OnResponseStarted(content_length);
+
+  std::string actual_data;
+  writer_->GetAndResetData(&actual_data);
+  EXPECT_GE(actual_data.capacity(), content_length);
+}
+#endif
+
 class URLFetcherFileWriterTest : public PlatformTest,
                                  public WithScopedTaskEnvironment {
  protected:
diff --git a/starboard/build/config/win/BUILD.gn b/starboard/build/config/win/BUILD.gn
index b69e93cdb4b3e..847ddfd633be9 100644
--- a/starboard/build/config/win/BUILD.gn
+++ b/starboard/build/config/win/BUILD.gn
@@ -47,7 +47,7 @@ config("common") {
   ]
   cflags += [
     "/EHsc",
-    "/std:c++14",
+    "/std:c++17",
   ]

   # msvs_debug/_devel/etc
diff --git a/starboard/nplb/compiler_compliance/cpp17_support.cc b/starboard/nplb/compiler_compliance/cpp17_support.cc
index 6a59dbafa1905..c11719193addf 100644
--- a/starboard/nplb/compiler_compliance/cpp17_support.cc
+++ b/starboard/nplb/compiler_compliance/cpp17_support.cc
@@ -64,6 +64,26 @@ static_assert(add_one(1) == 2, "Constexpr lambdas support is required");
 static_assert(starboard::foo::bar::baz::life() == 42,
               "Nested Namespaces support is required");

+// Test fallthrough support
+
+constexpr int test_fallthrough(int n) {
+  switch (n) {
+    case 1:
+      return 1;
+      break;
+    case 2:
+      [[fallthrough]];  // intentional fallthrough
+    case 3:
+      return 3;
+      break;
+  }
+
+  return 0;
+}
+
+static_assert(test_fallthrough(2) == 3,
+              "[[fallthrough]] attribute support is required");
+
 }  // namespace
 }  // namespace nplb
 }  // namespace starboard
diff --git a/starboard/win/shared/platform_configuration/configuration.gni b/starboard/win/shared/platform_configuration/configuration.gni
index 1798610bc9bbe..74fe80625278a 100644
--- a/starboard/win/shared/platform_configuration/configuration.gni
+++ b/starboard/win/shared/platform_configuration/configuration.gni
@@ -27,5 +27,3 @@ pedantic_warnings_config_path =
     "//starboard/win/shared/platform_configuration:pedantic_warnings"

 cobalt_platform_dependencies = [ "//starboard/egl_and_gles" ]
-
-sb_enable_cpp17_audit = false
diff --git a/starboard/xb1/cert/README.md b/starboard/xb1/cert/README.md
new file mode 100644
index 0000000000000..7462013705633
--- /dev/null
+++ b/starboard/xb1/cert/README.md
@@ -0,0 +1,32 @@
+This directory contains a cobalt.pfx cert for signing UWP appx packages.
+
+This cert is not for use when submitting to the Microsoft Store. It is intended
+only to be used for running the Windows App Cert Kit. Note that you will need
+to regenerate this file to be able to sign a cobalt appx yourself.
+
+It was generated as follows, using tools in SDK 10.0.22621.0 run in PowerShell
+as an administrator:
+
+Create a new self-signed certificate with an extended key usage for code
+signing. The Subject must match the Publisher field in your AppxManifest.
+`New-SelfSignedCertificate -Type Custom -Subject "" -FriendlyName "cobalt-cert" -KeyUsage DigitalSignature -TextExtension @("2.5.29.37={text}1.3.6.1.5.5.7.3.3")`
+
+Verify that the cert was created properly. You should see the new cert if you
+run the following command (Cert:\LocalMachine\My is the default cert store;
+yours may be in a different location).
+`Get-ChildItem Cert:\LocalMachine\My | Format-Table Subject, FriendlyName, Thumbprint`
+
+Export the certificate to a Personal Information Exchange (pfx) file.
+`Export-PfxCertificate -cert Cert:\LocalMachine\My\ -FilePath .pfx -ProtectTo ` + +See the following for more information: + +https://learn.microsoft.com/en-us/windows/msix/package/create-certificate-package-signing + +It is recommended that you remove any certificates once they are no longer +necessary to prevent them from being used maliciously. If you need to remove +this certificate, run the following in PowerShell as an administrator. + +`Get-ChildItem Cert:\LocalMachine\My | Format-Table Subject, FriendlyName, Thumbprint` + +`Get-ChildItem Cert:\LocalMachine\My\ | Remove-Item` diff --git a/starboard/xb1/cert/cobalt.pfx b/starboard/xb1/cert/cobalt.pfx new file mode 100644 index 0000000000000..3f8d22dc79867 Binary files /dev/null and b/starboard/xb1/cert/cobalt.pfx differ diff --git a/starboard/xb1/tools/packager.py b/starboard/xb1/tools/packager.py index 3682baf8b35ce..9503f20f24b77 100644 --- a/starboard/xb1/tools/packager.py +++ b/starboard/xb1/tools/packager.py @@ -17,28 +17,40 @@ Called by package_application.py """ +import argparse import logging import os import platform import shutil import subprocess +import sys from xml.etree import ElementTree as ET import zipfile from starboard.tools import package -_DEFAULT_CERT_PATH = os.path.join( +_INTERNAL_CERT_PATH = os.path.join( os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir, 'internal', 'starboard', 'xb1', 'cert', 'youtube.pfx') +_EXTERNAL_CERT_PATH = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, + os.pardir, 'starboard', 'xb1', 'cert', 'cobalt.pfx') _APPX_MANIFEST_XML_NAMESPACE = \ 'http://schemas.microsoft.com/appx/manifest/foundation/windows10' _NAMESPACE_DICT = {'appx': _APPX_MANIFEST_XML_NAMESPACE} _PUBLISHER_XPATH = './appx:Identity[@Publisher]' _PRODUCT_APPX_NAME = { + 'cobalt': 'cobalt', 'youtube': 'cobalt', 'mainappbeta': 'mainappbeta', 'youtubetv': 'youtubetv' } +_PRODUCT_CERT_PATH = { + 'cobalt': _EXTERNAL_CERT_PATH, + 'youtube': _INTERNAL_CERT_PATH, + 'mainappbeta': _INTERNAL_CERT_PATH, + 'youtubetv': _INTERNAL_CERT_PATH, +} _DEFAULT_SDK_BIN_DIR = 'C:\\Program Files (x86)\\Windows Kits\\10\\bin' _DEFAULT_WIN_SDK_VERSION = '10.0.22000.0' _SOURCE_SPLASH_SCREEN_SUB_PATH = os.path.join('internal', 'cobalt', 'browser', @@ -48,6 +60,7 @@ _DESTINATION__SPLASH_SCREEN_SUB_PATH = os.path.join('appx', 'content', 'data', 'web', 'splash_screen') _SPLASH_SCREEN_FILE = { + 'cobalt': '', 'youtube': 'youtube_splash_screen.html', 'youtubetv': 'ytv_splash_screen.html', 'mainappbeta': 'youtube_splash_screen.html', @@ -184,6 +197,7 @@ def Install(self, targets=None): @property def appx_folder_location(self): product_locations = { + 'cobalt': 'appx', 'youtube': 'appx', 'mainappbeta': 'mainappbeta-appx', 'youtubetv': 'youtubetv-appx' @@ -229,7 +243,7 @@ def _BuildPackage(self): logging.info('Running %s', ' '.join(makeappx_args)) subprocess.check_call(makeappx_args) - cert_path = _DEFAULT_CERT_PATH + cert_path = _PRODUCT_CERT_PATH[self.product] try: signtool_args = [ @@ -244,3 +258,40 @@ def _BuildPackage(self): raise # Rethrow original error with original stack trace. return self.appx_location + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + '-s', + '--source', + required=True, + help='Source directory from which to create a package.') + parser.add_argument( + '-o', + '--output', + default=os.path.join( + os.path.dirname(os.path.realpath(__file__)), 'package'), + help='Output directory to place the packaged app. 
Defaults to ./package/')
+  parser.add_argument(
+      '-p',
+      '--product',
+      default='cobalt',
+      help=(
+          'Product name. This must be one of [cobalt, youtube, youtubetv, '
+          'mainappbeta]. Any builds that are not internal to YouTube should '
+          'use cobalt.'))
+  args, _ = parser.parse_known_args()
+
+  if not os.path.exists(args.output):
+    os.makedirs(args.output)
+
+  Package(
+      publisher=None,
+      product=args.product,
+      source_dir=args.source,
+      output_dir=args.output)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/third_party/libwebp/BUILD.gn b/third_party/libwebp/BUILD.gn
index 47b775cca9c1b..8b6c4760f399c 100644
--- a/third_party/libwebp/BUILD.gn
+++ b/third_party/libwebp/BUILD.gn
@@ -13,6 +13,9 @@
 # limitations under the License.

 import("//third_party/libwebp/libwebp.gni")
+if (current_cpu == "arm" || current_cpu == "arm64") {
+  import("//build/config/arm.gni")
+}

 template("libwebp_lib") {
   static_library(target_name) {
@@ -110,7 +113,8 @@ config("libwebp_direct_config") {
     ]
     defines += [ "WEBP_HAVE_AVX2" ]
   }
-  if (current_cpu == "arm" || current_cpu == "arm64") {
+  if ((current_cpu == "arm" || current_cpu == "arm64") && arm_use_neon &&
+      (target_os == "android" || target_os == "linux")) {
     defines = [ "WEBP_HAVE_NEON" ]
     cflags = [ "-mfpu=neon" ]
   }
diff --git a/unit-test-results/drain_file_test_testoutput.xml b/unit-test-results/drain_file_test_testoutput.xml
new file mode 100644
index 0000000000000..713fb7449f454
--- /dev/null
+++ b/unit-test-results/drain_file_test_testoutput.xml
@@ -0,0 +1,16 @@
[16 added lines of gtest XML results for drain_file_test; the XML markup was lost in extraction and is not reproduced here.]
diff --git a/unit-test-results/eztime_test_testoutput.xml b/unit-test-results/eztime_test_testoutput.xml
new file mode 100644
index 0000000000000..353a0e9111834
--- /dev/null
+++ b/unit-test-results/eztime_test_testoutput.xml
@@ -0,0 +1,33 @@
[33 added lines of gtest XML results for eztime_test; the XML markup was lost in extraction and is not reproduced here.]