Add FP16Act-FP6Weight Linear #223
Dr.CI classification results
{"FAILED":[{"workflowId":9072160645,"workflowUniqueId":89543087,"id":24928741838,"runnerName":"i-0486b6c52565181ae","authorEmail":"gau.nernst@yahoo.com.sg","name":"Run Regression Tests / test (CUDA Nightly, linux.g5.12xlarge.nvidia.gpu, --pre torch --index-url https://download.pytorc... / linux-job","jobName":"test (CUDA Nightly, linux.g5.12xlarge.nvidia.gpu, --pre torch --index-url https://download.pytorc... / linux-job","conclusion":"failure","completed_at":"2024-05-14T02:43:13Z","html_url":"https://github.com/pytorch/ao/actions/runs/9072160645/job/24928741838","head_branch":"fp6","pr_number":223,"head_sha":"a8b4dd3bce1c6821f9f8d9079ee329c0b633ba3c","head_sha_timestamp":"2024-05-14T01:13:03Z","failure_captures":["test/integration/test_integration.py::TestAutoQuant::test_autoquant_one_input_29_cuda"],"failure_lines":["FAILED test/integration/test_integration.py::TestAutoQuant::test_autoquant_one_input_29_cuda - TypeError: do_bench() missing 2 required positional arguments: 'fn_args' and 'fn_kwargs'"],"failure_context":["+ pytest test --verbose -s","+ pip install .","+ pip install -r dev-requirements.txt","+ pip install -r requirements.txt","+ pip install --pre torch --index-url https://download.pytorch.org/whl/nightly/cu121","+ python -m pip install --upgrade pip","+ PATH=/opt/rh/devtoolset-10/root/usr/bin/:/opt/conda/envs/venv/bin:/opt/conda/condabin:/opt/conda/bin:/usr/local/cuda-12.1/bin:/opt/rh/devtoolset-9/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin","+ export PATH=/opt/rh/devtoolset-10/root/usr/bin/:/opt/conda/envs/venv/bin:/opt/conda/condabin:/opt/conda/bin:/usr/local/cuda-12.1/bin:/opt/rh/devtoolset-9/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin","+ yum install -y devtoolset-10-binutils","+ echo '::group::Install newer objcopy that supports --set-section-alignment'","+ hash -r","+ '[' -n '' ']'"],"time":"2024-05-14T02:48:12.477548Z"}],"FLAKY":[],"BROKEN_TRUNK":[],"UNSTABLE":[]}