HipTensor Benchmarking Overhaul #276

Merged 20 commits on Oct 22, 2024
1 change: 1 addition & 0 deletions .gitignore
@@ -10,6 +10,7 @@ hiptensor-version.hpp

# Generated source file
test/*/configs/*.hpp
+test/*/configs/*/*.hpp

# Precompiled Headers
*.gch
5 changes: 3 additions & 2 deletions library/src/CMakeLists.txt
@@ -2,7 +2,7 @@
#
# MIT License
#
-# Copyright (C) 2023-2024 Advanced Micro Devices, Inc. All rights reserved.
+# Copyright (C) 2023-2025 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -24,7 +24,7 @@
#
###############################################################################

-find_package( composable_kernel 1.0.0 REQUIRED PATHS /opt/rocm /opt/rocm/ck $ENV{CK_DIR}/lib/cmake COMPONENTS device_contraction_operations device_reduction_operations device_other_operations)
+find_package( composable_kernel 1.0.0 REQUIRED PATHS $ENV{CK_DIR}/lib/cmake /opt/rocm /opt/rocm/ck COMPONENTS device_contraction_operations device_reduction_operations device_other_operations)
rocm_package_add_dependencies("composable_kernel >= 1.0.0" COMPONENT tests)

set(THREADS_PREFER_PTHREAD_FLAG ON)
@@ -80,6 +80,7 @@ set(HIPTENSOR_CORE_SOURCES
${CMAKE_CURRENT_SOURCE_DIR}/data_types.cpp
${CMAKE_CURRENT_SOURCE_DIR}/hip_device.cpp
${CMAKE_CURRENT_SOURCE_DIR}/handle.cpp
${CMAKE_CURRENT_SOURCE_DIR}/hiptensor_options.cpp
)

add_hiptensor_component(hiptensor_core ${HIPTENSOR_CORE_SOURCES})
11 changes: 8 additions & 3 deletions library/src/contraction/hiptensor_contraction.cpp
@@ -2,7 +2,7 @@
*
* MIT License
*
-* Copyright (C) 2023-2024 Advanced Micro Devices, Inc. All rights reserved.
+* Copyright (C) 2023-2025 Advanced Micro Devices, Inc. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
@@ -33,6 +33,8 @@
#include "hip_device.hpp"
#include "logger.hpp"

#include "hiptensor_options.hpp"

// Convert between vectors of void ptrs stored in opaque API objects
// to vectors of ContractionSolution ptrs with simple cast.
inline auto toContractionSolutionVec(std::vector<void*> const& v)
@@ -747,6 +749,9 @@ hiptensorStatus_t hiptensorContraction(const hiptensorHandle_t* handle,
// Perform contraction with timing if LOG_LEVEL_PERF_TRACE
if(logger->getLogMask() & HIPTENSOR_LOG_LEVEL_PERF_TRACE)
{
using hiptensor::HiptensorOptions;
auto& options = HiptensorOptions::instance();

std::tie(errorCode, time) = (*cSolution)(alpha,
A,
B,
@@ -771,8 +776,8 @@ hiptensorStatus_t hiptensorContraction(const hiptensorHandle_t* handle,
stream, // stream id
true, // time_kernel
0, // log_level
-0, // cold_niters
-1, // nrepeat
+options->coldRuns(), // cold_niters
+options->hotRuns(), // nrepeat
});

if(errorCode == HIPTENSOR_STATUS_SUCCESS)
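With this change, the perf-trace timing path no longer hard-codes 0 cold and 1 hot iteration; it pulls both counts from the HiptensorOptions singleton. The sketch below (not part of the diff) shows how a benchmark could set those counts before calling hiptensorContraction, assuming the singleton is obtained exactly as in the hunk above (HiptensorOptions::instance() returning a pointer-like handle) and that the internal hiptensor_options.hpp header is visible to the caller:

    #include "hiptensor_options.hpp" // internal header, as included by the hunk above

    void configureBenchmarkRuns()
    {
        using hiptensor::HiptensorOptions;
        auto& options = HiptensorOptions::instance();

        // Warm up with a few untimed runs, then average over many timed runs.
        options->setColdRuns(5);  // forwarded as cold_niters to the CK invoker
        options->setHotRuns(50);  // forwarded as nrepeat to the CK invoker
    }

The timed path is still gated on HIPTENSOR_LOG_LEVEL_PERF_TRACE being enabled on the logger, so these counts only take effect when perf tracing is on.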
105 changes: 105 additions & 0 deletions library/src/data_types.cpp
@@ -350,6 +350,111 @@ namespace hiptensor
return "HIP_TYPE_NONE";
}
}

std::string opTypeToString(hiptensorOperator_t opType)
{
if(opType == HIPTENSOR_OP_IDENTITY)
{
return "HIPTENSOR_OP_IDENTITY";
}
else if(opType == HIPTENSOR_OP_SQRT)
{
return "HIPTENSOR_OP_SQRT";
}
else if(opType == HIPTENSOR_OP_ADD)
{
return "HIPTENSOR_OP_ADD";
}
else if(opType == HIPTENSOR_OP_MUL)
{
return "HIPTENSOR_OP_MUL";
}
else if(opType == HIPTENSOR_OP_MAX)
{
return "HIPTENSOR_OP_MAX";
}
else if(opType == HIPTENSOR_OP_MIN)
{
return "HIPTENSOR_OP_MIN";
}
else
{
return "HIPTENSOR_OP_UNKNOWN";
}
}

std::string algoTypeToString(hiptensorAlgo_t algoType)
{
if(algoType == HIPTENSOR_ALGO_ACTOR_CRITIC)
{
return "HIPTENSOR_ALGO_ACTOR_CRITIC";
}
else if(algoType == HIPTENSOR_ALGO_DEFAULT)
{
return "HIPTENSOR_ALGO_DEFAULT";
}
else if(algoType == HIPTENSOR_ALGO_DEFAULT_PATIENT)
{
return "HIPTENSOR_ALGO_DEFAULT_PATIENT";
}
else
{
return "HIPTENSOR_ALGO_UNKNOWN";
}
}

std::string logLevelToString(hiptensorLogLevel_t logLevel)
{
if(logLevel == HIPTENSOR_LOG_LEVEL_OFF)
{
return "HIPTENSOR_LOG_LEVEL_OFF";
}
else if(logLevel == HIPTENSOR_LOG_LEVEL_ERROR)
{
return "HIPTENSOR_LOG_LEVEL_ERROR";
}
else if(logLevel == HIPTENSOR_LOG_LEVEL_PERF_TRACE)
{
return "HIPTENSOR_LOG_LEVEL_PERF_TRACE";
}
else if(logLevel == HIPTENSOR_LOG_LEVEL_PERF_HINT)
{
return "HIPTENSOR_LOG_LEVEL_PERF_HINT";
}
else if(logLevel == HIPTENSOR_LOG_LEVEL_HEURISTICS_TRACE)
{
return "HIPTENSOR_LOG_LEVEL_HEURISTICS_TRACE";
}
else if(logLevel == HIPTENSOR_LOG_LEVEL_API_TRACE)
{
return "HIPTENSOR_LOG_LEVEL_API_TRACE";
}
else
{
return "HIPTENSOR_LOG_LEVEL_UNKNOWN";
}
}

std::string workSizePrefToString(hiptensorWorksizePreference_t workSize)
{
if(workSize == HIPTENSOR_WORKSPACE_MIN)
{
return "HIPTENSOR_WORKSPACE_MIN";
}
else if(workSize == HIPTENSOR_WORKSPACE_RECOMMENDED)
{
return "HIPTENSOR_WORKSPACE_RECOMMENDED";
}
else if(workSize == HIPTENSOR_WORKSPACE_MAX)
{
return "HIPTENSOR_WORKSPACE_MAX";
}
else
{
return "HIPTENSOR_WORKSPACE_UNKNOWN";
}
}

} // namespace hiptensor

bool operator==(hipDataType hipType, hiptensorComputeType_t computeType)
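The new enum-to-string helpers complement the existing computeTypeToString and hipTypeToString utilities and are declared in the internal data_types.hpp header (last file in this diff). A minimal sketch of how a benchmark report might use them; the enum types come from the public hipTensor API, but the exact include paths are assumptions here:

    #include <iostream>

    #include <hiptensor/hiptensor.hpp> // assumed public header for the enum types
    #include "data_types.hpp"          // internal header declaring the helpers

    void printRunConfig(hiptensorOperator_t op,
                        hiptensorAlgo_t algo,
                        hiptensorWorksizePreference_t workPref)
    {
        // Emit readable names instead of raw enum integers.
        std::cout << "op: " << hiptensor::opTypeToString(op)
                  << ", algo: " << hiptensor::algoTypeToString(algo)
                  << ", worksize: " << hiptensor::workSizePrefToString(workPref)
                  << std::endl;
    }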
184 changes: 184 additions & 0 deletions library/src/hiptensor_options.cpp
@@ -0,0 +1,184 @@
/*******************************************************************************
*
* MIT License
*
* Copyright (C) 2023-2025 Advanced Micro Devices, Inc. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*******************************************************************************/

#include "hiptensor_options.hpp"
#include <hiptensor/hiptensor-version.hpp>

namespace hiptensor
{
HiptensorOptions::HiptensorOptions()
: mOstream()
, mOmitSkipped(false)
, mOmitFailed(false)
, mOmitPassed(false)
, mOmitCout(false)
, mUsingDefaultParams(true)
, mValidate(true)
, mHotRuns(1)
, mColdRuns(0)
, mInputFilename("")
, mOutputFilename("")
{
}

void HiptensorOptions::setOstream(std::string file)
{
mOstream.initializeStream(file);
}

void HiptensorOptions::setOmits(int mask)
{
if(mask & 1)
{
mOmitSkipped = true;
}
else
{
mOmitSkipped = false;
}

if(mask & 2)
{
mOmitFailed = true;
}
else
{
mOmitFailed = false;
}

if(mask & 4)
{
mOmitPassed = true;
}
else
{
mOmitPassed = false;
}

if(mask & 8)
{
mOmitCout = true;
}
else
{
mOmitCout = false;
}
}

void HiptensorOptions::setDefaultParams(bool val)
{
mUsingDefaultParams = val;
}

void HiptensorOptions::setValidation(std::string val)
{
std::transform(val.begin(), val.end(), val.begin(), ::toupper);
if(val.compare("ON") == 0)
{
mValidate = true;
}
else if(val.compare("OFF") == 0)
{
mValidate = false;
}
}

void HiptensorOptions::setHotRuns(int runs)
{
mHotRuns = runs;
}

void HiptensorOptions::setColdRuns(int runs)
{
mColdRuns = runs;
}

void HiptensorOptions::setInputYAMLFilename(std::string file)
{
mInputFilename = file;
}

void HiptensorOptions::setOutputStreamFilename(std::string file)
{
mOutputFilename = file;
}

HiptensorOStream& HiptensorOptions::ostream()
{
return mOstream;
}

bool HiptensorOptions::omitSkipped()
{
return mOmitSkipped;
}

bool HiptensorOptions::omitFailed()
{
return mOmitFailed;
}

bool HiptensorOptions::omitPassed()
{
return mOmitPassed;
}

bool HiptensorOptions::omitCout()
{
return mOmitCout;
}

bool HiptensorOptions::usingDefaultConfig()
{
return mUsingDefaultParams;
}

bool HiptensorOptions::performValidation()
{
return mValidate;
}

int32_t HiptensorOptions::hotRuns()
{
return mHotRuns;
}

int32_t HiptensorOptions::coldRuns()
{
return mColdRuns;
}

std::string HiptensorOptions::inputFilename()
{
return mInputFilename;
}

std::string HiptensorOptions::outputFilename()
{
return mOutputFilename;
}

} // namespace hiptensor
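setOmits() packs four flags into one mask: bit 0 omits skipped results, bit 1 omits failed results, bit 2 omits passed results, and bit 3 suppresses std::cout output; setValidation() accepts a case-insensitive "ON"/"OFF" string. A brief sketch of configuring the options for a timing-only run (the file name is hypothetical, and the singleton access is assumed to work as in the contraction hunk above):

    #include "hiptensor_options.hpp"

    void configureBenchmarkOutput()
    {
        using hiptensor::HiptensorOptions;
        auto& options = HiptensorOptions::instance();

        // Report only failures: omit skipped (bit 0) and passed (bit 2) results.
        options->setOmits(0b0101);

        // Skip CPU reference validation so only kernel timing is measured.
        options->setValidation("OFF");

        // Record the output file name; setOstream() actually opens the stream.
        options->setOutputStreamFilename("bench_results.csv"); // hypothetical name
        options->setOstream("bench_results.csv");
    }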
4 changes: 4 additions & 0 deletions library/src/include/data_types.hpp
@@ -110,6 +110,10 @@ namespace hiptensor

std::string computeTypeToString(hiptensorComputeType_t computeType);
std::string hipTypeToString(hipDataType hipType);
std::string opTypeToString(hiptensorOperator_t opType);
std::string algoTypeToString(hiptensorAlgo_t algoType);
std::string logLevelToString(hiptensorLogLevel_t);
std::string workSizePrefToString(hiptensorWorksizePreference_t workSize);
} // namespace hiptensor

bool operator==(hipDataType hipType, hiptensorComputeType_t computeType);