Skip to content

Commit

Permalink
[pre-RFC][BYOC] Marvell ML/AI Accelerator Integration
Browse files Browse the repository at this point in the history
  • Loading branch information
Joe (Chien-Chun) Chou committed Dec 18, 2021
1 parent 115919b commit 2b5c376
Show file tree
Hide file tree
Showing 30 changed files with 4,281 additions and 140 deletions.
2 changes: 2 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ tvm_option(USE_RELAY_DEBUG "Building Relay in debug mode..." OFF)
tvm_option(USE_RTTI "Build with RTTI" ON)
tvm_option(USE_MSVC_MT "Build with MT" OFF)
tvm_option(USE_MICRO "Build with Micro TVM support" OFF)
tvm_option(USE_MRVL "Build with MRVL TVM support" OFF)
tvm_option(INSTALL_DEV "Install compiler infrastructure" OFF)
tvm_option(HIDE_PRIVATE_SYMBOLS "Compile with -fvisibility=hidden." OFF)
tvm_option(USE_TF_TVMDSOOP "Build with TensorFlow TVMDSOOp" OFF)
Expand Down Expand Up @@ -455,6 +456,7 @@ include(cmake/modules/contrib/ArmComputeLib.cmake)
include(cmake/modules/contrib/TensorRT.cmake)
include(cmake/modules/contrib/VitisAI.cmake)
include(cmake/modules/contrib/Verilator.cmake)
include(cmake/modules/contrib/Mrvl.cmake)
include(cmake/modules/Git.cmake)
include(cmake/modules/LibInfo.cmake)
include(cmake/modules/RustExt.cmake)
Expand Down
1 change: 1 addition & 0 deletions cmake/modules/LibInfo.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ function(add_lib_info src_file)
TVM_INFO_USE_TARGET_ONNX="${USE_TARGET_ONNX}"
TVM_INFO_USE_ARM_COMPUTE_LIB="${USE_ARM_COMPUTE_LIB}"
TVM_INFO_USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR="${USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR}"
TVM_INFO_USE_MRVL="${USE_MRVL}"
TVM_INFO_INDEX_DEFAULT_I64="${INDEX_DEFAULT_I64}"
TVM_CXX_COMPILER_PATH="${CMAKE_CXX_COMPILER}"
)
Expand Down
56 changes: 56 additions & 0 deletions cmake/modules/contrib/Mrvl.cmake
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
include(ExternalProject)

if(USE_MRVL)
  if(MRVL_COMPILER_LIB_PATH)
    add_definitions(-DTVM_USE_MRVL_COMPILER_LIB=1)
    # Copy the 4 pre-built static libraries of the Marvell compiler backend
    # found under the MRVL_COMPILER_LIB_PATH directory into the build tree,
    # so they can be linked into the TVM runtime below.
    file(COPY ${MRVL_COMPILER_LIB_PATH}/libmrvlcompiler.a
         DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
    file(COPY ${MRVL_COMPILER_LIB_PATH}/libml.a
         DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
    file(COPY ${MRVL_COMPILER_LIB_PATH}/libnum.a
         DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
    file(COPY ${MRVL_COMPILER_LIB_PATH}/libisa.a
         DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
    # Collect the copied archives; GLOB runs at configure time, after the
    # copies above have completed, so all four are picked up.
    file(GLOB MRVL_RUNTIME_LIB
      ${CMAKE_CURRENT_BINARY_DIR}/libmrvlcompiler.a
      ${CMAKE_CURRENT_BINARY_DIR}/libml.a
      ${CMAKE_CURRENT_BINARY_DIR}/libisa.a
      ${CMAKE_CURRENT_BINARY_DIR}/libnum.a
    )
    # FIXME: list(APPEND TVM_LINKER_LIBS ${MRVL_LIB})
    message(STATUS "Build with 4 Mrvl lib *.a files: ${MRVL_RUNTIME_LIB}")
    list(APPEND TVM_RUNTIME_LINKER_LIBS ${MRVL_RUNTIME_LIB})
  endif()

  # Mrvl module: compile the Marvell BYOC codegen and runtime sources into TVM.
  message(STATUS "Build with Mrvl support")
  add_definitions(-DTVM_USE_MRVL=1)
  # FIXME: find_library(MRVL_LIB Mrvl)
  # FIXME: find_library(MRVL_RUNTIME_LIB Mrvl_runtime)
  file(GLOB RUNTIME_MRVL_SRCS
    src/relay/backend/contrib/mrvl/graph_executor_codegen_mrvl.cc
    src/relay/backend/contrib/mrvl/codegen.cc
    src/relay/backend/contrib/mrvl/drop_noop_transpose.cc
    src/runtime/contrib/mrvl/mrvl_runtime.cc
  )
  list(APPEND RUNTIME_SRCS ${RUNTIME_MRVL_SRCS})

endif(USE_MRVL)
9 changes: 9 additions & 0 deletions include/tvm/ir/expr.h
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,15 @@ class RelayExprNode : public BaseExprNode {
* \return The checked_type
*/
inline const Type& checked_type() const;

/*!
* \brief members to identify an expr node
*/
static int64_t _global_en_id;
mutable int64_t en_id;
RelayExprNode() { en_id = _global_en_id++; }
inline int64_t get_en_id() const { return en_id; }

/*!
* \brief Check if the inferred(checked) type of the Expr
* is backed by a TTypeNode and return it.
Expand Down
12 changes: 12 additions & 0 deletions include/tvm/ir/op.h
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,18 @@ class Op : public RelayExpr {
*/
TVM_DLL static const Op& Get(const String& op_name);

/*!
* \brief list all registered op names
*/
TVM_DLL static void ListAllOpNames();

/*!
* \brief get the name of an op, if it is registered.
* \param op Obj of an op
* \return op name in String, if it is registered.
*/
TVM_DLL static String GetOpName(const Op& op);

/*! \brief specify container node */
using ContainerType = OpNode;

Expand Down
2 changes: 2 additions & 0 deletions include/tvm/relay/expr.h
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ class TupleNode : public ExprNode {
tvm::Array<relay::Expr> fields;

void VisitAttrs(tvm::AttrVisitor* v) {
v->Visit("en_id", &en_id);
v->Visit("fields", &fields);
v->Visit("span", &span);
v->Visit("_checked_type_", &checked_type_);
Expand Down Expand Up @@ -315,6 +316,7 @@ class CallNode : public ExprNode {
tvm::Array<Type> type_args;

void VisitAttrs(tvm::AttrVisitor* v) {
v->Visit("en_id", &en_id);
v->Visit("op", &op);
v->Visit("args", &args);
v->Visit("attrs", &attrs);
Expand Down
5 changes: 5 additions & 0 deletions python/tvm/driver/tvmc/composite_target.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
from tvm.relay.op.contrib.ethosu import partition_for_ethosu
from tvm.relay.op.contrib.bnns import partition_for_bnns
from tvm.relay.op.contrib.vitis_ai import partition_for_vitis_ai
from tvm.relay.op.contrib.mrvl import partition_for_mrvl


from .common import TVMCException
Expand Down Expand Up @@ -76,6 +77,10 @@
"config_key": "relay.ext.vitis_ai.options",
"pass_pipeline": partition_for_vitis_ai,
},
"mrvl": {
"config_key": "relay.ext.mrvl.options",
"pass_pipeline": partition_for_mrvl,
},
}


Expand Down
5 changes: 5 additions & 0 deletions python/tvm/relay/backend/executor_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@ def __init__(
libmod_name,
params,
function_metadata,
external_graph_json_str=None,
):
assert isinstance(graph_json_str, string_types)
fcreate = get_global_func("tvm.graph_executor_factory.create")
Expand All @@ -177,6 +178,7 @@ def __init__(
self.executor = executor
self.module = fcreate(graph_json_str, libmod, libmod_name, *args)
self.graph_json = graph_json_str
self.external_graph_json = external_graph_json_str
self.lib = libmod
self.libmod_name = libmod_name
self.params = params
Expand All @@ -198,5 +200,8 @@ def get_graph_json(self):
def get_executor_config(self):
return self.graph_json

def get_external_graph_json(self):
    """Return the external graph JSON string passed at construction,
    or None when no external graph was provided."""
    return self.external_graph_json

def get_lib(self):
    """Return the runtime library module (``libmod``) held by this factory."""
    return self.lib
22 changes: 19 additions & 3 deletions python/tvm/relay/build_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ class BuildModule(object):
def __init__(self):
self.mod = _build_module._BuildModule()
self._get_graph_json = self.mod["get_graph_json"]
self._get_external_graph_json = self.mod["get_external_graph_json"]
self._get_module = self.mod["get_module"]
self._build = self.mod["build"]
self._optimize = self.mod["optimize"]
Expand Down Expand Up @@ -193,8 +194,11 @@ def build(
mod = self.get_module()
params = self.get_params()
executor_config = self.get_graph_json() if str(executor) == "graph" else None
external_executor_config = (
self.get_external_graph_json() if str(executor) == "graph" else None
)

return executor_config, mod, params
return executor_config, mod, params, external_executor_config

def optimize(self, mod, target=None, params=None):
"""
Expand Down Expand Up @@ -238,6 +242,10 @@ def get_graph_json(self):
"""Return the json file of the built program."""
return self._get_graph_json()

def get_external_graph_json(self):
    """Return the external graph json of the built program.

    Delegates to the ``get_external_graph_json`` function of the
    underlying build module.
    """
    return self._get_external_graph_json()

def get_module(self):
    """Return the built module, via the ``get_module`` function of the
    underlying build module."""
    return self._get_module()
Expand Down Expand Up @@ -446,7 +454,7 @@ def build(

with tophub_context:
bld_mod = BuildModule()
graph_json, runtime_mod, params = bld_mod.build(
graph_json, runtime_mod, params, external_graph_json = bld_mod.build(
mod=ir_mod,
target=target,
params=params,
Expand All @@ -472,7 +480,15 @@ def build(
)
elif str(executor) == "graph":
executor_factory = _executor_factory.GraphExecutorFactoryModule(
ir_mod, target, executor, graph_json, runtime_mod, mod_name, params, func_metadata
ir_mod,
target,
executor,
graph_json,
runtime_mod,
mod_name,
params,
func_metadata,
external_graph_json,
)
else:
assert False, "Executor " + executor + " not supported"
Expand Down
1 change: 1 addition & 0 deletions python/tvm/relay/op/contrib/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,4 @@
from .ethosn import *
from .tensorrt import *
from .cutlass import *
from .mrvl import *
Loading

0 comments on commit 2b5c376

Please sign in to comment.