Commit ff0ca78: fix lint
icemelon committed Jan 8, 2020
1 parent 5f11005
Showing 21 changed files with 309 additions and 158 deletions.
6 changes: 3 additions & 3 deletions include/tvm/relay/op_attr_types.h
@@ -268,9 +268,9 @@ class OpImplement : public ObjectRef {
   * \param target The build target.
   * \return The computation schedule.
   */
-  Schedule Schedule(const Attrs& attrs,
-                    const Array<Tensor>& outs,
-                    const Target& target);
+  tvm::Schedule Schedule(const Attrs& attrs,
+                         const Array<Tensor>& outs,
+                         const Target& target);
};

/*!
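Note: the return type is qualified as tvm::Schedule presumably because the method itself is named Schedule, so inside the class body the unqualified name would resolve to the member function rather than to the schedule type.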
18 changes: 15 additions & 3 deletions include/tvm/schedule.h
@@ -744,11 +744,17 @@ class SingletonNode : public IterVarRelationNode {

class SpecializedConditionNode;

/*!
* \brief Specialized condition to enable op specialization
*/
class SpecializedCondition : public ObjectRef {
public:
SpecializedCondition() {}
explicit SpecializedCondition(ObjectPtr<Object> n) : ObjectRef(n) {}

/*!
* \brief Get the current specialized condition.
* \return The current specialized condition.
*/
TVM_DLL static tvm::SpecializedCondition Current();

const SpecializedConditionNode* operator->() const;
@@ -759,14 +765,20 @@ class SpecializedCondition : public ObjectRef {
// enable with syntax.
friend class Internal;
friend class With<SpecializedCondition>;

/*! \brief Push a new specialized condition onto the thread local stack. */
TVM_DLL void EnterWithScope();

/*! \brief Pop a specialized condition off the thread local context stack. */
TVM_DLL void ExitWithScope();
};

/*! \brief Container for specialization conditions. */
class SpecializedConditionNode : public Object {
public:
/*!
* \brief List of conditions in conjunctive normal form (CNF).
* Each condition should be a simple expression, e.g., n > 16, m % 8 == 0, etc.,
* where n and m are tvm::Var that represent dimensions in the tensor shape.
*/
Array<Expr> clauses;

void VisitAttrs(AttrVisitor* v) {
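To make the clauses field concrete, here is a minimal Python sketch of CNF-style clauses over shape variables. It only builds the expressions with the era's tvm.var API; how they get attached to a SpecializedCondition is not shown in this diff, so that step is omitted here.

import tvm

# Shape variables that stand for tensor dimensions.
n = tvm.var("n")
m = tvm.var("m")

# Conjunctive normal form: the condition holds when ALL clauses hold.
# Each clause is a simple expression over the shape vars, as documented above.
clauses = [n > 16, m % 8 == 0]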
4 changes: 1 addition & 3 deletions python/tvm/autotvm/graph_tuner/base_graph_tuner.py
@@ -25,7 +25,7 @@
import tvm
from tvm import autotvm, relay
from tvm.autotvm.task import get_config
-from tvm.autotvm.task.topi_integration import deserialize_args, serialize_args
+from tvm.autotvm.task.topi_integration import serialize_args
from tvm.autotvm.record import encode, load_from_file
from tvm.autotvm.measure import MeasureResult, MeasureInput

@@ -50,11 +50,9 @@ def get_infer_layout(task_name):
    else:
        raise ValueError("Cannot find infer layout for task %s" % task_name)

-#@autotvm.template
@autotvm.register_customized_task("layout_transform")
def layout_transform(*args):
    """Autotvm layout transform template."""
-    args = deserialize_args(args)
    cfg = get_config()
    cfg.add_flop(-1)
    data = args[0]
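Dropping deserialize_args (along with its import above) is consistent with the new register_customized_task decorator: a template registered this way presumably receives its arguments directly rather than in serialized form, so only serialize_args is still needed in this module.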
2 changes: 1 addition & 1 deletion python/tvm/autotvm/record.py
@@ -26,9 +26,9 @@
import json
import time
import os
-import numpy as np
import itertools
from collections import OrderedDict
+import numpy as np

from .. import build, lower, target as _target

4 changes: 2 additions & 2 deletions python/tvm/autotvm/task/dispatcher.py
@@ -335,7 +335,7 @@ def _query_inside(self, target, workload):
        if key in self._best_user_defined:
            return self._best_user_defined[key]
        if key in self.best_by_model:
-            inp, res = self.best_by_model[key]
+            inp, _ = self.best_by_model[key]
            return inp.config

        # then try matching by target key
@@ -344,7 +344,7 @@ def _query_inside(self, target, workload):
        if key in self._best_user_defined:
            return self._best_user_defined[key]
        if key in self.best_by_targetkey:
-            inp, res = self.best_by_targetkey[key]
+            inp, _ = self.best_by_targetkey[key]
            return inp.config

        return None
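Both hunks above follow the same lookup order: user-defined configs win, then the best measured record for the key. A toy sketch of that fallback, not TVM code:

def query(best_user_defined, best_records, key):
    # User-defined configs take precedence over measured records.
    if key in best_user_defined:
        return best_user_defined[key]
    if key in best_records:
        inp, _ = best_records[key]  # the result half of the record is unused
        return inp.config
    return None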
13 changes: 10 additions & 3 deletions python/tvm/relay/backend/compile_engine.py
@@ -29,15 +29,19 @@
from .. import expr as _expr
from .. import op as _op
from .. import ty as _ty
-from ..expr_functor import ExprFunctor
+from ..expr_functor import ExprVisitor
from . import _backend

@register_relay_node
class CachedFunc(NodeBase):
"""Low-level tensor function to back a relay primitive function.
"""
    def __init__(self, target, func_name, inputs, outputs, schedule=None,
-                 lowered_funcs=[], shape_func_param_states=[]):
+                 lowered_funcs=None, shape_func_param_states=None):
+        if lowered_funcs is None:
+            lowered_funcs = []
+        if shape_func_param_states is None:
+            shape_func_param_states = []
        self.__init_handle_by_constructor__(
            _backend._make_CachedFunc, target, func_name, inputs, outputs,
            schedule, lowered_funcs, shape_func_param_states)
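The lowered_funcs and shape_func_param_states change fixes pylint's dangerous-default-value warning (W0102): a default list is created once, when the function is defined, and is then shared by every call. A standalone illustration:

def append_bad(item, acc=[]):  # one list shared by every call
    acc.append(item)
    return acc

def append_good(item, acc=None):  # fresh list per call, as in the fix above
    if acc is None:
        acc = []
    acc.append(item)
    return acc

print(append_bad(1), append_bad(2))    # [1, 2] [1, 2]
print(append_good(1), append_good(2))  # [1] [2]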
@@ -79,6 +83,7 @@ def _get_cache_key(source_func, target):


def get_shape(shape):
+    """Convert the shape to correct dtype and vars."""
    ret = []
    for dim in shape:
        if isinstance(dim, tvm.expr.IntImm):
@@ -92,7 +97,9 @@ def get_shape(shape):
return ret


-class ScheduleGetter(ExprFunctor):
+class ScheduleGetter(ExprVisitor):
+    """Get the schedule given a fused Relay function"""
+
    MAX_FUNC_NAME_LENGTH = 80

def __init__(self, target):
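Switching the base class from ExprFunctor to ExprVisitor fits how ScheduleGetter is used: a visitor walks the expression tree for its side effects and inherits a default traversal, while a functor maps every node kind to a return value and provides no default. A generic toy of the visitor shape, not Relay's actual API:

class ToyVisitor:
    def visit(self, node):
        # Dispatch on the node's type name; fall back to plain recursion.
        handler = getattr(self, "visit_" + type(node).__name__, self.generic_visit)
        return handler(node)

    def generic_visit(self, node):
        for child in getattr(node, "children", ()):
            self.visit(child)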
2 changes: 0 additions & 2 deletions python/tvm/relay/op/_reduce.py
@@ -17,8 +17,6 @@
"""Backend compiler related feature registration"""
from __future__ import absolute_import

-import topi
-
from topi.util import get_const_int, get_const_tuple
from . import op as _reg
from ...api import convert
1 change: 0 additions & 1 deletion python/tvm/relay/op/contrib/_contrib.py
@@ -18,7 +18,6 @@
"""Backend compiler related feature registration"""
from __future__ import absolute_import

-import topi
from .. import op as reg
from .. import strategy
from ..op import OpPattern
4 changes: 1 addition & 3 deletions python/tvm/relay/op/op.py
@@ -14,10 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-#pylint: disable=unused-argument
+#pylint: disable=unused-argument,invalid-name
"""The base node types for the Relay language."""
-import topi
-
from ..._ffi.function import _init_api

from ..base import register_relay_node
21 changes: 21 additions & 0 deletions python/tvm/relay/op/strategy/__init__.py
@@ -1,3 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# pylint: disable=wildcard-import
+"""Relay op strategies."""
+from __future__ import absolute_import as _abs
+
from .generic import *
from . import x86
from . import arm_cpu
9 changes: 5 additions & 4 deletions python/tvm/relay/op/strategy/arm_cpu.py
@@ -15,19 +15,20 @@
# specific language governing permissions and limitations
# under the License.
"""Definition of ARM CPU operator strategy."""
-# pylint: disable=invalid-name,unused-argument
+# pylint: disable=invalid-name,unused-argument,wildcard-import,unused-wildcard-import
from __future__ import absolute_import

import topi
from .generic import *
from .. import op as _op

@schedule_injective.register("arm_cpu")
-def schedule_injective(_, outs, target):
+def schedule_injective_arm_cpu(_, outs, target):
+    """schedule injective ops for arm cpu"""
    with target:
        return topi.arm_cpu.schedule_injective(outs)

@schedule_concatenate.register("arm_cpu")
-def schedule_concatenate(_, outs, target):
+def schedule_concatenate_arm_cpu(_, outs, target):
+    """schedule concatenate for arm cpu"""
    with target:
        return topi.arm_cpu.schedule_concatenate(outs)
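The renames matter because from .generic import * puts the generic schedule_injective and schedule_concatenate dispatchers into this module's namespace, and a def with the same name would silently rebind them. A toy model of the register-by-target-key pattern, not TVM's actual generic-function machinery:

def generic_func(default):
    # Wrap `default` in a dispatcher with per-target-key overrides.
    registry = {}
    def dispatcher(target_key, *args, **kwargs):
        impl = registry.get(target_key, default)
        return impl(target_key, *args, **kwargs)
    def register(key):
        def _register(func):
            registry[key] = func
            return func
        return _register
    dispatcher.register = register
    return dispatcher

@generic_func
def schedule_injective(target_key, outs):
    raise RuntimeError("no default schedule for %s" % target_key)

@schedule_injective.register("arm_cpu")
def schedule_injective_arm_cpu(target_key, outs):
    return "arm_cpu schedule for %s" % outs

# Naming the override schedule_injective instead would have rebound the
# dispatcher itself, which is exactly what the renames above avoid.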
