
SONARPY-1902 Rule S6979: "torch.tensor" should be used instead of "torch.autograd.Variable" #1953

Merged
merged 1 commit on Sep 9, 2024
@@ -364,6 +364,7 @@ public static Iterable<Class> getChecks() {
TooManyLinesInFunctionCheck.class,
TooManyParametersCheck.class,
TooManyReturnsCheck.class,
TorchAutogradVariableShouldNotBeUsedCheck.class,
TrailingCommentCheck.class,
TrailingWhitespaceCheck.class,
TypeAliasAnnotationCheck.class,
@@ -0,0 +1,43 @@
/*
* SonarQube Python Plugin
* Copyright (C) 2011-2024 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.python.checks;

import org.sonar.check.Rule;
import org.sonar.plugins.python.api.PythonSubscriptionCheck;
import org.sonar.plugins.python.api.symbols.Symbol;
import org.sonar.plugins.python.api.tree.CallExpression;
import org.sonar.plugins.python.api.tree.Tree;

@Rule(key = "S6979")
public class TorchAutogradVariableShouldNotBeUsedCheck extends PythonSubscriptionCheck {
  private static final String MESSAGE = "Replace this call with a call to \"torch.tensor\".";
  private static final String TORCH_AUTOGRAD_VARIABLE = "torch.autograd.Variable";

  @Override
  public void initialize(Context context) {
    context.registerSyntaxNodeConsumer(Tree.Kind.CALL_EXPR, ctx -> {
      CallExpression callExpression = (CallExpression) ctx.syntaxNode();
      Symbol calleeSymbol = callExpression.calleeSymbol();
      if (calleeSymbol != null && TORCH_AUTOGRAD_VARIABLE.equals(calleeSymbol.fullyQualifiedName())) {
        ctx.addIssue(callExpression.callee(), MESSAGE);
      }
    });
  }
}
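For context, and not part of the change itself: the check matches on the callee's resolved fully qualified name rather than on the name as written, which is why the aliased-import cases in the test file further down are still expected to raise an issue. A minimal sketch, assuming PyTorch is installed, of why an aliased import is the same callable:

```python
import torch
from torch.autograd import Variable as V

# The alias is just another binding to the same object, so a check that keys on
# the fully qualified name "torch.autograd.Variable" covers V(...) calls as well.
print(V is torch.autograd.Variable)  # True
```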
@@ -0,0 +1,29 @@
<p>This rule raises an issue when a <code>torch.autograd.Variable</code> is instantiated.</p>
<h2>Why is this an issue?</h2>
<p>The PyTorch Variable API has been deprecated. The behavior of Variables is now provided by PyTorch tensors and can be controlled with the
<code>requires_grad</code> parameter.</p>
<p>The Variable API now returns tensors anyway, so there should not be any breaking changes.</p>
<h2>How to fix it</h2>
<p>Replace the call to <code>torch.autograd.Variable</code> with a call to <code>torch.tensor</code> and set the <code>requires_grad</code> parameter
to <code>True</code> if needed.</p>
<h3>Code examples</h3>
<h4>Noncompliant code example</h4>
<pre data-diff-id="1" data-diff-type="noncompliant">
import torch

x = torch.autograd.Variable(torch.tensor([1.0]), requires_grad=True) # Noncompliant
x2 = torch.autograd.Variable(torch.tensor([1.0])) # Noncompliant
</pre>
<h4>Compliant solution</h4>
<pre data-diff-id="1" data-diff-type="compliant">
import torch

x = torch.tensor([1.0], requires_grad=True)
x2 = torch.tensor([1.0])
</pre>
<h2>Resources</h2>
<h3>Documentation</h3>
<ul>
<li> PyTorch documentation - <a href="https://pytorch.org/docs/stable/autograd.html#variable-deprecated">Variable API</a> </li>
</ul>

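A side note on the rule description above: the equivalence it relies on can be checked directly, since a tensor created with <code>requires_grad=True</code> takes part in autograd just as a wrapped Variable did. A minimal sketch, assuming a current PyTorch install; the values are only illustrative:

```python
import torch

# Deprecated style: torch.autograd.Variable(torch.tensor([2.0]), requires_grad=True)
# Current style: request gradient tracking on the tensor itself.
x = torch.tensor([2.0], requires_grad=True)
y = (x ** 2).sum()
y.backward()       # gradient of x**2 is 2*x
print(x.grad)      # tensor([4.])
```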
@@ -0,0 +1,21 @@
{
  "title": "\"torch.tensor\" should be used instead of \"torch.autograd.Variable\"",
  "type": "CODE_SMELL",
  "status": "ready",
  "remediation": {
    "func": "Constant\/Issue",
    "constantCost": "2min"
  },
  "tags": [],
  "defaultSeverity": "Major",
  "ruleSpecification": "RSPEC-6979",
  "sqKey": "S6979",
  "scope": "All",
  "quickfix": "targeted",
  "code": {
    "impacts": {
      "MAINTAINABILITY": "MEDIUM"
    },
    "attribute": "CONVENTIONAL"
  }
}
@@ -245,6 +245,7 @@
"S6971",
"S6972",
"S6973",
"S6974",
"S6979"
]
}
@@ -0,0 +1,30 @@
/*
* SonarQube Python Plugin
* Copyright (C) 2011-2024 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.python.checks;

import org.junit.jupiter.api.Test;
import org.sonar.python.checks.utils.PythonCheckVerifier;

class TorchAutogradVariableShouldNotBeUsedCheckTest {
  @Test
  void test() {
    PythonCheckVerifier.verify("src/test/resources/checks/torchAutogradVariable.py", new TorchAutogradVariableShouldNotBeUsedCheck());
  }
}
40 changes: 40 additions & 0 deletions python-checks/src/test/resources/checks/torchAutogradVariable.py
@@ -0,0 +1,40 @@
def torch_import():
    import torch

    x6 = Variable(torch.tensor([15]))

    x = torch.autograd.Variable(torch.tensor([1.0]), requires_grad=True) # Noncompliant {{Replace this call with a call to "torch.tensor".}}
       #^^^^^^^^^^^^^^^^^^^^^^^
    x2 = torch.autograd.Variable(torch.tensor([1.0])) # Noncompliant
        #^^^^^^^^^^^^^^^^^^^^^^^

    x4 = torch.autograd.Variable() # Noncompliant

    # Compliant solution
    c_x3 = torch.tensor([1.0])

def torch_autograd_import_as():
    from torch.autograd import Variable as V
    x3 = V(torch.tensor([15])) # Noncompliant
        #^

def torch_alias_import():
    import torch as t
    x5 = t.autograd.Variable(torch.tensor([15])) # Noncompliant


def unrelated_import():
    from something.autograd import Variable
    x6 = Variable(torch.tensor([15]))

def multiple_imports():
    # Resulting symbol will be ambiguous, therefore no issue is raised
    from something.autograd import Variable
    x6 = Variable(torch.tensor([15]))
    from torch.autograd import Variable
    x6 = Variable(torch.tensor([15]))

def use_before_assignment():
    x6 = Variable(torch.tensor([15])) # Noncompliant
    from torch.autograd import Variable
    x6 = Variable(torch.tensor([15])) # Noncompliant
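For reference, and not part of the test resource above: the compliant rewrite for the aliased cases follows the same pattern as the rule's compliant example, building the tensor directly instead of wrapping one.

```python
import torch

x3 = torch.tensor([15])  # instead of V(torch.tensor([15]))
x5 = torch.tensor([15])  # instead of t.autograd.Variable(torch.tensor([15]))
```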