diff --git a/python-checks/src/main/java/org/sonar/python/checks/CheckList.java b/python-checks/src/main/java/org/sonar/python/checks/CheckList.java
index 4d21fcd49a..a4bdf1a6f4 100644
--- a/python-checks/src/main/java/org/sonar/python/checks/CheckList.java
+++ b/python-checks/src/main/java/org/sonar/python/checks/CheckList.java
@@ -364,6 +364,7 @@ public static Iterable<Class> getChecks() {
       TooManyLinesInFunctionCheck.class,
       TooManyParametersCheck.class,
       TooManyReturnsCheck.class,
+      TorchAutogradVariableShouldNotBeUsedCheck.class,
       TrailingCommentCheck.class,
       TrailingWhitespaceCheck.class,
       TypeAliasAnnotationCheck.class,
diff --git a/python-checks/src/main/java/org/sonar/python/checks/TorchAutogradVariableShouldNotBeUsedCheck.java b/python-checks/src/main/java/org/sonar/python/checks/TorchAutogradVariableShouldNotBeUsedCheck.java
new file mode 100644
index 0000000000..810a2784aa
--- /dev/null
+++ b/python-checks/src/main/java/org/sonar/python/checks/TorchAutogradVariableShouldNotBeUsedCheck.java
@@ -0,0 +1,43 @@
+/*
+ * SonarQube Python Plugin
+ * Copyright (C) 2011-2024 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.python.checks;
+
+import org.sonar.check.Rule;
+import org.sonar.plugins.python.api.PythonSubscriptionCheck;
+import org.sonar.plugins.python.api.symbols.Symbol;
+import org.sonar.plugins.python.api.tree.CallExpression;
+import org.sonar.plugins.python.api.tree.Tree;
+
+@Rule(key = "S6979")
+public class TorchAutogradVariableShouldNotBeUsedCheck extends PythonSubscriptionCheck {
+  private static final String MESSAGE = "Replace this call with a call to \"torch.tensor\".";
+  private static final String TORCH_AUTOGRAD_VARIABLE = "torch.autograd.Variable";
+
+  @Override
+  public void initialize(Context context) {
+    context.registerSyntaxNodeConsumer(Tree.Kind.CALL_EXPR, ctx -> {
+      CallExpression callExpression = (CallExpression) ctx.syntaxNode();
+      Symbol calleeSymbol = callExpression.calleeSymbol();
+      if (calleeSymbol != null && TORCH_AUTOGRAD_VARIABLE.equals(calleeSymbol.fullyQualifiedName())) {
+        ctx.addIssue(callExpression.callee(), MESSAGE);
+      }
+    });
+  }
+}
diff --git a/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/S6979.html b/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/S6979.html
new file mode 100644
index 0000000000..7b42671d51
--- /dev/null
+++ b/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/S6979.html
@@ -0,0 +1,29 @@
+<p>This rule raises an issue when a <code>torch.autograd.Variable</code> is instantiated.</p>
+<h2>Why is this an issue?</h2>
+<p>The PyTorch Variable API has been deprecated. The behavior of Variables is now provided by PyTorch tensors and can be controlled with the
+<code>requires_grad</code> parameter.</p>
+<p>The Variable API now returns tensors anyway, so migrating should not introduce any breaking changes.</p>
+<h2>How to fix it</h2>
+<p>Replace the call to <code>torch.autograd.Variable</code> with a call to <code>torch.tensor</code> and set the <code>requires_grad</code> attribute
+to <code>True</code> if needed.</p>
+<h3>Code examples</h3>
+<h4>Noncompliant code example</h4>
+<pre>
+import torch
+
+x = torch.autograd.Variable(torch.tensor([1.0]), requires_grad=True) # Noncompliant
+x2 = torch.autograd.Variable(torch.tensor([1.0])) # Noncompliant
+</pre>
+<h4>Compliant solution</h4>
+<pre>
+import torch
+
+x = torch.tensor([1.0], requires_grad=True)
+x2 = torch.tensor([1.0])
+</pre>
+<h2>Resources</h2>
+<h3>Documentation</h3>
+
+
+
diff --git a/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/S6979.json b/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/S6979.json
new file mode 100644
index 0000000000..0b9369fa01
--- /dev/null
+++ b/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/S6979.json
@@ -0,0 +1,21 @@
+{
+  "title": "\"torch.tensor\" should be used instead of \"torch.autograd.Variable\"",
+  "type": "CODE_SMELL",
+  "status": "ready",
+  "remediation": {
+    "func": "Constant\/Issue",
+    "constantCost": "2min"
+  },
+  "tags": [],
+  "defaultSeverity": "Major",
+  "ruleSpecification": "RSPEC-6979",
+  "sqKey": "S6979",
+  "scope": "All",
+  "quickfix": "targeted",
+  "code": {
+    "impacts": {
+      "MAINTAINABILITY": "MEDIUM"
+    },
+    "attribute": "CONVENTIONAL"
+  }
+}
diff --git a/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/Sonar_way_profile.json b/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/Sonar_way_profile.json
index ab1a262c0d..437bf5017c 100644
--- a/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/Sonar_way_profile.json
+++ b/python-checks/src/main/resources/org/sonar/l10n/py/rules/python/Sonar_way_profile.json
@@ -245,6 +245,7 @@
     "S6971",
     "S6972",
     "S6973",
-    "S6974"
+    "S6974",
+    "S6979"
   ]
 }
diff --git a/python-checks/src/test/java/org/sonar/python/checks/TorchAutogradVariableShouldNotBeUsedCheckTest.java b/python-checks/src/test/java/org/sonar/python/checks/TorchAutogradVariableShouldNotBeUsedCheckTest.java
new file mode 100644
index 0000000000..cbe0f463cf
--- /dev/null
+++ b/python-checks/src/test/java/org/sonar/python/checks/TorchAutogradVariableShouldNotBeUsedCheckTest.java
@@ -0,0 +1,30 @@
+/*
+ * SonarQube Python Plugin
+ * Copyright (C) 2011-2024 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.python.checks;
+
+import org.junit.jupiter.api.Test;
+import org.sonar.python.checks.utils.PythonCheckVerifier;
+
+class TorchAutogradVariableShouldNotBeUsedCheckTest {
+  @Test
+  void test() {
+    PythonCheckVerifier.verify("src/test/resources/checks/torchAutogradVariable.py", new TorchAutogradVariableShouldNotBeUsedCheck());
+  }
+}
diff --git a/python-checks/src/test/resources/checks/torchAutogradVariable.py b/python-checks/src/test/resources/checks/torchAutogradVariable.py
new file mode 100644
index 0000000000..b98d9b3948
--- /dev/null
+++ b/python-checks/src/test/resources/checks/torchAutogradVariable.py
@@ -0,0 +1,40 @@
+def torch_import():
+    import torch
+
+    x6 = Variable(torch.tensor([15]))
+
+    x = torch.autograd.Variable(torch.tensor([1.0]), requires_grad=True) # Noncompliant {{Replace this call with a call to "torch.tensor".}}
+       #^^^^^^^^^^^^^^^^^^^^^^^
+    x2 = torch.autograd.Variable(torch.tensor([1.0])) # Noncompliant
+        #^^^^^^^^^^^^^^^^^^^^^^^
+
+    x4 = torch.autograd.Variable() # Noncompliant
+
+    # Compliant solution
+    c_x3 = torch.tensor([1.0])
+
+def torch_autograd_import_as():
+    from torch.autograd import Variable as V
+    x3 = V(torch.tensor([15])) # Noncompliant
+        #^
+
+def torch_alias_import():
+    import torch as t
+    x5 = t.autograd.Variable(torch.tensor([15])) # Noncompliant
+
+
+def unrelated_import():
+    from something.autograd import Variable
+    x6 = Variable(torch.tensor([15]))
+
+def multiple_imports():
+    # Resulting symbol will be ambiguous, therefore no issue is raised
+    from something.autograd import Variable
+    x6 = Variable(torch.tensor([15]))
+    from torch.autograd import Variable
+    x6 = Variable(torch.tensor([15]))
+
+def use_before_assignment():
+    x6 = Variable(torch.tensor([15])) # Noncompliant
+    from torch.autograd import Variable
+    x6 = Variable(torch.tensor([15])) # Noncompliant