|
1 | 1 | import logging |
2 | | -import re |
3 | 2 | from functools import cached_property |
4 | 3 | from typing import Any, Literal |
5 | 4 |
|
6 | 5 | from asgiref.sync import sync_to_async |
7 | 6 |
|
8 | 7 | from codebase.api.callbacks import BaseCallback |
9 | | -from codebase.api.models import Issue, IssueAction, MergeRequest, Note, NoteableType, NoteAction, Project, User |
| 8 | +from codebase.api.models import ( |
| 9 | + Issue, |
| 10 | + IssueAction, |
| 11 | + MergeRequest, |
| 12 | + Note, |
| 13 | + NoteableType, |
| 14 | + NoteAction, |
| 15 | + Pipeline, |
| 16 | + PipelineBuild, |
| 17 | + Project, |
| 18 | + User, |
| 19 | +) |
10 | 20 | from codebase.base import MergeRequest as BaseMergeRequest |
11 | 21 | from codebase.clients import RepoClient |
12 | 22 | from codebase.tasks import address_issue_task, address_review_task, fix_pipeline_job_task, update_index_repository |
13 | 23 | from core.config import RepositoryConfig |
14 | | -from core.utils import generate_uuid |
15 | 24 |
|
16 | 25 | PIPELINE_JOB_REF_SUFFIX = "refs/merge-requests/" |
17 | 26 |
|
@@ -176,68 +185,73 @@ def related_merge_requests(self) -> list[BaseMergeRequest]: |
176 | 185 | return client.get_commit_related_merge_requests(self.project.path_with_namespace, commit_sha=self.checkout_sha) |
177 | 186 |
|
178 | 187 |
|
179 | | -class PipelineJobCallback(BaseCallback): |
| 188 | +class PipelineStatusCallback(BaseCallback): |
180 | 189 | """ |
181 | | - Gitlab Pipeline Job Webhook |
| 190 | + Gitlab Pipeline Status Webhook |
182 | 191 | """ |
183 | 192 |
|
184 | | - object_kind: Literal["build"] |
| 193 | + object_kind: Literal["pipeline"] |
185 | 194 | project: Project |
186 | | - sha: str |
187 | | - ref: str |
188 | | - build_id: int |
189 | | - build_name: str |
190 | | - build_allow_failure: bool |
191 | | - build_status: Literal[ |
192 | | - "created", "pending", "running", "failed", "success", "canceled", "skipped", "manual", "scheduled" |
193 | | - ] |
194 | | - build_failure_reason: str |
| 195 | + merge_request: MergeRequest | None = None |
| 196 | + object_attributes: Pipeline |
| 197 | + builds: list[PipelineBuild] |
195 | 198 |
|
196 | 199 | def model_post_init(self, __context: Any): |
197 | 200 | self._repo_config = RepositoryConfig.get_config(self.project.path_with_namespace) |
198 | 201 |
|
199 | 202 | def accept_callback(self) -> bool: |
200 | 203 | """ |
201 | | - Accept the webhook if the pipeline job failed due to a script failure and there are related merge requests. |
| 204 | +        Accept the callback if the pipeline has failed and contains a failed build to fix. |
202 | 205 | """ |
203 | 206 | return ( |
204 | | - not self.build_allow_failure |
205 | | - and self._repo_config.features.autofix_pipeline_enabled |
206 | | - and self.build_status == "failed" |
207 | | - # Only fix pipeline jobs that failed due to a script failure. |
208 | | - and self.build_failure_reason == "script_failure" |
209 | | - # Only fix pipeline jobs of the latest commit of the merge request. |
210 | | - and self.merge_request is not None |
211 | | - and self.merge_request.is_daiv() |
212 | | - and self.merge_request.sha == self.sha |
| 207 | + self._repo_config.features.autofix_pipeline_enabled |
| 208 | + and self.object_attributes.status == "failed" |
| 209 | + and self._first_failed_build is not None |
| 210 | + and self._merge_request is not None |
| 211 | + and self._merge_request.is_daiv() |
213 | 212 | ) |
214 | 213 |
|
215 | 214 | async def process_callback(self): |
216 | 215 | """ |
217 | | - Trigger the task to fix the pipeline job. |
| 216 | +        Trigger the task to fix the pipeline's first failed build. |
| 217 | +
|
| 218 | + Only one build is fixed at a time to avoid two or more fixes being applied simultaneously to the same files, |
| 219 | + which could lead to conflicts or a job being fixed with outdated code. |
218 | 220 | """ |
219 | | - if self.merge_request: |
| 221 | + if self.merge_request is not None and self._first_failed_build is not None: |
220 | 222 | await sync_to_async( |
221 | 223 | fix_pipeline_job_task.si( |
222 | 224 | repo_id=self.project.path_with_namespace, |
223 | 225 | ref=self.merge_request.source_branch, |
224 | | - merge_request_id=self.merge_request.merge_request_id, |
225 | | - job_id=self.build_id, |
226 | | - job_name=self.build_name, |
227 | | - thread_id=generate_uuid( |
228 | | - f"{self.project.path_with_namespace}{self.merge_request.merge_request_id}{self.build_name}" |
229 | | - ), |
| 226 | + merge_request_id=self.merge_request.iid, |
| 227 | + job_id=self._first_failed_build.id, |
| 228 | + job_name=self._first_failed_build.name, |
230 | 229 | ).delay |
231 | 230 | )() |
232 | 231 |
|
233 | 232 | @cached_property |
234 | | - def merge_request(self) -> BaseMergeRequest | None: |
| 233 | + def _merge_request(self) -> BaseMergeRequest | None: |
235 | 234 | """ |
236 | | - Get the merge request related to the job. |
| 235 | +        Get the merge request related to the pipeline to obtain associated labels and infer whether it is a DAIV MR. |
237 | 236 | """ |
238 | | - # The ref points to the source branch of a merge request. |
239 | | - match = re.search(rf"{PIPELINE_JOB_REF_SUFFIX}(\d+)(?:/\w+)?$", self.ref) |
240 | | - if match: |
241 | | - client = RepoClient.create_instance() |
242 | | - return client.get_merge_request(self.project.path_with_namespace, int(match.group(1))) |
| 237 | + client = RepoClient.create_instance() |
| 238 | + if self.merge_request is not None: |
| 239 | + return client.get_merge_request(self.project.path_with_namespace, self.merge_request.iid) |
243 | 240 | return None |
| 241 | + |
| 242 | + @cached_property |
| 243 | + def _first_failed_build(self) -> PipelineBuild | None: |
| 244 | + """ |
| 245 | + Get the first failed build of the pipeline. |
| 246 | + """ |
| 247 | + return next( |
| 248 | + ( |
| 249 | + build |
| 250 | + for build in self.builds |
| 251 | + if build.status == "failed" |
| 252 | + and not build.manual |
| 253 | + and not build.allow_failure |
| 254 | + and build.failure_reason == "script_failure" |
| 255 | + ), |
| 256 | + None, |
| 257 | + ) |
0 commit comments