2 changes: 1 addition & 1 deletion README.md
@@ -4,7 +4,7 @@
[![Build Status](https://circleci.com/gh/mozilla/treeherder.svg?branch=master&style=shield)](https://app.circleci.com/pipelines/github/mozilla/treeherder)
![Node dependencies Status](https://shields.io/librariesio/github/mozilla/treeherder)
[![Documentation Status](https://readthedocs.org/projects/treeherder/badge/?version=latest)](https://treeherder.readthedocs.io/?badge=latest)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![Code style: ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://docs.astral.sh/ruff/formatter/)
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)

## Description
@@ -634,9 +634,9 @@ def modify_spy(alerts_arg, commented_bugs, new_bugs):
# Verify modify_alert_bugs was called (could be multiple times due to house_keeping)
# Find the call with non-empty alerts
non_empty_calls = [call for call in modify_calls if len(call) > 0]
assert len(non_empty_calls) >= 1, (
"modify_alert_bugs should be called with at least one non-empty alert list"
)
assert (
len(non_empty_calls) >= 1
), "modify_alert_bugs should be called with at least one non-empty alert list"

# Check the first non-empty call (the main manage_alerts call)
passed_alerts = non_empty_calls[0]
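The two layouts in this hunk are behaviorally identical; the formatter only changes where the condition and the message are wrapped. The one layout that actually changes behavior is putting the condition and the message inside a single pair of parentheses, which asserts a two-element tuple and can never fail. A small standalone sketch (not part of the PR) of both points:

```python
non_empty_calls = []

# Both layouts from the hunk above compile to the same statement: if the
# condition is false, AssertionError is raised with the message.
try:
    assert (
        len(non_empty_calls) >= 1
    ), "modify_alert_bugs should be called with at least one non-empty alert list"
except AssertionError as exc:
    print(exc)

# The layout to avoid is a single pair of parentheses around condition AND
# message: that asserts a two-element tuple, which is always truthy, so the
# check is silently skipped (CPython emits a SyntaxWarning for it).
assert (len(non_empty_calls) >= 1,
        "always-true tuple; this assert never fails")
```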
42 changes: 21 additions & 21 deletions treeherder/changelog/migrations/0001_initial.py
@@ -11,41 +11,41 @@ class Migration(migrations.Migration):

operations = [
migrations.CreateModel(
name='Changelog',
name="Changelog",
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('remote_id', models.CharField(max_length=255)),
('date', models.DateTimeField(db_index=True)),
('author', models.CharField(max_length=100)),
('owner', models.CharField(max_length=100)),
('project', models.CharField(max_length=100)),
('project_url', models.CharField(max_length=360)),
('message', models.CharField(max_length=360)),
('description', models.CharField(max_length=360)),
('type', models.CharField(max_length=100)),
('url', models.CharField(max_length=360)),
("id", models.AutoField(primary_key=True, serialize=False)),
("remote_id", models.CharField(max_length=255)),
("date", models.DateTimeField(db_index=True)),
("author", models.CharField(max_length=100)),
("owner", models.CharField(max_length=100)),
("project", models.CharField(max_length=100)),
("project_url", models.CharField(max_length=360)),
("message", models.CharField(max_length=360)),
("description", models.CharField(max_length=360)),
("type", models.CharField(max_length=100)),
("url", models.CharField(max_length=360)),
],
options={
'db_table': 'changelog_entry',
'unique_together': {('id', 'remote_id', 'type')},
"db_table": "changelog_entry",
"unique_together": {("id", "remote_id", "type")},
},
),
migrations.CreateModel(
name='ChangelogFile',
name="ChangelogFile",
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('name', models.SlugField(max_length=255)),
("id", models.AutoField(primary_key=True, serialize=False)),
("name", models.SlugField(max_length=255)),
(
'changelog',
"changelog",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name='files',
to='changelog.Changelog',
related_name="files",
to="changelog.Changelog",
),
),
],
options={
'db_table': 'changelog_file',
"db_table": "changelog_file",
},
),
]
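All 21 modified lines in this migration are quote normalization: ruff's formatter, like black, defaults to double-quoted string literals, and this file still used single quotes. The rewrite is purely cosmetic; a one-off sketch (not from the PR) showing the literals are equal values and the schema is unchanged:

```python
# Single- and double-quoted literals are the same string values, so the
# reformatted migration defines exactly the same tables and constraints.
assert 'changelog_entry' == "changelog_entry"
assert ('id', 'remote_id', 'type') == ("id", "remote_id", "type")
```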
6 changes: 3 additions & 3 deletions treeherder/config/settings.py
@@ -370,9 +370,9 @@

# Periodically publish runtime statistics on statsd (in minutes)
CELERY_STATS_PUBLICATION_DELAY = 20
assert 0 < CELERY_STATS_PUBLICATION_DELAY < 60 and 60 % 10 == 0, (
"Celery task must be a valid cron delay in minutes"
)
assert (
0 < CELERY_STATS_PUBLICATION_DELAY < 60 and 60 % 10 == 0
), "Celery task must be a valid cron delay in minutes"

CELERY_BEAT_SCHEDULE = {
# this is just a failsafe in case the Pulse ingestion misses something
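The reformat leaves the guard's logic as it was, and its second clause, `60 % 10 == 0`, is a constant that is always true, so only the `0 < delay < 60` range check constrains the setting. If the intent behind the message is that the delay divide an hour evenly, a hedged sketch of that stricter check (an assumption about intent, not something this PR changes) could read:

```python
CELERY_STATS_PUBLICATION_DELAY = 20

# Hypothetical tightening (not in the PR): require the delay to be a divisor
# of 60 so the publication period lines up with a */N cron expression.
assert (
    0 < CELERY_STATS_PUBLICATION_DELAY < 60
    and 60 % CELERY_STATS_PUBLICATION_DELAY == 0
), "Celery task must be a valid cron delay in minutes"
```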
12 changes: 6 additions & 6 deletions treeherder/etl/classification_loader.py
@@ -32,9 +32,9 @@ def process(self, pulse_job, root_url):
task_id = pulse_job["status"]["taskId"]

task_definition = get_task_definition(root_url, task_id)
assert "routes" in task_definition and len(task_definition["routes"]) > 0, (
"A route containing the push project and revision is needed to save the mozci classification"
)
assert (
"routes" in task_definition and len(task_definition["routes"]) > 0
), "A route containing the push project and revision is needed to save the mozci classification"
# Retrieving a Push object thanks to the project/revision parsed from the task first route
try:
push = self.get_push(task_definition["routes"][0])
@@ -53,9 +53,9 @@ def process(self, pulse_job, root_url):
# Saving the mozci classification in the database
results = dict(MozciClassification.CLASSIFICATION_RESULT)
classification = classification_json["push"]["classification"]
assert classification in results.keys(), (
"Classification result should be a value in BAD, GOOD, UNKNOWN"
)
assert (
classification in results.keys()
), "Classification result should be a value in BAD, GOOD, UNKNOWN"

logger.info(
"Storing mozci classification calculated as %s for push %s on repository %s",
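The second assertion tests membership against `dict(MozciClassification.CLASSIFICATION_RESULT)`. Assuming `CLASSIFICATION_RESULT` is a standard Django-style choices sequence of `(value, label)` pairs, the dict is keyed by the stored values, so the check compares the incoming classification against those values. A self-contained sketch of the pattern, with hypothetical values standing in for Treeherder's constants:

```python
# Illustrative stand-in for a Django-style choices sequence (hypothetical values).
CLASSIFICATION_RESULT = (
    ("BAD", "bad"),
    ("GOOD", "good"),
    ("UNKNOWN", "unknown"),
)

# dict() over (value, label) pairs keys the mapping by the stored value,
# so membership is tested against what the model would actually persist.
results = dict(CLASSIFICATION_RESULT)

classification = "GOOD"
assert classification in results, "Classification result should be a value in BAD, GOOD, UNKNOWN"
```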