introvoyz041 committed · verified
Commit d49d1ea · 1 Parent(s): 8d72858

Migrated from GitHub

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. data/.arcconfig +7 -0
  2. data/.arclint +13 -0
  3. data/.coveragerc +5 -0
  4. data/.dockerignore +3 -0
  5. data/.pre-commit-config.yaml +87 -0
  6. data/.taskotron_task_dockerbuild.json +1 -0
  7. data/APIDOCS.apiary +620 -0
  8. data/Dockerfile +117 -0
  9. data/LICENSE +339 -0
  10. data/alembic.ini +68 -0
  11. data/conf/fedora-messaging-example.toml +18 -0
  12. data/conf/oauth2_client_secrets.json.example +11 -0
  13. data/conf/resultsdb.conf +20 -0
  14. data/conf/resultsdb.wsgi +4 -0
  15. data/conf/settings.py.example +95 -0
  16. data/entrypoint.sh +25 -0
  17. data/get-version.sh +42 -0
  18. data/gunicorn.cfg +5 -0
  19. data/init_db.sh +5 -0
  20. data/logo.png +3 -0
  21. data/migration.sh +8 -0
  22. data/openshift/resultsdb-test-template.yaml +289 -0
  23. data/poetry.lock +0 -0
  24. data/pyproject.toml +71 -0
  25. data/renovate.json +29 -0
  26. data/resultsdb/__init__.py +317 -0
  27. data/resultsdb/__main__.py +158 -0
  28. data/resultsdb/alembic/env.py +80 -0
  29. data/resultsdb/alembic/script.py.mako +24 -0
  30. data/resultsdb/alembic/versions/153c416322c2_create_indexes_on_foreign_keys.py +33 -0
  31. data/resultsdb/alembic/versions/15f5eeb9f635_initial_revision.py +25 -0
  32. data/resultsdb/alembic/versions/17ec41bd6e9a_added_uuid_column_to_the_job_table.py +28 -0
  33. data/resultsdb/alembic/versions/34760e10040b_add_aborted_outcome.py +76 -0
  34. data/resultsdb/alembic/versions/433d0b5b3b96_added_index_on_the_keyval_store.py +33 -0
  35. data/resultsdb/alembic/versions/4ace44a44bf_change_index_on_result_data_so_like_can_.py +35 -0
  36. data/resultsdb/alembic/versions/4bf1390f06d1_added_index_on_submit_time.py +27 -0
  37. data/resultsdb/alembic/versions/4dbe714897fe_remove_the_user_model.py +31 -0
  38. data/resultsdb/alembic/versions/540dbe71fa91_change_schema_to_v2_0_step_1.py +186 -0
  39. data/resultsdb/alembic/versions/978007ecd2b_changed_testcase_name_to_text.py +36 -0
  40. data/resultsdb/alembic/versions/cd581d0e83df_change_outcome_from_enum_to_string.py +37 -0
  41. data/resultsdb/alembic/versions/dbfab576c81_change_schema_to_v2_0_step_2.py +130 -0
  42. data/resultsdb/authorization.py +82 -0
  43. data/resultsdb/config.py +205 -0
  44. data/resultsdb/controllers/__init__.py +0 -0
  45. data/resultsdb/controllers/api_v2.py +661 -0
  46. data/resultsdb/controllers/api_v3.py +175 -0
  47. data/resultsdb/controllers/common.py +38 -0
  48. data/resultsdb/controllers/main.py +28 -0
  49. data/resultsdb/lib/__init__.py +0 -0
  50. data/resultsdb/messaging.py +271 -0
data/.arcconfig ADDED
@@ -0,0 +1,7 @@
+ {
+   "project_id" : "resultsdb",
+   "conduit_uri" : "https://phab.qa.fedoraproject.org",
+   "arc.land.onto.default" : "develop",
+   "arc.feature.start.default" : "develop",
+   "unit.engine" : "PytestTestEngine"
+ }
data/.arclint ADDED
@@ -0,0 +1,13 @@
+ {
+   "linters": {
+     "flake8": {
+       "type": "flake8",
+       "include": "(\\.py$)",
+       "severity.rules": {
+         "(E501)": "disabled",
+         "(^E)": "warning",
+         "(^F)": "error"
+       }
+     }
+   }
+ }
data/.coveragerc ADDED
@@ -0,0 +1,5 @@
+ [run]
+ branch = True
+
+ [report]
+ fail_under = 80
data/.dockerignore ADDED
@@ -0,0 +1,3 @@
+ .*_cache/
+ **/__pycache__/
+ *.egg-info/
data/.pre-commit-config.yaml ADDED
@@ -0,0 +1,87 @@
+ ---
+ ci:
+   autoupdate_schedule: monthly
+   skip:
+     - hadolint-docker
+     # renovate exceeds tier max size 250MiB on pre-commit.ci
+     # (due to huge node.js dependencies)
+     - renovate-config-validator
+
+ repos:
+   - repo: https://github.com/pre-commit/pre-commit-hooks
+     rev: v5.0.0
+     hooks:
+       - id: check-merge-conflict
+       - id: check-yaml
+       - id: debug-statements
+       - id: end-of-file-fixer
+       - id: trailing-whitespace
+
+   # Sort imports
+   - repo: https://github.com/pycqa/isort
+     rev: 6.0.1
+     hooks:
+       - id: isort
+         name: isort
+         args:
+           - --line-length=79
+           - --profile=black
+
+   # Remove unused imports, variables, statements
+   - repo: https://github.com/PyCQA/autoflake
+     rev: v2.3.1
+     hooks:
+       - id: autoflake
+
+   # Auto-update syntax
+   - repo: https://github.com/asottile/pyupgrade
+     rev: v3.20.0
+     hooks:
+       - id: pyupgrade
+         args:
+           - --py313-plus
+
+   # Linter and formatter
+   - repo: https://github.com/astral-sh/ruff-pre-commit
+     rev: v0.12.4
+     hooks:
+       - id: ruff
+         args:
+           # ignore: E501 Line too long
+           - --ignore=E501
+       - id: ruff-format
+
+   # Linter and formatter
+   - repo: https://github.com/Instagram/Fixit
+     rev: v2.1.0
+     hooks:
+       - id: fixit-fix
+
+   # Type linter
+   - repo: https://github.com/pre-commit/mirrors-mypy
+     rev: v1.17.0
+     hooks:
+       - id: mypy
+         args:
+           - --disable-error-code=import-untyped
+           - --ignore-missing-imports
+
+   # Security linter
+   - repo: https://github.com/pycqa/bandit
+     rev: 1.8.6
+     hooks:
+       - id: bandit
+         name: bandit
+         exclude: tests/
+
+   # Dockerfile linter
+   - repo: https://github.com/hadolint/hadolint
+     rev: v2.13.1-beta
+     hooks:
+       - id: hadolint-docker
+
+   # renovate.json validator
+   - repo: https://github.com/renovatebot/pre-commit-hooks
+     rev: 41.40.0
+     hooks:
+       - id: renovate-config-validator
data/.taskotron_task_dockerbuild.json ADDED
@@ -0,0 +1 @@
+ {"docker_project_name": "resultsdb", "docker_credentials_bucket_uuid": "1e77148e-64ab-11e8-8f59-525400ee7c53", "release_branch": "master"}
data/APIDOCS.apiary ADDED
@@ -0,0 +1,620 @@
+ [comment0]: # (Apiary only does GitHub synchronization. This is here so we can have the data outside of a "closed" app. Also, feel free to post patches against this file for docs changes.)
+ [comment1]: # (Last updated: Tue Jan 31, 17:55)
+
+ FORMAT: 1A
+ HOST: https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/
+
+ # ResultsDB 2.0
+
+ ResultsDB is an independent system for storing results, originally written as part of the [Taskotron](https://taskotron.fedoraproject.org/) automation tool.
+
+ Source code: https://bitbucket.org/account/user/fedoraqa/projects/RES
+
+ ## Changes since 1.0
+
+ * Removed `Jobs` and replaced them with `Groups` to remove the "execution status" (originally represented by `Job`) from ResultsDB,
+   while keeping the ability to group `Results`, as that can still make semantic sense
+ * `Result` can be a part of 0-n `Groups`
+ * UUID as the default `Group` identifier instead of `id`
+ * In the response data:
+     * `href` (pointer to `self`) is only present in the resource's data, not in the general queries that return collections, since the 'search' queries' `href` was not a permalink
+     * `ref_url` as a common "external URL" attribute
+         * `Result.ref_url` instead of `Result.log_url`
+         * `Testcase.ref_url` instead of `Testcase.url`
+ * Changes in the `Group` object
+     * `results` is a URL at which the `Results` in that particular `Group` can be retrieved
+     * `name` renamed to `description`
+ * Changes in the `Result` object
+     * `ref_url` instead of `log_url`
+     * `result_data` replaced with `data`
+     * `summary` replaced with `note`
+     * only `PASSED, FAILED, INFO, NEEDS_INSPECTION` are now valid `outcome` values
+     * `groups` is a list of `Group` uuids which the `Result` is part of
+ * When submitting a new `Result`, the `Testcase` and `Group` \[`resource`\] can be represented either by the unique identifier \[`uid`\]
+   (`Testcase.name`, `Group.uuid`), or by an `object` containing the `uid` and any other applicable `resource` attributes
+     * `resource` is identified by `uid` and:
+         * exists -> `resource` is linked to the `Result`
+         * does _not_ exist -> `resource` is created. Other attributes are set to default values
+     * `resource` is identified by `object` and:
+         * exists -> relevant attributes of the `resource` are updated to the provided values
+         * does _not_ exist -> `resource` is created, relevant attributes are set to the provided values
+ * Changes in the `Testcase` object
+     * `ref_url` instead of `url`
+ * Changes in querying:
+     * `*` (star) used as a wildcard instead of `%` (percent sign) in the `like:` filters
+     * Result
+         * added `testcases` keyword to filter by `Testcase.name`
+             * `like` filter allowed
+             * multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&testcases=name1,name2`
+     * Group
+         * `like` filter for `description`
+ * `_auth` placeholder for the Authorization/Authentication data in the request queries; ignored at the moment
+
+
+ # Results [/results]
+
+ `Result` is the most common entry in the database. We try to keep ResultsDB pretty "stupid" and only enforce the minimal common subset of attributes on
+ the `Result` object. For example, when creating a new `Result`, only the `outcome` and `testcase` parameters are required. Also, no specific meaning is really
+ assigned to any of the (other) parameters.
+
+ The other common attributes are `note`, to provide a short digest of the test run's details, and `ref_url`, which usually points to logs or artifacts.
+
+ Should you need to store additional data, use the `data` keyval store. The stored data can then be used to filter `Results` searches.
+ For example in Taskotron, `item` and `type` are used to represent "what was tested", where `type` could be `koji_build`, `bodhi_update`, `compose`, `docker_image` ...
+ and `item` is then the (reasonable) identifier of said "item under test".
+
+ We advise against using `_expand`, `_auth`, `_fields`, `_sort` and `_distinct_on` as key names - although ResultsDB will store and return the data, these keys might not be
+ query-able for searching the `Results` collection, as they are reserved for future API functionality.
+
+ Throughout all the `Result` instances, there is a `href` attribute that represents a link to self.
+
+ + Attributes (Result GET)
+
+
+ ## Retrieve a Result [GET /results/{id}]
+
+ Retrieve a single `Result` based on the `id`.
+
+ + Request .../results/7484989
+     + Parameters
+         + id: 7484989
+
+ + Response 200 (application/json)
+
+         {
+             "id": 7484989,
+             "outcome": "PASSED",
+             "testcase": {
+                 "name": "dist.rpmlint",
+                 "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues",
+                 "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint"
+             },
+             "note": "0 errors, 30 warnings",
+             "ref_url": "https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log",
+             "submit_time": "2016-08-15T13:29:06",
+             "groups": ["27f94e36-62ec-11e6-83fd-525400d7d6a4"],
+             "data": {
+                 "item": ["koschei-1.7.2-1.fc24"],
+                 "type": ["koji_build"],
+                 "arch": ["x86_64","noarch"]
+             },
+             "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results/7484989"
+         }
+
+ + Response 404 (application/json)
+
+         {
+             "message": "Result not found"
+         }
+
+
+ ## Browse the Results collection [GET /results{?page,limit,outcome,testcases,groups,since,keyval}]
+
+ Collection of all the Results. Results are returned in paginated format, and references to the next and previous page (if applicable) are
+ given as a part of the response.
+
+ The search can be filtered by time, by the `outcome`, or by any of the key-values in the `data` store.
+ By default, the `data` values are matched for equality, but a `like` filter is available to allow for wildcard searches.
+
+ Examples are provided in the Parameters section of the documentation.
+
+ + Parameters
+     + page: 8 (number, optional)
+         + Default: 0
+     + limit: 20 (number, optional)
+         + Default: 20
+     + outcome: PASSED (enum, optional)
+         Multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&outcome=PASSED,FAILED`
+         + Members
+             + PASSED
+             + FAILED
+             + INFO
+             + NEEDS_INSPECTION
+     + testcases: dist.rpmlint (string, optional)
+         - Multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&testcases=dist.rpmlint,dist.depcheck`
+         - `like` filter with `*` as wildcard: `...&testcases:like=dist.*`
+     + groups: `27f94e36-62ec-11e6-83fd-525400d7d6a4` (string, optional)
+         - Multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&groups=uuid1,uuid2`
+     + since: `2016-08-15T13:00:00` (string)
+         Date (or datetime) in ISO8601 format.
+         To specify a range, separate the start and end date(time) by a comma: `...&since=2016-08-14,2016-08-15T13:42:57`
+     + keyval (string)
+         - Any key-value pair in `Result.data`. Replace `keyval` with the key's name: `...&item=koschei-1.7.2-1.fc24`
+         - Multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&arch=x86_64,noarch`
+         - `like` filter with `*` as wildcards: `...&item:like=koschei*fc24*`
+         - Multiple key-value pairs provide an `and` filter, e.g. to search for all `Results` with `item` like `koschei*fc24*` and `arch` being either `noarch` or `x86_64`: `...&item:like=koschei*fc24*&arch=noarch`
+
+ + Request .../results?item:like=koschei*fc24*&outcome=PASSED,FAILED&since=2016-08-15T13:00:00,2016-08-15T13:30:00
+     + Parameters
+         + `item:like`: koschei*fc24*
+         + outcome: PASSED,FAILED
+         + since: 2016-08-15T13:00:00,2016-08-15T13:30:00
+
+ + Response 200 (application/json)
+
+         {
+             "next": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results?item:like=koschei*fc24*&outcome=PASSED,FAILED&since=2016-08-15T13:00:00,2016-08-15T13:30:00&page=1",
+             "prev": null,
+             "data": [
+                 {
+                     "id": 7484989,
+                     "outcome": "PASSED",
+                     "testcase": {
+                         "name": "dist.rpmlint",
+                         "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues",
+                         "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint"
+                     },
+                     "groups": ["27f94e36-62ec-11e6-83fd-525400d7d6a4"],
+                     "note": "0 errors, 30 warnings",
+                     "submit_time": "2016-08-15T13:29:06",
+                     "ref_url": "https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log",
+                     "data": {
+                         "item": ["koschei-1.7.2-1.fc24"],
+                         "type": ["koji_build"],
+                         "arch": ["x86_64","noarch"]
+                     },
+                     "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results/7484989"
+                 }
+             ]
+         }
+
+
+ ## Get a list of latest Results for a specified filter [GET /results/latest{?keyval,testcases,groups,since,_distinct_on}]
183
+
184
+ Especially with automation in mind, a simpe query to get the latest `Results` of all the `Testcases` based on a filter
185
+ makes a lot of sense. For example Koji could be interested in data like "All current results for the `koji_build` `koschei-1.7.2-1.fc24`", without
186
+ the need to browse the whole Results collection.
187
+
188
+ This endpoint does just that - takes filter parameters, and returns the most recent result for all the relevant `Testcases`.
189
+ Only `Testcases` with at least one `Result` that meet the filter are present - e.g. if ResultsDB contained `dist.rpmlint` and `dist.rpmgrill`
190
+ `Testcases`, but there was only a `dist.rpmlint` `Result` for the `koschei-1.7.2-1.fc24` `koji_build`, just `dist.rpmlint`'s `Result` would be returned.
191
+
192
+ An additional available parameter is `_distinct_on`, if specified allows the user to group by additional fields (example: `scenario`).
193
+
194
+ + Parameters
195
+ + keyval (string)
196
+ - Any key-value pair in `Result.data`. Replace `keyval` with the key's name: `...&item=koschei-1.7.2-1.fc24`
197
+ - Multiple values can be provided, separate by commas to get `or` filter based on all the values provided: `...&arch=x86_64,noarch`
198
+ - `like` filter with `*` as wildcards: `...&item:like=koschei*fc24*`
199
+ - Multiple key-value pairs provide `and` filter, e.g. to search for all `Results` with `item` like `koschei*fc24*` and `arch` being either `noarch` or `x86_64`: `...&item:like=koschei*fc24*&arch=noarch`
200
+ + testcases (string, optional)
201
+ - Use to narrow down `Testcases` of interest. By default, all `Testcases` are searched for `Results`
202
+ - Multiple values can be provided, separate by comma to get `or` filter based on all the values provided: `...&testcases=dist.rpmlint,dist.depcheck`
203
+ - `like` filter with `*` as wildcards: `...&testcases:like=dist.*`
204
+ + groups: `27f94e36-62ec-11e6-83fd-525400d7d6a4` (string, optional)
205
+ - Multiple values can be provided, separate by commas to get `or` filter based on all the values provided: `...&groups=uuid1,uuid2`
206
+ + since: `2016-08-15T13:00:00` (string)
207
+ Date (or datetime) in ISO8601 format.
208
+ To specify range, separate start and end date(time) by comma: `...&since=2016-08-14,2016-08-15T13:42:57`
209
+ + _distinct_on: `scenario` (string, optional)
210
+ - The value can be any `key` in `Result.data`. Example: `...&_distinct_on=scenario`
211
+ - Multiple values can be provided, separate by comma. Example: `...&_distinct_on=scenario,item`
212
+
213
+ + Request `.../results/latest?item=koschei-1.7.2-1.fc24&type=koji_build`
214
+ + Parameters
215
+ + item: koschei-1.7.2-1.fc24
216
+ + type: koji_build
217
+
218
+ + Response 200 (application/json)
219
+
220
+ {
221
+ "data":[
222
+ {
223
+ "id":7484989,
224
+ "outcome":"PASSED",
225
+ "testcase":{
226
+ "name":"dist.rpmlint",
227
+ "ref_url":"https://fedoraproject.org/wiki/Common_Rpmlint_issues",
228
+ "href":"https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint"
229
+ },
230
+ "groups":["27f94e36-62ec-11e6-83fd-525400d7d6a4"],
231
+ "note":"0 errors, 30 warnings",
232
+ "submit_time":"2016-08-15T13:29:06",
233
+ "ref_url":"https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log",
234
+ "data":{
235
+ "item":["koschei-1.7.2-1.fc24"],
236
+ "type":["koji_build"],
237
+ "arch":["x86_64","noarch"]
238
+ },
239
+ "href":"https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results/7484989"
240
+ }
241
+ ]
242
+ }
243
+
244
+ ## Create new Result [POST /results]
245
+
246
+ To create new `Result`, simply provide a JSON object containing the `outcome` and `testcase` fields.
247
+ Should you want to store more information, you can make use of `groups`, `note`, `ref_url` and `data` (the key-value store).
248
+
249
+ When a new `Result` is created, it is assigned an unique `id` and `submit_time` (UTC time of the `Result` submission, unless it is overridden in the request) by the API.
250
+
251
+ + Attributes (Result POST)
252
+
253
+ + Request Using just the testcase/group identifiers (application/json)
254
+
255
+ {
256
+ "outcome":"PASSED",
257
+ "testcase":"dist.rpmlint",
258
+ "groups":["27f94e36-62ec-11e6-83fd-525400d7d6a4"],
259
+ "note":"0 errors, 30 warnings",
260
+ "data":{
261
+ "item":"koschei-1.7.2-1.fc24",
262
+ "type":"koji_build",
263
+ "arch":["x86_64","noarch"]
264
+ },
265
+ "ref_url":"https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log",
266
+ "_auth": null
267
+ }
268
+
269
+ + Request Using the whole testcase/group objects (application/json)
270
+
271
+ {
272
+ "outcome":"PASSED",
273
+ "testcase":{"name":"dist.rpmlint", "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues"},
274
+ "groups":[{"uuid":"27f94e36-62ec-11e6-83fd-525400d7d6a4", "description":"Taskotron job on koji_build koschei-1.7.2-1.fc24"}],
275
+ "note":"0 errors, 30 warnings",
276
+ "data":{
277
+ "item":"koschei-1.7.2-1.fc24",
278
+ "type":"koji_build",
279
+ "arch":["x86_64","noarch"]
280
+ },
281
+ "ref_url":"https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log",
282
+ "_auth": null
283
+ }
284
+
285
+ + Response 201 (application/json)
286
+
287
+ {
288
+ "id":7484989,
289
+ "outcome":"PASSED",
290
+ "testcase":{
291
+ "name":"dist.rpmlint",
292
+ "ref_url":"https://fedoraproject.org/wiki/Common_Rpmlint_issues",
293
+ "href":"https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint"
294
+ },
295
+ "groups":["27f94e36-62ec-11e6-83fd-525400d7d6a4"],
296
+ "note":"0 errors, 30 warnings",
297
+ "submit_time":"2016-08-15T13:29:06",
298
+ "ref_url":"https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log",
299
+ "data":{
300
+ "item":["koschei-1.7.2-1.fc24"],
301
+ "type":["koji_build"],
302
+ "arch":["x86_64","noarch"]
303
+ },
304
+ "href":"https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results/7484989"
305
+ }
306
+
307
+ + Response 400 (application/json)
308
+
309
+ When any of the required attributes is missing.
310
+
311
+ + Body
312
+
313
+ {
314
+ "message": "Missing data"
315
+ }
316
+
317
+
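A minimal submission sketch under the same assumptions (Python with `requests`); only `outcome` and `testcase` are required, everything else shown here is optional:

```python
import requests

API = "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0"

# Only 'outcome' and 'testcase' are required; the object form of 'testcase'
# also creates/updates the Testcase record as a side effect.
payload = {
    "outcome": "PASSED",
    "testcase": {
        "name": "dist.rpmlint",
        "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues",
    },
    "note": "0 errors, 30 warnings",
    "data": {"item": "koschei-1.7.2-1.fc24", "type": "koji_build"},
}

resp = requests.post(f"{API}/results", json=payload, timeout=30)
resp.raise_for_status()   # a 400 means a required attribute was missing
print(resp.json()["id"])  # 'id' and 'submit_time' are assigned by the API
```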
+ # Result Groups [/groups]
+
+ As not all `Results` are necessarily standalone, the `Group` resource can be used to organize them into any number of groups.
+
+ Each group is identified by a `uuid`, and when you create a new group, you can either provide your own UUID ([RFC4122](https://tools.ietf.org/html/rfc4122),
+ [Wiki](https://en.wikipedia.org/wiki/Universally_unique_identifier)), or let ResultsDB create one for you.
+
+ Groups can be created either in advance, using this resource's methods, or on the fly during `Result` submission. If you use the latter,
+ but still want to group several `Results` together, we suggest using UUID
+ [Version 3](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_3_.28MD5_hash_.26_namespace.29) or
+ [Version 5](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_5_.28SHA-1_hash_.26_namespace.29),
+ and building your own namespaces in order to identify the groups with a predictable UUID.
+
+ On top of the `uuid`, a `Group` can also contain a `description` to explain the `Group`'s purpose, and a `ref_url` to point to an outside resource,
+ like execution state or related documentation.
+
+
+ + Attributes (Group GET)
+
+
+ ## Retrieve a Group [GET /groups/{uuid}]
+
+ Retrieve a single `Group` based on the `uuid`.
+
+ + Request .../groups/27f94e36-62ec-11e6-83fd-525400d7d6a4
+     + Parameters
+         + uuid: 27f94e36-62ec-11e6-83fd-525400d7d6a4
+
+ + Response 200 (application/json)
+
+         {
+             "uuid": "27f94e36-62ec-11e6-83fd-525400d7d6a4",
+             "description": "Taskotron job on koji_build koschei-1.7.2-1.fc24",
+             "ref_url": "https://taskotron-dev.fedoraproject.org/execdb/jobs/27f94e36-62ec-11e6-83fd-525400d7d6a4",
+             "results": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results?group=27f94e36-62ec-11e6-83fd-525400d7d6a4",
+             "results_count": 1,
+             "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/groups/27f94e36-62ec-11e6-83fd-525400d7d6a4"
+         }
+
+
+ + Response 404 (application/json)
+
+         {
+             "message": "Group not found"
+         }
+
+
+ ## Browse the Group collection [GET /groups{?page,limit,description,uuid}]
+
+ Collection of all the `Groups`.
+ `Groups` are returned in paginated format, and references to the next and previous page (if applicable) are given as a part of the response.
+
+ + Parameters
+     + page: 8 (number, optional)
+         + Default: 0
+     + limit: 20 (number, optional)
+         + Default: 20
+     + description: `Taskotron job` (string, optional)
+         - Multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&description=Taskotron job,OpenQA job`
+         - `like` filter with `*` as wildcards: `...&description:like=Taskotron*`
+     + uuid: `27f94e36-62ec-11e6-83fd-525400d7d6a4` (string, optional)
+         - Multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&uuid=27f94e36-62ec-11e6-83fd-525400d7d6a4,...`
+
+ + Response 200 (application/json)
+
+         {
+             "next": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/groups?page=1",
+             "prev": null,
+             "data": [
+                 {
+                     "uuid": "27f94e36-62ec-11e6-83fd-525400d7d6a4",
+                     "description": "Taskotron job on koji_build koschei-1.7.2-1.fc24",
+                     "ref_url": "https://taskotron-dev.fedoraproject.org/execdb/jobs/27f94e36-62ec-11e6-83fd-525400d7d6a4",
+                     "results": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results?group=27f94e36-62ec-11e6-83fd-525400d7d6a4",
+                     "results_count": 1,
+                     "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/groups/27f94e36-62ec-11e6-83fd-525400d7d6a4"
+                 }
+             ]
+         }
+
+
+ ## Create new Group [POST /groups]
+
+ To create a new `Group`, you can simply post an empty JSON object, and a new `Group` with a unique UUID
+ [Version 1](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_.28date-time_.26_MAC_address.29)
+ will be created. Should you want to store more information, you can make use of `description` and `ref_url`.
+
+ You can also provide your own `uuid`, in which case we strongly suggest using UUID
+ [Version 3](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_3_.28MD5_hash_.26_namespace.29) or
+ [Version 5](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_5_.28SHA-1_hash_.26_namespace.29),
+ and building your own namespaces in order to identify the groups with a predictable, but non-conflicting, UUID; see the sketch after this section.
+
+ Note that when a `uuid` already exists, a new entry will not be created; rather, the old one will get updated with
+ the new `description` or `ref_url`, if set in the JSON data.
+
+ + Attributes (Group POST)
+
+ + Request (application/json)
+
+         {
+             "uuid": "27f94e36-62ec-11e6-83fd-525400d7d6a4",
+             "description": "Taskotron job on koji_build koschei-1.7.2-1.fc24",
+             "ref_url": "https://taskotron-dev.fedoraproject.org/execdb/jobs/27f94e36-62ec-11e6-83fd-525400d7d6a4"
+         }
+
+ + Response 201 (application/json)
+
+         {
+             "uuid": "27f94e36-62ec-11e6-83fd-525400d7d6a4",
+             "description": "Taskotron job on koji_build koschei-1.7.2-1.fc24",
+             "ref_url": "https://taskotron-dev.fedoraproject.org/execdb/jobs/27f94e36-62ec-11e6-83fd-525400d7d6a4",
+             "results": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results?group=27f94e36-62ec-11e6-83fd-525400d7d6a4",
+             "results_count": 0,
+             "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/groups/27f94e36-62ec-11e6-83fd-525400d7d6a4"
+         }
+
+
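A sketch of the suggested predictable-UUID approach using Python's standard `uuid` module; the namespace and naming scheme here are hypothetical, the point is that the same inputs always yield the same Version 5 UUID, so independent submitters converge on one group:

```python
import uuid

import requests

API = "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0"

# A private namespace (hypothetical) plus a stable name yields the same UUID
# on every run, with no coordination between submitters needed.
NAMESPACE = uuid.uuid5(uuid.NAMESPACE_DNS, "taskotron.fedoraproject.org")
group_uuid = str(uuid.uuid5(NAMESPACE, "koji_build/koschei-1.7.2-1.fc24"))

# POSTing an existing uuid updates description/ref_url instead of failing.
resp = requests.post(
    f"{API}/groups",
    json={
        "uuid": group_uuid,
        "description": "Taskotron job on koji_build koschei-1.7.2-1.fc24",
    },
    timeout=30,
)
print(resp.json()["results_count"])
```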
+ # Testcases [/testcases]
+
+ Each `Result` has to have one `Testcase` assigned.
+
+ In ResultsDB, `Testcases` can be organized into namespaces using a separator in the `Testcase.name` attribute. Although you can use anything
+ as a separator, we suggest `.` (dot), as it does not have to be URL-encoded and is a fairly common separator for the use case.
+
+ ResultsDB is not (supposed to be) smart enough to assign any actual meaning to the `Testcase.name`, and thus cannot decide on its own
+ what is a namespace and what is a subcheck, e.g. `dist.` is a namespace in Taskotron, but `dist.rpmgrill` is a check name, which can then
+ contain subchecks that get represented as `dist.rpmgrill.subcheck_foobar`. It is always the consumer's responsibility to make use of
+ the (possible) namespacing in the way correct for the specific problem.
+
+ That said, ResultsDB is able to perform namespace searches, and possibly other operations that are not implemented yet (e.g. an auth-based
+ ability to submit `Results` only for a limited subset of namespaces).
+
+ Each `Testcase` is identified by its `name` attribute, which is unique in ResultsDB. `ref_url` can be used to point to an outside
+ resource, like documentation or a list of well-known issues for the `Testcase`.
+
+ + Attributes (Testcase GET)
+
+
+ ## Retrieve a Testcase [GET /testcases/{name}]
+
+ Retrieve a single `Testcase` based on the `name`.
+
+
+ + Request .../testcases/dist.rpmlint
+     + Parameters
+         + name: dist.rpmlint
+
+ + Response 200 (application/json)
+
+         {
+             "name": "dist.rpmlint",
+             "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues",
+             "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint"
+         }
+
+ + Response 404 (application/json)
+
+         {
+             "message": "Testcase not found"
+         }
+
+
+ ## Browse the Testcase collection [GET /testcases{?page,limit,name}]
+
+ Collection of all the `Testcases`.
+ `Testcases` are returned in paginated format, and references to the next and previous page (if applicable) are given as a part of the response.
+
+ + Parameters
+     + page: 8 (number, optional)
+         + Default: 0
+     + limit: 20 (number, optional)
+         + Default: 20
+     + name: `dist.rpmlint` (string, optional)
+         - Multiple values can be provided, separated by commas, to get an `or` filter based on all the values provided: `...&name=dist.rpmlint,dist.rpmgrill`
+         - `like` filter with `*` as wildcards: `...&name:like=dist.rpmgrill.*`
+
+ + Request .../testcases?name:like=dist.rpmlint.*
+     + Parameters
+         + `name:like`: dist.rpmlint.*
+
+ + Response 200 (application/json)
+
+         {
+             "next": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases?name:like=dist.rpmlint.*&page=1",
+             "prev": null,
+             "data": [
+                 {
+                     "name": "dist.rpmlint",
+                     "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues",
+                     "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint"
+                 }
+             ]
+         }
+
+
+ ## Create new Testcase [POST /testcases]
+
+ To create a new `Testcase`, post a JSON object containing the `name` parameter, and possibly also the `ref_url`.
+
+ Note that when a `Testcase` with the given `name` already exists, a new entry will not be created; rather, the old one will
+ get updated with the new `ref_url`, if set in the JSON data.
+
+ + Attributes (Testcase POST)
+
+ + Request (application/json)
+
+         {
+             "name": "dist.rpmlint",
+             "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues"
+         }
+
+ + Response 201 (application/json)
+
+         {
+             "name": "dist.rpmlint",
+             "ref_url": "https://fedoraproject.org/wiki/Common_Rpmlint_issues",
+             "href": "https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint"
+         }
+
+
+ # Data Structures
+
+ ## OUTCOMES (enum)
+ - PASSED
+ - FAILED
+ - INFO - represents a "soft pass" - use to treat as `"PASSED"` by automation/gating, and flag for human review
+ - NEEDS_INSPECTION - use to treat as `"FAILED"` by automation/gating, and flag for human review
+
+ ## Result GET (object)
+ - id: 7484989 (number, required) - Unique identifier. Assigned by the API at the moment of creation.
+ - testcase (required, Testcase GET) - The `Testcase` associated with the result.
+ - outcome (OUTCOMES, required) - Represents the outcome of the testing.
+ - note: `0 errors, 30 warnings` (string, optional, nullable) - Should be used as a _short_ summary of important information about the result. Detailed hints/logs should be accessible at the `ref_url` URL.
+ - `ref_url`: `https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log` (string, optional) - Use as a pointer to logs/artifacts/detailed information about the result.
+ - submit_time: `2016-08-15T13:29:06` (string) - UTC time of the result creation in ISO8601 format.
+ - groups (array) - List of the `Group` UUIDs the result is part of.
+     - `27f94e36-62ec-11e6-83fd-525400d7d6a4` (string)
+ - data (object) - Any number of key-value pairs. Used to store any additional information. In Taskotron, `item` and `type` are the most common keys, used to represent "what was tested".
+     - *key* (array[string], optional)
+ - `href`: `https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results/7484989` (string) - Reference to self.
+
+ ## Result POST (object)
+ - outcome (OUTCOMES, required)
+ - testcase (enum, required)
+     `Testcase` can be represented either by `testcase.name`, or by an `object` containing the `name` and any other applicable attributes.
+     * When identified by `name` and it:
+         * exists -> it is linked to the `Result`
+         * does _not_ exist -> it is created. Other attributes are set to default values
+     * When identified by `object` and it:
+         * exists -> it is linked to the `Result`, and the relevant attributes are updated to the provided values
+         * does _not_ exist -> it is created, and the relevant attributes are set to the provided values
+     - Members
+         - (Testcase POST data)
+         - `dist.rpmlint` (string) - `testcase.name` - Unique identifier. Represents namespacing, using `.` (dot) as a namespace separator
+ - groups (array, optional, nullable)
+     `Group`s can be represented either by `uuid`, or by an `object` containing the `uuid` and any other applicable attributes.
+     * When identified by `uuid` and the `Group`:
+         * exists -> it is linked to the `Result`
+         * does _not_ exist -> it is created. Other attributes are set to default values
+     * When identified by `object` and the `Group`:
+         * exists -> it is linked to the `Result`, and the relevant attributes are updated to the provided values
+         * does _not_ exist -> it is created, and the relevant attributes are set to the provided values
+     - Items
+         - (Group POST data)
+         - `27f94e36-62ec-11e6-83fd-525400d7d6a4` (string) - `Group.uuid` - Unique identifier
+ - note: `0 errors, 30 warnings` (string, optional, nullable) - Should be used as a _short_ summary of important information about the result. Detailed hints/logs should be accessible at the `ref_url` URL.
+ - `ref_url`: `https://taskotron-dev.fedoraproject.org/artifacts/all/27f94e36-62ec-11e6-83fd-525400d7d6a4/task_output/koschei-1.7.2-1.fc24.log` (string, optional, nullable) - Use as a pointer to logs/artifacts/detailed information about the result.
+ - data (object, optional, nullable) - Any number of key-value pairs used to store any additional information. Note that a colon `:` cannot be a part of a key's name, since it is used in the API as a filter separator (i.e. `:like`), and the key would be inaccessible.
+     - *key* (enum)
+         - `foo` (string) - Single value
+         - `foo`, `bar` (array[string]) - List of values
+ - `submit_time` (string/number, optional, nullable): UTC time of the result creation in ISO8601 format (YYYY-MM-DDTHH:MM:SS.ffffff) or a number of milliseconds since the Epoch. Defaults to the time of the `Result` submission.
+ - _auth (object, nullable, optional) - Placeholder for the future implementation of Authentication/Authorization
+
+
+ ## Group POST data (object)
+ `Group` object containing the relevant attributes
+ ### Properties
+ - uuid: `27f94e36-62ec-11e6-83fd-525400d7d6a4` (string, optional) - UUID in one of the RFC4122 formats
+ - description: `Taskotron job on koji_build koschei-1.7.2-1.fc24` (string, optional, nullable) - Short description of the group - intended to be consumed by humans.
+ - `ref_url`: `https://taskotron-dev.fedoraproject.org/execdb/jobs/27f94e36-62ec-11e6-83fd-525400d7d6a4` (string, optional, nullable) - URL pointing to logs/execution status/...
+
+ ## Group POST (Group POST data)
+ - uuid: `27f94e36-62ec-11e6-83fd-525400d7d6a4` (string, optional) - If not provided, ResultsDB will assign a UUID1 value to it
+ - _auth (nullable, optional) - Placeholder for the future implementation of Authentication/Authorization
+
+ ## Group GET (Group POST data)
+ - results: `https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/results?group=27f94e36-62ec-11e6-83fd-525400d7d6a4` (string) - URL for retrieving the `Results` in the `Group` via the `Result` resource
+ - `results_count`: 1 (number) - Number of `Results` in the `Group`
+ - `href`: `https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/groups/27f94e36-62ec-11e6-83fd-525400d7d6a4` (string) - Reference to self.
+
+
+ ## Testcase POST data (object)
+ `Testcase` object containing the relevant attributes
+ ### Properties
+ - name: `dist.rpmlint` (string, required) - Unique identifier. Represents namespacing, using a dot `.` as the namespace separator.
+ - `ref_url`: `https://fedoraproject.org/wiki/Common_Rpmlint_issues` (string, optional, nullable) - URL pointing to the documentation or other reference material.
+
+ ## Testcase POST (Testcase POST data)
+ - _auth (nullable, optional) - Placeholder for the future implementation of Authentication/Authorization.
+
+ ## Testcase GET (Testcase POST data)
+ - `href`: `https://taskotron-dev.fedoraproject.org/resultsdb_api/api/v2.0/testcases/dist.rpmlint` (string) - Reference to self.
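For orientation, the `Result GET` and `Testcase GET` shapes above map naturally onto plain records; a sketch as Python dataclasses, with field names taken from the Data Structures section and defaults that are illustrative only:

```python
from dataclasses import dataclass, field


@dataclass
class Testcase:
    name: str                   # unique identifier, dot-namespaced
    ref_url: str | None = None  # docs / known-issues link
    href: str | None = None     # reference to self (GET only)


@dataclass
class Result:
    id: int                     # assigned by the API on creation
    outcome: str                # PASSED / FAILED / INFO / NEEDS_INSPECTION
    testcase: Testcase
    submit_time: str            # ISO8601, UTC
    note: str | None = None     # short human-readable summary
    ref_url: str | None = None  # logs / artifacts
    groups: list[str] = field(default_factory=list)  # Group UUIDs
    data: dict[str, list[str]] = field(default_factory=dict)  # keyval store
    href: str | None = None     # reference to self (GET only)
```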
data/Dockerfile ADDED
@@ -0,0 +1,117 @@
+ FROM quay.io/fedora/python-313:20250716@sha256:3d229a9b58a9f5245d5696d7f817309ecfb2da2d92cc11cdad5a25308164421b AS builder
+
+ # builder should use root to install/create all files
+ USER root
+
+ # hadolint ignore=DL3033,DL3041,DL4006,SC2039,SC3040
+ RUN set -exo pipefail \
+     && mkdir -p /mnt/rootfs \
+     # install runtime dependencies
+     && dnf install -y \
+         --installroot=/mnt/rootfs \
+         --use-host-config \
+         --setopt install_weak_deps=false \
+         --nodocs \
+         --disablerepo=* \
+         --enablerepo=fedora,updates \
+         krb5-libs \
+         mod_ssl \
+         openldap \
+         python3 \
+         httpd-core \
+         python3-mod_wsgi \
+     && dnf --installroot=/mnt/rootfs clean all \
+     && ln -s mod_wsgi-express-3 /mnt/rootfs/usr/bin/mod_wsgi-express \
+     # https://python-poetry.org/docs/master/#installing-with-the-official-installer
+     && curl -sSL --proto "=https" https://install.python-poetry.org | python3 - \
+     && python3 -m venv /venv
+
+ ENV \
+     PIP_DEFAULT_TIMEOUT=100 \
+     PIP_DISABLE_PIP_VERSION_CHECK=1 \
+     PIP_NO_CACHE_DIR=1 \
+     PYTHONFAULTHANDLER=1 \
+     PYTHONHASHSEED=random \
+     PYTHONUNBUFFERED=1
+
+ WORKDIR /build
+
+ # Copy only specific files to avoid accidentally including any generated files
+ # or secrets.
+ COPY resultsdb ./resultsdb
+ COPY conf ./conf
+ COPY \
+     pyproject.toml \
+     poetry.lock \
+     README.md \
+     alembic.ini \
+     entrypoint.sh \
+     ./
+
+ # hadolint ignore=SC1091
+ RUN set -ex \
+     && export PATH=/root/.local/bin:"$PATH" \
+     && . /venv/bin/activate \
+     && poetry build --format=wheel \
+     && version=$(poetry version --short) \
+     && pip install --no-cache-dir dist/resultsdb-"$version"-py3*.whl \
+     && deactivate \
+     && mv /venv /mnt/rootfs \
+     && mkdir -p /mnt/rootfs/app \
+     && cp -v entrypoint.sh /mnt/rootfs/app
+
+ # fix apache config for container use
+ RUN sed -i 's#^WSGISocketPrefix .*#WSGISocketPrefix /tmp/wsgi#' conf/resultsdb.conf \
+     # install configuration
+     && install -d /mnt/rootfs/usr/share/resultsdb/conf \
+     && install -p -m 0644 conf/resultsdb.conf /mnt/rootfs/usr/share/resultsdb/conf/ \
+     && install -p -m 0644 conf/resultsdb.wsgi /mnt/rootfs/usr/share/resultsdb/ \
+     && install -d /mnt/rootfs/etc/resultsdb \
+     && install -p -m 0644 conf/resultsdb.conf /mnt/rootfs/etc/httpd/conf.d/ \
+     # install alembic configuration and migrations
+     && install -p -m 0644 alembic.ini /mnt/rootfs/usr/share/resultsdb/alembic.ini \
+     && cp -a resultsdb/alembic /mnt/rootfs/usr/share/resultsdb/alembic \
+     && chmod -R 0755 /mnt/rootfs/usr/share/resultsdb/alembic
+
+ # This is just to satisfy linters
+ USER 1001
+
+ # --- Final image
+ FROM scratch
+ ARG GITHUB_SHA
+ ARG EXPIRES_AFTER
+ LABEL \
+     name="ResultsDB application" \
+     vendor="ResultsDB developers" \
+     license="GPLv2+" \
+     description="ResultsDB is a results store engine for, but not limited to, Fedora QA tools." \
+     usage="https://pagure.io/taskotron/resultsdb/blob/develop/f/openshift/README.md" \
+     url="https://github.com/release-engineering/resultsdb" \
+     vcs-type="git" \
+     vcs-ref=$GITHUB_SHA \
+     io.k8s.display-name="ResultsDB" \
+     quay.expires-after=$EXPIRES_AFTER
+
+ ENV \
+     PYTHONFAULTHANDLER=1 \
+     PYTHONHASHSEED=random \
+     PYTHONUNBUFFERED=1 \
+     WEB_CONCURRENCY=8
+
+ COPY --from=builder /mnt/rootfs/ /
+ COPY --from=builder \
+     /etc/yum.repos.d/fedora.repo \
+     /etc/yum.repos.d/fedora-updates.repo \
+     /etc/yum.repos.d/
+ WORKDIR /app
+
+ USER 1001
+ EXPOSE 5001
+
+ # Validate virtual environment
+ RUN /app/entrypoint.sh python -c 'import resultsdb' \
+     && mod_wsgi-express module-config \
+     && /app/entrypoint.sh resultsdb --help
+
+ ENTRYPOINT ["/app/entrypoint.sh"]
+ USER 1001:0
@@ -0,0 +1,339 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ GNU GENERAL PUBLIC LICENSE
2
+ Version 2, June 1991
3
+
4
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
5
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
6
+ Everyone is permitted to copy and distribute verbatim copies
7
+ of this license document, but changing it is not allowed.
8
+
9
+ Preamble
10
+
11
+ The licenses for most software are designed to take away your
12
+ freedom to share and change it. By contrast, the GNU General Public
13
+ License is intended to guarantee your freedom to share and change free
14
+ software--to make sure the software is free for all its users. This
15
+ General Public License applies to most of the Free Software
16
+ Foundation's software and to any other program whose authors commit to
17
+ using it. (Some other Free Software Foundation software is covered by
18
+ the GNU Lesser General Public License instead.) You can apply it to
19
+ your programs, too.
20
+
21
+ When we speak of free software, we are referring to freedom, not
22
+ price. Our General Public Licenses are designed to make sure that you
23
+ have the freedom to distribute copies of free software (and charge for
24
+ this service if you wish), that you receive source code or can get it
25
+ if you want it, that you can change the software or use pieces of it
26
+ in new free programs; and that you know you can do these things.
27
+
28
+ To protect your rights, we need to make restrictions that forbid
29
+ anyone to deny you these rights or to ask you to surrender the rights.
30
+ These restrictions translate to certain responsibilities for you if you
31
+ distribute copies of the software, or if you modify it.
32
+
33
+ For example, if you distribute copies of such a program, whether
34
+ gratis or for a fee, you must give the recipients all the rights that
35
+ you have. You must make sure that they, too, receive or can get the
36
+ source code. And you must show them these terms so they know their
37
+ rights.
38
+
39
+ We protect your rights with two steps: (1) copyright the software, and
40
+ (2) offer you this license which gives you legal permission to copy,
41
+ distribute and/or modify the software.
42
+
43
+ Also, for each author's protection and ours, we want to make certain
44
+ that everyone understands that there is no warranty for this free
45
+ software. If the software is modified by someone else and passed on, we
46
+ want its recipients to know that what they have is not the original, so
47
+ that any problems introduced by others will not reflect on the original
48
+ authors' reputations.
49
+
50
+ Finally, any free program is threatened constantly by software
51
+ patents. We wish to avoid the danger that redistributors of a free
52
+ program will individually obtain patent licenses, in effect making the
53
+ program proprietary. To prevent this, we have made it clear that any
54
+ patent must be licensed for everyone's free use or not licensed at all.
55
+
56
+ The precise terms and conditions for copying, distribution and
57
+ modification follow.
58
+
59
+ GNU GENERAL PUBLIC LICENSE
60
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
61
+
62
+ 0. This License applies to any program or other work which contains
63
+ a notice placed by the copyright holder saying it may be distributed
64
+ under the terms of this General Public License. The "Program", below,
65
+ refers to any such program or work, and a "work based on the Program"
66
+ means either the Program or any derivative work under copyright law:
67
+ that is to say, a work containing the Program or a portion of it,
68
+ either verbatim or with modifications and/or translated into another
69
+ language. (Hereinafter, translation is included without limitation in
70
+ the term "modification".) Each licensee is addressed as "you".
71
+
72
+ Activities other than copying, distribution and modification are not
73
+ covered by this License; they are outside its scope. The act of
74
+ running the Program is not restricted, and the output from the Program
75
+ is covered only if its contents constitute a work based on the
76
+ Program (independent of having been made by running the Program).
77
+ Whether that is true depends on what the Program does.
78
+
79
+ 1. You may copy and distribute verbatim copies of the Program's
80
+ source code as you receive it, in any medium, provided that you
81
+ conspicuously and appropriately publish on each copy an appropriate
82
+ copyright notice and disclaimer of warranty; keep intact all the
83
+ notices that refer to this License and to the absence of any warranty;
84
+ and give any other recipients of the Program a copy of this License
85
+ along with the Program.
86
+
87
+ You may charge a fee for the physical act of transferring a copy, and
88
+ you may at your option offer warranty protection in exchange for a fee.
89
+
90
+ 2. You may modify your copy or copies of the Program or any portion
91
+ of it, thus forming a work based on the Program, and copy and
92
+ distribute such modifications or work under the terms of Section 1
93
+ above, provided that you also meet all of these conditions:
94
+
95
+ a) You must cause the modified files to carry prominent notices
96
+ stating that you changed the files and the date of any change.
97
+
98
+ b) You must cause any work that you distribute or publish, that in
99
+ whole or in part contains or is derived from the Program or any
100
+ part thereof, to be licensed as a whole at no charge to all third
101
+ parties under the terms of this License.
102
+
103
+ c) If the modified program normally reads commands interactively
104
+ when run, you must cause it, when started running for such
105
+ interactive use in the most ordinary way, to print or display an
106
+ announcement including an appropriate copyright notice and a
107
+ notice that there is no warranty (or else, saying that you provide
108
+ a warranty) and that users may redistribute the program under
109
+ these conditions, and telling the user how to view a copy of this
110
+ License. (Exception: if the Program itself is interactive but
111
+ does not normally print such an announcement, your work based on
112
+ the Program is not required to print an announcement.)
113
+
114
+ These requirements apply to the modified work as a whole. If
115
+ identifiable sections of that work are not derived from the Program,
116
+ and can be reasonably considered independent and separate works in
117
+ themselves, then this License, and its terms, do not apply to those
118
+ sections when you distribute them as separate works. But when you
119
+ distribute the same sections as part of a whole which is a work based
120
+ on the Program, the distribution of the whole must be on the terms of
121
+ this License, whose permissions for other licensees extend to the
122
+ entire whole, and thus to each and every part regardless of who wrote it.
123
+
124
+ Thus, it is not the intent of this section to claim rights or contest
125
+ your rights to work written entirely by you; rather, the intent is to
126
+ exercise the right to control the distribution of derivative or
127
+ collective works based on the Program.
128
+
129
+ In addition, mere aggregation of another work not based on the Program
130
+ with the Program (or with a work based on the Program) on a volume of
131
+ a storage or distribution medium does not bring the other work under
132
+ the scope of this License.
133
+
134
+ 3. You may copy and distribute the Program (or a work based on it,
135
+ under Section 2) in object code or executable form under the terms of
136
+ Sections 1 and 2 above provided that you also do one of the following:
137
+
138
+ a) Accompany it with the complete corresponding machine-readable
139
+ source code, which must be distributed under the terms of Sections
140
+ 1 and 2 above on a medium customarily used for software interchange; or,
141
+
142
+ b) Accompany it with a written offer, valid for at least three
143
+ years, to give any third party, for a charge no more than your
144
+ cost of physically performing source distribution, a complete
145
+ machine-readable copy of the corresponding source code, to be
146
+ distributed under the terms of Sections 1 and 2 above on a medium
147
+ customarily used for software interchange; or,
148
+
149
+ c) Accompany it with the information you received as to the offer
150
+ to distribute corresponding source code. (This alternative is
151
+ allowed only for noncommercial distribution and only if you
152
+ received the program in object code or executable form with such
153
+ an offer, in accord with Subsection b above.)
154
+
155
+ The source code for a work means the preferred form of the work for
156
+ making modifications to it. For an executable work, complete source
157
+ code means all the source code for all modules it contains, plus any
158
+ associated interface definition files, plus the scripts used to
159
+ control compilation and installation of the executable. However, as a
160
+ special exception, the source code distributed need not include
161
+ anything that is normally distributed (in either source or binary
162
+ form) with the major components (compiler, kernel, and so on) of the
163
+ operating system on which the executable runs, unless that component
164
+ itself accompanies the executable.
165
+
166
+ If distribution of executable or object code is made by offering
167
+ access to copy from a designated place, then offering equivalent
168
+ access to copy the source code from the same place counts as
169
+ distribution of the source code, even though third parties are not
170
+ compelled to copy the source along with the object code.
171
+
172
+ 4. You may not copy, modify, sublicense, or distribute the Program
173
+ except as expressly provided under this License. Any attempt
174
+ otherwise to copy, modify, sublicense or distribute the Program is
175
+ void, and will automatically terminate your rights under this License.
176
+ However, parties who have received copies, or rights, from you under
177
+ this License will not have their licenses terminated so long as such
178
+ parties remain in full compliance.
179
+
180
+ 5. You are not required to accept this License, since you have not
181
+ signed it. However, nothing else grants you permission to modify or
182
+ distribute the Program or its derivative works. These actions are
183
+ prohibited by law if you do not accept this License. Therefore, by
184
+ modifying or distributing the Program (or any work based on the
185
+ Program), you indicate your acceptance of this License to do so, and
186
+ all its terms and conditions for copying, distributing or modifying
187
+ the Program or works based on it.
188
+
189
+ 6. Each time you redistribute the Program (or any work based on the
190
+ Program), the recipient automatically receives a license from the
191
+ original licensor to copy, distribute or modify the Program subject to
192
+ these terms and conditions. You may not impose any further
193
+ restrictions on the recipients' exercise of the rights granted herein.
194
+ You are not responsible for enforcing compliance by third parties to
195
+ this License.
196
+
197
+ 7. If, as a consequence of a court judgment or allegation of patent
198
+ infringement or for any other reason (not limited to patent issues),
199
+ conditions are imposed on you (whether by court order, agreement or
200
+ otherwise) that contradict the conditions of this License, they do not
201
+ excuse you from the conditions of this License. If you cannot
202
+ distribute so as to satisfy simultaneously your obligations under this
203
+ License and any other pertinent obligations, then as a consequence you
204
+ may not distribute the Program at all. For example, if a patent
205
+ license would not permit royalty-free redistribution of the Program by
206
+ all those who receive copies directly or indirectly through you, then
207
+ the only way you could satisfy both it and this License would be to
208
+ refrain entirely from distribution of the Program.
209
+
210
+ If any portion of this section is held invalid or unenforceable under
211
+ any particular circumstance, the balance of the section is intended to
212
+ apply and the section as a whole is intended to apply in other
213
+ circumstances.
214
+
215
+ It is not the purpose of this section to induce you to infringe any
216
+ patents or other property right claims or to contest validity of any
217
+ such claims; this section has the sole purpose of protecting the
218
+ integrity of the free software distribution system, which is
219
+ implemented by public license practices. Many people have made
220
+ generous contributions to the wide range of software distributed
221
+ through that system in reliance on consistent application of that
222
+ system; it is up to the author/donor to decide if he or she is willing
223
+ to distribute software through any other system and a licensee cannot
224
+ impose that choice.
225
+
226
+ This section is intended to make thoroughly clear what is believed to
227
+ be a consequence of the rest of this License.
228
+
229
+ 8. If the distribution and/or use of the Program is restricted in
230
+ certain countries either by patents or by copyrighted interfaces, the
231
+ original copyright holder who places the Program under this License
232
+ may add an explicit geographical distribution limitation excluding
233
+ those countries, so that distribution is permitted only in or among
234
+ countries not thus excluded. In such case, this License incorporates
235
+ the limitation as if written in the body of this License.
236
+
237
+ 9. The Free Software Foundation may publish revised and/or new versions
238
+ of the General Public License from time to time. Such new versions will
239
+ be similar in spirit to the present version, but may differ in detail to
240
+ address new problems or concerns.
241
+
242
+ Each version is given a distinguishing version number. If the Program
243
+ specifies a version number of this License which applies to it and "any
244
+ later version", you have the option of following the terms and conditions
245
+ either of that version or of any later version published by the Free
246
+ Software Foundation. If the Program does not specify a version number of
247
+ this License, you may choose any version ever published by the Free Software
248
+ Foundation.
249
+
250
+ 10. If you wish to incorporate parts of the Program into other free
251
+ programs whose distribution conditions are different, write to the author
252
+ to ask for permission. For software which is copyrighted by the Free
253
+ Software Foundation, write to the Free Software Foundation; we sometimes
254
+ make exceptions for this. Our decision will be guided by the two goals
255
+ of preserving the free status of all derivatives of our free software and
256
+ of promoting the sharing and reuse of software generally.
257
+
258
+ NO WARRANTY
259
+
260
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
261
+ FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
262
+ OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
263
+ PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
264
+ OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
265
+ MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
266
+ TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
267
+ PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
268
+ REPAIR OR CORRECTION.
269
+
270
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
271
+ WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
272
+ REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
273
+ INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
274
+ OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
275
+ TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
276
+ YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
277
+ PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
278
+ POSSIBILITY OF SUCH DAMAGES.
279
+
280
+ END OF TERMS AND CONDITIONS
281
+
282
+ How to Apply These Terms to Your New Programs
283
+
284
+ If you develop a new program, and you want it to be of the greatest
285
+ possible use to the public, the best way to achieve this is to make it
286
+ free software which everyone can redistribute and change under these terms.
287
+
288
+ To do so, attach the following notices to the program. It is safest
289
+ to attach them to the start of each source file to most effectively
290
+ convey the exclusion of warranty; and each file should have at least
291
+ the "copyright" line and a pointer to where the full notice is found.
292
+
293
+ <one line to give the program's name and a brief idea of what it does.>
294
+ Copyright (C) <year> <name of author>
295
+
296
+ This program is free software; you can redistribute it and/or modify
297
+ it under the terms of the GNU General Public License as published by
298
+ the Free Software Foundation; either version 2 of the License, or
299
+ (at your option) any later version.
300
+
301
+ This program is distributed in the hope that it will be useful,
302
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
303
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
304
+ GNU General Public License for more details.
305
+
306
+ You should have received a copy of the GNU General Public License along
307
+ with this program; if not, write to the Free Software Foundation, Inc.,
308
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
309
+
310
+ Also add information on how to contact you by electronic and paper mail.
311
+
312
+ If the program is interactive, make it output a short notice like this
313
+ when it starts in an interactive mode:
314
+
315
+ Gnomovision version 69, Copyright (C) year name of author
316
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
317
+ This is free software, and you are welcome to redistribute it
318
+ under certain conditions; type `show c' for details.
319
+
320
+ The hypothetical commands `show w' and `show c' should show the appropriate
321
+ parts of the General Public License. Of course, the commands you use may
322
+ be called something other than `show w' and `show c'; they could even be
323
+ mouse-clicks or menu items--whatever suits your program.
324
+
325
+ You should also get your employer (if you work as a programmer) or your
326
+ school, if any, to sign a "copyright disclaimer" for the program, if
327
+ necessary. Here is a sample; alter the names:
328
+
329
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
330
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
331
+
332
+ <signature of Ty Coon>, 1 April 1989
333
+ Ty Coon, President of Vice
334
+
335
+ This General Public License does not permit incorporating your program into
336
+ proprietary programs. If your program is a subroutine library, you may
337
+ consider it more useful to permit linking proprietary applications with the
338
+ library. If this is what you want to do, use the GNU Lesser General
339
+ Public License instead of this License.
data/alembic.ini ADDED
@@ -0,0 +1,68 @@
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts
5
+ script_location = resultsdb/alembic
6
+
7
+ # template used to generate migration files
8
+ # file_template = %%(rev)s_%%(slug)s
9
+
10
+ # max length of characters to apply to the
11
+ # "slug" field
12
+ #truncate_slug_length = 40
13
+
14
+ # set to 'true' to run the environment during
15
+ # the 'revision' command, regardless of autogenerate
16
+ # revision_environment = false
17
+
18
+ # set to 'true' to allow .pyc and .pyo files without
19
+ # a source .py file to be detected as revisions in the
20
+ # versions/ directory
21
+ # sourceless = false
22
+
23
+ # version location specification; this defaults
24
+ # to alembic/versions. When using multiple version
25
+ # directories, initial revisions must be specified with --version-path
26
+ # version_locations = %(here)s/bar %(here)s/bat alembic/versions
27
+
28
+ # the output encoding used when revision files
29
+ # are written from script.py.mako
30
+ # output_encoding = utf-8
31
+
32
+ sqlalchemy.url = driver://user:pass@localhost/dbname
33
+
34
+
35
+ # Logging configuration
36
+ [loggers]
37
+ keys = root,sqlalchemy,alembic
38
+
39
+ [handlers]
40
+ keys = console
41
+
42
+ [formatters]
43
+ keys = generic
44
+
45
+ [logger_root]
46
+ level = WARN
47
+ handlers = console
48
+ qualname =
49
+
50
+ [logger_sqlalchemy]
51
+ level = WARN
52
+ handlers =
53
+ qualname = sqlalchemy.engine
54
+
55
+ [logger_alembic]
56
+ level = INFO
57
+ handlers =
58
+ qualname = alembic
59
+
60
+ [handler_console]
61
+ class = StreamHandler
62
+ args = (sys.stderr,)
63
+ level = NOTSET
64
+ formatter = generic
65
+
66
+ [formatter_generic]
67
+ format = %(levelname)-5.5s [%(name)s] %(message)s
68
+ datefmt = %H:%M:%S
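Note that the sqlalchemy.url placeholder above is overridden at runtime: resultsdb/alembic/env.py (further down in this commit) replaces it with the app's SQLALCHEMY_DATABASE_URI. A minimal sketch of driving a migration programmatically, mirroring get_alembic_config() and the upgrade_db command in resultsdb/__main__.py below; it assumes the package is installed and must run inside a Flask app context so env.py can reach the configured database:

    from alembic import command
    from alembic.config import Config

    cfg = Config()  # no .ini file needed; the script location is set explicitly
    cfg.set_main_option("script_location", "resultsdb:alembic")
    command.upgrade(cfg, "head")  # same call the 'upgrade_db' CLI command makes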
data/conf/fedora-messaging-example.toml ADDED
@@ -0,0 +1,18 @@
1
+ # A sample configuration for fedora-messaging. This file is in the TOML format.
2
+ # For complete details on all configuration options, see the documentation.
3
+
4
+ amqp_url = "amqp://"
5
+
6
+ publish_exchange = "amq.topic"
7
+
8
+ # The topic_prefix configuration value will add a prefix to the topics of every sent message.
9
+ # This is used for migrating from fedmsg, and should not be used afterwards.
10
+ topic_prefix = ""
11
+
12
+ [tls]
13
+ ca_cert = "/etc/pki/tls/certs/ca-bundle.crt"
14
+ keyfile = "/my/client/key.pem"
15
+ certfile = "/my/client/cert.pem"
16
+
17
+ [client_properties]
18
+ app = "ResultsDB"
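A hedged sketch of publishing through this configuration: fedora-messaging reads its TOML file from the FEDORA_MESSAGING_CONF environment variable, so pointing that at the file above and calling the publish API should be enough. The topic and body here are illustrative, not values ResultsDB itself sends (the real messages are built in resultsdb/messaging.py):

    import os

    os.environ["FEDORA_MESSAGING_CONF"] = "conf/fedora-messaging-example.toml"

    from fedora_messaging import api, message

    # Illustrative topic and body only.
    api.publish(message.Message(topic="resultsdb.result.new", body={"outcome": "PASSED"}))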
data/conf/oauth2_client_secrets.json.example ADDED
@@ -0,0 +1,11 @@
1
+ {"web": {
2
+ "client_id": "resultsdbdev",
3
+ "client_secret": "uvwFVN7SNgmNCQVJTR9QdrkMginl0RM4",
4
+ "auth_uri": "https://iddev.fedorainfracloud.org/openidc/Authorization",
5
+ "token_uri": "https://iddev.fedorainfracloud.org/openidc/Token",
6
+ "userinfo_uri": "https://iddev.fedorainfracloud.org/openidc/UserInfo",
7
+ "redirect_uris": ["https://resultsdb.example.com/oidc_callback"],
8
+ "issuer": "https://iddev.fedorainfracloud.org/openidc/",
9
+ "token_introspection_uri": "https://iddev.fedorainfracloud.org/openidc/TokenInfo"
10
+ }
11
+ }
data/conf/resultsdb.conf ADDED
@@ -0,0 +1,20 @@
1
+ WSGIDaemonProcess resultsdb user=apache group=apache threads=5
2
+ WSGIScriptAlias /resultsdb /usr/share/resultsdb/resultsdb.wsgi
3
+ WSGISocketPrefix run/wsgi
4
+
5
+ <Directory /usr/share/resultsdb>
6
+ WSGIProcessGroup resultsdb
7
+ WSGIApplicationGroup %{GLOBAL}
8
+ WSGIScriptReloading On
9
+ Order deny,allow
10
+ Allow from all
11
+ Require all granted
12
+ </Directory>
13
+
14
+ #Alias /resultsdb/static /var/www/resultsdb/resultsdb/static
15
+
16
+ #<Directory /var/www/resultsdb/resultsdb/static>
17
+ #Order allow,deny
18
+ #Allow from all
19
+ #Require all granted
20
+ #</Directory>
data/conf/resultsdb.wsgi ADDED
@@ -0,0 +1,4 @@
1
+ # SPDX-License-Identifier: GPL-2.0+
2
+ from resultsdb import create_app
3
+
4
+ application = create_app()
data/conf/settings.py.example ADDED
@@ -0,0 +1,95 @@
1
+ # Copy this file to `conf/settings.py` to put it into effect. It overrides the values defined
2
+ # in `resultsdb/config.py`.
3
+
4
+ # ================== General ===================
5
+
6
+ DEBUG = False
7
+ PRODUCTION = True
8
+ SECRET_KEY = 'replace-me-with-something-random'
9
+
10
+ HOST = '0.0.0.0'
11
+ PORT = 5001
12
+
13
+
14
+ SQLALCHEMY_DATABASE_URI = 'sqlite:////var/tmp/resultsdb_db.sqlite'
15
+ #SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://dbuser:dbpassword@dbhost:dbport/dbname'
16
+ SHOW_DB_URI = False
17
+
18
+ LOGGING = {
19
+ 'version': 1,
20
+ 'disable_existing_loggers': False,
21
+ 'loggers': {
22
+ 'resultsdb': {
23
+ 'level': 'INFO',
24
+ },
25
+ 'dogpile': {
26
+ 'level': 'WARNING',
27
+ },
28
+ },
29
+ 'handlers': {
30
+ 'console': {
31
+ 'formatter': 'bare',
32
+ 'class': 'logging.StreamHandler',
33
+ 'stream': 'ext://sys.stdout',
34
+ 'level': 'INFO',
35
+ },
36
+ },
37
+ 'formatters': {
38
+ 'bare': {
39
+ 'format': '[%(asctime)s] [%(process)d] [%(levelname)s] %(name)s: %(message)s',
40
+ 'datefmt': '%Y-%m-%d %H:%M:%S',
41
+ }
42
+ },
43
+ 'root': {
44
+ 'level': 'WARNING',
45
+ 'handlers': ['console'],
46
+ },
47
+ }
48
+
49
+ # Extend the list of allowed outcomes.
50
+ ADDITIONAL_RESULT_OUTCOMES = []
51
+
52
+ # Fedmenu configuration
53
+ FEDMENU_URL = 'https://apps.fedoraproject.org/fedmenu'
54
+ FEDMENU_DATA_URL = 'https://apps.fedoraproject.org/js/data.js'
55
+
56
+ # ================== Authentication ===================
57
+
58
+ # Supported values: "oidc"
59
+ AUTH_MODULE = None
60
+
61
+ # OIDC Configuration
62
+ import os
63
+ OIDC_CLIENT_SECRETS = os.getcwd() + '/conf/oauth2_client_secrets.json'
64
+ OIDC_RESOURCE_SERVER_ONLY = True
65
+ OIDC_USERNAME_FIELD = 'uid'
66
+
67
+
68
+ # ================== Messaging ===================
69
+
70
+ # Set this to True or False to enable publishing to a message bus
71
+ MESSAGE_BUS_PUBLISH = False
72
+ # Name of the message bus plugin to use goes here. 'fedmsg' is installed by
73
+ # default, but you could create your own.
74
+ # Supported values: 'dummy', 'stomp', 'fedmsg'
75
+ MESSAGE_BUS_PLUGIN = 'fedmsg'
76
+ MESSAGE_BUS_KWARGS = {'modname': 'resultsdb'}
77
+
78
+ ## Alternatively, you could use the 'stomp' messaging plugin.
79
+ #MESSAGE_BUS_PLUGIN = 'stomp'
80
+ #MESSAGE_BUS_KWARGS = {
81
+ # 'destination': '/topic/VirtualTopic.eng.resultsdb.result.new',
82
+ # 'connection': {
83
+ # 'host_and_ports': [
84
+ # ('broker01', 61612),
85
+ # ('broker02', 61612),
86
+ # ],
87
+ # 'use_ssl': True,
88
+ # 'ssl_key_file': '/path/to/key/file',
89
+ # 'ssl_cert_file': '/path/to/cert/file',
90
+ # 'ssl_ca_certs': '/path/to/ca/certs',
91
+ # },
92
+ #}
93
+
94
+ # Publish Taskotron-compatible fedmsgs on the 'taskotron' topic
95
+ MESSAGE_BUS_PUBLISH_TASKOTRON = False
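As resultsdb/__init__.py below shows, create_app() loads this file from the path in the RESULTSDB_CONFIG environment variable (falling back to DEFAULT_CONFIG_FILE). A minimal sketch of wiring that up, assuming the package is installed; the settings path is hypothetical:

    import os

    os.environ["RESULTSDB_CONFIG"] = "/etc/resultsdb/settings.py"  # hypothetical path

    from resultsdb import create_app

    app = create_app()
    # HOST/PORT as defined above; Flask's built-in server is for development only.
    app.run(host=app.config["HOST"], port=app.config["PORT"])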
data/entrypoint.sh ADDED
@@ -0,0 +1,25 @@
1
+ #!/bin/bash
2
+ # ENTRYPOINT for the container.
3
+ # Activates virtualenv before running any commands.
4
+ set -e
5
+
6
+ # shellcheck disable=SC1091
7
+ . /venv/bin/activate
8
+
9
+ if [[ $# == 0 ]]; then
10
+ exec mod_wsgi-express \
11
+ start-server \
12
+ /usr/share/resultsdb/resultsdb.wsgi \
13
+ --user=apache \
14
+ --group=apache \
15
+ --processes="${MOD_WSGI_PROCESSES:-1}" \
16
+ --threads="${MOD_WSGI_THREADS:-5}" \
17
+ --port="${MOD_WSGI_PORT:-5001}" \
18
+ --include-file=/etc/httpd/conf.d/resultsdb.conf \
19
+ --log-level="${MOD_WSGI_LOG_LEVEL:-info}" \
20
+ --log-to-terminal \
21
+ --access-log \
22
+ --startup-log
23
+ else
24
+ exec "$@"
25
+ fi
data/get-version.sh ADDED
@@ -0,0 +1,42 @@
1
+ #!/bin/bash
2
+
3
+ # SPDX-License-Identifier: GPL-2.0+
4
+
5
+ # Prints the current version based on the current git revision.
6
+
7
+ set -e
8
+
9
+ if [[ -n "$GITHUB_SHA" ]]; then
10
+ if [[ $GITHUB_REF =~ ^refs/tags/ ]]; then
11
+ echo "${GITHUB_REF#refs/tags/}"
12
+ else
13
+ last_version=$(poetry version --short)
14
+ echo "$last_version+git.${GITHUB_SHA:0:7}"
15
+ fi
16
+ exit
17
+ fi
18
+
19
+ if [ "$(git tag | wc -l)" -eq 0 ] ; then
20
+ # never been tagged since the project is just starting out
21
+ lastversion="0.0"
22
+ revbase=""
23
+ else
24
+ lasttag="$(git describe --abbrev=0 HEAD)"
25
+ lastversion="$lasttag"
26
+ revbase="^$lasttag"
27
+ fi
28
+ if [ "$(git rev-list $revbase HEAD | wc -l)" -eq 0 ] ; then
29
+ # building a tag
30
+ version="$lastversion"
31
+ else
32
+ # git builds count as a pre-release of the next version
33
+ version="$lastversion"
34
+ version="${version%%[a-z]*}" # strip non-numeric suffixes like "rc1"
35
+ # increment the last portion of the version
36
+ version="${version%.*}.$((${version##*.} + 1))"
37
+ commitcount=$(git rev-list $revbase HEAD | wc -l)
38
+ commitsha=$(git rev-parse --short HEAD)
39
+ version="${version}.dev${commitcount}+git.${commitsha}"
40
+ fi
41
+
42
+ echo "$version"
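The bump rule above (strip a trailing pre-release suffix such as "rc1", increment the last numeric component, then append a .dev segment) is easy to mis-read in shell parameter expansion. A hedged Python re-statement of the same string logic, with an illustrative check:

    import re

    def next_dev_version(last_tag: str, commit_count: int, sha: str) -> str:
        base = re.split(r"[a-z]", last_tag, maxsplit=1)[0]  # "2.1rc1" -> "2.1"
        head, _, last = base.rpartition(".")
        bumped = f"{head}.{int(last) + 1}" if head else str(int(last) + 1)
        return f"{bumped}.dev{commit_count}+git.{sha}"

    # e.g. 5 commits past tag 2.1rc1 at commit abc1234:
    assert next_dev_version("2.1rc1", 5, "abc1234") == "2.2.dev5+git.abc1234"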
data/gunicorn.cfg ADDED
@@ -0,0 +1,5 @@
1
+ import logging
2
+ import sys
3
+
4
+ gunicorn_logger = logging.getLogger('gunicorn.error')
5
+ gunicorn_logger.addHandler(logging.StreamHandler(sys.stdout))
data/init_db.sh ADDED
@@ -0,0 +1,5 @@
1
+ #!/bin/bash
2
+ set -e
3
+ poetry run resultsdb init_alembic
4
+ poetry run resultsdb init_db
5
+ poetry run resultsdb mock_data
data/logo.png ADDED

Git LFS Details

  • SHA256: 528e86f33b0ebaca52eecf4ce19ecccd52e0b82344994ddf6609260b18bf9de9
  • Pointer size: 129 Bytes
  • Size of remote file: 8.13 kB
data/migration.sh ADDED
@@ -0,0 +1,8 @@
1
+ git commit -am .
2
+ docker stop pg_resultsdb; docker rm pg_resultsdb; docker run --name pg_resultsdb -e POSTGRES_USER=resultsdb -e POSTGRES_PASSWORD=fedora -p 5432:5432 -d postgres
3
+ sleep 10
4
+ git checkout 5dc67d8382dd0c0aaf5b9d74df4dd2c00585af4e; DEV=true bash init_db.sh; git checkout feature/v20
5
+ DEV=true alembic upgrade 540dbe71fa91
6
+
7
+
8
+ #DEV=true alembic upgrade head
data/openshift/resultsdb-test-template.yaml ADDED
@@ -0,0 +1,289 @@
1
+
2
+ # Template to produce a new test environment in OpenShift. Uses OpenID Connect
3
+ # against iddev.fedorainfracloud.org for authentication, and ephemeral storage
4
+ # for Postgres data.
5
+ #
6
+ # To create an environment from the template, process and apply it:
7
+ # oc process -f openshift/resultsdb-test-template.yaml -p TEST_ID=123 | oc apply -f -
8
+ # To clean up the environment, use a selector on the environment label:
9
+ # oc delete dc,deploy,pod,configmap,secret,svc,route -l environment=test-123
10
+
11
+ ---
12
+ apiVersion: v1
13
+ kind: Template
14
+ metadata:
15
+ name: resultsdb-test-template
16
+ parameters:
17
+ - name: TEST_ID
18
+ displayName: Test id
19
+ description: Short unique identifier for this test run (e.g. Jenkins job number)
20
+ required: true
21
+ - name: RESULTSDB_IMAGE
22
+ displayName: ResultsDB container image
23
+ description: Image to be used for the ResultsDB deployment
24
+ value: 172.30.1.1:5000/myproject/resultsdb:latest
25
+ required: true
26
+ - name: DATABASE_PASSWORD
27
+ displayName: Database password
28
+ generate: expression
29
+ from: "[\\w]{32}"
30
+ - name: RESULTSDB_SECRET_KEY
31
+ displayName: Secret Key for ResultsDB
32
+ generate: expression
33
+ from: "[\\w]{32}"
34
+ - name: RESULTSDB_ADDITIONAL_RESULT_OUTCOMES
35
+ displayName: Additional result outcome values
36
+ value: "[]"
37
+ objects:
38
+ - apiVersion: v1
39
+ kind: Secret
40
+ metadata:
41
+ name: "resultsdb-test-${TEST_ID}-secret"
42
+ labels:
43
+ environment: "test-${TEST_ID}"
44
+ app: resultsdb
45
+ stringData:
46
+ database-password: "${DATABASE_PASSWORD}"
47
+ - apiVersion: v1
48
+ kind: Secret
49
+ metadata:
50
+ name: "resultsdb-test-${TEST_ID}-config"
51
+ labels:
52
+ environment: "test-${TEST_ID}"
53
+ app: resultsdb
54
+ stringData:
55
+ settings.py: |-
56
+ SECRET_KEY = '${RESULTSDB_SECRET_KEY}'
57
+ SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://resultsdb:${DATABASE_PASSWORD}@resultsdb-test-${TEST_ID}-database:5432/resultsdb'
58
+ FILE_LOGGING = False
59
+ LOGFILE = '/var/log/resultsdb/resultsdb.log'
60
+ SYSLOG_LOGGING = False
61
+ STREAM_LOGGING = True
62
+ RUN_HOST = '0.0.0.0'
63
+ RUN_PORT = 5001
64
+ MESSAGE_BUS_PUBLISH = False
65
+ MESSAGE_BUS_PLUGIN = 'fedmsg'
66
+ MESSAGE_BUS_KWARGS = {'modname': 'resultsdb'}
67
+ ADDITIONAL_RESULT_OUTCOMES = ${RESULTSDB_ADDITIONAL_RESULT_OUTCOMES}
68
+ # Credentials are resultsdb-updater:password
69
+ .htpasswd: |-
70
+ resultsdb-updater:$2y$05$yAlqAYWqfMoxAMrFgFMfpuQsDw.v5b4tuS8x43h6tcPP1gIXBRutq
71
+ - apiVersion: v1
72
+ kind: ConfigMap
73
+ metadata:
74
+ name: "resultsdb-test-${TEST_ID}-httpd-config"
75
+ labels:
76
+ environment: "test-${TEST_ID}"
77
+ app: resultsdb
78
+ data:
79
+ resultsdb.conf: |-
80
+ <IfModule !auth_basic_module>
81
+ LoadModule auth_basic_module '${MOD_WSGI_MODULES_DIRECTORY}/mod_auth_basic.so'
82
+ </IfModule>
83
+ <IfModule !authn_file_module>
84
+ LoadModule authn_file_module '${MOD_WSGI_MODULES_DIRECTORY}/mod_authn_file.so'
85
+ </IfModule>
86
+ <IfModule !authz_user_module>
87
+ LoadModule authz_user_module '${MOD_WSGI_MODULES_DIRECTORY}/mod_authz_user.so'
88
+ </IfModule>
89
+
90
+ <Location "/">
91
+ AuthType Basic
92
+ AuthName "Authentication Required"
93
+ AuthBasicProvider file
94
+ AuthUserFile "/etc/resultsdb/.htpasswd"
95
+ <LimitExcept GET>
96
+ Require valid-user
97
+ </LimitExcept>
98
+ </Location>
99
+ - apiVersion: v1
100
+ kind: Service
101
+ metadata:
102
+ name: "resultsdb-test-${TEST_ID}-database"
103
+ labels:
104
+ environment: "test-${TEST_ID}"
105
+ app: resultsdb
106
+ spec:
107
+ selector:
108
+ app: resultsdb
109
+ environment: "test-${TEST_ID}"
110
+ service: database
111
+ ports:
112
+ - name: postgresql
113
+ port: 5432
114
+ targetPort: 5432
115
+ - apiVersion: v1
116
+ kind: DeploymentConfig
117
+ metadata:
118
+ name: "resultsdb-test-${TEST_ID}-database"
119
+ labels:
120
+ environment: "test-${TEST_ID}"
121
+ service: database
122
+ app: resultsdb
123
+ spec:
124
+ replicas: 1
125
+ strategy:
126
+ type: Recreate
127
+ selector:
128
+ app: resultsdb
129
+ environment: "test-${TEST_ID}"
130
+ service: database
131
+ template:
132
+ metadata:
133
+ labels:
134
+ environment: "test-${TEST_ID}"
135
+ service: database
136
+ app: resultsdb
137
+ spec:
138
+ containers:
139
+ - name: postgresql
140
+ image: registry.access.redhat.com/rhscl/postgresql-95-rhel7:latest
141
+ imagePullPolicy: Always
142
+ ports:
143
+ - containerPort: 5432
144
+ protocol: TCP
145
+ resources:
146
+ limits:
147
+ memory: 512Mi
148
+ cpu: 0.4
149
+ readinessProbe:
150
+ timeoutSeconds: 1
151
+ initialDelaySeconds: 5
152
+ exec:
153
+ command: [ /bin/sh, -i, -c, "psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'" ]
154
+ livenessProbe:
155
+ timeoutSeconds: 1
156
+ initialDelaySeconds: 30
157
+ tcpSocket:
158
+ port: 5432
159
+ env:
160
+ - name: POSTGRESQL_USER
161
+ value: resultsdb
162
+ - name: POSTGRESQL_PASSWORD
163
+ valueFrom:
164
+ secretKeyRef:
165
+ name: "resultsdb-test-${TEST_ID}-secret"
166
+ key: database-password
167
+ - name: POSTGRESQL_DATABASE
168
+ value: resultsdb
169
+ triggers:
170
+ - type: ConfigChange
171
+ - apiVersion: v1
172
+ kind: Service
173
+ metadata:
174
+ name: "resultsdb-test-${TEST_ID}-api"
175
+ labels:
176
+ environment: "test-${TEST_ID}"
177
+ app: resultsdb
178
+ spec:
179
+ selector:
180
+ app: resultsdb
181
+ environment: "test-${TEST_ID}"
182
+ service: api
183
+ ports:
184
+ - name: api
185
+ port: 5001
186
+ targetPort: 5001
187
+ - apiVersion: v1
188
+ kind: Route
189
+ metadata:
190
+ name: "resultsdb-test-${TEST_ID}-api"
191
+ labels:
192
+ environment: "test-${TEST_ID}"
193
+ app: resultsdb
194
+ spec:
195
+ port:
196
+ targetPort: api
197
+ to:
198
+ kind: Service
199
+ name: "resultsdb-test-${TEST_ID}-api"
200
+ tls:
201
+ termination: edge
202
+ insecureEdgeTerminationPolicy: Redirect
203
+ - apiVersion: v1
204
+ kind: DeploymentConfig
205
+ metadata:
206
+ name: "resultsdb-test-${TEST_ID}-api"
207
+ labels:
208
+ environment: "test-${TEST_ID}"
209
+ service: api
210
+ app: resultsdb
211
+ spec:
212
+ replicas: 2
213
+ selector:
214
+ app: resultsdb
215
+ environment: "test-${TEST_ID}"
216
+ service: api
217
+ strategy:
218
+ type: Rolling
219
+ rollingParams:
220
+ pre:
221
+ failurePolicy: Abort
222
+ execNewPod:
223
+ containerName: api
224
+ command:
225
+ - /bin/sh
226
+ - -i
227
+ - -c
228
+ - |
229
+ # try for 10 minutes (600 seconds)
230
+ e=$(( $(date +%s) + 600 ))
231
+ i=0
232
+ while [ $(date +%s) -lt $e ]; do
233
+ echo 'TRY #'$((++i))
234
+ if resultsdb init_db ; then
235
+ exit 0
236
+ fi
237
+ done
238
+ exit 1
239
+ volumes:
240
+ - config-volume
241
+ - httpd-config-volume
242
+ template:
243
+ metadata:
244
+ labels:
245
+ environment: "test-${TEST_ID}"
246
+ service: api
247
+ app: resultsdb
248
+ spec:
249
+ containers:
250
+ - name: api
251
+ image: "${RESULTSDB_IMAGE}"
252
+ imagePullPolicy: Always
253
+ ports:
254
+ - containerPort: 5001
255
+ volumeMounts:
256
+ - name: config-volume
257
+ mountPath: /etc/resultsdb
258
+ readOnly: true
259
+ - name: httpd-config-volume
260
+ mountPath: /etc/httpd/conf.d
261
+ readOnly: true
262
+ readinessProbe:
263
+ timeoutSeconds: 1
264
+ initialDelaySeconds: 5
265
+ httpGet:
266
+ path: /api/v2.0/
267
+ port: 5001
268
+ livenessProbe:
269
+ timeoutSeconds: 1
270
+ initialDelaySeconds: 30
271
+ httpGet:
272
+ path: /api/v2.0/
273
+ port: 5001
274
+ # Limit to 384MB memory. This is probably *not* enough but it is
275
+ # necessary in the current environment to allow for 2 replicas and
276
+ # rolling updates, without hitting the (very aggressive) memory quota.
277
+ resources:
278
+ limits:
279
+ memory: 384Mi
280
+ cpu: 0.3
281
+ volumes:
282
+ - name: config-volume
283
+ secret:
284
+ secretName: "resultsdb-test-${TEST_ID}-config"
285
+ - name: httpd-config-volume
286
+ configMap:
287
+ name: "resultsdb-test-${TEST_ID}-httpd-config"
288
+ triggers:
289
+ - type: ConfigChange
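The readiness and liveness probes above treat a successful GET of /api/v2.0/ on port 5001 as healthy, which also makes a handy smoke test once the route is up. A sketch against a local deployment; the host and port are assumptions for a port-forwarded pod:

    import urllib.request

    # Same endpoint the probes poll; adjust the host to your route or port-forward.
    with urllib.request.urlopen("http://localhost:5001/api/v2.0/") as resp:
        print(resp.status)  # 200 means the API pod is serving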
data/poetry.lock ADDED
The diff for this file is too large to render.
 
data/pyproject.toml ADDED
@@ -0,0 +1,71 @@
1
+ [tool.poetry]
2
+ name = "resultsdb"
3
+ version = "2.2.0"
4
+ description = "The test results store engine for (not only) Fedora QA tools"
5
+ authors = [
6
+ "Red Hat, Inc. and others"
7
+ ]
8
+ license = "GPL-2.0-or-later"
9
+ readme = "README.md"
10
+ repository = "https://github.com/release-engineering/resultsdb"
11
+ homepage = "https://docs.resultsdb20.apiary.io/"
12
+
13
+ include = [
14
+ "LICENSE",
15
+ "README.md",
16
+ "alembic.ini",
17
+ "tox.ini",
18
+ "resultsdb/templates/*",
19
+ "resultsdb/static/*",
20
+ "resultsdb/alembic/*",
21
+ ]
22
+
23
+ [tool.poetry.dependencies]
24
+ python = ">=3.12,<3.14"
25
+ flask = "^3.0.1"
26
+ gunicorn = "^23.0.0"
27
+ fedora-messaging = "^3.4.1"
28
+ stomp_py = "^8.2.0"
29
+
30
+ pytest = {version = "^8.3.3", optional = true}
31
+ pytest-cov = {version = "^6.0.0", optional = true}
32
+
33
+ Flask-SQLAlchemy = "^3.1.1"
34
+ SQLAlchemy = {version = "^2.0.36"}
35
+ psycopg2-binary = {version = "^2.9.10"}
36
+ alembic = "^1.13.3"
37
+ iso8601 = "^2.1.0"
38
+ pydantic = "^2.10.2"
39
+ Flask-Pydantic = "^0.13.0"
40
+
41
+ email-validator = "^2.2.0"
42
+ python-ldap = "^3.4.3"
43
+ Flask-pyoidc = "^3.14.3"
44
+ Flask-Session = "^0.8.0"
45
+
46
+ # tracing support
47
+ opentelemetry-exporter-otlp = "^1.25.0"
48
+ opentelemetry-instrumentation = "^0.46b0"
49
+ opentelemetry-instrumentation-flask = "^0.46b0"
50
+ opentelemetry-instrumentation-sqlalchemy = "^0.46b0"
51
+ tenacity = "^9.0.0"
52
+
53
+ mod-wsgi = "^5.0.2"
54
+
55
+ [tool.poetry.extras]
56
+ test = [
57
+ "pytest",
58
+ "pytest-cov",
59
+ ]
60
+
61
+ [tool.poetry.scripts]
62
+ resultsdb = "resultsdb.__main__:cli"
63
+
64
+ [tool.poetry.plugins."resultsdb.messaging.plugins"]
65
+ dummy = "resultsdb.messaging:DummyPlugin"
66
+ fedmsg = "resultsdb.messaging:FedmsgPlugin"
67
+ stomp = "resultsdb.messaging:StompPlugin"
68
+
69
+ [build-system]
70
+ requires = ["poetry-core>=1.0.0"]
71
+ build-backend = "poetry.core.masonry.api"
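The [tool.poetry.plugins] table above registers the messaging backends as entry points in the "resultsdb.messaging.plugins" group, which is what lets MESSAGE_BUS_PLUGIN in the settings select a backend by name. A hedged sketch of the lookup (resultsdb.messaging.load_messaging_plugin presumably does something close to this):

    from importlib.metadata import entry_points

    eps = entry_points(group="resultsdb.messaging.plugins")
    plugin_cls = next(ep for ep in eps if ep.name == "dummy").load()
    # Constructor arguments come from MESSAGE_BUS_KWARGS in the settings file.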
data/renovate.json ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3
+ "extends": [
4
+ "config:best-practices",
5
+ ":enablePreCommit",
6
+ "group:all",
7
+ ":gitSignOff",
8
+ ":disableDependencyDashboard",
9
+ "schedule:weekly"
10
+ ],
11
+ "packageRules": [
12
+ {
13
+ "description": "Major, minor, patch and digest updates",
14
+ "matchUpdateTypes": [
15
+ "major",
16
+ "minor",
17
+ "patch",
18
+ "digest"
19
+ ],
20
+ "groupName": "all dependencies",
21
+ "automerge": true,
22
+ "platformAutomerge": true
23
+ }
24
+ ],
25
+ "constraints": {
26
+ "python": ">=3.12,<3.14"
27
+ },
28
+ "configMigration": true
29
+ }
data/resultsdb/__init__.py ADDED
@@ -0,0 +1,317 @@
1
+ # Copyright 2013-2014, Red Hat, Inc
2
+ #
3
+ # This program is free software; you can redistribute it and/or modify
4
+ # it under the terms of the GNU General Public License as published by
5
+ # the Free Software Foundation; either version 2 of the License, or
6
+ # (at your option) any later version.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License along
14
+ # with this program; if not, write to the Free Software Foundation, Inc.,
15
+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
16
+ #
17
+ # Authors:
18
+ # Josef Skladanka <[email protected]>
19
+ # Ralph Bean <[email protected]>
20
+
21
+ import json
22
+ import logging
23
+ import logging.config as logging_config
24
+ import logging.handlers
25
+ import os
26
+
27
+ from flask import Flask, current_app, jsonify, send_from_directory, session
28
+ from flask_pydantic.exceptions import ValidationError
29
+ from flask_pyoidc import OIDCAuthentication
30
+ from flask_pyoidc.provider_configuration import (
31
+ ClientMetadata,
32
+ ProviderConfiguration,
33
+ ProviderMetadata,
34
+ )
35
+ from flask_pyoidc.user_session import UserSession
36
+ from flask_session import Session
37
+
38
+ from resultsdb.controllers.api_v2 import api as api_v2
39
+ from resultsdb.controllers.api_v3 import api as api_v3
40
+ from resultsdb.controllers.api_v3 import create_endpoints
41
+ from resultsdb.controllers.main import main
42
+ from resultsdb.messaging import load_messaging_plugin
43
+ from resultsdb.models import db
44
+ from resultsdb.proxy import ReverseProxied
45
+ from resultsdb.tracing import setup_tracing
46
+
47
+ from . import config
48
+
49
+ # the version as used in pyproject.toml
50
+ __version__ = "2.2.0"
51
+
52
+ VALIDATION_KEYS = frozenset({"input", "loc", "msg", "type", "url"})
53
+
54
+
55
+ def create_app(config_obj=None):
56
+ app = Flask(__name__)
57
+ app.secret_key = "replace-me-with-something-random" # nosec # NOSONAR
58
+
59
+ # make sure app behaves when behind a proxy
60
+ app.wsgi_app = ReverseProxied(app.wsgi_app)
61
+
62
+ # Expose the __version__ variable in templates
63
+ app.jinja_env.globals["app_version"] = __version__
64
+
65
+ # Checks for env variable OPENSHIFT_PROD to trigger OpenShift codepath on init
66
+ # The main difference is that settings will be queried from env
67
+ # (check config.openshift_config())
68
+ # Possible values are:
69
+ # "1" - OpenShift production deployment
70
+ # "0" - OpenShift testing deployment
71
+ openshift = os.getenv("OPENSHIFT_PROD")
72
+
73
+ # Load default config, then override that with a config file
74
+ if not config_obj:
75
+ if os.getenv("DEV") == "true":
76
+ config_obj = "resultsdb.config.DevelopmentConfig"
77
+ elif os.getenv("TEST") == "true" or openshift == "0":
78
+ config_obj = "resultsdb.config.TestingConfig"
79
+ else:
80
+ config_obj = "resultsdb.config.ProductionConfig"
81
+
82
+ app.config.from_object(config_obj)
83
+
84
+ if openshift:
85
+ config.openshift_config(app.config, openshift)
86
+
87
+ default_config_file = app.config["DEFAULT_CONFIG_FILE"]
88
+ config_file = os.environ.get("RESULTSDB_CONFIG", default_config_file)
89
+ if config_file and os.path.exists(config_file):
90
+ app.config.from_pyfile(config_file)
91
+
92
+ if app.config["PRODUCTION"]:
93
+ if app.secret_key == "replace-me-with-something-random": # nosec # NOSONAR
94
+ raise Warning("You need to change the app.secret_key value for production")
95
+
96
+ setup_logging(app)
97
+
98
+ app.logger.info("Using configuration object: %s", config_obj)
99
+ if openshift:
100
+ app.logger.info("Using OpenShift configuration")
101
+ app.logger.info("Using configuration file: %s", config_file)
102
+
103
+ if app.config["SHOW_DB_URI"]:
104
+ app.logger.debug("Using DBURI: %s", app.config["SQLALCHEMY_DATABASE_URI"])
105
+
106
+ db.init_app(app)
107
+
108
+ with app.app_context():
109
+ setup_tracing(app, db.engine)
110
+
111
+ init_session(app)
112
+
113
+ register_handlers(app)
114
+
115
+ app.register_blueprint(main)
116
+ app.register_blueprint(api_v2, url_prefix="/api/v2.0")
117
+ app.add_url_rule("/favicon.png", view_func=favicon)
118
+
119
+ if app.config["AUTH_MODULE"] == "oidc":
120
+ app.logger.info("OpenIDConnect authentication is enabled")
121
+ enable_oidc(app)
122
+ app.register_blueprint(api_v3, url_prefix="/api/v3")
123
+ else:
124
+ app.logger.info("OpenIDConnect authentication is disabled")
125
+
126
+ setup_messaging(app)
127
+
128
+ app.logger.debug("Finished ResultsDB initialization")
129
+ return app
130
+
131
+
132
+ def setup_logging(app):
133
+ # Use LOGGING if defined instead of the old options
134
+ log_config = app.config.get("LOGGING")
135
+ if log_config:
136
+ logging_config.dictConfig(log_config)
137
+ return
138
+
139
+ fmt = "[%(filename)s:%(lineno)d] " if app.debug else "%(module)-12s "
140
+ fmt += "%(asctime)s %(levelname)-7s %(message)s"
141
+ datefmt = "%Y-%m-%d %H:%M:%S"
142
+ loglevel = logging.DEBUG if app.debug else logging.INFO
143
+ formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
144
+
145
+ root_logger = logging.getLogger("")
146
+ root_logger.setLevel(logging.DEBUG)
147
+
148
+ # Keep the old way to setup logging in settings.py or config.py, example:
149
+ # LOGFILE = '/var/log/resultsdb/resultsdb.log'
150
+ # FILE_LOGGING = False
151
+ # SYSLOG_LOGGING = False
152
+ # STREAM_LOGGING = True
153
+ if app.config["STREAM_LOGGING"]:
154
+ print("doing stream logging")
155
+ stream_handler = logging.StreamHandler()
156
+ stream_handler.setLevel(loglevel)
157
+ stream_handler.setFormatter(formatter)
158
+ root_logger.addHandler(stream_handler)
159
+ app.logger.addHandler(stream_handler)
160
+
161
+ if app.config["SYSLOG_LOGGING"]:
162
+ print("doing syslog logging")
163
+ syslog_handler = logging.handlers.SysLogHandler(
164
+ address="/dev/log", facility=logging.handlers.SysLogHandler.LOG_LOCAL4
165
+ )
166
+ syslog_handler.setLevel(loglevel)
167
+ syslog_handler.setFormatter(formatter)
168
+ root_logger.addHandler(syslog_handler)
169
+ app.logger.addHandler(syslog_handler)
170
+
171
+ if app.config["FILE_LOGGING"] and app.config["LOGFILE"]:
172
+ print(f"doing file logging to {app.config['LOGFILE']}")
173
+ file_handler = logging.handlers.RotatingFileHandler(
174
+ app.config["LOGFILE"], maxBytes=500000, backupCount=5
175
+ )
176
+ file_handler.setLevel(loglevel)
177
+ file_handler.setFormatter(formatter)
178
+ root_logger.addHandler(file_handler)
179
+ app.logger.addHandler(file_handler)
180
+
181
+
182
+ def setup_messaging(app):
183
+ app.messaging_plugin = None
184
+ if not app.config["MESSAGE_BUS_PUBLISH"]:
185
+ app.logger.info("No messaging plugin selected")
186
+ return
187
+
188
+ plugin_name = app.config["MESSAGE_BUS_PLUGIN"]
189
+ app.logger.info("Using messaging plugin %s", plugin_name)
190
+ plugin_args = app.config["MESSAGE_BUS_KWARGS"]
191
+ app.messaging_plugin = load_messaging_plugin(
192
+ name=plugin_name,
193
+ plugin_args=plugin_args,
194
+ )
195
+
196
+
197
+ def register_handlers(app):
198
+ # TODO: find out why error handler works for 404 but not for 400
199
+ @app.errorhandler(400)
200
+ def bad_request(error):
201
+ return jsonify({"message": "Bad request"}), 400
202
+
203
+ @app.errorhandler(401)
204
+ def unauthorized(error):
205
+ app.logger.warning("Unauthorized access: %s", error)
206
+ return jsonify({"message": str(error)}), 401
207
+
208
+ @app.errorhandler(403)
209
+ def forbidden(error):
210
+ app.logger.warning("Permission denied: %s", error)
211
+ return jsonify({"message": str(error)}), 403
212
+
213
+ @app.errorhandler(404)
214
+ def not_found(error):
215
+ return jsonify({"message": "Not found"}), 404
216
+
217
+ @app.errorhandler(500)
218
+ def internal_server_error(error):
219
+ app.logger.error("Internal error: %s", error)
220
+ return jsonify({"message": "Internal Server Error"}), 500
221
+
222
+ @app.errorhandler(502)
223
+ def bad_gateway(error):
224
+ app.logger.error("External error received: %s", error)
225
+ return jsonify({"message": "Bad Gateway"}), 502
226
+
227
+ app.register_error_handler(ValidationError, handle_validation_error)
228
+
229
+
230
+ def handle_validation_error(error: ValidationError):
231
+ errors = (
232
+ error.body_params
233
+ or error.form_params
234
+ or error.path_params
235
+ or error.query_params
236
+ )
237
+ # Keep only interesting stuff and remove objects potentially
238
+ # unserializable in JSON.
239
+ err = [{k: v for k, v in e.items() if k in VALIDATION_KEYS} for e in errors]
240
+ response = jsonify({"validation_error": err})
241
+ response.status_code = 400
242
+ return response
243
+
244
+
245
+ def init_session(app):
246
+ app.config["SESSION_SQLALCHEMY"] = db
247
+ app.server_session = Session(app)
248
+ if app.config["SESSION_TYPE"] == "sqlalchemy":
249
+ import sqlalchemy
250
+
251
+ with app.app_context():
252
+ inspect = sqlalchemy.inspect(db.engine)
253
+ table = app.config["SESSION_SQLALCHEMY_TABLE"]
254
+ if not inspect.has_table(table):
255
+ db.create_all()
256
+
257
+
258
+ def enable_oidc(app):
259
+ with open(app.config["OIDC_CLIENT_SECRETS"]) as client_secrets_file:
260
+ client_secrets = json.load(client_secrets_file)
261
+
262
+ provider = app.config.get("OIDC_PROVIDER", "web")
263
+ metadata = client_secrets[provider]
264
+ app.config.update(
265
+ {
266
+ "OIDC_PROVIDER": provider,
267
+ "OIDC_REDIRECT_URI": metadata["redirect_uris"][0],
268
+ }
269
+ )
270
+ client_metadata = ClientMetadata(metadata["client_id"], metadata["client_secret"])
271
+ provider_metadata = ProviderMetadata(
272
+ issuer=metadata["issuer"],
273
+ authorization_endpoint=metadata["auth_uri"],
274
+ token_endpoint=metadata["token_uri"],
275
+ userinfo_endpoint=metadata["userinfo_uri"],
276
+ introspection_endpoint=metadata["token_introspection_uri"],
277
+ jwks_uri=metadata.get(
278
+ "jwks_uri", metadata["token_uri"].replace("/token", "/certs")
279
+ ),
280
+ )
281
+ config = ProviderConfiguration(
282
+ issuer=metadata["issuer"],
283
+ client_metadata=client_metadata,
284
+ provider_metadata=provider_metadata,
285
+ session_refresh_interval_seconds=app.config[
286
+ "OIDC_SESSION_REFRESH_INTERVAL_SECONDS"
287
+ ],
288
+ )
289
+ oidc = OIDCAuthentication({provider: config}, app)
290
+
291
+ @app.route("/auth/oidclogin")
292
+ @oidc.oidc_auth(provider)
293
+ def login():
294
+ user_session = UserSession(session)
295
+ return jsonify(
296
+ {
297
+ "username": user_session.userinfo[app.config["OIDC_USERNAME_FIELD"]],
298
+ "token": user_session.access_token,
299
+ }
300
+ )
301
+
302
+ @app.route("/auth/logout")
303
+ @oidc.oidc_logout
304
+ def logout():
305
+ return jsonify({"message": "Logged out"})
306
+
307
+ app.oidc = oidc
308
+
309
+ create_endpoints(oidc, provider)
310
+
311
+
312
+ def favicon():
313
+ return send_from_directory(
314
+ os.path.join(current_app.root_path, "static"),
315
+ "favicon.png",
316
+ mimetype="image/png",
317
+ )
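Since create_app() accepts a dotted config path, the factory is easy to exercise end to end. A minimal sketch, assuming TestingConfig points at a throwaway database the blueprint can reach:

    from resultsdb import create_app

    app = create_app("resultsdb.config.TestingConfig")
    with app.test_client() as client:
        resp = client.get("/api/v2.0/")  # same path the OpenShift probes use
        print(resp.status_code)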
data/resultsdb/__main__.py ADDED
@@ -0,0 +1,158 @@
1
+ # Copyright 2013, Red Hat, Inc
2
+ #
3
+ # This program is free software; you can redistribute it and/or modify
4
+ # it under the terms of the GNU General Public License as published by
5
+ # the Free Software Foundation; either version 2 of the License, or
6
+ # (at your option) any later version.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License along
14
+ # with this program; if not, write to the Free Software Foundation, Inc.,
15
+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
16
+ #
17
+ # Authors:
18
+ # Josef Skladanka <[email protected]>
19
+
20
+ import click
21
+ from alembic import command as al_command
22
+ from alembic.config import Config
23
+ from alembic.migration import MigrationContext
24
+ from flask.cli import FlaskGroup
25
+ import sqlalchemy
26
+
27
+ from resultsdb import create_app
28
+ from resultsdb.models import db
29
+ from resultsdb.models.results import Group, Result, ResultData, Testcase
30
+
31
+
32
+ def get_alembic_config():
33
+ # the location of the alembic ini file and alembic scripts changes when
34
+ # installed via package
35
+ alembic_cfg = Config()
36
+ alembic_cfg.set_main_option("script_location", "resultsdb:alembic")
37
+ return alembic_cfg
38
+
39
+
40
+ @click.group(cls=FlaskGroup, create_app=create_app)
41
+ def cli():
42
+ """Management script for ResultsDB server."""
43
+
44
+
45
+ @cli.command(name="upgrade_db")
46
+ def upgrade_db():
47
+ print("Upgrading Database to Latest Revision")
48
+ alembic_cfg = get_alembic_config()
49
+ al_command.upgrade(alembic_cfg, "head")
50
+
51
+
52
+ @cli.command(name="init_alembic")
53
+ def init_alembic():
54
+ alembic_cfg = get_alembic_config()
55
+
56
+ # check to see if the db has already been initialized by checking for an
57
+ # alembic revision
58
+ context = MigrationContext.configure(db.engine.connect())
59
+ current_rev = context.get_current_revision()
60
+
61
+ if not current_rev:
62
+ print("Initializing alembic")
63
+ print(" - Setting the current version to the first revision")
64
+ al_command.stamp(alembic_cfg, "15f5eeb9f635")
65
+ else:
66
+ print("Alembic already initialized")
67
+
68
+
69
+ @cli.command(name="init_db")
70
+ @click.pass_context
71
+ def initialize_db(ctx):
72
+ alembic_cfg = get_alembic_config()
73
+
74
+ print("Initializing database")
75
+
76
+ # check whether the table 'testcase' exists;
77
+ # if it does not, we assume that the database is empty
78
+ insp = sqlalchemy.inspect(db.engine)
79
+ table_names = insp.get_table_names()
80
+ if "testcase" not in table_names and "Testcase" not in table_names:
81
+ print(" - Creating tables")
82
+ db.create_all()
83
+ print(" - Stamping alembic's current version to 'head'")
84
+ al_command.stamp(alembic_cfg, "head")
85
+
86
+ # check to see if the db has already been initialized by checking for an
87
+ # alembic revision
88
+ context = MigrationContext.configure(db.engine.connect())
89
+ current_rev = context.get_current_revision()
90
+ if current_rev:
91
+ print(f" - Database is currently at rev {current_rev}")
92
+ ctx.invoke(upgrade_db)
93
+ else:
94
+ print("WARN: You need to have your db stamped with an alembic revision")
95
+ print(" Run 'init_alembic' sub-command first.")
96
+
97
+
98
+ @cli.command(name="mock_data")
99
+ def mock_data():
100
+ print("Populating tables with mock-data")
101
+
102
+ if not db.session.query(Testcase).count():
103
+ print(" - Testcase, Job, Result, ResultData")
104
+ tc1 = Testcase(ref_url="https://example.com/depcheck", name="depcheck")
105
+ tc2 = Testcase(ref_url="https://example.com/rpmlint", name="rpmlint")
106
+
107
+ j1 = Group(
108
+ uuid="5b3f47b4-2ba2-11e5-a343-5254007dccf9",
109
+ ref_url="https://example.com/job1",
110
+ )
111
+
112
+ j2 = Group(
113
+ uuid="4e575b2c-2ba2-11e5-a343-5254007dccf9",
114
+ ref_url="https://example.com/job2",
115
+ )
116
+
117
+ r1 = Result(
118
+ groups=[j1],
119
+ testcase=tc1,
120
+ outcome="PASSED",
121
+ ref_url="https://example.com/r1",
122
+ )
123
+ r2 = Result(
124
+ groups=[j1, j2],
125
+ testcase=tc1,
126
+ outcome="FAILED",
127
+ ref_url="https://example.com/r2",
128
+ )
129
+ r3 = Result(
130
+ groups=[j2],
131
+ testcase=tc2,
132
+ outcome="FAILED",
133
+ ref_url="https://example.com/r2",
134
+ )
135
+
136
+ ResultData(r1, "item", "cabal-rpm-0.8.3-1.fc18")
137
+ ResultData(r1, "arch", "x86_64")
138
+ ResultData(r1, "type", "koji_build")
139
+
140
+ ResultData(r2, "item", "htop-1.0-1.fc22")
141
+ ResultData(r2, "arch", "i386")
142
+ ResultData(r2, "type", "bodhi_update")
143
+
144
+ ResultData(r3, "item", "cabal-rpm-0.8.3-1.fc18")
145
+ ResultData(r3, "arch", "i386")
146
+ ResultData(r3, "type", "bodhi_update")
147
+
148
+ db.session.add(tc1)
149
+ db.session.add(j1)
150
+ db.session.add(j2)
151
+
152
+ db.session.commit()
153
+ else:
154
+ print(" - skipped Testcase, Job, Result, ResultData")
155
+
156
+
157
+ if __name__ == "__main__":
158
+ cli()
data/resultsdb/alembic/env.py ADDED
@@ -0,0 +1,80 @@
1
+ # add '.' to the pythonpath to support migration inside development env
2
+ import sys
3
+
4
+ sys.path.append(".")
5
+
6
+ from alembic import context
7
+ from flask import current_app as app
8
+ from sqlalchemy import engine_from_config, pool
9
+
10
+ from resultsdb.models import db
11
+
12
+ # this is the Alembic Config object, which provides
13
+ # access to the values within the .ini file in use.
14
+ config = context.config
15
+
16
+ # Interpret the config file for Python logging.
17
+ # This line sets up loggers basically.
18
+ # fileConfig(config.config_file_name)
19
+
20
+ # add your model's MetaData object here
21
+ # for 'autogenerate' support
22
+
23
+ target_metadata = db.metadata
24
+ # target_metadata = None
25
+
26
+ # other values from the config, defined by the needs of env.py,
27
+ # can be acquired:
28
+ # my_important_option = config.get_main_option("my_important_option")
29
+ # ... etc.
30
+
31
+
32
+ def run_migrations_offline():
33
+ """Run migrations in 'offline' mode.
34
+
35
+ This configures the context with just a URL
36
+ and not an Engine, though an Engine is acceptable
37
+ here as well. By skipping the Engine creation
38
+ we don't even need a DBAPI to be available.
39
+
40
+ Calls to context.execute() here emit the given string to the
41
+ script output.
42
+
43
+ """
44
+ url = config.get_main_option("sqlalchemy.url")
45
+ context.configure(url=url, compare_type=True)
46
+
47
+ with context.begin_transaction():
48
+ context.run_migrations()
49
+
50
+
51
+ def run_migrations_online():
52
+ """Run migrations in 'online' mode.
53
+
54
+ In this scenario we need to create an Engine
55
+ and associate a connection with the context.
56
+
57
+ """
58
+
59
+ alembic_config = config.get_section(config.config_ini_section)
60
+
61
+ alembic_config["sqlalchemy.url"] = app.config["SQLALCHEMY_DATABASE_URI"]
62
+
63
+ engine = engine_from_config(
64
+ alembic_config, prefix="sqlalchemy.", poolclass=pool.NullPool
65
+ )
66
+
67
+ connection = engine.connect()
68
+ context.configure(connection=connection, target_metadata=target_metadata)
69
+
70
+ try:
71
+ with context.begin_transaction():
72
+ context.run_migrations()
73
+ finally:
74
+ connection.close()
75
+
76
+
77
+ if context.is_offline_mode():
78
+ run_migrations_offline()
79
+ else:
80
+ run_migrations_online()
data/resultsdb/alembic/script.py.mako ADDED
@@ -0,0 +1,24 @@
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+
9
+ # revision identifiers, used by Alembic.
10
+ revision = ${repr(up_revision)}
11
+ down_revision = ${repr(down_revision)}
12
+ branch_labels = ${repr(branch_labels)}
13
+ depends_on = ${repr(depends_on)}
14
+
15
+ from alembic import op
16
+ import sqlalchemy as sa
17
+ ${imports if imports else ""}
18
+
19
+ def upgrade():
20
+ ${upgrades if upgrades else "pass"}
21
+
22
+
23
+ def downgrade():
24
+ ${downgrades if downgrades else "pass"}
data/resultsdb/alembic/versions/153c416322c2_create_indexes_on_foreign_keys.py ADDED
@@ -0,0 +1,33 @@
1
+ """Create indexes on Foreign Keys
2
+
3
+ Revision ID: 153c416322c2
4
+ Revises: 17ec41bd6e9a
5
+ Create Date: 2015-04-14 14:30:03.096651
6
+
7
+ """
8
+
9
+ from alembic import op
10
+
11
+ # revision identifiers, used by Alembic.
12
+ revision = "153c416322c2"
13
+ down_revision = "17ec41bd6e9a"
14
+ branch_labels = None
15
+ depends_on = None
16
+
17
+
18
+ def upgrade():
19
+ ### commands auto generated by Alembic - please adjust! ###
20
+ op.create_index("result_fk_job_id", "result", ["job_id"], unique=False)
21
+ op.create_index("result_fk_testcase_id", "result", ["testcase_id"], unique=False)
22
+ op.create_index(
23
+ "result_data_fk_result_id", "result_data", ["result_id"], unique=False
24
+ )
25
+ ### end Alembic commands ###
26
+
27
+
28
+ def downgrade():
29
+ ### commands auto generated by Alembic - please adjust! ###
30
+ op.drop_index("result_data_fk_result_id", table_name="result_data")
31
+ op.drop_index("result_fk_testcase_id", table_name="result")
32
+ op.drop_index("result_fk_job_id", table_name="result")
33
+ ### end Alembic commands ###
data/resultsdb/alembic/versions/15f5eeb9f635_initial_revision.py ADDED
@@ -0,0 +1,25 @@
1
+ """Initial revision
2
+
3
+ Revision ID: 15f5eeb9f635
4
+ Revises:
5
+ Create Date: 2015-02-23 17:40:33.366352
6
+
7
+ """
8
+
9
+ # revision identifiers, used by Alembic.
10
+ revision = "15f5eeb9f635"
11
+ down_revision = None
12
+ branch_labels = None
13
+ depends_on = None
14
+
15
+
16
+ def upgrade():
17
+ ### commands auto generated by Alembic - please adjust! ###
18
+ pass
19
+ ### end Alembic commands ###
20
+
21
+
22
+ def downgrade():
23
+ ### commands auto generated by Alembic - please adjust! ###
24
+ pass
25
+ ### end Alembic commands ###
data/resultsdb/alembic/versions/17ec41bd6e9a_added_uuid_column_to_the_job_table.py ADDED
@@ -0,0 +1,28 @@
1
+ """Added UUID column to the Job table
2
+
3
+ Revision ID: 17ec41bd6e9a
4
+ Revises: 433d0b5b3b96
5
+ Create Date: 2015-02-23 17:45:57.085449
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ from alembic import op
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "17ec41bd6e9a"
14
+ down_revision = "433d0b5b3b96"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade():
20
+ ### commands auto generated by Alembic - please adjust! ###
21
+ op.add_column("job", sa.Column("uuid", sa.String(length=36), nullable=True))
22
+ ### end Alembic commands ###
23
+
24
+
25
+ def downgrade():
26
+ ### commands auto generated by Alembic - please adjust! ###
27
+ op.drop_column("job", "uuid")
28
+ ### end Alembic commands ###
data/resultsdb/alembic/versions/34760e10040b_add_aborted_outcome.py ADDED
@@ -0,0 +1,76 @@
1
+ """Add ABORTED outcome
2
+
3
+ Revision ID: 34760e10040b
4
+ Revises: 4ace44a44bf
5
+ Create Date: 2015-04-21 14:01:41.374105
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ from alembic import op
11
+ from sqlalchemy import text
12
+
13
+ # revision identifiers, used by Alembic.
14
+ revision = "34760e10040b"
15
+ down_revision = "4ace44a44bf"
16
+ branch_labels = None
17
+ depends_on = None
18
+
19
+ old_values = ("PASSED", "INFO", "FAILED", "ERROR", "WAIVED", "NEEDS_INSPECTION")
20
+ new_values = (
21
+ "PASSED",
22
+ "INFO",
23
+ "FAILED",
24
+ "ERROR",
25
+ "WAIVED",
26
+ "NEEDS_INSPECTION",
27
+ "ABORTED",
28
+ )
29
+
30
+ old_enum = sa.Enum(*old_values, name="resultoutcome")
31
+ tmp_enum = sa.Enum(*new_values, name="resultoutcome_tmp")
32
+ new_enum = sa.Enum(*new_values, name="resultoutcome")
33
+
34
+
35
+ def upgrade():
36
+ # this migration is postgresql specific and fails on sqlite
37
+ if op.get_bind().engine.url.drivername.startswith("postgresql"):
38
+ tmp_enum.create(op.get_bind(), checkfirst=False)
39
+ op.execute(
40
+ text(
41
+ "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp "
42
+ " USING outcome::text::resultoutcome_tmp"
43
+ )
44
+ )
45
+ old_enum.drop(op.get_bind(), checkfirst=False)
46
+ new_enum.create(op.get_bind(), checkfirst=False)
47
+ op.execute(
48
+ text(
49
+ "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome "
50
+ " USING outcome::text::resultoutcome"
51
+ )
52
+ )
53
+ tmp_enum.drop(op.get_bind(), checkfirst=False)
54
+
55
+
56
+ def downgrade():
57
+ # this migration is postgresql specific and fails on sqlite
58
+ if op.get_bind().engine.url.drivername.startswith("postgresql"):
59
+ op.execute(text("UPDATE result SET outcome='ERROR' WHERE outcome='ABORTED'"))
60
+
61
+ tmp_enum.create(op.get_bind(), checkfirst=False)
62
+ op.execute(
63
+ text(
64
+ "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp "
65
+ " USING outcome::text::resultoutcome_tmp"
66
+ )
67
+ )
68
+ new_enum.drop(op.get_bind(), checkfirst=False)
69
+ old_enum.create(op.get_bind(), checkfirst=False)
70
+ op.execute(
71
+ text(
72
+ "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome "
73
+ " USING outcome::text::resultoutcome"
74
+ )
75
+ )
76
+ tmp_enum.drop(op.get_bind(), checkfirst=False)
data/resultsdb/alembic/versions/433d0b5b3b96_added_index_on_the_keyval_store.py ADDED
@@ -0,0 +1,33 @@
1
+ """Added index on the Keyval store
2
+
3
+ Revision ID: 433d0b5b3b96
4
+ Revises: 15f5eeb9f635
5
+ Create Date: 2015-02-24 19:57:57.643189
6
+
7
+ """
8
+
9
+ from alembic import op
10
+
11
+ # revision identifiers, used by Alembic.
12
+ revision = "433d0b5b3b96"
13
+ down_revision = "15f5eeb9f635"
14
+ branch_labels = None
15
+ depends_on = None
16
+
17
+
18
+ def upgrade():
19
+ ### commands auto generated by Alembic - please adjust! ###
20
+ op.create_index(
21
+ "rd_key_value_idx",
22
+ "result_data",
23
+ ["key", "value"],
24
+ unique=False,
25
+ mysql_length={"value": 50, "key": 20},
26
+ )
27
+ ### end Alembic commands ###
28
+
29
+
30
+ def downgrade():
31
+ ### commands auto generated by Alembic - please adjust! ###
32
+ op.drop_index("rd_key_value_idx", table_name="result_data")
33
+ ### end Alembic commands ###
data/resultsdb/alembic/versions/4ace44a44bf_change_index_on_result_data_so_like_can_.py ADDED
@@ -0,0 +1,35 @@
+ """Change index on result_data so LIKE can use it
+
+ Revision ID: 4ace44a44bf
+ Revises: 153c416322c2
+ Create Date: 2015-04-14 17:20:32.575195
+
+ """
+
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "4ace44a44bf"
+ down_revision = "153c416322c2"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade():
+     ### commands auto generated by Alembic - please adjust! ###
+     op.create_index(
+         "result_data_idx_key_value",
+         "result_data",
+         ["key", "value"],
+         unique=False,
+         postgresql_ops={"value": "text_pattern_ops", "key": "text_pattern_ops"},
+     )
+     op.drop_index("rd_key_value_idx", table_name="result_data")
+     ### end Alembic commands ###
+
+
+ def downgrade():
+     ### commands auto generated by Alembic - please adjust! ###
+     op.create_index("rd_key_value_idx", "result_data", ["key", "value"], unique=False)
+     op.drop_index("result_data_idx_key_value", table_name="result_data")
+     ### end Alembic commands ###
data/resultsdb/alembic/versions/4bf1390f06d1_added_index_on_submit_time.py ADDED
@@ -0,0 +1,27 @@
+ """Added index on submit_time
+
+ Revision ID: 4bf1390f06d1
+ Revises: 34760e10040b
+ Create Date: 2015-11-03 13:06:35.167227
+
+ """
+
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "4bf1390f06d1"
+ down_revision = "34760e10040b"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade():
+     ### commands auto generated by Alembic - please adjust! ###
+     op.create_index("result_submit_time", "result", ["submit_time"], unique=False)
+     ### end Alembic commands ###
+
+
+ def downgrade():
+     ### commands auto generated by Alembic - please adjust! ###
+     op.drop_index("result_submit_time", table_name="result")
+     ### end Alembic commands ###
data/resultsdb/alembic/versions/4dbe714897fe_remove_the_user_model.py ADDED
@@ -0,0 +1,31 @@
+ """Remove the user model
+
+ Revision ID: 4dbe714897fe
+ Revises: dbfab576c81
+ Create Date: 2016-10-17 15:52:14.061320
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "4dbe714897fe"
+ down_revision = "dbfab576c81"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade():
+     op.drop_table("user")
+
+
+ def downgrade():
+     op.create_table(
+         "user",
+         sa.Column("id", sa.INTEGER(), nullable=False),
+         sa.Column("username", sa.VARCHAR(length=80), nullable=True),
+         sa.Column("pw_hash", sa.VARCHAR(length=120), nullable=True),
+         sa.PrimaryKeyConstraint("id"),
+         sa.UniqueConstraint("username"),
+     )
@@ -0,0 +1,186 @@
 
+ """Change schema to v2.0 - step 1 - prepare columns
+
+ Revision ID: 540dbe71fa91
+ Revises: 978007ecd2b
+ Create Date: 2016-08-23 20:10:05.734728
+
+ """
+
+ import logging
+
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.orm import relationship, sessionmaker
+ from sqlalchemy.orm.decl_api import DeclarativeMeta
+ from sqlalchemy.sql import text
+
+ # revision identifiers, used by Alembic.
+ revision = "540dbe71fa91"
+ down_revision = "978007ecd2b"
+ branch_labels = None
+ depends_on = None
+
+ Session = sessionmaker()
+ Base: DeclarativeMeta = declarative_base()
+
+
+ class Job(Base):
+     __tablename__ = "job"
+
+     id = sa.Column(sa.Integer, primary_key=True)
+     uuid = sa.Column(sa.String(36), unique=True)
+     results = relationship("Result", backref="job")
+
+
+ class Result(Base):
+     __tablename__ = "result"
+
+     id = sa.Column(sa.Integer, primary_key=True)
+     job_id = sa.Column(sa.Integer, sa.ForeignKey("job.id"))
+
+
+ def upgrade():
+     # Merge duplicate Jobs
+     logger = logging.getLogger("alembic")
+     connection = op.get_bind()
+     session = Session(bind=connection)
+     merge_targets = {}
+     jobs_to_delete = []
+
+     job_count_query = connection.execute(
+         text(
+             "select count(*) from job where uuid in (select uuid from job group by uuid having count(uuid) > 1);"
+         )
+     )
+     job_count = -1
+     for row in job_count_query:
+         job_count = row[0]
+
+     logger.info("Jobs marked for inspection: %s", job_count)
+
+     job_query = (
+         session.query(Job)
+         .from_statement(
+             text(
+                 "select id, uuid from job where uuid in (select uuid from job group by uuid having count(uuid) > 1) order by id;"
+             )
+         )
+         .yield_per(100)
+     )
+
+     j = r = 0
+     for job in job_query:
+         j += 1
+         primary = merge_targets.setdefault(job.uuid, job)
+         if primary.id != job.id:
+             for result in job.results:
+                 r += 1
+                 result.job_id = primary.id
+                 session.add(result)
+             jobs_to_delete.append(job)
+         if not j % 1000:
+             logger.info("Jobs seen: %s out of %s", j, job_count)
+             logger.info("Results marked for move: %s", r)
+             session.commit()
+     session.commit()
+     logger.info("Removing duplicate jobs")
+     for job in jobs_to_delete:
+         session.delete(job)
+     session.commit()
+
+     logger.info("Changing table structure")
+
+     # JOB
+     op.rename_table("job", "group")
+     op.alter_column("group", "name", new_column_name="description")
+     op.drop_column("group", "status")
+     op.drop_column("group", "start_time")
+     op.drop_column("group", "end_time")
+     op.create_unique_constraint(None, "group", ["uuid"])
+     op.create_index(
+         "group_idx_uuid",
+         "group",
+         ["uuid"],
+         unique=False,
+         postgresql_ops={"uuid": "text_pattern_ops"},
+     )
+
+     # RESULT
+     op.add_column("result", sa.Column("testcase_name", sa.Text(), nullable=True))
+     op.alter_column("result", "summary", new_column_name="note")
+     op.alter_column("result", "log_url", new_column_name="ref_url")
+     op.create_index(
+         "result_fk_testcase_name",
+         "result",
+         ["testcase_name"],
+         unique=False,
+         postgresql_ops={"testcase_name": "text_pattern_ops"},
+     )
+     op.drop_index("result_fk_job_id", table_name="result")
+     op.drop_index("result_fk_testcase_id", table_name="result")
+     op.drop_constraint("result_testcase_id_fkey", "result", type_="foreignkey")
+     op.drop_constraint("result_job_id_fkey", "result", type_="foreignkey")
+     op.create_foreign_key(None, "result", "testcase", ["testcase_name"], ["name"])
+
+     # TESTCASE
+     op.alter_column("testcase", "url", new_column_name="ref_url")
+
+     # MANY TO MANY
+     op.create_table(
+         "groups_to_results",
+         sa.Column("id", sa.Integer(), nullable=False),
+         sa.Column("group_uuid", sa.String(36), nullable=True),
+         sa.Column("result_id", sa.Integer(), nullable=True),
+         sa.ForeignKeyConstraint(
+             ["group_uuid"],
+             ["group.uuid"],
+         ),
+         sa.ForeignKeyConstraint(
+             ["result_id"],
+             ["result.id"],
+         ),
+         sa.PrimaryKeyConstraint("id"),
+     )
+     op.create_index(
+         "gtr_fk_group_uuid",
+         "groups_to_results",
+         ["group_uuid"],
+         unique=False,
+         postgresql_ops={"uuid": "text_pattern_ops"},
+     )
+     op.create_index(
+         "gtr_fk_result_id", "groups_to_results", ["result_id"], unique=False
+     )
+
+
+ def downgrade():
+     # TESTCASE
+     op.alter_column("testcase", "ref_url", new_column_name="url")
+
+     # RESULT
+     op.alter_column("result", "note", new_column_name="summary")
+     op.alter_column("result", "ref_url", new_column_name="log_url")
+     op.drop_constraint("result_testcase_name_fkey", "result", type_="foreignkey")
+     op.create_index("result_fk_testcase_id", "result", ["testcase_id"], unique=False)
+     op.create_index("result_fk_job_id", "result", ["job_id"], unique=False)
+     op.drop_index("result_fk_testcase_name", table_name="result")
+     op.drop_column("result", "testcase_name")
+
+     # JOB
+     op.rename_table("group", "job")
+     op.alter_column("job", "description", new_column_name="name")
+     op.add_column("job", sa.Column("end_time", sa.DateTime(), nullable=True))
+     op.add_column("job", sa.Column("start_time", sa.DateTime(), nullable=True))
+     op.add_column("job", sa.Column("status", sa.VARCHAR(length=16), nullable=True))
+     op.drop_index("group_idx_uuid", table_name="job")
+
+     # MANY TO MANY
+     op.drop_index("gtr_fk_result_id", table_name="groups_to_results")
+     op.drop_index("gtr_fk_group_uuid", table_name="groups_to_results")
+     op.drop_table("groups_to_results")
+
+     # CONSTRAINTS
+     op.create_foreign_key(None, "result", "job", ["job_id"], ["id"])
+     op.create_foreign_key(None, "result", "testcase", ["testcase_id"], ["id"])
+     op.drop_constraint("group_uuid_key", "job")
data/resultsdb/alembic/versions/978007ecd2b_changed_testcase_name_to_text.py ADDED
@@ -0,0 +1,36 @@
+ """Changed testcase.name to text
+
+ Revision ID: 978007ecd2b
+ Revises: 4bf1390f06d1
+ Create Date: 2016-02-18 21:41:04.273020
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "978007ecd2b"
+ down_revision = "4bf1390f06d1"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade():
+     ### commands auto generated by Alembic - please adjust! ###
+     op.alter_column("testcase", "name", type_=sa.Text)
+     op.create_index(
+         "testcase_idx_name",
+         "testcase",
+         ["name"],
+         unique=False,
+         postgresql_ops={"name": "text_pattern_ops"},
+     )
+     ### end Alembic commands ###
+
+
+ def downgrade():
+     ### commands auto generated by Alembic - please adjust! ###
+     op.alter_column("testcase", "name", type_=sa.String(255))
+     op.drop_index("testcase_idx_name", table_name="testcase")
+     ### end Alembic commands ###
data/resultsdb/alembic/versions/cd581d0e83df_change_outcome_from_enum_to_string.py ADDED
@@ -0,0 +1,37 @@
+ """Change outcome from enum to string.
+
+ Revision ID: cd581d0e83df
+ Revises: 4dbe714897fe
+ Create Date: 2018-03-28 20:47:27.338605
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy import text
+
+ # revision identifiers, used by Alembic.
+ revision = "cd581d0e83df"
+ down_revision = "4dbe714897fe"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade():
+     op.alter_column("result", "outcome", type_=sa.String(32))
+     op.create_index(
+         "result_idx_outcome",
+         "result",
+         ["outcome"],
+         unique=False,
+         postgresql_ops={"outcome": "text_pattern_ops"},
+     )
+
+
+ def downgrade():
+     op.execute(
+         text(
+             "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome USING outcome::resultoutcome;"
+         )
+     )
+     op.drop_index("result_idx_outcome", table_name="result")
data/resultsdb/alembic/versions/dbfab576c81_change_schema_to_v2_0_step_2.py ADDED
@@ -0,0 +1,130 @@
+ """Change schema to v2.0 - step 2 - data migration
+
+ Revision ID: dbfab576c81
+ Revises: 540dbe71fa91
+ Create Date: 2016-08-23 23:02:56.928292
+
+ """
+
+ import logging
+ import uuid
+
+ import sqlalchemy as db
+ from alembic import op
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.orm import relationship, sessionmaker
+ from sqlalchemy.orm.decl_api import DeclarativeMeta
+
+ # revision identifiers, used by Alembic.
+ revision = "dbfab576c81"
+ down_revision = "540dbe71fa91"
+ branch_labels = None
+ depends_on = None
+
+ Session = sessionmaker()
+ Base: DeclarativeMeta = declarative_base()
+
+
+ db.relationship = relationship
+
+
+ class GroupsToResults(Base):
+     __tablename__ = "groups_to_results"
+     id = db.Column(db.Integer, primary_key=True)
+     group_uuid = db.Column(db.String(36), db.ForeignKey("group.uuid"))
+     result_id = db.Column(db.Integer, db.ForeignKey("result.id"))
+
+
+ class Group(Base):
+     __tablename__ = "group"
+
+     id = db.Column(db.Integer, primary_key=True)
+     uuid = db.Column(db.String(36), unique=True)
+
+
+ # results = db.relationship("Result", secondary = 'groups_to_results', backref="groups")
+
+
+ class Testcase(Base):
+     __tablename__ = "testcase"
+
+     id = db.Column(db.Integer, primary_key=True)
+     name = db.Column(db.Text, unique=True)
+
+
+ def upgrade():
+     class Result(Base):
+         __tablename__ = "result"
+
+         id = db.Column(db.Integer, primary_key=True)
+         job_id = db.Column(db.Integer, db.ForeignKey("group.id"))
+         testcase_id = db.Column(db.Integer, db.ForeignKey("testcase.id"))
+         testcase_name = db.Column(db.Text)
+
+         groups = db.relationship(
+             "Group", secondary="groups_to_results", backref="results"
+         )
+         job = db.relationship("Group")  # , lazy = False)
+         testcase = db.relationship("Testcase", backref="results")  # , lazy = False)
+
+     logger = logging.getLogger("alembic")
+     connection = op.get_bind()
+     session = Session(bind=connection)
+     i = 0
+     for group in session.query(Group).yield_per(100):
+         i += 1
+         if not group.uuid:
+             group.uuid = str(uuid.uuid1())
+             session.add(group)
+         if not i % 1000:
+             logger.info("Traversed %s groups", i)
+             session.commit()
+     logger.info("Final group commit")
+     session.commit()
+     i = 0
+     logger.info("Starting results")
+     for result in session.query(Result).yield_per(100):
+         i += 1
+         result.groups = [result.job]
+         result.testcase_name = result.testcase.name
+         session.add(result)
+         if not i % 1000:
+             logger.info("Traversed %s results", i)
+             session.commit()
+     logger.info("Final result commit")
+     session.commit()
+     logger.info("Removing the columns")
+     op.drop_column("result", "testcase_id")
+     op.drop_column("result", "job_id")
+
+
+ def downgrade():
+     class Result(Base):
+         __tablename__ = "result"
+
+         id = db.Column(db.Integer, primary_key=True)
+         job_id = db.Column(db.Integer, db.ForeignKey("group.id"))
+         testcase_id = db.Column(db.Integer)
+         testcase_name = db.Column(db.Text, db.ForeignKey("testcase.name"))
+
+         groups = db.relationship(
+             "Group", secondary="groups_to_results", backref="results"
+         )
+         job = db.relationship("Group")  # , lazy = False)
+         testcase = db.relationship("Testcase", backref="results")  # , lazy = False)
+
+     op.add_column(
+         "result", db.Column("job_id", db.INTEGER(), autoincrement=False, nullable=True)
+     )
+     op.add_column(
+         "result",
+         db.Column("testcase_id", db.INTEGER(), autoincrement=False, nullable=True),
+     )
+
+     connection = op.get_bind()
+     session = Session(bind=connection)
+     for result in session.query(Result):
+         result.job_id = result.groups[0].id
+         result.testcase_id = result.testcase.id
+         session.add(result)
+     session.commit()
data/resultsdb/authorization.py ADDED
@@ -0,0 +1,82 @@
+ # SPDX-License-Identifier: GPL-2.0+
+ import logging
+ from fnmatch import fnmatch
+
+ from werkzeug.exceptions import BadGateway, Forbidden, InternalServerError
+
+ log = logging.getLogger(__name__)
+
+ LDAP_ERROR = "Some error occurred initializing the LDAP connection"
+
+
+ def get_group_membership(ldap, user, con, ldap_search):
+     try:
+         results = con.search_s(
+             ldap_search["BASE"],
+             ldap.SCOPE_SUBTREE,
+             ldap_search.get("SEARCH_STRING", "(memberUid={user})").format(user=user),
+             ["cn"],
+         )
+         return [group[1]["cn"][0].decode("utf-8") for group in results]
+     except KeyError:
+         log.exception("LDAP_SEARCHES parameter should contain the BASE key")
+         raise InternalServerError("LDAP_SEARCHES parameter should contain the BASE key")
+     except ldap.SERVER_DOWN:
+         log.exception("The LDAP server is not reachable")
+         raise BadGateway("The LDAP server is not reachable")
+     except ldap.LDAPError:
+         log.exception(LDAP_ERROR)
+         raise BadGateway(LDAP_ERROR)
+
+
+ def match_testcase_permissions(testcase, permissions):
+     for permission in permissions:
+         if "testcases" in permission:
+             testcase_match = any(
+                 fnmatch(testcase, testcase_pattern)
+                 for testcase_pattern in permission["testcases"]
+             )
+             if testcase_match:
+                 yield permission
+
+
+ def verify_authorization(user, testcase, permissions, ldap_host, ldap_searches):
+     """
+     Raises an exception if the user is not permitted to publish a result for
+     the testcase.
+     """
+     if not (ldap_host and ldap_searches):
+         raise InternalServerError(
+             "LDAP_HOST and LDAP_SEARCHES also need to be defined if PERMISSIONS is defined"
+         )
+
+     allowed_groups = []
+     for permission in match_testcase_permissions(testcase, permissions):
+         if user in permission.get("users", []):
+             return
+         allowed_groups += permission.get("groups", [])
+
+     try:
+         import ldap
+     except ImportError:
+         raise InternalServerError(
+             "If PERMISSIONS is defined, python-ldap needs to be installed"
+         )
+
+     try:
+         con = ldap.initialize(ldap_host)
+     except ldap.LDAPError:
+         log.exception(LDAP_ERROR)
+         raise BadGateway(LDAP_ERROR)
+
+     any_groups_found = False
+     for cur_ldap_search in ldap_searches:
+         groups = get_group_membership(ldap, user, con, cur_ldap_search)
+         if any(g in groups for g in allowed_groups):
+             return
+         any_groups_found = any_groups_found or len(groups) > 0
+
+     raise Forbidden(
+         f"User {user} is not authorized to submit results for the test case {testcase}"
+         + ("" if any_groups_found else "; failed to find the user in LDAP")
+     )
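To make the authorization flow above concrete, here is a minimal usage sketch of `verify_authorization`; the permission mapping and LDAP settings below are illustrative assumptions, not values shipped with ResultsDB:

    from resultsdb.authorization import verify_authorization

    # Hypothetical permission mapping and LDAP settings, for illustration only.
    permissions = [
        {
            "testcases": ["compose.*"],
            "users": ["ci-bot"],
            "groups": ["qa-admins"],
        }
    ]
    ldap_host = "ldap://ldap.example.com"
    ldap_searches = [{"BASE": "ou=Groups,dc=example,dc=com"}]

    # Returns silently for a user listed directly in a matching permission;
    # otherwise it checks LDAP group membership and raises Forbidden (or
    # BadGateway if LDAP is unreachable) when no allowed group matches.
    verify_authorization(
        "ci-bot", "compose.install-default", permissions, ldap_host, ldap_searches
    )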
data/resultsdb/config.py ADDED
@@ -0,0 +1,205 @@
+ # Copyright 2013, Red Hat, Inc
+ #
+ # This program is free software; you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation; either version 2 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License along
+ # with this program; if not, write to the Free Software Foundation, Inc.,
+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+ #
+ # Authors:
+ #   Josef Skladanka <[email protected]>
+ #   Ralph Bean <[email protected]>
+
+ import os
+ import sys
+
+
+ def db_uri_for_testing():
+     postgres_port = os.getenv("RESULTSDB_POSTGRES_PORT")
+     if postgres_port:
+         return f"postgresql+psycopg2://resultsdb:resultsdb@localhost:{postgres_port}/resultsdb"  # nosec # NOSONAR
+
+     return "sqlite:///.test_db.sqlite"
+
+
+ class Config:
+     DEFAULT_CONFIG_FILE: str | None = None
+
+     DEBUG = True
+     PRODUCTION = False
+     SECRET_KEY = "replace-me-with-something-random"  # nosec # NOSONAR
+
+     HOST = "127.0.0.1"
+     PORT = 5001
+
+     SQLALCHEMY_DATABASE_URI = "sqlite://"
+     SHOW_DB_URI = True
+
+     FLASK_PYDANTIC_VALIDATION_ERROR_RAISE = True
+
+     LOGGING = {
+         "version": 1,
+         "disable_existing_loggers": False,
+         "loggers": {
+             "resultsdb": {
+                 "level": "INFO",
+             },
+             "dogpile": {
+                 "level": "WARNING",
+             },
+         },
+         "handlers": {
+             "console": {
+                 "formatter": "bare",
+                 "class": "logging.StreamHandler",
+                 "stream": "ext://sys.stdout",
+                 "level": "INFO",
+             },
+         },
+         "formatters": {
+             "bare": {
+                 "format": "[%(asctime)s] [%(process)d] [%(levelname)s] %(name)s: %(message)s",
+                 "datefmt": "%Y-%m-%d %H:%M:%S",
+             }
+         },
+         "root": {
+             "level": "WARNING",
+             "handlers": ["console"],
+         },
+     }
+
+     # Extend the list of allowed outcomes.
+     ADDITIONAL_RESULT_OUTCOMES: tuple[str] | tuple[()] = ()
+
+     PERMISSIONS: list[dict[str, object]] = []
+
+     # Supported values: "oidc"
+     AUTH_MODULE: str | None = None
+
+     OIDC_CLIENT_SECRETS = "/etc/resultsdb/oauth2_client_secrets.json"
+     OIDC_USERNAME_FIELD = "uid"
+     OIDC_SESSION_REFRESH_INTERVAL_SECONDS = 300
+     OIDC_SESSION_PERMANENT = False
+     PERMANENT_SESSION_LIFETIME = 300
+
+     SESSION_TYPE = "sqlalchemy"
+     SESSION_SQLALCHEMY_TABLE = "sessions"
+     SESSION_PERMANENT = True
+     SESSION_USE_SIGNER = True
+     SESSION_COOKIE_NAME = "session"
+     SESSION_COOKIE_SECURE = True
+     SESSION_COOKIE_SAMESITE = "Lax"
+
+     FEDMENU_URL = "https://apps.fedoraproject.org/fedmenu"
+     FEDMENU_DATA_URL = "https://apps.fedoraproject.org/js/data.js"
+
+     # Set this to True to enable publishing to a message bus.
+     MESSAGE_BUS_PUBLISH = True
+     # Name of the message bus plugin to use goes here. 'fedmsg' is installed by
+     # default, but you could create your own.
+     # Supported values: 'dummy', 'stomp', 'fedmsg'
+     MESSAGE_BUS_PLUGIN = "dummy"
+     # You can pass extra arguments to your message bus plugin here. For instance,
+     # the fedmsg plugin expects an extra `modname` argument that can be used to
+     # configure the topic, like this:
+     #   <topic_prefix>.<environment>.<modname>.<topic>
+     # e.g. org.fedoraproject.prod.resultsdb.result.new
+     MESSAGE_BUS_KWARGS: dict[str, object] = {}
+
+     # Publish Taskotron-compatible fedmsgs on the 'taskotron' topic
+     MESSAGE_BUS_PUBLISH_TASKOTRON = False
+     OTEL_EXPORTER_OTLP_METRICS_ENDPOINT = None
+     OTEL_EXPORTER_SERVICE_NAME = "resultsdb"
+
+
+ class ProductionConfig(Config):
+     DEFAULT_CONFIG_FILE = "/etc/resultsdb/settings.py"
+     DEBUG = False
+     PRODUCTION = True
+     SHOW_DB_URI = False
+     MESSAGE_BUS_PLUGIN = "fedmsg"
+     MESSAGE_BUS_KWARGS = {"modname": "resultsdb"}
+
+
+ class DevelopmentConfig(Config):
+     DEFAULT_CONFIG_FILE = os.getcwd() + "/conf/settings.py"
+     TRAP_BAD_REQUEST_ERRORS = True
+     SQLALCHEMY_DATABASE_URI = "sqlite:////var/tmp/resultsdb_db.sqlite"
+     OIDC_CLIENT_SECRETS = os.getcwd() + "/conf/oauth2_client_secrets.json.example"
+
+
+ class TestingConfig(Config):
+     TRAP_BAD_REQUEST_ERRORS = True
+
+     SQLALCHEMY_DATABASE_URI = db_uri_for_testing()
+
+     FEDMENU_URL = "https://apps.stg.fedoraproject.org/fedmenu"
+     FEDMENU_DATA_URL = "https://apps.stg.fedoraproject.org/js/data.js"
+     ADDITIONAL_RESULT_OUTCOMES = ("AMAZING",)
+     MESSAGE_BUS_PLUGIN = "dummy"
+     MESSAGE_BUS_KWARGS = {}
+     PERMISSIONS = [
+         {
+             "users": ["testuser1"],
+             "testcases": ["testcase1"],
+         }
+     ]
+     AUTH_MODULE = "oidc"
+     LDAP_HOST = "ldap://ldap.example.com"
+     LDAP_SEARCHES = [
+         {
+             "BASE": "ou=Groups,dc=example,dc=com",
+             "SEARCH_STRING": "(memberUid={user})",
+         }
+     ]
+
+     OIDC_CLIENT_SECRETS = os.getcwd() + "/conf/oauth2_client_secrets.json.example"
+
+
+ def openshift_config(config_object, openshift_production):
+     # First, get db details from env
+     try:
+         config_object["SQLALCHEMY_DATABASE_URI"] = (
+             "postgresql+psycopg2://{}:{}@{}:{}/{}".format(
+                 os.environ["POSTGRESQL_USER"],
+                 os.environ["POSTGRESQL_PASSWORD"],
+                 os.environ["POSTGRESQL_SERVICE_HOST"],
+                 os.environ["POSTGRESQL_SERVICE_PORT"],
+                 os.environ["POSTGRESQL_DATABASE"],
+             )
+         )
+         config_object["SECRET_KEY"] = os.environ["SECRET_KEY"]
+     except KeyError:
+         print(
+             "OpenShift mode enabled but required values couldn't be fetched. "
+             "Check if you have these variables defined in your env: "
+             "(POSTGRESQL_[USER, PASSWORD, DATABASE, SERVICE_HOST, SERVICE_PORT], "
+             "SECRET_KEY)",
+             file=sys.stderr,
+         )
+         sys.exit(1)
+
+     # Nuke out messaging, we don't support this in OpenShift mode.
+     # Inject settings.py and disable OpenShift mode if you need this.
+     config_object["MESSAGE_BUS_PLUGIN"] = "dummy"
+     config_object["MESSAGE_BUS_KWARGS"] = {}
+
+     if os.getenv("MESSAGE_BUS_PLUGIN") or os.getenv("MESSAGE_BUS_KWARGS"):
+         print("It appears you've tried to set up messaging in OpenShift mode.")
+         print(
+             "This is not supported; you need to inject settings.py and disable "
+             "OpenShift mode if you need messaging."
+         )
+
+     # Danger zone, keep this False out in the wild, always
+     config_object["SHOW_DB_URI"] = False
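A deployment normally overrides these defaults through the file named by DEFAULT_CONFIG_FILE. As a sketch, such a settings.py only needs to assign the attribute names defined above; every value here is a placeholder:

    # /etc/resultsdb/settings.py -- illustrative override file, values are placeholders
    SQLALCHEMY_DATABASE_URI = (
        "postgresql+psycopg2://resultsdb:changeme@db.example.com:5432/resultsdb"
    )
    SECRET_KEY = "a-long-random-string"
    MESSAGE_BUS_PLUGIN = "fedmsg"  # one of the supported values: 'dummy', 'stomp', 'fedmsg'
    MESSAGE_BUS_KWARGS = {"modname": "resultsdb"}
    ADDITIONAL_RESULT_OUTCOMES = ("QUEUED", "RUNNING")  # hypothetical extra outcomes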
data/resultsdb/controllers/__init__.py ADDED
File without changes
data/resultsdb/controllers/api_v2.py ADDED
@@ -0,0 +1,661 @@
+ # Copyright 2013-2016, Red Hat, Inc
+ #
+ # This program is free software; you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation; either version 2 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License along
+ # with this program; if not, write to the Free Software Foundation, Inc.,
+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+ #
+ # Authors:
+ #   Josef Skladanka <[email protected]>
+ #   Ralph Bean <[email protected]>
+
+ import re
+ import uuid
+
+ from flask import Blueprint
+ from flask import current_app as app
+ from flask import jsonify, request, url_for
+ from flask_pydantic import validate
+ from sqlalchemy.orm import exc as orm_exc
+
+ from resultsdb.controllers.common import SERIALIZE, commit_result
+ from resultsdb.models import db
+ from resultsdb.models.results import (
+     Group,
+     Result,
+     ResultData,
+     Testcase,
+     result_outcomes,
+ )
+ from resultsdb.parsers.api_v2 import (
+     QUERY_LIMIT,
+     CreateGroupParams,
+     CreateResultParams,
+     CreateTestcaseParams,
+     GroupsParams,
+     ResultsParams,
+     TestcasesParams,
+ )
+
+ api = Blueprint("api_v2", __name__)
+
+
+ # =============================================================================
+ # GLOBAL VARIABLES
+ # =============================================================================
+
+ RE_PAGE = re.compile(r"([?&])page=([0-9]+)")
+ RE_CALLBACK = re.compile(r"([?&])callback=[^&]*&?")
+ RE_CLEAN_AMPERSANDS = re.compile(r"&+")
+
+ # =============================================================================
+ # GLOBAL METHODS
+ # =============================================================================
+
+
+ def pagination(q, page, limit):
+     """
+     Sets the offset/limit for the DB query.
+     limit+1 is purposely used as the query limit, so we can later decide
+     whether a 'next' page link should be provided or set to None.
+     """
+     # pagination offset
+     try:
+         page = int(page)
+         if page > 0:
+             offset = page * limit
+             q = q.offset(offset)
+     except (TypeError, ValueError):
+         pass
+
+     q = q.limit(limit + 1)
+     return q
+
+
+ def prev_next_urls(data, limit=QUERY_LIMIT):
+     global RE_PAGE
+
+     try:
+         match = RE_PAGE.findall(request.url)
+         flag, page = match[0][0], int(match[0][1])
+     except IndexError:  # page not found
+         page = None
+
+     prev = None
+     next = None
+     placeholder = "[!@#$%^&*PLACEHOLDER*&^%$#@!]"
+
+     if page is None:
+         if "?" in request.url:
+             baseurl = f"{request.url}&page={placeholder}"
+         else:
+             baseurl = f"{request.url}?page={placeholder}"
+         page = 0
+     else:
+         baseurl = RE_PAGE.sub(f"{flag}page={placeholder}", request.url)
+
+     baseurl = RE_CALLBACK.sub(r"\1", baseurl)
+     baseurl = RE_CLEAN_AMPERSANDS.sub("&", baseurl)
+
+     if page > 0:
+         prev = baseurl.replace(placeholder, str(page - 1))
+     if len(data) > limit:
+         next = baseurl.replace(placeholder, str(page + 1))
+         data = data[:limit]
+
+     return data, prev, next
+
+
+ # =============================================================================
+ # GROUPS
+ # =============================================================================
+
+
+ def add_group(grp):
+     if isinstance(grp, (str, bytes)):
+         grp = dict(uuid=grp)
+     elif isinstance(grp, dict):
+         grp["uuid"] = grp.get("uuid", str(uuid.uuid1()))
+
+     group = Group.query.filter_by(uuid=grp["uuid"]).first()
+     if not group:
+         group = Group(uuid=grp["uuid"])
+
+     group.description = grp.get("description", group.description)
+     group.ref_url = grp.get("ref_url", group.ref_url)
+
+     db.session.add(group)
+     return group
+
+
+ @api.route("/groups", methods=["GET"])
+ @validate()
+ def get_groups(query: GroupsParams):
+     q = db.session.query(Group).order_by(db.desc(Group.id))
+
+     desc_filters = []
+     if query.description:
+         for description in query.description.split(","):
+             if not description.strip():
+                 continue
+             desc_filters.append(Group.description == description)
+         # desc_filters.append(Group.description.in_(query.description.split(',')))
+     elif query.description_like_:
+         for description in query.description_like_.split(","):
+             if not description.strip():
+                 continue
+             desc_filters.append(Group.description.like(description.replace("*", "%")))
+     if desc_filters:
+         q = q.filter(db.or_(*desc_filters))
+
+     # Filter by uuid
+     if query.uuid:
+         q = q.filter(Group.uuid.in_(query.uuid.split(",")))
+
+     q = pagination(q, query.page, query.limit)
+     data, prev, next = prev_next_urls(q.all(), query.limit)
+
+     return jsonify(
+         dict(
+             prev=prev,
+             next=next,
+             data=[SERIALIZE(o) for o in data],
+         )
+     )
+
+
+ @api.route("/groups/<group_id>", methods=["GET"])
+ def get_group(group_id):
+     q = Group.query.filter_by(uuid=group_id)
+     group = q.first()
+     if not group:
+         return jsonify({"message": "Group not found"}), 404
+
+     return jsonify(SERIALIZE(group))
+
+
+ @api.route("/groups", methods=["POST"])
+ @validate()
+ def create_group(body: CreateGroupParams):
+     if body.uuid:
+         group = Group.query.filter_by(uuid=body.uuid).first()
+         if not group:
+             group = Group(uuid=body.uuid)
+     else:
+         group = Group(uuid=str(uuid.uuid1()))
+
+     if body.ref_url:
+         group.ref_url = body.ref_url
+     if body.description:
+         group.description = body.description
+
+     db.session.add(group)
+     db.session.commit()
+
+     return jsonify(SERIALIZE(group)), 201
+
+
+ # =============================================================================
+ # RESULTS
+ # =============================================================================
+ def select_results(
+     since_start=None,
+     since_end=None,
+     outcomes=None,
+     groups=None,
+     testcases=None,
+     testcases_like=None,
+     result_data=None,
+     _sort=None,
+ ):
+     # Checks if the sort parameter specified in the request is valid before querying.
+     # Sorts by submit_time in descending order if the sort parameter is absent or invalid.
+     q = db.session.query(Result)
+     query_sorted = False
+     if _sort:
+         sort_match = re.match(r"^(?P<order>asc|desc):(?P<column>.+)$", _sort)
+         if sort_match and sort_match.group("column") == "submit_time":
+             sort_order = {"asc": db.asc, "desc": db.desc}[sort_match.group("order")]
+             sort_column = getattr(Result, sort_match.group("column"))
+             q = q.order_by(sort_order(sort_column))
+             query_sorted = True
+     if _sort and _sort == "disable_sorting":
+         query_sorted = True
+     if not query_sorted:
+         q = q.order_by(db.desc(Result.submit_time))
+
+     # Time constraints
+     if since_start:
+         q = q.filter(Result.submit_time >= since_start)
+     if since_end:
+         q = q.filter(Result.submit_time <= since_end)
+
+     # Filter by outcome
+     if outcomes:
+         q = q.filter(Result.outcome.in_(outcomes))
+
+     # Filter by group_id
+     if groups:
+         q = q.filter(Result.groups.any(Group.uuid.in_(groups)))
+
+     # Filter by testcase_name
+     filter_by_testcase = []
+     if testcases:
+         filter_by_testcase.append(Result.testcase_name.in_(testcases))
+     if testcases_like:
+         for testcase in testcases_like:
+             testcase = testcase.replace("*", "%")
+             filter_by_testcase.append(Result.testcase_name.like(testcase))
+     if filter_by_testcase:
+         q = q.filter(db.or_(*filter_by_testcase))
+
+     # Filter by result_data
+     if result_data is not None:
+         for key, values in result_data.items():
+             try:
+                 key, modifier = key.split(":")
+             except ValueError:  # no : in key
+                 key, modifier = (key, None)
+
+             if modifier == "like":
+                 alias = db.aliased(ResultData)
+                 if len(values) > 1:  # multiple values
+                     likes = []
+                     # create the (value LIKE foo OR value LIKE bar OR ...) part
+                     for value in values:
+                         value = value.replace("*", "%")
+                         likes.append(alias.value.like(value))
+                     # put it together to (key = key AND (value LIKE foo OR value LIKE bar OR ...))
+                     q = q.join(alias).filter(db.and_(alias.key == key, db.or_(*likes)))
+                 else:
+                     value = values[0].replace("*", "%")
+                     q = q.join(alias).filter(
+                         db.and_(alias.key == key, alias.value.like(value))
+                     )
+
+             else:
+                 alias = db.aliased(ResultData)
+                 q = q.join(alias).filter(
+                     db.and_(alias.key == key, alias.value.in_(values))
+                 )
+     return q
+
+
+ def __get_results_parse_args(query: ResultsParams):
+     args = {
+         "_sort": query.sort_,
+         "limit": query.limit,
+         "page": query.page,
+         "testcases": query.testcases,
+         "testcases:like": query.testcases_like_,
+         "groups": query.groups,
+         "_distinct_on": query.distinct_on_,
+         "outcome": query.outcome,
+         "since": query.since,
+     }
+
+     # Find results_data within the query parameters.
+     # These are the parameters other than those defined in RequestParser.
+     # request.args is an ImmutableMultiDict, which allows for more values to be
+     # stored in one key (so one can do stuff like .../results?item=foo&item=bar in URL).
+     # Here we transform the `request.args` MultiDict into a `results_data` dict of
+     # lists, while also filtering out the reserved keyword args.
+     results_data = {
+         k: request.args.getlist(k) for k in request.args.keys() if k not in args
+     }
+     for param, values in results_data.items():
+         for i, value in enumerate(values):
+             results_data[param][i] = value.split(",")
+         # flatten the list
+         results_data[param] = [
+             item for sublist in results_data[param] for item in sublist
+         ]
+
+     return {
+         "result_data": results_data if results_data else None,
+         "args": args,
+     }
+
+
+ def __get_results(query: ResultsParams, group_ids=None, testcase_names=None):
+     p = __get_results_parse_args(query)
+     args = p["args"]
+
+     groups = group_ids if group_ids is not None else args["groups"]
+     testcases = testcase_names if testcase_names is not None else args["testcases"]
+
+     q = select_results(
+         since_start=args["since"]["start"],
+         since_end=args["since"]["end"],
+         outcomes=args["outcome"],
+         groups=groups,
+         testcases=testcases,
+         testcases_like=args["testcases:like"],
+         result_data=p["result_data"],
+         _sort=args["_sort"],
+     )
+
+     q = pagination(q, args["page"], args["limit"])
+     data, prev, next = prev_next_urls(q.all(), args["limit"])
+
+     return jsonify(
+         dict(
+             prev=prev,
+             next=next,
+             data=[SERIALIZE(o) for o in data],
+         )
+     )
+
+
+ def add_data_to_result(data, result):
+     to_store = []
+     for key, value in data.items():
+         if not isinstance(key, str):
+             key = str(key)
+
+         if isinstance(value, str):
+             to_store.append((key, value))
+
+         elif isinstance(value, (list, tuple)):
+             for v in value:
+                 if not isinstance(v, str):
+                     v = str(v)
+                 to_store.append((key, v))
+         else:
+             value = str(value)
+             to_store.append((key, value))
+
+     for key, value in to_store:
+         ResultData(result, key, value)
+
+
+ @api.route("/results", methods=["GET"])
+ @validate()
+ def get_results(query: ResultsParams):
+     return __get_results(query)
+
+
+ @api.route("/results/latest", methods=["GET"])
+ @validate()
+ def get_results_latest(query: ResultsParams):
+     p = __get_results_parse_args(query)
+     args = p["args"]
+     since_start = args["since"].get("start", None)
+     since_end = args["since"].get("end", None)
+     groups = args.get("groups", None)
+     testcases = args.get("testcases", None)
+     testcases_like = args.get("testcases:like", None)
+     distinct_on = args.get("_distinct_on", None)
+
+     if not distinct_on:
+         q = select_results(
+             since_start=since_start,
+             since_end=since_end,
+             groups=groups,
+             testcases=testcases,
+             testcases_like=testcases_like,
+             result_data=p["result_data"],
+         )
+
+         # Produce a subquery with the same filter criteria as above *except*
+         # test case name, which we group by and join on.
+         sq = (
+             select_results(
+                 since_start=since_start,
+                 since_end=since_end,
+                 groups=groups,
+                 result_data=p["result_data"],
+             )
+             .order_by(None)
+             .with_entities(
+                 Result.testcase_name.label("testcase_name"),
+                 db.func.max(Result.submit_time).label("max_submit_time"),
+             )
+             .group_by(Result.testcase_name)
+             .subquery()
+         )
+         q = q.join(
+             sq,
+             db.and_(
+                 Result.testcase_name == sq.c.testcase_name,
+                 Result.submit_time == sq.c.max_submit_time,
+             ),
+         )
+
+         results = q.all()
+
+         return jsonify(
+             dict(
+                 data=[SERIALIZE(o) for o in results],
+             )
+         )
+
+     if not any(
+         [testcases, testcases_like, since_start, since_end, groups, p["result_data"]]
+     ):
+         return (
+             jsonify(
+                 {
+                     "message": (
+                         "Please, provide at least one filter beside '_distinct_on'"
+                     )
+                 }
+             ),
+             400,
+         )
+
+     q = db.session.query(Result)
+     q = select_results(
+         since_start=since_start,
+         since_end=since_end,
+         groups=groups,
+         testcases=testcases,
+         testcases_like=testcases_like,
+         result_data=p["result_data"],
+         _sort="disable_sorting",
+     )
+
+     values_distinct_on = [Result.testcase_name]
+     for i, key in enumerate(distinct_on):
+         name = f"result_data_{i}_{key}"
+         alias = db.aliased(
+             db.session.query(ResultData).filter(ResultData.key == key).subquery(),
+             name=name,
+         )
+         q = q.outerjoin(alias)
+         values_distinct_on.append(db.text(f"{name}.value"))
+
+     q = q.distinct(*values_distinct_on)
+     q = q.order_by(*values_distinct_on).order_by(db.desc(Result.submit_time))
+
+     results = q.all()
+     results = dict(
+         data=[SERIALIZE(o) for o in results],
+     )
+     results["data"] = sorted(
+         results["data"], key=lambda x: x["submit_time"], reverse=True
+     )
+     return jsonify(results)
+
+
+ @api.route("/groups/<group_id>/results", methods=["GET"])
+ @validate()
+ def get_results_by_group(group_id: str, query: ResultsParams):
+     group = Group.query.filter_by(uuid=group_id).first()
+     if not group:
+         return jsonify({"message": f"Group not found: {group_id}"}), 404
+     return __get_results(query, group_ids=[group.uuid])
+
+
+ @api.route("/testcases/<path:testcase_name>/results", methods=["GET"])
+ @validate()
+ def get_results_by_testcase(testcase_name: str, query: ResultsParams):
+     testcase = Testcase.query.filter_by(name=testcase_name).first()
+     if not testcase:
+         return jsonify({"message": "Testcase not found"}), 404
+     return __get_results(query, testcase_names=[testcase.name])
+
+
+ @api.route("/results/<result_id>", methods=["GET"])
+ def get_result(result_id):
+     try:
+         result = Result.query.filter_by(id=result_id).one()
+     except orm_exc.NoResultFound:
+         return jsonify({"message": "Result not found"}), 404
+
+     return jsonify(SERIALIZE(result))
+
+
+ @api.route("/results", methods=["POST"])
+ @validate()
+ def create_result(body: CreateResultParams):
+     return create_result_any_data(body)
+
+
+ def create_result_any_data(body: CreateResultParams):
+     """
+     Allows creating test results with data not checked by any schema (in
+     contrast to the v3 API).
+     """
+     if body.data:
+         invalid_keys = [key for key in body.data.keys() if ":" in key]
+         if invalid_keys:
+             app.logger.warning("Colon not allowed in key name: %s", invalid_keys)
+             return jsonify(
+                 {"message": "Colon not allowed in key name: %r" % invalid_keys}
+             ), 400
+
+     tc = body.testcase
+
+     testcase = Testcase.query.filter_by(name=tc["name"]).first()
+     if not testcase:
+         app.logger.debug("Testcase %s does not exist yet. Creating", tc["name"])
+         testcase = Testcase(name=tc["name"])
+     testcase.ref_url = tc.get("ref_url", testcase.ref_url)
+     db.session.add(testcase)
+
+     # groups is a list of uuid strings or group-object dicts; a group referenced
+     # only by a uuid string is created when it does not exist yet, and a group
+     # given as an object is created or updated with the values from the object.
+     groups = [add_group(group) for group in (body.groups or [])]
+
+     result = Result(
+         testcase, body.outcome, groups, body.ref_url, body.note, body.submit_time
+     )
+
+     if isinstance(body.data, dict):
+         add_data_to_result(body.data, result)
+
+     return commit_result(result)
+
+
+ # =============================================================================
+ # TESTCASES
+ # =============================================================================
+
+
+ def select_testcases(args_name=None, args_name_like=None):
+     q = db.session.query(Testcase).order_by(db.asc(Testcase.name))
+
+     name_filters = []
+     if args_name:
+         for name in [name.strip() for name in args_name.split(",") if name.strip()]:
+             name_filters.append(Testcase.name == name)
+     elif args_name_like:
+         for name in [
+             name.strip() for name in args_name_like.split(",") if name.strip()
+         ]:
+             name_filters.append(Testcase.name.like(name.replace("*", "%")))
+     if name_filters:
+         q = q.filter(db.or_(*name_filters))
+
+     return q
+
+
+ @api.route("/testcases", methods=["GET"])
+ @validate()
+ def get_testcases(query: TestcasesParams):
+     q = select_testcases(query.name, query.name_like_)
+     q = pagination(q, query.page, query.limit)
+     data, prev, next = prev_next_urls(q.all(), query.limit)
+
+     return jsonify(
+         dict(
+             prev=prev,
+             next=next,
+             data=[SERIALIZE(o) for o in data],
+         )
+     )
+
+
+ @api.route("/testcases/<path:testcase_name>", methods=["GET"])
+ def get_testcase(testcase_name):
+     try:
+         testcase = Testcase.query.filter_by(name=testcase_name).one()
+     except orm_exc.NoResultFound:
+         return jsonify({"message": "Testcase not found"}), 404
+
+     return jsonify(SERIALIZE(testcase))
+
+
+ @api.route("/testcases", methods=["POST"])
+ @validate()
+ def create_testcase(body: CreateTestcaseParams):
+     testcase = Testcase.query.filter_by(name=body.name).first()
+     if not testcase:
+         testcase = Testcase(name=body.name)
+     if body.ref_url is not None:
+         testcase.ref_url = body.ref_url
+
+     db.session.add(testcase)
+     db.session.commit()
+
+     return jsonify(SERIALIZE(testcase)), 201
+
+
+ @api.route("/healthcheck", methods=["GET"])
+ def healthcheck():
+     """
+     Request handler for performing an application-level health check. This is
+     not part of the published API, it is intended for use by OpenShift or other
+     monitoring tools.
+
+     Returns a 200 response if the application is alive and able to serve requests.
+     """
+     try:
+         db.session.execute(db.text("SELECT 1 FROM result LIMIT 0")).fetchall()
+     except Exception:
+         app.logger.exception("Healthcheck failed on DB query.")
+         return jsonify({"message": "Unable to communicate with database"}), 503
+
+     return jsonify({"message": "Health check OK"}), 200
+
+
+ @api.route("", methods=["GET"])
+ @api.route("/", methods=["GET"])
+ def landing_page():
+     return (
+         jsonify(
+             {
+                 "message": "Everything is fine. But choose wisely, for while "
+                 "the true Grail will bring you life, the false "
+                 "Grail will take it from you.",
+                 "documentation": "https://docs.resultsdb20.apiary.io/",
+                 "groups": url_for(".get_groups", _external=True),
+                 "results": url_for(".get_results", _external=True),
+                 "testcases": url_for(".get_testcases", _external=True),
+                 "outcomes": result_outcomes(),
+             }
+         ),
+         300,
+     )
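The free-form result_data filtering above is easiest to see from the client side. A hedged sketch of a query follows; the host name and testcase name are assumptions, while the parameter semantics follow the parsing code above (comma-separated values are OR-ed, a ':like' suffix enables '*' wildcards, and only submit_time is sortable):

    import requests  # assumes the requests library is available

    # Hypothetical server URL; the endpoint shape follows the routes above.
    resp = requests.get(
        "https://resultsdb.example.com/api/v2.0/results",
        params={
            "testcases": "dist.rpmdeplint",   # hypothetical testcase name
            "outcome": "PASSED,FAILED",       # comma-separated values are OR-ed
            "item:like": "bash-*",            # ':like' modifier, '*' as wildcard
            "_sort": "desc:submit_time",
        },
        timeout=30,
    )
    page = resp.json()
    for result in page["data"]:
        print(result["id"], result["outcome"])
    # page["next"] / page["prev"] hold the pagination URLs built by prev_next_urls().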
data/resultsdb/controllers/api_v3.py ADDED
@@ -0,0 +1,175 @@
+ # SPDX-License-Identifier: GPL-2.0+
+ from flask import Blueprint
+ from flask import current_app as app
+ from flask import jsonify, render_template
+ from flask_pydantic import validate
+ from pydantic import RootModel
+
+ from resultsdb.authorization import (
+     match_testcase_permissions,
+     verify_authorization,
+ )
+ from resultsdb.controllers.api_v2 import create_result_any_data
+ from resultsdb.controllers.common import commit_result
+ from resultsdb.models import db
+ from resultsdb.models.results import Result, ResultData, Testcase
+ from resultsdb.parsers.api_v2 import CreateResultParams
+ from resultsdb.parsers.api_v3 import (
+     RESULTS_PARAMS_CLASSES,
+     PermissionsParams,
+     ResultParamsBase,
+     result_outcomes_extended,
+ )
+
+ api = Blueprint("api_v3", __name__)
+
+
+ def permissions():
+     return app.config.get("PERMISSIONS", [])
+
+
+ def get_authorized_user(testcase) -> str:
+     """
+     Raises an exception if the current user cannot publish a result for the
+     testcase, otherwise returns the name of the current user.
+     """
+     user = app.oidc.current_token_identity[app.config["OIDC_USERNAME_FIELD"]]
+     ldap_host = app.config.get("LDAP_HOST")
+     ldap_searches = app.config.get("LDAP_SEARCHES")
+     verify_authorization(user, testcase, permissions(), ldap_host, ldap_searches)
+     return user
+
+
+ def create_result(body: ResultParamsBase):
+     user = get_authorized_user(body.testcase)
+
+     testcase = Testcase.query.filter_by(name=body.testcase).first()
+     if not testcase:
+         app.logger.debug("Testcase %s does not exist yet. Creating", body.testcase)
+         testcase = Testcase(name=body.testcase)
+     if body.testcase_ref_url:
+         app.logger.debug(
+             "Updating ref_url for testcase %s: %s", body.testcase, body.testcase_ref_url
+         )
+         testcase.ref_url = str(body.testcase_ref_url)
+     db.session.add(testcase)
+
+     ref_url = str(body.ref_url) if body.ref_url else None
+
+     result = Result(
+         testcase=testcase,
+         outcome=body.outcome,
+         ref_url=ref_url,
+         note=body.note,
+         groups=[],
+     )
+
+     if user:
+         ResultData(result, "username", user)
+
+     for name, value in body.result_data():
+         ResultData(result, name, value)
+
+     return commit_result(result)
+
+
+ def create_endpoint(params_class, oidc, provider):
+     params = params_class.model_construct()
+
+     @oidc.token_auth(provider)
+     @validate()
+     # Using RootModel is a workaround for a bug in flask-pydantic that causes
+     # validation to fail with an unexpected exception.
+     def create(body: RootModel[params_class]):
+         return create_result(body.root)
+
+     def get_schema():
+         return jsonify(params.model_construct().model_json_schema()), 200
+
+     artifact_type = params.artifact_type()
+     api.add_url_rule(
+         f"/results/{artifact_type}s",
+         endpoint=f"results_{artifact_type}s",
+         methods=["POST"],
+         view_func=create,
+     )
+     api.add_url_rule(
+         f"/schemas/{artifact_type}s",
+         endpoint=f"schemas_{artifact_type}s",
+         view_func=get_schema,
+     )
+
+
+ def create_any_data_endpoint(oidc, provider):
+     """
+     Creates an endpoint that accepts the same data as POST /api/v2.0/results
+     but supports OIDC authentication and permission control.
+
+     Other users/groups won't be able to POST results to this endpoint unless
+     they have a permission mapping with testcase pattern matching
+     "ANY-DATA:<testcase_name>" (instead of just "<testcase_name>" as in the
+     other v3 endpoints).
+     """
+
+     @oidc.token_auth(provider)
+     @validate()
+     # Using RootModel is a workaround for a bug in flask-pydantic that causes
+     # validation to fail with an unexpected exception.
+     def create(body: RootModel[CreateResultParams]):
+         testcase = body.root.testcase["name"]
+         get_authorized_user(f"ANY-DATA:{testcase}")
+         return create_result_any_data(body.root)
+
+     api.add_url_rule(
+         "/results",
+         endpoint="results",
+         methods=["POST"],
+         view_func=create,
+     )
+
+
+ def create_endpoints(oidc, provider):
+     for params_class in RESULTS_PARAMS_CLASSES:
+         create_endpoint(params_class, oidc, provider)
+
+     create_any_data_endpoint(oidc, provider)
+
+
+ @api.route("/permissions")
+ @validate()
+ def get_permissions(query: PermissionsParams):
+     if query.testcase:
+         return list(match_testcase_permissions(query.testcase, permissions()))
+
+     return permissions()
+
+
+ @api.route("/")
+ def index():
+     examples = [params_class.example() for params_class in RESULTS_PARAMS_CLASSES]
+     endpoints = [
+         {
+             "name": f"results/{example.artifact_type()}s",
+             "method": "POST",
+             "description": example.__doc__,
+             "query_type": "JSON",
+             "example": example.model_dump_json(exclude_unset=True, indent=2),
+             "schema": example.model_json_schema(),
+             "schema_endpoint": f".schemas_{example.artifact_type()}s",
+         }
+         for example in examples
+     ]
+     endpoints.append(
+         {
+             "name": "permissions",
+             "method": "GET",
+             "description": PermissionsParams.__doc__,
+             "query_type": "Query",
+             "schema": PermissionsParams.model_construct().model_json_schema(),
+         }
+     )
+     return render_template(
+         "api_v3.html",
+         endpoints=endpoints,
+         result_outcomes_extended=", ".join(result_outcomes_extended()),
+     )
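A hedged sketch of posting through one of the generated v3 artifact endpoints; the artifact type ("brew-builds"), URL prefix, token, and field values are all assumptions, and the authoritative field set for each type is served by the matching /schemas/ endpoint registered above:

    import requests  # assumes the requests library is available

    # Hypothetical endpoint and payload, for illustration only.
    resp = requests.post(
        "https://resultsdb.example.com/api/v3/results/brew-builds",
        headers={"Authorization": "Bearer <oidc-access-token>"},
        json={
            "testcase": "build.installability",
            "outcome": "PASSED",
            "item": "libfoo-1.0-1.el9",
        },
        timeout=30,
    )
    resp.raise_for_status()  # expect 201 on success; 403 without a matching permission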
data/resultsdb/controllers/common.py ADDED
@@ -0,0 +1,38 @@
+ # SPDX-License-Identifier: GPL-2.0+
+ from flask import current_app as app
+ from flask import jsonify
+
+ from resultsdb.messaging import create_message, publish_taskotron_message
+ from resultsdb.models import db
+ from resultsdb.serializers.api_v2 import Serializer
+
+ SERIALIZE = Serializer().serialize
+
+
+ def commit_result(result):
+     """
+     Saves the result in the database and publishes a message.
+
+     Returns the value for the POST HTTP API response.
+     """
+     db.session.add(result)
+     db.session.commit()
+
+     app.logger.debug(
+         "Created new result for testcase %s with outcome %s",
+         result.testcase.name,
+         result.outcome,
+     )
+
+     if app.messaging_plugin:
+         app.logger.debug("Preparing to publish message for result id %d", result.id)
+         message = create_message(result)
+         app.messaging_plugin.publish(message)
+
+     if app.config["MESSAGE_BUS_PUBLISH_TASKOTRON"]:
+         app.logger.debug(
+             "Preparing to publish Taskotron message for result id %d", result.id
+         )
+         publish_taskotron_message(result)
+
+     return jsonify(SERIALIZE(result)), 201
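commit_result only assumes that app.messaging_plugin, when set, exposes a publish(message) method. A minimal stand-in along the lines of the 'dummy' plugin could look like the sketch below; the real plugin classes live in resultsdb/messaging.py and this is not their exact implementation:

    import logging

    log = logging.getLogger(__name__)


    class DummyPlugin:
        """Sketch of the minimal interface commit_result() relies on."""

        def publish(self, message):
            # A real plugin would hand the message to a broker; here we only log it.
            log.info("Dummy publish: %r", message)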
data/resultsdb/controllers/main.py ADDED
@@ -0,0 +1,28 @@
1
+ # Copyright 2013, Red Hat, Inc
2
+ #
3
+ # This program is free software; you can redistribute it and/or modify
4
+ # it under the terms of the GNU General Public License as published by
5
+ # the Free Software Foundation; either version 2 of the License, or
6
+ # (at your option) any later version.
7
+ #
8
+ # This program is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License along
14
+ # with this program; if not, write to the Free Software Foundation, Inc.,
15
+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
16
+ #
17
+ # Authors:
18
+ # Josef Skladanka <[email protected]>
19
+
20
+ from flask import Blueprint, render_template
21
+
22
+ main = Blueprint("main", __name__)
23
+
24
+
25
+ @main.route("/")
26
+ @main.route("/index")
27
+ def index():
28
+ return render_template("index.html")
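For context, a minimal sketch of wiring this blueprint into an application. The factory shown here is an assumption; resultsdb's real setup code lives elsewhere in the package:

    # Hypothetical app wiring using the standard Flask API.
    from flask import Flask

    from resultsdb.controllers.main import main

    app = Flask(__name__)
    app.register_blueprint(main)  # "/" and "/index" now render index.html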
data/resultsdb/lib/__init__.py ADDED
File without changes
data/resultsdb/messaging.py ADDED
@@ -0,0 +1,271 @@
+ # Copyright 2016, Red Hat, Inc
+ #
+ # This program is free software; you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation; either version 2 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License along
+ # with this program; if not, write to the Free Software Foundation, Inc.,
+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+ #
+ # Authors:
+ #   Ralph Bean <[email protected]>
+
+ import abc
+ import json
+ import logging
+ from threading import Lock
+
+ import pkg_resources
+ import stomp
+ from fedora_messaging.api import Message, publish
+ from fedora_messaging.exceptions import (
+     ConnectionException,
+     PublishForbidden,
+     PublishReturned,
+     PublishTimeout,
+ )
+ from opentelemetry import trace
+ from opentelemetry.trace.propagation.tracecontext import (
+     TraceContextTextMapPropagator,
+ )
+ from tenacity import retry, stop_after_attempt, wait_exponential
+
+ from resultsdb.models import db
+ from resultsdb.models.results import Result, ResultData
+ from resultsdb.serializers.api_v2 import Serializer
+
+ log = logging.getLogger(__name__)
+ tracer = trace.get_tracer(__name__)
+
+ SERIALIZE = Serializer().serialize
+
+ STOMP_RETRY_STOP = stop_after_attempt(3)
+ STOMP_RETRY_WAIT = wait_exponential(multiplier=2, min=5, max=15)
+
+
+ def get_prev_result(result):
+     """
+     Find the previous result with the same testcase, item, type, and arch.
+     Return None if no result is found.
+
+     Note that this logic is Taskotron-specific: it does not consider the
+     possibility that a result may be distinguished by other keys in the data
+     (for example 'scenario', which is used in OpenQA results). But this is
+     only used for publishing Taskotron compatibility messages, so we keep
+     this logic as is.
+     """
+     q = db.session.query(Result).filter(Result.id != result.id)
+     q = q.filter_by(testcase_name=result.testcase_name)
+
+     for result_data in result.data:
+         if result_data.key in ["item", "type", "arch"]:
+             alias = db.aliased(ResultData)
+             q = q.join(alias).filter(
+                 db.and_(alias.key == result_data.key, alias.value == result_data.value)
+             )
+
+     q = q.order_by(db.desc(Result.submit_time))
+     return q.first()
+
+ def publish_taskotron_message(result):
+     """
+     Publish a fedmsg on the taskotron topic with a Taskotron-compatible
+     structure.
+
+     These messages are deprecated; consumers should consume from the
+     resultsdb topic instead.
+     """
+     prev_result = get_prev_result(result)
+     if prev_result is not None and prev_result.outcome == result.outcome:
+         # If the previous result had the same outcome, skip publishing
+         # a message for this new result.
+         # This was intended as a workaround to avoid spammy messages from
+         # the dist.depcheck task, which tends to produce a very large number
+         # of identical results for any given build, because of the way that
+         # it is designed.
+         log.debug(
+             "Skipping Taskotron message for result %d, outcome has not changed",
+             result.id,
+         )
+         return
+
+     task = {
+         datum.key: datum.value
+         for datum in result.data
+         if datum.key in ("item", "type")
+     }
+     task["name"] = result.testcase.name
+     body = {
+         "task": task,
+         "result": {
+             "id": result.id,
+             "submit_time": result.submit_time.strftime("%Y-%m-%d %H:%M:%S UTC"),
+             "prev_outcome": prev_result.outcome if prev_result else None,
+             "outcome": result.outcome,
+             "log_url": result.ref_url,
+         },
+     }
+
+     try:
+         msg = Message(topic="taskotron.result.new", body=body)
+         publish(msg)
+         log.debug("Message published")
+     except PublishReturned as e:
+         log.error(f"Fedora Messaging broker rejected message {msg.id}: {e}")
+     except PublishTimeout:
+         log.error(f"Timeout publishing message {msg.id}")
+     except PublishForbidden as e:
+         log.error(f"Permission error publishing message {msg.id}: {e}")
+     except ConnectionException as e:
+         log.error(f"Error sending message {msg.id}: {e.reason}")
+
+
+ def create_message(result):
+     # Re-use the same structure as in the HTTP API v2.
+     return SERIALIZE(result)
+
+
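For orientation, a hedged sketch of the body published above; every field value here is a placeholder, not real data:

    # Illustrative shape only; all values are placeholders.
    body = {
        "task": {
            "item": "foo-1.0-1.fc99",     # from the result's "item" key
            "type": "koji_build",         # from the result's "type" key
            "name": "dist.example.check", # the testcase name
        },
        "result": {
            "id": 42,
            "submit_time": "2016-01-01 12:00:00 UTC",
            "prev_outcome": "FAILED",
            "outcome": "PASSED",
            "log_url": "https://example.com/logs/42",
        },
    }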
+ class MessagingPlugin(abc.ABC):
+     """Abstract base class that messaging plugins must extend.
+
+     One abstract method is declared which must be implemented:
+     - publish(message)
+     """
+
+     def __init__(self, **kwargs):
+         for key, value in kwargs.items():
+             setattr(self, key, value)
+
+     @abc.abstractmethod
+     def publish(self, message):
+         pass
+
+
+ class DummyPlugin(MessagingPlugin):
+     """A dummy plugin used for testing. Just logs the messages."""
+
+     # A class attribute where we store all messages published.
+     # Used by the test suite. This would cause a memory leak if used in prod.
+     history: list[dict[str, object]] = []
+
+     def publish(self, message):
+         # Add telemetry information. This includes an extra key,
+         # "traceparent".
+         TraceContextTextMapPropagator().inject(message)
+         self.history.append(message)
+         log.info(f"{self!r}->{message!r}")
+
+
+ class FedmsgPlugin(MessagingPlugin):
+     """A fedmsg plugin, used to publish to the fedmsg bus."""
+
+     def publish(self, message):
+         try:
+             msg = Message(topic=f"{self.modname}.result.new", body=message)
+             publish(msg)
+             log.debug("Message published")
+         except PublishReturned as e:
+             log.error(f"Fedora Messaging broker rejected message {msg.id}: {e}")
+         except PublishTimeout:
+             log.error(f"Timeout publishing message {msg.id}")
+         except PublishForbidden as e:
+             log.error(f"Permission error publishing message {msg.id}: {e}")
+         except ConnectionException as e:
+             log.error(f"Error sending message {msg.id}: {e.reason}")
+
+
+ class StompPlugin(MessagingPlugin):
+     def __init__(self, **kwargs):
+         args = kwargs.copy()
+         conn_args = args["connection"].copy()
+         if "use_ssl" in conn_args:
+             use_ssl = conn_args["use_ssl"]
+             del conn_args["use_ssl"]
+         else:
+             use_ssl = False
+
+         ssl_args = {"for_hosts": conn_args.get("host_and_ports", [])}
+         for attr in ("key_file", "cert_file", "ca_certs"):
+             conn_attr = f"ssl_{attr}"
+             if conn_attr in conn_args:
+                 ssl_args[attr] = conn_args[conn_attr]
+                 del conn_args[conn_attr]
+
+         if "ssl_version" in conn_args:
+             ssl_args["ssl_version"] = conn_args["ssl_version"]
+             del conn_args["ssl_version"]
+
+         args["connection"] = conn_args
+         args["use_ssl"] = use_ssl
+         args["ssl_args"] = ssl_args
+
+         super().__init__(**args)
+
+         # Validate that some required config is present
+         required = ["connection", "destination"]
+         for attr in required:
+             if getattr(self, attr, None) is None:
+                 raise ValueError(f"Missing {attr!r} option for STOMP messaging plugin")
+
+         self.conn_lock = Lock()
+         self.conn = stomp.connect.StompConnection11(**self.connection)
+         if self.use_ssl:
+             self.conn.set_ssl(**self.ssl_args)
+
+     @tracer.start_as_current_span("StompPlugin.publish")
+     def publish(self, msg):
+         # Add telemetry information. This includes an extra key,
+         # "traceparent".
+         TraceContextTextMapPropagator().inject(msg)
+
+         msg = json.dumps(msg)
+         kwargs = {"body": msg, "headers": {}, "destination": self.destination}
+         self._publish_with_retry(**kwargs)
+
+     @retry(stop=STOMP_RETRY_STOP, wait=STOMP_RETRY_WAIT, reraise=True)
+     def _publish_with_retry(self, **kwargs):
+         with self.conn_lock:
+             if not self.conn.is_connected():
+                 log.info("Connecting to message bus")
+
+                 # An inactive connection is closed/disconnected automatically
+                 # after a short time.
+                 with tracer.start_as_current_span("StompPlugin.publish.connect"):
+                     self.conn.connect(wait=True)
+             with tracer.start_as_current_span("StompPlugin.publish.send"):
+                 self.conn.send(**kwargs)
+             log.debug("Published message through stomp: %s", kwargs["body"])
+
+
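A hedged configuration sketch for the STOMP plugin above, matching the use_ssl/ssl_* key handling in __init__. The broker host, port, file paths, and destination are placeholders:

    # Placeholder values throughout; adjust for a real broker.
    from resultsdb.messaging import StompPlugin

    plugin = StompPlugin(
        connection={
            "host_and_ports": [("broker.example.com", 61612)],
            "use_ssl": True,
            "ssl_key_file": "/etc/resultsdb/ssl/key.pem",
            "ssl_cert_file": "/etc/resultsdb/ssl/cert.pem",
            "ssl_ca_certs": "/etc/resultsdb/ssl/ca.pem",
        },
        destination="/topic/resultsdb.result.new",
    )
    plugin.publish({"outcome": "PASSED"})  # serialized to JSON and sent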
+ def load_messaging_plugin(name, plugin_args):
+     """Instantiate and return the appropriate messaging plugin."""
+     points = pkg_resources.iter_entry_points("resultsdb.messaging.plugins")
+     classes = {"dummy": DummyPlugin}
+     classes.update({point.name: point.load() for point in points})
+
+     log.debug("Found the following installed messaging plugins: %r", classes)
+     if name not in classes:
+         raise KeyError(f"{name!r} not found in {classes.keys()!r}")
+
+     cls = classes[name]
+
+     # Sanity check
+     if not issubclass(cls, MessagingPlugin):
+         raise TypeError(f"{name} {cls!r} does not extend MessagingPlugin.")
+
+     log.debug(f"Instantiating plugin {cls!r} named {name}")
+     return cls(**plugin_args)
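And a quick usage sketch of the loader with the built-in dummy plugin; the message contents are illustrative:

    from resultsdb.messaging import load_messaging_plugin

    plugin = load_messaging_plugin(name="dummy", plugin_args={})
    plugin.publish({"testcase": {"name": "example.check"}, "outcome": "PASSED"})
    # DummyPlugin keeps everything it publishes in DummyPlugin.history,
    # which is what the test suite inspects.
    assert plugin.history[-1]["outcome"] == "PASSED"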