diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 04c704d3..3fc8aa2c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -27,7 +27,7 @@ repos:
     hooks:
       - id: flake8
         language_version: python3
-        args: [--max-line-length, "100"]
+        args: [--max-line-length, "100", --ignore, "E203"]
   - repo: https://github.com/asottile/pyupgrade
     rev: v2.6.1
     hooks:
diff --git a/backend/ibutsu_server/controllers/artifact_controller.py b/backend/ibutsu_server/controllers/artifact_controller.py
index c69d3218..3df2d4ae 100644
--- a/backend/ibutsu_server/controllers/artifact_controller.py
+++ b/backend/ibutsu_server/controllers/artifact_controller.py
@@ -1,4 +1,5 @@
 import json
+from datetime import datetime

 import connexion
 import magic
@@ -119,6 +120,7 @@ def upload_artifact(body):
         filename=filename,
         result_id=data["resultId"],
         content=file_.read(),
+        upload_date=datetime.utcnow(),
         data=additional_metadata,
     )
     session.add(artifact)
diff --git a/backend/ibutsu_server/controllers/result_controller.py b/backend/ibutsu_server/controllers/result_controller.py
index 3e305263..60f900ec 100644
--- a/backend/ibutsu_server/controllers/result_controller.py
+++ b/backend/ibutsu_server/controllers/result_controller.py
@@ -81,41 +81,53 @@ def get_result_list(filter_=None, page=1, page_size=25, apply_max=False):
     :rtype: List[Result]
     """
     query = Result.query
+    count_estimate = None
     if filter_:
         for filter_string in filter_:
             filter_clause = convert_filter(filter_string, Result)
             if filter_clause is not None:
                 query = query.filter(filter_clause)
+    else:
+        # use a count estimate when no filter is applied
+        count_estimate = int(
+            session.execute(
+                "SELECT reltuples as approx_count FROM pg_class WHERE relname='results'"
+            ).fetchall()[0][0]
+        )
+
     offset = (page * page_size) - page_size
-    try:
-        # if the count is fast, just use it! Even if apply_max is set to true
-        session.execute(f"SET statement_timeout TO {int(COUNT_TIMEOUT*1000)}; commit;")
-        total_items = query.count()
-    except OperationalError:
-        # reset the timeout if we hit an exception
-        session.execute("SET statement_timeout TO 0; commit;")
-        if apply_max:
-            print(
-                f"FunctionTimedOut: 'query.count' with filters: {filter_} timed out, "
-                f"using default items of {MAX_DOCUMENTS}"
-            )
-            if offset > MAX_DOCUMENTS:
-                raise ValueError(
-                    f"Offset: {offset} exceeds the "
-                    f"MAX_DOCUMENTS: {MAX_DOCUMENTS} able to be displayed in the UI. "
-                    f"Please use the API for this request."
-                )
-            total_items = MAX_DOCUMENTS
-        else:
-            print(
-                f"FunctionTimedOut: 'query.count' with args: {filter_} timed out, "
-                f"but limit_documents is set to False, proceeding"
-            )
-            # if we don't want to limit documents, just do the standard count
-            total_items = query.count()
-    else:
-        # reset the timeout if we don't hit an exception
-        session.execute("SET statement_timeout TO 0; commit;")
+    if not count_estimate:
+        try:
+            # if the count is fast, just use it! Even if apply_max is set to true
+            session.execute(f"SET statement_timeout TO {int(COUNT_TIMEOUT*1000)}; commit;")
+            total_items = query.count()
+        except OperationalError:
+            # reset the timeout if we hit an exception
+            session.execute("SET statement_timeout TO 0; commit;")
+            if apply_max:
+                print(
+                    f"FunctionTimedOut: 'query.count' with filters: {filter_} timed out, "
+                    f"using default items of {MAX_DOCUMENTS}"
+                )
+                if offset > MAX_DOCUMENTS:
+                    raise ValueError(
+                        f"Offset: {offset} exceeds the "
+                        f"MAX_DOCUMENTS: {MAX_DOCUMENTS} able to be displayed in the UI. "
+                        f"Please use the API for this request."
+                    )
+                total_items = MAX_DOCUMENTS
+            else:
+                print(
+                    f"FunctionTimedOut: 'query.count' with args: {filter_} timed out, "
+                    f"but limit_documents is set to False, proceeding"
+                )
+                # if we don't want to limit documents, just do the standard count
+                total_items = query.count()
+        else:
+            # reset the timeout if we don't hit an exception
+            session.execute("SET statement_timeout TO 0; commit;")
+    else:
+        total_items = count_estimate
     total_pages = (total_items // page_size) + (1 if total_items % page_size > 0 else 0)
     results = query.order_by(Result.start_time.desc()).offset(offset).limit(page_size).all()
diff --git a/backend/ibutsu_server/controllers/widget_config_controller.py b/backend/ibutsu_server/controllers/widget_config_controller.py
index 4ca47750..3bcc7f6a 100644
--- a/backend/ibutsu_server/controllers/widget_config_controller.py
+++ b/backend/ibutsu_server/controllers/widget_config_controller.py
@@ -76,7 +76,7 @@ def get_widget_config_list(filter_=None, page=1, page_size=25):
     offset = (page * page_size) - page_size
     total_items = query.count()
     total_pages = (total_items // page_size) + (1 if total_items % page_size > 0 else 0)
-    widgets = query.order_by(WidgetConfig.weight.desc()).offset(offset).limit(page_size)
+    widgets = query.order_by(WidgetConfig.weight.asc()).offset(offset).limit(page_size)
     return {
         "widgets": [widget.to_dict() for widget in widgets],
         "pagination": {
diff --git a/backend/ibutsu_server/db/models.py b/backend/ibutsu_server/db/models.py
index 3fddca39..0793a5f6 100644
--- a/backend/ibutsu_server/db/models.py
+++ b/backend/ibutsu_server/db/models.py
@@ -1,3 +1,4 @@
+from datetime import datetime
 from uuid import uuid4

 from ibutsu_server.db.base import Boolean
@@ -65,14 +66,15 @@ def to_dict(self):

 class Artifact(Model, FileMixin):
     __tablename__ = "artifacts"
-    result_id = Column(PortableUUID(), ForeignKey("results.id"), nullable=False)
-    filename = Column(Text)
+    result_id = Column(PortableUUID(), ForeignKey("results.id"), nullable=False, index=True)
+    filename = Column(Text, index=True)
     data = Column(mutable_json_type(dbtype=PortableJSON(), nested=True))
+    upload_date = Column(DateTime, default=datetime.utcnow, index=True)


 class Group(Model, ModelMixin):
     __tablename__ = "groups"
-    name = Column(Text)
+    name = Column(Text, index=True)
     projects = relationship("Project")
     data = Column(mutable_json_type(dbtype=PortableJSON(), nested=True))
@@ -80,23 +82,23 @@
 class Import(Model, ModelMixin):
     __tablename__ = "imports"
     file = relationship("ImportFile")
-    filename = Column(Text)
-    format = Column(Text)
+    filename = Column(Text, index=True)
+    format = Column(Text, index=True)
     data = Column(mutable_json_type(dbtype=PortableJSON(), nested=True))
-    status = Column(Text)
+    status = Column(Text, index=True)


 class ImportFile(Model, FileMixin):
     __tablename__ = "import_files"
-    import_id = Column(PortableUUID(), ForeignKey("imports.id"), nullable=False)
+    import_id = Column(PortableUUID(), ForeignKey("imports.id"), nullable=False, index=True)


 class Project(Model, ModelMixin):
     __tablename__ = "projects"
-    name = Column(Text)
-    title = Column(Text)
-    owner_id = Column(Text)
-    group_id = Column(PortableUUID(), ForeignKey("groups.id"))
+    name = Column(Text, index=True)
+    title = Column(Text, index=True)
+    owner_id = Column(Text, index=True)
+    group_id = Column(PortableUUID(), ForeignKey("groups.id"), index=True)
     reports = relationship("Report")
     results = relationship("Result")
     runs = relationship("Run")
@@ -105,64 +107,64 @@

 class Report(Model, ModelMixin):
     __tablename__ = "reports"
-    created = Column(DateTime)
-    download_url = Column(Text)
-    filename = Column(Text)
-    mimetype = Column(Text)
-    name = Column(Text)
+    created = Column(DateTime, default=datetime.utcnow, index=True)
+    download_url = Column(Text, index=True)
+    filename = Column(Text, index=True)
+    mimetype = Column(Text, index=True)
+    name = Column(Text, index=True)
     params = Column(mutable_json_type(dbtype=PortableJSON()))
-    project_id = Column(PortableUUID(), ForeignKey("projects.id"))
+    project_id = Column(PortableUUID(), ForeignKey("projects.id"), index=True)
     file = relationship("ReportFile")
-    status = Column(Text)
-    url = Column(Text)
-    view_url = Column(Text)
+    status = Column(Text, index=True)
+    url = Column(Text, index=True)
+    view_url = Column(Text, index=True)


 class ReportFile(Model, FileMixin):
     __tablename__ = "report_files"
-    report_id = Column(PortableUUID(), ForeignKey("reports.id"), nullable=False)
-    filename = Column(Text)
+    report_id = Column(PortableUUID(), ForeignKey("reports.id"), nullable=False, index=True)
+    filename = Column(Text, index=True)
     data = Column(mutable_json_type(dbtype=PortableJSON(), nested=True))


 class Result(Model, ModelMixin):
     __tablename__ = "results"
     artifacts = relationship("Artifact")
-    component = Column(Text)
+    component = Column(Text, index=True)
     # this is metadata but it is a reserved attr
     data = Column(mutable_json_type(dbtype=PortableJSON(), nested=True))
-    duration = Column(Float)
-    env = Column(Text)
+    duration = Column(Float, index=True)
+    env = Column(Text, index=True)
     params = Column(mutable_json_type(dbtype=PortableJSON()))
-    project_id = Column(PortableUUID(), ForeignKey("projects.id"))
-    result = Column(Text)
-    run_id = Column(PortableUUID(), ForeignKey("runs.id"))
-    source = Column(Text)
-    start_time = Column(DateTime)
-    test_id = Column(Text)
+    project_id = Column(PortableUUID(), ForeignKey("projects.id"), index=True)
+    result = Column(Text, index=True)
+    run_id = Column(PortableUUID(), ForeignKey("runs.id"), index=True)
+    source = Column(Text, index=True)
+    start_time = Column(DateTime, default=datetime.utcnow, index=True)
+    test_id = Column(Text, index=True)


 class Run(Model, ModelMixin):
     __tablename__ = "runs"
-    component = Column(Text)
-    created = Column(DateTime)
+    component = Column(Text, index=True)
+    created = Column(DateTime, default=datetime.utcnow, index=True)
     # this is metadata but it is a reserved attr
     data = Column(mutable_json_type(dbtype=PortableJSON(), nested=True))
-    duration = Column(Float)
-    env = Column(Text)
-    project_id = Column(PortableUUID(), ForeignKey("projects.id"))
+    duration = Column(Float, index=True)
+    env = Column(Text, index=True)
+    project_id = Column(PortableUUID(), ForeignKey("projects.id"), index=True)
     results = relationship("Result")
-    source = Column(Text)
-    start_time = Column(DateTime)
+    source = Column(Text, index=True)
+    start_time = Column(DateTime, default=datetime.utcnow, index=True)
     summary = Column(mutable_json_type(dbtype=PortableJSON()))


 class WidgetConfig(Model, ModelMixin):
     __tablename__ = "widget_configs"
-    navigable = Column(Boolean)
+    navigable = Column(Boolean, index=True)
     params = Column(mutable_json_type(dbtype=PortableJSON()))
-    project_id = Column(PortableUUID(), ForeignKey("projects.id"))
-    title = Column(Text)
-    type = Column(Text)
-    weight = Column(Integer)
-    widget = Column(Text)
+    project_id = Column(PortableUUID(), ForeignKey("projects.id"), index=True)
+    title = Column(Text, index=True)
+    type = Column(Text, index=True)
+    weight = Column(Integer, index=True)
+    widget = Column(Text, index=True)
diff --git a/backend/ibutsu_server/filters.py b/backend/ibutsu_server/filters.py
index 25bae8e5..17b672db 100644
--- a/backend/ibutsu_server/filters.py
+++ b/backend/ibutsu_server/filters.py
@@ -26,6 +26,8 @@ def string_to_column(field, model):
             if idx == 0:
                 continue
             column = column[part]
+        if field not in ARRAY_FIELDS:
+            column = column.as_string()
     else:
         column = getattr(model, field)
     return column
diff --git a/backend/ibutsu_server/scripts/mongo2postgres.py b/backend/ibutsu_server/scripts/mongo2postgres.py
new file mode 100755
index 00000000..d3819ada
--- /dev/null
+++ b/backend/ibutsu_server/scripts/mongo2postgres.py
@@ -0,0 +1,375 @@
+#!/bin/env python3
+"""
+Convert an Ibutsu MongoDB database to PostgreSQL
+
+python3 mongo2postgres.py
+    mongodb://localhost/test_artifacts postgresql://ibutsu:ibutsu@localhost:5432/ibutsu -v
+"""
+from argparse import ArgumentParser
+from datetime import datetime
+from datetime import timedelta
+from datetime import timezone
+from uuid import UUID
+
+from bson import ObjectId
+from gridfs import GridFSBucket
+from ibutsu_server.db.models import Artifact
+from ibutsu_server.db.models import Group
+from ibutsu_server.db.models import Project
+from ibutsu_server.db.models import Result
+from ibutsu_server.db.models import Run
+from ibutsu_server.db.models import WidgetConfig
+from pymongo import MongoClient
+from sqlalchemy import create_engine
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker
+
+
+UUID_1_EPOCH = datetime(1582, 10, 15, tzinfo=timezone.utc)
+UUID_TICKS = 10000000
+UUID_VARIANT_1 = 0b1000000000000000
+ROWS_TO_COMMIT_AT_ONCE = 1000
+MONTHS_TO_KEEP = 2
+# To avoid foreign key violations, keep a smaller range of artifacts than runs/results
+ARTIFACT_MONTHS_TO_KEEP = 0.5 * MONTHS_TO_KEEP
+MIGRATION_LIMIT = 10000000000  # mostly for testing purposes
+
+Base = declarative_base()
+session = None
+
+
+TABLE_MAP = [
+    ("groups", Group),
+    ("projects", Project),
+    ("widgetConfig", WidgetConfig),
+    ("runs", Run),
+    ("results", Result),
+]
+FILE_MAP = [
+    # only convert artifacts, the reports in the existing DB aren't particularly useful
+    ("fs", Artifact),
+]
+
+# metadata fields that are their own column now
+FIELDS_TO_PROMOTE = [
+    "component",
+    "env",
+]
+# ID fields that must be converted
+ID_FIELDS = [
+    "result_id",
+    "resultId",
+    "run_id",
+    "runId",
+    "project",
+    "run",
+]
+# fields that need to be typecast
+FIELDS_TO_TYPECAST = ["navigable", "weight"]
+
+# json indexes for the tables
+INDEXES = {
+    "results": [
+        "CREATE INDEX ix_results_jenkins_job_name ON results((data->'jenkins'->>'job_name'));",
+        "CREATE INDEX ix_results_jenkins_build_number "
+        "ON results((data->'jenkins'->>'build_number'));",
+        "CREATE INDEX ix_results_classification ON results((data->>'classification'));",
+        "CREATE INDEX ix_results_assignee ON results((data->>'assignee'));",
+        "CREATE INDEX ix_results_exception_name ON results((data->>'exception_name'));",
+    ],
+    "runs": [
+        "CREATE INDEX ix_runs_jenkins_job_name ON runs((data->'jenkins'->>'job_name'));",
+        "CREATE INDEX ix_runs_jenkins_build_number ON runs((data->'jenkins'->>'build_number'));",
+    ],
+}
+
+
+def is_uuid(candidate):
+    """Determine if this is a uuid"""
+    try:
+        UUID(candidate)
+        return True
+    except ValueError:
+        return False
+
+
+def convert_objectid_to_uuid(object_id):
+    """Convert an ObjectId to a UUID"""
+    if isinstance(object_id, str) and not is_uuid(object_id) and ObjectId.is_valid(object_id):
+        object_id = ObjectId(object_id)
+    if not isinstance(object_id, ObjectId):
+        return object_id
+    unix_time = object_id.generation_time.astimezone(timezone.utc)
+    hex_string = str(object_id)
+    counter = int(hex_string[18:], 16)
+
+    uuid_time = "1{:015x}".format(
+        int((unix_time + (unix_time - UUID_1_EPOCH)).timestamp() * UUID_TICKS)
+    )
+    uuid_clock = "{:04x}".format(UUID_VARIANT_1 | (counter & 0x3FFF))
+    uuid_node = "1" + hex_string[8:18].rjust(11, "0")
+    string_uuid = "{}-{}-{}-{}-{}".format(
+        uuid_time[-8:], uuid_time[4:8], uuid_time[:4], uuid_clock, uuid_node
+    )
+    converted_uuid = UUID(string_uuid)
+    return str(converted_uuid)
+
+
+def get_mongo(mongo_url, mongo_db):
+    """Create a MongoDB client"""
+    client = MongoClient(mongo_url)
+    return client[mongo_db]
+
+
+def setup_postgres(postgres_url):
+    """Connect to PostgreSQL"""
+    global session
+    engine = create_engine(postgres_url)
+    Base.metadata.bind = engine
+    Base.metadata.create_all()
+    # create a Session
+    Session = sessionmaker(bind=engine)
+    session = Session()
+
+
+def migrate_table(collection, Model, vprint, filter_=None):
+    """Migrate a collection from MongoDB into a table in PostgreSQL"""
+    # TODO: update indexes once we know them
+
+    if Model.__tablename__ in ["runs", "results"]:
+        ids = []
+
+    for idx, row in enumerate(collection.find(filter_, sort=[("_id", -1)])):
+        if idx > MIGRATION_LIMIT:
+            break
+        vprint(".", end="")
+        mongo_id = row.pop("_id")
+        # overwrite id with PSQL uuid
+        row["id"] = convert_objectid_to_uuid(mongo_id)
+
+        # handle the time fields
+        if row.get("starttime"):
+            row["start_time"] = datetime.fromtimestamp(row.pop("starttime"))
+        if row.get("start_time") and isinstance(row["start_time"], float):
+            row["start_time"] = datetime.fromtimestamp(row["start_time"])
+        if row.get("start_time") and isinstance(row["start_time"], str):
+            row["start_time"] = datetime.fromtimestamp(float(row["start_time"]))
+        if row.get("created"):
+            if isinstance(row["created"], str):
+                row["created"] = datetime.fromisoformat(row["created"])
+            else:
+                row.pop("created")
+
+        # promote some metadata fields to the appropriate column
+        for field in FIELDS_TO_PROMOTE:
+            if row.get("metadata") and row["metadata"].get(field):
+                row[field] = row["metadata"][field]
+        # convert some ObjectId's to UUID's
+        for field in ID_FIELDS:
+            if row.get("metadata") and row["metadata"].get(field):
+                if field == "project":
+                    row["project_id"] = convert_objectid_to_uuid(row["metadata"][field])
+                    # also update the metadata field
+                    row["metadata"][field] = row["project_id"]
+                    if not is_uuid(row["project_id"]):
+                        row["project_id"] = None
+                elif field == "run":
+                    row["run_id"] = convert_objectid_to_uuid(row["metadata"][field])
+                    # also update the metadata field
+                    row["metadata"][field] = row["run_id"]
+                    if not is_uuid(row["run_id"]):
+                        row["run_id"] = None
+                elif field in ["result_id", "resultId"]:
+                    row["result_id"] = convert_objectid_to_uuid(row["metadata"][field])
+                else:
+                    row["metadata"][field] = convert_objectid_to_uuid(row["metadata"][field])
+            if row.get(field):
+                if field == "project":
+                    row["project_id"] = convert_objectid_to_uuid(row.pop(field))
+                    if not is_uuid(row["project_id"]):
+                        row["project_id"] = None
+
+        # Table specific stuff
+        if Model.__tablename__ == "projects":
+            if row.get("group_id"):
+                # one of the projects has a group_id assigned (but no group exists in the DB)
+                row["group_id"] = None
+
+        if Model.__tablename__ == "widget_configs":
+            for field in FIELDS_TO_TYPECAST:
+                if row.get(field):
+                    if field == "navigable":
+                        row[field] = row[field].lower()[0] in ["t", "y"]
+                    if field == "weight":
+                        row[field] = int(row[field])
+            if row.get("params") and row["params"].get("sort_field"):
+                # we no longer use this field
+                row["params"].pop("sort_field")
+            if row.get("params") and row["params"].get("group_field") == "metadata.component":
+                row["params"]["group_field"] = "component"
+
+        if is_uuid(row["id"]):
+            obj = Model.from_dict(**row)
+            session.add(obj)
+
+        if idx % ROWS_TO_COMMIT_AT_ONCE == 0:
+            session.commit()
+
+        if Model.__tablename__ in ["runs", "results"]:
+            ids.append(str(mongo_id))
+
+    session.commit()
+    # at the end of the session do a little cleanup
+    if Model.__tablename__ in ["runs", "results"]:
+        conn = Base.metadata.bind.connect()
+        # delete any results or runs without start_time
+        sql_delete = f"DELETE FROM {Model.__tablename__} where start_time IS NULL;"
+        conn.execute(sql_delete)
+    vprint(" done")
+
+    if Model.__tablename__ in ["runs", "results"]:
+        # return run_ids for the results to use
+        return ids
+
+
+def migrate_file(collection, Model, vprint, filter_=None):
+    """Migrate a GridFS collection from MongoDB into a table in PostgreSQL"""
+    # Access the underlying collection object
+    # TODO: update indexes once we know them
+    conn = Base.metadata.bind.connect()
+    for sql_index in INDEXES.get(Model.__tablename__, []):
+        vprint(".", end="")
+        conn.execute(sql_index)
+
+    # for runs and results, sort by descending start_time
+    if Model.__tablename__ == "artifacts":
+        sort = [("_id", -1)]
+        # only include most recent runs and results
+        filter_ = filter_
+    else:
+        sort = None
+        filter_ = None
+
+    for idx, row in enumerate(collection.find(filter_, sort=sort)):
+        if idx > MIGRATION_LIMIT:
+            break
+        vprint(".", end="")
+        pg_id = convert_objectid_to_uuid(row._id)
+        data = dict()
+        data["metadata"] = row.metadata
+        data["id"] = pg_id
+        data["filename"] = row.filename
+        data["content"] = row.read()
+        data["upload_date"] = row.upload_date
+        for field in ID_FIELDS:
+            if field == "resultId":
+                data["result_id"] = convert_objectid_to_uuid(row.metadata[field])
+                data["metadata"][field] = data["result_id"]
+            else:
+                pass
+        obj = Model.from_dict(**data)
+        session.add(obj)
+        if idx % ROWS_TO_COMMIT_AT_ONCE == 0:
+            session.commit()
+    session.commit()
+    vprint(" done")
+
+
+def migrate_tables(mongo, vprint, migrate_files=False):
+    """Migrate all the tables"""
+    # first get the time range
+    sort = [("_id", -1)]
+    most_recent_record = mongo["results"].find_one(sort=sort)
+    most_recent_create_time = most_recent_record["_id"].generation_time
+    # only include most recent runs and results
+    filter_ = {
+        "_id": {
+            "$gt": ObjectId.from_datetime(
+                most_recent_create_time - timedelta(days=30 * MONTHS_TO_KEEP)
+            ),
+            "$lt": ObjectId.from_datetime(most_recent_create_time),
+        }
+    }
+    # for files, filter by _id
+    file_filter = {
+        "_id": {
+            "$gt": ObjectId.from_datetime(
+                most_recent_create_time - timedelta(days=30 * ARTIFACT_MONTHS_TO_KEEP)
+            ),
+            "$lt": ObjectId.from_datetime(most_recent_create_time),
+        }
+    }
+
+    # loop over collections and migrate
+    for collection, model in TABLE_MAP:
+        # first create indexes for the table
+        conn = Base.metadata.bind.connect()
+        for sql_index in INDEXES.get(model.__tablename__, []):
+            vprint(".", end="")
+            conn.execute(sql_index)
+
+        # get a list of result_ids
+        result_ids = []
+
+        # migrate the table over
+        vprint("Migrating {} ".format(collection), end="")
+        if collection == "runs":
+            run_ids = migrate_table(mongo[collection], model, vprint, filter_=filter_)
+        elif collection == "results":
+            # migrate in chunks of 100 runs at a time
+            run_chunks = [run_ids[i : i + 100] for i in range(0, len(run_ids), 100)]
+            for run_list in run_chunks:
+                result_filter = {"metadata.run": {"$in": run_list}}  # filter on runs we know exist
+                result_ids.extend(
+                    migrate_table(mongo[collection], model, vprint, filter_=result_filter)
+                )
+        else:
+            migrate_table(mongo[collection], model, vprint)
+    if migrate_files:
+        for collection, model in FILE_MAP:
+            vprint("Migrating {} ".format(collection), end="")
+            if collection == "fs":
+                # migrate in chunks of 1000 results at a time
+                result_chunks = [result_ids[i : i + 1000] for i in range(0, len(result_ids), 1000)]
+                artifact_filter = file_filter.copy()
+                for result_list in result_chunks:
+                    artifact_filter.update({"metadata.resultId": {"$in": result_list}})
+                    migrate_file(
+                        GridFSBucket(mongo, collection), model, vprint, filter_=artifact_filter
+                    )
+            else:
+                migrate_file(GridFSBucket(mongo, collection), model, vprint, filter_=file_filter)
+
+
+def build_mongo_connection(url):
+    """Create a MongoDB connection URL"""
+    url_parts = url.split("/")
+    database = url_parts[-1]
+    connection_url = "/".join(url_parts[:-1]) + "/?authSource={}".format(database)
+    return connection_url, database
+
+
+def fake_print(*args, **kwargs):
+    pass
+
+
+def parse_args():
+    parser = ArgumentParser()
+    parser.add_argument("mongo_url", help="URL to MongoDB database")
+    parser.add_argument("postgres_url", help="URL to PostgreSQL database")
+    parser.add_argument("-v", "--verbose", action="store_true", help="Say what I'm doing")
+    parser.add_argument("-f", "--files", action="store_true", help="Migrate artifact files")
+    return parser.parse_args()
+
+
+def main():
+    args = parse_args()
+    vprint = print if args.verbose else fake_print
+    mongo_url, database = build_mongo_connection(args.mongo_url)
+    mongo = get_mongo(mongo_url, database)
+    setup_postgres(args.postgres_url)
+    migrate_tables(mongo, vprint, args.files)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/backend/ibutsu_server/widgets/jenkins_heatmap.py b/backend/ibutsu_server/widgets/jenkins_heatmap.py
index 89992cf8..6be35a19 100644
--- a/backend/ibutsu_server/widgets/jenkins_heatmap.py
+++ b/backend/ibutsu_server/widgets/jenkins_heatmap.py
@@ -1,4 +1,5 @@
 from ibutsu_server.db.base import Float
+from ibutsu_server.db.base import Integer
 from ibutsu_server.db.base import session
 from ibutsu_server.db.models import Run
 from ibutsu_server.filters import apply_filters
@@ -42,7 +43,7 @@ def _get_builds(job_name, builds, project=None):

     # create the query
     query = (
-        session.query(group_field.label("build_number"))
+        session.query(group_field.cast(Integer).label("build_number"))
         .group_by("build_number")
         .order_by(desc("build_number"))
     )
@@ -51,7 +52,7 @@ def _get_builds(job_name, builds, project=None):
         query = apply_filters(query, filters, Run)

     # make the query
-    return [build_number[0] for build_number in query.limit(builds)]
+    return [str(build_number[0]) for build_number in query.limit(builds)]


 def _get_heatmap(job_name, builds, group_field, count_skips, project=None):
@@ -67,7 +68,7 @@ def _get_heatmap(job_name, builds, group_field, count_skips, project=None):
         f"{group_field}@y",
     ]
     if project:
-        filters.append(f"metadata.project={project}")
+        filters.append(f"project_id={project}")

     # generate the group_fields
     group_field = string_to_column(group_field, Run)
diff --git a/frontend/src/app.js b/frontend/src/app.js
index 0cff88a1..bc233f5c 100644
--- a/frontend/src/app.js
+++ b/frontend/src/app.js
@@ -93,7 +93,7 @@ class App extends React.Component {
     let params = {'filter': ['type=view', 'navigable=true']};
     let project = getActiveProject();
     if (project) {
-      params['filter'].push('project=' + project.id);
+      params['filter'].push('project_id=' + project.id);
     }
     fetch(buildUrl(Settings.serverUrl + '/widget-config', params))
       .then(response => response.json())
diff --git a/frontend/src/dashboard.js b/frontend/src/dashboard.js
index 857c3e5c..8d7c9635 100644
--- a/frontend/src/dashboard.js
+++ b/frontend/src/dashboard.js
@@ -40,7 +40,7 @@ export class Dashboard extends React.Component {
     let params = {"type": "widget"};
     let project = getActiveProject();
     if (project) {
-      params['filter'] = 'project=' + project.id;
+      params['filter'] = 'project_id=' + project.id;
     }
     fetch(buildUrl(Settings.serverUrl + '/widget-config', params))
       .then(response => response.json())
diff --git a/frontend/src/result-list.js b/frontend/src/result-list.js
index 53d38601..721ee7ab 100644
--- a/frontend/src/result-list.js
+++ b/frontend/src/result-list.js
@@ -341,10 +341,10 @@ export class ResultList extends React.Component {
     let filters = this.state.filters;
     const project = getActiveProject();
     if (project) {
-      filters['metadata.project'] = {'val': project.id, 'op': 'eq'};
+      filters['project_id'] = {'val': project.id, 'op': 'eq'};
     }
-    else if (Object.prototype.hasOwnProperty.call(filters, 'metadata.project')) {
-      delete filters['metadata.project']
+    else if (Object.prototype.hasOwnProperty.call(filters, 'project_id')) {
+      delete filters['project_id']
     }
     if (filters) {
       params['apply_max'] = true; // if filters are applied limit the documents returned
diff --git a/frontend/src/run-list.js b/frontend/src/run-list.js
index 3ce3268d..ebe88360 100644
--- a/frontend/src/run-list.js
+++ b/frontend/src/run-list.js
@@ -323,10 +323,10 @@ export class RunList extends React.Component {
     let filters = this.state.filters;
     const project = getActiveProject();
     if (project) {
-      filters['metadata.project'] = {'val': project.id, 'op': 'eq'};
+      filters['project_id'] = {'val': project.id, 'op': 'eq'};
    }
-    else if (Object.prototype.hasOwnProperty.call(filters, 'metadata.project')) {
-      delete filters['metadata.project']
+    else if (Object.prototype.hasOwnProperty.call(filters, 'project_id')) {
+      delete filters['project_id']
     }
     params['pageSize'] = this.state.pageSize;
     params['page'] = this.state.page;
diff --git a/frontend/src/run.js b/frontend/src/run.js
index 65ed7fe0..147b4fb5 100644
--- a/frontend/src/run.js
+++ b/frontend/src/run.js
@@ -343,7 +343,7 @@ export class Run extends React.Component {
   }

   getResultsForTree(page) {
-    let params = {filter: 'metadata.run=' + this.state.id};
+    let params = {filter: 'run_id=' + this.state.id};
     params['pageSize'] = 500;
     params['page'] = page;
     fetch(buildUrl(Settings.serverUrl + '/result', params))