
Source Code for Module coprs.logic.builds_logic

import tempfile
import shutil
import json
import os
import pprint
import time
import requests

from sqlalchemy.sql import text
from sqlalchemy.sql.expression import not_
from sqlalchemy.orm import joinedload, selectinload
from sqlalchemy import or_
from sqlalchemy import and_
from sqlalchemy import func, desc
from sqlalchemy.sql import false, true
from werkzeug.utils import secure_filename
from sqlalchemy import bindparam, Integer, String
from sqlalchemy.exc import IntegrityError

from copr_common.enums import FailTypeEnum, StatusEnum
from coprs import app
from coprs import cache
from coprs import db
from coprs import models
from coprs import helpers
from coprs.constants import DEFAULT_BUILD_TIMEOUT, MAX_BUILD_TIMEOUT
from coprs.exceptions import MalformedArgumentException, ActionInProgressException, InsufficientRightsException, \
                             UnrepeatableBuildException, RequestCannotBeExecuted, DuplicateException

from coprs.logic import coprs_logic
from coprs.logic import users_logic
from coprs.logic.actions_logic import ActionsLogic
from coprs.models import BuildChroot
from .coprs_logic import MockChrootsLogic
from coprs.logic.packages_logic import PackagesLogic

from .helpers import get_graph_parameters

log = app.logger


class BuildsLogic(object):
    @classmethod
    def get(cls, build_id):
        return models.Build.query.filter(models.Build.id == build_id)

    @classmethod
    def get_build_tasks(cls, status, background=None):
        """ Returns tasks with given status. If background is specified then
        returns normal jobs (false) or background jobs (true)
        """
        result = models.BuildChroot.query.join(models.Build)\
            .filter(models.BuildChroot.status == status)\
            .order_by(models.Build.id.asc())
        if background is not None:
            result = result.filter(models.Build.is_background == (true() if background else false()))
        return result
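    # Illustrative usage sketch (not part of the original module). Assuming an
    # application context, the dispatcher-style query for non-background
    # pending chroot tasks might look like:
    #
    #     pending_chroots = BuildsLogic.get_build_tasks(
    #         StatusEnum("pending"), background=False).all()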

    @classmethod
    def get_srpm_build_tasks(cls, status, background=None):
        """ Returns srpm build tasks with given status. If background is
        specified then returns normal jobs (false) or background jobs (true)
        """
        result = models.Build.query\
            .filter(models.Build.source_status == status)\
            .order_by(models.Build.id.asc())
        if background is not None:
            result = result.filter(models.Build.is_background == (true() if background else false()))
        return result

    @classmethod
    @cache.memoize(timeout=2*60)
    def get_recent_task_ids(cls, user=None, limit=100, period_days=2):
        query_args = (
            models.BuildChroot.build_id,
            func.max(models.BuildChroot.ended_on).label('max_ended_on'),
            models.Build.submitted_on,
        )
        group_by_args = (
            models.BuildChroot.build_id,
            models.Build.submitted_on,
        )

        if user:
            query_args += (models.Build.user_id,)
            group_by_args += (models.Build.user_id,)

        subquery = (db.session.query(*query_args)
                    .join(models.Build)
                    .group_by(*group_by_args)
                    .having(func.count() == func.count(models.BuildChroot.ended_on))
                    .having(models.Build.submitted_on > time.time() - 3600*24*period_days))
        if user:
            subquery = subquery.having(models.Build.user_id == user.id)

        subquery = subquery.order_by(desc('max_ended_on')).limit(limit).subquery()

        query = models.Build.query.join(subquery, subquery.c.build_id == models.Build.id)
        return [i.id for i in query.all()]
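    # Note (added for clarity, not in the original source): results are
    # memoized for two minutes via cache.memoize, so repeated dashboard
    # requests reuse the same id list. A hypothetical call:
    #
    #     recent_ids = BuildsLogic.get_recent_task_ids(user=some_user, limit=20)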

    @classmethod
    def get_recent_tasks(cls, *args, **kwargs):
        task_ids = cls.get_recent_task_ids(*args, **kwargs)
        query = models.Build.query.filter(models.Build.id.in_(task_ids))
        return sorted(query.all(), key=lambda o: task_ids.index(o.id))

    @classmethod
    def get_running_tasks_by_time(cls, start, end):
        result = models.BuildChroot.query\
            .filter(models.BuildChroot.ended_on > start)\
            .filter(models.BuildChroot.started_on < end)\
            .order_by(models.BuildChroot.started_on.asc())

        return result

    @classmethod
    def get_chroot_histogram(cls, start, end):
        chroots = []
        chroot_query = BuildChroot.query\
            .filter(models.BuildChroot.started_on < end)\
            .filter(models.BuildChroot.ended_on > start)\
            .with_entities(BuildChroot.mock_chroot_id,
                           func.count(BuildChroot.mock_chroot_id))\
            .group_by(BuildChroot.mock_chroot_id)\
            .order_by(BuildChroot.mock_chroot_id)

        for chroot in chroot_query:
            chroots.append([chroot[0], chroot[1]])

        mock_chroots = coprs_logic.MockChrootsLogic.get_multiple()
        for mock_chroot in mock_chroots:
            for l in chroots:
                if l[0] == mock_chroot.id:
                    l[0] = mock_chroot.name

        return chroots

    @classmethod
    def get_pending_jobs_bucket(cls, start, end):
        query = text("""
            SELECT COUNT(*) as result
            FROM build_chroot JOIN build on build.id = build_chroot.build_id
            WHERE
                build.submitted_on < :end
                AND (
                    build_chroot.started_on > :start
                    OR (build_chroot.started_on is NULL AND build_chroot.status = :status)
                    -- for currently pending builds we need to filter on status=pending because there might be
                    -- failed builds that have started_on=NULL
                )
                AND NOT build.canceled
        """)

        res = db.engine.execute(query, start=start, end=end, status=StatusEnum("pending"))
        return res.first().result

    @classmethod
    def get_running_jobs_bucket(cls, start, end):
        query = text("""
            SELECT COUNT(*) as result
            FROM build_chroot
            WHERE
                started_on < :end
                AND (ended_on > :start OR (ended_on is NULL AND status = :status))
                -- for currently running builds we need to filter on status=running because there might be failed
                -- builds that have ended_on=NULL
        """)

        res = db.engine.execute(query, start=start, end=end, status=StatusEnum("running"))
        return res.first().result

    @classmethod
    def get_cached_graph_data(cls, params):
        data = {
            "pending": [],
            "running": [],
        }
        result = models.BuildsStatistics.query\
            .filter(models.BuildsStatistics.stat_type == params["type"])\
            .filter(models.BuildsStatistics.time >= params["start"])\
            .filter(models.BuildsStatistics.time <= params["end"])\
            .order_by(models.BuildsStatistics.time)

        for row in result:
            data["pending"].append(row.pending)
            data["running"].append(row.running)

        return data

    @classmethod
    def get_task_graph_data(cls, type):
        data = [["pending"], ["running"], ["avg running"], ["time"]]
        params = get_graph_parameters(type)
        cached_data = cls.get_cached_graph_data(params)
        data[0].extend(cached_data["pending"])
        data[1].extend(cached_data["running"])

        for i in range(len(data[0]) - 1, params["steps"]):
            step_start = params["start"] + i * params["step"]
            step_end = step_start + params["step"]
            pending = cls.get_pending_jobs_bucket(step_start, step_end)
            running = cls.get_running_jobs_bucket(step_start, step_end)
            data[0].append(pending)
            data[1].append(running)
            cls.cache_graph_data(type, time=step_start, pending=pending, running=running)

        running_total = 0
        for i in range(1, params["steps"] + 1):
            running_total += data[1][i]

        data[2].extend([running_total * 1.0 / params["steps"]] * (len(data[0]) - 1))

        for i in range(params["start"], params["end"], params["step"]):
            data[3].append(time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(i)))

        return data
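    # Shape of the returned structure (illustrative values only; the row
    # labels come from the code above):
    #
    #     [["pending", 3, 5, ...],
    #      ["running", 2, 4, ...],
    #      ["avg running", 3.0, 3.0, ...],
    #      ["time", "2019-01-01 10:00:00", ...]]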

    @classmethod
    def get_small_graph_data(cls, type):
        data = [[""]]
        params = get_graph_parameters(type)
        cached_data = cls.get_cached_graph_data(params)
        data[0].extend(cached_data["running"])

        for i in range(len(data[0]) - 1, params["steps"]):
            step_start = params["start"] + i * params["step"]
            step_end = step_start + params["step"]
            running = cls.get_running_jobs_bucket(step_start, step_end)
            data[0].append(running)
            cls.cache_graph_data(type, time=step_start, running=running)

        return data

    @classmethod
    def cache_graph_data(cls, type, time, pending=0, running=0):
        result = models.BuildsStatistics.query\
            .filter(models.BuildsStatistics.stat_type == type)\
            .filter(models.BuildsStatistics.time == time).first()
        if result:
            return

        try:
            cached_data = models.BuildsStatistics(
                time=time,
                stat_type=type,
                running=running,
                pending=pending
            )
            db.session.add(cached_data)
            db.session.commit()
        except IntegrityError:  # other process already calculated the graph data and cached it
            db.session.rollback()

    @classmethod
    def get_build_importing_queue(cls, background=None):
        """
        Returns Builds which are waiting to be uploaded to dist git
        """
        query = (models.Build.query
                 .filter(models.Build.canceled == false())
                 .filter(models.Build.source_status == StatusEnum("importing"))
                 .order_by(models.Build.id.asc()))
        if background is not None:
            query = query.filter(models.Build.is_background == (true() if background else false()))
        return query

    @classmethod
    def get_pending_srpm_build_tasks(cls, background=None):
        query = (models.Build.query
                 .filter(models.Build.canceled == false())
                 .filter(models.Build.source_status == StatusEnum("pending"))
                 .order_by(models.Build.is_background.asc(), models.Build.id.asc()))
        if background is not None:
            query = query.filter(models.Build.is_background == (true() if background else false()))
        return query

    @classmethod
    def get_pending_build_tasks(cls, background=None):
        query = (models.BuildChroot.query
                 .outerjoin(models.Build)
                 .outerjoin(models.CoprDir)
                 .outerjoin(models.Package, models.Package.id == models.Build.package_id)
                 .options(joinedload('build').joinedload('copr_dir'),
                          joinedload('build').joinedload('package'))
                 .filter(models.Build.canceled == false())
                 .filter(or_(
                     models.BuildChroot.status == StatusEnum("pending"),
                     and_(
                         models.BuildChroot.status == StatusEnum("running"),
                         models.BuildChroot.started_on < int(time.time() - 1.1 * MAX_BUILD_TIMEOUT),
                         models.BuildChroot.ended_on.is_(None)
                     )
                 ))
                 .order_by(models.Build.is_background.asc(), models.Build.id.asc()))
        if background is not None:
            query = query.filter(models.Build.is_background == (true() if background else false()))
        return query

    @classmethod
    def get_build_task(cls, task_id):
        try:
            build_id, chroot_name = task_id.split("-", 1)
        except ValueError:
            raise MalformedArgumentException("Invalid task_id {}".format(task_id))

        build_chroot = BuildChrootsLogic.get_by_build_id_and_name(build_id, chroot_name)
        return build_chroot.join(models.Build).first()
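    # Illustrative call (made-up build id): task_id is expected in the form
    # "<build_id>-<chroot_name>", which the split("-", 1) above relies on:
    #
    #     build_chroot = BuildsLogic.get_build_task("123456-fedora-30-x86_64")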

    @classmethod
    def get_srpm_build_task(cls, build_id):
        return BuildsLogic.get_by_id(build_id).first()

    @classmethod
    def get_multiple(cls):
        return models.Build.query.order_by(models.Build.id.desc())

    @classmethod
    def get_multiple_by_copr(cls, copr):
        """ Get collection of builds in copr sorted by build_id descending
        """
        return cls.get_multiple().filter(models.Build.copr == copr)

    @classmethod
    def get_multiple_by_user(cls, user):
        """ Get collection of builds in copr sorted by build_id descending
        from the copr belonging to `user`
        """
        return cls.get_multiple().join(models.Build.copr).filter(
            models.Copr.user == user)

    @classmethod
    def get_copr_builds_list(cls, copr, dirname=None):
        query = models.Build.query.filter(models.Build.copr_id == copr.id)
        if dirname:
            copr_dir = coprs_logic.CoprDirsLogic.get_by_copr(copr, dirname).one()
        else:
            copr_dir = copr.main_dir
        query = query.filter(models.Build.copr_dir_id == copr_dir.id)
        query = query.options(selectinload('build_chroots'), selectinload('package'))
        return query

    @classmethod
    def join_group(cls, query):
        return query.join(models.Copr).outerjoin(models.Group)

    @classmethod
    def get_multiple_by_name(cls, username, coprname):
        query = cls.get_multiple()
        return (query.join(models.Build.copr)
                .options(db.contains_eager(models.Build.copr))
                .join(models.Copr.user)
                .filter(models.Copr.name == coprname)
                .filter(models.User.username == username))

    @classmethod
    def get_by_ids(cls, ids):
        return models.Build.query.filter(models.Build.id.in_(ids))

    @classmethod
    def get_by_id(cls, build_id):
        return models.Build.query.filter(models.Build.id == build_id)

    @classmethod
    def create_new_from_other_build(cls, user, copr, source_build,
                                    chroot_names=None, **build_options):
        skip_import = False
        git_hashes = {}

        if source_build.source_type == helpers.BuildSourceEnum('upload'):
            if source_build.repeatable:
                skip_import = True
                for chroot in source_build.build_chroots:
                    git_hashes[chroot.name] = chroot.git_hash
            else:
                raise UnrepeatableBuildException("Build sources were not fully imported into CoprDistGit.")

        build = cls.create_new(user, copr, source_build.source_type, source_build.source_json, chroot_names,
                               pkgs=source_build.pkgs, git_hashes=git_hashes, skip_import=skip_import,
                               srpm_url=source_build.srpm_url, copr_dirname=source_build.copr_dir.name, **build_options)
        build.package_id = source_build.package_id
        build.pkg_version = source_build.pkg_version
        build.resubmitted_from_id = source_build.id

        return build

    @classmethod
    def create_new_from_url(cls, user, copr, url, chroot_names=None,
                            copr_dirname=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr

        :type chroot_names: List[str]

        :rtype: models.Build
        """
        source_type = helpers.BuildSourceEnum("link")
        source_json = json.dumps({"url": url})
        srpm_url = None if url.endswith('.spec') else url
        return cls.create_new(user, copr, source_type, source_json, chroot_names,
                              pkgs=url, srpm_url=srpm_url, copr_dirname=copr_dirname, **build_options)
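    # A minimal usage sketch (hypothetical URL; existing user/copr objects and
    # an application context are assumed; the caller commits the session):
    #
    #     build = BuildsLogic.create_new_from_url(
    #         user, copr, "https://example.com/foo-1.0-1.src.rpm",
    #         chroot_names=["fedora-30-x86_64"])
    #     db.session.commit()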

    @classmethod
    def create_new_from_scm(cls, user, copr, scm_type, clone_url,
                            committish='', subdirectory='', spec='', srpm_build_method='rpkg',
                            chroot_names=None, copr_dirname=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr

        :type chroot_names: List[str]

        :rtype: models.Build
        """
        source_type = helpers.BuildSourceEnum("scm")
        source_json = json.dumps({"type": scm_type,
                                  "clone_url": clone_url,
                                  "committish": committish,
                                  "subdirectory": subdirectory,
                                  "spec": spec,
                                  "srpm_build_method": srpm_build_method})
        return cls.create_new(user, copr, source_type, source_json, chroot_names, copr_dirname=copr_dirname, **build_options)

    @classmethod
    def create_new_from_pypi(cls, user, copr, pypi_package_name, pypi_package_version, spec_template,
                             python_versions, chroot_names=None, copr_dirname=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :type pypi_package_name: str
        :type pypi_package_version: str
        :type python_versions: List[str]

        :type chroot_names: List[str]

        :rtype: models.Build
        """
        source_type = helpers.BuildSourceEnum("pypi")
        source_json = json.dumps({"pypi_package_name": pypi_package_name,
                                  "pypi_package_version": pypi_package_version,
                                  "spec_template": spec_template,
                                  "python_versions": python_versions})
        return cls.create_new(user, copr, source_type, source_json, chroot_names, copr_dirname=copr_dirname, **build_options)

    @classmethod
    def create_new_from_rubygems(cls, user, copr, gem_name, chroot_names=None,
                                 copr_dirname=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :type gem_name: str
        :type chroot_names: List[str]
        :rtype: models.Build
        """
        source_type = helpers.BuildSourceEnum("rubygems")
        source_json = json.dumps({"gem_name": gem_name})
        return cls.create_new(user, copr, source_type, source_json, chroot_names, copr_dirname=copr_dirname, **build_options)

    @classmethod
    def create_new_from_custom(cls, user, copr, script, script_chroot=None, script_builddeps=None,
                               script_resultdir=None, chroot_names=None, copr_dirname=None, **kwargs):
        """
        :type user: models.User
        :type copr: models.Copr
        :type script: str
        :type script_chroot: str
        :type script_builddeps: str
        :type script_resultdir: str
        :type chroot_names: List[str]
        :rtype: models.Build
        """
        source_type = helpers.BuildSourceEnum("custom")
        source_dict = {
            'script': script,
            'chroot': script_chroot,
            'builddeps': script_builddeps,
            'resultdir': script_resultdir,
        }

        return cls.create_new(user, copr, source_type, json.dumps(source_dict),
                              chroot_names, copr_dirname=copr_dirname, **kwargs)

    @classmethod
    def create_new_from_upload(cls, user, copr, f_uploader, orig_filename,
                               chroot_names=None, copr_dirname=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :param f_uploader(file_path): function which stores data at the given `file_path`
        :return:
        """
        tmp = tempfile.mkdtemp(dir=app.config["STORAGE_DIR"])
        tmp_name = os.path.basename(tmp)
        filename = secure_filename(orig_filename)
        file_path = os.path.join(tmp, filename)
        f_uploader(file_path)

        # make the pkg public
        pkg_url = "{baseurl}/tmp/{tmp_dir}/{filename}".format(
            baseurl=app.config["PUBLIC_COPR_BASE_URL"],
            tmp_dir=tmp_name,
            filename=filename)

        # create json describing the build source
        source_type = helpers.BuildSourceEnum("upload")
        source_json = json.dumps({"url": pkg_url, "pkg": filename, "tmp": tmp_name})
        srpm_url = None if pkg_url.endswith('.spec') else pkg_url

        try:
            build = cls.create_new(user, copr, source_type, source_json,
                                   chroot_names, pkgs=pkg_url, srpm_url=srpm_url,
                                   copr_dirname=copr_dirname, **build_options)
        except Exception:
            shutil.rmtree(tmp)  # todo: maybe we should delete in some cleanup procedure?
            raise

        return build
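    # Sketch of the f_uploader contract (an assumption drawn from the
    # docstring): any callable that persists the payload at the path it is
    # given works, e.g. a werkzeug FileStorage from a Flask request (the
    # "pkgs" form field name is hypothetical):
    #
    #     def f_uploader(file_path):
    #         request.files["pkgs"].save(file_path)
    #
    #     build = BuildsLogic.create_new_from_upload(
    #         user, copr, f_uploader, request.files["pkgs"].filename)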

    @classmethod
    def create_new(cls, user, copr, source_type, source_json, chroot_names=None, pkgs="",
                   git_hashes=None, skip_import=False, background=False, batch=None,
                   srpm_url=None, copr_dirname=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :type chroot_names: List[str]
        :type source_type: int value from helpers.BuildSourceEnum
        :type source_json: str in json format
        :type pkgs: str
        :type git_hashes: dict
        :type skip_import: bool
        :type background: bool
        :type batch: models.Batch
        :rtype: models.Build
        """
        chroots = None
        if chroot_names:
            chroots = []
            for chroot in copr.active_chroots:
                if chroot.name in chroot_names:
                    chroots.append(chroot)

        build = cls.add(
            user=user,
            pkgs=pkgs,
            copr=copr,
            chroots=chroots,
            source_type=source_type,
            source_json=source_json,
            enable_net=build_options.get("enable_net", copr.build_enable_net),
            background=background,
            git_hashes=git_hashes,
            skip_import=skip_import,
            batch=batch,
            srpm_url=srpm_url,
            copr_dirname=copr_dirname,
        )

        if user.proven:
            if "timeout" in build_options:
                build.timeout = build_options["timeout"]

        return build

    @classmethod
    def add(cls, user, pkgs, copr, source_type=None, source_json=None,
            repos=None, chroots=None, timeout=None, enable_net=True,
            git_hashes=None, skip_import=False, background=False, batch=None,
            srpm_url=None, copr_dirname=None):

        if chroots is None:
            chroots = []

        coprs_logic.CoprsLogic.raise_if_unfinished_blocking_action(
            copr, "Can't build while there is an operation in progress: {action}")
        users_logic.UsersLogic.raise_if_cant_build_in_copr(
            user, copr,
            "You don't have permissions to build in this copr.")

        if not repos:
            repos = copr.repos

        # todo: eliminate pkgs and this check
        if pkgs and (" " in pkgs or "\n" in pkgs or "\t" in pkgs or pkgs.strip() != pkgs):
            raise MalformedArgumentException("Trying to create a build using src_pkg "
                                             "with bad characters. Forgot to split?")

        # just temporary to keep compatibility
        if not source_type or not source_json:
            source_type = helpers.BuildSourceEnum("link")
            source_json = json.dumps({"url": pkgs})

        if skip_import and srpm_url:
            chroot_status = StatusEnum("pending")
            source_status = StatusEnum("succeeded")
        else:
            chroot_status = StatusEnum("waiting")
            source_status = StatusEnum("pending")

        copr_dir = None
        if copr_dirname:
            if not copr_dirname.startswith(copr.name + ':') and copr_dirname != copr.name:
                raise MalformedArgumentException("Copr dirname not starting with copr name.")
            copr_dir = coprs_logic.CoprDirsLogic.get_or_create(copr, copr_dirname)

        build = models.Build(
            user=user,
            pkgs=pkgs,
            copr=copr,
            repos=repos,
            source_type=source_type,
            source_json=source_json,
            source_status=source_status,
            submitted_on=int(time.time()),
            enable_net=bool(enable_net),
            is_background=bool(background),
            batch=batch,
            srpm_url=srpm_url,
            copr_dir=copr_dir,
        )

        if timeout:
            build.timeout = timeout or DEFAULT_BUILD_TIMEOUT

        db.session.add(build)

        for chroot in chroots:
            # Chroots were explicitly set per-build.
            git_hash = None
            if git_hashes:
                git_hash = git_hashes.get(chroot.name)
            buildchroot = models.BuildChroot(
                build=build,
                status=chroot_status,
                mock_chroot=chroot,
                git_hash=git_hash,
            )
            db.session.add(buildchroot)

        return build
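    # Hedged usage sketch (normally reached through the create_new_from_*
    # helpers above; the SCM source here is hypothetical):
    #
    #     build = BuildsLogic.add(
    #         user, pkgs="", copr=copr,
    #         source_type=helpers.BuildSourceEnum("scm"),
    #         source_json=json.dumps({"clone_url": "https://example.com/repo.git"}),
    #         chroots=list(copr.active_chroots))
    #     db.session.commit()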

    @classmethod
    def rebuild_package(cls, package, source_dict_update={}, copr_dir=None, update_callback=None,
                        scm_object_type=None, scm_object_id=None,
                        scm_object_url=None, submitted_by=None):

        source_dict = package.source_json_dict
        source_dict.update(source_dict_update)
        source_json = json.dumps(source_dict)

        if not copr_dir:
            copr_dir = package.copr.main_dir

        build = models.Build(
            user=None,
            pkgs=None,
            package=package,
            copr=package.copr,
            repos=package.copr.repos,
            source_status=StatusEnum("pending"),
            source_type=package.source_type,
            source_json=source_json,
            submitted_on=int(time.time()),
            enable_net=package.copr.build_enable_net,
            timeout=DEFAULT_BUILD_TIMEOUT,
            copr_dir=copr_dir,
            update_callback=update_callback,
            scm_object_type=scm_object_type,
            scm_object_id=scm_object_id,
            scm_object_url=scm_object_url,
            submitted_by=submitted_by,
        )
        db.session.add(build)

        status = StatusEnum("waiting")
        for chroot in package.chroots:
            buildchroot = models.BuildChroot(
                build=build,
                status=status,
                mock_chroot=chroot,
                git_hash=None
            )
            db.session.add(buildchroot)

        cls.process_update_callback(build)
        return build

    terminal_states = {StatusEnum("failed"), StatusEnum("succeeded"), StatusEnum("canceled")}

    @classmethod
    def get_buildchroots_by_build_id_and_branch(cls, build_id, branch):
        """
        Returns a list of BuildChroots identified by build_id and dist-git
        branch name.
        """
        return (
            models.BuildChroot.query
            .join(models.MockChroot)
            .filter(models.BuildChroot.build_id == build_id)
            .filter(models.MockChroot.distgit_branch_name == branch)
        ).all()

    @classmethod
    def delete_local_source(cls, build):
        """
        Deletes the locally stored data for build purposes. This is typically
        an uploaded srpm file, an uploaded spec file, or webhook POST content.
        """
        # is it hosted on the copr frontend?
        data = json.loads(build.source_json)
        if 'tmp' in data:
            tmp = data["tmp"]
            storage_path = app.config["STORAGE_DIR"]
            try:
                shutil.rmtree(os.path.join(storage_path, tmp))
            except:
                pass

    @classmethod
    def update_state_from_dict(cls, build, upd_dict):
        """
        :param build:
        :param upd_dict:
            example:
            {
              "builds":[
               {
                 "id": 1,
                 "copr_id": 2,
                 "started_on": 1390866440
               },
               {
                 "id": 2,
                 "copr_id": 1,
                 "status": 0,
                 "chroot": "fedora-18-x86_64",
                 "result_dir": "baz",
                 "ended_on": 1390866440
               }]
            }
        """
        log.info("Updating build {} by: {}".format(build.id, upd_dict))

        # create the package if it doesn't exist
        pkg_name = upd_dict.get('pkg_name', None)
        if pkg_name:
            if not PackagesLogic.get(build.copr_dir.id, pkg_name).first():
                try:
                    package = PackagesLogic.add(
                        build.copr.user, build.copr_dir,
                        pkg_name, build.source_type, build.source_json)
                    db.session.add(package)
                    db.session.commit()
                except (IntegrityError, DuplicateException) as e:
                    app.logger.exception(e)
                    db.session.rollback()
                    return
            build.package = PackagesLogic.get(build.copr_dir.id, pkg_name).first()

        for attr in ["built_packages", "srpm_url", "pkg_version"]:
            value = upd_dict.get(attr, None)
            if value:
                setattr(build, attr, value)

        # update source build status
        if str(upd_dict.get("task_id")) == str(build.task_id):
            build.result_dir = upd_dict.get("result_dir", "")

            new_status = upd_dict.get("status")
            if new_status == StatusEnum("succeeded"):
                new_status = StatusEnum("importing")
                chroot_status = StatusEnum("waiting")
                if not build.build_chroots:
                    # create the BuildChroots from Package setting, if not
                    # already set explicitly for concrete build
                    for chroot in build.package.chroots:
                        buildchroot = models.BuildChroot(
                            build=build,
                            status=chroot_status,
                            mock_chroot=chroot,
                            git_hash=None,
                        )
                        db.session.add(buildchroot)
                else:
                    for buildchroot in build.build_chroots:
                        buildchroot.status = chroot_status
                        db.session.add(buildchroot)

            build.source_status = new_status
            if new_status == StatusEnum("failed") or \
                    new_status == StatusEnum("skipped"):
                for ch in build.build_chroots:
                    ch.status = new_status
                    ch.ended_on = upd_dict.get("ended_on") or time.time()
                    ch.started_on = upd_dict.get("started_on", ch.ended_on)
                    db.session.add(ch)

            if new_status == StatusEnum("failed"):
                build.fail_type = FailTypeEnum("srpm_build_error")

            cls.process_update_callback(build)
            db.session.add(build)
            return

        if "chroot" in upd_dict:
            # update respective chroot status
            for build_chroot in build.build_chroots:
                if build_chroot.name == upd_dict["chroot"]:
                    build_chroot.result_dir = upd_dict.get("result_dir", "")

                    if "status" in upd_dict and build_chroot.status not in BuildsLogic.terminal_states:
                        build_chroot.status = upd_dict["status"]

                    if upd_dict.get("status") in BuildsLogic.terminal_states:
                        build_chroot.ended_on = upd_dict.get("ended_on") or time.time()

                    if upd_dict.get("status") == StatusEnum("starting"):
                        build_chroot.started_on = upd_dict.get("started_on") or time.time()

                    db.session.add(build_chroot)

                    # If the last package of a module was successfully built,
                    # then send an action to create module repodata on backend
                    if (build.module
                            and upd_dict.get("status") == StatusEnum("succeeded")
                            and all(b.status == StatusEnum("succeeded") for b in build.module.builds)):
                        ActionsLogic.send_build_module(build.copr, build.module)

        cls.process_update_callback(build)
        db.session.add(build)
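    # Illustrative call (values are made up): this is the shape of a per-chroot
    # status update as fed in from the backend; the caller commits the session:
    #
    #     BuildsLogic.update_state_from_dict(build, {
    #         "chroot": "fedora-18-x86_64",
    #         "status": StatusEnum("succeeded"),
    #         "ended_on": 1390866440,
    #         "result_dir": "00123456-foo",
    #     })
    #     db.session.commit()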

    @classmethod
    def process_update_callback(cls, build):
        parsed_git_url = helpers.get_parsed_git_url(build.copr.scm_repo_url)
        if not parsed_git_url:
            return

        if build.update_callback == 'pagure_flag_pull_request':
            api_url = 'https://{0}/api/0/{1}/pull-request/{2}/flag'.format(
                parsed_git_url.netloc, parsed_git_url.path, build.scm_object_id)
            return cls.pagure_flag(build, api_url)

        elif build.update_callback == 'pagure_flag_commit':
            api_url = 'https://{0}/api/0/{1}/c/{2}/flag'.format(
                parsed_git_url.netloc, parsed_git_url.path, build.scm_object_id)
            return cls.pagure_flag(build, api_url)

    @classmethod
    def pagure_flag(cls, build, api_url):
        headers = {
            'Authorization': 'token {}'.format(build.copr.scm_api_auth.get('api_key'))
        }

        if build.srpm_url:
            progress = 50
        else:
            progress = 10

        state_table = {
            'failed': ('failure', 0),
            'succeeded': ('success', 100),
            'canceled': ('canceled', 0),
            'running': ('pending', progress),
            'pending': ('pending', progress),
            'skipped': ('error', 0),
            'starting': ('pending', progress),
            'importing': ('pending', progress),
            'forked': ('error', 0),
            'waiting': ('pending', progress),
            'unknown': ('error', 0),
        }

        build_url = os.path.join(
            app.config['PUBLIC_COPR_BASE_URL'],
            'coprs', build.copr.full_name.replace('@', 'g/'),
            'build', str(build.id)
        )

        data = {
            'username': 'Copr build',
            'comment': '#{}'.format(build.id),
            'url': build_url,
            'status': state_table[build.state][0],
            'percent': state_table[build.state][1],
            'uid': str(build.id),
        }

        log.debug('Sending data to Pagure API: %s', pprint.pformat(data))
        response = requests.post(api_url, data=data, headers=headers)
        log.debug('Pagure API response: %s', response.text)

    @classmethod
    def cancel_build(cls, user, build):
        if not user.can_build_in(build.copr):
            raise InsufficientRightsException(
                "You are not allowed to cancel this build.")
        if not build.cancelable:
            if build.status == StatusEnum("starting"):
                # this is not intuitive, that's why we provide more specific message
                err_msg = "Cannot cancel build {} in state 'starting'".format(build.id)
            else:
                err_msg = "Cannot cancel build {}".format(build.id)
            raise RequestCannotBeExecuted(err_msg)

        if build.status == StatusEnum("running"):  # otherwise the build is just in frontend
            ActionsLogic.send_cancel_build(build)

        build.canceled = True
        cls.process_update_callback(build)

        for chroot in build.build_chroots:
            chroot.status = 2  # canceled
            if chroot.ended_on is not None:
                chroot.ended_on = time.time()
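    # Minimal sketch (assumes the acting user comes from the request context,
    # e.g. flask.g.user, and that the caller commits afterwards):
    #
    #     BuildsLogic.cancel_build(flask.g.user, build)
    #     db.session.commit()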

    @classmethod
    def check_build_to_delete(cls, user, build):
        """
        :type user: models.User
        :type build: models.Build
        """
        if not user.can_edit(build.copr) or build.persistent:
            raise InsufficientRightsException(
                "You are not allowed to delete build `{}`.".format(build.id))

        if not build.finished:
            raise ActionInProgressException(
                "You can not delete build `{}` which is not finished.".format(build.id),
                "Unfinished build")

    @classmethod
    def delete_build(cls, user, build, send_delete_action=True):
        """
        :type user: models.User
        :type build: models.Build
        """
        cls.check_build_to_delete(user, build)

        if send_delete_action:
            ActionsLogic.send_delete_build(build)

        db.session.delete(build)

    @classmethod
    def delete_multiple_builds(cls, user, builds):
        """
        :type user: models.User
        :type builds: list of models.Build
        """
        to_delete = []
        for build in builds:
            cls.check_build_to_delete(user, build)
            to_delete.append(build)

        if to_delete:
            ActionsLogic.send_delete_multiple_builds(to_delete)

        for build in to_delete:
            for build_chroot in build.build_chroots:
                db.session.delete(build_chroot)

            db.session.delete(build)

    @classmethod
    def mark_as_failed(cls, build_id):
        """
        Marks build as failed on all its non-finished chroots
        """
        build = cls.get(build_id).one()
        chroots = filter(lambda x: x.status != StatusEnum("succeeded"), build.build_chroots)
        for chroot in chroots:
            chroot.status = StatusEnum("failed")
        if build.source_status != StatusEnum("succeeded"):
            build.source_status = StatusEnum("failed")
        cls.process_update_callback(build)
        return build

    @classmethod
    def last_modified(cls, copr):
        """ Get build datetime (as epoch) of last successful build

        :arg copr: object of copr
        """
        builds = cls.get_multiple_by_copr(copr)

        last_build = (
            builds.join(models.BuildChroot)
            .filter((models.BuildChroot.status == StatusEnum("succeeded"))
                    | (models.BuildChroot.status == StatusEnum("skipped")))
            .filter(models.BuildChroot.ended_on.isnot(None))
            .order_by(models.BuildChroot.ended_on.desc())
        ).first()
        if last_build:
            return last_build.ended_on
        else:
            return None

    @classmethod
    def filter_is_finished(cls, query, is_finished):
        # todo: check that ended_on is set correctly for all cases
        # e.g.: failed dist-git import, cancellation
        if is_finished:
            return query.join(models.BuildChroot).filter(models.BuildChroot.ended_on.isnot(None))
        else:
            return query.join(models.BuildChroot).filter(models.BuildChroot.ended_on.is_(None))

    @classmethod
    def filter_by_group_name(cls, query, group_name):
        return query.filter(models.Group.name == group_name)

    @classmethod
    def filter_by_package_name(cls, query, package_name):
        return query.join(models.Package).filter(models.Package.name == package_name)

    @classmethod
    def clean_old_builds(cls):
        dirs = (
            db.session.query(
                models.CoprDir.id,
                models.Package.id,
                models.Package.max_builds)
            .join(models.Build, models.Build.copr_dir_id == models.CoprDir.id)
            .join(models.Package)
            .filter(models.Package.max_builds > 0)
            .group_by(
                models.CoprDir.id,
                models.Package.max_builds,
                models.Package.id)
            .having(func.count(models.Build.id) > models.Package.max_builds)
        )

        for dir_id, package_id, limit in dirs.all():
            delete_builds = (
                models.Build.query.filter(
                    models.Build.copr_dir_id == dir_id,
                    models.Build.package_id == package_id)
                .order_by(desc(models.Build.id))
                .offset(limit)
                .all()
            )

            for build in delete_builds:
                try:
                    cls.delete_build(build.copr.user, build)
                except ActionInProgressException:
                    # postpone this one to next day run
                    log.error("Build(id={}) delete failed, unfinished action.".format(build.id))

    @classmethod
    def delete_orphaned_builds(cls):
        builds_to_delete = models.Build.query\
            .join(models.Copr, models.Build.copr_id == models.Copr.id)\
            .filter(models.Copr.deleted == True)

        counter = 0
        for build in builds_to_delete:
            cls.delete_build(build.copr.user, build)
            counter += 1
            if counter >= 100:
                db.session.commit()
                counter = 0

        if counter > 0:
            db.session.commit()


class BuildChrootsLogic(object):
    @classmethod
    def get_by_build_id_and_name(cls, build_id, name):
        mc = MockChrootsLogic.get_from_name(name).one()

        return (
            BuildChroot.query
            .filter(BuildChroot.build_id == build_id)
            .filter(BuildChroot.mock_chroot_id == mc.id)
        )

    @classmethod
    def get_multiply(cls):
        query = (
            models.BuildChroot.query
            .join(models.BuildChroot.build)
            .join(models.BuildChroot.mock_chroot)
            .join(models.Build.copr)
            .join(models.Copr.user)
            .outerjoin(models.Group)
        )
        return query

    @classmethod
    def filter_by_build_id(cls, query, build_id):
        return query.filter(models.Build.id == build_id)

    @classmethod
    def filter_by_project_id(cls, query, project_id):
        return query.filter(models.Copr.id == project_id)

    @classmethod
    def filter_by_project_user_name(cls, query, username):
        return query.filter(models.User.username == username)

    @classmethod
    def filter_by_state(cls, query, state):
        return query.filter(models.BuildChroot.status == StatusEnum(state))

    @classmethod
    def filter_by_group_name(cls, query, group_name):
        return query.filter(models.Group.name == group_name)


class BuildsMonitorLogic(object):
    @classmethod
    def get_monitor_data(cls, copr):
        query = """
        SELECT
          package.id as package_id,
          package.name AS package_name,
          build.id AS build_id,
          build_chroot.status AS build_chroot_status,
          build.pkg_version AS build_pkg_version,
          mock_chroot.id AS mock_chroot_id,
          mock_chroot.os_release AS mock_chroot_os_release,
          mock_chroot.os_version AS mock_chroot_os_version,
          mock_chroot.arch AS mock_chroot_arch
        FROM package
        JOIN (SELECT
                MAX(build.id) AS max_build_id_for_chroot,
                build.package_id AS package_id,
                build_chroot.mock_chroot_id AS mock_chroot_id
              FROM build
              JOIN build_chroot
                ON build.id = build_chroot.build_id
              WHERE build.copr_id = {copr_id}
                AND build_chroot.status != 2
              GROUP BY build.package_id,
                       build_chroot.mock_chroot_id) AS max_build_ids_for_a_chroot
          ON package.id = max_build_ids_for_a_chroot.package_id
        JOIN build
          ON build.id = max_build_ids_for_a_chroot.max_build_id_for_chroot
        JOIN build_chroot
          ON build_chroot.mock_chroot_id = max_build_ids_for_a_chroot.mock_chroot_id
          AND build_chroot.build_id = max_build_ids_for_a_chroot.max_build_id_for_chroot
        JOIN mock_chroot
          ON mock_chroot.id = max_build_ids_for_a_chroot.mock_chroot_id
        JOIN copr_dir ON build.copr_dir_id = copr_dir.id
        WHERE copr_dir.main IS TRUE
        ORDER BY package.name ASC, package.id ASC, mock_chroot.os_release ASC,
                 mock_chroot.os_version ASC, mock_chroot.arch ASC
        """.format(copr_id=copr.id)
        rows = db.session.execute(query)
        return rows
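    # Usage sketch (column names follow the SELECT aliases above; attribute
    # access on the result rows is an assumption about the SQLAlchemy row
    # objects in use):
    #
    #     for row in BuildsMonitorLogic.get_monitor_data(copr):
    #         print(row.package_name, row.mock_chroot_arch, row.build_chroot_status)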