+ echo '====== automated-ec2-builds [/tmp/automated-ec2-builds] ======'
====== automated-ec2-builds [/tmp/automated-ec2-builds] ======
+ cd /tmp/automated-ec2-builds
+ bzr info
Standalone tree (format: 2a)
Location:
  branch root: .

Related branches:
  parent branch: http://bazaar.launchpad.net/~ubuntu-on-ec2/vmbuilder/automated-ec2-builds/
+ bzr version-info
revision-id: ben.howard@canonical.com-20130429161223-4mogv3ogkzd6zwrd
date: 2013-04-29 10:12:23 -0600
build-date: 2013-05-02 07:04:38 +0000
revno: 535
branch-nick: automated-ec2-builds
+ bzr log -p -r-1
------------------------------------------------------------
revno: 535
committer: Ben Howard
branch nick: automated-ec2-builds
timestamp: Mon 2013-04-29 10:12:23 -0600
message:
  Adding the saucy loaders
diff:
=== added directory 'loaders/saucy-armhf'
=== added file 'loaders/saucy-armhf/MLO'
Binary files loaders/saucy-armhf/MLO 1970-01-01 00:00:00 +0000 and loaders/saucy-armhf/MLO 2013-04-29 16:12:23 +0000 differ
=== added file 'loaders/saucy-armhf/u-boot.bin'
Binary files loaders/saucy-armhf/u-boot.bin 1970-01-01 00:00:00 +0000 and loaders/saucy-armhf/u-boot.bin 2013-04-29 16:12:23 +0000 differ
+ echo ''
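[Editor's note: each section of this log follows the same pattern: announce a branch, then record its provenance with bzr info, bzr version-info, and bzr log -p -r-1. A minimal sketch of a driver loop that would emit such a trace is below; the branch list and the set -ex line are assumptions, since the actual driver script is not part of this log.

#!/bin/bash
# Hypothetical provenance-capture loop; the real driver is not shown in this log.
set -ex
for d in /tmp/automated-ec2-builds /tmp/ec2-publishing-scripts \
         /tmp/live-build /tmp/vmbuilder-0.11; do
    echo "====== ${d##*/} [${d}] ======"
    cd "${d}"
    bzr info            # tree format and parent branch
    bzr version-info    # revision-id, revno, and build-date
    bzr log -p -r-1     # most recent commit, with its diff
    echo ''
done
]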
+ echo '====== ec2-publishing-scripts [/tmp/ec2-publishing-scripts] ======'
====== ec2-publishing-scripts [/tmp/ec2-publishing-scripts] ======
+ cd /tmp/ec2-publishing-scripts
+ bzr info
Standalone tree (format: unnamed)
Location:
  branch root: .

Related branches:
  parent branch: http://bazaar.launchpad.net/~ubuntu-on-ec2/ubuntu-on-ec2/ec2-publishing-scripts/
+ bzr version-info
revision-id: ben.howard@canonical.com-20130425135310-ui12syiw61yc93bu
date: 2013-04-25 07:53:10 -0600
build-date: 2013-05-02 07:04:38 +0000
revno: 501
branch-nick: ec2-publishing-scripts
+ bzr log -p -r-1
------------------------------------------------------------
revno: 501
committer: Ben Howard
branch nick: ec2-publishing-scripts
timestamp: Thu 2013-04-25 07:53:10 -0600
message:
  Removed Query2 completely.
diff:
=== modified file 'publicize-build'
--- publicize-build	2012-12-04 13:31:36 +0000
+++ publicize-build	2013-04-25 13:53:10 +0000
@@ -193,15 +193,6 @@
         error "updated ${url_outfile}"
     fi
 
-    if [ "${label}" = "daily" -o "${label}" = "release" ]; then
-        record-api "${allow_existing}" \
-            --reg "${curfile}" \
-            --files "${build_d}" \
-            --build "${url_curfile}" \
-            --outdir "${PUBLISH_BASE}/beta/api" \
-            --db "/srv/builder/query2/query2.db"
-    fi
-
 }
 
 short_opts="d:h"

=== removed file 'queries.py'
--- queries.py	2012-03-26 17:13:57 +0000
+++ queries.py	1970-01-01 00:00:00 +0000
@@ -1,564 +0,0 @@
-#!/usr/bin/python
-# vi: ts=4 noexpandtab
-
-## Copyright (C) 2011 Ben Howard
-## Date: 25 October 2011
-##
-## This comes with ABSOLUTELY NO WARRANTY; for details see COPYING.
-## This is free software, and you are welcome to redistribute it
-## under certain conditions; see copying for details.
-
-import os.path
-import sqlite3
-import argparse
-
-
-"""
-    This file contains the query definitions for interacting with the database.
-
-    queries to support
-        distro queries
-            get supported distros
-        published queries
-            get active builds published to a cloud
-            get active builds published to a cloud region
-            get active builds published to a cloud for %(distro)s
-            get active builds published to a cloud for region for %(distro)s
-            get active builds published to a cloud for %(arch_name)s
-            get active builds published to a cloud region for %(arch)
-            get active builds published to a cloud for %(arch_name)s and %(distro)s
-            get active builds published to a cloud for %(region)s and %(arch_name)s and %(distro)s
-        files queries
-            get files for %(build_id)s
-        adding things:
-            Add new build
-            Add new published image
-            Add new distro
-
-"""
-
-and_daily = ' AND build_serials.release_tag == "daily" '
-and_release = ' AND build_serials.release_tag != "daily" '
-and_clouds = ' AND clouds.name = "%(cloud)s"'
-and_regions = ' AND regions.name = "%(region_name)s"'
-and_stream = ' AND streams.name = "%(stream)s" '
-and_codename = ' AND distros.code_name = "%(distro_name)s" '
-and_version = ' AND distros.version = "%(distro_version)s"'
-and_arches = ' AND arches.name = "%(arch_name)s"'
-
-
-# Add new cloud
-insert_cloud='''
-    INSERT INTO clouds
-        ( name, description, url, active )
-    VALUES ( "%(cloud)s", "%(description)s", "%(url)s", 1 )
-'''
-
-# Add new region
-insert_region='''
-    INSERT INTO regions
-        ( cloud_id, name, description, url, active )
-    VALUES (
-        ( SELECT id FROM clouds WHERE clouds.name = "%(cloud)s" ),
-        "%(region)s", "%(description)s", "%(url)s", 1 )
-'''
-
-# Add new release types
-insert_release_type='''
-    INSERT INTO release_types
-        ( tag, description, supported )
-    VALUES ( "%(release_tag)s", "%(description)s", "%(supported)s" )
-'''
-
-# Add new distro
-insert_distro='''
-    INSERT INTO distros
-        ( version, code_name, name, type )
-    VALUES ( "%(distro_version)s", "%(code_name)s", "%(distro_full_name)s", "%(distro_type)s" )
-'''
-
-# Add new distro_dates
-insert_distro_dates='''
-    INSERT INTO distro_dates
-        ( distro_id, stream_id, release, end )
-    VALUES (
-        ( SELECT id FROM distros WHERE distros.version = "%(distro_version)s" ),
-        ( SELECT id FROM streams WHERE streams.name = "%(stream_name)s" ),
-        "%(release_date)s", "%(end_date)s"
-    )
-'''
-
-# Update distro_dates
-update_distro_end_date='''
-    UPDATE distro_dates
-    SET end="%(end_date)s"
-    WHERE distro_id = ( SELECT id FROM distros WHERE distros.version = "%(distro_version)s" )
-        AND stream_id = ( SELECT id FROM streams WHERE streams.name = "%(stream)s")
-'''
-
-# Add new build
-insert_new_build='''
-    INSERT INTO builds
-        ( id, date, name_base, arch_id, serial_id )
-    VALUES ( hex(randomblob(16)), "%(date)s", "%(name_base)s",
-        ( SELECT id FROM arches WHERE arches.name = "%(arch)s" ),
-        ( SELECT build_serials.id
-            FROM build_serials
-            JOIN streams ON build_serials.stream_id = streams.id
-            JOIN distros ON build_serials.distro_id = distros.id
-            WHERE streams.name = "%(stream_name)s"
-                AND distros.version = "%(distro_version)s"
-                AND build_serials.serial = "%(serial)s"
-                AND build_serials.release_tag = "%(release_tag)s" )
-    )'''
-
-insert_new_build_existing=insert_new_build.replace('INSERT INTO', 'INSERT OR REPLACE INTO')
-
-# Add files for a build
-insert_new_files='''
-    INSERT INTO files
-        ( id, sha1, build_id, file_type_id, sha512, url )
-    VALUES ( hex(randomblob(16)), "%(sha1)s",
-        ( SELECT builds.id
-            FROM builds
-            JOIN build_serials ON builds.serial_id = build_serials.id
-            JOIN arches ON builds.arch_id = arches.id
-            JOIN streams ON
build_serials.stream_id = streams.id - JOIN distros ON build_serials.distro_id = distros.id - WHERE arches.name = "%(arch_name)s" - AND streams.name = "%(stream)s" - AND distros.version = "%(distro_version)s" - AND build_serials.serial = "%(serial)s" - AND build_serials.release_tag = "%(release_tag)s" ), - ( SELECT id FROM file_types WHERE name = "%(file_type)s" ), - "%(sha512)s", "%(url)s" ) -''' - -insert_new_files_existing=insert_new_files.replace('INSERT INTO', 'INSERT OR REPLACE INTO') - -# Add published build -insert_new_published=''' - INSERT INTO published - ( cloud_id, instance_type_id, region_name, - build_id, image_id, url, publish_date, is_public, - kernel_id, ramdisk_id, registered_name ) - SELECT clouds.id, instance_types.id, "%(region)s", - ( SELECT builds.id - FROM builds - JOIN build_serials ON build_serials.id = builds.serial_id - JOIN distros ON build_serials.distro_id = distros.id - JOIN streams ON build_serials.stream_id = streams.id - JOIN arches ON builds.arch_id = arches.id - WHERE arches.name = "%(arch_name)s" - AND streams.name = "%(stream)s" - AND distros.version = "%(distro_version)s" - AND build_serials.release_tag = "%(release_tag)s" - AND build_serials.serial = "%(serial)s" ), - "%(reg_id)s", "%(url)s", "%(date)s", "%(public)s", - "%(kernel_id)s", "%(ramdisk_id)s", "%(register_name)s" - - FROM region_instance_types - JOIN instance_types ON region_instance_types.instance_type_id = instance_types.id - JOIN arches ON instance_types.arch_id = arches.id - JOIN clouds ON region_instance_types.cloud_id = clouds.id - WHERE arches.name = "%(arch_name)s" - AND instance_types.name = "%(instance_type)s" - AND clouds.name = "%(cloud)s" - AND region_instance_types.region_name = "%(region)s" -''' -insert_new_published_existing=insert_new_published.replace('INSERT INTO', 'INSERT OR REPLACE INTO') - -# Update the is_public field -update_publish_public='UPDATE published SET is_public="True", unpublish_date="" WHERE published.build_id="%(build_id)s"' - -# Update the is_public and date for non-public -update_remove_public='UPDATE published SET is_public="True", unpublish_date=%(date)s WHERE published.build_id="%(build_id)s"' - -# Setup the query for published images -query_published_base_all_clouds=''' - SELECT DISTINCT - build_serials.release_tag AS release_tag, - published.image_id AS published_id, - published.kernel_id AS kernel_id, - published.ramdisk_id AS ramdisk_id, - published.registered_name AS registered_name, - published.url AS published_url, - publish_date AS published_date, - published.is_public AS is_public, - regions.name AS region_name, - clouds.name AS cloud_name, - builds.date AS build_date, - builds.id AS build_id, - build_serials.serial AS build_serial, - build_serials.id AS build_serial_id, - streams.name AS build_type, - build_serials.current_build AS build_current, - distros.name AS distro_name, - distros.code_name AS distro_code_name, - distros.version AS distro_version, - arches.name AS arch, - virt_type.name AS virt_type, - instance_types.name AS instance_type - FROM published - JOIN builds ON published.build_id = builds.id - JOIN build_serials ON builds.serial_id = build_serials.id - JOIN arches ON builds.arch_id = arches.id - JOIN distros ON build_serials.distro_id = distros.id - JOIN streams ON build_serials.stream_id = streams.id - JOIN regions ON published.region_name = regions.name - JOIN clouds ON published.cloud_id = clouds.id - JOIN instance_types ON published.instance_type_id = instance_types.id - JOIN virt_type ON instance_types.virt_type_id 
= virt_type.id -''' - -# Setup the query for published images -query_published_base= query_published_base_all_clouds + '\n WHERE build_serials.current_build = "True" and ' -query_published_base_cloud= query_published_base_all_clouds + '\n WHERE build_serials.current_build = "True" and clouds.name = "%(cloud)s" ' -query_get_builds_cloud_stream = query_published_base_cloud + and_stream - -#Get builds published to a cloud -query_active_builds = query_published_base + 'published.is_public = "True"' -query_private_builds = query_published_base + 'published.is_public = "False"' - -query_active_builds_stream = query_active_builds + and_stream -query_private_builds_stream = query_private_builds + and_stream - -query_active_builds_arch = query_active_builds + and_arches -query_active_builds_distro = query_active_builds + and_codename -query_active_builds_distro_version = query_active_builds + and_version -query_active_builds_arch_distro = query_active_builds_arch + and_arches + and_version - -#Get builds published to a cloud -query_active_builds_for_cloud = query_active_builds + and_clouds -query_active_builds_for_cloud_daily = query_active_builds_for_cloud + and_daily -query_active_builds_for_cloud_release = query_active_builds_for_cloud + and_release - -query_active_builds_for_cloud_arch = query_active_builds_arch + and_clouds - -query_active_builds_for_cloud_stream = query_active_builds_distro + and_stream -query_active_builds_for_cloud_stream_daily = query_active_builds_for_cloud_stream + and_daily -query_active_builds_for_cloud_stream_release = query_active_builds_for_cloud_stream + and_release - -query_active_builds_for_cloud_distro = query_active_builds_distro + and_clouds -query_active_builds_for_cloud_distro_daily = query_active_builds_for_cloud_distro + and_daily -query_active_builds_for_cloud_distro_release = query_active_builds_for_cloud_distro + and_release - -query_active_builds_for_cloud_distro_stream = query_active_builds_for_cloud_distro + and_stream -query_active_builds_for_cloud_distro_stream_daily = query_active_builds_for_cloud_distro_stream + and_daily -query_active_builds_for_cloud_distro_stream_release = query_active_builds_for_cloud_distro + and_release - -query_active_builds_for_cloud_distro_version = query_active_builds_distro_version + and_clouds -query_active_builds_for_cloud_distro_version_release = query_active_builds_for_cloud_distro_version + and_release -query_active_builds_for_cloud_distro_version_daily = query_active_builds_for_cloud_distro_version + and_daily - -query_active_builds_for_cloud_distro_version_stream = query_active_builds_for_cloud_distro_version + and_stream -query_active_builds_for_cloud_distro_version_stream_daily = query_active_builds_for_cloud_distro_version_stream + and_daily -query_active_builds_for_cloud_distro_version_stream_release = query_active_builds_for_cloud_distro_version_stream + and_release - -query_active_builds_for_cloud_arch_distro = query_active_builds_arch_distro + and_clouds -query_active_builds_for_cloud_arch_distro_daily = query_active_builds_for_cloud_arch_distro + and_daily -query_active_builds_for_cloud_arch_distro_release = query_active_builds_for_cloud_arch_distro + and_release - -# Regional Queries -query_active_builds_for_region = query_active_builds_for_cloud + and_regions -query_active_builds_for_region_distro = query_active_builds_for_cloud_distro + and_regions -query_active_builds_for_region_arch_distro = query_active_builds_for_cloud_arch_distro + and_regions -query_active_builds_for_region_arch = 
query_active_builds_for_cloud_arch + and_regions - - -# Base build query -query_files_base=''' -SELECT DISTINCT - files.sha1 AS file_sha1, - files.sha512 AS file_sha512, - file_types.name AS file_type, - file_types.description AS file_type_description, - files.url AS file_url, - builds.name_base AS path_base -FROM files -JOIN file_types ON files.file_type_id = file_types.id -JOIN builds ON files.build_id = builds.id -JOIN build_serials ON build_serials.distro_id = build_serials.distro_id - AND build_serials.stream_id = build_serials.stream_id - AND builds.serial_id = build_serials.serial - WHERE ''' - -# Get files for a specific build ID -query_files_for_build_id= query_files_base + " files.build_id = %(build_id)s" - -# Get files for a specific build date -query_files_for_build_date= query_files_base + " builds.date = %(date)s" - -query_get_distro_name=''' - SELECT code_name AS name FROM distros WHERE version = "%(distro_version)s" -''' - -# Get supported builds -query_get_supported_distros=''' - SELECT distros.version AS version, - distros.code_name AS code_name, - distros.name AS name, - streams.name AS stream, - distros.type AS type, - distro_dates.release AS release_date, - distro_dates.end AS end_of_life - FROM distros - JOIN distro_dates ON distros.id = distro_dates.distro_id - JOIN streams ON distro_dates.stream_id = streams.id - WHERE streams.name = "%(stream_name)s" - AND distro_dates.end > "%(today)s" -''' - -query_get_cloud_regions_only=''' - SELECT regions.name AS name, - regions.url AS url, - regions.description AS description - FROM regions - JOIN clouds ON regions.cloud_id = clouds.id - WHERE clouds.name = "%(cloud)s" -''' - -query_get_all_regions=''' - SELECT clouds.name AS cloud_provider, - clouds.description AS cloud_description, - regions.name AS region_name, - regions.description AS region_description, - regions.url AS region_description - FROM regions - JOIN clouds ON regions.cloud_id = clouds.id -''' - -query_get_cloud_regions= query_get_all_regions + ' WHERE clouds.name = "%(cloud)s" ' - -query_get_clouds=''' - SELECT clouds.name AS name, - clouds.description AS description, - clouds.url AS url - FROM clouds - ''' - -query_get_single_cloud= query_get_clouds + ' WHERE clouds.name = "%(cloud)s" ' - - -query_get_cloud_region_instance_types = ''' - SELECT instance_types.name AS instance_type, - arches.name AS arch, - FROM instance_types - JOIN region_instance_types ON instance_types.id = region_instance_types.instance_type_id - JOIN arches ON arches.id = instance_types.arch_id - JOIN regions ON region_instance_types.cloud_id = regions.cloud_id - AND region_instance_types.region_name = regions.name - JOIN clouds ON regions.cloud_id = clouds.id - WHERE clouds.name = "%(clouds_name)s" - AND regions.name = "%(region_name)s" -''' - -query_get_release_tags=''' - SELECT tag AS tag, - description AS description - FROM release_types -''' - -query_get_files_builds_base=''' - SELECT - files.id AS file_id, - files.sha1 AS file_sha1, - files.sha512 AS file_sha512, - file_types.name AS file_type, - file_types.description AS file_description, - files.url AS file_url, - builds.name_base AS path_base, - files.sha512 AS file_sha512, - file_types.name AS file_type, - file_types.description AS file_type_description, - files.url AS file_url, - build_serials.serial AS build_serial, - build_serials.id AS build_serial_id, - build_serials.release_tag AS release_tag, - builds.id AS build_id, - streams.name AS build_type, - arches.name AS arch, - distros.code_name AS distro_code_name, - 
distros.version AS distro_version - FROM files - JOIN file_types ON files.file_type_id = file_types.id - JOIN builds ON files.build_id = builds.id - JOIN build_serials ON builds.serial_id = build_serials.id - JOIN distros ON distros.id = build_serials.distro_id - JOIN streams ON streams.id = build_serials.stream_id - JOIN arches ON builds.arch_id = arches.id - WHERE build_serials.current_build = "True" ''' - -query_get_files_build_distro = query_get_files_builds_base + and_codename -query_get_files_build_distro_daily = query_get_files_build_distro + and_daily -query_get_files_build_distro_release = query_get_files_build_distro + and_release - - -query_get_files_build_stream = query_get_files_builds_base + and_stream -query_get_files_build_stream_daily = query_get_files_build_stream + and_daily -query_get_files_build_stream_release = query_get_files_build_stream + and_release - -query_get_files_build_stream_distro = query_get_files_build_stream + and_codename -query_get_files_build_stream_distro_daily = query_get_files_build_stream_distro + and_daily -query_get_files_build_stream_distro_release = query_get_files_build_stream_distro + and_release - -query_get_files_all_stream_distro_stream_daily = query_get_files_build_stream_distro + and_daily -query_get_files_all_stream_daily = query_get_files_build_stream + and_daily -query_get_files_all_distro_daily = query_get_files_build_distro + and_daily -query_get_files_all_daily = query_get_files_builds_base + and_daily -query_get_files_all_stream_distro_stream_release = query_get_files_build_stream_distro + and_release -query_get_files_all_stream_release = query_get_files_build_stream + and_release -query_get_files_all_distro_release = query_get_files_build_distro + and_release -query_get_files_all_release = query_get_files_builds_base + and_release - - -query_get_files_all_active_cloud=''' - SELECT DISTINCT - files.id AS file_id, - files.sha1 AS file_sha1, - files.sha512 AS file_sha512, - file_types.name AS file_type, - file_types.description AS file_description, - files.url AS file_url, - builds.name_base AS path_base, - files.sha512 AS file_sha512, - file_types.name AS file_type, - file_types.description AS file_type_description, - files.url AS file_url, - build_serials.serial AS build_serial, - build_serials.id AS build_serial_id, - build_serials.release_tag AS release_tag, - builds.id AS build_id, - streams.name AS build_type, - arches.name AS arch, - distros.code_name AS distro_code_name, - distros.version AS distro_version - - FROM files - JOIN file_types ON files.file_type_id = file_types.id - JOIN builds ON files.build_id = builds.id - JOIN build_serials ON builds.serial_id = build_serials.id - JOIN distros ON distros.id = build_serials.distro_id - JOIN streams ON streams.id = build_serials.stream_id - JOIN arches ON builds.arch_id = arches.id - JOIN published ON builds.id = published.build_id - JOIN regions ON regions.name = published.region_name - JOIN clouds ON clouds.id = regions.cloud_id - WHERE clouds.name = "%(cloud)s" - AND published.is_public = "True" - AND build_serials.current_build = "True" -''' - - -query_get_files_all_active_cloud_daily = query_get_files_all_active_cloud + and_daily -query_get_files_all_active_cloud_release = query_get_files_all_active_cloud + and_release - -query_get_files_all_active_cloud_distro = query_get_files_all_active_cloud + and_codename -query_get_files_all_active_cloud_distro_daily = query_get_files_all_active_cloud_distro + and_daily -query_get_files_all_active_cloud_distro_release = 
query_get_files_all_active_cloud_distro + and_release - -query_get_files_all_active_cloud_distro_version = query_get_files_all_active_cloud + and_version -query_get_files_all_active_cloud_distro_version_daily = query_get_files_all_active_cloud_distro_version + and_daily -query_get_files_all_active_cloud_distro_version_release = query_get_files_all_active_cloud_distro_version + and_release - -query_get_files_all_active_cloud_distro_stream = query_get_files_all_active_cloud_distro + and_stream -query_get_files_all_active_cloud_distro_stream_daily = query_get_files_all_active_cloud_distro_stream + and_daily -query_get_files_all_active_cloud_distro_stream_release = query_get_files_all_active_cloud_distro_stream + and_release - -query_get_files_all_active_cloud_distro_version_stream = query_get_files_all_active_cloud_distro_version + and_stream -query_get_files_all_active_cloud_distro_version_stream_daily = query_get_files_all_active_cloud_distro_version_stream + and_daily -query_get_files_all_active_cloud_distro_version_stream_release = query_get_files_all_active_cloud_distro_version_stream + and_release - -query_get_files_all_active_cloud_stream = query_get_files_all_active_cloud + and_stream -query_get_files_all_active_cloud_stream_daily = query_get_files_all_active_cloud_stream + and_daily -query_get_files_all_active_cloud_stream_release = query_get_files_all_active_cloud_stream + and_release - - -query_expire_current=''' - UPDATE published SET current_build="False" - WHERE rowid IN ( SELECT published.rowid - FROM published - JOIN builds ON published.build_id = builds.id - JOIN build_serials ON builds.serial_id = build_serial_id - JOIN distros ON build_serials.distro_id = distros.id - JOIN clouds ON published.cloud_id = clouds.id - JOIN streams ON build_serials.stream_id = streams.id - WHERE distros.version = "%(distro_version)s" - AND clouds.name = "%(cloud)s" - AND streams.name = "%(stream)s" - AND build_serial.release_tag = "%(release_tag)s")''' - -query_expire_current_releases= query_expire_current + ' != "daily" )' -query_expire_current_daily= query_expire_current + ' = "daily" )' - -query_expire_dailies_keep_three=''' - UPDATE build_serials - SET current_build = "False" - WHERE current_build = "True" - AND distro_id = ( SELECT id FROM distros WHERE distros.version = "%(distro_version)s" ) - AND stream_id = ( SELECT id FROM streams WHERE streams.name = "%(stream)s" ) - AND release_tag = "daily" - AND rowid NOT IN ( SELECT rowid - FROM build_serials - WHERE distro_id = ( SELECT id FROM distros WHERE distros.version = "%(distro_version)s" ) - AND stream_id = ( SELECT id FROM streams WHERE streams.name = "%(stream)s" ) - AND release_tag = "daily" - ORDER BY rowid DESC - LIMIT 3 ) -''' - -query_update_current=''' - UPDATE build_serials - SET current_build = "True" - WHERE distro_id = ( SELECT id FROM distros WHERE distros.version = "%(distro_version)s" ) - AND stream_id = ( SELECT id FROM streams WHERE streams.name = "%(stream)s" ) - AND release_tag = "%(release_tag)s" - AND serial = "%(serial)s" -''' - -query_expire_serial_milestones=''' - UPDATE build_serials - SET current_build = "False" - WHERE rowid IN ( - SELECT build_serials.rowid - FROM build_serials - JOIN distros ON build_serials.distro_id = distros.id - WHERE distros.version = "%(distro_version)s" - AND build_serials.serial != "%(build_id)s" - AND build_serials.release_tag IN ( SELECT tag - FROM release_types - WHERE tag != "daily" AND tag != "%(release_tag)s" )) - OR rowid IN ( - SELECT build_serials.rowid - FROM build_serials 
- JOIN distros ON build_serials.distro_id = distros.id - WHERE distros.version = "%(distro_version)s" - AND build_serials.serial = "%(build_id)s" - AND build_serials.release_tag IN ( SELECT tag - FROM release_types - WHERE tag != "daily" AND tag != "%(release_tag)s" )) -''' - -query_expire_serial=''' - UPDATE build_serials - SET current_build = "False" - WHERE current_build = "True" - AND distro_id = ( SELECT id FROM distros WHERE distros.version = "%(distro_version)s" ) - AND stream_id = ( SELECT id FROM streams WHERE streams.name = "%(stream)s" ) - AND release_tag = "%(release_tag)" - AND serial != "%(serial)s" -''' - -query_insert_serial=''' - INSERT INTO build_serials - ( id, serial, release_tag, distro_id, stream_id ) - VALUES ( hex(randomblob(16)), "%(serial)s", "%(release_tag)s", - ( SELECT id FROM distros WHERE distros.version = "%(distro_version)s" ), - ( SELECT id FROM streams WHERE streams.name = "%(stream)s" ) - ) -''' -query_insert_serial_existing=query_insert_serial.replace('INSERT INTO', 'INSERT OR REPLACE INTO') === removed file 'query-exec' --- query-exec 2012-10-26 07:35:08 +0000 +++ query-exec 1970-01-01 00:00:00 +0000 @@ -1,928 +0,0 @@ -#!/usr/bin/python -# vi: ts=4 noexpandtab - -## Copyright (C) 2011 Ben Howard -## Date: 25 October 2011 -## -## This comes with ABSOLUTELY NO WARRANTY; for details see COPYING. -## This is free software, and you are welcome to redistribute it -## under certain conditions; see copying for details. - -import os.path -import sqlite3 -import argparse -import queries -import logging -import sys -import json -import time -from datetime import date - -class ExecQuery(): - - def __init__( self, db_name, output_format, print_queries=False, opts=None): - self.db = sqlite3.connect( db_name ) - self.output = output_format - self.print_queries = print_queries - self.opts = opts - - - def exec_query(self, *args): - c = self.db.cursor() - c.execute('''PRAGMA foreign_keys = ON''') - - for query in args: - q = self.opt_replacer(query) - if self.print_queries: - print q - c.execute( q ) - - c.close() - self.db.commit() - - - def return_query(self, q): - - query = self.opt_replacer( q ) - if self.print_queries: - print query - - c = self.db.cursor() - c.execute('''PRAGMA foreign_keys = ON''') - c.execute( query ) - - columns_names = [] - returns = [] - - for desc in c.description: - columns_names.append(desc[0]) - - for row in c.fetchall(): - anon = {} - count = 0 - for tup in row: - anon[ columns_names[count] ] = tup.replace('None', 'null') - count += 1 - - returns.append(anon) - - out = returns - if self.output == 'json': - out = json.dumps( returns, sort_keys=True, indent=4) - - c.close() - return out - - def sha1( fname, sha="sha1" ): - - sha = None - - if sha == "sha1": - sha = hashlib.sha1() - else: - sha = hashlib.sha512() - - with open( fname,'rb') as f: - for chunk in iter(lambda: f.read(8192), ''): - sha.update(chunk) - - return sha.hexdigest() - - def map_distro_version_to_name(self, distro_version ): - c = self.db.cursor() - c.execute('''PRAGMA foreign_keys = ON''') - c.execute( queries.query_get_distro_name % { 'distro_version': distro_version } ) - - for row in c.fetchall(): - return row[0] - - def opt_replacer(self, string): - """ - Do variable replacement in a single place to simplify SQL mangling - """ - - o = self.opts - - if not o: - return None - - return string % { - 'region': o.region, - 'build_id': o.build_id, 'serial': o.build_id, - 'code_name': o.distro, - 'distro_name': o.distro, - 'distro_full_name': o.distro_full_name, - 
'distro_version': o.distro_version, - 'distro_type': o.distro_type, - 'release_tag': o.release_tag, - 'date': o.date, - 'cloud': o.cloud, 'cloud_name': o.cloud, - 'arch': o.arch, 'arch_name': o.arch, - 'instance_type': o.instance_type, - 'release_date': o.release_date, - 'end_date': o.end_date, - 'stream': o.stream, 'stream_name': o.stream, - 'reg_id': o.reg_id, - 'distro_type': o.distro_type, - 'url': o.url, - 'public': o.public, - 'supported': o.supported, - 'description': o.description, - 'sha1': o.sha1, - 'sha512': o.sha512, - 'file_type': o.file_type, - 'file_name': o.file_name, - 'output': o.output, - 'kernel_id': o.kernel_id, - 'ramdisk_id': o.ramdisk_id, - 'register_name': o.register_name, - 'name_base': o.name_base, - 'today': date.today() - } - - '''Insert Functions''' - - def insert_region(self): - self.exec_query(queries.insert_region) - - def insert_cloud(self): - self.exec_query(queries.insert_cloud) - - def insert_distro(self): - self.exec_query(queries.insert_distro) - - def insert_distro_dates(self): - self.exec_query(queries.insert_distro_dates) - - def insert_release_tag(self): - self.exec_query(queries.insert_release_type) - - def insert_new_build(self): - - if self.opts.allow_existing: - try: - self.exec_query(queries.insert_new_build_existing) - except sqlite3.IntegrityError as e: - logger.debug("Existing row already exists...skipping ahead") - else: - self.exec_query(queries.insert_new_build) - - def insert_published(self): - - if self.opts.allow_existing: - self.exec_query(queries.insert_new_published_existing) - else: - self.exec_query(queries.insert_new_published) - - def insert_files(self): - - if self.opts.allow_existing: - self.exec_query(queries.insert_new_files_existing) - else: - self.exec_query(queries.insert_new_files) - - def insert_build_id(self): - - if self.opts.allow_existing: - try: - self.exec_query(queries.query_insert_serial_existing) - except sqlite3.IntegrityError as e: - logger.debug("Existing row already exists...skipping ahead") - else: - self.exec_query(queries.query_insert_serial) - - def update_published_public(self): - self.exec_query(queries.update_publish_public) - - def update_remove_public(self): - self.exec_query(queries.update_remove_public) - - def update_distro_end_date(self): - self.exec_query(queries.update_distro_end_date) - - def update_current_milestone(self): - self.exec_query( queries.query_update_current, queries.query_expire_serial_milestones) - - def update_activate_daily(self): - self.exec_query( queries.query_update_current, queries.query_expire_dailies_keep_three) - - def get_active_builds_for_cloud(self): - return self.return_query(queries.query_active_builds_for_cloud) - - def get_active_builds_for_cloud_distro(self): - return self.return_query(queries.query_active_builds_for_cloud_distro) - - def get_active_builds_for_cloud_distro_version(self): - return self.return_query(queries.query_active_builds_for_cloud_distro_version) - - def get_active_builds_for_region(self): - return self.return_query(queries.query_active_builds_for_region) - - def get_active_builds_for_region_distro(self): - return self.return_query(queries.query_active_builds_for_cloud_distro) - - def get_active_builds_for_cloud_arch_distro(self): - return self.return_query(queries.query_active_builds_for_cloud_arch_distro) - - def get_active_builds_for_region_arch_distro(self): - return self.return_query(queries.query_active_builds_for_region_arch_distro) - - def get_active_builds_cloud_distro_version(self): - return 
self.return_query(queries.query_active_builds_for_cloud_distro_version) - - def get_active_builds_cloud_distro_version_stream(self): - return self.return_query(queries.query_active_builds_for_cloud_distro_version_stream) - - def get_active_builds_cloud_distro(self): - return self.return_query(queries.query_active_builds_for_cloud_distro) - - def get_active_builds_cloud_distro_stream(self): - return self.return_query(queries.query_active_builds_for_cloud_distro_stream) - - def get_active_builds_for_cloud_arch(self): - return self.return_query(queries.query_active_builds_arches) - - def get_active_builds_arches_region(self): - return self.return_query(queries.query_active_builds_for_region_arch) - - def get_active_builds_cloud_stream(self): - return self.return_query(queries.query_get_builds_cloud_stream) - - def get_files_for_build_id(self): - return self.return_query(queries.query_files_for_build_id) - - def get_files_for_build_date(self): - return self.return_query(queries.query_files_for_build_date) - - def get_supported_distros(self): - return self.return_query(queries.query_get_supported_distros) - - def get_clouds(self): - return self.return_query(queries.query_get_clouds) - - def get_cloud(self): - return self.return_query(queries.query_get_single_cloud) - - def get_all_regions(self): - return self.return_query(queries.query_get_cloud_regions) - - def get_cloud_regions(self): - return self.return_query(queries.query_get_cloud_regions) - - def get_cloud_regions_only(self): - return self.return_query(queries.query_get_cloud_regions_only) - - def get_release_tags(self): - return self.return_query(queries.query_get_release_tags) - - def get_active_files_cloud(self): - return self.return_query(queries.query_get_files_all_active_cloud) - - def get_active_files_cloud_distro(self): - return self.return_query(queries.query_get_files_all_active_cloud_distro) - - def get_active_files_cloud_distro_version(self): - return self.return_query(queries.query_get_files_all_active_cloud_distro_version) - - def get_active_files_cloud_distro_stream(self): - return self.return_query(queries.query_get_files_all_active_cloud_distro_stream) - - def get_active_files_cloud_distro_version_stream(self): - return self.return_query(queries.query_get_files_all_active_cloud_distro_version_stream) - - def get_active_files_cloud_stream(self): - return self.return_query(queries.query_get_files_all_active_cloud_stream) - - def get_files_all_stream_distro(self): - return self.return_query(queries.query_get_files_build_stream_distro) - - def get_file_all_stream(self): - return self.return_query(queries.query_get_files_build_stream) - - def get_files_all_distro(self): - return self.return_query(queries.query_get_files_build_distro) - - def get_files_all(self): - return self.return_query(queries.query_get_files_builds_base) - - def get_files_all_distro_daily(self): - return self.return_query(queries.query_get_files_all_distro_daily) - - def get_files_all_distro_release(self): - return self.return_query(queries.query_get_files_all_distro_release) - - def get_files_all_stream_release(self): - return self.return_query(queries.query_get_files_build_stream_release) - - def get_files_all_stream_distro_stream_release(self): - return self.return_query(queries.query_get_files_build_stream_distro_release) - - def generic_query(self, query_name): - return self.return_query(getattr(queries, str("query_%s" % query_name))) - -def get_build_files(db, opts): - """ - Returns a tuple of builds and files. 
- """ - files = None - builds = None - - if opts.build_only: - - if opts.dailies_only: - if opts.stream and opts.distro: - files = db.generic_query("get_files_build_stream_distro_daily") - - elif opts.stream: - files = db.generic_query("get_files_all_stream_daily") - - elif opts.distro: - files = db.generic_query("get_files_all_distro_daily") - - else: - files = db.generic_query("get_files_builds_base") - - elif opts.releases_only: - if opts.stream and opts.distro: - files = db.generic_query("get_files_build_stream_distro_release") - - elif opts.stream: - files = db.generic_query("get_files_build_stream_release") - - elif opts.distro: - files = db.generic_query("get_files_all_distro_release") - - else: - files = db.generic_query("get_files_all_release") - - else: - - if opts.stream and opts.distro: - files = db.get_files_all_stream_distro() - - elif opts.stream: - files = db.get_file_all_stream() - - elif opts.distro: - files = db.get_files_all_distro() - - else: - files = db.get_files_all() - - else: - - if opts.stream and opts.distro: - - if opts.dailies_only: - builds = db.generic_query("active_builds_for_cloud_distro_stream_daily") - files = db.generic_query("get_files_all_active_cloud_distro_stream_daily") - - elif opts.releases_only: - builds = db.generic_query("active_builds_for_cloud_distro_stream_release") - files = db.generic_query("get_files_all_active_cloud_distro_stream_release") - - else: - builds = db.generic_query("active_builds_for_cloud_distro_stream") - files = db.generic_query("get_files_all_active_cloud_distro_stream") - - elif opts.stream: - - if opts.dailies_only: - files = db.generic_query("get_files_all_active_cloud_stream_daily") - builds = db.generic_query("active_builds_for_cloud_stream") - - elif opts.releases_only: - files = db.generic_query("get_files_all_active_cloud_stream_release") - builds = db.generic_query("active_builds_for_cloud_stream") - - else: - files = db.generic_query("get_files_all_active_cloud_stream") - builds = db.generic_query("active_builds_for_cloud_stream") - - elif opts.distro: - - - if opts.dailies_only: - files = db.generic_query("get_files_all_active_cloud_distro_daily") - builds = db.generic_query("active_builds_for_cloud_distro_daily") - - elif opts.releases_only: - files = db.generic_query("get_files_all_active_cloud_distro_release") - builds = db.generic_query("active_builds_for_cloud_distro_release") - - else: - files = db.generic_query("get_files_all_active_cloud_distro") - builds = db.generic_query("active_builds_for_cloud_distro") - - else: - - if opts.dailies_only: - files = db.generic_query("get_files_all_active_cloud_daily") - builds = db.generic_query("active_builds_for_cloud_daily") - - elif opts.releases_only: - files = db.generic_query("get_files_all_active_cloud_release") - builds = db.generic_query("active_builds_for_cloud_release") - - else: - files = db.generic_query("get_files_all_active_cloud") - builds = db.generic_query("active_builds_for_cloud") - - - return builds, files - -'''Yeah, this sucks, but hey, JSON is a tad bit verbose''' -def generate_json( opts ): - - json_out = {} - - vendor = {} - vendor['name'] = 'Ubuntu Cloud Images' - vendor['url'] = 'http://cloud-images.ubuntu.com' - vendor['support_url'] = 'http://www.ubuntu.com/cloud' - vendor['company'] = 'Canonical Group, Ltd' - - mirrors = {} - mirrors['authoritative'] = "https://cloud-images.ubuntu.com" - mirrors['transfer'] = [ "http://cloud-images.ubuntu.com" ] - json_out['mirrors'] = mirrors - - manifest = {} - manifest['date'] = "%s" % date.today() 
- manifest['serial'] = "%s" % str(time.time()).split('.')[0] - manifest['subject'] = 'Ubuntu Cloud Images' - manifest['description'] = "Active Ubuntu builds with URL and meta-data" - - db = ExecQuery( opts.db, 'raw', opts.printq, opts=opts ) - - opts.cloud = opts.cloud_json - - json_out['manifest'] = manifest - json_out['vendor'] = vendor - - builds, files = get_build_files(db, opts) - - cloud = db.get_cloud() - - for cld in cloud: - if cld['name'] == "EC2": - cld['account'] = '099720109477' - - build_catalog = [] - - for f in files: - - added_serial = False - added_bc = False - - for bc in build_catalog: - if f['distro_version'] == bc['distro_version']: - added_bc = True - - if not added_bc: - anon = {} - anon['distro_code_name'] = f['distro_code_name'] - anon['distro_version'] = f['distro_version'] - anon['build_types'] = {} - build_catalog.append( anon ) - - for bt in build_catalog: - if f['distro_version'] == bt['distro_version']: - if f['build_type'] not in bt['build_types']: - bt['build_types'][ f['build_type'] ] = [] - - added_serial = False - for nbd in bt['build_types'][ f['build_type'] ]: - if f['build_serial_id'] == nbd['build_serial_id']: - added_serial = True - - if not added_serial: - anon = {} - anon['build_serial'] = f['build_serial'] - anon['build_serial_id'] = f['build_serial_id'] - anon['release_tag'] = f['release_tag'] - anon['arches'] = {} - bt['build_types'][ f['build_type'] ].append( anon ) - - for nbd in bt['build_types'][ f['build_type'] ]: - - if nbd['build_serial_id'] == f['build_serial_id']: - - added_sub = False - for anbd in nbd['arches']: - if anbd == f['arch']: - added_sub = True - - if not added_sub: - anon = {} - anon['build_id'] = f['build_id'] - anon['file_list'] = [] - - # get cloud registrations stuff out - cloud_reg = [] - - if builds: - for b in builds: - - if b['build_id'] != f['build_id']: - continue - - c_anon = None - for c_reg in cloud_reg: - if b['cloud_name'] in c_reg['name']: - c_anon = c_reg - - if not c_anon: - c_anon = {} - c_anon['name'] = b['cloud_name'] - c_anon['instance_types'] = [] - cloud_reg.append( c_anon ) - - c_inst = False - for inst in c_anon['instance_types']: - if inst['name'] == b['instance_type']: - c_inst = inst - - if not c_inst: - c_inst = {} - c_inst['name'] = b['instance_type'] - c_inst['virt_type'] = b['virt_type'] - c_inst['registered_name'] = b['registered_name'] - c_inst['registrations'] = [] - c_anon['instance_types'].append( c_inst ) - - r = {} - r['region_name'] = b['region_name'] - r['published_id'] = b['published_id'] - r['kernel_id'] = b['kernel_id'] - r['ramdisk_id'] = b['ramdisk_id'] - c_inst['registrations'].append( r ) - - anon['cloud_registrations'] = cloud_reg - - nbd['arches'][ f['arch'] ] = anon - - for sb in nbd['arches']: - if sb == f['arch']: - anon = {} - anon['file_type'] = f['file_type'] - anon['description'] = f['file_description'] - anon['sha1'] = f['file_sha1'] - anon['sha512'] = f['file_sha512'] - anon['path'] = f['file_url'] - nbd['arches'][ f['arch'] ]['file_list'].append( anon ) - - json_out['clouds'] = cloud - json_out['catalog'] = build_catalog - - if not opts.ugly_json: - print json.dumps( json_out, sort_keys=True, indent=4) - else: - print json.dumps( json_out ) - - - -def check_args(logger, opts, *args): - - valid = True - for arg in args: - if not arg in opts: - raise Exception("invalid_argument_request", "Unknown argument %s in commandline list; argument not valid" % arg ) - - else: - if opts.__dict__[arg] is None: - logger.critical("Option requires --%s" % arg) - valid = False - 
- if not valid: - print opts - - return valid - -if __name__=="__main__": - - parser = argparse.ArgumentParser() - parser.add_argument('--db', action="store", required=True, - help="File name of the Database to operate on") - - parser.add_argument('--cloud_json', action="store", default=None, - help="Output Query2 Formatted json out") - - parser.add_argument('--build_json', dest="build_only", action="store_true", default=False, - help="Output Query2 Formated json with out cloud information") - - # Valid update operations - parser.add_argument('--update', action="store", - help="Update the database", - choices= [ "distro_end_date", "published_public", "remove_public", "current", "daily"]) - - # Valid insert operations - parser.add_argument('--insert', action="store", - help="Insert information into database", - choices= [ "distro", "dates", "build", "published", "cloud", "region", "release_tag", "files", "serial" ]) - - # Valid query operations - parser.add_argument('--query', action="store", - help="Query the database, i.e. get information out", - choices= [ "active_builds_cloud", - "active_builds_cloud_distro", - "active_builds_region", - "active_builds_region_distro", - "active_builds_cloud_arch", - "active_builds_arches_region", - "active_builds_regions_arch_distro", - "active_builds_regions_arch_distro", - "active_builds_cloud_distro_version", - "active_files_cloud", - "active_files_cloud_files", - "active_files_cloud_distro_stream", - "active_builds_for_cloud_distro", - "active_files_cloud_distro_version_stream", - "files_build_id", "files_build_date", "supported_distros", - "cloud", "clouds", "cloud_regions", "regions", "tags" ] ) - - # General flags - parser.add_argument('--allow_existing', action="store_true", default=False, - help="For republishing options, insert new and replace existing.") #Named "allow-..." for consistency with other tools - parser.add_argument('--distro', action="store", - help="Distribution name, i.e. precise") - parser.add_argument('--distro_full_name', action="store", - help="Distribution's full name, i.e. Precise Pangolin") - parser.add_argument('--distro_version', action="store", - help="Distribution's version number, i.e. 12.04") - parser.add_argument('--date', action="store", default=date.today(), - help="Date of the operation") - parser.add_argument('--cloud', action="store", - help="Name of the cloud, i.e. EC2") - parser.add_argument('--region', action="store", - help="Region name of the cloud, i.e. us-east-1") - parser.add_argument('--build_id', action="store", - help="Build ID of the build") - parser.add_argument('--arch', action="store", - help="Architecture type") - parser.add_argument('--instance_type', action="store", - help="Instance type") - parser.add_argument('--release_date', action="store", - help="The release date of distro") - parser.add_argument('--end_date', action="store", - help="The end of life date for the distro") - parser.add_argument('--stream', action="store", - help="The stream type, i.e. server, desktop") - parser.add_argument('--distro_type', action="store", - help="The type of distribution it is", - choices= [ "changes", "release" ]) - parser.add_argument('--reg_id', action="store", - help="Cloud provider registration ID") - parser.add_argument('--release_tag', action="store", - help="Build/release type") - parser.add_argument('--url', action="store", - help="Standard web URL, i.e. 
http://ubuntu.com") - parser.add_argument('--public', action="store_true", default=True, - help="Mark registration as public") - parser.add_argument('--private', dest='public', action="store_false", - help="Mark registration as not public") - parser.add_argument('--supported', action="store_true", default=False, - help="Mark as supported") - parser.add_argument('--description', action="store", - help="Description of element") - parser.add_argument('--file_type', action="store", - help="file type", - choices= [ "root.tar.gz", "qcow2","vmdk","tar.gz", "manifest", "kernel", "raw", "ovf" ] ) - parser.add_argument('--file_name', action="store", - help="Name of file") - parser.add_argument('--sha1', action= "store", - help="SHA1 of file") - parser.add_argument('--sha512', action= "store", - help="SHA512 of file") - parser.add_argument('--output', action="store", - help="Output format", default="json", - choices=[ "json", "raw" ] ) - parser.add_argument('--ugly_json', action="store_true", default=False, - help="Output condensed, ugly json") - parser.add_argument('--print', dest="printq", action="store_true", - help="Print queries on commandline", default=False ) - parser.add_argument('--kernel_id', action="store", - help="Kernel ID of published image") - parser.add_argument('--ramdisk_id', action="store", default=None, - help="Ramdisk ID of published image") - parser.add_argument('--register_name', action="store", default=None, - help="Registered name for image, i.e. 099720109477/ubuntu/images-testing/ebs/ubuntu") - parser.add_argument('--name_base', action="store", - help="Suggested base name of files for download") - parser.add_argument('--dailies_only', action="store_true", default=False, - help="For queries return just dailies") - parser.add_argument('--releases_only', action="store_true", default=False, - help="For queries return just releases, i.e. 
not dailies") - - opts = parser.parse_args() - - ### Now the fun part....check to make sure that options are valid - logger = logging.getLogger('_querydb_') - logging.basicConfig(format='%(asctime)s %(levelname)s - querydb - %(message)s') - logger.setLevel(logging.DEBUG) - - logger.info("Parsing commandline options") - - ### Cloud JSON - - if opts.cloud_json or opts.build_only: - - generate_json(opts) - sys.exit(0) - - opt_counts = 0 - if opts.update: - opt_counts += 1 - if opts.query: - opt_counts += 1 - if opts.insert: - opt_counts += 1 - - logger.info("Setting up connection to database") - db = ExecQuery( opts.db, opts.output, opts.printq, opts=opts ) - logger.info("Connected to SQLite3 Database at %s" % opts.db ) - - logger.info("%s operations received on commandline" % opt_counts ) - - if opt_counts > 1: - logger.critical("Only a single operation of --update, --insert, --query, --cloud_json is allowed") - sys.exit(1) - - elif opt_counts == 0: - logger.critical("Please support an operation command --update, --insert or --query") - sys.exit(1) - - - #### Update operations #### - if opts.update == "distro_end_date": - - if check_args( logger, opts, "end_date", "distro_version" ): - logger.info("Updating distro version %s to end of life on %s" % ( opts.distro_version, opts.end_date)) - - elif opts.update == "published_public": - - if check_args( logger, opts, "build_id" ): - logger.info("Marking Build %s as public" % opts.build_id) - db.update_published_public() - - elif opts.update == "remove_public": - - if check_args( logger, opts, "build_id" ): - logger.info("Marking Build %s as non-public with date of %s" % ( opts.build_id, opts.date )) - - elif opts.update == "current": - - if check_args( logger, opts, "build_id", "distro_version", "stream", "release_tag" ): - logger.info("Marking build %s %s %s %s current" % ( opts.release_tag, opts.stream, opts.build_id, opts.distro_version )) - db.update_current_milestone() - - elif opts.update == "daily": - - if check_args( logger, opts, "build_id", "distro_version", "stream" ): - logger.info("Marking build daily build %s active" % opts.build_id ) - db.update_activate_daily() - - #### Insert operations #### - if opts.insert == "distro": - - if check_args( logger, opts, "distro", "distro_full_name", "distro_version", "distro_type"): - logger.info("Adding distro %s" % opts.distro ) - db.insert_distro(); - - elif opts.insert == "dates": - - if check_args( logger, opts, "distro_version", "stream", "release_date", "end_date"): - db.insert_distro_dates() - - elif opts.insert == "build": - - if check_args( logger, opts, "build_id", "date", "distro_version", "stream", "arch", "release_tag", "name_base" ): - logger.info("Registering build id %s for %s %s" % ( opts.build_id, opts.stream, opts.arch )) - db.insert_new_build() - - elif opts.insert == "published": - - if check_args( logger, opts, "instance_type", "release_tag", "cloud", "region", "distro_version", "arch", "stream", - "build_id", "reg_id", "url", "kernel_id", "register_name"): - logger.info("Registering published build for public (%s) image %s ( %s ) %s %s" % ( opts.public, opts.cloud, opts.region, opts.release_tag, opts.reg_id)) - db.insert_published() - - elif opts.insert == "files": - - if check_args( logger, opts, "arch", "stream", "build_id", "distro_version", "file_type", "url", "release_tag"): - logger.info("Registering file %s with build %s %s %s %s" % ( opts.file_name, opts.distro_version, opts.build_id, opts.stream, opts.arch )) - db.insert_files() - - elif opts.insert == "serial": - 
-    if check_args( logger, opts, "build_id", "distro_version", "stream", "release_tag" ):
-        logger.info("Recording build serial of %s %s %s" % ( opts.build_id, opts.distro_version, opts.stream ))
-        db.insert_build_id()
-
-elif opts.insert == "release_tag":
-
-    if check_args( logger, opts, "release_tag", "description", "supported" ):
-        logger.info("Adding new release_tag of %s" % opts.description )
-        db.insert_release_tag()
-
-elif opts.insert == "region":
-
-    if check_args( logger, opts, "cloud", "region", "description", "url"):
-        logger.info("Adding new region %s to cloud %s" % ( opts.region, opts.cloud ))
-        db.insert_region()
-
-elif opts.insert == "cloud":
-
-    if check_args( logger, opts, "cloud", "description", "url" ):
-        logger.info("Adding new cloud %s" % opts.cloud )
-        db.insert_cloud()
-
-
-### Query Operations ####
-if opts.query == "active_builds_cloud":
-
-    if check_args( logger, opts, "cloud" ):
-        print db.get_active_builds_for_cloud()
-
-elif opts.query == "active_builds_cloud_distro":
-
-    if check_args( logger, opts, "cloud", "distro" ):
-        print db.get_active_builds_cloud_distro()
-
-elif opts.query == "active_builds_cloud_distro_version":
-
-    if check_args( logger, opts, "cloud", "distro_version" ):
-        print db.get_active_builds_cloud_distro_version()
-
-elif opts.query == "active_builds_for_cloud_distro":
-
-    if check_args( logger, opts, "cloud", "distro" ):
-        print db.get_active_builds_for_cloud_distro()
-
-elif opts.query == "active_builds_region":
-
-    if check_args( logger, opts, "cloud", "region" ):
-        print db.get_active_builds_for_region()
-
-elif opts.query == "active_builds_region_distro":
-
-    if check_args( logger, opts, "cloud", "region", "distro" ):
-        print db.get_active_builds_for_region_distro()
-
-elif opts.query == "active_builds_regions_arch_distro":
-
-    if check_args( logger, opts, "cloud", "region", "distro", "arch" ):
-        print db.get_active_builds_for_region_arch_distro()
-
-elif opts.query == "active_builds_arches_region":
-
-    if check_args( logger, opts, "cloud", "region", "arch" ):
-        print db.get_active_builds_arches_region()
-
-elif opts.query == "active_files_cloud":
-
-    if check_args( logger, opts, "cloud" ):
-        print db.get_active_files_cloud()
-
-elif opts.query == "active_files_cloud_files":
-
-    if check_args( logger, opts, "cloud", "distro" ):
-        print db.get_active_files_cloud_distro()
-
-elif opts.query == "active_files_cloud_distro_stream":
-
-    if check_args( logger, opts, "cloud", "distro", "stream" ):
-        print db.get_active_files_cloud_distro_stream()
-
-elif opts.query == "files_for_build_id":
-    pass
-
-elif opts.query == "files_for_build_date":
-    pass
-
-elif opts.query == "supported_distros":
-
-    if check_args( logger, opts, "stream" ):
-        print db.get_supported_distros()
-
-elif opts.query == "clouds":
-
-    print db.get_clouds()
-
-elif opts.query == "cloud":
-
-    if check_args( logger, opts, "cloud" ):
-        print db.get_cloud()
-
-elif opts.query == "all_regions":
-
-    print db.get_all_regions()
-
-elif opts.query == "tags":
-
-    print db.get_release_tags()
-
-elif opts.query == "cloud_regions" :
-
-    if check_args( logger, opts, "cloud" ):
-        print db.get_cloud_regions()
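[Editor's note: query-exec, removed above, paired each --query choice with one of queries.py's %-interpolated SQL strings and printed the rows as JSON. For one-off inspection of the same query2.db, the sqlite3 command-line shell gives an equivalent view. A sketch follows; the database path and the table/column names are lifted from the removed scripts, so treat them as assumptions about the schema.

sqlite3 -header -column /srv/builder/query2/query2.db '
    SELECT distros.code_name, build_serials.serial, build_serials.release_tag
    FROM build_serials
    JOIN distros ON build_serials.distro_id = distros.id
    WHERE build_serials.current_build = "True";'
]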
error "** FATAL query2 STOP **" - exit 1; -} -debug() { - local level=${1} - shift; - [ "${level:-0}" -gt "${VERBOSITY}" ] && return - error "$(date -R):" "${@}" -} -bad_usage(){ - cat </dev/null && - error "initialized bzr directory in ${info_dir}" || - fail "failed to initialize bzr directory in ${info_dir}" - fi - - ( cd "${1}" && - find . -type f -not \( -iname "*bz2*" -or -wholename "*.bzr/*" \) -exec bzr add {} + && - bzr commit -m "${distro} ${stream} ${release_tag} ${build_id}" ) >/dev/null && - debug 1 "Committed data to a bzr repository" || - fail "failed to commit data to bzr repository" -} - - -generate_json() { - python "${q_path:-${0%/*}}/build-api.py" \ - "${db_loc}" \ - "${distro}" \ - /srv/ec2-images/beta/api || - debug "Failed to generate new Query2 data" -} - -# 'Snapshot' the database and keep up to 7 days of copies of the -# database. This should give us sufficient copies of the DB -# in case things go south. - -debug 1 "** START query2 work **" -epoch=$(date +%s) -db_path="${db_loc%/*}" - -[ "${json_only:-0}" -eq 1 ] && { - generate_json - exit 0 -} - -[ -d "${db_path}/backups/${stream}/${distro}" ] || - mkdir -p "${db_path}/backups/${stream}/${distro}" - -cp "${db_loc}" "${db_path}/backups/${stream}/${distro}/${epoch}" || - fail "Failed to backup the database, not proceeding" - -cp "${db_loc}" "${db_path}/backups/database.latest" || - fail "Failed to preserve the latest copy of the database" - -# Now purge backups older than 7 days -find "${db_path}/backups/${stream}/${distro}" -used 7 -type f -exec rm {} +; - - -# Now start the work - -[ "${skip_serial:-0}" -eq 0 ] && insert_serial -arches="" - -dashed=$(echo "${release_tag}" | sed 's,\([^0-9]*\)\([0-9]\),\1-\2,') -case "${release_tag}" in - release) path="${stream}/releases/${distro}/release-${build_id}";; - daily) path="${stream}/${distro}/${build_id}";; - *) path="${stream}/releases/${distro}/${dashed}";; -esac - -for manifest in $( find ${files_d} -maxdepth 1 -iname "*.manifest" ) -do - arch=${manifest%.manifest}; arch="${arch##*-}"; - insert_build "${arch}" "${path}" -done - -for file in $(find ${files_d} -type f | egrep ".ovf$|.manifest$|.tar.gz$|.vmdk$|.img$|vmlinuz-virtual$" ) -do - debug 1 "Processing file ${file}" - case $file in - *amd64*) file_arch="amd64";; - *i386*) file_arch="i386";; - *armel*) file_arch="armel";; - *armhf*) file_arch="armhf";; - esac - - file_type="${file##*.}" - [[ "${file}" =~ ".tar.gz" ]] && file_type="tar.gz" - [[ "${file}" =~ "rootfs.tar.gz" ]] && file_type = "root.tar.gz" - [[ "${file}" =~ "vmlinuz" ]] && file_type="kernel" - - l_path="${path}" - [[ "${file}" =~ "unpacked" ]] && l_path="${path}/unpacked" - - if [[ "${file}" =~ ".img" ]]; then - f_type="$(file ${file} | tr [:lower:] [:upper:] )" - debug 1 "Identifying file ${file}" - debug 1 "${f_type}" - - case ${f_type} in - *QCOW*|*qcow*) file_type="qcow2";; - *VMDK*|*vmdk*) file_type="vmdk";; - esac - fi - - insert_files_d "${file_arch}" "${file_type}" "${l_path}/${file##*/}" "${file}" -done - -while read l_distro l_stream r_tag serial itype arch region ami aki virt_type -do - r_url="https://console.aws.amazon.com/ec2/home?region=${reigon}#launchAmi=${ami}" - - case "${release_tag}" in - daily) reg_name="099720109477/ubuntu/images-testing/ubuntu-${l_distro}-daily-${arch}-${l_stream}-${build_id}";; - alpha*|beta*) reg_name="099720109477/ubuntu/images-milestones/ubuntu-${l_distro}-MILESTONE-${arch}-${l_stream}-${build_id}"; - reg_name="${reg_name//MILESTONE/$release_tag}";; - release) 
reg_name="099720109477/ubuntu/images/ubuntu-${l_distro}-${l_distro_version}-${arch}-${l_stream}-${build_id}";; - esac - - if [ "${aki}" == "hvm" ]; then - reg_name="${reg_name//ubuntu/hvm/ubuntu}" - itype="hvm" - fi - - insert_published "${arch}" "${region}" "${itype}" "${ami}" "${r_url}" "${aki}" "${reg_name}" "${l_stream}" -done < ${reg} - -update_current -generate_json -debug 1 "** END query2 work **" + echo '' + echo '====== live-build [/tmp/live-build] ======' ====== live-build [/tmp/live-build] ====== + cd /tmp/live-build + bzr info Standalone tree (format: 1.9-rich-root) Location: branch root: . Related branches: parent branch: http://bazaar.launchpad.net/~ubuntu-on-ec2/live-build/cloud-images/ + bzr version-info revision-id: ben.howard@canonical.com-20130307184550-sfuml0svxs4x8kyy date: 2013-03-07 11:45:50 -0700 build-date: 2013-05-02 07:04:39 +0000 revno: 1860 branch-nick: live-build + bzr log -p -r-1 ------------------------------------------------------------ revno: 1860 committer: Ben Howard branch nick: live-build timestamp: Thu 2013-03-07 11:45:50 -0700 message: Use dpkg-divert for upstart and start-stop-daemon (LP: #1150737) diff: === modified file 'scripts/build/lb_chroot_dpkg' --- scripts/build/lb_chroot_dpkg 2011-11-28 17:56:01 +0000 +++ scripts/build/lb_chroot_dpkg 2013-03-07 18:45:50 +0000 @@ -41,10 +41,9 @@ Create_lockfile .lock # Save start-stop-daemon program - mv chroot/sbin/start-stop-daemon chroot/sbin/start-stop-daemon.orig + Chroot chroot dpkg-divert --quiet --rename --add /sbin/start-stop-daemon # Create start-stop-daemon program - cat > chroot/sbin/start-stop-daemon << EOF #!/bin/sh @@ -103,10 +102,16 @@ rmdir --ignore-fail-on-non-empty /var/state/samhain || true fi - # Restore start-stop-daemon program - if [ -e chroot/sbin/start-stop-daemon.orig ] - then - mv chroot/sbin/start-stop-daemon.orig chroot/sbin/start-stop-daemon + # Delete the fake start-stop-daemon + if [ -e chroot/sbin/start-stop-daemon ] + then + rm chroot/sbin/start-stop-daemon + fi + + # Restore the originial dpkg diversion + if [ -e chroot/sbin/start-stop-daemon.distrib ] + then + Chroot chroot dpkg-divert --quiet --rename --remove /sbin/start-stop-daemon fi # Remove dpkg sync configuration === modified file 'scripts/build/lb_chroot_upstart' --- scripts/build/lb_chroot_upstart 2011-11-28 17:56:01 +0000 +++ scripts/build/lb_chroot_upstart 2013-03-07 18:45:50 +0000 @@ -42,8 +42,8 @@ if [ -f chroot/sbin/initctl ] then - # Save initctl file - mv chroot/sbin/initctl chroot/sbin/initctl.orig + # Divert to allow for upgrades + Chroot chroot dpkg-divert --quiet --rename --add /sbin/initctl fi # Create initctl file @@ -68,13 +68,14 @@ # Creating lock file Create_lockfile .lock - if [ -f chroot/sbin/initctl.orig ] + if [ -e chroot/sbin/initctl ]; then + rm -f chroot/sbin/initctl + fi + + if [ -f chroot/sbin/initctl.distrib ] then # Restore initctl file - mv chroot/sbin/initctl.orig chroot/sbin/initctl - else - # Remove initctl file - rm -f chroot/sbin/initctl + Chroot chroot dpkg-divert --quiet --rename --remove /sbin/initctl fi # Removing stage file + echo '' + echo '====== vmbuilder-0.11 [/tmp/vmbuilder-0.11] ======' ====== vmbuilder-0.11 [/tmp/vmbuilder-0.11] ====== + cd /tmp/vmbuilder-0.11 + bzr info Standalone tree (format: 2a) Location: branch root: . 
+ echo '====== vmbuilder-0.11 [/tmp/vmbuilder-0.11] ======'
====== vmbuilder-0.11 [/tmp/vmbuilder-0.11] ======
+ cd /tmp/vmbuilder-0.11
+ bzr info
Standalone tree (format: 2a)
Location:
  branch root: .

Related branches:
  parent branch: http://bazaar.launchpad.net/~ubuntu-on-ec2/vmbuilder/0.11a/
+ bzr version-info
revision-id: ben.howard@canonical.com-20120605221454-crv9cc4612f907lh
date: 2012-06-05 16:14:54 -0600
build-date: 2013-05-02 07:04:39 +0000
revno: 398
branch-nick: vmbuilder-0.11
+ bzr log -p -r-1
------------------------------------------------------------
revno: 398
committer: Ben Howard
branch nick: vmbuilder
timestamp: Tue 2012-06-05 16:14:54 -0600
message:
  Fix for allow vmbuilder to run on 12.04 LTS.
diff:
=== modified file 'VMBuilder/plugins/ubuntu/dapper.py'
--- VMBuilder/plugins/ubuntu/dapper.py	2011-05-18 20:49:25 +0000
+++ VMBuilder/plugins/ubuntu/dapper.py	2012-06-05 22:14:54 +0000
@@ -157,18 +157,15 @@
         self.vm.addpkg += ['openssh-server']
 
     def mount_dev_proc(self):
-        run_cmd('mount', '--bind', '/dev', '%s/dev' % self.destdir)
-        self.vm.add_clean_cmd('umount', '%s/dev' % self.destdir, ignore_fail=True)
-
-        run_cmd('mount', '--bind', '/dev/pts', '%s/dev/pts' % self.destdir)
+        run_cmd('mkdir', '-p', '%s/dev/pts' % self.destdir)
+        run_cmd('mount', '-t', 'devpts', 'devpts-live', '%s/dev/pts' % self.destdir)
         self.vm.add_clean_cmd('umount', '%s/dev/pts' % self.destdir, ignore_fail=True)
 
-        self.run_in_target('mount', '-t', 'proc', 'proc', '/proc')
+        run_cmd('mount', '-t', 'proc', 'proc-live', '%s/proc' % self.destdir)
         self.vm.add_clean_cmd('umount', '%s/proc' % self.destdir, ignore_fail=True)
 
     def unmount_dev_proc(self):
         run_cmd('umount', '%s/dev/pts' % self.destdir)
-        run_cmd('umount', '%s/dev' % self.destdir)
         run_cmd('sh', '-c', 'grep -q "$1" /proc/mounts || exit 0; umount "$1"', 'umount_binfmt', "%s/proc/sys/fs/binfmt_misc" % self.destdir)
         run_cmd('umount', '%s/proc' % self.destdir)
+ echo ''
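[Editor's note: the dapper.py change above stops bind-mounting the host's /dev into the build root and instead mounts a fresh devpts and proc inside it, which is what lets vmbuilder run on a 12.04 host. As a plain-shell sketch, with DESTDIR an assumed stand-in for vmbuilder's self.destdir:

DESTDIR=/tmp/build-root   # assumed build-root path
mkdir -p "${DESTDIR}/dev/pts"
mount -t devpts devpts-live "${DESTDIR}/dev/pts"   # private ptys; host /dev untouched
mount -t proc proc-live "${DESTDIR}/proc"
# ... chrooted build steps run here ...
umount "${DESTDIR}/dev/pts"
umount "${DESTDIR}/proc"
]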