diff --git a/Gemfile b/Gemfile
index b2832cf8db..efb31baa6b 100644
--- a/Gemfile
+++ b/Gemfile
@@ -65,7 +65,7 @@ if ENV['NO_ACCEPTANCE'] != 'true'
     # use the pinned version
     gem 'beaker', '~> 4.1'
   end
-  gem 'beaker-hostgenerator', '~> 2.2.3'
+  gem 'beaker-hostgenerator', '~> 2.4'
   gem 'beaker-abs', *location_for(ENV['BEAKER_ABS_VERSION'] || '~> 0.2')
   gem 'beaker-vmpooler', *location_for(ENV['BEAKER_VMPOOLER_VERSION'] || "~> 1.3")
   gem 'beaker-puppet', '~> 1.0'
diff --git a/ext/test/upgrade-and-exit b/ext/test/upgrade-and-exit
index 74977e7316..1c910c64bd 100755
--- a/ext/test/upgrade-and-exit
+++ b/ext/test/upgrade-and-exit
@@ -83,6 +83,6 @@ psql -U puppetdb puppetdb -c 'select max(version) from schema_migrations;' \
   > "$tmpdir/out"
 cat "$tmpdir/out"
 # This must be updated every time we add a new migration
-grep -qE ' 87$' "$tmpdir/out"
+grep -qE ' 88$' "$tmpdir/out"
 
 test ! -e "$PDBBOX"/var/mq-migrated
diff --git a/resources/ext/cli/delete-reports.erb b/resources/ext/cli/delete-reports.erb
index 0b2700afe0..5c7a5c7429 100644
--- a/resources/ext/cli/delete-reports.erb
+++ b/resources/ext/cli/delete-reports.erb
@@ -78,7 +78,7 @@ chown "$pg_user:$pg_user" "$tmp_dir"
 
 # Verify that the PuppetDB schema version is the expected value
 # so that we do not incorrectly delete the report data.
-expected_schema_ver=87
+expected_schema_ver=88
 su - "$pg_user" -s /bin/sh -c "$psql_cmd -p $pg_port -d $pdb_db_name -c 'COPY ( SELECT max(version) FROM schema_migrations ) TO STDOUT;' > $tmp_dir/schema_ver"
 actual_schema_ver="$(cat "$tmp_dir/schema_ver")"
 if test "$actual_schema_ver" -ne $expected_schema_ver; then
diff --git a/src/puppetlabs/puppetdb/scf/migrate.clj b/src/puppetlabs/puppetdb/scf/migrate.clj
index 60f4bfdeac..eca0a77ca5 100644
--- a/src/puppetlabs/puppetdb/scf/migrate.clj
+++ b/src/puppetlabs/puppetdb/scf/migrate.clj
@@ -2514,6 +2514,20 @@
      " FROM reports_latest"
      " WHERE reports_latest.id = resource_events.report_id"]))
 
+(defn prevent-duplicate-catalogs
+  []
+  (jdbc/do-commands
+   ;; Clear any possible duplicates
+   ["DELETE FROM catalogs c1 USING catalogs c2"
+    " WHERE c1.certname = c2.certname"
+    " AND (c1.producer_timestamp, c1.id) < (c2.producer_timestamp, c2.id)"]
+
+   ;; Remove the old index
+   "DROP INDEX catalogs_certname_idx"
+
+   ;; Create a unique constraint on the certname, which creates the unique index
+   "ALTER TABLE catalogs ADD CONSTRAINT catalogs_certname_idx UNIQUE (certname)"))
+
 (def migrations
   "The available migrations, as a map from migration version to migration function."
   {00 require-schema-migrations-table
@@ -2584,7 +2598,8 @@
    84 remove-catalog-resources-file-trgm-index
    85 split-certnames-table
    86 store-latest-reports-separately
-   87 store-latest-events-separately})
+   87 store-latest-events-separately
+   88 prevent-duplicate-catalogs})
 ;; Make sure that if you change the structure of reports
 ;; or resource events, you also update the delete-reports
 ;; cli command.
diff --git a/test/puppetlabs/puppetdb/scf/migrate_test.clj b/test/puppetlabs/puppetdb/scf/migrate_test.clj
index 95aa64488c..8e16f7f026 100644
--- a/test/puppetlabs/puppetdb/scf/migrate_test.clj
+++ b/test/puppetlabs/puppetdb/scf/migrate_test.clj
@@ -20,7 +20,7 @@
              :refer [*db* clear-db-for-testing!
                      schema-info-map diff-schema-maps with-test-db]]
             [puppetlabs.puppetdb.scf.hash :as shash]
-            [puppetlabs.puppetdb.time :refer [now to-timestamp]]
+            [puppetlabs.puppetdb.time :refer [now to-timestamp] :as t]
             [puppetlabs.puppetdb.scf.partitioning :as part]
             [clojure.string :as str])
   (:import (java.time ZoneId ZonedDateTime)
@@ -2311,3 +2311,64 @@
                       :deferrable? "NO"}
              :same nil}]}
            (diff-schema-maps before-migration (schema-info-map *db*)))))))
+
+(deftest migration-88-prevent-duplicate-catalogs
+  (testing "prevent duplicate catalogs migration"
+    (jdbc/with-db-connection *db*
+      (clear-db-for-testing!)
+      (fast-forward-to-migration! 82)
+      (let [ts1 (to-timestamp (now))
+            ts2 (-> (now)
+                    (t/plus (t/hours 1))
+                    to-timestamp)
+            fake-hash (sutils/munge-hash-for-storage "0001")]
+
+        (jdbc/insert-multi! :certnames
+                            [{:certname "host-1"}
+                             {:certname "host-2"}])
+
+        (jdbc/insert-multi! :catalogs
+                            [{:id 1 :hash fake-hash
+                              :certname "host-1" :producer_timestamp ts1
+                              :api_version 1 :catalog_version "one"}
+                             {:id 2 :hash fake-hash
+                              :certname "host-1" :producer_timestamp ts2
+                              :api_version 1 :catalog_version "one"}
+                             {:id 3 :hash fake-hash
+                              :certname "host-1" :producer_timestamp ts2
+                              :api_version 1 :catalog_version "one"}
+                             {:id 4 :hash fake-hash
+                              :certname "host-2" :producer_timestamp ts1
+                              :api_version 1 :catalog_version "one"}])
+        (let [before-migration (schema-info-map *db*)
+              _ (apply-migration-for-testing! 88)
+              diff (-> (diff-schema-maps before-migration (schema-info-map *db*))
+                       (update :index-diff set)
+                       (update :constraint-diff set))]
+          (is (= {:index-diff
+                  #{{:left-only {:unique? false}
+                     :right-only {:unique? true}
+                     :same {:index "catalogs_certname_idx"
+                            :user "pdb_test"
+                            :primary? false
+                            :is_partial false
+                            :functional? false
+                            :type "btree"
+                            :index_keys ["certname"]
+                            :table "catalogs"
+                            :schema "public"}}}
+
+                  :table-diff nil
+
+                  :constraint-diff
+                  #{{:left-only nil
+                     :right-only {:constraint_name "catalogs_certname_idx"
+                                  :table_name "catalogs"
+                                  :constraint_type "UNIQUE"
+                                  :initially_deferred "NO"
+                                  :deferrable? "NO"}
+                     :same nil}}}
+                 diff))
+          (is (= [{:id 3 :certname "host-1" :producer_timestamp ts2}
+                  {:id 4 :certname "host-2" :producer_timestamp ts1}]
+                 (query-to-vec "select id, certname, producer_timestamp from catalogs"))))))))