Commit 2e61ea9f authored by Andrew Roetker's avatar Andrew Roetker

(PDB-841) Retire the legacy storeconfigs tool

This commit removes support for the legacy storeconfigs export tool as
we no longer support Puppet <4 in PuppetDB.
parent aef93caa
......@@ -32,8 +32,6 @@ group :test do
end
gem 'mocha', '~> 1.0'
gem 'activerecord', '~> 3.2'
gem 'sqlite3'
end
group :acceptance do
......
# Beaker acceptance test: exercises the legacy `puppet storeconfigs export`
# tool end-to-end — populate an ActiveRecord (sqlite3) storeconfigs database,
# export it to a tarball, import the tarball into PuppetDB, and verify the
# catalogs are queryable.
puppetdb_query_url = "http://localhost:8080/pdb/query"
test_name "storeconfigs export and import" do
# The legacy storeconfigs tool requires Puppet < 4; AIO packaging implies 4+.
skip_test "storeconfigs not supported in puppet >= 4.0" if options[:type] == 'aio'
skip_test "Skipping test for PE because sqlite3 isn't available" if master.is_pe?
db_path = master.tmpfile('storeconfigs.sqlite3')
# NOTE(review): this tmpfile path is immediately overwritten below by
# create_remote_site_pp — the tmpfile call looks redundant; confirm.
manifest_path = master.tmpfile('storeconfigs.pp')
# CLI flags for `puppet storeconfigs export` pointing at the sqlite3 backend.
args = "--dbadapter sqlite3 --dblocation #{db_path} --storeconfigs_backend active_record --debug"
step "setup a test manifest for the master" do
# One exported and one non-exported resource; only the exported one should
# survive the export/import round trip.
manifest = <<-MANIFEST
node default {
@@notify { "exported_resource": }
notify { "non_exported_resource": }
}
MANIFEST
manifest_path = create_remote_site_pp(master, manifest)
# The sqlite database file must be writable by the puppet master process.
on master, "chown puppet:puppet #{db_path}"
on master, "chmod -R 777 #{db_path}"
end
step "run each agent once to populate the database" do
# dbadapter, dblocation, storeconfigs_backend, routefile
with_puppet_running_on master, {
'master' => {
'dbadapter' => 'sqlite3',
'dblocation' => db_path,
'storeconfigs_backend' => 'active_record',
'debug' => 'true',
'autosign' => 'true'
},
'main' => {
# NOTE(review): create_remote_site_pp's return value is used as
# environmentpath here — presumably it returns an environment dir; verify.
'environmentpath' => manifest_path,
}
} do
hosts.each do |host|
# Exit code 2 means "changes applied", which is expected on first run.
run_agent_on host, "--test --server #{master}", :acceptable_exit_codes => [0,2]
end
end
end
driver_legacy_export_file = "./legacy_storeconfigs_export.tar.gz"
driver_new_export_file = "./puppetdb-export.tar.gz"
step "export the storeconfigs data" do
result = on master, "puppet storeconfigs export #{args}"
# The export command prints the tarball location; capture it from stdout.
regex = /Exported storeconfigs data to (.*)/
assert_match regex, result.output
legacy_export_file = regex.match(result.output)[1]
scp_from(master, legacy_export_file, ".")
File.rename(File.join(".", File.basename(legacy_export_file)), driver_legacy_export_file)
end
# Start from a clean PuppetDB so the verification only sees imported data.
clear_and_restart_puppetdb(database)
step "import data into puppetdb" do
db_legacy_export_dir = "."
db_legacy_export_file = File.join(db_legacy_export_dir, "legacy_storeconfigs_export.tar.gz")
scp_to(database, driver_legacy_export_file, db_legacy_export_dir)
on database, "puppetdb import --infile #{db_legacy_export_file}"
end
step "Verify imported catalogs" do
hosts.each do |host|
# Each agent's catalog should now be queryable by certname via the v4 API.
result = on database, %Q|curl -G #{puppetdb_query_url}/v4/catalogs/#{host.node_name}|
result_catalog = JSON.parse(result.stdout)
assert_equal(host.node_name, result_catalog['certname'], "Catalog for node #{host.node_name} not found")
end
end
end
......@@ -27,7 +27,6 @@ Install It Now
To start using PuppetDB today:
* Review [the system requirements below](#system-requirements) (and, optionally, [our scaling recommendations][scaling]).
* If you'd like to migrate existing exported resources from your ActiveRecord storeconfigs database, please see the documentation on [Migrating Data][migrating].
* Choose your installation method:
* [easy install][install_via_module] using the PuppetDB puppet module on our recommended platforms
* [install from packages][install_from_packages] on our recommended platforms
......
......@@ -28,7 +28,6 @@ Additionally, these instructions may be useful for understanding the various mov
> **Notes:**
>
> * If you'd like to migrate existing exported resources from your ActiveRecord storeconfigs database, please see the documentation on [Migrating Data][migrating].
> * After following these instructions, you should [connect your puppet master(s) to PuppetDB][connect_master]. (If you use a standalone Puppet deployment, you will need to [connect every node to PuppetDB][connect_apply].)
> * These instructions are for [platforms with official PuppetDB packages][requirements]. To install on other systems, you should instead follow [the instructions for installing from source](./install_from_source.html).
> * If this is a production deployment, [review the scaling recommendations](./scaling_recommendations.html) before installing. You should ensure your PuppetDB server will be able to comfortably handle your site's load.
......
......@@ -18,11 +18,6 @@ This page describes how to install PuppetDB from an archive of the source code,
If possible, we recommend installing PuppetDB [with the puppetlabs-puppetdb module][module] or [from packages][packages]; either approach will be easier. However, if you are testing a new version, developing PuppetDB, or installing it on a system not supported with official packages, you will need to install it from source.
> **Note:**
>
> If you'd like to migrate existing exported resources from your ActiveRecord storeconfigs database, please see the documentation on [Migrating Data][migrating].
Step 1: Install Prerequisites
-----
......
......@@ -16,10 +16,6 @@ PuppetDB termini for your Puppet master) using [the PuppetDB module][module] fro
* If you are **already familiar with Puppet** and have a working Puppet deployment, this is the easiest method for installing PuppetDB. In this guide, we expect that you already know how to assign Puppet classes to nodes.
* If you are **just getting started with Puppet,** you should probably follow the [Installing PuppetDB From Packages guide](./install_from_packages.html) instead.
> **Note:**
>
> If you'd like to migrate existing exported resources from your ActiveRecord storeconfigs database, please see the documentation on [Migrating Data][migrating].
Step 1: Enable the Puppet Labs Package Repository
-----
......
......@@ -4,17 +4,6 @@ layout: default
canonical: "/puppetdb/latest/migrate.html"
---
Migrating from ActiveRecord storeconfigs
-----
If you're using exported resources with ActiveRecord storeconfigs, you may want to migrate your existing data to PuppetDB before connecting the master to it. This will ensure that whatever resources were being collected by the agents will still be collected, and no incorrect configuration will be applied.
The existing ActiveRecord data can be exported using the `puppet storeconfigs export` command, which will produce a tarball that can be consumed by PuppetDB. Because this command is intended only to stop nodes from failing until they have checked in to PuppetDB, it will only include exported resources, excluding edges and facts.
NOTE: in order for this to work properly, you need to make sure you've run this command and generated the export tarball *prior* to configuring your master for PuppetDB.
Once you've run this command and generated an export tarball, you should follow the instructions below to import the tarball into your PuppetDB database.
Exporting data from an existing PuppetDB database
------
......
......@@ -9,12 +9,16 @@ canonical: "/puppetdb/latest/puppetdb-faq.html"
[migrating]: ./migrate.html
[maintaining_tuning]: ./maintain_and_tune.html
[low_catalog_dupe]: ./trouble_low_catalog_duplication.html
[puppetdb3]: /puppetdb/3.1/migrate.html
## Can I migrate my data from ActiveRecord storeconfigs or from an existing PuppetDB to a new instance?
## Can I migrate my data from ActiveRecord storeconfigs?
Yes. At this time, you can only migrate exported resources from ActiveRecord, and
you can migrate catalogs from an existing PuppetDB. For more information, see
[Migrating Data][migrating].
Yes, but you must use PuppetDB 3.x to do so. Please consult the
[PuppetDB 3.x documentation][puppetdb3] for more details.
## Can I migrate from an existing PuppetDB to a new instance?
Yes. See [Migrating Data][migrating] for more information.
## PuppetDB is complaining about a truststore or keystore file. What do I do?
......
require 'puppet/application/face_base'

# Application shim that exposes the :storeconfigs face as the
# `puppet storeconfigs` command-line application. All behavior lives in the
# face definition (puppet/face/storeconfigs.rb); FaceBase supplies the CLI
# plumbing, so the class body is intentionally empty.
class Puppet::Application::Storeconfigs < Puppet::Application::FaceBase
end
require 'puppet/util/puppetdb'
require 'puppet/face'

# The real face is only defined on Puppet 3.x, where the ActiveRecord
# storeconfigs backend still exists; on 4.0+ a stub face is defined instead
# (see the else branch at the bottom of this file).
if Puppet::Util::Puppetdb.puppet3compat?
require 'tmpdir'
Puppet::Face.define(:storeconfigs, '0.0.1') do
copyright "Puppet Labs", 2011
license "Apache 2 license"
summary "Interact with the storeconfigs database"
description <<-DESC
This subcommand interacts with the ActiveRecord storeconfigs database, and
can be used to export a dump of that data which is suitable for import by
PuppetDB.
DESC
action :export do
  summary "Export the storeconfigs database"
  description <<-DESC
Generate a dump of all catalogs from the storeconfigs database, as a
tarball which can be imported by PuppetDB. Only exported resources are
included; non-exported resources, edges, facts, or other data are
omitted. Returns the location of the output.
  DESC

  # Builds one JSON catalog per node (exported resources only) plus an
  # export-metadata.json, then packs the lot into a (gzipped, when possible)
  # tarball. Returns the path of the tarball, or the working directory when
  # no `tar` binary is available.
  when_invoked do |options|
    require 'puppet/rails'

    # Work inside a throwaway directory; the import format expects a
    # top-level 'puppetdb-bak' directory inside the tarball.
    tmpdir = Dir.mktmpdir
    workdir = File.join(tmpdir, 'puppetdb-bak')
    Dir.mkdir(workdir)

    begin
      Puppet::Rails.connect

      # Use a single timestamp so all catalogs and the metadata agree.
      timestamp = Time.now

      # Fetch all nodes, including exported resources and their params.
      # The :conditions clause restricts the eager-loaded resources to
      # exported ones only.
      nodes = Puppet::Rails::Host.all(:include => {:resources => [:param_values, :puppet_tags]},
                                      :conditions => {:resources => {:exported => true}})

      catalogs = nodes.map { |node| node_to_catalog_hash(node, timestamp.iso8601(5)) }

      # One JSON catalog file per node, named by certname.
      catalog_dir = File.join(workdir, 'catalogs')
      FileUtils.mkdir(catalog_dir)
      catalogs.each do |catalog|
        filename = File.join(catalog_dir, "#{catalog[:certname]}.json")
        File.open(filename, 'w') do |file|
          file.puts catalog.to_json
        end
      end

      # replace_catalog version 6 is the command version PuppetDB expects
      # for this payload format.
      File.open(File.join(workdir, 'export-metadata.json'), 'w') do |file|
        metadata = {
          'timestamp' => timestamp,
          'command_versions' => {
            'replace_catalog' => 6,
          }
        }
        file.puts metadata.to_json
      end

      tarfile = destination_file(timestamp)
      if tar = Puppet::Util.which('tar')
        execute("cd #{tmpdir} && #{tar} -cf #{tarfile} puppetdb-bak")
        FileUtils.rm_rf(workdir)

        if gzip = Puppet::Util.which('gzip')
          execute("#{gzip} #{tarfile}")
          "#{tarfile}.gz"
        else
          Puppet.warning "Can't find the `gzip` command to compress the tarball; output will not be compressed"
          tarfile
        end
      else
        Puppet.warning "Can't find the `tar` command to produce a tarball; output will remain in the temporary working directory"
        workdir
      end
    rescue
      # Clean up if something goes wrong. We don't want to ensure this,
      # because we want the directory to stick around in the case where they
      # don't have tar.
      FileUtils.rm_rf(workdir)
      raise
    end
  end

  when_rendering :console do |filename|
    # BUGFIX: restore the interpolation so the console message contains the
    # actual output path; the acceptance test matches it against
    # /Exported storeconfigs data to (.*)/.
    "Exported storeconfigs data to #{filename}"
  end
end
# Returns the location to leave the output, as an absolute path. The
# filename embeds the export timestamp. This is really only here so tests
# can stub the destination. :/
def destination_file(timestamp)
  stamp = timestamp.strftime('%Y%m%d%H%M%S')
  File.expand_path("storeconfigs-#{stamp}.tar")
end
# Execute a command using Puppet's execution static method.
#
# @param command [Array<String>, String] the command to execute. If it is
#   an Array the first element should be the executable and the rest of the
#   elements should be the individual arguments to that executable.
# @return [Puppet::Util::Execution::ProcessOutput] output as specified by options
# @raise [Puppet::ExecutionFailure] if the executed child process did not exit with status == 0 and `failonfail` is
#   `true`.
def execute(command)
Puppet::Util::Execution.execute(command)
end
# Build a PuppetDB 'replace catalog' hash for one ActiveRecord node.
# Every resource hash is paired with a containment edge from Stage[main],
# and the synthetic Stage[main] resource itself is appended so the catalog
# is structurally valid.
def node_to_catalog_hash(node, timestamp)
  resource_hashes = node.resources.map { |res| resource_to_hash(res) }
  containment_edges = node.resources.map { |res| resource_to_edge_hash(res) }

  {
    :environment => "production",
    :metadata => { :api_version => 1 },
    :certname => node.name,
    # Fall back to "now" for nodes that never recorded a compile time.
    :version => node.last_compile || Time.now,
    :edges => containment_edges,
    :resources => resource_hashes + [stage_main_hash],
    :timestamp => timestamp,
    :producer_timestamp => timestamp,
  }
end
# Convert one ActiveRecord resource row into a PuppetDB resource hash.
# Always marks the resource as exported (only exported resources are dumped).
def resource_to_hash(resource)
  # Collapse the param_values rows into a parameters hash; a parameter that
  # appears more than once becomes a flattened array of all its values.
  parameters = resource.param_values.each_with_object({}) do |pv, acc|
    name = pv.param_name.name
    acc[name] = acc.key?(name) ? [acc[name], pv.value].flatten : pv.value
  end

  hash = {
    :type => resource.restype,
    :title => resource.title,
    :exported => true,
    :parameters => parameters,
    :tags => resource.puppet_tags.map(&:name).uniq.sort,
  }
  # File and line are optional source-location metadata; omit when absent.
  hash[:file] = resource.file if resource.file
  hash[:line] = resource.line if resource.line
  hash
end
# The catalog *must* have edges, so everything is contained by Stage[main]!
# Returns a containment edge from Stage[main] to the given resource.
def resource_to_edge_hash(resource)
  target = { 'type' => resource.restype, 'title' => resource.title }
  {
    'source' => { 'type' => 'Stage', 'title' => 'main' },
    'target' => target,
    'relationship' => 'contains',
  }
end
# The synthetic Stage[main] resource that anchors every containment edge
# in the exported catalogs.
def stage_main_hash
  {
    type: 'Stage',
    title: 'main',
    exported: false,
    parameters: {},
    tags: %w[stage main],
  }
end
end
else
# On Puppet 4.0+ the ActiveRecord storeconfigs backend no longer exists, so
# the face is reduced to a stub with no actions that points users at the
# last Puppet release series that supported migration.
Puppet::Face.define(:storeconfigs, '0.0.1') do
copyright "Puppet Labs", 2011
license "Apache 2 license"
summary "storeconfigs is not supported on Puppet 4.0.0+"
description <<-DESC
Users needing this feature should migrate using Puppet 3.7.2 or a more recent
3.7 release.
DESC
end
end
#!/usr/bin/env ruby
require 'puppet/util/puppetdb'
# These specs exercise the legacy storeconfigs export face. They are only
# loaded under the Puppet 3 compatibility layer, and only run when both the
# rails and sqlite features are available.
if Puppet::Util::Puppetdb.puppet3compat?
require 'spec_helper'
require 'puppet/face/storeconfigs'
require 'json'
require 'puppet/util/feature'
require 'puppet/util/puppetdb'
describe Puppet::Face[:storeconfigs, '0.0.1'], :if => (Puppet.features.rails? && Puppet.features.sqlite?) do
# Point Puppet::Rails at an in-memory sqlite3 database (via a mocha stub)
# so each example gets a fresh, disposable storeconfigs schema and no
# database files or rails logs are left on disk.
def setup_scratch_database
Puppet::Rails.stubs(:database_arguments).returns(
:adapter => 'sqlite3',
:log_level => Puppet[:rails_loglevel],
:database => ':memory:'
)
Puppet[:railslog] = '/dev/null'
Puppet::Rails.init
end
before :all do
# We have to have this block to require this file, so they get loaded on
# platforms where we are going to run the tests, but not on Ruby 1.8.5.
# Unfortunately, rspec will evaluate the describe block (but not the before
# block or tests) even if the conditions fail. The lack of a sqlite3 gem
# for Ruby 1.8.5 ensures that the condition will always be false on Ruby
# 1.8.5, so at this point it's safe to require this.
require 'puppet/indirector/catalog/active_record'
end
before :each do
# Fresh in-memory database plus storeconfigs settings for every example.
setup_scratch_database
Puppet[:storeconfigs] = true
Puppet[:storeconfigs_backend] = :active_record
end
describe "export action" do
after :each do
FileUtils.rm_rf(@path)
end
before :each do
# Reserve a unique path, then reuse it as a scratch directory and stub the
# face's destination_file so the export tarball lands inside it.
tempfile = Tempfile.new('export')
@path = tempfile.path
tempfile.close!
Dir.mkdir(@path)
subject.stubs(:destination_file).returns File.join(@path, 'storeconfigs-test.tar')
end
# Turn the filename of a gzipped tar into a hash from filename to content.
# BUGFIX: the two shell commands below interpolate the tarball path; the
# interpolations were garbled and are restored here.
def tgz_to_hash(filename)
  # List the files in the archive, ignoring directories (whose names end
  # with /), and stripping the leading puppetdb-bak.
  files = `tar tf #{filename}`.lines.map(&:chomp).reject { |fname| fname[-1,1] == '/'}.map {|fname| fname.sub('puppetdb-bak/', '') }

  # Get the content of the files, one per line (-O extracts to stdout).
  # Thank goodness they're a single line each.
  content = `tar xf #{filename} -O`.lines.to_a

  # Build a hash from filename to content. Ruby 1.8.5 doesn't like
  # Hash[array_of_pairs], so we have to jump through hoops by flattening
  # and splatting this list.
  Hash[*files.zip(content).flatten]
end
describe "with nodes present" do
# Build a Notify resource; only resources created with exported=true should
# survive the export.
def notify(title, exported=false)
Puppet::Resource.new(:notify, title, :parameters => {:message => title}, :exported => exported)
end
# Build an exported User resource with array-valued parameters.
def user(name)
Puppet::Resource.new(:user, name,
:parameters => {:groups => ['foo', 'bar', 'baz'],
:profiles => ['stuff', 'here'] #<-- Uses an ordered list
}, :exported => true)
end
# Persist a catalog through the ActiveRecord terminus, the same path the
# storeconfigs backend uses in production.
def save_catalog(catalog)
request = Puppet::Resource::Catalog.indirection.request(:save, catalog.name, catalog)
Puppet::Resource::Catalog::ActiveRecord.new.save(request)
end
before :each do
# Seed node 'foo' with a mix of exported and non-exported resources.
catalog = Puppet::Resource::Catalog.new('foo')
catalog.add_resource notify('not exported')
catalog.add_resource notify('exported', true)
catalog.add_resource user('someuser')
save_catalog(catalog)
end
it "should have the right structure" do
filename = subject.export
results = tgz_to_hash(filename)
# The tarball contains exactly the metadata file and one catalog per node.
results.keys.should =~ ['export-metadata.json', 'catalogs/foo.json']
metadata = JSON.load(results['export-metadata.json'])
metadata.keys.should =~ ['timestamp', 'command_versions']
metadata['command_versions'].should == {'replace_catalog' => 6}
catalog = JSON.load(results['catalogs/foo.json'])
catalog.keys.should =~ ['metadata', 'environment', 'certname', 'version', 'edges', 'resources', 'timestamp', 'producer_timestamp']
catalog['metadata'].should == {'api_version' => 1}
catalog['certname'].should == 'foo'
# Every exported resource is contained by Stage[main].
catalog['edges'].to_set.should == [{
'source' => {'type' => 'Stage', 'title' => 'main'},
'target' => {'type' => 'Notify', 'title' => 'exported'},
'relationship' => 'contains'},
{"source"=>{"type"=>"Stage", "title"=>"main"},
"target"=>{"type"=>"User", "title"=>"someuser"},
"relationship"=>"contains"}].to_set
catalog['resources'].should include({
'type' => 'Stage',
'title' => 'main',
'exported' => false,
'tags' => ['stage', 'main'],
'parameters' => {},
})
catalog['resources'].should include({
'type' => 'Notify',
'title' => 'exported',
'exported' => true,
'tags' => ['exported', 'notify'],
'parameters' => {
'message' => 'exported',
},
})
catalog['resources'].should include({
'type' => 'User',
'title' => 'someuser',
'exported' => true,
'tags' => ['someuser', 'user'],
'parameters' => {
'groups' => ['foo', 'bar', 'baz'],
'profiles' => ['stuff', 'here']
},
})
end
it "should only include exported resources and Stage[main]" do
filename = subject.export
results = tgz_to_hash(filename)
results.keys.should =~ ['export-metadata.json', 'catalogs/foo.json']
catalog = JSON.load(results['catalogs/foo.json'])
catalog['certname'].should == 'foo'
catalog['edges'].map do |edge|
[edge['source']['type'], edge['source']['title'], edge['relationship'], edge['target']['type'], edge['target']['title']]
end.to_set.should == [['Stage', 'main', 'contains', 'Notify', 'exported'],
['Stage', 'main', 'contains', 'User', 'someuser']].to_set
catalog['resources'].map { |resource| [resource['type'], resource['title']] }.to_set.should == [['Notify', 'exported'], ["User", "someuser"], ['Stage', 'main']].to_set
notify = catalog['resources'].find {|resource| resource['type'] == 'Notify'}
notify['exported'].should == true
end
it "should exclude nodes with no exported resources" do
# Node 'bar' has only non-exported resources, so no catalogs/bar.json.
catalog = Puppet::Resource::Catalog.new('bar')
catalog.add_resource notify('also not exported')
save_catalog(catalog)
filename = subject.export
results = tgz_to_hash(filename)
results.keys.should =~ ['export-metadata.json', 'catalogs/foo.json']
end
end
it "should do nothing if there are no nodes" do
# An empty database still produces a valid tarball with just the metadata.
filename = subject.export
results = tgz_to_hash(filename)
results.keys.should == ['export-metadata.json']
end
end
end
end
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment