diff --git a/config/cloud_controller.yml b/config/cloud_controller.yml index c4d48202b9..98f65c1237 100644 --- a/config/cloud_controller.yml +++ b/config/cloud_controller.yml @@ -320,6 +320,12 @@ directories: diagnostics: /tmp stacks_file: config/stacks.yml + +storage_cli_config_file_droplets: config/storage_cli_config_droplets.json +storage_cli_config_file_packages: config/storage_cli_config_packages.json +storage_cli_config_file_buildpacks: config/storage_cli_config_buildpacks.json +storage_cli_config_file_resource_pool: config/storage_cli_config_resource_pool.json + newrelic_enabled: false max_annotations_per_resource: 200 diff --git a/lib/cloud_controller/blobstore/client_provider.rb b/lib/cloud_controller/blobstore/client_provider.rb index ecda94b289..6ba786ee47 100644 --- a/lib/cloud_controller/blobstore/client_provider.rb +++ b/lib/cloud_controller/blobstore/client_provider.rb @@ -16,7 +16,7 @@ def self.provide(options:, directory_key:, root_dir: nil, resource_type: nil) provide_fog(options, directory_key, root_dir) elsif options[:blobstore_type] == 'storage-cli' # storage-cli is an experimental feature and not yet fully implemented. !!! DO NOT USE IN PRODUCTION !!! - provide_storage_cli(options, directory_key, root_dir) + provide_storage_cli(options, directory_key, root_dir, resource_type) else provide_webdav(options, directory_key, root_dir) end @@ -71,14 +71,14 @@ def provide_webdav(options, directory_key, root_dir) Client.new(SafeDeleteClient.new(retryable_client, root_dir)) end - def provide_storage_cli(options, directory_key, root_dir) - raise BlobstoreError.new('connection_config for storage-cli is not provided') unless options[:connection_config] - - client = StorageCliClient.build(connection_config: options.fetch(:connection_config), - directory_key: directory_key, - root_dir: root_dir, - min_size: options[:minimum_size], - max_size: options[:maximum_size]) + def provide_storage_cli(options, directory_key, root_dir, resource_type) + client = StorageCliClient.build( + directory_key: directory_key, + resource_type: resource_type, + root_dir: root_dir, + min_size: options[:minimum_size], + max_size: options[:maximum_size] + ) logger = Steno.logger('cc.blobstore.storage_cli_client') errors = [StandardError] diff --git a/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb b/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb index 1ac93b3084..297337c076 100644 --- a/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb +++ b/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb @@ -5,15 +5,6 @@ def cli_path ENV['AZURE_STORAGE_CLI_PATH'] || '/var/vcap/packages/azure-storage-cli/bin/azure-storage-cli' end - def build_config(connection_config) - { - account_name: connection_config[:azure_storage_account_name], - account_key: connection_config[:azure_storage_access_key], - container_name: @directory_key, - environment: connection_config[:environment] - }.compact - end - CloudController::Blobstore::StorageCliClient.register('AzureRM', AzureStorageCliClient) end end diff --git a/lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb b/lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb index 2bce9d9360..8ca62a015d 100644 --- a/lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb +++ b/lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb @@ -16,29 +16,108 @@ class << self attr_reader :registry def register(provider, klass) - registry[provider] = klass + 
registry[provider.to_s] = klass end - def build(connection_config:, directory_key:, root_dir:, min_size: nil, max_size: nil) - provider = connection_config[:provider] - raise 'Missing connection_config[:provider]' if provider.nil? + def build(directory_key:, root_dir:, resource_type: nil, min_size: nil, max_size: nil) + raise 'Missing resource_type' if resource_type.nil? - impl_class = registry[provider] + cfg = fetch_and_validate_config!(resource_type) + provider = cfg['provider'] + + key = provider.to_s + impl_class = registry[key] || registry[key.downcase] || registry[key.upcase] raise "No storage CLI client registered for provider #{provider}" unless impl_class - impl_class.new(connection_config:, directory_key:, root_dir:, min_size:, max_size:) + impl_class.new(provider:, directory_key:, root_dir:, resource_type:, min_size:, max_size:) + end + + def fetch_and_validate_config!(resource_type) + path = config_path_for!(resource_type) + + begin + json = Oj.load(File.read(path)) + rescue StandardError => e + raise BlobstoreError.new("Failed to parse storage-cli config JSON at #{path}: #{e.message}") + end + + validate_required_keys!(json, path) + json + end + + def config_path_for!(resource_type) + key = + case resource_type.to_s + when 'droplets', 'buildpack_cache' then :storage_cli_config_file_droplets + when 'buildpacks' then :storage_cli_config_file_buildpacks + when 'packages' then :storage_cli_config_file_packages + when 'resource_pool' then :storage_cli_config_file_resource_pool + else + raise BlobstoreError.new("Unknown resource_type: #{resource_type}") + end + + path = VCAP::CloudController::Config.config.get(key) + raise BlobstoreError.new("storage-cli config file not found or not readable at: #{path.inspect}") unless path && File.file?(path) && File.readable?(path) + + path + end + + def validate_required_keys!(json, path) + validate_provider!(json, path) + required = %w[ + account_key + account_name + container_name + environment + ] + missing = required.reject { |k| json.key?(k) && !json[k].to_s.strip.empty? } + return if missing.empty? + + raise BlobstoreError.new("Missing required keys in config file #{path}: #{missing.join(', ')} (json: #{json})") + end + + def validate_provider!(json, path) + provider = json['provider'] + return unless provider.nil? || provider.to_s.strip.empty? 
+ + raise BlobstoreError.new("No provider specified in config file: #{path.inspect} json: #{json}") end end - def initialize(connection_config:, directory_key:, root_dir:, min_size: nil, max_size: nil) + def initialize(provider:, directory_key:, resource_type:, root_dir:, min_size: nil, max_size: nil) @cli_path = cli_path @directory_key = directory_key + @resource_type = resource_type.to_s @root_dir = root_dir @min_size = min_size || 0 @max_size = max_size - config = build_config(connection_config) - @config_file = write_config_file(config) - @fork = connection_config.fetch(:fork, false) + @provider = provider + + file_path = case @resource_type + when 'droplets', 'buildpack_cache' + VCAP::CloudController::Config.config.get(:storage_cli_config_file_droplets) + when 'buildpacks' + VCAP::CloudController::Config.config.get(:storage_cli_config_file_buildpacks) + when 'packages' + VCAP::CloudController::Config.config.get(:storage_cli_config_file_packages) + when 'resource_pool' + VCAP::CloudController::Config.config.get(:storage_cli_config_file_resource_pool) + else + raise BlobstoreError.new("Unknown resource_type: #{@resource_type}") + end + + unless file_path && File.file?(file_path) && File.readable?(file_path) + raise BlobstoreError.new("storage-cli config file not found or not readable at: #{file_path.inspect}") + end + + begin + VCAP::CloudController::YAMLConfig.safe_load_file(file_path) + rescue StandardError => e + raise BlobstoreError.new("Failed to load storage-cli config at #{file_path}: #{e.message}") + end + + @config_file = file_path + logger.info('storage_cli_config_selected', resource_type: @resource_type, path: @config_file) end def local? @@ -88,28 +167,16 @@ def cp_to_blobstore(source_path, destination_key) end def cp_file_between_keys(source_key, destination_key) - if @fork - run_cli('copy', partitioned_key(source_key), partitioned_key(destination_key)) - else - # Azure CLI doesn't support server-side copy yet, so fallback to local copy - Tempfile.create('blob-copy') do |tmp| - download_from_blobstore(source_key, tmp.path) - cp_to_blobstore(tmp.path, destination_key) - end - end + run_cli('copy', partitioned_key(source_key), partitioned_key(destination_key)) end def delete_all(_=nil) # page_size is currently not considered. Azure SDK / API has a limit of 5000 - pass unless @fork - # Currently, storage-cli does not support bulk deletion. run_cli('delete-recursive', @root_dir) end def delete_all_in_path(path) - pass unless @fork - # Currently, storage-cli does not support bulk deletion. run_cli('delete-recursive', partitioned_key(path)) end @@ -123,29 +190,19 @@ def delete_blob(blob) end def blob(key) - if @fork - properties = properties(key) - return nil if properties.nil? || properties.empty? - - signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600) - StorageCliBlob.new(key, properties:, signed_url:) - elsif exists?(key) - # Azure CLI does not support getting blob properties directly, so fallback to local check - signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600) - StorageCliBlob.new(key, signed_url:) - end + properties = properties(key) + return nil if properties.nil? || properties.empty? 
+ + signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600) + StorageCliBlob.new(key, properties:, signed_url:) end def files_for(prefix, _ignored_directory_prefixes=[]) - return nil unless @fork - files, _status = run_cli('list', prefix) files.split("\n").map(&:strip).reject(&:empty?).map { |file| StorageCliBlob.new(file) } end def ensure_bucket_exists - return unless @fork - run_cli('ensure-bucket-exists') end diff --git a/lib/cloud_controller/config_schemas/api_schema.rb b/lib/cloud_controller/config_schemas/api_schema.rb index f68b93d34e..aa9b362a26 100644 --- a/lib/cloud_controller/config_schemas/api_schema.rb +++ b/lib/cloud_controller/config_schemas/api_schema.rb @@ -95,6 +95,12 @@ class ApiSchema < VCAP::Config }, stacks_file: String, + + optional(:storage_cli_config_file_buildpacks) => String, + optional(:storage_cli_config_file_packages) => String, + optional(:storage_cli_config_file_resource_pool) => String, + optional(:storage_cli_config_file_droplets) => String, + newrelic_enabled: bool, optional(:max_migration_duration_in_minutes) => Integer, diff --git a/lib/cloud_controller/config_schemas/clock_schema.rb b/lib/cloud_controller/config_schemas/clock_schema.rb index 21d32d6778..14c9952c0e 100644 --- a/lib/cloud_controller/config_schemas/clock_schema.rb +++ b/lib/cloud_controller/config_schemas/clock_schema.rb @@ -50,6 +50,11 @@ class ClockSchema < VCAP::Config pid_filename: String, # Pid filename to use + optional(:storage_cli_config_file_buildpacks) => String, + optional(:storage_cli_config_file_packages) => String, + optional(:storage_cli_config_file_resource_pool) => String, + optional(:storage_cli_config_file_droplets) => String, + newrelic_enabled: bool, optional(:max_migration_duration_in_minutes) => Integer, diff --git a/lib/cloud_controller/config_schemas/worker_schema.rb b/lib/cloud_controller/config_schemas/worker_schema.rb index f10347ef96..eaf958c5ca 100644 --- a/lib/cloud_controller/config_schemas/worker_schema.rb +++ b/lib/cloud_controller/config_schemas/worker_schema.rb @@ -41,6 +41,12 @@ class WorkerSchema < VCAP::Config }, stacks_file: String, + + optional(:storage_cli_config_file_buildpacks) => String, + optional(:storage_cli_config_file_packages) => String, + optional(:storage_cli_config_file_resource_pool) => String, + optional(:storage_cli_config_file_droplets) => String, + newrelic_enabled: bool, optional(:max_migration_duration_in_minutes) => Integer, diff --git a/lib/cloud_controller/dependency_locator.rb b/lib/cloud_controller/dependency_locator.rb index b885178fe5..6d7aabc4fb 100644 --- a/lib/cloud_controller/dependency_locator.rb +++ b/lib/cloud_controller/dependency_locator.rb @@ -167,7 +167,8 @@ def legacy_global_app_bits_cache Blobstore::ClientProvider.provide( options: options, - directory_key: options.fetch(:resource_directory_key) + directory_key: options.fetch(:resource_directory_key), + resource_type: :resource_pool ) end @@ -177,7 +178,8 @@ def global_app_bits_cache Blobstore::ClientProvider.provide( options: options, directory_key: options.fetch(:resource_directory_key), - root_dir: RESOURCE_POOL_DIR + root_dir: RESOURCE_POOL_DIR, + resource_type: :resource_pool ) end diff --git a/lib/cloud_controller/resource_pool.rb b/lib/cloud_controller/resource_pool.rb index 3cfdfb6763..1e8061802e 100644 --- a/lib/cloud_controller/resource_pool.rb +++ b/lib/cloud_controller/resource_pool.rb @@ -25,7 +25,8 @@ def initialize(config) @blobstore = CloudController::Blobstore::ClientProvider.provide( options: options, 
directory_key: options.fetch(:resource_directory_key), - root_dir: CloudController::DependencyLocator::RESOURCE_POOL_DIR + root_dir: CloudController::DependencyLocator::RESOURCE_POOL_DIR, + resource_type: 'resource_pool' ) @minimum_size = options[:minimum_size] || 0 # TODO: move default into config object? diff --git a/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb b/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb index be6f9b6c95..d0490d15e5 100644 --- a/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb +++ b/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb @@ -129,22 +129,37 @@ module Blobstore context 'when storage-cli is requested' do let(:blobstore_type) { 'storage-cli' } let(:directory_key) { 'some-bucket' } + let(:resource_type) { 'droplets' } let(:root_dir) { 'some-root-dir' } let(:storage_cli_client_mock) { class_double(CloudController::Blobstore::StorageCliClient) } + let(:tmpdir) { Dir.mktmpdir('storage_cli_spec') } + let(:config_path) { File.join(tmpdir, 'storage_cli_config_droplets.json') } before do - options.merge!(connection_config: {}, minimum_size: 100, maximum_size: 1000) + File.write(config_path, '{"provider": "AzureRM", + "account_name": "some-account-name", + "account_key": "some-access-key", + "container_name": "directory_key", + "environment": "AzureCloud" }') + allow(VCAP::CloudController::Config.config).to receive(:get).with(:storage_cli_config_file_droplets).and_return(config_path) + options.merge!(provider: 'AzureRM', minimum_size: 100, maximum_size: 1000) end it 'provides a storage-cli client' do allow(StorageCliClient).to receive(:build).and_return(storage_cli_client_mock) - ClientProvider.provide(options:, directory_key:, root_dir:) - expect(StorageCliClient).to have_received(:build).with(connection_config: {}, directory_key: directory_key, root_dir: root_dir, min_size: 100, max_size: 1000) + ClientProvider.provide(options:, directory_key:, root_dir:, resource_type:) + expect(StorageCliClient).to have_received(:build).with(directory_key: directory_key, resource_type: resource_type, root_dir: root_dir, + min_size: 100, max_size: 1000) end - it 'raises an error if connection_config is not provided' do - options.delete(:connection_config) - expect { ClientProvider.provide(options:, directory_key:, root_dir:) }.to raise_error(BlobstoreError, 'connection_config for storage-cli is not provided') + it 'raises an error if provider is not provided' do + config_path = VCAP::CloudController::Config.config.get(:storage_cli_config_file_droplets) + File.write(config_path, + '{"provider": "", "account_name": "some-account-name", "account_key": "some-access-key", "container_name": "directory_key", "environment": "AzureCloud" }') + expect { ClientProvider.provide(options:, directory_key:, root_dir:, resource_type:) }.to raise_error(BlobstoreError) { |e| + expect(e.message).to include('No provider specified in config file:') + expect(e.message).to include(File.basename(config_path)) + } end end end diff --git a/spec/unit/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client_spec.rb b/spec/unit/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client_spec.rb index c1e097bc4b..5aa26d7b69 100644 --- a/spec/unit/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client_spec.rb +++ b/spec/unit/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client_spec.rb @@ -1,4 +1,6 @@ require 'spec_helper' +require 'tempfile' +require 'json' require_relative '../client_shared' 
require 'cloud_controller/blobstore/storage_cli/azure_storage_cli_client' require 'cloud_controller/blobstore/storage_cli/storage_cli_blob' @@ -6,18 +8,38 @@ module CloudController module Blobstore RSpec.describe AzureStorageCliClient do - subject(:client) { AzureStorageCliClient.new(connection_config: connection_config, directory_key: directory_key, root_dir: 'bommel') } - let(:directory_key) { 'my-bucket' } - let(:connection_config) do - { - azure_storage_access_key: 'some-access-key', - azure_storage_account_name: 'some-account-name', - container_name: directory_key, - environment: 'AzureCloud', - provider: 'AzureRM', - fork: true - } + let!(:tmp_cfg) do + f = Tempfile.new(['storage_cli_config', '.json']) + f.write({ provider: 'AzureRM', + account_name: 'some-account-name', + account_key: 'some-access-key', + container_name: directory_key, + environment: 'AzureCloud' }.to_json) + f.flush + f + end + + before do + cc_cfg = instance_double(VCAP::CloudController::Config) + allow(VCAP::CloudController::Config).to receive(:config).and_return(cc_cfg) + + allow(cc_cfg).to receive(:get) do |key, *_| + case key + when :storage_cli_config_file_droplets, + :storage_cli_config_file_buildpacks, + :storage_cli_config_file_packages, + :storage_cli_config_file_resource_pool + tmp_cfg.path + end + end + allow(Steno).to receive(:logger).and_return(double(info: nil, error: nil)) end + + after { tmp_cfg.close! } + + subject(:client) { AzureStorageCliClient.new(provider: 'AzureRM', directory_key: directory_key, resource_type: resource_type, root_dir: 'bommel') } + let(:directory_key) { 'my-bucket' } + let(:resource_type) { 'resource_pool' } let(:downloaded_file) do Tempfile.open('') do |tmpfile| tmpfile.write('downloaded file content') @@ -59,7 +81,7 @@ module Blobstore expect(client.instance_variable_get(:@config_file)).to be_a(String) expect(File.exist?(client.instance_variable_get(:@config_file))).to be true expect(File.read(client.instance_variable_get(:@config_file))).to eq( - '{"account_name":"some-account-name","account_key":"some-access-key","container_name":"my-bucket","environment":"AzureCloud"}' + '{"provider":"AzureRM","account_name":"some-account-name","account_key":"some-access-key","container_name":"my-bucket","environment":"AzureCloud"}' ) end end diff --git a/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_client_spec.rb b/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_client_spec.rb index 08873e926f..152866de6a 100644 --- a/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_client_spec.rb +++ b/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_client_spec.rb @@ -5,21 +5,241 @@ module CloudController module Blobstore RSpec.describe StorageCliClient do describe 'registry build and lookup' do - it 'builds the correct client' do - client_from_registry = StorageCliClient.build(connection_config: { provider: 'AzureRM' }, directory_key: 'dummy-key', root_dir: 'dummy-root') + it 'builds the correct client when JSON has provider AzureRM' do + droplets_cfg = Tempfile.new(['droplets', '.json']) + droplets_cfg.write({ provider: 'AzureRM', + account_key: 'bommelkey', + account_name: 'bommel', + container_name: 'bommelcontainer', + environment: 'BommelCloud' }.to_json) + droplets_cfg.flush + + config_double = instance_double(VCAP::CloudController::Config) + allow(VCAP::CloudController::Config).to receive(:config).and_return(config_double) + allow(config_double).to 
receive(:get).with(:storage_cli_config_file_droplets).and_return(droplets_cfg.path) + + client_from_registry = StorageCliClient.build( + directory_key: 'dummy-key', + root_dir: 'dummy-root', + resource_type: 'droplets' + ) expect(client_from_registry).to be_a(AzureStorageCliClient) + + droplets_cfg.close! end it 'raises an error for an unregistered provider' do + droplets_cfg = Tempfile.new(['droplets', '.json']) + droplets_cfg.write( + { provider: 'UnknownProvider', + account_key: 'bommelkey', + account_name: 'bommel', + container_name: 'bommelcontainer', + environment: 'BommelCloud' }.to_json + ) + droplets_cfg.flush + + config_double = instance_double(VCAP::CloudController::Config) + allow(VCAP::CloudController::Config).to receive(:config).and_return(config_double) + allow(config_double).to receive(:get).with(:storage_cli_config_file_droplets).and_return(droplets_cfg.path) + expect do - StorageCliClient.build(connection_config: { provider: 'UnknownProvider' }, directory_key: 'dummy-key', root_dir: 'dummy-root') + StorageCliClient.build(directory_key: 'dummy-key', root_dir: 'dummy-root', resource_type: 'droplets') end.to raise_error(RuntimeError, 'No storage CLI client registered for provider UnknownProvider') + + droplets_cfg.close! + end + + it 'raises an error when provider is missing from the JSON' do + droplets_cfg = Tempfile.new(['droplets', '.json']) + droplets_cfg.write({}.to_json) + droplets_cfg.flush + + config_double = instance_double(VCAP::CloudController::Config) + allow(VCAP::CloudController::Config).to receive(:config).and_return(config_double) + allow(config_double).to receive(:get).with(:storage_cli_config_file_droplets).and_return(droplets_cfg.path) + + expect do + StorageCliClient.build(directory_key: 'dummy-key', root_dir: 'dummy-root', resource_type: 'droplets') + end.to raise_error(CloudController::Blobstore::BlobstoreError, /No provider specified in config file/) + + droplets_cfg.close! + end + end + + describe 'resource_type → config file selection & validation' do + let(:config_double) { instance_double(VCAP::CloudController::Config) } + + let(:droplets_cfg) { Tempfile.new(['droplets', '.json']) } + let(:buildpacks_cfg) { Tempfile.new(['buildpacks', '.json']) } + let(:packages_cfg) { Tempfile.new(['packages', '.json']) } + let(:resource_pool_cfg) { Tempfile.new(['resource_pool', '.json']) } + + before do + [droplets_cfg, buildpacks_cfg, packages_cfg, resource_pool_cfg].each do |f| + f.write({ provider: 'AzureRM', + account_key: 'bommelkey', + account_name: 'bommel', + container_name: 'bommelcontainer', + environment: 'BommelCloud' }.to_json) + f.flush + end + + allow(VCAP::CloudController::Config).to receive(:config).and_return(config_double) + + allow(config_double).to receive(:get) do |key| + case key + when :storage_cli_config_file_droplets then droplets_cfg.path + when :storage_cli_config_file_buildpacks then buildpacks_cfg.path + when :storage_cli_config_file_packages then packages_cfg.path + when :storage_cli_config_file_resource_pool then resource_pool_cfg.path + end + end + + allow(Steno).to receive(:logger).and_return(double(info: nil, error: nil)) + end + + after do + [droplets_cfg, buildpacks_cfg, packages_cfg, resource_pool_cfg].each(&:close!) 
+ end + + def build_client(resource_type) + StorageCliClient.build( + directory_key: 'dir-key', + root_dir: 'root', + resource_type: resource_type + ) + end + + it 'picks droplets file for "droplets"' do + client = build_client('droplets') + expect(client.instance_variable_get(:@config_file)).to eq(droplets_cfg.path) + end + + it 'picks droplets file for "buildpack_cache" too' do + client = build_client('buildpack_cache') + expect(client.instance_variable_get(:@config_file)).to eq(droplets_cfg.path) + end + + it 'picks buildpacks file for "buildpacks"' do + client = build_client('buildpacks') + expect(client.instance_variable_get(:@config_file)).to eq(buildpacks_cfg.path) + end + + it 'picks packages file for "packages"' do + client = build_client('packages') + expect(client.instance_variable_get(:@config_file)).to eq(packages_cfg.path) + end + + it 'picks resource_pool file for "resource_pool"' do + client = build_client('resource_pool') + expect(client.instance_variable_get(:@config_file)).to eq(resource_pool_cfg.path) + end + + it 'accepts a symbol and normalizes it' do + client = build_client(:droplets) + expect(client.instance_variable_get(:@config_file)).to eq(droplets_cfg.path) end - it 'raises an error if provider is missing' do + it 'raises for unknown resource_type' do expect do - StorageCliClient.build(connection_config: {}, directory_key: 'dummy-key', root_dir: 'dummy-root') - end.to raise_error(RuntimeError, 'Missing connection_config[:provider]') + build_client('nope') + end.to raise_error(CloudController::Blobstore::BlobstoreError, /Unknown resource_type: nope/) + end + + it 'raises when file missing/unreadable' do + allow(config_double).to receive(:get).with(:storage_cli_config_file_packages).and_return('/no/such/file.json') + expect do + build_client('packages') + end.to raise_error(CloudController::Blobstore::BlobstoreError, /not found or not readable/) + end + + it 'raises when provider is missing from config file' do + File.write(packages_cfg.path, { + azure_storage_access_key: 'bommelkey', + azure_storage_account_name: 'bommel', + container_name: 'bommelcontainer', + environment: 'BommelCloud' + }.to_json) + + expect do + build_client('packages') + end.to raise_error( + CloudController::Blobstore::BlobstoreError, + /No provider specified/ + ) + end + + it 'raises when YAML load fails' do + File.write(packages_cfg.path, { provider: 'AzureRM', + account_key: 'bommelkey', + account_name: 'bommel', + container_name: 'bommelcontainer', + environment: 'BommelCloud' }.to_json) + + expect(VCAP::CloudController::YAMLConfig).to receive(:safe_load_file). + with(packages_cfg.path). + and_raise(StandardError.new('parse blew up')) + + expect do + build_client('packages') + end.to raise_error( + CloudController::Blobstore::BlobstoreError, + /Failed to load storage-cli config/ + ) + end + end + + describe '#exists? exit code handling' do + let(:config_double) { instance_double(VCAP::CloudController::Config) } + let(:droplets_cfg) { Tempfile.new(['droplets', '.json']) } + + before do + droplets_cfg.write({ provider: 'AzureRM', + account_key: 'bommelkey', + account_name: 'bommel', + container_name: 'bommelcontainer', + environment: 'BommelCloud' }.to_json) + droplets_cfg.flush + + allow(VCAP::CloudController::Config).to receive(:config).and_return(config_double) + allow(config_double).to receive(:get).with(:storage_cli_config_file_droplets).and_return(droplets_cfg.path) + + allow(Steno).to receive(:logger).and_return(double(info: nil, error: nil)) + end + + after { droplets_cfg.close! 
} + + let(:client) do + StorageCliClient.build( + directory_key: 'dir', + root_dir: 'root', + resource_type: 'droplets' + ) + end + + it 'returns true on exitstatus 0' do + expect(Open3).to receive(:capture3). + with(kind_of(String), '-c', droplets_cfg.path, 'exists', kind_of(String)). + and_return(['', '', instance_double(Process::Status, success?: true, exitstatus: 0)]) + + expect(client.exists?('key')).to be true + end + + it 'returns false on exitstatus 3' do + expect(Open3).to receive(:capture3). + with(kind_of(String), '-c', droplets_cfg.path, 'exists', kind_of(String)). + and_return(['', '', instance_double(Process::Status, success?: false, exitstatus: 3)]) + + expect(client.exists?('key')).to be false + end + + it 'raises for other non-zero exit codes' do + expect(Open3).to receive(:capture3). + with(kind_of(String), '-c', droplets_cfg.path, 'exists', kind_of(String)). + and_return(['', 'boom', instance_double(Process::Status, success?: false, exitstatus: 2)]) + + expect { client.exists?('key') }.to raise_error(/storage-cli exists failed/) end end end diff --git a/spec/unit/lib/cloud_controller/dependency_locator_spec.rb b/spec/unit/lib/cloud_controller/dependency_locator_spec.rb index ff579e30e5..5c3cc3fe68 100644 --- a/spec/unit/lib/cloud_controller/dependency_locator_spec.rb +++ b/spec/unit/lib/cloud_controller/dependency_locator_spec.rb @@ -77,7 +77,8 @@ it 'creates blob store' do expect(CloudController::Blobstore::ClientProvider).to receive(:provide).with( options: config.get(:resource_pool), - directory_key: 'key' + directory_key: 'key', + resource_type: :resource_pool ) locator.legacy_global_app_bits_cache end @@ -97,7 +98,8 @@ expect(CloudController::Blobstore::ClientProvider).to receive(:provide).with( options: config.get(:resource_pool), directory_key: 'key', - root_dir: 'app_bits_cache' + root_dir: 'app_bits_cache', + resource_type: :resource_pool ) locator.global_app_bits_cache end
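Reviewer note (illustrative, not part of the patch): the new storage_cli_config_file_* entries added to cloud_controller.yml each point at a small JSON file that the client now loads instead of receiving connection_config. Based on the keys required by validate_required_keys! and the fixtures written in the specs above, a droplets config for the AzureRM provider would look roughly like the sketch below; the file name and all values are placeholders, and providers other than AzureRM may need a different key set.

    {
      "provider": "AzureRM",
      "account_name": "<storage-account-name>",
      "account_key": "<storage-account-key>",
      "container_name": "<blobstore-container>",
      "environment": "AzureCloud"
    }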