123456789_123456789_123456789_123456789_123456789_

Class: ActiveStorage::Service::S3Service

Relationships & Source Files
Super Chains via Extension / Inclusion / Inheritance
Class Chain:
Instance Chain:
Inherits: ActiveStorage::Service
Defined in: activestorage/lib/active_storage/service/s3_service.rb

Overview

Active Storage S3 Service

Wraps the Amazon Simple Storage Service (S3) as an Active Storage service. See ::ActiveStorage::Service for the generic API documentation that applies to all services.

Constant Summary

Class Method Summary

::ActiveStorage::Service - Inherited

.configure

Configure an Active Storage service by name from a set of configurations, typically loaded from a YAML file.

.build

Override in subclasses that stitch together multiple services and hence need to build additional services using the configurator.

::ActiveSupport::Autoload - Extended

Instance Attribute Summary

Instance Method Summary

::ActiveStorage::Service - Inherited

#compose

Concatenate multiple files into a single “composed” file.

#delete

Delete the file at the key.

#delete_prefixed

Delete files at keys starting with the prefix.

#download

Return the content of the file at the key.

#download_chunk

Return the partial content in the byte range of the file at the key.

#exist?

Return true if a file exists at the key.

#headers_for_direct_upload

Returns a ::Hash of headers for #url_for_direct_upload requests.

#open,
#update_metadata

Update metadata for the file identified by key in the service.

#upload

Upload the io to the key specified.

#url

Returns the URL for the file at the key.

#url_for_direct_upload

Returns a signed, temporary URL that a direct upload file can be PUT to on the key.

#content_disposition_with, #custom_metadata_headers, #instrument, #private_url, #public_url, #service_name

Constructor Details

.new(bucket:, upload: {}, public: false, **options) ⇒ S3Service

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 17

# Builds the S3 client and resolves the target bucket. Remaining +options+
# are forwarded to Aws::S3::Resource.new (region, credentials, etc.).
# NOTE: :multipart_threshold is deleted from the +upload+ hash (mutates the
# caller's hash); everything left in +upload+ is passed through to the SDK
# on each upload call.
def initialize(bucket:, upload: {}, public: false, **options)
  @client = Aws::S3::Resource.new(**options)
  @bucket = @client.bucket(bucket)

  # Files at or above this size are uploaded in multiple parts (default 100 MB).
  @multipart_upload_threshold = upload.delete(:multipart_threshold) || 100.megabytes
  @public = public

  @upload_options = upload
  # Public services mark every uploaded object world-readable.
  @upload_options[:acl] = "public-read" if public?
end

Instance Attribute Details

#bucket (readonly)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 14

attr_reader :client, :bucket # Aws::S3::Resource client and its target bucket

#client (readonly)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 14

attr_reader :client, :bucket # Aws::S3::Resource client and its target bucket

#multipart_upload_threshold (readonly)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 15

attr_reader :multipart_upload_threshold, :upload_options # size cutoff for multipart uploads; SDK options applied to every upload

#upload_options (readonly)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 15

attr_reader :multipart_upload_threshold, :upload_options # size cutoff for multipart uploads; SDK options applied to every upload

Instance Method Details

#compose(source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 100

# Concatenates the objects at +source_keys+ into a single object stored at
# +destination_key+, streaming each source in chunks through a multipart
# upload. A Content-Disposition header is set only when both +disposition+
# and +filename+ are given. Custom metadata keys are stored as
# x-amz-meta-* object metadata.
def compose(source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})
  content_disposition = content_disposition_with(type: disposition, filename: filename) if disposition && filename

  object_for(destination_key).upload_stream(
    content_type: content_type,
    content_disposition: content_disposition,
    part_size: MINIMUM_UPLOAD_PART_SIZE,
    metadata: custom_metadata_headers(custom_metadata),
    **upload_options
  ) do |out|
    source_keys.each do |source_key|
      stream(source_key) do |chunk|
        IO.copy_stream(StringIO.new(chunk), out)
      end
    end
  end
end

#custom_metadata_headers(metadata) (private)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 167

# Prefixes each custom metadata key with "x-amz-meta-" so S3 stores the
# pair as user-defined object metadata. Values are left untouched.
def custom_metadata_headers(metadata)
  metadata.transform_keys { |key| "x-amz-meta-#{key}" }
end

#delete(key)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 62

# Deletes the object at +key+. Instrumented as :delete; deleting a missing
# key is a no-op at the S3 level rather than an error.
def delete(key)
  instrument :delete, key: key do
    object_for(key).delete
  end
end

#delete_prefixed(prefix)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 68

# Batch-deletes every object whose key starts with +prefix+, using the
# SDK's batched DeleteObjects operation.
def delete_prefixed(prefix)
  instrument :delete_prefixed, prefix: prefix do
    bucket.objects(prefix: prefix).batch_delete!
  end
end

#download(key, &block)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 40

# Returns the full contents of the object at +key+ as a BINARY-encoded
# string, or — when a block is given — streams the object to the block in
# chunks instead. Raises ActiveStorage::FileNotFoundError when the object
# does not exist.
def download(key, &block)
  if block_given?
    instrument :streaming_download, key: key do
      stream(key, &block)
    end
  else
    instrument :download, key: key do
      # Force BINARY so callers receive raw bytes regardless of default encoding.
      object_for(key).get.body.string.force_encoding(Encoding::BINARY)
    rescue Aws::S3::Errors::NoSuchKey
      raise ActiveStorage::FileNotFoundError
    end
  end
end

#download_chunk(key, range)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 54

# Returns the bytes of the object at +key+ covered by +range+ via an HTTP
# Range request. Handles both inclusive and exclusive-end Ruby Ranges by
# converting to S3's inclusive byte-range syntax. Raises
# ActiveStorage::FileNotFoundError when the object does not exist.
def download_chunk(key, range)
  instrument :download_chunk, key: key, range: range do
    object_for(key).get(range: "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}").body.string.force_encoding(Encoding::BINARY)
  rescue Aws::S3::Errors::NoSuchKey
    raise ActiveStorage::FileNotFoundError
  end
end

#exist?(key) ⇒ Boolean

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 74

# Returns true if an object exists at +key+; the answer is also recorded
# in the instrumentation payload under :exist.
def exist?(key)
  instrument :exist, key: key do |payload|
    answer = object_for(key).exists?
    payload[:exist] = answer
    answer
  end
end

#headers_for_direct_upload(key, content_type:, checksum:, filename: nil, disposition: nil, custom_metadata: {})

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 94

# Returns the headers a client must send with a direct-upload PUT:
# Content-Type, the Content-MD5 checksum, an optional Content-Disposition
# (only when +filename+ is present), and one x-amz-meta-* header per
# custom metadata entry. Extra keyword arguments are accepted and ignored.
def headers_for_direct_upload(key, content_type:, checksum:, filename: nil, disposition: nil, custom_metadata: {}, **)
  content_disposition = content_disposition_with(type: disposition, filename: filename) if filename

  { "Content-Type" => content_type, "Content-MD5" => checksum, "Content-Disposition" => content_disposition, **custom_metadata_headers(custom_metadata) }
end

#object_for(key) (private)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 148

# Returns the Aws::S3::Object handle for +key+ within the service's bucket.
def object_for(key)
  bucket.object(key)
end

#private_url(key, expires_in:, filename:, disposition:, content_type:, **client_opts) (private)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 119

# Generates a signed GET URL valid for +expires_in+ seconds that overrides
# the response's Content-Disposition and Content-Type headers.
def private_url(key, expires_in:, filename:, disposition:, content_type:, **client_opts)
  object_for(key).presigned_url :get, expires_in: expires_in.to_i,
    response_content_disposition: content_disposition_with(type: disposition, filename: filename),
    response_content_type: content_type, **client_opts
end

#public_url(key, **client_opts) (private)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 125

# Returns the unsigned, permanent public URL for the object at +key+.
def public_url(key, **client_opts)
  object_for(key).public_url(**client_opts)
end

#stream(key) (private)

Reads the object for the given key in chunks, yielding each to the block.

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 153

# Reads the object for the given key in 5 MB ranged GETs, yielding each
# chunk to the block as a BINARY-encoded string. Raises
# ActiveStorage::FileNotFoundError when the object does not exist.
def stream(key)
  object = object_for(key)

  chunk_size = 5.megabytes
  offset = 0

  raise ActiveStorage::FileNotFoundError unless object.exists?

  while offset < object.content_length
    # The final range may extend past the object's end; S3 returns only the available bytes.
    yield object.get(range: "bytes=#{offset}-#{offset + chunk_size - 1}").body.string.force_encoding(Encoding::BINARY)
    offset += chunk_size
  end
end

#upload(key, io, checksum: nil, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 28

# Uploads +io+ to +key+, choosing single-part or multipart transfer based
# on the configured multipart_upload_threshold. A Content-Disposition
# header is set only when both +disposition+ and +filename+ are given.
# Extra keyword arguments are accepted and ignored.
def upload(key, io, checksum: nil, filename: nil, content_type: nil, disposition: nil, custom_metadata: {}, **)
  instrument :upload, key: key, checksum: checksum do
    content_disposition = content_disposition_with(filename: filename, type: disposition) if disposition && filename

    if io.size < multipart_upload_threshold
      upload_with_single_part key, io, checksum: checksum, content_type: content_type, content_disposition: content_disposition, custom_metadata: custom_metadata
    else
      upload_with_multipart key, io, content_type: content_type, content_disposition: content_disposition, custom_metadata: custom_metadata
    end
  end
end

#upload_with_multipart(key, io, content_type: nil, content_disposition: nil, custom_metadata: {}) (private)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 139

# Streams +io+ to +key+ as a multipart upload. The part size is chosen so
# the upload fits within S3's maximum part count while never dropping
# below the minimum allowed part size.
def upload_with_multipart(key, io, content_type: nil, content_disposition: nil, custom_metadata: {})
  part_size = [ io.size.fdiv(MAXIMUM_UPLOAD_PARTS_COUNT).ceil, MINIMUM_UPLOAD_PART_SIZE ].max

  object_for(key).upload_stream(content_type: content_type, content_disposition: content_disposition, part_size: part_size, metadata: custom_metadata_headers(custom_metadata), **upload_options) do |out|
    IO.copy_stream(io, out)
  end
end

#upload_with_single_part(key, io, checksum: nil, content_type: nil, content_disposition: nil, custom_metadata: {}) (private)

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 133

# Uploads +io+ to +key+ in a single PUT. When a +checksum+ is given, S3
# verifies it server-side; a mismatch surfaces as
# ActiveStorage::IntegrityError.
def upload_with_single_part(key, io, checksum: nil, content_type: nil, content_disposition: nil, custom_metadata: {})
  object_for(key).put(body: io, content_md5: checksum, content_type: content_type, content_disposition: content_disposition, metadata: custom_metadata_headers(custom_metadata), **upload_options)
rescue Aws::S3::Errors::BadDigest
  raise ActiveStorage::IntegrityError
end

#url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:, custom_metadata: {})

[ GitHub ]

  
# File 'activestorage/lib/active_storage/service/s3_service.rb', line 82

# Returns a signed, expiring URL that a client can PUT a file to directly.
# The signature pins content type, length, MD5 checksum, and any custom
# metadata; only the content-length header is whitelisted for variation.
# The generated URL is also recorded in the instrumentation payload.
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:, custom_metadata: {})
  instrument :url, key: key do |payload|
    generated_url = object_for(key).presigned_url :put, expires_in: expires_in.to_i,
      content_type: content_type, content_length: content_length, content_md5: checksum,
      metadata: custom_metadata_headers(custom_metadata), whitelist_headers: ["content-length"], **upload_options

    payload[:url] = generated_url

    generated_url
  end
end