Skip to content
Snippets Groups Projects
Commit 5e6a4912 authored by Dave Bailey's avatar Dave Bailey
Browse files

Issue #2496033 by steel-track: Upgrade to v3 of AWS SDK and refactor of stream wrapper.

parent 39a74cf5
Branches 2.0.x
No related tags found
No related merge requests found
......@@ -9,41 +9,33 @@ viable under such a configuration.
=========================================
== Dependencies and Other Requirements ==
=========================================
- Libraries API 2.x - https://drupal.org/project/libraries
- AWS SDK for PHP 2.x - https://github.com/aws/aws-sdk-php/releases
- PHP 5.3.3+ is required. The AWS SDK will not work on earlier versions.
- Composer Manager 1.x - https://drupal.org/project/composer_manager
- AWS SDK for PHP 3.x - https://github.com/aws/aws-sdk-php/releases
- PHP 5.5+ is required. The AWS SDK will not work on earlier versions.
- Your PHP must be configured with "allow_url_fopen = On" in your php.ini file.
Otherwise, PHP will be unable to open files that are in your S3 bucket.
==================
== Installation ==
==================
1) Install Libraries version 2.x from http://drupal.org/project/libraries.
2) Install the AWS SDK for PHP.
a) If you have drush, you can install the SDK with this command (executed
from the root folder of your Drupal codebase):
drush make --no-core sites/all/modules/s3fs/s3fs.make
b) If you don't have drush, download the SDK from here:
https://github.com/aws/aws-sdk-php/releases/download/2.7.25/aws.zip
Extract that zip file into your Drupal codebase's
sites/all/libraries/awssdk2 folder such that the path to aws-autoloader.php
is: sites/all/libraries/awssdk2/aws-autoloader.php
IN CASE OF TROUBLE DETECTING THE AWS SDK LIBRARY:
Ensure that the awssdk2 folder itself, and all the files within it, can be read
by your webserver. Usually this means that the user "apache" (or "_www" on OSX)
must have read permissions for the files, and read+execute permissions for all
the folders in the path leading to the awssdk2 files.
1) Install composer manager and follow its instructions for installing the AWS
SDK PHP library. The composer.json file included with this module will set the
version to the latest 3.x.
====================
== Initial Setup ==
====================
With the code installation complete, you must now configure s3fs to use your
Amazon Web Services credentials. To do so, store them in the $conf array in
Amazon Web Services credentials.
The preferred method is to use environment variables or IAM credentials as
outlined here: https://docs.aws.amazon.com/aws-sdk-php/v3/guide/guide/credentials.html
However, you can also set the credentials in the $conf array in
your site's settings.php file (sites/default/settings.php), like so:
$conf['awssdk2_access_key'] = 'YOUR ACCESS KEY';
$conf['awssdk2_secret_key'] = 'YOUR SECRET KEY';
$conf['awssdk_access_key'] = 'YOUR ACCESS KEY';
$conf['awssdk_secret_key'] = 'YOUR SECRET KEY';
Configure your settings for S3 File System (including your S3 bucket name) at
/admin/config/media/s3fs/settings. You can input your AWS credentials on this
......@@ -241,13 +233,12 @@ $conf['s3fs_private_folder'] = 's3fs-private';
$conf['s3fs_presigned_urls'] = "300|presigned-files/*\n60|other-presigned/*";
$conf['s3fs_saveas'] = "videos/*\nfull-size-images/*";
$conf['s3fs_torrents'] = "yarrr/*";
$conf['s3fs_use_instance_profile'] = TRUE or FALSE;
$conf['s3fs_credentials_file'] = '/full/path/to/credentials.ini';
// AWS Credentials use a different prefix than the rest of s3fs's settings
$conf['awssdk2_access_key'] = 'YOUR ACCESS KEY';
$conf['awssdk2_secret_key'] = 'YOUR SECRET KEY';
$conf['awssdk2_use_instance_profile'] = TRUE or FALSE;
$conf['awssdk2_default_cache_config'] = '/path/to/cache';
$conf['awssdk_access_key'] = 'YOUR ACCESS KEY';
$conf['awssdk_secret_key'] = 'YOUR SECRET KEY';
===========================================
== Upgrading from S3 File System 7.x-1.x ==
......@@ -278,6 +269,17 @@ previously put into your bucket into the Root Folder. And if there are other
files in your bucket that you want s3fs to know about, move them into there,
too. Then do a metadata refresh.
===========================================
== Upgrading from S3 File System 7.x-2.x ==
===========================================
Various configuration names were changed to better match the AWS SDK. Please
update these values in your settings.php file:
- awssdk2_access_key -> awssdk_access_key
- awssdk2_secret_key -> awssdk_secret_key
- awssdk2_use_instance_profile -> s3fs_use_instance_profile
- awssdk2_default_cache_config -> s3fs_credentials_file
==================
== Known Issues ==
==================
......
This diff is collapsed.
{
"name": "drupal/s3fs",
"description": "",
"type": "drupal-module",
"license": "GPL-2.0+",
"require": {
"aws/aws-sdk-php": "~3.0"
}
}
......@@ -5,6 +5,8 @@
* Administration form setup for S3 File System.
*/
use Aws\Api;
/**
* Builds the Settings form.
*/
......@@ -36,42 +38,40 @@ function s3fs_settings() {
'#title' => t('Amazon Web Services Credentials'),
'#description' => t(
"To configure your Amazon Web Services credentials, enter the values in the appropriate fields below.
You may instead set \$conf['awssdk2_access_key'] and \$conf['awssdk2_secret_key'] in settings.php."
You may instead set \$conf['awssdk_access_key'] and \$conf['awssdk_secret_key'] in settings.php."
),
'#collapsible' => TRUE,
// Simplify the form by collapsing this fieldset if it's already been configured.
'#collapsed' => _s3fs_get_setting('awssdk2_access_key') ||
_s3fs_get_setting('use_instance_profile'),
'#collapsed' => _s3fs_get_setting('awssdk_access_key') || _s3fs_get_setting('use_instance_profile'),
);
$form['aws_credentials']['s3fs_awssdk2_access_key'] = array(
$form['aws_credentials']['s3fs_awssdk_access_key'] = array(
'#type' => 'textfield',
'#title' => t('Amazon Web Services Access Key'),
'#default_value' => _s3fs_get_setting('awssdk2_access_key'),
'#default_value' => _s3fs_get_setting('awssdk_access_key'),
);
$form['aws_credentials']['s3fs_awssdk2_secret_key'] = array(
$form['aws_credentials']['s3fs_awssdk_secret_key'] = array(
'#type' => 'textfield',
'#title' => t('Amazon Web Services Secret Key'),
'#default_value' => _s3fs_get_setting('awssdk2_secret_key'),
'#default_value' => _s3fs_get_setting('awssdk_secret_key'),
);
$form['aws_credentials']['s3fs_awssdk2_use_instance_profile'] = array(
$form['aws_credentials']['s3fs_use_instance_profile'] = array(
'#type' => 'checkbox',
'#title' => t('Use EC2 Instance Profile Credentials'),
// TODO: Remove the 'use_instance_profile' and 's3fs_use_instance_profile' bits in 3.0.
'#default_value' => _s3fs_get_setting('awssdk2_use_instance_profile') || _s3fs_get_setting('use_instance_profile'),
'#default_value' => _s3fs_get_setting('use_instance_profile'),
'#description' => t(
'If your Drupal site is running on an Amazon EC2 server, you may use the Instance Profile Credentials
from that server rather than setting your AWS credentials directly.'
),
from that server rather than setting your AWS credentials directly.'
),
);
$form['aws_credentials']['s3fs_awssdk2_default_cache_config'] = array(
$form['aws_credentials']['s3fs_credentials_file'] = array(
'#type' => 'textfield',
'#title' => t('Default Cache Location'),
'#default_value' => _s3fs_get_setting('awssdk2_default_cache_config'),
'#description' => t('The default cache location for your EC2 Instance Profile Credentials.'),
'#title' => t('Custom Credentials File Location'),
'#default_value' => _s3fs_get_setting('credentials_file'),
'#description' => t('The custom profile or ini file location. This will use the ini provider instead.'),
'#states' => array(
'visible' => array(
':input[id=edit-s3fs-use-instance-profile]' => array('checked' => TRUE),
......
......@@ -25,7 +25,7 @@
* - 'presigned_url': (boolean) Triggers use of an authenticated URL.
* - 'timeout': (int) Time in seconds before a pre-signed URL times out.
* - 'api_args': array of additional arguments to the getObject() function:
* http://docs.aws.amazon.com/aws-sdk-php/latest/class-Aws.S3.S3Client.html#_getObject
* https://docs.aws.amazon.com/aws-sdk-php/v3/api/api-s3-2006-03-01.html#getobject
* - 'custom_GET_args': (array) Implementing this hook allows you to add
* your own set of custom GET arguments to the S3 URLs of your files.
* If your custom args' keys start with "x-", S3 will ignore them, but
......@@ -57,7 +57,7 @@ function hook_s3fs_url_settings_alter(&$url_settings, $s3_file_path) {
*
* @param array $upload_params
* Associative array of upload settings
* @see http://docs.aws.amazon.com/aws-sdk-php/latest/class-Aws.S3.S3Client.html#_putObject
* @see https://docs.aws.amazon.com/aws-sdk-php/v3/api/api-s3-2006-03-01.html#putobject
*/
function hook_s3fs_upload_params_alter(&$upload_params) {
if (strpos($upload_params['Key'], 'private/') !== FALSE){
......
name = S3 File System
description = Adds an Amazon Simple Storage Service-based remote file system to Drupal.
core = 7.x
php = 5.5
configure = admin/config/media/s3fs/settings
dependencies[] = libraries (2.x)
dependencies[] = composer_manager
; Includes
files[] = S3fsStreamWrapper.inc
......
<?php
use Aws\Sdk;
/**
* @file
* Install, update and uninstall functions for the S3 File System module.
......@@ -47,37 +49,18 @@ function s3fs_requirements($phase) {
);
}
// Check for the Libraries module. This shouldn't really be needed, but if
// the user doesn't have Libraries, the error won't be reported correctly.
if (!function_exists('libraries_detect')) {
$requirements['s3fs_libraries'] = array(
'title' => $t('Libraries module'),
'value' => $t('Not installed.'),
'description' => $t('S3 File System requires the Libraries module. Please install it from !here.',
array('!here' => l('here', 'http://www.drupal.org/project/libraries'))
),
'severity' => REQUIREMENT_ERROR,
);
// Return immediately, since we can't attempt to determine if AWS SDK 2
// is installed.
return $requirements;
}
$library = libraries_detect('awssdk2');
if ($library && !empty($library['installed'])) {
$requirements['s3fs_awssdk2'] = array(
if (class_exists('Aws\Sdk')) {
$requirements['s3fs_awssdk'] = array(
'title' => $t('AWS SDK for PHP'),
'value' => $t('Installed @ v@version.', array('@version' => $library['version'])),
'value' => $t('AWS SDK is installed.'),
'severity' => REQUIREMENT_OK,
);
}
else {
$requirements['s3fs_awssdk2'] = array(
$requirements['s3fs_awssdk'] = array(
'title' => $t('AWS SDK for PHP'),
'value' => $t('The S3 File System module requires the AWS SDK for PHP v2.x library.'),
'description' => $t('The Libraries module reported the following error: !error_message<br>
Please check the installation instructions for S3 File System.', array('!error_message' => filter_xss($library['error message']))
),
'value' => $t('The S3 File System module requires the AWS SDK for PHP v3.x library.'),
'description' => $t('The AWS SDK could not be loaded.'),
'severity' => REQUIREMENT_ERROR,
);
}
......
core = 7.x
api = 2
libraries[awssdk2][download][type]= "get"
libraries[awssdk2][download][url] = "https://github.com/aws/aws-sdk-php/releases/download/2.7.25/aws.zip"
libraries[awssdk2][directory_name] = "awssdk2"
libraries[awssdk2][destination] = "libraries"
......@@ -5,6 +5,11 @@
* Hook implementations and other primary functionality for S3 File System.
*/
use Aws\Credentials\CredentialProvider;
use Aws\S3\S3Client;
use Aws\S3\Exception\S3Exception;
use Aws\Sdk;
/**
* The prefix for our Drupal-cached metadata.
*/
......@@ -22,6 +27,8 @@ class S3fsException extends Exception {}
* Defines the s3:// stream wrapper.
*/
function s3fs_stream_wrappers() {
// On bootstrap of a fresh install the composer autoloader needs to be loaded.
composer_manager_register_autoloader();
return array(
's3' => array(
'name' => 'S3 File System',
......@@ -59,29 +66,6 @@ function s3fs_stream_wrappers_alter(&$wrappers) {
}
}
/**
 * Implements hook_libraries_info().
 *
 * Describes the AWS SDK for PHP v2 library ("awssdk2") to the Libraries API
 * module: where to download it, how to detect its version, and which file
 * to load (the SDK's own autoloader).
 */
function s3fs_libraries_info() {
  // Version detection scans the SDK's Aws class for its VERSION constant.
  $version_detection = array(
    'file' => 'Aws/Common/Aws.php',
    'pattern' => "/const VERSION = '(.*)';/",
    'lines' => 200,
  );

  $awssdk2 = array(
    'title' => 'AWS SDK for PHP',
    'vendor url' => 'http://docs.aws.amazon.com/aws-sdk-php/guide/latest/index.html',
    'download url' => 'https://github.com/aws/aws-sdk-php/releases',
    'version arguments' => $version_detection,
    // Loading aws-autoloader.php makes every SDK class available.
    'files' => array(
      'php' => array('aws-autoloader.php'),
    ),
  );

  return array('awssdk2' => $awssdk2);
}
/**
* Implements hook_menu().
*/
......@@ -246,10 +230,10 @@ function _s3fs_image_style_deliver() {
// acquiring the lock.
$success = file_exists($derivative_uri);
if (!$success) {
// If we successfully generate the derivative, wait until S3 acknolowedges
// If we successfully generate the derivative, wait until S3 acknowledges
// its existence. Otherwise, redirecting to it may cause a 403 error.
$success = image_style_create_derivative($style, $image_uri, $derivative_uri) &&
file_stream_wrapper_get_instance_by_scheme('s3')->waitUntilFileExists($derivative_uri);
$success = image_style_create_derivative($style, $image_uri, $derivative_uri);
file_stream_wrapper_get_instance_by_scheme('s3')->waitUntilFileExists($derivative_uri);
}
if (!empty($lock_acquired)) {
......@@ -307,25 +291,9 @@ function _s3fs_validate_config($config) {
// bucket name, or region are invalid/mismatched.
$s3->listObjects(array('Bucket' => $config['bucket'], 'MaxKeys' => 1));
}
catch (Aws\S3\Exception\InvalidAccessKeyIdException $e) {
form_set_error('s3fs_awssdk2_access_key', t('The Access Key in your AWS credentials is invalid.'));
return FALSE;
}
catch (Aws\S3\Exception\SignatureDoesNotMatchException $e) {
form_set_error('s3fs_awssdk2_secret_key', t('The Secret Key in your AWS credentials is invalid.'));
return FALSE;
}
catch (Aws\S3\Exception\NoSuchBucketException $e) {
form_set_error('s3fs_bucket', t('The specified bucket does not exist.'));
return FALSE;
}
catch (Aws\S3\Exception\PermanentRedirectException $e) {
form_set_error('s3fs_region', t('This bucket exists, but it is not in the specified region.'));
return FALSE;
}
catch (Exception $e) {
form_set_error('form', t('An unexpected %exception occured, with the following error message:<br>%error',
array('%exception' => get_class($e), '%error' => $e->getMessage())));
catch (S3Exception $e) {
form_set_error('form', t('An unexpected %exception occurred, with the following error message:<br>%error',
array('%exception' => $e->getAwsErrorCode(), '%error' => $e->getMessage())));
return FALSE;
}
......@@ -361,10 +329,7 @@ function _s3fs_refresh_cache($config) {
// which reside in the root folder.
$iterator_args['Prefix'] = "{$config['root_folder']}/";
}
$iterator = $s3->getListObjectVersionsIterator($iterator_args);
// NOTE: Setting the maximum page size lower than 1000 will have no effect,
// as stated by the API docs.
$iterator->setPageSize(1000);
$iterator = $s3->getPaginator('ListObjects', $iterator_args);
// The $folders array is an associative array keyed by folder paths, which
// is constructed as each filename is written to the DB. After all the files
......@@ -393,68 +358,65 @@ function _s3fs_refresh_cache($config) {
db_truncate('s3fs_file_temp')->execute();
}
// Set up an event listener to consume each page of results before the next
// request is made.
$dispatcher = $iterator->getEventDispatcher();
$dispatcher->addListener('resource_iterator.before_send', function($event) use (&$file_metadata_list, &$folders) {
_s3fs_write_metadata($file_metadata_list, $folders);
});
foreach ($iterator as $s3_metadata) {
$key = $s3_metadata['Key'];
// The root folder is an implementation detail that only appears on S3.
// Files' URIs are not aware of it, so we need to remove it beforehand.
if (!empty($config['root_folder'])) {
$key = str_replace("{$config['root_folder']}/", '', $key);
}
// Figure out the scheme based on the key's folder prefix.
$public_folder_name = !empty($config['public_folder']) ? $config['public_folder'] : 's3fs-public';
$private_folder_name = !empty($config['private_folder']) ? $config['private_folder'] : 's3fs-private';
if (strpos($key, "$public_folder_name/") === 0) {
// Much like the root folder, the public folder name must be removed from URIs.
$key = str_replace("$public_folder_name/", '', $key);
$uri = "public://$key";
}
else if (strpos($key, "$private_folder_name/") === 0) {
$key = str_replace("$private_folder_name/", '', $key);
$uri = "private://$key";
}
else {
// No special prefix means it's an s3:// file.
$uri = "s3://$key";
foreach ($iterator as $result) {
if (!isset($result['Contents'])) {
continue;
}
foreach ($result['Contents'] as $s3_metadata) {
$key = $s3_metadata['Key'];
// The root folder is an implementation detail that only appears on S3.
// Files' URIs are not aware of it, so we need to remove it beforehand.
if (!empty($config['root_folder'])) {
$key = str_replace("{$config['root_folder']}/", '', $key);
}
if ($uri[strlen($uri) - 1] == '/') {
// Treat objects in S3 whose filenames end in a '/' as folders.
// But don't store the '/' itself as part of the folder's uri.
$folders[rtrim($uri, '/')] = TRUE;
}
else {
// Only store the metadata for the latest version of the file.
if (isset($s3_metadata['IsLatest']) && !$s3_metadata['IsLatest']) {
continue;
// Figure out the scheme based on the key's folder prefix.
$public_folder_name = !empty($config['public_folder']) ? $config['public_folder'] : 's3fs-public';
$private_folder_name = !empty($config['private_folder']) ? $config['private_folder'] : 's3fs-private';
if (strpos($key, "$public_folder_name/") === 0) {
// Much like the root folder, the public folder name must be removed from URIs.
$key = str_replace("$public_folder_name/", '', $key);
$uri = "public://$key";
}
// Files with no StorageClass are actually from the DeleteMarkers list,
// rather than the Versions list. They represent a file which has been
// deleted, so don't cache them.
if (!isset($s3_metadata['StorageClass'])) {
continue;
else {
if (strpos($key, "$private_folder_name/") === 0) {
$key = str_replace("$private_folder_name/", '', $key);
$uri = "private://$key";
}
else {
// No special prefix means it's an s3:// file.
$uri = "s3://$key";
}
}
if ($uri[strlen($uri) - 1] == '/') {
// Treat objects in S3 whose filenames end in a '/' as folders.
// But don't store the '/' itself as part of the folder's uri.
$folders[rtrim($uri, '/')] = TRUE;
}
// Buckets with Versioning disabled set all files' VersionIds to "null".
// If we see that, unset VersionId to prevent "null" from being written
// to the DB.
if (isset($s3_metadata['VersionId']) && $s3_metadata['VersionId'] == 'null') {
unset($s3_metadata['VersionId']);
else {
// Only store the metadata for the latest version of the file.
if (isset($s3_metadata['IsLatest']) && !$s3_metadata['IsLatest']) {
continue;
}
// Files with no StorageClass are actually from the DeleteMarkers list,
// rather than the Versions list. They represent a file which has been
// deleted, so don't cache them.
if (!isset($s3_metadata['StorageClass'])) {
continue;
}
// Buckets with Versioning disabled set all files' VersionIds to "null".
// If we see that, unset VersionId to prevent "null" from being written
// to the DB.
if (isset($s3_metadata['VersionId']) && $s3_metadata['VersionId'] == 'null') {
unset($s3_metadata['VersionId']);
}
$file_metadata_list[] = _s3fs_convert_metadata($uri, $s3_metadata);
}
$file_metadata_list[] = _s3fs_convert_metadata($uri, $s3_metadata);
_s3fs_write_metadata($file_metadata_list, $folders);
}
}
// The event listener doesn't fire after the last page is done, so we have
// to write the last page of metadata manually.
_s3fs_write_metadata($file_metadata_list, $folders);
// Now that the $folders array contains all the ancestors of every file in
// the cache, as well as the existing folders from before the refresh,
// write those folders to the DB.
......@@ -575,7 +537,7 @@ function _s3fs_convert_metadata($uri, $s3_metadata) {
* For performance reasons, only one S3Client object will ever be created
* within a single request.
*
* @param $config Array
* @param array $config
* Array of configuration settings from which to configure the client.
*
* @return Aws\S3\S3Client
......@@ -590,64 +552,57 @@ function _s3fs_get_amazons3_client($config) {
if (!isset($s3) || $static_config != $config) {
// For the SDK credentials, get the saved settings from _s3fs_get_setting(). But since $config might be populated
// with to-be-validated settings, its contents (if set) override the saved settings.
$access_key = _s3fs_get_setting('awssdk2_access_key');
if (isset($config['awssdk2_access_key'])) {
$access_key = $config['awssdk2_access_key'];
$access_key = _s3fs_get_setting('awssdk_access_key');
if (isset($config['awssdk_access_key'])) {
$access_key = $config['awssdk_access_key'];
}
$secret_key = _s3fs_get_setting('awssdk2_secret_key');
if (isset($config['awssdk2_secret_key'])) {
$secret_key = $config['awssdk2_secret_key'];
$secret_key = _s3fs_get_setting('awssdk_secret_key');
if (isset($config['awssdk_secret_key'])) {
$secret_key = $config['awssdk_secret_key'];
}
// BACKWARD COMPATIBILITY.
// TODO: REMOVE IN 3.0
$use_instance_profile = _s3fs_get_setting('use_instance_profile');
if (isset($config['use_instance_profile'])) {
$use_instance_profile = $config['use_instance_profile'];
}
if (_s3fs_get_setting('awssdk2_use_instance_profile') || isset($config['awssdk2_use_instance_profile'])) {
// END BACKWARDS COMPATIBILITY
$use_instance_profile = _s3fs_get_setting('awssdk2_use_instance_profile');
if (isset($config['awssdk2_use_instance_profile'])) {
$use_instance_profile = $config['awssdk2_use_instance_profile'];
}
}
$default_cache_config = _s3fs_get_setting('awssdk2_default_cache_config');
if (isset($config['awssdk2_default_cache_config'])) {
$default_cache_config = $config['awssdk2_default_cache_config'];
$credentials_file = _s3fs_get_setting('credentials_file');
if (isset($config['credentials_file'])) {
$credentials_file = $config['credentials_file'];
}
$library = _s3fs_load_awssdk2_library();
if (!$library['loaded']) {
if (!class_exists('Aws\Sdk')) {
throw new S3fsException(
t('Unable to load the AWS SDK. Please ensure that the awssdk2 library is installed correctly.')
t('Unable to load the AWS SDK. Please ensure that the AWS SDK library is installed correctly.')
);
}
else if (!class_exists('Aws\S3\S3Client')) {
throw new S3fsException(
t('Cannot load Aws\S3\S3Client class. Please ensure that the awssdk2 library is installed correctly.')
);
}
else if (!$use_instance_profile && (!$secret_key || !$access_key)) {
throw new S3fsException(t("Your AWS credentials have not been properly configured.
Please set them on the S3 File System !settings_page or
set \$conf['awssdk2_access_key'] and \$conf['awssdk2_secret_key'] in settings.php.",
array('!settings_page' => l(t('settings page'), 'admin/config/media/s3fs/settings')))
);
}
else if ($use_instance_profile && empty($default_cache_config)) {
throw new s3fsException(t("Your AWS credentials have not been properly configured.
You are attempting to use instance profile credentials but you have not set a default cache location.
Please set it on the !settings_page or set \$conf['awssdk2_default_cache_config'] in settings.php.",
array('!settings_page' => l(t('settings page'), 'admin/config/media/s3fs/settings')))
t('Cannot load Aws\S3\S3Client class. Please ensure that the AWS SDK library is installed correctly.')
);
}
// Create the Aws\S3\S3Client object.
$client_config = array();
// If we have configured credentials locally use them, otherwise let the SDK
// find them per API docs.
// https://docs.aws.amazon.com/aws-sdk-php/v3/guide/guide/configuration.html#id3
if ($use_instance_profile) {
$client_config = array('default_cache_config' => $default_cache_config);
}
else {
$client_config = array(
// If defined path use that otherwise SDK will check home directory.
if ($credentials_file) {
$provider = CredentialProvider::ini(NULL, $credentials_file);
}
else {
// Assume an instance profile provider if no path.
$provider = CredentialProvider::instanceProfile();
}
// Cache the results in a memoize function to avoid loading and parsing
// the ini file on every API operation.
$provider = CredentialProvider::memoize($provider);
$client_config['credentials'] = $provider;
}
// Use defined keys.
elseif (!empty($access_key) && !empty($secret_key)) {
$client_config['credentials'] = array(
'key' => $access_key,
'secret' => $secret_key,
);
......@@ -661,7 +616,10 @@ function _s3fs_get_amazons3_client($config) {
if (!empty($config['use_customhost']) && !empty($config['hostname'])) {
$client_config['base_url'] = $config['hostname'];
}
$s3 = Aws\S3\S3Client::factory($client_config);
// S3 Service only supports 2006-03-01 API version currently. V3 requires
// an explicit version declaration, and use of 'latest' is discouraged.
$client_config['version'] = '2006-03-01';
$s3 = new S3Client($client_config);
}
$static_config = $config;
return $s3;
......@@ -712,7 +670,7 @@ function _s3fs_get_config($reset=FALSE) {
/**
* Internal function to retrieve the value of a specific setting, taking overrides in settings.php into account.
*
* This function is most useful on the config form and for retrieving the awssdk2 settings.
* This function is most useful on the config form and for retrieving the AWS SDK settings.
* _s3fs_get_config() should be used in most other cases.
*
* @param string $setting
......@@ -726,63 +684,9 @@ function _s3fs_get_setting($setting, $default='') {
// Get the value from _s3fs_get_config(), if it's set. This will include any overrides from settings.php.
$value = !empty($config[$setting]) ? $config[$setting] : $default;
// The SDK settings are treated a little differently, as they have overrides in settings.php with different names.
if ($setting == 'awssdk2_access_key' && isset($conf['awssdk2_access_key'])) {
$value = $conf['awssdk2_access_key'];
}
if ($setting == 'awssdk2_secret_key' && isset($conf['awssdk2_secret_key'])) {
$value = $conf['awssdk2_secret_key'];
}
// This OR check is for backwards compatibility with the old s3fs_use_instance_profile variable.
// TODO: Remove it in 3.0.
if (($setting == 'awssdk2_use_instance_profile' || $setting == 'use_instance_profile') && isset($conf['awssdk2_use_instance_profile'])) {
$value = $conf['awssdk2_use_instance_profile'];
}
if ($setting == 'awssdk2_default_cache_config' && isset($conf['awssdk2_default_cache_config'])) {
$value = $conf['awssdk2_default_cache_config'];
}
return $value;
}
/**
 * Loads the awssdk2 library, bypassing Libraries' cached failure state.
 *
 * This replaces a plain libraries_load('awssdk2') call. libraries_load()
 * caches load failures, so a temporarily broken setup (e.g. nonexistent or
 * unreadable files in the awssdk2 folder) can leave the library permanently
 * marked as unloadable — even after the setup is repaired — until the full
 * site cache is cleared.
 *
 * That is especially disastrous when upgrading the awssdk2 library on a
 * system currently using it: if the upgrade leaves a bad setup, clearing the
 * site cache may itself become impossible. If another module's data is
 * cached in S3 (e.g. ctools css cache), the cache clear will try to use
 * S3FS, but while Libraries' stale cache still insists awssdk2 is missing,
 * the clear crashes — s3fs cannot function without the SDK — leaving the
 * site in an unrecoverable broken state.
 *
 * @return array
 *   The array returned by libraries_load('awssdk2'), as if it used no cache.
 */
function _s3fs_load_awssdk2_library() {
  // First attempt: the normal (possibly cached) Libraries load.
  $result = libraries_load('awssdk2');
  if (!empty($result['loaded'])) {
    // Loaded fine — the cache was not hiding a working library.
    return $result;
  }
  // The load failed, but that may be a stale cached failure. Wipe the
  // awssdk2 entry from the Libraries cache and libraries_load()'s static
  // data, then retry to get the library's real, current state.
  cache_clear_all('awssdk2', 'cache_libraries');
  drupal_static_reset('libraries_load');
  return libraries_load('awssdk2');
}
/**
* Copies all the local files from the specified file system into S3.
*/
......
......@@ -14,19 +14,12 @@ else {
// DrupalWebTestcase.
class S3fsTests extends SimpleTestCloneTestcase {
/**
* These should be edited to match your site's actual setup.
*/
// WARNING: DO NOT SET THIS TO YOUR SITE'S REAL BUCKET.
// These tests will erase the ENTIRE CONTENTS of the bucket.
protected $s3_bucket = 's3fs-testing-bucket';
protected $s3_region = 'us-east-1';
/**
* State variables.
*/
protected $bucket_not_found = FALSE;
protected $remote_tests_folder = '_s3fs_tests';
protected $remote_tests_folder_key = '_s3fs_tests';
protected $remote_tests_folder_uri = 's3://_s3fs_tests';
public static function getInfo() {
......@@ -39,22 +32,21 @@ else {
function setUp() {
parent::setUp('s3fs');
variable_set('s3fs_bucket', $this->s3_bucket);
variable_set('s3fs_region', $this->s3_region);
// Empty out the bucket before the test, to prevent unexpected errors.
$this->s3 = _s3fs_get_amazons3_client(_s3fs_get_config());
try {
$deleted_files_count = $this->s3->clearBucket($this->s3_bucket);
debug("Deleted $deleted_files_count file(s) from S3 to prepare for the test.");
$this->config = _s3fs_get_config();
$this->s3 = _s3fs_get_amazons3_client($this->config);
if (!empty($this->config['root_folder'])) {
$this->remote_tests_folder_key = "{$this->config['root_folder']}/$this->remote_tests_folder";
}
catch (Aws\S3\Exception\NoSuchBucketException $e) {
$this->bucket_not_found = TRUE;
// Adding this to ensure it's set.
$this->public_files_directory = variable_get('file_public_path');
// Prevent issues with derivative tokens during test.
variable_set('image_allow_insecure_derivatives', TRUE);
$this->bucket_not_found = !$this->s3->doesBucketExist($this->config['bucket']);
if (!$this->bucket_not_found) {
$this->s3->deleteMatchingObjects($this->config['bucket'], $this->remote_tests_folder_key);
debug("Deleted file(s) from S3 test folder to prepare for the test.");
}
global $_s3fs_debug, $_s3fs_debug_internal;
$_s3fs_debug = TRUE;
// Normally we want to hide the debug log messages for the wrapper's
// internal functions. But setting this to TRUE will unhide them.
$_s3fs_debug_internal = FALSE;
}
/**
......@@ -64,60 +56,60 @@ else {
// This is here, rather than the setUp() function, because we want the
// test to immediately exit if this happens.
if ($this->bucket_not_found) {
$this->fail("The '{$this->s3_bucket}' bucket does not exist in the '{$this->s3_region}' region on your AWS account.
$this->fail("The '{$this->config['bucket']}' bucket does not exist in the '{$this->s3_region}' region on your AWS account.
Either edit the protected properties of the S3fsTests class, or create a bucket called 's3fs-testing-bucket' in the 'us-east-1' region.");
return;
}
$test_uri1 = "{$this->remote_tests_folder_uri}/test_file.txt";
$test_uri2 = "{$this->remote_tests_folder_uri}/test_file2.txt";
$this->assertTrue(file_stream_wrapper_valid_scheme('s3'), '"s3" is a valid stream wrapper scheme.');
$this->assertEqual(file_stream_wrapper_get_class('s3'), 'S3fsStreamWrapper', 'URIs with scheme "s3" should be handled by S3fsStreamWrapper.');
// The test.txt file is enough data to force multiple calls to write_stream().
$file_contents = file_get_contents(drupal_get_path('module', 's3fs') . '/tests/test.txt');
$this->assertTrue(drupal_mkdir($this->remote_tests_folder_uri), 'Exercised mkdir to create the testing directory (in the DB).');
$this->assertTrue(is_dir($this->remote_tests_folder_uri), 'Make sure the folder we just created correctly reports that it is a folder.');
debug("Exercising file upload functionality.");
$start_time = time();
$s3_file = file_save_data($file_contents, $test_uri1);
$end_time = time();
$total = $end_time - $start_time;
debug("Upload time: $total seconds");
$this->assertTrue(file_valid_uri($s3_file->uri), "Uploaded the first test file, $test_uri1.");
debug("Exercising file copy functionality.");
$s3_file2 = file_copy($s3_file, $test_uri2);
$this->assertNotIdentical($s3_file2, FALSE, "Copied the the first test file to $test_uri2.");
debug('Exercising the dir_*() functions.');
$files = file_scan_directory($this->remote_tests_folder_uri, '#.*#');
$this->assertTrue(isset($files[$test_uri1]), 'The first test file is in the tests directory.');
$this->assertTrue(isset($files[$test_uri2]), 'The second test file is in the tests directory.');
$this->assertEqual(count($files), 2, "There are exactly two files in the tests directory.");
debug('Exercising getExternalUrl().');
$url = file_create_url($test_uri1);
$this->assertNotIdentical($url, FALSE, 'file_create_url() succeeded.');
debug('Exercising unlink().');
$this->assertIdentical(file_delete($s3_file), TRUE, "Deleted the first test file.");
$this->assertIdentical(file_exists($test_uri1), FALSE, 'The wrapper reports that the first test file no longer exists.');
debug('Exercising rename().');
$this->assertTrue(rename($test_uri2, $test_uri1), "Renamed the second test file to the newly-vacated URI of $test_uri1.");
$s3_file2->uri = $test_uri1;
debug('Exercising rmdir().');
$this->assertFalse(drupal_rmdir($this->remote_tests_folder_uri), 'rmdir() did not delete the tests folder because it is not empty.');
$this->assertTrue(file_delete($s3_file2), 'Deleted the last test file.');
$this->assertTrue(drupal_rmdir($this->remote_tests_folder_uri), 'Deleted the tests folder.');
$this->assertFalse(is_dir($this->remote_tests_folder_uri), 'The wrapper reports that the tests folder is gone.');
}
/**
* Test the image derivative functionality.
*/
......@@ -125,27 +117,26 @@ else {
// This is here, rather than the setUp() function, because we want the
// test to immediately exit if this happens.
if ($this->bucket_not_found) {
$this->fail("The '{$this->s3_bucket}' bucket does not exist in the '{$this->s3_region}' region on your AWS account.
$this->fail("The '{$this->config['bucket']}' bucket does not exist in the '{$this->s3_region}' region on your AWS account.
Either edit the protected properties of the S3fsTests class, or create a bucket called 's3fs-testing-bucket' in the 'us-east-1' region.");
return;
}
$img_uri1 = "{$this->remote_tests_folder_uri}/test.png";
$img_path1 = "{$this->remote_tests_folder}/test.png";
$img_localpath = drupal_get_path('module', 's3fs') . '/tests/test.png';
// Upload the test image.
$this->assertTrue(drupal_mkdir($this->remote_tests_folder_uri), 'Created the testing directory in the DB.');
$img_data = file_get_contents($img_localpath);
$img_file = file_save_data($img_data, $img_uri1);
$this->assertTrue($img_file, "Copied the the test image to $img_uri1.");
// Request a derivative.
// If you're using Xdebug and the Eclipse IDE, the XDEBUG_SESSION_START
// query arg will make this drupalGet() call debuggable.
$derivative = $this->drupalGet(image_style_url('thumbnail', $img_uri1), array('query' => array('XDEBUG_SESSION_START' => 'ECLIPSE_DPGP')));
// Parse query parameters to ensure they get passed.
$style_url_parsed = drupal_parse_url(image_style_url('thumbnail', $img_uri1));
$derivative = $this->drupalGet($style_url_parsed['path'], array('query' => $style_url_parsed['query']));
$this->assertTrue(imagecreatefromstring($derivative), 'The returned derivative is a valid image.');
}
/**
* Test the cache refresh.
*/
......@@ -153,7 +144,7 @@ else {
// This is here, rather than the setUp() function, because we want the
// test to immediately exit if this happens.
if ($this->bucket_not_found) {
$this->fail("The '{$this->s3_bucket}' bucket does not exist in the '{$this->s3_region}' region on your AWS account.
$this->fail("The '{$this->config['bucket']}' bucket does not exist in the '{$this->s3_region}' region on your AWS account.
Either edit the protected properties of the S3fsTests class, or create a bucket called 's3fs-testing-bucket' in the 'us-east-1' region.");
return;
}
......@@ -162,12 +153,19 @@ else {
// s3fs won't cache them.
$filenames = array('files/test2.txt', 'parts/test3.txt', 'test.txt', );
foreach ($filenames as $filename) {
$this->s3->upload($this->s3_bucket, $filename, 'testtesttest', 'public-read');
$filename = $this->remote_tests_folder_key . '/' . $filename;
$this->s3->putObject(
array(
'Bucket' => $this->config['bucket'],
'Key' => $filename,
'ACL' => 'public-read',
)
);
}
// Run a regular cache refresh, being sure to use no root_folder.
$config = _s3fs_get_config();
$config['root_folder'] = '';
// Set the current test folder as the root prefix.
$config['root_folder'] = $this->remote_tests_folder_key;
_s3fs_refresh_cache($config);
// Query the DB to confirm that all the new files are cached.
......@@ -184,7 +182,7 @@ else {
// Flush the cache, then do a refresh using the root_folder setting.
// Only the file in the root folder (test3.txt) should become cached.
$delete_query = db_delete('s3fs_file')->execute();
$config['root_folder'] = 'parts';
$config['root_folder'] = $this->remote_tests_folder_key . '/parts';
_s3fs_refresh_cache($config);
// Confirm that only the file in the "parts" folder was cached.
......@@ -196,5 +194,13 @@ else {
$this->assertEqual(count($records), 1, 'There was only one file in the partially rereshed cache.');
$this->assertEqual($records[0]->uri, 's3://test3.txt', 'That file was the one in the "parts" folder, which is now the root folder, so "parts" is not in the URI.');
}
/**
 * Cleans up the S3 testing folder, then performs standard test teardown.
 *
 * Deletes every object the test run uploaded under the remote tests folder
 * key so that subsequent runs start against a clean bucket, then delegates
 * to the parent class for the usual DrupalWebTestCase environment teardown.
 */
public function tearDown() {
  // Only attempt the S3 cleanup when setUp() confirmed the bucket exists
  // and the client was initialized; otherwise deleteMatchingObjects() would
  // throw here and mask the original test failure.
  if (empty($this->bucket_not_found) && !empty($this->s3)) {
    $this->s3->deleteMatchingObjects($this->config['bucket'], $this->remote_tests_folder_key);
  }
  parent::tearDown();
}
} // END S3fsTests class
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment