From c124637b52a6770cee4e79341c8524fb68da7a43 Mon Sep 17 00:00:00 2001 From: danneh3826 Date: Thu, 20 Jan 2011 20:34:46 +0000 Subject: [PATCH] aws_s3 module v2 commit --- 3.0/modules/aws_s3/changelog.txt | 24 ++ .../aws_s3/controllers/admin_aws_s3.php | 161 +++++++-- 3.0/modules/aws_s3/controllers/aws_s3.php | 9 + 3.0/modules/aws_s3/helpers/MY_item.php | 4 +- 3.0/modules/aws_s3/helpers/aws_s3.php | 308 +++++++++++++----- 3.0/modules/aws_s3/helpers/aws_s3_event.php | 50 +-- .../aws_s3/helpers/aws_s3_installer.php | 142 +++++--- 3.0/modules/aws_s3/helpers/aws_s3_task.php | 259 ++++++++++----- 3.0/modules/aws_s3/lib/s3.php | 8 +- 3.0/modules/aws_s3/models/MY_Item_Model.php | 94 ++++-- 3.0/modules/aws_s3/models/aws_s3_meta.php | 6 + 3.0/modules/aws_s3/module.info | 2 +- 3.0/modules/aws_s3/todo.txt | 10 + .../aws_s3/views/admin_aws_s3.html.php | 3 + 14 files changed, 783 insertions(+), 297 deletions(-) create mode 100644 3.0/modules/aws_s3/changelog.txt create mode 100644 3.0/modules/aws_s3/controllers/aws_s3.php create mode 100644 3.0/modules/aws_s3/models/aws_s3_meta.php create mode 100644 3.0/modules/aws_s3/todo.txt diff --git a/3.0/modules/aws_s3/changelog.txt b/3.0/modules/aws_s3/changelog.txt new file mode 100644 index 00000000..04268b90 --- /dev/null +++ b/3.0/modules/aws_s3/changelog.txt @@ -0,0 +1,24 @@ +2011-01-20: Version 2 + +Added features +-------------- +- upload select types of images only (thumbs/resizes/fullsizes) +- schedule initial synchronisation during gallery shutdown to prevent browser hanging on large installations +- schedule upload of new items after upload during gallery shutdown to prevent browser hanging on slower + uplink connections (dsl home servers, etc) + +Bug fixes +--------- +- added verification routine to verify entered bucket name and access credentials to avoid false hope +- added check in module's installed to prevent installation should curl not be installed, as the s3 library + depends on it +- resolved some issues with cooliris slideshow player not working correctly, due to browsers blocking ajax calls + for images since it's cross-domain. the module will now allow the browser to pull the images from the local server + to prevent any xdr issues +- fixed problems moving items around. the module now moves content around the s3 bucket to coincide with the local + filesystem and g3's perception of items and albums +- resolved problems with odd characters in filenames. this actually wasn't to do with odd characters but to do with + the filename being escaped twice, causing file not found errors + +2010-11-26: Version 1 +- Initial release. \ No newline at end of file diff --git a/3.0/modules/aws_s3/controllers/admin_aws_s3.php b/3.0/modules/aws_s3/controllers/admin_aws_s3.php index 8ce2e7ea..4e663f56 100644 --- a/3.0/modules/aws_s3/controllers/admin_aws_s3.php +++ b/3.0/modules/aws_s3/controllers/admin_aws_s3.php @@ -3,37 +3,90 @@ class Admin_Aws_S3_Controller extends Admin_Controller { public function index() { - // require_once(MODPATH . "aws_s3/lib/s3.php"); - $form = $this->_get_s3_form(); if (request::method() == "post") { access::verify_csrf(); - if ($form->validate()) { - module::set_var("aws_s3", "enabled", (isset($_POST['enabled']) ? 
true : false)); - module::set_var("aws_s3", "access_key", $_POST['access_key']); - module::set_var("aws_s3", "secret_key", $_POST['secret_key']); - module::set_var("aws_s3", "bucket_name", $_POST['bucket_name']); - module::set_var("aws_s3", "g3id", $_POST['g3id']); + if (($valid_form = $form->validate()) && + ($s3_axs_correct = aws_s3::validate_access_details($_POST['access_key'], $_POST['secret_key'], $_POST['bucket_name']))) { - module::set_var("aws_s3", "url_str", $_POST['url_str']); - module::set_var("aws_s3", "sig_exp", $_POST['sig_exp']); + // get variable values before changes so we can act on certain changes later + $vars = array(); + foreach (ORM::factory("var")->where("module_name", "=", "aws_s3")->find_all() as $var) { + $vars[$var->name] = $var->value; + } + + // set variables from $_POST into module::set_var() to save + module::set_var("aws_s3", "enabled", (isset($_POST['enabled']) ? true : false)); + module::set_var("aws_s3", "access_key", $_POST['access_key']); + module::set_var("aws_s3", "secret_key", $_POST['secret_key']); + module::set_var("aws_s3", "bucket_name", $_POST['bucket_name']); + site_status::clear("aws_s3_not_configured"); - module::set_var("aws_s3", "use_ssl", (isset($_POST['use_ssl']) ? true : false)); + module::set_var("aws_s3", "g3id", $_POST['g3id']); - if (module::get_var("aws_s3", "enabled") && !module::get_var("aws_s3", "synced", false)) + module::set_var("aws_s3", "url_str", $_POST['url_str']); + module::set_var("aws_s3", "sig_exp", $_POST['sig_exp']); + + module::set_var("aws_s3", "use_ssl", (isset($_POST['use_ssl']) ? true : false)); + + module::set_var("aws_s3", "upload_thumbs", (isset($_POST['upload_thumbs']) ? true : false)); + module::set_var("aws_s3", "upload_resizes", (isset($_POST['upload_resizes']) ? true : false)); + module::set_var("aws_s3", "upload_fullsizes", (isset($_POST['upload_fullsizes']) ? true : false)); + + module::set_var("aws_s3", "s3_storage_only", (isset($_POST['s3_storage_only']) ? true : false)); + + // post option processing +// if (module::get_var("aws_s3", "s3_storage_only") && !module::get_var("aws_s3", "enabled")) { +// module::set_var("aws_s3", "enabled", true); +// module::set_var("aws_s3", "upload_thumbs", true); +// module::set_var("aws_s3", "upload_resizes", true); +// module::set_var("aws_s3", "upload_fullsizes", true); +// } +// if (module::get_var("aws_s3", "s3_storage_only") && !$vars['s3_storage_only']) { +// // content needs remove from local storage as it wasn't switched on before this point. +// if (!module::get_var("aws_s3", "synced")) { +// // force a sync between local storage and S3, as we're about to remove content from local storage. +// } +// } +// else if (!module::get_var("aws_s3", "s3_storage_only") && $vars['s3_storage_only']) { +// // content needs to be downloaded from s3 as it was just switched off. at this point, +// // we shouldn't actually have a copy of the gallery content locally. +// } + + if (module::get_var("aws_s3", "enabled") && !module::get_var("aws_s3", "synced", false)) { + if (aws_s3::can_schedule()) { + // i can schedule this task + aws_s3::schedule_full_sync2(); site_status::warning( - t('Your site has not yet been syncronised with your Amazon S3 bucket. Content will not appear correctly until you perform syncronisation. Fix this now', - array("url" => html::mark_clean(url::site("admin/maintenance/start/aws_s3_task::sync?csrf=__CSRF__"))) - ), "aws_s3_not_synced"); - + "Your site has been scheduled for full Amazon S3 re-synchronisation. 
This message will clear when this has been completed.", + "aws_s3_not_synced" + ); + } + else { + // i CAN'T schedule it.. + site_status::warning( + t('Your site has not been synchronised to Amazon S3. Until it has, your server will continue to serve image content to your visitors. Click here to start the synchronisation task.', + array("url" => html::mark_clean(url::site("admin/maintenance/start/aws_s3_task::manual_sync?csrf=__CSRF__"))) + ), + "aws_s3_not_synced" + ); + } + } message::success(t("Settings have been saved")); url::redirect("admin/aws_s3"); } else { - message::error(t("There was a problem with the submitted form. Please check your values and try again.")); + if (!$valid_form) + message::error(t("There was a problem with the submitted form. Please check your values and try again.")); + if (!$s3_axs_correct) { + message::error(t("The Amazon S3 access details provided appear to be incorrect. Please check your values and try again.")); + $form->aws_s3->access_key->add_error("invalid", true); + $form->aws_s3->secret_key->add_error("invalid", true); + $form->aws_s3->bucket_name->add_error("invalid", true); + } } } @@ -51,10 +104,15 @@ class Admin_Aws_S3_Controller extends Admin_Controller { $group = $form->group("aws_s3")->label(t("Amazon S3 Settings")); - $group ->checkbox("enabled") - ->id("s3-enabled") - ->checked(module::get_var("aws_s3", "enabled")) - ->label("S3 enabled"); + $chkbox = + $group ->checkbox("enabled") + ->id("s3-enabled") + ->checked(module::get_var("aws_s3", "enabled", true)) + ->label("S3 enabled"); + + if (module::get_var("aws_s3", "s3_storage_only")) + $chkbox->disabled(true) + ->message("Warning:You may not turn this option off as S3 Storage Only is enabled. In order to disable using S3, you must first disable S3 Storage Only to re-download your content from Amazon S3, since it does not yet exist on the local server."); $group ->input("access_key") ->id("s3-access-key") @@ -62,22 +120,27 @@ class Admin_Aws_S3_Controller extends Admin_Controller { ->value(module::get_var("aws_s3", "access_key")) ->rules("required") ->error_messages("required", "This field is required") - ->message('Sign up to Amazon S3'); + ->error_messages("invalid", "Access Key is invalid") + ->message('Click here to sign up to Amazon Web Services.'); $group ->input("secret_key") ->id("s3-secret-key") ->label("Secret Access Key") ->value(module::get_var("aws_s3", "secret_key")) ->rules("required") - ->error_messages("required", "This field is required"); + ->error_messages("required", "This field is required") + ->error_messages("invalid", "Secret Key is invalid"); $group ->input("bucket_name") ->id("s3-bucket") ->label("Bucket Name") ->value(module::get_var("aws_s3", "bucket_name")) ->rules("required") + ->callback("aws_s3::validate_bucket") ->error_messages("required", "This field is required") - ->message("Note: This module will not create a bucket if it does not already exist. Please ensure you have already created the bucket and the bucket has the correct ACL permissions before continuing."); + ->error_messages("invalid", "Bucket name is invalid") + ->message('Note: This module will not create a bucket if it does not already exist. Please ensure you have already created the bucket using the AWS Console before continuing.
+Click here for information on Amazon S3 bucket naming conventions/restrictions.'); $group ->input("g3id") ->id("s3-g3id") @@ -85,12 +148,13 @@ class Admin_Aws_S3_Controller extends Admin_Controller { ->value(module::get_var("aws_s3", "g3id", md5(time()))) ->rules("required") ->error_messages("required", "This field is required") - ->message("This field allows for multiple G3 instances running off of a single S3 bucket."); + ->message("Utilising this field allows for multiple G3 file repositories stored inside the same S3 bucket."); $group ->checkbox("use_ssl") ->id("s3-use-ssl") ->checked(module::get_var("aws_s3", "use_ssl")) - ->label("Use SSL for S3 transfers"); + ->label("Use SSL for S3 transfers") + ->message("You may have problems when uploading content to S3 if this option is enabled. If so, turn off this option."); $group = $form->group("cdn_settings")->label(t("CDN Settings")); @@ -105,15 +169,54 @@ class Admin_Aws_S3_Controller extends Admin_Controller { • {resource} - The end path to the resource/object"); $group ->input("sig_exp") - ->id("sig_exp") + ->id("s3-sig_exp") ->label("Private Content Signature Duration") ->value(module::get_var("aws_s3", "sig_exp", 60)) ->rules("required") ->callback("aws_s3::validate_number") ->error_messages("not_numeric", "The value provided is not numeric. Please enter a number in this field.") - ->message("Set the time in seconds for the generated signature for access to permission-restricted S3 objects

-Note: this module does not yet support the creation of signatures to access private objects on S3 via CloudFront CDN."); +        ->message("Set the time, in seconds, after which generated signatures for permission-restricted S3 objects expire (private content in G3 is any item that the 'Everybody' user group cannot view).

+Note: this module does not yet support the creation of signatures to access private objects on S3 via CloudFront CDN."); + $group = $form->group("general_settings")->label(t("General Settings")); + + $chkbox = + $group ->checkbox("upload_thumbs") + ->id("s3-upload_thumbs") + ->label("Upload Thumbnails") + ->checked(module::get_var("aws_s3", "upload_thumbs", true)); + if (module::get_var("aws_s3", "s3_storage_only")) + $chkbox->disabled(true); + + $chkbox = + $group ->checkbox("upload_resizes") + ->id("s3-upload_resizes") + ->label("Upload Resized Images") + ->checked(module::get_var("aws_s3", "upload_resizes", true)); + if (module::get_var("aws_s3", "s3_storage_only")) + $chkbox->disabled(true); + + $chkbox = + $group ->checkbox("upload_fullsizes") + ->id("s3-upload_fullsizes") + ->label("Upload Fullsize Images") + ->checked(module::get_var("aws_s3", "upload_fullsizes", true)); + if (module::get_var("aws_s3", "s3_storage_only")) + $chkbox->disabled(true); + + $chkbox = + $group ->checkbox("s3_storage_only") + ->id("s3-storage-only") + ->label("Use S3 for primary storage of Gallery content (Not yet available)") + ->checked(module::get_var("aws_s3", "s3_storage_only", false)) + ->message("Use this option if your webhost has limited space available on your account. This module will remove content from the local server after it has been uploaded to S3.

+Note: your webhost account must still have enough free space to hold each image temporarily until it has been uploaded to S3.
") + ->disabled(true); + + if (!module::get_var("aws_s3", "enabled")) + $chkbox->disabled(true); + + // done creating form. $form ->submit("save") ->value("Save Settings"); diff --git a/3.0/modules/aws_s3/controllers/aws_s3.php b/3.0/modules/aws_s3/controllers/aws_s3.php new file mode 100644 index 00000000..a5d092b2 --- /dev/null +++ b/3.0/modules/aws_s3/controllers/aws_s3.php @@ -0,0 +1,9 @@ +parent(); if ($parent->id > 1) { - aws_s3::upload_album_cover($parent); + aws_s3::upload_item($parent); } } @@ -34,7 +34,7 @@ class item extends item_Core { parent::remove_album_cover($album); if ($album->id > 1) { - aws_s3::remove_album_cover($album); + aws_s3::remove_item($album); } } diff --git a/3.0/modules/aws_s3/helpers/aws_s3.php b/3.0/modules/aws_s3/helpers/aws_s3.php index 891ba0d3..c6df82a2 100644 --- a/3.0/modules/aws_s3/helpers/aws_s3.php +++ b/3.0/modules/aws_s3/helpers/aws_s3.php @@ -1,25 +1,11 @@ relative_path(), - module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("fs/" . $item->relative_path()), - ($item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); - $success_th = S3::putObjectFile(VARPATH . "thumbs/" . $item->relative_path(), - module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("th/" . $item->relative_path()), - ($item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); - $success_rs = S3::putObjectFile(VARPATH . "resizes/" . $item->relative_path(), - module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("rs/" . $item->relative_path()), - ($item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + $filename = urldecode($item->relative_path()); + $itype = "I"; + if ($item->is_album()) { + $filename .= "/.album.jpg"; + $itype = "A"; + } + + if ((!$item->s3_fullsize_uploaded || $flags & aws_s3::UPLOAD_FULLSIZE) && !$item->is_album()) { + aws_s3::log("[" . $itype . ":" . $item->id . "] Uploading fullsize object"); + $success_fs = S3::putObjectFile(VARPATH . "albums/" . $filename, + module::get_var("aws_s3", "bucket_name"), + self::get_resource_url("fs/" . $filename), + ($item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + $item->s3_fullsize_uploaded = $success_fs; + } + else + $success_fs = true; + + if ((!$item->s3_resize_uploaded || $flags & aws_s3::UPLOAD_RESIZE) && !$item->is_album()) { + aws_s3::log("[" . $itype . ":" . $item->id . "] Uploading resize object"); + $success_rs = S3::putObjectFile(VARPATH . "resizes/" . $filename, + module::get_var("aws_s3", "bucket_name"), + self::get_resource_url("rs/" . $filename), + ($item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + $item->s3_resize_uploaded = $success_rs; + } + else + $success_rs = true; + + if (!$item->s3_thumb_uploaded || $flags & aws_s3::UPLOAD_THUMB) { + aws_s3::log("[" . $itype . ":" . $item->id . "] Uploading thumbnail object"); + $success_th = S3::putObjectFile(VARPATH . "thumbs/" . $filename, + module::get_var("aws_s3", "bucket_name"), + self::get_resource_url("th/" . $filename), + ($item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + $item->s3_thumb_uploaded = $success_th; + } + else + $success_th = true; + + $item->s3_item_hash = md5($item->relative_path()); + + $item->save(); $success = $success_fs && $success_th && $success_rs; aws_s3::log("item upload success: " . $success); + return $success; } static function move_item($old_item, $new_item) { self::get_s3(); - S3::copyObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("fs/" . 
$old_item->relative_path()), - module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("fs/" . $new_item->relative_path()), - ($new_item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); - S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("fs/" . $old_item->relative_path())); + $old_filename = urldecode($old_item->relative_path()); + $new_filename = urldecode($new_item->relative_path()); - S3::copyObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("rs/" . $old_item->relative_path()), - module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("rs/" . $new_item->relative_path()), - ($new_item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); - S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("rs/" . $old_item->relative_path())); + aws_s3::log("old filename: " . self::get_resource_url("fs/" . $old_filename) . ", " . + "new filename: " . self::get_resource_url("fs/" . $new_filename)); - S3::copyObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("th/" . $old_item->relative_path()), - module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("th/" . $new_item->relative_path()), - ($new_item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); - S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("th/" . $old_item->relative_path())); + //aws_s3::log($old_item->get_aws_s3_meta()); + + if ($old_item->s3_fullsize_uploaded) { + aws_s3::log("Copying fullsize " . $old_filename . " to " . $new_filename); + S3::copyObject(module::get_var("aws_s3", "bucket_name"), self::get_resource_url("fs/" . $old_filename), + module::get_var("aws_s3", "bucket_name"), self::get_resource_url("fs/" . $new_filename), + ($new_item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + S3::deleteObject(module::get_var("aws_s3", "bucket_name"), self::get_resource_url("fs/" . $old_filename)); + } + else + aws_s3::upload_item($new_item, aws_s3::UPLOAD_FULLSIZE); + + if ($old_item->s3_resize_uploaded) { + aws_s3::log("Copying resized " . $old_filename . " to " . $new_filename); + S3::copyObject(module::get_var("aws_s3", "bucket_name"), self::get_resource_url("rs/" . $old_filename), + module::get_var("aws_s3", "bucket_name"), self::get_resource_url("rs/" . $new_filename), + ($new_item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + S3::deleteObject(module::get_var("aws_s3", "bucket_name"), self::get_resource_url("rs/" . $old_filename)); + } + else + aws_s3::upload_item($new_item, aws_s3::UPLOAD_RESIZE); + + if ($old_item->s3_thumb_uploaded) { + aws_s3::log("Copying thumbnail " . $old_filename . " to " . $new_filename); + S3::copyObject(module::get_var("aws_s3", "bucket_name"), self::get_resource_url("th/" . $old_filename), + module::get_var("aws_s3", "bucket_name"), self::get_resource_url("th/" . $new_filename), + ($new_item->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + S3::deleteObject(module::get_var("aws_s3", "bucket_name"), self::get_resource_url("th/" . $old_filename)); + } + else + aws_s3::upload_item($new_item, aws_s3::UPLOAD_THUMB); } static function remove_item($item) { self::get_s3(); - $success_fs = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("fs/" . $item->relative_path())); - $success_th = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("th/" . 
$item->relative_path())); - $success_rs = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - self::get_resource_url("rs/" . $item->relative_path())); + $filename = urldecode($item->relative_path()); + $itype = "I"; + if ($item->is_album()) { + $filename .= "/.album.jpg"; + $itype = "A"; + } - $success = $success_fs && $success_th && $success_rs; - aws_s3::log("s3 delete success: " . $success); - } + if ($item->s3_fullsize_uploaded && !$item->is_album()) { + aws_s3::log("[" . $itype . ":" . $item->id . "] Deleting fullsize object"); + $success_fs = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), + self::get_resource_url("fs/" . $filename)); + $item->s3_fullsize_uploaded = !$success_fs; + } + else + $success_fs = true; - static function upload_album_cover($album) { - self::get_s3(); - - if (file_exists(VARPATH . "resizes/" . $album->relative_path() . "/.album.jpg")) - $success_rs = S3::putObjectFile(VARPATH . "resizes/" . $album->relative_path() . "/.album.jpg", - module::get_var("aws_s3", "bucket_name"), - "g3/" . module::get_var("aws_s3", "g3id") . "/rs/" . $album->relative_path() . "/.album.jpg", - ($album->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + if ($item->s3_resize_uploaded && !$item->is_album()) { + aws_s3::log("[" . $itype . ":" . $item->id . "] Deleting resize object"); + $success_rs = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), + self::get_resource_url("rs/" . $filename)); + $item->s3_resize_uploaded = !$success_rs; + } else $success_rs = true; - if (file_exists(VARPATH . "thumbs/" . $album->relative_path() . "/.album.jpg")) - $success_th = S3::putObjectFile(VARPATH . "thumbs/" . $album->relative_path() . "/.album.jpg", - module::get_var("aws_s3", "bucket_name"), - "g3/" . module::get_var("aws_s3", "g3id") . "/th/" . $album->relative_path() . "/.album.jpg", - ($album->view_1 ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)); + if ($item->s3_thumb_uploaded) { + aws_s3::log("[" . $itype . ":" . $item->id . "] Deleting thumbnail object"); + $success_th = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), + self::get_resource_url("th/" . $filename)); + $item->s3_thumb_uploaded = !$success_th; + } else $success_th = true; + + $item->save_s3_meta(); - $success = $success_rs && $success_th; - aws_s3::log("album cover upload success: " . $success); - } - - static function remove_album_cover($album) { - self::get_s3(); - - $success_th = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - "g3/" . module::get_var("aws_s3", "g3id") . "/th/" . $album->relative_path() . "/.album.jpg"); - $success_rs = S3::deleteObject(module::get_var("aws_s3", "bucket_name"), - "g3/" . module::get_var("aws_s3", "g3id") . "/rs/" . $album->relative_path() . "/.album.jpg"); - - $success = $success_rs && $success_th; - aws_s3::log("album cover removal success: " . $success); + $success = $success_fs && $success_th && $success_rs; + aws_s3::log("S3 delete success: " . $success); + return $success; } static function getAuthenticatedURL($bucket, $uri) { @@ -184,7 +211,108 @@ class aws_s3_Core { static function validate_number($field) { if (preg_match("/\D/", $field->value)) - $field->add_error("not_numeric", 1); + $field->add_error("not_numeric", 1); + } + + static function validate_bucket($field) { + if (preg_match("/[^a-zA-Z0-9\-\.]/", $field->value)) + $field->add_error("invalid", 1); + } + + // @TODO: Write validation function (check with S3) + static function validate_access_details($access_key, $secret_key, $bucket_name) { + require_once(MODPATH . 
"aws_s3/lib/s3.php"); + S3::setAuth($access_key, $secret_key); + S3::$useSSL = false; + + $success_test = S3::putObjectString((string)time(), $bucket_name, ".s3_test"); + if ($success_test) + S3::deleteObject($bucket_name, ".s3_test"); + + return $success_test; + } + + static function base64_filename(Item_Model $item) { + $file_path = explode("/", $item->relative_path()); + return base64_encode(end($file_path)); + } + + static function can_schedule() { + if (!module::is_active("scheduler")) { + return false; + } + + return true; + } + + static function schedule_task($task) { + $schedule = ORM::factory("schedule"); + $schedule->add_task($task); + } + + static function schedule_full_sync2() { + $task_def = + Task_Definition::factory() + ->callback("aws_s3_task::sync") + ->name("Amazon S3 bucket synchronisation") + ->severity(log::SUCCESS); + + $task = task::create($task_def, array()); + self::schedule_task($task); + } + + static function schedule_full_sync($this_task) { + if (!self::can_schedule()) + throw new Exception("Unable to initialize schedule"); + + try { + self::schedule_full_sync2(); + + $this_task->status = "Scheduled re-sync task"; + $this_task->done = true; + $this_task->state = "success"; + $this_task->percent_complete = 100; + } + catch (Exception $err) { + $task->done = true; + $thisSynchronise_task->state = "error"; + $this_task->status = $err->getMessage(); + $this_task->log((string)$err); + } + + $this_task->save(); + + if (!module::get_var("aws_s3", "synced", false)) { + site_status::warning( + "Your site has been scheduled for full Amazon S3 re-synchronisation. This message will clear when this has been completed.", + "aws_s3_not_synced" + ); + } + + return true; + } + + static function schedule_item_sync($item) { + if (!self::can_schedule()) + throw new Exception("Unable to initialize schedule"); + + $item_id = null; + if (is_object($item) && $item instanceof Item_Model) + $item_id = $item->id; + else if (is_numeric($item)) + $item_id = $item; + else + throw new Exception("Un-intelligible item reference passed."); + + $task_def = + Task_Definition::factory() + ->callback("aws_s3_task::upload_item") + ->name("Amazon S3 item upload (ID: " . $item_id . ")") + ->severity(log::SUCCESS); + + $task = task::create($task_def, array("item_id" => $item_id)); + + self::schedule_task($task); } diff --git a/3.0/modules/aws_s3/helpers/aws_s3_event.php b/3.0/modules/aws_s3/helpers/aws_s3_event.php index 240964e8..29a9477a 100644 --- a/3.0/modules/aws_s3/helpers/aws_s3_event.php +++ b/3.0/modules/aws_s3/helpers/aws_s3_event.php @@ -1,22 +1,4 @@ is_album()) + if ($item->id == 1) return true; aws_s3::log("Item created - " . $item->id); - aws_s3::upload_item($item); + aws_s3::schedule_item_sync($item); } static function item_deleted($item) { + if ($item->id == 1) + return true; + aws_s3::log("Item deleted - " . $item->id); aws_s3::remove_item($item); + + ORM::factory("aws_s3_meta", $item->id)->delete(); } - static function item_moved($new_item, $old_item) { - aws_s3::log("Item moved - " . $item->id); - aws_s3::move_item($old_item, $new_item); + static function item_updated($old_item, $new_item) { + if ($new_item->id == 1) + return true; + + if ($new_item->has_aws_s3_meta()) { + aws_s3::log("Item updated - " . 
$new_item->id); + + if ($old_item->relative_path() == $new_item->relative_path() && $old_item->s3_item_hash == $new_item->s3_item_hash) { + aws_s3::log("nothing changed?!"); + } + else if ($old_item->relative_path() != $new_item->relative_path()) { + aws_s3::log("Item moved..."); + aws_s3::move_item($old_item, $new_item); + } + else { + aws_s3::log("Item hasn't moved. Image updated?"); + aws_s3::remove_item($old_item); + aws_s3::schedule_item_sync($new_item); + } + } } } \ No newline at end of file diff --git a/3.0/modules/aws_s3/helpers/aws_s3_installer.php b/3.0/modules/aws_s3/helpers/aws_s3_installer.php index 246217f0..0a297dbb 100644 --- a/3.0/modules/aws_s3/helpers/aws_s3_installer.php +++ b/3.0/modules/aws_s3/helpers/aws_s3_installer.php @@ -1,57 +1,125 @@ query("DROP TABLE {aws_s3_meta}"); } static function upgrade($version) { - if ($version < self::getversion()) - self::setversion(); + log::info("aws_s3", "Commencing module upgrade (" . $version . ")"); + switch ($version) { + case 0: { + log::info("aws_s3", "Installing version 1"); + + @mkdir(VARPATH . "modules/aws_s3"); + @mkdir(VARPATH . "modules/aws_s3/log"); + + // installation's unique identifier - allows multiple g3's pointing to the same s3 bucket. + if (!module::get_var("aws_s3", "g3id")) + module::set_var("aws_s3", "g3id", md5(time())); + + module::set_var("aws_s3", "synced", false); + module::set_var("aws_s3", "enabled", false); + module::set_var("aws_s3", "access_key", ""); + module::set_var("aws_s3", "secret_key", ""); + module::set_var("aws_s3", "bucket_name", ""); + + module::set_version("aws_s3", 1); + } + case 1: { + log::info("aws_s3", "Upgrading to version 2"); + $db = Database::instance(); + $db->query("CREATE TABLE {aws_s3_meta} ( + `item_id` int(9) NOT NULL, + `item_hash` varchar(32) NOT NULL DEFAULT '', + `thumb_uploaded` smallint(1) NOT NULL DEFAULT 0, + `resize_uploaded` smallint(1) NOT NULL DEFAULT 0, + `fullsize_uploaded` smallint(1) NOT NULL DEFAULT 0, + `local_deleted` smallint(1) NOT NULL DEFAULT 0, + PRIMARY KEY (`item_id`) + ) DEFAULT CHARSET=utf8;"); + + module::set_var("aws_s3", "upload_thumbs", true); + module::set_var("aws_s3", "upload_resizes", true); + module::set_var("aws_s3", "upload_fullsizes", true); + module::set_var("aws_s3", "s3_storage_only", false); + + if (module::get_var("aws_s3", "synced")) { + // v1 has already synced this installation to s3. mark all the items with the relevant meta data + $items = ORM::factory("item")->find_all(); + foreach ($items as $item) { + aws_s3::log("Updating S3 meta for item ID: " . $item->id); + $item->s3_thumb_uploaded = true; + if (!$item->is_album()) { + $item->s3_resize_uploaded = true; + $item->s3_fullsize_uploaded = true; + } + $item->s3_local_deleted = false; + $item->s3_item_hash = md5($item->relative_path()); + $item->save_s3_meta(); + } + } + else { + // check various states after upgrade from v1.. + + if (module::get_var("aws_s3", "access_key") != "" && + module::get_var("aws_s3", "secret_key") != "" && + module::get_var("aws_s3", "bucket_name") != "" && + aws_s3::validate_access_details(module::get_var("aws_s3", "access_key"), + module::get_var("aws_s3", "secret_key"), + module::get_var("aws_s3", "bucket_name")) + ) { + // details are correct but hasn't been synced. + if (aws_s3::can_schedule()) { + // i can schedule this task + aws_s3::schedule_full_sync2(); + site_status::warning( + "Your site has been scheduled for full Amazon S3 re-synchronisation. 
This message will clear when this has been completed.", + "aws_s3_not_synced" + ); + } + else { + // i CAN'T schedule it.. + site_status::warning( + t('Your site has not been synchronised to Amazon S3. Until it has, your server will continue to serve image content to your visitors.
Click here to start the synchronisation task.', + array("url" => html::mark_clean(url::site("admin/maintenance/start/aws_s3_task::manual_sync?csrf=__CSRF__"))) + ), + "aws_s3_not_synced" + ); + } + } + else { + site_status::warning( + t('Amazon S3 module needs configuration. Click here to go to the configuration page.', + array("url" => html::mark_clean(url::site("admin/aws_s3"))) + ), + "aws_s3_not_configured" + ); + } + } + + module::set_version("aws_s3", 2); + } + } + log::info("aws_s3", "Module upgrade complete"); } static function deactivate() {} static function activate() {} static function can_activate() { $messages = array(); + if (!function_exists("curl_init")) { + $messages['error'][] = "The S3 library (and this module) depend on the php5-curl extension. Please install this extension and try again."; + } + if (!module::is_active("scheduler")) { + $messages['warn'][] = "The 'Scheduler' module is not installed/active. Scheduled maintenance tasks such as synchronisation will not be available."; + } return $messages; } diff --git a/3.0/modules/aws_s3/helpers/aws_s3_task.php b/3.0/modules/aws_s3/helpers/aws_s3_task.php index 29d32e12..cca2d019 100644 --- a/3.0/modules/aws_s3/helpers/aws_s3_task.php +++ b/3.0/modules/aws_s3/helpers/aws_s3_task.php @@ -1,96 +1,193 @@ callback("aws_s3_task::sync") - ->name(t("Syncronise with Amazon S3")) - ->description(t("Syncronise your Gallery 3 data/images with your Amazon S3 bucket")) - ->severity(log::SUCCESS)); + if (aws_s3::can_schedule()) + return array(Task_Definition::factory() + ->callback("aws_s3::schedule_full_sync") + ->name(t("Synchronise with Amazon S3")) + ->description(t("Schedule a task to synchronise your Gallery 3 data/images with your Amazon S3 bucket")) + ->severity(log::SUCCESS) + ->set_flags(Task_Definition::CAN_RUN_NOW)); + else + return array(Task_Definition::factory() + ->callback("aws_s3_task::manual_sync") + ->name(t("Synchronise with Amazon S3")) + ->description(t("Synchronise your Gallery 3 data/images with your Amazon S3 bucket")) + ->severity(log::SUCCESS)); + + } + + static function upload_item($task) { + aws_s3::log("aws_s3_task::upload_item called"); + + $item = ORM::factory("item", $task->get("item_id")); + + aws_s3::log("Commencing upload task for item " . $item->id); + + $task->status = "Commencing upload"; + $task->percent_complete = 0; + $task->save(); + if (aws_s3::upload_item($item)) { + $task->percent_complete = 100; + $task->done = true; + $task->state = "success"; + $task->status = "Upload complete"; + } + else { + $task->done = false; + $task->state = "error"; + $task->status = "Upload failed"; + } + $task->save(); + } + + static function manual_sync($task) { + aws_s3::log("Amazon S3 manual re-sync started."); + + if (!$task->get("mode")) { + $task->set("mode", "init"); + } + + aws_s3::log("mode: " . 
$task->get("mode")); + switch ($task->get("mode")) { + case "init": { + batch::start(); + $items = ORM::factory("item")->find_all(); + $task->set("total_count", count($items)); + + if (count($items) <= 50) + $task->set("batch", 1); + else if (count($items) > 50 && count($items) <= 500) + $task->set("batch", 5); + else if (count($items) > 500 && count($items) <= 5000) + $task->set("batch", 10); + else if (count($items) > 5000) + $task->set("batch", 25); + + $task->set("completed", 0); + $task->state = "running"; + + if (!module::get_var("aws_s3", "synced", false)) { + $task->set("mode", "clean"); + $task->status = "Emptying contents of bucket"; + } + else { + $task->status = "Uploading items..."; + $task->percent_complete = 10; + $task->set("mode", "upload"); + } + } break; + case "clean": { + aws_s3::log("Emptying contents of bucket"); + + require_once(MODPATH . "aws_s3/lib/s3.php"); + $s3 = new S3(module::get_var("aws_s3", "access_key"), module::get_var("aws_s3", "secret_key")); + + $bucket = module::get_var("aws_s3", "bucket_name"); + $resource = aws_s3::get_resource_url(""); + $stuff = array_reverse(S3::getBucket($bucket, $resource)); + $i = 0; + foreach ($stuff as $uri => $item) { + $i++; + aws_s3::log("Removing " . $uri . " from S3"); + S3::deleteObject($bucket, $uri); + $task->percent_complete = round(20 * ($i / count($stuff))); + $task->save(); + } + $task->set("mode", "upload"); + $task->status = "Uploading items..."; + } break; + case "upload": { + $items = ORM::factory("item")->find_all($task->get("batch"), $task->get("completed")); + foreach ($items as $item) { + aws_s3::upload_item($item); + $task->set("completed", $task->get("completed") + 1); + } + $task->percent_complete = (90 * ($task->get("completed") / $task->get("total_count"))) + 10; + $task->status = "Uploaded " . $task->get("completed") . " of " . $task->get("total_count") . " items..."; + + if ($task->get("completed") == $task->get("total_count")) { + $task->set("mode", "complete"); + } + } break; + case "complete": { + $task->done = true; + $task->state = "success"; + $task->percent_complete = 100; + $task->status = "Completed."; + module::set_var("aws_s3", "synced", true); + site_status::clear("aws_s3_not_synced"); + batch::stop(); + } break; + } + aws_s3::log("End of function.."); + $task->save(); } static function sync($task) { - require_once(MODPATH . "aws_s3/lib/s3.php"); - $s3 = new S3(module::get_var("aws_s3", "access_key"), module::get_var("aws_s3", "secret_key")); - - $mode = $task->get("mode", "init"); - switch ($mode) { - case "init": { - aws_s3::log("re-sync task started.."); - batch::start(); - $items = ORM::factory("item")->find_all(); - aws_s3::log("items to sync: " . count($items)); - $task->set("total_count", count($items)); - $task->set("completed", 0); - $task->set("mode", "empty"); - $task->status = "Emptying contents of bucket"; - } break; - case "empty": { // 0 - 10% - aws_s3::log("emptying bucket contents (any files that may already exist in the bucket/prefix path)"); - $bucket = module::get_var("aws_s3", "bucket_name"); + aws_s3::log("Amazon S3 Re-sync task started.."); - $resource = aws_s3::get_resource_url(""); - $stuff = array_reverse(S3::getBucket($bucket, $resource)); - foreach ($stuff as $uri => $item) { - aws_s3::log("removing: " . 
$uri); - S3::deleteObject($bucket, $uri); - } - $task->percent_complete = 10; - $task->set("mode", "upload"); - $task->state = "Commencing upload..."; - } break; - case "upload": { // 10 - 100% - $completed = $task->get("completed", 0); - $items = ORM::factory("item")->find_all(1, $completed); - foreach ($items as $item) { - if ($item->id > 1) { - aws_s3::log("uploading item " . $item->id . " (" . ($completed + 1) . "/" . $task->get("total_count") . ")"); - if ($item->is_album()) - aws_s3::upload_album_cover($item); - else - aws_s3::upload_item($item); - } - $completed++; - } - $task->set("completed", $completed); - $task->percent_complete = round(90 * ($completed / $task->get("total_count"))) + 10; - $task->status = $completed . " of " . $task->get("total_count"). " uploaded."; + batch::start(); + $items = ORM::factory("item")->find_all(); - if ($completed == $task->get("total_count")) { - $task->set("mode", "finish"); - } - } break; - case "finish": { - aws_s3::log("completing upload task.."); - $task->percent_complete = 100; - $task->state = "success"; - $task->done = true; - $task->status = "Sync task completed successfully"; - batch::stop(); - module::set_var("aws_s3", "synced", true); - site_status::clear("aws_s3_not_synced"); - } break; + $task->set("total_count", count($items)); + $task->set("completed", 0); + + if (!module::get_var("aws_s3", "synced", false)) { + aws_s3::log("Emptying contents of bucket"); + $task->status = "Emptying contents of bucket"; + $task->save(); + + require_once(MODPATH . "aws_s3/lib/s3.php"); + $s3 = new S3(module::get_var("aws_s3", "access_key"), module::get_var("aws_s3", "secret_key")); + + $bucket = module::get_var("aws_s3", "bucket_name"); + $resource = aws_s3::get_resource_url(""); + $stuff = array_reverse(S3::getBucket($bucket, $resource)); + $i = 0; + foreach ($stuff as $uri => $item) { + $i++; + aws_s3::log("Removing " . $uri . " from S3"); + S3::deleteObject($bucket, $uri); + $task->percent_complete = round(20 * ($i / count($stuff))); + $task->save(); + } } + + $task->percent_complete = 20; + aws_s3::log("Commencing upload tasks"); + $task->state = "Commencing upload..."; + $task->save(); + + $completed = $task->get("completed", 0); + + $items = ORM::factory("item")->find_all(); + foreach ($items as $item) { + try { + if ($item->id > 1) + aws_s3::upload_item($item); + } + catch (Exception $err) {} + $completed++; + + $task->set("completed", $completed); + $task->percent_complete = round(80 * ($completed / $task->get("total_count"))) + 20; + $task->status = $completed . " of " . $task->get("total_count"). 
" uploaded."; + $task->save(); + } + + $task->percent_complete = 100; + $task->state = "success"; + $task->done = true; + aws_s3::log("Sync task completed successfully"); + $task->status = "Sync task completed successfully"; + module::set_var("aws_s3", "synced", true); + site_status::clear("aws_s3_not_synced"); + batch::stop(); + $task->save(); } } \ No newline at end of file diff --git a/3.0/modules/aws_s3/lib/s3.php b/3.0/modules/aws_s3/lib/s3.php index ae9aeb83..b2ce6cec 100644 --- a/3.0/modules/aws_s3/lib/s3.php +++ b/3.0/modules/aws_s3/lib/s3.php @@ -302,7 +302,7 @@ class S3 { if ($input === false) return false; $rest = new S3Request('PUT', $bucket, $uri); - if (is_string($input)) $input = array( + if (is_string($input) || is_numeric($input)) $input = array( 'data' => $input, 'size' => strlen($input), 'md5sum' => base64_encode(md5($input, true)) ); @@ -1218,8 +1218,10 @@ final class S3Request { curl_setopt($curl, CURLOPT_USERAGENT, 'S3/php'); if (S3::$useSSL) { - curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 1); - curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 1); + curl_setopt($curl, CURLOPT_SSL_VERIFYPEER , false ); + curl_setopt($curl, CURLOPT_SSL_VERIFYHOST , false ); +// curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 1); +// curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 1); } curl_setopt($curl, CURLOPT_URL, $url); diff --git a/3.0/modules/aws_s3/models/MY_Item_Model.php b/3.0/modules/aws_s3/models/MY_Item_Model.php index cbef8072..1ef443f2 100644 --- a/3.0/modules/aws_s3/models/MY_Item_Model.php +++ b/3.0/modules/aws_s3/models/MY_Item_Model.php @@ -1,57 +1,89 @@ s3_thumb_uploaded) return parent::thumb_url($full_uri); - if ($this->is_photo()) { + if ($this->is_photo()) return aws_s3::generate_url("th/" . $this->relative_path(), ($this->view_1 == 1 ? false : true), $this->updated); - } - else if ($this->is_album() && $this->id > 1) { + else if ($this->is_album() && $this->id > 1) return aws_s3::generate_url("th/" . $this->relative_path() . "/.album.jpg", ($this->view_1 == 1 ? false : true), $this->updated); - } - else if ($this->is_movie()) { - $relative_path = preg_replace("/...$/", "jpg", $this->relative_path()); - return aws_s3::generate_url("th/" . $relative_path, ($this->view_1 == 1 ? false : true), $this->updated); - } + else if ($this->is_movie()) + return aws_s3::generate_url("th/" . preg_replace("/...$/", "jpg", $this->relative_path()), ($this->view_1 == 1 ? false : true), $this->updated); } public function file_url($full_uri=false) { - if (!module::get_var("aws_s3", "enabled")) + if (!module::get_var("aws_s3", "enabled") || Router::$controller == "rest" || !$this->s3_fullsize_uploaded) return parent::file_url($full_uri); return aws_s3::generate_url("fs/" . $this->relative_path(), ($this->view_1 == 1 ? false : true), $this->updated); } public function resize_url($full_uri=false) { - if (!module::get_var("aws_s3", "enabled")) + if (!module::get_var("aws_s3", "enabled") || Router::$controller == "rest" || !$this->s3_resize_uploaded) return parent::resize_url($full_uri); - if ($this->is_album() && $this->id > 1) { + if ($this->is_album() && $this->id > 1) return aws_s3::generate_url("rs/" . $this->relative_path() . "/.album.jpg", ($this->view_1 == 1 ? false : true), $this->updated); + else + return aws_s3::generate_url("rs/" . $this->relative_path(), ($this->view_1 == 1 ? 
false : true), $this->updated); + } + + private function _load_aws_s3_meta($create_if_not_exists = true) { + $this->_aws_s3_meta = ORM::factory("aws_s3_meta")->find($this->id); + if (!$this->_aws_s3_meta->item_id) { + if ($create_if_not_exists) { + $this->_aws_s3_meta->item_id = $this->id; + $this->_aws_s3_meta->save(); + } + else + return false; + } + return $this; + } + + public function has_aws_s3_meta() { + if (!$this->_load_aws_s3_meta(false)) + return false; + return true; + } + + public function get_aws_s3_meta() { + if (!$this->_aws_s3_meta) + $this->_load_aws_s3_meta(); + + return $this->_aws_s3_meta; + } + + public function save_s3_meta() { + if ($this->_aws_s3_meta) + $this->_aws_s3_meta->save(); + } + + public function save() { + $this->save_s3_meta(); + return parent::save(); + } + + public function __get($column) { + if (substr($column, 0, 3) == "s3_") { + $var = substr($column, 3); + return $this->get_aws_s3_meta()->$var; + } + return parent::__get($column); + } + + public function __set($column, $value) { + if (substr($column, 0, 3) == "s3_") { + $var = substr($column, 3); + $this->get_aws_s3_meta()->$var = $value; } else { - return aws_s3::generate_url("rs/" . $this->relative_path(), ($this->view_1 == 1 ? false : true), $this->updated); + parent::__set($column, $value); } } diff --git a/3.0/modules/aws_s3/models/aws_s3_meta.php b/3.0/modules/aws_s3/models/aws_s3_meta.php new file mode 100644 index 00000000..7010c595 --- /dev/null +++ b/3.0/modules/aws_s3/models/aws_s3_meta.php @@ -0,0 +1,6 @@ + S3 file transfers diff --git a/3.0/modules/aws_s3/views/admin_aws_s3.html.php b/3.0/modules/aws_s3/views/admin_aws_s3.html.php index 961186e3..05d340de 100644 --- a/3.0/modules/aws_s3/views/admin_aws_s3.html.php +++ b/3.0/modules/aws_s3/views/admin_aws_s3.html.php @@ -1,10 +1,13 @@
+

+

donating to help support future development."); ?> +
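
For reference, the object key layout and credential probe this patch standardises on, condensed into a standalone sketch. The paths and the variables $access_key, $secret_key, $bucket, $g3id and the example item are placeholders rather than values taken from the module; only S3 class calls that already appear in this diff are used.

<?php
// Minimal sketch of how the module talks to S3 via the bundled lib/s3.php.
require_once("lib/s3.php");   // illustrative path; the module uses MODPATH . "aws_s3/lib/s3.php"

S3::setAuth($access_key, $secret_key);
S3::$useSSL = false;          // mirrors aws_s3::validate_access_details()

// Credential/bucket probe: write a throwaway marker object, then delete it.
if (S3::putObjectString((string) time(), $bucket, ".s3_test")) {
  S3::deleteObject($bucket, ".s3_test");

  // Objects are keyed as g3/{g3id}/{fs|rs|th}/{relative_path}; album covers get
  // "/.album.jpg" appended (see the get_resource_url() usage in aws_s3::upload_item()).
  $key = "g3/" . $g3id . "/th/album1/photo.jpg";
  S3::putObjectFile("/path/to/gallery3/var/thumbs/album1/photo.jpg", $bucket, $key,
                    S3::ACL_PUBLIC_READ);   // S3::ACL_PRIVATE when "Everybody" cannot view the item
}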