Commit ffe12a44 authored by heddn, committed by heddn
Issue #3092353 by heddn: Backport D8 features to D7

parent 64bbc6fb
<?php
/**
* Provides an iterator filter that excludes ignored file paths.
*/
class IgnoredPathsIteratorFilter extends \FilterIterator {
use IgnoredPathsTrait;
/**
* {@inheritdoc}
*/
public function accept() {
return !self::isIgnoredPath($this->current());
}
}
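// A minimal usage sketch (illustration only, not part of this patch): wrap a
// recursive file listing so that paths matching the ignore patterns are
// skipped. Paths are built relative to the Drupal root (the working directory
// of a normal request) so they line up with the default patterns.
$example_inner = new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator(
  'sites',
  \FilesystemIterator::SKIP_DOTS | \FilesystemIterator::CURRENT_AS_PATHNAME
));
foreach (new IgnoredPathsIteratorFilter($example_inner) as $example_path) {
  // Only paths that do not match automatic_updates_ignored_paths arrive here.
}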
<?php
/**
* Provide a helper to check if file paths are ignored.
*/
trait IgnoredPathsTrait {
/**
* Check if the file path is ignored.
*
* @param string $file_path
* The file path.
*
* @return bool
* TRUE if file path is ignored, else FALSE.
*/
protected static function isIgnoredPath($file_path) {
$paths = variable_get('automatic_updates_ignored_paths', "sites/all/modules/*\nsites/all/themes/*");
return (bool) drupal_match_path($file_path, $paths);
}
}
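// Illustration only (not part of this patch): the ignore list is a set of
// newline-separated patterns evaluated by drupal_match_path(), where "*" is a
// wildcard, so the defaults ignore everything under sites/all/modules and
// sites/all/themes.
$example_patterns = "sites/all/modules/*\nsites/all/themes/*";
$example_ignored = drupal_match_path('sites/all/modules/views/views.module', $example_patterns); // TRUE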
<?php
use Drupal\Signify\ChecksumList;
use Drupal\Signify\FailedCheckumFilter;
use Drupal\Signify\Verifier;
/**
* Class to apply in-place updates.
*/
class InPlaceUpdate {
use ProjectInfoTrait;
/**
* The manifest file that lists all file deletions.
*/
const DELETION_MANIFEST = 'DELETION_MANIFEST.txt';
/**
* The checksum file with hashes of archive file contents.
*/
const CHECKSUM_LIST = 'checksumlist.csig';
/**
* The directory inside the archive for file additions and modifications.
*/
const ARCHIVE_DIRECTORY = 'files/';
/**
* The root file path.
*
* @var string
*/
protected static $rootPath;
/**
* The folder where files are backed up.
*
* @var string
*/
protected static $backup;
/**
* The temporary extract directory.
*
* @var string
*/
protected static $tempDirectory;
/**
* {@inheritdoc}
*/
public static function update($project_name, $project_type, $from_version, $to_version) {
self::$rootPath = DRUPAL_ROOT;
$project_root = drupal_get_path('module', 'automatic_updates');
require_once $project_root . DIRECTORY_SEPARATOR . 'vendor' . DIRECTORY_SEPARATOR . 'autoload.php';
// Bail immediately on updates if error category checks fail.
if (ReadinessCheckerManager::getResults('error')) {
return FALSE;
}
$success = FALSE;
if ($project_name === 'drupal') {
$project_root = self::$rootPath;
}
else {
$project_root = drupal_get_path($project_type, $project_name);
}
if ($archive = self::getArchive($project_name, $from_version, $to_version)) {
$modified = self::checkModifiedFiles($project_name, $archive);
if (!$modified && self::backup($archive, $project_root)) {
$success = self::processUpdate($archive, $project_root);
if (!$success) {
self::rollback($project_root);
}
else {
self::clearOpcodeCache();
}
}
}
return $success;
}
/**
* Get an archive with the quasi-patch contents.
*
* @param string $project_name
* The project name.
* @param string $from_version
* The current project version.
* @param string $to_version
* The desired next project version.
*
* @return \ArchiverZip
* The archive.
*/
protected static function getArchive($project_name, $from_version, $to_version) {
$url = self::buildUrl($project_name, self::getQuasiPatchFileName($project_name, $from_version, $to_version));
$destination = drupal_realpath(file_destination("temporary://$project_name.zip", FILE_EXISTS_RENAME));
self::doGetArchive($url, $destination);
if (file_exists($destination)) {
return new \ArchiverZip($destination);
}
}
/**
* Check if files are modified before applying updates.
*
* @param string $project_name
* The project name.
* @param \ArchiverZip $archive
* The archive.
*
* @return bool
* Return TRUE if modified files exist, FALSE otherwise.
*/
protected static function checkModifiedFiles($project_name, \ArchiverZip $archive) {
$extensions = self::getInfos();
$files = iterator_to_array(ModifiedFilesService::getModifiedFiles([$extensions[$project_name]]));
$files = array_unique($files);
$archive_files = $archive->listContents();
foreach ($archive_files as $index => &$archive_file) {
$skipped_files = [
self::DELETION_MANIFEST,
self::CHECKSUM_LIST,
];
// Skip certain files and all directories.
if (in_array($archive_file, $skipped_files, TRUE) || substr($archive_file, -1) === '/') {
unset($archive_files[$index]);
continue;
}
self::stripFileDirectoryPath($archive_file);
}
if ($intersection = array_intersect($files, $archive_files)) {
watchdog('automatic_updates', 'Cannot update because %count files are modified: %paths', [
'%count' => count($intersection),
'%paths' => implode(', ', $intersection),
], WATCHDOG_ERROR);
return TRUE;
}
return FALSE;
}
/**
* Perform retrieval of archive, with delay if archive is still being created.
*
* @param string $url
* The URL to retrieve.
* @param string $destination
* The destination to download the archive.
* @param int $delay
* (optional) Number of seconds to wait before making the request. Defaults to 0.
*/
protected static function doGetArchive($url, $destination, $delay = 0) {
sleep($delay);
$result = drupal_http_request($url);
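// Drupal.org may answer with HTTP 429 or a Retry-After header while the quasi-patch archive is still being generated; wait and retry.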
if ($result->code == 429 || isset($result->headers['retry-after'])) {
// Fall back to a short fixed delay when no Retry-After header is provided.
$retry = isset($result->headers['retry-after']) ? (int) $result->headers['retry-after'] : 10;
self::doGetArchive($url, $destination, $retry);
}
elseif ($result->code != 200) {
watchdog('automatic_updates', 'Retrieval of "@url" failed with: @message', [
'@url' => $url,
'@message' => $result->data,
], WATCHDOG_ERROR);
}
else {
file_unmanaged_save_data($result->data, $destination, FILE_EXISTS_REPLACE);
}
}
/**
* Process update.
*
* @param \ArchiverZip $archive
* The archive.
* @param string $project_root
* The project root directory.
*
* @return bool
* Return TRUE if update succeeds, FALSE otherwise.
*/
protected static function processUpdate(\ArchiverZip $archive, $project_root) {
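// Extract the quasi-patch to a temporary directory and verify its signed checksum list before copying anything over the live code base.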
$archive->extract(self::getTempDirectory());
self::validateArchive(self::getTempDirectory());
foreach (self::getFilesList(self::getTempDirectory()) as $file) {
$file_real_path = self::getFileRealPath($file);
$file_path = substr($file_real_path, strlen(self::getTempDirectory() . self::ARCHIVE_DIRECTORY));
$project_real_path = self::getProjectRealPath($file_path, $project_root);
$directory = dirname($project_real_path);
file_prepare_directory($directory, FILE_CREATE_DIRECTORY);
file_unmanaged_copy($file_real_path, $project_real_path, FILE_EXISTS_REPLACE);
watchdog('automatic_updates', '"@file" was updated.', ['@file' => $project_real_path], WATCHDOG_INFO);
}
foreach (self::getDeletions() as $deletion) {
$file_deletion = self::getProjectRealPath($deletion, $project_root);
file_unmanaged_delete($file_deletion);
watchdog('automatic_updates', '"@file" was deleted.', ['@file' => $file_deletion], WATCHDOG_INFO);
}
return TRUE;
}
/**
* Validate the downloaded archive.
*
* @param string $directory
* The location of the downloaded archive.
*/
protected static function validateArchive($directory) {
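// The .csig file is a checksum list signed with the Signify public key bundled with this module; a missing signature or a checksum mismatch aborts the update.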
$csig_file = $directory . DIRECTORY_SEPARATOR . self::CHECKSUM_LIST;
if (!file_exists($csig_file)) {
throw new \RuntimeException('The CSIG file does not exist in the archive.');
}
$contents = file_get_contents($csig_file);
$module_path = drupal_get_path('module', 'automatic_updates');
$key = file_get_contents($module_path . '/artifacts/keys/root.pub');
$verifier = new Verifier($key);
$files = $verifier->verifyCsigMessage($contents);
$checksums = new ChecksumList($files, TRUE);
$failed_checksums = new FailedCheckumFilter($checksums, $directory);
if (iterator_count($failed_checksums)) {
throw new \RuntimeException('The downloaded files did not match what was expected.');
}
}
/**
* Backup before an update.
*
* @param \ArchiverZip $archive
* The archive.
* @param string $project_root
* The project root directory.
*
* @return bool
* Return TRUE if backup succeeds, FALSE otherwise.
*/
protected static function backup(\ArchiverZip $archive, $project_root) {
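// Back up every file the archive adds, modifies or deletes so that a failed update can be rolled back.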
$backup = file_create_filename('automatic_updates-backup', 'temporary://');
file_prepare_directory($backup, FILE_CREATE_DIRECTORY);
$backup_realpath = drupal_realpath($backup);
if (!$backup_realpath) {
return FALSE;
}
self::$backup = $backup_realpath . DIRECTORY_SEPARATOR;
foreach ($archive->listContents() as $file) {
// Ignore files that aren't in the files directory.
if (!self::stripFileDirectoryPath($file)) {
continue;
}
$success = self::doBackup($file, $project_root);
if (!$success) {
return FALSE;
}
}
$archive->extract(self::getTempDirectory(), [self::DELETION_MANIFEST]);
foreach (self::getDeletions() as $deletion) {
$success = self::doBackup($deletion, $project_root);
if (!$success) {
return FALSE;
}
}
return TRUE;
}
/**
* Remove the files directory path from files from the archive.
*
* @param string $file
* The file path.
*
* @return bool
* TRUE if path was removed, else FALSE.
*/
protected static function stripFileDirectoryPath(&$file) {
if (strpos($file, self::ARCHIVE_DIRECTORY) === 0) {
$file = substr($file, strlen(self::ARCHIVE_DIRECTORY));
return TRUE;
}
return FALSE;
}
/**
* Execute file backup.
*
* @param string $file
* The file to backup.
* @param string $project_root
* The project root directory.
*
* @return bool
* Return TRUE if backup succeeds, FALSE otherwise.
*/
protected static function doBackup($file, $project_root) {
$directory = self::$backup . dirname($file);
if (!file_exists($directory) && !drupal_mkdir($directory, NULL, TRUE)) {
return FALSE;
}
$project_real_path = self::getProjectRealPath($file, $project_root);
if (file_exists($project_real_path) && !is_dir($project_real_path)) {
$success = file_unmanaged_copy($project_real_path, self::$backup . $file, FILE_EXISTS_REPLACE);
if (!$success) {
return FALSE;
}
watchdog('automatic_updates', '"@file" was backed up in preparation for an update.', ['@file' => $project_real_path], WATCHDOG_INFO);
}
return TRUE;
}
/**
* Rollback after a failed update.
*
* @param string $project_root
* The project root directory.
*/
protected static function rollback($project_root) {
if (!self::$backup) {
return;
}
foreach (self::getFilesList(self::$backup) as $file) {
$file_real_path = self::getFileRealPath($file);
$file_path = substr($file_real_path, strlen(self::$backup));
$success = file_unmanaged_copy($file_real_path, self::getProjectRealPath($file_path, $project_root), FILE_EXISTS_REPLACE);
if (!$success) {
watchdog('automatic_updates', '@file was not rolled back successfully.', ['@file' => $file_real_path], WATCHDOG_ERROR);
}
else {
watchdog('automatic_updates', '"@file" was restored due to failure(s) in applying update.', ['@file' => $file_path], WATCHDOG_INFO);
}
}
}
/**
* Provide a recursive list of files, excluding directories.
*
* @param string $directory
* The directory to recurse for files.
*
* @return \RecursiveIteratorIterator|\SplFileInfo[]
* The iterator of SplFileInfos.
*/
protected static function getFilesList($directory) {
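// Descend into every directory except .git, and skip the deletion manifest and the checksum list themselves.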
$filter = static function ($file, $file_name, $iterator) {
/** @var \SplFileInfo $file */
/** @var string $file_name */
/** @var \RecursiveDirectoryIterator $iterator */
if ($iterator->hasChildren() && $file->getFilename() !== '.git') {
return TRUE;
}
$skipped_files = [
self::DELETION_MANIFEST,
self::CHECKSUM_LIST,
];
return $file->isFile() && !in_array($file->getFilename(), $skipped_files, TRUE);
};
$innerIterator = new \RecursiveDirectoryIterator($directory, \RecursiveDirectoryIterator::SKIP_DOTS);
return new \RecursiveIteratorIterator(new \RecursiveCallbackFilterIterator($innerIterator, $filter));
}
/**
* Build a project quasi-patch download URL.
*
* @param string $project_name
* The project name.
* @param string $file_name
* The file name.
*
* @return string
* The URL endpoint for the extension.
*/
protected static function buildUrl($project_name, $file_name) {
$uri = ltrim(variable_get('automatic_updates_download_uri', 'https://www.drupal.org/in-place-updates'), '/');
return "$uri/$project_name/$file_name";
}
/**
* Get the quasi-patch file name.
*
* @param string $project_name
* The project name.
* @param string $from_version
* The current project version.
* @param string $to_version
* The desired next project version.
*
* @return string
* The quasi-patch file name.
*/
protected static function getQuasiPatchFileName($project_name, $from_version, $to_version) {
return "$project_name-$from_version-to-$to_version.zip";
}
/**
* Get file real path.
*
* @param \SplFileInfo $file
* The file to retrieve the real path.
*
* @return string
* The file real path.
*/
protected static function getFileRealPath(\SplFileInfo $file) {
$real_path = $file->getRealPath();
if (!$real_path) {
throw new \RuntimeException(sprintf('Could not get real path for "%s"', $file->getFilename()));
}
return $real_path;
}
/**
* Get the real path of a file.
*
* @param string $file_path
* The file path.
* @param string $project_root
* The project root directory.
*
* @return string
* The real path of a file.
*/
protected static function getProjectRealPath($file_path, $project_root) {
return rtrim($project_root, '/\\') . DIRECTORY_SEPARATOR . $file_path;
}
/**
* Provides the temporary extraction directory.
*
* @return string
* The temporary directory.
*/
protected static function getTempDirectory() {
if (!self::$tempDirectory) {
self::$tempDirectory = file_create_filename('automatic_updates-update', 'temporary://');
file_prepare_directory(self::$tempDirectory, FILE_CREATE_DIRECTORY);
self::$tempDirectory = drupal_realpath(self::$tempDirectory) . DIRECTORY_SEPARATOR;
}
return self::$tempDirectory;
}
/**
* Get an iterator of files to delete.
*
* @return \ArrayIterator
* Iterator of files to delete.
*/
protected static function getDeletions() {
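// The deletion manifest is a plain text file that lists one relative file path per line.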
$deletions = [];
if (!file_exists(self::getTempDirectory() . self::DELETION_MANIFEST)) {
return new \ArrayIterator($deletions);
}
$handle = fopen(self::getTempDirectory() . self::DELETION_MANIFEST, 'r');
if ($handle) {
while (($deletion = fgets($handle)) !== FALSE) {
if ($result = trim($deletion)) {
$deletions[] = $result;
}
}
fclose($handle);
}
return new \ArrayIterator($deletions);
}
/**
* Clear opcode cache on successful update.
*/
protected static function clearOpcodeCache() {
if (function_exists('opcache_reset')) {
opcache_reset();
}
}
}
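// A hypothetical invocation sketch (not part of this patch). Real callers are
// expected to obtain project and version data from Update Status; the values
// below are assumptions for illustration only.
$example_applied = InPlaceUpdate::update('drupal', 'core', '7.67', '7.68');
if (!$example_applied) {
  watchdog('automatic_updates', 'In-place update from @from to @to was not applied.', array('@from' => '7.67', '@to' => '7.68'), WATCHDOG_WARNING);
}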
<?php
use Drupal\Signify\ChecksumList;
use Drupal\Signify\FailedCheckumFilter;
use Drupal\Signify\Verifier;
/**
* Modified files service.
*/
class ModifiedFilesService {
/**
* ModifiedFilesService constructor.
*/
public function __construct() {
$project_root = drupal_get_path('module', 'automatic_updates');
require_once $project_root . DIRECTORY_SEPARATOR . 'vendor' . DIRECTORY_SEPARATOR . 'autoload.php';
}
/**
* {@inheritdoc}
*/
public static function getModifiedFiles(array $extensions = []) {
$modified_files = new \ArrayIterator();
foreach (static::getHashRequests($extensions) as $hash_info) {
$response = drupal_http_request(reset($hash_info));
$extension_name = key($hash_info);
if (isset($response->code) && ($response->code == 200)) {
static::processHashes($response->data, $extensions[$extension_name], $modified_files);
}
}
return $modified_files;
}
/**
* Process checking hashes of files from an external URL.
*
* @param string $data
* Response data.
* @param array $info
* Array of extension information.
* @param \ArrayIterator $modified_files
* The list of modified files.
*/
protected static function processHashes($data, array $info, \ArrayIterator $modified_files) {
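// Determine the directory the hashed paths are relative to, verify the Signify signature on the hash list, then flag every file whose checksum no longer matches the file on disk.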
$directory_root = $info['install path'];
if ($info['project'] === 'drupal') {
$directory_root = '';
}
$module_path = drupal_get_path('module', 'automatic_updates');
$key = file_get_contents($module_path . '/artifacts/keys/root.pub');
$verifier = new Verifier($key);
$files = $verifier->verifyCsigMessage($data);
$checksums = new ChecksumList($files, TRUE);
foreach (new FailedCheckumFilter($checksums, $directory_root) as $failed_checksum) {
$file_path = implode(DIRECTORY_SEPARATOR, array_filter([
$directory_root,
$failed_checksum->filename,
]));
if (!file_exists($file_path)) {
$modified_files->append($file_path);
continue;
}
$actual_hash = @hash_file(strtolower($failed_checksum->algorithm), $file_path);
if ($actual_hash === FALSE || empty($actual_hash) || strlen($actual_hash) < 64 || strcmp($actual_hash, $failed_checksum->hex_hash) !== 0) {
$modified_files->append($file_path);
}
}
}
/**
* Get an iterator of extension name and hash URL.
*
* @param array