File: HashFiles.php
<?php
/*
 * This file is part of the ManageWP Worker plugin.
 *
 * (c) ManageWP LLC <contact@managewp.com>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

class MWP_Action_IncrementalBackup_HashFiles extends MWP_Action_IncrementalBackup_AbstractFiles
{
    // 100 MB: files larger than this are hashed with a system md5 command when possible
    const UNIX_HASH_THRESHOLD = 104857600;

    // 100 KB: default chunk size for partial (chunked) hashing
    const MAX_CHUNK_SIZE = 102400;

    public function execute(array $params = array(), MWP_Worker_Request $request)
    {
        $hashComputer = new MWP_IncrementalBackup_HashComputer();

        /**
         * Each entry in $params['files'] is structured like:
         * [
         *     "path"                => file path relative to ABSPATH,
         *     "pathEncoded"         => whether the path is URL-encoded (boolean),
         *     "size"                => file size in bytes, sent for reference,
         *     "offset"              => number of bytes to skip before hashing starts (integer, optional, default 0),
         *     "limit"               => number of bytes to hash, where 0 means no limit (integer, optional, default 0),
         *     "forcePartialHashing" => always hash in chunks instead of using md5_file() (boolean, optional, default false),
         * ]
         */
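        // Illustrative request payload (the field values below are hypothetical,
        // shown only to make the structure above concrete):
        //
        // $params = array(
        //     'maxChunkByteSize' => 102400,
        //     'files'            => array(
        //         array(
        //             'path'        => 'wp-content/uploads/2024/01/photo.jpg',
        //             'pathEncoded' => false,
        //             'size'        => 524288,
        //         ),
        //     ),
        // );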
        $files = $params['files'];
        $result = array();

        // Allow overriding the maximum chunk byte size per request when doing partial hashes
        $chunkByteSize = isset($params['maxChunkByteSize']) ? $params['maxChunkByteSize'] : self::MAX_CHUNK_SIZE;
        $hashComputer->setMaxChunkByteSize($chunkByteSize);

        $unixHashThreshold = isset($params['unixMd5Threshold']) ? $params['unixMd5Threshold'] : self::UNIX_HASH_THRESHOLD;

        foreach ($files as $file) {
            $relativePath = $file['path'];
            $size = $file['size'];
            $offset = isset($file['offset']) ? $file['offset'] : 0;
            $limit = isset($file['limit']) ? $file['limit'] : 0;
            $forcePartial = isset($file['forcePartialHashing']) ? $file['forcePartialHashing'] : false;

            $decodedRelativePath = $file['pathEncoded'] ? $this->pathDecode($relativePath) : $relativePath;
            $realPath = $this->getRealPath($decodedRelativePath);

            // Run a unix command to generate the md5 hash if the file size exceeds the threshold.
            // Partial (offset/limit) requests skip this shortcut because chunked reads of big files are slow.
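            // Note: computeUnixMd5Sum() presumably shells out to a system utility
            // such as md5sum and returns null when the command is unavailable or
            // fails; this is an assumption based on the null check below, not on
            // the helper's source.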
            if ($size > $unixHashThreshold && $offset === 0 && $limit === 0) {
                $hash = $hashComputer->computeUnixMd5Sum($realPath);

                if ($hash !== null) {
                    $result[] = array(
                        'path' => $relativePath,
                        'hash' => $hash,
                    );

                    continue;
                }
                // If unix hashing failed, fall through below and compute the md5 hash in PHP
            }

            $hash = $hashComputer->computeMd5Hash($realPath, $offset, $limit, $forcePartial);
            $result[] = array(
                'path' => $relativePath,
                'hash' => $hash,
            );
        }

        return $this->createResult(array(
            'files' => $result,
        ));
    }
}
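
/*
 * For reference, a minimal sketch of the chunked hashing that
 * MWP_IncrementalBackup_HashComputer::computeMd5Hash() presumably performs.
 * This illustrates the offset/limit semantics documented above; it is not
 * the helper's actual source, and the function name is hypothetical.
 *
 *     function sketchPartialMd5($path, $offset = 0, $limit = 0, $chunkSize = 102400)
 *     {
 *         $handle = fopen($path, 'rb');
 *         if ($handle === false) {
 *             return null;
 *         }
 *         if ($offset > 0) {
 *             fseek($handle, $offset);
 *         }
 *         $context = hash_init('md5');
 *         $remaining = $limit > 0 ? $limit : PHP_INT_MAX;
 *         while ($remaining > 0 && !feof($handle)) {
 *             $chunk = fread($handle, min($chunkSize, $remaining));
 *             if ($chunk === false || $chunk === '') {
 *                 break;
 *             }
 *             hash_update($context, $chunk);
 *             $remaining -= strlen($chunk);
 *         }
 *         fclose($handle);
 *         // Hex md5 digest of the selected byte range
 *         return hash_final($context);
 *     }
 */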