Merge branch 'PHP-8.2' into PHP-8.3

* PHP-8.2:
  Fix GH-13071: Copying large files using mmap-able source streams may exhaust available memory and fail
Niels Dossche 2024-01-16 23:46:43 +01:00
commit 47454cb771
6 changed files with 80 additions and 8 deletions

NEWS
View file

@@ -36,6 +36,10 @@ PHP NEWS
 - Standard:
   . Fixed bug GH-13094 (range(9.9, '0') causes segmentation fault). (nielsdos)
 
+- Streams:
+  . Fixed bug GH-13071 (Copying large files using mmap-able source streams may
+    exhaust available memory and fail). (nielsdos)
+
 18 Jan 2024, PHP 8.3.2
 
 - Core:

View file

@@ -0,0 +1,55 @@
+--TEST--
+GH-13071 (Copying large files using mmap-able source streams may exhaust available memory and fail)
+--FILE--
+<?php
+
+class CustomStream {
+    public $context;
+    protected $file;
+    protected $seekable;
+
+    public static int $writes = 0;
+
+    public function stream_open($path, $mode, $options, &$opened_path) {
+        $path = $this->trim_path($path);
+        $this->file = fopen($path, $mode);
+        return true;
+    }
+
+    public function stream_close() {
+        fclose($this->file);
+        return true;
+    }
+
+    public function stream_write($data) {
+        self::$writes++;
+        return fwrite($this->file, $data);
+    }
+
+    public function url_stat($path, $flags) {
+        return false;
+    }
+
+    private function trim_path(string $path): string {
+        return substr($path, strlen("up://"));
+    }
+}
+
+file_put_contents(__DIR__ . "/gh13071.tmp", str_repeat("a", 1024 * 1024 * 8));
+
+stream_wrapper_register("up", CustomStream::class, STREAM_IS_URL);
+
+$old_limit = ini_get("memory_limit");
+ini_set("memory_limit", memory_get_usage(true) + 5 * 1024 * 1024);
+copy(__DIR__ . "/gh13071.tmp", "up://" . __DIR__ . "/gh13071.out.tmp");
+ini_set("memory_limit", $old_limit);
+
+echo "Done ", CustomStream::$writes, " writes\n";
+?>
+--CLEAN--
+<?php
+@unlink(__DIR__ . "/gh13071.tmp");
+@unlink(__DIR__ . "/gh13071.out.tmp");
+?>
+--EXPECT--
+Done 1024 writes
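
For reference, the expected count follows from PHP's default stream chunk size of 8192 bytes: the 8 MiB source now reaches the wrapper's stream_write() in 8192-byte pieces, so it is called 8 * 1024 * 1024 / 8192 = 1024 times. A standalone sketch of that arithmetic, written in C rather than PHP and not part of the test (the 8192-byte default is the only assumption):

    /* Counts how many chunk-sized writes an 8 MiB buffer produces. */
    #include <stdio.h>
    #include <stddef.h>

    int main(void) {
        size_t remaining = (size_t)8 * 1024 * 1024; /* str_repeat("a", 1024 * 1024 * 8) */
        const size_t chunk_size = 8192;             /* assumed default stream chunk size */
        unsigned writes = 0;

        while (remaining > 0) {
            size_t n = remaining < chunk_size ? remaining : chunk_size;
            remaining -= n;
            writes++;                               /* one stream_write() call per chunk */
        }

        printf("Done %u writes\n", writes);         /* prints: Done 1024 writes */
        return 0;
    }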

View file

@@ -34,5 +34,6 @@ bool(true)
 option: 3, 2, 50
 int(-1)
 int(8192)
-size: 70
+size: 42
+size: 28
 int(70)

View file

@@ -39,4 +39,5 @@ option: %d, %d, %d
 int(%i)
 int(%d)
 size: %d
+size: 28
 int(%d)

View file

@@ -35,7 +35,7 @@ echo "should return previous chunk size (8192)\n";
 var_dump(stream_set_chunk_size($f, 1));
 echo "should be read without buffer (\$count == 10000)\n";
 var_dump(strlen(fread($f, 10000)));
-echo "should have no effect on writes\n";
+echo "should elicit 3 writes\n";
 var_dump(fwrite($f, str_repeat('b', 3)));
 echo "should return previous chunk size (1)\n";
@@ -46,7 +46,7 @@ echo "should elicit one read of size 100 (chunk size)\n";
 var_dump(strlen(fread($f, 50)));
 echo "should elicit no read because there is sufficient cached data\n";
 var_dump(strlen(fread($f, 50)));
-echo "should have no effect on writes\n";
+echo "should elicit 3 writes\n";
 var_dump(strlen(fwrite($f, str_repeat('b', 250))));
 echo "\nerror conditions\n";
@@ -68,8 +68,10 @@ int(8192)
 should be read without buffer ($count == 10000)
 read with size: 10000
 int(10000)
-should have no effect on writes
-write with size: 3
+should elicit 3 writes
+write with size: 1
+write with size: 1
+write with size: 1
 int(3)
 should return previous chunk size (1)
 int(1)
@@ -81,8 +83,10 @@ read with size: 100
 int(50)
 should elicit no read because there is sufficient cached data
 int(50)
-should have no effect on writes
-write with size: 250
+should elicit 3 writes
+write with size: 100
+write with size: 100
+write with size: 50
 int(3)
 
 error conditions
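
The updated expectations above follow from the same chunking: with a chunk size of 1, the 3-byte fwrite() reaches the user wrapper as three 1-byte writes, and with a chunk size of 100, the 250-byte fwrite() arrives as 100 + 100 + 50. A standalone C sketch of that split, illustrative only and not part of the test:

    /* Prints the per-call sizes a single buffered write is split into. */
    #include <stdio.h>
    #include <stddef.h>

    static void show_split(size_t count, size_t chunk_size) {
        while (count > 0) {
            size_t n = count < chunk_size ? count : chunk_size;
            printf("write with size: %zu\n", n);
            count -= n;
        }
    }

    int main(void) {
        show_split(3, 1);     /* write with size: 1 (three times) */
        show_split(250, 100); /* write with size: 100, 100, 50 */
        return 0;
    }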

View file

@@ -1153,8 +1153,15 @@ static ssize_t _php_stream_write_buffer(php_stream *stream, const char *buf, size_t count)
 	bool old_eof = stream->eof;
 
+	/* See GH-13071: userspace stream is subject to the memory limit. */
+	size_t chunk_size = count;
+	if (php_stream_is(stream, PHP_STREAM_IS_USERSPACE)) {
+		/* If the stream is unbuffered, we can only write one byte at a time. */
+		chunk_size = stream->chunk_size;
+	}
+
 	while (count > 0) {
-		ssize_t justwrote = stream->ops->write(stream, buf, count);
+		ssize_t justwrote = stream->ops->write(stream, buf, MIN(chunk_size, count));
 		if (justwrote <= 0) {
 			/* If we already successfully wrote some bytes and a write error occurred
 			 * later, report the successfully written bytes. */
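
For context, the hunk above caps each call into the stream's write op at chunk_size bytes when the destination is a userspace stream, so a single huge buffer (such as a fully mmap'ed source file) is never handed to PHP code in one piece, and a failure partway through still reports the bytes already written. A standalone C sketch of that pattern, illustrative only (failing_sink and write_all_chunked are not php-src symbols):

    #include <stdio.h>
    #include <stddef.h>
    #include <sys/types.h>   /* ssize_t */

    #ifndef MIN
    #define MIN(a, b) ((a) < (b) ? (a) : (b))
    #endif

    /* Example sink: accepts bytes until a fixed capacity runs out, then errors. */
    static size_t sink_capacity = 200;

    static ssize_t failing_sink(const char *buf, size_t len) {
        (void)buf;
        if (sink_capacity == 0) {
            return -1;                            /* simulated write error */
        }
        size_t accepted = MIN(len, sink_capacity);
        sink_capacity -= accepted;
        return (ssize_t)accepted;
    }

    static ssize_t write_all_chunked(const char *buf, size_t count, size_t chunk_size) {
        ssize_t didwrite = 0;

        while (count > 0) {
            /* Never hand the sink more than chunk_size bytes at once. */
            ssize_t justwrote = failing_sink(buf, MIN(chunk_size, count));
            if (justwrote <= 0) {
                /* A later chunk failed: report what was already written. */
                return didwrite > 0 ? didwrite : justwrote;
            }
            buf += justwrote;
            count -= (size_t)justwrote;
            didwrite += justwrote;
        }
        return didwrite;
    }

    int main(void) {
        static const char data[250];
        /* 250 bytes in 100-byte chunks against a 200-byte sink: reports 200. */
        printf("wrote %zd bytes\n", write_all_chunked(data, sizeof data, 100));
        return 0;
    }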