PHP code examples for cmatosbc/penelope

1. Go to this page and download the cmatosbc/penelope library, choosing the "require" download type (or install it with Composer: composer require cmatosbc/penelope).

2. Extract the ZIP file and open index.php.

3. Add this bootstrap code to index.php:

<?php
require_once('vendor/autoload.php');

/* Start developing here. */


cmatosbc/penelope example snippets


use Penelope\AsyncFileHandler;

// Create a handler instance
$handler = new AsyncFileHandler('large_file.txt', 'r');

// Synchronous read
$content = $handler->readSync();

// Asynchronous read
$fiber = $handler->readAsync();
$content = '';

$chunk = $fiber->start();
if ($chunk !== null) {
    $content .= $chunk;
}

while ($fiber->isSuspended()) {
    $chunk = $fiber->resume();
    if ($chunk !== null) {
        $content .= $chunk;
    }
}
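
The start/resume pattern above can be wrapped in a small helper so callers receive the whole content in one call. A minimal sketch, assuming only the AsyncFileHandler::readAsync() method and the fiber behaviour shown above (the helper name is illustrative):

use Penelope\AsyncFileHandler;

// Hypothetical helper: drains the fiber returned by readAsync() into a string
function readAllAsync(AsyncFileHandler $handler): string
{
    $fiber = $handler->readAsync();
    $content = '';

    // Start the fiber and collect the first chunk
    $chunk = $fiber->start();
    if ($chunk !== null) {
        $content .= $chunk;
    }

    // Keep resuming until the fiber finishes
    while ($fiber->isSuspended()) {
        $chunk = $fiber->resume();
        if ($chunk !== null) {
            $content .= $chunk;
        }
    }

    return $content;
}

$content = readAllAsync(new AsyncFileHandler('large_file.txt', 'r'));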

use Penelope\Compression\CompressionHandler;

// Create a compression handler (gzip, bzip2, or deflate)
$compression = new CompressionHandler('gzip', 6); // level 6 compression

// Compress data
$compressed = $compression->compress($data);

// Decompress data
$decompressed = $compression->decompress($compressed);

// Get file extension for compressed files
$extension = $compression->getFileExtension(); // Returns .gz for gzip
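
A short usage sketch tying these calls together, using only the methods shown above (the sample data and output file name are illustrative):

use Penelope\Compression\CompressionHandler;

$compression = new CompressionHandler('gzip', 6);
$original = str_repeat('Penelope example data. ', 1000);

// Compress and write to a file named with the handler's extension (data.gz here)
$compressed = $compression->compress($original);
file_put_contents('data' . $compression->getFileExtension(), $compressed);

// Round trip: decompression restores the original data
var_dump($compression->decompress($compressed) === $original); // bool(true)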

use Penelope\Error\ErrorHandler;
use Penelope\Error\RetryPolicy;
use Psr\Log\LoggerInterface;

// Create a retry policy with custom settings
$retryPolicy = new RetryPolicy(
    maxAttempts: 3,        // Maximum number of retry attempts
    delayMs: 100,          // Initial delay between retries in milliseconds
    backoffMultiplier: 2.0, // Multiplier for exponential backoff
    maxDelayMs: 5000       // Maximum delay between retries
);

// Create an error handler with custom logger (optional)
$errorHandler = new ErrorHandler($logger, $retryPolicy);

// Execute an operation with retry logic
try {
    $result = $errorHandler->executeWithRetry(
        function() {
            // Your operation here
            return $someResult;
        },
        'Reading file chunk'
    );
} catch (\RuntimeException $e) {
    // Handle final failure after all retries
}
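
As a concrete example, the retry wrapper can guard the synchronous read from the first snippet; a sketch, assuming the readSync() call shown earlier and a file name chosen for illustration:

use Penelope\AsyncFileHandler;
use Penelope\Error\ErrorHandler;
use Penelope\Error\RetryPolicy;

$retryPolicy = new RetryPolicy(maxAttempts: 3, delayMs: 100);
$errorHandler = new ErrorHandler(null, $retryPolicy); // null: no logger

try {
    $content = $errorHandler->executeWithRetry(
        function () {
            // readSync() is the synchronous read shown in the first snippet
            return (new AsyncFileHandler('large_file.txt', 'r'))->readSync();
        },
        'Reading large_file.txt'
    );
} catch (\RuntimeException $e) {
    // All retry attempts failed
    error_log($e->getMessage());
}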

use Penelope\AsyncFileHandler;
use Penelope\Compression\CompressionHandler;
use Penelope\Error\ErrorHandler;
use Penelope\Error\RetryPolicy;

// Set up handlers
$compression = new CompressionHandler('gzip');
$retryPolicy = new RetryPolicy(maxAttempts: 3);
$errorHandler = new ErrorHandler(null, $retryPolicy);
$fileHandler = new AsyncFileHandler('large_file.txt', 'r');

// Read and compress file with retry logic
$errorHandler->executeWithRetry(
    function() use ($fileHandler, $compression) {
        $fiber = $fileHandler->readAsync();
        $compressedContent = '';
        
        // Start reading
        $chunk = $fiber->start();
        if ($chunk !== null) {
            $compressedContent .= $compression->compress($chunk);
        }
        
        // Continue reading
        while ($fiber->isSuspended()) {
            $chunk = $fiber->resume();
            if ($chunk !== null) {
                $compressedContent .= $compression->compress($chunk);
            }
        }
        
        // Write compressed content
        file_put_contents('output.gz', $compressedContent);
    },
    'Compressing file'
);

$handler = new AsyncFileHandler('large_log.txt', 'r');
$fiber = $handler->readAsync();

// Process the file chunk by chunk without loading it entirely into memory
$chunk = $fiber->start();
if ($chunk !== null) {
    analyzeLogData($chunk);
}

while ($fiber->isSuspended()) {
    $chunk = $fiber->resume();
    if ($chunk !== null) {
        analyzeLogData($chunk);
    }
}
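
The chunks delivered by the fiber are not aligned to line boundaries, so if analyzeLogData() expects whole lines, a small buffer can carry the partial last line over to the next chunk. A minimal sketch, assuming analyzeLogData() takes a single line:

$handler = new AsyncFileHandler('large_log.txt', 'r');
$fiber = $handler->readAsync();
$buffer = '';

$chunk = $fiber->start();
while ($chunk !== null) {
    $buffer .= $chunk;

    // Hand complete lines to the analyzer; keep the trailing partial line buffered
    while (($pos = strpos($buffer, "\n")) !== false) {
        analyzeLogData(substr($buffer, 0, $pos));
        $buffer = substr($buffer, $pos + 1);
    }

    $chunk = $fiber->isSuspended() ? $fiber->resume() : null;
}

// Flush anything left after the final chunk
if ($buffer !== '') {
    analyzeLogData($buffer);
}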