1. Download the sqrtspace/spacetime library from this page, or install it directly with Composer: composer require sqrtspace/spacetime.
2. If you downloaded the ZIP file, extract it and open index.php.
3. Add this code to index.php:
<?php
require_once('vendor/autoload.php');
sqrtspace/spacetime example snippets
use SqrtSpace\SpaceTime\Collections\SpaceTimeArray;
use SqrtSpace\SpaceTime\Algorithms\ExternalSort;
// Handle large arrays with automatic memory management
$array = new SpaceTimeArray();
for ($i = 0; $i < 10000000; $i++) {
    $array[] = random_int(1, 1000000);
}
// Sort large datasets using only √n memory
$sorted = ExternalSort::sort($array);
// Process in optimal chunks
foreach ($array->chunkBySqrtN() as $chunk) {
    processChunk($chunk);
}
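processChunk() above is application code, not part of the library; a minimal hypothetical stand-in, just to make the snippet runnable, could be:
// Hypothetical helper (not part of the library): sum the integers in one chunk
function processChunk(iterable $chunk): void
{
    $sum = 0;
    foreach ($chunk as $value) {
        $sum += $value;
    }
    echo "Chunk total: $sum\n";
}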
use SqrtSpace\SpaceTime\Collections\SpaceTimeArray;
use SqrtSpace\SpaceTime\Collections\AdaptiveDictionary;
// Adaptive array - automatically switches between memory and disk
$array = new SpaceTimeArray();
$array->setThreshold(10000); // Switch to external storage after 10k items
// Adaptive dictionary with optimal memory usage
$dict = new AdaptiveDictionary();
for ($i = 0; $i < 1000000; $i++) {
    $dict["key_$i"] = "value_$i";
}
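The snippet above only shows writes; assuming the dictionary also supports array-style reads (an assumption, since reads are not shown), stored values can be fetched back by key:
// Assumes array-style reads mirror the writes shown above
echo $dict['key_500'], "\n";          // "value_500"
var_dump(isset($dict['key_999999'])); // bool(true) if offsetExists is supported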
use SqrtSpace\SpaceTime\Algorithms\ExternalSort;
use SqrtSpace\SpaceTime\Algorithms\ExternalGroupBy;
// Sort millions of records using minimal memory
$data = getData(); // Large dataset
$sorted = ExternalSort::sort($data, fn($a, $b) => $a['date'] <=> $b['date']);
// Group by with external storage
$grouped = ExternalGroupBy::groupBy($data, fn($item) => $item['category']);
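Assuming the grouped result can be iterated as category => rows (the snippet does not show how it is consumed, so treat the shape as an assumption), a per-category summary might look like:
// Assumes $grouped yields category => array of rows
foreach ($grouped as $category => $items) {
    echo $category . ': ' . count($items) . " rows\n";
}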
use SqrtSpace\SpaceTime\Streams\SpaceTimeStream;
// Process large files with bounded memory
$stream = SpaceTimeStream::fromFile('large_file.csv')
    ->map(fn($line) => str_getcsv($line))
    ->filter(fn($row) => $row[2] > 100)
    ->chunkBySqrtN()
    ->each(function ($chunk) {
        processBatch($chunk);
    });
use SqrtSpace\SpaceTime\Database\SpaceTimeQueryBuilder;
// Process large result sets efficiently
$query = new SpaceTimeQueryBuilder($pdo);
$query->from('orders')
    ->where('status', '=', 'pending')
    ->orderByExternal('created_at', 'desc')
    ->chunkBySqrtN(function ($orders) {
        foreach ($orders as $order) {
            processOrder($order);
        }
    });
// Stream results for minimal memory usage
$stream = $query->from('logs')
    ->where('level', '=', 'error')
    ->stream();

$stream->filter(fn($log) => strpos($log['message'], 'critical') !== false)
    ->each(fn($log) => alertAdmin($log));
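alertAdmin() (like processOrder() above) is an application-side placeholder; a minimal hypothetical version, purely for illustration:
// Hypothetical helper (not part of the library): forward a critical log entry
function alertAdmin(array $log): void
{
    error_log('[CRITICAL] ' . $log['message']);
}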
// In AppServiceProvider
use SqrtSpace\SpaceTime\Laravel\SpaceTimeServiceProvider;
public function register()
{
    $this->app->register(SpaceTimeServiceProvider::class);
}
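Depending on the Laravel version, the provider can instead be listed in the providers array rather than registered manually; a sketch for a classic config/app.php setup (standard Laravel configuration, not specific to this package):
// config/app.php
'providers' => [
    // ...
    SqrtSpace\SpaceTime\Laravel\SpaceTimeServiceProvider::class,
],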
// Collection macros
$collection = collect($largeArray);
// Sort using external memory
$sorted = $collection->sortByExternal('price');
// Group by with external storage
$grouped = $collection->groupByExternal('category');
// Process in √n chunks
$collection->chunkBySqrtN()->each(function ($chunk) {
    processBatch($chunk);
});
// Query builder extensions
DB::table('orders')
    ->chunkBySqrtN(function ($orders) {
        foreach ($orders as $order) {
            processOrder($order);
        }
    });
use SqrtSpace\SpaceTime\Memory\MemoryPressureMonitor;
use SqrtSpace\SpaceTime\Memory\MemoryPressureLevel; // namespace assumed to match MemoryPressureMonitor
use SqrtSpace\SpaceTime\Memory\Handlers\LoggingHandler;
use SqrtSpace\SpaceTime\Memory\Handlers\CacheEvictionHandler;
use SqrtSpace\SpaceTime\Memory\Handlers\GarbageCollectionHandler;
$monitor = new MemoryPressureMonitor('512M');
// Add handlers
$monitor->registerHandler(new LoggingHandler($logger));
$monitor->registerHandler(new CacheEvictionHandler());
$monitor->registerHandler(new GarbageCollectionHandler());
// Check pressure in your operations
if ($monitor->check() === MemoryPressureLevel::HIGH) {
    // Switch to more aggressive memory saving
    $processor->useExternalStorage();
}
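A sketch of wiring the pressure check into a long-running loop; $records and $processor->handle() are hypothetical application code, and only $monitor->check() and MemoryPressureLevel come from the snippet above:
$processed = 0;
foreach ($records as $record) {
    $processor->handle($record); // hypothetical application work

    // Re-check memory pressure every 1,000 records
    if (++$processed % 1000 === 0 && $monitor->check() === MemoryPressureLevel::HIGH) {
        $processor->useExternalStorage();
    }
}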
use SqrtSpace\SpaceTime\Checkpoint\CheckpointManager;
$checkpoint = new CheckpointManager('import_job_123');
foreach ($largeDataset->chunkBySqrtN() as $chunk) {
    processChunk($chunk);

    // Save progress every √n items
    if ($checkpoint->shouldCheckpoint()) {
        $checkpoint->save([
            'processed' => $processedCount,
            'last_id' => $lastId
        ]);
    }
}
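$processedCount and $lastId are bookkeeping maintained by the caller, not by the library; one way to track them around the same loop (a sketch, assuming each item exposes an 'id' field):
$processedCount = 0;
$lastId = null;

foreach ($largeDataset->chunkBySqrtN() as $chunk) {
    processChunk($chunk);

    foreach ($chunk as $item) {
        $processedCount++;
        $lastId = $item['id']; // assumes items carry an 'id' field
    }

    if ($checkpoint->shouldCheckpoint()) {
        $checkpoint->save(['processed' => $processedCount, 'last_id' => $lastId]);
    }
}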
use SqrtSpace\SpaceTime\File\CsvReader;
use SqrtSpace\SpaceTime\Algorithms\ExternalGroupBy;
$reader = new CsvReader('sales_data.csv');
// Get column statistics
$stats = $reader->getColumnStats('amount');
echo "Average order: $" . $stats['avg'];
// Process with type conversion
$totals = $reader->readWithTypes([
    'amount' => 'float',
    'quantity' => 'int',
    'date' => 'date'
])->reduce(function ($totals, $row) {
    $month = $row['date']->format('Y-m');
    $totals[$month] = ($totals[$month] ?? 0) + $row['amount'];
    return $totals;
}, []);
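The reduce above builds $totals as an array of 'YYYY-MM' => summed amount, so printing a monthly report needs only plain PHP:
ksort($totals); // 'YYYY-MM' keys sort chronologically
foreach ($totals as $month => $amount) {
    echo $month . ': $' . number_format($amount, 2) . "\n";
}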
use SqrtSpace\SpaceTime\File\CsvExporter;
use SqrtSpace\SpaceTime\Database\SpaceTimeQueryBuilder;
$exporter = new CsvExporter('users_export.csv');
$query = new SpaceTimeQueryBuilder($pdo);
// Export with headers
$exporter->writeHeaders(['ID', 'Name', 'Email', 'Created At']);
// Stream data directly to CSV
$query->from('users')
    ->orderBy('created_at', 'desc')
    ->chunkBySqrtN(function ($users) use ($exporter) {
        $exporter->writeRows(array_map(function ($user) {
            return [
                $user['id'],
                $user['name'],
                $user['email'],
                $user['created_at']
            ];
        }, $users));
    });
echo "Exported " . number_format($exporter->getBytesWritten()) . " bytes\n";