Repeated bulk CSV import overloading server resources

Silverstripe Version: 4.0

Question: How can I best diagnose a performance issue with repeated bulk imports?

I have a very data-centric project which needs to import thousands of records. I’ve set up a batch process which takes 500 rows at a time and generates a file which is then fed into the batch import system using a custom importer. Somewhere along this chain there may be a memory leak or something which is slowly overloading the server’s capacity.
What I’ve seen is that the server is maxing out the CPU, and the time it takes for each batch to process is getting longer and longer. A restart seems to reset everything.

My importer looks something like this:

<?php
use SilverStripe\Dev\CsvBulkLoader;
class RecordLoader extends CsvBulkLoader {
    /**
     * Maps CSV headings to either a DataObject field (plain string) or a
     * callback on this loader ('->methodName').
     *
     * NOTE(review): both NAME and FORENAMES route to recordFound, so that
     * callback (and its json_encode of the whole row) runs twice per CSV
     * row — the second call just overwrites the first with the same value.
     */
    public $columnMap = [
        'NAME' => '->recordFound',
        'FORENAMES' => '->recordFound',
        'RECID' => 'RecID',
    ];

    /**
     * Resolve existing records via getDuplicate() keyed on the RECID column.
     */
    public $duplicateChecks = [
        'RecID' => ['callback' => 'getDuplicate']
    ];

    /**
     * Column callback: stash the entire raw CSV row on the record as JSON.
     *
     * @param DataObject $obj    Record being populated (by reference).
     * @param string     $val    Current column value (unused; the full row is stored).
     * @param array      $record Complete row as column => value.
     */
    public function recordFound(&$obj, $val, $record){
        $obj->FieldData = json_encode($record);
    }

    /**
     * Duplicate-check callback: find an existing Record for this RECID and type.
     *
     * FIX: the original passed a column => value array to where(), but in
     * SilverStripe 4 where() expects SQL predicate strings (e.g.
     * ['"RecID" = ?' => $recID]); the column => value shorthand belongs to
     * filter(). Using filter() gives a correct, parameterised lookup.
     *
     * @param string $recID  Value of the RECID column for the current row.
     * @param array  $record Complete row as column => value.
     * @return Record|null Existing record, or null when none matches.
     */
    public function getDuplicate($recID, $record){
        // NOTE(review): $this->recordTypeID is not declared in this class —
        // confirm it is set on the loader before load() is called.
        return Record::get()->filter([
            'RecID' => $recID,
            'RecordTypeID' => $this->recordTypeID,
        ])->first();
    }
}

My processing queue generates the CSV file content with this code:

    /**
     * Build a temp CSV containing the header row plus the next batch of
     * unprocessed data rows from the source file.
     *
     * @param string $path Path to the full source CSV.
     * @return resource Rewound tmpfile() handle; the file is deleted
     *                  automatically when the handle is closed.
     */
    private function getNextFile($path){
        $file = new SplFileObject($path);
        $newFileContent = '';
        // Read first row for headers (line 0).
        $newFileContent .= $file->current(); 
        $file->next();
        // Skip rows already handled. Data row N sits on line N (the header
        // occupies line 0), so the next unprocessed row is LinesProcessed + 1.
        // NOTE(review): assumes LinesProcessed counts data rows only — confirm.
        if($this->LinesProcessed > 0){
            // FIX: was seek($this->LinesProcessed), which re-read the last
            // processed row at the start of every batch.
            $file->seek($this->LinesProcessed + 1);
        }
        // Read up to the row limit or EOF.
        // FIX: was `$i = 1; ... $i < QueuedCSV::$row_limit`, which copied
        // only row_limit - 1 data rows per batch.
        for($i = 0; !$file->eof() && $i < QueuedCSV::$row_limit; $i++) {
            $newFileContent .= $file->current(); 
            $file->next();
        }
        // Save to a temp file and return the handle.
        $output = tmpfile();
        fwrite($output, $newFileContent);
        rewind($output);
        return $output;
    }

and then processes like so:

    /**
     * Resolve the filesystem path backing an open stream handle.
     *
     * @param resource $FH Open file handle (e.g. from tmpfile()).
     * @return string Path reported by the stream's metadata.
     */
    private function getFHPath($FH){
        return stream_get_meta_data($FH)['uri'];
    }
    /**
     * Run one batch temp file through the bulk loader.
     *
     * @param resource $file Open handle to the batch temp file.
     * @return bool Always true; load failures surface from the loader itself.
     */
    private function processFile($file) {
        $path = $this->getFHPath($file);
        // FIX: corrected the "Proccessing" typo and the invalid </br> tag
        // in the progress output.
        echo("Processing file: $path<br/>\n");
        $loader = $this->checkLoader();
        // FIX: the return value was assigned to an unused local ($results);
        // discard it explicitly instead.
        $loader->load($path);
        return true;
    }
    /**
     * Resolve the bulk loader to use for this queue entry: prefer a custom
     * getRecordLoader() hook when one is defined, otherwise instantiate the
     * configured loader class for the current import type.
     *
     * @return CsvBulkLoader Loader instance ready for load().
     */
    public function checkLoader(){
        return method_exists($this, 'getRecordLoader')
            ? $this->getRecordLoader()
            : new $this->BulkLoader($this->Type);
    }

Is there some cleanup I’m missing on my side?