From b069d063466d09e94d71965f08abecc0a90ee32f Mon Sep 17 00:00:00 2001
From: kddubey
Date: Wed, 29 Nov 2023 02:39:52 -0800
Subject: [PATCH] Fix #891 (#952)

---
 llama_cpp/llama.py | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index 3cb07e5..8076548 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -214,6 +214,8 @@ class _LlamaModel:
     NOTE: For stability it's recommended you use the Llama class instead."""
 
     _llama_free_model = None
+    # NOTE: this must be "saved" here to avoid exceptions when calling __del__
+    suppress_stdout_stderr = suppress_stdout_stderr
 
     def __init__(
         self,
@@ -237,7 +239,7 @@ class _LlamaModel:
         )
 
     def __del__(self):
-        with suppress_stdout_stderr(disable=self.verbose):
+        with self.suppress_stdout_stderr(disable=self.verbose):
             if self.model is not None and self._llama_free_model is not None:
                 self._llama_free_model(self.model)
                 self.model = None
@@ -399,6 +401,8 @@ class _LlamaContext:
     NOTE: For stability it's recommended you use the Llama class instead."""
 
     _llama_free = None
+    # NOTE: this must be "saved" here to avoid exceptions when calling __del__
+    suppress_stdout_stderr = suppress_stdout_stderr
 
     def __init__(
         self,
@@ -419,7 +423,7 @@ class _LlamaContext:
         )
 
     def __del__(self):
-        with suppress_stdout_stderr(disable=self.verbose):
+        with self.suppress_stdout_stderr(disable=self.verbose):
             if self.ctx is not None and self._llama_free is not None:
                 self._llama_free(self.ctx)
                 self.ctx = None
@@ -650,6 +654,8 @@ class _LlamaContext:
 
 class _LlamaBatch:
     _llama_batch_free = None
+    # NOTE: this must be "saved" here to avoid exceptions when calling __del__
+    suppress_stdout_stderr = suppress_stdout_stderr
 
     def __init__(
         self, *, n_tokens: int, embd: int, n_seq_max: int, verbose: bool = True
@@ -667,7 +673,7 @@ class _LlamaBatch:
         )
 
     def __del__(self):
-        with suppress_stdout_stderr(disable=self.verbose):
+        with self.suppress_stdout_stderr(disable=self.verbose):
             if self.batch is not None and self._llama_batch_free is not None:
                 self._llama_batch_free(self.batch)
                 self.batch = None
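
The NOTE comments in the patch point at a CPython teardown detail: when the interpreter shuts down, module globals can be cleared before lingering objects are garbage-collected, so a __del__ that looks up a module-level name such as suppress_stdout_stderr may raise. Binding the helper as a class attribute makes it reachable through self, which resolves via the class rather than the module namespace. Below is a minimal, self-contained sketch of that pattern; the stub context manager and the _LlamaModelLike class are illustrative stand-ins, not the library's actual code.

class suppress_stdout_stderr:
    # Illustrative stand-in for llama_cpp's helper, which silences the
    # process-level stdout/stderr while llama.cpp frees native resources.
    def __init__(self, disable: bool = True):
        self.disable = disable

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False


class _LlamaModelLike:
    # Saving the helper as a class attribute keeps a reference that survives
    # module teardown: even if the module's globals have already been cleared
    # at interpreter shutdown, attribute lookup through the class still works.
    suppress_stdout_stderr = suppress_stdout_stderr

    def __init__(self, verbose: bool = True):
        self.verbose = verbose

    def __del__(self):
        # `self.suppress_stdout_stderr` resolves via the class dict, not the
        # (possibly already torn down) module namespace, so no NameError.
        with self.suppress_stdout_stderr(disable=self.verbose):
            pass  # free native handles here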