| import subprocess |
| import select |
| import os |
| from mods.btn_reset import restart_space |
|
|
# One-time setup: fetch the GGUF model weights and the prompt-cache dataset
# from the Hugging Face Hub when the model file is not already present.
if not os.path.exists('models/causallm_14b.Q4_0.gguf'):
    from huggingface_hub import snapshot_download

    # makedirs(..., exist_ok=True) instead of mkdir: a plain mkdir raises
    # FileExistsError when the directory already exists but the model file
    # is missing (e.g. after an interrupted download), aborting the retry.
    os.makedirs("models", exist_ok=True)
    os.makedirs("cache", exist_ok=True)
    snapshot_download(repo_id='TheBloke/CausalLM-14B-GGUF', local_dir=r'models',
                      allow_patterns='causallm_14b.Q4_0.gguf')
    snapshot_download(repo_id='Limour/llama-python-streamingllm-cache',
                      repo_type='dataset', local_dir=r'cache',
                      allow_patterns='f051fe319ada24080ea38869dba6bdfac79d5a84')
|
|
try:
    # Launch the Gradio app as a child process with line-buffered text pipes
    # so its output can be relayed in near real time.
    process = subprocess.Popen(["python", "gradio_streamingllm.py"],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               bufsize=1, universal_newlines=True)

    # Relay stdout/stderr while the child is alive. A stream that reaches
    # EOF (readline() returns '') must be removed from the watch list:
    # select() reports a closed pipe as permanently ready, so keeping it
    # would spin this loop at full CPU printing bogus messages.
    streams = [process.stdout, process.stderr]
    while process.poll() is None and streams:
        # 1-second timeout so the poll() liveness check runs even when the
        # child is silent.
        ready_reads, _, _ = select.select(streams, [], [], 1.0)
        for ready in ready_reads:
            line = ready.readline()
            if line:
                print(line, end='')
            else:
                # EOF on this stream: stop watching it.
                streams.remove(ready)

    # Ensure returncode is set even if both pipes closed before exit.
    process.wait()

    # Drain whatever output remained buffered after the child exited.
    for line in process.stdout.readlines() + process.stderr.readlines():
        print(line, end='')

    if process.returncode == 0:
        print("Process has terminated successfully.")
    else:
        print(f"Process has terminated with an error. {process.returncode}")

finally:
    # Do not leak the child on exceptions or Ctrl-C: terminate it if the
    # launch succeeded and it is still running.
    if 'process' in globals() and process.poll() is None:
        process.terminate()
|
|