chore: update experimental runtime to pytorch/pytorch:2.5.1-cuda12.4-cudnn9-runtime
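
Bump the experimental python:v3.11cuda runtime image from
pytorch/pytorch:2.4.0-cuda12.1-cudnn9-runtime to
pytorch/pytorch:2.5.1-cuda12.4-cudnn9-runtime and pin the Mistral
dependencies (huggingface_hub, accelerate, protobuf, sentencepiece,
mistral_inference, transformers) in the runtime's requirements.txt, so the
ad-hoc pip installs in mistral.py setup() can be commented out. While setup
is still downloading the ~14GB model, main() now answers with a 202
"setup in progress" response instead of joining the raw setup log into the
body. Also ignore the proxy-experimental binary (now removed from the repo)
and .param.json.
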
diff --git a/.gitignore b/.gitignore
index ccd40df..064a9fa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -35,6 +35,7 @@
 
 # Go binary proxy
 proxy
+proxy-experimental
 actionloop/proxy
 golang1.19/proxy
 golang1.20/proxy
@@ -91,6 +92,7 @@
 .env
 .env.nuv
 .env.src
+.param.json
 
 openserverless-runtimes
 
diff --git a/packages/python/mistral.py b/packages/python/mistral.py
index 0d45a61..592185b 100644
--- a/packages/python/mistral.py
+++ b/packages/python/mistral.py
@@ -19,43 +19,48 @@
 #--kind python:default
 
 from subprocess import run
-import os
+import os, io
 
-def login(args, status):
+def login(args, status: io.TextIOWrapper):
     from huggingface_hub import login, whoami
     try:
         whoami()
-        status.write("already logged in")
+        status.write("already logged in\n")
         return True
     except:
        try:
           login(token=args.get("hf_token", ""))
-          status.write("logged in")
+          status.write("logged in\n")
           return True
        except:
-          status.write("cannot log in - did you provide a correct hf_token?")
+          status.write("cannot log in - did you provide a correct hf_token?\n")
           return False
 
-def setup(args, status):
-    status.write("installing huggingface_hub")
-    run(["pip", "install", "huggingface_hub"])
-    status.write("installing accelerate")  
-    run(["pip", "install", "accelerate"])
-    status.write("installing protobuf")  
-    run(["pip", "install", "protobuf"])
-    status.write("installing sentencepiece")
-    run(["pip", "install", "sentencepiece"])
-    status.write("installing mistral_inference")
-    run(["pip", "install", "mistral_inference"])
+def setup(args, status: io.TextIOWrapper):
+    #status.write("installing huggingface_hub")
+    #run(["pip", "install", "huggingface_hub"])
+    #status.write("installing accelerate")  
+    #run(["pip", "install", "accelerate"])
+    #status.write("installing protobuf")  
+    #run(["pip", "install", "protobuf"])
+    #status.write("installing sentencepiece")
+    #run(["pip", "install", "sentencepiece"])
+    #status.write("installing mistral_inference")
+    #run(["pip", "install", "mistral_inference"])
     if login(args, status):
-        status.write("downloading mistral model - it is 14GB be patient!")
+        status.write("downloading mistral model - it is 14GB be patient!\n")
         from transformers import pipeline
         pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3")
+        status.write("mistral model - downloaded\n")
 
 def main(args):
+    
+    print(repr(args))
+
     if "setup_status" in args:
-        res = "\n".join(args['setup_status'])
-        return { "body": res }
+        res = str(args['setup_status']).split("\n")
+        print(repr(res))
+        return { "body": "setup in progress", "statusCode": 202 }
     
     from huggingface_hub import  whoami 
     return {
diff --git a/proxy-experimental b/proxy-experimental
deleted file mode 100755
index e8d3bc9..0000000
--- a/proxy-experimental
+++ /dev/null
Binary files differ
diff --git a/runtime/experimental/python/v3.11cuda/Dockerfile b/runtime/experimental/python/v3.11cuda/Dockerfile
index 8bc1ea5..2886aee 100644
--- a/runtime/experimental/python/v3.11cuda/Dockerfile
+++ b/runtime/experimental/python/v3.11cuda/Dockerfile
@@ -19,7 +19,7 @@
 ARG COMMON=missing:missing
 FROM ${COMMON} AS builder
 
-FROM pytorch/pytorch:2.4.0-cuda12.1-cudnn9-runtime
+FROM pytorch/pytorch:2.5.1-cuda12.4-cudnn9-runtime
 ENV OW_EXECUTION_ENV=apacheopenserverless/runtime-python-v3.11
 COPY --from=builder /go/bin/proxy /bin/proxy
 
diff --git a/runtime/experimental/python/v3.11cuda/requirements.txt b/runtime/experimental/python/v3.11cuda/requirements.txt
index e55f5aa..6e9a964 100644
--- a/runtime/experimental/python/v3.11cuda/requirements.txt
+++ b/runtime/experimental/python/v3.11cuda/requirements.txt
@@ -15,3 +15,9 @@
 pymongo==4.4.1
 minio==7.1.16
 auth0-python==4.6.0
+huggingface_hub==0.26.2
+accelerate==1.1.1
+protobuf==5.28.3
+sentencepiece==0.2.0
+mistral_inference==1.5.0
+transformers==4.46.3
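
For reviewers: with the 202 "setup in progress" response, a client can poll the
action until the model download has finished. A minimal sketch, assuming the
action is exposed as a web action; the URL below is a placeholder and not part
of this change:

    import time
    import requests  # client-side dependency, assumed available

    ACTION_URL = "https://example.com/api/v1/web/guest/python/mistral"  # placeholder URL

    def wait_for_setup(url: str, timeout_s: int = 3600) -> str:
        """Poll until the action stops answering 202 (i.e. setup has finished)."""
        deadline = time.time() + timeout_s
        while time.time() < deadline:
            resp = requests.get(url)
            if resp.status_code != 202:
                return resp.text
            time.sleep(30)  # the model is ~14GB, so poll slowly
        raise TimeoutError("setup did not finish in time")

    print(wait_for_setup(ACTION_URL))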