Error connecting to LM Studio

I have LM Studio installed locally and I'm trying to use the OpenAI Python library to connect to it, but I'm getting a connection error. Here is my code; what is the problem?

import openai
import re
import getpass

openai.base_url = "http://127.0.0.1:1234"
openai.api_key = ""

def complete(prompt: str) -> str:
    try:
        response = openai.chat.completions.create(
            model="deepseek-r1-distill-qwen-7b",  # Ensure this matches your local model's name
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,  # Adjust as needed
            max_tokens=100  # Limit response length
        )
        return response.choices[0].message.content
    except Exception as e:
        print(f"Error: {e}")
        return ""

def main():
    user_input = "is it working"
    result = complete(user_input)
    print(result)

if __name__ == "__main__":
    main()


I don't know much about LM Studio, but I found a report of a similar error.

Try it using the OpenAI client:

from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:1234/v1",
    api_key="lm-studio",  # any placeholder works; LM Studio doesn't check the key
)

message = [{"role": "user", "content": "Hello"}]
response = client.chat.completions.create(
    temperature=1,
    messages=message,
    model="qwen/qwen3-4b-2507",
    seed=42,
)
print(response.choices[0].message.content)
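
The key difference from your original code is the /v1 suffix: LM Studio serves its OpenAI-compatible API under http://localhost:1234/v1, so pointing base_url at http://127.0.0.1:1234 alone will fail to connect. If it still errors, it can help to confirm the server is actually reachable and to check the exact model identifier it exposes. A minimal sketch, assuming LM Studio is running on its default port (the api_key value is a placeholder, since LM Studio does not validate it):

from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:1234/v1",
    api_key="lm-studio",  # placeholder; LM Studio ignores the key
)

# List the models the local server exposes; the `model` argument passed to
# chat.completions.create() must match one of these identifiers exactly.
for m in client.models.list():
    print(m.id)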