Skip to content

Commit ae28b9e

Browse files
committed
fix the embedding models
1 parent b488cfc commit ae28b9e

File tree

8 files changed

+11
-8
lines changed

8 files changed

+11
-8
lines changed

samples/js/azure_ai_inference/embeddings.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ import { isUnexpected } from "@azure-rest/ai-inference";
33
import { AzureKeyCredential } from "@azure/core-auth";
44

55
const token = process.env["GITHUB_TOKEN"];
6-
const endpoint = "https://models.github.ai";
6+
const endpoint = "https://models.github.ai/inference";
77

88
/* By using the Azure AI Inference SDK, you can easily experiment with different models
99
by modifying the value of `modelName` in the code below. For this code sample

samples/js/openai/embeddings.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import OpenAI from "openai";
22

33
const token = process.env["GITHUB_TOKEN"];
4-
const endpoint = "https://models.github.ai";
4+
const endpoint = "https://models.github.ai/inference";
55

66
/* Pick one of the OpenAI embeddings models from the GitHub Models service */
77
const modelName = "text-embedding-3-small";

samples/python/azure_ai_inference/embeddings.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
from azure.core.credentials import AzureKeyCredential
55

66
token = os.environ["GITHUB_TOKEN"]
7-
endpoint = "https://models.github.ai"
7+
endpoint = "https://models.github.ai/inference"
88

99
# By using the Azure AI Inference SDK, you can easily experiment with different models
1010
# by modifying the value of `modelName` in the code below. For this code sample

samples/python/azure_ai_inference/getting_started.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@
5858
" raise ValueError(\"GITHUB_TOKEN is not set\")\n",
5959
"\n",
6060
"github_token = os.environ[\"GITHUB_TOKEN\"]\n",
61-
"endpoint = \"https://models.github.ai\"\n",
61+
"endpoint = \"https://models.github.ai/inference\"\n",
6262
"\n",
6363
"\n",
6464
"# Create a client\n",

samples/python/mistralai/getting_started.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@
5858
" raise ValueError(\"GITHUB_TOKEN is not set\")\n",
5959
"\n",
6060
"github_token = os.environ[\"GITHUB_TOKEN\"]\n",
61-
"endpoint = \"https://models.github.ai\"\n",
61+
"endpoint = \"https://models.github.ai/inference\"\n",
6262
"\n",
6363
"# Pick one of the Mistral models from the GitHub Models service\n",
6464
"model_name = \"Mistral-large\"\n",

samples/python/openai/embeddings.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
from openai import OpenAI
33

44
token = os.environ["GITHUB_TOKEN"]
5-
endpoint = "https://models.github.ai"
5+
endpoint = "https://models.github.ai/inference"
66

77
# Pick one of the OpenAI embeddings models from the GitHub Models service
88
model_name = "text-embedding-3-small"

samples/python/openai/embeddings_getting_started.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@
5555
" raise ValueError(\"GITHUB_TOKEN is not set\")\n",
5656
"\n",
5757
"os.environ[\"OPENAI_API_KEY\"] = os.getenv(\"GITHUB_TOKEN\")\n",
58-
"os.environ[\"OPENAI_BASE_URL\"] = \"https://models.github.ai/\"\n",
58+
"os.environ[\"OPENAI_BASE_URL\"] = \"https://models.github.ai/inference\"\n",
5959
"\n",
6060
"client = OpenAI()\n"
6161
]

samples/python/openai/multi_turn.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,9 @@
1616
client = OpenAI(
1717
base_url=endpoint,
1818
api_key=token,
19+
default_headers={
20+
"x-ms-useragent": "github-models-sample",
21+
}
1922
)
2023

2124
# Call the chat completion API
@@ -42,4 +45,4 @@
4245
)
4346

4447
# Print the response
45-
print(response.choices[0].message.content)
48+
print(response.choices[0].message.content)

0 commit comments

Comments (0)