File tree Expand file tree Collapse file tree 8 files changed +11
-8
lines changed
Expand file tree Collapse file tree 8 files changed +11
-8
lines changed Original file line number Diff line number Diff line change @@ -3,7 +3,7 @@ import { isUnexpected } from "@azure-rest/ai-inference";
33import { AzureKeyCredential } from "@azure/core-auth" ;
44
55const token = process . env [ "GITHUB_TOKEN" ] ;
6- const endpoint = "https://models.github.ai" ;
6+ const endpoint = "https://models.github.ai/inference" ;
77
88/* By using the Azure AI Inference SDK, you can easily experiment with different models
99 by modifying the value of `modelName` in the code below. For this code sample
Original file line number Diff line number Diff line change 11import OpenAI from "openai" ;
22
33const token = process . env [ "GITHUB_TOKEN" ] ;
4- const endpoint = "https://models.github.ai" ;
4+ const endpoint = "https://models.github.ai/inference" ;
55
66/* Pick one of the OpenAI embeddings models from the GitHub Models service */
77const modelName = "text-embedding-3-small" ;
Original file line number Diff line number Diff line change 44from azure .core .credentials import AzureKeyCredential
55
66token = os .environ ["GITHUB_TOKEN" ]
7- endpoint = "https://models.github.ai"
7+ endpoint = "https://models.github.ai/inference"
88
99# By using the Azure AI Inference SDK, you can easily experiment with different models
1010# by modifying the value of `modelName` in the code below. For this code sample
Original file line number Diff line number Diff line change 5858 " raise ValueError(\" GITHUB_TOKEN is not set\" )\n " ,
5959 " \n " ,
6060 " github_token = os.environ[\" GITHUB_TOKEN\" ]\n " ,
61- " endpoint = \" https://models.github.ai\"\n " ,
61+ " endpoint = \" https://models.github.ai/inference\"\n " ,
6262 " \n " ,
6363 " \n " ,
6464 " # Create a client\n " ,
Original file line number Diff line number Diff line change 5858 " raise ValueError(\" GITHUB_TOKEN is not set\" )\n " ,
5959 " \n " ,
6060 " github_token = os.environ[\" GITHUB_TOKEN\" ]\n " ,
61- " endpoint = \" https://models.github.ai\"\n " ,
61+ " endpoint = \" https://models.github.ai/inference\"\n " ,
6262 " \n " ,
6363 " # Pick one of the Mistral models from the GitHub Models service\n " ,
6464 " model_name = \" Mistral-large\"\n " ,
Original file line number Diff line number Diff line change 22from openai import OpenAI
33
44token = os .environ ["GITHUB_TOKEN" ]
5- endpoint = "https://models.github.ai"
5+ endpoint = "https://models.github.ai/inference"
66
77# Pick one of the OpenAI embeddings models from the GitHub Models service
88model_name = "text-embedding-3-small"
Original file line number Diff line number Diff line change 5555 " raise ValueError(\" GITHUB_TOKEN is not set\" )\n " ,
5656 " \n " ,
5757 " os.environ[\" OPENAI_API_KEY\" ] = os.getenv(\" GITHUB_TOKEN\" )\n " ,
58- " os.environ[\" OPENAI_BASE_URL\" ] = \" https://models.github.ai/\"\n " ,
58+ " os.environ[\" OPENAI_BASE_URL\" ] = \" https://models.github.ai/inference\"\n " ,
5959 " \n " ,
6060 " client = OpenAI()\n "
6161 ]
Original file line number Diff line number Diff line change 1616client = OpenAI (
1717 base_url = endpoint ,
1818 api_key = token ,
19+ default_headers = {
20+ "x-ms-useragent" : "github-models-sample" ,
21+ }
1922)
2023
2124# Call the chat completion API
4245)
4346
4447# Print the response
45- print (response .choices [0 ].message .content )
48+ print (response .choices [0 ].message .content )
You can’t perform that action at this time.
0 commit comments