github copilot chat video (#1047)
pelikhan authored Jan 24, 2025
1 parent d63b9d0 commit f46c813
Showing 12 changed files with 37 additions and 32 deletions.
2 changes: 1 addition & 1 deletion docs/.vscode/settings.json
@@ -15,6 +15,6 @@
"/src/content/docs/**/*": "/src/assets/"
},
"genaiscript.languageChatModels": {
"client:gpt-4": "github.copilot-chat/2/gpt-4"
"github_copilot_chat:gpt-4": "github.copilot-chat/2/gpt-4"
}
}
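The `genaiscript.languageChatModels` setting maps a GenAIScript model id to the VS Code chat model it resolves to. As a hedged sketch (the prompt is illustrative and not part of this commit), a script could then request that alias directly:

```js
// Assumes the mapping above: "github_copilot_chat:gpt-4" -> "github.copilot-chat/2/gpt-4".
script({ model: "github_copilot_chat:gpt-4" })
$`Summarize the open document.` // hypothetical prompt
```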
8 changes: 6 additions & 2 deletions docs/src/content/docs/getting-started/configuration.mdx
@@ -10,6 +10,7 @@ import { FileTree } from "@astrojs/starlight/components"
import { Steps } from "@astrojs/starlight/components"
import { Tabs, TabItem } from "@astrojs/starlight/components"
import { Image } from "astro:assets"
+import { YouTube } from "astro-embed"
import LLMProviderFeatures from "../../../components/LLMProviderFeatures.astro"

import lmSrc from "../../../assets/vscode-language-models.png"
@@ -273,6 +274,8 @@ script({ model: "github:gpt-4o" })
**If you are running from a [GitHub Codespace](https://github.com/features/codespaces), the token is already configured for you.**
<YouTube id="Wya3MQRIbmE" posterQuality="high" />
<Steps>
<ol>
@@ -594,7 +597,6 @@ The resources created by Azure AI Foundry are not visible by default in the Azur
To make them visible, open [All resources](https://portal.azure.com/#browse/all), click **Manage view**
and select **Show hidden types**.


:::

<Steps>
@@ -836,7 +838,9 @@ This mode is useful to run your scripts without having a separate LLM provider o
and have additional limitations and rate limiting defined by the GitHub Copilot platform.

There is no configuration needed as long as you have GitHub Copilot installed and configured in Visual Studio Code.
-You can force using this model by using `client:*` as a model name.
+You can force using this model by using `github_copilot_chat:*` as a model name.

<YouTube id="LRrVMiZgWJg" posterQuality="high" />

<Steps>

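The documentation above notes that the provider can be forced with a `github_copilot_chat:*` model name. A minimal sketch, mirroring the sample script updated later in this commit (`packages/sample/genaisrc/poem-copilot-chat.genai.mts`):

```js
// Ask for whichever GitHub Copilot Chat model VS Code exposes; no system prompt.
script({ model: "github_copilot_chat:*", system: [] })
$`Write a fibonacci in python.`
```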
3 changes: 2 additions & 1 deletion packages/cli/src/cli.ts
@@ -38,6 +38,7 @@ import {
OPENAI_RETRY_DEFAULT_DEFAULT,
OPENAI_MAX_RETRY_COUNT,
MODEL_PROVIDERS,
+MODEL_PROVIDER_GITHUB_COPILOT_CHAT,
} from "../../core/src/constants" // Core constants
import {
errorMessage,
@@ -509,7 +510,7 @@ export async function cli() {
"Preferred LLM provider aliases"
).choices(
MODEL_PROVIDERS.filter(
-({ id, aliases }) => id !== "client"
+({ id }) => id !== MODEL_PROVIDER_GITHUB_COPILOT_CHAT
).map(({ id }) => id)
)
)
7 changes: 4 additions & 3 deletions packages/cli/src/server.ts
@@ -8,7 +8,7 @@ import {
TRACE_CHUNK,
USER_CANCELLED_ERROR_CODE,
UNHANDLED_ERROR_CODE,
-MODEL_PROVIDER_CLIENT,
+MODEL_PROVIDER_GITHUB_COPILOT_CHAT,
} from "../../core/src/constants"
import { isCancelError, serializeError } from "../../core/src/error"
import { host, runtimeHost } from "../../core/src/host"
@@ -203,7 +203,7 @@ export async function startServer(options: {

// Configures the client language model with a completer function.
runtimeHost.clientLanguageModel = Object.freeze<LanguageModel>({
-id: MODEL_PROVIDER_CLIENT,
+id: MODEL_PROVIDER_GITHUB_COPILOT_CHAT,
completer: async (
req: CreateChatCompletionRequest,
connection: LanguageModelConfiguration,
@@ -212,7 +212,8 @@
): Promise<ChatCompletionResponse> => {
const { messages, model } = req
const { partialCb, inner } = options
-if (!wss.clients?.size) throw new Error("no llm clients connected")
+if (!wss.clients?.size)
+    throw new Error("GitHub Copilot Chat Models not connected")

return new Promise<ChatCompletionResponse>((resolve, reject) => {
let responseSoFar: string = ""
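The completer registered above forwards chat requests to whatever client is connected over the WebSocket server (normally the VS Code extension), which is why it errors when no clients are connected. A minimal sketch of that proxy pattern, assuming the `ws` package; the names and message shape are illustrative, not the actual GenAIScript protocol:

```ts
import { WebSocketServer } from "ws"

// Illustrative completer factory: forward a chat request to the first
// connected client and resolve with whatever text it sends back.
function createClientCompleter(wss: WebSocketServer) {
    return (model: string, messages: { role: string; content: string }[]) =>
        new Promise<string>((resolve, reject) => {
            if (!wss.clients.size)
                return reject(new Error("GitHub Copilot Chat Models not connected"))
            const [client] = wss.clients
            // hypothetical message shape; the real protocol lives in server.ts/client.ts
            client.send(JSON.stringify({ type: "chat.start", model, messages }))
            client.once("message", (data) => resolve(data.toString()))
        })
}
```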
9 changes: 6 additions & 3 deletions packages/core/src/connection.ts
@@ -9,7 +9,7 @@ import {
MODEL_PROVIDER_ANTHROPIC,
MODEL_PROVIDER_AZURE_OPENAI,
MODEL_PROVIDER_AZURE_SERVERLESS_MODELS,
-MODEL_PROVIDER_CLIENT,
+MODEL_PROVIDER_GITHUB_COPILOT_CHAT,
MODEL_PROVIDER_GITHUB,
MODEL_PROVIDER_LITELLM,
MODEL_PROVIDER_LLAMAFILE,
@@ -538,12 +538,15 @@ export async function parseTokenFromEnv(
}
}

-if (provider === MODEL_PROVIDER_CLIENT && runtimeHost.clientLanguageModel) {
+if (
+    provider === MODEL_PROVIDER_GITHUB_COPILOT_CHAT &&
+    runtimeHost.clientLanguageModel
+) {
return {
provider,
model,
base: undefined,
token: "client",
token: MODEL_PROVIDER_GITHUB_COPILOT_CHAT,
}
}

2 changes: 1 addition & 1 deletion packages/core/src/constants.ts
@@ -157,7 +157,7 @@ export const MODEL_PROVIDER_OLLAMA = "ollama"
export const MODEL_PROVIDER_LLAMAFILE = "llamafile"
export const MODEL_PROVIDER_LITELLM = "litellm"
export const MODEL_PROVIDER_AICI = "aici"
-export const MODEL_PROVIDER_CLIENT = "client"
+export const MODEL_PROVIDER_GITHUB_COPILOT_CHAT = "github_copilot_chat"
export const MODEL_PROVIDER_ANTHROPIC = "anthropic"
export const MODEL_PROVIDER_ANTHROPIC_BEDROCK = "anthropic_bedrock"
export const MODEL_PROVIDER_HUGGINGFACE = "huggingface"
4 changes: 2 additions & 2 deletions packages/core/src/llms.json
@@ -218,8 +218,8 @@
}
},
{
"id": "client",
"detail": "GitHub Copilot Chat Modes",
"id": "github_copilot_chat",
"detail": "GitHub Copilot Chat Models",
"tools": false,
"prediction": false,
"tokenless": true,
6 changes: 3 additions & 3 deletions packages/core/src/lm.ts
@@ -5,7 +5,7 @@ import {
MODEL_PROVIDER_AICI,
MODEL_PROVIDER_ANTHROPIC,
MODEL_PROVIDER_ANTHROPIC_BEDROCK,
-MODEL_PROVIDER_CLIENT,
+MODEL_PROVIDER_GITHUB_COPILOT_CHAT,
MODEL_PROVIDER_GITHUB,
MODEL_PROVIDER_LMSTUDIO,
MODEL_PROVIDER_OLLAMA,
@@ -22,9 +22,9 @@ import { LMStudioModel } from "./lmstudio"
import { WhiserAsrModel } from "./whisperasr"

export function resolveLanguageModel(provider: string): LanguageModel {
-if (provider === MODEL_PROVIDER_CLIENT) {
+if (provider === MODEL_PROVIDER_GITHUB_COPILOT_CHAT) {
const m = runtimeHost.clientLanguageModel
if (!m) throw new Error("Client language model not available")
if (!m) throw new Error("Github Copilot Chat Models not available")
return m
}
if (provider === MODEL_PROVIDER_GITHUB) return GitHubModel
8 changes: 6 additions & 2 deletions packages/core/src/server/client.ts
@@ -52,7 +52,11 @@ export class VsCodeClient extends WebSocketClient {
}
> = {}

-constructor(readonly url: string, readonly externalUrl: string, readonly cspUrl: string) {
+constructor(
+    readonly url: string,
+    readonly externalUrl: string,
+    readonly cspUrl: string
+) {
super(url)
this.configure()
}
@@ -118,7 +122,7 @@ export class VsCodeClient extends WebSocketClient {
case "chat.start": {
if (!this.chatRequest)
throw new Error(
"client language model not supported"
"GitHub Copilot Chat Models not supported"
)
await this.chatRequest(cev, (chunk) => {
this.queue<ChatChunk>({
15 changes: 3 additions & 12 deletions packages/sample/.vscode/settings.json
@@ -1,14 +1,5 @@
{
"cSpell.words": [
"frontmatter",
"genaiscript",
"openai",
"outputfilename"
],
"cSpell.words": ["frontmatter", "genaiscript", "openai", "outputfilename"],
"genaiscript.cli.path": "../cli/built/genaiscript.cjs",
"genaiscript.languageChatModels": {
"client:gpt-4": "gpt-4",
"client:gpt-4o": "gpt-4o",
"client:*": "github.copilot-chat/2/gpt-4"
}
}
"genaiscript.languageChatModels": {}
}
4 changes: 2 additions & 2 deletions packages/sample/genaisrc/poem-copilot-chat.genai.mts
@@ -1,2 +1,2 @@
script({ model: "client:*", system: []})
$`Write a fibonacci in python.`
script({ model: "github_copilot_chat:*", system: [] })
$`Write a fibonacci in python.`
1 change: 1 addition & 0 deletions packages/sample/src/video/.gitignore
@@ -1 +1,2 @@
*.local.mp4
+*.local.mkv
