Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Release v2.2.7+0.1.8+8.19 #39

Merged
merged 2 commits into from
Sep 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
# Changelog

## 2.2.7

### Internal changes:
- Allow prod/stgn auth-type for JetBrains AI service.

## 2.2.6

### Public changes
Expand Down
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 7 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
{
"name": "coqpilot",
"displayName": "СoqPilot",
"description": "An ai based completion extension for Coq interactive prover.",
"description": "An AI-based completion extension for Coq interactive prover.",
"icon": "etc/img/logo.ico",
"repository": {
"type": "git",
"url": "https://github.com/JetBrains-Research/coqpilot"
},
"publisher": "JetBrains-Research",
"version": "2.2.6",
"version": "2.2.7",
"engines": {
"vscode": "^1.82.0"
},
Expand Down Expand Up @@ -214,6 +214,11 @@
"description": "Api key to communicate with the Grazie api. Now available for JetBrains employees only.",
"default": "None"
},
"authType": {
"type": "string",
"description": "Use stgn if you are an internal JetBrains AI user and use prod otherwise.",
"default": "stgn"
},
"choices": {
"type": "number",
"description": "Number of attempts to generate proof for one hole with this model.",
Expand Down
9 changes: 9 additions & 0 deletions src/extension/coqPilot.ts
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,15 @@ export class CoqPilot {
"completion-started",
"CoqPilot has started the completion process"
);

if (editor.document.isDirty) {
showMessageToUser(
EditorMessages.saveFileBeforeCompletion,
"warning"
);
return;
}

const [completionContexts, sourceFileEnvironment, processEnvironment] =
await this.prepareForCompletions(
shouldCompleteHole,
Expand Down
3 changes: 3 additions & 0 deletions src/extension/editorMessages.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ export namespace EditorMessages {
// Builds the user-facing failure message for a single `admit` hole,
// identified by the (1-based) line number it occurs on.
export const noProofsForAdmit = (lineWithAdmitNumber: number) =>
`CoqPilot failed to find a proof for the admit at line ${lineWithAdmitNumber}.`;

// Warning shown when completion is requested on a document with unsaved
// changes (`editor.document.isDirty`); completion is aborted in that case.
export const saveFileBeforeCompletion =
"Please note that this file has not been saved after the updates. Currently CoqPilot requires the file to be saved before the completion.";

// Message shown when the selection/file contains no `admit` holes to complete.
export const noAdmitsFound =
"No admits were found in this selection/file. Make sure your coq-lsp is running correctly.";

Expand Down
24 changes: 19 additions & 5 deletions src/llm/llmServices/grazie/grazieApi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ export type GrazieChatRole = "User" | "System" | "Assistant";
export type GrazieFormattedHistory = { role: GrazieChatRole; text: string }[];

interface GrazieConfig {
gateawayUrl: string;
gateawayUrlStgn: string;
gateawayUrlProd: string;
chatUrl: string;
quotaUrl: string;
}
Expand All @@ -17,8 +18,10 @@ export class GrazieApi {
private readonly config: GrazieConfig = {
chatUrl: "v5/llm/chat/stream/v3",
quotaUrl: "v5/quota/get",
gateawayUrl:
gateawayUrlStgn:
"https://api.app.stgn.grazie.aws.intellij.net/application/",
gateawayUrlProd:
"https://api.app.prod.grazie.aws.intellij.net/application/",
};

constructor(private readonly debug: DebugWrappers) {}
Expand All @@ -28,7 +31,12 @@ export class GrazieApi {
history: GrazieFormattedHistory
): Promise<string> {
const body = this.createRequestBody(history, params);
return this.post(this.config.chatUrl, body, params.apiKey);
return this.post(
this.config.chatUrl,
body,
params.apiKey,
params.authType
);
}

async checkQuota(apiToken: string): Promise<any> {
Expand All @@ -55,7 +63,8 @@ export class GrazieApi {
private async post(
url: string,
body: string,
apiToken: string
apiToken: string,
authType: "stgn" | "prod"
): Promise<string> {
const headers = await this.createHeaders(apiToken);

Expand All @@ -65,8 +74,13 @@ export class GrazieApi {
headers: headers,
});

const baseUrl =
authType === "stgn"
? this.config.gateawayUrlStgn
: this.config.gateawayUrlProd;

const response = await this.fetchAndProcessEvents(
this.config.gateawayUrl + url,
baseUrl + url,
body,
headers
);
Expand Down
7 changes: 7 additions & 0 deletions src/llm/llmServices/grazie/grazieModelParamsResolver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,13 @@ export class GrazieModelParamsResolver
.requiredToBeConfigured()
.validateAtRuntimeOnly();

// Resolves the user-supplied `authType` setting. It has no default here
// (must be configured) and is checked at runtime to be exactly "stgn" or
// "prod" — the value selects the Grazie gateway environment.
readonly authType = this.resolveParam<"stgn" | "prod">("authType")
.requiredToBeConfigured()
.validate([
(value) => value === "stgn" || value === "prod",
"be either 'stgn' or 'prod'",
]);

readonly maxTokensToGenerate = this.resolveParam<number>(
"maxTokensToGenerate"
)
Expand Down
12 changes: 11 additions & 1 deletion src/llm/llmServices/modelParams.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ export interface OpenAiModelParams extends ModelParams {
/** Resolved parameters for a Grazie (JetBrains AI) model. */
export interface GrazieModelParams extends ModelParams {
modelName: string;
/** API key used to authenticate against the Grazie gateway. */
apiKey: string;
/** Gateway environment: "stgn" (staging URL) or "prod" (production URL). */
authType: "stgn" | "prod";
}

export interface LMStudioModelParams extends ModelParams {
Expand Down Expand Up @@ -134,9 +135,18 @@ export const grazieModelParamsSchema: JSONSchemaType<GrazieModelParams> = {
properties: {
modelName: { type: "string" },
apiKey: { type: "string" },
authType: {
type: "string",
enum: ["stgn", "prod"],
},
...(modelParamsSchema.properties as PropertiesSchema<ModelParams>),
},
required: ["modelName", "apiKey", ...modelParamsSchema.required],
required: [
"modelName",
"apiKey",
"authType",
...modelParamsSchema.required,
],
additionalProperties: false,
};

Expand Down
4 changes: 3 additions & 1 deletion src/llm/userModelParams.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ export interface OpenAiUserModelParams extends UserModelParams {
/** Raw user-provided parameters for a Grazie model, before resolution. */
export interface GrazieUserModelParams extends UserModelParams {
modelName: string;
apiKey: string;
/** Auth environment; validated later to be either "stgn" or "prod". */
authType: string;
}

export interface LMStudioUserModelParams extends UserModelParams {
Expand Down Expand Up @@ -135,9 +136,10 @@ export const grazieUserModelParamsSchema: JSONSchemaType<GrazieUserModelParams>
properties: {
modelName: { type: "string" },
apiKey: { type: "string" },
authType: { type: "string" },
...(userModelParamsSchema.properties as PropertiesSchema<UserModelParams>),
},
required: ["modelId", "modelName", "apiKey"],
required: ["modelId", "modelName", "apiKey", "authType"],
additionalProperties: false,
};

Expand Down
1 change: 1 addition & 0 deletions src/test/benchmark/report.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{}
9 changes: 7 additions & 2 deletions src/test/llm/llmServices/grazieService.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import {

suite("[LLMService] Test `GrazieService`", function () {
const apiKey = process.env.GRAZIE_API_KEY;
const authType = process.env.GRAZIE_AUTH_TYPE;
const choices = 15;
const inputFile = ["small_document.v"];

Expand All @@ -37,14 +38,15 @@ suite("[LLMService] Test `GrazieService`", function () {
};

testIf(
apiKey !== undefined,
"`GRAZIE_API_KEY` is not specified",
apiKey !== undefined && authType !== undefined,
"`GRAZIE_API_KEY` or `GRAZIE_AUTH_TYPE` is not specified",
this.title,
`Simple generation: 1 request, ${choices} choices`,
async () => {
const inputParams: GrazieUserModelParams = {
...requiredInputParamsTemplate,
apiKey: apiKey!,
authType: authType!,
};
const grazieService = new GrazieService();
await testLLMServiceCompletesAdmitFromFile(
Expand All @@ -60,6 +62,7 @@ suite("[LLMService] Test `GrazieService`", function () {
const inputParams: GrazieUserModelParams = {
...requiredInputParamsTemplate,
apiKey: "undefined",
authType: "stgn",
};
await withLLMService(new GrazieService(), async (grazieService) => {
testResolveValidCompleteParameters(grazieService, inputParams);
Expand All @@ -79,6 +82,7 @@ suite("[LLMService] Test `GrazieService`", function () {
const inputParams: GrazieUserModelParams = {
...requiredInputParamsTemplate,
apiKey: "undefined",
authType: "stgn",
maxTokensToGenerate: 6666, // should be overridden by GrazieService
};
withLLMService(new GrazieService(), async (grazieService) => {
Expand All @@ -96,6 +100,7 @@ suite("[LLMService] Test `GrazieService`", function () {
const inputParams: GrazieUserModelParams = {
...requiredInputParamsTemplate,
apiKey: "undefined",
authType: "stgn",
};
await withLLMServiceAndParams(
new GrazieService(),
Expand Down
1 change: 1 addition & 0 deletions src/test/llm/parseUserModelParams.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ suite("Parse `UserModelParams` from JSON test", () => {
...validUserModelParamsCompelete,
modelName: "gpt-model",
apiKey: "api-key",
authType: "stgn",
};
const validLMStudioUserModelParamsComplete = {
...validUserModelParamsCompelete,
Expand Down