Commit
Revert "fix(auto-edit): fix temperature value to be low for output co…
Browse files Browse the repository at this point in the history
…nsistency" (#6900)

Reverts #6853

## Test plan
CI
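
For reference, after this revert every autoedits adapter sends `temperature: 0.1` again (the reverted change had lowered it to `0.001`). Below is a minimal sketch of the request shape built by the Fireworks-compatible adapters; the local interface and the literal values are illustrative only, with field names taken from the diff in this commit:

```ts
// Minimal sketch only — mirrors the request shape visible in the diff below.
// The real FireworksCompatibleRequestParams type lives in the Cody codebase;
// this local interface and the literal values are illustrative placeholders.
interface FireworksCompatibleRequestParamsSketch {
    stream: boolean
    model: string
    temperature: number
    max_tokens: number
    response_format: { type: 'text' }
    prediction: { type: 'content'; content: string }
}

const body: FireworksCompatibleRequestParamsSketch = {
    stream: false,
    model: 'example-model', // placeholder; the adapters pass options.model
    temperature: 0.1, // restored by this revert (had been lowered to 0.001 in #6853)
    max_tokens: 256, // placeholder; the adapters compute maxTokens from the input
    response_format: { type: 'text' },
    prediction: {
        type: 'content',
        content: 'const x = 1', // the code to rewrite, as in the adapter tests
    },
}

console.log(JSON.stringify(body, null, 2))
```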
hitesh-1997 authored Jan 31, 2025
1 parent 53a8969 commit 76c2921
Showing 8 changed files with 10 additions and 10 deletions.
4 changes: 2 additions & 2 deletions vscode/src/autoedits/adapters/cody-gateway.test.ts
@@ -72,7 +72,7 @@ describe('CodyGatewayAdapter', () => {
     expect.objectContaining({
         stream: false,
         model: options.model,
-        temperature: 0.001,
+        temperature: 0.1,
         response_format: { type: 'text' },
         prediction: {
             type: 'content',
@@ -100,7 +100,7 @@ describe('CodyGatewayAdapter', () => {
     expect.objectContaining({
         stream: false,
         model: options.model,
-        temperature: 0.001,
+        temperature: 0.1,
         response_format: { type: 'text' },
         prediction: {
             type: 'content',
2 changes: 1 addition & 1 deletion vscode/src/autoedits/adapters/cody-gateway.ts
@@ -46,7 +46,7 @@ export class CodyGatewayAdapter implements AutoeditsModelAdapter {
     const body: FireworksCompatibleRequestParams = {
         stream: false,
         model: options.model,
-        temperature: 0.001,
+        temperature: 0.1,
         max_tokens: maxTokens,
         response_format: {
             type: 'text',
4 changes: 2 additions & 2 deletions vscode/src/autoedits/adapters/fireworks.test.ts
@@ -64,7 +64,7 @@ describe('FireworksAdapter', () => {
     expect.objectContaining({
         stream: false,
         model: options.model,
-        temperature: 0.001,
+        temperature: 0.1,
         max_tokens: expect.any(Number),
         response_format: { type: 'text' },
         prediction: {
@@ -92,7 +92,7 @@ describe('FireworksAdapter', () => {
     expect.objectContaining({
         stream: false,
         model: options.model,
-        temperature: 0.001,
+        temperature: 0.1,
         max_tokens: expect.any(Number),
         response_format: { type: 'text' },
         prediction: {
2 changes: 1 addition & 1 deletion vscode/src/autoedits/adapters/fireworks.ts
@@ -40,7 +40,7 @@ export class FireworksAdapter implements AutoeditsModelAdapter {
     const body: FireworksCompatibleRequestParams = {
         stream: false,
         model: options.model,
-        temperature: 0.001,
+        temperature: 0.1,
         max_tokens: maxTokens,
         response_format: {
             type: 'text',
2 changes: 1 addition & 1 deletion vscode/src/autoedits/adapters/sourcegraph-chat.test.ts
@@ -64,7 +64,7 @@ describe('SourcegraphChatAdapter', () => {
     expect(chatOptions).toMatchObject({
         model: 'anthropic/claude-2',
         maxTokensToSample: getMaxOutputTokensForAutoedits(options.codeToRewrite),
-        temperature: 0.001,
+        temperature: 0.1,
         prediction: {
             type: 'content',
             content: 'const x = 1',
2 changes: 1 addition & 1 deletion vscode/src/autoedits/adapters/sourcegraph-chat.ts
@@ -18,7 +18,7 @@ export class SourcegraphChatAdapter implements AutoeditsModelAdapter {
     {
         model: option.model,
         maxTokensToSample: maxTokens,
-        temperature: 0.001,
+        temperature: 0.1,
         prediction: {
             type: 'content',
             content: option.codeToRewrite,
2 changes: 1 addition & 1 deletion vscode/src/autoedits/adapters/sourcegraph-completions.test.ts
@@ -57,7 +57,7 @@ describe('SourcegraphCompletionsAdapter', () => {
     expect(params).toMatchObject({
         model: 'anthropic/claude-2',
         maxTokensToSample: getMaxOutputTokensForAutoedits(options.codeToRewrite),
-        temperature: 0.001,
+        temperature: 0.1,
         messages: [{ speaker: 'human', text: ps`user message` }],
         prediction: {
             type: 'content',
2 changes: 1 addition & 1 deletion vscode/src/autoedits/adapters/sourcegraph-completions.ts
@@ -28,7 +28,7 @@ export class SourcegraphCompletionsAdapter implements AutoeditsModelAdapter {
         model: option.model as ModelRefStr,
         messages,
         maxTokensToSample: maxTokens,
-        temperature: 0.001,
+        temperature: 0.1,
         prediction: {
             type: 'content',
             content: option.codeToRewrite,
